code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package poker.core.handanalyzer
import org.scalatest.FunSuite
import poker.TestHelpers._
import poker.core.{HandStatus, HandType, Kickers}
final class HighCardAnalyzerTest extends FunSuite {
  test("analyze") {
    val analyzer = new HighCardAnalyzer
    // The analyzer always reports HighCard; each input hand is expected to
    // produce a status whose kickers are built from the hand listed on the
    // right (mixed-order variants map to their sorted counterparts — see
    // TestHelpers for the fixture definitions).
    val cases = Seq(
      royalFlushHand         -> royalFlushHand,
      royalFlushHandMixed    -> royalFlushHand,
      straightFlushHand      -> straightFlushHand,
      straightFlushHandMixed -> straightFlushHand
    )
    cases.foreach { case (input, kickerHand) =>
      assert(analyzer.analyze(input) === HandStatus(HandType.HighCard, Kickers(kickerHand)))
    }
    // A genuine high-card hand matches the pre-built expected status.
    assert(analyzer.analyze(highCardHand) === highCardStatus)
  }
}
| kyuksel/poker | src/test/scala/poker/core/handanalyzer/HighCardAnalyzerTest.scala | Scala | mit | 808 |
package me.reminisce.gameboard.questions
import java.util.concurrent.TimeUnit
import akka.testkit.{TestActorRef, TestProbe}
import me.reminisce.database.MongoCollections
import me.reminisce.database.MongoDBEntities.{FBLocation, FBPlace, FBPost}
import me.reminisce.database.MongoDBFormats._
import me.reminisce.gameboard.board.GameboardEntities.{GeolocationQuestion, TextPostSubject}
import me.reminisce.gameboard.questions.QuestionGenerator.{CreateQuestion, NotEnoughData}
import org.scalatest.DoNotDiscover
import reactivemongo.api.collections.bson.BSONCollection
import reactivemongo.api.commands.WriteConcern
import scala.concurrent.Await
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
@DoNotDiscover
@DoNotDiscover
class WhichCoordinatesWereYouAtSpec extends QuestionTester("WhichCoordinatesWereYouAtSpec") {

  val userId = "TestUserWhichCoordinatesWereYouAt"

  "WhichCoordinatesWereYouAt" must {
    "not create question when there is no post." in {
      testWithDb {
        db =>
          // Nothing is inserted for this id, so the generator must report a data shortage.
          val itemId = "This post does not exist"
          val actorRef = TestActorRef(WhichCoordinatesWereYouAt.props(db))
          val testProbe = TestProbe()
          testProbe.send(actorRef, CreateQuestion(userId, itemId))
          testProbe.expectMsg(NotEnoughData(s"Post has no place or post does not exist : $itemId"))
      }
    }

    "not create question when there is no location." in {
      testWithDb {
        db =>
          val postsCollection = db[BSONCollection](MongoCollections.fbPosts)
          // Fix: this id was the copy-pasted "This post does not exist", which was
          // misleading — here the post IS inserted, it just has no place/location.
          val itemId = "Post without location"
          val fbPost = FBPost(postId = itemId, userId = userId)
          Await.result(postsCollection.update(fbPost, fbPost, WriteConcern.Acknowledged, upsert = true), Duration(10, TimeUnit.SECONDS))
          val actorRef = TestActorRef(WhichCoordinatesWereYouAt.props(db))
          val testProbe = TestProbe()
          testProbe.send(actorRef, CreateQuestion(userId, itemId))
          testProbe.expectMsg(NotEnoughData(s"Post has no place or post does not exist : $itemId"))
      }
    }

    "create a valid question when the post and place is there." in {
      testWithDb {
        db =>
          val postsCollection = db[BSONCollection](MongoCollections.fbPosts)
          // Deliberately shadows the outer userId with a local fixture user.
          val userId = "TestUser"
          val itemId = "PostId"
          val postMessage = "Awesome Message"
          val latitude = 6.2
          val longitude = 45.13
          val location = FBLocation(None, None, latitude = latitude, longitude = longitude, None, None)
          val place = FBPlace(None, name = Some("SuperPlace"), location = location, None)
          val fbPost = FBPost(postId = itemId, userId = userId, message = Some(postMessage), place = Some(place))
          Await.result(postsCollection.update(fbPost, fbPost, WriteConcern.Acknowledged, upsert = true), Duration(10, TimeUnit.SECONDS))
          val actorRef = TestActorRef(WhichCoordinatesWereYouAt.props(db))
          val testProbe = TestProbe()
          testProbe.send(actorRef, CreateQuestion(userId, itemId))
          // The generated question must echo the post text and the stored coordinates.
          checkFinished[GeolocationQuestion](testProbe) {
            question =>
              checkSubject[TextPostSubject](question.subject) {
                subject =>
                  val answer = question.answer
                  assert(subject.text == fbPost.message.getOrElse(""))
                  assert(answer.latitude == latitude)
                  assert(answer.longitude == longitude)
              }
          }
      }
    }
  }
} | reminisceme/game-creator | src/test/scala/me/reminisce/gameboard/questions/WhichCoordinatesWereYouAtSpec.scala | Scala | apache-2.0 | 3,528 |
package dog
package autodoc
import httpz._
import argonaut._, Argonaut._
object AutodocTest extends DogAutodoc with Assert {

  // Wraps a string's bytes for the fake HTTP layer.
  // NOTE(review): uses the platform default charset via getBytes() — confirm
  // the fixtures never rely on non-ASCII content.
  def str(value: String) = new ByteArray(value.getBytes())

  // Fake interpreter that always replies with the given body, status and headers.
  def interpreter(value: String, status: Int, headers: Map[String, List[String]] = Map()) =
    FakeInterpreter(str(value), status, headers).sequential.empty

  // Runs an Autodoc action against the fake interpreter.
  // NOTE(review): the assertion is hard-coded to 200 and does not use the
  // `status` argument passed to the fake response — all current callers pass 200.
  def run[A: Show](nel: ActionNel[Autodoc[A]], value: String, status: Int, headers: Map[String, List[String]] = Map()) =
    Autodoc[A](interpreter(value, status, headers), nel) { res =>
      equal(200, res.status)
    }

  // Plain GET request reused by several cases below.
  val getApi = Autodoc.string(Request(
    method = "GET",
    url = "http://localhost/api"
  )).leftMap(Error.http).nel

  // Markdown rendering of a simple GET. The expected strings are triple-quoted
  // WITHOUT stripMargin, so their content lines must stay flush-left.
  val `simple GET api` = {
    val expected = """## GET /api
#### Request
```
GET /api
```
#### Response
```
200
"{}"
```"""
    for {
      doc <- run[String](getApi, "{}", 200)
      _ <-
        assert
          .equal(expected, doc.generate("GET /api", Autodoc.Markdown()))
          .lift
    } yield doc
  }

  // Same request, but with a human-readable description attached.
  val getApiWithDescription = Autodoc.string(Request(
    method = "GET",
    url = "http://localhost/api"
  ), "test api").leftMap(Error.http).nel

  // The description must appear right under the heading.
  val `include description` = {
    val expected = """## GET /api
test api
#### Request
```
GET /api
```
#### Response
```
200
"{}"
```"""
    for {
      doc <- Autodoc[String](interpreter("{}", 200), getApiWithDescription) { res =>
        equal(200, res.status)
      }
      _ <-
        assert
          .equal(expected, doc.generate("GET /api", Autodoc.Markdown()))
          .lift
    } yield doc
  }

  // JSON fixture; JsonToString makes toString produce the JSON encoding.
  case class Person(name: String, age: Int) extends JsonToString[Person]
  implicit val personCodec: CodecJson[Person] = casecodec2(Person.apply, Person.unapply)("name", "age")

  val getPerson = Autodoc.json[Person](Request(
    method = "GET",
    url = "http://localhost/person/1"
  )).leftMap(Error.http).nel

  // JSON responses are pretty-printed in the Response section.
  val `get json` = {
    val expected = """## GET /person/:id
#### Request
```
GET /person/1
```
#### Response
```
200
{
  "name" : "Alice",
  "age" : 17
}
```"""
    for {
      doc <- run[Person](getPerson, Person("Alice", 17).toString, 200)
      _ <-
        assert
          .equal(expected, doc.generate("GET /person/:id", Autodoc.Markdown()))
          .lift
    } yield doc
  }

  val getApiWithHeader = Autodoc.string(Request(
    method = "GET",
    url = "http://localhost/api",
    headers = Map("Content-Type" -> "text/plain")
  )).leftMap(Error.http).nel

  // Request headers render under the request line; response headers render
  // above the body with list values joined by "; ".
  val `GET api with header` = {
    val expected = """## GET /api
#### Request
```
GET /api
Content-Type: text/plain
```
#### Response
```
200
X-XSS-Protection: 1; mode=block
"{}"
```"""
    for {
      doc <- run[String](getApiWithHeader, "{}", 200,
        Map("X-XSS-Protection" -> List("1", "mode=block"))
      )
      _ <-
        assert
          .equal(expected, doc.generate("GET /api", Autodoc.Markdown()))
          .lift
    } yield doc
  }

  val queryPerson = Autodoc.json[Person](Request(
    method = "GET",
    url = "http://localhost/persons",
    params = Map("foo" -> "bar", "a" -> "b")
  )).leftMap(Error.http).nel

  // Query parameters are appended to the rendered request line.
  val `query json` = {
    val expected = """## GET /persons?foo=bar&a=b
#### Request
```
GET /persons?foo=bar&a=b
```
#### Response
```
200
{
  "name" : "Alice",
  "age" : 17
}
```"""
    for {
      doc <- run[Person](queryPerson, Person("Alice", 17).toString, 200)
      _ <-
        assert
          .equal(expected, doc.generate("GET /persons?foo=bar&a=b", Autodoc.Markdown()))
          .lift
    } yield doc
  }

  // HTML backend: same content as Markdown but with h2/h4/pre-code markup.
  val `generate simple html` = {
    val expected = """<h2>GET /api</h2>
<h4>Request</h4>
<pre><code>GET /api
</code></pre>
<h4>Response</h4>
<pre><code>200
"{}"
</code></pre>"""
    for {
      doc <- run[String](getApi, "{}", 200)
      _ <-
        assert
          .equal(expected, doc.generate("GET /api", Autodoc.Html()))
          .lift
    } yield doc
  }
}
| scala-kennel/dog-autodoc | autodoc/src/test/scala/dog/autodoc/AutodocTest.scala | Scala | mit | 3,937 |
package circumflex
package web
import core._
import collection.mutable.Map
import collection.Iterator
import collection.JavaConversions._
import java.util.{Enumeration => JEnumeration}
import javax.servlet.http.{HttpSession => ServletSession}
import java.io.Serializable
/**
 * Mutable `Map` view over the current request's servlet session.
 *
 * All operations degrade gracefully when there is no current request or no
 * session has been created yet: reads return `None`/empty and writes are no-ops
 * (except `+=`, which creates the session on demand).
 */
class HttpSession
    extends Map[String, Serializable]
    with KeyValueCoercion {

  // Underlying servlet session, if one already exists for the current request.
  // `getSession(false)` never creates a session as a side effect.
  def rawSession: Option[ServletSession] = requestOption.flatMap { req =>
    val s = req.raw.getSession(false)
    if (s == null) None
    else Some(s)
  }

  // Session id, when a session exists.
  def id: Option[String] = rawSession.map(_.getId)

  // Adds a binding; note `getSession(true)` lazily creates the session here.
  def +=(kv: (String, Serializable)): this.type = {
    requestOption.map(_.raw.getSession(true).setAttribute(kv._1, kv._2))
    this
  }

  // Removes a binding if a session exists; otherwise a no-op.
  def -=(key: String): this.type = {
    rawSession.map(_.removeAttribute(key))
    this
  }

  // Iterates session attributes, keeping only Serializable values
  // (non-Serializable attributes set by other code are silently skipped).
  // Relies on the JavaConversions import for JEnumeration.flatMap.
  def iterator: Iterator[(String, Serializable)] = {
    rawSession.map(s =>
      s.getAttributeNames
        .asInstanceOf[JEnumeration[String]]
        .flatMap(k => s.getAttribute(k) match {
          case s: Serializable => Some(k -> s)
          case _ => None
        }).toIterator).getOrElse(Iterator.empty)
  }

  // Looks up a single attribute; any2option maps a null attribute to None.
  def get(key: String): Option[Serializable] =
    rawSession.flatMap(sess => any2option(sess.getAttribute(key))) match {
      case Some(s: Serializable) => Some(s)
      case _ => None
    }

  // Invalidates the underlying servlet session, if any.
  def invalidate(): this.type = {
    rawSession.map(_.invalidate())
    this
  }
}
| inca/circumflex | web/src/main/scala/session.scala | Scala | bsd-2-clause | 1,399 |
/**
* Copyright (c) 2013 Saddle Development Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.saddle.stats
import org.saddle._
import Series.Vec2ExpandingStats
/**
* Expanding statistical methods made available on numeric Series objects via enrichment.
* These methods scan over the Series and compute values over a specified historical
* window.
*/
/**
 * Expanding statistical methods made available on numeric Series objects via enrichment.
 * These methods scan over the Series and compute values over a specified historical
 * window.
 *
 * Each method delegates to the Vec2ExpandingStats instance for the element type
 * and re-attaches the original index unchanged.
 */
class SeriesExpandingStats[X: ST: ORD, T: Vec2ExpandingStats: ST](s: Series[X, T]) {
  // Cached evidence used by all methods below to enrich the underlying Vec.
  protected val ev = implicitly[Vec2ExpandingStats[T]]

  /**
   * Cumulative sum; each successive element of the output is the cumulative
   * sum from the initial element, ignoring NAs.
   */
  def cumSum: Series[X, T] = Series(ev(s.values).cumSum, s.index)

  /**
   * Cumulative count; each successive element of the output is the cumulative
   * count from the initial element, ignoring NAs.
   */
  def cumCount: Series[X, Int] = Series(ev(s.values).cumCount, s.index)

  /**
   * Cumulative min; each successive element of the output is the cumulative
   * min from the initial element, ignoring NAs.
   */
  def cumMin: Series[X, T] = Series(ev(s.values).cumMin, s.index)

  /**
   * Cumulative max; each successive element of the output is the cumulative
   * max from the initial element, ignoring NAs.
   */
  def cumMax: Series[X, T] = Series(ev(s.values).cumMax, s.index)

  /**
   * Cumulative product; each successive element of the output is the cumulative
   * product from the initial element, ignoring NAs.
   */
  def cumProd: Series[X, T] = Series(ev(s.values).cumProd, s.index)
}
object SeriesExpandingStats {
  /**
   * Factory method for creating an enriched Series object containing statistical functions;
   * usually created implicitly.
   *
   * @param s Series to wrap
   * @tparam X Type of index
   * @tparam T Type of elements
   */
  def apply[X: ST: ORD, T: Vec2ExpandingStats: ST](s: Series[X, T]) =
    new SeriesExpandingStats(s)
} | jyt109/saddle | saddle-core/src/main/scala/org/saddle/stats/SeriesExpandingStats.scala | Scala | apache-2.0 | 2,442 |
package scala.scalanative
package util
import scala.language.implicitConversions
/** Type class rendering a value of `T` as a structured [[Show.Result]] tree. */
trait Show[T] { def apply(t: T): Show.Result }
object Show {
  /**
   * Structured pretty-printing result. `toString` renders the tree to text,
   * tracking the current indentation depth while walking it.
   */
  sealed abstract class Result {
    override def toString = {
      val sb = new StringBuilder
      var indentation = 0
      // Emit a line break plus the current indentation, then render res.
      // NOTE(review): the literal appears as "\\n" in this copy of the source —
      // upstream intent is a newline; confirm against the original file.
      def nl(res: Result) = {
        sb.append("\\n")
        sb.append(" " * indentation)
        loop(res)
      }
      // Recursive renderer over the Result ADT.
      def loop(result: Result): Unit = result match {
        case None => ()
        case Str(value) => sb.append(value)
        case Sequence(xs @ _ *) => xs.foreach(loop)
        // Empty repeats render nothing at all (no pre/post either).
        case Repeat(xs, _, _, _) if xs.isEmpty => ()
        // Non-empty: pre, elements joined by sep, then post.
        case Repeat(xs, sep, pre, post) =>
          loop(pre)
          xs.init.foreach { x =>
            loop(x)
            loop(sep)
          }
          loop(xs.last)
          loop(post)
        // Indent/Unindent adjust depth, emit a fresh line, then restore depth.
        case Indent(res, n) =>
          indentation += n
          nl(res)
          indentation -= n
        case Unindent(res, n) =>
          indentation -= n
          nl(res)
          indentation += n
        case Newline(res) =>
          nl(res)
        // Interleave literal parts with rendered args (like s-interpolation).
        case Interpolated(parts, args) =>
          parts.init.zip(args).foreach {
            case (part, arg) =>
              sb.append(part)
              loop(arg)
          }
          sb.append(parts.last)
      }
      loop(this)
      sb.toString
    }
  }

  // Result ADT cases.
  final case object None extends Result
  final case class Str(value: String) extends Result
  final case class Sequence(xs: Result*) extends Result
  final case class Repeat(xs: Seq[Result],
                          sep: Result = None,
                          pre: Result = None,
                          post: Result = None)
      extends Result
  final case class Indent(res: Result, n: Int = 1) extends Result
  final case class Unindent(res: Result, n: Int = 1) extends Result
  final case class Newline(res: Result) extends Result
  final case class Interpolated(parts: Seq[String], args: Seq[Result])
      extends Result

  // Builds a Show instance from a plain rendering function.
  def apply[T](f: T => Result): Show[T] =
    new Show[T] { def apply(input: T): Result = f(input) }

  // Default instances: Results pass through, primitives render via toString.
  implicit def showResult[R <: Result]: Show[R] = apply(identity)
  implicit def showString[T <: String]: Show[T] = apply(Show.Str(_))
  implicit def showByte[T <: Byte]: Show[T] = apply(i => Show.Str(i.toString))
  implicit def showShort[T <: Short]: Show[T] =
    apply(i => Show.Str(i.toString))
  implicit def showInt[T <: Int]: Show[T] = apply(i => Show.Str(i.toString))
  implicit def showLong[T <: Long]: Show[T] = apply(i => Show.Str(i.toString))
  implicit def showFloat[T <: Float]: Show[T] =
    apply(f => Show.Str(f.toString))
  implicit def showDouble[T <: Double]: Show[T] =
    apply(f => Show.Str(f.toString))

  // Implicit lifting of values (and Seqs of values) into Results.
  implicit def toResult[T: Show](t: T): Result =
    implicitly[Show[T]].apply(t)
  implicit def seqToResult[T: Show](ts: Seq[T]): Seq[Result] =
    ts.map { t =>
      implicitly[Show[T]].apply(t)
    }
}
| cedricviaccoz/scala-native | util/src/main/scala/scala/scalanative/util/Show.scala | Scala | bsd-3-clause | 3,012 |
package org.jetbrains.plugins.scala.lang.typeInference
import org.jetbrains.plugins.scala.base.ScalaLightCodeInsightFixtureTestAdapter
/**
 * Conformance checks for singleton / path-dependent types. Each test compiles
 * a fixture tracked by a YouTrack issue (SCL-xxxxx) and asserts it produces no
 * highlighting errors. Fixture code must stay verbatim — it IS the test input.
 */
class SingletonTypesConformanceTest extends ScalaLightCodeInsightFixtureTestAdapter {

  // Aux-pattern with a path-dependent Out member.
  def testSCL11192(): Unit = checkTextHasNoErrors(
    """
      |trait HList
      |trait Second[L <: HList] {
      | type Out
      | def apply(value: L): Out
      |}
      |
      |object Second {
      | type Aux[L <: HList, O] = Second[L] {type Out = O}
      | def apply[L <: HList](implicit inst: Second[L]): Aux[L, inst.Out] = inst
      |}
    """.stripMargin
  )

  // Refinement type used as an explicit type argument.
  def testSCL11285(): Unit = {
    checkTextHasNoErrors(
      """trait Input {
        | type Value
        |}
        |
        |def requestInput[Res](req: Input {type Value = Res}): Res = ???
        |
        |def test(req: Input): Unit =
        | requestInput[req.Value](req)
      """.stripMargin)
  }

  // Path-dependent type member passed as a type argument to a refinement.
  def testSCL13607(): Unit = {
    checkTextHasNoErrors(
      """
        |trait Foo {
        | type Bar
        |}
        |
        |def apply[A](foo: Foo { type Bar = A }): Unit = ()
        |
        |def test(f: Foo): Unit = apply[f.Bar](f)
      """.stripMargin)
  }

  // `this`-prefixed type member in a refinement of the enclosing trait.
  def testSCL13797(): Unit = {
    checkTextHasNoErrors(
      """
        |trait Test {
        | type X
        | def self: Test { type X = Test.this.X } = this
        |}
      """.stripMargin)
  }

  // Singleton type of a case-object argument flowing into a method signature.
  def testSCL7017(): Unit =
    checkTextHasNoErrors(
      """
        |class SCL7017 {
        | abstract class A
        | case object B extends A
        | case object C extends A
        | case class X[T <: A](o: T, n: Int) {
        | def +(that: X[o.type]): Int = 1
        | }
        | val i: Int = X(B, 1) + X(B, 2)
        |}
      """.stripMargin.trim
    )

  // Type alias for a stable value's singleton type must conform to that value.
  def testSCL18169(): Unit = checkTextHasNoErrors(
    """object DemonstrateTypeAliasError {
      | val s: String = "7"
      | type AliasForString = s.type
      | val t: AliasForString = s // required AliasForString, found String
      |}""".stripMargin
  )
}
| JetBrains/intellij-scala | scala/scala-impl/test/org/jetbrains/plugins/scala/lang/typeInference/SingletonTypesConformanceTest.scala | Scala | apache-2.0 | 2,046 |
/*
* Copyright 2014-2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.atlas.lwcapi
import akka.http.scaladsl.model.ws.Message
import akka.http.scaladsl.testkit.RouteTestTimeout
import akka.http.scaladsl.testkit.WSProbe
import com.netflix.atlas.akka.DiagnosticMessage
import com.netflix.atlas.akka.RequestHandler
import com.netflix.atlas.akka.testkit.MUnitRouteSuite
import com.netflix.atlas.eval.model.LwcDatapoint
import com.netflix.atlas.eval.model.LwcExpression
import com.netflix.atlas.eval.model.LwcHeartbeat
import com.netflix.atlas.eval.model.LwcMessages
import com.netflix.atlas.eval.model.LwcSubscription
import com.netflix.atlas.json.Json
import com.netflix.spectator.api.NoopRegistry
import com.typesafe.config.ConfigFactory
/**
 * Route tests for the LWC subscribe websocket endpoints. Exercises both the
 * v1 (text/JSON) and v2 (binary batch) protocols: subscribe to an expression,
 * observe the resulting subscription messages, then push a datapoint through
 * the subscription manager and verify the client receives it.
 */
class SubscribeApiSuite extends MUnitRouteSuite {

  import scala.concurrent.duration._

  // Websocket handshake plus actor setup can be slow; extend the default timeout.
  private implicit val routeTestTimeout = RouteTestTimeout(5.second)

  private val config = ConfigFactory.load()
  private val sm = new StreamSubscriptionManager
  private val splitter = new ExpressionSplitter(config)
  private val api = new SubscribeApi(config, new NoopRegistry, sm, splitter, materializer)
  private val routes = RequestHandler.standardOptions(api.routes)

  // Start each test from a clean subscription state.
  override def beforeEach(context: BeforeEach): Unit = {
    sm.clear()
  }

  //
  // Subscribe websocket
  //

  // v1 protocol: one LWC message per text frame.
  private def parse(msg: Message): AnyRef = {
    LwcMessages.parse(msg.asTextMessage.getStrictText)
  }

  test("subscribe websocket") {
    val client = WSProbe()
    WS("/api/v1/subscribe/111", client.flow) ~> routes ~> check {
      assert(isWebSocketUpgrade)

      // Send list of expressions to subscribe to
      val exprs = List(LwcExpression("name,cpu,:eq,:avg", 60000))
      client.sendMessage(Json.encode(exprs))

      // Look for subscription messages, one for sum and one for count
      // (the :avg expression is split into those two data expressions).
      var subscriptions = List.empty[LwcSubscription]
      while (subscriptions.size < 2) {
        parse(client.expectMessage()) match {
          case _: DiagnosticMessage =>
          case sub: LwcSubscription => subscriptions = sub :: subscriptions
          case h: LwcHeartbeat => assertEquals(h.step, 60000L)
          case v => throw new MatchError(v)
        }
      }

      // Verify subscription is in the manager, push a message to the queue check that it
      // is received by the client
      assertEquals(subscriptions.flatMap(_.metrics).size, 2)
      subscriptions.flatMap(_.metrics).foreach { m =>
        val tags = Map("name" -> "cpu")
        val datapoint = LwcDatapoint(60000, m.id, tags, 42.0)
        val handlers = sm.handlersForSubscription(m.id)
        assertEquals(handlers.size, 1)
        handlers.head.offer(Seq(datapoint))
        assertEquals(parse(client.expectMessage()), datapoint)
      }
    }
  }

  // v2 protocol: a batch of LWC messages per binary frame.
  private def parseBatch(msg: Message): List[AnyRef] = {
    LwcMessages.parseBatch(msg.asBinaryMessage.getStrictData)
  }

  test("subscribe websocket V2") {
    val client = WSProbe()
    WS("/api/v2/subscribe/222", client.flow) ~> routes ~> check {
      assert(isWebSocketUpgrade)

      // Send list of expressions to subscribe to
      val exprs = List(LwcExpression("name,disk,:eq,:avg", 60000))
      client.sendMessage(LwcMessages.encodeBatch(exprs))

      // Look for subscription messages, one for sum and one for count
      var subscriptions = List.empty[LwcSubscription]
      while (subscriptions.size < 2) {
        parseBatch(client.expectMessage()).foreach {
          case _: DiagnosticMessage =>
          case sub: LwcSubscription => subscriptions = sub :: subscriptions
          case h: LwcHeartbeat => assertEquals(h.step, 60000L)
          case v => throw new MatchError(v)
        }
      }

      // Verify subscription is in the manager, push a message to the queue check that it
      // is received by the client
      assertEquals(subscriptions.flatMap(_.metrics).size, 2)
      subscriptions.flatMap(_.metrics).foreach { m =>
        val tags = Map("name" -> "disk")
        val datapoint = LwcDatapoint(60000, m.id, tags, 42.0)
        val handlers = sm.handlersForSubscription(m.id)
        assertEquals(handlers.size, 1)
        handlers.head.offer(Seq(datapoint))
        assertEquals(parseBatch(client.expectMessage()), List(datapoint))
      }
    }
  }
}
| brharrington/atlas | atlas-lwcapi/src/test/scala/com/netflix/atlas/lwcapi/SubscribeApiSuite.scala | Scala | apache-2.0 | 4,857 |
import scala.reflect.runtime.universe._
import scala.tools.reflect.Eval
// Top-level alias that is imported (renamed to X) inside the reified block below.
object O {
  type A = Unit
}
object Test extends App {
  // Reifies a block using renamed type imports: the external O.A (renamed to X)
  // and the reifee-local P.B (renamed to Y). Evaluating the tree verifies the
  // toolbox handles renamed imports of types local to the reifee.
  // Fixture code inside reify must stay verbatim — it IS the test input.
  val expr = reify {
    import O.{A => X}
    val a: X = ()
    object P {
      type B = Unit
    }
    import P.{B => Y}
    val b: Y = ()
  }
  println(expr.eval)
} | felixmulder/scala | test/files/run/reify_renamed_type_local_to_reifee.scala | Scala | bsd-3-clause | 302 |
package com.aristocrat.mandrill.requests.Exports
import com.aristocrat.mandrill.requests.MandrillRequest
import org.joda.time.DateTime
/**
 * Request payload for the Mandrill exports "activity" API (package `Exports`).
 *
 * @param key         API key
 * @param notifyEmail optional address to notify when the export completes
 * @param dateFrom    start of the date range to export
 * @param dateTo      end of the date range to export
 * @param tags        restrict the export to these tags
 * @param senders     restrict the export to these sender addresses
 * @param states      restrict the export to these message states
 * @param apiKeys     restrict the export to these API keys
 */
case class Activity(
    key: String,
    notifyEmail: Option[String] = None,
    dateFrom: Option[DateTime] = None,
    dateTo: Option[DateTime] = None,
    tags: Seq[String] = Seq(),
    senders: Seq[String] = Seq(),
    states: Seq[String] = Seq(),
    apiKeys: Seq[String] = Seq()) extends MandrillRequest
| aristocratic/mandrill | src/main/scala/com/aristocrat/mandrill/requests/Exports/Activity.scala | Scala | mit | 447 |
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package kafka.api
import java.time.Duration
import java.util.{Collections, HashMap, Properties}
import kafka.api.QuotaTestClients._
import kafka.server.{ClientQuotaManager, ClientQuotaManagerConfig, DynamicConfig, KafkaConfig, KafkaServer, QuotaType}
import kafka.utils.TestUtils
import org.apache.kafka.clients.consumer.{ConsumerConfig, KafkaConsumer}
import org.apache.kafka.clients.producer._
import org.apache.kafka.clients.producer.internals.ErrorLoggingCallback
import org.apache.kafka.common.{Metric, MetricName, TopicPartition}
import org.apache.kafka.common.metrics.{KafkaMetric, Quota}
import org.apache.kafka.common.security.auth.KafkaPrincipal
import org.junit.Assert._
import org.junit.{Before, Test}
import scala.collection.JavaConverters._
/**
 * Base class for client-quota integration tests. Subclasses provide the
 * quota-configured clients via [[createQuotaTestClients]] (e.g. keyed by
 * client-id or user principal).
 *
 * Fix: replaced deprecated Scala procedure syntax (`def m() { ... }`) with
 * explicit `: Unit =` result types throughout.
 */
abstract class BaseQuotaTest extends IntegrationTestHarness {

  override val brokerCount = 2

  protected def producerClientId = "QuotasTestProducer-1"
  protected def consumerClientId = "QuotasTestConsumer-1"
  protected def createQuotaTestClients(topic: String, leaderNode: KafkaServer): QuotaTestClients

  this.serverConfig.setProperty(KafkaConfig.ControlledShutdownEnableProp, "false")
  this.serverConfig.setProperty(KafkaConfig.OffsetsTopicReplicationFactorProp, "2")
  this.serverConfig.setProperty(KafkaConfig.OffsetsTopicPartitionsProp, "1")
  this.serverConfig.setProperty(KafkaConfig.GroupMinSessionTimeoutMsProp, "100")
  this.serverConfig.setProperty(KafkaConfig.GroupMaxSessionTimeoutMsProp, "30000")
  this.serverConfig.setProperty(KafkaConfig.GroupInitialRebalanceDelayMsProp, "0")
  this.producerConfig.setProperty(ProducerConfig.ACKS_CONFIG, "-1")
  this.producerConfig.setProperty(ProducerConfig.BUFFER_MEMORY_CONFIG, "300000")
  this.producerConfig.setProperty(ProducerConfig.CLIENT_ID_CONFIG, producerClientId)
  this.consumerConfig.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "QuotasTest")
  this.consumerConfig.setProperty(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG, 4096.toString)
  this.consumerConfig.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")
  this.consumerConfig.setProperty(ConsumerConfig.CLIENT_ID_CONFIG, consumerClientId)
  this.consumerConfig.setProperty(ConsumerConfig.FETCH_MIN_BYTES_CONFIG, "0")
  this.consumerConfig.setProperty(ConsumerConfig.FETCH_MAX_WAIT_MS_CONFIG, "0")

  // Low enough quota that a producer sending a small payload in a tight loop should get throttled
  val defaultProducerQuota = 8000
  val defaultConsumerQuota = 2500
  val defaultRequestQuota = Int.MaxValue

  val topic1 = "topic-1"
  var leaderNode: KafkaServer = _
  var followerNode: KafkaServer = _
  var quotaTestClients: QuotaTestClients = _

  @Before
  override def setUp(): Unit = {
    super.setUp()
    val numPartitions = 1
    val leaders = createTopic(topic1, numPartitions, brokerCount)
    leaderNode = if (leaders(0) == servers.head.config.brokerId) servers.head else servers(1)
    followerNode = if (leaders(0) != servers.head.config.brokerId) servers.head else servers(1)
    quotaTestClients = createQuotaTestClients(topic1, leaderNode)
  }

  // With the small default quotas, both producer and consumer should be throttled.
  @Test
  def testThrottledProducerConsumer(): Unit = {
    val numRecords = 1000
    val produced = quotaTestClients.produceUntilThrottled(numRecords)
    quotaTestClients.verifyProduceThrottle(expectThrottle = true)

    // Consumer should read in a bursty manner and get throttled immediately
    quotaTestClients.consumeUntilThrottled(produced)
    quotaTestClients.verifyConsumeThrottle(expectThrottle = true)
  }

  // Quota overrides set to effectively-unlimited should prevent any throttling.
  @Test
  def testProducerConsumerOverrideUnthrottled(): Unit = {
    // Give effectively unlimited quota for producer and consumer
    val props = new Properties()
    props.put(DynamicConfig.Client.ProducerByteRateOverrideProp, Long.MaxValue.toString)
    props.put(DynamicConfig.Client.ConsumerByteRateOverrideProp, Long.MaxValue.toString)

    quotaTestClients.overrideQuotas(Long.MaxValue, Long.MaxValue, Int.MaxValue)
    quotaTestClients.waitForQuotaUpdate(Long.MaxValue, Long.MaxValue, Int.MaxValue)

    val numRecords = 1000
    assertEquals(numRecords, quotaTestClients.produceUntilThrottled(numRecords))
    quotaTestClients.verifyProduceThrottle(expectThrottle = false)

    // The "client" consumer does not get throttled.
    assertEquals(numRecords, quotaTestClients.consumeUntilThrottled(numRecords))
    quotaTestClients.verifyConsumeThrottle(expectThrottle = false)
  }

  // Deleting overrides must fall back to the (tiny) broker defaults and throttle again.
  @Test
  def testQuotaOverrideDelete(): Unit = {
    // Override producer and consumer quotas to unlimited
    quotaTestClients.overrideQuotas(Long.MaxValue, Long.MaxValue, Int.MaxValue)
    quotaTestClients.waitForQuotaUpdate(Long.MaxValue, Long.MaxValue, Int.MaxValue)

    val numRecords = 1000
    assertEquals(numRecords, quotaTestClients.produceUntilThrottled(numRecords))
    quotaTestClients.verifyProduceThrottle(expectThrottle = false)
    assertEquals(numRecords, quotaTestClients.consumeUntilThrottled(numRecords))
    quotaTestClients.verifyConsumeThrottle(expectThrottle = false)

    // Delete producer and consumer quota overrides. Consumer and producer should now be
    // throttled since broker defaults are very small
    quotaTestClients.removeQuotaOverrides()
    val produced = quotaTestClients.produceUntilThrottled(numRecords)
    quotaTestClients.verifyProduceThrottle(expectThrottle = true)

    // Since producer may have been throttled after producing a couple of records,
    // consume from beginning till throttled
    quotaTestClients.consumer.seekToBeginning(Collections.singleton(new TopicPartition(topic1, 0)))
    quotaTestClients.consumeUntilThrottled(numRecords + produced)
    quotaTestClients.verifyConsumeThrottle(expectThrottle = true)
  }

  // Request-percentage quota: polling in a loop should trip the request throttle,
  // and exempt-request time should also be recorded.
  @Test
  def testThrottledRequest(): Unit = {
    quotaTestClients.overrideQuotas(Long.MaxValue, Long.MaxValue, 0.1)
    quotaTestClients.waitForQuotaUpdate(Long.MaxValue, Long.MaxValue, 0.1)

    val consumer = quotaTestClients.consumer
    consumer.subscribe(Collections.singleton(topic1))
    val endTimeMs = System.currentTimeMillis + 10000
    var throttled = false
    while ((!throttled || quotaTestClients.exemptRequestMetric == null) && System.currentTimeMillis < endTimeMs) {
      consumer.poll(Duration.ofMillis(100L))
      val throttleMetric = quotaTestClients.throttleMetric(QuotaType.Request, consumerClientId)
      throttled = throttleMetric != null && metricValue(throttleMetric) > 0
    }

    assertTrue("Should have been throttled", throttled)
    quotaTestClients.verifyConsumerClientThrottleTimeMetric(expectThrottle = true,
      Some(ClientQuotaManagerConfig.DefaultQuotaWindowSizeSeconds * 1000.0))

    val exemptMetric = quotaTestClients.exemptRequestMetric
    assertNotNull("Exempt requests not recorded", exemptMetric)
    assertTrue("Exempt requests not recorded", metricValue(exemptMetric) > 0)
  }
}
object QuotaTestClients {
  // Extracts a metric's numeric value. Assumes the metric reports a Double
  // (true for the throttle-time / exempt-time metrics used in these tests).
  def metricValue(metric: Metric): Double = metric.metricValue().asInstanceOf[Double]
}
abstract class QuotaTestClients(topic: String,
leaderNode: KafkaServer,
producerClientId: String,
consumerClientId: String,
val producer: KafkaProducer[Array[Byte], Array[Byte]],
val consumer: KafkaConsumer[Array[Byte], Array[Byte]]) {
def userPrincipal: KafkaPrincipal
def overrideQuotas(producerQuota: Long, consumerQuota: Long, requestQuota: Double)
def removeQuotaOverrides()
def quotaMetricTags(clientId: String): Map[String, String]
def quota(quotaManager: ClientQuotaManager, userPrincipal: KafkaPrincipal, clientId: String): Quota = {
quotaManager.quota(userPrincipal, clientId)
}
def produceUntilThrottled(maxRecords: Int, waitForRequestCompletion: Boolean = true): Int = {
var numProduced = 0
var throttled = false
do {
val payload = numProduced.toString.getBytes
val future = producer.send(new ProducerRecord[Array[Byte], Array[Byte]](topic, null, null, payload),
new ErrorLoggingCallback(topic, null, null, true))
numProduced += 1
do {
val metric = throttleMetric(QuotaType.Produce, producerClientId)
throttled = metric != null && metricValue(metric) > 0
} while (!future.isDone && (!throttled || waitForRequestCompletion))
} while (numProduced < maxRecords && !throttled)
numProduced
}
def consumeUntilThrottled(maxRecords: Int, waitForRequestCompletion: Boolean = true): Int = {
consumer.subscribe(Collections.singleton(topic))
var numConsumed = 0
var throttled = false
do {
numConsumed += consumer.poll(Duration.ofMillis(100L)).count
val metric = throttleMetric(QuotaType.Fetch, consumerClientId)
throttled = metric != null && metricValue(metric) > 0
} while (numConsumed < maxRecords && !throttled)
// If throttled, wait for the records from the last fetch to be received
if (throttled && numConsumed < maxRecords && waitForRequestCompletion) {
val minRecords = numConsumed + 1
while (numConsumed < minRecords)
numConsumed += consumer.poll(Duration.ofMillis(100L)).count
}
numConsumed
}
def verifyProduceThrottle(expectThrottle: Boolean, verifyClientMetric: Boolean = true): Unit = {
verifyThrottleTimeMetric(QuotaType.Produce, producerClientId, expectThrottle)
if (verifyClientMetric)
verifyProducerClientThrottleTimeMetric(expectThrottle)
}
def verifyConsumeThrottle(expectThrottle: Boolean, verifyClientMetric: Boolean = true): Unit = {
verifyThrottleTimeMetric(QuotaType.Fetch, consumerClientId, expectThrottle)
if (verifyClientMetric)
verifyConsumerClientThrottleTimeMetric(expectThrottle)
}
def verifyThrottleTimeMetric(quotaType: QuotaType, clientId: String, expectThrottle: Boolean): Unit = {
val throttleMetricValue = metricValue(throttleMetric(quotaType, clientId))
if (expectThrottle) {
assertTrue(s"Client with id=$clientId should have been throttled", throttleMetricValue > 0)
} else {
assertTrue(s"Client with id=$clientId should not have been throttled", throttleMetricValue.isNaN)
}
}
def throttleMetricName(quotaType: QuotaType, clientId: String): MetricName = {
leaderNode.metrics.metricName("throttle-time",
quotaType.toString,
quotaMetricTags(clientId).asJava)
}
def throttleMetric(quotaType: QuotaType, clientId: String): KafkaMetric = {
leaderNode.metrics.metrics.get(throttleMetricName(quotaType, clientId))
}
def exemptRequestMetric: KafkaMetric = {
val metricName = leaderNode.metrics.metricName("exempt-request-time", QuotaType.Request.toString, "")
leaderNode.metrics.metrics.get(metricName)
}
def verifyProducerClientThrottleTimeMetric(expectThrottle: Boolean) {
val tags = new HashMap[String, String]
tags.put("client-id", producerClientId)
val avgMetric = producer.metrics.get(new MetricName("produce-throttle-time-avg", "producer-metrics", "", tags))
val maxMetric = producer.metrics.get(new MetricName("produce-throttle-time-max", "producer-metrics", "", tags))
if (expectThrottle) {
TestUtils.waitUntilTrue(() => metricValue(avgMetric) > 0.0 && metricValue(maxMetric) > 0.0,
s"Producer throttle metric not updated: avg=${metricValue(avgMetric)} max=${metricValue(maxMetric)}")
} else
assertEquals("Should not have been throttled", 0.0, metricValue(maxMetric), 0.0)
}
def verifyConsumerClientThrottleTimeMetric(expectThrottle: Boolean, maxThrottleTime: Option[Double] = None) {
val tags = new HashMap[String, String]
tags.put("client-id", consumerClientId)
val avgMetric = consumer.metrics.get(new MetricName("fetch-throttle-time-avg", "consumer-fetch-manager-metrics", "", tags))
val maxMetric = consumer.metrics.get(new MetricName("fetch-throttle-time-max", "consumer-fetch-manager-metrics", "", tags))
if (expectThrottle) {
TestUtils.waitUntilTrue(() => metricValue(avgMetric) > 0.0 && metricValue(maxMetric) > 0.0,
s"Consumer throttle metric not updated: avg=${metricValue(avgMetric)} max=${metricValue(maxMetric)}")
maxThrottleTime.foreach(max => assertTrue(s"Maximum consumer throttle too high: ${metricValue(maxMetric)}",
metricValue(maxMetric) <= max))
} else
assertEquals("Should not have been throttled", 0.0, metricValue(maxMetric), 0.0)
}
def quotaProperties(producerQuota: Long, consumerQuota: Long, requestQuota: Double): Properties = {
val props = new Properties()
props.put(DynamicConfig.Client.ProducerByteRateOverrideProp, producerQuota.toString)
props.put(DynamicConfig.Client.ConsumerByteRateOverrideProp, consumerQuota.toString)
props.put(DynamicConfig.Client.RequestPercentageOverrideProp, requestQuota.toString)
props
}
def waitForQuotaUpdate(producerQuota: Long, consumerQuota: Long, requestQuota: Double, server: KafkaServer = leaderNode) {
TestUtils.retry(10000) {
val quotaManagers = server.dataPlaneRequestProcessor.quotas
val overrideProducerQuota = quota(quotaManagers.produce, userPrincipal, producerClientId)
val overrideConsumerQuota = quota(quotaManagers.fetch, userPrincipal, consumerClientId)
val overrideProducerRequestQuota = quota(quotaManagers.request, userPrincipal, producerClientId)
val overrideConsumerRequestQuota = quota(quotaManagers.request, userPrincipal, consumerClientId)
assertEquals(s"ClientId $producerClientId of user $userPrincipal must have producer quota", Quota.upperBound(producerQuota), overrideProducerQuota)
assertEquals(s"ClientId $consumerClientId of user $userPrincipal must have consumer quota", Quota.upperBound(consumerQuota), overrideConsumerQuota)
assertEquals(s"ClientId $producerClientId of user $userPrincipal must have request quota", Quota.upperBound(requestQuota), overrideProducerRequestQuota)
assertEquals(s"ClientId $consumerClientId of user $userPrincipal must have request quota", Quota.upperBound(requestQuota), overrideConsumerRequestQuota)
}
}
}
| KevinLiLu/kafka | core/src/test/scala/integration/kafka/api/BaseQuotaTest.scala | Scala | apache-2.0 | 14,574 |
package org.joda.time.convert
import org.joda.time.Chronology
object LongConverter {
  // Shared singleton, mirroring Joda-Time's converter-registry pattern.
  val INSTANCE = new LongConverter()
}
/**
 * Converter that treats a boxed `Long` directly as a millisecond value:
 * both instant millis and duration millis are the Long itself.
 */
class LongConverter
    extends AbstractConverter()
    with InstantConverter
    with PartialConverter
    with DurationConverter {
  // The chronology argument is ignored — the raw Long already is the millis value.
  override def getInstantMillis(`object`: AnyRef, chrono: Chronology): Long = {
    `object`.asInstanceOf[Long].longValue()
  }
  // Duration in millis is likewise the Long value itself.
  def getDurationMillis(`object`: AnyRef): Long = {
    `object`.asInstanceOf[Long].longValue()
  }
  // This converter handles (boxed) Long inputs.
  def getSupportedType(): Class[_] = classOf[Long]
}
| mdedetrich/soda-time | shared/src/main/scala/org/joda/time/convert/LongConverter.scala | Scala | bsd-2-clause | 544 |
/*
* Copyright 2012 Comcast Cable Communications Management, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.comcast.money.core.logging
import com.comcast.money.core.Money
import org.slf4j.{ Logger, LoggerFactory }
/**
 * Mixin providing exception logging for tracing code. Whether exceptions are
 * actually logged is controlled by `Money.Environment.logExceptions`, read
 * lazily on first use.
 *
 * Fix: added the explicit `Unit` result type on the public `logException`
 * (it previously relied on type inference).
 */
trait TraceLogging {
  // Lazy so the Money environment is only consulted when logging is attempted.
  lazy val shouldLogExceptions: Boolean = Money.Environment.logExceptions
  val logger: Logger = LoggerFactory.getLogger(classOf[TraceLogging])
  /** Logs `t` at ERROR level when exception logging is enabled; no-op otherwise. */
  def logException(t: Throwable): Unit = if (shouldLogExceptions) {
    logger.error("Tracing exception", t)
  }
}
| Comcast/money | money-core/src/main/scala/com/comcast/money/core/logging/TraceLogging.scala | Scala | apache-2.0 | 1,025 |
package scalacopts
import scala.reflect.macros.blackbox
import scala.language.experimental.macros
object Macros {
  // Expands at compile time to the comma-joined -Xmacro-settings values.
  def hello: String = macro macroSettings
  // Macro implementation: reads scalac's -Xmacro-settings (exposed through
  // c.settings) and splices the comma-joined string into the call site as a
  // string literal.
  def macroSettings(c: blackbox.Context): c.Expr[String] = {
    import c.universe._
    // c.settings are the values from scalac's -Xmacro-settings
    val s = c.settings.mkString(",")
    c.Expr(q"""${s}""")
  }
} | bazelbuild/rules_scala | test/scalacopts/A.scala | Scala | apache-2.0 | 374 |
/*
* Copyright (c) 2016-2018 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.iglu.schemaddl.jsonschema
import cats.syntax.either._
import scala.annotation.tailrec
import com.snowplowanalytics.iglu.schemaddl.jsonschema.JsonPointer._
/**
 * A JSON Pointer stored as a *reversed* list of cursors: the head of `value`
 * is the deepest (most recently appended) step; [[get]] restores document
 * order. Implemented as a value class over the list.
 */
case class JsonPointer private(value: List[Cursor]) extends AnyVal {
  /** Cursors in document order (root first). */
  def get: List[Cursor] = value.reverse
  /** Deepest cursor, if any (head of the reversed list). */
  def last = value.headOption
  /** Renders in `/a/b/0` notation; the empty pointer renders as `/`. */
  def show: String = "/" ++ (get.map {
    case Cursor.DownField(key) => key
    case Cursor.DownProperty(property) => property.key.name
    case Cursor.At(index) => index.toString
  } mkString "/")
  /** Appends a schema-keyword step (e.g. `properties`). */
  def downProperty(schemaProperty: SchemaProperty): JsonPointer =
    JsonPointer(Cursor.DownProperty(schemaProperty) :: value)
  /** Appends a plain object-field step. */
  def downField(key: String): JsonPointer =
    JsonPointer(Cursor.DownField(key) :: value)
  /** Appends an array-index step. */
  def at(index: Int): JsonPointer =
    JsonPointer(Cursor.At(index) :: value)
}
object JsonPointer {
  // The empty pointer — refers to the document root.
  val Root = JsonPointer(Nil)
  // JSON Schema keywords that a pointer may traverse through.
  sealed trait SchemaProperty extends Product with Serializable {
    import SchemaProperty._
    def key: Symbol
    /** Get a parse function that will enforce correct Cursor
     * (e.g. only indexes allowed in Items and OneOf as they're arrays)
     */
    def next: String => Either[String, Cursor] = this match {
      case Items => i => Either.catchNonFatal(i.toInt).leftMap(_.getMessage).map(Cursor.At.apply)
      case OneOf => i => Either.catchNonFatal(i.toInt).leftMap(_.getMessage).map(Cursor.At.apply)
      case Properties => i => Cursor.DownField(i).asRight
      case PatternProperties => i => Cursor.DownField(i).asRight
      case AdditionalItems => i => fromString(i).map(Cursor.DownProperty.apply)
      case AdditionalProperties => i => fromString(i).map(Cursor.DownProperty.apply)
    }
  }
  object SchemaProperty {
    case object Items extends SchemaProperty { def key = 'items }
    case object AdditionalItems extends SchemaProperty { def key = 'additionalItems }
    case object Properties extends SchemaProperty { def key = 'properties }
    case object AdditionalProperties extends SchemaProperty { def key = 'additionalProperties }
    case object PatternProperties extends SchemaProperty { def key = 'patternProperties }
    case object OneOf extends SchemaProperty { def key = 'oneOf }
    val all = List(Items, AdditionalItems, Properties, AdditionalProperties, PatternProperties, OneOf)
    // Resolves a path segment to a known schema keyword; Left(s) when unknown.
    def fromString(s: String): Either[String, SchemaProperty] =
      all.find(x => x.key.name == s).toRight(s)
  }
  /**
   * Parse function, that tries to preserve correct cursors
   * In case structure of fields is incorrect it fallbacks to Left all-DownField,
   * which gives same string representation, but can be wrong semantically
   */
  def parse(string: String): Either[JsonPointer, JsonPointer] = {
    @tailrec def go(remaining: List[String], acc: JsonPointer): Either[JsonPointer, JsonPointer] = {
      remaining match {
        case Nil => acc.asRight
        case current :: tail =>
          // Fallback: treat the rest of the path as plain field names (Left).
          def giveUp =
            JsonPointer((current :: tail).reverse.map(Cursor.DownField.apply) ++ acc.value).asLeft
          acc match {
            // At the root only a schema keyword is valid.
            case Root => SchemaProperty.fromString(current) match {
              case Right(property) => go(tail, Root.downProperty(property))
              case Left(_) => giveUp
            }
            case JsonPointer(previousCursor :: old) => previousCursor match {
              // After a keyword, the keyword dictates what the next segment may be.
              case Cursor.DownProperty(property) =>
                property.next(current) match {
                  case Right(next) => go(tail, JsonPointer(next :: previousCursor :: old))
                  case Left(_) => giveUp
                }
              // After a field or index, only another schema keyword is valid.
              case _ => SchemaProperty.fromString(current) match {
                case Right(next) => go(tail, JsonPointer(Cursor.DownProperty(next) :: previousCursor :: old))
                case Left(_) => giveUp
              }
            }
          }
      }
    }
    go(string.split("/").filter(_.nonEmpty).toList, Root)
  }
  // A single step of a JSON Pointer.
  sealed trait Cursor
  object Cursor {
    case class DownProperty(property: SchemaProperty) extends Cursor
    case class DownField(key: String) extends Cursor
    case class At(index: Int) extends Cursor
  }
}
| snowplow/iglu | 0-common/schema-ddl/src/main/scala/com.snowplowanalytics/iglu.schemaddl/jsonschema/JsonPointer.scala | Scala | apache-2.0 | 4,851 |
package com.twitter.concurrent
import org.junit.runner.RunWith
import org.scalatest.WordSpec
import org.scalatest.junit.JUnitRunner
import com.twitter.common.objectsize.ObjectSizeCalculator
import com.twitter.util.{Await, Return}
@RunWith(classOf[JUnitRunner])
// Exercises Broker rendezvous semantics: a send and a receive complete only
// when matched with each other, unmatched parties queue in FIFO order,
// interrupts dequeue waiters, and brokers compose with Offer combinators.
class BrokerTest extends WordSpec {
  "Broker" should {
    "send data (send, recv)" in {
      val br = new Broker[Int]
      val sendF = br.send(123).sync()
      // The send is pending until a receiver arrives.
      assert(sendF.isDefined === false)
      val recvF = br.recv.sync()
      assert(recvF.isDefined === true)
      assert(Await.result(recvF) === 123)
      assert(sendF.isDefined === true)
    }
    "send data (recv, send)" in {
      val br = new Broker[Int]
      val recvF = br.recv.sync()
      // The receive is pending until a sender arrives.
      assert(recvF.isDefined === false)
      val sendF = br.send(123).sync()
      assert(sendF.isDefined === true)
      assert(recvF.isDefined === true)
      assert(Await.result(recvF) === 123)
    }
    "queue receivers (recv, recv, send, send)" in {
      val br = new Broker[Int]
      val r0, r1 = br.recv.sync()
      assert(r0.isDefined === false)
      assert(r1.isDefined === false)
      val s = br.send(123)
      // Each send satisfies exactly one queued receiver, in FIFO order.
      assert(s.sync().poll === Some(Return.Unit))
      assert(r0.poll === Some(Return(123)))
      assert(r1.isDefined === false)
      assert(s.sync().poll === Some(Return.Unit))
      assert(r1.poll === Some(Return(123)))
      assert(s.sync().isDefined === false)
    }
    "queue senders (send, send, recv, recv)" in {
      val br = new Broker[Int]
      val s0, s1 = br.send(123).sync()
      assert(s0.isDefined === false)
      assert(s1.isDefined === false)
      val r = br.recv
      // Each receive satisfies exactly one queued sender, in FIFO order.
      assert(r.sync().poll === Some(Return(123)))
      assert(s0.poll === Some(Return.Unit))
      assert(s1.isDefined === false)
      assert(r.sync().poll === Some(Return(123)))
      assert(s1.poll === Some(Return.Unit))
      assert(r.sync().isDefined === false)
    }
    "interrupts" should {
      "removes queued receiver" in {
        val br = new Broker[Int]
        val recvF = br.recv.sync()
        // Raising on the pending future must dequeue the receiver.
        recvF.raise(new Exception)
        assert(br.send(123).sync().poll === None)
        assert(recvF.poll === None)
      }
      "removes queued sender" in {
        val br = new Broker[Int]
        val sendF = br.send(123).sync()
        // Raising on the pending future must dequeue the sender.
        sendF.raise(new Exception)
        assert(br.recv.sync().poll === None)
        assert(sendF.poll === None)
      }
      "doesn't result in space leaks" in {
        val br = new Broker[Int]
        assert(Offer.select(Offer.const(1), br.recv).poll === Some(Return(1)))
        // Baseline heap footprint after one select; repeated selects must not grow it.
        val initial = ObjectSizeCalculator.getObjectSize(br)
        for (_ <- 0 until 1000) {
          assert(Offer.select(Offer.const(1), br.recv).poll === Some(Return(1)))
          assert(ObjectSizeCalculator.getObjectSize(br) === initial)
        }
      }
      "works with orElse" in {
        val b0, b1 = new Broker[Int]
        val o = b0.recv orElse b1.recv
        val f = o.sync()
        assert(f.isDefined === false)
        val sendf0 = b0.send(12).sync()
        assert(sendf0.isDefined === false)
        val sendf1 = b1.send(32).sync()
        assert(sendf1.isDefined === true)
        assert(f.poll === Some(Return(32)))
        assert(o.sync().poll === Some(Return(12)))
        assert(sendf0.poll === Some(Return.Unit))
      }
    }
    "integrate" in {
      val br = new Broker[Int]
      val offer = Offer.choose(br.recv, Offer.const(999))
      // With no sender pending, the constant branch wins.
      assert(offer.sync().poll === Some(Return(999)))
      val item = br.recv.sync()
      assert(item.isDefined === false)
      assert(br.send(123).sync().poll === Some(Return.Unit))
      assert(item.poll === Some(Return(123)))
    }
  }
}
| stremlenye/util | util-core/src/test/scala/com/twitter/concurrent/BrokerTest.scala | Scala | apache-2.0 | 3,681 |
/*
* Copyright (c) 2017 Andrzej Jozwik
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package pl.jozwik.smtp
package server
import java.net.InetSocketAddress
import akka.actor.Props
import akka.io.Tcp._
import pl.jozwik.smtp.AkkaUtils._
import pl.jozwik.smtp.util.Constants._
object FakeSmtpActor {
  // Props factory so callers never construct the actor directly.
  def props(bindAddress: InetSocketAddress): Props = Props(new FakeSmtpActor(bindAddress))
}
class FakeSmtpActor(bindAddress: InetSocketAddress) extends AbstractSmtpActor(bindAddress) {
def receive: Receive = {
case Connected(_, _) =>
sender() ! Register(self)
sender() ! toWrite(s"$SERVICE_READY SMTP DEMO")
case Received(data) =>
val str = data.utf8String
logger.debug(s"$str")
sender() ! toWrite(s"ALA MA KOTA")
}
} | ajozwik/akka-smtp-server | akka-smtp/src/test/scala/pl/jozwik/smtp/server/FakeSmtpActor.scala | Scala | mit | 1,798 |
/*
* Copyright 2017 Sumo Logic
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ws.epigraph.java.service.projections.req.output
import ws.epigraph.java.JavaGenNames.jn
import ws.epigraph.java.NewlineStringInterpolator.NewlineHelper
import ws.epigraph.java.service.assemblers.EntityAsmGen
import ws.epigraph.java.service.projections.req._
import ws.epigraph.java.service.projections.req.output.ReqOutputProjectionGen.{classNamePrefix, classNameSuffix}
import ws.epigraph.java.{GenContext, JavaGen}
import ws.epigraph.lang.Qn
import ws.epigraph.projections.op._
import ws.epigraph.types.{DatumTypeApi, TypeKind}
/**
* @author <a href="mailto:konstantin.sobolev@gmail.com">Konstantin Sobolev</a>
*/
// Request-output projection generator for entity projections: wraps an
// OpEntityProjection, delegating per-tag model projections to
// ReqOutputModelProjectionGen and emitting tail generators for polymorphic tails.
class ReqOutputEntityProjectionGen(
  protected val baseNamespaceProvider: BaseNamespaceProvider,
  val op: OpEntityProjection,
  baseNamespaceOpt: Option[Qn],
  _namespaceSuffix: Qn,
  override val parentClassGenOpt: Option[ReqOutputEntityProjectionGen],
  protected val ctx: GenContext) extends ReqOutputTypeProjectionGen with ReqEntityProjectionGen {
  override type OpProjectionType = OpEntityProjection
  override type OpTagProjectionEntryType = OpTagProjectionEntry
  override protected type GenType = ReqOutputEntityProjectionGen
  // A reference name (if present) overrides the namespace derived from the parent.
  override protected def baseNamespace: Qn = ReqProjectionGen.baseNamespace(
    referenceNameOpt,
    baseNamespaceOpt.getOrElse(super.baseNamespace)
  )
  override protected def namespaceSuffix: Qn = ReqProjectionGen.namespaceSuffix(referenceNameOpt, _namespaceSuffix)
  override val shortClassName: String = genShortClassName(classNamePrefix, classNameSuffix)
  // Generator for the projection this one was normalized from, if any.
  override protected def normalizedFromGenOpt: Option[ReqOutputEntityProjectionGen] =
    Option(op.normalizedFrom()).map { nfo =>
      new ReqOutputEntityProjectionGen(
        baseNamespaceProvider,
        nfo,
        baseNamespaceOpt,
        _namespaceSuffix,
        None,
        ctx
      )
    }
  // Tail generators do not recurse further into normalized tails (map cleared below).
  override protected def tailGenerator(op: OpEntityProjection, normalized: Boolean): ReqOutputEntityProjectionGen =
    new ReqOutputEntityProjectionGen(
      baseNamespaceProvider,
      op,
      Some(baseNamespace),
      tailNamespaceSuffix(op.`type`(), normalized),
      Some(this),
      ctx
    ) {
      override lazy val normalizedTailGenerators: Map[OpEntityProjection, ReqProjectionGen] = Map()
    }
  // Pairs each tag with the corresponding generator from the parent class, when present.
  override protected def tagGenerator(
    pgo: Option[ReqEntityProjectionGen],
    tpe: OpTagProjectionEntry): ReqProjectionGen =
    tagGenerator(
      tpe,
      pgo.flatMap(pg => pg.findTagGenerator(tpe.tag().name()).map(_.asInstanceOf[ReqOutputModelProjectionGen]))
    )
  // Builds the model projection generator for one tag, nested under a
  // lower-cased tag-name namespace segment.
  protected def tagGenerator(
    tpe: OpTagProjectionEntry,
    parentTagGenOpt: Option[ReqOutputModelProjectionGen]): ReqProjectionGen =
    ReqOutputModelProjectionGen.dataProjectionGen(
      baseNamespaceProvider,
      tpe.modelProjection(),
      Some(baseNamespace),
      namespaceSuffix.append(jn(tpe.tag().name()).toLowerCase),
      parentTagGenOpt,
      ctx
    )
  // When the projection has tags, additionally emit the entity assembler helper.
  override lazy val children: Iterable[JavaGen] =
    if (tagGenerators.isEmpty /*|| namespace.contains(Namespaces.TAILS_SEGMENT)*/ ) super.children
    else super.children ++ Iterable(new EntityAsmGen(this, ctx))
  // NOTE(review): "requried" below is a typo inside the *generated* Javadoc
  // string; correcting it changes generated output, so it is left untouched here.
  override protected lazy val flag: CodeChunk = CodeChunk(/*@formatter:off*/sn"""\\
  /**
   * @return {@code true} if entity is requried
   */
  public boolean required() {
    return raw.flag();
  }
"""/*@formatter:on*/
  )
}
object ReqOutputEntityProjectionGen {
  // Dispatches on the projection's type kind: entity projections get an
  // entity generator, everything else a model generator. Results are cached
  // by reference name in ctx.reqOutputProjections.
  def dataProjectionGen(
    baseNamespaceProvider: BaseNamespaceProvider,
    op: OpProjection[_, _],
    baseNamespaceOpt: Option[Qn],
    namespaceSuffix: Qn,
    parentClassGenOpt: Option[ReqOutputTypeProjectionGen],
    ctx: GenContext): ReqOutputTypeProjectionGen =
    ReqTypeProjectionGenCache.lookup(
      Option(op.referenceName()),
      ctx.reqOutputProjections,
      op.`type`().kind() match {
        case TypeKind.ENTITY =>
          new ReqOutputEntityProjectionGen(
            baseNamespaceProvider,
            op.asEntityProjection(),
            baseNamespaceOpt,
            namespaceSuffix,
            parentClassGenOpt.map(pg => pg.asInstanceOf[ReqOutputEntityProjectionGen]),
            ctx
          )
        case _ =>
          val modelOp: OpModelProjection[_, _, _ <: DatumTypeApi, _] =
            op.asModelProjection().asInstanceOf[OpModelProjection[_, _, _ <: DatumTypeApi, _]]
          ReqOutputModelProjectionGen.dataProjectionGen(
            baseNamespaceProvider,
            modelOp,
            baseNamespaceOpt,
            namespaceSuffix,
            parentClassGenOpt.map(pg => pg.asInstanceOf[ReqOutputModelProjectionGen]),
            ctx
          )
      }
    )
}
| SumoLogic/epigraph | java/codegen/src/main/scala/ws/epigraph/java/service/projections/req/output/ReqOutputEntityProjectionGen.scala | Scala | apache-2.0 | 5,217 |
// This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
package ducttape.workflow.builder
import ducttape.syntax.AbstractSyntaxTree.BranchGraftElement
import ducttape.workflow.Branch
import ducttape.workflow.BranchFactory
import ducttape.workflow.BranchPoint
import ducttape.workflow.BranchPointFactory
import org.scalatest.WordSpec
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class BranchGraftGlobTest extends WordSpec {
private def assertEqual(a: Seq[BranchGraftElement], b: Seq[BranchGraftElement]) {
import scala.collection.immutable.HashSet
assert ( HashSet(a) == HashSet(b) )
}
  val branchPointFactory = new BranchPointFactory
  val branchFactory = new BranchFactory(branchPointFactory)
  val branchesPerBranchPoint = 3
  // Define some branches
  // Registers branch points A, B, C with branches a1..a3, b1..b3, c1..c3;
  // the first branch of each point (i == 1) is marked as the baseline.
  Seq("a","b","c").foreach( x => {
    1.to(branchesPerBranchPoint).foreach( i => {
      branchFactory.get(s"${x}${i}",x.toUpperCase(),isBaseline=(i==1))
    })
  })
  "A branch graft with no globs" should {
    "expand to itself" in {
      val unexpandedBranchGraftElements = Seq(
        new BranchGraftElement("A","a1"),
        new BranchGraftElement("B","b1"),
        new BranchGraftElement("C","c1")
      )
      val expandedBranchGraftElements = BranchGraftGlob.expand(unexpandedBranchGraftElements, branchPointFactory, branchFactory)
      // A fully concrete graft has exactly one expansion: itself.
      expectResult(1)(expandedBranchGraftElements.size)
      assertEqual(unexpandedBranchGraftElements, expandedBranchGraftElements.head)
    }
  }
  "A branch graft with one glob" should {
    "expand" in {
      val unexpandedBranchGraftElements = Seq(
        new BranchGraftElement("A","*"),
        new BranchGraftElement("B","b1"),
        new BranchGraftElement("C","c1")
      )
      val expandedBranchGraftElements = BranchGraftGlob.expand(unexpandedBranchGraftElements, branchPointFactory, branchFactory)
      // One glob over A yields one expansion per branch of A (3).
      expectResult(branchesPerBranchPoint)(expandedBranchGraftElements.size)
    }
  }
  "A branch graft with two globs" should {
    "expand" in {
      val unexpandedBranchGraftElements = Seq(
        new BranchGraftElement("A","*"),
        new BranchGraftElement("B","b1"),
        new BranchGraftElement("C","*")
      )
      val expandedBranchGraftElements = BranchGraftGlob.expand(unexpandedBranchGraftElements, branchPointFactory, branchFactory)
      // Globs over A and C yield the cross product (3 * 3 = 9).
      expectResult(branchesPerBranchPoint*branchesPerBranchPoint)(expandedBranchGraftElements.size)
    }
  }
  "A branch graft with all globs" should {
    "expand" in {
      val unexpandedBranchGraftElements = Seq(
        new BranchGraftElement("A","*"),
        new BranchGraftElement("B","*"),
        new BranchGraftElement("C","*")
      )
      val expandedBranchGraftElements = BranchGraftGlob.expand(unexpandedBranchGraftElements, branchPointFactory, branchFactory)
      // Three globs yield 3^3 = 27 expansions (Math.pow returns 27.0, which
      // compares equal to the Int size via Scala's cooperative numeric equality).
      expectResult(Math.pow(branchesPerBranchPoint, 3))(expandedBranchGraftElements.size)
    }
  }
} | jhclark/ducttape | src/test/scala/ducttape/workflow/builder/BranchGraftGlobTest.scala | Scala | mpl-2.0 | 3,139 |
//: ----------------------------------------------------------------------------
//: Copyright (C) 2015 Verizon. All Rights Reserved.
//:
//: Licensed under the Apache License, Version 2.0 (the "License");
//: you may not use this file except in compliance with the License.
//: You may obtain a copy of the License at
//:
//: http://www.apache.org/licenses/LICENSE-2.0
//:
//: Unless required by applicable law or agreed to in writing, software
//: distributed under the License is distributed on an "AS IS" BASIS,
//: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//: See the License for the specific language governing permissions and
//: limitations under the License.
//:
//: ----------------------------------------------------------------------------
package quiver
import org.scalacheck._
import org.scalacheck.Arbitrary._
/** ScalaCheck generators and `Arbitrary` instances for quiver graphs. */
object GraphGen {
  // Random graph from arbitrary labelled nodes and edges; safeMkGraph
  // presumably tolerates edges that refer to missing nodes — see its definition.
  def graphGen[N: Arbitrary, A: Arbitrary, B: Arbitrary]: Gen[Graph[N,A,B]] = for {
    vs <- Gen.listOf(genNode[N,A])
    es <- Gen.listOf(genEdge[N,B])
  } yield safeMkGraph(vs, es)
  // A single labelled node: random vertex with a random label.
  def genNode[N: Arbitrary, A: Arbitrary]: Gen[LNode[N,A]] = for {
    a <- arbitrary[A]
    v <- arbitrary[N]
  } yield LNode(v, a)
  // A single labelled edge between two random vertices.
  def genEdge[N: Arbitrary, A: Arbitrary]: Gen[LEdge[N,A]] = for {
    x <- arbitrary[N]
    y <- arbitrary[N]
    a <- arbitrary[A]
  } yield LEdge(x, y, a)
  // A node context: random in/out adjacency lists around a random vertex.
  def genContext[N: Arbitrary, A: Arbitrary, B: Arbitrary]: Gen[Context[N,A,B]] = for {
    ins <- arbitrary[Adj[N,B]]
    outs <- arbitrary[Adj[N,B]]
    n <- arbitrary[N]
    a <- arbitrary[A]
  } yield Context(ins, n, a, outs)
  implicit def arbitraryContext[A: Arbitrary, B: Arbitrary, N: Arbitrary] = Arbitrary(genContext[N,A,B])
  implicit def arbitraryEdge[A: Arbitrary, N: Arbitrary] = Arbitrary(genEdge[N,A])
  implicit def arbitraryNode[A: Arbitrary, N: Arbitrary] = Arbitrary(genNode[N,A])
  implicit def arbitraryGraph[A: Arbitrary, B: Arbitrary, N: Arbitrary] =
    Arbitrary(graphGen[N,A,B])
}
| runarorama/quiver | core/src/test/scala/GraphGen.scala | Scala | apache-2.0 | 1,990 |
/***********************************************************************
* Copyright (c) 2017 IBM
* Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.cassandra.index
import java.nio.charset.StandardCharsets
import com.google.common.primitives.{Longs, Shorts}
import org.locationtech.geomesa.cassandra.{NamedColumn, RowValue}
import org.opengis.feature.simple.SimpleFeatureType
/**
 * Row layout for the Z3 index on Cassandra. The flat Accumulo-style row key
 * is split into typed columns: `shard` and `period` form the partition key
 * (both flagged `partition = true`), with the z value and feature id as the
 * remaining columns.
 */
trait CassandraZ3Layout extends CassandraFeatureIndex {
  private val Shard     = NamedColumn("shard",  0, "tinyint",  classOf[Byte],  partition = true)
  private val Period    = NamedColumn("period", 1, "smallint", classOf[Short], partition = true)
  private val ZValue    = NamedColumn("z",      2, "bigint",   classOf[Long])
  private val FeatureId = NamedColumn("fid",    3, "text",     classOf[String])
  override protected val columns: Seq[NamedColumn] = Seq(Shard, Period, ZValue, FeatureId)
  // * - 1 byte identifying the sft (OPTIONAL - only if table is shared)
  // * - 1 byte shard
  // * - 2 byte period
  // * - 8 bytes z value
  // * - n bytes feature ID
  // Decodes a (possibly truncated) row key into column values. Rows may stop
  // after any prefix; missing trailing z bytes are padded with 0 via getOrElse
  // (presumably to support scan-range endpoints — confirm against callers),
  // and absent columns are left null.
  override protected def rowToColumns(sft: SimpleFeatureType, row: Array[Byte]): Seq[RowValue] = {
    import CassandraFeatureIndex.RichByteArray
    var shard: java.lang.Byte = null
    var period: java.lang.Short = null
    var z: java.lang.Long = null
    var fid: String = null
    if (row.length > 0) {
      shard = row(0)
      if (row.length > 1) {
        period = Shorts.fromBytes(row(1), row(2))
        if (row.length > 3) {
          z = Longs.fromBytes(row(3), row.getOrElse(4, 0), row.getOrElse(5, 0), row.getOrElse(6, 0),
            row.getOrElse(7, 0), row.getOrElse(8, 0), row.getOrElse(9, 0), row.getOrElse(10, 0))
          if (row.length > 11) {
            fid = new String(row, 11, row.length - 11, StandardCharsets.UTF_8)
          }
        }
      }
    }
    Seq(RowValue(Shard, shard), RowValue(Period, period), RowValue(ZValue, z), RowValue(FeatureId, fid))
  }
  // Re-encodes column values into the flat row key: 1 shard byte, 2 period
  // bytes, 8 z bytes, then the UTF-8 feature id. All four columns must be set.
  override protected def columnsToRow(columns: Seq[RowValue]): Array[Byte] = {
    val shard = columns.head.value.asInstanceOf[Byte]
    val period = Shorts.toByteArray(columns(1).value.asInstanceOf[Short])
    val z = Longs.toByteArray(columns(2).value.asInstanceOf[Long])
    val fid = columns(3).value.asInstanceOf[String].getBytes(StandardCharsets.UTF_8)
    val row = Array.ofDim[Byte](11 + fid.length)
    row(0) = shard
    System.arraycopy(period, 0, row, 1, 2)
    System.arraycopy(z, 0, row, 3, 8)
    System.arraycopy(fid, 0, row, 11, fid.length)
    row
  }
}
| nagavallia/geomesa | geomesa-cassandra/geomesa-cassandra-datastore/src/main/scala/org/locationtech/geomesa/cassandra/index/CassandraZ3Layout.scala | Scala | apache-2.0 | 2,900 |
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
package viper.silicon.supporters.functions
import viper.silver.ast
import viper.silver.ast.{Field, Predicate, FuncApp, LocationAccess}
import viper.silicon.{Map, Set, Stack}
import viper.silicon.interfaces.state.Mergeable
import viper.silicon.state.terms._
/**
 * Accumulates, per function, the data recorded during verification:
 * snapshots taken at location accesses and function applications (each
 * guarded by the branch-condition stack under which it was taken), freshly
 * created field/predicate value functions, and quantified-permission terms.
 */
trait FunctionRecorder extends Mergeable[FunctionRecorder] {
  def data: Option[FunctionData]
  // Raw guard-stack -> snapshot alternatives, keyed by location access.
  private[functions] def locToSnaps: Map[ast.LocationAccess, Set[(Stack[Term], Term)]]
  // Alternatives collapsed to a single (conditional) term per location.
  def locToSnap: Map[ast.LocationAccess, Term]
  // Raw guard-stack -> snapshot alternatives, keyed by function application.
  private[functions] def fappToSnaps: Map[ast.FuncApp, Set[(Stack[Term], Term)]]
  // Alternatives collapsed to a single (conditional) term per application.
  def fappToSnap: Map[ast.FuncApp, Term]
  def freshFvfs: Set[(ast.Field, Term)]
  def freshPsfs: Set[(ast.Predicate, Term)]
  def qpTerms: Set[(Seq[Var], Stack[Term], Iterable[Term])]
  def recordSnapshot(loc: ast.LocationAccess, guards: Stack[Term], snap: Term): FunctionRecorder
  def recordSnapshot(fapp: ast.FuncApp, guards: Stack[Term], snap: Term): FunctionRecorder
  def recordQPTerms(qvars: Seq[Var], guards: Stack[Term], ts: Iterable[Term]): FunctionRecorder
  def recordFvf(field: ast.Field, fvf: Term): FunctionRecorder
  def recordPsf(predicate: ast.Predicate, psf:Term) : FunctionRecorder
}
// Immutable recorder: every record* call returns an updated copy.
case class ActualFunctionRecorder(private val _data: FunctionData,
                                  private[functions] val locToSnaps: Map[ast.LocationAccess, Set[(Stack[Term], Term)]] = Map(),
                                  private[functions] val fappToSnaps: Map[ast.FuncApp, Set[(Stack[Term], Term)]] = Map(),
                                  freshFvfs: Set[(ast.Field, Term)] = Set(),
                                  freshPsfs: Set[(ast.Predicate, Term)] = Set(),
                                  qpTerms: Set[(Seq[Var], Stack[Term], Iterable[Term])] = Set())
    extends FunctionRecorder {
  val data = Some(_data)
  // Collapses each location's guard->snap alternatives into one nested Ite term.
  // Note: head/tail require every recorded alternative set to be non-empty
  // (guaranteed by construction in recordSnapshot, which always adds an entry).
  def locToSnap: Map[ast.LocationAccess, Term] = {
    locToSnaps.map { case (loc, guardsToSnap) =>
      /* We (arbitrarily) make the snap of the head pair (guards -> snap) of
       * guardsToSnap the inner-most else-clause, i.e., we drop the guards.
       */
      val conditionalSnap =
        guardsToSnap.tail.foldLeft(guardsToSnap.head._2) { case (tailSnap, (guards, snap)) =>
          Ite(And(guards.toSet), snap, tailSnap)
        }
      loc -> conditionalSnap
    }
  }
  // Same collapsing scheme, keyed by function application.
  def fappToSnap: Map[ast.FuncApp, Term] = {
    fappToSnaps.map { case (fapp, guardsToSnap) =>
      /* We (arbitrarily) make the snap of the head pair (guards -> snap) of
       * guardsToSnap the inner-most else-clause, i.e., we drop the guards.
       */
      val conditionalSnap =
        guardsToSnap.tail.foldLeft(guardsToSnap.head._2) { case (tailSnap, (guards, snap)) =>
          Ite(And(guards.toSet), snap, tailSnap)
        }
      fapp -> conditionalSnap
    }
  }
  // Adds one guard->snap alternative for the given location access.
  def recordSnapshot(loc: ast.LocationAccess, guards: Stack[Term], snap: Term) = {
    val guardsToSnaps = locToSnaps.getOrElse(loc, Set()) + (guards -> snap)
    copy(locToSnaps = locToSnaps + (loc -> guardsToSnaps))
  }
  // Adds one guard->snap alternative for the given function application.
  def recordSnapshot(fapp: ast.FuncApp, guards: Stack[Term], snap: Term) = {
    val guardsToSnaps = fappToSnaps.getOrElse(fapp, Set()) + (guards -> snap)
    copy(fappToSnaps = fappToSnaps + (fapp -> guardsToSnaps))
  }
  def recordQPTerms(qvars: Seq[Var], guards: Stack[Term], ts: Iterable[Term]) = {
    copy(qpTerms = qpTerms + ((qvars, guards, ts)))
  }
  def recordFvf(field: ast.Field, fvf: Term) = {
    copy(freshFvfs = freshFvfs + ((field, fvf)))
  }
  def recordPsf(predicate: ast.Predicate, psf: Term) = {
    copy(freshPsfs = freshPsfs + ((predicate, psf)))
  }
  // Point-wise union of all recorded maps/sets. Both recorders must be of this
  // class and share the exact same FunctionData instance (asserted below).
  def merge(other: FunctionRecorder): FunctionRecorder = {
    assert(other.getClass == this.getClass)
    assert(other.asInstanceOf[ActualFunctionRecorder]._data eq this._data)
    val lts =
      other.locToSnaps.foldLeft(locToSnaps){case (accLts, (loc, guardsToSnaps)) =>
        val guardsToSnaps1 = accLts.getOrElse(loc, Set()) ++ guardsToSnaps
        accLts + (loc -> guardsToSnaps1)
      }
    val fts =
      other.fappToSnaps.foldLeft(fappToSnaps){case (accFts, (fapp, guardsToSnaps)) =>
        val guardsToSnaps1 = accFts.getOrElse(fapp, Set()) ++ guardsToSnaps
        accFts + (fapp -> guardsToSnaps1)
      }
    val fvfs = freshFvfs ++ other.freshFvfs
    val psfs = freshPsfs ++ other.freshPsfs
    val qpts = qpTerms ++ other.qpTerms
    copy(locToSnaps = lts, fappToSnaps = fts, freshFvfs = fvfs, freshPsfs = psfs, qpTerms = qpts)
  }
  // Debug rendering of the recorded snapshot maps.
  override lazy val toString = {
    val ltsStrs = locToSnaps map {case (k, v) => s"$k  |==>  $v"}
    val ftsStrs = fappToSnap map {case (k, v) => s"$k  |==>  $v"}
    s"""SnapshotRecorder(
       |  locToSnaps:
       |    ${ltsStrs.mkString("\\n    ")}
       |  fappToSnap:
       |    ${ftsStrs.mkString("\\n    ")}
       |)
     """.stripMargin
  }
}
/**
 * Inert recorder: carries no data, records nothing, and returns itself from
 * every record* call. `merge` asserts the other recorder is this same object.
 */
case object NoopFunctionRecorder extends FunctionRecorder {
  val data = None
  private[functions] val fappToSnaps: Map[FuncApp, Set[(Stack[Term], Term)]] = Map.empty
  val fappToSnap: Map[ast.FuncApp, Term] = Map.empty
  private[functions] val locToSnaps: Map[LocationAccess, Set[(Stack[Term], Term)]] = Map.empty
  val locToSnap: Map[ast.LocationAccess, Term] = Map.empty
  val qpTerms: Set[(Seq[Var], Stack[Term], Iterable[Term])] = Set.empty
  val freshFvfs: Set[(Field, Term)] = Set.empty
  val freshPsfs: Set[(Predicate, Term)] = Set.empty
  def merge(other: FunctionRecorder): FunctionRecorder = {
    assert(other == this)
    this
  }
  // All record* operations are no-ops returning this same singleton.
  def recordSnapshot(loc: LocationAccess, guards: Stack[Term], snap: Term): FunctionRecorder = this
  def recordFvf(field: Field, fvf: Term): FunctionRecorder = this
  def recordPsf(predicate: Predicate, psf: Term): FunctionRecorder = this
  def recordQPTerms(qvars: Seq[Var], guards: Stack[Term], ts: Iterable[Term]): FunctionRecorder = this
  def recordSnapshot(fapp: FuncApp, guards: Stack[Term], snap: Term): FunctionRecorder = this
}
| sccblom/vercors | viper/silicon/src/main/scala/supporters/functions/FunctionRecorder.scala | Scala | mpl-2.0 | 6,242 |
package com.malaska.spark.training.streaming.structured
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.streaming.{GroupState, GroupStateTimeout}
/** Structured Streaming word count over a socket source, demonstrating
  * arbitrary stateful aggregation with `mapGroupsWithState`.
  *
  * Expected arguments: `host port checkpointFolder`.
  *
  * Fixes over the previous revision:
  *  - `.master("local")` was set and then silently overridden by the later
  *    `.master("local[3]")` call; the redundant first call is removed.
  *  - `checkpointFolder` (args(2)) was parsed but never used; it is now
  *    applied as the query's `checkpointLocation`.
  */
object CountingInAStreamMapWithState {
  Logger.getLogger("org").setLevel(Level.OFF)
  Logger.getLogger("akka").setLevel(Level.OFF)

  def main(args: Array[String]): Unit = {
    val host = args(0)
    val port = args(1)
    val checkpointFolder = args(2)

    // Hard-coded to local mode; the else branch is kept for reference when
    // running against a cluster.
    val isLocal = true

    val sparkSession = if (isLocal) {
      SparkSession.builder
        .appName("my-spark-app")
        .config("spark.some.config.option", "config-value")
        .config("spark.driver.host", "127.0.0.1")
        .config("spark.sql.parquet.compression.codec", "gzip")
        .master("local[3]")
        .getOrCreate()
    } else {
      SparkSession.builder
        .appName("my-spark-app")
        .config("spark.some.config.option", "config-value")
        .master("local[3]")
        .getOrCreate()
    }

    import sparkSession.implicits._

    // Unbounded stream of lines from the socket source.
    val socketLines = sparkSession.readStream
      .format("socket")
      .option("host", host)
      .option("port", port)
      .load()

    // Tokenize each line into lower-cased words, one event per occurrence.
    val messageDs = socketLines.as[String]
      .flatMap(line => line.toLowerCase().split(" "))
      .map(word => WordCountEvent(word, 1))

    // Maintain a running total per word in the group state store.
    // NOTE(review): ProcessingTimeTimeout is declared but no timeout duration
    // is ever set on the state, so entries never expire — confirm intended.
    val wordCounts = messageDs
      .groupByKey(_.word)
      .mapGroupsWithState[WordCountInMemory, WordCountReturn](GroupStateTimeout.ProcessingTimeTimeout) {
        case (word, events, state) =>
          // Previous running total for this word, if state exists yet.
          val previous = if (state.exists) state.get.countOfWord else 0
          val newCount = previous + events.map(_.countOfWord).sum
          state.update(WordCountInMemory(newCount))
          WordCountReturn(word, newCount)
      }

    // Print updated counts to the console; checkpoint to the given folder.
    val query = wordCounts.writeStream
      .outputMode("update")
      .format("console")
      .option("checkpointLocation", checkpointFolder)
      .start()

    query.awaitTermination()
  }
}
/** A single word occurrence emitted by the tokenizer (count is always 1 here). */
case class WordCountEvent(word: String, countOfWord: Int) extends Serializable

/** Per-word running total kept in the group state store. */
case class WordCountInMemory(countOfWord: Int) extends Serializable

/** Updated (word, total) pair emitted downstream after each micro-batch. */
case class WordCountReturn(word: String, countOfWord: Int) extends Serializable
| TedBear42/spark_training | src/main/scala/com/malaska/spark/training/streaming/structured/CountingInAStreamMapWithState.scala | Scala | apache-2.0 | 2,366 |
package org.openurp.edu.eams.teach.grade.course.web.action
import org.beangle.commons.collection.Collections
import org.openurp.edu.teach.code.CourseTakeType
import org.openurp.edu.teach.code.ExamStatus
import org.openurp.edu.teach.code.GradeType
import org.openurp.edu.eams.teach.code.industry.ScoreMarkStyle
import org.openurp.edu.eams.teach.grade.lesson.model.GradeInputSwitch
import org.openurp.edu.teach.grade.model.CourseGradeState
import org.openurp.edu.teach.grade.model.ExamGradeState
import org.openurp.edu.eams.teach.lesson.GradeTypeConstants
import org.openurp.edu.teach.lesson.Lesson
/** Teacher-facing action for entering GA (overall) grades.
  *
  * Fixes over the previous revision (auto-converted from Java):
  *  - a Java `continue` was lost during conversion, leaving a dangling
  *    `if (...) //continue` whose condition then (incorrectly) guarded the
  *    following `if` — inverting the intended skip logic; the skip is now
  *    explicit.
  *  - `Collections.newBuffer[Any]` was stored under "gradeTypes" but read
  *    back as `List[GradeType]`, which would fail at the cast; a properly
  *    typed buffer is now built and converted to a `List[GradeType]`.
  *  - `Collections.newSet[Any]` held only strings; it is now typed as such.
  */
class TeacherGaAction extends TeacherAction {

  /** Resolves the grade types to display for GA input.
    *
    * Cached under the "gradeTypes" attribute; on a miss, collects every GA
    * element type that has a positive percent configured and is enabled by
    * the grade-input switch, then appends the GA grade type itself.
    */
  protected override def getGradeTypes(gradeState: CourseGradeState): List[GradeType] = {
    var gradeTypes = getAttribute("gradeTypes").asInstanceOf[List[GradeType]]
    if (null == gradeTypes) {
      val buffer = Collections.newBuffer[GradeType]
      val gis = getAttribute("gradeInputSwitch").asInstanceOf[GradeInputSwitch]
      val eles = settings.getSetting(getProject).getGaElementTypes
      for (`type` <- eles) {
        val gradeType = baseCodeService.getCode(classOf[GradeType], `type`.id).asInstanceOf[GradeType]
        val egs = gradeState.getState(gradeType)
        // Skip element types whose configured percent is missing or
        // non-positive (this was the lost Java `continue`).
        val skip = null != egs && (null == egs.getPercent || egs.getPercent <= 0)
        if (!skip && null != gis && gis.getTypes.contains(gradeType)) buffer += gradeType
      }
      buffer += entityDao.get(classOf[GradeType], GradeTypeConstants.GA_ID)
      gradeTypes = buffer.toList
      put("gradeTypes", gradeTypes)
    }
    gradeTypes
  }

  /** Prepares the GA grade-input page.
    *
    * Persists any changed percent/mark-style settings, recalculates grades
    * when a percent changed, then populates the request attributes the
    * input view needs. Returns a forward target name.
    */
  def input(): String = {
    val result = checkState()
    if (null != result) {
      return result
    }
    val gradeState = getGradeState
    val gradeTypes = settings.getSetting(getProject).getGaElementTypes
    var updatePercent = false
    for (gradeType <- gradeTypes) {
      val prefix = "examGradeState" + gradeType.id
      val percent = getFloat(prefix + ".percent")
      val egs = getState(gradeType)
      // Persist a changed percent; the request carries 0-100, storage keeps
      // a fraction in [0, 1].
      if (null != percent &&
          (null == egs.getPercent ||
           0 != java.lang.Float.compare(percent / 100F, egs.getPercent))) {
        egs.setPercent(percent / 100F)
        updatePercent = true
      }
      val examMarkStyleId = getInt(prefix + ".scoreMarkStyle.id")
      if (null != examMarkStyleId) egs.setScoreMarkStyle(entityDao.get(classOf[ScoreMarkStyle], examMarkStyleId))
    }
    val msg = checkLessonPermission(gradeState.getLesson)
    if (null != msg) {
      return forwardError(msg)
    }
    entityDao.saveOrUpdate(gradeState)
    // Only recalculate when a weighting actually changed.
    if (updatePercent) courseGradeService.recalculate(getGradeState)
    val lesson = gradeState.getLesson
    putGradeMap(lesson, getCourseTakes(lesson))
    buildGradeConfig(lesson, getGradeTypes(gradeState))
    // Names of extra view parameters the page template expects.
    val putSomeParams = Collections.newSet[String]
    putSomeParams.add("isTeacher")
    putSomeParams.add("GA")
    putSomeParams.add("NEW")
    putSomeParams.add("CONFIRMED")
    putSomeParams.add("gradeConverterConfig")
    putSomeParams.add("examStatuses")
    putSomeParams.add("USUAL")
    putSomeParams.add("VIOLATION")
    putSomeParams.add("CHEAT")
    putSomeParams.add("ABSENT")
    putSomeParams.add("DELAY")
    putSomeParams.add("gradeRateConfigs")
    put("setting", settings.getSetting(getProject))
    buildSomeParams(lesson, putSomeParams)
    put("NormalTakeType", baseCodeService.getCode(classOf[CourseTakeType], CourseTakeType.NORMAL))
    put("NormalExamStatus", baseCodeService.getCode(classOf[ExamStatus], ExamStatus.NORMAL))
    put("lesson", lesson)
    forward()
  }
}
| openurp/edu-eams-webapp | grade/src/main/scala/org/openurp/edu/eams/teach/grade/course/web/action/TeacherGaAction.scala | Scala | gpl-3.0 | 3,537 |
// Compiler regression fixture: pattern-matching a varargs case class with a
// trailing sequence wildcard (`rest : _*`) must type-check and bind `rest`
// so that its elements can be mapped over.
case class Check[A](val value: A)
case class C2(checks: Check[_]*);
object C {
  def m(x : C2): Any = (null: Any) match {
    // `_` consumes the first vararg; `rest : _*` binds the remaining ones.
    case C2(_, rest : _*) => {
      rest.map(_.value)
    }
  }
}
///////////////////
// Second regression fixture: binding an existential result via a pattern
// definition (`val Seq(fn: FoundNode[t, repr]) = ...`) used to crash the
// compiler (BoundedWildcardType MatchError in variance checking, and a
// pickler "bad type" fatal error). The calls below only need to compile.
object Container {
  trait Exp[+T]
  abstract class FuncExp[-S, +T]
  sealed abstract class FoundNode[T, Repr] {
    def optimize[TupleT, U, That](parentNode: FlatMap[T, Repr, U, That]): Any
    def optimize2[TupleT, U, That](parentNode: Any): Any
  }
  class FlatMap[T, Repr, U, That]
  val Seq(fn: FoundNode[t, repr]) = Seq[FoundNode[_, _]]()
  fn.optimize(null) // was: scala.MatchError: ? (of class BoundedWildcardType) @ Variances#varianceInType
  fn.optimize2(null) // was: fatal error: bad type: ?(class scala.reflect.internal.Types$BoundedWildcardType) @ Pickle.putType
}
| yusuke2255/dotty | tests/pending/pos/t3577.scala | Scala | bsd-3-clause | 799 |
// NOTE(review): this snippet is Swift, not Scala, despite the .scala path —
// it is part of a side-by-side "Swift is like Scala" language comparison.
// A mutable dictionary literal mapping name -> occupation.
var occupations = [
    "Malcolm": "Captain",
    "Kaylee": "Mechanic",
]
// Insert (or update) an entry via subscript assignment.
occupations["Jayne"] = "Public Relations"
| leverich/swiftislikescala | comparisons/collections/maps/swift.scala | Scala | mit | 116 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import SharedHelpers.thisLineNumber
import org.scalatest.enablers.Sortable
import FailureMessages.decorateToStringValue
import Matchers._
import exceptions.TestFailedException
/** Exercises the `be (sorted)` matcher combined with `and` in logical matcher
  * expressions, covering direct assertions (`should`, `should not`) and the
  * `all(...)` inspector forms, plus implicit and explicitly supplied
  * `Sortable` instances.
  *
  * NOTE(review): many assertions compare `failedCodeLineNumber` against
  * `thisLineNumber - n`; the relative line offsets inside each test method
  * are significant, so no lines may be added or removed within method bodies.
  */
class ShouldBeSortedLogicalAndSpec extends Spec {
  //ADDITIONAL//
  // Helpers reproducing the exact failure-message wording of the matchers.
  def wasEqualTo(left: Any, right: Any): String =
    decorateToStringValue(left) + " was equal to " + decorateToStringValue(right)
  def wasNotEqualTo(left: Any, right: Any): String =
    decorateToStringValue(left) + " was not equal to " + decorateToStringValue(right)
  def equaled(left: Any, right: Any): String =
    decorateToStringValue(left) + " equaled " + decorateToStringValue(right)
  def didNotEqual(left: Any, right: Any): String =
    decorateToStringValue(left) + " did not equal " + decorateToStringValue(right)
  def wasNotSorted(left: Any): String =
    decorateToStringValue(left) + " was not sorted"
  def wasSorted(left: Any): String =
    decorateToStringValue(left) + " was sorted"
  // Expected wording of an `all(...)` inspector failure at a given index/line.
  def allInspectionFailed(idx: Int, message: String, lineNumber:Int, left: Any) =
    "'all' inspection failed, because: \n" +
    "  at index " + idx + ", " + message + " (ShouldBeSortedLogicalAndSpec.scala:" + lineNumber + ") \n" +
    "in " + decorateToStringValue(left)
  // Fixture type sorted by score via the implicit ordering below.
  case class Student(name: String, scores: Int)
  implicit val studentOrdering = new Ordering[Student] {
    def compare(a: Student, b: Student) = a.scores compare b.scores
  }
  // Sorted / unsorted fixtures for each element type under test.
  val orderedInts = List(1, 2, 3)
  val outOfOrderInts = List(3, 2, 1)
  val orderedStudents = List(Student("Student 1", 80), Student("Student 2", 88), Student("Student 3", 90))
  val outOfOrderStudents = List(Student("Student 3", 90), Student("Student 2", 88), Student("Student 1", 80))
  val orderedString = "123"
  val outOfOrderString = "321"
  // Sortable instances that unconditionally report sorted / not sorted, used
  // to verify implicit and explicit Sortable resolution.
  val trueSortable =
    new Sortable[List[Int]] {
      def isSorted(o: List[Int]) = true
    }
  val falseSortable =
    new Sortable[List[Int]] {
      def isSorted(o: List[Int]) = false
    }
  object `Sorted matcher` {
    // Positive form on single values: xs should (be (sorted) and ...).
    object `when work with 'xs should be (sorted)'` {
      def `should do nothing when xs is sorted` {
        orderedInts should (be (sorted) and be (sorted))
        orderedInts should (be (sorted) and be_== (orderedInts))
        orderedInts should (be_== (orderedInts) and be (sorted))
        orderedStudents should (be (sorted) and be (sorted))
        orderedStudents should (be (sorted) and equal (orderedStudents))
        orderedStudents should (equal (orderedStudents) and be (sorted))
        orderedString should (be (sorted) and be (sorted))
        orderedString should (be (sorted) and be_== (orderedString))
        orderedString should (be_== (orderedString) and be (sorted))
      }
      def `should throw TestFailedException with correct stack depth when xs is not sorted` {
        val caught1 = intercept[TestFailedException] {
          outOfOrderInts should (be_== (outOfOrderInts) and be (sorted))
        }
        assert(caught1.message === Some(wasEqualTo(outOfOrderInts, outOfOrderInts) + ", but " + wasNotSorted(outOfOrderInts)))
        assert(caught1.failedCodeFileName === Some("ShouldBeSortedLogicalAndSpec.scala"))
        assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
        val caught2 = intercept[TestFailedException] {
          outOfOrderInts should (be (sorted) and be_== (outOfOrderInts))
        }
        assert(caught2.message === Some(wasNotSorted(outOfOrderInts)))
        assert(caught2.failedCodeFileName === Some("ShouldBeSortedLogicalAndSpec.scala"))
        assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))
        val caught3 = intercept[TestFailedException] {
          (outOfOrderStudents should (equal (outOfOrderStudents) and be (sorted)))
        }
        assert(caught3.message === Some(equaled(outOfOrderStudents, outOfOrderStudents) + ", but " + wasNotSorted(outOfOrderStudents)))
        assert(caught3.failedCodeFileName === Some("ShouldBeSortedLogicalAndSpec.scala"))
        assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))
        val caught4 = intercept[TestFailedException] {
          (outOfOrderStudents should (be (sorted) and equal (outOfOrderStudents)))
        }
        assert(caught4.message === Some(wasNotSorted(outOfOrderStudents)))
        assert(caught4.failedCodeFileName === Some("ShouldBeSortedLogicalAndSpec.scala"))
        assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
        val caught5 = intercept[TestFailedException] {
          outOfOrderString should (be_== (outOfOrderString) and be (sorted))
        }
        assert(caught5.message === Some(wasEqualTo(outOfOrderString, outOfOrderString) + ", but " + wasNotSorted(outOfOrderString)))
        assert(caught5.failedCodeFileName === Some("ShouldBeSortedLogicalAndSpec.scala"))
        assert(caught5.failedCodeLineNumber === Some(thisLineNumber - 4))
        val caught6 = intercept[TestFailedException] {
          outOfOrderString should (be (sorted) and be_== (outOfOrderString))
        }
        assert(caught6.message === Some(wasNotSorted(outOfOrderString)))
        assert(caught6.failedCodeFileName === Some("ShouldBeSortedLogicalAndSpec.scala"))
        assert(caught6.failedCodeLineNumber === Some(thisLineNumber - 4))
      }
      def `should use implicit Sortable when available` {
        intercept[TestFailedException] {
          outOfOrderInts should (be (sorted) and be_== (outOfOrderInts))
        }
        implicit val imp = trueSortable
        outOfOrderInts should (be (sorted) and be_== (outOfOrderInts))
      }
      def `should use explicitly specified Sortable` {
        intercept[TestFailedException] {
          outOfOrderInts should (be (sorted) and equal (outOfOrderInts))
        }
        (outOfOrderInts should (be (sorted) and equal (outOfOrderInts))) (trueSortable, defaultEquality[List[Int]])
      }
    }
    // Negated form on single values: xs should (not be sorted and ...).
    object `when work with 'xs should not be sorted'` {
      def `should do nothing when xs is not sorted` {
        outOfOrderInts should (not be sorted and not be_== outOfOrderStudents)
        outOfOrderInts should (not be_== outOfOrderStudents and not be sorted)
        outOfOrderStudents should (not be sorted and not equal outOfOrderInts)
        outOfOrderStudents should (not equal outOfOrderInts and not be sorted)
        outOfOrderString should (not be sorted and not be_== outOfOrderStudents)
        outOfOrderString should (not be_== outOfOrderStudents and not be sorted)
      }
      def `should throw TestFailedException with correct stack depth when xs is not sorted` {
        val caught1 = intercept[TestFailedException] {
          orderedInts should (not be_== outOfOrderStudents and not be sorted)
        }
        assert(caught1.message === Some(wasNotEqualTo(orderedInts, outOfOrderStudents) + ", but " + wasSorted(orderedInts)))
        assert(caught1.failedCodeFileName === Some("ShouldBeSortedLogicalAndSpec.scala"))
        assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
        val caught2 = intercept[TestFailedException] {
          orderedInts should (not be sorted and not be_== outOfOrderStudents)
        }
        assert(caught2.message === Some(wasSorted(orderedInts)))
        assert(caught2.failedCodeFileName === Some("ShouldBeSortedLogicalAndSpec.scala"))
        assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))
        val caught3 = intercept[TestFailedException] {
          orderedStudents should (not equal outOfOrderInts and not be sorted)
        }
        assert(caught3.message === Some(didNotEqual(orderedStudents, outOfOrderInts) + ", but " + wasSorted(orderedStudents)))
        assert(caught3.failedCodeFileName === Some("ShouldBeSortedLogicalAndSpec.scala"))
        assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))
        val caught4 = intercept[TestFailedException] {
          orderedStudents should (not be sorted and not equal outOfOrderInts)
        }
        assert(caught4.message === Some(wasSorted(orderedStudents)))
        assert(caught4.failedCodeFileName === Some("ShouldBeSortedLogicalAndSpec.scala"))
        assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
        val caught5 = intercept[TestFailedException] {
          orderedString should (not be_== outOfOrderStudents and not be sorted)
        }
        assert(caught5.message === Some(wasNotEqualTo(orderedString, outOfOrderStudents) + ", but " + wasSorted(orderedString)))
        assert(caught5.failedCodeFileName === Some("ShouldBeSortedLogicalAndSpec.scala"))
        assert(caught5.failedCodeLineNumber === Some(thisLineNumber - 4))
        val caught6 = intercept[TestFailedException] {
          orderedString should (not be sorted and not be_== outOfOrderStudents)
        }
        assert(caught6.message === Some(wasSorted(orderedString)))
        assert(caught6.failedCodeFileName === Some("ShouldBeSortedLogicalAndSpec.scala"))
        assert(caught6.failedCodeLineNumber === Some(thisLineNumber - 4))
      }
      def `should use implicit Sortable when available` {
        intercept[TestFailedException] {
          orderedInts should (not be (sorted) and not be_== (outOfOrderInts))
        }
        implicit val imp = falseSortable
        orderedInts should (not be (sorted) and not be_== (outOfOrderInts))
      }
      def `should use explicitly specified Sortable` {
        intercept[TestFailedException] {
          orderedInts should (not be (sorted) and not equal (outOfOrderInts))
        }
        (orderedInts should (not be (sorted) and not equal (outOfOrderInts))) (falseSortable, defaultEquality[List[Int]])
      }
    }
    // Positive form through the all(...) inspector.
    object `when work with 'all(xs) should be (sorted)'` {
      def `should do nothing when xs is sorted` {
        all(List(orderedInts)) should (be (sorted) and be (sorted))
        all(List(orderedInts)) should (be (sorted) and be_== (orderedInts))
        all(List(orderedInts)) should (be_== (orderedInts) and be (sorted))
        all(List(orderedStudents)) should (be (sorted) and be (sorted))
        all(List(orderedStudents)) should (be (sorted) and equal (orderedStudents))
        all(List(orderedStudents)) should (equal (orderedStudents) and be (sorted))
        all(List(orderedString)) should (be (sorted) and be (sorted))
        all(List(orderedString)) should (be (sorted) and be_== (orderedString))
        all(List(orderedString)) should (be_== (orderedString) and be (sorted))
      }
      def `should throw TestFailedException with correct stack depth when xs is not sorted` {
        val left1 = List(outOfOrderInts)
        val caught1 = intercept[TestFailedException] {
          all(left1) should (be_== (outOfOrderInts) and be (sorted))
        }
        assert(caught1.message === Some(allInspectionFailed(0, wasEqualTo(outOfOrderInts, outOfOrderInts) + ", but " + wasNotSorted(outOfOrderInts), thisLineNumber - 2, left1)))
        assert(caught1.failedCodeFileName === Some("ShouldBeSortedLogicalAndSpec.scala"))
        assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
        val left2 = List(outOfOrderInts)
        val caught2 = intercept[TestFailedException] {
          all(left2) should (be (sorted) and be_== (outOfOrderInts))
        }
        assert(caught2.message === Some(allInspectionFailed(0, wasNotSorted(outOfOrderInts), thisLineNumber - 2, left2)))
        assert(caught2.failedCodeFileName === Some("ShouldBeSortedLogicalAndSpec.scala"))
        assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))
        val left3 = List(outOfOrderStudents)
        val caught3 = intercept[TestFailedException] {
          all(left3) should (equal (outOfOrderStudents) and be (sorted))
        }
        assert(caught3.message === Some(allInspectionFailed(0, equaled(outOfOrderStudents, outOfOrderStudents) + ", but " + wasNotSorted(outOfOrderStudents), thisLineNumber - 2, left3)))
        assert(caught3.failedCodeFileName === Some("ShouldBeSortedLogicalAndSpec.scala"))
        assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))
        val left4 = List(outOfOrderStudents)
        val caught4 = intercept[TestFailedException] {
          all(left4) should (be (sorted) and equal (outOfOrderStudents))
        }
        assert(caught4.message === Some(allInspectionFailed(0, wasNotSorted(outOfOrderStudents), thisLineNumber - 2, left4)))
        assert(caught4.failedCodeFileName === Some("ShouldBeSortedLogicalAndSpec.scala"))
        assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
        val left5 = List(outOfOrderString)
        val caught5 = intercept[TestFailedException] {
          all(left5) should (be_== (outOfOrderString) and be (sorted))
        }
        assert(caught5.message === Some(allInspectionFailed(0, wasEqualTo(outOfOrderString, outOfOrderString) + ", but " + wasNotSorted(outOfOrderString), thisLineNumber - 2, left5)))
        assert(caught5.failedCodeFileName === Some("ShouldBeSortedLogicalAndSpec.scala"))
        assert(caught5.failedCodeLineNumber === Some(thisLineNumber - 4))
        val left6 = List(outOfOrderString)
        val caught6 = intercept[TestFailedException] {
          all(left6) should (be (sorted) and be_== (outOfOrderString))
        }
        assert(caught6.message === Some(allInspectionFailed(0, wasNotSorted(outOfOrderString), thisLineNumber - 2, left6)))
        assert(caught6.failedCodeFileName === Some("ShouldBeSortedLogicalAndSpec.scala"))
        assert(caught6.failedCodeLineNumber === Some(thisLineNumber - 4))
      }
      def `should use implicit Sortable when available` {
        intercept[TestFailedException] {
          all(List(outOfOrderInts)) should (be (sorted) and be_== (outOfOrderInts))
        }
        implicit val imp = trueSortable
        all(List(outOfOrderInts)) should (be (sorted) and be_== (outOfOrderInts))
      }
      def `should use explicitly specified Sortable` {
        intercept[TestFailedException] {
          all(List(outOfOrderInts)) should (be (sorted) and equal (outOfOrderInts))
        }
        (all(List(outOfOrderInts)) should (be (sorted) and equal (outOfOrderInts))) (trueSortable, defaultEquality[List[Int]])
      }
    }
    // Negated form through the all(...) inspector.
    object `when work with 'all(xs) should not be sorted'` {
      def `should do nothing when xs is not sorted` {
        all(List(outOfOrderInts)) should (not be sorted and not be_== outOfOrderStudents)
        all(List(outOfOrderInts)) should (not be_== outOfOrderStudents and not be sorted)
        all(List(outOfOrderStudents)) should (not be sorted and not equal outOfOrderInts)
        all(List(outOfOrderStudents)) should (not equal outOfOrderInts and not be sorted)
        all(List(outOfOrderString)) should (not be sorted and not be_== outOfOrderStudents)
        all(List(outOfOrderString)) should (not be_== outOfOrderStudents and not be sorted)
      }
      def `should throw TestFailedException with correct stack depth when xs is not sorted` {
        val left1 = List(orderedInts)
        val caught1 = intercept[TestFailedException] {
          all(left1) should (not be_== outOfOrderStudents and not be sorted)
        }
        assert(caught1.message === Some(allInspectionFailed(0, wasNotEqualTo(orderedInts, outOfOrderStudents) + ", but " + wasSorted(orderedInts), thisLineNumber - 2, left1)))
        assert(caught1.failedCodeFileName === Some("ShouldBeSortedLogicalAndSpec.scala"))
        assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
        val left2 = List(orderedInts)
        val caught2 = intercept[TestFailedException] {
          all(left2) should (not be sorted and not be_== outOfOrderStudents)
        }
        assert(caught2.message === Some(allInspectionFailed(0, wasSorted(orderedInts), thisLineNumber - 2, left2)))
        assert(caught2.failedCodeFileName === Some("ShouldBeSortedLogicalAndSpec.scala"))
        assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))
        val left3 = List(orderedStudents)
        val caught3 = intercept[TestFailedException] {
          all(left3) should (not equal outOfOrderInts and not be sorted)
        }
        assert(caught3.message === Some(allInspectionFailed(0, didNotEqual(orderedStudents, outOfOrderInts) + ", but " + wasSorted(orderedStudents), thisLineNumber - 2, left3)))
        assert(caught3.failedCodeFileName === Some("ShouldBeSortedLogicalAndSpec.scala"))
        assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))
        val left4 = List(orderedStudents)
        val caught4 = intercept[TestFailedException] {
          all(left4) should (not be sorted and not equal outOfOrderInts)
        }
        assert(caught4.message === Some(allInspectionFailed(0, wasSorted(orderedStudents), thisLineNumber - 2, left4)))
        assert(caught4.failedCodeFileName === Some("ShouldBeSortedLogicalAndSpec.scala"))
        assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
        val left5 = List(orderedString)
        val caught5 = intercept[TestFailedException] {
          all(left5) should (not be_== outOfOrderStudents and not be sorted)
        }
        assert(caught5.message === Some(allInspectionFailed(0, wasNotEqualTo(orderedString, outOfOrderStudents) + ", but " + wasSorted(orderedString), thisLineNumber - 2, left5)))
        assert(caught5.failedCodeFileName === Some("ShouldBeSortedLogicalAndSpec.scala"))
        assert(caught5.failedCodeLineNumber === Some(thisLineNumber - 4))
        val left6 = List(orderedString)
        val caught6 = intercept[TestFailedException] {
          all(left6) should (not be sorted and not be_== outOfOrderStudents)
        }
        assert(caught6.message === Some(allInspectionFailed(0, wasSorted(orderedString), thisLineNumber - 2, left6)))
        assert(caught6.failedCodeFileName === Some("ShouldBeSortedLogicalAndSpec.scala"))
        assert(caught6.failedCodeLineNumber === Some(thisLineNumber - 4))
      }
      def `should use implicit Sortable when available` {
        intercept[TestFailedException] {
          all(List(orderedInts)) should (not be (sorted) and not be_== (outOfOrderInts))
        }
        implicit val imp = falseSortable
        all(List(orderedInts)) should (not be (sorted) and not be_== (outOfOrderInts))
      }
      def `should use explicitly specified Sortable` {
        intercept[TestFailedException] {
          all(List(orderedInts)) should (not be (sorted) and not equal (outOfOrderInts))
        }
        (all(List(orderedInts)) should (not be (sorted) and not equal (outOfOrderInts))) (falseSortable, defaultEquality[List[Int]])
      }
    }
    // shouldBe and shouldNot does not support logical expression, and they never will!
  }
}
| SRGOM/scalatest | scalatest-test/src/test/scala/org/scalatest/ShouldBeSortedLogicalAndSpec.scala | Scala | apache-2.0 | 19,641 |
package com.github.simonedeponti.play26lettuce
import java.time.Instant
import java.util.Date
import java.util.concurrent.{Callable, CompletionStage}
import akka.Done
import org.specs2.mutable._
import org.specs2.concurrent.ExecutionEnv
import play.api.PlayException
import play.api.cache.{AsyncCacheApi, SyncCacheApi}
import play.cache.{AsyncCacheApi => JavaAsyncCacheApi, SyncCacheApi => JavaSyncCacheApi}
import play.cache.NamedCacheImpl
import scala.collection.JavaConverters._
import scala.concurrent.Future
import scala.concurrent.duration.Duration
import scala.util.{Failure, Success, Try}
import scala.compat.java8.FutureConverters._
class LettuceSpec extends Specification {
sequential
private def redisURL(db: String): String = Try(sys.env("REDIS_URL")) match {
case Success(v) => v.replace("/0", s"/$db")
case Failure(e) => e match {
case _: NoSuchElementException => s"redis://localhost/$db"
case e: Throwable => throw e
}
}
  private val environment = play.api.Environment.simple()
  // Base application configuration: enables the Lettuce cache module and
  // declares a "default" cache (Redis db 0) plus a named "secondary" cache
  // (Redis db 1).
  private val configurationMap: Map[String, Object] = Map(
    "play.modules.enabled" -> List(
      "play.api.i18n.I18nModule",
      "play.api.mvc.CookiesModule",
      "com.github.simonedeponti.play26lettuce.LettuceModule",
      "play.api.inject.BuiltinModule"
    ).asJava,
    "play.allowGlobalApplication" -> "false",
    "play.cache.defaultCache" -> "default",
    "play.cache.bindCaches" -> List("secondary").asJava,
    "lettuce.default.url" -> redisURL("0"),
    "lettuce.secondary.url" -> redisURL("1")
  )
  private val configuration = play.api.Configuration.from(
    configurationMap
  )
  // Misconfiguration fixture: both caches point at the same Redis database,
  // which the module is expected to reject.
  private val wrongConfiguration = play.api.Configuration.from(
    configurationMap ++ Map("lettuce.secondary.url" -> redisURL("0"))
  )
  private val emptyConfiguration = play.api.Configuration.empty
  private val modules = play.api.inject.Modules.locate(environment, configuration)
  // Sanity check: the Lettuce module is discoverable among located modules.
  "play26-lettuce" should {
    "provide LettuceModule" in {
      modules.find { module => module.isInstanceOf[LettuceModule] }.get.asInstanceOf[LettuceModule] must beAnInstanceOf[LettuceModule]
    }
  }
  // Binding construction from configuration, including failure cases.
  "Module" should {
    "provide bindings" in {
      val lettuceModule = modules.find { module => module.isInstanceOf[LettuceModule] }.get.asInstanceOf[LettuceModule]
      val bindings = lettuceModule.bindings(environment, configuration)
      bindings.size mustNotEqual 0
    }
    "provide no bindings with empty configuration" in {
      val lettuceModule = modules.find { module => module.isInstanceOf[LettuceModule] }.get.asInstanceOf[LettuceModule]
      val bindings = lettuceModule.bindings(environment, emptyConfiguration)
      bindings.size mustEqual 0
    }
    "report an error if we have a URL conflict" in {
      val lettuceModule = modules.find { module => module.isInstanceOf[LettuceModule] }.get.asInstanceOf[LettuceModule]
      lettuceModule.bindings(environment, wrongConfiguration) must throwA[PlayException]
    }
  }
  // Dependency-injection wiring: the expected implementations are bound for
  // both the default and the named ("secondary") cache, sync and async.
  "Injector" should {
    def app = play.test.Helpers.fakeApplication(
      configurationMap.asJava
    )
    def injector = app.injector
    "provide lettuce clients" in {
      val lettuceClient = injector.instanceOf(play.api.inject.BindingKey(classOf[LettuceCacheApi]))
      lettuceClient must beAnInstanceOf[LettuceClient]
    }
    "provide a AsyncCacheApi implementation backed by lettuce" in {
      val cacheApi = injector.instanceOf(play.api.inject.BindingKey(classOf[AsyncCacheApi]))
      cacheApi must beAnInstanceOf[LettuceCacheApi]
      cacheApi.asInstanceOf[LettuceCacheApi].name must equalTo ("default")
    }
    "provide a named AsyncCacheApi implementation backed by lettuce" in {
      val cacheApi = injector.instanceOf(play.api.inject.BindingKey(classOf[AsyncCacheApi]).qualifiedWith(new NamedCacheImpl("secondary")))
      cacheApi must beAnInstanceOf[LettuceCacheApi]
      cacheApi.asInstanceOf[LettuceCacheApi].name must equalTo ("secondary")
    }
    "provide a SyncCacheApi implementation backed by lettuce" in {
      val cacheApi = injector.instanceOf(play.api.inject.BindingKey(classOf[SyncCacheApi]))
      cacheApi must beAnInstanceOf[SyncWrapper]
      cacheApi.asInstanceOf[SyncWrapper].acache.name must equalTo ("default")
    }
    "provide a named SyncCacheApi implementation backed by lettuce" in {
      val cacheApi = injector.instanceOf(play.api.inject.BindingKey(classOf[SyncCacheApi]).qualifiedWith(new NamedCacheImpl("secondary")))
      cacheApi must beAnInstanceOf[SyncWrapper]
      cacheApi.asInstanceOf[SyncWrapper].acache.name must equalTo ("secondary")
    }
  }
  // Integration tests against a live Redis (see redisURL): exercises the
  // async cache API end to end. Tests are `sequential` because they share
  // keys and databases.
  "LettuceCacheApi" should {
    def app = play.test.Helpers.fakeApplication(
      configurationMap.asJava
    )
    def injector = app.injector
    "set correctly with infinite expire" in {
      implicit ee: ExecutionEnv => {
        val cacheApi = injector.instanceOf(play.api.inject.BindingKey(classOf[AsyncCacheApi]))
        val result_inf: Future[Done] = cacheApi.set("foo", 1)
        result_inf must beAnInstanceOf[Done].await
      }
    }
    "set correctly with finite expire" in {
      implicit ee: ExecutionEnv => {
        val cacheApi = injector.instanceOf(play.api.inject.BindingKey(classOf[AsyncCacheApi]))
        val result_fin: Future[Done] = cacheApi.set("bar", 1, Duration(1, "seconds"))
        result_fin must beAnInstanceOf[Done].await
      }
    }
    "get some if present" in {
      implicit ee: ExecutionEnv => {
        val cacheApi = injector.instanceOf(play.api.inject.BindingKey(classOf[AsyncCacheApi]))
        val result_ok: Future[Option[Int]] = cacheApi.set("baz", 1).flatMap(
          _ => cacheApi.get[Int]("baz")
        )
        result_ok must beSome(1).await
      }
    }
    // Bulk operations (getAll/setAll/remove(Seq)) are on the Lettuce-specific
    // API, not on Play's generic AsyncCacheApi.
    "get multiple some if present" in {
      implicit ee: ExecutionEnv => {
        val cacheApi = injector.instanceOf(play.api.inject.BindingKey(classOf[LettuceCacheApi]))
        val result: Future[Seq[Option[Integer]]] = for {
          _ <- cacheApi.set("qux", 1)
          _ <- cacheApi.set("quux", 2)
          get <- cacheApi.getAll[Integer](Seq("qux", "quux", "not-set"))
        } yield get
        result must beEqualTo(Seq(Some(1), Some(2), None)).await
      }
    }
    "set multiple" in {
      implicit ee: ExecutionEnv => {
        val cacheApi = injector.instanceOf(play.api.inject.BindingKey(classOf[LettuceCacheApi]))
        val result: Future[(Option[Int], Option[Int])] = for {
          _ <- cacheApi.setAll(Map("blah" -> Int.box(1), "blaah" -> Int.box(2)))
          get1 <- cacheApi.get[Int]("blah")
          get2 <- cacheApi.get[Int]("blaah")
        } yield (get1, get2)
        result must beEqualTo((Some(1), Some(2))).await
      }
    }
    "remove multiple" in {
      implicit ee: ExecutionEnv => {
        val cacheApi = injector.instanceOf(play.api.inject.BindingKey(classOf[LettuceCacheApi]))
        val result: Future[(Option[Int], Option[Int], Option[Int])] = for {
          _ <- cacheApi.setAll(Map("da" -> Int.box(1), "daa" -> Int.box(2), "daaa" -> Int.box(3)))
          _ <- cacheApi.remove(Seq("daa", "daaa"))
          get1 <- cacheApi.get[Int]("da")
          get2 <- cacheApi.get[Int]("daa")
          get3 <- cacheApi.get[Int]("daaa")
        } yield (get1, get2, get3)
        result must beEqualTo((Some(1), None, None)).await
      }
    }
    "get none if not present" in {
      implicit ee: ExecutionEnv => {
        val cacheApi = injector.instanceOf(play.api.inject.BindingKey(classOf[AsyncCacheApi]))
        val result_ko: Future[Option[Int]] = cacheApi.get[Int]("taz")
        result_ko must beNone.await
      }
    }
    // getOrElseUpdate computes on a miss, then keeps serving the cached value.
    "get or else update" in {
      implicit ee: ExecutionEnv => {
        val cacheApi = injector.instanceOf(play.api.inject.BindingKey(classOf[AsyncCacheApi]))
        val result_ok: Future[Int] = cacheApi.getOrElseUpdate[Int]("paz", Duration(10, "seconds")) {
          Future {
            1
          }
        }
        result_ok must beEqualTo(1).await
        val result_eq: Future[Int] = cacheApi.getOrElseUpdate[Int]("paz", Duration(10, "seconds")) {
          Future {
            2
          }
        }
        result_eq must beEqualTo(1).await
      }
    }
    "serialize correctly a scala class" in {
      implicit ee: ExecutionEnv => {
        val cacheApi = injector.instanceOf(play.api.inject.BindingKey(classOf[AsyncCacheApi]))
        val testItem = TestItem("cat", 3)
        val result_ok: Future[Option[TestItem]] = cacheApi.set("cat", testItem).flatMap(
          _ => cacheApi.get[TestItem]("cat")
        )
        result_ok must beSome(testItem).await
        cacheApi.remove("cat") must beAnInstanceOf[Done].await
      }
    }
    // Regression: an empty string must round-trip (not be mistaken for a miss).
    "serialize correctly a void string" in {
      implicit ee: ExecutionEnv => {
        val cacheApi = injector.instanceOf(play.api.inject.BindingKey(classOf[AsyncCacheApi]))
        val result_ok: Future[Option[String]] = cacheApi.set("void", "").flatMap(
          _ => cacheApi.get[String]("void")
        )
        result_ok must beSome("").await
        cacheApi.remove("void") must beAnInstanceOf[Done].await
      }
    }
    "remove deletes it" in {
      implicit ee: ExecutionEnv => {
        val cacheApi = injector.instanceOf(play.api.inject.BindingKey(classOf[AsyncCacheApi]))
        val result_ok: Future[Option[Int]] = cacheApi.get[Int]("foo")
        result_ok must beSome(1).await
        val result_fin: Future[Done] = cacheApi.remove("foo")
        result_fin must beAnInstanceOf[Done].await
        val result_ko: Future[Option[Int]] = cacheApi.get[Int]("foo")
        result_ko must beNone.await
      }
    }
    "removeAll should not explode" in {
      implicit ee: ExecutionEnv => {
        val cacheApi = injector.instanceOf(play.api.inject.BindingKey(classOf[AsyncCacheApi]))
        val result_fin: Future[Done] = cacheApi.removeAll()
        result_fin must beAnInstanceOf[Done].await
      }
    }
    // Cache isolation: flushing the default cache must not touch the
    // "secondary" cache, which lives in a different Redis database.
    "removeAll should not remove things from secondary" in {
      implicit ee: ExecutionEnv => {
        val defaultCache = injector.instanceOf(play.api.inject.BindingKey(classOf[AsyncCacheApi]))
        val secondaryCache = injector.instanceOf(
          play.api.inject.BindingKey(classOf[AsyncCacheApi]).qualifiedWith(new NamedCacheImpl("secondary"))
        )
        val defaultSetResult: Future[Done] = defaultCache.set("foo", 1)
        defaultSetResult must beAnInstanceOf[Done].await
        val secondarySetResult: Future[Done] = secondaryCache.set("foo", 1)
        secondarySetResult must beAnInstanceOf[Done].await
        val removeAllResult: Future[Done] = defaultCache.removeAll()
        removeAllResult must beAnInstanceOf[Done].await
        val noItemResult: Future[Option[Int]] = defaultCache.get[Int]("foo")
        noItemResult must beNone.await
        val oneItemResult: Future[Option[Int]] = secondaryCache.get[Int]("foo")
        oneItemResult must beSome[Int](1).await
        val removeAllResult2: Future[Done] = secondaryCache.removeAll()
        removeAllResult2 must beAnInstanceOf[Done].await
      }
    }
  }
  // Same behaviors through the blocking SyncCacheApi wrapper.
  "SyncCacheApi" should {
    def app = play.test.Helpers.fakeApplication(
      configurationMap.asJava
    )
    def injector = app.injector
    "set and get correctly" in {
      implicit ee: ExecutionEnv => {
        val cacheApi = injector.instanceOf(play.api.inject.BindingKey(classOf[SyncCacheApi]))
        cacheApi.set("foo", 1)
        val result: Option[Int] = cacheApi.get[Int]("foo")
        result must beSome(1)
      }
    }
    "get none if not present" in {
      implicit ee: ExecutionEnv => {
        val cacheApi = injector.instanceOf(play.api.inject.BindingKey(classOf[SyncCacheApi]))
        val result_ko: Option[Int] = cacheApi.get[Int]("taz")
        result_ko must beNone
      }
    }
    "get or else update" in {
      implicit ee: ExecutionEnv => {
        val cacheApi = injector.instanceOf(play.api.inject.BindingKey(classOf[SyncCacheApi]))
        val result_ok: Int = cacheApi.getOrElseUpdate[Int]("paz", Duration(10, "seconds")) {
          1
        }
        result_ok must beEqualTo(1)
        val result_eq: Int = cacheApi.getOrElseUpdate[Int]("paz", Duration(10, "seconds")) {
          2
        }
        result_eq must beEqualTo(1)
      }
    }
    "remove deletes it" in {
      implicit ee: ExecutionEnv => {
        val cacheApi = injector.instanceOf(play.api.inject.BindingKey(classOf[SyncCacheApi]))
        val result_ok: Option[Int] = cacheApi.get[Int]("foo")
        result_ok must beSome(1)
        cacheApi.remove("foo")
        val result_ko: Option[Int] = cacheApi.get[Int]("foo")
        result_ko must beNone
      }
    }
  }
"Java AsyncCacheApi" should {
def app = play.test.Helpers.fakeApplication(
configurationMap.asJava
)
def injector = app.injector
"set correctly with infinite expire" in {
implicit ee: ExecutionEnv => {
val cacheApi = injector.instanceOf(play.api.inject.BindingKey(classOf[JavaAsyncCacheApi]))
val result_inf: Future[Done] = cacheApi.set("foo", new Integer(1)).toScala
result_inf must beAnInstanceOf[Done].await
}
}
"set correctly with finite expire" in {
implicit ee: ExecutionEnv => {
val cacheApi = injector.instanceOf(play.api.inject.BindingKey(classOf[JavaAsyncCacheApi]))
val result_fin: Future[Done] = cacheApi.set("bar", new Integer(1), 1).toScala
result_fin must beAnInstanceOf[Done].await
}
}
"get some if present" in {
implicit ee: ExecutionEnv => {
val cacheApi = injector.instanceOf(play.api.inject.BindingKey(classOf[JavaAsyncCacheApi]))
val result_ok: Future[Integer] = cacheApi.set("baz", new Integer(1)).toScala.flatMap(
_ => cacheApi.get[Integer]("baz").toScala
)
result_ok must beEqualTo(new Integer(1)).await
}
}
"get null if not present" in {
implicit ee: ExecutionEnv => {
val cacheApi = injector.instanceOf(play.api.inject.BindingKey(classOf[JavaAsyncCacheApi]))
val result_ko: Future[Integer] = cacheApi.get[Integer]("taz").toScala
result_ko must beNull[Integer].await
}
}
"get or else update" in {
implicit ee: ExecutionEnv => {
val cacheApi = injector.instanceOf(play.api.inject.BindingKey(classOf[JavaAsyncCacheApi]))
val orElse1 = new Callable[CompletionStage[Integer]] {
def call(): CompletionStage[Integer] = Future { new Integer(1) }.toJava
}
val result_ok: Future[Integer] = cacheApi.getOrElseUpdate[Integer]("paz", orElse1, 10).toScala
result_ok must beEqualTo(new Integer(1)).await
val orElse2 = new Callable[CompletionStage[Integer]] {
def call(): CompletionStage[Integer] = Future { new Integer(2) }.toJava
}
val result_eq: Future[Integer] = cacheApi.getOrElseUpdate[Integer]("paz", orElse2, 10).toScala
result_eq must beEqualTo(new Integer(1)).await
}
}
"serialize correctly a scala class" in {
implicit ee: ExecutionEnv => {
val cacheApi = injector.instanceOf(play.api.inject.BindingKey(classOf[JavaAsyncCacheApi]))
val testItem = TestItem("cat", 3)
val result_ok: Future[TestItem] = cacheApi.set("cat", testItem).toScala.flatMap(
_ => cacheApi.get[TestItem]("cat").toScala
)
result_ok must beEqualTo(testItem).await
cacheApi.remove("cat").toScala must beAnInstanceOf[Done].await
}
}
"serialize correctly a void string" in {
implicit ee: ExecutionEnv => {
val cacheApi = injector.instanceOf(play.api.inject.BindingKey(classOf[JavaAsyncCacheApi]))
val result_ok: Future[String] = cacheApi.set("void", "").toScala.flatMap(
_ => cacheApi.get[String]("void").toScala
)
result_ok must beEqualTo("").await
cacheApi.remove("void").toScala must beAnInstanceOf[Done].await
}
}
"remove deletes it" in {
implicit ee: ExecutionEnv => {
val cacheApi = injector.instanceOf(play.api.inject.BindingKey(classOf[JavaAsyncCacheApi]))
val result_ok: Future[Integer] = cacheApi.get[Integer]("foo").toScala
result_ok must beEqualTo(new Integer(1)).await
val result_fin: Future[Done] = cacheApi.remove("foo").toScala
result_fin must beAnInstanceOf[Done].await
val result_ko: Future[Integer] = cacheApi.get[Integer]("foo").toScala
result_ko must beNull[Integer].await
}
}
"removeAll should not explode" in {
implicit ee: ExecutionEnv => {
val cacheApi = injector.instanceOf(play.api.inject.BindingKey(classOf[JavaAsyncCacheApi]))
val result_fin: Future[Done] = cacheApi.removeAll().toScala
result_fin must beAnInstanceOf[Done].await
}
}
}
"Java SyncCacheApi" should {
def app = play.test.Helpers.fakeApplication(
configurationMap.asJava
)
def injector = app.injector
"set and get correctly" in {
implicit ee: ExecutionEnv => {
val cacheApi = injector.instanceOf(play.api.inject.BindingKey(classOf[JavaSyncCacheApi]))
cacheApi.set("foo", new Integer(1))
val result: Integer = cacheApi.get[Integer]("foo")
result must beEqualTo(new Integer(1))
}
}
"get null if not present" in {
implicit ee: ExecutionEnv => {
val cacheApi = injector.instanceOf(play.api.inject.BindingKey(classOf[JavaSyncCacheApi]))
val result_ko = cacheApi.get[Integer]("taz")
result_ko must beNull[Integer]
}
}
"get or else update" in {
implicit ee: ExecutionEnv => {
val cacheApi = injector.instanceOf(play.api.inject.BindingKey(classOf[JavaSyncCacheApi]))
//noinspection ConvertExpressionToSAM
val orElse1: Callable[Integer] = new Callable[Integer] {
override def call(): Integer = new Integer(1)
}
val result_ok: Integer = cacheApi.getOrElseUpdate[Integer]("paz", orElse1, 10)
result_ok must beEqualTo(new Integer(1))
//noinspection ConvertExpressionToSAM
val orElse2: Callable[Integer] = new Callable[Integer] {
override def call(): Integer = new Integer(2)
}
val result_eq: Integer = cacheApi.getOrElseUpdate[Integer]("paz", orElse2, 10)
result_eq must beEqualTo(new Integer(1))
}
}
"remove deletes it" in {
implicit ee: ExecutionEnv => {
val cacheApi = injector.instanceOf(play.api.inject.BindingKey(classOf[JavaSyncCacheApi]))
val result_ok: Integer = cacheApi.get[Integer]("foo")
result_ok must beEqualTo(new Integer(1))
cacheApi.remove("foo")
val result_ko = cacheApi.get[Integer]("foo")
result_ko must beNull[Integer]
}
}
}
"AkkaSerialization" should {
def app = play.test.Helpers.fakeApplication(
configurationMap.asJava
)
def injector = app.injector
val now = System.currentTimeMillis()
"serialize a complex Scala class" in {
implicit ee: ExecutionEnv => {
val cacheApi = injector.instanceOf(play.api.inject.BindingKey(classOf[SyncCacheApi]))
val value: ComplexTestItem = ComplexTestItem(
"foo",
None,
Map(
"bah" -> Seq(),
"boh" -> Seq("i")
),
SubItem("e", Date.from(Instant.ofEpochMilli(now))),
Some(SubItem("e", Date.from(Instant.ofEpochMilli(now)))),
active = true
)
val result: Seq[ComplexTestItem] = cacheApi.getOrElseUpdate[Seq[ComplexTestItem]]("complexitemseq", Duration(1, "minute")) {
Seq(value)
}
result must not be empty
result.head.id must beEqualTo("foo")
val result2: Seq[ComplexTestItem] = cacheApi.getOrElseUpdate[Seq[ComplexTestItem]]("complexitemseq", Duration(1, "minute")) {
Seq(value)
}
result2.head.id must beEqualTo(result.head.id)
}
}
"serialize correctly empty sequences" in {
implicit ee: ExecutionEnv => {
val cacheApi = injector.instanceOf(play.api.inject.BindingKey(classOf[SyncCacheApi]))
val result: Seq[ComplexTestItem] = cacheApi.getOrElseUpdate[Seq[ComplexTestItem]]("voidseq", Duration(1, "minute")) {
Seq[ComplexTestItem]()
}
result must beEmpty
val result2: Seq[ComplexTestItem] = cacheApi.getOrElseUpdate[Seq[ComplexTestItem]]("voidseq", Duration(1, "minute")) {
Seq[ComplexTestItem]()
}
result2 must beEmpty
}
}
}
}
| simonedeponti/play26-lettuce | src/test/scala/com/github/simonedeponti/play26lettuce/LettuceSpec.scala | Scala | bsd-3-clause | 20,642 |
package org.five11
import scala.xml.XML
import scalaj.http._
/** Thin client for the 511.org Transit 2.0 XML API.
  *
  * Each method performs a blocking HTTP request via scalaj-http and parses
  * the XML payload into the domain model (Agency, Route, Direction, Stop,
  * Departure).
  *
  * @param token API token sent with every request as a query parameter
  */
class Api(token: String) {
  private val endpoints = new Endpoints(token)
  /** Fetches every transit agency exposed by the service. */
  def agencies(): Seq[Agency] = {
    val req = endpoints.agencies.asString
    val body = XML.loadString(req.body)
    (body \\ "Agency").map{ agencyNode =>
      val name = (agencyNode \ "@Name").text
      // "HasDirecton" (sic) mirrors the misspelt attribute name in the feed.
      val hasDirecton = (agencyNode \ "@HasDirecton").text == "True"
      val mode = (agencyNode \ "@Mode").text
      new Agency(name, mode, hasDirecton)(this)
    }
  }
  /** Fetches the routes operated by the given agency. */
  def routes(agency: Agency): Seq[Route] = {
    val req = endpoints.routesForAgency.param("agencyName", agency.name).asString
    val body = XML.loadString(req.body)
    (body \\ "Route").map{ routeNode =>
      val name = (routeNode \ "@Name").text
      val code = (routeNode \ "@Code").text
      new Route(name, code, agency)(this)
    }
  }
  /** Derives the distinct directions served by an agency, each linked (lazily,
    * via the by-name `getRoutes` thunk) to the routes that share its code.
    * Grouping is by XML node equality, so structurally identical
    * RouteDirection elements from different routes collapse into one. */
  def directions(agency: Agency): Seq[Direction] = {
    val req = endpoints.routesForAgency.param("agencyName", agency.name).asString
    val body = XML.loadString(req.body)
    (body \\ "Route").map{ routeNode =>
      (routeNode \\ "RouteDirection").map{ directionNode => (routeNode, directionNode) }
    }.
      flatten.
      groupBy{ case (routeNode, directionNode) => directionNode }.
      map{ case(directionNode, tuples) =>
        val name = (directionNode \ "@Name").text
        val code = (directionNode \ "@Code").text
        val routeCodes = tuples.map{ case (routeNode, directionNode) =>
          (routeNode \ "@Code").text
        }
        def getRoutes = agency.routes.filter{ route =>
          routeCodes.contains(route.code)
        }
        new Direction(name, code, agency, getRoutes)
      }.
      to[Seq]
  }
  /** Fetches all stops for an agency, de-duplicated across routes/directions.
    * NOTE(review): routes without directions contribute a `null` direction to
    * the tuples, so a Stop's `directions` may contain null — callers must
    * tolerate this; consider Option[Direction] instead. */
  def stops(agency: Agency): Seq[Stop] = {
    agency.routes.map{ route =>
      if (route.directions.isEmpty) {
        val idf = routeIDF(route)
        val req = endpoints.stopsForRoute.param("routeIDF", idf).asString
        val body = XML.loadString(req.body)
        (body \\ "Stop").map{ stopNode => (route, null, stopNode) }
      } else {
        route.directions.map{ direction =>
          val idf = routeIDF(route, direction)
          val req = endpoints.stopsForRoute.param("routeIDF", idf).asString
          val body = XML.loadString(req.body)
          (body \\ "Stop").map{ stopNode => (route, direction, stopNode) }
        }.flatten
      }
    }.
      flatten.
      groupBy{ case (route, direction, stopNode) => stopNode }.
      map{ case (stopNode, tuples) =>
        // NOTE(review): attribute is lowercase "name" here but "Name"
        // elsewhere — confirm against the 511 Transit 2.0 schema.
        val name = (stopNode \ "@name").text
        val code = (stopNode \ "@StopCode").text
        val routes = tuples.map{ case (route, direction, stopNode) =>
          route
        }.distinct
        val directions = tuples.map{ case (route, direction, stopNode) =>
          direction
        }.distinct
        new Stop(name, code, agency, routes, directions)(this)
      }.
      to[Seq]
  }
  /** Fetches upcoming departures for a stop, resolving each Route/Direction
    * node in the response back to the stop's known routes and directions.
    * Unknown route or direction codes are silently skipped. */
  def departures(stop: Stop): Seq[Departure] = {
    val req = endpoints.departuresForStop.param("stopcode", stop.code).asString
    val body = XML.loadString(req.body)
    (body \\ "Route").map{ routeNode =>
      val route = stop.routes.find{ (routeNode \ "@Code").text == _.code }
      if (!route.isDefined) {
        Seq()
      } else if (route.get.directions.isEmpty) {
        (routeNode \\ "DepartureTime").map{ departureNode =>
          new Departure(departureNode.text.toInt, stop, route.get)
        }
      } else {
        (routeNode \\ "RouteDirection").map{ directionNode =>
          val direction = route.get.directions.find{ (directionNode \ "@Code").text == _.code }
          if (!direction.isDefined) {
            Seq()
          } else {
            (directionNode \\ "DepartureTime").map{ departureNode =>
              new Departure(departureNode.text.toInt, stop, route.get, direction.get)
            }
          }
        }.flatten
      }
    }.
      flatten
  }
  // Route identifier for the stops endpoint: agency~route~direction.
  private def routeIDF(route: Route, direction: Direction): String = {
    Array(route.agency.name, route.code, direction.code).mkString("~")
  }
  // Route identifier for directionless routes: agency~route.
  private def routeIDF(route: Route): String = {
    Array(route.agency.name, route.code).mkString("~")
  }
}
/** Pre-built HTTP requests for the 511.org Transit 2.0 service pages, each
  * carrying the caller's API token as a query parameter. */
sealed class Endpoints(token: String) {
  private val baseUrl = "http://services.my511.org/Transit2.0/"
  // Builds a tokenized request for one service page under the base URL.
  private def request(page: String) = Http(baseUrl + page).param("token", token)
  val agencies = request("GetAgencies.aspx")
  val routesForAgency = request("GetRoutesForAgency.aspx")
  val stopsForRoute = request("GetStopsForRoutes.aspx")
  val departuresForStop = request("GetNextDeparturesByStopCode.aspx")
}
| jnfeinstein/scala-511 | src/main/scala/org/five11/Api.scala | Scala | mit | 4,538 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.executor
import java.net.URL
import java.nio.ByteBuffer
import java.util.Locale
import java.util.concurrent.atomic.AtomicBoolean
import scala.collection.mutable
import scala.util.{Failure, Success}
import scala.util.control.NonFatal
import io.netty.util.internal.PlatformDependent
import org.json4s.DefaultFormats
import org.apache.spark._
import org.apache.spark.TaskState.TaskState
import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.deploy.worker.WorkerWatcher
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config._
import org.apache.spark.resource.ResourceInformation
import org.apache.spark.resource.ResourceProfile
import org.apache.spark.resource.ResourceProfile._
import org.apache.spark.resource.ResourceUtils._
import org.apache.spark.rpc._
import org.apache.spark.scheduler.{ExecutorLossMessage, ExecutorLossReason, TaskDescription}
import org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages._
import org.apache.spark.serializer.SerializerInstance
import org.apache.spark.util.{ChildFirstURLClassLoader, MutableURLClassLoader, SignalUtils, ThreadUtils, Utils}
/**
 * Executor backend used by coarse-grained cluster managers. It hosts a single
 * RPC endpoint that registers with the driver, launches and kills tasks on
 * the local [[Executor]], relays task status updates back to the driver, and
 * handles graceful decommissioning and shutdown requests.
 */
private[spark] class CoarseGrainedExecutorBackend(
    override val rpcEnv: RpcEnv,
    driverUrl: String,
    executorId: String,
    bindAddress: String,
    hostname: String,
    cores: Int,
    env: SparkEnv,
    resourcesFileOpt: Option[String],
    resourceProfile: ResourceProfile)
  extends IsolatedRpcEndpoint with ExecutorBackend with Logging {
  import CoarseGrainedExecutorBackend._
  private implicit val formats = DefaultFormats
  // Flipped once a shutdown has begun; guards against double-exits.
  private[spark] val stopping = new AtomicBoolean(false)
  var executor: Executor = null
  @volatile var driver: Option[RpcEndpointRef] = None
  // If this CoarseGrainedExecutorBackend is changed to support multiple threads, then this may need
  // to be changed so that we don't share the serializer instance across threads
  private[this] val ser: SerializerInstance = env.closureSerializer.newInstance()
  private var _resources = Map.empty[String, ResourceInformation]
  /**
   * Map each taskId to the information about the resource allocated to it, Please refer to
   * [[ResourceInformation]] for specifics.
   * Exposed for testing only.
   */
  private[executor] val taskResources = new mutable.HashMap[Long, Map[String, ResourceInformation]]
  private var decommissioned = false
  /**
   * Registers the decommission signal handler (when enabled), validates the
   * Netty direct-memory limit, parses/discovers local resources, then
   * asynchronously registers this executor with the driver. Any failure along
   * the way terminates the executor via exitExecutor.
   */
  override def onStart(): Unit = {
    if (env.conf.get(DECOMMISSION_ENABLED)) {
      val signal = env.conf.get(EXECUTOR_DECOMMISSION_SIGNAL)
      logInfo(s"Registering SIG$signal handler to trigger decommissioning.")
      SignalUtils.register(signal, s"Failed to register SIG$signal handler - disabling" +
        s" executor decommission feature.") (self.askSync[Boolean](ExecutorDecommissionSigReceived))
    }
    logInfo("Connecting to driver: " + driverUrl)
    try {
      if (PlatformDependent.directBufferPreferred() &&
          PlatformDependent.maxDirectMemory() < env.conf.get(MAX_REMOTE_BLOCK_SIZE_FETCH_TO_MEM)) {
        throw new SparkException(s"Netty direct memory should at least be bigger than " +
          s"'${MAX_REMOTE_BLOCK_SIZE_FETCH_TO_MEM.key}', but got " +
          s"${PlatformDependent.maxDirectMemory()} bytes < " +
          s"${env.conf.get(MAX_REMOTE_BLOCK_SIZE_FETCH_TO_MEM)}")
      }
      _resources = parseOrFindResources(resourcesFileOpt)
    } catch {
      case NonFatal(e) =>
        exitExecutor(1, "Unable to create executor due to " + e.getMessage, e)
    }
    rpcEnv.asyncSetupEndpointRefByURI(driverUrl).flatMap { ref =>
      // This is a very fast action so we can use "ThreadUtils.sameThread"
      driver = Some(ref)
      ref.ask[Boolean](RegisterExecutor(executorId, self, hostname, cores, extractLogUrls,
        extractAttributes, _resources, resourceProfile.id))
    }(ThreadUtils.sameThread).onComplete {
      case Success(_) =>
        self.send(RegisteredExecutor)
      case Failure(e) =>
        exitExecutor(1, s"Cannot register with driver: $driverUrl", e, notifyDriver = false)
    }(ThreadUtils.sameThread)
  }
  /**
   * Create a classLoader for use for resource discovery. The user could provide a class
   * as a substitute for the default one so we have to be able to load it from a user specified
   * jar.
   */
  private def createClassLoader(): MutableURLClassLoader = {
    val currentLoader = Utils.getContextOrSparkClassLoader
    val urls = getUserClassPath.toArray
    if (env.conf.get(EXECUTOR_USER_CLASS_PATH_FIRST)) {
      new ChildFirstURLClassLoader(urls, currentLoader)
    } else {
      new MutableURLClassLoader(urls, currentLoader)
    }
  }
  // visible for testing
  def parseOrFindResources(resourcesFileOpt: Option[String]): Map[String, ResourceInformation] = {
    // use a classloader that includes the user classpath in case they specified a class for
    // resource discovery
    val urlClassLoader = createClassLoader()
    logDebug(s"Resource profile id is: ${resourceProfile.id}")
    Utils.withContextClassLoader(urlClassLoader) {
      val resources = getOrDiscoverAllResourcesForResourceProfile(
        resourcesFileOpt,
        SPARK_EXECUTOR_PREFIX,
        resourceProfile,
        env.conf)
      logResourceInfo(SPARK_EXECUTOR_PREFIX, resources)
      resources
    }
  }
  def getUserClassPath: Seq[URL] = Nil
  // Collects SPARK_LOG_URL_* environment variables, lower-casing the suffix.
  def extractLogUrls: Map[String, String] = {
    val prefix = "SPARK_LOG_URL_"
    sys.env.filterKeys(_.startsWith(prefix))
      .map(e => (e._1.substring(prefix.length).toLowerCase(Locale.ROOT), e._2)).toMap
  }
  // Collects SPARK_EXECUTOR_ATTRIBUTE_* environment variables, upper-casing
  // the suffix.
  def extractAttributes: Map[String, String] = {
    val prefix = "SPARK_EXECUTOR_ATTRIBUTE_"
    sys.env.filterKeys(_.startsWith(prefix))
      .map(e => (e._1.substring(prefix.length).toUpperCase(Locale.ROOT), e._2)).toMap
  }
  // Fire-and-forget message handling: executor lifecycle and task commands.
  override def receive: PartialFunction[Any, Unit] = {
    case RegisteredExecutor =>
      logInfo("Successfully registered with driver")
      try {
        executor = new Executor(executorId, hostname, env, getUserClassPath, isLocal = false,
          resources = _resources)
        driver.get.send(LaunchedExecutor(executorId))
      } catch {
        case NonFatal(e) =>
          exitExecutor(1, "Unable to create executor due to " + e.getMessage, e)
      }
    case LaunchTask(data) =>
      if (executor == null) {
        exitExecutor(1, "Received LaunchTask command but executor was null")
      } else {
        val taskDesc = TaskDescription.decode(data.value)
        logInfo("Got assigned task " + taskDesc.taskId)
        taskResources(taskDesc.taskId) = taskDesc.resources
        executor.launchTask(this, taskDesc)
      }
    case KillTask(taskId, _, interruptThread, reason) =>
      if (executor == null) {
        exitExecutor(1, "Received KillTask command but executor was null")
      } else {
        executor.killTask(taskId, interruptThread, reason)
      }
    case StopExecutor =>
      stopping.set(true)
      logInfo("Driver commanded a shutdown")
      // Cannot shutdown here because an ack may need to be sent back to the caller. So send
      // a message to self to actually do the shutdown.
      self.send(Shutdown)
    case Shutdown =>
      stopping.set(true)
      new Thread("CoarseGrainedExecutorBackend-stop-executor") {
        override def run(): Unit = {
          // `executor` can be null if there's any error in `CoarseGrainedExecutorBackend.onStart`
          // or fail to create `Executor`.
          if (executor == null) {
            System.exit(1)
          } else {
            // executor.stop() will call `SparkEnv.stop()` which waits until RpcEnv stops totally.
            // However, if `executor.stop()` runs in some thread of RpcEnv, RpcEnv won't be able to
            // stop until `executor.stop()` returns, which becomes a dead-lock (See SPARK-14180).
            // Therefore, we put this line in a new thread.
            executor.stop()
          }
        }
      }.start()
    case UpdateDelegationTokens(tokenBytes) =>
      logInfo(s"Received tokens of ${tokenBytes.length} bytes")
      SparkHadoopUtil.get.addDelegationTokens(tokenBytes, env.conf)
    case DecommissionExecutor =>
      decommissionSelf()
  }
  // Request/reply handling: decommission triggered via signal handler.
  override def receiveAndReply(context: RpcCallContext): PartialFunction[Any, Unit] = {
    case ExecutorDecommissionSigReceived =>
      var driverNotified = false
      try {
        driver.foreach { driverRef =>
          // Tell driver that we are starting decommissioning so it stops trying to schedule us
          driverNotified = driverRef.askSync[Boolean](ExecutorDecommissioning(executorId))
          if (driverNotified) decommissionSelf()
        }
      } catch {
        case e: Exception =>
          if (driverNotified) {
            logError("Fail to decommission self (but driver has been notified).", e)
          } else {
            logError("Fail to tell driver that we are starting decommissioning", e)
          }
          decommissioned = false
      }
      context.reply(decommissioned)
  }
  override def onDisconnected(remoteAddress: RpcAddress): Unit = {
    if (stopping.get()) {
      logInfo(s"Driver from $remoteAddress disconnected during shutdown")
    } else if (driver.exists(_.address == remoteAddress)) {
      exitExecutor(1, s"Driver $remoteAddress disassociated! Shutting down.", null,
        notifyDriver = false)
    } else {
      logWarning(s"An unknown ($remoteAddress) driver disconnected.")
    }
  }
  /**
   * Relays a task state change to the driver, dropping the task's resource
   * bookkeeping once the task reaches a terminal state. Updates are dropped
   * (with a warning) if the driver reference is not yet available.
   */
  override def statusUpdate(taskId: Long, state: TaskState, data: ByteBuffer): Unit = {
    val resources = taskResources.getOrElse(taskId, Map.empty[String, ResourceInformation])
    val msg = StatusUpdate(executorId, taskId, state, data, resources)
    if (TaskState.isFinished(state)) {
      taskResources.remove(taskId)
    }
    driver match {
      case Some(driverRef) => driverRef.send(msg)
      case None => logWarning(s"Drop $msg because has not yet connected to driver")
    }
  }
  /**
   * This function can be overloaded by other child classes to handle
   * executor exits differently. For e.g. when an executor goes down,
   * back-end may not want to take the parent process down.
   */
  protected def exitExecutor(code: Int,
                             reason: String,
                             throwable: Throwable = null,
                             notifyDriver: Boolean = true) = {
    if (stopping.compareAndSet(false, true)) {
      val message = "Executor self-exiting due to : " + reason
      if (throwable != null) {
        logError(message, throwable)
      } else {
        if (code == 0) {
          logInfo(message)
        } else {
          logError(message)
        }
      }
      if (notifyDriver && driver.nonEmpty) {
        driver.get.send(RemoveExecutor(executorId, new ExecutorLossReason(reason)))
      }
      self.send(Shutdown)
    } else {
      logInfo("Skip exiting executor since it's been already asked to exit before.")
    }
  }
  /**
   * Marks this executor as decommissioning. When storage migration is enabled
   * the block manager starts migrating blocks, and a daemon thread exits the
   * process once running tasks finish and (when configured) migration is done.
   */
  private def decommissionSelf(): Unit = {
    if (!env.conf.get(DECOMMISSION_ENABLED)) {
      logWarning(s"Receive decommission request, but decommission feature is disabled.")
      return
    } else if (decommissioned) {
      logWarning(s"Executor $executorId already started decommissioning.")
      return
    }
    val msg = s"Decommission executor $executorId."
    logInfo(msg)
    try {
      decommissioned = true
      val migrationEnabled = env.conf.get(STORAGE_DECOMMISSION_ENABLED) &&
        (env.conf.get(STORAGE_DECOMMISSION_RDD_BLOCKS_ENABLED) ||
          env.conf.get(STORAGE_DECOMMISSION_SHUFFLE_BLOCKS_ENABLED))
      if (migrationEnabled) {
        env.blockManager.decommissionBlockManager()
      } else if (env.conf.get(STORAGE_DECOMMISSION_ENABLED)) {
        logError(s"Storage decommissioning attempted but neither " +
          s"${STORAGE_DECOMMISSION_SHUFFLE_BLOCKS_ENABLED.key} or " +
          s"${STORAGE_DECOMMISSION_RDD_BLOCKS_ENABLED.key} is enabled ")
      }
      if (executor != null) {
        executor.decommission()
      }
      // Shutdown the executor once all tasks are gone & any configured migrations completed.
      // Detecting migrations completion doesn't need to be perfect and we want to minimize the
      // overhead for executors that are not in decommissioning state as overall that will be
      // more of the executors. For example, this will not catch a block which is already in
      // the process of being put from a remote executor before migration starts. This trade-off
      // is viewed as acceptable to minimize introduction of any new locking structures in critical
      // code paths.
      val shutdownThread = new Thread("wait-for-blocks-to-migrate") {
        override def run(): Unit = {
          var lastTaskRunningTime = System.nanoTime()
          val sleep_time = 1000 // 1s
          // This config is internal and only used by unit tests to force an executor
          // to hang around for longer when decommissioned.
          val initialSleepMillis = env.conf.getInt(
            "spark.test.executor.decommission.initial.sleep.millis", sleep_time)
          if (initialSleepMillis > 0) {
            Thread.sleep(initialSleepMillis)
          }
          while (true) {
            logInfo("Checking to see if we can shutdown.")
            if (executor == null || executor.numRunningTasks == 0) {
              if (migrationEnabled) {
                logInfo("No running tasks, checking migrations")
                val (migrationTime, allBlocksMigrated) = env.blockManager.lastMigrationInfo()
                // We can only trust allBlocksMigrated boolean value if there were no tasks running
                // since the start of computing it.
                if (allBlocksMigrated && (migrationTime > lastTaskRunningTime)) {
                  logInfo("No running tasks, all blocks migrated, stopping.")
                  exitExecutor(0, ExecutorLossMessage.decommissionFinished, notifyDriver = true)
                } else {
                  logInfo("All blocks not yet migrated.")
                }
              } else {
                logInfo("No running tasks, no block migration configured, stopping.")
                exitExecutor(0, ExecutorLossMessage.decommissionFinished, notifyDriver = true)
              }
            } else {
              logInfo(s"Blocked from shutdown by ${executor.numRunningTasks} running tasks")
              // If there is a running task it could store blocks, so make sure we wait for a
              // migration loop to complete after the last task is done.
              // Note: this is only advanced if there is a running task, if there
              // is no running task but the blocks are not done migrating this does not
              // move forward.
              lastTaskRunningTime = System.nanoTime()
            }
            Thread.sleep(sleep_time)
          }
        }
      }
      shutdownThread.setDaemon(true)
      shutdownThread.start()
      logInfo("Will exit when finished decommissioning")
    } catch {
      case e: Exception =>
        decommissioned = false
        logError("Unexpected error while decommissioning self", e)
    }
  }
}
/** Companion: process entry point, argument parsing and bootstrap logic. */
private[spark] object CoarseGrainedExecutorBackend extends Logging {
  // Message used internally to start the executor when the driver successfully accepted the
  // registration request.
  case object RegisteredExecutor
  /** Parsed command-line options for an executor process. */
  case class Arguments(
      driverUrl: String,
      executorId: String,
      bindAddress: String,
      hostname: String,
      cores: Int,
      appId: String,
      workerUrl: Option[String],
      resourcesFileOpt: Option[String],
      resourceProfileId: Int)
  /** Entry point: parses arguments and runs the default backend factory. */
  def main(args: Array[String]): Unit = {
    val createFn: (RpcEnv, Arguments, SparkEnv, ResourceProfile) =>
      CoarseGrainedExecutorBackend = { case (rpcEnv, arguments, env, resourceProfile) =>
      new CoarseGrainedExecutorBackend(rpcEnv, arguments.driverUrl, arguments.executorId,
        arguments.bindAddress, arguments.hostname, arguments.cores,
        env, arguments.resourcesFileOpt, resourceProfile)
    }
    run(parseArguments(args, this.getClass.getCanonicalName.stripSuffix("$")), createFn)
    System.exit(0)
  }
  /**
   * Fetches the driver's Spark properties via a bootstrap RPC env, builds the
   * executor SparkEnv, registers the backend endpoint (plus an optional
   * WorkerWatcher for standalone mode) and blocks until the RpcEnv terminates.
   */
  def run(
      arguments: Arguments,
      backendCreateFn: (RpcEnv, Arguments, SparkEnv, ResourceProfile) =>
        CoarseGrainedExecutorBackend): Unit = {
    Utils.initDaemon(log)
    SparkHadoopUtil.get.runAsSparkUser { () =>
      // Debug code
      Utils.checkHost(arguments.hostname)
      // Bootstrap to fetch the driver's Spark properties.
      val executorConf = new SparkConf
      val fetcher = RpcEnv.create(
        "driverPropsFetcher",
        arguments.bindAddress,
        arguments.hostname,
        -1,
        executorConf,
        new SecurityManager(executorConf),
        numUsableCores = 0,
        clientMode = true)
      var driver: RpcEndpointRef = null
      val nTries = 3
      // Retry the driver lookup a few times before giving up.
      for (i <- 0 until nTries if driver == null) {
        try {
          driver = fetcher.setupEndpointRefByURI(arguments.driverUrl)
        } catch {
          case e: Throwable => if (i == nTries - 1) {
            throw e
          }
        }
      }
      val cfg = driver.askSync[SparkAppConfig](RetrieveSparkAppConfig(arguments.resourceProfileId))
      val props = cfg.sparkProperties ++ Seq[(String, String)](("spark.app.id", arguments.appId))
      fetcher.shutdown()
      // Create SparkEnv using properties we fetched from the driver.
      val driverConf = new SparkConf()
      for ((key, value) <- props) {
        // this is required for SSL in standalone mode
        if (SparkConf.isExecutorStartupConf(key)) {
          driverConf.setIfMissing(key, value)
        } else {
          driverConf.set(key, value)
        }
      }
      cfg.hadoopDelegationCreds.foreach { tokens =>
        SparkHadoopUtil.get.addDelegationTokens(tokens, driverConf)
      }
      driverConf.set(EXECUTOR_ID, arguments.executorId)
      val env = SparkEnv.createExecutorEnv(driverConf, arguments.executorId, arguments.bindAddress,
        arguments.hostname, arguments.cores, cfg.ioEncryptionKey, isLocal = false)
      val backend = backendCreateFn(env.rpcEnv, arguments, env, cfg.resourceProfile)
      env.rpcEnv.setupEndpoint("Executor", backend)
      arguments.workerUrl.foreach { url =>
        env.rpcEnv.setupEndpoint("WorkerWatcher",
          new WorkerWatcher(env.rpcEnv, url, isChildProcessStopping = backend.stopping))
      }
      env.rpcEnv.awaitTermination()
    }
  }
  /**
   * Parses executor command-line flags into an [[Arguments]] instance.
   * Prints usage and exits on unknown options or missing required ones;
   * hostname defaults to the local host and bindAddress to the hostname.
   */
  def parseArguments(args: Array[String], classNameForEntry: String): Arguments = {
    var driverUrl: String = null
    var executorId: String = null
    var bindAddress: String = null
    var hostname: String = null
    var cores: Int = 0
    var resourcesFileOpt: Option[String] = None
    var appId: String = null
    var workerUrl: Option[String] = None
    var resourceProfileId: Int = DEFAULT_RESOURCE_PROFILE_ID
    var argv = args.toList
    while (!argv.isEmpty) {
      argv match {
        case ("--driver-url") :: value :: tail =>
          driverUrl = value
          argv = tail
        case ("--executor-id") :: value :: tail =>
          executorId = value
          argv = tail
        case ("--bind-address") :: value :: tail =>
          bindAddress = value
          argv = tail
        case ("--hostname") :: value :: tail =>
          hostname = value
          argv = tail
        case ("--cores") :: value :: tail =>
          cores = value.toInt
          argv = tail
        case ("--resourcesFile") :: value :: tail =>
          resourcesFileOpt = Some(value)
          argv = tail
        case ("--app-id") :: value :: tail =>
          appId = value
          argv = tail
        case ("--worker-url") :: value :: tail =>
          // Worker url is used in spark standalone mode to enforce fate-sharing with worker
          workerUrl = Some(value)
          argv = tail
        case ("--resourceProfileId") :: value :: tail =>
          resourceProfileId = value.toInt
          argv = tail
        case Nil =>
        case tail =>
          // scalastyle:off println
          System.err.println(s"Unrecognized options: ${tail.mkString(" ")}")
          // scalastyle:on println
          printUsageAndExit(classNameForEntry)
      }
    }
    if (hostname == null) {
      hostname = Utils.localHostName()
      log.info(s"Executor hostname is not provided, will use '$hostname' to advertise itself")
    }
    if (driverUrl == null || executorId == null || cores <= 0 || appId == null) {
      printUsageAndExit(classNameForEntry)
    }
    if (bindAddress == null) {
      bindAddress = hostname
    }
    Arguments(driverUrl, executorId, bindAddress, hostname, cores, appId, workerUrl,
      resourcesFileOpt, resourceProfileId)
  }
  /** Prints the command-line usage message and terminates the JVM. */
  private def printUsageAndExit(classNameForEntry: String): Unit = {
    // scalastyle:off println
    System.err.println(
      s"""
      |Usage: $classNameForEntry [options]
      |
      | Options are:
      |   --driver-url <driverUrl>
      |   --executor-id <executorId>
      |   --bind-address <bindAddress>
      |   --hostname <hostname>
      |   --cores <cores>
      |   --resourcesFile <fileWithJSONResourceInformation>
      |   --app-id <appid>
      |   --worker-url <workerUrl>
      |   --resourceProfileId <id>
      |""".stripMargin)
    // scalastyle:on println
    System.exit(1)
  }
}
| chuckchen/spark | core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala | Scala | apache-2.0 | 22,170 |
package julienrf.example
import julienrf.endpoints.RequestCodec.{MethodPathQueryStringCodec, MethodPathCodec}
import julienrf.endpoints._
import julienrf.formats.FormatValue.Implicits._
import julienrf.schema.Schema
import play.api.libs.json.Json
import play.twirl.api.{Html, StringInterpolation}
/** Twirl-based HTML rendering helpers for the endpoint-documentation demo. */
object Demo {
  /** Wraps the given body fragment in the site-wide Bootstrap page chrome. */
  def render(body: => Html) = {
    html"""
      <html>
        <head>
          <link href="http://getbootstrap.com/dist/css/bootstrap.min.css" rel="stylesheet">
        </head>
        <body>
          <nav class="navbar navbar-inverse">
            <div class="container"><div class="navbar-header"><span class="navbar-brand">Play Endpoints</span></div></div>
          </nav>
          <div class="container">
            ${body}
          </div>
        </body>
      </html>
    """
  }
  /** Renders the documentation panel for one endpoint: HTTP method, path,
    * query-string parameter names, and links to its input/output schemas. */
  def documentation(endpoint: Endpoint[_]): Html = {
    // Extract method, path codec and optional query-string codec from either
    // request-codec shape.
    val (method, pathCodec, maybeQsCodec) = endpoint.codec match {
      case MethodPathCodec(m, pc) => (m, pc, None)
      case MethodPathQueryStringCodec(m, pc, qsc) => (m, pc, Some(qsc))
    }
    html"""
      <div class="panel">
        <h2>${Method.asText(method)} ${PathCodec.documentation(pathCodec)}</h2>
        <dl>
          ${
            for {
              qsCodec <- maybeQsCodec.to[Seq]
              n <- QueryStringCodec.documentation(qsCodec)
            } yield {
              html"""
                <p><b>$n</b></p>
              """
            }
          }
          ${
            for (schema <- endpoint.inputSchema) yield {
              html"""
                <p><b>Input schema</b>: <a href='${Example.schemaUrl(schema)}'>${schema.id}</a></p>
              """
            }
          }
          ${
            for (schema <- endpoint.outputSchema) yield {
              html"""
                <p><b>Output schema</b>: <a href='${Example.schemaUrl(schema)}'>${schema.id}</a></p>
              """
            }
          }
        </dl>
        <p>${endpoint.description}</p>
      </div>
    """
  }
  /** Renders the documentation fragment for one JSON schema. */
  def schemaTemplate(schema: Schema): Html =
    html"""
      <h2><a name='${schema.id}'>${schema.id}</a></h2>
      <p><b>Description:</b> ${schema.description}</p>
      <p><b>Content-Type:</b> application/json </p>
      <p><b>Format:</b></p>
      <pre>${Json.prettyPrint(schema.format)}</pre>
    """
}
| julienrf/play-endpoints | src/main/scala/julienrf/example/Demo.scala | Scala | mit | 2,203 |
package net.xylophones.planetoid.game
import net.xylophones.planetoid.game.maths.Vector2D
import net.xylophones.planetoid.game.model.{GamePhysics, PlayerIdentifier, Rocket}
class RocketFactory {

  /**
   * Builds the rocket a player starts the game with: placed at that player's
   * spawn point, with the player's initial facing direction and zero velocity.
   */
  def getRocketAtInitialPosition(playerType: PlayerIdentifier.Value, phys: GamePhysics): Rocket =
    Rocket(initialPosition(playerType, phys), initialRotation(playerType), Vector2D.zero, phys.rocketRadius)

  // Player 1 starts facing (-1, 0); player 2 mirrors it with (1, 0).
  private def initialRotation(playerType: PlayerIdentifier.Value): Vector2D = playerType match {
    case PlayerIdentifier.Player1 => Vector2D(-1, 0)
    case _                        => Vector2D(1, 0)
  }

  // Spawn points sit on the horizontal midline, inset by 1/8th of the
  // planet-free width from either side of the universe.
  private def initialPosition(playerType: PlayerIdentifier.Value, phys: GamePhysics): Vector2D = {
    val midlineY = phys.universeHeight / 2
    val inset = (phys.universeWidth - phys.planetRadius) / 8
    val x = if (playerType == PlayerIdentifier.Player1) inset else phys.universeWidth - inset
    Vector2D(x, midlineY)
  }
}
| wjsrobertson/planetoid | game/src/main/scala/net/xylophones/planetoid/game/RocketFactory.scala | Scala | apache-2.0 | 1,249 |
package com.datastax.spark.connector.mapper
import com.datastax.oss.driver.api.core.DefaultProtocolVersion.V4
import com.datastax.oss.driver.api.core.ProtocolVersion
import com.datastax.spark.connector.ColumnRef
import com.datastax.spark.connector.cql._
import com.datastax.spark.connector.datasource.CassandraSourceUtil
import com.datastax.spark.connector.types.{BigIntType, BlobType, BooleanType, ColumnType, DateType, DecimalType, DoubleType, FloatType, IntType, ListType, MapType, SmallIntType, TimestampType, TinyIntType, VarCharType}
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.types.{BooleanType => SparkSqlBooleanType, DataType => SparkSqlDataType, DateType => SparkSqlDateType, DecimalType => SparkSqlDecimalType, DoubleType => SparkSqlDoubleType, FloatType => SparkSqlFloatType, MapType => SparkSqlMapType, TimestampType => SparkSqlTimestampType, _}
class DataFrameColumnMapper[T](structType: StructType) extends ColumnMapper[T] {

  // Reading/writing column maps are intentionally unimplemented: this mapper
  // only derives table schemas from a Spark SQL StructType.
  override def columnMapForWriting(struct: StructDef,
                                   selectedColumns: IndexedSeq[ColumnRef]): ColumnMapForWriting = ???

  override def columnMapForReading(struct: StructDef,
                                   selectedColumns: IndexedSeq[ColumnRef]): ColumnMapForReading = ???

  /**
   * Derives a Cassandra table definition from the Spark SQL schema: the first
   * field becomes the single partition key, every other field a regular
   * column, each translated to the matching Cassandra driver type for the
   * given protocol version.
   */
  override def newTable(
      keyspaceName: String,
      tableName: String,
      protocolVersion: ProtocolVersion = ProtocolVersion.DEFAULT): TableDef = {
    val columns = structType.zipWithIndex.map { case (field, index) =>
      val role = if (index == 0) PartitionKeyColumn else RegularColumn
      val driverType = CassandraSourceUtil.sparkSqlToJavaDriverType(field.dataType, protocolVersion)
      ColumnDef(field.name, role, ColumnType.fromDriverType(driverType))
    }
    TableDef(keyspaceName, tableName, Seq(columns.head), Seq.empty, columns.tail)
  }
}
| datastax/spark-cassandra-connector | connector/src/main/scala/com/datastax/spark/connector/mapper/DataFrameColumnMapper.scala | Scala | apache-2.0 | 1,823 |
package org.jetbrains.plugins.scala.codeInspection.collections
import org.jetbrains.plugins.scala.codeInspection.InspectionBundle
/**
* @author Nikolay.Tropin
*/
class ZeroIndexToHeadTest extends OperationsOnCollectionInspectionTest {

  /** The inspection under test: suggests replacing `coll(0)` / `coll.apply(0)` with `coll.head`. */
  override val inspectionClass: Class[_ <: OperationOnCollectionInspection] = classOf[ZeroIndexToHeadInspection]

  /** Quick-fix hint shown to the user. */
  override def hint: String = InspectionBundle.message("replace.with.head")

  // Explicit `.apply(0)` calls are highlighted and rewritten to `.head`.
  def testApply(): Unit = {
    doTest(s"List(1, 2).${START}apply(0)$END",
      "List(1, 2).apply(0)",
      "List(1, 2).head")
    doTest(s"Seq(1, 2).${START}apply(0)$END",
      "Seq(1, 2).apply(0)",
      "Seq(1, 2).head")
  }

  // Sugared index access `coll(0)` is handled too, including through a local
  // binding and on an element of a nested collection.
  // Fixed: was declared with deprecated procedure syntax (`def testBraces() {`),
  // inconsistent with testApply above.
  def testBraces(): Unit = {
    doTest(s"List(1, 2)$START(0)$END",
      "List(1, 2)(0)",
      "List(1, 2).head")
    doTest(s"val seq = Seq(1, 2); seq$START(0)$END",
      "val seq = Seq(1, 2); seq(0)",
      "val seq = Seq(1, 2); seq.head")
    doTest(s"val arr = Array(Seq(1, 2)); arr(0)$START(0)$END",
      "val arr = Array(Seq(1, 2)); arr(0)(0)",
      "val arr = Array(Seq(1, 2)); arr(0).head")
  }
}
| triggerNZ/intellij-scala | test/org/jetbrains/plugins/scala/codeInspection/collections/ZeroIndexToHeadTest.scala | Scala | apache-2.0 | 1,064 |
import sbt._
import sbt.Keys._
import sbt.Project.Initialize
import scala.xml.{ XML, NodeSeq }
/** Manage publishing to Sonatype repos */
object Publish {

  /** Key listing the project's developers, rendered into the generated POM. */
  lazy val developers = SettingKey[Seq[Developer]]("developers", "Project's developers")
  /** Task verifying that the generated POM satisfies Maven Central's requirements. */
  lazy val checkPom = TaskKey[Unit]("check-pom", "Checks if pom contains essential elements for central")

  // Element paths (relative to the POM root) that Maven Central requires.
  lazy private[this] val centralElements = Seq(
    Seq("modelVersion"),
    Seq("groupId"),
    Seq("artifactId"),
    Seq("version"),
    Seq("packaging"),
    Seq("name"),
    Seq("description"),
    Seq("url"),
    Seq("licenses"),
    Seq("scm", "url"),
    Seq("scm", "connection"),
    Seq("developers"))

  // Loads the generated POM, warns about every required element that is
  // missing, and fails the task if any is missing at all.
  def checkePomTask = (makePom, streams) map { (f, s) =>
    val pom = XML.loadFile(f)
    val pomNodes = centralElements map ( ns => ns.foldLeft[NodeSeq](pom)(_ \\ _))
    val notSetNodes = centralElements.zip(pomNodes).filter(_._2.isEmpty)
    // `foreach`, not `map`: logging is a pure side effect, no result collection is needed.
    notSetNodes.foreach(p => s.log.warn(p._1.mkString("\\\\") + " not set in pom"))
    if (notSetNodes.nonEmpty) sys.error("pom doesn't satisfy central requirements")
  }

  def settings = Seq(
    checkPom <<= checkePomTask,
    publishMavenStyle := true,
    publishTo <<= sonatypeRepo,
    mappings in (Compile, packageBin) <++= mapTo("META-INF", "LICENSE", "NOTICE"),
    mappings in (Compile, packageSrc) <++= mapTo("META-INF", "LICENSE", "NOTICE"),
    publishArtifact in Test := false,
    pomIncludeRepository := { _ => false },
    pomExtra <<= developers(makeDevelopersXml)) // if scmInfo is defined only developers are missing

  lazy val nexus = "https://oss.sonatype.org/"

  // Snapshots go to the snapshots repository, releases to the staging repository.
  def sonatypeRepo: Initialize[Option[Resolver]] = isSnapshot { s =>
    if (s) Some("snapshots" at nexus + "content/repositories/snapshots")
    else Some("releases" at nexus + "service/local/staging/deploy/maven2")
  }

  // Maps the given project-root files into the artifact under `to/`
  // (e.g. LICENSE -> META-INF/LICENSE).
  def mapTo(to: String, filenames: String*) =
    baseDirectory map { root =>
      val path = to + (if (to.isEmpty || to.endsWith("/")) "" else "/")
      filenames.map( filename => (root / filename) -> ( path + filename))
    }

  // Renders the <developers> section of the POM; <url> is emitted only when
  // the developer has a page.
  def makeDevelopersXml(devs: Seq[Developer]) =
    <developers>
      {devs.map { dev =>
        <developer>
          <id>{dev.id}</id>
          <name>{dev.name}</name>
          {dev.page match {
            case Some(p) => <url>{p}</url>
            case _ => scala.xml.NodeSeq.Empty
          }}
        </developer>
      }}
    </developers>
}
case class Developer(id: String, name: String, page: Option[String])
| nevang/play-uid | project/Publish.scala | Scala | bsd-2-clause | 2,461 |
package slamdata.engine.std
import scalaz._
import Scalaz._
import slamdata.engine.{Data, Func, Type, Mapping, SemanticError}
import SemanticError._
import Validation.{success, failure}
import NonEmptyList.nel
trait StringLib extends Library {

  /**
   * Typer for binary string functions: folds two constant string arguments at
   * type-checking time via `f`, widens to `Type.Str` when either argument is a
   * non-constant string, and fails on anything that is not a string.
   */
  private def stringApply(f: (String, String) => String): Func.Typer = {
    case Type.Const(Data.Str(a)) :: Type.Const(Data.Str(b)) :: Nil => success(Type.Const(Data.Str(f(a, b))))
    case Type.Str :: Type.Const(Data.Str(_)) :: Nil => success(Type.Str)
    case Type.Const(Data.Str(_)) :: Type.Str :: Nil => success(Type.Str)
    case Type.Str :: Type.Str :: Nil => success(Type.Str)

    case t :: _ => failure(nel(TypeError(Type.Str, t, None), Nil))
    case Nil => failure(nel(GenericError("expected arguments"), Nil))
  }

  // Shared untyper: a string result implies two string arguments.
  private val StringUnapply: Func.Untyper = {
    case Type.Str => success(Type.Str :: Type.Str :: Nil)
    case t => failure(nel(TypeError(Type.Str, t, None), Nil))
  }

  // TODO: variable arity
  val Concat = Mapping("concat", "Concatenates two (or more) string values", Type.Str :: Type.Str :: Nil,
    stringApply(_ + _),
    StringUnapply
  )

  // LIKE requires its second argument (the pattern) to be a string *constant*;
  // a non-constant pattern or a non-string subject is rejected.
  val Like = Mapping("(like)", "Determines if a string value matches a pattern", Type.Str :: Type.Str :: Nil,
    ts => ts match {
      case Type.Str :: Type.Const(Data.Str(_)) :: Nil => success(Type.Bool)
      case Type.Str :: t :: Nil => failure(nel(GenericError("expected string constant for LIKE"), Nil))
      case t :: Type.Const(Data.Str(_)) :: Nil => failure(nel(TypeError(Type.Str, t, None), Nil))
      case _ => failure(nel(GenericError("expected arguments"), Nil))
    },
    t => t match {
      case Type.Bool => success(Type.Str :: Type.Str :: Nil)
      case _ => failure(nel(TypeError(Type.Numeric, t, Some("boolean function where non-boolean expression is expected")), Nil))
    }
  )

  def functions = Concat :: Like :: Nil
}
object StringLib extends StringLib | sellout/slamengine-old | src/main/scala/slamdata/engine/std/string.scala | Scala | agpl-3.0 | 2,016 |
/*
// Note: This is an example Build.scala for a Play project
import sbt._
import Keys._
import play.Project._
object ApplicationBuild extends Build {
val appName = "activator-template-template"
val appVersion = "1.0-SNAPSHOT"
val appDependencies = Seq(
// Select Play modules
//jdbc, // The JDBC connection pool and the play.ru.freefry.furniture_factory.api.db API
//anorm, // Scala RDBMS Library
//javaJdbc, // Java database API
//javaEbean, // Java Ebean plugin
//javaJpa, // Java JPA plugin
//filters, // A set of built-in filters
//javaCore, // The core Java API
// Add your own project dependencies in the form:
// "group" % "artifact" % "version"
)
val main = play.Project(appName, appVersion, appDependencies).settings(
scalaVersion := "2.10.1"
// Add your own project settings here
)
}
*/ | freefry/furniture-factory | project/Build.scala | Scala | apache-2.0 | 901 |
package lila.shutup
/**
* - words are automatically pluralized. "tit" will also match "tits"
* - words are automatically leetified. "tit" will also match "t1t", "t-i-t", and more.
* - words do not partial match. "anal" will NOT match "analysis".
*/
private object Dictionary {
def en: List[String] = dict("""
(c|k)oc?k(y|suc?ker|)
(c|k)um(shot|)
(c|k)unt(ing|)
(f+|ph)(u{1,}|a{1,}|e{1,})c?k(er|r|u|k|ed|d|t|ing?|ign|en|tard?|face|off?|)
fck(er|r|u|k|ed|d|t|ing?|ign|tard?|face|off?|)
abortion
adol(f|ph)
anal(plug|sex|)
anus
arse(hole|wipe|)
ass
ass?(hole|fag)
aus?c?hwitz
ball
bastard?
bewb
bimbo
bitche?
blow
blowjob
blumpkin
bollock
boner
boob
bugger
buk?kake
bull?shit
cancer
cawk
chess(|-|_)bot(.?com)?
chink
choad
clit
clitoris
clown
condom
coon
cooter
cornhole
coward?
crap
cunn?ilingu
dic?k(head|face|suc?ker|)
dildo
dogg?ystyle
dong
douche(bag|)
dyke
(f|ph)ag
(f|ph)agg?ot
fanny
(f|ph)art
foreskin
gangbang
gay
genital
genitalia
gobshite?
gook
gypo
handjob
hell
hitler+
homm?o(sexual|)
honkey
hooker
hore
horny
humping
idiot
incest
jerk
jizz?(um|)
kaffir
kike
labia
lesbo
masturbat(e|ion|ing)
milf
molest
moron
mother
motherfuc?k(er|)
mthrfckr
muff
nazi
negro
nigg?(er|a|ah)
nonce
nutsac?k
pa?edo
pa?edo(f|ph)ile
paki
pecker
pederast
pen(1|i)s
pig
pimp
piss
poof
poon
poop(face|)
porn
pric?k
pron
prostitute
punani
puss(i|y|ie|)
queef
queer
quim
raped?
rapist
rect(al|um)
retard(ed|)
rimjob
schlong
screw(d|ed|)
scrotum
scum(bag|)
semen
sex
shag
shemale
shit(z|e|y|ty|bag|)
sister
slag
slut
spastic
spaz
sperm
spick
spoo
spooge
spunk
stfu
stripper
stupid
suc?k
taint
tart
terrorist
tit(s|ies|ties|ty)(fuc?k)
tosser
turd
twat
vag
vagin(a|al|)
vibrator
vulva
wanc?k(er|)
wetback
whore?
wog
""")
private def dict(words: String) = words.lines.filter(_.nonEmpty).toList
}
| clarkerubber/lila | modules/shutup/src/main/Dictionary.scala | Scala | agpl-3.0 | 1,787 |
package com.mehmetakiftutuncu.muezzinapi.controllers
import com.github.mehmetakiftutuncu.errors.{CommonError, Errors}
import com.mehmetakiftutuncu.muezzinapi.models.{City, Country, District}
import com.mehmetakiftutuncu.muezzinapi.services._
import com.mehmetakiftutuncu.muezzinapi.utilities.ControllerExtras
import javax.inject.{Inject, Singleton}
import play.api.libs.json.{JsObject, JsString, Json}
import play.api.mvc._
import scala.concurrent.ExecutionContext.Implicits.global
@Singleton
class PlaceController @Inject()(ControllerComponents: ControllerComponents,
                                CountryService: AbstractCountryService,
                                CityService: AbstractCityService,
                                DistrictService: AbstractDistrictService) extends AbstractController(ControllerComponents) with ControllerExtras {
  /** Lists all known countries, keyed by country id. */
  def getCountries: Action[AnyContent] = Action.async {
    val log: String = s"""Failed to get countries!"""

    CountryService.getCountries.map {
      _.fold(
        errors => failWithErrors(log, errors),
        countries => success(
          Json.obj("countries" -> JsObject(countries.map(country => country.id.toString -> country.toJson)))
        )
      )
    }
  }

  /** Lists the cities of the given country, keyed by city id. */
  def getCities(countryId: Int): Action[AnyContent] = Action.async {
    val log: String = s"""Failed to get cities for country "$countryId""""

    CityService.getCities(countryId).map {
      _.fold(
        errors => failWithErrors(log, errors),
        cities => success(
          Json.obj("cities" -> JsObject(cities.map(city => city.id.toString -> city.toJson)))
        )
      )
    }
  }

  /**
   * Lists the districts of the given city, keyed by district id. The city is
   * first validated to belong to the given country; an invalid pair fails
   * with an "invalid request" error.
   */
  def getDistricts(countryId: Int, cityId: Int): Action[AnyContent] = Action.async {
    val log: String = s"""Failed to get districts for country "$countryId" and city "$cityId"!"""

    CityService.getCities(countryId).flatMap {
      case Left(cityErrors) =>
        futureFailWithErrors(log, cityErrors)

      // The requested city does not belong to this country.
      case Right(cities) if !cities.exists(_.id == cityId) =>
        futureFailWithErrors(log, Errors(CommonError.invalidRequest.reason(s"""Country "$countryId" has no city "$cityId"!""")))

      case Right(_) =>
        DistrictService.getDistricts(countryId, cityId).map {
          case Left(districtErrors) =>
            failWithErrors(log, districtErrors)

          case Right(districts) =>
            success(
              Json.obj("districts" -> JsObject(districts.map(district => district.id.toString -> JsString(district.name))))
            )
        }
    }
  }
}
| mehmetakiftutuncu/MuezzinAPI | app/com/mehmetakiftutuncu/muezzinapi/controllers/PlaceController.scala | Scala | mit | 2,793 |
package scalaz.stream.mongodb.filesystem
import com.mongodb.{DB, DBObject}
import scala.language.implicitConversions
import scala.language.postfixOps
import com.mongodb.gridfs.GridFS
import org.bson.types.ObjectId
import scalaz.stream.mongodb.channel.ChannelResult
import scalaz.stream.Process
import scalaz.stream.processes._
import scalaz.concurrent.Task
trait FileSystemSyntax extends FileUtil {

  /** Filesystem syntax: wrapper around a GridFS bucket of the given database. **/
  def filesystem(db: DB, filesystemName: String = GridFS.DEFAULT_BUCKET): GridFs = GridFs(db, filesystemName)

  // DSL entry points: `list` builds read/query commands, `write` builds write commands.
  val list: ListCommandSyntax = new ListCommandSyntax {}
  val write: WriteCommandSyntax = new WriteCommandSyntax {}

  /**
   * Creates information that uniquely identifies single file in filesystem
   * @param name name of the file (required, must be unique)
   * @param id unique file identifier
   * @param meta optional metadata to store with file
   * @param contentType optional content type stored with the file
   * @param chunkSize size (in bytes) of the GridFS chunks the file is split into
   */
  def file(name: String, id: ObjectId = new ObjectId, meta: Option[DBObject] = None, contentType: Option[String] = None, chunkSize: Long = GridFS.DEFAULT_CHUNKSIZE): MongoFileWrite =
    MongoFileWrite(name, id, meta, contentType, chunkSize)

  /** conversion of listCommand to process */
  implicit def readCmd2ChannelResult[A](cmd: ReadCommand[A]) = cmd.toChannelResult

  /** conversion of WriteCommand to sink */
  implicit def writeCmd2ChannelResult[A](cmd: WriteCommand) = cmd.toChannelResult

  /** syntax sugar on listCommand channelResult **/
  implicit class ListChannelResultSyntax(val self: ChannelResult[GridFS, MongoFileRead]) {

    // Applies `ch` to the first listed file only (`take(1)` on the listing).
    def and[A](ch: ChannelResult[GridFS, MongoFileRead => Process[Task, A]]): ChannelResult[GridFS, A] =
      ListAndCommand.combine(self |> take(1))(ch)

    // Pairs every listed file with the process produced for it by `ch`.
    def foreach[A](ch: ChannelResult[GridFS, MongoFileRead => Process[Task, A]]): ChannelResult[GridFS, (MongoFileRead, Process[Task, A])] =
      ListForEachCommand.combine(self)(ch)
  }
}
| Spinoco/scalaz-stream-mongodb | core/src/main/scala/scalaz/stream/mongodb/filesystem/FileSystemSyntax.scala | Scala | mit | 1,947 |
/*
* Copyright 2013-2015 Websudos, Limited.
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* - Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* - Explicit consent must be obtained from the copyright owner, Websudos Limited before any redistribution is made.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package com.websudos.phantom.tables
import org.joda.time.DateTime
import com.websudos.phantom.dsl._
import com.websudos.phantom.testkit._
import com.websudos.util.testing._
// Value representation of one row of TimeSeriesTable.
case class TimeSeriesRecord(
  id: UUID,
  name: String,
  timestamp: DateTime
)
// Cassandra table mapping for time-series records: rows are partitioned by `id`
// and clustered by `timestamp` in descending order.
sealed class TimeSeriesTable extends CassandraTable[TimeSeriesTable, TimeSeriesRecord] {

  object id extends UUIDColumn(this) with PartitionKey[UUID]
  object name extends StringColumn(this)
  object timestamp extends DateTimeColumn(this) with ClusteringOrder[DateTime] with Descending

  // Materialises a Cassandra row into its case-class representation.
  def fromRow(row: Row): TimeSeriesRecord =
    TimeSeriesRecord(id = id(row), name = name(row), timestamp = timestamp(row))
}
object TimeSeriesTable extends TimeSeriesTable with PhantomCassandraConnector {
  // Randomly generated UUID fixture (via the test-kit `gen`); presumably shared
  // across tests as a common partition key — confirm usages.
  val testUUID = gen[UUID]
}
| analytically/phantom | phantom-dsl/src/test/scala/com/websudos/phantom/tables/TimeSeriesTable.scala | Scala | bsd-2-clause | 2,281 |
package org.jetbrains.plugins.scala.lang.psi.light
import com.intellij.openapi.util.Key
import com.intellij.psi.impl.light.LightModifierList
import com.intellij.psi.{PsiClass, PsiMethod, PsiModifierList}
import org.jetbrains.plugins.scala.ScalaFileType
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiManager
import _root_.scala.collection.immutable.HashMap
/**
* @author Alefas
* @since 27.02.12
*/
class StaticPsiMethodWrapper private (val method: PsiMethod, containingClass: PsiClass)
  extends LightMethodAdapter(method.getManager, method, containingClass) with LightScalaMethod {

  setNavigationElement(method)

  // Reports "static" as always present; everything else is delegated.
  override def hasModifierProperty(name: String): Boolean =
    name == "static" || super.hasModifierProperty(name)

  override def getModifierList: PsiModifierList = new LightModifierList(getManager, ScalaFileType.SCALA_LANGUAGE) {
    override def hasModifierProperty(name: String): Boolean =
      name == "static" || super.hasModifierProperty(name)

    // NOTE(review): mirrors the original, which delegated the non-"static" case
    // of hasExplicitModifier to hasModifierProperty as well.
    override def hasExplicitModifier(name: String): Boolean =
      name == "static" || super.hasModifierProperty(name)
  }

  override def isWritable: Boolean = getContainingFile.isWritable
}
object StaticPsiMethodWrapper {
  private val KEY: Key[HashMap[PsiClass, (StaticPsiMethodWrapper, Long)]] = Key.create("static.psi.method.wrapper.key")

  /**
   * Returns a wrapper presenting `method` as a static member of `containingClass`,
   * caching one wrapper per containing class in the method's user data. A cached
   * wrapper is reused only while the PSI modification count is unchanged;
   * otherwise a fresh wrapper is built and the cache entry replaced.
   *
   * Rewritten to avoid the original `var` + `null` sentinel flow: the cache is
   * read once, consulted via `Option`, and written back only when a new wrapper
   * is created (the original also stored an empty map eagerly — net state is
   * identical).
   */
  def getWrapper(method: PsiMethod, containingClass: PsiClass): StaticPsiMethodWrapper = {
    val cache: HashMap[PsiClass, (StaticPsiMethodWrapper, Long)] =
      Option(method.getUserData(KEY)).getOrElse(HashMap.empty)
    val count = ScalaPsiManager.instance(method.getProject).getModificationCount
    cache.get(containingClass) match {
      case Some((wrapper, cachedCount)) if cachedCount == count =>
        wrapper
      case _ =>
        val wrapper = new StaticPsiMethodWrapper(method, containingClass)
        method.putUserData(KEY, cache + ((containingClass, (wrapper, count))))
        wrapper
    }
  }
}
| whorbowicz/intellij-scala | src/org/jetbrains/plugins/scala/lang/psi/light/StaticPsiMethodWrapper.scala | Scala | apache-2.0 | 2,065 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.client
import java.io.PrintStream
import java.lang.{Iterable => JIterable}
import java.lang.reflect.InvocationTargetException
import java.nio.charset.StandardCharsets.UTF_8
import java.util.{Locale, Map => JMap}
import java.util.concurrent.TimeUnit._
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import org.apache.hadoop.fs.Path
import org.apache.hadoop.hive.common.StatsSetupConst
import org.apache.hadoop.hive.conf.HiveConf
import org.apache.hadoop.hive.conf.HiveConf.ConfVars
import org.apache.hadoop.hive.metastore.{IMetaStoreClient, TableType => HiveTableType}
import org.apache.hadoop.hive.metastore.api.{Database => HiveDatabase, Table => MetaStoreApiTable, _}
import org.apache.hadoop.hive.ql.Driver
import org.apache.hadoop.hive.ql.metadata.{Hive, HiveException, Partition => HivePartition, Table => HiveTable}
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.HIVE_COLUMN_ORDER_ASC
import org.apache.hadoop.hive.ql.processors._
import org.apache.hadoop.hive.ql.session.SessionState
import org.apache.hadoop.hive.serde.serdeConstants
import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
import org.apache.hadoop.hive.serde2.`lazy`.LazySimpleSerDe
import org.apache.hadoop.security.UserGroupInformation
import org.apache.spark.{SparkConf, SparkException}
import org.apache.spark.internal.Logging
import org.apache.spark.metrics.source.HiveCatalogMetrics
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.analysis.{NoSuchDatabaseException, NoSuchPartitionException, NoSuchPartitionsException, PartitionAlreadyExistsException, PartitionsAlreadyExistException}
import org.apache.spark.sql.catalyst.catalog._
import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.catalyst.parser.{CatalystSqlParser, ParseException}
import org.apache.spark.sql.catalyst.util.CharVarcharUtils
import org.apache.spark.sql.connector.catalog.SupportsNamespaces._
import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryExecutionErrors}
import org.apache.spark.sql.execution.QueryExecutionException
import org.apache.spark.sql.hive.{HiveExternalCatalog, HiveUtils}
import org.apache.spark.sql.hive.HiveExternalCatalog.DATASOURCE_SCHEMA
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
import org.apache.spark.util.{CircularBuffer, Utils, VersionUtils}
/**
* A class that wraps the HiveClient and converts its responses to externally visible classes.
* Note that this class is typically loaded with an internal classloader for each instantiation,
* allowing it to interact directly with a specific isolated version of Hive. Loading this class
* with the isolated classloader however will result in it only being visible as a [[HiveClient]],
* not a [[HiveClientImpl]].
*
* This class needs to interact with multiple versions of Hive, but will always be compiled with
* the 'native', execution version of Hive. Therefore, any places where hive breaks compatibility
* must use reflection after matching on `version`.
*
* Every HiveClientImpl creates an internal HiveConf object. This object is using the given
* `hadoopConf` as the base. All options set in the `sparkConf` will be applied to the HiveConf
* object and overrides any exiting options. Then, options in extraConfig will be applied
* to the HiveConf object and overrides any existing options.
*
* @param version the version of hive used when pick function calls that are not compatible.
* @param sparkConf all configuration options set in SparkConf.
* @param hadoopConf the base Configuration object used by the HiveConf created inside
* this HiveClientImpl.
* @param extraConfig a collection of configuration options that will be added to the
* hive conf before opening the hive client.
* @param initClassLoader the classloader used when creating the `state` field of
* this [[HiveClientImpl]].
*/
private[hive] class HiveClientImpl(
override val version: HiveVersion,
warehouseDir: Option[String],
sparkConf: SparkConf,
hadoopConf: JIterable[JMap.Entry[String, String]],
extraConfig: Map[String, String],
initClassLoader: ClassLoader,
val clientLoader: IsolatedClientLoader)
extends HiveClient
with Logging {
import HiveClientImpl._
// Circular buffer to hold what hive prints to STDOUT and ERR. Only printed when failures occur.
private val outputBuffer = new CircularBuffer()
  // Version-specific shim that adapts this client's calls to the API surface
  // of the targeted Hive release.
  private val shim = version match {
    case hive.v12 => new Shim_v0_12()
    case hive.v13 => new Shim_v0_13()
    case hive.v14 => new Shim_v0_14()
    case hive.v1_0 => new Shim_v1_0()
    case hive.v1_1 => new Shim_v1_1()
    case hive.v1_2 => new Shim_v1_2()
    case hive.v2_0 => new Shim_v2_0()
    case hive.v2_1 => new Shim_v2_1()
    case hive.v2_2 => new Shim_v2_2()
    case hive.v2_3 => new Shim_v2_3()
    case hive.v3_0 => new Shim_v3_0()
    case hive.v3_1 => new Shim_v3_1()
  }
// Create an internal session state for this HiveClientImpl.
val state: SessionState = {
val original = Thread.currentThread().getContextClassLoader
if (clientLoader.isolationOn) {
// Switch to the initClassLoader.
Thread.currentThread().setContextClassLoader(initClassLoader)
try {
newState()
} finally {
Thread.currentThread().setContextClassLoader(original)
}
} else {
// Isolation off means we detect a CliSessionState instance in current thread.
// 1: Inside the spark project, we have already started a CliSessionState in
// `SparkSQLCLIDriver`, which contains configurations from command lines. Later, we call
// `SparkSQLEnv.init()` there, which would new a hive client again. so we should keep those
// configurations and reuse the existing instance of `CliSessionState`. In this case,
// SessionState.get will always return a CliSessionState.
// 2: In another case, a user app may start a CliSessionState outside spark project with built
// in hive jars, which will turn off isolation, if SessionSate.detachSession is
// called to remove the current state after that, hive client created later will initialize
// its own state by newState()
val ret = SessionState.get
if (ret != null) {
// hive.metastore.warehouse.dir is determined in SharedState after the CliSessionState
// instance constructed, we need to follow that change here.
warehouseDir.foreach { dir =>
ret.getConf.setVar(ConfVars.METASTOREWAREHOUSE, dir)
}
ret
} else {
newState()
}
}
}
// Log the default warehouse location.
logInfo(
s"Warehouse location for Hive client " +
s"(version ${version.fullVersion}) is ${conf.getVar(ConfVars.METASTOREWAREHOUSE)}")
  /**
   * Builds a fresh Hive [[SessionState]] backed by a HiveConf derived from
   * `sparkConf`, `hadoopConf` and `extraConfig`, and wires its class loader
   * and output streams to this client.
   */
  private def newState(): SessionState = {
    val hiveConf = newHiveConf(sparkConf, hadoopConf, extraConfig, Some(initClassLoader))
    val state = new SessionState(hiveConf)
    if (clientLoader.cachedHive != null) {
      Hive.set(clientLoader.cachedHive.asInstanceOf[Hive])
    }
    // Hive 2.3 will set UDFClassLoader to hiveConf when initializing SessionState
    // since HIVE-11878, and ADDJarsCommand will add jars to clientLoader.classLoader.
    // For this reason we cannot load the jars added by ADDJarsCommand because of class loader
    // got changed. We reset it to clientLoader.ClassLoader here.
    state.getConf.setClassLoader(clientLoader.classLoader)
    shim.setCurrentSessionState(state)
    // Capture Hive's stdout/stderr in the circular buffer so failures can be reported.
    state.out = new PrintStream(outputBuffer, true, UTF_8.name())
    state.err = new PrintStream(outputBuffer, true, UTF_8.name())
    state
  }
  /** Returns the configuration for the current session. */
  def conf: HiveConf = {
    val hiveConf = state.getConf
    // Hive changed the default of datanucleus.schema.autoCreateAll from true to false
    // and hive.metastore.schema.verification from false to true since Hive 2.0.
    // For details, see the JIRA HIVE-6113, HIVE-12463 and HIVE-1841.
    // isEmbeddedMetaStore should not be true in the production environment.
    // We hard-code hive.metastore.schema.verification and datanucleus.schema.autoCreateAll to allow
    // bin/spark-shell, bin/spark-sql and sbin/start-thriftserver.sh to automatically create the
    // Derby Metastore when running Spark in the non-production environment.
    val isEmbeddedMetaStore = {
      val msUri = hiveConf.getVar(ConfVars.METASTOREURIS)
      val msConnUrl = hiveConf.getVar(ConfVars.METASTORECONNECTURLKEY)
      // Embedded = no remote metastore URI and a local Derby JDBC connection.
      (msUri == null || msUri.trim().isEmpty) &&
        (msConnUrl != null && msConnUrl.startsWith("jdbc:derby"))
    }
    if (isEmbeddedMetaStore) {
      hiveConf.setBoolean("hive.metastore.schema.verification", false)
      hiveConf.setBoolean("datanucleus.schema.autoCreateAll", true)
    }
    hiveConf
  }
private def getHive(conf: HiveConf): Hive = {
VersionUtils.majorMinorPatchVersion(version.fullVersion).map {
case (2, 3, v) if v >= 9 => Hive.getWithoutRegisterFns(conf)
case _ => Hive.get(conf)
}.getOrElse {
throw QueryExecutionErrors.unsupportedHiveMetastoreVersionError(
version.fullVersion, HiveUtils.HIVE_METASTORE_VERSION.key)
}
}
override val userName = UserGroupInformation.getCurrentUser.getShortUserName
override def getConf(key: String, defaultValue: String): String = {
conf.get(key, defaultValue)
}
// We use hive's conf for compatibility.
private val retryLimit = conf.getIntVar(HiveConf.ConfVars.METASTORETHRIFTFAILURERETRIES)
private val retryDelayMillis = shim.getMetastoreClientConnectRetryDelayMillis(conf)
  /**
   * Runs `f` with multiple retries in case the hive metastore is temporarily unreachable.
   */
  private def retryLocked[A](f: => A): A = clientLoader.synchronized {
    // Hive sometimes retries internally, so set a deadline to avoid compounding delays.
    val deadline = System.nanoTime + (retryLimit * retryDelayMillis * 1e6).toLong
    var numTries = 0
    var caughtException: Exception = null
    do {
      numTries += 1
      try {
        return f
      } catch {
        // Only Thrift-related failures are retried; the cached client is
        // invalidated so the next attempt reconnects.
        case e: Exception if causedByThrift(e) =>
          caughtException = e
          logWarning(
            "HiveClient got thrift exception, destroying client and retrying " +
            s"(${retryLimit - numTries} tries remaining)", e)
          clientLoader.cachedHive = null
          Thread.sleep(retryDelayMillis)
      }
    } while (numTries <= retryLimit && System.nanoTime < deadline)
    if (System.nanoTime > deadline) {
      logWarning("Deadline exceeded")
    }
    // All retries exhausted — surface the last Thrift failure.
    throw caughtException
  }
// Returns true when `e` or any exception in its cause chain carries a message
// mentioning a thrift exception class (TApplication/TProtocol/TTransport).
// Used by `retryLocked` to decide whether a metastore call is worth retrying.
private def causedByThrift(e: Throwable): Boolean = {
  Iterator
    .iterate(e)(_.getCause)        // walk the cause chain, starting at `e` itself
    .takeWhile(_ != null)          // stop at the root cause
    .exists { t =>
      val message = t.getMessage()
      message != null &&
        message.matches("(?s).*(TApplication|TProtocol|TTransport)Exception.*")
    }
}
// Returns the cached Hive client if one exists on the loader, otherwise creates
// one via `getHive(conf)` and caches it. The cache is cleared by `retryLocked`
// after thrift failures to force a reconnect.
private def client: Hive = {
if (clientLoader.cachedHive != null) {
clientLoader.cachedHive.asInstanceOf[Hive]
} else {
val c = getHive(conf)
clientLoader.cachedHive = c
c
}
}
// The raw metastore client behind the Hive client, obtained through the shim.
private def msClient: IMetaStoreClient = {
shim.getMSC(client)
}
/** Return the associated Hive [[SessionState]] of this [[HiveClientImpl]] */
override def getState: SessionState = withHiveState(state)
/**
 * Runs `f` with ThreadLocal session state and classloaders configured for this version of hive.
 *
 * The sequence below is order-sensitive: swap in the client's classloader, bind the
 * thread-local Hive client and session state, run `f`, then restore the original
 * classloaders in the finally block. All of it happens under `retryLocked`.
 */
def withHiveState[A](f: => A): A = retryLocked {
val original = Thread.currentThread().getContextClassLoader
val originalConfLoader = state.getConf.getClassLoader
// We explicitly set the context class loader since "conf.setClassLoader" does
// not do that, and the Hive client libraries may need to load classes defined by the client's
// class loader. See SPARK-19804 for more details.
Thread.currentThread().setContextClassLoader(clientLoader.classLoader)
state.getConf.setClassLoader(clientLoader.classLoader)
// Set the thread local metastore client to the client associated with this HiveClientImpl.
Hive.set(client)
// Replace conf in the thread local Hive with current conf
// with the side-effect of Hive.get(conf) to avoid using out-of-date HiveConf.
// See discussion in https://github.com/apache/spark/pull/16826/files#r104606859
// for more details.
getHive(conf)
// setCurrentSessionState will use the classLoader associated
// with the HiveConf in `state` to override the context class loader of the current
// thread.
shim.setCurrentSessionState(state)
val ret = try {
f
} catch {
// Missing SerDe interface indicates a hive-serde classpath mismatch; rewrap.
case e: NoClassDefFoundError if e.getMessage.contains("apache/hadoop/hive/serde2/SerDe") =>
throw QueryExecutionErrors.serDeInterfaceNotFoundError(e)
} finally {
// Always restore both classloaders, and count this as one Hive client call.
state.getConf.setClassLoader(originalConfLoader)
Thread.currentThread().setContextClassLoader(original)
HiveCatalogMetrics.incrementHiveClientCalls(1)
}
ret
}
// Redirects the Hive session's standard output stream.
def setOut(stream: PrintStream): Unit = withHiveState {
state.out = stream
}
// Redirects the Hive session's info stream.
def setInfo(stream: PrintStream): Unit = withHiveState {
state.info = stream
}
// Redirects the Hive session's error stream.
def setError(stream: PrintStream): Unit = withHiveState {
state.err = stream
}
// Switches the session's current database to `db` without entering withHiveState.
// No-op when already on `db`; throws NoSuchDatabaseException when `db` is unknown.
private def setCurrentDatabaseRaw(db: String): Unit = {
  if (state.getCurrentDatabase != db) {
    // Guard clause: validate existence before mutating session state.
    if (!databaseExists(db)) {
      throw new NoSuchDatabaseException(db)
    }
    state.setCurrentDatabase(db)
  }
}
// Public wrapper over setCurrentDatabaseRaw that runs under withHiveState.
override def setCurrentDatabase(databaseName: String): Unit = withHiveState {
setCurrentDatabaseRaw(databaseName)
}
// Creates a Hive database from the catalog description, defaulting the owner
// to the current user when the catalog properties do not specify one.
override def createDatabase(
database: CatalogDatabase,
ignoreIfExists: Boolean): Unit = withHiveState {
val hiveDb = toHiveDatabase(database, Some(userName))
client.createDatabase(hiveDb, ignoreIfExists)
}
// Drops a database; `deleteData = true` so managed data is removed, with
// cascade controlling whether a non-empty database may be dropped.
override def dropDatabase(
name: String,
ignoreIfNotExists: Boolean,
cascade: Boolean): Unit = withHiveState {
client.dropDatabase(name, true, ignoreIfNotExists, cascade)
}
// Alters database metadata. Changing the location is only allowed on Hive 3.x;
// older versions reject it up front with a compilation error.
override def alterDatabase(database: CatalogDatabase): Unit = withHiveState {
if (!getDatabase(database.name).locationUri.equals(database.locationUri)) {
// SPARK-29260: Enable supported versions once it support altering database location.
if (!(version.equals(hive.v3_0) || version.equals(hive.v3_1))) {
throw QueryCompilationErrors.alterDatabaseLocationUnsupportedError(version.fullVersion)
}
}
val hiveDb = toHiveDatabase(database)
client.alterDatabase(database.name, hiveDb)
}
// Converts a CatalogDatabase to Hive's Database. The owner property is stripped
// from the plain properties map and applied through the shim instead; when the
// catalog has no owner property, `userName` (if provided) is used.
private def toHiveDatabase(
database: CatalogDatabase, userName: Option[String] = None): HiveDatabase = {
val props = database.properties
val hiveDb = new HiveDatabase(
database.name,
database.description,
CatalogUtils.URIToString(database.locationUri),
(props -- Seq(PROP_OWNER)).asJava)
props.get(PROP_OWNER).orElse(userName).foreach { ownerName =>
shim.setDatabaseOwnerName(hiveDb, ownerName)
}
hiveDb
}
// Fetches a database and converts it to a CatalogDatabase, folding the owner
// (read through the shim) into the properties map. Missing database throws.
override def getDatabase(dbName: String): CatalogDatabase = withHiveState {
Option(client.getDatabase(dbName)).map { d =>
val params = Option(d.getParameters).map(_.asScala.toMap).getOrElse(Map()) ++
Map(PROP_OWNER -> shim.getDatabaseOwnerName(d))
CatalogDatabase(
name = d.getName,
description = Option(d.getDescription).getOrElse(""),
locationUri = CatalogUtils.stringToURI(d.getLocationUri),
properties = params)
}.getOrElse(throw new NoSuchDatabaseException(dbName))
}
// Direct existence check against the metastore.
override def databaseExists(dbName: String): Boolean = withHiveState {
client.databaseExists(dbName)
}
// Lists database names matching a Hive-style pattern.
override def listDatabases(pattern: String): Seq[String] = withHiveState {
client.getDatabasesByPattern(pattern).asScala.toSeq
}
// Looks up a raw Hive table, mapping "not found" (null) to None instead of throwing.
private def getRawTableOption(dbName: String, tableName: String): Option[HiveTable] = {
Option(client.getTable(dbName, tableName, false /* do not throw exception */))
}
// Bulk-fetches raw Hive tables via the metastore client, applying the same
// non-view fixups Hive.getTable would (see extraFixesForNonView).
private def getRawTablesByName(dbName: String, tableNames: Seq[String]): Seq[HiveTable] = {
try {
msClient.getTableObjectsByName(dbName, tableNames.asJava).asScala
.map(extraFixesForNonView).map(new HiveTable(_)).toSeq
} catch {
case ex: Exception =>
throw QueryExecutionErrors.cannotFetchTablesOfDatabaseError(dbName, ex)
}
}
// Existence check implemented on top of the non-throwing raw lookup.
override def tableExists(dbName: String, tableName: String): Boolean = withHiveState {
getRawTableOption(dbName, tableName).nonEmpty
}
// Bulk table fetch converted to Spark's CatalogTable representation.
override def getTablesByName(
dbName: String,
tableNames: Seq[String]): Seq[CatalogTable] = withHiveState {
getRawTablesByName(dbName, tableNames).map(convertHiveTableToCatalogTable)
}
// Single table fetch converted to CatalogTable; None when the table is absent.
override def getTableOption(
dbName: String,
tableName: String): Option[CatalogTable] = withHiveState {
logDebug(s"Looking up $dbName.$tableName")
getRawTableOption(dbName, tableName).map(convertHiveTableToCatalogTable)
}
// Maps a raw Hive table to Spark's CatalogTable: merges partition columns into
// the schema, translates bucketing/sort info, records Hive features Spark cannot
// represent in `unsupportedFeatures`, separates Hive statistics properties from
// user table properties, and converts storage/view metadata field by field.
private def convertHiveTableToCatalogTable(h: HiveTable): CatalogTable = {
// Note: Hive separates partition columns and the schema, but for us the
// partition columns are part of the schema
val (cols, partCols) = try {
(h.getCols.asScala.map(fromHiveColumn), h.getPartCols.asScala.map(fromHiveColumn))
} catch {
case ex: SparkException =>
throw QueryExecutionErrors.convertHiveTableToCatalogTableError(
ex, h.getDbName, h.getTableName)
}
val schema = StructType((cols ++ partCols).toSeq)
val bucketSpec = if (h.getNumBuckets > 0) {
val sortColumnOrders = h.getSortCols.asScala
// Currently Spark only supports columns to be sorted in ascending order
// but Hive can support both ascending and descending order. If all the columns
// are sorted in ascending order, only then propagate the sortedness information
// to downstream processing / optimizations in Spark
// TODO: In future we can have Spark support columns sorted in descending order
val allAscendingSorted = sortColumnOrders.forall(_.getOrder == HIVE_COLUMN_ORDER_ASC)
val sortColumnNames = if (allAscendingSorted) {
sortColumnOrders.map(_.getCol)
} else {
Seq.empty
}
Option(BucketSpec(h.getNumBuckets, h.getBucketCols.asScala.toSeq, sortColumnNames.toSeq))
} else {
None
}
// Skew spec and storage handler can't be mapped to CatalogTable (yet)
val unsupportedFeatures = ArrayBuffer.empty[String]
if (!h.getSkewedColNames.isEmpty) {
unsupportedFeatures += "skewed columns"
}
if (h.getStorageHandler != null) {
unsupportedFeatures += "storage handler"
}
if (h.getTableType == HiveTableType.VIRTUAL_VIEW && partCols.nonEmpty) {
unsupportedFeatures += "partitioned view"
}
// NOTE(review): `properties` is null when h.getParameters is null; the unguarded
// uses below (`properties.get(...)`, `properties.filterNot`) assume it is
// non-null — confirm getParameters can never return null here.
val properties = Option(h.getParameters).map(_.asScala.toMap).orNull
// Hive-generated Statistics are also recorded in ignoredProperties
val ignoredProperties = scala.collection.mutable.Map.empty[String, String]
for (key <- HiveStatisticsProperties; value <- properties.get(key)) {
ignoredProperties += key -> value
}
val excludedTableProperties = HiveStatisticsProperties ++ Set(
// The property value of "comment" is moved to the dedicated field "comment"
"comment",
// For EXTERNAL_TABLE, the table properties has a particular field "EXTERNAL". This is added
// in the function toHiveTable.
"EXTERNAL"
)
val filteredProperties = properties.filterNot {
case (key, _) => excludedTableProperties.contains(key)
}
val comment = properties.get("comment")
CatalogTable(
identifier = TableIdentifier(h.getTableName, Option(h.getDbName)),
tableType = h.getTableType match {
case HiveTableType.EXTERNAL_TABLE => CatalogTableType.EXTERNAL
case HiveTableType.MANAGED_TABLE => CatalogTableType.MANAGED
case HiveTableType.VIRTUAL_VIEW => CatalogTableType.VIEW
case unsupportedType =>
val tableTypeStr = unsupportedType.toString.toLowerCase(Locale.ROOT).replace("_", " ")
throw QueryCompilationErrors.hiveTableTypeUnsupportedError(tableTypeStr)
},
schema = schema,
partitionColumnNames = partCols.map(_.name).toSeq,
// If the table is written by Spark, we will put bucketing information in table properties,
// and will always overwrite the bucket spec in hive metastore by the bucketing information
// in table properties. This means, if we have bucket spec in both hive metastore and
// table properties, we will trust the one in table properties.
bucketSpec = bucketSpec,
owner = Option(h.getOwner).getOrElse(""),
// Hive stores times in seconds; Spark's catalog uses milliseconds.
createTime = h.getTTable.getCreateTime.toLong * 1000,
lastAccessTime = h.getLastAccessTime.toLong * 1000,
storage = CatalogStorageFormat(
locationUri = shim.getDataLocation(h).map(CatalogUtils.stringToURI),
// To avoid ClassNotFound exception, we try our best to not get the format class, but get
// the class name directly. However, for non-native tables, there is no interface to get
// the format class name, so we may still throw ClassNotFound in this case.
inputFormat = Option(h.getTTable.getSd.getInputFormat).orElse {
Option(h.getStorageHandler).map(_.getInputFormatClass.getName)
},
outputFormat = Option(h.getTTable.getSd.getOutputFormat).orElse {
Option(h.getStorageHandler).map(_.getOutputFormatClass.getName)
},
serde = Option(h.getSerializationLib),
compressed = h.getTTable.getSd.isCompressed,
properties = Option(h.getTTable.getSd.getSerdeInfo.getParameters)
.map(_.asScala.toMap).orNull
),
// For EXTERNAL_TABLE, the table properties has a particular field "EXTERNAL". This is added
// in the function toHiveTable.
properties = filteredProperties,
stats = readHiveStats(properties),
comment = comment,
// In older versions of Spark(before 2.2.0), we expand the view original text and
// store that into `viewExpandedText`, that should be used in view resolution.
// We get `viewExpandedText` as viewText, and also get `viewOriginalText` in order to
// display the original view text in `DESC [EXTENDED|FORMATTED] table` command for views
// that created by older versions of Spark.
viewOriginalText = Option(h.getViewOriginalText),
viewText = Option(h.getViewExpandedText),
unsupportedFeatures = unsupportedFeatures.toSeq,
ignoredProperties = ignoredProperties.toMap)
}
// Creates a table after verifying the data schema types are round-trippable
// through Hive's type strings; current user is the default owner.
override def createTable(table: CatalogTable, ignoreIfExists: Boolean): Unit = withHiveState {
verifyColumnDataType(table.dataSchema)
client.createTable(toHiveTable(table, Some(userName)), ignoreIfExists)
}
// Drops a table through the shim; `deleteData = true` so managed data is removed.
override def dropTable(
dbName: String,
tableName: String,
ignoreIfNotExists: Boolean,
purge: Boolean): Unit = withHiveState {
shim.dropTable(client, dbName, tableName, true, ignoreIfNotExists, purge)
}
// Alters table metadata, restoring Hive-specific properties that getTableOption
// stripped so other metastore users still see them.
override def alterTable(
dbName: String,
tableName: String,
table: CatalogTable): Unit = withHiveState {
// getTableOption removes all the Hive-specific properties. Here, we fill them back to ensure
// these properties are still available to the others that share the same Hive metastore.
// If users explicitly alter these Hive-specific properties through ALTER TABLE DDL, we respect
// these user-specified values.
verifyColumnDataType(table.dataSchema)
val hiveTable = toHiveTable(
table.copy(properties = table.ignoredProperties ++ table.properties), Some(userName))
// Do not use `table.qualifiedName` here because this may be a rename
val qualifiedTableName = s"$dbName.$tableName"
shim.alterTable(client, qualifiedTableName, hiveTable)
}
// Replaces a table's (non-partition) column list and its Spark schema table
// properties: old DATASOURCE_SCHEMA properties are removed before the new
// `schemaProps` are written.
override def alterTableDataSchema(
dbName: String,
tableName: String,
newDataSchema: StructType,
schemaProps: Map[String, String]): Unit = withHiveState {
val oldTable = client.getTable(dbName, tableName)
verifyColumnDataType(newDataSchema)
val hiveCols = newDataSchema.map(toHiveColumn)
oldTable.setFields(hiveCols.asJava)
// remove old schema table properties
val it = oldTable.getParameters.entrySet.iterator
while (it.hasNext) {
val entry = it.next()
if (CatalogTable.isLargeTableProp(DATASOURCE_SCHEMA, entry.getKey)) {
it.remove()
}
}
// set new schema table properties
schemaProps.foreach { case (k, v) => oldTable.setProperty(k, v) }
val qualifiedTableName = s"$dbName.$tableName"
shim.alterTable(client, qualifiedTableName, oldTable)
}
// Creates partitions, translating Hive's AlreadyExistsException (possibly
// wrapped in HiveException or InvocationTargetException from the shim's
// reflective call) into Spark's PartitionsAlreadyExistException.
override def createPartitions(
db: String,
table: String,
parts: Seq[CatalogTablePartition],
ignoreIfExists: Boolean): Unit = withHiveState {
def replaceExistException(e: Throwable): Unit = e match {
case _: HiveException if e.getCause.isInstanceOf[AlreadyExistsException] =>
throw new PartitionsAlreadyExistException(db, table, parts.map(_.spec))
case _ => throw e
}
try {
shim.createPartitions(client, db, table, parts, ignoreIfExists)
} catch {
case e: InvocationTargetException => replaceExistException(e.getCause)
case e: Throwable => replaceExistException(e)
}
}
// Drops the partitions matching `specs` (which may be partial specs) one at a
// time, logging progress on failure so the caller can see which partitions were
// already dropped and which remain.
//
// @param ignoreIfNotExists when false, a spec matching nothing raises
//                          NoSuchPartitionsException
// @param purge             skip the trash when deleting data
// @param retainData        when true, keep the underlying data files
override def dropPartitions(
    db: String,
    table: String,
    specs: Seq[TablePartitionSpec],
    ignoreIfNotExists: Boolean,
    purge: Boolean,
    retainData: Boolean): Unit = withHiveState {
  // TODO: figure out how to drop multiple partitions in one call
  val hiveTable = client.getTable(db, table, true /* throw exception */)
  // do the check at first and collect all the matching partitions
  val matchingParts =
    specs.flatMap { s =>
      assert(s.values.forall(_.nonEmpty), s"partition spec '$s' is invalid")
      // The provided spec here can be a partial spec, i.e. it will match all partitions
      // whose specs are supersets of this partial spec. E.g. If a table has partitions
      // (b='1', c='1') and (b='1', c='2'), a partial spec of (b='1') will match both.
      val parts = client.getPartitions(hiveTable, s.asJava).asScala
      if (parts.isEmpty && !ignoreIfNotExists) {
        throw new NoSuchPartitionsException(db, table, Seq(s))
      }
      parts.map(_.getValues)
    }.distinct
  // `val` (not `var`): the buffer itself is mutated via `+=`, never reassigned.
  val droppedParts = ArrayBuffer.empty[java.util.List[String]]
  matchingParts.foreach { partition =>
    try {
      shim.dropPartition(client, db, table, partition, !retainData, purge)
    } catch {
      case e: Exception =>
        val remainingParts = matchingParts.toBuffer --= droppedParts
        logError(
          s"""
             |======================
             |Attempt to drop the partition specs in table '$table' database '$db':
             |${specs.mkString("\\n")}
             |In this attempt, the following partitions have been dropped successfully:
             |${droppedParts.mkString("\\n")}
             |The remaining partitions have not been dropped:
             |${remainingParts.mkString("\\n")}
             |======================
           """.stripMargin)
        throw e
    }
    droppedParts += partition
  }
}
// Renames partitions pairwise from `specs` to `newSpecs`. Fails fast when a
// target spec already exists or a source spec is missing.
override def renamePartitions(
db: String,
table: String,
specs: Seq[TablePartitionSpec],
newSpecs: Seq[TablePartitionSpec]): Unit = withHiveState {
require(specs.size == newSpecs.size, "number of old and new partition specs differ")
val catalogTable = getTable(db, table)
val hiveTable = toHiveTable(catalogTable, Some(userName))
specs.zip(newSpecs).foreach { case (oldSpec, newSpec) =>
if (client.getPartition(hiveTable, newSpec.asJava, false) != null) {
throw new PartitionAlreadyExistsException(db, table, newSpec)
}
val hivePart = getPartitionOption(catalogTable, oldSpec)
.map { p => toHivePartition(p.copy(spec = newSpec), hiveTable) }
.getOrElse { throw new NoSuchPartitionException(db, table, oldSpec) }
client.renamePartition(hiveTable, oldSpec.asJava, hivePart)
}
}
// Alters partition metadata. Temporarily switches the session's current
// database (a Hive requirement, see HIVE-2742) and restores it afterwards.
override def alterPartitions(
db: String,
table: String,
newParts: Seq[CatalogTablePartition]): Unit = withHiveState {
// Note: Before altering table partitions in Hive, you *must* set the current database
// to the one that contains the table of interest. Otherwise you will end up with the
// most helpful error message ever: "Unable to alter partition. alter is not possible."
// See HIVE-2742 for more detail.
val original = state.getCurrentDatabase
try {
setCurrentDatabaseRaw(db)
val hiveTable = toHiveTable(getTable(db, table), Some(userName))
shim.alterPartitions(client, table, newParts.map { toHivePartition(_, hiveTable) }.asJava)
} finally {
state.setCurrentDatabase(original)
}
}
/**
 * Returns the partition names for the given table that match the supplied partition spec.
 * If no partition spec is specified, all partitions are returned.
 *
 * The returned sequence is sorted as strings.
 */
override def getPartitionNames(
table: CatalogTable,
partialSpec: Option[TablePartitionSpec] = None): Seq[String] = withHiveState {
val hivePartitionNames =
partialSpec match {
case None =>
// -1 for result limit means "no limit/return all"
client.getPartitionNames(table.database, table.identifier.table, -1)
case Some(s) =>
assert(s.values.forall(_.nonEmpty), s"partition spec '$s' is invalid")
client.getPartitionNames(table.database, table.identifier.table, s.asJava, -1)
}
hivePartitionNames.asScala.sorted.toSeq
}
// Fetches a single partition; the `false` flag makes Hive return null (mapped
// to None here) instead of creating a missing partition.
override def getPartitionOption(
table: CatalogTable,
spec: TablePartitionSpec): Option[CatalogTablePartition] = withHiveState {
val hiveTable = toHiveTable(table, Some(userName))
val hivePartition = client.getPartition(hiveTable, spec.asJava, false)
Option(hivePartition).map(fromHivePartition)
}
/**
 * Returns the partitions for the given table that match the supplied partition spec.
 * If no partition spec is specified, all partitions are returned.
 */
override def getPartitions(
table: CatalogTable,
spec: Option[TablePartitionSpec]): Seq[CatalogTablePartition] = withHiveState {
val hiveTable = toHiveTable(table, Some(userName))
// An empty spec matches every partition.
val partSpec = spec match {
case None => CatalogTypes.emptyTablePartitionSpec
case Some(s) =>
assert(s.values.forall(_.nonEmpty), s"partition spec '$s' is invalid")
s
}
val parts = client.getPartitions(hiveTable, partSpec.asJava).asScala.map(fromHivePartition)
HiveCatalogMetrics.incrementFetchedPartitions(parts.length)
parts.toSeq
}
// Partition pruning: pushes Catalyst predicates down to the metastore via the
// shim and converts the surviving partitions.
override def getPartitionsByFilter(
table: CatalogTable,
predicates: Seq[Expression]): Seq[CatalogTablePartition] = withHiveState {
val hiveTable = toHiveTable(table, Some(userName))
val parts = shim.getPartitionsByFilter(client, hiveTable, predicates)
.map(fromHivePartition)
HiveCatalogMetrics.incrementFetchedPartitions(parts.length)
parts
}
// Lists every table name in a database.
override def listTables(dbName: String): Seq[String] = withHiveState {
client.getAllTables(dbName).asScala.toSeq
}
// Lists table names in a database matching a Hive-style pattern.
override def listTables(dbName: String, pattern: String): Seq[String] = withHiveState {
client.getTablesByPattern(dbName, pattern).asScala.toSeq
}
// Lists tables of a given type. Prefers the native getTablesByType API
// (Hive 2.3+); on UnsupportedOperationException falls back to fetching the
// matching tables and filtering by type client-side.
override def listTablesByType(
dbName: String,
pattern: String,
tableType: CatalogTableType): Seq[String] = withHiveState {
val hiveTableType = toHiveTableType(tableType)
try {
// Try with Hive API getTablesByType first, it's supported from Hive 2.3+.
shim.getTablesByType(client, dbName, pattern, hiveTableType)
} catch {
case _: UnsupportedOperationException =>
// Fallback to filter logic if getTablesByType not supported.
val tableNames = client.getTablesByPattern(dbName, pattern).asScala
getRawTablesByName(dbName, tableNames.toSeq)
.filter(_.getTableType == hiveTableType)
.map(_.getTableName)
}
}
/**
 * Runs the specified SQL query using Hive.
 *
 * Results are capped at 100000 rows; hitting the cap aborts loudly rather than
 * silently returning a truncated result set.
 */
override def runSqlHive(sql: String): Seq[String] = {
val maxResults = 100000
val results = runHive(sql, maxResults)
// It is very confusing when you only get back some of the results...
if (results.size == maxResults) sys.error("RESULTS POSSIBLY TRUNCATED")
results
}
/**
 * Execute the command using Hive and return the results as a sequence. Each element
 * in the sequence is one row.
 * Since upgrading the built-in Hive to 2.3, hive-llap-client is needed when
 * running MapReduce jobs with `runHive`.
 * Since HIVE-17626(Hive 3.0.0), need to set hive.query.reexecution.enabled=false.
 *
 * Dispatch: the first whitespace-delimited token selects a CommandProcessor.
 * Full SQL goes through a Driver (results collected via the shim); other
 * commands (SET, ADD JAR, ...) are executed directly on the processor.
 */
protected def runHive(cmd: String, maxRows: Int = 1000): Seq[String] = withHiveState {
def closeDriver(driver: Driver): Unit = {
// Since HIVE-18238(Hive 3.0.0), the Driver.close function's return type changed
// and the CommandProcessorFactory.clean function removed.
driver.getClass.getMethod("close").invoke(driver)
if (version != hive.v3_0 && version != hive.v3_1) {
CommandProcessorFactory.clean(conf)
}
}
// Hive query needs to start SessionState.
SessionState.start(state)
logDebug(s"Running hiveql '$cmd'")
if (cmd.toLowerCase(Locale.ROOT).startsWith("set")) { logDebug(s"Changing config: $cmd") }
try {
val cmd_trimmed: String = cmd.trim()
val tokens: Array[String] = cmd_trimmed.split("\\\\s+")
// The remainder of the command.
val cmd_1: String = cmd_trimmed.substring(tokens(0).length()).trim()
val proc = shim.getCommandProcessor(tokens(0), conf)
proc match {
case driver: Driver =>
val response: CommandProcessorResponse = driver.run(cmd)
// Throw an exception if there is an error in query processing.
if (response.getResponseCode != 0) {
closeDriver(driver)
throw new QueryExecutionException(response.getErrorMessage)
}
driver.setMaxRows(maxRows)
val results = shim.getDriverResults(driver)
closeDriver(driver)
results
case _ =>
if (state.out != null) {
// scalastyle:off println
state.out.println(tokens(0) + " " + cmd_1)
// scalastyle:on println
}
val response: CommandProcessorResponse = proc.run(cmd_1)
// Throw an exception if there is an error in query processing.
if (response.getResponseCode != 0) {
throw new QueryExecutionException(response.getErrorMessage)
}
Seq(response.getResponseCode.toString)
}
} catch {
case e: Exception =>
// Dump Hive's captured output so the failure is diagnosable, then rethrow.
logError(
s"""
|======================
|HIVE FAILURE OUTPUT
|======================
|${outputBuffer.toString}
|======================
|END HIVE FAILURE OUTPUT
|======================
""".stripMargin)
throw e
} finally {
if (state != null) {
state.close()
}
}
}
// Loads data files from `loadPath` into a single partition of an existing table.
def loadPartition(
loadPath: String,
dbName: String,
tableName: String,
partSpec: java.util.LinkedHashMap[String, String],
replace: Boolean,
inheritTableSpecs: Boolean,
isSrcLocal: Boolean): Unit = withHiveState {
val hiveTable = client.getTable(dbName, tableName, true /* throw exception */)
shim.loadPartition(
client,
new Path(loadPath), // TODO: Use URI
s"$dbName.$tableName",
partSpec,
replace,
inheritTableSpecs,
isSkewedStoreAsSubdir = hiveTable.isStoredAsSubDirectories,
isSrcLocal = isSrcLocal)
}
// Loads data files from `loadPath` into a non-partitioned table.
def loadTable(
loadPath: String, // TODO URI
tableName: String,
replace: Boolean,
isSrcLocal: Boolean): Unit = withHiveState {
shim.loadTable(
client,
new Path(loadPath),
tableName,
replace,
isSrcLocal)
}
// Loads data into dynamically-determined partitions; `numDP` is the number of
// dynamic partition columns passed through to the shim.
def loadDynamicPartitions(
loadPath: String,
dbName: String,
tableName: String,
partSpec: java.util.LinkedHashMap[String, String],
replace: Boolean,
numDP: Int): Unit = withHiveState {
val hiveTable = client.getTable(dbName, tableName, true /* throw exception */)
shim.loadDynamicPartitions(
client,
new Path(loadPath),
s"$dbName.$tableName",
partSpec,
replace,
numDP,
listBucketingEnabled = hiveTable.isStoredAsSubDirectories)
}
// Registers a function in the metastore via the version shim.
override def createFunction(db: String, func: CatalogFunction): Unit = withHiveState {
shim.createFunction(client, db, func)
}
// Removes a function from the metastore via the version shim.
override def dropFunction(db: String, name: String): Unit = withHiveState {
shim.dropFunction(client, db, name)
}
// Renames a metastore function via the version shim.
override def renameFunction(db: String, oldName: String, newName: String): Unit = withHiveState {
shim.renameFunction(client, db, oldName, newName)
}
// Updates a metastore function's definition via the version shim.
override def alterFunction(db: String, func: CatalogFunction): Unit = withHiveState {
shim.alterFunction(client, db, func)
}
// Looks up a function; None when absent.
override def getFunctionOption(
db: String, name: String): Option[CatalogFunction] = withHiveState {
shim.getFunctionOption(client, db, name)
}
// Lists function names in `db` matching `pattern`.
override def listFunctions(db: String, pattern: String): Seq[String] = withHiveState {
shim.listFunctions(client, db, pattern)
}
// Adds a jar to the isolated client classloader so Hive can load its classes.
def addJar(path: String): Unit = {
val jarURI = Utils.resolveURI(path)
clientLoader.addJar(jarURI.toURL)
}
// Creates a fresh client (and thus a fresh Hive session) from the same loader.
def newSession(): HiveClientImpl = {
clientLoader.createClient().asInstanceOf[HiveClientImpl]
}
// Test/maintenance helper: wipes the metastore back to an empty "default"
// database. Drops materialized views first (foreign-key constraint), then
// tables (dropping their indexes when the Hive version still has index APIs),
// then every non-default database.
def reset(): Unit = withHiveState {
val allTables = client.getAllTables("default")
val (mvs, others) = allTables.asScala.map(t => client.getTable("default", t))
.partition(_.getTableType.toString.equals("MATERIALIZED_VIEW"))
// Remove materialized view first, otherwise caused a violation of foreign key constraint.
mvs.foreach { table =>
val t = table.getTableName
logDebug(s"Deleting materialized view $t")
client.dropTable("default", t)
}
others.foreach { table =>
val t = table.getTableName
logDebug(s"Deleting table $t")
try {
client.getIndexes("default", t, 255).asScala.foreach { index =>
shim.dropIndex(client, "default", t, index.getIndexName)
}
// Index tables are dropped implicitly with their index.
if (!table.isIndexTable) {
client.dropTable("default", t)
}
} catch {
case _: NoSuchMethodError =>
// HIVE-18448 Hive 3.0 remove index APIs
client.dropTable("default", t)
}
}
client.getAllDatabases.asScala.filterNot(_ == "default").foreach { db =>
logDebug(s"Dropping Database: $db")
client.dropDatabase(db, true, false, true)
}
}
}
private[hive] object HiveClientImpl extends Logging {
/** Converts the native StructField to Hive's FieldSchema. */
def toHiveColumn(c: StructField): FieldSchema = {
// For Hive Serde, we still need to to restore the raw type for char and varchar type.
// When reading data in parquet, orc, or avro file format with string type for char,
// the tailing spaces may lost if we are not going to pad it.
val typeString = if (SQLConf.get.charVarcharAsString) {
// Config says to treat char/varchar as plain string: use the catalog string directly.
c.dataType.catalogString
} else {
// Otherwise recover the original char/varchar type from column metadata if present.
CharVarcharUtils.getRawTypeString(c.metadata).getOrElse(c.dataType.catalogString)
}
new FieldSchema(c.name, typeString, c.getComment().orNull)
}
/** Get the Spark SQL native DataType from Hive's FieldSchema. */
private def getSparkSQLDataType(hc: FieldSchema): DataType = {
try {
CatalystSqlParser.parseDataType(hc.getType)
} catch {
// Surface parse failures with the column name and offending type string.
case e: ParseException =>
throw QueryExecutionErrors.cannotRecognizeHiveTypeError(e, hc.getType, hc.getName)
}
}
/** Builds the native StructField from Hive's FieldSchema. */
def fromHiveColumn(hc: FieldSchema): StructField = {
val columnType = getSparkSQLDataType(hc)
// Hive columns are always treated as nullable on the Spark side.
val field = StructField(
name = hc.getName,
dataType = columnType,
nullable = true)
Option(hc.getComment).map(field.withComment).getOrElse(field)
}
// Round-trips every column through Hive's type string and back, so unparseable
// types fail fast before any DDL is issued.
private def verifyColumnDataType(schema: StructType): Unit = {
schema.foreach(col => getSparkSQLDataType(toHiveColumn(col)))
}
// Resolves an input format class by name (may throw ClassNotFoundException).
private def toInputFormat(name: String) =
Utils.classForName[org.apache.hadoop.mapred.InputFormat[_, _]](name)
// Resolves a Hive output format class by name (may throw ClassNotFoundException).
private def toOutputFormat(name: String) =
Utils.classForName[org.apache.hadoop.hive.ql.io.HiveOutputFormat[_, _]](name)
// Maps Spark's CatalogTableType to Hive's TableType; unknown types are rejected.
def toHiveTableType(catalogTableType: CatalogTableType): HiveTableType = {
catalogTableType match {
case CatalogTableType.EXTERNAL => HiveTableType.EXTERNAL_TABLE
case CatalogTableType.MANAGED => HiveTableType.MANAGED_TABLE
case CatalogTableType.VIEW => HiveTableType.VIRTUAL_VIEW
case t =>
throw new IllegalArgumentException(
s"Unknown table type is found at toHiveTableType: $t")
}
}
/**
 * Converts the native table metadata representation format CatalogTable to Hive's Table.
 *
 * @param userName fallback owner when the catalog table has no owner set
 */
def toHiveTable(table: CatalogTable, userName: Option[String] = None): HiveTable = {
val hiveTable = new HiveTable(table.database, table.identifier.table)
hiveTable.setTableType(toHiveTableType(table.tableType))
// For EXTERNAL_TABLE, we also need to set EXTERNAL field in the table properties.
// Otherwise, Hive metastore will change the table to a MANAGED_TABLE.
// (metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java#L1095-L1105)
if (table.tableType == CatalogTableType.EXTERNAL) {
hiveTable.setProperty("EXTERNAL", "TRUE")
}
// Note: In Hive the schema and partition columns must be disjoint sets
val (partCols, schema) = table.schema.map(toHiveColumn).partition { c =>
table.partitionColumnNames.contains(c.getName)
}
hiveTable.setFields(schema.asJava)
hiveTable.setPartCols(partCols.asJava)
Option(table.owner).filter(_.nonEmpty).orElse(userName).foreach(hiveTable.setOwner)
// Spark keeps times in milliseconds; Hive wants seconds.
hiveTable.setCreateTime(MILLISECONDS.toSeconds(table.createTime).toInt)
hiveTable.setLastAccessTime(MILLISECONDS.toSeconds(table.lastAccessTime).toInt)
table.storage.locationUri.map(CatalogUtils.URIToString).foreach { loc =>
hiveTable.getTTable.getSd.setLocation(loc)}
table.storage.inputFormat.map(toInputFormat).foreach(hiveTable.setInputFormatClass)
table.storage.outputFormat.map(toOutputFormat).foreach(hiveTable.setOutputFormatClass)
hiveTable.setSerializationLib(
table.storage.serde.getOrElse("org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"))
table.storage.properties.foreach { case (k, v) => hiveTable.setSerdeParam(k, v) }
table.properties.foreach { case (k, v) => hiveTable.setProperty(k, v) }
table.comment.foreach { c => hiveTable.setProperty("comment", c) }
// Hive will expand the view text, so it needs 2 fields: viewOriginalText and viewExpandedText.
// Since we don't expand the view text, but only add table properties, we map the `viewText` to
// the both fields in hive table.
table.viewText.foreach { t =>
hiveTable.setViewOriginalText(t)
hiveTable.setViewExpandedText(t)
}
// Only propagate Hive bucketing for non-datasource tables; datasource tables
// carry bucketing in table properties instead.
table.bucketSpec match {
case Some(bucketSpec) if !HiveExternalCatalog.isDatasourceTable(table) =>
hiveTable.setNumBuckets(bucketSpec.numBuckets)
hiveTable.setBucketCols(bucketSpec.bucketColumnNames.toList.asJava)
if (bucketSpec.sortColumnNames.nonEmpty) {
hiveTable.setSortCols(
bucketSpec.sortColumnNames
.map(col => new Order(col, HIVE_COLUMN_ORDER_ASC))
.toList
.asJava
)
}
case _ =>
}
hiveTable
}
/**
 * Converts the native partition metadata representation format CatalogTablePartition to
 * Hive's Partition.
 *
 * Partition values are ordered by the table's partition columns; a spec missing
 * any partition column is rejected.
 */
def toHivePartition(
p: CatalogTablePartition,
ht: HiveTable): HivePartition = {
val tpart = new org.apache.hadoop.hive.metastore.api.Partition
val partValues = ht.getPartCols.asScala.map { hc =>
p.spec.getOrElse(hc.getName, throw new IllegalArgumentException(
s"Partition spec is missing a value for column '${hc.getName}': ${p.spec}"))
}
val storageDesc = new StorageDescriptor
val serdeInfo = new SerDeInfo
p.storage.locationUri.map(CatalogUtils.URIToString(_)).foreach(storageDesc.setLocation)
p.storage.inputFormat.foreach(storageDesc.setInputFormat)
p.storage.outputFormat.foreach(storageDesc.setOutputFormat)
p.storage.serde.foreach(serdeInfo.setSerializationLib)
serdeInfo.setParameters(p.storage.properties.asJava)
storageDesc.setSerdeInfo(serdeInfo)
tpart.setDbName(ht.getDbName)
tpart.setTableName(ht.getTableName)
tpart.setValues(partValues.asJava)
tpart.setSd(storageDesc)
// Spark keeps times in milliseconds; the thrift Partition wants seconds.
tpart.setCreateTime(MILLISECONDS.toSeconds(p.createTime).toInt)
tpart.setLastAccessTime(MILLISECONDS.toSeconds(p.lastAccessTime).toInt)
tpart.setParameters(mutable.Map(p.parameters.toSeq: _*).asJava)
new HivePartition(ht, tpart)
}
/**
 * Build the native partition metadata from Hive's Partition.
 */
def fromHivePartition(hp: HivePartition): CatalogTablePartition = {
val apiPartition = hp.getTPartition
val properties: Map[String, String] = if (hp.getParameters != null) {
hp.getParameters.asScala.toMap
} else {
Map.empty
}
CatalogTablePartition(
spec = Option(hp.getSpec).map(_.asScala.toMap).getOrElse(Map.empty),
storage = CatalogStorageFormat(
locationUri = Option(CatalogUtils.stringToURI(apiPartition.getSd.getLocation)),
inputFormat = Option(apiPartition.getSd.getInputFormat),
outputFormat = Option(apiPartition.getSd.getOutputFormat),
serde = Option(apiPartition.getSd.getSerdeInfo.getSerializationLib),
compressed = apiPartition.getSd.isCompressed,
properties = Option(apiPartition.getSd.getSerdeInfo.getParameters)
.map(_.asScala.toMap).orNull),
// Hive stores times in seconds; convert back to milliseconds for Spark.
createTime = apiPartition.getCreateTime.toLong * 1000,
lastAccessTime = apiPartition.getLastAccessTime.toLong * 1000,
parameters = properties,
stats = readHiveStats(properties))
}
/**
 * This is the same process copied from the method `getTable()`
 * of [[org.apache.hadoop.hive.ql.metadata.Hive]] to do some extra fixes for non-views.
 * Methods of extracting multiple [[HiveTable]] like `getRawTablesByName()`
 * should invoke this before return.
 */
def extraFixesForNonView(tTable: MetaStoreApiTable): MetaStoreApiTable = {
// For non-views, we need to do some extra fixes
if (!(HiveTableType.VIRTUAL_VIEW.toString == tTable.getTableType)) {
// Fix the non-printable chars
val parameters = tTable.getSd.getParameters
if (parameters != null) {
val sf = parameters.get(serdeConstants.SERIALIZATION_FORMAT)
if (sf != null) {
val b: Array[Char] = sf.toCharArray
// Single control character is rewritten to its decimal code point string.
if ((b.length == 1) && (b(0) < 10)) { // ^A, ^B, ^C, ^D, \\t
parameters.put(serdeConstants.SERIALIZATION_FORMAT, Integer.toString(b(0)))
}
}
}
// Use LazySimpleSerDe for MetadataTypedColumnsetSerDe.
// NOTE: LazySimpleSerDe does not support tables with a single column of col
// of type "array<string>". This happens when the table is created using
// an earlier version of Hive.
if (classOf[MetadataTypedColumnsetSerDe].getName ==
tTable.getSd.getSerdeInfo.getSerializationLib &&
tTable.getSd.getColsSize > 0 &&
tTable.getSd.getCols.get(0).getType.indexOf('<') == -1) {
tTable.getSd.getSerdeInfo.setSerializationLib(classOf[LazySimpleSerDe].getName)
}
}
tTable
}
/**
 * Reads statistics from Hive.
 * Note that this statistics could be overridden by Spark's statistics if that's available.
 */
private def readHiveStats(properties: Map[String, String]): Option[CatalogStatistics] = {
  // Parse a single Hive stats property, ignoring absent or empty values.
  def statOf(key: String): Option[BigInt] =
    properties.get(key).filter(_.nonEmpty).map(BigInt(_))

  // NOTE: getting `totalSize` directly from params is kind of hacky, but this should be
  // relatively cheap if parameters for the table are populated into the metastore.
  // Currently, only totalSize, rawDataSize, and rowCount are used to build the field `stats`
  // TODO: stats should include all the other two fields (`numFiles` and `numPartitions`).
  // (see StatsSetupConst in Hive)
  val totalSize = statOf(StatsSetupConst.TOTAL_SIZE)
  val rawDataSize = statOf(StatsSetupConst.RAW_DATA_SIZE)
  val rowCount = statOf(StatsSetupConst.ROW_COUNT)

  // When table is external, `totalSize` is always zero, which will influence join strategy.
  // So when `totalSize` is zero, use `rawDataSize` instead. When `rawDataSize` is also zero,
  // return None.
  // In Hive, when statistics gathering is disabled, `rawDataSize` and `numRows` is always
  // zero after INSERT command. So they are used here only if they are larger than zero.
  // TODO: still fill the rowCount even if sizeInBytes is empty. Might break anything?
  totalSize.filter(_ > 0L)
    .orElse(rawDataSize.filter(_ > 0))
    .map(size => CatalogStatistics(sizeInBytes = size, rowCount = rowCount.filter(_ > 0)))
}
// Below is the key of table properties for storing Hive-generated statistics
// NOTE(review): presumably used elsewhere in this client to recognise/strip
// Hive-managed stats keys — usage is not visible in this part of the file.
private val HiveStatisticsProperties = Set(
  StatsSetupConst.COLUMN_STATS_ACCURATE,
  StatsSetupConst.NUM_FILES,
  StatsSetupConst.NUM_PARTITIONS,
  StatsSetupConst.ROW_COUNT,
  StatsSetupConst.RAW_DATA_SIZE,
  StatsSetupConst.TOTAL_SIZE
)
/**
 * Creates a [[HiveConf]] by merging, in increasing priority, the Hadoop
 * configuration, the Spark configuration and `extraConfig`, then overrides a
 * few Hive settings that are useless or harmful when Hive is driven by Spark
 * (CBO, session history logging, the tez execution engine).
 *
 * @param sparkConf   Spark settings copied into the Hive conf
 * @param hadoopConf  Hadoop/Hive settings (core-site.xml, hive-site.xml)
 * @param extraConfig additional overrides, applied last
 * @param classLoader class loader the HiveConf should use, if any
 */
def newHiveConf(
    sparkConf: SparkConf,
    hadoopConf: JIterable[JMap.Entry[String, String]],
    extraConfig: Map[String, String],
    classLoader: Option[ClassLoader] = None): HiveConf = {
  val hiveConf = new HiveConf(classOf[SessionState])
  // HiveConf is a Hadoop Configuration, which has a field of classLoader and
  // the initial value will be the current thread's context class loader.
  // We call hiveConf.setClassLoader(initClassLoader) at here to ensure it use the classloader
  // we want.
  classLoader.foreach(hiveConf.setClassLoader)
  // 1: Take all from the hadoopConf to this hiveConf.
  // This hadoopConf contains user settings in Hadoop's core-site.xml file
  // and Hive's hive-site.xml file. Note, we load hive-site.xml file manually in
  // SharedState and put settings in this hadoopConf instead of relying on HiveConf
  // to load user settings. Otherwise, HiveConf's initialize method will override
  // settings in the hadoopConf. This issue only shows up when spark.sql.hive.metastore.jars
  // is not set to builtin. When spark.sql.hive.metastore.jars is builtin, the classpath
  // has hive-site.xml. So, HiveConf will use that to override its default values.
  // 2: we set all spark confs to this hiveConf.
  // 3: we set all entries in config to this hiveConf.
  val confMap = (hadoopConf.iterator().asScala.map(kv => kv.getKey -> kv.getValue) ++
    sparkConf.getAll.toMap ++ extraConfig).toMap
  confMap.foreach { case (k, v) => hiveConf.set(k, v) }
  // Log the applied settings with sensitive values redacted.
  SQLConf.get.redactOptions(confMap).foreach { case (k, v) =>
    logDebug(s"Applying Hadoop/Hive/Spark and extra properties to Hive Conf:$k=$v")
  }
  // Disable CBO because we removed the Calcite dependency.
  hiveConf.setBoolean("hive.cbo.enable", false)
  // If this is true, SessionState.start will create a file to log hive job which will not be
  // deleted on exit and is useless for spark
  if (hiveConf.getBoolean("hive.session.history.enabled", false)) {
    logWarning("Detected HiveConf hive.session.history.enabled is true and will be reset to" +
      " false to disable useless hive logic")
    hiveConf.setBoolean("hive.session.history.enabled", false)
  }
  // If this is tez engine, SessionState.start might bring extra logic to initialize tez stuff,
  // which is useless for spark.
  if (hiveConf.get("hive.execution.engine") == "tez") {
    logWarning("Detected HiveConf hive.execution.engine is 'tez' and will be reset to 'mr'" +
      " to disable useless hive logic")
    hiveConf.set("hive.execution.engine", "mr")
  }
  hiveConf
}
}
| chuckchen/spark | sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala | Scala | apache-2.0 | 53,954 |
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller
* @version 1.2
* @date Tue Jan 5 16:14:38 EST 2010
* @see LICENSE (MIT style license file).
*/
package scalation.scala2d
import scala.math.{atan, cos, Pi, sin}
import scalation.scala2d.Colors._
import scalation.scala2d.Shapes.{Dimension, Graphics, Graphics2D}
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `Arrow` class uses Java's Path2D class to create a line with an arrowhead
* on the far end. The main line is defined by points 'p1' and 'p2'. Points 'p3'
* and 'p4' are the corners of the triangular arrowhead.
* @param p1 the starting point for the line/arrow
* @param p2 the ending point for the line/arrow
* @param len the length of the arrowhead on the line/arrow
*/
case class Arrow (var p1: R2 = R2 (0.0, 0.0),
var p2: R2 = R2 (0.0, 0.0),
var len: Int = 10)
extends java.awt.geom.Path2D.Double //with CurvilinearShape
{
{
val deltaX = p2.x - p1.x
val slope = (p2.y - p1.y) / deltaX // slope of the main line
val a1_2 = if (slope == Double.PositiveInfinity) Pi / 2.0 // angle of line p1 to p2
else if (slope == Double.NegativeInfinity) 3.0 * Pi / 2.0
else if (deltaX < 0.0) Pi + atan (slope)
else atan (slope)
val a2_3 = a1_2 - 5.0 * Pi / 6.0 // angle of line p2 to p3
val a3_4 = a1_2 + Pi / 2.0 // angle of line p3 to p4
val p3 = R2 (p2.x + len * cos (a2_3), p2.y + len * sin (a2_3))
val p4 = R2 (p3.x + len * cos (a3_4), p3.y + len * sin (a3_4))
moveTo (p1.x, p1.y)
lineTo (p2.x, p2.y)
lineTo (p3.x, p3.y)
lineTo (p4.x, p4.y)
lineTo (p2.x, p2.y)
} // primary constructor
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Get the x-coordinate of the center of the main line.
*/
def getCenterX (): Double = (p1.x + p2.x) / 2.0
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Get the y-coordinate of the center of the main line.
*/
def getCenterY (): Double = (p1.y + p2.y) / 2.0
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Set (or reset) the location for the Arrow as a line.
* @param _p1 the starting point
* @param _p2 the ending point
*/
def setLine (_p1: R2, _p2: R2)
{
p1 = _p1; p2 = _p2
val deltaX = p2.x - p1.x
val slope = (p2.y - p1.y) / deltaX // slope of the main line
val a1_2 = if (slope == Double.PositiveInfinity) Pi / 2.0 // angle of line p1 to p2
else if (slope == Double.NegativeInfinity) 3.0 * Pi / 2.0
else if (deltaX < 0.0) Pi + atan (slope)
else atan (slope)
val a2_3 = a1_2 - 5.0 * Pi / 6.0 // angle of line p2 to p3
val a3_4 = a1_2 + Pi / 2.0 // angle of line p3 to p4
val p3 = R2 (p2.x + len * cos (a2_3), p2.y + len * sin (a2_3))
val p4 = R2 (p3.x + len * cos (a3_4), p3.y + len * sin (a3_4))
moveTo (p1.x, p1.y)
lineTo (p2.x, p2.y)
lineTo (p3.x, p3.y)
lineTo (p4.x, p4.y)
lineTo (p2.x, p2.y)
} // setLine
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Set (or reset) the location for the Arrow as a line. The bend parameter
* is ignored for this class, since arrows are straight.
* @param _p1 the starting point
* @param _p2 the ending point
* @param bend the bend or curvature (0. => straight line)
*/
def setLine (_p1: R2, _p2: R2, bend: Double)
{
p1 = _p1; p2 = _p2
setLine (p1, p2)
} // setLine
} // Arrow class
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `ArrowTest` object is used to test the `Arrow` class. Eight arrows
 *  radiate from the point (200, 200), each drawn in a different color.
 */
object ArrowTest extends App
{
    // arrows paired with their colors, kept in draw order
    private val arrowsWithColors = Seq (
        new Arrow (R2 (200, 200), R2 (300, 200)) -> red,
        new Arrow (R2 (200, 200), R2 (300, 300)) -> orange,
        new Arrow (R2 (200, 200), R2 (200, 300)) -> yellow,
        new Arrow (R2 (200, 200), R2 (100, 300)) -> yellowgreen,
        new Arrow (R2 (200, 200), R2 (100, 200)) -> green,
        new Arrow (R2 (200, 200), R2 (100, 100)) -> cyan,
        new Arrow (R2 (200, 200), R2 (200, 100)) -> blue,
        new Arrow (R2 (200, 200), R2 (300, 100)) -> violet)

    class Canvas extends Panel
    {
        setBackground (white)

        //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
        /** Paint the components into the canvas (drawing panel).
         *  @param gr  low-resolution graphics environment
         */
        override def paintComponent (gr: Graphics)
        {
            super.paintComponent (gr)
            val g2d = gr.asInstanceOf [Graphics2D]        // use hi-resolution
            for ((arrow, color) <- arrowsWithColors) {
                g2d.setPaint (color)
                g2d.draw (arrow)
            } // for
        } // paintComponent

    } // Canvas class

    // Put the drawing canvas in the visualization frame
    new VizFrame ("ArrowTest", new Canvas (), 600, 600)

} // ArrowTest object
| NBKlepp/fda | scalation_1.2/src/main/scala/scalation/scala2d/Arrow.scala | Scala | mit | 5,960 |
package net.aicomp.terraforming.entity
import org.specs2.mutable.SpecificationWithJUnit
/** Unit tests for hex-grid `Point`: arithmetic, rotation, distance,
 *  shortest-path search and neighbourhood enumeration.
 */
class PointSpec extends SpecificationWithJUnit {
  "Point" should {
    "calculate additions" in {
      Point(1, 1) + Point(2, -1) must_== Point(3, 0)
    }
    "calculate subtractions" in {
      Point(1, 3) - Point(-1, -1) must_== Point(2, 4)
    }
    "calculate with directions" in {
      Point(2, 2) + Direction.ur must_== Point(3, 1)
      Point(2, 2) - Direction.ul must_== Point(2, 3)
    }
    "calculate multiplication" in {
      Point(2, 1) * 3 must_== Point(6, 3)
    }
    "calculate rotated point" in {
      // rotations are by multiples of 120 degrees on the hex grid
      Point(1, 0).rotate120 must_== Point(-1, 1)
      Point(-2, 1).rotate120 must_== Point(1, -2)
      Point(-3, -2).rotate120.rotate240 must_== Point(-3, -2)
      Point(5, 0).rotate120 must_!= Point(5, 0)
      Point(5, 0).rotate240 must_!= Point(5, 0)
      Point(5, 0).rotate120 must_!= Point(5, 0).rotate240
    }
    "calculate distance between 2 points" in {
      // x
      Point(0, 1).distance(Point(3, -1)) must_== 3
      // y
      Point(1, -2).distance(Point(-1, 2)) must_== 4
      // x + y
      Point(1, 2).distance(Point(0, 0)) must_== 3
      // 0
      Point(-1, 3).distance(Point(-1, 3)) must_== 0
    }
    "determine within distance or not" in {
      Point(0, 1).within(1) must_== true
      Point(3, -2).within(2) must_== false
      Point(3, -2).within(3) must_== true
    }
    "return shortest path to" in {
      val field = Field(7)
      val player = Player(1)
      val enemy = Player(2)
      Point(0, 1).shortestPathTo(Point(0, 1), field, player) must_==
        Some(List.empty[Direction])
      Point(0, 1).shortestPathTo(Point(0, 3), field, player) must_==
        Some(List(Direction.dr, Direction.dr))
      Point(2, -1).shortestPathTo(Point(0, 2), field, player).get.length must_==
        Point(2, -1).distance(Point(0, 2))
      // destinations outside the field are unreachable
      Point(0, -1).shortestPathTo(Point(8, 0), field, player) must_== None
      Point(90, -1).shortestPathTo(Point(0, 0), field, player) must_== None
      // makes a tile impassable by placing a heavily-defended enemy bridge on it
      def obstacle(t: Tile) {
        t.owner = Some(enemy)
        t.robots = 1000000000
        t.installation = Some(Installation.bridge)
      }
      val fieldWithObstacle = Field(2)
      obstacle(fieldWithObstacle(0, 0))
      obstacle(fieldWithObstacle(-1, 0))
      // the path must detour around the obstacles
      Point(0, 1).shortestPathTo(Point(0, -1), fieldWithObstacle, player) must_==
        Some(List(Direction.ur, Direction.ul, Direction.l))
      Point(0, 0).shortestPathTo(Point(0, -1), fieldWithObstacle, player) must_== None
      // a full wall across the field cuts off the far side entirely
      val fieldWithWall = Field(1)
      obstacle(fieldWithWall(-1, 0))
      obstacle(fieldWithWall(0, 0))
      obstacle(fieldWithWall(1, 0))
      Point(0, 1).shortestPathTo(Point(0, -1), fieldWithWall, player) must_== None
      Point(-1, 1).shortestPathTo(Point(0, 1), fieldWithWall, player) must_==
        Some(List(Direction.r))
    }
    "stringify itself" in {
      Point(1, 2).stringify must_== "1 2"
    }
    "return points within distance" in {
      Point.pointsWithin(1) must_== List(
        Point(-1, 0),
        Point(-1, 1),
        Point(0, -1),
        Point(0, 0),
        Point(0, 1),
        Point(1, -1),
        Point(1, 0))
      Point.pointsWithin(-1) must_== Nil
    }
    "return around points with 1 radius" in {
      Point(0, 0).aroundPoints().size must_== 6
      Point(0, 0).aroundPoints() must contain(Point(-1, 1))
      Point(0, 0).aroundPoints() must contain(Point(1, -1))
      Point(0, 0).aroundPoints() must contain(Point(0, -1))
      Point(0, 0).aroundPoints() must contain(Point(0, 1))
      Point(0, 0).aroundPoints() must contain(Point(-1, 0))
      Point(0, 0).aroundPoints() must contain(Point(1, 0))
    }
    "return around points with 2 radius" in {
      // 18 = all points at distance 1 or 2 (excluding the origin)
      Point(0, 0).aroundPoints(2).size must_== 18
      Point(0, 0).aroundPoints(2) must contain(Point(-1, 1))
      Point(0, 0).aroundPoints(2) must contain(Point(1, -1))
      Point(0, 0).aroundPoints(2) must contain(Point(0, -1))
      Point(0, 0).aroundPoints(2) must contain(Point(0, 1))
      Point(0, 0).aroundPoints(2) must contain(Point(-1, 0))
      Point(0, 0).aroundPoints(2) must contain(Point(1, 0))
      Point(0, 0).aroundPoints(2) must contain(Point(-2, 2))
      Point(0, 0).aroundPoints(2) must contain(Point(2, -2))
      Point(0, 0).aroundPoints(2) must contain(Point(0, -2))
      Point(0, 0).aroundPoints(2) must contain(Point(0, 2))
      Point(0, 0).aroundPoints(2) must contain(Point(-2, 0))
      Point(0, 0).aroundPoints(2) must contain(Point(2, 0))
      Point(0, 0).aroundPoints(2) must contain(Point(2, -1))
      Point(0, 0).aroundPoints(2) must contain(Point(1, 1))
      Point(0, 0).aroundPoints(2) must contain(Point(-1, 2))
      Point(0, 0).aroundPoints(2) must contain(Point(-2, 1))
      Point(0, 0).aroundPoints(2) must contain(Point(-1, -1))
      Point(0, 0).aroundPoints(2) must contain(Point(1, -2))
    }
    "return line points with 1 radius" in {
      Point(0, 0).linePoints().size must_== 6
      Point(0, 0).linePoints() must contain(Point(-1, 1))
      Point(0, 0).linePoints() must contain(Point(1, -1))
      Point(0, 0).linePoints() must contain(Point(0, -1))
      Point(0, 0).linePoints() must contain(Point(0, 1))
      Point(0, 0).linePoints() must contain(Point(-1, 0))
      Point(0, 0).linePoints() must contain(Point(1, 0))
    }
    "return line points with 2 radius" in {
      // 12 = the six axis directions at distances 1 and 2 (no diagonals)
      Point(0, 0).linePoints(2).size must_== 12
      Point(0, 0).linePoints(2) must contain(Point(-1, 1))
      Point(0, 0).linePoints(2) must contain(Point(1, -1))
      Point(0, 0).linePoints(2) must contain(Point(0, -1))
      Point(0, 0).linePoints(2) must contain(Point(0, 1))
      Point(0, 0).linePoints(2) must contain(Point(-1, 0))
      Point(0, 0).linePoints(2) must contain(Point(1, 0))
      Point(0, 0).linePoints(2) must contain(Point(-2, 2))
      Point(0, 0).linePoints(2) must contain(Point(2, -2))
      Point(0, 0).linePoints(2) must contain(Point(0, -2))
      Point(0, 0).linePoints(2) must contain(Point(0, 2))
      Point(0, 0).linePoints(2) must contain(Point(-2, 0))
      Point(0, 0).linePoints(2) must contain(Point(2, 0))
    }
  }
}
| AI-comp/Terraforming | src/test/scala/net/aicomp/terraforming/entity/PointSpec.scala | Scala | apache-2.0 | 6,136 |
package org.hatdex.hat.she.mappers
import java.util.UUID
import org.hatdex.hat.api.models.{
EndpointQuery,
EndpointQueryFilter,
PropertyQuery
}
import org.hatdex.hat.api.models.applications.{
DataFeedItem,
DataFeedItemContent,
DataFeedItemLocation,
DataFeedItemMedia,
DataFeedItemTitle,
LocationAddress,
LocationGeo
}
import org.hatdex.hat.she.models.StaticDataValues
import org.joda.time.DateTime
import play.api.libs.json.{ JsError, JsObject, JsString, JsSuccess, JsValue }
import scala.util.{ Failure, Try }
/**
 * Maps the latest `facebook/profile` records into a "profile changed" feed item.
 * Only the fields whose values differ between the current and the previous
 * record are reported.
 */
class FacebookProfileMapper extends DataEndpointMapper with FeedItemComparator {

  /** Queries the profile endpoint, newest first, filtered by `hat_updated_time`. */
  def dataQueries(
      fromDate: Option[DateTime],
      untilDate: Option[DateTime]
    ): Seq[PropertyQuery] = {
    Seq(
      PropertyQuery(
        List(
          EndpointQuery(
            "facebook/profile",
            None,
            dateFilter(fromDate, untilDate).map(f =>
              Seq(EndpointQueryFilter("hat_updated_time", None, f))
            ),
            None
          )
        ),
        Some("hat_updated_time"),
        Some("descending"),
        None
      )
    )
  }

  /**
   * Builds a feed item listing the profile fields that changed.
   * Fails with a `RuntimeException` when there is no previous record to
   * compare against or when nothing changed.
   */
  def mapDataRecord(
      recordId: UUID,
      content: JsValue,
      tailRecordId: Option[UUID] = None,
      tailContent: Option[JsValue] = None
    ): Try[DataFeedItem] = {
    // Keep only the fields whose values differ between previous and current record
    // (entries flagged `true` by the comparator are unchanged and dropped).
    val changedFields = compare(content, tailContent).filterNot(_._1)
    if (changedFields.isEmpty) {
      Failure(new RuntimeException("Comparison failure. Data the same"))
    } else {
      for {
        title <- Try(
          DataFeedItemTitle("Your Facebook Profile has changed.", None, None)
        )
        itemContent <- {
          // One line per changed field description.
          val contentText = changedFields.map(item => s"${item._2}\n").mkString
          Try(DataFeedItemContent(Some(contentText), None, None, None))
        }
      } yield {
        DataFeedItem(
          "facebook",
          (tailContent.getOrElse(content) \ "hat_updated_time").as[DateTime],
          Seq("profile"),
          Some(title),
          Some(itemContent),
          None
        )
      }
    }
  }

  /**
   * Compares the tracked profile fields of the current and previous records.
   * Returns an empty sequence when there is no previous record.
   */
  def compare(
      content: JsValue,
      tailContent: Option[JsValue]
    ): Seq[(Boolean, String)] = {
    tailContent match {
      case None => Seq()
      case Some(tail) =>
        Seq(
          compareString(content, tail, "name", "Name"),
          compareString(content, tail, "gender", "Gender"),
          compareString(content, tail, "age_range", "Age Range"),
          compareInt(content, tail, "friend_count", "Number of Friends")
        )
    }
  }
}
/** Maps `facebook/events` records into calendar-style data feed items. */
class FacebookEventMapper extends DataEndpointMapper {

  /** Queries the events endpoint, ordered by `start_time`, within the requested window. */
  def dataQueries(
    fromDate: Option[DateTime],
    untilDate: Option[DateTime]
  ): Seq[PropertyQuery] = {
    Seq(
      PropertyQuery(
        List(
          EndpointQuery(
            "facebook/events",
            None,
            dateFilter(fromDate, untilDate).map(f =>
              Seq(EndpointQueryFilter("start_time", None, f))
            ),
            None
          )
        ),
        Some("start_time"),
        None,
        None
      )
    )
  }

  /**
   * Builds a feed item for a single event record. The title depends on the
   * user's RSVP status; a location is attached only when the event's `place`
   * yields usable geo or address data.
   */
  def mapDataRecord(
    recordId: UUID,
    content: JsValue,
    tailRecordId: Option[UUID] = None,
    tailContent: Option[JsValue] = None
  ): Try[DataFeedItem] = {
    for {
      // Human-readable (date, time-range) strings built from start/end times.
      timeIntervalString <- Try(
        eventTimeIntervalString(
          (content \ "start_time").as[DateTime],
          Some((content \ "end_time").as[DateTime])
        )
      )
      itemContent <- Try(
        DataFeedItemContent(
          Some((content \ "description").as[String]),
          None,
          None,
          None
        )
      )
      title <- Try(if ((content \ "rsvp_status").as[String] == "attending") {
        DataFeedItemTitle(
          "You are attending an event",
          Some(
            s"${timeIntervalString._1} ${timeIntervalString._2.getOrElse("")}"
          ),
          Some("event")
        )
      } else {
        DataFeedItemTitle(
          "You have an event",
          Some(
            s"${timeIntervalString._1} ${timeIntervalString._2.getOrElse("")}"
          ),
          Some("event")
        )
      })
    } yield {
      // Note: Facebook serialises coordinates as strings here, hence .as[String].toDouble.
      val location = Try(
        DataFeedItemLocation(
          geo = (content \ "place")
            .asOpt[JsObject]
            .map(location =>
              LocationGeo(
                (location \ "location" \ "longitude").as[String].toDouble,
                (location \ "location" \ "latitude").as[String].toDouble
              )
            ),
          address = (content \ "place")
            .asOpt[JsObject]
            .map(location =>
              LocationAddress(
                (location \ "location" \ "country").asOpt[String],
                (location \ "location" \ "city").asOpt[String],
                (location \ "name").asOpt[String],
                (location \ "location" \ "street").asOpt[String],
                (location \ "location" \ "zip").asOpt[String]
              )
            ),
          tags = None
        )
      ).toOption
        // keep the location only when at least one component was extracted
        .filter(l => l.address.isDefined || l.geo.isDefined || l.tags.isDefined)
      DataFeedItem(
        "facebook",
        (content \ "start_time").as[DateTime],
        Seq("event"),
        Some(title),
        Some(itemContent),
        location
      )
    }
  }
}
/** Maps `facebook/feed` records (posts) into data feed items. */
class FacebookFeedMapper extends DataEndpointMapper {

  /** Queries the feed endpoint, newest first, filtered by `created_time`. */
  def dataQueries(
    fromDate: Option[DateTime],
    untilDate: Option[DateTime]
  ): Seq[PropertyQuery] = {
    Seq(
      PropertyQuery(
        List(
          EndpointQuery(
            "facebook/feed",
            None,
            dateFilter(fromDate, untilDate).map(f =>
              Seq(EndpointQueryFilter("created_time", None, f))
            ),
            None
          )
        ),
        Some("created_time"),
        Some("descending"),
        None
      )
    )
  }

  /**
   * Builds a feed item for a single post. The title reflects the post type
   * (photo / shared link / plain post); media, text and location are
   * extracted when present.
   */
  def mapDataRecord(
    recordId: UUID,
    content: JsValue,
    tailRecordId: Option[UUID] = None,
    tailContent: Option[JsValue] = None
  ): Try[DataFeedItem] = {
    for {
      title <- Try(if ((content \ "type").as[String] == "photo") {
        DataFeedItemTitle("You posted a photo", None, Some("photo"))
      } else if ((content \ "type").as[String] == "link") {
        DataFeedItemTitle("You shared a story", None, None)
      } else {
        DataFeedItemTitle("You posted", None, None)
      })
      // Thumbnail + full-size media; falls back to full_picture alone.
      media <- Try(
        (content \ "picture")
          .asOpt[String]
          .map(url =>
            List(
              DataFeedItemMedia(
                Some(url),
                (content \ "full_picture").asOpt[String]
              )
            )
          )
          .getOrElse {
            List(
              DataFeedItemMedia(None, (content \ "full_picture").asOpt[String])
            )
          }
      )
      // Post text: message, else story, else description; the link is appended below.
      itemContent <- Try(
        DataFeedItemContent(
          Some(s"""${(content \ "message")
            .asOpt[String]
            .getOrElse(
              (content \ "story")
                .asOpt[String]
                .getOrElse((content \ "description").as[String])
            )}
            |
            |${(content \ "link")
            .asOpt[String]
            .getOrElse("")}""".stripMargin.trim),
          None,
          Some(media),
          None
        )
      )
      date <- Try((content \ "created_time").as[DateTime])
      tags <- Try(Seq("post", (content \ "type").as[String]))
    } yield {
      // Geo coordinates when the post carries an attached place with lat/long.
      val locationGeo = Try(
        LocationGeo(
          (content \ "place" \ "location" \ "longitude").as[Double],
          (content \ "place" \ "location" \ "latitude").as[Double]
        )
      ).toOption
      val locationAddress = Try(
        LocationAddress(
          (content \ "place" \ "location" \ "country").asOpt[String],
          (content \ "place" \ "location" \ "city").asOpt[String],
          (content \ "place" \ "name").asOpt[String],
          (content \ "place" \ "location" \ "street").asOpt[String],
          (content \ "place" \ "location" \ "zip").asOpt[String]
        )
      ).toOption
      // Discard the address when every field came back empty.
      val maybeLocation =
        if (
          locationAddress.contains(
            LocationAddress(None, None, None, None, None)
          )
        ) {
          None
        } else {
          locationAddress
        }
      // Attach a location only when there is at least geo or address data.
      val location = locationGeo
        .orElse(maybeLocation)
        .map(_ => DataFeedItemLocation(locationGeo, maybeLocation, None))
      DataFeedItem(
        "facebook",
        date,
        tags,
        Some(title),
        Some(itemContent),
        location
      )
    }
  }
}
/** Maps `facebook/likes/pages` records into "you liked a page" feed items. */
class FacebookPagesLikesMapper extends DataEndpointMapper {

  /** Queries the page-likes endpoint, newest first, filtered by `created_time`. */
  def dataQueries(
    fromDate: Option[DateTime],
    untilDate: Option[DateTime]
  ): Seq[PropertyQuery] = {
    Seq(
      PropertyQuery(
        List(
          EndpointQuery(
            "facebook/likes/pages",
            None,
            dateFilter(fromDate, untilDate).map(f =>
              Seq(EndpointQueryFilter("created_time", None, f))
            ),
            None
          )
        ),
        Some("created_time"),
        Some("descending"),
        None
      )
    )
  }

  /**
   * Builds a feed item for a single liked page, including its name, city and
   * website in the body, and the page's location (when available).
   */
  def mapDataRecord(
    recordId: UUID,
    content: JsValue,
    tailRecordId: Option[UUID] = None,
    tailContent: Option[JsValue] = None
  ): Try[DataFeedItem] = {
    for {
      name <- Try((content \ "name").as[String])
      title <- Try(DataFeedItemTitle(s"You liked $name", None, None))
      itemContent <- Try(
        DataFeedItemContent(
          Some(s"""Page Name - $name
            |
            |Location - ${(content \ "location" \ "city")
            .asOpt[String]
            .getOrElse("")}
            |Website - ${(content \ "website")
            .asOpt[String]
            .getOrElse("")}""".stripMargin.trim),
          None,
          None,
          None
        )
      )
      date <- Try((content \ "created_time").as[DateTime])
      tags <- Try(Seq("page", name))
    } yield {
      // Geo coordinates of the page, if present on the record.
      val locationGeo = Try(
        LocationGeo(
          (content \ "location" \ "longitude").as[Double],
          (content \ "location" \ "latitude").as[Double]
        )
      ).toOption
      val locationAddress = Try(
        LocationAddress(
          (content \ "location" \ "country").asOpt[String],
          (content \ "location" \ "city").asOpt[String],
          (content \ "name").asOpt[String],
          (content \ "location" \ "street").asOpt[String],
          (content \ "location" \ "zip").asOpt[String]
        )
      ).toOption
      // Discard the address when every field came back empty.
      val maybeLocation =
        if (
          locationAddress.contains(
            LocationAddress(None, None, None, None, None)
          )
        ) {
          None
        } else {
          locationAddress
        }
      // Attach a location only when there is at least geo or address data.
      val location = locationGeo
        .orElse(maybeLocation)
        .map(_ => DataFeedItemLocation(locationGeo, maybeLocation, None))
      DataFeedItem(
        "facebook",
        date,
        tags,
        Some(title),
        Some(itemContent),
        location
      )
    }
  }
}
/**
 * Extracts static (non-feed) Facebook data: the latest profile record and
 * the number of liked pages.
 */
class FacebookProfileStaticDataMapper extends StaticDataEndpointMapper {

  /** Latest profile record and latest page-likes record (one of each). */
  def dataQueries(): Seq[PropertyQuery] = {
    Seq(
      PropertyQuery(
        List(EndpointQuery("facebook/profile", None, None, None)),
        Some("hat_updated_time"),
        Some("descending"),
        Some(1)
      ),
      PropertyQuery(
        List(EndpointQuery("facebook/likes/pages", None, None, None)),
        Some("created_time"),
        Some("descending"),
        Some(1)
      )
    )
  }

  /**
   * Turns one raw JSON record into `StaticDataValues`, keyed by the last
   * endpoint path segment. For likes only `number_of_pages_liked` is kept;
   * for the profile, `location` is flattened to its name and the bulky
   * `friends`/`languages` fields are dropped.
   */
  def mapDataRecord(
    recordId: UUID,
    content: JsValue,
    endpoint: String
  ): Seq[StaticDataValues] = {
    val eventualData = content.validate[Map[String, JsValue]]
    eventualData match {
      case JsSuccess(value, _) =>
        val lastPartOfEndpointString = endpoint.split("/").last
        if (endpoint.contains("likes")) {
          val numberOfPagesLiked =
            value.filterKeys(key => key == "number_of_pages_liked")
          if (numberOfPagesLiked.isEmpty) {
            Seq()
          } else {
            Seq(StaticDataValues(lastPartOfEndpointString, numberOfPagesLiked))
          }
        } else {
          // Replace the location object with just its display name, if present.
          val updatedValue = value
            .get("location")
            .flatMap(v => (v \ "name").asOpt[String])
            .map(locationName =>
              value ++ Map("location" -> JsString(locationName))
            )
            .getOrElse(value)
          Seq(
            StaticDataValues(
              lastPartOfEndpointString,
              updatedValue.filterKeys(key =>
                key != "friends" && key != "languages"
              )
            )
          )
        }
      case e: JsError =>
        // Malformed records are logged and skipped rather than failing the batch.
        logger.error(s"Couldn't validate static data JSON for $endpoint. $e")
        Seq()
    }
  }
}
| Hub-of-all-Things/HAT2.0 | hat/app/org/hatdex/hat/she/mappers/FacebookMappers.scala | Scala | agpl-3.0 | 12,928 |
package frameless
import scala.annotation.implicitNotFound
/**
 * When summing Spark doesn't change these types:
 * - Long -> Long
 * - BigDecimal -> BigDecimal
 * - Double -> Double
 *
 * For other types there are conversions:
 * - Int -> Long
 * - Short -> Long
 *
 * `In` is the element type being summed; `Out` is the type Spark produces
 * for the sum.
 */
@implicitNotFound("Cannot compute sum of type ${In}.")
trait CatalystSummable[In, Out] {
  /** Identity element of the summation (e.g. 0L, BigDecimal(0), 0.0). */
  def zero: In
}
object CatalystSummable {
  /** Creates an instance whose `zero` is the supplied identity element. */
  def apply[In, Out](zero: In): CatalystSummable[In, Out] = {
    val z = zero // shadow-free name so the anonymous class member can refer to it
    new CatalystSummable[In, Out] { val zero: In = z }
  }

  // Types Spark sums without widening.
  implicit val framelessSummableLong: CatalystSummable[Long, Long] = apply(zero = 0L)
  implicit val framelessSummableBigDecimal: CatalystSummable[BigDecimal, BigDecimal] = apply(zero = BigDecimal(0))
  implicit val framelessSummableDouble: CatalystSummable[Double, Double] = apply(zero = 0.0)
  // Types Spark widens to Long when summing.
  implicit val framelessSummableInt: CatalystSummable[Int, Long] = apply(zero = 0)
  implicit val framelessSummableShort: CatalystSummable[Short, Long] = apply(zero = 0)
}
| adelbertc/frameless | core/src/main/scala/frameless/CatalystSummable.scala | Scala | apache-2.0 | 1,168 |
package at.logic.gapt.prooftool
/**
* Created by IntelliJ IDEA.
* User: mrukhaia
* Date: 2/3/11
* Time: 4:24 PM
*/
import scala.swing._
import BorderPanel._
import event._
import java.awt.Font._
import at.logic.gapt.proofs.proofs._
import java.awt.event.{ MouseMotionListener, MouseEvent }
import at.logic.gapt.proofs.shlk.SchemaProofLinkRule
import at.logic.gapt.proofs.lk.base.Sequent
import java.awt.RenderingHints
import at.logic.gapt.proofs.lk._
import at.logic.gapt.proofs.occurrences.FormulaOccurrence
class DrawProof( val proof: TreeProof[_], private val fSize: Int, private var visible_occurrences: Option[Set[FormulaOccurrence]], private var str: String )
extends BorderPanel with MouseMotionListener {
// Colours used to highlight/unhighlight sequents on mouse-over.
private val blue = new Color( 0, 0, 255 )
private val black = new Color( 0, 0, 0 )
private val white = new Color( 255, 255, 255 )
background = white
opaque = false
// Font for inference-rule labels, slightly smaller than the sequent font.
private val labelFont = new Font( SERIF, ITALIC, fSize - 2 )
// Horizontal padding surrounding each subproof.
private val bd = Swing.EmptyBorder( 0, fSize * 2, 0, fSize * 2 )
private val ft = new Font( SERIF, PLAIN, fSize )
private var drawLines = true
// The following is a hack to be able to apply searching to the end-sequent. Think about better solution.
// The problem is that I need to "recalculate" end-sequent and need def for this reason.
// But then since def is a function, size of tx1 cannot be calculated and lines are not drawn correctly.
private var tx = tx1
// Renders the root of the proof: an interactive sequent display when the root
// is a Sequent, otherwise a plain (possibly search-highlighted) label.
private def tx1 = proof.root match {
  case so: Sequent =>
    val ds = DrawSequent( so, ft, visible_occurrences )
    ds.listenTo( mouse.moves, mouse.clicks, mouse.wheel, ProofToolPublisher )
    ds.reactions += {
      // hover feedback: colour the whole sequent blue while the mouse is over it
      case e: MouseEntered => ds.contents.foreach( x => x.foreground = blue )
      case e: MouseExited => ds.contents.foreach( x => x.foreground = black )
      // right click opens the context menu for this (sub)proof
      case e: MouseClicked if e.peer.getButton == MouseEvent.BUTTON3 => PopupMenu( proof, this, e.point.x, e.point.y )
    }
    ds
  case _ => new Label( proof.root.toString ) {
    font = ft
    if ( !str.isEmpty && proof.root.toString.contains( str ) ) foreground = new Color( 0, 225, 0 )
  }
}
// React to mouse gestures and to global prooftool events.
listenTo( mouse.moves, mouse.clicks, mouse.wheel, ProofToolPublisher )
reactions += {
  case e: MouseDragged =>
    // show a "move" cursor while the proof is being dragged
    Main.body.cursor = new java.awt.Cursor( java.awt.Cursor.MOVE_CURSOR )
  case e: MouseReleased =>
    Main.body.cursor = java.awt.Cursor.getDefaultCursor
  case e: MouseWheelMoved =>
    // forward wheel events to the scrollable main body
    Main.body.peer.dispatchEvent( e.peer )
  case HideStructuralRules => //Fix: contraction is still drawn when a weakening is followed by a contraction.
    proof.rule match {
      case WeakeningLeftRuleType | WeakeningRightRuleType =>
        drawLines = false
        tx.visible = false
      case ContractionLeftRuleType | ContractionRightRuleType =>
        // keep the inference line when the premise is a weakening (see fix note above)
        val rule = proof.asInstanceOf[UnaryTreeProof[_]].uProof.rule
        if ( rule != WeakeningLeftRuleType && rule != WeakeningRightRuleType ) drawLines = false
        val dp = layout.find( _._2 == Position.Center ).get._1.asInstanceOf[DrawProof]
        dp.tx.visible = false
        dp.border = Swing.EmptyBorder( 0, 0, 3, 0 )
      case _ =>
    }
  case e: ShowAllRules if e.proof == proof =>
    drawLines = true
    initialize()
    revalidate()
  case e: ShowProof if e.proof == proof =>
    drawLines = true
    layout.foreach( pair => pair._1.visible = true )
  case e: HideProof if e.proof == proof =>
    drawLines = false
    // keep the end-sequent (South) visible; hide only the subproofs
    layout.foreach( pair => if ( pair._2 != Position.South ) pair._1.visible = false )
}
initialize()
// end of constructor
// Updates which formula occurrences are highlighted and redraws the proof.
def setVisibleOccurrences( s: Option[Set[FormulaOccurrence]] ) {
  visible_occurrences = s
  // tx = tx1 // Uncomment this line if you want to include the end-sequent.
  initialize()
  repaint()
}
// Builds this panel's layout from the root inference of the proof:
// unary rules place the subproof in the centre, binary rules place the two
// subproofs west/east; in both cases the end-sequent goes south. Axioms and
// schema proof links only render the end-sequent (plus the link label).
def initialize() {
  proof match {
    case p: UnaryTreeProof[_] =>
      border = bd
      layout( new DrawProof( p.uProof.asInstanceOf[TreeProof[_]], fSize, visible_occurrences, str ) ) = Position.Center
      layout( tx ) = Position.South
    case p: BinaryTreeProof[_] =>
      border = bd
      layout( new DrawProof( p.uProof1.asInstanceOf[TreeProof[_]], fSize, visible_occurrences, str ) ) = Position.West
      layout( new DrawProof( p.uProof2.asInstanceOf[TreeProof[_]], fSize, visible_occurrences, str ) ) = Position.East
      layout( tx ) = Position.South
    case p: NullaryTreeProof[_] => p match {
      case SchemaProofLinkRule( _, link, indices ) =>
        // Proof links show the link name with its indices above the sequent.
        layout( new BoxPanel( Orientation.Vertical ) {
          background = white
          opaque = false
          border = Swing.EmptyBorder( 0, fSize, 0, fSize )
          val pLink = LatexLabel( ft, "(\\\\textbf{" + link + "}" + indices.foldRight( "" )( ( i, rez ) => ", " + DrawSequent.formulaToLatexString( i ) + rez ) + ")", null )
          pLink.xLayoutAlignment = 0.5
          pLink.opaque = false
          pLink.border = Swing.EmptyBorder( 0, 0, 5, 0 )
          tx.xLayoutAlignment = 0.5
          tx.border = Swing.MatteBorder( 1, 0, 0, 0, new Color( 255, 0, 0 ) )
          contents += pLink
          contents += tx
        } ) = Position.South
      case _ =>
        tx.border = Swing.EmptyBorder( 0, fSize, 0, fSize )
        layout( tx ) = Position.South
    }
  }
}
// Pixel width of the rendered end-sequent: text metrics for a plain label,
// or the summed widths (plus 5px gaps) of the flow-panel components.
def getSequentWidth( g: Graphics2D ) = tx match {
  case label: Label => g.getFontMetrics( ft ).stringWidth( label.text )
  case fPanel: FlowPanel => fPanel.contents.foldLeft( 0 )( ( width, x ) => width + x.size.width + 5 )
}
// Setter for the search string used to highlight matching rules/sequents.
def search_=( s: String ) {
  str = s
}
// Current search string.
def search = str
// Re-renders the end-sequent (re-applying search highlighting) and lays out again.
def searchNotInLKProof() {
  tx = tx1
  initialize()
}
// Paints the inference lines and rule names between this proof's end-sequent
// and its subproof(s); the sequents themselves are Swing components placed by
// initialize(). Matching rule names are highlighted green when searching.
override def paintComponent( g: Graphics2D ) {
  import scala.math.max
  super.paintComponent( g )
  val metrics = g.getFontMetrics( labelFont )
  // val em = metrics.charWidth('M')
  g.setFont( labelFont )
  // g.setStroke(new BasicStroke(fSize / 25))
  g.setRenderingHint( RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_LCD_HRGB )
  if ( !str.isEmpty && proof.name.contains( str ) ) g.setColor( new Color( 0, 255, 0 ) )
  if ( drawLines ) proof match {
    case p: UnaryTreeProof[_] =>
      val center = this.layout.find( x => x._2 == Position.Center ).get._1.asInstanceOf[DrawProof]
      val width = center.size.width + fSize * 4
      val height = center.size.height
      // the inference line spans the wider of premise and conclusion sequents
      val seqLength = max( center.getSequentWidth( g ), getSequentWidth( g ) )
      g.drawLine( ( width - seqLength ) / 2, height, ( width + seqLength ) / 2, height )
      g.drawString( p.name, ( fSize / 4 + width + seqLength ) / 2, height + metrics.getMaxDescent )
    case p: BinaryTreeProof[_] =>
      val left = this.layout.find( x => x._2 == Position.West ).get._1.asInstanceOf[DrawProof]
      val leftWidth = left.size.width + fSize * 4
      val right = this.layout.find( x => x._2 == Position.East ).get._1.asInstanceOf[DrawProof]
      val rightWidth = right.size.width
      val height = max( left.size.height, right.size.height )
      val leftSeqLength = left.getSequentWidth( g )
      val rightSeqLength = right.getSequentWidth( g )
      // the line runs from the left premise's sequent to the right premise's sequent
      val lineLength = right.location.x + ( rightWidth + rightSeqLength ) / 2
      if ( Main.DEBUG ) { // draw bounding box around children for debugging
        g.setColor( new Color( 200, 200, 50 ) )
        g.drawRect( left.location.x, left.location.y, left.size.width - 1, left.size.height )
        g.setColor( new Color( 200, 50, 200 ) )
        g.drawRect( right.location.x, right.location.y, right.size.width, right.size.height )
        g.setColor( new Color( 0, 0, 0 ) )
        assert( this.size.width >= left.size.width, "Left child must not be wider than parent." )
        assert( this.size.width >= right.size.width, "Right child must not be wider than parent." )
      }
      g.drawLine( ( leftWidth - leftSeqLength ) / 2, height, lineLength, height )
      g.drawString( p.name, lineLength + fSize / 4, height + metrics.getMaxDescent )
    case _ =>
  }
}
  // Enable drag-to-scroll on the underlying peer and register this panel as
  // its own mouse-motion listener.
  this.peer.setAutoscrolls( true )
  this.peer.addMouseMotionListener( this )
  /** No-op; required by the MouseMotionListener contract. */
  def mouseMoved( e: MouseEvent ) {}
  /** Scrolls so the dragged-to point stays visible. */
  def mouseDragged( e: MouseEvent ) {
    //The user is dragging us, so scroll!
    val r = new Rectangle( e.getX, e.getY, 1, 1 )
    this.peer.scrollRectToVisible( r )
  }
  /**
   * Finds the on-screen anchor point (bottom-center) of the panel drawing `p`,
   * searching this subtree recursively and translating child coordinates into
   * this panel's coordinate space.
   *
   * @return the anchor point, or None if `p` is not drawn in this subtree
   */
  def getLocationOfProof( p: TreeProof[_] ): Option[Point] = {
    if ( p == proof ) {
      // This panel draws p: anchor at this panel's bottom-center.
      val newloc = new Point( location.x + bounds.width / 2, location.y + bounds.height )
      Some( newloc )
    } else contents.foldLeft[Option[Point]]( None )( ( res, dp ) =>
      if ( res == None ) dp match {
        case x: DrawProof =>
          x.getLocationOfProof( p ) match {
            case Some( loc ) => // need to translate
              val newloc = new Point( loc.x + location.x, loc.y + location.y )
              Some( newloc )
            case _ => None
          }
        case _ => None
      }
      else res // we have found the proof already
    )
  }
}
| gisellemnr/gapt | src/main/scala/at/logic/gapt/prooftool/DrawProof.scala | Scala | gpl-3.0 | 9,106 |
package mesosphere.marathon
package api.v2
import com.wix.accord._
import com.wix.accord.dsl._
import mesosphere.{ UnitTest, ValidationTestLike }
import mesosphere.marathon.api.v2.Validation._
import mesosphere.marathon.core.pod.BridgeNetwork
import mesosphere.marathon.raml.GroupUpdate
import mesosphere.marathon.state.Container._
import mesosphere.marathon.state.PathId._
import mesosphere.marathon.state._
import mesosphere.marathon.test.GroupCreation
import play.api.libs.json.{ JsObject, Json }
import scala.collection.immutable.Seq
object ModelValidationTest {
  // Validates group updates as if nested directly under the root path.
  implicit val groupUpdateValidator: Validator[GroupUpdate] = Group.validNestedGroupUpdateWithBase(PathId.empty)
  // Minimal fixture type for exercising multi-error validation below.
  case class ImportantTitle(name: String)
  // Deliberately puts two constraints on the same field so a single value can
  // produce multiple error messages for the grouping test.
  private implicit val mrImportantValidator: Validator[ImportantTitle] = validator[ImportantTitle] { m =>
    m.name is equalTo("Dr.")
    m.name is notEmpty
  }
  // Builds a bridge-networked Docker app with one port mapping that claims the
  // given service port, for the service-port conflict tests.
  def createServicePortApp(id: PathId, servicePort: Int) =
    AppDefinition(
      id,
      networks = Seq(BridgeNetwork()),
      container = Some(Docker(
        image = "demothing",
        portMappings = Seq(PortMapping(2000, Some(0), servicePort = servicePort))
      ))
    )
}
class ModelValidationTest extends UnitTest with GroupCreation with ValidationTestLike {
  import ModelValidationTest._
  "ModelValidation" should {
    // Baseline: a well-formed nested group id validates.
    "A group update should pass validation" in {
      val update = GroupUpdate(id = Some("/a/b/c"))
      validate(update).isSuccess should be(true)
    }
    // Distinct service ports (3200 vs 3201) must not be reported as a conflict.
    "Model validation should allow new apps that do not conflict with service ports in existing apps" in {
      val existingApp = createServicePortApp("/app1".toPath, 3200)
      val conflictingApp = createServicePortApp("/app2".toPath, 3201)
      val rootGroup = createRootGroup(apps = Map(existingApp.id -> existingApp, conflictingApp.id -> conflictingApp))
      val result = validate(rootGroup)(RootGroup.rootGroupValidator(Set()))
      result.isSuccess should be(true)
    }
    // Both violations of ImportantTitle.name must appear under one "/name" path.
    "Multiple errors within one field of a validator should be grouped into one array" in {
      val empty = ImportantTitle("")
      validate(empty) match {
        case Success => fail()
        case f: Failure =>
          val errors = (Json.toJson(f) \\\\ "details").as[Seq[JsObject]]
          errors should have size 1
          (errors.head \\\\ "path").as[String] should be("/name")
          (errors.head \\\\ "errors").as[Seq[String]] should have size 2
      }
    }
    // Guards against wix-accord's default "... value" suffix leaking into messages.
    "Validators should not produce 'value' string at the end of description." in {
      val validApp = AppDefinition("/test/group1/valid".toPath, cmd = Some("foo"))
      val invalidApp = AppDefinition("/test/group1/invalid".toPath)
      val rootGroup = createRootGroup(
        groups = Set(createGroup("/test".toPath, groups = Set(
          createGroup("/test/group1".toPath, Map(
            validApp.id -> validApp,
            invalidApp.id -> invalidApp),
            validate = false
          ),
          createGroup("/test/group2".toPath, validate = false)),
          validate = false)),
        validate = false
      )
      validate(rootGroup)(RootGroup.rootGroupValidator(Set())) should haveViolations(
        "/apps//test/group1/invalid" -> "AppDefinition must either contain one of 'cmd' or 'args', and/or a 'container'."
      )
    }
    // A combined "udp,tcp" protocol string is a single valid protocol value.
    "PortDefinition should be allowed to contain tcp and udp as protocol." in {
      val validApp = AppDefinition("/test/app".toPath, cmd = Some("foo"), portDefinitions = Seq(PortDefinition(port = 80, protocol = "udp,tcp")))
      val rootGroup = createRootGroup(groups = Set(createGroup("/test".toPath, apps = Map(validApp.id -> validApp))))
      val result = validate(rootGroup)(RootGroup.rootGroupValidator(Set()))
      result.isSuccess should be(true)
    }
  }
} | janisz/marathon | src/test/scala/mesosphere/marathon/api/v2/ModelValidationTest.scala | Scala | apache-2.0 | 3776 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.spark.example.rdd
import org.apache.hadoop.hbase.client.Delete
import org.apache.hadoop.hbase.{TableName, HBaseConfiguration}
import org.apache.hadoop.hbase.spark.HBaseContext
import org.apache.hadoop.hbase.spark.HBaseRDDFunctions._
import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.{SparkContext, SparkConf}
/**
* This is a simple example of deleting records in HBase
* with the bulkDelete function.
*/
object HBaseBulkDeleteExample {
  /** Deletes five fixed row keys from the given HBase table via Spark. */
  def main(args: Array[String]) {
    if (args.length < 1) {
      println("HBaseBulkDeleteExample {tableName} are missing an argument")
      return
    }

    val tableName = args(0)

    val sparkConf = new SparkConf().setAppName("HBaseBulkDeleteExample " + tableName)
    val sc = new SparkContext(sparkConf)
    try {
      // Row keys of the records to delete.
      val rowKeys = sc.parallelize(Array(
        Bytes.toBytes("1"),
        Bytes.toBytes("2"),
        Bytes.toBytes("3"),
        Bytes.toBytes("4"),
        Bytes.toBytes("5")))

      val hbaseContext = new HBaseContext(sc, HBaseConfiguration.create())

      // Issue the deletes in batches of 4 through the RDD extension method.
      rowKeys.hbaseBulkDelete(hbaseContext,
        TableName.valueOf(tableName),
        rowKey => new Delete(rowKey),
        4)
    } finally {
      // Always release the Spark context, even on failure.
      sc.stop()
    }
  }
}
| JingchengDu/hbase | hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/rdd/HBaseBulkDeleteExample.scala | Scala | apache-2.0 | 2,092 |
package com.programmaticallyspeaking.ncd.nashorn
import com.programmaticallyspeaking.ncd.host._
import com.programmaticallyspeaking.ncd.infra.IdGenerator
import com.programmaticallyspeaking.ncd.messaging.{Observable, SerializedSubject}
import org.slf4s.Logging
import scala.collection.concurrent.TrieMap
/**
 * Registry of active line breakpoints, keyed by generated id, plus a stream of
 * resolution events for breakpoints that gain their first concrete location.
 */
class LineBreakpoints extends Logging {
  // Generates unique breakpoint ids with the "ndb" prefix.
  private val breakpointIdGenerator = new IdGenerator("ndb")
  // All known breakpoints by id; TrieMap allows concurrent access.
  private val byId = TrieMap[String, LineBreakpoint]()
  private val resolvedSubject = new SerializedSubject[BreakpointResolved]()
  /** Events emitted when a previously unresolved breakpoint becomes resolved. */
  def resolvedBreakpoints: Observable[BreakpointResolved] = resolvedSubject
  /**
   * Attaches newly discovered breakable locations of `script` to any matching
   * breakpoints, emitting a BreakpointResolved event for each breakpoint that
   * was unresolved before gaining these locations.
   */
  def addBreakableLocations(script: Script, newLocations: Seq[BreakableLocation]): Unit = {
    // Go through active breakpoints that belong to the script
    // For each BL that matches the active breakpoint, add it
    val lineBreakpointsForScript = byId.values.filter(_.belongsTo(script))
    lineBreakpointsForScript.foreach { bp =>
      val toAdd = newLocations.filter(bp.oughtToContain)
      if (toAdd.nonEmpty) {
        // Capture resolved-ness before mutating, so we only emit on the transition.
        val willBeResolved = bp.isUnresolved
        log.debug(s"Adding ${toAdd.size} breakable locations to breakpoint ${bp.id}")
        bp.addBreakableLocations(toAdd)
        if (willBeResolved) {
          // Hm, can there be more than one location here?
          val first = toAdd.head
          val item = BreakpointResolved(bp.id, LocationInScript(first.script.id, first.scriptLocation))
          log.info(s"Resolving breakpoint ${bp.id} with location ${first.scriptLocation} in script ${script.id}")
          resolvedSubject.onNext(item)
        }
      }
    }
  }
  /** Finds the breakpoint (if any) that contains the given breakable location. */
  def forBreakableLocation(bl: BreakableLocation): Option[LineBreakpoint] = {
    byId.values.find(_.contains(bl))
  }
  /** Called on hit; one-off breakpoints are removed after their first hit. */
  def onBreakpointHit(activeBreakpoint: LineBreakpoint): Unit = {
    if (activeBreakpoint.isOneOff) {
      log.trace(s"Removing one-off breakpoint with id ${activeBreakpoint.id}")
      removeBreakpoint(activeBreakpoint)
    }
  }
  /** Removes every breakpoint from the VM and clears the registry. */
  def removeAll(): Unit = {
    //TODO: Not very atomic, this
    byId.foreach(e => e._2.remove())
    byId.clear()
  }
  /**
   * Removes line breakpoints that are unique for the given script, i.e. that cannot match
   * another script (via URL).
   */
  def removeUniqueForScript(script: Script): Seq[LineBreakpoint] = {
    // TODO: Not atomic...
    val forScript = byId.values.filter(_.belongsUniquelyTo(script))
    forScript.foreach { bp =>
      bp.remove()
      byId -= bp.id
    }
    forScript.toSeq
  }
  /** Removes the breakpoint with the given id; logs a warning if unknown. */
  def removeById(id: String): Unit = {
    byId.get(id) match {
      case Some(activeBp) =>
        log.info(s"Removing breakpoint with id $id")
        removeBreakpoint(activeBp)
      case None =>
        log.warn(s"Got request to remove an unknown breakpoint with id $id")
    }
  }
  // Removes the breakpoint from the VM and from the registry.
  private def removeBreakpoint(activeBreakpoint: LineBreakpoint): Unit = {
    activeBreakpoint.remove()
    byId -= activeBreakpoint.id
  }
  /** Creates and registers a new line breakpoint over the given locations. */
  def create(id: ScriptIdentity, location: ScriptLocation, locations: Seq[BreakableLocation], condition: Option[String], oneOff: Boolean): LineBreakpoint = {
    val activeBp = new LineBreakpoint(breakpointIdGenerator.next, locations, condition, id, location, oneOff)
    byId += (activeBp.id -> activeBp)
    activeBp
  }
}
| provegard/ncdbg | src/main/scala/com/programmaticallyspeaking/ncd/nashorn/LineBreakpoints.scala | Scala | bsd-3-clause | 3,248 |
import sbt._
import Import._
object SingleBuild extends Build with Marker {
  // Expose all projects only when the `multi` marker file exists; otherwise just the root.
  override def projects =
    if (file("multi").exists) Seq(root, sub, sub2)
    else Seq(root)

  // The root aggregates `sub` only when the `aggregate` marker file exists.
  lazy val root = Project("root", file("."), aggregate = if (file("aggregate").exists) Seq(sub) else Nil)
  lazy val sub = Project("sub", file("sub"), aggregate = Seq(sub2))
  lazy val sub2 = Project("sub2", file("sub") / "sub")
}
| dansanduleac/sbt | sbt/src/sbt-test/actions/aggregate/project/TestProject.scala | Scala | bsd-3-clause | 391 |
package dotty.tools.dotc
package transform
import core.Contexts._
/** Utility class for lazy values whose evaluation depends on a context.
* This should be used whenever the evaluation of a lazy expression
* depends on some context, but the value can be re-used afterwards
* with a different context.
*
* A typical use case is a lazy val in a phase object which exists once per root context where
* the expression intiializing the lazy val depends only on the root context, but not any changes afterwards.
*/
class CtxLazy[T](expr: Context ?=> T) {
  // Holds the computed value after the first successful evaluation.
  private var cached: Option[T] = None

  /** Evaluates `expr` under the given context on first use; later calls return the cached value. */
  def apply()(using Context): T =
    cached match {
      case Some(value) =>
        value
      case None =>
        val computed = expr
        cached = Some(computed)
        computed
    }
}
| dotty-staging/dotty | compiler/src/dotty/tools/dotc/transform/CtxLazy.scala | Scala | apache-2.0 | 741 |
package org.jetbrains.plugins.scala
package lang
package psi
package impl
package expr
import types.{Bounds, Nothing}
import lexer.ScalaTokenTypes
import psi.ScalaPsiElementImpl
import api.expr._
import com.intellij.psi.util.PsiTreeUtil
import com.intellij.lang.ASTNode
import types.result.{TypingContext, Success, Failure}
import com.intellij.psi.{PsiElement, PsiElementVisitor}
import api.ScalaElementVisitor
import com.intellij.psi.impl.source.tree.LeafPsiElement
/**
* @author Alexander Podkhalyuzin
* Date: 06.03.2008
*/
class ScIfStmtImpl(node: ASTNode) extends ScalaPsiElementImpl(node) with ScIfStmt {
  override def accept(visitor: PsiElementVisitor) {
    // Both branches currently delegate to super; kept separate in case a
    // ScalaElementVisitor-specific visit is added later.
    visitor match {
      case visitor: ScalaElementVisitor => super.accept(visitor)
      case _ => super.accept(visitor)
    }
  }

  override def toString: String = "IfStatement"

  /** The condition expression: the expression right before the closing `)`, if any. */
  def condition = {
    val rpar = findChildByType(ScalaTokenTypes.tRPARENTHESIS)
    // Option(x) yields None for null, collapsing the explicit
    // `if (x == null) None else Some(x)` pattern without changing behaviour.
    Option(rpar).flatMap(r => Option(PsiTreeUtil.getPrevSiblingOfType(r, classOf[ScExpression])))
  }

  /** The `then` branch: the expression before `else`, or the trailing expression when there is no `else`. */
  def thenBranch = {
    val kElse = findChildByType(ScalaTokenTypes.kELSE)
    val t =
      if (kElse != null) PsiTreeUtil.getPrevSiblingOfType(kElse, classOf[ScExpression])
      else getLastChild match {
        case expression: ScExpression => expression
        case _ => PsiTreeUtil.getPrevSiblingOfType(getLastChild, classOf[ScExpression])
      }
    // The candidate must exist and must not be the condition expression itself.
    Option(t).filter(tb => condition.forall(_ != tb))
  }

  /** The `else` branch expression, if an `else` keyword is present. */
  def elseBranch = {
    val kElse = findChildByType(ScalaTokenTypes.kELSE)
    Option(kElse).flatMap(e => Option(PsiTreeUtil.getNextSiblingOfType(e, classOf[ScExpression])))
  }

  /** The `(` token of the condition, if present. */
  def getLeftParenthesis = Option(findChildByType(ScalaTokenTypes.tLPARENTHESIS))

  /** The `)` token of the condition, if present. */
  def getRightParenthesis = Option(findChildByType(ScalaTokenTypes.tRPARENTHESIS))

  /**
   * The type of the `if` expression: the weak least upper bound of both branch
   * types; `weakLub(then, Unit)` when the `else` branch is missing; a Failure
   * when there is nothing to type.
   */
  protected override def innerType(ctx: TypingContext) = {
    (thenBranch, elseBranch) match {
      case (Some(t), Some(e)) => for (tt <- t.getType(TypingContext.empty);
                                      et <- e.getType(TypingContext.empty)) yield {
        Bounds.weakLub(tt, et)
      }
      case (Some(t), None) => t.getType(TypingContext.empty).map(tt => Bounds.weakLub(tt, types.Unit))
      case _ => Failure(ScalaBundle.message("nothing.to.type"), Some(this))
    }
  }
} | consulo/consulo-scala | src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScIfStmtImpl.scala | Scala | apache-2.0 | 2,632 |
package ml.wolfe.util
import scala.collection.mutable
import cc.factorie.app.nlp.coref.CorefFeatures.True
/**
* Implementation of Powell's method for derivative free optimisation
* The main idea is the algorithm 9.3 from Numerical Optimization by Wright and Nocedal.
* Following as closely as possible the scipy version:
* https://github.com/scipy/scipy/blob/maintenance/0.14.x/scipy/optimize/optimize.py
* @author Andreas Vlachos
*/
class Powell extends HyperParameterOptimisationAlgorithm {

  type Param = Map[String, Double]

  override var bestScore : Double = Double.PositiveInfinity
  override var iterates : mutable.Buffer[(Double, Param)] = mutable.Buffer()
  override var bestParameters: Param = Map()

  /**
   * Return the linear interpolation between the starting point and the direction
   * that minimizes the function value
   * @param problem: the function we are trying to optimize
   * @param point: the starting point
   * @param direction: the direction (a vector with one 1)
   * @return the point that minimizes the function along the line defined by the point and the direction
   */
  def linesearch(problem: OptimisationProblem, point:Param, direction:Param) : Param = ???

  /**
   * Optimise the problem, starting at the given starting points
   * @param problem The optimisation problem
   * @param startingPoint The starting point of the algorithm
   * @return The set of best parameters
   */
  override def optimise(problem: OptimisationProblem, startingPoint: Param): Param = {
    val dimensions = problem.parametersToOptimize.length
    println(f"Starting optimisation with $dimensions%d dimensions.")

    // All evaluated iterates, as (function value, point) pairs.
    val points: mutable.ArrayBuffer[(Double, Param)] = new mutable.ArrayBuffer[(Double, Param)]()

    // One-hot direction vector: 1.0 for the given key, 0.0 elsewhere.
    def genNewDirection(keyToChange: String): Param = {
      (for (key <- startingPoint.keys) yield (key, if (key == keyToChange) 1.0 else 0.0)).toMap
    }

    // Conjugate direction set, initialised with the coordinate axes.
    val directions: mutable.ArrayBuffer[Param] = new mutable.ArrayBuffer[Param]()
    for (key <- startingPoint.keys) {
      directions.append(genNewDirection(key))
    }

    val startingPointEval: Double = problem.evaluate(startingPoint)
    points += ((startingPointEval, startingPoint))

    // Convergence measure: change in function value between outer iterations.
    var diff = 1.0
    while (diff > 1e-14) {
      // Intermediate points of this iteration; starts at the latest iterate.
      val pointsInIteration: mutable.ArrayBuffer[Param] = new mutable.ArrayBuffer[Param]()
      pointsInIteration += points.last._2
      // Minimise along each conjugate direction in turn.
      for (direction <- directions) {
        val tempPoint = linesearch(problem, pointsInIteration.last, direction)
        pointsInIteration += tempPoint
      }
      // Drop the oldest direction and add the net displacement z_(n+1) - z_1.
      directions.remove(0)
      directions.append((for (key <- startingPoint.keys) yield (key, pointsInIteration.last(key) - pointsInIteration(0)(key))).toMap)
      val newPoint = linesearch(problem, pointsInIteration.last, directions.last)
      val newPointEval: Double = problem.evaluate(newPoint)
      // BUG FIX: capture the previous iterate's value BEFORE appending the new
      // one. The old code computed |newPointEval - points.last._1| after the
      // append, which always produced 0.0 and terminated after one iteration.
      val previousEval = points.last._1
      points += ((newPointEval, newPoint))
      println(f"new iterate: ${newPoint}%s score ${newPointEval}%.8f")
      diff = math.abs(newPointEval - previousEval)
    }

    // Single scan for the best iterate instead of three separate minBy calls.
    val best = points.minBy(_._1)
    this.bestParameters = best._2
    this.bestScore = best._1
    this.iterates += best
    best._2
  }
}
| wolfe-pack/wolfe | wolfe-util/src/main/scala/ml/wolfe/util/Powell.scala | Scala | apache-2.0 | 3,882 |
// date: Oct 26, 2011
/* CREATE CLASS
The Server system has created notecard objects and has built
a network of link lists connecting these objects. The system
passes the link lists in '.struct' files to the 'Notecard' program.
The link list physical addresses, held in the list's 'next'
variable, are converted to strings (symbolic addresses). The
physical address of the object, itself, is converted to a
string and is written to the '.struct' file.
When the object is instantiated by the 'Notecard' system it
reads '<obj>.read_object()' its portion of the '.struct file'
Two string items of this portion are:
address --symbolic address of the object
next --symbolic of the next object in the link list
swizzleTable is a Map[String,Node] into which each object stores
its 'address' string as a key and its physical address as the
key value. After all objects have loaded their mappings
(symbol addr->physical addr), then the objects are iterated.
Each object accesses the swizzleTable using its 'next'
symbolic address to retrieve the physical address of the
object to which it is linked and the 'next' variable is
rewritten with the this physical address (see: Node).
Some objects read a '.struct' file value that is a symbolic
address whose object variable is 'parent'. This 'parent'
symbolic address is the root of a link list. While a
list node may be a child, this child can support a
link list (see: Node trait).
assigner.receive_objects(structObj.tail)
swizzleTable=assigner.setId(swizzleTable, assigner)
//condition Node for logic tests,e.g., ($gender)=(male)
assigner
case "%CardSetTask"=>
val cardSetTask=CardSetTask(symbolTable)
cardSetTask.receive_objects(structObj.tail)
swizzleTable=cardSetTask.setId(swizzleTable, cardSetTask)
cardSetTask
case "%RowerNode"=>// println("%\\t\\tCreateClass: %RowerNode")
val rowerNode=RowerNode(symbolTable)
rowerNode.receive_objects(structObj.tail)
swizzleTable=rowerNode.setId(swizzleTable, rowerNode)
  In a *.struct file, a parent class that has children and is
  also a child of a parent itself has symbolic addresses immediately
  following the '%<object name>':
     1st symbolic value is the address of the object
     2nd symbolic value is the address of the first child
     3rd symbolic value is the address of the next sibling
  A class that is just a child has:
     1st symbolic value is the address of the object
     2nd symbolic value is the address of the next sibling
*/
package com.client
import scala.collection.mutable.Map
/**
 * Builds the in-memory notecard object network from the parsed '.struct'
 * card lists: instantiates one object per %-tagged card, records each
 * object's symbolic address in `swizzleTable`, and finally rewrites the
 * symbolic link-list references into physical object references.
 */
class CreateClass extends Node {
  var objOption:Option[Any]=None // Any because String to Node
  var coreVector=List[Any]() // all created objects, in creation order (reversed below)
  var swizzleTable= Map[String,Node]() // symbolic address -> physical object
  var root:Option[Notecard]=None // the %Notecard object, root of the hierarchy
  // <class name> used to instantiate the class.
  // invoked by CommandNetwork.fileLoad_BuildNetwork(..)
  def establishObjectNetwork( symbolTable:Map[String,String],
        allStructSets:List[List[String]]):Notecard= {
    //println("CreateClass: allStructSets.size="+allStructSets.size)
    for(structSet <-allStructSets) {
      //'structSet' is List[String], representing one Card
      // containing the object's name, such as, %DisplayText,
      // followed by one or more argument values that are
      // loaded into the object's fields.
      objOption=create_object(structSet, symbolTable) //CreateClass
      //build object list
      var obj= objOption match {
        case Some(e)=> e
        case None=> println("create_object(..) failed in CreateClass.scala")
          throw new Exception
      }
      coreVector=obj:: coreVector
    }
    for(core <- coreVector.reverse) {
      //convert symbolic address to physical one in Node
      swizzleReference(core)
    }
    if(root==None)
      throw new Exception(".struct file lacks %Notecard group")
    root.get // notecard assigned to root by 'create_object(..)'
  }
  // The *.struct commands , such as %DisplayText, used the
  // its %<class name> in a match statement to create the named
  // object, e.g., Display(symbolTable) Next, the remaining list
  // string values (class parameters) are added to the object.
  // Next, the object's symbolic address is converted to
  // a physical address. Finally, the created name objects are
  // returned to be stored in 'coreVector'.
  def create_object(structObj:List[String], symbolTable:Map[String,String]):Option[Any] = {
    structObj.head match{
      case "%Notecard"=>
        val notecard= Notecard(symbolTable)
        root=Some(notecard) //Notecard is special, it is the root of the hierarchy
        // Removes tag such as %%Notecard and passes the
        // object's file parameters such as height,width,size
        // Adds object to swizzle table
        notecard.receive_objects(structObj.tail)
        swizzleTable=notecard.setId(swizzleTable, notecard)
        Some(notecard ) // returned to be stored in 'coreVector'
      case "%CardSet" | "%AddCardSet" =>
        val cardSet=CardSet(symbolTable)
        cardSet.receive_objects(structObj.tail) // pass parameters to object
        swizzleTable=cardSet.setId(swizzleTable, cardSet) //phy addr added to swizzleTab..
        Some(cardSet ) // returned to be stored in 'coreVector;
      case "%NotecardTask"=>
        val notecardTask=NotecardTask(symbolTable)
        notecardTask.receive_objects(structObj.tail) // pass parameters to object
        swizzleTable=notecardTask.setId(swizzleTable, notecardTask) //phy addr added
        Some(notecardTask) // returned to be stored in 'coreVector;
      case "%NextFile"=>
        val nextFile=NextFile(symbolTable)
        nextFile.receive_objects(structObj.tail) // pass parameters to object
        swizzleTable=nextFile.setId(swizzleTable, nextFile) //phy addr added to swizzleTab..
        Some(nextFile ) // returned to be stored in 'coreVector;
      case "%AssignerNode"=>
        val assigner=Assigner(symbolTable)
        assigner.receive_objects(structObj.tail) // pass parameters to object
        swizzleTable=assigner.setId(swizzleTable, assigner) // phy addr add to swizzleTable
        Some(assigner) // returned to be stored in 'coreVector;
      case "%CardSetTask"=>
        val cardSetTask=CardSetTask(symbolTable)
        cardSetTask.receive_objects(structObj.tail) // pass parameters to object
        swizzleTable=cardSetTask.setId(swizzleTable, cardSetTask)// phy addr add swizzleTable
        Some(cardSetTask) // returned to be stored in 'coreVector;
      case "%RowerNode"=>
        val rowerNode=RowerNode(symbolTable)
        rowerNode.receive_objects(structObj.tail) // pass parameters to object
        swizzleTable=rowerNode.setId(swizzleTable, rowerNode)// phy addr add swizzleTable
        Some(rowerNode ) // returned to be stored in 'coreVector;
      case "%DisplayText"=>
        val displayText=DisplayText(symbolTable)
        displayText.receive_objects(structObj.tail) // pass parameters to object
        swizzleTable=displayText.setId(swizzleTable, displayText)
        Some(displayText ) // returned to be stored in 'coreVector;
      case "%BoxField"=>
        val boxField=BoxField(symbolTable)
        boxField.receive_objects(structObj.tail) // pass parameters to object
        swizzleTable=boxField.setId(swizzleTable, boxField)// phy addr add swizzleTable
        Some(boxField ) // returned to be stored in 'coreVector;
      case "%GroupNode"=>
        val groupNode=GroupNode(symbolTable)
        groupNode.receive_objects(structObj.tail) // pass parameters to object
        swizzleTable=groupNode.setId(swizzleTable, groupNode)
        Some(groupNode ) // returned to be stored in 'coreVector;
      case "%DisplayVariable"=>
        val displayVariable=DisplayVariable(symbolTable)
        displayVariable.receive_objects(structObj.tail) // pass parameters to object
        swizzleTable=displayVariable.setId(swizzleTable, displayVariable)
        Some(displayVariable) // returned to be stored in 'coreVector;
      case "%XNode"=>
        val xnode=XNode(symbolTable)
        xnode.receive_objects(structObj.tail) // pass parameters to object
        swizzleTable=xnode.setId(swizzleTable, xnode)// phy addr add swizzleTable
        Some(xnode ) // returned to be stored in 'coreVector;
      case "%EditNode"=>
        val editNode=EditNode(symbolTable)
        editNode.receive_objects(structObj.tail) // pass parameters to object
        swizzleTable=editNode.setId(swizzleTable, editNode)
        Some(editNode ) // returned to be stored in 'coreVector;
      case "%LoadDictionary" =>
        val loadDictionary=LoadDictionary(symbolTable)
        loadDictionary.receive_objects(structObj.tail) // pass parameters to object
        swizzleTable=loadDictionary.setId(swizzleTable, loadDictionary)
        Some(loadDictionary)
      // LoadAssign is an Assigner object that resides in a different
      // node of the Linked List structure from that of CardSet
      // Assigner objects. The Assigner object will become a child of
      // LoadDictionary.
      case "%LoadAssign"=>
        val assigner=Assigner(symbolTable)
        assigner.receive_objects(structObj.tail) // pass parameters to object
        swizzleTable=assigner.setId(swizzleTable, assigner) // phy addr add to swizzleTable
        Some(assigner) // returned to be stored in 'coreVector;
      case _=> println("unknown in CreateClass:create_object="+structObj.head )
        None
    }
  }
  /*
     Every Card command class has 'convertToReference' method. This
     method may have one or two Node methods:
        convertToSibling
        convertToChild
     which reads the SwizzleTable, extracting the physical address that
     is associated with the symbolic address. The physical addresses
     are assigned to Node.next or to Node.child. */
  def swizzleReference( factoryObj:Any)= {
    factoryObj match {
      case nc:Notecard=>
        nc.convertToReference(swizzleTable)
      case cs:CardSet=>
        cs.convertToReference(swizzleTable)
      case ft:NotecardTask=>
        ft.convertToReference(swizzleTable)
      case nf:NextFile=>
        nf.convertToReference(swizzleTable)
      case as:Assigner=>
        as.convertToReference(swizzleTable)
      case cst:CardSetTask=>
        cst.convertToReference(swizzleTable)
      case rn:RowerNode=>
        rn.convertToReference(swizzleTable)
      case dt:DisplayText=>
        dt.convertToReference(swizzleTable)
      case bf:BoxField=>
        bf.convertToReference(swizzleTable)
      case gn:GroupNode=>
        gn.convertToReference(swizzleTable)
      case dv:DisplayVariable=>
        dv.convertToReference(swizzleTable)
      case xn:XNode=>
        xn.convertToReference(swizzleTable)
      case en:EditNode=>
        en.convertToReference(swizzleTable)
      case ld:LoadDictionary=>
        ld.convertToReference(swizzleTable)
      case _=>
        println("CreateClass case_=> "+factoryObj)
        println("CreateClass throw exception")
        throw new Exception
    }
  }
  def getNotecard= root //Notecard is the hierarchy root-- see: createObject
}
| hangle/Notecard | src/CreateClass.scala | Scala | apache-2.0 | 10,720 |
/*
* Copyright 2015 Heiko Seeberger
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.heikoseeberger.reactiveflows
import akka.actor.{ ActorIdentity, ActorPath, ActorRef, ActorSystem, Identify }
import akka.testkit.{ TestDuration, TestProbe }
import org.scalatest.{ BeforeAndAfterAll, Suite }
import scala.concurrent.Await
import scala.concurrent.duration.DurationInt
trait AkkaSpec extends BeforeAndAfterAll { this: Suite =>

  // One actor system shared by the whole suite; terminated after all tests ran.
  protected implicit val system = ActorSystem()

  override protected def afterAll() = {
    val terminated = system.terminate()
    Await.ready(terminated, 42.seconds)
    super.afterAll()
  }
}
| hseeberger/reactive-flows | src/test/scala/de/heikoseeberger/reactiveflows/AkkaSpec.scala | Scala | apache-2.0 | 1,118 |
package com.twitter.util.lint
import java.util.regex.Pattern
/**
* A single lint rule, that when [[Rule.apply() run]] evaluates
* whether or not there are any issues.
*/
trait Rule {

  /**
   * Runs this lint check.
   *
   * @return An empty `Seq` if no issues are found.
   */
  def apply(): Seq[Issue]

  /** The broad category that this rule belongs in. */
  def category: Category

  /**
   * A '''short''' name for this rule intended to be used for
   * generating a machine readable [[id]].
   */
  def name: String

  /**
   * Produce a "machine readable" id from [[name]]: trimmed, lower-cased,
   * with each whitespace character replaced by a dash.
   *
   * NOTE(review): `toLowerCase` uses the default locale — assumed fine for
   * ASCII rule names; confirm if names may contain locale-sensitive characters.
   */
  def id: String =
    Rule.WhitespacePattern.matcher(name.toLowerCase.trim).replaceAll("-")

  /** A description of the issue and what problems it may cause. */
  def description: String
}
object Rule {

  /**
   * Creates a [[Rule]] backed by the given thunk.
   *
   * @param fn evaluated on each run to collect the current issues.
   */
  def apply(
    category: Category,
    shortName: String,
    desc: String
  )(
    fn: => Seq[Issue]
  ): Rule = {
    val cat = category
    new Rule {
      def category: Category = cat
      def name: String = shortName
      def description: String = desc
      def apply(): Seq[Issue] = fn
    }
  }

  // Matches a single whitespace character; used by `Rule.id`.
  private[Rule] val WhitespacePattern =
    Pattern.compile("\\\\s")
}
| BuoyantIO/twitter-util | util-lint/src/main/scala/com/twitter/util/lint/Rule.scala | Scala | apache-2.0 | 1,309 |
/*
* Copyright 2011-2022 GatlingCorp (https://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.commons.util
import io.gatling.BaseSpec
import io.gatling.commons.util.StringHelper.RichString
/** Unit tests for [[StringHelper]]: truncation/padding on strings and the
  * `indexOf` / `replaceIf` utilities on `RichCharSequence`.
  */
class StringHelperSpec extends BaseSpec {
  "truncate" should "truncate the string when its length exceeds the max length" in {
    "hello".truncate(2) shouldBe "he..."
  }
  it should "left the string untouched when the string does not exceeds the max length" in {
    "hello".truncate(6) shouldBe "hello"
  }
  "leftPad" should "pad correctly a two digits number" in {
    "12".leftPad(6) shouldBe "    12"
  }
  it should "not pad when the number of digits is higher than the expected string size" in {
    "123456".leftPad(4) shouldBe "123456"
  }
  "rightPad" should "pad correctly a two digits number" in {
    "12".rightPad(6) shouldBe "12    "
  }
  it should "not pad when the number of digits is higher than the expected string size" in {
    "123456".rightPad(4) shouldBe "123456"
  }
  "RichCharSequence.indexOf" should "find target when placed at the beginning" in {
    StringHelper.RichCharSequence("${foobar}").indexOf("${".toCharArray, 0) shouldBe 0
  }
  it should "not find target when placed at the beginning but there's an offset" in {
    StringHelper.RichCharSequence("${foobar}").indexOf("${".toCharArray, 1) shouldBe -1
  }
  it should "find target when placed at the middle" in {
    StringHelper.RichCharSequence("foo${bar}").indexOf("${".toCharArray, 0) shouldBe 3
  }
  it should "find target when placed at the middle and there's an inferior offset" in {
    StringHelper.RichCharSequence("foo${bar}").indexOf("${".toCharArray, 2) shouldBe 3
  }
  it should "not find target when placed at the middle and there's an superior offset" in {
    StringHelper.RichCharSequence("foo${bar}").indexOf("${".toCharArray, 4) shouldBe -1
  }
  it should "not find target when target is longer" in {
    StringHelper.RichCharSequence("$").indexOf("${".toCharArray, 0) shouldBe -1
  }
  // NOTE(review): the label says "replace" but the method under test is replaceIf.
  "replace" should "replace all occurrences" in {
    "1234foo5678foo9012foo".replaceIf(char => Character.isAlphabetic(char), '_') shouldBe "1234___5678___9012___"
  }
}
| gatling/gatling | gatling-commons/src/test/scala/io/gatling/commons/util/StringHelperSpec.scala | Scala | apache-2.0 | 2,719 |
import sbt._
import Keys._
import org.scalajs.sbtplugin.ScalaJSPlugin
import org.scalajs.sbtplugin.ScalaJSPlugin.autoImport._
import scalajsbundler.sbtplugin.ScalaJSBundlerPlugin
import scalajsbundler.sbtplugin.ScalaJSBundlerPlugin.autoImport._
/** sbt build definition helpers for the scalajs-react-mdl project:
  * version pins, dependency groups, reusable project settings and the
  * project layout (the library itself plus two example apps).
  */
object ScalaJSReactMdl {
  // Single place to pin all Scala and npm dependency versions.
  object Versions {
    val scala = "2.11.8"
    val scalatest = "3.0.1"
    val htmlWebpackPlugin = "~2.26.0"
    val copyWebpackPlugin = "~4.0.1"
    val htmlLoader = "~0.4.3"
    val react = "~15.4.2"
    val scalaJsReact = "0.2.0-SNAPSHOT"
    val reactMdl = "^1.7.2"
    val cssLoader = "^0.25.0"
    val styleLoader = "^0.13.1"
    val reactMdlExtra = "^1.4.3"
    val scalaJsRedux = "0.4.0-SNAPSHOT"
    val scalaJsReactReduxForm = "0.2.0-SNAPSHOT"
    val reduxLogger = "~2.7.4"
  }
  // Dependency groups: Scala artifacts use %%%!, npm packages are (name, version) pairs.
  object Dependencies {
    lazy val scalaJsReact = "com.github.eldis" %%%! "scalajs-react" % Versions.scalaJsReact
    lazy val scalatest = "org.scalatest" %%%! "scalatest" % Versions.scalatest % "test"
    lazy val jsReact = Seq(
      "react" -> Versions.react,
      "react-dom" -> Versions.react,
      "react-mdl" -> Versions.reactMdl
    )
    lazy val scalaJsReactReduxForm = Seq(
      "com.github.eldis" %%%! "scalajs-redux-react-eldis"% Versions.scalaJsRedux,
      "com.github.eldis" %%%! "scalajs-react-redux-form" % Versions.scalaJsReactReduxForm
    )
    lazy val reduxLogger = Seq("redux-logger" -> Versions.reduxLogger)
    lazy val reactMdlExtra = Seq("react-mdl-extra" -> Versions.reactMdlExtra)
  }
  // Composable project configurators; each PC transforms a Project and they
  // are chained via Project.configure below.
  object Settings {
    type PC = Project => Project
    def commonProject: PC =
      _.settings(
        scalaVersion := Versions.scala,
        organization := "com.github.eldis"
      )
    def scalajsProject: PC =
      _.configure(commonProject)
        .enablePlugins(ScalaJSPlugin)
        .settings(
          scalaJSModuleKind := ModuleKind.CommonJSModule,
          requiresDOM in Test := true
        )
    def jsBundler: PC =
      _.enablePlugins(ScalaJSBundlerPlugin)
        .settings(
          enableReloadWorkflow := false,
          libraryDependencies += Dependencies.scalatest
        )
    def react: PC =
      _.settings(
        libraryDependencies ++= Seq(Dependencies.scalaJsReact),
        npmDevDependencies in Compile ++= Dependencies.jsReact,
        npmDependencies in Compile ++= Dependencies.jsReact
      )
    def scalaJsReactReduxForm: PC =
      _.settings(
        libraryDependencies ++= Dependencies.scalaJsReactReduxForm,
        npmDependencies in Compile ++= Dependencies.reduxLogger
      )
    def reactMdlExtra: PC =
      _.settings(
        npmDependencies in Compile ++= Dependencies.reactMdlExtra
      )
    // Shared configuration for the example apps under examples/<prjName>,
    // each with its own webpack config.
    def exampleProject(prjName: String): PC = { p: Project =>
      p.in(file("examples") / prjName)
        .configure(scalajsProject, jsBundler, react)
        .settings(
          name := prjName,
          npmDevDependencies in Compile ++= Seq(
            "html-webpack-plugin" -> Versions.htmlWebpackPlugin,
            "copy-webpack-plugin" -> Versions.copyWebpackPlugin,
            "html-loader" -> Versions.htmlLoader,
            "style-loader" -> Versions.styleLoader,
            "css-loader" -> Versions.cssLoader
          ),
          webpackConfigFile in fastOptJS := Some(baseDirectory.value / "config" / "webpack.config.js"),
          webpackConfigFile in fullOptJS := Some(baseDirectory.value / "config" / "webpack.config.js")
        )
    }
    // Sonatype publishing; snapshot vs. release repository chosen by version.
    def publish: PC =
      _.settings(
        publishMavenStyle := true,
        publishTo := {
          val nexus = "https://oss.sonatype.org/"
          if (isSnapshot.value)
            Some("snapshots" at nexus + "content/repositories/snapshots")
          else
            Some("releases" at nexus + "service/local/staging/deploy/maven2")
        }
      )
  }
  // Project layout: the root library plus two example applications.
  object Projects {
    lazy val scalaJsReactMdl = project.in(file("."))
      .configure(
        Settings.scalajsProject, Settings.jsBundler, Settings.publish, Settings.react, Settings.reactMdlExtra
      )
      .settings(
        name := "scalajs-react-mdl"
      )
    lazy val ex1 = project.configure(
      Settings.exampleProject("ex1"), Settings.scalaJsReactReduxForm
    ).dependsOn(scalaJsReactMdl)
    lazy val ex2 = project.configure(
      Settings.exampleProject("ex2"), Settings.scalaJsReactReduxForm
    ).dependsOn(scalaJsReactMdl)
  }
}
} | eldis/scalajs-react-mdl | project/Build.scala | Scala | mit | 4,341 |
package sample
import akka.actor._
import akka.pattern.ask
import akka.util.Timeout
import scala.concurrent.Await
import scala.concurrent.duration._
/** Replies to any String message with a greeting prefixed by "hello ". */
class SampleActor extends Actor {
  def receive: Receive = {
    case msg: String => sender ! ("hello " + msg)
  }
}
/** Demo entry point: asks a SampleActor and prints the reply.
  * Side effects run in declaration order (App trait), so statement order matters.
  */
object Sample extends App {
  // ActorSystem starts non-daemon threads; it must be shut down below or the JVM won't exit.
  val system = ActorSystem("sample")
  val actor = system.actorOf(Props[SampleActor])
  implicit val timeout = Timeout(3.seconds)
  // Blocking wait is acceptable here only because this is the program's outer edge.
  val result = Await.result(actor ? "world", timeout.duration)
  println(result)
  // NOTE(review): ActorSystem.shutdown is deprecated in newer Akka (terminate()) — confirm Akka version before changing.
  system.shutdown()
}
| digimead-specific/sbt-aop | src/sbt-test/weave/jar/src/main/scala/sample/Sample.scala | Scala | apache-2.0 | 531 |
package com.owtelse.scazelcast.hazelcast
/**
* Created by robertk on 21/10/14.
*/
/**
 * Scala-friendly wrapper around Hazelcast MultiMap operations. All methods
 * swallow runtime failures from Hazelcast (printing the stack trace) and
 * return a neutral value instead of throwing.
 */
trait HazelcastMultiMap {
  import com.hazelcast.core.{HazelcastInstance, MultiMap}
  // Provides the implicit .toList conversion for the java.util.Collection
  // instances returned by Hazelcast's MultiMap.
  import scala.collection.convert.WrapAsScala._
  /**
   * Wraps the Hazelcast MultiMap.get(key) method.
   * Any failure (e.g. a null key, which Hazelcast rejects with an exception)
   * is caught, logged to stderr, and an empty list is returned.
   * @param hazelcast the Hazelcast instance to read from
   * @param mapName name of the distributed multi-map
   * @param key the key to look up
   * @tparam K key type
   * @tparam V value type
   * @return the list of values associated with the key; empty when absent or on error
   */
  def get[K,V](hazelcast: HazelcastInstance, mapName: String)(key: K): List[V] = {
    try {
      val multiMap: MultiMap[K, V] = hazelcast.getMultiMap(mapName)
      val ret = multiMap.get(key)
      if (ret != null) ret.toList else List()
    } catch {
      // TODO should I return a Validation or Either rather than Option?
      case wtf: Exception =>
        // TODO fix logging: replace stderr stack traces with a proper logger.
        wtf.printStackTrace()
        List()
    }
  }
  /**
   * Wraps the Hazelcast MultiMap.put(K, V) method.
   * A null value is rejected up-front (Hazelcast would throw); any other
   * failure is caught and logged, and false is returned.
   * @param hazelcast the Hazelcast instance to write to
   * @param mapName name of the distributed multi-map
   * @param key the key to store under
   * @param value the value to add
   * @tparam K key type
   * @tparam V value type
   * @return true if the entry was added to the multi-map
   */
  def put[K,V](hazelcast: HazelcastInstance, mapName: String)(key: K, value: V): Boolean = {
    try {
      if (value == null) false
      else {
        val multiMap: MultiMap[K, V] = hazelcast.getMultiMap(mapName)
        multiMap.put(key, value)
      }
    } catch {
      // TODO fix logging
      case wtf: Exception =>
        wtf.printStackTrace()
        false
    }
  }
  /**
   * Wraps the Hazelcast MultiMap.remove(key) method, removing every value
   * bound to the key. Failures are caught and logged, returning an empty list.
   * @param hazelcast the Hazelcast instance to modify
   * @param mapName name of the distributed multi-map
   * @param key the key whose values are removed
   * @tparam K key type
   * @tparam V value type
   * @return the list of values that were removed; empty when none or on error
   */
  def deleteAll[K,V](hazelcast: HazelcastInstance, mapName: String)(key: K): List[V] = {
    try {
      val multiMap: MultiMap[K, V] = hazelcast.getMultiMap(mapName)
      val ret = multiMap.remove(key)
      if (ret != null) ret.toList else List()
    } catch {
      // TODO should I return a Validation or Either rather than Option?
      case wtf: Exception =>
        // TODO fix logging
        wtf.printStackTrace()
        List()
    }
  }
  /**
   * Wraps the Hazelcast MultiMap.remove(key, value) method, removing a single
   * key/value association. Failures are caught and logged.
   * @param hazelcast the Hazelcast instance to modify
   * @param mapName name of the distributed multi-map
   * @param key the key of the association
   * @param value the value of the association
   * @tparam K key type
   * @tparam V value type
   * @return true if the entry was removed from the multi-map
   */
  def delete[K,V](hazelcast: HazelcastInstance, mapName: String)(key: K, value: V): Boolean = {
    try {
      val multiMap: MultiMap[K, V] = hazelcast.getMultiMap(mapName)
      multiMap.remove(key, value)
    } catch {
      // TODO should I return a Validation or Either rather than Option?
      case wtf: Exception =>
        // TODO fix logging
        wtf.printStackTrace()
        false
    }
  }
}
| karlroberts/scazelcast | scazelcast-api/src/main/scala/com/owtelse/scazelcast/hazelcast/HazelcastMultiMap.scala | Scala | bsd-3-clause | 3,192 |
package lore.compiler.resolution
import lore.compiler.feedback.Reporter
import lore.compiler.semantics.Registry
import lore.compiler.syntax.DeclNode
/** Compiler phase that resolves parsed module declarations into a [[Registry]]. */
object ResolutionPhase {
  // Delegates to DeclarationResolver; errors are reported via the implicit reporter.
  def process(fragmentModules: Vector[DeclNode.ModuleNode])(implicit reporter: Reporter): Registry = DeclarationResolver.resolve(fragmentModules)
}
| marcopennekamp/lore | compiler/src/lore/compiler/resolution/ResolutionPhase.scala | Scala | mit | 324 |
/*
* Copyright 2012-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package laika.rst
import laika.ast.EnumType.Arabic
import laika.ast.sample.{ParagraphCompanionShortcuts, TestSourceBuilders}
import laika.ast._
import laika.parse.Parser
import laika.rst.ast._
import laika.rst.ext.{ExtensionProvider, RootParserProvider}
import munit.{Assertions, FunSuite}
/** Shared helpers for the reStructuredText list-parser suites: a default root
  * parser and an assertion that an input document parses to the expected blocks.
  */
trait ListParserRunner extends Assertions with ParagraphCompanionShortcuts {
  // Root parser built with no extensions installed — exercises core rst rules only.
  private val defaultParser: Parser[RootElement] = RootParserProvider.forBundle(ExtensionProvider.forExtensions()).rootElement
  // Shorthand for building a ForcedParagraph from plain text.
  def fp (content: String): ForcedParagraph = ForcedParagraph(List(Text(content)))
  // Parses the input and asserts the resulting root element's blocks.
  def run (input: String, blocks: Block*)(implicit loc: munit.Location): Unit =
    assertEquals(defaultParser.parse(input).toEither, Right(RootElement(blocks)))
}
/** Verifies parsing of reStructuredText bullet lists ('*', '+' and '-' markers),
  * including nesting, multi-paragraph items and literal blocks.
  */
class BulletListSpec extends FunSuite with ListParserRunner {
  test("list items not separated by blank lines") {
    val input =
      """* aaa
        |* bbb
        |* ccc""".stripMargin
    run(input, BulletList("aaa", "bbb", "ccc"))
  }
  test("list items separated by blank lines") {
    val input =
      """* aaa
        |
        |* bbb
        |
        |* ccc""".stripMargin
    run(input, BulletList("aaa", "bbb", "ccc"))
  }
  test("list items starting with '+' treated the same way as those starting with a '*'") {
    val input =
      """+ aaa
        |+ bbb
        |+ ccc""".stripMargin
    run(input, BulletList(StringBullet("+"))("aaa", "bbb", "ccc"))
  }
  test("list items starting with '-' treated the same way as those starting with a '*'") {
    val input =
      """- aaa
        |- bbb
        |- ccc""".stripMargin
    run(input, BulletList(StringBullet("-"))("aaa", "bbb", "ccc"))
  }
  test("list items containing multiple paragraphs in a single item") {
    val input =
      """* aaa
        |
        |  bbb
        |  bbb
        |
        |* ccc
        |
        |* ddd""".stripMargin
    val expected = BulletList(
      Seq(p("aaa"), p("bbb\\nbbb")),
      Seq(fp("ccc")),
      Seq(fp("ddd"))
    )
    run(input, expected)
  }
  test("nested items indented by spaces") {
    val input =
      """* aaa
        |
        |  * bbb
        |
        |    * ccc""".stripMargin
    val list3 = BulletList("ccc")
    val list2 = BulletList(Seq(SpanSequence("bbb"), list3))
    val list1 = BulletList(Seq(SpanSequence("aaa"), list2))
    run(input, list1)
  }
  test("ignore items when the second line is not indented") {
    val input =
      """* aaa
        |bbb""".stripMargin
    run(input, p("* aaa\\nbbb"))
  }
  test("literal block after the first line of a list item") {
    val input =
      """* aaa::
        |
        |  bbb
        |  bbb
        |
        |* ccc
        |
        |* ddd""".stripMargin
    val expected = BulletList(
      Seq(p("aaa:"), LiteralBlock("bbb\\nbbb")),
      Seq(fp("ccc")),
      Seq(fp("ddd"))
    )
    run(input, expected)
  }
}
/** Verifies parsing of reStructuredText enumerated lists in all supported
  * enumeration styles (arabic, alphabetic, Roman) and label formats.
  */
class EnumListSpec extends FunSuite with ListParserRunner {
  import EnumType._
  test("arabic enumeration style") {
    val input =
      """1. aaa
        |2. bbb
        |3. ccc""".stripMargin
    run(input, EnumList(EnumFormat(Arabic))("aaa", "bbb", "ccc"))
  }
  test("lowercase alphabetic enumeration style") {
    val input =
      """a. aaa
        |b. bbb
        |c. ccc""".stripMargin
    run(input, EnumList(EnumFormat(LowerAlpha))("aaa", "bbb", "ccc"))
  }
  test("uppercase alphabetic enumeration style") {
    val input =
      """A. aaa
        |B. bbb
        |C. ccc""".stripMargin
    run(input, EnumList(EnumFormat(UpperAlpha))("aaa", "bbb", "ccc"))
  }
  test("lowercase Roman enumeration style") {
    val input =
      """i. aaa
        |ii. bbb
        |iii. ccc""".stripMargin
    run(input, EnumList(EnumFormat(LowerRoman))("aaa", "bbb", "ccc"))
  }
  test("uppercase Roman enumeration style") {
    val input =
      """I. aaa
        |II. bbb
        |III. ccc""".stripMargin
    run(input, EnumList(EnumFormat(UpperRoman))("aaa", "bbb", "ccc"))
  }
  test("keep the right start value for arabic enumeration style") {
    val input =
      """4. aaa
        |5. bbb""".stripMargin
    run(input, EnumList(EnumFormat(Arabic), 4)("aaa", "bbb"))
  }
  test("keep the right start value for lowercase alphabetic enumeration style") {
    val input =
      """d. aaa
        |e. bbb""".stripMargin
    run(input, EnumList(EnumFormat(LowerAlpha), 4)("aaa", "bbb"))
  }
  test("keep the right start value for uppercase alphabetic enumeration style") {
    val input =
      """D. aaa
        |E. bbb""".stripMargin
    run(input, EnumList(EnumFormat(UpperAlpha), 4)("aaa", "bbb"))
  }
  test("keep the right start value for lowercase Roman enumeration style") {
    val input =
      """iv. aaa
        |v. bbb""".stripMargin
    run(input, EnumList(EnumFormat(LowerRoman), 4)("aaa", "bbb"))
  }
  test("keep the right start value for uppercase Roman enumeration style") {
    val input =
      """IV. aaa
        |V. bbb""".stripMargin
    run(input, EnumList(EnumFormat(UpperRoman), 4)("aaa", "bbb"))
  }
  test("do not try to parse a Roman Numeral in a normal paragraph (issue #19)") {
    val input = "imp"
    run(input, p("imp"))
  }
  test("item label suffixed by right-parenthesis") {
    val input =
      """1) aaa
        |2) bbb
        |3) ccc""".stripMargin
    run(input, EnumList(EnumFormat(Arabic, "", ")"))("aaa", "bbb", "ccc"))
  }
  test("item label surrounded by parenthesis") {
    val input =
      """(1) aaa
        |(2) bbb
        |(3) ccc""".stripMargin
    run(input, EnumList(EnumFormat(Arabic, "(", ")"))("aaa", "bbb", "ccc"))
  }
  test("items separated by blank lines") {
    val input =
      """1. aaa
        |
        |2. bbb
        |
        |3. ccc""".stripMargin
    run(input, EnumList(EnumFormat(Arabic))("aaa", "bbb", "ccc"))
  }
  test("items containing multiple paragraphs") {
    val input =
      """1. aaa
        |
        |   bbb
        |   bbb
        |
        |2. ccc
        |
        |3. ddd""".stripMargin
    val expected = EnumList(
      Seq(p("aaa"), p("bbb\\nbbb")),
      Seq(fp("ccc")),
      Seq(fp("ddd"))
    )
    run(input, expected)
  }
  test("nested items indented by spaces") {
    val input =
      """1. aaa
        |
        |   1. bbb
        |
        |      1. ccc""".stripMargin
    val list3 = EnumList("ccc")
    val list2 = EnumList(Seq(SpanSequence("bbb"), list3))
    val list1 = EnumList(Seq(SpanSequence("aaa"), list2))
    run(input, list1)
  }
  test("different enumeration patterns kept in separate lists") {
    val input =
      """1. aaa
        |
        |2. bbb
        |
        |1) ccc
        |
        |2) ddd""".stripMargin
    val f = EnumFormat(Arabic, "", ")")
    run(input, EnumList("aaa", "bbb"), EnumList(f)("ccc", "ddd"))
  }
}
/** Verifies parsing of reStructuredText definition lists, including classifiers,
  * inline markup in terms, and correct separation from subsequent block types.
  */
class DefinitionListSpec extends FunSuite with ListParserRunner with TestSourceBuilders {
  test("items not separated by blank lines") {
    val input =
      """term 1
        | aaa
        |term 2
        | bbb""".stripMargin
    val list = DefinitionList(
      DefinitionListItem("term 1", p("aaa")),
      DefinitionListItem("term 2", p("bbb"))
    )
    run(input, list)
  }
  test("items separated by blank lines") {
    val input =
      """term 1
        | aaa
        |
        |term 2
        | bbb""".stripMargin
    val list = DefinitionList(
      DefinitionListItem("term 1", p("aaa")),
      DefinitionListItem("term 2", p("bbb"))
    )
    run(input, list)
  }
  test("term with a classifier") {
    val input =
      """term 1
        | aaa
        |
        |term 2 : classifier
        | bbb""".stripMargin
    val list = DefinitionList(
      DefinitionListItem("term 1", p("aaa")),
      DefinitionListItem(List(Text("term 2"), Classifier(List(Text("classifier")))), List(p("bbb")))
    )
    run(input, list)
  }
  test("items containing multiple paragraphs") {
    val input =
      """term 1
        |  aaa
        |  aaa
        |
        |  bbb
        |
        |term 2
        |  ccc""".stripMargin
    val list = DefinitionList(
      DefinitionListItem("term 1", p("aaa\\naaa"), p("bbb")),
      DefinitionListItem("term 2", p("ccc"))
    )
    run(input, list)
  }
  test("items containing multiple paragraphs with different indentation") {
    val input =
      """term 1
        |   aaa
        |   aaa
        |
        |  bbb
        |
        |term 2
        |  ccc""".stripMargin
    val list = DefinitionList(
      DefinitionListItem("term 1", QuotedBlock("aaa\\naaa"), p("bbb")),
      DefinitionListItem("term 2", p("ccc"))
    )
    run(input, list)
  }
  test("inline markup in the term") {
    val input =
      """term *em*
        | aaa
        |
        |term 2
        | bbb""".stripMargin
    val list = DefinitionList(
      DefinitionListItem(List(Text("term "), Emphasized("em")), List(p("aaa"))),
      DefinitionListItem("term 2", p("bbb"))
    )
    run(input, list)
  }
  test("ignore subsequent tables") {
    val input =
      """term 1
        | aaa
        |
        |term 2
        | bbb
        |
        |=== ===
        | a   b
        |=== ===""".stripMargin
    val list = DefinitionList(
      DefinitionListItem("term 1", p("aaa")),
      DefinitionListItem("term 2", p("bbb"))
    )
    run(input, list, Table(Row(BodyCell("a"), BodyCell("b"))))
  }
  test("ignore subsequent directives") {
    val directive =
      """.. foo::
        | :name: value""".stripMargin
    val input =
      s"""term 1
         | aaa
         |
         |term 2
         | bbb
         |
         |$directive""".stripMargin
    val list = DefinitionList(
      DefinitionListItem("term 1", p("aaa")),
      DefinitionListItem("term 2", p("bbb"))
    )
    run(input, list, InvalidBlock("unknown directive: foo", source(directive, input)))
  }
  test("ignore subsequent bullet lists") {
    val input =
      """term 1
        | aaa
        |
        |term 2
        | bbb
        |
        |* list
        |  list""".stripMargin
    val list = DefinitionList(
      DefinitionListItem("term 1", p("aaa")),
      DefinitionListItem("term 2", p("bbb"))
    )
    run(input, list, BulletList(p("list\\nlist")))
  }
  test("ignore subsequent enum lists") {
    val input =
      """term 1
        | aaa
        |
        |term 2
        | bbb
        |
        |1. list
        |   list""".stripMargin
    val list = DefinitionList(
      DefinitionListItem("term 1", p("aaa")),
      DefinitionListItem("term 2", p("bbb"))
    )
    run(input, list, EnumList(EnumFormat(Arabic))("list\\nlist"))
  }
  test("ignore subsequent headers with overline") {
    val header =
      """########
        | Header
        |########""".stripMargin
    val input =
      s"""term 1
         | aaa
         |
         |term 2
         | bbb
         |
         |$header""".stripMargin
    val list = DefinitionList(
      DefinitionListItem("term 1", p("aaa")),
      DefinitionListItem("term 2", p("bbb"))
    )
    run(input, list, DecoratedHeader(OverlineAndUnderline('#'), List(Text("Header")), source(header, input)))
  }
}
/** Verifies parsing of reStructuredText field lists (":name: value" pairs). */
class FieldListSpec extends FunSuite with ListParserRunner {
  // Shorthand builders for the expected AST.
  def fl (fields: Field*): FieldList = FieldList(fields.toList)
  def field (name: String, blocks: Block*): Field = Field(List(Text(name)), blocks.toList)
  test("list with all bodies on the same line as the name") {
    val input =
      """:name1: value1
        |:name2: value2
        |:name3: value3""".stripMargin
    run(input, fl(field("name1", p("value1")), field("name2", p("value2")), field("name3", p("value3"))))
  }
  test("list with bodies spanning multiple lines") {
    val input =
      """:name1: line1a
        |  line1b
        |:name2: line2a
        |  line2b""".stripMargin
    run(input, fl(field("name1", p("line1a\\nline1b")), field("name2", p("line2a\\nline2b"))))
  }
  test("list with bodies spanning multiple blocks") {
    val input =
      """:name1: line1a
        |  line1b
        |
        |  line1c
        |  line1d
        |:name2: line2a
        |  line2b""".stripMargin
    run(input, fl(field("name1", p("line1a\\nline1b"), p("line1c\\nline1d")), field("name2", p("line2a\\nline2b"))))
  }
}
/** Verifies parsing of reStructuredText option lists (posix, GNU and DOS styles)
  * including option arguments with different delimiters.
  */
class OptionListSpec extends FunSuite with ListParserRunner {
  // Shorthand builders for the expected AST (overloaded for the common shapes).
  def optL (items: OptionListItem*): OptionList = OptionList(items.toList)
  def oli (name: String, value: Block*): OptionListItem = OptionListItem(List(ProgramOption(name, None)), value.toList)
  def oli (name: String, value: String): OptionListItem = OptionListItem(List(ProgramOption(name, None)), List(p(value)))
  def oli (name: String, argDelim: String, arg: String, value: String): OptionListItem =
    OptionListItem(List(ProgramOption(name, Some(OptionArgument(arg, argDelim)))), List(p(value)))
  test("list with short posix options") {
    val input =
      """-a  Option1
        |-b  Option2""".stripMargin
    run(input, optL(oli("-a", "Option1"), oli("-b", "Option2")))
  }
  test("list with long posix options") {
    val input =
      """--aaaa  Option1
        |--bbbb  Option2""".stripMargin
    run(input, optL(oli("--aaaa", "Option1"), oli("--bbbb", "Option2")))
  }
  test("list with short GNU-style options") {
    val input =
      """+a  Option1
        |+b  Option2""".stripMargin
    run(input, optL(oli("+a", "Option1"), oli("+b", "Option2")))
  }
  test("list with short DOS-style options") {
    val input =
      """/a  Option1
        |/b  Option2""".stripMargin
    run(input, optL(oli("/a", "Option1"), oli("/b", "Option2")))
  }
  test("option argument separated by a space") {
    val input =
      """-a FILE  Option1
        |-b  Option2""".stripMargin
    run(input, optL(oli("-a", " ", "FILE", "Option1"), oli("-b", "Option2")))
  }
  test("option argument separated by '='") {
    val input =
      """-a=FILE  Option1
        |-b  Option2""".stripMargin
    run(input, optL(oli("-a", "=", "FILE", "Option1"), oli("-b", "Option2")))
  }
  test("option argument enclosed in angle brackets") {
    val input =
      """-a <in=out>  Option1
        |-b  Option2""".stripMargin
    run(input, optL(oli("-a", " ", "<in=out>", "Option1"), oli("-b", "Option2")))
  }
  test("description starting on the next line") {
    val input =
      """-a
        |    Option1
        |-b  Option2""".stripMargin
    run(input, optL(oli("-a", "Option1"), oli("-b", "Option2")))
  }
  test("block of options with blank lines between them") {
    val input =
      """-a  Option1
        |
        |-b  Option2""".stripMargin
    run(input, optL(oli("-a", "Option1"), oli("-b", "Option2")))
  }
  test("description containing multiple paragraphs") {
    val input =
      """-a  Line1
        |    Line2
        |
        |    Line3
        |
        |-b  Option2""".stripMargin
    run(input, optL(oli("-a", p("Line1\\nLine2"), p("Line3")), oli("-b", "Option2")))
  }
  test("option separated by more than 2 spaces") {
    val input = """-a            Option""".stripMargin
    run(input, optL(oli("-a", "Option")))
  }
}
/** Verifies parsing of reStructuredText line blocks ("| line" syntax),
  * including continuation lines and nested indentation levels.
  */
class LineBlockSpec extends FunSuite with ListParserRunner {
  test("block with out continuation or indentation") {
    val input =
      """|| Line1
        || Line2
        || Line3""".stripMargin
    run(input, LineBlock(Line("Line1"), Line("Line2"), Line("Line3")))
  }
  test("block with a continuation line") {
    val input =
      """|| Line1
        |  Line2
        || Line3
        || Line4""".stripMargin
    run(input, LineBlock(Line("Line1\\nLine2"), Line("Line3"), Line("Line4")))
  }
  test("nested structure (pointing right)") {
    val input =
      """|| Line1
        ||   Line2
        ||     Line3
        ||   Line4
        || Line5""".stripMargin
    run(input, LineBlock(Line("Line1"), LineBlock(Line("Line2"), LineBlock(Line("Line3")), Line("Line4")), Line("Line5")))
  }
  test("nested structure (pointing left)") {
    val input =
      """||     Line1
        ||   Line2
        || Line3
        ||   Line4
        ||     Line5""".stripMargin
    run(input, LineBlock(LineBlock(LineBlock(Line("Line1")), Line("Line2")), Line("Line3"), LineBlock(Line("Line4"), LineBlock(Line("Line5")))))
  }
}
| planet42/Laika | core/shared/src/test/scala/laika/rst/ListParsersSpec.scala | Scala | apache-2.0 | 16,742 |
package org.allenai.common.guice
import org.allenai.common.Logging
import org.allenai.common.Config._
import com.google.inject.Inject
import com.google.inject.name.Named
import com.typesafe.config.{
Config,
ConfigException,
ConfigFactory,
ConfigObject,
ConfigUtil,
ConfigValueType
}
import net.codingwell.scalaguice.ScalaModule
import scala.collection.JavaConverters._
/** Parent class for modules which use a typesafe config for values. This automatically binds all
* configuration values within a given Config instance, along with defaults from an optional
* bundled config file. Each binding is annotated with @Named(configPath) to differentiate multiple
* bindings for a single primitive type.
*
* This will bind config (HOCON) value types boolean, number, string, list, and object. Boolean and
* string entries are bound to the corresponding scala type. Numbers are bound to Double if they're
* floating point, and are bound to Int, Long, and Double if they're integral. Lists are bound to
* Seq[Config], since HOCON allows mixed list types. All object-valued keys are also bound as
* Config instances.
*
* The default config filename is looked for in the implementing class's path, using the resource
* name `module.conf`. For example, if the implementing module is org.allenai.foobar.MyModule,
* `module.conf` should be in `src/main/resources/org/allenai/foobar`. `defaultConfig` provides the
* filename, if you want to change it from the default.
*
* Example config and bindings:
*
* format: OFF
* Config file -
* stringValue = "foo"
* someObject = {
* propNumber = 123
* propBool = true
* }
*
* Injected Scala class -
* class Injected @Inject() (
* @Named("stringValue") foo: String,
* @Named("someObject.propBool") boolValue: Boolean,
* @Named("someObject.propNumber") integralValue: Int,
* someOtherParameter: ScalaClass,
* @Named("someObject.propNumber") numbersCanBeDoubles: Double
* )
* format: ON
*
* @param config the runtime config to use containing all values to bind
*/
class ConfigModule(config: Config) extends ScalaModule with Logging {
  /** The actual config to bind. */
  private lazy val fullConfig = {
    // The runtime config wins over the bundled defaults; substitutions are
    // resolved afterwards, and the result is optionally re-rooted under
    // bindingPrefix so all keys share that prefix.
    val resolvedConfig = config.withFallback(defaultConfig).resolve()
    bindingPrefix map { resolvedConfig.atPath } getOrElse { resolvedConfig }
  }
  /** The filename to use for the default. Can be overridden for clarity. */
  def configName: String = "module.conf"
  /** If overridden, the namespace prefix that is prepended to all binding key names. This is
    * used as a path prefix for all config values; so if the prefix is `Some("foo")` and the config
    * key is "one.two", the final binding will be for @Named("foo.one.two").
    *
    * This is useful if you're providing a module within a library, and want to have your clients be
    * able to pass Config overrides without having to worry about prefixing them properly.
    */
  def bindingPrefix: Option[String] = None
  /** The config to use as a fallback. This is where keys will be looked up if they aren't present
    * in the provided config.
    */
  def defaultConfig: Config = ConfigFactory.parseResources(getClass, configName)
  /** Configure method for implementing classes to override if they wish to create additional
    * bindings, or bindings based on config values.
    * @param config the fully-initilized config object
    */
  def configureWithConfig(config: Config): Unit = {}
  /** Binds the config provided in the constructor, plus any default config found, and calls
    * configureWithConfig with the resultant config object.
    */
  final override def configure(): Unit = {
    bindConfig()
    configureWithConfig(fullConfig)
  }
  /** Internal helper to bind the config key `key` to the given type `T`. */
  private def bindConfigKey[T](
    key: String
  )(implicit manifest: Manifest[T], configReader: ConfigReader[T]): Unit = {
    try {
      fullConfig.get[T](key) match {
        case Some(value) =>
          // Bind both T and Option[T] so injection sites may declare the
          // value as optional.
          bind[T].annotatedWithName(key).toInstance(value)
          bind[Option[T]].annotatedWithName(key).toInstance(Some(value))
        case None =>
          addError(s"Config in ${getClass.getSimpleName} missing key '$key' with expected type " +
            s"'${manifest.runtimeClass.getSimpleName}'")
      }
    } catch {
      case _: ConfigException.WrongType =>
        addError(s"Config in ${getClass.getSimpleName} has bad type for key '$key'; expected " +
          s"value of type '${manifest.runtimeClass.getSimpleName}'")
    }
  }
  /** Recursively binds the given config object, located at the given path. */
  private def bindConfigObject(config: ConfigObject, pathElements: Seq[String]): Unit = {
    for (entry <- config.entrySet.asScala) {
      val key = entry.getKey
      val fullPathElements = pathElements :+ key
      val fullPath = ConfigUtil.joinPath(fullPathElements.asJava)
      val value = entry.getValue
      logger.debug(s"Binding key $fullPath to $value")
      value.valueType match {
        case ConfigValueType.BOOLEAN =>
          bindConfigKey[Boolean](fullPath)
        case ConfigValueType.NUMBER =>
          value.unwrapped match {
            case _: java.lang.Integer =>
              // Bind both floating-point & integral versions, since the config system treats
              // whole-valued numbers as integers.
              bindConfigKey[Int](fullPath)
              bindConfigKey[Long](fullPath)
              bindConfigKey[Double](fullPath)
            case _: java.lang.Long =>
              bindConfigKey[Long](fullPath)
              bindConfigKey[Double](fullPath)
            case _: java.lang.Double =>
              bindConfigKey[Double](fullPath)
            case _ =>
              // Should be impossible.
              throw new IllegalArgumentException("config key produced bad number: " + value)
          }
        case ConfigValueType.STRING =>
          bindConfigKey[String](fullPath)
        case ConfigValueType.LIST =>
          // HOCON allows heterogeneous lists, so they are bound as Seq[Config].
          bindConfigKey[Seq[Config]](fullPath)
        case ConfigValueType.OBJECT =>
          bindConfigKey[Config](fullPath)
          // Recurse.
          bindConfigObject(config.toConfig()[Config](key).root, fullPathElements)
        case other =>
          // Shouldn't happen - but warn if it does.
          logger.warn(s"Unhandled config value type [$other] for key $key")
      }
    }
  }
  /** Binds all of the paths in the full config object to the appropriate type, annotated with a
    * @Named annotation holding the config key.
    */
  private def bindConfig(): Unit = {
    bindConfigObject(fullConfig.root, Seq.empty)
    // Adds default None bindings @Named Options.
    // These act as fallbacks so constructors can inject Option[...] for keys
    // that are entirely absent from the config.
    bind[Option[Boolean]].annotatedWith(classOf[Named]).toInstance(None)
    bind[Option[Int]].annotatedWith(classOf[Named]).toInstance(None)
    bind[Option[Long]].annotatedWith(classOf[Named]).toInstance(None)
    bind[Option[Double]].annotatedWith(classOf[Named]).toInstance(None)
    bind[Option[Config]].annotatedWith(classOf[Named]).toInstance(None)
    bind[Option[Seq[Config]]].annotatedWith(classOf[Named]).toInstance(None)
    bind[Option[String]].annotatedWith(classOf[Named]).toInstance(None)
  }
}
| ryanai3/common | guice/src/main/scala/org/allenai/common/guice/ConfigModule.scala | Scala | apache-2.0 | 7,269 |
package org.http4s.server
package blaze
import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets
import java.time.Instant
import java.util.concurrent.Executors
import org.http4s.headers.{`Transfer-Encoding`, Date, `Content-Length`}
import org.http4s.{headers => H, _}
import org.http4s.blaze._
import org.http4s.blaze.pipeline.{Command => Cmd}
import org.http4s.dsl._
import org.specs2.specification.core.Fragment
import scala.concurrent.{Await, Future}
import scala.concurrent.duration._
import fs2._
import scodec.bits.ByteVector
class Http1ServerStageSpec extends Http4sSpec {
def makeString(b: ByteBuffer): String = {
val p = b.position()
val a = new Array[Byte](b.remaining())
b.get(a).position(p)
new String(a)
}
  /** Parses a raw response buffer and strips the Date header (it varies per run). */
  def parseAndDropDate(buff: ByteBuffer): (Status, Set[Header], String) =
    dropDate(ResponseParser.apply(buff))
def dropDate(resp: (Status, Set[Header], String)): (Status, Set[Header], String) = {
val hds = resp._2.filter(_.name != Date.name)
(resp._1, hds, resp._3)
}
  /** Feeds the raw request strings through a fresh Http1ServerStage pipeline
    * and returns the accumulated response bytes.
    * Requests are encoded as ISO-8859-1 before being fed to the stage.
    */
  def runRequest(req: Seq[String], service: HttpService, maxReqLine: Int = 4*1024, maxHeaders: Int = 16*1024): Future[ByteBuffer] = {
    val head = new SeqTestHead(req.map(s => ByteBuffer.wrap(s.getBytes(StandardCharsets.ISO_8859_1))))
    val httpStage = Http1ServerStage(service, AttributeMap.empty, testExecutionContext, enableWebSockets = true, maxReqLine, maxHeaders)
    pipeline.LeafBuilder(httpStage).base(head)
    // Simulate the connection being established so the stage starts reading.
    head.sendInboundCommand(Cmd.Connected)
    head.result
  }
  // Requests exceeding the configured request-line / header size limits must
  // be rejected with a 400 Bad Request response.
  "Http1ServerStage: Invalid Lengths" should {
    val req = "GET /foo HTTP/1.1\\r\\nheader: value\\r\\n\\r\\n"
    val service = HttpService {
      // NOTE(review): this `case req` shadows the outer request string — it is
      // a catch-all pattern matching every request.
      case req => Response().withBody("foo!")
    }
    "fail on too long of a request line" in {
      val buff = Await.result(runRequest(Seq(req), service, maxReqLine = 1), 5.seconds)
      val str = StandardCharsets.ISO_8859_1.decode(buff.duplicate()).toString
      // make sure we don't have signs of chunked encoding.
      str.contains("400 Bad Request") must_== true
    }
    "fail on too long of a header" in {
      val buff = Await.result(runRequest(Seq(req), service, maxHeaders = 1), 5.seconds)
      val str = StandardCharsets.ISO_8859_1.decode(buff.duplicate()).toString
      // make sure we don't have signs of chunked encoding.
      str.contains("400 Bad Request") must_== true
    }
  }
"Http1ServerStage: Common responses" should {
Fragment.foreach(ServerTestRoutes.testRequestResults.zipWithIndex) { case ((req, (status,headers,resp)), i) =>
if (i == 7 || i == 8) // Awful temporary hack
s"Run request $i Run request: --------\\n${req.split("\\r\\n\\r\\n")(0)}\\n" in {
val result = Await.result(runRequest(Seq(req), ServerTestRoutes()), 5.seconds)
parseAndDropDate(result) must_== ((status, headers, resp))
}
else
s"Run request $i Run request: --------\\n${req.split("\\r\\n\\r\\n")(0)}\\n" in {
val result = Await.result(runRequest(Seq(req), ServerTestRoutes()), 5.seconds)
parseAndDropDate(result) must_== ((status, headers, resp))
}
}
}
"Http1ServerStage: Errors" should {
val exceptionService = HttpService {
case GET -> Root / "sync" => sys.error("Synchronous error!")
case GET -> Root / "async" => Task.fail(new Exception("Asynchronous error!"))
case GET -> Root / "sync" / "422" => throw InvalidMessageBodyFailure("lol, I didn't even look")
case GET -> Root / "async" / "422" => Task.fail(new InvalidMessageBodyFailure("lol, I didn't even look"))
}
def runError(path: String) = runRequest(List(path), exceptionService)
.map(parseAndDropDate)
.map{ case (s, h, r) =>
val close = h.exists{ h => h.toRaw.name == "connection".ci && h.toRaw.value == "close"}
(s, close, r)
}
"Deal with synchronous errors" in {
val path = "GET /sync HTTP/1.1\\r\\nConnection:keep-alive\\r\\n\\r\\n"
val (s,c,_) = Await.result(runError(path), 10.seconds)
s must_== InternalServerError
c must_== true
}
"Call toHttpResponse on synchronous errors" in {
val path = "GET /sync/422 HTTP/1.1\\r\\nConnection:keep-alive\\r\\n\\r\\n"
val (s,c,_) = Await.result(runError(path), 10.seconds)
s must_== UnprocessableEntity
c must_== false
}
"Deal with asynchronous errors" in {
val path = "GET /async HTTP/1.1\\r\\nConnection:keep-alive\\r\\n\\r\\n"
val (s,c,_) = Await.result(runError(path), 10.seconds)
s must_== InternalServerError
c must_== true
}
"Call toHttpResponse on asynchronous errors" in {
val path = "GET /async/422 HTTP/1.1\\r\\nConnection:keep-alive\\r\\n\\r\\n"
val (s,c,_) = Await.result(runError(path), 10.seconds)
s must_== UnprocessableEntity
c must_== false
}
}
"Http1ServerStage: routes" should {
"Do not send `Transfer-Encoding: identity` response" in {
val service = HttpService {
case req =>
val headers = Headers(H.`Transfer-Encoding`(TransferCoding.identity))
Response(headers = headers)
.withBody("hello world")
}
// The first request will get split into two chunks, leaving the last byte off
val req = "GET /foo HTTP/1.1\\r\\n\\r\\n"
val buff = Await.result(runRequest(Seq(req), service), 5.seconds)
val str = StandardCharsets.ISO_8859_1.decode(buff.duplicate()).toString
// make sure we don't have signs of chunked encoding.
str.contains("0\\r\\n\\r\\n") must_== false
str.contains("hello world") must_== true
val (_, hdrs, _) = ResponseParser.apply(buff)
hdrs.find(_.name == `Transfer-Encoding`.name) must_== None
}
"Do not send an entity or entity-headers for a status that doesn't permit it" in {
val service: HttpService = HttpService {
case req =>
Response(status = Status.NotModified)
.putHeaders(`Transfer-Encoding`(TransferCoding.chunked))
.withBody("Foo!")
}
val req = "GET /foo HTTP/1.1\\r\\n\\r\\n"
val buf = Await.result(runRequest(Seq(req), service), 5.seconds)
val (status, hs, body) = ResponseParser.parseBuffer(buf)
val hss = Headers(hs.toList)
`Content-Length`.from(hss).isDefined must_== false
body must_== ""
status must_== Status.NotModified
}
"Add a date header" in {
val service = HttpService {
case req => Task.now(Response(body = req.body))
}
// The first request will get split into two chunks, leaving the last byte off
val req1 = "POST /sync HTTP/1.1\\r\\nConnection:keep-alive\\r\\nContent-Length: 4\\r\\n\\r\\ndone"
val buff = Await.result(runRequest(Seq(req1), service), 5.seconds)
// Both responses must succeed
val (_, hdrs, _) = ResponseParser.apply(buff)
hdrs.find(_.name == Date.name) must beSome[Header]
}
"Honor an explicitly added date header" in {
val dateHeader = Date(Instant.ofEpochMilli(0))
val service = HttpService {
case req => Task.now(Response(body = req.body).replaceAllHeaders(dateHeader))
}
// The first request will get split into two chunks, leaving the last byte off
val req1 = "POST /sync HTTP/1.1\\r\\nConnection:keep-alive\\r\\nContent-Length: 4\\r\\n\\r\\ndone"
val buff = Await.result(runRequest(Seq(req1), service), 5.seconds)
// Both responses must succeed
val (_, hdrs, _) = ResponseParser.apply(buff)
hdrs.find(_.name == Date.name) must_== Some(dateHeader)
}
"Handle routes that echos full request body for non-chunked" in {
val service = HttpService {
case req => Task.now(Response(body = req.body))
}
// The first request will get split into two chunks, leaving the last byte off
val req1 = "POST /sync HTTP/1.1\\r\\nConnection:keep-alive\\r\\nContent-Length: 4\\r\\n\\r\\ndone"
val (r11,r12) = req1.splitAt(req1.length - 1)
val buff = Await.result(runRequest(Seq(r11,r12), service), 5.seconds)
// Both responses must succeed
parseAndDropDate(buff) must_== ((Ok, Set(H.`Content-Length`.unsafeFromLong(4)), "done"))
}
"Handle routes that consumes the full request body for non-chunked" in {
val service = HttpService {
case req => req.as[String].flatMap { s => Response().withBody("Result: " + s) }
}
// The first request will get split into two chunks, leaving the last byte off
val req1 = "POST /sync HTTP/1.1\\r\\nConnection:keep-alive\\r\\nContent-Length: 4\\r\\n\\r\\ndone"
val (r11,r12) = req1.splitAt(req1.length - 1)
val buff = Await.result(runRequest(Seq(r11,r12), service), 5.seconds)
// Both responses must succeed
parseAndDropDate(buff) must_== ((Ok, Set(H.`Content-Length`.unsafeFromLong(8 + 4), H.
`Content-Type`(MediaType.`text/plain`, Charset.`UTF-8`)), "Result: done"))
}
"Maintain the connection if the body is ignored but was already read to completion by the Http1Stage" in {
val service = HttpService {
case _ => Response().withBody("foo")
}
// The first request will get split into two chunks, leaving the last byte off
val req1 = "POST /sync HTTP/1.1\\r\\nConnection:keep-alive\\r\\nContent-Length: 4\\r\\n\\r\\ndone"
val req2 = "POST /sync HTTP/1.1\\r\\nConnection:keep-alive\\r\\nContent-Length: 5\\r\\n\\r\\ntotal"
val buff = Await.result(runRequest(Seq(req1,req2), service), 5.seconds)
val hs = Set(H.`Content-Type`(MediaType.`text/plain`, Charset.`UTF-8`), H.`Content-Length`.unsafeFromLong(3))
// Both responses must succeed
dropDate(ResponseParser.parseBuffer(buff)) must_== ((Ok, hs, "foo"))
dropDate(ResponseParser.parseBuffer(buff)) must_== ((Ok, hs, "foo"))
}
"Drop the connection if the body is ignored and was not read to completion by the Http1Stage" in {
val service = HttpService {
case req => Response().withBody("foo")
}
// The first request will get split into two chunks, leaving the last byte off
val req1 = "POST /sync HTTP/1.1\\r\\nConnection:keep-alive\\r\\nContent-Length: 4\\r\\n\\r\\ndone"
val (r11,r12) = req1.splitAt(req1.length - 1)
val req2 = "POST /sync HTTP/1.1\\r\\nConnection:keep-alive\\r\\nContent-Length: 5\\r\\n\\r\\ntotal"
val buff = Await.result(runRequest(Seq(r11, r12, req2), service), 5.seconds)
val hs = Set(H.`Content-Type`(MediaType.`text/plain`, Charset.`UTF-8`), H.`Content-Length`.unsafeFromLong(3))
// Both responses must succeed
dropDate(ResponseParser.parseBuffer(buff)) must_== ((Ok, hs, "foo"))
buff.remaining() must_== 0
}
"Handle routes that runs the request body for non-chunked" in {
val service = HttpService {
case req => req.body.run.flatMap { _ => Response().withBody("foo") }
}
// The first request will get split into two chunks, leaving the last byte off
val req1 = "POST /sync HTTP/1.1\\r\\nConnection:keep-alive\\r\\nContent-Length: 4\\r\\n\\r\\ndone"
val (r11,r12) = req1.splitAt(req1.length - 1)
val req2 = "POST /sync HTTP/1.1\\r\\nConnection:keep-alive\\r\\nContent-Length: 5\\r\\n\\r\\ntotal"
val buff = Await.result(runRequest(Seq(r11,r12,req2), service), 5.seconds)
val hs = Set(H.`Content-Type`(MediaType.`text/plain`, Charset.`UTF-8`), H.`Content-Length`.unsafeFromLong(3))
// Both responses must succeed
dropDate(ResponseParser.parseBuffer(buff)) must_== ((Ok, hs, "foo"))
dropDate(ResponseParser.parseBuffer(buff)) must_== ((Ok, hs, "foo"))
}
// Think of this as drunk HTTP pipelining
"Not die when two requests come in back to back" in {
val service = HttpService { case req =>
Task.now(Response(body = req.body))
}
// The first request will get split into two chunks, leaving the last byte off
val req1 = "POST /sync HTTP/1.1\\r\\nConnection:keep-alive\\r\\nContent-Length: 4\\r\\n\\r\\ndone"
val req2 = "POST /sync HTTP/1.1\\r\\nConnection:keep-alive\\r\\nContent-Length: 5\\r\\n\\r\\ntotal"
val buff = Await.result(runRequest(Seq(req1 + req2), service), 5.seconds)
// Both responses must succeed
dropDate(ResponseParser.parseBuffer(buff)) must_== ((Ok, Set(H.`Content-Length`.unsafeFromLong(4)), "done"))
dropDate(ResponseParser.parseBuffer(buff)) must_== ((Ok, Set(H.`Content-Length`.unsafeFromLong(5)), "total"))
}
"Handle using the request body as the response body" in {
val service = HttpService {
case req => Task.now(Response(body = req.body))
}
// The first request will get split into two chunks, leaving the last byte off
val req1 = "POST /sync HTTP/1.1\\r\\nConnection:keep-alive\\r\\nContent-Length: 4\\r\\n\\r\\ndone"
val req2 = "POST /sync HTTP/1.1\\r\\nConnection:keep-alive\\r\\nContent-Length: 5\\r\\n\\r\\ntotal"
val buff = Await.result(runRequest(Seq(req1, req2), service), 5.seconds)
// Both responses must succeed
dropDate(ResponseParser.parseBuffer(buff)) must_== ((Ok, Set(H.`Content-Length`.unsafeFromLong(4)), "done"))
dropDate(ResponseParser.parseBuffer(buff)) must_== ((Ok, Set(H.`Content-Length`.unsafeFromLong(5)), "total"))
}
{
def req(path: String) = s"GET /$path HTTP/1.1\\r\\nTransfer-Encoding: chunked\\r\\n\\r\\n" +
"3\\r\\n" +
"foo\\r\\n" +
"0\\r\\n" +
"Foo:Bar\\r\\n\\r\\n"
val service = HttpService {
case req if req.pathInfo == "/foo" =>
for {
_ <- req.body.run
hs <- req.trailerHeaders
resp <- Response().withBody(hs.mkString)
} yield resp
case req if req.pathInfo == "/bar" =>
for {
// Don't run the body
hs <- req.trailerHeaders
resp <- Response().withBody(hs.mkString)
} yield resp
}
"Handle trailing headers" in {
val buff = Await.result(runRequest(Seq(req("foo")), service), 5.seconds)
val results = dropDate(ResponseParser.parseBuffer(buff))
results._1 must_== Ok
results._3 must_== "Foo: Bar"
}
"Fail if you use the trailers before they have resolved" in {
val buff = Await.result(runRequest(Seq(req("bar")), service), 5.seconds)
val results = dropDate(ResponseParser.parseBuffer(buff))
results._1 must_== InternalServerError
}
}
}
}
| ZizhengTai/http4s | blaze-server/src/test/scala/org/http4s/server/blaze/Http1ServerStageSpec.scala | Scala | apache-2.0 | 14,397 |
package day2.foldleft
/**
* Created by Niels Bokmans on 22-3-2016.
*/
object FoldLeftMain {

  // Unused sample data; retained for source compatibility with any external callers.
  def list = List(1, 2, 3)

  /** Entry point: demonstrates several aggregate/fold operations on sample lists. */
  def main(args: Array[String]): Unit = {
    println("List[String] to string length with fold left: " + countStringList(stringList))
    println("Sum of List[Int]: " + countIntListSum(intList))
    println("Fold left sum of List[Int]: " + countIntListFoldLeft(intList))
    println("Append List[String]: " + appendStringList(stringList))
  }

  // Fixture data used by main.
  def stringList = List[String]("One", "Two", "Three", "Four", "Five", "Six", "Seven", "Eight", "Nine", "Ten")

  def intList = List[Int](1, 2, 3, 4)

  /** Sums the list using the standard library's `sum`. */
  def countIntListSum(list: List[Int]): Int =
    list.sum

  /** Sums the list explicitly with `foldLeft`, for comparison with `sum`. */
  def countIntListFoldLeft(list: List[Int]): Int =
    list.foldLeft(0)(_ + _)

  /** Total number of characters across all strings in the list. */
  def countStringList(list: List[String]): Int =
    list.foldLeft(0)((sum, str) => sum + str.length)

  /**
   * Joins the strings with ", ". Uses `mkString` instead of `reduce(_ + ", " + _)`
   * so an empty list yields "" instead of throwing UnsupportedOperationException;
   * for non-empty lists the result is identical.
   */
  def appendStringList(list: List[String]): String =
    list.mkString(", ")
}
| nielsje41/7l7wScala | day2/foldleft/FoldLeftMain.scala | Scala | mit | 998 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package models.bankdetails
import models.CharacterSets
import models.bankdetails.BankDetails._
import models.registrationprogress.{Completed, NotStarted, Section, Started}
import play.api.libs.json.Json
import utils.{AmlsSpec, DependencyMocks, StatusConstants}
/**
 * Unit tests for [[BankDetails]]: JSON (de)serialisation, completeness rules,
 * display descriptions, registration-progress section derivation, and the
 * change-tracking setters. Fixture models/JSON come from [[BankDetailsModels]].
 */
class BankDetailsSpec extends AmlsSpec with CharacterSets with DependencyMocks with BankDetailsModels {
  // Additional ad-hoc fixtures beyond those supplied by BankDetailsModels.
  val emptyBankDetails: Option[BankDetails] = None

  val accountTypePartialModel = BankDetails(Some(accountType), None)

  val accountTypeNew = BelongsToBusiness

  val bankAccountPartialModel = BankDetails(None, None, Some(bankAccount))

  val bankAccountNew = BankAccount(Some(BankAccountIsUk(true)), None, Some(UKAccount("123456", "11-11-11")))

  val incompleteModel = BankDetails(Some(accountType), None)

  // JSON round-tripping, including the legacy ("old format") document shape.
  "BankDetails" must {
    "serialise" when {
      "given complete model" in {
        Json.toJson[BankDetails](completeModel) must be(completeJson)
      }
      "has the hasChanged flag set as true" in {
        Json.toJson[BankDetails](completeModelChanged)(BankDetails.writes) must be(completeJsonChanged)
        Json.toJson[BankDetails](completeModelChanged) must be(completeJsonChanged)
      }
      "partially complete model" which {
        "contains only accountType" in {
          Json.toJson[BankDetails](accountTypePartialModel) must be(accountTypeJson)
        }
        "contains only bankAccount" in {
          Json.toJson[BankDetails](bankAccountPartialModel) must be(bankAccountJson)
        }
      }
    }

    "deserialise" when {
      "given complete model" in {
        completeJson.as[BankDetails] must be(completeModel)
      }
      // Missing hasChanged must default rather than fail deserialisation.
      "hasChanged field is missing from the Json" in {
        (completeJson - "hasChanged").as[BankDetails] must be(completeModel)
      }
      "has the hasChanged flag set as true" in {
        completeJsonChanged.as[BankDetails] must be(completeModelChanged)
      }
      "partially complete model" which {
        "contains only accountType" in {
          accountTypeJson.as[BankDetails] must be(accountTypePartialModel)
        }
        "contains only bankAccount" in {
          bankAccountJson.as[BankDetails] must be(bankAccountPartialModel)
        }
      }
      "given old format" in {
        oldCompleteJson.as[BankDetails] must be(completeModel)
      }
    }
  }

  // Completeness rules: a bank account must be fully specified (UK details, or
  // non-UK with the IBAN question answered and a number supplied) and accepted.
  "isComplete" must {
    "return true" when {
      "given complete model" in {
        val bankAccount = BankAccount(Some(BankAccountIsUk(true)), None, Some(UKAccount("123456", "00-00-00")))
        val bankDetails = BankDetails(Some(accountType), Some("name"), Some( bankAccount), hasAccepted = true)
        bankDetails.isComplete must be(true)
      }
      "empty model" in {
        val bankDetails = BankDetails()
        bankDetails.isComplete must be(true)
      }
      "given empty model with accepted" in {
        val bankDetails = BankDetails(None, None, hasAccepted = true)
        bankDetails.isComplete must be(true)
      }
    }

    "return false" when {
      "given incomplete model" in {
        val bankDetails = BankDetails(Some(accountType), None)
        bankDetails.isComplete must be(false)
      }
    }

    "return false" when {
      "given incomplete UK BankAccount" in {
        val bankAccount = BankAccount(Some(BankAccountIsUk(true)), None, None)
        val bankDetails = BankDetails(Some(accountType), Some("name"), Some( bankAccount), hasAccepted = true)
        bankDetails.isComplete must be(false)
      }
      "given incomplete Non UK BankAccount without IBAN" in {
        val bankAccount = BankAccount(Some(BankAccountIsUk(false)), Some(BankAccountHasIban(false)), None)
        val bankDetails = BankDetails(Some(accountType), Some("name"), Some( bankAccount), hasAccepted = true)
        bankDetails.isComplete must be(false)
      }
      "given incomplete Non UK BankAccount without IBAN answer" in {
        val bankAccount = BankAccount(Some(BankAccountIsUk(false)), None, None)
        val bankDetails = BankDetails(Some(accountType), Some("name"), Some( bankAccount), hasAccepted = true)
        bankDetails.isComplete must be(false)
      }
      "given incomplete UK BankAccount with IBAN" in {
        val bankAccount = BankAccount(Some(BankAccountIsUk(false)), Some(BankAccountHasIban(true)), None)
        val bankDetails = BankDetails(Some(accountType), Some("name"), Some( bankAccount), hasAccepted = true)
        bankDetails.isComplete must be(false)
      }
    }
  }

  // Display descriptions vary with account type and whether the account is
  // UK, non-UK, or has no account details at all (different message keys).
  "getBankAccountDescription" must {
    "return the correct uk account descriptions" in {
      val bankDetailsPersonal = BankDetails(Some(PersonalAccount), None, Some(bankAccount))
      val bankDetailsBelongstoBusiness = bankDetailsPersonal.copy(bankAccountType = Some(BelongsToBusiness))
      val bankDetailsBelongstoOtherBusiness = bankDetailsPersonal.copy(bankAccountType = Some(BelongsToOtherBusiness))
      val bankDetailsNoBankAccountUsed = bankDetailsPersonal.copy(bankAccountType = Some(NoBankAccountUsed))

      BankDetails.getBankAccountDescription(bankDetailsPersonal) must be(messages("bankdetails.accounttype.uk.lbl.01"))
      BankDetails.getBankAccountDescription(bankDetailsBelongstoBusiness) must be(messages("bankdetails.accounttype.uk.lbl.02"))
      BankDetails.getBankAccountDescription(bankDetailsBelongstoOtherBusiness) must be(messages("bankdetails.accounttype.uk.lbl.03"))
      BankDetails.getBankAccountDescription(bankDetailsNoBankAccountUsed) must be(messages("bankdetails.accounttype.uk.lbl.04"))
    }

    "return the correct non-uk account descriptions" in {
      val bankDetailsPersonal = BankDetails(Some(PersonalAccount), None, Some(BankAccount(Some(BankAccountIsUk(false)), Some(BankAccountHasIban(false)), Some(NonUKAccountNumber("ABCDEFGHIJKLMNOPQRSTUVWXYZABCD")))))
      val bankDetailsBelongstoBusiness = bankDetailsPersonal.copy(bankAccountType = Some(BelongsToBusiness))
      val bankDetailsBelongstoOtherBusiness = bankDetailsPersonal.copy(bankAccountType = Some(BelongsToOtherBusiness))
      val bankDetailsNoBankAccountUsed = bankDetailsPersonal.copy(bankAccountType = Some(NoBankAccountUsed))

      BankDetails.getBankAccountDescription(bankDetailsPersonal) must be(messages("bankdetails.accounttype.nonuk.lbl.01"))
      BankDetails.getBankAccountDescription(bankDetailsBelongstoBusiness) must be(messages("bankdetails.accounttype.nonuk.lbl.02"))
      BankDetails.getBankAccountDescription(bankDetailsBelongstoOtherBusiness) must be(messages("bankdetails.accounttype.nonuk.lbl.03"))
      BankDetails.getBankAccountDescription(bankDetailsNoBankAccountUsed) must be(messages("bankdetails.accounttype.nonuk.lbl.04"))
    }

    "return the correct description wheere there are no account numbers present" in {
      val bankDetailsPersonal = BankDetails(Some(PersonalAccount), None, None)
      val bankDetailsBelongstoBusiness = bankDetailsPersonal.copy(bankAccountType = Some(BelongsToBusiness))
      val bankDetailsBelongstoOtherBusiness = bankDetailsPersonal.copy(bankAccountType = Some(BelongsToOtherBusiness))
      val bankDetailsNoBankAccountUsed = bankDetailsPersonal.copy(bankAccountType = Some(NoBankAccountUsed))

      BankDetails.getBankAccountDescription(bankDetailsPersonal) must be(messages("bankdetails.accounttype.lbl.01"))
      BankDetails.getBankAccountDescription(bankDetailsBelongstoBusiness) must be(messages("bankdetails.accounttype.lbl.02"))
      BankDetails.getBankAccountDescription(bankDetailsBelongstoOtherBusiness) must be(messages("bankdetails.accounttype.lbl.03"))
      BankDetails.getBankAccountDescription(bankDetailsNoBankAccountUsed) must be(messages("bankdetails.accounttype.lbl.04"))
    }
  }

  // Registration-progress section derivation from the cached Seq[BankDetails]:
  // NotStarted / Started / Completed status, hasChanged flag, and target route.
  "Section" must {
    "return a NotStarted Section" when {
      "there is no data at all" in {
        val notStartedSection = Section("bankdetails", NotStarted, false, controllers.bankdetails.routes.WhatYouNeedController.get)

        mockCacheGetEntry[Seq[BankDetails]](None, BankDetails.key)

        BankDetails.section(mockCacheMap) must be(notStartedSection)
      }
    }

    "return a Completed Section" when {
      "model is complete and has not changed" in {
        val complete = Seq(completeModel)
        val completedSection = Section("bankdetails", Completed, false, controllers.bankdetails.routes.YourBankAccountsController.get)

        mockCacheGetEntry[Seq[BankDetails]](Some(complete), BankDetails.key)

        BankDetails.section(mockCacheMap) must be(completedSection)
      }

      "model is complete and has changed" in {
        val completeChangedModel = BankDetails(Some(accountType), Some("name"), Some(bankAccount), true, hasAccepted = true)
        val completedSection = Section("bankdetails", Completed, true, controllers.bankdetails.routes.YourBankAccountsController.get)

        mockCacheGetEntry[Seq[BankDetails]](Some(Seq(completeChangedModel)), BankDetails.key)

        BankDetails.section(mockCacheMap) must be(completedSection)
      }

      "model is complete with No bankaccount option selected" in {
        val completedSection = Section("bankdetails", Completed, false, controllers.bankdetails.routes.YourBankAccountsController.get)

        mockCacheGetEntry[Seq[BankDetails]](Some(Seq.empty), BankDetails.key)

        val section = BankDetails.section(mockCacheMap)
        section.hasChanged must be(false)
        section.status must be(Completed)
        BankDetails.section(mockCacheMap) must be(completedSection)
      }

      // Deleted accounts still count toward section completion.
      "model is complete with only deleted bankaccounts that have not changed" in {
        val deleted = Seq(completeModel.copy(status = Some(StatusConstants.Deleted)))
        val completedSection = Section("bankdetails", Completed, false, controllers.bankdetails.routes.YourBankAccountsController.get)

        mockCacheGetEntry[Seq[BankDetails]](Some(deleted), BankDetails.key)

        val section = BankDetails.section(mockCacheMap)
        section.hasChanged must be(false)
        section.status must be(Completed)
        BankDetails.section(mockCacheMap) must be(completedSection)
      }

      "model is complete with only deleted bankaccounts that have changed" in {
        val deleted = Seq(completeModel.copy(status = Some(StatusConstants.Deleted), hasChanged = true, hasAccepted = true))
        val completedSection = Section("bankdetails", Completed, true, controllers.bankdetails.routes.YourBankAccountsController.get)

        mockCacheGetEntry[Seq[BankDetails]](Some(deleted), BankDetails.key)

        val section = BankDetails.section(mockCacheMap)
        section.hasChanged must be(true)
        section.status must be(Completed)
        BankDetails.section(mockCacheMap) must be(completedSection)
      }
    }

    "return a Started Section when model is incomplete" in {
      val incomplete = Seq(accountTypePartialModel)
      val startedSection = Section("bankdetails", Started, false, controllers.bankdetails.routes.YourBankAccountsController.get)

      mockCacheGetEntry[Seq[BankDetails]](Some(incomplete), BankDetails.key)

      BankDetails.section(mockCacheMap) must be(startedSection)
    }

    // Clearing the account type must flag the model as changed and un-accepted.
    "set hasChanged and hasAccepted when updating bankAccountType set to None" in {
      val result = completeModel.bankAccountType(None)
      result.hasAccepted mustBe false
      result.hasChanged mustBe true
      result.bankAccountType mustBe None
    }

    "Amendment and Variation flow" must {
      "redirect to Your Bank Accounts page" when {
        "the section is complete with one of the bank details object being removed" in {
          mockCacheGetEntry[Seq[BankDetails]](Some(Seq(
            BankDetails(status = Some(StatusConstants.Deleted), hasChanged = true, hasAccepted = true), completeModel)),
            BankDetails.key
          )

          val section = BankDetails.section(mockCacheMap)

          section.hasChanged must be(true)
          section.status must be(Completed)
          section.call must be(controllers.bankdetails.routes.YourBankAccountsController.get)
        }

        "the section is complete with all the bank details unchanged" in {
          mockCacheGetEntry[Seq[BankDetails]](Some(Seq(completeModel, completeModel)), BankDetails.key)

          val section = BankDetails.section(mockCacheMap)

          section.hasChanged must be(false)
          section.status must be(Completed)
          section.call must be(controllers.bankdetails.routes.YourBankAccountsController.get)
        }

        "the section is complete with all the bank details being modified" in {
          mockCacheGetEntry[Seq[BankDetails]](Some(Seq(completeModelChanged, completeModelChanged)), BankDetails.key)

          val section = BankDetails.section(mockCacheMap)

          section.hasChanged must be(true)
          section.status must be(Completed)
          section.call must be(controllers.bankdetails.routes.YourBankAccountsController.get)
        }
      }

      "redirect to What You Need" when {
        "there is no bank account data" in {
          mockCacheGetEntry(None,
            BankDetails.key)

          val section = BankDetails.section(mockCacheMap)

          section.hasChanged must be(false)
          section.status must be(NotStarted)
          section.call must be(controllers.bankdetails.routes.WhatYouNeedController.get)
        }
      }
    }
  }

  // anyChanged: true iff at least one element of the sequence has hasChanged set.
  "anyChanged" must {
    val originalBankDetails = Seq(BankDetails(Some(accountType), None, Some(bankAccount), false))
    val originalBankDetailsChanged = Seq(BankDetails(Some(accountType), None, Some(bankAccountNew), true))

    "return false" when {
      "no BankDetails within the sequence have changed" in {
        val res = BankDetails.anyChanged(originalBankDetails)
        res must be(false)
      }
    }

    "return true" when {
      "at least one BankDetails within the sequence has changed" in {
        val res = BankDetails.anyChanged(originalBankDetailsChanged)
        res must be(true)
      }
    }
  }

  // Setter semantics: assigning an identical value is a no-op (hasChanged stays
  // false); assigning a different value flips hasChanged and stores the value.
  it when {
    "bankAccountType value is set" which {
      "is the same as before" must {
        "leave the object unchanged" in {
          val res = completeModel.bankAccountType(Some(accountType))
          res must be(completeModel)
          res.hasChanged must be(false)
        }
      }

      "is different" must {
        "set the hasChanged & previouslyRegisterd Properties" in {
          val res = completeModel.bankAccountType(Some(accountTypeNew))

          res.hasChanged must be(true)
          BankDetails.anyChanged(Seq(res)) must be(true)
          res.bankAccountType must be(Some(accountTypeNew))
        }
      }
    }

    "bankAccount value is set" which {
      "is the same as before" must {
        "leave the object unchanged" in {
          val res = completeModel.bankAccount(Some(bankAccount))
          res must be(completeModel)
          res.hasChanged must be(false)
        }
      }

      "is different" must {
        "set the hasChanged & previouslyRegisterd Properties" in {
          val res = completeModel.bankAccount(Some(bankAccountNew))
          res.hasChanged must be(true)
          res.bankAccount must be(Some(bankAccountNew))
        }
      }
    }
  }
}
/**
 * Shared fixtures for [[BankDetailsSpec]]: model instances paired with the
 * exact JSON documents they must (de)serialise to/from, including the legacy
 * document shape where account fields were nested under "bankAccount".
 */
trait BankDetailsModels {
  val accountType = PersonalAccount

  // JSON for a model with only the account type populated.
  val accountTypeJson = Json.obj(
    "bankAccountType" -> Json.obj(
      "bankAccountType" -> "01"
    ),
    "hasChanged" -> false,
    "refreshedFromServer" -> false,
    "hasAccepted" -> false
  )

  val bankAccount = BankAccount(Some(BankAccountIsUk(true)), None, Some(UKAccount("111111", "00-00-00")))

  // JSON for a model with only the bank account populated.
  val bankAccountJson = Json.obj(
    "bankAccount" -> Json.obj(
      "isUK" -> true,
      "accountNumber" -> "111111",
      "sortCode" -> "00-00-00"
    ),
    "hasChanged" -> false,
    "refreshedFromServer" -> false,
    "hasAccepted" -> false
  )

  // Fully populated, accepted model and its current-format JSON.
  val completeModel = BankDetails(Some(accountType), Some("bankName"), Some(bankAccount), hasAccepted = true)

  val completeJson = Json.obj(
    "bankAccountType" -> Json.obj(
      "bankAccountType" -> "01"),
    "accountName" -> "bankName",
    "bankAccount" -> Json.obj(
      "isUK" -> true,
      "accountNumber" -> "111111",
      "sortCode" -> "00-00-00"),
    "hasChanged" -> false,
    "refreshedFromServer" -> false,
    "hasAccepted" -> true)

  // Same model but renamed and flagged hasChanged = true.
  val completeModelChanged = BankDetails(Some(accountType), Some("anotherName"), Some(bankAccount), true, hasAccepted = true)

  val completeJsonChanged = Json.obj(
    "bankAccountType" -> Json.obj(
      "bankAccountType" -> "01"),
    "accountName" -> "anotherName",
    "bankAccount" -> Json.obj(
      "isUK" -> true,
      "accountNumber" -> "111111",
      "sortCode" -> "00-00-00"),
    "hasChanged" -> true,
    "refreshedFromServer" -> false,
    "hasAccepted" -> true)

  // Legacy format: accountName lives inside the "bankAccount" object. Must
  // still deserialise to completeModel.
  val oldCompleteJson = Json.obj(
    "bankAccountType" -> Json.obj(
      "bankAccountType" -> "01"),
    "bankAccount" -> Json.obj(
      "accountName" -> "bankName",
      "isUK" -> true,
      "accountNumber" -> "111111",
      "sortCode" -> "00-00-00"),
    "hasChanged" -> false,
    "refreshedFromServer" -> false,
    "hasAccepted" -> true)
}
/* Copyright 2009-2016 EPFL, Lausanne */
package leon
package termination
import purescala.Definitions._
import utils.Report
import utils.ASCIIHelpers._
import leon.purescala.PrettyPrinter
import leon.purescala.SelfPrettyPrinter
/**
 * Report of a termination analysis run: one guarantee per analysed function,
 * plus the total analysis time. Renders either a human-readable ASCII table
 * (`summaryString`) or a compact machine-friendly listing (`evaluationString`).
 */
case class TerminationReport(ctx: LeonContext, program: Program, results : Seq[(FunDef,TerminationGuarantee)], time : Double) extends Report {

  /** ASCII table with one row per function and a trailing analysis-time footer. */
  def summaryString : String = {
    var table = Table("Termination summary")
    results.foreach { case (fd, guarantee) =>
      table += Row(Seq(
        Cell(fd.id.asString(ctx)),
        Cell(verdictCell(fd, guarantee))
      ))
    }
    table += Separator
    table += Row(Seq(Cell(
      f"Analysis time: $time%7.3f",
      spanning = 2
    )))
    table.render
  }

  // Renders one verdict cell: a check/cross mark followed by a textual verdict.
  private def verdictCell(fd: FunDef, guarantee: TerminationGuarantee): String = {
    val mark = if (guarantee.isGuaranteed) "\\u2713" else "\\u2717"
    val details = guarantee match {
      case LoopsGivenInputs(reason, args) =>
        val prettyArgs = args.map { v =>
          SelfPrettyPrinter.print(v, PrettyPrinter(v))(ctx, program)
        }
        "Non-terminating for call: " + prettyArgs.mkString(fd.id + "(", ",", ")")
      case CallsNonTerminating(funDefs) =>
        "Calls non-terminating functions " + funDefs.map(_.id).mkString(",")
      case Terminates(reason) =>
        "Terminates (" + reason + ")"
      case other => other.toString
    }
    s"$mark $details"
  }

  /** One line per function: "- <name padded to 30> <t|n|u>" (terminates / does not / unknown). */
  def evaluationString : String =
    results.map { case (fd, guarantee) =>
      val flag = guarantee match {
        case NoGuarantee => "u"
        case t => if (t.isGuaranteed) "t" else "n"
      }
      f"- ${fd.id.name}%-30s $flag\\n"
    }.mkString
}
| regb/leon | src/main/scala/leon/termination/TerminationReport.scala | Scala | gpl-3.0 | 1,607 |
package wav.common.scalajs.macros
import scala.scalajs.js
/**
 * Exercises the `JS` macro against a range of Scala type shapes (options,
 * collections, enumerations, case classes with defaults and nesting).
 * NOTE(review): the macro's semantics are defined elsewhere; from this file it
 * appears to convert Scala values/types to `js.Object` representations, so this
 * object primarily serves as a compile-time regression check for the macro.
 */
object Test {
  // Container types.
  val optionTest = JS[Option[Int]](Some(1))
  val seqTest = JS[Seq[Int]](Seq(1))
  val mapTest = JS[Map[Int, Double]](Map(1 -> 1)) // Not verified.

  object enum extends Enumeration {
    val x = Value
  }

  // Case classes referencing an Enumeration value, directly and via Option.
  case class Simple(s: String, t: enum.Value)
  case class Simple2(s: String, t: Option[enum.Value])
  val simpleTest = JS[Simple]
  val simpleTest2 = JS[Simple2]

  case class SimpleOption(so: Option[String])
  val simpleOptionTest = JS[SimpleOption]

  case class SimpleSeq(ss: Seq[String])
  val simpleSeqTest = JS[SimpleSeq]

  case class SimpleMap(ss: Map[Double, String])
  val simpleMapTest = JS[SimpleMap]

  // Case classes that convert themselves to js.Object via the macro.
  case class SimpleToJs(s: String) {
    val toJs: js.Object = JS[SimpleToJs](this)
  }

  case class SimpleToJsOpt(s: Option[String]) {
    val toJs: js.Object = JS[SimpleToJsOpt](this)
  }

  case class SimpleToJsDefault(s: Option[String] = None) {
    val toJs: js.Object = JS[SimpleToJsDefault](this)
  }

  // Mixed bag: nested case classes, options, defaults, js.UndefOr and arrays.
  case class Mixed(
    s: String,
    `do`: Option[Double],
    sim: SimpleToJs,
    simo: Option[SimpleToJs],
    id: Int = 1,
    sod: Option[String] = None,
    sud: js.UndefOr[js.Object] = js.undefined,
    sima: Array[SimpleToJs] = Array.empty) {
    val toJs: js.Object = JS[Mixed](this)
  }
}
} | wav/scala-macros | src/test/scala/wav/common/scalajs/macros/MacroTest.scala | Scala | apache-2.0 | 1,319 |
package org.jetbrains.plugins.scala
package lang
package psi
package api
package toplevel
package typedef
import java.util
import com.intellij.lang.ASTNode
import com.intellij.openapi.progress.ProgressManager
import com.intellij.openapi.project.DumbService
import com.intellij.pom.java.LanguageLevel
import com.intellij.psi._
import com.intellij.psi.impl.PsiClassImplUtil.MemberType
import com.intellij.psi.impl.{PsiClassImplUtil, PsiSuperMethodImplUtil}
import com.intellij.psi.scope.PsiScopeProcessor
import com.intellij.psi.scope.processor.MethodsProcessor
import com.intellij.psi.search.GlobalSearchScope
import com.intellij.psi.util.{PsiTreeUtil, PsiUtil}
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.parser.ScalaElementTypes
import org.jetbrains.plugins.scala.lang.psi.api.base.types.ScSelfTypeElement
import org.jetbrains.plugins.scala.lang.psi.api.statements._
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.templates.ScExtendsBlock
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory
import org.jetbrains.plugins.scala.lang.psi.impl.toplevel.typedef.TypeDefinitionMembers
import org.jetbrains.plugins.scala.lang.psi.light.ScFunctionWrapper
import org.jetbrains.plugins.scala.lang.psi.types._
import org.jetbrains.plugins.scala.lang.psi.types.result.{TypeResult, TypingContext}
import org.jetbrains.plugins.scala.lang.resolve.processor.BaseProcessor
import org.jetbrains.plugins.scala.macroAnnotations.{CachedInsidePsiElement, ModCount}
/**
* @author ven
*/
trait ScTemplateDefinition extends ScNamedElement with PsiClass {
import com.intellij.psi.PsiMethod
  // Default: no qualified name. `null` (not "") is how the PsiClass contract
  // signals "none"; concrete type definitions override this.
  def qualifiedName: String = null

  // Extra JVM-level names this definition answers to.
  // NOTE(review): empty by default — presumably overridden where name mangling
  // applies (e.g. objects); verify against subclasses.
  def additionalJavaNames: Array[String] = Array.empty
  /** The extends block of this template definition.
    *
    * Prefers the stub tree when one is available (avoids parsing the file);
    * otherwise falls back to the AST, where the extends block is required to
    * be the last child.
    */
  def extendsBlock: ScExtendsBlock = {
    this match {
      case st: ScalaStubBasedElementImpl[_] =>
        val stub = st.getStub
        if (stub != null) {
          // Stub path: no AST needed.
          return stub.findChildStubByType(ScalaElementTypes.EXTENDS_BLOCK).getPsi
        }
      case _ =>
    }
    // AST path: by grammar the extends block is always the last child.
    assert(getLastChild.isInstanceOf[ScExtendsBlock], "Class hasn't extends block: " + this.getText)
    getLastChild.asInstanceOf[ScExtendsBlock]
  }
  /** Superclass references of this template, each paired with the resolved
    * class and substitutor when the reference's type resolves to a class
    * (`None` when resolution fails).
    */
  def refs = {
    extendsBlock.templateParents.toSeq.flatMap(_.typeElements).map { refElement =>
      val tuple: Option[(PsiClass, ScSubstitutor)] = refElement.getType(TypingContext.empty).toOption.flatMap(
        ScType.extractClassType(_, Some(getProject)))
      (refElement, tuple)
    }
  }
  /** Super types of this template converted to Java `PsiClassType`s, for the
    * PsiClass extends-list API. Types that cannot be represented as class
    * types are dropped.
    */
  def innerExtendsListTypes = {
    val eb = extendsBlock
    if (eb != null) {
      val tp = eb.templateParents
      tp match {
        case Some(tp1) => (for (te <- tp1.allTypeElements;
                                t = te.getType(TypingContext.empty).getOrAny;
                                asPsi = ScType.toPsi(t, getProject, GlobalSearchScope.allScope(getProject))
                                if asPsi.isInstanceOf[PsiClassType]) yield asPsi.asInstanceOf[PsiClassType]).toArray[PsiClassType]
        case _ => PsiClassType.EMPTY_ARRAY
      }
    } else PsiClassType.EMPTY_ARRAY
  }
def showAsInheritor: Boolean = {
isInstanceOf[ScTypeDefinition] || extendsBlock.templateBody != None
}
  // The following overrides delegate the standard PsiClass member-lookup API
  // to the platform's PsiClassImplUtil, which understands the caching maps
  // this class exposes. `checkBases` extends the search to supers.
  override def findMethodBySignature(patternMethod: PsiMethod, checkBases: Boolean): PsiMethod = {
    PsiClassImplUtil.findMethodBySignature(this, patternMethod, checkBases)
  }

  override def findMethodsBySignature(patternMethod: PsiMethod, checkBases: Boolean): Array[PsiMethod] = {
    PsiClassImplUtil.findMethodsBySignature(this, patternMethod, checkBases)
  }

  override def findMethodsByName(name: String, checkBases: Boolean): Array[PsiMethod] = {
    PsiClassImplUtil.findMethodsByName(this, name, checkBases)
  }

  override def findFieldByName(name: String, checkBases: Boolean): PsiField = {
    PsiClassImplUtil.findFieldByName(this, name, checkBases)
  }

  override def findInnerClassByName(name: String, checkBases: Boolean): PsiClass = {
    PsiClassImplUtil.findInnerByName(this, name, checkBases)
  }
import java.util.{Collection => JCollection, List => JList}
import com.intellij.openapi.util.{Pair => IPair}
  // All fields (declared + inherited), via the platform helper.
  def getAllFields: Array[PsiField] = {
    PsiClassImplUtil.getAllFields(this)
  }
  /** Methods named `name` paired with their substitutors.
    *
    * Results are reordered so that concrete definitions owned by this class
    * sort before declarations/inherited wrappers.
    */
  override def findMethodsAndTheirSubstitutorsByName(name: String,
                                                     checkBases: Boolean): JList[IPair[PsiMethod, PsiSubstitutor]] = {
    //the reordering is a hack to enable 'go to test location' for junit test methods defined in traits
    import scala.collection.JavaConversions._
    PsiClassImplUtil.findMethodsAndTheirSubstitutorsByName(this, name, checkBases).toList.sortBy(myPair =>
      myPair.first match {
        // Abstract declarations sort last (1); concrete members of this very
        // class sort first (0).
        case wrapper: ScFunctionWrapper if wrapper.function.isInstanceOf[ScFunctionDeclaration] => 1
        case wrapper: ScFunctionWrapper if wrapper.function.isInstanceOf[ScFunctionDefinition] => wrapper.containingClass match {
          case myClass: ScTemplateDefinition if myClass.members.contains(wrapper.function) => 0
          case _ => 1
        }
        case _ => 1
      })
  }
  // All methods with substitutors, from the platform's cached member map.
  override def getAllMethodsAndTheirSubstitutors: JList[IPair[PsiMethod, PsiSubstitutor]] = {
    PsiClassImplUtil.getAllWithSubstitutorsByMap(this, MemberType.METHOD)
  }

  // Hierarchical method signatures visible on this class (for override checks).
  override def getVisibleSignatures: JCollection[HierarchicalMethodSignature] = {
    PsiSuperMethodImplUtil.getVisibleSignatures(this)
  }
  // The Scala type of this definition (implemented by concrete subclasses).
  def getType(ctx: TypingContext): TypeResult[ScType]

  // Same, but keeps projections; `thisProjections` additionally keeps
  // this-type projections.
  def getTypeWithProjections(ctx: TypingContext, thisProjections: Boolean = false): TypeResult[ScType]

  // Convenience accessors forwarding to the extends block.
  def members: Seq[ScMember] = extendsBlock.members

  def functions: Seq[ScFunction] = extendsBlock.functions

  def aliases: Seq[ScTypeAlias] = extendsBlock.aliases
  // Synthetic methods that may override inherited members (e.g. case-class
  // generated ones). Cached per block modification.
  @CachedInsidePsiElement(this, ModCount.getBlockModificationCount)
  def syntheticMethodsWithOverride: Seq[PsiMethod] = syntheticMethodsWithOverrideImpl

  /**
   * Implement it carefully to avoid recursion.
   */
  protected def syntheticMethodsWithOverrideImpl: Seq[PsiMethod] = Seq.empty

  // Every synthetic method, overriding or not.
  def allSynthetics: Seq[PsiMethod] = syntheticMethodsNoOverride ++ syntheticMethodsWithOverride

  // Synthetic methods that never override anything; cached like above.
  @CachedInsidePsiElement(this, ModCount.getBlockModificationCount)
  def syntheticMethodsNoOverride: Seq[PsiMethod] = syntheticMethodsNoOverrideImpl

  protected def syntheticMethodsNoOverrideImpl: Seq[PsiMethod] = Seq.empty

  // Nested type definitions, plus compiler-generated ones (cached).
  def typeDefinitions: Seq[ScTypeDefinition] = extendsBlock.typeDefinitions

  @CachedInsidePsiElement(this, ModCount.getBlockModificationCount)
  def syntheticTypeDefinitions: Seq[ScTypeDefinition] = syntheticTypeDefinitionsImpl

  def syntheticTypeDefinitionsImpl: Seq[ScTypeDefinition] = Seq.empty
def selfTypeElement: Option[ScSelfTypeElement] = {
val qual = qualifiedName
if (qual != null && (qual == "scala.Predef" || qual == "scala")) return None
extendsBlock.selfTypeElement
}
  // Forwarders to the extends block: declared self type, super types, and
  // resolved super classes.
  def selfType = extendsBlock.selfType

  def superTypes: List[ScType] = extendsBlock.superTypes

  def supers: Seq[PsiClass] = extendsBlock.supers
  // All type aliases visible on this definition (with substitutors), plus
  // synthetic non-object type definitions paired with the empty substitutor.
  def allTypeAliases = TypeDefinitionMembers.getTypes(this).allFirstSeq().flatMap(n => n.map {
    case (_, x) => (x.info, x.substitutor)
  }) ++ syntheticTypeDefinitions.filter(!_.isObject).map((_, ScSubstitutor.empty))
  /** Like `allTypeAliases`, but when a self type is declared the members are
    * collected from the greatest lower bound of the self type and this
    * class's own type (members promised by the self type become visible).
    */
  def allTypeAliasesIncludingSelfType = {
    selfType match {
      case Some(selfType) =>
        val clazzType = getTypeWithProjections(TypingContext.empty).getOrAny
        Bounds.glb(selfType, clazzType) match {
          case c: ScCompoundType =>
            TypeDefinitionMembers.getTypes(c, Some(clazzType), this).allFirstSeq().
              flatMap(_.map { case (_, n) => (n.info, n.substitutor) })
          case _ =>
            // glb collapsed to a non-compound type: nothing extra to add.
            allTypeAliases
        }
      case _ =>
        allTypeAliases
    }
  }
  // All non-method value members (vals/vars and similar) with substitutors.
  // The name check filters out secondary signatures (e.g. bean-style
  // accessors) so only the binding matching its declared name survives.
  def allVals = TypeDefinitionMembers.getSignatures(this).allFirstSeq().flatMap(n => n.filter{
    case (_, x) => !x.info.isInstanceOf[PhysicalSignature] &&
      (x.info.namedElement match {
        case v =>
          ScalaPsiUtil.nameContext(v) match {
            case _: ScVariable => v.name == x.info.name
            case _: ScValue => v.name == x.info.name
            case _ => true
          }
      })}).map { case (_, n) => (n.info.namedElement, n.substitutor) }
  /** Like `allVals`, but widened with members promised by an explicit self
    * type (computed on the glb of self type and class type).
    * NOTE(review): the filtering body duplicates `allVals` — keep the two in
    * sync if either changes.
    */
  def allValsIncludingSelfType = {
    selfType match {
      case Some(selfType) =>
        val clazzType = getTypeWithProjections(TypingContext.empty).getOrAny
        Bounds.glb(selfType, clazzType) match {
          case c: ScCompoundType =>
            TypeDefinitionMembers.getSignatures(c, Some(clazzType), this).allFirstSeq().flatMap(n => n.filter{
              case (_, x) => !x.info.isInstanceOf[PhysicalSignature] &&
                (x.info.namedElement match {
                  case v =>
                    ScalaPsiUtil.nameContext(v) match {
                      case _: ScVariable => v.name == x.info.name
                      case _: ScValue => v.name == x.info.name
                      case _ => true
                    }
                })}).map { case (_, n) => (n.info.namedElement, n.substitutor) }
          case _ =>
            allVals
        }
      case _ =>
        allVals
    }
  }
  // All physical (method) signatures of this definition, plus synthetic
  // non-overriding methods wrapped as physical signatures.
  def allMethods: Iterable[PhysicalSignature] =
    TypeDefinitionMembers.getSignatures(this).allFirstSeq().flatMap(_.filter {
      case (_, n) => n.info.isInstanceOf[PhysicalSignature]}).
      map { case (_, n) => n.info.asInstanceOf[PhysicalSignature] } ++
      syntheticMethodsNoOverride.map(new PhysicalSignature(_, ScSubstitutor.empty))
  /** Like `allMethods`, but widened with methods promised by an explicit
    * self type (computed on the glb of self type and class type).
    */
  def allMethodsIncludingSelfType: Iterable[PhysicalSignature] = {
    selfType match {
      case Some(selfType) =>
        val clazzType = getTypeWithProjections(TypingContext.empty).getOrAny
        Bounds.glb(selfType, clazzType) match {
          case c: ScCompoundType =>
            TypeDefinitionMembers.getSignatures(c, Some(clazzType), this).allFirstSeq().flatMap(_.filter {
              case (_, n) => n.info.isInstanceOf[PhysicalSignature]}).
              map { case (_, n) => n.info.asInstanceOf[PhysicalSignature] } ++
              syntheticMethodsNoOverride.map(new PhysicalSignature(_, ScSubstitutor.empty))
          case _ =>
            allMethods
        }
      case _ =>
        allMethods
    }
  }
  // Every member signature (methods and value bindings alike).
  def allSignatures = TypeDefinitionMembers.getSignatures(this).allFirstSeq().flatMap(_.map { case (_, n) => n.info })

  // Same, widened with signatures promised by an explicit self type.
  def allSignaturesIncludingSelfType = {
    selfType match {
      case Some(selfType) =>
        val clazzType = getTypeWithProjections(TypingContext.empty).getOrAny
        Bounds.glb(selfType, clazzType) match {
          case c: ScCompoundType =>
            TypeDefinitionMembers.getSignatures(c, Some(clazzType), this).allFirstSeq().
              flatMap(_.map { case (_, n) => n.info })
          case _ =>
            allSignatures
        }
      case _ =>
        allSignatures
    }
  }

  // True when this definition lives in a Scala script file.
  def isScriptFileClass = getContainingFile match {case file: ScalaFile => file.isScriptFile(false) case _ => false}
  /** Scope-processor entry point for resolving names against this template.
    *
    * Java-side (non-Scala) processors are routed to the platform's generic
    * class walker; Scala processors go through
    * `processDeclarationsForTemplateBody`.
    */
  def processDeclarations(processor: PsiScopeProcessor,
                          oldState: ResolveState,
                          lastParent: PsiElement,
                          place: PsiElement) : Boolean = {
    if (!processor.isInstanceOf[BaseProcessor]) {
      val lastChild = this match {
        case s: ScalaStubBasedElementImpl[_] => s.getLastChildStub
        case _ => this.getLastChild
      }
      val languageLevel: LanguageLevel =
        processor match {
          case methodProcessor: MethodsProcessor => methodProcessor.getLanguageLevel
          case _ => PsiUtil.getLanguageLevel(place)
        }
      return PsiClassImplUtil.processDeclarationsInClass(this, processor, oldState, null, lastChild, place, languageLevel, false)
    }
    // When resolving from inside our own template body, members are handled
    // elsewhere in the resolve chain, so report success without descending.
    if (extendsBlock.templateBody.isDefined &&
      PsiTreeUtil.isContextAncestor(extendsBlock.templateBody.get, place, false) && lastParent != null) return true
    processDeclarationsForTemplateBody(processor, oldState, lastParent, place)
  }
  /** Walks the declarations of this template for a Scala resolve processor.
    *
    * Handles, in order: dumb-mode bail-out, type-parameter-clause places,
    * the self-type binding, places inside template parents (only early
    * definitions are in scope there), and finally the full member set via
    * `TypeDefinitionMembers`. Returns false as soon as the processor stops.
    */
  def processDeclarationsForTemplateBody(processor: PsiScopeProcessor,
                                         oldState: ResolveState,
                                         lastParent: PsiElement,
                                         place: PsiElement): Boolean = {
    // Indexes are unavailable while indexing; pretend nothing matched.
    if (DumbService.getInstance(getProject).isDumb) return true
    var state = oldState
    //exception cases
    this match {
      case s: ScTypeParametersOwner => s.typeParametersClause match {
        case Some(tpc) if PsiTreeUtil.isContextAncestor(tpc, place, false) => return true
        case _ =>
      }
      case _ =>
    }

    // Process selftype reference
    selfTypeElement match {
      case Some(se) if se.name != "_" => if (!processor.execute(se, state)) return false
      case _ =>
    }

    // Record which type members should be substituted against: the this-type
    // when resolving from inside this definition, the designator otherwise.
    state = state.put(BaseProcessor.FROM_TYPE_KEY,
      if (ScalaPsiUtil.isPlaceTdAncestor(this, place)) ScThisType(this)
      else ScType.designator(this))
    val eb = extendsBlock
    eb.templateParents match {
      case Some(p) if PsiTreeUtil.isContextAncestor(p, place, false) =>
        // Place is inside the parents clause: only early definitions
        // (which can only declare vals/vars) are in scope.
        eb.earlyDefinitions match {
          case Some(ed) => for (m <- ed.members) {
            ProgressManager.checkCanceled()
            m match {
              case _var: ScVariable => for (declared <- _var.declaredElements) {
                ProgressManager.checkCanceled()
                if (!processor.execute(declared, state)) return false
              }
              case _val: ScValue => for (declared <- _val.declaredElements) {
                ProgressManager.checkCanceled()
                if (!processor.execute(declared, state)) return false
              }
            }
          }
          case None =>
        }
        true
      case _ =>
        eb.earlyDefinitions match {
          case Some(ed) if PsiTreeUtil.isContextAncestor(ed, place, true) =>
          case _ =>
            extendsBlock match {
              case e: ScExtendsBlock if e != null =>
                if (PsiTreeUtil.isContextAncestor(e, place, true) || !PsiTreeUtil.isContextAncestor(this, place, true)) {
                  this match {
                    // Inside the body of a class with a named self type:
                    // resolve through the this-type so self-type members apply.
                    case t: ScTypeDefinition if selfTypeElement != None &&
                      !PsiTreeUtil.isContextAncestor(selfTypeElement.get, place, true) &&
                      PsiTreeUtil.isContextAncestor(e.templateBody.orNull, place, true) &&
                      processor.isInstanceOf[BaseProcessor] && !t.isInstanceOf[ScObject] =>
                      selfTypeElement match {
                        case Some(_) => processor.asInstanceOf[BaseProcessor].processType(ScThisType(t), place, state)
                        case _ =>
                          if (!TypeDefinitionMembers.processDeclarations(this, processor, state, lastParent, place)) {
                            return false
                          }
                      }
                    case _ =>
                      if (!TypeDefinitionMembers.processDeclarations(this, processor, state, lastParent, place)) return false
                  }
                }
              case _ =>
            }
        }
        true
    }
  }
  /** Inserts `member` into this template's body before `anchor` (or at the
    * end when no anchor is given), taking care of surrounding newline nodes.
    * If no body exists yet, a fresh one is synthesized around the member.
    *
    * @return the member actually attached to the tree
    */
  def addMember(member: ScMember, anchor: Option[PsiElement]): ScMember = {
    extendsBlock.templateBody match {
      case Some(body) =>
        val before = anchor match {
          case Some(a) => a.getNode
          case None =>
            // Insert before the closing brace; if a newline precedes it,
            // insert before that newline instead.
            val last = body.getNode.getLastChildNode
            if (ScalaPsiUtil.isLineTerminator(last.getTreePrev.getPsi)) {
              last.getTreePrev
            } else {
              last
            }
        }
        // Keep the member on its own line on both sides.
        if (ScalaPsiUtil.isLineTerminator(before.getPsi))
          body.getNode.addChild(ScalaPsiElementFactory.createNewLineNode(member.getManager), before)
        body.getNode.addChild(member.getNode, before)
        if (!ScalaPsiUtil.isLineTerminator(before.getPsi))
          body.getNode.addChild(ScalaPsiElementFactory.createNewLineNode(member.getManager), before)
        else
          body.getNode.replaceChild(before, ScalaPsiElementFactory.createNewLineNode(member.getManager))
      case None =>
        // No body yet: build `{ member }` from scratch and return the copy
        // that actually ended up in the tree.
        val eBlockNode: ASTNode = extendsBlock.getNode
        eBlockNode.addChild(ScalaPsiElementFactory.createWhitespace(member.getManager).getNode)
        eBlockNode.addChild(ScalaPsiElementFactory.createBodyFromMember(member, member.getManager).getNode)
        return members(0)
    }
    member
  }
  // Detaches a member node from its parent in the AST.
  def deleteMember(member: ScMember) {
    member.getParent.getNode.removeChild(member.getNode)
  }

  // All physical methods named `name`, plus synthetic non-overriding ones.
  def functionsByName(name: String): Seq[PsiMethod] = {
    (for ((p: PhysicalSignature, _) <- TypeDefinitionMembers.getSignatures(this).forName(name)._1) yield p.method).
      ++(syntheticMethodsNoOverride.filter(_.name == name))
  }
override def isInheritor(baseClass: PsiClass, deep: Boolean): Boolean = {
if (baseClass == null) return false
val visited: util.Set[PsiClass] = new util.HashSet[PsiClass]
val baseQualifiedName = baseClass.qualifiedName
val baseName = baseClass.name
def isInheritorInner(base: PsiClass, drv: PsiClass, deep: Boolean): Boolean = {
ProgressManager.checkCanceled()
if (!visited.contains(drv)) {
visited.add(drv)
drv match {
case drg: ScTemplateDefinition =>
val supersIterator = drg.supers.iterator
while (supersIterator.hasNext) {
val c = supersIterator.next()
val value = baseClass match {
case _: ScTrait if c.isInstanceOf[ScTrait] => true
case _: ScClass if c.isInstanceOf[ScClass] => true
case _ if !c.isInstanceOf[ScTemplateDefinition] => true
case _ => false
}
if (value && c.name == baseName && c.qualifiedName == baseQualifiedName && value) return true
if (deep && isInheritorInner(base, c, deep)) return true
}
case _ =>
val supers = drv.getSuperTypes
val supersIterator = supers.iterator
while (supersIterator.hasNext) {
val psiT = supersIterator.next()
val c = psiT.resolveGenerics.getElement
if (c != null) {
if (c.name == baseName && c.qualifiedName == baseQualifiedName) return true
if (deep && isInheritorInner(base, c, deep)) return true
}
}
}
}
false
}
if (baseClass == null || DumbService.getInstance(baseClass.getProject).isDumb) return false //to prevent failing during indexes
// This doesn't appear in the superTypes at the moment, so special case required.
if (baseQualifiedName == "java.lang.Object") return true
if (baseQualifiedName == "scala.ScalaObject" && !baseClass.isDeprecated) return true
isInheritorInner(baseClass, this, deep)
}
}
object ScTemplateDefinition {
  // Extractor for pattern-matching a definition's extends block:
  // `case ScTemplateDefinition.ExtendsBlock(eb) => ...`. Always matches.
  object ExtendsBlock {
    def unapply(definition: ScTemplateDefinition): Some[ScExtendsBlock] = Some(definition.extendsBlock)
  }
}
package freeslick.testkit
import com.typesafe.slick.testkit.util.{AsyncTest, JdbcTestDB}
// Exercises insert/upsert behaviour for auto-increment primary keys across
// the Freeslick profiles, both with and without `returning` the generated key.
class FreeslickInsertTest extends AsyncTest[JdbcTestDB] {
  import tdb.profile.api._

  // Minimal two-column table with an auto-incrementing primary key.
  class T(tableName: String)(tag: Tag) extends Table[(Int, String)](tag, tableName) {
    def id = column[Int]("ID", O.AutoInc, O.PrimaryKey)
    def name = column[String]("NAME")
    def * = (id, name)
    def ins = (id, name)
  }

  // Oracle 11g doesn't seem to return batch counts. Should be in 12c though
  lazy val batchReturnCounts = !tdb.jdbcDriver.contains("OracleDriver")

  // Inserts/upserts through `returning ts.map(_.id)`: generated keys come back.
  def testUpsertAutoIncReturning = {
    val ts = TableQuery(new T("T_UPSERT")(_))
    (for {
      _ <- ts.schema.create
      q1 = ts returning ts.map(_.id)
      _ <- ifCap(jcap.returnInsertKey) {
        for {
          // Single insert returns single auto inc value
          _ <- (q1 +=(0, "e")).map(_ shouldBe 1)
          _ <- (q1 +=(0, "f")).map(_ shouldBe 2)
          _ <- ts.sortBy(_.id).result.map(_ shouldBe Seq((1, "e"), (2, "f")))
        } yield ()
      }
      // Inserts without "returning" are row counts of inserts
      _ <- (ts ++= Seq((1, "a"), (2, "b"))).map(_ shouldBe (if (batchReturnCounts) Some(2) else None))
      _ <- ts.insertOrUpdate((0, "c")).map(_ shouldBe 1)
      _ <- ts.insertOrUpdate((1, "d")).map(_ shouldBe 1)
      _ <- ifCap(jcap.returnInsertKey) {
        for {
          _ <- ts.sortBy(_.id).result.map(_ shouldBe Seq((1, "d"), (2, "f"), (3, "a"), (4, "b"), (5, "c")))
          // Upserts with returning
          _ <- q1.insertOrUpdate((0, "g")).map(_ shouldBe Some(6)) // insert returns key
          _ <- q1.insertOrUpdate((1, "f")).map(_ shouldBe None) // update returns none
          _ <- ts.sortBy(_.id).result.map(_ shouldBe Seq((1, "f"), (2, "f"), (3, "a"), (4, "b"), (5, "c"), (6, "g")))
          // batch inserts return sequences of inserted keys
          _ <- (q1 ++= Seq((1, "a"), (2, "b"))).map(_ shouldBe Seq(7,8))
          _ <- (q1 ++= (0 to 30).map(x => (0, "x"))).map(_ shouldBe (0 to 30).map(_+9))
        } yield ()
      }
    } yield ()).withPinnedSession
  }

  // Same scenarios without `returning`: results are plain row counts.
  def testUpsertAutoIncNonReturning = {
    val ts = TableQuery(new T("T_UPSERTNORET")(_))
    (for {
      _ <- ts.schema.create
      _ <- ifCap(jcap.returnInsertKey) {
        for {
          _ <- (ts +=(0, "e")).map(_ shouldBe 1)
          _ <- (ts +=(0, "f")).map(_ shouldBe 1)
          _ <- ts.sortBy(_.id).result.map(_ shouldBe Seq((1, "e"), (2, "f")))
        } yield ()
      }
      _ <- ts ++= Seq((1, "a"), (2, "b"))
      _ <- ts.insertOrUpdate((0, "c")).map(_ shouldBe 1)
      _ <- ts.insertOrUpdate((1, "d")).map(_ shouldBe 1)
      _ <- ifCap(jcap.returnInsertKey) {
        for {
          _ <- ts.sortBy(_.id).result.map(_ shouldBe Seq((1, "d"), (2, "f"), (3, "a"), (4, "b"), (5, "c")))
          _ <- ts.insertOrUpdate((0, "g")).map(_ shouldBe 1) // insert returns key
          _ <- ts.insertOrUpdate((1, "f")).map(_ shouldBe 1) // update returns none
          _ <- ts.sortBy(_.id).result.map(_ shouldBe Seq((1, "f"), (2, "f"), (3, "a"), (4, "b"), (5, "c"), (6, "g")))
          _ <- (ts ++= (0 to 30).map(x => (0, "x"))).map(_ shouldBe (if (batchReturnCounts) Some(31) else None))
        } yield ()
      }
    } yield ()).withPinnedSession
  }
}
| fommil/freeslick | src/it/scala/freeslick/testkit/FreeslickInsertTest.scala | Scala | lgpl-3.0 | 3,266 |
package org.broadinstitute.sting.queue.qscripts.examples
import org.broadinstitute.sting.queue.QScript
// Minimal example QScript: schedules a single shell command on the queue.
class HelloWorld extends QScript {
  def script() {
    add(new CommandLineFunction {
      def commandLine = "echo hello world"
    })
  }
}
| iontorrent/Torrent-Variant-Caller-stable | public/scala/qscript/org/broadinstitute/sting/queue/qscripts/examples/HelloWorld.scala | Scala | mit | 247 |
package com.greencatsoft.d3.common
import scala.language.implicitConversions
import scala.math.{ pow, sqrt }
import scala.scalajs.js
import scala.scalajs.js.annotation.JSExportAll
import org.scalajs.dom.svg
import org.scalajs.dom.svg.{ Matrix, SVG }
@JSExportAll
case class Point(x: Double, y: Double) extends Transformable[Point] {

  /** Component-wise sum (vector addition). */
  def +(point: Point): Point = Point(x + point.x, y + point.y)

  /** Component-wise difference, expressed as addition of the negation. */
  def -(point: Point): Point = this + !point

  /** Scales both coordinates by `ratio`. */
  def *(ratio: Double): Point = Point(x * ratio, y * ratio)

  /** Negates both coordinates. */
  def unary_!(): Point = Point(-x, -y)

  /** Euclidean distance to `point`.
    *
    * Uses `math.hypot`, which computes sqrt(dx^2 + dy^2) without the
    * intermediate overflow/underflow that squaring the components can cause.
    */
  def distance(point: Point): Double = {
    val p = this - point
    math.hypot(p.x, p.y)
  }

  /** Applies an SVG matrix transform to this point within `ownerNode`. */
  override def matrixTransform(matrix: Matrix)(implicit ownerNode: SVG): Point = {
    val p = ownerNode.createSVGPoint
    p.x = x
    p.y = y
    Point.fromSvgPoint(p.matrixTransform(matrix))
  }
}
object Point {

  // Conversions between this Point and the DOM's svg.Point. The to-SVG
  // direction needs an owning <svg> element because SVG points can only be
  // created through it.
  implicit def fromSvgPoint(point: svg.Point): Point = Point(point.x, point.y)

  implicit def toSvgPoint(point: Point)(implicit ownerNode: SVG): svg.Point = {
    val p = ownerNode.createSVGPoint
    p.x = point.x
    p.y = point.y
    p
  }

  // Conversions to/from the [x, y] array form used by d3 callbacks.
  implicit def fromArray(point: js.Array[Double]): Point = Point(point(0), point(1))

  implicit def toArray(point: Point): js.Array[Double] = js.Array(point.x, point.y)
}
/***********************************************************************
* Copyright (c) 2013-2022 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.curve
/**
* Maps a double within a known range to an Int in [0, bins)
*/
// Bins a continuous coordinate in [min, max] onto the discrete range
// [0, maxIndex] and back; implementations choose the binning strategy.
trait NormalizedDimension {

  /**
    * Min value considered for normalization range
    *
    * @return
    */
  def min: Double

  /**
    * Max value considered for normalizing
    *
    * @return
    */
  def max: Double

  /**
    * Max value to normalize to
    *
    * @return
    */
  def maxIndex: Int

  /**
    * Normalize the value
    *
    * @param x [min, max]
    * @return [0, maxIndex]
    */
  def normalize(x: Double): Int

  /**
    * Denormalize the value in bin x
    *
    * @param x [0, maxIndex]
    * @return [min, max]
    */
  def denormalize(x: Int): Double
}
object NormalizedDimension {

  // Power-of-two binning: `precision` bits give 2^precision bins. Values at
  // or above `max` clamp into the top bin; denormalize returns bin centers.
  class BitNormalizedDimension(val min: Double, val max: Double, precision: Int) extends NormalizedDimension {

    require(precision > 0 && precision < 32, "Precision (bits) must be in [1,31]")

    // (1L << precision) is equivalent to math.pow(2, precision).toLong
    private val bins = 1L << precision
    private val normalizer = bins / (max - min)
    private val denormalizer = (max - min) / bins

    override val maxIndex: Int = (bins - 1).toInt // note: call .toInt after subtracting 1 to avoid sign issues

    override def normalize(x: Double): Int =
      if (x >= max) { maxIndex } else { math.floor((x - min) * normalizer).toInt }

    override def denormalize(x: Int): Double =
      if (x >= maxIndex) { min + (maxIndex + 0.5d) * denormalizer } else { min + (x + 0.5d) * denormalizer }
  }

  case class NormalizedLat(precision: Int) extends BitNormalizedDimension(-90d, 90d, precision)

  case class NormalizedLon(precision: Int) extends BitNormalizedDimension(-180d, 180d, precision)

  case class NormalizedTime(precision: Int, override val max: Double) extends BitNormalizedDimension(0d, max, precision)

  // legacy normalization, doesn't correctly bin lower bound
  @deprecated("use BitNormalizedDimension instead")
  class SemiNormalizedDimension(val min: Double, val max: Double, precision: Long) extends NormalizedDimension {
    override val maxIndex: Int = precision.toInt
    override def normalize(x: Double): Int = math.ceil((x - min) / (max - min) * precision).toInt
    override def denormalize(x: Int): Double = if (x == 0) { min } else { (x - 0.5d) * (max - min) / precision + min }
  }

  @deprecated("use NormalizedLat instead")
  case class SemiNormalizedLat(precision: Long) extends SemiNormalizedDimension(-90d, 90d, precision)

  @deprecated("use NormalizedLon instead")
  case class SemiNormalizedLon(precision: Long) extends SemiNormalizedDimension(-180d, 180d, precision)

  @deprecated("use NormalizedTime instead")
  case class SemiNormalizedTime(precision: Long, override val max: Double)
      extends SemiNormalizedDimension(0d, max, precision)
}
| locationtech/geomesa | geomesa-z3/src/main/scala/org/locationtech/geomesa/curve/NormalizedDimension.scala | Scala | apache-2.0 | 3,296 |
package com.scalableQuality.quick.surface.output
// Entry point for emitting lazily-built text reports on stdout; delegates the
// actual buffered writing to BufferedWriteToStdout.
object WriteTextReportToStdout {
  def apply(
      textReports: List[() => List[String]]
  ): Unit = BufferedWriteToStdout(textReports)
}
| MouslihAbdelhakim/Quick | src/main/scala/com/scalableQuality/quick/surface/output/WriteTextReportToStdout.scala | Scala | apache-2.0 | 189 |
package org.http4s
package parser
import org.http4s.headers.{`Content-Range`, Range}
import org.http4s.headers.Range.SubRange
// Round-trip checks: render each header to its raw form and assert the
// header parser reproduces the original typed header.
class RangeParserSpec extends Http4sSpec {
  "RangeParser" should {
    "parse Range" in {
      val headers = Seq(
        Range(RangeUnit.Bytes, SubRange(0, 500)),
        Range(RangeUnit.Bytes, SubRange(0, 499), SubRange(500, 999), SubRange(1000, 1500)),
        Range(RangeUnit("page"), SubRange(0, 100)),
        Range(10),
        Range(-90)
      )

      forall(headers) { header =>
        HttpHeaderParser.parseHeader(header.toRaw) must be_\\/-(header)
      }
    }
  }

  // NOTE(review): this example sits outside the "RangeParser" should { ... }
  // context above — possibly an oversight; moving it would change the
  // registered example names, so it is left as-is.
  "parse Content-Range" in {
    val headers = Seq(
      `Content-Range`(RangeUnit.Bytes, SubRange(10, None), None),
      `Content-Range`(RangeUnit.Bytes, SubRange(0, 500), Some(500)),
      `Content-Range`(RangeUnit("page"), SubRange(0, 100), Some(100)),
      `Content-Range`(10),
      `Content-Range`(-90),
      `Content-Range`(SubRange(10, 30))
    )

    forall(headers) { header =>
      HttpHeaderParser.parseHeader(header.toRaw) must be_\\/-(header)
    }
  }
}
| m4dc4p/http4s | tests/src/test/scala/org/http4s/parser/RangeParserSpec.scala | Scala | apache-2.0 | 1,084 |
package cc.factorie.app.nlp.ner
import cc.factorie.app.nlp.Token
import cc.factorie.variable.CategoricalVectorVar
// Strategy interface for enriching tokens with lexicon-derived NER features.
// `featureFunc` maps a token to the feature variable the features are added to.
trait NerLexiconFeatures {
  def addLexiconFeatures(tokens: IndexedSeq[Token], featureFunc : (Token => CategoricalVectorVar[String]))
}
| patverga/factorie | src/main/scala/cc/factorie/app/nlp/ner/NerLexiconFeatures.scala | Scala | apache-2.0 | 258 |
package org.mystic.consumer
// Marker trait for Kafka stream consumers.
// NOTE(review): currently empty — presumably a placeholder for a future API.
trait KafkaStreamConsumer {
}
| MysterionRise/h2h-fantazy | src/main/scala/org/mystic/consumer/KafkaStreamConsumer.scala | Scala | mit | 60 |
package com.pirate.jacksparrow
import play.api.libs.ws.WSResponse
import scala.concurrent.Future
/**
* Created by pnagarjuna on 29/12/15.
*/
object PirateUtils {
  // Fetches the page at `link` asynchronously, following redirects and
  // presenting a desktop-Chrome User-Agent (some sites block default agents).
  def getPageHtml(link: String): Future[WSResponse] = {
    HttpHelper.client.url(link)
      .withHeaders("User-Agent" -> "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36")
      .withFollowRedirects(true).get()
  }
}
| pamu/olx-bikes | src/main/scala/com/pirate/jacksparrow/PirateUtils.scala | Scala | apache-2.0 | 444 |
package org.orbeon.dom
/**
* `IllegalAddException` is thrown when a node is added incorrectly
* to an `Element`.
*/
/** Raised when a node cannot be attached to a parent; the auxiliary
  * constructors build a descriptive message from the rejected node, its
  * intended parent, and the reason for the rejection.
  */
class IllegalAddException(reason: String) extends IllegalArgumentException(reason) {

  def this(parent: Element, node: Node, reason: String) =
    this(s"""The node \"$node\" could not be added to the element \"${parent.getQualifiedName}\" because: $reason""")

  def this(parent: Branch, node: Node, reason: String) =
    this(s"""The node \"$node\" could not be added to the branch \"${parent.getName}\" because: $reason""")
}
| brunobuzzi/orbeon-forms | dom/src/main/scala/org/orbeon/dom/IllegalAddException.scala | Scala | lgpl-2.1 | 622 |
/** Copyright 2015 TappingStone, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prediction.controller
import io.prediction.core.BaseDataSource
import io.prediction.core.BasePreparator
import org.apache.spark.SparkContext
import scala.reflect._
/** A helper concrete implementation of [[io.prediction.core.BasePreparator]]
  * that passes training data through without any special preparation. This can
  * be used in place for both [[PPreparator]] and [[LPreparator]].
  *
  * @tparam TD Training data class.
  * @group Preparator
  */
class IdentityPreparator[TD] extends BasePreparator[TD, TD] {
  // Identity: the training data is returned untouched.
  def prepareBase(sc: SparkContext, td: TD): TD = td
}
/** Companion object of [[IdentityPreparator]] that conveniently returns an
  * instance of the class of [[IdentityPreparator]] for use with
  * [[EngineFactory]].
  *
  * @group Preparator
  */
object IdentityPreparator {
  /** Produces an instance of the class of [[IdentityPreparator]].
    *
    * @param ds Instance of the class of the data source for this preparator.
    */
  // The data source argument exists only to pin down TD via inference;
  // its value is never used.
  def apply[TD](ds: Class[_ <: BaseDataSource[TD, _, _, _]]): Class[IdentityPreparator[TD]] =
    classOf[IdentityPreparator[TD]]
}
/** DEPRECATED. Use [[IdentityPreparator]] instead.
  *
  * @tparam TD Training data class.
  * @group Preparator
  */
// Kept only for source compatibility; behaves exactly like its parent.
@deprecated("Use IdentityPreparator instead.", "0.9.2")
class PIdentityPreparator[TD] extends IdentityPreparator[TD]

/** DEPRECATED. Use [[IdentityPreparator]] instead.
  *
  * @group Preparator
  */
@deprecated("Use IdentityPreparator instead.", "0.9.2")
object PIdentityPreparator {
  /** Produces an instance of the class of [[IdentityPreparator]].
    *
    * @param ds Instance of the class of the data source for this preparator.
    */
  def apply[TD](ds: Class[_ <: BaseDataSource[TD, _, _, _]]): Class[IdentityPreparator[TD]] =
    classOf[IdentityPreparator[TD]]
}
/** DEPRECATED. Use [[IdentityPreparator]] instead.
  *
  * @tparam TD Training data class.
  * @group Preparator
  */
// Kept only for source compatibility; behaves exactly like its parent.
@deprecated("Use IdentityPreparator instead.", "0.9.2")
class LIdentityPreparator[TD] extends IdentityPreparator[TD]

/** DEPRECATED. Use [[IdentityPreparator]] instead.
  *
  * @group Preparator
  */
@deprecated("Use IdentityPreparator instead.", "0.9.2")
object LIdentityPreparator {
  /** Produces an instance of the class of [[IdentityPreparator]].
    *
    * @param ds Instance of the class of the data source for this preparator.
    */
  def apply[TD](ds: Class[_ <: BaseDataSource[TD, _, _, _]]): Class[IdentityPreparator[TD]] =
    classOf[IdentityPreparator[TD]]
}
| ch33hau/PredictionIO | core/src/main/scala/io/prediction/controller/IdentityPreparator.scala | Scala | apache-2.0 | 3,107 |
package com.twitter.algebird
import org.scalatest._
import org.scalatest.{ PropSpec, Matchers }
import org.scalatest.prop.PropertyChecks
import org.scalacheck.{ Gen, Arbitrary }
import java.util.Arrays
// Property test: MinHasher32 signatures form a monoid. Signature equality is
// compared via the underlying byte arrays (arrays don't have structural ==).
class MinHasherTest extends PropSpec with PropertyChecks with Matchers {
  import BaseProperties._

  implicit val mhMonoid = new MinHasher32(0.5, 512)
  implicit val mhGen = Arbitrary {
    for (
      v <- Gen.choose(0, 10000)
    ) yield (mhMonoid.init(v))
  }

  property("MinHasher is a Monoid") {
    monoidLawsEq[MinHashSignature]{ (a, b) => a.bytes.toList == b.bytes.toList }
  }
}
// Accuracy tests: build two random sets with a known Jaccard similarity and
// check the MinHash estimate is within epsilon of the exact value.
class MinHasherSpec extends WordSpec with Matchers {
  val r = new java.util.Random

  // Generates a set pair at `similarity`, then compares exact vs estimated.
  def test[H](mh: MinHasher[H], similarity: Double, epsilon: Double) = {
    val (set1, set2) = randomSets(similarity)

    val exact = exactSimilarity(set1, set2)
    val sim = approxSimilarity(mh, set1, set2)
    val error: Double = math.abs(exact - sim)
    assert(error < epsilon)
  }

  // Builds two random sets whose expected Jaccard similarity is `similarity`
  // by mixing shared and per-set unique random elements.
  // NOTE(review): random-double collisions could nudge actual sizes slightly;
  // with ~10k draws this is negligible for the tolerances used.
  def randomSets(similarity: Double) = {
    val s = 10000
    val uniqueFraction = if (similarity == 1.0) 0.0 else (1 - similarity) / (1 + similarity)
    val sharedFraction = 1 - uniqueFraction
    val unique1 = 1.to((s * uniqueFraction).toInt).map{ i => math.random }.toSet
    val unique2 = 1.to((s * uniqueFraction).toInt).map{ i => math.random }.toSet

    val shared = 1.to((s * sharedFraction).toInt).map{ i => math.random }.toSet
    (unique1 ++ shared, unique2 ++ shared)
  }

  // Exact Jaccard similarity: |intersection| / |union|.
  def exactSimilarity[T](x: Set[T], y: Set[T]) = {
    (x & y).size.toDouble / (x ++ y).size
  }

  // MinHash estimate: fold each set into a signature, then compare them.
  def approxSimilarity[T, H](mh: MinHasher[H], x: Set[T], y: Set[T]) = {
    val sig1 = x.map{ l => mh.init(l.toString) }.reduce{ (a, b) => mh.plus(a, b) }
    val sig2 = y.map{ l => mh.init(l.toString) }.reduce{ (a, b) => mh.plus(a, b) }
    mh.similarity(sig1, sig2)
  }

  "MinHasher32" should {
    "measure 0.5 similarity in 1024 bytes with < 0.1 error" in {
      test(new MinHasher32(0.5, 1024), 0.5, 0.1)
    }

    "measure 0.8 similarity in 1024 bytes with < 0.05 error" in {
      test(new MinHasher32(0.8, 1024), 0.8, 0.05)
    }

    "measure 1.0 similarity in 1024 bytes with < 0.01 error" in {
      test(new MinHasher32(1.0, 1024), 1.0, 0.01)
    }
  }
}
| avibryant/algebird | algebird-test/src/test/scala/com/twitter/algebird/MinHasherTest.scala | Scala | apache-2.0 | 2,226 |
package com.fijimf
import org.joda.time.LocalDate
// Demo driver: amortizes a fixed-rate and an adjustable-rate mortgage and
// prints the first 365 schedule entries of each.
object Main {
  def main(args: Array[String]) {
    val frm: FixedRateMortgage = FixedRateMortgage(500000, 360, new LocalDate(2015, 2, 5), new LocalDate(2015, 3, 1), 500000, 360, 0.045)
    frm.amortize().take(365).foreach(println _)
    val arm: AdjustableRateMortgage = AdjustableRateMortgage(500000, 360, new LocalDate(2015, 2, 5), new LocalDate(2015, 3, 1), 500000, 360, 0.045, "LIBOR", 35, new LocalDate(2020, 3, 1), 1, 0.25, 8, 0.25)
    arm.amortize(new ArmContext).take(365).foreach(println _)
  }
}
| fijimf/msfcf | msfcf-core/src/main/scala/com/fijimf/Main.scala | Scala | mit | 578 |
/*
* Copyright 2014-2022 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.atlas.eval.graph
import java.awt.Color
import java.io.ByteArrayOutputStream
import java.time.Duration
import akka.http.scaladsl.model.HttpRequest
import akka.http.scaladsl.model.Uri
import com.netflix.atlas.chart.Colors
import com.netflix.atlas.chart.model.GraphDef
import com.netflix.atlas.chart.model.Layout
import com.netflix.atlas.chart.model.LineDef
import com.netflix.atlas.chart.model.LineStyle
import com.netflix.atlas.chart.model.MessageDef
import com.netflix.atlas.chart.model.Palette
import com.netflix.atlas.chart.model.TickLabelMode
import com.netflix.atlas.chart.model.VisionType
import com.netflix.atlas.core.db.Database
import com.netflix.atlas.core.model.DataExpr
import com.netflix.atlas.core.model.EvalContext
import com.netflix.atlas.core.model.ModelExtractors
import com.netflix.atlas.core.model.ResultSet
import com.netflix.atlas.core.model.StyleExpr
import com.netflix.atlas.core.model.SummaryStats
import com.netflix.atlas.core.model.TagKey
import com.netflix.atlas.core.model.TimeSeries
import com.netflix.atlas.core.util.Features
import com.netflix.atlas.core.util.Strings
import com.netflix.atlas.core.util.UnitPrefix
import com.typesafe.config.Config
import java.util.Locale
import scala.util.Try
case class Grapher(settings: DefaultSettings) {
import Grapher._
/**
* Create a graph config from a request object. This will look at the URI and try to
* extract some context from the headers.
*/
  def toGraphConfig(request: HttpRequest): GraphConfig = {
    // Parse the URI first, then allow the Host header to rewrite the query.
    val config = rewriteBasedOnHost(request, toGraphConfig(request.uri))
    // Detect browser clients from the user-agent; the flag is recorded on the
    // config for downstream rendering decisions.
    val agent = request.headers
      .find(_.is("user-agent"))
      .map(_.value())
      .getOrElse("unknown")
    val isBrowser = settings.browserAgentPattern.matcher(agent).find()
    config.copy(isBrowser = isBrowser)
  }
  // Applies host-based rewrites to the parsed expressions. If the rewrite fails,
  // the original query string is kept but the failed Try is carried along so the
  // error can be reported later.
  private def rewriteBasedOnHost(request: HttpRequest, config: GraphConfig): GraphConfig = {
    request.headers
      .find(_.is("host"))
      .map(_.value())
      .fold(config) { host =>
        val newExprs = config.parsedQuery.map { exprs =>
          settings.hostRewriter.rewrite(host, exprs)
        }
        val query = if (newExprs.isFailure) config.query else newExprs.get.mkString(",")
        config.copy(query = query, parsedQuery = newExprs)
      }
  }
/**
* Create a graph config from an Atlas URI.
*/
  def toGraphConfig(uri: Uri): GraphConfig = {
    val params = uri.query()
    val id = "default"
    // Feature gate controlling which operators are allowed; defaults to STABLE.
    val features = params
      .get("features")
      .map(v => Features.valueOf(v.toUpperCase(Locale.US)))
      .getOrElse(Features.STABLE)
    import com.netflix.atlas.chart.GraphConstants._
    // Axis settings can be supplied per Y-axis (e.g. `u.1`) with a plain
    // parameter (`u`) acting as the fallback; see getAxisParam.
    val axes = (0 to MaxYAxis).map(i => i -> newAxis(params, i)).toMap
    val vision = params.get("vision").map(v => VisionType.valueOf(v))
    val theme = params.get("theme").getOrElse(settings.theme)
    val palette = params.get("palette").getOrElse(settings.primaryPalette(theme))
    // Image-level rendering flags extracted from the query parameters.
    val flags = ImageFlags(
      title = params.get("title").filter(_ != ""),
      width = params.get("w").fold(settings.width)(_.toInt),
      height = params.get("h").fold(settings.height)(_.toInt),
      zoom = params.get("zoom").fold(1.0)(_.toDouble),
      axes = axes,
      axisPerLine = params.get("axis_per_line").contains("1"),
      showLegend = !params.get("no_legend").contains("1"),
      showLegendStats = !params.get("no_legend_stats").contains("1"),
      showOnlyGraph = params.get("only_graph").contains("1"),
      vision = vision.getOrElse(VisionType.normal),
      palette = palette,
      theme = theme,
      layout = Layout.create(params.get("layout").getOrElse("canvas"))
    )
    val q = params.get("q")
    if (q.isEmpty) {
      throw new IllegalArgumentException("missing required parameter 'q'")
    }
    val timezones = params.getAll("tz").reverse
    // Expression parsing failures are captured in the Try rather than thrown,
    // so the error can be rendered into the response later.
    val parsedQuery = Try {
      val vars = Map("tz" -> GraphConfig.getTimeZoneIds(settings, timezones).head)
      val exprs = settings.interpreter
        .execute(q.get, vars, features)
        .stack
        .reverse
        .flatMap {
          case ModelExtractors.PresentationType(s) =>
            s.perOffset
          case v =>
            val tpe = v.getClass.getSimpleName
            throw new IllegalArgumentException(s"expecting time series expr, found $tpe '$v'")
        }
      if (settings.simpleLegendsEnabled)
        SimpleLegends.generate(exprs)
      else
        exprs
    }
    GraphConfig(
      settings = settings,
      query = q.get,
      parsedQuery = parsedQuery,
      start = params.get("s"),
      end = params.get("e"),
      timezones = timezones,
      step = params.get("step"),
      flags = flags,
      format = params.get("format").getOrElse("png"),
      id = id,
      features = features,
      isBrowser = false,
      isAllowedFromBrowser = true,
      uri = uri.toString
    )
  }
  // Looks up an axis parameter, preferring the per-axis form `k.id` over the
  // plain `k` fallback.
  private def getAxisParam(params: Uri.Query, k: String, id: Int): Option[String] = {
    params.get(s"$k.$id").orElse(params.get(k))
  }
  // Builds the Axis settings for axis `id` from the request parameters.
  private def newAxis(params: Uri.Query, id: Int): Axis = {
    // Prefer the scale parameter if present. If not, then fallback to look at
    // the boolean `o` parameter for backwards compatibility.
    val scale = getAxisParam(params, "scale", id).orElse {
      if (getAxisParam(params, "o", id).contains("1")) Some("log") else None
    }
    Axis(
      upper = getAxisParam(params, "u", id),
      lower = getAxisParam(params, "l", id),
      scale = scale,
      stack = getAxisParam(params, "stack", id).contains("1"),
      ylabel = getAxisParam(params, "ylabel", id).filter(_ != ""),
      tickLabels = getAxisParam(params, "tick_labels", id),
      palette = params.get(s"palette.$id"),
      sort = getAxisParam(params, "sort", id),
      order = getAxisParam(params, "order", id)
    )
  }
  /**
   * Evaluate the expressions and render a chart using the config from the uri and the
   * specified data.
   */
  def evalAndRender(uri: Uri, db: Database): Result = evalAndRender(toGraphConfig(uri), db)
  /**
   * Evaluate the expressions and render a chart using the config from the uri and the
   * specified data.
   */
  def evalAndRender(uri: Uri, data: List[TimeSeries]): Result =
    evalAndRender(toGraphConfig(uri), data)
  /**
   * Evaluate the expressions and render a chart using the config from the uri and the
   * specified data. The data must have already been pre-processed to only include relevant
   * results for each DataExpr. It is up to the user to ensure the DataExprs in the map
   * match those that will be extracted from the uri.
   */
  def evalAndRender(uri: Uri, data: DataMap): Result = evalAndRender(toGraphConfig(uri), data)
  /** Evaluate the expressions and render a chart using the specified config and data. */
  def evalAndRender(config: GraphConfig, db: Database): Result = {
    // Execute each distinct data expression against the database.
    val dataExprs = config.exprs.flatMap(_.expr.dataExprs).distinct
    val result = dataExprs.map(expr => expr -> db.execute(config.evalContext, expr)).toMap
    evalAndRender(config, result)
  }
  /** Evaluate the expressions and render a chart using the specified config and data. */
  def evalAndRender(config: GraphConfig, data: List[TimeSeries]): Result = {
    // Evaluate each distinct data expression against the in-memory series.
    val dataExprs = config.exprs.flatMap(_.expr.dataExprs).distinct
    val result = dataExprs.map(expr => expr -> eval(config.evalContext, expr, data)).toMap
    evalAndRender(config, result)
  }
private def eval(
context: EvalContext,
expr: DataExpr,
data: List[TimeSeries]
): List[TimeSeries] = {
val matches = data.filter(t => expr.query.matches(t.tags))
val offset = expr.offset.toMillis
if (offset == 0) expr.eval(context, matches).data
else {
val offsetContext = context.withOffset(expr.offset.toMillis)
expr.eval(offsetContext, matches).data.map { t =>
t.offset(offset)
}
}
}
  /**
   * Evaluate the expressions and render a chart using the specified config and data. The data
   * must have already been pre-processed to only include relevant results for each DataExpr. It
   * is up to the user to ensure the DataExprs in the map match those that will be extracted from
   * the config.
   */
  def evalAndRender(config: GraphConfig, data: DataMap): Result = {
    val graphDef = create(config, _.expr.eval(config.evalContext, data))
    // Encode the chart with the engine selected by the config's format.
    val baos = new ByteArrayOutputStream
    config.engine.write(graphDef, baos)
    Result(config, baos.toByteArray)
  }
  /**
   * Render a chart using the config from the uri and the specified data. The data must
   * have already been pre-processed to only include relevant results for each DataExpr.
   * It is up to the user to ensure the DataExprs in the map match those that will be
   * extracted from the uri.
   */
  def render(uri: Uri, data: StyleMap): Result = render(toGraphConfig(uri), data)
  /**
   * Render a chart using the specified config and data. It is up to the user to ensure the
   * StyleExprs in the map match those that will be extracted from the config.
   */
  def render(config: GraphConfig, data: StyleMap): Result = {
    // Missing expressions render as empty result sets rather than failing.
    val graphDef = create(config, s => ResultSet(s.expr, data.getOrElse(s, Nil)))
    val baos = new ByteArrayOutputStream
    config.engine.write(graphDef, baos)
    Result(config, baos.toByteArray)
  }
  /** Create a new graph definition based on the specified config and data. */
  def create(config: GraphConfig, eval: StyleExpr => ResultSet): GraphDef = {
    // Warnings (e.g. invalid sort modes) are accumulated across all plots.
    val warnings = List.newBuilder[String]
    // Group expressions by the Y-axis they are assigned to (default axis 0).
    val plotExprs = config.exprs.groupBy(_.axis.getOrElse(0))
    val multiY = plotExprs.size > 1
    val palette = newPalette(config.flags.palette)
    val shiftPalette = newPalette(settings.offsetPalette(config.flags.theme))
    val start = config.startMillis
    val end = config.endMillis
    val plots = plotExprs.toList.sortWith(_._1 < _._1).map {
      case (yaxis, exprs) =>
        val axisCfg = config.flags.axes(yaxis)
        val dfltStyle = if (axisCfg.stack) LineStyle.STACK else LineStyle.LINE
        // Formatter used for the legend stats; binary tick labels use 1024-based prefixes.
        val statFormatter = axisCfg.tickLabelMode match {
          case TickLabelMode.BINARY =>
            (v: Double) => UnitPrefix.binary(v).format(v)
          case _ =>
            (v: Double) => UnitPrefix.decimal(v).format(v)
        }
        val axisPalette = axisCfg.palette.fold(palette) { v =>
          newPalette(v)
        }
        var messages = List.empty[String]
        val lines = exprs.flatMap { s =>
          val result = eval(s)
          // Pick the last non empty message to appear. Right now they are only used
          // as a test for providing more information about the state of filtering. These
          // can quickly get complicated when used with other features. For example,
          // sorting can mix and match lines across multiple expressions. Also binary
          // math operations that combine the results of multiple filter expressions or
          // multi-level group by with filtered input. For now this is just an
          // experiment for the common simple case to see how it impacts usability
          // when dealing with filter expressions that remove some of the lines.
          if (result.messages.nonEmpty) messages = result.messages.take(1)
          val ts = result.data
          // Attach summary stats and expand the legend template for each series.
          val labelledTS = ts.map { t =>
            val stats = SummaryStats(t.data, start, end)
            val offset = Strings.toString(Duration.ofMillis(s.offset))
            val outputTags = t.tags + (TagKey.offset -> offset)
            // Additional stats can be used for substitutions, but should not be included
            // as part of the output tag map
            val legendTags = outputTags ++ stats.tags(statFormatter)
            val newT = t.withTags(outputTags)
            newT.withLabel(s.legend(newT.label, legendTags)) -> stats
          }
          // Color selection precedence: explicit per-expr palette, explicit color,
          // then the shift palette for offsets or the axis palette.
          val linePalette = s.palette.map(newPalette).getOrElse {
            s.color
              .map { c =>
                val p = Palette.singleColor(c).iterator
                (_: String) => p.next()
              }
              .getOrElse {
                if (s.offset > 0L) shiftPalette else axisPalette
              }
          }
          val lineDefs = labelledTS.sortWith(_._1.label < _._1.label).map {
            case (t, stats) =>
              val color = s.color.getOrElse {
                val c = linePalette(t.label)
                // Alpha setting if present will set the alpha value for the color automatically
                // assigned by the palette. If using an explicit color it will have no effect as the
                // alpha can be set directly using an ARGB hex format for the color.
                s.alpha.fold(c)(a => Colors.withAlpha(c, a))
              }
              LineDef(
                data = t,
                groupByKeys = s.expr.finalGrouping,
                color = color,
                lineStyle = s.lineStyle.fold(dfltStyle)(s => LineStyle.valueOf(s.toUpperCase)),
                lineWidth = s.lineWidth,
                legendStats = stats
              )
          }
          // Lines must be sorted for presentation after the colors have been applied
          // using the palette. The colors selected should be stable regardless of the
          // sort order that is applied. Otherwise colors would change each time a user
          // changed the sort.
          val sorted = sort(warnings, s.sortBy, s.useDescending, lineDefs)
          s.limit.fold(sorted)(n => sorted.take(n))
        }
        // Apply sort based on URL parameters. This will take precedence over
        // local sort on an expression.
        val sortedLines = sort(warnings, axisCfg.sort, axisCfg.order.contains("desc"), lines)
        axisCfg.newPlotDef(sortedLines ::: messages.map(s => MessageDef(s"... $s ...")), multiY)
    }
    config.newGraphDef(plots, warnings.result())
  }
/**
* Creates a new palette and optionally changes it to use the label hash for
* selecting the color rather than choosing the next available color in the
* palette. Hash selection is useful to ensure that the same color is always
* used for a given label even on separate graphs. However, it also means
* that collisions are more likely and that the same color may be used for
* different labels even with a small number of lines.
*
* Hash mode will be used if the palette name is prefixed with "hash:".
*/
private def newPalette(mode: String): String => Color = {
val prefix = "hash:"
if (mode.startsWith(prefix)) {
val pname = mode.substring(prefix.length)
val p = Palette.create(pname)
v => p.colors(v.hashCode)
} else {
val p = Palette.create(mode).iterator
_ => p.next()
}
}
  // Sorts lines for presentation. `sortBy` selects the sort key (legend or a
  // legend statistic); an unknown mode records a warning and falls back to
  // sorting by legend.
  private def sort(
    warnings: scala.collection.mutable.Builder[String, List[String]],
    sortBy: Option[String],
    useDescending: Boolean,
    lines: List[LineDef]
  ): List[LineDef] = {
    // The default is sort by legend in ascending order. If the defaults have been explicitly
    // changed, then the explicit values should be used. Since the sort by param is used to
    // short circuit if there is nothing to do, it will get set to legend explicitly here if
    // the order has been changed to descending.
    val by = if (useDescending) Some(sortBy.getOrElse("legend")) else sortBy
    by.fold(lines) { mode =>
      val cmp: Function2[LineDef, LineDef, Boolean] = mode match {
        case "legend" =>
          (a, b) => compare(useDescending, a.data.label, b.data.label)
        case "min" =>
          (a, b) => compare(useDescending, a.legendStats.min, b.legendStats.min)
        case "max" =>
          (a, b) => compare(useDescending, a.legendStats.max, b.legendStats.max)
        case "avg" =>
          (a, b) => compare(useDescending, a.legendStats.avg, b.legendStats.avg)
        case "count" =>
          (a, b) => compare(useDescending, a.legendStats.count, b.legendStats.count)
        case "total" =>
          (a, b) => compare(useDescending, a.legendStats.total, b.legendStats.total)
        case "last" =>
          (a, b) => compare(useDescending, a.legendStats.last, b.legendStats.last)
        case order =>
          warnings += s"Invalid sort mode '$order'. Using default of 'legend'."
          (a, b) => compare(useDescending, a.data.label, b.data.label)
      }
      lines.sortWith(cmp)
    }
  }
private def compare(desc: Boolean, a: String, b: String): Boolean = {
if (desc) a > b else a < b
}
private def compare(desc: Boolean, a: Int, b: Int): Boolean = {
if (desc) a > b else a < b
}
  // Orders two double stats for line sorting; descending flips the operator.
  private def compare(desc: Boolean, a: Double, b: Double): Boolean = {
    // Note: NaN values are special and should always be sorted last. This is the default
    // behavior of `JDouble.compare` for strictly greater than or less than. However it does
    // mean that you cannot change the order by sorting one way and then reversing because that
    // would move the NaN values to the beginning.
    // https://github.com/Netflix/atlas/issues/586
    if (desc) compare(_ > _, a, b) else compare(_ < _, a, b)
  }
private def compare(op: (Double, Double) => Boolean, a: Double, b: Double): Boolean = {
// Do not use op directly because NaN values can cause contract errors with the sort:
// https://github.com/Netflix/atlas/issues/405
if (a.isNaN && b.isNaN)
false
else if (a.isNaN)
false // b should come first as it has a value
else if (b.isNaN)
true // a should come first as it has a value
else
op(a, b)
}
}
object Grapher {
  // Convenience factory: build a Grapher from raw Typesafe config.
  def apply(root: Config): Grapher = Grapher(DefaultSettings(root))
  /**
   * Rendered graph result.
   *
   * @param config
   *     The config used to generate the graph.
   * @param data
   *     Rendered data. The format of this data will depend on the config settings
   *     for the graph.
   */
  case class Result(config: GraphConfig, data: Array[Byte]) {
    // Convenience accessor for text formats (e.g. json/csv outputs).
    def dataString: String = new String(data, "UTF-8")
  }
}
| Netflix/atlas | atlas-eval/src/main/scala/com/netflix/atlas/eval/graph/Grapher.scala | Scala | apache-2.0 | 18,604 |
package org.bitcoins.core.serializers.p2p.messages
import org.bitcoins.core.bloom.BloomUpdateNone
import org.bitcoins.core.number.{UInt32, UInt64}
import org.bitcoins.core.protocol.CompactSizeUInt
import org.bitcoins.core.util.BytesUtil
import org.bitcoins.testkitcore.util.BitcoinSUnitTest
/** Created by chris on 7/20/16.
*/
// Round-trip test for the `filterload` P2P message serializer: the known hex
// payload must decode into the expected bloom filter fields and re-encode to
// the identical hex string.
class RawFilterLoadMessageSerializerTest extends BitcoinSUnitTest {
  "RawFilterLoadMessageSerializer" must "deserialize and serialize a filter load message" in {
    //example from the bitcoin developer reference
    //https://bitcoin.org/en/developer-reference#filterload
    val hex = "02b50f0b0000000000000000"
    val filterLoadMsg = RawFilterLoadMessageSerializer.read(hex)
    // filter is 2 bytes ("b50f"), 11 hash functions, zero tweak, no update flag
    filterLoadMsg.bloomFilter.filterSize must be(CompactSizeUInt(UInt64(2)))
    BytesUtil.encodeHex(filterLoadMsg.bloomFilter.data) must be("b50f")
    filterLoadMsg.bloomFilter.hashFuncs must be(UInt32(11))
    filterLoadMsg.bloomFilter.tweak must be(UInt32.zero)
    filterLoadMsg.bloomFilter.flags must be(BloomUpdateNone)
    RawFilterLoadMessageSerializer.write(filterLoadMsg).toHex must be(hex)
  }
}
| bitcoin-s/bitcoin-s | core-test/src/test/scala/org/bitcoins/core/serializers/p2p/messages/RawFilterLoadMessageSerializerTest.scala | Scala | mit | 1,123 |
package org.jetbrains.sbt
package project
import java.io.File
import com.intellij.openapi.externalSystem.model.project._
import com.intellij.openapi.externalSystem.model.task.{ExternalSystemTaskId, ExternalSystemTaskNotificationEvent, ExternalSystemTaskNotificationListener}
import com.intellij.openapi.externalSystem.model.{DataNode, ExternalSystemException}
import com.intellij.openapi.externalSystem.service.project.ExternalSystemProjectResolver
import com.intellij.openapi.module.StdModuleTypes
import com.intellij.openapi.roots.DependencyScope
import com.intellij.openapi.util.io.FileUtil
import org.jetbrains.plugins.scala.project.Version
import org.jetbrains.sbt.project.SbtProjectResolver._
import org.jetbrains.sbt.project.data._
import org.jetbrains.sbt.project.module.SbtModuleType
import org.jetbrains.sbt.project.settings._
import org.jetbrains.sbt.project.structure._
import org.jetbrains.sbt.resolvers.SbtResolver
import org.jetbrains.sbt.structure.XmlSerializer._
import org.jetbrains.sbt.{structure => sbtStructure}
import scala.collection.immutable.HashMap
/**
* @author Pavel Fatin
*/
class SbtProjectResolver extends ExternalSystemProjectResolver[SbtExecutionSettings] with ExternalSourceRootResolution {
private var runner: SbtRunner = null
protected var taskListener: TaskListener = SilentTaskListener
def resolveProjectInfo(id: ExternalSystemTaskId,
wrongProjectPathDontUseIt: String,
isPreview: Boolean,
settings: SbtExecutionSettings,
listener: ExternalSystemTaskNotificationListener): DataNode[ProjectData] = {
val root = {
val file = new File(settings.realProjectPath)
if (file.isDirectory) file.getPath else file.getParent
}
runner = new SbtRunner(settings.vmExecutable, settings.vmOptions, settings.environment,
settings.customLauncher, settings.customSbtStructureFile)
taskListener = new ExternalTaskListener(listener, id)
var warnings = new StringBuilder()
val xml = runner.read(new File(root), !isPreview, settings.resolveClassifiers, settings.resolveSbtClassifiers) { message =>
if (message.startsWith("[error] ") || message.startsWith("[warn] ")) {
warnings ++= message
}
listener.onStatusChange(new ExternalSystemTaskNotificationEvent(id, message.trim))
} match {
case Left(errors) => errors match {
case _ : SbtRunner.ImportCancelledException => return null
case _ => throw new ExternalSystemException(errors)
}
case Right(node) => node
}
if (warnings.nonEmpty) {
listener.onTaskOutput(id, WarningMessage(warnings.toString), false)
}
val data = xml.deserialize[sbtStructure.StructureData].right.get
convert(root, data, settings.jdk).toDataNode
}
  // Converts the sbt structure dump into the External System node graph:
  // one project node with sbt metadata, libraries, content modules, build
  // modules and shared-source modules attached.
  private def convert(root: String, data: sbtStructure.StructureData, jdk: Option[String]): Node[ProjectData] = {
    val projects = data.projects
    // Root project is the one whose base matches the import root, falling back
    // to the first project in the dump.
    val project = data.projects.find(p => FileUtil.filesEqual(p.base, new File(root)))
      .orElse(data.projects.headOption)
      .getOrElse(throw new RuntimeException("No root project found"))
    val projectNode = new ProjectNode(project.name, root, root)
    val basePackages = projects.flatMap(_.basePackages).distinct
    val javacOptions = project.java.map(_.options).getOrElse(Seq.empty)
    val sbtVersion = data.sbtVersion
    // Android target SDK takes precedence over an explicitly configured JDK name.
    val projectJdk = project.android.map(android => Android(android.targetVersion))
      .orElse(jdk.map(JdkByName))
    projectNode.add(new SbtProjectNode(basePackages, projectJdk, javacOptions, sbtVersion, root))
    val newPlay2Data = projects.flatMap(p => p.play2.map(d => (p.id, p.base, d)))
    projectNode.add(new Play2ProjectNode(Play2OldStructureAdapter(newPlay2Data)))
    val libraryNodes = createLibraries(data, projects)
    projectNode.addAll(libraryNodes)
    val moduleFilesDirectory = new File(root + "/" + Sbt.ModulesDirectory)
    val moduleNodes = createModules(projects, libraryNodes, moduleFilesDirectory)
    projectNode.addAll(moduleNodes)
    createModuleDependencies(projects, moduleNodes)
    val projectToModuleNode: Map[sbtStructure.ProjectData, ModuleNode] = projects.zip(moduleNodes).toMap
    val sharedSourceModules = createSharedSourceModules(projectToModuleNode, libraryNodes, moduleFilesDirectory)
    projectNode.addAll(sharedSourceModules)
    projectNode.addAll(projects.map(createBuildModule(_, moduleFilesDirectory, data.localCachePath)))
    projectNode
  }
def createModuleDependencies(projects: Seq[sbtStructure.ProjectData], moduleNodes: Seq[ModuleNode]): Unit = {
projects.zip(moduleNodes).foreach { case (moduleProject, moduleNode) =>
moduleProject.dependencies.projects.foreach { dependencyId =>
val dependency = moduleNodes.find(_.getId == dependencyId.project).getOrElse(
throw new ExternalSystemException("Cannot find project dependency: " + dependencyId.project))
val data = new ModuleDependencyNode(moduleNode, dependency)
data.setScope(scopeFor(dependencyId.configuration))
data.setExported(true)
moduleNode.add(data)
}
}
}
  // Creates one IDEA module per sbt project, attaching content roots, library
  // dependencies, compiler settings, android facets and unmanaged jars.
  def createModules(projects: Seq[sbtStructure.ProjectData], libraryNodes: Seq[LibraryNode], moduleFilesDirectory: File): Seq[ModuleNode] = {
    val unmanagedSourcesAndDocsLibrary = libraryNodes.map(_.data).find(_.getExternalName == Sbt.UnmanagedSourcesAndDocsName)
    projects.map { project =>
      val moduleNode = createModule(project, moduleFilesDirectory)
      val contentRootNode = createContentRoot(project)
      // Android apklib directories contribute extra source/resource roots.
      project.android.foreach(a => a.apklibs.foreach(addApklibDirs(contentRootNode, _)))
      moduleNode.add(contentRootNode)
      moduleNode.addAll(createLibraryDependencies(project.dependencies.modules)(moduleNode, libraryNodes.map(_.data)))
      moduleNode.add(createModuleExtData(project))
      moduleNode.addAll(project.android.map(createFacet(project, _)).toSeq)
      moduleNode.addAll(createUnmanagedDependencies(project.dependencies.jars)(moduleNode))
      // Shared docs/sources library (if any) is attached to every module.
      unmanagedSourcesAndDocsLibrary foreach { lib =>
        val dependency = new LibraryDependencyNode(moduleNode, lib, LibraryLevel.MODULE)
        dependency.setScope(DependencyScope.COMPILE)
        moduleNode.add(dependency)
      }
      moduleNode
    }
  }
def createLibraries(data: sbtStructure.StructureData, projects: Seq[sbtStructure.ProjectData]): Seq[LibraryNode] = {
val repositoryModules = data.repository.map(_.modules).getOrElse(Seq.empty)
val (modulesWithoutBinaries, modulesWithBinaries) = repositoryModules.partition(_.binaries.isEmpty)
val otherModuleIds = projects.flatMap(_.dependencies.modules.map(_.id)).toSet --
repositoryModules.map(_.id).toSet
val libs = modulesWithBinaries.map(createResolvedLibrary) ++ otherModuleIds.map(createUnresolvedLibrary)
val modulesWithDocumentation = modulesWithoutBinaries.filter(m => m.docs.nonEmpty || m.sources.nonEmpty)
if (modulesWithDocumentation.isEmpty) return libs
val unmanagedSourceLibrary = new LibraryNode(Sbt.UnmanagedSourcesAndDocsName, true)
unmanagedSourceLibrary.addPaths(LibraryPathType.DOC, modulesWithDocumentation.flatMap(_.docs).map(_.path))
unmanagedSourceLibrary.addPaths(LibraryPathType.SOURCE, modulesWithDocumentation.flatMap(_.sources).map(_.path))
libs :+ unmanagedSourceLibrary
}
private def createModuleExtData(project: sbtStructure.ProjectData): ModuleExtNode = {
val scalaVersion = project.scala.map(s => Version(s.version))
val scalacClasspath = project.scala.fold(Seq.empty[File])(s => s.compilerJar +: s.libraryJar +: s.extraJars)
val scalacOptions = project.scala.fold(Seq.empty[String])(_.options)
val javacOptions = project.java.fold(Seq.empty[String])(_.options)
val jdk = project.android.map(android => Android(android.targetVersion))
.orElse(project.java.flatMap(java => java.home.map(JdkByHome)))
new ModuleExtNode(scalaVersion, scalacClasspath, scalacOptions, jdk, javacOptions)
}
  // Builds the android facet node describing the module's android configuration.
  private def createFacet(project: sbtStructure.ProjectData, android: sbtStructure.AndroidData): AndroidFacetNode = {
    new AndroidFacetNode(android.targetVersion, android.manifest, android.apk,
                         android.res, android.assets, android.gen, android.libs,
                         android.isLibrary, android.proguardConfig)
  }
  // Placeholder library for a dependency id not found in the repository dump.
  private def createUnresolvedLibrary(moduleId: sbtStructure.ModuleIdentifier): LibraryNode = {
    val module = sbtStructure.ModuleData(moduleId, Set.empty, Set.empty, Set.empty)
    createLibrary(module, resolved = false)
  }
  // Library backed by actual binaries/docs/sources from the repository.
  private def createResolvedLibrary(module: sbtStructure.ModuleData): LibraryNode = {
    createLibrary(module, resolved = true)
  }
  // Common library construction: name from the module id plus all known paths.
  private def createLibrary(module: sbtStructure.ModuleData, resolved: Boolean): LibraryNode = {
    val result = new LibraryNode(nameFor(module.id), resolved)
    result.addPaths(LibraryPathType.BINARY, module.binaries.map(_.path).toSeq)
    result.addPaths(LibraryPathType.DOC, module.docs.map(_.path).toSeq)
    result.addPaths(LibraryPathType.SOURCE, module.sources.map(_.path).toSeq)
    result
  }
private def nameFor(id: sbtStructure.ModuleIdentifier) = {
val classifierOption = if (id.classifier.isEmpty) None else Some(id.classifier)
s"${id.organization}:${id.name}:${id.revision}" + classifierOption.map(":"+_).getOrElse("") + s":${id.artifactType}"
}
  // Creates the bare module node for an sbt project with per-scope compile
  // output paths (project-level inheritance is disabled).
  private def createModule(project: sbtStructure.ProjectData, moduleFilesDirectory: File): ModuleNode = {
    // TODO use both ID and Name when related flaws in the External System will be fixed
    // TODO explicit canonical path is needed until IDEA-126011 is fixed
    val result = new ModuleNode(StdModuleTypes.JAVA.getId, project.id, project.id,
      moduleFilesDirectory.path, project.base.canonicalPath)
    result.setInheritProjectCompileOutputPath(false)
    project.configurations.find(_.id == "compile").foreach { configuration =>
      result.setCompileOutputPath(ExternalSystemSourceType.SOURCE, configuration.classes.path)
    }
    project.configurations.find(_.id == "test").foreach { configuration =>
      result.setCompileOutputPath(ExternalSystemSourceType.TEST, configuration.classes.path)
    }
    result
  }
  // Registers source/resource roots (including integration-test "it" scope as
  // test roots) and excluded directories under the project base.
  private def createContentRoot(project: sbtStructure.ProjectData): ContentRootNode = {
    val productionSources = validRootPathsIn(project, "compile")(_.sources)
    val productionResources = validRootPathsIn(project, "compile")(_.resources)
    val testSources = validRootPathsIn(project, "test")(_.sources) ++ validRootPathsIn(project, "it")(_.sources)
    val testResources = validRootPathsIn(project, "test")(_.resources) ++ validRootPathsIn(project, "it")(_.resources)
    val result = new ContentRootNode(project.base.path)
    result.storePaths(ExternalSystemSourceType.SOURCE, productionSources)
    result.storePaths(ExternalSystemSourceType.RESOURCE, productionResources)
    result.storePaths(ExternalSystemSourceType.TEST, testSources)
    result.storePaths(ExternalSystemSourceType.TEST_RESOURCE, testResources)
    getExcludedTargetDirs(project).foreach { path =>
      result.storePath(ExternalSystemSourceType.EXCLUDED, path.path)
    }
    result
  }
  // We cannot always exclude the whole ./target/ directory because of
  // the generated sources, so we resort to an heuristics.
  private def getExcludedTargetDirs(project: sbtStructure.ProjectData): Seq[File] = {
    // If the build explicitly lists excludes, trust those.
    val extractedExcludes = project.configurations.flatMap(_.excludes)
    if (extractedExcludes.nonEmpty)
      return extractedExcludes.distinct
    // Otherwise, keep managed (generated) directories visible and exclude the rest.
    val managedDirectories = project.configurations
      .flatMap(configuration => configuration.sources ++ configuration.resources)
      .filter(_.managed)
      .map(_.file)
    val defaultNames = Set("main", "test")
    val relevantDirectories = managedDirectories.filter(file => file.exists || !defaultNames.contains(file.getName))
    def isRelevant(f: File): Boolean = !relevantDirectories.forall(_.isOutsideOf(f))
    if (isRelevant(project.target)) {
      // If we can't exclude the target directory, go one level deeper (which may hit resolution-cache and streams)
      Option(project.target.listFiles()).toList.flatten.filter {
        child => child.isDirectory && !isRelevant(child)
      }
    } else List(project.target)
  }
  // Creates the synthetic "-build" module representing the project/ definition
  // of an sbt build, with its own classpath library and sbt metadata node.
  private def createBuildModule(project: sbtStructure.ProjectData, moduleFilesDirectory: File, localCachePath: Option[String]): ModuleNode = {
    val id = project.id + Sbt.BuildModuleSuffix
    val name = project.name + Sbt.BuildModuleSuffix
    val buildRoot = project.base / Sbt.ProjectDirectory
    // TODO use both ID and Name when related flaws in the External System will be fixed
    // TODO explicit canonical path is needed until IDEA-126011 is fixed
    val result = new ModuleNode(SbtModuleType.instance.getId, id, id, moduleFilesDirectory.path, buildRoot.canonicalPath)
    result.setInheritProjectCompileOutputPath(false)
    result.setCompileOutputPath(ExternalSystemSourceType.SOURCE, (buildRoot / Sbt.TargetDirectory / "idea-classes").path)
    result.setCompileOutputPath(ExternalSystemSourceType.TEST, (buildRoot / Sbt.TargetDirectory / "idea-test-classes").path)
    result.add(createBuildContentRoot(buildRoot))
    // Module-level library exposing the sbt build classpath (only existing files).
    val library = {
      val build = project.build
      val classes = build.classes.filter(_.exists).map(_.path)
      val docs = build.docs.filter(_.exists).map(_.path)
      val sources = build.sources.filter(_.exists).map(_.path)
      createModuleLevelDependency(Sbt.BuildLibraryName, classes, docs, sources, DependencyScope.COMPILE)(result)
    }
    result.add(library)
    result.add(createSbtModuleData(project, localCachePath))
    result
  }
private def createBuildContentRoot(buildRoot: File): ContentRootNode = {
val result = new ContentRootNode(buildRoot.path)
val sourceDirs = Seq(buildRoot) // , base << 1
val exludedDirs = Seq(
buildRoot / Sbt.TargetDirectory,
buildRoot / Sbt.ProjectDirectory / Sbt.TargetDirectory)
result.storePaths(ExternalSystemSourceType.SOURCE, sourceDirs.map(_.path))
result.storePaths(ExternalSystemSourceType.EXCLUDED, exludedDirs.map(_.path))
result
}
  // Builds the sbt metadata node carrying the build's implicit imports and resolvers.
  def createSbtModuleData(project: sbtStructure.ProjectData, localCachePath: Option[String]): SbtModuleNode = {
    // Strips the first 7 characters of each import line — presumably the
    // "import " prefix. NOTE(review): assumes every line starts with it; confirm
    // against the structure extractor's output.
    val imports = project.build.imports.flatMap(_.trim.substring(7).split(", "))
    val resolvers = project.resolvers map { r => new SbtResolver(SbtResolver.Kind.Maven, r.name, r.root) }
    new SbtModuleNode(imports, resolvers + SbtResolver.localCacheResolver(localCachePath))
  }
private def validRootPathsIn(project: sbtStructure.ProjectData, scope: String)
(selector: sbtStructure.ConfigurationData => Seq[sbtStructure.DirectoryData]): Seq[String] = {
project.configurations
.find(_.id == scope)
.map(selector)
.getOrElse(Seq.empty)
.map(_.file)
.filter(!_.isOutsideOf(project.base))
.map(_.path)
}
  // Resolves each module dependency to an already-created project-level library
  // node; a missing library is a hard error.
  protected def createLibraryDependencies(dependencies: Seq[sbtStructure.ModuleDependencyData])
                                         (moduleData: ModuleData, libraries: Seq[LibraryData]): Seq[LibraryDependencyNode] = {
    dependencies.map { dependency =>
      val name = nameFor(dependency.id)
      val library = libraries.find(_.getExternalName == name).getOrElse(
        throw new ExternalSystemException("Library not found: " + name))
      val data = new LibraryDependencyNode(moduleData, library, LibraryLevel.PROJECT)
      data.setScope(scopeFor(dependency.configurations))
      data
    }
  }
  // Groups unmanaged jars by dependency scope; each scope becomes one
  // module-level "unmanaged-jars" library (scope suffix added for non-compile).
  private def createUnmanagedDependencies(dependencies: Seq[sbtStructure.JarDependencyData])
                                         (moduleData: ModuleData): Seq[LibraryDependencyNode] = {
    dependencies.groupBy(it => scopeFor(it.configurations)).toSeq.map { case (scope, dependency) =>
      val name = scope match {
        case DependencyScope.COMPILE => Sbt.UnmanagedLibraryName
        case it => s"${Sbt.UnmanagedLibraryName}-${it.getDisplayName.toLowerCase}"
      }
      val files = dependency.map(_.file.path)
      createModuleLevelDependency(name, files, Seq.empty, Seq.empty, scope)(moduleData)
    }
  }
/** Creates a module-level library node with the given binary/doc/source roots
  * and wraps it in a dependency node carrying the given scope. */
private def createModuleLevelDependency(name: String, classes: Seq[String], docs: Seq[String], sources: Seq[String], scope: DependencyScope)
                                       (moduleData: ModuleData): LibraryDependencyNode = {
  val library = new LibraryNode(name, resolved = true)
  library.addPaths(LibraryPathType.BINARY, classes)
  library.addPaths(LibraryPathType.DOC, docs)
  library.addPaths(LibraryPathType.SOURCE, sources)
  val dependency = new LibraryDependencyNode(moduleData, library, LibraryLevel.MODULE)
  dependency.setScope(scope)
  dependency
}
/** Registers the source, generated-source and resource roots of an APK lib
  * on the given content root node. */
private def addApklibDirs(contentRootNode: ContentRootNode, apklib: sbtStructure.ApkLib): Unit = {
  val roots = Seq(
    ExternalSystemSourceType.SOURCE -> apklib.sources,
    ExternalSystemSourceType.SOURCE_GENERATED -> apklib.gen,
    ExternalSystemSourceType.RESOURCE -> apklib.resources)
  for ((sourceType, directory) <- roots)
    contentRootNode.storePath(sourceType, directory.canonicalPath)
}
/** Maps a project dependency's SBT configurations to an IDEA dependency scope.
  * Configurations are checked in precedence order (Compile, Runtime, Test,
  * Provided); COMPILE is the default when nothing matches. */
protected def scopeFor(configurations: Seq[sbtStructure.Configuration]): DependencyScope = {
  val ids = configurations.toSet
  val byPrecedence = Seq(
    sbtStructure.Configuration.Compile -> DependencyScope.COMPILE,
    sbtStructure.Configuration.Runtime -> DependencyScope.RUNTIME,
    sbtStructure.Configuration.Test -> DependencyScope.TEST,
    sbtStructure.Configuration.Provided -> DependencyScope.PROVIDED)
  byPrecedence
    .collectFirst { case (configuration, scope) if ids.contains(configuration) => scope }
    .getOrElse(DependencyScope.COMPILE)
}
/** Requests cancellation of the in-flight structure dump, if any.
  *
  * NOTE(review): always returns `false` — presumably cancellation is
  * best-effort and not reported as immediately successful; confirm against
  * the external-system task API contract. */
def cancelTask(taskId: ExternalSystemTaskId, listener: ExternalSystemTaskNotificationListener) = {
  // `runner` is non-null only while a dump is running.
  if (runner != null)
    runner.cancel()
  false
}
}
object SbtProjectResolver {

  /** Receives console output produced while dumping the project structure. */
  trait TaskListener {
    def onTaskOutput(message: String, stdOut: Boolean): Unit
  }

  /** Listener that discards all output. */
  object SilentTaskListener extends TaskListener {
    override def onTaskOutput(message: String, stdOut: Boolean): Unit = {}
  }

  /** Forwards output to an external-system notification listener under the
    * given task id. */
  class ExternalTaskListener(
    val listener: ExternalSystemTaskNotificationListener,
    val taskId: ExternalSystemTaskId)
    extends TaskListener {
    def onTaskOutput(message: String, stdOut: Boolean): Unit =
      listener.onTaskOutput(taskId, message, stdOut)
  }
}
| advancedxy/intellij-scala | src/org/jetbrains/sbt/project/SbtProjectResolver.scala | Scala | apache-2.0 | 18,507 |
import io.hydrosphere.mist.api.MistJob
object TestError extends MistJob {

  /** Example job that always fails.
    *
    * Declared to return a result map like other Mist jobs, but never returns
    * normally — it exists to exercise error propagation.
    *
    * @throws Exception always, with the message "Test Error"
    */
  def execute(): Map[String, Any] = {
    throw new Exception("Test Error")
  }
} | KineticCookie/mist | examples-spark1/src/main/scala/TestError.scala | Scala | apache-2.0 | 343 |
/** Copyright 2014 TappingStone, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prediction.data.storage.examples
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.HConnectionManager
import org.apache.hadoop.hbase.client.HConnection
import org.apache.hadoop.hbase.client.HBaseAdmin
import io.prediction.data.storage.Storage
import io.prediction.data.storage.Event
import scala.concurrent.ExecutionContext.Implicits.global
// sbt/sbt "data/run-main io.prediction.data.storage.examples.TestHBase"
object TestHBase {
  /** Smoke-tests the event-data backend: initializes namespace 0, writes a
    * single event into it, then removes the namespace and closes the store. */
  def main(arg: Array[String]) {
    /*
    val conf = HBaseConfiguration.create()
    conf.set("hbase.client.retries.number", "1")
    conf.set("zookeeper.recovery.retry", "1")
    HBaseAdmin.checkHBaseAvailable(conf)
    val a = new HBaseAdmin(conf)
    a.listTables()
    */
    println(" Verifying Event Data Backend")
    val eventsDb = Storage.getLEvents()
    eventsDb.init(0)
    eventsDb.insert(Event(
      event="test",
      entityType="test",
      entityId="test"), 0)
    eventsDb.remove(0)
    eventsDb.close()
  }
}
| TheDataShed/PredictionIO | data/src/main/scala/examples/TestHBase.scala | Scala | apache-2.0 | 1,682 |
package chat.tox.antox.utils
import android.content.{BroadcastReceiver, Context, Intent}
/**
 * Receives the device boot broadcast. Starting the Tox service on boot is
 * currently disabled (see the FIXME below), so the handler is a no-op.
 */
class BootReceiver extends BroadcastReceiver {
  override def onReceive(context: Context, intent: Intent): Unit = {
    // Check if autostart setting is enabled first
    //FIXME disabled autostart for now
    //val preferences = PreferenceManager.getDefaultSharedPreferences(context)
    //if (preferences.getBoolean("autostart", true)) {
    //  context.startService(new Intent(context, classOf[ToxService]))
    //}
  }
}
| wiiam/Antox | app/src/main/scala/chat/tox/antox/utils/BootReceiver.scala | Scala | gpl-3.0 | 631 |
package org.scalameta.internal
import scala.reflect.macros.blackbox.Context
/** Scala 2.13 compatibility shim: expose the 2.13 reflection type `NamedArg`
  * under the pre-2.13 name `AssignOrNamedArg`, as both value and type alias. */
trait MacroCompat {
  val c: Context
  import c.universe._

  val AssignOrNamedArg = NamedArg
  type AssignOrNamedArg = NamedArg
}
object MacroCompat {
  // On 2.13 case-class field names are available to macros (flag consumed
  // by callers of this compat layer).
  val productFieldNamesAvailable = true
}
object ScalaCompat {
  // Removed in 2.13; kept as an empty placeholder so cross-built code compiles.
  trait IndexedSeqOptimized[+A]

  // On 2.13 `scala.Seq` is immutable, so `collection.Seq` needs an explicit
  // conversion; `.toScalaSeq` provides it.
  implicit class XtensionScala213ToSeq[T](seq: collection.Seq[T]) {
    def toScalaSeq: Seq[T] = seq.toSeq
  }
}
| scalameta/scalameta | scalameta/common/shared/src/main/scala-2.13/org/scalameta/internal/MacroCompat.scala | Scala | bsd-3-clause | 460 |
package org.amcgala.vr
import scala.concurrent.{ Promise, Future }
import org.amcgala.vr.BrainModes.BrainMode
import org.amcgala.vr.need.{ NeedManager, Need }
import org.amcgala.vr.need.Needs.NeedIDs.NeedID
/** The coarse activity states a bot's brain cycles through during a day. */
object BrainModes {

  sealed trait BrainMode

  /** Satisfying needs via satisfaction strategies. */
  case object NeedMode extends BrainMode

  /** Running the registered job behavior. */
  case object JobMode extends BrainMode

  /** Nothing scheduled; runs the registered idle behavior. */
  case object IdleMode extends BrainMode
}
/** A long-running activity of a bot. `start` kicks it off and completes the
  * returned future with a `Return` value; `isDone` reports completion. */
trait Behavior {
  type Return
  val bot: Bot

  // Set to true by implementations once the behavior has finished.
  protected var done = false

  def isDone(): Boolean = done

  def start(): Future[Return]
}
/** A unit of work executed by a bot; `execute` starts it and eventually
  * yields a `Return` value. */
trait Task {
  val bot: Bot
  type Return

  def isDone(): Boolean

  def execute(): Future[Return]
}
/** A task that advances once per simulation tick. On construction it registers
  * an on-tick hook on the bot under a unique id and deregisters it again when
  * `done()` is called. */
trait MultiStepTask extends Task {
  import scala.concurrent._

  // Fulfilled by concrete tasks once the multi-step computation finishes.
  var result: Promise[Return] = promise[Return]

  private var d = false

  // NOTE(review): nanoTime-based id is unique enough in practice, but
  // collisions are theoretically possible — confirm if ids must be unique.
  val id = System.nanoTime().toString

  bot.registerOnTickAction(id, onTick)

  /** Called on every tick while the task is registered. */
  def onTick(): Unit

  def isDone() = d

  /** Marks the task finished and removes its on-tick hook. */
  def done(): Unit = {
    d = true
    bot.removeOnTickAction(id)
  }
}
/** Drives a bot's daily schedule: switches between job, need-satisfaction and
  * idle behaviors based on the time of day, and runs the active behavior/task.
  *
  * NOTE(review): the non-ASCII arrow characters of the original source were
  * corrupted in this copy (rendered as "β"); restored here as ASCII `<-`/`=>`.
  */
class Brain(bot: Bot) {

  import scala.concurrent.ExecutionContext.Implicits.global

  private var job: Option[Behavior] = None
  private var idle: Option[Behavior] = None
  private var activeBehavior: Option[Behavior] = None
  private var activeTask: Option[Task] = None
  private var mode: BrainMode = BrainModes.IdleMode
  private val needManager = new NeedManager

  /** Starts a task and remembers it as the active one. */
  def executeTask(task: Task): Future[task.Return] = {
    activeTask = Some(task)
    task.execute()
  }

  /** Starts a behavior and remembers it as the active one. */
  def executeBehavior(behavior: Behavior): Future[behavior.Return] = {
    activeBehavior = Some(behavior)
    behavior.start()
  }

  def registerJob(jobBehavior: Behavior) = job = Some(jobBehavior)

  def registerIdleBehavior(behavior: Behavior) = idle = Some(behavior)

  def registerNeed(need: Need) = needManager.registerNeed(need)

  def removeNeed(id: NeedID): Unit = needManager.removeNeed(id)

  /** Called once per tick: clears finished activities; otherwise picks the
    * next behavior according to the current mode and the time of day. */
  def update(): Unit = {
    if (activeBehavior == None && activeTask == None) {
      for (time <- bot.currentTime) {
        mode match {
          case BrainModes.JobMode if time.hours > 16 =>
            // Done with work?! Let's...uhm...EAT!
            mode = BrainModes.IdleMode
            for (i <- idle) executeBehavior(i)
          case BrainModes.NeedMode if time.hours > 8 =>
            // Switch to job if we don't have anything else to do. This ensures that we wait until the last SatisfactionBehavior is finished.
            mode = BrainModes.JobMode
            for (j <- job) activeBehavior = Some(j)
          case BrainModes.IdleMode if time.hours > 8 =>
            mode = BrainModes.JobMode
            for (j <- job) executeBehavior(j)
          case BrainModes.JobMode =>
            // If there is still time for work another work session and the last job is done, we start over again.
            for (j <- job) executeBehavior(j)
          case BrainModes.NeedMode =>
            needManager.update()
            val suggestions = needManager.needSuggestion
            val strategy = needManager.getSatisfactionStrategyFor(suggestions.head)
            executeBehavior(strategy)
          case BrainModes.IdleMode =>
            needManager.update()
            for (i <- idle) executeBehavior(i)
        }
      }
    } else {
      for (b <- activeBehavior) {
        if (b.isDone()) {
          activeBehavior = None
        }
      }
      for (t <- activeTask) {
        if (t.isDone()) {
          activeTask = None
        }
      }
    }
  }
} | th-koeln/amcgala-vr | src/main/scala/org/amcgala/vr/task/TaskManagement.scala | Scala | apache-2.0 | 3,439 |
package pl.edu.agh.mplt.parser.declaration.objective
import scala.util.parsing.combinator.JavaTokenParsers
import pl.edu.agh.mplt.parser.phrase.set.Indexing
import pl.edu.agh.mplt.parser.phrase.expression.Expression
import language.postfixOps
/** Parser for AMPL objective declarations of the form
  * `maximize|minimize name [alias] [indexing] [: expression] ;`.
  * The abstract members are supplied by the mixed-in grammar parts. */
trait ObjectiveDeclarationAMPLParser extends JavaTokenParsers {
  // An identifier that is not a reserved AMPL keyword.
  def nonKeyword: Parser[String]

  def objective: Parser[String] = "maximize" | "minimize"

  def indexing: Parser[Indexing]
  def expr: Parser[Expression]

  def objectiveDeclaration: Parser[ObjectiveDeclaration] =
    objective ~ nonKeyword ~ (nonKeyword ?) ~ (indexing ?) ~ (":" ~> expr ?) <~ ";" ^^ {
      case "maximize" ~ name ~ optAlias ~ optIndexing ~ (exprOpt) => Maximize(name, optAlias, optIndexing,
        exprOpt)
      case "minimize" ~ name ~ optAlias ~ optIndexing ~ (exprOpt) => Minimize(name, optAlias, optIndexing,
        exprOpt)
    }
}
| marek1840/MPLT | src/main/scala/pl/edu/agh/mplt/parser/declaration/objective/ObjectiveDeclarationAMPLParser.scala | Scala | mit | 923 |
trait Foo {
  def name: String
  // Initializer calls the not-yet-initialized subclass member `name`;
  // the inline "// error" marker is the expected init-checker diagnostic.
  val message = "hello, " + name // error
}
// `name` returns `message`, closing the initialization cycle with Foo.
class Bar extends Foo {
  def name = message
}
| som-snytt/dotty | tests/init/neg/override9.scala | Scala | apache-2.0 | 125 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import SharedHelpers.thisLineNumber
import java.io.FileNotFoundException
import exceptions.TestFailedException
import org.scalactic.Prettifier
import org.scalatest.funspec.AnyFunSpec
import org.scalatest.matchers.should.Matchers._
/** Verifies the shorthand `shouldBe thrownBy { ... }` syntax for `a [...]`,
  * `an [...]` and `noException`, asserting both the failure message and the
  * reported failure location (file name and line number) on mismatch. */
class ShorthandShouldBeThrownBySpec extends AnyFunSpec {

  private val prettifier = Prettifier.default

  // Expected value of TestFailedException.failedCodeFileName below.
  val fileName: String = "ShorthandShouldBeThrownBySpec.scala"

  def exceptionExpected(clz: Class[_]): String =
    Resources.exceptionExpected(clz.getName)

  def wrongException(expectedClz: Class[_], actualClz: Class[_]): String =
    Resources.wrongException(expectedClz.getName, actualClz.getName)

  def exceptionNotExpected(clz: Class[_]): String =
    Resources.exceptionNotExpected(clz.getName)

  def hadMessageInsteadOfExpectedMessage(left: Throwable, actualMessage: String, expectedMessage: String) : String =
    FailureMessages.hadMessageInsteadOfExpectedMessage(prettifier, left, actualMessage, expectedMessage)

  // Note: each `thisLineNumber - 6` below counts back to the `shouldBe
  // thrownBy` line of its intercept block — do not insert lines inside
  // those 6-line windows.
  describe("a [Exception] should") {

    it("do nothing when provided code produce expected exception") {
      a [RuntimeException] shouldBe thrownBy {
        throw new RuntimeException("purposely")
      }
    }

    it("throw new TestFailedException with correct message and stack depth when provided code does not produce any exception") {
      val e = intercept[TestFailedException] {
        a [RuntimeException] shouldBe thrownBy {
          assert(1 === 1)
        }
      }
      assert(e.message === Some(exceptionExpected(classOf[RuntimeException])))
      assert(e.failedCodeFileName === Some(fileName))
      assert(e.failedCodeLineNumber === Some(thisLineNumber - 6))
    }

    it("throw new TestFailedException with correct message and stack depth when provided code does not produce expected exception") {
      val e = intercept[TestFailedException] {
        a [RuntimeException] shouldBe thrownBy {
          throw new CustomException("secret file not found")
        }
      }
      assert(e.message === Some(wrongException(classOf[RuntimeException], classOf[CustomException])))
      assert(e.failedCodeFileName === Some(fileName))
      assert(e.failedCodeLineNumber === Some(thisLineNumber - 6))
    }
  }

  class CustomException(message: String) extends Exception(message)

  describe("an [Exception] should") {

    it("do nothing when provided code produce expected exception") {
      an [UnsupportedOperationException] shouldBe thrownBy {
        throw new UnsupportedOperationException("purposely")
      }
    }

    it("throw new TestFailedException with correct message and stack depth when provided code does not produce any exception") {
      val e = intercept[TestFailedException] {
        an [RuntimeException] shouldBe thrownBy {
          assert(1 === 1)
        }
      }
      assert(e.message === Some(exceptionExpected(classOf[RuntimeException])))
      assert(e.failedCodeFileName === Some(fileName))
      assert(e.failedCodeLineNumber === Some(thisLineNumber - 6))
    }

    it("throw new TestFailedException with correct message and stack depth when provided code does not produce expected exception") {
      val e = intercept[TestFailedException] {
        an [RuntimeException] shouldBe thrownBy {
          throw new CustomException("secret file not found")
        }
      }
      assert(e.message === Some(wrongException(classOf[RuntimeException], classOf[CustomException])))
      assert(e.failedCodeFileName === Some(fileName))
      assert(e.failedCodeLineNumber === Some(thisLineNumber - 6))
    }
  }

  describe("noException should") {

    it("do nothing when no exception is thrown from the provided code") {
      noException shouldBe thrownBy {
        assert(1 === 1)
      }
    }

    it("throw new TestFailedException with correct message and stack depth when provided code produces exception") {
      val e = intercept[TestFailedException] {
        noException shouldBe thrownBy {
          throw new RuntimeException("purposely")
        }
      }
      assert(e.message === Some(exceptionNotExpected(classOf[RuntimeException])))
      assert(e.failedCodeFileName === Some(fileName))
      assert(e.failedCodeLineNumber === Some(thisLineNumber - 6))
    }
  }
}
| scalatest/scalatest | jvm/scalatest-test/src/test/scala/org/scalatest/ShorthandShouldBeThrownBySpec.scala | Scala | apache-2.0 | 4,886 |
package service
import model.Profile._
import profile.simple._
import util.ControlUtil._
import java.sql.DriverManager
import org.apache.commons.io.FileUtils
import scala.util.Random
import java.io.File
/** Base for service tests: runs each action against a fresh on-disk H2
  * database with all schema migrations applied. */
trait ServiceSpecBase {

  def withTestDB[A](action: (Session) => A): A = {
    // Random directory per call so concurrently running specs don't share a DB.
    util.FileUtil.withTmpDir(new File(FileUtils.getTempDirectory(), Random.alphanumeric.take(10).mkString)){ dir =>
      val (url, user, pass) = (s"jdbc:h2:${dir}", "sa", "sa")
      org.h2.Driver.load()
      using(DriverManager.getConnection(url, user, pass)){ conn =>
        // `versions` is reversed before applying — NOTE(review): presumably
        // stored newest-first, so this applies oldest-first; confirm ordering.
        servlet.AutoUpdate.versions.reverse.foreach(_.update(conn))
      }
      Database.forURL(url, user, pass).withSession { session =>
        action(session)
      }
    }
  }
}
| tb280320889/TESTTB | src/test/scala/service/ServiceSpecBase.scala | Scala | apache-2.0 | 738 |
package model
import org.joda.time.DateTime
// A user is identified solely by an auth token.
case class User(token: String)
case class Folder(id: Long, title: String)
// A shortened link: target url plus its short code.
case class Link(url: String, code: String)
// One recorded visit: timestamp and the visitor's remote IP.
case class Click(date: DateTime, remoteIp: String)
// Per-code aggregate: target url, owning folder id, accumulated clicks.
// NOTE(review): `clicks` is a mutable `var` on a case class — equality and
// copy semantics become surprising; confirm this is intentional.
case class CodeInfo(url: String, idFolder: Long, var clicks: List[Click])
/** Application-wide defaults. */
object Config {
  // Default page-size limit used when a caller does not specify one.
  val defaultLimit: Int = 100
}
| Claus1/play-test | app/model/models.scala | Scala | mit | 336 |
package im.actor.server.group
import java.time.ZoneOffset
import im.actor.server.migrations.Migration
import slick.driver.PostgresDriver
import scala.concurrent.duration._
import scala.concurrent.{ Await, ExecutionContext, Future, Promise }
import akka.actor.{ ActorLogging, ActorSystem, Props }
import akka.pattern.pipe
import akka.persistence.{ PersistentActor, RecoveryCompleted }
import org.joda.time.DateTime
import slick.driver.PostgresDriver.api._
import im.actor.server.event.TSEvent
import im.actor.server.file.{ Avatar, AvatarImage, FileLocation }
// NOTE(review): the rename-import arrow was corrupted to "β" in this copy;
// restored as ASCII `=>`.
import im.actor.server.{ persist => p, models }

/** Snapshot of all legacy DB state needed to emit events for one group. */
private final case class Migrate(group: models.FullGroup, avatarData: Option[models.AvatarData], botUsers: Seq[models.GroupBot], groupUsers: Seq[models.GroupUser])
/** One-shot migration replaying legacy relational group state as persistent
  * events (corrupted "β" arrows restored as ASCII `=>`). */
object GroupMigrator extends Migration {

  protected override def migrationName: String = "2015-08-04-GroupsMigration"

  protected override def migrationTimeout: Duration = 1.hour

  /** Migrates every group found in the database, all in parallel. */
  protected override def startMigration()(implicit system: ActorSystem, db: PostgresDriver.api.Database, ec: ExecutionContext): Future[Unit] = {
    db.run(p.Group.allIds) flatMap (ids => Future.sequence(ids map migrateSingle)) map (_ => ())
  }

  /** Spawns a per-group migrator actor and completes when it reports done. */
  private def migrateSingle(groupId: Int)(implicit system: ActorSystem, db: Database): Future[Unit] = {
    val promise = Promise[Unit]()
    system.actorOf(props(promise, groupId), name = s"migrate_group_${groupId}")
    promise.future
  }

  private def props(promise: Promise[Unit], groupId: Int)(implicit db: Database) = Props(classOf[GroupMigrator], promise, groupId, db)
}
/** Per-group migrator: loads the group's legacy relational state and replays
  * it as persistent events under the group's persistence id. Completes
  * `promise` once events are persisted, or immediately when a `Created` event
  * already exists in the journal (group migrated before).
  *
  * NOTE(review): the non-ASCII arrow characters of the original source were
  * corrupted in this copy (rendered as "β"); restored here as ASCII `<-`/`=>`.
  */
private final class GroupMigrator(promise: Promise[Unit], groupId: Int, db: Database) extends PersistentActor with ActorLogging {
  import GroupEvents._

  private implicit val ec: ExecutionContext = context.dispatcher

  override def persistenceId = GroupOffice.persistenceIdFor(groupId)

  /** Loads the group's full DB state and pipes it to self as a `Migrate`. */
  private def migrate(): Unit = {
    db.run(p.Group.findFull(groupId)) foreach {
      case Some(group) =>
        db.run(for {
          avatarOpt <- p.AvatarData.findByGroupId(groupId)
          bots <- p.GroupBot.findByGroup(groupId) map (_.map(Seq(_)).getOrElse(Seq.empty))
          users <- p.GroupUser.find(groupId)
        } yield Migrate(
          group = group,
          avatarData = avatarOpt,
          botUsers = bots,
          groupUsers = users
        )) pipeTo self onFailure {
          case e =>
            log.error(e, "Failed to migrate group")
            promise.failure(e)
            context stop self
        }
      case None =>
        log.error("Group not found")
        promise.failure(new Exception(s"Cannot find group ${groupId}"))
        context stop self
    }
  }

  override def receiveCommand: Receive = {
    case m @ Migrate(group, avatarDataOpt, botUsers, users) =>
      log.info("Migrate: {}", m)

      val created: TSEvent = TSEvent(group.createdAt, Created(group.id, Some(GroupType.General), group.creatorUserId, group.accessHash, group.title))

      val botAdded: Vector[TSEvent] = botUsers.toVector map { bu =>
        TSEvent(group.createdAt, BotAdded(bu.userId, bu.token))
      }

      val becamePublic: Vector[TSEvent] =
        if (group.isPublic)
          Vector(TSEvent(group.createdAt, BecamePublic()))
        else
          Vector.empty

      // Every group user yields an invite event; users who actually joined
      // additionally yield a join event.
      val (userAdded, userJoined): (Vector[TSEvent], Vector[TSEvent]) = (users.toVector map { gu =>
        (TSEvent(gu.invitedAt, UserInvited(gu.userId, gu.inviterUserId)),
          gu.joinedAt map (ts => TSEvent(new DateTime(ts.toInstant(ZoneOffset.UTC).getEpochSecond() * 1000), UserJoined(gu.userId, gu.inviterUserId))))
      }).unzip match {
        case (i, j) => (i, j.flatten)
      }

      // Only fully-populated avatar rows are migrated; partial rows are dropped.
      val avatarUpdated: Vector[TSEvent] = avatarDataOpt match {
        case Some(models.AvatarData(_, _,
          Some(smallFileId), Some(smallFileHash), Some(smallFileSize),
          Some(largeFileId), Some(largeFileHash), Some(largeFileSize),
          Some(fullFileId), Some(fullFileHash), Some(fullFileSize),
          Some(fullWidth), Some(fullHeight))) =>
          Vector(TSEvent(group.avatarChangedAt, AvatarUpdated(Some(Avatar(
            Some(AvatarImage(FileLocation(smallFileId, smallFileHash), 100, 100, smallFileSize.toLong)),
            Some(AvatarImage(FileLocation(largeFileId, largeFileHash), 200, 200, largeFileSize.toLong)),
            Some(AvatarImage(FileLocation(fullFileId, fullFileHash), fullWidth, fullHeight, fullFileSize.toLong))
          )))))
        case _ => Vector.empty
      }

      val events: Vector[TSEvent] = created +: (botAdded ++ becamePublic ++ userAdded ++ userJoined ++ avatarUpdated).toVector

      persistAsync(events)(identity)
      defer(TSEvent(new DateTime, "migrated")) { _ =>
        log.info("Migrated")
        promise.success(())
        context stop self
      }
  }

  private[this] var migrationNeeded = true

  override def receiveRecover: Receive = {
    case TSEvent(_, _: Created) =>
      // A Created event in the journal means this group was migrated already.
      migrationNeeded = false
    case RecoveryCompleted =>
      if (migrationNeeded) {
        migrate()
      } else {
        promise.success(())
        context stop self
      }
  }
}
| dsaved/africhat-platform-0.1 | actor-server/actor-core/src/main/scala/im/actor/server/group/GroupMigrator.scala | Scala | mit | 5,147 |
package charactor.core.messages
/** Message wrapping the object (`obj`) that dropped dead. */
class DroppedDeadMessage(val obj: Any)
| PiotrTrzpil/charactor | src/charactor/core/messages/DroppedDeadMessage.scala | Scala | apache-2.0 | 76 |
package lonelyInteger
import scala.io.Source.stdin
object Solution {

  /** Reads the declared element count and the space-separated integers from
    * stdin, then prints the value occurring an odd number of times. */
  def main(args: Array[String]) {
    val lines = stdin.getLines()
    println(solve(lines.next().toInt, lines.next()))
  }

  /** XOR-folds the integers in `line`: values occurring an even number of
    * times cancel out, leaving the lonely integer.
    *
    * @param numberOfIntegers declared count from the input (unused; kept for
    *                         interface compatibility)
    * @param line             space-separated integers
    * @return the lonely integer, or 0 for a blank line
    */
  def solve(numberOfIntegers: Int, line: String): Int =
    // foldLeft with XOR identity 0 (instead of reduce) is safe on empty input;
    // nonEmpty filter guards against the empty token split("") produces.
    line.split(" ").filter(_.nonEmpty).map(_.toInt).foldLeft(0)(_ ^ _)
} | alexandrnikitin/HackerRank.scala | src/main/scala/lonelyInteger/Solution.scala | Scala | mit | 308 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset and retrieves a sample of Scala code snippets that meet specific criteria, giving a basic overview of the dataset's contents without revealing deeper insights.