code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package genetic.string
import java.util.Random
/** Greedy per-position hill climbing over printable-ASCII strings.
  *
  * @param heuristic cost of a candidate state; zero means "solved"
  * @param print     when true, prints the state after each position is fixed
  * @param rand      randomness source (unused by this strategy; kept for interface compatibility)
  */
class HillClimbing(heuristic: Array[Char] => Double, print: Boolean, rand: Random) {

  /** The search alphabet: every printable ASCII character (codes 32..126). */
  val chars: Array[Char] = ('\u0020' to '\u007e').toArray

  /** Fixes one position of `state` at a time to the character that minimises
    * the heuristic, mutating `state` in place. Stops as soon as the heuristic
    * reaches zero or every position has been assigned, and returns the result.
    */
  def hillClimbing(state: Array[Char]): String = {
    var index = 0
    var solved = heuristic(state) <= 0
    while (!solved && index < state.length) {
      // Scan the whole alphabet at this position, remembering the first best candidate.
      var best = chars(0)
      var bestValue = Double.MaxValue
      for (c <- chars) {
        state(index) = c
        val value = heuristic(state)
        if (value < bestValue) {
          bestValue = value
          best = c
        }
      }
      state(index) = best
      index += 1
      if (print) println(state.mkString)
      solved = heuristic(state) <= 0
    }
    state.mkString
  }
}
/** Entry-point helper: runs the hill climber against a secret string and reports timing. */
object HillClimbing {

  /** Climbs towards `secret` from an all-zero initial state and prints the elapsed time.
    *
    * @param secret    the target string to reconstruct
    * @param heuristic distance between a candidate state and the target
    * @param print     when true, the climber also prints intermediate states
    */
  def run(secret: String, heuristic: (Array[Char], Array[Char]) => Double, print: Boolean): Unit = {
    val target: Array[Char] = secret.toCharArray
    val rand = new Random()
    val startNanos = System.nanoTime()
    new HillClimbing(heuristic(_, target), print, rand).hillClimbing(Array.ofDim[Char](secret.length))
    val elapsedNanos = System.nanoTime() - startNanos
    // Truncate to whole microseconds, then express as (possibly fractional) milliseconds.
    val millis = (elapsedNanos / 1000).toDouble / 1000
    println(s"Time: $millis ms" + (if (print) " (including printing)" else ""))
  }
}
| NightRa/AILab | Genetic/src/main/scala/genetic/string/HillClimbing.scala | Scala | apache-2.0 | 1,116 |
/*
* Copyright ActionML, LLC under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* ActionML licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.actionml.authserver.services
import java.util.concurrent.TimeUnit
import com.actionml.authserver.{AccessToken, ResourceId, RoleId}
import com.typesafe.scalalogging.LazyLogging
import org.ehcache.{CacheManager, ValueSupplier}
import org.ehcache.config.builders.{CacheConfigurationBuilder, CacheManagerBuilder, ResourcePoolsBuilder}
import org.ehcache.expiry.{Duration, Expirations, Expiry}
import scaldi.{Injectable, Injector}
import scala.concurrent.{ExecutionContext, Future}
/** Caching decorator over [[ClientAuthorizationService]]: memoises
  * (token, role, resource) authorization decisions in a shared Ehcache.
  */
class CachedAuthorizationService(implicit inj: Injector) extends ClientAuthorizationService with LazyLogging with Injectable {
  // Execution context for mapping over the delegate's futures.
  private implicit val ec: ExecutionContext = inject[ExecutionContext]
  import CachedAuthorizationService.tokenCache

  /** Returns the cached decision when present, otherwise delegates to the
    * underlying service and stores its result for subsequent calls.
    */
  override def authorize(token: AccessToken, roleId: RoleId, resourceId: ResourceId): Future[Boolean] = {
    val key = (token, roleId, resourceId)
    // Single `get` instead of containsKey-then-get: the entry could expire
    // between the two calls, making `get` return null and NPE on unboxing.
    Option(tokenCache.get(key)) match {
      case Some(cached) =>
        Future.successful(cached.booleanValue())
      case None =>
        super.authorize(token, roleId, resourceId).map { result =>
          tokenCache.put(key, result)
          result
        }
    }
  }
}
/** Holds the process-wide Ehcache instance shared by all [[CachedAuthorizationService]]s. */
object CachedAuthorizationService {
  /** Time-to-live for cached authorization decisions. */
  val cacheTtl = Duration.of(30, TimeUnit.MINUTES)
  /** Maximum number of (token, role, resource) entries kept on the heap. */
  val cacheSize = 1000

  /** Expiry policy that refreshes the TTL on creation, update and access (sliding expiry). */
  private class CacheExpiry[K,V] extends Expiry[K,V] {
    def getExpiryForUpdate(key: K, oldValue: ValueSupplier[_ <: V], newValue: V): Duration = cacheTtl
    def getExpiryForCreation(key: K, value: V): Duration = cacheTtl
    def getExpiryForAccess(key: K, value: ValueSupplier[_ <: V]): Duration = cacheTtl
  }

  // Single cache manager with one heap-bounded cache mapping
  // (token, role, resource) -> boolean decision.
  private val cacheManager: CacheManager = CacheManagerBuilder.newCacheManagerBuilder
    .withCache("access_tokens", CacheConfigurationBuilder.newCacheConfigurationBuilder(
      classOf[(AccessToken, RoleId, ResourceId)],
      classOf[java.lang.Boolean],
      ResourcePoolsBuilder.heap(cacheSize)
    ).withExpiry(new CacheExpiry)).build
  cacheManager.init()

  // Lazy so the lookup happens only after cacheManager.init() above has run.
  private lazy val tokenCache = cacheManager.getCache("access_tokens", classOf[(AccessToken, RoleId, ResourceId)], classOf[java.lang.Boolean])
}
| actionml/harness | rest-server/server/src/main/scala/com/actionml/authserver/services/CachedAuthorizationService.scala | Scala | apache-2.0 | 2,816 |
package test.endtoend.auctionsniper
import java.util.concurrent.{ArrayBlockingQueue, TimeUnit}
import org.hamcrest.Matchers._
import org.junit.Assert.assertThat
import org.hamcrest.Matcher;
import org.jivesoftware.smack.{Chat, ChatManagerListener, MessageListener, XMPPConnection, XMPPException}
import org.jivesoftware.smack.packet.Message
import auctionsniper.xmpp.XMPPAuction
/** In-process stub auction for end-to-end tests: logs into a local XMPP server
  * as the auction user for `itemId`, lets tests script auction messages and
  * assert on what the sniper sends back.
  */
class FakeAuctionServer(val itemId: String) {
  import FakeAuctionServer._

  private val AUCTION_PASSWORD = "auction"
  // Captures at most one incoming message at a time for blocking assertions.
  private val messageListener = new SingleMessageListener
  private val connection = new XMPPConnection(XMPP_HOSTNAME)
  // The chat the sniper opens with this auction; null until chatCreated fires.
  private var currentChat: Chat = null

  /** Connects, logs in as the auction for this item, and starts listening for the sniper's chat. */
  def startSellingItem() {
    connection.connect()
    connection.login(ITEM_ID_AS_LOGIN.format(itemId), AUCTION_PASSWORD, AUCTION_RESOURCE)
    connection.getChatManager.addChatListener(new ChatManagerListener() {
      def chatCreated(chat: Chat, createdLocally: Boolean) {
        currentChat = chat
        chat.addMessageListener(messageListener)
      }
    })
  }

  /** Sends a message that does not follow the auction wire protocol. */
  def sendInvalidMessageContaining(brokenMessage: String){
    currentChat.sendMessage(brokenMessage)
  }

  /** Reports the current price, the minimum increment and the winning bidder. */
  def reportPrice(price: Int, increment: Int, bidder: String) {
    currentChat.sendMessage(
      "SOLVersion: 1.1; Event: PRICE; CurrentPrice: %d; Increment: %d; Bidder: %s;"
        .format(price, increment, bidder))
  }

  /** Asserts that the sniper sent a join request. */
  def hasReceivedJoinRequestFrom(sniperId: String) {
    receivesAMessageMatching(sniperId, equalTo(XMPPAuction.JOIN_COMMAND_FORMAT))
  }

  /** Asserts that the sniper placed a bid of `bid`. */
  def hasReceivedBid(bid: Int, sniperId: String) {
    receivesAMessageMatching(
      sniperId,
      equalTo(XMPPAuction.BID_COMMAND_FORMAT.format(bid)))
  }

  // Waits for a matching message first: the participant check needs a chat to exist.
  private def receivesAMessageMatching[T >: String](sniperId: String, messageMatcher: Matcher[T]) {
    messageListener.receivesAMessage(messageMatcher)
    assertThat(currentChat.getParticipant, equalTo(sniperId))
  }

  /** Announces that the auction has closed. */
  def announceClosed() {
    currentChat.sendMessage("SOLVersion: 1.1; Event: CLOSE;")
  }

  /** Tears down the XMPP connection. */
  def stop() {
    connection.disconnect()
  }

  /** Single-slot queue of incoming messages that tests can block on (5s timeout). */
  class SingleMessageListener extends MessageListener {
    private val messages = new ArrayBlockingQueue[Message](1)

    def processMessage(chat: Chat, message: Message) {
      messages.add(message)
    }

    /** Blocks up to 5 seconds for any message to arrive. */
    def receivesAMessage() {
      assertThat("Message", messages.poll(5, TimeUnit.SECONDS), is(notNullValue[Message]))
    }

    /** Blocks up to 5 seconds for a message whose body matches `messageMatcher`. */
    def receivesAMessage[T >: String](messageMatcher: Matcher[T]) {
      val message = messages.poll(5, TimeUnit.SECONDS)
      //XXX crashes the compiler: assertThat(message, hasProperty("body", messageMatcher))
      assertThatReplacementForBug2705(message, hasProperty("body", messageMatcher))
    }

    //XXX temporary workaround for bug: https://lampsvn.epfl.ch/trac/scala/ticket/2705
    // NOTE(review): the "\\n" below renders as a literal backslash-n in the failure
    // text; plain "\n" was probably intended — confirm before changing.
    private def assertThatReplacementForBug2705[A, M >: A](actual: A, matcher: Matcher[M]) {
      if (!matcher.matches(actual)) {
        val description = new org.hamcrest.StringDescription()
        description.appendText("\\nExpected: ")
          .appendDescriptionOf(matcher)
          .appendText("\\n but: ")
        matcher.describeMismatch(actual, description)
        throw new AssertionError(description.toString)
      }
    }
  }
}
/** Connection constants shared by every [[FakeAuctionServer]] instance. */
object FakeAuctionServer {
  /** Host running the XMPP server used by the end-to-end tests. */
  val XMPP_HOSTNAME = "localhost"
  /** Login-name pattern: the item id is embedded in the auction's XMPP user name. */
  val ITEM_ID_AS_LOGIN = "auction-%s"
  /** XMPP resource under which the auction logs in. */
  val AUCTION_RESOURCE = "Auction"
}
| sptz45/goos-scala | test/end-to-end/test/endtoend/auctionsniper/FakeAuctionServer.scala | Scala | apache-2.0 | 3,392 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this thing except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import SharedHelpers.thisLineNumber
import enablers.Definition
import Matchers._
import exceptions.TestFailedException
import org.scalactic.Prettifier
/** Verifies the `be (defined)` matcher combined with logical-or expressions
  * (`or be`, `or equal`, and their negations), driven by a user-supplied
  * implicit [[Definition]] type-class instance, for both single values and
  * `all(xs)` collections.
  */
class ShouldBeDefinedLogicalOrImplicitSpec extends FunSpec {

  private val prettifier = Prettifier.default

  // File name expected in TestFailedException stack-depth assertions below.
  val fileName: String = "ShouldBeDefinedLogicalOrImplicitSpec.scala"

  // Helpers that build the exact failure-message fragments the matchers produce.
  def wasEqualTo(left: Any, right: Any): String =
    FailureMessages.wasEqualTo(prettifier, left, right)

  def wasNotEqualTo(left: Any, right: Any): String =
    FailureMessages.wasNotEqualTo(prettifier, left, right)

  def equaled(left: Any, right: Any): String =
    FailureMessages.equaled(prettifier, left, right)

  def didNotEqual(left: Any, right: Any): String =
    FailureMessages.didNotEqual(prettifier, left, right)

  def wasNotDefined(left: Any): String =
    FailureMessages.wasNotDefined(prettifier, left)

  def wasDefined(left: Any): String =
    FailureMessages.wasDefined(prettifier, left)

  // Builds the aggregated failure message produced when an `all(xs)` assertion fails.
  def allError(message: String, lineNumber: Int, left: Any): String = {
    val messageWithIndex = UnquotedString(" " + FailureMessages.forAssertionsGenTraversableMessageWithStackDepth(prettifier, 0, UnquotedString(message), UnquotedString(fileName + ":" + lineNumber)))
    FailureMessages.allShorthandFailed(prettifier, messageWithIndex, left)
  }

  // Minimal fixture type the implicit Definition instance below works on.
  trait Thing {
    def isDefined: Boolean
  }

  // A Thing that reports itself as defined.
  val something = new Thing {
    val isDefined = true
  }

  // A Thing that reports itself as not defined.
  val nothing = new Thing {
    val isDefined = false
  }

  // Type-class instance: a Thing is "defined" exactly when its isDefined flag is true.
  implicit def definitionOfThing[T <: Thing]: Definition[T] =
    new Definition[T] {
      def isDefined(thing: T): Boolean = thing.isDefined
    }

  describe("Defined matcher") {

    describe("when work with 'thing should be (defined)'") {

      it("should do nothing when thing is defined") {
        something should (be (defined) or be (something))
        nothing should (be (defined) or be (nothing))
        something should (be (defined) or be (nothing))

        something should (be (something) or be (defined))
        something should (be (nothing) or be (defined))
        nothing should (be (nothing) or be (defined))

        something should (be (defined) or equal (something))
        nothing should (be (defined) or equal (nothing))
        something should (be (defined) or equal (nothing))

        something should (equal (something) or be (defined))
        something should (equal (nothing) or be (defined))
        nothing should (equal (nothing) or be (defined))
      }

      it("should throw TestFailedException with correct stack depth when thing is not defined") {
        val caught1 = intercept[TestFailedException] {
          nothing should (be (defined) or be (something))
        }
        assert(caught1.message === Some(wasNotDefined(nothing) + ", and " + wasNotEqualTo(nothing, something)))
        assert(caught1.failedCodeFileName === Some(fileName))
        assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))

        val caught2 = intercept[TestFailedException] {
          nothing should (be (something) or be (defined))
        }
        assert(caught2.message === Some(wasNotEqualTo(nothing, something) + ", and " + wasNotDefined(nothing)))
        assert(caught2.failedCodeFileName === Some(fileName))
        assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))

        val caught3 = intercept[TestFailedException] {
          nothing should (be (defined) or equal (something))
        }
        assert(caught3.message === Some(wasNotDefined(nothing) + ", and " + didNotEqual(nothing, something)))
        assert(caught3.failedCodeFileName === Some(fileName))
        assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))

        val caught4 = intercept[TestFailedException] {
          nothing should (equal (something) or be (defined))
        }
        assert(caught4.message === Some(didNotEqual(nothing, something) + ", and " + wasNotDefined(nothing)))
        assert(caught4.failedCodeFileName === Some(fileName))
        assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
      }
    }

    describe("when work with 'thing should not be defined'") {

      it("should do nothing when thing is not defined") {
        nothing should (not be defined or not be something)
        something should (not be defined or not be nothing)
        nothing should (not be defined or not be nothing)

        nothing should (not be something or not be defined)
        nothing should (not be nothing or not be defined)
        something should (not be nothing or not be defined)

        nothing should (not be defined or not equal something)
        something should (not be defined or not equal nothing)
        nothing should (not be defined or not equal nothing)

        nothing should (not equal something or not be defined)
        nothing should (not equal nothing or not be defined)
        something should (not equal nothing or not be defined)
      }

      it("should throw TestFailedException with correct stack depth when thing is defined") {
        val caught1 = intercept[TestFailedException] {
          something should (not be defined or not be something)
        }
        assert(caught1.message === Some(wasDefined(something) + ", and " + wasEqualTo(something, something)))
        assert(caught1.failedCodeFileName === Some(fileName))
        assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))

        val caught2 = intercept[TestFailedException] {
          something should (not be something or not be defined)
        }
        assert(caught2.message === Some(wasEqualTo(something, something) + ", and " + wasDefined(something)))
        assert(caught2.failedCodeFileName === Some(fileName))
        assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))

        val caught3 = intercept[TestFailedException] {
          something should (not be defined or not equal something)
        }
        assert(caught3.message === Some(wasDefined(something) + ", and " + equaled(something, something)))
        assert(caught3.failedCodeFileName === Some(fileName))
        assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))

        val caught4 = intercept[TestFailedException] {
          something should (not equal something or not be defined)
        }
        assert(caught4.message === Some(equaled(something, something) + ", and " + wasDefined(something)))
        assert(caught4.failedCodeFileName === Some(fileName))
        assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
      }
    }

    describe("when work with 'all(xs) should be (defined)'") {

      it("should do nothing when all(xs) is defined") {
        all(List(something)) should (be (defined) or be (something))
        all(List(nothing)) should (be (defined) or be (nothing))
        all(List(something)) should (be (defined) or be (nothing))

        all(List(something)) should (be (something) or be (defined))
        all(List(something)) should (be (nothing) or be (defined))
        all(List(nothing)) should (be (nothing) or be (defined))

        all(List(something)) should (be (defined) or equal (something))
        all(List(nothing)) should (be (defined) or equal (nothing))
        all(List(something)) should (be (defined) or equal (nothing))

        all(List(something)) should (equal (something) or be (defined))
        all(List(something)) should (equal (nothing) or be (defined))
        all(List(nothing)) should (equal (nothing) or be (defined))
      }

      it("should throw TestFailedException with correct stack depth when xs is not defined") {
        val left1 = List(nothing)
        val caught1 = intercept[TestFailedException] {
          all(left1) should (be (something) or be (defined))
        }
        assert(caught1.message === Some(allError(wasNotEqualTo(nothing, something) + ", and " + wasNotDefined(nothing), thisLineNumber - 2, left1)))
        assert(caught1.failedCodeFileName === Some(fileName))
        assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))

        val left2 = List(nothing)
        val caught2 = intercept[TestFailedException] {
          all(left2) should (be (defined) or be (something))
        }
        assert(caught2.message === Some(allError(wasNotDefined(nothing) + ", and " + wasNotEqualTo(nothing, something), thisLineNumber - 2, left2)))
        assert(caught2.failedCodeFileName === Some(fileName))
        assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))

        val left3 = List(nothing)
        val caught3 = intercept[TestFailedException] {
          all(left3) should (equal (something) or be (defined))
        }
        assert(caught3.message === Some(allError(didNotEqual(nothing, something) + ", and " + wasNotDefined(nothing), thisLineNumber - 2, left3)))
        assert(caught3.failedCodeFileName === Some(fileName))
        assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))

        val left4 = List(nothing)
        val caught4 = intercept[TestFailedException] {
          all(left4) should (be (defined) or equal (something))
        }
        assert(caught4.message === Some(allError(wasNotDefined(nothing) + ", and " + didNotEqual(nothing, something), thisLineNumber - 2, left4)))
        assert(caught4.failedCodeFileName === Some(fileName))
        assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
      }
    }

    describe("when work with 'all(xs) should not be defined'") {

      it("should do nothing when xs is not defined") {
        all(List(nothing)) should (not be defined or not be something)
        all(List(something)) should (not be defined or not be nothing)
        all(List(nothing)) should (not be defined or not be nothing)

        all(List(nothing)) should (not be something or not be defined)
        all(List(nothing)) should (not be nothing or not be defined)
        all(List(something)) should (not be nothing or not be defined)

        all(List(nothing)) should (not be defined or not equal something)
        all(List(something)) should (not be defined or not equal nothing)
        all(List(nothing)) should (not be defined or not equal nothing)

        all(List(nothing)) should (not equal something or not be defined)
        all(List(nothing)) should (not equal nothing or not be defined)
        all(List(something)) should (not equal nothing or not be defined)
      }

      it("should throw TestFailedException with correct stack depth when xs is not defined") {
        val left1 = List(something)
        val caught1 = intercept[TestFailedException] {
          all(left1) should (not be something or not be defined)
        }
        assert(caught1.message === Some(allError(wasEqualTo(something, something) + ", and " + wasDefined(something), thisLineNumber - 2, left1)))
        assert(caught1.failedCodeFileName === Some(fileName))
        assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))

        val left2 = List(something)
        val caught2 = intercept[TestFailedException] {
          all(left2) should (not be defined or not be something)
        }
        assert(caught2.message === Some(allError(wasDefined(something) + ", and " + wasEqualTo(something, something), thisLineNumber - 2, left2)))
        assert(caught2.failedCodeFileName === Some(fileName))
        assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))

        val left3 = List(something)
        val caught3 = intercept[TestFailedException] {
          all(left3) should (not equal something or not be defined)
        }
        assert(caught3.message === Some(allError(equaled(something, something) + ", and " + wasDefined(something), thisLineNumber - 2, left3)))
        assert(caught3.failedCodeFileName === Some(fileName))
        assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))

        val left4 = List(something)
        val caught4 = intercept[TestFailedException] {
          all(left4) should (not be defined or not equal something)
        }
        assert(caught4.message === Some(allError(wasDefined(something) + ", and " + equaled(something, something), thisLineNumber - 2, left4)))
        assert(caught4.failedCodeFileName === Some(fileName))
        assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
      }
    }
  }
}
| dotty-staging/scalatest | scalatest-test/src/test/scala/org/scalatest/ShouldBeDefinedLogicalOrImplicitSpec.scala | Scala | apache-2.0 | 13,226 |
/*
* Package object for collections package.
* Copyright (C) 2014 Michael Thorsley
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see [http://www.gnu.org/licenses/].
*/
package com.eigenvektor
/** Package for immutable collections.
*
* Currently the only class is [[RandomAccessList]], a form of list with constant-time head, cons, and tail
* operations, but also logarithmic time random access.
*/
package object collections {
  // Intentionally empty: exists only to host the package-level Scaladoc above.
}
package com.flowtomation.akkared.nodes.core
import java.time.Instant
import akka.actor.{Actor, ActorLogging, Cancellable, Props}
import akka.http.scaladsl.model.{ContentTypes, HttpEntity}
import akka.http.scaladsl.server.PathMatchers.Segment
import akka.parboiled2.RuleTrace.StringMatch
import com.flowtomation.akkared.{NodeContext, NodeType, Runtime}
import com.flowtomation.akkared.model.{FlowNode, ItemId}
import com.flowtomation.akkared.nodes.core.Debug.{complete, path, pathPrefix, post}
import com.flowtomation.akkared.nodes.core.InjectActor.Injection
import play.api.libs.json._
import scala.concurrent.duration._
/** Node type for "inject" nodes: emits a configured message when triggered
  * over HTTP, once at startup, and/or on a repeat schedule (see [[InjectActor]]).
  */
object Inject extends NodeType{
  val name = "inject"

  /** Creates the actor backing a single inject-node instance. */
  override def instance(ctx: NodeContext): Props = {
    Props(new InjectActor(ctx))
  }

  /** HTTP trigger route: POST /inject/<nodeId> sends an Injection command to the node. */
  override def routes(runtime: Runtime) = pathPrefix("inject") {
    path(Segment) { nodeId =>
      // if node not found return 404 with plain text body "Not Found"
      post {
        // Fire-and-forget: the actor emits its configured payload on receipt.
        runtime.send(ItemId(nodeId), Injection)
        // empty body
        // TODO inject into node
        complete(HttpEntity(ContentTypes.`text/plain(UTF-8)`, "OK"))
      }
    }
  }
}
/** JSON deserialisation support for [[InjectConfig]]. */
private object InjectConfig{
  // Play-JSON Reads derived from the case-class fields.
  implicit val reads = Json.reads[InjectConfig]
}
/** Raw inject-node configuration as stored in the node's other properties.
  * Field semantics below are inferred from how InjectActor consumes them —
  * NOTE(review): confirm against the editor's output format.
  */
private case class InjectConfig(
  once: Boolean,       // when true, one message is injected right after startup
  payload: JsValue,    // configured payload; interpretation depends on payloadType
  crontab: String,     // cron expression; currently unused by InjectActor
  payloadType: String, // e.g. "date", "str", "num", "json", "bool", "none" (see evaluateNodeProperty)
  topic: String,       // topic copied onto each injected message
  repeat: String       // repeat interval in whole seconds as a string; empty means no repeat
)
/** Message protocol for [[InjectActor]]. */
object InjectActor{
  /** Command: emit the configured payload now. */
  case object Injection
}
/** Actor backing one inject node. On an Injection command it builds a message
  * ({topic, payload}) from the node configuration and sends it downstream.
  * Optionally injects once at startup and/or periodically.
  */
private class InjectActor(ctx: NodeContext) extends Actor with ActorLogging{
  // TODO proper parsing of all possible values (format)
  // akka-red (once,true)
  // akka-red (payload,"")
  // akka-red (crontab,"")
  // akka-red (payloadType,"date")
  // akka-red (topic,"")
  // akka-red (repeat,"")
  //node.otherProperties.foreach(println)

  // Fail fast on malformed node configuration: the actor cannot work without it.
  private val config = Json.fromJson[InjectConfig](JsObject(ctx.node.otherProperties)).fold( e =>
    throw new RuntimeException(e.toString)
  , identity
  )

  // Empty string (or null) means "no repeat"; otherwise the value is whole seconds.
  // NOTE(review): `.toLong` throws on fractional values such as "1.5" — confirm
  // the editor only ever emits integer repeat intervals.
  val repeat: Option[FiniteDuration] = Option(config.repeat).filter(_.nonEmpty).map(_.toLong.seconds)
  val once = config.once

  // TODO add period injection
  // Inject a single message immediately after the actor starts, if configured.
  if(once){
    self ! Injection
  }

  // Schedule periodic self-injections when a repeat interval is configured;
  // otherwise keep an already-cancelled handle so postStop can cancel unconditionally.
  val repeatCancellable = repeat.fold(Cancellable.alreadyCancelled){ duration =>
    context.system.scheduler.schedule(duration, duration, self, Injection)(context.system.dispatcher)
  }

  override def receive: Receive = {
    case Injection =>
      log.info("Injection")
      // Default (and explicit "date") payload is the current epoch time in milliseconds.
      val payload: JsValue = if (( config.payloadType == null && config.payload == JsString("")) || config.payloadType == "date") {
        JsNumber(System.currentTimeMillis())
      } else if (config.payloadType == null) {
        config.payload
      } else if (config.payloadType == "none") {
        JsString("")
      } else {
        evaluateNodeProperty(config.payload, config.payloadType, config, "TODO")
      }
      val msg = Json.obj(
        "topic" -> config.topic,
        "payload" -> payload
      )
      ctx.send(msg)
    case m =>
      log.warning(m.toString)
  }

  // Stop periodic injections when the actor shuts down.
  override def postStop(): Unit = {
    repeatCancellable.cancel()
  }

  /** Interprets the raw configured payload according to `type`; unknown types
    * pass the value through unchanged. (Partial port of node-red's semantics —
    * the commented-out branches below are the remainder of the original.)
    */
  private def evaluateNodeProperty(value: JsValue, `type`: String, node: Any, msg: String): JsValue = {
    `type` match {
      case "str" => JsString(value.as[String])
      case "num" => JsNumber(BigDecimal(value.as[String]))
      case "json" => Json.parse(value.as[String])
      case "date" => JsNumber(System.currentTimeMillis())
      case "bool" => JsBoolean(Option(value).map(_.as[String].toLowerCase).contains("true"))
      case _ => value
    }
    // } else if (`type` == "re") {
    //   new RegExp(value)
    // } else if (`type` == "bin") {
    //   var data = JSON.parse(value)
    //   Buffer.from(data)
    // } else if (`type` == "msg" && msg) {
    //   getMessageProperty(msg,value)
    // } else if (`type` == "flow" && node) {
    //   node.context().flow.get(value)
    // } else if (`type` == "global" && node) {
    //   node.context().global.get(value)
    // } else if (`type` == "jsonata") {
    //   var expr = prepareJSONataExpression(value,node)
    //   return evaluateJSONataExpression(expr,msg)
    // }else{
    //   value.as[JsValue]
    // }
  }
}
| francisdb/akka-red | src/main/scala/com/flowtomation/akkared/nodes/core/Inject.scala | Scala | apache-2.0 | 4,115 |
/*
* Copyright 2015 – 2016 Martin Seeler
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package rx.oanda.events
import akka.NotUsed
import akka.actor.ActorSystem
import akka.stream.Materializer
import akka.stream.scaladsl.Source
import cats.data.Xor
import rx.oanda.OandaEnvironment._
import rx.oanda._
import rx.oanda.utils.Heartbeat
import EventClientRequest._
/** Streaming client for the OANDA events (transactions) endpoint.
  *
  * @param env the OANDA environment (practice/trade) supplying headers and the stream flow
  */
class EventClient(env: OandaEnvironment)(implicit sys: ActorSystem, mat: Materializer, A: ConnectionPool) extends StreamingConnection {

  // Connection flow used by StreamingConnection to run the streaming HTTP request.
  private[oanda] val streamingConnection = env.streamFlow[Long]

  /**
   * Stream all events related to all accounts / a subset of accounts.
   *
   * @param accounts The account ids of the accounts to monitor events for. If empty, all accounts are monitored.
   * @return A stream which emits all events when they occur, as well as heartbeats.
   */
  def liveEvents(accounts: Seq[Long] = Nil): Source[Xor[OandaEvent, Heartbeat], NotUsed] =
    startStreaming[OandaEvent](eventStreamRequest(accounts).withHeaders(env.headers), "transaction").log("events")
}
package ru.maizy.cheesecake.server.checker
/**
* Copyright (c) Nikita Kovaliov, maizy.ru, 2016
* See LICENSE.txt for details.
*/
import java.time.ZonedDateTime
import ru.maizy.cheesecake.server.ExtraInfo
import ru.maizy.cheesecake.server.service.Endpoint
/** Possible outcomes of a single endpoint check. */
object CheckStatus extends Enumeration {
  /** Alias so call sites can write `CheckStatus.Type`. */
  type Type = Value

  // Declared in order, so ids are 0, 1, 2 and names match the identifiers.
  val Ok: Type = Value("Ok")
  val UnableToCheck: Type = Value("UnableToCheck")
  val Unavailable: Type = Value("Unavailable")
}
/** Marker trait for messages understood by the checker. */
sealed trait CheckerProtocol

/** Base for check requests, carrying the endpoint to probe.
  * NOTE(review): the constructor parameter is neither stored (no `val`) nor
  * used here — confirm whether it is still needed.
  */
abstract class AbstractCheck(endpoint: Endpoint) extends CheckerProtocol

/** Request to check a single endpoint. */
case class Check(endpoint: Endpoint) extends AbstractCheck(endpoint)

/** Outcome of a completed endpoint check. */
trait CheckResult {
  // Result classification (Ok / UnableToCheck / Unavailable).
  def status: CheckStatus.Type
  // When the check was performed.
  def checkTime: ZonedDateTime
  // Optional checker-specific details.
  def extraInfo: Option[ExtraInfo]
}
| maizy/cheesecake | server/src/main/scala/ru/maizy/cheesecake/server/checker/CheckerProtocol.scala | Scala | apache-2.0 | 662 |
package test.verifier
import org.scalatest.{GivenWhenThen, FlatSpec}
import org.scalatest.matchers.ShouldMatchers._
import tap.types.classes._
import tap._
import tap.types.kinds._
import tap.types._
import tap.types.Type._
import tap.types.Natives._
import language.reflectiveCalls
import tap.types.classes.ClassEnvironments._
import tap.verifier.ModuleTypeInference
import tap.ir._
import tap.types.inference.TIEnv
/** Unit tests for ModuleTypeInference: instantiating class-instance member
  * types and grouping implicit definitions into dependency-ordered binding groups.
  */
class ModuleTypeInferenceTests extends FlatSpec with GivenWhenThen {

  behavior of "apply"

  ignore should "do some things" in {}

  // ------------------------------------------------------------------------
  behavior of "makeInstanceMemberType"

  it should "throw an error if instantiating a type that does not have predicates for the instance class" in {
    val sc = Qual(List(IsIn(ModuleId("Prelude", "Show"), List(TGen(0, 0)))), Forall(0, List(Star), TGen(0, 0) fn tString))
    val tci = Inst("Test", Nil, IsIn(ModuleId("Test", "NotShow"), List(tNumber)))
    val mti = new ModuleTypeInference(Nil, Map.empty, Map.empty)
    evaluating {
      mti.makeInstanceMemberType(TIEnv.empty, sc, tci)
    } should produce [Error]
  }

  it should "throw an error if passed a non-Forall type" in {
    val sc = Qual(List(IsIn(ModuleId("Prelude", "Show"), List(TVar("a", Star)))), TVar("a", Star) fn tString)
    val tci = Inst("Test", Nil, IsIn(ModuleId("Prelude", "Show"), List(tNumber)))
    val mti = new ModuleTypeInference(Nil, Map.empty, Map.empty)
    evaluating {
      mti.makeInstanceMemberType(TIEnv.empty, sc, tci)
    } should produce [Error]
  }

  it should "throw an error if the instance type and forall'd variable counts differ" in {
    Given("too many types on the instance")
    evaluating {
      val sc = Qual(List(IsIn(ModuleId("Test", "MultiClass"), List(TGen(0, 0)))), Forall(0, List(Star), TGen(0, 0) fn tString))
      val tci = Inst("Test", Nil, IsIn(ModuleId("Test", "MultiClass"), List(tNumber, tString)))
      val mti = new ModuleTypeInference(Nil, Map.empty, Map.empty)
      mti.makeInstanceMemberType(TIEnv.empty, sc, tci)
    } should produce [Error]

    Given("not enough types on the instance")
    evaluating {
      val sc = Qual(List(IsIn(ModuleId("Test", "MultiClass"), List(TGen(0, 0), TGen(0, 1)))), Forall(0, List(Star, Star), (TGen(0, 0) fn TGen(0, 1)) fn tString))
      val tci = Inst("Test", Nil, IsIn(ModuleId("Test", "MultiClass"), List(tNumber)))
      val mti = new ModuleTypeInference(Nil, Map.empty, Map.empty)
      mti.makeInstanceMemberType(TIEnv.empty, sc, tci)
    } should produce [Error]
  }

  it should "instantiate a type for a particular class instance" in {
    val sc = Qual(List(IsIn(ModuleId("Prelude", "Show"), List(TGen(0, 0)))), Forall(0, List(Star), TGen(0, 0) fn tString))
    val tci = Inst("Test", Nil, IsIn(ModuleId("Prelude", "Show"), List(tNumber)))
    val mti = new ModuleTypeInference(Nil, Map.empty, Map.empty)
    mti.makeInstanceMemberType(TIEnv.empty, sc, tci)._2 should be ===
      Qual(List(IsIn(ModuleId("Prelude", "Show"), List(tNumber))), tNumber fn tString)
  }

  it should "instantiate a type for a particular class instance when the class has multiple parameters" in {
    val sc = Qual(List(IsIn(ModuleId("Test", "MultiClass"), List(TGen(0, 0), TGen(0, 1)))), Forall(0, List(Star, Star), (TGen(0, 0) fn TGen(0, 1)) fn tString))
    val tci = Inst("Test", Nil, IsIn(ModuleId("Test", "MultiClass"), List(tNumber, tString)))
    val mti = new ModuleTypeInference(Nil, Map.empty, Map.empty)
    mti.makeInstanceMemberType(TIEnv.empty, sc, tci)._2 should be ===
      Qual(List(IsIn(ModuleId("Test", "MultiClass"), List(tNumber, tString))), (tNumber fn tString) fn tString)
  }

  // ------------------------------------------------------------------------
  behavior of "buildClassEnv"

  ignore should "do some things" in {}

  // ------------------------------------------------------------------------
  behavior of "resolveBindingGroups"

  it should "group implicit definitions as tightly as possible" in {
    /*
      Equivalent to:
        (def tmpX (-> a a))
        (let tmpX (lambda (x) (tmpD x) x))
        (let tmpA (lambda (a) (tmpB a) a))
        (let tmpB (lambda (b) (tmpA b) (tmpX b) b))
        (let tmpC (lambda (c) (tmpA c) (tmpD c) c))
        (let tmpD (lambda (d) (tmpC d) d))
      Or as a .dot graph:
        digraph {
          X [shape=box]
          A -> B
          B -> A
          B -> X
          C -> A
          C -> D
          D -> C
          X -> D
        }
    */
    // Implicit (untyped) bindings forming the dependency graph above.
    val mis: Map[Id, TapExpr] = Map(
      ModuleId("Prelude","tmpC") -> FunctionExpr(Argument("c"),BlockExpr(List(ApplyExpr(ValueReadExpr(ModuleId("Prelude","tmpA")),ValueReadExpr(LocalId("c"))), ApplyExpr(ValueReadExpr(ModuleId("Prelude","tmpD")),ValueReadExpr(LocalId("c"))), ValueReadExpr(LocalId("c"))))),
      ModuleId("Prelude","tmpX") -> FunctionExpr(Argument("x"),BlockExpr(List(ApplyExpr(ValueReadExpr(ModuleId("Prelude","tmpD")),ValueReadExpr(LocalId("x"))), ValueReadExpr(LocalId("x"))))),
      ModuleId("Prelude","tmpB") -> FunctionExpr(Argument("b"),BlockExpr(List(ApplyExpr(ValueReadExpr(ModuleId("Prelude","tmpA")),ValueReadExpr(LocalId("b"))), ApplyExpr(ValueReadExpr(ModuleId("Prelude","tmpX")),ValueReadExpr(LocalId("b"))), ValueReadExpr(LocalId("b"))))),
      ModuleId("Prelude","tmpD") -> FunctionExpr(Argument("d"),BlockExpr(List(ApplyExpr(ValueReadExpr(ModuleId("Prelude","tmpC")),ValueReadExpr(LocalId("d"))), ValueReadExpr(LocalId("d"))))),
      ModuleId("Prelude","tmpA") -> FunctionExpr(Argument("a"),BlockExpr(List(ApplyExpr(ValueReadExpr(ModuleId("Prelude","tmpB")),ValueReadExpr(LocalId("a"))), ValueReadExpr(LocalId("a")))))
    )
    // Explicitly typed bindings: only tmpX has a declared type.
    val mts: Map[Id, Qual[Type]] = Map(
      ModuleId("Prelude","tmpX") -> Qual(List(),Forall(6,List(Star),TAp(TAp(TCon(ModuleId("Prelude","->"),Kfun(Star,Kfun(Star,Star))),TGen(6,0)),TGen(6,0))))
    )
    val mti = new ModuleTypeInference(Nil, Map.empty, Map.empty)
    val bgs = mti.resolveBindingGroups(mis, mts)
    // Expected: one explicit group (tmpX) and two mutually-recursive implicit groups.
    val expl = List(ModuleId("Prelude","tmpX"))
    val impls = List(
      List(ModuleId("Prelude","tmpB"), ModuleId("Prelude","tmpA")),
      List(ModuleId("Prelude","tmpD"), ModuleId("Prelude","tmpC"))
    )
    bgs should be === List((expl, impls))
  }

  ignore should "do some other things" in {}
}
| garyb/tap | src/test/scala/test/verifier/ModuleTypeInferenceTests.scala | Scala | mit | 6,716 |
package controllers.registration.returns
import itutil.ControllerISpec
import models.api.returns.Returns
import models.{ConditionalValue, NIPCompliance}
import org.jsoup.Jsoup
import play.api.http.HeaderNames
import play.api.test.Helpers._
// Integration spec for the "sell or move goods into Northern Ireland" (NIP)
// page of the VAT registration journey.
class SellOrMoveNipControllerISpec extends ControllerISpec {

  val testAmount: BigDecimal = 1234.123
  lazy val url = controllers.registration.returns.routes.SellOrMoveNipController.show.url
  // Stored answer: "sells/moves goods to the EU" = yes with testAmount; the
  // "receives goods" half of the NIP answers is left unanswered.
  val testNIPCompliance: NIPCompliance = NIPCompliance(Some(ConditionalValue(true, Some(testAmount))), None)

  "Show sell or move (NIP) page" should {
    "return OK with pre-pop when is no value for 'goodsToEU' in the backend" in new Setup {
      given()
        .user.isAuthorised()
        .s4lContainer[Returns].contains(Returns(northernIrelandProtocol = Some(testNIPCompliance)))

      insertCurrentProfileIntoDb(currentProfile, sessionId)

      val response = buildClient("/sell-or-move-nip").get()
      whenReady(response) { res =>
        res.status mustBe OK
      }
    }

    "Return OK with pre-pop when there is a value for 'goodsToEU' in the backend" in {
      given()
        .user.isAuthorised()
        .s4lContainer[Returns].contains(Returns(northernIrelandProtocol = Some(testNIPCompliance)))

      val res = buildClient(url).get()
      whenReady(res) { result =>
        result.status mustBe OK
        // The stored "yes" answer should come back pre-selected on the form.
        Jsoup.parse(result.body).getElementsByAttribute("checked").first().parent().text() mustBe "Yes"
      }
    }

    // NOTE(review): this "Submit" block is nested inside the "Show ... page"
    // block above — it looks like a closing brace is misplaced; confirm the
    // intended test-tree structure.
    "Submit send goods to EU" should {
      "return SEE_OTHER for receive goods" in new Setup {
        given()
          .user.isAuthorised()
          .s4lContainer[Returns].contains(Returns(northernIrelandProtocol = Some(testNIPCompliance)))
          .s4lContainer[Returns].isUpdatedWith(Returns(northernIrelandProtocol = Some(NIPCompliance(Some(ConditionalValue(true, Some(testAmount))), Some(ConditionalValue(true, Some(testAmount)))))))
          .vatScheme.contains(emptyVatSchemeNetp)

        insertCurrentProfileIntoDb(currentProfile, sessionId)

        val response = buildClient("/sell-or-move-nip").post(Map("value" -> Seq("true"), "sellOrMoveNip" -> Seq("123456")))
        whenReady(response) { res =>
          res.status mustBe SEE_OTHER
          res.header(HeaderNames.LOCATION) mustBe Some(controllers.registration.returns.routes.ReceiveGoodsNipController.show.url)
        }
      }
    }
  }
}
| hmrc/vat-registration-frontend | it/controllers/registration/returns/SellOrMoveNipControllerISpec.scala | Scala | apache-2.0 | 2,397 |
package com.codahale.logula.tests
import com.codahale.simplespec.Spec
import com.codahale.logula.{Log, Logging}
import org.junit.Test
/** Minimal mix-in of [[Logging]] used by the spec below to expose its logger. */
class LoggingExample extends Logging {
  /** Exposes the mixed-in logger for assertions. */
  def getLog: Log = log
}
/** Verifies that mixing in [[Logging]] supplies a [[Log]] instance. */
class LoggingSpec extends Spec {
  class `A class which extends Logging` {
    val example = new LoggingExample

    @Test def `has a Log instance` = {
      example.getLog.must(beA[Log])
    }
  }
}
| codahale/logula | src/test/scala/com/codahale/logula/tests/LoggingSpec.scala | Scala | mit | 397 |
package x7c1.wheat.modern.sequence
import org.scalatest.{FlatSpecLike, Matchers}
import x7c1.wheat.modern.features.HasShortLength
class HeadlineSequencerTest extends FlatSpecLike with Matchers {

  behavior of classOf[HeadlineSequencer[_, _]].getSimpleName

  // Both the headline-or-element sum type and the raw element type must be
  // usable as "short" items by the sequencer.
  implicit def regardEitherAsShort = HasShortLength[Either[String, Char]]
  implicit def regardIntAsShort = HasShortLength[Char]

  /** Sequencer under test: groups equal neighbours, labelling each run with its length. */
  private def newSequencer = HeadlineSequencer[Char, String](
    equals = _ == _,
    toHeadline = xs => s"label(${xs.length})"
  )

  it can "generate new sequence with headlines" in {
    val sequencer = newSequencer

    val grouped = sequencer.derive(Sequence.from(Seq('a', 'b', 'b', 'c', 'c', 'c')))
    grouped.toSeq shouldBe Seq(
      Left("label(1)"), Right('a'),
      Left("label(2)"), Right('b'), Right('b'),
      Left("label(3)"), Right('c'), Right('c'), Right('c')
    )

    val single = sequencer.derive(Sequence.from(Seq('a')))
    single.toSeq shouldBe Seq(
      Left("label(1)"), Right('a')
    )

    val regrouped = sequencer.derive(Sequence.from(Seq('a', 'b', 'a', 'a', 'a')))
    regrouped.toSeq shouldBe Seq(
      Left("label(1)"), Right('a'),
      Left("label(1)"), Right('b'),
      Left("label(3)"), Right('a'), Right('a'), Right('a')
    )
  }

  it should "generate empty Sequence if given Sequence is empty" in {
    val sequencer = newSequencer
    val derived = sequencer.derive[Sequence](Sequence.from(Seq()))
    derived.toSeq shouldBe Seq()
  }
}
| x7c1/Linen | wheat-modern/src/test/scala/x7c1/wheat/modern/sequence/HeadlineSequencerTest.scala | Scala | mit | 1,583 |
package com.github.cloudinaboxsoftware.vaadin.util
import io.Source
object Execute {

  /**
   * Runs `command` through `bash -c` and returns its standard-output lines.
   *
   * Output is read eagerly *before* waiting for the process to exit, so a
   * command producing more output than the pipe buffer cannot deadlock, and
   * the returned Seq is fully materialised (the previous version returned a
   * lazy view over a stream belonging to an already-finished process).
   */
  def apply(command: String): Seq[String] = {
    val process = Runtime.getRuntime.exec(Array[String]("bash", "-c", command))
    try {
      val stdout = Source.fromInputStream(process.getInputStream)
      try stdout.getLines().toList
      finally stdout.close()
    } finally {
      // Reap the child to avoid leaving a zombie process behind.
      process.waitFor()
    }
  }
}
| CloudInABox/scalavaadinutils | src/main/scala/com/github/cloudinaboxsoftware/vaadin/util/Execute.scala | Scala | mit | 315 |
/**
* Copyright 2015 Thomson Reuters
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cmwell.batch.boot
import cmwell.ctrl.config.Jvms
import cmwell.indexer.Indexer
import com.typesafe.config.ConfigFactory
import cmwell.tlog.{TLog, TLogState}
import cmwell.imp.IMPService
import cmwell.fts.FTSServiceES
import com.typesafe.scalalogging.LazyLogging
import concurrent._
import scala.compat.Platform._
import scala.concurrent.ExecutionContext.Implicits.global
import cmwell.driver.Dao
import scala.util.Success
import scala.util.Failure
import cmwell.irw.IRWService
import k.grid.{Grid, GridConnection}
import cmwell.rts.Publisher
import uk.org.lidalia.sysoutslf4j.context.SysOutOverSLF4J
import scala.util.Failure
import scala.util.Success
/**
* User: Israel
* Date: 5/20/13
* Time: 12:05 PM
*/
// Entry point of the cm-well batch process: joins the cluster grid, replays
// the updates/uuids transaction logs through the IMP service, and starts the
// indexer. Shutdown is handled by a JVM shutdown hook registered at the end.
object Runner extends App with LazyLogging{
  import Settings._

//  val isIndexer:Boolean = java.lang.Boolean.valueOf(Option(System.getProperty("isIndexer")).getOrElse("false"))

  logger info ("Running batch")

  //SLF4J initialization is not thread safe, so it's "initialized" by writing some log and only then using sendSystemOutAndErrToSLF4J.
  //Without it there will be en error in stderr and some log line at the beginning will be lost
  SysOutOverSLF4J.sendSystemOutAndErrToSLF4J()

  //logger info(s"Grid.join with clusterName [$clusterName] gridBindIP [$gridBindIP] gridSeeds [$gridSeeds] gridBindPort [$gridBindPort]")
  Grid.setGridConnection(GridConnection(memberName = "batch", labels = Set("publisher")))
  Grid.joinClient
  // give the grid a moment to settle before publishing
  Thread.sleep(1000)
  Publisher.init

  // open the two transaction logs consumed by this process
  val updatesTlog = TLog(updatesTLogName, updatesTLogPartition)
  updatesTlog.init()
  val uuidsTlog = TLog(uuidsTLogName, uuidsTLogPartition)
  uuidsTlog.init()

  val irwServiceDao = Dao(irwServiceDaoClusterName,irwServiceDaoKeySpace,irwServiceDaoHostName)
  logger.info("If you got nothing to log, Logan, don't log logan!")
  val irwService = IRWService(irwServiceDao,false)
  val ftsService = FTSServiceES.getOne("ftsService.yml")

  //val fIndexer = future {blocking{ indexer.index}}
  //fIndexer.onComplete{case Success(_) => logger info("Indexer exited"); case Failure(t) => logger error (t.getLocalizedMessage + "\\n" + t.getCause.getStackTraceString + "\\n" + t.getStackTraceString)}

  logger debug ("starting imp")
  // restore the IMP service's persisted position in the updates tlog
  val impState = TLogState("imp" , updatesTLogName , updatesTLogPartition)
  impState.loadState

  val impService = IMPService(updatesTlog, uuidsTlog, irwService, impState , impParallelism, impBatchSize)
  // run the IMP loop on a (blocking-marked) thread of the global pool
  val fImp = Future {blocking{impService.process} }
  fImp.onComplete{case Success(_) => logger info("IMP exited"); case Failure(t) => logger error (t.getCause.getMessage + "\\n" + t.getLocalizedMessage + "\\n" + t.getCause.getStackTrace().mkString("", EOL, EOL) + "\\n" + t.getStackTrace().mkString("", EOL, EOL))}

//  logger info ("waiting 7 minutes before starting indexer")
//  Thread.sleep(1000 * 60 * 7)

  val indexerState = TLogState("indexer" , uuidsTLogName , updatesTLogPartition)
//  val indexer = RateIndexer(uuidsTlog, irwService, ftsService, indexerState)
//  indexer.start
  val akkaIndexer = new Indexer(uuidsTlog, updatesTlog, irwService, ftsService, indexerState)

  sys.addShutdownHook{
//    logger info ("stopping indexing service")
//    indexer.terminate
    logger info ("stopping imp service")
    impService.terminate

    // shutdown tlog
    updatesTlog.shutdown()
    uuidsTlog.shutdown()
    irwServiceDao.shutdown()

    // shutdown dao
    // NOTE(review): irwServiceDao.shutdown() was already called just above —
    // this second call looks like an accidental duplicate; confirm.
    irwServiceDao.shutdown()
    Grid.shutdown
    logger info ("Asta La Vista Baby.")
  }
}
}
/** Configuration values for the batch process, loaded once from Typesafe config. */
object Settings {
  val hostName = java.net.InetAddress.getLocalHost.getHostName
  val config = ConfigFactory.load()

  // tLogs DAO
  val tLogsDaoHostName = config.getString("tLogs.hostName")
  val tLogsDaoClusterName = config.getString("tLogs.cluster.name")
  val tLogsDaoKeySpace = config.getString("tLogs.keyspace")
  val tLogsDaoColumnFamily = config.getString("tLogs.columnFamilyName")
  val tLogsDaoMaxConnections = config.getInt("tLogs.maxConnections")

  // updates tLog; the partition falls back to a host-derived name when unset.
  // Try(...) only swallows non-fatal errors, unlike the previous catch of Throwable.
  val updatesTLogName = config.getString("updatesTlog.name")
  val updatesTLogPartition = scala.util.Try(config.getString("updatesTlog.partition")).getOrElse("updatesPar_" + hostName)

  // uuids tLog
  val uuidsTLogName = config.getString("uuidsTlog.name")
  val uuidsTLogPartition = scala.util.Try(config.getString("uuidsTlog.partition")).getOrElse("uuidsPar_" + hostName)

  // infotons DAO
  val irwServiceDaoHostName = config.getString("irwServiceDao.hostName")
  val irwServiceDaoClusterName = config.getString("irwServiceDao.clusterName")
  val irwServiceDaoKeySpace = config.getString("irwServiceDao.keySpace")

  val impParallelism = config.getInt("imp.parallelism")
  val impBatchSize = config.getInt("imp.batchSize")

  val gridBindIP = config.getString("cmwell.grid.bind.host")
  val gridBindPort = config.getInt("cmwell.grid.bind.port")
  val gridSeeds = Set.empty[String] ++ config.getString("cmwell.grid.seeds").split(";")
  val clusterName = config.getString("cmwell.clusterName")

//  val pollingInterval = config.getLong("indexer.pollingInterval")
//  val bucketsSize = config.getInt("indexer.bucketsSize")
}
| nruppin/CM-Well | server/cmwell-batch/src/main/scala/cmwell/batch/boot/Runner.scala | Scala | apache-2.0 | 5,763 |
package org.me.PrivateSpark
import org.apache.spark.rdd.RDD
import org.me.PrivateSpark.api.{Range, Lap_RDD, Lap_PairRDD}
import org.me.PrivateSpark.impl.{Lap_RDD_Reduceable, Lap_RDD_NonReduceable, Lap_PairRDD_Reduceable, Lap_PairRDD_NonReduceable}
import scala.reflect.{ClassTag, classTag}
object RDDCreator {

  /**
   * Wraps an RDD in a privacy-preserving Lap_RDD. When the runtime element
   * type is Double the reduceable implementation is used; otherwise the
   * non-reduceable wrapper is returned.
   */
  def create[T : ClassTag](delegate : RDD[T], info : QueryInfo, enforcement : Single_Enforcement)
                          (implicit tag : ClassTag[T]) : Lap_RDD[T] = {
    delegate match {
      case reducible : RDD[Double @unchecked] if tag == classTag[Double] =>
        // The ClassTag guard establishes T == Double at runtime, so these casts are safe.
        val _delegate = delegate.asInstanceOf[RDD[Double]]
        val result = new Lap_RDD_Reduceable[Double](_delegate, info, enforcement)
        result.asInstanceOf[Lap_RDD[T]]
      case _ =>
        new Lap_RDD_NonReduceable[T](delegate, info, enforcement)
    }
  }

  /** Pair-RDD variant of create: specialises when the value type is Double. */
  def create[K, V : ClassTag] (delegate : RDD[(K, V)], info : QueryInfo,
                               enforcement: Pair_Enforcement[K])
                              (implicit tag : ClassTag[V]) : Lap_PairRDD[K, V] = {
    delegate match {
      case reducible: RDD[(K, Double)@unchecked] if tag == classTag[Double] =>
        val _delegate = delegate.asInstanceOf[RDD[(K, Double)]]
        val result = new Lap_PairRDD_Reduceable[K, Double](_delegate, info, enforcement)
        result.asInstanceOf[Lap_PairRDD[K, V]]
      case _ =>
        new Lap_PairRDD_NonReduceable[K, V](delegate, info, enforcement)
    }
  }

  /** Clamps every value into its key's configured range. */
  def enforceRanges[K, V : ClassTag](delegate : RDD[(K, V)], info : QueryInfo,
                                     enforcement : Pair_Enforcement[K]) (implicit tag : ClassTag[V])
      : Lap_PairRDD_Reduceable[K, Double] = {
    // NOTE(review): unconditionally casts values to Double (no ClassTag guard
    // like the methods above) — callers must guarantee numeric values; confirm.
    val _delegate = delegate.asInstanceOf[RDD[(K, Double)]]
    val enforcer = Utils.enforce(enforcement.ranges) _
    new Lap_PairRDD_Reduceable[K, Double](_delegate.map(enforcer), info, enforcement)
  }

  // Key enforcement needs to happen for both Reduceable and NonReduceable RDDs!
  /** Drops pairs whose key is not in the allowed key set. */
  def filterKeys[K, V : ClassTag](delegate : RDD[(K, V)], info : QueryInfo,
                                  enforcement : Pair_Enforcement[K])
      : Lap_PairRDD[K, V] = {
    val matcher = Utils.keyMatch(enforcement.keys.toSet) _
    create(delegate.filter(matcher), info, enforcement)
  }
}
| alec-heif/MIT-Thesis | spark-wrapper/src/main/scala/org/me/PrivateSpark/RDDCreator.scala | Scala | mit | 2,278 |
package org.apache.activemq.apollo.broker.store.leveldb
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.util.ArrayList
import java.util.Iterator
import java.util.List
import java.util.NoSuchElementException
import org.apache.activemq.apollo.util.TreeMap
/** Companion helpers for [[Interval]]. */
object Interval {
  /** Builds the unit-width interval covering exactly `start`, i.e. [start, start + 1). */
  def apply[N](start: N)(implicit numeric: scala.math.Numeric[N]): Interval[N] = {
    val limit = numeric.plus(start, numeric.one)
    Interval(start, limit)
  }
}
/** A half-open numeric range [start, limit). */
case class Interval[N](start: N, limit: N)(implicit numeric: scala.math.Numeric[N]) {

  import numeric._

  /** Number of values covered by this interval. */
  def size = limit - start

  /** Last value inside the interval (inclusive upper end). */
  def end = limit - one

  /** Copy of this interval with a new lower bound. */
  def start(value: N): Interval[N] = Interval(value, limit)

  /** Copy of this interval with a new upper bound. */
  def limit(value: N): Interval[N] = Interval(start, value)

  override def toString =
    if (start == end) start.toString else s"$start-$end"

  /** True when start <= value < limit. */
  def contains(value: N): Boolean = start <= value && value < limit
}
/**
* Tracks numeric ranges. Handy for keeping track of things like allocation or free lists.
*
* @author <a href="http://hiramchirino.com">Hiram Chirino</a>
*/
case class IntervalSet[N](implicit numeric: scala.math.Numeric[N]) extends java.lang.Iterable[Interval[N]] {

  import numeric._
  import collection.JavaConversions._

  // Disjoint intervals keyed by their start value, kept in ascending order.
  private final val ranges = new TreeMap[N, Interval[N]]

  /** Returns a deep copy of this set. */
  def copy = {
    val rc = new IntervalSet[N]
    for (r <- iterator) {
      rc.ranges.put(r.start, Interval(r.start, r.limit))
    }
    rc
  }

  /** Adds the single value `r`. */
  def add(r: N): Unit = add(Interval(r))

  /** Adds an interval, merging any overlapping or adjacent stored intervals. */
  def add(r: Interval[N]): Unit = {
    var start = r.start
    var limit = r.limit
    // Walk backwards from the last interval starting at or before `limit`.
    var entry = ranges.floorEntry(limit)
    while (entry != null) {
      var curr = entry
      var range = curr.getValue
      entry = entry.previous
      if (range.limit < start) {
        // Entirely before the new interval: nothing left to merge.
        entry = null
      } else {
        if (limit < range.limit) {
          limit = range.limit
        }
        if (start < range.start) {
          // The new interval subsumes this one: drop it and keep scanning.
          ranges.removeEntry(curr)
        } else {
          // The new interval starts inside this one: extend it in place.
          curr.setValue(range.limit(limit))
          return
        }
      }
    }
    ranges.put(start, Interval(start, limit))
  }

  /** Removes the single value `r`. */
  def remove(r: N): Unit = remove(Interval(r))

  /** Removes an interval, splitting partially covered stored intervals. */
  def remove(r: Interval[N]): Unit = {
    val start = r.start
    var limit = r.limit
    var entry = ranges.lowerEntry(limit)
    while (entry != null) {
      var curr = entry
      var range = curr.getValue
      entry = entry.previous
      if (range.limit <= start) {
        entry = null
      } else {
        if (limit < range.limit) {
          // Keep the tail that extends past the removed span.
          ranges.put(limit, Interval(limit, range.limit))
        }
        if (start <= range.start) {
          ranges.removeEntry(curr)
        } else {
          // Keep the head that precedes the removed span and stop.
          curr.setValue(range.limit(start))
          entry = null
        }
      }
    }
  }

  /** True when some stored interval contains `value`. */
  def contains(value: N) = {
    var entry = ranges.floorEntry(value)
    if (entry == null) {
      false
    } else {
      entry.getValue.contains(value)
    }
  }

  def clear: Unit = ranges.clear

  /** Replaces this set's contents with a copy of `source`'s. */
  def copy(source: IntervalSet[N]): Unit = {
    ranges.clear
    for (entry <- source.ranges.entrySet) {
      ranges.put(entry.getKey, entry.getValue)
    }
  }

  /** Total count of individual values covered by all intervals. */
  def size = {
    var rc = 0
    var entry = ranges.firstEntry
    while (entry != null) {
      rc += entry.getValue.size.toInt()
      entry = entry.next
    }
    rc
  }

  def toArrayList = {
    new ArrayList(ranges.values)
  }

  override def toString = {
    "[ " + ranges.values().mkString(", ") + " ]"
  }

  def iterator: Iterator[Interval[N]] = {
    return ranges.values.iterator
  }

  /** All covered values expanded one by one, in ascending order. */
  def values: List[N] = {
    var rc = new ArrayList[N]
    for (i <- new ValueIterator(iterator)) {
      rc.add(i)
    }
    return rc
  }

  def valueIterator: Iterator[N] = new ValueIterator(iterator)

  def valuesIteratorNotInInterval(r: Interval[N]): Iterator[N] = new ValueIterator(iteratorNotInInterval(r))

  def isEmpty = ranges.isEmpty

  /** Iterates the gaps: sub-intervals of `mask` that this set does not cover. */
  def iteratorNotInInterval(mask: Interval[N]): java.util.Iterator[Interval[N]] = {
    return new Iterator[Interval[N]] {
      private var iter = ranges.values.iterator
      private var last = new Interval(mask.start, mask.start)
      private var _next: Interval[N] = null

      def hasNext: Boolean = {
        // BUGFIX: the original read `next` (the method) instead of the `_next`
        // field here and in the return below, causing mutual recursion between
        // hasNext and next.
        while (_next == null && last.limit < mask.limit && iter.hasNext) {
          var r = iter.next
          if (r.limit >= last.limit) {
            if (r.start < last.limit) {
              last = new Interval(last.start, r.limit)
            } else {
              if (r.start < mask.limit) {
                _next = new Interval(last.limit, r.start)
              } else {
                _next = new Interval(last.limit, mask.limit)
              }
            }
          }
        }
        // NOTE(review): a trailing gap after the last stored interval (iterator
        // exhausted before mask.limit) is never emitted, and `last` bookkeeping
        // after a gap is emitted looks suspect — confirm intended semantics.
        return _next != null
      }

      def next: Interval[N] = {
        if (!hasNext) {
          throw new NoSuchElementException
        }
        // BUGFIX: was `last = next`, an unconditional recursive self-call; the
        // produced value lives in `_next`.
        last = _next
        _next = null
        return last
      }

      def remove: Unit = {
        throw new UnsupportedOperationException
      }
    }
  }

  // Expands a sequence of intervals into the individual values they cover.
  private final class ValueIterator(val ranges: Iterator[Interval[N]]) extends java.util.Iterator[N] {
    private var range: Interval[N] = null
    private var _next: Option[N] = None
    private var last: N = zero

    def hasNext: Boolean = {
      if (_next == None) {
        // BUGFIX: was `if (Interval == null)` — comparing the companion object
        // to null (always false), so the first call dereferenced a null range.
        if (range == null) {
          if (ranges.hasNext) {
            range = ranges.next
            _next = Some(range.start)
          } else {
            return false
          }
        } else {
          _next = Some(last + one)
        }
        if (_next.get == (range.limit - one)) {
          // The produced value is the last of this range: fetch a new range next time.
          range = null
        }
      }
      return _next.isDefined
    }

    def next: N = {
      if (!hasNext) {
        throw new NoSuchElementException
      }
      last = _next.get
      _next = None
      return last
    }

    def remove: Unit = throw new UnsupportedOperationException
  }
}
package com.sksamuel.elastic4s
import com.sksamuel.elastic4s.requests.script.{Script, ScriptBuilderFn, ScriptType}
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
class ScriptBuilderFnTest extends AnyFunSuite with Matchers {
test("should handle recursive maps") {
ScriptBuilderFn(Script("myscript", params = Map("a" -> 1.2, "b" -> Map("c" -> true, "d" -> List(Map("e" -> 3)))))).string shouldBe
"""{"source":"myscript","params":{"a":1.2,"b":{"c":true,"d":[{"e":3}]}}}"""
}
test("should handle lists of maps") {
ScriptBuilderFn(Script("myscript", params = Map("a" -> 1.2, "b" -> Map("c" -> true, "d" -> List(Map("e" -> 3)))))).string shouldBe
"""{"source":"myscript","params":{"a":1.2,"b":{"c":true,"d":[{"e":3}]}}}"""
}
test("should handle recursive lists") {
ScriptBuilderFn(Script("myscript", params = Map("a" -> List(List(List("foo")))))).string shouldBe
"""{"source":"myscript","params":{"a":[[["foo"]]]}}"""
}
test("should handle maps of lists") {
ScriptBuilderFn(Script("myscript", params = Map("a" -> List(3, 2, 1)))).string shouldBe
"""{"source":"myscript","params":{"a":[3,2,1]}}"""
}
test("should handle mixed lists") {
ScriptBuilderFn(Script("myscript", params = Map("a" -> List(List(true, 1.2, List("foo"), Map("w" -> "wibble")))))).string shouldBe
"""{"source":"myscript","params":{"a":[[true,1.2,["foo"],{"w":"wibble"}]]}}"""
}
test("should handle stored scripts") {
ScriptBuilderFn(Script("convert_currency", scriptType = ScriptType.Stored, params = Map("field" -> "price", "conversion_rate" -> 0.835526591))).string shouldBe
"""{"id":"convert_currency","params":{"field":"price","conversion_rate":0.835526591}}"""
}
}
| stringbean/elastic4s | elastic4s-core/src/test/scala/com/sksamuel/elastic4s/ScriptBuilderFnTest.scala | Scala | apache-2.0 | 1,766 |
/*
* Copyright (C) 2015 Original Work Marios Iliofotou
* Copyright (C) 2016 Modified Work Benjamin Finley
*
* This file is part of ReSurfAlt.
*
* ReSurfAlt is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ReSurfAlt is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with ReSurfAlt. If not, see <http://www.gnu.org/licenses/>.
*/
package com.resurf.graph
import java.lang
import com.resurf.common._
import com.twitter.util.{Time, Duration, StorageUnit}
import org.graphstream.graph.{ Graph, EdgeFactory, NodeFactory, Node }
import org.graphstream.graph.implementations.{ MultiGraph, AbstractEdge, AbstractNode, AbstractGraph }
import org.graphstream.algorithm.ConnectedComponents
import org.graphstream.ui.view.Viewer
import scala.collection.JavaConverters._
import org.slf4j.LoggerFactory
import scalaz.Memo
/**
* This class provides a representation of a referrer graph.
* Typically there will be one referrer graph for each device or IP address, etc.
*
* @constructor create a new referrer graph with the specified identifier
* @param id the identifier
*/
class ReferrerGraph(id: String, headNodeSelectionCriteria:ReSurfHeadNodeSelectionCriteria) {

  private[this] lazy val logger = LoggerFactory.getLogger(this.getClass)

  //the internal graphstream multigraph that holds our custom nodes and edges
  private val internalGraph: Graph = new MultiGraph("RG:" + id, false, true,
    DefaultReferrerGraphNodeCapacity,DefaultReferrerGraphEdgeCapacity)

  //set the node factory to the custom node class
  internalGraph.setNodeFactory(new NodeFactory[ReSurfNode] {
    def newInstance(id: String, graph: Graph): ReSurfNode = {
      graph match {case graph:AbstractGraph => new ReSurfNode(graph, id)}
    }
  })

  //set the edge factory to the custom edge class
  internalGraph.setEdgeFactory(new EdgeFactory[ReSurfEdge] {
    def newInstance(id: String, src: Node, dst: Node, directed: Boolean): ReSurfEdge = {
      (src,dst) match {case (src:AbstractNode,dst:AbstractNode) => new ReSurfEdge(id, src, dst, directed)}
    }
  });

  /**
   * Add a node to the referrer graph based on the specified RequestSummary
   *
   * @param nodeId the id of the node (typically the URL of the request)
   * @param details the request summary object of the request
   */
  private def addNode(nodeId: String, details: Option[RequestSummary] = None): Unit = {
    details match {
      case None =>
        internalGraph.addNode(nodeId)
        ()
      case Some(request) =>
        logger.debug("Adding node " + nodeId + " with details " + details)
        //node is not in internalGraph
        Option(internalGraph.getNode[ReSurfNode](nodeId)) match {
          case None =>
            logger.debug("First time seeing node: {}", nodeId)
            // create the bare node first, then record the incoming request
            this.addNode(nodeId)
            internalGraph.getNode[ReSurfNode](nodeId).requestRepo.add(request)
          case Some(node) =>
            node.requestRepo.add(request)
        }
    }
  }

  /**
   * Get a node by id
   *
   * @param the id of the node to get
   * @return an Option containing either the ReSurfNode or None
   */
  def getNode(nodeId:String):Option[ReSurfNode] = Option(internalGraph.getNode[ReSurfNode](nodeId))

  /**
   * Get an edge by id
   *
   * @param the id of the edge to get
   * @return an Option containing either the ReSurfEdge or None
   */
  def getEdge(edgeId:String):Option[ReSurfEdge] = Option(internalGraph.getEdge[ReSurfEdge](edgeId))

  /**
   * The number of edges of the referrer graph
   *
   * @return number of edges of the referrer graph
   */
  def edgeCount:Int = internalGraph.getEdgeCount

  /**
   * The number of nodes of the referrer graph
   *
   * @return number of nodes of the referrer graph
   */
  def nodeCount:Int = internalGraph.getNodeCount

  /**
   * Add a link to the referrer graph based on the specified source node, destination node, and request
   *
   * @param srcId the id of the source node (typically the referrer URI of the request)
   * @param dstId the id of the destination node (typically the target URI of the request)
   * @param details the request summary object of the request
   */
  private def addLink(srcId: String, dstId: String, details: RequestSummary): Unit = {
    logger.debug(s"Adding edge from $srcId to $dstId")
    val edgeId = ReferrerGraph.getLinkIdAsString(srcId, dstId)
    this.addNode(srcId)
    //nodes store their incoming requests as the repository
    this.addNode(dstId, Some(details))

    //Gets the edge if it already exists, else create it
    Option(internalGraph.getEdge[ReSurfEdge](edgeId)) match {
      case None =>
        logger.debug(s"New edge from $srcId to $dstId")
        internalGraph.addEdge(edgeId, srcId, dstId, true)
        val e = internalGraph.getEdge[ReSurfEdge](edgeId)
        e.requestRepo.add(details)
      //else add the request summary to the link's request repo
      case Some(edge) =>
        edge.requestRepo.add(details)
    }
  }

  /**
   * Get a summary of the referrer graph
   *
   * @return a graph summary for the referrer graph
   */
  def getGraphSummary: GraphSummary = {
    val cc = new ConnectedComponents()
    cc.init(internalGraph)
    GraphSummary(nodeCount = internalGraph.getNodeCount, linkCount = internalGraph.getEdgeCount, connectedComponentCount = cc.getConnectedComponentsCount)
  }

  /** Display the referrer graph */
  def visualize: Viewer = internalGraph.display()

  /**
   * Processes the specified web request by creating the specific node(s) and link in the referrer graph
   *
   * @param newEvent the HTTP request to process
   */
  def processRequest(newEvent: WebRequest): Unit = {
    //Deal with HTTP redirection 302 statuses ????
    newEvent.referrer match {
      case None =>
        // There is no referrer, so we just update the node
        addNode(newEvent.url.toString, Some(newEvent.getSummary))
      case Some(referrer) =>
        //check to make sure the url and referrer are not the same since this causes self-loop
        if (referrer.equals(newEvent.url)) {
          //if they are the same then simply remove the referrer
          val newEventCopyWOReferrer = newEvent.copy(referrer = None)
          addNode(newEventCopyWOReferrer.url.toString, Some(newEventCopyWOReferrer.getSummary))
        } else {
          // There is a referrer, so we can update the link (from referrer to target)
          addLink(referrer.toString, newEvent.url.toString, newEvent.getSummary)
        }
    }
  }

  // True when the node satisfies every node-local head-node criterion.
  private def candidateHNCriteria(node: ReSurfNode): Boolean = {
    headNodeSelectionCriteria.nodeLocalCriteria.map(cond => cond(node)).forall(identity)
  }

  /**
   * Get the head node that each node maps to according to the ReSurf methodology.
   * Headnodes naturally map to themselves. Nodes that are not head nodes and do not map to a
   * headnode are considered unknown and map to None.
   *
   * @return a map in the form (node => Option[headnode])
   */
  def assignNodesToHeadNodes: Map[ReSurfNode, Option[ReSurfNode]] = {
    //get the headnodes
    val headNodes = getHeadNodes
    //store the IDs of taken edges while searching so that we don't end up trapped in a cycle
    var takenEdgeIDs = Set.empty[String]

    //define recursive function that traverses backward toward the head node
    def traverseToHeadNode: Option[ReSurfNode] => Option[ReSurfNode] =
      //use memoization from scalaz to improve performance
      // NOTE(review): the memo cache outlives the per-start-node reset of
      // takenEdgeIDs below, so cached answers computed under one traversal's
      // visited-edge state are reused by later traversals — confirm intended.
      Memo.mutableHashMapMemo {
        case Some(node) => {
          //node is head node thus we found the headnode to map to!
          if (headNodes.contains(node)) {
            logger.debug("Found headnode " + node.getId)
            Some(node)
            //else if node still has incoming edges and we haven't take all of them yet then follow the shortest one backward
          } else if (node.getInDegree > 0 && node.getEachEnteringEdge[ReSurfEdge].asScala.count{edge => !takenEdgeIDs.contains(edge.getId)} > 0) {
            val enteringEdgesNotTaken = node.getEachEnteringEdge[ReSurfEdge].asScala.filter{edge => !takenEdgeIDs.contains(edge.getId)}
            val smallestEdgeNotTaken = enteringEdgesNotTaken.minBy{edge => edge.timeGapAvg.getOrElse(DefaultEdgeAvgTimeGap)}
            takenEdgeIDs += smallestEdgeNotTaken.getId
            val sourceNodeOfShortestEdgeNotTaken = smallestEdgeNotTaken.getSourceNode[ReSurfNode]
            logger.debug("Found an edge to take backwards through node " + sourceNodeOfShortestEdgeNotTaken.getId)
            traverseToHeadNode(Some(sourceNodeOfShortestEdgeNotTaken))
            //node is not a headnode and does not have any incoming edges or untaken incoming edges thus is classified as unknown
          } else {
            logger.debug("Could not find a headnode and there are no more unvisited edges to take")
            None
          }
        }
        case None => throw new Exception("A node along the chain is null, this should never happen!")
      }

    //find nodes that are not head nodes since we will follow these backwards to the headnodes
    val nonHeadNodes = internalGraph.getNodeSet[ReSurfNode].asScala.toSet.diff(headNodes)
    //call the function for each node that is not a headnode
    nonHeadNodes.map{node =>
      logger.debug("Starting traversal to find headnode for node " + node.getId)
      //reset the taken edge set for each node
      takenEdgeIDs = Set.empty[String]
      (node, traverseToHeadNode(Some(node)))
    }
    //transform to map and add the headnodes as mapping to themselves
    .toMap ++ headNodes.map{headNode => (headNode, Some(headNode)) }
  }

  /**
   * Get the browsing behavior of the user according to the ReSurf methodology
   * In terms of the referrer graph this consists of all requests to headnodes ordered by time
   *
   * @return the browsing of the user according to the ReSurf methodology
   */
  def getUserBrowsing: Seq[Tuple2[Time,String]] = {
    val headNodes = getHeadNodes.toList
    // Flatten every head node's stored requests into (timestamp, nodeId) pairs.
    val requests = headNodes.flatMap{node => node.requestRepo.getRepo.map{request => (request.ts,node.getId)}}
    requests.toSeq.sortBy(request => request._1)
  }

  /**
   * Get the nodes that are considered head nodes through the ReSurf methodology
   *
   * @return the head nodes
   */
  def getHeadNodes: Set[ReSurfNode] = {
    val nodes = internalGraph.getNodeSet[ReSurfNode].asScala
    var totalHeadnodes = Set.empty[ReSurfNode]

    if(headNodeSelectionCriteria.headNodeReferrerMustBeHeadnode){
      // Seed with parentless candidates, then repeatedly admit children whose
      // parent set already contains a head node (breadth-wise closure).
      val initialHeadNodes = nodes.filter { node =>
        //check to make sure that node has no referrer (parent node)
        candidateHNCriteria(node) && node.getInDegree == 0
      }.toSet

      var lastFoundHeadNodes = initialHeadNodes
      totalHeadnodes ++= lastFoundHeadNodes

      while (lastFoundHeadNodes.size > 0) {
        //get the candidate children and make sure the candidate children are not already head nodes
        //(this could happen in case of graph cycle)
        val childrenOfLastFoundHeadNodes = lastFoundHeadNodes.flatMap { node => node.childNodeSet }.toSet.diff(totalHeadnodes)

        lastFoundHeadNodes = childrenOfLastFoundHeadNodes.filter { node =>
          //check to make sure at least one of the referrer (parent) nodes is a head node
          candidateHNCriteria(node) && totalHeadnodes.intersect(node.parentNodeSet).size > 0
        }.toSet

        totalHeadnodes ++= lastFoundHeadNodes
      }
    }else{
      totalHeadnodes = nodes.filter{node => candidateHNCriteria(node)}.toSet
    }
    totalHeadnodes
  }
}
/** The companion object of [[ReferrerGraph]]. */
object ReferrerGraph {

  /**
   * Builds the identifier used for a directed edge between two nodes.
   *
   * @param src the id of the source node of the link
   * @param dst the id of the destination node of the link
   * @return the link id
   */
  def getLinkIdAsString(src: String, dst: String): String = s"$src->$dst"
}
| finleyb/ReSurfAlt | src/main/scala/com/resurf/graph/ReferrerGraph.scala | Scala | gpl-2.0 | 12,310 |
// Copyright: 2010 - 2017 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/lgpl-3.0.en.html
package org.ensime.sexp.formats
import shapeless._
import org.ensime.sexp._
/**
* Helper methods for generating wrappers for types in a family, also
* known as "type hints".
*
* See https://gist.github.com/fommil/3a04661116c899056197
*
* Will be replaced by a port of spray-json-shapeless.
*/
trait LegacyFamilyFormats {

  // A symbol (e.g. :MyType) used to tag which concrete member of a sealed
  // family a serialised value belongs to.
  case class TypeHint[T](hint: SexpSymbol)
  implicit def typehint[T](implicit t: Typeable[T]): TypeHint[T] =
    TypeHint(SexpSymbol(":" + t.describe.replaceAll("\\\\.type$", "")))

  // always serialises to Nil, and is differentiated by the TraitFormat
  // scala names https://github.com/milessabin/shapeless/issues/256
  implicit def singletonFormat[T <: Singleton](implicit w: Witness.Aux[T]): SexpFormat[T] = new SexpFormat[T] {
    def write(t: T) = SexpNil
    def read(v: Sexp) =
      if (v == SexpNil) w.value
      else deserializationError(v)
  }

  /** Base for sum-type formats: concrete subclasses route on the type-hint symbol. */
  abstract class TraitFormat[T] extends SexpFormat[T] {

    /** Serialises `t` tagged with its family's type hint. */
    protected def wrap[E](t: E)(implicit th: TypeHint[E], sf: SexpFormat[E]): Sexp = {
      val contents = t.toSexp
      // special cases: empty case clases, and case objects (hopefully)
      if (contents == SexpNil) SexpList(th.hint)
      else SexpData(th.hint -> contents)
    }

    // implement by matching on the implementations and passing off to wrap
    // def write(t: T): Sexp

    final def read(sexp: Sexp): T = sexp match {
      case SexpList(List(hint @ SexpSymbol(_))) => read(hint, SexpNil)
      case SexpData(map) if map.size == 1 =>
        map.head match {
          case (hint, value) => read(hint, value)
        }
      case x => deserializationError(x)
    }

    // implement by matching on the hint and passing off to convertTo[Impl]
    protected def read(hint: SexpSymbol, value: Sexp): T
  }
}
| VlachJosef/ensime-server | s-express/src/main/scala/org/ensime/sexp/formats/LegacyFamilyFormats.scala | Scala | gpl-3.0 | 1,905 |
package models
import play.api.db.slick.Config.driver.simple._
case class Release(id: Option[Int], name: String, userId: Int, studyName: String, studyAbstract: String, created: java.sql.Timestamp)
/** Slick mapping for the "release" table; rows materialise as `Release` instances. */
class ReleaseTable(tag: Tag) extends Table[Release](tag, "release") {
  // Target table of the user_id foreign key declared below.
  val users = TableQuery[UserTable]
  def id = column[Int]("id", O.PrimaryKey, O.AutoInc)
  def name = column[String]("name", O.NotNull)
  def userId = column[Int]("user_id", O.NotNull)
  def studyName = column[String]("study_name", O.NotNull)
  def studyAbstract = column[String]("study_abstract", O.NotNull)
  def created = column[java.sql.Timestamp]("created_tstmp", O.NotNull)
  // Bidirectional mapping between the column tuple and the Release case class.
  def * = (id.?, name, userId, studyName, studyAbstract, created) <> (Release.tupled, Release.unapply _)
  def user = foreignKey("user_FK", userId, users)(_.id)
} | seqprodbio/restoule | app/models/Release.scala | Scala | gpl-3.0 | 825 |
package es.weso.shex
import es.weso.rdfgraph.nodes._
import es.weso.rdfgraph._
import es.weso.rdfgraph.statements._
import es.weso.shex.ShapeSyntax._
import es.weso.shex.Typing._
import es.weso.monads.Result._
import es.weso.rdf._
import es.weso.rdf._
import es.weso.shex.Context._
import org.slf4j._
import scala.util.matching.Regex
import es.weso.utils.Logging
import es.weso.monads._
import es.weso.shacl.PrefixMaps
/**
* Shape validator using Regular Expression Derivatives
* Some parts of this code have been inspired by:
* https://hackage.haskell.org/package/hxt-regex-xmlschema-9.1.0/docs/src/Text-Regex-XMLSchema-String-Regex.html
*
*/
trait ShapeValidatorWithDeriv extends ShapeValidator with Logging {
override def id = "Validator by Derivatives 1.0"
implicit val pm: PrefixMap = PrefixMaps.commonShacl
  /**
   * Matches a rule against a set of triples honouring open/closed semantics:
   * an OpenRule may match any sub-partition of the triples (extra triples are
   * allowed), while any other rule must consume the whole set.
   */
  override def matchRule(
    ctx: Context,
    g: Set[RDFTriple],
    rule: Rule): Result[Typing] =
    rule match {
      case OpenRule(r) =>
        // Try every split (g1, remaining) of g; g1 must match the inner rule closed.
        for (
          (g1, remaining) <- parts(g); t <- matchRuleClosed(ctx, g1, r)
        ) yield t
      case _ => matchRuleClosed(ctx, g, rule)
    }
  /**
   * Closed matching: consumes every triple in g via derivatives and succeeds
   * only when the residual rule is nullable (accepts the empty set of triples).
   */
  def matchRuleClosed(
    ctx: Context,
    g: Set[RDFTriple],
    rule: Rule): Result[Typing] = {
    val (dr, ts) = deltaTriples(rule, g, ctx)
    if (nullable(dr)) {
      Passed(ts)
    } else {
      Failure("Does not match, dr = " + showRule(dr)(ctx.pm))
    }
  }
type Nullable = Boolean
type Res = (Rule, Stream[Typing])
lazy val failTyping = Stream()
def isOpen(rule: Rule): Boolean = {
rule match {
case OpenRule(_) => true
case _ => false
}
}
def nullable(r: Rule): Nullable = {
r match {
case FailRule(_) => false
case EmptyRule => true
case ArcRule(_, _, _) => false
case RevArcRule(_, _, _) => false
case RelationRule(_, _, _) => false
case AndRule(r1, r2) => nullable(r1) && nullable(r2)
case OrRule(r1, r2) => nullable(r1) || nullable(r2)
case StarRule(r) => true
case PlusRule(r) => nullable(r)
case OptRule(r) => true
case ActionRule(r, _) => nullable(r)
case RangeMinRule(m, r) => m == 0 || nullable(r)
case RangeRule(m, n, r) => m == 0 || nullable(r)
case NotRule(r) => !nullable(r) // TODO: check the semantics of this
case AnyRule => true
case OpenRule(r) => nullable(r)
}
}
def mkAndRule(r1: Rule, r2: Rule): Rule = {
val r = (r1, r2) match {
case (EmptyRule, e2) => e2
case (e1, EmptyRule) => e1
case (f @ FailRule(_), _) => f
case (_, f @ FailRule(_)) => f
case (_, _) => AndRule(r1, r2)
}
r
}
def mkOrRule(r1: Rule, r2: Rule): Rule = {
val r = (r1, r2) match {
case (f @ FailRule(_), e2) => e2
case (e1, f @ FailRule(_)) => e1
case (e1, e2) =>
if (e1 == e2) e1
else OrRule(e1, e2)
}
r
}
def mkRangeRule(m: Int, n: Int, r: Rule): Rule = {
if (m < 0) FailRule("Range with negative lower bound = " + m)
else if (m > n) FailRule("Range with lower bound " + m + " bigger than upper bound " + n)
else {
(m, n, r) match {
case (0, 0, _) => EmptyRule
case (1, 1, e) => e
case (_, _, f @ FailRule(_)) => f
case (_, _, e @ EmptyRule) => e
case (m, n, e) => RangeRule(m, n, e)
}
}
}
def mkRangeMinRule(m: Int, r: Rule): Rule = {
if (m < 0) FailRule("Range with negative lower bound = " + m)
else {
(m, r) match {
case (0, _) => EmptyRule
case (1, e) => e
case (_, f @ FailRule(_)) => f
case (_, e @ EmptyRule) => e
case (m, e) => RangeMinRule(m, e)
}
}
}
  /**
   * Folds delta over all triples in ts starting from rule r and the context's
   * current typing, returning the residual rule plus the combined typings.
   */
  def deltaTriples(r: Rule, ts: Set[RDFTriple], ctx: Context): (Rule, Stream[Typing]) = {
    val e: Res = (r, Stream(ctx.typing))
    // One fold step: derive the current rule by `triple` and merge the typing streams.
    def f(b: Res, triple: RDFTriple): Res = {
      val (current, st1) = b
      log.debug("Calculating delta.\\nTriple: " + triple +
        "\\nschema " + showRule(current)(ctx.pm) +
        "\\nst1= " + st1)
      val (dr, st2) = delta(current, triple, ctx)
      log.debug("Step delta of triple " + triple +
        "\\nRule: " + showRule(current)(ctx.pm) +
        "\\ndr = " + dr +
        "\\nst2 =" + st2)
      (dr, combineTypings(st1, st2))
    }
    // Cartesian combination of two typing streams; an empty left stream yields the right one.
    def combineTypings(st1: Stream[Typing], st2: Stream[Typing]): Stream[Typing] = {
      if (st1.isEmpty) st2
      else for (t1 <- st1; t2 <- st2) yield (t1 combine t2)
    }
    ts.foldLeft(e)(f)
  }
def delta(rule: Rule, triple: RDFTriple, ctx: Context): (Rule, Stream[Typing]) = {
lazy val noTyping = Stream(ctx.typing)
rule match {
case ArcRule(_, n, v) =>
if (matchName(ctx, triple.pred, n).isValid) {
val mv = matchValue(ctx, triple.obj, v)
if (mv.isValid) {
(EmptyRule, mv.run.get)
} else {
(FailRule("Does not match value " + triple.obj +
" with ArcRule " + showRule(rule)(ctx.pm) + " Msg: " + mv.failMsg),
noTyping)
}
} else {
(FailRule("Does not match name " + triple.pred +
" with ArcRule " + showRule(rule)(ctx.pm)),
noTyping)
}
case RevArcRule(_, n, v) =>
if (matchName(ctx, triple.pred, n).isValid) {
val mv = matchValue(ctx, triple.subj, v)
if (mv.isValid) {
(EmptyRule, mv.run.get)
} else {
(FailRule("Does not match value " + triple.subj +
" with RevArcRule " + showRule(rule)(ctx.pm) + " Msg: " + mv.failMsg),
noTyping)
}
} else {
(FailRule("Does not match name " + triple.pred +
" with RevArcRule " + showRule(rule)(ctx.pm)),
noTyping)
}
case RelationRule(_, v1, v2) =>
val mv1 = matchValue(ctx, triple.subj, v1)
val mv2 = matchValue(ctx, triple.obj, v1)
if (mv1.isValid) {
if (mv2.isValid)
(EmptyRule, mv1.run.get ++ mv2.run.get)
else {
(FailRule("Does not match value " + triple.obj +
" with RelationRule " + showRule(rule)(ctx.pm) + " Msg: " + mv2.failMsg),
noTyping)
}
} else {
(FailRule("Does not match value " + triple.subj +
" with RelationRule " + showRule(rule)(ctx.pm) + " Msg: " + mv1.failMsg),
noTyping)
}
case EmptyRule =>
(FailRule("Unexpected triple " + triple), noTyping)
case f @ FailRule(msg) => {
log.debug("...Failing rule " + showRule(rule)(ctx.pm) + " with " + msg)
(f, noTyping)
}
case OrRule(r1, r2) => {
val (dr1, t1) = delta(r1, triple, ctx)
val (dr2, t2) = delta(r2, triple, ctx)
(mkOrRule(dr1, dr2), t1 ++ t2)
}
// The semantics of And is the same as interleave in RelaxNG because arcs are not ordered
// TODO: check possible simplifications of this rule in case dr1 or dr2 are nullable
case AndRule(r1, r2) => {
val (dr1, t1) = delta(r1, triple, ctx)
val (dr2, t2) = delta(r2, triple, ctx)
(mkOrRule(mkAndRule(dr1, r2), mkAndRule(dr2, r1)), t1 ++ t2)
}
case e @ StarRule(r) => {
val (dr, t) = delta(r, triple, ctx)
(mkAndRule(dr, e), t)
}
case OptRule(r) => {
val (dr, t) = delta(r, triple, ctx)
(dr, t)
}
case PlusRule(r) => {
val (dr, t) = delta(r, triple, ctx)
(mkAndRule(dr, StarRule(r)), t)
}
case RangeRule(m, n, r) => {
val (dr, t) = delta(r, triple, ctx)
(mkAndRule(dr, mkRangeRule(math.max(m - 1, 0), n - 1, r)), t)
}
case RangeMinRule(m, r) => {
val (dr, t) = delta(r, triple, ctx)
(mkAndRule(dr, mkRangeMinRule(math.max(m - 1, 0), r)), t)
}
case ActionRule(r, a) => delta(r, triple, ctx)
case AnyRule => (EmptyRule, noTyping)
case NotRule(r) => {
val (dr, t) = delta(r, triple, ctx)
dr match {
case EmptyRule =>
(FailRule("Not rule found triple " + t + " that matches " + showRule(rule)(ctx.pm)), noTyping)
case FailRule(msg) => {
(EmptyRule, noTyping)
}
case _ => (NotRule(dr), t)
}
}
case OpenRule(r) => {
val (dr, t) = delta(r, triple, ctx)
(dr, t)
}
}
}
  /** Pretty-prints a rule using the given prefix map (delegates to ShapeDoc). */
  def showRule(rule: Rule)(implicit pm: PrefixMap): String =
    ShapeDoc.rule2String(rule)(pm)
}
object ShapeValidatorWithDeriv extends ShapeValidatorWithDeriv {
} | jorgeyp/ShExcala | src/main/scala/es/weso/shex/ShapeValidatorWithDeriv.scala | Scala | mit | 8,567 |
package ch.descabato.core.actors
import java.io.File
import akka.actor.{TypedActor, TypedProps}
import ch.descabato.core._
import ch.descabato.core.commands.ProblemCounter
import ch.descabato.core.config.BackupFolderConfiguration
import ch.descabato.core.model._
import ch.descabato.frontend.{ETACounter, ProgressReporters, StandardByteCounter}
import ch.descabato.utils.Implicits._
import ch.descabato.utils._
import org.slf4j.LoggerFactory
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.concurrent.{Await, Future}
import scala.util.{Failure, Success, Try}
class ChunkStorageActor(val context: BackupContext, val journalHandler: JournalHandler) extends ChunkStorage with JsonUser {
val logger = LoggerFactory.getLogger(getClass)
val config: BackupFolderConfiguration = context.config
private var assignedIds: Map[Long, StoredChunk] = Map.empty
private var alreadyAssignedIds: FastHashMap[Long] = new FastHashMap[Long]()
private var checkpointed: FastHashMap[StoredChunk] = new FastHashMap[StoredChunk]()
private var notCheckpointed: FastHashMap[StoredChunk] = new FastHashMap[StoredChunk]()
private var _currentWriter: (VolumeWriteActor, File) = null
val headroomInVolume = 1000
private val bytesStoredCounter = new StandardByteCounter("Stored")
ProgressReporters.addCounter(bytesStoredCounter)
private def currentWriter = {
if (_currentWriter == null) {
newWriter()
}
_currentWriter
}
private def newWriter(): Unit = {
val index = context.fileManager.volumeIndex
val file = index.nextFile()
val indexNumber = index.numberOfFile(file)
val volumeFile = context.fileManager.volume.fileForNumber(indexNumber)
_currentWriter = (new VolumeWriteActor(context, volumeFile), volumeFile)
}
def startup(): Future[Boolean] = {
Future {
val measure = new StandardMeasureTime
val files = context.fileManager.volumeIndex.getFiles()
val futures = files.map { f =>
Future {
(f, readJson[Seq[StoredChunk]](f))
}
}
for (elem <- futures) {
Await.result(elem, 1.minute) match {
case (_, Success(seq)) =>
checkpointed ++= seq.map(x => (x.hash, x))
case (f, Failure(_)) =>
logger.warn(s"Could not read index $f, it might be corrupted, some data loss will occur. Backup again.")
}
}
logger.info(s"Reconstructed state after ${measure.measuredTime()}")
measure.startMeasuring()
assignedIds = checkpointed.values.map(x => (x.id, x)).toMap
if (assignedIds.nonEmpty) {
ChunkIds.maxId(assignedIds.keySet.max)
}
logger.info(s"Reconstructing state completed in ${measure.measuredTime()}, have ${assignedIds.size} chunks")
true
}
}
def chunkId(block: Block, assignIdIfNotFound: Boolean): Future[ChunkIdResult] = {
Future.successful {
chunkIdInternal(block.hash, assignIdIfNotFound)
}
}
  /**
   * Resolves the chunk id for `hash`, consulting the persisted (checkpointed) and
   * in-flight (notCheckpointed) maps as well as ids that were handed out but not
   * yet backed by a saved chunk.
   *
   * @param hash the content hash of the chunk
   * @param assignIdIfNotFound when true, an unknown hash gets a fresh id reserved for it
   * @return ChunkFound for a stored chunk, ChunkIdAssigned for a reserved id,
   *         ChunkUnknown when the hash is unknown and no id may be assigned
   */
  private def chunkIdInternal(hash: Hash, assignIdIfNotFound: Boolean): ChunkIdResult = {
    val existingId = checkpointed.get(hash).orElse(notCheckpointed.get(hash))
    val alreadyAssigned = alreadyAssignedIds.get(hash)
    (existingId, alreadyAssigned) match {
      // Invariant: a hash must never be both stored and pending an assigned id.
      case (Some(_), Some(_)) => throw new IllegalStateException()
      case (Some(chunk), None) => ChunkFound(chunk.id)
      case (None, Some(id)) => ChunkIdAssigned(id)
      case (None, None) =>
        if (assignIdIfNotFound) {
          // Reserve a fresh id; save() later moves it from alreadyAssignedIds to assignedIds.
          val newId = ChunkIds.nextId()
          alreadyAssignedIds += hash -> newId
          ChunkIdAssigned(newId)
        } else {
          ChunkUnknown
        }
    }
  }
def read(id: Long): Future[BytesWrapper] = {
val maybeChunk = assignedIds.get(id)
maybeChunk match {
case Some(chunk) => Future.successful(getReader(chunk).read(chunk.asFilePosition()))
case None => Future.failed(new NullPointerException(s"Could not find chunk for id $id"))
}
}
override def getHashForId(id: Long): Future[Hash] = {
Future.fromTry(Try {
assignedIds(id).hash
})
}
def hasAlready(id: Long): Future[Boolean] = {
Future.successful {
assignedIds.get(id).flatMap { chunk =>
checkpointed.get(chunk.hash).orElse(notCheckpointed.get(chunk.hash)).map(_ => true)
}.getOrElse(false)
}
}
private def finishVolumeAndCreateIndex() = {
val hashFuture = currentWriter._1.finish().flatMap(_ => currentWriter._1.md5Hash)
val filename = currentWriter._1.filename
val toSave = notCheckpointed.filter { case (_, block) =>
block.file == filename
}
val hash = Await.result(hashFuture, 10.minutes)
journalHandler.addFileToJournal(currentWriter._2, hash)
val indexFile: File = computeIndexFileForVolume()
writeToJson(indexFile, toSave.values.toSeq)
checkpointed ++= toSave
notCheckpointed --= toSave.keySet
context.eventBus.publish(CheckpointedChunks(checkpointed.values.map(_.id).toSet))
require(notCheckpointed.isEmpty)
logger.info(s"Wrote volume and index for $filename")
_currentWriter = null
}
private def computeIndexFileForVolume() = {
val numberOfVolume = context.fileManager.volume.numberOfFile(currentWriter._2)
val volumeIndex = context.fileManager.volumeIndex
volumeIndex.fileForNumber(numberOfVolume)
}
override def save(block: CompressedBlock, id: Long): Future[Boolean] = {
if (notCheckpointed.get(block.hash).orElse(checkpointed.get(block.hash)).isEmpty) {
require(alreadyAssignedIds(block.hash) == id)
if (blockCanNotFitAnymoreIntoCurrentWriter(block)) {
finishVolumeAndCreateIndex()
}
val filePosition = currentWriter._1.saveBlock(block)
val storedChunk = StoredChunk(id, currentWriter._1.filename, block.hash, filePosition.offset, filePosition.length)
require(!assignedIds.safeContains(id), s"Should not contain $id")
alreadyAssignedIds -= block.hash
assignedIds += id -> storedChunk
notCheckpointed += storedChunk.hash -> storedChunk
bytesStoredCounter += storedChunk.length
} else {
logger.warn(s"Chunk with hash ${block.hash} was already saved, but was compressed another time anyway")
}
Future.successful(true)
}
private def blockCanNotFitAnymoreIntoCurrentWriter(block: CompressedBlock) = {
currentWriter._1.currentPosition() + block.compressed.length + headroomInVolume > config.volumeSize.bytes
}
private var _readers: Map[String, VolumeReader] = Map.empty
def getReader(chunk: StoredChunk) = {
if (!_readers.safeContains(chunk.file)) {
val value = TypedProps.apply[VolumeReader](classOf[VolumeReader], new VolumeReadActor(context, new File(config.folder, chunk.file)))
_readers += chunk.file -> TypedActor(context.actorSystem).typedActorOf(value.withTimeout(5.minutes))
}
_readers(chunk.file)
}
override def finish(): Future[Boolean] = {
closeReaders()
if (_currentWriter != null) {
finishVolumeAndCreateIndex()
}
if (bytesStoredCounter.current > 0) {
logger.info(s"Wrote volumes with total of ${Size(bytesStoredCounter.current)}")
}
Future.successful(true)
}
private def closeReaders() = {
Await.result(Future.sequence(_readers.values.map(_.finish())), 1.hour)
_readers = Map.empty
}
val verifiedCounter = new ETACounter {
override def name: String = "Verified chunks"
}
override def verifyChunksAreAvailable(chunkIdsToTest: Seq[Long], counter: ProblemCounter, checkVolumeToo: Boolean, checkContent: Boolean): BlockingOperation = {
ProgressReporters.addCounter(verifiedCounter)
var futures = Seq.empty[Future[Unit]]
val distinctIds = chunkIdsToTest.distinct
verifiedCounter.maxValue = distinctIds.size
for (chunkId <- distinctIds) {
var shouldCount = true
if (!assignedIds.safeContains(chunkId)) {
counter.addProblem(s"Chunk with id $chunkId could not be found")
} else {
if (checkVolumeToo) {
val chunk = assignedIds(chunkId)
val file = config.resolveRelativePath(chunk.file)
if (file.length() < (chunk.startPos + chunk.length)) {
counter.addProblem(s"Chunk ${chunk.id} should be in file ${chunk.file} at ${chunk.startPos} - to ${chunk.startPos + chunk.length}, but file is only ${file.length} long")
} else {
if (checkContent) {
val wrapper = getReader(chunk).read(chunk.asFilePosition())
shouldCount = false
futures :+= Future {
val stream = CompressedStream.decompressToBytes(wrapper)
val hashComputed: Hash = config.createMessageDigest().digest(stream)
if (hashComputed !== chunk.hash) {
counter.addProblem(s"Chunk ${chunk} was read from volume and does not have the same hash as stored")
}
verifiedCounter += 1
}
}
}
}
}
if (shouldCount) {
verifiedCounter += 1
}
}
for (future <- futures) {
Await.result(future, 1.hour)
}
new BlockingOperation()
}
}
| Stivo/DeScaBaTo | core/src/main/scala/ch/descabato/core/actors/ChunkStorageActor.scala | Scala | gpl-3.0 | 9,171 |
package com.twitter.finatra.kafkastreams.transformer.stores.internal
import com.twitter.finatra.kafkastreams.internal.utils.ReflectionUtils
import com.twitter.finatra.kafkastreams.transformer.stores.FinatraKeyValueStore
import com.twitter.finatra.kafkastreams.utils.RocksKeyValueIterator
import com.twitter.util.logging.Logging
import java.util
import org.apache.kafka.common.serialization.Serde
import org.apache.kafka.common.utils.Bytes
import org.apache.kafka.streams.KeyValue
import org.apache.kafka.streams.errors.InvalidStateStoreException
import org.apache.kafka.streams.processor.internals.ProcessorStateManager
import org.apache.kafka.streams.processor.ProcessorContext
import org.apache.kafka.streams.processor.StateStore
import org.apache.kafka.streams.processor.TaskId
import org.apache.kafka.streams.state.internals.RocksDBStore
import org.apache.kafka.streams.state.KeyValueIterator
import org.apache.kafka.streams.state.KeyValueStore
import org.apache.kafka.streams.state.StateSerdes
import org.rocksdb.RocksDB
class FinatraKeyValueStoreImpl[K, V](
_rocksDBStore: RocksDBStore,
inner: KeyValueStore[Bytes, Array[Byte]],
keySerde: Serde[K],
valueSerde: Serde[V])
extends FinatraKeyValueStore[K, V]
with Logging {
/* Private Mutable */
@transient private var _context: ProcessorContext = _
@transient private var serdes: StateSerdes[K, V] = _
@transient private var _rocksDB: RocksDB = _
/* Public */
override def init(processorContext: ProcessorContext, root: StateStore): Unit = {
debug(s"init ${inner.name} ${processorContext.taskId()}")
_context = processorContext
serdes = new StateSerdes[K, V](
ProcessorStateManager.storeChangelogTopic(processorContext.applicationId, name),
keySerde,
valueSerde)
assert(!inner.isOpen)
inner.init(processorContext, root)
}
override def close(): Unit = {
info(s"close ${inner.name} ${_context.taskId()}")
//Note: _rocksDB is obtained from the "inner" store via reflection. As such,
//we null out the retrieved reference here and rely on calling inner.close() below to
// properly close the resources associated with _rocksDB
_rocksDB = null
inner.close()
}
override def taskId: TaskId = _context.taskId()
override def name(): String = inner.name()
override def flush(): Unit = inner.flush()
override def persistent(): Boolean = inner.persistent()
override def isOpen: Boolean = inner.isOpen
override def get(key: K): V = {
valueFrom(inner.get(rawKey(key)))
}
override def put(key: K, value: V): Unit = {
inner.put(rawKey(key), rawValue(value))
}
override def putIfAbsent(key: K, value: V): V = {
valueFrom(inner.putIfAbsent(rawKey(key), rawValue(value)))
}
override def putAll(entries: util.List[KeyValue[K, V]]): Unit = {
val byteEntries = new util.ArrayList[KeyValue[Bytes, Array[Byte]]]
val iterator = entries.iterator
while (iterator.hasNext) {
val entry = iterator.next
byteEntries.add(KeyValue.pair(rawKey(entry.key), rawValue(entry.value)))
}
inner.putAll(byteEntries)
}
override def range(from: K, to: K, allowStaleReads: Boolean): KeyValueIterator[K, V] = {
// If this method is being handled here (as oppose to a caching store) then
// we can ignore allowStaleReads and directly call range which will never serve stale reads
range(from = from, to = to)
}
override def delete(key: K): V = {
valueFrom(inner.delete(rawKey(key)))
}
override def approximateNumEntries(): Long = inner.approximateNumEntries()
override def all(): KeyValueIterator[K, V] = {
validateStoreOpen()
val iterator = rocksDB.newIterator()
iterator.seekToFirst()
new RocksKeyValueIterator(
iterator = iterator,
keyDeserializer = serdes.keyDeserializer,
valueDeserializer = serdes.valueDeserializer,
storeName = name)
}
/**
* Get an iterator over a given range of keys. This iterator must be closed after use.
* The returned iterator must be safe from {@link java.util.ConcurrentModificationException}s
* and must not return null values. No ordering guarantees are provided.
*
* @param from The first key that could be in the range
* @param to The last key that could be in the range (inclusive)
*
* @return The iterator for this range.
*
* @throws NullPointerException If null is used for from or to.
* @throws InvalidStateStoreException if the store is not initialized
*/
override def range(from: K, to: K): KeyValueIterator[K, V] = {
validateStoreOpen()
val iterator = rocksDB.newIterator()
iterator.seek(serdes.rawKey(from))
val toBytesInclusive = serdes.rawKey(to)
new RocksKeyValueIterator(
iterator,
serdes.keyDeserializer,
serdes.valueDeserializer,
inner.name) {
private val comparator = Bytes.BYTES_LEXICO_COMPARATOR
override def hasNext: Boolean = {
super.hasNext &&
comparator.compare(iterator.key(), toBytesInclusive) <= 0 // <= 0 since to is inclusive
}
}
}
/* Public Finatra Additions */
override def range(
fromBytesInclusive: Array[Byte],
toBytesExclusive: Array[Byte]
): KeyValueIterator[K, V] = {
validateStoreOpen()
val iterator = rocksDB.newIterator()
iterator.seek(fromBytesInclusive)
new RocksKeyValueIterator(
iterator,
serdes.keyDeserializer,
serdes.valueDeserializer,
inner.name) {
private val comparator = Bytes.BYTES_LEXICO_COMPARATOR
override def hasNext: Boolean = {
super.hasNext &&
comparator.compare(iterator.key(), toBytesExclusive) < 0 // < 0 since to is exclusive
}
}
}
/**
* A range scan starting from bytes. If RocksDB "prefix seek mode" is not enabled, than the
* iteration will NOT end when fromBytes is no longer the prefix
*
* Note 1: This is an API for Advanced users only
*
* Note 2: If this RocksDB instance is configured in "prefix seek mode", than fromBytes will be
* used as a "prefix" and the iteration will end when the prefix is no longer part of the next element.
* Enabling "prefix seek mode" can be done by calling options.useFixedLengthPrefixExtractor.
* When enabled, prefix scans can take advantage of a prefix based bloom filter for better seek performance
* See: https://github.com/facebook/rocksdb/wiki/Prefix-Seek-API-Changes
*
* TODO: Save off iterators to make sure they are all closed
*/
override def range(fromBytes: Array[Byte]): KeyValueIterator[K, V] = {
val iterator = rocksDB.newIterator()
iterator.seek(fromBytes)
new RocksKeyValueIterator(
iterator,
serdes.keyDeserializer,
serdes.valueDeserializer,
inner.name)
}
  /**
   * Deletes every entry whose key lies in the inclusive range [from, to].
   * Implemented as a range scan followed by per-key deletes, so each removal
   * goes through the store's regular delete path.
   */
  override def deleteRange(from: K, to: K): Unit = {
    val iterator = range(from, to)
    try {
      while (iterator.hasNext) {
        delete(iterator.next.key)
      }
    } finally {
      // Always release the underlying RocksDB iterator, even if a delete throws.
      iterator.close()
    }
  }
// Optimization which avoid getting the prior value which keyValueStore.delete does :-/
override final def deleteWithoutGettingPriorValue(key: K): Unit = {
inner.put(rawKey(key), null)
}
override final def getOrDefault(key: K, default: => V): V = {
val existing = inner.get(rawKey(key))
if (existing == null) {
default
} else {
valueFrom(existing)
}
}
override def deleteRangeExperimentalWithNoChangelogUpdates(
beginKeyInclusive: Array[Byte],
endKeyExclusive: Array[Byte]
): Unit = {
rocksDB.deleteRange(beginKeyInclusive, endKeyExclusive)
}
/* Private */
private def rawKey(key: K): Bytes = {
Bytes.wrap(serdes.rawKey(key))
}
private def rawValue(value: V): Array[Byte] = {
serdes.rawValue(value)
}
private def valueFrom(bytes: Array[Byte]) = {
serdes.valueFrom(bytes)
}
private def validateStoreOpen(): Unit = {
if (!isOpen) throw new InvalidStateStoreException("Store " + this.name + " is currently closed")
}
/*
* Note: We need to constantly check if _rocksDB is set and still owns a handle to the underlying
* RocksDB resources because some operations (such as restoring state from the changelog) can
* result in the underlying rocks store being closed and reopened
* e.g. https://github.com/apache/kafka/blob/2.0/streams/src/main/java/org/apache/kafka/streams/state/internals/RocksDBStore.java#L233
*/
private def rocksDB: RocksDB = {
if (_rocksDB == null || !_rocksDB.isOwningHandle) {
_rocksDB = ReflectionUtils.getFinalField(_rocksDBStore, "db")
}
_rocksDB
}
}
| twitter/finatra | kafka-streams/kafka-streams/src/main/scala/com/twitter/finatra/kafkastreams/transformer/stores/internal/FinatraKeyValueStoreImpl.scala | Scala | apache-2.0 | 8,631 |
package im.actor.server.api.rpc.service.messaging
import im.actor.api.rpc.DBIOResult._
import im.actor.api.rpc.PeerHelpers._
import im.actor.api.rpc._
import im.actor.api.rpc.messaging._
import im.actor.api.rpc.misc.{ ResponseSeq, ResponseVoid }
import im.actor.api.rpc.peers.{ ApiOutPeer, ApiPeerType }
import im.actor.server.dialog.{ ReadFailed, ReceiveFailed }
import im.actor.server.history.HistoryUtils
import im.actor.server.{ models, persist }
import org.joda.time.DateTime
import slick.dbio
import slick.driver.PostgresDriver.api._
import scala.concurrent.Future
/** RPC errors returned when dialog receive/read bookkeeping fails.
  * NOTE(review): the `true` flag appears to mark the error as retryable — confirm against RpcError's signature. */
object HistoryErrors {
  val ReceiveFailed = RpcError(500, "RECEIVE_FAILED", "", true, None)
  val ReadFailed = RpcError(500, "READ_FAILED", "", true, None)
}
trait HistoryHandlers {
self: MessagingServiceImpl ⇒
import HistoryUtils._
import im.actor.api.rpc.Implicits._
  /**
   * Marks messages up to `date` as received in the dialog with `peer` for the
   * authenticated client, mapping a dialog-level ReceiveFailed into the
   * corresponding RPC error.
   */
  override def jhandleMessageReceived(peer: ApiOutPeer, date: Long, clientData: im.actor.api.rpc.ClientData): Future[HandlerResult[ResponseVoid]] = {
    val action = requireAuth(clientData).map { implicit client ⇒
      DBIO.from {
        dialogExt.messageReceived(peer.`type`, peer.id, client.userId, date) map (_ ⇒ Ok(ResponseVoid))
      }
    }
    db.run(toDBIOAction(action)) recover {
      case ReceiveFailed ⇒ Error(HistoryErrors.ReceiveFailed)
    }
  }
override def jhandleMessageRead(peer: ApiOutPeer, date: Long, clientData: ClientData): Future[HandlerResult[ResponseVoid]] = {
val action = requireAuth(clientData).map { implicit client ⇒
DBIO.from {
dialogExt.messageRead(peer.`type`, peer.id, client.userId, client.authId, date) map (_ ⇒ Ok(ResponseVoid))
}
}
db.run(toDBIOAction(action)) recover {
case ReadFailed ⇒ Error(HistoryErrors.ReadFailed)
}
}
override def jhandleClearChat(peer: ApiOutPeer, clientData: ClientData): Future[HandlerResult[ResponseSeq]] = {
val action = requireAuth(clientData) map { implicit client ⇒
val update = UpdateChatClear(peer.asPeer)
for {
_ ← fromDBIOBoolean(CommonErrors.forbidden("Clearing of public chats is forbidden")) {
if (peer.`type` == ApiPeerType.Private) {
DBIO.successful(true)
} else {
DBIO.from(groupExt.isHistoryShared(peer.id)) flatMap (isHistoryShared ⇒ DBIO.successful(!isHistoryShared))
}
}
_ ← fromDBIO(persist.HistoryMessage.deleteAll(client.userId, peer.asModel))
seqstate ← fromFuture(userExt.broadcastClientUpdate(update, None, isFat = false))
} yield ResponseSeq(seqstate.seq, seqstate.state.toByteArray)
}
db.run(toDBIOAction(action map (_.run)))
}
override def jhandleDeleteChat(peer: ApiOutPeer, clientData: ClientData): Future[HandlerResult[ResponseSeq]] = {
val action = requireAuth(clientData).map { implicit client ⇒
val update = UpdateChatDelete(peer.asPeer)
for {
_ ← persist.HistoryMessage.deleteAll(client.userId, peer.asModel)
_ ← persist.Dialog.delete(client.userId, peer.asModel)
seqstate ← DBIO.from(userExt.broadcastClientUpdate(update, None, isFat = false))
} yield Ok(ResponseSeq(seqstate.seq, seqstate.state.toByteArray))
}
db.run(toDBIOAction(action))
}
override def jhandleLoadDialogs(endDate: Long, limit: Int, clientData: ClientData): Future[HandlerResult[ResponseLoadDialogs]] = {
val authorizedAction = requireAuth(clientData).map { implicit client ⇒
persist.Dialog.findByUser(client.userId, endDateTimeFrom(endDate), Int.MaxValue) flatMap { dialogModels ⇒
for {
dialogs ← DBIO.sequence(dialogModels map getDialogStruct)
(users, groups) ← getDialogsUsersGroups(dialogs)
} yield {
Ok(ResponseLoadDialogs(
groups = groups.toVector,
users = users.toVector,
dialogs = dialogs.toVector
))
}
}
}
db.run(toDBIOAction(authorizedAction))
}
override def jhandleLoadHistory(peer: ApiOutPeer, endDate: Long, limit: Int, clientData: ClientData): Future[HandlerResult[ResponseLoadHistory]] = {
val authorizedAction = requireAuth(clientData).map { implicit client ⇒
withOutPeer(peer) {
withHistoryOwner(peer.asModel) { historyOwner ⇒
persist.Dialog.find(client.userId, peer.asModel) flatMap { dialogOpt ⇒
persist.HistoryMessage.find(historyOwner, peer.asModel, endDateTimeFrom(endDate), limit) flatMap { messageModels ⇒
val lastReceivedAt = dialogOpt map (_.lastReceivedAt) getOrElse (new DateTime(0))
val lastReadAt = dialogOpt map (_.lastReadAt) getOrElse (new DateTime(0))
val (messages, userIds) = messageModels.view
.map(_.ofUser(client.userId))
.foldLeft(Vector.empty[ApiHistoryMessage], Set.empty[Int]) {
case ((msgs, userIds), message) ⇒
val messageStruct = message.asStruct(lastReceivedAt, lastReadAt)
val newMsgs = msgs :+ messageStruct
val newUserIds = relatedUsers(messageStruct.message) ++
(if (message.senderUserId != client.userId)
userIds + message.senderUserId
else
userIds)
(newMsgs, newUserIds)
}
for {
userStructs ← DBIO.from(Future.sequence(userIds.toVector map (userExt.getApiStruct(_, client.userId, client.authId))))
} yield {
Ok(ResponseLoadHistory(messages, userStructs))
}
}
}
}
}
}
db.run(toDBIOAction(authorizedAction))
}
override def jhandleDeleteMessage(outPeer: ApiOutPeer, randomIds: Vector[Long], clientData: ClientData): Future[HandlerResult[ResponseSeq]] = {
val action = requireAuth(clientData).map { implicit client ⇒
withOutPeer(outPeer) {
val peer = outPeer.asModel
withHistoryOwner(peer) { historyOwner ⇒
if (isSharedUser(historyOwner)) {
persist.HistoryMessage.find(historyOwner, peer, randomIds.toSet) flatMap { messages ⇒
if (messages.exists(_.senderUserId != client.userId)) {
DBIO.successful(Error(CommonErrors.forbidden("You can only delete your own messages")))
} else {
val update = UpdateMessageDelete(outPeer.asPeer, randomIds)
for {
_ ← persist.HistoryMessage.delete(historyOwner, peer, randomIds.toSet)
groupUserIds ← persist.GroupUser.findUserIds(peer.id) map (_.toSet)
(seqstate, _) ← DBIO.from(userExt.broadcastClientAndUsersUpdate(groupUserIds, update, None, false))
} yield Ok(ResponseSeq(seqstate.seq, seqstate.state.toByteArray))
}
}
} else {
val update = UpdateMessageDelete(outPeer.asPeer, randomIds)
for {
_ ← persist.HistoryMessage.delete(client.userId, peer, randomIds.toSet)
seqstate ← DBIO.from(userExt.broadcastClientUpdate(update, None, isFat = false))
} yield Ok(ResponseSeq(seqstate.seq, seqstate.state.toByteArray))
}
}
}
}
db.run(toDBIOAction(action))
}
private val MaxDate = (new DateTime(294276, 1, 1, 0, 0)).getMillis
private def endDateTimeFrom(date: Long): Option[DateTime] = {
if (date == 0l) {
None
} else {
Some(new DateTime(
if (date >= MaxDate)
new DateTime(294276, 1, 1, 0, 0)
else
date
))
}
}
// Builds the API-level dialog structure for one stored dialog: resolves the newest
// message in the history (or a synthetic empty text message when there is none),
// the unread count, and packs them with the dialog's metadata.
private def getDialogStruct(dialogModel: models.Dialog)(implicit client: AuthorizedClientData): dbio.DBIO[ApiDialog] = {
withHistoryOwner(dialogModel.peer) { historyOwner ⇒
for {
messageOpt ← persist.HistoryMessage.findNewest(historyOwner, dialogModel.peer) map (_.map(_.ofUser(client.userId)))
unreadCount ← getUnreadCount(historyOwner, dialogModel.peer, dialogModel.ownerLastReadAt)
} yield {
// Fallback content for dialogs with no messages yet: an empty text message at epoch 0.
val emptyMessageContent = ApiTextMessage(text = "", mentions = Vector.empty, ext = None)
val messageModel = messageOpt.getOrElse(models.HistoryMessage(dialogModel.userId, dialogModel.peer, new DateTime(0), 0, 0, emptyMessageContent.header, emptyMessageContent.toByteArray, None))
val message = messageModel.asStruct(dialogModel.lastReceivedAt, dialogModel.lastReadAt)
ApiDialog(
peer = dialogModel.peer.asStruct,
unreadCount = unreadCount,
sortDate = dialogModel.lastMessageDate.getMillis,
senderUserId = message.senderUserId,
randomId = message.randomId,
date = message.date,
message = message.message,
state = message.state
)
}
}
}
/**
 * Counts the messages in `peer`'s history newer than `ownerLastReadAt`.
 * For shared (group) histories the count is only computed while the client is
 * still a member of the group; non-members always see 0.
 */
private def getUnreadCount(historyOwner: Int, peer: models.Peer, ownerLastReadAt: DateTime)(implicit client: AuthorizedClientData): DBIO[Int] =
  if (!isSharedUser(historyOwner)) {
    persist.HistoryMessage.getUnreadCount(historyOwner, peer, ownerLastReadAt)
  } else {
    val membershipCheck = DBIO.from(groupExt.getMemberIds(peer.id) map { case (memberIds, _, _) => memberIds contains client.userId })
    membershipCheck flatMap { isMember =>
      if (isMember) persist.HistoryMessage.getUnreadCount(historyOwner, peer, ownerLastReadAt)
      else DBIO.successful(0)
    }
  }
// Collects all user and group structures referenced by a page of dialogs:
// peers, senders, mentioned users, plus (for groups) members, inviters and creators.
private def getDialogsUsersGroups(dialogs: Seq[ApiDialog])(implicit client: AuthorizedClientData) = {
// Accumulate (userIds, groupIds) in one pass over the dialogs.
val (userIds, groupIds) = dialogs.foldLeft((Set.empty[Int], Set.empty[Int])) {
case ((uacc, gacc), dialog) ⇒
if (dialog.peer.`type` == ApiPeerType.Private) {
(uacc ++ relatedUsers(dialog.message) ++ Set(dialog.peer.id, dialog.senderUserId), gacc)
} else {
(uacc ++ relatedUsers(dialog.message) + dialog.senderUserId, gacc + dialog.peer.id)
}
}
for {
groups ← DBIO.from(Future.sequence(groupIds map (groupExt.getApiStruct(_, client.userId))))
// Users implied by group membership (members, their inviters, and the creator).
groupUserIds = groups.map(g ⇒ g.members.map(m ⇒ Seq(m.userId, m.inviterUserId)).flatten :+ g.creatorUserId).flatten
// 0 is filtered out: it is not a valid user id.
users ← DBIO.from(Future.sequence((userIds ++ groupUserIds).filterNot(_ == 0) map (userExt.getApiStruct(_, client.userId, client.authId))))
} yield (users, groups)
}
/** Collects ids of users referenced from inside a message body (mentions or service-event payloads). */
private def relatedUsers(message: ApiMessage): Set[Int] =
  message match {
    case ApiTextMessage(_, mentions, _) => mentions.toSet
    case ApiServiceMessage(_, extOpt)   => extOpt.fold(Set.empty[Int])(relatedUsers)
    case ApiJsonMessage(_)              => Set.empty
    case _: ApiDocumentMessage          => Set.empty
  }
// Extracts the user ids carried by a service-message extension.
// NOTE(review): patterns of the form `ApiServiceExX | _: ApiServiceExX` match either
// the companion object value or a class instance — presumably to cover both wire
// representations; confirm against the codec before simplifying.
private def relatedUsers(ext: ApiServiceEx): Set[Int] =
ext match {
case ApiServiceExContactRegistered(userId) ⇒ Set(userId)
case ApiServiceExChangedAvatar(_) ⇒ Set.empty
case ApiServiceExChangedTitle(_) ⇒ Set.empty
case ApiServiceExGroupCreated | _: ApiServiceExGroupCreated ⇒ Set.empty
case ApiServiceExPhoneCall(_) ⇒ Set.empty
case ApiServiceExPhoneMissed | _: ApiServiceExPhoneMissed ⇒ Set.empty
case ApiServiceExUserInvited(invitedUserId) ⇒ Set(invitedUserId)
case ApiServiceExUserJoined | _: ApiServiceExUserJoined ⇒ Set.empty
case ApiServiceExUserKicked(kickedUserId) ⇒ Set(kickedUserId)
case ApiServiceExUserLeft | _: ApiServiceExUserLeft ⇒ Set.empty
}
}
| hzy87email/actor-platform | actor-server/actor-rpc-api/src/main/scala/im/actor/server/api/rpc/service/messaging/HistoryHandlers.scala | Scala | mit | 11,684 |
package net.usersource.twitpipe
import akka.serialization._
import akka.serialization.Serializable.ScalaJSON
import akka.serialization.JsonSerialization._
import akka.serialization.DefaultProtocol._
import reflect.BeanInfo
import annotation.target.field
import sjson.json.JSONTypeHint
object JSON {
// sjson format for User: maps the nine Twitter JSON field names onto the case class.
implicit lazy val UserFormat: sjson.json.Format[User] =
asProduct9("id_str",
"screen_name",
"name",
"lang",
"location",
"time_zone",
"description",
"statuses_count",
"url")(User)(User.unapply(_).get)
// A Twitter user as carried on a status payload. Optional fields may be absent in the feed.
@BeanInfo
case class User(
id_str: String,
screen_name: String,
name: String,
lang: String,
location: Option[String],
time_zone: Option[String],
description: Option[String],
statuses_count: Long,
url: Option[String]
) extends ScalaJSON[User] {
def toJSON: String = JsValue.toJson(tojson(this))
def toBytes: Array[Byte] = tobinary(this)
def fromBytes(bytes: Array[Byte]) = frombinary[User](bytes)
def fromJSON(js: String) = fromjson[User](Js(js))
}
// sjson format for a geo point (plain latitude/longitude pair).
implicit lazy val GeoFormat: sjson.json.Format[Geo] =
asProduct2("latitude", "longitude")(Geo)(Geo.unapply(_).get)
@BeanInfo
case class Geo(
latitude: Double,
longitude: Double
) extends ScalaJSON[Geo] {
def toJSON: String = JsValue.toJson(tojson(this))
def toBytes: Array[Byte] = tobinary(this)
def fromBytes(bytes: Array[Byte]) = frombinary[Geo](bytes)
def fromJSON(js: String) = fromjson[Geo](Js(js))
}
// sjson format for Status; nested User/Geo are handled via the implicit formats above.
implicit lazy val StatusFormat: sjson.json.Format[Status] =
asProduct7(
"id_str",
"text",
"created_at",
"user",
"geo",
"in_reply_to_screen_name",
"in_reply_to_status_id_str"
)(Status)(Status.unapply(_).get)
// A tweet. @JSONTypeHint tells sjson the concrete type to use for nested/optional fields.
@BeanInfo
case class Status(
id_str: String,
text: String,
created_at: String,
@(JSONTypeHint @field)(value = classOf[User])user: User,
@(JSONTypeHint @field)(value = classOf[Option[Geo]])geo: Option[Geo],
in_reply_to_screen_name: Option[String],
in_reply_to_status_id_str: Option[String]
) extends ScalaJSON[Status] {
def toJSON: String = JsValue.toJson(tojson(this))
def toBytes: Array[Byte] = tobinary(this)
def fromBytes(bytes: Array[Byte]) = frombinary[Status](bytes)
def fromJSON(js: String) = fromjson[Status](Js(js))
}
} | glenford/TwitterPipeline | src/main/scala/net/usersource/twitpipe/Status.scala | Scala | apache-2.0 | 2,697 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import org.apache.spark.sql.catalyst.util.resourceToString
import org.apache.spark.sql.internal.SQLConf
/**
* This test suite ensures all the TPC-DS queries can be successfully analyzed, optimized
* and compiled without hitting the max iteration threshold.
*/
/**
 * Compiles every TPC-DS query (and the Impala-modified variants) against an
 * empty Parquet-backed TPC-DS schema, checking that analysis, optimization and
 * whole-stage code generation all succeed.
 */
class TPCDSQuerySuite extends BenchmarkQueryTest {

  // Creates the full TPC-DS schema (DDL only, no data) before any query is compiled.
  // Fixed: deprecated procedure syntax (`def beforeAll() {`) replaced with an explicit Unit result type.
  override def beforeAll(): Unit = {
    super.beforeAll()
    sql(
      """
        |CREATE TABLE `catalog_page` (
        |`cp_catalog_page_sk` INT, `cp_catalog_page_id` STRING, `cp_start_date_sk` INT,
        |`cp_end_date_sk` INT, `cp_department` STRING, `cp_catalog_number` INT,
        |`cp_catalog_page_number` INT, `cp_description` STRING, `cp_type` STRING)
        |USING parquet
      """.stripMargin)
    sql(
      """
        |CREATE TABLE `catalog_returns` (
        |`cr_returned_date_sk` INT, `cr_returned_time_sk` INT, `cr_item_sk` INT,
        |`cr_refunded_customer_sk` INT, `cr_refunded_cdemo_sk` INT, `cr_refunded_hdemo_sk` INT,
        |`cr_refunded_addr_sk` INT, `cr_returning_customer_sk` INT, `cr_returning_cdemo_sk` INT,
        |`cr_returning_hdemo_sk` INT, `cr_returning_addr_sk` INT, `cr_call_center_sk` INT,
        |`cr_catalog_page_sk` INT, `cr_ship_mode_sk` INT, `cr_warehouse_sk` INT, `cr_reason_sk` INT,
        |`cr_order_number` INT, `cr_return_quantity` INT, `cr_return_amount` DECIMAL(7,2),
        |`cr_return_tax` DECIMAL(7,2), `cr_return_amt_inc_tax` DECIMAL(7,2), `cr_fee` DECIMAL(7,2),
        |`cr_return_ship_cost` DECIMAL(7,2), `cr_refunded_cash` DECIMAL(7,2),
        |`cr_reversed_charge` DECIMAL(7,2), `cr_store_credit` DECIMAL(7,2),
        |`cr_net_loss` DECIMAL(7,2))
        |USING parquet
      """.stripMargin)
    sql(
      """
        |CREATE TABLE `customer` (
        |`c_customer_sk` INT, `c_customer_id` STRING, `c_current_cdemo_sk` INT,
        |`c_current_hdemo_sk` INT, `c_current_addr_sk` INT, `c_first_shipto_date_sk` INT,
        |`c_first_sales_date_sk` INT, `c_salutation` STRING, `c_first_name` STRING,
        |`c_last_name` STRING, `c_preferred_cust_flag` STRING, `c_birth_day` INT,
        |`c_birth_month` INT, `c_birth_year` INT, `c_birth_country` STRING, `c_login` STRING,
        |`c_email_address` STRING, `c_last_review_date` STRING)
        |USING parquet
      """.stripMargin)
    sql(
      """
        |CREATE TABLE `customer_address` (
        |`ca_address_sk` INT, `ca_address_id` STRING, `ca_street_number` STRING,
        |`ca_street_name` STRING, `ca_street_type` STRING, `ca_suite_number` STRING,
        |`ca_city` STRING, `ca_county` STRING, `ca_state` STRING, `ca_zip` STRING,
        |`ca_country` STRING, `ca_gmt_offset` DECIMAL(5,2), `ca_location_type` STRING)
        |USING parquet
      """.stripMargin)
    sql(
      """
        |CREATE TABLE `customer_demographics` (
        |`cd_demo_sk` INT, `cd_gender` STRING, `cd_marital_status` STRING,
        |`cd_education_status` STRING, `cd_purchase_estimate` INT, `cd_credit_rating` STRING,
        |`cd_dep_count` INT, `cd_dep_employed_count` INT, `cd_dep_college_count` INT)
        |USING parquet
      """.stripMargin)
    sql(
      """
        |CREATE TABLE `date_dim` (
        |`d_date_sk` INT, `d_date_id` STRING, `d_date` STRING,
        |`d_month_seq` INT, `d_week_seq` INT, `d_quarter_seq` INT, `d_year` INT, `d_dow` INT,
        |`d_moy` INT, `d_dom` INT, `d_qoy` INT, `d_fy_year` INT, `d_fy_quarter_seq` INT,
        |`d_fy_week_seq` INT, `d_day_name` STRING, `d_quarter_name` STRING, `d_holiday` STRING,
        |`d_weekend` STRING, `d_following_holiday` STRING, `d_first_dom` INT, `d_last_dom` INT,
        |`d_same_day_ly` INT, `d_same_day_lq` INT, `d_current_day` STRING, `d_current_week` STRING,
        |`d_current_month` STRING, `d_current_quarter` STRING, `d_current_year` STRING)
        |USING parquet
      """.stripMargin)
    sql(
      """
        |CREATE TABLE `household_demographics` (
        |`hd_demo_sk` INT, `hd_income_band_sk` INT, `hd_buy_potential` STRING, `hd_dep_count` INT,
        |`hd_vehicle_count` INT)
        |USING parquet
      """.stripMargin)
    sql(
      """
        |CREATE TABLE `inventory` (`inv_date_sk` INT, `inv_item_sk` INT, `inv_warehouse_sk` INT,
        |`inv_quantity_on_hand` INT)
        |USING parquet
      """.stripMargin)
    sql(
      """
        |CREATE TABLE `item` (`i_item_sk` INT, `i_item_id` STRING, `i_rec_start_date` STRING,
        |`i_rec_end_date` STRING, `i_item_desc` STRING, `i_current_price` DECIMAL(7,2),
        |`i_wholesale_cost` DECIMAL(7,2), `i_brand_id` INT, `i_brand` STRING, `i_class_id` INT,
        |`i_class` STRING, `i_category_id` INT, `i_category` STRING, `i_manufact_id` INT,
        |`i_manufact` STRING, `i_size` STRING, `i_formulation` STRING, `i_color` STRING,
        |`i_units` STRING, `i_container` STRING, `i_manager_id` INT, `i_product_name` STRING)
        |USING parquet
      """.stripMargin)
    sql(
      """
        |CREATE TABLE `promotion` (
        |`p_promo_sk` INT, `p_promo_id` STRING, `p_start_date_sk` INT, `p_end_date_sk` INT,
        |`p_item_sk` INT, `p_cost` DECIMAL(15,2), `p_response_target` INT, `p_promo_name` STRING,
        |`p_channel_dmail` STRING, `p_channel_email` STRING, `p_channel_catalog` STRING,
        |`p_channel_tv` STRING, `p_channel_radio` STRING, `p_channel_press` STRING,
        |`p_channel_event` STRING, `p_channel_demo` STRING, `p_channel_details` STRING,
        |`p_purpose` STRING, `p_discount_active` STRING)
        |USING parquet
      """.stripMargin)
    sql(
      """
        |CREATE TABLE `store` (
        |`s_store_sk` INT, `s_store_id` STRING, `s_rec_start_date` STRING,
        |`s_rec_end_date` STRING, `s_closed_date_sk` INT, `s_store_name` STRING,
        |`s_number_employees` INT, `s_floor_space` INT, `s_hours` STRING, `s_manager` STRING,
        |`s_market_id` INT, `s_geography_class` STRING, `s_market_desc` STRING,
        |`s_market_manager` STRING, `s_division_id` INT, `s_division_name` STRING,
        |`s_company_id` INT, `s_company_name` STRING, `s_street_number` STRING,
        |`s_street_name` STRING, `s_street_type` STRING, `s_suite_number` STRING, `s_city` STRING,
        |`s_county` STRING, `s_state` STRING, `s_zip` STRING, `s_country` STRING,
        |`s_gmt_offset` DECIMAL(5,2), `s_tax_precentage` DECIMAL(5,2))
        |USING parquet
      """.stripMargin)
    sql(
      """
        |CREATE TABLE `store_returns` (
        |`sr_returned_date_sk` BIGINT, `sr_return_time_sk` BIGINT, `sr_item_sk` BIGINT,
        |`sr_customer_sk` BIGINT, `sr_cdemo_sk` BIGINT, `sr_hdemo_sk` BIGINT, `sr_addr_sk` BIGINT,
        |`sr_store_sk` BIGINT, `sr_reason_sk` BIGINT, `sr_ticket_number` BIGINT,
        |`sr_return_quantity` BIGINT, `sr_return_amt` DECIMAL(7,2), `sr_return_tax` DECIMAL(7,2),
        |`sr_return_amt_inc_tax` DECIMAL(7,2), `sr_fee` DECIMAL(7,2),
        |`sr_return_ship_cost` DECIMAL(7,2), `sr_refunded_cash` DECIMAL(7,2),
        |`sr_reversed_charge` DECIMAL(7,2), `sr_store_credit` DECIMAL(7,2),
        |`sr_net_loss` DECIMAL(7,2))
        |USING parquet
      """.stripMargin)
    sql(
      """
        |CREATE TABLE `catalog_sales` (
        |`cs_sold_date_sk` INT, `cs_sold_time_sk` INT, `cs_ship_date_sk` INT,
        |`cs_bill_customer_sk` INT, `cs_bill_cdemo_sk` INT, `cs_bill_hdemo_sk` INT,
        |`cs_bill_addr_sk` INT, `cs_ship_customer_sk` INT, `cs_ship_cdemo_sk` INT,
        |`cs_ship_hdemo_sk` INT, `cs_ship_addr_sk` INT, `cs_call_center_sk` INT,
        |`cs_catalog_page_sk` INT, `cs_ship_mode_sk` INT, `cs_warehouse_sk` INT,
        |`cs_item_sk` INT, `cs_promo_sk` INT, `cs_order_number` INT, `cs_quantity` INT,
        |`cs_wholesale_cost` DECIMAL(7,2), `cs_list_price` DECIMAL(7,2),
        |`cs_sales_price` DECIMAL(7,2), `cs_ext_discount_amt` DECIMAL(7,2),
        |`cs_ext_sales_price` DECIMAL(7,2), `cs_ext_wholesale_cost` DECIMAL(7,2),
        |`cs_ext_list_price` DECIMAL(7,2), `cs_ext_tax` DECIMAL(7,2), `cs_coupon_amt` DECIMAL(7,2),
        |`cs_ext_ship_cost` DECIMAL(7,2), `cs_net_paid` DECIMAL(7,2),
        |`cs_net_paid_inc_tax` DECIMAL(7,2), `cs_net_paid_inc_ship` DECIMAL(7,2),
        |`cs_net_paid_inc_ship_tax` DECIMAL(7,2), `cs_net_profit` DECIMAL(7,2))
        |USING parquet
      """.stripMargin)
    sql(
      """
        |CREATE TABLE `web_sales` (
        |`ws_sold_date_sk` INT, `ws_sold_time_sk` INT, `ws_ship_date_sk` INT, `ws_item_sk` INT,
        |`ws_bill_customer_sk` INT, `ws_bill_cdemo_sk` INT, `ws_bill_hdemo_sk` INT,
        |`ws_bill_addr_sk` INT, `ws_ship_customer_sk` INT, `ws_ship_cdemo_sk` INT,
        |`ws_ship_hdemo_sk` INT, `ws_ship_addr_sk` INT, `ws_web_page_sk` INT, `ws_web_site_sk` INT,
        |`ws_ship_mode_sk` INT, `ws_warehouse_sk` INT, `ws_promo_sk` INT, `ws_order_number` INT,
        |`ws_quantity` INT, `ws_wholesale_cost` DECIMAL(7,2), `ws_list_price` DECIMAL(7,2),
        |`ws_sales_price` DECIMAL(7,2), `ws_ext_discount_amt` DECIMAL(7,2),
        |`ws_ext_sales_price` DECIMAL(7,2), `ws_ext_wholesale_cost` DECIMAL(7,2),
        |`ws_ext_list_price` DECIMAL(7,2), `ws_ext_tax` DECIMAL(7,2),
        |`ws_coupon_amt` DECIMAL(7,2), `ws_ext_ship_cost` DECIMAL(7,2), `ws_net_paid` DECIMAL(7,2),
        |`ws_net_paid_inc_tax` DECIMAL(7,2), `ws_net_paid_inc_ship` DECIMAL(7,2),
        |`ws_net_paid_inc_ship_tax` DECIMAL(7,2), `ws_net_profit` DECIMAL(7,2))
        |USING parquet
      """.stripMargin)
    sql(
      """
        |CREATE TABLE `store_sales` (
        |`ss_sold_date_sk` INT, `ss_sold_time_sk` INT, `ss_item_sk` INT, `ss_customer_sk` INT,
        |`ss_cdemo_sk` INT, `ss_hdemo_sk` INT, `ss_addr_sk` INT, `ss_store_sk` INT,
        |`ss_promo_sk` INT, `ss_ticket_number` INT, `ss_quantity` INT,
        |`ss_wholesale_cost` DECIMAL(7,2), `ss_list_price` DECIMAL(7,2),
        |`ss_sales_price` DECIMAL(7,2), `ss_ext_discount_amt` DECIMAL(7,2),
        |`ss_ext_sales_price` DECIMAL(7,2), `ss_ext_wholesale_cost` DECIMAL(7,2),
        |`ss_ext_list_price` DECIMAL(7,2), `ss_ext_tax` DECIMAL(7,2),
        |`ss_coupon_amt` DECIMAL(7,2), `ss_net_paid` DECIMAL(7,2),
        |`ss_net_paid_inc_tax` DECIMAL(7,2), `ss_net_profit` DECIMAL(7,2))
        |USING parquet
      """.stripMargin)
    sql(
      """
        |CREATE TABLE `web_returns` (
        |`wr_returned_date_sk` BIGINT, `wr_returned_time_sk` BIGINT, `wr_item_sk` BIGINT,
        |`wr_refunded_customer_sk` BIGINT, `wr_refunded_cdemo_sk` BIGINT,
        |`wr_refunded_hdemo_sk` BIGINT, `wr_refunded_addr_sk` BIGINT,
        |`wr_returning_customer_sk` BIGINT, `wr_returning_cdemo_sk` BIGINT,
        |`wr_returning_hdemo_sk` BIGINT, `wr_returning_addr_sk` BIGINT, `wr_web_page_sk` BIGINT,
        |`wr_reason_sk` BIGINT, `wr_order_number` BIGINT, `wr_return_quantity` BIGINT,
        |`wr_return_amt` DECIMAL(7,2), `wr_return_tax` DECIMAL(7,2),
        |`wr_return_amt_inc_tax` DECIMAL(7,2), `wr_fee` DECIMAL(7,2),
        |`wr_return_ship_cost` DECIMAL(7,2), `wr_refunded_cash` DECIMAL(7,2),
        |`wr_reversed_charge` DECIMAL(7,2), `wr_account_credit` DECIMAL(7,2),
        |`wr_net_loss` DECIMAL(7,2))
        |USING parquet
      """.stripMargin)
    sql(
      """
        |CREATE TABLE `web_site` (
        |`web_site_sk` INT, `web_site_id` STRING, `web_rec_start_date` DATE,
        |`web_rec_end_date` DATE, `web_name` STRING, `web_open_date_sk` INT,
        |`web_close_date_sk` INT, `web_class` STRING, `web_manager` STRING, `web_mkt_id` INT,
        |`web_mkt_class` STRING, `web_mkt_desc` STRING, `web_market_manager` STRING,
        |`web_company_id` INT, `web_company_name` STRING, `web_street_number` STRING,
        |`web_street_name` STRING, `web_street_type` STRING, `web_suite_number` STRING,
        |`web_city` STRING, `web_county` STRING, `web_state` STRING, `web_zip` STRING,
        |`web_country` STRING, `web_gmt_offset` STRING, `web_tax_percentage` DECIMAL(5,2))
        |USING parquet
      """.stripMargin)
    sql(
      """
        |CREATE TABLE `reason` (
        |`r_reason_sk` INT, `r_reason_id` STRING, `r_reason_desc` STRING)
        |USING parquet
      """.stripMargin)
    sql(
      """
        |CREATE TABLE `call_center` (
        |`cc_call_center_sk` INT, `cc_call_center_id` STRING, `cc_rec_start_date` DATE,
        |`cc_rec_end_date` DATE, `cc_closed_date_sk` INT, `cc_open_date_sk` INT, `cc_name` STRING,
        |`cc_class` STRING, `cc_employees` INT, `cc_sq_ft` INT, `cc_hours` STRING,
        |`cc_manager` STRING, `cc_mkt_id` INT, `cc_mkt_class` STRING, `cc_mkt_desc` STRING,
        |`cc_market_manager` STRING, `cc_division` INT, `cc_division_name` STRING, `cc_company` INT,
        |`cc_company_name` STRING, `cc_street_number` STRING, `cc_street_name` STRING,
        |`cc_street_type` STRING, `cc_suite_number` STRING, `cc_city` STRING, `cc_county` STRING,
        |`cc_state` STRING, `cc_zip` STRING, `cc_country` STRING, `cc_gmt_offset` DECIMAL(5,2),
        |`cc_tax_percentage` DECIMAL(5,2))
        |USING parquet
      """.stripMargin)
    sql(
      """
        |CREATE TABLE `warehouse` (
        |`w_warehouse_sk` INT, `w_warehouse_id` STRING, `w_warehouse_name` STRING,
        |`w_warehouse_sq_ft` INT, `w_street_number` STRING, `w_street_name` STRING,
        |`w_street_type` STRING, `w_suite_number` STRING, `w_city` STRING, `w_county` STRING,
        |`w_state` STRING, `w_zip` STRING, `w_country` STRING, `w_gmt_offset` DECIMAL(5,2))
        |USING parquet
      """.stripMargin)
    sql(
      """
        |CREATE TABLE `ship_mode` (
        |`sm_ship_mode_sk` INT, `sm_ship_mode_id` STRING, `sm_type` STRING, `sm_code` STRING,
        |`sm_carrier` STRING, `sm_contract` STRING)
        |USING parquet
      """.stripMargin)
    sql(
      """
        |CREATE TABLE `income_band` (
        |`ib_income_band_sk` INT, `ib_lower_bound` INT, `ib_upper_bound` INT)
        |USING parquet
      """.stripMargin)
    sql(
      """
        |CREATE TABLE `time_dim` (
        |`t_time_sk` INT, `t_time_id` STRING, `t_time` INT, `t_hour` INT, `t_minute` INT,
        |`t_second` INT, `t_am_pm` STRING, `t_shift` STRING, `t_sub_shift` STRING,
        |`t_meal_time` STRING)
        |USING parquet
      """.stripMargin)
    sql(
      """
        |CREATE TABLE `web_page` (`wp_web_page_sk` INT, `wp_web_page_id` STRING,
        |`wp_rec_start_date` DATE, `wp_rec_end_date` DATE, `wp_creation_date_sk` INT,
        |`wp_access_date_sk` INT, `wp_autogen_flag` STRING, `wp_customer_sk` INT,
        |`wp_url` STRING, `wp_type` STRING, `wp_char_count` INT, `wp_link_count` INT,
        |`wp_image_count` INT, `wp_max_ad_count` INT)
        |USING parquet
      """.stripMargin)
  }

  // The 99 official TPC-DS queries (multi-part queries split into a/b variants).
  val tpcdsQueries = Seq(
    "q1", "q2", "q3", "q4", "q5", "q6", "q7", "q8", "q9", "q10", "q11",
    "q12", "q13", "q14a", "q14b", "q15", "q16", "q17", "q18", "q19", "q20",
    "q21", "q22", "q23a", "q23b", "q24a", "q24b", "q25", "q26", "q27", "q28", "q29", "q30",
    "q31", "q32", "q33", "q34", "q35", "q36", "q37", "q38", "q39a", "q39b", "q40",
    "q41", "q42", "q43", "q44", "q45", "q46", "q47", "q48", "q49", "q50",
    "q51", "q52", "q53", "q54", "q55", "q56", "q57", "q58", "q59", "q60",
    "q61", "q62", "q63", "q64", "q65", "q66", "q67", "q68", "q69", "q70",
    "q71", "q72", "q73", "q74", "q75", "q76", "q77", "q78", "q79", "q80",
    "q81", "q82", "q83", "q84", "q85", "q86", "q87", "q88", "q89", "q90",
    "q91", "q92", "q93", "q94", "q95", "q96", "q97", "q98", "q99")

  tpcdsQueries.foreach { name =>
    val queryString = resourceToString(s"tpcds/$name.sql",
      classLoader = Thread.currentThread().getContextClassLoader)
    test(name) {
      // Several TPC-DS queries contain cross joins, so they must be explicitly allowed.
      withSQLConf(SQLConf.CROSS_JOINS_ENABLED.key -> "true") {
        // check the plans can be properly generated
        val plan = sql(queryString).queryExecution.executedPlan
        checkGeneratedCode(plan)
      }
    }
  }

  // These queries are from https://github.com/cloudera/impala-tpcds-kit/tree/master/queries
  val modifiedTPCDSQueries = Seq(
    "q3", "q7", "q10", "q19", "q27", "q34", "q42", "q43", "q46", "q52", "q53", "q55", "q59",
    "q63", "q65", "q68", "q73", "q79", "q89", "q98", "ss_max")

  modifiedTPCDSQueries.foreach { name =>
    val queryString = resourceToString(s"tpcds-modifiedQueries/$name.sql",
      classLoader = Thread.currentThread().getContextClassLoader)
    test(s"modified-$name") {
      // check the plans can be properly generated
      val plan = sql(queryString).queryExecution.executedPlan
      checkGeneratedCode(plan)
    }
  }
}
| ioana-delaney/spark | sql/core/src/test/scala/org/apache/spark/sql/TPCDSQuerySuite.scala | Scala | apache-2.0 | 17,277 |
package org.talg.home.converters
import org.talg.home.entities.Person
import com.mongodb.casbah.Imports._
import org.talg.home.entities.PersonMongoProperties._
/**
* User: talg
*/
/**
 * Maps [[Person]] instances to and from their MongoDB (Casbah) representation.
 * Field names come from PersonMongoProperties; `mongoFail` aborts on missing data.
 */
object PersonMongoConverter {

  /** Serializes a [[Person]] into a `DBObject`, preserving the original field order. */
  def convertToMongoObject(person: Person): DBObject =
    MongoDBObject(
      ID -> person._id,
      NAME -> person.name,
      AGE -> person.age,
      KNOWN_LANGUAGES -> MongoDBList(person.knownLanguages: _*),
      ADDRESS -> AddressMongoConverter.convertToMongoObject(person.address),
      SOCIAL_ID -> person.socialId // Pay attention this is an Option
    )

  /** Deserializes a `DBObject` back into a [[Person]]; required fields trigger `mongoFail` when absent. */
  def convertFromMongoObject(db: DBObject): Person = {
    // Known languages are stored as a Mongo list; keep only the string entries.
    val languages = db.getAs[MongoDBList](KNOWN_LANGUAGES) match {
      case Some(raw) => raw collect { case language: String => language }
      case None      => mongoFail
    }
    Person(
      _id = db.getAsOrElse[ObjectId](ID, mongoFail),
      age = db.getAsOrElse[Int](AGE, mongoFail),
      name = db.getAsOrElse[String](NAME, mongoFail),
      address = AddressMongoConverter.convertFromMongoObject(db.getAsOrElse[DBObject](ADDRESS, mongoFail)),
      knownLanguages = languages,
      socialId = db.getAs[Long](SOCIAL_ID) // optional field
    )
  }
}
| talgendler/casbah | src/main/scala/org/talg/home/converters/PersonMongoConverter.scala | Scala | mit | 1,402 |
package io.getquill
import io.getquill.context.sql.{ TestDecoders, TestEncoders }
import monix.execution.Scheduler
package object postgres {
// Monix scheduler backing the NDBC context below.
private implicit val scheduler = Scheduler.global
// Shared test context bound to the "testPostgresDB" configuration with literal naming.
object testContext extends PostgresMonixNdbcContext(Literal, "testPostgresDB")
with TestEntities with TestEncoders with TestDecoders
}
| getquill/quill | quill-ndbc-monix/src/test/scala/io/getquill/postgres/package.scala | Scala | apache-2.0 | 336 |
package com.lucidchart.open.nark.controllers
import com.lucidchart.open.nark.request.{AppAction, AuthAction}
import com.lucidchart.open.nark.models.{DynamicAlertModel, DynamicAlertTagModel, DynamicAlertTagSubscriptionModel, TagConverter}
import com.lucidchart.open.nark.models.records.{DynamicAlert, TagMap, Pagination}
import com.lucidchart.open.nark.views
import play.api.libs.json.Json
// Singleton instance of the controller — presumably the one referenced from the routes file; verify.
object DynamicAlertTagsController extends DynamicAlertTagsController
class DynamicAlertTagsController extends AppController {
/**
 * Shows a tag's detail page: every dynamic alert carrying the tag plus the tag's subscriptions.
 * @param tag the tag to look up
 */
def tag(tag: String) = AuthAction.maybeAuthenticatedUser { implicit user =>
  AppAction { implicit request =>
    val taggedAlertIds = DynamicAlertTagModel.findAlertsByTag(tag).map(_.recordId)
    val taggedAlerts = DynamicAlertModel.findDynamicAlertByID(taggedAlertIds)
    val tagSubscriptions = DynamicAlertTagSubscriptionModel.getSubscriptionsByTag(tag)
    Ok(views.html.datags.tag(tag, taggedAlerts, tagSubscriptions))
  }
}
/**
* Search for a specific tag
* @param term the search term
* @param page the page of search results to show
*/
def search(term: String, page: Int) = AuthAction.maybeAuthenticatedUser { implicit user =>
AppAction { implicit request =>
// Pages are 1-based in the UI; clamp and convert to the model layer's 0-based index.
val realPage = page.max(1)
val (found, tags) = DynamicAlertTagModel.search(term, realPage - 1)
val alertTags = DynamicAlertTagModel.findAlertsByTag(tags.map{_.tag})
// Resolve the alerts behind the tag matches, dropping soft-deleted ones.
val alerts = DynamicAlertModel.findDynamicAlertByID(alertTags.map(_.recordId).distinct).filter(!_.deleted)
Ok(views.html.datags.search(term, Pagination[TagMap[DynamicAlert]](realPage, found, DynamicAlertTagModel.configuredLimit, List(TagConverter.toTagMap[DynamicAlert](alertTags, alerts)))))
}
}
/**
 * Searches tags by name prefix and returns JSON shaped for jquery-tokeninput
 * (an array of `{id, name}` objects).
 */
def searchToJson(term: String) = AuthAction.maybeAuthenticatedUser { implicit user =>
  AppAction { implicit request =>
    // Prefix match on the first page of results; the total count is not needed here.
    val (found, matches) = DynamicAlertTagModel.search(term + "%", 0)
    val tokenEntries = matches map { tagRecord =>
      Json.obj("id" -> tagRecord.recordId.toString, "name" -> tagRecord.tag)
    }
    Ok(Json.toJson(tokenEntries))
  }
}
} | lucidsoftware/nark | app/com/lucidchart/open/nark/controllers/DynamicAlertTagsController.scala | Scala | apache-2.0 | 2,137 |
// scalac run-test for `abstract override` (stackable trait modifications).
// The printed output is the test oracle, so the code itself must stay unchanged.
abstract class AbsIterator {
type T
def hasNext: Boolean
def next: T
}
// Adds foreach on top of any AbsIterator.
trait RichIterator extends AbsIterator {
def foreach(f: T => Unit) {
while (hasNext) f(next)
}
}
// Concrete iterator over the characters of a string; prints each element it yields.
class StringIterator(s: String) extends AbsIterator {
type T = Char
private var i = 0
def hasNext = i < s.length()
def next = { val x = s.charAt(i); i += 1; println("next: " + x); x }
}
// Stackable modification: wraps hasNext/next in `synchronized`, delegating via super.
trait SyncIterator extends AbsIterator {
abstract override def hasNext: Boolean =
synchronized(super.hasNext)
abstract override def next: T =
synchronized {
println("<sync>"); val x = super.next; println("</sync>"); x
}
}
// Stackable modification: logs each element produced by super.next.
trait LoggedIterator extends AbsIterator {
abstract override def next: T = {
val x = super.next; println("log: " + x); x
}
}
// Linearization: StringIterator, then SyncIterator, then LoggedIterator (outermost).
class Iter2(s: String) extends StringIterator(s)
with SyncIterator with LoggedIterator;
object Test {
def main(args: Array[String]) {
class Iter extends StringIterator(args(0)) with RichIterator with SyncIterator with LoggedIterator
val iter = new Iter
iter foreach Console.println
}
}
| felixmulder/scala | test/files/run/absoverride.scala | Scala | bsd-3-clause | 1,073 |
package openreveal
/**
* Created by Paul Lysak on 02.06.15.
*/
package object model {
// Countries are plain strings for now; no validation is performed on the value.
//TODO restrict it with enums
type Country = String
}
| paul-lysak/OpenReveal | src/main/scala/openreveal/model/package.scala | Scala | apache-2.0 | 149 |
// Turns a list of monadic values into a single monadic list, keeping the left-to-right effect order.
def sequence[A](lma: List[F[A]]): F[List[A]] =
  lma.foldRight(unit(Nil: List[A]))((fa, acc) => map2(fa, acc)(_ :: _))
def traverse[A,B](la: List[A])(f: A => F[B]): F[List[B]] =
la.foldRight(unit(List[B]()))((a, mlb) => map2(f(a), mlb)(_ :: _)) | willcodejavaforfood/fpinscala | answerkey/monads/3.answer.scala | Scala | mit | 244 |
/*
Copyright The MITRE Corporation 2009-2010. All rights reserved.
*/
package org.mitre.jcarafe.maxent
import org.mitre.jcarafe.crf.{MemoryAccessSeq, AccessSeq, AbstractInstance}
/**
 * Instance-sequence accessor that reads instances lazily from a disk cache
 * directory instead of holding them in memory. Each instance lives in a file
 * named by its index.
 *
 * @param diskCache directory containing one serialized instance per file
 * @param st start index (inclusive) of the window this accessor covers
 * @param en end index (exclusive) of the window
 */
class MaxEntDiskAccessSeq(val diskCache: String, val st: Int, val en: Int) extends MaxEntMemoryAccessSeq(Vector()) {

  // NOTE(review): `i` is used as an absolute file index; if callers pass indices
  // relative to the [st, en) window this should arguably be `st + i` — confirm
  // against the AccessSeq contract.
  override def accessSingleInstance(i: Int) = {
    // File(parent, child) instead of manual "/" concatenation for portable path joining.
    MESerializations.readInstance(new java.io.File(diskCache, i.toString))
  }

  override def length = en - st

  // Splitting would require materializing the on-disk cache; explicitly unsupported.
  // UnsupportedOperationException is a RuntimeException subclass, so existing catch
  // sites remain compatible.
  override def splitAccessor(n: Int): Seq[MemoryAccessSeq] = {
    throw new UnsupportedOperationException("splitAccessor unsupported with MaxEntDiskAccessSeq")
  }
}
| wellner/jcarafe | jcarafe-core/src/main/scala/org/mitre/jcarafe/maxent/MaxEntDiskAccessSeq.scala | Scala | bsd-3-clause | 645 |
/**
* Copyright (C) 2010 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.xforms.analysis.model
import org.dom4j._
import org.orbeon.oxf.xforms._
import action.XFormsActions
import analysis._
import event.EventHandlerImpl
import collection.JavaConverters._
import org.orbeon.oxf.xforms.XFormsConstants._
import java.lang.String
import collection.mutable.LinkedHashMap
import org.orbeon.oxf.xml.{Dom4j, ContentHandlerHelper}
import Model._
import org.orbeon.oxf.xforms.xbl.Scope
import org.orbeon.oxf.xml.dom4j.Dom4jUtils
/**
* Static analysis of an XForms model <xf:model> element.
*/
// Static-analysis representation of one <xf:model>. The mixed-in traits each
// handle one aspect (instances, variables, submissions, event handlers, binds).
class Model(val staticStateContext: StaticStateContext, elem: Element, parent: Option[ElementAnalysis], preceding: Option[ElementAnalysis], val scope: Scope)
extends ElementAnalysis(staticStateContext.partAnalysis, elem, parent, preceding)
with ChildrenBuilderTrait
with ModelInstances
with ModelVariables
with ModelSubmissions
with ModelEventHandlers
with ModelBinds {
require(staticStateContext ne null)
require(scope ne null)
val namespaceMapping = part.metadata.getNamespaceMapping(prefixedId)
// NOTE: It is possible to imagine a model having a context and binding, but this is not supported now
protected def computeContextAnalysis = None
protected def computeValueAnalysis = None
protected def computeBindingAnalysis = None
// A model is its own "containing model".
val model = Some(this)
// NOTE: Same code is in SimpleElementAnalysis, which is not optimal → maybe think about passing the container scope to constructors
def containerScope = part.containingScope(prefixedId)
// Children evaluate against the model's default (first) instance, when there is one.
override def getChildrenContext = defaultInstancePrefixedId map { defaultInstancePrefixedId ⇒ // instance('defaultInstanceId')
PathMapXPathAnalysis(part, PathMapXPathAnalysis.buildInstanceString(defaultInstancePrefixedId),
null, None, Map.empty[String, VariableTrait], null, scope, Some(defaultInstancePrefixedId), locationData, element, avt = false)
}
// For now this only checks actions and submissions, in the future should also build rest of content
override def findRelevantChildrenElements =
findAllChildrenElements collect
{ case (e, s) if XFormsActions.isAction(e.getQName) || Set(XFORMS_SUBMISSION_QNAME, XFORMS_INSTANCE_QNAME)(e.getQName) ⇒ (e, s) }
// Above we only create actions, submissions and instances as children. But binds are also indexed so add them.
override def indexedElements = super.indexedElements ++ bindsById.values
// Runs XPath analysis for the model itself, then its variables and binds.
override def analyzeXPath() {
// Analyze this
super.analyzeXPath()
analyzeVariablesXPath()
analyzeBindsXPath()
}
override def toXMLAttributes = Seq(
"scope" → scope.scopeId,
"prefixed-id" → prefixedId,
"default-instance-prefixed-id" → defaultInstancePrefixedId.orNull,
"analyzed-binds" → figuredAllBindRefAnalysis.toString
)
// Debug/serialization output: delegates to each aspect trait in turn.
override def toXMLContent(helper: ContentHandlerHelper): Unit = {
super.toXMLContent(helper)
variablesToXML(helper)
bindsToXML(helper)
instancesToXML(helper)
handlersToXML(helper)
}
override def freeTransientState(): Unit = {
super.freeTransientState()
freeVariablesTransientState()
freeBindsTransientState()
}
}
// Instance-related aspect of Model: indexes child <xf:instance> elements and
// exposes the default (first) instance.
trait ModelInstances {
self: Model ⇒
// Instance objects, keyed by static id, in document order.
lazy val instances: collection.Map[String, Instance] = LinkedHashMap(children collect { case instance: Instance ⇒ instance.staticId → instance }: _*)
def instancesMap = instances.asJava
// General info about instances
lazy val hasInstances = instances.nonEmpty
lazy val defaultInstance = instances.headOption map (_._2)
lazy val defaultInstanceStaticId = instances.headOption map (_._1) orNull
lazy val defaultInstancePrefixedId = Option(if (hasInstances) scope.fullPrefix + defaultInstanceStaticId else null)
// TODO: instances on which MIPs depend
def instancesToXML(helper: ContentHandlerHelper): Unit = {
// Output instances information
def outputInstanceList(name: String, values: collection.Set[String]) {
if (values.nonEmpty) {
helper.startElement(name)
for (value ← values)
helper.element("instance", value)
helper.endElement()
}
}
outputInstanceList("bind-instances", bindInstances)
outputInstanceList("computed-binds-instances", computedBindExpressionsInstances)
outputInstanceList("validation-binds-instances", validationBindInstances)
}
}
// Handling of top-level model variables (`*:variable` / `*:var` elements).
trait ModelVariables {

  self: Model ⇒

  // NOTE: It is possible to imagine a model having in-scope variables, but this is not supported now
  val inScopeVariables = Map.empty[String, VariableTrait]

  // Get *:variable/*:var elements
  // NOTE: no need to round-trip through a Java collection here as previously done (asJava/asScala)
  private val variableElements = Dom4j.elements(self.element) filter (e ⇒ ControlAnalysisFactory.isVariable(e.getQName))

  // Handle variables
  val variablesSeq: Seq[VariableAnalysisTrait] = {

    // NOTE: For now, all top-level variables in a model are visible first, then only are binds variables visible.
    // In the future, we might want to change that to use document order between variables and binds, but some
    // more thinking is needed wrt the processing model.

    // Iterate and resolve all variables in order, threading each as the `preceding` of the next
    var preceding: Option[SimpleElementAnalysis with VariableAnalysisTrait] = None

    for {
      variableElement ← variableElements
      analysis: VariableAnalysisTrait = {
        val result = new SimpleElementAnalysis(staticStateContext, variableElement, Some(self), preceding, scope) with VariableAnalysisTrait
        preceding = Some(result)
        result
      }
    } yield
      analysis
  }

  def jVariablesSeq = variablesSeq.asJava

  // Variables indexed by name (avoid postfix `toMap`)
  val variablesMap: Map[String, VariableAnalysisTrait] = (variablesSeq map (variable ⇒ variable.name → variable)).toMap
  val jVariablesMap = variablesMap.asJava

  def analyzeVariablesXPath(): Unit =
    for (variable ← variablesSeq)
      variable.analyzeXPath()

  def variablesToXML(helper: ContentHandlerHelper): Unit =
    // Output variable information
    for (variable ← variablesSeq)
      variable.toXML(helper)

  def freeVariablesTransientState(): Unit =
    for (variable ← variablesSeq)
      variable.freeTransientState()
}
// Access to a model's submissions.
trait ModelSubmissions {

  self: Model ⇒

  // Submissions (they are all direct children)
  lazy val submissions = children flatMap {
    case s: Submission ⇒ List(s)
    case _             ⇒ Nil
  }
  def jSubmissions = submissions.asJava
}
// Access to a model's event handlers.
trait ModelEventHandlers {

  self: Model ⇒

  // Event handlers, including on submissions and within nested actions
  lazy val eventHandlers = descendants collect { case e: EventHandlerImpl ⇒ e }
  def jEventHandlers = eventHandlers.asJava

  // Serialize each handler for debug XML output
  def handlersToXML(helper: ContentHandlerHelper) =
    for (handler ← eventHandlers)
      handler.toXML(helper)
}
// Handling of a model's binds: construction/rebuilding of the bind tree and delegation to it.
trait ModelBinds {

  selfModel: Model ⇒

  // FIXME: A bit unhappy with this. Laziness desired because of init order issues with the superclass. There has to be a simpler way!
  // Memoizes its argument: evaluated at most once, upon the first `apply()`.
  private class LazyConstant[T](evaluate: ⇒ T) extends (() ⇒ T) {
    private lazy val result = evaluate
    def apply() = result
  }

  // Q: Why do we pass isCustomMIP to BindTree? Init order issue?
  // Predicate deciding whether a QName appearing on xf:bind may be treated as a custom MIP.
  private def isCustomMIP: QName ⇒ Boolean = {

    import ElementAnalysis.attQNameSet

    // A custom MIP must have a prefix (not starting with "xml"), and must either be one of the
    // explicitly allowed standard custom MIPs or live outside the reserved XForms namespaces.
    def canBeCustomMIP(qName: QName) =
      qName.getNamespacePrefix.nonEmpty &&
      ! qName.getNamespacePrefix.startsWith("xml") &&
      (StandardCustomMIPsQNames(qName) || ! NeverCustomMIPsURIs(qName.getNamespaceURI))

    Option(selfModel.element.attribute(XXFORMS_CUSTOM_MIPS_QNAME)) match {
      case Some(_) ⇒
        // If the attribute is present, allow all specified QNames if valid, plus standard MIP QNames
        attQNameSet(selfModel.element, XXFORMS_CUSTOM_MIPS_QNAME, namespaceMapping) ++ StandardCustomMIPsQNames filter canBeCustomMIP
      case None ⇒
        // Attribute not present: backward-compatible behavior
        canBeCustomMIP
    }
  }

  // The bind tree is replaced wholesale by `rebuildBinds()`, hence a var; LazyConstant defers
  // construction until first use because of the init-order issue noted above.
  private var bindTree = new LazyConstant(new BindTree(selfModel, Dom4j.elements(selfModel.element, XFORMS_BIND_QNAME), isCustomMIP))

  // Annotate a detached raw bind subtree so it can be integrated into this part's scopes
  private def annotateSubTree(rawElement: Element) = {
    val (annotatedTree, _) =
      part.xblBindings.annotateSubtree(
        None,
        Dom4jUtils.createDocumentCopyParentNamespaces(rawElement),
        scope,
        scope,
        XXBLScope.inner,
        containerScope,
        hasFullUpdate = false,
        ignoreRoot = false,
        needCompact = false)

    annotatedTree
  }

  // Destroy the current bind tree and rebuild it from the given raw model element.
  // Only supported for nested (non-top-level) parts.
  def rebuildBinds(rawModelElement: Element): Unit = {

    assert(! selfModel.part.isTopLevel)

    bindTree().destroy()
    bindTree = new LazyConstant(new BindTree(selfModel, Dom4j.elements(rawModelElement, XFORMS_BIND_QNAME) map (annotateSubTree(_).getRootElement), isCustomMIP))
  }

  // Plain delegation to the current bind tree
  def bindsById = bindTree().bindsById
  def bindsByName = bindTree().bindsByName
  def jBindsByName = bindTree().bindsByName.asJava

  def hasDefaultValueBind = bindTree().hasDefaultValueBind
  def hasCalculateBind = bindTree().hasCalculateBind
  def hasTypeBind = bindTree().hasTypeBind
  def hasRequiredBind = bindTree().hasRequiredBind
  def hasConstraintBind = bindTree().hasConstraintBind
  def hasCalculateComputedCustomBind = bindTree().hasCalculateComputedCustomBind
  def hasValidateBind = bindTree().hasValidateBind

  def bindInstances = bindTree().bindInstances
  def computedBindExpressionsInstances = bindTree().computedBindExpressionsInstances
  def validationBindInstances = bindTree().validationBindInstances

  // TODO: use and produce variables introduced with xf:bind/@name

  def topLevelBinds = bindTree().topLevelBinds
  def topLevelBindsJava = topLevelBinds.asJava

  def hasBinds = bindTree().hasBinds
  def containsBind(bindId: String) = bindTree().bindIds(bindId)

  def figuredAllBindRefAnalysis = bindTree().figuredAllBindRefAnalysis

  def analyzeBindsXPath() = bindTree().analyzeBindsXPath()
  def bindsToXML(helper: ContentHandlerHelper) = bindTree().bindsToXML(helper)
  def freeBindsTransientState() = bindTree().freeBindsTransientState()
}
// Companion object holding the MIP (Model Item Property) enumeration and shared constants.
object Model {

  // MIP enumeration
  sealed trait MIP { def name: String; val aName: QName; val eName: QName }

  // NOTE: the case objects below use early-initializer syntax (`extends { val name = ... } with ...`)
  // so `name` is available when the traits' vals (aName/eName) are initialized.
  trait StdMIP extends MIP { val name: String; val aName = QName.get(name); val eName = QName.get(name, XFORMS_NAMESPACE) }
  trait ExtMIP extends MIP { val name: String; val aName = QName.get(name, XXFORMS_NAMESPACE); val eName = QName.get(name, XXFORMS_NAMESPACE) }

  trait ComputedMIP extends MIP
  trait ValidateMIP extends MIP
  trait XPathMIP extends MIP
  trait BooleanMIP extends XPathMIP
  trait StringMIP extends XPathMIP

  // NOTE: "required" is special: it is evaluated during recalculate, but used during revalidate. In effect both
  // recalculate AND revalidate depend on it. Ideally maybe revalidate would depend on the the *value* of the
  // "required" MIP, not on the XPath of it. See also what we would need for xxf:valid(), etc. functions.
  case object Relevant extends { val name = "relevant" } with StdMIP with BooleanMIP with ComputedMIP
  case object Readonly extends { val name = "readonly" } with StdMIP with BooleanMIP with ComputedMIP
  case object Required extends { val name = "required" } with StdMIP with BooleanMIP with ComputedMIP with ValidateMIP
  case object Constraint extends { val name = "constraint" } with StdMIP with BooleanMIP with ValidateMIP
  case object Calculate extends { val name = "calculate" } with StdMIP with StringMIP with ComputedMIP
  case object Default extends { val name = "default" } with ExtMIP with StringMIP with ComputedMIP
  case object Type extends { val name = "type" } with StdMIP with ValidateMIP

  //case class Custom(n: String) extends { val name = n } with StdMIP with XPathMIP

  // Derived collections, keyed/sorted by MIP name
  val AllMIPs = Set[MIP](Relevant, Readonly, Required, Constraint, Calculate, Default, Type)
  val AllMIPsInOrder = AllMIPs.toList.sortBy(_.name)
  val AllMIPNamesInOrder = AllMIPsInOrder map (_.name)
  val AllMIPsByName = AllMIPs map (mip ⇒ mip.name → mip) toMap
  val AllMIPNames = AllMIPs map (_.name)
  val MIPNameToAttributeQName = AllMIPs map (m ⇒ m.name → m.aName) toMap

  val QNameToXPathComputedMIP = AllMIPs collect { case m: XPathMIP with ComputedMIP ⇒ m.aName → m } toMap
  val QNameToXPathValidateMIP = AllMIPs collect { case m: XPathMIP with ValidateMIP ⇒ m.aName → m } toMap
  val QNameToXPathMIP = QNameToXPathComputedMIP ++ QNameToXPathValidateMIP

  val CalculateMIPNames = AllMIPs collect { case m: ComputedMIP ⇒ m.name }
  val ValidateMIPNames = AllMIPs collect { case m: ValidateMIP ⇒ m.name }
  val BooleanXPathMIPNames = AllMIPs collect { case m: XPathMIP with BooleanMIP ⇒ m.name }
  val StringXPathMIPNames = AllMIPs collect { case m: XPathMIP with StringMIP ⇒ m.name }

  // QNames always allowed as custom MIPs, and namespaces in which custom MIPs may never live
  val StandardCustomMIPsQNames = Set(XXFORMS_EVENT_MODE_QNAME)
  val NeverCustomMIPsURIs = Set(XFORMS_NAMESPACE_URI, XXFORMS_NAMESPACE_URI)

  // Custom MIP names use '-' instead of ':' as separator
  def buildCustomMIPName(qualifiedName: String) = qualifiedName.replace(':', '-')

  // Constants for Java callers
  val RELEVANT = Relevant.name
  val READONLY = Readonly.name
  val REQUIRED = Required.name
  val CONSTRAINT = Constraint.name
  val CALCULATE = Calculate.name
  val DEFAULT = Default.name
  val TYPE = Type.name

  // MIP default values
  val DEFAULT_RELEVANT = true
  val DEFAULT_READONLY = false
  val DEFAULT_REQUIRED = false
  val DEFAULT_VALID = true
  val DEFAULT_CONSTRAINT = true

  // Schema type names specific to XForms (beyond plain XML Schema types)
  val XFormsSchemaTypeNames = Set(
    "dayTimeDuration",
    "yearMonthDuration",
    "email",
    "card-number"
  )

  val jXFormsSchemaTypeNames = XFormsSchemaTypeNames.asJava

  // XML Schema built-in type names also available in the XForms namespace
  val XFormsVariationTypeNames = Set(
    "dateTime",
    "time",
    "date",
    "gYearMonth",
    "gYear",
    "gMonthDay",
    "gDay",
    "gMonth",
    "string",
    "boolean",
    "base64Binary",
    "hexBinary",
    "float",
    "decimal",
    "double",
    "anyURI",
    "QName",
    "normalizedString",
    "token",
    "language",
    "Name",
    "NCName",
    "ID",
    "IDREF",
    "IDREFS",
    "NMTOKEN",
    "NMTOKENS",
    "integer",
    "nonPositiveInteger",
    "negativeInteger",
    "long",
    "int",
    "short",
    "byte",
    "nonNegativeInteger",
    "unsignedLong",
    "unsignedInt",
    "unsignedShort",
    "unsignedByte",
    "positiveInteger"
  )

  // Additional type names defined by XForms itself
  val XFormsTypeNames = Set(
    "listItem",
    "listItems",
    "dayTimeDuration",
    "yearMonthDuration",
    "email",
    "card-number",
    "HTMLFragment" // XForms 2.0
  )

  val jXFormsVariationTypeNames = XFormsVariationTypeNames.asJava
}
| evlist/orbeon-forms | src/main/scala/org/orbeon/oxf/xforms/analysis/model/Model.scala | Scala | lgpl-2.1 | 16,339 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.unsafe.types
import org.apache.commons.lang3.StringUtils
import org.scalacheck.{Arbitrary, Gen}
import org.scalatest.prop.GeneratorDrivenPropertyChecks
// scalastyle:off
import org.scalatest.{FunSuite, Matchers}
import org.apache.spark.unsafe.types.UTF8String.{fromString => toUTF8}
/**
* This TestSuite utilize ScalaCheck to generate randomized inputs for UTF8String testing.
*/
class UTF8StringPropertyCheckSuite extends FunSuite with GeneratorDrivenPropertyChecks with Matchers {
// scalastyle:on

  // Each test checks that a UTF8String operation agrees with the equivalent java.lang.String
  // operation on arbitrary generated inputs.

  test("toString") {
    forAll { (s: String) =>
      assert(toUTF8(s).toString() === s)
    }
  }

  test("numChars") {
    forAll { (s: String) =>
      assert(toUTF8(s).numChars() === s.length)
    }
  }

  test("startsWith") {
    forAll { (s: String) =>
      val utf8 = toUTF8(s)
      assert(utf8.startsWith(utf8))
      for (i <- 1 to s.length) {
        assert(utf8.startsWith(toUTF8(s.dropRight(i))))
      }
    }
  }

  test("endsWith") {
    forAll { (s: String) =>
      val utf8 = toUTF8(s)
      assert(utf8.endsWith(utf8))
      for (i <- 1 to s.length) {
        assert(utf8.endsWith(toUTF8(s.drop(i))))
      }
    }
  }

  // scalastyle:off caselocale
  test("toUpperCase") {
    forAll { (s: String) =>
      assert(toUTF8(s).toUpperCase === toUTF8(s.toUpperCase))
    }
  }

  test("toLowerCase") {
    forAll { (s: String) =>
      assert(toUTF8(s).toLowerCase === toUTF8(s.toLowerCase))
    }
  }
  // scalastyle:on caselocale

  test("compare") {
    forAll { (s1: String, s2: String) =>
      // Only the sign of compareTo is specified, so compare signums rather than raw values
      assert(Math.signum(toUTF8(s1).compareTo(toUTF8(s2))) === Math.signum(s1.compareTo(s2)))
    }
  }

  test("substring") {
    forAll { (s: String) =>
      for (start <- 0 to s.length; end <- 0 to s.length; if start <= end) {
        assert(toUTF8(s).substring(start, end).toString === s.substring(start, end))
      }
    }
  }

  test("contains") {
    forAll { (s: String) =>
      for (start <- 0 to s.length; end <- 0 to s.length; if start <= end) {
        val substring = s.substring(start, end)
        assert(toUTF8(s).contains(toUTF8(substring)) === s.contains(substring))
      }
    }
  }

  // Generators: runs of ASCII spaces (0x20) and arbitrary strings, used for trim tests below
  val whitespaceChar: Gen[Char] = Gen.const(0x20.toChar)
  val whitespaceString: Gen[String] = Gen.listOf(whitespaceChar).map(_.mkString)
  val randomString: Gen[String] = Arbitrary.arbString.arbitrary

  test("trim, trimLeft, trimRight") {
    // lTrim and rTrim are both modified from java.lang.String.trim
    // Reference implementations stripping only ' ' (not all control chars like String.trim)
    def lTrim(s: String): String = {
      var st = 0
      val array: Array[Char] = s.toCharArray
      while ((st < s.length) && (array(st) == ' ')) {
        st += 1
      }
      if (st > 0) s.substring(st, s.length) else s
    }
    def rTrim(s: String): String = {
      var len = s.length
      val array: Array[Char] = s.toCharArray
      while ((len > 0) && (array(len - 1) == ' ')) {
        len -= 1
      }
      if (len < s.length) s.substring(0, len) else s
    }

    forAll(
      whitespaceString,
      randomString,
      whitespaceString
    ) { (start: String, middle: String, end: String) =>
      val s = start + middle + end
      assert(toUTF8(s).trim() === toUTF8(rTrim(lTrim(s))))
      assert(toUTF8(s).trimLeft() === toUTF8(lTrim(s)))
      assert(toUTF8(s).trimRight() === toUTF8(rTrim(s)))
    }
  }

  test("reverse") {
    forAll { (s: String) =>
      assert(toUTF8(s).reverse === toUTF8(s.reverse))
    }
  }

  test("indexOf") {
    forAll { (s: String) =>
      for (start <- 0 to s.length; end <- 0 to s.length; if start <= end) {
        val substring = s.substring(start, end)
        assert(toUTF8(s).indexOf(toUTF8(substring), 0) === s.indexOf(substring))
      }
    }
  }

  // Bounded so generated repeat counts / pad lengths stay small
  val randomInt = Gen.choose(-100, 100)

  test("repeat") {
    // Reference implementation: non-positive counts yield the empty string
    def repeat(str: String, times: Int): String = {
      if (times > 0) str * times else ""
    }
    // ScalaCheck always generating too large repeat times which might hang the test forever.
    forAll(randomString, randomInt) { (s: String, times: Int) =>
      assert(toUTF8(s).repeat(times) === toUTF8(repeat(s, times)))
    }
  }

  test("lpad, rpad") {
    // Reference implementation of SQL-style LPAD/RPAD: truncate when target length is shorter,
    // otherwise fill with repetitions of `pad` plus a partial prefix of it.
    def padding(origin: String, pad: String, length: Int, isLPad: Boolean): String = {
      if (length <= 0) return ""
      if (length <= origin.length) {
        origin.substring(0, length)
      } else {
        if (pad.length == 0) return origin
        val toPad = length - origin.length
        val partPad = if (toPad % pad.length == 0) "" else pad.substring(0, toPad % pad.length)
        if (isLPad) {
          pad * (toPad / pad.length) + partPad + origin
        } else {
          origin + pad * (toPad / pad.length) + partPad
        }
      }
    }

    forAll (
      randomString,
      randomString,
      randomInt
    ) { (s: String, pad: String, length: Int) =>
      assert(toUTF8(s).lpad(length, toUTF8(pad)) ===
        toUTF8(padding(s, pad, length, true)))
      assert(toUTF8(s).rpad(length, toUTF8(pad)) ===
        toUTF8(padding(s, pad, length, false)))
    }
  }

  // Sequences whose elements may be null, to exercise null propagation in concat/concatWs
  val nullalbeSeq = Gen.listOf(Gen.oneOf[String](null: String, randomString))

  test("concat") {
    // Reference: any null input makes the whole concatenation null
    def concat(origin: Seq[String]): String =
      if (origin.contains(null)) null else origin.mkString

    forAll { (inputs: Seq[String]) =>
      assert(UTF8String.concat(inputs.map(toUTF8): _*) === toUTF8(inputs.mkString))
    }
    forAll (nullalbeSeq) { (inputs: Seq[String]) =>
      assert(UTF8String.concat(inputs.map(toUTF8): _*) === toUTF8(concat(inputs)))
    }
  }

  test("concatWs") {
    // Reference: null separator makes the result null; null elements are skipped
    def concatWs(sep: String, inputs: Seq[String]): String = {
      if (sep == null) return null
      inputs.filter(_ != null).mkString(sep)
    }

    forAll { (sep: String, inputs: Seq[String]) =>
      assert(UTF8String.concatWs(toUTF8(sep), inputs.map(toUTF8): _*) ===
        toUTF8(inputs.mkString(sep)))
    }
    forAll(randomString, nullalbeSeq) {(sep: String, inputs: Seq[String]) =>
      assert(UTF8String.concatWs(toUTF8(sep), inputs.map(toUTF8): _*) ===
        toUTF8(concatWs(sep, inputs)))
    }
  }

  // TODO: enable this when we find a proper way to generate valid patterns
  ignore("split") {
    forAll { (s: String, pattern: String, limit: Int) =>
      assert(toUTF8(s).split(toUTF8(pattern), limit) ===
        s.split(pattern, limit).map(toUTF8(_)))
    }
  }

  test("levenshteinDistance") {
    forAll { (one: String, another: String) =>
      assert(toUTF8(one).levenshteinDistance(toUTF8(another)) ===
        StringUtils.getLevenshteinDistance(one, another))
    }
  }

  test("hashCode") {
    forAll { (s: String) =>
      assert(toUTF8(s).hashCode() === toUTF8(s).hashCode())
    }
  }

  test("equals") {
    forAll { (one: String, another: String) =>
      assert(toUTF8(one).equals(toUTF8(another)) === one.equals(another))
    }
  }
}
| michalsenkyr/spark | common/unsafe/src/test/scala/org/apache/spark/unsafe/types/UTF8StringPropertyCheckSuite.scala | Scala | apache-2.0 | 7,575 |
package play
import sbt.{ Project => SbtProject, _ }
import sbt.Keys._
import Keys._
import play.core.{ SBTLink, SBTDocHandler }
import play.console.Colors
import annotation.tailrec
import scala.collection.JavaConverters._
import java.net.URLClassLoader
import java.util.jar.JarFile
/**
* Provides mechanisms for running a Play application in SBT
*/
trait PlayRun extends PlayInternalKeys {
this: PlayReloader =>
/**
 * Configuration for the Play docs application's dependencies. Used to build a classloader for
 * that application. Hidden so that it isn't exposed when the user application is published.
 */
val DocsApplication = config("docs") hide

// For some reason, jline disables echo when it creates a new console reader.
// When we use the reader, we also enable echo after using it, so as long as this is lazy, and that holds true,
// then we won't exit SBT with echo disabled.
private lazy val consoleReader = new jline.console.ConsoleReader
// Block until the user presses Ctrl+D (EOF). Ctrl+L clears the screen, Enter prints a newline;
// any other key is ignored. Echo is disabled while waiting and always restored afterwards.
private def waitForKey() = {
  consoleReader.getTerminal.setEchoEnabled(false)
  try {
    @annotation.tailrec
    def loop(): Unit = consoleReader.readCharacter() match {
      case 4 => // STOP on EOF (Ctrl+D)
      case 11 => consoleReader.clearScreen(); loop()
      case 10 => println(); loop()
      case _ => loop()
    }
    loop()
  } finally {
    consoleReader.getTerminal.setEchoEnabled(true)
  }
}
// Parse a port number, aborting with a helpful message on non-numeric input.
// NOTE: String.toInt delegates to Integer.parseInt, so it throws the same NumberFormatException.
private def parsePort(portString: String): Int =
  try portString.toInt
  catch {
    case e: NumberFormatException => sys.error("Invalid port argument: " + portString)
  }
/**
 * Split run arguments into Java system properties and HTTP/HTTPS ports.
 *
 * Leading `-Dkey=value` arguments become system properties; the first remaining argument (or the
 * `http.port` property) is the HTTP port, which may be the literal "disabled".
 *
 * @return (system properties, optional HTTP port, optional HTTPS port)
 */
private def filterArgs(args: Seq[String], defaultHttpPort: Int): (Seq[(String, String)], Option[Int], Option[Int]) = {
  val (properties, others) = args.span(_.startsWith("-D"))

  // Split on the first '=' only (limit 2) so that property values may themselves contain '='
  val javaProperties = properties.map(_.drop(2).split("=", 2)).map(a => a(0) -> a(1)).toSeq

  // collect arguments plus config file property if present
  val httpPort = Option(System.getProperty("http.port"))
  val httpsPort = Option(System.getProperty("https.port"))

  //port can be defined as a numeric argument or as disabled, -Dhttp.port argument or a generic sys property
  val maybePort = others.headOption.orElse(javaProperties.toMap.get("http.port")).orElse(httpPort)
  val maybeHttpsPort = javaProperties.toMap.get("https.port").orElse(httpsPort).map(parsePort)
  if (maybePort.exists(_ == "disabled")) (javaProperties, Option.empty[Int], maybeHttpsPort)
  else (javaProperties, (maybePort.map(parsePort)).orElse(Some(defaultHttpPort)), maybeHttpsPort)
}
// Plain URLClassLoader factory; toString lists the loader's name and its URLs for diagnostics.
val createURLClassLoader: ClassLoaderCreator = (name, urls, parent) => new java.net.URLClassLoader(urls, parent) {
  override def toString = s"$name{${getURLs.mkString(", ")}}"
}
// URLClassLoader factory that delegates all resource lookups to its (required, non-null) parent.
val createDelegatedResourcesClassLoader: ClassLoaderCreator = (name, urls, parent) => new java.net.URLClassLoader(urls, parent) {
  require(parent ne null)
  override def getResources(name: String): java.util.Enumeration[java.net.URL] = getParent.getResources(name)
  override def toString = s"$name{${getURLs.mkString(", ")}}"
}
val playRunSetting: SbtProject.Initialize[InputTask[Unit]] = playRunTask(playRunHooks, playDependencyClasspath, playDependencyClassLoader, playReloaderClasspath, playReloaderClassLoader)
/**
 * Build the input task that runs a Play application in development mode: sets up the classloader
 * hierarchy, starts the Netty server with reloading support, and blocks until the user stops it.
 */
def playRunTask(
  runHooks: TaskKey[Seq[play.PlayRunHook]],
  dependencyClasspath: TaskKey[Classpath], dependencyClassLoader: TaskKey[ClassLoaderCreator],
  reloaderClasspath: TaskKey[Classpath], reloaderClassLoader: TaskKey[ClassLoaderCreator]): SbtProject.Initialize[InputTask[Unit]] = inputTask { (argsTask: TaskKey[Seq[String]]) =>
  (
    argsTask, state, playCommonClassloader, managedClasspath in DocsApplication,
    dependencyClasspath, dependencyClassLoader, reloaderClassLoader
  ) map { (args, state, commonLoader, docsAppClasspath, appDependencyClasspath, createClassLoader, createReloader) =>

    val extracted = SbtProject.extract(state)

    val (_, hooks) = extracted.runTask(runHooks, state)
    val interaction = extracted.get(playInteractionMode)

    // Parse ports and -D properties from the task arguments
    val (properties, httpPort, httpsPort) = filterArgs(args, defaultHttpPort = extracted.get(playDefaultPort))

    require(httpPort.isDefined || httpsPort.isDefined, "You have to specify https.port when http.port is disabled")

    // Set Java properties
    properties.foreach {
      case (key, value) => System.setProperty(key, value)
    }

    println()

    /*
     * We need to do a bit of classloader magic to run the Play application.
     *
     * There are six classloaders:
     *
     * 1. sbtLoader, the classloader of sbt and the Play sbt plugin.
     * 2. commonLoader, a classloader that persists across calls to run.
     *    This classloader is stored inside the
     *    PlayInternalKeys.playCommonClassloader task. This classloader will
     *    load the classes for the H2 database if it finds them in the user's
     *    classpath. This allows H2's in-memory database state to survive across
     *    calls to run.
     * 3. delegatingLoader, a special classloader that overrides class loading
     *    to delegate shared classes for sbt link to the sbtLoader, and accesses
     *    the reloader.currentApplicationClassLoader for resource loading to
     *    make user resources available to dependency classes.
     *    Has the commonLoader as its parent.
     * 4. applicationLoader, contains the application dependencies. Has the
     *    delegatingLoader as its parent. Classes from the commonLoader and
     *    the delegatingLoader are checked for loading first.
     * 5. docsLoader, the classloader for the special play-docs application
     *    that is used to serve documentation when running in development mode.
     *    Has the applicationLoader as its parent for Play dependencies and
     *    delegation to the shared sbt doc link classes.
     * 6. reloader.currentApplicationClassLoader, contains the user classes
     *    and resources. Has applicationLoader as its parent, where the
     *    application dependencies are found, and which will delegate through
     *    to the sbtLoader via the delegatingLoader for the shared link.
     *    Resources are actually loaded by the delegatingLoader, where they
     *    are available to both the reloader and the applicationLoader.
     *    This classloader is recreated on reload. See PlayReloader.
     *
     * Someone working on this code in the future might want to tidy things up
     * by splitting some of the custom logic out of the URLClassLoaders and into
     * their own simpler ClassLoader implementations. The curious cycle between
     * applicationLoader and reloader.currentApplicationClassLoader could also
     * use some attention.
     */

    // Get the URLs for the resources in a classpath
    def urls(cp: Classpath): Array[URL] = cp.map(_.data.toURI.toURL).toArray

    // Support method to merge the output of two calls to ClassLoader.getResources(String) into a single result
    def combineResources(resources1: java.util.Enumeration[URL], resources2: java.util.Enumeration[URL]) =
      new java.util.Vector[java.net.URL]((resources1.asScala ++ resources2.asScala).toSeq.distinct.asJava).elements

    val sbtLoader = this.getClass.getClassLoader

    /**
     * ClassLoader that delegates loading of shared sbt link classes to the
     * sbtLoader. Also accesses the reloader resources to make these available
     * to the applicationLoader, creating a full circle for resource loading.
     */
    lazy val delegatingLoader: ClassLoader = new ClassLoader(commonLoader) {

      private val sbtSharedClasses = Seq(
        classOf[play.core.SBTLink].getName,
        classOf[play.core.SBTDocHandler].getName,
        classOf[play.core.server.ServerWithStop].getName,
        classOf[play.api.UsefulException].getName,
        classOf[play.api.PlayException].getName,
        classOf[play.api.PlayException.InterestingLines].getName,
        classOf[play.api.PlayException.RichDescription].getName,
        classOf[play.api.PlayException.ExceptionSource].getName,
        classOf[play.api.PlayException.ExceptionAttachment].getName)

      override def loadClass(name: String, resolve: Boolean): Class[_] = {
        if (sbtSharedClasses.contains(name)) {
          sbtLoader.loadClass(name)
        } else {
          super.loadClass(name, resolve)
        }
      }

      // -- Delegate resource loading. We have to hack here because the default implementation is already recursive.
      private val findResource = classOf[ClassLoader].getDeclaredMethod("findResource", classOf[String])
      findResource.setAccessible(true)

      override def getResource(name: String): java.net.URL = {
        val resource = reloader.currentApplicationClassLoader.map(findResource.invoke(_, name).asInstanceOf[java.net.URL]).orNull
        if (resource == null) {
          super.getResource(name)
        } else {
          resource
        }
      }

      private val findResources = classOf[ClassLoader].getDeclaredMethod("findResources", classOf[String])
      findResources.setAccessible(true)

      override def getResources(name: String): java.util.Enumeration[java.net.URL] = {
        val resources1 = reloader.currentApplicationClassLoader.map(findResources.invoke(_, name).asInstanceOf[java.util.Enumeration[java.net.URL]]).getOrElse(new java.util.Vector[java.net.URL]().elements)
        val resources2 = super.getResources(name)
        combineResources(resources1, resources2)
      }

      override def toString = {
        "DelegatingClassLoader, using parent: " + (getParent)
      }
    }

    lazy val applicationLoader = createClassLoader("PlayDependencyClassLoader", urls(appDependencyClasspath), delegatingLoader)
    lazy val reloader = newReloader(state, playReload, createReloader, reloaderClasspath, applicationLoader)

    // Now we're about to start, let's call the hooks:
    hooks.run(_.beforeStarted())

    // Get a handler for the documentation. The documentation content lives in play/docs/content
    // within the play-docs JAR.
    val docsLoader = new URLClassLoader(urls(docsAppClasspath), applicationLoader)
    val docsJarFile = {
      val f = docsAppClasspath.map(_.data).filter(_.getName.startsWith("play-docs")).head
      new JarFile(f)
    }
    val sbtDocHandler = {
      val docHandlerFactoryClass = docsLoader.loadClass("play.docs.SBTDocHandlerFactory")
      val factoryMethod = docHandlerFactoryClass.getMethod("fromJar", classOf[JarFile], classOf[String])
      factoryMethod.invoke(null, docsJarFile, "play/docs/content").asInstanceOf[SBTDocHandler]
    }

    // Start the Netty server reflectively, in HTTP or HTTPS-only mode
    val server = {
      val mainClass = applicationLoader.loadClass("play.core.server.NettyServer")
      if (httpPort.isDefined) {
        val mainDev = mainClass.getMethod("mainDevHttpMode", classOf[SBTLink], classOf[SBTDocHandler], classOf[Int])
        mainDev.invoke(null, reloader, sbtDocHandler, httpPort.get: java.lang.Integer).asInstanceOf[play.core.server.ServerWithStop]
      } else {
        val mainDev = mainClass.getMethod("mainDevOnlyHttpsMode", classOf[SBTLink], classOf[SBTDocHandler], classOf[Int])
        mainDev.invoke(null, reloader, sbtDocHandler, httpsPort.get: java.lang.Integer).asInstanceOf[play.core.server.ServerWithStop]
      }
    }

    // Notify hooks
    hooks.run(_.afterStarted(server.mainAddress))

    println()
    println(Colors.green("(Server started, use Ctrl+D to stop and go back to the console...)"))
    println()

    val ContinuousState = AttributeKey[WatchState]("watch state", "Internal: tracks state for continuous execution.")
    def isEOF(c: Int): Boolean = c == 4

    // Watch sources and recompile on change (~ run mode); returns once the user presses Ctrl+D
    @tailrec def executeContinuously(watched: Watched, s: State, reloader: SBTLink, ws: Option[WatchState] = None): Option[String] = {
      @tailrec def shouldTerminate: Boolean = (System.in.available > 0) && (isEOF(System.in.read()) || shouldTerminate)

      val sourcesFinder = PathFinder { watched watchPaths s }
      val watchState = ws.getOrElse(s get ContinuousState getOrElse WatchState.empty)

      val (triggered, newWatchState, newState) =
        try {
          val (triggered, newWatchState) = SourceModificationWatch.watch(sourcesFinder, watched.pollInterval, watchState)(shouldTerminate)
          (triggered, newWatchState, s)
        } catch {
          case e: Exception =>
            val log = s.log
            log.error("Error occurred obtaining files to watch.  Terminating continuous execution...")
            (false, watchState, s.fail)
        }

      if (triggered) {
        //Then launch compile
        Project.synchronized {
          val start = System.currentTimeMillis
          SbtProject.runTask(compile in Compile, newState).get._2.toEither.right.map { _ =>
            val duration = System.currentTimeMillis - start
            val formatted = duration match {
              case ms if ms < 1000 => ms + "ms"
              case s => (s / 1000) + "s"
            }
            println("[" + Colors.green("success") + "] Compiled in " + formatted)
          }
        }

        // Avoid launching too much compilation
        Thread.sleep(Watched.PollDelayMillis)

        // Call back myself
        executeContinuously(watched, newState, reloader, Some(newWatchState))
      } else {
        // Stop
        Some("Okay, i'm done")
      }
    }

    // If we have both Watched.Configuration and Watched.ContinuousState
    // attributes and if Watched.ContinuousState.count is 1 then we assume
    // we're in ~ run mode
    val maybeContinuous = state.get(Watched.Configuration).map { w =>
      state.get(Watched.ContinuousState).map { ws =>
        (ws.count == 1, w, ws)
      }.getOrElse((false, None, None))
    }.getOrElse((false, None, None))

    val newState = maybeContinuous match {
      case (true, w: sbt.Watched, ws) => {
        // ~ run mode
        interaction doWithoutEcho {
          executeContinuously(w, state, reloader, Some(WatchState.empty))
        }

        // Remove state two first commands added by sbt ~
        state.copy(remainingCommands = state.remainingCommands.drop(2)).remove(Watched.ContinuousState)
      }
      case _ => {
        // run mode
        interaction.waitForCancel()
        state
      }
    }

    server.stop()
    docsJarFile.close()
    reloader.clean()

    // Notify hooks
    hooks.run(_.afterStopped())

    // Remove Java properties
    properties.foreach {
      case (key, _) => System.clearProperty(key)
    }

    println()
  }
}
// `start` command: compile the application, then fork a production-mode NettyServer in a
// separate JVM, leaving it running in the background when the user detaches with Ctrl+D.
val playStartCommand = Command.args("start", "<port>") { (state: State, args: Seq[String]) =>

  val extracted = SbtProject.extract(state)

  val interaction = extracted.get(playInteractionMode)
  // Parse HTTP port argument
  val (properties, httpPort, httpsPort) = filterArgs(args, defaultHttpPort = extracted.get(playDefaultPort))
  require(httpPort.isDefined || httpsPort.isDefined, "You have to specify https.port when http.port is disabled")

  SbtProject.runTask(compile in Compile, state).get._2.toEither match {
    case Left(_) => {
      println()
      println("Cannot start with errors.")
      println()
      state.fail
    }
    case Right(_) => {

      SbtProject.runTask(dependencyClasspath in Runtime, state).get._2.toEither.right.map { dependencies =>

        //trigger a require build if needed
        SbtProject.runTask(buildRequire, state).get._2

        val classpath = dependencies.map(_.data).map(_.getCanonicalPath).reduceLeft(_ + java.io.File.pathSeparator + _)

        import java.lang.{ ProcessBuilder => JProcessBuilder }
        // Forward -D properties plus the resolved http.port to the forked JVM
        val builder = new JProcessBuilder(Seq(
          "java") ++ (properties ++ System.getProperties.asScala).map { case (key, value) => "-D" + key + "=" + value } ++ Seq("-Dhttp.port=" + httpPort.getOrElse("disabled"), "-cp", classpath, "play.core.server.NettyServer", extracted.currentProject.base.getCanonicalPath): _*)

        // Propagate the forked server's exit code when it terminates
        new Thread {
          override def run {
            System.exit(Process(builder) !)
          }
        }.start()

        println(Colors.green(
          """|
             |(Starting server. Type Ctrl+D to exit logs, the server will remain in background)
             |""".stripMargin))

        interaction.waitForCancel()

        println()

        state.copy(remainingCommands = Seq.empty)

      }.right.getOrElse {
        println()
        println("Oops, cannot start the server?")
        println()
        state.fail
      }
    }
  }

}
}
| michaelahlers/team-awesome-wedding | vendor/play-2.2.1/framework/src/sbt-plugin/src/main/scala/PlayRun.scala | Scala | mit | 17,311 |
package com.codemettle.akkasolr
import com.codemettle.akkasolr.solrtypes.SolrQueryResponse
import scala.concurrent.{ExecutionContext, Future}
/**
* Created by steven on 5/17/2018.
*/
package object client {
  /** Enrichment converting a Solr response into an `Either` based on its status and the update options. */
  implicit class RichResponse(val res: SolrQueryResponse) extends AnyVal {
    def toFailMessage(implicit opts: Solr.UpdateOptions): Either[Solr.UpdateError, SolrQueryResponse] = {
      val status = res.status
      if (status != 0 && opts.failOnNonZeroStatus)
        Left(Solr.UpdateError(status, res.errorMessageOpt))
      else
        Right(res)
    }
  }

  /** Enrichment failing a response future when the response's status indicates an error. */
  implicit class RichResponseFuture(val resF: Future[SolrQueryResponse]) extends AnyVal {
    def failIfNeeded(implicit opts: Solr.UpdateOptions, ec: ExecutionContext): Future[SolrQueryResponse] =
      resF flatMap (res => res.toFailMessage.fold(Future.failed, Future.successful))
  }
}
| CodeMettle/akka-solr | src/main/scala/com/codemettle/akkasolr/client/package.scala | Scala | apache-2.0 | 926 |
/**
* (c) Copyright 2013 WibiData, Inc.
*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kiji.express.flow
import cascading.pipe.Pipe
import cascading.tuple.Fields
import com.twitter.scalding.RichPipe
import com.twitter.scalding.TupleSetter
import org.apache.avro.Schema
import org.apache.avro.generic.GenericRecord
import org.kiji.annotations.ApiAudience
import org.kiji.annotations.ApiStability
import org.kiji.annotations.Inheritance
import org.kiji.express.flow.util.AvroGenericTupleConverter
/**
* A class that adds Kiji-specific functionality to a Cascading pipe, allowing the user to pack
* fields into an Avro record.
*
* A `KijiPipe` can be obtained by end-users during the course of authoring a Scalding flow via
* an implicit conversion or by constructing one directly with an existing pipe.
*
* @param pipe enriched with extra functionality.
*/
@ApiAudience.Public
@ApiStability.Stable
@Inheritance.Sealed
class KijiPipe(private[express] val pipe: Pipe) {
  /**
   * Packs the given input fields into an Avro [[org.apache.avro.generic.GenericRecord]],
   * keeping every incoming field and appending one new field that carries the record.
   * The input field names must match the fields of the record schema.
   *
   * @param fields maps the input fields (to be packed into the record) to the single
   *     output field that will hold the [[org.apache.avro.generic.GenericRecord]].
   * @param schema Avro record schema describing the packed record.
   * @return a pipe containing all input fields plus the new generic-record field.
   */
  def packGenericRecord(fields: (Fields, Fields))(schema: Schema): Pipe = {
    require(fields._2.size == 1, "Cannot pack generic record to more than a single field.")
    require(schema.getType == Schema.Type.RECORD, "Cannot pack non-record Avro type.")
    val converter = new AvroGenericTupleConverter(fields._1, schema)
    val setter = implicitly[TupleSetter[GenericRecord]]
    new RichPipe(pipe).map(fields) { record: GenericRecord => record } (converter, setter)
  }

  /**
   * Packs the given input fields into an Avro [[org.apache.avro.generic.GenericRecord]]
   * and drops every other field from the flow. The input field names must match the
   * fields of the record schema.
   *
   * @param fields maps the input fields (to be packed into the record) to the single
   *     output field that will hold the [[org.apache.avro.generic.GenericRecord]].
   * @param schema Avro record schema describing the packed record.
   * @return a pipe containing only the new generic-record field.
   */
  def packGenericRecordTo(fields: (Fields, Fields))(schema: Schema): Pipe = {
    require(fields._2.size == 1, "Cannot pack generic record to more than a single field.")
    require(schema.getType == Schema.Type.RECORD, "Cannot pack to non-record Avro type.")
    val converter = new AvroGenericTupleConverter(fields._1, schema)
    val setter = implicitly[TupleSetter[GenericRecord]]
    new RichPipe(pipe).mapTo(fields) { record: GenericRecord => record } (converter, setter)
  }
}
| kijiproject/kiji-express | kiji-express/src/main/scala/org/kiji/express/flow/KijiPipe.scala | Scala | apache-2.0 | 3,621 |
import FileName._
@main def Test = {
  // Build a FileName through the factory and print it.
  val name: FileName = ToFileName("fileName1")
  println(name)
}
| som-snytt/dotty | tests/run-macros/i8671/Test_2.scala | Scala | apache-2.0 | 113 |
/*
* Shadowsocks - A shadowsocks client for Android
* Copyright (C) 2014 <max.c.lv@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*
* ___====-_ _-====___
* _--^^^#####// \\#####^^^--_
* _-^##########// ( ) \\##########^-_
* -############// |\^^/| \\############-
* _/############// (@::@) \\############\_
* /#############(( \\// ))#############\
* -###############\\ (oo) //###############-
* -#################\\ / VV \ //#################-
* -###################\\/ \//###################-
* _#/|##########/\######( /\ )######/\##########|\#_
* |/ |#/\#/\#/\/ \#/\##\ | | /##/\#/ \/\#/\#/\#| \|
* ` |/ V V ` V \#\| | | |/#/ V ' V V \| '
* ` ` ` ` / | | | | \ ' ' ' '
* ( | | | | )
* __\ | | | | /__
* (vvv(VVV)(VVV)vvv)
*
* HERE BE DRAGONS
*
*/
package com.github.shadowsocks
import java.io.{FileOutputStream, IOException, InputStream, OutputStream}
import java.util
import java.util.Locale
import android.app.backup.BackupManager
import android.app.{Activity, ProgressDialog}
import android.content._
import android.content.res.AssetManager
import android.graphics.Typeface
import android.net.VpnService
import android.os._
import android.preference.{Preference, SwitchPreference}
import android.support.design.widget.{FloatingActionButton, Snackbar}
import android.support.v4.content.ContextCompat
import android.support.v7.app.AppCompatActivity
import android.support.v7.widget.Toolbar
import android.util.Log
import android.view.{View, ViewGroup, ViewParent}
import android.widget._
import com.github.jorgecastilloprz.FABProgressCircle
import com.github.shadowsocks.aidl.IShadowsocksServiceCallback
import com.github.shadowsocks.database._
import com.github.shadowsocks.preferences.{DropDownPreference, NumberPickerPreference, PasswordEditTextPreference, SummaryEditTextPreference}
import com.github.shadowsocks.utils._
import com.google.android.gms.ads.{AdRequest, AdSize, AdView}
import scala.collection.mutable.ArrayBuffer
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
object Typefaces {
  private final val TAG = "Typefaces"
  // Hashtable plus the synchronized block below guards the check-then-insert sequence.
  private final val cache = new util.Hashtable[String, Typeface]

  /**
   * Loads a typeface from the app's assets, memoizing it per asset path.
   * Returns null (after logging) when the asset cannot be loaded.
   */
  def get(c: Context, assetPath: String): Typeface = cache synchronized {
    if (cache.containsKey(assetPath)) {
      cache.get(assetPath)
    } else {
      try {
        val tf = Typeface.createFromAsset(c.getAssets, assetPath)
        cache.put(assetPath, tf)
        tf
      } catch {
        case e: Exception =>
          Log.e(TAG, "Could not get typeface '" + assetPath + "' because " + e.getMessage)
          null
      }
    }
  }
}
// Companion object: shared constants and helpers that push Profile values into the
// corresponding preference widgets.
object Shadowsocks {
  // Constants
  val TAG = "Shadowsocks"
  val REQUEST_CONNECT = 1
  val PREFS_NAME = "Shadowsocks"
  // Preference keys describing the proxy connection itself.
  val PROXY_PREFS = Array(Key.profileName, Key.proxy, Key.remotePort, Key.localPort, Key.sitekey, Key.encMethod,
    Key.isAuth)
  // Preference keys for optional features (routing, per-app proxy, UDP DNS, IPv6).
  val FEATURE_PREFS = Array(Key.route, Key.isProxyApps, Key.isUdpDns, Key.isIpv6)
  // Native executables shipped in assets; managed by install/reset/recovery below.
  val EXECUTABLES = Array(Executable.PDNSD, Executable.REDSOCKS, Executable.SS_TUNNEL, Executable.SS_LOCAL,
    Executable.TUN2SOCKS)
  // Helper functions
  // Each updateXxxPreference helper casts the generic Preference to its concrete
  // widget type and writes the given value (and, where applicable, the summary).
  def updateDropDownPreference(pref: Preference, value: String) {
    pref.asInstanceOf[DropDownPreference].setValue(value)
  }
  def updatePasswordEditTextPreference(pref: Preference, value: String) {
    pref.setSummary(value)
    pref.asInstanceOf[PasswordEditTextPreference].setText(value)
  }
  def updateNumberPickerPreference(pref: Preference, value: Int) {
    pref.asInstanceOf[NumberPickerPreference].setValue(value)
  }
  def updateSummaryEditTextPreference(pref: Preference, value: String) {
    pref.setSummary(value)
    pref.asInstanceOf[SummaryEditTextPreference].setText(value)
  }
  def updateSwitchPreference(pref: Preference, value: Boolean) {
    pref.asInstanceOf[SwitchPreference].setChecked(value)
  }
  // Dispatches a profile value to the matching widget by preference key.
  // NOTE(review): an unknown key throws a MatchError — callers only pass keys from
  // PROXY_PREFS/FEATURE_PREFS above.
  def updatePreference(pref: Preference, name: String, profile: Profile) {
    name match {
      case Key.profileName => updateSummaryEditTextPreference(pref, profile.name)
      case Key.proxy => updateSummaryEditTextPreference(pref, profile.host)
      case Key.remotePort => updateNumberPickerPreference(pref, profile.remotePort)
      case Key.localPort => updateNumberPickerPreference(pref, profile.localPort)
      case Key.sitekey => updatePasswordEditTextPreference(pref, profile.password)
      case Key.encMethod => updateDropDownPreference(pref, profile.method)
      case Key.route => updateDropDownPreference(pref, profile.route)
      case Key.isProxyApps => updateSwitchPreference(pref, profile.proxyApps)
      case Key.isUdpDns => updateSwitchPreference(pref, profile.udpdns)
      case Key.isAuth => updateSwitchPreference(pref, profile.auth)
      case Key.isIpv6 => updateSwitchPreference(pref, profile.ipv6)
    }
  }
}
// Main activity: hosts the settings fragment, the connect FAB, and the binding to the
// background Shadowsocks service; mirrors service state changes back into the UI.
class Shadowsocks
  extends AppCompatActivity with ServiceBoundContext {
  // Variables
  var serviceStarted = false
  var fab: FloatingActionButton = _
  var fabProgressCircle: FABProgressCircle = _
  var progressDialog: ProgressDialog = _
  var progressTag = -1
  var state = State.INIT
  var prepared = false
  var currentProfile = new Profile
  var vpnEnabled = -1
  // Services
  var currentServiceName = classOf[ShadowsocksNatService].getName
  // Called once the background service is bound: refreshes the UI and, on first run of
  // this app version, triggers recovery and migrates old String ports to Int.
  override def onServiceConnected() {
    // Update the UI
    if (fab != null) fab.setEnabled(true)
    stateUpdate()
    if (!ShadowsocksApplication.settings.getBoolean(ShadowsocksApplication.getVersionName, false)) {
      ShadowsocksApplication.settings.edit.putBoolean(ShadowsocksApplication.getVersionName, true).apply()
      recovery()
      try {
        // Workaround that convert port(String) to port(Int)
        val oldLocalPort = ShadowsocksApplication.settings.getString("port", "-1")
        val oldRemotePort = ShadowsocksApplication.settings.getString("remotePort", "-1")
        if (oldLocalPort != "-1") {
          ShadowsocksApplication.settings.edit.putInt(Key.localPort, oldLocalPort.toInt).commit()
        }
        if (oldRemotePort != "-1") {
          ShadowsocksApplication.settings.edit.putInt(Key.remotePort, oldRemotePort.toInt).commit()
        }
      } catch {
        case ex: Exception => // Ignore
      }
    }
  }
  override def onServiceDisconnected() {
    if (fab != null) fab.setEnabled(false)
  }
  // Formats the current traffic rates/totals and shows them in the "stat" preference
  // summary; posts to the UI thread since this may be invoked from the service callback.
  def trafficUpdated(txRate: String, rxRate: String, txTotal: String, rxTotal: String) {
    val trafficStat = getString(R.string.stat_summary)
      .formatLocal(Locale.ENGLISH, txRate, rxRate, txTotal, rxTotal)
    handler.post(() => {
      preferences.findPreference(Key.stat).setSummary(trafficStat)
    })
  }
  private lazy val preferences =
    getFragmentManager.findFragmentById(android.R.id.content).asInstanceOf[ShadowsocksSettings]
  private var adView: AdView = _
  private lazy val greyTint = ContextCompat.getColorStateList(this, R.color.material_blue_grey_700)
  private lazy val greenTint = ContextCompat.getColorStateList(this, R.color.material_green_700)
  var handler = new Handler()
  // Mirrors the started/stopped state on the FAB icon and debounces rapid taps by
  // disabling the button for one second.
  private def changeSwitch(checked: Boolean) {
    serviceStarted = checked
    fab.setImageResource(if (checked) R.drawable.ic_cloud else R.drawable.ic_cloud_off)
    if (fab.isEnabled) {
      fab.setEnabled(false)
      handler.postDelayed(() => fab.setEnabled(true), 1000)
    }
  }
  // Shows a modal, non-cancelable progress dialog; the returned Handler dismisses it
  // when it receives any message.
  private def showProgress(msg: Int): Handler = {
    clearDialog()
    progressDialog = ProgressDialog.show(this, "", getString(msg), true, false)
    progressTag = msg
    new Handler {
      override def handleMessage(msg: Message) {
        clearDialog()
      }
    }
  }
  // Copies every asset under `path` into Path.BASE; failures are logged and skipped.
  private def copyAssets(path: String) {
    val assetManager: AssetManager = getAssets
    var files: Array[String] = null
    try {
      files = assetManager.list(path)
    } catch {
      case e: IOException =>
        Log.e(Shadowsocks.TAG, e.getMessage)
    }
    if (files != null) {
      for (file <- files) {
        var in: InputStream = null
        var out: OutputStream = null
        try {
          if (path.length > 0) {
            in = assetManager.open(path + "/" + file)
          } else {
            in = assetManager.open(file)
          }
          out = new FileOutputStream(Path.BASE + file)
          copyFile(in, out)
          in.close()
          in = null
          out.flush()
          out.close()
          out = null
        } catch {
          case ex: Exception =>
            Log.e(Shadowsocks.TAG, ex.getMessage)
        }
      }
    }
  }
  // Streams `in` to `out` in 1 KiB chunks until EOF.
  private def copyFile(in: InputStream, out: OutputStream) {
    val buffer: Array[Byte] = new Array[Byte](1024)
    var read: Int = 0
    while ( {
      read = in.read(buffer)
      read
    } != -1) {
      out.write(buffer, 0, read)
    }
  }
  // Best-effort cleanup after a crash: kills leftover native processes (by their pid
  // files) and removes stale pid/conf files; uses root commands in NAT mode.
  private def crashRecovery() {
    val cmd = new ArrayBuffer[String]()
    for (task <- Array("ss-local", "ss-tunnel", "pdnsd", "redsocks", "tun2socks")) {
      cmd.append("chmod 666 %s%s-nat.pid".formatLocal(Locale.ENGLISH, Path.BASE, task))
      cmd.append("chmod 666 %s%s-vpn.pid".formatLocal(Locale.ENGLISH, Path.BASE, task))
    }
    if (!ShadowsocksApplication.isVpnEnabled) {
      Console.runRootCommand(cmd.toArray)
    } else {
      Console.runCommand(cmd.toArray)
    }
    cmd.clear()
    for (task <- Array("ss-local", "ss-tunnel", "pdnsd", "redsocks", "tun2socks")) {
      try {
        val pid_nat = scala.io.Source.fromFile(Path.BASE + task + "-nat.pid").mkString.trim.toInt
        val pid_vpn = scala.io.Source.fromFile(Path.BASE + task + "-vpn.pid").mkString.trim.toInt
        cmd.append("kill -9 %d".formatLocal(Locale.ENGLISH, pid_nat))
        cmd.append("kill -9 %d".formatLocal(Locale.ENGLISH, pid_vpn))
        Process.killProcess(pid_nat)
        Process.killProcess(pid_vpn)
      } catch {
        case e: Throwable => Log.e(Shadowsocks.TAG, "unable to kill " + task)
      }
      cmd.append("rm -f %s%s-nat.pid".formatLocal(Locale.ENGLISH, Path.BASE, task))
      cmd.append("rm -f %s%s-nat.conf".formatLocal(Locale.ENGLISH, Path.BASE, task))
      cmd.append("rm -f %s%s-vpn.pid".formatLocal(Locale.ENGLISH, Path.BASE, task))
      cmd.append("rm -f %s%s-vpn.conf".formatLocal(Locale.ENGLISH, Path.BASE, task))
    }
    Console.runCommand(cmd.toArray)
    if (!ShadowsocksApplication.isVpnEnabled) {
      Console.runRootCommand(cmd.toArray)
      Console.runRootCommand(Utils.getIptables + " -t nat -F OUTPUT")
    }
  }
  def cancelStart() {
    clearDialog()
    changeSwitch(checked = false)
  }
  // A start attempt is only valid with a proxy host, a password, and a bound service.
  def isReady: Boolean = {
    if (!checkText(Key.proxy)) return false
    if (!checkText(Key.sitekey)) return false
    if (bgService == null) return false
    true
  }
  // Off the UI thread: in VPN mode asks for VPN permission first (or short-circuits to
  // the RESULT_OK path), otherwise starts the NAT service directly.
  def prepareStartService() {
    Future {
      if (ShadowsocksApplication.isVpnEnabled) {
        val intent = VpnService.prepare(this)
        if (intent != null) {
          startActivityForResult(intent, Shadowsocks.REQUEST_CONNECT)
        } else {
          handler.post(() => onActivityResult(Shadowsocks.REQUEST_CONNECT, Activity.RESULT_OK, null))
        }
      } else {
        serviceStart()
      }
    }
  }
  // Walks up the view hierarchy until a LinearLayout is found; null when none exists.
  def getLayoutView(view: ViewParent): LinearLayout = {
    view match {
      case layout: LinearLayout => layout
      case _ => if (view != null) getLayoutView(view.getParent) else null
    }
  }
  override def onCreate(savedInstanceState: Bundle) {
    super.onCreate(savedInstanceState)
    setContentView(R.layout.layout_main)
    // Initialize Toolbar
    val toolbar = findViewById(R.id.toolbar).asInstanceOf[Toolbar]
    toolbar.setTitle(getString(R.string.screen_name))
    toolbar.setTitleTextAppearance(toolbar.getContext, R.style.Toolbar_Logo)
    // Reflection: Toolbar does not expose its title TextView, but we need it to
    // install the custom logo typeface.
    val field = classOf[Toolbar].getDeclaredField("mTitleTextView")
    field.setAccessible(true)
    val title: TextView = field.get(toolbar).asInstanceOf[TextView]
    val tf: Typeface = Typefaces.get(this, "fonts/Iceland.ttf")
    if (tf != null) title.setTypeface(tf)
    fab = findViewById(R.id.fab).asInstanceOf[FloatingActionButton]
    fabProgressCircle = findViewById(R.id.fabProgressCircle).asInstanceOf[FABProgressCircle]
    // FAB toggles the connection; long-press shows a hint toast.
    fab.setOnClickListener((v: View) => {
      serviceStarted = !serviceStarted
      serviceStarted match {
        case true =>
          if (isReady)
            prepareStartService()
          else
            changeSwitch(checked = false)
        case false =>
          serviceStop()
      }
    })
    fab.setOnLongClickListener((v: View) => {
      Utils.positionToast(Toast.makeText(this, if (serviceStarted) R.string.stop else R.string.connect,
        Toast.LENGTH_SHORT), fab, getWindow, 0, Utils.dpToPx(this, 8)).show
      true
    })
    // Bind to the service
    handler.post(() => {
      attachService(new IShadowsocksServiceCallback.Stub {
        override def stateChanged(state: Int, msg: String) {
          onStateChanged(state, msg)
        }
        override def trafficUpdated(txRate: String, rxRate: String, txTotal: String, rxTotal: String) {
          Shadowsocks.this.trafficUpdated(txRate, rxRate, txTotal, rxTotal)
        }
      })
    })
  }
  // Re-reads the current profile (falling back to a default/first profile when the
  // previous one was removed), refreshes the screen and stops any running service.
  def reloadProfile() {
    currentProfile = ShadowsocksApplication.currentProfile match {
      case Some(profile) => profile // updated
      case None => // removed
        val profiles = ShadowsocksApplication.profileManager.getAllProfiles.getOrElse(List[Profile]())
        if (profiles.isEmpty) ShadowsocksApplication.profileManager.createDefault()
        else ShadowsocksApplication.switchProfile(profiles.head.id)
    }
    updatePreferenceScreen()
    serviceStop()
  }
  protected override def onPause() {
    super.onPause()
    ShadowsocksApplication.profileManager.save
    prepared = false
  }
  // Pulls the current service state and syncs FAB tint, switch position, progress
  // circle and preference enablement accordingly.
  private def stateUpdate() {
    if (bgService != null) {
      bgService.getState match {
        case State.CONNECTING =>
          fab.setBackgroundTintList(greyTint)
          changeSwitch(checked = true)
          setPreferenceEnabled(false)
          fabProgressCircle.show()
        case State.CONNECTED =>
          fab.setBackgroundTintList(greenTint)
          changeSwitch(checked = true)
          setPreferenceEnabled(false)
          fabProgressCircle.show()
          handler.postDelayed(() => fabProgressCircle.hide(), 1000)
        case State.STOPPING =>
          fab.setBackgroundTintList(greyTint)
          changeSwitch(checked = false)
          setPreferenceEnabled(false)
          fabProgressCircle.show()
        case _ =>
          fab.setBackgroundTintList(greyTint)
          changeSwitch(checked = false)
          setPreferenceEnabled(true)
          fabProgressCircle.show()
          handler.postDelayed(() => fabProgressCircle.hide(), 1000)
      }
      state = bgService.getState
    }
  }
  protected override def onResume() {
    super.onResume()
    stateUpdate()
    ConfigUtils.refresh(this)
    // Check if current profile changed
    if (ShadowsocksApplication.profileId != currentProfile.id) reloadProfile()
    trafficUpdated(TrafficMonitor.getTxRate, TrafficMonitor.getRxRate,
      TrafficMonitor.getTxTotal, TrafficMonitor.getRxTotal)
  }
  // Enables/disables all proxy and feature preferences; the per-app proxy toggle is
  // additionally gated on Lollipop+ or NAT mode.
  private def setPreferenceEnabled(enabled: Boolean) {
    preferences.findPreference(Key.isNAT).setEnabled(enabled)
    for (name <- Shadowsocks.PROXY_PREFS) {
      val pref = preferences.findPreference(name)
      if (pref != null) {
        pref.setEnabled(enabled)
      }
    }
    for (name <- Shadowsocks.FEATURE_PREFS) {
      val pref = preferences.findPreference(name)
      if (pref != null) {
        if (name == Key.isProxyApps) {
          pref.setEnabled(enabled && (Utils.isLollipopOrAbove || !ShadowsocksApplication.isVpnEnabled))
        } else {
          pref.setEnabled(enabled)
        }
      }
    }
  }
  // Pushes the current profile's values into every preference widget. When the profile
  // points at the hard-coded host below, an AdMob banner is injected above the list.
  private def updatePreferenceScreen() {
    val profile = currentProfile
    if (profile.host == "198.199.101.152" && adView == null) {
      adView = new AdView(this)
      adView.setAdUnitId("ca-app-pub-9097031975646651/7760346322")
      adView.setAdSize(AdSize.SMART_BANNER)
      preferences.getView.asInstanceOf[ViewGroup].addView(adView, 0)
      adView.loadAd(new AdRequest.Builder().build())
    }
    for (name <- Shadowsocks.PROXY_PREFS) {
      val pref = preferences.findPreference(name)
      Shadowsocks.updatePreference(pref, name, profile)
    }
    for (name <- Shadowsocks.FEATURE_PREFS) {
      val pref = preferences.findPreference(name)
      Shadowsocks.updatePreference(pref, name, profile)
    }
  }
  override def onStop() {
    super.onStop()
    clearDialog()
  }
  override def onDestroy() {
    super.onDestroy()
    deattachService()
    new BackupManager(this).dataChanged()
    handler.removeCallbacksAndMessages(null)
  }
  // Installs the bundled binaries into /system/bin via root (remounts /system rw).
  def copyToSystem() {
    val ab = new ArrayBuffer[String]
    ab.append("mount -o rw,remount -t yaffs2 /dev/block/mtdblock3 /system")
    for (executable <- Shadowsocks.EXECUTABLES) {
      ab.append("cp %s%s /system/bin/".formatLocal(Locale.ENGLISH, Path.BASE, executable))
      ab.append("chmod 755 /system/bin/" + executable)
      ab.append("chown root:shell /system/bin/" + executable)
    }
    ab.append("mount -o ro,remount -t yaffs2 /dev/block/mtdblock3 /system")
    Console.runRootCommand(ab.toArray)
  }
  // Copies the ABI-specific binaries out of assets and marks them executable.
  def install() {
    copyAssets(System.getABI)
    val ab = new ArrayBuffer[String]
    for (executable <- Shadowsocks.EXECUTABLES) {
      ab.append("chmod 755 " + Path.BASE + executable)
    }
    Console.runCommand(ab.toArray)
  }
  def reset() {
    crashRecovery()
    install()
  }
  // Stops the service and reinstalls the binaries behind a progress dialog.
  def recovery() {
    serviceStop()
    val h = showProgress(R.string.recovering)
    Future {
      reset()
      h.sendEmptyMessage(0)
    }
  }
  // Flushes the system DNS cache by toggling airplane mode, behind a progress dialog.
  def flushDnsCache() {
    val h = showProgress(R.string.flushing)
    Future {
      Utils.toggleAirplaneMode(getBaseContext)
      h.sendEmptyMessage(0)
    }
  }
  // Result of the VPN permission dialog started in prepareStartService().
  override def onActivityResult(requestCode: Int, resultCode: Int, data: Intent) = resultCode match {
    case Activity.RESULT_OK =>
      prepared = true
      serviceStart()
    case _ =>
      cancelStart()
      Log.e(Shadowsocks.TAG, "Failed to start VpnService")
  }
  def serviceStop() {
    if (bgService != null) bgService.stop()
  }
  // Returns true when the preference has a non-empty value; otherwise shows a snackbar.
  def checkText(key: String): Boolean = {
    val text = ShadowsocksApplication.settings.getString(key, "")
    if (text != null && text.length > 0) return true
    Snackbar.make(findViewById(android.R.id.content), getString(R.string.proxy_empty), Snackbar.LENGTH_LONG).show
    false
  }
  /** Called when connect button is clicked. */
  def serviceStart() {
    bgService.start(ConfigUtils.load(ShadowsocksApplication.settings))
    if (ShadowsocksApplication.isVpnEnabled) {
      changeSwitch(checked = false)
    }
  }
  def clearDialog() {
    if (progressDialog != null) {
      progressDialog.dismiss()
      progressDialog = null
      progressTag = -1
    }
  }
  // Service state callback; marshalled onto the UI thread and ignored when the state
  // did not actually change.
  def onStateChanged(s: Int, m: String) {
    handler.post(() => if (state != s) {
      s match {
        case State.CONNECTING =>
          fab.setBackgroundTintList(greyTint)
          fab.setImageResource(R.drawable.ic_cloud_queue)
          fab.setEnabled(false)
          fabProgressCircle.show()
          setPreferenceEnabled(enabled = false)
        case State.CONNECTED =>
          fab.setBackgroundTintList(greenTint)
          if (state == State.CONNECTING) {
            fabProgressCircle.beginFinalAnimation()
          } else {
            handler.postDelayed(() => fabProgressCircle.hide(), 1000)
          }
          fab.setEnabled(true)
          changeSwitch(checked = true)
          setPreferenceEnabled(enabled = false)
        case State.STOPPED =>
          fab.setBackgroundTintList(greyTint)
          handler.postDelayed(() => fabProgressCircle.hide(), 1000)
          fab.setEnabled(true)
          changeSwitch(checked = false)
          if (m != null) Snackbar.make(findViewById(android.R.id.content),
            getString(R.string.vpn_error).formatLocal(Locale.ENGLISH, m), Snackbar.LENGTH_LONG).show
          setPreferenceEnabled(enabled = true)
        case State.STOPPING =>
          fab.setBackgroundTintList(greyTint)
          fab.setImageResource(R.drawable.ic_cloud_queue)
          fab.setEnabled(false)
          if (state == State.CONNECTED) fabProgressCircle.show() // ignore for stopped
          setPreferenceEnabled(enabled = false)
      }
      state = s
    })
  }
}
| a642500/shadowsocks-android | src/main/scala/com/github/shadowsocks/Shadowsocks.scala | Scala | gpl-3.0 | 21,297 |
import play.api.libs.iteratee._
import reactivemongo.api.gridfs.{ ReadFile, DefaultFileToSave, GridFS }
import reactivemongo.api.gridfs.Implicits._
import reactivemongo.bson._
import scala.concurrent._
import reactivemongo.api.gridfs
// Sequential integration spec: the three examples share one GridFS instance and one
// stored file, so execution order (store -> find -> delete) matters.
object GridfsSpec extends org.specs2.mutable.Specification {
  "GridFS" title
  import Common._
  sequential
  lazy val gfs = GridFS(db)
  lazy val file = DefaultFileToSave(Some("somefile"), Some("application/file"))
  // Content is the bytes 1..100; verified again after the round-trip below.
  lazy val fileContent = Enumerator((1 to 100).view.map(_.toByte).toArray)
  "ReactiveMongo" should {
    "store a file in gridfs" in {
      gfs.save(fileContent, file).map(_.filename).
        aka("filename") must beSome("somefile").await(timeoutMillis)
    }
    "find this file in gridfs" in {
      val futureFile = gfs.find(BSONDocument("filename" -> "somefile")).collect[List]()
      val actual = Await.result(futureFile, timeout).head
      // Metadata must survive the round-trip.
      (actual.filename mustEqual file.filename) and
        (actual.uploadDate must beSome) and
        (actual.contentType mustEqual file.contentType)
      import scala.collection.mutable.ArrayBuilder
      // Re-read the chunks and accumulate them into a single byte array.
      val res = Await.result(gfs.enumerate(actual) |>>> Iteratee.fold(ArrayBuilder.make[Byte]()) { (result, arr) =>
        result ++= arr
      }, timeout)
      res.result mustEqual ((1 to 100).map(_.toByte).toArray)
    }
    "delete this file from gridfs" in {
      gfs.remove(file.id).map(_.n) must beEqualTo(1).await(timeoutMillis)
    }
  }
}
| charleskubicek/ReactiveMongo | driver/src/test/scala/GridfsSpec.scala | Scala | apache-2.0 | 1,457 |
package scales.report
import scales.Coverage
/** @author Stephen Samuel */
class CoberturaXmlWriter extends ScalesWriter {
  /** Placeholder implementation: accepts the coverage model and emits nothing yet. */
  def write(coverage: Coverage): Unit = ()
}
| crvidya/scales | src/main/scala/scales/report/CoberturaXmlWriter.scala | Scala | apache-2.0 | 162 |
package org.scalatra
import java.util.concurrent.Executors
import _root_.akka.actor._
import org.eclipse.jetty.server.{ Connector, ServerConnector, Server }
import org.eclipse.jetty.util.thread.QueuedThreadPool
import org.scalatra.test.HttpComponentsClient
import org.scalatra.test.specs2.MutableScalatraSpec
import scala.concurrent._
import scala.concurrent.duration._
// Fixture servlet for AkkaSupportSpec: each route exercises one aspect of
// FutureSupport (async replies, timeouts, failures, halts, redirects, attributes).
class AkkaSupportServlet extends ScalatraServlet with FutureSupport {
  val system = ActorSystem()
  protected implicit val executor = system.dispatcher
  // Requests taking longer than this are answered with a gateway timeout.
  override def asyncTimeout = 2 seconds
  // Single-threaded executor so the /async-attributes futures are serialized,
  // maximizing the chance of observing cross-request attribute leakage.
  private val futureEC = ExecutionContext.fromExecutor(Executors.newFixedThreadPool(1))
  get("/redirect") {
    new AsyncResult {
      val is: Future[_] = Future {
        redirect("redirected")
      }
    }
  }
  get("/async-oh-noes") {
    new AsyncResult {
      val is = Future {
        Thread.sleep(100) // To get the container to give up the request
        Ok(body = s"${request.getContextPath}")
      }
    }
  }
  // Stores a per-request attribute, then reads it back after a delay on another
  // thread; used by the spec to prove attributes don't leak between requests.
  get("/async-attributes/:mockSessionId") {
    request.setAttribute("sessionId", params("mockSessionId"))
    // NOTE(review): handlingReq is never read — presumably kept to pin the handling
    // request in the closure; confirm before removing.
    val handlingReq = request
    new AsyncResult {
      val is = Future {
        Thread.sleep(200)
        Ok(body = request.getAttribute("sessionId"))
      }(futureEC)
    }
  }
  get("/redirected") {
    "redirected"
  }
  asyncGet("/working") {
    "the-working-reply"
  }
  // Sleeps past asyncTimeout so the spec can assert the 504 path.
  asyncGet("/timeout") {
    Thread.sleep((asyncTimeout plus 1.second).toMillis)
  }
  class FailException extends RuntimeException
  // Handled by the error {} block below.
  asyncGet("/fail") {
    throw new FailException
  }
  class FailHarderException extends RuntimeException
  // Deliberately unhandled: should surface as a 500.
  asyncGet("/fail-harder") {
    throw new FailHarderException
  }
  asyncGet("/halt") {
    halt(419)
  }
  asyncGet("/*.jpg") {
    "jpeg"
  }
  // Maps the "jpeg" body marker to an image content type; everything else defers
  // to the default inferrer.
  override protected def contentTypeInferrer = ({
    case "jpeg" => "image/jpeg"
  }: ContentTypeInferrer) orElse super.contentTypeInferrer
  error {
    case e: FailException => "caught"
  }
  override def destroy() {
    super.destroy()
    system.shutdown()
  }
}
// End-to-end spec for FutureSupport, driven against a real Jetty instance running
// AkkaSupportServlet. (The class's closing brace follows on the next line.)
class AkkaSupportSpec extends MutableScalatraSpec {
  sequential
  // Custom server with a bounded thread pool so async behavior under limited
  // worker threads is actually exercised.
  override lazy val server = {
    /*
    Min threads for Jetty is 6 because: acceptors=1 + selectors=4 + request=1
    so 16 max and 6 min -> 10 worker threads
    */
    val threadPool = new QueuedThreadPool(16, 6)
    val server = new Server(threadPool)
    val connector: ServerConnector = new ServerConnector(server)
    connector.setPort(port)
    server.setConnectors(Array[Connector](connector))
    server
  }
  addServlet(new AkkaSupportServlet, "/*")
  "The AkkaSupport" should {
    "render the reply of an actor" in {
      get("/working") {
        body must_== "the-working-reply"
      }
    }
    "respond with timeout if no timely reply from the actor" in {
      get("/timeout") {
        status must_== 504
        body must_== "Gateway timeout"
      }
    }
    "handle an async exception" in {
      get("/fail") {
        body must contain("caught")
      }
    }
    "return 500 for an unhandled async exception" in {
      get("/fail-harder") {
        status must_== 500
      }
    }
    "render a halt" in {
      get("/halt") {
        status must_== 419
      }
    }
    "infers the content type of the future result" in {
      get("/foo.jpg") {
        header("Content-Type") must startWith("image/jpeg")
      }
    }
    "redirect with the redirect method" in {
      get("/redirect") {
        status must_== 302
        response.header("Location") must_== (baseUrl + "/redirected")
      }
    }
    "have a stable request" in {
      get("/async-oh-noes") {
        body must_== ""
        // body must not be_== "null"
      }
    }
    // Fires 50 concurrent requests, each tagging the request with its own id, and
    // verifies every response echoes back the id it was sent with.
    "should not leak attributes between requests" in {
      implicit val multiClentEc = ExecutionContext.fromExecutor(Executors.newFixedThreadPool(50))
      val ids = (1 to 50).map(_ => scala.util.Random.nextInt())
      val serverBaseUrl = baseUrl
      val idsToResponseFs = ids.map { id =>
        val client = new HttpComponentsClient {
          override val baseUrl: String = serverBaseUrl
        }
        Future {
          blocking {
            id.toString -> client.get(s"/async-attributes/$id") {
              client.body
            }
          }
        }(multiClentEc)
      }
      val fIdsToresponses = Future.sequence(idsToResponseFs)
      val idsToResponses = Await.result(fIdsToresponses, 60.seconds)
      foreachWhen(idsToResponses) {
        case (expected, actual) => {
          expected must_== actual
        }
      }
    }
  }
} | seratch/scalatra | core/src/test/scala/org/scalatra/AkkaSupportSpec.scala | Scala | bsd-2-clause | 4,565 |
package se.uu.farmbio.cp
import org.apache.spark.SharedSparkContext
import org.apache.spark.mllib.linalg.Vector
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.regression.LabeledPoint
import org.apache.spark.rdd.RDD
import org.junit.runner.RunWith
import scala.util.Random
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
import org.scalatest.mock.MockitoSugar
import org.mockito.Mockito._
private[cp] object OneNNClassifier {
  /**
   * Builds a 1-nearest-neighbour classifier: the returned function labels a feature
   * vector with the class of the closest training example (squared Euclidean distance).
   */
  def createModel(training: Array[LabeledPoint]) = (features: Vector) => {
    // Fix: the previous implementation sorted (label, distance) tuples, which orders
    // lexicographically by label first and therefore returned the smallest label
    // rather than the nearest neighbour's class.
    training.minBy(point => Vectors.sqdist(point.features, features)).label
  }
}
// This will not work for big input RDDs; however, it is only test scaffolding.
private[cp] class OneNNClassifier(
  val model: Vector => Double,
  val training: Array[LabeledPoint])
  extends UnderlyingAlgorithm(model) {
  /** Convenience constructor: materialises the RDD locally and builds a 1-NN model from it. */
  def this(input: RDD[LabeledPoint]) = {
    this(OneNNClassifier.createModel(input.collect), input.collect)
  }
  /**
   * Non-conformity of a sample: the average squared distance to the training points
   * sharing its label, or Double.MaxValue for a label never seen in training.
   */
  override def nonConformityMeasure(newSample: LabeledPoint) = {
    val filtTrain = training.filter(_.label == newSample.label)
    if (filtTrain.isEmpty) {
      //New class, then the new sample is very non conforming
      Double.MaxValue
    } else {
      //Avg distance from the previous samples of same class
      // (The previous version sorted the distances first — pointless work for a sum.)
      val distances = filtTrain.map(point =>
        Vectors.sqdist(point.features, newSample.features))
      distances.sum / filtTrain.length
    }
  }
}
@RunWith(classOf[JUnitRunner])
// Unit tests for inductive conformal prediction (ICP) on Spark: validity of single
// and aggregated classifiers, calibration splitting, metrics, and (de)serialization.
// (The class's closing brace follows on the next line.)
class ICPTest extends FunSuite with SharedSparkContext with MockitoSugar {
  // Fixed seed keeps the randomized generators reproducible across runs.
  Random.setSeed(11)
  // Validity check: over many random splits, the mean error rate of an ICP must not
  // exceed the chosen significance level.
  test("ICP classification") {
    val significance = 0.20
    val errFracts = (0 to 100).map { _ =>
      val (training, calibration, test) = TestUtils.generate4ClassesTrainCalibTest(significance)
      val alg = new OneNNClassifier(sc.parallelize(training))
      val model = ICP.trainClassifier(alg, numClasses = 4, calibration)
      //compute error fraction
      val errors = test.count { p =>
        val region = model.predict(p.features, significance)
        !region.contains(p.label)
      }
      errors.toDouble / test.length.toDouble
    }
    val meanError = errFracts.sum / errFracts.length
    assert(meanError <= significance)
  }
  // The split must produce disjoint sets of the requested sizes whose union is the input.
  test("calibration and training split") {
    val input = (1 to 100).map(i => new LabeledPoint(i, Vectors.dense(i)))
    val (calibration, trainingRDD) = ICP.calibrationSplit(sc.parallelize(input), 30)
    val training = trainingRDD.collect
    val concat = calibration ++ training
    assert(calibration.length == 30)
    assert(training.length == 70)
    assert(concat.length == 100)
    concat.sortBy(_.label).zip(input).foreach {
      case (x, y) => assert(x == y)
    }
  }
  // Stratified split must preserve the 20/80 class proportions inside the calibration set.
  test("stratified calibration and training split") {
    val input = (1 to 1000).map(
      i => new LabeledPoint(
        if (i <= 200) 1.0 else 0.0,
        Vectors.dense(i)))
    val (calibration, trainingRDD) = ICP.calibrationSplit(
      sc.parallelize(input), 300, stratified = true)
    val training = trainingRDD.collect
    val concat = calibration ++ training
    assert(calibration.filter(_.label == 1.0).length == 60)
    assert(calibration.filter(_.label == 0.0).length == 240)
    assert(training.length == 700)
    assert(concat.length == 1000)
    concat.sortBy(_.features.toArray(0)).zip(input).foreach {
      case (x, y) => assert(x == y)
    }
  }
  // An aggregation of many independently trained ICPs must remain valid as well.
  test("aggregated ICPs classification") {
    val significance = 0.20
    val test = TestUtils.generate4ClassesData(instances = 20,
      seed = Random.nextLong)
    val icps = (0 to 100).map { _ =>
      val (training, calibration, _) = TestUtils.generate4ClassesTrainCalibTest(significance)
      val alg = new OneNNClassifier(sc.parallelize(training))
      ICP.trainClassifier(alg, numClasses = 4, calibration)
    }
    val aggr = new AggregatedICPClassifier(icps)
    val errors = test.count { p =>
      val region = aggr.predict(p.features, significance)
      !region.contains(p.label)
    }
    val meanError = errors.toDouble / test.length.toDouble
    assert(meanError <= significance)
  }
  // Metrics computed by BinaryClassificationICPMetrics must agree with values
  // recomputed here directly from model.predict at each significance level.
  test("binary classification metrics") {
    val Seq(training, calibration, test) =
      Seq(100, 10, 20).map { instances =>
        TestUtils.generateBinaryData(instances, Random.nextInt)
      }
    val alg = new OneNNClassifier(sc.parallelize(training))
    val model = ICP.trainClassifier(alg, numClasses = 2, calibration.toArray)
    val mondrianPvAndLabels = sc.parallelize(test).map {
      p => (model.mondrianPv(p.features), p.label)
    }
    val metrics = new BinaryClassificationICPMetrics(mondrianPvAndLabels)
    val effAndErrBySig = metrics.significances.map { sig =>
      val efficiency = test.count { p =>
        model.predict(p.features, sig).size == 1
      }.toDouble / test.length
      val errorRate = test.count { p =>
        !model.predict(p.features, sig).contains(p.label)
      }.toDouble / test.length
      val recall = test.count { p =>
        val set = model.predict(p.features, sig)
        set == Set(1.0) && p.label == 1.0
      }.toDouble / test.count(_.label == 1.0)
      val validity = errorRate <= sig
      (sig, efficiency, errorRate, recall, validity)
    }
    val effBySig = effAndErrBySig.map(t => (t._1, t._2))
    assert(metrics.efficiencyBySignificance sameElements effBySig)
    val errRateBySig = effAndErrBySig.map(t => (t._1, t._3))
    assert(metrics.errorRateBySignificance sameElements errRateBySig)
    val recBySig = effAndErrBySig.map(t => (t._1, t._4))
    assert(metrics.recallBySignificance sameElements recBySig)
    val valBySig = effAndErrBySig.map(t => (t._1, t._5))
    assert(metrics.validityBySignificance sameElements valBySig)
  }
  // Round-trip of the textual model format, with the underlying algorithm mocked out.
  test("serialize/deserialize icp") {
    //Mocks
    val serialICP = "{1.0,2.0},{(0.1,0.2,0.3),(0.3,0.2,0.1),(0.2,0.1,0.3)}"
    val alphas = Seq(
      Array(0.1, 0.2, 0.3),
      Array(0.3, 0.2, 0.1),
      Array(0.2, 0.1, 0.3))
    val model = mock[UnderlyingAlgorithm]
    when(model.toString).thenReturn("1.0,2.0")
    //Test serialization
    val icp = new ICPClassifierModelImpl(model, alphas)
    assert(icp.toString == serialICP)
    //Test deserialization
    val algDeserializer = mock[Deserializer[UnderlyingAlgorithm]]
    when(algDeserializer.deserialize("1.0,2.0"))
      .thenReturn(model)
    val parsedICP = ICPClassifierModel.deserialize(serialICP, algDeserializer)
      .asInstanceOf[ICPClassifierModelImpl[UnderlyingAlgorithm]]
    assert(parsedICP.alg.toString == "1.0,2.0")
    assert(parsedICP.alphas.toArray.deep == alphas.toArray.deep)
  }
} | mcapuccini/spark-cp | cp/src/test/scala/se/uu/farmbio/cp/ICPTest.scala | Scala | apache-2.0 | 6,707 |
/**
* Copyright (c) 2013, The National Archives <digitalpreservation@nationalarchives.gov.uk>
* http://www.nationalarchives.gov.uk
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
package uk.gov.nationalarchives.csv.validator.schema.v1_1
import java.io.StringReader
import org.junit.runner.RunWith
import org.specs2.runner.JUnitRunner
import uk.gov.nationalarchives.csv.validator.TestResources
import uk.gov.nationalarchives.csv.validator.schema._
@RunWith(classOf[JUnitRunner])
/**
 * Verifies the schema `version` declaration handling: valid 1.0 schemas
 * parse, malformed/unsupported versions are rejected, and 1.1-only rules
 * (upperCase, unbound range, any, integrityCheck, identical, switch) are
 * rejected when the schema declares version 1.0.
 */
class SchemaParserVersionSpec extends SchemaSpecBase with TestResources{

  import TestSchemaParser._

  "Schema" should {

    "succeed for valid minimal schema" in {
      val columnDefinitions = List(new ColumnDefinition(NamedColumnIdentifier("column1")),new ColumnDefinition(NamedColumnIdentifier("column2")),new ColumnDefinition(NamedColumnIdentifier("column3")))

      val schema = """version 1.0
                      |@totalColumns 3
                      |@noHeader
                      |column1:
                      |column2:
                      |column3:""".stripMargin

      parse(new StringReader(schema)) must beLike { case Success(parsedSchema, _) => parsedSchema mustEqual buildSchema1_0(TotalColumns(3),NoHeader())(columnDefinitions:_*) }
    }

    "fail if the schema version is wrong" in {
      // "version 1" is not a valid declaration; a full "version 1.0" is required
      val schema = """version 1
                      @totalColumns 1
                      LastName: @IgnoreCase regex ("[a]")"""

      parse(new StringReader(schema)) must beLike {
        case Failure(messages, _) => messages mustEqual "Schema version declaration 'version 1.0' missing or incorrect"
      }
    }

    "fail if the schema version is not supported" in {
      val schema = """version 2.0
                      @totalColumns 1
                      LastName: @IgnoreCase regex ("[a]")"""

      parse(new StringReader(schema)) must beLike {
        case Failure(messages, _) => messages mustEqual "Invalid schema version. This version of the csv validator supports only 1.1 and below."
      }
    }

    // Each of the remaining examples checks that a rule introduced in schema
    // version 1.1 parses under 1.1 but is rejected under 1.0.
    "fail if the schema defined rule that are only supported by future version - upperCase" in {
      def schema(version: String) = s"""version $version
                      @totalColumns 1
                      LastName: upperCase"""
      parse(new StringReader(schema("1.1"))).successful mustEqual true
      parse(new StringReader(schema("1.0"))) must beLike {
        case Failure(messages, _) => messages mustEqual "Invalid column definition"
      }
    }

    "fail if the schema defined rule that are only supported by future version - unbound range" in {
      def schema(version: String) = s"""version $version
                      @totalColumns 1
                      Age: range(1,*)"""
      parse(new StringReader(schema("1.1"))).successful mustEqual true
      parse(new StringReader(schema("1.0"))).successful mustEqual false
    }

    "fail if the schema defined rule that are only supported by future version - any" in {
      def schema(version: String) = s"""version $version
                      @totalColumns 1
                      LastName: any("a","b","c")"""
      parse(new StringReader(schema("1.1"))).successful mustEqual true
      parse(new StringReader(schema("1.0"))) must beLike {
        case Failure(messages, _) => messages mustEqual "Invalid column definition"
      }
    }

    "fail if the schema defined rule that are only supported by future version - integrityCheck" in {
      def schema(version: String) = s"""version $version
                      @totalColumns 1
                      LastName: integrityCheck("excludeFolder")"""
      parse(new StringReader(schema("1.1"))).successful mustEqual true
      parse(new StringReader(schema("1.0"))) must beLike {
        case Failure(messages, _) => messages mustEqual "Invalid column definition"
      }
    }

    "fail if the schema defined rule that are only supported by future version - identical" in {
      def schema(version: String) = s"""version $version
                      @totalColumns 1
                      LastName: identical"""
      parse(new StringReader(schema("1.1"))).successful mustEqual true
      parse(new StringReader(schema("1.0"))) must beLike {
        case Failure(messages, _) => messages mustEqual "Invalid column definition"
      }
    }

    // FIX: a copy-paste duplicate of the "identical" example above was
    // mislabelled "...future version - switch", which both repeated the
    // identical-rule body verbatim and duplicated the description of the
    // real switch example below (specs2 requires unique example
    // descriptions within a block). The redundant example was removed.
    "fail if the schema defined rule that are only supported by future version - switch" in {
      // "$Name" must appear literally in the schema text, hence the indirection
      val Name = "$Name"
      def schema(version: String) = s"""version $version
                                        | @totalColumns 2
                                        | Name:
                                        | SomeSwitchRule: switch(($Name/starts("hello"),is("hello world")),($Name/starts("HELLO"),is("HELLO WORLD")))""".stripMargin
      parse(new StringReader(schema("1.1"))).successful mustEqual true
      parse(new StringReader(schema("1.0"))) must beLike {
        case Failure(messages, _) => messages mustEqual "Invalid column definition"
      }
    }
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package org.apache.toree.kernel.interpreter.scala
import java.io.{InputStream, OutputStream}
import java.net.{URL, URLClassLoader}
import org.apache.toree.interpreter.Results.Result
import org.apache.toree.interpreter._
import org.apache.toree.utils.TaskManager
import org.mockito.Matchers._
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{BeforeAndAfter, FunSpec, Matchers}
import scala.concurrent.Future
import scala.tools.nsc.Settings
import scala.tools.nsc.interpreter.{IMain, IR, JPrintWriter}
import scala.tools.nsc.util.ClassPath
// Unit spec for ScalaInterpreter. All heavyweight collaborators (IMain,
// TaskManager, Settings) are replaced with Mockito mocks through the
// StubbedStartInterpreter subclass, so no real Scala compiler is started.
class ScalaInterpreterSpec extends FunSpec
  with Matchers with MockitoSugar with BeforeAndAfter
{
  private var interpreter: ScalaInterpreter = _
  private var interpreterNoPrintStreams: ScalaInterpreter = _
  private var mockSparkIMain: IMain = _
  private var mockTaskManager: TaskManager = _
  private var mockSettings: Settings = _
  // Mixin that turns print-stream redirection into a no-op for tests.
  trait StubbedUpdatePrintStreams extends Interpreter {
    override def updatePrintStreams(
      in: InputStream,
      out: OutputStream,
      err: OutputStream
    ): Unit = {}
  }
  // Collapses multi-line input so interpretRec delegates to a single
  // interpretLine call.
  trait SingleLineInterpretLineRec extends StubbedStartInterpreter {
    override protected def interpretRec(lines: List[String], silent: Boolean, results: (Result, Either[ExecuteOutput, ExecuteFailure])): (Result, Either[ExecuteOutput, ExecuteFailure]) =
      interpretLine(lines.mkString("\\n"))
  }
  // The traits below stub out each stage of the interpret pipeline with a
  // mock so the surrounding plumbing can be exercised in isolation.
  trait StubbedInterpretAddTask extends StubbedStartInterpreter {
    override protected def interpretAddTask(code: String, silent: Boolean) =
      mock[Future[IR.Result]]
  }
  trait StubbedInterpretMapToCustomResult extends StubbedStartInterpreter {
    override protected def interpretMapToCustomResult(future: Future[IR.Result]) =
      mock[Future[Results.Result with Product with Serializable]]
  }
  trait StubbedInterpretMapToResultAndOutput extends StubbedStartInterpreter {
    override protected def interpretMapToResultAndOutput(future: Future[Results.Result]) =
      mock[Future[(Results.Result, String)]]
  }
  trait StubbedInterpretMapToResultAndExecuteInfo extends StubbedStartInterpreter {
    override protected def interpretMapToResultAndExecuteInfo(future: Future[(Results.Result, String)]) =
      mock[Future[(
        Results.Result with Product with Serializable,
        Either[ExecuteOutput, ExecuteFailure] with Product with Serializable
      )]]
  }
  trait StubbedInterpretConstructExecuteError extends StubbedStartInterpreter {
    override protected def interpretConstructExecuteError(value: Option[AnyRef], output: String) =
      mock[ExecuteError]
  }
  // Interpreter under test: its factory methods hand back the shared mocks
  // created in `before`, and compiler-state helpers are neutralized.
  class StubbedStartInterpreter
    extends ScalaInterpreter
  {
    override protected def newIMain(settings: Settings, out: JPrintWriter): IMain = mockSparkIMain
    override def newTaskManager(): TaskManager = mockTaskManager
    override def newSettings(args: List[String]): Settings = mockSettings
    // mocking out these
    override protected def reinitializeSymbols(): Unit = {}
    override protected def refreshDefinitions(): Unit = {}
    // Stubbed out (not testing this)
  }
  before {
    mockSparkIMain = mock[IMain]
    mockTaskManager = mock[TaskManager]
    // Settings needs a stubbed classpath setting because start() mutates it.
    val mockSettingsClasspath = mock[Settings#PathSetting]
    doNothing().when(mockSettingsClasspath).value_=(any[Settings#PathSetting#T])
    mockSettings = mock[Settings]
    doReturn(mockSettingsClasspath).when(mockSettings).classpath
    doNothing().when(mockSettings).embeddedDefaults(any[ClassLoader])
    interpreter = new StubbedStartInterpreter
    interpreterNoPrintStreams =
      new StubbedStartInterpreter with StubbedUpdatePrintStreams
  }
  after {
    mockSparkIMain = null
    mockTaskManager = null
    mockSettings = null
    interpreter = null
  }
  describe("ScalaInterpreter") {
    describe("#addJars") {
      // Mocked test ignored.
      ignore("should add each jar URL to the runtime classloader") {
        // Needed to access runtimeClassloader method
        // import scala.language.reflectiveCalls
        // Create a new interpreter exposing the internal runtime classloader
        val itInterpreter = new StubbedStartInterpreter {
          // Expose the runtime classloader
          def runtimeClassloader = _runtimeClassloader
        }
        val url = new URL("file://expected")
        itInterpreter.start()
        itInterpreter.addJars(url)
        // itInterpreter.runtimeClassloader
        val cl = itInterpreter.runtimeClassloader
        // cl.getURLs should contain (url)
        itInterpreter.stop()
      }
      it("should add each jar URL to the interpreter classpath") {
        val url = new URL("file://expected")
        interpreter.start()
        interpreter.addJars(url)
      }
    }
    describe("#buildClasspath") {
      it("should return classpath based on classloader hierarchy") {
        // Needed to access runtimeClassloader method
        // import scala.language.reflectiveCalls
        // Create a new interpreter exposing the internal runtime classloader
        val itInterpreter = new StubbedStartInterpreter
        val parentUrls = Array(
          new URL("file:/some/dir/a.jar"),
          new URL("file:/some/dir/b.jar"),
          new URL("file:/some/dir/c.jar")
        )
        val theParentClassloader = new URLClassLoader(parentUrls, null)
        val urls = Array(
          new URL("file:/some/dir/1.jar"),
          new URL("file:/some/dir/2.jar"),
          new URL("file:/some/dir/3.jar")
        )
        val theClassloader = new URLClassLoader(urls, theParentClassloader)
        // buildClasspath must walk the full classloader chain (parent first).
        val expected = ClassPath.join((parentUrls ++ urls).map(_.toString) :_*)
        itInterpreter.buildClasspath(theClassloader) should be(expected)
      }
    }
    describe("#interrupt") {
      it("should fail a require if the interpreter is not started") {
        intercept[IllegalArgumentException] {
          interpreter.interrupt()
        }
      }
      it("should call restart() on the task manager") {
        interpreterNoPrintStreams.start()
        interpreterNoPrintStreams.interrupt()
        verify(mockTaskManager).restart()
      }
    }
    // TODO: Provide testing for the helper functions that return various
    //       mapped futures -- this was too difficult for me to figure out
    //       in a short amount of time
    describe("#interpret") {
      it("should fail if not started") {
        intercept[IllegalArgumentException] {
          interpreter.interpret("val x = 3")
        }
      }
      it("should add a new task to the task manager") {
        var taskManagerAddCalled = false
        val itInterpreter =
          new StubbedStartInterpreter
          with SingleLineInterpretLineRec
          with StubbedUpdatePrintStreams
          //with StubbedInterpretAddTask
          with StubbedInterpretMapToCustomResult
          with StubbedInterpretMapToResultAndOutput
          with StubbedInterpretMapToResultAndExecuteInfo
          with StubbedInterpretConstructExecuteError
          with TaskManagerProducerLike
        {
          // Must override this way since cannot figure out the signature
          // to verify this as a mock
          override def newTaskManager(): TaskManager = new TaskManager {
            override def add[T](taskFunction: => T): Future[T] = {
              taskManagerAddCalled = true
              mock[TaskManager].add(taskFunction)
            }
          }
        }
        itInterpreter.start()
        itInterpreter.interpret("val x = 3")
        taskManagerAddCalled should be (true)
      }
    }
    describe("#start") {
      it("should initialize the task manager") {
        interpreterNoPrintStreams.start()
        verify(mockTaskManager).start()
      }
      // TODO: Figure out how to trigger sparkIMain.beQuietDuring { ... }
      /*it("should add an import for SparkContext._") {
        interpreterNoPrintStreams.start()
        verify(mockSparkIMain).addImports("org.apache.spark.SparkContext._")
      }*/
    }
    describe("#stop") {
      describe("when interpreter already started") {
        it("should stop the task manager") {
          interpreterNoPrintStreams.start()
          interpreterNoPrintStreams.stop()
          verify(mockTaskManager).stop()
        }
        it("should stop the SparkIMain") {
          interpreterNoPrintStreams.start()
          interpreterNoPrintStreams.stop()
          verify(mockSparkIMain).close()
        }
      }
    }
    describe("#updatePrintStreams") {
      // TODO: Figure out how to trigger sparkIMain.beQuietDuring { ... }
    }
    // describe("#classServerUri") {
    //  it("should fail a require if the interpreter is not started") {
    //    intercept[IllegalArgumentException] {
    //      interpreter.classServerURI
    //    }
    //  }
      // TODO: Find better way to test this
    //  it("should invoke the underlying SparkIMain implementation") {
        // Using hack to access private class
    //    val securityManagerClass =
    //      java.lang.Class.forName("org.apache.spark.SecurityManager")
    //    val httpServerClass =
    //      java.lang.Class.forName("org.apache.spark.HttpServer")
    //    val httpServerConstructor = httpServerClass.getDeclaredConstructor(
    //      classOf[SparkConf], classOf[File], securityManagerClass, classOf[Int],
    //      classOf[String])
    //    val httpServer = httpServerConstructor.newInstance(
    //      null, null, null, 0: java.lang.Integer, "")
    //
    //    // Return the server instance (cannot mock a private class)
    //    // NOTE: Can mock the class through reflection, but cannot verify
    //    // a method was called on it since treated as type Any
    //    //val mockHttpServer = org.mockito.Mockito.mock(httpServerClass)
    //    doAnswer(new Answer[String] {
    //      override def answer(invocation: InvocationOnMock): String = {
    //        val exceptionClass =
    //          java.lang.Class.forName("org.apache.spark.ServerStateException")
    //        val exception = exceptionClass
    //          .getConstructor(classOf[String])
    //          .newInstance("")
    //          .asInstanceOf[Exception]
    //        throw exception
    //      }
    //    }
    //    ).when(mockSparkIMain)
    //    interpreterNoPrintStreams.start()
    //    Not going to dig so deeply that we actually start a web server for
    //    this to work... just throwing this specific exception proves that
    //    we have called the uri method of the server
    //    try {
    //      interpreterNoPrintStreams.classServerURI
    //      fail()
    //    } catch {
    //      // Have to catch this way because... of course... the exception is
    //      // also private
    //      case ex: Throwable =>
    //        ex.getClass.getName should be ("org.apache.spark.ServerStateException")
    //    }
    //  }
    // }
    describe("#read") {
      it("should fail a require if the interpreter is not started") {
        intercept[IllegalArgumentException] {
          interpreter.read("someVariable")
        }
      }
      it("should execute the underlying eval method") {
        interpreter.start()
        interpreter.read("someVariable")
        verify(mockSparkIMain).eval(anyString())
      }
    }
    describe("#doQuietly") {
      it("should fail a require if the interpreter is not started") {
        intercept[IllegalArgumentException] {
          interpreter.doQuietly {}
        }
      }
      // TODO: Figure out how to verify sparkIMain.beQuietDuring { ... }
      /*it("should invoke the underlying SparkIMain implementation") {
        interpreterNoPrintStreams.start()
        interpreterNoPrintStreams.doQuietly {}
        verify(mockSparkIMain).beQuietDuring(any[IR.Result])
      }*/
    }
    describe("#bind") {
      it("should fail a require if the interpreter is not started") {
        intercept[IllegalArgumentException] {
          interpreter.bind("", "", null, null)
        }
      }
      // TODO: Re-enable tests since we've commented this one out.
      // it("should invoke the underlying SparkIMain implementation") {
      //  interpreterNoPrintStreams.start()
      //  interpreterNoPrintStreams.bind("", "", null, null)
      //
      //  verify(mockSparkIMain).bind(
      //    anyString(), anyString(), any[Any], any[List[String]])
      // }
    }
    describe("#truncateResult") {
      it("should truncate result of res result") {
        // Results that match
        interpreter.truncateResult("res7: Int = 38") should be("38")
        interpreter.truncateResult("res7: Int = 38",true) should be("Int = 38")
        interpreter.truncateResult("res4: String = \\nVector(1\\n, 2\\n)") should be ("Vector(1\\n, 2\\n)")
        interpreter.truncateResult("res4: String = \\nVector(1\\n, 2\\n)",true) should be ("String = Vector(1\\n, 2\\n)")
        interpreter.truncateResult("res123") should be("")
        interpreter.truncateResult("res1") should be("")
        // Results that don't match
        interpreter.truncateResult("resabc: Int = 38") should be("")
      }
      it("should truncate res results that have tuple values") {
        interpreter.truncateResult("res0: (String, Int) = (hello,1)") should
          be("(hello,1)")
      }
      it("should truncate res results that have parameterized types") {
        interpreter.truncateResult(
          "res0: Class[_ <: (String, Int)] = class scala.Tuple2"
        ) should be("class scala.Tuple2")
      }
    }
  }
}
| chipsenkbeil/incubator-toree | scala-interpreter/src/test/scala-2.11/scala/ScalaInterpreterSpec.scala | Scala | apache-2.0 | 14,383 |
package yuuto.enhancedinventories.item
import java.awt.Color
import java.util.List
import java.util.Random
import yuuto.enhancedinventories.util.UpdateHelper
import yuuto.enhancedinventories.tile.traits.TInventoryConnectiveUpgradeable
import net.minecraft.client.resources.I18n
import net.minecraft.client.renderer.texture.IIconRegister
import net.minecraft.nbt.NBTTagCompound
import yuuto.enhancedinventories.util.MinecraftColors
import net.minecraft.tileentity.TileEntity
import net.minecraft.entity.player.EntityPlayer
import yuuto.enhancedinventories.materials.ETier
import yuuto.enhancedinventories.materials.FrameMaterial
import net.minecraft.creativetab.CreativeTabs
import yuuto.enhancedinventories.materials.DecorationHelper
import net.minecraft.util.IIcon
import yuuto.enhancedinventories.materials.FrameMaterials
import net.minecraft.entity.item.EntityItem
import net.minecraft.item.ItemStack
import net.minecraft.world.World
import cpw.mods.fml.relauncher.SideOnly
import yuuto.enhancedinventories.item.base.ItemBaseEI
import net.minecraft.entity.Entity
import net.minecraft.item.Item
import cpw.mods.fml.relauncher.Side
import yuuto.enhancedinventories.ref.ReferenceEI
/**
 * Size upgrade item for Enhanced Inventories chests (Minecraft Forge 1.7.10).
 * Item damage 0..6 selects the upgrade tier; an NBT tag stores the frame
 * material used for tinting and texture selection. Rendered in two passes:
 * pass 0 = material frame, pass 1 = tier cross overlay.
 */
class ItemSizeUpgrade(name: String) extends ItemBaseEI(name) {
  this.hasSubtypes = true

  // Pass-0 icons: one frame texture per material class (0=metal, 1=stone, 2=obsidian).
  val frames: Array[IIcon] = new Array[IIcon](3)
  // Pass-1 icons: one cross overlay per upgrade tier (item damage 0..6).
  val crosses: Array[IIcon] = new Array[IIcon](7)

  /** Migrates legacy stacks (no NBT tag) to the tagged format on inventory ticks. */
  override def onUpdate(stack: ItemStack, world: World, entity: Entity, meta: Int, bool: Boolean): Unit = {
    if (stack.getItem() != this || stack.hasTagCompound()) {
      super.onUpdate(stack, world, entity, meta, bool)
      return
    }
    UpdateHelper.updateSizeUpgrade(stack)
    super.onUpdate(stack, world, entity, meta, bool)
  }

  /** Same legacy-stack migration for upgrades dropped into the world. */
  override def onEntityItemUpdate(entityItem: EntityItem): Boolean = {
    val stack: ItemStack = entityItem.getEntityItem()
    if (stack.getItem() != this || stack.hasTagCompound()) {
      return super.onEntityItemUpdate(entityItem)
    }
    UpdateHelper.updateSizeUpgrade(stack)
    super.onEntityItemUpdate(entityItem)
  }

  @SideOnly(Side.CLIENT)
  override def getSpriteNumber(): Int = 0

  /**
   * Colour multiplier per render pass: pass 0 is tinted with the frame
   * material's colour, pass 1 (cross overlay) is always yellow.
   */
  @SideOnly(Side.CLIENT)
  override def getColorFromItemStack(stack: ItemStack, pass: Int): Int = pass match {
    case 0 =>
      if (!stack.hasTagCompound()) Color.WHITE.getRGB()
      else {
        val mat: FrameMaterial = FrameMaterials.Instance.getMaterial(stack.getTagCompound().getString(DecorationHelper.KEY_FRAME_NAME))
        mat.color().getRGB()
      }
    case 1 =>
      MinecraftColors.YELLOW.getColor().getRGB()
    // FIX: the original match had no default case, so any render pass other
    // than 0/1 threw a MatchError and the intended white fallback
    // (a trailing `return Color.WHITE.getRGB()`) was unreachable dead code.
    case _ =>
      Color.WHITE.getRGB()
  }

  /** Icon per render pass: frame texture for pass 0, tier cross for pass 1. */
  override def getIcon(stack: ItemStack, pass: Int): IIcon = pass match {
    case 0 =>
      if (!stack.hasTagCompound()) this.frames(0)
      else {
        val mat: FrameMaterial = FrameMaterials.Instance.getMaterial(stack.getTagCompound().getString(DecorationHelper.KEY_FRAME_NAME))
        this.frames(mat.getTextureIndex())
      }
    case 1 =>
      this.crosses(stack.getItemDamage())
    // FIX: same missing-default MatchError as in getColorFromItemStack; the
    // trailing `return this.itemIcon` fallback was unreachable.
    case _ =>
      this.itemIcon
  }

  override def getRenderPasses(metadata: Int): Int = if (requiresMultipleRenderPasses()) 2 else 1

  @SideOnly(Side.CLIENT)
  override def requiresMultipleRenderPasses(): Boolean = true

  /** Registers the three frame textures and the seven tier cross overlays. */
  @SideOnly(Side.CLIENT)
  override def registerIcons(reg: IIconRegister): Unit = {
    frames(0) = reg.registerIcon(ReferenceEI.MOD_ID.toLowerCase() + ":chestFrames/chestFrameMetal")
    frames(1) = reg.registerIcon(ReferenceEI.MOD_ID.toLowerCase() + ":chestFrames/chestFrameStone")
    frames(2) = reg.registerIcon(ReferenceEI.MOD_ID.toLowerCase() + ":chestFrames/chestFrameObsidian")
    this.itemIcon = frames(0)
    for (i <- 0 until crosses.length) {
      crosses(i) = reg.registerIcon(ReferenceEI.MOD_ID.toLowerCase() + ":chestFrames/chestCross" + i)
    }
  }

  /** Ensures legacy stacks are migrated before the icon is looked up. */
  override def getIconIndex(stack: ItemStack): IIcon = {
    if (stack.getItem() != this || stack.hasTagCompound())
      return super.getIconIndex(stack)
    UpdateHelper.updateSizeUpgrade(stack)
    if (stack.getItem() != this)
      return stack.getIconIndex()
    super.getIconIndex(stack)
  }

  @SideOnly(Side.CLIENT)
  override def getIconFromDamage(metadata: Int): IIcon =
    // defensive null check kept from the original in case icons are queried
    // before construction/registration completes
    if (frames != null && metadata < frames.length) frames(metadata) else itemIcon

  /** One creative-tab entry per tier (0..6), each tagged with a random matching frame material. */
  @SideOnly(Side.CLIENT)
  override def getSubItems(item: Item, tab: CreativeTabs, subItems: List[_]): Unit = {
    for (i <- 0 until 7) {
      val nbt: NBTTagCompound = new NBTTagCompound()
      val stack: ItemStack = new ItemStack(this, 1, i)
      DecorationHelper.setFrame(nbt, ETier.values()(i + 1).getRandomFrameMaterial(this.getRandom()))
      stack.setTagCompound(nbt)
      subItems.asInstanceOf[List[ItemStack]].add(stack)
    }
  }

  override def getUnlocalizedName(stack: ItemStack): String = this.getUnlocalizedName() + "." + stack.getItemDamage()

  override def getMetadata(damageValue: Int): Int = damageValue

  /**
   * Sneak-right-click on an upgradeable inventory applies this upgrade: the
   * inventory is disconnected, reset and re-initialised with the new size.
   * Returns true when the upgrade was consumed.
   */
  override def onItemUse(stack: ItemStack, player: EntityPlayer, world: World, x: Int, y: Int, z: Int, side: Int, hitX: Float, hitY: Float, hitZ: Float): Boolean = {
    // make sure legacy stacks are migrated before applying the upgrade
    stack.getItem().onUpdate(stack, world, player, side, false)
    if (!player.isSneaking())
      return false
    val tile: TileEntity = world.getTileEntity(x, y, z)
    if (tile == null || !tile.isInstanceOf[TInventoryConnectiveUpgradeable])
      return false
    val chest: TInventoryConnectiveUpgradeable = tile.asInstanceOf[TInventoryConnectiveUpgradeable]
    if (!chest.isUpgradeValid(stack, player))
      return false
    if (chest.isConnected())
      chest.disconnect()
    chest.setUninitialized()
    if (!chest.addUpgrade(stack, player))
      return false
    if (player.capabilities.isCreativeMode)
      return true
    stack.stackSize -= 1
    player.inventoryContainer.detectAndSendChanges()
    true
  }

  /** Tooltip: localised frame material name (defaults to stone for untagged stacks). */
  @SideOnly(Side.CLIENT)
  override def addInformation(stack: ItemStack, player: EntityPlayer, info: List[_], bool: Boolean): Unit = {
    super.addInformation(stack, player, info, bool)
    if (stack.hasTagCompound())
      info.asInstanceOf[List[String]].add(I18n.format(stack.getTagCompound().getString(DecorationHelper.KEY_FRAME_NAME)))
    else
      info.asInstanceOf[List[String]].add(I18n.format(FrameMaterials.Stone.getID()))
  }
}
package monocle.function
import monocle.{Iso, Lens}
import scala.annotation.implicitNotFound
/**
 * Typeclass that defines a [[Lens]] from an `S` to its fourth element of type `A`
 * @tparam S source of [[Lens]]
 * @tparam A target of [[Lens]], `A` is supposed to be unique for a given `S`
 */
@implicitNotFound("Could not find an instance of Field4[${S},${A}], please check Monocle instance location policy to " +
  "find out which import is necessary")
trait Field4[S, A] extends Serializable {
  /** [[Lens]] focusing on the fourth element of an `S` */
  def fourth: Lens[S, A]
}
object Field4 extends Field4Functions {
  /** Lift an instance of [[Field4]] through an [[Iso]]: focus into `A` first, then take its fourth element. */
  def fromIso[S, A, B](iso: Iso[S, A])(implicit ev: Field4[A, B]): Field4[S, B] =
    new Field4[S, B] {
      def fourth: Lens[S, B] = iso.composeLens(ev.fourth)
    }
}
trait Field4Functions {
  /** Summon the [[Lens]] to the fourth element of `S` from an implicit [[Field4]] instance */
  def fourth[S, A](implicit ev: Field4[S, A]): Lens[S, A] = ev.fourth
}
| NightRa/Monocle | core/src/main/scala/monocle/function/Field4.scala | Scala | mit | 899 |
// Compile as scalac -P:continuations:enable Continuations.scala
import scala.util.continuations._
import scala.util.control.NonFatal
/**
 * Delimited-continuations demo (requires the continuations compiler plugin):
 * tries to read a file, and when the read fails, suspends via `shift` so the
 * user can be prompted for another filename and the read loop resumed.
 */
object Main extends App {
  // Captured continuation: resumes tryRead's loop after a new filename is set.
  var cont : (Unit => Unit) = null
  var filename = "myfile.txt"
  var contents = ""

  /**
   * Repeatedly attempts to read `filename`. After each attempt it suspends
   * itself via `shift`, storing the rest of the loop in `cont`, so the caller
   * can resume it once a new filename has been supplied.
   */
  def tryRead(): Unit @cps[Unit] = {
    while (contents == "") {
      try {
        contents = scala.io.Source.fromFile(filename, "UTF-8").mkString
      } catch {
        // FIX: the original `case _ =>` swallowed every Throwable, including
        // fatal VM errors; only non-fatal failures (e.g. FileNotFoundException)
        // should be treated as "try another file".
        case NonFatal(_) =>
      }
      shift { k : (Unit => Unit) =>
        cont = k
      }
    }
  }

  reset {
    tryRead()
  }

  // FIX: the original used `if`, so a second failed filename fell through and
  // printed empty contents; keep prompting until a file is actually read.
  while (contents == "") {
    println("Try another filename: ")
    filename = readLine
    cont()
  }
  println(contents)
}
| P7h/ScalaPlayground | Scala for the Impatient/examples/ch22/sec06/Continuations.scala | Scala | apache-2.0 | 620 |
package nl.gideondk.sentinel.client
import akka.actor.ActorSystem
import akka.stream._
import akka.stream.scaladsl.{ BidiFlow, GraphDSL, RunnableGraph, Tcp }
import akka.stream.stage.GraphStageLogic.EagerTerminateOutput
import akka.stream.stage._
import akka.util.ByteString
import akka.{ Done, stream }
import nl.gideondk.sentinel.pipeline.Processor
import nl.gideondk.sentinel.protocol.{ Command, Event }
import scala.collection.mutable
import scala.concurrent._
import scala.concurrent.duration._
import scala.util.{ Failure, Success, Try }
/** Address (hostname or IP plus TCP port) of a remote endpoint. */
case class Host(host: String, port: Int)
object ClientStage {
  /** Marker for exceptions signalling that a connection was closed. */
  trait ConnectionClosedException
  /** Notification about a host entering or leaving the connection pool. */
  trait HostEvent {
    def host: Host
  }
  case class ConnectionClosedWithReasonException(message: String, cause: Throwable) extends Exception(message, cause) with ConnectionClosedException
  case class ConnectionClosedWithoutReasonException(message: String) extends Exception(message) with ConnectionClosedException
  case class HostUp(host: Host) extends HostEvent
  case class HostDown(host: Host) extends HostEvent
  // NOTE(review): a case-object exception captures a single stack trace at
  // class-load time; acceptable here since it is only used as a sentinel.
  case object NoConnectionsAvailableException extends Exception
}
import nl.gideondk.sentinel.client.ClientStage._
class ClientStage[Context, Cmd, Evt](connectionsPerHost: Int, maximumFailuresPerHost: Int,
recoveryPeriod: FiniteDuration, finishGracefully: Boolean, processor: Processor[Cmd, Evt],
protocol: BidiFlow[ByteString, Evt, Cmd, ByteString, Any])(implicit system: ActorSystem, mat: Materializer)
extends GraphStage[BidiShape[(Command[Cmd], Context), (Try[Event[Evt]], Context), HostEvent, HostEvent]] {
val connectionEventIn = Inlet[HostEvent]("ClientStage.ConnectionEvent.In")
val connectionEventOut = Outlet[HostEvent]("ClientStage.ConnectionEvent.Out")
val commandIn = Inlet[(Command[Cmd], Context)]("ClientStage.Command.In")
val eventOut = Outlet[(Try[Event[Evt]], Context)]("ClientStage.Event.Out")
override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new TimerGraphStageLogic(shape) {
private val hosts = mutable.Map.empty[Host, Int]
private val hostFailures = mutable.Map.empty[Host, Int]
private val connectionPool = mutable.Queue.empty[Connection]
private val failures = mutable.Queue.empty[(Try[Event[Evt]], Context)]
private var antennaId = 0
private var closingOnCommandIn = false
override def preStart() = {
pull(connectionEventIn)
pull(commandIn)
schedulePeriodically(Done, recoveryPeriod)
}
def nextId() = {
antennaId += 1
antennaId
}
def addHost(host: Host) = {
if (!hosts.contains(host)) {
hosts += (host -> 0)
pullCommand(true)
}
}
def ensureConnections() = {
hosts
.find(_._2 < connectionsPerHost)
.foreach {
case (host, connectionCount) ⇒
val connection = Connection(host, nextId())
connection.initialize()
connectionPool.enqueue(connection)
hosts(connection.host) = connectionCount + 1
}
pullCommand(false)
}
def pullCommand(shouldInitializeConnection: Boolean): Unit =
if (hosts.isEmpty && isAvailable(commandIn)) {
val (_, context) = grab(commandIn)
failures.enqueue((Failure(NoConnectionsAvailableException), context))
if (isAvailable(eventOut) && failures.nonEmpty) {
push(eventOut, failures.dequeue())
}
pull(commandIn)
} else if (isAvailable(commandIn)) {
connectionPool.dequeueFirst(_.canBePushedForCommand) match {
case Some(connection) ⇒
val (command, context) = grab(commandIn)
connection.pushCommand(command, context)
connectionPool.enqueue(connection)
pull(commandIn)
case None ⇒ if (shouldInitializeConnection) ensureConnections()
}
}
def connectionFailed(connection: Connection, cause: Throwable) = {
val host = connection.host
val totalFailure = hostFailures.getOrElse(host, 0) + 1
hostFailures(host) = totalFailure
system.log.warning(s"Connection ${connection.connectionId} to $host failed due to ${cause.getMessage}")
if (hostFailures(host) >= maximumFailuresPerHost) {
system.log.error(cause, s"Dropping $host, failed $totalFailure times")
emit(connectionEventOut, HostDown(host))
removeHost(host, Some(cause))
} else {
removeConnection(connection, Some(cause))
}
}
def removeHost(host: Host, cause: Option[Throwable] = None) = {
hosts.remove(host)
hostFailures.remove(host)
connectionPool.dequeueAll(_.host == host).foreach(_.close(cause))
if (isAvailable(eventOut) && failures.nonEmpty) {
push(eventOut, failures.dequeue())
}
pullCommand(true)
}
def removeConnection(connection: Connection, cause: Option[Throwable]) = {
hosts(connection.host) = hosts(connection.host) - 1
connectionPool.dequeueAll(_.connectionId == connection.connectionId).foreach(_.close(cause))
if (isAvailable(eventOut) && failures.nonEmpty) {
push(eventOut, failures.dequeue())
}
pullCommand(true)
}
setHandler(connectionEventOut, EagerTerminateOutput)
setHandler(connectionEventIn, new InHandler {
override def onPush() = {
grab(connectionEventIn) match {
case HostUp(connection) ⇒ addHost(connection)
case HostDown(connection) ⇒ removeHost(connection)
}
pull(connectionEventIn)
}
override def onUpstreamFinish() = ()
override def onUpstreamFailure(ex: Throwable) =
failStage(throw new IllegalStateException(s"Stream for ConnectionEvents failed", ex))
})
setHandler(commandIn, new InHandler {
override def onPush() = pullCommand(shouldInitializeConnection = true)
override def onUpstreamFinish() = {
if (finishGracefully) {
closingOnCommandIn = true
connectionPool.foreach(_.requestClose())
} else {
connectionPool.foreach(_.close(None))
completeStage()
}
}
override def onUpstreamFailure(ex: Throwable) =
failStage(throw new IllegalStateException(s"Requests stream failed", ex))
})
setHandler(eventOut, new OutHandler {
override def onPull() =
if (failures.nonEmpty) push(eventOut, failures.dequeue())
else {
connectionPool
.dequeueFirst(_.canBePulledForEvent)
.foreach(connection ⇒ {
if (isAvailable(eventOut)) {
val event = connection.pullEvent
push(eventOut, event)
}
connectionPool.enqueue(connection)
})
}
override def onDownstreamFinish() = {
completeStage()
}
})
override def onTimer(timerKey: Any) = {
hostFailures.clear()
}
    // One pooled connection to `host`. Bridges the outer stage to a dedicated
    // TCP pipeline through sub-inlets/outlets, and tracks per-request contexts
    // so each incoming event is matched (FIFO) to the command that caused it.
    case class Connection(host: Host, connectionId: Int) {
      connection ⇒
      // Events arriving from the remote host on this connection.
      private val connectionEventIn = new SubSinkInlet[Event[Evt]](s"Connection.[$host].[$connectionId].in")
      // Commands flowing out to the remote host on this connection.
      private val connectionCommandOut = new SubSourceOutlet[Command[Cmd]](s"Connection.[$host].[$connectionId].out")
      // Contexts of in-flight commands, in send order.
      private val contexts = mutable.Queue.empty[Context]
      // Set by requestClose(): finish outstanding work, then close.
      private var closing = false
      def canBePushedForCommand = connectionCommandOut.isAvailable
      def canBePulledForEvent = connectionEventIn.isAvailable
      // Sends a command and remembers its context for the matching response.
      def pushCommand(command: Command[Cmd], context: Context) = {
        contexts.enqueue(context)
        connectionCommandOut.push(command)
      }
      // Grabs the next event and pairs it with the oldest outstanding context.
      // When a graceful close was requested, the connection closes instead of
      // pulling further events.
      def pullEvent() = {
        val event = connectionEventIn.grab()
        val context = contexts.dequeue()
        if (closing) {
          close(None)
          (Success(event), context)
        } else {
          connectionEventIn.pull()
          (Success(event), context)
        }
      }
      // Marks the connection for graceful shutdown; closes immediately when
      // nothing is outstanding.
      def requestClose() = {
        closing = true
        if (contexts.length == 0) {
          close(None)
        }
      }
      // Tears down the sub-streams and fails every outstanding context so the
      // dropped requests are reported through the failures queue.
      def close(cause: Option[Throwable]) = {
        val exception = cause match {
          case Some(cause) ⇒ ConnectionClosedWithReasonException(s"Failure to process request to $host at connection $connectionId", cause)
          case None ⇒ ConnectionClosedWithoutReasonException(s"Failure to process request to $host connection $connectionId")
        }
        contexts.dequeueAll(_ ⇒ true).foreach(context ⇒ {
          failures.enqueue((Failure(exception), context))
        })
        connectionEventIn.cancel()
        connectionCommandOut.complete()
      }
      // Wires the sub-ports to a fresh TCP pipeline (command flow atop the
      // reversed protocol) and issues the first event pull.
      def initialize() = {
        connectionEventIn.setHandler(new InHandler {
          override def onPush() = if (isAvailable(eventOut)) {
            push(eventOut, connection.pullEvent)
          }
          override def onUpstreamFinish() = {
            removeConnection(connection, None)
          }
          override def onUpstreamFailure(reason: Throwable) = reason match {
            // Timeouts just drop the connection; other failures are reported.
            case t: TimeoutException ⇒ removeConnection(connection, Some(t))
            case _ ⇒ connectionFailed(connection, reason)
          }
        })
        connectionCommandOut.setHandler(new OutHandler {
          override def onPull() = pullCommand(shouldInitializeConnection = true)
          override def onDownstreamFinish() = {
            ()
          }
        })
        RunnableGraph.fromGraph(GraphDSL.create() { implicit b ⇒
          import GraphDSL.Implicits._
          val pipeline = b.add(processor
            .flow
            .atop(protocol.reversed)
            .join(Tcp().outgoingConnection(host.host, host.port)))
          connectionCommandOut.source ~> pipeline.in
          pipeline.out ~> connectionEventIn.sink
          stream.ClosedShape
        }).run()(subFusingMaterializer)
        connectionEventIn.pull()
      }
    }
}
  // commandIn/eventOut face the client; connectionEventIn/connectionEventOut carry host up/down events.
  override def shape = new BidiShape(commandIn, eventOut, connectionEventIn, connectionEventOut)
} | gideondk/sentinel | src/main/scala/nl/gideondk/sentinel/client/ClientStage.scala | Scala | apache-2.0 | 10,108 |
/**
* Copyright (C) Emiliyan Todorov.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.popi.geom
import org.popi.stat.MathUtil.{pow, abs, sqrt}
import scala.collection.immutable.List
/**
* Distance Measurer
*
* @author Emiliyan Todorov
*
*/
object Distances {
  /**
   * Calculates the Euclidean (2-norm) distance between two multi-dimensional points
   * @param point1 the coordinates for point 1
   * @param point2 the coordinates for point 2
   * @return the distance
   */
  def euclidean(point1: List[Double], point2: List[Double]): Double =
    pNorm(2, point1, point2)
  /**
   * Calculates the p-norm distance between two multi-dimensional points.
   * Note: if the points differ in dimensionality, the extra coordinates of the
   * longer point are silently ignored (zip truncates to the shorter list).
   * @param pnorm the p norm (e.g. >1 for Minkowski, 1 for Manhattan, 2 for Euclidean, +inf for Chebyshev, ...)
   * @param point1 the coordinates for point 1
   * @param point2 the coordinates for point 2
   * @return the distance or NaN if pnorm < 1.0
   */
  def pNorm(pnorm: Double, point1: List[Double], point2: List[Double]): Double = {
    // Per-dimension absolute differences; a def, so only evaluated by the
    // branches that actually use it.
    def difference = point1.zip(point2).map{case (coordinates_p1, coordinates_p2) => abs(coordinates_p1 - coordinates_p2)}
    pnorm match {
      // TODO: implement for p < 1
      case _ if pnorm < 1 => Double.NaN
      case 1.0 => difference.sum // Manhattan
      case 2.0 => // Euclidean
        val sum = difference.map(x => x * x).sum
        sqrt(sum)
      case Double.PositiveInfinity => difference.max // Chebyshev
      // case Double.NegativeInfinity => difference.min
      case p: Double => // general Minkowski
        val sum = difference.map(x => pow(x, p)).sum
        pow(sum, 1/p)
    }
  }
}
| emodemo/Popi | src/org/popi/geom/Distances.scala | Scala | gpl-3.0 | 2,196 |
package geotrellis.network.graph
import geotrellis.network._
import geotrellis.network.graph._
import scala.collection.mutable
/**
 * Mutable transit graph under construction: vertices indexed by location, each
 * with its own EdgeSet. Call pack() to freeze it into an immutable TransitGraph.
 * Not thread-safe.
 */
class MutableGraph() {
  // Lookup of vertices by their geographic location.
  private val locationsToVertices = mutable.Map[Location,Vertex]()
  // Outgoing edges per vertex; also serves as the vertex membership set.
  private val edgeSets = mutable.Map[Vertex,EdgeSet]()
  // Total edge count for a transit mode across all vertices.
  def edgeCount(mode:TransitMode) = edgeSets.values.foldLeft(0)(_+_.edgeCount(mode))
  def edgeCount(v:Vertex,mode:TransitMode) =
    edgeSets(v).edgeCount(mode)
  def edges(v:Vertex) =
    edgeSets(v)
  def vertexCount = locationsToVertices.size
  // Registers a vertex with a fresh, empty edge set.
  def addVertex(v:Vertex) = {
    locationsToVertices(v.location) = v
    edgeSets(v) = EdgeSet(v)
  }
  def +=(v:Vertex) =
    addVertex(v)
  // Registers a vertex together with an already-built edge set
  // (overwrites any previous edge set for the same vertex).
  def addWithEdges(v:Vertex,edgeSet:EdgeSet) = {
    locationsToVertices(v.location) = v
    edgeSets(v) = edgeSet
  }
  /** Does not check if target vertex is in the graph */
  def addEdge(source:Vertex,edge:Edge) = {
    edgeSets(source).addEdge(edge)
  }
  def vertices() =
    locationsToVertices.values
  // Throws NoSuchElementException if no vertex exists at the location.
  def vertexAtLocation(location:Location) =
    locationsToVertices(location)
  def locations() = locationsToVertices.keys
  def contains(v:Vertex) =
    edgeSets.contains(v)
  // Freezes this graph into its immutable, packed representation.
  def pack():TransitGraph = {
    TransitGraph.pack(this)
  }
  // Debug dump: one row per vertex with a de-duplicated edge listing.
  override
  def toString = {
    var s = "(MUTABLE)\\n"
    s += "Vertex\\t\\tEdges\\n"
    s += "---------------------------------\\n"
    for(v <- vertices) {
      val edgeStrings = mutable.Set[String]()
      s += s"$v\\t\\t"
      for(e <- edgeSets(v)) {
        edgeStrings += s"$e"
      }
      s += edgeStrings.mkString(",") + "\\n"
    }
    s
  }
}
object MutableGraph {
  /** Creates an empty mutable graph. */
  def apply() = new MutableGraph()

  /** Creates a mutable graph pre-populated with the given vertices (no edges). */
  def apply(vertices:Iterable[Vertex]) = {
    val graph = new MutableGraph()
    vertices.foreach(graph += _)
    graph
  }

  /** Combines two graphs into a new one, copying each vertex with its edge set.
    * For vertices present in both graphs, the edge set from `g2` wins. */
  def merge(g1:MutableGraph,g2:MutableGraph):MutableGraph = {
    val merged = MutableGraph()
    Seq(g1, g2).foreach { graph =>
      graph.vertices.foreach(v => merged.addWithEdges(v, graph.edges(v)))
    }
    merged
  }
}
| flibbertigibbet/open-transit-indicators | scala/geotrellis-transit/src/main/scala/geotrellis/network/graph/MutableGraph.scala | Scala | gpl-3.0 | 1,985 |
package japgolly.scalajs.react.vdom
import org.scalajs.dom.{html => *}
import Exports._
object HtmlTags extends HtmlTags
trait HtmlTags {
/**
* Represents a hyperlink, linking to another resource.
*/
object a extends TagOf[*.Anchor]("a", Nil, Namespace.Html) {
/** A link to open a new window (tab) to a given URL.
*
* Like: `<a href="https://google.com" target="_blank" rel="noopener"></a>`
*
* @param noopener See https://developers.google.com/web/tools/lighthouse/audits/noopener
*/
def toNewWindow(href : String,
noopener : Boolean = true,
noreferrer: Boolean = false) = {
implicit def strAttr = Attr.ValueType.string
val a = this(HtmlAttrs.target.blank, HtmlAttrs.href := href)
(noopener, noreferrer) match {
case(true , false) => a(HtmlAttrs.rel := "noopener")
case(true , true ) => a(HtmlAttrs.rel := "noopener noreferrer")
case(false, true ) => a(HtmlAttrs.rel := "noreferrer")
case(false, false) => a
}
}
}
/**
* An abbreviation or acronym; the expansion of the abbreviation can be
* represented in the title attribute.
*/
final def abbr = HtmlTagOf[*.Element]("abbr")
/**
* Defines a section containing contact information.
*/
final def address = HtmlTagOf[*.Element]("address")
/**
* In conjunction with map, defines an image map
*/
final def area: HtmlTagOf[*.Area] = "area".reactTerminalTag
/**
* Defines self-contained content that could exist independently of the rest
* of the content.
*/
final def article = HtmlTagOf[*.Element]("article")
/**
* Defines some content loosely related to the page content. If it is removed,
* the remaining content still makes sense.
*/
final def aside = HtmlTagOf[*.Element]("aside")
/**
* Represents a sound or an audio stream.
*/
final def audio = HtmlTagOf[*.Audio]("audio")
/**
* Bold text.
*/
final def b = HtmlTagOf[*.Element]("b")
/**
* Defines the base URL for relative URLs in the page.
*/
final def base: HtmlTagOf[*.Base] = "base".reactTerminalTag
/**
* Represents text that must be isolated from its surrounding for bidirectional
* text formatting. It allows embedding a span of text with a different, or
* unknown, directionality.
*/
final def bdi = HtmlTagOf[*.Element]("bdi")
/**
* Represents the directionality of its children, in order to explicitly
* override the Unicode bidirectional algorithm.
*/
final def bdo = HtmlTagOf[*.Element]("bdo")
/**
* Represents a content that is quoted from another source.
*/
final def blockquote = HtmlTagOf[*.Quote]("blockquote")
/**
* Represents the content of an HTML document. There is only one body
* element in a document.
*/
final def body = HtmlTagOf[*.Body]("body")
/**
* Represents a line break.
*/
final def br: HtmlTagOf[*.BR] = "br".reactTerminalTag
final def button = HtmlTagOf[*.Button]("button")
/**
* Represents a bitmap area that scripts can use to render graphics like graphs,
* games or any visual images on the fly.
*/
final def canvas = HtmlTagOf[*.Canvas]("canvas")
/**
* The title of a table.
*/
final def caption = HtmlTagOf[*.TableCaption]("caption")
/**
* Represents the title of a work being cited.
*/
final def cite = HtmlTagOf[*.Element]("cite")
/**
* Represents computer code.
*/
final def code = HtmlTagOf[*.Element]("code")
/**
* A single column.
*/
final def col: HtmlTagOf[*.TableCol] = "col".reactTerminalTag
/**
* A set of columns.
*/
final def colgroup = HtmlTagOf[*.TableCol]("colgroup")
/**
* A command that the user can invoke.
*/
final def command: HtmlTagOf[*.Element] = "command".reactTerminalTag
/**
* Associates to its content a machine-readable equivalent.
*/
final def data = HtmlTagOf[*.Element]("data")
/**
* A set of predefined options for other controls.
*/
final def datalist = HtmlTagOf[*.DataList]("datalist")
/**
* Represents the definition of the terms immediately listed before it.
*/
final def dd = HtmlTagOf[*.DD]("dd")
/**
* Defines a remofinal def from the document.
*/
final def del = HtmlTagOf[*.Mod]("del")
/**
* A widget from which the user can obtain additional information
* or controls.
*/
final def details = HtmlTagOf[*.Element]("details")
/**
* Represents a term whose definition is contained in its nearest ancestor
* content.
*/
final def dfn = HtmlTagOf[*.Element]("dfn")
/**
* Represents a generic container with no special meaning.
*/
final def div = HtmlTagOf[*.Div]("div")
/**
* Defines a definition list; a list of terms and their associated definitions.
*/
final def dl = HtmlTagOf[*.DList]("dl")
/**
* Represents a term defined by the next dd
*/
final def dt = HtmlTagOf[*.DT]("dt")
/**
* Represents emphasized text.
*/
final def em = HtmlTagOf[*.Element]("em")
/**
* Represents a integration point for an external, often non-HTML, application
* or interactive content.
*/
final def embed: HtmlTagOf[*.Embed] = "embed".reactTerminalTag
/**
* A set of fields.
*/
final def fieldset = HtmlTagOf[*.FieldSet]("fieldset")
/**
* Represents the legend of a figure.
*/
final def figcaption = HtmlTagOf[*.Element]("figcaption")
/**
* Represents a figure illustrated as part of the document.
*/
final def figure = HtmlTagOf[*.Element]("figure")
/**
* Defines the footer for a page or section. It often contains a copyright
* notice, some links to legal information, or addresses to give feedback.
*/
final def footer = HtmlTagOf[*.Element]("footer")
/**
* Represents a form, consisting of controls, that can be submitted to a
* server for processing.
*/
final def form = HtmlTagOf[*.Form]("form")
/**
* Heading level 1
*/
final def h1 = HtmlTagOf[*.Heading]("h1")
/**
* Heading level 2
*/
final def h2 = HtmlTagOf[*.Heading]("h2")
/**
* Heading level 3
*/
final def h3 = HtmlTagOf[*.Heading]("h3")
/**
* Heading level 4
*/
final def h4 = HtmlTagOf[*.Heading]("h4")
/**
* Heading level 5
*/
final def h5 = HtmlTagOf[*.Heading]("h5")
/**
* Heading level 6
*/
final def h6 = HtmlTagOf[*.Heading]("h6")
/**
* Represents a collection of metadata about the document, including links to,
* or definitions of, scripts and style sheets.
*/
final def head = HtmlTagOf[*.Head]("head")
/**
* Defines the header of a page or section. It often contains a logo, the
* title of the Web site, and a navigational table of content.
*/
final def header = HtmlTagOf[*.Element]("header")
/**
* Represents a thematic break between paragraphs of a section or article or
* any longer content.
*/
final def hr: HtmlTagOf[*.HR] = "hr".reactTerminalTag
/**
* Represents the root of an HTML or XHTML document. All other elements must
* be descendants of this element.
*/
final def html = HtmlTagOf[*.Html]("html")
/**
* Italicized text.
*/
final def i = HtmlTagOf[*.Element]("i")
/**
* Represents a nested browsing context, that is an embedded HTML document.
*/
final def iframe = HtmlTagOf[*.IFrame]("iframe")
/**
* Represents an image.
*/
final def img: HtmlTagOf[*.Image] = "img".reactTerminalTag
/**
* The HTML element <input> is used to create interactive controls for web-based forms in order to accept data
* from the user. How an <input> works varies considerably depending on the value of its type attribute.
*/
object input extends TagOf[*.Input]("input", Nil, Namespace.Html) {
private[this] val `type` = VdomAttr[String]("type")
/** Returns a <input type="{t}" /> */
def withType(t: String): TagOf[*.Input] =
this(`type`.:=(t)(Attr.ValueType.string))
/** A push button with no default behavior. */
def button = this withType "button"
/** A check box. You must use the value attribute to define the value submitted by this item. Use the checked attribute to indicate whether this item is selected. You can also use the indeterminate attribute to indicate that the checkbox is in an indeterminate state (on most platforms, this draws a horizontal line across the checkbox). */
val checkbox = this withType "checkbox"
/** [HTML5] A control for specifying a color. A color picker's UI has no required features other than accepting simple colors as text (more info). */
def color = this withType "color"
/** [HTML5] A control for entering a date (year, month, and day, with no time). */
def date = this withType "date"
/** [HTML5] A control for entering a date and time (hour, minute, second, and fraction of a second) based on UTC time zone. */
def datetime = this withType "datetime"
/** [HTML5] A control for entering a date and time, with no time zone. */
def datetimeLocal = this withType "datetime-local"
/** [HTML5] A field for editing an e-mail address. The input value is validated to contain either the empty string or a single valid e-mail address before submitting. The :valid and :invalid CSS pseudo-classes are applied as appropriate. */
def email = this withType "email"
/** A control that lets the user select a file. Use the accept attribute to define the types of files that the control can select. */
def file = this withType "file"
/** A control that is not displayed, but whose value is submitted to the server. */
def hidden = this withType "hidden"
/** A graphical submit button. You must use the src attribute to define the source of the image and the alt attribute to define alternative text. You can use the height and width attributes to define the size of the image in pixels. */
def image = this withType "image"
/** [HTML5] A control for entering a month and year, with no time zone. */
def month = this withType "month"
/** [HTML5] A control for entering a floating point number. */
def number = this withType "number"
/** A single-line text field whose value is obscured. Use the maxlength attribute to specify the maximum length of the value that can be entered. */
def password = this withType "password"
/** A radio button. You must use the value attribute to define the value submitted by this item. Use the checked attribute to indicate whether this item is selected by default. Radio buttons that have the same value for the name attribute are in the same "radio button group"; only one radio button in a group can be selected at a time. */
def radio = this withType "radio"
/** [HTML5] A control for entering a number whose exact value is not important. This type control uses the following default values if the corresponding attributes are not specified: */
def range = this withType "range"
/** A button that resets the contents of the form to default values. */
def reset = this withType "reset"
/** [HTML5] A single-line text field for entering search strings; line-breaks are automatically removed from the input value. */
def search = this withType "search"
/** A button that submits the form. */
def submit = this withType "submit"
/** [HTML5] A control for entering a telephone number; line-breaks are automatically removed from the input value, but no other syntax is enforced. You can use attributes such as pattern and maxlength to restrict values entered in the control. The :valid and :invalid CSS pseudo-classes are applied as appropriate. */
def tel = this withType "tel"
/** A single-line text field; line-breaks are automatically removed from the input value. */
val text = this withType "text"
/** [HTML5] A control for entering a time value with no time zone. */
def time = this withType "time"
/** [HTML5] A field for editing a URL. The input value is validated to contain either the empty string or a valid absolute URL before submitting. Line-breaks and leading or trailing whitespace are automatically removed from the input value. You can use attributes such as pattern and maxlength to restrict values entered in the control. The :valid and :invalid CSS pseudo-classes are applied as appropriate. */
def url = this withType "url"
/** [HTML5] A control for entering a date consisting of a week-year number and a week number with no time zone. */
def week = this withType "week"
}
/**
* Defines an addition to the document.
*/
final def ins = HtmlTagOf[*.Mod]("ins")
/**
* Represents user input, often from a keyboard, but not necessarily.
*/
final def kbd = HtmlTagOf[*.Element]("kbd")
/**
* A key-pair generator control.
*/
final def keygen: HtmlTagOf[*.Element] = "keygen".reactTerminalTag
/**
* The caption of a single field
*/
final def label = HtmlTagOf[*.Label]("label")
/**
* The caption for a fieldset.
*/
final def legend = HtmlTagOf[*.Legend]("legend")
/**
* Defines an item of an list.
*/
final def li = HtmlTagOf[*.LI]("li")
/**
* Used to link JavaScript and external CSS with the current HTML document.
*/
final def link: HtmlTagOf[*.Link] = "link".reactTerminalTag
/**
* Defines the main or important content in the document. There is only one
* main element in the document.
*/
final def main = HtmlTagOf[*.Element]("main")
/**
* In conjunction with area, defines an image map.
*/
final def map = HtmlTagOf[*.Map]("map")
/**
* Represents text highlighted for reference purposes, that is for its
* relevance in another context.
*/
final def mark = HtmlTagOf[*.Element]("mark")
/**
* Defines a mathematical formula.
*/
final def math = HtmlTagOf[*.Element]("math")
/**
* A list of commands
*/
final def menu = HtmlTagOf[*.Menu]("menu")
/**
* Defines metadata that can't be defined using another HTML element.
*/
final def meta: HtmlTagOf[*.Meta] = "meta".reactTerminalTag
/**
* A scalar measurement within a known range.
*/
final def meter = HtmlTagOf[*.Element]("meter")
/**
* Represents a section of a page that links to other pages or to parts within
* the page: a section with navigation links.
*/
final def nav = HtmlTagOf[*.Element]("nav")
/**
* Defines alternative content to display when the browser doesn't support
* scripting.
*/
final def noscript = HtmlTagOf[*.Element]("noscript")
/**
* Represents an external resource, which is treated as an image, an HTML
* sub-document, or an external resource to be processed by a plug-in.
*/
final def `object` = HtmlTagOf[*.Object]("object")
/**
* Defines an ordered list of items.
*/
final def ol = HtmlTagOf[*.OList]("ol")
/**
* A set of options, logically grouped.
*/
final def optgroup = HtmlTagOf[*.OptGroup]("optgroup")
/**
* An option in a select element.
*/
final def option = HtmlTagOf[*.Option]("option")
/**
* The result of a calculation
*/
final def output = HtmlTagOf[*.Element]("output")
/**
* Defines a portion that should be displayed as a paragraph.
*/
final def p = HtmlTagOf[*.Paragraph]("p")
/**
* Defines parameters for use by plug-ins invoked by object elements.
*/
final def param: HtmlTagOf[*.Param] = "param".reactTerminalTag
/**
* Indicates that its content is preformatted and that this format must be
* preserved.
*/
final def pre = HtmlTagOf[*.Pre]("pre")
/**
* A progress completion bar
*/
final def progress = HtmlTagOf[*.Progress]("progress")
/**
* An inline quotation.
*/
final def q = HtmlTagOf[*.Quote]("q")
/**
* Represents parenthesis around a ruby annotation, used to display the
* annotation in an alternate way by browsers not supporting the standard
* display for annotations.
*/
final def rp = HtmlTagOf[*.Element]("rp")
/**
* Represents the text of a ruby annotation.
*/
final def rt = HtmlTagOf[*.Element]("rt")
/**
* Represents content to be marked with ruby annotations, short runs of text
* presented alongside the text. This is often used in conjunction with East
* Asian language where the annotations act as a guide for pronunciation, like
* the Japanese furigana .
*/
final def ruby = HtmlTagOf[*.Element]("ruby")
/**
* Strikethrough element, used for that is no longer accurate or relevant.
*/
final def s = HtmlTagOf[*.Element]("s")
/**
* Represents the output of a program or a computer.
*/
final def samp = HtmlTagOf[*.Element]("samp")
/**
* Defines either an internal script or a link to an external script. The
* script language is JavaScript.
*/
final def script = HtmlTagOf[*.Script]("script")
/**
* Represents a generic section of a document, i.e., a thematic grouping of
* content, typically with a heading.
*/
final def section = HtmlTagOf[*.Element]("section")
/**
* A control that allows the user to select one of a set of options.
*/
final def select = HtmlTagOf[*.Select]("select")
/**
* Represents a side comment; text like a disclaimer or copyright, which is not
* essential to the comprehension of the document.
*/
final def small = HtmlTagOf[*.Element]("small")
/**
* Allows the authors to specify alternate media resources for media elements
* like video or audio
*/
final def source: HtmlTagOf[*.Source] = "source".reactTerminalTag
/**
* Represents text with no specific meaning. This has to be used when no other
* text-semantic element conveys an adequate meaning, which, in this case, is
* often brought by global attributes like class, lang, or dir.
*/
final def span = HtmlTagOf[*.Span]("span")
/**
* Represents especially important text.
*/
final def strong = HtmlTagOf[*.Element]("strong")
/**
* Used to write inline CSS.
*/
final def styleTag = HtmlTagOf[*.Style]("style")
/**
* Subscript tag
*/
final def sub = HtmlTagOf[*.Element]("sub")
/**
* A summary, caption, or legend for a given details.
*/
final def summary = HtmlTagOf[*.Element]("summary")
/**
* Superscript tag.
*/
final def sup = HtmlTagOf[*.Element]("sup")
/**
* Represents data with more than one dimension.
*/
final def table = HtmlTagOf[*.Table]("table")
/**
* The table body.
*/
final def tbody = HtmlTagOf[*.TableSection]("tbody")
/**
* A single cell in a table.
*/
final def td = HtmlTagOf[*.TableCell]("td")
/**
* A multiline text edit control.
*/
final def textarea = HtmlTagOf[*.TextArea]("textarea")
/**
* The table footer.
*/
final def tfoot = HtmlTagOf[*.TableSection]("tfoot")
/**
* A header cell in a table.
*/
final def th = HtmlTagOf[*.TableHeaderCell]("th")
/**
* The table headers.
*/
final def thead = HtmlTagOf[*.TableSection]("thead")
/**
* Represents a date and time value; the machine-readable equivalent can be
* represented in the datetime attribetu
*/
final def time = HtmlTagOf[*.Element]("time")
/**
* Defines the title of the document, shown in a browser's title bar or on the
* page's tab. It can only contain text and any contained tags are not
* interpreted.
*/
final def titleTag = HtmlTagOf[*.Title]("title")
/**
* A single row in a table.
*/
final def tr = HtmlTagOf[*.TableRow]("tr")
/**
* Allows authors to specify timed text track for media elements like video or
* audio
*/
final def track: HtmlTagOf[*.Track] = "track".reactTerminalTag
/**
* Underlined text.
*/
final def u = HtmlTagOf[*.Element]("u")
/**
* Defines an unordered list of items.
*/
final def ul = HtmlTagOf[*.UList]("ul")
/**
* Represents a variable.
*/
final def `var` = HtmlTagOf[*.Element]("var")
/**
* Represents a line break opportunity, that is a suggested point for wrapping
* text in order to improve readability of text split on several lines.
*/
final def wbr: HtmlTagOf[*.Element] = "wbr".reactTerminalTag
/**
* Represents a video, and its associated audio files and captions, with the
* necessary interface to play it.
*/
final def video = HtmlTagOf[*.Video]("video")
}
| matthughes/scalajs-react | core/src/main/scala/japgolly/scalajs/react/vdom/HtmlTags.scala | Scala | apache-2.0 | 20,696 |
package com.sparcedge.turbine.ejournal
import scala.collection.JavaConversions._
import scala.collection.mutable
import scala.concurrent.ExecutionContext.Implicits.global
import akka.actor.{Actor,ActorRef,Props,ActorLogging}
import scala.concurrent.duration._
import journal.io.api.{Journal,Location}
import Journal.ReadType
import com.sparcedge.turbine.event.EventPackage
import com.sparcedge.turbine.data.WriteHandler
object JournalReader {
	/** Ack that a single journaled event is persisted; its journal entry may be deleted. */
	case class EventWrittenToDisk(id: String)
	/** Batched variant of EventWrittenToDisk. */
	case class EventsWrittenToDisk(ids: Iterable[String])
	/** Self-scheduled tick: replay new journal entries and dispatch them for writing. */
	case object ProcessJournalEvents
	/** Self-scheduled tick: compact the journal files. */
	case object CompactJournal
}
import JournalReader._
import WriteHandler._
class JournalReader(journal: Journal, writeHandlerRouter: ActorRef) extends Actor with ActorLogging {
	// Journal locations of dispatched-but-unconfirmed events, keyed by "<dataFileId>-<pointer>".
	val eventLocations = mutable.Map[String,Location]()
	// Delays (milliseconds) between journal-processing passes and between compactions.
	val processJournalDelay = context.system.settings.config.getInt("com.sparcedge.turbinedb.journal.process-delay")
	val compactJournalDelay = context.system.settings.config.getInt("com.sparcedge.turbinedb.journal.compact-delay")
	context.system.scheduler.scheduleOnce(100.milliseconds, self, CompactJournal) // Compact Journal After 100ms
	// Last journal location handed off for processing; replay resumes after it.
	var lastLocation: Option[Location] = None
	scheduleProcessJournalMessage()
	// Actor protocol: disk-write acknowledgements plus self-scheduled ticks.
	def receive = {
		case EventWrittenToDisk(id) =>
			removeEventFromJournal(id)
		case EventsWrittenToDisk(ids) =>
			ids foreach {id =>
				removeEventFromJournal(id)
			}
		case ProcessJournalEvents =>
			log.debug("Processing Journal Events")
			processJournalEvents()
		case CompactJournal =>
			compactJournal()
		case _ =>
	}
def compactJournal() {
println("Compacting Journal!")
journal.compact()
println("Finished Compacting Journal!")
scheduleCompactJournalMessage()
}
	/** Deletes the journal entry for an event that is now safely on disk.
	  * No-op (aside from the log line) when the id is not tracked. */
	def removeEventFromJournal(id: String) {
		eventLocations.remove(id) foreach { location =>
			journal.delete(location)
		}
		log.debug("Removed event from journal: {}", id)
	}
def processJournalEvents() {
var skip = lastLocation.isDefined
val journalIterator = lastLocation map { ll => journal.redo(ll) } getOrElse { journal.redo() }
journalIterator foreach { loc =>
val locKey = createLocationKey(loc)
val eventBytes = journal.read(loc, ReadType.ASYNC)
if(!skip) {
writeHandlerRouter ! WriteEventRequest(locKey, eventBytes)
eventLocations(locKey) = loc
lastLocation = Some(loc)
} else {
skip = false
}
}
scheduleProcessJournalMessage()
}
	/** Schedules the next ProcessJournalEvents tick after the configured delay. */
	def scheduleProcessJournalMessage() {
		context.system.scheduler.scheduleOnce(processJournalDelay.milliseconds, self, ProcessJournalEvents)
	}
	/** Schedules the next CompactJournal tick after the configured delay. */
	def scheduleCompactJournalMessage() {
		context.system.scheduler.scheduleOnce(compactJournalDelay.milliseconds, self, CompactJournal)
	}
	/** Stable string key ("<dataFileId>-<pointer>") identifying a journal location. */
	def createLocationKey(loc: Location): String = {
		s"${loc.getDataFileId}-${loc.getPointer}"
	}
} | bobwilliams/turbinedb | src/main/scala/com/sparcedge/turbine/ejournal/JournalReader.scala | Scala | gpl-3.0 | 2,783 |
/* __ *\\
** ________ ___ / / ___ __ ____ Scala.js Test Suite **
** / __/ __// _ | / / / _ | __ / // __/ (c) 2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ |/_// /_\\ \\ http://scala-js.org/ **
** /____/\\___/_/ |_/____/_/ | |__/ /____/ **
** |/____/ **
\\* */
package org.scalajs.testsuite.javalib.net
import java.net.{URI, URISyntaxException}
import org.junit.Assert._
import org.junit.Test
import org.scalajs.testsuite.utils.AssertThrows._
import org.scalajs.testsuite.utils.Platform.executingInJVM
class URITest {
  /**
   * Asserts every accessor of `uri` against the expected components.
   * First parameter list: the URI under test and its absolute/opaque flags.
   * Second list: decoded components (null / -1 = component absent, matching
   * java.net.URI accessor conventions).
   * Third list: raw (still-escaped) components, defaulting to the decoded
   * values so callers only override them when escaping is involved.
   */
  def expectURI(uri: URI, isAbsolute: Boolean, isOpaque: Boolean)(
      authority: String = null, fragment: String = null,
      host: String = null, path: String = null, port: Int = -1,
      query: String = null, scheme: String = null, userInfo: String = null,
      schemeSpecificPart: String = null)(rawAuthority: String = authority,
      rawFragment: String = fragment, rawPath: String = path,
      rawQuery: String = query, rawUserInfo: String = userInfo,
      rawSchemeSpecificPart: String = schemeSpecificPart): Unit = {
    assertEquals(authority, uri.getAuthority())
    assertEquals(fragment, uri.getFragment())
    assertEquals(host, uri.getHost())
    assertEquals(path, uri.getPath())
    assertEquals(port, uri.getPort())
    assertEquals(query, uri.getQuery())
    assertEquals(rawAuthority, uri.getRawAuthority())
    assertEquals(rawFragment, uri.getRawFragment())
    assertEquals(rawPath, uri.getRawPath())
    assertEquals(rawQuery, uri.getRawQuery())
    assertEquals(rawSchemeSpecificPart, uri.getRawSchemeSpecificPart())
    assertEquals(rawUserInfo, uri.getRawUserInfo())
    assertEquals(scheme, uri.getScheme())
    assertEquals(schemeSpecificPart, uri.getSchemeSpecificPart())
    assertEquals(userInfo, uri.getUserInfo())
    assertEquals(isAbsolute, uri.isAbsolute())
    assertEquals(isOpaque, uri.isOpaque())
  }
@Test def should_parse_vanilla_absolute_URIs(): Unit = {
expectURI(new URI("http://java.sun.com/j2se/1.3/"), true, false)(
scheme = "http",
host = "java.sun.com",
path = "/j2se/1.3/",
authority = "java.sun.com",
schemeSpecificPart = "//java.sun.com/j2se/1.3/")()
}
@Test def should_parse_absolute_URIs_with_empty_path(): Unit = {
expectURI(new URI("http://foo:bar"), true, false)(
authority = "foo:bar",
path = "",
scheme = "http",
schemeSpecificPart = "//foo:bar")()
}
@Test def should_parse_absolute_URIs_with_IPv6(): Unit = {
val uri = new URI("http://hans@[ffff::0:128.4.5.3]:345/~hans/")
expectURI(uri, true, false)(
scheme = "http",
host = "[ffff::0:128.4.5.3]",
userInfo = "hans",
port = 345,
path = "/~hans/",
authority = "hans@[ffff::0:128.4.5.3]:345",
schemeSpecificPart = "//hans@[ffff::0:128.4.5.3]:345/~hans/")()
}
@Test def should_parse_absolute_URIs_without_authority(): Unit = {
expectURI(new URI("file:/~/calendar"), true, false)(
scheme = "file",
path = "/~/calendar",
schemeSpecificPart = "/~/calendar")()
}
@Test def should_parse_absolute_URIs_with_empty_authority(): Unit = {
expectURI(new URI("file:///~/calendar"), true, false)(
scheme = "file",
path = "/~/calendar",
schemeSpecificPart = "///~/calendar")()
}
@Test def should_parse_opaque_URIs(): Unit = {
expectURI(new URI("mailto:java-net@java.sun.com"), true, true)(
scheme = "mailto",
schemeSpecificPart = "java-net@java.sun.com")()
expectURI(new URI("news:comp.lang.java"), true, true)(
scheme = "news",
schemeSpecificPart = "comp.lang.java")()
expectURI(new URI("urn:isbn:096139210x"), true, true)(
scheme = "urn",
schemeSpecificPart = "isbn:096139210x")()
}
  // Relative references (no scheme): everything before '#' is path (+query).
  @Test def should_parse_relative_URIs(): Unit = {
    expectURI(new URI("docs/guide/collections/designfaq.html#28"), false, false)(
      path = "docs/guide/collections/designfaq.html",
      fragment = "28",
      schemeSpecificPart = "docs/guide/collections/designfaq.html")()
    expectURI(new URI("../../../demo/jfc/SwingSet2/src/SwingSet2.java"), false, false)(
      path = "../../../demo/jfc/SwingSet2/src/SwingSet2.java",
      schemeSpecificPart = "../../../demo/jfc/SwingSet2/src/SwingSet2.java")()
  }
  // A scheme-less "//host:port/path" reference still gets a parsed authority.
  @Test def should_parse_relative_URIs_with_IPv4(): Unit = {
    expectURI(new URI("//123.5.6.3:45/bar"), false, false)(
      authority = "123.5.6.3:45",
      host = "123.5.6.3",
      port = 45,
      path = "/bar",
      schemeSpecificPart = "//123.5.6.3:45/bar")()
  }
  // "foo:bar" is not a valid host:port (port is non-numeric), so the
  // authority is registry-based: kept as a whole, host/port stay undefined.
  @Test def should_parse_relative_URIs_with_registry_based_authority(): Unit = {
    expectURI(new URI("//foo:bar"), false, false)(
      authority = "foo:bar",
      path = "",
      schemeSpecificPart = "//foo:bar")()
  }
  // Percent-escapes are decoded in the component accessors but preserved in
  // the corresponding raw* accessors.
  @Test def should_parse_relative_URIs_with_escapes(): Unit = {
    expectURI(new URI("//ma%5dx:secret@example.com:8000/foo"), false, false)(
      authority = "ma]x:secret@example.com:8000",
      userInfo = "ma]x:secret",
      host = "example.com",
      port = 8000,
      path = "/foo",
      schemeSpecificPart = "//ma]x:secret@example.com:8000/foo")(
      rawUserInfo = "ma%5dx:secret",
      rawAuthority = "ma%5dx:secret@example.com:8000",
      rawSchemeSpecificPart = "//ma%5dx:secret@example.com:8000/foo")
  }
  // A fragment-only reference has an empty path and empty scheme-specific part.
  @Test def should_parse_relative_URIs_with_fragment_only(): Unit = {
    expectURI(new URI("#foo"), false, false)(
      fragment = "foo",
      path = "",
      schemeSpecificPart = "")()
  }
  // The query belongs to the scheme-specific part; the fragment does not.
  @Test def should_parse_relative_URIs_with_query_and_fragment(): Unit = {
    expectURI(new URI("?query=1#foo"), false, false)(
      query = "query=1",
      fragment = "foo",
      path = "",
      schemeSpecificPart = "?query=1")()
  }
  // compareTo compares the raw (still-escaped) text, so "%6a" and "%6A"
  // compare unequal even though equals treats them as the same octet.
  // Per the assertions, relative URIs order before absolute ones.
  @Test def should_provide_compareTo(): Unit = {
    val x = new URI("http://example.com/asdf%6a")
    val y = new URI("http://example.com/asdf%6A")
    val z = new URI("http://example.com/asdfj")
    val rel = new URI("/foo/bar")
    assertTrue(x.compareTo(y) > 0)
    assertTrue(x.compareTo(z) < 0)
    assertTrue(y.compareTo(z) < 0)
    assertEquals(0, x.compareTo(x))
    assertEquals(0, y.compareTo(y))
    assertEquals(0, z.compareTo(z))
    assertTrue(x.compareTo(rel) > 0)
    assertTrue(y.compareTo(rel) > 0)
    assertTrue(z.compareTo(rel) > 0)
    assertEquals(0, rel.compareTo(rel))
  }
  // equals treats the hex digits of an escape case-insensitively ("%6a" ==
  // "%6A") but never equates an escaped octet with its decoded character
  // ("asdf%6A" != "asdfj"), and escape *order* matters ("%6b%6C" != "%6C%6b").
  @Test def should_provide_equals(): Unit = {
    val x = new URI("http://example.com/asdf%6a")
    val y = new URI("http://example.com/asdf%6A")
    val z = new URI("http://example.com/asdfj")
    assertTrue(x == y)
    assertFalse(x == z)
    assertFalse(y == z)
    assertTrue(x == x)
    assertTrue(y == y)
    assertTrue(z == z)
    assertNotEquals(new URI("foo:helloWorld%6b%6C"), new URI("foo:helloWorld%6C%6b"))
  }
  // normalize drops "." segments and collapses "<segment>/.." pairs; leading
  // ".." segments that cannot be matched are preserved. For relative paths
  // whose first segment contains ":", a "./" prefix is added so the result
  // is not mistaken for a scheme (see the "fo:o" cases).
  @Test def should_provide_normalize(): Unit = {
    expectURI(new URI("http://example.com/../asef/../../").normalize, true, false)(
      scheme = "http",
      host = "example.com",
      authority = "example.com",
      path = "/../../",
      schemeSpecificPart = "//example.com/../../")()
    expectURI(new URI("http://example.com/../as/./ef/foo/../../").normalize, true, false)(
      scheme = "http",
      host = "example.com",
      authority = "example.com",
      path = "/../as/",
      schemeSpecificPart = "//example.com/../as/")()
    expectURI(new URI("bar/../fo:o/./bar").normalize, false, false)(
      path = "./fo:o/bar",
      schemeSpecificPart = "./fo:o/bar")()
    expectURI(new URI("bar/..//fo:o//./bar").normalize, false, false)(
      path = "./fo:o/bar",
      schemeSpecificPart = "./fo:o/bar")()
    expectURI(new URI("").normalize, false, false)(
      path = "",
      schemeSpecificPart = "")()
    // An already-normalized URI is returned as the same instance.
    val x = new URI("http://www.example.com/foo/bar")
    assertTrue(x.normalize eq x)
  }
  // Resolution examples taken from the java.net.URI Javadoc, plus the
  // empty-URI corner cases in both directions.
  @Test def should_provide_resolve__JavaDoc_examples(): Unit = {
    val base = "http://java.sun.com/j2se/1.3/"
    val relative1 = "docs/guide/collections/designfaq.html#28"
    val resolved1 =
      "http://java.sun.com/j2se/1.3/docs/guide/collections/designfaq.html#28"
    val relative2 = "../../../demo/jfc/SwingSet2/src/SwingSet2.java"
    val resolved2 =
      "http://java.sun.com/j2se/1.3/demo/jfc/SwingSet2/src/SwingSet2.java"
    assertEquals(resolved1, new URI(base).resolve(relative1).toString)
    assertEquals(resolved2, new URI(resolved1).resolve(relative2).toString)
    assertEquals("/a/", new URI("").resolve("/a/").toString)
    assertEquals("/a/", new URI("/a/").resolve("").toString)
  }
@Test def should_provide_resolve_RFC2396_examples(): Unit = {
val base = new URI("http://a/b/c/d;p?q")
def resTest(ref: String, trg: String): Unit =
assertEquals(trg, base.resolve(ref).toString)
// Normal examples
resTest("g:h", "g:h")
resTest("g", "http://a/b/c/g")
resTest("./g", "http://a/b/c/g")
resTest("g/", "http://a/b/c/g/")
resTest("/g", "http://a/g")
resTest("//g", "http://g")
resTest("?y", "http://a/b/c/?y")
resTest("g?y", "http://a/b/c/g?y")
resTest("#s", "http://a/b/c/d;p?q#s")
resTest("g#s", "http://a/b/c/g#s")
resTest("g?y#s", "http://a/b/c/g?y#s")
resTest(";x", "http://a/b/c/;x")
resTest("g;x", "http://a/b/c/g;x")
resTest("g;x?y#s", "http://a/b/c/g;x?y#s")
resTest(".", "http://a/b/c/")
resTest("./", "http://a/b/c/")
resTest("..", "http://a/b/")
resTest("../", "http://a/b/")
resTest("../g", "http://a/b/g")
resTest("../..", "http://a/")
resTest("../../", "http://a/")
resTest("../../g", "http://a/g")
// Abnormal examples
resTest("../../../g", "http://a/../g")
resTest("../../../../g", "http://a/../../g")
resTest("/./g", "http://a/./g")
resTest("/../g", "http://a/../g")
resTest("g.", "http://a/b/c/g.")
resTest(".g", "http://a/b/c/.g")
resTest("g..", "http://a/b/c/g..")
resTest("..g", "http://a/b/c/..g")
resTest("./../g", "http://a/b/g")
resTest("./g/.", "http://a/b/c/g/")
resTest("g/./h", "http://a/b/c/g/h")
resTest("g/../h", "http://a/b/c/h")
resTest("g;x=1/./y", "http://a/b/c/g;x=1/y")
resTest("g;x=1/../y", "http://a/b/c/y")
resTest("g?y/./x", "http://a/b/c/g?y/./x")
resTest("g?y/../x", "http://a/b/c/g?y/../x")
resTest("g#s/./x", "http://a/b/c/g#s/./x")
resTest("g#s/../x", "http://a/b/c/g#s/../x")
resTest("http:g", "http:g")
}
@Test def should_provide_resolve_when_authority_is_empty__issue_2048(): Unit = {
val base = new URI("http://foo/a")
def resTest(ref: String, trg: String): Unit =
assertEquals(trg, base.resolve(ref).toString)
resTest("///a", "http://foo/a")
resTest("/b", "http://foo/b")
resTest("/b/../d", "http://foo/b/../d")
}
  // Trailing "/.." and "/." resolve against the previous segment but keep the
  // trailing slash of the resulting directory path.
  @Test def should_provide_normalize__examples_derived_from_RFC_relativize(): Unit = {
    expectURI(new URI("http://a/b/c/..").normalize, true, false)(
      scheme = "http",
      host = "a",
      authority = "a",
      path = "/b/",
      schemeSpecificPart = "//a/b/")()
    expectURI(new URI("http://a/b/c/.").normalize, true, false)(
      scheme = "http",
      host = "a",
      authority = "a",
      path = "/b/c/",
      schemeSpecificPart = "//a/b/c/")()
  }
  @Test def should_provide_relativize(): Unit = {
    // Raw escape text matters: "%4A" vs the literal "J" prevents relativizing
    // (y is returned unchanged), while "%4a" vs "%4A" still matches.
    val x = new URI("http://f%4Aoo@asdf/a")
    val y = new URI("http://fJoo@asdf/a/b/")
    val z = new URI("http://f%4aoo@asdf/a/b/")
    assertTrue(x.relativize(y) eq y)
    assertEquals("b/", x.relativize(z).toString())
    def relTest(base: String, trg: String, exp: String): Unit =
      assertEquals(exp, new URI(base).relativize(new URI(trg)).toString())
    relTest("http://a.ch/a", "http://a.ch/a/b", "b")
    relTest("http://a.ch/a/", "http://a.ch/a/b", "b")
    relTest("https://a.ch/a", "http://a.ch/a/b", "http://a.ch/a/b")
    relTest("/a/b/c", "/a/b/c/d/e", "d/e")
    relTest("/a/b/c/", "/a/b/c/d/e", "d/e")
    relTest("/a/b/c/", "/a/b/c/foo:e/d", "foo:e/d") // see bug JDK-7037120
    relTest("../a/b", "../a/b/c", "c")
    relTest("../a/b", "", "")
    relTest("", "../a/b", "../a/b")
    relTest("file:///a", "file:///a/b/", "b/")
    relTest("file:/c", "file:///c/d/", "d/")
  }
  // Equal URIs (escape hex digits differing only in case) must hash equally.
  @Test def should_provide_hashCode(): Unit = {
    if (!executingInJVM) { // Fails on JDK6 and JDK7
      assertEquals(new URI("http://example.com/asdf%6a").hashCode,
          new URI("http://example.com/asdf%6A").hashCode)
    }
  }
  // Raw (unencoded) non-ASCII characters are accepted in the path.
  @Test def should_allow_non_ASCII_characters(): Unit = {
    expectURI(new URI("http://cs.dbpedia.org/resource/Víno"), true, false)(
      scheme = "http",
      host = "cs.dbpedia.org",
      path = "/resource/Víno",
      authority = "cs.dbpedia.org",
      schemeSpecificPart = "//cs.dbpedia.org/resource/Víno")()
  }
  // Percent-escaped bytes are decoded as UTF-8 (including multi-byte
  // sequences); the raw* accessors keep the escaped form.
  @Test def should_decode_UTF_8(): Unit = {
    expectURI(new URI("http://cs.dbpedia.org/resource/V%C3%ADno"), true, false)(
      scheme = "http",
      host = "cs.dbpedia.org",
      path = "/resource/Víno",
      authority = "cs.dbpedia.org",
      schemeSpecificPart = "//cs.dbpedia.org/resource/Víno")(
      rawPath = "/resource/V%C3%ADno",
      rawSchemeSpecificPart = "//cs.dbpedia.org/resource/V%C3%ADno")
    expectURI(new URI("%e3%81%93a%e3%82%93%e3%81%AB%e3%81%a1%e3%81%af"), false, false)(
      path = "こaんにちは",
      schemeSpecificPart = "こaんにちは")(
      rawPath = "%e3%81%93a%e3%82%93%e3%81%AB%e3%81%a1%e3%81%af",
      rawSchemeSpecificPart = "%e3%81%93a%e3%82%93%e3%81%AB%e3%81%a1%e3%81%af")
  }
  // toASCIIString percent-encodes every non-ASCII character as UTF-8,
  // including characters outside the BMP (surrogate pairs).
  @Test def should_support_toASCIIString(): Unit = {
    def cmp(base: String, encoded: String): Unit =
      assertEquals(encoded, new URI(base).toASCIIString())
    cmp("http://cs.dbpedia.org/resource/Víno",
        "http://cs.dbpedia.org/resource/V%C3%ADno")
    cmp("http://こaんにちは/",
        "http://%E3%81%93a%E3%82%93%E3%81%AB%E3%81%A1%E3%81%AF/")
    cmp("foo://bar/\\uD800\\uDCF5/",
        "foo://bar/%F0%90%83%B5/")
  }
  // Syntactically valid escapes that decode to malformed UTF-8 become U+FFFD
  // (the Unicode replacement character) instead of raising an error.
  @Test def should_replace_when_bad_surrogates_are_present(): Unit = {
    expectURI(new URI("http://booh/%E3a"), true, false)(
      scheme = "http",
      host = "booh",
      path = "/�a",
      authority = "booh",
      schemeSpecificPart = "//booh/�a")(
      rawPath = "/%E3a",
      rawSchemeSpecificPart = "//booh/%E3a")
    // lowercase e is kept
    expectURI(new URI("http://booh/%e3a"), true, false)(
      scheme = "http",
      host = "booh",
      path = "/�a",
      authority = "booh",
      schemeSpecificPart = "//booh/�a")(
      rawPath = "/%e3a",
      rawSchemeSpecificPart = "//booh/%e3a")
    // %E3%81 is considered as 1 malformed
    expectURI(new URI("http://booh/%E3%81a"), true, false)(
      scheme = "http",
      host = "booh",
      path = "/�a",
      authority = "booh",
      schemeSpecificPart = "//booh/�a")(
      rawPath = "/%E3%81a",
      rawSchemeSpecificPart = "//booh/%E3%81a")
    if (!executingInJVM) { // Fails on JDK6 and JDK7
      // %E3%E3 is considered as 2 malformed
      expectURI(new URI("http://booh/%E3%E3a"), true, false)(
        scheme = "http",
        host = "booh",
        path = "/��a",
        authority = "booh",
        schemeSpecificPart = "//booh/��a")(
        rawPath = "/%E3%E3a",
        rawSchemeSpecificPart = "//booh/%E3%E3a")
    }
  }
  // A truncated escape ("%E") or a non-hex digit ("%Ep") is a syntax error,
  // not a decodable-but-malformed byte sequence.
  @Test def should_throw_on_bad_escape_sequences(): Unit = {
    expectThrows(classOf[URISyntaxException], new URI("http://booh/%E"))
    expectThrows(classOf[URISyntaxException], new URI("http://booh/%Ep"))
  }
}
| lrytz/scala-js | test-suite/shared/src/test/scala/org/scalajs/testsuite/javalib/net/URITest.scala | Scala | bsd-3-clause | 15,933 |
package zeroadv.position
import com.typesafe.scalalogging.slf4j.Logging
import zeroadv._
import zeroadv.PositionedAgents
import zeroadv.BeaconSpotting
import zeroadv.BeaconSpottings
import zeroadv.PositionedBeacon
class DistanceBasedBeaconPos(
  beaconDistance: BeaconDistance,
  calculatePosition: CalculatePosition) extends BeaconPosFromSpottings with Logging {

  /**
   * Estimates the position of a beacon from the RSSI spottings reported by
   * positioned agents: every agent with a known position contributes one
   * (position, estimated-distance) pair, which is then fed to the position
   * solver.
   */
  def calculate(
    agents: PositionedAgents,
    spottings: BeaconSpottings): PositionedBeacon = {

    val measurements = for {
      (agent, timedRssi) <- spottings.history
      positionedAgent <- agents.agents.find(_.agent == agent)
    } yield {
      val sortedRssis = timedRssi.map(_.rssi).sorted
      // Trimmed mean: with more than two samples, discard the two extremes
      // and average the rest; otherwise fall back to the smallest sample.
      // NOTE(review): assumes at least one RSSI sample per agent — `head`
      // throws on an empty history entry; confirm upstream guarantees this.
      val representativeRssi =
        if (sortedRssis.size > 2) {
          val trimmed = sortedRssis.slice(1, sortedRssis.size - 1)
          trimmed.sum / trimmed.size
        } else {
          sortedRssis.head
        }
      val spotting = BeaconSpotting(spottings.beacon, agent, TimedRssi(null, representativeRssi))
      (positionedAgent.pos, beaconDistance.distanceToBeacon(spotting))
    }

    PositionedBeacon(spottings.beacon, calculatePosition.calculate(measurements.toList)._1)
  }
}
| adamw/zeroadv | collector/src/main/scala/zeroadv/position/DistanceBasedBeaconPos.scala | Scala | gpl-2.0 | 1,078 |
package in.ashwanthkumar.uclassify
import dispatch._
import dispatch.Defaults._
import scala.xml.Elem
import ResponseTransformer._
class UClassifyClient(requestBuilder: RequestBuilder) {
  import UClassifyClient.BASE_REQUEST

  /** Sends a classify request for the given texts and transforms the XML
    * response with `transformClassifyResult`. */
  def classify(classifier: String, textsToClassify: List[String], classifierUsername: Option[String] = None) =
    sendRequest(requestBuilder.classify(classifier, textsToClassify, classifierUsername).mkString)
      .map(transformClassifyResult(textsToClassify))

  /** Requests metadata about the given classifier. */
  def getInformation(classifier: String) =
    sendRequest(requestBuilder.getInformation(classifier).mkString)
      .map(transformGetInformationResult)

  /** Creates a new classifier with the given name. */
  def createClassifier(classifier: String) =
    sendRequest(requestBuilder.createClassifier(classifier).mkString).map(verifyResponse)

  /** Removes the named classifier. */
  def removeClassifier(classifier: String) =
    sendRequest(requestBuilder.removeClassifier(classifier).mkString).map(verifyResponse)

  /** Adds a class (label) to the named classifier. */
  def addClass(className: String, classifier: String) =
    sendRequest(requestBuilder.addClass(className, classifier).mkString).map(verifyResponse)

  /** Removes a class (label) from the named classifier. */
  def removeClass(className: String, classifier: String) =
    sendRequest(requestBuilder.removeClass(className, classifier).mkString).map(verifyResponse)

  /** Trains the given class of the classifier on the provided texts. */
  def train(textsToTrain: List[String], classifierClass: String, classifier: String) =
    sendRequest(requestBuilder.train(textsToTrain, classifierClass, classifier).mkString).map(verifyResponse)

  /** Undoes a previous training of the provided texts. */
  def untrain(textsToUntrain: List[String], classifierClass: String, classifier: String) =
    sendRequest(requestBuilder.untrain(textsToUntrain, classifierClass, classifier).mkString).map(verifyResponse)

  /** POSTs the XML payload to the uClassify API and parses the XML reply. */
  private[uclassify] def sendRequest(xmlStringToSend: String): dispatch.Future[Elem] =
    Http((BASE_REQUEST << xmlStringToSend) OK as.xml.Elem)
}
object UClassifyClient {
  // Fix: the original closing line had dataset/extraction metadata fused onto
  // it ("| repo | path | ... |"), which is not valid Scala.

  /** Fresh POST request aimed at the uClassify HTTP API endpoint.
    * Declared as `def` so every call gets a new request value. */
  def BASE_REQUEST = url("http://api.uclassify.com").POST

  /** Client with a default request builder (no API keys). */
  def apply() = new UClassifyClient(RequestBuilder())

  /** Client authenticated with the given read/write API keys. */
  def apply(readApiKey: String, writeApiKey: String) = new UClassifyClient(
    RequestBuilder(
      APIInfo(readApiKey, writeApiKey)
    ))
}
package aerospikez
import org.specs2.mutable.Specification
import com.typesafe.config.ConfigParseOptions
import com.typesafe.config.ConfigFactory
import com.aerospike.client.async.AsyncClient
import com.aerospike.client.Host
import scalaz.NonEmptyList
class AerospikeClientSpec extends Specification {
  // NOTE: constructing this client attempts a real connection to a local
  // Aerospike server, so these specs require one to be running.
  val aerospikezInstance = AerospikeClient.apply()
  val defaultPort: Int = 3000
  val defaultHosts: NonEmptyList[String] = Hosts("127.0.0.1:3000")
  // Loading a resource that does not exist yields a config without the
  // "aerospike" section, exercising the fallback paths below.
  val badConfigFile = ConfigFactory.load("inexistentFile")
  val goodConfigFile = ConfigFactory.parseString("""
    aerospike {
      port = 4000
      hosts = ["234.15.67.102:5000", "56.34.109.100"]
    }
  """, ConfigParseOptions.defaults)
  "An AerospikeClient" should {
    "received a optional hosts list, client config and config file" in {
      AerospikeClient()
      AerospikeClient(Hosts("127.0.0.1:3000"))
      AerospikeClient(Hosts("127.0.0.1:3000"), ClientConfig())
      AerospikeClient(Hosts("127.0.0.1:3000"), ClientConfig(), badConfigFile)
      // Each expression above connects to the Aerospike server; if no
      // exception was thrown, construction with every arity works.
      success
    }
    "create an async client" in {
      aerospikezInstance.asyncClient should beAnInstanceOf[AsyncClient]
    }
  }
  //////////////////////////////////////////////////////////////////////
  // The following methods are composed when an Aerospike client is   //
  // created, so if all tests succeed the composition works as        //
  // expected.                                                        //
  //////////////////////////////////////////////////////////////////////
  "getPort method" should {
    "be take the general port indicated in the configuration file as default port" in {
      aerospikezInstance.getPort(goodConfigFile) must beEqualTo(4000)
    }
    "be use the 3000 port as default if there are not port indicated in the configuration file" in {
      aerospikezInstance.getPort(badConfigFile) must beEqualTo(defaultPort)
    }
  }
  "getHosts method" should {
    "be take the hosts/nodes that are indicated in the configuration file" in {
      aerospikezInstance.getHosts(
        goodConfigFile,
        defaultHosts).list must containTheSameElementsAs(List("234.15.67.102:5000", "56.34.109.100"))
    }
    "be use the \\"localhost\\" if there are not hosts indicated in the configuration file" in {
      aerospikezInstance.getHosts(
        badConfigFile,
        defaultHosts).list must containTheSameElementsAs(List("localhost"))
    }
  }
  "createHosts method" should {
    val firstHost = aerospikezInstance.getHosts(goodConfigFile, defaultHosts).head
    val secondHost = aerospikezInstance.getHosts(goodConfigFile, defaultHosts).tail.head
    "be create a host with address and port indicate in the configuration file" in {
      val host = aerospikezInstance.createHost(firstHost, defaultPort)
      host must beAnInstanceOf[Host]
      host.name must beEqualTo("234.15.67.102")
      host.port must beEqualTo(5000)
    }
    "be create host with the address and default port if only the address are indicate in the configuration file" in {
      val host = aerospikezInstance.createHost(secondHost, defaultPort)
      host must beAnInstanceOf[Host]
      host.name must beEqualTo("56.34.109.100")
      host.port must beEqualTo(3000)
    }
    "throw a exception if the host (string) are bad formed" in {
      // lazy so the exception is raised inside the matcher, not at val init.
      lazy val host = aerospikezInstance.createHost("123.101.34.101:3000:lala", defaultPort)
      host must throwA[IllegalArgumentException]
    }
  }
}
| otrimegistro/aerospikez | src/test/scala/aerospikez/AerospikeClientSpec.scala | Scala | mit | 3,539 |
package breeze.linalg
/*
Copyright 2012 David Hall
Licensed under the Apache License, Version 2.0 (the "License")
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import breeze.storage.DefaultArrayValue
import breeze.math.{TensorSpace, Ring, Semiring, Field}
import breeze.generic.{URFunc, UReduceable, CanMapValues}
import collection.Set
import operators._
import support.{CanZipMapValues, CanNorm, CanCopy}
/**
* A map-like tensor that acts like a collection of key-value pairs where
* the set of values may grow arbitrarily.
*
* @author dramage, dlwh
*/
@SerialVersionUID(1)
trait CounterLike[K, V, +M<:scala.collection.mutable.Map[K,V], +This<:Counter[K,V]] extends TensorLike[K,V,This] with Serializable {
  // Backing mutable map; only explicitly-set keys are stored.
  def data : M
  // Value reported for keys that are absent from `data`.
  def default: V
  def keySet: Set[K] = data.keySet
  def repr = this.asInstanceOf[This]
  override def size = data.size
  // For a counter every stored key is "active", so active* views simply
  // delegate to the plain iterators below.
  def activeSize = data.size
  def isEmpty = data.isEmpty
  def contains(k: K) = data.contains(k)
  override def apply(k : K) = {
    data.get(k) getOrElse default
  }
  def update(k : K, v : V) { data(k) = v }
  def get(k: K) = data.get(k)
  override def keysIterator = data.keysIterator
  override def valuesIterator = data.valuesIterator
  override def iterator = data.iterator
  def activeIterator = iterator
  def activeValuesIterator = valuesIterator
  def activeKeysIterator = keysIterator
  override def toString: String = data.mkString("Counter(",", ", ")")
  override def equals(p1: Any): Boolean = p1 match {
    // NOTE: the type arguments are erased at runtime, so this pattern matches
    // any Counter; equality is decided purely by comparing the backing maps
    // (the `default` values are not compared).
    case x:Counter[K, V] => x.data == this.data
    case _ => false
  }
  override def hashCode(): Int = data.hashCode()
  def toMap = data.toMap
}
/** A [[Tensor]] over an arbitrary key set, backed by a mutable `Map`;
  * see [[CounterLike]] for the bulk of the API. */
trait Counter[K, V] extends Tensor[K,V] with CounterLike[K, V, collection.mutable.Map[K, V], Counter[K,V]] {
}
object Counter extends CounterOps {
  /** Default [[Counter]] implementation over an arbitrary mutable map;
    * absent keys read as the `DefaultArrayValue` for `V`. */
  class Impl[K, V]
  (override val data : scala.collection.mutable.Map[K,V])
  (implicit defaultArrayValue : DefaultArrayValue[V])
  extends Counter[K,V] {
    def default = defaultArrayValue.value
  }
  /** Returns an empty counter. */
  def apply[K,V:DefaultArrayValue:Semiring]() : Counter[K,V] =
    new Impl(scala.collection.mutable.HashMap[K,V]())
  /** Returns a counter by summing all the given values. */
  def apply[K,V:DefaultArrayValue:Semiring](values : (K,V)*) : Counter[K,V] =
    apply(values)
  /** Returns a counter by summing all the given values. */
  def apply[K,V:DefaultArrayValue:Semiring](values : TraversableOnce[(K,V)]) : Counter[K,V] = {
    val rv = apply[K,V]()
    val field = implicitly[Semiring[V]]
    // Duplicate keys accumulate via the semiring's addition.
    values.foreach({ case (k,v) => rv(k) = field.+(v,rv(k)) })
    rv
  }
  /** Counts each of the given items. */
  def count[K](items : TraversableOnce[K]) : Counter[K,Int] = {
    val rv = apply[K,Int]()
    items.foreach(rv(_) += 1)
    rv
  }
  def count[K](items: K*): Counter[K,Int] = count(items)
  // For a counter every stored key is active, so `map` and `mapActive`
  // visit exactly the same entries.
  implicit def CanMapValuesCounter[K, V, RV:Semiring:DefaultArrayValue]: CanMapValues[Counter[K, V], V, RV, Counter[K, RV]]
  = new CanMapValues[Counter[K,V],V,RV,Counter[K,RV]] {
    override def map(from : Counter[K,V], fn : (V=>RV)) = {
      val rv = Counter[K,RV]()
      for( (k,v) <- from.iterator) {
        rv(k) = fn(from.data(k))
      }
      rv
    }
    override def mapActive(from : Counter[K,V], fn : (V=>RV)) = {
      val rv = Counter[K,RV]()
      for( (k,v) <- from.activeIterator) {
        rv(k) = fn(from.data(k))
      }
      rv
    }
  }
  // Universal reduction over the counter's stored values.
  implicit def ured[K, V]: UReduceable[Counter[K, V], V] = {
    new UReduceable[Counter[K, V], V] {
      def apply[Final](c: Counter[K, V], f: URFunc[V, Final]): Final = f(c.data.values)
    }
  }
  // TensorSpace instance so counters can be used with generic tensor math.
  implicit def tensorspace[K, V:Field:DefaultArrayValue] = {
    implicit def zipMap = Counter.zipMap[K, V, V]
    implicit def canDivVV = Counter.canDivVV[K, K, V]
    TensorSpace.make[Counter[K, V], K, V]
  }
}
/** Operator instances (add/sub/mul/div/set/negate, dot product, norms) for
  * [[Counter]], expressed through breeze's BinaryOp/BinaryUpdateOp type
  * classes. In-place (`*Into*`) variants mutate their first argument; the
  * pure variants copy first via [[binaryOpFromBinaryUpdateOp]].
  *
  * NOTE(review): several defs (addIntoVS, subIntoVS, subVS, ...) declare a
  * `K2` type parameter that is never used — harmless but inconsistent with
  * their siblings. Scalar ops iterate only over *active* (stored) keys, so
  * implicit default-valued keys are never touched.
  */
trait CounterOps {
  implicit def canCopy[K1, V:DefaultArrayValue:Semiring]:CanCopy[Counter[K1, V]] = new CanCopy[Counter[K1, V]] {
    def apply(t: Counter[K1, V]): Counter[K1, V] = {
      Counter(t.iterator)
    }
  }
  // Derives a pure binary op from an in-place one: copy the left operand,
  // then update the copy.
  def binaryOpFromBinaryUpdateOp[K, V, Other, Op<:OpType](implicit copy: CanCopy[Counter[K, V]], op: BinaryUpdateOp[Counter[K, V], Other, Op]) = {
    new BinaryOp[Counter[K, V], Other, Op, Counter[K, V]] {
      override def apply(a : Counter[K, V], b : Other) = {
        val c = copy(a)
        op(c, b)
        c
      }
    }
  }
  // ---- addition -----------------------------------------------------------
  implicit def addIntoVV[K1, V:Semiring]:BinaryUpdateOp[Counter[K1, V], Counter[K1, V], OpAdd] = new BinaryUpdateOp[Counter[K1, V], Counter[K1, V], OpAdd] {
    val field = implicitly[Semiring[V]]
    def apply(a: Counter[K1, V], b: Counter[K1, V]) {
      for( (k,v) <- b.activeIterator) {
        a(k) = field.+(a(k), v)
      }
    }
  }
  implicit def addVV[K1, V:Semiring:DefaultArrayValue]:BinaryOp[Counter[K1, V], Counter[K1, V], OpAdd, Counter[K1,V]] = {
    binaryOpFromBinaryUpdateOp(canCopy, addIntoVV)
  }
  implicit def addIntoVS[K1, K2<:K1, V:Semiring]:BinaryUpdateOp[Counter[K1, V], V, OpAdd] = new BinaryUpdateOp[Counter[K1, V], V, OpAdd] {
    val field = implicitly[Semiring[V]]
    def apply(a: Counter[K1, V], b: V) {
      for( (k,v) <- a.activeIterator) {
        a(k) = field.+(v, b)
      }
    }
  }
  implicit def addVS[K1, V:Semiring:DefaultArrayValue]:BinaryOp[Counter[K1, V], V, OpAdd, Counter[K1,V]] = {
    binaryOpFromBinaryUpdateOp(canCopy, addIntoVS)
  }
  // ---- subtraction --------------------------------------------------------
  implicit def subIntoVV[K1, V:Ring]:BinaryUpdateOp[Counter[K1, V], Counter[K1, V], OpSub] = new BinaryUpdateOp[Counter[K1, V], Counter[K1, V], OpSub] {
    val field = implicitly[Ring[V]]
    def apply(a: Counter[K1, V], b: Counter[K1, V]) {
      for( (k,v) <- b.activeIterator) {
        a(k) = field.-(a(k), v)
      }
    }
  }
  implicit def subVV[K1, V:Ring:DefaultArrayValue]:BinaryOp[Counter[K1, V], Counter[K1, V], OpSub, Counter[K1,V]] = {
    binaryOpFromBinaryUpdateOp(canCopy, subIntoVV)
  }
  implicit def subIntoVS[K1, K2<:K1, V:Ring]:BinaryUpdateOp[Counter[K1, V], V, OpSub] = new BinaryUpdateOp[Counter[K1, V], V, OpSub] {
    val field = implicitly[Ring[V]]
    def apply(a: Counter[K1, V], b: V) {
      for( (k,v) <- a.activeIterator) {
        a(k) = field.-(v, b)
      }
    }
  }
  implicit def subVS[K1, K2<:K1, V:Ring:DefaultArrayValue]:BinaryOp[Counter[K1, V], V, OpSub, Counter[K1,V]] = {
    binaryOpFromBinaryUpdateOp(canCopy, subIntoVS)
  }
  // ---- element-wise multiplication ---------------------------------------
  implicit def canMulIntoVV[K2, K1 <: K2, V:Semiring]:BinaryUpdateOp[Counter[K1, V], Counter[K2, V], OpMulScalar] = new BinaryUpdateOp[Counter[K1, V], Counter[K2, V], OpMulScalar] {
    val field = implicitly[Semiring[V]]
    def apply(a: Counter[K1, V], b: Counter[K2, V]) {
      for( (k,v) <- a.activeIterator) {
        a(k) = field.*(v, b(k))
      }
    }
  }
  implicit def canMulVV[K2, K1<:K2, V](implicit semiring: Semiring[V],
      d: DefaultArrayValue[V]):BinaryOp[Counter[K1, V], Counter[K2, V], OpMulScalar, Counter[K1, V]] = {
    new BinaryOp[Counter[K1, V], Counter[K2, V], OpMulScalar, Counter[K1, V]] {
      override def apply(a : Counter[K1, V], b : Counter[K2, V]) = {
        val r = Counter[K1, V]()
        for( (k, v) <- a.activeIterator) {
          val vr = semiring.*(v, b(k))
          // Keep the result sparse: zero products are not stored.
          if(vr != semiring.zero)
            r(k) = vr
        }
        r
      }
    }
  }
  implicit def canMulIntoVS[K2, K1 <: K2, V:Semiring]:BinaryUpdateOp[Counter[K1, V], V, OpMulScalar] = new BinaryUpdateOp[Counter[K1, V], V, OpMulScalar] {
    val field = implicitly[Semiring[V]]
    def apply(a: Counter[K1, V], b: V) {
      for( (k,v) <- a.activeIterator) {
        a(k) = field.*(v, b)
      }
    }
  }
  // Same as canMulIntoVS, registered for the OpMulMatrix operator type.
  implicit def canMulIntoVS_M[K2, K1 <: K2, V:Semiring]:BinaryUpdateOp[Counter[K1, V], V, OpMulMatrix] = new BinaryUpdateOp[Counter[K1, V], V, OpMulMatrix] {
    val field = implicitly[Semiring[V]]
    def apply(a: Counter[K1, V], b: V) {
      for( (k,v) <- a.activeIterator) {
        a(k) = field.*(v, b)
      }
    }
  }
  implicit def canMulVS[K2, K1<:K2, V](implicit semiring: Semiring[V],
      d: DefaultArrayValue[V]):BinaryOp[Counter[K1, V], V, OpMulScalar, Counter[K1, V]] = {
    new BinaryOp[Counter[K1, V], V, OpMulScalar, Counter[K1, V]] {
      override def apply(a : Counter[K1, V], b : V) = {
        val r = Counter[K1, V]()
        for( (k, v) <- a.activeIterator) {
          val vr = semiring.*(v, b)
          r(k) = vr
        }
        r
      }
    }
  }
  implicit def canMulVS_M[K2, K1<:K2, V](implicit semiring: Semiring[V],
      d: DefaultArrayValue[V]):BinaryOp[Counter[K1, V], V, OpMulMatrix, Counter[K1, V]] = {
    new BinaryOp[Counter[K1, V], V, OpMulMatrix, Counter[K1, V]] {
      override def apply(a : Counter[K1, V], b : V) = {
        val r = Counter[K1, V]()
        for( (k, v) <- a.activeIterator) {
          val vr = semiring.*(v, b)
          r(k) = vr
        }
        r
      }
    }
  }
  // ---- division -----------------------------------------------------------
  implicit def canDivIntoVV[K2, K1 <: K2, V:Field]:BinaryUpdateOp[Counter[K1, V], Counter[K2, V], OpDiv] = new BinaryUpdateOp[Counter[K1, V], Counter[K2, V], OpDiv] {
    val field = implicitly[Field[V]]
    def apply(a: Counter[K1, V], b: Counter[K2, V]) {
      for( (k,v) <- a.activeIterator) {
        a(k) = field./(v, b(k))
      }
    }
  }
  implicit def canDivVV[K2, K1<:K2, V](implicit copy: CanCopy[Counter[K1, V]],
      semiring: Field[V],
      d: DefaultArrayValue[V]):BinaryOp[Counter[K1, V], Counter[K2, V], OpDiv, Counter[K1, V]] = {
    new BinaryOp[Counter[K1, V], Counter[K2, V], OpDiv, Counter[K1, V]] {
      override def apply(a : Counter[K1, V], b : Counter[K2, V]) = {
        val r = Counter[K1, V]()
        for( (k, v) <- a.activeIterator) {
          val vr = semiring./(v, b(k))
          r(k) = vr
        }
        r
      }
    }
  }
  implicit def canDivVS[K1, V](implicit copy: CanCopy[Counter[K1, V]],
      semiring: Field[V],
      d: DefaultArrayValue[V]):BinaryOp[Counter[K1, V], V, OpDiv, Counter[K1, V]] = {
    new BinaryOp[Counter[K1, V], V, OpDiv, Counter[K1, V]] {
      override def apply(a : Counter[K1, V], b : V) = {
        val r = Counter[K1, V]()
        for( (k, v) <- a.activeIterator) {
          val vr = semiring./(v, b)
          r(k) = vr
        }
        r
      }
    }
  }
  implicit def canDivIntoVS[K1, V:Field]:BinaryUpdateOp[Counter[K1, V], V, OpDiv] = new BinaryUpdateOp[Counter[K1, V], V, OpDiv] {
    val field = implicitly[Field[V]]
    def apply(a: Counter[K1, V], b: V) {
      for( (k,v) <- a.activeIterator) {
        a(k) = field./(v, b)
      }
    }
  }
  // ---- assignment ---------------------------------------------------------
  // a := b — clears a, then copies b's stored entries.
  implicit def canSetIntoVV[K1, K2 <: K1, V]:BinaryUpdateOp[Counter[K1, V], Counter[K2, V], OpSet] = new BinaryUpdateOp[Counter[K1, V], Counter[K2, V], OpSet] {
    def apply(a: Counter[K1, V], b: Counter[K2, V]) {
      a.data.clear()
      for( (k,v) <- b.activeIterator) {
        a(k) = v
      }
    }
  }
  // a := s — assigns the scalar to every *currently stored* key.
  implicit def canSetIntoVS[K1, V]:BinaryUpdateOp[Counter[K1, V], V, OpSet] = new BinaryUpdateOp[Counter[K1, V], V, OpSet] {
    def apply(a: Counter[K1, V], b: V) {
      for( k <- a.keysIterator) {
        a(k) = b
      }
    }
  }
  implicit def canNegate[K1, V](implicit ring: Ring[V], d: DefaultArrayValue[V]):UnaryOp[Counter[K1, V], OpNeg, Counter[K1, V]] = {
    new UnaryOp[Counter[K1, V], OpNeg, Counter[K1, V]] {
      override def apply(a : Counter[K1, V]) = {
        val result = Counter[K1, V]()
        for( (k, v) <- a.activeIterator) {
          val vr = ring.negate(v)
          result(k) = vr
        }
        result
      }
    }
  }
  // Dot product: sum over a's stored keys of a(k) * b(k).
  implicit def canMulInner[K2, K1<:K2, V](implicit copy: CanCopy[Counter[K1, V]],
      semiring: Semiring[V],
      d: DefaultArrayValue[V]):BinaryOp[Counter[K1, V], Counter[K2, V], OpMulInner, V] = {
    new BinaryOp[Counter[K1, V], Counter[K2, V], OpMulInner, V] {
      val zero = semiring.zero
      override def apply(a : Counter[K1, V], b : Counter[K2, V]) = {
        var result = zero
        for( (k, v) <- a.activeIterator) {
          val vr = semiring.*(v, b(k))
          result = semiring.+(result, vr)
        }
        result
      }
    }
  }
  /** Returns the k-norm of this Vector. */
  implicit def canNorm[K, V:Ring]:CanNorm[Counter[K, V]] = new CanNorm[Counter[K, V]] {
    val field = implicitly[Ring[V]]
    def apply(c: Counter[K, V], n: Double): Double = {
      import c._
      if (n == 1) {
        // 1-norm: sum of element norms.
        var sum = 0.0
        activeValuesIterator foreach (v => sum += field.norm(v))
        sum
      } else if (n == 2) {
        // Euclidean norm.
        var sum = 0.0
        activeValuesIterator foreach (v => { val nn = field.norm(v); sum += nn * nn })
        math.sqrt(sum)
      } else if (n == Double.PositiveInfinity) {
        // Infinity norm: largest element norm.
        var max = Double.NegativeInfinity
        activeValuesIterator foreach (v => { val nn = field.norm(v); if (nn > max) max = nn })
        max
      } else {
        // General p-norm.
        var sum = 0.0
        activeValuesIterator foreach (v => { val nn = field.norm(v); sum += math.pow(nn,n) })
        math.pow(sum, 1.0 / n)
      }
    }
  }
  // Zip-map visits the *union* of both key sets; missing keys contribute the
  // counter's default value.
  class CanZipMapValuesCounter[K, V, RV:DefaultArrayValue:Semiring] extends CanZipMapValues[Counter[K, V],V,RV,Counter[K, RV]] {
    /**Maps all corresponding values from the two collection. */
    def map(from: Counter[K, V], from2: Counter[K, V], fn: (V, V) => RV) = {
      val result = Counter[K, RV]
      for ( k <- (from.keySet ++ from2.keySet)) {
        result(k) = fn(from(k), from2(k))
      }
      result
    }
  }
  implicit def zipMap[K, V, R:DefaultArrayValue:Semiring] = new CanZipMapValuesCounter[K, V, R]
}
| tjhunter/scalanlp-core | math/src/main/scala/breeze/linalg/Counter.scala | Scala | apache-2.0 | 14,142 |
package com.textteaser.summarizer.models
import net.liftweb.mongodb.record.field._
import net.liftweb.record.field._
import net.liftweb.mongodb.record._
class Keyword extends MongoRecord[Keyword] with ObjectIdPk[Keyword] {
  def meta = Keyword
  // The keyword string itself.
  object word extends StringField(this, "")
  // Numeric score accumulated for this keyword (defaults to 0).
  object score extends LongField(this, 0)
  // Timestamp field for the record.
  object date extends DateField(this)
  // Identifier of the summary this keyword belongs to.
  // NOTE(review): the Int argument here is presumably the field's max length
  // (unlike `word`/`blog`, which pass a default String value) — confirm
  // against the StringField constructors in the Lift version in use.
  object summaryId extends StringField(this, 10)
  // Source blog name; "Undefined" when unknown.
  object blog extends StringField(this, "Undefined")
  // Content category; "Undefined" when unknown.
  object category extends StringField(this, "Undefined")
}
object Keyword extends Keyword with MongoMetaRecord[Keyword] | ahmadassaf/Text-Teaser | src/main/scala/com/textteaser/summarizer/models/Keyword.scala | Scala | mit | 593 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.sst
import slamdata.Predef._
import quasar.contrib.algebra._
import quasar.contrib.matryoshka._
import quasar.contrib.matryoshka.arbitrary._
import quasar.ejson, ejson.{EJsonArbitrary, TypeTag}
import quasar.ejson.implicits._
import quasar.fp._
import quasar.contrib.iota._
import quasar.fp.numeric.{Natural, Positive}
import quasar.tpe._
import scala.Predef.$conforms
import eu.timepit.refined.auto._
import matryoshka.{project => _, _}
import matryoshka.data._
import matryoshka.implicits._
import monocle.syntax.fields._
import org.scalacheck._, Arbitrary.arbitrary
import org.specs2.scalacheck._
import scalaz._, Scalaz._
import scalaz.scalacheck.ScalazArbitrary._
import spire.math.Real
final class CompressionSpec extends quasar.Qspec
with StructuralTypeArbitrary
with TypeStatArbitrary
with EJsonArbitrary
with SimpleTypeArbitrary {
  // Cap generated structure sizes so property runs stay fast.
  implicit val params = Parameters(maxSize = 10)
  import StructuralType.{ConstST, TagST, TypeST}
  type J = TypedEJson[Fix]
  type S = SST[J, Real]
  // The term `J` (EJson pattern helpers) coexists with type `J` above —
  // Scala keeps the type and term namespaces separate.
  val J = ejson.Fixed[J]
  // Wrapper for a scalar EJson value, convertible to an SST observed once.
  case class LeafEjs(ejs: J) {
    def toSst: S = SST.fromEJson(Real(1), ejs)
  }
  // Generates only scalar EJson values (null, bool, str, dec, char, int),
  // so every LeafEjs is genuinely a leaf, never a composite.
  implicit val arbitraryLeafEjs: Arbitrary[LeafEjs] =
    Arbitrary(Gen.oneOf(
      Gen.const(J.nul()),
      arbitrary[Boolean] map (b => J.bool(b)),
      arbitrary[String] map (s => J.str(s)),
      arbitrary[BigDecimal] map (d => J.dec(d)),
      arbitrary[Char] map (c => J.char(c)),
      arbitrary[BigInt] map (i => J.int(i))
    ) map (LeafEjs(_)))
  // Order leaves by their underlying EJson value.
  implicit val orderLeafEjs: Order[LeafEjs] =
    Order.orderBy(_.ejs)
  // Show Real values via toString for test reporting.
  implicit val realShow: Show[Real] = Show.showFromToString
  // A TypeStat representing a count of exactly one observation.
  val cnt1 = TypeStat.count(Real(1))
  val envTType = envTIso[TypeStat[Real], StructuralType.ST[J, ?], S] composeLens _2
  // Extracts the constant EJson value from a constant SST node, if any.
  def sstConst(s: S) = ConstST unapply (s.project) map (_._3)
  // Applies a single compression step anamorphically, leaving nodes
  // unchanged wherever the step returns None.
  def attemptCompression(s: S, f: SSTF[J, Real, S] => Option[SSTF[J, Real, S]]): S =
    s.transAna[S](orOriginal(f))
"coalesceKeys" >> {
def test(kind: String, f: Char => J): Unit = {
s"compresses largest group of keys having same primary ${kind}" >> prop {
(cs: ISet[Char], n: BigInt, s: String, unk0: Option[(LeafEjs, LeafEjs)]) => (cs.size > 1) ==> {
val chars = cs.toIList map f
val int = J.int(n)
val str = J.str(s)
val nul = SST.fromEJson(Real(1), J.nul())
val m0 = IMap.fromFoldable((int :: str :: chars) strengthR nul)
val unk = unk0.map(_.umap(_.toSst))
val msst = envT(cnt1, TypeST(TypeF.map[J, S](m0, unk))).embed
val uval = SST.fromEJson(Real(cs.size), J.nul())
val ukey = chars.foldMap1Opt(c => SST.fromEJson(Real(1), c))
val m1 = IMap.fromFoldable(IList(str, int) strengthR nul)
val unk1 = ukey.strengthR(uval) |+| unk
val exp = envT(cnt1, TypeST(TypeF.map[J, S](m1, unk1))).embed
attemptCompression(msst, compression.coalesceKeys(2L, 0L, false)) must_= exp
}}
s"compresses largest group of keys having same primary ${kind}, with retention" >> prop {
(c1: Char, c2: Char, n: BigInt, s: String, unk0: Option[(LeafEjs, LeafEjs)]) => (c1 =/= c2) ==> {
val fc1 = f(c1)
val sc1 = SST.fromEJson(Real(5), J.nul())
val fc2 = f(c2)
val sc2 = SST.fromEJson(Real(3), J.nul())
val chars = IList((fc1, sc1), (fc2, sc2))
val int = J.int(n)
val str = J.str(s)
val nul = SST.fromEJson(Real(1), J.nul())
val m0 = IMap.fromFoldable((int, nul) :: (str, nul) :: chars)
val unk = unk0.map(_.umap(_.toSst))
val msst = envT(cnt1, TypeST(TypeF.map[J, S](m0, unk))).embed
val ukey = SST.fromEJson(Real(3), fc2)
val m1 = IMap.fromFoldable((fc1, sc1) :: IList(str, int).strengthR(nul))
val unk1 = unk |+| Some((ukey, sc2))
val exp = envT(cnt1, TypeST(TypeF.map[J, S](m1, unk1))).embed
attemptCompression(msst, compression.coalesceKeys(2L, 1L, false)) must_= exp
}}
()
}
test("type", J.char(_))
test("tag", c => J.meta(J.char(c), J.tpe(TypeTag("cp"))))
"ignores map where size of keys does not exceed maxSize" >> prop {
(xs: IList[(LeafEjs, LeafEjs)], unk0: Option[(LeafEjs, LeafEjs)]) =>
val m = IMap.fromFoldable(xs.map(_.bimap(_.ejs, _.toSst)))
val unk = unk0.map(_.umap(_.toSst))
val sst = envT(cnt1, TypeST(TypeF.map[J, S](m, unk))).embed
Natural(m.size.toLong).cata(
l => attemptCompression(sst, compression.coalesceKeys(l, 0L, false)),
sst
) must_= sst
}
}
"coalesceKeysWidened" >> {
"compresses excessive map to widened keys with values" >> prop {
(cs: ISet[Char], n: BigInt, s: String, unk0: Option[(LeafEjs, LeafEjs)]) => (cs.size > 1) ==> {
val chars = cs.toIList.map(J.char(_))
val int = J.int(n)
val str = J.str(s)
val nul = SST.fromEJson(Real(1), J.nul())
val m0 = IMap.fromFoldable((int :: str :: chars) strengthR nul)
val unk = unk0.map(_.umap(_.toSst))
val msst = envT(cnt1, TypeST(TypeF.map[J, S](m0, unk))).embed
val uval = SST.fromEJson(Real(cs.size + 2), J.nul())
val ckey = chars.foldMap1Opt(c => compression.primarySst(false)(Real(1), c))
val ikey = compression.primarySst(false)(Real(1), int).some
val skey = compression.primarySst(false)(Real(1), str).some
val unk1 = (ckey |+| ikey |+| skey).strengthR(uval) |+| unk
val exp = envT(cnt1, TypeST(TypeF.map[J, S](IMap.empty[J, S], unk1))).embed
attemptCompression(msst, compression.coalesceKeysWidened(2L, 0L, false)) must_= exp
}}
"compresses excessive map to widened keys with values, with retention" >> prop {
(c1: Char, c2: Char, n: BigInt, s: String, unk0: Option[(LeafEjs, LeafEjs)]) => (c1 =/= c2) ==> {
val fc1 = J.char(c1)
val sc1 = SST.fromEJson(Real(5), J.nul())
val fc2 = J.char(c2)
val sc2 = SST.fromEJson(Real(3), J.nul())
val chars = IList((fc1, sc1), (fc2, sc2))
val int = J.int(n)
val str = J.str(s)
val nul = SST.fromEJson(Real(1), J.nul())
val m0 = IMap.fromFoldable((int, nul) :: (str, nul) :: chars)
val unk = unk0.map(_.umap(_.toSst))
val msst = envT(cnt1, TypeST(TypeF.map[J, S](m0, unk))).embed
val uval = SST.fromEJson(Real(5), J.nul())
val ckey = compression.primarySst(false)(Real(3), fc2)
val ikey = compression.primarySst(false)(Real(1), int)
val skey = compression.primarySst(false)(Real(1), str)
val ukey = ckey |+| ikey |+| skey
val m1 = IMap.fromFoldable(IList((fc1, sc1)))
val unk1 = unk |+| Some((ukey, uval))
val exp = envT(cnt1, TypeST(TypeF.map[J, S](m1, unk1))).embed
attemptCompression(msst, compression.coalesceKeysWidened(2L, 1L, false)) must_= exp
}}
"ignores map where size of keys does not exceed maxSize" >> prop {
(xs: IList[(LeafEjs, LeafEjs)], unk0: Option[(LeafEjs, LeafEjs)]) =>
val m = IMap.fromFoldable(xs.map(_.bimap(_.ejs, _.toSst)))
val unk = unk0.map(_.umap(_.toSst))
val sst = envT(cnt1, TypeST(TypeF.map[J, S](m, unk))).embed
Natural(m.size.toLong).cata(
l => attemptCompression(sst, compression.coalesceKeysWidened(l, 0L, false)),
sst
) must_= sst
}
}
"coalescePrimary" >> {
"combines consts with their primary SST in unions" >> prop { (sj: LeafEjs, sjs: ISet[LeafEjs]) =>
val pt = primaryTagOf(sj.ejs)
val primarySst = compression.primarySst(true)(Real(1), sj.ejs)
val leafs = sjs.insert(sj).toIList
val ssts = leafs.map(_.toSst)
val (matching, nonmatching) =
leafs.partition(l => primaryTagOf(l.ejs) ≟ pt)
val simplified = matching.map(l => compression.primarySst(true)(Real(1), l.ejs))
val coalesced = NonEmptyList.nel(primarySst, simplified).suml1
val compressed =
attemptCompression(
NonEmptyList.nel(primarySst, ssts).suml1,
compression.coalescePrimary(true))
compressed must_= NonEmptyList.nel(coalesced, nonmatching map (_.toSst)).suml1
}
"combines multiple instances of a primary SST in unions" >> prop {
(x: SimpleType, xs: NonEmptyList[SimpleType]) =>
val y = envT(cnt1, TypeST(TypeF.simple[J, S](x))).embed
val ys = xs.map(st => envT(cnt1, TypeST(TypeF.simple[J, S](st))).embed)
val cnt = TypeStat.count(Real(ys.length + 1))
val union = envT(cnt, TypeST(TypeF.union[J, S](y, ys.head, ys.tail))).embed
val sum = (y <:: ys).suml1
attemptCompression(union, compression.coalescePrimary(true)) must_= sum
}
"no effect when a const's primary tag not in the union" >> prop { ljs: NonEmptyList[LeafEjs] =>
val sum = ljs.foldMap1(_.toSst)
attemptCompression(sum, compression.coalescePrimary(true)) must_= sum
}
}
"coalesceWithUnknown" >> {
def testUnk(kind: String, f: Char => J, g: (TypeStat[Real], SimpleType) => SSTF[J, Real, S]) =
s"merges known map entry with unknown entry when same primary $kind appears in unknown" >> prop {
(head: (Char, LeafEjs), xs0: NonEmptyList[(Char, LeafEjs)], kv: (BigInt, LeafEjs)) =>
val xs: IMap[Char, LeafEjs] = IMap.fromFoldable(xs0) + head
val u1 = head.bimap(_ => SST.fromEJson(Real(1), f('x')), _.toSst)
val u2 = head.bimap(
c => g(TypeStat.fromEJson(Real(1), J.char(c)), SimpleType.Char).embed,
_.toSst)
val kv1 = kv.bimap(J.int(_), _.toSst)
val cs = xs.toList.map(_.bimap(f, _.toSst))
val m = IMap.fromFoldable(kv1 :: cs)
val sst1 = envT(cnt1, TypeST(TypeF.map(m, u1.some))).embed
val sst2 = envT(cnt1, TypeST(TypeF.map(m, u2.some))).embed
val a = cs.foldMap1Opt { case (j, s) => (SST.fromEJson(Real(1), j), s) }
val b = IMap.singleton(kv1._1, kv1._2)
val exp1 = envT(cnt1, TypeST(TypeF.map(b, a map (_ |+| u1)))).embed
val exp2 = envT(cnt1, TypeST(TypeF.map(b, a map (_ |+| u2)))).embed
(attemptCompression(sst1, compression.coalesceWithUnknown(0L, false)) must_= exp1) and
(attemptCompression(sst2, compression.coalesceWithUnknown(0L, false)) must_= exp2)
}
def testUnkUnion(kind: String, f: Char => J, g: (TypeStat[Real], SimpleType) => SSTF[J, Real, S]) =
s"merges known map entry with unknown when primary $kind appears in unknown union" >> prop {
(head: (Char, LeafEjs), xs0: NonEmptyList[(Char, LeafEjs)], kv: (BigInt, LeafEjs)) =>
val xs: IMap[Char, LeafEjs] = IMap.fromFoldable(xs0) + head
val u1 = head.bimap(_ => SST.fromEJson(Real(1), f('x')), _.toSst)
val u2 = head.bimap(
c => g(TypeStat.fromEJson(Real(1), J.char(c)), SimpleType.Char).embed,
_.toSst)
val st = envT(cnt1, TypeST(TypeF.simple[J, S](SimpleType.Dec))).embed
val tp = envT(cnt1, TypeST(TypeF.top[J, S]())).embed
val u1u = u1.leftMap(s => envT(cnt1, TypeST(TypeF.union[J, S](tp, s, IList(st)))).embed)
val u2u = u2.leftMap(s => envT(cnt1, TypeST(TypeF.union[J, S](s, st, IList(tp)))).embed)
val kv1 = kv.bimap(J.int(_), _.toSst)
val cs = xs.toList.map(_.bimap(f, _.toSst))
val m = IMap.fromFoldable(kv1 :: cs)
val sst1 = envT(cnt1, TypeST(TypeF.map(m, u1u.some))).embed
val sst2 = envT(cnt1, TypeST(TypeF.map(m, u2u.some))).embed
val a = cs.foldMap1Opt { case (j, s) => (SST.fromEJson(Real(1), j), s) }
val b = IMap.singleton(kv1._1, kv1._2)
val exp1 = envT(cnt1, TypeST(TypeF.map(b, a map (_ |+| u1u)))).embed
val exp2 = envT(cnt1, TypeST(TypeF.map(b, a map (_ |+| u2u)))).embed
(attemptCompression(sst1, compression.coalesceWithUnknown(0L, false)) must_= exp1) and
(attemptCompression(sst2, compression.coalesceWithUnknown(0L, false)) must_= exp2)
}
def testUnkRetain(kind: String, f: Char => J, g: (TypeStat[Real], SimpleType) => SSTF[J, Real, S]) =
s"merges known map entry with unknown entry when same primary $kind appears in unknown, with retention" >> prop {
(head: (Char, LeafEjs), xs0: NonEmptyList[(Char, LeafEjs)], kv: (BigInt, LeafEjs)) =>
val xs: IMap[Char, LeafEjs] = IMap.fromFoldable(xs0) + head
val u1 = head.bimap(_ => SST.fromEJson(Real(1), f('x')), _.toSst)
val u2 = head.bimap(
c => g(TypeStat.fromEJson(Real(1), J.char(c)), SimpleType.Char).embed,
_.toSst)
val kv1 = kv.bimap(J.int(_), _.toSst)
val cs = (xs - head._1).toList.map(_.bimap(f, _.toSst))
val h = head.bimap(f, l => SST.fromEJson(Real(1000000), l.ejs))
val m = IMap.fromFoldable(kv1 :: h :: cs)
val sst1 = envT(cnt1, TypeST(TypeF.map(m, u1.some))).embed
val sst2 = envT(cnt1, TypeST(TypeF.map(m, u2.some))).embed
val a = cs.foldMap1Opt { case (j, s) => (SST.fromEJson(Real(1), j), s) }
val b = IMap(kv1, h)
val exp1 = envT(cnt1, TypeST(TypeF.map(b, a map (_ |+| u1)))).embed
val exp2 = envT(cnt1, TypeST(TypeF.map(b, a map (_ |+| u2)))).embed
(attemptCompression(sst1, compression.coalesceWithUnknown(1L, false)) must_= exp1) and
(attemptCompression(sst2, compression.coalesceWithUnknown(1L, false)) must_= exp2)
}
def test(kind: String, f: Char => J, g: (TypeStat[Real], SimpleType) => SSTF[J, Real, S]) = {
testUnk(kind, f, g)
testUnkUnion(kind, f, g)
testUnkRetain(kind, f, g)
}
test("type",
J.char(_),
(ts, st) => envT(ts, TypeST(TypeF.simple[J, S](st))))
test("tag",
c => J.meta(J.char(c), J.tpe(TypeTag("codepoint"))),
(ts, st) => envT(TypeStat.count(ts.size), TagST[J](Tagged(
TypeTag("codepoint"),
envT(ts, TypeST(TypeF.simple[J, S](st))).embed))))
"has no effect on maps when all keys are known" >> prop { xs: IList[(LeafEjs, LeafEjs)] =>
val m = IMap.fromFoldable(xs.map(_.bimap(_.ejs, _.toSst)))
val sst = envT(cnt1, TypeST(TypeF.map[J, S](m, None))).embed
attemptCompression(sst, compression.coalesceWithUnknown(0L, false)) must_= sst
}
"has no effect on maps when primary type not in unknown" >> prop { xs: IList[(LeafEjs, LeafEjs)] =>
val m = IMap.fromFoldable(xs.map(_.bimap(_.ejs, _.toSst)))
val T = envT(cnt1, TypeST(TypeF.top[J, S]())).embed
val sst = envT(cnt1, TypeST(TypeF.map[J, S](m, Some((T, T))))).embed
attemptCompression(sst, compression.coalesceWithUnknown(0L, false)) must_= sst
}
"has no effect on maps when primary tag not in unknown" >> prop { xs: IList[(LeafEjs, LeafEjs)] =>
val foo = TypeTag("foo")
val bar = TypeTag("bar")
val m = IMap.fromFoldable(xs.map(_.bimap(l => J.meta(l.ejs, J.tpe(foo)), _.toSst)))
val T = envT(cnt1, TagST[J](Tagged(bar, envT(cnt1, TypeST(TypeF.top[J, S]())).embed))).embed
val sst = envT(cnt1, TypeST(TypeF.map[J, S](m, Some((T, T))))).embed
attemptCompression(sst, compression.coalesceWithUnknown(0L, false)) must_= sst
}
}
"limitArrays" >> {
"compresses arrays longer than maxLen to the union of the members" >> prop {
xs: NonEmptyList[BigInt] => (xs.length > 1) ==> {
val alen: Natural = Natural(xs.length.toLong) getOrElse 0L
val lt: Natural = Natural((xs.length - 1).toLong) getOrElse 0L
val rlen = Real(xs.length).some
val ints = xs.map(J.int(_))
val xsst = SST.fromEJson(Real(1), J.arr(ints.toList))
val sum = ints.foldMap1(x => SST.fromEJson(Real(1), x))
val coll = TypeStat.coll(Real(1), rlen, rlen)
val lubarr = envT(coll, TypeST(TypeF.arr[J, S](IList[S](), Some(sum)))).embed
val req = xsst.elgotApo[S](compression.limitArrays(alen, 0L))
val rlt = xsst.elgotApo[S](compression.limitArrays(lt, 0L))
(req must_= xsst) and (rlt must_= lubarr)
}}
"does not limit structural string arrays" >> prop {
s: String => (s.length > 1) ==> {
val lim: Natural = Natural((s.length - 1).toLong) getOrElse 0L
val stringSst = strings.widen[J, Real](TypeStat.fromEJson(Real(1), J.str(s)), s).embed
stringSst.elgotApo[S](compression.limitArrays(lim, 0L)) must_= stringSst
}}
"preserves the first k indices" >> {
val a = BigInt(1)
val b = BigInt(2)
val c = BigInt(3)
val d = BigInt(4)
val rlen = Real(4).some
val ints = NonEmptyList(J.int(a), J.int(b), J.int(c), J.int(d))
val xsst = SST.fromEJson(Real(1), J.arr(ints.toList))
val known = IList(J.int(a), J.int(b)).map(SST.fromEJson(Real(1), _))
val sum = NonEmptyList(J.int(c), J.int(d)).foldMap1(SST.fromEJson(Real(1), _))
val coll = TypeStat.coll(Real(1), rlen, rlen)
val exp = envT(coll, TypeST(TypeF.arr[J, S](known, Some(sum)))).embed
val res = xsst.elgotApo[S](compression.limitArrays(3L, 2L))
res must_= exp
}
}
"limitStrings" >> {
"compresses strings longer than maxLen" >> prop { s: String => (s.length > 1) ==> {
val plen: Natural = Natural(s.length.toLong) getOrElse 0L
val lt: Natural = Natural((s.length - 1).toLong) getOrElse 0L
val str = SST.fromEJson(Real(1), J.str(s))
val strtrunc = str.map(TypeStat.str.modify {
case (c, n, x, ns, _) =>
val i = lt.value.toInt
(c, n, x, ns.substring(0, i), ns.substring(0, i))
})
val arr = strings.compress[S, J, Real](strtrunc.copoint, s).embed
val req = attemptCompression(str, compression.limitStrings(plen, true))
val rlt = attemptCompression(str, compression.limitStrings(lt, true))
(req must_= str) and (rlt must_= arr)
}}
"compresses to simple type when preserve structure is 'false'" >> prop { s: String => (s.length > 1) ==> {
val lt: Natural = Natural((s.length - 1).toLong) getOrElse 0L
val str = SST.fromEJson(Real(1), J.str(s))
val strtrunc = str.map(TypeStat.str.modify {
case (c, n, x, ns, _) =>
val i = lt.value.toInt
(c, n, x, ns.substring(0, i), ns.substring(0, i))
})
val smp = strings.simple[S, J, Real](strtrunc.copoint).embed
val rlt = attemptCompression(str, compression.limitStrings(lt, false))
rlt must_= smp
}}
}
"narrowUnion" >> {
"reduces the largest group of values having the same primary type" >> prop {
(ss: ISet[Char], c1: String, c2: String, c3: String, d1: BigDecimal) =>
((ss.size > 3) && (ISet.fromFoldable(IList(c1, c2, c3)).size ≟ 3)) ==> {
val chars = ss.toIList.map(s => SST.fromEJson(Real(1), J.char(s)))
val strs = IList(c1, c2, c3).map(c => SST.fromEJson(Real(1), J.str(c)))
val dec = SST.fromEJson(Real(1), J.dec(d1))
val compChar = envT(
chars.foldMap1Opt(_.copoint) | TypeStat.count(Real(0)),
TypeST(TypeF.simple[J, S](SimpleType.Char))
).embed
val union0 = NonEmptyList.nel(dec, strs ::: chars).suml1
val union1 = envT(union0.copoint, TypeST(TypeF.union[J, S](compChar, dec, strs))).embed
attemptCompression(union0, compression.narrowUnion(3L, true)) must_= union1
}}
"no effect on unions smaller or equal to maxSize" >> prop {
(x: LeafEjs, y: LeafEjs, xs: IList[LeafEjs]) =>
val union = envT(cnt1, TypeST(TypeF.union[J, S](x.toSst, y.toSst, xs map (_.toSst)))).embed
Positive((xs.length + 2).toLong).cata(
l => attemptCompression(union, compression.narrowUnion(l, true)),
union
) must_= union
}
}
}
| slamdata/slamengine | sst/src/test/scala/quasar/sst/CompressionSpec.scala | Scala | apache-2.0 | 20,091 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.admin
import kafka.server.{KafkaConfig, KafkaServer}
import kafka.utils.{Logging, TestUtils}
import kafka.zk.ZooKeeperTestHarness
import org.apache.kafka.common.TopicPartition
import org.junit.Assert.assertEquals
import org.junit.{After, Test}
import scala.collection.{Map, Set}
/**
 * Integration tests for [[PreferredReplicaLeaderElectionCommand]]: serialization of the
 * election data to ZooKeeper, and an end-to-end leader move to the preferred replica.
 */
class PreferredReplicaElectionCommandTest extends ZooKeeperTestHarness with Logging {
  // Brokers started by a test; shut down in tearDown so a failing test cannot leak servers.
  var servers: Seq[KafkaServer] = Seq()
  @After
  override def tearDown() {
    TestUtils.shutdownServers(servers)
    super.tearDown()
  }
  // Round-trips the preferred-replica election partition set through its ZK path.
  @Test
  def testPreferredReplicaJsonData() {
    // write preferred replica json data to zk path
    val partitionsForPreferredReplicaElection = Set(new TopicPartition("test", 1), new TopicPartition("test2", 1))
    PreferredReplicaLeaderElectionCommand.writePreferredReplicaElectionData(zkClient, partitionsForPreferredReplicaElection)
    // try to read it back and compare with what was written
    val partitionsUndergoingPreferredReplicaElection = zkClient.getPreferredReplicaElection
    assertEquals("Preferred replica election ser-de failed", partitionsForPreferredReplicaElection,
      partitionsUndergoingPreferredReplicaElection)
  }
  // End-to-end: starts 3 brokers, lets a non-preferred broker become leader, then verifies
  // the command moves leadership to the preferred replica (first entry of the assignment).
  @Test
  def testBasicPreferredReplicaElection() {
    val expectedReplicaAssignment = Map(1 -> List(0, 1, 2))
    val topic = "test"
    val partition = 1
    // Preferred replica is the head of the assignment list for the partition.
    val preferredReplica = 0
    // create brokers
    val brokerRack = Map(0 -> "rack0", 1 -> "rack1", 2 -> "rack2")
    val serverConfigs = TestUtils.createBrokerConfigs(3, zkConnect, false, rackInfo = brokerRack).map(KafkaConfig.fromProps)
    // create the topic
    adminZkClient.createOrUpdateTopicPartitionAssignmentPathInZK(topic, expectedReplicaAssignment)
    // Brokers are deliberately started in reverse id order so broker 2 registers first.
    servers = serverConfigs.reverseMap(s => TestUtils.createServer(s))
    // broker 2 should be the leader since it was started first
    val currentLeader = TestUtils.waitUntilLeaderIsElectedOrChanged(zkClient, topic, partition, oldLeaderOpt = None)
    // trigger preferred replica election
    val preferredReplicaElection = new PreferredReplicaLeaderElectionCommand(zkClient, Set(new TopicPartition(topic, partition)))
    preferredReplicaElection.moveLeaderToPreferredReplica()
    val newLeader = TestUtils.waitUntilLeaderIsElectedOrChanged(zkClient, topic, partition, oldLeaderOpt = Some(currentLeader))
    assertEquals("Preferred replica election failed", preferredReplica, newLeader)
  }
}
| ollie314/kafka | core/src/test/scala/unit/kafka/admin/PreferredReplicaElectionCommandTest.scala | Scala | apache-2.0 | 3,198 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.internal
import org.apache.spark.util.Utils
/**
* Static SQL configuration is a cross-session, immutable Spark configuration. External users can
* see the static sql configs via `SparkSession.conf`, but can NOT set/unset them.
*/
object StaticSQLConf {
  import SQLConf.buildStaticConf
  val WAREHOUSE_PATH = buildStaticConf("spark.sql.warehouse.dir")
    .doc("The default location for managed databases and tables.")
    .stringConf
    .createWithDefault(Utils.resolveURI("spark-warehouse").toString)
  // Selects the external catalog backend; constrained to "hive" or "in-memory".
  val CATALOG_IMPLEMENTATION = buildStaticConf("spark.sql.catalogImplementation")
    .internal()
    .stringConf
    .checkValues(Set("hive", "in-memory"))
    .createWithDefault("in-memory")
  // Name of the database backing global temporary views.
  val GLOBAL_TEMP_DATABASE = buildStaticConf("spark.sql.globalTempDatabase")
    .internal()
    .stringConf
    .createWithDefault("global_temp")
  // This is used to control when we will split a schema's JSON string to multiple pieces
  // in order to fit the JSON string in metastore's table property (by default, the value has
  // a length restriction of 4000 characters, so do not use a value larger than 4000 as the default
  // value of this property). We will split the JSON string of a schema to its length exceeds the
  // threshold. Note that, this conf is only read in HiveExternalCatalog which is cross-session,
  // that's why this conf has to be a static SQL conf.
  val SCHEMA_STRING_LENGTH_THRESHOLD =
    buildStaticConf("spark.sql.sources.schemaStringLengthThreshold")
      .doc("The maximum length allowed in a single cell when " +
        "storing additional schema information in Hive's metastore.")
      .internal()
      .intConf
      .createWithDefault(4000)
  val FILESOURCE_TABLE_RELATION_CACHE_SIZE =
    buildStaticConf("spark.sql.filesourceTableRelationCacheSize")
      .internal()
      .doc("The maximum size of the cache that maps qualified table names to table relation plans.")
      .intConf
      .checkValue(cacheSize => cacheSize >= 0, "The maximum size of the cache must not be negative")
      .createWithDefault(1000)
  // When enabling the debug, Spark SQL internal table properties are not filtered out; however,
  // some related DDL commands (e.g., ANALYZE TABLE and CREATE TABLE LIKE) might not work properly.
  val DEBUG_MODE = buildStaticConf("spark.sql.debug")
    .internal()
    .doc("Only used for internal debugging. Not all functions are supported when it is enabled.")
    .booleanConf
    .createWithDefault(false)
  val HIVE_THRIFT_SERVER_SINGLESESSION =
    buildStaticConf("spark.sql.hive.thriftServer.singleSession")
      .doc("When set to true, Hive Thrift server is running in a single session mode. " +
        "All the JDBC/ODBC connections share the temporary views, function registries, " +
        "SQL configuration and the current database.")
      .booleanConf
      .createWithDefault(false)
  val SPARK_SESSION_EXTENSIONS = buildStaticConf("spark.sql.extensions")
    .doc("Name of the class used to configure Spark Session extensions. The class should " +
      "implement Function1[SparkSessionExtension, Unit], and must have a no-args constructor.")
    .stringConf
    .createOptional
  val QUERY_EXECUTION_LISTENERS = buildStaticConf("spark.sql.queryExecutionListeners")
    .doc("List of class names implementing QueryExecutionListener that will be automatically " +
      "added to newly created sessions. The classes should have either a no-arg constructor, " +
      "or a constructor that expects a SparkConf argument.")
    .stringConf
    .toSequence
    .createOptional
}
| ron8hu/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/internal/StaticSQLConf.scala | Scala | apache-2.0 | 4,408 |
package glasskey.play.client
import glasskey.config.OAuthConfig
object ClientCredentialsController extends SampleController {
  /** Runtime environment bound to the client id registered for the client-credentials grant sample. */
  override def env: PlayClientRuntimeEnvironment = PlayClientRuntimeEnvironment("hello-client_credentials-client")
} | MonsantoCo/glass-key | samples/glass-key-play-client/app/glasskey/play/client/ClientCredentialsController.scala | Scala | bsd-3-clause | 245 |
package de.leanovate.swaggercheck.schema.gen
import de.leanovate.swaggercheck.schema.model._
import de.leanovate.swaggercheck.shrinkable.CheckJsValue
import org.scalacheck.Gen
import scala.language.implicitConversions
/**
 * A schema [[Definition]] that can additionally produce ScalaCheck generators of
 * JSON values conforming to itself.
 */
trait GeneratableDefinition extends Definition {
  /** Returns a generator of JSON values valid under this definition, configured by `schema`. */
  def generate(schema: GeneratableSchema): Gen[CheckJsValue]
}
object GeneratableDefinition {
  /**
   * Implicitly wraps a plain [[Definition]] in its generator-capable counterpart by
   * dispatching on the concrete definition shape.
   *
   * NOTE(review): there is no fallback case — presumably `Definition` is sealed so this
   * match is exhaustive; confirm, otherwise a new subtype would throw a MatchError here.
   */
  implicit def toGeneratable(definition: Definition): GeneratableDefinition = definition match {
    case definition: AllOfDefinition => GeneratableAllOf(definition)
    case definition: ArrayDefinition => GeneratableArray(definition)
    case BooleanDefinition => GeneratableBoolean
    case EmptyDefinition => GeneratableEmpty
    case definition: IntegerDefinition => GeneratableInteger(definition)
    case definition: NumberDefinition => GeneratableNumber(definition)
    case definition: ObjectDefinition => GeneratableObject(definition)
    case definition: OneOfDefinition => GeneratableOneOf(definition)
    case definition: StringDefinition => GeneratableString(definition)
    case definition: ReferenceDefinition => GeneratableReference(definition)
}
} | leanovate/swagger-check | json-schema-gen/src/main/scala/de/leanovate/swaggercheck/schema/gen/GeneratableDefinition.scala | Scala | mit | 1,131 |
// Negative typing fixture (tests/typing/bad): `1 % 1` is an Int, so assigning it to a
// Boolean must be rejected by the type checker. Do not "fix" this — the failure is the test.
class A { def m() { var x: Boolean = 1 % 1 } }
// Entry point required by the test harness; intentionally does nothing.
object Main { def main(args: Array[String]) { } }
| tobast/compil-petitscala | tests/typing/bad/testfile-mod3-1.scala | Scala | gpl-3.0 | 97 |
package okapies.finagle.kafka
package protocol
import org.scalatest._
import org.scalatest.matchers._
import org.jboss.netty.buffer.ChannelBuffers
/**
 * Round-trip tests for the finagle-kafka [[Message]] wire format, cross-checked against
 * the reference `kafka.message.Message` implementation in both directions.
 */
class MessageTest extends FlatSpec with Matchers {
  import kafka.message.{
    Message => KafkaMessage,
    NoCompressionCodec
  }
  import util.Helper._
  behavior of "A Message"
  // Encode with our Message, decode with the reference Kafka class, compare all fields.
  it should "encode a no compressed message" in {
    val msg1 = Message.create(
      ChannelBuffers.wrappedBuffer("value1".getBytes(utf8)), // value
      Some(ChannelBuffers.wrappedBuffer("key1".getBytes(utf8))), // key
      0 // codec
    )
    val kafkaMsg1 = new KafkaMessage(msg1.underlying.toByteBuffer)
    assert(kafkaMsg1.checksum === msg1.crc)
    assert(kafkaMsg1.magic === msg1.magicByte)
    assert(kafkaMsg1.attributes === msg1.attributes)
    assert(kafkaMsg1.key.asString === "key1")
    assert(kafkaMsg1.payload.asString === "value1")
    // Second message exercises the key-less encoding path.
    val msg2 = Message.create(
      ChannelBuffers.wrappedBuffer("value2".getBytes(utf8)), // value
      None, // key
      0 // codec
    )
    val kafkaMsg2 = new KafkaMessage(msg2.underlying.toByteBuffer)
    assert(kafkaMsg2.checksum === msg2.crc)
    assert(kafkaMsg2.magic === msg2.magicByte)
    assert(kafkaMsg2.attributes === msg2.attributes)
    assert(kafkaMsg2.key === null)
    assert(kafkaMsg2.payload.asString === "value2")
  }
  // Encode with the reference Kafka class, decode with our Message, compare all fields.
  it should "decode a no compressed message" in {
    val kafkaMsg1 = new KafkaMessage(
      "value1".getBytes(utf8), // value
      "key1".getBytes(utf8), // key
      NoCompressionCodec // codec
    )
    val msg1 = Message(ChannelBuffers.wrappedBuffer(kafkaMsg1.buffer))
    assert(msg1.crc === kafkaMsg1.checksum)
    assert(msg1.magicByte === kafkaMsg1.magic)
    assert(msg1.attributes === kafkaMsg1.attributes)
    assert(msg1.key.get.toString(utf8) === "key1")
    assert(msg1.value.toString(utf8) === "value1")
    // Key-less variant: decoded key must be None (reference API returns null).
    val kafkaMsg2 = new KafkaMessage(
      "value2".getBytes(utf8), // value
      NoCompressionCodec // codec
    )
    val msg2 = Message(ChannelBuffers.wrappedBuffer(kafkaMsg2.buffer))
    assert(msg2.crc === kafkaMsg2.checksum)
    assert(msg2.magicByte === kafkaMsg2.magic)
    assert(msg2.attributes === kafkaMsg2.attributes)
    assert(msg2.key === None)
    assert(msg2.value.toString(utf8) === "value2")
  }
}
| yonglehou/finagle-kafka | src/test/scala/okapies/finagle/kafka/protocol/MessageTest.scala | Scala | apache-2.0 | 2,472 |
package org.jetbrains.plugins.scala.lang.parser.scala3
import com.intellij.psi.tree.IElementType
import org.jetbrains.plugins.scala.AssertionMatchers
import org.jetbrains.plugins.scala.lang.lexer.{ScalaTokenType, ScalaTokenTypes}
import org.jetbrains.plugins.scala.lang.psi.api.base.ScEnd
import org.jetbrains.plugins.scala.util.PsiSelectionUtil
class EndParserTest extends SimpleScala3ParserTestBase with PsiSelectionUtil with AssertionMatchers {
def doTest(code: String, expectedType: IElementType): Unit = {
val file = checkParseErrors(code.stripMargin)
val endElement = searchElement[ScEnd](file)
val designator = endElement.tag
designator shouldNotBe null
val designatorType = endElement.tag.getNode.getElementType
designatorType shouldBe expectedType
}
  // `end if` must attach to the opening `if` keyword token.
  def test_end_if(): Unit = doTest(
    """
      |if (boolean)
      |  stmt1
      |  stmt2
      |else
      |  stmt3
      |  stmt4
      |end if
      |""".stripMargin,
    expectedType = ScalaTokenTypes.kIF
  )
  // Single-expression body: the End element is a sibling of the IfStatement in the tree.
  def test_one_expr_end_if(): Unit = checkTree(
    """
      |if (boolean)
      |  stmt
      |end if
      |""".stripMargin,
    """ScalaFile
      | PsiWhiteSpace('\\n')
      | IfStatement
      | PsiElement(if)('if')
      | PsiWhiteSpace(' ')
      | PsiElement(()('(')
      | ReferenceExpression: boolean
      | PsiElement(identifier)('boolean')
      | PsiElement())(')')
      | PsiWhiteSpace('\\n ')
      | ReferenceExpression: stmt
      | PsiElement(identifier)('stmt')
      | PsiWhiteSpace('\\n')
      | End: if
      | PsiElement(end)('end')
      | PsiWhiteSpace(' ')
      | PsiElement(if)('if')
      | PsiWhiteSpace('\\n')
      |""".stripMargin
  )
  // `end if` closes the OUTER if even when an inner if is nested inside it.
  def test_end_on_nested_if(): Unit = checkTree(
    """
      |if (boolean)
      |  if (boolean)
      |    stmt
      |end if
      |""".stripMargin,
    """ScalaFile
      | PsiWhiteSpace('\\n')
      | IfStatement
      | PsiElement(if)('if')
      | PsiWhiteSpace(' ')
      | PsiElement(()('(')
      | ReferenceExpression: boolean
      | PsiElement(identifier)('boolean')
      | PsiElement())(')')
      | PsiWhiteSpace('\\n ')
      | IfStatement
      | PsiElement(if)('if')
      | PsiWhiteSpace(' ')
      | PsiElement(()('(')
      | ReferenceExpression: boolean
      | PsiElement(identifier)('boolean')
      | PsiElement())(')')
      | PsiWhiteSpace('\\n ')
      | ReferenceExpression: stmt
      | PsiElement(identifier)('stmt')
      | PsiWhiteSpace('\\n')
      | End: if
      | PsiElement(end)('end')
      | PsiWhiteSpace(' ')
      | PsiElement(if)('if')
      | PsiWhiteSpace('\\n')
      |""".stripMargin
  )
  // `end while` links to the `while` keyword.
  def test_end_while(): Unit = doTest(
    """
      |while
      |  stmt1
      |  stmt2
      |do
      |  stmt3
      |  stmt4
      |end while
      |""".stripMargin,
    expectedType = ScalaTokenTypes.kWHILE
  )
  // `end for` links to the `for` keyword.
  def test_end_for(): Unit = doTest(
    """
      |for
      |  x <- xs
      |do
      |  stmt1
      |end for
      |""".stripMargin,
    expectedType = ScalaTokenTypes.kFOR
  )
  // `end try` links to the `try` keyword (try/finally form).
  def test_end_try_finally(): Unit = doTest(
    """
      |try
      |  stmt1
      |  stmt2
      |finally
      |  stmt3
      |  stmt4
      |end try
      |""".stripMargin,
    expectedType = ScalaTokenTypes.kTRY
  )
  // `end try` links to the `try` keyword (try/catch form).
  def test_end_try_catch(): Unit = doTest(
    """
      |try
      |  stmt1
      |  stmt2
      |catch
      |case a => stmt3
      |case b => stmt4
      |end try
      |""".stripMargin,
    expectedType = ScalaTokenTypes.kTRY
  )
  // `end match` links to the `match` keyword.
  def test_end_match(): Unit = doTest(
    """
      |something match
      |case a => stmt1
      |case _ => stmt2
      |end match
      |""".stripMargin,
    expectedType = ScalaTokenTypes.kMATCH
  )
  // `end new` links to the `new` keyword of an anonymous-class instantiation.
  def test_end_new(): Unit = doTest(
    """
      |new:
      |  stmt1
      |  stmt2
      |end new
      |""".stripMargin,
    expectedType = ScalaTokenType.NewKeyword
  )
  // `end <Name>` after a class body links to the identifier token.
  def test_end_class(): Unit = doTest(
    """
      |class A:
      |  stmt1
      |  stmt2
      |end A
      |""".stripMargin,
    expectedType = ScalaTokenTypes.tIDENTIFIER
  )
  // `end <name>` after a method body links to the identifier token.
  def test_end_method(): Unit = doTest(
    """
      |def test() =
      |  stmt1
      |  stmt2
      |end test
      |""".stripMargin,
    expectedType = ScalaTokenTypes.tIDENTIFIER
  )
  // An end marker is legal even when the trait body is empty.
  def test_empty_trait_end(): Unit = doTest(
    """
      |trait A:
      |end A
      |""".stripMargin,
    expectedType = ScalaTokenTypes.tIDENTIFIER
  )
  // An end marker is legal even when the packaging body is empty.
  def test_empty_package_end(): Unit = doTest(
    """
      |package A:
      |end A
      |""".stripMargin,
    expectedType = ScalaTokenTypes.tIDENTIFIER
  )
  // Nested packagings with end markers: each `end <name>` must close its own packaging,
  // and members declared inside remain resolvable by fully-qualified path afterwards.
  def test_package_end(): Unit = checkTree(
    """package foo:
      |  package bar:
      |    object A:
      |      def foo = 1
      |  end bar
      |end foo
      |package baz:
      |  object B:
      |    def f = foo.bar.A.foo
      |end baz
      |""".stripMargin,
    """ScalaFile
      | ScPackaging
      | PsiElement(package)('package')
      | PsiWhiteSpace(' ')
      | CodeReferenceElement: foo
      | PsiElement(identifier)('foo')
      | PsiElement(:)(':')
      | PsiWhiteSpace('\\n ')
      | ScPackaging
      | PsiElement(package)('package')
      | PsiWhiteSpace(' ')
      | CodeReferenceElement: bar
      | PsiElement(identifier)('bar')
      | PsiElement(:)(':')
      | PsiWhiteSpace('\\n ')
      | ScObject: A
      | AnnotationsList
      | <empty list>
      | Modifiers
      | <empty list>
      | PsiElement(object)('object')
      | PsiWhiteSpace(' ')
      | PsiElement(identifier)('A')
      | ExtendsBlock
      | ScTemplateBody
      | PsiElement(:)(':')
      | PsiWhiteSpace('\\n ')
      | ScFunctionDefinition: foo
      | AnnotationsList
      | <empty list>
      | Modifiers
      | <empty list>
      | PsiElement(def)('def')
      | PsiWhiteSpace(' ')
      | PsiElement(identifier)('foo')
      | Parameters
      | <empty list>
      | PsiWhiteSpace(' ')
      | PsiElement(=)('=')
      | PsiWhiteSpace(' ')
      | IntegerLiteral
      | PsiElement(integer)('1')
      | PsiWhiteSpace('\\n ')
      | End: bar
      | PsiElement(end)('end')
      | PsiWhiteSpace(' ')
      | PsiElement(identifier)('bar')
      | PsiWhiteSpace('\\n')
      | End: foo
      | PsiElement(end)('end')
      | PsiWhiteSpace(' ')
      | PsiElement(identifier)('foo')
      | PsiWhiteSpace('\\n')
      | ScPackaging
      | PsiElement(package)('package')
      | PsiWhiteSpace(' ')
      | CodeReferenceElement: baz
      | PsiElement(identifier)('baz')
      | PsiElement(:)(':')
      | PsiWhiteSpace('\\n ')
      | ScObject: B
      | AnnotationsList
      | <empty list>
      | Modifiers
      | <empty list>
      | PsiElement(object)('object')
      | PsiWhiteSpace(' ')
      | PsiElement(identifier)('B')
      | ExtendsBlock
      | ScTemplateBody
      | PsiElement(:)(':')
      | PsiWhiteSpace('\\n ')
      | ScFunctionDefinition: f
      | AnnotationsList
      | <empty list>
      | Modifiers
      | <empty list>
      | PsiElement(def)('def')
      | PsiWhiteSpace(' ')
      | PsiElement(identifier)('f')
      | Parameters
      | <empty list>
      | PsiWhiteSpace(' ')
      | PsiElement(=)('=')
      | PsiWhiteSpace(' ')
      | ReferenceExpression: foo.bar.A.foo
      | ReferenceExpression: foo.bar.A
      | ReferenceExpression: foo.bar
      | ReferenceExpression: foo
      | PsiElement(identifier)('foo')
      | PsiElement(.)('.')
      | PsiElement(identifier)('bar')
      | PsiElement(.)('.')
      | PsiElement(identifier)('A')
      | PsiElement(.)('.')
      | PsiElement(identifier)('foo')
      | PsiWhiteSpace('\\n')
      | End: baz
      | PsiElement(end)('end')
      | PsiWhiteSpace(' ')
      | PsiElement(identifier)('baz')
      | PsiWhiteSpace('\\n')
      |""".stripMargin
  )
// Verifies that Scala 3 `extension` groups closed by an `end extension` marker parse into
// the expected PSI tree, for both a plain extension and one with type/context parameters.
def testExtension(): Unit = checkTree(
"""object A {
| extension (c: String)
| def onlyDigits: Boolean = c.forall(_.isDigit)
| end extension
|
| extension [T](xs: List[T])
| def sumBy[U: Numeric](f: T => U): U = ???
| end extension
|}""".stripMargin,
"""ScalaFile
| ScObject: A
| AnnotationsList
| <empty list>
| Modifiers
| <empty list>
| PsiElement(object)('object')
| PsiWhiteSpace(' ')
| PsiElement(identifier)('A')
| PsiWhiteSpace(' ')
| ExtendsBlock
| ScTemplateBody
| PsiElement({)('{')
| PsiWhiteSpace('\\n ')
| Extension on String
| PsiElement(extension)('extension')
| PsiWhiteSpace(' ')
| Parameters
| ParametersClause
| PsiElement(()('(')
| Parameter: c
| AnnotationsList
| <empty list>
| Modifiers
| <empty list>
| PsiElement(identifier)('c')
| PsiElement(:)(':')
| PsiWhiteSpace(' ')
| ParameterType
| SimpleType: String
| CodeReferenceElement: String
| PsiElement(identifier)('String')
| PsiElement())(')')
| PsiWhiteSpace('\\n ')
| ScExtensionBody
| ScFunctionDefinition: onlyDigits
| AnnotationsList
| <empty list>
| Modifiers
| <empty list>
| PsiElement(def)('def')
| PsiWhiteSpace(' ')
| PsiElement(identifier)('onlyDigits')
| Parameters
| <empty list>
| PsiElement(:)(':')
| PsiWhiteSpace(' ')
| SimpleType: Boolean
| CodeReferenceElement: Boolean
| PsiElement(identifier)('Boolean')
| PsiWhiteSpace(' ')
| PsiElement(=)('=')
| PsiWhiteSpace(' ')
| MethodCall
| ReferenceExpression: c.forall
| ReferenceExpression: c
| PsiElement(identifier)('c')
| PsiElement(.)('.')
| PsiElement(identifier)('forall')
| ArgumentList
| PsiElement(()('(')
| ReferenceExpression: _.isDigit
| UnderscoreSection
| PsiElement(_)('_')
| PsiElement(.)('.')
| PsiElement(identifier)('isDigit')
| PsiElement())(')')
| PsiWhiteSpace('\\n ')
| End: extension
| PsiElement(end)('end')
| PsiWhiteSpace(' ')
| PsiElement(extension)('extension')
| PsiWhiteSpace('\\n\\n ')
| Extension on List[T]
| PsiElement(extension)('extension')
| PsiWhiteSpace(' ')
| TypeParameterClause
| PsiElement([)('[')
| TypeParameter: T
| PsiElement(identifier)('T')
| PsiElement(])(']')
| Parameters
| ParametersClause
| PsiElement(()('(')
| Parameter: xs
| AnnotationsList
| <empty list>
| Modifiers
| <empty list>
| PsiElement(identifier)('xs')
| PsiElement(:)(':')
| PsiWhiteSpace(' ')
| ParameterType
| ParametrizedType: List[T]
| SimpleType: List
| CodeReferenceElement: List
| PsiElement(identifier)('List')
| TypeArgumentsList
| PsiElement([)('[')
| SimpleType: T
| CodeReferenceElement: T
| PsiElement(identifier)('T')
| PsiElement(])(']')
| PsiElement())(')')
| PsiWhiteSpace('\\n ')
| ScExtensionBody
| ScFunctionDefinition: sumBy
| AnnotationsList
| <empty list>
| Modifiers
| <empty list>
| PsiElement(def)('def')
| PsiWhiteSpace(' ')
| PsiElement(identifier)('sumBy')
| TypeParameterClause
| PsiElement([)('[')
| TypeParameter: U
| PsiElement(identifier)('U')
| PsiElement(:)(':')
| PsiWhiteSpace(' ')
| SimpleType: Numeric
| CodeReferenceElement: Numeric
| PsiElement(identifier)('Numeric')
| PsiElement(])(']')
| Parameters
| ParametersClause
| PsiElement(()('(')
| Parameter: f
| AnnotationsList
| <empty list>
| Modifiers
| <empty list>
| PsiElement(identifier)('f')
| PsiElement(:)(':')
| PsiWhiteSpace(' ')
| ParameterType
| FunctionalType: T => U
| SimpleType: T
| CodeReferenceElement: T
| PsiElement(identifier)('T')
| PsiWhiteSpace(' ')
| PsiElement(=>)('=>')
| PsiWhiteSpace(' ')
| SimpleType: U
| CodeReferenceElement: U
| PsiElement(identifier)('U')
| PsiElement())(')')
| PsiElement(:)(':')
| PsiWhiteSpace(' ')
| SimpleType: U
| CodeReferenceElement: U
| PsiElement(identifier)('U')
| PsiWhiteSpace(' ')
| PsiElement(=)('=')
| PsiWhiteSpace(' ')
| ReferenceExpression: ???
| PsiElement(identifier)('???')
| PsiWhiteSpace('\\n ')
| End: extension
| PsiElement(end)('end')
| PsiWhiteSpace(' ')
| PsiElement(extension)('extension')
| PsiWhiteSpace('\\n')
| PsiElement(})('}')""".stripMargin
)
// todo: add tests for given
}
| JetBrains/intellij-scala | scala/scala-impl/test/org/jetbrains/plugins/scala/lang/parser/scala3/EndParserTest.scala | Scala | apache-2.0 | 16,019 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution
import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, DataOutputStream}
import scala.collection.mutable.ArrayBuffer
import scala.concurrent.ExecutionContext
import org.codehaus.commons.compiler.CompileException
import org.codehaus.janino.JaninoRuntimeException
import org.apache.spark.{broadcast, SparkEnv}
import org.apache.spark.internal.Logging
import org.apache.spark.io.CompressionCodec
import org.apache.spark.rdd.{RDD, RDDOperationScope}
import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow}
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.codegen.{Predicate => GenPredicate, _}
import org.apache.spark.sql.catalyst.plans.QueryPlan
import org.apache.spark.sql.catalyst.plans.physical._
import org.apache.spark.sql.execution.metric.SQLMetric
import org.apache.spark.sql.types.DataType
import org.apache.spark.util.ThreadUtils
/**
* The base class for physical operators.
*
* The naming convention is that physical operators end with "Exec" suffix, e.g. [[ProjectExec]].
*/
abstract class SparkPlan extends QueryPlan[SparkPlan] with Logging with Serializable {

  /**
   * A handle to the SQL Context that was used to create this plan. Since many operators need
   * access to the sqlContext for RDD operations or configuration this field is automatically
   * populated by the query planning infrastructure.
   */
  @transient
  final val sqlContext = SparkSession.getActiveSession.map(_.sqlContext).orNull

  // Convenience accessor; only safe where sqlContext is non-null (i.e. on the driver).
  protected def sparkContext = sqlContext.sparkContext

  // sqlContext will be null when we are being deserialized on the slaves. In this instance
  // the value of subexpressionEliminationEnabled will be set by the deserializer after the
  // constructor has run.
  val subexpressionEliminationEnabled: Boolean = if (sqlContext != null) {
    sqlContext.conf.subexpressionEliminationEnabled
  } else {
    false
  }

  /** Overridden make copy also propagates sqlContext to copied plan. */
  override def makeCopy(newArgs: Array[AnyRef]): SparkPlan = {
    SparkSession.setActiveSession(sqlContext.sparkSession)
    super.makeCopy(newArgs)
  }

  /**
   * Return all metadata that describes more details of this SparkPlan.
   */
  def metadata: Map[String, String] = Map.empty

  /**
   * Return all metrics containing metrics of this SparkPlan.
   */
  def metrics: Map[String, SQLMetric] = Map.empty

  /**
   * Reset all the metrics.
   */
  def resetMetrics(): Unit = {
    metrics.valuesIterator.foreach(_.reset())
  }

  /**
   * Return a LongSQLMetric according to the name.
   */
  def longMetric(name: String): SQLMetric = metrics(name)

  // TODO: Move to `DistributedPlan`
  /** Specifies how data is partitioned across different nodes in the cluster. */
  def outputPartitioning: Partitioning = UnknownPartitioning(0) // TODO: WRONG WIDTH!

  /** Specifies any partition requirements on the input data for this operator. */
  def requiredChildDistribution: Seq[Distribution] =
    Seq.fill(children.size)(UnspecifiedDistribution)

  /** Specifies how data is ordered in each partition. */
  def outputOrdering: Seq[SortOrder] = Nil

  /** Specifies sort order for each partition requirements on the input data for this operator. */
  def requiredChildOrdering: Seq[Seq[SortOrder]] = Seq.fill(children.size)(Nil)

  /**
   * Returns the result of this query as an RDD[InternalRow] by delegating to `doExecute` after
   * preparations.
   *
   * Concrete implementations of SparkPlan should override `doExecute`.
   */
  final def execute(): RDD[InternalRow] = executeQuery {
    doExecute()
  }

  /**
   * Returns the result of this query as a broadcast variable by delegating to `doExecuteBroadcast`
   * after preparations.
   *
   * Concrete implementations of SparkPlan should override `doExecuteBroadcast`.
   */
  final def executeBroadcast[T](): broadcast.Broadcast[T] = executeQuery {
    doExecuteBroadcast()
  }

  /**
   * Execute a query after preparing the query and adding query plan information to created RDDs
   * for visualization.
   */
  protected final def executeQuery[T](query: => T): T = {
    RDDOperationScope.withScope(sparkContext, nodeName, false, true) {
      prepare()
      waitForSubqueries()
      query
    }
  }

  /**
   * List of (uncorrelated scalar subquery, future holding the subquery result) for this plan node.
   * This list is populated by [[prepareSubqueries]], which is called in [[prepare]].
   */
  @transient
  private val runningSubqueries = new ArrayBuffer[ExecSubqueryExpression]

  /**
   * Finds scalar subquery expressions in this plan node and starts evaluating them.
   */
  protected def prepareSubqueries(): Unit = {
    expressions.foreach {
      _.collect {
        case e: ExecSubqueryExpression =>
          e.plan.prepare()
          runningSubqueries += e
      }
    }
  }

  /**
   * Blocks the thread until all subqueries finish evaluation and update the results.
   */
  protected def waitForSubqueries(): Unit = synchronized {
    // fill in the result of subqueries
    runningSubqueries.foreach { sub =>
      sub.updateResult()
    }
    runningSubqueries.clear()
  }

  /**
   * Whether the "prepare" method is called.
   */
  private var prepared = false

  /**
   * Prepare a SparkPlan for execution. It's idempotent.
   */
  final def prepare(): Unit = {
    // doPrepare() may depend on it's children, we should call prepare() on all the children first.
    children.foreach(_.prepare())
    synchronized {
      if (!prepared) {
        prepareSubqueries()
        doPrepare()
        prepared = true
      }
    }
  }

  /**
   * Overridden by concrete implementations of SparkPlan. It is guaranteed to run before any
   * `execute` of SparkPlan. This is helpful if we want to set up some state before executing the
   * query, e.g., `BroadcastHashJoin` uses it to broadcast asynchronously.
   *
   * Note: the prepare method has already walked down the tree, so the implementation doesn't need
   * to call children's prepare methods.
   *
   * This will only be called once, protected by `this`.
   */
  protected def doPrepare(): Unit = {}

  /**
   * Overridden by concrete implementations of SparkPlan.
   * Produces the result of the query as an RDD[InternalRow]
   */
  protected def doExecute(): RDD[InternalRow]

  /**
   * Overridden by concrete implementations of SparkPlan.
   * Produces the result of the query as a broadcast variable.
   */
  protected[sql] def doExecuteBroadcast[T](): broadcast.Broadcast[T] = {
    throw new UnsupportedOperationException(s"$nodeName does not implement doExecuteBroadcast")
  }

  /**
   * Packing the UnsafeRows into byte array for faster serialization.
   * The byte arrays are in the following format:
   * [size] [bytes of UnsafeRow] [size] [bytes of UnsafeRow] ... [-1]
   *
   * UnsafeRow is highly compressible (at least 8 bytes for any column), the byte array is also
   * compressed.
   */
  private def getByteArrayRdd(n: Int = -1): RDD[Array[Byte]] = {
    execute().mapPartitionsInternal { iter =>
      var count = 0
      val buffer = new Array[Byte](4 << 10) // 4K
      val codec = CompressionCodec.createCodec(SparkEnv.get.conf)
      val bos = new ByteArrayOutputStream()
      val out = new DataOutputStream(codec.compressedOutputStream(bos))
      // n < 0 means "no limit": drain the whole partition.
      while (iter.hasNext && (n < 0 || count < n)) {
        val row = iter.next().asInstanceOf[UnsafeRow]
        out.writeInt(row.getSizeInBytes)
        row.writeToStream(out, buffer)
        count += 1
      }
      // -1 is the end-of-stream sentinel expected by decodeUnsafeRows.
      out.writeInt(-1)
      out.flush()
      out.close()
      Iterator(bos.toByteArray)
    }
  }

  /**
   * Decode the byte arrays back to UnsafeRows and put them into buffer.
   */
  private def decodeUnsafeRows(bytes: Array[Byte]): Iterator[InternalRow] = {
    val nFields = schema.length

    val codec = CompressionCodec.createCodec(SparkEnv.get.conf)
    val bis = new ByteArrayInputStream(bytes)
    val ins = new DataInputStream(codec.compressedInputStream(bis))

    // Lazily streams rows out of the compressed buffer; stops at the -1 size sentinel.
    new Iterator[InternalRow] {
      private var sizeOfNextRow = ins.readInt()
      override def hasNext: Boolean = sizeOfNextRow >= 0
      override def next(): InternalRow = {
        val bs = new Array[Byte](sizeOfNextRow)
        ins.readFully(bs)
        val row = new UnsafeRow(nFields)
        row.pointTo(bs, sizeOfNextRow)
        sizeOfNextRow = ins.readInt()
        row
      }
    }
  }

  /**
   * Runs this query returning the result as an array.
   */
  def executeCollect(): Array[InternalRow] = {
    val byteArrayRdd = getByteArrayRdd()

    val results = ArrayBuffer[InternalRow]()
    byteArrayRdd.collect().foreach { bytes =>
      decodeUnsafeRows(bytes).foreach(results.+=)
    }
    results.toArray
  }

  /**
   * Runs this query returning the result as an iterator of InternalRow.
   *
   * Note: this will trigger multiple jobs (one for each partition).
   */
  def executeToIterator(): Iterator[InternalRow] = {
    getByteArrayRdd().toLocalIterator.flatMap(decodeUnsafeRows)
  }

  /**
   * Runs this query returning the result as an array, using external Row format.
   */
  def executeCollectPublic(): Array[Row] = {
    val converter = CatalystTypeConverters.createToScalaConverter(schema)
    executeCollect().map(converter(_).asInstanceOf[Row])
  }

  /**
   * Runs this query returning the first `n` rows as an array.
   *
   * This is modeled after RDD.take but never runs any job locally on the driver.
   */
  def executeTake(n: Int): Array[InternalRow] = {
    if (n == 0) {
      return new Array[InternalRow](0)
    }

    val childRDD = getByteArrayRdd(n)

    val buf = new ArrayBuffer[InternalRow]
    val totalParts = childRDD.partitions.length
    var partsScanned = 0
    while (buf.size < n && partsScanned < totalParts) {
      // The number of partitions to try in this iteration. It is ok for this number to be
      // greater than totalParts because we actually cap it at totalParts in runJob.
      var numPartsToTry = 1L
      if (partsScanned > 0) {
        // If we didn't find any rows after the previous iteration, quadruple and retry.
        // Otherwise, interpolate the number of partitions we need to try, but overestimate
        // it by 50%. We also cap the estimation in the end.
        val limitScaleUpFactor = Math.max(sqlContext.conf.limitScaleUpFactor, 2)
        if (buf.isEmpty) {
          numPartsToTry = partsScanned * limitScaleUpFactor
        } else {
          // the left side of max is >=1 whenever partsScanned >= 2
          numPartsToTry = Math.max((1.5 * n * partsScanned / buf.size).toInt - partsScanned, 1)
          numPartsToTry = Math.min(numPartsToTry, partsScanned * limitScaleUpFactor)
        }
      }

      val p = partsScanned.until(math.min(partsScanned + numPartsToTry, totalParts).toInt)
      val sc = sqlContext.sparkContext
      // Each task ships back its partition's (already size-capped) encoded byte buffer.
      val res = sc.runJob(childRDD,
        (it: Iterator[Array[Byte]]) => if (it.hasNext) it.next() else Array.empty[Byte], p)

      buf ++= res.flatMap(decodeUnsafeRows)

      partsScanned += p.size
    }

    if (buf.size > n) {
      buf.take(n).toArray
    } else {
      buf.toArray
    }
  }

  /** Builds a (possibly codegen'd) mutable projection for the given expressions. */
  protected def newMutableProjection(
      expressions: Seq[Expression],
      inputSchema: Seq[Attribute],
      useSubexprElimination: Boolean = false): MutableProjection = {
    log.debug(s"Creating MutableProj: $expressions, inputSchema: $inputSchema")
    GenerateMutableProjection.generate(expressions, inputSchema, useSubexprElimination)
  }

  /**
   * Builds an interpreted (non-codegen) predicate, logging a warning whose expression text is
   * truncated to 256 characters. Used as the fallback when codegen fails; see newPredicate.
   */
  private def genInterpretedPredicate(
      expression: Expression, inputSchema: Seq[Attribute]): InterpretedPredicate = {
    val str = expression.toString
    val logMessage = if (str.length > 256) {
      str.substring(0, 256 - 3) + "..."
    } else {
      str
    }
    logWarning(s"Codegen disabled for this expression:\n $logMessage")
    InterpretedPredicate.create(expression, inputSchema)
  }

  protected def newPredicate(
      expression: Expression, inputSchema: Seq[Attribute]): GenPredicate = {
    try {
      GeneratePredicate.generate(expression, inputSchema)
    } catch {
      // Janino compilation failures fall back to interpretation, but only when whole-stage
      // fallback is enabled (or there is no sqlContext, e.g. after executor deserialization).
      case e @ (_: JaninoRuntimeException | _: CompileException)
          if sqlContext == null || sqlContext.conf.wholeStageFallback =>
        genInterpretedPredicate(expression, inputSchema)
    }
  }

  /** Builds a row ordering for the given sort order over the given input schema. */
  protected def newOrdering(
      order: Seq[SortOrder], inputSchema: Seq[Attribute]): Ordering[InternalRow] = {
    GenerateOrdering.generate(order, inputSchema)
  }

  /**
   * Creates a row ordering for the given schema, in natural ascending order.
   */
  protected def newNaturalAscendingOrdering(dataTypes: Seq[DataType]): Ordering[InternalRow] = {
    val order: Seq[SortOrder] = dataTypes.zipWithIndex.map {
      case (dt, index) => SortOrder(BoundReference(index, dt, nullable = true), Ascending)
    }
    newOrdering(order, Seq.empty)
  }
}
/** Companion holding state shared by all [[SparkPlan]] instances. */
object SparkPlan {
  // Bounded (16-thread) daemon pool on which uncorrelated subqueries are evaluated
  // asynchronously during plan preparation.
  private[execution] val subqueryExecutionContext = ExecutionContext.fromExecutorService(
    ThreadUtils.newDaemonCachedThreadPool("subquery", 16))
}
/** A physical operator with no children (e.g. a scan or source). */
trait LeafExecNode extends SparkPlan {
  override final def children: Seq[SparkPlan] = Nil
  // A leaf originates its output rather than deriving it from children, so its entire
  // output set counts as produced attributes.
  override def producedAttributes: AttributeSet = outputSet
}
/** Extractor that matches any physical operator with exactly one child. */
object UnaryExecNode {
  /**
   * Returns the plan paired with its sole child, or None when `a` is not a
   * SparkPlan or does not have exactly one child.
   */
  def unapply(a: Any): Option[(SparkPlan, SparkPlan)] = a match {
    case plan: SparkPlan =>
      plan.children match {
        case Seq(onlyChild) => Some((plan, onlyChild))
        case _ => None
      }
    case _ => None
  }
}
/** A physical operator with exactly one child, exposed via `child`. */
trait UnaryExecNode extends SparkPlan {
  def child: SparkPlan

  override final def children: Seq[SparkPlan] = child :: Nil
}
/** A physical operator with exactly two children, exposed via `left` and `right`. */
trait BinaryExecNode extends SparkPlan {
  def left: SparkPlan
  def right: SparkPlan

  override final def children: Seq[SparkPlan] = Seq(left, right)
}
| setjet/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala | Scala | apache-2.0 | 14,554 |
/*
* Copyright 2016 Coral realtime streaming analytics (http://coral-streaming.github.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.coral.actors.transform
import akka.actor.{ActorRefFactory, Actor, ActorSystem, Props}
import akka.io.IO
import akka.testkit.{ImplicitSender, TestActorRef, TestKit, TestProbe}
import akka.util.Timeout
import akka.pattern.ask
import io.coral.actors.CoralActorFactory
import io.coral.api.{JsonConversions, DefaultModule}
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
import spray.can.Http
import spray.routing.HttpService
import scala.concurrent.Await
import scala.concurrent.duration._
/**
 * Integration-style spec for HttpClientActor: boots a local spray HTTP server
 * (HttpTestActor) on port 8111 and checks that the client actor emits the
 * status, headers and body of responses as JSON.
 */
class HttpClientActorSpec(_system: ActorSystem)
  extends TestKit(_system)
  with ImplicitSender
  with WordSpecLike
  with Matchers
  with BeforeAndAfterAll {
  implicit val timeout = Timeout(2.seconds)

  //Setup HTTP server for testing the client
  // NOTE(review): port 8111 is hard-coded; concurrent test runs on one machine may collide.
  val service = system.actorOf(Props[HttpTestActor])
  Await.result(IO(Http) ? Http.Bind(service, "localhost", 8111), timeout.duration)

  override def afterAll() {
    // NOTE(review): `Http.Unbind` only references the message object; it is never sent to
    // the IO(Http) manager, so the port is not actually unbound here — confirm intended.
    Http.Unbind
    TestKit.shutdownActorSystem(system)
  }

  implicit val injector = new DefaultModule(system.settings.config)
  implicit val formats = org.json4s.DefaultFormats

  def this() = this(ActorSystem("HttpClientActorSpec"))

  // Receives whatever the actor under test emits.
  val testProbe = TestProbe()

  "a HttpClientActor" should {
    "not create a new actor with an incorrect JSON definition" in {
      // Missing required fields (method/mode/response), so construction must fail.
      val instantiationJson = parse(s"""{
| "type": "httpclient",
| "params": {
| "url": "http://localhost:8111"
| }
|}""".stripMargin).asInstanceOf[JObject]
      val httpClientActor = HttpClientActor(instantiationJson)
      assert(httpClientActor == None)
    }

    "emit the retrieved content" in {
      val instantiationJson = parse(s"""{
| "type": "httpclient",
| "params": {
| "url": "http://localhost:8111/text",
| "method": "GET",
| "mode": "static",
| "response": "emit"
| }
|}}""".stripMargin).asInstanceOf[JObject]
      val props: Props = CoralActorFactory.getProps(instantiationJson).get
      val actorRef = TestActorRef[HttpClientActor](props)
      actorRef.underlyingActor.emitTargets += testProbe.ref
      val triggerJson = parse("{}").asInstanceOf[JObject]
      actorRef ! triggerJson
      val json = testProbe.receiveOne(1.seconds).asInstanceOf[JObject]
      assert((json \\ "status").extract[String] == "200 OK")
      assert((json \\ "headers" \\ "Content-Type").extract[String] == "text/plain; charset=UTF-8")
      assert((json \\ "body").extract[String] == "content")
    }

    "emit json content as json" in {
      val instantiationJson = parse(s"""{
| "type": "httpclient",
| "params": {
| "url": "http://localhost:8111/json",
| "method": "GET",
| "mode": "static",
| "response": "emit"
| }
|}}""".stripMargin).asInstanceOf[JObject]
      val props: Props = CoralActorFactory.getProps(instantiationJson).get
      val actorRef = TestActorRef[HttpClientActor](props)
      actorRef.underlyingActor.emitTargets += testProbe.ref
      val triggerJson = parse("{}").asInstanceOf[JObject]
      actorRef ! triggerJson
      val json = testProbe.receiveOne(1.seconds).asInstanceOf[JObject]
      // The JSON body must be emitted as a parsed structure, not a raw string.
      assert((json \\ "body" \\ "content").extract[String] == "jsoncontent")
    }

    "send header to the server" in {
      val instantiationJson = parse(s"""{
| "type": "httpclient",
| "params": {
| "url": "http://localhost:8111/header",
| "method": "GET",
| "mode": "static",
| "response": "emit",
| "headers": {
| "Authorization": "mykey"
| }
| }
|}}""".stripMargin).asInstanceOf[JObject]
      val props: Props = CoralActorFactory.getProps(instantiationJson).get
      val actorRef = TestActorRef[HttpClientActor](props)
      actorRef.underlyingActor.emitTargets += testProbe.ref
      val triggerJson = parse("{}").asInstanceOf[JObject]
      actorRef ! triggerJson
      val json = testProbe.receiveOne(1.seconds).asInstanceOf[JObject]
      // The /header route echoes the Authorization header back in the body.
      assert((json \\ "body").extract[String] == "The authorization received is mykey")
    }

    "send payload to the server " in {
      val instantiationJson = parse(s"""{
| "type": "httpclient",
| "params": {
| "url": "http://localhost:8111/text",
| "method": "POST",
| "mode": "static",
| "response": "emit"
| }
|}""".stripMargin).asInstanceOf[JObject]
      val props: Props = CoralActorFactory.getProps(instantiationJson).get
      val actorRef = TestActorRef[HttpClientActor](props)
      actorRef.underlyingActor.emitTargets += testProbe.ref
      // The trigger message itself becomes the POST payload.
      val payload = """{"mypayload":"something"}"""
      val triggerJson = parse(payload).asInstanceOf[JObject]
      actorRef ! triggerJson
      val json = testProbe.receiveOne(1.seconds).asInstanceOf[JObject]
      assert((json \\ "body").extract[String] == s"The received payload is ${payload}")
    }
  }
}
/**
 * Minimal spray-routing HTTP server used as the remote endpoint in HttpClientActorSpec.
 *
 * Routes:
 *  - GET  /text   -> plain text "content"
 *  - POST /text   -> echoes the request payload
 *  - GET  /json   -> a small JSON document
 *  - GET  /header -> echoes the Authorization header value
 */
class HttpTestActor extends Actor with HttpService {
  override def receive: Receive = runRoute(serviceRoute)

  override implicit def actorRefFactory: ActorRefFactory = context

  implicit val system = context.system

  val serviceRoute = {
    pathPrefix("text") {
      pathEnd {
        get {
          complete("content")
        } ~ post {
          entity(as[String]) { payload =>
            complete(s"The received payload is $payload")
          }
        }
      }
    } ~ pathPrefix("json") {
      pathEnd {
        get {
          // JsonConversions supplies the marshaller that serves this as application/json.
          import JsonConversions._
          complete(parse( """{"content": "jsoncontent"}"""))
        }
      }
    } ~ pathPrefix("header") {
      pathEnd {
        get {
          headerValueByName("Authorization") { authorization =>
            complete(s"The authorization received is $authorization")
          }
        }
      }
    }
  }
}
} | coral-streaming/coral | src/test/scala/io/coral/actors/transform/HttpClientActorSpec.scala | Scala | apache-2.0 | 6,327 |
package swarmize.aws.swf
import com.amazonaws.services.simpleworkflow.flow.common.WorkflowExecutionUtils
import com.amazonaws.services.simpleworkflow.model._
/** Typed wrapper around a raw AWS SWF [[HistoryEvent]], exposing common accessors. */
sealed trait SwfHistoryEvent {
  /** The underlying AWS SDK event. */
  def rawEvent: HistoryEvent

  def eventType: String = rawEvent.getEventType
  def eventId = rawEvent.getEventId

  // All decision-task event types (DecisionTaskScheduled/Started/...) share this prefix.
  def isDecisionEvent = eventType startsWith "Decision"

  override def toString = WorkflowExecutionUtils.prettyPrintHistoryEvent(rawEvent)
}
/** Wraps a WorkflowExecutionStarted event; exposes the workflow's input string. */
case class WorkflowExecutionStarted(rawEvent: HistoryEvent) extends SwfHistoryEvent {
  def props = rawEvent.getWorkflowExecutionStartedEventAttributes
  def input = props.getInput
}
/** Wraps an ActivityTaskStarted event; no extra attributes are exposed. */
case class ActivityTaskStarted(rawEvent: HistoryEvent) extends SwfHistoryEvent
/** Wraps an ActivityTaskScheduled event; exposes the activity type and control data. */
case class ActivityTaskScheduled(rawEvent: HistoryEvent) extends SwfHistoryEvent {
  def props = rawEvent.getActivityTaskScheduledEventAttributes
  def activityType = props.getActivityType
  def control = props.getControl
}
/** Wraps an ActivityTaskCompleted event; exposes the result and the scheduling event id. */
case class ActivityTaskCompleted(rawEvent: HistoryEvent) extends SwfHistoryEvent {
  def props = rawEvent.getActivityTaskCompletedEventAttributes
  def result = props.getResult
  def scheduledEventId = props.getScheduledEventId
}
/** Wraps an ActivityTaskFailed event; exposes the failure details and reason. */
case class ActivityTaskFailed(rawEvent: HistoryEvent) extends SwfHistoryEvent {
  def props = rawEvent.getActivityTaskFailedEventAttributes
  def details = props.getDetails
  def reason = props.getReason
}
/** Wraps an ActivityTaskTimedOut event; exposes the timeout details. */
case class ActivityTaskTimedOut(rawEvent: HistoryEvent) extends SwfHistoryEvent {
  def props = rawEvent.getActivityTaskTimedOutEventAttributes
  def details = props.getDetails
}
/** Fallback wrapper for any event type without a dedicated case class. */
case class UnparsedHistoryEvent(rawEvent: HistoryEvent) extends SwfHistoryEvent
/*
<b>Allowed Values: </b>
WorkflowExecutionStarted,
WorkflowExecutionCancelRequested,
WorkflowExecutionCompleted,
CompleteWorkflowExecutionFailed,
WorkflowExecutionFailed,
FailWorkflowExecutionFailed,
WorkflowExecutionTimedOut,
WorkflowExecutionCanceled,
CancelWorkflowExecutionFailed,
WorkflowExecutionContinuedAsNew,
ContinueAsNewWorkflowExecutionFailed,
WorkflowExecutionTerminated,
DecisionTaskScheduled,
DecisionTaskStarted,
DecisionTaskCompleted,
DecisionTaskTimedOut,
ActivityTaskScheduled,
ScheduleActivityTaskFailed,
ActivityTaskStarted,
ActivityTaskFailed,
ActivityTaskTimedOut,
ActivityTaskCanceled,
ActivityTaskCancelRequested,
RequestCancelActivityTaskFailed,
WorkflowExecutionSignaled,
MarkerRecorded,
RecordMarkerFailed,
TimerStarted,
StartTimerFailed,
TimerFired,
TimerCanceled,
CancelTimerFailed,
StartChildWorkflowExecutionInitiated,
StartChildWorkflowExecutionFailed,
ChildWorkflowExecutionStarted,
ChildWorkflowExecutionCompleted,
ChildWorkflowExecutionFailed,
ChildWorkflowExecutionTimedOut,
ChildWorkflowExecutionCanceled,
ChildWorkflowExecutionTerminated,
SignalExternalWorkflowExecutionInitiated,
SignalExternalWorkflowExecutionFailed,
ExternalWorkflowExecutionSignaled,
RequestCancelExternalWorkflowExecutionInitiated,
RequestCancelExternalWorkflowExecutionFailed,
ExternalWorkflowExecutionCancelRequested
*
*/
/** Registry mapping SWF event-type names to their typed wrapper factories. */
object SwfHistoryEvent {
  // Case-class companions double as `HistoryEvent => SwfHistoryEvent` factories, and their
  // toString is the class name, which matches the SWF event-type string.
  //
  // Every parsed wrapper defined above must be registered here; previously
  // ActivityTaskScheduled and ActivityTaskTimedOut were missing, so those event types were
  // silently returned as UnparsedHistoryEvent even though typed wrappers exist.
  val all = List(
    WorkflowExecutionStarted,
    ActivityTaskScheduled,
    ActivityTaskStarted,
    ActivityTaskCompleted,
    ActivityTaskFailed,
    ActivityTaskTimedOut
  )

  // Unknown event types fall back to the UnparsedHistoryEvent wrapper.
  val allWithName = all.map(c => c.toString -> c).toMap.withDefaultValue(UnparsedHistoryEvent)

  /** Wraps a raw SDK event in its typed counterpart (or UnparsedHistoryEvent). */
  def parse(ev: HistoryEvent): SwfHistoryEvent = allWithName(ev.getEventType).apply(ev)
}
} | FreeSchoolHackers/swarmize | shared-lib/src/main/scala/swarmize/aws/swf/SwfHistoryEvent.scala | Scala | apache-2.0 | 3,373 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ui
import java.net.ServerSocket
import scala.io.Source
import scala.util.{Failure, Success, Try}
import org.eclipse.jetty.servlet.ServletContextHandler
import org.scalatest.concurrent.Eventually._
import org.scalatest.time.SpanSugar._
import org.apache.spark.LocalSparkContext._
import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}
/**
 * Tests for the Spark web UI: Jetty port selection/binding behaviour and the format of
 * the advertised UI address. The HTTP-scraping tests are disabled (`ignore`) since they
 * fetch a live UI over the network — presumably too environment-dependent for CI.
 */
class UISuite extends SparkFunSuite {

  /**
   * Create a test SparkContext with the SparkUI enabled.
   * It is safe to `get` the SparkUI directly from the SparkContext returned here.
   */
  private def newSparkContext(): SparkContext = {
    val conf = new SparkConf()
      .setMaster("local")
      .setAppName("test")
      .set("spark.ui.enabled", "true")
    val sc = new SparkContext(conf)
    assert(sc.ui.isDefined)
    sc
  }

  ignore("basic ui visibility") {
    withSpark(newSparkContext()) { sc =>
      // test if the ui is visible, and all the expected tabs are visible
      eventually(timeout(10 seconds), interval(50 milliseconds)) {
        val html = Source.fromURL(sc.ui.get.appUIAddress).mkString
        assert(!html.contains("random data that should not be present"))
        assert(html.toLowerCase.contains("stages"))
        assert(html.toLowerCase.contains("storage"))
        assert(html.toLowerCase.contains("environment"))
        assert(html.toLowerCase.contains("executors"))
      }
    }
  }

  ignore("visibility at localhost:4040") {
    withSpark(newSparkContext()) { sc =>
      // test if visible from http://localhost:4040
      eventually(timeout(10 seconds), interval(50 milliseconds)) {
        val html = Source.fromURL("http://localhost:4040").mkString
        assert(html.toLowerCase.contains("stages"))
      }
    }
  }

  test("jetty selects different port under contention") {
    // Occupy a port so Jetty is forced to pick different ones.
    val server = new ServerSocket(0)
    val startPort = server.getLocalPort
    val serverInfo1 = JettyUtils.startJettyServer(
      "0.0.0.0", startPort, Seq[ServletContextHandler](), new SparkConf)
    val serverInfo2 = JettyUtils.startJettyServer(
      "0.0.0.0", startPort, Seq[ServletContextHandler](), new SparkConf)
    // Allow some wiggle room in case ports on the machine are under contention
    val boundPort1 = serverInfo1.boundPort
    val boundPort2 = serverInfo2.boundPort
    assert(boundPort1 != startPort)
    assert(boundPort2 != startPort)
    assert(boundPort1 != boundPort2)
    serverInfo1.server.stop()
    serverInfo2.server.stop()
    server.close()
  }

  test("jetty binds to port 0 correctly") {
    val serverInfo = JettyUtils.startJettyServer(
      "0.0.0.0", 0, Seq[ServletContextHandler](), new SparkConf)
    val server = serverInfo.server
    val boundPort = serverInfo.boundPort
    assert(server.getState === "STARTED")
    assert(boundPort != 0)
    // If Jetty really holds the port, binding a second socket to it must fail.
    Try { new ServerSocket(boundPort) } match {
      case Success(s) => fail("Port %s doesn't seem used by jetty server".format(boundPort))
      case Failure(e) =>
    }
  }

  test("verify appUIAddress contains the scheme") {
    withSpark(newSparkContext()) { sc =>
      val ui = sc.ui.get
      val uiAddress = ui.appUIAddress
      val uiHostPort = ui.appUIHostPort
      assert(uiAddress.equals("http://" + uiHostPort))
    }
  }

  test("verify appUIAddress contains the port") {
    withSpark(newSparkContext()) { sc =>
      val ui = sc.ui.get
      val splitUIAddress = ui.appUIAddress.split(':')
      val boundPort = ui.boundPort
      assert(splitUIAddress(2).toInt == boundPort)
    }
  }
}
| ArvinDevel/onlineAggregationOnSparkV2 | core/src/test/scala/org/apache/spark/ui/UISuite.scala | Scala | apache-2.0 | 4,303 |
package songs
// Placeholder object for song input handling; intentionally empty for now.
object Input {
}
| jasonmar/millionsongs | src/main/scala/songs/Input.scala | Scala | apache-2.0 | 34 |
/*******************************************************************************
* Copyright (c) 2014 Guillaume DUBUISSON DUPLESSIS <guillaume.dubuisson_duplessis@insa-rouen.fr>.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the GNU Public License v3.0
* which accompanies this distribution, and is available at
* http://www.gnu.org/licenses/gpl.html
*
* Contributors:
* Guillaume DUBUISSON DUPLESSIS <guillaume.dubuisson_duplessis@insa-rouen.fr> - initial API and implementation
******************************************************************************/
package arithmetic.P33
import util.ExerciseTemplate
trait P33 extends ExerciseTemplate {
  /*
  P33 (*) Determine whether two positive integer numbers are coprime.
  Two numbers are coprime if their greatest common divisor equals 1.
  scala> 35.isCoprimeTo(64)
  res0: Boolean = true
  */
  val name = "P33 (Determine whether two positive integer numbers are coprime)"

  /**
   * To be supplied by the exercise solution: returns true iff gcd(a, b) == 1.
   * Per the tests below: both arguments zero is an IllegalArgumentException,
   * and exactly one zero argument yields false.
   */
  def isCoprimeTo(a: Int, b: Int): Boolean

  test("Invoking isCoprimeTo with a=0 AND b=0 should return an IllegalArgumentException") {
    intercept[IllegalArgumentException] {
      isCoprimeTo(0, 0)
    }
  }

  test("Invoking isCoprimeTo with a=0 or b=0 should return false") {
    assert(!isCoprimeTo(42, 0))
    assert(!isCoprimeTo(0, 42))
  }

  test("Invoking isCoprime with a>0 and b>0 should return true if they are coprime, else false") {
    assert(isCoprimeTo(42, 1))
    assert(!isCoprimeTo(36, 63))
    assert(isCoprimeTo(35, 64))
  }
}
| GuillaumeDD/scala99problems | src/main/scala/arithmetic/P33/P33.scala | Scala | gpl-3.0 | 1,566 |
/*
* Copyright 2011-2019 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.spark.compiler
package graph
import scala.collection.JavaConversions._
import scala.collection.mutable
import org.objectweb.asm.{ Opcodes, Type }
import org.objectweb.asm.signature.SignatureVisitor
import com.asakusafw.lang.compiler.extension.directio.DirectFileIoModels
import com.asakusafw.lang.compiler.model.graph.{ ExternalOutput, MarkerOperator }
import com.asakusafw.lang.compiler.planning.{ Plan, Planning, SubPlan }
import com.asakusafw.spark.compiler.planning._
import com.asakusafw.spark.compiler.spi.NodeCompiler
import com.asakusafw.spark.compiler.util.SparkIdioms._
import com.asakusafw.spark.runtime.JobContext
import com.asakusafw.spark.runtime.graph._
import com.asakusafw.spark.tools.asm._
import com.asakusafw.spark.tools.asm.MethodBuilder._
import com.asakusafw.spark.tools.asm4s._
import com.asakusafw.utils.graph.Graphs
/**
 * Emits (via ASM) the generated `Job` subclass for a compiled execution plan.
 *
 * The generated class materializes one `Node` per sub-plan — plus, when direct
 * file output is enabled, a leading setup node (slot 0) and a trailing commit
 * node (last slot) — wires broadcast values between them, and exposes the
 * resulting sequence through the generated `nodes` accessor.
 */
class JobClassBuilder(
  val plan: Plan)(
  implicit context: JobCompiler.Context)
  extends ClassBuilder(
    Type.getType(s"L${GeneratedClassPackageInternalName}/${context.flowId}/graph/Job;"),
    classOf[Job].asType) {
  // Sub-plans whose primary operator is an ExternalOutput supported by direct file IO.
  private val directOutputs = collectDirectOutputs(plan.getElements.toSet[SubPlan])
  // When true, extra setup/commit nodes are generated around the sub-plan nodes.
  private def useDirectOut: Boolean = directOutputs.nonEmpty
  // Sub-plans in dependency (post) order so each node's predecessors are defined first.
  private val subplans = Graphs.sortPostOrder(Planning.toDependencyGraph(plan)).toSeq
  // Slot of each sub-plan in the generated node array; shifted by one when the
  // direct-output setup node occupies slot 0.
  private val subplanToIdx = if (useDirectOut) {
    subplans.zipWithIndex.map {
      case (subplan, i) => subplan -> (i + 1)
    }.toMap
  } else {
    subplans.zipWithIndex.toMap
  }
  // Declares the private final `nodes: Seq[Node]` field with its generic signature.
  override def defFields(fieldDef: FieldDef): Unit = {
    super.defFields(fieldDef)
    fieldDef.newField(
      Opcodes.ACC_PRIVATE | Opcodes.ACC_FINAL,
      "nodes",
      classOf[Seq[Node]].asType,
      new TypeSignatureBuilder()
        .newClassType(classOf[Seq[_]].asType) {
          _.newTypeArgument(SignatureVisitor.INSTANCEOF, classOf[Node].asType)
        })
  }
  // Generates the constructor: allocates the node array (sized for the optional
  // setup/commit slots), calls each generated `nodeN` builder method in order,
  // then stores the array (wrapped as a Seq) into the `nodes` field.
  override def defConstructors(ctorDef: ConstructorDef): Unit = {
    ctorDef.newInit(Seq(classOf[JobContext].asType)) { implicit mb =>
      val thisVar :: jobContextVar :: _ = mb.argVars
      thisVar.push().invokeInit(
        superType,
        jobContextVar.push())
      val nodesVar = pushNewArray(
        classOf[Node].asType,
        if (useDirectOut) subplans.size + 2 else subplans.size).store()
      // Mutable map shared by all nodeN methods to pass broadcasts downstream.
      val broadcastsVar = pushObject(mutable.Map)
        .invokeV("empty", classOf[mutable.Map[BroadcastId, Broadcast[_]]].asType)
        .store()
      if (useDirectOut) {
        thisVar.push().invokeV("node0", nodesVar.push(), jobContextVar.push())
      }
      subplans.foreach { subplan =>
        thisVar.push().invokeV(
          s"node${subplanToIdx(subplan)}",
          nodesVar.push(),
          broadcastsVar.push(),
          jobContextVar.push())
      }
      if (useDirectOut) {
        thisVar.push().invokeV(s"node${subplans.size + 1}", nodesVar.push(), jobContextVar.push())
      }
      thisVar.push().putField(
        "nodes", pushObject(Predef)
          .invokeV(
            "wrapRefArray",
            classOf[mutable.WrappedArray[_]].asType,
            nodesVar.push().asType(classOf[Array[AnyRef]].asType))
          .asType(classOf[Seq[_]].asType))
    }
  }
  // Generates the public `nodes` accessor plus one private builder method per node.
  override def defMethods(methodDef: MethodDef): Unit = {
    super.defMethods(methodDef)
    methodDef.newMethod("nodes", classOf[Seq[Node]].asType, Seq.empty,
      new MethodSignatureBuilder()
        .newReturnType {
          _.newClassType(classOf[Seq[_]].asType) {
            _.newTypeArgument(SignatureVisitor.INSTANCEOF, classOf[Node].asType)
          }
        }) { implicit mb =>
      val thisVar :: _ = mb.argVars
      `return`(thisVar.push().getField("nodes", classOf[Seq[_]].asType))
    }
    if (useDirectOut) {
      defSetupNode(methodDef)
    }
    subplans.foreach { subplan =>
      defNode(methodDef)(subplan)
    }
    if (useDirectOut) {
      defCommitNode(methodDef)
    }
  }
  /**
   * Selects the sub-plans that write through direct file IO: the sub-plan's
   * primary operator must be an ExternalOutput whose info is supported by
   * DirectFileIoModels. Returns the empty set when the option is disabled.
   */
  private def collectDirectOutputs(subplans: Set[SubPlan]): Set[(SubPlan, ExternalOutput)] = {
    if (context.options.useOutputDirect) {
      for {
        subplan <- subplans
        subPlanInfo = subplan.getAttribute(classOf[SubPlanInfo])
        primaryOperator = subPlanInfo.getPrimaryOperator
        if primaryOperator.isInstanceOf[ExternalOutput]
        operator = primaryOperator.asInstanceOf[ExternalOutput]
        info <- Option(operator.getInfo)
        if DirectFileIoModels.isSupported(info)
      } yield {
        subplan -> operator
      }
    } else {
      Set.empty
    }
  }
  // Generates `node0`, which stores a freshly compiled DirectOutputSetup
  // instance into slot 0 of the node array.
  private def defSetupNode(methodDef: MethodDef): Unit = {
    methodDef.newMethod(
      Opcodes.ACC_PRIVATE,
      "node0",
      Seq(classOf[Array[Node]].asType, classOf[JobContext].asType)) { implicit mb =>
      val thisVar :: nodesVar :: jobContextVar :: _ = mb.argVars
      nodesVar.push().astore(ldc(0), {
        val setupType = DirectOutputSetupCompiler.compile(directOutputs.map(_._2))
        val setup = pushNew(setupType)
        setup.dup().invokeInit(jobContextVar.push())
        setup
      })
      `return`()
    }
  }
  // Generates the final `nodeN`, which stores a DirectOutputCommit instance that
  // depends on every direct-output node (gathered by slot index) into the last slot.
  private def defCommitNode(methodDef: MethodDef): Unit = {
    methodDef.newMethod(
      Opcodes.ACC_PRIVATE,
      s"node${subplanToIdx.size + 1}",
      Seq(classOf[Array[Node]].asType, classOf[JobContext].asType)) { implicit mb =>
      val thisVar :: nodesVar :: jobContextVar :: _ = mb.argVars
      nodesVar.push().astore(ldc(subplanToIdx.size + 1), {
        val commitType = DirectOutputCommitCompiler.compile(directOutputs.map(_._2))
        val commit = pushNew(commitType)
        commit.dup().invokeInit(
          buildSet { builder =>
            // sorted for deterministic bytecode output
            directOutputs.toSeq.map(_._1).map(subplanToIdx).sorted.foreach { i =>
              builder += nodesVar.push().aload(ldc(i))
            }
          },
          jobContextVar.push())
        commit
      })
      `return`()
    }
  }
  /**
   * Generates the builder method for one sub-plan's node: resolves its broadcast
   * inputs, instantiates the compiled node into its array slot, and registers
   * any broadcast outputs it produces into the shared broadcast map.
   */
  private def defNode(
    methodDef: MethodDef)(
    subplan: SubPlan): Unit = {
    val compiler = NodeCompiler.get(subplan)(context.nodeCompilerContext)
    val nodeType = compiler.compile(subplan)(context.nodeCompilerContext)
    methodDef.newMethod(
      Opcodes.ACC_PRIVATE,
      s"node${subplanToIdx(subplan)}",
      Seq(
        classOf[Array[Node]].asType,
        classOf[mutable.Map[BroadcastId, Broadcast[_]]].asType,
        classOf[JobContext].asType)) { implicit mb =>
      val thisVar :: nodesVar :: allBroadcastsVar :: jobContextVar :: _ = mb.argVars
      // Map of this node's broadcast inputs. With exactly one upstream producer
      // the already-registered broadcast is reused; otherwise a fresh broadcast
      // is built over all producers' outputs.
      val broadcastsVar =
        buildMap { builder =>
          for {
            subPlanInput <- subplan.getInputs
            inputInfo <- Option(subPlanInput.getAttribute(classOf[SubPlanInputInfo]))
            if inputInfo.getInputType == SubPlanInputInfo.InputType.BROADCAST
            broadcastInfo <- Option(subPlanInput.getAttribute(classOf[BroadcastInfo]))
          } {
            val prevSubPlanOutputs = subPlanInput.getOpposites
            if (prevSubPlanOutputs.size == 1) {
              val prevSubPlanOperator = prevSubPlanOutputs.head.getOperator
              builder += (
                context.broadcastIds.getField(subPlanInput.getOperator),
                applyMap(
                  allBroadcastsVar.push(),
                  context.broadcastIds.getField(prevSubPlanOperator))
                  .cast(classOf[Broadcast[_]].asType))
            } else {
              val marker = subPlanInput.getOperator
              builder += (
                context.broadcastIds.getField(marker),
                newBroadcast(
                  marker,
                  subplan,
                  broadcastInfo)(
                  () => buildSeq { builder =>
                    prevSubPlanOutputs.foreach { subPlanOutput =>
                      builder += tuple2(
                        nodesVar.push().aload(ldc(subplanToIdx(subPlanOutput.getOwner))),
                        context.branchKeys.getField(subPlanOutput.getOperator))
                    }
                  },
                  jobContextVar.push))
            }
          }
        }.store()
      // Slot 0 holds the direct-output setup node when direct output is in use.
      val setupVar = if (useDirectOut) {
        Some(nodesVar.push().aload(ldc(0)).store())
      } else {
        None
      }
      val instantiator = compiler.instantiator
      val nodeVar = instantiator.newInstance(
        nodeType,
        subplan,
        subplanToIdx)(
        Instantiator.Vars(jobContextVar, nodesVar, broadcastsVar, setupVar))(
        implicitly, context.instantiatorCompilerContext)
      nodesVar.push().astore(ldc(subplanToIdx(subplan)), nodeVar.push())
      // Publish this node's broadcast outputs so downstream nodeN methods can
      // look them up from the shared map (only when some consumer has exactly
      // one producer, matching the reuse branch above).
      for {
        subPlanOutput <- subplan.getOutputs
        outputInfo <- Option(subPlanOutput.getAttribute(classOf[SubPlanOutputInfo]))
        if outputInfo.getOutputType == SubPlanOutputInfo.OutputType.BROADCAST
        broadcastInfo <- Option(subPlanOutput.getAttribute(classOf[BroadcastInfo]))
        if subPlanOutput.getOpposites.exists(_.getOpposites.size == 1)
      } {
        val marker = subPlanOutput.getOperator
        addToMap(
          allBroadcastsVar.push(),
          context.broadcastIds.getField(subPlanOutput.getOperator),
          newBroadcast(
            marker,
            subplan,
            broadcastInfo)(
            () => buildSeq { builder =>
              builder += tuple2(
                nodeVar.push().asType(classOf[Source].asType),
                context.branchKeys.getField(marker))
            },
            jobContextVar.push))
      }
      `return`()
    }
  }
  /**
   * Emits bytecode constructing a MapBroadcastOnce over the given upstream
   * (node, branch-key) pairs, grouped/sorted according to the broadcast's
   * format info and using a single partition. The label is taken from the
   * sub-plan's SubPlanInfo label and/or NameInfo name, or "N/A" when absent.
   */
  private def newBroadcast(
    marker: MarkerOperator,
    subplan: SubPlan,
    broadcastInfo: BroadcastInfo)(
    nodes: () => Stack,
    sc: () => Stack)(
    implicit mb: MethodBuilder): Stack = {
    val dataModelRef = marker.getInput.dataModelRef
    val group = broadcastInfo.getFormatInfo
    val broadcast = pushNew(classOf[MapBroadcastOnce].asType)
    val label = Seq(
      Option(subplan.getAttribute(classOf[SubPlanInfo]))
        .flatMap(info => Option(info.getLabel)),
      Option(subplan.getAttribute(classOf[NameInfo]))
        .map(_.getName))
      .flatten match {
        case Seq() => "N/A"
        case s: Seq[String] => s.mkString(":")
      }
    broadcast.dup().invokeInit(
      nodes(),
      option(
        sortOrdering(
          dataModelRef.groupingTypes(group.getGrouping),
          dataModelRef.orderingTypes(group.getOrdering))),
      groupingOrdering(dataModelRef.groupingTypes(group.getGrouping)),
      partitioner(ldc(1)),
      ldc(label),
      sc())
    broadcast
  }
}
| ueshin/asakusafw-spark | compiler/src/main/scala/com/asakusafw/spark/compiler/graph/JobClassBuilder.scala | Scala | apache-2.0 | 11,306 |
package ml.combust.mleap.core.regression
import ml.combust.mleap.core.types.{ScalarShape, ScalarType, StructField, TensorType}
import org.apache.spark.ml.linalg.Vectors
import org.scalatest.FunSpec
// Verifies the MLeap input/output schema declared by GeneralizedLinearRegressionModel.
class GeneralizedLinearRegressionModelSpec extends FunSpec {
  describe("generalized linear regression model") {
    // Model with 3 coefficients and intercept 23; the third constructor argument
    // is passed as null here — presumably the link/family object, which these
    // schema-only checks never touch (TODO confirm against the model class).
    val model = new GeneralizedLinearRegressionModel(Vectors.dense(1, 2, 3), 23, null)
    it("has the right input schema") {
      // Input is a single "features" tensor whose size matches the coefficients (3).
      assert(model.inputSchema.fields ==
        Seq(StructField("features",TensorType.Double(3))))
    }
    it("has the right output schema") {
      // Output exposes both the response prediction and the link-scale prediction.
      assert(model.outputSchema.fields ==
        Seq(StructField("prediction", ScalarType.Double.nonNullable),
          StructField("link_prediction", ScalarType.Double.nonNullable)))
    }
  }
}
| combust/mleap | mleap-core/src/test/scala/ml/combust/mleap/core/regression/GeneralizedLinearRegressionModelSpec.scala | Scala | apache-2.0 | 787 |
package mobile.stream
import monifu.reactive.OverflowStrategy.DropNew
import monifu.reactive.{Observable, Subscriber}
// import org.scalajs.dom
import shared.models._
import scala.concurrent.duration.FiniteDuration
import scala.scalajs.js.Dynamic.global
/**
 * An Observable of server events consumed over a WebSocket (Scala.js client).
 *
 * The `interval` is forwarded to the server as `periodMillis` and `seed` as the
 * `seed` query parameter. When `doBackPressure` is true a back-pressured client
 * is used; otherwise a simple client that drops new items past a 1000-element
 * buffer (DropNew) is used. Raw text frames are parsed by `IsEvent` and
 * non-event frames are filtered out.
 */
final class DataConsumer(interval: FiniteDuration, seed: Long, doBackPressure: Boolean)
  extends Observable[Event] {
  def onSubscribe(subscriber: Subscriber[Event]): Unit = {
    // NOTE(review): hostEmulator is currently unused; presumably 10.0.2.2 is the
    // host loopback as seen from the Android emulator, kept here for manual
    // switching during development — confirm before removing.
    val hostEmulator = "10.0.2.2:9000"
    val hostBrowser = "localhost:9000"
    val host = hostBrowser
    val source = if (doBackPressure) {
      val url = s"ws://$host/back-pressured-stream?periodMillis=${interval.toMillis}&seed=$seed"
      BackPressuredWebSocketClient(url)
    }
    else {
      val url = s"ws://$host/simple-stream?periodMillis=${interval.toMillis}&seed=$seed"
      SimpleWebSocketClient(url, DropNew(1000))
    }
    // Keep only frames that parse into a known Event; everything else is dropped.
    source
      .collect { case IsEvent(e) => e }
      .onSubscribe(subscriber)
  }
  // Extractor that decodes a raw JSON text frame into an Event, dispatching on
  // the "event" discriminator field. Uses the browser's JSON.parse via js.Dynamic,
  // so field access is untyped — the casts below assume the server's wire format.
  object IsEvent {
    def unapply(message: String) = {
      val json = global.JSON.parse(message)
      json.event.asInstanceOf[String] match {
        // Chat signal: payload under "value" with id/name/lastText/face fields.
        case "chat" => {
          Some(Signal(
            value = Chat(
              json.value.id.asInstanceOf[Number].intValue(),
              json.value.name.asInstanceOf[String],
              json.value.lastText.asInstanceOf[String],
              json.value.face.asInstanceOf[String]
            ),
            timestamp = json.timestamp.asInstanceOf[Number].longValue()
          ))}
        // Server-side drop notification carrying how many items were discarded.
        case "overflow" =>
          Some(OverflowEvent(
            dropped = json.dropped.asInstanceOf[Number].longValue(),
            timestamp = json.timestamp.asInstanceOf[Number].longValue()
          ))
        // Server-reported failure: surfaced by throwing so the stream errors out.
        case "error" =>
          val errorType = json.`type`.asInstanceOf[String]
          val message = json.message.asInstanceOf[String]
          throw new BackPressuredWebSocketClient.Exception(
            s"Server-side error throw - $errorType: $message")
        // Unknown discriminator: no match, frame is filtered out by collect.
        case _ =>
          None
      }
    }
  }
}
| aoprisan/monifu-ionic-sample | app-js/src/main/scala/mobile/stream/DataConsumer.scala | Scala | gpl-2.0 | 2,033 |
package scalan.imp
/** Thin wrapper around an arbitrary array, exposing its element count. */
class ArrayImp(array: Array[_]) {
  /** Number of elements in the wrapped array. */
  def length: Int = array.length
}
| scalan/scalan | lms-backend/core/src/main/resources/scalan/imp/ArrayImp.scala | Scala | apache-2.0 | 84 |
/*
* Copyright (c) 2017
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package tiki
package instances
import cats.Show
import tiki.geometry.Point
// Cats type-class instances for Point, mixed into the library's instance stack.
trait PointInstances {
  // Show instance rendering a point as "Point(x,y)".
  // NOTE(review): the type parameter [A] is never used and could likely be
  // dropped, but that would change the public signature — confirm there are no
  // explicit `showForPoint[...]` call sites before removing it.
  implicit def showForPoint[A]: Show[Point] = (f: Point) => s"Point(${f.x},${f.y})"
}
| lewismj/tiki | core/src/main/scala/tiki/instances/point.scala | Scala | bsd-2-clause | 1,535 |
package org.jetbrains.plugins.scala
package lang
package psi
package impl
package statements
import javax.swing.Icon
import com.intellij.ide.util.EditSourceUtil
import com.intellij.lang.ASTNode
import com.intellij.navigation.ItemPresentation
import com.intellij.openapi.editor.colors.TextAttributesKey
import com.intellij.psi._
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.parser.ScalaElementTypes
import org.jetbrains.plugins.scala.lang.psi.api.ScalaElementVisitor
import org.jetbrains.plugins.scala.lang.psi.api.base.types.ScTypeElement
import org.jetbrains.plugins.scala.lang.psi.api.statements._
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory.createIdentifier
import org.jetbrains.plugins.scala.lang.psi.stubs.ScTypeAliasStub
/**
* @author Alexander Podkhalyuzin
* Date: 22.02.2008
* Time: 9:55:13
*/
// PSI implementation of a Scala type-alias definition (`type T = ...`), backed
// either by an AST node (parsed source) or a stub (indexed/green stub tree).
// Exactly one of `stub`/`node` is non-null, selected by the public constructors.
class ScTypeAliasDefinitionImpl private (stub: ScTypeAliasStub, node: ASTNode)
  extends ScalaStubBasedElementImpl(stub, ScalaElementTypes.TYPE_DEFINITION, node) with ScTypeAliasDefinition {
  def this(node: ASTNode) = this(null, node)
  def this(stub: ScTypeAliasStub) = this(stub, null)
  // The identifier element naming the alias. When the identifier is absent from
  // the tree (stub-backed element), a synthetic identifier is created from the
  // stub's name; the assert guards against an unparsable name.
  def nameId: PsiElement = findChildByType[PsiElement](ScalaTokenTypes.tIDENTIFIER) match {
    case null =>
      val name = getGreenStub.getName
      val id = createIdentifier(name)
      if (id == null) {
        assert(assertion = false, s"Id is null. Name: $name. Text: $getText. Parent text: ${getParent.getText}.")
      }
      id.getPsi
    case n => n
  }
  // Right-hand side of the alias; resolved from the PSI tree or the stub,
  // whichever is available.
  override def aliasedTypeElement: Option[ScTypeElement] =
    byPsiOrStub(Option(findChildByClassScala(classOf[ScTypeElement])))(_.typeElement)
  // Caret lands on the name, not the `type` keyword, when navigating here.
  override def getTextOffset: Int = nameId.getTextRange.getStartOffset
  override def navigate(requestFocus: Boolean) {
    val descriptor = EditSourceUtil.getDescriptor(nameId)
    if (descriptor != null) descriptor.navigate(requestFocus)
  }
  override def toString: String = "ScTypeAliasDefinition: " + name
  // Presentation for structure view / goto popups: name plus containing class.
  override def getPresentation: ItemPresentation = {
    new ItemPresentation() {
      def getPresentableText: String = name
      def getTextAttributesKey: TextAttributesKey = null
      def getLocationString: String = "(" + ScTypeAliasDefinitionImpl.this.containingClass.qualifiedName + ")"
      override def getIcon(open: Boolean): Icon = ScTypeAliasDefinitionImpl.this.getIcon(0)
    }
  }
  // Disambiguates between the two inherited getOriginalElement implementations.
  override def getOriginalElement: PsiElement = super[ScTypeAliasDefinition].getOriginalElement
  override def accept(visitor: ScalaElementVisitor) {
    visitor.visitTypeAliasDefinition(this)
  }
  // Routes platform visitors to the Scala-specific visit method when possible.
  override def accept(visitor: PsiElementVisitor) {
    visitor match {
      case s: ScalaElementVisitor => s.visitTypeAliasDefinition(this)
      case _ => super.accept(visitor)
    }
  }
}
| ilinum/intellij-scala | src/org/jetbrains/plugins/scala/lang/psi/impl/statements/ScTypeAliasDefinitionImpl.scala | Scala | apache-2.0 | 2,799 |
package com.bazaarvoice.sswf
import com.bazaarvoice.sswf.model.StepInput
import com.fasterxml.jackson.databind.node.{JsonNodeFactory, ObjectNode}
import com.fasterxml.jackson.databind.{JsonNode, ObjectMapper}
/**
 * JSON (de)serialization helpers for the payloads SSWF stores in SWF workflow
 * state (activity input and timer control strings).
 *
 * The field names below ("stepInput", "stepInputString", "stepResume",
 * "wfInput", "stepName") are a persisted wire format — do not rename them
 * without a migration plan for in-flight workflows.
 */
package object util {
  // Shared Jackson mapper; presumably safe to share since it is never
  // reconfigured after construction — confirm if configuration is ever added.
  private[this] val mapper = new ObjectMapper
  private[this] val json = JsonNodeFactory.instance
  // Serializes a StepInput, omitting absent optional fields entirely.
  private[this] def packStepInput(stepInput: StepInput): JsonNode = {
    val node: ObjectNode = json.objectNode()
    stepInput.stepInputString.foreach(s => node.put("stepInputString", s))
    stepInput.resumeProgress.foreach(s => node.put("stepResume", s))
    node
  }
  // Inverse of packStepInput: missing fields map back to None.
  private[this] def unpackStepInput(node: JsonNode): StepInput = {
    StepInput(
      Option(node.get("stepInputString")).map(_.asText()),
      Option(node.get("stepResume")).map(_.asText())
    )
  }
  // Envelope combining the step input with the caller-serialized workflow input.
  private[sswf] def packInput[SSWFInput](inputParser: InputParser[SSWFInput])(stepInput: StepInput, wfInput: SSWFInput): String = {
    val node: ObjectNode = json.objectNode()
    node.set("stepInput", packStepInput(stepInput))
    node.put("wfInput", inputParser.serialize(wfInput))
    mapper.writeValueAsString(node)
  }
  // Inverse of packInput; delegates workflow-input decoding to the parser.
  private[sswf] def unpackInput[SSWFInput](inputParser: InputParser[SSWFInput])(packedInput: String): (StepInput, SSWFInput) = {
    val node: JsonNode = mapper.readTree(packedInput)
    (unpackStepInput(node.get("stepInput")), inputParser.deserialize(node.get("wfInput").asText()))
  }
  // Encodes the (stepName, stepInput) pair carried in an SWF timer's control field.
  private[sswf] def packTimer(stepName: String, stepInput: StepInput) = {
    val node: ObjectNode = json.objectNode()
    node.set("stepInput", packStepInput(stepInput))
    node.put("stepName", stepName)
    mapper.writeValueAsString(node)
  }
  // Inverse of packTimer.
  private[sswf] def unpackTimer(control: String): (String,StepInput) = {
    val node = mapper.readTree(control)
    (node.get("stepName").asText(),unpackStepInput(node.get("stepInput")))
  }
}
| bazaarvoice/super-simple-workflow | sswf-core/src/main/scala/com/bazaarvoice/sswf/util/package.scala | Scala | apache-2.0 | 1,865 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package spark.network
import spark._
import scala.collection.mutable.{HashMap, Queue, ArrayBuffer}
import java.io._
import java.nio._
import java.nio.channels._
import java.nio.channels.spi._
import java.net._
/**
 * Base class for a non-blocking NIO socket connection registered with a shared
 * Selector. Subclasses supply the read/write behavior and decide which selector
 * interest ops to register/deregister.
 *
 * NOTE(review): the callback fields below are plain vars with no
 * synchronization — presumably all mutation happens on the selector thread;
 * confirm before calling from elsewhere.
 */
private[spark]
abstract class Connection(val channel: SocketChannel, val selector: Selector,
    val socketRemoteConnectionManagerId: ConnectionManagerId)
  extends Logging {
  // Convenience constructor deriving the remote ConnectionManagerId from the
  // socket's remote address.
  def this(channel_ : SocketChannel, selector_ : Selector) = {
    this(channel_, selector_,
      ConnectionManagerId.fromSocketAddress(
        channel_.socket.getRemoteSocketAddress().asInstanceOf[InetSocketAddress]))
  }
  // Selector-friendly socket setup: non-blocking, Nagle disabled for latency,
  // address reuse and keep-alive probes enabled.
  channel.configureBlocking(false)
  channel.socket.setTcpNoDelay(true)
  channel.socket.setReuseAddress(true)
  channel.socket.setKeepAlive(true)
  /*channel.socket.setReceiveBufferSize(32768) */
  // Callbacks installed by the owning connection manager.
  var onCloseCallback: Connection => Unit = null
  var onExceptionCallback: (Connection, Exception) => Unit = null
  var onKeyInterestChangeCallback: (Connection, Int) => Unit = null
  // Captured once at construction; SendingConnection overrides getRemoteAddress.
  val remoteAddress = getRemoteAddress()
  // Read channels typically do not register for write and write does not for read
  // Now, we do have write registering for read too (temporarily), but this is to detect
  // channel close NOT to actually read/consume data on it !
  // How does this work if/when we move to SSL ?
  // What is the interest to register with selector for when we want this connection to be selected
  def registerInterest()
  // What is the interest to register with selector for when we want this connection to
  // be de-selected
  // Traditionally, 0 - but in our case, for example, for close-detection on SendingConnection hack,
  // it will be SelectionKey.OP_READ (until we fix it properly)
  def unregisterInterest()
  // On receiving a read event, should we change the interest for this channel or not ?
  // Will be true for ReceivingConnection, false for SendingConnection.
  def changeInterestForRead(): Boolean
  // On receiving a write event, should we change the interest for this channel or not ?
  // Will be false for ReceivingConnection, true for SendingConnection.
  // Actually, for now, should not get triggered for ReceivingConnection
  def changeInterestForWrite(): Boolean
  def getRemoteConnectionManagerId(): ConnectionManagerId = {
    socketRemoteConnectionManagerId
  }
  // The selection key this channel holds on the shared selector (null if none).
  def key() = channel.keyFor(selector)
  def getRemoteAddress() = channel.socket.getRemoteSocketAddress().asInstanceOf[InetSocketAddress]
  // Returns whether we have to register for further reads or not.
  // Default implementations fail loudly: each subclass supports only one direction.
  def read(): Boolean = {
    throw new UnsupportedOperationException(
      "Cannot read on connection of type " + this.getClass.toString)
  }
  // Returns whether we have to register for further writes or not.
  def write(): Boolean = {
    throw new UnsupportedOperationException(
      "Cannot write on connection of type " + this.getClass.toString)
  }
  // Cancels the selection key (if registered), closes the socket, and notifies
  // the close callback.
  def close() {
    val k = key()
    if (k != null) {
      k.cancel()
    }
    channel.close()
    callOnCloseCallback()
  }
  def onClose(callback: Connection => Unit) {
    onCloseCallback = callback
  }
  def onException(callback: (Connection, Exception) => Unit) {
    onExceptionCallback = callback
  }
  def onKeyInterestChange(callback: (Connection, Int) => Unit) {
    onKeyInterestChangeCallback = callback
  }
  // Forwards an exception to the registered handler, or logs it if none is set.
  def callOnExceptionCallback(e: Exception) {
    if (onExceptionCallback != null) {
      onExceptionCallback(this, e)
    } else {
      logError("Error in connection to " + getRemoteConnectionManagerId() +
        " and OnExceptionCallback not registered", e)
    }
  }
  def callOnCloseCallback() {
    if (onCloseCallback != null) {
      onCloseCallback(this)
    } else {
      logWarning("Connection to " + getRemoteConnectionManagerId() +
        " closed and OnExceptionCallback not registered")
    }
  }
  // Asks the connection manager (which owns the selector loop) to change this
  // key's interest ops; interest must not be mutated off the selector thread.
  def changeConnectionKeyInterest(ops: Int) {
    if (onKeyInterestChangeCallback != null) {
      onKeyInterestChangeCallback(this, ops)
    } else {
      throw new Exception("OnKeyInterestChangeCallback not registered")
    }
  }
  // Debug helper: dumps the bytes between position and limit to stdout,
  // restoring the buffer position afterwards.
  def printRemainingBuffer(buffer: ByteBuffer) {
    val bytes = new Array[Byte](buffer.remaining)
    val curPosition = buffer.position
    buffer.get(bytes)
    bytes.foreach(x => print(x + " "))
    buffer.position(curPosition)
    print(" (" + bytes.size + ")")
  }
  // Debug helper: dumps `length` bytes starting at `position` to stdout,
  // restoring the buffer position afterwards.
  def printBuffer(buffer: ByteBuffer, position: Int, length: Int) {
    val bytes = new Array[Byte](length)
    val curPosition = buffer.position
    buffer.position(position)
    buffer.get(bytes)
    bytes.foreach(x => print(x + " "))
    print(" (" + position + ", " + length + ")")
    buffer.position(curPosition)
  }
}
/**
 * Outbound connection: queues messages in an Outbox, chunks them, and writes
 * chunks to the channel whenever the selector reports the socket writable.
 * It never consumes inbound data; the read path exists only to detect that the
 * remote side closed the socket.
 */
private[spark]
class SendingConnection(val address: InetSocketAddress, selector_ : Selector,
    remoteId_ : ConnectionManagerId)
  extends Connection(SocketChannel.open, selector_, remoteId_) {
  // Queue of pending messages with two chunk-scheduling policies:
  // fair == 0 -> FIFO, fair == 1 -> round-robin across messages.
  class Outbox(fair: Int = 0) {
    val messages = new Queue[Message]()
    val defaultChunkSize = 65536 //32768 //16384
    var nextMessageToBeUsed = 0
    def addMessage(message: Message) {
      messages.synchronized{
        /*messages += message*/
        messages.enqueue(message)
        logDebug("Added [" + message + "] to outbox for sending to " +
          "[" + getRemoteConnectionManagerId() + "]")
      }
    }
    // Picks the next chunk to write according to the fairness policy.
    def getChunk(): Option[MessageChunk] = {
      fair match {
        case 0 => getChunkFIFO()
        case 1 => getChunkRR()
        case _ => throw new Exception("Unexpected fairness policy in outbox")
      }
    }
    // NOTE(review): this FIFO path is dead code in practice — the outbox below
    // is constructed with fair = 1 (round-robin). It also looks buggy: a fully
    // sent message is never dequeued in the else branch, and the acknowledged
    // comment below notes the re-append breaks FIFO order. Confirm before reuse.
    private def getChunkFIFO(): Option[MessageChunk] = {
      /*logInfo("Using FIFO")*/
      messages.synchronized {
        while (!messages.isEmpty) {
          val message = messages(0)
          val chunk = message.getChunkForSending(defaultChunkSize)
          if (chunk.isDefined) {
            messages += message // this is probably incorrect, it wont work as fifo
            if (!message.started) {
              logDebug("Starting to send [" + message + "]")
              message.started = true
              message.startTime = System.currentTimeMillis
            }
            return chunk
          } else {
            /*logInfo("Finished sending [" + message + "] to [" + getRemoteConnectionManagerId() + "]")*/
            message.finishTime = System.currentTimeMillis
            logDebug("Finished sending [" + message + "] to [" + getRemoteConnectionManagerId() +
              "] in " + message.timeTaken )
          }
        }
      }
      None
    }
    // Round-robin: dequeue a message; if it still has chunks, re-enqueue it so
    // other messages get a turn. Fully sent messages are dropped from the queue.
    private def getChunkRR(): Option[MessageChunk] = {
      messages.synchronized {
        while (!messages.isEmpty) {
          /*nextMessageToBeUsed = nextMessageToBeUsed % messages.size */
          /*val message = messages(nextMessageToBeUsed)*/
          val message = messages.dequeue
          val chunk = message.getChunkForSending(defaultChunkSize)
          if (chunk.isDefined) {
            messages.enqueue(message)
            nextMessageToBeUsed = nextMessageToBeUsed + 1
            if (!message.started) {
              logDebug(
                "Starting to send [" + message + "] to [" + getRemoteConnectionManagerId() + "]")
              message.started = true
              message.startTime = System.currentTimeMillis
            }
            logTrace(
              "Sending chunk from [" + message+ "] to [" + getRemoteConnectionManagerId() + "]")
            return chunk
          } else {
            message.finishTime = System.currentTimeMillis
            logDebug("Finished sending [" + message + "] to [" + getRemoteConnectionManagerId() +
              "] in " + message.timeTaken )
          }
        }
      }
      None
    }
  }
  private val outbox = new Outbox(1)
  // Byte buffers of the chunk currently being written to the socket.
  val currentBuffers = new ArrayBuffer[ByteBuffer]()
  /*channel.socket.setSendBufferSize(256 * 1024)*/
  // The remote address is the connect target, not the socket's reported peer.
  override def getRemoteAddress() = address
  // OP_READ stays registered even while idle, purely to detect remote close
  // (see read() below).
  val DEFAULT_INTEREST = SelectionKey.OP_READ
  override def registerInterest() {
    // Registering read too - does not really help in most cases, but for some
    // it does - so let us keep it for now.
    changeConnectionKeyInterest(SelectionKey.OP_WRITE | DEFAULT_INTEREST)
  }
  override def unregisterInterest() {
    changeConnectionKeyInterest(DEFAULT_INTEREST)
  }
  // Queues a message and, if already connected, re-arms write interest so the
  // selector loop starts draining the outbox.
  def send(message: Message) {
    outbox.synchronized {
      outbox.addMessage(message)
      if (channel.isConnected) {
        registerInterest()
      }
    }
  }
  // MUST be called within the selector loop
  def connect() {
    try{
      channel.register(selector, SelectionKey.OP_CONNECT)
      channel.connect(address)
      logInfo("Initiating connection to [" + address + "]")
    } catch {
      case e: Exception => {
        logError("Error connecting to " + address, e)
        callOnExceptionCallback(e)
      }
    }
  }
  // Completes a pending non-blocking connect. Returns false if not yet finished
  // (unless force is set, in which case success is assumed — see comments below).
  def finishConnect(force: Boolean): Boolean = {
    try {
      // Typically, this should finish immediately since it was triggered by a connect
      // selection - though need not necessarily always complete successfully.
      val connected = channel.finishConnect
      if (!force && !connected) {
        logInfo(
          "finish connect failed [" + address + "], " + outbox.messages.size + " messages pending")
        return false
      }
      // Fallback to previous behavior - assume finishConnect completed
      // This will happen only when finishConnect failed for some repeated number of times
      // (10 or so)
      // Is highly unlikely unless there was an unclean close of socket, etc
      registerInterest()
      logInfo("Connected to [" + address + "], " + outbox.messages.size + " messages pending")
      return true
    } catch {
      case e: Exception => {
        logWarning("Error finishing connection to " + address, e)
        callOnExceptionCallback(e)
        // ignore
        return true
      }
    }
  }
  // Drains outbox chunks into the socket. Returns true to stay registered for
  // write (partial write), false when the outbox is empty or the socket failed.
  override def write(): Boolean = {
    try {
      while (true) {
        if (currentBuffers.size == 0) {
          outbox.synchronized {
            outbox.getChunk() match {
              case Some(chunk) => {
                currentBuffers ++= chunk.buffers
              }
              case None => {
                // changeConnectionKeyInterest(0)
                /*key.interestOps(0)*/
                return false
              }
            }
          }
        }
        if (currentBuffers.size > 0) {
          val buffer = currentBuffers(0)
          val remainingBytes = buffer.remaining
          val writtenBytes = channel.write(buffer)
          if (buffer.remaining == 0) {
            currentBuffers -= buffer
          }
          if (writtenBytes < remainingBytes) {
            // re-register for write.
            return true
          }
        }
      }
    } catch {
      case e: Exception => {
        logWarning("Error writing in connection to " + getRemoteConnectionManagerId(), e)
        callOnExceptionCallback(e)
        close()
        return false
      }
    }
    // should not happen - to keep scala compiler happy
    return true
  }
  // This is a hack to determine if remote socket was closed or not.
  // SendingConnection DOES NOT expect to receive any data - if it does, it is an error
  // For a bunch of cases, read will return -1 in case remote socket is closed : hence we
  // register for reads to determine that.
  override def read(): Boolean = {
    // We don't expect the other side to send anything; so, we just read to detect an error or EOF.
    try {
      val length = channel.read(ByteBuffer.allocate(1))
      if (length == -1) { // EOF
        close()
      } else if (length > 0) {
        logWarning(
          "Unexpected data read from SendingConnection to " + getRemoteConnectionManagerId())
      }
    } catch {
      case e: Exception =>
        logError("Exception while reading SendingConnection to " + getRemoteConnectionManagerId(), e)
        callOnExceptionCallback(e)
        close()
    }
    false
  }
  override def changeInterestForRead(): Boolean = false
  override def changeInterestForWrite(): Boolean = true
}
// Must be created within selector loop - else deadlock
/** Receiving half of a Spark NIO connection.
  *
  * Reads length-prefixed message chunks off a non-blocking [[SocketChannel]]
  * inside the selector loop, reassembling them into [[BufferMessage]]s via the
  * per-connection [[Inbox]]. Must be created within the selector loop — else
  * deadlock (see note above the class in the original file).
  */
private[spark] class ReceivingConnection(channel_ : SocketChannel, selector_ : Selector)
  extends Connection(channel_, selector_) {

  /** Tracks partially-received messages, keyed by message id. Accessed only
    * from the selector thread, so the plain HashMap needs no synchronization
    * — NOTE(review): assumed from usage here; confirm no other thread touches it. */
  class Inbox() {
    val messages = new HashMap[Int, BufferMessage]()

    /** Returns the buffer chunk into which the bytes for `header` should be
      * read, creating and registering a new in-flight message on first sight
      * of this message id. */
    def getChunk(header: MessageChunkHeader): Option[MessageChunk] = {

      def createNewMessage: BufferMessage = {
        val newMessage = Message.create(header).asInstanceOf[BufferMessage]
        newMessage.started = true
        newMessage.startTime = System.currentTimeMillis
        logDebug(
          "Starting to receive [" + newMessage + "] from [" + getRemoteConnectionManagerId() + "]")
        messages += ((newMessage.id, newMessage))
        newMessage
      }

      val message = messages.getOrElseUpdate(header.id, createNewMessage)
      logTrace(
        "Receiving chunk of [" + message + "] from [" + getRemoteConnectionManagerId() + "]")
      message.getChunkForReceiving(header.chunkSize)
    }

    def getMessageForChunk(chunk: MessageChunk): Option[BufferMessage] = {
      messages.get(chunk.header.id)
    }

    def removeMessage(message: Message) {
      messages -= message.id
    }
  }

  // Cached connection-manager id inferred from incoming message headers;
  // written once (from the selector thread), read from any thread.
  @volatile private var inferredRemoteManagerId: ConnectionManagerId = null

  override def getRemoteConnectionManagerId(): ConnectionManagerId = {
    val currId = inferredRemoteManagerId
    if (currId != null) currId else super.getRemoteConnectionManagerId()
  }

  // The reciever's remote address is the local socket on remote side : which is NOT
  // the connection manager id of the receiver.
  // We infer that from the messages we receive on the receiver socket.
  private def processConnectionManagerId(header: MessageChunkHeader) {
    val currId = inferredRemoteManagerId
    if (header.address == null || currId != null) return

    val managerId = ConnectionManagerId.fromSocketAddress(header.address)

    if (managerId != null) {
      inferredRemoteManagerId = managerId
    }
  }

  val inbox = new Inbox()
  // Reused across reads: holds the fixed-size chunk header until complete.
  val headerBuffer: ByteBuffer = ByteBuffer.allocate(MessageChunkHeader.HEADER_SIZE)
  var onReceiveCallback: (Connection , Message) => Unit = null
  // Chunk currently being filled; null means we are still reading a header.
  var currentChunk: MessageChunk = null

  channel.register(selector, SelectionKey.OP_READ)

  /** Drains readable bytes from the channel.
    *
    * State machine: while `currentChunk` is null, accumulate header bytes;
    * once a full header arrives, resolve (or create) the destination chunk,
    * then stream payload bytes into it. Returns true to stay registered for
    * read events, false after the connection has been closed.
    */
  override def read(): Boolean = {
    try {
      while (true) {
        if (currentChunk == null) {
          val headerBytesRead = channel.read(headerBuffer)
          if (headerBytesRead == -1) {
            // Remote side closed the stream.
            close()
            return false
          }
          if (headerBuffer.remaining > 0) {
            // Header not yet complete; wait for more bytes.
            // re-register for read event ...
            return true
          }
          headerBuffer.flip
          if (headerBuffer.remaining != MessageChunkHeader.HEADER_SIZE) {
            throw new Exception(
              "Unexpected number of bytes (" + headerBuffer.remaining + ") in the header")
          }
          val header = MessageChunkHeader.create(headerBuffer)
          headerBuffer.clear()

          processConnectionManagerId(header)

          header.typ match {
            case Message.BUFFER_MESSAGE => {
              if (header.totalSize == 0) {
                // Zero-length message: deliver immediately, no payload follows.
                if (onReceiveCallback != null) {
                  onReceiveCallback(this, Message.create(header))
                }
                currentChunk = null
                // re-register for read event ...
                return true
              } else {
                currentChunk = inbox.getChunk(header).orNull
              }
            }
            case _ => throw new Exception("Message of unknown type received")
          }
        }

        if (currentChunk == null) throw new Exception("No message chunk to receive data")

        val bytesRead = channel.read(currentChunk.buffer)
        if (bytesRead == 0) {
          // No payload bytes available right now; try again on next event.
          // re-register for read event ...
          return true
        } else if (bytesRead == -1) {
          close()
          return false
        }

        /*logDebug("Read " + bytesRead + " bytes for the buffer")*/

        if (currentChunk.buffer.remaining == 0) {
          /*println("Filled buffer at " + System.currentTimeMillis)*/
          val bufferMessage = inbox.getMessageForChunk(currentChunk).get
          if (bufferMessage.isCompletelyReceived) {
            // All chunks in: flip buffers for reading and hand off upstream.
            bufferMessage.flip
            bufferMessage.finishTime = System.currentTimeMillis
            logDebug("Finished receiving [" + bufferMessage + "] from " +
              "[" + getRemoteConnectionManagerId() + "] in " + bufferMessage.timeTaken)
            if (onReceiveCallback != null) {
              onReceiveCallback(this, bufferMessage)
            }
            inbox.removeMessage(bufferMessage)
          }
          currentChunk = null
        }
      }
    } catch {
      case e: Exception => {
        logWarning("Error reading from connection to " + getRemoteConnectionManagerId(), e)
        callOnExceptionCallback(e)
        close()
        return false
      }
    }
    // should not happen - to keep scala compiler happy
    return true
  }

  def onReceive(callback: (Connection, Message) => Unit) {onReceiveCallback = callback}

  override def changeInterestForRead(): Boolean = true

  override def changeInterestForWrite(): Boolean = {
    throw new IllegalStateException("Unexpected invocation right now")
  }

  override def registerInterest() {
    // Registering read too - does not really help in most cases, but for some
    // it does - so let us keep it for now.
    changeConnectionKeyInterest(SelectionKey.OP_READ)
  }

  override def unregisterInterest() {
    changeConnectionKeyInterest(0)
  }
}
| wgpshashank/spark | core/src/main/scala/spark/network/Connection.scala | Scala | apache-2.0 | 18,428 |
package org.jetbrains.plugins.scala.lang.formatter.tests
import org.jetbrains.plugins.scala.lang.formatter.AbstractScalaFormatterTestBase
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.Parameterized
import scala.jdk.CollectionConverters.SeqHasAsJava
/** Exhaustive, parameterized check of the formatter's "blank lines around
  * members" settings.
  *
  * Each test instance receives a `(first, second, container)` triple from the
  * companion object and verifies that, for every combination of the relevant
  * blank-line settings, formatting inserts exactly the expected number of
  * blank lines between the two members inside the container.
  */
@RunWith(classOf[Parameterized])
class ScalaBlankLinesTest_MembersExhaustive(tuple: (String, String, String))
  extends AbstractScalaFormatterTestBase with LineCommentsTestOps {

  private val (first, second, container) = tuple

  // Test huge amount combinations of members in different contexts with different blank lines settings
  // example:
  // trait T {
  //   def foo = ????
  //   val x = 42
  // }
  @Test
  def testBlankLines(): Unit = {
    val before =
      s"""$container {
         |  $first
         |  $second
         |}""".stripMargin
    allBlankLinesSettingsCombinations(first, second, container) { () =>
      val expectedLines = expectedLinesBetween(first, second, container)
      val expectedAfter =
        s"""$container {
           |  $first${"\n" * expectedLines}
           |  $second
           |}""".stripMargin
      try doTextTest(before, expectedAfter) catch {
        case ex: Throwable =>
          // Dump the settings that produced the failure; otherwise the
          // parameterized failure is impossible to reproduce.
          System.err.println(s"### SETTINGS ###\n$currentBlankLinesSettingsDebugText")
          throw ex
      }
    }
  }

  /** @return Cartesian product */
  private def prod[T](l: List[List[T]]): List[List[T]] = l match {
    case Nil => List(Nil)
    case l :: ls => for (i <- l; r <- prod(ls)) yield i :: r
  }

  // size of "fair" product of all possible settings values is very huge, and tests would take ages to end
  // so we do some optimisation to drop irrelevant settings in some cases
  //
  // Strategy: only the settings that can affect this (member, container)
  // combination are exercised as a full cartesian product; the remaining
  // settings are varied independently (each while the relevant ones are 0)
  // to assert they have no effect.
  private def allBlankLinesSettingsCombinations(first: String, second: String, container: String)(body: () => Unit): Unit = {
    // Each setter mutates one blank-lines setting on the shared code-style
    // settings objects (cs = common, ss = scala-specific).
    val fSetter  : Int => Unit = cs.BLANK_LINES_AROUND_FIELD = _
    val fiSetter : Int => Unit = cs.BLANK_LINES_AROUND_FIELD_IN_INTERFACE = _
    val fisSetter: Int => Unit = ss.BLANK_LINES_AROUND_FIELD_IN_INNER_SCOPES = _

    val mSetter  : Int => Unit = cs.BLANK_LINES_AROUND_METHOD = _
    val miSetter : Int => Unit = cs.BLANK_LINES_AROUND_METHOD_IN_INTERFACE = _
    val misSetter: Int => Unit = ss.BLANK_LINES_AROUND_METHOD_IN_INNER_SCOPES = _

    val cSetter  : Int => Unit = cs.BLANK_LINES_AROUND_CLASS = _
    val cisSetter: Int => Unit = ss.BLANK_LINES_AROUND_CLASS_IN_INNER_SCOPES = _

    val memberTypes = firstToken(first) :: firstToken(second) :: Nil
    val containerType = firstToken(container)

    val allSetters = List(fSetter, fiSetter, fisSetter, mSetter, miSetter, misSetter, cSetter, cisSetter)

    // Settings that can apply inside this kind of container at all.
    val containerRelevantSetters = containerType match {
      case "class" | "object" => Seq(fSetter, mSetter, cSetter)
      case "trait"            => Seq(fiSetter, miSetter, cSetter)
      case "def"              => Seq(fisSetter, misSetter, cisSetter)
    }
    val containerNotRelevantSetters = allSetters.diff(containerRelevantSetters)

    // Of the container-relevant settings, keep those matching the member kinds.
    val relevantSetters: List[Int => Unit] = memberTypes.flatMap {
      case "class" | "trait" | "object"      => List(cSetter, cisSetter)
      case "def"                             => List(mSetter, miSetter, misSetter)
      case "val" | "var" | "lazy" | "type" | _ => List(fSetter, fiSetter, fisSetter) // `_` also routes plain statements to the field settings
    }.distinct.diff(containerNotRelevantSetters)

    val notRelevantSetters = allSetters.diff(relevantSetters).diff(containerNotRelevantSetters)
    assert(allSetters.size == relevantSetters.size + notRelevantSetters.size + containerNotRelevantSetters.size)

    val maxBlankLine = 2

    // For each setter, the list of thunks applying every value 0..maxBlankLine.
    def applications(setters: List[Int => Unit]): List[List[() => Unit]] =
      setters.map(setter => (0 to maxBlankLine).map(idx => () => setter.apply(idx)).toList)

    val relevantApplications = applications(relevantSetters)
    val notRelevantApplications1 = applications(notRelevantSetters)
    val notRelevantApplications2 = applications(containerNotRelevantSetters)

    //    val prod1 = prod(relevantApplications ++ notRelevantApplications1)
    //    val prod2 = prod(relevantApplications ++ notRelevantApplications2)
    val prod1 = prod(relevantApplications)
    val prod2 = prod(notRelevantApplications1)
    val prod3 = prod(notRelevantApplications2)
    val allProd = Seq(prod1, prod2, prod3).flatten

    // Reset everything to 0, then run `body` once per settings combination.
    // NOTE: combinations are applied cumulatively on the shared settings
    // objects; each inner list fully determines the setters it covers.
    allSetters.foreach(_.apply(0))
    allProd.foreach { applySettings: Seq[() => Unit] =>
      applySettings.foreach(_.apply())
      body()
    }
  }

  /** Expected blank lines = max of the two members' applicable settings. */
  private def expectedLinesBetween(firstMember: String, secondMember: String, container: String): Int = {
    val firstValue = settingValue(firstMember, container)
    val secondValue = settingValue(secondMember, container)
    math.max(firstValue, secondValue)
  }

  //noinspection ScalaUnusedSymbol
  /** Maps a (member kind, container kind) pair to the setting that governs it. */
  private def settingValue(member: String, container: String): Int =
    firstToken(member) match {
      case "class" | "trait" | "object" =>
        firstToken(container) match {
          case "def" => ss.BLANK_LINES_AROUND_CLASS_IN_INNER_SCOPES
          case _     => cs.BLANK_LINES_AROUND_CLASS
        }
      case "def" =>
        firstToken(container) match {
          case "class" | "object" => cs.BLANK_LINES_AROUND_METHOD
          case "trait"            => cs.BLANK_LINES_AROUND_METHOD_IN_INTERFACE
          case "def"              => ss.BLANK_LINES_AROUND_METHOD_IN_INNER_SCOPES
        }
      case "val" | "var" | "lazy" | "type" | _ =>
        firstToken(container) match {
          case "class" | "object" => cs.BLANK_LINES_AROUND_FIELD
          case "trait"            => cs.BLANK_LINES_AROUND_FIELD_IN_INTERFACE
          case "def"              => ss.BLANK_LINES_AROUND_FIELD_IN_INNER_SCOPES
        }
    }

  /** First whitespace-separated token of a snippet ("val", "def", "class"...). */
  private def firstToken(member: String): String = {
    val idx = member.indexOf(' ')
    if (idx != -1) member.substring(0, member.indexOf(' '))
    else member
  }

  private def currentBlankLinesSettingsDebugText: String =
    s"""BLANK_LINES_AROUND_FIELD                   = ${cs.BLANK_LINES_AROUND_FIELD}
       |BLANK_LINES_AROUND_FIELD_IN_INTERFACE      = ${cs.BLANK_LINES_AROUND_FIELD_IN_INTERFACE }
       |BLANK_LINES_AROUND_FIELD_IN_INNER_SCOPES   = ${ss.BLANK_LINES_AROUND_FIELD_IN_INNER_SCOPES}
       |
       |BLANK_LINES_AROUND_METHOD                  = ${cs.BLANK_LINES_AROUND_METHOD}
       |BLANK_LINES_AROUND_METHOD_IN_INTERFACE     = ${cs.BLANK_LINES_AROUND_METHOD_IN_INTERFACE }
       |BLANK_LINES_AROUND_METHOD_IN_INNER_SCOPES  = ${ss.BLANK_LINES_AROUND_METHOD_IN_INNER_SCOPES}
       |
       |BLANK_LINES_AROUND_CLASS                   = ${cs.BLANK_LINES_AROUND_CLASS}
       |BLANK_LINES_AROUND_CLASS_IN_INNER_SCOPES   = ${ss.BLANK_LINES_AROUND_CLASS_IN_INNER_SCOPES}
       |""".stripMargin
}
/** Supplies the `(first, second, container)` member triples the parameterized
  * test class iterates over: representative member snippets of each kind,
  * paired across kinds (plus same-member pairs), placed in each container. */
object ScalaBlankLinesTest_MembersExhaustive {

  // Field-like members.
  private val fieldMembers = Seq(
    "val a = 1",
    "var a = 1",
    "lazy val a = 1",
    "type t = String"
  )

  // Plain statement (routed through the field settings by the test class).
  private val statementMembers = Seq(
    "println(42)",
  )

  // Method member.
  private val methodMembers = Seq(
    "def f = 1",
  )

  // Template definitions.
  private val templateMembers = Seq(
    "class T",
    "trait T",
    "object T",
  )

  /** Cross product of `as` and `bs`; every (a, b) pair is immediately
    * followed by the same-member pair (a, a). */
  private def crossWithSelf(as: Seq[String], bs: Seq[String]): Seq[(String, String)] =
    for {
      a <- as
      b <- bs
      pair <- Seq((a, b), (a, a))
    } yield pair

  private val pairs: Seq[(String, String)] =
    crossWithSelf(fieldMembers, statementMembers) ++
      crossWithSelf(fieldMembers, methodMembers) ++
      crossWithSelf(fieldMembers, templateMembers) ++
      crossWithSelf(statementMembers, methodMembers) ++
      crossWithSelf(statementMembers, templateMembers) ++
      crossWithSelf(methodMembers, templateMembers) :+
      (fieldMembers.head, fieldMembers.head) :+
      (statementMembers.head, statementMembers.head) :+
      (methodMembers.head, methodMembers.head) :+
      (templateMembers.head, templateMembers.head)

  private val containers = Seq(
    "trait T",
    "class T",
    "def foo",
  )

  private val combinations: Seq[(String, String, String)] =
    pairs.flatMap { case (first, second) =>
      containers.map(container => (first, second, container))
    }

  // Name kept for source compatibility; JUnit locates this by annotation.
  @Parameterized.Parameters(name = "{index}: {0}")
  def primeNumbers: java.util.Collection[(String, String, String)] =
    combinations.asJava
}
| JetBrains/intellij-scala | scala/scala-impl/test/org/jetbrains/plugins/scala/lang/formatter/tests/ScalaBlankLinesTest_MembersExhaustive.scala | Scala | apache-2.0 | 8,130 |
package scaltris
/** Board dimensions and factory for an empty playfield. */
object Board {
  val Width = 10
  val Height = 24

  // A single empty row, kept for callers that need a row template.
  // WARNING: this is one shared array instance — never install it as a live
  // board row without cloning it first.
  val EmptyBoardRow = Array.fill[Block.Value](Width)(Block.EMPTY)

  /** Returns a Height x Width grid of EMPTY cells.
    *
    * Bug fix: the previous `Array.fill(Height)(EmptyBoardRow)` evaluated the
    * by-name argument to the SAME array reference every time, so all rows
    * aliased one shared row and mutating any cell mutated every row. The
    * two-dimensional `Array.fill` builds a distinct row per slot.
    */
  def emptyBoard: Array[Array[Block.Value]] = {
    Array.fill[Block.Value](Height, Width)(Block.EMPTY)
  }
}
/** Mutable Tetris playfield: a Height x Width grid of blocks, indexed
  * `board(y)(x)` with row 0 at the top. */
class Board(var board: Array[Array[Block.Value]]) {

  /** Creates an empty board. */
  def this() = this(Board.emptyBoard)

  /** Returns a deep copy of this board with the tetromino's blocks stamped in.
    * This board is left unchanged. */
  def withTetromino(tetromino: Tetromino): Board = {
    val boardCopy = clone
    val positions = tetromino.getBlockPositions
    positions.foreach {
      // positions are (x, y); the grid is indexed row-first.
      position => boardCopy.board(position._2)(position._1) = tetromino.block
    }
    boardCopy
  }

  /** True if any of the tetromino's cells lands on a non-empty board cell. */
  def overlap(tetromino: Tetromino): Boolean = {
    val positions = tetromino.getBlockPositions
    positions.exists {
      position => board(position._2)(position._1) != Block.EMPTY
    }
  }

  private def legalX = (0 until Board.Width)
  private def legalY = (0 until Board.Height)

  /** A placement is legal when every cell is on the board and unoccupied. */
  def isLegal(tetromino: Tetromino): Boolean = {
    val positions = tetromino.getBlockPositions
    positions.forall {
      position => legalX.contains(position._1) && legalY.contains(position._2)
    } && !overlap(tetromino)
  }

  /** Removes every completely-filled row, shifts the rest down, and prepends
    * fresh empty rows at the top.
    *
    * @return the number of rows cleared
    */
  def clearFullRows: Int = {
    val remaining = board.filter(_.contains(Block.EMPTY))
    val clearedRows = Board.Height - remaining.length
    // Bug fix: previously the shared Board.EmptyBoardRow instance was reused
    // for every new row, aliasing them all — a later in-place write to one
    // "empty" row would have corrupted the rest. Clone per slot instead
    // (Array.fill's element is by-name, so clone runs once per row).
    board = Array.fill(clearedRows)(Board.EmptyBoardRow.clone) ++ remaining
    clearedRows
  }

  /** Deep copy: the row arrays are cloned so the copy can be mutated freely. */
  override def clone: Board = new Board(board.map(_.clone))
}
| myrjola/scaltris | src/scaltris/Board.scala | Scala | mit | 1,443 |
import scala.quoted._
object Macros {

  /** Interface returned by the `quote` macro. */
  trait Quoted {
    def foo: Int
  }

  /** Inline entry point; the expansion is produced by [[quoteImpl]] at
    * compile time via the splice `${...}`. */
  inline def quote: Quoted = ${ quoteImpl }

  /** Macro implementation: returns a quoted anonymous `Quoted` instance.
    * `foo` is deliberately left as `???` — this exercises quoting of an
    * anonymous-class body, not the runtime value of `foo`. */
  def quoteImpl(using qctx: QuoteContext): Expr[Quoted] = '{
    new Quoted {
      def foo = ???
    }
  }
}
| som-snytt/dotty | tests/pos-macros/i7513c/Macro_1.scala | Scala | apache-2.0 | 232 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.eventhubs
import java.util.Locale
import java.util.concurrent.atomic.AtomicInteger
import org.scalatest.time.SpanSugar._
import org.apache.spark.eventhubs._
import org.apache.spark.eventhubs.utils.EventHubsTestUtils
import org.apache.spark.sql._
import org.apache.spark.sql.functions._
import org.apache.spark.sql.execution.streaming.MemoryStream
import org.apache.spark.sql.streaming._
import org.apache.spark.sql.test.SharedSQLContext
import org.scalatest.time.Span
import collection.JavaConverters._
/** End-to-end tests for the EventHubs sink, covering batch and streaming
  * writes (including partition/partitionKey/properties routing) plus the
  * error paths for unsupported save modes and malformed output schemas.
  * Uses the in-memory [[EventHubsTestUtils]] simulator rather than a live
  * service.
  */
class EventHubsSinkSuite extends StreamTest with SharedSQLContext {
  import testImplicits._
  import EventHubsTestUtils._

  protected var testUtils: EventHubsTestUtils = _

  override val streamingTimeout: Span = 30.seconds

  override def beforeAll(): Unit = {
    super.beforeAll()
    testUtils = new EventHubsTestUtils
  }

  override def afterAll(): Unit = {
    if (testUtils != null) {
      testUtils.destroyAllEventHubs()
      testUtils = null
      super.afterAll()
    }
  }

  // Monotonic counter so every test gets a fresh, uniquely named event hub.
  private val eventHubId = new AtomicInteger(0)

  private def newEventHub(): String = s"eh-${eventHubId.getAndIncrement}"

  private def getEventHubsConf(name: String) = testUtils.getEventHubsConf(name)

  /** Batch-reads the hub back as a single string `body` column, used to
    * verify what the sink actually wrote. */
  private def createReader(ehConf: EventHubsConf): DataFrame = {
    spark.read
      .format("eventhubs")
      .options(ehConf.toMap)
      .load()
      .select($"body" cast "string")
  }

  /** Starts a streaming write of `input` to EventHubs.
    *
    * Renames `value` to `body`, optionally attaches a constant `properties`
    * map column, and optionally projects via `withSelectExrp` (used by tests
    * to add `partition`/`partitionKey` columns or to break the schema on
    * purpose).
    */
  private def createEventHubsWriter(
      input: DataFrame,
      ehConf: EventHubsConf,
      withOutputMode: Option[OutputMode] = None,
      properties: Option[Map[String, String]] = None)(withSelectExrp: String*): StreamingQuery = {
    var stream: DataStreamWriter[Row] = null
    withTempDir { checkpointDir =>
      var df = input.toDF().withColumnRenamed("value", "body")
      if (properties.isDefined) {
        df = df.withColumn("properties", typedLit(properties.get))
      }
      if (withSelectExrp.nonEmpty) {
        df = df.selectExpr(withSelectExrp: _*)
      }
      stream = df.writeStream
        .format("eventhubs")
        .options(ehConf.toMap)
        .option("checkpointLocation", checkpointDir.getCanonicalPath)
        .queryName("eventHubStream")
      withOutputMode.foreach(stream.outputMode(_))
    }
    stream.start()
  }

  // ---------------------------------------------------------------- batch --

  test("batch - write to EventHubs") {
    val eh = newEventHub()
    testUtils.createEventHubs(eh, DefaultPartitionCount)
    val ehConf = getEventHubsConf(eh)
    val df = Seq("1", "2", "3", "4", "5").toDF("body")

    df.write
      .format("eventhubs")
      .options(ehConf.toMap)
      .save()
    checkAnswer(createReader(ehConf),
                Row("1") :: Row("2") :: Row("3") :: Row("4") :: Row("5") :: Nil)
  }

  test("batch - write to specific partition id") {
    val eh = newEventHub()
    val targetPartition = "0"
    testUtils.createEventHubs(eh, DefaultPartitionCount)
    val ehConf = getEventHubsConf(eh)
    val df = Seq("1", "2", "3", "4", "5").map(v => (targetPartition, v)).toDF("partition", "body")

    df.write
      .format("eventhubs")
      .options(ehConf.toMap)
      .save()
    // All five events must land in the requested partition.
    assert(testUtils.getEventHubs(eh).getPartitions(targetPartition.toInt).size == 5)
    checkAnswer(createReader(ehConf),
                Row("1") :: Row("2") :: Row("3") :: Row("4") :: Row("5") :: Nil)
  }

  test("batch - unsupported save modes") {
    val eh = newEventHub()
    testUtils.createEventHubs(eh, DefaultPartitionCount)
    val ehConf = getEventHubsConf(eh)
    val df = Seq[(String, String)](("0", "1")).toDF("partition", "body")

    // Test bad save mode Ignore
    var ex = intercept[AnalysisException] {
      df.write
        .format("eventhubs")
        .options(ehConf.toMap)
        .mode(SaveMode.Ignore)
        .save()
    }
    assert(
      ex.getMessage
        .toLowerCase(Locale.ROOT)
        .contains(s"save mode ignore not allowed for eventhubs"))

    // Test bad save mode Overwrite
    ex = intercept[AnalysisException] {
      df.write
        .format("eventhubs")
        .mode(SaveMode.Overwrite)
        .save()
    }
    assert(
      ex.getMessage
        .toLowerCase(Locale.ROOT)
        .contains(s"save mode overwrite not allowed for eventhubs"))
  }

  test("SPARK-20496: batch - enforce analyzed plans") {
    val inputEvents =
      spark
        .range(1, 1000)
        .select(to_json(struct("*")) as 'body)

    val eh = newEventHub()
    testUtils.createEventHubs(eh, DefaultPartitionCount)
    val ehConf = getEventHubsConf(eh)

    // Should not throw UnresolvedException
    inputEvents.write
      .format("eventhubs")
      .options(ehConf.toMap)
      .save()
  }

  // ------------------------------------------------------------ streaming --

  test("streaming - write to eventhubs") {
    val input = MemoryStream[String]
    val eh = newEventHub()
    testUtils.createEventHubs(eh, DefaultPartitionCount)
    val ehConf = getEventHubsConf(eh)

    val writer = createEventHubsWriter(
      input.toDF,
      ehConf,
      withOutputMode = Some(OutputMode.Append)
    )("body")

    val reader = (e: EventHubsConf) => createReader(e).as[String].map(_.toInt)

    try {
      input.addData("1", "2", "3", "4", "5")
      failAfter(streamingTimeout) {
        writer.processAllAvailable()
      }
      checkDatasetUnorderly(reader(ehConf), 1, 2, 3, 4, 5)
      input.addData("6", "7", "8", "9", "10")
      failAfter(streamingTimeout) {
        writer.processAllAvailable()
      }
      checkDatasetUnorderly(reader(ehConf), 1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
    } finally {
      writer.stop()
    }
  }

  test("streaming - write to specific partition") {
    val targetPart = "0"
    val input = MemoryStream[String]
    val eh = newEventHub()
    testUtils.createEventHubs(eh, partitionCount = 10)
    val ehConf = getEventHubsConf(eh)

    val writer = createEventHubsWriter(
      input.toDF,
      ehConf,
      withOutputMode = Some(OutputMode.Update())
    )(s"'$targetPart' as partition", "body")

    val reader = (e: EventHubsConf) => createReader(e).as[String].map(_.toInt)

    try {
      input.addData("1", "2", "2", "3", "3", "3")
      failAfter(streamingTimeout) {
        writer.processAllAvailable()
      }
      assert(testUtils.getEventHubs(eh).getPartitions(targetPart.toPartitionId).size == 6)
      checkDatasetUnorderly(reader(ehConf), 1, 2, 2, 3, 3, 3)
      input.addData("1", "2", "3")
      failAfter(streamingTimeout) {
        writer.processAllAvailable()
      }
      assert(testUtils.getEventHubs(eh).getPartitions(targetPart.toPartitionId).size == 9)
      checkDatasetUnorderly(reader(ehConf), 1, 2, 2, 3, 3, 3, 1, 2, 3)
    } finally {
      writer.stop()
    }
  }

  test("streaming - write with properties and partition") {
    val targetPart = "0"
    val targetProperties = Map("a" -> "3", "b" -> "bar", "c" -> "spark")
    val input = MemoryStream[String]
    val eh = newEventHub()
    testUtils.createEventHubs(eh, partitionCount = 10)
    val ehConf = getEventHubsConf(eh)

    val writer = createEventHubsWriter(
      input.toDF,
      ehConf,
      withOutputMode = Some(OutputMode.Update()),
      properties = Some(targetProperties)
    )("properties", "body", s"'$targetPart' as partition")

    val reader = (e: EventHubsConf) => createReader(e).as[String].map(_.toInt)

    try {
      input.addData("1", "2", "2", "3", "3", "3")
      failAfter(streamingTimeout) {
        writer.processAllAvailable()
      }
      assert(testUtils.getEventHubs(eh).getPartitions(targetPart.toPartitionId).size == 6)
      checkDatasetUnorderly(reader(ehConf), 1, 2, 2, 3, 3, 3)
      input.addData("1", "2", "3")
      failAfter(streamingTimeout) {
        writer.processAllAvailable()
      }
      assert(testUtils.getEventHubs(eh).getPartitions(targetPart.toPartitionId).size == 9)
      checkDatasetUnorderly(reader(ehConf), 1, 2, 2, 3, 3, 3, 1, 2, 3)
      // Application properties must round-trip onto the received event.
      assert(
        testUtils
          .getEventHubs(eh)
          .getPartitions(targetPart.toPartitionId)
          .getEvents
          .head
          .getProperties
          .asScala == targetProperties)
    } finally {
      writer.stop()
    }
  }

  test("streaming - write with properties") {
    val targetProperties = Map("property" -> "foo", "another" -> "bar")
    val input = MemoryStream[String]
    val eh = newEventHub()
    testUtils.createEventHubs(eh, partitionCount = 10)
    val ehConf = getEventHubsConf(eh)

    val writer = createEventHubsWriter(
      input.toDF,
      ehConf,
      withOutputMode = Some(OutputMode.Update()),
      properties = Some(targetProperties)
    )("properties", "body")

    val reader = (e: EventHubsConf) => createReader(e).as[String].map(_.toInt)

    try {
      input.addData("1", "2", "2", "3", "3", "3")
      failAfter(streamingTimeout) {
        writer.processAllAvailable()
      }
      checkDatasetUnorderly(reader(ehConf), 1, 2, 2, 3, 3, 3)
      input.addData("1", "2", "3")
      failAfter(streamingTimeout) {
        writer.processAllAvailable()
      }
      checkDatasetUnorderly(reader(ehConf), 1, 2, 2, 3, 3, 3, 1, 2, 3)
      assert(
        testUtils
          .getEventHubs(eh)
          .getPartitions(0)
          .getEvents
          .head
          .getProperties
          .asScala == targetProperties)
    } finally {
      writer.stop()
    }
  }

  // ---------------------------------------------------- schema error paths --

  test("streaming - write data with bad schema - no body field") {
    val input = MemoryStream[String]
    val eh = newEventHub()
    testUtils.createEventHubs(eh, partitionCount = 10)
    val ehConf = getEventHubsConf(eh)

    var writer: StreamingQuery = null
    var ex: Exception = null
    try {
      ex = intercept[StreamingQueryException] {
        writer = createEventHubsWriter(input.toDF(), ehConf)("body as foo")
        input.addData("1", "2", "3", "4", "5")
        writer.processAllAvailable()
      }
    } finally {
      writer.stop()
    }
    assert(ex.getMessage.toLowerCase(Locale.ROOT).contains("required attribute 'body' not found."))
  }

  test("streaming - write data with bad schema - partitionKey and partition have been set") {
    val input = MemoryStream[String]
    val eh = newEventHub()
    testUtils.createEventHubs(eh, partitionCount = 10)
    val ehConf = getEventHubsConf(eh)

    var writer: StreamingQuery = null
    var ex: Exception = null
    val partitionKey = "foo"
    val partitionId = "0"
    try {
      ex = intercept[StreamingQueryException] {
        writer = createEventHubsWriter(input.toDF(), ehConf)(s"'$partitionKey' as partitionKey",
                                                             s"'$partitionId' as partition",
                                                             "body")
        input.addData("1", "2", "3", "4", "5")
        writer.processAllAvailable()
      }
    } finally {
      writer.stop()
    }
    assert(
      ex.getMessage
        .toLowerCase(Locale.ROOT)
        .contains(
          s"both a partitionkey ($partitionKey) and partition ($partitionId) have been detected. both can not be set."))
  }

  test("streaming - write data with valid schema but wrong type - bad body type") {
    val input = MemoryStream[String]
    val eh = newEventHub()
    testUtils.createEventHubs(eh, partitionCount = 10)
    val ehConf = getEventHubsConf(eh)

    var writer: StreamingQuery = null
    var ex: Exception = null
    try {
      ex = intercept[StreamingQueryException] {
        writer = createEventHubsWriter(input.toDF(), ehConf)("CAST (body as INT) as body")
        input.addData("1", "2", "3", "4", "5")
        writer.processAllAvailable()
      }
    } finally {
      writer.stop()
    }
    assert(
      ex.getMessage
        .toLowerCase(Locale.ROOT)
        .contains("body attribute type must be a string or binarytype"))
  }

  test("streaming - write data with valid schema but wrong type - bad partition type") {
    val input = MemoryStream[String]
    val eh = newEventHub()
    testUtils.createEventHubs(eh, partitionCount = 10)
    val ehConf = getEventHubsConf(eh)

    var writer: StreamingQuery = null
    var ex: Exception = null
    val partitionId = "0"
    try {
      ex = intercept[StreamingQueryException] {
        writer =
          createEventHubsWriter(input.toDF(), ehConf)(s"CAST('$partitionId' as INT) as partition",
                                                      "body")
        input.addData("1", "2", "3", "4", "5")
        writer.processAllAvailable()
      }
    } finally {
      writer.stop()
    }
    assert(
      ex.getMessage
        .toLowerCase(Locale.ROOT)
        .contains(s"partitionid attribute unsupported type"))
  }

  test("streaming - write data with valid schema but wrong type - bad partitionKey type") {
    val input = MemoryStream[String]
    val eh = newEventHub()
    testUtils.createEventHubs(eh, partitionCount = 10)
    val ehConf = getEventHubsConf(eh)

    var writer: StreamingQuery = null
    var ex: Exception = null
    val partitionKey = "234"
    try {
      ex = intercept[StreamingQueryException] {
        writer = createEventHubsWriter(input.toDF(), ehConf)(
          s"CAST('$partitionKey' as INT) as partitionKey",
          "body")
        input.addData("1", "2", "3", "4", "5")
        writer.processAllAvailable()
      }
    } finally {
      writer.stop()
    }
    assert(
      ex.getMessage
        .toLowerCase(Locale.ROOT)
        .contains(s"partitionkey attribute unsupported type"))
  }

  test("streaming - write with bad properties - null value in properties") {
    val targetProperties = Map("a" -> "3", "b" -> null, "c" -> "spark")
    val input = MemoryStream[String]
    val eh = newEventHub()
    testUtils.createEventHubs(eh, partitionCount = 10)
    val ehConf = getEventHubsConf(eh)

    var writer: StreamingQuery = null
    var ex: Exception = null
    try {
      ex = intercept[StreamingQueryException] {
        writer = createEventHubsWriter(input.toDF(), ehConf, properties = Some(targetProperties))(
          "properties",
          "body")
        input.addData("1", "2", "3", "4", "5")
        writer.processAllAvailable()
      }
    } finally {
      writer.stop()
    }
    assert(
      ex.getMessage
        .toLowerCase(Locale.ROOT)
        .contains("properties cannot have a null value"))
  }
}
| hdinsight/spark-eventhubs | core/src/test/scala/org/apache/spark/sql/eventhubs/EventHubsSinkSuite.scala | Scala | apache-2.0 | 14,943 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.template.recommendation
import org.apache.predictionio.controller.PAlgorithm
import org.apache.predictionio.controller.Params
import org.apache.predictionio.data.storage.BiMap
import org.apache.spark.SparkContext._
import org.apache.spark.mllib.recommendation.ALS
import org.apache.spark.mllib.recommendation.{Rating => MLlibRating}
import grizzled.slf4j.Logger
/** ALS hyper-parameters: latent-factor rank, number of iterations,
  * regularization lambda, and an optional RNG seed (when absent, the
  * algorithm falls back to `System.nanoTime`). */
case class ALSAlgorithmParams(rank: Int, numIterations: Int, lambda: Double,
  seed: Option[Long]) extends Params
/**
* Use ALS to build item x feature matrix
*/
/**
 * Use ALS to build item x feature matrix
 *
 * `train` fits an implicit-feedback ALS model over aggregated view counts;
 * `predict` scores every item by summed cosine similarity against the items
 * named in the query and returns the top-N, optionally filtered by creation
 * year.
 */
class ALSAlgorithm(val ap: ALSAlgorithmParams)
  extends PAlgorithm[PreparedData, ALSModel, Query, PredictedResult] {

  @transient lazy val logger = Logger[this.type]

  /** Fits the ALS model. Fails fast on empty ratings/items so configuration
    * problems surface as clear errors instead of MLlib internals. */
  def train(data: PreparedData): ALSModel = {
    require(!data.ratings.take(1).isEmpty,
      s"viewEvents in PreparedData cannot be empty." +
        " Please check if DataSource generates TrainingData" +
        " and Preprator generates PreparedData correctly.")
    require(!data.items.take(1).isEmpty,
      s"items in PreparedData cannot be empty." +
        " Please check if DataSource generates TrainingData" +
        " and Preprator generates PreparedData correctly.")
    // create item's String ID to integer index BiMap
    val itemStringIntMap = BiMap.stringInt(data.items.keys)
    val userStringIntMap = BiMap.stringInt(data.ratings.map(_.user))

    // HOWTO: collect Item as Map and convert ID to Int index
    val items: Map[Int, Item] = data.items.map { case (id, item) =>
      (itemStringIntMap(id), item)
    }.collectAsMap.toMap

    val mllibRatings = data.ratings.map { r =>
      // Convert user and item String IDs to Int index for MLlib
      val iindex = itemStringIntMap.getOrElse(r.item, -1)
      val uindex = userStringIntMap.getOrElse(r.user, -1)

      if (iindex == -1)
        logger.info(s"Couldn't convert nonexistent item ID ${r.item}"
          + " to Int index.")

      (uindex -> iindex) -> 1
    }.filter { case ((u, i), v) => (i != -1) && (u != -1) }
      .reduceByKey(_ + _) // aggregate all view events of same item
      .map { case ((u, i), v) => MLlibRating(u, i, v) }

    // MLLib ALS cannot handle empty training data.
    require(!mllibRatings.take(1).isEmpty,
      s"mllibRatings cannot be empty." +
        " Please check if your events contain valid user and item ID.")

    // seed for MLlib ALS
    val seed = ap.seed.getOrElse(System.nanoTime)

    val m = ALS.trainImplicit(
      ratings = mllibRatings,
      rank = ap.rank,
      iterations = ap.numIterations,
      lambda = ap.lambda,
      blocks = -1,
      alpha = 1.0,
      seed = seed)

    new ALSModel(productFeatures = m.productFeatures,
      itemStringIntMap = itemStringIntMap, items = items)
  }

  /** Scores all items against the query and returns the top `query.num`. */
  def predict(model: ALSModel, query: Query): PredictedResult = {
    // NOTE(review): this looks up the feature vector of every known item and
    // sums cosine similarity of each candidate against all of them — O(n^2)
    // in item count; presumably fine for the template's data sizes.
    val queryFeatures =
      model.items.keys.flatMap(model.productFeatures.lookup(_).headOption)

    val indexScores = if (queryFeatures.isEmpty) {
      logger.info(s"No productFeatures found for query ${query}.")
      Array[(Int, Double)]()
    } else {
      model.productFeatures.mapValues { f =>
        queryFeatures.map(cosine(_, f)).reduce(_ + _)
      }.filter(_._2 > 0) // keep items with score > 0
        .collect()
    }

    // HOWTO: filter predicted results by query.
    val filteredScores = filterItems(indexScores, model.items, query)

    implicit val ord = Ordering.by[(Int, Double), Double](_._2)
    val topScores = getTopN(filteredScores, query.num).toArray

    val itemScores = topScores.map { case (i, s) =>
      new ItemScore(item = model.itemIntStringMap(i), score = s,
        creationYear = model.items(i).creationYear)
    }
    new PredictedResult(itemScores)
  }

  /** Returns the `n` LARGEST elements of `s` per `ord`, in descending order.
    *
    * Bug fix: the original compared `ord.compare(x, min) < 0`, which replaced
    * the running minimum with ever-smaller elements and therefore returned
    * the bottom-N scores instead of the top-N. It also used
    * `result.filter(_ != min)`, which drops *every* element tied with the
    * minimum and could shrink the result below `n`; `diff(List(min))`
    * removes exactly one occurrence.
    */
  private def getTopN[T](s: Seq[T], n: Int)
    (implicit ord: Ordering[T]): Iterable[T] = {
    var result = List.empty[T]
    for (x <- s) {
      if (result.size < n)
        result = x :: result
      else {
        val min = result.min
        // Admit x only if it beats the smallest of the current top-n.
        if (ord.compare(x, min) > 0) {
          result = x :: result.diff(List(min))
        }
      }
    }
    result.sorted.reverse
  }

  /** Cosine similarity of two equal-length dense vectors (0 when either is
    * the zero vector, avoiding division by zero). */
  private def cosine(v1: Array[Double], v2: Array[Double]): Double = {
    val size = v1.size
    var i = 0
    var n1: Double = 0
    var n2: Double = 0
    var d: Double = 0
    while (i < size) {
      n1 += v1(i) * v1(i)
      n2 += v2(i) * v2(i)
      d += v1(i) * v2(i)
      i += 1
    }
    val n1n2 = (math.sqrt(n1) * math.sqrt(n2))
    if (n1n2 == 0) 0 else (d / n1n2)
  }

  // HOWTO: actual filter of predicted movie results.
  // filter selects all movies
  // that were made after the year specified in the query
  private def filterItems(selectedScores: Array[(Int, Double)],
                          items: Map[Int, Item],
                          query: Query) =
    selectedScores.view.filter { case (iId, _) =>
      items(iId).creationYear.map(icr => query.creationYear.forall(icr >= _))
        .getOrElse(true)
    }
}
| himanshudhami/PredictionIO | examples/scala-parallel-recommendation/custom-query/src/main/scala/ALSAlgorithm.scala | Scala | apache-2.0 | 5,794 |
package com.aquamentis.util
import scala.collection.mutable.StringBuilder
import scala.collection.mutable.ListBuffer
import java.io.InputStream
import java.net.URL
import java.net.HttpURLConnection
import javax.xml.parsers.DocumentBuilderFactory
import org.w3c.dom.Node
import org.w3c.dom.NodeList
/** Signals that a fetched resource could not be parsed as an RSS/Atom feed. */
class NotFeedException extends Exception
/** A single feed entry: its headline text and target URL. */
case class Story(title: String, link: String)
// For the future: Parse and store Story summary text/HTML for display
/** A parsed feed: title, canonical link, display link, and its stories.
  * `etag`/`lastMod` carry the HTTP cache validators returned by the last
  * successful fetch, fed back into conditional GETs by [[Feed.refresh]]. */
case class Feed (
    title: String, link: String, displayLink: String, etag: Option[String],
    lastMod: Option[String], stories: Seq[Story])
object Feed {

  /** Fetches `link` (defaulting to http:// when no scheme is given) with a
    * conditional GET using the supplied ETag / Last-Modified validators.
    *
    * @return `Some(feed)` with refreshed validators and the final (possibly
    *         redirected) URL on HTTP 200, `None` on HTTP 304 (not modified)
    * @throws NotFeedException on any other response code
    */
  def refresh(link: String, etag: Option[String],
              lastMod: Option[String]): Option[Feed] = {
    val absoluteLink = if (link.contains("://")) link else "http://" + link
    val connection =
      new URL(absoluteLink).openConnection().asInstanceOf[HttpURLConnection]

    // Attach cache validators only when we have them.
    etag.foreach(connection.setRequestProperty("If-None-Match", _))
    lastMod.foreach(connection.setRequestProperty("If-Modified-Since", _))

    try {
      val body = connection.getInputStream()
      connection.getResponseCode match {
        case HttpURLConnection.HTTP_OK =>
          // Option(...) maps a null header to None.
          val freshEtag = Option(connection.getHeaderField("ETag"))
          val freshLastMod = Option(connection.getHeaderField("Last-Modified"))
          val parsed = FeedParser.parse(body)
          Some(parsed.copy(link = connection.getURL.toString, etag = freshEtag,
                           lastMod = freshLastMod))
        case HttpURLConnection.HTTP_NOT_MODIFIED =>
          None
        case _ =>
          throw new NotFeedException()
      }
    } finally {
      connection.disconnect()
    }
  }
}
/** DOM-based RSS/Atom feed parser
* Could be written more easily in Scala, but there's a bug in Android
* involving the SAX parser it depends on that wasn't fixed until 2.2
*/
/** DOM-based RSS/Atom feed parser
  * Could be written more easily in Scala, but there's a bug in Android
  * involving the SAX parser it depends on that wasn't fixed until 2.2
  */
object FeedParser {
  // Distinguishes the two supported feed dialects; they differ in where the
  // link lives (text node for RSS vs. `href` attribute for Atom) and the
  // per-story element name ("item" vs. "entry").
  sealed abstract class FeedType
  case object RSSFeed extends FeedType
  case object AtomFeed extends FeedType

  /** Adds `foreach` to the DOM's index-based NodeList. */
  class RichNodeList(list: NodeList) {
    def foreach(fn: (Node => Unit)) {
      for (x <- 0 until list.getLength; node = list.item(x))
        fn(node)
    }
  }
  implicit def enrichNodeList(list: NodeList) = new RichNodeList(list)

  /** Extracts a node's text content, converting some XML entities it finds
   *
   * Included in the Android API versions 8+, with entity encoding fixed in 11
   */
  def getTextContent(node: Node): String = {
    val result = new StringBuilder()
    node.getChildNodes.foreach {
      (child: Node) => result.append(child.getNodeType match {
        case Node.TEXT_NODE => child.getNodeValue
        // Only the five predefined entities (by numeric name) are decoded;
        // anything else is silently dropped.
        case Node.ENTITY_REFERENCE_NODE => child.getNodeName match {
          case "#34" => "\""
          case "#38" => "&"
          case "#39" => "'"
          case "#60" => "<"
          case "#62" => ">"
          case _ => ""
        }
        case _ => ""
      })
    }
    result.toString
  }

  /** Pulls title and link out of one item/entry element; fields missing from
    * the element are left as empty strings. */
  def extractStory(node: Node, feedType: FeedType): Story = {
    var title = ""
    var link = ""
    node.getChildNodes.foreach {
      (child: Node) => child.getNodeName match {
        case "title" => title = getTextContent(child)
        case "link" => link = feedType match {
          case RSSFeed => getTextContent(child)
          case AtomFeed => child.getAttributes
            .getNamedItem("href").getNodeValue
        }
        case _ =>
      }
    }
    Story(title, link)
  }

  /** Parses an RSS ("rss"/"rdf:RDF" root) or Atom ("feed" root) document
    * from `input` into a [[Feed]] with empty cache validators.
    *
    * @throws NotFeedException when the root element is none of the above
    */
  def parse(input: InputStream): Feed = {
    val doc = DocumentBuilderFactory.newInstance
      .newDocumentBuilder.parse(input)
    val root = doc.getDocumentElement
    val feedType = root.getTagName match {
      case ("rss"|"rdf:RDF") => RSSFeed
      case "feed" => AtomFeed
      case _ => throw new NotFeedException()
    }
    // NOTE(review): getElementsByTagName searches the whole document, so the
    // feed title/link are assumed to appear before any story titles/links —
    // true for well-formed feeds; verify for unusual documents.
    val title = getTextContent(root.getElementsByTagName("title").item(0))
    val link = feedType match {
      case RSSFeed => getTextContent(root.getElementsByTagName("link").item(0))
      case AtomFeed => root.getElementsByTagName("link").item(0).getAttributes
        .getNamedItem("href").getNodeValue
    }
    val stories = ListBuffer.empty[Story]
    root.getElementsByTagName(
      feedType match {
        case RSSFeed => "item"
        case AtomFeed => "entry"
      }).foreach((story:Node) => stories.append(extractStory(story, feedType)))
    Feed(title, link, link, None, None, stories.result())
  }
}
| alexclare/nwsr | src/scala/util/feeds.scala | Scala | gpl-3.0 | 4,639 |
package fpscala.chapter2
/**
* Created by sajit on 5/2/15.
*/
object Samples {
  /** Returns the index of the first element of `arr` satisfying `cond`,
   *  or -1 when no element matches (including the empty list).
   */
  def findFirst[A](arr: List[A], cond: (A => Boolean)): Int = {
    @annotation.tailrec
    def doFindFirst(list: List[A], currentIdx: Int): Int = {
      if (list.isEmpty) {
        -1
      } else if (cond(list.head)) {
        currentIdx
      } else {
        doFindFirst(list.tail, currentIdx + 1)
      }
    }
    doFindFirst(arr, 0)
  }

  /** Checks that every adjacent pair (a, b) of `as` satisfies `ordered(a, b)`.
   *
   *  Empty and single-element lists are trivially sorted. The scan starts
   *  with the first *pair*, so strict predicates such as `_ < _` work
   *  correctly. (The previous version seeded the scan with `as.head`, which
   *  both threw on the empty list and compared the head against itself.)
   */
  def isSorted[A](as: List[A], ordered: (A, A) => Boolean): Boolean = {
    @annotation.tailrec
    def doIsSorted(list: List[A], prev: A): Boolean = list match {
      case Nil => true
      case x :: xs => ordered(prev, x) && doIsSorted(xs, x)
    }
    as match {
      case Nil => true
      case head :: tail => doIsSorted(tail, head)
    }
  }

  /** Pattern-matching variant of isSorted: compares successive pairs directly. */
  def isSorted2[A](as: List[A], ordered: (A, A) => Boolean): Boolean = as match {
    case Nil => true
    case _ :: Nil => true
    case (x :: y :: xs) => ordered(x, y) && isSorted2(y :: xs, ordered)
  }
}
| sajit/learnyou | scala/minimal-scala/src/main/scala/fpscala/chapter2/Samples.scala | Scala | mit | 943 |
package org.jetbrains.plugins.dotty.lang.parser.parsing.statements
import org.jetbrains.plugins.dotty.lang.parser.parsing.types.Type
/**
 * @author adkozlov
 */
// Dotty variant of the Scala `val` declaration parser: only the
// type-annotation sub-parser is swapped for the Dotty-specific Type.
object ValDcl extends org.jetbrains.plugins.scala.lang.parser.parsing.statements.ValDcl {
  override protected val `type` = Type
}
| whorbowicz/intellij-scala | src/org/jetbrains/plugins/dotty/lang/parser/parsing/statements/ValDcl.scala | Scala | apache-2.0 | 296 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import java.lang.{Boolean => JavaBoolean}
import java.lang.{Byte => JavaByte}
import java.lang.{Double => JavaDouble}
import java.lang.{Float => JavaFloat}
import java.lang.{Integer => JavaInteger}
import java.lang.{Long => JavaLong}
import java.lang.{Short => JavaShort}
import java.math.{BigDecimal => JavaBigDecimal}
import java.nio.charset.StandardCharsets
import java.sql.{Date, Timestamp}
import java.time.{Instant, LocalDate}
import java.util
import java.util.Objects
import javax.xml.bind.DatatypeConverter
import scala.math.{BigDecimal, BigInt}
import scala.reflect.runtime.universe.TypeTag
import scala.util.Try
import org.json4s.JsonAST._
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow, ScalaReflection}
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.util._
import org.apache.spark.sql.catalyst.util.DateTimeUtils.instantToMicros
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types._
import org.apache.spark.util.Utils
object Literal {
  val TrueLiteral: Literal = Literal(true, BooleanType)
  val FalseLiteral: Literal = Literal(false, BooleanType)

  /** Wraps a raw Scala/Java value in a Literal, inferring the Catalyst
   *  DataType from its runtime class. Unsupported types throw.
   */
  def apply(v: Any): Literal = v match {
    case i: Int => Literal(i, IntegerType)
    case l: Long => Literal(l, LongType)
    case d: Double => Literal(d, DoubleType)
    case f: Float => Literal(f, FloatType)
    case b: Byte => Literal(b, ByteType)
    case s: Short => Literal(s, ShortType)
    case s: String => Literal(UTF8String.fromString(s), StringType)
    case c: Char => Literal(UTF8String.fromString(c.toString), StringType)
    case b: Boolean => Literal(b, BooleanType)
    case d: BigDecimal =>
      val decimal = Decimal(d)
      Literal(decimal, DecimalType.fromDecimal(decimal))
    case d: JavaBigDecimal =>
      val decimal = Decimal(d)
      Literal(decimal, DecimalType.fromDecimal(decimal))
    case d: Decimal => Literal(d, DecimalType(Math.max(d.precision, d.scale), d.scale))
    // Date/time values are stored internally as micros since epoch
    // (timestamps) or days since epoch (dates).
    case i: Instant => Literal(instantToMicros(i), TimestampType)
    case t: Timestamp => Literal(DateTimeUtils.fromJavaTimestamp(t), TimestampType)
    case ld: LocalDate => Literal(ld.toEpochDay.toInt, DateType)
    case d: Date => Literal(DateTimeUtils.fromJavaDate(d), DateType)
    case a: Array[Byte] => Literal(a, BinaryType)
    case a: collection.mutable.WrappedArray[_] => apply(a.array)
    case a: Array[_] =>
      // Element type is derived from the array's component class, then the
      // whole array is converted to Catalyst's internal representation.
      val elementType = componentTypeToDataType(a.getClass.getComponentType())
      val dataType = ArrayType(elementType)
      val convert = CatalystTypeConverters.createToCatalystConverter(dataType)
      Literal(convert(a), dataType)
    case i: CalendarInterval => Literal(i, CalendarIntervalType)
    case null => Literal(null, NullType)
    case v: Literal => v
    case _ =>
      throw new RuntimeException("Unsupported literal type " + v.getClass + " " + v)
  }

  /**
   * Returns the Spark SQL DataType for a given class object. Since this type needs to be resolved
   * in runtime, we use match-case idioms for class objects here. However, there are similar
   * functions in other files (e.g., HiveInspectors), so these functions need to merged into one.
   */
  private[this] def componentTypeToDataType(clz: Class[_]): DataType = clz match {
    // primitive types
    case JavaShort.TYPE => ShortType
    case JavaInteger.TYPE => IntegerType
    case JavaLong.TYPE => LongType
    case JavaDouble.TYPE => DoubleType
    case JavaByte.TYPE => ByteType
    case JavaFloat.TYPE => FloatType
    case JavaBoolean.TYPE => BooleanType

    // java classes
    case _ if clz == classOf[LocalDate] => DateType
    case _ if clz == classOf[Date] => DateType
    case _ if clz == classOf[Instant] => TimestampType
    case _ if clz == classOf[Timestamp] => TimestampType
    case _ if clz == classOf[JavaBigDecimal] => DecimalType.SYSTEM_DEFAULT
    case _ if clz == classOf[Array[Byte]] => BinaryType
    case _ if clz == classOf[JavaShort] => ShortType
    case _ if clz == classOf[JavaInteger] => IntegerType
    case _ if clz == classOf[JavaLong] => LongType
    case _ if clz == classOf[JavaDouble] => DoubleType
    case _ if clz == classOf[JavaByte] => ByteType
    case _ if clz == classOf[JavaFloat] => FloatType
    case _ if clz == classOf[JavaBoolean] => BooleanType

    // other scala classes
    case _ if clz == classOf[String] => StringType
    case _ if clz == classOf[BigInt] => DecimalType.SYSTEM_DEFAULT
    case _ if clz == classOf[BigDecimal] => DecimalType.SYSTEM_DEFAULT
    case _ if clz == classOf[CalendarInterval] => CalendarIntervalType

    case _ if clz.isArray => ArrayType(componentTypeToDataType(clz.getComponentType))

    case _ => throw new AnalysisException(s"Unsupported component type $clz in arrays")
  }

  /**
   * Constructs a [[Literal]] of [[ObjectType]], for example when you need to pass an object
   * into code generation.
   */
  def fromObject(obj: Any, objType: DataType): Literal = new Literal(obj, objType)
  def fromObject(obj: Any): Literal = new Literal(obj, ObjectType(obj.getClass))

  // Builds a Literal for an explicitly supplied DataType, converting the
  // external value to its Catalyst representation first.
  def create(v: Any, dataType: DataType): Literal = {
    Literal(CatalystTypeConverters.convertToCatalyst(v), dataType)
  }

  // Type-tag driven variant; falls back to runtime-class inference when
  // schema derivation fails.
  def create[T : TypeTag](v: T): Literal = Try {
    val ScalaReflection.Schema(dataType, _) = ScalaReflection.schemaFor[T]
    val convert = CatalystTypeConverters.createToCatalystConverter(dataType)
    Literal(convert(v), dataType)
  }.getOrElse {
    Literal(v)
  }

  /**
   * Create a literal with default value for given DataType
   */
  def default(dataType: DataType): Literal = dataType match {
    case NullType => create(null, NullType)
    case BooleanType => Literal(false)
    case ByteType => Literal(0.toByte)
    case ShortType => Literal(0.toShort)
    case IntegerType => Literal(0)
    case LongType => Literal(0L)
    case FloatType => Literal(0.0f)
    case DoubleType => Literal(0.0)
    case dt: DecimalType => Literal(Decimal(0, dt.precision, dt.scale))
    case DateType => create(0, DateType)
    case TimestampType => create(0L, TimestampType)
    case StringType => Literal("")
    case BinaryType => Literal("".getBytes(StandardCharsets.UTF_8))
    case CalendarIntervalType => Literal(new CalendarInterval(0, 0, 0))
    case arr: ArrayType => create(Array(), arr)
    case map: MapType => create(Map(), map)
    case struct: StructType =>
      // Recursively fill each field with its own default.
      create(InternalRow.fromSeq(struct.fields.map(f => default(f.dataType).value)), struct)
    case udt: UserDefinedType[_] => Literal(default(udt.sqlType).value, udt)
    case other =>
      throw new RuntimeException(s"no default for type $dataType")
  }

  // Asserts that `value` is already in the internal (Catalyst) representation
  // expected for `dataType`; called from the Literal constructor.
  private[expressions] def validateLiteralValue(value: Any, dataType: DataType): Unit = {
    def doValidate(v: Any, dataType: DataType): Boolean = dataType match {
      case _ if v == null => true
      case BooleanType => v.isInstanceOf[Boolean]
      case ByteType => v.isInstanceOf[Byte]
      case ShortType => v.isInstanceOf[Short]
      case IntegerType | DateType => v.isInstanceOf[Int]
      case LongType | TimestampType => v.isInstanceOf[Long]
      case FloatType => v.isInstanceOf[Float]
      case DoubleType => v.isInstanceOf[Double]
      case _: DecimalType => v.isInstanceOf[Decimal]
      case CalendarIntervalType => v.isInstanceOf[CalendarInterval]
      case BinaryType => v.isInstanceOf[Array[Byte]]
      case StringType => v.isInstanceOf[UTF8String]
      case st: StructType =>
        v.isInstanceOf[InternalRow] && {
          val row = v.asInstanceOf[InternalRow]
          st.fields.map(_.dataType).zipWithIndex.forall {
            case (dt, i) => doValidate(row.get(i, dt), dt)
          }
        }
      case at: ArrayType =>
        // Only the first element is checked; assumes homogeneous arrays.
        v.isInstanceOf[ArrayData] && {
          val ar = v.asInstanceOf[ArrayData]
          ar.numElements() == 0 || doValidate(ar.get(0, at.elementType), at.elementType)
        }
      case mt: MapType =>
        v.isInstanceOf[MapData] && {
          val map = v.asInstanceOf[MapData]
          doValidate(map.keyArray(), ArrayType(mt.keyType)) &&
            doValidate(map.valueArray(), ArrayType(mt.valueType))
        }
      case ObjectType(cls) => cls.isInstance(v)
      case udt: UserDefinedType[_] => doValidate(v, udt.sqlType)
      case _ => false
    }
    require(doValidate(value, dataType),
      s"Literal must have a corresponding value to ${dataType.catalogString}, " +
        s"but class ${Utils.getSimpleName(value.getClass)} found.")
  }
}
/**
 * An extractor that matches non-null literal values
 */
object NonNullLiteral {
  def unapply(literal: Literal): Option[(Any, DataType)] = {
    // Option(null) is None, so null-valued literals fail to match.
    Option(literal.value).map(_ => (literal.value, literal.dataType))
  }
}

/**
 * Extractor for retrieving Float literals.
 */
object FloatLiteral {
  def unapply(a: Any): Option[Float] = a match {
    case Literal(a: Float, FloatType) => Some(a)
    case _ => None
  }
}

/**
 * Extractor for retrieving Double literals.
 */
object DoubleLiteral {
  def unapply(a: Any): Option[Double] = a match {
    case Literal(a: Double, DoubleType) => Some(a)
    case _ => None
  }
}

/**
 * Extractor for retrieving Int literals.
 */
object IntegerLiteral {
  def unapply(a: Any): Option[Int] = a match {
    case Literal(a: Int, IntegerType) => Some(a)
    case _ => None
  }
}

/**
 * Extractor for retrieving String literals.
 */
object StringLiteral {
  def unapply(a: Any): Option[String] = a match {
    // Internal representation is UTF8String; expose it as java.lang.String.
    case Literal(s: UTF8String, StringType) => Some(s.toString)
    case _ => None
  }
}

/**
 * Extractor for and other utility methods for decimal literals.
 */
object DecimalLiteral {
  def apply(v: Long): Literal = Literal(Decimal(v))
  def apply(v: Double): Literal = Literal(Decimal(v))

  def unapply(e: Expression): Option[Decimal] = e match {
    case Literal(v, _: DecimalType) => Some(v.asInstanceOf[Decimal])
    case _ => None
  }

  // Used by optimizer rules to detect decimals outside the Long range.
  def largerThanLargestLong(v: Decimal): Boolean = v > Decimal(Long.MaxValue)
  def smallerThanSmallestLong(v: Decimal): Boolean = v < Decimal(Long.MinValue)
}
/**
 * In order to do type checking, use Literal.create() instead of constructor
 */
case class Literal (value: Any, dataType: DataType) extends LeafExpression {

  // Fails fast if `value` is not in the internal representation for `dataType`.
  Literal.validateLiteralValue(value, dataType)

  override def foldable: Boolean = true
  override def nullable: Boolean = value == null

  override def toString: String = value match {
    case null => "null"
    case binary: Array[Byte] => s"0x" + DatatypeConverter.printHexBinary(binary)
    case other => other.toString
  }

  override def hashCode(): Int = {
    // Arrays need structural hashing; Object.hashCode would be identity-based.
    val valueHashCode = value match {
      case null => 0
      case binary: Array[Byte] => util.Arrays.hashCode(binary)
      case other => other.hashCode()
    }
    31 * Objects.hashCode(dataType) + valueHashCode
  }

  override def equals(other: Any): Boolean = other match {
    case o: Literal if !dataType.equals(o.dataType) => false
    case o: Literal =>
      (value, o.value) match {
        case (null, null) => true
        // Structural comparison for byte arrays, mirroring hashCode above.
        case (a: Array[Byte], b: Array[Byte]) => util.Arrays.equals(a, b)
        case (a, b) => a != null && a.equals(b)
      }
    case _ => false
  }

  override protected def jsonFields: List[JField] = {
    // Turns all kinds of literal values to string in json field, as the type info is hard to
    // retain in json format, e.g. {"a": 123} can be an int, or double, or decimal, etc.
    val jsonValue = (value, dataType) match {
      case (null, _) => JNull
      case (i: Int, DateType) => JString(DateTimeUtils.toJavaDate(i).toString)
      case (l: Long, TimestampType) => JString(DateTimeUtils.toJavaTimestamp(l).toString)
      case (other, _) => JString(other.toString)
    }
    ("value" -> jsonValue) :: ("dataType" -> dataType.jsonValue) :: Nil
  }

  // A literal evaluates to its stored value regardless of the input row.
  override def eval(input: InternalRow): Any = value

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    val javaType = CodeGenerator.javaType(dataType)
    if (value == null) {
      ExprCode.forNullValue(dataType)
    } else {
      def toExprCode(code: String): ExprCode = {
        ExprCode.forNonNullValue(JavaCode.literal(code, dataType))
      }
      dataType match {
        case BooleanType | IntegerType | DateType =>
          toExprCode(value.toString)
        case FloatType =>
          // NaN/Infinity have no Java literal syntax; emit the named constants.
          value.asInstanceOf[Float] match {
            case v if v.isNaN =>
              toExprCode("Float.NaN")
            case Float.PositiveInfinity =>
              toExprCode("Float.POSITIVE_INFINITY")
            case Float.NegativeInfinity =>
              toExprCode("Float.NEGATIVE_INFINITY")
            case _ =>
              toExprCode(s"${value}F")
          }
        case DoubleType =>
          value.asInstanceOf[Double] match {
            case v if v.isNaN =>
              toExprCode("Double.NaN")
            case Double.PositiveInfinity =>
              toExprCode("Double.POSITIVE_INFINITY")
            case Double.NegativeInfinity =>
              toExprCode("Double.NEGATIVE_INFINITY")
            case _ =>
              toExprCode(s"${value}D")
          }
        case ByteType | ShortType =>
          // Java int literals must be cast down to byte/short.
          ExprCode.forNonNullValue(JavaCode.expression(s"($javaType)$value", dataType))
        case TimestampType | LongType =>
          toExprCode(s"${value}L")
        case _ =>
          // Complex/other types are referenced as generated-class fields.
          val constRef = ctx.addReferenceObj("literal", value, javaType)
          ExprCode.forNonNullValue(JavaCode.global(constRef, dataType))
      }
    }
  }

  // Renders the literal as SQL text that round-trips through the parser.
  override def sql: String = (value, dataType) match {
    case (_, NullType | _: ArrayType | _: MapType | _: StructType) if value == null => "NULL"
    case _ if value == null => s"CAST(NULL AS ${dataType.sql})"
    case (v: UTF8String, StringType) =>
      // Escapes all backslashes and single quotes.
      "'" + v.toString.replace("\\", "\\\\").replace("'", "\\'") + "'"
    case (v: Byte, ByteType) => v + "Y"
    case (v: Short, ShortType) => v + "S"
    case (v: Long, LongType) => v + "L"
    // Float type doesn't have a suffix
    case (v: Float, FloatType) =>
      val castedValue = v match {
        case _ if v.isNaN => "'NaN'"
        case Float.PositiveInfinity => "'Infinity'"
        case Float.NegativeInfinity => "'-Infinity'"
        case _ => s"'$v'"
      }
      s"CAST($castedValue AS ${FloatType.sql})"
    case (v: Double, DoubleType) =>
      v match {
        case _ if v.isNaN => s"CAST('NaN' AS ${DoubleType.sql})"
        case Double.PositiveInfinity => s"CAST('Infinity' AS ${DoubleType.sql})"
        case Double.NegativeInfinity => s"CAST('-Infinity' AS ${DoubleType.sql})"
        case _ => v + "D"
      }
    case (v: Decimal, t: DecimalType) => v + "BD"
    case (v: Int, DateType) =>
      val formatter = DateFormatter(DateTimeUtils.getZoneId(SQLConf.get.sessionLocalTimeZone))
      s"DATE '${formatter.format(v)}'"
    case (v: Long, TimestampType) =>
      val formatter = TimestampFormatter.getFractionFormatter(
        DateTimeUtils.getZoneId(SQLConf.get.sessionLocalTimeZone))
      s"TIMESTAMP '${formatter.format(v)}'"
    case (i: CalendarInterval, CalendarIntervalType) =>
      s"INTERVAL '${i.toString}'"
    case (v: Array[Byte], BinaryType) => s"X'${DatatypeConverter.printHexBinary(v)}'"
    case _ => value.toString
  }
}
| dbtsai/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala | Scala | apache-2.0 | 16,127 |
/*
* Copyright (C) 2015 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.sparta.driver.test.util
import java.io.FileNotFoundException
import java.io.InputStream
import scala.util.Failure
import scala.util.Try
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.fs._
import org.junit.runner.RunWith
import org.mockito.Mockito._
import org.scalatest._
import org.scalatest.junit.JUnitRunner
import org.scalatest.mock.MockitoSugar
import com.stratio.sparta.driver.util.HdfsUtils
@RunWith(classOf[JUnitRunner])
class HdfsUtilsTest extends FlatSpec with ShouldMatchers with MockitoSugar {

  // All filesystem interaction goes through a Mockito mock, so no real HDFS
  // is needed; "stratio" is the user passed to HdfsUtils.
  val fileSystem: FileSystem = mock[FileSystem]
  val utils = new HdfsUtils(fileSystem, "stratio")

  "hdfs utils" should "getfiles from a path" in {
    val expected = Array(mock[FileStatus])
    when(fileSystem.listStatus(new Path("myTestPath"))).thenReturn(expected)
    val result = utils.getFiles("myTestPath")
    result should be(expected)
  }

  it should "return single file as inputStream" in {
    val expected: InputStream = mock[FSDataInputStream]
    when(fileSystem.open(new Path("testFile"))).thenReturn(expected.asInstanceOf[FSDataInputStream])
    val result: InputStream = utils.getFile("testFile")
    result should be(expected)
  }

  it should "write" in {
    // The local source file "from" does not exist, so write is expected to
    // fail; the partial match extracts the Failure's exception.
    val result = Try(utils.write("from", "to", true)) match {
      case Failure(ex: Throwable) => ex
    }
    result.isInstanceOf[FileNotFoundException] should be(true)
  }

  it should "write without override" in {
    val result = Try(utils.write("from", "to", false)) match {
      case Failure(ex: Throwable) => ex
    }
    result.isInstanceOf[FileNotFoundException] should be(true)
  }
}
| danielcsant/sparta | driver/src/test/scala/com/stratio/sparta/driver/test/util/HdfsUtilsTest.scala | Scala | apache-2.0 | 2,246 |
package cromwell.util.docker
/**
 * A set of endpoints belonging to a registry.
 *
 * For most registries, such as gcr.io, quay.io, etc., the namespace, the registry v1 hostname, and the registry v2
 * hostname are all the same.
 *
 * Docker Hub is an example registry that uses a different namespace, a different registry v1 hostname, and a
 * different registry v2 hostname.
 *
 * - https://github.com/docker/docker/blob/v1.9.1/registry/config.go#L24-L25
 * - https://github.com/docker/docker/blob/v1.9.1/registry/config_unix.go#L6-L10
 */
object DockerRegistry {
  /**
   * Creates a registry where the namespace, the registry v1 hostname, and the registry v2 hostname are all the same.
   *
   * @param host The host to use as the namespace and registry endpoints.
   * @param login The login information for the registry.
   * @return The DockerRegistry.
   */
  def apply(host: String, login: DockerLoginProvider): DockerRegistry = DockerRegistry(host, host, host, login)
}
/**
 * Creates a registry.
 *
 * @param namespace The namespace of the registry.
 * @param v1Hostname The host for contacting the V1 API.
 * @param v2Hostname The host for contacting the V2 API.
 * @param loginProvider The login information used to authenticate against the registry.
 */
case class DockerRegistry(namespace: String, v1Hostname: String, v2Hostname: String, loginProvider: DockerLoginProvider)
| dgtester/cromwell | src/main/scala/cromwell/util/docker/DockerRegistry.scala | Scala | bsd-3-clause | 1,325 |
package ee.cone
import scala.annotation.StaticAnnotation
package object c4assemble {
  // Marker annotations with no runtime behavior of their own; presumably
  // consumed by the c4 annotation-processing/codegen tooling -- confirm
  // against the generator's docs.
  class assemble extends StaticAnnotation
  class c4assemble(apps: String*) extends StaticAnnotation
  class c4multiAssemble(apps: String*) extends StaticAnnotation
  class fieldAccess extends StaticAnnotation
  class ignore extends StaticAnnotation
  // Factory function type: builds a JoinKey from an IndexFactory.
  type MakeJoinKey = IndexFactory=>JoinKey
}
| conecenter/c4proto | base_lib/src/main/scala/ee/cone/c4assemble/package.scala | Scala | apache-2.0 | 382 |
package com.arcusys.valamis.updaters.version320.schema3203
import com.arcusys.valamis.persistence.common.DbNameUtils._
import com.arcusys.valamis.persistence.common.SlickProfile
// Slick schema for the xAPI agent-profile table; mixed into a concrete
// database profile via the SlickProfile self-type.
trait AgentProfileSchema {
  self: SlickProfile =>

  import driver.api._

  // Row layout: (profileId, agentKey, documentKey).
  type AgentProfileRow = (String, Long, String)

  class AgentProfilesTable(tag: Tag) extends Table[AgentProfileRow](tag, "lrs_agentProfiles") {
    def * = (profileId, agentKey, documentKey)

    def profileId = column[String]("profileId", O.SqlType(varCharMax))
    def agentKey = column[Long]("agentKey")
    // documentKey is sized for a UUID string (see uuidKeyLength).
    def documentKey = column[String]("documentKey", O.SqlType(uuidKeyLength))
  }

  // Query entry point for the table.
  lazy val agentProfiles = TableQuery[AgentProfilesTable]
}
| arcusys/Valamis | valamis-updaters/src/main/scala/com/arcusys/valamis/updaters/version320/schema3203/AgentProfileSchema.scala | Scala | gpl-3.0 | 714 |
package mlbigbook.ml
import breeze.linalg.DenseVector
import mlbigbook.math.MathVectorOps
import org.scalatest.FunSuite
import scala.language.reflectiveCalls
class NearestNeighborsTest extends FunSuite {

  import NearestNeighborsTest._
  import fif.ImplicitCollectionsData._

  // With k = 1, the nearest neighbor of any training item must be the item
  // itself (distance zero), so retrieval should return exactly that item.
  test("Sanity check: 1-NN on train set evaluates to input item") {
    val rank = nn.mkRanker(distance, stringVectorizer)(data)
    data foreach { item =>
      val retrieved = rank(1)(item)
      assert(retrieved.size === 1)
      assert(retrieved.head === item)
    }
  }
}
object NearestNeighborsTest {

  // NOTE(review): `FloatDenseVot` looks like a truncated identifier
  // (FloatDenseVector...?) -- confirm it matches MathVectorOps.Implicits.
  val nn = NearestNeighbors[String, Float, DenseVector](
    MathVectorOps.Implicits.FloatDenseVot
  )

  // Reuse the KnnClassifierTest fixtures, dropping the labels.
  val data = KnnClassifierTest.data.map { case (ws, _) => ws }
  val stringVectorizer: nn.Vectorizer = KnnClassifierTest.stringVectorizer
  val distance: nn.Distance = KnnClassifierTest.distance
}
| malcolmgreaves/bigmlbook | fp4ml-main/src/test/scala/mlbigbook/ml/NearestNeighborsTest.scala | Scala | lgpl-3.0 | 889 |
package mesosphere.marathon
package raml
// RAML <-> protobuf conversions for placement constraints.
trait ConstraintConversion {

  // RAML Constraint -> protobuf Constraint.
  implicit val constraintRamlReader: Reads[Constraint, Protos.Constraint] = Reads { raml =>
    val operator = raml.operator match {
      case ConstraintOperator.Unique => Protos.Constraint.Operator.UNIQUE
      case ConstraintOperator.Cluster => Protos.Constraint.Operator.CLUSTER
      case ConstraintOperator.GroupBy => Protos.Constraint.Operator.GROUP_BY
      case ConstraintOperator.Like => Protos.Constraint.Operator.LIKE
      case ConstraintOperator.Unlike => Protos.Constraint.Operator.UNLIKE
      case ConstraintOperator.MaxPer => Protos.Constraint.Operator.MAX_PER
      case ConstraintOperator.Is => Protos.Constraint.Operator.IS
    }
    val builder = Protos.Constraint.newBuilder().setField(raml.fieldName).setOperator(operator)
    raml.value.foreach(builder.setValue)
    builder.build()
  }

  // Raw app-format constraint [field, operator(, value)] -> protobuf.
  implicit val appConstraintRamlReader: Reads[Seq[String], Protos.Constraint] = Reads { raw =>
    // this is not a substite for validation, but does ensure that we're not translating invalid operators
    def validOperator(op: String): Boolean = ConstraintConversion.ValidOperators.contains(op)

    val result: Protos.Constraint = (raw.lift(0), raw.lift(1), raw.lift(2)) match {
      case (Some(field), Some(op), None) if validOperator(op) =>
        Protos.Constraint
          .newBuilder()
          .setField(field)
          .setOperator(Protos.Constraint.Operator.valueOf(op))
          .build()
      case (Some(field), Some(op), Some(value)) if validOperator(op) =>
        Protos.Constraint
          .newBuilder()
          .setField(field)
          .setOperator(Protos.Constraint.Operator.valueOf(op))
          .setValue(value)
          .build()
      case _ => throw SerializationFailedException(s"illegal constraint specification ${raw.mkString(",")}")
    }
    result
  }

  // protobuf Constraint -> RAML Constraint.
  implicit val constraintRamlWriter: Writes[Protos.Constraint, Constraint] = Writes { c =>
    val operator = c.getOperator match {
      case Protos.Constraint.Operator.UNIQUE => ConstraintOperator.Unique
      case Protos.Constraint.Operator.CLUSTER => ConstraintOperator.Cluster
      case Protos.Constraint.Operator.GROUP_BY => ConstraintOperator.GroupBy
      case Protos.Constraint.Operator.LIKE => ConstraintOperator.Like
      case Protos.Constraint.Operator.UNLIKE => ConstraintOperator.Unlike
      case Protos.Constraint.Operator.MAX_PER => ConstraintOperator.MaxPer
      case Protos.Constraint.Operator.IS => ConstraintOperator.Is
    }
    Constraint(c.getField, operator, if (c.hasValue) Some(c.getValue) else None)
  }

  // protobuf Constraint -> raw app format: [field, operator] or
  // [field, operator, value] when a value is present.
  implicit val constraintToSeqStringWrites: Writes[Protos.Constraint, Seq[String]] = Writes { constraint =>
    val builder = Seq.newBuilder[String]
    builder += constraint.getField
    builder += constraint.getOperator.name
    if (constraint.hasValue) builder += constraint.getValue
    builder.result()
  }
}
object ConstraintConversion extends ConstraintConversion {
  // Names of all protobuf operators, used to reject unknown operator strings
  // before calling Operator.valueOf (which would throw).
  val ValidOperators: Set[String] = Protos.Constraint.Operator.values().iterator.map(_.toString).toSet
}
| mesosphere/marathon | src/main/scala/mesosphere/marathon/raml/ConstraintConversion.scala | Scala | apache-2.0 | 3,094 |
/* Copyright 2009-2016 EPFL, Lausanne */
import leon.annotation._
import leon.lang._
object FoolProofAdder {
  /** Adds two large positive constants to a strictly positive input.
   *
   *  Since every operand is positive and BigInt cannot overflow, the
   *  `ensuring` postcondition (result > 0) always holds.
   */
  def foolProofAdder(x: BigInt): BigInt = {
    require(x > 0)
    val smallBoost = BigInt(999999)
    val largeBoost = BigInt("999999999999999")
    x + smallBoost + largeBoost
  } ensuring(_ > 0)
}
| epfl-lara/leon | src/test/resources/regression/verification/purescala/valid/FoolProofAdder.scala | Scala | gpl-3.0 | 249 |
/*
* Copyright 2009-2011 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.json4s
import org.specs2.mutable.Specification
import text.Document
// Run the shared merge examples against both JSON backends.
object NativeMergeExamples extends MergeExamples[Document]("Native") with native.JsonMethods
object JacksonMergeExamples extends MergeExamples[JValue]("Jackson") with jackson.JsonMethods

// Verifies JValue `merge` semantics: scalars on the right win, objects merge
// recursively, and arrays are merged as sets (elements unioned, order kept).
abstract class MergeExamples[T](mod: String) extends Specification with JsonMethods[T] {

  (mod+" Merge Examples") should {
    "Merge example" in {
      (scala1 merge scala2) must_== expectedMergeResult
    }
  }

  lazy val scala1 = parse("""
    {
      "lang": "scala",
      "year": 2006,
      "tags": ["fp", "oo"],
      "features": {
        "key1":"val1",
        "key2":"val2"
      }
    }""")

  lazy val scala2 = parse("""
    {
      "tags": ["static-typing","fp"],
      "compiled": true,
      "lang": "scala",
      "features": {
        "key2":"newval2",
        "key3":"val3"
      }
    }""")

  lazy val expectedMergeResult = parse("""
    {
      "lang": "scala",
      "year": 2006,
      "tags": ["fp", "oo", "static-typing"],
      "features": {
        "key1":"val1",
        "key2":"newval2",
        "key3":"val3"
      },
      "compiled": true
    }""")

  "Lotto example" in {
    (lotto1 merge lotto2) must_== mergedLottoResult
  }

  lazy val lotto1 = parse("""
    {
      "lotto":{
        "lotto-id":5,
        "winning-numbers":[2,45,34,23,7,5,3],
        "winners":[{
          "winner-id":23,
          "numbers":[2,45,34,23,3,5]
        }]
      }
    }""")

  lazy val lotto2 = parse("""
    {
      "lotto":{
        "winners":[{
          "winner-id":54,
          "numbers":[52,3,12,11,18,22]
        }]
      }
    }""")

  lazy val mergedLottoResult = parse("""
    {
      "lotto":{
        "lotto-id":5,
        "winning-numbers":[2,45,34,23,7,5,3],
        "winners":[{
          "winner-id":23,
          "numbers":[2,45,34,23,3,5]
        },{
          "winner-id":54,
          "numbers":[52,3,12,11,18,22]
        }]
      }
    }""")
}
| geggo98/json4s | tests/src/test/scala/org/json4s/MergeExamples.scala | Scala | apache-2.0 | 2,564 |
package number
import common.StorageTestBase
import common.randomHelpers.LogarithmicDistribution
import common.randomHelpers.UniformDistribution
import common.storage._
import number.api.SkillState
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import scala.util.Random
import scala.util.Sorting
import java.nio.file.Path
import java.lang.Runtime;
/**
 * This test is used to find storage overuse by the generated code.
 *
 * run with: -XX:MaxHeapFreeRatio=99 -Xmx8G -Xms4G
 * @author Jonathan Roth
 */
@RunWith(classOf[JUnitRunner])
class StorageTest extends StorageTestBase[SkillState]("number") {

  // Hooks required by StorageTestBase: how to create, populate, serialize
  // and re-read a state for the "number" specification.
  override def getMainObject = StorageTest
  override def create = SkillState.create
  override def createElements(σ: SkillState, n: Int) = for (i ← 0 until n) σ.Number(i)
  override def write(σ: SkillState, f: Path) = σ.write(f)
  override def read(f: Path) = SkillState.read(f)
  override def createMoreElements(σ: SkillState, n: Int) = createElements(σ, n)
  override def append(σ: SkillState) = σ.append

  test("Randomized storage test")
  {
    // Collapse repeated measurements with Math.max, i.e. record peak usage.
    val createRes = CollapsedResult("create", Math.max)
    val writeRes = CollapsedResult("write", Math.max)
    val readRes = CollapsedResult("read", Math.max)
    val createMoreRes = CollapsedResult("create more", Math.max)
    val appendRes = CollapsedResult("append", Math.max)

    val random = new Random
    // Fixed seed keeps runs reproducible.
    random.setSeed(31948)
    randomizedTest(100, new UniformDistribution(random, 1, 30000000), "-Xmx8G",
        createAndWrite(Some(createRes), Some(writeRes)),
        readAndAppend(Some(readRes), Some(createMoreRes), Some(appendRes)))
    SingleValueResult.saveGraph("results/storageTest/number.tex", "Number test", "axis", "only marks",
        Seq(createRes, writeRes, readRes, createMoreRes, appendRes))
  }
}
// Companion used by the test harness to spawn this suite in external JVMs.
object StorageTest extends StorageTestBase.ExternalTest[SkillState] {
  def createTest = new StorageTest
}
| XyzNobody/skillScalaTestSuite | src/test/scala/number/StorageTest.scala | Scala | bsd-3-clause | 1,924 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset and retrieves a sample of Scala code snippets that meet specific criteria, providing a basic overview of the dataset's contents without revealing deeper insights.