code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package com.rumblesan.reaktor
import scalaz._, Scalaz._
/**
 * Repeatedly polls `source` and feeds each value to `handler` at a fixed rate.
 *
 * @param desiredLoopsPerSecond target number of iterations per second
 * @param source                producer polled once per iteration
 * @param handler               consumer invoked with each produced value
 * @param maxRepeats            Some(n): run exactly n iterations then stop;
 *                              None: run until `running` is set to false
 */
class EventSource[OutputEvent](
  desiredLoopsPerSecond: Int,
  source: () => OutputEvent,
  handler: OutputEvent => Unit,
  maxRepeats: Option[Int]
) {

  // Volatile so a stop requested from another thread is seen by the loop
  // thread started in `run`.
  @volatile var running = true

  /** Runs the polling loop on the calling thread until stopped or exhausted. */
  def funcLoop() = {
    var lastLoopTime: Long = System.nanoTime()
    // Nanoseconds each iteration should take to hit the requested rate.
    val optimalTime: Long = 1000000000L / desiredLoopsPerSecond.toLong
    var lastFpsTime: Long = 0L
    var lps: Int = 0
    // Remaining iterations; -1 means "unbounded".
    var remaining = maxRepeats.getOrElse(-1)
    // Some(0) means "do nothing": don't enter the loop at all.
    if (remaining == 0) running = false

    while (running) {
      val now: Long = System.nanoTime()
      val updateLength: Long = now - lastLoopTime
      lastLoopTime = now

      // Report the achieved loops-per-second once every second.
      lastFpsTime += updateLength
      lps += 1
      if (lastFpsTime >= 1000000000L) {
        println(s"LPS: $lps")
        lastFpsTime = 0L
        lps = 0
      }

      handler(source())

      // Count down bounded runs; stop after exactly `maxRepeats` iterations.
      if (remaining > 0) {
        remaining -= 1
        if (remaining == 0) running = false
      }

      // We want each frame to take `optimalTime` ns. We recorded when this
      // frame started, add the frame budget, and subtract the current time to
      // get how long to sleep. Sleep takes ms, our timestamps are ns; a
      // negative result means we're behind schedule, so don't sleep at all
      // (Thread.sleep throws IllegalArgumentException on negative input).
      val sleepMillis = (lastLoopTime - System.nanoTime() + optimalTime) / 1000000
      if (sleepMillis > 0) {
        try Thread.sleep(sleepMillis)
        catch {
          case _: InterruptedException =>
            // Preserve the interrupt flag and shut the loop down cleanly.
            Thread.currentThread().interrupt()
            running = false
        }
      }
    }
  }

  /** Starts `funcLoop` on a freshly spawned background thread. */
  def run = {
    val loop: Thread = new Thread() {
      override def run() = funcLoop()
    }
    loop.start()
  }
}
| rumblesan/reaktor | src/main/scala/reaktor/EventSource.scala | Scala | mit | 1,591 |
package skinny.time
import org.joda.time._
import skinny.util.DateTimeUtil
// Sentinel paired with the trailing StringContext part when zipping parts
// with params (parts is always one element longer than params).
private[time] object LastParam

// Marker for which joda-time type an interpolated literal should parse to.
private[time] sealed trait JodaType
private[time] case object DateTimeType extends JodaType
private[time] case object LocalDateType extends JodaType
private[time] case object LocalTimeType extends JodaType
/**
* String interpolation as a factory of joda-time values.
*/
/**
 * String interpolation as a factory of joda-time values.
 *
 * Supports literal date/time strings, relative expressions such as
 * "3 days ago" / "2 hours later", and the keywords "now", "today",
 * "yesterday" and "tomorrow".
 */
class DateTimeInterpolationString(val s: StringContext) extends AnyVal {

  // DateTime
  def joda(params: Any*): DateTime = jodaDateTime(params: _*)
  def jodaDateTime(params: Any*): DateTime = DateTimeUtil.parseDateTime(buildInterpolatedString(params, DateTimeType))

  // LocalDate
  def jodaLocalDate(params: Any*): LocalDate =
    DateTimeUtil.parseLocalDate(buildInterpolatedString(params, LocalDateType))
  def jodaDate(params: Any*): LocalDate = jodaLocalDate(params: _*)

  // LocalTime
  def jodaLocalTime(params: Any*): LocalTime =
    DateTimeUtil.parseLocalTime(buildInterpolatedString(params, LocalTimeType))
  def jodaTime(params: Any*): LocalTime = jodaLocalTime(params: _*)

  private def string(d: DateTime) = DateTimeUtil.toString(d)
  private def string(d: LocalDate) = DateTimeUtil.toString(d)
  private def string(d: LocalTime) = DateTimeUtil.toString(d)

  /**
   * Interpolates the params into the literal (String and Number params only),
   * then rewrites relative expressions and keywords into parseable strings.
   * Anything unrecognized is returned verbatim for the parser to handle.
   */
  private def buildInterpolatedString(params: Seq[Any], jodaType: JodaType): String = {
    val str = s.parts
      .zipAll(params, "", LastParam)
      .foldLeft(new StringBuilder) {
        case (sb, (previousQueryPart, LastParam)) => sb ++= previousQueryPart
        case (sb, (previousQueryPart, param)) =>
          sb ++= previousQueryPart ++=
            Option(param)
              .map {
                case s: String => s
                case n: Number => n.toString
                case v =>
                  throw new IllegalArgumentException(
                    s"${v} (type: ${v.getClass.getCanonicalName}) is not allowed. Use String or number value instead."
                  )
              }
              .getOrElse("")
      }
      .toString

    // NOTE: inside triple-quoted strings a single backslash reaches the regex
    // engine as-is, so the patterns below must use \d and \s (not \\d / \\s,
    // which would match a literal backslash and never fire).
    (str, jodaType) match {
      case (str, _) if str.matches("""^\d+\s+years?\s+(ago|later)$""") =>
        str.split("\\s+") match {
          case Array(amount, _, "ago") => string(DateTime.now.minusYears(amount.toInt))
          case Array(amount, _, "later") => string(DateTime.now.plusYears(amount.toInt))
        }
      case (str, _) if str.matches("""^\d+\s+months?\s+(ago|later)$""") =>
        str.split("\\s+") match {
          case Array(amount, _, "ago") => string(DateTime.now.minusMonths(amount.toInt))
          case Array(amount, _, "later") => string(DateTime.now.plusMonths(amount.toInt))
        }
      case (str, _) if str.matches("""^\d+\s+days?\s+(ago|later)$""") =>
        str.split("\\s+") match {
          case Array(amount, _, "ago") => string(DateTime.now.minusDays(amount.toInt))
          case Array(amount, _, "later") => string(DateTime.now.plusDays(amount.toInt))
        }
      case (str, _) if str.matches("""^\d+\s+hours?\s+(ago|later)$""") =>
        str.split("\\s+") match {
          case Array(amount, _, "ago") => string(DateTime.now.minusHours(amount.toInt))
          case Array(amount, _, "later") => string(DateTime.now.plusHours(amount.toInt))
        }
      case (str, _) if str.matches("""^\d+\s+minutes?\s+(ago|later)$""") =>
        str.split("\\s+") match {
          case Array(amount, _, "ago") => string(DateTime.now.minusMinutes(amount.toInt))
          case Array(amount, _, "later") => string(DateTime.now.plusMinutes(amount.toInt))
        }
      case (str, _) if str.matches("""^\d+\s+seconds?\s+(ago|later)$""") =>
        str.split("\\s+") match {
          case Array(amount, _, "ago") => string(DateTime.now.minusSeconds(amount.toInt))
          case Array(amount, _, "later") => string(DateTime.now.plusSeconds(amount.toInt))
        }
      // "today"/"yesterday"/"tomorrow" are date-only, so LocalDate is used
      // even for the DateTime flavor (midnight implied by the parser).
      case ("now", DateTimeType) => string(DateTime.now)
      case ("now", LocalDateType) => string(LocalDate.now)
      case ("now", LocalTimeType) => string(LocalTime.now)
      case ("today", DateTimeType) => string(LocalDate.now)
      case ("today", LocalDateType) => string(LocalDate.now)
      case ("yesterday", DateTimeType) => string(LocalDate.now.minusDays(1))
      case ("yesterday", LocalDateType) => string(LocalDate.now.minusDays(1))
      case ("tomorrow", DateTimeType) => string(LocalDate.now.plusDays(1))
      case ("tomorrow", LocalDateType) => string(LocalDate.now.plusDays(1))
      case _ => str
    }
  }
}
| seratch/skinny-framework | common/src/main/scala/skinny/time/DateTimeInterpolationString.scala | Scala | mit | 4,665 |
package part3testing
import akka.actor.{Actor, ActorLogging, ActorSystem, Props}
import akka.testkit.{EventFilter, ImplicitSender, TestKit}
import com.typesafe.config.ConfigFactory
import org.scalatest.{BeforeAndAfterAll, WordSpecLike}
/**
 * Demonstrates asserting on log output with akka-testkit's EventFilter.
 * NOTE(review): the "interceptingLogMessages" config section must install
 * akka.testkit.TestEventListener as the logger for EventFilter to see
 * events — confirm against the test configuration.
 */
class InterceptingLogsSpec extends TestKit(ActorSystem("InterceptingLogsSpec", ConfigFactory.load().getConfig("interceptingLogMessages")))
  with ImplicitSender
  with WordSpecLike
  with BeforeAndAfterAll {

  // Tear the actor system down so suites don't leak threads between runs.
  override def afterAll(): Unit = {
    TestKit.shutdownActorSystem(system)
  }

  import InterceptingLogsSpec._

  // Fixture data shared by the test cases below.
  val item = "My Course"
  val creditCard = "4111111111111111"

  "A checkout flow" should {
    "correctly log the dispatch of an order" in {
      // Scans for log messages at level INFO.
      // It waits only for 3s before time-ing out.
      EventFilter.info(pattern = s"Order [0-9]+ for item $item has been dispatched.", occurrences = 1) intercept {
        val checkoutRef = system.actorOf(Props[CheckoutActor])
        checkoutRef ! Checkout(item, creditCard)
      }
    }
  }
}
/**
 * Message protocol and actors exercised by the spec above: a checkout flow
 * that authorizes a payment, then dispatches and confirms an order.
 */
object InterceptingLogsSpec {

  // Protocol messages.
  case class Checkout(item: String, creditCart: String)
  case class AuthorizeCard(creditCard: String)
  case object PaymentAccepted
  case object PaymentRefused
  case class DispatchOrder(item: String)
  case object OrderConfirmed

  /** Orchestrates one checkout at a time via three behavior states. */
  class CheckoutActor extends Actor {
    private val paymentManager = context.actorOf(Props[PaymentManager])
    private val fulfillmentManager = context.actorOf(Props[FulfillmentManager])

    override def receive: Receive = awaitingCheckout

    // State 1: idle, waiting for a checkout request.
    def awaitingCheckout: Receive = {
      case Checkout(item, card) =>
        paymentManager ! AuthorizeCard(card)
        context.become(pendingPayment(item))
    }

    // State 2: payment authorization in flight for `item`.
    def pendingPayment(item: String): Receive = {
      case PaymentAccepted =>
        fulfillmentManager ! DispatchOrder(item)
        context.become(pendingFulfillment(item))
      case PaymentRefused => // @TODO
    }

    // State 3: order dispatched, waiting for confirmation.
    def pendingFulfillment(item: String): Receive = {
      case OrderConfirmed => context.become(awaitingCheckout)
    }
  }

  /** Refuses any card number starting with "0", accepts everything else. */
  class PaymentManager extends Actor {
    override def receive: Receive = {
      case AuthorizeCard(card) if card.startsWith("0") => sender() ! PaymentRefused
      case AuthorizeCard(_)                            => sender() ! PaymentAccepted
    }
  }

  /** Assigns order ids and logs each dispatch (the line the spec intercepts). */
  class FulfillmentManager extends Actor with ActorLogging {
    var orderId = 42

    override def receive: Receive = {
      case DispatchOrder(item) =>
        orderId += 1
        log.info(s"Order $orderId for item $item has been dispatched.")
        sender() ! OrderConfirmed
    }
  }
}
package molt.syntax.cnf
import molt.syntax.cfg.ASTTag
import molt.syntax.cfg.ASTNormalTag
import molt.syntax.cfg.ASTEmptyTag
import molt.util.Memoize
import molt.syntax.cnf._
import molt.syntax.cfg.CNFConversionTag
import CNFConversionTag._
import ordered._
// some silliness hehe. categorification for semiring parsing
// additionally requires the times to respect the product. maybe this is necessary from first principles?
// ...this seems way sketchy actually. hmmm. challenge is to generalize semiring parsing to streams.
// The main interesting thing that I'm trying to generalize is that it's NOT that each production is assigned an element of the semiring.
// rather, it is assigned an ENDOMORPHISM on the semiring (which, as it happens, you can get by multiplying by an element.)
// but now we see that the categorical generalization would simply allow you morphisms to different types. But which ones?
// well, we can product anything in the CKY algorithm, and the shape of what things we have determines what morphisms (production rules) we can apply.
// but we actually ONLY use + when we have the SAME symbol / same type we're working with.
// So we only require that each OBJECT in the category has a COMMUTATIVE SEMIGROUP associated with it.
import scalaz.\/
/**
 * Abstraction for generalized semiring parsing: a monoidal structure where
 * each object carries a commutative semigroup (`plus`) and products combine
 * across objects (`times`). See the discussion in the comments above.
 */
trait MonoidalCategoryOfCommutativeSemigroups[F[_]] {
  // identity of plus, annihilates times
  def empty[A]: F[A]
  // commutative and associative
  def plus[A](fa1: F[A], fa2: F[A]): F[A]
  // distributes over plus
  def times[A, B](fa: F[A], fb: F[B]): F[(A, B)]
}
// a production rule corresponds to a function F[A] -> F[B] (for unary productions) or F[(A, B)] -> F[C] (for binary productions).
// This means If F is a functor, you can define your production rule functions as just A -> B or (A, B) -> C.
// and if you really want to go crazy you can put monadic production stuff on TOP of this... woof.
// note: the above is not quite induced by MonadPlus.
// But it is induced by Const of a Semiring? Seems so to me...
/**
 * Placeholder for a generalized CKY chart parser over the semiring-like
 * structure sketched in [[MonoidalCategoryOfCommutativeSemigroups]].
 *
 * A previous draft implementation (nullable-tag computation via the
 * Lange/Leiss algorithm, null/unit parse enumeration, and a memoized CKY
 * chart producing ordered streams of [[molt.syntax.cnf.CNFAST]] trees) was
 * left here fully commented out. That dead code has been removed; it remains
 * available in version-control history if the draft is ever resurrected.
 */
class GeneralizedCKYParser[A](
  // val cnfGrammar: CNFGrammar[A],
  // val schedulingParams: SmartParseParameters[CNFAST[A]]
) {
  // Intentionally empty: see the class Scaladoc.
}
| julianmichael/molt | molt/shared/src/main/scala/molt/syntax/cnf/GeneralizedCKYParser.scala | Scala | mit | 10,131 |
package mesosphere.marathon
package core.event
import akka.actor.{ActorRef, ActorSystem, Props}
import akka.event.EventStream
import akka.stream.Materializer
import mesosphere.marathon.core.election.ElectionService
import mesosphere.marathon.core.event.impl.stream._
import mesosphere.marathon.metrics.Metrics
import mesosphere.marathon.plugin.auth.{Authenticator, Authorizer}
import org.eclipse.jetty.servlets.EventSourceServlet
/**
* Exposes everything necessary to provide an internal event stream, an HTTP events stream and HTTP event callbacks.
*/
/**
 * Exposes everything necessary to provide an internal event stream, an HTTP events stream and HTTP event callbacks.
 */
class EventModule(
  metrics: Metrics,
  eventBus: EventStream,
  actorSystem: ActorSystem,
  conf: EventConf,
  deprecatedFeatureSet: DeprecatedFeatureSet,
  electionService: ElectionService,
  authenticator: Authenticator,
  authorizer: Authorizer)(implicit val materializer: Materializer) {

  /** Actor fanning events out to connected HTTP event-stream handles. */
  lazy val httpEventStreamActor: ActorRef = {
    val maxOutstanding = conf.eventStreamMaxOutstandingMessages()

    // One handler actor per connected stream handle, each bounded by
    // `maxOutstanding` buffered messages.
    def propsForHandle(handle: HttpEventStreamHandle): Props =
      Props(new HttpEventStreamHandleActor(handle, eventBus, maxOutstanding))

    val streamActorProps = Props(
      new HttpEventStreamActor(
        electionService.leadershipTransitionEvents,
        new HttpEventStreamActorMetrics(metrics),
        propsForHandle))

    actorSystem.actorOf(streamActorProps, "HttpEventStream")
  }

  /** Servlet serving the event stream as HTTP server-sent events. */
  lazy val httpEventStreamServlet: EventSourceServlet =
    new HttpEventStreamServlet(metrics, httpEventStreamActor, conf, authenticator, authorizer)
}
/** Copyright 2015 TappingStone, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prediction.data.webhooks.segmentio
import io.prediction.data.webhooks.{ConnectorException, JsonConnector}
import org.json4s._
/**
 * Converts segment.io webhook payloads into the internal event JSON format.
 * (Mis-encoded `⇒`/`→` arrows in this block have been restored to `=>`/`->`.)
 */
private[prediction] object SegmentIOConnector extends JsonConnector {

  import scala.util.control.NonFatal

  //private lazy val supportedAPI = Vector("2", "2.0", "2.0.0")

  implicit val json4sFormats: Formats = DefaultFormats

  /**
   * Dispatches on the payload's "type" field and converts to event JSON.
   *
   * @throws ConnectorException when the version field is missing, the common
   *                            fields cannot be extracted, or the type is
   *                            unknown/unconvertible.
   */
  override
  def toEventJson(data: JObject): JObject = {
    try {
      // Accessing "version" fails fast when the field is absent.
      val version: String = data.values("version").toString
      /*
      if (!supportedAPI.contains(version)) {
        throw new ConnectorException(
          s"Supported segment.io API versions: [2]. got [$version]"
        )
      }
      */
    } catch { case NonFatal(_) =>
      // NonFatal (not Throwable) so fatal errors like OOM still propagate.
      throw new ConnectorException(s"Failed to get segment.io API version.")
    }

    val common = try {
      data.extract[Common]
    } catch {
      case NonFatal(e) => throw new ConnectorException(
        s"Cannot extract Common field from $data. ${e.getMessage}", e
      )
    }

    try {
      common.`type` match {
        case "identify" =>
          toEventJson(
            common = common,
            identify = data.extract[Events.Identify]
          )
        case "track" =>
          toEventJson(
            common = common,
            track = data.extract[Events.Track]
          )
        case "alias" =>
          toEventJson(
            common = common,
            alias = data.extract[Events.Alias]
          )
        case "page" =>
          toEventJson(
            common = common,
            page = data.extract[Events.Page]
          )
        case "screen" =>
          toEventJson(
            common = common,
            screen = data.extract[Events.Screen]
          )
        case "group" =>
          toEventJson(
            common = common,
            group = data.extract[Events.Group]
          )
        case _ =>
          throw new ConnectorException(
            s"Cannot convert unknown type ${common.`type`} to event JSON."
          )
      }
    } catch {
      case e: ConnectorException => throw e
      case NonFatal(e) =>
        throw new ConnectorException(
          s"Cannot convert $data to event JSON. ${e.getMessage}", e
        )
    }
  }

  def toEventJson(common: Common, identify: Events.Identify): JObject = {
    import org.json4s.JsonDSL._
    val eventProperties = "traits" -> identify.traits
    toJson(common, eventProperties)
  }

  def toEventJson(common: Common, track: Events.Track): JObject = {
    import org.json4s.JsonDSL._
    val eventProperties =
      ("properties" -> track.properties) ~
        ("event" -> track.event)
    toJson(common, eventProperties)
  }

  def toEventJson(common: Common, alias: Events.Alias): JObject = {
    import org.json4s.JsonDSL._
    toJson(common, "previous_id" -> alias.previous_id)
  }

  def toEventJson(common: Common, screen: Events.Screen): JObject = {
    import org.json4s.JsonDSL._
    val eventProperties =
      ("name" -> screen.name) ~
        ("properties" -> screen.properties)
    toJson(common, eventProperties)
  }

  def toEventJson(common: Common, page: Events.Page): JObject = {
    import org.json4s.JsonDSL._
    val eventProperties =
      ("name" -> page.name) ~
        ("properties" -> page.properties)
    toJson(common, eventProperties)
  }

  def toEventJson(common: Common, group: Events.Group): JObject = {
    import org.json4s.JsonDSL._
    val eventProperties =
      ("group_id" -> group.group_id) ~
        ("traits" -> group.traits)
    toJson(common, eventProperties)
  }

  // Merges the event-specific properties with the common fields.
  private def toJson(common: Common, props: JObject): JsonAST.JObject = {
    val commonFields = commonToJson(common)
    JObject(("properties" -> properties(common, props)) :: commonFields.obj)
  }

  // Folds the optional "context" object into the event properties.
  private def properties(common: Common, eventProps: JObject): JObject = {
    import org.json4s.JsonDSL._
    common.context map { context =>
      try {
        ("context" -> Extraction.decompose(context)) ~ eventProps
      } catch {
        case NonFatal(e) =>
          throw new ConnectorException(
            s"Cannot convert $context to event JSON. ${e.getMessage}", e
          )
      }
    } getOrElse eventProps
  }

  private def commonToJson(common: Common): JObject =
    commonToJson(common, common.`type`)

  // `user_id` wins over `anonymous_id` as the event's entityId.
  private def commonToJson(common: Common, typ: String): JObject = {
    import org.json4s.JsonDSL._
    common.user_id.orElse(common.anonymous_id) match {
      case Some(userId) =>
        ("event" -> typ) ~
          ("entityType" -> "user") ~
          ("entityId" -> userId) ~
          ("eventTime" -> common.timestamp)
      case None =>
        throw new ConnectorException(
          "there was no `userId` or `anonymousId` in the common fields."
        )
    }
  }
}
/** Per-type payload shapes for segment.io messages. */
object Events {

  // "track": a named user action with optional free-form properties.
  private[prediction] case class Track(
    event: String,
    properties: Option[JObject] = None
  )

  // "alias": links a previous identity to a user id.
  private[prediction] case class Alias(previous_id: String, user_id: String)

  // "group": associates the user with a group, with optional group traits.
  private[prediction] case class Group(
    group_id: String,
    traits: Option[JObject] = None
  )

  // "screen": a screen view with optional name and properties.
  private[prediction] case class Screen(
    name: Option[String] = None,
    properties: Option[JObject] = None
  )

  // "page": a page view with optional name and properties.
  private[prediction] case class Page(
    name: Option[String] = None,
    properties: Option[JObject] = None
  )

  // "identify": ties traits to a known user id.
  private[prediction] case class Identify(
    user_id: String,
    traits: Option[JObject]
  )
}
/** Nested structures referenced from the common fields of every message. */
object Common {

  // Per-destination on/off switches from the "integrations" object.
  private[prediction] case class Integrations(
    All: Boolean = false,
    Mixpanel: Boolean = false,
    Marketo: Boolean = false,
    Salesforse: Boolean = false
  )

  // The "context" object; ip, library and user_agent are required here,
  // everything else is optional.
  private[prediction] case class Context(
    ip: String,
    library: Library,
    user_agent: String,
    app: Option[App] = None,
    campaign: Option[Campaign] = None,
    device: Option[Device] = None,
    network: Option[Network] = None,
    location: Option[Location] = None,
    os: Option[OS] = None,
    referrer: Option[Referrer] = None,
    screen: Option[Screen] = None,
    timezone: Option[String] = None
  )

  private[prediction] case class Screen(width: Int, height: Int, density: Int)

  private[prediction] case class Referrer(id: String, `type`: String)

  private[prediction] case class OS(name: String, version: String)

  private[prediction] case class Location(
    city: Option[String] = None,
    country: Option[String] = None,
    latitude: Option[Double] = None,
    longitude: Option[Double] = None,
    speed: Option[Int] = None
  )

  // NOTE(review): unlike its siblings this class is public — confirm whether
  // that is intentional.
  case class Page(
    path: String,
    referrer: String,
    search: String,
    title: String,
    url: String
  )

  private[prediction] case class Network(
    bluetooth: Option[Boolean] = None,
    carrier: Option[String] = None,
    cellular: Option[Boolean] = None,
    wifi: Option[Boolean] = None
  )

  private[prediction] case class Library(name: String, version: String)

  private[prediction] case class Device(
    id: Option[String] = None,
    advertising_id: Option[String] = None,
    ad_tracking_enabled: Option[Boolean] = None,
    manufacturer: Option[String] = None,
    model: Option[String] = None,
    name: Option[String] = None,
    `type`: Option[String] = None,
    token: Option[String] = None
  )

  private[prediction] case class Campaign(
    name: Option[String] = None,
    source: Option[String] = None,
    medium: Option[String] = None,
    term: Option[String] = None,
    content: Option[String] = None
  )

  private[prediction] case class App(
    name: Option[String] = None,
    version: Option[String] = None,
    build: Option[String] = None
  )
}
// Fields shared by every segment.io message type. Conversion requires at
// least one of `user_id`/`anonymous_id` (user_id wins when both are set).
private[prediction] case class Common(
  `type`: String,
  sent_at: String,
  timestamp: String,
  version: String,
  anonymous_id: Option[String] = None,
  user_id: Option[String] = None,
  context: Option[Common.Context] = None,
  integrations: Option[Common.Integrations] = None
)
| jasonchaffee/PredictionIO | data/src/main/scala/io/prediction/data/webhooks/segmentio/SegmentIOConnector.scala | Scala | apache-2.0 | 8,447 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.examples.fixture.flatspec.sharing
import java.util.concurrent.ConcurrentHashMap
import org.scalatest._
import DbServer._
import java.util.UUID.randomUUID
/** Simulates a database server: named mutable string buffers in a registry. */
object DbServer {
  type Db = StringBuffer
  private val databases = new ConcurrentHashMap[String, Db]

  /** Registers a fresh, empty database under `name` (replacing any existing one) and returns it. */
  def createDb(name: String): Db = {
    val db = new StringBuffer
    databases.put(name, db)
    db
  }

  /** Unregisters the database under `name`; silently does nothing when absent. */
  def removeDb(name: String): Unit = {
    databases.remove(name)
  }
}
/**
 * Loan-pattern fixture: creates a uniquely named Db per test, lets the suite
 * populate it, loans it to the test, and always removes it afterwards.
 */
trait DbFixture { this: fixture.TestSuite =>

  type FixtureParam = Db

  // Allow clients to populate the database after
  // it is created; default is a no-op.
  def populateDb(db: Db): Unit = {}

  def withFixture(test: OneArgTest): Outcome = {
    val dbName = randomUUID.toString
    val db = createDb(dbName) // create the fixture
    try {
      populateDb(db) // setup the fixture
      withFixture(test.toNoArgTest(db)) // "loan" the fixture to the test
    }
    finally removeDb(dbName) // clean up the fixture
  }
}
/** Example suite showing the DbFixture loan pattern in FlatSpec style. */
class ExampleSpec extends fixture.FlatSpec with DbFixture {

  // Seed every test database with a shared prefix.
  override def populateDb(db: Db): Unit = {
    db.append("ScalaTest is ")
  }

  "Testing" should "be easy" in { db =>
    db.append("easy!")
    assert(db.toString === "ScalaTest is easy!")
  }

  it should "be fun" in { db =>
    db.append("fun!")
    assert(db.toString === "ScalaTest is fun!")
  }

  // This test doesn't need a Db, so it takes no fixture argument.
  "Test code" should "be clear" in { () =>
    val buf = new StringBuffer
    buf.append("ScalaTest code is ")
    buf.append("clear!")
    assert(buf.toString === "ScalaTest code is clear!")
  }
}
| dotty-staging/scalatest | examples/src/test/scala/org/scalatest/examples/fixture/flatspec/sharing/ExampleSpec.scala | Scala | apache-2.0 | 2,210 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler.cluster.mesos
import java.io.File
import java.util.{ArrayList => JArrayList, Collections, List => JList}
import scala.collection.JavaConverters._
import scala.collection.mutable.{HashMap, HashSet}
import org.apache.mesos.Protos.{ExecutorInfo => MesosExecutorInfo, TaskInfo => MesosTaskInfo, _}
import org.apache.mesos.SchedulerDriver
import org.apache.mesos.protobuf.ByteString
import org.apache.spark.{SparkContext, SparkException, TaskState}
import org.apache.spark.deploy.mesos.config
import org.apache.spark.executor.MesosExecutorBackend
import org.apache.spark.scheduler._
import org.apache.spark.scheduler.cluster.ExecutorInfo
import org.apache.spark.util.Utils
/**
* A SchedulerBackend for running fine-grained tasks on Mesos. Each Spark task is mapped to a
* separate Mesos task, allowing multiple applications to share cluster nodes both in space (tasks
* from multiple apps can run on different cores) and in time (a core can switch ownership).
*/
private[spark] class MesosFineGrainedSchedulerBackend(
scheduler: TaskSchedulerImpl,
sc: SparkContext,
master: String)
extends SchedulerBackend
with org.apache.mesos.Scheduler
with MesosSchedulerUtils {
// Stores the slave ids that has launched a Mesos executor.
val slaveIdToExecutorInfo = new HashMap[String, MesosExecutorInfo]
val taskIdToSlaveId = new HashMap[Long, String]
// An ExecutorInfo for our tasks
var execArgs: Array[Byte] = null
var classLoader: ClassLoader = null
// The listener bus to publish executor added/removed events.
val listenerBus = sc.listenerBus
private[mesos] val mesosExecutorCores = sc.conf.getDouble("spark.mesos.mesosExecutor.cores", 1)
// Offer constraints
private[this] val slaveOfferConstraints =
parseConstraintString(sc.conf.get("spark.mesos.constraints", ""))
// reject offers with mismatched constraints in seconds
private val rejectOfferDurationForUnmetConstraints =
getRejectOfferDurationForUnmetConstraints(sc.conf)
private var schedulerDriver: SchedulerDriver = _
@volatile var appId: String = _
// Creates and starts the Mesos scheduler driver for this backend.
override def start() {
  // Capture the context class loader; presumably restored around Mesos
  // callbacks elsewhere in this class — TODO confirm (only the assignment
  // is visible here).
  classLoader = Thread.currentThread.getContextClassLoader
  val driver = createSchedulerDriver(
    master,
    MesosFineGrainedSchedulerBackend.this,
    sc.sparkUser,
    sc.appName,
    sc.conf,
    // Prefer an explicitly configured web UI URL, else the live UI's URL.
    sc.conf.getOption("spark.mesos.driver.webui.url").orElse(sc.ui.map(_.webUrl)),
    Option.empty,
    Option.empty,
    sc.conf.getOption("spark.mesos.driver.frameworkId")
  )

  unsetFrameworkID(sc)
  startScheduler(driver)
}
/**
* Creates a MesosExecutorInfo that is used to launch a Mesos executor.
*
* @param availableResources Available resources that is offered by Mesos
* @param execId The executor id to assign to this new executor.
* @return A tuple of the new mesos executor info and the remaining available resources.
*/
def createExecutorInfo(
availableResources: JList[Resource],
execId: String): (MesosExecutorInfo, JList[Resource]) = {
val executorSparkHome = sc.conf.getOption("spark.mesos.executor.home")
.orElse(sc.getSparkHome()) // Fall back to driver Spark home for backward compatibility
.getOrElse {
throw new SparkException("Executor Spark home `spark.mesos.executor.home` is not set!")
}
val environment = Environment.newBuilder()
sc.conf.getOption("spark.executor.extraClassPath").foreach { cp =>
environment.addVariables(
Environment.Variable.newBuilder().setName("SPARK_EXECUTOR_CLASSPATH").setValue(cp).build())
}
val extraJavaOpts = sc.conf.getOption("spark.executor.extraJavaOptions").getOrElse("")
val prefixEnv = sc.conf.getOption("spark.executor.extraLibraryPath").map { p =>
Utils.libraryPathEnvPrefix(Seq(p))
}.getOrElse("")
environment.addVariables(
Environment.Variable.newBuilder()
.setName("SPARK_EXECUTOR_OPTS")
.setValue(extraJavaOpts)
.build())
sc.executorEnvs.foreach { case (key, value) =>
environment.addVariables(Environment.Variable.newBuilder()
.setName(key)
.setValue(value)
.build())
}
val command = CommandInfo.newBuilder()
.setEnvironment(environment)
val uri = sc.conf.getOption("spark.executor.uri")
.orElse(Option(System.getenv("SPARK_EXECUTOR_URI")))
val executorBackendName = classOf[MesosExecutorBackend].getName
if (uri.isEmpty) {
val executorPath = new File(executorSparkHome, "/bin/spark-class").getPath
command.setValue(s"$prefixEnv $executorPath $executorBackendName")
} else {
// Grab everything to the first '.'. We'll use that and '*' to
// glob the directory "correctly".
val basename = uri.get.split('/').last.split('.').head
command.setValue(s"cd ${basename}*; $prefixEnv ./bin/spark-class $executorBackendName")
command.addUris(CommandInfo.URI.newBuilder().setValue(uri.get))
}
val builder = MesosExecutorInfo.newBuilder()
val (resourcesAfterCpu, usedCpuResources) =
partitionResources(availableResources, "cpus", mesosExecutorCores)
val (resourcesAfterMem, usedMemResources) =
partitionResources(resourcesAfterCpu.asJava, "mem", executorMemory(sc))
builder.addAllResources(usedCpuResources.asJava)
builder.addAllResources(usedMemResources.asJava)
sc.conf.getOption("spark.mesos.uris").foreach(setupUris(_, command))
val executorInfo = builder
.setExecutorId(ExecutorID.newBuilder().setValue(execId).build())
.setCommand(command)
.setData(ByteString.copyFrom(createExecArg()))
executorInfo.setContainer(
MesosSchedulerBackendUtil.buildContainerInfo(sc.conf))
(executorInfo.build(), resourcesAfterMem.asJava)
}
/**
* Create and serialize the executor argument to pass to Mesos. Our executor arg is an array
* containing all the spark.* system properties in the form of (String, String) pairs.
*/
private def createExecArg(): Array[Byte] = {
if (execArgs == null) {
val props = new HashMap[String, String]
for ((key, value) <- sc.conf.getAll) {
props(key) = value
}
// Serialize the map as an array of (String, String) pairs
execArgs = Utils.serialize(props.toArray)
}
execArgs
}
override def offerRescinded(d: org.apache.mesos.SchedulerDriver, o: OfferID) {}
override def registered(
driver: org.apache.mesos.SchedulerDriver,
frameworkId: FrameworkID,
masterInfo: MasterInfo) {
inClassLoader() {
appId = frameworkId.getValue
logInfo("Registered as framework ID " + appId)
this.schedulerDriver = driver
markRegistered()
}
}
private def inClassLoader()(fun: => Unit) = {
val oldClassLoader = Thread.currentThread.getContextClassLoader
Thread.currentThread.setContextClassLoader(classLoader)
try {
fun
} finally {
Thread.currentThread.setContextClassLoader(oldClassLoader)
}
}
override def disconnected(d: org.apache.mesos.SchedulerDriver) {}
override def reregistered(d: org.apache.mesos.SchedulerDriver, masterInfo: MasterInfo) {}
private def getTasksSummary(tasks: JArrayList[MesosTaskInfo]): String = {
val builder = new StringBuilder
tasks.asScala.foreach { t =>
builder.append("Task id: ").append(t.getTaskId.getValue).append("\\n")
.append("Slave id: ").append(t.getSlaveId.getValue).append("\\n")
.append("Task resources: ").append(t.getResourcesList).append("\\n")
.append("Executor resources: ").append(t.getExecutor.getResourcesList)
.append("---------------------------------------------\\n")
}
builder.toString()
}
/**
* Method called by Mesos to offer resources on slaves. We respond by asking our active task sets
* for tasks in order of priority. We fill each node with tasks in a round-robin manner so that
* tasks are balanced across the cluster.
*/
override def resourceOffers(d: org.apache.mesos.SchedulerDriver, offers: JList[Offer]) {
inClassLoader() {
// Fail first on offers with unmet constraints
val (offersMatchingConstraints, offersNotMatchingConstraints) =
offers.asScala.partition { o =>
val offerAttributes = toAttributeMap(o.getAttributesList)
val meetsConstraints =
matchesAttributeRequirements(slaveOfferConstraints, offerAttributes)
// add some debug messaging
if (!meetsConstraints) {
val id = o.getId.getValue
logDebug(s"Declining offer: $id with attributes: $offerAttributes")
}
meetsConstraints
}
// These offers do not meet constraints. We don't need to see them again.
// Decline the offer for a long period of time.
offersNotMatchingConstraints.foreach { o =>
d.declineOffer(o.getId, Filters.newBuilder()
.setRefuseSeconds(rejectOfferDurationForUnmetConstraints).build())
}
// Of the matching constraints, see which ones give us enough memory and cores
val (usableOffers, unUsableOffers) = offersMatchingConstraints.partition { o =>
val mem = getResource(o.getResourcesList, "mem")
val cpus = getResource(o.getResourcesList, "cpus")
val slaveId = o.getSlaveId.getValue
val offerAttributes = toAttributeMap(o.getAttributesList)
// check offers for
// 1. Memory requirements
// 2. CPU requirements - need at least 1 for executor, 1 for task
val meetsMemoryRequirements = mem >= executorMemory(sc)
val meetsCPURequirements = cpus >= (mesosExecutorCores + scheduler.CPUS_PER_TASK)
val meetsRequirements =
(meetsMemoryRequirements && meetsCPURequirements) ||
(slaveIdToExecutorInfo.contains(slaveId) && cpus >= scheduler.CPUS_PER_TASK)
val debugstr = if (meetsRequirements) "Accepting" else "Declining"
logDebug(s"$debugstr offer: ${o.getId.getValue} with attributes: "
+ s"$offerAttributes mem: $mem cpu: $cpus")
meetsRequirements
}
// Decline offers we ruled out immediately
unUsableOffers.foreach(o => d.declineOffer(o.getId))
val workerOffers = usableOffers.map { o =>
val cpus = if (slaveIdToExecutorInfo.contains(o.getSlaveId.getValue)) {
getResource(o.getResourcesList, "cpus").toInt
} else {
// If the Mesos executor has not been started on this slave yet, set aside a few
// cores for the Mesos executor by offering fewer cores to the Spark executor
(getResource(o.getResourcesList, "cpus") - mesosExecutorCores).toInt
}
new WorkerOffer(
o.getSlaveId.getValue,
o.getHostname,
cpus)
}.toIndexedSeq
val slaveIdToOffer = usableOffers.map(o => o.getSlaveId.getValue -> o).toMap
val slaveIdToWorkerOffer = workerOffers.map(o => o.executorId -> o).toMap
val slaveIdToResources = new HashMap[String, JList[Resource]]()
usableOffers.foreach { o =>
slaveIdToResources(o.getSlaveId.getValue) = o.getResourcesList
}
val mesosTasks = new HashMap[String, JArrayList[MesosTaskInfo]]
val slavesIdsOfAcceptedOffers = HashSet[String]()
// Call into the TaskSchedulerImpl
val acceptedOffers = scheduler.resourceOffers(workerOffers).filter(!_.isEmpty)
acceptedOffers
.foreach { offer =>
offer.foreach { taskDesc =>
val slaveId = taskDesc.executorId
slavesIdsOfAcceptedOffers += slaveId
taskIdToSlaveId(taskDesc.taskId) = slaveId
val (mesosTask, remainingResources) = createMesosTask(
taskDesc,
slaveIdToResources(slaveId),
slaveId)
mesosTasks.getOrElseUpdate(slaveId, new JArrayList[MesosTaskInfo])
.add(mesosTask)
slaveIdToResources(slaveId) = remainingResources
}
}
// Reply to the offers
val filters = Filters.newBuilder().setRefuseSeconds(1).build() // TODO: lower timeout?
mesosTasks.foreach { case (slaveId, tasks) =>
slaveIdToWorkerOffer.get(slaveId).foreach(o =>
listenerBus.post(SparkListenerExecutorAdded(System.currentTimeMillis(), slaveId,
// TODO: Add support for log urls for Mesos
new ExecutorInfo(o.host, o.cores, Map.empty)))
)
logTrace(s"Launching Mesos tasks on slave '$slaveId', tasks:\\n${getTasksSummary(tasks)}")
d.launchTasks(Collections.singleton(slaveIdToOffer(slaveId).getId), tasks, filters)
}
// Decline offers that weren't used
// NOTE: This logic assumes that we only get a single offer for each host in a given batch
for (o <- usableOffers if !slavesIdsOfAcceptedOffers.contains(o.getSlaveId.getValue)) {
d.declineOffer(o.getId)
}
}
}
/** Turn a Spark TaskDescription into a Mesos task and also resources unused by the task */
def createMesosTask(
task: TaskDescription,
resources: JList[Resource],
slaveId: String): (MesosTaskInfo, JList[Resource]) = {
val taskId = TaskID.newBuilder().setValue(task.taskId.toString).build()
val (executorInfo, remainingResources) = if (slaveIdToExecutorInfo.contains(slaveId)) {
(slaveIdToExecutorInfo(slaveId), resources)
} else {
createExecutorInfo(resources, slaveId)
}
slaveIdToExecutorInfo(slaveId) = executorInfo
val (finalResources, cpuResources) =
partitionResources(remainingResources, "cpus", scheduler.CPUS_PER_TASK)
val taskInfo = MesosTaskInfo.newBuilder()
.setTaskId(taskId)
.setSlaveId(SlaveID.newBuilder().setValue(slaveId).build())
.setExecutor(executorInfo)
.setName(task.name)
.addAllResources(cpuResources.asJava)
.setData(ByteString.copyFrom(TaskDescription.encode(task)))
.build()
(taskInfo, finalResources.asJava)
}
override def statusUpdate(d: org.apache.mesos.SchedulerDriver, status: TaskStatus) {
inClassLoader() {
val tid = status.getTaskId.getValue.toLong
val state = mesosToTaskState(status.getState)
synchronized {
if (TaskState.isFailed(mesosToTaskState(status.getState))
&& taskIdToSlaveId.contains(tid)) {
// We lost the executor on this slave, so remember that it's gone
removeExecutor(taskIdToSlaveId(tid), "Lost executor")
}
if (TaskState.isFinished(state)) {
taskIdToSlaveId.remove(tid)
}
}
scheduler.statusUpdate(tid, state, status.getData.asReadOnlyByteBuffer)
}
}
override def error(d: org.apache.mesos.SchedulerDriver, message: String) {
inClassLoader() {
logError("Mesos error: " + message)
markErr()
scheduler.error(message)
}
}
override def stop() {
if (schedulerDriver != null) {
schedulerDriver.stop()
}
}
override def reviveOffers() {
schedulerDriver.reviveOffers()
}
override def frameworkMessage(
d: org.apache.mesos.SchedulerDriver, e: ExecutorID, s: SlaveID, b: Array[Byte]) {}
/**
* Remove executor associated with slaveId in a thread safe manner.
*/
private def removeExecutor(slaveId: String, reason: String) = {
synchronized {
listenerBus.post(SparkListenerExecutorRemoved(System.currentTimeMillis(), slaveId, reason))
slaveIdToExecutorInfo -= slaveId
}
}
private def recordSlaveLost(
d: org.apache.mesos.SchedulerDriver, slaveId: SlaveID, reason: ExecutorLossReason) {
inClassLoader() {
logInfo("Mesos slave lost: " + slaveId.getValue)
removeExecutor(slaveId.getValue, reason.toString)
scheduler.executorLost(slaveId.getValue, reason)
}
}
override def slaveLost(d: org.apache.mesos.SchedulerDriver, slaveId: SlaveID) {
recordSlaveLost(d, slaveId, SlaveLost())
}
override def executorLost(
d: org.apache.mesos.SchedulerDriver, executorId: ExecutorID, slaveId: SlaveID, status: Int) {
logInfo("Executor lost: %s, marking slave %s as lost".format(executorId.getValue,
slaveId.getValue))
recordSlaveLost(d, slaveId, ExecutorExited(status, exitCausedByApp = true))
}
override def killTask(
taskId: Long, executorId: String, interruptThread: Boolean, reason: String): Unit = {
schedulerDriver.killTask(
TaskID.newBuilder()
.setValue(taskId.toString).build()
)
}
// TODO: query Mesos for number of cores
override def defaultParallelism(): Int = sc.conf.getInt("spark.default.parallelism", 8)
override def applicationId(): String =
Option(appId).getOrElse {
logWarning("Application ID is not initialized yet.")
super.applicationId
}
}
| brad-kaiser/spark | resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosFineGrainedSchedulerBackend.scala | Scala | apache-2.0 | 17,635 |
package is.hail.types.encoded
import is.hail.annotations.Region
import is.hail.asm4s._
import is.hail.expr.ir.{EmitCodeBuilder}
import is.hail.types.BaseType
import is.hail.types.physical._
import is.hail.types.virtual._
import is.hail.io.{InputBuffer, OutputBuffer}
import is.hail.utils._
// Shared singleton instances for the two nullability variants of EInt64.
case object EInt64Optional extends EInt64(false)
case object EInt64Required extends EInt64(true)
/** Encoded type for 64-bit integers; `required` controls whether values may be missing. */
class EInt64(override val required: Boolean) extends EFundamentalType {
  // Emits code that writes the long value to the output buffer.
  def _buildFundamentalEncoder(cb: EmitCodeBuilder, pt: PType, v: Value[_], out: Value[OutputBuffer]): Unit = {
    cb += out.writeLong(coerce[Long](v))
  }
  // Emits code that reads a long from the input buffer.
  def _buildFundamentalDecoder(
    cb: EmitCodeBuilder,
    pt: PType,
    region: Value[Region],
    in: Value[InputBuffer]
  ): Code[Long] = in.readLong()
  // Emits code that skips over one encoded long without materializing it.
  def _buildSkip(cb: EmitCodeBuilder, r: Value[Region], in: Value[InputBuffer]): Unit = cb += in.skipLong()
  // Only PInt64 physical types can be encoded/decoded by this encoded type.
  override def _compatible(pt: PType): Boolean = pt.isInstanceOf[PInt64]
  def _decodedPType(requestedType: Type): PType = PInt64(required)
  def _asIdent = "int64"
  def _toPretty = "EInt64"
  def setRequired(newRequired: Boolean): EInt64 = EInt64(newRequired)
}
object EInt64 {
  /** Returns the shared instance for the requested nullability (optional by default). */
  def apply(required: Boolean = false): EInt64 =
    if (!required) EInt64Optional else EInt64Required
}
| danking/hail | hail/src/main/scala/is/hail/types/encoded/EInt64.scala | Scala | mit | 1,272 |
package observer
trait Commentary {
  /** Receives an updated description string (observer-pattern update hook). */
  def setDesc(desc: String)
}
| BBK-PiJ-2015-67/sdp-portfolio | exercises/week07/src/main/scala/observer/Commentary.scala | Scala | unlicense | 67 |
/*
* SPDX-License-Identifier: Apache-2.0
* Copyright 2016-2020 Daniel Urban and contributors listed in NOTICE.txt
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dev.tauri.choam
package bench
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole
import util._
@Fork(2)
/**
 * Benchmarks transferring an element between two pre-filled queues using three
 * implementations: Rxn-composed Michael-Scott queues, lock-based queues, and STM queues.
 * Each iteration enqueues into queue 1, transfers queue 1 -> queue 2, then dequeues
 * from queue 2 (throwing if the dequeue unexpectedly finds an empty queue).
 */
class QueueTransferBench {
  import QueueTransferBench._
  // Busy-wait tokens consumed (Blackhole.consumeCPU) after each measured iteration.
  final val waitTime = 128L
  // Transfer runs as the pre-composed `transfer` reaction from MsSt.
  @Benchmark
  def michaelScottQueue(s: MsSt, bh: Blackhole, ct: KCASImplState): Unit = {
    import ct.kcasImpl
    bh.consume(s.michaelScottQueue1.enqueue.unsafePerform(ct.nextString()))
    bh.consume(s.transfer.unsafeRun)
    if (s.michaelScottQueue2.tryDeque.unsafeRun eq None) throw Errors.EmptyQueue
    Blackhole.consumeCPU(waitTime)
  }
  // Transfer happens while holding BOTH queue locks, always acquiring queue 1's
  // lock before queue 2's (consistent ordering avoids deadlock between iterations).
  @Benchmark
  def lockedQueue(s: LockedSt, bh: Blackhole, ct: RandomState): Unit = {
    bh.consume(s.lockedQueue1.enqueue(ct.nextString()))
    s.lockedQueue1.lock.lock()
    s.lockedQueue2.lock.lock()
    try {
      val item = s.lockedQueue1.unlockedTryDequeue().get
      bh.consume(s.lockedQueue2.unlockedEnqueue(item))
    } finally {
      s.lockedQueue1.lock.unlock()
      s.lockedQueue2.lock.unlock()
    }
    if (s.lockedQueue2.tryDequeue() eq None) throw Errors.EmptyQueue
    Blackhole.consumeCPU(waitTime)
  }
  // Transfer runs inside a single STM transaction.
  @Benchmark
  def stmQueue(s: StmSt, bh: Blackhole, ct: RandomState): Unit = {
    import scala.concurrent.stm._
    bh.consume(s.stmQueue1.enqueue(ct.nextString()))
    bh.consume(atomic { implicit txn =>
      val item = s.stmQueue1.tryDequeue().get
      s.stmQueue2.enqueue(item)
    })
    if (s.stmQueue2.tryDequeue() eq None) throw Errors.EmptyQueue
    Blackhole.consumeCPU(waitTime)
  }
}
object QueueTransferBench {
  /**
   * Shared state for the Michael-Scott variant: two pre-filled queues plus a
   * pre-composed reaction that dequeues from queue 1 and enqueues into queue 2.
   */
  @State(Scope.Benchmark)
  class MsSt {
    val michaelScottQueue1 = new MichaelScottQueue[String](Prefill.prefill())
    val michaelScottQueue2 = new MichaelScottQueue[String](Prefill.prefill())
    val transfer = michaelScottQueue1.tryDeque.map(_.get) >>> michaelScottQueue2.enqueue
  }
  /** Shared state for the lock-based variant: two pre-filled lock-protected queues. */
  @State(Scope.Benchmark)
  class LockedSt {
    val lockedQueue1 = new LockedQueue[String](Prefill.prefill())
    val lockedQueue2 = new LockedQueue[String](Prefill.prefill())
  }
  /** Shared state for the STM variant: two pre-filled STM queues. */
  @State(Scope.Benchmark)
  class StmSt {
    val stmQueue1 = new StmQueue[String](Prefill.prefill())
    val stmQueue2 = new StmQueue[String](Prefill.prefill())
  }
}
| durban/exp-reagents | bench/src/main/scala/dev/tauri/choam/bench/QueueTransferBench.scala | Scala | apache-2.0 | 2,854 |
/*
* Copyright 2011-2014 Chris de Vreeze
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nl.ebpi.yaidom.perftest
import java.io._
import java.net.URI
import scala.util.Try
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import nl.ebpi.yaidom.parse._
import nl.ebpi.yaidom.core.EName
import nl.ebpi.yaidom.core.Scope
import nl.ebpi.yaidom.resolved
import nl.ebpi.yaidom.simple.Node
/**
* Concrete AbstractMemoryUsageSuite sub-class using "resolved" yaidom Elems.
*
* See the documentation of the super-class for the advice to run this suite in isolation only!
*
* @author Chris de Vreeze
*/
@RunWith(classOf[JUnitRunner])
class MemoryUsageSuiteForResolvedElem extends AbstractMemoryUsageSuite {

  type E = resolved.Elem

  /** Parses each file, converting every document element to a "resolved" elem. */
  protected def parseXmlFiles(files: Vector[File]): Vector[Try[resolved.Elem]] = {
    val docParser = getDocumentParser
    files.map { file =>
      Try(docParser.parse(file)).map(doc => resolved.Elem(doc.documentElement))
    }
  }

  /** Wraps all root elements under a synthetic attribute-less <root> parent. */
  protected def createCommonRootParent(rootElems: Vector[resolved.Elem]): resolved.Elem = {
    resolved.Elem(EName("root"), Map.empty, rootElems)
  }

  protected def maxMemoryToFileLengthRatio: Int = 7
}
| EBPI/yaidom | src/perftest/scala/nl/ebpi/yaidom/perftest/MemoryUsageSuiteForResolvedElem.scala | Scala | apache-2.0 | 1,697 |
package com.danielasfregola.twitter4s
import akka.actor.ActorSystem
import com.danielasfregola.twitter4s.entities.{AccessToken, ConsumerToken}
import com.danielasfregola.twitter4s.http.clients.rest.RestClient
import com.danielasfregola.twitter4s.http.clients.rest.account.TwitterAccountClient
import com.danielasfregola.twitter4s.http.clients.rest.accountactivity.TwitterAccountActivityClient
import com.danielasfregola.twitter4s.http.clients.rest.application.TwitterApplicationClient
import com.danielasfregola.twitter4s.http.clients.rest.blocks.TwitterBlockClient
import com.danielasfregola.twitter4s.http.clients.rest.directmessages.TwitterDirectMessageClient
import com.danielasfregola.twitter4s.http.clients.rest.favorites.TwitterFavoriteClient
import com.danielasfregola.twitter4s.http.clients.rest.followers.TwitterFollowerClient
import com.danielasfregola.twitter4s.http.clients.rest.friends.TwitterFriendClient
import com.danielasfregola.twitter4s.http.clients.rest.friendships.TwitterFriendshipClient
import com.danielasfregola.twitter4s.http.clients.rest.geo.TwitterGeoClient
import com.danielasfregola.twitter4s.http.clients.rest.help.TwitterHelpClient
import com.danielasfregola.twitter4s.http.clients.rest.lists.TwitterListClient
import com.danielasfregola.twitter4s.http.clients.rest.media.TwitterMediaClient
import com.danielasfregola.twitter4s.http.clients.rest.mutes.TwitterMuteClient
import com.danielasfregola.twitter4s.http.clients.rest.savedsearches.TwitterSavedSearchClient
import com.danielasfregola.twitter4s.http.clients.rest.search.TwitterSearchClient
import com.danielasfregola.twitter4s.http.clients.rest.statuses.TwitterStatusClient
import com.danielasfregola.twitter4s.http.clients.rest.suggestions.TwitterSuggestionClient
import com.danielasfregola.twitter4s.http.clients.rest.trends.TwitterTrendClient
import com.danielasfregola.twitter4s.http.clients.rest.users.TwitterUserClient
import com.danielasfregola.twitter4s.util.Configurations._
import com.danielasfregola.twitter4s.util.SystemShutdown
/** Represents the functionalities offered by the Twitter REST API
*/
class TwitterRestClient(val consumerToken: ConsumerToken, val accessToken: AccessToken)(implicit _system: ActorSystem =
                                                                                          ActorSystem("twitter4s-rest"))
    extends RestClients
    with SystemShutdown {

  // Actor system backing the HTTP layer; defaults to a dedicated "twitter4s-rest" system.
  protected val system = _system

  // Single underlying REST client shared by all the mixed-in API clients.
  protected val restClient = new RestClient(consumerToken, accessToken)
}
/** Aggregates all the Twitter REST API client traits into a single mixin. */
trait RestClients
    extends TwitterAccountClient
    with TwitterAccountActivityClient
    with TwitterApplicationClient
    with TwitterBlockClient
    with TwitterDirectMessageClient
    with TwitterFavoriteClient
    with TwitterFollowerClient
    with TwitterFriendClient
    with TwitterFriendshipClient
    with TwitterGeoClient
    with TwitterHelpClient
    with TwitterListClient
    with TwitterMediaClient
    with TwitterMuteClient
    with TwitterSavedSearchClient
    with TwitterSearchClient
    with TwitterStatusClient
    with TwitterSuggestionClient
    with TwitterTrendClient
    with TwitterUserClient
object TwitterRestClient {

  // Reads the consumer and access tokens from the library configuration
  // (see `Configurations`). Factored out: `apply()` and `withActorSystem(system)`
  // previously duplicated this construction verbatim.
  private def tokensFromConfig(): (ConsumerToken, AccessToken) = {
    val consumerToken = ConsumerToken(key = consumerTokenKey, secret = consumerTokenSecret)
    val accessToken = AccessToken(key = accessTokenKey, secret = accessTokenSecret)
    (consumerToken, accessToken)
  }

  /** Creates a client with tokens taken from the configuration, using a default actor system. */
  def apply(): TwitterRestClient = {
    val (consumerToken, accessToken) = tokensFromConfig()
    apply(consumerToken, accessToken)
  }

  /** Creates a client with the given tokens, using a default actor system. */
  def apply(consumerToken: ConsumerToken, accessToken: AccessToken): TwitterRestClient =
    new TwitterRestClient(consumerToken, accessToken)

  /** Creates a client with tokens taken from the configuration, running on `system`. */
  def withActorSystem(system: ActorSystem): TwitterRestClient = {
    val (consumerToken, accessToken) = tokensFromConfig()
    withActorSystem(consumerToken, accessToken)(system)
  }

  /** Creates a client with the given tokens, running on `system`. */
  def withActorSystem(consumerToken: ConsumerToken, accessToken: AccessToken)(system: ActorSystem): TwitterRestClient =
    new TwitterRestClient(consumerToken, accessToken)(system)
}
| DanielaSfregola/twitter4s | src/main/scala/com/danielasfregola/twitter4s/TwitterRestClient.scala | Scala | apache-2.0 | 4,049 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.mllib.feature
import org.apache.spark.Logging
import org.apache.spark.annotation.{DeveloperApi, Since}
import org.apache.spark.mllib.linalg.{DenseVector, SparseVector, Vector, Vectors}
import org.apache.spark.mllib.stat.MultivariateOnlineSummarizer
import org.apache.spark.rdd.RDD
/**
* Standardizes features by removing the mean and scaling to unit std using column summary
* statistics on the samples in the training set.
*
* @param withMean False by default. Centers the data with mean before scaling. It will build a
* dense output, so this does not work on sparse input and will raise an exception.
* @param withStd True by default. Scales the data to unit standard deviation.
*/
@Since("1.1.0")
class StandardScaler @Since("1.1.0") (withMean: Boolean, withStd: Boolean) extends Logging {

  // Parameterless constructor: scale to unit std only (no centering).
  @Since("1.1.0")
  def this() = this(false, true)

  if (!(withMean || withStd)) {
    logWarning("Both withMean and withStd are false. The model does nothing.")
  }

  /**
   * Computes the mean and variance and stores as a model to be used for later scaling.
   *
   * @param data The data used to compute the mean and variance to build the transformation model.
   * @return a StandardScalarModel
   */
  @Since("1.1.0")
  def fit(data: RDD[Vector]): StandardScalerModel = {
    // TODO: skip computation if both withMean and withStd are false
    // Tree-shaped aggregation of per-column summary statistics over the input vectors.
    val summary = data.treeAggregate(new MultivariateOnlineSummarizer)(
      (aggregator, data) => aggregator.add(data),
      (aggregator1, aggregator2) => aggregator1.merge(aggregator2))
    // The model stores per-column standard deviations (sqrt of variance) and means.
    new StandardScalerModel(
      Vectors.dense(summary.variance.toArray.map(v => math.sqrt(v))),
      summary.mean,
      withStd,
      withMean)
  }
}
/**
* Represents a StandardScaler model that can transform vectors.
*
* @param std column standard deviation values
* @param mean column mean values
* @param withStd whether to scale the data to have unit standard deviation
* @param withMean whether to center the data before scaling
*/
@Since("1.1.0")
class StandardScalerModel @Since("1.3.0") (
    @Since("1.3.0") val std: Vector,
    @Since("1.1.0") val mean: Vector,
    @Since("1.3.0") var withStd: Boolean,
    @Since("1.3.0") var withMean: Boolean) extends VectorTransformer {

  /**
   * Auxiliary constructor: enables `withStd`/`withMean` based on which vectors
   * are non-null. At least one of `std`/`mean` must be provided.
   */
  @Since("1.3.0")
  def this(std: Vector, mean: Vector) {
    this(std, mean, withStd = std != null, withMean = mean != null)
    require(this.withStd || this.withMean,
      "at least one of std or mean vectors must be provided")
    if (this.withStd && this.withMean) {
      require(mean.size == std.size,
        "mean and std vectors must have equal size if both are provided")
    }
  }

  @Since("1.3.0")
  def this(std: Vector) = this(std, null)

  @Since("1.3.0")
  @DeveloperApi
  def setWithMean(withMean: Boolean): this.type = {
    require(!(withMean && this.mean == null), "cannot set withMean to true while mean is null")
    this.withMean = withMean
    this
  }

  @Since("1.3.0")
  @DeveloperApi
  def setWithStd(withStd: Boolean): this.type = {
    require(!(withStd && this.std == null),
      "cannot set withStd to true while std is null")
    this.withStd = withStd
    this
  }

  // Since `shift` will be only used in `withMean` branch, we have it as
  // `lazy val` so it will be evaluated in that branch. Note that we don't
  // want to create this array multiple times in `transform` function.
  private lazy val shift: Array[Double] = mean.toArray

  /**
   * Applies standardization transformation on a vector.
   *
   * @param vector Vector to be standardized.
   * @return Standardized vector. If the std of a column is zero, it will return default `0.0`
   *         for the column with zero std.
   */
  @Since("1.1.0")
  override def transform(vector: Vector): Vector = {
    // Bug fix: `mean` is null for models constructed via `this(std: Vector)`,
    // so unconditionally calling `mean.size` threw a NullPointerException for
    // perfectly valid std-only models. Validate against whichever vector exists.
    if (mean != null) {
      require(mean.size == vector.size)
    } else if (std != null) {
      require(std.size == vector.size)
    }
    if (withMean) {
      // By default, Scala generates Java methods for member variables. So every time when
      // the member variables are accessed, `invokespecial` will be called which is expensive.
      // This can be avoid by having a local reference of `shift`.
      val localShift = shift
      vector match {
        case DenseVector(vs) =>
          val values = vs.clone()
          val size = values.size
          if (withStd) {
            // Center and scale in one pass; columns with zero std map to 0.0.
            var i = 0
            while (i < size) {
              values(i) = if (std(i) != 0.0) (values(i) - localShift(i)) * (1.0 / std(i)) else 0.0
              i += 1
            }
          } else {
            // Center only.
            var i = 0
            while (i < size) {
              values(i) -= localShift(i)
              i += 1
            }
          }
          Vectors.dense(values)
        case v => throw new IllegalArgumentException("Do not support vector type " + v.getClass)
      }
    } else if (withStd) {
      vector match {
        case DenseVector(vs) =>
          val values = vs.clone()
          val size = values.size
          var i = 0
          while(i < size) {
            values(i) *= (if (std(i) != 0.0) 1.0 / std(i) else 0.0)
            i += 1
          }
          Vectors.dense(values)
        case SparseVector(size, indices, vs) =>
          // For sparse vector, the `index` array inside sparse vector object will not be changed,
          // so we can re-use it to save memory.
          val values = vs.clone()
          val nnz = values.size
          var i = 0
          while (i < nnz) {
            values(i) *= (if (std(indices(i)) != 0.0) 1.0 / std(indices(i)) else 0.0)
            i += 1
          }
          Vectors.sparse(size, indices, values)
        case v => throw new IllegalArgumentException("Do not support vector type " + v.getClass)
      }
    } else {
      // Note that it's safe since we always assume that the data in RDD should be immutable.
      vector
    }
  }
}
| chenc10/Spark-PAF | mllib/src/main/scala/org/apache/spark/mllib/feature/StandardScaler.scala | Scala | apache-2.0 | 6,610 |
package com.twitter.finagle.mysql.transport
import com.twitter.concurrent.AsyncQueue
import com.twitter.finagle.transport.QueueTransport
import com.twitter.io.Buf
import com.twitter.util.{Await, Duration}
import org.scalatest.funsuite.AnyFunSuite
class MysqlTransportTest extends AnyFunSuite {

  // Raw bytes of an example MySQL server greeting: a 4-byte packet header
  // (length 74, sequence 0) followed by a 74-byte body (protocol version 10,
  // server version string "5.7.21", ...).
  val serverBytes: Array[Byte] = Array(74, 0, 0, 0, 10, 53, 46, 55, 46, 50, 49, 0, -110, 14, 0, 0,
    29, 65, 18, 114, 89, 41, 104, 101, 0, -1, -9, 33, 2, 0, -1, -127, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 37, 39, 12, 74, 93, 61, 77, 119, 52, 29, 4, 101, 0, 109, 121, 115, 113, 108, 95, 110, 97,
    116, 105, 118, 101, 95, 112, 97, 115, 115, 119, 111, 114, 100, 0)

  // Builds a MysqlTransport over in-memory queues; returns the transport together
  // with the queue it reads from and the queue it writes to.
  private def newTransport(): (MysqlTransport, AsyncQueue[Buf], AsyncQueue[Buf]) = {
    val readq = new AsyncQueue[Buf]()
    val writeq = new AsyncQueue[Buf]()
    val qtrans = new QueueTransport(writeq, readq)
    (new MysqlTransport(qtrans.map(_.toBuf, Packet.fromBuf)), readq, writeq)
  }

  test("MysqlTransport understands reading MySQL Packets") {
    val (trans, readq, _) = newTransport()
    readq.offer(Buf.ByteArray.Owned(serverBytes))
    // Read the initial greeting and ensure that it can be understood.
    val packet = Await.result(trans.read(), Duration.fromSeconds(1))
    assert(packet.seq == 0)
    assert(packet.body.length == 74)
  }

  test("MysqlTransport understands writing MySQL Packets") {
    // Closing the transport must write the COM_QUIT command packet.
    val (trans, _, writeq) = newTransport()
    trans.close()
    // https://dev.mysql.com/doc/internals/en/com-quit.html
    val quitBuf = Await.result(writeq.poll(), Duration.fromSeconds(1))
    assert(quitBuf.length == 5)
    // 4-byte header (length 1, sequence 0) followed by the COM_QUIT opcode 0x01.
    val expected = Seq[Byte](0x01, 0x00, 0x00, 0x00, 0x01)
    expected.zipWithIndex.foreach { case (b, i) =>
      assert(quitBuf.get(i) == b)
    }
  }
}
| twitter/finagle | finagle-mysql/src/test/scala/com/twitter/finagle/mysql/unit/transport/MysqlTransportTest.scala | Scala | apache-2.0 | 1,956 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.predictionio.data.storage.jdbc
import java.sql.{DriverManager, ResultSet}
import com.github.nscala_time.time.Imports._
import org.apache.predictionio.data.storage.{DataMap, Event, PEvents, StorageClientConfig}
import org.apache.spark.SparkContext
import org.apache.spark.rdd.{JdbcRDD, RDD}
import org.apache.spark.sql.{SaveMode, SparkSession}
import org.json4s.JObject
import org.json4s.native.Serialization
import scalikejdbc._
/** JDBC implementation of [[PEvents]]: reads, writes and deletes events of an
  * app/channel in parallel via Spark, against the table derived from
  * `namespace`, `appId` and `channelId`.
  */
class JDBCPEvents(client: String, config: StorageClientConfig, namespace: String) extends PEvents {
  // json4s formats for (de)serializing the event `properties` JSON payload.
  @transient private implicit lazy val formats = org.json4s.DefaultFormats
  /** Loads events matching the given filters as an RDD.
    *
    * Time bounds are passed to [[JdbcRDD]] which splits them across
    * partitions and binds them into the two '?' placeholders of the query.
    */
  def find(
    appId: Int,
    channelId: Option[Int] = None,
    startTime: Option[DateTime] = None,
    untilTime: Option[DateTime] = None,
    entityType: Option[String] = None,
    entityId: Option[String] = None,
    eventNames: Option[Seq[String]] = None,
    targetEntityType: Option[Option[String]] = None,
    targetEntityId: Option[Option[String]] = None)(sc: SparkContext): RDD[Event] = {
    // Lower bound in epoch millis; defaults to 0 (beginning of time).
    val lower = startTime.map(_.getMillis).getOrElse(0.toLong)
    /** Change the default upper bound from +100 to +1 year because MySQL's
      * FROM_UNIXTIME(t) will return NULL if we use +100 years.
      */
    val upper = untilTime.map(_.getMillis).getOrElse((DateTime.now + 1.years).getMillis)
    // One partition per day in the queried range, capped by the PARTITIONS
    // storage property (default 4).
    val par = scala.math.min(
      new Duration(upper - lower).getStandardDays,
      config.properties.getOrElse("PARTITIONS", "4").toLong).toInt
    // NOTE(review): these filter clauses are spliced into the SQL by string
    // interpolation, not bound as parameters. Assumes the values come from
    // trusted callers only — verify they can never carry user-controlled
    // input (SQL injection risk).
    val entityTypeClause = entityType.map(x => s"and entityType = '$x'").getOrElse("")
    val entityIdClause = entityId.map(x => s"and entityId = '$x'").getOrElse("")
    val eventNamesClause =
      eventNames.map("and (" + _.map(y => s"event = '$y'").mkString(" or ") + ")").getOrElse("")
    // An explicit Some(None) filter means "must be null" in SQL.
    val targetEntityTypeClause = targetEntityType.map(
      _.map(x => s"and targetEntityType = '$x'"
      ).getOrElse("and targetEntityType is null")).getOrElse("")
    val targetEntityIdClause = targetEntityId.map(
      _.map(x => s"and targetEntityId = '$x'"
      ).getOrElse("and targetEntityId is null")).getOrElse("")
    // The two '?' placeholders are the per-partition time bounds filled in by
    // JdbcRDD; timestampFunction converts the numeric bound to a timestamp.
    val q = s"""
      select
        id,
        event,
        entityType,
        entityId,
        targetEntityType,
        targetEntityId,
        properties,
        eventTime,
        eventTimeZone,
        tags,
        prId,
        creationTime,
        creationTimeZone
      from ${JDBCUtils.eventTableName(namespace, appId, channelId)}
      where
        eventTime >= ${JDBCUtils.timestampFunction(client)}(?) and
        eventTime < ${JDBCUtils.timestampFunction(client)}(?)
        $entityTypeClause
        $entityIdClause
        $eventNamesClause
        $targetEntityTypeClause
        $targetEntityIdClause
      """.replace("\\n", " ")
    new JdbcRDD(
      sc,
      () => {
        DriverManager.getConnection(
          client,
          config.properties("USERNAME"),
          config.properties("PASSWORD"))
      },
      q,
      // Bounds converted from millis to seconds for the timestamp function.
      lower / 1000,
      upper / 1000,
      par,
      // Maps one result row to an Event; properties JSON and tag CSV are
      // parsed back into their structured forms.
      (r: ResultSet) => {
        Event(
          eventId = Option(r.getString("id")),
          event = r.getString("event"),
          entityType = r.getString("entityType"),
          entityId = r.getString("entityId"),
          targetEntityType = Option(r.getString("targetEntityType")),
          targetEntityId = Option(r.getString("targetEntityId")),
          properties = Option(r.getString("properties")).map(x =>
            DataMap(Serialization.read[JObject](x))).getOrElse(DataMap()),
          eventTime = new DateTime(r.getTimestamp("eventTime").getTime,
            DateTimeZone.forID(r.getString("eventTimeZone"))),
          tags = Option(r.getString("tags")).map(x =>
            x.split(",").toList).getOrElse(Nil),
          prId = Option(r.getString("prId")),
          creationTime = new DateTime(r.getTimestamp("creationTime").getTime,
            DateTimeZone.forID(r.getString("creationTimeZone"))))
      }).cache()
  }
  /** Appends the given events to the app/channel event table via the Spark
    * DataFrame JDBC writer.
    */
  def write(events: RDD[Event], appId: Int, channelId: Option[Int])(sc: SparkContext): Unit = {
    val sqlSession = SparkSession.builder().getOrCreate()
    import sqlSession.implicits._
    val tableName = JDBCUtils.eventTableName(namespace, appId, channelId)
    // Column order must match the tuple order produced below.
    val eventsColumnNamesInDF = Seq[String](
      "id"
      , "event"
      , "entityType"
      , "entityId"
      , "targetEntityType"
      , "targetEntityId"
      , "properties"
      , "eventTime"
      , "eventTimeZone"
      , "tags"
      , "prId"
      , "creationTime"
      , "creationTimeZone")
    // Necessary for handling postgres "case-sensitivity"
    val eventsColumnNamesInSQL = JDBCUtils.driverType(client) match {
      case "postgresql" => eventsColumnNamesInDF.map(_.toLowerCase)
      case _ => eventsColumnNamesInDF
    }
    val eventDF = events.map { event =>
      (event.eventId.getOrElse(JDBCUtils.generateId)
        , event.event
        , event.entityType
        , event.entityId
        , event.targetEntityType.orNull
        , event.targetEntityId.orNull
        , if (!event.properties.isEmpty) Serialization.write(event.properties.toJObject) else null
        , new java.sql.Timestamp(event.eventTime.getMillis)
        , event.eventTime.getZone.getID
        // NOTE(review): this column mixes Some(...) with null, unlike the
        // plain-string-or-null columns above — confirm the DataFrame encoder
        // serializes it as intended.
        , if (event.tags.nonEmpty) Some(event.tags.mkString(",")) else null
        , event.prId
        , new java.sql.Timestamp(event.creationTime.getMillis)
        , event.creationTime.getZone.getID)
    }.toDF(eventsColumnNamesInSQL:_*)
    val prop = new java.util.Properties
    prop.setProperty("user", config.properties("USERNAME"))
    prop.setProperty("password", config.properties("PASSWORD"))
    eventDF.write.mode(SaveMode.Append).jdbc(client, tableName, prop)
  }
  /** Deletes events by id; each partition gets its own connection and runs
    * its deletes in a single local transaction.
    */
  def delete(eventIds: RDD[String], appId: Int, channelId: Option[Int])(sc: SparkContext): Unit = {
    eventIds.foreachPartition{ iter =>
      DB(
        DriverManager.getConnection(
          client,
          config.properties("USERNAME"),
          config.properties("PASSWORD"))
      ) localTx { implicit session =>
        val tableName = JDBCUtils.eventTableName(namespace, appId, channelId)
        // Table name cannot be a bind parameter; inserted as raw SQL syntax.
        val table = SQLSyntax.createUnsafely(tableName)
        iter.foreach { eventId =>
          sql"""
          delete from $table where id = $eventId
          """.update().apply()
        }
      }
    }
  }
}
| takezoe/incubator-predictionio | storage/jdbc/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCPEvents.scala | Scala | apache-2.0 | 7,148 |
/*
* Copyright 2012 Eike Kettner
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.eknet.publet.auth
import org.apache.shiro.authc.{DisabledAccountException, AuthenticationInfo}
import org.apache.shiro.subject.SimplePrincipalCollection
import org.eknet.publet.auth.store.{UserProperty, User}
/**
* @author Eike Kettner eike.kettner@gmail.com
* @since 21.10.12 23:40
*/
final case class UserAuthcInfo(user: User, realmName: String) extends AuthenticationInfo {
  // Refuse to construct authentication info for a disabled account.
  if (!user.isEnabled)
    throw new DisabledAccountException(s"Account disabled for '${user.login}'.")

  // The user's login is the sole principal, scoped to the given realm.
  def getPrincipals = new SimplePrincipalCollection(user.login, realmName)

  // Stored (hashed) password, or null when the user record has none.
  def getCredentials = user.get(UserProperty.password).orNull

  /** Optional hash algorithm associated with the stored credentials. */
  def algorithm: Option[String] = user.get(UserProperty.algorithm)
}
| eikek/publet | auth/src/main/scala/org/eknet/publet/auth/UserAuthcInfo.scala | Scala | apache-2.0 | 1,301 |
package scala.lms
package epfl
package test13
import common._
import internal._
import test1._
import test7.{Print,PrintExp,ScalaGenPrint}
import test7.{ArrayLoops,ArrayLoopsExp,ScalaGenArrayLoops}
import test8._
import test10._
import util.OverloadHack
import java.io.{PrintWriter,StringWriter,FileOutputStream}
import scala.reflect.SourceContext
/** A named mutable reference cell.
  *
  * `value` starts at the type's default (null / 0) and [[set]] returns the
  * cell itself so updates can be chained.
  */
case class RCell[T](tag: String) {
  var value: T = _

  /** Stores `x` in the cell and returns this cell. */
  def set(x: T): RCell[T] = {
    value = x
    this
  }
}
/** Staged interface for mutable reference cells ([[RCell]]) inside the DSL. */
trait CellOps extends Base {
  // A staged cell is a Rep of the runtime RCell.
  type Cell[T] = Rep[RCell[T]]
  implicit def cellTyp[T:Typ]: Typ[RCell[T]]
  // Creates a fresh cell identified by `tag`.
  def cell[T:Typ](tag: String): Cell[T]
  // `c set x` / `c get` operations on staged cells.
  def infix_set[T:Typ](c: Cell[T], x: Rep[T]): Rep[Unit]
  def infix_get[T:Typ](c: Cell[T]): Rep[T]
}
/** Expression-level implementation of [[CellOps]]: IR node definitions plus
  * their smart constructors.
  */
trait CellOpsExp extends CellOps with BaseExp with StaticDataExp {
  implicit def cellTyp[T:Typ]: Typ[RCell[T]] = {
    implicit val ManifestTyp(m) = typ[T]
    ManifestTyp(implicitly)
  }
  // IR nodes for cell creation, assignment and dereference.
  case class CellInit[T](tag: String, x: Rep[T]) extends Def[RCell[T]]
  case class CellSet[T](c: Cell[T], x: Rep[T]) extends Def[Unit]
  case class CellGet[T](c: Cell[T]) extends Def[T]
  // Cells are embedded as static data rather than staged as CellInit nodes.
  def cell[T:Typ](tag: String): Cell[T] = staticData(new RCell[T](tag))//reflectMutable(CellInit(tag, x))
  // A write is tracked as an effect on `c`; a read is a plain node.
  def infix_set[T:Typ](c: Cell[T], x: Rep[T]): Rep[Unit] = reflectWrite(c)(CellSet(c,x))
  def infix_get[T:Typ](c: Cell[T]): Rep[T] = CellGet(c)
}
/** Scala code generation for the cell IR nodes of [[CellOpsExp]]. */
trait ScalaGenCellOps extends ScalaGenBase {
  val IR: CellOpsExp
  import IR._
  override def emitNode(sym: Sym[Any], rhs: Def[Any]) = rhs match {
    // Each cell op is emitted as a call on the runtime RCell class.
    case CellInit(tag,x) => emitValDef(sym, "scala.lms.epfl.test13.RCell[" + remap(x.tp) + "](\"" + tag + "\")")
    case CellSet(c,x) => emitValDef(sym, quote(c) + ".set(" + quote(x) + ")")
    case CellGet(c) => emitValDef(sym, quote(c) + ".value")
    case _ => super.emitNode(sym, rhs)
  }
}
/** Interface for runtime ("dynamic") compilation of staged functions.
  *
  * The free symbols of a closure are captured explicitly (see [[freesyms]])
  * so they can be re-bound when compilation happens at runtime.
  */
trait CompileDyn extends Base with Compile {
  def dcompile[A:Typ,B:Typ](fv: List[Rep[Any]])(f: Rep[A] => Rep[B]): Rep[A=>B]
  // Convenience overload: free symbols discovered by reflection.
  def dcompile[A:Typ,B:Typ](f: Rep[A] => Rep[B]): Rep[A=>B] = dcompile(freesyms(f))(f)
  def dlet[A:Typ,B:Typ](x:Rep[A], fv: List[Rep[Any]])(f: A => Rep[B]): Rep[B]
  def dlet[A:Typ,B:Typ](x:Rep[A])(f: A => Rep[B]): Rep[B] = dlet(x, freesyms(f))(f)
  // Unstage: continue code generation at runtime with the actual value of x.
  def unstage[A:Typ,B:Typ](x:Rep[A])(f: A => Rep[B]): Rep[B] = dlet(x)(f)
  // TODO: @cps version of unstage
  def freesyms(x:Any): List[Rep[Any]]
}
/** Implementation of [[CompileDyn]]: builds a runtime callback that re-binds
  * the closure's free symbols as static data and invokes the compiler.
  */
trait CompileDynExp extends CompileDyn with BaseExp with StaticDataExp with UncheckedOpsExp {
  override def toString = "IR:" + getClass.getName
  // Discovers free Sym fields of a closure object via Java reflection.
  def freesyms(x:Any): List[Sym[Any]] = { // switch to syms again ...
    val fields = x.getClass.getDeclaredFields
    fields.foreach(_.setAccessible(true))
    val res = fields.map(_.get(x)).collect{case x: Sym[Any] => x}.toList
    //println("free vars: " + res)
    res
  }
  def dcompile[A:Typ,B:Typ](fv: List[Exp[Any]])(f: Rep[A] => Rep[B]): Rep[A=>B] = {
    // compile { u: Rep[A] => f(u) }
    dcompileInternal[A,Rep[A],B](fv, (u,v) => u)(f)
  }
  def dlet[A:Typ,B:Typ](x:Exp[A], fv: List[Exp[Any]])(f: A => Rep[B]): Rep[B] = {
    // compile { u: Rep[Unit] => f(x) } <--- x is runtime value
    val fc = dcompileInternal[Unit,A,B](x::fv, (u,v) => v.head.asInstanceOf[A])(f) // don't really want x as free var but need lower bound on sym id for fresh ones
    unchecked[B](fc,".apply(())")
  }
  // Core mechanism shared by dcompile/dlet: emits an unchecked call to a
  // runtime callback that resets the IR, re-creates the free symbols as
  // StaticData definitions, and compiles f.
  def dcompileInternal[U:Typ,A,B:Typ](fv: List[Exp[Any]], g: (Rep[U],List[Any]) => A)(f: A => Rep[B]): Rep[U=>B] = {
    // will generate: compile { u => f(g(u)) }
    // the tricky bit: we must insert all free variables as staticData, redefining the corresponding symbols
    val fvIds = fv map { case Sym(i) => i }
    val maxid = (0::fvIds).max + 1
    val callback = { (fvVals: List[Any]) =>
      this.reset
      this.nVars = maxid
      compile { x: Rep[U] =>
        (fv zip fvVals).foreach { case (si:Sym[_],xi) => createDefinition(si, StaticData(xi)) }
        f(g(x,fvVals))
      }
    }
    implicit val ManifestTyp(mA) = typ[U]
    implicit val ManifestTyp(mB) = typ[B]
    implicit val cbTyp: Typ[List[Any] => (U=>B)] = ManifestTyp(implicitly)
    implicit val resTyp: Typ[U=>B] = ManifestTyp(implicitly)
    unchecked[U=>B](staticData(callback),".apply("+fvIds.map(i=>"x"+i)+")","// compile dynamic: fv = ",fv)
    /*unchecked("{import ",IR,"._;\n",
        fvIds.map(i => "val s"+i+" = findDefinition(Sym("+i+")).map(infix_lhs(_).head).getOrElse(Sym("+i+"));\n").mkString, // XX codegen uses identity hash map ...
        IR,".reset;",IR,".nVars="+maxid+"\n", // FIXME: reset harmful ???
        "compile{(x:",atyp,") => \n",
        fvIds.map(i => "createDefinition(s"+i+",StaticData(x"+i+"));\n").mkString,
        "val y = ",f2,".asInstanceOf[",ftyp,"](",g("x"),")\n",
        "println(\"freeVars/globalDefs for function of type "+f.getClass.getName+": "+fv+"\")\n",
        "println(globalDefs)\n",
        "y}}","//",fv) // last comment item necessary for dependency*/
  }
}
/** DSL support for "stable" variables: values that change rarely enough that
  * compiled code may be specialized to their current value and recompiled
  * when they change.
  */
trait StableVars extends CellOps with CompileDyn with Equal with PrimitiveOps with ArrayOps with Compile { self =>
  // A computation that may suspend on a read of a stable cell.
  abstract class Continue[A]
  case class Done[A](x: Rep[A]) extends Continue[A]
  // Suspended read of cell `s`; `fv` are the continuation's free symbols.
  case class ReadValue[A:Typ,B](s: RCell[A], f: A => Continue[B], fv: List[Rep[Any]]) extends Continue[B] { val m = typ[A] }
  def readValue[A:Typ,B](s: RCell[A])(f: A => Rep[B]) = ReadValue(s, (x:A) => Done(f(x)), freesyms(f))
  def readOneValue[A:Typ,B](s: RCell[A])(f: A => Continue[B]) = ReadValue(s, f, freesyms(f))
  def compileStable[A:Typ,B:Typ](f: Rep[A] => Continue[B]): A=>B
}
/** Implementation of [[StableVars]]: lazily compiles a function specialized
  * to the current values of the stable cells it reads, and invalidates the
  * compiled code when a cell's value is observed to have changed.
  */
trait StableVarsExp extends CellOpsExp with CompileDynExp with EffectExp with StaticDataExp with FunctionsExp with StableVars with EqualExpOpt with IfThenElseFatExp with UncheckedOpsExp {
  def compileStable[A:Typ,B:Typ](f: Rep[A] => Continue[B]): A=>B = {
    // Holds the currently compiled function; null means "needs (re)compile".
    val codeHolder = RCell[A=>B]("code")
    // Compiles a Continue chain, emitting a guard for each stable-cell read.
    def compPart[A:Typ](m: Continue[A]): Rep[A] = m match {
      case e@ReadValue(s,f:((a)=>Continue[A]), fv) =>
        implicit val m = e.m
        val s2 = staticData(s)
        println("read value " + s + " sym " + s2)
        val s2val = s2.get
        if (s2val == staticData(s.value)) {
          // Cell unchanged: specialize to the value seen at compile time.
          compPart(f(s.value))
        } else {
          // Cell changed: invalidate the compiled function and continue via
          // a runtime-compiled continuation (OSR-style).
          staticData(codeHolder).set(unit(null))
          // TODO: we're not *really* specializing the continuation yet,
          // just using s2val as static data (we should use unit(..))
          //val compiledCont = dcompile(s2val::fv)((x:Rep[a]) => compPart(f(s2val))) // <---- should specialize this to new value! (OSR!!)
          //println("compiled " + compiledCont)
          //doApply(compiledCont, s2val)
          // BETTER YET: have f take static arg instead of Rep
          dlet(s2val,fv)(z => compPart(f(z)))
        }
      case Done(c) => c
    }
    // The returned function recompiles on demand, then delegates.
    { x: A =>
      println("call with arg " + x)
      if (codeHolder.value eq null) {
        println("(re) compiling")
        codeHolder.value = compile((x:Rep[A]) => compPart(f(x)))
      }
      val g = codeHolder.value
      g(x)
    }
  }
}
/** File-diff tests for stable variables, dynamic compilation and unstaging.
  * Each test writes generated output under `prefix` and compares it to a
  * checked-in expected file.
  */
class TestStable extends FileDiffSuite {
  val prefix = home + "test-out/epfl/test13-"
  // DSL surface shared by all test programs.
  trait DSL extends VectorOps with LiftPrimitives with PrimitiveOps with OrderingOps with BooleanOps with LiftVariables
    with IfThenElse with While with RangeOps with Print with Compile
    with ArrayOps with CastingOps with StableVars {
    def test(): Unit
  }
  // Expression/codegen implementation; instantiating it runs the test
  // immediately (see `runner.run()` in the constructor body).
  trait Impl extends DSL with VectorExp with OrderingOpsExpOpt with BooleanOpsExp
    with EqualExpOpt with IfThenElseFatExp with LoopsFatExp with WhileExp
    with RangeOpsExp with PrintExp with FatExpressions with CompileScala
    with SeqOpsExp with StringOpsExp
    with PrimitiveOpsExpOpt with ArrayOpsExp with CastingOpsExp with StaticDataExp
    with StableVarsExp { self =>
    override val verbosity = 1
    dumpGeneratedCode = true
    val codegen = new Codegen { val IR: self.type = self }
    val runner = new Runner { val p: self.type = self }
    runner.run()
  }
  trait Codegen extends ScalaGenVector with ScalaGenOrderingOps with ScalaGenBooleanOps
    with ScalaGenVariables with ScalaGenEqual with ScalaGenIfThenElse with ScalaGenWhile
    with ScalaGenRangeOps with ScalaGenPrint with ScalaGenFunctions
    with ScalaGenPrimitiveOps with ScalaGenArrayOps with ScalaGenCastingOps with ScalaGenStaticData
    with ScalaGenCellOps with ScalaGenUncheckedOps {
    val IR: Impl
  }
  trait Runner {
    val p: Impl
    def run() = {
      p.test()
    }
  }
  // Unstaging: the continuation is compiled at runtime against the actual
  // value of a+b.
  def testUnstage = withOutFileChecked(prefix+"unstage1") {
    trait Prog extends DSL with Functions with StaticData {
      def test() = {
        val f = compile { x: Rep[Int] =>
          val a = x + 1
          val b = x * 2
          // specialize continuation at runtime to value of a+b
          unstage(a+b) { y: Int =>
            val z = unit(y) * (a + b)
            z
          }
        }
        println(f(9))
        println(f(3))
        println(f(1))
      }
    }
    new Prog with Impl
  }
  // Dynamic compilation: an inner function is compiled at runtime and reads
  // the stable cell through static data.
  def testStable1 = withOutFileChecked(prefix+"stable1") {
    trait Prog extends DSL with Functions with StaticData {
      def test() = {
        val s = new RCell[Int]("stable")
        s.value = 0
        val f = compile { x: Rep[Int] =>
          val a = x + 1
          val b = x * 2
          // we need to pass them explicitly: lambda lifting
          val g = dcompile { y : Rep[Int] =>
            val z = y * (a + b)
            z
          }
          doApply(g, staticData(s).get)
        }
        s.value = 1
        println(f(9))
        s.value = 5
        println(f(9))
        s.value = 2
        println(f(9))
      }
    }
    new Prog with Impl
  }
  // Stable variables: compiled code is specialized to the cell's value and
  // recompiled when the value changes.
  def testStable2 = withOutFileChecked(prefix+"stable2") {
    trait Prog extends DSL {
      def test() = {
        val s = RCell[Int]("stable")
        s.value = 0
        val f = compileStable { x: Rep[Int] =>
          val a = x + 1
          val b = x * 2
          // specialize to the value of s when first run
          // next time, if s has changed:
          // - recompile the continuation, specializing to new value, branch there
          // - throw away compiled code for outer function, so it will be recompiled next time
          readValue(s) { y =>
            val z = y * (a + b)
            z
          }
        }
        s.value = 1
        println(f(9)) // triggers first full compilation (specialized to s = 1)
        s.value = 5
        println(f(9)) // find out s has changed, triggers OSR compilation, invalidates compiled method
        s.value = 2
        println(f(9)) // triggers second full compilation (specialized to s = 2)
      }
    }
    new Prog with Impl
  }
}
| astojanov/virtualization-lms-core | test-src/epfl/test13-dynamic-jit/TestStable.scala | Scala | bsd-3-clause | 10,816 |
/**
* Copyright 2015 Thomson Reuters
*
* Licensed under the Apache License, Version 2.0 (the βLicenseβ); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an βAS ISβ BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cmwell.tools.data.downloader.consumer
import akka.actor.{Actor, Cancellable, OneForOneStrategy, PoisonPill, Props, SupervisorStrategy}
import akka.actor.Actor.Receive
import akka.actor.SupervisorStrategy.Resume
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.Uri.Query
import akka.http.scaladsl.model.{HttpRequest, HttpResponse, Uri}
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Sink, Source}
import cmwell.tools.data.utils.ArgsManipulations.{formatHost, HttpAddress}
import cmwell.tools.data.utils.akka._
import cmwell.tools.data.utils.text.Tokens
import akka.pattern._
import cmwell.tools.data.utils.ArgsManipulations
import cmwell.tools.data.utils.logging.DataToolsLogging
import scala.concurrent.duration._
import scala.util.Success
/**
* Created by matan on 20/3/17.
*/
/*
class ConsumerStatsActor(baseUrl: String,
initToken: String,
params: String = "",
override val label: Option[String] = None) extends Actor with DataToolsLogging {
case object Status
case object Reset
var counter = 0L
var expectedNumInfotons = 0L
var currentToken: String = initToken
var cancellableStatus: Cancellable = _
var numInfotonsFromPreviousTokens = 0L
var requestedNumRecordsAtStart = false
implicit val system = context.system
implicit val mat = ActorMaterializer()
implicit val ec = system.dispatcher
override def postRestart(reason: Throwable): Unit = {
logger.info(s"I finished restarting", reason)
}
override def postStop(): Unit = {
if (cancellableStatus != null) cancellableStatus.cancel()
}
override val receive: Receive = receiveBeforeNewData
def receiveBeforeNewData: Receive = {
case ConsumeEvent if requestedNumRecordsAtStart =>
counter += 1
case ConsumeEvent =>
counter += 1
logger.info(s"started working on init-token $initToken")
requestedNumRecordsAtStart = true
getNumRecords().map(NumRecordsAtStart.apply) pipeTo self
case NumRecordsAtStart(num) =>
expectedNumInfotons = num
cancellableStatus = context.system.scheduler.schedule(1.second, 5.seconds, self, Status)
case NewToken(t) =>
currentToken = t
logger.info(s"received new token: $t")
numInfotonsFromPreviousTokens = counter // counter checkpoint
case EndStreamEvent =>
getNumRecords().map(NumRecordsAtEnd.apply) pipeTo self
case NumRecordsAtEnd(num) =>
logger.info(s"received $counter/$expectedNumInfotons vs. $num infotons (end-time) current-token=$currentToken init-token $initToken")
logger.info(s"finished working, going to kill myself init-token $initToken")
context stop self
case Status =>
logger.info(s"received $counter/$expectedNumInfotons infotons, current-token=$currentToken, init-token $initToken")
case Reset =>
logger.info("reset statistics")
logger.info(s"old counter = $counter")
counter = numInfotonsFromPreviousTokens
logger.info(s"new counter = $counter")
case x =>
logger.warn(s"unexpected message: $x")
}
// def receiveAfterNewData: Receive = {
// case NumRecordsAtStart(num) =>
// expectedNumInfotons = num
// cancellableStatus = context.system.scheduler.schedule(1.second, 5.seconds, self, Status)
// case NumRecordsAtEnd(num) =>
// logger.info(s"received $counter/$expectedNumInfotons vs. $num infotons (end-time) current-token=$currentToken init-token $initToken")
// logger.info(s"finished working, going to kill myself init-token $initToken")
// context stop self
// case ConsumeEvent =>
// counter += 1
// case EndStreamEvent =>
// getNumRecords().map(NumRecordsAtEnd.apply) pipeTo self
// case Status =>
// logger.info(s"received $counter/$expectedNumInfotons infotons, current-token=$currentToken, init-token $initToken")
// case NewToken(t) =>
// currentToken = t
// logger.info(s"received new token: $t")
// numInfotonsFromPreviousTokens = counter // counter checkpoint
// case Reset =>
// logger.info("reset statistics")
// logger.info(s"old counter = $counter")
// counter = numInfotonsFromPreviousTokens
// logger.info(s"new counter = $counter")
// case x =>
// logger.warn(s"unexpected message on afterNewData: $x")
// }
def getNumRecords() = {
val decodedToken = Tokens.decompress(initToken).split('|')
val indexTime = decodedToken(0).toLong
val qp = if (decodedToken.size < 3) "" else s"&qp=${decodedToken.last}&indexTime=$indexTime"
val path = decodedToken(1)
// http query parameters which should be always present
val httpParams = Map(
"op" -> "search",
"qp" -> decodedToken.last,
"indexTime" -> indexTime.toString,
"format" -> "json",
"length" -> "1",
"pretty" -> "")
// http query parameters which are optional (i.e., API Garden)
val paramsMap = params.split("&")
.map(_.split("="))
.collect {
case Array(k, v) => (k, v)
case Array(k) if k.nonEmpty => (k, "")
}.toMap
val req = HttpRequest(uri = Uri(s"${formatHost(baseUrl)}$path").withQuery(Query(httpParams ++ paramsMap)))
logger.info(s"send stats request: ${req.uri}")
val HttpAddress(protocol, host, port, uriPrefix) = ArgsManipulations.extractBaseUrl(baseUrl)
val conn = HttpConnections.cachedHostConnectionPool[Option[_]](host, port, protocol)
Source.single(req -> None)
.via(conn)
.mapAsync(1) {
case (Success(HttpResponse(s, _, e, _)), _) =>
e.withoutSizeLimit().dataBytes
.via(lineSeparatorFrame)
.filter(_.utf8String.trim contains "\\"total\\" : ")
.map(_.utf8String)
.map(_.split(":")(1).init.tail.toLong)
.runWith(Sink.head)
}
.runWith(Sink.head)
// Http().singleRequest(req).flatMap { case HttpResponse(s, _, e, _) =>
// e.withoutSizeLimit().dataBytes
// .via(lineSeparatorFrame)
// .filter(_.utf8String.trim contains "\\"total\\" : ")
// .map(_.utf8String)
// .map(_.split(":")(1).init.tail.toLong)
// .runWith(Sink.head)
// }
}
}
*/
// Protocol messages for the consumer-statistics actor (see the commented-out
// ConsumerStatsActor implementation above).
case object ConsumeEvent // one record was consumed
case object EndStreamEvent // the consumed stream has ended
case class NumRecordsAtEnd(num: Long) // record count measured at stream end
case class NumRecordsAtStart(num: Long) // record count measured at start
case class NewToken(token: String) // a new consumer token was received
| thomsonreuters/CM-Well | server/cmwell-data-tools/src/main/scala/cmwell/tools/data/downloader/consumer/ConsumerStatsActor.scala | Scala | apache-2.0 | 7,014 |
package one.lockstep.vault
import one.lockstep.lock.LockTicket
import one.lockstep.lock.client._
import one.lockstep.test.BaseSpec
import one.lockstep.util._
import one.lockstep.util.crypto._
/** Test fixtures for vault specs: wires a [[VaultManager]] on top of the lock
  * manager fixtures and creates a freshly locked vault with known secrets,
  * attributes and passcode.
  */
trait VaultFixtures extends LockManagerFixtures {
  spec: BaseSpec =>
  // Default ciphersuite used to encrypt vault contents in tests.
  def defaultVaultEncryptionCiphersuite = defaultCiphersuite
  trait VaultManagerFixture extends Fixture {
    this: LockManagerFixture with StorageFixture =>
    protected lazy val ticketManager = new LockTicketManager()
    def vaultEncryptionCiphersuite: Ciphersuite = defaultVaultEncryptionCiphersuite
    lazy val vaultManager: VaultManager =
      new VaultManager(clientStorage, lockManager, ticketManager, vaultEncryptionCiphersuite)
    // placeholder for defaults
    //def lockParams: LockParams
  }
  /** A vault that is created and locked as part of fixture construction. */
  trait AbstractFreshVaultFixture extends Fixture {
    this: VaultManagerFixture with LockManagerFixture with StorageFixture =>
    // Fixed test data: alias, one secret, and two attributes.
    lazy val vaultAlias = "vault"
    lazy val vaultSecretId = "secretId"
    lazy val vaultSecretValue = "secretValue".utf8
    lazy val vaultSecretSpecs = Map(vaultSecretId -> Specs.secret(vaultSecretValue))
    lazy val vaultAttrIds = Seq("a0", "a1")
    def vaultAttrValue(attrId: String) = Bytes((attrId+"-value").getBytes)
    lazy val vaultAttrs = vaultAttrIds.map(id => (id ,vaultAttrValue(id))).toMap
    lazy val ticket = LockTicket.selfIssuedTicket()
    lazy val lockId = ticket.lockId
    lazy val passcode = "passcode".utf8
    lazy val correctPasscode = passcode
    // Produces a distinct wrong passcode on each call.
    lazy val nextWrongPasscode: () => Bytes = {
      var iterator = Iterator.from(1).map(i => s"wrongPasscode$i".utf8)
      () => iterator.next()
    }
    // Eagerly assigns the alias and locks the vault when the fixture is built.
    val vault = await {
      vaultManager.assign(vaultAlias, ticket)
      vaultManager
        .prepareVaultLock(vaultAlias, lockParams, specs = vaultSecretSpecs, vaultAttrs)
        .execute(passcode, userTimeout)
    }
    // convenience method: unlock, run the callback, commit, always dispose
    def unlock[A](passcode: Bytes)(onUnlock: UnlockedVault => A) = {
      val unlocked = await(vault.unlock(passcode, userTimeout))
      try {
        onUnlock(unlocked)
        unlocked.commit()
      } finally unlocked.dispose()
    }
    // Attempts an unlock with a wrong passcode; fails the spec if it succeeds.
    def failedUnlock() = unlock(nextWrongPasscode()) { unlocked => fail("unexpected unlock success") }
  }
  class FreshVaultFixture(implicit lockManagerFixtureProvider: LockManagerFixtureProvider)
    extends DelegatingLockManagerFixture
    with AbstractFreshVaultFixture
    with VaultManagerFixture
}
| lockstep-one/vault | vault-client/src/test/scala/one/lockstep/vault/VaultFixtures.scala | Scala | agpl-3.0 | 2,458 |
package controllers
import models.User
import oauth2.OauthDataHandler
import play.api
import play.api.mvc._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scalaoauth2.provider.OAuth2Provider
/**
 * Controller object holding the OAuth2-protected ActionBuilders used by
 * the application's controllers.
 *
 * @author william.merino@zktechnology.eu
 */
object ActionBuilders extends Controller with OAuth2Provider {
  /**
   * Request wrapper carrying the authenticated [[User]] alongside the
   * original request.
   *
   * @author william.merino@zktechnology.eu
   */
  case class AuthenticatedRequest[A](user: User, request: Request[A]) extends WrappedRequest(request)
  /**
   * Action Builder for Authenticated Requests, it returns the current user.
   */
  object Authenticated extends api.mvc.ActionBuilder[AuthenticatedRequest] {
    def invokeBlock[A](request: Request[A], block: AuthenticatedRequest[A] => Future[Result]) = {
      authenticate(block)(request)
    }
  }
  /**
   * Performs an authentication and checks if the input zone is valid.
   * Responds with 401 Unauthorized when the user does not belong to the zone.
   * @param block action body to run with the authenticated request
   * @param zoneName zone the authenticated user must belong to
   * @param request incoming request carrying the OAuth2 token
   * @tparam A request body type
   * @return the action result, or Unauthorized on zone mismatch
   */
  private def authenticateAndCheckZone[A]( block: AuthenticatedRequest[A] => Future[Result],zoneName:String)(implicit request: Request[A]) = {
    authorize(new OauthDataHandler()) { authInfo =>
      if(User.findAllZoneName(authInfo.user._id).contains(zoneName))
        block(AuthenticatedRequest(authInfo.user, request))
      else
        Future.successful(Unauthorized)
    }
  }
  /**
   * Performs an authentication
   * @param block action body to run with the authenticated request
   * @param request incoming request carrying the OAuth2 token
   * @tparam A request body type
   * @return the action result produced by the block
   */
  private def authenticate[A](block: AuthenticatedRequest[A] => Future[Result])(implicit request: Request[A]) = {
    authorize(new OauthDataHandler()) { authInfo =>
      block(AuthenticatedRequest(authInfo.user, request))
    }
  }
  /**
   * Action Builder for Authenticated requests and also checking if the user belongs to the zone
   * @param zoneName zone the authenticated user must belong to
   * @return an ActionBuilder enforcing authentication plus zone membership
   */
  def AuthenticatedZone(zoneName:String) = new ActionBuilder[AuthenticatedRequest] {
    def invokeBlock[A](request: Request[A], block: (AuthenticatedRequest[A]) => Future[Result]) = {
      authenticateAndCheckZone(block,zoneName)(request)
    }
  }
}
| ZKTecoEu/ZKRestApi | ZKRestServer/app/controllers/ActionBuilders.scala | Scala | mit | 2,190 |
package sampler.example.abc.flockMortality.util
import play.api.libs.json.Json
import play.api.libs.json.Writes
import sampler.abc.ABCConfig
import sampler.abc.Population
/** Bundle of an ABC run's inputs and output: the prior, the observed data,
  * the run configuration and the final particle population.
  */
case class ABCResult(
    prior: IntervalPrior,
    observed: IndexedSeq[Observed],
    config: ABCConfig,
    population: Population[Parameters]
)
object ABCResult {
  // play-json writer: serializes prior, observed sheds, the two config
  // scalars actually persisted (generations/particles), and the population.
  implicit val resultWrites = new Writes[ABCResult] {
    def writes(data: ABCResult) = {
      val observedJSON = data.observed.map(shed => Json.toJson(shed))
      Json.obj(
        "prior" -> Json.toJson(data.prior),
        "observed" -> observedJSON,
        "config" -> Json.obj(
          "generations" -> data.config.numGenerations,
          "particles" -> data.config.numParticles
        ),
        "population" -> data.population.toJSON()
      )
    }
  }
}
package valfinding
// IDE-test fixture for val/field resolution. The /*{...}*/ markers denote
// probe positions consumed by the test harness — do not move or rename them.
class OuterClass {
  val outerExclusiveField = "outerExclusiveField"
  val fff = "Outer's Field"
  class InnerClass {
    val fff /*{inner class field decl}*/ = "Inner's Field"
    testFields
    def testFields: Unit = {
      val fff = "Local Shadower"
      fff /*{method-local var shadowing field}*/ // Should show "Local Shadower"
      this.fff /*{shadowed field accessed with this}*/ // Should show "Inner's field"
      InnerClass.this.fff /*{shadowed field accessed with this with class name}*/ // Should show "Inner's field"
      OuterClass.this.fff /*{shadowed field accessed with this with enclosing class name}*/ // Should show "Outer's field"
      outerExclusiveField /*{exclusive field of enclosing class}*/
    }
  }
}
package riftwarpx.mongo
import almhirt.serialization._
import scalaz.syntax.validation._
import scalaz.Validation.FlatMap._
import almhirt.common._
import almhirt.almvalidation.kit._
import reactivemongo.bson._
import riftwarp._
import scala.reflect.ClassTag
/** Serializes/deserializes values of `T` to/from BSON documents.
  * The func variants close over the default (empty) serialization params.
  */
trait BsonDocumentSerializer[T] extends Serializes[T, BSONDocument] with Deserializes[BSONDocument, T] {
  def serialize(what: T)(implicit params: SerializationParams = SerializationParams.empty): AlmValidation[BSONDocument]
  def deserialize(what: BSONDocument)(implicit params: SerializationParams = SerializationParams.empty): AlmValidation[T]
  def toSerializationFunc = (what: T) ⇒ serialize(what)
  def toDeserializationFunc = (what: BSONDocument) ⇒ deserialize(what)
}
/** Factory for RiftWarp-backed BSON serializers.
  *
  * `idLabel`, when given, names the domain field that is renamed to the
  * MongoDB `_id` field on serialization and renamed back on deserialization.
  *
  * Note: this block's for-comprehension generators (`←`) and match arrows
  * (`⇒`) were restored from a mojibake'd encoding; logic is unchanged.
  */
object BsonDocumentSerializer {
  def apply[T](idLabel: Option[String], riftWarp: RiftWarp)(implicit tag: ClassTag[T]): BsonDocumentSerializer[T] =
    new BsonDocumentSerializer[T] {
      def serialize(what: T)(implicit params: SerializationParams): AlmValidation[BSONDocument] =
        for {
          packer ← riftWarp.packers.getFor(what, None, None)
          packed ← packer.packBlind(what)(riftWarp.packers)
          // Rename the domain id field to "_id" if an idLabel was supplied.
          packedWithReplacedId ← idLabel match {
            case Some(idLabel) ⇒
              packed match {
                case WarpObject(wd, elements) ⇒
                  elements.find(_.label == idLabel) match {
                    case Some(warpElemForId) ⇒
                      val newElems = WarpElement("_id", warpElemForId.value) +: elements.filterNot(_.label == idLabel)
                      WarpObject(wd, newElems).success
                    case None ⇒
                      NoSuchElementProblem(s"""The id label "$idLabel" which will be replaced by "_id" was not found.""").failure
                  }
                case x ⇒
                  SerializationProblem(s"""Only objects can be serialized to a BSON document. "$x" is not allowed here.""").failure
              }
            case None ⇒
              packed.success
          }
          res ← ToBsonDematerializer.dematerialize(packedWithReplacedId, Map.empty) match {
            case d: BSONDocument ⇒ d.success
            case x ⇒ SerializationProblem(s"""The warp package did not dematerialize to a BSON document(which should be impossible...).""").failure
          }
        } yield res
      def deserialize(what: BSONDocument)(implicit params: SerializationParams): AlmValidation[T] =
        for {
          rematerializedPackage ← FromBsonRematerializer.rematerialize(what, Map.empty)
          // Rename "_id" back to the domain id field if an idLabel was supplied.
          rematerializedObjectWithIdLabel ← rematerializedPackage match {
            case wo: WarpObject ⇒
              idLabel match {
                case Some(idLabel) ⇒
                  wo.elements.find(_.label == idLabel) match {
                    case Some(warpElemForId) ⇒
                      val newElems = WarpElement("_id", warpElemForId.value) +: wo.elements.filterNot(_.label == idLabel)
                      WarpObject(wo.warpDescriptor, newElems).success
                    case None ⇒
                      NoSuchElementProblem(s"""The id label "$idLabel" which will be replaced by "_id" was not found.""").failure
                  }
                case None ⇒ wo.success
              }
            case x ⇒
              SerializationProblem(s"""The BSON did not rematerialize to an object.""").failure
          }
          // Prefer the descriptor carried by the data; fall back to the class tag.
          unpacker ← rematerializedObjectWithIdLabel.warpDescriptor match {
            case Some(wd) ⇒
              riftWarp.unpackers.get(wd)
            case None ⇒
              riftWarp.unpackers.getByTag(tag)
          }
          unpackedUntyped ← unpacker.unpack(rematerializedObjectWithIdLabel)(riftWarp.unpackers)
          unpackedTyped ← unpackedUntyped.castTo[T]
        } yield unpackedTyped
    }
}
package cz.vse.easyminer.miner
import org.rosuda.REngine.Rserve.RConnection
/** Pool of Rserve connections. */
trait RConnectionPool {
  // Takes a connection out of the pool (creating one if needed).
  def borrow : BorrowedConnection
  // Returns a previously borrowed connection to the pool.
  def release(bc: BorrowedConnection)
  // Discards/renews pooled connections.
  def refresh()
  // Shuts the pool down.
  def close()
}
/** An Rserve connection that records its creation time (epoch millis), so a
  * pool can age out stale connections.
  */
class BorrowedConnection(rServer : String, rPort : Int) extends RConnection(rServer, rPort) {
  val created = System.currentTimeMillis
}
package ch.wsl.box.model.boxentities
import ch.wsl.box.jdbc.PostgresProfile.api._
/** Slick mapping for the box-schema `image_cache` table: cached image bytes
  * keyed by a string identifier.
  */
object BoxImageCache {
  val profile = ch.wsl.box.jdbc.PostgresProfile
  import profile._
  // One cached image: primary-key string and raw image bytes.
  case class BoxImageCache_row(key: String, data:Array[Byte])
  class BoxImageCache(_tableTag: Tag) extends profile.api.Table[BoxImageCache_row](_tableTag,BoxSchema.schema, "image_cache") {
    def * = (key,data) <> (BoxImageCache_row.tupled, BoxImageCache_row.unapply)
    val key: Rep[String] = column[String]("key", O.PrimaryKey)
    val data: Rep[Array[Byte]] = column[Array[Byte]]("data")
  }
  lazy val Table = new TableQuery(tag => new BoxImageCache(tag))
}
| Insubric/box | server/src/main/scala/ch/wsl/box/model/boxentities/BoxImageCache.scala | Scala | apache-2.0 | 646 |
package io.ddf.flink.content
import io.ddf.DDF
import io.ddf.content.Schema.{Column, ColumnType}
import io.ddf.content.{ConvertFunction, Representation}
import org.apache.flink.api.java.typeutils.ObjectArrayTypeInfo
import org.apache.flink.api.scala.{DataSet, _}
import org.rosuda.REngine._
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
import scala.util.Try
/** Conversion function turning a DDF representation backed by a Flink
  * DataSet of row-major Object arrays into a DataSet of [[FlinkRList]]
  * column vectors suitable for handing to R.
  */
class ArrayObject2FlinkRList(@transient ddf: DDF) extends ConvertFunction(ddf) {
  // Builds one R vector (REXP) for a single column from that column's raw values.
  // NOTE(review): this match is non-exhaustive — column types other than
  // STRING/INT/BIGINT/FLOAT/DOUBLE would raise a MatchError; confirm intended.
  private def transformObjectArray2REXP(objArray: Seq[Object], column: Column): REXP = {
    val result: REXP =
      column.getType match {
        case ColumnType.STRING =>
          // Values that fail conversion become null entries in the character vector.
          val updatedElems = objArray.map(elem => Try(elem.toString).getOrElse(null))
          new REXPString(updatedElems.toArray)
        case ColumnType.INT =>
          // Failed conversions become R's integer NA.
          val updatedElems = objArray.map(elem => Try(elem.toString.toInt).getOrElse(REXPInteger.NA))
          new REXPInteger(updatedElems.toArray)
        case ColumnType.BIGINT | ColumnType.FLOAT | ColumnType.DOUBLE =>
          // Wider numeric types are all represented as R doubles; NA on failure.
          val updatedElems = objArray.map(elem => Try(elem.toString.toDouble).getOrElse(REXPDouble.NA))
          new REXPDouble(updatedElems.toArray)
      }
    result
  }
  // NOTE(review): the outer matches below are also non-exhaustive — a
  // representation that is not a DataSet of Object arrays raises MatchError.
  override def apply(rep: Representation): Representation = {
    val repValue: Object = rep.getValue
    repValue match {
      case dataSet: DataSet[_] =>
        dataSet.getType() match {
          case x: ObjectArrayTypeInfo[_, _] =>
            val columns: List[Column] = ddf.getSchema.getColumns.toList
            val data: DataSet[Array[Object]] = dataSet.asInstanceOf[DataSet[Array[Object]]]
            val columnNames: Array[String] = ddf.getColumnNames.asScala.toArray
            // Per partition: transpose rows into columns, then build one
            // FlinkRList (set of R vectors) per partition.
            val dataSetREXP: DataSet[FlinkRList] = data.mapPartition {
              pdata =>
                // Pair each cell with its column index, then group by column.
                val subset = pdata.flatMap(x => x.zipWithIndex)
                val groupedData = subset.toSeq.groupBy(elem => elem._2)
                val rVectors = groupedData.map {
                  case (colIndex, colValues) =>
                    transformObjectArray2REXP(colValues.map(_._1), columns(colIndex))
                }.toArray
                val dfList = FlinkRList(rVectors, columnNames)
                Iterator(dfList)
            }
            new Representation(dataSetREXP, RepresentationHandler.DATASET_RList.getTypeSpecsString)
        }
    }
  }
}
| ddf-project/ddf-flink | flink/src/main/scala/io/ddf/flink/content/ArrayObject2FlinkRList.scala | Scala | apache-2.0 | 2,376 |
/*
* Artificial Intelligence for Humans
* Volume 2: Nature Inspired Algorithms
* Java Version
* http://www.aifh.org
* http://www.jeffheaton.com
*
* Code repository:
* https://github.com/jeffheaton/aifh
*
* Copyright 2014 by Jeff Heaton
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* For more information on Heaton Research copyrights, licenses
* and trademarks visit:
* http://www.heatonresearch.com/copyright
*/
package com.heatonresearch.aifh.error
/**
* The sum of squares method (SSE) measures the error as the sum of the squared difference of each vector element.
* <p/>
* http://www.heatonresearch.com/wiki/Sum_of_Squares_Error
*/
class ErrorCalculationSSE extends AbstractErrorCalculation {
  /**
   * Calculate the current SSE error value.
   *
   * @return the accumulated global error, or positive infinity when no
   *         samples have been recorded yet.
   */
  def calculate: Double =
    if (setSize != 0) globalError
    else Double.PositiveInfinity

  /** @return a fresh SSE error calculator of the same kind. */
  def create: ErrorCalculation = new ErrorCalculationSSE
}
package org.jetbrains.plugins.scala
package lang.rearranger
import com.intellij.psi.PsiElement
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunction
import scala.collection.{immutable, mutable}
/**
* @author Roman.Shein
* Date: 09.07.13
*/
/** Mutable accumulator populated while walking the PSI tree during Scala
  * member rearrangement: collects arrangement entries, method-call
  * dependencies (used to move methods closer to their callers), and
  * getter/setter pairs for Java- and Scala-style properties.
  * Not thread-safe; intended for single-pass use.
  */
class ScalaArrangementParseInfo {
  /**
   * All entries created from PSI tree by the moment.
   */
  private val myEntries = mutable.Buffer[ScalaArrangementEntry]()
  // Reverse lookup from a PSI function to the entry created for it.
  private val methodToEntry = mutable.HashMap[ScFunction, ScalaArrangementEntry]()
  /**
   * Anchors for method dependencies. Any method may only have one root it is connected to. When rearranging with some
   * settings entries may be moved closer to roots.
   */
  private val dependencyRoots = mutable.Buffer[ScalaArrangementDependency]()
  /**
   * Maps dependency root to set of its dependent methods.
   */
  private val methodDependencies = mutable.HashMap[ScFunction, immutable.HashSet[ScFunction]]()
  // Methods currently considered dependency roots (callers that are not themselves callees).
  private val currentMethodDependencyRoots = mutable.HashSet[ScFunction]()
  // Methods known to be called by some other registered method.
  private val currentDependentMethods = mutable.HashSet[ScFunction]()
  // Dirty flag: set whenever a new dependency is registered, so the
  // dependency-root graph is lazily rebuilt on next access.
  private var rebuildMethodDependencies = true
  private val javaPropertiesData = mutable.HashMap[(String/*property name*/, PsiElement/*PSI parent*/), ScalaPropertyInfo]()
  private val scalaPropertiesData = mutable.HashMap[(String/*property name*/, PsiElement/*PSI parent*/), ScalaPropertyInfo]()
  /** Records the entry created for `method`, enabling dependency-graph lookups later. */
  def onMethodEntryCreated(method: ScFunction, entry: ScalaArrangementEntry) = methodToEntry += ((method, entry))
  def addEntry(entry: ScalaArrangementEntry) = myEntries += entry
  def entries: immutable.List[ScalaArrangementEntry] = myEntries.toList
  def javaProperties: Iterable[ScalaPropertyInfo] = javaPropertiesData.values
  def scalaProperties: Iterable[ScalaPropertyInfo] = scalaPropertiesData.values
  /** Registers a caller -> callee edge and maintains the root/dependent sets. */
  def registerDependency(caller: ScFunction, callee: ScFunction) {
    // A method that is called by someone can no longer be a root.
    currentMethodDependencyRoots -= callee
    if (!currentDependentMethods.contains(caller)) {
      currentMethodDependencyRoots += caller
    }
    currentDependentMethods += callee
    var callerDependent = if (!methodDependencies.contains(caller)) {
      immutable.HashSet[ScFunction]()
    } else {
      methodDependencies(caller)
    }
    if (!callerDependent.contains(callee)) {
      callerDependent = callerDependent + callee
    }
    methodDependencies += ((caller, callerDependent))
    rebuildMethodDependencies = true
  }
  /** Returns the dependency trees rooted at the current root methods,
    * rebuilding them if anything was registered since the last call.
    */
  def getMethodDependencyRoots: mutable.Buffer[ScalaArrangementDependency] = {
    if (rebuildMethodDependencies) {
      dependencyRoots.clear()
      val cache = new mutable.HashMap[ScFunction, ScalaArrangementDependency]
      for (method <- currentMethodDependencyRoots) {
        val info = buildMethodDependencyInfo(method, cache)
        if (info.isDefined) dependencyRoots += info.get
      }
      rebuildMethodDependencies = false
    }
    dependencyRoots
  }
  /** Breadth-first expansion of the dependency tree below `method`.
    * Returns None when a cycle is detected (a method is reached twice).
    */
  private def buildMethodDependencyInfo(
    method: ScFunction,
    cache: mutable.HashMap[ScFunction, ScalaArrangementDependency]
  ): Option[ScalaArrangementDependency] = {
    val entry: ScalaArrangementEntry = methodToEntry(method)
    val result: ScalaArrangementDependency = new ScalaArrangementDependency(entry)
    // Work list of (method, dependency-node) pairs still to expand.
    var toProcess: List[(ScFunction, ScalaArrangementDependency)] = List[(ScFunction, ScalaArrangementDependency)]()
    toProcess = (method, result)::toProcess
    var usedMethods = Set.empty[ScFunction]
    while (toProcess.nonEmpty) {
      val (depenenceSource, dependency) = toProcess.head
      toProcess = toProcess.tail
      methodDependencies.get(depenenceSource) match {
        case Some(dependencies) =>
          usedMethods += depenenceSource
          for (dependentMethod <- dependencies.toList) {
            if (usedMethods.contains(dependentMethod)) {
              // Cycle: abandon this root entirely.
              return None
            }
            methodToEntry.get(dependentMethod).foreach(dependentEntry =>
              if (dependentEntry != null) {
                // Reuse a cached node for this method if one exists.
                val dependentMethodInfo = if (cache.contains(dependentMethod)) {
                  cache(dependentMethod)
                } else {
                  new ScalaArrangementDependency(dependentEntry)
                }
                cache.put(dependentMethod, dependentMethodInfo)
                dependency.addDependentMethodInfo(dependentMethodInfo)
                toProcess = (dependentMethod, dependentMethodInfo) :: toProcess
              }
            )
          }
        case None =>
      }
    }
    Some(result)
  }
  /** Records a Java-style getter, merging with an already-seen setter if any. */
  def registerJavaGetter(key: (String, PsiElement), getter: ScFunction, entry: ScalaArrangementEntry) {
    javaPropertiesData.get(key) match {
      case Some(existingData) => javaPropertiesData += (key -> new ScalaPropertyInfo(entry, existingData.setter))
      case None => javaPropertiesData += (key -> new ScalaPropertyInfo(entry, null))
    }
  }
  /** Records a Java-style setter, merging with an already-seen getter if any. */
  def registerJavaSetter(key: (String, PsiElement), setter: ScFunction, entry: ScalaArrangementEntry) {
    javaPropertiesData.get(key) match {
      case Some(existingData) => javaPropertiesData += (key -> new ScalaPropertyInfo(existingData.getter, entry))
      case None => javaPropertiesData += (key -> new ScalaPropertyInfo(null, entry))
    }
  }
  /** Records a Scala-style getter, merging with an already-seen setter if any. */
  def registerScalaGetter(key: (String, PsiElement), getter: ScFunction, entry: ScalaArrangementEntry) {
    scalaPropertiesData.get(key) match {
      case Some(existingData) => scalaPropertiesData += (key -> new ScalaPropertyInfo(entry, existingData.setter))
      case None => scalaPropertiesData += (key -> new ScalaPropertyInfo(entry, null))
    }
  }
  /** Records a Scala-style setter, merging with an already-seen getter if any. */
  def registerScalaSetter(key: (String, PsiElement), setter: ScFunction, entry: ScalaArrangementEntry) {
    scalaPropertiesData.get(key) match {
      case Some(existingData) => scalaPropertiesData += (key -> new ScalaPropertyInfo(existingData.getter, entry))
      case None => scalaPropertiesData += (key -> new ScalaPropertyInfo(null, entry))
    }
  }
}
| loskutov/intellij-scala | src/org/jetbrains/plugins/scala/lang/rearranger/ScalaArrangementParseInfo.scala | Scala | apache-2.0 | 5,951 |
package org.davidbild.tristate.contrib.cats
import cats.implicits._
import cats.laws.discipline._
import cats.kernel.laws.discipline.{PartialOrderTests, OrderTests}
import org.scalacheck.{Arbitrary, Cogen, Gen}
import org.scalatest.matchers.should.Matchers
import org.scalatest.funsuite.AnyFunSuite
import org.scalatestplus.scalacheck._
import org.typelevel.discipline.scalatest.FunSuiteDiscipline
import org.scalacheck.rng.Seed
import org.davidbild.tristate._
import org.davidbild.tristate.Tristate._
/** Law-checking suite for the cats instances of [[Tristate]]:
  * runs the discipline law tests for Functor/Traverse/ApplicativeError/
  * Applicative/Monad/MonoidK/CoflatMap plus PartialOrder and Order,
  * and a small property for Show.
  */
class TristateTests extends AnyFunSuite with Matchers with ScalaCheckPropertyChecks with FunSuiteDiscipline {
  checkAll("Tristate[Int]", FunctorTests[Tristate].functor[Int, Int, Int])
  checkAll("Tristate[Int] with Option", TraverseTests[Tristate].traverse[Int, Int, Int, Int, Tristate, Option])
  checkAll("Tristate with Unit", ApplicativeErrorTests[Tristate, Unit].applicativeError[Int, Int, Int])
  checkAll("Tristate[Int]", ApplicativeTests[Tristate].applicative[Int, Int, Int])
  checkAll("Tristate[Int]", MonadTests[Tristate].monad[Int, Int, Int])
  checkAll("Tristate[Int]", MonoidKTests[Tristate].monoidK[Int])
  checkAll("Tristate[Int]", CoflatMapTests[Tristate].coflatMap[Int, Int, Int])
  checkAll("Tristate[Int]", PartialOrderTests[Tristate[Int]].partialOrder)
  checkAll("Tristate[Int]", OrderTests[Tristate[Int]].order)
  // Show must agree with toString for every Tristate value.
  test("show") {
    absent[String].show should === ("Absent")
    unspecified[String].show should === ("Unspecified")
    forAll { fs: Tristate[String] =>
      fs.show should === (fs.toString)
    }
  }
  // Cogen instance: perturb the seed with the payload for Present,
  // leave it untouched for Absent/Unspecified.
  private implicit def cogenTristate[A: Cogen]: Cogen[Tristate[A]] = {
    val A = implicitly[Cogen[A]]
    Cogen((seed: Seed, t: Tristate[A]) => t.cata(a => A.perturb(seed.next, a), seed, seed))
  }
  // Arbitrary instance: Present/Absent/Unspecified with equal frequency.
  private implicit def arbTristate[A: Arbitrary]: Arbitrary[Tristate[A]] = {
    val A = implicitly[Arbitrary[A]]
    Arbitrary(Gen.sized(n =>
      Gen.frequency(
        (1, A.arbitrary.map(Present(_))),
        (1, Gen.const(Absent)),
        (1, Gen.const(Unspecified))
      )
    ))
  }
}
| drbild/tristate | tristate-cats/src/test/scala/org/davidbild/tristate/contrib/cats/TristateTests.scala | Scala | apache-2.0 | 2,048 |
import scala.collection.mutable
class Phrase(text: String) {

  /** Count the occurrences of each normalized word in `text`.
    *
    * Normalization: characters outside the kept character class are
    * blanked to spaces, the text is lower-cased, and blank fragments
    * produced by splitting are discarded.
    *
    * @return a mutable map from word to occurrence count; missing words
    *         read as 0 (the map carries a default value of 0).
    */
  def wordCount(): mutable.Map[String, Int] = {
    val normalizedWords =
      text.replaceAll("[^\\\\w\\\\s']", " ").
        toLowerCase.
        split(' ').
        filterNot(word => word.forall(_.isSpaceChar))
    // Tally into a zero-defaulted map; replaces the deprecated `/:` fold
    // operator (removed in current Scala versions).
    val counts = mutable.Map[String, Int]().withDefaultValue(0)
    for (word <- normalizedWords) counts(word) += 1
    counts
  }
}
/* sbt -- Simple Build Tool
* Copyright 2008, 2009, 2010 Mark Harrah
*/
package sbt
import java.io.{ File, Writer }
import inc.Relations
/** Emits Graphviz "dot" files describing sbt incremental-compilation
  * dependencies: one graph of internal source dependencies and one of
  * binary (external) dependencies.
  */
object DotGraph {
  // Curried helper: renders a source file relative to the given roots.
  private def fToString(roots: Iterable[File]): (File => String) =
    (x: File) => sourceToString(roots, x)
  /** Writes per-source dependency graphs into `outputDirectory`. */
  def sources(relations: Relations, outputDirectory: File, sourceRoots: Iterable[File]): Unit = {
    val toString = fToString(sourceRoots)
    apply(relations, outputDirectory, toString, toString)
  }
  /** Writes dependency graphs collapsed to package granularity. */
  def packages(relations: Relations, outputDirectory: File, sourceRoots: Iterable[File]): Unit = {
    // Strips the file name, keeping the directory part as a dotted package.
    val packageOnly = (path: String) =>
      {
        val last = path.lastIndexOf(File.separatorChar)
        val packagePath = (if (last > 0) path.substring(0, last) else path).trim
        if (packagePath.isEmpty) "" else packagePath.replace(File.separatorChar, '.')
      }
    val toString = packageOnly compose fToString(sourceRoots)
    apply(relations, outputDirectory, toString, toString)
  }
  /** Writes both graph files using the supplied node-label functions. */
  def apply(relations: Relations, outputDir: File, sourceToString: File => String, externalToString: File => String): Unit = {
    def file(name: String) = new File(outputDir, name)
    IO.createDirectory(outputDir)
    generateGraph(file("int-source-deps"), "dependencies", relations.internalSrcDep, sourceToString, sourceToString)
    generateGraph(file("binary-dependencies"), "externalDependencies", relations.binaryDep, externalToString, sourceToString)
  }
  /** Renders one relation as a dot digraph, dropping self-edges and empty labels.
    * NOTE(review): the quoting in the yielded edge string below looks
    * escape-mangled in this copy — confirm against the original source.
    */
  def generateGraph[Key, Value](file: File, graphName: String, relation: Relation[Key, Value],
    keyToString: Key => String, valueToString: Value => String) {
    import scala.collection.mutable.{ HashMap, HashSet }
    val mappedGraph = new HashMap[String, HashSet[String]]
    for ((key, values) <- relation.forwardMap; keyString = keyToString(key); value <- values)
      mappedGraph.getOrElseUpdate(keyString, new HashSet[String]) += valueToString(value)
    val mappings =
      for {
        (dependsOn, dependants) <- mappedGraph.toSeq
        dependant <- dependants
        if dependant != dependsOn && !dependsOn.isEmpty && !dependant.isEmpty
      } yield "\\"" + dependant + "\\" -> \\"" + dependsOn + "\\""
    val lines =
      ("digraph " + graphName + " {") +:
        mappings :+
        "}"
    IO.writeLines(file, lines)
  }
  /** Renders a source path relative to its root, without the file extension. */
  def sourceToString(roots: Iterable[File], source: File) =
    relativized(roots, source).trim.stripSuffix(".scala").stripSuffix(".java")
  // Picks the shortest relativization of `path` against any root; falls
  // back to the bare file name when no root contains it.
  // NOTE(review): uses the deprecated `/:` fold operator.
  private def relativized(roots: Iterable[File], path: File): String =
    {
      val relativized = roots.flatMap(root => IO.relativize(root, path))
      val shortest = (Int.MaxValue /: relativized)(_ min _.length)
      relativized.find(_.length == shortest).getOrElse(path.getName)
    }
}
| jasonchaffee/sbt | main/actions/src/main/scala/sbt/DotGraph.scala | Scala | bsd-3-clause | 2,716 |
package es.upm.oeg.epnoi.matching.metrics.feature
import java.nio.charset.Charset
import org.apache.lucene.analysis.standard.StandardAnalyzer
import org.apache.spark.rdd.RDD
import org.slf4j.LoggerFactory
/**
* Created by cbadenes on 21/04/15.
*/
/** Word tokenizer used by the matching metrics: lower-cases a line,
  * splits it, and keeps only "valid" tokens (length > 4, not a Lucene
  * stop word, all letters, all US-ASCII encodable).
  */
object CommonTokenizer {
  val log = LoggerFactory.getLogger(CommonTokenizer.getClass);
  // NOTE(review): the split pattern below looks escape-mangled in this copy
  // (a whitespace regex was probably intended) — confirm against the original.
  def split (line: String): Seq[String] = {
    line.toLowerCase.split("\\\\s").filter(isValid)
  }
  /** True when `word` passes all token filters described on the object. */
  def isValid (word: String): Boolean ={
    word.length > 4 && !StandardAnalyzer.STOP_WORDS_SET.contains(word) && word.forall(java.lang.Character.isLetter) && word.forall(x=>isEncoded("US-ASCII",x))
  }
  /** True when `letter` can be encoded in the named charset. */
  def isEncoded (charset: String, letter: Char): Boolean ={
    Charset.forName(charset).newEncoder().canEncode(letter)
  }
  /** Debug helper: logs every token sequence (collects the RDD to the
    * driver — only suitable for small data) and returns the input unchanged.
    */
  def printAll(tokens: RDD[Seq[String]]): RDD[Seq[String]] ={
    log.info("*"*20+" Tokenizer.split:")
    tokens.collect().foreach(x => log.info(s"Β·$x"))
    return tokens
  }
}
| cbadenes/epnoi-matching-metrics | src/main/scala/es/upm/oeg/epnoi/matching/metrics/feature/CommonTokenizer.scala | Scala | apache-2.0 | 953 |
/*
* Copyright (C) 2012-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package scala.async
package run
package futures
import scala.language.postfixOps
import scala.concurrent._
import scala.concurrent.duration._
import scala.concurrent.duration.Duration.Inf
import scala.collection._
import scala.runtime.NonLocalReturnControl
import scala.util.{Try,Success,Failure}
import scala.async.Async.{async, await}
import org.junit.Test
/** Test suite exercising scala-async's `async`/`await` together with the
  * behaviour of the standard library `Future` combinators (composition,
  * recovery, folding/reducing, traversal, blocking and callback ordering).
  * Statement order matters throughout — tests coordinate threads via
  * `TestLatch` instances and `Await` calls.
  */
class FutureSpec {
  /* some utils */
  // Canned asynchronous responses used by several tests below.
  def testAsync(s: String)(implicit ec: ExecutionContext): Future[String] = s match {
    case "Hello" => future { "World" }
    case "Failure" => Future.failed(new RuntimeException("Expected exception; to test fault-tolerance"))
    case "NoReply" => Promise[String]().future
  }
  val defaultTimeout = 5 seconds
  /* future specification */
  @Test def `A future with custom ExecutionContext should handle Throwables`() {
    // Collects every Throwable reported to the custom execution context.
    val ms = new mutable.HashSet[Throwable] with mutable.SynchronizedSet[Throwable]
    implicit val ec = scala.concurrent.ExecutionContext.fromExecutor(new scala.concurrent.forkjoin.ForkJoinPool(), {
      t =>
        ms += t
    })
    class ThrowableTest(m: String) extends Throwable(m)
    val f1 = future[Any] {
      throw new ThrowableTest("test")
    }
    intercept[ThrowableTest] {
      Await.result(f1, defaultTimeout)
    }
    val latch = new TestLatch
    val f2 = future {
      Await.ready(latch, 5 seconds)
      "success"
    }
    val f3 = async {
      val s = await(f2)
      s.toUpperCase
    }
    // Callbacks that throw should be routed to the EC's error handler.
    f2 foreach { _ => throw new ThrowableTest("dispatcher foreach") }
    f2 onSuccess { case _ => throw new ThrowableTest("dispatcher receive") }
    latch.open()
    Await.result(f2, defaultTimeout) mustBe ("success")
    f2 foreach { _ => throw new ThrowableTest("current thread foreach") }
    f2 onSuccess { case _ => throw new ThrowableTest("current thread receive") }
    Await.result(f3, defaultTimeout) mustBe ("SUCCESS")
    val waiting = future {
      Thread.sleep(1000)
    }
    Await.ready(waiting, 2000 millis)
    ms.size mustBe (4)
    //FIXME should check
  }
  import ExecutionContext.Implicits._
  @Test def `A future with global ExecutionContext should compose with for-comprehensions`() {
    import scala.reflect.ClassTag
    def asyncInt(x: Int) = future { (x * 2).toString }
    val future0 = future[Any] {
      "five!".length
    }
    val future1 = async {
      val a = await(future0.mapTo[Int]) // returns 5
      val b = await(asyncInt(a)) // returns "10"
      val c = await(asyncInt(7)) // returns "14"
      b + "-" + c
    }
    val future2 = async {
      val a = await(future0.mapTo[Int])
      val b = await((future { (a * 2).toString }).mapTo[Int])
      val c = await(future { (7 * 2).toString })
      b + "-" + c
    }
    Await.result(future1, defaultTimeout) mustBe ("10-14")
    //assert(checkType(future1, manifest[String]))
    intercept[ClassCastException] { Await.result(future2, defaultTimeout) }
  }
  //TODO this is not yet supported by Async
  @Test def `support pattern matching within a for-comprehension`() {
    case class Req[T](req: T)
    case class Res[T](res: T)
    def asyncReq[T](req: Req[T]) = req match {
      case Req(s: String) => future { Res(s.length) }
      case Req(i: Int) => future { Res((i * 2).toString) }
    }
    val future1 = for {
      Res(a: Int) <- asyncReq(Req("Hello"))
      Res(b: String) <- asyncReq(Req(a))
      Res(c: String) <- asyncReq(Req(7))
    } yield b + "-" + c
    val future2 = for {
      Res(a: Int) <- asyncReq(Req("Hello"))
      Res(b: Int) <- asyncReq(Req(a))
      Res(c: Int) <- asyncReq(Req(7))
    } yield b + "-" + c
    Await.result(future1, defaultTimeout) mustBe ("10-14")
    intercept[NoSuchElementException] { Await.result(future2, defaultTimeout) }
  }
  @Test def mini() {
    val future4 = async {
      await(Future.successful(0)).toString
    }
    Await.result(future4, defaultTimeout)
  }
  @Test def `recover from exceptions`() {
    val future1 = Future(5)
    val future2 = async { await(future1) / 0 }
    val future3 = async { await(future2).toString }
    val future1Recovered = future1 recover {
      case e: ArithmeticException => 0
    }
    val future4 = async { await(future1Recovered).toString }
    val future2Recovered = future2 recover {
      case e: ArithmeticException => 0
    }
    val future5 = async { await(future2Recovered).toString }
    // A recover whose partial function does not match leaves the failure intact.
    val future2Recovered2 = future2 recover {
      case e: MatchError => 0
    }
    val future6 = async { await(future2Recovered2).toString }
    val future7 = future3 recover {
      case e: ArithmeticException => "You got ERROR"
    }
    val future8 = testAsync("Failure")
    val future9 = testAsync("Failure") recover {
      case e: RuntimeException => "FAIL!"
    }
    val future10 = testAsync("Hello") recover {
      case e: RuntimeException => "FAIL!"
    }
    val future11 = testAsync("Failure") recover {
      case _ => "Oops!"
    }
    Await.result(future1, defaultTimeout) mustBe (5)
    intercept[ArithmeticException] { Await.result(future2, defaultTimeout) }
    intercept[ArithmeticException] { Await.result(future3, defaultTimeout) }
    Await.result(future4, defaultTimeout) mustBe ("5")
    Await.result(future5, defaultTimeout) mustBe ("0")
    intercept[ArithmeticException] { Await.result(future6, defaultTimeout) }
    Await.result(future7, defaultTimeout) mustBe ("You got ERROR")
    intercept[RuntimeException] { Await.result(future8, defaultTimeout) }
    Await.result(future9, defaultTimeout) mustBe ("FAIL!")
    Await.result(future10, defaultTimeout) mustBe ("World")
    Await.result(future11, defaultTimeout) mustBe ("Oops!")
  }
  @Test def `recoverWith from exceptions`() {
    val o = new IllegalStateException("original")
    val r = new IllegalStateException("recovered")
    intercept[IllegalStateException] {
      val failed = Future.failed[String](o) recoverWith {
        case _ if false == true => Future.successful("yay!")
      }
      Await.result(failed, defaultTimeout)
    } mustBe (o)
    val recovered = Future.failed[String](o) recoverWith {
      case _ => Future.successful("yay!")
    }
    Await.result(recovered, defaultTimeout) mustBe ("yay!")
    intercept[IllegalStateException] {
      val refailed = Future.failed[String](o) recoverWith {
        case _ => Future.failed[String](r)
      }
      Await.result(refailed, defaultTimeout)
    } mustBe (r)
  }
  @Test def `andThen like a boss`() {
    // Verifies andThen callbacks run in registration order (side-effect queue).
    val q = new java.util.concurrent.LinkedBlockingQueue[Int]
    for (i <- 1 to 1000) {
      val chained = future {
        q.add(1); 3
      } andThen {
        case _ => q.add(2)
      } andThen {
        case Success(0) => q.add(Int.MaxValue)
      } andThen {
        case _ => q.add(3);
      }
      Await.result(chained, defaultTimeout) mustBe (3)
      q.poll() mustBe (1)
      q.poll() mustBe (2)
      q.poll() mustBe (3)
      q.clear()
    }
  }
  @Test def `firstCompletedOf`() {
    // Nine never-completing promises plus one completed future.
    def futures = Vector.fill[Future[Int]](10) {
      Promise[Int]().future
    } :+ Future.successful[Int](5)
    Await.result(Future.firstCompletedOf(futures), defaultTimeout) mustBe (5)
    Await.result(Future.firstCompletedOf(futures.iterator), defaultTimeout) mustBe (5)
  }
  @Test def `find`() {
    val futures = for (i <- 1 to 10) yield future {
      i
    }
    val result = Future.find[Int](futures)(_ == 3)
    Await.result(result, defaultTimeout) mustBe (Some(3))
    val notFound = Future.find[Int](futures.iterator)(_ == 11)
    Await.result(notFound, defaultTimeout) mustBe (None)
  }
  @Test def `zip`() {
    val timeout = 10000 millis
    val f = new IllegalStateException("test")
    intercept[IllegalStateException] {
      val failed = Future.failed[String](f) zip Future.successful("foo")
      Await.result(failed, timeout)
    } mustBe (f)
    intercept[IllegalStateException] {
      val failed = Future.successful("foo") zip Future.failed[String](f)
      Await.result(failed, timeout)
    } mustBe (f)
    intercept[IllegalStateException] {
      val failed = Future.failed[String](f) zip Future.failed[String](f)
      Await.result(failed, timeout)
    } mustBe (f)
    val successful = Future.successful("foo") zip Future.successful("foo")
    Await.result(successful, timeout) mustBe (("foo", "foo"))
  }
  @Test def `fold`() {
    val timeout = 10000 millis
    def async(add: Int, wait: Int) = future {
      Thread.sleep(wait)
      add
    }
    val futures = (0 to 9) map {
      idx => async(idx, idx * 20)
    }
    val folded = Future.fold(futures)(0)(_ + _)
    Await.result(folded, timeout) mustBe (45)
    val futuresit = (0 to 9) map {
      idx => async(idx, idx * 20)
    }
    val foldedit = Future.fold(futures)(0)(_ + _)
    Await.result(foldedit, timeout) mustBe (45)
  }
  @Test def `fold by composing`() {
    val timeout = 10000 millis
    def async(add: Int, wait: Int) = future {
      Thread.sleep(wait)
      add
    }
    def futures = (0 to 9) map {
      idx => async(idx, idx * 20)
    }
    val folded = futures.foldLeft(Future(0)) {
      case (fr, fa) => for (r <- fr; a <- fa) yield (r + a)
    }
    Await.result(folded, timeout) mustBe (45)
  }
  @Test def `fold with an exception`() {
    val timeout = 10000 millis
    def async(add: Int, wait: Int) = future {
      Thread.sleep(wait)
      if (add == 6) throw new IllegalArgumentException("shouldFoldResultsWithException: expected")
      add
    }
    def futures = (0 to 9) map {
      idx => async(idx, idx * 10)
    }
    val folded = Future.fold(futures)(0)(_ + _)
    intercept[IllegalArgumentException] {
      Await.result(folded, timeout)
    }.getMessage mustBe ("shouldFoldResultsWithException: expected")
  }
  @Test def `fold mutable zeroes safely`() {
    // Regression-style test: folding into a mutable zero must not race.
    import scala.collection.mutable.ArrayBuffer
    def test(testNumber: Int) {
      val fs = (0 to 1000) map (i => Future(i))
      val f = Future.fold(fs)(ArrayBuffer.empty[AnyRef]) {
        case (l, i) if i % 2 == 0 => l += i.asInstanceOf[AnyRef]
        case (l, _) => l
      }
      val result = Await.result(f.mapTo[ArrayBuffer[Int]], 10000 millis).sum
      assert(result == 250500)
    }
    (1 to 100) foreach test //Make sure it tries to provoke the problem
  }
  @Test def `return zero value if folding empty list`() {
    val zero = Future.fold(List[Future[Int]]())(0)(_ + _)
    Await.result(zero, defaultTimeout) mustBe (0)
  }
  @Test def `shouldReduceResults`() {
    def async(idx: Int) = future {
      Thread.sleep(idx * 20)
      idx
    }
    val timeout = 10000 millis
    val futures = (0 to 9) map { async }
    val reduced = Future.reduce(futures)(_ + _)
    Await.result(reduced, timeout) mustBe (45)
    val futuresit = (0 to 9) map { async }
    val reducedit = Future.reduce(futuresit)(_ + _)
    Await.result(reducedit, timeout) mustBe (45)
  }
  @Test def `shouldReduceResultsWithException`() {
    def async(add: Int, wait: Int) = future {
      Thread.sleep(wait)
      if (add == 6) throw new IllegalArgumentException("shouldFoldResultsWithException: expected")
      else add
    }
    val timeout = 10000 millis
    def futures = (1 to 10) map {
      idx => async(idx, idx * 10)
    }
    val failed = Future.reduce(futures)(_ + _)
    intercept[IllegalArgumentException] {
      Await.result(failed, timeout)
    }.getMessage mustBe ("shouldFoldResultsWithException: expected")
  }
  @Test def `shouldReduceThrowNSEEOnEmptyInput`() {
    intercept[java.util.NoSuchElementException] {
      val emptyreduced = Future.reduce(List[Future[Int]]())(_ + _)
      Await.result(emptyreduced, defaultTimeout)
    }
  }
  @Test def `shouldTraverseFutures`() {
    object counter {
      var count = -1
      def incAndGet() = counter.synchronized {
        count += 2
        count
      }
    }
    val oddFutures = List.fill(100)(future { counter.incAndGet() }).iterator
    val traversed = Future.sequence(oddFutures)
    Await.result(traversed, defaultTimeout).sum mustBe (10000)
    val list = (1 to 100).toList
    val traversedList = Future.traverse(list)(x => Future(x * 2 - 1))
    Await.result(traversedList, defaultTimeout).sum mustBe (10000)
    val iterator = (1 to 100).toList.iterator
    val traversedIterator = Future.traverse(iterator)(x => Future(x * 2 - 1))
    Await.result(traversedIterator, defaultTimeout).sum mustBe (10000)
  }
  @Test def `shouldBlockUntilResult`() {
    val latch = new TestLatch
    val f = future {
      Await.ready(latch, 5 seconds)
      5
    }
    val f2 = future {
      val res = Await.result(f, Inf)
      res + 9
    }
    intercept[TimeoutException] {
      Await.ready(f2, 100 millis)
    }
    latch.open()
    Await.result(f2, defaultTimeout) mustBe (14)
    val f3 = future {
      Thread.sleep(100)
      5
    }
    intercept[TimeoutException] {
      Await.ready(f3, 0 millis)
    }
  }
  @Test def `run callbacks async`() {
    // Choreographs callback execution order via a vector of latches.
    val latch = Vector.fill(10)(new TestLatch)
    val f1 = future {
      latch(0).open()
      Await.ready(latch(1), TestLatch.DefaultTimeout)
      "Hello"
    }
    val f2 = async {
      val s = await(f1)
      latch(2).open()
      Await.ready(latch(3), TestLatch.DefaultTimeout)
      s.length
    }
    for (_ <- f2) latch(4).open()
    Await.ready(latch(0), TestLatch.DefaultTimeout)
    f1.isCompleted mustBe (false)
    f2.isCompleted mustBe (false)
    latch(1).open()
    Await.ready(latch(2), TestLatch.DefaultTimeout)
    f1.isCompleted mustBe (true)
    f2.isCompleted mustBe (false)
    val f3 = async {
      val s = await(f1)
      latch(5).open()
      Await.ready(latch(6), TestLatch.DefaultTimeout)
      s.length * 2
    }
    for (_ <- f3) latch(3).open()
    Await.ready(latch(5), TestLatch.DefaultTimeout)
    f3.isCompleted mustBe (false)
    latch(6).open()
    Await.ready(latch(4), TestLatch.DefaultTimeout)
    f2.isCompleted mustBe (true)
    f3.isCompleted mustBe (true)
    val p1 = Promise[String]()
    val f4 = async {
      val s = await(p1.future)
      latch(7).open()
      Await.ready(latch(8), TestLatch.DefaultTimeout)
      s.length
    }
    for (_ <- f4) latch(9).open()
    p1.future.isCompleted mustBe (false)
    f4.isCompleted mustBe (false)
    p1 complete Success("Hello")
    Await.ready(latch(7), TestLatch.DefaultTimeout)
    p1.future.isCompleted mustBe (true)
    f4.isCompleted mustBe (false)
    latch(8).open()
    Await.ready(latch(9), TestLatch.DefaultTimeout)
    Await.ready(f4, defaultTimeout).isCompleted mustBe (true)
  }
  @Test def `should not deadlock with nested await (ticket 1313)`() {
    val simple = async {
      await { Future { } }
      val unit = Future(())
      val umap = unit map { _ => () }
      Await.result(umap, Inf)
    }
    Await.ready(simple, Inf).isCompleted mustBe (true)
    val l1, l2 = new TestLatch
    val complex = async {
      await{ Future { } }
      blocking {
        val nested = Future(())
        for (_ <- nested) l1.open()
        Await.ready(l1, TestLatch.DefaultTimeout) // make sure nested is completed
        for (_ <- nested) l2.open()
        Await.ready(l2, TestLatch.DefaultTimeout)
      }
    }
    Await.ready(complex, defaultTimeout).isCompleted mustBe (true)
  }
  @Test def `should not throw when Await.ready`() {
    val expected = try Success(5 / 0) catch { case a: ArithmeticException => Failure(a) }
    val f = async { await(future(5)) / 0 }
    Await.ready(f, defaultTimeout).value.get.toString mustBe expected.toString
  }
}
| anand-singh/async | src/test/scala/scala/async/run/futures/FutureSpec.scala | Scala | bsd-3-clause | 16,865 |
package com.rikmuld.corerm.objs.blocks.bounds
import net.minecraft.nbt.NBTTagCompound
import net.minecraft.util.math.AxisAlignedBB
object Bounds {

  /** Rebuild a [[Bounds]] from the six doubles written by [[Bounds#writeToNBT]]. */
  def readFromNBT(tag: NBTTagCompound): Bounds = {
    def d(name: String) = tag.getDouble(name)
    Bounds(d("xMin"), d("yMin"), d("zMin"), d("xMax"), d("yMax"), d("zMax"))
  }

  /** True when `tag` appears to carry serialized bounds (checks the "xMin" key only). */
  def canRead(tag: NBTTagCompound): Boolean = tag.hasKey("xMin")

  /** Wrap an existing Minecraft bounding box as a [[Bounds]] value. */
  def fromAxis(axis: AxisAlignedBB): Bounds =
    Bounds(axis.minX, axis.minY, axis.minZ, axis.maxX, axis.maxY, axis.maxZ)
}
case class Bounds(xMin: Double, yMin: Double, zMin: Double, xMax: Double, yMax: Double, zMax: Double) {

  /** The bounding box exactly as described by this value. */
  val bounds: AxisAlignedBB =
    new AxisAlignedBB(xMin, yMin, zMin, xMax, yMax, zMax)

  /** The same box clamped into the unit cube: minimums floored at 0,
    * maximums capped at 1 on every axis.
    */
  val collisionBounds: AxisAlignedBB =
    new AxisAlignedBB(
      Math.max(xMin, 0), Math.max(yMin, 0), Math.max(zMin, 0),
      Math.min(xMax, 1), Math.min(yMax, 1), Math.min(zMax, 1))

  /** Write the six coordinates into `tag`, readable back via [[Bounds.readFromNBT]]. */
  def writeToNBT(tag: NBTTagCompound) {
    Seq(
      "xMin" -> xMin, "yMin" -> yMin, "zMin" -> zMin,
      "xMax" -> xMax, "yMax" -> yMax, "zMax" -> zMax
    ).foreach { case (name, value) => tag.setDouble(name, value) }
  }
}
/* __ *\\
** ________ ___ / / ___ __ ____ Scala.js Test Suite **
** / __/ __// _ | / / / _ | __ / // __/ (c) 2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ |/_// /_\\ \\ http://scala-js.org/ **
** /____/\\___/_/ |_/____/_/ | |__/ /____/ **
** |/____/ **
\\* */
package scala.scalajs.test
package compiler
import scala.scalajs.test.JasmineTest
import scala.scalajs.js.Any.fromInt
/** Scala.js test suite checking that Char coercions match JVM semantics:
  * chars are unsigned 16-bit, so negative and oversized numeric values
  * must wrap into the 0..65535 range, and Char arithmetic widens to Int.
  */
object CharTest extends JasmineTest {

  describe("Char primitives") {

    it("should always be positive (when coerced)") {
      expect(-3.toByte.toChar.toInt).toEqual(65533)
      expect(-100.toShort.toChar.toInt).toEqual(65436)
      expect(-66000.toChar.toInt).toEqual(65072)
      expect(-4567L.toChar.toInt).toEqual(60969)
      expect(-5.3f.toChar.toInt).toEqual(65531)
      expect(-7.9.toChar.toInt).toEqual(65529)
    }

    it("should overflow (when coerced)") {
      // Values wider than 16 bits keep only their low 16 bits.
      expect(347876543.toChar.toInt).toEqual(11455)
      expect(34234567876543L.toChar.toInt).toEqual(57279)
    }

    it("should overflow with *") {
      def test(a: Char, b: Char, expected: Int): Unit =
        expect(a * b).toEqual(expected)

      // note: expected values are constant-folded by the compiler on the JVM
      test(Char.MaxValue, Char.MaxValue, Char.MaxValue * Char.MaxValue)
    }
  }
}
| swhgoon/scala-js | test/src/test/scala/scala/scalajs/test/compiler/CharTest.scala | Scala | bsd-3-clause | 1,532 |
/**
* CopyrightΒ (c)Β 2016 IntelΒ CorporationΒ
*
* LicensedΒ underΒ theΒ ApacheΒ License,Β VersionΒ 2.0Β (theΒ "License");
* youΒ mayΒ notΒ useΒ thisΒ fileΒ exceptΒ inΒ complianceΒ withΒ theΒ License.
* YouΒ mayΒ obtainΒ aΒ copyΒ ofΒ theΒ LicenseΒ at
*
* Β Β Β Β Β http://www.apache.org/licenses/LICENSE-2.0
*
* UnlessΒ requiredΒ byΒ applicableΒ lawΒ orΒ agreedΒ toΒ inΒ writing,Β software
* distributedΒ underΒ theΒ LicenseΒ isΒ distributedΒ onΒ anΒ "ASΒ IS"Β BASIS,
* WITHOUTΒ WARRANTIESΒ ORΒ CONDITIONSΒ OFΒ ANYΒ KIND,Β eitherΒ expressΒ orΒ implied.
* SeeΒ theΒ LicenseΒ forΒ theΒ specificΒ languageΒ governingΒ permissionsΒ and
* limitationsΒ underΒ theΒ License.
*/
package org.trustedanalytics.sparktk.testutils
import org.apache.log4j.{ Level, Logger }
/**
* Utility methods related to logging in Unit testing.
* <p>
* Logging of underlying libraries can get annoying in unit
* tests so it is nice to be able to change easily.
* </p>
*
* TODO: this class doesn't really seem to work any more. It needs to be updated to turn off logging.
*
*/
object LogUtils {

  /** Raise the logging threshold of the noisiest Spark-related loggers to
   *  WARN, since Spark produces a large amount of output otherwise.
   */
  def silenceSpark(): Unit = {
    val noisyLoggers = Seq(
      "o.a.spark.scheduler.DAGScheduler",
      "o.a.spark.scheduler.TaskSetManager",
      "org.eclipse.jetty",
      "akka")
    setLogLevels(Level.WARN, noisyLoggers)
  }

  /** Apply `level` to every named log4j logger in `loggers`. */
  private def setLogLevels(level: org.apache.log4j.Level, loggers: TraversableOnce[String]): Unit =
    for (name <- loggers) Logger.getLogger(name).setLevel(level)
}
| ashaarunkumar/spark-tk | sparktk-core/src/test/scala/org/trustedanalytics/sparktk/testutils/LogUtils.scala | Scala | apache-2.0 | 1,544 |
package bundlepricing.util
import scalaz.{Foldable1, Monad, NonEmptyList, Order, Semigroup}
/** An immutable Set with at least one element */
/** An immutable Set that is guaranteed to hold at least one element. */
class NonEmptySet[A] private(raw: Set[A]) {
  // Invariant guarded at construction time; all operations below preserve it
  // (removal returns Option so the last element can never be silently dropped).
  assert(raw.nonEmpty)

  def contains(a: A): Boolean = raw(a)

  def size: Int = raw.size

  /** Remove an element. Yields None when the removed element was the only one. */
  def -(k: A): Option[NonEmptySet[A]] =
    raw.size match {
      case 1 => if (raw.contains(k)) None else Some(this)
      case _ => Some(new NonEmptySet(raw - k))
    }

  /** Add an element to the set. */
  def +(a: A): NonEmptySet[A] = new NonEmptySet(raw + a)

  def ++(that: NonEmptySet[A]): NonEmptySet[A] = new NonEmptySet(raw ++ that.toSet)

  def ++(that: Set[A]): NonEmptySet[A] = new NonEmptySet(raw ++ that)

  def map[B](f: A => B): NonEmptySet[B] = new NonEmptySet(raw.map(f))

  def flatMap[B](f: A => NonEmptySet[B]): NonEmptySet[B] =
    new NonEmptySet(raw.flatMap(a => f(a).toSet))

  def foldLeft[B](z: B)(f: (B, A) => B): B = raw.foldLeft(z)(f)

  /** Fold after mapping; non-emptiness means no zero element is required. */
  def foldMap1[B: Semigroup](f: A => B): B = {
    val S = Semigroup[B]
    raw.tail.foldLeft(f(raw.head)) { (acc, a) => S.append(acc, f(a)) }
  }

  /** Right fold with the last element as the seed. */
  def foldMapRight1[B](z: A => B)(f: (A, => B) => B): B = {
    val rev = toList.reverse
    rev.tail.foldLeft(z(rev.head))((acc, a) => f(a, acc))
  }

  def toSet: Set[A] = raw

  def toList: List[A] = raw.toList

  def toNel: NonEmptyList[A] = NonEmptyList(raw.head, raw.tail.toSeq: _*)

  override def toString: String = raw.mkString("NonEmptySet(", ",", ")")
}
object NonEmptySet {

  /** Builds a set from one mandatory element plus any extras; duplicates collapse. */
  def apply[A](one: A, others: A*): NonEmptySet[A] =
    new NonEmptySet[A](Set(others: _*) + one)

  /** Union is associative, so NonEmptySet forms a semigroup (there is no empty element). */
  implicit def semigroup[A]: Semigroup[NonEmptySet[A]] =
    Semigroup.instance[NonEmptySet[A]] { (a, b) => a ++ b }

  // delegate to the instance for Set, which does crazy stuff to make use of Order
  // NOTE: explicit result type added — implicit definitions should always declare
  // their type so that implicit resolution is stable and predictable.
  implicit def order[A: Order]: Order[NonEmptySet[A]] =
    Order[Set[A]](scalaz.std.set.setOrder).contramap[NonEmptySet[A]](_.toSet)

  /** Monad + Foldable1 instance; every operation delegates to the corresponding
   *  method on NonEmptySet itself.
   */
  implicit def nonEmptySetInstance: Monad[NonEmptySet] with Foldable1[NonEmptySet] =
    new Monad[NonEmptySet] with Foldable1[NonEmptySet] {
      override def foldLeft[A, B](fa: NonEmptySet[A], z: B)(f: (B, A) => B): B = fa.foldLeft(z)(f)
      def foldMap1[A, B](fa: NonEmptySet[A])(f: (A) => B)(implicit F: Semigroup[B]): B = fa foldMap1 f
      def foldMapRight1[A, B](fa: NonEmptySet[A])(z: (A) => B)(f: (A, => B) => B): B = fa.foldMapRight1(z)(f)
      override def map[A, B](fa: NonEmptySet[A])(f: (A) => B): NonEmptySet[B] = fa map f
      def bind[A, B](fa: NonEmptySet[A])(f: (A) => NonEmptySet[B]): NonEmptySet[B] = fa flatMap f
      def point[A](a: => A): NonEmptySet[A] = NonEmptySet(a)
    }
}
package lang.lightweightjava
import lang.lightweightjava.ast._
import lang.lightweightjava.ast.returnvalue._
import lang.lightweightjava.ast.statement._
import lang.lightweightjava.configuration._
import name.Name
/** Small-step interpreter for Lightweight Java (LJ) configurations.
 *
 *  A configuration bundles the program, a variable state (name -> value),
 *  a heap (OID -> (class, fields)) and the remaining program flow. The
 *  interpreter consumes one statement per step until the flow is empty or
 *  an exception configuration is produced.
 */
object Interpreter {
  /** Type-checks the program once, then runs the step loop to completion. */
  def interpret(configuration: Configuration): Configuration = {
    // Type checking will raise an error if there are type errors, so if the call returns,
    // it can be assumed that the program is correctly typed.
    // Please also note that only the program is type checked, so type errors in the program flow
    // might cause undefined behavior or runtime errors. This is as defined by LJ specification.
    configuration.program.typeCheck()

    // Using a separate method here to avoid unnecessary type checks after each interpretation step.
    interpretInternal(configuration)
  }

  /** Executes one statement of the program flow, then recurses on the
   *  resulting configuration. Terminates when the flow is empty or when a
   *  LangException turns the configuration into an ExceptionConfiguration.
   */
  private def interpretInternal(configuration: Configuration): Configuration = {
    configuration match {
      case NormalConfiguration(program, state, heap, programFlow@_*) =>
        if (programFlow.isEmpty) configuration
        else {
          // Temporary variables for configuration parameters that can be overwritten
          // and are used for the next interpretation step
          var newState = state + (Null.name -> NullValue)
          var newHeap = heap
          var newProgramFlow = programFlow.tail
          var exception: LangException = null
          // Dispatch on the head statement; each case updates the temporaries above.
          programFlow.head match {
            // Equality test between two variables selects which branch is pushed
            // onto the front of the remaining flow.
            case ConditionalBranch(leftVariable, rightVariable, ifBranch, elseBranch) =>
              if (state(leftVariable.name) == state(rightVariable.name)) newProgramFlow = ifBranch +: newProgramFlow
              else newProgramFlow = elseBranch +: newProgramFlow
            // target := sourceObject.sourceField; reading through null raises NPE.
            case FieldRead(target, sourceObject, sourceField) => newState(sourceObject.name) match {
              case oid@OID(_) =>
                val (_, fields) = newHeap(oid)
                newState = newState + (target.name -> fields(sourceField.name))
              case NullValue => exception = NullPointerException
            }
            // targetObject.targetField := source; writing through null raises NPE.
            case FieldWrite(targetObject, targetField, source) => newState(targetObject.name) match {
              case oid@OID(_) =>
                val (objectType, fields) = newHeap(oid)
                val updatedFieldMap = fields + (targetField.name -> newState(source.name))
                newHeap = newHeap + (oid -> (objectType, updatedFieldMap))
              case NullValue => exception = NullPointerException
            }
            // target := sourceObject.methodName(args): inline the method body after
            // alpha-renaming its parameters (and "this") to fresh names, binding the
            // argument values, and converting the return value into an assignment.
            case MethodCall(target, sourceObject, methodName, methodParameters@_*) => newState(sourceObject.name) match {
              case oid@OID(_) =>
                val (objectType, _) = newHeap(oid)
                val methodDefinition = program.findMethod(program.findClassDefinition(objectType.asInstanceOf[ClassName]).get, methodName.name).get
                methodDefinition match {
                  case MethodDefinition(MethodSignature(_, _, _, parameters@_*), methodBody) =>
                    // Generate a map with fresh names for each method parameter
                    var renamedParameters = parameters.foldLeft(Map[Name, Name]())((renamedMap, parameter) =>
                      renamedMap + (parameter.variableName.name -> configuration.freshName(renamedMap.values.toSet, parameter.variableName.name)))
                    // Generate a fresh name for "this"
                    val thisRenaming = configuration.freshName(renamedParameters.values.toSet, This.name)
                    renamedParameters = renamedParameters + (This.name -> thisRenaming)
                    // Perform the renaming to fresh names
                    val renamedMethodBody = methodBody.rename(id => id.rename(renamedParameters.getOrElse(id.name, id.name))).asInstanceOf[MethodBody]
                    // Add the values used for the call to the stack (and let "this" point to the OID of the method owning object)
                    newState = newState ++ parameters.
                      zip(methodParameters).
                      map(p => (renamedParameters(p._1.variableName.name), newState(p._2.name))).
                      toMap[Name, Value] + (thisRenaming -> oid)
                    // Add the method body at the front the program flow (and replace this statement by a simple assignment of the return value)
                    newProgramFlow = (renamedMethodBody.statements :+ (renamedMethodBody.returnValue match {
                      case ReturnVariable(returnVariable) => VariableAssignment(target, returnVariable)
                      case ReturnField(returnObject, returnField) => FieldRead(target, returnObject, returnField)
                      case ReturnMethodCall(returnObject, returnMethodName, returnMethodParameters@_*) => MethodCall(target, returnObject, returnMethodName, returnMethodParameters:_*)
                      case ReturnObjectInstantiation(returnClassRef) => ObjectInstantiation(target, returnClassRef)
                    })) ++: newProgramFlow
                }
              case NullValue => exception = NullPointerException
            }
            // Same inlining as MethodCall, but the return value is discarded so
            // only the renamed body statements are pushed onto the flow.
            case VoidMethodCall(sourceObject, methodName, methodParameters@_*) => newState(sourceObject.name) match {
              case oid@OID(_) =>
                val (objectType, _) = newHeap(oid)
                val methodDefinition = program.findMethod(program.findClassDefinition(objectType.asInstanceOf[ClassName]).get, methodName.name).get
                methodDefinition match {
                  case MethodDefinition(MethodSignature(_, _, _, parameters@_*), methodBody) =>
                    // Generate a map with fresh names for each method parameter
                    var renamedParameters = parameters.foldLeft(Map[Name, Name]())((renamedMap, parameter) =>
                      renamedMap + (parameter.variableName.name -> configuration.freshName(renamedMap.values.toSet, parameter.variableName.name)))
                    // Generate a fresh name for "this"
                    val thisRenaming = configuration.freshName(renamedParameters.values.toSet, This.name)
                    renamedParameters = renamedParameters + (This.name -> thisRenaming)
                    // Perform the renaming to fresh names
                    val renamedMethodBody = methodBody.rename(id => id.rename(renamedParameters.getOrElse(id.name, id.name))).asInstanceOf[MethodBody]
                    // Add the values used for the call to the stack (and let "this" point to the OID of the method owning object)
                    newState = newState ++ parameters.
                      zip(methodParameters).
                      map(p => (renamedParameters(p._1.variableName.name), newState(p._2.name))).
                      toMap[Name, Value] + (thisRenaming -> oid)
                    // Add the method body at the front the program flow (and replace this statement by a simple assignment of the return value)
                    newProgramFlow = renamedMethodBody.statements ++: newProgramFlow
                }
              case NullValue => exception = NullPointerException
            }
            // target := new classRef(): allocate a fresh OID and null-initialize
            // all declared fields of the class.
            case ObjectInstantiation(target, classRef) =>
              val newValue = OID(configuration.freshOID())
              val newFieldsList: Map[Name, Value] = classRef match {
                case className:ClassName =>
                  val classFields = program.findAllFields(program.findClassDefinition(className).get)
                  // All class fields are initialized as "null"
                  classFields.map(field => (field.fieldName.name, NullValue)).toMap[Name, Value]
                // Object class doesn't have fields
                case _ => Map()
              }
              val newHeapEntry = (classRef, newFieldsList)
              newState = newState + (target.name -> newValue)
              newHeap = newHeap + (newValue -> newHeapEntry)
            // A block simply splices its statements onto the front of the flow.
            case StatementBlock(blockBody@_*) => newProgramFlow = blockBody ++: newProgramFlow
            case VariableAssignment(target, source) => newState = newState + (target.name -> newState(source.name))
          }
          // Continue stepping unless this step raised a language-level exception.
          if (exception == null)
            interpretInternal(NormalConfiguration(program, newState, newHeap, newProgramFlow: _*))
          else
            ExceptionConfiguration(program, newState, newHeap, exception)
        }
      case ExceptionConfiguration(program, state, heap, exception) => sys.error("Can't interpret exception configuration: " + exception.message)
    }
  }
}
| matthisk/hygienic-transformations | scala/src/main/scala/lang/lightweightjava/Interpreter.scala | Scala | lgpl-3.0 | 8,479 |
package scala
//: Created by Administrator on 2015/12/4.
//: scala.Test.java in project of island_bak
/** Small demo class exercising methods, auxiliary constructors and
 *  function values.
 */
class Test {

  /** Auxiliary constructor; the arguments are accepted but unused. */
  def this(a: Int, b: Int) {
    this()
  }

  /** Returns the smaller of `x` and `y`.
   *
   *  Bug fix: the previous implementation assigned `a = y` when `x < y`,
   *  which returned the LARGER of the two values despite being named `min`.
   */
  def min(x: Int, y: Int): Int = if (x < y) x else y

  /** Function value returning the larger of two ints. */
  val max = (x: Int, y: Int) => if (x > y) x else y

  /** Function value that increments its argument by one. */
  val add_one: (Int) => Int = x => x + 1
}
/** Companion providing a factory method and a demo entry point. */
object Test {

  /** Factory mirroring the two-argument auxiliary constructor. */
  def apply(x: Int, y: Int): Test = new Test(x, y)

  /** Demo entry point: exercises `min` (result discarded) and prints `max`. */
  def main(args: Array[String]): Unit = {
    val demo = new Test()
    demo.min(30, 40) // result intentionally unused, kept for parity with the demo
    println(demo.max(10, 20))
  }
}
/**
* Created by root on 15-7-6.
*/
/** Minimal entry point that prints a greeting to stdout. */
object HelloWorld {
  def main(args: Array[String]): Unit =
    println("Hello world")
}
package nlpdata.datasets.wiktionary
import java.nio.file.Path
/** Loads verb inflection data from a Wiktionary dump directory on disk. */
class WiktionaryFileSystemService(location: Path) {

  /** Constructs an Inflections object containing all known inflections
    * for a given set of words.
    *
    * @param tokens an iterator over all words we might want inflections for
    */
  def getInflectionsForTokens(tokens: Iterator[String]): Inflections = {
    // Count every token so the dictionary knows which words to cover.
    val counts = new CountDictionary()
    for (token <- tokens) counts.addString(token)
    val dictionary = new VerbInflectionDictionary(counts)
    // Inflection table shipped alongside the dump at a fixed file name.
    val inflectionsFile = location.resolve("en_verb_inflections.txt")
    dictionary.loadDictionaryFromFile(inflectionsFile.toString)
    new Inflections(dictionary)
  }
}
| julianmichael/nlpdata | nlpdata/src-jvm/nlpdata/datasets/wiktionary/WiktionaryFileSystemService.scala | Scala | mit | 696 |
package net.atos.cis.web.endpoint
import org.eclipse.jetty.server.Server
/** Entry point that boots an embedded Jetty server serving the CIS handler. */
object CisEndpoint {

  /** Handler instance mounted on the embedded server. */
  val cisHandler = new CISHandler

  /** Port taken from the PORT environment variable, falling back to 8091
   *  when the variable is unset or not a valid integer.
   *
   *  Robustness fix: the previous version called `toInt` unconditionally on
   *  any non-null value, crashing with NumberFormatException on bad input.
   */
  def defaultPort: Int = {
    val portString = System.getenv("PORT")
    println("PortString[" + portString + "]")
    val port = Option(portString)
      .flatMap(p => scala.util.Try(p.toInt).toOption)
      .getOrElse(8091)
    println("Port[" + port + "]")
    port
  }

  /** Starts Jetty on the resolved port and blocks until it shuts down. */
  def main(args: Array[String]): Unit = {
    val server = new Server(defaultPort)
    server.setHandler(cisHandler)
    server.start()
    server.join()
  }
}
/*
* Copyright (C) 2017
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package code
package lib
import model._
import net.liftweb._
import mapper._
import util._
import common._
import TimeHelpers._
import http._
import rest._
import json._
import provider.HTTPCookie
import net.liftweb.json.JsonDSL._
import net.liftweb.json.JsonAST._
/** Lift REST endpoint serving the "json/sortedlist" API: topic autocomplete
 *  suggestions, write-in topic creation, and per-user sort-order persistence
 *  backed by a browser cookie.
 */
object JsonUserSortedListHandler extends RestHelper {
  // Render topics as jQuery-UI-autocomplete-style objects: [{"label": title}, ...].
  def toJSON (n: List[SortedTopic]): JValue = {
    n.map( t => "label" -> t.title.get )
  }

  serve( "json" / "sortedlist" prefix {
    // GET /json/sortedlist/suggest?term=... -> topics whose title contains `term`.
    case "suggest" :: _ Get _ =>
      for{
        term <- S.param("term") ?~ "term parameter missing" ~> 400
      } yield {
        toJSON(SortedTopic.findSuggestForString("%" + term + "%"))
      }
    // POST /json/sortedlist/add?term=... -> find-or-create the topic, bump its
    // write-in counter, and return its hash. Failures are reported in the JSON
    // body rather than as an HTTP error.
    case "add" :: _ Post _ =>
      for{
        term <- S.param("term") ?~ "term parameter missing" ~> 400
      } yield {
        var out: JValue = "status" -> "success"
        try{
          val newTopic = SortedTopic.findOrCreateTopic(term)
          newTopic.writeins(newTopic.writeins.get + 1).save
          out = ("status" -> "success") ~ ("value" -> newTopic.hash.get)
        } catch {
          // NOTE(review): deliberately best-effort — any failure maps to an
          // error status in the response body instead of propagating.
          case _: Throwable => out = "status" -> "internal server error"
        }
        out
      }
    // GET /json/sortedlist/sort?o=a,b,c -> persist the comma-separated sort
    // order against the caller's "topicsortorder" cookie (created if absent).
    case "sort" :: _ Get _ =>
      for{
        sortorder <- S.param("o") ?~ "sort order parameter missing" ~> 400
        request <- S.containerRequest
      } yield {
        val cookie = S.findCookie("topicsortorder") match {
          case Full(cookie) => cookie
          case _ => new HTTPCookie("topicsortorder", Full("empty"), Empty, Full("/"), Full(31557600), Empty, Empty, Empty) //new HTTPCookie("topicsortorder", Full("empty"), Empty, Empty, Full(31557600), Full(1), Empty, Empty)
        }
        val sortList = sortorder.split(",").toList
        // Persist the order plus request metadata, then echo the DB-issued hash
        // back in both the main cookie and a secondary "sortref" cookie.
        val dbcookie = SortedTopicCookie.findOrCreateCookie(cookie.value.getOrElse(""))
        dbcookie.ipaccess(request.remoteAddress).useragent(request.userAgent openOr null).topics(sortList).save
        S.addCookie(cookie.setValue(dbcookie.hash.get))
        S.addCookie(new HTTPCookie("sortref", Full(dbcookie.hash.get), Empty, Full("/"), Full(31557600), Empty, Empty, Empty))
        val ret: JValue = ("cookie" -> cookie.value.openOr(""))
        ret
      }
  })
}
// Exhaustivity-checking fixture for pattern matches over union types (Scala 3).
// Several of the matches below are DELIBERATELY non-exhaustive; the inline
// comments record the warnings the compiler is expected to emit. Do not "fix"
// the missing cases — they are the point of the test.
object PatmatOrType {
  def foo1(x: Int | Double) = x match {
    case _: Int => true
    case _: Double => true
  }

  def foo2a(x: Int | Double | String) = x match { // _: String not matched
    case _: Int => true
    case _: Double => true
  }

  def foo2b(x: Int | Double | String) = x match {
    case _: Int => true
    case _: (Double | String) => true
  }

  def foo3(x: Option[Int | Double | String]) = x match { // warning: None, Some(_: String) not matched
    case Some(_: Int) => true
    case Some(_: Double) => true
  }

  def foo4(x: Option[Int | Double | String]) = x match {
    case Some(_: Int) => true
    case Some(_: Double) => true
    case Some(_: String) => true
    case None => false
  }

  def foo5a(x: Option[Int | Double | String]) = x match {
    case Some(_: (Int | Double)) => true
    case Some(_: String) => true
    case None => false
  }

  def foo5b(x: Option[Int | Double | String]) = x match { // warning: Some(_: String) not matched
    case Some(_: (Int | Double)) => true
    case None => false
  }
}
//package com.eharmony.aloha.models.ensemble.maxima
//
//import java.{lang => jl}
//
//import org.junit.Test
//import org.junit.Assert._
//import org.junit.runner.RunWith
//import org.junit.internal.runners.JUnit4ClassRunner
//
//import com.eharmony.aloha.id.ModelId
//import com.eharmony.aloha.models.ConstantModel
//import com.eharmony.aloha.models.ensemble.tie.TakeFirstTieBreaker
//import com.eharmony.aloha.score.proto.conversions.ScoreConverter
//import com.eharmony.aloha.score.proto.conversions.rich.RichScore
//import com.eharmony.aloha.score.proto.conversions.ScoreConverter.Implicits._
//import com.eharmony.aloha.score.Scores.Score
//import com.eharmony.aloha.score.basic.ModelOutput
//
//@RunWith(classOf[JUnit4ClassRunner])
//class ArgMaxModelTest {
// val digits = IndexedSeq("zero", "one", "two", "three", "four", "five", "six", "seven", "eight", "nine")
//
// @Test def test1() {
// val m = getArgMaxModelInt
// val s: Score = m.score(null)
// assertTrue("There should be no errors", s.allErrors.isEmpty)
// assertEquals("three", s.relaxed.asString.get)
// }
//
// /** Get an argMax model
// * @return [[com.eharmony.aloha.models.ensemble.maxima.ArgMax]] [Any, Int, String].
// */
// def getArgMaxModelInt = getArgMaxModel[Int](Seq(3,1,4,2), _.toLong)
//
// def getArgMaxModelInteger: ArgMax[Any, Integer, String] = getArgMaxModel[jl.Integer](Seq(3,1,4,2), _.toLong)
//
// private[this] def getArgMaxModel[A: ScoreConverter: Ordering](vals: Seq[A], toLong: A => Long) = {
// val m = ArgMax(
// vals.map(i => ConstantModel(ModelOutput(i), ModelId(toLong(i), i.toString))),
// (1 to vals.size).map(digits),
// new TakeFirstTieBreaker[A],
// ModelId(0, "0")
// )
// m
// }
//
// /**
// * output should be 2.0. b beats a and emits a 7. 3 beats 2 and emits a 6. 7 beats 6 so a 2.0 is emitted.
// * {{{
// * // +---------------------------------+
// * // 1 -----------> |2 --------> ("a") (label: 3) |
// * // \\ | \\ | 7 (label 2.0)
// * // \\ | -------> ("b") (label: 7) |
// * // \\ +---------------------------------+
// * // \\ +---------------------------------+
// * // -------> | 5 --------> (2 byte) (label: 4) |
// * // | \\ | 6 (label 5.0)
// * // | -------> (3 byte) (label: 6) |
// * // +---------------------------------+
// * }}}
// */
// def getArgMaxTree = {
//
// // ArgMax[Any, String, java.lang.Integer]
// val a = ArgMax(
// Seq(ConstantModel(ModelOutput("a"), ModelId(3, "3")),
// ConstantModel(ModelOutput("b"), ModelId(7, "7"))),
// IndexedSeq(jl.Integer.valueOf(3), jl.Integer.valueOf(7)),
// new TakeFirstTieBreaker,
// ModelId(2, "2")
// )
//
// // Notice the
// // ArgMax[Any, Byte, java.lang.Integer]
// val b = ArgMax(
// Seq(ConstantModel(ModelOutput(2.toByte), ModelId(4, "4")),
// ConstantModel(ModelOutput(3.toByte), ModelId(6, "6"))),
// IndexedSeq(jl.Integer.valueOf(4), jl.Integer.valueOf(6)),
// new TakeFirstTieBreaker,
// ModelId(5, "5")
// )
//
// // ArgMax[Any, java.lang.Integer, java.lang.Double]
// val c = ArgMax(
// Seq(a, b),
// IndexedSeq(jl.Double.valueOf(2), jl.Double.valueOf(5)),
// new TakeFirstTieBreaker,
// ModelId(1, "1")
// )
// c
// }
//}
| eHarmony/aloha | aloha-core/src/test/scala/com/eharmony/aloha/models/ensemble/maxima/ArgMaxModelTest.scala | Scala | mit | 3,749 |
package net.chwthewke.passman
package data
package legacy
import java.time.LocalDateTime
import java.time.temporal.ChronoUnit
/** Decodes tick-style timestamps (100-nanosecond units counted from
 *  0001-01-01T00:00, as implied by TicksPerMilli and Date0 below) into
 *  [[LocalDateTime]] values. Encoding is intentionally unsupported.
 */
object TicksCodec extends Codec[LocalDateTime] with SafeCodec[LocalDateTime] {

  // 10,000 ticks per millisecond, i.e. one tick is 100 ns.
  private val TicksPerMilli = 10000L
  private val TicksPerDay = 24L * 60L * 60L * 1000L * TicksPerMilli
  private val MinTicks = 0
  private val MaxTicks = ((365L * 400L + 97L) * 25L - 366L) * TicksPerDay - 1L

  // Epoch of the tick representation: year 1, January 1st, midnight.
  private val Date0 = LocalDateTime.of(1, 1, 1, 0, 0)

  override def decodeUnsafely(string: String): LocalDateTime = {
    val ticks = string.toLong
    if (ticks < MinTicks || ticks > MaxTicks)
      throw new IllegalArgumentException("Ticks out of range: " + ticks)
    // Split into whole days and the millisecond offset within that day.
    val wholeDays = ticks / TicksPerDay
    val millisOfDay = (ticks % TicksPerDay) / TicksPerMilli
    Date0.plusDays(wholeDays).plus(millisOfDay, ChronoUnit.MILLIS)
  }

  override def encode(value: LocalDateTime): String = sys.error("TicksCodec cannot encode")
}
| chwthewke/passman | passman-core/src/main/scala/net/chwthewke/passman/data/legacy/TicksCodec.scala | Scala | bsd-3-clause | 1,013 |
package cc.factorie.app.nlp.relation
import cc.factorie.app.nlp._
import cc.factorie.app.nlp.coref.{ParseForwardCoref, WithinDocCoref}
import scala.io.Source
import java.io.InputStream
/**
* @author John Sullivan, Benjamin Roth
*/
/** DocumentAnnotator that finds relation mentions by matching the token
 *  sequence between two coreference mentions against a set of surface-pattern
 *  predictors. Results are attached to the document as a RelationMentionSeq.
 */
class PatternBasedRelationFinder(predictors:Seq[PatternRelationPredictor]) extends DocumentAnnotator{

  /** How the annotation of this DocumentAnnotator should be printed in one-word-per-line (OWPL) format.
    If there is no per-token annotation, return null.  Used in Document.owplString. */
  def tokenAnnotationString(token: Token) = null

  // This annotator adds a RelationMentionSeq attribute to the document.
  def postAttrs = Seq(classOf[RelationMentionSeq])

  // Requires within-document coreference (plus everything parse-based coref needs).
  def prereqAttrs = (Seq(classOf[WithinDocCoref]) ++ ParseForwardCoref.prereqAttrs).distinct

  def process(doc: Document) = {
    val coref = doc.coref
    // Mentions in document order (sorted by their token spans).
    val mentions = coref.mentions.sortBy(_.phrase.asInstanceOf[TokenSpan]).toList

    /** this produces a sliding window of 4 mentions that we then compare to generate contexts. Each mention should be compared
      * to the three mentions before and after it in the following loop. The last element is a singleton list which we drop.
      * The last mention in the document has already been compared to the three mentions that preceed it.
      * */
    val mentionGrouping = (0 until mentions.size).map(idx => mentions.slice(idx, math.min(idx + 4, mentions.size))).dropRight(1).toList

    // Pair each mention with its window neighbors, restricted to pairs in the
    // same (reasonably short) sentence; emit both argument orderings.
    val relationMentions = (for(m1 :: ms <- mentionGrouping;
                               m2 <- ms;
                               if ((m1.phrase.sentence eq m2.phrase.sentence) && (m1.phrase.sentence.length < 100)))
      yield {Seq(new RelationMention(m1, m2, true), new RelationMention(m2, m1, false))}).flatten

    // Run every predictor over every candidate pair; positive match levels
    // become TACRelation annotations on the relation mention.
    for (rm <- relationMentions;
         predictor <- predictors;
         matchLevel = predictor.relationMatch(rm);
         if matchLevel > 0.0) {
      rm._relations.+=(TACRelation(predictor.relation, matchLevel, rm.arg1.phrase.sentence.string))
    }
    // Only pairs that matched at least one predictor are kept on the document.
    val relSet = new RelationMentionSeq()
    relSet.++=(relationMentions.filter(_._relations.nonEmpty))
    doc.attr += relSet
    doc
  }
}
// Finder wired with the shared pattern file and argument-type constraints for the OntoNotes NER tag set.
object OntoNotesPatternBasedRelationFinder extends PatternBasedRelationFinder(PatternRelationPredictor.predictorsFromStreams(getClass.getResourceAsStream("/cc/factorie/app/nlp/relation/patterns.tuned"), getClass.getResourceAsStream("/cc/factorie/app/nlp/relation/argtypes_ontonotes")))

// Finder wired with the shared pattern file and argument-type constraints for the CoNLL NER tag set.
object ConllPatternBasedRelationFinder extends PatternBasedRelationFinder(PatternRelationPredictor.predictorsFromStreams(getClass.getResourceAsStream("/cc/factorie/app/nlp/relation/patterns.tuned"), getClass.getResourceAsStream("/cc/factorie/app/nlp/relation/argtypes_conll")))
/** Predictor for a single relation type.
 *
 *  @param relation           the relation label this predictor emits
 *  @param patternConfidences surface pattern (with $ARG1/$ARG2 placeholders) -> confidence
 *  @param qTypes             NER types admissible for the first (query) argument
 *  @param sTypes             NER types admissible for the second (slot) argument
 */
case class PatternRelationPredictor(relation : String, patternConfidences : Map[String, Double], qTypes : Set[String],
                                    sTypes : Set[String]) {
  // Placeholders used in the pattern strings for the two arguments.
  val ARG1 = "$ARG1"
  val ARG2 = "$ARG2"

  /** Returns the confidence that this predictor's relation holds for the given
    * mention pair, or 0.0 when the NER types or the between-mention pattern do
    * not match. (The previous comment about a pair of booleans was stale.) */
  def relationMatch(rm : RelationMention) : Double = {
    val arg1End = rm.arg1.phrase.last.positionInSentence
    val arg2Start = rm.arg2.phrase.head.positionInSentence
    // Build the pattern from the tokens strictly between the two mentions,
    // with the placeholders on either end; direction follows isArg1First.
    val forwardPattern = ARG1 + " " + rm.arg1.phrase.sentence.slice(arg1End + 1, arg2Start).map(_.string).mkString(" ") + " " + ARG2
    val backwardPattern = ARG2 + " " + rm.arg1.phrase.sentence.slice(arg1End + 1, arg2Start).map(_.string).mkString(" ") + " " + ARG1
    val pattern = if(rm.isArg1First) forwardPattern else backwardPattern
    // NER type of each mention's head token must satisfy the type constraints.
    val arg1Type = rm.arg1.phrase.head.nerTag.baseCategoryValue
    val arg2Type = rm.arg2.phrase.head.nerTag.baseCategoryValue
    val hasMatch = qTypes.contains(arg1Type) && sTypes.contains(arg2Type) && patternConfidences.contains(pattern)
    if(hasMatch) patternConfidences(pattern) else 0.0
  }
}
object PatternRelationPredictor {
  /** Builds one predictor per relation found in the type file, joining in the
    * (pattern -> confidence) entries from the pattern file.
    *
    * Pattern file format (space-separated): confidence relation pattern
    * Type file format: relation arg1type,arg1type... arg2type,arg2type...
    */
  def predictorsFromStreams(patternStream:InputStream, typeFileStream:InputStream):Seq[PatternRelationPredictor] = {
    // relation -> Map(pattern -> confidence), grouped from the flat pattern file.
    val relToPats = Source.fromInputStream(patternStream, "UTF8").getLines.map(_.stripLineEnd.split(" ", 3)).
      map(fields => fields(1) -> (fields(2), fields(0).toDouble)).toList.groupBy(_._1).map { case (k,v) => (k,v.map(_._2).toMap)}

    // reads types from a white-space & comma-separated file of the form:
    // relation arg1type,arg1type... arg2type,arg2type
    // Types of ontonotes domain described here: http://catalog.ldc.upenn.edu/docs/LDC2008T04/OntoNotes-Release-2.0.pdf
    val relToTypes = Source.fromInputStream(typeFileStream, "UTF8").getLines.map(_.stripLineEnd.split(" ", 3)).
      map(fields => fields(0) -> (fields(1).split(',').toSet, fields(2).split(',').toSet)).toList

    // Relations with no patterns still get a predictor (with an empty map).
    for ((rel, (arg1types, arg2types)) <- relToTypes) yield
      new PatternRelationPredictor(rel, relToPats.getOrElse(rel, Map.empty[String, Double]), arg1types, arg2types)
  }
}
| hlin117/factorie | src/main/scala/cc/factorie/app/nlp/relation/PatternBasedRelationFinder.scala | Scala | apache-2.0 | 4,875 |
package apps
/** The `process` package contains example applications for process-oriented
 *  simulation models.
 */
package object process { }
| NBKlepp/fda | scalation_1.3/scalation_models/src/main/scala/apps/process/package.scala | Scala | mit | 148 |
/*
* Copyright (c) 2016 SnappyData, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package io.snappydata.hydra.northwind
import java.io.{File, FileOutputStream, PrintWriter}
import org.apache.spark.sql.SnappyContext
import org.apache.spark.{SparkConf, SparkContext}
/** Driver that runs the Northwind query validation suite against replicated
 *  row, partitioned row, column, and colocated tables in turn, appending the
 *  validation report to NWTestSparkApp.out.
 */
object NWTestSparkApp {

  // Spark/Snappy contexts are created eagerly at object initialization.
  val conf = new SparkConf().
      setAppName("NWTestSparkApp Application")
  val sc = new SparkContext(conf)
  val snc = SnappyContext(sc)

  /** args(0) must be the directory holding the Northwind data files. */
  def main(args: Array[String]) {
    val dataFilesLocation = args(0)
    println(s"dataFilesLocation : ${dataFilesLocation}")
    snc.setConf("dataFilesLocation", dataFilesLocation)
    snc.sql("set spark.sql.shuffle.partitions=6")
    NWQueries.snc = snc
    NWQueries.dataFilesLocation = dataFilesLocation
    // Append mode so repeated runs accumulate in the same report file.
    val pw = new PrintWriter(new FileOutputStream(new File("NWTestSparkApp.out"), true))
    // Fix: close the report writer even if a validation step throws, so the
    // partial report is flushed instead of being lost with the leaked handle.
    try {
      NWTestUtil.dropTables(snc)
      println("Test replicated row tables queries started")
      NWTestUtil.createAndLoadReplicatedTables(snc)
      NWTestUtil.validateQueries(snc, "Replicated Row Table", pw)
      println("Test replicated row tables queries completed successfully")
      NWTestUtil.dropTables(snc)
      println("Test partitioned row tables queries started")
      NWTestUtil.createAndLoadPartitionedTables(snc)
      NWTestUtil.validateQueries(snc, "Partitioned Row Table", pw)
      println("Test partitioned row tables queries completed successfully")
      NWTestUtil.dropTables(snc)
      println("Test column tables queries started")
      NWTestUtil.createAndLoadColumnTables(snc)
      NWTestUtil.validateQueries(snc, "Column Table", pw)
      println("Test column tables queries completed successfully")
      NWTestUtil.dropTables(snc)
      NWTestUtil.createAndLoadColocatedTables(snc)
      NWTestUtil.validateQueries(snc, "Colocated Table", pw)
    } finally {
      pw.close()
    }
  }
}
| vjr/snappydata | dtests/src/test/scala/io/snappydata/hydra/northwind/NWTestSparkApp.scala | Scala | apache-2.0 | 2,370 |
package im.actor.server.file.local
import java.io.IOException
import java.net.URLEncoder
import java.time.{ Duration, Instant }
import akka.actor.ActorSystem
import akka.http.scaladsl.model.{ HttpMethods, Uri }
import akka.http.scaladsl.util.FastFuture
import akka.stream.{ ActorMaterializer, Materializer }
import better.files._
import im.actor.acl.ACLFiles
import im.actor.server.api.http.{ HttpApi, HttpApiConfig }
import im.actor.server.db.ActorPostgresDriver.api._
import im.actor.server.db.DbExtension
import im.actor.server.file._
import im.actor.server.model.{ File => FileModel }
import im.actor.server.file.local.http.FilesHttpHandler
import im.actor.server.persist.files.FileRepo
import im.actor.util.ThreadLocalSecureRandom

import scala.concurrent.{ ExecutionContext, Future }
import scala.util.{ Failure, Success, Try }

/**
 * File adapter that works with the local file system to store and retrieve files.
 * To use this file adapter as default, provide its FQCN in `modules.files.adapter` of your server config.
 * You also need to provide a default file location in `services.file-storage.location` of your server config.
 * On initialization it will try to create the default file location if it does not exist, and write a simple
 * text file to check the user's read/write permissions.
 *
 * NOTE: the Unicode arrows (`⇒`/`←`/`→`) used by the original sources were corrupted to `β` in this copy;
 * they are restored here as their ASCII equivalents (`=>`/`<-`/`->`), which are semantically identical.
 *
 * @param _system actor system
 */
final class LocalFileStorageAdapter(_system: ActorSystem)
  extends FileStorageAdapter
  with RequestSigning
  with FileStorageOperations
  with LocalUploadKeyImplicits {

  protected implicit val system: ActorSystem = _system
  protected implicit val ec: ExecutionContext = system.dispatcher
  protected implicit val mat: Materializer = ActorMaterializer()

  private val db = DbExtension(system).db
  private val httpConfig: HttpApiConfig = HttpApiConfig.load.get
  private val storageConfig: LocalFileStorageConfig = LocalFileStorageConfig.load.get
  private val httpHandler = new FilesHttpHandler(storageConfig)

  // Expose the local-storage HTTP routes (and their rejection handling) through the shared HTTP API.
  HttpApi(system).registerRoute("localstorage") { _ => httpHandler.routes }
  HttpApi(system).registerRejection("localstorage") { _ => httpHandler.rejectionHandler }

  protected val storageLocation = initFileStorage(storageConfig.location)

  /**
   * Initializes local file storage, and performs a check that the user has
   * read/write permissions on the file system.
   *
   * @param location file storage location
   * @return the same location as the passed parameter if the check succeeds
   * @throws RuntimeException if the directory cannot be created or written to
   */
  def initFileStorage(location: String): String = {
    val initMessage = "Actor local file storage initialized."
    (for {
      storageDir <- Try(file"$location".createIfNotExists(asDirectory = true))
      // better-files `<` overwrites the file with the given text; this probes write permission.
      _ <- Try((storageDir / "init").createIfNotExists() < initMessage)
    } yield ()) match {
      case Success(_) => location
      case Failure(e: IOException) =>
        system.log.error(e, "Failed to initialize local file storage. You should provide correct path to directory with read and write permissions for user `actor`")
        println("Failed to initialize local file storage. It is probably issue with read/write permissions on file system")
        throw new RuntimeException("Failed to initialize local file storage", e)
      case Failure(e) =>
        system.log.error(e, "Failed to initialize local file storage.")
        println("Failed to initialize local file storage.")
        throw new RuntimeException("Failed to initialize local file storage", e)
    }
  }

  val baseUri = Uri(httpConfig.baseUri)

  /**
   * Records the file's metadata in the database and writes its bytes to disk.
   *
   * @param name unsanitized file name (only its `safe` form is persisted)
   * @param data file contents
   * @return DBIO producing the stored file's location descriptor
   */
  override def uploadFile(name: UnsafeFileName, data: Array[Byte]): DBIO[FileLocation] = {
    val rng = ThreadLocalSecureRandom.current()
    val id = ACLFiles.randomLong(rng)
    val accessSalt = ACLFiles.nextAccessSalt(rng)
    val size = data.length
    for {
      _ <- FileRepo.create(id, size.toLong, accessSalt, LocalUploadKey.fileKey(id).key)
      _ <- DBIO.from(createFile(id, name.safe, data))
      _ <- FileRepo.setUploaded(id, name.safe)
    } yield FileLocation(id, ACLFiles.fileAccessHash(id, accessSalt))
  }

  override def uploadFileF(name: UnsafeFileName, data: Array[Byte]): Future[FileLocation] = db.run(uploadFile(name, data))

  /**
   * Generates an upload uri similar to:
   * https://api.actor.im/v1/files/:fileId?expires=:expiresAt&signature=:signature
   *
   * @param fileId uploaded file id
   * @return upload key paired with the signed file upload uri
   */
  override def getFileUploadUrl(fileId: Long): Future[(UploadKey, String)] = {
    val query = baseUri
      .withPath(Uri.Path(s"/v1/files/$fileId"))
      .withQuery(Uri.Query("expires" -> expiresAt().toString))
    FastFuture.successful(LocalUploadKey.fileKey(fileId) -> signRequest(HttpMethods.PUT, query, ACLFiles.secretKey()).toString)
  }

  /**
   * Concatenates the uploaded part files into the final file and marks it uploaded.
   * Part files are deleted only when all of them are present.
   */
  override def completeFileUpload(fileId: Long, fileSize: Long, fileName: UnsafeFileName, partNames: Seq[String]): Future[Unit] = {
    val fileDir = fileDirectory(fileId)
    for {
      isComplete <- haveAllParts(fileDir, partNames, fileSize)
      result <- concatFiles(fileDir, partNames, fileName.safe, fileSize)
      _ <- if (isComplete) deleteUploadedParts(fileDir, partNames) else FastFuture.successful(())
      _ <- db.run(FileRepo.setUploaded(fileId, fileName.safe))
    } yield ()
  }

  override def downloadFile(id: Long): DBIO[Option[Array[Byte]]] = DBIO.from(downloadFileF(id))

  override def downloadFileF(id: Long): Future[Option[Array[Byte]]] = getFileData(id) map (_ map (_.toArray))

  /**
   * Generates a download uri similar to:
   * https://api.actor.im/v1/files/:fileId/fileName?expires=:expiresAt&signature=:signature
   * or, if the file name is not present:
   * https://api.actor.im/v1/files/:fileId?expires=:expiresAt&signature=:signature
   *
   * @param file file model
   * @param accessHash file access hash
   * @return signed file download uri, or None when the access hash does not match
   */
  override def getFileDownloadUrl(file: FileModel, accessHash: Long): Future[Option[String]] = {
    if (ACLFiles.fileAccessHash(file.id, file.accessSalt) == accessHash) {
      // Blank or empty names are omitted from the path; non-empty names are URL-encoded.
      val filePart = Option(file.name) filter (_.trim.nonEmpty) map (n => s"/${URLEncoder.encode(n, "UTF-8")}") getOrElse ""
      val query = baseUri
        .withPath(Uri.Path(s"/v1/files/${file.id}" + filePart))
        .withQuery(Uri.Query("expires" -> expiresAt().toString))
      val signedRequest = signRequest(HttpMethods.GET, query, ACLFiles.secretKey()).toString
      FastFuture.successful(Some(signedRequest))
    } else FastFuture.successful(None)
  }

  /**
   * Generates an upload uri for parts similar to:
   * https://api.actor.im/v1/files/:fileId/:partNumber?expires=:expiresAt&signature=:signature
   *
   * @param fileId file id
   * @param partNumber part number
   * @return upload key paired with the signed part upload uri
   */
  override def getFileUploadPartUrl(fileId: Long, partNumber: Int): Future[(UploadKey, String)] = {
    val query =
      baseUri
        .withPath(Uri.Path(s"/v1/files/$fileId/$partNumber"))
        .withQuery(Uri.Query("expires" -> expiresAt().toString))
    FastFuture.successful(LocalUploadKey.partKey(fileId, partNumber) -> signRequest(HttpMethods.PUT, query, ACLFiles.secretKey()).toString)
  }

  def parseKey(bytes: Array[Byte]): UploadKey = LocalUploadKey.parseFrom(bytes)

  // Signed links are valid for one day; the value is a Unix epoch timestamp in milliseconds.
  private def expiresAt(): Long = Instant.now.plus(Duration.ofDays(1)).toEpochMilli
}
| ufosky-server/actor-platform | actor-server/actor-fs-adapters/src/main/scala/im/actor/server/file/local/LocalFileStorageAdapter.scala | Scala | agpl-3.0 | 7,212 |
package us.feliscat.text.term.ja
import us.feliscat.m17n.Japanese
import us.feliscat.ner.NamedEntity
import us.feliscat.ner.ja.JapaneseNamedEntityRecognizerInEventOntology
import us.feliscat.text.StringOption
import us.feliscat.text.term.MultiLingualTermNormalizerInEventOntology
/**
* <pre>
* Created on 2017/02/09.
* </pre>
*
* @author K.Sakamoto
*/
object JapaneseTermNormalizerInEventOntology extends MultiLingualTermNormalizerInEventOntology with Japanese {
  /** Delegates Japanese named-entity recognition to the event-ontology recognizer. */
  override def recognize(textOpt: StringOption): Seq[NamedEntity] =
    JapaneseNamedEntityRecognizerInEventOntology.recognize(textOpt)
}
| ktr-skmt/FelisCatusZero-multilingual | libraries/src/main/scala/us/feliscat/text/term/ja/JapaneseTermNormalizerInEventOntology.scala | Scala | apache-2.0 | 621 |
// Project: angulate2-examples
// Module: 07 Router
// Description: CrisisCenterComponent
// Copyright (c) 2016. Distributed under the MIT License (see included LICENSE file).
package router.crisis
import angulate2._
import angulate2.router.{RDef, RouteConfig, RouterOutlet}
// Shell component for the crisis-center feature area: renders a heading plus a
// nested <router-outlet> into which the child routes below are loaded.
@Component(
  template =
    """<h2>CRISIS CENTER</h2>
       <router-outlet></router-outlet>""",
  directives = @@[RouterOutlet],
  providers = @@[CrisisService]
)
// Child routes: the crisis list is the default view; "/:id" shows a single crisis detail.
@RouteConfig(
  RDef(path = "/", name = "CrisisCenter", component = @#[CrisisListComponent], useAsDefault = true),
  RDef(path = "/:id", name = "CrisisDetail", component = @#[CrisisDetailComponent])
)
class CrisisCenterComponent {
  // Intentionally empty: all behavior lives in the routed child components.
}
| jokade/angulate2-examples | archive/07_router/src/main/scala/router/crisis/CrisisCenterComponent.scala | Scala | mit | 684 |
package com.containant.casestudies
/** The comparison framework:
*
* Do 100 runs on the given instances for each heuristic
* (while fixing the number of fitness function evaluations).
*
* Analyze the resulting data using the Wineberg-Christensen protocol.
*/
import scala.reflect.runtime.universe._
import scala.reflect.ClassTag
import org.apache.commons.math3.stat._
import org.apache.commons.math3.stat.inference._
import com.containant._
import com.containant.heuristics._
object Framework {
  // Outcome of a single heuristic run: the heuristic used, the solution it
  // produced, and that solution's fitness value.
  case class RunResult[T](heuristic: Heuristic, solution: T, fitness: Double)
  /**
   * A run corresponds to a single execution of a given heuristic on a given
   * test instance with the goal of maximizing a fitness function.
   *
   * The best of run is returned.
   */
  def run[T](
    heuristic: Heuristic,
    instance: Module,
    maximizing: T => Double
  )(implicit ev: TypeTag[T], ew: ClassTag[T]): RunResult[T] = {
    // A fresh ContainAnt instance per run, so runs do not share heuristic state.
    object CA extends ContainAnt(heuristic)
    val created = CA create (instance, maximizing)
    RunResult[T](heuristic, created, maximizing(created))
  }
  // Common descriptive statistics over the best-of-run fitnesses of one heuristic.
  trait DescriptiveSummary {
    val min: Double
    val mean: Double
    val max: Double
    val variance: Double
    override def toString: String = s"$min,$mean,$max,$variance"
  }
  // Full outcome of a two-heuristic comparison: raw run results, mean ranks,
  // and the p-value of the rank t-test.
  case class ExperimentResult[T](
    heuristic1: Heuristic,
    heuristic2: Heuristic,
    results1: Seq[RunResult[T]],
    results2: Seq[RunResult[T]],
    mean1: Double,
    mean2: Double,
    pvalue: Double
  ) {
    // NOTE: unlike the trait's toString, these summaries prepend the heuristic
    // name, producing a CSV-style row "heuristic,min,mean,max,variance".
    object summary1 extends DescriptiveSummary {
      override val min: Double =
        StatUtils.min( results1.map(_.fitness).toArray )
      override val mean: Double =
        StatUtils.mean( results1.map(_.fitness).toArray )
      override val max: Double =
        StatUtils.max( results1.map(_.fitness).toArray )
      override val variance: Double =
        StatUtils.variance( results1.map(_.fitness).toArray )
      override def toString: String = s"$heuristic1,$min,$mean,$max,$variance"
    }
    object summary2 extends DescriptiveSummary {
      override val min: Double =
        StatUtils.min( results2.map(_.fitness).toArray )
      override val mean: Double =
        StatUtils.mean( results2.map(_.fitness).toArray )
      override val max: Double =
        StatUtils.max( results2.map(_.fitness).toArray )
      override val variance: Double =
        StatUtils.variance( results2.map(_.fitness).toArray )
      override def toString: String = s"$heuristic2,$min,$mean,$max,$variance"
    }
  }
  /**
   * During a comparison experiment, two heuristics are compared to find which
   * one has a higher mean best of run on a single given test instance.
   *
   * We use the Wineberg-Christensen protocol to determine the winner.
   */
  def experiment[T](
    heuristic1: Heuristic,
    heuristic2: Heuristic,
    runs: Int,
    instance: Module,
    maximizing: T => Double
  )(implicit ev: TypeTag[T], ew: ClassTag[T]): ExperimentResult[T] = {
    // Perform all runs
    val results1 = for(r <- 1 to runs) yield
      run(heuristic1, instance, maximizing)
    val results2 = for(r <- 1 to runs) yield
      run(heuristic2, instance, maximizing)
    // Merge results
    val merged = results1 ++ results2
    // Rank results (0-based; a constant rank offset cancels out in the paired t-test)
    val ranked = merged.sortBy(_.fitness).zipWithIndex
    // Average ranks after combining by fitness (ties receive the mean of their ranks)
    val combined = ranked.groupBy(_._1.fitness)
    val averaged = combined.values flatMap { xs =>
      val rank = xs.map(_._2).sum / (1.0*xs.size)
      xs.map(x => (x._1,rank))
    }
    // Break the ranks into groups, one per heuristic
    // NOTE(review): `combined.values` iterates a Map in unspecified order, so the
    // pairing fed to pairedTTest below is arbitrary — confirm this matches the
    // intended Wineberg-Christensen procedure (both groups have `runs` elements).
    val ranks1 =
      averaged.filter(x => x._1.heuristic == heuristic1).map(_._2.toDouble)
    val ranks2 =
      averaged.filter(x => x._1.heuristic == heuristic2).map(_._2.toDouble)
    // Perform a t-test on the rank groups
    val test = new TTest()
    val mean1 = StatUtils.mean(ranks1.toArray)
    val mean2 = StatUtils.mean(ranks2.toArray)
    val pvalue = test.pairedTTest(ranks1.toArray, ranks2.toArray)
    ExperimentResult(
      heuristic1, heuristic2,
      results1, results2,
      mean1, mean2,
      pvalue
    )
  }
}
| zaklogician/ContainAnt-devel | src/main/scala/com/containant/casestudies/Framework.scala | Scala | bsd-3-clause | 4,153 |
package org.http4s
import java.util.concurrent.atomic.AtomicReferenceArray
import cats._
import org.http4s.Status.ResponseClass
import org.http4s.util.Renderable
/** Representation of the HTTP response code and reason
*
* '''Note: ''' the reason is not important to the protocol and is not considered in equality checks.
*
* @param code HTTP status code
* @param reason reason for the response. eg, OK
*
* @see [http://tools.ietf.org/html/rfc7231#section-6 RFC7231, Section 6]
* @see [http://www.iana.org/assignments/http-status-codes/http-status-codes.xhtml IANA Status Code Registry]
*/
final case class Status private (code: Int)(val reason: String = "", val isEntityAllowed: Boolean = true) extends Ordered[Status] with Renderable {

  /** Buckets the numeric code into its RFC 7231 response class. */
  // scalastyle:off magic.number
  val responseClass: ResponseClass = code match {
    case c if c < 200 => Status.Informational
    case c if c < 300 => Status.Successful
    case c if c < 400 => Status.Redirection
    case c if c < 500 => Status.ClientError
    case _            => Status.ServerError
  }
  // scalastyle:on magic.number

  /** Orders statuses by numeric code (reason is ignored, as in equality). */
  def compare(that: Status): Int = code - that.code

  def isSuccess: Boolean = responseClass.isSuccess

  /** Returns a copy carrying the given reason phrase; code and entity flag are kept. */
  def withReason(reason: String): Status = new Status(code)(reason, isEntityAllowed)

  override def render(writer: org.http4s.util.Writer): writer.type = {
    writer << code << ' ' << reason
  }

  /** Helper for matching a [[Response]] whose status equals this one. */
  def unapply(msg: Response): Option[Response] =
    Some(msg).filter(_.status == this)
}
object Status {
  sealed trait ResponseClass {
    def isSuccess: Boolean

    /** Match a [[Response]] based on [[Status]] category */
    final def unapply(resp: Response): Option[Response] =
      if (resp.status.responseClass == this) Some(resp) else None
  }

  case object Informational extends ResponseClass { val isSuccess = true }
  case object Successful extends ResponseClass { val isSuccess = true }
  case object Redirection extends ResponseClass { val isSuccess = true }
  case object ClientError extends ResponseClass { val isSuccess = false }
  case object ServerError extends ResponseClass { val isSuccess = false }

  object ResponseClass {
    @deprecated("Moved to org.http4s.Status.Informational", "0.16")
    val Informational = Status.Informational
    @deprecated("Moved to org.http4s.Status.Successful", "0.16")
    val Successful = Status.Successful
    // BUG FIX: the three aliases below previously all pointed at Status.Informational
    // (copy/paste error), silently changing the category matched by deprecated call sites.
    @deprecated("Moved to org.http4s.Status.Redirection", "0.16")
    val Redirection = Status.Redirection
    @deprecated("Moved to org.http4s.Status.ClientError", "0.16")
    val ClientError = Status.ClientError
    @deprecated("Moved to org.http4s.Status.ServerError", "0.16")
    val ServerError = Status.ServerError
  }

  // Builds an unregistered status, rejecting codes outside the valid HTTP range.
  private def mkStatus(code: Int, reason: String = ""): ParseResult[Status] =
    if (code >= 100 && code <= 599) ParseResult.success(Status(code)(reason, isEntityAllowed = true))
    else ParseResult.fail("Invalid status", s"Code $code must be between 100 and 599, inclusive")

  // Looks up a registered status; None for unregistered or out-of-range codes.
  private def lookup(code: Int) =
    if (code < 100 || code > 599) None else Option(registry.get(code))

  def fromInt(code: Int): ParseResult[Status] = lookup(code).getOrElse(mkStatus(code))

  // Registered statuses are reused only when the reason phrase matches exactly;
  // otherwise a fresh status with the supplied reason is built.
  def fromIntAndReason(code: Int, reason: String): ParseResult[Status] =
    lookup(code).filter(_.right.get.reason == reason).getOrElse(mkStatus(code, reason))

  // Registry indexed directly by status code; entries are pre-wrapped in Right
  // so lookups can be returned as ParseResults without allocation.
  // scalastyle:off magic.number
  private val registry =
    new AtomicReferenceArray[Right[Nothing, Status]](600)
  // scalastyle:on magic.number

  /** All statuses currently present in the registry, in code order. */
  def registered: Iterable[Status] = for {
    code <- 100 to 599
    status <- Option(registry.get(code)).map(_.right.get)
  } yield status

  /** Registers (or replaces) the canonical status for its code and returns it. */
  def register(status: Status): status.type = {
    // Xor.Right, not right, for specific inference
    registry.set(status.code, Right(status))
    status
  }

  /**
   * Status code list taken from http://www.iana.org/assignments/http-status-codes/http-status-codes.xml
   */
  // scalastyle:off magic.number
  val Continue = register(Status(100)("Continue", isEntityAllowed = false))
  val SwitchingProtocols = register(Status(101)("Switching Protocols", isEntityAllowed = false))
  val Processing = register(Status(102)("Processing", isEntityAllowed = false))

  val Ok = register(Status(200)("OK"))
  val Created = register(Status(201)("Created"))
  val Accepted = register(Status(202)("Accepted"))
  val NonAuthoritativeInformation = register(Status(203)("Non-Authoritative Information"))
  val NoContent = register(Status(204)("No Content", isEntityAllowed = false))
  val ResetContent = register(Status(205)("Reset Content", isEntityAllowed = false))
  val PartialContent = register(Status(206)("Partial Content"))
  val MultiStatus = register(Status(207)("Multi-Status"))
  val AlreadyReported = register(Status(208)("Already Reported"))
  val IMUsed = register(Status(226)("IM Used"))

  val MultipleChoices = register(Status(300)("Multiple Choices"))
  val MovedPermanently = register(Status(301)("Moved Permanently"))
  val Found = register(Status(302)("Found"))
  val SeeOther = register(Status(303)("See Other"))
  val NotModified = register(Status(304)("Not Modified", isEntityAllowed = false))
  val UseProxy = register(Status(305)("Use Proxy"))
  val TemporaryRedirect = register(Status(307)("Temporary Redirect"))
  val PermanentRedirect = register(Status(308)("Permanent Redirect"))

  val BadRequest = register(Status(400)("Bad Request"))
  val Unauthorized = register(Status(401)("Unauthorized"))
  val PaymentRequired = register(Status(402)("Payment Required"))
  val Forbidden = register(Status(403)("Forbidden"))
  val NotFound = register(Status(404)("Not Found"))
  val MethodNotAllowed = register(Status(405)("Method Not Allowed"))
  val NotAcceptable = register(Status(406)("Not Acceptable"))
  val ProxyAuthenticationRequired = register(Status(407)("Proxy Authentication Required"))
  val RequestTimeout = register(Status(408)("Request Timeout"))
  val Conflict = register(Status(409)("Conflict"))
  val Gone = register(Status(410)("Gone"))
  val LengthRequired = register(Status(411)("Length Required"))
  val PreconditionFailed = register(Status(412)("Precondition Failed"))
  val PayloadTooLarge = register(Status(413)("Payload Too Large"))
  val UriTooLong = register(Status(414)("URI Too Long"))
  val UnsupportedMediaType = register(Status(415)("Unsupported Media Type"))
  val RangeNotSatisfiable = register(Status(416)("Range Not Satisfiable"))
  val ExpectationFailed = register(Status(417)("Expectation Failed"))
  val UnprocessableEntity = register(Status(422)("Unprocessable Entity"))
  val Locked = register(Status(423)("Locked"))
  val FailedDependency = register(Status(424)("Failed Dependency"))
  val UpgradeRequired = register(Status(426)("Upgrade Required"))
  val PreconditionRequired = register(Status(428)("Precondition Required"))
  val TooManyRequests = register(Status(429)("Too Many Requests"))
  val RequestHeaderFieldsTooLarge = register(Status(431)("Request Header Fields Too Large"))
  val UnavailableForLegalReasons = register(Status(451)("Unavailable For Legal Reasons"))

  val InternalServerError = register(Status(500)("Internal Server Error"))
  val NotImplemented = register(Status(501)("Not Implemented"))
  val BadGateway = register(Status(502)("Bad Gateway"))
  val ServiceUnavailable = register(Status(503)("Service Unavailable"))
  val GatewayTimeout = register(Status(504)("Gateway Timeout"))
  val HttpVersionNotSupported = register(Status(505)("HTTP Version not supported"))
  val VariantAlsoNegotiates = register(Status(506)("Variant Also Negotiates"))
  val InsufficientStorage = register(Status(507)("Insufficient Storage"))
  val LoopDetected = register(Status(508)("Loop Detected"))
  val NotExtended = register(Status(510)("Not Extended"))
  val NetworkAuthenticationRequired = register(Status(511)("Network Authentication Required"))
  // scalastyle:on magic.number
}
// cats type-class instances for Status, mixed into the package's implicit scope.
trait StatusInstances {
  // Show renders a status via its toString ("Status(code)(...)" case-class form).
  implicit val StatusShow = Show.fromToString[Status]
  // Order is derived from the Ordered[Status] comparison, i.e. by numeric code.
  implicit val StatusOrder = Order.fromOrdering[Status]
}
| ZizhengTai/http4s | core/src/main/scala/org/http4s/Status.scala | Scala | apache-2.0 | 8,037 |
/*
* Copyright 2014 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.config.scala
import com.netflix.config.{ChainedDynamicProperty, DynamicIntProperty => JavaDynamicIntProperty}
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.matchers.ShouldMatchers
@RunWith(classOf[JUnitRunner])
// Tests for ChainMakers: property-name fanning and chained dynamic-property derivation.
// NOTE: these tests mutate global Archaius configuration via setProperty/clearProperty,
// so each test clears its property names up front; they are not safe to run in parallel.
class ChainMakersTest extends PropertiesTestHelp with ShouldMatchers {
  import ChainMakers._
  // Adapters matching deriveChain's expected (name, root) and (name, next-link) factories.
  private def wrapRoot(p: String, r: JavaDynamicIntProperty) = new ChainedDynamicProperty.IntProperty(p, r)
  private def wrapLink(p: String, n: ChainedDynamicProperty.IntProperty) = new ChainedDynamicProperty.IntProperty(p, n)
  "fanPropertyName" should {
    "understand a single-part name with prefix and suffix" in {
      fanPropertyName(Some("x"), "a", Some("y")) should equal(Seq("x.y", "x.a.y"))
    }
    "understand a dual-part name with prefix and suffix" in {
      // Each additional name part adds one more, increasingly specific, property name.
      fanPropertyName(Some("x"), "a.b", Some("y")) should equal(Seq("x.y", "x.a.y", "x.a.b.y"))
    }
  }
  "deriveChain" should {
    "produce a chain given property names and helper functions" in {
      val baseName = "foo"
      val topName = s"${baseName}.bar"
      val default = -1
      Seq(baseName, topName) foreach clearProperty
      val chain = deriveChain( Seq(topName), intProperty(baseName, default), wrapRoot, wrapLink )
      chain should not be null
      chain.getName should be(topName)
      chain.getDefaultValue should be(default)
    }
    "produce a chain which produces the default value when properties are not set" in {
      val baseName = "foo"
      val topName = s"${baseName}.bar"
      val default = -1
      Seq(baseName, topName) foreach clearProperty
      val chain = deriveChain( Seq(topName), intProperty(baseName, default), wrapRoot, wrapLink )
      chain should not be null
      chain.getName should be(topName)
      chain.get() should be(default)
    }
    "produce a chain which responds to setting the root property" in {
      val baseName = "foo"
      val topName = s"${baseName}.bar"
      val baseValue = 1
      val default = -1
      Seq(baseName, topName) foreach clearProperty
      val chain = deriveChain( Seq(topName), intProperty(baseName, default), wrapRoot, wrapLink )
      chain should not be null
      chain.get() should be(default)
      setProperty(baseName, baseValue)
      chain.get() should be(baseValue)
    }
    "produce a chain which responds to setting a non-root property" in {
      val baseName = "foo"
      val topName = s"${baseName}.bar"
      val topValue = 2
      val default = -1
      Seq(baseName, topName) foreach clearProperty
      val chain = deriveChain( Seq(topName), intProperty(baseName, default), wrapRoot, wrapLink )
      chain should not be null
      chain.get() should be(default)
      setProperty(topName, topValue)
      chain.get() should be(topValue)
    }
    "produce a chain which prefers a non-root property's value over the root property" in {
      val baseName = "foo"
      val topName = s"${baseName}.bar"
      val baseValue = 1
      val topValue = 2
      val default = -1
      Seq(baseName, topName) foreach clearProperty
      val chain = deriveChain( Seq(topName), intProperty(baseName, default), wrapRoot, wrapLink )
      chain should not be null
      chain.get() should be(default)
      setProperty(baseName, baseValue)
      chain.get() should be(baseValue)
      // The more specific (top) property overrides the root once set.
      setProperty(topName, topValue)
      chain.get() should be(topValue)
    }
    "explode given null property names" in {
      intercept[NullPointerException] {
        deriveChain( null, intProperty("foo", -1), wrapRoot, wrapLink )
      }
    }
    "explode given no property names" in {
      intercept[NoSuchElementException] {
        deriveChain( Seq.empty, intProperty("foo", -1), wrapRoot, wrapLink )
      }
    }
  }
}
| gorzell/archaius | archaius-scala/src/test/scala/com/netflix/config/scala/ChainMakersTest.scala | Scala | apache-2.0 | 4,371 |
/**
* Copyright 2015 Mohiva Organisation (license at mohiva dot com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mohiva.play.silhouette.impl.daos
import com.mohiva.play.silhouette.api.AuthInfo
import scala.reflect.ClassTag
/**
 * An implementation of the auth info DAO.
 *
 * This abstract implementation of the [[com.mohiva.play.silhouette.impl.daos.AuthInfoDAO]] trait
 * allows us to get the class tag of the auth info it is responsible for. Based on the class tag
 * the [[com.mohiva.play.silhouette.impl.repositories.DelegableAuthInfoRepository]] class can
 * delegate operations to the DAO which is responsible for the currently handled auth info.
 *
 * The class tag is captured as an implicit `val` so it remains available at runtime,
 * where type parameters alone would be erased.
 *
 * @param classTag The class tag for the type parameter.
 * @tparam T The type of the auth info to store.
 */
abstract class DelegableAuthInfoDAO[T <: AuthInfo](implicit val classTag: ClassTag[T]) extends AuthInfoDAO[T]
| cemcatik/play-silhouette | silhouette/app/com/mohiva/play/silhouette/impl/daos/DelegableAuthInfoDAO.scala | Scala | apache-2.0 | 1,411 |
package run
import java.io.File
import akka.actor.{ActorSystem, Props}
import akka.cluster.pubsub.DistributedPubSub
import akka.extension.{ConfigPathBuilder, ConfigurationManager}
import com.typesafe.config.{Config, ConfigFactory}
import system.cell.core.CellCoreActor
import system.names.NamingSystem
import system.ontologies.messages.Location._
import system.ontologies.messages.MessageType.Init
import system.ontologies.messages.{AriadneMessage, Greetings, Location}
/**
* Created by Alessandro on 28/06/2017.
*/
/**
 * Launcher for a cell node. Expects two arguments:
 * the Akka configuration file path and the cell JSON configuration path,
 * e.g. "*root*/res/conf/test/testCell1.conf" "*root*/res/json/cell/cell1.json".
 */
object RunCellN extends App {

    /** Number of command-line arguments this launcher requires. */
    val REQUIRED_ARGS = 2

    if (args.length == REQUIRED_ARGS) {
        val path2AkkaConfig = args(0)
        val pathToCellConfig = args(1)

        // Cell-specific config file, falling back to the default application config.
        implicit val config: Config = ConfigFactory.parseFile(new File(path2AkkaConfig))
            .withFallback(ConfigFactory.load()).resolve()

        try {
            val system = ActorSystem(NamingSystem.ActorSystem, config)
            val middleware = DistributedPubSub(system).mediator

            // The cell's serial number names its core actor uniquely in the cluster.
            val loadedConf = ConfigurationManager(system)
            val builder = ConfigPathBuilder()
            val serialNumber = loadedConf.property(builder.akka.actor("serial-number")).asString

            val core = system.actorOf(Props(new CellCoreActor(middleware)), NamingSystem.CellCore + serialNumber)

            // Hand the core actor its JSON configuration path to bootstrap itself.
            core ! AriadneMessage(Init, Init.Subtype.Greetings,
                Location.Master >> Location.Self, Greetings(List(pathToCellConfig)))
        } catch {
            case ex: Exception =>
                ex.printStackTrace()
                // Startup failed: exit with a non-zero status so supervisors/scripts
                // can detect the failure (previously exited with 0, signalling success).
                System.exit(1)
        }

        println("System online...")
    } else {
        println(s"Wrong number of Arguments... Wanted $REQUIRED_ARGS, found " + args.length)
        // Wrong usage is an error; report it through the exit status as well.
        System.exit(1)
    }
} | albertogiunta/arianna | src/main/scala/run/RunCellN.scala | Scala | gpl-3.0 | 1,921 |
package debop4s.core
/**
 * Trait that exposes a Scala object as a singleton usable as a mixin from Java code.
 * (Original scaladoc was in Korean and mojibake-damaged; translated to English.)
 * Created by debop on 2014. 4. 5.
 */
trait JavaSingleton {
  // Gives Java callers a `get()` accessor returning this singleton instance.
  def get = this
}
| debop/debop4s | debop4s-core/src/main/scala/debop4s/core/JavaSingleton.scala | Scala | apache-2.0 | 182 |
package org.jetbrains.plugins.scala
package lang
package psi
package impl
package statements
import com.intellij.lang.ASTNode
import com.intellij.psi.PsiElementVisitor
import com.intellij.psi.stubs.StubElement
import com.intellij.psi.tree.IElementType
import org.jetbrains.plugins.scala.lang.parser.ScalaElementTypes
import org.jetbrains.plugins.scala.lang.psi.api.ScalaElementVisitor
import org.jetbrains.plugins.scala.lang.psi.api.base._
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.{ScBindingPattern, _}
import org.jetbrains.plugins.scala.lang.psi.api.base.types.ScTypeElement
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScExpression
import org.jetbrains.plugins.scala.lang.psi.api.statements._
import org.jetbrains.plugins.scala.lang.psi.stubs.ScValueStub
import org.jetbrains.plugins.scala.lang.psi.types.result.{Failure, TypingContext}
/**
* @author Alexander Podkhalyuzin
*/
// PSI implementation of a Scala pattern definition (`val p1, Pattern(p2) = expr`).
// Stub-backed: when a stub index is available, data is read from it instead of the AST.
// The `null` stub/node arguments in the auxiliary constructors follow the IntelliJ
// platform convention for stub-based elements (exactly one of stub/node is present).
class ScPatternDefinitionImpl private (stub: StubElement[ScValue], nodeType: IElementType, node: ASTNode)
        extends ScalaStubBasedElementImpl(stub, nodeType, node) with ScPatternDefinition {
  override def accept(visitor: PsiElementVisitor): Unit = {
    visitor match {
      // NOTE(review): both branches currently delegate identically to super;
      // the match is presumably kept for parity with sibling PSI implementations.
      case visitor: ScalaElementVisitor => super.accept(visitor)
      case _ => super.accept(visitor)
    }
  }
  // AST-backed construction (no stub available).
  def this(node: ASTNode) = {this(null, null, node)}
  // Stub-backed construction (no AST loaded).
  def this(stub: ScValueStub) = {this(stub, ScalaElementTypes.PATTERN_DEFINITION, null)}
  override def toString: String = "ScPatternDefinition"
  // All binding patterns introduced across every pattern in the definition's pattern list.
  def bindings: Seq[ScBindingPattern] = {
    val plist = this.pList
    if (plist != null) {
      val patterns = plist.patterns
      if (patterns.length == 1) {
        patterns(0).bindings
      } else patterns.flatMap((p: ScPattern) => p.bindings)
    } else Seq.empty
  }
  def declaredElements = bindings
  // Declared type wins when present; otherwise the type is inferred from the initializer.
  def getType(ctx: TypingContext) = {
    typeElement match {
      case Some(te) => te.getType(ctx)
      case None => expr.map(_.getType(ctx)).getOrElse(Failure("Cannot infer type without an expression", Some(this)))
    }
  }
  // Initializer expression, read from the stub when available (falling back to the AST).
  def expr: Option[ScExpression] = {
    val stub = getStub
    if (stub != null) {
      return stub.asInstanceOf[ScValueStub].getBodyExpr.orElse(Option(findChildByClassScala(classOf[ScExpression])))
    }
    Option(findChildByClassScala(classOf[ScExpression]))
  }
  // Explicit type annotation element, if the definition has one.
  def typeElement: Option[ScTypeElement] = {
    val stub = getStub
    if (stub != null) {
      stub.asInstanceOf[ScValueStub].getTypeElement
    }
    else findChild(classOf[ScTypeElement])
  }
  // The pattern list on the left-hand side of `=`.
  def pList: ScPatternList = {
    val stub = getStub
    if (stub != null) {
      stub.getChildrenByType(ScalaElementTypes.PATTERN_LIST, JavaArrayFactoryUtil.ScPatternListFactory).apply(0)
    } else findChildByClass(classOf[ScPatternList])
  }
} | double-y/translation-idea-plugin | src/org/jetbrains/plugins/scala/lang/psi/impl/statements/ScPatternDefinitionImpl.scala | Scala | apache-2.0 | 2,756 |
package ahusby.scalautils.io
import java.io.File.separator
import java.nio.file.{Files, Path, Paths}
//import resource.managed
import scala.io.Source
import scala.util.Try
object FileReader {
  /**
   * Reads a classpath resource fully into a String. (Original doc was Norwegian;
   * "Brukes slik" = "Used like this".) Usage:{{{
   *   private val read = readResourceFile(getClass.getClassLoader) _
   *   ...
   *   val fileContents1 = read(filename1)
   *   val fileContents2 = read(filename2)
   * }}}
   *
   * Throws [[ResourcefileNotFound]] when the resource is absent; a leading
   * path separator on the name is tolerated and stripped.
   */
  def readResourceFile(cl: ClassLoader)(resourcefileName: String): String = {
    val name = resourcefileName.stripPrefix(separator)
    val url = cl.getResource(name)
    if (url == null) {
      throw new ResourcefileNotFound(name)
    } else {
      // Previously used scala-arm: managed(Source.fromURL(url)).acquireAndGet(_.mkString)
      import scala.language.reflectiveCalls
      // `closing` is a project helper (defined elsewhere); presumably it closes the
      // Source after applying the function — TODO confirm against its definition.
      closing(Source.fromURL(url))(_.mkString)
    }
  }
  // Reads a file into a String; pre-checks yield typed failures inside the Try.
  // The check order (exists -> readable -> regular file) determines which
  // exception a caller observes, so it must not be reordered.
  def readFile(p: Path): Try[String] = Try {
    val filename = p.toAbsolutePath.toString
    if (!Files.exists(p)) throw new FileNotFound(filename)
    if (!Files.isReadable(p)) throw new FileNotReadable(filename)
    if (!Files.isRegularFile(p)) throw new NotARegularRegularFile(filename)
    // Previously used scala-arm: managed(Source.fromFile(p.toFile)).acquireAndGet(_.mkString)
    closing(Source.fromFile(p.toFile))(_.mkString)
  }
  // Convenience overload taking a path string.
  def readFile(filename: String): Try[String] = readFile(Paths.get(filename))
  // Typed failure causes surfaced through the Try returned by readFile.
  class ResourcefileNotFound(msg: String) extends RuntimeException(msg)
  class FileNotFound(msg: String) extends RuntimeException(msg)
  class FileNotReadable(msg: String) extends RuntimeException(msg)
  class NotARegularRegularFile(msg: String) extends RuntimeException(msg)
}
| ahusby/scalautils | src/main/scala/ahusby/scalautils/io/FileReader.scala | Scala | gpl-3.0 | 1,609 |
package pl.mpieciukiewicz.webapp.webserver
import javax.servlet.ServletContext
import akka.actor.ActorSystem
import org.scalatra.LifeCycle
import pl.mpieciukiewicz.webapp.RestHandler
// Scalatra lifecycle hook: wires the REST handler to the servlet context on startup
// and tears down the actor system on shutdown.
class Bootstrap extends LifeCycle {
  // Shared actor system backing the REST handler for the application's lifetime.
  val actorSystem = ActorSystem("webapp")
  override def init(servletContext: ServletContext) {
    // Mount the REST API at the context root.
    servletContext.mount(new RestHandler(actorSystem), "/")
  }
  override def destroy(context: ServletContext) {
    // NOTE(review): `shutdown()` is the pre-2.4 Akka API (deprecated in favour of
    // `terminate()` in newer Akka versions) — confirm against the project's Akka version.
    actorSystem.shutdown()
  }
}
| marpiec/WebAppArchetype | src/main/scala/pl/mpieciukiewicz/webapp/webserver/Bootstrap.scala | Scala | apache-2.0 | 469 |
/*
* Copyright (c) 2017 Uber Technologies, Inc. (hoodie-dev-group@uber.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
import com.uber.hoodie.common.util.{SchemaTestUtil, TypedProperties}
import com.uber.hoodie.exception.HoodieException
import com.uber.hoodie.{DataSourceWriteOptions, OverwriteWithLatestAvroPayload, SimpleKeyGenerator}
import org.apache.avro.generic.GenericRecord
import org.junit.Assert._
import org.junit.{Before, Test}
import org.scalatest.junit.AssertionsForJUnit
/**
* Tests on the default key generator, payload classes.
*/
class DataSourceDefaultsTest extends AssertionsForJUnit {
  // Shared Avro schema used to generate test records.
  val schema = SchemaTestUtil.getComplexEvolvedSchema
  // Mutable fixture; (re)assigned in the @Before method before each test runs.
  var baseRecord: GenericRecord = null
  // Regenerates the shared base record before every test so tests cannot
  // observe mutations made by earlier tests.
  @Before def initialize(): Unit = {
    baseRecord = SchemaTestUtil
      .generateAvroRecordFromJson(schema, 1, "001", "f1")
  }
  // Builds the minimal key-generator config: which record field supplies the
  // record key and which supplies the partition path.
  private def getKeyConfig(recordKeyFieldName: String, partitionPathField: String): TypedProperties = {
    val props = new TypedProperties()
    props.setProperty(DataSourceWriteOptions.RECORDKEY_FIELD_OPT_KEY, recordKeyFieldName)
    props.setProperty(DataSourceWriteOptions.PARTITIONPATH_FIELD_OPT_KEY, partitionPathField)
    props
  }
  // Exercises SimpleKeyGenerator: top-level and nested field extraction, missing-config
  // validation, missing-record-key failure, and the default partition path fallback.
  @Test def testSimpleKeyGenerator() = {
    // top level, valid fields
    val hk1 = new SimpleKeyGenerator(getKeyConfig("field1", "name")).getKey(baseRecord)
    assertEquals("field1", hk1.getRecordKey)
    assertEquals("name1", hk1.getPartitionPath)
    // partition path field not specified
    try {
      val props = new TypedProperties()
      props.setProperty(DataSourceWriteOptions.RECORDKEY_FIELD_OPT_KEY, "field1")
      new SimpleKeyGenerator(props).getKey(baseRecord)
      fail("Should have errored out")
    } catch {
      case e: IllegalArgumentException => {
        // expected: missing partition-path config must be rejected
      }
    };
    // recordkey field not specified
    try {
      val props = new TypedProperties()
      props.setProperty(DataSourceWriteOptions.PARTITIONPATH_FIELD_OPT_KEY, "partitionField")
      new SimpleKeyGenerator(props).getKey(baseRecord)
      fail("Should have errored out")
    } catch {
      case e: IllegalArgumentException => {
        // expected: missing record-key config must be rejected
      }
    };
    // nested field as record key and partition path
    val hk2 = new SimpleKeyGenerator(getKeyConfig("testNestedRecord.userId", "testNestedRecord.isAdmin"))
      .getKey(baseRecord)
    assertEquals("UserId1@001", hk2.getRecordKey)
    assertEquals("false", hk2.getPartitionPath)
    // Nested record key not found
    try {
      new SimpleKeyGenerator(getKeyConfig("testNestedRecord.NotThere", "testNestedRecord.isAdmin"))
        .getKey(baseRecord)
      fail("Should have errored out")
    } catch {
      case e: HoodieException => {
        // expected: an absent record-key field is a hard failure
      }
    };
    // if partition path can't be found, return default partition path
    val hk3 = new SimpleKeyGenerator(getKeyConfig("testNestedRecord.userId", "testNestedRecord.notThere"))
      .getKey(baseRecord);
    assertEquals("default", hk3.getPartitionPath)
  }
@Test def testOverwriteWithLatestAvroPayload() = {
val overWritePayload1 = new OverwriteWithLatestAvroPayload(baseRecord, 1)
val laterRecord = SchemaTestUtil
.generateAvroRecordFromJson(schema, 2, "001", "f1")
val overWritePayload2 = new OverwriteWithLatestAvroPayload(laterRecord, 2)
// it will provide the record with greatest combine value
val combinedPayload12 = overWritePayload1.preCombine(overWritePayload2)
val combinedGR12 = combinedPayload12.getInsertValue(schema).get().asInstanceOf[GenericRecord]
assertEquals("field2", combinedGR12.get("field1").toString)
// and it will be deterministic, to order of processing.
val combinedPayload21 = overWritePayload2.preCombine(overWritePayload1)
val combinedGR21 = combinedPayload21.getInsertValue(schema).get().asInstanceOf[GenericRecord]
assertEquals("field2", combinedGR21.get("field1").toString)
}
}
| vinothchandar/hoodie | hoodie-spark/src/test/scala/DataSourceDefaultsTest.scala | Scala | apache-2.0 | 4,458 |
/*
* Copyright 2014 - 2015 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package slamdata.engine.std
import slamdata.Predef._
import scalaz._
import Scalaz._
import Validation.{success, failure}
import NonEmptyList.nel
import slamdata.engine._
import SemanticError._
trait StructuralLib extends Library {
import Type._
val MakeObject = Mapping(
"MAKE_OBJECT",
"Makes a singleton object containing a single field",
Str :: Top :: Nil,
noSimplification,
partialTyper {
case List(Const(Data.Str(name)), Const(data)) => Const(Data.Obj(Map(name -> data)))
case List(Const(Data.Str(name)), valueType) => Obj(Map(name -> valueType), None)
case List(_, valueType) => Obj(Map(), Some(valueType))
},
partialUntyperV(AnyObject) {
case Const(Data.Obj(map)) => map.headOption match {
case Some((key, value)) => success(List(Const(Data.Str(key)), Const(value)))
case None => failure(NonEmptyList(GenericError("MAKE_OBJECT canβt result in an empty object")))
}
case Obj(map, uk) => map.headOption.fold(
uk.fold[ValidationNel[SemanticError, List[Type]]](
failure(NonEmptyList(GenericError("MAKE_OBJECT canβt result in an empty object"))))(
t => success(List(Str, t)))) {
case (key, value) => success(List(Const(Data.Str(key)), value))
}
})
  /** Makes a singleton array containing a single element. */
  val MakeArray = Mapping(
    "MAKE_ARRAY",
    "Makes a singleton array containing a single element",
    Top :: Nil,
    noSimplification,
    partialTyper {
      // A constant element folds to a constant one-element array.
      case Const(data) :: Nil => Const(Data.Arr(data :: Nil))
      // Otherwise, a fixed-length (1) array of the element's type.
      case valueType :: Nil => Arr(List(valueType))
    },
    partialUntyperV(AnyArray) {
      case Const(Data.Arr(List(elem))) => List(Const(elem))
      case Arr(List(elemType)) => List(elemType)
      // Flexible arrays untype to their element type.
      case FlexArr(_, _, elemType) => List(elemType)
    })
val ObjectConcat: Mapping = Mapping(
"OBJECT_CONCAT",
"A right-biased merge of two objects into one object",
AnyObject :: AnyObject :: Nil,
noSimplification,
partialTyperV {
case List(Const(Data.Obj(map1)), Const(Data.Obj(map2))) =>
success(Const(Data.Obj(map1 ++ map2)))
case List(Const(o1 @ Data.Obj(_)), o2) => ObjectConcat(o1.dataType, o2)
case List(o1, Const(o2 @ Data.Obj(_))) => ObjectConcat(o1, o2.dataType)
case List(Obj(map1, uk1), Obj(map2, None)) =>
success(Obj(map1 ++ map2, uk1))
case List(Obj(map1, uk1), Obj(map2, Some(uk2))) =>
success(Obj(
map1 β (Coproduct(_, uk2)) ++ map2,
Some(uk1.fold(uk2)(Coproduct(_, uk2)))))
},
partialUntyper(AnyObject) {
case x if x.objectLike =>
val t = Obj(Map(), x.objectType)
List(t, t)
})
  /**
   * A merge of two arrays into one array: element types are lubbed, min/max
   * lengths are summed. Non-array operands produce a typing failure.
   */
  val ArrayConcat: Mapping = Mapping(
    "ARRAY_CONCAT",
    "A merge of two arrays into one array",
    AnyArray :: AnyArray :: Nil,
    noSimplification,
    partialTyperV {
      // Two constants fold to a constant concatenation.
      case List(Const(Data.Arr(els1)), Const(Data.Arr(els2))) =>
        success(Const(Data.Arr(els1 ++ els2)))
      case List(Arr(els1), Arr(els2)) => success(Arr(els1 ++ els2))
      // One constant operand: recurse with its static type instead.
      case List(Const(a1 @ Data.Arr(_)), a2) => ArrayConcat(a1.dataType, a2)
      case List(a1, Const(a2 @ Data.Arr(_))) => ArrayConcat(a1, a2.dataType)
      case List(a1, FlexArr(min2, max2, elem2)) =>
        (a1.arrayMinLength |@| a1.arrayType)((min1, typ1) =>
          success(FlexArr(
            min1 + min2,
            (a1.arrayMaxLength |@| max2)(_ + _),
            Type.lub(typ1, elem2))))
          .getOrElse(failure(NonEmptyList(GenericError(a1.toString + " is not an array."))))
      case List(FlexArr(min1, max1, elem1), a2) =>
        (a2.arrayMinLength |@| a2.arrayType)((min2, typ2) =>
          success(FlexArr(
            min1 + min2,
            (max1 |@| a2.arrayMaxLength)(_ + _),
            Type.lub(elem1, typ2))))
          .getOrElse(failure(NonEmptyList(GenericError(a2.toString + " is not an array."))))
    },
    partialUntyperV(AnyArray) {
      case x if x.arrayLike =>
        // arrayLike should guarantee arrayType is defined; the failure branch
        // guards against an internal inconsistency.
        x.arrayType.fold[ValidationNel[SemanticError, List[Type]]](
          failure(NonEmptyList(GenericError("internal error: " + x.toString + " is arrayLike, but no arrayType")))) {
          typ =>
            val t = FlexArr(0, x.arrayMaxLength, typ)
            success(List(t, t))
        }
    })
  // NB: Used only during type-checking, and then compiled into either (string) Concat or ArrayConcat.
  /** Polymorphic `||` concatenation: dispatches to array or string semantics by operand type. */
  val ConcatOp = Mapping(
    "(||)",
    "A merge of two arrays/strings.",
    (AnyArray | Str) :: (AnyArray | Str) :: Nil,
    noSimplification,
    partialTyperV {
      // Top on one side: assume the other side's shape (array here).
      case t1 :: t2 :: Nil if (t1.arrayLike) && (t2 contains Top) => success(t1 & FlexArr(0, None, Top))
      case t1 :: t2 :: Nil if (t1 contains Top) && (t2.arrayLike) => success(FlexArr(0, None, Top) & t2)
      case t1 :: t2 :: Nil if (t1.arrayLike) && (t2.arrayLike) => ArrayConcat(t1, t2)
      case Const(Data.Str(str1)) :: Const(Data.Str(str2)) :: Nil => success(Const(Data.Str(str1 ++ str2)))
      case t1 :: t2 :: Nil if (Str contains t1) && (t2 contains Top) => success(Type.Str)
      case t1 :: t2 :: Nil if (t1 contains Top) && (Str contains t2) => success(Type.Str)
      case t1 :: t2 :: Nil if (Str contains t1) && (Str contains t2) => success(Type.Str)
      case t1 :: t2 :: Nil if t1 == t2 => success(t1)
      // Mixing a string with an array is a type error in both directions.
      case t1 :: t2 :: Nil if (Str contains t1) && (t2.arrayLike) => failure(NonEmptyList(GenericError("cannot concat string with array")))
      case t1 :: t2 :: Nil if (t1.arrayLike) && (Str contains t2) => failure(NonEmptyList(GenericError("cannot concat array with string")))
    },
    partialUntyperV(AnyArray | Str) {
      case x if x contains (AnyArray | Str) => success((AnyArray | Str) :: (AnyArray | Str) :: Nil)
      case x if x.arrayLike => ArrayConcat.untype(x)
      case Type.Str => success(Type.Str :: Type.Str :: Nil)
    })
  /** Extracts a specified field of an object (the `{}` projection operator). */
  val ObjectProject = Mapping(
    "({})",
    "Extracts a specified field of an object",
    AnyObject :: Str :: Nil,
    noSimplification,
    partialTyperV { case List(v1, v2) => v1.objectField(v2) },
    x => success(Obj(Map(), Some(x)) :: Str :: Nil))
  /** Extracts a specified index of an array (the `[]` projection operator). */
  val ArrayProject = Mapping(
    "([])",
    "Extracts a specified index of an array",
    AnyArray :: Int :: Nil,
    noSimplification,
    partialTyperV { case List(v1, v2) => v1.arrayElem(v2) },
    x => success(FlexArr(0, None, x) :: Int :: Nil) )
  /** Deletes a specified field from an object; deleting an absent key is a no-op. */
  val DeleteField: Mapping = Mapping(
    "DELETE_FIELD",
    "Deletes a specified field from an object",
    AnyObject :: Str :: Nil,
    noSimplification,
    partialTyper {
      case List(Const(Data.Obj(map)), Const(Data.Str(key))) =>
        Const(Data.Obj(map - key))
      case List(Obj(map, uk), Const(Data.Str(key))) => Obj(map - key, uk)
      // Unknown key: we can only say the result has the same unknown-key type.
      case List(v1, _) => Obj(Map(), v1.objectType)
    },
    partialUntyperV(AnyObject) {
      // NB: the `map` binding here is unused; the constant delegates to the
      // static-type case via dataType.
      case Const(o @ Data.Obj(map)) => DeleteField.untype(o.dataType)
      case Obj(map, _) => success(List(Obj(map, Some(Top)), Str))
    })
  /** Flattens an object into a set of its values (one element per field). */
  val FlattenObject = ExpansionFlat(
    "FLATTEN_OBJECT",
    "Flattens an object into a set",
    AnyObject :: Nil,
    noSimplification,
    partialTyperV {
      case List(x) if x.objectLike =>
        // objectLike should imply objectType is defined; guard anyway.
        x.objectType.fold[ValidationNel[SemanticError, Type]](
          failure(NonEmptyList(GenericError("internal error: objectLike, but no objectType"))))(
          success)
    },
    tpe => success(List(Obj(Map(), Some(tpe)))))
  /** Flattens an array into a set of its elements. */
  val FlattenArray = ExpansionFlat(
    "FLATTEN_ARRAY",
    "Flattens an array into a set",
    AnyArray :: Nil,
    noSimplification,
    partialTyperV {
      case List(x) if x.arrayLike =>
        // arrayLike should imply arrayType is defined; guard anyway.
        x.arrayType.fold[ValidationNel[SemanticError, Type]](
          failure(NonEmptyList(GenericError("internal error: arrayLike, but no arrayType"))))(
          success)
    },
    tpe => success(List(FlexArr(0, None, tpe))))
def functions = MakeObject :: MakeArray ::
ObjectConcat :: ArrayConcat :: ConcatOp ::
ObjectProject :: ArrayProject ::
FlattenObject :: FlattenArray ::
Nil
  // TODO: fix types and add the VirtualFuncs to the list of functions
  // val MakeObjectN = new VirtualFunc {
  /**
   * Virtual n-ary object constructor: builds an object from (name, value)
   * pairs by folding MakeObject results with ObjectConcat, and deconstructs
   * such trees back into pairs.
   * NOTE(review): apply with zero args hits no match case (MatchError) —
   * presumably callers always pass at least one pair; confirm.
   */
  object MakeObjectN {
    import slamdata.engine.analysis.fixplate._
    // Note: signature does not match VirtualFunc
    def apply(args: (Term[LogicalPlan], Term[LogicalPlan])*): Term[LogicalPlan] =
      args.map(t => MakeObject(t._1, t._2)) match {
        case t :: Nil => t
        case mas => mas.reduce((t, ma) => ObjectConcat(t, ma))
      }
    // Note: signature does not match VirtualFunc
    def unapply(t: Term[LogicalPlan]): Option[List[(Term[LogicalPlan], Term[LogicalPlan])]] =
      t.unFix match {
        case MakeObject(List(name, expr)) => Some(List((name, expr)))
        // Recursively collect pairs from both sides of a concat.
        case ObjectConcat(List(a, b)) => (unapply(a) |@| unapply(b))(_ ::: _)
        case _ => None
      }
  }
  /**
   * Virtual n-ary array constructor: builds an array from elements by folding
   * MakeArray results with ArrayConcat (empty args yield a constant empty
   * array), and deconstructs such trees back into element lists.
   */
  object MakeArrayN {
    import slamdata.engine.analysis.fixplate._
    def apply(args: Term[LogicalPlan]*): Term[LogicalPlan] =
      args.map(MakeArray(_)) match {
        case Nil => LogicalPlan.Constant(Data.Arr(Nil))
        case t :: Nil => t
        case mas => mas.reduce((t, ma) => ArrayConcat(t, ma))
      }
    // Deconstruction on plain terms delegates to the attributed version with
    // a unit annotation, then strips the annotations back off.
    def unapply(t: Term[LogicalPlan]): Option[List[Term[LogicalPlan]]] =
      Attr.unapply(attrK(t, ())).map(l => l.map(forget(_)))
    /** Deconstruction for annotated (Cofree) logical plans. */
    object Attr {
      def unapply[A](t: Cofree[LogicalPlan, A]): Option[List[Cofree[LogicalPlan, A]]] = t.tail match {
        case MakeArray(x :: Nil) =>
          Some(x :: Nil)
        case ArrayConcat(a :: b :: Nil) =>
          (unapply(a) |@| unapply(b))(_ ::: _)
        case _ => None
      }
    }
  }
}
/** Module instance of the structural (object/array) standard-library functions. */
object StructuralLib extends StructuralLib
| wemrysi/quasar | core/src/main/scala/slamdata/engine/std/structural.scala | Scala | apache-2.0 | 10,319 |
package io.pipeline.prediction.tensorflow
import com.netflix.hystrix.HystrixCommand
import com.netflix.hystrix.HystrixCommandGroupKey
import com.netflix.hystrix.HystrixCommandKey
import com.netflix.hystrix.HystrixThreadPoolKey
import com.netflix.hystrix.HystrixCommandProperties
import com.netflix.hystrix.HystrixThreadPoolProperties
object TensorflowGrpcCommandOps {
  // Single shared gRPC client for the local TensorFlow Serving endpoint.
  // NOTE(review): host/port are hard-coded here; confirm they should not come
  // from configuration.
  val client = new com.fluxcapacitor.TensorflowPredictionClientGrpc("127.0.0.1", 9000);
}
/**
 * Hystrix-wrapped TensorFlow Serving prediction call.
 *
 * Uses semaphore isolation with the given concurrency pool size and rejection
 * threshold; `fallback` is returned on timeout, rejection, or failure.
 * NOTE(review): `inputs` is accepted but never forwarded to the gRPC call —
 * confirm whether it should be serialized into the request payload.
 */
class TensorflowGrpcCommand(commandName: String,
                            namespace: String,
                            modelName: String,
                            version: Integer,
                            inputs: Map[String, Any],
                            fallback: String,
                            timeout: Int,
                            concurrencyPoolSize: Int,
                            rejectionThreshold: Int)
  extends HystrixCommand[String](
    HystrixCommand.Setter
      .withGroupKey(HystrixCommandGroupKey.Factory.asKey(commandName))
      .andCommandKey(HystrixCommandKey.Factory.asKey(commandName))
      .andThreadPoolKey(HystrixThreadPoolKey.Factory.asKey(commandName))
      .andCommandPropertiesDefaults(
        HystrixCommandProperties.Setter()
          .withExecutionTimeoutInMilliseconds(timeout)
          .withExecutionIsolationStrategy(HystrixCommandProperties.ExecutionIsolationStrategy.SEMAPHORE)
          .withExecutionIsolationSemaphoreMaxConcurrentRequests(concurrencyPoolSize)
          .withFallbackIsolationSemaphoreMaxConcurrentRequests(rejectionThreshold)
      )
      .andThreadPoolPropertiesDefaults(
        HystrixThreadPoolProperties.Setter()
          .withCoreSize(concurrencyPoolSize)
          .withQueueSizeRejectionThreshold(rejectionThreshold)
      )
  )
{
  // Issues the predict RPC; the request payload is an empty string.
  def run(): String = {
    val results = TensorflowGrpcCommandOps.client.predict(namespace, modelName, version, "")
    s"""${results}"""
  }
  // Hystrix fallback: returned when run() fails, times out, or is rejected.
  override def getFallback(): String = {
    s"""${fallback}"""
  }
}
| BrentDorsey/pipeline | prediction.ml/tensorflow/src/main/scala/io/pipeline/prediction/tensorflow/TensorflowGrpcCommand.scala | Scala | apache-2.0 | 2,035 |
package neophytesguide01
/** Demonstrates custom extractors: companion `unapply` methods drive pattern matching. */
object ExtractorDemo1 {

  trait User {
    def name: String
  }

  class FreeUser(val name: String) extends User

  object FreeUser {
    // Extractor exposing the user's name to pattern matches.
    def unapply(user: FreeUser): Option[String] = Some(user.name)
  }

  class PremiumUser(val name: String) extends User

  object PremiumUser {
    // Extractor exposing the user's name to pattern matches.
    def unapply(user: PremiumUser): Option[String] = Some(user.name)
  }

  /** Builds the greeting for a user via the extractor objects above. */
  private def greetingFor(user: User): String =
    user match {
      case FreeUser(name)    => s"Hello $name"
      case PremiumUser(name) => s"Welcome back, dear $name"
    }

  /** Greets one free and one premium user, printing each greeting. */
  def main(args: Array[String]): Unit = {
    val users: List[User] = List(new FreeUser("Adam"), new PremiumUser("Eva"))
    users.map(greetingFor).foreach(println)
  }
}
} | sebastian-dasse/uni-scala | ScalaKurs(Knabe)_S_sbt/src/main/scala/neophytesguide01/ExtractorDemo1.scala | Scala | mit | 741 |
/**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bdgenomics.adam.rich
import org.bdgenomics.formats.avro.{ GenotypeType, GenotypeAllele, Genotype }
import scala.collection.JavaConversions._
/**
 * Implicit conversions between avro [[Genotype]] and the enriched
 * [[RichGenotype]] wrapper.
 *
 * Both implicits now declare explicit result types: implicit definitions
 * without them are fragile to inference changes and are rejected by newer
 * Scala versions.
 */
object RichGenotype {
  implicit def genotypeToRichGenotype(g: Genotype): RichGenotype = new RichGenotype(g)
  implicit def richGenotypeToGenotype(g: RichGenotype): Genotype = g.genotype
}
/** Enriches an avro [[Genotype]] with ploidy and call-type helpers. */
class RichGenotype(val genotype: Genotype) {
  // Number of allele calls (1 = haploid, 2 = diploid, ...).
  def ploidy: Int = genotype.getAlleles.size
  /**
   * Classifies this call as HOM_REF / HOM_ALT / HET; any other combination
   * (e.g. containing a no-call allele) is NO_CALL. Only valid for ploidy <= 2.
   */
  def getType: GenotypeType = {
    assert(ploidy <= 2, "getType only meaningful for genotypes with ploidy <= 2")
    // `distinct` collapses homozygous calls to a single-element list.
    genotype.getAlleles.toList.distinct match {
      case List(GenotypeAllele.Ref) => GenotypeType.HOM_REF
      case List(GenotypeAllele.Alt) => GenotypeType.HOM_ALT
      case List(GenotypeAllele.Ref, GenotypeAllele.Alt) |
        List(GenotypeAllele.Alt, GenotypeAllele.Ref) => GenotypeType.HET
      case _ => GenotypeType.NO_CALL
    }
  }
}
| allenday/adam | adam-core/src/main/scala/org/bdgenomics/adam/rich/RichGenotype.scala | Scala | apache-2.0 | 1,668 |
package example
object Lists {
  import scala.annotation.tailrec

  /**
   * Computes the sum of all elements in the list `xs`.
   *
   * Implemented with an accumulator so the recursion is tail-recursive and
   * cannot overflow the stack on long lists (the naive `head + sum(tail)`
   * form is not tail-recursive).
   *
   * @param xs A list of natural numbers
   * @return The sum of all elements in `xs` (0 for the empty list)
   */
  def sum(xs: List[Int]): Int = {
    @tailrec
    def loop(rest: List[Int], acc: Int): Int =
      if (rest.isEmpty) acc
      else loop(rest.tail, acc + rest.head)
    loop(xs, 0)
  }

  /**
   * Returns the largest element in a list of integers. If the list `xs` is
   * empty it throws a `java.util.NoSuchElementException`.
   *
   * @param xs A list of natural numbers
   * @return The largest element in `xs`
   * @throws java.util.NoSuchElementException if `xs` is an empty list
   */
  def max(xs: List[Int]): Int = {
    if (xs.isEmpty) throw new NoSuchElementException("The list is empty")
    // Tail-recursive scan carrying the current maximum.
    @tailrec
    def filterMax(current: Int, rest: List[Int]): Int = {
      if (rest.isEmpty) current
      else if (current > rest.head) filterMax(current, rest.tail)
      else filterMax(rest.head, rest.tail)
    }
    filterMax(xs.head, xs.tail)
  }
}
| guhemama/moocs | Functional.Programming.in.Scala.Coursera/AssignmentExample/src/main/scala/example/Lists.scala | Scala | bsd-3-clause | 1,844 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.oap.ui
import org.apache.spark.internal.Logging
import org.apache.spark.ui.{SparkUI, SparkUITab}
/**
 * Spark UI tab for OAP: registers the fiber-cache-manager page with the
 * parent UI and serves the tab's static assets under /static/oap.
 */
class OapTab(parent: SparkUI) extends SparkUITab(parent, "OAP") with Logging {
  // val listener = parent.executorsListener
  attachPage(new FiberCacheManagerPage(this))
  parent.attachTab(this)
  // Serve bundled static resources (JS/CSS) for this tab.
  parent.addStaticHandler(OapTab.STATIC_RESOURCE_DIR, "/static/oap")
}
object OapTab {
  // Classpath directory containing the tab's static assets (mounted at /static/oap).
  private val STATIC_RESOURCE_DIR = "oap/static"
}
| Intel-bigdata/OAP | oap-cache/oap/src/main/scala/org/apache/spark/sql/oap/ui/OapTab.scala | Scala | apache-2.0 | 1,266 |
/*
* Copyright 2020 Spotify AB.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.scio.extra.hll.zetasketch
import com.spotify.scio.testing.PipelineSpec
import com.spotify.scio.testing.ApproximationAssertions._
// Each test feeds 10001 values cycling through 20 distinct keys, so the exact
// distinct count is 20; assertions allow a generous approximation error.
// NOTE(review): the class name has a duplicated "Test" suffix vs. the file
// name (ZetaSketchHllPlusPlusTest.scala) — renaming would change the public
// name, so it is only flagged here.
class ZetaSketchHllPlusPlusTestTest extends PipelineSpec {
  "ZetasketchHLL++" should "estimate int distinct count" in {
    val estimator = ZetaSketchHllPlusPlus[Int]()
    val input = for (i <- 0 to 10000) yield (i % 20)
    val output = runWithData(input) { scl =>
      scl
        .countApproxDistinct(estimator)
    }
    output shouldApproximate withErrorRate(Seq(20L), 0.6)
  }
  it should "estimate strings distinct count" in {
    val estimator = ZetaSketchHllPlusPlus[String]()
    val input = for (i <- 0 to 10000) yield s"${i % 20}_"
    val output = runWithData(input) { scl =>
      scl
        .countApproxDistinct(estimator)
    }
    output shouldApproximate withErrorRate(Seq(20L), 0.6)
  }
  it should "estimate longs distinct count" in {
    val estimator = ZetaSketchHllPlusPlus[Long]()
    val input = for (i <- 0L to 10000) yield (i % 20)
    val output = runWithData(input) { scl =>
      scl
        .countApproxDistinct(estimator)
    }
    output shouldApproximate withErrorRate(Seq(20L), 0.6)
  }
  it should "estimate byte array distinct count" in {
    val estimator = ZetaSketchHllPlusPlus[Array[Byte]]()
    val input = for (i <- 0 to 10000) yield (s"${i % 20}_".getBytes)
    val output = runWithData(input) { scl =>
      scl
        .countApproxDistinct(estimator)
    }
    output shouldApproximate withErrorRate(Seq(20L), 0.6)
  }
  it should "estimate distinct count per key" in {
    val estimator = ZetaSketchHllPlusPlus[Int]()
    val upperLimit = 10000
    val in = 0 to upperLimit
    // 5 keys, each with upperLimit/5 distinct values expected.
    val expt = for (i <- 0 until 5) yield (i, (upperLimit / 5).toLong)
    val output = runWithData(in) { scl =>
      scl
        .keyBy(_ % 5)
        .countApproxDistinctByKey(estimator)
    }
    output shouldApproximate withErrorRatePerKey(expt, 0.5)
  }
}
| spotify/scio | scio-extra/src/test/scala/com/spotify/scio/extra/hll/zetasketch/ZetaSketchHllPlusPlusTest.scala | Scala | apache-2.0 | 2,544 |
package breeze.linalg
/*
Copyright 2012 David Hall
Licensed under the Apache License, Version 2.0 (the "License")
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import operators._
import scala.{specialized=>spec}
import breeze.generic.CanMapValues
import breeze.math.{TensorSpace, Ring, Field}
import collection.immutable.BitSet
import support.{CanZipMapValues, CanCopy}
import util.Random
import breeze.storage.Storage
/**
*
* @author dlwh
*/
/** Common operations for vector-like tensors indexed by Int. */
trait VectorLike[@spec E, +Self <: Vector[E]] extends Tensor[Int, E] with TensorLike[Int, E, Self] {
  /** Maps fn over all values, building the result via the CanMapValues type class. */
  def map[E2, That](fn: E=>E2)(implicit canMapValues: CanMapValues[Self, E, E2, That]):That = values map fn
  // Side-effecting traversal over all values.
  def foreach[U](fn: E=>U) { values foreach fn }
  /** Returns a copy of this vector. */
  def copy: Self
}
/** A finite, Int-indexed vector of elements E with dense index set 0 until length. */
trait Vector[@spec(Int, Double, Float) E] extends VectorLike[E, Vector[E]]{
  // All valid indices.
  def keySet: Set[Int] = BitSet( (0 until length) :_*)
  def length: Int
  override def size = length
  // (index, value) pairs over the full index range.
  def iterator = Iterator.range(0, size).map{i => i -> apply(i)}
  def valuesIterator = Iterator.range(0, size).map{i => apply(i)}
  def keysIterator = Iterator.range(0, size)
  /** Returns the k-norm of this Vector. */
  def norm(n : Double)(implicit field: Ring[E]) : Double = {
    if (n == 1) {
      // L1: sum of element magnitudes (via the Ring's norm).
      var sum = 0.0
      activeValuesIterator foreach (v => sum += field.norm(v))
      sum
    } else if (n == 2) {
      // L2: square root of the sum of squared magnitudes.
      var sum = 0.0
      activeValuesIterator foreach (v => { val nn = field.norm(v); sum += nn * nn })
      math.sqrt(sum)
    } else if (n == Double.PositiveInfinity) {
      // L-infinity: maximum magnitude.
      var max = Double.NegativeInfinity
      activeValuesIterator foreach (v => { val nn = field.norm(v); if (nn > max) max = nn })
      max
    } else {
      // General case: (sum |v|^n)^(1/n).
      var sum = 0.0
      activeValuesIterator foreach (v => { val nn = field.norm(v); sum += math.pow(nn,n) })
      math.pow(sum, 1.0 / n)
    }
  }
}
/** Type-class instances (copy, map, zip-map, negation, tensor spaces) for Vector. */
object Vector extends VectorOps_Int with VectorOps_Double with VectorOps_Float {
  implicit def canCopy[E]:CanCopy[Vector[E]] = new CanCopy[Vector[E]] {
    // Should not inherit from T=>T because those get used by the compiler.
    def apply(t: Vector[E]): Vector[E] = t.copy
  }
  // There's a bizarre error specializing float's here.
  class CanZipMapValuesVector[@specialized(Int, Double) V, @specialized(Int, Double) RV:ClassManifest] extends CanZipMapValues[Vector[V],V,RV,Vector[RV]] {
    // Results are always materialized as DenseVector.
    def create(length : Int) = new DenseVector(new Array[RV](length))
    /**Maps all corresponding values from the two collection. */
    def map(from: Vector[V], from2: Vector[V], fn: (V, V) => RV) = {
      require(from.length == from2.length, "Vector lengths must match!")
      val result = create(from.length)
      var i = 0
      while (i < from.length) {
        result.data(i) = fn(from(i), from2(i))
        i += 1
      }
      result
    }
  }
  implicit def canMapValues[V, V2](implicit man: ClassManifest[V2]):CanMapValues[Vector[V], V, V2, Vector[V2]] = {
    new CanMapValues[Vector[V], V, V2, Vector[V2]] {
      /**Maps all key-value pairs from the given collection. */
      def map(from: Vector[V], fn: (V) => V2) = {
        DenseVector.tabulate(from.length)(i => fn(from(i)))
      }
      /**Maps all active key-value pairs from the given collection. */
      def mapActive(from: Vector[V], fn: (V) => V2) = {
        map(from, fn)
      }
    }
  }
  // Negation derived from scalar multiplication by -1.
  // NOTE(review): the second type parameter is *named* `Double`, shadowing
  // scala.Double — it is never used on the right-hand side; confirm it is not
  // a leftover typo.
  implicit def negFromScale[@specialized(Int, Float, Double) V, Double](implicit scale: BinaryOp[Vector[V], V, OpMulScalar, Vector[V]], field: Ring[V]) = {
    new UnaryOp[Vector[V], OpNeg, Vector[V]] {
      override def apply(a : Vector[V]) = {
        scale(a, field.negate(field.one))
      }
    }
  }
  implicit def zipMap[V, R:ClassManifest] = new CanZipMapValuesVector[V, R]
  // Pre-built zip-map instances for the common primitive element types.
  implicit val zipMap_d = new CanZipMapValuesVector[Double, Double]
  implicit val zipMap_f = new CanZipMapValuesVector[Float, Float]
  implicit val zipMap_i = new CanZipMapValuesVector[Int, Int]
  // Tensor-space instances for the common primitive element types.
  implicit val space_d = TensorSpace.make[Vector[Double], Int, Double]
  implicit val space_f = TensorSpace.make[Vector[Float], Int, Float]
  implicit val space_i = TensorSpace.make[Vector[Int], Int, Int]
}
/** Factory methods shared by companion objects of Vector implementations. */
trait VectorConstructors[Vec[T]<:Vector[T]] {
  /** Returns a zero-initialized vector of the given size. */
  def zeros[V:ClassManifest](size: Int):Vec[V]
  /** Wraps the given array as a vector. */
  def apply[@spec(Double, Int, Float) V](values: Array[V]):Vec[V]
  def apply[V:ClassManifest](values: V*):Vec[V] = {
    // manual specialization so that we create the right DenseVector specialization... @specialized doesn't work here
    val man = implicitly[ClassManifest[V]]
    if(man == manifest[Double]) apply(values.toArray.asInstanceOf[Array[Double]]).asInstanceOf[Vec[V]]
    else if (man == manifest[Float]) apply(values.toArray.asInstanceOf[Array[Float]]).asInstanceOf[Vec[V]]
    else if (man == manifest[Int]) apply(values.toArray.asInstanceOf[Array[Int]]).asInstanceOf[Vec[V]]
    else apply(values.toArray)
    //      apply(values.toArray)
  }
  /** Returns a vector whose entries are all `v` (evaluated per entry). */
  def fill[@spec(Double, Int, Float) V:ClassManifest](size: Int)(v: =>V):Vec[V] = apply(Array.fill(size)(v))
  /** Returns a vector whose i-th entry is f(i). */
  def tabulate[@spec(Double, Int, Float) V:ClassManifest](size: Int)(f: Int=>V):Vec[V]= apply(Array.tabulate(size)(f))
  // Uniform [0, 1) random vector; caller may supply the RNG for reproducibility.
  def rand(size: Int, rand: Random = new Random()) = {
    fill(size)(rand.nextDouble())
  }
}
trait StorageVector[E] extends Vector[E] with Storage[E] | tjhunter/scalanlp-core | math/src/main/scala/breeze/linalg/Vector.scala | Scala | apache-2.0 | 5,641 |
/*
* Copyright (c) 2013-14 Miles Sabin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shapeless
import org.junit.Test
import org.junit.Assert._
import test._
import testutil._
import ops.coproduct._
class CoproductTests {
type ISB = Int :+: String :+: Boolean :+: CNil
type III = Int :+: Int :+: Int :+: CNil
trait Fruit
case class Apple() extends Fruit
case class Pear() extends Fruit
case class Banana() extends Fruit
type APB = Apple :+: Pear :+: Banana :+: CNil
object size extends Poly1 {
implicit val caseInt = at[Int](_ => 1)
implicit val caseString = at[String](_.length)
implicit val caseBoolean = at[Boolean](_ => 1)
}
@Test
def testInject {
implicitly[Inject[Int :+: CNil, Int]]
implicitly[Inject[Int :+: Int :+: CNil, Int]]
implicitly[Inject[Int :+: Int :+: Int :+: CNil, Int]]
implicitly[Inject[String :+: Int :+: CNil, Int]]
implicitly[Inject[Int :+: String :+: CNil, Int]]
val foo1 = Coproduct[ISB](23)
val foo2 = Coproduct[ISB]("foo")
val foo3 = Coproduct[ISB](true)
illTyped("""
val foo4 = Coproduct[ISB](1.0)
""")
illTyped("""
val foo4 = Coproduct[ISB](CNil)
""")
}
@Test
def testMatch {
def cpMatch(v: ISB) = v match {
case Inl(x) =>
typed[Int](x)
case Inr(Inl(x)) =>
typed[String](x)
case Inr(Inr(Inl(x))) =>
typed[Boolean](x)
case Inr(Inr(Inr(_))) => ??? // This impossible case required for exhaustivity
}
val foo1 = Coproduct[ISB](23)
val foo2 = Coproduct[ISB]("foo")
val foo3 = Coproduct[ISB](true)
cpMatch(foo1)
cpMatch(foo2)
cpMatch(foo3)
}
@Test
def testSelect {
val foo1 = Coproduct[ISB](23)
val foo2 = Coproduct[ISB]("foo")
val foo3 = Coproduct[ISB](true)
val sel1a = foo1.select[Int]
typed[Option[Int]](sel1a)
assertEquals(Some(23), sel1a)
val sel1b = foo1.select[String]
typed[Option[String]](sel1b)
assertEquals(None, sel1b)
val sel1c = foo1.select[Boolean]
typed[Option[Boolean]](sel1c)
assertEquals(None, sel1c)
illTyped("""
foo1.select[Double]
""")
val sel2a = foo2.select[Int]
typed[Option[Int]](sel2a)
assertEquals(None, sel2a)
val sel2b = foo2.select[String]
typed[Option[String]](sel2b)
assertEquals(Some("foo"), sel2b)
val sel2c = foo2.select[Boolean]
typed[Option[Boolean]](sel2c)
assertEquals(None, sel2c)
illTyped("""
foo2.select[Double]
""")
val sel3a = foo3.select[Int]
typed[Option[Int]](sel3a)
assertEquals(None, sel3a)
val sel3b = foo3.select[String]
typed[Option[String]](sel3b)
assertEquals(None, sel3b)
val sel3c = foo3.select[Boolean]
typed[Option[Boolean]](sel3c)
assertEquals(Some(true), sel3c)
illTyped("""
foo3.select[Double]
""")
}
@Test
def testFlatMap {
type S = String; type I = Int; type D = Double; type C = Char
val in1 = Coproduct[I :+: CNil](1)
val is = Coproduct[I :+: S :+: CNil](1)
val isd = Coproduct[I :+: S :+: D :+: CNil](1)
object coIdentity extends Poly1 {
implicit def default[A] = at[A](a => Coproduct[A :+: CNil](a))
}
val r1 = in1.flatMap(coIdentity)
assertTypedEquals[I :+: CNil](in1, r1)
val r2 = is.flatMap(coIdentity)
assertTypedEquals[I :+: S :+: CNil](is, r2)
object coSquare extends Poly1 {
implicit def default[A] = at[A](a => Coproduct[A :+: A :+: CNil](a))
}
val r3 = in1.flatMap(coSquare)
assertTypedEquals[I :+: I :+: CNil](Coproduct[I :+:I :+: CNil](1), r3)
val r4 = is.flatMap(coSquare)
assertTypedEquals[I :+: I :+: S :+: S :+: CNil](
Coproduct[I :+: I :+: S :+: S :+: CNil](1), r4)
object complex extends Poly1 {
implicit def caseInt = at[Int](i => Coproduct[S :+: CNil](i.toString))
implicit def caseString = at[String](s => Coproduct[C :+: D :+: CNil](s(0)))
implicit def caseDouble = at[Double](d => Coproduct[I :+: S :+: CNil](d.toInt))
}
val r5 = isd.flatMap(complex)
assertTypedEquals[S :+: C :+: D :+: I :+: S :+: CNil](
Coproduct[S :+: C :+: D :+: I :+: S :+: CNil]("1"), r5)
}
@Test
def testMap {
val foo1 = Coproduct[ISB](23)
val foo2 = Coproduct[ISB]("foo")
val foo3 = Coproduct[ISB](true)
val foo1b = foo1 map size
typed[III](foo1b)
assertEquals(Inl(1), foo1b)
val foo2b = foo2 map size
typed[III](foo2b)
assertEquals(Inr(Inl(3)), foo2b)
val foo3b = foo3 map size
typed[III](foo3b)
assertEquals(Inr(Inr(Inl(1))), foo3b)
}
@Test
def testUnify {
val foo1 = Coproduct[ISB](23)
val foo2 = Coproduct[ISB]("foo")
val foo3 = Coproduct[ISB](true)
val foo1b = foo1 map size
val foo2b = foo2 map size
val foo3b = foo3 map size
val foo1c = foo1b.unify
typed[Int](foo1c)
assertEquals(1, foo1c)
val foo2c = foo2b.unify
typed[Int](foo2c)
assertEquals(3, foo2c)
val foo3c = foo3b.unify
typed[Int](foo3c)
assertEquals(1, foo3c)
val f1 = Coproduct[APB](Apple())
val f2 = Coproduct[APB](Pear())
val f3 = Coproduct[APB](Banana())
val f1b = f1.unify
typed[Fruit](f1b)
val f2b = f2.unify
typed[Fruit](f2b)
val f3b = f3.unify
typed[Fruit](f3b)
// See https://github.com/milessabin/shapeless/issues/242
case class Foo[T](c: T)
val existentials1 = Coproduct[Foo[Double] :+: Foo[Float] :+: CNil](Foo(23F)).unify
val existentials2 = Coproduct[Foo[Double] :+: Foo[Float] :+: Foo[Int] :+: CNil](Foo(23F)).unify
typed[Foo[_ >: Float with Double <: AnyVal]](existentials1)
typed[Foo[_ >: Int with Float with Double <: AnyVal]](existentials2)
}
  // `fold` applies a Poly1 to the inhabited alternative and returns the
  // bare result (no coproduct wrapper) — it requires the poly to be
  // defined at every alternative so the results share a type.
  @Test
  def testFold {
    import poly.identity
    val foo1 = Coproduct[ISB](23)
    val foo2 = Coproduct[ISB]("foo")
    val foo3 = Coproduct[ISB](true)
    val foo1b = foo1 fold size
    val foo2b = foo2 fold size
    val foo3b = foo3 fold size
    typed[Int](foo1b)
    assertEquals(1, foo1b)
    typed[Int](foo2b)
    assertEquals(3, foo2b)
    typed[Int](foo3b)
    assertEquals(1, foo3b)
    // Folding with the identity poly gives the LUB of the alternatives.
    val f1 = Coproduct[APB](Apple())
    val f2 = Coproduct[APB](Pear())
    val f3 = Coproduct[APB](Banana())
    val f1b = f1 fold identity
    typed[Fruit](f1b)
    val f2b = f2 fold identity
    typed[Fruit](f2b)
    val f3b = f3 fold identity
    typed[Fruit](f3b)
  }
  // `zipWithKeys` tags each alternative of a coproduct with a singleton
  // key, turning it into a discriminated union; `get(key)` then returns
  // Some only when the keyed alternative is the inhabited one.
  @Test
  def testWithKeys {
    import syntax.singleton._
    import union._
    import ops.union._
    type U = Union.`'i -> Int, 's -> String, 'b -> Boolean`.T
    val cKeys = Keys[U].apply()
    val u1 = Coproduct[ISB](23).zipWithKeys(cKeys)
    val v1 = u1.get('i)
    typed[Option[Int]](v1)
    assertEquals(Some(23), v1)
    assertEquals(None, u1.get('s))  // present key, but not the inhabited branch
    val u2 = Coproduct[ISB]("foo").zipWithKeys(cKeys)
    val v2 = u2.get('s)
    typed[Option[String]](v2)
    assertEquals(Some("foo"), v2)
    assertEquals(None, u2.get('b))
    val u3 = Coproduct[ISB](true).zipWithKeys(cKeys)
    val v3 = u3.get('b)
    typed[Option[Boolean]](v3)
    assertEquals(Some(true), v3)
    assertEquals(None, u3.get('i))
    illTyped("v3.get('d)")  // unknown key must not compile
    // key/value lengths must match up
    illTyped("u1.zipWithKeys(uKeys.tail)")
    // Explicit type argument
    {
      val u1 = Coproduct[ISB](23).zipWithKeys[HList.`'i, 's, 'b`.T]
      val v1 = u1.get('i)
      typed[Option[Int]](v1)
      assertEquals(Some(23), v1)
      assertEquals(None, u1.get('s))
    }
    {
      val u2 = Coproduct[ISB]("foo").zipWithKeys[HList.`'i, 's, 'b`.T]
      val v2 = u2.get('s)
      typed[Option[String]](v2)
      assertEquals(Some("foo"), v2)
      assertEquals(None, u2.get('b))
    }
    {
      val u3 = Coproduct[ISB](true).zipWithKeys[HList.`'i, 's, 'b`.T]
      val v3 = u3.get('b)
      typed[Option[Boolean]](v3)
      assertEquals(Some(true), v3)
      assertEquals(None, u3.get('i))
      illTyped("v3.get('d)")
    }
    // More keys than alternatives must not compile either.
    illTyped(" Coproduct[ISB](true).zipWithKeys[HList.`'i, 's, 'b, 'd`.T] ")
  }
  // The derived PartialOrdering for a coproduct compares two values only
  // when they inhabit the same alternative (delegating to that element's
  // Ordering) and returns None across different alternatives.
  @Test
  def testPartialOrdering {
    val (one, two, abc, xyz) =
      (Coproduct[ISB](1), Coproduct[ISB](2), Coproduct[ISB]("abc"), Coproduct[ISB]("xyz"))
    def assertPOEquals(expected: Option[Int], l: ISB, r: ISB)(implicit po: PartialOrdering[ISB]) =
      assertEquals(s"${l} ${r}", expected, po.tryCompare(l, r))
    assertPOEquals(Some(0), one, one)
    assertPOEquals(Some(-1), one, two)
    assertPOEquals(Some(1), two, one)
    assertPOEquals(Some(0), abc, abc)
    // String comparison yields the char difference: 'a' - 'x' == -23.
    assertPOEquals(Some(-23), abc, xyz)
    assertPOEquals(Some(23), xyz, abc)
    // Different alternatives are incomparable.
    assertPOEquals(None, one, abc)
    assertPOEquals(None, abc, one)
  }
  // `length` is a type-level operation returning the number of
  // alternatives in the coproduct as a Nat, regardless of which
  // alternative is inhabited.
  @Test
  def testLength {
    val r1 = Coproduct[Int :+: CNil](123).length
    assertTypedEquals[Nat._1](Nat._1, r1)
    val r2 = Coproduct[Int :+: String :+: CNil](123).length
    assertTypedEquals[Nat._2](Nat._2, r2)
    val r3 = Coproduct[Int :+: String :+: Double :+: CNil](123).length
    assertTypedEquals[Nat._3](Nat._3, r3)
    val r4 = Coproduct[Int :+: String :+: Double :+: Char :+: CNil](123).length
    assertTypedEquals[Nat._4](Nat._4, r4)
  }
  // `extendRight[T]` widens the coproduct by appending a new (never
  // inhabited here) alternative T on the right; the value itself is
  // unchanged.
  @Test
  def testExtendRight {
    type S = String; type I = Int; type D = Double; type C = Char
    type CoI = I :+: CNil
    type CoIS = I :+: S :+: CNil
    type CoISD = I :+: S :+: D :+: CNil
    type CoISDC = I :+: S :+: D :+: C :+: CNil
    val r1 = Coproduct[CoI](1).extendRight[S]
    assertTypedEquals[CoIS](Coproduct[CoIS](1), r1)
    val r2 = Coproduct[CoIS](1).extendRight[D]
    assertTypedEquals[CoISD](Coproduct[CoISD](1), r2)
    val r3 = Coproduct[CoISD](1).extendRight[C]
    assertTypedEquals[CoISDC](Coproduct[CoISDC](1), r3)
  }
  // `extendLeft[T]` widens the coproduct by prepending a new alternative
  // T on the left; the inhabited value keeps its identity but moves one
  // Inr deeper.
  @Test
  def testExtendLeft {
    type S = String; type I = Int; type D = Double; type C = Char
    type CoI = I :+: CNil
    type CoSI = S :+: I :+: CNil
    type CoDSI = D :+: S :+: I :+: CNil
    type CoCDSI = C :+: D :+: S :+: I :+: CNil
    val r1 = Coproduct[CoI](1).extendLeft[S]
    assertTypedEquals[CoSI](Coproduct[CoSI](1), r1)
    val r2 = Coproduct[CoSI](1).extendLeft[D]
    assertTypedEquals[CoDSI](Coproduct[CoDSI](1), r2)
    val r3 = Coproduct[CoDSI](1).extendLeft[C]
    assertTypedEquals[CoCDSI](Coproduct[CoCDSI](1), r3)
  }
  // `extendLeftBy[K]` prepends a whole coproduct type K of alternatives
  // at once; extending by CNil is the identity.
  @Test
  def testExtendLeftBy {
    type S = String; type I = Int; type D = Double; type C = Char
    type CoI = I :+: CNil
    type CoSI = S :+: I :+: CNil
    type CoDSI = D :+: S :+: I :+: CNil
    type CoCDSI = C :+: D :+: S :+: I :+: CNil
    val coi = Coproduct[CoI](1)
    val r1 = coi.extendLeftBy[CNil]
    assertTypedEquals[CoI](coi, r1)
    val r2 = coi.extendLeftBy[S :+: CNil]
    assertTypedEquals[CoSI](Coproduct[CoSI](1), r2)
    val r3 = coi.extendLeftBy[D :+: S :+: CNil]
    assertTypedEquals[CoDSI](Coproduct[CoDSI](1), r3)
    val r4 = coi.extendLeftBy[C :+: D :+: S :+: CNil]
    assertTypedEquals[CoCDSI](Coproduct[CoCDSI](1), r4)
  }
  // `extendRightBy[K]` appends a whole coproduct type K of alternatives
  // on the right; extending by CNil is the identity.
  @Test
  def testExtendRightBy {
    type S = String; type I = Int; type D = Double; type C = Char
    type CoI = I :+: CNil
    type CoIS = I :+: S :+: CNil
    type CoISD = I :+: S :+: D :+: CNil
    type CoISDC = I :+: S :+: D :+: C :+: CNil
    val coi = Coproduct[CoI](1)
    val r1 = coi.extendRightBy[CNil]
    assertTypedEquals[CoI](coi, r1)
    val r2 = coi.extendRightBy[S :+: CNil]
    assertTypedEquals[CoIS](Coproduct[CoIS](1), r2)
    val r3 = coi.extendRightBy[S :+: D :+: CNil]
    assertTypedEquals[CoISD](Coproduct[CoISD](1), r3)
    val r4 = coi.extendRightBy[S :+: D :+: C :+: CNil]
    assertTypedEquals[CoISDC](Coproduct[CoISDC](1), r4)
  }
  // `rotateLeft` cyclically shifts the coproduct's alternatives left by a
  // count given either as a runtime Int or a type-level Nat. Rotating by
  // 0 or by a multiple of the length is the identity (same instance,
  // checked with assertTypedSame); otherwise the alternatives are
  // reordered while the inhabited value is preserved.
  @Test
  def testRotateLeft {
    import Nat._
    type S = String; type I = Int; type D = Double; type C = Char
    val in1 = Coproduct[I :+: CNil](1)
    val in2 = Coproduct[I :+: S :+: CNil](1)
    val in3 = Coproduct[I :+: S :+: D :+: CNil](1)
    val in4 = Coproduct[I :+: S :+: D :+: C :+: CNil](1)
    { // rotateLeft(0)
      val r1 = in1.rotateLeft(0)
      assertTypedSame[I :+: CNil](in1, r1)
      val r2 = in2.rotateLeft(0)
      assertTypedSame[I :+: S :+: CNil](in2, r2)
      val r3 = in3.rotateLeft(0)
      assertTypedSame[I :+: S :+: D :+: CNil](in3, r3)
      val r4 = in4.rotateLeft(0)
      assertTypedSame[I :+: S :+: D :+: C :+: CNil](in4, r4)
    }
    { // rotateLeft[_0]
      val r1 = in1.rotateLeft[_0]
      assertTypedSame[I :+: CNil](in1, r1)
      val r2 = in2.rotateLeft[_0]
      assertTypedSame[I :+: S :+: CNil](in2, r2)
      val r3 = in3.rotateLeft[_0]
      assertTypedSame[I :+: S :+: D :+: CNil](in3, r3)
      val r4 = in4.rotateLeft[_0]
      assertTypedSame[I :+: S :+: D :+: C :+: CNil](in4, r4)
    }
    { // rotateLeft(n % size == 0)
      val r1 = in1.rotateLeft(1)
      assertTypedSame[I :+: CNil](in1, r1)
      val r2 = in1.rotateLeft(2)
      assertTypedSame[I :+: CNil](in1, r2)
      val r3 = in2.rotateLeft(2)
      assertTypedSame[I :+: S :+: CNil](in2, r3)
      val r4 = in2.rotateLeft(4)
      assertTypedSame[I :+: S :+: CNil](in2, r4)
      val r5 = in3.rotateLeft(3)
      assertTypedSame[I :+: S :+: D :+: CNil](in3, r5)
      val r6 = in3.rotateLeft(6)
      assertTypedSame[I :+: S :+: D :+: CNil](in3, r6)
      val r7 = in4.rotateLeft(4)
      assertTypedSame[I :+: S :+: D :+: C :+: CNil](in4, r7)
      val r8 = in4.rotateLeft(8)
      assertTypedSame[I :+: S :+: D :+: C :+: CNil](in4, r8)
    }
    { // rotateLeft[N % Size == 0]
      val r1 = in1.rotateLeft[_1]
      assertTypedSame[I :+: CNil](in1, r1)
      val r2 = in1.rotateLeft[_2]
      assertTypedSame[I :+: CNil](in1, r2)
      val r3 = in2.rotateLeft[_2]
      assertTypedSame[I :+: S :+: CNil](in2, r3)
      val r4 = in2.rotateLeft[_4]
      assertTypedSame[I :+: S :+: CNil](in2, r4)
      val r5 = in3.rotateLeft[_3]
      assertTypedSame[I :+: S :+: D :+: CNil](in3, r5)
      val r6 = in3.rotateLeft[_6]
      assertTypedSame[I :+: S :+: D :+: CNil](in3, r6)
      val r7 = in4.rotateLeft[_4]
      assertTypedSame[I :+: S :+: D :+: C :+: CNil](in4, r7)
      val r8 = in4.rotateLeft[_8]
      assertTypedSame[I :+: S :+: D :+: C :+: CNil](in4, r8)
    }
    { // other(n) — genuine rotations by a runtime count
      val r1 = in2.rotateLeft(1)
      assertTypedEquals[S :+: I :+: CNil](Coproduct[S :+: I :+: CNil](1), r1)
      val r2 = in3.rotateLeft(1)
      assertTypedEquals[S :+: D :+: I :+: CNil](Coproduct[S :+: D :+: I :+: CNil](1), r2)
      val r3 = in4.rotateLeft(1)
      assertTypedEquals[S :+: D :+: C :+: I :+: CNil](Coproduct[S :+: D :+: C :+: I :+: CNil](1), r3)
      val r4 = in4.rotateLeft(2)
      assertTypedEquals[D :+: C :+: I :+: S :+: CNil](Coproduct[D :+: C :+: I :+: S :+: CNil](1), r4)
      val r5 = in4.rotateLeft(3)
      assertTypedEquals[C :+: I :+: S :+: D :+: CNil](Coproduct[C :+: I :+: S :+: D :+: CNil](1), r5)
      // counts beyond the length wrap around (5 ≡ 1, 6 ≡ 2 mod 4)
      val r6 = in4.rotateLeft(5)
      assertTypedEquals[S :+: D :+: C :+: I :+: CNil](Coproduct[S :+: D :+: C :+: I :+: CNil](1), r6)
      val r7 = in4.rotateLeft(6)
      assertTypedEquals[D :+: C :+: I :+: S :+: CNil](Coproduct[D :+: C :+: I :+: S :+: CNil](1), r7)
    }
    { // other[N] — the same rotations with a type-level count
      val r1 = in2.rotateLeft[_1]
      assertTypedEquals[S :+: I :+: CNil](Coproduct[S :+: I :+: CNil](1), r1)
      val r2 = in3.rotateLeft[_1]
      assertTypedEquals[S :+: D :+: I :+: CNil](Coproduct[S :+: D :+: I :+: CNil](1), r2)
      val r3 = in4.rotateLeft[_1]
      assertTypedEquals[S :+: D :+: C :+: I :+: CNil](Coproduct[S :+: D :+: C :+: I :+: CNil](1), r3)
      val r4 = in4.rotateLeft[_2]
      assertTypedEquals[D :+: C :+: I :+: S :+: CNil](Coproduct[D :+: C :+: I :+: S :+: CNil](1), r4)
      val r5 = in4.rotateLeft[_3]
      assertTypedEquals[C :+: I :+: S :+: D :+: CNil](Coproduct[C :+: I :+: S :+: D :+: CNil](1), r5)
      val r6 = in4.rotateLeft[_5]
      assertTypedEquals[S :+: D :+: C :+: I :+: CNil](Coproduct[S :+: D :+: C :+: I :+: CNil](1), r6)
      val r7 = in4.rotateLeft[_6]
      assertTypedEquals[D :+: C :+: I :+: S :+: CNil](Coproduct[D :+: C :+: I :+: S :+: CNil](1), r7)
    }
  }
  // Mirror of testRotateLeft for `rotateRight`: cyclic right shift by a
  // runtime Int or a type-level Nat. Rotation by 0 or a multiple of the
  // length is the identity (same instance); other counts reorder the
  // alternatives while preserving the inhabited value.
  @Test
  def testRotateRight {
    import Nat._
    type S = String; type I = Int; type D = Double; type C = Char
    val in1 = Coproduct[I :+: CNil](1)
    val in2 = Coproduct[I :+: S :+: CNil](1)
    val in3 = Coproduct[I :+: S :+: D :+: CNil](1)
    val in4 = Coproduct[I :+: S :+: D :+: C :+: CNil](1)
    { // rotateRight(0)
      val r1 = in1.rotateRight(0)
      assertTypedSame[I :+: CNil](in1, r1)
      val r2 = in2.rotateRight(0)
      assertTypedSame[I :+: S :+: CNil](in2, r2)
      val r3 = in3.rotateRight(0)
      assertTypedSame[I :+: S :+: D :+: CNil](in3, r3)
      val r4 = in4.rotateRight(0)
      assertTypedSame[I :+: S :+: D :+: C :+: CNil](in4, r4)
    }
    { // rotateRight[_0]
      val r1 = in1.rotateRight[_0]
      assertTypedSame[I :+: CNil](in1, r1)
      val r2 = in2.rotateRight[_0]
      assertTypedSame[I :+: S :+: CNil](in2, r2)
      val r3 = in3.rotateRight[_0]
      assertTypedSame[I :+: S :+: D :+: CNil](in3, r3)
      val r4 = in4.rotateRight[_0]
      assertTypedSame[I :+: S :+: D :+: C :+: CNil](in4, r4)
    }
    { // rotateRight(n % size == 0)
      val r1 = in1.rotateRight(1)
      assertTypedSame[I :+: CNil](in1, r1)
      val r2 = in1.rotateRight(2)
      assertTypedSame[I :+: CNil](in1, r2)
      val r3 = in2.rotateRight(2)
      assertTypedSame[I :+: S :+: CNil](in2, r3)
      val r4 = in2.rotateRight(4)
      assertTypedSame[I :+: S :+: CNil](in2, r4)
      val r5 = in3.rotateRight(3)
      assertTypedSame[I :+: S :+: D :+: CNil](in3, r5)
      val r6 = in3.rotateRight(6)
      assertTypedSame[I :+: S :+: D :+: CNil](in3, r6)
      val r7 = in4.rotateRight(4)
      assertTypedSame[I :+: S :+: D :+: C :+: CNil](in4, r7)
      val r8 = in4.rotateRight(8)
      assertTypedSame[I :+: S :+: D :+: C :+: CNil](in4, r8)
    }
    { // rotateRight[N % Size == 0]
      val r1 = in1.rotateRight[_1]
      assertTypedSame[I :+: CNil](in1, r1)
      val r2 = in1.rotateRight[_2]
      assertTypedSame[I :+: CNil](in1, r2)
      val r3 = in2.rotateRight[_2]
      assertTypedSame[I :+: S :+: CNil](in2, r3)
      val r4 = in2.rotateRight[_4]
      assertTypedSame[I :+: S :+: CNil](in2, r4)
      val r5 = in3.rotateRight[_3]
      assertTypedSame[I :+: S :+: D :+: CNil](in3, r5)
      val r6 = in3.rotateRight[_6]
      assertTypedSame[I :+: S :+: D :+: CNil](in3, r6)
      val r7 = in4.rotateRight[_4]
      assertTypedSame[I :+: S :+: D :+: C :+: CNil](in4, r7)
      val r8 = in4.rotateRight[_8]
      assertTypedSame[I :+: S :+: D :+: C :+: CNil](in4, r8)
    }
    { // other(n) — genuine rotations by a runtime count
      val r1 = in2.rotateRight(1)
      assertTypedEquals[S :+: I :+: CNil](Coproduct[S :+: I :+: CNil](1), r1)
      val r2 = in3.rotateRight(1)
      assertTypedEquals[D :+: I :+: S :+: CNil](Coproduct[D :+: I :+: S :+: CNil](1), r2)
      val r3 = in4.rotateRight(1)
      assertTypedEquals[C :+: I :+: S :+: D :+: CNil](Coproduct[C :+: I :+: S :+: D :+: CNil](1), r3)
      val r4 = in4.rotateRight(2)
      assertTypedEquals[D :+: C :+: I :+: S :+: CNil](Coproduct[D :+: C :+: I :+: S :+: CNil](1), r4)
      val r5 = in4.rotateRight(3)
      assertTypedEquals[S :+: D :+: C :+: I :+: CNil](Coproduct[S :+: D :+: C :+: I :+: CNil](1), r5)
      // counts beyond the length wrap around (5 ≡ 1, 6 ≡ 2 mod 4)
      val r6 = in4.rotateRight(5)
      assertTypedEquals[C :+: I :+: S :+: D :+: CNil](Coproduct[C :+: I :+: S :+: D :+: CNil](1), r6)
      val r7 = in4.rotateRight(6)
      assertTypedEquals[D :+: C :+: I :+: S :+: CNil](Coproduct[D :+: C :+: I :+: S :+: CNil](1), r7)
    }
    { // other[N] — the same rotations with a type-level count
      val r1 = in2.rotateRight[_1]
      assertTypedEquals[S :+: I :+: CNil](Coproduct[S :+: I :+: CNil](1), r1)
      val r2 = in3.rotateRight[_1]
      assertTypedEquals[D :+: I :+: S :+: CNil](Coproduct[D :+: I :+: S :+: CNil](1), r2)
      val r3 = in4.rotateRight[_1]
      assertTypedEquals[C :+: I :+: S :+: D :+: CNil](Coproduct[C :+: I :+: S :+: D :+: CNil](1), r3)
      val r4 = in4.rotateRight[_2]
      assertTypedEquals[D :+: C :+: I :+: S :+: CNil](Coproduct[D :+: C :+: I :+: S :+: CNil](1), r4)
      val r5 = in4.rotateRight[_3]
      assertTypedEquals[S :+: D :+: C :+: I :+: CNil](Coproduct[S :+: D :+: C :+: I :+: CNil](1), r5)
      val r6 = in4.rotateRight[_5]
      assertTypedEquals[C :+: I :+: S :+: D :+: CNil](Coproduct[C :+: I :+: S :+: D :+: CNil](1), r6)
      val r7 = in4.rotateRight[_6]
      assertTypedEquals[D :+: C :+: I :+: S :+: CNil](Coproduct[D :+: C :+: I :+: S :+: CNil](1), r7)
    }
  }
  // `head` returns Some(value) typed at the first alternative when that
  // alternative is inhabited, None otherwise.
  @Test
  def testHead {
    val r1 = Coproduct[Int :+: CNil](1).head
    assertTypedEquals[Option[Int]](Some(1), r1)
    val r2 = Coproduct[Int :+: String :+: CNil](1).head
    assertTypedEquals[Option[Int]](Some(1), r2)
    val r3 = Coproduct[Int :+: String :+: CNil]("foo").head
    assertTypedEquals[Option[Int]](None, r3)
  }
  // `tail` drops the first alternative: Some(rest) when the value lives
  // in the tail, None when it inhabits the head (or the tail is CNil).
  @Test
  def testTail {
    val r1 = Coproduct[Int :+: CNil](1).tail
    assertTypedEquals[Option[CNil]](None, r1)
    val r2 = Coproduct[Int :+: String :+: CNil](1).tail
    assertTypedEquals[Option[String :+: CNil]](None, r2)
    val r3 = Coproduct[Int :+: String :+: CNil]("foo").tail
    assertTypedEquals[Option[String :+: CNil]](Some(Coproduct[String :+: CNil]("foo")), r3)
  }
  // `align` reorders a coproduct's alternatives to match a target type
  // that is a permutation of the source (given either as a value `k` or
  // as an explicit type argument). All six permutations K0..K5 of
  // Int/String/Boolean are exercised for each inhabited alternative; the
  // trailing illTyped checks confirm that aligning to a type with a
  // different element set or arity does not compile.
  @Test
  def testAlign {
    type K0 = Int :+: String :+: Boolean :+: CNil
    type K1 = Int :+: Boolean :+: String :+: CNil
    type K2 = String :+: Int :+: Boolean :+: CNil
    type K3 = String :+: Boolean :+: Int :+: CNil
    type K4 = Boolean :+: Int :+: String :+: CNil
    type K5 = Boolean :+: String :+: Int :+: CNil
    val k0i = Coproduct[K0](13)
    val k0s = Coproduct[K0]("bar")
    val k0b = Coproduct[K0](false)
    val k1i = Coproduct[K1](13)
    val k1s = Coproduct[K1]("bar")
    val k1b = Coproduct[K1](false)
    val k2i = Coproduct[K2](13)
    val k2s = Coproduct[K2]("bar")
    val k2b = Coproduct[K2](false)
    val k3i = Coproduct[K3](13)
    val k3s = Coproduct[K3]("bar")
    val k3b = Coproduct[K3](false)
    val k4i = Coproduct[K4](13)
    val k4s = Coproduct[K4]("bar")
    val k4b = Coproduct[K4](false)
    val k5i = Coproduct[K5](13)
    val k5s = Coproduct[K5]("bar")
    val k5b = Coproduct[K5](false)
    type C = K0
    val ci = Coproduct[C](23)
    val cs = Coproduct[C]("foo")
    val cb = Coproduct[C](true)
    // align(value): the argument only supplies the target type.
    val a0i = ci.align(k0i)
    assertTypedEquals[K0](Coproduct[K0](23), a0i)
    val a0s = cs.align(k0s)
    assertTypedEquals[K0](Coproduct[K0]("foo"), a0s)
    val a0b = cb.align(k0b)
    assertTypedEquals[K0](Coproduct[K0](true), a0b)
    val a1i = ci.align(k1i)
    assertTypedEquals[K1](Coproduct[K1](23), a1i)
    val a1s = cs.align(k1s)
    assertTypedEquals[K1](Coproduct[K1]("foo"), a1s)
    val a1b = cb.align(k1b)
    assertTypedEquals[K1](Coproduct[K1](true), a1b)
    val a2i = ci.align(k2i)
    assertTypedEquals[K2](Coproduct[K2](23), a2i)
    val a2s = cs.align(k2s)
    assertTypedEquals[K2](Coproduct[K2]("foo"), a2s)
    val a2b = cb.align(k2b)
    assertTypedEquals[K2](Coproduct[K2](true), a2b)
    val a3i = ci.align(k3i)
    assertTypedEquals[K3](Coproduct[K3](23), a3i)
    val a3s = cs.align(k3s)
    assertTypedEquals[K3](Coproduct[K3]("foo"), a3s)
    val a3b = cb.align(k3b)
    assertTypedEquals[K3](Coproduct[K3](true), a3b)
    val a4i = ci.align(k4i)
    assertTypedEquals[K4](Coproduct[K4](23), a4i)
    val a4s = cs.align(k4s)
    assertTypedEquals[K4](Coproduct[K4]("foo"), a4s)
    val a4b = cb.align(k4b)
    assertTypedEquals[K4](Coproduct[K4](true), a4b)
    val a5i = ci.align(k5i)
    assertTypedEquals[K5](Coproduct[K5](23), a5i)
    val a5s = cs.align(k5s)
    assertTypedEquals[K5](Coproduct[K5]("foo"), a5s)
    val a5b = cb.align(k5b)
    assertTypedEquals[K5](Coproduct[K5](true), a5b)
    // align[T]: same permutations with an explicit type argument.
    val b0i = ci.align[K0]
    assertTypedEquals[K0](Coproduct[K0](23), b0i)
    val b0s = cs.align[K0]
    assertTypedEquals[K0](Coproduct[K0]("foo"), b0s)
    val b0b = cb.align[K0]
    assertTypedEquals[K0](Coproduct[K0](true), b0b)
    val b1i = ci.align[K1]
    assertTypedEquals[K1](Coproduct[K1](23), b1i)
    val b1s = cs.align[K1]
    assertTypedEquals[K1](Coproduct[K1]("foo"), b1s)
    val b1b = cb.align[K1]
    assertTypedEquals[K1](Coproduct[K1](true), b1b)
    val b2i = ci.align[K2]
    assertTypedEquals[K2](Coproduct[K2](23), b2i)
    val b2s = cs.align[K2]
    assertTypedEquals[K2](Coproduct[K2]("foo"), b2s)
    val b2b = cb.align[K2]
    assertTypedEquals[K2](Coproduct[K2](true), b2b)
    val b3i = ci.align[K3]
    assertTypedEquals[K3](Coproduct[K3](23), b3i)
    val b3s = cs.align[K3]
    assertTypedEquals[K3](Coproduct[K3]("foo"), b3s)
    val b3b = cb.align[K3]
    assertTypedEquals[K3](Coproduct[K3](true), b3b)
    val b4i = ci.align[K4]
    assertTypedEquals[K4](Coproduct[K4](23), b4i)
    val b4s = cs.align[K4]
    assertTypedEquals[K4](Coproduct[K4]("foo"), b4s)
    val b4b = cb.align[K4]
    assertTypedEquals[K4](Coproduct[K4](true), b4b)
    val b5i = ci.align[K5]
    assertTypedEquals[K5](Coproduct[K5](23), b5i)
    val b5s = cs.align[K5]
    assertTypedEquals[K5](Coproduct[K5]("foo"), b5s)
    val b5b = cb.align[K5]
    assertTypedEquals[K5](Coproduct[K5](true), b5b)
    // Aligning to a non-permutation must not compile.
    illTyped("""
      (Coproduct[String :+: CNil]).align[Int :+: CNil]
    """)
    illTyped("""
      (Coproduct[String :+: Int :+: CNil]).align[String :+: CNil]
    """)
    illTyped("""
      (Coproduct[Int :+: CNil]).align[Int :+: String :+: CNil]
    """)
  }
  // `reverse` reverses the order of the alternatives; the inhabited
  // value is preserved and re-injected at its mirrored position.
  @Test
  def testReverse {
    type S = String; type I = Int; type D = Double; type C = Char
    type SI = S :+: I :+: CNil; type IS = I :+: S :+: CNil
    val r1 = Coproduct[I :+: CNil](1).reverse
    assertTypedEquals[I :+: CNil](Coproduct[I :+: CNil](1), r1)
    val r2 = Coproduct[IS](1).reverse
    assertTypedEquals[SI](Coproduct[SI](1), r2)
    val r3 = Coproduct[IS]("foo").reverse
    assertTypedEquals[SI](Coproduct[SI]("foo"), r3)
  }
  // `init` drops the last alternative: Some(front) when the value lives
  // in the front, None when it inhabits the last alternative.
  @Test
  def testInit {
    val r1 = Coproduct[Int :+: CNil](1).init
    assertTypedEquals[Option[CNil]](None, r1)
    val r2 = Coproduct[Int :+: String :+: CNil]("foo").init
    assertTypedEquals[Option[Int :+: CNil]](None, r2)
    val r3 = Coproduct[Int :+: String :+: CNil](1).init
    assertTypedEquals[Option[Int :+: CNil]](Some(Coproduct[Int :+: CNil](1)), r3)
  }
  // `last` returns Some(value) typed at the final alternative when that
  // alternative is inhabited, None otherwise.
  @Test
  def testLast {
    val r1 = Coproduct[Int :+: CNil](1).last
    assertTypedEquals[Option[Int]](Some(1), r1)
    val r2 = Coproduct[Int :+: String :+: CNil]("foo").last
    assertTypedEquals[Option[String]](Some("foo"), r2)
    val r3 = Coproduct[Int :+: String :+: CNil](1).last
    assertTypedEquals[Option[String]](None, r3)
  }
  // `at` selects the alternative at a given index (runtime Int or
  // type-level Nat), returning Some only when that alternative is the
  // inhabited one. Here the value is always at index 0, so higher
  // indices yield None of the correct element type.
  @Test
  def testAt {
    import Nat._
    type S = String; type I = Int; type D = Double; type C = Char
    val in1 = Coproduct[I :+: CNil](1)
    val in2 = Coproduct[I :+: S :+: CNil](1)
    val in3 = Coproduct[I :+: S :+: D :+: CNil](1)
    { // runtime index
      val r1 = in1.at(0)
      assertTypedEquals[Option[I]](Some(1), r1)
      val r2 = in2.at(0)
      assertTypedEquals[Option[I]](Some(1), r2)
      val r3 = in3.at(0)
      assertTypedEquals[Option[I]](Some(1), r3)
      val r4 = in2.at(1)
      assertTypedEquals[Option[S]](None, r4)
      val r5 = in3.at(1)
      assertTypedEquals[Option[S]](None, r5)
      val r6 = in3.at(2)
      assertTypedEquals[Option[D]](None, r6)
    }
    { // type-level index
      val r1 = in1.at[nat._0]
      assertTypedEquals[Option[I]](Some(1), r1)
      val r2 = in2.at[nat._0]
      assertTypedEquals[Option[I]](Some(1), r2)
      val r3 = in3.at[nat._0]
      assertTypedEquals[Option[I]](Some(1), r3)
      val r4 = in2.at[nat._1]
      assertTypedEquals[Option[S]](None, r4)
      val r5 = in3.at[nat._1]
      assertTypedEquals[Option[S]](None, r5)
      val r6 = in3.at[nat._2]
      assertTypedEquals[Option[D]](None, r6)
    }
  }
  // `partition[T]` splits the alternatives into those equal to T (Left)
  // and the rest (Right), returning the side the value actually
  // inhabits. Duplicate occurrences of T are all collected, preserving
  // occurrence order (see the isdi case: the second I maps to the second
  // slot of I :+: I :+: CNil).
  @Test
  def testPartition {
    type S = String; type I = Int; type D = Double; type C = Char
    val i = Coproduct[I :+: CNil](1)
    val is = Coproduct[I :+: S :+: CNil](1)
    // isdi inhabits the *last* I of I :+: S :+: D :+: I :+: CNil.
    val isdi: I :+: S :+: D :+: I :+: CNil =
      Inr[I, S :+: D :+: I :+: CNil](Coproduct[S :+: D :+: I :+: CNil](2))
    val r1 = i.partition[I]
    assertTypedEquals[Either[I :+: CNil, CNil]](Left(i), r1)
    val r2 = is.partition[I]
    assertTypedEquals[Either[I :+: CNil, S :+: CNil]](Left(i), r2)
    val r3 = i.partition[S]
    assertTypedEquals[Either[CNil, I :+: CNil]](Right(i), r3)
    val r4 = is.partition[S]
    assertTypedEquals[Either[S :+: CNil, I :+: CNil]](Right(i), r4)
    val r5 = i.partition[C]
    assertTypedEquals[Either[CNil, I :+: CNil]](Right(i), r5)
    val r6 = is.partition[C]
    assertTypedEquals[Either[CNil, I :+: S :+: CNil]](Right(is), r6)
    val r7 = isdi.partition[I]
    assertTypedEquals[Either[I :+: I :+: CNil, S :+: D :+: CNil]](Left(Inr[I, I :+: CNil](Inl[I, CNil](2))), r7)
  }
  // `partitionC[T]` is the coproduct-of-coproducts variant of partition:
  // the result is (matches) :+: (non-matches) :+: CNil instead of an
  // Either, with the inhabited side injected via Inl/Inr.
  @Test
  def testPartitionC {
    type S = String; type I = Int; type D = Double; type C = Char
    val i = Coproduct[I :+: CNil](1)
    val is = Coproduct[I :+: S :+: CNil](1)
    // isdi inhabits the *last* I of I :+: S :+: D :+: I :+: CNil.
    val isdi: I :+: S :+: D :+: I :+: CNil =
      Inr[I, S :+: D :+: I :+: CNil](Coproduct[S :+: D :+: I :+: CNil](2))
    val r1 = i.partitionC[I]
    assertTypedEquals[(I :+: CNil) :+: CNil :+: CNil](Inl(i), r1)
    val r2 = is.partitionC[I]
    assertTypedEquals[(I :+: CNil) :+: (S :+: CNil) :+: CNil](Inl(i), r2)
    val r3 = i.partitionC[S]
    assertTypedEquals[CNil :+: (I :+: CNil) :+: CNil](Inr(Inl(i)), r3)
    val r4 = is.partitionC[S]
    assertTypedEquals[(S :+: CNil) :+: (I :+: CNil) :+: CNil](Inr(Inl(i)), r4)
    val r5 = i.partitionC[C]
    assertTypedEquals[CNil :+: (I :+: CNil) :+: CNil](Inr(Inl(i)), r5)
    val r6 = is.partitionC[C]
    assertTypedEquals[CNil :+: (I :+: S :+: CNil) :+: CNil](Inr(Inl(is)), r6)
    val r7 = isdi.partitionC[I]
    assertTypedEquals[(I :+: I :+: CNil) :+: (S :+: D :+: CNil) :+: CNil](
      Inl(Inr[I, I :+: CNil](Inl[I, CNil](2))), r7)
  }
  // `filter[T]` keeps only the alternatives equal to T, returning
  // Some(narrowed coproduct) when the value inhabits one of them and
  // None otherwise. Duplicate T occurrences keep their relative order.
  @Test
  def testFilter {
    type S = String; type I = Int; type D = Double; type C = Char
    val i = Coproduct[I :+: CNil](1)
    val is = Coproduct[I :+: S :+: CNil](1)
    // isdi inhabits the *last* I of I :+: S :+: D :+: I :+: CNil.
    val isdi: I :+: S :+: D :+: I :+: CNil =
      Inr[I, S :+: D :+: I :+: CNil](Coproduct[S :+: D :+: I :+: CNil](2))
    val r1 = i.filter[I]
    assertTypedEquals[Option[I :+: CNil]](Some(i), r1)
    val r2 = is.filter[I]
    assertTypedEquals[Option[I :+: CNil]](Some(i), r2)
    val r3 = i.filter[S]
    assertTypedEquals[Option[CNil]](None, r3)
    val r4 = is.filter[S]
    assertTypedEquals[Option[S :+: CNil]](None, r4)
    val r5 = i.filter[C]
    assertTypedEquals[Option[CNil]](None, r5)
    val r6 = is.filter[C]
    assertTypedEquals[Option[CNil]](None, r6)
    val r7 = isdi.filter[I]
    assertTypedEquals[Option[I :+: I :+: CNil]](Some(Inr[I, I :+: CNil](Inl[I, CNil](2))), r7)
  }
  // `filterNot[T]` removes the alternatives equal to T: Some(remainder)
  // when the value survives the removal, None when the value's own
  // alternative was removed.
  @Test
  def testFilterNot {
    type S = String; type I = Int; type D = Double; type C = Char
    val i = Coproduct[I :+: CNil](1)
    val is = Coproduct[I :+: S :+: CNil](1)
    // isdi inhabits the *last* I of I :+: S :+: D :+: I :+: CNil.
    val isdi: I :+: S :+: D :+: I :+: CNil =
      Inr[I, S :+: D :+: I :+: CNil](Coproduct[S :+: D :+: I :+: CNil](2))
    val r1 = i.filterNot[I]
    assertTypedEquals[Option[CNil]](None, r1)
    val r2 = is.filterNot[I]
    assertTypedEquals[Option[S :+: CNil]](None, r2)
    val r4 = i.filterNot[S]
    assertTypedEquals[Option[I :+: CNil]](Some(i), r4)
    val r5 = is.filterNot[S]
    assertTypedEquals[Option[I :+: CNil]](Some(i), r5)
    val r7 = i.filterNot[D]
    assertTypedEquals[Option[I :+: CNil]](Some(i), r7)
    val r8 = is.filterNot[D]
    assertTypedEquals[Option[I :+: S :+: CNil]](Some(is), r8)
    val r14 = isdi.filterNot[I]
    assertTypedEquals[Option[S :+: D :+: CNil]](None, r14)
  }
  // `split` divides the alternatives at an index (runtime Int or
  // type-level Nat) into Left(prefix coproduct) / Right(suffix
  // coproduct), returning whichever side the value inhabits. Splitting
  // past the coproduct's length does not compile (illTyped).
  @Test
  def testSplit {
    import Nat._
    type S = String; type I = Int; type D = Double; type C = Char
    val in1 = Coproduct[I :+: CNil](1)
    val is = Coproduct[I :+: S :+: CNil](1)
    val dc = Coproduct[D :+: C :+: CNil](2.0)
    val isd = Coproduct[I :+: S :+: D :+: CNil](1)
    val isdc = Coproduct[I :+: S :+: D :+: C :+: CNil](2.0)
    { // runtime index
      val r1 = in1.split(0)
      assertTypedEquals[Either[CNil, I :+: CNil]](Right(in1), r1)
      val r2 = is.split(0)
      assertTypedEquals[Either[CNil, I :+: S :+: CNil]](Right(is), r2)
      val r3 = in1.split(1)
      assertTypedEquals[Either[I :+: CNil, CNil]](Left(in1), r3)
      val r4 = is.split(1)
      assertTypedEquals[Either[I :+: CNil, S :+: CNil]](Left(in1), r4)
      val r5 = isd.split(1)
      assertTypedEquals[Either[I :+: CNil, S :+: D :+: CNil]](Left(in1), r5)
      // Cannot split at index 2 a coproduct of length 1
      illTyped(""" in1.split(2) """)
      val r7 = is.split(2)
      assertTypedEquals[Either[I :+: S :+: CNil, CNil]](Left(is), r7)
      val r8 = isd.split(2)
      assertTypedEquals[Either[I :+: S :+: CNil, D :+: CNil]](Left(is), r8)
      val r9 = isdc.split(2)
      assertTypedEquals[Either[I :+: S :+: CNil, D :+: C :+: CNil]](Right(dc), r9)
    }
    { // type-level index
      val r1 = in1.split[_0]
      assertTypedEquals[Either[CNil, I :+: CNil]](Right(in1), r1)
      val r2 = is.split[_0]
      assertTypedEquals[Either[CNil, I :+: S :+: CNil]](Right(is), r2)
      val r3 = in1.split[_1]
      assertTypedEquals[Either[I :+: CNil, CNil]](Left(in1), r3)
      val r4 = is.split[_1]
      assertTypedEquals[Either[I :+: CNil, S :+: CNil]](Left(in1), r4)
      val r5 = isd.split[_1]
      assertTypedEquals[Either[I :+: CNil, S :+: D :+: CNil]](Left(in1), r5)
      // Cannot split at index 2 a coproduct of length 1
      illTyped(""" in1.split[_2] """)
      val r7 = is.split[_2]
      assertTypedEquals[Either[I :+: S :+: CNil, CNil]](Left(is), r7)
      val r8 = isd.split[_2]
      assertTypedEquals[Either[I :+: S :+: CNil, D :+: CNil]](Left(is), r8)
      val r9 = isdc.split[_2]
      assertTypedEquals[Either[I :+: S :+: CNil, D :+: C :+: CNil]](Right(dc), r9)
    }
  }
  // `splitC` is the coproduct-of-coproducts variant of split: the result
  // type is (prefix) :+: (suffix) :+: CNil rather than an Either, with
  // the value injected into the side it inhabits. Out-of-range indices
  // do not compile (illTyped).
  @Test
  def testSplitC {
    import Nat._
    type S = String; type I = Int; type D = Double; type C = Char
    val in1 = Coproduct[I :+: CNil](1)
    val is = Coproduct[I :+: S :+: CNil](1)
    val dc = Coproduct[D :+: C :+: CNil](2.0)
    val isd = Coproduct[I :+: S :+: D :+: CNil](1)
    val isdc = Coproduct[I :+: S :+: D :+: C :+: CNil](2.0)
    { // runtime index
      val r1 = in1.splitC(0)
      assertTypedEquals[CNil :+: (I :+: CNil) :+: CNil](
        Coproduct[CNil :+: (I :+: CNil) :+: CNil](in1), r1)
      val r2 = is.splitC(0)
      assertTypedEquals[CNil :+: (I :+: S :+: CNil) :+: CNil](
        Coproduct[CNil :+: (I :+: S :+: CNil) :+: CNil](is), r2)
      val r3 = in1.splitC(1)
      assertTypedEquals[(I :+: CNil) :+: CNil :+: CNil](
        Coproduct[(I :+: CNil) :+: CNil :+: CNil](in1), r3)
      val r4 = is.splitC(1)
      assertTypedEquals[(I :+: CNil) :+: (S :+: CNil) :+: CNil](
        Coproduct[(I :+: CNil) :+: (S :+: CNil) :+: CNil](in1), r4)
      val r5 = isd.splitC(1)
      assertTypedEquals[(I :+: CNil) :+: (S :+: D :+: CNil) :+: CNil](
        Coproduct[(I :+: CNil) :+: (S :+: D :+: CNil) :+: CNil](in1), r5)
      // Cannot split at index 2 a coproduct of length 1
      illTyped(""" in1.splitC(2) """)
      val r7 = is.splitC(2)
      assertTypedEquals[(I :+: S :+: CNil) :+: CNil :+: CNil](
        Coproduct[(I :+: S :+: CNil) :+: CNil :+: CNil](is), r7)
      val r8 = isd.splitC(2)
      assertTypedEquals[(I :+: S :+: CNil) :+: (D :+: CNil) :+: CNil](
        Coproduct[(I :+: S :+: CNil) :+: (D :+: CNil) :+: CNil](is), r8)
      val r9 = isdc.splitC(2)
      assertTypedEquals[(I :+: S :+: CNil) :+: (D :+: C :+: CNil) :+: CNil](
        Coproduct[(I :+: S :+: CNil) :+: (D :+: C :+: CNil) :+: CNil](dc), r9)
    }
    { // type-level index
      val r1 = in1.splitC[_0]
      assertTypedEquals[CNil :+: (I :+: CNil) :+: CNil](
        Coproduct[CNil :+: (I :+: CNil) :+: CNil](in1), r1)
      val r2 = is.splitC[_0]
      assertTypedEquals[CNil :+: (I :+: S :+: CNil) :+: CNil](
        Coproduct[CNil :+: (I :+: S :+: CNil) :+: CNil](is), r2)
      val r3 = in1.splitC[_1]
      assertTypedEquals[(I :+: CNil) :+: CNil :+: CNil](
        Coproduct[(I :+: CNil) :+: CNil :+: CNil](in1), r3)
      val r4 = is.splitC[_1]
      assertTypedEquals[(I :+: CNil) :+: (S :+: CNil) :+: CNil](
        Coproduct[(I :+: CNil) :+: (S :+: CNil) :+: CNil](in1), r4)
      val r5 = isd.splitC[_1]
      assertTypedEquals[(I :+: CNil) :+: (S :+: D :+: CNil) :+: CNil](
        Coproduct[(I :+: CNil) :+: (S :+: D :+: CNil) :+: CNil](in1), r5)
      // Cannot split at index 2 a coproduct of length 1
      illTyped(""" in1.splitC[_2] """)
      val r7 = is.splitC[_2]
      assertTypedEquals[(I :+: S :+: CNil) :+: CNil :+: CNil](
        Coproduct[(I :+: S :+: CNil) :+: CNil :+: CNil](is), r7)
      val r8 = isd.splitC[_2]
      assertTypedEquals[(I :+: S :+: CNil) :+: (D :+: CNil) :+: CNil](
        Coproduct[(I :+: S :+: CNil) :+: (D :+: CNil) :+: CNil](is), r8)
      val r9 = isdc.splitC[_2]
      assertTypedEquals[(I :+: S :+: CNil) :+: (D :+: C :+: CNil) :+: CNil](
        Coproduct[(I :+: S :+: CNil) :+: (D :+: C :+: CNil) :+: CNil](dc), r9)
    }
  }
  // `take` keeps the first n alternatives (runtime Int or type-level
  // Nat): Some(prefix coproduct) when the value is within the prefix,
  // None when it lies beyond it. Taking more alternatives than exist
  // does not compile (illTyped).
  @Test
  def testTake {
    import Nat._
    type S = String; type I = Int; type D = Double; type C = Char
    val in1 = Coproduct[I :+: CNil](1)
    val is = Coproduct[I :+: S :+: CNil](1)
    val dc = Coproduct[D :+: C :+: CNil](2.0)
    val isd = Coproduct[I :+: S :+: D :+: CNil](1)
    val isdc = Coproduct[I :+: S :+: D :+: C :+: CNil](2.0)
    { // runtime count
      val r1 = in1.take(0)
      assertTypedEquals[Option[CNil]](None, r1)
      val r2 = is.take(0)
      assertTypedEquals[Option[CNil]](None, r2)
      val r3 = in1.take(1)
      assertTypedEquals[Option[I :+: CNil]](Some(in1), r3)
      val r4 = is.take(1)
      assertTypedEquals[Option[I :+: CNil]](Some(in1), r4)
      val r5 = isd.take(1)
      assertTypedEquals[Option[I :+: CNil]](Some(in1), r5)
      // Cannot take 2 elements out of a coproduct of length 1
      illTyped(""" in1.take(2) """)
      val r7 = is.take(2)
      assertTypedEquals[Option[I :+: S :+: CNil]](Some(is), r7)
      val r8 = isd.take(2)
      assertTypedEquals[Option[I :+: S :+: CNil]](Some(is), r8)
      val r9 = isdc.take(2)
      assertTypedEquals[Option[I :+: S :+: CNil]](None, r9)  // 2.0 is past the prefix
    }
    { // type-level count
      val r1 = in1.take[_0]
      assertTypedEquals[Option[CNil]](None, r1)
      val r2 = is.take[_0]
      assertTypedEquals[Option[CNil]](None, r2)
      val r3 = in1.take[_1]
      assertTypedEquals[Option[I :+: CNil]](Some(in1), r3)
      val r4 = is.take[_1]
      assertTypedEquals[Option[I :+: CNil]](Some(in1), r4)
      val r5 = isd.take[_1]
      assertTypedEquals[Option[I :+: CNil]](Some(in1), r5)
      // Cannot take 2 elements out of a coproduct of length 1
      illTyped(""" in1.take[_2] """)
      val r7 = is.take[_2]
      assertTypedEquals[Option[I :+: S :+: CNil]](Some(is), r7)
      val r8 = isd.take[_2]
      assertTypedEquals[Option[I :+: S :+: CNil]](Some(is), r8)
      val r9 = isdc.take[_2]
      assertTypedEquals[Option[I :+: S :+: CNil]](None, r9)
    }
  }
  // `drop` removes the first n alternatives (runtime Int or type-level
  // Nat): Some(suffix coproduct) when the value lies in the suffix, None
  // when it was dropped. Dropping more alternatives than exist does not
  // compile (illTyped).
  @Test
  def testDrop {
    import Nat._
    type S = String; type I = Int; type D = Double; type C = Char
    val in1 = Coproduct[I :+: CNil](1)
    val is = Coproduct[I :+: S :+: CNil](1)
    val dc = Coproduct[D :+: C :+: CNil](2.0)
    val isd = Coproduct[I :+: S :+: D :+: CNil](1)
    val isdc = Coproduct[I :+: S :+: D :+: C :+: CNil](2.0)
    { // runtime count
      val r1 = in1.drop(0)
      assertTypedEquals[Option[I :+: CNil]](Some(in1), r1)
      val r2 = is.drop(0)
      assertTypedEquals[Option[I :+: S :+: CNil]](Some(is), r2)
      val r3 = in1.drop(1)
      assertTypedEquals[Option[CNil]](None, r3)
      val r4 = is.drop(1)
      assertTypedEquals[Option[S :+: CNil]](None, r4)
      val r5 = isd.drop(1)
      assertTypedEquals[Option[S :+: D :+: CNil]](None, r5)
      // Cannot drop 2 elements out of a coproduct of length 1
      illTyped(""" in1.drop(2) """)
      val r7 = is.drop(2)
      assertTypedEquals[Option[CNil]](None, r7)
      val r8 = isd.drop(2)
      assertTypedEquals[Option[D :+: CNil]](None, r8)
      val r9 = isdc.drop(2)
      assertTypedEquals[Option[D :+: C :+: CNil]](Some(dc), r9)  // 2.0 is in the suffix
    }
    { // type-level count
      val r1 = in1.drop[_0]
      assertTypedEquals[Option[I :+: CNil]](Some(in1), r1)
      val r2 = is.drop[_0]
      assertTypedEquals[Option[I :+: S :+: CNil]](Some(is), r2)
      val r3 = in1.drop[_1]
      assertTypedEquals[Option[CNil]](None, r3)
      val r4 = is.drop[_1]
      assertTypedEquals[Option[S :+: CNil]](None, r4)
      val r5 = isd.drop[_1]
      assertTypedEquals[Option[S :+: D :+: CNil]](None, r5)
      // Cannot drop 2 elements out of a coproduct of length 1
      illTyped(""" in1.drop[_2] """)
      val r7 = is.drop[_2]
      assertTypedEquals[Option[CNil]](None, r7)
      val r8 = isd.drop[_2]
      assertTypedEquals[Option[D :+: CNil]](None, r8)
      val r9 = isdc.drop[_2]
      assertTypedEquals[Option[D :+: C :+: CNil]](Some(dc), r9)
    }
  }
  // `removeElem[T]` extracts the first T alternative: Left(value) when
  // inhabited by that T, Right(remainder coproduct) otherwise.
  // `removeElemC[T]` is the coproduct-shaped variant, moving the first T
  // to the front of the result type. The ii case guards against the
  // duplicate-element regression of shapeless issue #251.
  @Test
  def testRemoveElem {
    type S = String; type I = Int; type D = Double; type C = Char
    val i = Coproduct[I :+: CNil](1)
    val is = Coproduct[I :+: S :+: CNil](1)
    val ii = Coproduct[I :+: I :+: CNil](1)
    val r1 = i.removeElemC[I]
    assertTypedEquals[I :+: CNil](i, r1)
    val r2 = i.removeElem[I]
    assertTypedEquals[Either[I, CNil]](Left(1), r2)
    val r3 = is.removeElemC[I]
    assertTypedEquals[I :+: S :+: CNil](is, r3)
    val r4 = is.removeElem[I]
    assertTypedEquals[Either[I, S :+: CNil]](Left(1), r4)
    val r5 = is.removeElemC[S]
    assertTypedEquals[S :+: I :+: CNil](Coproduct[S :+: I :+: CNil](1), r5)
    val r6 = is.removeElem[S]
    assertTypedEquals[Either[S, I :+: CNil]](Right(i), r6)
    // See https://github.com/milessabin/shapeless/issues/251
    val r7 = ii.removeElemC[I]
    assertTypedEquals[I :+: I :+: CNil](ii, r7)
    val r8 = ii.removeElem[I]
    assertTypedEquals[Either[I, I :+: CNil]](Left(1), r8)
  }
  // `Remove[C, T].inverse` reconstructs a value of C from the
  // Either produced by removal: Left(t) re-injects at the *first* T
  // alternative, Right(rest) re-injects the remainder at its original
  // positions.
  @Test
  def testRemoveInverse = {
    type S = String; type I = Int; type D = Double; type C = Char
    val i = Coproduct[I :+: CNil](1)
    val is = Coproduct[I :+: S :+: CNil](1)
    val is0 = Coproduct[I :+: S :+: CNil]("a")
    // Note: Coproduct.apply injects at the first matching alternative,
    // so iis inhabits the leading I.
    val iis = Coproduct[I :+: S :+: I :+: CNil](2)
    val iis0 = Coproduct[I :+: S :+: I :+: CNil]("b")
    val u1 = Remove[I :+: CNil, I]
    val r1 = u1.inverse(Left(1))
    assertTypedEquals[I :+: CNil](i, r1)
    val u2 = Remove[I :+: S :+: CNil, I]
    val r2 = u2.inverse(Left(1))
    assertTypedEquals[I :+: S :+: CNil](is, r2)
    val r2_0 = u2.inverse(Right(Inl("a")))
    assertTypedEquals[I :+: S :+: CNil](is0, r2_0)
    val u3 = Remove[I :+: S :+: I :+: CNil, I]
    val r3 = u3.inverse(Left(2))
    assertTypedEquals[I :+: S :+: I :+: CNil](iis, r3)
    val r3_0 = u3.inverse(Right(Inl("b")))
    assertTypedEquals[I :+: S :+: I :+: CNil](iis0, r3_0)
  }
@Test
def testRemoveLastInverse = {
  // Readable aliases for the element types under test.
  type S = String; type I = Int; type D = Double; type C = Char
  val i = Coproduct[I :+: CNil](1)
  val is = Coproduct[I :+: S :+: CNil](1)
  val is0 = Coproduct[I :+: S :+: CNil]("a")
  // Unlike Remove, RemoveLast targets the LAST occurrence of the type,
  // hence the explicit injection into the trailing I slot here.
  val iis: I :+: S :+: I :+: CNil = Inr(Inr(Inl(2)))
  val iis0 = Coproduct[I :+: S :+: I :+: CNil]("b")
  val u1 = RemoveLast[I :+: CNil, I]
  val r1 = u1.inverse(Left(1))
  assertTypedEquals[I :+: CNil](i, r1)
  val u2 = RemoveLast[I :+: S :+: CNil, I]
  val r2 = u2.inverse(Left(1))
  assertTypedEquals[I :+: S :+: CNil](is, r2)
  val r2_0 = u2.inverse(Right(Inl("a")))
  assertTypedEquals[I :+: S :+: CNil](is0, r2_0)
  // These two are different from testRemoveInverse
  val u3 = RemoveLast[I :+: S :+: I :+: CNil, I]
  val r3 = u3.inverse(Left(2))
  assertTypedEquals[I :+: S :+: I :+: CNil](iis, r3)
  val r3_0 = u3.inverse(Right(Inr(Inl("b"))))
  assertTypedEquals[I :+: S :+: I :+: CNil](iis0, r3_0)
}
@Test
def testToHList {
  // ToHList converts a coproduct's member types into an HList of the same
  // types in the same order; checked here purely at the type level.
  type CISB = Int :+: String :+: Boolean :+: CNil
  type PISBa = Int :: String :: Boolean :: HNil
  type PISBb = the.`ToHList[CISB]`.Out
  implicitly[PISBa =:= PISBb]
}
@Test
def testEmbedDeembed {
  // S1 ⊂ S2 ⊂ S3; S4 omits Int, S5 duplicates Int — used for negative cases.
  type S1 = Int :+: CNil
  type S2 = Int :+: String :+: CNil
  type S3 = Int :+: String :+: Boolean :+: CNil
  type S4 = String :+: Boolean :+: CNil
  type S5 = Int :+: Int :+: Int :+: CNil
  // embed widens a coproduct into a superset coproduct; deembed narrows back.
  val c1_0 = Coproduct[S1](5)
  val c1_1 = c1_0.embed[S2]
  assertTypedEquals[S2](c1_1, Coproduct[S2](5))
  assertTypedEquals[S1](c1_0, c1_1.deembed[S1].right.get)
  val c1_2 = c1_0.embed[S3]
  assertTypedEquals[S3](c1_2, Coproduct[S3](5))
  assertTypedEquals[S1](c1_0, c1_2.deembed[S1].right.get)
  val c2_0 = Coproduct[S2]("toto")
  val c2 = c2_0.embed[S3]
  assertTypedEquals[S3](c2, Coproduct[S3]("toto"))
  assertTypedEquals[S2](c2_0, c2.deembed[S2].right.get)
  // Embedding into a coproduct missing the source's types must not compile.
  illTyped("Coproduct[S1](5).embed[S4]")
  // See https://github.com/milessabin/shapeless/issues/253
  illTyped("Coproduct[S5](3).embed[S1]")
  // Duplicated types must round-trip to the same injection, not just an equal value.
  // See https://github.com/milessabin/shapeless/issues/253#issuecomment-59648119
  {
    type II = Int :+: Int :+: CNil
    type IDI = Int :+: Double :+: Int :+: CNil
    val c1: II = Inr(Inl(1))
    val c2: II = Inl(1)
    val c1_0 = c1.embed[IDI].deembed[II].right.get
    val c2_0 = c2.embed[IDI].deembed[II].right.get
    assertTypedEquals[II](c1, c1_0)
    assertTypedEquals[II](c2, c2_0)
    assert(c2 != c1_0)
  }
}
@Test
def testCoproductTypeSelector {
  import syntax.singleton._
  // Coproduct.`...`.T is a macro-backed type selector building a coproduct
  // type from a comma-separated list of types and/or literal values.
  {
    type C = Coproduct.` `.T
    implicitly[C =:= CNil]
  }
  {
    type C = Coproduct.`Int`.T
    typed[C](Inl(23))
  }
  {
    type C = Coproduct.`Int, String`.T
    typed[C](Inl(23))
    typed[C](Inr(Inl("foo")))
  }
  {
    type C = Coproduct.`Int, String, Boolean`.T
    typed[C](Inl(23))
    typed[C](Inr(Inl("foo")))
    typed[C](Inr(Inr(Inl(true))))
  }
  // Literal types
  {
    type C = Coproduct.`2`.T
    typed[C](Inl(2.narrow))
  }
  {
    type C = Coproduct.`2, "a", true`.T
    typed[C](Inl(2.narrow))
    typed[C](Inr(Inl("a".narrow)))
    typed[C](Inr(Inr(Inl(true.narrow))))
  }
  {
    // A different literal value must not inhabit the singleton type.
    type C = Coproduct.`2`.T
    illTyped(""" typed[C](Inl(3.narrow)) """)
    ()
  }
  // Mix of standard and literal types
  {
    type C = Coproduct.`2, String, true`.T
    typed[C](Inl(2.narrow))
    typed[C](Inr(Inl("a")))
    typed[C](Inr(Inr(Inl(true.narrow))))
  }
}
}
| japgolly/shapeless | core/src/test/scala/shapeless/coproduct.scala | Scala | apache-2.0 | 46,918 |
package com.sksamuel.elastic4s.searches.queries
import org.elasticsearch.index.query.QueryBuilders
/**
 * Wraps a multi-term query (wildcard, fuzzy, prefix, ...) so it can be used
 * inside a span query, delegating to Elasticsearch's span-multi-term builder.
 *
 * @param query the multi-term query to adapt
 */
case class SpanMultiTermQueryDefinition(query: MultiTermQueryDefinition) extends SpanQueryDefinition {
  override val builder = QueryBuilders.spanMultiTermQueryBuilder(query.builder)
}
| ulric260/elastic4s | elastic4s-core/src/main/scala/com/sksamuel/elastic4s/searches/queries/SpanMultiTermQueryDefinition.scala | Scala | apache-2.0 | 286 |
package org.jetbrains.plugins.scala
package codeInsight.intention.expression
import com.intellij.codeInsight.hint.HintManager
import com.intellij.codeInsight.intention.PsiElementBaseIntentionAction
import com.intellij.openapi.application.ApplicationManager
import com.intellij.openapi.editor.Editor
import com.intellij.openapi.project.Project
import com.intellij.openapi.util.TextRange
import com.intellij.psi.util.PsiTreeUtil
import com.intellij.psi.{PsiDocumentManager, PsiElement}
import org.jetbrains.plugins.scala.codeInsight.intention.expression.IntroduceImplicitParameterIntention._
import org.jetbrains.plugins.scala.codeInspection.InspectionBundle
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.api.ScalaRecursiveElementVisitor
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.ScParameter
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory.createExpressionFromText
import org.jetbrains.plugins.scala.lang.psi.types.ScTypeExt
import org.jetbrains.plugins.scala.lang.refactoring.util.ScalaRefactoringUtil
import org.jetbrains.plugins.scala.project.ProjectContext
import scala.annotation.tailrec
import scala.collection.mutable
/**
* @author Ksenia.Sautina
* @since 4/18/12
*/
object IntroduceImplicitParameterIntention {
  def familyName = "Introduce implicit parameter"

  /**
   * Rewrites an explicit function literal (e.g. `(x, y) => x + y`) into its
   * underscore form (`_ + _`), when every parameter is referenced exactly once
   * and in declaration order.
   *
   * @param expr                  the function literal to rewrite
   * @param withoutParameterTypes when true, always emit bare `_` even if the
   *                              declared parameter type differs from the expected one
   * @return Left(rewritten expression) on success, Right(error message) otherwise
   */
  def createExpressionToIntroduce(expr: ScFunctionExpr, withoutParameterTypes: Boolean): Either[ScExpression, String] = {
    implicit val ctx: ProjectContext = expr

    // Maps each parameter reference's text to its start offset. Returns an
    // empty map when any parameter is referenced more than once (clearMap),
    // which makes the underscore rewrite invalid.
    def seekParams(fun: ScFunctionExpr): mutable.HashMap[String, Int] = {
      val map: mutable.HashMap[String, Int] = new mutable.HashMap[String, Int]()
      var clearMap = false
      val visitor = new ScalaRecursiveElementVisitor {
        override def visitReferenceExpression(expr: ScReferenceExpression) {
          expr.resolve() match {
            case p: ScParameter if fun.parameters.contains(p) =>
              if (!map.keySet.contains(expr.getText)) {
                map.put(expr.getText, expr.getTextRange.getStartOffset)
              } else {
                clearMap = true
              }
            case _ =>
          }
          super.visitReferenceExpression(expr)
        }
      }
      fun.accept(visitor)
      if (clearMap) map.clear()
      map
    }

    // The rewritten expression must contain exactly `paramCount` underscore
    // sections, possibly wrapped in a single-expression block or parentheses.
    @tailrec
    def isValidExpr(expr: ScExpression, paramCount: Int): Boolean = {
      if (ScUnderScoreSectionUtil.underscores(expr).length == paramCount) return true
      expr match {
        case e: ScBlockExpr if e.exprs.size == 1 =>
          isValidExpr(e.exprs.head, paramCount)
        case e: ScParenthesisedExpr =>
          isValidExpr(ScalaRefactoringUtil.unparExpr(e), paramCount)
        case _ => false
      }
    }

    val result = expr.result.getOrElse(return Right(InspectionBundle.message("introduce.implicit.not.allowed.here")))
    val buf = new StringBuilder
    buf.append(result.getText)

    // Offsets from seekParams are absolute; subtract the body's start offset
    // to index into `buf`.
    val diff = result.getTextRange.getStartOffset
    var previousOffset = -1
    val occurrences = seekParams(expr)

    if (occurrences.isEmpty || occurrences.size != expr.parameters.size)
      return Right(InspectionBundle.message("introduce.implicit.incorrect.count"))

    // Each parameter must be referenced in the same order it is declared.
    for (p <- expr.parameters) {
      if (!occurrences.keySet.contains(p.name) || occurrences(p.name) < previousOffset)
        return Right(InspectionBundle.message("introduce.implicit.incorrect.order"))
      previousOffset = occurrences(p.name)
    }

    // Replace references from right to left so earlier offsets stay valid.
    for (p <- expr.parameters.reverse) {
      val expectedType = p.expectedParamType
      val declaredType = p.typeElement
      val newParam = declaredType match {
        case None => "_"
        case _ if withoutParameterTypes => "_"
        // The declared type matches what is expected, so it can be elided.
        case Some(t) if expectedType.exists(_.equiv(t.getType().getOrAny)) => "_"
        case Some(_) => s"(_: ${p.typeElement.get.getText})"
      }
      val offset = occurrences(p.name) - diff
      buf.replace(offset, offset + p.name.length, newParam)
    }

    val newExpr = createExpressionFromText(buf.toString())(expr.getManager)
    if (!isValidExpr(newExpr, expr.parameters.length))
      return Right(InspectionBundle.message("introduce.implicit.not.allowed.here"))
    Left(newExpr)
  }
}
/** Intention action wiring [[IntroduceImplicitParameterIntention.createExpressionToIntroduce]] into the IDE. */
class IntroduceImplicitParameterIntention extends PsiElementBaseIntentionAction {
  def getFamilyName: String = familyName

  override def getText: String = getFamilyName

  /** Available when the caret sits on (or just after) the parameter clause of a function literal. */
  def isAvailable(project: Project, editor: Editor, element: PsiElement): Boolean = {
    val funExpr = PsiTreeUtil.getParentOfType(element, classOf[ScFunctionExpr], false)
    if (funExpr == null) false
    else {
      val paramsRange: TextRange = funExpr.params.getTextRange
      val caret = editor.getCaretModel.getOffset
      paramsRange.getStartOffset <= caret && caret <= paramsRange.getEndOffset + 3
    }
  }

  override def invoke(project: Project, editor: Editor, element: PsiElement) {
    val funExpr: ScFunctionExpr = PsiTreeUtil.getParentOfType(element, classOf[ScFunctionExpr], false)
    if (funExpr == null || !funExpr.isValid) return

    val caretTarget = funExpr.getTextRange.getStartOffset
    createExpressionToIntroduce(funExpr, withoutParameterTypes = false) match {
      case Left(replacement) =>
        inWriteAction {
          funExpr.replace(replacement)
          editor.getCaretModel.moveToOffset(caretTarget)
          PsiDocumentManager.getInstance(project).commitDocument(editor.getDocument)
        }
      case Right(message) =>
        // In tests, surface the failure loudly; otherwise show an editor hint.
        if (ApplicationManager.getApplication.isUnitTestMode) throw new RuntimeException(message)
        else HintManager.getInstance().showErrorHint(editor, message)
    }
  }
}
| loskutov/intellij-scala | src/org/jetbrains/plugins/scala/codeInsight/intention/expression/IntroduceImplicitParameterIntention.scala | Scala | apache-2.0 | 5,819 |
package pt.up.fe.iart.proj1.problem
import pt.up.fe.iart.proj1.problem
import pt.up.fe.iart.proj1.problem.Location._
import scala.collection.immutable.HashSet
import scala.collection.GenSeq
import pt.up.fe.iart.proj1.collections.Graph
import java.io.FileInputStream
import org.antlr.v4.runtime.{CommonTokenStream, ANTLRInputStream}
import pt.up.fe.iart.proj1.parser.{GraphVisitor, PTPParser, PTPLexer}
import pt.up.fe.iart.proj1.solver.Problem
/**
 * Search-problem formulation of patient transportation: an ambulance with a
 * limited capacity and gas tank must pick up every patient and deliver each to
 * its destination (or the nearest filiation when none is given), starting from
 * a filiation with a garage.
 *
 * @param map               node index -> location on the map
 * @param costs             adjacency matrix; None means no direct edge
 * @param ambulanceCapacity max patients carried at once
 * @param maxGasLevel       tank capacity; refilled at gas stations
 */
class PatientTransportationProblem(map: Map[Int, Location], costs: Array[Array[Option[Double]]], ambulanceCapacity: Int, maxGasLevel: Double) extends Problem[State, Int] {

  // Node indices partitioned by the kind of location they hold.
  val patientLocations = map.filter(x => isPatientLocation(x._2)).keySet.to[HashSet]
  val filiations = map.filter(x => isFiliation(x._2)).keySet.to[HashSet]
  val gasStations = map.filter(x => isGasStation(x._2)).keySet.to[HashSet]
  val nonPickupLocations = map.filter(x => isGenericLocation(x._2)).keySet.to[HashSet]

  // Missing edges become Double.MaxValue so they are never selected as a minimum.
  val doubleCosts = for (row <- costs) yield for (value <- row) yield value.getOrElse(Double.MaxValue)

  val mapIndices: Map[Location, Int] = map.map(_.swap)

  private def nearestFiliation(locIndex: Int): (Location, Double) = {
    for (fl <- filiations) yield map(fl) -> doubleCosts(locIndex)(fl)
  }.minBy(_._2)

  private def nearestFiliation(location: Location): (Location, Double) = nearestFiliation(mapIndices(location))

  private def patientLocation(p: Patient) = map.values.find(loc => patient(loc).exists(_ == p))

  // For every patient: the cost from their pickup node to their destination
  // (or to the nearest filiation when no explicit destination exists).
  val estimatedCostToDeliverMap: Map[Patient, Double] = {
    for (pl <- patientLocations) yield {
      val patient = Location.patient(map(pl)).get
      patient -> doubleCosts(pl)(mapIndices(patient.destination.getOrElse(nearestFiliation(pl)._1)))
    }
  }.toMap

  /** The node each patient must be delivered to. */
  def patientDestination(p: Patient): Location = {
    val pl = patientLocation(p).get
    p.destination.getOrElse(nearestFiliation(pl)._1)
  }

  // Cheapest incoming edge into each patient location, from any other node.
  val estimatedCostToPickupMap = {
    for (pl <- patientLocations) yield pl -> {
      for ((row, from) <- doubleCosts.zipWithIndex if from != pl) yield row(pl)
    }.min
  }.toMap

  // Keeps the first element for each distinct key produced by `fun`
  // (pre-2.13 stand-in for List#distinctBy).
  private def distinctBy[A, B](fun: A => B, list: List[A]): List[A] = {
    val b = collection.mutable.ArrayBuffer[A]()
    val seen = collection.mutable.HashSet[B]()
    for (x <- list) {
      val y = fun(x)
      if (!seen(y)) {
        b += x
        seen += y
      }
    }
    b.to[List]
  }

  /**
   * Heuristic: cheapest pickup cost for every patient not yet visited, plus
   * one delivery cost per distinct destination still to be served (covering
   * both unpicked patients and those already in the ambulance).
   */
  override def estimatedCostToGoal(from: State) = {
    estimatedCostToPickupMap.filterKeys { k => from.currentLocation != k && !from.previousLocations.contains(k) }.foldLeft(0.0)(_ + _._2) +
      distinctBy(
        (p: Patient) => mapIndices(patientDestination(p)),
        patientLocations.diff(from.previousLocations + from.currentLocation).flatMap(pl => Location.patient(map(pl)) match {
          case Some(p) => List(p);
          case None => List()
        }).to[List] ++ from.patientsAmbulance
      ).map(estimatedCostToDeliverMap).sum
  }

  /** Debug helper: prints the heuristic's components for a state. */
  def printEstimatedCostToGoal(from: State) = {
    println(from)
    val e = estimatedCostToPickupMap.filterKeys { k => from.currentLocation != k && !from.previousLocations.contains(k)}.foldLeft(0.0)(_ + _._2)
    val e2 = distinctBy(
      (p: Patient) => mapIndices(patientDestination(p)),
      patientLocations.diff(from.previousLocations + from.currentLocation).flatMap(pl => Location.patient(map(pl)) match {
        case Some(p) => List(p);
        case None => List()
      }).to[List] ++ from.patientsAmbulance
    ).map(estimatedCostToDeliverMap).sum
    println("estimatedCostToPickupMap: " + e)
    println("estimatedCostToDeliver: " + e2)
    println("total: " + (e + e2))
    println()
  }

  /** Cost of moving between adjacent nodes is the edge weight. */
  override def stepCost(from: State, action: Int, to: State): Double = doubleCosts(from.currentLocation)(action)

  /**
   * Applying an action (moving to node `a`): picks up a patient at a patient
   * location, drops off matching patients at a filiation, refuels at a gas
   * station, and debits gas for the traversed edge otherwise.
   */
  override def result(s: State, a: Int): State = new State(
    s.previousLocations + s.currentLocation,
    a,
    map(a) match {
      case PatientLocation(_, p) => p :: s.patientsAmbulance
      case f@Filiation(_, _) => s.patientsAmbulance.filterNot(p => p.destination.isEmpty || p.destination.exists(_ == f))
      case _ => s.patientsAmbulance
    },
    map(a) match {
      case GasStation(_) => maxGasLevel
      case _ => s.gasLevel - doubleCosts(s.currentLocation)(a)
    }
  )

  /**
   * Reachable actions: unvisited patient locations (if capacity remains),
   * relevant filiations (if carrying patients), and gas stations (if not
   * full), restricted to edges affordable with the current gas level.
   */
  override def actions(s: State): List[Int] = ((
    (if (s.numberPatientsAmbulance < ambulanceCapacity) patientLocations -- s.previousLocations else HashSet.empty[Int]) ++
      (if (s.numberPatientsAmbulance > 0) {
        if (s.patientsAmbulance.exists(p => p.destination.isEmpty))
          filiations
        else
          filiations.filter(x => {
            val f = map(x)
            s.patientsAmbulance.exists(p => p.destination.exists(_ == f))
          })
      }
      else
        HashSet.empty[Int])
      ++ (if (s.gasLevel < maxGasLevel) gasStations else HashSet.empty[Int])
    ) - s.currentLocation).filter { index => doubleCosts(s.currentLocation)(index) <= s.gasLevel }.toList

  /** Goal: the ambulance is empty and every patient location has been visited. */
  override def goalTest(s: State): Boolean = s.numberPatientsAmbulance == 0 && patientLocations.forall(s.previousLocations.contains)

  /** The search starts at the filiation that has a garage, with a full tank. */
  override def initialState: State = map.find { case (index, loc) => Location.isFiliation(loc) && Location.hasGarage(loc)} match {
    case Some((index, _)) => new State(HashSet.empty, index, List.empty, maxGasLevel)
    case None => throw new Error("No initial state")
  }
}
object PatientTransportationProblem {
  /**
   * Parses a map description file into a location graph using the generated
   * ANTLR parser. The input stream is always closed, even when parsing fails
   * (the original implementation leaked the FileInputStream).
   *
   * @param fileName path to the map description file
   */
  def readGraph(fileName: String): Graph[problem.Location] = {
    val is = new FileInputStream(fileName)
    try {
      val input = new ANTLRInputStream(is)
      val lexer = new PTPLexer(input)
      val tokens = new CommonTokenStream(lexer)
      val parser = new PTPParser(tokens)
      val tree = parser.map()
      val visitor = new GraphVisitor()
      visitor.visit(tree)
    } finally {
      is.close()
    }
  }
}
| migulorama/feup-iart-2014 | src/main/scala/pt/up/fe/iart/proj1/problem/PatientTransportationProblem.scala | Scala | mit | 6,784 |
package akka.persistence.cassandra.compaction
import com.typesafe.config.Config
import scala.collection.JavaConverters._
/*
* Based upon https://github.com/apache/cassandra/blob/cassandra-2.2/src/java/org/apache/cassandra/db/compaction/SizeTieredCompactionStrategy.java
*/
/**
 * Cassandra size-tiered compaction configuration: validates the supplied
 * config against the supported property keys and exposes each tuning knob
 * with its documented default.
 */
class SizeTieredCompactionStrategy(config: Config) extends BaseCompactionStrategy(config) {
  require(config.hasPath("class") && config.getString("class") == SizeTieredCompactionStrategy.ClassName, s"Config does not specify a ${SizeTieredCompactionStrategy.ClassName}")
  require(
    config.entrySet()
      .asScala
      .map(_.getKey)
      .forall(SizeTieredCompactionStrategy.propertyKeys.contains(_)),
    s"Config contains properties not supported by a ${SizeTieredCompactionStrategy.ClassName}"
  )

  // Read an optional property, falling back to the strategy's default.
  private def doubleOr(key: String, default: Double): Double = if (config.hasPath(key)) config.getDouble(key) else default
  private def intOr(key: String, default: Int): Int = if (config.hasPath(key)) config.getInt(key) else default
  private def longOr(key: String, default: Long): Long = if (config.hasPath(key)) config.getLong(key) else default

  val bucketHigh: Double = doubleOr("bucket_high", 1.5)
  val bucketLow: Double = doubleOr("bucket_low", 0.5)
  val coldReadsToOmit: Double = doubleOr("cold_reads_to_omit", 0.05)
  val maxThreshold: Int = intOr("max_threshold", 32)
  val minThreshold: Int = intOr("min_threshold", 4)
  val minSSTableSize: Long = longOr("min_sstable_size", 50)

  // Two-value constraints now report both offending values in the message.
  require(bucketHigh > bucketLow, s"bucket_high must be larger than bucket_low, but was $bucketHigh (bucket_low: $bucketLow)")
  require(maxThreshold > 0, s"max_threshold must be larger than 0, but was $maxThreshold")
  require(minThreshold > 1, s"min_threshold must be larger than 1, but was $minThreshold")
  require(maxThreshold > minThreshold, s"max_threshold must be larger than min_threshold, but was $maxThreshold (min_threshold: $minThreshold)")
  require(minSSTableSize > 0, s"min_sstable_size must be larger than 0, but was $minSSTableSize")

  /** Renders this strategy as the CQL map used in a table's compaction clause. */
  override def asCQL: String =
    s"""{
       |'class' : '${SizeTieredCompactionStrategy.ClassName}',
       |${super.asCQL},
       |'bucket_high' : $bucketHigh,
       |'bucket_low' : $bucketLow,
       |'cold_reads_to_omit' : $coldReadsToOmit,
       |'max_threshold' : $maxThreshold,
       |'min_threshold' : $minThreshold,
       |'min_sstable_size' : $minSSTableSize
       |}
     """.stripMargin.trim
}
object SizeTieredCompactionStrategy extends CassandraCompactionStrategyConfig[SizeTieredCompactionStrategy] {
  override val ClassName: String = "SizeTieredCompactionStrategy"

  /** All accepted property keys: the base compaction keys plus this strategy's own knobs, sorted. */
  override def propertyKeys: List[String] = {
    val sizeTieredKeys = List(
      "bucket_high",
      "bucket_low",
      "cold_reads_to_omit",
      "max_threshold",
      "min_threshold",
      "min_sstable_size"
    )
    (BaseCompactionStrategy.propertyKeys union sizeTieredKeys).sorted
  }

  override def fromConfig(config: Config): SizeTieredCompactionStrategy = new SizeTieredCompactionStrategy(config)
}
} | jparkie/akka-persistence-cassandra | src/main/scala/akka/persistence/cassandra/compaction/SizeTieredCompactionStrategy.scala | Scala | apache-2.0 | 2,891 |
package bad.robot.radiate
object StatusAggregator {
  /** Factory for an aggregator over the given statuses. */
  def aggregated(statuses: Iterable[Status]) = new StatusAggregator(statuses)
}
/** Collapses many statuses into one, worst-status-wins. */
class StatusAggregator(statuses: Iterable[Status]) {
  /**
   * Broken beats Unknown, Unknown beats Ok; an empty set of statuses
   * counts as Unknown.
   */
  def getStatus: Status =
    if (statuses.exists(_ == Broken)) Broken
    else if (statuses.isEmpty || statuses.exists(_ == Unknown)) Unknown
    else Ok
}
package objektwerks
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.expressions.scalalang.typed
import org.apache.spark.sql.{Dataset, Row}
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
/**
 * Exercises the Spark Dataset API (columns, transforms, grouping, windows,
 * joins) against a small person fixture.
 *
 * The fixture holds four records; the assertions below imply ages 21-24 and
 * roles husband/wife (fred/wilma/barney/betty) — confirm against
 * ./data/person/person.json.
 */
class DatasetTest extends AnyFunSuite with Matchers {
  import SparkInstance._
  import org.apache.spark.sql.expressions._
  import org.apache.spark.sql.functions._
  import sparkSession.implicits._

  val dataset = sparkSession.read.json("./data/person/person.json").as[Person].cache
  // NOTE(review): runs at class construction; the default save mode errors if
  // ./target/dataset/person.json already exists — confirm intended.
  dataset.write.json("./target/dataset/person.json")

  test("dataset") {
    dataset.count shouldBe 4
    assert(dataset.toDF.isInstanceOf[Dataset[Row]])
    assert(dataset.rdd.isInstanceOf[RDD[Person]])
  }

  // Four equivalent ways of referencing a column.
  test("column") {
    val idColumn = dataset.col("id")
    val nameColumn = col("name")
    val ageColumn = column("age")
    val roleColumn = expr("role")
    dataset
      .select(idColumn, nameColumn, ageColumn, roleColumn)
      .as[Person]
      .count shouldBe 4
  }

  test("selectExpr") {
    dataset
      .selectExpr("id", "name", "age", "role")
      .as[Person]
      .count shouldBe 4
  }

  // Derives a new column; 24 human years * 7 = 168 dog years for the eldest head row.
  test("extend") {
    dataset
      .withColumn("dogAge", $"age" * 7)
      .as[PersonAsDog]
      .head
      .dogAge shouldBe 168
  }

  test("update") {
    val incrementAgeNameToUpper = dataset
      .withColumn("age", 'age + 1)
      .withColumn("name", upper('name))
      .as[Person]
      .cache
    incrementAgeNameToUpper.count shouldBe 4
    incrementAgeNameToUpper.head.age shouldBe 25
    incrementAgeNameToUpper.head.name shouldBe "FRED"
  }

  // Same result as "update", but via composable Dataset => Dataset functions.
  test("transform") {
    def incrementAge(ds: Dataset[Person]): Dataset[Person] = ds.withColumn("age", $"age" + 1).as[Person]
    def nameToUpper(ds: Dataset[Person]): Dataset[Person] = ds.withColumn("name", upper($"name")).as[Person]
    val incrementAgeNameToUpper = dataset
      .transform(incrementAge)
      .transform(nameToUpper)
      .cache
    incrementAgeNameToUpper.count shouldBe 4
    incrementAgeNameToUpper.head.age shouldBe 25
    incrementAgeNameToUpper.head.name shouldBe "FRED"
  }

  test("map") {
    val mapNameToUpperCase = dataset.map(_.name.toUpperCase).cache
    mapNameToUpperCase.count shouldBe 4
    mapNameToUpperCase.head shouldBe "FRED"
  }

  test("filter") {
    val filterByName = dataset.filter(_.name == "barney").cache
    filterByName.count shouldBe 1
    filterByName.head.name shouldBe "barney"
    val filterByAge = dataset.filter(_.age > 23).cache
    filterByAge.count shouldBe 1
    filterByAge.head.age shouldBe 24
  }

  test("filter > map") {
    val betty = dataset.filter(_.name == "betty").map(_.name.toUpperCase).cache
    betty.count shouldBe 1
    betty.head shouldBe "BETTY"
  }

  test("sort") {
    val sortByName = dataset.sort('name).cache
    sortByName.count shouldBe 4
    sortByName.head.name shouldBe "barney"
  }

  test("select > orderBy") {
    val orderByName = dataset.select('name).orderBy('name).as[String].cache
    orderByName.count shouldBe 4
    orderByName.head shouldBe "barney"
  }

  test("select > agg > case class") {
    dataset.select(min(col("age"))).map(row => Age(row.getLong(0))).head shouldBe Age(21)
    dataset.select(max(col("age"))).map(row => Age(row.getLong(0))).head shouldBe Age(24)
  }

  // NOTE(review): typed.* aggregators are deprecated in newer Spark releases —
  // confirm the targeted Spark version.
  test("groupByKey > avg") {
    dataset
      .groupByKey( _.role )
      .agg( typed.avg(_.age.toDouble) )
      .map( tuple => AvgAgeByRole(tuple._1, tuple._2) )
      .collect.foreach {
        case AvgAgeByRole("husband", avgAge) => avgAge shouldBe 23.0
        case AvgAgeByRole("wife", avgAge) => avgAge shouldBe 22.0
        case AvgAgeByRole(_, _) => throw new IllegalArgumentException("GroupByRole test failed!")
      }
  }

  test("groupBy > avg") {
    val groupByRole = dataset
      .groupBy('role)
      .avg("age")
      .as[(String, Double)]
      .cache
    groupByRole.count shouldBe 2
    groupByRole.collect.foreach {
      case ("husband", avgAge) => avgAge shouldBe 23.0
      case ("wife", avgAge) => avgAge shouldBe 22.0
      case _ => fail("groupBy > avg test failed!")
    }
  }

  test("groupBy > agg(min, avg, max)") {
    val groupByRole = dataset
      .groupBy("role")
      .agg(
        min("age"),
        avg("age"),
        max("age")
      )
      .cache
    groupByRole.count shouldBe 2
    groupByRole.collect.foreach {
      case Row("husband", minAge, avgAge, maxAge) =>
        minAge shouldBe 22
        avgAge shouldBe 23.0
        maxAge shouldBe 24
      case Row("wife", minAge, avgAge, maxAge) =>
        minAge shouldBe 21
        avgAge shouldBe 22.0
        maxAge shouldBe 23
      case _ => fail("groupBy > agg( min, avg, max) test failed!")
    }
  }

  test("when > otherwise") {
    val personsWithGender = dataset
      .withColumn("gender", when($"role" === "husband", "male").otherwise("female"))
      .as[PersonWithGender]
    personsWithGender.collect.foreach {
      case PersonWithGender(_, _, _, "husband", gender) => gender shouldBe "male"
      case PersonWithGender(_, _, _, "wife", gender) => gender shouldBe "female"
      case _ => fail("when > otherwise test failed!")
    }
  }

  // Ranks rows within each role partition by descending age.
  // NOTE(review): asserting on .head relies on the "wife" partition appearing
  // first in the collected result — confirm this is deterministic.
  test("window") {
    val window = Window.partitionBy('role).orderBy($"age".desc)
    val ranking = rank.over(window).as("rank")
    val result = dataset.select(col("role"), col("name"), col("age"), ranking).as[(String, String, Long, Int)].cache
    ("wife", "wilma", 23, 1) shouldEqual result.head
  }

  test("join") {
    val persons = sparkSession.read.json("./data/person/person.json").as[Person].cache
    val tasks = sparkSession.read.json("./data/task/task.json").as[Task].cache
    persons.count shouldBe 4
    tasks.count shouldBe 4
    val joinBy = persons.col("id") === tasks.col("pid")
    val personsTasks = persons.join(tasks, joinBy)
    personsTasks.count shouldBe 4
  }
}
package scalaz.stream.mongodb
import scalaz.stream.mongodb.channel.ChannelResult
import com.mongodb.DBCollection
/**
 * The result of building a mongo collection operation: anything that can be
 * turned into a [[ChannelResult]] to run against a [[com.mongodb.DBCollection]].
 *
 * @tparam A type of the value produced when the command runs (currently a
 *           DBObject or a command result)
 */
trait MongoCollectionCommand[A] {
  def toChannelResult: ChannelResult[DBCollection,A]
}
| Spinoco/scalaz-stream-mongodb | core/src/main/scala/scalaz/stream/mongodb/MongoCollectionCommand.scala | Scala | mit | 324 |
// scalac: -Yrangepos
//
// Positive compiler test (-Yrangepos): the implicit conversion String => Bar
// must be found and positioned correctly across several call shapes.
class Foo {
  class Bar
  object Bar {
    // Allows a plain String wherever a Bar is expected.
    implicit def fromString(a: String) = new Bar
  }
  def andThen(b : Bar) = b
  def andThen1(i : Int)(b : Bar) = b
  def andThen2(b : Bar)(implicit dummy: DummyImplicit) = b
  def andThen3[T](b: Bar) = b
}
// Each call passes a String where a Bar is expected: plain, curried,
// implicit-parametered, and type-parametered applications.
object Test {
  (new Foo) andThen ("Bar")
  (new Foo).andThen1(23)("Bar")
  (new Foo) andThen2 ("Bar")
  (new Foo) andThen3[Int]("Bar")
}
| lrytz/scala | test/files/pos/t4225b.scala | Scala | apache-2.0 | 412 |
/*
Spot is a bot, implementing a subset of AIML, and some extensions.
Copyright (C) 2016 Marius Feteanu
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.spotai
package state
import scala.language.postfixOps // This is so we can write 30 seconds for duration
import scala.concurrent.{Future, Await}
import scala.concurrent.duration._
import slick.driver.SQLiteDriver.api._
import slick.lifted.ProvenShape.proveShapeOf
/**
 * [[BotContext]] persisted in a SQLite database via Slick.
 *
 * Every accessor opens its own connection (Database.forConfig("botSQL")),
 * blocks up to 30 seconds on the result, and closes the connection again.
 *
 * @param botInstanceId identifies this bot's rows in the shared tables
 */
class SQLBotContext(botInstanceId: String) extends BotContext {

  // bot id -> the last response the bot produced.
  class BotLastResponse(tag: Tag) extends Table[(String, String)](tag, "bot_last_response") {
    def id = column[String]("bot_id", O.PrimaryKey)
    def lastResponse = column[String]("last_response")
    def * = proveShapeOf((id, lastResponse))
  }
  val botLastResponse = TableQuery[BotLastResponse]

  // (bot id, predicate name) -> predicate value.
  class Predicate(tag: Tag) extends Table[(String, String, String)](tag, "bot_predicate") {
    def id = column[String]("bot_id")
    def predicateName = column[String]("name")
    def predicateValue = column[String]("value")
    def pk = primaryKey("pk_bot_predicate", (id, predicateName))
    def * = proveShapeOf((id, predicateName, predicateValue))
  }
  val predicate = TableQuery[Predicate]

  // Ensure the SQLite JDBC driver is registered before any connection opens.
  Class.forName("org.sqlite.JDBC")

  /** Creates both tables; call once before first use. */
  def setup() = {
    val db = Database.forConfig("botSQL")
    try {
      val setupAction = DBIO.seq((botLastResponse.schema ++ predicate.schema).create)
      val setupFuture: Future[Unit] = db.run(setupAction)
      Await.result(setupFuture, 30 seconds)
    } finally db.close
  }

  /** The bot's last response, or None when it has not answered yet. */
  override def lastResponse: Option[String] = {
    val db = Database.forConfig("botSQL")
    try {
      val futureResult = db.run(botLastResponse.filter(_.id === botInstanceId).map(_.lastResponse).result.headOption)
      Await.result(futureResult, 30 seconds)
    } finally db.close
  }

  /** Upserts the last response; None deletes the stored row. */
  override def lastResponse_=(lastResponse: Option[String]) = {
    val db = Database.forConfig("botSQL")
    try {
      lastResponse match {
        case Some(someResponse: String) => Await.ready(db.run(botLastResponse.insertOrUpdate((botInstanceId, someResponse))), 30 seconds)
        case None => Await.ready(db.run(botLastResponse.filter(_.id === botInstanceId).delete), 30 seconds)
      }
    } finally db.close
  }

  /** All stored predicates for this bot instance. */
  override def predicates: Map[String, String] = {
    val db = Database.forConfig("botSQL")
    try {
      val futureResult = db.run(predicate.filter(_.id === botInstanceId).map(row => (row.predicateName -> row.predicateValue)).result)
      Await.result(futureResult, 30 seconds).toMap
    } finally db.close
  }

  /**
   * Upserts the given predicates one by one. Stored predicates absent from
   * the map are left untouched (this setter does not delete).
   */
  override def predicates_=(predicates: Map[String, String]) = {
    val db = Database.forConfig("botSQL")
    try {
      // foreach, not map: the upserts are executed purely for their effect.
      predicates.foreach { case (name, value) =>
        Await.ready(db.run(predicate.insertOrUpdate((botInstanceId, name, value))), 30 seconds)
      }
    } finally db.close
  }
}
object SQLBotContext {
  /** Creates a context bound to the given bot instance. */
  def apply(botInstanceId: String): SQLBotContext = new SQLBotContext(botInstanceId)

  /** Creates the schema via a throwaway context (the instance id is irrelevant for DDL). */
  def setup() = SQLBotContext("").setup()
}
| mariusfeteanu/spot | src/main/scala/com/spotai/state/SQLBotContext.scala | Scala | gpl-3.0 | 3,605 |
/***********************************************************************
* Copyright (c) 2013-2019 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.spark.jts.udf
import org.locationtech.jts.geom.{Coordinate, GeometryFactory}
import org.apache.spark.sql.functions._
import org.junit.runner.RunWith
import org.locationtech.geomesa.spark.jts._
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
/**
 * Verifies the geometry output UDFs (WKB, GeoJSON, WKT, DMS text, geohash)
 * both through Spark SQL strings and through the typed column functions.
 * Null inputs must always yield null.
 */
@RunWith(classOf[JUnitRunner])
class GeometricOutputFunctionsTest extends Specification with TestEnvironment {

  "sql geometry constructors" should {
    sequential

    // before
    step {
      // Trigger initialization of spark session
      val _ = spark
    }

    "st_asBinary" >> {
      sc.sql("select st_asBinary(null)").collect.head(0) must beNull
      dfBlank.select(st_asBinary(lit(null))).first must beNull
      val r = sc.sql(
        """
          |select st_asBinary(st_geomFromWKT('POLYGON((0 0, 2 0, 2 2, 0 2, 0 0))'))
        """.stripMargin
      )
      // Expected well-known-binary encoding of the polygon above.
      val expected = Array[Byte](0, 0, 0, 0, 3, 0, 0, 0, 1, 0, 0, 0, 5, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 64, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 64, 0, 0, 0, 0, 0, 0, 0,
        64, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 64, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0
      )
      r.collect().head.getAs[Array[Byte]](0) mustEqual expected
      dfBlank.select(st_asBinary(st_geomFromWKT("POLYGON((0 0, 2 0, 2 2, 0 2, 0 0))"))).first mustEqual expected
    }

    // One case per GeoJSON geometry type, plus the null case.
    "st_asGeoJSON" >> {
      "null" >> {
        sc.sql("select st_asGeoJSON(null)").collect.head(0) must beNull
        dfBlank.select(st_asGeoJSON(lit(null))).first must beNull
      }
      "point" >> {
        val r = sc.sql(
          """
            |select st_asGeoJSON(st_geomFromWKT('POINT(0 0)'))
          """.stripMargin
        )
        val expected = """{"type":"Point","coordinates":[0.0,0.0]}"""
        r.collect().head.getAs[String](0) mustEqual expected
        dfBlank.select(st_asGeoJSON(st_geomFromWKT("POINT(0 0)"))).first mustEqual expected
      }
      "lineString" >> {
        val line = "LINESTRING(0 0, 1 1, 2 2)"
        val r = sc.sql(
          s"""
            |select st_asGeoJSON(st_geomFromWKT('$line'))
          """.stripMargin
        )
        val expected = """{"type":"LineString","coordinates":[[0.0,0.0],[1,1],[2,2]]}"""
        r.collect().head.getAs[String](0) mustEqual expected
        dfBlank.select(st_asGeoJSON(st_geomFromWKT(line))).first mustEqual expected
      }
      "polygon" >> {
        val poly = "POLYGON((0.45 0.75, 1.15 0.75, 1.15 1.45, 0.45 1.45, 0.45 0.75))"
        val r = sc.sql(
          s"""
            |select st_asGeoJSON(st_geomFromWKT('$poly'))
          """.stripMargin
        )
        val expected = """{"type":"Polygon","coordinates":[[[0.45,0.75],[1.15,0.75],[1.15,1.45],[0.45,1.45],[0.45,0.75]]]}"""
        r.collect().head.getAs[String](0) mustEqual expected
        dfBlank.select(st_asGeoJSON(st_geomFromWKT(poly)))
          .first mustEqual expected
      }
      "multiPoint" >> {
        val point = "MULTIPOINT((0 0), (1 1))"
        val r = sc.sql(
          s"""
            |select st_asGeoJSON(st_geomFromWKT('$point'))
          """.stripMargin
        )
        val expected = """{"type":"MultiPoint","coordinates":[[0.0,0.0],[1,1]]}"""
        r.collect().head.getAs[String](0) mustEqual expected
        dfBlank.select(st_asGeoJSON(st_geomFromWKT(point))).first mustEqual expected
      }
      "multiLineString" >> {
        val line = "MULTILINESTRING((0 0, 1 1, 2 2), (-3 -3, -2 -2, -1 -1))"
        val r = sc.sql(
          s"""
            |select st_asGeoJSON(st_geomFromWKT('$line'))
          """.stripMargin
        )
        val expected = """{"type":"MultiLineString","coordinates":[[[0.0,0.0],[1,1],[2,2]],[[-3,-3],[-2,-2],[-1,-1]]]}"""
        r.collect().head.getAs[String](0) mustEqual expected
      }
      "multiPolygon" >> {
        val poly = "MULTIPOLYGON(((0.45 0.75, 1.15 0.75, 1.15 1.45, 0.45 1.45, 0.45 0.75)),((0 0, 1 0, 1 1, 0 1, 0 0)))"
        val r = sc.sql(
          s"""
            |select st_asGeoJSON(st_geomFromWKT('$poly'))
          """.stripMargin
        )
        val expected = """{"type":"MultiPolygon","coordinates":[[[[0.45,0.75],[1.15,0.75],[1.15,1.45],[0.45,1.45],""" +
          """[0.45,0.75]]],[[[0.0,0.0],[1,0.0],[1,1],[0.0,1],[0.0,0.0]]]]}"""
        r.collect().head.getAs[String](0) mustEqual expected
        dfBlank.select(st_asGeoJSON(st_geomFromWKT(poly))).first mustEqual expected
      }
      "geometryCollection" >> {
        val geom = "GEOMETRYCOLLECTION(POINT(0 0), LINESTRING(0 0, 1 1, 2 2))"
        val r = sc.sql(
          s"""
            |select st_asGeoJSON(st_geomFromWKT('$geom'))
          """.stripMargin
        )
        val expected = """{"type":"GeometryCollection","geometries":[{"type":"Point","coordinates":[0.0,0.0]},""" +
          """{"type":"LineString","coordinates":[[0.0,0.0],[1,1],[2,2]]}]}"""
        r.collect().head.getAs[String](0) mustEqual expected
        dfBlank.select(st_asGeoJSON(st_geomFromWKT(geom))).first mustEqual expected
      }
    }

    // NOTE(review): -76.5 renders below as 77°30'W, not 76°30'W — this pins
    // the library's actual DMS formatting for negative longitudes; confirm
    // whether that rounding is intended.
    "st_asLatLonText" >> {
      sc.sql("select st_asLatLonText(null)").collect.head(0) must beNull
      import org.apache.spark.sql.functions.col
      val gf = new GeometryFactory()
      val df = sc.createDataset(Seq(gf.createPoint(new Coordinate(-76.5, 38.5)))).toDF()
      val r = df.select(st_asLatLonText(col("value")))
      r.collect().head mustEqual """38°30'0.000"N 77°30'0.000"W"""
    }

    "st_asText" >> {
      sc.sql("select st_asText(null)").collect.head(0) must beNull
      dfBlank.select(st_asText(lit(null))).first must beNull
      val point = "POINT (-76.5 38.5)"
      val r = sc.sql(
        s"""
          |select st_asText(st_geomFromWKT('$point'))
        """.stripMargin
      )
      val expected = "POINT (-76.5 38.5)"
      r.collect().head.getAs[String](0) mustEqual expected
      dfBlank.select(st_asText(st_geomFromWKT(point))).first mustEqual expected
    }

    "st_geoHash" >> {
      sc.sql("select st_geoHash(null, null)").collect.head(0) must beNull
      dfBlank.select(st_geoHash(lit(null), lit(null))).first must beNull
      val point = "POINT (-76.5 38.5)"
      // precision is in bits; 25 bits = a 5-character geohash.
      val precision = 25
      val r = sc.sql(
        s"""
          |select st_geoHash(st_geomFromWKT('$point'), $precision)
        """.stripMargin
      )
      val expected = "dqce5"
      r.collect().head.getAs[String](0) mustEqual expected
      dfBlank.select(st_geoHash(st_geomFromWKT(lit(point)), lit(precision))).first mustEqual expected
    }

    //after
    step {
      spark.stop()
    }
  }
}
| elahrvivaz/geomesa | geomesa-spark/geomesa-spark-jts/src/test/scala/org/locationtech/geomesa/spark/jts/udf/GeometricOutputFunctionsTest.scala | Scala | apache-2.0 | 7,072 |
/***********************************************************************
* Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.index.filters
import org.geotools.filter.text.ecql.ECQL
import org.junit.runner.RunWith
import org.locationtech.geomesa.index.api.ShardStrategy.NoShardStrategy
import org.locationtech.geomesa.index.index.z3.Z3IndexKeySpace
import org.locationtech.geomesa.index.utils.ExplainNull
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.specs2.matcher.MatchResult
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
/** Round-trip serialization tests for [[Z3Filter]], covering both the byte and string encodings. */
@RunWith(classOf[JUnitRunner])
class Z3FilterTest extends Specification {
  // schema with the attributes required by the z3 key space: a date and a point geometry
  val sft = SimpleFeatureTypes.createType("z3FilterTest", "dtg:Date,*geom:Point:srid=4326")
  val keySpace = new Z3IndexKeySpace(sft, NoShardStrategy, "geom", "dtg")
  // predicates exercising: bbox + short interval, bbox + multi-week interval, and time-only
  val filters = Seq(
    "bbox(geom,38,48,52,62) and dtg DURING 2014-01-01T00:00:00.000Z/2014-01-08T12:00:00.000Z",
    "bbox(geom,38,48,52,62) and dtg DURING 2013-12-15T00:00:00.000Z/2014-01-15T00:00:00.000Z",
    "dtg DURING 2014-01-01T00:00:00.000Z/2014-01-08T12:00:00.000Z"
  ).map(ECQL.toFilter)
  // the z3 index values implied by each filter, used to construct the Z3Filters under test
  val values = filters.map(keySpace.getIndexValues(_, ExplainNull))
  /** Compares two filters field-by-field (xy ranges, t ranges and the epoch bounds). */
  def compare(actual: Z3Filter, expected: Z3Filter): MatchResult[Boolean] = {
    val left = Array[AnyRef](actual.xy, actual.t, Short.box(actual.minEpoch), Short.box(actual.maxEpoch))
    val right = Array[AnyRef](expected.xy, expected.t, Short.box(expected.minEpoch), Short.box(expected.maxEpoch))
    java.util.Arrays.deepEquals(left, right) must beTrue
  }
  "Z3Filter" should {
    "serialize to and from bytes" in {
      forall(values) { value =>
        val filter = Z3Filter(value)
        val result = Z3Filter.deserializeFromBytes(Z3Filter.serializeToBytes(filter))
        compare(result, filter)
      }
    }
    "serialize to and from strings" in {
      forall(values) { value =>
        val filter = Z3Filter(value)
        val result = Z3Filter.deserializeFromStrings(Z3Filter.serializeToStrings(filter))
        compare(result, filter)
      }
    }
  }
}
| aheyne/geomesa | geomesa-index-api/src/test/scala/org/locationtech/geomesa/index/filters/Z3FilterTest.scala | Scala | apache-2.0 | 2,449 |
package models
import github.GitHubAPI
import github.GitHubEvent
/**
 * A handler for incoming GitHub webhook events.
 */
trait GitHubAction {
  /** Returns whether this action applies to the given event. */
  def isMatch(msg: GitHubEvent): Boolean
  /** Handles the event, using the given GitHub API client. */
  def process(api: GitHubAPI, msg: GitHubEvent): Unit
} | shunjikonishi/github-hook | app/models/GitHubAction.scala | Scala | mit | 184 |
package propositional.schema
import parsing.ParsableLexicalCategory
/** Lexical category accepting any token that contains none of the sequent-schema tokens. */
object Name extends ParsableLexicalCategory(
  word => !SequentSchema.allTokens.exists(tok => word.contains(tok))
)
| julianmichael/deduction | src/main/scala/propositional/schema/Name.scala | Scala | mit | 181 |
import com.typesafe.sbt.less.Import.LessKeys
import com.typesafe.sbt.web.Import._
import com.typesafe.sbt.web.SbtWeb
import sbt.Keys._
import sbt._
/**
 * Multi-project sbt build for the scalatest tutorial.
 *
 * Aggregates a plain-Scala `common` project and a Play-based `ui-piece` project,
 * sharing organization, Scala version, resolvers, compiler flags and test
 * dependencies through [[sharedSettings]].
 */
object ScalaTestTutorial extends Build {
  val appName = "scalatest-tutorial"
  val appVersion = "1.0"
  // dependency versions shared across projects
  val scalazVersion = "7.0.6" // NOTE(review): not referenced by any project below
  val playVersion = "2.3.8"
  val akkaVersion = "2.3.8"
  // Settings applied to every project in the build.
  // (A previously dangling `scalacOptions ++= Seq("-feature")` expression was a no-op:
  // a Setting constructed at the top level of a Build object is discarded, and
  // "-feature" is already enabled in the scalacOptions below.)
  val sharedSettings: Seq[Def.Setting[_]] = Seq(
    organization := "com.torfox",
    version := appVersion,
    scalaVersion := "2.11.6",
    resolvers ++= Seq(
      "Typesafe repository snapshots" at "http://repo.typesafe.com/typesafe/snapshots/",
      "Typesafe repository releases" at "http://repo.typesafe.com/typesafe/releases/",
      "Sonatype releases" at "https://oss.sonatype.org/content/repositories/releases",
      "Sonatype snapshots" at "https://oss.sonatype.org/content/repositories/snapshots",
      "Sonatype repo" at "https://oss.sonatype.org/content/groups/scala-tools/",
      "Sonatype staging" at "http://oss.sonatype.org/content/repositories/staging",
      "spray repo" at "http://repo.spray.io",
      "Java.net Maven2 Repository" at "http://download.java.net/maven/2/",
      "Twitter Repository" at "http://maven.twttr.com",
      "Websudos releases" at "http://maven.websudos.co.uk/ext-release-local"
    ),
    scalacOptions ++= Seq(
      "-language:postfixOps",
      "-language:implicitConversions",
      "-language:reflectiveCalls",
      "-language:higherKinds",
      "-language:existentials",
      "-Yinline-warnings",
      "-Xlint",
      "-deprecation",
      "-feature",
      "-unchecked"
    ),
    // logging + test dependencies common to all projects
    libraryDependencies ++= Seq(
      "com.typesafe.scala-logging" %% "scala-logging" % "3.1.0",
      "org.scalatest" %% "scalatest" % "2.2.4" % "test",
      "org.scalacheck" %% "scalacheck" % "1.12.2" % "test",
      "org.scalamock" %% "scalamock-scalatest-support" % "3.2.1" % "test"
    )
  )
  // Root project: aggregates the sub-projects so building/testing the root covers everything.
  lazy val root = Project(
    id = "scalatest-tutorial",
    base = file("."),
    settings = Defaults.coreDefaultSettings ++ sharedSettings
  ).settings(
    name := "main"
  ).aggregate(
    uiPiece,
    common
  )
  // Shared library code (akka actors, time utilities) used by the UI project.
  lazy val common = Project(
    id = "common",
    base = file("common"),
    settings = Defaults.coreDefaultSettings ++ sharedSettings
  ).settings(
    name := "common",
    libraryDependencies ++= Seq(
      "com.typesafe.akka" %% "akka-actor" % akkaVersion,
      "com.typesafe.akka" %% "akka-slf4j" % akkaVersion,
      "com.github.nscala-time" %% "nscala-time" % "1.0.0",
      "org.scalatest" %% "scalatest" % "2.2.4" % "test",
      "org.scalatestplus" %% "play" % "1.2.0" % "test"
    )
  )
  // Play web application; compiles LESS assets and depends on the common library.
  lazy val uiPiece = Project(
    id = "ui-piece",
    base = file("ui-piece"),
    settings = Defaults.coreDefaultSettings ++ sharedSettings
  ).enablePlugins(play.PlayScala, SbtWeb).settings(
    name := "ui-piece",
    libraryDependencies ++= Seq(
      "com.typesafe.play" %% "play-test" % playVersion % "test"
    ),
    includeFilter in (Assets, LessKeys.less) := "*.less"
  ).dependsOn(common)
}
| bjankie1/scalatest-tutorial | project/Build.scala | Scala | apache-2.0 | 3,421 |
package com.twitter.finagle.param
import com.twitter.finagle.Stack
/**
* Provides the `withSession` API entry point for servers.
*
* @see [[SessionParams]]
*/
trait WithServerSession[A <: Stack.Parameterized[A]] { self: Stack.Parameterized[A] =>
  /**
   * An entry point for configuring the server's sessions.
   *
   * Session might be viewed as logical connection that wraps a physical connection
   * (i.e., [[com.twitter.finagle.transport.Transport transport]]) and controls its
   * lifecycle. Sessions are used in Finagle to maintain liveness, requests cancellation,
   * draining, and many more.
   *
   * The default setup for a Finagle server's sessions is to not put any
   * timeouts on it.
   */
  val withSession: SessionParams[A] = new SessionParams(self)
}
| koshelev/finagle | finagle-core/src/main/scala/com/twitter/finagle/param/WithServerSession.scala | Scala | apache-2.0 | 780 |
/*
* @author Philip Stutz
*
* Copyright 2011 University of Zurich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.signalcollect.factory.storage
import com.signalcollect.interfaces.Storage
import com.signalcollect.interfaces.StorageFactory
import com.signalcollect.storage.JavaMapVertexStorage
/**
* Storage backed by Java HashMaps.
*/
class JavaMapStorage[@specialized(Int, Long) Id, Signal] extends StorageFactory[Id, Signal] {
  /** Creates a fresh vertex storage instance backed by Java HashMaps. */
  def createInstance: Storage[Id, Signal] = new JavaMapVertexStorage[Id, Signal]
  // human-readable factory name
  override def toString = "JavaMapStorage"
}
| danihegglin/DynDCO | src/main/scala/com/signalcollect/factory/storage/JavaMapStorage.scala | Scala | apache-2.0 | 1,107 |
/* Copyright 2017-18, Emmanouil Antonios Platanios. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.platanios.tensorflow.api.ops.variables
import org.platanios.tensorflow.api.core.exception.InvalidDataTypeException
import org.platanios.tensorflow.api.core.{Graph, Shape}
import org.platanios.tensorflow.api.ops.{Op, Output, OutputConvertible}
import org.platanios.tensorflow.api.types.DataType
/**
* @author Emmanouil Antonios Platanios
*/
trait VariableLike extends OutputConvertible {
  /** Graph where this variable is defined. */
  val graph: Graph
  /** Name of this variable. */
  val name: String
  /** Data type of this variable. */
  val dataType: DataType
  /** Shape of this variable. */
  val shape: Shape
  /** Returns a cached op which reads the last value of this variable.
    *
    * You can not assign a new value to the returned tensor as it is not a reference to the variable.
    *
    * The returned op output will not inherit the control dependencies from the scope where the value is used, which is
    * equivalent behavior to that of getting the value of a variable.
    *
    * NOTE: You usually do not need to call this method directly, as all ops that use variables do so by internally
    * converting them to tensors.
    */
  val value: Output
  /** Op responsible for initializing this variable. */
  val initializer: Op
  /** Op output that is `true` when the variable has been initialized and `false` otherwise. */
  val isInitialized: Output
  /** Value of the initialized variable. You should use this instead of the variable itself to initialize
    * another variable with a value that depends on the value of this variable.
    *
    * Example:
    * {{{
    *   // Initialize `v` with random values, and then use `initializedValue` to guarantee that `v` has been initialized
    *   // before its value is used to initialize `w`. The random tensor will only be sampled once.
    *   val v = tf.variable("v", FLOAT32, Shape(10, 40), tf.RandomTruncatedNormalInitializer())
    *   val w = tf.variable("w", initializer = tf.ConstantInitializer(v.initializedValue * 2.0))
    * }}}
    */
  val initializedValue: Output
  /** Creates an op that reads the value of this variable.
    *
    * This method should be used when there are multiple reads, or when it is desirable to read the value only after
    * some condition is true.
    *
    * The returned value may be different from that of [[value]] depending on the device being used, the control
    * dependencies, etc.
    *
    * @return Created op.
    */
  def read(name: String = "Read"): Output
  /** Creates an op that reads the value of this variable sparsely, using the provided `indices`.
    *
    * This method should be used when there are multiple reads, or when it is desirable to read the value only after
    * some condition is true.
    *
    * @param  indices Indices to use for the sparse read.
    * @param  name    Name for the created op.
    * @return Created op.
    */
  @throws[UnsupportedOperationException]
  def gather(indices: Output, name: String = "Gather"): Output
  /** Creates an op that assigns the provided value to this variable and returns its value.
    *
    * @param  value Value to assign the variable to.
    * @param  name  Name for created op.
    * @return Variable value read op, after the assignment.
    */
  @throws[UnsupportedOperationException]
  def assign(value: Output, name: String = "Assign"): Output
  /** Creates an op that adds the provided value to the current value of the variable and returns its value.
    *
    * @param  value Value to add to the current variable value.
    * @param  name  Name for created op.
    * @return Variable value read op, after the addition.
    */
  @throws[UnsupportedOperationException]
  @throws[InvalidDataTypeException]
  def assignAdd(value: Output, name: String = "AssignAdd"): Output
  /** Creates an op that subtracts the provided value from the current value of the variable and returns its value.
    *
    * @param  value Value to subtract from the current variable value.
    * @param  name  Name for created op.
    * @return Variable value read op, after the subtraction.
    */
  @throws[UnsupportedOperationException]
  @throws[InvalidDataTypeException]
  def assignSub(value: Output, name: String = "AssignSub"): Output
  /** Creates an op that applies the provided sparse value updates to this variable and returns its value.
    *
    * @param  indices Indices corresponding to the `values` used for the update.
    * @param  values  Values to use for updating, corresponding to the provided `indices`.
    * @param  name    Name for created op.
    * @return Variable value read op, after the update.
    */
  @throws[UnsupportedOperationException]
  @throws[InvalidDataTypeException]
  def assignScatter(indices: Output, values: Output, name: String = "AssignScatter"): Output
  /** Creates an op that adds the provided sparse value to the current value of the variable and returns its value.
    *
    * @param  indices Indices corresponding to the `values` being added.
    * @param  values  Values to be added, corresponding to the provided `indices`.
    * @param  name    Name for created op.
    * @return Variable value read op, after the addition.
    */
  @throws[UnsupportedOperationException]
  @throws[InvalidDataTypeException]
  def assignScatterAdd(indices: Output, values: Output, name: String = "AssignScatterAdd"): Output
  /** Creates an op that subtracts the provided sparse value from the current value of the variable and returns its
    * value.
    *
    * @param  indices Indices corresponding to the `values` being subtracted.
    * @param  values  Values to be subtracted, corresponding to the provided `indices`.
    * @param  name    Name for created op.
    * @return Variable value read op, after the subtraction.
    */
  @throws[UnsupportedOperationException]
  @throws[InvalidDataTypeException]
  def assignScatterSub(indices: Output, values: Output, name: String = "AssignScatterSub"): Output
  /** Converts this variable to an op output. This function simply returns an op corresponding to the variable value. */
  def toOutput: Output = value
}
| eaplatanios/tensorflow | tensorflow/scala/api/src/main/scala/org/platanios/tensorflow/api/ops/variables/VariableLike.scala | Scala | apache-2.0 | 6,783 |
package org.jetbrains.sbt.editor.documentationProvider
import org.jetbrains.sbt.MockSbt_1_0
class SbtScalacOptionsDocumentationProviderTest_Sbt_1 extends SbtScalacOptionsDocumentationProviderTestBase
with SbtScalacOptionsDocumentationProviderCommonTests
with MockSbt_1_0
| JetBrains/intellij-scala | scala/scala-impl/test/org/jetbrains/sbt/editor/documentationProvider/SbtScalacOptionsDocumentationProviderTest_Sbt_1.scala | Scala | apache-2.0 | 277 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.tools.ingest
import java.io.{File, FileWriter, PrintWriter}
import com.beust.jcommander.{Parameter, ParameterException}
import com.typesafe.config.{Config, ConfigRenderOptions}
import com.typesafe.scalalogging.LazyLogging
import org.apache.commons.io.FilenameUtils
import org.geotools.data.DataStore
import org.locationtech.geomesa.convert.shp.ShapefileConverterFactory
import org.locationtech.geomesa.convert.ConverterConfigLoader
import org.locationtech.geomesa.convert2.SimpleFeatureConverter
import org.locationtech.geomesa.tools.DistributedRunParam.RunModes
import org.locationtech.geomesa.tools._
import org.locationtech.geomesa.tools.utils.{CLArgResolver, DataFormats, Prompt}
import org.locationtech.geomesa.utils.geotools.{ConfigSftParsing, SimpleFeatureTypes}
import org.locationtech.geomesa.utils.io.{PathUtils, WithClose}
import org.opengis.feature.simple.SimpleFeatureType
import scala.util.Try
import scala.util.control.NonFatal
/**
 * Ingests files into a GeoMesa data store.
 *
 * The simple feature type and converter may be specified explicitly through the command
 * parameters. Otherwise, this command attempts to infer them from the input files and,
 * unless `--force` is set, interactively prompts the user to confirm and optionally
 * persist the inferred definitions for later re-use.
 */
trait IngestCommand[DS <: DataStore] extends DataStoreCommand[DS] with InteractiveCommand with LazyLogging {
  import scala.collection.JavaConversions._
  override val name = "ingest"
  override def params: IngestParams
  // resources used to set up the classpath for distributed ingest jobs
  def libjarsFile: String
  def libjarsPaths: Iterator[() => Seq[File]]
  override def execute(): Unit = {
    import org.locationtech.geomesa.utils.conversions.ScalaImplicits.RichIterator
    // inputs must either all be local, or all be on the same remote file system
    ensureSameFs(PathUtils.RemotePrefixes)
    // try to load the sft, first check for an existing schema, then load from the params/environment
    // note: vars, since both may be replaced with inferred values below
    var sft: SimpleFeatureType =
      Option(params.featureName).flatMap(n => Try(withDataStore(_.getSchema(n))).filter(_ != null).toOption)
          .orElse(Option(params.spec).flatMap(s => Option(CLArgResolver.getSft(s, params.featureName))))
          .orNull
    var converter: Config = Option(params.config).map(CLArgResolver.getConfig).orNull
    if (converter == null && params.files.nonEmpty) {
      // if there is no converter passed in, try to infer the schema from the input files themselves
      Command.user.info("No converter defined - will attempt to detect schema from input files")
      val file = params.files.iterator.flatMap(PathUtils.interpretPath).headOption.getOrElse {
        throw new ParameterException(s"<files> '${params.files.mkString(",")}' did not evaluate to anything" +
            " that could be read")
      }
      val (inferredSft, inferredConverter) = {
        val opt = if (params.fmt == DataFormats.Shp) {
          ShapefileConverterFactory.infer(file.path, Option(sft))
        } else {
          SimpleFeatureConverter.infer(() => file.open, Option(sft))
        }
        opt.getOrElse {
          throw new ParameterException("Could not determine converter from inputs - please specify a converter")
        }
      }
      val renderOptions = ConfigRenderOptions.concise().setFormatted(true)
      var inferredSftString: Option[String] = None
      if (sft == null) {
        // no explicit schema - use the inferred one, under a unique, file-derived type name
        val typeName = Option(params.featureName).getOrElse {
          val existing = withDataStore(_.getTypeNames)
          val fileName = Option(FilenameUtils.getBaseName(file.path))
          val base = fileName.map(_.trim.replaceAll("[^A-Za-z0-9]+", "_")).filterNot(_.isEmpty).getOrElse("geomesa")
          var name = base
          var i = 0
          while (existing.contains(name)) {
            name = s"${base}_$i"
            i += 1
          }
          name
        }
        sft = SimpleFeatureTypes.renameSft(inferredSft, typeName)
        inferredSftString = Some(SimpleFeatureTypes.toConfig(sft, includePrefix = false).root().render(renderOptions))
        if (!params.force) {
          Command.user.info(s"Inferred schema: $typeName identified ${SimpleFeatureTypes.encodeType(sft)}")
        }
      }
      converter = inferredConverter
      if (!params.force) {
        val converterString = inferredConverter.root().render(renderOptions)
        def persist(): Unit = if (Prompt.confirm("Persist this converter for future use (y/n)? ")) {
          writeInferredConverter(sft.getTypeName, converterString, inferredSftString)
        }
        Command.user.info(s"Inferred converter:\\n$converterString")
        if (Prompt.confirm("Use inferred converter (y/n)? ")) {
          persist()
        } else {
          // user rejected the inferred converter - optionally save it anyway, then abort the ingest
          Command.user.info("Please re-run with a valid converter")
          persist()
          return
        }
      }
    }
    if (sft == null) {
      throw new ParameterException("SimpleFeatureType name and/or specification argument is required")
    } else if (converter == null) {
      throw new ParameterException("Converter config argument is required")
    }
    if (params.fmt == DataFormats.Shp) {
      // shapefiles have to be ingested locally, as we need access to the related files
      if (params.mode == RunModes.Distributed) {
        Command.user.warn("Forcing run mode to local for shapefile ingestion")
      }
      params.mode = RunModes.Local
    }
    createConverterIngest(sft, converter).run()
  }
  /** Builds the runnable that performs the actual ingest; protected hook for subclasses. */
  protected def createConverterIngest(sft: SimpleFeatureType, converterConfig: Config): Runnable = {
    new ConverterIngest(sft, connection, converterConfig, params.files, Option(params.mode),
      libjarsFile, libjarsPaths, params.threads)
  }
  /** Verifies that the input files are either all local or all share one remote file system prefix. */
  private def ensureSameFs(prefixes: Seq[String]): Unit = {
    prefixes.foreach { pre =>
      if (params.files.exists(_.toLowerCase.startsWith(s"$pre://")) &&
          !params.files.forall(_.toLowerCase.startsWith(s"$pre://"))) {
        throw new ParameterException(s"Files must all be on the same file system: ($pre) or all be local")
      }
    }
  }
  /**
   * Persists an inferred converter (and optionally the inferred schema) as a config file next to
   * the distribution's 'conf/reference.conf', adding an include so it is loaded on later runs.
   */
  private def writeInferredConverter(typeName: String, converterString: String, schemaString: Option[String]): Unit = {
    try {
      // locate the editable reference.conf shipped in the binary distribution's conf folder
      val conf = this.getClass.getClassLoader.getResources("reference.conf").find { u =>
        "file".equalsIgnoreCase(u.getProtocol) && u.getPath.endsWith("/conf/reference.conf")
      }
      conf match {
        case None => Command.user.error("Could not persist converter: could not find 'conf/reference.conf'")
        case Some(r) =>
          val reference = new File(r.toURI)
          val folder = reference.getParentFile
          val baseName = typeName.replaceAll("[^A-Za-z0-9_]+", "_")
          // find an unused file name derived from the type name
          var convert = new File(folder, s"$baseName.conf")
          var i = 1
          while (convert.exists()) {
            convert = new File(folder, s"${baseName}_$i.conf")
            i += 1
          }
          WithClose(new PrintWriter(new FileWriter(convert))) { writer =>
            writer.println(s"${ConverterConfigLoader.path}.$baseName : $converterString")
            schemaString.foreach(s => writer.println(s"${ConfigSftParsing.path}.$baseName : $s"))
          }
          // append an include to reference.conf so the new file is picked up automatically
          WithClose(new PrintWriter(new FileWriter(reference, true))) { writer =>
            writer.println(s"""include "${convert.getName}"""")
          }
          val (names, refs) = if (schemaString.isDefined) {
            ("schema and converter", s"'--spec $baseName' and '--converter $baseName'")
          } else {
            ("converter", s"'--converter $baseName'")
          }
          Command.user.info(s"Added import in reference.conf and saved inferred $names to ${convert.getAbsolutePath}")
          Command.user.info(s"In future commands, the $names may be invoked with $refs")
      }
    } catch {
      case NonFatal(e) =>
        logger.error("Error trying to persist inferred schema", e)
        Command.user.error(s"Error trying to persist inferred schema: $e")
    }
  }
}
// @Parameters(commandDescription = "Ingest/convert various file formats into GeoMesa")
trait IngestParams extends OptionalTypeNameParam with OptionalFeatureSpecParam with OptionalForceParam
    with OptionalConverterConfigParam with OptionalInputFormatParam with DistributedRunParam {
  // only applies to local-mode ingest; distributed parallelism is handled by the job framework
  @Parameter(names = Array("-t", "--threads"), description = "Number of threads if using local ingest")
  var threads: Integer = 1
}
| ddseapy/geomesa | geomesa-tools/src/main/scala/org/locationtech/geomesa/tools/ingest/IngestCommand.scala | Scala | apache-2.0 | 8,433 |
package org.jetbrains.plugins.scala.lang.overrideImplement
import org.jetbrains.plugins.scala.base.ScalaLightPlatformCodeInsightTestCaseAdapter
import org.jetbrains.plugins.scala.lang.formatting.settings.ScalaCodeStyleSettings
import org.jetbrains.plugins.scala.overrideImplement.ScalaOIUtil
import org.jetbrains.plugins.scala.settings.ScalaApplicationSettings
import org.jetbrains.plugins.scala.util.TypeAnnotationSettings
/**
* @author Alefas
* @since 14.05.12
*/
class ScalaOverrideImplementTest extends ScalaLightPlatformCodeInsightTestCaseAdapter {
  /**
   * Configures an editor from `fileText`, invokes the override/implement action for `methodName`,
   * and checks the resulting editor content against `expectedText`.
   *
   * Both texts have literal "\r" sequences removed, margins stripped and are trimmed before use.
   * The type-annotation and copy-scaladoc settings are applied for the duration of the action;
   * the previous code-style settings are restored afterwards.
   *
   * @param isImplement  true to implement abstract members, false to override concrete ones
   * @param settings     code-style settings active while the action runs (defaults to always adding types)
   * @param copyScalaDoc whether scaladoc is copied to the generated member
   */
  def runTest(methodName: String, fileText: String, expectedText: String, isImplement: Boolean,
              settings: ScalaCodeStyleSettings = TypeAnnotationSettings.alwaysAddType(ScalaCodeStyleSettings.getInstance(getProjectAdapter)),
              copyScalaDoc: Boolean = false) {
    configureFromFileTextAdapter("dummy.scala", fileText.replace("\\r", "").stripMargin.trim)
    // snapshot the current settings so they can be restored after the action
    val oldSettings = ScalaCodeStyleSettings.getInstance(getProjectAdapter).clone()
    TypeAnnotationSettings.set(getProjectAdapter, settings)
    ScalaApplicationSettings.getInstance().COPY_SCALADOC = copyScalaDoc
    ScalaOIUtil.invokeOverrideImplement(getProjectAdapter, getEditorAdapter, getFileAdapter, isImplement, methodName)
    TypeAnnotationSettings.set(getProjectAdapter, oldSettings.asInstanceOf[ScalaCodeStyleSettings])
    checkResultByText(expectedText.replace("\\r", "").stripMargin.trim)
  }
  // Implementing abstract `foo` from base class `b` should insert a stub with `???` selected;
  // `<caret>` marks the invocation point and `<selection>` the expected editor selection.
  def testFoo() {
    val fileText =
      """
        |package test
        |
        |class Foo extends b {
        |  <caret>
        |}
        |abstract class b {
        |  def foo(x: b): b
        |}
      """
    val expectedText =
      """
        |package test
        |
        |class Foo extends b {
        |  def foo(x: b): b = <selection>???</selection>
        |}
        |abstract class b {
        |  def foo(x: b): b
        |}
      """
    val methodName: String = "foo"
    val isImplement = true
    runTest(methodName, fileText, expectedText, isImplement)
  }
def testEmptyLinePos() {
val fileText =
"""
|package test
|class Empty extends b {
| def foo(): Int = 3
|
|
| <caret>
|
|
|}
|abstract class b {
| def too: b
|}
"""
val expectedText =
"""
|package test
|class Empty extends b {
| def foo(): Int = 3
|
| def too: b = <selection>???</selection>
|}
|abstract class b {
| def too: b
|}
"""
val methodName: String = "too"
val isImplement = true
runTest(methodName, fileText, expectedText, isImplement)
}
def testNewLineBetweenMethods() {
val fileText =
"""
|package test
|
|class MethodsNewLine extends b {
| def foo(): Int = 3<caret>
|}
|abstract class b {
| def too: b
|}
"""
val expectedText =
"""
|package test
|
|class MethodsNewLine extends b {
| def foo(): Int = 3
|
| def too: b = <selection>???</selection>
|}
|abstract class b {
| def too: b
|}
"""
val methodName: String = "too"
val isImplement = true
runTest(methodName, fileText, expectedText, isImplement)
}
def testNewLineUpper() {
val fileText =
"""
|package test
|
|class UpperNewLine extends b {
| <caret>
| def foo(): Int = 3
|}
|abstract class b {
| def too: b
|}
"""
val expectedText =
"""
|package test
|
|class UpperNewLine extends b {
|
| def too: b = <selection>???</selection>
|
| def foo(): Int = 3
|}
|abstract class b {
| def too: b
|}
"""
val methodName: String = "too"
val isImplement = true
runTest(methodName, fileText, expectedText, isImplement)
}
def testOverrideFunction() {
val fileText =
"""
|package test
|
|class A {
| def foo(): A = null
|}
|class FunctionOverride extends A {
| val t = foo()
|
|
| <caret>
|}
"""
val expectedText =
"""
|package test
|
|class A {
| def foo(): A = null
|}
|class FunctionOverride extends A {
| val t = foo()
|
| override def foo(): A = <selection>super.foo()</selection>
|}
"""
val methodName: String = "foo"
val isImplement = false
runTest(methodName, fileText, expectedText, isImplement)
}
def testImplementTypeAlias() {
val fileText =
"""
|package Y
|trait Aa {
| type K
|}
|class TypeAlias extends Aa {
| val t = foo()
| <caret>
| def y(): Int = 3
|}
"""
val expectedText =
"""
|package Y
|trait Aa {
| type K
|}
|class TypeAlias extends Aa {
| val t = foo()
|
| type K = <selection>this.type</selection>
|
| def y(): Int = 3
|}
"""
val methodName: String = "K"
val isImplement = true
runTest(methodName, fileText, expectedText, isImplement)
}
def testOverrideValue() {
val fileText =
"""
|package test
|
|class A {
| val foo: A = new A
|}
|class OverrideValue extends A {
| val t = foo()
| <caret>
|}
"""
val expectedText =
"""
|package test
|
|class A {
| val foo: A = new A
|}
|class OverrideValue extends A {
| val t = foo()
| override val foo: A = <selection>_</selection>
|}
"""
val methodName: String = "foo"
val isImplement = false
runTest(methodName, fileText, expectedText, isImplement)
}
def testImplementVar() {
val fileText =
"""
|package test
|
|trait A {
| var foo: A
|}
|class VarImplement extends A {
| val t = foo()
| <caret>
| def y(): Int = 3
|}
"""
val expectedText =
"""
|package test
|
|trait A {
| var foo: A
|}
|class VarImplement extends A {
| val t = foo()
|
| var foo: A = <selection>_</selection>
|
| def y(): Int = 3
|}
"""
val methodName: String = "foo"
val isImplement = true
runTest(methodName, fileText, expectedText, isImplement)
}
def testImplementFromSelfType() {
val fileText =
"""
|package test
|
|trait A {
| def foo: Int
|}
|trait B {
| self: A =>
| <caret>
|}
"""
val expectedText =
"""
|package test
|
|trait A {
| def foo: Int
|}
|trait B {
| self: A =>
| def foo: Int = <selection>???</selection>
|}
"""
val methodName: String = "foo"
val isImplement = true
runTest(methodName, fileText, expectedText, isImplement)
}
def testOverrideFromSelfType() {
val fileText =
"""
|package test
|
|trait A {
| def foo: Int = 1
|}
|trait B {
| self: A =>
| <caret>
|}
"""
val expectedText =
"""
|package test
|
|trait A {
| def foo: Int = 1
|}
|trait B {
| self: A =>
| override def foo = <selection>self.foo</selection>
|}
"""
val methodName: String = "foo"
val isImplement = false
val settings = TypeAnnotationSettings.alwaysAddType(ScalaCodeStyleSettings.getInstance(getProjectAdapter))
runTest(methodName, fileText, expectedText, isImplement, settings = TypeAnnotationSettings.noTypeAnnotationForPublic(settings))
}
def testTypeAlias() {
val fileText =
"""
|class ImplementTypeAlias extends b {
| <caret>
|}
|abstract class b {
| type L
|}
"""
val expectedText =
"""
|class ImplementTypeAlias extends b {
| type L = <selection>this.type</selection>
|}
|abstract class b {
| type L
|}
"""
val methodName: String = "L"
val isImplement = true
runTest(methodName, fileText, expectedText, isImplement)
}
def testVal() {
val fileText =
"""
|package test
|
|class Val extends b {
| <caret>
|}
|abstract class b {
| val too: b
|}
"""
val expectedText =
"""
|package test
|
|class Val extends b {
| val too: b = <selection>_</selection>
|}
|abstract class b {
| val too: b
|}
"""
val methodName: String = "too"
val isImplement = true
runTest(methodName, fileText, expectedText, isImplement)
}
def testVar() {
val fileText =
"""
|package test
|
|class Var extends b {
| <caret>
|}
|abstract class b {
| var too: b
|}
"""
val expectedText =
"""
|package test
|
|class Var extends b {
| var too: b = <selection>_</selection>
|}
|abstract class b {
| var too: b
|}
"""
val methodName: String = "too"
val isImplement = true
runTest(methodName, fileText, expectedText, isImplement)
}
def testClassTypeParam() {
val fileText =
"""
|class A[T] {
| def foo: T = new T
|}
|
|class ClassTypeParam extends A[Int] {
| <caret>
|}
"""
val expectedText =
"""
|class A[T] {
| def foo: T = new T
|}
|
|class ClassTypeParam extends A[Int] {
| override def foo: Int = <selection>super.foo</selection>
|}
"""
val methodName: String = "foo"
val isImplement = false
runTest(methodName, fileText, expectedText, isImplement)
}
def testHardSubstituting() {
val fileText =
"""
|class A[T] {
| def foo(x: (T) => T, y: (T, Int) => T): Double = 1.0
|}
|
|class Substituting extends A[Float] {
| <caret>
|}
"""
val expectedText =
"""
|class A[T] {
| def foo(x: (T) => T, y: (T, Int) => T): Double = 1.0
|}
|
|class Substituting extends A[Float] {
| override def foo(x: (Float) => Float, y: (Float, Int) => Float): Double = <selection>super.foo(x, y)</selection>
|}
"""
val methodName: String = "foo"
val isImplement = false
runTest(methodName, fileText, expectedText, isImplement)
}
  // The method's own type parameter must be kept (not substituted) in the generated signature.
  def testSimpleTypeParam() {
    val fileText =
      """
        |abstract class A {
        |  def foo[T](x: T): T
        |}
        |class SimpleTypeParam extends A {
        |  <caret>
        |}
      """
    val expectedText =
      """
        |abstract class A {
        |  def foo[T](x: T): T
        |}
        |class SimpleTypeParam extends A {
        |  def foo[T](x: T): T = <selection>???</selection>
        |}
      """
    val methodName: String = "foo"
    val isImplement = true
    runTest(methodName, fileText, expectedText, isImplement)
  }
  // SCL-1997: repeated parameters (Any*) are preserved when implementing.
  def testSCL1997() {
    val fileText =
      """
        |package test
        |
        |trait Foo {
        |  def foo(a: Any*): Any
        |}
        |
        |trait Sub extends Foo {
        |  <caret>
        |}
      """
    val expectedText =
      """
        |package test
        |
        |trait Foo {
        |  def foo(a: Any*): Any
        |}
        |
        |trait Sub extends Foo {
        |  def foo(a: Any*): Any = <selection>???</selection>
        |}
      """
    val methodName: String = "foo"
    val isImplement = true
    runTest(methodName, fileText, expectedText, isImplement)
  }
  // SCL-1999: existential types (`T forSome {...}`) survive implementing, with `Unit` added for a procedure.
  def testSCL1999() {
    val fileText =
      """
        |package test
        |
        |trait Parent {
        |  def m(p: T forSome {type T <: Number})
        |}
        |
        |class Child extends Parent {
        |  <caret>
        |}
      """
    val expectedText =
      """
        |package test
        |
        |trait Parent {
        |  def m(p: T forSome {type T <: Number})
        |}
        |
        |class Child extends Parent {
        |  def m(p: (T) forSome {type T <: Number}): Unit = <selection>???</selection>
        |}
      """
    val methodName: String = "m"
    val isImplement = true
    runTest(methodName, fileText, expectedText, isImplement)
  }
  // SCL-2540: identifiers ending in an underscore keep the space before the colon (`x_ : Int`).
  def testSCL2540() {
    val fileText =
      """
        |class A {
        |  def foo(x_ : Int) = 1
        |}
        |
        |class B extends A {
        |  <caret>
        |}
      """
    val expectedText =
      """
        |class A {
        |  def foo(x_ : Int) = 1
        |}
        |
        |class B extends A {
        |  override def foo(x_ : Int): Int = <selection>super.foo(x_)</selection>
        |}
      """
    val methodName: String = "foo"
    val isImplement = false
    runTest(methodName, fileText, expectedText, isImplement)
  }
  // SCL-2010: by-name parameters (`smth: => String`) are preserved when overriding.
  def testSCL2010() {
    val fileText =
      """
        |package test
        |
        |class Parent {
        |  def doSmth(smth: => String) {}
        |}
        |
        |class Child extends Parent {
        |  <caret>
        |}
      """
    val expectedText =
      """
        |package test
        |
        |class Parent {
        |  def doSmth(smth: => String) {}
        |}
        |
        |class Child extends Parent {
        |  override def doSmth(smth: => String): Unit = <selection>super.doSmth(smth)</selection>
        |}
      """
    val methodName: String = "doSmth"
    val isImplement = false
    runTest(methodName, fileText, expectedText, isImplement)
  }
  // SCL-2052 (methods): a type alias used in parameter and result types is kept, not dealiased.
  def testSCL2052A() {
    val fileText =
      """
        |class A {
        |  type ID[X] = X
        |  def foo(in: ID[String]): ID[Int] = null
        |}
        |
        |class B extends A {
        |  <caret>
        |}
      """
    val expectedText =
      """
        |class A {
        |  type ID[X] = X
        |  def foo(in: ID[String]): ID[Int] = null
        |}
        |
        |class B extends A {
        |  override def foo(in: ID[String]): ID[Int] = <selection>super.foo(in)</selection>
        |}
      """
    val methodName: String = "foo"
    val isImplement = false
    runTest(methodName, fileText, expectedText, isImplement)
  }
  // SCL-2052 (vals): the type alias is also kept when overriding a `val`.
  def testSCL2052B() {
    val fileText =
      """
        |class A {
        |  type ID[X] = X
        |  val foo: ID[Int] = null
        |}
        |
        |class B extends A {
        |  <caret>
        |}
      """
    val expectedText =
      """
        |class A {
        |  type ID[X] = X
        |  val foo: ID[Int] = null
        |}
        |
        |class B extends A {
        |  override val foo: ID[Int] = <selection>_</selection>
        |}
      """
    val methodName: String = "foo"
    val isImplement = false
    runTest(methodName, fileText, expectedText, isImplement)
  }
  // SCL-2052 (objects): inside an object the inherited alias is qualified with the object name (B.F).
  def testSCL2052C() {
    val fileText =
      """
        |class A {
        |  type F = (Int => String)
        |  def foo(f: F): Any = null
        |}
        |
        |object B extends A {
        |  <caret>
        |}
      """
    val expectedText =
      """
        |class A {
        |  type F = (Int => String)
        |  def foo(f: F): Any = null
        |}
        |
        |object B extends A {
        |  override def foo(f: B.F): Any = <selection>super.foo(f)</selection>
        |}
      """
    val methodName: String = "foo"
    val isImplement = false
    runTest(methodName, fileText, expectedText, isImplement)
  }
  // SCL-3808: higher-kinded type parameters and context bounds survive in the generated signature.
  def testSCL3808() {
    val fileText =
      """
        |trait TC[_]
        |
        |class A {
        |  def foo[M[X], N[X[_]]: TC]: String = ""
        |}
        |
        |object B extends A {
        |  <caret>
        |}
      """
    val expectedText =
      """
        |trait TC[_]
        |
        |class A {
        |  def foo[M[X], N[X[_]]: TC]: String = ""
        |}
        |
        |object B extends A {
        |  override def foo[M[X], N[X[_]] : TC]: String = <selection>super.foo</selection>
        |}
      """
    val methodName: String = "foo"
    val isImplement = false
    runTest(methodName, fileText, expectedText, isImplement)
  }
  // SCL-3305: implementing a member whose parameter type is a nested class adds the required import
  // and shortens the reference accordingly (A.Nested.Nested2 -> Nested.Nested2).
  def testSCL3305() {
    val fileText =
      """
        |package test
        |
        |object A {
        |
        |  object Nested {
        |
        |    class Nested2
        |
        |  }
        |
        |}
        |
        |abstract class B {
        |  def foo(v: A.Nested.Nested2)
        |}
        |
        |class C extends B {
        |  <caret>
        |}
      """
    val expectedText =
      """
        |package test
        |
        |import test.A.Nested
        |
        |object A {
        |
        |  object Nested {
        |
        |    class Nested2
        |
        |  }
        |
        |}
        |
        |abstract class B {
        |  def foo(v: A.Nested.Nested2)
        |}
        |
        |class C extends B {
        |  def foo(v: Nested.Nested2): Unit = <selection>???</selection>
        |}
      """
    val methodName: String = "foo"
    val isImplement = true
    runTest(methodName, fileText, expectedText, isImplement)
  }
  // An explicit `Unit` return type is reproduced when implementing.
  def testUnitReturn() {
    val fileText =
      """
        |package test
        |
        |class Foo extends b {
        |  <caret>
        |}
        |abstract class b {
        |  def foo(x: b): Unit
        |}
      """
    val expectedText =
      """
        |package test
        |
        |class Foo extends b {
        |  def foo(x: b): Unit = <selection>???</selection>
        |}
        |abstract class b {
        |  def foo(x: b): Unit
        |}
      """
    val methodName: String = "foo"
    val isImplement = true
    runTest(methodName, fileText, expectedText, isImplement)
  }
  // An inferred `Unit` return type (`= ()`) is made explicit when overriding.
  def testUnitInferredReturn() {
    val fileText =
      """
        |package test
        |
        |class Foo extends b {
        |  <caret>
        |}
        |abstract class b {
        |  def foo(x: b) = ()
        |}
      """
    val expectedText =
      """
        |package test
        |
        |class Foo extends b {
        |  override def foo(x: b): Unit = <selection>super.foo(x)</selection>
        |}
        |abstract class b {
        |  def foo(x: b) = ()
        |}
      """
    val methodName: String = "foo"
    val isImplement = false
    runTest(methodName, fileText, expectedText, isImplement)
  }
  // An inferred non-Unit return type (`= 1`) is made explicit (`: Int`) when overriding.
  def testInferredReturn() {
    val fileText =
      """
        |package test
        |
        |class Foo extends b {
        |  <caret>
        |}
        |abstract class b {
        |  def foo(x: b) = 1
        |}
      """
    val expectedText =
      """
        |package test
        |
        |class Foo extends b {
        |  override def foo(x: b): Int = <selection>super.foo(x)</selection>
        |}
        |abstract class b {
        |  def foo(x: b) = 1
        |}
      """
    val methodName: String = "foo"
    val isImplement = false
    runTest(methodName, fileText, expectedText, isImplement)
  }
  // No explicit return type is added when type annotations are disabled for both public and override members.
  def testNoExplicitReturn() {
    val fileText =
      """
        |class A {
        |  def foo(x : Int): Int = 1
        |}
        |
        |class B extends A {
        |  <caret>
        |}
      """
    val expectedText =
      """
        |class A {
        |  def foo(x : Int): Int = 1
        |}
        |
        |class B extends A {
        |  override def foo(x: Int) = <selection>super.foo(x)</selection>
        |}
      """
    val methodName: String = "foo"
    val isImplement = false
    val settings = TypeAnnotationSettings.alwaysAddType(ScalaCodeStyleSettings.getInstance(getProjectAdapter))
    runTest(methodName, fileText, expectedText, isImplement,
      settings = TypeAnnotationSettings.noTypeAnnotationForPublic(TypeAnnotationSettings.noTypeAnnotationForOverride(settings)))
  }
  // The implicit parameter section is kept in the signature; the super call passes only explicit arguments.
  def testImplicitParams() {
    val fileText =
      """
        |trait A {
        |  def foo(x : Int)(implicit name: String): Int = name + x
        |}
        |
        |class B extends A {
        |  <caret>
        |}
      """
    val expectedText =
      """
        |trait A {
        |  def foo(x : Int)(implicit name: String): Int = name + x
        |}
        |
        |class B extends A {
        |  override def foo(x: Int)(implicit name: String): Int = <selection>super.foo(x)</selection>
        |}
      """
    val methodName: String = "foo"
    val isImplement = false
    runTest(methodName, fileText, expectedText, isImplement)
  }
  // No explicit return type is added when type annotations are disabled for protected members.
  def testProtectedMethod() {
    val fileText =
      """
        |abstract class A {
        |  protected def foo(): Unit
        |}
        |
        |class B extends A {
        |  <caret>
        |}
      """
    val expectedText =
      """
        |abstract class A {
        |  protected def foo(): Unit
        |}
        |
        |class B extends A {
        |  protected def foo() = <selection>???</selection>
        |}
      """
    val methodName: String = "foo"
    val isImplement = true
    val settings = TypeAnnotationSettings.alwaysAddType(ScalaCodeStyleSettings.getInstance(getProjectAdapter))
    runTest(methodName, fileText, expectedText, isImplement, settings = TypeAnnotationSettings.noTypeAnnotationForProtected(settings))
  }
  // Invoking on a class with no body creates the braces and implements the protected member inside them.
  def testProtectedMethodNoBody() {
    val fileText =
      """
        |abstract class A {
        |  protected def foo(): Unit
        |}
        |
        |class B<caret> extends A
      """
    val expectedText =
      """
        |abstract class A {
        |  protected def foo(): Unit
        |}
        |
        |class B extends A {
        |  protected def foo(): Unit = <selection>???</selection>
        |}
      """
    val methodName: String = "foo"
    val isImplement = true
    runTest(methodName, fileText, expectedText, isImplement)
  }
  // Same as above, but overriding a concrete protected member: `override` is added and super is called.
  def testOverrideProtectedMethodNoBody() {
    val fileText =
      """
        |abstract class A {
        |  protected def foo(): Unit = {}
        |}
        |
        |class B<caret> extends A
      """
    val expectedText =
      """
        |abstract class A {
        |  protected def foo(): Unit = {}
        |}
        |
        |class B extends A {
        |  override protected def foo(): Unit = <selection>super.foo()</selection>
        |}
      """
    val methodName: String = "foo"
    val isImplement = false
    runTest(methodName, fileText, expectedText, isImplement)
  }
  // With copyScalaDoc enabled, the base member's ScalaDoc is copied onto the generated override.
  def testCopyScalaDoc() = {
    val fileText =
      """
        |abstract class A {
        |
        |  /**
        |    * qwerty
        |    *
        |    * @return
        |    */
        |  protected def foo(): Unit = {}
        |}
        |
        |class B<caret> extends A
      """
    val expectedText =
      """
        |abstract class A {
        |
        |  /**
        |    * qwerty
        |    *
        |    * @return
        |    */
        |  protected def foo(): Unit = {}
        |}
        |
        |class B extends A {
        |  /**
        |    * qwerty
        |    *
        |    * @return
        |    */
        |  override protected def foo(): Unit = <selection>super.foo()</selection>
        |}
      """
    val methodName: String = "foo"
    val isImplement = false
    val copyScalaDoc = true
    runTest(methodName, fileText, expectedText, isImplement, copyScalaDoc = copyScalaDoc)
  }
  // No redundant `import scala.collection.Seq` is added when the file already imports it.
  def testNoImportScalaSeq(): Unit = {
    val fileText =
      """
        |import scala.collection.Seq
        |
        |class Test {
        |  def foo: Seq[Int] = Seq(1)
        |}
        |
        |class Test2 extends Test {
        |<caret>
        |}
      """
    val expectedText =
      """
        |import scala.collection.Seq
        |
        |class Test {
        |  def foo: Seq[Int] = Seq(1)
        |}
        |
        |class Test2 extends Test {
        |  override def foo: Seq[Int] = super.foo
        |}
      """
    val methodName: String = "foo"
    val isImplement = false
    runTest(methodName, fileText, expectedText, isImplement)
  }
  // A `val`/`var` class parameter of the parent can be overridden like an ordinary member.
  def testOverrideClassParam(): Unit = {
    val fileText =
      """
        |class Parent(val param1: Int, var param2: String)
        |
        |class Child extends Parent(4, "") {
        |  <caret>
        |}
      """
    val expectedText =
      """
        |class Parent(val param1: Int, var param2: String)
        |
        |class Child extends Parent(4, "") {
        |  override val param1: Int = _
        |}
      """
    runTest("param1", fileText, expectedText, isImplement = false)
  }
  // Annotations on the base member (@throws, @deprecated) are NOT copied onto the generated override.
  def testDoNotSaveAnnotations(): Unit ={
    val fileText =
      """
        |trait Base {
        |  @throws(classOf[Exception])
        |  @deprecated
        |  def annotFoo(int: Int): Int = 45
        |}
        |
        |class Inheritor extends Base {
        |  <caret>
        |}
      """
    val expectedText =
      """
        |trait Base {
        |  @throws(classOf[Exception])
        |  @deprecated
        |  def annotFoo(int: Int): Int = 45
        |}
        |
        |class Inheritor extends Base {
        |  override def annotFoo(int: Int): Int = super.annotFoo(int)
        |}
      """
    val methodName: String = "annotFoo"
    val isImplement = false
    runTest(methodName, fileText, expectedText, isImplement)
  }
}
| ilinum/intellij-scala | test/org/jetbrains/plugins/scala/lang/overrideImplement/ScalaOverrideImplementTest.scala | Scala | apache-2.0 | 25,986 |
/*
* Copyright 1998-2022 Linux.org.ru
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ru.org.linux.help
import org.junit.runner.RunWith
import org.junit.{Before, Test}
import org.mockito.ArgumentMatchers._
import org.mockito.Mockito._
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.context.annotation.{Bean, Configuration}
import org.springframework.test.context.ContextConfiguration
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner
import org.springframework.test.context.web.WebAppConfiguration
import org.springframework.test.web.servlet.MockMvc
import org.springframework.test.web.servlet.request.MockMvcRequestBuilders._
import org.springframework.test.web.servlet.result.MockMvcResultMatchers._
import org.springframework.test.web.servlet.setup.MockMvcBuilders._
import org.springframework.web.context.WebApplicationContext
import org.springframework.web.servlet.config.annotation.{EnableWebMvc, PathMatchConfigurer, WebMvcConfigurer}
import ru.org.linux.util.markdown.MarkdownFormatter
// MockMvc smoke tests for HelpController: a known help page renders with 200,
// an unknown page yields 404. Uses the mocked context from HelpControllerTestConfig.
@RunWith(classOf[SpringJUnit4ClassRunner])
@WebAppConfiguration
@ContextConfiguration(classes = Array(classOf[HelpControllerTestConfig]))
class HelpControllerTest extends MVCTest {
  @Test
  def testOk(): Unit = {
    mockMvc.perform(get("/help/lorcode.md")).andExpect(status.is(200))
  }
  @Test
  def test404(): Unit = {
    mockMvc.perform(get("/help/wrong.md")).andExpect(status.is(404))
  }
}
// Minimal Spring MVC context for the test: a HelpController backed by a mocked
// MarkdownFormatter that renders any input as "ok".
@Configuration
@EnableWebMvc
class HelpControllerTestConfig extends WebMvcConfigurer {
  // Disable suffix pattern matching so "/help/lorcode.md" is matched with its ".md" suffix intact.
  override def configurePathMatch(configurer: PathMatchConfigurer): Unit = {
    configurer.setUseSuffixPatternMatch(false)
  }
  @Bean
  def controller = {
    val markdown: MarkdownFormatter = mock(classOf[MarkdownFormatter])
    when(markdown.renderToHtml(anyString(), anyBoolean())).thenReturn("ok")
    new HelpController(markdown)
  }
}
// Base trait for Spring MVC tests: builds a MockMvc instance from the autowired
// web application context before each test runs.
trait MVCTest {
  @Autowired
  var wac: WebApplicationContext = _
  var mockMvc: MockMvc = _
  @Before
  def setup(): Unit = {
    this.mockMvc = webAppContextSetup(this.wac).build()
  }
}
| maxcom/lorsource | src/test/scala/ru/org/linux/help/HelpControllerTest.scala | Scala | apache-2.0 | 2,632 |
package com.twitter.finatra.json.benchmarks
import com.twitter.inject.Test
// Smoke test: runs both JSON deserializer benchmark bodies once to make sure they complete without error.
class JsonBenchmarkTest extends Test {
  "test" in {
    val benchmark = new JsonBenchmark()
    benchmark.finatraCustomCaseClassDeserializer()
    benchmark.jacksonScalaModuleCaseClassDeserializer()
  }
}
| syamantm/finatra | benchmarks/src/test/scala/com/twitter/finatra/json/benchmarks/JsonBenchmarkTest.scala | Scala | apache-2.0 | 284 |
package elea.term
import elea.{LispPrintSettings, Name}
import elea.rewrite.Env
import scalaz.Scalaz._
import scalaz.{Name => _, _}
case class App private(fun: Term, args: NonEmptyList[Term]) extends Term with FirstOrder[Term] {
  // An App's function is never itself an App: applications are kept flat
  // (the companion's apply handles the flattening).
  require(!fun.isInstanceOf[App])
  // Applying further arguments extends this application's argument list.
  override def apply(args2: IList[Term]) = App(fun, args :::> args2)
  /**
   * Print the lisp representation of this code
   */
  override def toLisp(settings: LispPrintSettings): String =
    s"(${fun.toLisp(settings)}${args.toList.map(_.toLisp(settings)).mkString(" ", " ", "")})"
  // Reduce the function applied to our arguments, then prefer any term the
  // environment's match store maps the result to.
  override def reduceHead(env: Env): Term = {
    val reduced = fun.reduceHeadApp(env, args)
    env.matches
      .lookup(reduced)
      // Even though this is a constructor we should re-reduce in case any of its
      // free vars has been matched to something
      .map(_.asTerm.reduce(env))
      .getOrElse(reduced)
  }
  // Extra arguments are simply appended (see apply above).
  override def reduceHeadApp(env: Env, args2: NonEmptyList[Term]): Term =
    apply(args2.list)
  override def reduceHeadCase(env: Env, enclosingCase: Case): Term =
    fun match {
      // An applied constructor can select the matching branch of the enclosing case;
      // the alreadySeen/havingSeen pair guards against re-visiting the same case.
      case fun: Constructor if !env.alreadySeen(enclosingCase) =>
        fun.reduceCase(args.list, enclosingCase.branches)
          .reduce(env.havingSeen(enclosingCase))
      case _ =>
        super.reduceHeadCase(env, enclosingCase)
    }
  // NOTE(review): not referenced within this class; flattening is done by the companion's apply.
  private def flatten: App =
    fun match {
      case fun: App =>
        App(fun.fun, fun.args append args)
      case _ =>
        this
    }
  // Applications bind no variables, hence the empty binding sets passed to f.
  def mapImmediateSubtermsWithBindings(f: (ISet[Name], Term) => Term): Term =
    App(f(ISet.empty, fun), args.map(t => f(ISet.empty, t)).list)
  // Pairs up function and arguments; only defined against another App of the same arity.
  def zip(other: Term): Option[IList[(Term, Term)]] =
    other match {
      case other: App if other.args.size == args.size =>
        Some((fun, other.fun) +: args.zip(other.args).list)
      case _ =>
        None
    }
  def arbitraryOrderingNumber: Int = 0
  // Order by arity, then by function, then by arguments left-to-right.
  override def order(other: Term): Ordering =
    other match {
      case other: App =>
        args.size ?|? other.args.size |+|
          fun ?|? other.fun |+|
          args.fzipWith(other.args)(_ ?|? _).concatenate
      case _ =>
        arbitraryOrderingNumber ?|? other.arbitraryOrderingNumber
    }
  override def leftmost: Term = fun.leftmost
  // Cartesian product of the branches of every argument, each combination re-applied to fun.
  override def deepBranches: IList[Term] =
    args.map(_.deepBranches)
      .sequence[({ type G[X] = IList[X] })#G, Term]
      .map((xs: NonEmptyList[Term]) => App(fun, xs))
  override def unfold: Term =
    fun.unfold.betaReduce(args)
  def isFPPF: Boolean =
    fun match {
      case fun: Fix => fun.isFPPF(args.list)
      case _ => false
    }
  override def unifyLeftUnchecked(to: Term): Option[Substitution] =
    (to, fun) match {
      // A variable applied to n args can match a longer application: the variable
      // absorbs the extra leading arguments of `to`; the trailing n unify pairwise.
      case (to: App, fun: Var) if to.args.size > args.size =>
        val toArgsRight = to.args.list.takeRight(args.size)
        val funMatch = App(to.fun, to.args.list.dropRight(args.size))
        for {
          argsSub <- Substitution.merge(args.list.fzipWith(toArgsRight)(_ unifyLeft _))
          mergedSub <- Substitution(fun.name -> funMatch) ++ argsSub
        } yield mergedSub
      case _ =>
        super.unifyLeftUnchecked(to)
    }
  override def replace(from: Term, to: Term): Term =
    (this, from) match {
      // When `from` is a prefix of this application, swap in `to` and keep the excess arguments.
      case AppPrefix(left, from, excessArgs) if (left =@= from) =>
        App(to, excessArgs.map(_.replace(from, to)))
      case _ =>
        super.replace(from, to)
    }
}
// Smart constructor: flattens application of an App and collapses an empty argument
// list to just `fun`. Note that `App(fun, _)` below resolves to the case-class
// constructor (it receives a NonEmptyList, not an IList).
object App {
  def apply(fun: Term, args: IList[Term]): Term =
    fun match {
      case fun: App =>
        fun.apply(args)
      case _ =>
        args.toNel
          .map(App(fun, _))
          .getOrElse(fun)
    }
}
/**
 * View any [[Term]] as term application, potentially of zero arguments
 */
object AppView {
  // Always matches: an App splits into (fun, args); any other term is (term, no args).
  def unapply(term: Term): Option[(Term, IList[Term])] =
    term match {
      case term: App => Some((term.fun, term.args.list))
      case _ => Some((term, IList.empty[Term]))
    }
}
/**
 * Pattern match out terms in fixed-point promoted form
 */
object FPPF {
  // Matches a fixed-point applied to arguments accepted by Fix.isFPPF.
  // The cast assumes isFPPF only accepts argument lists of variables — TODO confirm in Fix.
  def unapply(term: Term): Option[(Fix, IList[Name])] =
    term match {
      case AppView(fun: Fix, args) if fun.isFPPF(args) =>
        Some((fun, args.map(_.asInstanceOf[Var].name)))
      case _ =>
        None
    }
}
// Matches a pair of applications where the second has fewer arguments than the first,
// yielding (long app truncated to the short arity, short app, the long app's excess args).
object AppPrefix {
  def unapply(terms: (Term, Term)): Option[(App, App, NonEmptyList[Term])] =
    (terms._1, terms._2) match {
      case (longTerm: App, shortTerm: App) if longTerm.args.size > shortTerm.args.size =>
        val longArgs = longTerm.args.list
        // Both .get calls are safe: the guard guarantees at least one excess argument,
        // and dropRight leaves shortTerm.args.size >= 1 elements (args is a NonEmptyList).
        val excessArgs = longArgs.takeRight(longArgs.length - shortTerm.args.size).toNel.get
        Some((App(longTerm.fun, longArgs.dropRight(excessArgs.size).toNel.get), shortTerm, excessArgs))
      case _ => None
    }
}
| wsonnex/elea | src/main/scala/elea/term/App.scala | Scala | mit | 4,829 |
package scalaxy.streams
import scala.collection.mutable.ArrayBuffer
/**
 * Refugee from Scalaxy/Components
 * TODO: modernize (quasiquotes...) / make it go away.
 */
trait TupleAnalysis
    extends TuploidValues
    with TreeBuilders
    with WithLocalContext {
  // extends MiscMatchers
  //    with TreeBuilders {
  // Reflection universe supplied by whatever mixes this trait in.
  val global: reflect.api.Universe
  import global._
  import definitions._
  // Recursive shape of a tuple type: `components` is empty for a leaf (non-tuple) type.
  case class TupleInfo(tpe: Type, components: Seq[TupleInfo]) {
    assert(tpe != null, "null type in TupleInfo")
    // Leaf types of this (possibly nested) tuple, depth-first.
    lazy val flattenTypes: Seq[Type] = {
      components match {
        case Seq() =>
          Seq(tpe)
        case _ =>
          components.flatMap(_.flattenTypes)
      }
    }
    // For each leaf, the path of component indices leading down to it.
    lazy val flattenPaths: Seq[List[Int]] = {
      components match {
        case Seq() =>
          Seq(Nil)
        case _ =>
          components.zipWithIndex.flatMap { case (c, i) => c.flattenPaths.map(p => i :: p) }
      }
    }
    // Total number of leaves.
    lazy val componentSize: Int = {
      components match {
        case Seq() =>
          1
        case _ =>
          components.map(_.componentSize).sum
      }
    }
  }
  // Memoizes getTupleInfo per normalized type.
  private val tupleInfos = new scala.collection.mutable.HashMap[Type, TupleInfo]
  // Builds (and caches) the TupleInfo describing `tpe`, recursing into tuple type arguments.
  def getTupleInfo(tpe: Type): TupleInfo = {
    assert(tpe != null, "null type in getTupleInfo")
    val actualTpe = normalize(tpe)
    tupleInfos.getOrElseUpdate(
      actualTpe,
      if (actualTpe <:< typeOf[Unit])
        TupleInfo(UnitTpe, Seq())
      else {
        actualTpe match {
          case t: TypeRef =>
            if (isTupleSymbol(t.sym))
              TupleInfo(t, t.args.map(getTupleInfo))
            else
              TupleInfo(t, Seq())
          case NoType =>
            TupleInfo(NoType, Seq())
          case _ =>
            throw new RuntimeException("Unhandled type : " + tpe + " (" + actualTpe + ": " + Option(actualTpe).map(_.getClass.getName) + ")")
            //System.exit(0)
            null
        }
      }
    )
  }
  def flattenTypes(tpe: Type): Seq[Type] =
    getTupleInfo(tpe).flattenTypes
  def flattenFiberPaths(tpe: Type): Seq[List[Int]] =
    flattenFiberPaths(getTupleInfo(tpe))
  def flattenFiberPaths(info: TupleInfo): Seq[List[Int]] = {
    val TupleInfo(_, components) = info
    if (components.isEmpty)
      Seq(Nil)
    else
      components.map(flattenFiberPaths).zipWithIndex flatMap {
        case (paths, i) => paths.map(path => i :: path)
      }
  }
  // Best-effort type of a tree: falls back to the symbol's signature when the tree is untyped.
  def getType(tree: Tree) = {
    if (tree.tpe == null || tree.tpe == NoType) {
      if (tree.symbol == null || tree.symbol == NoSymbol)
        NoType
      else
        tree.symbol.typeSignature
    } else {
      tree.tpe
    }
  }
  // Builds the chain of `_i` selections that extracts the leaf at `path` from the tree
  // produced by rootGen, returning the selection tree together with the leaf's type.
  def applyFiberPath(rootGen: TreeGen, path: List[Int]): (Tree, Type) = {
    applyFiberPath(rootGen, rootGen().tpe, path)
  }
  def applyFiberPath(rootGen: TreeGen, rootTpe: Type, path: List[Int]): (Tree, Type) = {
    def sub(invertedPath: List[Int]): (Tree, Type) = invertedPath match {
      case Nil =>
        val root = typecheck { rootGen() }
        (root, rootTpe)
      case i :: rest =>
        val (inner, innerTpe) = applyFiberPath(rootGen, rootTpe, rest)
        val name = N("_" + (i + 1))
        //println("Getting member " + i + " of (" + inner + ": " + inner.tpe + ") ; invertedPath = " + invertedPath)
        assert(innerTpe != NoType, "Cannot apply tuple path on untyped tree")
        val info = getTupleInfo(innerTpe)
        assert(i < info.components.size, "bad path : i = " + i + ", type = " + innerTpe + ", path = " + path + ", root = " + rootGen())
        //val sym = innerTpe member name
        //println(s"innerTpe($innerTpe).member(name($name)) = sym($sym: ${sym.typeSignature})")
        // TODO typeCheck?
        //typeCheck
        (
          Select(inner, name),
          info.components(i).tpe
        )
    }
    sub(path.reverse)
  }
  // Offset (in leaves) and leaf-count of the i-th component of tuple type `tpe`.
  def getComponentOffsetAndSizeOfIthMember(tpe: Type, i: Int) = {
    val TupleInfo(_, components) = getTupleInfo(tpe)
    (
      components.take(i).map(_.componentSize).sum,
      components(i).componentSize
    )
  }
  /**
   * Phases :
   * - unique renaming
   * - tuple cartography (map symbols and treeId to TupleSlices : x._2 will be checked against x ; if is x's symbol is mapped, the resulting slice will be composed and flattened
   * - tuple + block flattening (gives (Seq[Tree], Seq[Tree]) result)
   */
  // separate pass should return symbolsDefined, symbolsUsed
  // DefTree vs. RefTree
  // def typed(tree: Tree): Tree
  // def typeCheck(tree: Tree, pt: Type): Tree
  // Ident for a symbol, typechecked against the symbol's signature when possible
  // (falls back to the raw Ident if typechecking fails).
  def ident(sym: Symbol, tpe: Type): Ident = {
    assert(sym != NoSymbol)
    val i = Ident(sym)
    try {
      typecheck(i, pt = sym.typeSignature).asInstanceOf[Ident]
    } catch {
      case _: Throwable =>
        i
    }
  }
  // A contiguous run of leaf components of the tuple value held by `baseSymbol`.
  case class TupleSlice(baseSymbol: Symbol, sliceOffset: Int, sliceLength: Int) {
    def subSlice(offset: Int, length: Int) =
      TupleSlice(baseSymbol, sliceOffset + offset, length)
    // Generates a tree that reads this (single-leaf) slice out of baseSymbol.
    def toTreeGen(analyzer: TupleAnalyzer): TreeGen = () => {
      val rootTpe = baseSymbol.typeSignature
      val info = getTupleInfo(rootTpe)
      val flatPaths = info.flattenPaths
      assert(sliceLength == 1)
      assert(sliceOffset < flatPaths.size, "slice offset = " + sliceOffset + ", flat paths = " + flatPaths)
      val root: TreeGen = () => ident(baseSymbol, rootTpe)
      // println(s"FLATPATHS($baseSymbol: ${rootTpe}) = $flatPaths")
      if (flatPaths.size == 1) {
        assert(sliceOffset == 0)
        root()
      } else {
        //TupleCreation((0 until sliceLength).map(i => applyFiberPath(root, info.flattenPaths(sliceOffset + i))):_*)
        //println(s"baseSymbol = $baseSymbol, ${baseSymbol.typeSignature}, ${root().symbol.typeSignature}")
        var (res, resTpe) = applyFiberPath(root, rootTpe, flatPaths(sliceOffset))
        //analyzer.setSlice(res, this)
        //res = replace(res)
        analyzer.setSlice(res, this)
        res
      }
    }
  }
  // Matches a pattern tree (binds / nested tuple constructions) against `rootSlice`,
  // yielding a binding of each bound symbol to its sub-slice.
  class BoundTuple(rootSlice: TupleSlice) {
    def unapply(tree: Tree): Option[Seq[(Symbol, TupleSlice)]] = tree match {
      case Bind(name, what) =>
        val sub = this
        what match {
          case Ident(_) =>
            Some(Seq(tree.symbol -> rootSlice))
          case sub(m) =>
            Some(m :+ (tree.symbol -> rootSlice))
          case _ =>
            throw new RuntimeException("Not a bound tuple : " + tree + " (" + tree.getClass.getName + ")")
            None
        }
      case TupleCreation(components) =>
        //println("Found tuple creation with components " + components)
        var currentOffset = 0
        val ret = ArrayBuffer[(Symbol, TupleSlice)]()
        for ((component, i) <- components.zipWithIndex) {
          val compTpes = flattenTypes(component.tpe)
          val compSize = compTpes.size
          val subMatcher = new BoundTuple(rootSlice.subSlice(currentOffset, compSize))
          component match {
            case subMatcher(m) =>
              ret ++= m
            case _ =>
              //println("Cancelling BoundTuple because of component " + component + " of type " + component.tpe + " (length " + compSize + ") at offset " + currentOffset)
              return None // strict binding
          }
          currentOffset += compSize
        }
        Some(ret.toList)
      case _ =>
        throw new RuntimeException("Not a bound tuple : " + tree + " (" + tree.getClass.getName + ")") //\\n\\tnodes = " + nodeToString(tree))
        //System.exit(1)
        None
    }
  }
  // Walks `tree` once on construction, recording which trees and symbols denote tuple slices.
  class TupleAnalyzer(tree: Tree) {
    var treeTupleSlices = new scala.collection.mutable.HashMap[( /*Int,*/ Tree), TupleSlice]()
    //private var symbolTupleSlices = new scala.collection.mutable.HashMap[Symbol, TupleSlice]()
    var symbolTupleSlices = new scala.collection.mutable.HashMap[Symbol, TupleSlice]()
    // Slice recorded for a symbol; with recursive=true, follows base symbols transitively.
    def getSymbolSlice(sym: Symbol, recursive: Boolean = false): Option[TupleSlice] = {
      val direct = symbolTupleSlices.get(sym)
      direct match {
        case Some(directSlice) if recursive && directSlice.sliceLength > 1 && directSlice.baseSymbol != sym =>
          getSymbolSlice(directSlice.baseSymbol, recursive).orElse(direct)
        case _ =>
          direct
      }
    }
    // Whole-value slice covering every leaf of `tpe`, rooted at `sym`.
    def createTupleSlice(sym: Symbol, tpe: Type) = {
      val info = getTupleInfo(tpe)
      //assert(info.componentSize == 1, "Invalid multi-fibers slice for symbol " + sym + " (" + info.componentSize + " fibers)")
      TupleSlice(sym, 0, info.componentSize)
    }
    // Slice for a tree: recorded symbol slice first, then recorded tree slice,
    // otherwise a fresh whole-value slice for term symbols.
    def getTreeSlice(tree: Tree, recursive: Boolean = false): Option[TupleSlice] = {
      val direct = symbolTupleSlices.get(tree.symbol).orElse(treeTupleSlices.get(( /*tree.id, */ tree)))
      if (recursive && direct != None)
        getSymbolSlice(direct.get.baseSymbol, recursive).orElse(direct)
      else
        direct.orElse(
          if (tree.symbol == null ||
            !tree.symbol.isTerm || //.getClass != classOf[TermSymbol] || // not isInstanceOf ! We don't want ModuleSymbol nor MethodSymbol here, which are both TermSymbol subclasses
            tree.tpe == null || tree.tpe == NoType)
            None
          else {
            //println("Created slice for symbol " + tree.symbol + " (tree = " + tree + ", symbol.class = " + tree.symbol.getClass.getName + ")")
            Some(createTupleSlice(tree.symbol, tree.tpe))
            //None
          }
        )
    }
    def setSlice(sym: Symbol, slice: TupleSlice) = {
      assert(sym != slice.baseSymbol, "Invalid self-slice for symbol " + sym)
      //println("Setting slice " + slice + " for symbol " + sym)
      symbolTupleSlices(sym) = slice
    }
    def setSlice(tree: Tree, slice: TupleSlice) = {
      //println("Setting slice " + slice + " for tree " + tree)
      val info = getTupleInfo(slice.baseSymbol.typeSignature)
      val n = info.flattenPaths.size
      assert(slice.sliceOffset + slice.sliceLength <= n, "invalid slice for type " + tree.tpe + " : " + slice + ", flat types = " + info.flattenTypes)
      treeTupleSlices(( /*tree.id, */ tree)) = slice
      tree match {
        case vd: ValDef =>
          symbolTupleSlices(tree.symbol) = slice
        case _ =>
      }
    }
    // Identify trees and symbols that represent tuple slices
    new Traverser {
      override def traverse(tree: Tree): Unit = {
        tree match {
          case ValDef(mods, name, tpt, rhs) if !mods.hasFlag(Flag.MUTABLE) =>
            super.traverse(tree)
            //println("Got valdef " + name)
            val tupleInfo = getTupleInfo(rhs.tpe)
            if (tupleInfo == null) {
              throw new RuntimeException("No tuple info for type " + rhs.tpe + " !")
            }
            //setSlice(tree.symbol, TupleSlice(tree.symbol, 0, tupleInfo.componentSize))
            for (slice <- getTreeSlice(rhs, true)) {
              //println("\\tvaldef " + tree.symbol + " linked to rhs slice " + slice)
              setSlice(tree.symbol, slice)
            }
          case Match(selector, cases) =>
            traverse(selector)
            //println("Found match")
            for (slice <- getTreeSlice(selector)) {
              //println("\\tMatch has slice " + slice)
              val subMatcher = new BoundTuple(slice)
              for (CaseDef(pat, guard, body) <- cases) {
                pat match {
                  case subMatcher(m) =>
                    //println("CaseDef has guard " + guard + " (cases = " + cases + ")")
                    assert(guard == EmptyTree, guard)
                    for ((sym, subSlice) <- m) {
                      //println("Binding " + sym + " to " + subSlice)
                      setSlice(sym, subSlice)
                    }
                    for (bodySlice <- getTreeSlice(body)) {
                      //println("Set body slice " + bodySlice + " for body " + body)
                      setSlice(tree, bodySlice)
                    }
                  case _ =>
                    assert(false, "Case matching only supports tuples for now (TODO: add (CL)Array(...) case).")
                }
              }
            }
            cases.foreach(traverse(_))
          case TupleComponent(target, i) if target != null =>
            super.traverse(tree)
            val (componentsOffset, componentCount) = getComponentOffsetAndSizeOfIthMember(target.tpe, i)
            //println("Identified tuple component " + i + " of " + target)
            getTreeSlice(target) match {
              case Some(slice) =>
                //println("\\ttarget got slice " + slice)
                setSlice(tree, TupleSlice(slice.baseSymbol, componentsOffset, componentCount))
              case _ =>
                //println("No tuple slice symbol info for tuple component i = " + i + " : " + target + "\\n\\t-> " + nodeToStringNoComment(target))
                //println("\\ttree : " + nodeToStringNoComment(tree))
            }
          case Typed(expr, tpt) =>
            super.traverse(tree)
            propagateSlice(expr, tree)
          case Annotated(annot, arg) =>
            super.traverse(tree)
            propagateSlice(arg, tree)
          case _ =>
            super.traverse(tree)
        }
      }
      // TODO: Understand why we can't just use Tree param types: getting error:
      // "Parameter type in structural refinement may not refer to an abstract type defined outside that refinement"
      def propagateSlice(aSource: AnyRef, aDestination: AnyRef) = { //source: Tree, destination: Tree) = {
        val source = aSource.asInstanceOf[Tree]
        val destination = aDestination.asInstanceOf[Tree]
        getTreeSlice(source) match {
          case Some(slice) =>
            setSlice(destination, slice)
            //println("Propagated slice " + slice + " from " + source + " to " + destination)
          case _ =>
        }
      }
    }.traverse(tree)
    //println("treeTupleSlices = \\n\\t" + treeTupleSlices.mkString("\\n\\t"))
    //println("symbolTupleSlices = \\n\\t" + symbolTupleSlices.mkString("\\n\\t"))
    // 1) Create unique names for unique symbols !
    // 2) Detect external references, lift them up in arguments.
    // 3) Annotate code with usage :
    //    - symbol to Array and CLArray val : read, written, both ?
    //    - extern vars : forbidden
    //    -
    // 4) Flatten tuples and blocks, function definitions arg lists, function calls args
    //
    // Symbol => TupleSlice
    // Tree => TupleSlice
    // e.g. x: ((Double, Float), Int) ; x._1._2 => TupleSlice(x, 1, 1)
    //
    // Tuples flattening :
    // - list tuple definitions
    // - explode each definition unless it's an OpenCL intrinsic :
    //   -> create a new symbol for each split component,
    //   -> map resulting TupleSlice => componentSymbol
    //   -> splitSymbolsTable = Map[Symbol, Seq[(TupleSlice, componentSymbol, componentName)]]
    // - given a Tree, we get an exploded Seq[Tree] + pre-definitions
    //   e.g.:
    //       val a: (Int, Int) = (1, 3)
    //       -> val a1 = 1
    //          val a2 = 3
    //       val a: (Int, Int) = f(x) // special case for int2 : no change
    //   We need to propagate slices that are of length > 1 :
    //   - arr1.zip(arr2).zipWithIndex.map { case r @ (p @ (a1, a2), i) => p } map { p => p._1 }
    //     -> p => TupleSlice(mapArg, 0, 2)
    //   - val (a, b) = p // p is mapped
    //     -> val a = p1 // using splitSymbolsTable
    //        val b = p2
    //   Jump over blocks :
    //   val p = {
    //     val x = 10
    //     (x, x + 2)
    //   }
    //   ->
    //   val x = 10
    //   val p1 = x
    //   val p2 = x + 2
    //
    // Each Tree gives a list of statements + a list of split value components :
    // convertTree(tree: Tree): (Seq[Tree], Seq[Tree])
    //
    //
  }
}
| nativelibs4java/scalaxy-streams | src/main/scala/streams/matchers/legacy/TuploidAnalysis.scala | Scala | bsd-3-clause | 15,670 |
package edu.berkeley.nlp.entity
import java.util.IdentityHashMap
import scala.collection.JavaConverters.asScalaBufferConverter
import scala.collection.JavaConverters.collectionAsScalaIterableConverter
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.HashMap
import edu.berkeley.nlp.futile.fig.basic.Pair
import edu.berkeley.nlp.futile.ling.HeadFinder
import edu.berkeley.nlp.futile.syntax.Trees.PennTreeRenderer
import edu.berkeley.nlp.futile.syntax.Tree
import edu.berkeley.nlp.futile.util.Logger
import scala.collection.mutable.HashSet
import scala.collection.JavaConverters._
import java.util.Collections
import edu.berkeley.nlp.entity.lang.ModCollinsHeadFinder
import edu.berkeley.nlp.futile.ling.CollinsHeadFinder
class DepConstTree(val constTree: Tree[String],
val pos: Seq[String],
val words: Seq[String],
val childParentDepMap: HashMap[Int,Int]) {
require(childParentDepMap.keys.toSeq.sorted.sameElements((0 until words.size)), PennTreeRenderer.render(constTree));
  // Number of tokens in the sentence.
  def size = words.size;
  // Renders the constituency tree followed by one line per token with its head word and index.
  override def toString() = {
    var strRep = "";
    strRep += PennTreeRenderer.render(constTree) + "\\n";
    for (i <- 0 until words.size) {
      val headIdx = childParentDepMap(i);
      // A head index of -1 marks the root of the dependency structure.
      strRep += words(i) + "(" + i + "), head = " + (if (headIdx == -1) "ROOT" else words(headIdx) + "(" + headIdx + ")") + "\\n";
    }
    strRep;
  }
/**
* Fetches the head for an arbitrary span; this is needed for head-finding of mentions
* that might not be constituents.
*/
def getSpanHead(startIdx: Int, endIdx: Int) = DepConstTree.getSpanHead(childParentDepMap, startIdx, endIdx);
def getSpanHeadACECustom(startIdx: Int, endIdx: Int) = DepConstTree.getSpanHeadACECustom(childParentDepMap, pos, startIdx, endIdx);
/**
* Usually returns the head of the span, unless it's an NP or NML that dominates
* N* CC N* or [N* ,]+ N* , CC N* (coordination cases). Then we return the head
* of each coordinated thing.
*
* Note: English-specific
*/
def getSpanHeadOrNPCoordinatedHeads(startIdx: Int, endIdx: Int): Set[Int] = DepConstTree.getSpanHeadOrNPCoordinatedHeads(constTree, childParentDepMap, startIdx, endIdx);
def isConstituent(start: Int, end: Int) = {
val spans = constTree.getSpanMap();
spans.containsKey(Pair.makePair(new Integer(start), new Integer(end)));
}
def doesCrossBrackets(start: Int, end: Int) = {
val spans = constTree.getSpanMap();
var crossesBrackets = false;
for (span <- spans.keySet().asScala) {
val spanStart = span.getFirst.intValue();
val spanEnd = span.getSecond.intValue();
crossesBrackets = crossesBrackets || ((start < spanStart && end > spanStart && end < spanEnd) || (start > spanStart && start < spanEnd && end > spanEnd));
}
crossesBrackets;
}
def getConstituentType(start: Int, end: Int) = {
val spans = constTree.getSpanMap();
val pair = Pair.makePair(new Integer(start), new Integer(end));
if (spans.containsKey(pair) && !spans.get(pair).isEmpty()) {
spans.get(pair).get(0).getLabel();
} else {
"";
}
}
def getAllConstituentTypes(start: Int, end: Int): Seq[String] = {
val spans = constTree.getSpanMap();
val pair = Pair.makePair(new Integer(start), new Integer(end));
if (spans.containsKey(pair) && !spans.get(pair).isEmpty()) {
spans.get(pair).asScala.map(_.getLabel());
} else {
Seq[String]();
}
}
// XXX: This is broken in some subtle way
// def cCommand(commanderStart: Int, commanderEnd: Int, commandeeStart: Int, commandeeEnd: Int): String = {
// val spans = constTree.getSpanMap();
// if (spans.containsKey(fig.basic.Pair.makePair(new Integer(commanderStart), new Integer(commanderEnd - 1)))) {
// "UNKNOWN"
// } else {
// // Find the smallest span properly containing this one
// var parentStart = -1;
// var parentEnd = constTree.size() + 1;
// for (span <- spans.keySet.asScala) {
// val thisStart = span.getFirst.intValue;
// val thisEnd = span.getSecond.intValue + 1;
// val containsProperly = thisStart <= commanderStart && commanderEnd <= thisEnd && (thisStart != commanderStart || commanderEnd != thisEnd);
// if (containsProperly && thisStart >= parentStart && thisEnd <= parentEnd) {
// parentStart = thisStart;
// parentEnd = thisEnd;
// }
// }
// require(parentStart != -1 && parentEnd != constTree.size() + 1);
// if (parentStart <= commandeeStart && commandeeEnd <= parentEnd) {
// "TRUE";
// } else {
// "FALSE";
// }
// }
// }
def getSpansAndHeadsOfType(constituentType: String): Seq[(Int, Int, Int)] = {
val results = new ArrayBuffer[(Int, Int, Int)];
for (constituent <- constTree.getConstituentCollection().asScala) {
if (constituent.getLabel() == constituentType) {
results += new Tuple3(constituent.getStart(), constituent.getEnd() + 1, getSpanHead(constituent.getStart(), constituent.getEnd() + 1));
}
}
results;
}
def getSpansAndCoordinatedHeadsOfType(constituentType: String): Seq[(Int, Int, Set[Int])] = {
val results = new ArrayBuffer[(Int, Int, Set[Int])];
for (constituent <- constTree.getConstituentCollection().asScala) {
if (constituent.getLabel() == constituentType) {
results += new Tuple3(constituent.getStart(), constituent.getEnd() + 1, getSpanHeadOrNPCoordinatedHeads(constituent.getStart(), constituent.getEnd() + 1));
}
}
results;
}
def computeSyntacticUnigram(headIdx: Int): String = {
val parentIdx: Int = if (childParentDepMap.contains(headIdx)) childParentDepMap(headIdx) else -1;
val parentStr = if (parentIdx == -1) {
"NULL";
} else {
pos(parentIdx) + "(" + (if (headIdx > parentIdx) "L" else "R") + ")";
}
parentStr;
}
def computeSyntacticBigram(headIdx: Int): String = {
val parentIdx: Int = if (childParentDepMap.contains(headIdx)) childParentDepMap(headIdx) else -1;
val grandparentIdx: Int = if (parentIdx != -1 && childParentDepMap.contains(parentIdx)) childParentDepMap(parentIdx) else -1;
val parentStr = if (parentIdx == -1) {
"NULL";
} else {
pos(parentIdx) + "(" + (if (headIdx > parentIdx) "L" else "R") + ")";
}
val grandparentStr = if (grandparentIdx == -1) {
"NULL";
} else {
pos(grandparentIdx) + "(" + (if (parentIdx > grandparentIdx) "L" else "R") + ")";
}
parentStr + "-" + grandparentStr;
}
def computeSyntacticPositionSimple(headIdx: Int): String = {
val parentIdx: Int = if (childParentDepMap.contains(headIdx)) childParentDepMap(headIdx) else -1;
val grandparentIdx: Int = if (parentIdx != -1 && childParentDepMap.contains(parentIdx)) childParentDepMap(parentIdx) else -1;
if (parentIdx != -1 && pos(parentIdx).startsWith("V") && headIdx < parentIdx) {
"SUBJECT";
} else if (parentIdx != -1 && pos(parentIdx).startsWith("V") && headIdx > parentIdx) {
"DIROBJ";
} else if (parentIdx != -1 && grandparentIdx != -1 && (pos(parentIdx) == "IN" || pos(parentIdx) == "TO") && pos(grandparentIdx).startsWith("V")) {
"INDIROBJ";
} else {
"OTHER";
}
}
def computeDependencyPath(startIdx: Int, endIdx: Int): (Seq[String], Seq[String]) = {
val startParents = getAllParentIndices(startIdx);
val endParents = getAllParentIndices(endIdx);
var spIndex = -1;
for (i <- 0 until startParents.size) {
if (spIndex == -1 && endParents.contains(startParents(i))) {
spIndex = i;
}
}
val epIndex = endParents.indexOf(startParents(spIndex));
(startParents.slice(0, spIndex+1).map(pos(_)), endParents.slice(0, epIndex+1).reverse.map(pos(_)));
}
def isChild(child: Int, parent: Int) = childParentDepMap(child) == parent;
def getAllParentIndices(idx: Int): Seq[Int] = {
var parents = new ArrayBuffer[Int];
var currentParent = idx;
while (currentParent != -1) {
parents += currentParent;
currentParent = childParentDepMap(currentParent);
}
parents;
}
def getAllChildrenIndices(idx: Int): IndexedSeq[Int] = {
val children = new ArrayBuffer[Int];
var i = 0;
while (i < size) {
if (childParentDepMap(i) == idx) {
children += i;
}
i += 1;
}
children;
}
}
/**
 * Companion: span head-finding utilities and construction of a DepConstTree
 * from a raw constituency parse using Collins-style English head rules.
 */
object DepConstTree {
  // Shared English head finder used when building trees via apply().
  val hfEnglish = new ModCollinsHeadFinder();
  /**
   * Fetches the head for an arbitrary span; this is needed for head-finding of mentions
   * that might not be constituents. Returns the last token in [startIdx, endIdx) whose
   * dependency head lies outside the span; if none exists, assumes head-final and
   * returns endIdx - 1.
   */
  def getSpanHead(childParentDepMap: HashMap[Int,Int], startIdx: Int, endIdx: Int) = {
    // If it's a constituent, only one should have a head outside
    val outsidePointing = new ArrayBuffer[Int];
    for (i <- startIdx until endIdx) {
      val ptr = childParentDepMap(i);
      if (ptr < startIdx || ptr >= endIdx) {
        outsidePointing += i;
      }
    }
    // If our heuristic failed to identify anything, assume head final
    if (outsidePointing.isEmpty) {
      Logger.logss("WARNING: Empty outside pointing " + startIdx + ", " + endIdx + ": " + childParentDepMap);
      endIdx - 1;
    } else {
      outsidePointing.last;
    }
  }
  /**
   * Fetches the head for a span where you know it's a nominal but the parser might be wrong; basically
   * corrects for the problem of the span actually being (NP ...) (PP ...) and the head of the PP being
   * taken as the head of the whole span. Among outside-pointing tokens, prefers the last
   * one whose POS starts with "N"; otherwise falls back to the plain getSpanHead choice.
   */
  def getSpanHeadACECustom(childParentDepMap: HashMap[Int,Int], pos: Seq[String], startIdx: Int, endIdx: Int) = {
    // If it's a constituent, only one should have a head outside
    val outsidePointing = new ArrayBuffer[Int];
    for (i <- startIdx until endIdx) {
      val ptr = childParentDepMap(i);
      if (ptr < startIdx || ptr >= endIdx) {
        outsidePointing += i;
      }
    }
    // If our heuristic failed to identify anything, assume head final
    if (outsidePointing.isEmpty) {
      Logger.logss("WARNING: Empty outside pointing " + startIdx + ", " + endIdx + ": " + childParentDepMap);
      endIdx - 1;
    } else {
      // Take the last nominal head
      val headsAndTags = outsidePointing.map(idx => idx -> pos(idx));
      val nominalHeadsAndTags = headsAndTags.filter(_._2.startsWith("N"));
      if (!nominalHeadsAndTags.isEmpty) {
        nominalHeadsAndTags.last._1;
      } else {
        outsidePointing.last;
      }
    }
  }
  /**
   * Usually returns the head of the span, unless it's an NP or NML that dominates
   * N* CC N* or [N* ,]+ N* , CC N* (coordination cases). Then we return the head
   * of each coordinated thing. Only fires when (startIdx, endIdx) is itself a
   * constituent with exactly one non-unary bracketing whose label starts with "N".
   */
  def getSpanHeadOrNPCoordinatedHeads(constTree: Tree[String], childParentDepMap: HashMap[Int,Int], startIdx: Int, endIdx: Int): Set[Int] = {
    val spanMap = constTree.getSpanMap();
    val pair = Pair.makePair(new Integer(startIdx), new Integer(endIdx));
    val coordinatedHeads = new HashSet[Int];
    if (spanMap.containsKey(pair)) {
      // Ignore unary wrappers; we need the bracketing that actually branches.
      val nonUnaryConstituents = spanMap.get(pair).asScala.filter(_.getChildren().size() > 1);
      if (nonUnaryConstituents.size == 1 && nonUnaryConstituents.head.getLabel.startsWith("N")) {
        val children = nonUnaryConstituents.head.getChildren;
        if (children.size > 1) {
          val childLabels = children.asScala.map(_.getLabel);
          if (childLabels.contains("CC")) {
            // Per-child yield lengths let us convert child positions into token spans.
            val childSizes = children.asScala.map(_.getYield.size);
            if (childLabels.size == 3 && childLabels(0).startsWith("N") && childLabels(2).startsWith("N")) {
              // Case 1: N* CC N*
              coordinatedHeads += getSpanHead(childParentDepMap, startIdx, startIdx + childSizes(0));
              coordinatedHeads += getSpanHead(childParentDepMap, endIdx - childSizes(2), endIdx);
            } else if (childLabels.size >= 6 && childLabels.size % 2 == 0) {
              // Case 2: [N* ,]+ N* , CC N*
              var isNsAndCommas = true;
              for (i <- 0 until childLabels.size - 2 by 2) {
                isNsAndCommas = isNsAndCommas && childLabels(i).startsWith("N") && childLabels(i+1) == ",";
              }
              isNsAndCommas = isNsAndCommas && childLabels(childLabels.size - 2) == "CC" && childLabels(childLabels.size - 1).startsWith("N");
              if (isNsAndCommas) {
                // Walk the comma-separated conjuncts, taking the head of each.
                var currStart = startIdx;
                for (i <- 0 until childLabels.size - 2 by 2) {
                  coordinatedHeads += getSpanHead(childParentDepMap, currStart, currStart + childSizes(i));
                  currStart += childSizes(i) + childSizes(i + 1);
                }
                // Add the last child
                coordinatedHeads += getSpanHead(childParentDepMap, endIdx - childSizes(childSizes.size - 1), endIdx);
              }
            }
          }
        }
      }
    }
    // Fall back to the single span head when no coordination pattern matched.
    if (coordinatedHeads.size >= 1) {
      coordinatedHeads.toSet;
    } else {
      Set(getSpanHead(childParentDepMap, startIdx, endIdx));
    }
  }
  /**
   * Derives the child -> parent dependency map (root mapped to -1) from a constituency
   * tree by propagating lexical heads bottom-up with the given head finder: each
   * non-head child's head token attaches to the head token of the head child.
   */
  def extractDependencyStructure(constTree: Tree[String], headFinder: HeadFinder): HashMap[Int, Int] = {
    // Type created by this method is an IdentityHashMap, which is correct
    // N.B. The constituent end index is the last word of the mention, it's not on fenceposts
    val constituents = constTree.getConstituents()
    val subtreeHeads = new IdentityHashMap[Tree[String],Int];
    val trees = constTree.getPostOrderTraversal().asScala;
    require(trees.last eq constTree);
    val heads = new HashMap[Int,Int]();
    for (tree <- trees) {
      if (tree.isLeaf) {
        // Do nothing
      } else if (tree.isPreTerminal) {
        val constituent = constituents.get(tree);
        require(!subtreeHeads.containsKey(tree));
        subtreeHeads.put(tree, constituent.getStart());
      } else {
        val children = tree.getChildren();
        val head = headFinder.determineHead(tree);
        if (head == null) {
          Logger.logss("WARNING: null head: " + PennTreeRenderer.render(constTree) + "\\n" + PennTreeRenderer.render(tree));
        }
        val headIdx = subtreeHeads.get(head);
        for (child <- children.asScala) {
          if (child eq head) {
            // The head child's lexical head becomes this subtree's head.
            subtreeHeads.put(tree, headIdx);
          } else {
            // Each token may be assigned a parent at most once.
            require(!heads.contains(subtreeHeads.get(child)), "\\n" + PennTreeRenderer.render(constTree) +
                    "\\n" + PennTreeRenderer.render(tree) +
                    "\\n" + PennTreeRenderer.render(child) +
                    "\\n" + heads);
            heads(subtreeHeads.get(child)) = headIdx;
          }
        }
      }
    }
    // Set the root head
    heads(subtreeHeads.get(constTree)) = -1;
    val numLeaves = constTree.getYield.size();
    for (i <- 0 until numLeaves) {
      require(heads.contains(i), heads + "\\n" + PennTreeRenderer.render(constTree));
    }
    heads;
  }
  /** Builds a DepConstTree from a parse, deriving POS tags, words, and dependencies. */
  def apply(tree: Tree[String]) = {
    new DepConstTree(tree, tree.getPreTerminalYield().asScala, tree.getYield().asScala, extractDependencyStructure(tree, hfEnglish));
  }
  /** Smoke test for the NP-coordination head rules on hand-built trees. */
  def main(args: Array[String]) {
    // Can't have duplicate substructures
    val childComma1 = new Tree[String](",", Collections.singletonList(new Tree[String](",")));
    val childComma2 = new Tree[String](",", Collections.singletonList(new Tree[String](",")));
    val childComma3 = new Tree[String](",", Collections.singletonList(new Tree[String](",")));
    val childAnd = new Tree[String]("CC", Collections.singletonList(new Tree[String]("CC")));
    val child1 = new Tree[String]("NN", Collections.singletonList(new Tree[String]("bacon")));
    val child2 = new Tree[String]("NN", Collections.singletonList(new Tree[String]("sausage")));
    val child3 = new Tree[String]("NNS", Collections.singletonList(new Tree[String]("eggs")));
    val child4 = new Tree[String]("NN", Collections.singletonList(new Tree[String]("cheese")));
    val goodExampleType1 = new Tree[String]("NP", IndexedSeq(child1, childAnd, child2).asJava);
    val goodExampleType2 = new Tree[String]("NP", IndexedSeq(child1, childComma1, child2, childComma2, childAnd, child3).asJava);
    val goodExampleType2a = new Tree[String]("NP", IndexedSeq(child1, childComma1, child2, childComma2, child3, childComma3, childAnd, child4).asJava);
    val badExample = new Tree[String]("NP", IndexedSeq(child1, childComma1, child2, childComma2).asJava);
    test(goodExampleType1);
    test(goodExampleType2);
    test(goodExampleType2a);
    test(badExample);
  }
  /** Prints the coordinated-heads result for the full span of the given tree. */
  private def test(tree: Tree[String]) {
    val hf = new ModCollinsHeadFinder();
    val ds = extractDependencyStructure(tree, hf);
    println(getSpanHeadOrNPCoordinatedHeads(tree, ds, 0, tree.getYield().size));
  }
}
| malcolmgreaves/berkeley-entity | src/main/java/edu/berkeley/nlp/entity/DepConstTree.scala | Scala | gpl-3.0 | 16,795 |
package pl.newicom.dddd.clustertest
import org.scalatest.{ BeforeAndAfterAll, WordSpecLike }
import org.scalatest.Matchers
import akka.remote.testkit.MultiNodeSpecCallbacks
/**
 * Hooks up MultiNodeSpec with ScalaTest: forwards ScalaTest's beforeAll/afterAll
 * lifecycle hooks to the corresponding MultiNodeSpecCallbacks so multi-node
 * setup and teardown run around each suite that mixes this trait in.
 */
trait STMultiNodeSpec extends MultiNodeSpecCallbacks
  with WordSpecLike with Matchers with BeforeAndAfterAll {
  // Delegate suite setup to the multi-node test kit.
  override def beforeAll() = multiNodeSpecBeforeAll()
  // Delegate suite teardown to the multi-node test kit.
  override def afterAll() = multiNodeSpecAfterAll()
}
package io.amient.affinity.core.http
import akka.http.scaladsl.model.{StatusCode, StatusCodes}
/**
 * Exception carrying an HTTP status code together with a server-side message.
 * The RuntimeException message has the form "<code> - <reason> - <serverMessage>".
 */
case class RequestException(code: StatusCode, serverMessage: String)
  extends RuntimeException(s"${code.intValue} - ${code.reason} - $serverMessage") {

  /**
   * Builds a RequestException from a raw integer status code and a client-facing
   * reason, wrapping them in a custom non-success StatusCode with no entity.
   */
  def this(code: Int, clientReason: String, serverMessage: String) =
    this(StatusCodes.custom(code, clientReason, null, isSuccess = false, allowsEntity = false), serverMessage)
}
| amient/affinity | core/src/main/scala/io/amient/affinity/core/http/RequestException.scala | Scala | apache-2.0 | 444 |
/*
* Copyright 2017 Georgi Krastev
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink
package api.scala.derived.typeutils
import api.common.ExecutionConfig
import api.common.typeinfo.TypeInformation
import api.common.typeutils.TypeSerializer
import scala.reflect.ClassTag
/**
 * `TypeInformation` for recursive product types (case classes).
 *
 * Field type infos are passed by-name (`fs`) and forced lazily so that recursive
 * type definitions do not loop during construction; `InductiveObject`'s
 * `inductive` helper guards serializer creation, hashing, and printing against
 * re-entrant (cyclic) calls.
 *
 * @param fs   by-name type information for each field, in declaration order
 * @param from rebuilds a product value from its field sequence
 * @param to   decomposes a product value into its field sequence
 */
class ProductTypeInfo[P](fs: => Seq[TypeInformation[_]])
    (from: Seq[Any] => P, to: P => Seq[Any])(implicit tag: ClassTag[P])
  extends TypeInformation[P] with InductiveObject {
  // Forced on first use; by-name + lazy is what permits recursive definitions.
  private lazy val fields = fs
  // Cache set mid-way through createSerializer so a recursive call can observe the
  // partially-built serializer instead of recursing forever; excluded from Java
  // serialization via @transient.
  @transient private var serializer: ProductSerializer[P] = _
  def isBasicType: Boolean = false
  def isTupleType: Boolean = false
  def isKeyType: Boolean = false
  // Arity = number of fields; total field count is reported the same way here.
  def getArity: Int = fields.size
  def getTotalFields: Int = getArity
  def getTypeClass: Class[P] =
    tag.runtimeClass.asInstanceOf[Class[P]]
  // Handle cycles in the object graph.
  // NOTE(review): `inductive(default)(body)` presumably returns `default` on
  // re-entrant invocation -- confirm against InductiveObject.
  def createSerializer(config: ExecutionConfig): TypeSerializer[P] =
    inductive(serializer) {
      serializer = ProductSerializer()(from, to)
      serializer.fields = for (f <- fields)
        yield f.createSerializer(config).asInstanceOf[TypeSerializer[Any]]
      serializer
    }
  def canEqual(that: Any): Boolean =
    that.isInstanceOf[ProductTypeInfo[_]]
  // Equality compares field type infos; `eq` short-circuits self-comparison.
  override def equals(other: Any): Boolean = other match {
    case that: ProductTypeInfo[_] =>
      (this eq that) || (that canEqual this) && this.fields == that.fields
    case _ => false
  }
  // Recursion guard: hashing a cyclic occurrence contributes 0.
  override def hashCode: Int =
    inductive(0)(31 * fields.##)
  // Recursive occurrences render as "this" to keep the string finite.
  override def toString: String = inductive("this") {
    s"${getTypeClass.getTypeName}(${fields.mkString(", ")})"
  }
}
| joroKr21/flink-shapeless | src/main/scala/org/apache/flink/api/scala/derived/typeutils/ProductTypeInfo.scala | Scala | apache-2.0 | 2,201 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset and returns a small sample of code snippets that match specific criteria, offering a quick overview of the dataset's contents without deeper analysis.