| code (string, 5–1M chars) | repo_name (string, 5–109 chars) | path (string, 6–208 chars) | language (string, 1 class) | license (string, 15 classes) | size (int64, 5–1M) |
|---|---|---|---|---|---|
package com.alexitc.coinalerts.models
import com.alexitc.playsonify.models.{WrappedInt, WrappedString}
import play.api.libs.json.{JsPath, Json, Reads, Writes}
/**
* [[ExchangeCurrency]] represents a currency that can be traded in
* the [[Market]] supported by the [[Exchange]].
*
* For example, I could go to BITTREX (exchange) to buy BTC (market)
* paying with LTC (currency).
*/
case class ExchangeCurrency(
id: ExchangeCurrencyId,
exchange: Exchange,
market: Market,
currency: Currency,
currencyName: Option[CurrencyName])
object ExchangeCurrency {
implicit val writes: Writes[ExchangeCurrency] = Json.writes[ExchangeCurrency]
}
case class ExchangeCurrencyId(int: Int) extends AnyVal with WrappedInt
object ExchangeCurrencyId {
implicit val reads: Reads[ExchangeCurrencyId] = {
JsPath.read[Int].map(ExchangeCurrencyId.apply)
}
}
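// A minimal reads sketch (assumes play-json on the classpath): the Reads above
// deserializes a bare JSON number straight into the wrapper type, e.g.
//   Json.parse("42").as[ExchangeCurrencyId] == ExchangeCurrencyId(42)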
case class CurrencyName(string: String) extends AnyVal with WrappedString
case class CreateExchangeCurrencyModel(
exchange: Exchange,
market: Market,
currency: Currency,
currencyName: Option[CurrencyName])
|
AlexITC/crypto-coin-alerts
|
alerts-server/app/com/alexitc/coinalerts/models/exchangeCurrency.scala
|
Scala
|
gpl-3.0
| 1,096
|
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.arrow.tools.stats
import com.beust.jcommander.Parameters
import org.locationtech.geomesa.arrow.data.ArrowDataStore
import org.locationtech.geomesa.arrow.tools.ArrowDataStoreCommand
import org.locationtech.geomesa.arrow.tools.UrlParam
import org.locationtech.geomesa.tools.stats.{StatsCountCommand, StatsCountParams}
class ArrowStatsCountCommand extends StatsCountCommand[ArrowDataStore] with ArrowDataStoreCommand {
override val params = new ArrowStatsCountParams
override def execute(): Unit = {
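// Force an exact count up front: a file-backed Arrow store presumably keeps
// no cached statistics for an estimated count to consult.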
params.exact = true
super.execute()
}
}
@Parameters(commandDescription = "Calculate feature counts in a GeoMesa feature type")
class ArrowStatsCountParams extends StatsCountParams with UrlParam
|
ddseapy/geomesa
|
geomesa-arrow/geomesa-arrow-tools/src/main/scala/org/locationtech/geomesa/arrow/tools/stats/ArrowStatsCountCommand.scala
|
Scala
|
apache-2.0
| 1,208
|
package com.twitter.finagle.netty3
import com.twitter.io.Buf
import org.jboss.netty.buffer.{ChannelBuffers, ChannelBuffer}
/**
* A [[com.twitter.io.Buf]] wrapper for
* Netty [[org.jboss.netty.buffer.ChannelBuffer ChannelBuffers]].
*
* @note Since `ChannelBuffer`s are mutable, modifying the wrapped buffer
* within `slice`s of a `ChannelBufferBuf` will modify the original wrapped
* `ChannelBuffer`. Similarly, modifications to the original buffer will be
* reflected in slices.
*
* @param underlying The [[org.jboss.netty.buffer.ChannelBuffer]] to be wrapped in a
* [[com.twitter.io.Buf]] interface.
*/
class ChannelBufferBuf(protected val underlying: ChannelBuffer) extends Buf {
def length: Int = underlying.readableBytes
override def toString: String = s"ChannelBufferBuf($underlying)"
def get(index: Int): Byte = {
val pos = underlying.readerIndex + index
underlying.getByte(pos)
}
def process(from: Int, until: Int, processor: Buf.Processor): Int = {
Buf.checkSliceArgs(from, until)
if (isSliceEmpty(from, until)) return -1
val off = underlying.readerIndex + from
val endAt = math.min(length, until)
var i = 0
var continue = true
while (continue && from + i < endAt) {
val byte = underlying.getByte(off + i)
if (processor(byte))
i += 1
else
continue = false
}
if (continue) -1
else from + i
}
def write(bytes: Array[Byte], off: Int): Unit = {
checkWriteArgs(bytes.length, off)
val dup = underlying.duplicate()
dup.readBytes(bytes, off, dup.readableBytes)
}
def write(buffer: java.nio.ByteBuffer): Unit = {
checkWriteArgs(buffer.remaining, 0)
val dup = underlying.duplicate()
val currentLimit = buffer.limit
buffer.limit(buffer.position + length)
dup.readBytes(buffer)
buffer.limit(currentLimit)
}
def slice(i: Int, j: Int): Buf = {
Buf.checkSliceArgs(i, j)
if (isSliceEmpty(i, j)) Buf.Empty
else if (isSliceIdentity(i, j)) this
else {
val from = i + underlying.readerIndex
val until = math.min(j - i, length - i)
new ChannelBufferBuf(underlying.slice(from, until))
}
}
override def equals(other: Any): Boolean = other match {
case ChannelBufferBuf(otherCB) => underlying.equals(otherCB)
case other: Buf => Buf.equals(this, other)
case _ => false
}
protected def unsafeByteArrayBuf: Option[Buf.ByteArray] =
if (underlying.hasArray) {
val bytes = underlying.array
val begin = underlying.arrayOffset + underlying.readerIndex
val end = begin + underlying.readableBytes
Some(new Buf.ByteArray(bytes, begin, end))
} else None
}
object ChannelBufferBuf {
private val Empty = new ChannelBufferBuf(ChannelBuffers.EMPTY_BUFFER)
/** Extract a read-only ChannelBuffer from a ChannelBufferBuf. */
def unapply(cbb: ChannelBufferBuf): Option[ChannelBuffer] =
Some(ChannelBuffers.unmodifiableBuffer(cbb.underlying))
/**
* Coerce a buf to a ChannelBufferBuf
*/
def coerce(buf: Buf): ChannelBufferBuf = buf match {
case buf: ChannelBufferBuf => buf
case _ if buf.isEmpty => ChannelBufferBuf.Empty
case _ =>
val Buf.ByteArray.Owned(bytes, begin, end) = Buf.ByteArray.coerce(buf)
val cb = ChannelBuffers.wrappedBuffer(bytes, begin, end - begin)
new ChannelBufferBuf(cb)
}
/**
* Java API for [[ChannelBufferBuf.Owned.apply]].
*/
def newOwned(cb: ChannelBuffer): Buf =
Owned(cb)
object Owned {
// N.B. We cannot use ChannelBuffers.unmodifiableBuffer to ensure
// correctness because it prevents direct access to its underlying byte
// array.
/**
* Obtain a buffer using the provided ChannelBuffer, which should not be
* mutated after being passed to this function.
*
* @see [[newOwned]] for a Java friendly API.
*/
def apply(cb: ChannelBuffer): Buf = cb match {
case _ if cb.readableBytes == 0 => Buf.Empty
case BufChannelBuffer(buf) => buf
case _ => new ChannelBufferBuf(cb)
}
/** Extract the buffer's underlying ChannelBuffer. It should not be mutated. */
def unapply(cbb: ChannelBufferBuf): Option[ChannelBuffer] = Some(cbb.underlying)
def extract(buf: Buf): ChannelBuffer = ChannelBufferBuf.coerce(buf).underlying
}
object Shared {
def apply(cb: ChannelBuffer): Buf = Owned(cb.copy)
def unapply(cbb: ChannelBufferBuf): Option[ChannelBuffer] = Owned.unapply(cbb).map(_.copy)
def extract(buf: Buf): ChannelBuffer = Owned.extract(buf).copy
}
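// A hedged sketch contrasting the two constructors: Owned wraps the given
// ChannelBuffer directly (zero-copy, so the caller must not mutate it after
// handing it over), while Shared defensively copies both in and out:
//   val cb = ChannelBuffers.wrappedBuffer("hello".getBytes("UTF-8"))
//   val owned = ChannelBufferBuf.Owned(cb)   // views cb's bytes
//   val shared = ChannelBufferBuf.Shared(cb) // independent copy of cb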
}
|
mkhq/finagle
|
finagle-netty3/src/main/scala/com/twitter/finagle/netty3/ChannelBufferBuf.scala
|
Scala
|
apache-2.0
| 4,574
|
/**
* This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY
package sbt.protocol.codec
import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError }
trait ChannelAcceptedEventFormats { self: sjsonnew.BasicJsonProtocol =>
implicit lazy val ChannelAcceptedEventFormat: JsonFormat[sbt.protocol.ChannelAcceptedEvent] = new JsonFormat[sbt.protocol.ChannelAcceptedEvent] {
override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.protocol.ChannelAcceptedEvent = {
__jsOpt match {
case Some(__js) =>
unbuilder.beginObject(__js)
val channelName = unbuilder.readField[String]("channelName")
unbuilder.endObject()
sbt.protocol.ChannelAcceptedEvent(channelName)
case None =>
deserializationError("Expected JsObject but found None")
}
}
override def write[J](obj: sbt.protocol.ChannelAcceptedEvent, builder: Builder[J]): Unit = {
builder.beginObject()
builder.addField("channelName", obj.channelName)
builder.endObject()
}
}
}
|
xuwei-k/xsbt
|
protocol/src/main/contraband-scala/sbt/protocol/codec/ChannelAcceptedEventFormats.scala
|
Scala
|
apache-2.0
| 1,080
|
package org.twistednoodle.json_api
import java.net.URL
import scala.collection.immutable
// Created by iwaisman on 12/29/16.
/**
* This trait encompasses all data structures representing json:api objects.
*
* There remain restrictions in the specification which have not yet been
* encoded into the type system. For example, a relationship links object must contain
* at least one of 'self' and/or 'related'. <shrug>
*
* @see http://jsonapi.org/ for details
*
*/
trait Model { self: JsonApi =>
type Links = Map[String, Link]
type IdentifiedResourceObject = ResourceObject with ResourceIdentifier
case class JsonApiError(id: Option[String] = None,
about: Option[Link] = None,
status: Option[String] = None,
code: Option[String] = None,
title: Option[String] = None,
detail: Option[String] = None,
source: Option[ErrorSource] = None,
meta: Option[JSON] = None)
case class ErrorSource(pointer: Option[String] = None,
parameter: Option[String] = None)
/**
* A JsonApiObject. The name seemed more representative.
*/
case class Version(version: Option[String] = None,
meta: Option[JSON] = None)
case class Link(href: URL, meta: Option[JSON] = None)
// Resources ============================================
/**
* Sealed trait representing the two ways to represent data: single [[Resource]] or a sequence of them as a [[Resources]]
*/
sealed trait Data
/**
* A sealed trait comprised of [[ResourceIdentifier]]s and [[ResourceObject]]s
*/
sealed trait Resource extends Data
/**
* A container for multiple resources. This allows for consistent serialization as a json array.
* @param resources a sequence of [[Resource]]s
*/
case class Resources( resources: immutable.Seq[Resource]) extends Data
trait ResourceIdentifier extends Resource with RelationshipData {
val id: String
val tpe: String
val meta: Option[JSON]
}
case class SimpleResourceIdentifier(id: String,
tpe: String,
meta: Option[JSON] = None) extends ResourceIdentifier
case class ResourceObject(tpe: String,
attributes: Option[JSON] = None,
relationships: Map[String, Relationship] = Map.empty,
links: Links = Map.empty,
meta: Option[JSON] = None) extends Resource
object ResourceObject {
def apply(id: String,
tpe: String,
attributes: Option[JSON],
relationships: Map[String, Relationship],
links: Links,
meta: Option[JSON]): ResourceObject with ResourceIdentifier = {
val _id = id
new ResourceObject(tpe, attributes, relationships, links, meta) with ResourceIdentifier {
override val id: String = _id
}
}
def apply(id: String, obj: ResourceObject): ResourceObject with ResourceIdentifier =
ResourceObject(
id = id,
attributes = obj.attributes,
relationships = obj.relationships,
links = obj.links,
tpe = obj.tpe,
meta = obj.meta
)
}
// Relationships ========================================
/**
* Sealed trait representing the two ways to represent relationship data: single [[ResourceIdentifier]] or a sequence of them as a [[ResourceIdentifiers]]
*/
sealed trait RelationshipData
/**
* A container for multiple resource identifiers. This allows for consistent serialization as a json array.
* This is intended for use in a [[Relationship]]
* @param identifiers a sequence of [[ResourceIdentifier]]s
*/
case class ResourceIdentifiers( identifiers: immutable.Seq[ResourceIdentifier]) extends RelationshipData
case class Relationship(links: Links = Map.empty,
data: Option[RelationshipData],
meta: Option[JSON] = None)
// Document, top-level objects ==========================
/**
* A trait representing top-level json:api documents.
* The two primary variants are [[DataDocument]] and [[ErrorDocument]].
*/
sealed trait JsonApiDocument {
val included: immutable.Seq[ResourceObject]
val links: Links
val meta: Option[JSON]
val version: Option[Version]
}
/**
* A Data document, as opposed to an Error document
* @param data Either a single [[Resource]] or sequence of [[Resource]]s as a [[Resources]] object
*/
case class DataDocument(data: Data,
override val included: immutable.Seq[ResourceObject] = immutable.Seq.empty,
override val links: Links = Map.empty,
override val meta: Option[JSON] = None,
override val version: Option[Version] = None
) extends JsonApiDocument
/**
* An Error document as opposed to a Data document.
* @param errors a sequence of [[JsonApiError]]s.
*/
case class ErrorDocument(errors: immutable.Seq[JsonApiError],
override val included: immutable.Seq[ResourceObject] = immutable.Seq.empty,
override val links: Links = Map.empty,
override val meta: Option[JSON] = None,
override val version: Option[Version] = None
) extends JsonApiDocument
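// A hedged construction sketch (JSON is the abstract serialized type supplied
// by the concrete JsonApi implementation, so this is illustrative only):
//   val person = SimpleResourceIdentifier(id = "1", tpe = "people")
//   val single = DataDocument(data = person)                 // one resource
//   val many = DataDocument(data = Resources(List(person)))  // json array form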
}
|
iwaisman/json_api-scala
|
src/main/scala/org/twistednoodle/json_api/Model.scala
|
Scala
|
mit
| 5,669
|
package com.arcusys.valamis.persistence.impl.social.schema
import com.arcusys.valamis.persistence.common.DbNameUtils._
import com.arcusys.valamis.persistence.common.{LongKeyTableComponent, SlickProfile, TypeMapper}
import com.arcusys.valamis.social.model.Comment
import org.joda.time.DateTime
import com.arcusys.valamis.util.ToTuple
import com.arcusys.valamis.util.TupleHelpers._
trait CommentTableComponent extends LongKeyTableComponent with TypeMapper { self:SlickProfile =>
import driver.simple._
class CommentTable(tag: Tag) extends LongKeyTable[Comment](tag, "COMMENT") {
def companyId = column[Long]("COMPANY_ID")
def userId = column[Long]("USER_ID")
def content = column[String]("CONTENT")
def activityId = column[Long]("ACTIVITY_ID")
def creationDate = column[DateTime]("CREATION_DATE")
def lastUpdateDate = column[Option[DateTime]]("LAST_UPDATE_DATE")
def * = (companyId, userId, content, activityId, id.?, creationDate, lastUpdateDate) <> (Comment.tupled, Comment.unapply)
def update = (companyId, userId, content, activityId, creationDate, lastUpdateDate) <> (tupleToEntity, entityToTuple)
def entityToTuple(entity: TableElementType) = {
Some(toTupleWithFilter(entity))
}
}
val comments = TableQuery[CommentTable]
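// A hedged query sketch (Slick 2.x lifted API; needs an implicit Session):
//   comments.filter(_.activityId === 42L).sortBy(_.creationDate).list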
}
|
igor-borisov/valamis
|
valamis-slick-persistence/src/main/scala/com/arcusys/valamis/persistence/impl/social/schema/CommentTableComponent.scala
|
Scala
|
gpl-3.0
| 1,290
|
package com.twitter.finagle.client
import com.twitter.finagle._
import com.twitter.finagle.Namer.AddrWeightKey
import com.twitter.util._
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
final class EndpointRecorderTest extends FunSuite {
val name = "fooClient"
val path = "/foo"
val addrs: Set[Address] = Set(Address(8080))
val bound = Addr.Bound(addrs.toSet, Addr.Metadata(AddrWeightKey -> 2.0))
val endpoints = Var[Addr](bound)
val dtab = Dtab.read("/foo => /bar")
val neverFactory = ServiceFactory.const(new Service[Int, Int] {
def apply(req: Int) = Future.never
})
def getEndpoints(registry: EndpointRegistry, name: String, dtab: Dtab, path: String): Option[Addr] = {
registry.endpoints(name).get(dtab).flatMap(_.get(path))
}
test("EndpointRecorder is disabled if BindingFactory.Dest is not bound") {
val stk: StackBuilder[ServiceFactory[Int, Int]] = new StackBuilder(
Stack.Leaf(Stack.Role("never"), neverFactory)
)
stk.push(EndpointRecorder.module[Int, Int])
val factory = stk.make(Stack.Params.empty)
assert(factory == neverFactory)
}
test("EndpointRecorder registers in EndpointRegistry") {
val registry = new EndpointRegistry()
val factory = new EndpointRecorder(neverFactory, registry, name, dtab, path, endpoints)
assert(getEndpoints(registry, name, dtab, path) == Some(bound))
}
test("EndpointRecorder deregisters on close()") {
val registry = new EndpointRegistry()
val factory = new EndpointRecorder(neverFactory, registry, name, dtab, path, endpoints)
assert(getEndpoints(registry, name, dtab, path) == Some(bound))
factory.close()
assert(getEndpoints(registry, name, dtab, path) == None)
}
}
|
spockz/finagle
|
finagle-core/src/test/scala/com/twitter/finagle/client/EndpointRecorderTest.scala
|
Scala
|
apache-2.0
| 1,806
|
/*
* Copyright (c) 2014-2021 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.eval.internal
import java.util.concurrent.RejectedExecutionException
import monix.eval.Task
import monix.eval.Task.{Async, Context}
import monix.execution.schedulers.TracingScheduler
import monix.execution.{Callback, Scheduler}
import scala.concurrent.ExecutionContext
private[eval] object TaskShift {
/**
* Implementation for `Task.shift`
*/
def apply(ec: ExecutionContext): Task[Unit] = {
Async(
new Register(ec),
trampolineBefore = false,
trampolineAfter = false,
restoreLocals = false
)
}
// Implementing Async's "start" via `ForkedStart` in order to signal
// that this is a task that forks on evaluation.
//
// N.B. the contract is that the injected callback gets called after
// a full async boundary!
private final class Register(ec: ExecutionContext) extends ForkedRegister[Unit] {
def apply(context: Context, cb: Callback[Throwable, Unit]): Unit = {
val ec2 =
if (ec eq null) {
context.scheduler
} else if (context.options.localContextPropagation) {
ec match {
case sc: Scheduler if sc.features.contains(Scheduler.TRACING) =>
sc
case _ =>
TracingScheduler(ec)
}
} else {
ec
}
try {
ec2.execute(new Runnable {
def run(): Unit = {
context.frameRef.reset()
cb.onSuccess(())
}
})
} catch {
case e: RejectedExecutionException =>
Callback.signalErrorTrampolined(cb, e)
}
}
}
}
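// A hedged usage note: this register backs Task.shift, so e.g.
//   Task.shift(ec).flatMap(_ => task)
// resumes `task` on `ec` after a full async boundary; the callback only
// fires from inside the scheduled Runnable above.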
|
monifu/monix
|
monix-eval/shared/src/main/scala/monix/eval/internal/TaskShift.scala
|
Scala
|
apache-2.0
| 2,268
|
package mesosphere.marathon.core.storage.store.impl.cache
import java.io.NotActiveException
import java.time.OffsetDateTime
import akka.http.scaladsl.marshalling.Marshaller
import akka.http.scaladsl.unmarshalling.{ Unmarshal, Unmarshaller }
import akka.stream.Materializer
import akka.stream.scaladsl.Source
import akka.{ Done, NotUsed }
import com.typesafe.scalalogging.StrictLogging
import mesosphere.marathon.PrePostDriverCallback
import mesosphere.marathon.Protos.StorageVersion
import mesosphere.marathon.core.storage.store.impl.BasePersistenceStore
import mesosphere.marathon.core.storage.store.{ IdResolver, PersistenceStore }
import mesosphere.util.LockManager
import scala.async.Async.{ async, await }
import scala.collection.concurrent.TrieMap
import scala.collection.immutable.Seq
import scala.concurrent.{ ExecutionContext, Future, Promise }
/**
* A Write Ahead Cache of another persistence store that preloads the entire persistence store into memory before
* satisfying any requests.
*
* TODO: Consider an alternative strategy where we see if the promise is complete and use it
* otherwise going directly to the storage layer. This turns out to be much more complicated
* as the cache is populated asynchronously, so it would have to queue up all create/update operations
* onto the future to keep the value fully updated: then, there would be a short window of time when
* the cached data is actually stale.
*
* @param store The store to cache
* @param mat a materializer for akka streaming
* @param ctx The execution context for future chaining.
* @tparam Serialized The serialized format for the persistence store.
*/
class LoadTimeCachingPersistenceStore[K, Category, Serialized](
val store: BasePersistenceStore[K, Category, Serialized],
maxPreloadRequests: Int = 8)(
implicit
mat: Materializer,
ctx: ExecutionContext
) extends PersistenceStore[K, Category, Serialized] with StrictLogging with PrePostDriverCallback {
private val lockManager = LockManager.create()
private[store] var idCache: Future[TrieMap[Category, Seq[K]]] = Future.failed(new NotActiveException())
// When we pre-load the persistence store, we don't have an idResolver or an Unmarshaller, so we store the
// serialized form as a Left() until it is deserialized, in which case we store as a Right()
private[store] var valueCache: Future[TrieMap[K, Either[Serialized, Any]]] =
Future.failed(new NotActiveException())
override def storageVersion(): Future[Option[StorageVersion]] = store.storageVersion()
override def setStorageVersion(storageVersion: StorageVersion): Future[Done] =
store.setStorageVersion(storageVersion)
override def preDriverStarts: Future[Unit] = {
val cachePromise = Promise[TrieMap[K, Either[Serialized, Any]]]()
val idPromise = Promise[TrieMap[Category, Seq[K]]]()
idCache = idPromise.future
valueCache = cachePromise.future
val ids = TrieMap.empty[Category, Seq[K]]
val cached = TrieMap.empty[K, Either[Serialized, Any]]
val future = store.allKeys().mapAsync(maxPreloadRequests) { key =>
store.rawGet(key.key).map(v => key -> v)
}.runForeach {
case (categorized, value) =>
value.foreach(v => cached(categorized.key) = Left(v))
val children = ids.getOrElse(categorized.category, Nil)
ids.put(categorized.category, categorized.key +: children)
}
idPromise.completeWith(future.map(_ => ids))
cachePromise.completeWith(future.map(_ => cached))
future.map(_ => ())
}
override def postDriverTerminates: Future[Unit] = {
valueCache = Future.failed(new NotActiveException())
idCache = Future.failed(new NotActiveException())
Future.successful(())
}
@SuppressWarnings(Array("all")) // async/await
override def ids[Id, V]()(implicit ir: IdResolver[Id, V, Category, K]): Source[Id, NotUsed] = {
val category = ir.category
val future = lockManager.executeSequentially(category.toString) {
async { // linter:ignore UnnecessaryElseBranch
await(idCache).getOrElse(category, Nil).map(ir.fromStorageId)
}
}
Source.fromFuture(future).mapConcat(identity)
}
@SuppressWarnings(Array("all")) // async/await
private def deleteCurrentOrAll[Id, V](
k: Id,
delete: () => Future[Done])(implicit ir: IdResolver[Id, V, Category, K]): Future[Done] = {
val storageId = ir.toStorageId(k, None)
val category = ir.category
lockManager.executeSequentially(category.toString) {
lockManager.executeSequentially(storageId.toString) {
async { // linter:ignore UnnecessaryElseBranch
val deleteFuture = delete()
val (cached, ids, _) = (await(valueCache), await(idCache), await(deleteFuture))
cached.remove(storageId)
val old = ids.getOrElse(category, Nil)
val children = old.filter(_ != storageId)
if (children.nonEmpty) { // linter:ignore UseIfExpression
ids.put(category, old.filter(_ != storageId))
} else {
ids.remove(category)
}
Done
}
}
}
}
override def deleteAll[Id, V](k: Id)(implicit ir: IdResolver[Id, V, Category, K]): Future[Done] = {
deleteCurrentOrAll(k, () => store.deleteAll(k))
}
override def deleteCurrent[Id, V](k: Id)(implicit ir: IdResolver[Id, V, Category, K]): Future[Done] = {
deleteCurrentOrAll(k, () => store.deleteCurrent(k))
}
@SuppressWarnings(Array("all")) // async/await
override def get[Id, V](id: Id)(implicit
ir: IdResolver[Id, V, Category, K],
um: Unmarshaller[Serialized, V]): Future[Option[V]] = {
val storageId = ir.toStorageId(id, None)
lockManager.executeSequentially(storageId.toString) {
async { // linter:ignore UnnecessaryElseBranch
val cached = await(valueCache)
cached.get(storageId) match {
case Some(Left(v)) =>
val deserialized = await(Unmarshal(v).to[V])
cached.put(storageId, Right(deserialized))
Some(deserialized)
case Some(Right(v)) =>
Some(v.asInstanceOf[V])
case None =>
None
}
}
}
}
override def get[Id, V](id: Id, version: OffsetDateTime)(implicit
ir: IdResolver[Id, V, Category, K],
um: Unmarshaller[Serialized, V]): Future[Option[V]] =
store.get(id, version)
@SuppressWarnings(Array("all")) // async/await
override def store[Id, V](id: Id, v: V)(implicit
ir: IdResolver[Id, V, Category, K],
m: Marshaller[V, Serialized]): Future[Done] = {
val category = ir.category
val storageId = ir.toStorageId(id, None)
lockManager.executeSequentially(category.toString) {
lockManager.executeSequentially(storageId.toString) {
async { // linter:ignore UnnecessaryElseBranch
val storeFuture = store.store(id, v)
val (cached, ids, _) = (await(valueCache), await(idCache), await(storeFuture))
cached(storageId) = Right(v)
val old = ids.getOrElse(ir.category, Nil)
ids(category) = storageId +: old
Done
}
}
}
}
@SuppressWarnings(Array("all")) // async/await
override def store[Id, V](id: Id, v: V, version: OffsetDateTime)(implicit
ir: IdResolver[Id, V, Category, K],
m: Marshaller[V, Serialized]): Future[Done] = {
val category = ir.category
val storageId = ir.toStorageId(id, None)
lockManager.executeSequentially(category.toString) {
async { // linter:ignore UnnecessaryElseBranch
val storeFuture = store.store(id, v, version)
val (idCache, _) = (await(this.idCache), await(storeFuture))
val old = idCache.getOrElse(category, Nil)
idCache.put(category, storageId +: old)
Done
}
}
}
override def versions[Id, V](id: Id)(implicit ir: IdResolver[Id, V, Category, K]): Source[OffsetDateTime, NotUsed] =
store.versions(id)
override def deleteVersion[Id, V](
k: Id,
version: OffsetDateTime)(implicit ir: IdResolver[Id, V, Category, K]): Future[Done] =
store.deleteVersion(k, version)
override def toString: String = s"LoadTimeCachingPersistenceStore($store)"
}
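// A hedged lifecycle sketch (`underlying` stands for any BasePersistenceStore):
// the caches are only valid between the driver callbacks, e.g.
//   val cached = new LoadTimeCachingPersistenceStore(underlying)
//   cached.preDriverStarts      // preloads all ids and raw values into memory
//   // ... get/store/delete are then served from, and kept in sync with, the cache
//   cached.postDriverTerminates // invalidates the cache until the next preload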
|
timcharper/marathon
|
src/main/scala/mesosphere/marathon/core/storage/store/impl/cache/LoadTimeCachingPersistenceStore.scala
|
Scala
|
apache-2.0
| 8,187
|
package scorex.transaction
import scorex.account.Account
trait BalanceSheet {
def balance(account: Account, height: Option[Int] = None): Long
/**
 * @return the minimum balance held by the account over the last
 *         `confirmations` blocks, i.e. from the current block back to
 *         `confirmations` blocks ago
 */
def balanceWithConfirmations(account: Account, confirmations: Int): Long
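// Illustrative example: if the account held 10, 7 and 12 over the last
// three blocks, balanceWithConfirmations(account, 3) == 7 (the minimum).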
}
|
alexeykiselev/WavesScorex
|
scorex-basics/src/main/scala/scorex/transaction/BalanceSheet.scala
|
Scala
|
cc0-1.0
| 325
|
package com.arcusys.learn.models.request
object QuizActionType extends Enumeration {
val GetAll = Value("GETALL")
val GetContent = Value("GETCONTENT")
val Add = Value("ADD")
val Publish = Value("PUBLISH")
val Delete = Value("DELETE")
val Update = Value("UPDATE")
val UpdateLogo = Value("UPDATELOGO")
val Clone = Value("CLONE")
val Convert = Value("CONVERT")
val AddCategory = Value("ADDCATEGORY")
val AddQuestion = Value("ADDQUESTION")
val AddQuestions = Value("ADDQUESTIONS")
val AddQuestionExternal = Value("ADDQUESTIONEXTERNALRESOURCE")
val AddQuestionPlainText = Value("ADDQUESTIONPLAINTEXT")
val AddQuestionRevealJS = Value("ADDQUESTIONREVEALJS")
val AddQuestionPDF = Value("ADDQUESTIONPDF")
val AddVideo = Value("ADDQUESTIONVIDEO")
val DeleteQuestion = Value("DELETEQUESTION")
val DeleteCategory = Value("DELETECATEGORY")
val UpdateCategory = Value("UPDATECATEGORY")
val UpdateQuestion = Value("UPDATEQUESTION")
val UpdateQuestionPlainText = Value("UPDATEQUESTIONPLAINTEXT")
val UpdateQuestionRevealJS = Value("UPDATEQUESTIONREVEALJS")
val UpdateQuestionPDF = Value("UPDATEQUESTIONPDF")
val UpdateQuestionPPTX = Value("UPDATEQUESTIONPPTX")
val UpdateQuestionExternal = Value("UPDATEQUESTIONEXTERNALRESOURCE")
val UpdateQuestionDLVideo = Value("UPDATEQUESTIONVIDEODL")
val MoveElement = Value("MOVEELEMENT")
val QuestionPreview = Value("GETQUESTIONPREVIEW")
val Import = Value("IMPORT")
type QuizActionType = Value
}
|
ViLPy/Valamis
|
learn-portlet/src/main/scala/com/arcusys/learn/models/request/QuizActionType.scala
|
Scala
|
lgpl-3.0
| 1,493
|
package com.github.vonnagy.service.container.health
import java.net.InetAddress
import java.util.jar.Attributes.Name
import java.util.jar.{Attributes, JarFile, Manifest}
import com.github.vonnagy.service.container.log.LoggingAdapter
/**
* Created by Ivan von Nagy on 1/12/15.
*/
object ContainerInfo extends LoggingAdapter {
private val mainClass = getMainClass
private val applicationInfo = getApplicationInfo
val scalaVersion = util.Properties.versionString
val host = getHostInternal
val application = applicationInfo._1
val applicationVersion = applicationInfo._2
val containerManifest = getManifest(this.getClass)
val containerVersion = containerManifest.getMainAttributes().getValue("Implementation-Version") + "." + containerManifest.getMainAttributes().getValue("Implementation-Build")
/**
* Get the system host
* @return the host name
*/
private[health] def getHostInternal: String = {
try {
InetAddress.getLocalHost.getHostName.split("\\.")(0)
}
catch {
case _: Exception => {
"Unknown"
}
}
}
/**
* Get the name and version information for the application
* @return
*/
private[health] def getApplicationInfo: Tuple2[String, String] = {
if (mainClass.isDefined) {
val man: Manifest = getManifest(mainClass.get)
Tuple2[String, String](man.getMainAttributes.getValue(Name.IMPLEMENTATION_TITLE),
man.getMainAttributes.getValue("Implementation-Version") + "." +
man.getMainAttributes.getValue("Implementation-Build"))
}
else {
Tuple2[String, String]("Container Service", "1.0.0.N/A")
}
}
/**
* Find the main class that is the entry point
* @return
*/
private[health] def getMainClass: Option[Class[_]] = {
import scala.collection.JavaConverters._
def checkStack(elem: StackTraceElement): Option[Class[_]] = try {
if (elem.getMethodName.equals("main")) Some(Class.forName(elem.getClassName)) else None
} catch {
case _: ClassNotFoundException => {
// Swallow the exception
None
}
}
Thread.getAllStackTraces.asScala.values.flatMap(currStack => {
if (!currStack.isEmpty)
checkStack(currStack.last)
else
None
}).headOption match {
case None =>
sys.props.get("sun.java.command") match {
case Some(command) if !command.isEmpty =>
try {
Some(Class.forName(command))
} catch {
// Swallow the exception
case _: ClassNotFoundException =>
None
}
// Nothing could be located
case _ => None
}
case c => c
}
}
private[health] def getManifest(clazz: Class[_]): Manifest = {
val file: String = clazz.getProtectionDomain.getCodeSource.getLocation.getFile
try {
if (file.endsWith(".jar")) {
new JarFile(file).getManifest
}
else {
val manifest: Manifest = new Manifest
manifest.getMainAttributes.put(Name.IMPLEMENTATION_TITLE, "Container Service")
manifest.getMainAttributes.put(Name.IMPLEMENTATION_VERSION, "1.0.0")
manifest.getMainAttributes.put(new Attributes.Name("Implementation-Build"), "N/A")
manifest
}
}
catch {
case _: Exception => {
val manifest: Manifest = new Manifest
manifest.getMainAttributes.put(Name.IMPLEMENTATION_TITLE, "Container Service")
manifest.getMainAttributes.put(Name.IMPLEMENTATION_VERSION, "1.0.0")
manifest.getMainAttributes.put(new Attributes.Name("Implementation-Build"), "N/A")
manifest
}
}
}
}
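// A hedged usage sketch: all values are resolved once at class-load time, e.g.
//   ContainerInfo.host               // short hostname, or "Unknown" on failure
//   ContainerInfo.application        // Implementation-Title, or "Container Service"
//   ContainerInfo.applicationVersion // "<version>.<build>", or "1.0.0.N/A"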
|
vonnagy/service-container
|
service-container/src/main/scala/com/github/vonnagy/service/container/health/ContainerInfo.scala
|
Scala
|
apache-2.0
| 3,678
|
package com.sksamuel.scapegoat.inspections.exception
import com.sksamuel.scapegoat.PluginRunner
import org.scalatest.{ FreeSpec, Matchers, OneInstancePerTest }
/** @author Stephen Samuel */
class UnreachableCatchTest
extends FreeSpec
with Matchers
with PluginRunner
with OneInstancePerTest {
override val inspections = Seq(new UnreachableCatch)
"unreachable catch" - {
"should report warning" - {
"for subtype after supertype" in {
val code1 = """object Test {
try {
} catch {
case _ : Throwable =>
case e : Exception => // not reachable
}
} """.stripMargin
compileCodeSnippet(code1)
compiler.scapegoat.feedback.warnings.size shouldBe 1
}
}
"should not report warning" - {
"for super type after sub type" in {
val code2 = """object Test {
try {
} catch {
case e : RuntimeException =>
case f : Exception =>
case x : Throwable =>
}
} """.stripMargin
compileCodeSnippet(code2)
compiler.scapegoat.feedback.warnings.size shouldBe 0
}
}
}
}
|
pwwpche/scalac-scapegoat-plugin
|
src/test/scala/com/sksamuel/scapegoat/inspections/exception/UnreachableCatchTest.scala
|
Scala
|
apache-2.0
| 1,366
|
package com.letstalkdata.hexiles
package game
import com.letstalkdata.hexiles.shapes.Cube
import scala.language.postfixOps
/**
* A snapshot of the game at a point in time.
*
* Author: Phillip Johnson
* Date: 4/30/15
*/
class State(board: Board, pieces: Seq[Piece]) {
def isTerminal: Boolean = {
isBoardCovered && !boardPiecesOverlap
}
private def isBoardCovered: Boolean = {
val allPieceCubes: Seq[Cube] = pieces.flatMap(p => p.getHexes).map(hex => hex.cube)
board.tiles.forall(tile => allPieceCubes.contains(tile.cube))
}
private def boardPiecesOverlap: Boolean = {
val allPieceHexes = pieces.flatMap(p => p.getHexes)
board.tiles
.map(t => allPieceHexes.filter(p => p.cube == t.cube))
.exists(matches => matches.size > 1)
}
def asSolution: Solution = Solution(pieces.filter(p => p.hexes.forall(board.tiles contains)))
}
|
SeriousSoftware/ScalaJS-Hexiles-web
|
src/main/scala-2.11/com/letstalkdata/hexiles/game/State.scala
|
Scala
|
mit
| 879
|
import org.apache.spark.SparkContext
import org.apache.spark.SparkConf
object KmeansSpark {
type Point = (Double, Double)
var centroids: Array[Point] = Array[Point]()
// Receives a point and the current centroids; returns the centroid closest to that point
def assignment(x: Point, centers: Array[Point]) = {
var minDistance: Double = 0
var distance: Double = 0
var count = 0
var point: Point = (0.0, 0.0)
for (c: Point <- centers) {
distance = math.sqrt(math.pow((c._1 - x._1), 2) + math.pow((c._2 - x._2), 2))
if (count == 0) {
minDistance = distance
point = c
count = count + 1
}
if (distance < minDistance) {
minDistance = distance
point = c
}
}
point
}
// Takes the points assigned to one cluster and computes the new centroid:
// its x,y coordinates are the means of the assigned points' x,y coordinates
def update(points: Iterable[Point]) = {
var x: Double = 0.0
var y: Double = 0.0
val total = points.size
for (p: Point <- points) {
x += p._1
y += p._2
}
(x / total, y / total)
}
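// Worked example: update(Seq((0.0, 0.0), (2.0, 2.0))) == (1.0, 1.0), since the
// new centroid is the coordinate-wise mean of the assigned points.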
def main(args: Array[String]) {
val conf = new SparkConf().setAppName("Kmeans Clustering").setMaster("local[2]")
val sc = new SparkContext(conf)
centroids = sc.textFile("c").map(line => {
val a = line.split(",")
(a(0).toDouble, a(1).toDouble)})
.collect()
var points = sc.textFile("p").map(line => {
val a = line.split(",")
(a(0).toDouble, a(1).toDouble)
})
for (a <- 1 to 10) {
  // Reassign each point to its closest centroid, recompute every centroid as
  // the mean of its assigned points, and feed the result into the next pass.
  centroids = points.map(point => (assignment(point, centroids), point))
    .groupByKey()
    .map(pair => update(pair._2))
    .collect()
  centroids.foreach(println)
}
sc.stop()
}
}
|
fegaras/DIQL
|
tests/diablo/spark/KMeansSpark.scala
|
Scala
|
apache-2.0
| 1,766
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ly.stealth.mesos.kafka.interface.impl
import kafka.utils.{ZkUtils => KafkaZkUtils}
import kafka.common.TopicAndPartition
import kafka.controller.{LeaderIsrAndControllerEpoch, ReassignedPartitionsContext}
import ly.stealth.mesos.kafka.interface.ZkUtilsProxy
import scala.collection.{Map, Set, mutable}
class ZkUtils(zkUrl: String) extends ZkUtilsProxy {
private val DEFAULT_TIMEOUT_MS = 30000
private val zkUtils = KafkaZkUtils(zkUrl, DEFAULT_TIMEOUT_MS, DEFAULT_TIMEOUT_MS, isZkSecurityEnabled = false)
override def getAllTopics(): Seq[String] = zkUtils.getAllTopics()
override def getReplicaAssignmentForTopics(topics: Seq[String]): mutable.Map[TopicAndPartition, Seq[Int]]
= zkUtils.getReplicaAssignmentForTopics(topics)
override def getPartitionsBeingReassigned(): Map[TopicAndPartition, ReassignedPartitionsContext]
= zkUtils.getPartitionsBeingReassigned()
override def getReplicasForPartition(
topic: String,
partition: Int
): Seq[Int] = zkUtils.getReplicasForPartition(topic, partition)
override def updatePartitionReassignmentData(partitionsToBeReassigned: Map[TopicAndPartition, Seq[Int]]): Unit
= zkUtils.updatePartitionReassignmentData(partitionsToBeReassigned)
override def createPersistentPath(
path: String,
data: String
): Unit = zkUtils.createPersistentPath(path, data)
override def getPartitionAssignmentForTopics(topics: Seq[String]): mutable.Map[String, Map[Int, Seq[Int]]]
= zkUtils.getPartitionAssignmentForTopics(topics)
override def getPartitionLeaderAndIsrForTopics(topicAndPartitions: Set[TopicAndPartition]): mutable.Map[TopicAndPartition, LeaderIsrAndControllerEpoch]
= zkUtils.getPartitionLeaderAndIsrForTopics(null, topicAndPartitions)
override def getSortedBrokerList(): Seq[Int] = zkUtils.getSortedBrokerList()
}
|
tc-dc/kafka-mesos
|
src/scala/iface/0_9/ly/stealth/mesos/kafka/interface/impl/ZkUtils.scala
|
Scala
|
apache-2.0
| 2,616
|
class UpdateOnly {
def update(x: Int, y: Int) {
}
}
val x = new UpdateOnly
x(2/*caret*/) = 3
//x: Int
|
triggerNZ/intellij-scala
|
testdata/parameterInfo/functionParameterInfo/update/UpdateOnly.scala
|
Scala
|
apache-2.0
| 111
|
package patterns
trait Functor[C[_]] {
def map[A, B](f : A => B) : C[B]
}
trait Applicative[F[_]] extends Functor[F] {
def pure[A](a: A) : F[A]
def apply[A, B](f: F[A => B]) : F[B]
override def map[A, B](f: A => B): F[B] = apply(pure(f))
}
object ApplicativeVsMonad {
case class Foo(s: Symbol, n: Int)
val whatever = "Welcome"
def maybeComputeS(whatever: String) : Option[Symbol] = Some(Symbol(whatever))
def maybeComputeN(whatever: String) : Option[Int] = if (whatever == null) None else Some(whatever.length + 1)
val mayBeFoo = for {
s <- maybeComputeS(whatever)
n <- maybeComputeN(whatever)
} yield Foo(s, n)
/**
 * The Option[Symbol] returned by maybeComputeS and the Option[Int] returned
 * by maybeComputeN have no dependency on each other. Yet in mayBeFoo1 below,
 * maybeComputeN is only evaluated after maybeComputeS produces a value.
 *
 * When two independent computations should not be sequenced like that,
 * an applicative functor can combine them instead.
 */
val mayBeFoo1 = maybeComputeS(whatever).flatMap(s => maybeComputeN(whatever).map(n => Foo(s, n)))
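// A hedged sketch of the applicative-style combination the comment above
// alludes to: both effects are computed independently, then combined.
val mayBeFoo2: Option[Foo] =
  (maybeComputeS(whatever), maybeComputeN(whatever)) match {
    case (Some(s), Some(n)) => Some(Foo(s, n))
    case _ => None
  }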
//(Applicative[Option] apply (Functor[Option] map g)(Option(5)))(Option(10))
def main(args: Array[String]) {
println(mayBeFoo)
println(mayBeFoo1)
}
}
|
dongarerahul/FunctionalProgrammingInScala
|
src/patterns/ApplicativeVsMonad.scala
|
Scala
|
apache-2.0
| 1,256
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ly.stealth.mesos.exhibitor
import org.junit.Assert._
import org.junit.{Before, Test}
import play.api.libs.json.Json
import scala.collection.JavaConversions._
class ExhibitorServerTest extends MesosTestCase {
var server: ExhibitorServer = null
@Before
override def before() {
super.before()
server = new ExhibitorServer("0")
server.config.cpus = 0
server.config.mem = 0
}
@Test
def matches() {
// cpu
server.config.cpus = 0.5
assertEquals(None, server.matches(offer(cpus = 0.5)))
assertEquals(Some("cpus 0.49 < 0.5"), server.matches(offer(cpus = 0.49)))
server.config.cpus = 0
// mem
server.config.mem = 100
assertEquals(None, server.matches(offer(mem = 100)))
assertEquals(Some("mem 99.0 < 100.0"), server.matches(offer(mem = 99)))
server.config.mem = 0
//port
assertEquals(None, server.matches(offer(ports = "100")))
assertEquals(Some("no suitable port"), server.matches(offer(ports = "")))
}
@Test
def matchesHostname() {
assertEquals(None, server.matches(offer(hostname = "master")))
assertEquals(None, server.matches(offer(hostname = "slave0")))
// like
server.constraints.clear()
server.constraints += "hostname" -> List(Constraint("like:master"))
assertEquals(None, server.matches(offer(hostname = "master")))
assertEquals(Some("hostname doesn't match like:master"), server.matches(offer(hostname = "slave0")))
server.constraints.clear()
server.constraints += "hostname" -> List(Constraint("like:master.*"))
assertEquals(None, server.matches(offer(hostname = "master")))
assertEquals(None, server.matches(offer(hostname = "master-2")))
assertEquals(Some("hostname doesn't match like:master.*"), server.matches(offer(hostname = "slave0")))
// unique
server.constraints.clear()
server.constraints += "hostname" -> List(Constraint("unique"))
assertEquals(None, server.matches(offer(hostname = "master")))
assertEquals(Some("hostname doesn't match unique"), server.matches(offer(hostname = "master"), _ => List("master")))
assertEquals(None, server.matches(offer(hostname = "master"), _ => List("slave0")))
// multiple
server.constraints.clear()
server.constraints += "hostname" -> List(Constraint("unique"), Constraint("like:slave.*"))
assertEquals(None, server.matches(offer(hostname = "slave0")))
assertEquals(Some("hostname doesn't match unique"), server.matches(offer(hostname = "slave0"), _ => List("slave0")))
assertEquals(Some("hostname doesn't match like:slave.*"), server.matches(offer(hostname = "master")))
assertEquals(None, server.matches(offer(hostname = "slave0"), _ => List("master")))
}
@Test
def matchesAttributes() {
// like
server.constraints.clear()
server.constraints += "rack" -> List(Constraint("like:1-.*"))
assertEquals(None, server.matches(offer(attributes = "rack=1-1")))
assertEquals(None, server.matches(offer(attributes = "rack=1-2")))
assertEquals(Some("rack doesn't match like:1-.*"), server.matches(offer(attributes = "rack=2-1")))
// unique
server.constraints.clear()
server.constraints += "floor" -> List(Constraint("unique"))
assertEquals(None, server.matches(offer(attributes = "rack=1-1,floor=1")))
assertEquals(None, server.matches(offer(attributes = "rack=1-1,floor=1"), _ => List("2")))
assertEquals(Some("floor doesn't match unique"), server.matches(offer(attributes = "rack=1-1,floor=1"), _ => List("1")))
}
@Test
def idFromTaskId() {
assertEquals("0", ExhibitorServer.idFromTaskId(ExhibitorServer.nextTaskId("0")))
assertEquals("100", ExhibitorServer.idFromTaskId(ExhibitorServer.nextTaskId("100")))
}
@Test
def json() {
server.state = ExhibitorServer.Staging
server.constraints.clear()
server.constraints += "hostname" -> List(Constraint("unique"))
server.config.cpus = 1.2
server.config.mem = 2048
server.config.hostname = "slave0"
server.config.sharedConfigChangeBackoff = 5000
server.config.exhibitorConfig += "zkconfigconnect" -> "192.168.3.1:2181"
server.config.sharedConfigOverride += "zookeeper-install-directory" -> "/tmp/zookeeper"
val decoded = Json.toJson(server).as[ExhibitorServer]
ExhibitorServerTest.assertServerEquals(server, decoded)
}
@Test
def newExecutor() {
val exhibitor = ExhibitorServer("1")
exhibitor.config.cpus = 1.5
val executor = exhibitor.newExecutor("")
val command = executor.getCommand
assertTrue(command.getUrisCount > 0)
val cmd = command.getValue
assertTrue(cmd, cmd.contains(Executor.getClass.getName.replace("$", "")))
}
@Test
def newTask() {
val exhibitor = ExhibitorServer("1")
exhibitor.config.cpus = 1.5
exhibitor.config.mem = 1024
val offer = this.offer(slaveId = "slave0", hostname = "host", ports = "1000")
val task = exhibitor.createTask(offer)
assertEquals("slave0", task.getSlaveId.getValue)
assertNotNull(task.getExecutor)
val resources = task.getResourcesList.toList.map(res => res.getName -> res).toMap
val cpuResourceOpt = resources.get("cpus")
assertNotEquals(None, cpuResourceOpt)
val cpuResource = cpuResourceOpt.get
assertEquals(exhibitor.config.cpus, cpuResource.getScalar.getValue, 0.001)
val memResourceOpt = resources.get("mem")
assertNotEquals(None, memResourceOpt)
val memResource = memResourceOpt.get
assertEquals(exhibitor.config.mem, memResource.getScalar.getValue, 0.001)
val portsResourceOpt = resources.get("ports")
assertNotEquals(None, portsResourceOpt)
val portsResource = portsResourceOpt.get
assertEquals(1, portsResource.getRanges.getRangeCount)
val range = portsResource.getRanges.getRangeList.toList.head
assertEquals(1000, range.getBegin)
assertEquals(1000, range.getEnd)
}
@Test
def acceptOffer() {
val exhibitor = ExhibitorServer("1")
val offer = this.offer(cpus = exhibitor.config.cpus, mem = exhibitor.config.mem.toLong)
val allServersRunning = Scheduler.acceptOffer(offer)
assertEquals(allServersRunning, Some("all servers are running"))
Scheduler.cluster.servers += exhibitor
exhibitor.state = ExhibitorServer.Stopped
val accepted = Scheduler.acceptOffer(offer)
assertEquals(None, accepted)
assertEquals(1, schedulerDriver.launchedTasks.size())
assertEquals(0, schedulerDriver.killedTasks.size())
}
@Test
def ports() {
def port(taskPorts: String, offerPorts: String): Option[Long] = {
val exhibitor = ExhibitorServer("0")
exhibitor.config.ports = Util.Range.parseRanges(taskPorts)
val offer = this.offer(ports = offerPorts)
exhibitor.getPort(offer)
}
// any port
assertEquals(Some(31000), port("", "31000..32000"))
// overlapping single port
assertEquals(Some(31010), port("31010", "31000..32000"))
// overlapping port range
assertEquals(Some(31010), port("31010..31100", "31000..32000"))
// overlapping second port range
assertEquals(Some(31020), port("4000..4100,31020..31100", "31000..32000"))
// no match
assertEquals(None, port("4000..4100", "31000..32000"))
}
}
object ExhibitorServerTest {
def assertServerEquals(expected: ExhibitorServer, actual: ExhibitorServer) {
assertEquals(expected.state, actual.state)
assertEquals(expected.constraints, actual.constraints)
assertEquals(expected.config.cpus, actual.config.cpus, 0.001)
assertEquals(expected.config.mem, actual.config.mem, 0.001)
assertEquals(expected.config.hostname, actual.config.hostname)
assertEquals(expected.config.sharedConfigChangeBackoff, actual.config.sharedConfigChangeBackoff)
assertEquals(expected.config.exhibitorConfig, actual.config.exhibitorConfig)
assertEquals(expected.config.sharedConfigOverride, actual.config.sharedConfigOverride)
}
}
|
CiscoCloud/exhibitor-mesos-framework
|
src/main/test/ly/stealth/mesos/exhibitor/ExhibitorServerTest.scala
|
Scala
|
apache-2.0
| 8,702
|
/*
*************************************************************************************
* Copyright 2011 Normation SAS
*************************************************************************************
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU Affero GPL v3, the copyright holders add the following
* Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU Affero GPL v3
* licence, when you create a Related Module, this Related Module is
* not considered as a part of the work and may be distributed under the
* license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/agpl.html>.
*
*************************************************************************************
*/
package com.normation.rudder.migration
import java.sql.Timestamp
import scala.xml.NodeSeq
import java.sql.PreparedStatement
import java.sql.Connection
import scala.xml.Elem
/*
* Note: remember to use plain text here, as
* a failure on that test must rise wondering about
* a global value modification that was not taken
* into account in a migration.
*/
object MigrationTestLog {
//get a default TimeStamp value for that run
val defaultTimestamp = new Timestamp(System.currentTimeMillis)
}
case class MigrationTestLog(
id : Option[Long] = None
, eventType: String
, timestamp: Timestamp = MigrationTestLog.defaultTimestamp
, principal: String = "TestUser"
, cause : Option[Int] = None
, severity : Int = 100
, data : Elem
) {
def insertSql(c: Connection) : Long = {
//ignore cause id
val (row, qmark) = cause match {
  case Some(id) => (", causeId", ", ?")
  case None => ("", "")
}
val INSERT_SQL = "insert into EventLog (creationDate, principal, eventType, severity, data%s) values (?, ?, ?, ?, ?%s)".format(row, qmark)
val ps = c.prepareStatement(INSERT_SQL, Array("id"))
ps.setTimestamp(1, timestamp)
ps.setString(2, principal)
ps.setString(3, eventType)
ps.setInt(4, severity)
val sqlXml = c.createSQLXML()
sqlXml.setString(data.toString)
ps.setSQLXML(5, sqlXml)
cause.foreach { id =>
ps.setInt(6, id)
}
ps.executeUpdate
val rs = ps.getGeneratedKeys
rs.next
rs.getLong("id")
}
}
object Migration_2_DATA_EventLogs {
import com.normation.rudder.migration.{
Migration_2_DATA_Rule => RuleXml
, Migration_2_DATA_Other => OtherXml
, Migration_2_DATA_Directive => DirectiveXml
, Migration_2_DATA_Group => GroupXml
}
def e(xml:Elem) = <entry>{xml}</entry>
val data_2 = Map(
"rule_add" -> MigrationTestLog(
eventType = "RuleAdded"
, data = e(RuleXml.rule_add_2)
)
, "rule_modify" -> MigrationTestLog(
eventType = "RuleModified"
, data = e(RuleXml.rule_modify_2)
)
, "rule_delete" -> MigrationTestLog(
eventType = "RuleDeleted"
, data = e(RuleXml.rule_delete_2)
)
, "addPendingDeployment" -> MigrationTestLog(
eventType = "AutomaticStartDeployement"
, data = e(OtherXml.addPendingDeployment_2)
)
, "node_accept" -> MigrationTestLog(
eventType = "AcceptNode"
, data = e(OtherXml.node_accept_2)
)
, "node_refuse" -> MigrationTestLog(
eventType = "RefuseNode"
, data = e(OtherXml.node_refuse_2)
)
, "directive_add" -> MigrationTestLog(
eventType = "DirectiveAdded"
, data = e(DirectiveXml.directive_add_2)
)
, "directive_modify" -> MigrationTestLog(
eventType = "DirectiveModified"
, data = e(DirectiveXml.directive_modify_2)
)
, "directive_delete" -> MigrationTestLog(
eventType = "DirectiveDeleted"
, data = e(DirectiveXml.directive_delete_2)
)
, "nodeGroup_add" -> MigrationTestLog(
eventType = "NodeGroupAdded"
, data = e(GroupXml.nodeGroup_add_2)
)
, "nodeGroup_modify" -> MigrationTestLog(
eventType = "NodeGroupModified"
, data = e(GroupXml.nodeGroup_modify_2)
)
, "nodeGroup_delete" -> MigrationTestLog(
eventType = "NodeGroupDeleted"
, data = e(GroupXml.nodeGroup_delete_2)
)
)
}
object Migration_2_DATA_Other {
val addPendingDeployment_2 =
<addPendingDeployement alreadyPending="false" fileFormat="2"></addPendingDeployement>
val node_accept_2 =
<node action="accept" fileFormat="2">
<id>248c8e3d-1bf6-4bc1-9398-f8890b015a50</id>
<inventoryVersion>2011-10-13T11:43:52.907+02:00</inventoryVersion>
<hostname>centos-5-32</hostname>
<fullOsName>Centos</fullOsName>
<actorIp>127.0.0.1</actorIp>
</node>
val node_refuse_2 =
<node fileFormat="2" action="accept">
<id>248c8e3d-1bf6-4bc1-9398-f8890b015a50</id>
<inventoryVersion>2011-10-13T11:43:52.907+02:00</inventoryVersion>
<hostname>centos-5-32</hostname>
<fullOsName>Centos</fullOsName>
<actorIp>127.0.0.1</actorIp>
</node>
}
object Migration_2_DATA_Rule {
val rule_add_2 =
<rule fileFormat="2" changeType="add">
<id>e7c21276-d2b5-4fff-9924-96b67db9bd1c</id>
<displayName>configuration</displayName>
<serial>0</serial>
<target>group:f4b27025-b5a9-46fe-8289-cf9d56e07a8a</target>
<directiveIds>
<id>2813aeb2-6930-11e1-b052-0024e8cdea1f</id>
<id>2c1b0d34-6930-11e1-b901-0024e8cdea1f</id>
</directiveIds>
<shortDescription>configurationconfiguration</shortDescription>
<longDescription></longDescription>
<isEnabled>true</isEnabled>
<isSystem>false</isSystem>
</rule>
val rule_modify_2 =
<rule fileFormat="2" changeType="modify">
<id>39720027-952c-4e28-b774-9d5ce63f7a1e</id>
<displayName>Eutelsat CR Test</displayName>
<name>
<from>Eutelsat CR Test</from>
<to>Users and Fstabs CR</to>
</name>
<shortDescription>
<from>Test CR for Eutelsat</from>
<to>Test CR</to>
</shortDescription>
<longDescription>
<from></from>
<to>Test application of two (single) directives, with two multivalued section.</to>
</longDescription>
<target>
<from>
<none></none>
</from>
<to>group:383d521c-e5a7-4dc2-b402-21d425eefd30</to>
</target>
<directiveIds>
<from></from>
<to>
<id>0a50f415-a8da-42aa-9e86-eb045e289de3</id>
</to>
</directiveIds>
<isEnabled>
<from>false</from>
<to>true</to>
</isEnabled>
</rule>
val rule_delete_2 =
<rule fileFormat="2" changeType="delete">
<id>ad8c48f7-b278-4f0c-83d7-f9cb28e0d440</id>
<displayName>zada on SLES10</displayName>
<serial>2</serial>
<target>group:9bf723d9-0838-4af8-82f7-37912a5093ca</target>
<directiveIds>
<id>3fa24049-e673-475d-90ec-e5f9b6b81e38</id>
</directiveIds>
<shortDescription></shortDescription>
<longDescription></longDescription>
<isEnabled>true</isEnabled>
<isSystem>false</isSystem>
</rule>
}
object Migration_2_DATA_Directive {
val directive_add_2 =
<directive fileFormat="2" changeType="add">
<id>2fd5dd7e-c83b-4610-96ad-02002024c2f1</id>
<displayName>Name resolution 1</displayName>
<techniqueName>dnsConfiguration</techniqueName>
<techniqueVersion>1.0</techniqueVersion>
<section name="sections">
<section name="Nameserver settings">
<var name="DNS_RESOLVERS_EDIT">false</var>
<section name="DNS resolvers">
<var name="DNS_RESOLVERS">192.168.1.1</var>
</section>
<section name="DNS resolvers">
<var name="DNS_RESOLVERS">192.168.1.2</var>
</section>
</section>
<section name="Search suffix settings">
<var name="DNS_SEARCHLIST_EDIT">false</var>
<section name="DNS search list">
<var name="DNS_SEARCHLIST">example1.com</var>
</section>
<section name="DNS search list">
<var name="DNS_SEARCHLIST">example2.com</var>
</section>
<section name="DNS search list">
<var name="DNS_SEARCHLIST">example3.com</var>
</section>
</section>
</section>
<shortDescription></shortDescription>
<longDescription></longDescription>
<priority>5</priority>
<isEnabled>true</isEnabled>
<isSystem>false</isSystem>
</directive>
val directive_modify_2 =
<directive fileFormat="2" changeType="modify">
<id>70785952-d3b9-4d8e-9df4-1606af6d1ba3</id>
<techniqueName>createFilesFromList</techniqueName>
<displayName>creatFileTestPI</displayName>
<parameters>
<from>
<section name="sections">
<section name="File">
<var name="CREATEFILESFROMLIST_FILE">/tmp/anotherFile</var>
</section>
<section name="File">
<var name="CREATEFILESFROMLIST_FILE">/tmp/anotherFile2</var>
</section>
</section>
</from>
<to>
<section name="sections">
<section name="File">
<var name="CREATEFILESFROMLIST_FILE">/tmp/anotherFile</var>
</section>
<section name="File">
<var name="CREATEFILESFROMLIST_FILE">/tmp/anotherFile2</var>
</section>
<section name="File">
<var name="CREATEFILESFROMLIST_FILE">/tmp/anotherFile3</var>
</section>
</section>
</to>
</parameters>
</directive>
val directive_delete_2 =
<directive fileFormat="2" changeType="delete">
<id>2a79eabf-9987-450c-88bf-3c86d4759eb7</id>
<displayName>Edit crontabs to use "yada"</displayName>
<techniqueName>checkGenericFileContent</techniqueName>
<techniqueVersion>2.0</techniqueVersion>
<section name="sections">
<section name="File to manage">
<section name="File">
<var name="GENERIC_FILE_CONTENT_PATH">/var/spool/cron/tabs/root</var>
<var name="GENERIC_FILE_CONTENT_PAYLOAD">* * * * * /home/wimics/yada</var>
<var name="GENERIC_FILE_CONTENT_ENFORCE">false</var>
</section>
<section name="Permission adjustment">
<var name="GENERIC_FILE_CONTENT_PERMISSION_ADJUSTMENT">true</var>
<var name="GENERIC_FILE_CONTENT_OWNER">root</var>
<var name="GENERIC_FILE_CONTENT_GROUP">root</var>
<var name="GENERIC_FILE_CONTENT_PERM">644</var>
</section>
<section name="Post-modification hook">
<var name="GENERIC_FILE_CONTENT_POST_HOOK_RUN">false</var>
<var name="GENERIC_FILE_CONTENT_POST_HOOK_COMMAND"></var>
</section>
</section>
</section>
<shortDescription></shortDescription>
<longDescription></longDescription>
<priority>5</priority>
<isEnabled>true</isEnabled>
<isSystem>false</isSystem>
</directive>
}
object Migration_2_DATA_Group {
val nodeGroup_add_2 =
<nodeGroup fileFormat="2" changeType="add">
<id>a73220c8-c3e1-40f1-803b-55d21bc817ec</id>
<displayName>CentOS</displayName>
<description>CentOS Group</description>
<query></query>
<isDynamic>true</isDynamic>
<nodeIds></nodeIds>
<isEnabled>true</isEnabled>
<isSystem>false</isSystem>
</nodeGroup>
val nodeGroup_modify_2 =
<nodeGroup fileFormat="2" changeType="modify">
<id>hasPolicyServer-root</id>
<displayName>Root server group</displayName>
<nodeIds>
<from>
<id>root</id>
<id>248c8e3d-1bf6-4bc1-9398-f8890b015a50</id>
</from>
<to>
<id>root</id>
<id>248c8e3d-1bf6-4bc1-9398-f8890b015a50</id>
<id>06da3556-5204-4bd7-b3b0-fa5e7bcfbbea</id>
</to>
</nodeIds>
</nodeGroup>
val nodeGroup_delete_2 =
<nodeGroup fileFormat="2" changeType="delete">
<id>4e0e8d5e-c87a-445c-ac81-a0e7a2b9e5e6</id>
<displayName>All debian</displayName>
<description></description>
<query>
{{"select":"node","composition":"And","where":[{{"objectType":"node","attribute":"osName","comparator":"eq","value":"Debian"}}]}}
</query>
<isDynamic>true</isDynamic>
<nodeIds>
<id>b9a71482-5030-4699-984d-b03d28bbbf36</id>
<id>0876521e-3c81-4775-85c7-5dd7f9d5d3da</id>
</nodeIds>
<isEnabled>true</isEnabled>
<isSystem>false</isSystem>
</nodeGroup>
}
|
Kegeruneku/rudder
|
rudder-core/src/test/scala/com/normation/rudder/migration/XmlDataMigration_2.scala
|
Scala
|
agpl-3.0
| 13,715
|
package net.paploo.diestats.expression.evaluator
/**
* The base trait for evaluators of expressions.
*
* Defines functionality that is expected of every domain.
*
* @tparam A The domain type
* @tparam R The evaluation result type
*/
trait Evaluator[A, R] {
def fromValues(as: Iterable[A]): R
def convolve(x: R, y: R): R
def repeatedConvolve(n: Int, x: R): R = {
require(n > 0, s"repeatedConvolve must happen more than zero times, but got $n, when convolving over $x")
Seq.fill(n)(x).reduce(convolve)
}
}
object Evaluator {
def direct[A](random: java.util.Random)(implicit numeric: Integral[A]): DirectEvaluator[A] =
DirectEvaluator.numeric(random)
}
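// Illustrative sketch, not part of the library: a minimal Evaluator instance
// over Int whose result type simply enumerates all outcomes. The name
// `SeqEvaluator` and its outcome-list semantics are assumptions made for
// demonstration only.
object SeqEvaluator extends Evaluator[Int, Seq[Int]] {
  def fromValues(as: Iterable[Int]): Seq[Int] = as.toSeq
  // Convolution of two outcome lists is the cross-product of sums.
  def convolve(x: Seq[Int], y: Seq[Int]): Seq[Int] =
    for (a <- x; b <- y) yield a + b
}
// Usage: SeqEvaluator.repeatedConvolve(2, SeqEvaluator.fromValues(1 to 6))
// enumerates the 36 possible sums of two six-sided dice.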
|
paploo/DieStats
|
src/main/scala/net/paploo/diestats/expression/evaluator/Evaluator.scala
|
Scala
|
bsd-3-clause
| 694
|
package org.jetbrains.plugins.scala
package codeInspection.collections
import org.jetbrains.plugins.scala.codeInspection.InspectionBundle
import org.jetbrains.plugins.scala.lang.psi.api.expr.{ScExpression, ScMethodCall}
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory
import org.jetbrains.plugins.scala.lang.psi.types.api.FunctionType
import org.jetbrains.plugins.scala.lang.psi.types.result.{Success, Typeable}
/**
* Nikolay.Tropin
* 2014-05-05
*/
class MapGetOrElseInspection extends OperationOnCollectionInspection {
override def possibleSimplificationTypes: Array[SimplificationType] = Array(MapGetOrElse)
}
object MapGetOrElse extends SimplificationType() {
def hint: String = InspectionBundle.message("map.getOrElse.hint")
override def getSimplification(expr: ScExpression): Option[Simplification] = {
expr match {
case qual`.mapOnOption`(fun)`.getOrElse`(default) =>
replacementText(qual, fun, default) match {
case Some(newText) if checkTypes(qual, fun, newText) =>
val simplification = replace(expr).withText(newText).highlightFrom(qual)
Some(simplification)
case _ => None
}
case _ => None
}
}
def replacementText(qual: ScExpression, mapArg: ScExpression, goeArg: ScExpression): Option[String] = {
val firstArgText = argListText(Seq(goeArg))
val secondArgText = argListText(Seq(mapArg))
Some(s"${qual.getText}.fold $firstArgText$secondArgText")
}
def checkTypes(qual: ScExpression, mapArg: ScExpression, replacementText: String): Boolean = {
import qual.projectContext
val mapArgRetType = mapArg match {
case Typeable(FunctionType(retType, _)) => retType
case _ => return false
}
ScalaPsiElementFactory.createExpressionFromText(replacementText, qual.getContext) match {
case ScMethodCall(ScMethodCall(_, Seq(firstArg)), _) => mapArgRetType.conforms(firstArg.getType().getOrNothing)
case _ => false
}
}
def checkTypes(optionalBase: Option[ScExpression], mapArgs: Seq[ScExpression], getOrElseArgs: Seq[ScExpression]): Boolean = {
val (mapArg, getOrElseArg) = (mapArgs, getOrElseArgs) match {
case (Seq(a1), Seq(a2)) => (a1, a2)
case _ => return false
}
val baseExpr = optionalBase match {
case Some(e) => e
case _ => return false
}
val mapArgRetType = mapArg.getType() match {
case Success(FunctionType(retType, _), _) => retType
case _ => return false
}
import baseExpr.projectContext
val firstArgText = stripped(getOrElseArg).getText
val secondArgText = stripped(mapArg).getText
val newExprText = s"${baseExpr.getText}.fold {$firstArgText}{$secondArgText}"
ScalaPsiElementFactory.createExpressionFromText(newExprText, baseExpr.getContext) match {
case ScMethodCall(ScMethodCall(_, Seq(firstArg)), _) => mapArgRetType.conforms(firstArg.getType().getOrNothing)
case _ => false
}
}
}
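// Illustrative sketch, not part of the inspection: the rewrite it suggests
// replaces a map/getOrElse pair on Option with a single fold, for example:
object MapGetOrElseExample extends App {
  val before = Some("42").map(_.toInt).getOrElse(0) // flagged form
  val after  = Some("42").fold(0)(_.toInt)          // suggested replacement
  assert(before == after) // both evaluate to 42
}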
|
ilinum/intellij-scala
|
src/org/jetbrains/plugins/scala/codeInspection/collections/MapGetOrElseInspection.scala
|
Scala
|
apache-2.0
| 2,978
|
import org.scalacheck.{Gen, Arbitrary}
import org.specs2._
import org.scalacheck.Prop._
class ScalaCheckSpec extends Specification with ScalaCheck { def is = s2"""
A ScalaCheck property inside example ${ forAll { (i: Int) => i > 0 || i <= 0 } }
A `prop` method to create a property from a function
returning a match result
${ prop { (i: Int) => i must be_>(0) or be_<=(0) } }
returning a boolean value
${ prop { (i: Int) => i > 0 || i <= 0 } }
using an implication and a boolean value
${ prop { (i: Int) => (i > 0) ==> (i > 0)}}
Custom `Arbitrary` instance for a parameter ${ prop { (i: Int) => i must be_>(0) }.setArbitrary(positiveInts) }
Custom minimum number of ok tests ${ prop { (i:Int) => (i > 0) ==> (i > 0) }.set(minTestsOk = 50) }
"""
val positiveInts = Arbitrary(Gen.choose(1, 5))
}
|
jaceklaskowski/specs2-sandbox
|
src/test/scala/ScalaCheckSpec.scala
|
Scala
|
apache-2.0
| 853
|
/*
* Copyright 2014 Kevin Herron
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.digitalpetri.ethernetip.cip.logix
case class TemplateInstance(name: String, attributes: TemplateAttributes, members: Seq[TemplateMember])
case class TemplateAttributes(handle: Int, memberCount: Int, objectDefinitionSize: Int, structureSize: Int)
/**
* @param name the name of the member
*
* @param infoWord if the member is an atomic data type, the value is zero. If the member is an array data type, the
* value is the array size (max 65535). If the member is a Boolean data type, the value is the bit
* location (0-31; 0-7 if mapped to a SInt).
*
* @param symbolType the [[SymbolType]] (reserved, array/scalar, TagType or Template instance id).
*
* @param offset where the value is located in the stream of bytes returned by reading the parent structure.
*/
case class TemplateMember(name: String, infoWord: Int, symbolType: SymbolType, offset: Int) {
override def toString: String = f"$productPrefix($name, info=$infoWord, type=$symbolType, offset=$offset)"
}
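// Illustrative sketch (assumption, not taken from a real controller): a member
// describing a 10-element array, located 4 bytes into the parent structure.
// `resolvedType` is a placeholder for an actual SymbolType instance.
//
//   val member = TemplateMember("counts", infoWord = 10, symbolType = resolvedType, offset = 4)
//   // member.toString == "TemplateMember(counts, info=10, type=<resolvedType>, offset=4)"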
|
digitalpetri/scala-ethernet-ip
|
logix-services/src/main/scala/com/digitalpetri/ethernetip/cip/logix/TemplateInstance.scala
|
Scala
|
apache-2.0
| 1,641
|
package com.mlh.sprayswaggersample
import com.wordnik.swagger.annotations._
import javax.ws.rs.Path
import spray.routing.HttpService
import spray.json._
import spray.json.DefaultJsonProtocol._
import org.json4s.JsonAST.JObject
// check more here https://github.com/mhamrah/spray-swagger-sample/blob/master/src/main/scala/PetHttpService.scala
@Api(value = "/todo", description = "Operations about todos.", position = 0)
trait TodoListHttpService extends HttpService {
import Json4sSupport._
val routes = readRoute ~ updateRoute ~ deleteRoute ~ listRoute
var todos = scala.collection.mutable.Map[String, Todo]()
@ApiOperation(value = "Find a todo list entry by ID", notes = "Returns a todo based on ID", httpMethod = "GET", response = classOf[Todo])
@ApiImplicitParams(Array(
new ApiImplicitParam(name = "todoId", value = "ID of todo that needs to be fetched", required = true, dataType = "integer", paramType = "path")
))
@ApiResponses(Array(
new ApiResponse(code = 404, message = "Todo not found"),
new ApiResponse(code = 400, message = "Invalid ID supplied")
))
def readRoute = get { path("todo" / Segment) { id =>
//complete(Todo(id, "clean up"))
complete(todos.get(id))
}}
@ApiOperation(value = "Adds or updates a todo with form data.", notes = "", nickname = "updateTodoWithForm", httpMethod = "POST", consumes="application/json, application/vnd.custom.Todo")
@ApiImplicitParams(Array(
new ApiImplicitParam(name = "body", value = "Updated content of the todo.", dataType = "Todo", required = true, paramType="body")
))
@ApiResponses(Array(
new ApiResponse(code = 405, message = "Invalid input")
))
def updateRoute = post {
entity(as[JObject]) { someObject =>
val aTodo = someObject.extract[Todo]
todos.put(aTodo.id , aTodo)
complete(someObject)
}
}
@ApiOperation(value = "Deletes a Todo", nickname="deleteTodo", httpMethod = "DELETE")
@ApiImplicitParams(Array(
new ApiImplicitParam(name = "todoId", value = "Todo id to delete", required = true, dataType="string", paramType="path")
))
@ApiResponses(Array(
new ApiResponse(code = 400, message = "Invalid todo id")
))
def deleteRoute = delete {
path("todo" / Segment) {
id => {
todos.remove(id)
complete(s"Deleted $id")
}
}
}
@ApiOperation(value = "Find all todo list entries", notes = "Returns all todos", httpMethod = "GET", response = classOf[List[Todo]])
def listRoute = get { path("todo") {
complete(todos.toList)
}}
}
@ApiModel(description = "A Todo object")
case class Todo(id: String, content: String)
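// Illustrative sketch (assumption, not part of this sample): one conventional
// way to expose these routes from a spray service actor. The actor name
// `TodoServiceActor` is made up for the example.
class TodoServiceActor extends akka.actor.Actor with TodoListHttpService {
  def actorRefFactory = context  // route execution runs in this actor's context
  def receive = runRoute(routes) // runRoute is inherited from HttpService
}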
|
michaelgruczel/spray-swagger-angularjs-example
|
src/main/scala/TodoListHttpService.scala
|
Scala
|
mit
| 2,655
|
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2015-2021 Andre White.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.truthencode.ddo.model.feats
import io.truthencode.ddo.support.requisite.RequiresCharacterLevel
/**
* Created by adarr on 4/3/2017.
*/
protected[feats] trait BlindingSpeed extends Passive with RequiresCharacterLevel {
self: EpicFeat =>
override val requireCharacterLevel: Int = 27
}
|
adarro/ddo-calc
|
subprojects/common/ddo-core/src/main/scala/io/truthencode/ddo/model/feats/BlindingSpeed.scala
|
Scala
|
apache-2.0
| 973
|
package org.vaadin.addons.rinne.mixins
import com.vaadin.ui.{Alignment, Component, Layout}
trait LayoutAlignmentHandlerMixin {
this: Layout.AlignmentHandler =>
def componentAlignment(component: Component): Alignment = getComponentAlignment(component)
def componentAlignment_=(component: Component, alignment: Alignment): Unit =
setComponentAlignment(component, alignment)
def defaultComponentAlignment: Alignment = getDefaultComponentAlignment
def defaultComponentAlignment_=(defaultComponentAlignment: Alignment): Unit =
setDefaultComponentAlignment(defaultComponentAlignment)
}
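// Illustrative sketch (assumption): with this mixin applied, the Vaadin
// getters/setters read as Scala-style properties, e.g. for some `layout`
// implementing Layout.AlignmentHandler with this trait mixed in:
//
//   layout.defaultComponentAlignment = Alignment.MIDDLE_CENTER
//   val a = layout.componentAlignment(someComponent)
//
// Note the two-argument setter must still be called explicitly as
// `layout.componentAlignment_=(someComponent, Alignment.TOP_LEFT)`, since
// Scala assignment syntax only applies to single-argument setters.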
|
LukaszByczynski/rinne
|
src/main/scala/org/vaadin/addons/rinne/mixins/LayoutAlignmentHandlerMixin.scala
|
Scala
|
apache-2.0
| 605
|
// Project: angulate2 (https://github.com/jokade/angulate2)
// Description:
// Copyright (c) 2017 Johannes.Kastner <jokade@karchedon.de>
// Distributed under the MIT License (see included LICENSE file)
package angulate2.platformBrowser.animations
import scala.scalajs.js
import scala.scalajs.js.annotation.JSImport
@js.native
@JSImport("@angular/platform-browser-animations","BrowserAnimationsModule")
class BrowserAnimationsModule extends js.Object
|
jokade/angulate2
|
bindings/src/main/scala/angulate2/platformBrowser/animations/BrowserAnimationsModule.scala
|
Scala
|
mit
| 471
|
/**
* Copyright (C) 2016 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.fr.persistence.relational.search
import java.sql.Timestamp
import org.orbeon.oxf.externalcontext.{Credentials, Organization}
import org.orbeon.oxf.fr.permission.PermissionsAuthorization.{CheckWithDataUser, PermissionsCheck}
import org.orbeon.oxf.fr.permission._
import org.orbeon.oxf.fr.persistence.relational.RelationalUtils
import org.orbeon.oxf.fr.persistence.relational.RelationalUtils.Logger
import org.orbeon.oxf.fr.persistence.relational.Statement._
import org.orbeon.oxf.fr.persistence.relational.rest.{OrganizationId, OrganizationSupport}
import org.orbeon.oxf.fr.persistence.relational.search.adt.{Document, SearchPermissions, _}
import org.orbeon.oxf.fr.persistence.relational.search.part._
import org.orbeon.oxf.util.CollectionUtils._
import org.orbeon.oxf.util.CoreUtils._
import org.orbeon.oxf.util.SQLUtils._
import org.orbeon.scaxon.SimplePath._
import scala.collection.mutable
trait SearchLogic extends SearchRequest {
private val SearchOperationsLegacy = List("read", "update", "delete")
private val SearchOperations = List(Read, Update, Delete)
private def computePermissions(request: Request, user: Option[Credentials]): SearchPermissions = {
val formPermissionsElOpt = RelationalUtils.readFormPermissions(request.app, request.form)
val formPermissions = PermissionsXML.parse(formPermissionsElOpt.orNull)
def hasPermissionCond(cond: String): Boolean =
formPermissionsElOpt.exists(_.child("permission").child(cond).nonEmpty)
def authorizedIf(check: PermissionsCheck): Boolean = {
val authorized = PermissionsAuthorization.authorizedOperations(formPermissions, user, check)
Operations.allowsAny(authorized, SearchOperations)
}
SearchPermissions(
formPermissionsElOpt,
formPermissions,
authorizedBasedOnRole = {
val check = PermissionsAuthorization.CheckWithoutDataUser(optimistic = false)
val authorizedOperations = PermissionsAuthorization.authorizedOperations(formPermissions, user, check)
Operations.allowsAny(authorizedOperations, SearchOperations)
},
authorizedIfOrganizationMatch = SearchOps.authorizedIfOrganizationMatch(formPermissions, user),
authorizedIfUsername = hasPermissionCond("owner") .option(request.username).flatten,
authorizedIfGroup = hasPermissionCond("group-member").option(request.group).flatten
)
}
def doSearch(request: Request): (List[Document], Int) = {
val user = PermissionsAuthorization.currentUserFromSession
val permissions = computePermissions(request, user)
val hasNoPermissions =
! permissions.authorizedBasedOnRole &&
permissions.authorizedIfUsername.isEmpty &&
permissions.authorizedIfGroup.isEmpty
if (hasNoPermissions)
// There is no chance we can access any data, no need to run any SQL
(Nil, 0)
else
RelationalUtils.withConnection { connection ⇒
val commonParts = List(
commonPart (request),
draftsPart (request),
permissionsPart (permissions),
columnFilterPart (request),
freeTextFilterPart (request)
)
val searchCount = {
val innerSQL = buildQuery(commonParts)
val sql =
s"""SELECT count(*)
| FROM (
| $innerSQL
| ) a
""".stripMargin
Logger.logDebug("search total query", sql)
executeQuery(connection, sql, commonParts) { rs ⇒
rs.next()
rs.getInt(1)
}
}
// Build SQL and create statement
val parts =
commonParts :+
mySqlOrderForRowNumPart(request)
val sql = {
val innerSQL = buildQuery(parts)
val startOffsetZeroBased = (request.pageNumber - 1) * request.pageSize
// Use LEFT JOIN instead of regular join, in case the form doesn't have any control marked
// to be indexed, in which case there won't be anything for it in orbeon_i_control_text.
s""" SELECT c.*, t.control, t.pos, t.val
| FROM (
| $innerSQL
| ) c
| LEFT JOIN orbeon_i_control_text t
| ON c.data_id = t.data_id
| WHERE row_number
| BETWEEN ${startOffsetZeroBased + 1}
| AND ${startOffsetZeroBased + request.pageSize}
|""".stripMargin
}
Logger.logDebug("search items query", sql)
val documentsMetadataValues = executeQuery(connection, sql, parts) { documentsResultSet ⇒
Iterator.iterateWhile(
cond = documentsResultSet.next(),
elem = (
DocumentMetaData(
documentId = documentsResultSet.getString ("document_id"),
draft = documentsResultSet.getString ("draft") == "Y",
created = documentsResultSet.getTimestamp ("created"),
lastModifiedTime = documentsResultSet.getTimestamp ("last_modified_time"),
lastModifiedBy = documentsResultSet.getString ("last_modified_by"),
username = Option(documentsResultSet.getString ("username")),
groupname = Option(documentsResultSet.getString ("groupname")),
organizationId = RelationalUtils.getIntOpt(documentsResultSet, "organization_id")
),
DocumentValue(
control = documentsResultSet.getString ("control"),
pos = documentsResultSet.getInt ("pos"),
value = documentsResultSet.getString ("val")
)
)
)
.toList
// Group row by common metadata, since the metadata is repeated in the result set
.groupBy(_._1).mapValues(_.map(_._2)).toList
// Sort by last modified in descending order, as the call expects the result to be pre-sorted
.sortBy(_._1.lastModifiedTime)(Ordering[Timestamp].reverse)
}
// Compute possible operations for each document
val organizationsCache = mutable.Map[Int, Organization]()
val documents = documentsMetadataValues.map{ case (metadata, values) ⇒
def readFromDatabase(id: Int) = OrganizationSupport.read(connection, OrganizationId(id)).get
val organization = metadata.organizationId.map(id ⇒ organizationsCache.getOrElseUpdate(id, readFromDatabase(id)))
val check = CheckWithDataUser(metadata.username, metadata.groupname, organization)
val operations = PermissionsAuthorization.authorizedOperations(permissions.formPermissions, user, check)
Document(metadata, Operations.serialize(operations), values)
}
(documents, searchCount)
}
}
}
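// Illustrative sketch, not part of the trait: the row-number window used in the
// paginated query above, worked through for a hypothetical page 3 of size 10.
object PaginationWindowExample extends App {
  val pageNumber = 3
  val pageSize   = 10
  val startOffsetZeroBased = (pageNumber - 1) * pageSize // 20
  val firstRow = startOffsetZeroBased + 1                // 21
  val lastRow  = startOffsetZeroBased + pageSize         // 30
  println(s"row_number BETWEEN $firstRow AND $lastRow")  // rows 21..30 inclusive
}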
|
brunobuzzi/orbeon-forms
|
form-runner/jvm/src/main/scala/org/orbeon/oxf/fr/persistence/relational/search/SearchLogic.scala
|
Scala
|
lgpl-2.1
| 7,930
|
package sbt
/*
TODO:
- index all available AutoPlugins to get the tasks that will be added
- error message when a task doesn't exist that it would be provided by plugin x, enabled by natures y,z, blocked by a, b
*/
import logic.{Atom, Clause, Clauses, Formula, Literal, Logic, Negated}
import Logic.{CyclicNegation, InitialContradictions, InitialOverlap, LogicException}
import Def.Setting
import Plugins._
import annotation.tailrec
/**
* An AutoPlugin defines a group of settings and the conditions where the settings are automatically added to a build (called "activation").
* The `requires` and `trigger` methods together define the conditions, and a method like `projectSettings` defines the settings to add.
*
* Steps for plugin authors:
*
* 1. Determine if the `AutoPlugin` should automatically be activated when all requirements are met, or should be opt-in.
* 1. Determine the `AutoPlugin`s that, when present (or absent), act as the requirements for the `AutoPlugin`.
* 1. Determine the settings/configurations that the `AutoPlugin` injects when activated.
* 1. Determine the keys and other names to be automatically imported to `*.sbt` scripts.
*
* For example, the following will automatically add the settings in `projectSettings`
* to a project that has both the `Web` and `Javascript` plugins enabled.
*
* {{{
* object MyPlugin extends sbt.AutoPlugin {
* override def requires = Web && Javascript
* override def trigger = allRequirements
* override def projectSettings = Seq(...)
*
* object autoImport {
* lazy val obfuscate = taskKey[Seq[File]]("Obfuscates the source.")
* }
* }
* }}}
*
* Steps for users:
*
* 1. Add dependencies on plugins in `project/plugins.sbt` as usual with `addSbtPlugin`
* 1. Add key plugins to projects, which will automatically select the plugin + dependent plugin settings to add for those projects.
* 1. Exclude plugins, if desired.
*
* For example, given plugins Web and Javascript (perhaps provided by plugins added with `addSbtPlugin`),
*
* {{{
* myProject.enablePlugins(Web && Javascript)
* }}}
*
* will activate `MyPlugin` defined above and have its settings automatically added. If the user instead defines
* {{{
* myProject.enablePlugins(Web && Javascript).disablePlugins(MyPlugin)
* }}}
*
* then the `MyPlugin` settings (and anything that activates only when `MyPlugin` is activated) will not be added.
*/
abstract class AutoPlugin extends Plugins.Basic with PluginsFunctions {
/** Determines whether this AutoPlugin will be activated for this project when the `requires` clause is satisfied.
*
* When this method returns `allRequirements`, and `requires` method returns `Web && Javascript`, this plugin
* instance will be added automatically if the `Web` and `Javascript` plugins are enabled.
*
* When this method returns `noTrigger`, and `requires` method returns `Web && Javascript`, this plugin
* instance will be added only if the build user enables it, but it will automatically add both `Web` and `Javascript`. */
def trigger: PluginTrigger = noTrigger
/** This AutoPlugin requires the plugins the [[Plugins]] matcher returned by this method. See [[trigger]].
*/
def requires: Plugins = empty
val label: String = getClass.getName.stripSuffix("$")
override def toString: String = label
/** The [[Configuration]]s to add to each project that activates this AutoPlugin.*/
def projectConfigurations: Seq[Configuration] = Nil
/** The [[Setting]]s to add in the scope of each project that activates this AutoPlugin. */
def projectSettings: Seq[Setting[_]] = Nil
/** The [[Setting]]s to add to the build scope for each project that activates this AutoPlugin.
* The settings returned here are guaranteed to be added to a given build scope only once
* regardless of how many projects for that build activate this AutoPlugin. */
def buildSettings: Seq[Setting[_]] = Nil
/** The [[Setting]]s to add to the global scope exactly once if any project activates this AutoPlugin. */
def globalSettings: Seq[Setting[_]] = Nil
// TODO?: def commands: Seq[Command]
private[sbt] def unary_! : Exclude = Exclude(this)
/** If this plugin does not have any requirements, it means it is actually a root plugin. */
private[sbt] final def isRoot: Boolean =
requires match {
case Empty => true
case _ => false
}
/** If this plugin does not have any requirements, it means it is actually a root plugin. */
private[sbt] final def isAlwaysEnabled: Boolean =
isRoot && (trigger == AllRequirements)
}
/** An error that occurs when auto-plugins aren't configured properly.
* It translates the error from the underlying logic system to be targeted at end users. */
final class AutoPluginException private(val message: String, val origin: Option[LogicException]) extends RuntimeException(message) {
/** Prepends `p` to the error message derived from `origin`. */
def withPrefix(p: String) = new AutoPluginException(p + message, origin)
}
object AutoPluginException {
def apply(msg: String): AutoPluginException = new AutoPluginException(msg, None)
def apply(origin: LogicException): AutoPluginException = new AutoPluginException(Plugins.translateMessage(origin), Some(origin))
}
sealed trait PluginTrigger
case object AllRequirements extends PluginTrigger
case object NoTrigger extends PluginTrigger
/** An expression that matches `AutoPlugin`s. */
sealed trait Plugins {
def && (o: Basic): Plugins
}
sealed trait PluginsFunctions {
/** [[Plugins]] instance that doesn't require any [[Plugins]]s. */
def empty: Plugins = Plugins.Empty
/** This plugin is activated when all required plugins are present. */
def allRequirements: PluginTrigger = AllRequirements
/** This plugin is activated only when it is manually activated. */
def noTrigger: PluginTrigger = NoTrigger
}
object Plugins extends PluginsFunctions {
/** Given the available auto plugins `defined`, returns a function that selects [[AutoPlugin]]s for the provided [[AutoPlugin]]s.
* The [[AutoPlugin]]s are topologically sorted so that a required [[AutoPlugin]] comes before its requiring [[AutoPlugin]].*/
def deducer(defined0: List[AutoPlugin]): (Plugins, Logger) => Seq[AutoPlugin] =
if(defined0.isEmpty) (_, _) => Nil
else {
// TODO: defined should return all the plugins
val allReqs = (defined0 flatMap { asRequirements }).toSet
val diff = allReqs diff defined0.toSet
val defined = if (diff.nonEmpty) diff.toList ::: defined0
else defined0
val byAtom = defined map { x => (Atom(x.label), x) }
val byAtomMap = byAtom.toMap
if(byAtom.size != byAtomMap.size) duplicateProvidesError(byAtom)
// Ignore clauses for plugins that do not require anything else.
// Avoids the requirement for pure Nature strings *and* possible
// circular dependencies in the logic.
val allRequirementsClause = defined.filterNot(_.isRoot).flatMap(d => asRequirementsClauses(d))
val allEnabledByClause = defined.filterNot(_.isRoot).flatMap(d => asEnabledByClauses(d))
// Note: Here is where the function begins. We're given a list of plugins now.
(requestedPlugins, log) => {
def explicitlyDisabled(p: AutoPlugin): Boolean = hasExclude(requestedPlugins, p)
val alwaysEnabled: List[AutoPlugin] = defined.filter(_.isAlwaysEnabled).filterNot(explicitlyDisabled)
val knowledge0: Set[Atom] = ((flatten(requestedPlugins) ++ alwaysEnabled) collect {
case x: AutoPlugin => Atom(x.label)
}).toSet
val clauses = Clauses((allRequirementsClause ::: allEnabledByClause) filterNot { _.head subsetOf knowledge0 })
log.debug(s"deducing auto plugins based on known facts ${knowledge0.toString} and clauses ${clauses.toString}")
Logic.reduce(clauses, (flattenConvert(requestedPlugins) ++ convertAll(alwaysEnabled)).toSet) match {
case Left(problem) => throw AutoPluginException(problem)
case Right(results) =>
log.debug(s" :: deduced result: ${results}")
val selectedAtoms: List[Atom] = results.ordered
val selectedPlugins = selectedAtoms map { a =>
byAtomMap.getOrElse(a, throw AutoPluginException(s"${a} was not found in atom map."))
}
val forbidden: Set[AutoPlugin] = (selectedPlugins flatMap { Plugins.asExclusions }).toSet
val c = selectedPlugins.toSet & forbidden
if (c.nonEmpty) {
exclusionConflictError(requestedPlugins, selectedPlugins, c.toSeq sortBy {_.label})
}
val retval = topologicalSort(selectedPlugins, log)
log.debug(s" :: sorted deduced result: ${retval.toString}")
retval
}
}
}
private[sbt] def topologicalSort(ns: List[AutoPlugin], log: Logger): List[AutoPlugin] = {
log.debug(s"sorting: ns: ${ns.toString}")
@tailrec def doSort(found0: List[AutoPlugin], notFound0: List[AutoPlugin], limit0: Int): List[AutoPlugin] = {
log.debug(s" :: sorting:: found: ${found0.toString} not found ${notFound0.toString}")
if (limit0 < 0) throw AutoPluginException(s"Failed to sort ${ns} topologically")
else if (notFound0.isEmpty) found0
else {
val (found1, notFound1) = notFound0 partition { n => asRequirements(n).toSet subsetOf found0.toSet }
doSort(found0 ::: found1, notFound1, limit0 - 1)
}
}
val (roots, nonRoots) = ns partition (_.isRoot)
doSort(roots, nonRoots, ns.size * ns.size + 1)
}
private[sbt] def translateMessage(e: LogicException) = e match {
case ic: InitialContradictions => s"Contradiction in selected plugins. These plugins were both included and excluded: ${literalsString(ic.literals.toSeq)}"
case io: InitialOverlap => s"Cannot directly enable plugins. Plugins are enabled when their required plugins are satisfied. The directly selected plugins were: ${literalsString(io.literals.toSeq)}"
case cn: CyclicNegation => s"Cycles in plugin requirements cannot involve excludes. The problematic cycle is: ${literalsString(cn.cycle)}"
}
private[this] def literalsString(lits: Seq[Literal]): String =
lits map { case Atom(l) => l; case Negated(Atom(l)) => l } mkString(", ")
private[this] def duplicateProvidesError(byAtom: Seq[(Atom, AutoPlugin)]) {
val dupsByAtom = byAtom.groupBy(_._1).mapValues(_.map(_._2))
val dupStrings = for( (atom, dups) <- dupsByAtom if dups.size > 1 ) yield
s"${atom.label} by ${dups.mkString(", ")}"
val (ns, nl) = if(dupStrings.size > 1) ("s", "\\n\\t") else ("", " ")
val message = s"Plugin$ns provided by multiple AutoPlugins:$nl${dupStrings.mkString(nl)}"
throw AutoPluginException(message)
}
private[this] def exclusionConflictError(requested: Plugins, selected: Seq[AutoPlugin], conflicting: Seq[AutoPlugin]) {
def listConflicts(ns: Seq[AutoPlugin]) = (ns map { c =>
val reasons = (if (flatten(requested) contains c) List("requested")
else Nil) ++
(if (c.requires != empty && c.trigger == allRequirements) List(s"enabled by ${c.requires.toString}")
else Nil) ++
{
val reqs = selected filter { x => asRequirements(x) contains c }
if (reqs.nonEmpty) List(s"""required by ${reqs.mkString(", ")}""")
else Nil
} ++
{
val exs = selected filter { x => asExclusions(x) contains c }
if (exs.nonEmpty) List(s"""excluded by ${exs.mkString(", ")}""")
else Nil
}
s""" - conflict: ${c.label} is ${reasons.mkString("; ")}"""
}).mkString("\\n")
throw AutoPluginException(s"""Contradiction in enabled plugins:
- requested: ${requested.toString}
- enabled: ${selected.mkString(", ")}
${listConflicts(conflicting)}""")
}
private[sbt] final object Empty extends Plugins {
def &&(o: Basic): Plugins = o
override def toString = "<none>"
}
/** An included or excluded Nature/Plugin. */
// TODO: better name than Basic. Also, can we dump this class
sealed abstract class Basic extends Plugins {
def &&(o: Basic): Plugins = And(this :: o :: Nil)
}
private[sbt] final case class Exclude(n: AutoPlugin) extends Basic {
override def toString = s"!$n"
}
private[sbt] final case class And(plugins: List[Basic]) extends Plugins {
def &&(o: Basic): Plugins = And(o :: plugins)
override def toString = plugins.mkString(" && ")
}
private[sbt] def and(a: Plugins, b: Plugins) = b match {
case Empty => a
case And(ns) => (a /: ns)(_ && _)
case b: Basic => a && b
}
private[sbt] def remove(a: Plugins, del: Set[Basic]): Plugins = a match {
case b: Basic => if(del(b)) Empty else b
case Empty => Empty
case And(ns) =>
val removed = ns.filterNot(del)
if(removed.isEmpty) Empty else And(removed)
}
/** Defines enabled-by clauses for `ap`. */
private[sbt] def asEnabledByClauses(ap: AutoPlugin): List[Clause] =
// `ap` is the head and the required plugins for `ap` is the body.
if (ap.trigger == AllRequirements) Clause( convert(ap.requires), Set(Atom(ap.label)) ) :: Nil
else Nil
/** Defines requirements clauses for `ap`. */
private[sbt] def asRequirementsClauses(ap: AutoPlugin): List[Clause] =
// required plugin is the head and `ap` is the body.
asRequirements(ap) map { x => Clause( convert(ap), Set(Atom(x.label)) ) }
private[sbt] def asRequirements(ap: AutoPlugin): List[AutoPlugin] = flatten(ap.requires).toList collect {
case x: AutoPlugin => x
}
private[sbt] def asExclusions(ap: AutoPlugin): List[AutoPlugin] = flatten(ap.requires).toList collect {
case Exclude(x) => x
}
// TODO - This doesn't handle nested AND boolean logic...
private[sbt] def hasExclude(n: Plugins, p: AutoPlugin): Boolean = n match {
case `p` => false
case Exclude(`p`) => true
// TODO - This is stupidly advanced. We do a nested check through possible and-ed
// lists of plugins exclusions to see if the plugin ever winds up in an excluded=true case.
// This would handle things like !!p or !(p && z)
case Exclude(n) => hasInclude(n, p)
case And(ns) => ns.forall(n => hasExclude(n, p))
case b: Basic => false
case Empty => false
}
private[sbt] def hasInclude(n: Plugins, p: AutoPlugin): Boolean = n match {
case `p` => true
case Exclude(n) => hasExclude(n, p)
case And(ns) => ns.forall(n => hasInclude(n, p))
case b: Basic => false
case Empty => false
}
private[this] def flattenConvert(n: Plugins): Seq[Literal] = n match {
case And(ns) => convertAll(ns)
case b: Basic => convertBasic(b) :: Nil
case Empty => Nil
}
private[sbt] def flatten(n: Plugins): Seq[Basic] = n match {
case And(ns) => ns
case b: Basic => b :: Nil
case Empty => Nil
}
private[this] def convert(n: Plugins): Formula = n match {
case And(ns) => convertAll(ns).reduce[Formula](_ && _)
case b: Basic => convertBasic(b)
case Empty => Formula.True
}
private[this] def convertBasic(b: Basic): Literal = b match {
case Exclude(n) => !convertBasic(n)
case a: AutoPlugin => Atom(a.label)
}
private[this] def convertAll(ns: Seq[Basic]): Seq[Literal] = ns map convertBasic
/** True if the trigger clause `n` is satisfied by `model`. */
def satisfied(n: Plugins, model: Set[AutoPlugin]): Boolean =
flatten(n) forall {
case Exclude(a) => !model(a)
case ap: AutoPlugin => model(ap)
}
private[sbt] def hasAutoImportGetter(ap: AutoPlugin, loader: ClassLoader): Boolean = {
import reflect.runtime.{universe => ru}
import util.control.Exception.catching
val m = ru.runtimeMirror(loader)
val im = m.reflect(ap)
val hasGetterOpt = catching(classOf[ScalaReflectionException]) opt {
im.symbol.asType.toType.declaration(ru.newTermName("autoImport")) match {
case ru.NoSymbol => false
case sym => sym.asTerm.isGetter || sym.asTerm.isModule
}
}
hasGetterOpt getOrElse false
}
}
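// Illustrative sketch (assumption): how `Plugins.satisfied` evaluates a trigger
// clause against the set of enabled plugins. `Web` and `Javascript` stand for
// hypothetical AutoPlugin instances like those in the AutoPlugin scaladoc above.
//
//   Plugins.satisfied(Web && Javascript, Set(Web, Javascript)) // true: both enabled
//   Plugins.satisfied(Web && Javascript, Set(Web))             // false: Javascript missing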
|
pdalpra/sbt
|
main/src/main/scala/sbt/Plugins.scala
|
Scala
|
bsd-3-clause
| 16,090
|
/*
* Copyright 2015 Heiko Seeberger
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.heikoseeberger.sbtheader
import de.heikoseeberger.sbtheader.LicenseStyle.{ Detailed, SpdxSyntax }
sealed trait License {
def text: String
}
sealed trait SpdxLicense {
def spdxIdentifier: String
def apply(yyyy: String, copyrightOwner: String, licenseStyle: LicenseStyle = Detailed): License
}
object License {
private[sbtheader] val spdxLicenses =
Vector(
ALv2,
MIT,
MPLv2,
BSD2Clause,
BSD3Clause,
GPLv3OrLater,
GPLv3Only,
GPLv3,
LGPLv3OrLater,
LGPLv3Only,
LGPLv3,
AGPLv3OrLater,
AGPLv3Only,
AGPLv3
)
private[sbtheader] def buildSpdxSyntax(
yyyy: String,
copyrightOwner: String,
spdxIdentifier: String
): String =
s"""|Copyright $yyyy $copyrightOwner
|
|SPDX-License-Identifier: $spdxIdentifier
|""".stripMargin
final case object ALv2 extends SpdxLicense {
override val spdxIdentifier: String =
"Apache-2.0"
override def apply(yyyy: String, copyrightOwner: String, licenseStyle: LicenseStyle): License =
new ALv2(yyyy, copyrightOwner, licenseStyle)
}
final class ALv2(yyyy: String, copyrightOwner: String, licenseStyle: LicenseStyle)
extends License {
override val text: String =
licenseStyle match {
case SpdxSyntax =>
buildSpdxSyntax(yyyy, copyrightOwner, ALv2.spdxIdentifier)
case Detailed =>
s"""|Copyright $yyyy $copyrightOwner
|
|Licensed under the Apache License, Version 2.0 (the "License");
|you may not use this file except in compliance with the License.
|You may obtain a copy of the License at
|
| http://www.apache.org/licenses/LICENSE-2.0
|
|Unless required by applicable law or agreed to in writing, software
|distributed under the License is distributed on an "AS IS" BASIS,
|WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|See the License for the specific language governing permissions and
|limitations under the License.
|""".stripMargin
}
}
final case object MIT extends SpdxLicense {
override val spdxIdentifier: String =
"MIT"
override def apply(yyyy: String, copyrightOwner: String, licenseStyle: LicenseStyle) =
new MIT(yyyy, copyrightOwner, licenseStyle)
}
final class MIT(yyyy: String, copyrightOwner: String, licenseStyle: LicenseStyle)
extends License {
override val text: String =
licenseStyle match {
case SpdxSyntax =>
buildSpdxSyntax(yyyy, copyrightOwner, MIT.spdxIdentifier)
case Detailed =>
s"""|Copyright (c) $yyyy $copyrightOwner
|
|Permission is hereby granted, free of charge, to any person obtaining a copy of
|this software and associated documentation files (the "Software"), to deal in
|the Software without restriction, including without limitation the rights to
|use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|the Software, and to permit persons to whom the Software is furnished to do so,
|subject to the following conditions:
|
|The above copyright notice and this permission notice shall be included in all
|copies or substantial portions of the Software.
|
|THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|""".stripMargin
}
}
final case object MPLv2 extends SpdxLicense {
override val spdxIdentifier: String =
"MPL-2.0"
override def apply(yyyy: String, copyrightOwner: String, licenseStyle: LicenseStyle): License =
new MPLv2(yyyy, copyrightOwner, licenseStyle)
}
final class MPLv2(yyyy: String, copyrightOwner: String, licenseStyle: LicenseStyle)
extends License {
override val text: String =
licenseStyle match {
case SpdxSyntax =>
buildSpdxSyntax(yyyy, copyrightOwner, MPLv2.spdxIdentifier)
case Detailed =>
s"""|Copyright (c) $yyyy $copyrightOwner
|
|This Source Code Form is subject to the terms of the Mozilla Public
|License, v. 2.0. If a copy of the MPL was not distributed with this
|file, You can obtain one at http://mozilla.org/MPL/2.0/.
|""".stripMargin
}
}
final object MPLv2_NoCopyright extends License {
override val text: String =
s"""|This Source Code Form is subject to the terms of the Mozilla Public
|License, v. 2.0. If a copy of the MPL was not distributed with this
|file, You can obtain one at http://mozilla.org/MPL/2.0/.
|""".stripMargin
}
final case object BSD2Clause extends SpdxLicense {
override val spdxIdentifier: String =
"BSD-2-Clause"
override def apply(yyyy: String, copyrightOwner: String, licenseStyle: LicenseStyle): License =
new BSD2Clause(yyyy, copyrightOwner, licenseStyle)
}
final class BSD2Clause(yyyy: String, copyrightOwner: String, licenseStyle: LicenseStyle)
extends License {
override val text: String =
licenseStyle match {
case SpdxSyntax =>
buildSpdxSyntax(yyyy, copyrightOwner, BSD2Clause.spdxIdentifier)
case Detailed =>
s"""|Copyright (c) $yyyy, $copyrightOwner
|All rights reserved.
|
|Redistribution and use in source and binary forms, with or without modification,
|are permitted provided that the following conditions are met:
|
|1. Redistributions of source code must retain the above copyright notice, this
| list of conditions and the following disclaimer.
|
|2. Redistributions in binary form must reproduce the above copyright notice,
| this list of conditions and the following disclaimer in the documentation
| and/or other materials provided with the distribution.
|
|THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|""".stripMargin
}
}
final case object BSD3Clause extends SpdxLicense {
override val spdxIdentifier: String =
"BSD-3-Clause"
override def apply(yyyy: String, copyrightOwner: String, licenseStyle: LicenseStyle): License =
new BSD3Clause(yyyy, copyrightOwner, licenseStyle)
}
final class BSD3Clause(yyyy: String, copyrightOwner: String, licenseStyle: LicenseStyle)
extends License {
override val text: String =
licenseStyle match {
case SpdxSyntax =>
buildSpdxSyntax(yyyy, copyrightOwner, BSD3Clause.spdxIdentifier)
case Detailed =>
s"""|Copyright (c) $yyyy, $copyrightOwner
|All rights reserved.
|
|Redistribution and use in source and binary forms, with or without modification,
|are permitted provided that the following conditions are met:
|
|1. Redistributions of source code must retain the above copyright notice, this
| list of conditions and the following disclaimer.
|
|2. Redistributions in binary form must reproduce the above copyright notice,
| this list of conditions and the following disclaimer in the documentation
| and/or other materials provided with the distribution.
|
|3. Neither the name of the copyright holder nor the names of its contributors
| may be used to endorse or promote products derived from this software without
| specific prior written permission.
|
|THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|""".stripMargin
}
}
final case object GPLv3OrLater extends SpdxLicense {
override val spdxIdentifier: String =
"GPL-3.0-or-later"
override def apply(yyyy: String, copyrightOwner: String, licenseStyle: LicenseStyle): License =
new GPLv3OrLater(yyyy, copyrightOwner, licenseStyle)
def detailed(yyyy: String, copyrightOwner: String): String =
s"""|Copyright (C) $yyyy $copyrightOwner
|
|This program is free software: you can redistribute it and/or modify
|it under the terms of the GNU General Public License as published by
|the Free Software Foundation, either version 3 of the License, or
|(at your option) any later version.
|
|This program is distributed in the hope that it will be useful,
|but WITHOUT ANY WARRANTY; without even the implied warranty of
|MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|GNU General Public License for more details.
|
|You should have received a copy of the GNU General Public License
|along with this program. If not, see <http://www.gnu.org/licenses/>.
|""".stripMargin
}
final class GPLv3OrLater(yyyy: String, copyrightOwner: String, licenseStyle: LicenseStyle)
extends License {
override val text: String =
licenseStyle match {
case SpdxSyntax => buildSpdxSyntax(yyyy, copyrightOwner, GPLv3OrLater.spdxIdentifier)
case Detailed => GPLv3OrLater.detailed(yyyy, copyrightOwner)
}
}
final case object GPLv3Only extends SpdxLicense {
override val spdxIdentifier: String =
"GPL-3.0-only"
override def apply(yyyy: String, copyrightOwner: String, licenseStyle: LicenseStyle): License =
new GPLv3Only(yyyy, copyrightOwner, licenseStyle)
}
final class GPLv3Only(yyyy: String, copyrightOwner: String, licenseStyle: LicenseStyle)
extends License {
override val text: String =
licenseStyle match {
case SpdxSyntax =>
buildSpdxSyntax(yyyy, copyrightOwner, GPLv3Only.spdxIdentifier)
case Detailed =>
s"""|Copyright (C) $yyyy $copyrightOwner
|
|This program is free software: you can redistribute it and/or modify
|it under the terms of the GNU General Public License as published by
|the Free Software Foundation, version 3.
|
|This program is distributed in the hope that it will be useful,
|but WITHOUT ANY WARRANTY; without even the implied warranty of
|MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|GNU General Public License for more details.
|
|You should have received a copy of the GNU General Public License
|along with this program. If not, see <http://www.gnu.org/licenses/>.
|""".stripMargin
}
}
final case object GPLv3 extends SpdxLicense {
override val spdxIdentifier: String =
"GPL-3.0"
override def apply(yyyy: String, copyrightOwner: String, licenseStyle: LicenseStyle): License =
new GPLv3(yyyy, copyrightOwner, licenseStyle)
}
final class GPLv3(yyyy: String, copyrightOwner: String, licenseStyle: LicenseStyle)
extends License {
override val text: String =
licenseStyle match {
case SpdxSyntax => buildSpdxSyntax(yyyy, copyrightOwner, GPLv3.spdxIdentifier)
case Detailed => GPLv3OrLater.detailed(yyyy, copyrightOwner)
}
}
final case object LGPLv3OrLater extends SpdxLicense {
override val spdxIdentifier: String =
"LGPL-3.0-or-later"
override def apply(yyyy: String, copyrightOwner: String, licenseStyle: LicenseStyle): License =
new LGPLv3OrLater(yyyy, copyrightOwner, licenseStyle)
def detailed(yyyy: String, copyrightOwner: String): String =
s"""|Copyright (C) $yyyy $copyrightOwner
|
|This program is free software: you can redistribute it and/or modify
|it under the terms of the GNU Lesser General Public License as published
|by the Free Software Foundation, either version 3 of the License, or
|(at your option) any later version.
|
|This program is distributed in the hope that it will be useful,
|but WITHOUT ANY WARRANTY; without even the implied warranty of
|MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|GNU Lesser General Public License for more details.
|
|You should have received a copy of the GNU Lesser General Public License
|along with this program. If not, see <http://www.gnu.org/licenses/>.
|""".stripMargin
}
final class LGPLv3OrLater(yyyy: String, copyrightOwner: String, licenseStyle: LicenseStyle)
extends License {
override val text: String =
licenseStyle match {
case SpdxSyntax => buildSpdxSyntax(yyyy, copyrightOwner, LGPLv3OrLater.spdxIdentifier)
case Detailed => LGPLv3OrLater.detailed(yyyy, copyrightOwner)
}
}
final case object LGPLv3Only extends SpdxLicense {
override def spdxIdentifier: String =
"LGPL-3.0-only"
override def apply(yyyy: String, copyrightOwner: String, licenseStyle: LicenseStyle): License =
new LGPLv3Only(yyyy, copyrightOwner, licenseStyle)
}
final class LGPLv3Only(yyyy: String, copyrightOwner: String, licenseStyle: LicenseStyle)
extends License {
override val text: String =
licenseStyle match {
case SpdxSyntax =>
buildSpdxSyntax(yyyy, copyrightOwner, LGPLv3Only.spdxIdentifier)
case Detailed =>
s"""|Copyright (C) $yyyy $copyrightOwner
|
|This program is free software: you can redistribute it and/or modify
|it under the terms of the GNU Lesser General Public License as published
|by the Free Software Foundation, version 3.
|
|This program is distributed in the hope that it will be useful,
|but WITHOUT ANY WARRANTY; without even the implied warranty of
|MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|GNU Lesser General Public License for more details.
|
|You should have received a copy of the GNU Lesser General Public License
|along with this program. If not, see <http://www.gnu.org/licenses/>.
|""".stripMargin
}
}
final case object LGPLv3 extends SpdxLicense {
override def spdxIdentifier: String =
"LGPL-3.0"
override def apply(yyyy: String, copyrightOwner: String, licenseStyle: LicenseStyle): License =
new LGPLv3(yyyy, copyrightOwner, licenseStyle)
}
final class LGPLv3(yyyy: String, copyrightOwner: String, licenseStyle: LicenseStyle)
extends License {
override val text: String =
licenseStyle match {
case SpdxSyntax => buildSpdxSyntax(yyyy, copyrightOwner, LGPLv3.spdxIdentifier)
case Detailed => LGPLv3OrLater.detailed(yyyy, copyrightOwner)
}
}
final case object AGPLv3OrLater extends SpdxLicense {
override def spdxIdentifier: String =
"AGPL-3.0-or-later"
override def apply(yyyy: String, copyrightOwner: String, licenseStyle: LicenseStyle): License =
new AGPLv3OrLater(yyyy, copyrightOwner, licenseStyle)
def detailed(yyyy: String, copyrightOwner: String): String =
s"""|Copyright (C) $yyyy $copyrightOwner
|
|This program is free software: you can redistribute it and/or modify
|it under the terms of the GNU Affero General Public License as
|published by the Free Software Foundation, either version 3 of the
|License, or (at your option) any later version.
|
|This program is distributed in the hope that it will be useful,
|but WITHOUT ANY WARRANTY; without even the implied warranty of
|MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|GNU Affero General Public License for more details.
|
|You should have received a copy of the GNU Affero General Public License
|along with this program. If not, see <http://www.gnu.org/licenses/>.
|""".stripMargin
}
final class AGPLv3OrLater(yyyy: String, copyrightOwner: String, licenseStyle: LicenseStyle)
extends License {
override val text: String =
licenseStyle match {
case SpdxSyntax => buildSpdxSyntax(yyyy, copyrightOwner, AGPLv3OrLater.spdxIdentifier)
case Detailed => AGPLv3OrLater.detailed(yyyy, copyrightOwner)
}
}
final case object AGPLv3Only extends SpdxLicense {
override val spdxIdentifier: String =
"AGPL-3.0-only"
override def apply(yyyy: String, copyrightOwner: String, licenseStyle: LicenseStyle): License =
new AGPLv3Only(yyyy, copyrightOwner, licenseStyle)
}
final class AGPLv3Only(yyyy: String, copyrightOwner: String, licenseStyle: LicenseStyle)
extends License {
override val text: String =
licenseStyle match {
case SpdxSyntax =>
buildSpdxSyntax(yyyy, copyrightOwner, AGPLv3Only.spdxIdentifier)
case Detailed =>
s"""|Copyright (C) $yyyy $copyrightOwner
|
|This program is free software: you can redistribute it and/or modify
|it under the terms of the GNU Affero General Public License as
|published by the Free Software Foundation, version 3.
|
|This program is distributed in the hope that it will be useful,
|but WITHOUT ANY WARRANTY; without even the implied warranty of
|MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|GNU Affero General Public License for more details.
|
|You should have received a copy of the GNU Affero General Public License
|along with this program. If not, see <http://www.gnu.org/licenses/>.
|""".stripMargin
}
}
final case object AGPLv3 extends SpdxLicense {
override val spdxIdentifier: String =
"AGPL-3.0"
override def apply(yyyy: String, copyrightOwner: String, licenseStyle: LicenseStyle): License =
new AGPLv3(yyyy, copyrightOwner, licenseStyle)
}
final class AGPLv3(yyyy: String, copyrightOwner: String, licenseStyle: LicenseStyle)
extends License {
override val text: String =
licenseStyle match {
case SpdxSyntax => buildSpdxSyntax(yyyy, copyrightOwner, AGPLv3.spdxIdentifier)
case Detailed => AGPLv3OrLater.detailed(yyyy, copyrightOwner)
}
}
final case class Custom(text: String) extends License
}
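// Illustrative sketch, not part of the plugin: rendering header text for the
// Apache license in both supported styles. "Example Corp" is a placeholder owner.
object LicenseExample extends App {
  println(License.ALv2("2024", "Example Corp").text)             // Detailed (default style)
  println(License.ALv2("2024", "Example Corp", SpdxSyntax).text) // two-line SPDX form
}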
|
sbt/sbt-header
|
src/main/scala/de/heikoseeberger/sbtheader/License.scala
|
Scala
|
apache-2.0
| 21,592
|
package models.annotation
import com.scalableminds.util.accesscontext.DBAccessContext
import com.scalableminds.util.tools.{Fox, FoxImplicits, JsonHelper}
import com.scalableminds.webknossos.schema.Tables._
import com.scalableminds.webknossos.tracingstore.tracings.TracingType
import javax.inject.Inject
import models.annotation.AnnotationState._
import models.annotation.AnnotationType.AnnotationType
import play.api.libs.json._
import slick.jdbc.GetResult._
import slick.jdbc.PostgresProfile.api._
import slick.jdbc.TransactionIsolation.Serializable
import slick.lifted.Rep
import slick.sql.SqlAction
import utils.{ObjectId, SQLClient, SQLDAO, SimpleSQLDAO}
import scala.concurrent.ExecutionContext
case class Annotation(
_id: ObjectId,
_dataSet: ObjectId,
_task: Option[ObjectId] = None,
_team: ObjectId,
_user: ObjectId,
annotationLayers: List[AnnotationLayer],
description: String = "",
visibility: AnnotationVisibility.Value = AnnotationVisibility.Internal,
name: String = "",
viewConfiguration: Option[JsObject] = None,
state: AnnotationState.Value = Active,
statistics: JsObject = Json.obj(),
tags: Set[String] = Set.empty,
tracingTime: Option[Long] = None,
typ: AnnotationType.Value = AnnotationType.Explorational,
created: Long = System.currentTimeMillis,
modified: Long = System.currentTimeMillis,
isDeleted: Boolean = false
) extends FoxImplicits {
lazy val id: String = _id.toString
def tracingType: TracingType.Value = {
val skeletonPresent = annotationLayers.exists(_.typ == AnnotationLayerType.Skeleton)
val volumePresent = annotationLayers.exists(_.typ == AnnotationLayerType.Volume)
if (skeletonPresent && volumePresent) TracingType.hybrid
else if (skeletonPresent) TracingType.skeleton
else TracingType.volume
}
def skeletonTracingId(implicit ec: ExecutionContext): Fox[Option[String]] =
for {
_ <- bool2Fox(annotationLayers.count(_.typ == AnnotationLayerType.Skeleton) <= 1) ?~> "annotation.multiLayers.skeleton.notImplemented"
} yield annotationLayers.find(_.typ == AnnotationLayerType.Skeleton).map(_.tracingId)
def volumeTracingId(implicit ec: ExecutionContext): Fox[Option[String]] =
for {
_ <- bool2Fox(annotationLayers.count(_.typ == AnnotationLayerType.Volume) <= 1) ?~> "annotation.multiLayers.volume.notImplemented"
} yield annotationLayers.find(_.typ == AnnotationLayerType.Volume).map(_.tracingId)
def volumeAnnotationLayers: List[AnnotationLayer] = annotationLayers.filter(_.typ == AnnotationLayerType.Volume)
def skeletonAnnotationLayers: List[AnnotationLayer] = annotationLayers.filter(_.typ == AnnotationLayerType.Skeleton)
def isRevertPossible: Boolean =
// Unfortunately, we cannot revert all tracings, because we do not have the history for all of them;
// hence we need a way to decide if a tracing can safely be reverted. We will use the created date of the
// annotation to do so.
created > 1470002400000L // 1.8.2016, 00:00:00
}
class AnnotationLayerDAO @Inject()(SQLClient: SQLClient)(implicit ec: ExecutionContext)
extends SimpleSQLDAO(SQLClient) {
private def parse(r: AnnotationLayersRow): Fox[AnnotationLayer] =
for {
typ <- AnnotationLayerType.fromString(r.typ)
} yield {
AnnotationLayer(
r.tracingid,
typ,
r.name
)
}
def findAnnotationLayersFor(annotationId: ObjectId): Fox[List[AnnotationLayer]] =
for {
rows <- run(
sql"select _annotation, tracingId, typ, name from webknossos.annotation_layers where _annotation = $annotationId order by tracingId"
.as[AnnotationLayersRow])
parsed <- Fox.serialCombined(rows.toList)(parse)
} yield parsed
def insertForAnnotation(annotationId: ObjectId, annotationLayers: List[AnnotationLayer]): Fox[Unit] =
for {
_ <- Fox.serialCombined(annotationLayers)(insertOne(annotationId, _))
} yield ()
def insertOne(annotationId: ObjectId, annotationLayer: AnnotationLayer): Fox[Unit] =
for {
_ <- run(insertOneQuery(annotationId, annotationLayer))
} yield ()
def insertLayerQueries(annotationId: ObjectId,
layers: List[AnnotationLayer]): List[SqlAction[Int, NoStream, Effect]] =
layers.map { annotationLayer =>
insertOneQuery(annotationId, annotationLayer)
}
def insertOneQuery(annotationId: ObjectId, a: AnnotationLayer): SqlAction[Int, NoStream, Effect] =
sqlu"""insert into webknossos.annotation_layers(_annotation, tracingId, typ, name)
values($annotationId, ${a.tracingId}, '#${a.typ.toString}', ${a.name.map(sanitize)})"""
def findAnnotationIdByTracingId(tracingId: String): Fox[ObjectId] =
for {
rList <- run(sql"select _annotation from webknossos.annotation_layers where tracingId = $tracingId".as[String])
head: String <- rList.headOption.toFox
parsed <- ObjectId.parse(head)
} yield parsed
def replaceTracingId(annotationId: ObjectId, oldTracingId: String, newTracingId: String): Fox[Unit] =
for {
_ <- run(
sqlu"update webknossos.annotation_layers set tracingId = $newTracingId where _annotation = $annotationId and tracingId = $oldTracingId")
} yield ()
def updateName(annotationId: ObjectId, tracingId: String, newName: Option[String]): Fox[Unit] =
for {
_ <- run(
sqlu"update webknossos.annotation_layers set name = $newName where _annotation = $annotationId and tracingId = $tracingId")
} yield ()
def deleteAllForAnnotationQuery(annotationId: ObjectId): SqlAction[Int, NoStream, Effect] =
sqlu"delete from webknossos.annotation_layers where _annotation = $annotationId"
}
class AnnotationDAO @Inject()(sqlClient: SQLClient, annotationLayerDAO: AnnotationLayerDAO)(
implicit ec: ExecutionContext)
extends SQLDAO[Annotation, AnnotationsRow, Annotations](sqlClient) {
val collection = Annotations
def idColumn(x: Annotations): Rep[String] = x._Id
def isDeletedColumn(x: Annotations): Rep[Boolean] = x.isdeleted
def parse(r: AnnotationsRow): Fox[Annotation] =
for {
state <- AnnotationState.fromString(r.state).toFox
typ <- AnnotationType.fromString(r.typ).toFox
viewconfigurationOpt <- Fox.runOptional(r.viewconfiguration)(JsonHelper.parseJsonToFox[JsObject](_))
visibility <- AnnotationVisibility.fromString(r.visibility).toFox
annotationLayers <- annotationLayerDAO.findAnnotationLayersFor(ObjectId(r._Id))
} yield {
Annotation(
ObjectId(r._Id),
ObjectId(r._Dataset),
r._Task.map(ObjectId(_)),
ObjectId(r._Team),
ObjectId(r._User),
annotationLayers,
r.description,
visibility,
r.name,
viewconfigurationOpt,
state,
Json.parse(r.statistics).as[JsObject],
parseArrayTuple(r.tags).toSet,
r.tracingtime,
typ,
r.created.getTime,
r.modified.getTime,
r.isdeleted
)
}
override def anonymousReadAccessQ(sharingToken: Option[String]) = s"visibility = '${AnnotationVisibility.Public}'"
override def readAccessQ(requestingUserId: ObjectId) =
s"""(visibility = '${AnnotationVisibility.Public}'
or (visibility = '${AnnotationVisibility.Internal}' and (select _organization from webknossos.teams where webknossos.teams._id = _team)
in (select _organization from webknossos.users_ where _id = '${requestingUserId.id}'))
or _team in (select _team from webknossos.user_team_roles where _user = '${requestingUserId.id}' and isTeamManager)
or _user = '${requestingUserId.id}'
or (select _organization from webknossos.teams where webknossos.teams._id = _team)
in (select _organization from webknossos.users_ where _id = '${requestingUserId.id}' and isAdmin))"""
override def deleteAccessQ(requestingUserId: ObjectId) =
s"""(_team in (select _team from webknossos.user_team_roles where isTeamManager and _user = '${requestingUserId.id}') or _user = '${requestingUserId.id}'
or (select _organization from webknossos.teams where webknossos.teams._id = _team)
in (select _organization from webknossos.users_ where _id = '${requestingUserId.id}' and isAdmin))"""
override def updateAccessQ(requestingUserId: ObjectId): String =
deleteAccessQ(requestingUserId)
// read operations
override def findOne(id: ObjectId)(implicit ctx: DBAccessContext): Fox[Annotation] =
for {
accessQuery <- readAccessQuery
r <- run(
sql"select #$columns from #$existingCollectionName where _id = ${id.id} and #$accessQuery".as[AnnotationsRow])
parsed <- parseFirst(r, id)
} yield parsed
private def getStateQuery(isFinished: Option[Boolean]) =
isFinished match {
case Some(true) => s"state = '${AnnotationState.Finished.toString}'"
case Some(false) => s"state = '${AnnotationState.Active.toString}'"
case None => s"state != '${AnnotationState.Cancelled.toString}'"
}
def findAllFor(userId: ObjectId,
isFinished: Option[Boolean],
annotationType: AnnotationType,
limit: Int,
pageNumber: Int = 0)(implicit ctx: DBAccessContext): Fox[List[Annotation]] = {
val stateQuery = getStateQuery(isFinished)
for {
accessQuery <- readAccessQuery
r <- run(sql"""select #$columns from #$existingCollectionName
where _user = ${userId.id} and typ = '#${annotationType.toString}' and #$stateQuery and #$accessQuery
order by _id desc limit $limit offset ${pageNumber * limit}""".as[AnnotationsRow])
parsed <- parseAll(r)
} yield parsed
}
def findActiveTaskIdsForUser(userId: ObjectId): Fox[List[ObjectId]] = {
val stateQuery = getStateQuery(isFinished = Some(false))
for {
r <- run(sql"""select _task from #$existingCollectionName
where _user = ${userId.id} and typ = '#${AnnotationType.Task.toString}' and #$stateQuery""".as[String])
r <- Fox.serialCombined(r.toList)(ObjectId.parse(_))
} yield r
}
def countAllFor(userId: ObjectId, isFinished: Option[Boolean], annotationType: AnnotationType)(
implicit ctx: DBAccessContext): Fox[Int] = {
val stateQuery = getStateQuery(isFinished)
for {
accessQuery <- readAccessQuery
r <- run(
sql"""select count(*) from #$existingCollectionName
where _user = ${userId.id} and typ = '#${annotationType.toString}' and #$stateQuery and #$accessQuery"""
.as[Int])
parsed <- r.headOption
} yield parsed
}
def countForTeam(teamId: ObjectId): Fox[Int] =
for {
countList <- run(sql"select count(_id) from #$existingCollectionName where _team = $teamId".as[Int])
count <- countList.headOption
} yield count
  // Does not use an access query (access queries don't support prefixes). Use only after a separate access check!
def findAllFinishedForProject(projectId: ObjectId): Fox[List[Annotation]] =
for {
r <- run(
sql"""select #${columnsWithPrefix("a.")} from #$existingCollectionName a
join webknossos.tasks_ t on a._task = t._id
where t._project = ${projectId.id} and a.typ = '#${AnnotationType.Task.toString}' and a.state = '#${AnnotationState.Finished.toString}'"""
.as[AnnotationsRow])
parsed <- parseAll(r)
} yield parsed
  // Does not use an access query (access queries don't support prefixes). Use only after a separate access check!
def findAllActiveForProject(projectId: ObjectId): Fox[List[ObjectId]] =
for {
r <- run(sql""" select a._id from
webknossos.annotations_ a
join webknossos.tasks_ t on a._task = t._id
join webknossos.projects_ p on t._project = p._id
join webknossos.users_ u on a._user = u._id
where p._id = $projectId
and a.state = '#${AnnotationState.Active.toString}'
and a.typ = '#${AnnotationType.Task}' """.as[String])
} yield r.map(ObjectId(_)).toList
def findAllByTaskIdAndType(taskId: ObjectId, typ: AnnotationType)(
implicit ctx: DBAccessContext): Fox[List[Annotation]] =
for {
accessQuery <- readAccessQuery
r <- run(
sql"""select #$columns from #$existingCollectionName
where _task = ${taskId.id} and typ = '#${typ.toString}' and state != '#${AnnotationState.Cancelled.toString}' and #$accessQuery"""
.as[AnnotationsRow])
parsed <- parseAll(r)
} yield parsed
def findOneByTracingId(tracingId: String)(implicit ctx: DBAccessContext): Fox[Annotation] =
for {
annotationId <- annotationLayerDAO.findAnnotationIdByTracingId(tracingId)
annotation <- findOne(annotationId)
} yield annotation
// count operations
def countActiveAnnotationsFor(userId: ObjectId, typ: AnnotationType, excludedTeamIds: List[ObjectId])(
implicit ctx: DBAccessContext): Fox[Int] =
for {
accessQuery <- readAccessQuery
excludeTeamsQ = if (excludedTeamIds.isEmpty) "true"
else s"(not t._id in ${writeStructTupleWithQuotes(excludedTeamIds.map(t => sanitize(t.id)))})"
countList <- run(sql"""select count(*)
from (select a._id from
(select #$columns
from #$existingCollectionName
where _user = ${userId.id} and typ = '#${typ.toString}' and state = '#${AnnotationState.Active.toString}' and #$accessQuery) a
join webknossos.teams t on a._team = t._id where #$excludeTeamsQ) q
""".as[Int])
count <- countList.headOption
} yield count
def countActiveByTask(taskId: ObjectId, typ: AnnotationType)(implicit ctx: DBAccessContext): Fox[Int] =
for {
accessQuery <- readAccessQuery
countList <- run(
sql"""select count(*) from (select _id from #$existingCollectionName where _task = ${taskId.id} and typ = '#${typ.toString}' and state = '#${AnnotationState.Active.toString}' and #$accessQuery) q"""
.as[Int])
count <- countList.headOption
} yield count
def countAllForOrganization(organizationId: ObjectId): Fox[Int] =
for {
countList <- run(
sql"select count(*) from (select a._id from #$existingCollectionName a join webknossos.users_ u on a._user = u._id where u._organization = $organizationId) q"
.as[Int])
count <- countList.headOption
} yield count
// update operations
def insertOne(a: Annotation): Fox[Unit] = {
val viewConfigurationStr: Option[String] = a.viewConfiguration.map(Json.toJson(_).toString)
val insertAnnotationQuery = sqlu"""
insert into webknossos.annotations(_id, _dataSet, _task, _team, _user, description, visibility,
name, viewConfiguration, state, statistics, tags, tracingTime, typ, created, modified, isDeleted)
values(${a._id.id}, ${a._dataSet.id}, ${a._task.map(_.id)}, ${a._team.id},
${a._user.id}, ${a.description}, '#${a.visibility.toString}', ${a.name},
#${optionLiteral(viewConfigurationStr.map(sanitize))},
'#${a.state.toString}', '#${sanitize(a.statistics.toString)}',
'#${writeArrayTuple(a.tags.toList.map(sanitize))}', ${a.tracingTime}, '#${a.typ.toString}',
${new java.sql.Timestamp(a.created)}, ${new java.sql.Timestamp(a.modified)}, ${a.isDeleted})
"""
val insertLayerQueries = annotationLayerDAO.insertLayerQueries(a._id, a.annotationLayers)
for {
_ <- run(DBIO.sequence(insertAnnotationQuery +: insertLayerQueries).transactionally)
} yield ()
}
  // For task annotations only; tracing ids are hard-replaced here.
def updateInitialized(a: Annotation): Fox[Unit] = {
val viewConfigurationStr: Option[String] = a.viewConfiguration.map(Json.toJson(_).toString)
val updateAnnotationQuery = sqlu"""
update webknossos.annotations
set
_dataSet = ${a._dataSet.id},
_team = ${a._team.id},
_user = ${a._user.id},
description = ${a.description},
visibility = '#${a.visibility.toString}',
name = ${a.name},
viewConfiguration = #${optionLiteral(viewConfigurationStr.map(sanitize))},
state = '#${a.state.toString}',
statistics = '#${sanitize(a.statistics.toString)}',
tags = '#${writeArrayTuple(a.tags.toList.map(sanitize))}',
tracingTime = ${a.tracingTime},
typ = '#${a.typ.toString}',
created = ${new java.sql.Timestamp(a.created)},
modified = ${new java.sql.Timestamp(a.modified)},
isDeleted = ${a.isDeleted}
where _id = ${a._id.id}
"""
val deleteLayersQuery = annotationLayerDAO.deleteAllForAnnotationQuery(a._id)
val insertLayerQueries = annotationLayerDAO.insertLayerQueries(a._id, a.annotationLayers)
for {
_ <- run(DBIO.sequence(updateAnnotationQuery +: deleteLayersQuery +: insertLayerQueries).transactionally)
_ = logger.info(s"Initialized task annotation ${a._id}, state is now ${a.state.toString}")
} yield ()
}
def abortInitializingAnnotation(id: ObjectId): Fox[Unit] = {
val deleteLayersQuery = annotationLayerDAO.deleteAllForAnnotationQuery(id)
val deleteAnnotationQuery =
sqlu"delete from webknossos.annotations where _id = $id and state = '#${AnnotationState.Initializing.toString}'"
val composed = DBIO.sequence(List(deleteLayersQuery, deleteAnnotationQuery)).transactionally
for {
_ <- run(composed.withTransactionIsolation(Serializable),
retryCount = 50,
retryIfErrorContains = List(transactionSerializationError))
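      // (Under Serializable isolation, concurrent transactions can abort with a
      //  serialization error; the retryCount/retryIfErrorContains arguments make
      //  run re-attempt the transaction in that case.)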
_ = logger.info(s"Aborted initializing task annotation ${id.toString}")
} yield ()
}
def deleteOldInitializingAnnotations(): Fox[Unit] =
for {
_ <- run(
sqlu"delete from webknossos.annotations where state = '#${AnnotationState.Initializing.toString}' and created < (now() - interval '1 hour')")
} yield ()
def logTime(id: ObjectId, time: Long)(implicit ctx: DBAccessContext): Fox[Unit] =
for {
_ <- assertUpdateAccess(id) ?~> "FAILED: AnnotationSQLDAO.assertUpdateAccess"
_ <- run(
sqlu"update webknossos.annotations set tracingTime = Coalesce(tracingTime, 0) + $time where _id = ${id.id}") ?~> "FAILED: run in AnnotationSQLDAO.logTime"
} yield ()
def updateState(id: ObjectId, state: AnnotationState)(implicit ctx: DBAccessContext): Fox[Unit] =
for {
_ <- assertUpdateAccess(id) ?~> "FAILED: AnnotationSQLDAO.assertUpdateAccess"
_ <- run(
sqlu"update webknossos.annotations set state = '#$state' where _id = ${id.id}"
.withTransactionIsolation(Serializable),
retryCount = 50,
retryIfErrorContains = List(transactionSerializationError)
) ?~> "FAILED: run in AnnotationSQLDAO.updateState"
_ = logger.info(
s"Updated state of Annotation ${id.toString} to ${state.toString}, access context: ${ctx.toStringAnonymous}")
} yield ()
def updateDescription(id: ObjectId, description: String)(implicit ctx: DBAccessContext): Fox[Unit] =
for {
_ <- assertUpdateAccess(id)
_ <- updateStringCol(id, _.description, description)
} yield ()
def updateName(id: ObjectId, name: String)(implicit ctx: DBAccessContext): Fox[Unit] =
for {
_ <- assertUpdateAccess(id)
_ <- updateStringCol(id, _.name, name)
} yield ()
def updateVisibility(id: ObjectId, visibilityString: String)(implicit ctx: DBAccessContext): Fox[Unit] =
for {
_ <- assertUpdateAccess(id)
_ <- AnnotationVisibility.fromString(visibilityString).toFox
_ <- run(sqlu"update webknossos.annotations_ set visibility = '#$visibilityString' where _id = $id")
} yield ()
def updateTags(id: ObjectId, tags: List[String])(implicit ctx: DBAccessContext): Fox[Unit] =
for {
_ <- assertUpdateAccess(id)
_ <- run(
sqlu"update webknossos.annotations set tags = '#${writeArrayTuple(tags.map(sanitize))}' where _id = ${id.id}")
} yield ()
def updateModified(id: ObjectId, modified: Long)(implicit ctx: DBAccessContext): Fox[Unit] =
for {
_ <- assertUpdateAccess(id)
_ <- run(
sqlu"update webknossos.annotations set modified = ${new java.sql.Timestamp(modified)} where _id = ${id.id}")
} yield ()
def updateStatistics(id: ObjectId, statistics: JsObject)(implicit ctx: DBAccessContext): Fox[Unit] =
for {
_ <- assertUpdateAccess(id)
_ <- run(
sqlu"update webknossos.annotations set statistics = '#${sanitize(statistics.toString)}' where _id = ${id.id}")
} yield ()
def updateUser(id: ObjectId, userId: ObjectId)(implicit ctx: DBAccessContext): Fox[Unit] =
updateObjectIdCol(id, _._User, userId)
def updateViewConfiguration(id: ObjectId, viewConfiguration: Option[JsObject])(
implicit ctx: DBAccessContext): Fox[Unit] = {
val viewConfigurationStr: Option[String] = viewConfiguration.map(Json.toJson(_).toString)
for {
_ <- assertUpdateAccess(id)
_ <- run(sqlu"update webknossos.annotations set viewConfiguration = #${optionLiteral(
viewConfigurationStr.map(sanitize))} where _id = ${id.id}")
} yield ()
}
}
class SharedAnnotationsDAO @Inject()(annotationDAO: AnnotationDAO, sqlClient: SQLClient)(implicit ec: ExecutionContext)
extends SimpleSQLDAO(sqlClient) {
def sharedTeamsFor(annotationId: ObjectId): Fox[List[String]] =
for (result <- run(
sql"select _team from webknossos.annotation_sharedTeams where _annotation = $annotationId".as[String]))
yield result.toList
  // Does not use an access query (access queries don't support prefixes). Use only after a separate access check!
def findAllSharedForTeams(teams: List[ObjectId]): Fox[List[Annotation]] =
for {
result <- run(
sql"""select distinct #${annotationDAO.columnsWithPrefix("a.")} from webknossos.annotations_ a
join webknossos.annotation_sharedTeams l on a._id = l._annotation
where l._team in #${writeStructTupleWithQuotes(teams.map(t => sanitize(t.toString)))}"""
.as[AnnotationsRow])
parsed <- Fox.combined(result.toList.map(annotationDAO.parse))
} yield parsed
def updateTeamsForSharedAnnotation(annotationId: ObjectId, teams: List[ObjectId])(
implicit ctx: DBAccessContext): Fox[Unit] = {
val clearQuery = sqlu"delete from webknossos.annotation_sharedTeams where _annotation = $annotationId"
val insertQueries = teams.map(teamId => sqlu"""insert into webknossos.annotation_sharedTeams(_annotation, _team)
values($annotationId, $teamId)""")
val composedQuery = DBIO.sequence(List(clearQuery) ++ insertQueries)
for {
_ <- annotationDAO.assertUpdateAccess(annotationId)
_ <- run(composedQuery.transactionally.withTransactionIsolation(Serializable),
retryCount = 50,
retryIfErrorContains = List(transactionSerializationError))
} yield ()
}
}
|
scalableminds/webknossos
|
app/models/annotation/Annotation.scala
|
Scala
|
agpl-3.0
| 23,477
|
/*                     __                                               *\
**     ________ ___   / /  ___      __ ____  Scala.js tools             **
**    / __/ __// _ | / /  / _ | __ / // __/  (c) 2013-2014, LAMP/EPFL   **
**  __\ \/ /__/ __ |/ /__/ __ |/_// /_\ \    http://scala-js.org/       **
** /____/\___/_/ |_/____/_/ | |__/ /____/                               **
**                          |/____/                                     **
\*                                                                      */
package org.scalajs.core.tools.classpath
import org.scalajs.core.tools.io.VirtualJSFile
import org.scalajs.core.tools.jsdep.ResolutionInfo
import scala.collection.immutable.Seq
/** A [[CompleteClasspath]] that is fully linked (either with the
 *  [[ScalaJSOptimizer]] or the Closure Optimizer). It contains only a single
 *  file of JavaScript, the `scalaJSCode`.
 */
final class LinkedClasspath(
jsLibs: Seq[ResolvedJSDependency],
val scalaJSCode: VirtualJSFile,
requiresDOM: Boolean,
version: Option[String]
) extends CompleteClasspath(jsLibs, requiresDOM, version)
|
jmnarloch/scala-js
|
tools/shared/src/main/scala/org/scalajs/core/tools/classpath/LinkedClasspath.scala
|
Scala
|
bsd-3-clause
| 1,100
|
/*
* =========================================================================================
* Copyright © 2013-2018 the kamon project <http://kamon.io/>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
* =========================================================================================
*/
package app.kanela.instrumentation
import kanela.agent.api.instrumentation.InstrumentationBuilder
class MultiMixinsInstrumentation extends InstrumentationBuilder {
import app.kanela.instrumentation.mixin.MixinOverMixin._
onType("app.kanela.cases.multimixins.WithMultiMixinsClass")
.when(classIsPresent("kanela.agent.api.instrumentation.InstrumentationBuilder"))
.mixin(classOf[MixinOverMixin1])
.mixin(classOf[MixinOverMixin2])
.mixin(classOf[MixinOverMixin3])
}
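// (A hedged reading of the Kanela DSL above: onType selects the class to be
//  instrumented, the when(...) guard only applies the instrumentation if the
//  named class is present on the classpath, and each mixin(...) call weaves
//  the given trait into the target type.)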
|
kamon-io/kamon-agent
|
agent-test/src/test/scala/app/kanela/instrumentation/MultiMixinsInstrumentation.scala
|
Scala
|
apache-2.0
| 1,303
|
/**
* MIT License
*
* Copyright (c) 2016-2018 James Sherwood-Jones <james.sherwoodjones@gmail.com>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.jsherz.luskydive.util
import scalaz.{-\/, \/}
import scala.concurrent.Future
object EitherFutureExtensions {
  implicit def toEitherWithError[X](either: String \/ X): EitherWithError[X] = {
new EitherWithError(either)
}
}
/**
* Either a [[String]] error, or an [[X]].
*
* @param either
* @tparam X
*/
class EitherWithError[X](either: String \/ X) {
  /**
    * If the wrapped [[\/]] is right, return `f(rightVal)`; otherwise return an
    * already-completed `Future` holding the left error.
    *
    * @param f the function to apply to the right value
    * @return the future produced by `f`, or a completed future holding the left error
    */
  def withFutureF[V](f: X => Future[String \/ V]): Future[String \/ V] = {
    either.fold(error => Future.successful(-\/(error)), f(_))
}
}
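// Hedged usage sketch (assumes scalaz on the classpath; names below are
// illustrative): the implicit conversion lets a validated value feed an
// asynchronous step directly, e.g.
//
//   import com.jsherz.luskydive.util.EitherFutureExtensions._
//   import scalaz.{\/, \/-}
//   val parsed: String \/ Int = \/-(42)
//   def lookup(id: Int): Future[String \/ String] =
//     Future.successful(\/-(s"record-$id"))
//   parsed.withFutureF(lookup)  // completes with \/-("record-42")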
|
jSherz/lsd-members
|
backend/src/main/scala/com/jsherz/luskydive/util/EitherFutureExts.scala
|
Scala
|
mit
| 1,884
|
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\
* @ @ *
* # # # # (c) 2017 CAB *
* # # # # # # *
* # # # # # # # # # # # # *
* # # # # # # # # # *
* # # # # # # # # # # # # # # # # # # *
* # # # # # # # # # # # # # # # # # *
* # # # # # # # # # # # # # *
* # # # # # # # # # # # # # # *
* # # # # # # # # # # # # # # # # # # *
* @ @ *
\* * http://github.com/alexcab * * * * * * * * * * * * * * * * * * * * * * * * * */
package mathact.core.sketch.view.visualization
/** Visualization object
* Created by CAB on 28.09.2016.
*/
private[core] object Visualization {
//Enums
object LayoutType extends Enumeration {
val OrganicLayout = Value
val CircleLayout = Value
val TreeLayout = Value
val ParallelEdgeLayout = Value
val StackLayout = Value}
type LayoutType = LayoutType.Value
//Data
case class BlockImageData(
path: String,
width: Int,
height: Int)
case class BlockData(
blockId: Int,
blockName: String,
blockImage: Option[BlockImageData],
inlets: Map[Int, Option[String]],
outlets: Map[Int, Option[String]])
case class ConnectionData(
inletBlockId: Int,
inletId: Int,
outletBlockId: Int,
outletId: Int)
case class GraphData(blocks: List[BlockData], connections: List[ConnectionData])
//Messages
case object DoClose
case class LayoutTypeChanced(layoutType: LayoutType)
case object DoLayoutBtnHit
case object RebuildGraph
case class DoLayout(steps: Int, delay: Int)}
|
AlexCAB/MathAct
|
mathact_core/src/main/scala/mathact/core/sketch/view/visualization/Visualization.scala
|
Scala
|
mit
| 2,158
|
package io.koff.expressions
import com.github.jedesah.Expression
import com.github.jedesah.Expression._
import scalaz._
import Scalaz._
import scalaz.std.scalaFuture.futureInstance
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.concurrent.{Await, Future}
/**
 * A very simple example which shows how to use the Expression object
*/
object SimpleExpressionExample extends App {
/**
* Async operation #1
*/
def future1(): Future[String] = Future.successful("future1")
/**
* Async operation #2
*/
def future2(): Future[String] = Future.successful("future2")
/**
* Result calculation
*/
def resultCalc(str1: String, str2: String): String = str1 + " | " + str2
/**
   * We need to use the `extract(...)` method in order to get the results from the futures
*/
def expression(): Future[String] = Expression[Future, String] {
val result1 = extract(future1())
val result2 = extract(future2())
resultCalc(result1, result2)
}
/**
   * For-comprehension analog of the `expression()` method
*/
def forComprehension(): Future[String] = {
val fut1 = future1()
val fut2 = future2()
for {
result1 <- fut1
result2 <- fut2
} yield {
resultCalc(result1, result2)
}
}
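  // Design note: fut1 and fut2 are started before the for-comprehension so both
  // futures run concurrently; inlining future1()/future2() into the generators
  // would sequence the two calls instead.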
println("expression: " + Await.result(expression(), 30 seconds))
println("for-comprehension: " + Await.result(forComprehension(), 30 seconds))
/**
* Example of usage of auto extraction
*/
def autoExtract(): Future[String] = {
import com.github.jedesah.Expression.auto.extract
Expression[Future, String] {
val result1 = future1()
val result2 = future2()
resultCalc(result1, result2)
}
}
println("autoExtract: " + Await.result(autoExtract(), 30 seconds))
}
|
coffius/koffio-expression-example
|
src/main/scala/io/koff/expressions/SimpleExpressionExample.scala
|
Scala
|
mit
| 1,794
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package spark.streaming
import akka.actor.Props
import akka.actor.SupervisorStrategy
import akka.zeromq.Subscribe
import spark.streaming.dstream._
import spark._
import spark.streaming.receivers.ActorReceiver
import spark.streaming.receivers.ReceiverSupervisorStrategy
import spark.streaming.receivers.ZeroMQReceiver
import spark.storage.StorageLevel
import spark.util.MetadataCleaner
import spark.streaming.receivers.ActorReceiver
import scala.collection.mutable.Queue
import scala.collection.Map
import java.io.InputStream
import java.util.concurrent.atomic.AtomicInteger
import java.util.UUID
import org.apache.hadoop.io.LongWritable
import org.apache.hadoop.io.Text
import org.apache.hadoop.mapreduce.{InputFormat => NewInputFormat}
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat
import org.apache.hadoop.fs.Path
import twitter4j.Status
import twitter4j.auth.Authorization
/**
* A StreamingContext is the main entry point for Spark Streaming functionality. Besides the basic
 * information (such as cluster URL and job name) needed to internally create a SparkContext, it provides
 * methods used to create DStreams from various input sources.
*/
class StreamingContext private (
sc_ : SparkContext,
cp_ : Checkpoint,
batchDur_ : Duration
) extends Logging {
/**
* Create a StreamingContext using an existing SparkContext.
* @param sparkContext Existing SparkContext
* @param batchDuration The time interval at which streaming data will be divided into batches
*/
def this(sparkContext: SparkContext, batchDuration: Duration) = {
this(sparkContext, null, batchDuration)
}
/**
* Create a StreamingContext by providing the details necessary for creating a new SparkContext.
* @param master Cluster URL to connect to (e.g. mesos://host:port, spark://host:port, local[4]).
* @param appName A name for your job, to display on the cluster web UI
* @param batchDuration The time interval at which streaming data will be divided into batches
*/
def this(
master: String,
appName: String,
batchDuration: Duration,
sparkHome: String = null,
jars: Seq[String] = Nil,
environment: Map[String, String] = Map()) = {
this(StreamingContext.createNewSparkContext(master, appName, sparkHome, jars, environment),
null, batchDuration)
}
/**
* Re-create a StreamingContext from a checkpoint file.
* @param path Path either to the directory that was specified as the checkpoint directory, or
* to the checkpoint file 'graph' or 'graph.bk'.
*/
def this(path: String) = this(null, CheckpointReader.read(path), null)
initLogging()
if (sc_ == null && cp_ == null) {
throw new Exception("Spark Streaming cannot be initialized with " +
"both SparkContext and checkpoint as null")
}
if (MetadataCleaner.getDelaySeconds < 0) {
throw new SparkException("Spark Streaming cannot be used without setting spark.cleaner.ttl; "
+ "set this property before creating a SparkContext (use SPARK_JAVA_OPTS for the shell)")
}
protected[streaming] val isCheckpointPresent = (cp_ != null)
protected[streaming] val sc: SparkContext = {
if (isCheckpointPresent) {
new SparkContext(cp_.master, cp_.framework, cp_.sparkHome, cp_.jars, cp_.environment)
} else {
sc_
}
}
protected[streaming] val env = SparkEnv.get
protected[streaming] val graph: DStreamGraph = {
if (isCheckpointPresent) {
cp_.graph.setContext(this)
cp_.graph.restoreCheckpointData()
cp_.graph
} else {
assert(batchDur_ != null, "Batch duration for streaming context cannot be null")
val newGraph = new DStreamGraph()
newGraph.setBatchDuration(batchDur_)
newGraph
}
}
protected[streaming] val nextNetworkInputStreamId = new AtomicInteger(0)
protected[streaming] var networkInputTracker: NetworkInputTracker = null
protected[streaming] var checkpointDir: String = {
if (isCheckpointPresent) {
sc.setCheckpointDir(StreamingContext.getSparkCheckpointDir(cp_.checkpointDir), true)
cp_.checkpointDir
} else {
null
}
}
protected[streaming] var checkpointDuration: Duration = if (isCheckpointPresent) cp_.checkpointDuration else null
protected[streaming] var receiverJobThread: Thread = null
protected[streaming] var scheduler: Scheduler = null
/**
* Return the associated Spark context
*/
def sparkContext = sc
/**
   * Set each DStream in this context to remember the RDDs it generated in the last given duration.
   * DStreams remember RDDs only for a limited duration of time and then release them for garbage
   * collection. This method allows the developer to specify how long to remember the RDDs
   * (useful if the developer wishes to query old data outside the DStream computation).
* @param duration Minimum duration that each DStream should remember its RDDs
*/
def remember(duration: Duration) {
graph.remember(duration)
}
/**
* Set the context to periodically checkpoint the DStream operations for master
* fault-tolerance. The graph will be checkpointed every batch interval.
* @param directory HDFS-compatible directory where the checkpoint data will be reliably stored
*/
def checkpoint(directory: String) {
if (directory != null) {
sc.setCheckpointDir(StreamingContext.getSparkCheckpointDir(directory))
checkpointDir = directory
} else {
checkpointDir = null
}
}
protected[streaming] def initialCheckpoint: Checkpoint = {
if (isCheckpointPresent) cp_ else null
}
protected[streaming] def getNewNetworkStreamId() = nextNetworkInputStreamId.getAndIncrement()
/**
   * Create an input stream with any arbitrary user-implemented network receiver.
* @param receiver Custom implementation of NetworkReceiver
*/
def networkStream[T: ClassManifest](
receiver: NetworkReceiver[T]): DStream[T] = {
val inputStream = new PluggableInputDStream[T](this,
receiver)
graph.addInputStream(inputStream)
inputStream
}
/**
   * Create an input stream with any arbitrary user-implemented actor receiver.
* @param props Props object defining creation of the actor
* @param name Name of the actor
* @param storageLevel RDD storage level. Defaults to memory-only.
*
   * @note An important point to note:
   *       since the actor may exist outside the Spark framework, it is the user's
   *       responsibility to ensure type safety, i.e. the parameterized type of the
   *       data received and of the actorStream should be the same.
*/
def actorStream[T: ClassManifest](
props: Props,
name: String,
storageLevel: StorageLevel = StorageLevel.MEMORY_ONLY_SER_2,
supervisorStrategy: SupervisorStrategy = ReceiverSupervisorStrategy.defaultStrategy
): DStream[T] = {
networkStream(new ActorReceiver[T](props, name, storageLevel, supervisorStrategy))
}
/**
   * Create an input stream that receives messages pushed by a ZeroMQ publisher.
   * @param publisherUrl URL of the remote ZeroMQ publisher
   * @param subscribe Topic to subscribe to
   * @param bytesToObjects A ZeroMQ stream publishes a sequence of frames for each
   *                       topic, and each frame holds a sequence of bytes; this
   *                       converter (which might be a byte deserializer) translates
   *                       a sequence of byte sequences into objects, where the outer
   *                       sequence refers to a frame and the inner one to its payload.
* @param storageLevel RDD storage level. Defaults to memory-only.
*/
def zeroMQStream[T: ClassManifest](
publisherUrl:String,
subscribe: Subscribe,
bytesToObjects: Seq[Seq[Byte]] ⇒ Iterator[T],
storageLevel: StorageLevel = StorageLevel.MEMORY_ONLY_SER_2,
supervisorStrategy: SupervisorStrategy = ReceiverSupervisorStrategy.defaultStrategy
): DStream[T] = {
actorStream(Props(new ZeroMQReceiver(publisherUrl,subscribe,bytesToObjects)),
"ZeroMQReceiver", storageLevel, supervisorStrategy)
}
/**
* Create an input stream that pulls messages from a Kafka Broker.
   * @param zkQuorum Zookeeper quorum (hostname:port,hostname:port,..).
* @param groupId The group id for this consumer.
* @param topics Map of (topic_name -> numPartitions) to consume. Each partition is consumed
* in its own thread.
* @param storageLevel Storage level to use for storing the received objects
* (default: StorageLevel.MEMORY_AND_DISK_SER_2)
*/
def kafkaStream(
zkQuorum: String,
groupId: String,
topics: Map[String, Int],
storageLevel: StorageLevel = StorageLevel.MEMORY_ONLY_SER_2
): DStream[String] = {
val kafkaParams = Map[String, String](
"zk.connect" -> zkQuorum, "groupid" -> groupId, "zk.connectiontimeout.ms" -> "10000")
kafkaStream[String, kafka.serializer.StringDecoder](kafkaParams, topics, storageLevel)
}
/**
* Create an input stream that pulls messages from a Kafka Broker.
   * @param kafkaParams Map of kafka configuration parameters.
* See: http://kafka.apache.org/configuration.html
* @param topics Map of (topic_name -> numPartitions) to consume. Each partition is consumed
* in its own thread.
* @param storageLevel Storage level to use for storing the received objects
*/
def kafkaStream[T: ClassManifest, D <: kafka.serializer.Decoder[_]: Manifest](
kafkaParams: Map[String, String],
topics: Map[String, Int],
storageLevel: StorageLevel
): DStream[T] = {
val inputStream = new KafkaInputDStream[T, D](this, kafkaParams, topics, storageLevel)
registerInputStream(inputStream)
inputStream
}
/**
   * Create an input stream from a TCP source hostname:port. Data is received using
   * a TCP socket and the received bytes are interpreted as UTF8-encoded, `\n`-delimited
   * lines.
* @param hostname Hostname to connect to for receiving data
* @param port Port to connect to for receiving data
* @param storageLevel Storage level to use for storing the received objects
* (default: StorageLevel.MEMORY_AND_DISK_SER_2)
*/
def socketTextStream(
hostname: String,
port: Int,
storageLevel: StorageLevel = StorageLevel.MEMORY_AND_DISK_SER_2
): DStream[String] = {
socketStream[String](hostname, port, SocketReceiver.bytesToLines, storageLevel)
}
/**
   * Create an input stream from a TCP source hostname:port. Data is received using
   * a TCP socket and the received bytes are interpreted as objects using the given
   * converter.
* @param hostname Hostname to connect to for receiving data
* @param port Port to connect to for receiving data
* @param converter Function to convert the byte stream to objects
* @param storageLevel Storage level to use for storing the received objects
* @tparam T Type of the objects received (after converting bytes to objects)
*/
def socketStream[T: ClassManifest](
hostname: String,
port: Int,
converter: (InputStream) => Iterator[T],
storageLevel: StorageLevel
): DStream[T] = {
val inputStream = new SocketInputDStream[T](this, hostname, port, converter, storageLevel)
registerInputStream(inputStream)
inputStream
}
/**
   * Create an input stream from a Flume source.
* @param hostname Hostname of the slave machine to which the flume data will be sent
* @param port Port of the slave machine to which the flume data will be sent
* @param storageLevel Storage level to use for storing the received objects
*/
def flumeStream (
hostname: String,
port: Int,
storageLevel: StorageLevel = StorageLevel.MEMORY_AND_DISK_SER_2
): DStream[SparkFlumeEvent] = {
val inputStream = new FlumeInputDStream(this, hostname, port, storageLevel)
registerInputStream(inputStream)
inputStream
}
/**
   * Create an input stream from a network source hostname:port, where data is received
   * as serialized blocks (serialized using Spark's serializer) that can be directly
* pushed into the block manager without deserializing them. This is the most efficient
* way to receive data.
* @param hostname Hostname to connect to for receiving data
* @param port Port to connect to for receiving data
* @param storageLevel Storage level to use for storing the received objects
* @tparam T Type of the objects in the received blocks
*/
def rawSocketStream[T: ClassManifest](
hostname: String,
port: Int,
storageLevel: StorageLevel = StorageLevel.MEMORY_AND_DISK_SER_2
): DStream[T] = {
val inputStream = new RawInputDStream[T](this, hostname, port, storageLevel)
registerInputStream(inputStream)
inputStream
}
/**
   * Create an input stream that monitors a Hadoop-compatible filesystem
   * for new files and reads them using the given key-value types and input format.
   * File names starting with . are ignored.
   * @param directory HDFS directory to monitor for new files
* @tparam K Key type for reading HDFS file
* @tparam V Value type for reading HDFS file
* @tparam F Input format for reading HDFS file
*/
def fileStream[
K: ClassManifest,
V: ClassManifest,
F <: NewInputFormat[K, V]: ClassManifest
] (directory: String): DStream[(K, V)] = {
val inputStream = new FileInputDStream[K, V, F](this, directory)
registerInputStream(inputStream)
inputStream
}
/**
   * Create an input stream that monitors a Hadoop-compatible filesystem
   * for new files and reads them using the given key-value types and input format.
   * @param directory HDFS directory to monitor for new files
   * @param filter Function to filter paths to process
   * @param newFilesOnly Whether to process only new files and ignore existing files in the directory
* @tparam K Key type for reading HDFS file
* @tparam V Value type for reading HDFS file
* @tparam F Input format for reading HDFS file
*/
def fileStream[
K: ClassManifest,
V: ClassManifest,
F <: NewInputFormat[K, V]: ClassManifest
] (directory: String, filter: Path => Boolean, newFilesOnly: Boolean): DStream[(K, V)] = {
val inputStream = new FileInputDStream[K, V, F](this, directory, filter, newFilesOnly)
registerInputStream(inputStream)
inputStream
}
/**
   * Create an input stream that monitors a Hadoop-compatible filesystem
   * for new files and reads them as text files (using key as LongWritable, value
   * as Text and input format as TextInputFormat). File names starting with . are ignored.
   * @param directory HDFS directory to monitor for new files
*/
def textFileStream(directory: String): DStream[String] = {
fileStream[LongWritable, Text, TextInputFormat](directory).map(_._2.toString)
}
/**
   * Create an input stream that returns tweets received from Twitter.
* @param twitterAuth Twitter4J authentication, or None to use Twitter4J's default OAuth
* authorization; this uses the system properties twitter4j.oauth.consumerKey,
* .consumerSecret, .accessToken and .accessTokenSecret.
* @param filters Set of filter strings to get only those tweets that match them
* @param storageLevel Storage level to use for storing the received objects
*/
def twitterStream(
twitterAuth: Option[Authorization] = None,
filters: Seq[String] = Nil,
storageLevel: StorageLevel = StorageLevel.MEMORY_AND_DISK_SER_2
): DStream[Status] = {
val inputStream = new TwitterInputDStream(this, twitterAuth, filters, storageLevel)
registerInputStream(inputStream)
inputStream
}
/**
* Create an input stream from a queue of RDDs. In each batch,
* it will process either one or all of the RDDs returned by the queue.
* @param queue Queue of RDDs
* @param oneAtATime Whether only one RDD should be consumed from the queue in every interval
* @tparam T Type of objects in the RDD
*/
def queueStream[T: ClassManifest](
queue: Queue[RDD[T]],
oneAtATime: Boolean = true
): DStream[T] = {
queueStream(queue, oneAtATime, sc.makeRDD(Seq[T](), 1))
}
/**
* Create an input stream from a queue of RDDs. In each batch,
* it will process either one or all of the RDDs returned by the queue.
* @param queue Queue of RDDs
* @param oneAtATime Whether only one RDD should be consumed from the queue in every interval
   * @param defaultRDD Default RDD returned by the DStream when the queue is empty.
   *                   Set to null if no RDD should be returned when the queue is empty
* @tparam T Type of objects in the RDD
*/
def queueStream[T: ClassManifest](
queue: Queue[RDD[T]],
oneAtATime: Boolean,
defaultRDD: RDD[T]
): DStream[T] = {
val inputStream = new QueueInputDStream(this, queue, oneAtATime, defaultRDD)
registerInputStream(inputStream)
inputStream
}
/**
* Create a unified DStream from multiple DStreams of the same type and same interval
*/
def union[T: ClassManifest](streams: Seq[DStream[T]]): DStream[T] = {
new UnionDStream[T](streams.toArray)
}
/**
* Register an input stream that will be started (InputDStream.start() called) to get the
* input data.
*/
def registerInputStream(inputStream: InputDStream[_]) {
graph.addInputStream(inputStream)
}
/**
* Register an output stream that will be computed every interval
*/
def registerOutputStream(outputStream: DStream[_]) {
graph.addOutputStream(outputStream)
}
protected def validate() {
assert(graph != null, "Graph is null")
graph.validate()
assert(
checkpointDir == null || checkpointDuration != null,
"Checkpoint directory has been set, but the graph checkpointing interval has " +
"not been set. Please use StreamingContext.checkpoint() to set the interval."
)
}
/**
* Start the execution of the streams.
*/
def start() {
if (checkpointDir != null && checkpointDuration == null && graph != null) {
checkpointDuration = graph.batchDuration
}
validate()
val networkInputStreams = graph.getInputStreams().filter(s => s match {
case n: NetworkInputDStream[_] => true
case _ => false
}).map(_.asInstanceOf[NetworkInputDStream[_]]).toArray
if (networkInputStreams.length > 0) {
// Start the network input tracker (must start before receivers)
networkInputTracker = new NetworkInputTracker(this, networkInputStreams)
networkInputTracker.start()
}
Thread.sleep(1000)
// Start the scheduler
scheduler = new Scheduler(this)
scheduler.start()
}
/**
* Stop the execution of the streams.
*/
def stop() {
try {
if (scheduler != null) scheduler.stop()
if (networkInputTracker != null) networkInputTracker.stop()
if (receiverJobThread != null) receiverJobThread.interrupt()
sc.stop()
logInfo("StreamingContext stopped successfully")
} catch {
case e: Exception => logWarning("Error while stopping", e)
}
}
}
object StreamingContext {
implicit def toPairDStreamFunctions[K: ClassManifest, V: ClassManifest](stream: DStream[(K,V)]) = {
new PairDStreamFunctions[K, V](stream)
}
protected[streaming] def createNewSparkContext(
master: String,
appName: String,
sparkHome: String,
jars: Seq[String],
environment: Map[String, String]): SparkContext = {
// Set the default cleaner delay to an hour if not already set.
    // This should be sufficient even for a 1 second batch interval.
if (MetadataCleaner.getDelaySeconds < 0) {
MetadataCleaner.setDelaySeconds(3600)
}
new SparkContext(master, appName, sparkHome, jars, environment)
}
protected[streaming] def rddToFileName[T](prefix: String, suffix: String, time: Time): String = {
if (prefix == null) {
time.milliseconds.toString
    } else if (suffix == null || suffix.length == 0) {
prefix + "-" + time.milliseconds
} else {
prefix + "-" + time.milliseconds + "." + suffix
}
}
protected[streaming] def getSparkCheckpointDir(sscCheckpointDir: String): String = {
new Path(sscCheckpointDir, UUID.randomUUID.toString).toString
}
}
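// Hedged usage sketch for this (legacy) API -- host, port and durations below
// are hypothetical; the pair-function implicits come from the companion object
// above:
//
//   import spark.streaming.StreamingContext._
//   val ssc = new StreamingContext("local[2]", "NetworkWordCount", Seconds(1))
//   val words = ssc.socketTextStream("localhost", 9999).flatMap(_.split(" "))
//   words.map((_, 1)).reduceByKey(_ + _).print()
//   ssc.start()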
|
wgpshashank/spark
|
streaming/src/main/scala/spark/streaming/StreamingContext.scala
|
Scala
|
apache-2.0
| 21,232
|
/***********************************************************************
* Copyright (c) 2013-2019 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.process.query
import java.util.Collections
import org.geotools.data.Query
import org.geotools.data.collection.ListFeatureCollection
import org.geotools.filter.text.ecql.ECQL
import org.geotools.referencing.GeodeticCalculator
import org.junit.runner.RunWith
import org.locationtech.geomesa.features.ScalaSimpleFeature
import org.locationtech.geomesa.filter.visitor.BoundsFilterVisitor
import org.locationtech.geomesa.process.query.KNearestNeighborSearchProcess._
import org.locationtech.geomesa.utils.collection.SelfClosingIterator
import org.locationtech.geomesa.utils.geohash.GeoHash
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.locationtech.geomesa.utils.text.WKTUtils
import org.locationtech.jts.geom.Point
import org.opengis.feature.simple.SimpleFeature
import org.opengis.filter.spatial.BBOX
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class KnnProcessTest extends Specification {
import scala.collection.JavaConverters._
val process = new KNearestNeighborSearchProcess
val sft = SimpleFeatureTypes.createType("knn", "*geom:Point:srid=4326")
val geom = org.locationtech.geomesa.filter.ff.property("geom")
val features = Seq.tabulate[SimpleFeature](10) { i =>
ScalaSimpleFeature.create(sft, s"$i", s"POINT(45 5$i)")
}
val featureCollection = new ListFeatureCollection(sft, features.asJava)
val diagonalFeatures = Seq.tabulate[SimpleFeature](90) { lat =>
ScalaSimpleFeature.create(sft, s"$lat", f"POINT($lat%d $lat%d)")
}
val polarFeatures = Seq.range(-180, 181).map { lon =>
ScalaSimpleFeature.create(sft, s"$lon", f"POINT($lon%d 89.9)")
}
val cville = WKTUtils.read("POINT(-78.4953560 38.0752150)").asInstanceOf[Point]
"KnnProcess" should {
"manually visit a feature collection" in {
val input = Collections.singletonList[SimpleFeature](ScalaSimpleFeature.create(sft, "", "POINT (45 55)"))
val query = new ListFeatureCollection(sft, input)
val result = SelfClosingIterator(process.execute(query, featureCollection, 3, 0, Double.MaxValue).features).toSeq
result must containTheSameElementsAs(features.slice(4, 7))
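      // (The fixture points are POINT(45 5i) for i = 0..9, so the three nearest
      //  to (45, 55) are indices 4 to 6, i.e. latitudes 54, 55 and 56.)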
}
"manually visit a feature collection smaller than k" in {
val input = Collections.singletonList[SimpleFeature](ScalaSimpleFeature.create(sft, "", "POINT (45 55)"))
val query = new ListFeatureCollection(sft, input)
val result = SelfClosingIterator(process.execute(query, featureCollection, 20, 0, Double.MaxValue).features).toSeq
result must containTheSameElementsAs(features)
}
"inject a small BBOX into a larger query" in {
val filter = ECQL.toFilter("prop like 'foo' AND bbox(geom, -80, 30, -70, 40)")
val base = new Query("", filter)
val p = GeoHash("dqb0tg").getCentroid
val window = new KnnWindow(base, geom, p, 10, 1000, 10000)
// generate a new query
val updated = window.next(None)
// verify that original is untouched
base.getFilter mustEqual filter
val decomposed = org.locationtech.geomesa.filter.decomposeAnd(updated.getFilter)
decomposed must haveLength(3)
val bounds = decomposed.collect {
case b: BBOX => (b.getBounds.getMinX, b.getBounds.getMaxX, b.getBounds.getMinY, b.getBounds.getMaxY)
}
bounds must haveLength(2)
// verify that the original filter is present
bounds must contain((-80d, -70d, 30d, 40d))
// verify the window filter
val expected = Envelope(p, 1000, new GeodeticCalculator)
bounds must contain((expected.xmin, expected.xmax, expected.ymin, expected.ymax))
}
"calculate features close to the equator" in {
val k = 10
val p = WKTUtils.read("POINT(0.1 0.2)").asInstanceOf[Point]
val nearest = Array.ofDim[FeatureWithDistance](k)
val calculator = new KnnCalculator(p, k, Double.MaxValue, nearest)
diagonalFeatures.foreach(calculator.visit)
nearest.map(_.sf.getID).toSeq must containTheSameElementsAs((0 until k).map(_.toString))
}
"calculate features close to Southwest Russia" in {
val k = 10
val p = WKTUtils.read("POINT(45.1 45.1)").asInstanceOf[Point]
val nearest = Array.ofDim[FeatureWithDistance](k)
val calculator = new KnnCalculator(p, k, Double.MaxValue, nearest)
diagonalFeatures.foreach(calculator.visit)
nearest.map(_.sf.getID).toSeq must containTheSameElementsAs((41 to 50).map(_.toString))
}
"calculate features close to the North Pole" in {
val k = 10
val p = WKTUtils.read("POINT(89.9 89.9)").asInstanceOf[Point]
val nearest = Array.ofDim[FeatureWithDistance](k)
val calculator = new KnnCalculator(p, k, Double.MaxValue, nearest)
diagonalFeatures.foreach(calculator.visit)
nearest.map(_.sf.getID).toSeq must containTheSameElementsAs((80 to 89).map(_.toString))
}
"calculate northern features close to the North Pole" in {
val k = 10
val p = WKTUtils.read("POINT(89.9 89.9)").asInstanceOf[Point]
val nearest = Array.ofDim[FeatureWithDistance](k)
val calculator = new KnnCalculator(p, k, Double.MaxValue, nearest)
polarFeatures.foreach(calculator.visit)
nearest.map(_.sf.getID).toSeq must containTheSameElementsAs((85 to 94).map(_.toString))
}
"calculate more things near the north polar region" in {
val k = 10
val p = WKTUtils.read("POINT(0.0001 89.9)").asInstanceOf[Point]
val nearest = Array.ofDim[FeatureWithDistance](k)
val calculator = new KnnCalculator(p, k, Double.MaxValue, nearest)
polarFeatures.foreach(calculator.visit)
diagonalFeatures.foreach(calculator.visit)
nearest.map(_.sf.getID).toSeq must containTheSameElementsAs((-4 to 5).map(_.toString))
}
"double the window when no features are found" in {
val window = new KnnWindow(new Query(""), geom, cville, 10, 500d, 5000d)
// note: compare envelope -> referenced envelope, otherwise comparison doesn't work
Envelope(cville, 500d, new GeodeticCalculator).toJts mustEqual
BoundsFilterVisitor.visit(window.next(None).getFilter)
Envelope(cville, 1000d, new GeodeticCalculator).toJts mustEqual
BoundsFilterVisitor.visit(window.next(Some(0)).getFilter)
}
"expand the window correctly around Charlottesville" in {
val window = new KnnWindow(new Query(""), geom, cville, 10, 500d, 5000d)
val initial = window.next(None).getFilter
window.radius mustEqual 500d
initial must beAnInstanceOf[BBOX]
val inner = Envelope(cville, window.radius, new GeodeticCalculator)
// note: compare envelope -> referenced envelope, otherwise comparison doesn't work
inner.toJts mustEqual initial.asInstanceOf[BBOX].getBounds
// can get a multi-polygon and visualize these queries using:
// expanded.map(_.toString.drop(20).dropRight(2)).mkString("MULTIPOLYGON(", ",", ")")
val expanded = org.locationtech.geomesa.filter.decomposeOr(window.next(Some(5)).getFilter)
window.radius must beCloseTo(797.88, 0.01) // math.sqrt(10 / (math.Pi * (5 / (4 * 500d * 500d))))
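      // (Reading of the heuristic: 5 hits in a box of side 2 * 500m give an
      //  estimated density of 5 / (4 * 500^2) hits per square meter; the radius
      //  of a circle expected to hold k = 10 hits at that density is
      //  sqrt(k / (pi * density)) ~= 797.88m.)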
expanded must haveLength(4)
val bounds = expanded.collect {
case b: BBOX => (b.getBounds.getMinX, b.getBounds.getMaxX, b.getBounds.getMinY, b.getBounds.getMaxY)
}
bounds must haveLength(4)
bounds must containTheSameElementsAs {
Seq(
(-78.50444946042865, -78.50105448126463, 38.07071041202179, 38.07971958797821),
(-78.48965751873537, -78.48626253957136, 38.07071041202179, 38.07971958797821),
(-78.50444946042865, -78.48626253957136, 38.07971958797821, 38.08240328075565),
(-78.50444946042865, -78.48626253957136, 38.06802671924435, 38.07071041202179)
)
}
}
"expand the window correctly around the anti-meridian near Suva, Fiji" in {
val p = WKTUtils.read("POINT (178.440 -18.140)").asInstanceOf[Point]
val window = new KnnWindow(new Query(""), geom, p, 10, 250000d, 750000d)
val initial = org.locationtech.geomesa.filter.decomposeOr(window.next(None).getFilter)
window.radius mustEqual 250000d
initial must haveLength(2)
val initialBounds = initial.collect {
case b: BBOX => (b.getBounds.getMinX, b.getBounds.getMaxX, b.getBounds.getMinY, b.getBounds.getMaxY)
}
initialBounds must haveLength(2)
initialBounds must containTheSameElementsAs(
Seq(
(176.07760736902137, 180.0, -20.39844934356917, -15.881550656430832),
(-180.0, -179.19760736902137, -20.39844934356917, -15.881550656430832)
)
)
// can get a multi-polygon and visualize these queries using:
// expanded.map(_.toString.drop(20).dropRight(2)).mkString("MULTIPOLYGON(", ",", ")")
val expanded = org.locationtech.geomesa.filter.decomposeOr(window.next(Some(7)).getFilter)
window.radius must beCloseTo(337167.76, 0.01) // math.sqrt(10 / (math.Pi * (7 / (4 * 250000d * 250000d))))
expanded must haveLength(6)
val expandedBounds = expanded.collect {
case b: BBOX => (b.getBounds.getMinX, b.getBounds.getMaxX, b.getBounds.getMinY, b.getBounds.getMaxY)
}
expandedBounds must haveLength(6)
expandedBounds must containTheSameElementsAs(
Seq(
            (175.2540034973006, 180.0, -15.881550656430832, -15.0942293591859),
            (175.2540034973006, 180.0, -21.185770640814102, -20.39844934356917),
(175.2540034973006, 176.07760736902137, -20.39844934356917, -15.881550656430832),
(-180.0, -178.3740034973006, -15.881550656430832, -15.0942293591859),
(-180.0, -178.3740034973006, -21.185770640814102, -20.39844934356917),
(-179.19760736902137, -178.3740034973006, -20.39844934356917, -15.881550656430832)
)
)
}
"expand the window correctly around the polar region near McMurdo Station" in {
val p = WKTUtils.read("POINT (166.68360 -77.842)").asInstanceOf[Point]
val window = new KnnWindow(new Query(""), geom, p, 10, 250000d, 750000d)
val initial = org.locationtech.geomesa.filter.decomposeOr(window.next(None).getFilter)
window.radius mustEqual 250000d
initial must haveLength(1)
val initialBounds = initial.collect {
case b: BBOX => (b.getBounds.getMinX, b.getBounds.getMaxX, b.getBounds.getMinY, b.getBounds.getMaxY)
}
initialBounds must haveLength(1)
initialBounds mustEqual Seq((156.16673876448903, 177.200461235511, -80.08109073174205, -75.60290926825795))
// can get a multi-polygon and visualize these queries using:
// expanded.map(_.toString.drop(20).dropRight(2)).mkString("MULTIPOLYGON(", ",", ")")
val expanded = org.locationtech.geomesa.filter.decomposeOr(window.next(Some(7)).getFilter)
window.radius must beCloseTo(337167.76, 0.01) // math.sqrt(10 / (math.Pi * (7 / (4 * 250000d * 250000d))))
expanded must haveLength(5)
val expandedBounds = expanded.collect {
case b: BBOX => (b.getBounds.getMinX, b.getBounds.getMaxX, b.getBounds.getMinY, b.getBounds.getMaxY)
}
expandedBounds must haveLength(5)
expandedBounds must containTheSameElementsAs(
Seq(
            (152.61949285269546, 180.0, -75.60290926825795, -74.82227697359694),
            (152.61949285269546, 180.0, -80.86172302640306, -80.08109073174205),
            (152.61949285269546, 156.16673876448903, -80.08109073174205, -75.60290926825795),
            (177.200461235511, 180.0, -80.08109073174205, -75.60290926825795),
            (-180.0, -179.25229285269543, -80.86172302640306, -74.82227697359694)
)
)
val double = org.locationtech.geomesa.filter.decomposeOr(window.next(Some(9)).getFilter)
window.radius must beCloseTo(401032.76, 0.01) // math.sqrt(10 / (math.Pi * (9 / (4 * 337167.76 * 337167.76))))
double must haveLength(6)
val doubleBounds = double.collect {
case b: BBOX => (b.getBounds.getMinX, b.getBounds.getMaxX, b.getBounds.getMinY, b.getBounds.getMaxY)
}
doubleBounds must haveLength(6)
doubleBounds must containTheSameElementsAs(
Seq(
(150.08274179277637, 180.0, -74.82227697359694, -74.2503545564565),
(150.08274179277637, 180.0, -81.4336454435435, -80.86172302640306),
(150.08274179277637, 152.61949285269546, -80.86172302640306, -74.82227697359694),
(-180.0, -176.71554179277635, -74.82227697359694, -74.2503545564565),
(-180.0, -176.71554179277635, -81.4336454435435, -80.86172302640306),
(-179.25229285269543, -176.71554179277635, -80.86172302640306, -74.82227697359694)
)
)
}
"expand the window correctly around the north pole" in {
val p = WKTUtils.read("POINT(89.9 89.9)").asInstanceOf[Point]
val window = new KnnWindow(new Query(""), geom, p, 10, 250000d, 750000d)
val initial = org.locationtech.geomesa.filter.decomposeOr(window.next(None).getFilter)
window.radius mustEqual 250000d
initial must haveLength(1)
val initialBounds = initial.collect {
case b: BBOX => (b.getBounds.getMinX, b.getBounds.getMaxX, b.getBounds.getMinY, b.getBounds.getMaxY)
}
initialBounds must haveLength(1)
initialBounds mustEqual Seq((-180.0, 180.0, 87.66172837475138, 90.0))
// can get a multi-polygon and visualize these queries using:
// expanded.map(_.toString.drop(20).dropRight(2)).mkString("MULTIPOLYGON(", ",", ")")
val expanded = org.locationtech.geomesa.filter.decomposeOr(window.next(Some(7)).getFilter)
window.radius must beCloseTo(337167.76, 0.01) // math.sqrt(10 / (math.Pi * (7 / (4 * 250000d * 250000d))))
expanded must haveLength(1)
val expandedBounds = expanded.collect {
case b: BBOX => (b.getBounds.getMinX, b.getBounds.getMaxX, b.getBounds.getMinY, b.getBounds.getMaxY)
}
expandedBounds must haveLength(1)
expandedBounds mustEqual Seq((-180.0, 180.0, 86.88129440297135, 87.66172837475138))
}
"stop after reaching the threshold" in {
val window = new KnnWindow(new Query(""), geom, cville, 10, 500d, 800d)
window.hasNext must beTrue
window.next(None)
window.hasNext must beTrue
window.next(Some(5))
window.hasNext must beTrue
val max = window.next(Some(5)).getFilter
window.hasNext must beFalse
// note: compare envelope -> referenced envelope, otherwise comparison doesn't work
Envelope(cville, 800d, new GeodeticCalculator).toJts mustEqual BoundsFilterVisitor.visit(max)
}
"convert cartesian envelopes to world bounds" in {
// simple in-bounds case
Envelope(-10, 10, -5, 5).toWorld mustEqual Seq(Envelope(-10, 10, -5, 5))
// anti-meridian left
Envelope(-200, -140, -45, 45).toWorld must containTheSameElementsAs(
Seq(Envelope(160, 180, -45, 45), Envelope(-180, -140, -45, 45))
)
// anti-meridian right
Envelope(165, 185, -45, 45).toWorld must containTheSameElementsAs(
Seq(Envelope(165, 180, -45, 45), Envelope(-180, -175, -45, 45))
)
// pole-crossing south
Envelope(60, 80, -95, -75).toWorld mustEqual Seq(Envelope(-180, 180, -90, -75))
// pole-crossing north
Envelope(60, 80, 60, 110).toWorld mustEqual Seq(Envelope(-180, 180, 60, 90))
// wider than world bounds
Envelope(-200, 200, -100, 100).toWorld mustEqual Seq(Envelope(-180, 180, -90, 90))
Envelope(-200, 200, -45, 45).toWorld mustEqual Seq(Envelope(-180, 180, -45, 45))
Envelope(-5, 5, -100, 100).toWorld mustEqual Seq(Envelope(-180, 180, -90, 90))
}
"calculate diffs between query envelopes" in {
// contains the other envelope
Envelope(-10, 10, -5, 5).minus(Envelope(-15, 15, -7.5, 7.5)) must beEmpty
// contained within the other envelope
Envelope(-10, 10, -5, 5).minus(Envelope(-5, 5, -2.5, 2.5)) must containTheSameElementsAs(
Seq(Envelope(-10, 10, 2.5, 5), Envelope(-10, 10, -5, -2.5), Envelope(-10, -5, -2.5, 2.5), Envelope(5, 10, -2.5, 2.5))
)
// disjoint
Envelope(-10, 10, -5, 5).minus(Envelope(-15, -10, -7.5, 7.5)) mustEqual Seq(Envelope(-10, 10, -5, 5))
Envelope(10, 15, -15, -10).minus(Envelope(0, 5, -20, -15)) mustEqual Seq(Envelope(10, 15, -15, -10))
Envelope(10, 15, -15, -10).minus(Envelope(0, 5, -5, 0)) mustEqual Seq(Envelope(10, 15, -15, -10))
// side-laps the other envelope
Envelope(-10, 10, -5, 5).minus(Envelope(-15, -5, -2.5, 2.5)) must containTheSameElementsAs(
Seq(Envelope(-10, 10, 2.5, 5), Envelope(-10, 10, -5, -2.5), Envelope(-5, 10, -2.5, 2.5))
)
Envelope(-10, 10, -5, 5).minus(Envelope(5, 15, -2.5, 2.5)) must containTheSameElementsAs(
Seq(Envelope(-10, 10, 2.5, 5), Envelope(-10, 10, -5, -2.5), Envelope(-10, 5, -2.5, 2.5))
)
// top/bottom-laps the other envelope
Envelope(-10, 10, -5, 5).minus(Envelope(-5, 5, -2.5, 10)) must containTheSameElementsAs(
Seq(Envelope(-10, 10, -5, -2.5), Envelope(-10, -5, -2.5, 5), Envelope(5, 10, -2.5, 5))
)
Envelope(-10, 10, -5, 5).minus(Envelope(-5, 5, -10, 2.5)) must containTheSameElementsAs(
Seq(Envelope(-10, 10, 2.5, 5), Envelope(-10, -5, -5, 2.5), Envelope(5, 10, -5, 2.5))
)
// corner-laps the other envelope
Envelope(-10, 10, -5, 5).minus(Envelope(0, 15, 0, 10)) must containTheSameElementsAs(
Seq(Envelope(-10, 10, -5, 0), Envelope(-10, 0, 0, 5))
)
Envelope(-10, 10, -5, 5).minus(Envelope(-15, 0, -10, 0)) must containTheSameElementsAs(
Seq(Envelope(-10, 10, 0, 5), Envelope(0, 10, -5, 0))
)
Envelope(-10, 10, -5, 5).minus(Envelope(0, 15, -10, 0)) must containTheSameElementsAs(
Seq(Envelope(-10, 10, 0, 5), Envelope(-10, 0, -5, 0))
)
Envelope(-10, 10, -5, 5).minus(Envelope(-15, 0, 0, 10)) must containTheSameElementsAs(
Seq(Envelope(-10, 10, -5, 0), Envelope(0, 10, 0, 5))
)
// half-laps the other envelope
Envelope(-10, 10, -5, 5).minus(Envelope(-15, 15, 0, 10)) mustEqual Seq(Envelope(-10, 10, -5, 0))
Envelope(-10, 10, -5, 5).minus(Envelope(-15, 15, -10, 0)) mustEqual Seq(Envelope(-10, 10, 0, 5))
Envelope(-10, 10, -5, 5).minus(Envelope(-15, 0, -10, 10)) mustEqual Seq(Envelope(0, 10, -5, 5))
Envelope(-10, 10, -5, 5).minus(Envelope(0, 15, -10, 10)) mustEqual Seq(Envelope(-10, 0, -5, 5))
}
"calculate non-overlapping query windows" in {
// initial query window
QueryEnvelope(Envelope(149, 151, -86, -84), None).query mustEqual Seq(Envelope(149, 151, -86, -84))
// expands but still within world bounds
QueryEnvelope(Envelope(145, 155, -87.5, -82.5), Some(Envelope(149, 151, -86, -84))).query must containTheSameElementsAs(
Seq(Envelope(145, 155, -87.5, -86), Envelope(145, 155, -84, -82.5), Envelope(145, 149, -86, -84), Envelope(151, 155, -86, -84))
)
// expands past the poles
QueryEnvelope(Envelope(130, 170, -95, -75), Some(Envelope(145, 155, -87.5, -82.5))).query must containTheSameElementsAs(
Seq(Envelope(-180, 180, -90, -87.5), Envelope(-180, 180, -82.5, -75), Envelope(-180, 145, -87.5, -82.5), Envelope(155, 180, -87.5, -82.5))
)
}
}
}
|
elahrvivaz/geomesa
|
geomesa-process/geomesa-process-vector/src/test/scala/org/locationtech/geomesa/process/query/KnnProcessTest.scala
|
Scala
|
apache-2.0
| 19,810
|
package io.github.shogowada.scalajs.reactjs.example.interactive.helloworld
import io.github.shogowada.scalajs.reactjs.VirtualDOM._
import io.github.shogowada.scalajs.reactjs.elements.ReactElement
import io.github.shogowada.scalajs.reactjs.events.FormSyntheticEvent
import io.github.shogowada.scalajs.reactjs.example.interactive.helloworld.LetterCase.{DEFAULT, LOWER_CASE, LetterCase, UPPER_CASE}
import io.github.shogowada.scalajs.reactjs.{React, ReactDOM}
import org.scalajs.dom
import org.scalajs.dom.raw.HTMLInputElement
import scala.scalajs.js.JSApp
object Main extends JSApp {
def main(): Unit = {
val mountNode = dom.document.getElementById("mount-node")
ReactDOM.render(<(InteractiveHelloWorld()).empty, mountNode)
}
}
object InteractiveHelloWorld {
case class State(name: String, letterCase: LetterCase)
type Self = React.Self[Unit, State]
private val nameId = "name"
def apply() = reactClass
private lazy val reactClass = React.createClass[Unit, State](
getInitialState = (self) => State(
name = "whoever you are",
letterCase = DEFAULT
),
render = (self) =>
<.div()(
createNameInput(self),
LetterCase.ALL.map(createLetterCaseRadioBox(self, _)),
<.br.empty,
<.div(^.id := "greet")(s"Hello, ${name(self.state)}!")
)
)
private def createNameInput(self: Self) =
<.div()(
<.label(^.`for` := nameId)("Name: "),
<.input(
^.id := nameId,
^.value := self.state.name,
^.onChange := onChange(self)
)()
)
private def createLetterCaseRadioBox(self: Self, thisLetterCase: LetterCase): ReactElement = {
<(LetterCaseRadioBox())(
^.wrapped := LetterCaseRadioBox.WrappedProps(
letterCase = thisLetterCase,
checked = thisLetterCase == self.state.letterCase,
onChecked = () => {
self.setState(_.copy(letterCase = thisLetterCase))
}
)
)()
}
private def onChange(self: Self) =
(event: FormSyntheticEvent[HTMLInputElement]) => {
val name = event.target.value
self.setState(_.copy(name = name))
}
private def name(state: State): String =
state.letterCase match {
case LOWER_CASE => state.name.toLowerCase
case UPPER_CASE => state.name.toUpperCase
case _ => state.name
}
}
object LetterCase {
sealed class LetterCase(val name: String)
case object DEFAULT extends LetterCase("Default")
case object LOWER_CASE extends LetterCase("Lower Case")
case object UPPER_CASE extends LetterCase("Upper Case")
val ALL = Seq(DEFAULT, LOWER_CASE, UPPER_CASE)
}
object LetterCaseRadioBox {
case class WrappedProps(letterCase: LetterCase, checked: Boolean, onChecked: () => Unit)
type Self = React.Self[WrappedProps, Unit]
def apply() = reactClass
private lazy val reactClass = React.createClass[WrappedProps, Unit](
(self) =>
<.span()(
<.input(
^.`type`.radio,
^.name := "letter-case",
^.value := self.props.wrapped.letterCase.name,
^.checked := self.props.wrapped.checked,
^.onChange := onChange(self)
)(),
self.props.wrapped.letterCase.name
)
)
private def onChange(self: Self) =
(event: FormSyntheticEvent[HTMLInputElement]) => {
if (event.target.checked) {
self.props.wrapped.onChecked()
}
}
}
|
shogowada/scalajs-reactjs
|
example/interactive-helloworld/src/main/scala/io/github/shogowada/scalajs/reactjs/example/interactive/helloworld/Main.scala
|
Scala
|
mit
| 3,386
|
package org.lancegatlin
object Try1 {
case class Person(id: Int, name: String, age: Int)
trait Schema[C] {
class Field[A](val unapply: C => A) {
def name: String = {
// org.lancegatlin.Try1$PersonSchema$id$
val name = getClass.getName
// Try1$PersonSchema$id
val simpleName = name.substring(name.lastIndexOf('.') + 1).dropRight(1)
// id
simpleName.substring(simpleName.lastIndexOf('$') + 1)
}
      // Note: getClass.getSimpleName is buggy for nested Scala objects
      // (it can throw "Malformed class name"), hence the manual parsing above
      //getClass.getSimpleName
override def toString = s"Field($name)"
}
def fields: Seq[Field[_]]
}
implicit object PersonSchema extends Schema[Person] {
object id extends Field(_.id)
object name extends Field(_.name)
object age extends Field(_.age)
val fields = Seq(id,name,age)
}
sealed trait Ast[C,A]
case class Equals[C,A](field: Schema[C]#Field[A], ast: Ast[C,A]) extends Ast[C,A]
case class LessThan[C,A](field: Schema[C]#Field[A], ast: Ast[C,A]) extends Ast[C,A]
case class Value[C,A](value: A) extends Ast[C,A]
case class And[C,A,B](ast1: Ast[C,A],ast2:Ast[C,B]) extends Ast[C,(A,B)]
implicit class PimpMyField[C,A](val self: Schema[C]#Field[A]) extends AnyVal {
def ===(value: A) = Equals(self,Value[C,A](value))
def <(value: A) = LessThan(self,Value[C,A](value))
}
implicit class PimpMyAst[C,A](val self: Ast[C,A]) extends AnyVal {
def and[B](other: Ast[C,B]) = And(self, other)
}
val ast =
{
import PersonSchema._
id === 1 and name === "asdf" and age < 30
}
def astToSql(ast: Ast[_,_]) : String = {
val builder = new StringBuilder(256)
def loop(ast: Ast[_,_]) : Unit = {
ast match {
case e@Equals(field,ast1) =>
builder
.append(field.name)
.append(" = ")
loop(ast1)
case LessThan(field,ast1) =>
builder
.append(field.name)
.append(" < ")
loop(ast1)
case Value(value) =>
// Can't just convert to String here, each dialect needs to do this differently
// This is best done using a serializer type-class for the dialect
builder.append(value)
case a@And(ast1,ast2) =>
loop(ast1)
builder.append(" AND ")
loop(ast2)
}
}
loop(ast)
builder.result()
}
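  // A minimal sketch of the serializer type-class suggested in astToSql above
  // (illustrative, not part of the original design): each SQL dialect supplies
  // its own SqlValue instances so Value(...) nodes are rendered per dialect
  // instead of via toString.
  trait SqlValue[A] {
    def render(value: A): String
  }
  object SqlValue {
    implicit val intSqlValue: SqlValue[Int] = new SqlValue[Int] {
      def render(value: Int): String = value.toString
    }
    implicit val stringSqlValue: SqlValue[String] = new SqlValue[String] {
      def render(value: String): String = "'" + value.replace("'", "''") + "'"
    }
  }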
}
|
lancegatlin/caseclass_ql
|
src/main/scala/org/lancegatlin/Try1.scala
|
Scala
|
mit
| 2,371
|
package uk.co.bbc.redux
import java.util.Date
import java.io.InputStream
import java.io.BufferedInputStream
import java.awt.image.BufferedImage
import javax.imageio.ImageIO
import scala.xml._
import scala.xml.factory.XMLLoader
class Client extends Http {
var htmlParser:XMLLoader[Elem] = XML.withSAXParser(new org.ccil.cowan.tagsoup.jaxp.SAXFactoryImpl().newSAXParser())
  /** Log in to redux
   *
   * This is done over HTTP; redux doesn't have HTTPS!
   *
   * Don't abuse this: redux is very overzealous when it comes to
   * locking your account if it sees too many logins, especially from
   * multiple IPs. Always use the logout method when finished, or reuse
   * an existing session.
   *
   * @param username Your redux username
   * @param password Your redux password
   * @throws UserNotFoundException Your username cannot be found
   * @throws UserPasswordException Your password is wrong
   * @throws ClientHttpException Some other HTTP error has occurred
* @return a new User instance with an associated Session
*/
def login (username: String, password: String) : User = {
userRequest (Url.login(username, password), xml => User.createFromXMLResponse(xml) )
}
/**
* @param user A User with associated and valid Session
   * @throws ClientHttpException Some other HTTP error has occurred
*/
def logout (user: User) : Unit = {
getRequestWithStringResponse (Url.logout(user.session.token), otherHttpException)
}
/**
* @param diskReference An identifier for the content
* @param session A valid Session
* @throws ContentNotFoundException The diskReference cannot be found
* @throws SessionInvalidException The session token is broken
   * @throws ClientHttpException Some other HTTP error has occurred
*/
def key (diskReference:String, session:Session) : Key = {
contentRequest (Url.key(diskReference, session.token), xml => Key.createFromXMLResponse(xml) )
}
/**
* @param diskReference An identifier for the content
* @param session A valid Session
* @throws ContentNotFoundException The diskReference cannot be found
* @throws SessionInvalidException The session token is broken
   * @throws ClientHttpException Some other HTTP error has occurred
*/
def content (diskReference:String, session:Session) : Content = {
contentRequest (Url.content(diskReference, session.token), xml => Content.createFromXMLResponse(xml) )
}
/** Download some file from redux
*
* Pass a block to this method to handle the download. The block is passed the
* HTTP response body as an InputStream which can be read however you like.
*
* The stream needs closing afterwards.
*
* @param url The url for the download file
* @param block A block that takes an InputStream as a param
* @throws DownloadNotFoundException The requested file cannot be found
   * @throws ClientHttpException Some other HTTP error has occurred
*/
def download[T] (url: String, block: InputStream => T) : T = {
getRequest(url, method => block(method.getResponseBodyAsStream), status => status match {
case 404 => throw new DownloadNotFoundException
case _ => otherHttpException(status)
})
}
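  // Usage sketch (illustrative; `url` is a placeholder, not a real redux endpoint):
  //   val image: BufferedImage = client.download(url, stream => ImageIO.read(stream))
  // This mirrors how the `image` and `montage` helpers below delegate to `download`.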
/**
   * Retrieve an image representing a redux programme; this will be a JPEG for most
   * TV and will probably cause an error for radio content.
*
   * The image is a frame of footage from the first 5 minutes of content; it's 640x360.
*
* Note, you don't require a key for this call
*
* @param diskReference An identifier for the content
* @throws DownloadNotFoundException The requested file cannot be found
   * @throws ClientHttpException Some other HTTP error has occurred
*/
def image (diskReference:String) : BufferedImage = {
download(Url.image(diskReference), stream => ImageIO.read(stream))
}
/**
   * Retrieve a montage of frames from a programme (one frame every 20 seconds)
*
* @param diskReference An identifier for the content
* @param key A key for the content
* @throws DownloadNotFoundException The requested file cannot be found
   * @throws ClientHttpException Some other HTTP error has occurred
*/
def montage (diskReference:String, key:Key) : BufferedImage = {
download(Url.montage(diskReference, key), stream => ImageIO.read(stream))
}
/**
* Generate a single frame from a frames download
*
   * This uses a fairly memory-efficient approach to crop a single frame from a
* strip of 60. It is returned as a BufferedImage.
*
* @param diskReference An identifier for the content
* @param seconds Number of seconds into the content to get frame for
* @param key A key for the content
* @throws DownloadNotFoundException The requested file cannot be found
   * @throws FrameNotFoundException Cannot find the requested frame (i.e. you asked for a frame beyond the content's duration)
   * @throws ClientHttpException Some other HTTP error has occurred
*/
def frame (diskReference:String, seconds:Int, key:Key) : BufferedImage = {
val mins:Int = seconds / 60
val secs:Int = seconds - mins * 60
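    // e.g. seconds = 130 gives mins = 2, secs = 10: frame 10 of the minute-2 strip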
download(Url.frames(diskReference, mins, key), stream => Frame.fromInputStream(stream, secs))
}
/**
   * Retrieve a strip of 60 frames from a programme (one frame per second)
*
* @param diskReference An identifier for the content
* @param minute The minute of footage that the strip of frames represents
* @param key A key for the content
* @throws DownloadNotFoundException The requested file cannot be found
   * @throws ClientHttpException Some other HTTP error has occurred
*/
def frames (diskReference:String, minute:Int, key:Key) : BufferedImage = {
download(Url.frames(diskReference, minute, key), stream => ImageIO.read(stream))
}
/**
   * Retrieve a list of disk references for a date (i.e. a "schedule")
*
* @param date A Java date object
* @param session A valid Session
* @throws SessionInvalidException The session token is broken
   * @throws ClientHttpException Some other HTTP error has occurred
*/
def tvSchedule (date:Date, session:Session) : Seq[String] = {
val xml = html(Url.tv(date), session)
Schedule.createFromXMLResponse(xml)
}
/**
   * Get an arbitrary page from redux and return it as a NodeSeq ready for XML parsing
*
* @param url A url to download
* @param session A valid Session
* @throws SessionInvalidException The session token is broken
   * @throws ClientHttpException Some other HTTP error has occurred
*/
def html (url:String, session:Session) : NodeSeq = {
val response:NodeSeq = getRequest(url, "BBC_video="+session.token, method => {
htmlParser.load(method.getResponseBodyAsStream())
}, otherHttpException)
if (response \\\\ "@name" exists { _.text == "dologin" }) {
throw new SessionInvalidException
}
response
}
}
|
bbcsnippets/redux-client-scala
|
src/main/scala/uk/co/bbc/redux/Client.scala
|
Scala
|
apache-2.0
| 6,916
|
import scala
object Foo {
  val x = "this should take long to compile or the test may fail."
}
|
twitter-forks/sbt
|
sbt/src/sbt-test/actions/task-cancel/src/main/scala/test.scala
|
Scala
|
bsd-3-clause
| 92
|
/*
* $Id$
*
* Copyright 2015 Valentyn Kolesnikov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* UkrainianToLatin unit test.
*
* @author Valentyn Kolesnikov
* @version $Revision$ $Date$
*/
object UkrainianToLatinTest {
def assertEquals(expected : String, actual : String) = {
if (expected != actual) {
throw new Exception("expected: (" + expected + ") actual: (" + actual + ")");
}
}
/**
* Checks string converter.
*/
def generateLat() = {
assertEquals("", UkrainianToLatin.generateLat(""));
assertEquals("abvhd", UkrainianToLatin.generateLat("абвгд"));
assertEquals("a", UkrainianToLatin.generateLat("а"));
assertEquals("B", UkrainianToLatin.generateLat("Б"));
assertEquals("abvhd kh", UkrainianToLatin.generateLat("абвгд х"));
assertEquals("abVhd KH", UkrainianToLatin.generateLat("абВгд Х"));
assertEquals("abVhKH", UkrainianToLatin.generateLat("абВгХ"));
assertEquals("abKhhKH", UkrainianToLatin.generateLat("абХгХ"));
assertEquals("abvhd kh yulia", UkrainianToLatin.generateLat("абвгд х юля"));
assertEquals("yizhak", UkrainianToLatin.generateLat("їжак"));
assertEquals("Yizhak", UkrainianToLatin.generateLat("Їжак"));
assertEquals("YI", UkrainianToLatin.generateLat("Ї"));
assertEquals("aI", UkrainianToLatin.generateLat("аЇ"));
assertEquals("SHCH", UkrainianToLatin.generateLat("Щ"));
assertEquals("aSHCH", UkrainianToLatin.generateLat("аЩ"));
assertEquals("ashchB", UkrainianToLatin.generateLat("ащБ"));
assertEquals("ashchb", UkrainianToLatin.generateLat("ащб"));
assertEquals("aSHCHB", UkrainianToLatin.generateLat("аЩБ"));
assertEquals("aShchb", UkrainianToLatin.generateLat("аЩб"));
assertEquals("shchB", UkrainianToLatin.generateLat("щБ"));
assertEquals("SHCHB", UkrainianToLatin.generateLat("ЩБ"));
assertEquals("yiZhak", UkrainianToLatin.generateLat("їЖак"));
assertEquals("aIzhak", UkrainianToLatin.generateLat("аЇжак"));
assertEquals("yizhaksiryi", UkrainianToLatin.generateLat("їжак-сірий"));
assertEquals("Rozghon", UkrainianToLatin.generateLat("Розгон"));
assertEquals("Zghorany", UkrainianToLatin.generateLat("Згорани"));
assertEquals("ZGHorany", UkrainianToLatin.generateLat("ЗГорани"));
assertEquals("aZGHorany", UkrainianToLatin.generateLat("аЗГорани"));
assertEquals("Zghorany", UkrainianToLatin.generateLat("Згорани'"));
assertEquals("Zghorany", UkrainianToLatin.generateLat("Згорани’"));
assertEquals("Zghorany\\nkh", UkrainianToLatin.generateLat("Згорани’\\nх"));
assertEquals("aZghorany\\nkh", UkrainianToLatin.generateLat("аЗгорани’\\nх"));
}
def equal(actual : String, expected : String) = {
assertEquals(expected, UkrainianToLatin.generateLat(actual))
}
  /** Checks the official transliteration examples. */
def ukrainianToLatin() = {
//Аа Aа
equal("Алушта Андрій", "Alushta Andrii");
//Бб Bb
equal("Борщагівка Борисенко", "Borshchahivka Borysenko");
//Вв Vv
equal("Вінниця Володимир", "Vinnytsia Volodymyr");
//Гг Hh
equal("Гадяч Богдан Згурський", "Hadiach Bohdan Zghurskyi");
//Ґґ Gg
equal("Ґалаґан Ґорґани", "Galagan Gorgany");
//Дд Dd
equal("Донецьк Дмитро", "Donetsk Dmytro");
//Ее Eе
equal("Рівне Олег Есмань", "Rivne Oleh Esman");
    //Єє Ye at the beginning of a word, ie in other positions
equal("Єнакієве Гаєвич Короп’є", "Yenakiieve Haievych Koropie");
//Жж Zh zh
equal("Житомир Жанна Жежелів", "Zhytomyr Zhanna Zhezheliv");
//Зз Zz
equal("Закарпаття Казимирчук", "Zakarpattia Kazymyrchuk");
//Ии Yy
equal("Медвин Михайленко", "Medvyn Mykhailenko");
//Іі Ii
equal("Іванків Іващенко", "Ivankiv Ivashchenko");
    //Її Yi at the beginning of a word, i in other positions
equal("Їжакевич Кадиївка Мар’їне", "Yizhakevych Kadyivka Marine");
    //Йй Y at the beginning of a word, i in other positions
equal("Йосипівка Стрий Олексій", "Yosypivka Stryi Oleksii");
//Кк Kk
equal("Київ Коваленко", "Kyiv Kovalenko");
//Лл Ll
equal("Лебедин Леонід", "Lebedyn Leonid");
//Мм Mm
equal("Миколаїв Маринич", "Mykolaiv Marynych");
//Нн Nn
equal("Ніжин Наталія", "Nizhyn Nataliia");
//Оо Oo
equal("Одеса Онищенко", "Odesa Onyshchenko");
//Пп Pp
equal("Полтава Петро", "Poltava Petro");
//Рр Rr
equal("Решетилівка Рибчинський", "Reshetylivka Rybchynskyi");
//Сс Ss
equal("Суми Соломія", "Sumy Solomiia");
//Тт Tt
equal("Тернопіль Троць", "Ternopil Trots");
//Уу Uu
equal("Ужгород Уляна", "Uzhhorod Uliana");
//Фф Ff
equal("Фастів Філіпчук", "Fastiv Filipchuk");
//Хх Kh kh
equal("Харків Христина", "Kharkiv Khrystyna");
//Цц Ts ts
equal("Біла Церква Стеценко", "Bila Tserkva Stetsenko");
//Чч Ch ch
equal("Чернівці Шевченко", "Chernivtsi Shevchenko");
//Шш Sh sh
equal("Шостка Кишеньки", "Shostka Kyshenky");
//Щщ Shch shch
equal("Щербухи Гоща Гаращенко", "Shcherbukhy Hoshcha Harashchenko");
    //Юю Yu at the beginning of a word, iu in other positions
equal("Юрій Корюківка", "Yurii Koriukivka");
    //Яя Ya at the beginning of a word, ia in other positions
equal("Яготин Ярошенко Костянтин Знам’янка Феодосія", "Yahotyn Yaroshenko Kostiantyn Znamianka Feodosiia");
}
def main(args: Array[String]) {
generateLat()
ukrainianToLatin()
println("2 tests were run: generateLat(), ukrainianToLatin()")
}
}
|
javadev/ukrainiantolatin
|
src/test/scala/UkrainianToLatinTest.scala
|
Scala
|
apache-2.0
| 6,951
|
package pimpathon
import _root_.java.io.{InputStream, OutputStream}
import _root_.java.math.BigInteger
import _root_.java.nio.charset.Charset
import pimpathon.any._
import pimpathon.string._
object array {
implicit class ArrayPimps[A](val self: Array[A]) extends AnyVal {
def copyTo(srcPos: Int, dest: Array[A], destPos: Int, length: Int): Array[A] =
dest.tap(_ ⇒ System.arraycopy(self, srcPos, dest, destPos, length))
}
implicit class ByteArrayPimps(val self: Array[Byte]) extends AnyVal {
def toHex(length: Int): String = toHex.prefixPadTo(length, '0')
def toHex: String = new BigInteger(1, self).toString(16)
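    // e.g. Array[Byte](15).toHex == "f" and Array[Byte](15).toHex(4) == "000f"
    // (illustrative; assumes prefixPadTo left-pads with '0' up to the given length)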
def copyUpToN(n: Long, is: InputStream, os: OutputStream): Int =
readUpToN(n, is).tapUnless(_ == -1)(os.write(self, 0, _))
def readUpToN(n: Long, is: InputStream): Int =
if (n == 0) -1 else is.read(self, 0, math.min(n, self.length).toInt)
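    // Usage sketch (illustrative): call copyUpToN in a loop, subtracting each returned
    // count from n, until it yields -1 (n exhausted or the input stream hit EOF)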
def asString: String = new String(self, Charset.forName("UTF-8"))
def asString(charset: Charset): String = new String(self, charset)
}
}
|
raymanoz/pimpathon
|
src/main/scala/pimpathon/array.scala
|
Scala
|
apache-2.0
| 1,051
|
package com.codahale.jerkson.deser
import com.fasterxml.jackson.databind.{ DeserializationContext, JsonDeserializer }
import com.fasterxml.jackson.core.{ JsonToken, JsonParser }
import com.fasterxml.jackson.databind.JavaType
import scala.collection.immutable.LongMap
import com.fasterxml.jackson.databind.deser.ResolvableDeserializer
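/**
 * Deserializes a JSON object whose field names are numeric strings into an
 * immutable LongMap, e.g. {"1": "a", "2": "b"} becomes LongMap(1L -> "a", 2L -> "b").
 */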
class LongMapDeserializer(valueType: JavaType) extends JsonDeserializer[Object] with ResolvableDeserializer {
var valueDeserializer: JsonDeserializer[Object] = _
def deserialize(jp: JsonParser, ctxt: DeserializationContext) = {
var map = LongMap.empty[Object]
if (jp.getCurrentToken == JsonToken.START_OBJECT) {
jp.nextToken()
}
if (jp.getCurrentToken != JsonToken.FIELD_NAME &&
jp.getCurrentToken != JsonToken.END_OBJECT) {
throw ctxt.mappingException(valueType.getRawClass)
}
while (jp.getCurrentToken != JsonToken.END_OBJECT) {
try {
val name = jp.getCurrentName.toLong
jp.nextToken()
map += ((name, valueDeserializer.deserialize(jp, ctxt)))
jp.nextToken()
} catch {
case e: IllegalArgumentException ⇒ throw ctxt.mappingException(classOf[LongMap[_]])
}
}
map
}
def resolve(ctxt: DeserializationContext) {
valueDeserializer = ctxt.findRootValueDeserializer(valueType)
}
override def isCachable = true
}
|
mDialog/jerkson
|
src/main/scala/com/codahale/jerkson/deser/LongMapDeserializer.scala
|
Scala
|
mit
| 1,373
|
package org.jetbrains.plugins.scala
package highlighter
package usages
import com.intellij.codeInsight.highlighting.HighlightUsagesHandlerBase
import com.intellij.util.Consumer
import com.intellij.openapi.editor.Editor
import com.intellij.psi.{PsiFile, PsiElement}
import java.util.List
import collection.JavaConversions._
import lang.psi.api.toplevel.typedef.ScTemplateDefinition
import lang.psi.api.statements.{ScPatternDefinition, ScVariableDefinition}
/**
* Highlights the expressions that will be evaluated during construction.
*/
class ScalaHighlightPrimaryConstructorExpressionsHandler(templateDef: ScTemplateDefinition, editor: Editor,
file: PsiFile, keyword: PsiElement)
extends HighlightUsagesHandlerBase[PsiElement](editor, file) {
def computeUsages(targets: List[PsiElement]) {
val iterator = targets.listIterator
while (iterator.hasNext) {
val elem = iterator.next
myReadUsages.add(elem.getTextRange)
}
}
def selectTargets(targets: List[PsiElement], selectionConsumer: Consumer[List[PsiElement]]) {
selectionConsumer.consume(targets)
}
  def getTargets: List[PsiElement] = {
val eb = templateDef.extendsBlock
val varAndValDefsExprs = eb.members.flatMap {
case p: ScPatternDefinition => p.expr // we include lazy vals, perhaps they could be excluded.
case v: ScVariableDefinition => v.expr
case _ => None
}
val constructorExprs = varAndValDefsExprs ++ eb.templateBody.toList.flatMap(_.exprs) ++ Seq(keyword)
constructorExprs.toBuffer[PsiElement]
}
}
|
consulo/consulo-scala
|
src/org/jetbrains/plugins/scala/highlighter/usages/ScalaHighlightPrimaryConstructorExpressionsHandler.scala
|
Scala
|
apache-2.0
| 1,602
|
package suiryc
import suiryc.scala.misc.EnumerationWithAliases
package object scala {
// Note: class defined in package object so that we can make it 'implicit'
/** Enrich Enumeration with case-insensitive name resolver. */
implicit class RichEnumeration[A <: Enumeration](val enum: A) extends AnyVal {
private type WithAliases = A with EnumerationWithAliases
private def withAliases: Option[WithAliases] = enum match {
case v: EnumerationWithAliases => Some(v.asInstanceOf[WithAliases])
case _ => None
}
def byName(s: String): A#Value = withAliases.map(v => v.byName(s): A#Value).getOrElse {
enum.values.find(_.toString.toLowerCase == s.toLowerCase).getOrElse {
throw new NoSuchElementException(s"No value found for '$s'")
}
}
}
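  // Usage sketch (illustrative):
  //   object Color extends Enumeration { val Red, Green = Value }
  //   Color.byName("red")  // resolves to Color.Red, case-insensitively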
}
|
suiryc/suiryc-scala
|
core/src/main/scala/suiryc/scala/package.scala
|
Scala
|
gpl-3.0
| 800
|
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.sst
import slamdata.Predef._
import quasar.contrib.matryoshka.envT
import quasar.ejson.{EJson, TypeTag}
import quasar.fp.numeric.SampleStats
import quasar.tpe.{SimpleType, TypeF}
import quasar.contrib.iota.copkTraverse
import matryoshka.{Corecursive, Recursive}
import matryoshka.implicits._
import scalaz.{IList, Order}
import scalaz.std.option._
import scalaz.syntax.foldable._
import spire.algebra.Field
import spire.math.ConvertableTo
object strings {
import StructuralType.{TagST, TypeST, STF}
val StructuralString = TypeTag("_structural.string")
/** Compresses a string into a generic char[]. */
def compress[T, J, A: ConvertableTo: Order](strStat: TypeStat[A], s: String)(
implicit
A: Field[A],
C: Corecursive.Aux[T, SSTF[J, A, ?]],
JC: Corecursive.Aux[J, EJson],
JR: Recursive.Aux[J, EJson])
: SSTF[J, A, T] = {
// NB: Imported here so as not to pollute outer scope given Iterable's
// pervasiveness.
import scalaz.std.iterable._
val charStat =
s.toIterable.foldMap(c => some(TypeStat.fromEJson(A.one, EJson.char(c))))
val charArr =
charStat map { ts =>
C.embed(envT(ts, TypeST(TypeF.simple(SimpleType.Char))))
}
val arrStat =
TypeStat.coll(strStat.size, some(A.fromInt(s.length)), some(A.fromInt(s.length)))
stringTagged(strStat, C.embed(envT(arrStat, TypeST(TypeF.arr(IList[T](), charArr)))))
}
def simple[T, J, A](strStat: TypeStat[A]): SSTF[J, A, T] =
envT(strStat, TypeST[J, T](TypeF.Simple(SimpleType.Str)))
/** Widens a string into an array of its characters. */
def widen[J: Order, A: ConvertableTo: Field: Order](strStat: TypeStat[A], s: String)(
implicit
JC: Corecursive.Aux[J, EJson],
JR: Recursive.Aux[J, EJson])
: SSTF[J, A, SST[J, A]] = {
val charArr =
SST.fromEJson(strStat.size, EJson.arr(s.map(EJson.char[J](_)) : _*))
stringTagged(strStat, charArr)
}
/** Widens a string into a character array, maintaining stats on literals,
* but dropping the literals themselves.
*/
def widenStats[J: Order, A: ConvertableTo: Field: Order](strStat: TypeStat[A], s: String)(
implicit
JC: Corecursive.Aux[J, EJson],
JR: Recursive.Aux[J, EJson])
: SSTF[J, A, SST[J, A]] = {
val len =
some(ConvertableTo[A].fromInt(s.length))
val chars =
s.foldLeft(IList.empty[SST[J, A]]) { (l, c) =>
val s =
envT(
TypeStat.char(SampleStats.one(ConvertableTo[A].fromInt(c.toInt)), c, c),
TypeST(TypeF.simple[J, SST[J, A]](SimpleType.Char))).embed
s :: l
}
val charArr =
envT(
TypeStat.coll(strStat.size, len, len),
TypeST(TypeF.arr[J, SST[J, A]](chars.reverse, none))).embed
stringTagged(strStat, charArr)
}
////
private def stringTagged[T, L, V](v: V, t: T): STF[L, V, T] =
envT(v, TagST[L](Tagged(StructuralString, t)))
}
|
slamdata/slamengine
|
sst/src/main/scala/quasar/sst/strings.scala
|
Scala
|
apache-2.0
| 3,569
|
/*
* Copyright 2014-2022 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.atlas.persistence
import com.netflix.atlas.core.model.Datapoint
import java.time.Instant
import java.time.LocalDateTime
import java.time.ZoneOffset
import java.time.format.DateTimeFormatter
import com.netflix.spectator.api.Registry
import com.typesafe.scalalogging.StrictLogging
import org.apache.avro.file.CodecFactory
/**
* Hourly writer does hourly directory rolling, and delegates actual writing to underlying
* RollingFileWriter.
*/
class HourlyRollingWriter(
val dataDir: String,
val rollingConf: RollingConfig,
val registry: Registry,
val workerId: Int
) extends StrictLogging {
private val msOfOneHour = 3600000
private val baseId = registry.createId("persistence.outOfOrderEvents")
private val lateEventsCounter = registry.counter(baseId.withTag("id", "late"))
private val futureEventsCounter = registry.counter(baseId.withTag("id", "future"))
private var currWriter: RollingFileWriter = _
private var prevWriter: RollingFileWriter = _
// Assume maxLateDuration is within 1h
require(rollingConf.maxLateDurationMs > 0 && rollingConf.maxLateDurationMs <= msOfOneHour)
def initialize(): Unit = {
currWriter = createWriter(System.currentTimeMillis())
// Create Writer for previous hour if still within limit
if (System.currentTimeMillis() <= rollingConf.maxLateDurationMs + currWriter.startTime) {
prevWriter = createWriter(currWriter.startTime - msOfOneHour)
}
}
def close(): Unit = {
if (currWriter != null) currWriter.close()
if (prevWriter != null) prevWriter.close()
}
private def rollOverWriter(): Unit = {
if (prevWriter != null) prevWriter.close()
prevWriter = currWriter
currWriter = createWriter(System.currentTimeMillis())
}
private def createWriter(ts: Long): RollingFileWriter = {
val hourStart = getHourStart(ts)
val hourEnd = hourStart + msOfOneHour
val writer = new RollingFileWriter(
getFilePathPrefixForHour(hourStart),
rollingConf,
hourStart,
hourEnd,
registry,
workerId
)
writer.initialize()
writer
}
def write(dps: List[Datapoint]): Unit = {
dps.foreach(writeDp)
}
private def writeDp(dp: Datapoint): Unit = {
val now = System.currentTimeMillis()
checkHourRollover(now)
checkPrevHourExpiration(now)
if (RollingFileWriter.RolloverCheckDatapoint eq dp) {
//check rollover for both writers
currWriter.write(dp)
if (prevWriter != null) prevWriter.write(dp)
} else {
// Range checking in order, higher possibility goes first:
// current hour -> previous hour -> late -> future
if (currWriter.shouldAccept(dp)) {
currWriter.write(dp)
} else if (prevWriter != null && prevWriter.shouldAccept(dp)) {
prevWriter.write(dp)
} else if (dp.timestamp < currWriter.startTime) {
lateEventsCounter.increment()
logger.debug(s"found late event: $dp")
} else {
futureEventsCounter.increment()
logger.debug(s"found future event: $dp")
}
}
}
private def checkHourRollover(now: Long): Unit = {
if (now >= currWriter.endTime) {
rollOverWriter()
}
}
  // Note: late arrival is only checked across hour boundaries, not at file-rolling time
private def checkPrevHourExpiration(now: Long): Unit = {
if (prevWriter != null && (now > currWriter.startTime + rollingConf.maxLateDurationMs)) {
logger.debug(
s"stop writer for previous hour after maxLateDuration of ${rollingConf.maxLateDurationMs} ms"
)
prevWriter.close()
prevWriter = null
}
}
private def getHourStart(timestamp: Long): Long = {
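    // integer division floors the timestamp to the top of its UTC hour,
    // e.g. 7260000 ms (02:01:00) -> 7200000 ms (02:00:00)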
timestamp / msOfOneHour * msOfOneHour
}
private def getFilePathPrefixForHour(hourStart: Long): String = {
val dateTime = LocalDateTime.ofInstant(Instant.ofEpochMilli(hourStart), ZoneOffset.UTC)
s"$dataDir/${dateTime.format(HourlyRollingWriter.HourFormatter)}"
}
}
object HourlyRollingWriter {
val HourFormatter: DateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH'00'")
val HourStringLen: Int = 15
}
case class RollingConfig(
maxRecords: Long,
maxDurationMs: Long,
maxLateDurationMs: Long,
codec: String,
compressionLevel: Int,
syncInterval: Int
) {
// Doing config checks here to fail early for invalid values
require(maxRecords > 0)
require(maxDurationMs > 0)
require(maxLateDurationMs > 0)
CodecFactory.fromString(codec) // just for validation
require(compressionLevel >= 1 && compressionLevel <= 9)
}
|
Netflix-Skunkworks/iep-apps
|
atlas-persistence/src/main/scala/com/netflix/atlas/persistence/HourlyRollingWriter.scala
|
Scala
|
apache-2.0
| 5,135
|
package pl.newicom.dddd.process
import pl.newicom.dddd.office.LocalOfficeId
trait SagaSupport {
implicit def officeListener[E <: Saga : LocalOfficeId : ReceptorActorFactory]: CoordinationOfficeListener[E] =
new CoordinationOfficeListener[E]
}
|
pawelkaczor/akka-ddd
|
akka-ddd-core/src/main/scala/pl/newicom/dddd/process/SagaSupport.scala
|
Scala
|
mit
| 254
|
/*
* Copyright (c) 2016 SnappyData, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package org.apache.spark.sql
import java.util.Collections
import scala.collection.JavaConversions._
import io.snappydata.Constant
import org.apache.spark.SnappyJavaHelperUtils._
class DataFrameJavaFunctions(val df: DataFrame) {
/**
* Creates stratified sampled data from given DataFrame
* {{{
* peopleDf.stratifiedSample(Map("qcs" -> Array(1,2), "fraction" -> 0.01))
* }}}
*/
def stratifiedSample(options: java.util.Map[String, Object]): SampleDataFrame = {
snappy.snappyOperationsOnDataFrame(df).stratifiedSample(options.toMap)
}
/**
* Creates a DataFrame for given time instant that will be used when
* inserting into top-K structures.
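   *
   * For example (illustrative; `df` is any DataFrame):
   * {{{
   *   new DataFrameJavaFunctions(df).withTime(System.currentTimeMillis())
   * }}}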
*
* @param time the time instant of the DataFrame as millis since epoch
   * @return a [[DataFrameWithTime]] carrying the given time instant
*/
def withTime(time: java.lang.Long): DataFrameWithTime =
snappy.snappyOperationsOnDataFrame(df).withTime(time)
/**
* Append to an existing cache table.
* Automatically uses #cacheQuery if not done already.
*/
def appendToTempTableCache(tableName: String): Unit =
snappy.snappyOperationsOnDataFrame(df).appendToTempTableCache(tableName)
/* def errorStats(columnName: String,
groupBy: java.util.Set[String] = Collections.emptySet()): MultiColumnOpenHashMap[StatCounter] =
snappy.samplingOperationsOnDataFrame(df).errorStats(columnName, groupBy.toSet)
*/
def errorEstimateAverage(columnName: String, confidence: JDouble,
groupByColumns: java.util.Set[String]): java.util.Map[Row, Tuple4[JDouble, JDouble, JDouble, JDouble]] = {
val groupedColumns =
if (groupByColumns == null)
Collections.emptySet()
else
groupByColumns
val result = snappy.samplingOperationsOnDataFrame(df)
.errorEstimateAverage(columnName, confidence, groupedColumns.toSet)
result.mapValues(toJDouble(_))
}
def withError(error: JDouble,
confidence: JDouble = Constant.DEFAULT_CONFIDENCE): DataFrame =
snappy.convertToAQPFrame(df).withError(error, confidence)
}
|
vjr/snappydata
|
core/src/main/scala/org/apache/spark/sql/DataFrameJavaFunctions.scala
|
Scala
|
apache-2.0
| 2,677
|
/*
* cramersv.scala
* author: RJ Nowling <rnowling@redhat.com>
*
* Copyright (c) 2016 Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.redhat.et.silex.statistics
import scala.util.Random
import com.redhat.et.silex.utils.crossProduct
/**
* "[[https://en.wikipedia.org/wiki/Cram%C3%A9r%27s_V Cramers' V]] is a measure of
* association between two nominal variables, giving a value between 0 and +1
* (inclusive)."
*/
object CramersV {
private def countOccurrences[A](seq : Seq[A]) : Map[A, Double] = {
seq.foldLeft(Map.empty[A, Double]) {
case (counts, value) =>
counts + (value -> (counts.getOrElse(value, 0.0) + 1.0))
}
}
/**
   * Calculate Cramér's V for a collection of values co-sampled from two
   * variables.
   *
   * @param values Sequence of 2-tuples containing co-sampled values
   * @return Cramér's V
*/
def apply[T, U](values : Seq[(T, U)]) : Double = {
val values1 = values.map { _._1 }
val values2 = values.map { _._2 }
val set1 = values1.toSet
val set2 = values2.toSet
if (set1.size == 1 && set2.size == 1) {
1.0
} else if (values1.size == 0 || values2.size == 0 || set1.size == 1 || set2.size == 1) {
0.0
} else {
val pairCounts = countOccurrences(values)
val counts1 = countOccurrences(values1)
val counts2 = countOccurrences(values2)
val nObs = values1.size.toDouble
val chi2 = crossProduct(set1, set2)
.foldLeft(0.0) {
case (runningSum, (value1, value2)) =>
val nij = pairCounts.getOrElse((value1, value2), 0.0)
val ni = counts1.getOrElse(value1, 0.0)
val nj = counts2.getOrElse(value2, 0.0)
val b = ni * nj / nObs
val c = (nij - b) * (nij - b) / b
runningSum + c
}
      val minDim = math.min(set1.size - 1, set2.size - 1).toDouble // min(r - 1, c - 1)
val v = math.sqrt(chi2 / nObs / minDim)
v
}
}
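  // Worked example (illustrative): for the perfectly associated sample
  //   Seq(("a", 1), ("a", 1), ("b", 2), ("b", 2))
  // every expected cell count is 1, so chi2 = 4, nObs = 4, minDim = 1,
  // and CramersV(values) == math.sqrt(4.0 / 4.0 / 1.0) == 1.0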
/**
   * Perform a permutation test to get a p-value indicating the probability of getting
   * a higher association value by chance. Take the association level as the null hypothesis;
   * reject if the p-value is less than your desired threshold.
*
* @param values Values co-sampled from variables 1 and 2
* @param rounds Number of permutations to generate
* @param seed (optional) Seed for the Random number generator used to generate permutations
   * @return p-value giving the probability of getting an equal or higher association value under permutation
*/
def pValueEstimate[T, U](values : Seq[(T, U)], rounds : Int, seed : Long = Random.nextLong) : Double = {
val values1 = values.map { _._1 }
val values2 = values.map { _._2 }
val testV = CramersV(values)
val rng = new Random(seed)
val worseCount = (1 to rounds).iterator.map {
i =>
val shuffled = rng.shuffle(values1)
CramersV(shuffled.zip(values2))
}
.filter {
v =>
v < testV
}
.size
val pvalue = 1.0 - worseCount.toDouble / rounds.toDouble
pvalue
}
}
|
willb/silex
|
src/main/scala/com/redhat/et/silex/statistics/cramersv.scala
|
Scala
|
apache-2.0
| 3,581
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.api.stream.table.validation
import org.apache.flink.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.api.bridge.scala._
import org.apache.flink.table.expressions.utils._
import org.apache.flink.table.runtime.utils.JavaUserDefinedAggFunctions.WeightedAvg
import org.apache.flink.table.utils.{ObjectTableFunction, TableFunc1, TableFunc2, TableTestBase}
import org.junit.Assert.{assertTrue, fail}
import org.junit.Test
class CorrelateValidationTest extends TableTestBase {
@Test
  def testRegisterFunctionException(): Unit = {
val util = streamTestUtil()
val t = util.addTable[(Int, Long, String)]('a, 'b, 'c)
// check scala object is forbidden
expectExceptionThrown(
util.tableEnv.registerFunction("func3", ObjectTableFunction), "Scala object")
expectExceptionThrown(
util.javaTableEnv.registerFunction("func3", ObjectTableFunction), "Scala object")
expectExceptionThrown(t.joinLateral(ObjectTableFunction('a, 1)), "Scala object")
}
@Test
def testInvalidTableFunction(): Unit = {
val util = streamTestUtil()
val t = util.addTable[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
//=================== check scala object is forbidden =====================
// Scala table environment register
expectExceptionThrown(util.addFunction("udtf", ObjectTableFunction), "Scala object")
// Java table environment register
expectExceptionThrown(
util.tableEnv.registerFunction("udtf", ObjectTableFunction), "Scala object")
// Scala Table API directly call
expectExceptionThrown(t.joinLateral(ObjectTableFunction('a, 1)), "Scala object")
//============ throw exception when table function is not registered =========
// Java Table API call
expectExceptionThrown(
t.joinLateral(call("nonexist", $"a")), "Undefined function: nonexist")
// SQL API call
expectExceptionThrown(
util.tableEnv.sqlQuery("SELECT * FROM MyTable, LATERAL TABLE(nonexist(a))"),
"No match found for function signature nonexist(<NUMERIC>)")
//========= throw exception when the called function is a scalar function ====
util.tableEnv.registerFunction("func0", Func0)
// SQL API call
// NOTE: it doesn't throw an exception but an AssertionError, maybe a Calcite bug
expectExceptionThrown(
util.tableEnv.sqlQuery("SELECT * FROM MyTable, LATERAL TABLE(func0(a))"),
null,
classOf[AssertionError])
//========== throw exception when the parameters is not correct ===============
// Java Table API call
util.addFunction("func2", new TableFunc2)
expectExceptionThrown(
t.joinLateral(call("func2", $"c", $"c")),
"Given parameters of function 'func2' do not match any signature")
// SQL API call
expectExceptionThrown(
util.tableEnv.sqlQuery("SELECT * FROM MyTable, LATERAL TABLE(func2(c, c))"),
"Given parameters of function 'func2' do not match any signature.")
}
/**
* Due to the improper translation of TableFunction left outer join (see CALCITE-2004), the
* join predicate can only be empty or literal true (the restriction should be removed in
* FLINK-7865).
*/
@Test (expected = classOf[ValidationException])
def testLeftOuterJoinWithPredicates(): Unit = {
val util = streamTestUtil()
val table = util.addTable[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
val function = util.addFunction("func1", new TableFunc1)
val result = table.leftOuterJoinLateral(function('c) as 's, 'c === 's)
.select('c, 's).where('a > 10)
util.verifyTable(result, "")
}
@Test(expected = classOf[ValidationException])
def testInvalidMapFunctionTypeAggregation(): Unit = {
val util = streamTestUtil()
util.addTable[(Int)](
"MyTable", 'int)
.flatMap('int.sum) // do not support AggregateFunction as input
}
@Test(expected = classOf[ValidationException])
def testInvalidMapFunctionTypeUDAGG(): Unit = {
val util = streamTestUtil()
val weightedAvg = new WeightedAvg
util.addTable[(Int)](
"MyTable", 'int)
.flatMap(weightedAvg('int, 'int)) // do not support AggregateFunction as input
}
@Test(expected = classOf[ValidationException])
def testInvalidMapFunctionTypeUDAGG2(): Unit = {
val util = streamTestUtil()
util.tableEnv.registerFunction("weightedAvg", new WeightedAvg)
util.addTable[(Int)](
"MyTable", 'int)
.flatMap(call("weightedAvg", $"int", $"int")) // do not support AggregateFunction as input
}
@Test(expected = classOf[ValidationException])
def testInvalidMapFunctionTypeScalarFunction(): Unit = {
val util = streamTestUtil()
util.addTable[(String)](
"MyTable", 'string)
.flatMap(Func15('string)) // do not support ScalarFunction as input
}
@Test(expected = classOf[ValidationException])
def testInvalidFlatMapFunctionTypeFieldReference(): Unit = {
val util = batchTestUtil()
util.addTable[(String)](
"MyTable", 'string)
.flatMap('string) // Only TableFunction can be used in flatMap
}
// ----------------------------------------------------------------------------------------------
private def expectExceptionThrown(
function: => Unit,
keywords: String,
clazz: Class[_ <: Throwable] = classOf[ValidationException])
: Unit = {
try {
function
fail(s"Expected a $clazz, but no exception is thrown.")
} catch {
case e if e.getClass == clazz =>
if (keywords != null) {
assertTrue(
s"The exception message '${e.getMessage}' doesn't contain keyword '$keywords'",
e.getMessage.contains(keywords))
}
      case e: Throwable => fail(s"Expected ${clazz.getSimpleName} to be thrown, but got $e.")
}
}
}
|
tzulitai/flink
|
flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/table/validation/CorrelateValidationTest.scala
|
Scala
|
apache-2.0
| 6,613
|
package com.nulabinc.backlog.r2b.exporter.convert
import javax.inject.Inject
import com.nulabinc.backlog.migration.common.conf.BacklogConstantValue
import com.nulabinc.backlog.migration.common.convert.{Convert, Writes}
import com.nulabinc.backlog.migration.common.domain.{
BacklogAttachment,
BacklogAttributeInfo,
BacklogChangeLog,
BacklogTextFormattingRule
}
import com.nulabinc.backlog.migration.common.utils.{DateUtil, FileUtil, Logging, StringUtil}
import com.nulabinc.backlog.r2b.mapping.converters.{
MappingPriorityConverter,
MappingStatusConverter,
MappingUserConverter
}
import com.nulabinc.backlog.r2b.mapping.core.MappingContainer
import com.nulabinc.backlog.r2b.redmine.conf.RedmineConstantValue
import com.nulabinc.backlog.r2b.redmine.domain.PropertyValue
import com.nulabinc.backlog.r2b.utils.TextileUtil
import com.nulabinc.backlog4j.CustomField.FieldType
import com.taskadapter.redmineapi.bean.JournalDetail
/**
* @author
* uchida
*/
private[exporter] class JournalDetailWrites @Inject() (
propertyValue: PropertyValue,
customFieldValueWrites: CustomFieldValueWrites,
mappingContainer: MappingContainer,
backlogTextFormattingRule: BacklogTextFormattingRule
) extends Writes[JournalDetail, BacklogChangeLog]
with Logging {
override def writes(detail: JournalDetail): BacklogChangeLog = {
BacklogChangeLog(
field = field(detail),
optOriginalValue = Option(detail.getOldValue)
.flatMap(value => detailValue(detail, value))
.map(DateUtil.formatIfNeeded),
optNewValue = Option(detail.getNewValue)
.flatMap(value => detailValue(detail, value))
.map(DateUtil.formatIfNeeded),
optAttachmentInfo = attachmentInfo(detail),
optAttributeInfo = attributeInfo(detail),
optNotificationInfo = None
)
}
private[this] def attributeInfo(
detail: JournalDetail
): Option[BacklogAttributeInfo] = {
detail.getProperty match {
case RedmineConstantValue.CUSTOM_FIELD =>
val optCustomFieldDefinition =
propertyValue.customFieldDefinitionOfId(detail.getName)
val optTypeId = optCustomFieldDefinition match {
case Some(customFieldDefinition) =>
customFieldDefinition.fieldFormat match {
case RedmineConstantValue.FieldFormat.TEXT =>
Some(FieldType.Text.getIntValue)
case RedmineConstantValue.FieldFormat.STRING |
RedmineConstantValue.FieldFormat.LINK =>
Some(FieldType.TextArea.getIntValue)
case RedmineConstantValue.FieldFormat.INT | RedmineConstantValue.FieldFormat.FLOAT =>
Some(FieldType.Numeric.getIntValue)
case RedmineConstantValue.FieldFormat.DATE =>
Some(FieldType.Date.getIntValue)
case RedmineConstantValue.FieldFormat.BOOL =>
Some(FieldType.SingleList.getIntValue)
case RedmineConstantValue.FieldFormat.LIST if !customFieldDefinition.isMultiple =>
Some(FieldType.SingleList.getIntValue)
case RedmineConstantValue.FieldFormat.LIST if customFieldDefinition.isMultiple =>
Some(FieldType.MultipleList.getIntValue)
case RedmineConstantValue.FieldFormat.VERSION =>
Some(FieldType.MultipleList.getIntValue)
case RedmineConstantValue.FieldFormat.USER =>
Some(FieldType.MultipleList.getIntValue)
case _ => None
}
case _ =>
throw new RuntimeException(
s"custom field id not found [${detail.getName}]"
)
}
optTypeId.map(typeId => BacklogAttributeInfo(optId = None, typeId = typeId.toString))
case _ => None
}
}
private[this] def attachmentInfo(
detail: JournalDetail
): Option[BacklogAttachment] = {
detail.getProperty match {
case RedmineConstantValue.ATTACHMENT =>
val attachment = BacklogAttachment(
optId = StringUtil.safeStringToLong(detail.getName),
name = FileUtil.normalize(detail.getNewValue)
)
Some(attachment)
case _ => None
}
}
private[this] def detailValue(
detail: JournalDetail,
value: String
): Option[String] =
detail.getProperty match {
case RedmineConstantValue.ATTR => attr(detail, value)
case RedmineConstantValue.CUSTOM_FIELD =>
Convert.toBacklog((detail.getName, Option(value)))(
customFieldValueWrites
)
case RedmineConstantValue.ATTACHMENT => Option(value)
case RedmineConstantValue.RELATION => Option(value)
}
private[this] def attr(detail: JournalDetail, value: String): Option[String] =
detail.getName match {
case RedmineConstantValue.Attr.STATUS =>
propertyValue.statuses
.find(status => StringUtil.safeEquals(status.getId.intValue(), value))
.map(_.getName)
.map(statusName =>
MappingStatusConverter
.convert(mappingContainer.statuses, statusName)
)
.map(_.name.trimmed)
case RedmineConstantValue.Attr.PRIORITY =>
propertyValue.priorities
.find(priority => StringUtil.safeEquals(priority.getId.intValue(), value))
.map(_.getName)
.map(MappingPriorityConverter.convert(mappingContainer.priority, _))
case RedmineConstantValue.Attr.ASSIGNED =>
propertyValue
.optUserOfId(Some(value))
.map(_.getLogin)
.map(MappingUserConverter.convert(mappingContainer.user, _))
case RedmineConstantValue.Attr.VERSION =>
propertyValue.versions
.find(version => StringUtil.safeEquals(version.getId.intValue(), value))
.map(_.getName)
case RedmineConstantValue.Attr.TRACKER =>
propertyValue.trackers
.find(tracker => StringUtil.safeEquals(tracker.getId.intValue(), value))
.map(_.getName)
case RedmineConstantValue.Attr.CATEGORY =>
propertyValue.categories
.find(category => StringUtil.safeEquals(category.getId.intValue(), value))
.map(_.getName)
case _ => Option(TextileUtil.convert(value, backlogTextFormattingRule))
}
private[this] def field(detail: JournalDetail): String =
detail.getProperty match {
case RedmineConstantValue.CUSTOM_FIELD =>
propertyValue
.customFieldDefinitionOfId(detail.getName)
.map(_.name)
.getOrElse {
val message =
propertyValue.customFieldDefinitions
.map(c => s"${c.id}: ${c.name}")
.mkString("\\n")
throw new RuntimeException(
s"custom field id not found. Custom field name: ${detail.getName}\\nAvailable custom fields are:\\n$message"
)
}
case RedmineConstantValue.ATTACHMENT =>
BacklogConstantValue.ChangeLog.ATTACHMENT
case _ =>
field(detail.getName)
}
private def field(name: String): String =
name match {
case RedmineConstantValue.Attr.SUBJECT =>
BacklogConstantValue.ChangeLog.SUMMARY
case RedmineConstantValue.Attr.DESCRIPTION =>
BacklogConstantValue.ChangeLog.DESCRIPTION
case RedmineConstantValue.Attr.CATEGORY =>
BacklogConstantValue.ChangeLog.COMPONENT
//version
case RedmineConstantValue.Attr.VERSION =>
BacklogConstantValue.ChangeLog.MILESTONE
case RedmineConstantValue.Attr.STATUS =>
BacklogConstantValue.ChangeLog.STATUS
case RedmineConstantValue.Attr.ASSIGNED =>
BacklogConstantValue.ChangeLog.ASSIGNER
case RedmineConstantValue.Attr.TRACKER =>
BacklogConstantValue.ChangeLog.ISSUE_TYPE
case RedmineConstantValue.Attr.START_DATE =>
BacklogConstantValue.ChangeLog.START_DATE
case RedmineConstantValue.Attr.DUE_DATE =>
BacklogConstantValue.ChangeLog.LIMIT_DATE
case RedmineConstantValue.Attr.PRIORITY =>
BacklogConstantValue.ChangeLog.PRIORITY
//resolution
case RedmineConstantValue.Attr.ESTIMATED_HOURS =>
BacklogConstantValue.ChangeLog.ESTIMATED_HOURS
//actualHours
case RedmineConstantValue.Attr.PARENT =>
BacklogConstantValue.ChangeLog.PARENT_ISSUE
//notification
//attachment
//commit
case _ => name
}
}
|
nulab/BacklogMigration-Redmine
|
src/main/scala/com/nulabinc/backlog/r2b/exporter/convert/JournalDetailWrites.scala
|
Scala
|
mit
| 8,397
|
package org.jetbrains.plugins.scala
package lang
package psi
package impl
package search
import com.intellij.openapi.application.ApplicationManager
import com.intellij.openapi.progress.ProgressManager
import com.intellij.openapi.util.Computable
import com.intellij.psi.PsiClass
import com.intellij.psi.search.GlobalSearchScope
import com.intellij.psi.search.searches.DirectClassInheritorsSearch
import com.intellij.util.{Processor, QueryExecutor}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScTemplateDefinition
import org.jetbrains.plugins.scala.lang.psi.stubs.util.ScalaStubsUtil
/**
* User: Alexander Podkhalyuzin
* Date: 24.10.2008
*/
class ScalaDirectClassInheritorsSearcher extends QueryExecutor[PsiClass, DirectClassInheritorsSearch.SearchParameters] {
def execute(queryParameters: DirectClassInheritorsSearch.SearchParameters, consumer: Processor[PsiClass]): Boolean = {
val clazz = queryParameters.getClassToProcess
    val scope: GlobalSearchScope = queryParameters.getScope match {
      case x: GlobalSearchScope => x
      case _ => return true
    }
ApplicationManager.getApplication.runReadAction(new Computable[Boolean] {
def compute: Boolean = {
if (!clazz.isValid) return true
val candidates: Seq[ScTemplateDefinition] = ScalaStubsUtil.getClassInheritors(clazz, scope)
for (candidate <- candidates if candidate.showAsInheritor) {
ProgressManager.checkCanceled()
if (candidate.isInheritor(clazz, deep = false)) {
if (!consumer.process(candidate)) {
return false
}
}
}
true
}
})
}
}
|
triggerNZ/intellij-scala
|
src/org/jetbrains/plugins/scala/lang/psi/impl/search/ScalaDirectClassInheritorsSearcher.scala
|
Scala
|
apache-2.0
| 1,672
|
/*
* Copyright (C) 2017 Michael Dippery <michael@monkey-robot.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mipadi.jupiter
/** I/O classes generally useful to many applications. */
package object io
|
mdippery/jupiter
|
src/main/scala/com/mipadi/jupiter/io/package.scala
|
Scala
|
apache-2.0
| 736
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.mqtt
import org.eclipse.paho.client.mqttv3.IMqttDeliveryToken
import org.eclipse.paho.client.mqttv3.MqttCallback
import org.eclipse.paho.client.mqttv3.MqttClient
import org.eclipse.paho.client.mqttv3.MqttMessage
import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.dstream._
import org.apache.spark.streaming.receiver.Receiver
/**
 * Input stream that subscribes to messages from an MQTT broker.
 * Uses Eclipse Paho as the MQTT client: http://www.eclipse.org/paho/
 * @param brokerUrl URL of the remote MQTT broker
* @param topic topic name to subscribe to
* @param storageLevel RDD storage level.
*/
private[streaming]
class MQTTInputDStream(
ssc_ : StreamingContext,
brokerUrl: String,
topic: String,
storageLevel: StorageLevel
) extends ReceiverInputDStream[String](ssc_) {
private[streaming] override def name: String = s"MQTT stream [$id]"
def getReceiver(): Receiver[String] = {
new MQTTReceiver(brokerUrl, topic, storageLevel)
}
}
private[streaming]
class MQTTReceiver(
brokerUrl: String,
topic: String,
storageLevel: StorageLevel
) extends Receiver[String](storageLevel) {
def onStop() {
}
def onStart() {
// Set up persistence for messages
val persistence = new MemoryPersistence()
// Initializing Mqtt Client specifying brokerUrl, clientID and MqttClientPersistance
val client = new MqttClient(brokerUrl, MqttClient.generateClientId(), persistence)
    // Callback automatically triggered when a new message arrives on the subscribed topic
val callback = new MqttCallback() {
// Handles Mqtt message
override def messageArrived(topic: String, message: MqttMessage) {
store(new String(message.getPayload(), "utf-8"))
}
override def deliveryComplete(token: IMqttDeliveryToken) {
}
override def connectionLost(cause: Throwable) {
restart("Connection lost ", cause)
}
}
// Set up callback for MqttClient. This needs to happen before
// connecting or subscribing, otherwise messages may be lost
client.setCallback(callback)
// Connect to MqttBroker
client.connect()
// Subscribe to Mqtt topic
client.subscribe(topic)
}
}
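// A minimal usage sketch, hedged: assumes the module's MQTTUtils companion helper,
// which wraps `new MQTTInputDStream(...)` behind `createStream`.
//   val ssc = new StreamingContext(sparkConf, Seconds(1))
//   val lines = MQTTUtils.createStream(ssc, "tcp://localhost:1883", "sensors/temp")
//   lines.print()
//   ssc.start()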
|
pronix/spark
|
external/mqtt/src/main/scala/org/apache/spark/streaming/mqtt/MQTTInputDStream.scala
|
Scala
|
apache-2.0
| 3,175
|
package debop4s.mongo.datetime
import debop4s.core.conversions.jodatime._
import debop4s.core.{TimestampZoneText, ToStringHelper}
import debop4s.mongo.AbstractMongoDocument
import org.joda.time.DateTime
import org.springframework.data.mongodb.core.mapping.Document
@Document
class Timeseries extends AbstractMongoDocument {
def this(datetime: DateTime, score: Long) = {
this()
this.time = datetime.asTimestampZoneText
this.score = score
}
var time: TimestampZoneText = _
var score: Long = _
override protected def buildStringHelper: ToStringHelper =
super.buildStringHelper
.add("time", time)
.add("score", score)
}
|
debop/debop4s
|
debop4s-mongo/src/test/scala/debop4s/mongo/datetime/Timeseries.scala
|
Scala
|
apache-2.0
| 655
|
package paperdoll.arm
import resource.Resource
import paperdoll.core.effect.Arrs.compose
import paperdoll.core.effect.Effects
import scalaz.Leibniz
import paperdoll.core.layer.Member
import paperdoll.core.layer.Layers
import shapeless.Coproduct
import scalaz.Leibniz.===
import paperdoll.core.effect.Pure
import paperdoll.core.effect.Handler
import scalaz.Forall
import paperdoll.core.effect.Arr_
import paperdoll.core.effect.Arrs
import paperdoll.core.effect.Impure
import paperdoll.queue.Queue
import paperdoll.core.effect.GenericBind
import paperdoll.core.effect.Arr
import paperdoll.core.effect.GenericHandler
import shapeless.Nat
sealed trait Region[S <: Nat, R, A] {
def fold[B](resource: (A === R, R) => B): B
}
object Region {
/**
* S is an index to allow us to have multiple regions for resources
* of the same type. Use a different S for each resource in the same
* effect stack (e.g. when opening two or more files).
* (Reusing the same S for different *types* of resource R should
* work but I still don't recommend it)
* If writing a library method that returns an effect stack that
* includes regions, best practice is to take an "offset" argument
* and start numbering from there, so that client code that calls
* two or more such library methods can ensure the types don't overlap.
*/
def newSHandle[S <: Nat, R](s: S, r: => R): Effects.One[Region_[S, R], R] =
Effects.send[Region_[S, R], R](new Region[S, R, R] {
override def fold[B](resource: (R === R, R) => B) =
resource(Leibniz.refl, r)
})
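  // Illustrative sketch only (the `openFile` helper is hypothetical): give each
  // resource its own Nat index so the Region_ layers stay distinct, e.g.
  //   val a = newSHandle(shapeless.nat._0, openFile(pathA))
  //   val b = newSHandle(shapeless.nat._1, openFile(pathB))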
private[this] def handleInRgn[S <: Nat, RE](handle: RE)(implicit re: Resource[RE]) = new GenericBind[Region_[S, RE]] {
override type O[X] = X
override def pure[A](a: A) = {
re.close(handle)
a
}
override def bind[V, RR <: Coproduct, RL <: Layers[RR], A](eff: Region[S, RE, V], cont: Arr[RR, RL, V, O[A]]) =
throw new RuntimeException("Opened the same handle twice. Did you reuse the same S type for multiple regions?")
}
def newRgn[S <: Nat, RE](implicit re: Resource[RE]): GenericHandler[Region_[S, RE]] = new GenericHandler[Region_[S, RE]] {
override type O[X] = X
override def handler[R <: Coproduct](implicit me1: Member[R, Region_[S, RE]]): Handler[R, me1.L, Region_[S, RE]] {
type RestR = me1.RestR
type RestL = me1.RestL
type O[X] = X
} = new Handler[R, me1.L, Region_[S, RE]] {
type RestR = me1.RestR
type RestL = me1.RestL
type O[X] = X
def me = me1
override def run[A](eff: Effects[R, me1.L, A]): Effects[RestR, RestL, O[A]] =
eff.fold(
a ⇒ Pure[RestR, RestL, A](a),
new Forall[({ type K[X] = (me1.L#O[X], Arrs[R, me1.L, X, A]) ⇒ Effects[RestR, RestL, O[A]] })#K] {
override def apply[X] = { (eff, cont) ⇒
val composed = compose(cont)
me.remove(eff).fold(
otherEffect ⇒ Impure[RestR, RestL, X, O[A]](otherEffect, Queue.One[Arr_[RestR, RestL]#O, X, O[A]](
composed andThen { run(_) })),
_.fold({ (le, r) ⇒
re.open(r)
handleInRgn[S, RE](r).apply[R, me1.L, A, me1.L](le.subst[Arr[R, me1.L, ?, A]](composed)(r))(me1, Leibniz.refl)
}))
}
})
}
}
}
|
m50d/paperdoll
|
arm/src/main/scala/paperdoll/arm/Region.scala
|
Scala
|
apache-2.0
| 3,414
|
package com.github.j5ik2o.forseti.domain.accessToken
object AccessTokenType extends Enumeration {
val Bearer, Mac, Unknown = Value
}
|
j5ik2o/forseti
|
domain/src/main/scala/com/github/j5ik2o/forseti/domain/accessToken/AccessTokenType.scala
|
Scala
|
mit
| 138
|
/*
* Copyright 2013-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0.
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied.
*
* See the License for the specific language governing permissions
* and limitations under the License.
*/
class Point(xc: Int, yc: Int) {
var x: Int = xc
var y: Int = yc
def move(dx: Int, dy: Int) {
x = x + dx
y = y + dy
}
  override def toString(): String = "(" + x + ", " + y + ")"
}
|
JetBrains/sbt-tc-logger
|
test/testdata/jacoco/src/main/scala/Point.scala
|
Scala
|
apache-2.0
| 803
|
val x@"foo" = "foo"
/*start*/x/*end*/
//String
|
ilinum/intellij-scala
|
testdata/typeInference/bugs5/SCL4558.scala
|
Scala
|
apache-2.0
| 46
|
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.utils.stats
import org.junit.runner.RunWith
import org.locationtech.geomesa.curve.TimePeriod
import org.locationtech.geomesa.utils.geotools.GeoToolsDateFormat
import org.locationtech.geomesa.utils.text.WKTUtils
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class Z3HistogramTest extends Specification with StatTestHelper {
def createStat(length: Int, observe: Boolean): Z3Histogram = {
val s = Stat(sft, Stat.Z3Histogram("geom", "dtg", TimePeriod.Week, length))
if (observe) {
features.foreach { s.observe }
}
s.asInstanceOf[Z3Histogram]
}
def createStat(observe: Boolean = true): Z3Histogram = createStat(1024, observe)
def toDate(string: String) = java.util.Date.from(java.time.LocalDateTime.parse(string, GeoToolsDateFormat).toInstant(java.time.ZoneOffset.UTC))
def toGeom(string: String) = WKTUtils.read(string)
"HistogramZ3 stat" should {
"work with geometries and dates" >> {
"be empty initially" >> {
val stat = createStat(observe = false)
stat.isEmpty must beTrue
}
"correctly bin values" >> {
val stat = createStat()
stat.isEmpty must beFalse
forall(0 until 100) { i =>
val (w, idx) = stat.indexOf(toGeom(s"POINT(-$i ${i / 2})"), toDate(f"2012-01-01T${i%24}%02d:00:00.000Z"))
stat.count(w, idx) must beBetween(1L, 12L)
}
}
"serialize and deserialize" >> {
val stat = createStat()
val packed = StatSerializer(sft).serialize(stat)
val unpacked = StatSerializer(sft).deserialize(packed)
unpacked must beAnInstanceOf[Z3Histogram]
unpacked.asInstanceOf[Z3Histogram].geomIndex mustEqual stat.geomIndex
unpacked.asInstanceOf[Z3Histogram].dtgIndex mustEqual stat.dtgIndex
unpacked.asInstanceOf[Z3Histogram].length mustEqual stat.length
unpacked.asInstanceOf[Z3Histogram].toJson mustEqual stat.toJson
}
"serialize and deserialize empty stats" >> {
val stat = createStat(observe = false)
val packed = StatSerializer(sft).serialize(stat)
val unpacked = StatSerializer(sft).deserialize(packed)
unpacked must beAnInstanceOf[Z3Histogram]
unpacked.asInstanceOf[Z3Histogram].geomIndex mustEqual stat.geomIndex
unpacked.asInstanceOf[Z3Histogram].dtgIndex mustEqual stat.dtgIndex
unpacked.asInstanceOf[Z3Histogram].length mustEqual stat.length
unpacked.asInstanceOf[Z3Histogram].toJson mustEqual stat.toJson
}
"deserialize as immutable value" >> {
val stat = createStat()
val packed = StatSerializer(sft).serialize(stat)
val unpacked = StatSerializer(sft).deserialize(packed, immutable = true)
unpacked must beAnInstanceOf[Z3Histogram]
unpacked.asInstanceOf[Z3Histogram].geomIndex mustEqual stat.geomIndex
unpacked.asInstanceOf[Z3Histogram].dtgIndex mustEqual stat.dtgIndex
unpacked.asInstanceOf[Z3Histogram].length mustEqual stat.length
unpacked.asInstanceOf[Z3Histogram].toJson mustEqual stat.toJson
unpacked.clear must throwAn[Exception]
unpacked.+=(stat) must throwAn[Exception]
unpacked.observe(features.head) must throwAn[Exception]
unpacked.unobserve(features.head) must throwAn[Exception]
}
"clear" >> {
val stat = createStat()
stat.clear()
stat.isEmpty must beTrue
forall(0 until 100) { i =>
val (w, idx) = stat.indexOf(toGeom(s"POINT(-$i ${i / 2})"), toDate(f"2012-01-01T${i%24}%02d:00:00.000Z"))
stat.count(w, idx) mustEqual 0
}
val (w, idx) = stat.indexOf(toGeom("POINT(-180 -90)"), toDate("2012-01-01T00:00:00.000Z"))
stat.count(w, idx) mustEqual 0
}
}
}
}
|
MutahirKazmi/geomesa
|
geomesa-utils/src/test/scala/org/locationtech/geomesa/utils/stats/Z3HistogramTest.scala
|
Scala
|
apache-2.0
| 4,325
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util.collection
import scala.collection.mutable.ArrayBuffer
import scala.concurrent.duration._
import scala.language.postfixOps
import scala.ref.WeakReference
import org.scalatest.Matchers
import org.scalatest.concurrent.Eventually
import org.apache.spark._
import org.apache.spark.internal.config._
import org.apache.spark.io.CompressionCodec
import org.apache.spark.memory.MemoryTestingUtils
import org.apache.spark.util.CompletionIterator
class ExternalAppendOnlyMapSuite extends SparkFunSuite
with LocalSparkContext
with Eventually
  with Matchers {
import TestUtils.{assertNotSpilled, assertSpilled}
private val allCompressionCodecs = CompressionCodec.ALL_COMPRESSION_CODECS
private def createCombiner[T](i: T) = ArrayBuffer[T](i)
private def mergeValue[T](buffer: ArrayBuffer[T], i: T): ArrayBuffer[T] = buffer += i
private def mergeCombiners[T](buf1: ArrayBuffer[T], buf2: ArrayBuffer[T]): ArrayBuffer[T] =
buf1 ++= buf2
private def createExternalMap[T] = {
val context = MemoryTestingUtils.fakeTaskContext(sc.env)
new ExternalAppendOnlyMap[T, T, ArrayBuffer[T]](
createCombiner[T], mergeValue[T], mergeCombiners[T], context = context)
}
private def createSparkConf(loadDefaults: Boolean, codec: Option[String] = None): SparkConf = {
val conf = new SparkConf(loadDefaults)
// Make the Java serializer write a reset instruction (TC_RESET) after each object to test
// for a bug we had with bytes written past the last object in a batch (SPARK-2792)
conf.set("spark.serializer.objectStreamReset", "1")
conf.set("spark.serializer", "org.apache.spark.serializer.JavaSerializer")
conf.set("spark.shuffle.spill.compress", codec.isDefined.toString)
conf.set("spark.shuffle.compress", codec.isDefined.toString)
codec.foreach { c => conf.set("spark.io.compression.codec", c) }
// Ensure that we actually have multiple batches per spill file
conf.set("spark.shuffle.spill.batchSize", "10")
conf
}
test("single insert") {
val conf = createSparkConf(loadDefaults = false)
sc = new SparkContext("local", "test", conf)
val map = createExternalMap[Int]
map.insert(1, 10)
val it = map.iterator
assert(it.hasNext)
val kv = it.next()
assert(kv._1 === 1 && kv._2 === ArrayBuffer[Int](10))
assert(!it.hasNext)
sc.stop()
}
test("multiple insert") {
val conf = createSparkConf(loadDefaults = false)
sc = new SparkContext("local", "test", conf)
val map = createExternalMap[Int]
map.insert(1, 10)
map.insert(2, 20)
map.insert(3, 30)
val it = map.iterator
assert(it.hasNext)
assert(it.toSet === Set[(Int, ArrayBuffer[Int])](
(1, ArrayBuffer[Int](10)),
(2, ArrayBuffer[Int](20)),
(3, ArrayBuffer[Int](30))))
sc.stop()
}
test("insert with collision") {
val conf = createSparkConf(loadDefaults = false)
sc = new SparkContext("local", "test", conf)
val map = createExternalMap[Int]
map.insertAll(Seq(
(1, 10),
(2, 20),
(3, 30),
(1, 100),
(2, 200),
(1, 1000)))
val it = map.iterator
assert(it.hasNext)
val result = it.toSet[(Int, ArrayBuffer[Int])].map(kv => (kv._1, kv._2.toSet))
assert(result === Set[(Int, Set[Int])](
(1, Set[Int](10, 100, 1000)),
(2, Set[Int](20, 200)),
(3, Set[Int](30))))
sc.stop()
}
test("ordering") {
val conf = createSparkConf(loadDefaults = false)
sc = new SparkContext("local", "test", conf)
val map1 = createExternalMap[Int]
map1.insert(1, 10)
map1.insert(2, 20)
map1.insert(3, 30)
val map2 = createExternalMap[Int]
map2.insert(2, 20)
map2.insert(3, 30)
map2.insert(1, 10)
val map3 = createExternalMap[Int]
map3.insert(3, 30)
map3.insert(1, 10)
map3.insert(2, 20)
val it1 = map1.iterator
val it2 = map2.iterator
val it3 = map3.iterator
var kv1 = it1.next()
var kv2 = it2.next()
var kv3 = it3.next()
assert(kv1._1 === kv2._1 && kv2._1 === kv3._1)
assert(kv1._2 === kv2._2 && kv2._2 === kv3._2)
kv1 = it1.next()
kv2 = it2.next()
kv3 = it3.next()
assert(kv1._1 === kv2._1 && kv2._1 === kv3._1)
assert(kv1._2 === kv2._2 && kv2._2 === kv3._2)
kv1 = it1.next()
kv2 = it2.next()
kv3 = it3.next()
assert(kv1._1 === kv2._1 && kv2._1 === kv3._1)
assert(kv1._2 === kv2._2 && kv2._2 === kv3._2)
sc.stop()
}
test("null keys and values") {
val conf = createSparkConf(loadDefaults = false)
sc = new SparkContext("local", "test", conf)
val map = createExternalMap[Int]
val nullInt = null.asInstanceOf[Int]
map.insert(1, 5)
map.insert(2, 6)
map.insert(3, 7)
map.insert(4, nullInt)
map.insert(nullInt, 8)
map.insert(nullInt, nullInt)
val result = map.iterator.toSet[(Int, ArrayBuffer[Int])].map(kv => (kv._1, kv._2.sorted))
assert(result === Set[(Int, Seq[Int])](
(1, Seq[Int](5)),
(2, Seq[Int](6)),
(3, Seq[Int](7)),
(4, Seq[Int](nullInt)),
(nullInt, Seq[Int](nullInt, 8))
))
sc.stop()
}
test("simple aggregator") {
val conf = createSparkConf(loadDefaults = false)
sc = new SparkContext("local", "test", conf)
// reduceByKey
val rdd = sc.parallelize(1 to 10).map(i => (i%2, 1))
val result1 = rdd.reduceByKey(_ + _).collect()
assert(result1.toSet === Set[(Int, Int)]((0, 5), (1, 5)))
// groupByKey
val result2 = rdd.groupByKey().collect().map(x => (x._1, x._2.toList)).toSet
assert(result2.toSet === Set[(Int, Seq[Int])]
((0, List[Int](1, 1, 1, 1, 1)), (1, List[Int](1, 1, 1, 1, 1))))
sc.stop()
}
test("simple cogroup") {
val conf = createSparkConf(loadDefaults = false)
sc = new SparkContext("local", "test", conf)
val rdd1 = sc.parallelize(1 to 4).map(i => (i, i))
val rdd2 = sc.parallelize(1 to 4).map(i => (i%2, i))
val result = rdd1.cogroup(rdd2).collect()
result.foreach { case (i, (seq1, seq2)) =>
i match {
case 0 => assert(seq1.toSet === Set[Int]() && seq2.toSet === Set[Int](2, 4))
case 1 => assert(seq1.toSet === Set[Int](1) && seq2.toSet === Set[Int](1, 3))
case 2 => assert(seq1.toSet === Set[Int](2) && seq2.toSet === Set[Int]())
case 3 => assert(seq1.toSet === Set[Int](3) && seq2.toSet === Set[Int]())
case 4 => assert(seq1.toSet === Set[Int](4) && seq2.toSet === Set[Int]())
}
}
sc.stop()
}
test("spilling") {
testSimpleSpilling()
}
test("spilling with compression") {
// Keep track of which compression codec we're using to report in test failure messages
var lastCompressionCodec: Option[String] = None
try {
allCompressionCodecs.foreach { c =>
lastCompressionCodec = Some(c)
testSimpleSpilling(Some(c))
}
} catch {
// Include compression codec used in test failure message
// We need to catch Throwable here because assertion failures are not covered by Exceptions
case t: Throwable =>
val compressionMessage = lastCompressionCodec
.map { c => "with compression using codec " + c }
.getOrElse("without compression")
        val newException = new Exception(s"Test failed $compressionMessage:\n\n${t.getMessage}")
newException.setStackTrace(t.getStackTrace)
throw newException
}
}
test("spilling with compression and encryption") {
testSimpleSpilling(Some(CompressionCodec.DEFAULT_COMPRESSION_CODEC), encrypt = true)
}
/**
* Test spilling through simple aggregations and cogroups.
* If a compression codec is provided, use it. Otherwise, do not compress spills.
*/
private def testSimpleSpilling(codec: Option[String] = None, encrypt: Boolean = false): Unit = {
val size = 1000
val conf = createSparkConf(loadDefaults = true, codec) // Load defaults for Spark home
conf.set("spark.shuffle.spill.numElementsForceSpillThreshold", (size / 4).toString)
conf.set(IO_ENCRYPTION_ENABLED, encrypt)
sc = new SparkContext("local-cluster[1,1,1024]", "test", conf)
assertSpilled(sc, "reduceByKey") {
val result = sc.parallelize(0 until size)
.map { i => (i / 2, i) }.reduceByKey(math.max).collect()
assert(result.length === size / 2)
result.foreach { case (k, v) =>
val expected = k * 2 + 1
assert(v === expected, s"Value for $k was wrong: expected $expected, got $v")
}
}
assertSpilled(sc, "groupByKey") {
val result = sc.parallelize(0 until size).map { i => (i / 2, i) }.groupByKey().collect()
assert(result.length == size / 2)
result.foreach { case (i, seq) =>
val actual = seq.toSet
val expected = Set(i * 2, i * 2 + 1)
assert(actual === expected, s"Value for $i was wrong: expected $expected, got $actual")
}
}
assertSpilled(sc, "cogroup") {
val rdd1 = sc.parallelize(0 until size).map { i => (i / 2, i) }
val rdd2 = sc.parallelize(0 until size).map { i => (i / 2, i) }
val result = rdd1.cogroup(rdd2).collect()
assert(result.length === size / 2)
result.foreach { case (i, (seq1, seq2)) =>
val actual1 = seq1.toSet
val actual2 = seq2.toSet
val expected = Set(i * 2, i * 2 + 1)
assert(actual1 === expected, s"Value 1 for $i was wrong: expected $expected, got $actual1")
assert(actual2 === expected, s"Value 2 for $i was wrong: expected $expected, got $actual2")
}
}
sc.stop()
}
test("ExternalAppendOnlyMap shouldn't fail when forced to spill before calling its iterator") {
val size = 1000
val conf = createSparkConf(loadDefaults = true)
conf.set("spark.shuffle.spill.numElementsForceSpillThreshold", (size / 2).toString)
sc = new SparkContext("local-cluster[1,1,1024]", "test", conf)
val map = createExternalMap[String]
val consumer = createExternalMap[String]
map.insertAll((1 to size).iterator.map(_.toString).map(i => (i, i)))
assert(map.spill(10000, consumer) == 0L)
}
test("spilling with hash collisions") {
val size = 1000
val conf = createSparkConf(loadDefaults = true)
conf.set("spark.shuffle.spill.numElementsForceSpillThreshold", (size / 2).toString)
sc = new SparkContext("local-cluster[1,1,1024]", "test", conf)
val map = createExternalMap[String]
val collisionPairs = Seq(
("Aa", "BB"), // 2112
("to", "v1"), // 3707
("variants", "gelato"), // -1249574770
("Teheran", "Siblings"), // 231609873
("misused", "horsemints"), // 1069518484
("isohel", "epistolaries"), // -1179291542
("righto", "buzzards"), // -931102253
("hierarch", "crinolines"), // -1732884796
("inwork", "hypercatalexes"), // -1183663690
("wainages", "presentencing"), // 240183619
("trichothecenes", "locular"), // 339006536
("pomatoes", "eructation") // 568647356
)
collisionPairs.foreach { case (w1, w2) =>
// String.hashCode is documented to use a specific algorithm, but check just in case
assert(w1.hashCode === w2.hashCode)
}
map.insertAll((1 to size).iterator.map(_.toString).map(i => (i, i)))
collisionPairs.foreach { case (w1, w2) =>
map.insert(w1, w2)
map.insert(w2, w1)
}
assert(map.numSpills > 0, "map did not spill")
// A map of collision pairs in both directions
val collisionPairsMap = (collisionPairs ++ collisionPairs.map(_.swap)).toMap
// Avoid map.size or map.iterator.length because this destructively sorts the underlying map
var count = 0
val it = map.iterator
while (it.hasNext) {
val kv = it.next()
val expectedValue = ArrayBuffer[String](collisionPairsMap.getOrElse(kv._1, kv._1))
assert(kv._2.equals(expectedValue))
count += 1
}
assert(count === size + collisionPairs.size * 2)
sc.stop()
}
test("spilling with many hash collisions") {
val size = 1000
val conf = createSparkConf(loadDefaults = true)
conf.set("spark.shuffle.spill.numElementsForceSpillThreshold", (size / 2).toString)
sc = new SparkContext("local-cluster[1,1,1024]", "test", conf)
val context = MemoryTestingUtils.fakeTaskContext(sc.env)
val map =
new ExternalAppendOnlyMap[FixedHashObject, Int, Int](_ => 1, _ + _, _ + _, context = context)
// Insert 10 copies each of lots of objects whose hash codes are either 0 or 1. This causes
// problems if the map fails to group together the objects with the same code (SPARK-2043).
for (i <- 1 to 10) {
for (j <- 1 to size) {
map.insert(FixedHashObject(j, j % 2), 1)
}
}
assert(map.numSpills > 0, "map did not spill")
val it = map.iterator
var count = 0
while (it.hasNext) {
val kv = it.next()
assert(kv._2 === 10)
count += 1
}
assert(count === size)
sc.stop()
}
test("spilling with hash collisions using the Int.MaxValue key") {
val size = 1000
val conf = createSparkConf(loadDefaults = true)
conf.set("spark.shuffle.spill.numElementsForceSpillThreshold", (size / 2).toString)
sc = new SparkContext("local-cluster[1,1,1024]", "test", conf)
val map = createExternalMap[Int]
(1 to size).foreach { i => map.insert(i, i) }
map.insert(Int.MaxValue, Int.MaxValue)
assert(map.numSpills > 0, "map did not spill")
val it = map.iterator
while (it.hasNext) {
// Should not throw NoSuchElementException
it.next()
}
sc.stop()
}
test("spilling with null keys and values") {
val size = 1000
val conf = createSparkConf(loadDefaults = true)
conf.set("spark.shuffle.spill.numElementsForceSpillThreshold", (size / 2).toString)
sc = new SparkContext("local-cluster[1,1,1024]", "test", conf)
val map = createExternalMap[Int]
map.insertAll((1 to size).iterator.map(i => (i, i)))
map.insert(null.asInstanceOf[Int], 1)
map.insert(1, null.asInstanceOf[Int])
map.insert(null.asInstanceOf[Int], null.asInstanceOf[Int])
assert(map.numSpills > 0, "map did not spill")
val it = map.iterator
while (it.hasNext) {
// Should not throw NullPointerException
it.next()
}
sc.stop()
}
test("SPARK-22713 spill during iteration leaks internal map") {
val size = 1000
val conf = createSparkConf(loadDefaults = true)
sc = new SparkContext("local-cluster[1,1,1024]", "test", conf)
val map = createExternalMap[Int]
map.insertAll((0 until size).iterator.map(i => (i / 10, i)))
assert(map.numSpills == 0, "map was not supposed to spill")
val it = map.iterator
assert(it.isInstanceOf[CompletionIterator[_, _]])
// org.apache.spark.util.collection.AppendOnlyMap.destructiveSortedIterator returns
    // an instance of an anonymous Iterator class.
val underlyingMapRef = WeakReference(map.currentMap)
{
// direct asserts introduced some macro generated code that held a reference to the map
val tmpIsNull = null == underlyingMapRef.get.orNull
assert(!tmpIsNull)
}
val first50Keys = for ( _ <- 0 until 50) yield {
val (k, vs) = it.next
val sortedVs = vs.sorted
assert(sortedVs.seq == (0 until 10).map(10 * k + _))
k
}
assert(map.numSpills == 0)
map.spill(Long.MaxValue, null)
// these asserts try to show that we're no longer holding references to the underlying map.
// it'd be nice to use something like
// https://github.com/scala/scala/blob/2.13.x/test/junit/scala/tools/testing/AssertUtil.scala
// (lines 69-89)
// assert(map.currentMap == null)
eventually(timeout(5 seconds), interval(200 milliseconds)) {
System.gc()
// direct asserts introduced some macro generated code that held a reference to the map
val tmpIsNull = null == underlyingMapRef.get.orNull
assert(tmpIsNull)
}
val next50Keys = for ( _ <- 0 until 50) yield {
val (k, vs) = it.next
val sortedVs = vs.sorted
assert(sortedVs.seq == (0 until 10).map(10 * k + _))
k
}
assert(!it.hasNext)
val keys = (first50Keys ++ next50Keys).sorted
assert(keys == (0 until 100))
}
test("drop all references to the underlying map once the iterator is exhausted") {
val size = 1000
val conf = createSparkConf(loadDefaults = true)
sc = new SparkContext("local-cluster[1,1,1024]", "test", conf)
val map = createExternalMap[Int]
map.insertAll((0 until size).iterator.map(i => (i / 10, i)))
assert(map.numSpills == 0, "map was not supposed to spill")
val underlyingMapRef = WeakReference(map.currentMap)
{
// direct asserts introduced some macro generated code that held a reference to the map
val tmpIsNull = null == underlyingMapRef.get.orNull
assert(!tmpIsNull)
}
val it = map.iterator
    assert(it.isInstanceOf[CompletionIterator[_, _]])
    val keys = it.map {
case (k, vs) =>
val sortedVs = vs.sorted
assert(sortedVs.seq == (0 until 10).map(10 * k + _))
k
}
.toList
.sorted
assert(it.isEmpty)
assert(keys == (0 until 100).toList)
assert(map.numSpills == 0)
// these asserts try to show that we're no longer holding references to the underlying map.
// it'd be nice to use something like
// https://github.com/scala/scala/blob/2.13.x/test/junit/scala/tools/testing/AssertUtil.scala
// (lines 69-89)
assert(map.currentMap == null)
eventually {
Thread.sleep(500)
System.gc()
// direct asserts introduced some macro generated code that held a reference to the map
val tmpIsNull = null == underlyingMapRef.get.orNull
assert(tmpIsNull)
}
assert(it.toList.isEmpty)
}
test("SPARK-22713 external aggregation updates peak execution memory") {
val spillThreshold = 1000
val conf = createSparkConf(loadDefaults = false)
.set("spark.shuffle.spill.numElementsForceSpillThreshold", spillThreshold.toString)
sc = new SparkContext("local", "test", conf)
// No spilling
AccumulatorSuite.verifyPeakExecutionMemorySet(sc, "external map without spilling") {
assertNotSpilled(sc, "verify peak memory") {
sc.parallelize(1 to spillThreshold / 2, 2).map { i => (i, i) }.reduceByKey(_ + _).count()
}
}
// With spilling
AccumulatorSuite.verifyPeakExecutionMemorySet(sc, "external map with spilling") {
assertSpilled(sc, "verify peak memory") {
sc.parallelize(1 to spillThreshold * 3, 2).map { i => (i, i) }.reduceByKey(_ + _).count()
}
}
}
test("force to spill for external aggregation") {
val conf = createSparkConf(loadDefaults = false)
.set("spark.shuffle.memoryFraction", "0.01")
.set("spark.memory.useLegacyMode", "true")
.set("spark.testing.memory", "100000000")
.set("spark.shuffle.sort.bypassMergeThreshold", "0")
sc = new SparkContext("local", "test", conf)
val N = 2e5.toInt
sc.parallelize(1 to N, 2)
.map { i => (i, i) }
.groupByKey()
.reduceByKey(_ ++ _)
.count()
}
}
|
guoxiaolongzte/spark
|
core/src/test/scala/org/apache/spark/util/collection/ExternalAppendOnlyMapSuite.scala
|
Scala
|
apache-2.0
| 20,009
|
package io.github.pauljamescleary.petstore.infrastructure.endpoint
import org.http4s.QueryParamDecoder
import org.http4s.dsl.impl.OptionalQueryParamDecoderMatcher
object Pagination {
/* Necessary for decoding query parameters */
import QueryParamDecoder._
/* Parses out the optional offset and page size params */
object OptionalPageSizeMatcher extends OptionalQueryParamDecoderMatcher[Int]("pageSize")
object OptionalOffsetMatcher extends OptionalQueryParamDecoderMatcher[Int]("offset")
}
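// Hedged usage sketch with the http4s DSL (route path and service are illustrative):
//   import Pagination._
//   case GET -> Root / "pets" :? OptionalPageSizeMatcher(pageSize) +& OptionalOffsetMatcher(offset) =>
//     petService.list(pageSize.getOrElse(10), offset.getOrElse(0))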
|
pauljamescleary/scala-pet-store
|
src/main/scala/io/github/pauljamescleary/petstore/infrastructure/endpoint/Pagination.scala
|
Scala
|
apache-2.0
| 503
|
package scuff
import java.lang.management.ManagementFactory
import scala.reflect.{ ClassTag, classTag }
import scala.jdk.CollectionConverters._
import java.net.InetSocketAddress
import java.net.InetAddress
import java.util.concurrent.atomic.AtomicInteger
import javax.management._
import javax.management.remote.JMXServiceURL
import javax.management.remote.jmxmp.JMXMPConnectorServer
import java.beans.Introspector
import java.beans.PropertyDescriptor
import java.lang.reflect.Method
import java.util.concurrent.atomic.AtomicLong
import java.net.BindException
import scala.util.control.NonFatal
object JMX {
final case class Registration(name: ObjectName)
extends Subscription {
require(Server isRegistered name)
def cancel(): Unit = try {
Server unregisterMBean name
} catch {
case NonFatal(cause) =>
cause.printStackTrace(System.err)
}
}
  private[this] val unsafeChars = Array(' ', '*', '?', '=', ':', '"', '\n', '\\', '/', ',')
private[this] val nameCounters = new Memoizer[String, AtomicInteger](_ => new AtomicInteger)
private def mxBeanInterfaceOf(mxBean: AnyRef): Option[Class[_]] = {
val MXBeanAnnotationClass = classOf[MXBean]
mxBean.getClass.getInterfaces.find { i =>
i.getName.endsWith(MXBeanSuffix) ||
i.getAnnotations.map(_.annotationType).exists {
case MXBeanAnnotationClass => true
case _ => false
}
}
}
private def getTypeName(mxBean: AnyRef, mxBeanType: Option[Class[_]]): String = {
val name = mxBeanType.map(_.getSimpleName) || mxBean.getClass.getSimpleName
if (name.length > MXBeanSuffix.length && name.endsWith(MXBeanSuffix)) {
name.substring(0, name.length - MXBeanSuffix.length)
} else name
}
private def mkObjName(mxBean: AnyRef, attrs: Map[String, String]): ObjectName = {
    def isQuoted(name: String) = name.startsWith("\"") && name.endsWith("\"")
def needsQuotes(name: String) = !isQuoted(name) && unsafeChars.exists(name.indexOf(_) != -1)
def safeName(name: String): String = {
nameCounters(name).getAndIncrement match {
case 0 =>
if (needsQuotes(name)) {
ObjectName quote name
} else name
case n =>
if (isQuoted(name)) {
s""""${name.substring(1, name.length - 1)}[$n]""""
} else if (needsQuotes(name)) {
ObjectName quote s"$name[$n]"
} else s"$name[$n]"
}
}
val mxBeanInterface = mxBeanInterfaceOf(mxBean)
val attributes = Map("type" -> getTypeName(mxBean, mxBeanInterface)) ++ attrs.map {
case ("name", name) => "name" -> safeName(name)
case entry => entry
}
new ObjectName(mxBean.getClass.getPackage.getName, new java.util.Hashtable(attributes.asJava))
}
private[this] final def MXBeanSuffix = "MXBean"
private[this] final val Server: MBeanServer = ManagementFactory.getPlatformMBeanServer
def startJMXMP(port: Int): JMXMPConnectorServer = startJMXMP(new InetSocketAddress(InetAddress.getLocalHost, port))
def startJMXMP(address: InetSocketAddress = null): JMXMPConnectorServer = {
val (jmxmpServer, urlOpt) = address match {
case null =>
val server = new JMXMPConnectorServer(Server)
server -> Option(server.getAddress)
case addr =>
val url = new JMXServiceURL("jmxmp", addr.getAddress.getHostAddress, addr.getPort)
new JMXMPConnectorServer(url, null, Server) -> Some(url)
}
try jmxmpServer.start() catch {
case cause: BindException =>
val target = urlOpt.map(url => s"${url.getHost}:${url.getPort}") || "default MBean server"
val be = new BindException(s"Cannot bind to $target")
be.setStackTrace(be.getStackTrace.take(1))
be initCause cause
throw be
}
jmxmpServer
}
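  // e.g. `val connector = JMX.startJMXMP(5555)` binds a JMXMP connector on the local
  // host at an illustrative port; call `connector.stop()` on shutdown.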
def register(mxBean: AnyRef, instanceName: Option[String]): Registration =
register(mxBean, instanceName, Map.empty[String, String])
def register(mxBean: AnyRef, instanceName: Option[String], attributes: Map[String, String]): Registration = {
val attrs = instanceName.foldLeft(attributes) {
case (attrs, name) => attrs.updated("name", name)
}
register(mxBean, attrs)
}
def register(mxBean: AnyRef, instanceName: String = null, attributes: Map[String, String] = Map.empty): Registration =
register(mxBean, instanceName.optional, attributes)
def register(mxBean: AnyRef, attributes: Map[String, String]): Registration = {
val objName = mkObjName(mxBean, attributes)
register(mxBean, objName)
}
def register(mxBean: AnyRef, objectName: ObjectName): Registration = {
Server.registerMBean(mxBean, objectName)
Registration(objectName)
}
/**
   * Implement this trait if your bean has properties
   * that are unknown or variable at compile time.
*/
trait DynamicMBean extends javax.management.DynamicMBean {
/**
* The dynamic properties.
     * NOTE: The keys must remain constant. Adding or removing keys
     * after startup will not be reflected; only value changes will be.
*/
protected def dynamicProps: collection.Map[String, Any]
protected def typeName = JMX.getTypeName(this, mxBeanType)
private[this] val mxBeanType: Option[Class[_]] = mxBeanInterfaceOf(this)
private[this] val (ops, props) = {
mxBeanType match {
case None =>
Array.empty[(MBeanOperationInfo, Method)] -> Map.empty[String, PropertyDescriptor]
case Some(interface) =>
val props = Introspector.getBeanInfo(interface)
.getPropertyDescriptors.map { prop =>
prop.getName -> prop
}.toMap
val propMethods =
props.values.map(_.getWriteMethod).filter(_ != null).toSet ++
props.values.map(_.getReadMethod).filter(_ != null).toSet
val ops = interface.getMethods.filterNot(propMethods).map { method =>
new MBeanOperationInfo(s"Operation ${method.getName}", method) -> method
}
ops -> props
}
}
def getAttribute(name: String) = getValue(name, dynamicProps)
private def getValue(name: String, snapshot: => collection.Map[String, Any]): AnyRef = {
props.get(name) match {
case Some(prop) => prop.getReadMethod.invoke(this)
case _ => snapshot(name).asInstanceOf[AnyRef]
}
}
def setAttribute(attr: Attribute) = props(attr.getName).getWriteMethod.invoke(this, attr.getValue)
def getAttributes(names: Array[String]) = {
lazy val snapshot = dynamicProps
val list = new AttributeList
names.foreach { name =>
val value = getValue(name, snapshot)
list.add(new Attribute(name, value))
}
list
}
def setAttributes(list: AttributeList) = {
list.asList.asScala.foreach(setAttribute)
list
}
def invoke(name: String, values: Array[Object], types: Array[String]): Object = {
val found = ops.find {
case (_, method) =>
method.getName == name && {
val sig = method.getParameterTypes
sig.length == values.length &&
sig.zip(values).forall {
case (argType, arg) => argType.isInstance(arg)
}
}
}
found.map(_._2.invoke(this, values: _*)).orNull
}
val getMBeanInfo = {
val mapInfo = dynamicProps.map {
case (name, value) =>
name -> new MBeanAttributeInfo(name, value.getClass.getName, "", true, false, false)
}.toMap
val propInfo = props.values.map { prop =>
val name = prop.getName
name -> new MBeanAttributeInfo(name, s"$name description", prop.getReadMethod, prop.getWriteMethod)
}.toMap
val attrInfo = (mapInfo ++ propInfo).values.toArray
val opsInfo = this.ops.map(_._1)
new MBeanInfo(
typeName, "",
attrInfo,
Array.empty[MBeanConstructorInfo],
opsInfo,
Array.empty[MBeanNotificationInfo])
}
}
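  // A minimal sketch (the CacheStats bean is hypothetical): back the bean's attributes
  // with a mutable map whose keys are fixed at startup, per the note on `dynamicProps`.
  //   class CacheStats extends JMX.DynamicMBean {
  //     private val counters = scala.collection.concurrent.TrieMap("hits" -> 0L, "misses" -> 0L)
  //     protected def dynamicProps = counters
  //   }
  //   JMX.register(new CacheStats, "main-cache")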
/**
* MBean implementations can extend this
* for easy sending of notifications.
*/
abstract class NotificationMBean[N: ClassTag] extends NotificationBroadcasterSupport {
private[this] val seqNums = new AtomicLong
protected val notificationType: String = classTag[N].runtimeClass.getName
protected def stringify(notif: N): String
def sendNotification(notif: N, timestamp: Long = System.currentTimeMillis): Unit = {
this sendNotification new Notification(
notificationType,
this,
seqNums.getAndIncrement,
timestamp,
stringify(notif))
}
}
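  // Hedged example: a String-typed broadcaster only needs `stringify`, e.g.
  //   class Alerts extends JMX.NotificationMBean[String] {
  //     protected def stringify(notif: String) = notif
  //   }
  //   new Alerts().sendNotification("disk almost full")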
}
|
nilskp/scuff
|
src/main/scala/scuff/JMX.scala
|
Scala
|
mit
| 8,642
|
/*
Copyright 2016 Tunalytics Foundation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
"http://www.apache.org/licenses/LICENSE-2.0".
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.tunalytics.loader.transformer.topology.signals
import java.util.Map
import backtype.storm.spout.SpoutOutputCollector
import backtype.storm.task.TopologyContext
import backtype.storm.topology.OutputFieldsDeclarer
import backtype.storm.topology.base.BaseRichSpout
import backtype.storm.tuple.Fields
import backtype.storm.tuple.Values
import com.typesafe.scalalogging.LazyLogging
/**
* Signal emitting spout.
*
* For testing purposes! To be removed in production-ready version.
*/
class SignalSpout extends BaseRichSpout with LazyLogging {
// TODO: remove this class in production-ready version
logger.trace("instance created")
  private var configuration: Map[_, _] = _
private var context: TopologyContext = _
private var collector: SpoutOutputCollector = _
private var index: Integer = 0
def nextTuple() {
logger.trace("emitting new tuple...")
Thread.sleep(100)
val signal = nextSignal()
logger.debug("emitting: " + signal + "...")
collector.emit(new Values(signal))
logger.trace("tuple emitted")
}
def declareOutputFields(declarer: OutputFieldsDeclarer) {
logger.trace("declaring fields...")
declarer.declare(new Fields("signal"))
logger.trace("fields declared")
}
def open(configuration: Map[_, _], context: TopologyContext,
collector: SpoutOutputCollector) {
logger.trace("preparing...")
    this.configuration = configuration
this.context = context
this.collector = collector
logger.trace("prepared...")
}
private def nextSignal(): Signal = {
index += 1
new Signal(new Message("serial #" + index))
}
}
|
tunalytics/loader
|
transformer/src/main/scala/org/tunalytics/loader/transformer/topology/signals/SignalSpout.scala
|
Scala
|
apache-2.0
| 2,241
|
/*
* Copyright 2016 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600e.v3
import uk.gov.hmrc.ct.box.{Calculated, CtBoxIdentifier, CtOptionalInteger, Input}
import uk.gov.hmrc.ct.ct600e.v3.calculations.ExpenditureCalculator
import uk.gov.hmrc.ct.ct600e.v3.retriever.CT600EBoxRetriever
case class E125(value: Option[Int]) extends CtBoxIdentifier("Expenditure: Total of boxes E95 to E120") with CtOptionalInteger
object E125 extends Calculated[E125, CT600EBoxRetriever] with ExpenditureCalculator {
override def calculate(boxRetriever: CT600EBoxRetriever): E125 = {
calculateTotalExpenditure(
e95 = boxRetriever.retrieveE95(),
e100 = boxRetriever.retrieveE100(),
e105 = boxRetriever.retrieveE105(),
e110 = boxRetriever.retrieveE110(),
e115 = boxRetriever.retrieveE115(),
e120 = boxRetriever.retrieveE120()
)
}
}
|
ahudspith-equalexperts/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/ct600e/v3/E125.scala
|
Scala
|
apache-2.0
| 1,423
|
import com.google.inject.AbstractModule
import service._
class Module extends AbstractModule {
override def configure() = {
bind(classOf[WsConsumer]).to(classOf[WsConsumerImpl]).asEagerSingleton()
bind(classOf[TeaserService]).to(classOf[TeaserServiceImpl]).asEagerSingleton()
bind(classOf[CarouselService]).to(classOf[CarouselServiceImpl]).asEagerSingleton()
bind(classOf[TextblockService]).to(classOf[TextblockServiceImpl]).asEagerSingleton()
}
}
|
splink/pagelets-seed
|
app/Module.scala
|
Scala
|
apache-2.0
| 469
|
import scala.collection.immutable.HashSet
object Test extends App {
var hashCount = 0
/**
* A key that produces lots of hash collisions, to exercise the part of the code that deals with those
*/
case class Collision(value: Int) {
override def hashCode = {
// we do not check hash counts for Collision keys because ListSet.++ uses a mutable hashset internally,
// so when we have hash collisions, union will call key.hashCode.
// hashCount += 1
value / 5
}
}
/**
* A key that is identical to int other than that it counts hashCode invocations
*/
case class HashCounter(value: Int) {
override def hashCode = {
hashCount += 1
value
}
}
def testUnion[T](sizes: Seq[Int], offsets: Seq[Double], keyType: String, mkKey: Int => T) {
for {
i <- sizes
o <- offsets
} {
val e = HashSet.empty[T]
val j = (i * o).toInt
// create two sets of size i with overlap o
val a = e ++ (0 until i).map(mkKey)
require(a.size == i, s"Building HashSet of size $i failed. Key type $keyType.")
val b = e ++ (j until (i + j)).map(mkKey)
require(b.size == i, s"Building HashSet of size $i failed. Key type $keyType.")
val as = e ++ (0 until j).map(mkKey)
require(as.size == j, s"Building HashSet of size $j failed. Key type $keyType.")
val hashCount0 = hashCount
val u = a union b
require(hashCount == hashCount0, s"key.hashCode should not be called, but has been called ${hashCount - hashCount0} times. Key type $keyType.")
require(u == (a union scala.collection.mutable.HashSet(b.toSeq: _*)), s"Operation must still work for other sets!")
require(u.size == i + j, s"Expected size ${i+j}. Real size ${u.size}. Key type $keyType.")
for (x <- 0 until i + j)
require(u.contains(mkKey(x)), s"Key type $keyType. Set (0 until ${i + j}) should contain $x but does not.")
val a_as = a union as
val as_a = as union a
require((a_as eq a) || (a_as eq as), s"No structural sharing in a union as. Key type $keyType, a=(0 until $i) as=(0 until $j)")
require((as_a eq a) || (as_a eq as), s"No structural sharing in as union a. Key type $keyType, a=(0 until $i) as=(0 until $j)")
}
}
val sizes = Seq(1, 10, 100, 1000, 10000, 100000)
val offsets = Seq(0.0, 0.25, 0.5, 0.75, 1.0)
testUnion(sizes, offsets, "int", identity[Int])
testUnion(sizes, offsets, "hashcounter", HashCounter.apply)
testUnion(sizes, offsets, "collision", Collision.apply)
}
|
felixmulder/scala
|
test/files/run/t6253a.scala
|
Scala
|
bsd-3-clause
| 2,545
|
package tshrdlu.util.index
/**
* Copyright 2013 Nick Wilson
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.File
import org.apache.lucene.analysis.Analyzer
import org.apache.lucene.index.DirectoryReader
import org.apache.lucene.queryparser.classic.QueryParser
import org.apache.lucene.search.{IndexSearcher,TopScoreDocCollector}
import org.apache.lucene.store.{Directory,RAMDirectory}
import org.apache.lucene.util.Version
/**
* Provides read access to a Lucene index of objects.
*
* @constructor create a new object reader
* @tparam T the type of object in the index (must be serializable)
* @param index the Lucene index to read from
* @param queryParser parses a string containing a search query
* @param objectToDocument converts Lucene <code>Document</code>s to objects
* of type <code>T</code>
*/
class ObjectReader[T](
index: Directory,
queryParser: QueryParser,
objectToDocument: ObjectToDocument[T]) {
protected var indexReader = DirectoryReader.open(index)
protected var searcher = new IndexSearcher(indexReader)
/**
* Searches for objects in the index matching a query. The matching
* objects are returned in an iterator along with their score used
* for ranking the results.
*
   * Prior to doing the search, the index is reopened if necessary to get
* access to documents that may have recently been added to the index.
*
* @param query the search query
* @param maxHits the maximum number of results to return
* @return the results in descending order of score
* @see <a href="http://lucene.apache.org/core/4_2_0/queryparser/org/apache/lucene/queryparser/classic/package-summary.html">Lucene query parser format</a>
*/
def search(query: String, maxHits: Int = 10): Iterator[(T, Float)] = {
reopenIfChanged()
// Do the search
val queryObj = queryParser.parse(query)
val collector = TopScoreDocCollector.create(maxHits, true)
searcher.search(queryObj, collector)
val hits = collector.topDocs().scoreDocs
// Convert the Lucene documents into objects of the appropriate type
// and return them with their scores
hits.map { hit =>
val document = searcher.doc(hit.doc)
val objectToDocument(theObject) = document
(theObject, hit.score)
}.toIterator
}
/**
* Reopens the index if it has changed.
*/
protected def reopenIfChanged() {
val newReader = DirectoryReader.openIfChanged(indexReader)
if (newReader != null) {
indexReader = newReader
searcher = new IndexSearcher(indexReader)
}
}
}
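// Hedged usage sketch (the Tweet type and its tweetToDocument converter are
// illustrative, not part of this file):
//   val readerFactory = new ObjectReaderFactory[Tweet](tweetToDocument)
//   val reader = readerFactory(new RAMDirectory)
//   reader.search("text:scala", maxHits = 5).foreach { case (obj, score) =>
//     println(f"$score%.2f $obj")
//   }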
/**
* Constructs [[ObjectReader]]s to read objects from a Lucene index.
*
* @tparam T the type of object in the index (must be
* serializable)
* @param objectToDocument converts Lucene <code>Document</code>s to objects
* of type <code>T</code>
* @param defaultSearchField the default field to search when doing a query.
* For example, a query "title:blah" will search the
* "title" field while a query "blah" will search
* the field specified by this parameter.
* @param analyzerCreator creates the Lucene <code>Analyzer</code> for
* extracting terms from text. This should create an
* analyzer with the same behavior as the one used
* to write to the index.
* @param luceneVersion the version of the Lucene index
*/
class ObjectReaderFactory[T](
objectToDocument: ObjectToDocument[T],
defaultSearchField: String = "text",
analyzerCreator: (Version => Analyzer) = EnglishAnalyzerCreator,
luceneVersion: Version = Settings.LuceneVersion)
extends ReaderOrWriterFactory[ObjectReader[T]] {
def apply(index: Directory): ObjectReader[T] = {
val analyzer = analyzerCreator(luceneVersion)
val queryParser = new QueryParser(luceneVersion, defaultSearchField, analyzer)
new ObjectReader[T](index, queryParser, objectToDocument)
}
}
/**
* Provides a main method to perform searches on indexes.
*
* @param readerFactory the factory used to create an [[ObjectReader]]
*/
abstract class ObjectIndexSearcher[T](readerFactory: ObjectReaderFactory[T]) {
/**
* Searches an index and prints the top hits.
*
* @param args the first argument is a name of an index (see
* [[tshrdlu.util.index.Settings.BaseIndexPath]] for a
   * description of where the index is located on disk) or a full
* path to an index. The second argument is the search query.
*/
def main(args: Array[String]) {
val Array(indexNameOrPath, query) = args
println("Searching index '" + indexNameOrPath + "': " + query + "\n")
// Treat the first argument as a full path to an index if the directory
// exists, otherwise treat it as an index name
val path = new File(indexNameOrPath)
val reader = if (path.exists) {
readerFactory(path)
} else {
readerFactory(indexNameOrPath)
}
// Do the search and print the top hits
reader.search(query).foreach {
case (theObject, score) => {
println(resultToString(theObject, score))
}
}
}
/**
* Returns a string representing a single hit.
*
* @param theObject the object retrieved from the index
* @param score the score used to rank the hit
*/
def resultToString(theObject: T, score: Float): String
}
|
utcompling/tshrdlu
|
src/main/scala/tshrdlu/util/index/Reader.scala
|
Scala
|
apache-2.0
| 6,107
|
package exceptions
case class FileNotFoundException(message: String) extends Throwable
|
arpanchaudhury/SFD
|
src/main/scala/exceptions/FileNotFoundException.scala
|
Scala
|
mit
| 88
|
/*
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package play.routes.compiler
import java.io.File
import org.apache.commons.io.FileUtils
import scala.util.parsing.combinator._
import scala.util.parsing.input._
import scala.language.postfixOps
object RoutesFileParser {
/**
* Parse the given routes file
*
* @param routesFile The routes file to parse
* @return Either the list of compilation errors encountered, or a list of routing rules
*/
def parse(routesFile: File): Either[Seq[RoutesCompilationError], List[Rule]] = {
val routesContent = FileUtils.readFileToString(routesFile)
parseContent(routesContent, routesFile)
}
/**
* Parse the given routes file content
*
* @param routesContent The content of the routes file
* @param routesFile The routes file (used for error reporting)
* @return Either the list of compilation errors encountered, or a list of routing rules
*/
def parseContent(routesContent: String, routesFile: File): Either[Seq[RoutesCompilationError], List[Rule]] = {
val parser = new RoutesFileParser()
parser.parse(routesContent) match {
case parser.Success(parsed: List[Rule], _) =>
validate(routesFile, parsed.collect { case r: Route => r }) match {
case Nil => Right(parsed)
case errors => Left(errors)
}
case parser.NoSuccess(message, in) =>
Left(Seq(RoutesCompilationError(routesFile, message, Some(in.pos.line), Some(in.pos.column))))
}
}
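  // For reference, a routes line in the standard Play syntax this parser accepts:
  //   GET   /clients/:id   controllers.Clients.show(id: Long)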
/**
* Validate the routes file
*/
private def validate(file: java.io.File, routes: List[Route]): Seq[RoutesCompilationError] = {
import scala.collection.mutable._
val errors = ListBuffer.empty[RoutesCompilationError]
routes.foreach { route =>
if (route.call.packageName.isEmpty) {
errors += RoutesCompilationError(
file,
"Missing package name",
Some(route.call.pos.line),
Some(route.call.pos.column))
}
if (route.call.controller.isEmpty) {
errors += RoutesCompilationError(
file,
"Missing Controller",
Some(route.call.pos.line),
Some(route.call.pos.column))
}
route.path.parts.collect {
case part @ DynamicPart(name, regex, _) => {
route.call.parameters.getOrElse(Nil).find(_.name == name).map { p =>
if (p.fixed.isDefined || p.default.isDefined) {
errors += RoutesCompilationError(
file,
"It is not allowed to specify a fixed or default value for parameter: '" + name + "' extracted from the path",
Some(p.pos.line),
Some(p.pos.column))
}
try {
java.util.regex.Pattern.compile(regex)
} catch {
case e: Exception => {
errors += RoutesCompilationError(
file,
e.getMessage,
Some(part.pos.line),
Some(part.pos.column))
}
}
}.getOrElse {
errors += RoutesCompilationError(
file,
"Missing parameter in call definition: " + name,
Some(part.pos.line),
Some(part.pos.column))
}
}
}
}
// make sure there are no routes using overloaded handler methods, or handler methods with default parameters without declaring them all
val sameHandlerMethodGroup = routes.groupBy { r =>
r.call.packageName + r.call.controller + r.call.method
}
val sameHandlerMethodParameterCountGroup = sameHandlerMethodGroup.groupBy { g =>
(g._1, g._2.groupBy(route => route.call.parameters.map(p => p.length).getOrElse(0)))
}
sameHandlerMethodParameterCountGroup.find(g => g._1._2.size > 1).foreach { overloadedRouteGroup =>
val firstOverloadedRoute = overloadedRouteGroup._2.values.head.head
errors += RoutesCompilationError(
file,
"Using different overloaded methods is not allowed. If you are using a single method in combination with default parameters, make sure you declare them all explicitly.",
Some(firstOverloadedRoute.call.pos.line),
Some(firstOverloadedRoute.call.pos.column)
)
}
errors.toList
}
}
/**
* The routes file parser
*/
private[routes] class RoutesFileParser extends JavaTokenParsers {
override def skipWhitespace = false
override val whiteSpace = """[ \t]+""".r
def EOF: util.matching.Regex = "\\z".r
def namedError[A](p: Parser[A], msg: String): Parser[A] = Parser[A] { i =>
p(i) match {
case Failure(_, in) => Failure(msg, in)
case o => o
}
}
def several[T](p: => Parser[T]): Parser[List[T]] = Parser { in =>
import scala.collection.mutable.ListBuffer
val elems = new ListBuffer[T]
def continue(in: Input): ParseResult[List[T]] = {
val p0 = p // avoid repeatedly re-evaluating by-name parser
@scala.annotation.tailrec
def applyp(in0: Input): ParseResult[List[T]] = p0(in0) match {
case Success(x, rest) =>
elems += x; applyp(rest)
case Failure(_, _) => Success(elems.toList, in0)
case err: Error => err
}
applyp(in)
}
continue(in)
}
def separator: Parser[String] = namedError(whiteSpace, "Whitespace expected")
def ignoreWhiteSpace: Parser[Option[String]] = opt(whiteSpace)
// This won't be needed when we upgrade to Scala 2.11, we will then be able to use JavaTokenParser.ident:
// https://github.com/scala/scala/pull/1466
def javaIdent: Parser[String] = """\p{javaJavaIdentifierStart}\p{javaJavaIdentifierPart}*""".r
def identifier: Parser[String] = namedError(javaIdent, "Identifier expected")
def end: util.matching.Regex = """\s*""".r
def comment: Parser[Comment] = "#" ~> ".*".r ^^ {
case c => Comment(c)
}
def newLine: Parser[String] = namedError((("\r"?) ~> "\n"), "End of line expected")
def blankLine: Parser[Unit] = ignoreWhiteSpace ~> newLine ^^ { case _ => () }
def parentheses: Parser[String] = {
"(" ~ (several((parentheses | not(")") ~> """.""".r))) ~ commit(")") ^^ {
case p1 ~ charList ~ p2 => p1 + charList.mkString + p2
}
}
def brackets: Parser[String] = {
"[" ~ (several((parentheses | not("]") ~> """.""".r))) ~ commit("]") ^^ {
case p1 ~ charList ~ p2 => p1 + charList.mkString + p2
}
}
def string: Parser[String] = {
"\"" ~ (several((parentheses | not("\"") ~> """.""".r))) ~ commit("\"") ^^ {
case p1 ~ charList ~ p2 => p1 + charList.mkString + p2
}
}
def multiString: Parser[String] = {
"\"\"\"" ~ (several((parentheses | not("\"\"\"") ~> """.""".r))) ~ commit("\"\"\"") ^^ {
case p1 ~ charList ~ p2 => p1 + charList.mkString + p2
}
}
def httpVerb: Parser[HttpVerb] = namedError("GET" | "POST" | "PUT" | "PATCH" | "HEAD" | "DELETE" | "OPTIONS", "HTTP Verb expected") ^^ {
case v => HttpVerb(v)
}
def singleComponentPathPart: Parser[DynamicPart] = (":" ~> identifier) ^^ {
case name => DynamicPart(name, """[^/]+""", encode = true)
}
def multipleComponentsPathPart: Parser[DynamicPart] = ("*" ~> identifier) ^^ {
case name => DynamicPart(name, """.+""", encode = false)
}
def regexComponentPathPart: Parser[DynamicPart] = "$" ~> identifier ~ ("<" ~> (not(">") ~> """[^\s]""".r +) <~ ">" ^^ { case c => c.mkString }) ^^ {
case name ~ regex => DynamicPart(name, regex, encode = false)
}
def staticPathPart: Parser[StaticPart] = (not(":") ~> not("*") ~> not("$") ~> """[^\s]""".r +) ^^ {
case chars => StaticPart(chars.mkString)
}
def path: Parser[PathPattern] = "/" ~ ((positioned(singleComponentPathPart) | positioned(multipleComponentsPathPart) | positioned(regexComponentPathPart) | staticPathPart) *) ^^ {
case _ ~ parts => PathPattern(parts)
}
def space(s: String): Parser[String] = (ignoreWhiteSpace ~> s <~ ignoreWhiteSpace)
def parameterType: Parser[String] = ":" ~> ignoreWhiteSpace ~> simpleType
def simpleType: Parser[String] = {
((stableId <~ ignoreWhiteSpace) ~ opt(typeArgs)) ^^ {
case sid ~ ta => sid.toString + ta.getOrElse("")
} |
(space("(") ~ types ~ space(")")) ^^ {
case _ ~ b ~ _ => "(" + b + ")"
}
}
def typeArgs: Parser[String] = {
(space("[") ~ types ~ space("]") ~ opt(typeArgs)) ^^ {
case _ ~ ts ~ _ ~ ta => "[" + ts + "]" + ta.getOrElse("")
} |
(space("#") ~ identifier ~ opt(typeArgs)) ^^ {
case _ ~ id ~ ta => "#" + id + ta.getOrElse("")
}
}
def types: Parser[String] = rep1sep(simpleType, space(",")) ^^ (_ mkString ",")
def stableId: Parser[String] = rep1sep(identifier, space(".")) ^^ (_ mkString ".")
def expression: Parser[String] = (multiString | string | parentheses | brackets | """[^),?=\n]""".r +) ^^ {
case p => p.mkString
}
def parameterFixedValue: Parser[String] = "=" ~ ignoreWhiteSpace ~ expression ^^ {
case a ~ _ ~ b => a + b
}
def parameterDefaultValue: Parser[String] = "?=" ~ ignoreWhiteSpace ~ expression ^^ {
case a ~ _ ~ b => a + b
}
def parameter: Parser[Parameter] = (identifier <~ ignoreWhiteSpace) ~ opt(parameterType) ~ (ignoreWhiteSpace ~> opt(parameterDefaultValue | parameterFixedValue)) ^^ {
case name ~ t ~ d => Parameter(name, t.getOrElse("String"), d.filter(_.startsWith("=")).map(_.drop(1)), d.filter(_.startsWith("?")).map(_.drop(2)))
}
def parameters: Parser[List[Parameter]] = "(" ~> repsep(ignoreWhiteSpace ~> positioned(parameter) <~ ignoreWhiteSpace, ",") <~ ")"
// Absolute method consists of a series of Java identifiers representing the package name, controller and method.
// Since the Scala parser is greedy, we can't easily extract this out, so just parse at least 3 identifiers
def absoluteMethod: Parser[List[String]] = namedError(javaIdent ~ "." ~ javaIdent ~ "." ~ rep1sep(javaIdent, ".") ^^ {
case first ~ _ ~ second ~ _ ~ rest => first :: second :: rest
}, "Controller method call expected")
def call: Parser[HandlerCall] = opt("@") ~ absoluteMethod ~ opt(parameters) ^^ {
case instantiate ~ absMethod ~ parameters =>
{
val (packageParts, classAndMethod) = absMethod.splitAt(absMethod.size - 2)
val packageName = packageParts.mkString(".")
val className = classAndMethod(0)
val methodName = classAndMethod(1)
val dynamic = !instantiate.isEmpty
HandlerCall(packageName, className, dynamic, methodName, parameters)
}
}
def router: Parser[String] = rep1sep(identifier, ".") ^^ {
case parts => parts.mkString(".")
}
def route = httpVerb ~! separator ~ path ~ separator ~ positioned(call) ~ ignoreWhiteSpace ^^ {
case v ~ _ ~ p ~ _ ~ c ~ _ => Route(v, p, c)
}
def include = "->" ~! separator ~ path ~ separator ~ router ~ ignoreWhiteSpace ^^ {
case _ ~ _ ~ p ~ _ ~ r ~ _ => Include(p.toString, r)
}
def sentence: Parser[Product with Serializable] = namedError((comment | positioned(include) | positioned(route)), "HTTP Verb (GET, POST, ...), include (->) or comment (#) expected") <~ (newLine | EOF)
def parser: Parser[List[Rule]] = phrase((blankLine | sentence *) <~ end) ^^ {
case routes =>
routes.reverse.foldLeft(List[(Option[Rule], List[Comment])]()) {
case (s, r @ Route(_, _, _, _)) => (Some(r), List()) :: s
case (s, i @ Include(_, _)) => (Some(i), List()) :: s
case (s, c @ ()) => (None, List()) :: s
case ((r, comments) :: others, c @ Comment(_)) => (r, c :: comments) :: others
case (s, _) => s
}.collect {
case (Some(r @ Route(_, _, _, _)), comments) => r.copy(comments = comments).setPos(r.pos)
case (Some(i @ Include(_, _)), _) => i
}
}
def parse(text: String): ParseResult[List[Rule]] = {
parser(new CharSequenceReader(text))
}
}
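// A minimal usage sketch (hypothetical: this excerpt does not show the
// enclosing declaration, so the object name `RoutesFileParser` is assumed):
//
//   RoutesFileParser.parse("GET  /home  controllers.HomeController.index") match {
//     case RoutesFileParser.Success(rules, _) => rules.foreach(println)
//     case failure                            => sys.error(failure.toString)
//   }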
|
jyotikamboj/container
|
pf-framework/src/routes-compiler/src/main/scala/play/routes/compiler/RoutesFileParser.scala
|
Scala
|
mit
| 11,914
|
/*
* Copyright 2011-2018 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.core.structure
import io.gatling.core.action.builder.{ ActionBuilder, SessionHookBuilder }
import io.gatling.core.session.{ Expression, Session }
trait Execs[B] {
private[core] def actionBuilders: List[ActionBuilder]
private[core] def newInstance(actionBuilders: List[ActionBuilder]): B
def exec(sessionFunction: Expression[Session]): B = exec(new SessionHookBuilder(sessionFunction, exitable = true))
def exec(actionBuilder: ActionBuilder): B = chain(List(actionBuilder))
def exec(chains: ChainBuilder*): B = exec(chains.toIterable)
def exec(chains: Iterator[ChainBuilder]): B = exec(chains.toIterable)
def exec(chains: Iterable[ChainBuilder]): B = chain(chains.toList.reverse.flatMap(_.actionBuilders))
def exec(scenario: ScenarioBuilder): B = chain(scenario.actionBuilders)
private[core] def chain(newActionBuilders: Seq[ActionBuilder]): B = newInstance(newActionBuilders.toList ::: actionBuilders)
}
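// A minimal usage sketch of the exec overloads above (a sketch only: it
// assumes the standard Gatling DSL entry points in io.gatling.core.Predef,
// and the names below are illustrative, not part of this file's API):
object ExecsUsageSketch {
  import io.gatling.core.Predef._
  import io.gatling.commons.validation._

  // exec(Expression[Session]): an Expression[Session] is a
  // Session => Validation[Session], so lift the updated session with .success
  val setUser = exec { session: Session => session.set("user", "alice").success }

  // exec(ChainBuilder*) composes chains in declaration order
  val scn = scenario("execs-demo").exec(setUser, setUser)
}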
|
wiacekm/gatling
|
gatling-core/src/main/scala/io/gatling/core/structure/Execs.scala
|
Scala
|
apache-2.0
| 1,564
|
package edu.cmu.cs.oak.lib.array
import java.nio.file.Path
import com.caucho.quercus.expr.Expr
import edu.cmu.cs.oak.lib.InterpreterPluginProvider
import edu.cmu.cs.oak.core.OakInterpreter
import edu.cmu.cs.oak.env.Environment
import edu.cmu.cs.oak.lib.InterpreterPlugin
import edu.cmu.cs.oak.value.ArrayValue
import edu.cmu.cs.oak.value.BooleanValue
import edu.cmu.cs.oak.value.OakValue
import com.caucho.quercus.Location
class IsArray extends InterpreterPlugin {
override def getName(): String = "is_array"
override def visit(provider: InterpreterPluginProvider, args: List[OakValue], loc: Location, env: Environment): OakValue = {
/* Assert that the function was called
* with exactly one argument. */
assert(args.size == 1)
val v = args(0)
return BooleanValue(v match {
case a: ArrayValue => true
case _ => false
} )
}
}
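// Behaviour sketch in PHP terms: the plugin mirrors PHP's is_array(), e.g.
//   is_array(array(1, 2)) === true
//   is_array("foo")       === false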
|
smba/oak
|
edu.cmu.cs.oak/src/main/scala/edu/cmu/cs/oak/lib/array/IsArray.scala
|
Scala
|
lgpl-3.0
| 910
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dbis.pig.plan.rewriting.internals
import dbis.pig.tools.logging.PigletLogging
import dbis.pig.op._
import dbis.pig.plan.DataflowPlan
import dbis.pig.plan.rewriting.RewriterException
import dbis.pig.tools.{BreadthFirstBottomUpWalker, BreadthFirstTopDownWalker}
import org.kiama.rewriting.Rewriter._
import org.kiama.rewriting.Strategy
import dbis.setm.SETM.timing
import scala.collection.mutable
import scala.collection.mutable.ListBuffer
/** Provides methods to deal with windows in a [[dbis.pig.plan.DataflowPlan]].
*
*/
trait WindowSupport extends PigletLogging {
def processPlan(newPlan: DataflowPlan, strategy: Strategy): DataflowPlan
def processWindows(plan: DataflowPlan): DataflowPlan = timing("process windows") {
require(plan != null, "Plan must not be null")
var newPlan = plan
val walker1 = new BreadthFirstTopDownWalker
val walker2 = new BreadthFirstBottomUpWalker
// All Window Ops: Group,Filter,Distinct,Limit,OrderBy,Foreach
// Two modes: Group,Filter,Limit,Foreach
// Terminator: Foreach, Join
logger.debug(s"Searching for Window Operators")
walker1.walk(newPlan){ op =>
op match {
case o: Window => {
logger.debug(s"Found Window Operator")
newPlan = markForWindowMode(newPlan, o)
}
case _ =>
}
}
// Find and process window Joins and Crosses
val joins = ListBuffer.empty[PigOperator]
walker2.walk(newPlan){ op =>
op match {
case o: Join => joins += o
case o: Cross => joins += o
case _ =>
}
}
newPlan = processWindowJoins(newPlan, joins.toList)
//TODO: Add Check for WindowOnly operators (distinct, orderBy, etc.)
newPlan
}
def markForWindowMode(plan: DataflowPlan, windowOp: Window): DataflowPlan = {
var lastOp: PigOperator = new Empty(Pipe("empty"))
val littleWalker = mutable.Queue(windowOp.outputs.flatMap(_.consumer).toSeq: _*)
while(!littleWalker.isEmpty){
val operator = littleWalker.dequeue()
operator match {
case o: Filter => {
logger.debug(s"Rewrite Filter to WindowMode")
o.windowMode = true
}
case o: Distinct => {
logger.debug(s"Rewrite Distinct to WindowMode")
o.windowMode = true
}
case o: OrderBy => {
logger.debug(s"Rewrite OrderBy to WindowMode")
o.windowMode = true
}
case o: Grouping => {
logger.debug(s"Rewrite Grouping to WindowMode")
o.windowMode = true
}
case o: Foreach => {
logger.debug(s"Rewrite Foreach to WindowMode")
o.windowMode = true
//val flatten = new WindowFlatten(Pipe("flattenNode"), o.outputs.head)
val apply = new WindowApply(Pipe(o.outPipeName+"Apply"), windowOp.outputs.head, "WindowFunc"+o.outPipeName)
val newPlan = plan.insertConnect(windowOp, o.outputs.head.consumer.head, apply)
apply.schema = o.schema
return newPlan
}
case o: Join => {
logger.debug(s"Found Join Node, abort")
return plan
}
case o: Cross => {
logger.debug(s"Found Cross Node, abort")
return plan
}
case _ =>
}
littleWalker ++= operator.outputs.flatMap(_.consumer)
if (littleWalker.isEmpty) lastOp = operator
}
logger.debug(s"Reached End of Plan - Adding Flatten Node")
val before = lastOp.inputs.head
val apply = new WindowApply(Pipe(before.name+"Apply"), windowOp.outputs.head, "WindowFunc"+before.name)
val newPlan = plan.insertConnect(windowOp, lastOp, apply)
apply.schema = before.producer.schema
lastOp.schema = before.producer.schema
newPlan
}
def processWindowJoins(plan: DataflowPlan, joins: List[PigOperator]): DataflowPlan = {
var newPlan = plan
/*
* Foreach Join or Cross Operator check if Input requirements are met.
* Collect Window input relations and create new Join with Window
* definition and window inputs as new inputs.
*/
for(joinOp <- joins) {
var newInputs = ListBuffer.empty[Pipe]
var windowDef: Option[Tuple2[Int,String]] = None
for(joinInputPipe <- joinOp.inputs){
// Checks
if (!joinInputPipe.producer.isInstanceOf[Window])
throw new RewriterException("Join inputs must be Window Definitions")
val inputWindow = joinInputPipe.producer.asInstanceOf[Window]
if (inputWindow.window._2 == "")
throw new RewriterException("Join input windows must be defined via RANGE")
if (windowDef.isEmpty)
windowDef = Some(inputWindow.window)
if (windowDef != Some(inputWindow.window))
throw new RewriterException("Join input windows must have the same definition")
newInputs += inputWindow.inputs.head
// Remove Window-Join relations
joinOp.inputs = joinOp.inputs.filterNot(_.producer == inputWindow)
inputWindow.outputs.foreach((out: Pipe) => {
if(out.consumer contains joinOp)
out.consumer = out.consumer.filterNot(_ == joinOp)
})
}
val newJoin = joinOp match {
case o: Join => Join(o.outputs.head, newInputs.toList, o.fieldExprs, windowDef.getOrElse(null.asInstanceOf[Tuple2[Int,String]]))
case o: Cross => Cross(o.outputs.head, newInputs.toList, windowDef.getOrElse(null.asInstanceOf[Tuple2[Int,String]]))
case _ => ???
}
/*
* Replace Old Join with new Join (new Input Pipes and Window Parameter)
*/
val strategy = (op: Any) => {
if (op == joinOp) {
joinOp.outputs = List.empty
joinOp.inputs = List.empty
Some(newJoin)
}
else {
None
}
}
newPlan = processPlan(newPlan, strategyf(t => strategy(t)))
}
newPlan
}
}
|
ksattler/piglet
|
src/main/scala/dbis/pig/plan/rewriting/internals/WindowSupport.scala
|
Scala
|
apache-2.0
| 6,707
|
package net.fehmicansaglam.tepkin
import akka.actor.{ActorRefFactory, ActorSystem}
import net.fehmicansaglam.tepkin.TepkinMessage.ShutDown
import net.fehmicansaglam.tepkin.protocol.ReadPreference
import scala.concurrent.ExecutionContext
class MongoClient(val context: ActorRefFactory, uri: MongoClientUri, nConnectionsPerNode: Int) {
val poolManager = context.actorOf(
MongoPoolManager
.props(uri, nConnectionsPerNode, uri.option("readPreference").map(ReadPreference.apply))
.withMailbox("tepkin-mailbox"),
name = "tepkin-pool")
implicit def ec: ExecutionContext = context.dispatcher
def apply(databaseName: String): MongoDatabase = {
require(databaseName != null && databaseName.getBytes("UTF-8").size < 123,
"Database name must be shorter than 123 bytes")
new MongoDatabase(poolManager, databaseName)
}
def db(databaseName: String): MongoDatabase = {
apply(databaseName)
}
def shutdown(): Unit = {
poolManager ! ShutDown
}
}
object MongoClient {
def apply(uri: String,
nConnectionsPerNode: Int = 10,
context: ActorRefFactory = ActorSystem("tepkin-system")): MongoClient = {
new MongoClient(context, MongoClientUri(uri), nConnectionsPerNode)
}
}
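// A minimal usage sketch (a sketch: it assumes a MongoDB node reachable at
// the given URI; connection count and actor system fall back to the defaults
// declared in MongoClient.apply):
object MongoClientUsageSketch {
  def main(args: Array[String]): Unit = {
    val client = MongoClient("mongodb://localhost:27017")
    val db = client.db("example") // equivalent to client("example")
    println(db)
    client.shutdown() // sends ShutDown to the pool manager
  }
}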
|
danielwegener/tepkin
|
tepkin/src/main/scala/net/fehmicansaglam/tepkin/MongoClient.scala
|
Scala
|
apache-2.0
| 1,248
|
/*
* Copyright 2016 rdbc contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.rdbc.pgsql.core.internal.protocol
import io.rdbc.pgsql.core.Oid
import scodec.bits.ByteVector
sealed trait Argument {
def dataTypeOid: Oid
}
object Argument {
case class Null(dataTypeOid: Oid) extends Argument
case class Textual(value: String, dataTypeOid: Oid) extends Argument
case class Binary(value: ByteVector, dataTypeOid: Oid) extends Argument
}
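// A minimal sketch constructing each Argument variant. Oid construction is
// elided: `textOid` stands for the oid of a Postgres text column and
// `byteaOid` for bytea (illustrative names, not part of this file):
object ArgumentUsageSketch {
  def sketch(textOid: Oid, byteaOid: Oid): List[Argument] = List(
    Argument.Null(textOid),
    Argument.Textual("hello", textOid),
    Argument.Binary(ByteVector(1, 2, 3), byteaOid)
  )
}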
|
rdbc-io/rdbc-pgsql
|
rdbc-pgsql-core/src/main/scala/io/rdbc/pgsql/core/internal/protocol/Argument.scala
|
Scala
|
apache-2.0
| 977
|
package scripts
import java.io.File.pathSeparatorChar
import java.net.URLClassLoader
import java.nio.file.{Files, Paths}
import io.bazel.rulesscala.worker.Worker
import protocbridge.{ProtocBridge, ProtocCodeGenerator}
import scala.sys.process._
object ScalaPBWorker extends Worker.Interface {
private val protoc = {
val executable = sys.props.getOrElse("PROTOC", sys.error("PROTOC not supplied"))
(args: Seq[String]) => Process(executable, args).!(ProcessLogger(stderr.println(_)))
}
private val classes = {
val jars = sys.props.getOrElse("JARS", "").split(pathSeparatorChar).filter(_.nonEmpty).map { e =>
val file = Paths.get(e)
require(Files.exists(file), s"Expected file for classpath loading $file to exist")
file.toUri.toURL
}
new URLClassLoader(jars).loadClass(_)
}
private val generator = (className: String) => try {
classes(className + "$").getField("MODULE$").get(null).asInstanceOf[ProtocCodeGenerator]
} catch {
case _: NoSuchFieldException | _: java.lang.ClassNotFoundException =>
classes(className).newInstance.asInstanceOf[ProtocCodeGenerator]
}
private val generators: Seq[(String, ProtocCodeGenerator)] = sys.props.toSeq.collect {
case (k, v) if k.startsWith("GEN_") => k.stripPrefix("GEN_") -> generator(v)
}
def main(args: Array[String]): Unit = Worker.workerMain(args, ScalaPBWorker)
def work(args: Array[String]): Unit = {
val code = ProtocBridge.runWithGenerators(protoc, generators, args)
if (code != 0) {
sys.error(s"Exit with code $code")
}
}
}
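// Invocation sketch (values are illustrative, not taken from this repo):
// the worker is configured entirely through system properties, e.g.
//   -DPROTOC=/usr/local/bin/protoc
//   -DGEN_scala=scalapb.ScalaPbCodeGenerator
// and each work request then carries protoc-bridge arguments such as
//   --scala_out=bazel-out/gen foo/bar.proto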
|
bazelbuild/rules_scala
|
src/scala/scripts/ScalaPBWorker.scala
|
Scala
|
apache-2.0
| 1,579
|
package org.brijest.storm.engine.model.components
import collection._
import mutable.Buffer
package immutable {
trait Quad[+T] extends Traversable[T] {
def default: (Int, Int) => Option[T]
def dimensions: (Int, Int)
def size: Int
def apply(x: Int, y: Int): T
def within(p: Quad.Area): Seq[(Int, Int, T)]
def foreach(f: (Int, Int, T) => Unit): Unit
}
}
class Quad[T, Acc](w: Int, h: Int, d: (Int, Int) => Option[T], compress: Boolean) extends immutable.Quad[T] with Serializable {
private var dflt: (Int, Int) => Option[T] = d
private var dims: (Int, Int) = (w, h);
private var root: QNode[T] = new QEmpty[T](QNode.calcSide(dims))
private def app(x: Int, y: Int) = {
check(x, y)
root.apply(0, 0, x, y, dflt)
}
private def check(x: Int, y: Int) = if (x < 0 || y < 0 || x >= dims._1 || y >= dims._2)
throw new IndexOutOfBoundsException("Dims: " + dims + "; idx: " + (x, y))
private def resize(ndims: (Int, Int)) = {
dims = ndims
root = new QEmpty[T](QNode.calcSide(dims))
}
private def upd(x: Int, y: Int, v: T) {
check(x, y)
root = root.update(0, 0, x, y, v, compress)
}
private def clr() {
root = new QEmpty[T](QNode.calcSide(dims))
}
final def default = dflt
final def dimensions = dims
final override def size = root.elems
final def apply(x: Int, y: Int) = app(x, y)
final def apply(p: (Int, Int)) = app(p._1, p._2)
final def within(p: Quad.Area): Seq[(Int, Int, T)] = {
val b = mutable.Buffer[(Int, Int, T)]()
root.within(0, 0, p, b)
b
}
final def foreach(f: (Int, Int, T) => Unit) = root.foreach(0, 0, f)
final def foreach[U](f: T => U): Unit = root.foreach(0, 0, (x, y, v) => f(v))
final def default_=(d: (Int, Int) => Option[T])(implicit rq: Acc) = dflt = d
final def dimensions_=(sz: (Int, Int))(implicit rq: Acc) = resize(sz)
final def update(x: Int, y: Int, elem: T)(implicit rq: Acc) = upd(x, y, elem)
final def remove(x: Int, y: Int)(implicit rq: Acc) = root = root.remove(0, 0, x, y, compress)
final def clear()(implicit rq: Acc) = clr()
}
object Quad {
sealed trait Area {
def apply(x: Int, y: Int): Boolean
def topy: Int
def boty: Int
def lefx: Int
def rigx: Int
def rectIn(x: Int, y: Int, len: Int): Boolean
}
def radius(x0: Int, y0: Int, r: Int) = new Area {
val sqrr = r * r
final def apply(xp: Int, yp: Int) = (xp - x0) * (xp - x0) + (yp - y0) * (yp - y0) <= sqrr
final def topy = y0 - r
final def boty = y0 + r
final def lefx = x0 - r
final def rigx = x0 + r
final def rectIn(x: Int, y: Int, len: Int) = !(x > rigx || (x + len) < lefx || y > boty || (y + len) < topy)
}
final def square(x0: Int, y0: Int, r: Int) = new Area {
final def apply(xp: Int, yp: Int) = math.abs(xp - x0) <= r && math.abs(yp - y0) <= r
final def topy = y0 - r
final def boty = y0 + r
final def lefx = x0 - r
final def rigx = x0 + r
final def rectIn(x: Int, y: Int, len: Int) = !(x > rigx || (x + len) < lefx || y > boty || (y + len) < topy)
}
}
private abstract class QNode[T] {
final def lstmax = 8
final def mxside = 4 // mxside^2 = 16
final def forkslotside = 8
def isQEmpty = false
def isQList = false
def side: Int
def elems: Int
def apply(x0: Int, y0: Int, x: Int, y: Int, d: (Int, Int) => Option[T]): T
def within(x0: Int, y0: Int, p: Quad.Area, acc: Buffer[(Int, Int, T)]): Unit
def update(x0: Int, y0: Int, x: Int, y: Int, v: T, c: Boolean): QNode[T]
def remove(x0: Int, y0: Int, x: Int, y: Int, c: Boolean): QNode[T]
def foreach(x0: Int, y0: Int, f: (Int, Int, T) => Unit): Unit
}
private object QNode {
final def mxsz = 16
final def calcSide(dim: (Int, Int)) = nextSideSize(dim._1 max dim._2)
private def nextSideSize(n: Int) =
if (n <= (1 << 2)) 1 << 2
else if (n <= (1 << 5)) 1 << 5
else if (n <= (1 << 8)) 1 << 8
else if (n <= (1 << 11)) 1 << 11
else if (n <= (1 << 14)) 1 << 14
else if (n <= (1 << 17)) 1 << 17
else if (n <= (1 << 20)) 1 << 20
else if (n <= (1 << 23)) 1 << 23
else if (n <= (1 << 26)) 1 << 26
else if (n <= (1 << 29)) 1 << 29
else sys.error("unsupported size")
}
private case class QEmpty[T](side: Int) extends QNode[T] {
override def isQEmpty = true
final def elems = 0
final def apply(x0: Int, y0: Int, x: Int, y: Int, d: (Int, Int) => Option[T]): T = d(x, y).get
final def within(x0: Int, y0: Int, p: Quad.Area, acc: Buffer[(Int, Int, T)]): Unit = ()
final def update(x0: Int, y0: Int, x: Int, y: Int, v: T, c: Boolean): QNode[T] = {
val ql = new QList[T](0, new Array[Int](4), new Array(2), side)
ql.update(x0, y0, x, y, v, c)
}
final def remove(x0: Int, y0: Int, x: Int, y: Int, c: Boolean): QNode[T] = this
final def foreach(x0: Int, y0: Int, f: (Int, Int, T) => Unit) = ()
}
private object QFork {
val deBruijnBitPos = Array[Byte] (
0, 1, 28, 2, 29, 14, 24, 3, 30, 22, 20, 15, 25, 17, 4, 8,
31, 27, 13, 23, 21, 19, 16, 7, 26, 12, 18, 6, 11, 5, 10, 9
)
}
private case class QFork[T](var elems: Int, var bmp: Long, var subs: Array[QNode[T]], side: Int)
extends QNode[T] {
final def apply(x0: Int, y0: Int, x: Int, y: Int, d: (Int, Int) => Option[T]) = {
val sofss = side >> 3 // 3 == log(forkslotside)
val fx = (x - x0) / sofss
val fy = (y - y0) / sofss
val loc = (fy << 3) + fx // 3 == log(forkslotside)
if ((bmp & (1L << loc)) != 0) {
val pos = java.lang.Long.bitCount(bmp & ((1L << loc) - 1))
val subx0 = x0 + fx * sofss
val suby0 = y0 + fy * sofss
subs(pos).apply(subx0, suby0, x, y, d)
} else d(x, y).get
}
final def within(x0: Int, y0: Int, p: Quad.Area, buff: Buffer[(Int, Int, T)]): Unit = {
val sofss = side >> 3
var left = crop((p.lefx - x0) / sofss, (p.rigx - x0) / sofss, (p.topy - y0) / sofss, (p.boty - y0) / sofss, bmp)
while (left != 0) {
val singleOne = left & (-left)
val loc = calcBitLoc(singleOne)
val fx = loc & 7 // 7 == (1 << log(forkslotside)) - 1
val fy = loc >> 3 // 3 == log(forkslotside)
val subx0 = x0 + fx * sofss
val suby0 = y0 + fy * sofss
val pos = java.lang.Long.bitCount((singleOne - 1) & bmp)
if (rectIn(subx0, suby0, sofss, p)) {
subs(pos).within(subx0, suby0, p, buff)
}
left ^= singleOne
}
}
private def crop(lefx: Int, rigx: Int, topy: Int, boty: Int, bmp: Long) = {
var mask = -1L
if (boty < 8) mask &= -1L >>> ((7 - boty) << 3); // crop bigger than boty
if (topy > 0) mask &= -1L << (topy << 3); // crop smaller than topy
if (rigx < 8) mask &= (0xff >> (7 - rigx)) * 0x0101010101010101L; // crop bigger than rigx
if (lefx > 0) mask &= ((0xff << lefx) & 0xff) * 0x0101010101010101L; // crop smaller than lefx
bmp & mask
}
private def calcBitLoc(singleOne: Long): Int = {
if (singleOne == 1 || ((singleOne >>> 1) < (1L << 31)))
QFork.deBruijnBitPos((singleOne.toInt * 0x077CB531) >>> 27) // de Bruijn constant
else
32 + QFork.deBruijnBitPos(((singleOne >>> 32).toInt * 0x077CB531) >>> 27)
}
private def rectIn(tlx: Int, tly: Int, sidelen: Int, p: Quad.Area) = p.rectIn(tlx, tly, sidelen)
final def update(x0: Int, y0: Int, x: Int, y: Int, v: T, c: Boolean) = {
val sofss = side >> 3 // 3 == log(forkslotside)
val fx = (x - x0) / sofss
val fy = (y - y0) / sofss
val loc = (fy << 3) + fx // 3 == log(forkslotside)
val subx0 = x0 + fx * sofss
val suby0 = y0 + fy * sofss
if ((bmp & (1L << loc)) != 0) {
// write("update: bmp=%x, loc=%d, fx=%d, fy=%d, sx0=%d, sy0=%d, x=%d, y=%d, x0=%d, y0=%d".format(bmp, loc, fx, fy, subx0, suby0, x, y, x0, y0))
val pos = java.lang.Long.bitCount(bmp & ((1L << loc) - 1))
val subnode = subs(pos)
val oldelems = subnode.elems
subs(pos) = subnode.update(subx0, suby0, x, y, v, c)
if (subs(pos).elems != oldelems) elems += 1 // count only if an element was actually added
this
} else { // create a new subnode
val ql = new QList[T](0, new Array[Int](4), new Array(2), side / forkslotside)
ql.update(subx0, suby0, x, y, v, c)
val subelems = java.lang.Long.bitCount(bmp)
val pos = java.lang.Long.bitCount(bmp & ((1L << loc) - 1))
bmp |= (1L << loc)
if (subelems < subs.length) {
backcopy(subs, pos, subs, pos + 1, subelems - pos)
subs(pos) = ql
} else { // double subelems array
val nsubs = new Array[QNode[T]](subs.length * 2)
forwcopy(subs, 0, nsubs, 0, pos)
nsubs(pos) = ql
forwcopy(subs, pos, nsubs, pos + 1, subelems - pos)
subs = nsubs
}
elems += 1
this
}
}
private def backcopy(src: Array[QNode[T]], srcpos: Int, dest: Array[QNode[T]], destpos: Int, len: Int) {
var isrc = srcpos + len - 1
var idest = destpos + len - 1
while (isrc >= srcpos) {
dest(idest) = src(isrc)
isrc -= 1
idest -= 1
}
}
private def forwcopy(src: Array[QNode[T]], srcpos: Int, dest: Array[QNode[T]], destpos: Int, len: Int) {
var isrc = srcpos
var idest = destpos
val until = isrc + len
while (isrc < until) {
dest(idest) = src(isrc)
isrc += 1
idest += 1
}
}
final def remove(x0: Int, y0: Int, x: Int, y: Int, c: Boolean) = {
val sofss = side >> 3 // 3 == log(forkslotside)
val fx = (x - x0) / sofss
val fy = (y - y0) / sofss
val loc = (fy << 3) + fx // 3 == log(forkslotside)
val subx0 = x0 + fx * sofss
val suby0 = y0 + fy * sofss
if ((bmp & (1L << loc)) == 0) this else {
val subelems = java.lang.Long.bitCount(bmp)
val pos = java.lang.Long.bitCount(bmp & ((1L << loc) - 1))
val oldsub = subs(pos)
val oldelems = oldsub.elems
val newsub = oldsub.remove(subx0, suby0, x, y, c)
if (!newsub.isQEmpty) {
subs(pos) = newsub
if (oldelems != newsub.elems) elems -= 1
} else {
elems -= 1
bmp ^= (1L << loc)
forwcopy(subs, pos + 1, subs, pos, subelems - 1 - pos)
}
if (subelems < subs.length / 4) {
val nsubs = new Array[QNode[T]](subs.length / 2)
forwcopy(subs, 0, nsubs, 0, subelems)
subs = nsubs
}
if (elems > lstmax / 2) this else { // compress
toQList(x0, y0, c)
}
}
}
private def toQList(x0: Int, y0: Int, c: Boolean): QList[T] = {
val ql = new QList[T](0, new Array[Int](4), new Array[AnyRef](2), side)
foreach(x0, y0, (x, y, v) => ql.update(x0, y0, x, y, v, c))
ql
}
final def foreach(x0: Int, y0: Int, f: (Int, Int, T) => Unit) {
var left = bmp
var count = 0
val sofss = side >> 3
while (count != -1) {
val singleOne = left & (-left)
if (singleOne == 0) count = -1
else {
val loc = calcBitLoc(singleOne)
val fx = loc & 7 // 7 == (1 << log(forkslotside)) - 1
val fy = loc >> 3 // 3 == log(forkslotside)
val subx0 = x0 + fx * sofss
val suby0 = y0 + fy * sofss
subs(count).foreach(subx0, suby0, f)
count += 1
left ^= singleOne
}
}
}
}
private case class QList[T](var elems: Int, var coords: Array[Int], var lst: Array[AnyRef], side: Int)
extends QNode[T] {
override def isQList = true
final def apply(x0: Int, y0: Int, x: Int, y: Int, d: (Int, Int) => Option[T]): T = {
var i = 0
val until = elems * 2
while (i < until) {
if (x == coords(i) && y == coords(i + 1)) return lst(i / 2).asInstanceOf[T]
i += 2
}
d(x, y).get
}
final def within(x0: Int, y0: Int, p: Quad.Area, buff: Buffer[(Int, Int, T)]): Unit = {
var i = 0
val until = elems * 2
while (i < until) {
val xp = coords(i)
val yp = coords(i + 1)
if (p(xp, yp)) buff += ((xp, yp, lst(i / 2).asInstanceOf[T]))
i += 2
}
}
final def update(x0: Int, y0: Int, x: Int, y: Int, v: T, c: Boolean): QNode[T] = {
// write("list update: x0=%d, y0=%d, x=%d, y=%d".format(x0, y0, x, y))
var i = 0
val until = elems * 2
while (i < until) {
if (x == coords(i) && y == coords(i + 1)) {
lst(i / 2) = v.asInstanceOf[AnyRef]
return this
}
i += 2
}
if (elems == lstmax) {
if (side != mxside) { // still wide
val n = toQNode(new QFork(0, 0, new Array[QNode[T]](lstmax * 2), side), x0, y0, c)
n.update(x0, y0, x, y, v, c)
n
} else { // smallest granularity
val n = toQNode(new QMatrix(0, 0, new Array[AnyRef](QNode.mxsz)), x0, y0, c)
n.update(x0, y0, x, y, v, c)
n
}
} else {
if (elems == lst.length) {
val nlst = new Array[AnyRef](2 * elems)
copy(lst, 0, nlst, 0, elems)
lst = nlst
val ncoords = new Array[Int](4 * elems)
copyint(coords, 0, ncoords, 0, 2 * elems)
coords = ncoords
}
coords(2 * elems) = x
coords(2 * elems + 1) = y
lst(elems) = v.asInstanceOf[AnyRef]
elems += 1
this
}
}
private def toQNode(n: QNode[T], x0: Int, y0: Int, c: Boolean) = {
// write("toQNode(%d, %d): ".format(x0, y0) + n + " from: " + this.lst.toList + ", " + this.coords.toList)
var i = 0
while (i < elems) {
val curr = lst(i).asInstanceOf[T]
n.update(x0, y0, coords(2 * i), coords(2 * i + 1), curr, c)
i += 1
}
n
}
final def remove(x0: Int, y0: Int, x: Int, y: Int, c: Boolean): QNode[T] = {
var i = 0
val until = 2 * elems
while (i < until) {
if (coords(i) == x && coords(i + 1) == y) return remove(i / 2)
i += 2
}
this
}
private def copy(src: Array[AnyRef], srcpos: Int, dest: Array[AnyRef], destpos: Int, len: Int) {
var isrc = srcpos
var idest = destpos
val until = isrc + len
while (isrc < until) {
dest(idest) = src(isrc)
isrc += 1
idest += 1
}
}
private def copyint(src: Array[Int], srcpos: Int, dest: Array[Int], destpos: Int, len: Int) {
var isrc = srcpos
var idest = destpos
val until = isrc + len
while (isrc < until) {
dest(idest) = src(isrc)
isrc += 1
idest += 1
}
}
private def remove(i: Int): QNode[T] = {
elems -= 1
if (elems < lst.length / 4) { // compress if necessary
val nlst = new Array[AnyRef](lst.length / 2)
copy(lst, 0, nlst, 0, i)
copy(lst, i + 1, nlst, i, elems - i)
lst = nlst
val ncoords = new Array[Int](coords.length / 2)
copyint(coords, 0, ncoords, 0, 2 * i)
copyint(coords, 2 * (i + 1), ncoords, 2 * i, 2 * (elems - i))
coords = ncoords
} else {
lst(i) = lst(elems)
lst(elems) = null
coords(2 * i) = coords(2 * elems)
coords(2 * i + 1) = coords(2 * elems + 1)
}
if (elems > 0) this else new QEmpty[T](side)
}
final def foreach(x0: Int, y0: Int, f: (Int, Int, T) => Unit) {
var i = 0
while (i < elems) {
f(coords(2 * i), coords(2 * i + 1), lst(i).asInstanceOf[T])
i += 1
}
}
}
private case class QMatrix[T](var elems: Int, var bmp: Int, var mx: Array[AnyRef], var cval: AnyRef = null)
extends QNode[T] {
def side = mxside
final def apply(x0: Int, y0: Int, x: Int, y: Int, d: (Int, Int) => Option[T]) = if (cval ne null) cval.asInstanceOf[T] else {
val loc = ((y - y0) << 2) | (x - x0) // 2 == log(mxside)
if ((bmp & (1 << loc)) == 0) d(x, y).get
else mx(loc).asInstanceOf[T]
}
final def within(x0: Int, y0: Int, p: Quad.Area, buff: Buffer[(Int, Int, T)]): Unit = if (cval eq null) {
var loc = 0
val until = mxside * mxside
val b = bmp
while (loc < until) {
if ((b & (1 << loc)) != 0) {
val xp = x0 + (loc & 0x3) // 0x3 == 1 << log(mxside) - 1
val yp = y0 + ((loc & 0xc) >> 2) // 0xc == (1 << (2 * log(mxside))) - (1 << log(mxside))
if (p(xp, yp)) buff += ((xp, yp, mx(loc).asInstanceOf[T]))
}
loc += 1
}
} else {
// compressed version
var x = 0
var y = 0
val sd = mxside
while (x < sd) {
while (y < sd) {
val xp = x0 + x
val yp = y0 + y
if (p(xp, yp)) buff += ((xp, yp, cval.asInstanceOf[T]))
y += 1
}
y = 0
x += 1
}
}
private def unpack(c: Boolean) {
// check if previously compressed and restore
if (c && (cval ne null)) {
val len = QNode.mxsz
mx = new Array[AnyRef](len)
var i = 0
while (i < len) {
mx(i) = cval
i += 1
}
cval = null
}
}
private def pack(c: Boolean) {
if (c && elems == mxside * mxside) {
var allsame = true
val refer = mx(0)
var i = 1
val len = mx.length
while (i < len) {
if (mx(i) ne refer) {
allsame = false
i = len
}
i += 1
}
if (allsame) {
cval = refer
mx = null
}
}
}
final def update(x0: Int, y0: Int, x: Int, y: Int, v: T, c: Boolean) = {
unpack(c)
val loc = ((y - y0) << 2) | (x - x0) // 2 == log(mxside)
if ((bmp & (1 << loc)) == 0) {
bmp |= 1 << loc
elems += 1
}
// write("update: " + loc + ", from: " + (x, y) + ", left upper: " + (x0, y0))
mx(loc) = v.asInstanceOf[AnyRef]
// check if compression needed and possible
pack(c)
this
}
final def remove(x0: Int, y0: Int, x: Int, y: Int, c: Boolean) = {
unpack(c)
val loc = ((y - y0) << 2) | (x - x0)
val flag = 1 << loc
if ((bmp & flag) != 0) {
bmp ^= flag
elems -= 1
}
mx(loc) = null
if (elems < (lstmax / 2)) { // compress back to a list
var ql = new QList[T](0, Array[Int](0, 0, 0, 0), new Array[AnyRef](2), side)
var loc = 0
val until = mxside * mxside
val b = bmp
while (loc < until) {
if ((b & (1 << loc)) != 0) {
val xp = x0 + (loc & 0x3) // 0x3 == 1 << log(mxside) - 1
val yp = y0 + ((loc & 0xc) >> 2) // 0xc == (1 << (2 * log(mxside))) - (1 << log(mxside))
ql.update(x0, y0, xp, yp, mx(loc).asInstanceOf[T], c)
}
loc += 1
}
ql
} else this
}
final def foreach(x0: Int, y0: Int, f: (Int, Int, T) => Unit) = if (cval eq null) {
var loc = 0
val until = mxside * mxside
val b = bmp
while (loc < until) {
if ((b & (1 << loc)) != 0) {
val xp = x0 + (loc & 0x3) // 0x3 == 1 << log(mxside) - 1
val yp = y0 + ((loc & 0xc) >> 2) // 0xc == (1 << (2 * log(mxside))) - (1 << log(mxside))
f(xp, yp, mx(loc).asInstanceOf[T])
}
loc += 1
}
} else {
// compressed version
var x = 0
var y = 0
val sd = mxside
while (x < sd) {
while (y < sd) {
val xp = x0 + x
val yp = y0 + y
f(xp, yp, cval.asInstanceOf[T])
y += 1
}
y = 0
x += 1
}
}
}
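// A minimal usage sketch for the Quad above (a sketch: mutation requires an
// implicit `Acc` capability value, and Unit is used as a stand-in here; the
// surrounding engine presumably supplies a more meaningful access type):
object QuadUsageSketch {
  implicit val access: Unit = ()

  def main(args: Array[String]): Unit = {
    val q = new Quad[String, Unit](32, 32, (_, _) => Some("empty"), compress = true)
    q(3, 4) = "tree"                        // desugars to q.update(3, 4, "tree")(access)
    println(q(3, 4))                        // "tree"
    println(q(0, 0))                        // not stored, so the default fires: "empty"
    println(q.within(Quad.radius(3, 4, 2))) // contains (3, 4, "tree")
  }
}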
|
axel22/scala-2d-game-editor
|
src/main/scala/org/brijest/storm/engine/model/components/Quad.scala
|
Scala
|
bsd-3-clause
| 19,148
|
package org.scalaide.ui.editor
import scala.collection.JavaConverters._
import scala.collection.breakOut
import org.eclipse.jdt.core.ICompilationUnit
import org.eclipse.jdt.core.compiler.IProblem
import org.eclipse.jdt.internal.ui.javaeditor.CompilationUnitDocumentProvider.ProblemAnnotation
import org.eclipse.jface.text.ITextViewerExtension2
import org.eclipse.jface.text.Position
import org.eclipse.jface.text.source.Annotation
import org.eclipse.jface.text.source.IAnnotationModelExtension2
import org.scalaide.util.internal.eclipse.AnnotationUtils._
import org.scalaide.util.ui.DisplayThread
trait DecoratedInteractiveEditor extends ISourceViewerEditor {
/** Return the annotation model associated with the current document. */
private def annotationModel = Option(getDocumentProvider).map(_.getAnnotationModel(getEditorInput))
private var previousAnnotations = List[Annotation]()
/**
* This removes all annotations in the region between `start` and `end`.
*/
def removeAnnotationsInRegion(start: Int, end: Int): Unit = annotationModel foreach { model ⇒
val annsToRemove = model match {
case model: IAnnotationModelExtension2 ⇒
model.getAnnotationIterator(start, end - start, /*canStartBefore*/ false, /*canEndAfter*/ false).asScala
case _ ⇒
model.getAnnotationIterator.asScala.filter { ann ⇒
val pos = model.getPosition(ann)
pos.offset >= start && pos.offset + pos.length <= end
}
}
model.deleteAnnotations(annsToRemove.toSeq)
}
/**
* Update annotations on the editor from a list of IProblems
*/
def updateErrorAnnotations(errors: List[IProblem], cu: ICompilationUnit): Unit = annotationModel foreach { model ⇒
val newAnnotations: Map[Annotation, Position] = (for (e ← errors) yield {
val annotation = new ProblemAnnotation(e, cu) // no compilation unit
val position = new Position(e.getSourceStart, e.getSourceEnd - e.getSourceStart + 1)
(annotation, position)
})(breakOut)
model.replaceAnnotations(previousAnnotations, newAnnotations)
previousAnnotations = newAnnotations.keys.toList
// This shouldn't be necessary in @dragos' opinion. But see #84 and
// http://stackoverflow.com/questions/12507620/race-conditions-in-annotationmodel-error-annotations-lost-in-reconciler
getViewer match {
case viewer: ITextViewerExtension2 ⇒
// TODO: This should be replaced by a better modularization of semantic highlighting PositionsChange
val newPositions = newAnnotations.values
def end(x: Position) = x.offset + x.length - 1
val taintedBounds = (newPositions foldLeft (Int.MaxValue → 0)) { (acc, p1) ⇒ (Math.min(acc._1, p1.offset), Math.max(acc._2, end(p1))) }
val taintedLength = (taintedBounds._2 - taintedBounds._1 + 1)
DisplayThread.asyncExec {
viewer.invalidateTextPresentation(taintedBounds._1, taintedLength)
}
case viewer ⇒
DisplayThread.asyncExec {
viewer.invalidateTextPresentation()
}
}
}
}
|
scala-ide/scala-ide
|
org.scala-ide.sdt.core/src/org/scalaide/ui/editor/DecoratedInteractiveEditor.scala
|
Scala
|
bsd-3-clause
| 3,083
|
package coder.simon.ch6
trait State[S, +A] { self =>
def run: S => (A, S)
def map[B](f: A => B) = new State[S, B] {
def run = { s =>
val (a, s2) = self.run(s)
(f(a), s2)
}
}
def flatMap[B](f: A => State[S, B]) = new State[S, B] {
def run = { s =>
val (a, s2) = self.run(s)
f(a).run(s2)
}
}
def get: State[S, S] = new State[S, S] {
def run = s => (s, s)
}
def set(ns: S) = new State[S, Unit] {
def run = { _ => ((), ns) }
}
def modify(f: S => S): State[S, Unit] = for {
s <- get
_ <- set(f(s))
} yield ()
def map2[B, C](sb: State[S, B])(f: (A, B) => C) = flatMap { va => sb.map { vb => f(va, vb) } }
}
object State {
def unit[S, A](a: A) = new State[S, A] {
def run = s => (a, s)
}
def apply[S, A](f: S => (A, S)) = new State[S, A] {
def run = f
}
def sequence[S, A](ls: List[State[S, A]]): State[S, List[A]] = ls match {
case Nil => unit(Nil)
case h :: t => h.map2(sequence(t))(_ :: _)
}
}
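// A small supplementary sketch of map2 (names are illustrative): run threads
// the state left-to-right, so the second counter sees the first one's output.
object StateSketch {
  val counter: State[Int, Int] = State(s => (s, s + 1))
  val pair: State[Int, (Int, Int)] = counter.map2(counter)((_, _))
  // pair.run(10) == ((10, 11), 12)
}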
object MX {
sealed trait Input
case object Coin extends Input
case object Turn extends Input
case class Machine(locked: Boolean, coins: Int, candies: Int)
def action(input: Input): State[Machine, Unit] = State { machine =>
((), (input, machine) match {
case (Coin, Machine(true, m, n)) if n > 0 =>
Machine(false, m + 1, n)
case (Turn, Machine(false, m, n)) if n > 0 =>
Machine(true, m, n - 1)
case _ => machine
})
}
def add(v: Int): State[Int, Int] = State { x => (x, x + v) }
def runx(inputs: List[Input]) = {
val xs = inputs.map(action)
State.sequence(xs)
}
def main(args: Array[String]): Unit = {
val init = Machine(false, 17, 91)
val actions = List(Coin, Turn, Coin, Turn, Coin, Turn, Coin)
val (p, m) = runx(actions).run(init)
println(p)
println(m)
val a2 = List(add(1), add(2), add(3))
val r = State.sequence(a2).run(100)
println(r)
}
}
|
erlangxk/fpscala
|
src/main/scala/coder/simon/ch6/State.scala
|
Scala
|
mit
| 1,968
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.catalog
import com.google.common.collect.Lists
import org.apache.flink.table.api.config.ExecutionConfigOptions
import org.apache.flink.table.api.internal.TableEnvironmentImpl
import org.apache.flink.table.api.{DataTypes, EnvironmentSettings, Schema, Table, TableDescriptor, TableEnvironment, TableException}
import org.apache.flink.table.functions.ScalarFunction
import org.apache.flink.table.planner.factories.TableFactoryHarness
import org.apache.flink.table.planner.factories.utils.TestCollectionTableFactory
import org.apache.flink.test.util.AbstractTestBase
import org.apache.flink.types.Row
import org.apache.flink.util.CollectionUtil
import org.junit.Assert.assertEquals
import org.junit.rules.ExpectedException
import org.junit.runner.RunWith
import org.junit.runners.Parameterized
import org.junit.{Before, Rule, Test}
import java.util
import scala.collection.JavaConversions._
/** Test cases for view related DDLs. */
@RunWith(classOf[Parameterized])
class CatalogViewITCase(isStreamingMode: Boolean) extends AbstractTestBase {
//~ Instance fields --------------------------------------------------------
private val settings = if (isStreamingMode) {
EnvironmentSettings.newInstance().inStreamingMode().build()
} else {
EnvironmentSettings.newInstance().inBatchMode().build()
}
private val tableEnv: TableEnvironment = TableEnvironmentImpl.create(settings)
var _expectedEx: ExpectedException = ExpectedException.none
@Rule
def expectedEx: ExpectedException = _expectedEx
@Before
def before(): Unit = {
tableEnv.getConfig
.getConfiguration
.setInteger(ExecutionConfigOptions.TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM, 1)
TestCollectionTableFactory.reset()
}
//~ Tools ------------------------------------------------------------------
implicit def rowOrdering: Ordering[Row] = Ordering.by((r : Row) => {
val builder = new StringBuilder
0 until r.getArity foreach(idx => builder.append(r.getField(idx)))
builder.toString()
})
def toRow(args: Any*):Row = {
val row = new Row(args.length)
0 until args.length foreach {
i => row.setField(i, args(i))
}
row
}
@Test
def testCreateViewIfNotExistsTwice(): Unit = {
val sourceData = List(
toRow(1, "1000", 2),
toRow(2, "1", 3),
toRow(3, "2000", 4),
toRow(1, "2", 2),
toRow(2, "3000", 3))
TestCollectionTableFactory.initData(sourceData)
val sourceDDL =
"""
|CREATE TABLE T1(
| a int,
| b varchar,
| c int
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val sinkDDL =
"""
|CREATE TABLE T2(
| a int,
| b varchar,
| c int
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val viewWith3ColumnDDL =
"""
|CREATE VIEW IF NOT EXISTS T3(d, e, f) AS SELECT a, b, c FROM T1
""".stripMargin
val viewWith2ColumnDDL =
"""
|CREATE VIEW IF NOT EXISTS T3(d, e) AS SELECT a, b FROM T1
""".stripMargin
val query = "SELECT d, e, f FROM T3"
tableEnv.executeSql(sourceDDL)
tableEnv.executeSql(sinkDDL)
tableEnv.executeSql(viewWith3ColumnDDL)
tableEnv.executeSql(viewWith2ColumnDDL)
tableEnv.sqlQuery(query).executeInsert("T2").await()
assertEquals(sourceData.sorted, TestCollectionTableFactory.RESULT.sorted)
}
@Test
def testCreateViewWithoutFieldListAndWithStar(): Unit = {
val sourceData = List(
toRow(1, "1000", 2),
toRow(2, "1", 3),
toRow(3, "2000", 4),
toRow(1, "2", 2),
toRow(2, "3000", 3))
TestCollectionTableFactory.initData(sourceData)
val sourceDDL =
"""
|CREATE TABLE T1(
| a int,
| b varchar,
| c int
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val sinkDDL =
"""
|CREATE TABLE T2(
| a int,
| b varchar,
| c int
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val viewDDL =
"""
|CREATE VIEW IF NOT EXISTS T3 AS SELECT * FROM T1
""".stripMargin
val query = "SELECT * FROM T3"
tableEnv.executeSql(sourceDDL)
tableEnv.executeSql(sinkDDL)
tableEnv.executeSql(viewDDL)
tableEnv.sqlQuery(query).executeInsert("T2").await()
assertEquals(sourceData.sorted, TestCollectionTableFactory.RESULT.sorted)
}
@Test
def testCreateTemporaryView(): Unit = {
val sourceData = List(
toRow(1, "1000", 2),
toRow(2, "1", 3),
toRow(3, "2000", 4),
toRow(1, "2", 2),
toRow(2, "3000", 3))
TestCollectionTableFactory.initData(sourceData)
val sourceDDL =
"""
|CREATE TABLE T1(
| a int,
| b varchar,
| c int
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val sinkDDL =
"""
|CREATE TABLE T2(
| a int,
| b varchar,
| c int
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val viewDDL =
"""
|CREATE TEMPORARY VIEW T3(d, e, f) AS SELECT a, b, c FROM T1
""".stripMargin
val query = "SELECT d, e, f FROM T3"
tableEnv.executeSql(sourceDDL)
tableEnv.executeSql(sinkDDL)
tableEnv.executeSql(viewDDL)
tableEnv.sqlQuery(query).executeInsert("T2").await()
assertEquals(sourceData.sorted, TestCollectionTableFactory.RESULT.sorted)
}
@Test
def testTemporaryViewMaskPermanentViewWithSameName(): Unit = {
val sourceData = List(
toRow(1, "1000", 2),
toRow(2, "1", 3),
toRow(3, "2000", 4),
toRow(1, "2", 2),
toRow(2, "3000", 3))
val sourceDDL =
"""
|CREATE TABLE T1(
| a int,
| b varchar,
| c int
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val sinkDDL =
"""
|CREATE TABLE T2(
| a int,
| b varchar,
| c int
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val permanentView =
"""
|CREATE VIEW IF NOT EXISTS T3 AS SELECT a, b, c FROM T1
""".stripMargin
val permanentViewData = List(
toRow(1, "1000", 2),
toRow(2, "1", 3),
toRow(3, "2000", 4),
toRow(1, "2", 2),
toRow(2, "3000", 3))
val temporaryView =
"""
|CREATE TEMPORARY VIEW IF NOT EXISTS T3 AS SELECT a, b, c+1 FROM T1
""".stripMargin
val temporaryViewData = List(
toRow(1, "1000", 3),
toRow(2, "1", 4),
toRow(3, "2000", 5),
toRow(1, "2", 3),
toRow(2, "3000", 4))
tableEnv.executeSql(sourceDDL)
tableEnv.executeSql(sinkDDL)
tableEnv.executeSql(permanentView)
tableEnv.executeSql(temporaryView)
TestCollectionTableFactory.initData(sourceData)
val query = "SELECT * FROM T3"
tableEnv.sqlQuery(query).executeInsert("T2").await()
// temporary view T3 masks permanent view T3
assertEquals(temporaryViewData.sorted, TestCollectionTableFactory.RESULT.sorted)
TestCollectionTableFactory.reset()
TestCollectionTableFactory.initData(sourceData)
val dropTemporaryView =
"""
|DROP TEMPORARY VIEW IF EXISTS T3
""".stripMargin
tableEnv.executeSql(dropTemporaryView)
tableEnv.sqlQuery(query).executeInsert("T2").await()
// now we only have permanent view T3
assertEquals(permanentViewData.sorted, TestCollectionTableFactory.RESULT.sorted)
}
private def buildTableDescriptor(): TableDescriptor = {
val tableDescriptor: TableDescriptor = TableFactoryHarness.newBuilder()
.boundedScanSource()
.schema(Schema.newBuilder()
.column("a", DataTypes.INT())
.column("b", DataTypes.STRING())
.column("c", DataTypes.INT())
.build())
.sink()
.build()
tableDescriptor
}
@Test
def testShowCreateQueryOperationCatalogView(): Unit = {
val table: Table = tableEnv.from(buildTableDescriptor())
_expectedEx.expect(classOf[TableException])
_expectedEx.expectMessage(
"SHOW CREATE VIEW is not supported for views registered by Table API.")
tableEnv.createTemporaryView("QueryOperationCatalogView", table)
tableEnv.executeSql("show create view QueryOperationCatalogView")
}
@Test
def testShowCreateTemporaryView(): Unit = {
tableEnv.createTable("T1", buildTableDescriptor())
val tView1DDL: String = "CREATE TEMPORARY VIEW t_v1 AS SELECT a, b, c FROM T1"
tableEnv.executeSql(tView1DDL)
val tView1ShowCreateResult: util.List[Row] = CollectionUtil.iteratorToList(
tableEnv
.executeSql("show create view t_v1")
.collect()
)
assertEquals(tView1ShowCreateResult, Lists.newArrayList(
Row.of(
s"""CREATE TEMPORARY VIEW `default_catalog`.`default_database`.`t_v1`(`a`, `b`, `c`) as
|SELECT `T1`.`a`, `T1`.`b`, `T1`.`c`
|FROM `default_catalog`.`default_database`.`T1`"""
.stripMargin
)
))
val tView2DDL: String = "CREATE TEMPORARY VIEW t_v2(d, e, f) AS SELECT a, b, c FROM T1"
tableEnv.executeSql(tView2DDL)
val tView2ShowCreateResult: util.List[Row] = CollectionUtil.iteratorToList(
tableEnv
.executeSql("show create view t_v2")
.collect()
)
assertEquals(tView2ShowCreateResult, Lists.newArrayList(
Row.of(
s"""CREATE TEMPORARY VIEW `default_catalog`.`default_database`.`t_v2`(`d`, `e`, `f`) as
|SELECT `T1`.`a`, `T1`.`b`, `T1`.`c`
|FROM `default_catalog`.`default_database`.`T1`"""
.stripMargin
)
))
}
@Test
def testShowCreateCatalogView(): Unit = {
tableEnv.createTable("T1", buildTableDescriptor())
val view1DDL: String = "CREATE VIEW v1 AS SELECT a, b, c FROM T1"
tableEnv.executeSql(view1DDL)
val view1ShowCreateResult: util.List[Row] = CollectionUtil.iteratorToList(
tableEnv
.executeSql("show create view v1")
.collect()
)
assertEquals(view1ShowCreateResult, Lists.newArrayList(
Row.of(
s"""CREATE VIEW `default_catalog`.`default_database`.`v1`(`a`, `b`, `c`) as
|SELECT `T1`.`a`, `T1`.`b`, `T1`.`c`
|FROM `default_catalog`.`default_database`.`T1`"""
.stripMargin
)
))
val view2DDL: String = "CREATE VIEW v2(x, y, z) AS SELECT a, b, c FROM T1"
tableEnv.executeSql(view2DDL)
val view2ShowCreateResult: util.List[Row] = CollectionUtil.iteratorToList(
tableEnv.executeSql("show create view v2")
.collect()
)
assertEquals(view2ShowCreateResult, Lists.newArrayList(
Row.of(
s"""CREATE VIEW `default_catalog`.`default_database`.`v2`(`x`, `y`, `z`) as
|SELECT `T1`.`a`, `T1`.`b`, `T1`.`c`
|FROM `default_catalog`.`default_database`.`T1`"""
.stripMargin
)
))
}
@Test
def testShowCreateViewWithLeftJoinGroupBy(): Unit = {
tableEnv.createTable("t1", buildTableDescriptor())
tableEnv.createTable("t2", buildTableDescriptor())
val viewWithLeftJoinGroupByDDL: String =
s"""create view viewLeftJoinGroupBy as
|select max(t1.a) max_value
|from t1 left join t2 on t1.c=t2.c"""
.stripMargin
tableEnv.executeSql(viewWithLeftJoinGroupByDDL)
val showCreateLeftJoinGroupByViewResult: util.List[Row] = CollectionUtil.iteratorToList(
tableEnv.executeSql("show create view viewLeftJoinGroupBy")
.collect()
)
assertEquals(showCreateLeftJoinGroupByViewResult, Lists.newArrayList(
Row.of(
s"""CREATE VIEW `default_catalog`.`default_database`.`viewLeftJoinGroupBy`(`max_value`) as
|SELECT MAX(`t1`.`a`) AS `max_value`
|FROM `default_catalog`.`default_database`.`t1`
|LEFT JOIN `default_catalog`.`default_database`.`t2` ON `t1`.`c` = `t2`.`c`"""
.stripMargin
)
))
}
@Test
def testShowCreateViewWithUDFOuterJoin(): Unit = {
tableEnv.createTable("t1", buildTableDescriptor())
tableEnv.createTable("t2", buildTableDescriptor())
tableEnv.createTemporarySystemFunction("udfEqualsOne", new ScalarFunction {
def eval(): Int ={
1
}
})
val viewWithCrossJoinDDL: String =
s"""create view viewWithCrossJoin as
|select udfEqualsOne() a, t1.a a1, t2.b b2 from t1 cross join t2"""
.stripMargin
tableEnv.executeSql(viewWithCrossJoinDDL)
val showCreateCrossJoinViewResult: util.List[Row] = CollectionUtil.iteratorToList(
tableEnv.executeSql("show create view viewWithCrossJoin")
.collect()
)
assertEquals(showCreateCrossJoinViewResult, Lists.newArrayList(
Row.of(
s"""CREATE VIEW `default_catalog`.`default_database`.`viewWithCrossJoin`(`a`, `a1`, `b2`) as
|SELECT `udfEqualsOne`() AS `a`, `t1`.`a` AS `a1`, `t2`.`b` AS `b2`
|FROM `default_catalog`.`default_database`.`t1`
|CROSS JOIN `default_catalog`.`default_database`.`t2`"""
.stripMargin
)
))
}
@Test
def testShowCreateViewWithInnerJoin(): Unit = {
tableEnv.createTable("t1", buildTableDescriptor())
tableEnv.createTable("t2", buildTableDescriptor())
val viewWithInnerJoinDDL: String =
s"""create view innerJoinView as
|select t1.a a1, t2.b b2
|from t1 inner join t2
|on t1.c=t2.c"""
.stripMargin
tableEnv.executeSql(viewWithInnerJoinDDL)
val showCreateInnerJoinViewResult: util.List[Row] = CollectionUtil.iteratorToList(
tableEnv.executeSql("show create view innerJoinView")
.collect()
)
assertEquals(showCreateInnerJoinViewResult, Lists.newArrayList(
Row.of(
s"""CREATE VIEW `default_catalog`.`default_database`.`innerJoinView`(`a1`, `b2`) as
|SELECT `t1`.`a` AS `a1`, `t2`.`b` AS `b2`
|FROM `default_catalog`.`default_database`.`t1`
|INNER JOIN `default_catalog`.`default_database`.`t2` ON `t1`.`c` = `t2`.`c`"""
.stripMargin
)
))
}
}
object CatalogViewITCase {
@Parameterized.Parameters(name = "{0}")
def parameters(): java.util.Collection[Boolean] = {
util.Arrays.asList(true, false)
}
}
|
lincoln-lil/flink
|
flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/catalog/CatalogViewITCase.scala
|
Scala
|
apache-2.0
| 15,232
|
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.compiler.v2_3.planner.logical.idp
import collection.mutable
// Table used by IDPSolver to record optimal plans found so far
//
class IDPTable[P](private val map: mutable.Map[Goal, P] = mutable.Map.empty[Goal, P]) extends IDPCache[P] {
def size = map.size
def put(goal: Goal, product: P): Unit = {
map.put(goal, product)
}
def apply(goal: Goal): Option[P] = map.get(goal)
def contains(goal: Goal): Boolean = map.contains(goal)
def plansOfSize(k: Int) = map.iterator.filter(_._1.size == k)
def plans = map.iterator
def removeAllTracesOf(goal: Goal) = {
val toDrop = map.keysIterator.filter(entry => (entry & goal).nonEmpty)
toDrop.foreach(map.remove)
}
override def toString(): String = s"IDPTable(numberOfPlans=$size, largestSolved=${map.keySet.map(_.size).max})"
}
object IDPTable {
def apply[X, P](registry: IdRegistry[X], seed: Seed[X, P]) = {
val builder = mutable.Map.newBuilder[Goal, P]
if (seed.hasDefiniteSize)
builder.sizeHint(seed.size)
seed.foreach { case (goal, product) => builder += registry.registerAll(goal) -> product }
new IDPTable[P](builder.result())
}
}
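// A minimal usage sketch (a sketch: it assumes `Goal` is a bit set of solved
// ids, which matches the `&` / `size` / `nonEmpty` operations used above):
//
//   val table = new IDPTable[String]()
//   table.put(BitSet(0, 1), "planAB")
//   table.put(BitSet(2), "planC")
//   table(BitSet(0, 1))                // Some("planAB")
//   table.removeAllTracesOf(BitSet(1)) // drops every goal overlapping {1}
//   table.contains(BitSet(0, 1))       // now false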
|
HuangLS/neo4j
|
community/cypher/cypher-compiler-2.3/src/main/scala/org/neo4j/cypher/internal/compiler/v2_3/planner/logical/idp/IDPTable.scala
|
Scala
|
apache-2.0
| 2,756
|
package dotty.tools
package dotc
package reporting
import core._
import Contexts.Context
import Decorators._, Symbols._, Names._, NameOps._, Types._, Flags._
import Denotations.SingleDenotation
import SymDenotations.SymDenotation
import util.SourcePosition
import parsing.Scanners.Token
import parsing.Tokens
import printing.Highlighting._
import printing.Formatting
import ErrorMessageID._
import ast.Trees
import config.{Feature, ScalaVersion}
import typer.ErrorReporting.{Errors, err}
import typer.ProtoTypes.ViewProto
import scala.util.control.NonFatal
import StdNames.nme
import printing.Formatting.hl
/** Messages
* ========
* The role of messages is to provide the necessary details for a simple to
* understand diagnostic event. Each message can be turned into a message
* container (one of the above) by calling the appropriate method on them.
* For instance:
*
* ```scala
* EmptyCatchBlock(tree).error(pos) // res: Error
* EmptyCatchBlock(tree).warning(pos) // res: Warning
* ```
*/
object messages {
import ast.Trees._
import ast.untpd
import ast.tpd
/** Helper methods for messages */
def implicitClassRestrictionsText(implicit ctx: Context): String =
em"""|For a full list of restrictions on implicit classes visit
|${Blue("http://docs.scala-lang.org/overviews/core/implicit-classes.html")}"""
abstract class SyntaxMsg(errorId: ErrorMessageID) extends Message(errorId):
def kind = "Syntax"
abstract class TypeMsg(errorId: ErrorMessageID) extends Message(errorId):
def kind = "Type"
abstract class TypeMismatchMsg(errorId: ErrorMessageID) extends Message(errorId):
def kind = "Type Mismatch"
abstract class NamingMsg(errorId: ErrorMessageID) extends Message(errorId):
def kind = "Naming"
abstract class DeclarationMsg(errorId: ErrorMessageID) extends Message(errorId):
def kind = "Declaration"
/** A simple not found message (either for idents or member selections).
* Messages of this class are sometimes dropped in favor of other, more
* specific messages.
*/
abstract class NotFoundMsg(errorId: ErrorMessageID) extends Message(errorId):
def kind = "Not Found"
def name: Name
abstract class PatternMatchMsg(errorId: ErrorMessageID) extends Message(errorId):
def kind = "Pattern Match"
abstract class CyclicMsg(errorId: ErrorMessageID) extends Message(errorId):
def kind = "Cyclic"
abstract class ReferenceMsg(errorId: ErrorMessageID) extends Message(errorId):
def kind = "Reference"
abstract class EmptyCatchOrFinallyBlock(tryBody: untpd.Tree, errNo: ErrorMessageID)(implicit ctx: Context)
extends SyntaxMsg(EmptyCatchOrFinallyBlockID) {
def explain = {
val tryString = tryBody match {
case Block(Nil, untpd.EmptyTree) => "{}"
case _ => tryBody.show
}
val code1 =
s"""|import scala.util.control.NonFatal
|
|try $tryString catch {
| case NonFatal(e) => ???
|}""".stripMargin
val code2 =
s"""|try $tryString finally {
| // perform your cleanup here!
|}""".stripMargin
em"""|A ${hl("try")} expression should be followed by some mechanism to handle any exceptions
|thrown. Typically a ${hl("catch")} expression follows the ${hl("try")} and pattern matches
|on any expected exceptions. For example:
|
|$code1
|
|It is also possible to follow a ${hl("try")} immediately by a ${hl("finally")} - letting the
|exception propagate - but still allowing for some clean up in ${hl("finally")}:
|
|$code2
|
|It is recommended to use the ${hl("NonFatal")} extractor to catch all exceptions as it
|correctly handles transfer functions like ${hl("return")}."""
}
}
class EmptyCatchBlock(tryBody: untpd.Tree)(implicit ctx: Context)
extends EmptyCatchOrFinallyBlock(tryBody, EmptyCatchBlockID) {
def msg =
em"""|The ${hl("catch")} block does not contain a valid expression, try
|adding a case like - ${hl("case e: Exception =>")} to the block"""
}
class EmptyCatchAndFinallyBlock(tryBody: untpd.Tree)(implicit ctx: Context)
extends EmptyCatchOrFinallyBlock(tryBody, EmptyCatchAndFinallyBlockID) {
def msg =
em"""|A ${hl("try")} without ${hl("catch")} or ${hl("finally")} is equivalent to putting
|its body in a block; no exceptions are handled."""
}
class DeprecatedWithOperator()(implicit ctx: Context)
extends SyntaxMsg(DeprecatedWithOperatorID) {
def msg =
em"""${hl("with")} as a type operator has been deprecated; use ${hl("&")} instead"""
def explain =
em"""|Dotty introduces intersection types - ${hl("&")} types. These replace the
|use of the ${hl("with")} keyword. There are a few differences in
|semantics between intersection types and using ${hl("with")}."""
}
class CaseClassMissingParamList(cdef: untpd.TypeDef)(implicit ctx: Context)
extends SyntaxMsg(CaseClassMissingParamListID) {
def msg =
em"""|A ${hl("case class")} must have at least one parameter list"""
def explain =
em"""|${cdef.name} must have at least one parameter list, if you would rather
|have a singleton representation of ${cdef.name}, use a "${hl("case object")}".
|Or, add an explicit ${hl("()")} as a parameter list to ${cdef.name}."""
}
class AnonymousFunctionMissingParamType(param: untpd.ValDef,
args: List[untpd.Tree],
tree: untpd.Function,
pt: Type)
(implicit ctx: Context)
extends TypeMsg(AnonymousFunctionMissingParamTypeID) {
def msg = {
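      // A synthetic parameter name (of the form x$N) indicates that the
      // function was expanded from shorthand syntax, so showing the
      // expanded function helps.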
val ofFun =
if (MethodType.syntheticParamNames(args.length + 1) contains param.name)
i" of expanded function:\\n$tree"
else
""
val inferred =
if (pt == WildcardType) ""
else i"\\nWhat I could infer was: $pt"
i"""Missing parameter type
|
|I could not infer the type of the parameter ${param.name}$ofFun.$inferred"""
}
def explain = ""
}
class WildcardOnTypeArgumentNotAllowedOnNew()(implicit ctx: Context)
extends SyntaxMsg(WildcardOnTypeArgumentNotAllowedOnNewID) {
def msg = "Type argument must be fully defined"
def explain =
val code1: String =
"""
|object TyperDemo {
| class Team[A]
| val team = new Team[?]
|}
""".stripMargin
val code2: String =
"""
|object TyperDemo {
| class Team[A]
| val team = new Team[Int]
|}
""".stripMargin
em"""|Wildcard on arguments is not allowed when declaring a new type.
|
|Given the following example:
|
|$code1
|
           |You must fully specify all type parameters, for instance:
|
|$code2 """
}
// Type Errors ------------------------------------------------------------ //
class DuplicateBind(bind: untpd.Bind, tree: untpd.CaseDef)(implicit ctx: Context)
extends NamingMsg(DuplicateBindID) {
def msg = em"duplicate pattern variable: ${bind.name}"
def explain = {
val pat = tree.pat.show
      val guard = tree.guard match {
        case untpd.EmptyTree => ""
        case guard => s" if ${guard.show}"
      }
      val body = tree.body match {
        case Block(Nil, untpd.EmptyTree) => ""
        case body => s" ${body.show}"
      }
      val caseDef = s"case $pat$guard =>$body"
em"""|For each ${hl("case")} bound variable names have to be unique. In:
|
|$caseDef
|
|${bind.name} is not unique. Rename one of the bound variables!"""
}
}
class MissingIdent(tree: untpd.Ident, treeKind: String, val name: Name)(implicit ctx: Context)
extends NotFoundMsg(MissingIdentID) {
def msg = em"Not found: $treeKind$name"
def explain = {
em"""|The identifier for `$treeKind$name` is not bound, that is,
|no declaration for this identifier can be found.
           |That can happen, for example, if `$name` or its declaration has been
           |misspelt, or if an import is missing."""
}
}
class TypeMismatch(found: Type, expected: Type, addenda: => String*)(implicit ctx: Context)
extends TypeMismatchMsg(TypeMismatchID):
    // Replace constrained TypeParamRefs and their typevars by their bounds where possible.
    // The idea is that if the bounds are also not subtypes of each other, we report
    // the type mismatch on the bounds instead of the original TypeParamRefs, since
    // those are usually easier to analyze.
object reported extends TypeMap:
def setVariance(v: Int) = variance = v
val constraint = mapCtx.typerState.constraint
def apply(tp: Type): Type = tp match
case tp: TypeParamRef =>
constraint.entry(tp) match
case bounds: TypeBounds =>
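              // Approximate the parameter by its bound: use the full upper bound
              // in contravariant positions and the full lower bound in covariant
              // ones; leave invariant occurrences unchanged.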
if variance < 0 then apply(mapCtx.typeComparer.fullUpperBound(tp))
else if variance > 0 then apply(mapCtx.typeComparer.fullLowerBound(tp))
else tp
case NoType => tp
case instType => apply(instType)
case tp: TypeVar => apply(tp.stripTypeVar)
case _ => mapOver(tp)
def msg =
val found1 = reported(found)
reported.setVariance(-1)
val expected1 = reported(expected)
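      // If the approximated types still conform, the bounds do not explain the
      // mismatch, so fall back to reporting the original types.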
val (found2, expected2) =
if (found1 frozen_<:< expected1) (found, expected) else (found1, expected1)
val postScript = addenda.find(!_.isEmpty) match
case Some(p) => p
case None =>
if expected.isAny
|| expected.isAnyRef
|| expected.isRef(defn.AnyValClass)
|| defn.isBottomType(found)
then ""
else ctx.typer.importSuggestionAddendum(ViewProto(found.widen, expected))
val (where, printCtx) = Formatting.disambiguateTypes(found2, expected2)
val whereSuffix = if (where.isEmpty) where else s"\\n\\n$where"
val (foundStr, expectedStr) = Formatting.typeDiff(found2, expected2)(printCtx)
s"""|Found: $foundStr
|Required: $expectedStr""".stripMargin
+ whereSuffix + err.whyNoMatchStr(found, expected) + postScript
def explain = ""
end TypeMismatch
class NotAMember(site: Type, val name: Name, selected: String, addendum: => String = "")(implicit ctx: Context)
extends NotFoundMsg(NotAMemberID) {
//println(i"site = $site, decls = ${site.decls}, source = ${site.widen.typeSymbol.sourceFile}") //DEBUG
def msg = {
import core.Flags._
val maxDist = 3 // maximal number of differences to be considered for a hint
val missing = name.show
// The names of all non-synthetic, non-private members of `site`
// that are of the same type/term kind as the missing member.
def candidates: Set[String] =
for
bc <- site.widen.baseClasses.toSet
sym <- bc.info.decls.filter(sym =>
sym.isType == name.isTypeName
&& !sym.isConstructor
&& !sym.flagsUNSAFE.isOneOf(Synthetic | Private))
yield sym.name.show
// Calculate Levenshtein distance
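      // using the standard dynamic-programming recurrence, where dist(j)(i) is
      // the minimal number of insertions, deletions and substitutions needed to
      // turn the first i characters of s1 into the first j characters of s2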
def distance(s1: String, s2: String): Int =
val dist = Array.ofDim[Int](s2.length + 1, s1.length + 1)
for
j <- 0 to s2.length
i <- 0 to s1.length
do
dist(j)(i) =
if j == 0 then i
else if i == 0 then j
else if s2(j - 1) == s1(i - 1) then dist(j - 1)(i - 1)
else (dist(j - 1)(i) min dist(j)(i - 1) min dist(j - 1)(i - 1)) + 1
dist(s2.length)(s1.length)
      // A list of possible candidate strings with their Levenshtein distances
      // to the name of the missing member
def closest: List[(Int, String)] = candidates
.toList
.map(n => (distance(n.show, missing), n))
.filter((d, n) => d <= maxDist && d < missing.length && d < n.length)
.sorted // sort by distance first, alphabetically second
val finalAddendum =
if addendum.nonEmpty then addendum
else closest match {
case (d, n) :: _ =>
val siteName = site match
case site: NamedType => site.name.show
case site => i"$site"
s" - did you mean $siteName.$n?"
case Nil => ""
}
ex"$selected $name is not a member of ${site.widen}$finalAddendum"
}
def explain = ""
}
class EarlyDefinitionsNotSupported()(implicit ctx: Context)
extends SyntaxMsg(EarlyDefinitionsNotSupportedID) {
def msg = "Early definitions are not supported; use trait parameters instead"
def explain = {
val code1 =
"""|trait Logging {
| val f: File
| f.open()
| onExit(f.close())
| def log(msg: String) = f.write(msg)
|}
|
|class B extends Logging {
| val f = new File("log.data") // triggers a NullPointerException
|}
|
|// early definition gets around the NullPointerException
|class C extends {
| val f = new File("log.data")
|} with Logging""".stripMargin
val code2 =
"""|trait Logging(f: File) {
| f.open()
| onExit(f.close())
| def log(msg: String) = f.write(msg)
|}
|
|class C extends Logging(new File("log.data"))""".stripMargin
em"""|Earlier versions of Scala did not support trait parameters and "early
|definitions" (also known as "early initializers") were used as an alternative.
|
|Example of old syntax:
|
|$code1
|
|The above code can now be written as:
|
|$code2
|"""
}
}
class TopLevelImplicitClass(cdef: untpd.TypeDef)(implicit ctx: Context)
extends SyntaxMsg(TopLevelImplicitClassID) {
def msg = em"""An ${hl("implicit class")} may not be top-level"""
def explain = {
val TypeDef(name, impl @ Template(constr0, parents, self, _)) = cdef
val exampleArgs =
if(constr0.vparamss.isEmpty) "..."
else constr0.vparamss(0).map(_.withMods(untpd.Modifiers()).show).mkString(", ")
      def defHasBody = impl.body.exists(!_.isEmpty)
val exampleBody = if (defHasBody) "{\\n ...\\n }" else ""
em"""|There may not be any method, member or object in scope with the same name as
|the implicit class and a case class automatically gets a companion object with
|the same name created by the compiler which would cause a naming conflict if it
|were allowed.
|
|""" + implicitClassRestrictionsText + em"""|
|
|To resolve the conflict declare ${cdef.name} inside of an ${hl("object")} then import the class
|from the object at the use site if needed, for example:
|
|object Implicits {
| implicit class ${cdef.name}($exampleArgs)$exampleBody
|}
|
|// At the use site:
|import Implicits.${cdef.name}"""
}
}
class ImplicitCaseClass(cdef: untpd.TypeDef)(implicit ctx: Context)
extends SyntaxMsg(ImplicitCaseClassID) {
def msg = em"""A ${hl("case class")} may not be defined as ${hl("implicit")}"""
def explain =
em"""|Implicit classes may not be case classes. Instead use a plain class:
|
|implicit class ${cdef.name}...
|
|""" + implicitClassRestrictionsText
}
class ImplicitClassPrimaryConstructorArity()(implicit ctx: Context)
extends SyntaxMsg(ImplicitClassPrimaryConstructorArityID){
def msg = "Implicit classes must accept exactly one primary constructor parameter"
def explain = {
val example = "implicit class RichDate(date: java.util.Date)"
em"""Implicit classes may only take one non-implicit argument in their constructor. For example:
|
| $example
|
|While it’s possible to create an implicit class with more than one non-implicit argument,
|such classes aren’t used during implicit lookup.
|""" + implicitClassRestrictionsText
}
}
class ObjectMayNotHaveSelfType(mdef: untpd.ModuleDef)(implicit ctx: Context)
extends SyntaxMsg(ObjectMayNotHaveSelfTypeID) {
def msg = em"""${hl("object")}s must not have a self ${hl("type")}"""
def explain = {
val untpd.ModuleDef(name, tmpl) = mdef
val ValDef(_, selfTpt, _) = tmpl.self
em"""|${hl("object")}s must not have a self ${hl("type")}:
|
|Consider these alternative solutions:
| - Create a trait or a class instead of an object
| - Let the object extend a trait containing the self type:
|
| object $name extends ${selfTpt.show}"""
}
}
class RepeatedModifier(modifier: String)(implicit ctx:Context)
extends SyntaxMsg(RepeatedModifierID) {
def msg = em"""Repeated modifier $modifier"""
def explain = {
val code1 = em"""private private val Origin = Point(0, 0)"""
val code2 = em"""private final val Origin = Point(0, 0)"""
em"""This happens when you accidentally specify the same modifier twice.
|
|Example:
|
|$code1
|
|instead of
|
|$code2
|
|"""
}
}
class InterpolatedStringError()(implicit ctx:Context)
extends SyntaxMsg(InterpolatedStringErrorID) {
def msg = "Error in interpolated string: identifier or block expected"
def explain = {
val code1 = "s\\"$new Point(0, 0)\\""
val code2 = "s\\"${new Point(0, 0)}\\""
em"""|This usually happens when you forget to place your expressions inside curly braces.
|
|$code1
|
|should be written as
|
|$code2
|"""
}
}
class UnboundPlaceholderParameter()(implicit ctx:Context)
extends SyntaxMsg(UnboundPlaceholderParameterID) {
def msg = em"""Unbound placeholder parameter; incorrect use of ${hl("_")}"""
def explain =
em"""|The ${hl("_")} placeholder syntax was used where it could not be bound.
|Consider explicitly writing the variable binding.
|
|This can be done by replacing ${hl("_")} with a variable (eg. ${hl("x")})
|and adding ${hl("x =>")} where applicable.
|
|Example before:
|
|${hl("{ _ }")}
|
|Example after:
|
|${hl("x => { x }")}
|
|Another common occurrence for this error is defining a val with ${hl("_")}:
|
|${hl("val a = _")}
|
             |But this val definition isn't very useful: it can never be assigned
             |another value, and thus will always remain uninitialized.
|Consider replacing the ${hl("val")} with ${hl("var")}:
|
|${hl("var a = _")}
|
|Note that this use of ${hl("_")} is not placeholder syntax,
|but an uninitialized var definition.
|Only fields can be left uninitialized in this manner; local variables
|must be initialized.
|"""
}
class IllegalStartSimpleExpr(illegalToken: String)(implicit ctx: Context)
extends SyntaxMsg(IllegalStartSimpleExprID) {
def msg = em"expression expected but ${Red(illegalToken)} found"
def explain = {
em"""|An expression cannot start with ${Red(illegalToken)}."""
}
}
class MissingReturnType()(implicit ctx:Context)
extends SyntaxMsg(MissingReturnTypeID) {
def msg = "Missing return type"
def explain =
em"""|An abstract declaration must have a return type. For example:
|
|trait Shape {hl(
| def area: Double // abstract declaration returning a ${"Double"}
|)}"""
}
class MissingReturnTypeWithReturnStatement(method: Symbol)(implicit ctx: Context)
extends SyntaxMsg(MissingReturnTypeWithReturnStatementID) {
def msg = em"$method has a return statement; it needs a result type"
def explain =
em"""|If a method contains a ${hl("return")} statement, it must have an
|explicit return type. For example:
|
|${hl("def good: Int /* explicit return type */ = return 1")}"""
}
class YieldOrDoExpectedInForComprehension()(implicit ctx: Context)
extends SyntaxMsg(YieldOrDoExpectedInForComprehensionID) {
def msg = em"${hl("yield")} or ${hl("do")} expected"
def explain =
em"""|When the enumerators in a for comprehension are not placed in parentheses or
|braces, a ${hl("do")} or ${hl("yield")} statement is required after the enumerators
|section of the comprehension.
|
|You can save some keystrokes by omitting the parentheses and writing
|
|${hl("val numbers = for i <- 1 to 3 yield i")}
|
| instead of
|
|${hl("val numbers = for (i <- 1 to 3) yield i")}
|
|but the ${hl("yield")} keyword is still required.
|
           |A for comprehension that simply performs a side effect without yielding anything
           |can also be written without parentheses, but then a ${hl("do")} keyword has to be
           |included. For example,
|
|${hl("for (i <- 1 to 3) println(i)")}
|
|can be written as
|
|${hl("for i <- 1 to 3 do println(i) // notice the 'do' keyword")}
|
|"""
}
class ProperDefinitionNotFound()(implicit ctx: Context)
extends Message(ProperDefinitionNotFoundID) {
def kind: String = "Doc Comment"
def msg = em"""Proper definition was not found in ${hl("@usecase")}"""
def explain = {
val noUsecase =
"def map[B, That](f: A => B)(implicit bf: CanBuildFrom[List[A], B, That]): That"
val usecase =
"""|/** Map from List[A] => List[B]
| *
| * @usecase def map[B](f: A => B): List[B]
| */
|def map[B, That](f: A => B)(implicit bf: CanBuildFrom[List[A], B, That]): That
|""".stripMargin
em"""|Usecases are only supported for ${hl("def")}s. They exist because with Scala's
           |advanced type system, we sometimes end up with seemingly scary signatures.
           |The usage of these methods, however, need not be - for instance the ${hl("map")}
|function
|
|${hl("List(1, 2, 3).map(2 * _) // res: List(2, 4, 6)")}
|
|is easy to understand and use - but has a rather bulky signature:
|
|$noUsecase
|
           |To mitigate this and ease the usage of such functions, we have the ${hl("@usecase")}
           |annotation for docstrings, which can be used like this:
|
|$usecase
|
|When creating the docs, the signature of the method is substituted by the
|usecase and the compiler makes sure that it is valid. Because of this, you're
|only allowed to use ${hl("def")}s when defining usecases."""
}
}
class ByNameParameterNotSupported(tpe: untpd.TypTree)(implicit ctx: Context)
extends SyntaxMsg(ByNameParameterNotSupportedID) {
def msg = em"By-name parameter type ${tpe} not allowed here."
def explain =
em"""|By-name parameters act like functions that are only evaluated when referenced,
|allowing for lazy evaluation of a parameter.
|
|An example of using a by-name parameter would look like:
|${hl("def func(f: => Boolean) = f // 'f' is evaluated when referenced within the function")}
|
|An example of the syntax of passing an actual function as a parameter:
|${hl("def func(f: (Boolean => Boolean)) = f(true)")}
|
|or:
|
|${hl("def func(f: Boolean => Boolean) = f(true)")}
|
|And the usage could be as such:
|${hl("func(bool => // do something...)")}
|"""
}
class WrongNumberOfTypeArgs(fntpe: Type, expectedArgs: List[ParamInfo], actual: List[untpd.Tree])(implicit ctx: Context)
extends SyntaxMsg(WrongNumberOfTypeArgsID) {
private val expectedCount = expectedArgs.length
private val actualCount = actual.length
private val msgPrefix = if (actualCount > expectedCount) "Too many" else "Not enough"
def msg =
val expectedArgString = expectedArgs
.map(_.paramName.unexpandedName.show)
.mkString("[", ", ", "]")
val actualArgString = actual.map(_.show).mkString("[", ", ", "]")
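      // Prefer the fully qualified name of the type constructor's symbol;
      // fall back to showing the type itself if that computation fails.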
val prettyName =
try fntpe.termSymbol match
case NoSymbol => fntpe.show
case symbol => symbol.showFullName
catch case NonFatal(ex) => fntpe.show
em"""|$msgPrefix type arguments for $prettyName$expectedArgString
|expected: $expectedArgString
|actual: $actualArgString""".stripMargin
def explain = {
val tooManyTypeParams =
"""|val tuple2: (Int, String) = (1, "one")
|val list: List[(Int, String)] = List(tuple2)""".stripMargin
if (actualCount > expectedCount)
em"""|You have supplied too many type parameters
|
|For example List takes a single type parameter (List[A])
|If you need to hold more types in a list then you need to combine them
|into another data type that can contain the number of types you need,
|In this example one solution would be to use a Tuple:
|
|${tooManyTypeParams}"""
else
em"""|You have not supplied enough type parameters
|If you specify one type parameter then you need to specify every type parameter."""
}
}
class IllegalVariableInPatternAlternative()(implicit ctx: Context)
extends SyntaxMsg(IllegalVariableInPatternAlternativeID) {
def msg = "Variables are not allowed in alternative patterns"
def explain = {
val varInAlternative =
"""|def g(pair: (Int,Int)): Int = pair match {
| case (1, n) | (n, 1) => n
| case _ => 0
|}""".stripMargin
val fixedVarInAlternative =
"""|def g(pair: (Int,Int)): Int = pair match {
| case (1, n) => n
| case (n, 1) => n
| case _ => 0
|}""".stripMargin
em"""|Variables are not allowed within alternate pattern matches. You can workaround
|this issue by adding additional cases for each alternative. For example, the
|illegal function:
|
|$varInAlternative
|could be implemented by moving each alternative into a separate case:
|
|$fixedVarInAlternative"""
}
}
class IdentifierExpected(identifier: String)(implicit ctx: Context)
extends SyntaxMsg(IdentifierExpectedID) {
def msg = "identifier expected"
def explain = {
val wrongIdentifier = em"def foo: $identifier = {...}"
val validIdentifier = em"def foo = {...}"
em"""|An identifier expected, but $identifier found. This could be because
|$identifier is not a valid identifier. As a workaround, the compiler could
|infer the type for you. For example, instead of:
|
|$wrongIdentifier
|
|Write your code like:
|
|$validIdentifier
|
|"""
}
}
class AuxConstructorNeedsNonImplicitParameter()(implicit ctx:Context)
extends SyntaxMsg(AuxConstructorNeedsNonImplicitParameterID) {
def msg = "Auxiliary constructor needs non-implicit parameter list"
def explain =
em"""|Only the primary constructor is allowed an ${hl("implicit")} parameter list;
|auxiliary constructors need non-implicit parameter lists. When a primary
           |constructor has an implicit argument list, auxiliary constructors that call the
|primary constructor must specify the implicit value.
|
|To resolve this issue check for:
           | - Forgotten parentheses on ${hl("this")} (${hl("def this() = { ... }")})
| - Auxiliary constructors specify the implicit value
|"""
}
class IncorrectRepeatedParameterSyntax()(implicit ctx: Context)
extends SyntaxMsg(IncorrectRepeatedParameterSyntaxID) {
def msg = "'*' expected"
def explain =
em"""|Expected * in ${hl("_*")} operator.
|
|The ${hl("_*")} operator can be used to supply a sequence-based argument
|to a method with a variable-length or repeated parameter. It is used
|to expand the sequence to a variable number of arguments, such that:
|${hl("func(args: _*)")} would expand to ${hl("func(arg1, arg2 ... argN)")}.
|
|Below is an example of how a method with a variable-length
|parameter can be declared and used.
|
|Squares the arguments of a variable-length parameter:
|${hl("def square(args: Int*) = args.map(a => a * a)")}
|
|Usage:
|${hl("square(1, 2, 3) // res0: List[Int] = List(1, 4, 9)")}
|
|Secondary Usage with ${hl("_*")}:
|${hl("val ints = List(2, 3, 4) // ints: List[Int] = List(2, 3, 4)")}
|${hl("square(ints: _*) // res1: List[Int] = List(4, 9, 16)")}
|""".stripMargin
}
class IllegalLiteral()(implicit ctx: Context)
extends SyntaxMsg(IllegalLiteralID) {
def msg = "Illegal literal"
def explain =
em"""|Available literals can be divided into several groups:
| - Integer literals: 0, 21, 0xFFFFFFFF, -42L
| - Floating Point Literals: 0.0, 1e30f, 3.14159f, 1.0e-100, .1
| - Boolean Literals: true, false
| - Character Literals: 'a', '\\u0041', '\\n'
| - String Literals: "Hello, World!"
| - null
|"""
}
class PatternMatchExhaustivity(uncoveredFn: => String)(implicit ctx: Context)
extends Message(PatternMatchExhaustivityID) {
def kind = "Pattern Match Exhaustivity"
lazy val uncovered = uncoveredFn
def msg =
em"""|${hl("match")} may not be exhaustive.
|
|It would fail on pattern case: $uncovered"""
def explain =
em"""|There are several ways to make the match exhaustive:
| - Add missing cases as shown in the warning
           | - If an extractor always returns ${hl("Some(...)")}, write ${hl("Some[X]")} for its return type
| - Add a ${hl("case _ => ...")} at the end to match all remaining cases
|"""
}
class UncheckedTypePattern(msgFn: => String)(implicit ctx: Context)
extends PatternMatchMsg(UncheckedTypePatternID) {
def msg = msgFn
def explain =
em"""|Type arguments and type refinements are erased during compile time, thus it's
|impossible to check them at run-time.
|
|You can either replace the type arguments by ${hl("_")} or use `@unchecked`.
|"""
}
class MatchCaseUnreachable()(implicit ctx: Context)
extends Message(MatchCaseUnreachableID) {
def kind = "Match case Unreachable"
def msg = "Unreachable case"
def explain = ""
}
class MatchCaseOnlyNullWarning()(implicit ctx: Context)
extends PatternMatchMsg(MatchCaseOnlyNullWarningID) {
def msg = em"""Only ${hl("null")} is matched. Consider using ${hl("case null =>")} instead."""
def explain = ""
}
class SeqWildcardPatternPos()(implicit ctx: Context)
extends SyntaxMsg(SeqWildcardPatternPosID) {
def msg = em"""${hl("_*")} can be used only for last argument"""
def explain = {
val code =
"""def sumOfTheFirstTwo(list: List[Int]): Int = list match {
| case List(first, second, x:_*) => first + second
| case _ => 0
|}"""
em"""|Sequence wildcard pattern is expected at the end of an argument list.
|This pattern matches any remaining elements in a sequence.
|Consider the following example:
|
|$code
|
|Calling:
|
|${hl("sumOfTheFirstTwo(List(1, 2, 10))")}
|
|would give 3 as a result"""
}
}
class IllegalStartOfSimplePattern()(implicit ctx: Context)
extends SyntaxMsg(IllegalStartOfSimplePatternID) {
def msg = "pattern expected"
def explain = {
val sipCode =
"""def f(x: Int, y: Int) = x match {
| case `y` => ...
|}
"""
val constructorPatternsCode =
"""case class Person(name: String, age: Int)
|
|def test(p: Person) = p match {
| case Person(name, age) => ...
|}
"""
      val tuplePatternsCode =
"""def swap(tuple: (String, Int)): (Int, String) = tuple match {
| case (text, number) => (number, text)
|}
"""
val patternSequencesCode =
"""def getSecondValue(list: List[Int]): Int = list match {
| case List(_, second, x:_*) => second
| case _ => 0
|}"""
em"""|Simple patterns can be divided into several groups:
|- Variable Patterns: ${hl("case x => ...")}.
| It matches any value, and binds the variable name to that value.
           |  A special case is the wildcard pattern _ which is treated as if it were a fresh
           |  variable on each occurrence.
|
|- Typed Patterns: ${hl("case x: Int => ...")} or ${hl("case _: Int => ...")}.
| This pattern matches any value matched by the specified type; it binds the variable
| name to that value.
|
|- Literal Patterns: ${hl("case 123 => ...")} or ${hl("case 'A' => ...")}.
| This type of pattern matches any value that is equal to the specified literal.
|
|- Stable Identifier Patterns:
|
| $sipCode
|
| the match succeeds only if the x argument and the y argument of f are equal.
|
|- Constructor Patterns:
|
| $constructorPatternsCode
|
           |  The pattern binds all of the object's fields to the given variable names (name and
           |  age, in this case).
|
|- Tuple Patterns:
|
           |  $tuplePatternsCode
|
| Calling:
|
           |  ${hl("""swap(("Luftballons", 99))""")}
|
| would give ${hl("""(99, "Luftballons")""")} as a result.
|
|- Pattern Sequences:
|
| $patternSequencesCode
|
| Calling:
|
| ${hl("getSecondValue(List(1, 10, 2))")}
|
| would give 10 as a result.
| This pattern is possible because a companion object for the List class has a method
| with the following signature:
|
| ${hl("def unapplySeq[A](x: List[A]): Some[List[A]]")}
|"""
}
}
class PkgDuplicateSymbol(existing: Symbol)(implicit ctx: Context)
extends NamingMsg(PkgDuplicateSymbolID) {
def msg = em"Trying to define package with same name as $existing"
def explain = ""
}
class ExistentialTypesNoLongerSupported()(implicit ctx: Context)
extends SyntaxMsg(ExistentialTypesNoLongerSupportedID) {
def msg =
em"""|Existential types are no longer supported -
|use a wildcard or dependent type instead"""
def explain =
em"""|The use of existential types is no longer supported.
|
|You should use a wildcard or dependent type instead.
|
|For example:
|
|Instead of using ${hl("forSome")} to specify a type variable
|
|${hl("List[T forSome { type T }]")}
|
|Try using a wildcard type variable
|
|${hl("List[?]")}
|"""
}
class UnboundWildcardType()(implicit ctx: Context)
extends SyntaxMsg(UnboundWildcardTypeID) {
def msg = "Unbound wildcard type"
def explain =
em"""|The wildcard type syntax (${hl("_")}) was used where it could not be bound.
|Replace ${hl("_")} with a non-wildcard type. If the type doesn't matter,
|try replacing ${hl("_")} with ${hl("Any")}.
|
|Examples:
|
|- Parameter lists
|
| Instead of:
| ${hl("def foo(x: _) = ...")}
|
| Use ${hl("Any")} if the type doesn't matter:
| ${hl("def foo(x: Any) = ...")}
|
|- Type arguments
|
| Instead of:
| ${hl("val foo = List[?](1, 2)")}
|
| Use:
| ${hl("val foo = List[Int](1, 2)")}
|
|- Type bounds
|
| Instead of:
| ${hl("def foo[T <: _](x: T) = ...")}
|
| Remove the bounds if the type doesn't matter:
| ${hl("def foo[T](x: T) = ...")}
|
|- ${hl("val")} and ${hl("def")} types
|
| Instead of:
| ${hl("val foo: _ = 3")}
|
| Use:
| ${hl("val foo: Int = 3")}
|"""
}
class DanglingThisInPath()(implicit ctx: Context) extends SyntaxMsg(DanglingThisInPathID) {
def msg = em"""Expected an additional member selection after the keyword ${hl("this")}"""
def explain =
val contextCode: String =
""" trait Outer {
| val member: Int
| type Member
| trait Inner {
| ...
| }
| }"""
val importCode: String =
""" import Outer.this.member
| // ^^^^^^^"""
val typeCode: String =
""" type T = Outer.this.Member
| // ^^^^^^^"""
em"""|Paths of imports and type selections must not end with the keyword ${hl("this")}.
|
|Maybe you forgot to select a member of ${hl("this")}? As an example, in the
|following context:
|${contextCode}
|
|- This is a valid import expression using a path
|${importCode}
|
|- This is a valid type using a path
|${typeCode}
|"""
}
class OverridesNothing(member: Symbol)(implicit ctx: Context)
extends DeclarationMsg(OverridesNothingID) {
def msg = em"""${member} overrides nothing"""
def explain =
em"""|There must be a field or method with the name ${member.name} in a super
|class of ${member.owner} to override it. Did you misspell it?
|Are you extending the right classes?
|"""
}
class OverridesNothingButNameExists(member: Symbol, existing: List[Denotations.SingleDenotation])(implicit ctx: Context)
extends DeclarationMsg(OverridesNothingButNameExistsID) {
def msg = em"""${member} has a different signature than the overridden declaration"""
def explain =
val existingDecl: String = existing.map(_.showDcl).mkString(" \\n")
em"""|There must be a non-final field or method with the name ${member.name} and the
|same parameter list in a super class of ${member.owner} to override it.
|
| ${member.showDcl}
|
|The super classes of ${member.owner} contain the following members
|named ${member.name}:
| ${existingDecl}
|"""
}
class ForwardReferenceExtendsOverDefinition(value: Symbol, definition: Symbol)(implicit ctx: Context)
extends ReferenceMsg(ForwardReferenceExtendsOverDefinitionID) {
def msg = em"${definition.name} is a forward reference extending over the definition of ${value.name}"
def explain =
em"""|${definition.name} is used before you define it, and the definition of ${value.name}
|appears between that use and the definition of ${definition.name}.
|
           |Forward references are allowed only if there are no value definitions between
           |the reference and the referenced method definition.
|
|Define ${definition.name} before it is used,
|or move the definition of ${value.name} so it does not appear between
|the declaration of ${definition.name} and its use,
|or define ${value.name} as lazy.
|""".stripMargin
}
class ExpectedTokenButFound(expected: Token, found: Token)(implicit ctx: Context)
extends SyntaxMsg(ExpectedTokenButFoundID) {
private lazy val foundText = Tokens.showToken(found)
def msg =
val expectedText =
if (Tokens.isIdentifier(expected)) "an identifier"
else Tokens.showToken(expected)
em"""${expectedText} expected, but ${foundText} found"""
def explain =
if (Tokens.isIdentifier(expected) && Tokens.isKeyword(found))
s"""
|If you want to use $foundText as identifier, you may put it in backticks: `$foundText`.""".stripMargin
else
""
}
class MixedLeftAndRightAssociativeOps(op1: Name, op2: Name, op2LeftAssoc: Boolean)(implicit ctx: Context)
extends SyntaxMsg(MixedLeftAndRightAssociativeOpsID) {
def msg =
val op1Asso: String = if (op2LeftAssoc) "which is right-associative" else "which is left-associative"
val op2Asso: String = if (op2LeftAssoc) "which is left-associative" else "which is right-associative"
em"${op1} (${op1Asso}) and ${op2} ($op2Asso) have same precedence and may not be mixed"
def explain =
s"""|The operators ${op1} and ${op2} are used as infix operators in the same expression,
|but they bind to different sides:
|${op1} is applied to the operand to its ${if (op2LeftAssoc) "right" else "left"}
|${op2} is applied to the operand to its ${if (op2LeftAssoc) "left" else "right"}
|As both have the same precedence the compiler can't decide which to apply first.
|
          |You may use parentheses to make the application order explicit,
|or use method application syntax operand1.${op1}(operand2).
|
|Operators ending in a colon ${hl(":")} are right-associative. All other operators are left-associative.
|
|Infix operator precedence is determined by the operator's first character. Characters are listed
|below in increasing order of precedence, with characters on the same line having the same precedence.
| (all letters)
| |
| ^
| &
| = !
| < >
| :
| + -
| * / %
| (all other special characters)
|Operators starting with a letter have lowest precedence, followed by operators starting with `|`, etc.
|""".stripMargin
}
class CantInstantiateAbstractClassOrTrait(cls: Symbol, isTrait: Boolean)(implicit ctx: Context)
extends TypeMsg(CantInstantiateAbstractClassOrTraitID) {
private val traitOrAbstract = if (isTrait) "a trait" else "abstract"
def msg = em"""${cls.name} is ${traitOrAbstract}; it cannot be instantiated"""
def explain =
em"""|Abstract classes and traits need to be extended by a concrete class or object
|to make their functionality accessible.
|
|You may want to create an anonymous class extending ${cls.name} with
           |  ${s"new ${cls.name} { }"}
|
|or add a companion object with
| ${s"object ${cls.name} extends ${cls.name}"}
|
|You need to implement any abstract members in both cases.
|""".stripMargin
}
class UnreducibleApplication(tycon: Type)(using Context) extends TypeMsg(UnreducibleApplicationID):
def msg = em"unreducible application of higher-kinded type $tycon to wildcard arguments"
def explain =
em"""|An abstract type constructor cannot be applied to wildcard arguments.
|Such applications are equivalent to existential types, which are not
|supported in Scala 3."""
class OverloadedOrRecursiveMethodNeedsResultType(cycleSym: Symbol)(implicit ctx: Context)
extends CyclicMsg(OverloadedOrRecursiveMethodNeedsResultTypeID) {
def msg = em"""Overloaded or recursive $cycleSym needs return type"""
def explain =
em"""Case 1: $cycleSym is overloaded
|If there are multiple methods named $cycleSym and at least one definition of
|it calls another, you need to specify the calling method's return type.
|
|Case 2: $cycleSym is recursive
|If $cycleSym calls itself on any path (even through mutual recursion), you need to specify the return type
|of $cycleSym or of a definition it's mutually recursive with.
|""".stripMargin
}
class RecursiveValueNeedsResultType(cycleSym: Symbol)(implicit ctx: Context)
extends CyclicMsg(RecursiveValueNeedsResultTypeID) {
def msg = em"""Recursive $cycleSym needs type"""
def explain =
em"""The definition of $cycleSym is recursive and you need to specify its type.
|""".stripMargin
}
class CyclicReferenceInvolving(denot: SymDenotation)(implicit ctx: Context)
extends CyclicMsg(CyclicReferenceInvolvingID) {
def msg =
val where = if denot.exists then s" involving $denot" else ""
em"Cyclic reference$where"
def explain =
em"""|$denot is declared as part of a cycle which makes it impossible for the
|compiler to decide upon ${denot.name}'s type.
|To avoid this error, try giving ${denot.name} an explicit type.
|""".stripMargin
}
class CyclicReferenceInvolvingImplicit(cycleSym: Symbol)(implicit ctx: Context)
extends CyclicMsg(CyclicReferenceInvolvingImplicitID) {
def msg = em"""Cyclic reference involving implicit $cycleSym"""
def explain =
em"""|$cycleSym is declared as part of a cycle which makes it impossible for the
|compiler to decide upon ${cycleSym.name}'s type.
|This might happen when the right hand-side of $cycleSym's definition involves an implicit search.
|To avoid this error, try giving ${cycleSym.name} an explicit type.
|""".stripMargin
}
class SuperQualMustBeParent(qual: untpd.Ident, cls: ClassSymbol)(implicit ctx: Context)
extends ReferenceMsg(SuperQualMustBeParentID) {
def msg = em"""|$qual does not name a parent of $cls"""
def explain =
val parents: Seq[String] = (cls.info.parents map (_.typeSymbol.name.show)).sorted
em"""|When a qualifier ${hl("T")} is used in a ${hl("super")} prefix of the form ${hl("C.super[T]")},
|${hl("T")} must be a parent type of ${hl("C")}.
|
|In this case, the parents of $cls are:
|${parents.mkString(" - ", "\\n - ", "")}
|""".stripMargin
}
class VarArgsParamMustComeLast()(implicit ctx: Context)
extends SyntaxMsg(IncorrectRepeatedParameterSyntaxID) {
def msg = em"""${hl("varargs")} parameter must come last"""
def explain =
em"""|The ${hl("varargs")} field must be the last field in the method signature.
|Attempting to define a field in a method signature after a ${hl("varargs")} field is an error.
|"""
}
import typer.Typer.BindingPrec
class AmbiguousReference(name: Name, newPrec: BindingPrec, prevPrec: BindingPrec, prevCtx: Context)(implicit ctx: Context)
extends ReferenceMsg(AmbiguousReferenceID) {
/** A string which explains how something was bound; Depending on `prec` this is either
* imported by <tree>
* or defined in <symbol>
*/
private def bindingString(prec: BindingPrec, whereFound: Context, qualifier: String = "") = {
val howVisible = prec match {
case BindingPrec.Definition => "defined"
case BindingPrec.Inheritance => "inherited"
case BindingPrec.NamedImport => "imported by name"
case BindingPrec.WildImport => "imported"
case BindingPrec.PackageClause => "found"
case BindingPrec.NothingBound => assert(false)
}
if (prec.isImportPrec) {
ex"""$howVisible$qualifier by ${em"${whereFound.importInfo}"}"""
} else
ex"""$howVisible$qualifier in ${em"${whereFound.owner}"}"""
}
def msg =
i"""|Reference to ${em"$name"} is ambiguous,
|it is both ${bindingString(newPrec, ctx)}
|and ${bindingString(prevPrec, prevCtx, " subsequently")}"""
def explain =
em"""|The compiler can't decide which of the possible choices you
           |are referencing with $name: a definition of lower precedence
|in an inner scope, or a definition with higher precedence in
|an outer scope.
|Note:
| - Definitions in an enclosing scope take precedence over inherited definitions
| - Definitions take precedence over imports
| - Named imports take precedence over wildcard imports
| - You may replace a name when imported using
| ${hl("import")} scala.{ $name => ${name.show + "Tick"} }
|"""
}
class MethodDoesNotTakeParameters(tree: tpd.Tree)(implicit ctx: Context)
extends TypeMsg(MethodDoesNotTakeParametersId) {
def methodSymbol: Symbol = tpd.methPart(tree).symbol
def msg = {
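      // An Apply node means at least one argument list was already consumed,
      // so the method takes no *more* parameters rather than none at all.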
val more = if (tree.isInstanceOf[tpd.Apply]) " more" else ""
val meth = methodSymbol
val methStr = if (meth.exists) methodSymbol.showLocated else "expression"
em"$methStr does not take$more parameters"
}
def explain = {
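      // A method of ExprType (=> T) is nullary and cannot be applied to ()
      // at all, which warrants the extra hint below.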
val isNullary = methodSymbol.info.isInstanceOf[ExprType]
val addendum =
        if (isNullary) "\\nNullary methods may not be called with parentheses"
        else ""
      "You have specified more parameter lists than defined in the method definition(s)." + addendum
}
}
class AmbiguousOverload(tree: tpd.Tree, val alternatives: List[SingleDenotation], pt: Type)(
err: Errors)(
implicit ctx: Context)
extends ReferenceMsg(AmbiguousOverloadID) {
private def all = if (alternatives.length == 2) "both" else "all"
def msg =
s"""|Ambiguous overload. The ${err.overloadedAltsStr(alternatives)}
|$all match ${err.expectedTypeStr(pt)}""".stripMargin
def explain =
em"""|There are ${alternatives.length} methods that could be referenced as the compiler knows too little
|about the expected type.
|You may specify the expected type e.g. by
|- assigning it to a value with a specified type, or
|- adding a type ascription as in ${hl("instance.myMethod: String => Int")}
|"""
}
class ReassignmentToVal(name: Name)(implicit ctx: Context)
extends TypeMsg(ReassignmentToValID) {
def msg = em"""Reassignment to val $name"""
def explain =
em"""|You can not assign a new value to $name as values can't be changed.
|Keep in mind that every statement has a value, so you may e.g. use
| ${hl("val")} $name ${hl("= if (condition) 2 else 5")}
           |In case you need a reassignable name, you can declare it as a
           |variable:
| ${hl("var")} $name ${hl("=")} ...
|""".stripMargin
}
class TypeDoesNotTakeParameters(tpe: Type, params: List[Trees.Tree[Trees.Untyped]])(implicit ctx: Context)
extends TypeMsg(TypeDoesNotTakeParametersID) {
def msg = em"$tpe does not take type parameters"
def explain =
val ps =
if (params.size == 1) s"a type parameter ${params.head}"
else s"type parameters ${params.map(_.show).mkString(", ")}"
i"""You specified ${NoColor(ps)} for ${em"$tpe"}, which is not
|declared to take any.
|"""
}
class ParameterizedTypeLacksArguments(psym: Symbol)(implicit ctx: Context)
extends TypeMsg(ParameterizedTypeLacksArgumentsID) {
def msg = em"Parameterized $psym lacks argument list"
def explain =
em"""The $psym is declared with non-implicit parameters, you may not leave
|out the parameter list when extending it.
|"""
}
class VarValParametersMayNotBeCallByName(name: TermName, mutable: Boolean)(implicit ctx: Context)
extends SyntaxMsg(VarValParametersMayNotBeCallByNameID) {
def varOrVal = if (mutable) em"${hl("var")}" else em"${hl("val")}"
def msg = s"$varOrVal parameters may not be call-by-name"
def explain =
em"""${hl("var")} and ${hl("val")} parameters of classes and traits may no be call-by-name. In case you
|want the parameter to be evaluated on demand, consider making it just a parameter
|and a ${hl("def")} in the class such as
| ${s"class MyClass(${name}Tick: => String) {"}
| ${s" def $name() = ${name}Tick"}
| ${hl("}")}
|"""
}
class MissingTypeParameterFor(tpe: Type)(implicit ctx: Context)
extends SyntaxMsg(MissingTypeParameterForID) {
def msg =
if (tpe.derivesFrom(defn.AnyKindClass)) em"${tpe} cannot be used as a value type"
else em"Missing type parameter for ${tpe}"
def explain = ""
}
class MissingTypeParameterInTypeApp(tpe: Type)(implicit ctx: Context)
extends TypeMsg(MissingTypeParameterInTypeAppID) {
def numParams = tpe.typeParams.length
def parameters = if (numParams == 1) "parameter" else "parameters"
def msg = em"Missing type $parameters for $tpe"
def explain = em"A fully applied type is expected but $tpe takes $numParams $parameters"
}
class DoesNotConformToBound(tpe: Type, which: String, bound: Type)(
err: Errors)(implicit ctx: Context)
extends TypeMismatchMsg(DoesNotConformToBoundID) {
def msg = em"Type argument ${tpe} does not conform to $which bound $bound${err.whyNoMatchStr(tpe, bound)}"
def explain = ""
}
class DoesNotConformToSelfType(category: String, selfType: Type, cls: Symbol,
otherSelf: Type, relation: String, other: Symbol)(
implicit ctx: Context)
extends TypeMismatchMsg(DoesNotConformToSelfTypeID) {
def msg = em"""$category: self type $selfType of $cls does not conform to self type $otherSelf
|of $relation $other"""
def explain =
em"""You mixed in $other which requires self type $otherSelf, but $cls has self type
|$selfType and does not inherit from $otherSelf.
|
|Note: Self types are indicated with the notation
| ${s"class "}$other ${hl("{ this: ")}$otherSelf${hl(" => ")}
"""
}
class DoesNotConformToSelfTypeCantBeInstantiated(tp: Type, selfType: Type)(
implicit ctx: Context)
extends TypeMismatchMsg(DoesNotConformToSelfTypeCantBeInstantiatedID) {
def msg = em"""$tp does not conform to its self type $selfType; cannot be instantiated"""
def explain =
em"""To create an instance of $tp it needs to inherit $selfType in some way.
|
|Note: Self types are indicated with the notation
| ${s"class "}$tp ${hl("{ this: ")}$selfType${hl(" => ")}
|"""
}
class AbstractMemberMayNotHaveModifier(sym: Symbol, flag: FlagSet)(
implicit ctx: Context)
extends SyntaxMsg(AbstractMemberMayNotHaveModifierID) {
def msg = em"""${hl("abstract")} $sym may not have `${flag.flagsString}` modifier"""
def explain = ""
}
class TopLevelCantBeImplicit(sym: Symbol)(
implicit ctx: Context)
extends SyntaxMsg(TopLevelCantBeImplicitID) {
def msg = em"""${hl("implicit")} modifier cannot be used for top-level definitions"""
def explain = ""
}
class TypesAndTraitsCantBeImplicit()(implicit ctx: Context)
extends SyntaxMsg(TypesAndTraitsCantBeImplicitID) {
def msg = em"""${hl("implicit")} modifier cannot be used for types or traits"""
def explain = ""
}
class OnlyClassesCanBeAbstract(sym: Symbol)(
implicit ctx: Context)
extends SyntaxMsg(OnlyClassesCanBeAbstractID) {
def explain = ""
def msg = em"""${hl("abstract")} modifier can be used only for classes; it should be omitted for abstract members"""
}
class AbstractOverrideOnlyInTraits(sym: Symbol)(
implicit ctx: Context)
extends SyntaxMsg(AbstractOverrideOnlyInTraitsID) {
def msg = em"""${hl("abstract override")} modifier only allowed for members of traits"""
def explain = ""
}
class TraitsMayNotBeFinal(sym: Symbol)(
implicit ctx: Context)
extends SyntaxMsg(TraitsMayNotBeFinalID) {
def msg = em"""$sym may not be ${hl("final")}"""
def explain =
"A trait can never be final since it is abstract and must be extended to be useful."
}
class NativeMembersMayNotHaveImplementation(sym: Symbol)(
implicit ctx: Context)
extends SyntaxMsg(NativeMembersMayNotHaveImplementationID) {
def msg = em"""${hl("@native")} members may not have an implementation"""
def explain = ""
}
class OnlyClassesCanHaveDeclaredButUndefinedMembers(sym: Symbol)(
implicit ctx: Context)
extends SyntaxMsg(OnlyClassesCanHaveDeclaredButUndefinedMembersID) {
private def varNote =
if (sym.is(Mutable)) "Note that variables need to be initialized to be defined."
else ""
def msg = em"""Declaration of $sym not allowed here: only classes can have declared but undefined members"""
def explain = s"$varNote"
}
class CannotExtendAnyVal(sym: Symbol)(implicit ctx: Context)
extends SyntaxMsg(CannotExtendAnyValID) {
def msg = em"""$sym cannot extend ${hl("AnyVal")}"""
def explain =
em"""Only classes (not traits) are allowed to extend ${hl("AnyVal")}, but traits may extend
|${hl("Any")} to become ${Green("\\"universal traits\\"")} which may only have ${hl("def")} members.
|Universal traits can be mixed into classes that extend ${hl("AnyVal")}.
|"""
}
class CannotHaveSameNameAs(sym: Symbol, cls: Symbol, reason: CannotHaveSameNameAs.Reason)(implicit ctx: Context)
extends SyntaxMsg(CannotHaveSameNameAsID) {
import CannotHaveSameNameAs._
def reasonMessage: String = reason match {
case CannotBeOverridden => "class definitions cannot be overridden"
case DefinedInSelf(self) =>
s"""cannot define ${sym.showKind} member with the same name as a ${cls.showKind} member in self reference ${self.name}.
|(Note: this can be resolved by using another name)
|""".stripMargin
}
def msg = em"""$sym cannot have the same name as ${cls.showLocated} -- """ + reasonMessage
def explain = ""
}
object CannotHaveSameNameAs {
sealed trait Reason
case object CannotBeOverridden extends Reason
case class DefinedInSelf(self: tpd.ValDef) extends Reason
}
class ValueClassesMayNotDefineInner(valueClass: Symbol, inner: Symbol)(implicit ctx: Context)
extends SyntaxMsg(ValueClassesMayNotDefineInnerID) {
def msg = em"""Value classes may not define an inner class"""
def explain = ""
}
class ValueClassesMayNotDefineNonParameterField(valueClass: Symbol, field: Symbol)(implicit ctx: Context)
extends SyntaxMsg(ValueClassesMayNotDefineNonParameterFieldID) {
def msg = em"""Value classes may not define non-parameter field"""
def explain = ""
}
class ValueClassesMayNotDefineASecondaryConstructor(valueClass: Symbol, constructor: Symbol)(implicit ctx: Context)
extends SyntaxMsg(ValueClassesMayNotDefineASecondaryConstructorID) {
def msg = em"""Value classes may not define a secondary constructor"""
def explain = ""
}
class ValueClassesMayNotContainInitalization(valueClass: Symbol)(implicit ctx: Context)
extends SyntaxMsg(ValueClassesMayNotContainInitalizationID) {
def msg = em"""Value classes may not contain initialization statements"""
def explain = ""
}
class ValueClassesMayNotBeAbstract(valueClass: Symbol)(implicit ctx: Context)
extends SyntaxMsg(ValueClassesMayNotBeAbstractID) {
def msg = em"""Value classes may not be ${hl("abstract")}"""
def explain = ""
}
class ValueClassesMayNotBeContainted(valueClass: Symbol)(implicit ctx: Context)
extends SyntaxMsg(ValueClassesMayNotBeContaintedID) {
private def localOrMember = if (valueClass.owner.isTerm) "local class" else "member of another class"
def msg = s"""Value classes may not be a $localOrMember"""
def explain = ""
}
class ValueClassesMayNotWrapItself(valueClass: Symbol)(implicit ctx: Context)
extends SyntaxMsg(ValueClassesMayNotWrapItselfID) {
def msg = """A value class may not wrap itself"""
def explain = ""
}
class ValueClassParameterMayNotBeAVar(valueClass: Symbol, param: Symbol)(implicit ctx: Context)
extends SyntaxMsg(ValueClassParameterMayNotBeAVarID) {
def msg = em"""A value class parameter may not be a ${hl("var")}"""
def explain =
em"""A value class must have exactly one ${hl("val")} parameter."""
}
class ValueClassNeedsOneValParam(valueClass: Symbol)(implicit ctx: Context)
extends SyntaxMsg(ValueClassNeedsExactlyOneValParamID) {
def msg = em"""Value class needs one ${hl("val")} parameter"""
def explain = ""
}
class ValueClassParameterMayNotBeCallByName(valueClass: Symbol, param: Symbol)(implicit ctx: Context)
extends SyntaxMsg(ValueClassParameterMayNotBeCallByNameID) {
def msg = s"Value class parameter `${param.name}` may not be call-by-name"
def explain = ""
}
class OnlyCaseClassOrCaseObjectAllowed()(implicit ctx: Context)
extends SyntaxMsg(OnlyCaseClassOrCaseObjectAllowedID) {
def msg = em"""Only ${hl("case class")} or ${hl("case object")} allowed"""
def explain = ""
}
class ExpectedToplevelDef()(implicit ctx: Context)
extends SyntaxMsg(ExpectedTopLevelDefID) {
def msg = "Expected a toplevel definition"
def explain = ""
}
class SuperCallsNotAllowedInlineable(symbol: Symbol)(implicit ctx: Context)
extends SyntaxMsg(SuperCallsNotAllowedInlineableID) {
def msg = em"Super call not allowed in inlineable $symbol"
def explain = "Method inlining prohibits calling superclass methods, as it may lead to confusion about which super is being called."
}
class NotAPath(tp: Type, usage: String)(using Context) extends TypeMsg(NotAPathID):
def msg = em"$tp is not a valid $usage, since it is not an immutable path"
def explain =
i"""An immutable path is
| - a reference to an immutable value, or
| - a reference to `this`, or
| - a selection of an immutable path with an immutable value."""
class WrongNumberOfParameters(expected: Int)(implicit ctx: Context)
extends SyntaxMsg(WrongNumberOfParametersID) {
def msg = s"Wrong number of parameters, expected: $expected"
def explain = ""
}
class DuplicatePrivateProtectedQualifier()(implicit ctx: Context)
extends SyntaxMsg(DuplicatePrivateProtectedQualifierID) {
def msg = "Duplicate private/protected qualifier"
def explain =
em"It is not allowed to combine `private` and `protected` modifiers even if they are qualified to different scopes"
}
class ExpectedStartOfTopLevelDefinition()(implicit ctx: Context)
extends SyntaxMsg(ExpectedStartOfTopLevelDefinitionID) {
def msg = "Expected start of definition"
def explain =
em"You have to provide either ${hl("class")}, ${hl("trait")}, ${hl("object")}, or ${hl("enum")} definitions after qualifiers"
}
class NoReturnFromInlineable(owner: Symbol)(implicit ctx: Context)
extends SyntaxMsg(NoReturnFromInlineableID) {
def msg = em"No explicit ${hl("return")} allowed from inlineable $owner"
def explain =
em"""Methods marked with ${hl("inline")} modifier may not use ${hl("return")} statements.
|Instead, you should rely on the last expression's value being
|returned from a method.
|"""
}
class ReturnOutsideMethodDefinition(owner: Symbol)(implicit ctx: Context)
extends SyntaxMsg(ReturnOutsideMethodDefinitionID) {
def msg = em"${hl("return")} outside method definition"
def explain =
em"""You used ${hl("return")} in ${owner}.
|${hl("return")} is a keyword and may only be used within method declarations.
|"""
}
class ExtendFinalClass(clazz:Symbol, finalClazz: Symbol)(implicit ctx: Context)
extends SyntaxMsg(ExtendFinalClassID) {
def msg = em"$clazz cannot extend ${hl("final")} $finalClazz"
def explain =
em"""A class marked with the ${hl("final")} keyword cannot be extended"""
}
class ExpectedTypeBoundOrEquals(found: Token)(implicit ctx: Context)
extends SyntaxMsg(ExpectedTypeBoundOrEqualsID) {
def msg = em"${hl("=")}, ${hl(">:")}, or ${hl("<:")} expected, but ${Tokens.showToken(found)} found"
def explain =
em"""Type parameters and abstract types may be constrained by a type bound.
|Such type bounds limit the concrete values of the type variables and possibly
|reveal more information about the members of such types.
|
|A lower type bound ${hl("B >: A")} expresses that the type variable ${hl("B")}
|refers to a supertype of type ${hl("A")}.
|
|An upper type bound ${hl("T <: A")} declares that type variable ${hl("T")}
|refers to a subtype of type ${hl("A")}.
|"""
}
class ClassAndCompanionNameClash(cls: Symbol, other: Symbol)(implicit ctx: Context)
extends NamingMsg(ClassAndCompanionNameClashID) {
def msg = em"Name clash: both ${cls.owner} and its companion object defines ${cls.name.stripModuleClassSuffix}"
def explain =
em"""|A ${cls.kindString} and its companion object cannot both define a ${hl("class")}, ${hl("trait")} or ${hl("object")} with the same name:
| - ${cls.owner} defines ${cls}
| - ${other.owner} defines ${other}"""
}
class TailrecNotApplicable(symbol: Symbol)(implicit ctx: Context)
extends SyntaxMsg(TailrecNotApplicableID) {
def msg = {
val reason =
if (!symbol.is(Method)) em"$symbol isn't a method"
else if (symbol.is(Deferred)) em"$symbol is abstract"
else if (!symbol.isEffectivelyFinal) em"$symbol is neither ${hl("private")} nor ${hl("final")} so can be overridden"
else em"$symbol contains no recursive calls"
s"TailRec optimisation not applicable, $reason"
}
def explain = ""
}
class FailureToEliminateExistential(tp: Type, tp1: Type, tp2: Type, boundSyms: List[Symbol])(implicit ctx: Context)
extends Message(FailureToEliminateExistentialID) {
def kind: String = "Compatibility"
def msg =
val originalType = ctx.printer.dclsText(boundSyms, "; ").show
em"""An existential type that came from a Scala-2 classfile cannot be
          |mapped accurately to a Scala-3 equivalent.
|original type : $tp forSome ${originalType}
|reduces to : $tp1
|type used instead: $tp2
|This choice can cause follow-on type errors or hide type errors.
          |Proceed at your own risk."""
def explain =
em"""Existential types in their full generality are no longer supported.
          |Scala-3 supports applications of class types to wildcard type arguments.
|Other forms of existential types that come from Scala-2 classfiles
|are only approximated in a best-effort way."""
}
class OnlyFunctionsCanBeFollowedByUnderscore(tp: Type)(implicit ctx: Context)
extends SyntaxMsg(OnlyFunctionsCanBeFollowedByUnderscoreID) {
def msg = em"Only function types can be followed by ${hl("_")} but the current expression has type $tp"
def explain =
em"""The syntax ${hl("x _")} is no longer supported if ${hl("x")} is not a function.
|To convert to a function value, you need to explicitly write ${hl("() => x")}"""
}
class MissingEmptyArgumentList(method: Symbol)(implicit ctx: Context)
extends SyntaxMsg(MissingEmptyArgumentListID) {
def msg = em"$method must be called with ${hl("()")} argument"
def explain = {
val codeExample =
"""def next(): T = ...
|next // is expanded to next()"""
em"""Previously an empty argument list () was implicitly inserted when calling a nullary method without arguments. E.g.
|
|$codeExample
|
           |In Dotty, this idiom is an error. The application syntax has to follow the parameter syntax exactly.
|Excluded from this rule are methods that are defined in Java or that override methods defined in Java."""
}
}
class DuplicateNamedTypeParameter(name: Name)(implicit ctx: Context)
extends SyntaxMsg(DuplicateNamedTypeParameterID) {
def msg = em"Type parameter $name was defined multiple times."
def explain = ""
}
class UndefinedNamedTypeParameter(undefinedName: Name, definedNames: List[Name])(implicit ctx: Context)
extends SyntaxMsg(UndefinedNamedTypeParameterID) {
def msg = em"Type parameter $undefinedName is undefined. Expected one of ${definedNames.map(_.show).mkString(", ")}."
def explain = ""
}
class IllegalStartOfStatement(isModifier: Boolean)(implicit ctx: Context) extends SyntaxMsg(IllegalStartOfStatementID) {
def msg = {
val addendum = if (isModifier) ": no modifiers allowed here" else ""
"Illegal start of statement" + addendum
}
def explain = "A statement is either an import, a definition or an expression."
}
class TraitIsExpected(symbol: Symbol)(implicit ctx: Context) extends SyntaxMsg(TraitIsExpectedID) {
def msg = em"$symbol is not a trait"
def explain = {
val errorCodeExample =
"""class A
|class B
|
|val a = new A with B // will fail with a compile error - class B is not a trait""".stripMargin
val codeExample =
"""class A
|trait B
|
|val a = new A with B // compiles normally""".stripMargin
em"""Only traits can be mixed into classes using a ${hl("with")} keyword.
|Consider the following example:
|
|$errorCodeExample
|
|The example mentioned above would fail because B is not a trait.
|But if you make B a trait it will be compiled without any errors:
|
|$codeExample
|"""
}
}
class TraitRedefinedFinalMethodFromAnyRef(method: Symbol)(implicit ctx: Context) extends SyntaxMsg(TraitRedefinedFinalMethodFromAnyRefID) {
def msg = em"Traits cannot redefine final $method from ${hl("class AnyRef")}."
def explain = ""
}
class PackageNameAlreadyDefined(pkg: Symbol)(implicit ctx: Context) extends NamingMsg(PackageNameAlreadyDefinedID) {
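    // If the clashing package symbol stems from a class file, point at that
    // file and offer deleting it as an alternative fix.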
lazy val (where, or) =
if pkg.associatedFile == null then ("", "")
else (s" in ${pkg.associatedFile}", " or delete the containing class file")
def msg = em"""${pkg.name} is the name of $pkg$where.
|It cannot be used at the same time as the name of a package."""
def explain =
em"""An ${hl("object")} or other toplevel definition cannot have the same name as an existing ${hl("package")}.
|Rename either one of them$or."""
}
class UnapplyInvalidNumberOfArguments(qual: untpd.Tree, argTypes: List[Type])(implicit ctx: Context)
extends SyntaxMsg(UnapplyInvalidNumberOfArgumentsID) {
def msg = em"Wrong number of argument patterns for $qual; expected: ($argTypes%, %)"
def explain =
em"""The Unapply method of $qual was used with incorrect number of arguments.
|Expected usage would be something like:
|case $qual(${argTypes.map(_ => '_')}%, %) => ...
|
          |where the subsequent arguments would have the following types: ($argTypes%, %).
|""".stripMargin
}
class UnapplyInvalidReturnType(unapplyResult: Type, unapplyName: Symbol#ThisName)(implicit ctx: Context)
extends DeclarationMsg(UnapplyInvalidReturnTypeID) {
def msg =
val addendum =
if Feature.migrateTo3 && unapplyName == nme.unapplySeq
then "\\nYou might want to try to rewrite the extractor to use `unapply` instead."
else ""
em"""| ${Red(i"$unapplyResult")} is not a valid result type of an $unapplyName method of an ${Magenta("extractor")}.$addendum"""
def explain = if (unapplyName.show == "unapply")
em"""
|To be used as an extractor, an unapply method has to return a type that either:
| - has members ${Magenta("isEmpty: Boolean")} and ${Magenta("get: S")} (usually an ${Green("Option[S]")})
| - is a ${Green("Boolean")}
| - is a ${Green("Product")} (like a ${Magenta("Tuple2[T1, T2]")})
|
|class A(val i: Int)
|
|object B {
| def unapply(a: A): ${Green("Option[Int]")} = Some(a.i)
|}
|
|object C {
| def unapply(a: A): ${Green("Boolean")} = a.i == 2
|}
|
|object D {
| def unapply(a: A): ${Green("(Int, Int)")} = (a.i, a.i)
|}
|
|object Test {
| def test(a: A) = a match {
| ${Magenta("case B(1)")} => 1
| ${Magenta("case a @ C()")} => 2
| ${Magenta("case D(3, 3)")} => 3
| }
|}
""".stripMargin
else
em"""
|To be used as an extractor, an unapplySeq method has to return a type which has members
|${Magenta("isEmpty: Boolean")} and ${Magenta("get: S")} where ${Magenta("S <: Seq[V]")} (usually an ${Green("Option[Seq[V]]")}):
|
|object CharList {
          |  def unapplySeq(s: String): ${Green("Option[Seq[Char]]")} = Some(s.toList)
|
| "example" match {
| ${Magenta("case CharList(c1, c2, c3, c4, _, _, _)")} =>
| println(s"$$c1,$$c2,$$c3,$$c4")
| case _ =>
| println("Expected *exactly* 7 characters!")
| }
|}
""".stripMargin
}
class StaticFieldsOnlyAllowedInObjects(member: Symbol)(implicit ctx: Context) extends SyntaxMsg(StaticFieldsOnlyAllowedInObjectsID) {
def msg = em"${hl("@static")} $member in ${member.owner} must be defined inside an ${hl("object")}."
def explain =
em"${hl("@static")} members are only allowed inside objects."
}
class StaticFieldsShouldPrecedeNonStatic(member: Symbol, defns: List[tpd.Tree])(implicit ctx: Context) extends SyntaxMsg(StaticFieldsShouldPrecedeNonStaticID) {
def msg = em"${hl("@static")} $member in ${member.owner} must be defined before non-static fields."
def explain = {
val nonStatics = defns.takeWhile(_.symbol != member).take(3).filter(_.isInstanceOf[tpd.ValDef])
val codeExample = s"""object ${member.owner.name.firstPart} {
| @static ${member} = ...
| ${nonStatics.map(m => s"${m.symbol} = ...").mkString("\\n ")}
| ...
|}"""
em"""The fields annotated with @static should precede any non @static fields.
          |This ensures that we do not introduce surprises for users in the initialization order of this class.
          |Static fields are initialized when the code of Foo is class-loaded.
          |Non-static fields are only initialized the first time that Foo is accessed.
|
          |The definition of ${member.name} should come before the non-${hl("@static val")}s:
|$codeExample
|"""
}
}
class CyclicInheritance(symbol: Symbol, addendum: => String)(implicit ctx: Context) extends SyntaxMsg(CyclicInheritanceID) {
def msg = em"Cyclic inheritance: $symbol extends itself$addendum"
def explain = {
val codeExample = "class A extends A"
em"""Cyclic inheritance is prohibited in Dotty.
|Consider the following example:
|
|$codeExample
|
|The example mentioned above would fail because this type of inheritance hierarchy
|creates a "cycle" where a not yet defined class A extends itself which makes
|impossible to instantiate an object of this class"""
}
}
class BadSymbolicReference(denot: SymDenotation)(implicit ctx: Context)
extends ReferenceMsg(BadSymbolicReferenceID) {
def msg = {
val denotationOwner = denot.owner
val denotationName = ctx.fresh.setSetting(ctx.settings.YdebugNames, true).printer.nameString(denot.name)
val file = denot.symbol.associatedFile
val (location, src) =
if (file != null) (s" in $file", file.toString)
else ("", "the signature")
em"""Bad symbolic reference. A signature$location
|refers to $denotationName in ${denotationOwner.showKind} ${denotationOwner.showFullName} which is not available.
|It may be completely missing from the current classpath, or the version on
|the classpath might be incompatible with the version used when compiling $src."""
}
def explain = ""
}
class UnableToExtendSealedClass(pclazz: Symbol)(implicit ctx: Context) extends SyntaxMsg(UnableToExtendSealedClassID) {
def msg = em"Cannot extend ${hl("sealed")} $pclazz in a different source file"
def explain = "A sealed class or trait can only be extended in the same file as its declaration"
}
class SymbolHasUnparsableVersionNumber(symbol: Symbol, migrationMessage: => String)(implicit ctx: Context)
extends SyntaxMsg(SymbolHasUnparsableVersionNumberID) {
def msg = em"${symbol.showLocated} has an unparsable version number: $migrationMessage"
def explain =
em"""$migrationMessage
|
|The ${symbol.showLocated} is marked with ${hl("@migration")} indicating it has changed semantics
        |between versions, and the ${hl("-Xmigration")} setting is used to warn about constructs
        |whose behavior may have changed across versions."""
}
class SymbolChangedSemanticsInVersion(
symbol: Symbol,
migrationVersion: ScalaVersion
)(implicit ctx: Context) extends SyntaxMsg(SymbolChangedSemanticsInVersionID) {
def msg = em"${symbol.showLocated} has changed semantics in version $migrationVersion"
def explain = {
em"""The ${symbol.showLocated} is marked with ${hl("@migration")} indicating it has changed semantics
        |between versions, and the ${hl("-Xmigration")} setting is used to warn about constructs
        |whose behavior may have changed across versions."""
}
}
class UnableToEmitSwitch(tooFewCases: Boolean)(implicit ctx: Context)
extends SyntaxMsg(UnableToEmitSwitchID) {
def tooFewStr: String = if (tooFewCases) " since there are not enough cases" else ""
def msg = em"Could not emit switch for ${hl("@switch")} annotated match$tooFewStr"
def explain = {
val codeExample =
"""val ConstantB = 'B'
|final val ConstantC = 'C'
|def tokenMe(ch: Char) = (ch: @switch) match {
| case '\\t' | '\\n' => 1
| case 'A' => 2
| case ConstantB => 3 // a non-literal may prevent switch generation: this would not compile
| case ConstantC => 4 // a constant value is allowed
| case _ => 5
|}""".stripMargin
em"""If annotated with ${hl("@switch")}, the compiler will verify that the match has been compiled to a
|tableswitch or lookupswitch and issue an error if it instead compiles into a series of conditional
|expressions. Example usage:
|
|$codeExample
|
|The compiler will not apply the optimisation if:
|- the matched value is not of type ${hl("Int")}, ${hl("Byte")}, ${hl("Short")} or ${hl("Char")}
        |- the case values are not constant literals
        |- there are fewer than three cases"""
}
}
class MissingCompanionForStatic(member: Symbol)(implicit ctx: Context)
extends SyntaxMsg(MissingCompanionForStaticID) {
def msg = em"${member.owner} does not have a companion class"
def explain =
em"An object that contains ${hl("@static")} members must have a companion class."
}
class PolymorphicMethodMissingTypeInParent(rsym: Symbol, parentSym: Symbol)(implicit ctx: Context)
extends SyntaxMsg(PolymorphicMethodMissingTypeInParentID) {
def msg = em"Polymorphic refinement $rsym without matching type in parent $parentSym is no longer allowed"
def explain =
em"""Polymorphic $rsym is not allowed in the structural refinement of $parentSym because
|$rsym does not override any method in $parentSym. Structural refinement does not allow for
|polymorphic methods."""
}
class ParamsNoInline(owner: Symbol)(implicit ctx: Context)
extends SyntaxMsg(ParamsNoInlineID) {
def msg = em"""${hl("inline")} modifier can only be used for parameters of inline methods"""
def explain = ""
}
class JavaSymbolIsNotAValue(symbol: Symbol)(implicit ctx: Context) extends TypeMsg(JavaSymbolIsNotAValueID) {
def msg = {
val kind =
if (symbol is Package) em"$symbol"
else em"Java defined ${hl("class " + symbol.name)}"
s"$kind is not a value"
}
def explain = ""
}
class DoubleDefinition(decl: Symbol, previousDecl: Symbol, base: Symbol)(implicit ctx: Context) extends NamingMsg(DoubleDefinitionID) {
def msg = {
def nameAnd = if (decl.name != previousDecl.name) " name and" else ""
def details(implicit ctx: Context): String =
if (decl.isRealMethod && previousDecl.isRealMethod) {
// compare the signatures when both symbols represent methods
decl.signature.matchDegree(previousDecl.signature) match {
case Signature.MatchDegree.NoMatch =>
// If the signatures don't match at all at the current phase, then
// they might match after erasure.
val elimErasedCtx = ctx.withPhaseNoEarlier(ctx.elimErasedValueTypePhase.next)
if (elimErasedCtx != ctx)
details(elimErasedCtx)
else
"" // shouldn't be reachable
case Signature.MatchDegree.ParamMatch =>
"have matching parameter types."
case Signature.MatchDegree.FullMatch =>
i"have the same$nameAnd type after erasure."
}
}
else ""
def symLocation(sym: Symbol) = {
val lineDesc =
if (sym.span.exists && sym.span != sym.owner.span)
s" at line ${sym.sourcePos.line + 1}"
else ""
i"in ${sym.owner}${lineDesc}"
}
val clashDescription =
if (decl.owner eq previousDecl.owner)
"Double definition"
        else if ((decl.owner eq base) || (previousDecl.owner eq base))
"Name clash between defined and inherited member"
else
"Name clash between inherited members"
em"""$clashDescription:
|${previousDecl.showDcl} ${symLocation(previousDecl)} and
|${decl.showDcl} ${symLocation(decl)}
|""" + details
}
def explain = ""
}
class ImportRenamedTwice(ident: untpd.Ident)(implicit ctx: Context) extends SyntaxMsg(ImportRenamedTwiceID) {
def msg = s"${ident.show} is renamed twice on the same import line."
def explain = ""
}
class TypeTestAlwaysSucceeds(foundCls: Symbol, testCls: Symbol)(implicit ctx: Context) extends SyntaxMsg(TypeTestAlwaysSucceedsID) {
def msg = {
val addendum =
if (foundCls != testCls) s" is a subtype of $testCls"
else " is the same as the tested type"
s"The highlighted type test will always succeed since the scrutinee type ($foundCls)" + addendum
}
def explain = ""
}
// Relative of CyclicReferenceInvolvingImplicit and RecursiveValueNeedsResultType
class TermMemberNeedsResultTypeForImplicitSearch(cycleSym: Symbol)(implicit ctx: Context)
extends CyclicMsg(TermMemberNeedsNeedsResultTypeForImplicitSearchID) {
def msg = em"""$cycleSym needs result type because its right-hand side attempts implicit search"""
def explain =
em"""|The right hand-side of $cycleSym's definition requires an implicit search at the highlighted position.
|To avoid this error, give `$cycleSym` an explicit type.
|""".stripMargin
}
class ClassCannotExtendEnum(cls: Symbol, parent: Symbol)(implicit ctx: Context) extends SyntaxMsg(ClassCannotExtendEnumID) {
def msg = em"""$cls in ${cls.owner} extends enum ${parent.name}, but extending enums is prohibited."""
def explain = ""
}
class NotAnExtractor(tree: untpd.Tree)(implicit ctx: Context) extends SyntaxMsg(NotAnExtractorID) {
def msg = em"$tree cannot be used as an extractor in a pattern because it lacks an unapply or unapplySeq method"
def explain =
em"""|An ${hl("unapply")} method should be defined in an ${hl("object")} as follow:
| - If it is just a test, return a ${hl("Boolean")}. For example ${hl("case even()")}
| - If it returns a single sub-value of type T, return an ${hl("Option[T]")}
| - If it returns several sub-values T1,...,Tn, group them in an optional tuple ${hl("Option[(T1,...,Tn)]")}
|
|Sometimes, the number of sub-values isn't fixed and we would like to return a sequence.
|For this reason, you can also define patterns through ${hl("unapplySeq")} which returns ${hl("Option[Seq[T]]")}.
|This mechanism is used for instance in pattern ${hl("case List(x1, ..., xn)")}""".stripMargin
}
class MemberWithSameNameAsStatic()(using ctx: Context)
extends SyntaxMsg(MemberWithSameNameAsStaticID) {
def msg = em"Companion classes cannot define members with same name as a ${hl("@static")} member"
def explain = ""
}
class PureExpressionInStatementPosition(stat: untpd.Tree, val exprOwner: Symbol)(implicit ctx: Context)
extends Message(PureExpressionInStatementPositionID) {
def kind = "Potential Issue"
def msg = "A pure expression does nothing in statement position; you may be omitting necessary parentheses"
def explain =
em"""The pure expression $stat doesn't have any side effect and its result is not assigned elsewhere.
|It can be removed without changing the semantics of the program. This may indicate an error.""".stripMargin
}
class TraitCompanionWithMutableStatic()(using ctx: Context)
extends SyntaxMsg(TraitCompanionWithMutableStaticID) {
def msg = em"Companion of traits cannot define mutable @static fields"
def explain = ""
}
class LazyStaticField()(using ctx: Context)
extends SyntaxMsg(LazyStaticFieldID) {
def msg = em"Lazy @static fields are not supported"
def explain = ""
}
class StaticOverridingNonStaticMembers()(using ctx: Context)
extends SyntaxMsg(StaticOverridingNonStaticMembersID) {
def msg = em"${hl("@static")} members cannot override or implement non-static ones"
def explain = ""
}
class OverloadInRefinement(rsym: Symbol)(using ctx: Context)
extends DeclarationMsg(OverloadInRefinementID) {
def msg = "Refinements cannot introduce overloaded definitions"
def explain =
em"""The refinement `$rsym` introduces an overloaded definition.
|Refinements cannot contain overloaded definitions.""".stripMargin
}
class NoMatchingOverload(val alternatives: List[SingleDenotation], pt: Type)(
err: Errors)(using ctx: Context)
extends TypeMismatchMsg(NoMatchingOverloadID) {
def msg =
em"""None of the ${err.overloadedAltsStr(alternatives)}
|match ${err.expectedTypeStr(pt)}"""
def explain = ""
}
class StableIdentPattern(tree: untpd.Tree, pt: Type)(using ctx: Context)
extends TypeMsg(StableIdentPatternID) {
def msg =
em"""Stable identifier required, but $tree found"""
def explain = ""
}
class IllegalSuperAccessor(base: Symbol, memberName: Name,
acc: Symbol, accTp: Type,
other: Symbol, otherTp: Type)(using ctx: Context) extends DeclarationMsg(IllegalSuperAccessorID) {
def msg = {
// The mixin containing a super-call that requires a super-accessor
val accMixin = acc.owner
// The class or trait that the super-accessor should resolve too in `base`
val otherMixin = other.owner
// The super-call in `accMixin`
val superCall = hl(i"super.$memberName")
      // The super-call that the super-accessor in `base` forwards to
val resolvedSuperCall = hl(i"super[${otherMixin.name}].$memberName")
// The super-call that we would have called if `super` in traits behaved like it
// does in classes, i.e. followed the linearization of the trait itself.
val staticSuperCall = {
val staticSuper = accMixin.asClass.info.parents.reverse
.find(_.nonPrivateMember(memberName).matchingDenotation(accMixin.thisType, acc.info).exists)
val staticSuperName = staticSuper match {
case Some(parent) =>
parent.classSymbol.name.show
case None => // Might be reachable under separate compilation
"SomeParent"
}
hl(i"super[$staticSuperName].$memberName")
}
ex"""$base cannot be defined due to a conflict between its parents when
|implementing a super-accessor for $memberName in $accMixin:
|
          |1. One of its parents (${accMixin.name}) contains a call $superCall in its body,
| and when a super-call in a trait is written without an explicit parent
| listed in brackets, it is implemented by a generated super-accessor in
| the class that extends this trait based on the linearization order of
| the class.
|2. Because ${otherMixin.name} comes before ${accMixin.name} in the linearization
| order of ${base.name}, and because ${otherMixin.name} overrides $memberName,
| the super-accessor in ${base.name} is implemented as a call to
| $resolvedSuperCall.
|3. However,
| ${otherTp.widenExpr} (the type of $resolvedSuperCall in ${base.name})
| is not a subtype of
| ${accTp.widenExpr} (the type of $memberName in $accMixin).
| Hence, the super-accessor that needs to be generated in ${base.name}
| is illegal.
|
|Here are two possible ways to resolve this:
|
|1. Change the linearization order of ${base.name} such that
| ${accMixin.name} comes before ${otherMixin.name}.
|2. Alternatively, replace $superCall in the body of $accMixin by a
| super-call to a specific parent, e.g. $staticSuperCall
|""".stripMargin
}
def explain = ""
}
class TraitParameterUsedAsParentPrefix(cls: Symbol)(using ctx: Context)
extends DeclarationMsg(TraitParameterUsedAsParentPrefixID) {
def msg =
s"${cls.show} cannot extend from a parent that is derived via its own parameters"
def explain =
ex"""
|The parent class/trait that ${cls.show} extends from is obtained from
|the parameter of ${cls.show}. This is disallowed in order to prevent
|outer-related Null Pointer Exceptions in Scala.
|
|In order to fix this issue consider directly extending from the parent rather
|than obtaining it from the parameters of ${cls.show}.
|""".stripMargin
}
class UnknownNamedEnclosingClassOrObject(name: TypeName)(using ctx: Context)
extends ReferenceMsg(UnknownNamedEnclosingClassOrObjectID) {
def msg =
em"""no enclosing class or object is named '${hl(name.show)}'"""
def explain =
ex"""
|The class or object named '${hl(name.show)}' was used as a visibility
|modifier, but could not be resolved. Make sure that
|'${hl(name.show)}' is not misspelled and has been imported into the
|current scope.
""".stripMargin
}
class IllegalCyclicTypeReference(sym: Symbol, where: String, lastChecked: Type)(using ctx: Context)
extends CyclicMsg(IllegalCyclicTypeReferenceID) {
def msg = i"illegal cyclic type reference: ${where} ${hl(lastChecked.show)} of $sym refers back to the type itself"
def explain = ""
}
class ErasedTypesCanOnlyBeFunctionTypes()(using ctx: Context)
extends SyntaxMsg(ErasedTypesCanOnlyBeFunctionTypesID) {
def msg = "Types with erased keyword can only be function types `(erased ...) => ...`"
def explain = ""
}
class CaseClassMissingNonImplicitParamList(cdef: untpd.TypeDef)(implicit ctx: Context)
extends SyntaxMsg(CaseClassMissingNonImplicitParamListID) {
def msg =
em"""|A ${hl("case class")} must have at least one non-implicit parameter list"""
def explain =
em"""|${cdef.name} must have at least one non-implicit parameter list,
| if you're aiming to have a case class parametrized only by implicit ones, you should
| add an explicit ${hl("()")} as a parameter list to ${cdef.name}.""".stripMargin
}
class EnumerationsShouldNotBeEmpty(cdef: untpd.TypeDef)(implicit ctx: Context)
extends SyntaxMsg(EnumerationsShouldNotBeEmptyID) {
def msg = "Enumerations must contain at least one case"
def explain =
em"""|Enumeration ${cdef.name} must contain at least one case
|Example Usage:
| ${hl("enum")} ${cdef.name} {
| ${hl("case")} Option1, Option2
| }
|""".stripMargin
}
class AbstractCannotBeUsedForObjects(mdef: untpd.ModuleDef)(implicit ctx: Context)
extends SyntaxMsg(AbstractCannotBeUsedForObjectsID) {
def msg = em"${hl("abstract")} modifier cannot be used for objects"
def explain =
em"""|Objects are final and cannot be extended, thus cannot have the ${hl("abstract")} modifier
|
|You may want to define an abstract class:
| ${hl("abstract")} ${hl("class")} Abstract${mdef.name} { }
|
|And extend it in an object:
| ${hl("object")} ${mdef.name} ${hl("extends")} Abstract${mdef.name} { }
|""".stripMargin
}
class ModifierRedundantForObjects(mdef: untpd.ModuleDef, modifier: String)(implicit ctx: Context)
extends SyntaxMsg(ModifierRedundantForObjectsID) {
def msg = em"${hl(modifier)} modifier is redundant for objects"
def explain =
em"""|Objects cannot be extended making the ${hl(modifier)} modifier redundant.
|You may want to define the object without it:
| ${hl("object")} ${mdef.name} { }
|""".stripMargin
}
class TypedCaseDoesNotExplicitlyExtendTypedEnum(enumDef: Symbol, caseDef: untpd.TypeDef)(implicit ctx: Context)
extends SyntaxMsg(TypedCaseDoesNotExplicitlyExtendTypedEnumID) {
def msg = i"explicit extends clause needed because both enum case and enum class have type parameters"
def explain =
em"""Enumerations where the enum class as well as the enum case have type parameters need
          |an explicit extends clause.
          |For example:
| ${hl("enum")} ${enumDef.name}[T] {
| ${hl("case")} ${caseDef.name}[U](u: U) ${hl("extends")} ${enumDef.name}[U]
| }
|""".stripMargin
}
class IllegalRedefinitionOfStandardKind(kindType: String, name: Name)(implicit ctx: Context)
extends SyntaxMsg(IllegalRedefinitionOfStandardKindID) {
def msg = em"illegal redefinition of standard $kindType $name"
def explain =
em"""| "$name" is a standard Scala core `$kindType`
| Please choose a different name to avoid conflicts
|""".stripMargin
}
class NoExtensionMethodAllowed(mdef: untpd.DefDef)(implicit ctx: Context)
extends SyntaxMsg(NoExtensionMethodAllowedID) {
def msg = em"No extension method allowed here, since collective parameters are given"
def explain =
em"""|Extension method:
| `${mdef}`
|is defined inside an extension clause which has collective parameters.
|""".stripMargin
}
class ExtensionMethodCannotHaveTypeParams(mdef: untpd.DefDef)(implicit ctx: Context)
extends SyntaxMsg(ExtensionMethodCannotHaveTypeParamsID) {
def msg = i"Extension method cannot have type parameters since some were already given previously"
def explain =
em"""|Extension method:
| `${mdef}`
|has type parameters `[${mdef.tparams.map(_.show).mkString(",")}]`, while the extension clause has
          |its own type parameters. Please consider moving these to the extension clause's type parameter list.
|""".stripMargin
}
class ExtensionCanOnlyHaveDefs(mdef: untpd.Tree)(implicit ctx: Context)
extends SyntaxMsg(ExtensionCanOnlyHaveDefsID) {
def msg = em"Only methods allowed here, since collective parameters are given"
def explain =
em"""Extension clauses can only have `def`s
| `${mdef.show}` is not a valid expression here.
|""".stripMargin
}
class UnexpectedPatternForSummonFrom(tree: Tree[_])(implicit ctx: Context)
extends SyntaxMsg(UnexpectedPatternForSummonFromID) {
def msg = em"Unexpected pattern for summonFrom. Expected ${hl("`x: T`")} or ${hl("`_`")}"
def explain =
em"""|The pattern "${tree.show}" provided in the ${hl("case")} expression of the ${hl("summonFrom")},
| needs to be of the form ${hl("`x: T`")} or ${hl("`_`")}.
|
| Example usage:
| inline def a = summonFrom {
| case x: T => ???
| }
|
| or
| inline def a = summonFrom {
| case _ => ???
| }
|""".stripMargin
}
class AnonymousInstanceCannotBeEmpty(impl: untpd.Template)(implicit ctx: Context)
extends SyntaxMsg(AnonymousInstanceCannotBeEmptyID) {
def msg = i"anonymous instance must implement a type or have at least one extension method"
def explain =
em"""|Anonymous instances cannot be defined with an empty body. The block
|`${impl.show}` should either contain an implemented type or at least one extension method.
|""".stripMargin
}
class TypeSpliceInValPattern(expr: untpd.Tree)(implicit ctx: Context)
extends SyntaxMsg(TypeSpliceInValPatternID) {
def msg = "Type splices cannot be used in val patterns. Consider using `match` instead."
def explain =
em"""|Type splice: `$$${expr.show}` cannot be used in a `val` pattern. Consider rewriting the `val` pattern
|as a `match` with a corresponding `case` to replace the `val`.
|""".stripMargin
}
class ModifierNotAllowedForDefinition(flag: Flag)(implicit ctx: Context)
extends SyntaxMsg(ModifierNotAllowedForDefinitionID) {
def msg = s"Modifier `${flag.flagsString}` is not allowed for this definition"
def explain = ""
}
}
|
som-snytt/dotty
|
compiler/src/dotty/tools/dotc/reporting/messages.scala
|
Scala
|
apache-2.0
| 99,657
|
package edu.berkeley.nlp.entity.sem
import edu.berkeley.nlp.entity.coref.DocumentGraph
import edu.berkeley.nlp.futile.util.Logger
import edu.berkeley.nlp.entity.coref.Mention
import scala.collection.mutable.ArrayBuffer
import edu.berkeley.nlp.entity.WordNetInterfacer
import scala.collection.JavaConverters._
object FancyHeadMatcher {
val VacuousSuffixesCore = Set("Ltd.", "Inc.", "Co.", "Jr.", "Sr.")
val VacuousSuffixes = VacuousSuffixesCore ++ VacuousSuffixesCore.map(_.replaceAll("\\\\.", ""));
// BASIC HEAD CONTAINMENT
def isBasicHeadContainedBidirectional(docGraph: DocumentGraph, antIdx: Int, mentIdx: Int): Boolean = {
isBasicHeadContained(docGraph, antIdx, mentIdx) || isBasicHeadContained(docGraph, mentIdx, antIdx);
}
def isBasicHeadContained(docGraph: DocumentGraph, antIdx: Int, mentIdx: Int): Boolean = {
docGraph.getMention(antIdx).wordsLc.contains(docGraph.getMention(mentIdx).headStringLc);
}
// CLEAN CONTAINMENT
// "Clean" head containment: non-nested, containment site is an immediate child of the head
// (and not the head)
def isCleanContained(container: Mention, containeeLc: String) = {
val idx = getCleanContainmentSentenceIdx(container, containeeLc);
idx != -1 && container.contextTree.isChild(idx, container.headIdx);
}
def getCleanContainmentSentenceIdx(container: Mention, containeeLc: String): Int = {
val hcIdxRaw = container.wordsLc.indexOf(containeeLc);
val hcIdx = hcIdxRaw + container.startIdx;
val isContained = hcIdxRaw != -1;
val isContainedAndNotHead = isContained && hcIdx != container.headIdx
if (isContainedAndNotHead) hcIdx else -1;
}
def getCleanContainmentSentenceIdxStrict(container: Mention, containee: Mention): Int = {
if (isCleanContained(container, containee.headStringLc) && !areNested(container, containee) &&
getCleanContainmentSyntacticType(container, containee) != "GENITIVE" && !isCoordinatedHacky(container) && !isCoordinatedHacky(containee)) {
getCleanContainmentSentenceIdx(container, containee.headStringLc);
} else {
-1;
}
}
def getCleanContainmentSyntacticType(antecedent: Mention, curr: Mention): String = {
val containmentSentenceIdx = getCleanContainmentSentenceIdx(antecedent, curr.headStringLc);
getCleanContainmentSyntacticType(antecedent, containmentSentenceIdx);
}
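  // The returned label encodes where the containment site sits relative to the
  // container's head: before it across a genitive marker (GENITIVE), after it
  // across a coordinating conjunction (COORDINATION) or a comma (APPOSITIVE),
  // otherwise MODIFIER; NONE if there is no containment site.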
def getCleanContainmentSyntacticType(mention: Mention, containmentSentenceIdx: Int): String = {
val headIdx = mention.headIdx;
if (containmentSentenceIdx == -1) {
"NONE";
} else if (containmentSentenceIdx < headIdx && (interposed(mention.wordsLc, containmentSentenceIdx - mention.startIdx, headIdx - mention.startIdx, "'s")
|| interposed(mention.wordsLc, containmentSentenceIdx - mention.startIdx, headIdx - mention.startIdx, "'"))) {
      // Genitive: containment site comes before the head, separated by 's or '
"GENITIVE";
} else if (headIdx < containmentSentenceIdx && interposed(mention.pos, headIdx - mention.startIdx, containmentSentenceIdx - mention.startIdx, "CC")) {
// Coordination: after, separated by a CC
// (comes before appositive so that serial lists don't get mislabeled)
"COORDINATION";
} else if (headIdx < containmentSentenceIdx && interposed(mention.wordsLc, headIdx - mention.startIdx, containmentSentenceIdx - mention.startIdx, ",")) {
// Appositive: after, separated by a comma
"APPOSITIVE";
} else {
"MODIFIER";
}
}
def getCleanContainmentSemanticType(antecedent: Mention, curr: Mention, wni: WordNetInterfacer): String = {
val containmentSentenceIdx = getCleanContainmentSentenceIdx(antecedent, curr.headStringLc);
getCleanContainmentSemanticType(antecedent, containmentSentenceIdx, wni: WordNetInterfacer);
}
def getCleanContainmentSemanticType(mention: Mention, containmentSentenceIdx: Int, wni: WordNetInterfacer): String = {
SemClass.getSemClass(mention.wordsLc(containmentSentenceIdx - mention.startIdx), wni).toString;
}
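  // Whether `item` occurs in `items` at any position in [startIdx, endIdx).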
private def interposed(items: Seq[String], startIdx: Int, endIdx: Int, item: String) = {
var contains = false;
for (i <- startIdx until endIdx) {
if (items(i) == item) {
contains = true;
}
}
contains;
}
def areNested(antecedent: Mention, curr: Mention) = {
antecedent.sentIdx == curr.sentIdx && ((antecedent.startIdx <= curr.headIdx && curr.headIdx < antecedent.endIdx) ||
(curr.startIdx <= antecedent.headIdx && antecedent.headIdx < curr.endIdx));
}
def isBasicCleanHeadContainedBidirectional(antecedent: Mention, curr: Mention): Boolean = {
isBasicCleanHeadContained(antecedent, curr) || isBasicCleanHeadContained(curr, antecedent);
}
def isBasicCleanHeadContained(antecedent: Mention, curr: Mention): Boolean = {
val containmentSentenceIdx = getCleanContainmentSentenceIdx(antecedent, curr.headStringLc);
!areNested(antecedent, curr) && containmentSentenceIdx != -1 && antecedent.contextTree.isChild(containmentSentenceIdx, antecedent.headIdx);
}
def isCoordinatedHacky(ment: Mention) = {
val tree = ment.contextTree.constTree;
var isCoordinated = false;
val dominatingNodes = tree.getSpanMap.get(new edu.berkeley.nlp.futile.fig.basic.Pair[Integer,Integer](ment.startIdx, ment.endIdx));
if (dominatingNodes != null && !dominatingNodes.isEmpty) {
for (dominatingNode <- dominatingNodes.asScala) {
if (dominatingNode.getChildren.asScala.map(_.getLabel).contains("CC")) {
isCoordinated = true;
}
}
}
isCoordinated;
}
// FANCY HEADS
def isFancyHeadContainedBidirectional(docGraph: DocumentGraph, antIdx: Int, mentIdx: Int): Boolean = {
isFancyHeadContained(docGraph, antIdx, mentIdx) || isFancyHeadContained(docGraph, mentIdx, antIdx)
}
def isFancyHeadContained(docGraph: DocumentGraph, antIdx: Int, mentIdx: Int): Boolean = {
// val antWordsLc = docGraph.getMention(antIdx).wordsLc;
val antWordsLc = identifyHeadContentLc(docGraph.getMention(antIdx));
val currMent = docGraph.getMention(mentIdx);
val nerString = currMent.nerString;
var headOffset = currMent.headIdx - currMent.startIdx;
if (headOffset > 0 && VacuousSuffixes.contains(currMent.words(headOffset))) {
headOffset -= 1;
}
val words = currMent.words;
val wordsLc = currMent.wordsLc;
// If it's a company, take the prefix
val hc = if (nerString == "GPE" || nerString == "ORG") {
// Find the first capitalized non-determiner
var startIdx = 0;
while (startIdx < headOffset && (Character.isLowerCase(words(startIdx).charAt(0)) ||
words(startIdx).toLowerCase == "The" ||
words(startIdx).toLowerCase == "A")) {
startIdx += 1;
}
antWordsLc.contains(wordsLc(startIdx)) || antWordsLc.contains(wordsLc(headOffset));
}
// If it's a person, consider a match if either the first name or the last name matches
else if (nerString == "PER") {
      val firstNameOrTitleOffset = if (headOffset == 0) headOffset else headOffset - 1;
antWordsLc.contains(wordsLc(firstNameOrTitleOffset)) || antWordsLc.contains(wordsLc(headOffset));
} else {
// Back up one from suffixes and return what's there
antWordsLc.contains(wordsLc(headOffset));
}
hc;
}
def identifyHeadContentLc(ment: Mention): Seq[String] = {
// Take the span given by dependents of the head. If they're not contiguous, print them out.
// Block with PRP$ or a genitive marker
val contextTree = ment.contextTree;
val deps = ment.contextTree.childParentDepMap;
val headChildren = new ArrayBuffer[Int];
for (i <- ment.startIdx until ment.endIdx) {
if (deps(i) == ment.headIdx || i == ment.headIdx) {
headChildren += i;
}
}
if (headChildren.size == 0) {
Seq(ment.headString);
} else {
val min = headChildren.reduce(Math.min);
val max = headChildren.reduce(Math.max);
// if (!headChildren.sameElements(min to max)) {
// Logger.logss("Conflict: " + ment.wordsLc +
// "\\n head children: " + headChildren.map(i => ment.wordsLc(i - ment.startIdx)) +
// "\\n min and max: " + (min to max).map(i => ment.wordsLc(i - ment.startIdx)));
// }
// Block things past a genitive marker
var blockIndex = -1;
for (idx <- min to ment.headIdx) {
if (ment.wordsLc(idx - ment.startIdx) == "'s") {
blockIndex = idx;
}
}
      val filteredHeadChildren = headChildren.filter(childIdx => childIdx > blockIndex);
filteredHeadChildren.map(i => ment.wordsLc(i - ment.startIdx));
}
}
}
|
malcolmgreaves/berkeley-entity
|
src/main/java/edu/berkeley/nlp/entity/sem/FancyHeadMatcher.scala
|
Scala
|
gpl-3.0
| 8,838
|
/***********************************************************************
* Copyright (c) 2013-2022 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.curve
import com.typesafe.scalalogging.LazyLogging
import org.junit.runner.RunWith
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
import scala.io.Source
@RunWith(classOf[JUnitRunner])
class XZ2SFCTest extends Specification with LazyLogging {
val sfc = XZ2SFC(12)
"XZ2" should {
"index polygons and query them" >> {
val poly = sfc.index(10, 10, 12, 12)
val containing = Seq(
(9.0, 9.0, 13.0, 13.0),
(-180.0, -90.0, 180.0, 90.0),
(0.0, 0.0, 180.0, 90.0),
(0.0, 0.0, 20.0, 20.0)
)
val overlapping = Seq(
(11.0, 11.0, 13.0, 13.0),
(9.0, 9.0, 11.0, 11.0),
(10.5, 10.5, 11.5, 11.5),
(11.0, 11.0, 11.0, 11.0)
)
// note: in general, some disjoint ranges will match due to false positives
val disjoint = Seq(
(-180.0, -90.0, 8.0, 8.0),
(0.0, 0.0, 8.0, 8.0),
(9.0, 9.0, 9.5, 9.5),
(20.0, 20.0, 180.0, 90.0)
)
forall(containing ++ overlapping) { bbox =>
val ranges = sfc.ranges(Seq(bbox)).map(r => (r.lower, r.upper))
val matches = ranges.exists(r => r._1 <= poly && r._2 >= poly)
if (!matches) {
logger.warn(s"$bbox - no match")
}
matches must beTrue
}
forall(disjoint) { bbox =>
val ranges = sfc.ranges(Seq(bbox)).map(r => (r.lower, r.upper))
val matches = ranges.exists(r => r._1 <= poly && r._2 >= poly)
if (matches) {
logger.warn(s"$bbox - invalid match")
}
matches must beFalse
}
}
"index points and query them" >> {
val poly = sfc.index(11, 11, 11, 11)
val containing = Seq(
(9.0, 9.0, 13.0, 13.0),
(-180.0, -90.0, 180.0, 90.0),
(0.0, 0.0, 180.0, 90.0),
(0.0, 0.0, 20.0, 20.0)
)
val overlapping = Seq(
(11.0, 11.0, 13.0, 13.0),
(9.0, 9.0, 11.0, 11.0),
(10.5, 10.5, 11.5, 11.5),
(11.0, 11.0, 11.0, 11.0)
)
// note: in general, some disjoint ranges will match due to false positives
val disjoint = Seq(
(-180.0, -90.0, 8.0, 8.0),
(0.0, 0.0, 8.0, 8.0),
(9.0, 9.0, 9.5, 9.5),
(12.5, 12.5, 13.5, 13.5),
(20.0, 20.0, 180.0, 90.0)
)
forall(containing ++ overlapping) { bbox =>
val ranges = sfc.ranges(Seq(bbox)).map(r => (r.lower, r.upper))
val matches = ranges.exists(r => r._1 <= poly && r._2 >= poly)
if (!matches) {
logger.warn(s"$bbox - no match")
}
matches must beTrue
}
forall(disjoint) { bbox =>
val ranges = sfc.ranges(Seq(bbox)).map(r => (r.lower, r.upper))
val matches = ranges.exists(r => r._1 <= poly && r._2 >= poly)
if (matches) {
logger.warn(s"$bbox - invalid match")
}
matches must beFalse
}
}
"index complex features and query them2" >> {
// geometries taken from accumulo FilterTest
val r = """\\((\\d+\\.\\d*),(\\d+\\.\\d*),(\\d+\\.\\d*),(\\d+\\.\\d*)\\)""".r
val source = Source.fromInputStream(getClass.getClassLoader.getResourceAsStream("geoms.list"))
val geoms = try {
source.getLines.toArray.flatMap { l =>
r.findFirstMatchIn(l).map { m =>
(m.group(1).toDouble, m.group(2).toDouble, m.group(3).toDouble, m.group(4).toDouble)
}
}
} finally {
source.close()
}
val ranges = sfc.ranges(45.0, 23.0, 48.0, 27.0)
forall(geoms) { geom =>
val index = sfc.index(geom)
val matches = ranges.exists(r => r.lower <= index && r.upper >= index)
if (!matches) {
logger.warn(s"$geom - no match")
}
matches must beTrue
}
}
"fail for out-of-bounds values" >> {
val toFail = Seq(
(-180.1, 0d, -179.9, 1d),
(179.9, 0d, 180.1, 1d),
(-180.3, 0d, -180.1, 1d),
(180.1, 0d, 180.3, 1d),
(-180.1, 0d, 180.1, 1d),
(0d, -90.1, 1d, -89.9),
(0d, 89.9, 1d, 90.1),
(0d, -90.3, 1d, -90.1),
(0d, 90.1, 1d, 90.3),
(0d, -90.1, 1d, 90.1),
(-181d, -91d, 0d, 0d),
(0d, 0d, 181d, 91d)
)
forall(toFail) { case (xmin, ymin, xmax, ymax) =>
sfc.index(xmin, ymin, xmax, ymax) must throwAn[IllegalArgumentException]
}
}
}
}
|
locationtech/geomesa
|
geomesa-z3/src/test/scala/org/locationtech/geomesa/curve/XZ2SFCTest.scala
|
Scala
|
apache-2.0
| 4,884
|
package com.tradeshift.reaktive.xsd
import akka.stream.scaladsl.StreamConverters
import com.tradeshift.reaktive.testkit.SharedActorSystemSpec
import java.io.FileInputStream
import org.scalatest.Matchers
import org.scalatest.FunSpecLike
import akka.stream.scaladsl.FileIO
import com.tradeshift.reaktive.marshal.stream.AaltoReader
import com.tradeshift.reaktive.xsd.SchemaItem.Import
import java.io.File
import java.nio.file.Path
import java.nio.file.Paths
import org.scalatest.AsyncFunSpecLike
import scala.concurrent.Await
import scala.concurrent.duration._
import javax.xml.namespace.QName
class SchemaLoaderSpec extends SharedActorSystemSpec with FunSpecLike with Matchers {
implicit val m = materializer
val basePath = "./src/main/resources/"
var stuff = Vector.empty[Schema]
describe("SchemaLoader") {
it("should load a UBL XSD correctly") {
val start = System.nanoTime()
val schema = Await.result(SchemaLoader(Schema.empty, Set(
Import("urn:oasis:names:specification:ubl:schema:xsd:Invoice-2",None)),
i => i match {
case Import("urn:oasis:names:specification:ubl:schema:xsd:Invoice-2", _) =>
Import(
"urn:oasis:names:specification:ubl:schema:xsd:Invoice-2",
Some("UBL-Invoice-2.2-annotated.xsd"))
case Import("http://www.w3.org/2000/09/xmldsig#", _) =>
Import(
"http://www.w3.org/2000/09/xmldsig#",
Some("ubl/common/UBL-xmldsig-core-schema-2.2.xsd"))
case Import(ns, Some(file)) =>
val i = file.lastIndexOf("/")
val f = if (i == -1) file else file.substring(i + 1)
Import(ns, Some("ubl/common/" + f))
},
i => i match {
case Import(ns, Some(file)) =>
println("Importing " + ns + " from " + file)
StreamConverters.fromInputStream(() => getClass.getResourceAsStream("/" + file))
.via(AaltoReader.instance)
}), 30.seconds)
val t = System.nanoTime() - start
println(s"Took ${(t / 1000000)}ms")
stuff = stuff :+ schema
schema.namespaces should have size(14)
val invoiceTag =
schema.rootElements(new QName("urn:oasis:names:specification:ubl:schema:xsd:Invoice-2", "Invoice"))
invoiceTag.elementType
.isChildMultiValued(new QName("urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2", "InvoiceLine")) shouldBe(true)
invoiceTag.elementType
.isChildMultiValued(new QName("urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2", "BuyerCustomerParty")) shouldBe(false)
invoiceTag.elementType
.isChildMultiValued(new QName("urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2", "NonExistingTag")) shouldBe(false)
}
it("should resolve references to an existing Schema") {
val start = System.nanoTime
val base = Await.result(SchemaLoader(Schema.empty, Set(
Import("urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2",
Some("UBL-CommonAggregateComponents-2.2.xsd")),
Import("urn:oasis:names:specification:ubl:schema:xsd:CommonExtensionComponents-2",
Some("UBL-CommonExtensionComponents-2.2.xsd"))
),
i => i match {
case Import("http://www.w3.org/2000/09/xmldsig#", _) =>
Import(
"http://www.w3.org/2000/09/xmldsig#",
Some("ubl/common/UBL-xmldsig-core-schema-2.2.xsd"))
case Import(ns, Some(file)) =>
val i = file.lastIndexOf("/")
val f = if (i == -1) file else file.substring(i + 1)
Import(ns, Some("ubl/common/" + f))
},
i => i match {
case Import(ns, Some(file)) =>
println("Importing " + ns + " from " + file)
StreamConverters.fromInputStream(() => getClass.getResourceAsStream("/" + file))
.via(AaltoReader.instance)
}), 30.seconds)
val baseTime = System.nanoTime
println("base has " + base.namespaces.size + " ns")
val schema = Await.result(SchemaLoader(base,
Set(Import("urn:oasis:names:specification:ubl:schema:xsd:Invoice-2", None)),
i => i match {
case Import("urn:oasis:names:specification:ubl:schema:xsd:Invoice-2", _) =>
Import(
"urn:oasis:names:specification:ubl:schema:xsd:Invoice-2",
Some("UBL-Invoice-2.2-annotated.xsd"))
},
i => i match {
case Import(ns, Some(file)) =>
println("Importing " + ns + " from " + file)
StreamConverters.fromInputStream(() => getClass.getResourceAsStream("/" + file))
.via(AaltoReader.instance)
}), 30.seconds)
val done = System.nanoTime
println("schema has " + schema.namespaces.size + " ns")
println("base took " + (baseTime - start) / 1000000 + "ms, schema took " + (done - baseTime) / 1000000 + "ms")
}
}
}
|
Tradeshift/ts-reaktive
|
ts-reaktive-xsd/src/test/scala/com/tradeshift/reaktive/xsd/SchemaLoaderSpec.scala
|
Scala
|
mit
| 5,041
|
package typeclasses.homebrew
import shapeless._
import shapeless.record.FieldType
/**
* Created by eap on 7/31/14.
*/
trait TypeClassImpl[C[_]] {
def emptyProduct: C[HNil]
def product[H, T <: HList](
name: String,
CHead: C[H],
CTail: C[T]): C[H :: T]
def emptyCoproduct: C[CNil]
def coproduct[L, R <: Coproduct](
name: String,
CL: => C[L],
CR: => C[R]): C[L :+: R]
def project[A, B](
instance: C[B],
to: A => B,
from: B => A): C[A]
}
trait LowPriorityTypeClassConstructors[C[_]] extends TypeClassImpl[C] {
trait Instance[L] extends DepFn0 {
type Inner
final type Out = C[Inner]
}
trait FinalInstance[A] {
def apply(): C[A]
}
type ProductAux[L <: HList, I <: HList] = Instance[L] {
type Inner = I
}
type CoproductAux[L <: Coproduct, I <: Coproduct] = Instance[L] {
type Inner = I
}
type GenericAux[A, B] = Instance[A] {
type Inner = B
}
implicit def emptyProductInstance[In <: HNil]: ProductAux[In, HNil] =
new Instance[In] {
type Inner = HNil
def apply() = emptyProduct
}
implicit def productInstance[Label <: Symbol, Head, Tail <: HList, TailInner <: HList](
implicit witness: Witness.Aux[Label],
cHead: C[Head],
tailInstance: ProductAux[Tail, TailInner]): ProductAux[FieldType[Label, Head] :: Tail, Head :: TailInner] =
    new Instance[FieldType[Label, Head] :: Tail] {
type Inner = Head :: TailInner
def apply() = product(witness.value.name, cHead, tailInstance())
}
implicit def emptyCoproductInstance[In <: CNil]: CoproductAux[In, CNil] =
new Instance[In] {
type Inner = CNil
def apply() = emptyCoproduct
}
implicit def coproductInstance[Label <: Symbol, Left, Right <: Coproduct, RightInner <: Coproduct](
implicit witness: Witness.Aux[Label],
cLeft: Lazy[C[Left]],
rightInstance: CoproductAux[Right, RightInner]): CoproductAux[FieldType[Label, Left] :+: Right, Left :+: RightInner] =
    new Instance[FieldType[Label, Left] :+: Right] {
type Inner = Left :+: RightInner
def apply() = coproduct(witness.value.name, cLeft.value, rightInstance())
}
}
trait TypeClass[C[_]] extends LowPriorityTypeClassConstructors[C] {
implicit def genericInstance[A, Repr0, Repr1](
implicit lg: LabelledGeneric.Aux[A, Repr0],
bg: Generic.Aux[A, Repr1],
instance: GenericAux[Repr0, Repr1]): FinalInstance[A] =
new FinalInstance[A] {
def apply() = project(instance(), bg.to, bg.from)
}
object auto {
implicit def derive[A](implicit instance: Lazy[FinalInstance[A]]): C[A] = instance.value()
}
def apply[A](implicit instance: Lazy[FinalInstance[A]]): C[A] = instance.value()
}
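// A minimal usage sketch of the machinery above, instantiating it for a toy
// Show-like type class (all names below are illustrative, not part of any
// established API).
object ShowExample {
  trait Show[A] { def show(a: A): String }
  object Show extends TypeClass[Show] {
    def emptyProduct: Show[HNil] =
      new Show[HNil] { def show(a: HNil) = "" }
    def product[H, T <: HList](name: String, sHead: Show[H], sTail: Show[T]): Show[H :: T] =
      new Show[H :: T] {
        def show(a: H :: T) = s"$name=${sHead.show(a.head)} ${sTail.show(a.tail)}".trim
      }
    def emptyCoproduct: Show[CNil] =
      new Show[CNil] { def show(a: CNil) = "" }
    def coproduct[L, R <: Coproduct](name: String, sL: => Show[L], sR: => Show[R]): Show[L :+: R] =
      new Show[L :+: R] {
        def show(a: L :+: R) = a match {
          case Inl(l) => s"$name(${sL.show(l)})"
          case Inr(r) => sR.show(r)
        }
      }
    def project[A, B](instance: Show[B], to: A => B, from: B => A): Show[A] =
      new Show[A] { def show(a: A) = instance.show(to(a)) }
  }
  // Leaf instances; given these, Show.auto.derive (or Show[A]) can in
  // principle derive Show for case classes and sealed hierarchies.
  implicit val showInt: Show[Int] = new Show[Int] { def show(a: Int) = a.toString }
  implicit val showString: Show[String] = new Show[String] { def show(a: String) = a }
}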
|
pierzchalski/HomebrewTypeClasses
|
src/main/scala/typeclasses/homebrew/TypeClass.scala
|
Scala
|
mit
| 2,712
|
// Copyright 2014 Commonwealth Bank of Australia
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package au.com.cba.omnia.maestro.schema
package syntax
import au.com.cba.omnia.maestro.schema._
/** Three-letter currency codes, like "AUD", "USD", "CHF". */
object CurrencyCode extends Syntax {
val name = "CurrencyCode"
def likeness(s: String): Double =
if (codesISO(s) || codesOther(s)) 1.0 else 0.0
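  // e.g. likeness("AUD") == 1.0 and likeness("DEM") == 1.0 (pre-euro code),
  // while lowercase or unrecognized strings such as "aud" score 0.0.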
// ISO standard currency codes.
val codesISO = Set(
"AED", "AFN", "ALL", "AMD", "ANG", "AOA", "ARS", "AUD", "AWG", "AZN"
, "BAM", "BBD", "BDT", "BGN", "BHD", "BIF", "BMD", "BND", "BOB", "BRL"
, "BSD", "BTN", "BWP", "BYR", "BZD"
, "CAD", "CDF", "CHF", "CLP", "CNY", "COP", "CRC", "CUC", "CUP", "CVE"
, "CZK"
, "DJF", "DKK", "DOP", "DZD"
, "EGP", "ERN", "ETB", "EUR"
, "FJD", "FKP"
, "GBP", "GEL", "GGP", "GHS", "GIP", "GMD", "GNF", "GTQ", "GYD"
, "HKD", "HNL", "HRK", "HTG", "HUF"
, "IDR", "ILS", "IMP", "INR", "IQD", "IRR", "ISK"
, "JEP", "JMD", "JOD", "JPY"
, "KES", "KGS", "KHR", "KMF", "KPW", "KRW", "KWD", "KYD", "KZT"
, "LAK", "LBP", "LKR", "LRD", "LSL", "LTL", "LVL", "LYD"
, "MAD", "MDL", "MGA", "MKD", "MMK", "MNT", "MOP", "MRO", "MUR", "MVR"
, "MWK", "MXN", "MYR", "MZN"
, "NAD", "NGN", "NIO", "NOK", "NPR", "NZD"
, "OMR"
, "PAB", "PEN", "PGK", "PHP", "PKR", "PLN", "PYG"
, "QAR"
, "RON", "RSD", "RUB", "RWF"
, "SAR", "SBD", "SCR", "SDG", "SEK", "SGD", "SHP", "SLL", "SOS", "SPL"
, "SRD", "STD", "SVC", "SYP", "SZL"
, "THB", "TJS", "TMT", "TND", "TOP", "TRY", "TTD", "TVD", "TWD", "TZS"
, "UAH", "UGX", "USD", "UYU", "UZS"
, "VEF", "VND", "VUV"
, "WST"
, "XAF", "XCD", "XDR", "XOF", "XPF"
, "YER"
, "ZAR", "ZMW", "ZWD")
// Non-ISO currency codes.
// Includes pre-euro currencies, and bullion codes.
val codesOther = Set(
"CNH", "CNT", "TRL", "FIM", "SKK", "ESP", "ZWR", "MTP", "VEB"
, "CYP", "GRD", "MTL"
, "FRF", "DEM", "ITL", "YUN", "ATS", "BEF", "PTE", "NLG", "IEP"
, "XAU", "XAG", "XPT", "XPD", "XEU")
val parents: Set[Syntax] = Set(Upper)
val partitions: Set[Syntax] = Set()
}
|
toddmowen/maestro
|
maestro-schema/src/main/scala/au/com/cba/omnia/maestro/schema/syntax/Currency.scala
|
Scala
|
apache-2.0
| 2,709
|
/* Copyright 2012 Christian Douven
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package almhirt.syntax
object xml extends almhirt.xml.ToXmlOps {
}
|
chridou/almhirt
|
almhirt-common/src/main/scala/almhirt/syntax/almxml.scala
|
Scala
|
apache-2.0
| 658
|
package cats.examples.datatypes
/**
* Kleisli enables composition of functions that return a monadic value, for
 * instance an Option[Int] or an Either[String, List[Double]], without having
* functions take an Option or Either as a parameter, which can be strange and
* unwieldy.
*
* We may also have several functions which depend on some environment and want
* a nice way to compose these functions to ensure they all receive the same
* environment. Or perhaps we have functions which depend on their own “local”
* configuration and all the configurations together make up a “global”
* application configuration. How do we have these functions play nice with
* each other despite each only knowing about their own local requirements?
*
* These situations are where Kleisli is immensely helpful.
*
* See http://typelevel.org/cats/datatypes/kleisli.html
*/
object KleisliExample extends App {
// Functions
// One of the most useful properties of functions is that they *compose*.
// That is, given a function A => B and a function B => C, we can combine
// them to create a new function, A => C. It is through this compositional
// property that we are able to write many small functions and compose them
// together to create a larger one that suits our needs.
val twice: Int => Int = x => x * 2
val countCats: Int => String = x => if (x == 1) "1 cat" else s"$x cats"
val twiceAsManyCats: Int => String =
twice andThen countCats // equivalent to: countCats compose twice
// Thus
assert(twiceAsManyCats(1) == "2 cats")
// Sometimes, our functions will need to return monadic values. For instance,
// consider the following set of functions.
{
val parse: String => Option[Int] =
s => if (s.matches("-?[0-9]+")) Some(s.toInt) else None
val reciprocal: Int => Option[Double] =
i => if (i != 0) Some(1.0 / i) else None
}
// As it stands we cannot use Function1.compose (or Function1.andThen) to
// compose these two functions. The output type of parse is Option[Int]
// whereas the input type of reciprocal is Int.
// This is where Kleisli comes into play.
// Kleisli
  // At its core, Kleisli[F[_], A, B] is just a wrapper around the function
// A => F[B]. Depending on the properties of the F[_], we can do different
// things with Kleislis. For instance, if F[_] has a FlatMap[F] instance (we
// can call flatMap on F[A] values), we can compose two Kleislis much like we
// can two functions.
{
import cats.FlatMap
import cats.implicits._
final case class Kleisli[F[_], A, B](run: A => F[B]) {
def compose[Z](k: Kleisli[F, Z, A])(implicit F: FlatMap[F]): Kleisli[F, Z, B] =
Kleisli[F, Z, B](z => k.run(z).flatMap(run))
}
// Returning to our earlier example:
// Bring in cats.FlatMap[Option] instance
import cats.implicits._
val parse = Kleisli((s: String) =>
try {
Some(s.toInt)
}
catch {
case _: NumberFormatException => None
}
)
val reciprocal = Kleisli((i: Int) =>
if (i == 0) None
else Some(1.0 / i)
)
val parseAndReciprocal: Kleisli[Option, String, Double] =
reciprocal.compose(parse)
assert(parseAndReciprocal.run("1") contains 1.0)
}
// Kleisli#andThen can be defined similarly.
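  // A minimal sketch of that definition: andThen is compose with the arguments
  // flipped, requiring the same FlatMap[F] evidence.
  {
    import cats.FlatMap
    import cats.implicits._
    final case class Kleisli[F[_], A, B](run: A => F[B]) {
      def andThen[C](k: Kleisli[F, B, C])(implicit F: FlatMap[F]): Kleisli[F, A, C] =
        Kleisli[F, A, C](a => run(a).flatMap(k.run))
    }
  }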
// It is important to note that the F[_] having a FlatMap (or a Monad)
// instance is not a hard requirement - we can do useful things with weaker
// requirements. Such an example would be Kleisli#map, which only requires
// that F[_] have a Functor instance (e.g. is equipped with map:
// F[A] => (A => B) => F[B]).
// Below are some more methods on Kleisli that can be used so long as the
// constraint on F[_] is satisfied.
// Method | Constraint on `F[_]`
// --------- | -------------------
// andThen | FlatMap
// compose | FlatMap
// flatMap | FlatMap
// lower | Monad
// map | Functor
// traverse | Applicative
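  // A minimal sketch of one such weaker-constraint method: map needs only a
  // Functor[F] instance, matching the table above.
  {
    import cats.Functor
    import cats.implicits._
    final case class Kleisli[F[_], A, B](run: A => F[B]) {
      def map[C](f: B => C)(implicit F: Functor[F]): Kleisli[F, A, C] =
        Kleisli(a => run(a).map(f))
    }
  }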
// Type class instances
// The type class instances for Kleisli, like that for functions, often fix
// the input type (and the F[_]) and leave the output type free. What type
// class instances it has tends to depend on what instances the F[_] has.
// For instance, Kleisli[F, A, B] has a Functor instance so long as the chosen
// F[_] does. It has a Monad instance so long as the chosen F[_] does. The
// instances in Cats are laid out in a way such that implicit resolution will
// pick up the most specific instance it can (depending on the F[_]).
// An example of a Monad instance for Kleisli is shown below.
// Note: the example below assumes usage of the kind-projector compiler plugin
// and will not compile if it is not being used in a project.
object KleisliMonadExample {
import cats._
import cats.implicits._
import cats.data.Kleisli
// We can define a FlatMap instance for Kleisli if the F[_] we chose has a
// FlatMap instance
// Note the input type and F are fixed, with the output type left free
implicit def kleisliFlatMap[F[_], Z](implicit F: FlatMap[F]): FlatMap[Kleisli[F, Z, ?]] =
new FlatMap[Kleisli[F, Z, ?]] {
def flatMap[A, B](fa: Kleisli[F, Z, A])(f: A => Kleisli[F, Z, B]): Kleisli[F, Z, B] =
Kleisli(z => fa.run(z).flatMap(a => f(a).run(z)))
def map[A, B](fa: Kleisli[F, Z, A])(f: A => B): Kleisli[F, Z, B] =
Kleisli(z => fa.run(z).map(f))
def tailRecM[A, B](a: A)(f: A => Kleisli[F, Z, Either[A, B]]) =
Kleisli[F, Z, B]({ z => FlatMap[F].tailRecM(a) { f(_).run(z) } })
}
}
// Below is a table of some of the type class instances Kleisli can have
// depending on what instances F[_] has.
// Type class | Constraint on `F[_]`
// -------------- | -------------------
// Functor | Functor
// Apply | Apply
// Applicative | Applicative
// FlatMap | FlatMap
// Monad | Monad
// Arrow | Monad
// Split | FlatMap
// Strong | Functor
// SemigroupK* | FlatMap
// MonoidK* | Monad
// * These instances only exist for Kleisli arrows with identical input and
// output types; that is, Kleisli[F, A, A] for some type A. These instances
// use Kleisli composition as the combine operation, and Monad.pure as the
// empty value.
// Also, there is an instance of Monoid[Kleisli[F, A, B]] if there is an
// instance of Monoid[F[B]]. Monoid.combine here creates a new Kleisli arrow
// which takes an A value and feeds it into each of the combined Kleisli
// arrows, which together return two F[B] values. Then, they are combined into
// one using the Monoid[F[B]] instance.
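  // A minimal sketch of the Monoid instance described above: both arrows are
  // fed the same A, and their F[B] results are combined via Monoid[F[B]].
  {
    import cats.Monoid
    import cats.data.Kleisli
    implicit def kleisliMonoid[F[_], A, B](implicit M: Monoid[F[B]]): Monoid[Kleisli[F, A, B]] =
      new Monoid[Kleisli[F, A, B]] {
        def empty: Kleisli[F, A, B] = Kleisli(_ => M.empty)
        def combine(x: Kleisli[F, A, B], y: Kleisli[F, A, B]): Kleisli[F, A, B] =
          Kleisli(a => M.combine(x.run(a), y.run(a)))
      }
  }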
// Other uses
// Monad Transformers
// Many data types have a monad transformer equivalent that allows us to
// compose the Monad instance of the data type with any other Monad instance.
// For instance, OptionT[F[_], A] allows us to compose the monadic properties
// of Option with any other F[_], such as a List. This allows us to work with
// nested contexts/effects in a nice way (for example, in for-comprehensions).
// Kleisli can be viewed as the monad transformer for functions. Recall that
// at its essence, Kleisli[F, A, B] is just a function A => F[B], with
// niceties to make working with the value we actually care about, the B,
// easy. Kleisli allows us to take the effects of functions and have them play
// nice with the effects of any other F[_].
// This may raise the question, what exactly is the “effect” of a function?
// Well, if we take a look at any function, we can see it takes some input
// and produces some output with it, without having touched the input
// (assuming the function is pure, i.e. referentially transparent). That is,
// we take a read-only value, and produce some value with it. For this reason,
// the type class instances for functions often refer to the function as a
// Reader. For instance, it is common to hear about the Reader monad. In the
// same spirit, Cats defines a Reader type alias along the lines of:
{
import cats.data.Kleisli
// We want A => B, but Kleisli provides A => F[B]. To make the types/shapes
// match, we need an F[_] such that providing it a type A is equivalent to A
// This can be thought of as the type-level equivalent of the identity
// function.
type Id[A] = A
type Reader[A, B] = Kleisli[Id, A, B]
object Reader {
// Lifts a plain function A => B into a Kleisli, giving us access
// to all the useful methods and type class instances
def apply[A, B](f: A => B): Reader[A, B] = Kleisli[Id, A, B](f)
}
type ReaderT[F[_], A, B] = Kleisli[F, A, B]
val ReaderT = Kleisli
}
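  // A small usage sketch of the Reader alias from cats.data (Config and the
  // values below are illustrative): plain functions gain map, flatMap, etc.
  {
    import cats.data.Reader
    import cats.implicits._
    case class Config(port: Int)
    val port: Reader[Config, Int] = Reader(_.port)
    val url: Reader[Config, String] = port.map(p => s"http://localhost:$p")
    assert(url.run(Config(8080)) == "http://localhost:8080")
  }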
// The ReaderT value alias exists to allow users to use the Kleisli companion
// object as if it were ReaderT, if they were so inclined.
// The topic of functions as a read-only environment brings us to our next
// common use case of Kleisli - configuration.
// Configuration
// Functional programming advocates the creation of programs and modules by
// composing smaller, simpler modules. This philosophy intentionally mirrors
// that of function composition - write many small functions, and compose them
// to build larger ones. After all, our programs are just functions.
  // Let’s look at some example modules, where each module has its own
// configuration that is validated by a function. If the configuration is
// good, we return a Some of the module, otherwise a None. This example uses
// Option for simplicity - if you want to provide error messages or other
// failure context, consider using Either instead.
{
import cats.data.Kleisli
case class DbConfig(url: String, user: String, pass: String)
trait Db
object Db {
val fromDbConfig: Kleisli[Option, DbConfig, Db] = ???
}
case class ServiceConfig(addr: String, port: Int)
trait Service
object Service {
val fromServiceConfig: Kleisli[Option, ServiceConfig, Service] = ???
}
// We have two independent modules, a Db (allowing access to a database) and
// a Service (supporting an API to provide data over the web). Both depend
// on their own configuration parameters. Neither know or care about the
// other, as it should be. However our application needs both of these
// modules to work. It is plausible we then have a more global application
// configuration.
case class AppConfig(dbConfig: DbConfig, serviceConfig: ServiceConfig)
class App(db: Db, service: Service)
}
// As it stands, we cannot use both Kleisli validation functions together
// nicely - one takes a DbConfig, the other a ServiceConfig. That means the
// FlatMap (and by extension, the Monad) instances differ (recall the input
// type is fixed in the type class instances). However, there is a nice
// function on Kleisli called local.
{
final case class Kleisli[F[_], A, B](run: A => F[B]) {
def local[AA](f: AA => A): Kleisli[F, AA, B] = Kleisli(f.andThen(run))
}
}
// What local allows us to do is essentially “expand” our input type to a
// more “general” one. In our case, we can take a Kleisli that expects a
// DbConfig or ServiceConfig and turn it into one that expects an AppConfig,
// so long as we tell it how to go from an AppConfig to the other configs.
// Now we can create our application config validator!
{
import cats._
import cats.implicits._
final case class Kleisli[F[_], Z, A](run: Z => F[A]) {
def flatMap[B](f: A => Kleisli[F, Z, B])(implicit F: FlatMap[F]): Kleisli[F, Z, B] =
Kleisli(z => F.flatMap(run(z))(a => f(a).run(z)))
def map[B](f: A => B)(implicit F: Functor[F]): Kleisli[F, Z, B] =
Kleisli(z => F.map(run(z))(f))
def local[ZZ](f: ZZ => Z): Kleisli[F, ZZ, A] = Kleisli(f.andThen(run))
}
case class DbConfig(url: String, user: String, pass: String)
trait Db
object Db {
val fromDbConfig: Kleisli[Option, DbConfig, Db] = ???
}
case class ServiceConfig(addr: String, port: Int)
trait Service
object Service {
val fromServiceConfig: Kleisli[Option, ServiceConfig, Service] = ???
}
case class AppConfig(dbConfig: DbConfig, serviceConfig: ServiceConfig)
class App(db: Db, service: Service)
def appFromAppConfig: Kleisli[Option, AppConfig, App] =
for {
db <- Db.fromDbConfig.local[AppConfig](_.dbConfig)
sv <- Service.fromServiceConfig.local[AppConfig](_.serviceConfig)
} yield new App(db, sv)
}
// What if we need a module that doesn’t need any config validation, say a
// strategy to log events? We would have such a module be instantiated from
// a config directly, without an Option - we would have something like
// Kleisli[Id, LogConfig, Log] (alternatively, Reader[LogConfig, Log]).
// However, this won’t play nice with our other Kleislis since those use
// Option instead of Id.
// We can define a lift method on Kleisli (available already on Kleisli in
// Cats) that takes a type parameter G[_] such that G has an Applicative
// instance and lifts a Kleisli value such that its output type is G[F[B]].
// This allows us to then lift a Reader[A, B] into a Kleisli[G, A, B]. Note
// that lifting a Reader[A, B] into some G[_] is equivalent to having a
// Kleisli[G, A, B] since Reader[A, B] is just a type alias for
// Kleisli[Id, A, B], and type Id[A] = A so G[Id[A]] is equivalent to G[A].
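// A hand-rolled sketch of that lifting for the logging example (LogConfig and
// Log are hypothetical; this is written directly rather than via Cats’
// built-in lift):
{
  import cats.data.Kleisli
  type Id[A] = A
  type Reader[A, B] = Kleisli[Id, A, B]
  case class LogConfig(path: String)
  trait Log
  // A module that needs no validation: a plain Reader.
  val fromLogConfig: Reader[LogConfig, Log] = Kleisli[Id, LogConfig, Log](_ => new Log {})
  // Since Reader[A, B] = Kleisli[Id, A, B] and Option[Id[B]] = Option[B],
  // wrapping the output in Option yields a Kleisli[Option, LogConfig, Log]
  // that composes with the Option-based validators above.
  val liftedFromLogConfig: Kleisli[Option, LogConfig, Log] =
    Kleisli((cfg: LogConfig) => Option(fromLogConfig.run(cfg)))
}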
}
|
carwynellis/cats-examples
|
src/main/scala/cats/examples/datatypes/KleisliExample.scala
|
Scala
|
mit
| 13,664
|
package io.getquill
import io.getquill.context.Context
// Testing we are passing type params explicitly into JdbcContextBase, otherwise
// this file will fail to compile
trait BaseExtensions {
val context: Context[PostgresDialect, _]
}
trait JDBCExtensions extends BaseExtensions {
override val context: PostgresZioJdbcContext[_]
}
|
getquill/quill
|
quill-jdbc-zio/src/test/scala/io/getquill/TypeParamExtensionTest.scala
|
Scala
|
apache-2.0
| 340
|
/* Copyright 2009-2016 EPFL, Lausanne */
import leon.annotation.extern
object Option {
sealed abstract class Option[T] {
def get: T = {
require(isDefined)
//this match { case Some(x) => x }
this.asInstanceOf[Some[T]].x // slightly more efficient than pattern matching: no type checking
}
def isEmpty = this match {
case Some(_) => false
case None() => true
}
def nonEmpty = !isEmpty
def isDefined = nonEmpty
}
case class Some[T](x: T) extends Option[T]
case class None[T]() extends Option[T]
case class Dummy1(x: Int)
case class Dummy2(opt: Option[Int])
def foo(x: Int): Option[Int] = {
if (x % 2 == 1) Some(x)
    else None[Int]()
}
def bar(x: Int): Option[Dummy1] = {
if (x % 2 != 0) Some(Dummy1(x))
    else None[Dummy1]()
}
def baz(opt: Option[Int]): Dummy2 = {
Dummy2(opt)
}
def test1(): Int = {
val o1 = foo(1)
val o2 = foo(2)
if (o1.nonEmpty && o2.isEmpty && o1.get == 1) 0
else 1
} ensuring { _ == 0 }
def test2(): Int = {
val o1 = bar(1)
val o2 = bar(2)
if (o1.nonEmpty && o2.isEmpty && o1.get.x == 1) 0
else 1
} ensuring { _ == 0 }
def test3(): Int = {
val o = baz(Some(42))
if (o.opt.isDefined && o.opt.get == 42) 0
else 1
} ensuring { _ == 0 }
def _main() = {
test1() + test2() + test3()
} ensuring { _ == 0 }
@extern
def main(args: Array[String]): Unit = _main()
}
|
epfl-lara/leon
|
src/test/resources/regression/genc/valid/Option.scala
|
Scala
|
gpl-3.0
| 1,473
|
package rxthings.sensors
import spray.json.{DefaultJsonProtocol, RootJsonFormat}
object DS18B20ReadingModels {
case class ValidDS18B20Reading(device: String, value: Int) extends DS18B20Reading {
val t = System.currentTimeMillis()
    lazy val c: Double = value / 1000.0 // value is in millidegrees; use 1000.0 to avoid integer division
lazy val f: Double = c * 1.8 + 32
}
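  // Worked example (hypothetical reading): value = 21500 millidegrees
  // gives c = 21.5 and f = 21.5 * 1.8 + 32 = 70.7.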
case class InvalidDS18B20Reading(device: String) extends DS18B20Reading
}
object DS18B20ReadingProtocol extends DefaultJsonProtocol {
import DS18B20ReadingModels._
implicit val validFormat: RootJsonFormat[ValidDS18B20Reading] = jsonFormat(ValidDS18B20Reading, "device", "value")
implicit val invalidFormat: RootJsonFormat[InvalidDS18B20Reading] = jsonFormat1(InvalidDS18B20Reading)
}
|
jw3/DS18B20
|
src/main/scala/rxthings/sensors/Models.scala
|
Scala
|
apache-2.0
| 715
|
/*
* Copyright 2015 [See AUTHORS file for list of authors]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.lang.Math.abs
import java.util.Random
import java.util.concurrent.atomic.{AtomicInteger, AtomicIntegerArray}
import java.util.concurrent.{Callable, ExecutorService, Executors}
import scala.collection.JavaConversions._
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
/**
* Auxiliary helper functions.
*/
object AuxiliaryFunctions{
/**
* Randomly permute an array using the Knuth permutation algorithm.
* @param a Array to permute.
* @param rand Random object to use for the permutation.
   * @return Permuted array.
*/
def KnuthPermute(a: Array[Int], rand: Random) : Array[Int] = {
val numElements = a.length
var i = 0
var j = 0
var temp = 0
    while (i < numElements-1){
      // Draw j uniformly from [i, numElements-1] (Fisher-Yates/Knuth shuffle);
      // restricting j to [i+1, numElements-1] would be Sattolo's algorithm,
      // which produces only cyclic permutations.
      j = rand.nextInt(numElements-i) + i
temp = a(i)
a(i) = a(j)
a(j) = temp
i += 1
}
a
}
/**
* Generate a random permutation in parallel.
* @param nThreads Number of threads to participate in the permutation operation.
* @param threads Thread pool consisting of at least nThreads threads.
* @param randSeed Random seed to use for the permutation.
   * @param nElements Number of elements to permute.
   * @return Tuple of the permuted ordering and its inverse: invOrder(i) = j iff order(j) = i.
*/
def parallelRandomPermutation(nThreads: Int, threads: ExecutorService, randSeed: Int, nElements: Int)
: (Array[Int], Array[Int]) = {
val masterRand : Random = new Random(randSeed)
val rands : Array[Random] = (0 until nThreads).map(_ => new Random(masterRand.nextLong())).toArray
val orderingStartPoint : Array[Int] = new Array[Int](nThreads)
val ordering : Array[Int] = new Array[Int](nElements)
val invOrder : Array[Int] = new Array[Int](nElements)
var i = 0
var hasChanged = true
// First local permute
val tasks_firstLocalPermute = (0 until nThreads).map(threadID => new Callable[Array[Int]]{
override def call() = {
val startIndex: Int = math.floor( threadID .toDouble / nThreads.toDouble * nElements.toDouble).toInt
val endIndex : Int = if (threadID == nThreads-1) nElements else math.floor((threadID + 1).toDouble / nThreads.toDouble * nElements.toDouble).toInt
KnuthPermute((startIndex until endIndex).toArray, rands(threadID))
}
})
val firstPermuteArrays : Array[Array[Int]] = threads.invokeAll(tasks_firstLocalPermute).map(_.get).toArray
// Collect across processors, then second permute
val tasks_collectAndPermute = (0 until nThreads).map(threadID => new Callable[Array[Int]]{
override def call() = {
var i = 0
var j = 0
var bucketPointer = 0
var bucketNumElements = 0
var localNumElements = 0
var startIndex = 0
var endIndex = 0
while (i < nThreads){
bucketPointer = (i + threadID) % nThreads
bucketNumElements = firstPermuteArrays(bucketPointer).length
startIndex = math.floor( threadID .toDouble / nThreads.toDouble * bucketNumElements.toDouble).toInt
endIndex = if (threadID == nThreads-1) bucketNumElements else math.floor((threadID + 1).toDouble / nThreads.toDouble * bucketNumElements.toDouble).toInt
localNumElements += (endIndex - startIndex)
i += 1
}
val buffer : Array[Int] = new Array[Int](localNumElements)
i = 0
var counter = 0
while (i < nThreads){
bucketPointer = (i + threadID) % nThreads
bucketNumElements = firstPermuteArrays(bucketPointer).length
startIndex = math.floor( threadID .toDouble / nThreads.toDouble * bucketNumElements.toDouble).toInt
endIndex = if (threadID == nThreads-1) bucketNumElements else math.floor((threadID + 1).toDouble / nThreads.toDouble * bucketNumElements.toDouble).toInt
j = startIndex
while (j < endIndex){
buffer(counter) = firstPermuteArrays(bucketPointer)(j)
j += 1
counter += 1
}
i += 1
}
KnuthPermute(buffer.toArray, rands(threadID))
}
})
val secondPermuteArrays : Array[Array[Int]] = threads.invokeAll(tasks_collectAndPermute).map(_.get).toArray
// Create ordering
i = 1
while (i < nThreads){
orderingStartPoint(i) = orderingStartPoint(i-1) + secondPermuteArrays(i-1).length
i += 1
}
val tasks_ordering = (0 until nThreads).map(threadID => new Callable[Boolean]{
override def call() = {
var hasChanged = false
var index = 0
val numElements = secondPermuteArrays(threadID).length
try {
while (index < numElements) {
if (ordering(orderingStartPoint(threadID) + index) != secondPermuteArrays(threadID)(index)) {
ordering(orderingStartPoint(threadID) + index) = secondPermuteArrays(threadID)(index)
hasChanged = true
}
index += 1
}
hasChanged
}catch{
case e: Exception =>{
println(s"Thread $threadID, Error @ index = $index," +
s"orderingStartPoint($threadID) = ${orderingStartPoint(threadID)}, ordering.length = ${ordering.length}, secondPermuteArrays($threadID).length = ${secondPermuteArrays(threadID).length}")
e.printStackTrace()
System.exit(-1)
true
}
}
}
})
hasChanged = true
while (hasChanged) hasChanged = threads.invokeAll(tasks_ordering).map(_.get).reduce(_ | _)
// Create invOrder
val tasks_invOrder = (0 until nThreads).map(threadID => new Callable[Boolean]{
override def call() = {
var hasChanged = false
var index = threadID
while (index < nElements){
if (invOrder(ordering(index)) != index){
invOrder(ordering(index)) = index
hasChanged = true
}
index += nThreads
}
hasChanged
}
})
hasChanged = true
while (hasChanged) hasChanged = threads.invokeAll(tasks_invOrder).map(_.get).reduce(_ | _)
(ordering, invOrder)
}
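  // Hedged usage sketch (not part of the original file): permuting 10 elements
  // across 4 threads; the ordering and its inverse satisfy invOrder(order(i)) == i.
  //
  //   val pool = Executors.newFixedThreadPool(4)
  //   val (order, invOrder) = parallelRandomPermutation(4, pool, randSeed = 42, nElements = 10)
  //   assert(order.indices.forall(i => invOrder(order(i)) == i))
  //   pool.shutdown()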
/**
* Computes the objective value of a clustering assignment to a given graph.
* @param nThreads Number of threads to use for this check.
* @param threads Thread pool consisting of at least nThreads threads.
* @param graph Graph on which the correlation clustering has been done.
* @param clusterID Assignment of vertices to clusters.
* @param clusterID_atomic Atomic array corresponding to clusterID. This is taken as the ground truth.
* @return correlation clustering objective value
*/
def computeObjective(nThreads: Int,
threads: ExecutorService,
graph: SparseGraph,
clusterID : Array[Int],
clusterID_atomic: AtomicIntegerArray) : Long = {
val nVertices: Int = graph.numNodes()
val tasks = (0 until nThreads).map(threadID => new Callable[Long] {
override def call() = {
var objVal : Long = 0
var index = threadID
var v = 0
var vClusterID = 0
var ui = 0
var u = 0
var uClusterID = 0
var uNSucc = 0
var wi = 0
var nSucc = 0
val clusterHash : mutable.HashSet[Int] = new mutable.HashSet[Int]()
while (index < nVertices){
v = index
if (clusterID(v)==0) clusterID(v) = clusterID_atomic.get(v)
vClusterID = abs(clusterID(v))
nSucc = graph.nSuccessor(v)
// First, count bad +ve edges around this vertex
ui = 0
while (ui < nSucc){
u = graph.succ(v, ui)
if (clusterID(u)==0) clusterID(u) = clusterID_atomic.get(u)
uClusterID = abs(clusterID(u))
if (vClusterID != uClusterID){
// Share edge but different cluster => bad +ve
objVal += 1
}
ui += 1
}
// If center, count the bad -ve edges in the cluster
if (clusterID(v) > 0){
clusterHash.clear()
clusterHash.add(v)
ui = 0
while (ui < nSucc){
u = graph.succ(v, ui)
if (clusterID(u)==0) clusterID(u) = clusterID_atomic.get(u)
uClusterID = abs(clusterID(u))
if (uClusterID == vClusterID) clusterHash.add(u)
ui += 1
}
val clusterSize = clusterHash.size - 1
ui = 0
while (ui < nSucc){
u = graph.succ(v, ui)
if (clusterHash.contains(u)){
wi = 0
uNSucc = graph.nSuccessor(u)
var clusterNeighbors = 0
while (wi < uNSucc){
if (clusterHash.contains(graph.succ(u, wi))) clusterNeighbors += 1
wi += 1
}
objVal += (clusterSize - clusterNeighbors)
}
ui += 1
}
}
index += nThreads
}
objVal
}
})
threads.invokeAll(tasks).map(_.get()).reduce(_+_) / 2
}
/**
* Given two sets of cluster IDs, check if they are equivalent
* @param nThreads Number of threads to use for this check.
* @param threads Thread pool consisting of at least nThreads threads
* @param cid1 First set of cluster IDs.
* @param cidA1 Atomic array corresponding to first set of cluster IDs. This is taken as the ground truth.
* @param cid2 Second set of cluster IDs.
* @param cidA2 Atomic array corresponding to the second set of cluster IDs. This is taken as the ground truth.
* @return True iff the two sets of cluster IDs are the same.
*/
def checkEquivalence(nThreads: Int,
threads: ExecutorService,
cid1: Array[Int], cidA1: AtomicIntegerArray,
cid2: Array[Int], cidA2: AtomicIntegerArray) : Boolean = {
val nElements = cid1.length
if (cid2.length != nElements || cidA1.length() != nElements || cidA2.length() != nElements){
System.out.println(s"Lengths of cluster ID arrays do not match")
false
}else {
val tasks_check = (0 until nThreads).map(threadID => new Callable[Boolean] {
override def call() = {
var isEquivalent = true
          var index = threadID // each thread strides over its own residue class, as in the other parallel loops
var id1 = 0
var id2 = 0
while (index < nElements){
id1 = if (cid1(index)==0) cidA1.get(index) else cid1(index)
id2 = if (cid2(index)==0) cidA2.get(index) else cid2(index)
if (id1 != id2){
System.out.println(s"cid1($index) == $id1 != $id2 == cid2($index)")
isEquivalent = false
}
index += nThreads
}
isEquivalent
}
})
      // All threads must report equivalence (AND), so a single mismatch fails the check.
      threads.invokeAll(tasks_check).map(_.get).reduce(_ & _)
}
}
}
|
pxinghao/ParallelCorrelationClustering
|
src/main/scala/AuxiliaryFunctions.scala
|
Scala
|
apache-2.0
| 11,709
|
package org.scalatra
package servlet
import java.net.{ MalformedURLException, URL }
import java.{ util => jutil }
import javax.servlet.http.{ HttpServlet, HttpServletRequest }
import javax.servlet.{ DispatcherType, Filter, ServletContext }
import scala.collection.mutable
/**
* Extension methods to the standard ServletContext.
*/
case class RichServletContext(sc: ServletContext) extends AttributesMap {
protected def attributes: ServletContext = sc
/**
* Optionally returns a URL to the resource mapped to the given path. This
* is a wrapper around `getResource`.
*
* @param path the path to the resource
* @return the resource located at the path, or None if there is no resource
* at that path.
*/
  def resource(path: String): Option[URL] = {
    // getResource returns null when no resource is mapped to the path and
    // throws MalformedURLException for malformed paths; the exception is
    // allowed to propagate unchanged.
    Option(sc.getResource(path))
  }
/**
* Optionally returns the resource mapped to the request's path.
*
* @param req the request
* @return the resource located at the result of concatenating the request's
* servlet path and its path info, or None if there is no resource at that path.
*/
def resource(req: HttpServletRequest): Option[URL] = {
val path = req.getServletPath + (Option(req.getPathInfo) getOrElse "")
resource(path)
}
private[this] def pathMapping(urlPattern: String): String = urlPattern match {
case s if s.endsWith("/*") => s
case s if s.endsWith("/") => s + "*"
case s => s + "/*"
}
/**
* Mounts a handler to the servlet context. Must be an HttpServlet or a
* Filter.
*
* @param handler the handler to mount
*
* @param urlPattern the URL pattern to mount. Will be appended with `\\/\\*` if
* not already, as path-mapping is the most natural fit for Scalatra.
* If you don't want path mapping, use the native Servlet API.
*
* @param name the name of the handler
*/
def mount(handler: Handler, urlPattern: String, name: String): Unit = {
mount(handler, urlPattern, name, 1)
}
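  // Hedged usage sketch (hypothetical ArticlesServlet; not part of the original
  // file): from a ScalatraBootstrap, a servlet can be mounted like so:
  //
  //   class ScalatraBootstrap extends LifeCycle {
  //     override def init(context: ServletContext): Unit = {
  //       RichServletContext(context).mount(new ArticlesServlet, "/articles/*", "articles")
  //     }
  //   }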
/**
* Mounts a handler to the servlet context. Must be an HttpServlet or a
* Filter.
*
* @param handler the handler to mount
*
* @param urlPattern the URL pattern to mount. Will be appended with `\\/\\*` if
* not already, as path-mapping is the most natural fit for Scalatra.
* If you don't want path mapping, use the native Servlet API.
*
* @param name the name of the handler
*/
def mount(handler: Handler, urlPattern: String, name: String, loadOnStartup: Int): Unit = {
val pathMap = pathMapping(urlPattern)
handler match {
case servlet: HttpServlet => mountServlet(servlet, pathMap, name, loadOnStartup)
case filter: Filter => mountFilter(filter, pathMap, name)
case _ => sys.error("Don't know how to mount this service to a servletContext: " + handler.getClass)
}
}
def mount(handler: Handler, urlPattern: String): Unit = mount(handler, urlPattern, 1)
def mount(handler: Handler, urlPattern: String, loadOnStartup: Int): Unit = {
mount(handler, urlPattern, handler.getClass.getName, loadOnStartup)
}
def mount[T](
handlerClass: Class[T],
urlPattern: String,
name: String,
loadOnStartup: Int = 1): Unit = {
    val pathMap = pathMapping(urlPattern)
if (classOf[HttpServlet].isAssignableFrom(handlerClass)) {
mountServlet(handlerClass.asInstanceOf[Class[HttpServlet]], pathMap, name, loadOnStartup)
} else if (classOf[Filter].isAssignableFrom(handlerClass)) {
mountFilter(handlerClass.asInstanceOf[Class[Filter]], pathMap, name)
} else {
sys.error("Don't know how to mount this service to a servletContext: " + handlerClass)
}
}
def mount[T](handlerClass: Class[T], urlPattern: String): Unit = mount[T](handlerClass, urlPattern, 1)
def mount[T](handlerClass: Class[T], urlPattern: String, loadOnStartup: Int): Unit = {
mount(handlerClass, urlPattern, handlerClass.getName, loadOnStartup)
}
private def mountServlet(
servlet: HttpServlet,
urlPattern: String,
name: String,
loadOnStartup: Int): Unit = {
val reg = Option(sc.getServletRegistration(name)) getOrElse {
val r = sc.addServlet(name, servlet)
servlet match {
case s: HasMultipartConfig =>
r.setMultipartConfig(s.multipartConfig.toMultipartConfigElement)
case _ =>
}
if (servlet.isInstanceOf[ScalatraAsyncSupport])
r.setAsyncSupported(true)
r.setLoadOnStartup(loadOnStartup)
r
}
reg.addMapping(urlPattern)
}
private def mountServlet(
servletClass: Class[HttpServlet],
urlPattern: String,
name: String,
loadOnStartup: Int): Unit = {
val reg = Option(sc.getServletRegistration(name)) getOrElse {
val r = sc.addServlet(name, servletClass)
// since we only have a Class[_] here, we can't access the MultipartConfig value
// if (classOf[HasMultipartConfig].isAssignableFrom(servletClass))
if (classOf[ScalatraAsyncSupport].isAssignableFrom(servletClass)) {
r.setAsyncSupported(true)
}
r.setLoadOnStartup(loadOnStartup)
r
}
reg.addMapping(urlPattern)
}
private def mountFilter(filter: Filter, urlPattern: String, name: String): Unit = {
val reg = Option(sc.getFilterRegistration(name)) getOrElse {
val r = sc.addFilter(name, filter)
if (filter.isInstanceOf[ScalatraAsyncSupport])
r.setAsyncSupported(true)
r
}
// We don't have an elegant way of threading this all the way through
// in an abstract fashion, so we'll dispatch on everything.
val dispatchers = jutil.EnumSet.allOf(classOf[DispatcherType])
reg.addMappingForUrlPatterns(dispatchers, true, urlPattern)
}
private def mountFilter(filterClass: Class[Filter], urlPattern: String, name: String): Unit = {
val reg = Option(sc.getFilterRegistration(name)) getOrElse {
val r = sc.addFilter(name, filterClass)
if (classOf[ScalatraAsyncSupport].isAssignableFrom(filterClass)) {
r.setAsyncSupported(true)
}
r
}
// We don't have an elegant way of threading this all the way through
// in an abstract fashion, so we'll dispatch on everything.
val dispatchers = jutil.EnumSet.allOf(classOf[DispatcherType])
reg.addMappingForUrlPatterns(dispatchers, true, urlPattern)
}
/**
* A free form string representing the environment.
* `org.scalatra.Environment` is looked up as a system property, and if
* absent, as an init parameter. The default value is `DEVELOPMENT`.
*/
def environment: String = {
sys.props.get(EnvironmentKey) orElse initParameters.get(EnvironmentKey) getOrElse ("DEVELOPMENT")
}
object initParameters extends mutable.Map[String, String] {
def get(key: String): Option[String] = Option(sc.getInitParameter(key))
def iterator: Iterator[(String, String)] = {
val theInitParams = sc.getInitParameterNames
new Iterator[(String, String)] {
override def hasNext: Boolean = theInitParams.hasMoreElements
override def next(): (String, String) = {
val nm = theInitParams.nextElement()
(nm, sc.getInitParameter(nm))
}
}
}
def +=(kv: (String, String)): this.type = {
sc.setInitParameter(kv._1, kv._2)
this
}
def -=(key: String): this.type = {
sc.setInitParameter(key, null)
this
}
}
def contextPath: String = sc.getContextPath
}
|
lightvector/scalatra
|
core/src/main/scala/org/scalatra/servlet/RichServletContext.scala
|
Scala
|
bsd-2-clause
| 7,594
|
package us.feliscat.time
import scala.collection.mutable.ListBuffer
/**
* <pre>
* Created on 3/14/15.
* </pre>
*
* @author K.Sakamoto
*/
object TimeMerger {
def needToUpdate(yAOpt: Option[Int], yBOpt: Option[Int], isALessThanB: Boolean): Boolean = {
yAOpt match {
case Some(yA) =>
yBOpt match {
case Some(yB) =>
if (isALessThanB) {
yA < yB
} else {
yB < yA
}
case None =>
true
}
case None =>
false
}
}
private def merge(input: Seq[TimeTmp], isUnion: Boolean): TimeTmp = {
var beginTimeTmp = Option.empty[Int]
var endTimeTmp = Option.empty[Int]
val beginTimeTextBuffer = ListBuffer.empty[String]
val endTimeTextBuffer = ListBuffer.empty[String]
val nonUnion: Boolean = !isUnion
input foreach {
time: TimeTmp =>
val beginTimeOpt: Option[Int] = time.beginTime
val endTimeOpt: Option[Int] = time.endTime
beginTimeTextBuffer ++= time.beginTimeTextList
endTimeTextBuffer ++= time.endTimeTextList
if (needToUpdate(beginTimeOpt, beginTimeTmp, isALessThanB = isUnion)) {
beginTimeTmp = beginTimeOpt
}
if (needToUpdate(endTimeOpt, endTimeTmp, isALessThanB = nonUnion)) {
endTimeTmp = endTimeOpt
}
}
new TimeTmp(
beginTimeTmp,
endTimeTmp,
beginTimeTextBuffer.result.distinct,
endTimeTextBuffer.result.distinct)
}
def union(input: Seq[TimeTmp]): TimeTmp = {
merge(input, isUnion = true)
}
def intersect(input: Seq[TimeTmp]): TimeTmp = {
merge(input, isUnion = false)
}
}
|
ktr-skmt/FelisCatusZero-multilingual
|
libraries/src/main/scala/us/feliscat/time/TimeMerger.scala
|
Scala
|
apache-2.0
| 1,694
|
package org.jetbrains.plugins.dotty.lang.parser.parsing.types
import org.jetbrains.plugins.dotty.lang.parser.DottyElementTypes._
import org.jetbrains.plugins.scala.lang.parser.parsing.builder.ScalaPsiBuilder
/**
* @author adkozlov
*/
/*
* RefinedType ::= WithType {[nl] Refinement}
*/
object RefinedType extends org.jetbrains.plugins.scala.lang.parser.parsing.types.Type {
override protected def infixType = InfixType
override def parse(builder: ScalaPsiBuilder, star: Boolean, isPattern: Boolean): Boolean = {
val marker = builder.mark()
if (!WithType.parse(builder, star, isPattern)) {
marker.drop()
return false
}
var isDone = false
while (Refinement.parse(builder)) {
isDone = true
}
if (isDone) {
marker.done(REFINED_TYPE)
} else {
marker.drop()
}
true
}
}
|
ilinum/intellij-scala
|
src/org/jetbrains/plugins/dotty/lang/parser/parsing/types/RefinedType.scala
|
Scala
|
apache-2.0
| 850
|