| code (string, lengths 5–1M) | repo_name (string, lengths 5–109) | path (string, lengths 6–208) | language (string, 1 class) | license (string, 15 classes) | size (int64, 5–1M) |
|---|---|---|---|---|---|
package com.seanshubin.todo.persistence.domain
import java.io._
import java.nio.charset.Charset
import scala.annotation.tailrec
object IoUtil {
def inputStreamToString(inputStream: InputStream, charset: Charset): String = {
val bytes = inputStreamToBytes(inputStream)
new String(bytes, charset)
}
def stringToInputStream(s: String, charset: Charset): InputStream = {
new ByteArrayInputStream(s.getBytes(charset))
}
def inputStreamToBytes(inputStream: InputStream): Array[Byte] = {
val outputStream = new ByteArrayOutputStream
feedInputStreamToOutputStream(inputStream, outputStream)
val byteArray = outputStream.toByteArray
byteArray
}
def bytesToOutputStream(bytes: Seq[Byte], outputStream: OutputStream): Unit = {
val inputStream = new ByteArrayInputStream(bytes.toArray)
feedInputStreamToOutputStream(inputStream, outputStream)
}
def feedInputStreamToOutputStream(inputStream: InputStream, outputStream: OutputStream) {
@tailrec
def loop(byte: Int) {
if (byte != -1) {
outputStream.write(byte)
loop(inputStream.read())
}
}
loop(inputStream.read())
}
}
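// Usage sketch (not part of the original file): round-trip a String through the
// helpers above using UTF-8, just to illustrate how the conversions compose.
object IoUtilUsageExample {
  def main(args: Array[String]): Unit = {
    val utf8 = Charset.forName("UTF-8")
    val in = IoUtil.stringToInputStream("hello", utf8)
    // inputStreamToString drains the stream and decodes the bytes back to text
    assert(IoUtil.inputStreamToString(in, utf8) == "hello")
  }
}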
|
SeanShubin/todo-persistence
|
domain/src/main/scala/com/seanshubin/todo/persistence/domain/IoUtil.scala
|
Scala
|
unlicense
| 1,162
|
package org.scalaide.debug.internal.extensions
import org.scalaide.util.eclipse.EclipseUtils
import org.scalaide.debug.DebugEventHandler
object EventHandlerMapping {
final val EventHandlerId = "org.scala-ide.sdt.debug.eventHandler"
/**
* Returns all existing event handler extensions mapped to the
* [[EventHandlerMapping]] class.
*/
def mappings: Seq[EventHandlerMapping] = {
val elems = EclipseUtils.configElementsForExtension(EventHandlerId)
elems flatMap { e ⇒
EclipseUtils.withSafeRunner(s"Error while trying to retrieve information from extension '$EventHandlerId'.") {
EventHandlerMapping(
e.getAttribute("id"),
e.getAttribute("name")
)(e.createExecutableExtension("class").asInstanceOf[DebugEventHandler])
}
}
}
}
/**
* A mapping for an event handler that allows easy access to the defined
* configuration. For documentation of the defined fields, see the event handler
* extension point.
*/
case class EventHandlerMapping
(id: String, name: String)
(unsafeInstanceAccess: DebugEventHandler) {
/**
* Gives access to the actual event handler instance. Because these instances
* can be defined by third party plugins, they need to be executed in a safe
* mode to protect the IDE against corruption.
*
* If an error occurs in the passed function, `None` is returned, otherwise
* the result of the function.
*/
def withInstance[A](f: DebugEventHandler ⇒ A): Option[A] = {
EclipseUtils.withSafeRunner(s"Error while executing debug event handler '$name'.") {
f(unsafeInstanceAccess)
}
}
}
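// Usage sketch (not part of the original file): collect the names of all
// registered event handlers, touching each instance only through the safe
// runner so a failing third-party handler yields None instead of crashing
// the IDE. The object name is illustrative only.
object EventHandlerMappingUsageExample {
  def reachableHandlerNames: Seq[String] =
    EventHandlerMapping.mappings.flatMap(m => m.withInstance(_ => m.name))
}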
|
Kwestor/scala-ide
|
org.scala-ide.sdt.debug/src/org/scalaide/debug/internal/extensions/EventHandlerMapping.scala
|
Scala
|
bsd-3-clause
| 1,629
|
/*
* Copyright 2019 Spotify AB.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.scio.coders
import java.io.{EOFException, InputStream, OutputStream}
import java.nio.file.Path
import java.util.concurrent.atomic.AtomicInteger
import com.esotericsoftware.kryo.KryoException
import com.esotericsoftware.kryo.io.{InputChunked, OutputChunked}
import com.esotericsoftware.kryo.serializers.JavaSerializer
import com.google.protobuf.{ByteString, Message}
import com.spotify.scio.coders.instances.kryo.{GrpcSerializers => grpc, _}
import com.spotify.scio.options.ScioOptions
import com.twitter.chill._
import com.twitter.chill.algebird.AlgebirdRegistrar
import com.twitter.chill.protobuf.ProtobufSerializer
import org.apache.avro.generic.GenericRecord
import org.apache.avro.specific.SpecificRecordBase
import org.apache.beam.sdk.coders.{AtomicCoder, CoderException => BCoderException, InstantCoder}
import org.apache.beam.sdk.io.gcp.bigquery.TableRowJsonCoder
import org.apache.beam.sdk.options.{PipelineOptions, PipelineOptionsFactory}
import org.apache.beam.sdk.util.VarInt
import org.apache.beam.sdk.util.common.ElementByteSizeObserver
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.io.{
ByteStreams,
CountingOutputStream
}
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.reflect.ClassPath
import org.joda.time.{DateTime, LocalDate, LocalDateTime, LocalTime}
import org.slf4j.LoggerFactory
import scala.jdk.CollectionConverters._
import scala.collection.mutable
import scala.collection.compat.extra.Wrappers
private object KryoRegistrarLoader {
private[this] val logger = LoggerFactory.getLogger(this.getClass)
def load(k: Kryo): Unit = {
logger.debug("Loading KryoRegistrars: " + registrars.mkString(", "))
registrars.foreach(_(k))
}
private val registrars: Seq[IKryoRegistrar] = {
logger.debug("Initializing KryoRegistrars")
val classLoader = Thread.currentThread().getContextClassLoader
ClassPath
.from(classLoader)
.getAllClasses
.asScala
.toSeq
.filter(_.getName.endsWith("KryoRegistrar"))
.flatMap { clsInfo =>
val optCls: Option[IKryoRegistrar] =
try {
val cls = clsInfo.load()
if (classOf[AnnotatedKryoRegistrar] isAssignableFrom cls) {
Some(cls.getConstructor().newInstance().asInstanceOf[IKryoRegistrar])
} else {
None
}
} catch {
case _: Throwable => None
}
optCls
}
}
}
object ScioKryoRegistrar {
private val logger = LoggerFactory.getLogger(this.getClass)
}
/**
* serializers we've written in Scio and want to add to Kryo serialization
* @see com.spotify.scio.coders.instances.serializers
*/
final private class ScioKryoRegistrar extends IKryoRegistrar {
import ScioKryoRegistrar.logger
override def apply(k: Kryo): Unit = {
logger.debug("Loading common Kryo serializers...")
k.forClass(new CoderSerializer(InstantCoder.of()))
k.forClass(new CoderSerializer(TableRowJsonCoder.of()))
// Java Iterable/Collection are missing proper equality check, use custom CBF as a
// workaround
k.register(
classOf[Wrappers.JIterableWrapper[_]],
new JTraversableSerializer[Any, Iterable[Any]]()(new JIterableWrapperCBF[Any])
)
k.register(
classOf[Wrappers.JCollectionWrapper[_]],
new JTraversableSerializer[Any, Iterable[Any]]()(new JCollectionWrapperCBF[Any])
)
// Wrapped Java collections may have immutable implementations, i.e. Guava, treat them
// as regular Scala collections as a workaround
k.register(
classOf[Wrappers.JListWrapper[_]],
new JTraversableSerializer[Any, mutable.Buffer[Any]]
)
k.forSubclass[SpecificRecordBase](new SpecificAvroSerializer)
k.forSubclass[GenericRecord](new GenericAvroSerializer)
k.forSubclass[Message](new ProtobufSerializer)
k.forClass[LocalDate](new JodaLocalDateSerializer)
k.forClass[LocalTime](new JodaLocalTimeSerializer)
k.forClass[LocalDateTime](new JodaLocalDateTimeSerializer)
k.forClass[DateTime](new JodaDateTimeSerializer)
k.forSubclass[Path](new JPathSerializer)
k.forSubclass[ByteString](new ByteStringSerializer)
k.forClass(new KVSerializer)
k.forClass[io.grpc.Status](new grpc.StatusSerializer)
k.forSubclass[io.grpc.StatusRuntimeException](new grpc.StatusRuntimeExceptionSerializer)
k.addDefaultSerializer(classOf[Throwable], new JavaSerializer)
()
}
}
final private[scio] class KryoAtomicCoder[T](private val options: KryoOptions)
extends AtomicCoder[T] {
import KryoAtomicCoder._
private[this] val instanceId = KryoAtomicCoder.nextInstanceId()
override def encode(value: T, os: OutputStream): Unit =
withKryoState(instanceId, options) { kryoState =>
if (value == null) {
throw new BCoderException("cannot encode a null value")
}
VarInt.encode(Header, os)
val chunked = kryoState.outputChunked
chunked.setOutputStream(os)
try {
kryoState.kryo.writeClassAndObject(chunked, value)
chunked.endChunks()
chunked.flush()
} catch {
case ke: KryoException =>
// make sure that the Kryo output buffer is cleared in case that we can recover from
// the exception (e.g. EOFException which denotes buffer full)
chunked.clear()
ke.getCause match {
case ex: EOFException => throw ex
case _ => throw ke
}
}
}
override def decode(is: InputStream): T = withKryoState(instanceId, options) { kryoState =>
val chunked = kryoState.inputChunked
val o = if (VarInt.decodeInt(is) == Header) {
chunked.setInputStream(is)
kryoState.kryo.readClassAndObject(chunked)
} else {
kryoState.kryo.readClassAndObject(new Input(chunked.getBuffer))
}
o.asInstanceOf[T]
}
// This method is called by PipelineRunner to sample elements in a PCollection and estimate
// size. This could be expensive for collections with a small number of very large elements.
override def registerByteSizeObserver(value: T, observer: ElementByteSizeObserver): Unit =
value match {
// (K, Iterable[V]) is the return type of `groupBy` or `groupByKey`. This could be very slow
// when there're few keys with many values.
case (key, wrapper: Wrappers.JIterableWrapper[_]) =>
observer.update(kryoEncodedElementByteSize(key))
// FIXME: handle ElementByteSizeObservableIterable[_, _]
var count = 0
var bytes = 0L
var warned = false
var aborted = false
val warningThreshold = 10000 // 10s
val abortThreshold = 60000 // 1min
val start = System.currentTimeMillis()
val i = wrapper.underlying.iterator()
while (i.hasNext && !aborted) {
val size = kryoEncodedElementByteSize(i.next())
observer.update(size)
count += 1
bytes += size
val elapsed = System.currentTimeMillis() - start
if (elapsed > abortThreshold) {
aborted = true
logger.warn(
s"Aborting size estimation for ${wrapper.underlying.getClass}, " +
s"elapsed: $elapsed ms, count: $count, bytes: $bytes"
)
wrapper.underlying match {
case c: _root_.java.util.Collection[_] =>
// extrapolate remaining bytes in the collection
val remaining =
(bytes.toDouble / count * (c.size - count)).toLong
observer.update(remaining)
logger.warn(
s"Extrapolated size estimation for ${wrapper.underlying.getClass} " +
s"count: ${c.size}, bytes: ${bytes + remaining}"
)
case _ =>
logger.warn("Can't get size of internal collection, thus can't extrapolate size")
}
} else if (elapsed > warningThreshold && !warned) {
warned = true
logger.warn(
s"Slow size estimation for ${wrapper.underlying.getClass}, " +
s"elapsed: $elapsed ms, count: $count, bytes: $bytes"
)
}
}
case _ =>
observer.update(kryoEncodedElementByteSize(value))
}
private def kryoEncodedElementByteSize(obj: Any): Long =
withKryoState(instanceId, options) { kryoState: KryoState =>
val s = new CountingOutputStream(ByteStreams.nullOutputStream())
val output = new Output(options.bufferSize, options.maxBufferSize)
output.setOutputStream(s)
kryoState.kryo.writeClassAndObject(output, obj)
output.flush()
s.getCount + VarInt.getLength(s.getCount)
}
}
/** Used for sharing Kryo instance and buffers. */
final private[scio] case class KryoState(
kryo: Kryo,
inputChunked: InputChunked,
outputChunked: OutputChunked
)
private[scio] object KryoAtomicCoder {
private val logger = LoggerFactory.getLogger(this.getClass)
private val Header = -1
private val atomicInstanceIds = new AtomicInteger(0)
// We want to have one Kryo instance per thread per coder instance.
// Also the instances should be garbage collected when the thread dies.
private[this] val KryoStateMap: ThreadLocal[mutable.HashMap[Integer, KryoState]] =
new ThreadLocal[mutable.HashMap[Integer, KryoState]] {
override def initialValue(): mutable.HashMap[Integer, KryoState] =
mutable.HashMap[Integer, KryoState]()
}
private def nextInstanceId(): Int = atomicInstanceIds.getAndIncrement()
final def withKryoState[R](instanceId: Integer, options: KryoOptions)(f: KryoState => R): R = {
val ks = KryoStateMap
.get()
.getOrElseUpdate(
instanceId, {
val k = KryoSerializer.registered.newKryo()
k.setReferences(options.referenceTracking)
k.setRegistrationRequired(options.registrationRequired)
new ScioKryoRegistrar()(k)
new AlgebirdRegistrar()(k)
KryoRegistrarLoader.load(k)
val input = new InputChunked(options.bufferSize)
val output = new OutputChunked(options.bufferSize)
KryoState(k, input, output)
}
)
f(ks)
}
}
final private[scio] case class KryoOptions(
bufferSize: Int,
maxBufferSize: Int,
referenceTracking: Boolean,
registrationRequired: Boolean
)
private[scio] object KryoOptions {
@inline def apply(): KryoOptions =
KryoOptions(PipelineOptionsFactory.create())
def apply(options: PipelineOptions): KryoOptions = {
val o = options.as(classOf[ScioOptions])
KryoOptions(
o.getKryoBufferSize,
o.getKryoMaxBufferSize,
o.getKryoReferenceTracking,
o.getKryoRegistrationRequired
)
}
}
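// Usage sketch (not part of the original file): round-trip a value through the
// coder using default KryoOptions derived from freshly created PipelineOptions.
// Assumes Kryo can serialize T without extra registration; names are illustrative.
object KryoAtomicCoderUsageExample {
  import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
  def roundTrip[T](value: T): T = {
    val coder = new KryoAtomicCoder[T](KryoOptions())
    val out = new ByteArrayOutputStream()
    // encode writes the VarInt header followed by chunked Kryo output
    coder.encode(value, out)
    coder.decode(new ByteArrayInputStream(out.toByteArray))
  }
}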
|
regadas/scio
|
scio-core/src/main/scala/com/spotify/scio/coders/KryoAtomicCoder.scala
|
Scala
|
apache-2.0
| 11,363
|
package quotidian.web.controller
import org.apache.lucene.document.Document
class DocumentWrapper(private val document:Document) {
def apply(name:String):Option[String] =
try { Some(document.getValues(name)(0)) }
catch {
case _ => None
}
}
object DocumentWrapper {
def apply(doc:Document) = new DocumentWrapper(doc)
implicit def document2documentWrapper(doc:Document) = DocumentWrapper(doc)
}
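// Usage sketch (not part of the original file): with the implicit conversion in
// scope, a Lucene Document can be queried directly; a missing or empty field
// yields None instead of throwing.
object DocumentWrapperUsageExample {
  import DocumentWrapper.document2documentWrapper
  def firstValue(doc: Document, field: String): Option[String] = doc(field)
}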
|
bryanjswift/quotidian
|
src/main/scala/quotidian/web/controller/DocumentWrapper.scala
|
Scala
|
mit
| 409
|
/**
* Copyright 2015 Thomson Reuters
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cmwell.fts
import cmwell.domain._
import com.typesafe.scalalogging.{LazyLogging, Logger}
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest
import org.elasticsearch.common.unit.TimeValue
import org.joda.time.DateTime
import org.joda.time.format.DateTimeFormat
import org.scalatest._
import org.slf4j.LoggerFactory
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.io.Source
/**
* User: Israel
* Date: 11/18/12
* Time: 6:15 PM
*/
sealed trait FTSMixin extends BeforeAndAfterAll { this: Suite =>
def ftsService: FTSServiceOps
def refreshAll(): Unit
def getUUID(uuid: String, isCurrent: Boolean = true) = ftsService match {
case es: FTSServiceNew => es.client.prepareGet("cm_well_p0_0","infoclone", uuid).execute().actionGet()
case es: FTSServiceES => {
val index = if(isCurrent) "cmwell_current" else "cmwell_history"
es.client.prepareGet(index, "infoclone", uuid).execute().actionGet()
}
}
val isoDateFormatter = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'Z'")
implicit val executionContext = scala.concurrent.ExecutionContext.global
implicit val logger = Logger(LoggerFactory.getLogger(getClass.getName))
}
trait FTSServiceESTest extends FTSMixin { this: Suite =>
var ftsServiceES:FTSServiceES = _
override def ftsService: FTSServiceOps = ftsServiceES
override protected def beforeAll() {
ftsServiceES = FTSServiceES.getOne("FTSServiceESTest.yml")
// wait for green status
ftsServiceES.client.admin().cluster()
.prepareHealth()
.setWaitForGreenStatus()
.setTimeout(TimeValue.timeValueMinutes(5))
.execute()
.actionGet()
// delete all existing indices
ftsServiceES.client.admin().indices().delete(new DeleteIndexRequest("_all"))
// load indices template
val indicesTemplate = Source.fromURL(this.getClass.getResource("/indices_template.json")).getLines.reduceLeft(_ + _)
ftsServiceES.client.admin().indices().preparePutTemplate("indices_template").setSource(indicesTemplate).execute().actionGet()
// override with test-only settings
val testOnlyTemplate = Source.fromURL(this.getClass.getResource("/test_indices_template_override.json")).getLines.reduceLeft(_ + _)
ftsServiceES.client.admin().indices().preparePutTemplate("test_indices_template").setSource(testOnlyTemplate).execute().actionGet()
// create current index
ftsServiceES.client.admin().indices().prepareCreate("cmwell_current").execute().actionGet()
// create history index
ftsServiceES.client.admin().indices().prepareCreate("cmwell_history").execute().actionGet()
super.beforeAll()
}
override protected def afterAll() {
ftsServiceES.close()
super.afterAll()
Thread.sleep(10000)
logger debug s"FTSSpec is over"
}
override def refreshAll() = ftsServiceES.client.admin().indices().prepareRefresh("*").execute().actionGet()
}
trait FTSServiceNewTest extends FTSMixin { this: Suite =>
var ftsServiceNew: FTSServiceNew = _
override def ftsService: FTSServiceOps = ftsServiceNew
override protected def beforeAll() {
ftsServiceNew = FTSServiceNew("FTSServiceESTest.yml")
// wait for green status
ftsServiceNew.client.admin().cluster()
.prepareHealth()
.setWaitForGreenStatus()
.setTimeout(TimeValue.timeValueMinutes(5))
.execute()
.actionGet()
//
// // delete all existing indices
// ftsServiceNew.client.admin().indices().delete(new DeleteIndexRequest("_all"))
// load indices template
val indicesTemplate = Source.fromURL(this.getClass.getResource("/indices_template_new.json")).getLines.reduceLeft(_ + _)
ftsServiceNew.client.admin().indices().preparePutTemplate("indices_template_new").setSource(indicesTemplate).execute().actionGet()
// override with test-only settings
val testOnlyTemplate = Source.fromURL(this.getClass.getResource("/test_indices_template_override.json")).getLines.reduceLeft(_ + _)
ftsServiceNew.client.admin().indices().preparePutTemplate("test_indices_template").setSource(testOnlyTemplate).execute().actionGet()
// create index
Await.ready(ftsServiceNew.createIndex("cm_well_p0_0"),5.minutes)
super.beforeAll()
}
override protected def afterAll() {
ftsServiceNew.close()
super.afterAll()
Thread.sleep(10000)
logger debug s"FTSSpec is over"
}
def refreshAll() = ftsServiceNew.client.admin().indices().prepareRefresh("*").execute().actionGet()
}
trait FTSServiceEsSpec extends FlatSpec with Matchers /*with ElasticSearchTestNode with FTSServiceESTest */ { this: FTSMixin =>
System.setProperty("dataCenter.id" , "dc_test")
val timeout = FiniteDuration(10, SECONDS)
val m : Map[String, Set[FieldValue]] = Map("name" -> Set(FString("yehuda"), FString("moshe")))
val infotonToIndex = ObjectInfoton("/fts-test/objinfo1/a/b/c","dc_test", Some(System.currentTimeMillis()), m)
"indexing new infoton" should "store it in current index" in {
Await.result(ftsService.index(infotonToIndex, None, ftsService.defaultPartition), timeout)
refreshAll()
val result = getUUID(infotonToIndex.uuid)
// result: ftsServiceES.search(Some(PathFilter("/fts-test/objinfo1/a/b", false)),FieldFilter(Must, Contains, "name", "moshe") :: Nil, None, DefaultPaginationParams,false, "cmwell")
result.isExists should equal (true)
}
"indexing existing infoton" should "store it in current index" in {
val lastModified = new DateTime()
val m : Map[String, Set[FieldValue]] = Map("name" -> Set(FString("yehuda"), FString("moshe")), "family" -> Set(FString("smith")))
val updatedInfotonToIndex = ObjectInfoton("/fts-test/objinfo1/a/b/c", "dc_test", Some(System.currentTimeMillis()),lastModified, m)
Await.result(ftsService.index(updatedInfotonToIndex,Some(infotonToIndex)), timeout)
refreshAll()
val result = Await.result(ftsService.search(None, Some(FieldFilter(Must, Equals, "system.path","/fts-test/objinfo1/a/b/c")), None, DefaultPaginationParams), timeout)
result.infotons.size should equal (1)
result.infotons(0).lastModified should equal (lastModified)
}
it should "store its previous version in the history index" in {
val result = Await.result(ftsService.search(None, Some(FieldFilter(Must, Equals, "system.path","/fts-test/objinfo1/a/b/c")), None,DefaultPaginationParams, SortParam.empty, true), timeout)
result.infotons.size should equal (2)
val res = getUUID(infotonToIndex.uuid,false)
withClue(s"${res.getIndex}, ${res.getSource}, ${res.getType}, ${res.getVersion}, ${res.isSourceEmpty}") {
res.isExists should equal(true)
}
}
val bulkInfotons = Vector.empty ++ (for(i <- 1 to 500) yield ObjectInfoton("/fts-test/bulk/info" + i, "dc_test", Some(System.currentTimeMillis()), Map("name" + i -> Set[FieldValue](FString("value" + i), FString("another value" + i)))))
"bulk indexing infotons" should "store them in current index" in {
Await.result(ftsService.bulkIndex(bulkInfotons,Nil,ftsService.defaultPartition), timeout)
refreshAll()
val result = Await.result(ftsService.search(Some(PathFilter("/fts-test/bulk", true)),None,None,DefaultPaginationParams), timeout)
result.total should equal (500)
}
"bulk indexing existing infotons" should "store their previous version in history index and current version in current index" in {
val updatedBulkInfotons = Vector.empty ++ (for(i <- 1 to 500) yield ObjectInfoton("/fts-test/bulk/info" + i,"dc_test", Some(System.currentTimeMillis()), Map("name" + i -> Set[FieldValue](FString("moshe" + i), FString("shirat" + i)))))
Await.result(ftsService.bulkIndex(updatedBulkInfotons, bulkInfotons), timeout)
refreshAll()
Await.result(ftsService.search(Some(PathFilter("/fts-test/bulk", true)),None,None,DefaultPaginationParams), timeout).total should equal (500)
Await.result(ftsService.search(pathFilter = Some(PathFilter("/fts-test/bulk", true)),None,None,DefaultPaginationParams, withHistory = true), timeout).total should equal (1000)
}
"deleting infoton" should "remove it from current index" in {
val infotonToDelete = ObjectInfoton("/fts-test/infoToDel","dc_test", Some(System.currentTimeMillis()), Map("country" -> Set[FieldValue](FString("israel"), FString("spain"))))
Await.result(ftsService.index(infotonToDelete,None), timeout)
refreshAll()
val resultBeforeDelete = Await.result(ftsService.search(None, Some(FieldFilter(Must, Equals, "system.path","/fts-test/infoToDel")), None, DefaultPaginationParams), timeout)
resultBeforeDelete.total should equal (1)
val deletedInfoton = DeletedInfoton("/fts-test/infoToDel","dc_test",Some(System.currentTimeMillis()))
Await.result(ftsService.delete(deletedInfoton, infotonToDelete), timeout)
refreshAll()
val resultAfterDelete = Await.result(ftsService.search(None, Some(FieldFilter(Must, Equals, "system.path","/fts-test/infoToDel")), None, DefaultPaginationParams), timeout)
resultAfterDelete.total should equal (0)
}
it should "move it to history index and add tombstone" in {
val resultWithHistory = Await.result(ftsService.search(None, Some(FieldFilter(Must, Equals, "system.path","/fts-test/infoToDel")), None,DefaultPaginationParams, SortParam.empty, true), timeout)
resultWithHistory.total should equal (2)
resultWithHistory.infotons.exists(_.isInstanceOf[DeletedInfoton]) should equal (true)
}
"purging infoton" should "permanently delete infoton with given UUID from history index" in {
val infotonToPurge = ObjectInfoton("/fts-test/infoToPurge","dc_test",Some(System.currentTimeMillis()))
Await.result(ftsService.index(infotonToPurge, None), timeout)
val updatedInfotonToPurge = ObjectInfoton("/fts-test/infoToPurge","dc_test",Some(System.currentTimeMillis()))
Await.result(ftsService.index(updatedInfotonToPurge, Some(infotonToPurge)), timeout)
refreshAll()
val result = Await.result(ftsService.search(None, Some(FieldFilter(Must, Equals, "system.uuid",infotonToPurge.uuid)), None, DefaultPaginationParams, SortParam.empty, true), timeout)
result.length should equal(1)
Await.result(ftsService.purge(infotonToPurge.uuid), timeout)
refreshAll()
Await.result(ftsService.search(None, Some(FieldFilter(Must, Equals, "system.uuid",infotonToPurge.uuid)), None, DefaultPaginationParams, SortParam.empty, true), timeout).length should equal(0)
}
"purgeAll infoton" should "permanently delete all infoton's versions with given path from all indices" in {
val infotonToPurgeAll = ObjectInfoton("/fts-test/infoToPurgeAll","dc_test")
Await.result(ftsService.index(infotonToPurgeAll, None), timeout)
val updatedInfotonToPurgeAll = ObjectInfoton("/fts-test/infoToPurgeAll","dc_test")
Await.result(ftsService.index(updatedInfotonToPurgeAll, Some(infotonToPurgeAll)), timeout)
refreshAll()
Await.result(ftsService.search(None, Some(FieldFilter(Must, Equals, "system.path", infotonToPurgeAll.path)), None, DefaultPaginationParams, SortParam.empty, true), timeout).length should equal (2)
Await.result(ftsService.purgeAll(infotonToPurgeAll.path,true,ftsService.defaultPartition), timeout)
refreshAll()
val f = ftsService.search(None, Some(FieldFilter(Must, Equals, "system.path", infotonToPurgeAll.path)), None, DefaultPaginationParams, SortParam.empty, true)
f.onSuccess{
case FTSSearchResponse(total, offset, length, infotons, None) =>
logger.debug(s"before failing: total: $total, offset: $offset, length: $length and infotons:\\n${infotons.map(_.path).mkString("\\t","\\n\\t","\\n")} ")
}(scala.concurrent.ExecutionContext.Implicits.global)
Await.result(f, timeout).length should equal (0)
}
"listChildren" should "return a list of given infoton's current version children" in {
val infotonToList1 = ObjectInfoton("/fts-test/infotons/infotonToList1","dc_test",Some(System.currentTimeMillis()))
val infotonToList2 = ObjectInfoton("/fts-test/infotons/infotonToList2","dc_test", Some(System.currentTimeMillis()), Map("city" -> Set[FieldValue](FString("Or-Yehuda"), FString("Modiin"))))
val infotonToList3 = LinkInfoton(path= "/fts-test/infotons/infotonToList3" ,dc = "dc_test",linkTo = "/fts-test/infotons/infotonToList2", linkType = LinkType.Temporary).copy(indexTime = Some(System.currentTimeMillis()))
Await.result(ftsService.index(infotonToList1,None), timeout)
Await.result(ftsService.index(infotonToList2,None), timeout)
Await.result(ftsService.index(infotonToList3,None), timeout)
refreshAll()
Await.result(ftsService.listChildren("/fts-test/infotons",0,20,false,ftsService.defaultPartition), timeout).infotons.length should equal (3)
}
"search API" should "find infotons using path filter with descendants sorted by lastModified (desc)" in {
//prepare infotons to index
val objectInfotonToSearch = ObjectInfoton("/fts-test/search/objectInfotonToSearch","dc_test", Some(System.currentTimeMillis()), isoDateFormatter.parseDateTime("2013-01-01T10:00:00Z"),
Map("car" -> Set[FieldValue](FString("Mazda"), FString("Mitsubishi")), "food" -> Set[FieldValue](FString("Sushi"), FString("Falafel")), "copyright" -> Set[FieldValue](FString("Cm well team ©"))))
val objectInfotonToSearch2 = ObjectInfoton("/fts-test/search/objectInfotonToSearch2","dc_test", Some(System.currentTimeMillis()), isoDateFormatter.parseDateTime("2013-01-01T10:01:00Z"),
Map("os" -> Set[FieldValue](FString("osx")), "ver" -> Set[FieldValue](FString("9.2")), "copyright" -> Set[FieldValue](FString("Cm well team ©"))))
// This is for the withHistory flag test coming up in a few tests
val updatedObjectInfotonToSearch = ObjectInfoton("/fts-test/search/objectInfotonToSearch","dc_test", Some(System.currentTimeMillis()), isoDateFormatter.parseDateTime("2013-01-02T10:02:00Z"),
Map("car" -> Set[FieldValue](FString("Mazda"), FString("Mitsubishi")), "food" -> Set[FieldValue](FString("Sushi"), FString("Falafel"), FString("Malabi")), "copyright" -> Set[FieldValue](FString("Cm well team ©"))))
val fileInfotonToSearch = FileInfoton("/fts-test/search/objectInfotonToSearch/fileInfotonToSearch","dc_test", Some(System.currentTimeMillis()), isoDateFormatter.parseDateTime("2013-01-02T10:03:00Z"),
Map("copyright" -> Set[FieldValue](FString("Cm-well team ©")), "since" -> Set[FieldValue](FString("2009"))),FileContent("My test file content is great".getBytes, "text/plain"))
val linkInfotonToSearch = LinkInfoton("/fts-test/search/linkInfotonToSearch","dc_test", isoDateFormatter.parseDateTime("2013-01-05T10:04:00Z"), Map("since" -> Set[FieldValue](FString("2009"))), "/fts-test/search/objectInfotonToSearch/fileInfotonToSearch", LinkType.Temporary).copy(indexTime = Some(System.currentTimeMillis()))
// index them
Await.result(ftsService.index(objectInfotonToSearch,None), timeout)
Await.result(ftsService.index(objectInfotonToSearch2,None), timeout)
Await.result(ftsService.index(updatedObjectInfotonToSearch, Some(objectInfotonToSearch)), timeout)
Await.result(ftsService.index(fileInfotonToSearch,None), timeout)
Await.result(ftsService.index(linkInfotonToSearch,None), timeout)
refreshAll()
val response = Await.result(ftsService.search(Some(PathFilter("/fts-test/search", true)),None,None,DefaultPaginationParams), timeout)
response.infotons.length should equal (4)
response.infotons.head.path should equal ("/fts-test/search/linkInfotonToSearch")
response.infotons.last.path should equal ("/fts-test/search/objectInfotonToSearch2")
}
it should "find infotons using path filter with descendants sorted by infoton's type" in {
val response = Await.result(ftsService.search(pathFilter = Some(PathFilter("/fts-test/search", true)), fieldsFilter = None, datesFilter = None, paginationParams = DefaultPaginationParams, sortParams = FieldSortParams(List("type" -> Asc))), timeout)
response.infotons.head.path should equal ("/fts-test/search/objectInfotonToSearch/fileInfotonToSearch")
response.infotons.last.path should equal ("/fts-test/search/objectInfotonToSearch")
}
it should "find infotons using PathFilter without descendants" in {
Await.result(ftsService.search(Some(PathFilter("/fts-test/search", false)),None,None,DefaultPaginationParams), timeout).infotons.length should equal(3)
}
it should "find infotons using FieldFilter" in {
Await.result(ftsService.search(None,Some(FieldFilter(Must, Contains, "car", "mazda")),None,DefaultPaginationParams), timeout).infotons.length should equal (1)
Await.result(ftsService.search(Some(PathFilter("/fts-test/search", true)) ,Some(FieldFilter(MustNot, Contains, "copyright", "team")),None,DefaultPaginationParams), timeout).infotons.length should equal (1)
// when only one "should" is supplied it acts as a "must"
Await.result(ftsService.search(Some(PathFilter("/fts-test/search", true)) ,Some(FieldFilter(Should, Contains, "copyright", "well")),None,DefaultPaginationParams), timeout).infotons.length should equal (3)
Await.result(ftsService.search(Some(PathFilter("/fts-test/search", true)), Some(MultiFieldFilter(Must, Seq(FieldFilter(Must, Contains, "copyright", "well"), FieldFilter(Should, Equals, "since", "2009")))),None,DefaultPaginationParams) , timeout).infotons.length should equal (3)
}
it should "find infotons using FieldFilters using 'Should Exist'" in {
Await.result(ftsService.search(None,Some(MultiFieldFilter(Must, Seq(SingleFieldFilter(Should,Contains, "car", None), SingleFieldFilter(Should,Contains, "ver", None)))),None,DefaultPaginationParams), timeout).infotons.length should equal (2)
}
it should "find infotons using DateFilter " in {
Await.result(ftsService.search(None,None, Some(DatesFilter(Some(isoDateFormatter.parseDateTime("2013-01-01T10:00:00Z")),
Some(isoDateFormatter.parseDateTime("2013-01-03T10:00:00Z")))),DefaultPaginationParams), timeout).infotons.length should equal (3)
}
it should "find infotons using DateFilter limited with pagination params" in {
Await.result(ftsService.search(None,None,datesFilter = Some(DatesFilter(Some(isoDateFormatter.parseDateTime("2013-01-01T10:00:00Z")),
Some(isoDateFormatter.parseDateTime("2013-01-06T10:00:00Z")))),DefaultPaginationParams), timeout).infotons.length should equal (4)
Await.result(ftsService.search(None,None,datesFilter = Some(DatesFilter(Some(isoDateFormatter.parseDateTime("2013-01-01T10:00:00Z")),
Some(isoDateFormatter.parseDateTime("2013-01-06T10:00:00Z")))), paginationParams = PaginationParams(0, 2)), timeout).infotons.length should equal (2)
Await.result(ftsService.search(None,None,datesFilter = Some(DatesFilter(Some(isoDateFormatter.parseDateTime("2013-01-01T10:00:00Z")),
Some(isoDateFormatter.parseDateTime("2013-01-06T10:00:00Z")))), paginationParams = PaginationParams(2, 1)), timeout).infotons.length should equal (1)
}
it should "include history versions when turning on the 'withHistory' flag" in {
Await.result(ftsService.search(None,fieldsFilter = Some(FieldFilter(Must, Contains, "car", "mazda")),None,DefaultPaginationParams, withHistory = true), timeout).infotons.length should equal (2)
}
it should "use exact value when sorting on string field" in {
val i1 = ObjectInfoton(path = "/fts-test2/sort-by/1", indexTime = None, dc = "dc_test",
fields = Map("car" -> Set[FieldValue](FString("Mitsubishi Outlander"))))
val i2 = ObjectInfoton(path = "/fts-test2/sort-by/2", indexTime = None, dc = "dc_test",
fields = Map("car" -> Set[FieldValue](FString("Subaru Impreza"))))
Await.result(ftsService.index(i1,None), timeout)
Await.result(ftsService.index(i2,None), timeout)
refreshAll()
val response = Await.result(ftsService.search(pathFilter = Some(PathFilter("/fts-test2/sort-by", true)),
fieldsFilter = Some(SingleFieldFilter(Must, Equals, "car", None)),None,DefaultPaginationParams,
sortParams = FieldSortParams(List(("car" -> Desc))), debugInfo = true), timeout)
withClue(response) {
response.infotons.head.path should equal("/fts-test2/sort-by/2")
response.infotons.last.path should equal("/fts-test2/sort-by/1")
}
}
"Scroll API" should "allow start scrolling and continue scrolling" in {
val l = System.currentTimeMillis()
val infotons = Seq.tabulate(500){ i =>
ObjectInfoton(
s"/fts-test/scroll/info$i",
"dc_test",
Some(l + i),
Map("name" + i -> Set[FieldValue](FString("value" + i), FString("another value" + i)))
)
}
// val infotons = Vector.empty ++ (for(i <- 1 to 500) yield (ObjectInfoton("/fts-test/scroll/info" + i,"dc_test", None, Map("name" + i -> Set[FieldValue](FString("value" + i), FString("another value" + i)))), None))
Await.result(ftsService.bulkIndex(infotons,Nil), timeout)
refreshAll()
val startScrollResult =Await.result(ftsService.startScroll(pathFilter=Some(PathFilter("/fts-test/scroll", false)),None,None, paginationParams = PaginationParams(0, 60)), timeout)
startScrollResult.total should equal (500)
var count = 0
var scrollResult =Await.result(ftsService.scroll(startScrollResult.scrollId), timeout)
while(scrollResult.infotons.nonEmpty) {
count += scrollResult.infotons.length
scrollResult =Await.result(ftsService.scroll(scrollResult.scrollId), timeout)
}
count should equal (500)
}
}
class FTSoldTests extends FTSServiceEsSpec with FTSServiceESTest with FTSMixin
//TODO: uncomment, and make fts tests to run on new FTS as well
//class FTSnewTests extends FTSServiceEsSpec with FTSServiceNewTest with FTSMixin
|
nruppin/CM-Well
|
server/cmwell-fts/src/test/scala/cmwell/fts/FTSServiceEsSpec.scala
|
Scala
|
apache-2.0
| 22,684
|
package org.jetbrains.sbt.editor.documentationProvider
import com.intellij.lang.documentation.DocumentationProvider
import com.intellij.openapi.editor.Editor
import com.intellij.psi.util.PsiTreeUtil
import com.intellij.psi.{PsiElement, PsiFile}
import org.jetbrains.plugins.scala.editor.documentationProvider.DocumentationProviderTestBase
import org.jetbrains.plugins.scala.extensions.PsiElementExt
import org.jetbrains.plugins.scala.lang.psi.api.base.literals.ScStringLiteral
import org.jetbrains.sbt.MockSbtBase
import org.jetbrains.sbt.language.SbtFileType
import org.junit.Assert
abstract class SbtScalacOptionsDocumentationProviderTestBase extends DocumentationProviderTestBase {
self: MockSbtBase =>
override protected def documentationProvider: DocumentationProvider = new SbtScalacOptionsDocumentationProvider
override protected def createFile(fileContent: String): PsiFile =
getFixture.configureByText(SbtFileType, fileContent)
override protected def generateDoc(editor: Editor, file: PsiFile): String = {
val (referredElement, elementAtCaret) = extractReferredAndOriginalElements(editor, file)
val customDocumentationElement = documentationProvider.getCustomDocumentationElement(editor, file, elementAtCaret, 0)
generateDoc(Option(customDocumentationElement).getOrElse(referredElement), elementAtCaret)
}
override protected def extractReferredAndOriginalElements(editor: Editor, file: PsiFile): (PsiElement, PsiElement) = {
val elementAtCaret = file.findElementAt(editor.getCaretModel.getOffset)
val leaf = PsiTreeUtil.getDeepestFirst(elementAtCaret)
val parents = leaf.parentsInFile.toArray
parents.collectFirst {
case str: ScStringLiteral => (str, leaf)
}.getOrElse {
Assert.fail("No appropriate original element found at caret position").asInstanceOf[Nothing]
}
}
}
|
JetBrains/intellij-scala
|
scala/scala-impl/test/org/jetbrains/sbt/editor/documentationProvider/SbtScalacOptionsDocumentationProviderTestBase.scala
|
Scala
|
apache-2.0
| 1,851
|
/******************************************************************
* See the NOTICE file distributed with this work for additional *
* information regarding Copyright ownership. The author/authors *
* license this file to you under the terms of the Apache License *
* Version 2.0 (the "License"); you may not use this file except *
* in compliance with the License. You may obtain a copy of the *
* License at: *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, *
* software distributed under the License is distributed on an *
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, *
* either express or implied. See the License for the specific *
* language governing permissions and limitations under the *
* License. *
******************************************************************/
package scalatime.impl
import java.time.temporal.ChronoUnit
import java.time.{Duration, LocalTime, Period}
import org.scalatest.{Matchers, Outcome, fixture}
class TemporalAmountOpsSuite extends fixture.FunSuite with Matchers {
override type FixtureParam = (TemporalAmountOps)
override protected def withFixture(test: OneArgTest): Outcome = {
val amt = Duration.ofMinutes(60)
withFixture(test.toNoArgTest(TemporalAmountOps(amt)))
}
test("`<<+` adds the boxed amount to the specified Temporal") {
_ <<+ LocalTime.of(14, 50) shouldBe LocalTime.of(15, 50)
}
test("`<<+` subtracts the boxed amount from the specified Temporal") {
_ <<- LocalTime.of(14, 50) shouldBe LocalTime.of(13, 50)
}
}
class DurationOpsSuite extends fixture.FunSuite with Matchers {
override type FixtureParam = DurationOps
override protected def withFixture(test: OneArgTest): Outcome = {
val dur = Duration.ofMinutes(60)
withFixture(test.toNoArgTest(DurationOps(dur)))
}
test("`*` obtains a copy of the boxed Duration multiplied by the scalar") {
_ * 2 shouldBe Duration.ofMinutes(120)
}
test("`+` obtains a copy of the boxed Duration with the specified Duration added") {
_ + Duration.ofMinutes(60) shouldBe Duration.ofMinutes(120)
}
test("`+` obtains a copy of the boxed Duration with a duration added in terms of the specified unit") {
_ +(60, ChronoUnit.MINUTES) shouldBe Duration.ofMinutes(120)
}
test("`-` obtains a copy of the boxed Duration with the specified Duration subtracted") {
_ - Duration.ofMinutes(60) shouldBe Duration.ofMinutes(0)
}
test("`-` obtains a copy of the boxed Duration with a duration subtracted in terms of the specified unit") {
_ -(60, ChronoUnit.MINUTES) shouldBe Duration.ofMinutes(0)
}
test("`/` obtains a copy of the boxed Duration divided by the divisor") {
_ / 2 shouldBe Duration.ofMinutes(30)
}
test("`unary_!` negates the boxed Duration") {
!_ shouldBe Duration.ofMinutes(-60)
}
test("`asConcurrentDuration` obtains the Duration as a `concurrent.duration.Duration`") {
_.asConcurrentDuration shouldBe concurrent.duration.Duration.fromNanos(3600000000000L)
}
}
class PeriodOpsSuite extends fixture.FunSuite with Matchers {
override type FixtureParam = PeriodOps
override protected def withFixture(test: OneArgTest): Outcome = {
val period = Period.ofDays(60)
withFixture(test.toNoArgTest(new FixtureParam(period)))
}
test("`*` obtains a copy of the boxed Period multiplied by the scalar") {
_ * 2 shouldBe Period.ofDays(120)
}
test("`+` obtains a copy of the boxed Period with the specified TemporalAmount added") {
_ + Period.ofDays(60) shouldBe Period.ofDays(120)
}
test("`+` obtains a copy of the boxed Period with the specified TemporalAmount subtracted") {
_ - Period.ofDays(60) shouldBe Period.ofDays(0)
}
test("`unary_!` negates the boxed Period") {
!_ shouldBe Period.ofDays(-60)
}
}
|
reactivecodes/scala-time
|
src/test/scala/scalatime/impl/TemporalAmountOpsSuite.scala
|
Scala
|
apache-2.0
| 4,139
|
package gsd.linux.tools
/*
* This file is part of the Linux Variability Modeling Tools (LVAT).
*
* Copyright (C) 2010 Steven She <shshe@gsd.uwaterloo.ca>
*
* LVAT is free software: you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* LVAT is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
* more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with LVAT. (See files COPYING and COPYING.LESSER.) If not, see
* <http://www.gnu.org/licenses/>.
*/
import gsd.linux._
import java.io.{InputStreamReader, PrintStream}
import org.clapper.argot.{ArgotConverters, ArgotUsageException}
import util.logging.ConsoleLogger
/**
* Outputs the boolean translation of a Kconfig extract.
*
* @author Steven She (shshe@gsd.uwaterloo.ca)
*/
object BooleanTranslationMain extends ArgotUtil with ConsoleLogger {
val name = "BooleanTranslationMain"
import ArgotConverters._
val inParam = parser.parameter[String](
"in-file", "input Kconfig extract (.exconfig) file, stdin if not specified.", true)
val outParam = parser.parameter[String](
"out-file", "output file to write boolean expressions, stdout if not specified.", true)
def main(args: Array[String]) {
try {
parser.parse(args)
val k =
(pOpt.value, inParam.value) match {
case (Some(_), Some(_)) =>
parser.usage("Either a project (-p) is specified or input & output parameters are used.")
case (Some(p), None) => p.exconfig
case (None, Some(f)) =>
log("Reading Kconfig extract from file...")
KConfigParser.parseKConfigFile(f)
case (None, None) =>
log("Using stdin as input...")
KConfigParser.parseKConfigStream(System.in)
}
val output =
(pOpt.value, outParam.value) match {
case (Some(p), None) => new PrintStream(p.boolFile.get)
case (None, Some(f)) => new PrintStream(f)
case _ => System.out
}
execute(k, output)
}
catch {
case e: ArgotUsageException => println(e.message)
}
}
def execute(k: ConcreteKConfig, out: PrintStream) {
//First output identifiers
for (id <- k.identifiers)
out.println("@ " + id)
val res = BooleanTranslation.mkBooleanTranslation(k.toAbstractKConfig)
for (id <- res.genVars) out.println("$ " + id)
for (e <- res.exprs) out.println(e)
}
}
|
scas-mdd/linux-variability-analysis-tools.fm-translation
|
src/main/scala/gsd/linux/tools/BooleanTranslationMain.scala
|
Scala
|
gpl-3.0
| 2,754
|
package sds.classfile.bytecode
class Iinc(_index: Int, _const: Int, pc: Int) extends OpcodeInfo("iinc", pc) {
def index: Int = _index
def const: Int = _const
override def toString(): String = s"${super.toString()}: $index, $const"
}
|
g1144146/sds_for_scala
|
src/main/scala/sds/classfile/bytecode/Iinc.scala
|
Scala
|
apache-2.0
| 245
|
package redstone.solver.levels
import org.scalatest._
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import redstone._
import redstone.piece._
import redstone.solver.util._
import redstone.solver._
@RunWith(classOf[JUnitRunner])
class Two extends FunSuite {
// Which solver do we use?
val priorityQueueSolver = !true
val depthFirstSearchSolver = !true
val breadthFirstSearchSolver = true
// Results
//
// priorityQ: 121 moves, 425 iterations
// DFS : 495 moves, 3774 iterations
// BFS : 46 moves, 18786 iterations
// |b b|
// |bbbb|
// |bbbb|
// |^BB^|
// |UBBU|
test("level 2 - Epidote") {
var pieces: List[BoardPiece] = List()
// Row 0
var piece: BoardPiece = new UpRectangle(0, 0, 0)
pieces = pieces :+ piece
piece = new BigSquare(1, 1, 0)
pieces = pieces :+ piece
piece = new UpRectangle(2, 3, 0)
pieces = pieces :+ piece
// Row 2
piece = new SmallSquare(3, 0, 2)
pieces = pieces :+ piece
piece = new SmallSquare(4, 1, 2)
pieces = pieces :+ piece
piece = new SmallSquare(5, 2, 2)
pieces = pieces :+ piece
piece = new SmallSquare(6, 3, 2)
pieces = pieces :+ piece
// Row 3
piece = new SmallSquare(7, 0, 3)
pieces = pieces :+ piece
piece = new SmallSquare(8, 1, 3)
pieces = pieces :+ piece
piece = new SmallSquare(9, 2, 3)
pieces = pieces :+ piece
piece = new SmallSquare(10, 3, 3)
pieces = pieces :+ piece
// Row 4
piece = new SmallSquare(11, 0, 4)
pieces = pieces :+ piece
piece = new SmallSquare(12, 3, 4)
pieces = pieces :+ piece
val board = new Board(pieces)
val solutionBoard: Option[Board] = {
if(priorityQueueSolver) PriorityQueueSolver.solve(board)
else if(depthFirstSearchSolver) DepthFirstSearchSolver.solve(board)
else BreadthFirstSearchSolver.solve(board)
}
Utility.printSolution(board, solutionBoard)
assert(solutionBoard.isDefined && solutionBoard.get.isSolution)
}
}
|
skumargithub/redstone-solver
|
src/test/scala/redstone/solver/levels/Two.scala
|
Scala
|
gpl-2.0
| 2,111
|
package io.backchat.hookup
class ScalaUpstreamHandler extends {
}
|
backchatio/hookup
|
src/main/scala/io/backchat/hookup/netty_handlers.scala
|
Scala
|
mit
| 69
|
package temportalist.compression.main.client.model
import net.minecraft.client.renderer.block.model.ModelResourceLocation
import net.minecraft.client.resources.IResourceManager
import net.minecraft.util.ResourceLocation
import net.minecraftforge.client.model.{ICustomModelLoader, IModel}
import temportalist.compression.main.common.Compression
/**
*
* Created by TheTemportalist on 4/14/2016.
*
* @author TheTemportalist
*/
class ModelLoaderCompressed extends ICustomModelLoader {
override def accepts(modelLocation: ResourceLocation): Boolean = {
modelLocation.equals(ModelLoaderCompressed.fakeRL)
}
override def loadModel(modelLocation: ResourceLocation): IModel = {
new ModelCompressed
}
override def onResourceManagerReload(resourceManager: IResourceManager): Unit = {}
}
object ModelLoaderCompressed {
val fakeRL = new ModelResourceLocation(Compression.getModId, "models/fake")
}
|
TheTemportalist/Compression
|
src/main/scala/temportalist/compression/main/client/model/ModelLoaderCompressed.scala
|
Scala
|
apache-2.0
| 913
|
/*
* scala-swing (https://www.scala-lang.org)
*
* Copyright EPFL, Lightbend, Inc., contributors
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.swing
/**
* Horizontal and vertical alignments. We sacrifice a bit of type-safety
* for simplicity here.
*
* @see javax.swing.SwingConstants
*/
object Alignment extends Enumeration {
import javax.swing.SwingConstants._
val Left : Alignment.Value = Value(LEFT)
val Right : Alignment.Value = Value(RIGHT)
val Center : Alignment.Value = Value(CENTER)
val Top : Alignment.Value = Value(TOP)
val Bottom : Alignment.Value = Value(BOTTOM)
//1.6: val Baseline = Value(BASELINE)
val Leading : Alignment.Value = Value(LEADING)
val Trailing: Alignment.Value = Value(TRAILING)
}
|
scala/scala-swing
|
src/main/scala/scala/swing/Alignment.scala
|
Scala
|
apache-2.0
| 925
|
package com.twitter.querulous.unit
import java.sql.Connection
import org.specs.Specification
import org.specs.mock.JMocker
import com.twitter.querulous.query._
import com.twitter.finagle.tracing._
class TracingQuerySpec extends Specification with JMocker {
"TracingQuery" should {
"add records as query is executed" in {
val queryString = "select * from users"
val tracer = mock[Tracer]
val connection = mock[Connection]
Trace.pushId(TraceId(Some(SpanId(1)), None, SpanId(1), Some(true)))
expect {
one(connection).getClientInfo("ClientHostname")
one(connection).prepareStatement("select * from users /*~{\\"client_host\\" : \\"127.0.0.1\\", " +
"\\"service_name\\" : \\"service\\", \\"trace_id\\" : \\"0000000000000001\\"}*/")
exactly(5).of(tracer).record(a[Record])
}
val query = new SqlQuery(connection, queryString)
val tracingQuery = new TracingQuery(query, connection, QueryClass.Select,
"service", tracer, true)
tracingQuery.execute()
}
}
}
|
twitter/querulous
|
querulous-tracing/src/test/scala/com/twitter/querulous/unit/TracingQuerySpec.scala
|
Scala
|
apache-2.0
| 1,048
|
/*
* *
* * Copyright 2012 Martin Gontovnikas (martin at gonto dot com dot ar) - twitter: @mgonto
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
*
*/
package ar.com.gonto.factorypal.reflection
import ar.com.gonto.factorypal.objects.ObjectBuilder
import scala.reflect.runtime.universe._
import ar.com.gonto.factorypal.fields.FieldSetter
/**
* This is the object that does the magic.
*
* Given a list of FieldSetters and the Type of the object to create, this object first finds
* the minimal constructor for which we have all of the properties needed in our FieldSetters.
* After this, using this constructor, it instantiates the object. Then, using the remaining
* FieldSetters, it sets all of the field values the user has asked for.
* @author mgonto
*/
object ObjectReflector {
def create[T, Any](fieldSetters : List[FieldSetter[T, Any]])(implicit man : Manifest[T]) : T = {
val constructorList = typeOf[T].declaration(nme.CONSTRUCTOR).asTerm.alternatives.collect {
case m : MethodSymbol => m.paramss.map(_.map(x => x.asInstanceOf[TermSymbol]))
}.flatten
val minConstructor = constructorList.minBy(_.size)
val namesToUse = minConstructor.map(x => (x.name.toString))
val clazzToUse = clazz[T]
val clazzName = clazzToUse.getSimpleName
val params = namesToUse.map(name =>
fieldSetters.find(setter => setter.propName == name).getOrElse(
throw new IllegalStateException(s"The constructor for $clazzName needs a param with name $name and there's no property with that value")
))
val classesToUse = params.map(param => param.getValueClass)
val reflectedConstructor = clazzToUse.getConstructor(classesToUse: _*)
val instance = reflectedConstructor.newInstance(params.map(_.getValue.asInstanceOf[Object]) : _*).asInstanceOf[T]
val fieldsRemaining = fieldSetters.dropWhile(elem => params.contains(elem))
fieldsRemaining.foreach(_.setValue(instance))
instance
}
def clazz[T](implicit man : Manifest[T]) = man.runtimeClass
def classSymbol[T](implicit man : Manifest[T]) = Symbol(clazz[T].getName)
}
|
mgonto/factory_pal
|
framework-src/src/main/scala/ar/com/gonto/factorypal/reflection/ObjectReflector.scala
|
Scala
|
apache-2.0
| 2,658
|
package ee.cone.c4ui
import ee.cone.c4actor.{Context, TransientLens}
import ee.cone.c4vdom.Types.ViewRes
import ee.cone.c4vdom.{VDomLens, VDomState, VDomView}
trait View extends VDomView[Context] with Product
trait UntilPolicy {
def wrap(view: Context=>ViewRes): Context=>ViewRes
}
case object VDomStateKey extends TransientLens[Option[VDomState]](None)
with VDomLens[Context, Option[VDomState]]
trait ViewRestPeriodProvider {
def get(local: Context): ViewRestPeriod
}
sealed trait ViewRestPeriod extends Product {
def valueMillis: Long
}
case class DynamicViewRestPeriod(valueMillis: Long) extends ViewRestPeriod
case class StaticViewRestPeriod(valueMillis: Long) extends ViewRestPeriod
case object ViewRestPeriodKey extends TransientLens[Option[ViewRestPeriod]](None)
|
conecenter/c4proto
|
base_lib/src/main/scala/ee/cone/c4ui/UIApi.scala
|
Scala
|
apache-2.0
| 787
|
package plainFP
import org.scalatest._
import org.scalatest.Matchers._
class RequestResponseSpec extends FunSpec with Matchers {
import RequestResponse._
import Method._
val badResponse = Response("400", "", "text/plain", "")
val goodResponse = Response("200", "", "text/plain", "Hello John!")
describe("hello") {
it("should give good response") {
val request = Request(GET, "/hello", "", "", "", "", "john")
val response = hello(request)
response shouldEqual goodResponse
}
it("should give bad response if method is wrong") {
val request = Request(POST, "/hello", "???", "", "", "", "john")
val response = hello(request)
response shouldEqual badResponse
}
it("should give bad response if path is wrong") {
val request = Request(GET, "/bye", "???", "", "", "", "john")
val response = hello(request)
response shouldEqual badResponse
}
}
describe("isMethod") {
it("should give true if method is equal") {
val request = Request(GET, "", "", "", "", "", "")
val response = isGetMethod(request)
response shouldEqual true
}
it("should give false if method is wrong") {
val request = Request(POST, "", "", "", "", "", "")
val response = isGetMethod(request)
response shouldEqual false
}
}
describe("isPath") {
it("should give true if path is equal") {
val request = Request(GET, "/hello", "", "", "", "", "")
val response = isHelloPath(request)
response shouldEqual true
}
it("should give false if path is wrong") {
val request = Request(GET, "/bye", "", "", "", "", "")
val response = isHelloPath(request)
response shouldEqual false
}
}
describe("createHelloContent") {
it("should give 'Hello John!'") {
val request = Request(GET, "", "", "", "", "", "john")
val response = createHelloContent(request)
response shouldEqual "Hello John!"
}
it("should give 'Hello !' if name is empty") {
val request = Request(GET, "", "", "", "", "", "")
val response = createHelloContent(request)
response shouldEqual "Hello !"
}
}
}
|
enpassant/miniatures
|
src/test/scala/plainFP/RequestResponseSpec.scala
|
Scala
|
apache-2.0
| 2,176
|
package com.ubeeko.exceptions
/**
* Created with IntelliJ IDEA.
* User: elb
* Date: 18/02/13
* Time: 09:03
*/
trait BusinessRuleException extends Exception
|
eric-leblouch/htalk
|
src/main/scala/com/ubeeko/exceptions/BusinessRuleException.scala
|
Scala
|
apache-2.0
| 161
|
import collection.mutable.UnrolledBuffer
object Test {
def main(args: Array[String]): Unit = {
val buf = UnrolledBuffer(1 to 50: _*)
val dub = buf ++ buf
println(dub)
}
}
|
folone/dotty
|
tests/run/t5867.scala
|
Scala
|
bsd-3-clause
| 193
|
package com.github.vonnagy.service.container.http.routing
import akka.ConfigurationException
import akka.actor._
import akka.http.scaladsl.server.{Route, RouteConcatenation}
import scala.concurrent.ExecutionContext
/**
* Add a set of defined routes
*
* @param route
*/
case class AddRoute(route: RoutedEndpoints)
/**
* This message is sent back to the sender when the route has been officially added
*/
case class RouteAdded()
/**
* Get a sequence of defined routes
*/
case class GetRoutes()
/**
* This is the return from the message ``GetRoutes``
*
* @param routes the currently defined routes
*/
case class Routes(routes: Seq[RoutedEndpoints])
/**
* Allows you to construct an HTTP service from a concatenation of routes and wires in the error handler.
* It also logs all internal server errors using ``ActorLoggingAdapter``.
*/
trait RoutedService extends RoutingHandler with RouteConcatenation {
this: Actor =>
def conf = context.system.settings.config
private[routing] var routes = Seq[RoutedEndpoints]()
// The base handler
val routeReceive: Receive = {
case AddRoute(route) => addRoute(route); sender ! RouteAdded
case GetRoutes => sender ! Routes(routes)
}
/**
* Load the designated routes and store them for later
* @param routeEndpoints
*/
def loadAndBuildRoute(routeEndpoints: Seq[Class[_ <: RoutedEndpoints]]): Route = {
routes = loadRoutes(routeEndpoints)
buildRoute(routes)
}
/**
* Build the routes from sequence of ``RoutedEndpoints``
*
* @param services the services that will be used to build the routes
* @return an instance of ``Route``
*/
private[routing] def buildRoute(services: Iterable[RoutedEndpoints]): Route = {
services.map(_.route).reduceLeft(_ ~ _)
}
/**
* Add the route and reset the message handler
*
* @param route the route to add
*/
private[routing] def addRoute(route: RoutedEndpoints): Unit = {
routes = routes ++ Seq(route)
}
/**
* Load the defined routes
*/
private def loadRoutes(routeEndpoints: Seq[Class[_ <: RoutedEndpoints]]): Seq[RoutedEndpoints] = {
log.info("Setting up all of the routes")
val newRoutes =
for {
route <- routeEndpoints
} yield {
val args = List(classOf[ActorSystem] -> context.system, classOf[ExecutionContext] -> context.dispatcher)
context.system.asInstanceOf[ExtendedActorSystem].dynamicAccess
.createInstanceFor[RoutedEndpoints](route.getName, args).map({
case route =>
route
}).recover({
case e => throw new ConfigurationException(
"RoutedEndpoints can't be loaded [" + route.getName +
"] due to [" + e.toString + "]", e)
}).get
}
newRoutes.toSeq
}
}
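// Editor's sketch (hedged): how the AddRoute/GetRoutes protocol handled by routeReceive above
// might be exercised from the outside via the ask pattern. `service` (an actor mixing in
// RoutedService) and `endpoints` are hypothetical; this assumes the same package as above.
import akka.actor.ActorRef
import akka.pattern.ask
import akka.util.Timeout
import scala.concurrent.{ExecutionContext, Future}

object RoutedServiceClientSketch {
  def registerAndList(service: ActorRef, endpoints: RoutedEndpoints)
                     (implicit timeout: Timeout, ec: ExecutionContext): Future[Seq[RoutedEndpoints]] =
    for {
      _     <- service ? AddRoute(endpoints)       // service replies with RouteAdded
      reply <- (service ? GetRoutes).mapTo[Routes] // service replies with the known routes
    } yield reply.routes
}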
|
vonnagy/service-container
|
service-container/src/main/scala/com/github/vonnagy/service/container/http/routing/RoutedService.scala
|
Scala
|
apache-2.0
| 2,821
|
package com.twitter.algebird
import org.specs._
import org.scalacheck.Arbitrary
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Properties
import org.scalacheck.Prop.forAll
import org.scalacheck.Gen.{choose, oneOf, value}
import java.lang.AssertionError
import java.util.Arrays
object SGDLaws extends Properties("SGD") {
import BaseProperties._
implicit val sgdMonoid = new SGDMonoid(SGD.constantStep(0.001), SGD.linearGradient)
val zeroStepMonoid = new SGDMonoid(SGD.constantStep(0.0), SGD.linearGradient)
val (m, b) = (2.0, 4.0)
val eps = 1e-3
val sgdPosGen = for(
x <- choose(0.0, 1.0);
n <- choose(0.0, 0.001)
) yield SGDPos((m * x + b + n, IndexedSeq(x)))
val sgdWGen = for(
cnt <- choose(0L, 100000L);
m <- choose(-10.0, 10.0);
b <- choose(-10.0, 10.0)
) yield SGDWeights(cnt, Vector(m,b))
val zeroGen = value(SGDZero)
implicit val sgdPos = Arbitrary(sgdPosGen)
implicit val sgdWArb = Arbitrary(sgdWGen)
implicit val sgdArb : Arbitrary[SGD[(Double, IndexedSeq[Double])]] = Arbitrary {
oneOf(sgdWGen, sgdPosGen, zeroGen)
}
property("is a Monoid") = monoidLaws[SGD[(Double,IndexedSeq[Double])]]
property("Gradient is zero on the line") = forAll { (w : SGDWeights, x : Double) =>
val m = w.weights(0)
val b = w.weights(1)
val y = m*x + b
y.isInfinity || {
val pos = (y, IndexedSeq(x))
val grad = SGD.linearGradient(w.weights, pos)
(scala.math.abs(grad(0)) < eps) && (scala.math.abs(grad(1)) < eps)
}
}
property("Gradient at x=0 has zero first component") = forAll { (w : SGDWeights, y : Double) =>
SGD.linearGradient(w.weights,(y, IndexedSeq(0.0)))(0) == 0.0
}
property("Zero-step leaves Weights unchanged") = forAll {
(w : SGDWeights, pos : SGDPos[(Double,IndexedSeq[Double])]) =>
val next = zeroStepMonoid.newWeights(w, pos.pos.head)
next.weights == w.weights && next.count == (w.count + 1L)
}
def minus(x : IndexedSeq[Double], y : IndexedSeq[Double]) : IndexedSeq[Double] = {
x.zip(y).map { case (x : Double, y : Double) => x-y }
}
val oneStepMonoid = new SGDMonoid(SGD.constantStep(1.0), SGD.linearGradient)
property("unit step can be undone by adding gradient") = forAll {
(w : SGDWeights, pos : SGDPos[(Double,IndexedSeq[Double])]) =>
val next = oneStepMonoid.newWeights(w, pos.pos.head)
next.weights == minus(w.weights, SGD.linearGradient(w.weights, pos.pos.head))
}
}
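// Editor's sketch (hedged): a concrete instance of the "Gradient is zero on the line" property
// above, using the same SGD.linearGradient helper. The sample lies exactly on y = 2x + 4, so the
// squared-error gradient with respect to (m, b) should be (numerically) zero.
object SGDGradientExample extends App {
  import com.twitter.algebird.SGD
  val weights = Vector(2.0, 4.0)                 // (m, b)
  val x = 3.0
  val onLine = (2.0 * x + 4.0, IndexedSeq(x))    // y = m*x + b, i.e. no residual
  val grad = SGD.linearGradient(weights, onLine)
  assert(grad.forall(g => math.abs(g) < 1e-9), s"expected ~0 gradient, got $grad")
}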
|
snoble/algebird
|
algebird-test/src/test/scala/com/twitter/algebird/SGDTest.scala
|
Scala
|
apache-2.0
| 2,475
|
package picasso.analysis
import picasso.utils._
import picasso.model.dbp._
import picasso.graph._
import picasso.math._
import TransitionsGraphFromCover._
object InterfaceExtraction {
type ObjType = String
type Field = String
type UnaryPredicates = Map[String, Boolean]
type BinaryPredicates = Map[String, Map[Field,Boolean]]
//obj: class name, neighborhood (to what it is connected), unary predicates, binary predicates
type Obj = (ObjType, Map[Field, Iterable[ObjType]], UnaryPredicates, BinaryPredicates)
//types for the transitions and the language
//modifiers for tracking the eq classes
sealed abstract class Multiplicity
case object One extends Multiplicity
case object May extends Multiplicity
case object Part extends Multiplicity
case object Rest extends Multiplicity //Rest means All but ...
type Changes[A] = Map[A, Iterable[(Multiplicity, A)]]
//use DepthBoundedConf to preserve the depth
type IGT = DBCT{type State = Obj; type EL = String}
type G = DepthBoundedConf[IGT]
/** What happens each time a method is called.
* Src: the input
* Role: maps some nodes to their role in the fct call (callee, args, ...)
* String: the method name
* Become: what an object becomes / might become
* Iterable[Obj]: the newly created objects [one in the list]
* Dest: the output
*/
type MethodCall = (G, Map[G#V, String], String, Changes[G#V], Iterable[G#V], G)
type IGT2 = GT.ELGT{type V = G; type EL = MethodCall}
type Interface = EdgeLabeledDiGraph[IGT2]
def multiplicityToString(m: Multiplicity) = m match {
case One => "one"
case May => "may"
case Part => "some"
case Rest => "rest"
}
def objToString(obj: Obj): String = {
val (tpe, ptsTo, unary, binary) = obj
val flatBinary = for ( (p, fb) <- binary; (f,b) <- fb) yield (p,f,b)
tpe +
ptsTo.view.map{case (f, t) => f + " -> " + t}.mkString("(",", ",")") +
unary.mkString("[",",","]") +
flatBinary.mkString("{",",","}")
}
def simplify[A](b: Changes[A]): Changes[A] = {
def simplify1(collec: Iterable[(Multiplicity, A)]): Iterable[(Multiplicity, A)] = {
val (many, single) = collec.partition{ case (m, _) => m == Rest || m == Part }
val byObj = many.groupBy(_._2)
val many2 = byObj.map{ case (obj, lst) =>
if (lst.exists(_._1 == Rest)) (Rest, obj) else (Part, obj)
}
single ++ many2
}
b.map{ case (a,b) => (a, simplify1(b))}
}
def loopAcceleration[A](becomes: Changes[A], eq: (A, A) => Boolean = ( (x:A,y:A) => x == y)): Changes[A] = {
//println(becomes.mkString("\\n"))
for ( (src, dests) <- becomes ) yield {
if (dests.size == 1) {
Logger.assert(dests exists { case (m, v) => eq(v, src) && (m == Rest || m == One) }, "InterfaceExtraction", "frame not in dests: " + dests)
(src, dests)
} else if (dests.size == 2) {
Logger.assert(dests exists { case (m, v) => eq(v, src) && m == Rest }, "InterfaceExtraction", "frame not in dests: " + dests)
Logger.assert(dests exists { case (m, v) => m == One && !eq(v, src) }, "InterfaceExtraction", "dest not in dests: " + dests)
(src, dests.map{ case (m, v) => if (eq(v, src)) (m,v) else (Part, v) })
} else {
Logger.logAndThrow("InterfaceExtraction", LogError, "expected loop with single dest + frame: " + dests)
}
}
}
def composeMultiplicities(m1: Multiplicity, m2: Multiplicity) = m1 match {
case Rest => m2
case Part => m2 match {
case Rest => Part
case _ => m2
}
case One => m2 match {
case Rest | One => One
case Part | May => May
}
case May => May
}
def compose[A](a: Changes[A], b: Changes[A]): Changes[A] = {
//when not disjoint: One/May -> May, Rest/Part -> Part
val onlyB = b -- a.keys
val a2 = a.map{ case (k, vs) =>
(k, vs.flatMap{ case (m, v) =>
b.getOrElse(v, Seq(Rest -> v)).map{ case (m2, v2) => (composeMultiplicities(m,m2), v2) }
})
}
simplify(a2 ++ onlyB)
}
/** methodName(thisType {, argType}* )[: newObj] [; comment]
* returns (thisType, methodName, argsTypes, [newObj])
*/
def parseTransitionName(str: String): Option[(ObjType, String, Seq[ObjType], Option[ObjType])] = {
val woComment =
if (str.indexOf(";") == -1) str
else str.substring(0, str.indexOf(";"))
try {
val lparen = woComment.indexOf("(")
val rparen = woComment.indexOf(")")
val methodName = woComment.substring(0, lparen)
val tpes = woComment.substring(lparen + 1, rparen).split(",").map(_.trim)
val rest = woComment.substring(rparen + 1)
val created =
if (rest contains ":") Some(rest.substring(rest.indexOf(":") + 1).trim)
else None
Some((tpes.head, methodName, tpes.tail, created))
} catch {
case e: java.lang.StringIndexOutOfBoundsException =>
None
}
}
import picasso.utils.report._
//pretty print the Interface
def report(interface: Interface): Item = {
import scala.text.Document
import scala.text.Document._
//collect the objects
val idsForCover = interface.vertices.iterator.zipWithIndex.map{ case (o, idx) => (o, "cover_"+idx) }.toMap
val idsForTransitions = interface.edges.iterator.zipWithIndex.map{ case (t, idx) => (t._2, "t_"+idx) }.toMap
def gToGv(g: G, id: String, kind: String = "digraph", misc: Document = empty, roles: Map[G#V, String] = Map.empty): (Document, Map[G#V, String]) = {
def idsForObj(obj: G#V) = {
val (tpe, ptsTo, unary, binary) = obj.state
val flatBinary = for ( (p, fb) <- binary; (f,b) <- fb) yield (p,f,b)
val name =
if (roles.contains(obj)) roles(obj) + ": " + tpe
else tpe
Misc.quote(
"{ " + name + " | " +
unary.map{ case (n,b) => if (b) n else "not " + n }.mkString(" | ") +
flatBinary.mkString(" | ") +
"}"
)
}
g.toGraphvizWithNesting(
id, kind, misc, id,
(node => List("label" -> idsForObj(node), "shape" -> "record")),
(edge => Nil)
)
}
def mkTitle(method: String, roles: Map[G#V, String]): String = {
val objByRole = roles.map[(String, G#V), Map[String, G#V]]{ case (a, b) => (b, a) }
val calleeName = objByRole.get("callee") match {
case Some(cl) => objToString(cl.state)
case None => "???"
}
val args = for (i <- 0 until roles.size -1) yield {
objByRole.get("arg" + i) match {
case Some(cl) => objToString(cl.state)
case None => "???"
}
}
calleeName + "." + method + args.mkString("(", ", ", ")")
}
def multiplicityToEdgeProp(m: Multiplicity) = m match {
case One => "color=\\"#00FF00\\""
case May => "color=\\"#00FF00\\",style=\\"dashed\\""
case Part => "color=\\"#0000FF\\""
case Rest => "color=\\"#0000FF\\""
}
def completeRoleWithScope(g: G, roles: Map[G#V, String]): Map[G#V, String] = {
val scope = g.vertices.filter(_.depth == 0) -- roles.keys
(roles /: scope.zipWithIndex)( (acc, s) => acc + (s._1 -> ("s" + s._2)))
}
def keepRelevantEdges(changes: Changes[G#V]): Changes[G#V] = {
changes.filter{ case (k, v) => v.size != 1 || k.depth > 0 }
}
//a few graph to print:
//-the structure of the automaton: give unique name to node and transition
val outline = interface.toGraphvizExplicit(
"outline",
"digraph",
scala.text.Document.empty,
"outline",
(node => List("label" -> idsForCover(node))),
(edge => List("label" -> idsForTransitions(edge)))
)._1
//-the content of the nodes
val nodesGraphs = idsForCover.map{ case (g, id) => (id, gToGv(g, id)._1) }
//-the transitions
val trsGraphs = interface.edges.map{ case (coverFrom , el @ (from, roles, method, changes, news, to), coverTo) =>
val id = idsForTransitions(el)
val title = mkTitle(method, roles)
val rolesWithScope = completeRoleWithScope(from, roles)
val (fromGv, fromNodesToId) = gToGv(from, "cluster_" + id + "_src", "subgraph", text("label = \\"LHS("+idsForCover(coverFrom)+")\\";"), rolesWithScope)
val newsRole = news.zipWithIndex.map{ case (n,i) => (n, "new"+i) }
val rolesAfter = rolesWithScope.flatMap[(G#V, String),Map[G#V, String]]{ case (n, r) => changes.get(n).map(_.head._2 -> r) } ++ newsRole //TODO some assertion
val (toGv, toNodesToId) = gToGv(to, "cluster_" + id + "_to", "subgraph", text("label = \\"RHS("+idsForCover(coverTo)+")\\";"), rolesAfter)
val changesEdges = keepRelevantEdges(changes).iterator.flatMap{ case (a, bs) => bs.map{ case (m, b) =>
text( fromNodesToId(a) + " -> " + toNodesToId(b) + " ["+ multiplicityToEdgeProp(m) +"];")
}
}
val graphs = fromGv :/: toGv
val body = (graphs /: changesEdges)(_ :/: _)
val gv = "digraph " :: id :: " {" :/: nest(4, body) :/: text("}")
Logger("InterfaceExtraction", LogDebug, title + ": " + Misc.docToString(gv))
(id, title, gv)
}
//pack everything into a report item.
val top = new List("Interface")
val outlineStr = Misc.docToString(outline)
top.add(new GenericItem("Outline", outlineStr, Misc.graphvizToSvgDot(outlineStr)))
val cover = new List("Cover")
for ( (id, graph) <- nodesGraphs ) {
val gv = Misc.docToString(graph)
cover.add(new GenericItem(id, gv, Misc.graphvizToSvgDot(gv)))
}
top.add(cover)
val trs = new List("Transitions")
for ( (id, title, graph) <- trsGraphs ) {
val gv = Misc.docToString(graph)
trs.add(new GenericItem(id + ": " + title, gv, Misc.graphvizToSvgDot(gv)))
}
top.add(trs)
top
}
}
class InterfaceExtraction[P <: DBCT](proc: DepthBoundedProcess[P], cover: DownwardClosedSet[DepthBoundedConf[P]]) {
import InterfaceExtraction._
type DP = DepthBoundedConf[P]
type DPV = (P#V, DepthBoundedConf[P])
  /* TODO language extraction from the transition graph (should go into its own file)
   * assume transition name / comment are of the form "methodName(thisType)[: newObj] [, comment]"
   * methods that do not have this shape are transient methods (from which the result should be integrated in the first correctly named predecessor).
*
* 1st step: identifies the equivalence classes (object node with the predicates)
* 2nd step: go along the edges (and morphing) while tracking the equivalence classes of this and the other objects
* 3rd step: structure the output ...
*/
/** Checks whether the system respects the assumption needed for the interface extraction. */
def conforms: Boolean = {
//TODO
//method names
//predicate / object names in the graph
//type of transition
//...
sys.error("TODO")
}
val tg: EdgeLabeledDiGraph[TG[P]] = TransitionsGraphFromCover(proc, cover)
protected def typeOf(node: P#V) = node.state.toString
  protected def isObj(node: P#V) = typeOf(node).head.isUpper //TODO this is a HACK!
protected def isPred(node: P#V) = !isObj(node)
protected def predValue(p: P#V): (String, Boolean) = {
val nme = typeOf(p)
if (nme startsWith "not_") (nme.substring(4), false)
else (nme, true)
}
protected def isError(conf: DP) = {
conf.vertices.exists(v => typeOf(v).endsWith("Error")) &&
conf.vertices.forall(v => typeOf(v) != "safe")
}
protected def isTransient(conf: DP) = {
conf.vertices.exists(v => typeOf(v).startsWith("transient")) &&
conf.vertices.forall(v => typeOf(v) != "safe")
}
protected def eqClassToObj(cl: DPV): Obj = {
//TODO the code for this method is really bad.
//it can be made much faster, but since it is not a performance bottleneck ...
Logger("InterfaceExtraction", LogDebug, "eqClassToObj: " + cl)
val (node, graph) = cl
val successors = graph(node)
assert( (successors + node) == graph.vertices, "unrelated nodes in a DPV of " + node + "\\n" + graph)
val (objs, preds) = successors.partition(isObj)
val objsWithField = objs.groupBy(o => {
graph.outEdges(node).find{ case (k, v) => v contains o }.get._1
}).map{ case (k, v) => (k.toString, v map typeOf) }
val (unaryPreds, binaryPreds) = preds.partition( p => {
objs.forall(o => !graph(o).contains(p) ) //unary is pointed only by the node itself
})
val unaryValues = unaryPreds.map(predValue).toMap
val binaryValues = binaryPreds.map( p => {
val pointedBy = objs.filter(o => graph(o).contains(p) )
assert(pointedBy.size == 1, pointedBy.mkString(", "))
val other = pointedBy.head
val field = graph.outEdges(node).find{ case (k, v) => v contains other }.get._1
val (pName, v) = predValue(p)
(pName, (typeOf(other), v))
}).groupBy(_._1).map{ case (k, v) => (k, v.map(_._2).toMap ) }
(typeOf(node), objsWithField, unaryValues, binaryValues)
}
protected def extractDPV(graph: DP, node: P#V): DPV = {
Logger.assert(
graph contains node,
"InterfaceExtraction",
"extractDPV: " + node + " is not in " + graph
)
val neighbors = graph(node).filter(isObj)
val allPreds = graph(node).filter(isPred)
val revG = graph.reverse
val preds = allPreds.filter(p => {
//val out = revG(p)
//out.size == 1 || (out.size > 1 && neighbors.exists(out))
true
})
//for the binary preds, keep only the pred if the other guy is a neighbor
val keep = neighbors ++ preds + node
val restricted = graph filterNodes keep
//flatten to keep a single object.
val height = node.depth
if (height > 0) {
val withLower = restricted.vertices.map(v => (v, v.setDepth(math.max(0, v.depth - height))) )
val morphing = withLower.toMap[P#V,P#V]
(morphing(node), restricted morph morphing)
} else {
(node, restricted)
}
}
protected def inDPV(d1: DPV, d2: DPV): Boolean = {
//check whether there is a morphism between d1 and d2 (compatible with the main obj)
d1._2.morphisms(d2._2, Map(d1._1 -> d2._1))(proc.stateOrdering).hasNext
}
protected def sameDPV(d1: DPV, d2: DPV): Boolean = {
inDPV(d1, d2) && inDPV(d2, d1)
}
protected lazy val eqClassesInGraph: Set[DPV] = {
//get every object in every "non transient location" and trim the graph so that only the eq class is left.
//a non transient location is a location of the cover (not all the location in tg).
val objs = cover.flatMap( graph => {
val objsNode = graph.vertices.filter(isObj)
objsNode.map( n => extractDPV(graph, n) )
})
//remove the duplicates
//first group by the node label so that we compare only objs of the same type
val objByType = objs.groupBy( o => typeOf(o._1) )
objByType.values.flatMap( sameType => {
(Set[DPV]() /: sameType)( (acc, obj) => {
if (acc.exists(sameDPV(obj, _)) ) acc else acc + obj
})
}).toSet
}
protected lazy val eqClassesMap: Map[DPV, Obj] = {
eqClassesInGraph.iterator.map(conf => (conf, eqClassToObj(conf))).toMap
}
lazy val eqClasses: Set[Obj] = eqClassesMap.values.toSet
protected def findClassOf(conf: DP, obj: P#V): DPV = {
val extracted = extractDPV(conf, obj)
val candidate = eqClassesInGraph.find( dpv => inDPV(extracted, dpv) )
Logger.assert(
candidate.isDefined,
"InterfaceExtraction",
"findClassOf: no candidate found for " + obj + "\\n" + conf + extracted._1 + "\\n" + extracted._2
+ "in\\n" + eqClassesMap.keys.mkString("\\n")
)
candidate.get
}
protected def simpleTracking(curr: (Changes[P#V], List[P#V]), mapping: Map[P#V,P#V]) = {
val (goesTo, news) = curr
val goesToPrime = goesTo.map[(P#V, Iterable[(Multiplicity, P#V)]), Map[P#V, Iterable[(Multiplicity, P#V)]]]{ case (k,v) =>
(k, v map {case (m, eq) => (m, mapping(eq))} )
}
val newsPrime = news map mapping
(goesToPrime, newsPrime)
}
protected def backwardTracking(curr: (Changes[P#V], List[P#V]), mapping: Map[P#V, Seq[P#V]]) = {
val (becomes, news) = curr
val becomesPrime = becomes.map[(P#V, Iterable[(Multiplicity, P#V)]), Map[P#V, Iterable[(Multiplicity, P#V)]]]{ case (k,vs) =>
(k, vs flatMap { case (m, v) =>
val target = mapping(v)
if (target.isEmpty) {
Logger.logAndThrow("InterfaceExtraction", LogError, v.label + " disappears during the unfolding.")
} else if (target.size == 1) {
Seq(m -> target.head)
} else {
val highest = target.maxBy(_.depth)
val (concrete, between) = target.view.filterNot(_ ==highest).partition(_.depth == 0)
Logger.assert(between.forall(_.depth < highest.depth), "InterfaceExtraction", "multiple nodes with highest depth.")
val highestM = m match {
case Rest => Rest
case Part => Part
case _ => Logger.logAndThrow("InterfaceExtraction", LogError, "unfolding of concrete node !?")
}
val betweenM: Seq[(Multiplicity, P#V)] = between.map( Part -> _ )
val concreteM: Seq[(Multiplicity, P#V)] = concrete.map ( One -> _ )
val res: Seq[(Multiplicity, P#V)] = Seq(highestM -> highest) ++ betweenM ++ concreteM
res
}
})
}
val newsPrime = news flatMap mapping
(becomesPrime, newsPrime)
}
/* Adapting the tracking to get the unfolded version
first -> unfolded in the base of the tracking
last -> stops before the folding
*/
protected def trackAux(curr: (Changes[P#V], List[P#V]), edge: TGEdges[P], first: Boolean = false, last: Boolean = false): (Changes[P#V], List[P#V]) = edge match {
case Transition(witness) =>
//println("following transition: " + witness.transition.id)
/// just in case ///
witness.checkMorphisms
witness.complete
witness.checkMorphisms
////////////////////
//unfolding (this one is reversed)
val curr2 =
if (witness.isUnfoldingTrivial) curr
else if (first) initialTracking(witness.unfolded)
else backwardTracking( curr, witness.reversedUnfolding)
//inhibiting
val curr3 =
if (witness.isInhibitingTrivial) curr2
else {
Logger.logAndThrow("InterfaceExtraction", LogError, "TODO tracking of inhibitors")
}
//post
val newNew = (witness.unfoldedAfterPost.vertices -- witness.post.values).filter(isObj)
val (changed, newTracked) =
if (witness.isPostTrivial) curr3
else simpleTracking(curr3, witness.post)
val curr4 = (changed, newTracked ++ newNew)
//folding
val curr5 =
if (witness.isFoldingTrivial || last) curr4
else simpleTracking(curr4, witness.folding)
curr5
case Covering(mapping) =>
Logger.assert(!(first || last), "InterfaceExtraction", "track: Covering edge for first or last")
//println("following covering")
simpleTracking(curr, mapping)
}
protected def track(curr: (Changes[P#V], List[P#V]), edge: TGEdges[P]) =
trackAux(curr, edge, false, false)
protected def trackFirst(curr: (Changes[P#V], List[P#V]), edge: TGEdges[P]) =
trackAux(curr, edge, true, false)
protected def trackLast(curr: (Changes[P#V], List[P#V]), edge: TGEdges[P]) =
trackAux(curr, edge, false, true)
protected def trackFirstLast(curr: (Changes[P#V], List[P#V]), edge: TGEdges[P]) =
trackAux(curr, edge, true, true)
/** identify the objects in a DP and make a map to self */
protected def initialTracking(from: DP) = {
val objsNode: Set[P#V] = from.vertices.filter(isObj)
val iter: Iterator[(P#V, Seq[(Multiplicity, P#V)])] = objsNode.iterator.map(n => (n, Seq( (if (n.depth == 0) One else Rest) -> n)))
val objsMap: Changes[P#V] = Map[P#V, Seq[(Multiplicity, P#V)]]() ++ iter
(objsMap, List[P#V]())
}
protected def withType(nodes: Iterable[P#V], tpe: ObjType, role: String): Option[P#V] = {
val candidates = nodes.filter(n => typeOf(n) == tpe)
if (candidates.isEmpty) {
Logger("InterfaceExtraction", LogWarning, "pathToMethodCall: no candidates for " + role)
None
} else {
if (candidates.size > 1) {
Logger( "InterfaceExtraction",
LogWarning,
"pathToMethodCall: more than one candidate for " + role + ": " + candidates.mkString(", ")
)
}
Some(candidates.head)
}
}
protected def nodeWithID(witness: TransitionWitness[P], conf: DP, id: String): Option[P#V] = {
witness.lhsIDs.find{ case (a,b) => b == id } match {
case Some((n,_)) =>
Logger.assert(conf contains n, "InterfaceExtraction", id + " in lhsIDs but not conf.")
Some(n)
case None =>
Logger("InterfaceExtraction", LogWarning, "pathToMethodCall: no candidates for " + id)
None
}
}
// parse the comment from the transition to get the method name and the type/id of the callee
protected def parseTransition(edge: TGEdges[P]): (Map[P#V, String], String, Option[ObjType]) = edge match {
case Transition(witness) =>
Logger("InterfaceExtraction", LogInfo, "making edge for: " + witness.transition.id)
parseTransitionName(witness.transition.id) match {
case Some((tpe, call, args, created)) =>
//println("looking for candidate of type " + tpe + " in " + witness.modifiedPre.map(typeOf).mkString(", "))
val concreteNodes = witness.modifiedUnfolded.filter(n => n.depth == 0)
val callee: Option[P#V] = nodeWithID(witness, witness.unfolded, "callee")
val indexed: Iterable[(ObjType, Int)] = args.zipWithIndex
val initRole: Map[P#V, String] =
if(callee.isDefined) Map[P#V, String](callee.get -> "callee")
else Map.empty[P#V, String]
val roles: Map[P#V, String] = (initRole /: indexed)( (acc, a) => {
val role = "arg" + a._2
val candidate: Option[P#V] = nodeWithID(witness, witness.unfolded, role)
if (candidate.isDefined) acc + (candidate.get -> role)
else acc
})
(roles, call, created)
case None =>
Logger("InterfaceExtraction", LogWarning, "pathToMethodCall: cannot parse \\""+witness.transition.id+"\\"")
(Map.empty[P#V, String], "---", None)
}
case _ => Logger.logAndThrow("InterfaceExtraction", LogError, "pathToMethodCall: expected Transition")
}
protected def removePreds(conf: DP): (G, Map[P#V, G#V]) = {
val (objNode, predNodes) = conf.vertices.partition(isObj)
val nodeTObj = objNode.iterator.map( v => (v, Thread(eqClassToObj(findClassOf(conf,v)), v.depth)) ).toMap[P#V, G#V]
val objOnly = conf -- predNodes
(objOnly.morphFull[IGT](nodeTObj, x => x.toString, x => (x.state, x.depth)), nodeTObj)
}
protected def composeTracking( t1: (Changes[P#V], List[P#V]),
t2: (Changes[P#V], List[P#V])
): (Changes[P#V], List[P#V])= {
val (t1Changes, t1News) = t1
val (t2Changes, t2News) = t2
val resChanges = t1Changes.map[(P#V, Iterable[(Multiplicity, P#V)]), Changes[P#V]]{ case (a, bs) =>
(a, bs.flatMap{ case (m, b) =>
t2Changes(b).map{ case (m2, b2) => (composeMultiplicities(m, m2), b2) }
})
}
val resNews = t1News.flatMap( n => t2Changes(n).map(_._2) )
(resChanges, resNews)
}
/* a trace should start in a non-transient cover location,
   * end in a non-transient cover location, and include the transient loops. */
protected def traceToMethodCall(trace: Trace[DP, TGEdges[P]]): (DP, MethodCall, DP) = {
//get the differents parts of the trace:
// the first transition -> track after the unfolding
val ((s1, transition1), tail1) = trace.step
val ((s2, cover1), tail2) = tail1.step
// the loops -> accelerate and track
val (loops, tail3) = tail2.splitAfter(tail2.length - 2)
// sanity checks
Logger.assert(!isTransient(s1), "InterfaceExtraction", "traceToMethodCall: transient start")
Logger.assert(loops.states.dropRight(1).forall(isTransient), "InterfaceExtraction", "non-transient middle")
//
//parse the comment from the transition to get the method name and the type/id of the callee
val (roles, method, createdTpe) = parseTransition(transition1)
//follows ....
val initTracking = initialTracking(s1)
val (goesTo, news, last) = if (loops.isEmpty) {
val last = transition1 match {
case Transition(witness) => witness.unfoldedAfterPost
case _ => Logger.logAndThrow("InterfaceExtraction", LogError, "expected Transition")
}
val (a,b) = trackFirstLast(initTracking, transition1)
(a,b,last)
} else {
// the last transition -> stops tracking before the unfolding
val ((endOfLoop, lastTransition), end) = tail3.step
val last = lastTransition match {
case Transition(witness) => witness.unfoldedAfterPost
case _ => Logger.logAndThrow("InterfaceExtraction", LogError, "expected Transition")
}
//the prefix
val firstChange = track(trackFirst(initTracking, transition1), cover1)
//the loop
val loopTracking = initialTracking(loops.start)
val (loopChanges, loopNews) = (loopTracking /: loops.labels)(track)
Logger.assert(loopNews.isEmpty, "InterfaceExtraction", "TODO new object and acceleration")
val accelerated = loopAcceleration(loopChanges) //, (x: EqClass, y: EqClass) => x.obj == y.obj)
//the suffix
val lastTracking = initialTracking(endOfLoop)
val lastChange = trackLast(lastTracking, lastTransition)
//compose ...
val (a, b) = List(firstChange, (accelerated, loopNews), lastChange) reduceLeft composeTracking
(a, b, last)
}
Logger.assert(!isTransient(last), "InterfaceExtraction", "traceToMethodCall: transient stop")
val (src, srcMap) = transition1 match {
case Transition(witness) => removePreds(witness.unfolded)
case _ => Logger.logAndThrow("InterfaceExtraction", LogError, "expected Transition")
}
val (dst, dstMap) = removePreds(last)
val becomesObj = simplify(goesTo.map{ case (k,vs) =>
(srcMap(k), vs map { case (m, cl) =>
(m, dstMap(cl)) } ) })
val newsObj = news map dstMap
val roles2 = roles.map[(G#V, String), Map[G#V, String]]{ case (node, role) => (srcMap(node), role) }
val call = (src, roles2, method, becomesObj, newsObj, dst)
(trace.start, call, trace.stop)
}
protected def makeNormalTraces: Iterable[Trace[DP, TGEdges[P]]] = {
//normal traces
val paths = tg.simplePaths
Logger.assert(paths.forall(p => cover.contains(p.start) && cover.contains(p.stop)),
"InterfaceExtraction",
"TODO makeTraces: a more complex way of spliting the paths ...")
val paths2 = paths.view.flatMap(p => p.split(loc => cover.basis.contains(loc)))
val paths3 = paths2.filter(p => !isTransient(p.start) && !isTransient(p.stop) )
paths3.force
}
protected def makeTransientTraces: Iterable[Trace[DP, TGEdges[P]]] = {
// tg: EdgeLabeledDiGraph[TG[P]]
val revTG = tg.reverse
val transientStates = cover.basis.seq.filter(isTransient)
//includes the self loops
def mkTransientPath(t: DP): Iterable[Trace[DP, TGEdges[P]]] = {
//needs to do two steps because of the way the tg graph is ...
def twoStepsTo( from: DP,
graph: EdgeLabeledDiGraph[TG[P]],
pred: DP => Boolean
): Iterable[(TGEdges[P], DP, TGEdges[P], DP)] = {
for( (label1, dest1) <- graph.outEdges(from);
s1 <- dest1;
(label2, dest2) <- graph.outEdges(s1);
s2 <- dest2 if pred(s2))
yield (label1, s1, label2, s2)
}
def twoStepsToConcrete(from: DP, graph: EdgeLabeledDiGraph[TG[P]]) =
twoStepsTo(from, graph, x => !isTransient(x))
def twoStepsToSelf(from: DP, graph: EdgeLabeledDiGraph[TG[P]]) =
twoStepsTo(from, graph, x => x == from)
val prefixes = twoStepsToConcrete(t, revTG)
val loops = twoStepsToSelf(t, tg)
val suffixes = twoStepsToConcrete(t, tg)
Logger("InterfaceExtraction", LogDebug, "#prefixes: " + prefixes.size)
Logger("InterfaceExtraction", LogDebug, "#loops: " + loops.size)
Logger("InterfaceExtraction", LogDebug, "#suffixes: " + suffixes.size)
for ( (c2, s2, c1, s1) <- prefixes;
(c3, s3, c4, s4) <- loops;
(c5, s5, c6, s6) <- suffixes )
yield Trace(s1, (c1, s2), (c2, t), (c3, s3), (c4, t), (c5, s5), (c6, s6))
}
val traces = transientStates.iterator.flatMap( mkTransientPath ).toIterable
traces
}
protected def makeTraces = {
val traces = makeNormalTraces ++ makeTransientTraces
Logger("InterfaceExtraction", LogInfo, "transitions are:\\n" + traces.map(_.labels.mkString("; ")).mkString("\\n"))
Logger.assert(
{
val trs = (0 /: traces)( (acc, t) => acc + t.length)
trs >= tg.edges.size
},
"InterfaceExtraction",
"makeTraces is not covering the whole graph"
)
traces
}
def pruneCall(call: MethodCall): MethodCall = {
val (src, roles, method, changes, news, dst) = call
val lhsSeeds = roles.flatMap{ case (v,r) =>
if (r == "callee" || r.startsWith("arg")) Some(v) else None }.toSet
val changed = changes.filterNot{ case (a,bs) =>
if (bs.size == 1) {
val (m, b) = bs.head
((m == Rest || m == One) && b.state == a.state)
} else false
}.keySet
val srcClosed = src.transitiveClosure( (a,_) => a )
val reachFromSeed = lhsSeeds.flatMap(x => srcClosed(x) )
val toKeep = src.vertices.filter(x => changed(x) || lhsSeeds(x) || srcClosed(x).exists(changed)) ++ reachFromSeed
val src2 = src.filterNodes(toKeep)
val changes2 = changes.filterKeys(toKeep)
val changesRange = changes2.values.flatMap(_.map(_._2)).toSet
val newsSet = news.toSet
val dst2 = dst.filterNodes(n => newsSet(n) || changesRange(n) )
(src2, roles, method, changes2, news, dst2)
/*
//transitively reachable
val lhsSeeds = roles.keySet
val srcClosed = src.transitiveClosure( (a,_) => a )
val toKeep = src.vertices.filter(x => lhsSeeds(x) || srcClosed(x).exists(lhsSeeds))
val src2 = src.filterNodes(toKeep)
val changes2 = changes.filterKeys(toKeep)
val changesRange = changes2.values.flatMap(_.map(_._2)).toSet
val newsSet = news.toSet
val dst2 = dst.filterNodes(n => newsSet(n) || changesRange(n) )
(src2, roles, method, changes2, news, dst2)
*/
//connected component rather than changed.
/*
val lhsSeeds = roles.keySet
val toKeep = (Set.empty[G#V] /: src.CC)( (acc, cc) => if (cc exists lhsSeeds) acc ++ cc else acc )
val src2 = src.filterNodes(toKeep)
val changes2 = changes.filterKeys(toKeep)
val changesRange = changes2.values.flatMap(_.map(_._2)).toSet
val newsSet = news.toSet
val dst2 = dst.filterNodes(n => newsSet(n) || changesRange(n) )
(src2, roles, method, changes2, news, dst2)
*/
/*
val changes2 = changes.filterNot{ case (a,bs) =>
if (!roles.contains(a) && bs.size == 1) {
val (m, b) = bs.head
(m == Rest && b.state == a.state)
} else false
}
val src2 = src.filterNodes(n => roles.contains(n) || changes2.keySet(n))
val changesRange = changes2.values.flatMap(_.map(_._2)).toSet
val newsSet = news.toSet
val dst2 = dst.filterNodes(n => newsSet(n) || changesRange(n) )
(src2, roles, method, changes2, news, dst2)
*/
}
def interface: Interface = {
Logger("InterfaceExtraction", LogNotice, "Extracting interface ...")
Logger( "InterfaceExtraction", LogDebug, Misc.docToString(
TransitionsGraphFromCover.structureToGraphviz(cover, tg) ) )
val dict = cover.basis.seq.iterator.map( c => (c, removePreds(c)._1) ).toMap
val edges: Iterable[(G, MethodCall, G)] = makeTraces.map( t => {
val (a,b,c) = traceToMethodCall(t)
(dict(a), pruneCall(b), dict(c))
})
val nodes = cover.basis.seq.filter(x => !isTransient(x)).map(dict)
EdgeLabeledDiGraph[IGT2](edges).addVertices(nodes)
}
}
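// Editor's sketch (hedged): the transition-name convention documented at parseTransitionName
// ("methodName(thisType {, argType}*)[: newObj] [; comment]") applied to a made-up example string.
object ParseTransitionNameExample extends App {
  import picasso.analysis.InterfaceExtraction.parseTransitionName
  val parsed = parseTransitionName("add(Map, Key, Value): Entry ; inserts a binding")
  // expected: Some(("Map", "add", Seq("Key", "Value"), Some("Entry")))
  println(parsed)
  assert(parsed.exists { case (callee, method, args, created) =>
    callee == "Map" && method == "add" && args.toList == List("Key", "Value") && created == Some("Entry")
  })
}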
|
dzufferey/picasso
|
core/src/main/scala/picasso/analysis/InterfaceExtraction.scala
|
Scala
|
bsd-2-clause
| 32,367
|
import sys.process._
object TextAnalyzer extends App {
if (args.length != 2) {
System.err.println( "Usage: TextAnalyzer" +
" <VectorizationModelFolder>" +
" <ClassificationModelFolder>"
)
System.exit(1)
}
val vectorizationModelFolder = args(0)
val classificationModelFolder = args(1)
val runningJavaProcesses = "jps -lm" !!
val runningSystemProcesses = "ps -aux" !!
if (!runningJavaProcesses.contains("org.apache.spark")) {
println("Spark is not running. Start spark before continue!")
System.exit(1)
}
println("Spark process was found\n")
if (!runningSystemProcesses.contains("kafka")) {
printf("Kafka is not running. Start kafka before continue!\n")
System.exit(2)
}
printf("Start listening analyzer\n")
val result: Int = StreamAnalyzer.startListening(vectorizationModelFolder, classificationModelFolder)
if(result != 0) {
printf("ERROR! %d when listening the analyzer\n", result)
System.exit(result)
}
}
|
dotdeb/Pilsner
|
Analytics/ScienceAnalyzer/Docker-full/spark-app/src/main/scala/TextAnalyzer.scala
|
Scala
|
apache-2.0
| 991
|
package org.example5.declaration.data
class Y {
}
|
JetBrains/intellij-scala
|
scala/scala-impl/testdata/move/moveMultipleClasses_UsedInLocalImports/after/org/example5/declaration/data/Y.scala
|
Scala
|
apache-2.0
| 52
|
import de.tototec.sbuild._
import de.tototec.sbuild.ant._
import de.tototec.sbuild.ant.tasks._
import de.tototec.sbuild.TargetRefs._
@version("0.4.0")
@include("../SBuildConfig.scala")
@classpath("mvn:org.apache.ant:ant:1.8.4")
class SBuild(implicit _project: Project) {
val namespace = "de.tototec.sbuild.compilerplugin"
val jar = s"target/${namespace}-${SBuildConfig.sbuildVersion}.jar"
val sourcesZip = s"target/${namespace}-${SBuildConfig.sbuildVersion}-sources.jar"
val compileCp =
s"mvn:org.scala-lang:scala-library:${SBuildConfig.scalaVersion}" ~
s"mvn:org.scala-lang:scala-reflect:${SBuildConfig.scalaVersion}" ~
s"mvn:org.scala-lang:scala-compiler:${SBuildConfig.scalaVersion}"
ExportDependencies("eclipse.classpath", compileCp)
Target("phony:all") dependsOn jar ~ sourcesZip
Target("phony:clean").evictCache exec {
AntDelete(dir = Path("target"))
}
Target("phony:compile").cacheable dependsOn SBuildConfig.compilerPath ~ compileCp ~ "scan:src/main/scala" exec {
val output = "target/classes"
addons.scala.Scalac(
compilerClasspath = SBuildConfig.compilerPath.files,
classpath = compileCp.files,
sources = "scan:src/main/scala".files,
destDir = Path(output),
unchecked = true, deprecation = true, debugInfo = "vars"
)
}
Target(jar) dependsOn "compile" ~ "scan:src/main/resources" ~ "LICENSE.txt" exec { ctx: TargetContext =>
new AntJar(destFile = ctx.targetFile.get, baseDir = Path("target/classes")) {
if (Path("src/main/resources").exists) add(AntFileSet(dir = Path("src/main/resources")))
add(AntFileSet(file = Path("LICENSE.txt")))
}.execute
}
Target(sourcesZip) dependsOn "scan:src/main/scala" ~ "scan:src/main/resources" ~ "scan:LICENSE.txt" exec { ctx: TargetContext =>
AntZip(destFile = ctx.targetFile.get, fileSets = Seq(
AntFileSet(dir = Path("src/main/scala")),
AntFileSet(dir = Path("src/main/resources")),
AntFileSet(file = Path("LICENSE.txt"))
))
}
Target("phony:scaladoc").cacheable dependsOn SBuildConfig.compilerPath ~ compileCp ~ "scan:src/main/scala" exec {
addons.scala.Scaladoc(
scaladocClasspath = SBuildConfig.compilerPath.files,
classpath = compileCp.files,
sources = "scan:src/main/scala".files,
destDir = Path("target/scaladoc"),
deprecation = true, unchecked = true, implicits = true,
docVersion = SBuildConfig.sbuildVersion,
docTitle = s"SBuild Scala Compiler Plugin API Reference"
)
}
}
|
SBuild-org/sbuild
|
de.tototec.sbuild.compilerplugin/SBuild.scala
|
Scala
|
apache-2.0
| 2,522
|
/**
* Copyright (c) 2013-2015 Patrick Nicolas - Scala for Machine Learning - All rights reserved
*
* The source code in this file is provided by the author for the sole purpose of illustrating the
* concepts and algorithms presented in "Scala for Machine Learning". It should not be used to
* build commercial applications.
* ISBN: 978-1-783355-874-2 Packt Publishing.
* Unless required by applicable law or agreed to in writing, software is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* Version 0.98
*/
package org.scalaml.scalability.akka.message
import akka.actor._
import org.scalaml.core.Types.ScalaMl._
import org.scalaml.core.XTSeries
/**
* <p>Generic message exchanged between a master and worker actors.</p>
* @param id Unique identifier for this message.
* @author Patrick Nicolas
* @since March 28, 2014
* @note Scala for Machine learning Chapter 12 Scalable Framework / Akka / Master-workers
*/
sealed abstract class Message(val id: Int)
/**
 * <p>Message sent by the master to terminate the worker actors.</p>
* @param i unique identifier for this message.
* @author Patrick Nicolas
* @since March 28, 2014
* @note Scala for Machine learning Chapter 12 Scalable Framework / Akka / Master-workers
*/
case class Terminate(i: Int) extends Message(i)
/**
* <p>Message sent to the master to initialize the computation.</p>
* @param i unique identifier for this message.
* @author Patrick Nicolas
* @since March 28, 2014
* @note Scala for Machine learning Chapter 12 Scalable Framework / Akka / Master-workers
*/
case class Start(i: Int =0) extends Message(i)
/**
 * <p>Message sent by the worker actors to notify the master that their task is completed.</p>
* @param i unique identifier for this message.
* @param xt time series transformed (or processed)
* @author Patrick Nicolas
* @since March 28, 2014
* @note Scala for Machine learning Chapter 12 Scalable Frameworks / Akka / Master-workers
*/
case class Completed(i: Int, xt: XTSeries[Double]) extends Message(i)
/**
* <p>Message sent by the master to the worker actors to start the computation.</p>
 * @param i unique identifier for this message.
* @param xt time series to transform (or process)
* @author Patrick Nicolas
* @since March 28, 2014
* @note Scala for Machine learning Chapter 12 Scalable Frameworks / Akka / Master-workers
*/
case class Activate(i: Int, xt: XTSeries[Double]) extends Message(i)
// --------------------------------- EOF -------------------------
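// Editor's sketch (hedged): a minimal worker showing how the Activate/Completed/Terminate
// protocol above is meant to flow; the `transform` function and the class itself are
// illustrative, not part of the book's source.
import akka.actor.Actor
import org.scalaml.core.XTSeries
import org.scalaml.scalability.akka.message._

class TransformWorkerSketch(transform: XTSeries[Double] => XTSeries[Double]) extends Actor {
  override def receive: Receive = {
    case Activate(id, xt) => sender ! Completed(id, transform(xt)) // do the work, report back
    case Terminate(_)     => context.stop(self)                    // master requests shutdown
  }
}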
|
batermj/algorithm-challenger
|
books/cs/machine-learning/scala-for-machine-learning/1rst-edition/original-src-from-the-book/src/main/scala/org/scalaml/scalability/akka/message/Messages.scala
|
Scala
|
apache-2.0
| 2,636
|
/*
* Copyright 2010-2011 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftmodules
package couchdb
import scala.collection.{Map => MapTrait}
import scala.collection.immutable.Map
import scala.collection.mutable.ArrayBuffer
import scala.reflect.Manifest
import dispatch.{:/, Handler, Http, Request, StatusCode}
import net.liftweb.common.{Box, Empty, Failure, Full}
import net.liftweb.json.{DefaultFormats, Formats}
import net.liftweb.json.Extraction.{decompose, extract}
import net.liftweb.json.Implicits.string2jvalue
import net.liftweb.json.JsonAST.{JArray, JBool, JField, JInt, JObject, JString, JValue, render}
import net.liftweb.json.JsonDSL.pair2jvalue
import net.liftweb.json.Printer.compact
import net.liftweb.util.ControlHelpers.tryo
import DocumentHelpers.{jobjectToJObjectExtension, updateIdAndRev}
import DispatchJSON.requestToJSONRequest
/** Helper functions */
private[couchdb] object DatabaseHelpers {
/** Handles the JSON result of an update action by parsing out id and rev, updating the given original object with the values and returning it */
def handleUpdateResult(original: JObject)(json: JValue): Box[JObject] =
for {
obj <- Full(json).asA[JObject] ?~ ("update result is not a JObject: " + json)
ok <- Full(json \ "ok" ).asA[JBool].filter(_.value) ?~ ("ok not present in reply or not true: "+json)
id <- Full(json \ "id" ).asA[JString].map(_.s) ?~ ("id not present or not a string: " + json)
rev <- Full(json \ "rev").asA[JString].map(_.s) ?~ ("rev not present or not a string: " + json)
} yield updateIdAndRev(original, id, rev)
}
import DatabaseHelpers._
/** Single element Map implementation */
object SingleElementMap {
/** Implicitly convert a pair to a single element map */
implicit def pairToSingleElementMap[A, B](pair: (A, B)): MapTrait[A, B] = Map(pair)
}
import SingleElementMap.pairToSingleElementMap
/** Trait that adds a "fetch" method for getting a JObject from CouchDB */
trait FetchableAsJObject {
self: Request =>
/** Fetch the document as a JObject */
def fetch: Handler[JObject] = this ># (_.asInstanceOf[JObject])
}
/** Trait of requests that represent a document in a Couch database */
trait Document extends Request with FetchableAsJObject {
/** Refine to a particular revision of the document. Only GET-style requests should be used with the resulting path */
def at(rev: String): DocumentRevision = new Request(this <<? ("rev" -> rev)) with DocumentRevision { }
/** Alias for at */
def @@ (rev: String): DocumentRevision = at(rev)
/** Refine to a particular revision of the document by getting _rev from a given JObject. */
def at(doc: JObject): DocumentRevision = at(doc._rev.openOrThrowException("legacy code"))
/** Alias for at */
def @@ (doc: JObject): DocumentRevision = at(doc)
/** Store a new version of the document, returning the document with _id and _rev updated */
def put(doc: JObject): Handler[Box[JObject]] = JSONRequest(this) <<<# doc ># handleUpdateResult(doc) _
/** Alias for put */
def <<<# (doc: JObject): Handler[Box[JObject]] = put(doc)
}
/** Trait of requests that represent a particular document revision in a Couch database */
trait DocumentRevision extends Request with FetchableAsJObject {
/** Destroy the document. The document's current revision must be the revision represented by this request */
def delete: Handler[Unit] = DELETE >|
}
/** Trait of requests that represent a particular design document */
trait Design extends Document {
/** Access a particular view by name that can be queried */
def view(name: String): View = new Request(this / "_view" / name) with View { }
}
/** Trait of requests that represent a view that can be queried */
trait View extends Request with Queryable[View] {
protected def newQueryable(req: Request): View = new Request(req) with View { }
}
/** Trait of requests representing all documents in a Couch database */
trait AllDocs extends Request with Queryable[AllDocs] {
protected def newQueryable(req: Request): AllDocs = new Request(req) with AllDocs { }
}
/** Trait of requests that support CouchDB querying. That is, _all_docs and views */
trait Queryable[SelfType <: Queryable[SelfType]] {
self: Request =>
/** Create a new self-typed instance */
protected def newQueryable(req: Request): SelfType
/** Add parameters to the query */
def withParams(params: MapTrait[String, Any]): SelfType = newQueryable(this <<? params)
/** Fetch results of the query */
def query: Handler[Box[QueryResults]] = this ># (QueryResult.read _)
/** Query for the given key only */
def key(keyValue: JValue): SelfType = withParams("key" -> compact(render(keyValue)))
/** Query for the given set of keys */
def keys(keyValues: JValue*): SelfType = newQueryable(this <<# ("keys" -> JArray(keyValues.toList)))
/** Restrict the query to only keys greater than or equal to the given key */
def from(lowValue: JValue): SelfType = withParams("startkey" -> compact(render(lowValue)))
/** Restrict the query to only keys greater than or equal to the given key, not including any documents that are earlier in the view than the given docid */
def from(lowValue: JValue, docid: String): SelfType = withParams(Map("startkey" -> compact(render(lowValue)), "startkey_docid" -> docid))
/** Restrict the query to only keys less than or equal to the given key */
def to(highValue: JValue): SelfType = withParams("endkey" -> compact(render(highValue)))
/** Restrict the query to only keys less than or equal to the given key, not including any documents that are later in the view than the given docid */
def to(highValue: JValue, docid: String): SelfType = withParams(Map("endkey" -> compact(render(highValue)), "endkey_docid" -> docid))
/** Limit the query to the given number of results */
def limit(i: Int): SelfType = withParams("limit" -> i)
/** Specify that stale view data is okay. Used for optimization -- some other query must keep the view fresh. */
def staleOk: SelfType = withParams("stale" -> "ok")
/** Specify a descending sort. Note that the reversal is applied before key filtering, so you must reverse your from(...) and to(...) values. */
def descending: SelfType = withParams("descending" -> "true")
/** Group results (see http://wiki.apache.org/couchdb/Introduction_to_CouchDB_views) */
def group: SelfType = withParams("group" -> "true")
/** Group results at the given level (see http://wiki.apache.org/couchdb/Introduction_to_CouchDB_views) */
def group(level: Int): SelfType = withParams(Map("group" -> "true") + ("group_level" -> level))
/** Specify that reduction should not occur */
def dontReduce: SelfType = withParams("reduce" -> "false")
/** Include the associated document with each result */
def includeDocs: SelfType = withParams("include_docs" -> "true")
/**
* Query a range matching the given key prefix. Equivalent to composing from(prefix) and to(prefix with {} appended),
* e.g. from=["foobar"]&to=["foobar",{}]
*/
def arrayRange(prefix: List[JValue]): SelfType = from(JArray(prefix)) to(JArray(prefix ::: (JObject(Nil)::Nil)))
}
/** Specialization of dispatch's Request that provides Couch specific functionality */
class Database(couch: Request, database: String) extends Request(couch / database) {
/** Construct a Database request using host and port */
def this(hostname: String, port: Int, database: String) = this(:/(hostname, port), database)
/** Construct a Database request to a default installation of CouchDB on localhost (port 5984) */
def this(database: String) = this("127.0.0.1", 5984, database)
/** Create the database iff it doesn't already exist */
def createIfNotCreated(http: Http): Unit =
try {
http(info)
()
} catch {
case StatusCode(404, _) => http(create)
}
/** Attempt to create the database (PUT) */
def create: Handler[Unit] = this <<< "" >|
/** Retrieve information about the database (GET) */
def info: Handler[DatabaseInfo] = {
implicit val f: Formats = DefaultFormats
this ># (extract[DatabaseInfo] _)
}
/** Destroy the database (DELETE) */
def delete: Handler[Unit] = DELETE >|
/** Access all documents in the database with a queryable interface */
def all: AllDocs = new Request(this / "_all_docs") with AllDocs { }
/** Access a particular document in the database by ID. */
def apply(id: String): Document = new Request(this / id) with Document { }
/** Access a particular document in the database with _id from a given JObject */
def apply(doc: JObject): Document = this(doc._id.openOrThrowException("legacy code").s)
/** Access a series of documents by ID. */
def apply(ids: Seq[String]): AllDocs = all.includeDocs.keys(ids.map(JString): _*)
/** Access a particular design document in the database by name */
def design(name: String): Design = new Request(this / "_design" / name) with Design { }
/** Store a document in the database, generating a new unique ID for it and returning the document with _id and _rev updated */
def post(doc: JObject): Handler[Box[JObject]] = JSONRequest(this) <<# doc ># handleUpdateResult(doc) _
/** Alias for post */
def <<# (doc: JObject): Handler[Box[JObject]] = post(doc)
/** Inserts or updates a document in the database, using the standard _id field to do so. Returns the updated document. */
def store(doc: JObject): Handler[Box[JObject]] =
doc._id match {
case Full(id) => this(id.s) <<<# doc
case _ => this <<# doc
}
}
/** Case class that holds information about a couch database, as retrieved using GET /database */
case class DatabaseInfo(db_name: String, doc_count: Int, doc_del_count: Int, update_seq: BigInt, compact_running: Boolean, disk_size: BigInt)
/** Result of a CouchDB query, possibly containing some summary information (if Couch provided it) such as total rows, and the results themselves */
case class QueryResults(totalRows: Box[BigInt], offset: Box[BigInt], rows: Seq[QueryRow])
/** Single result of a CouchDB query */
case class QueryRow(id: Box[String], key: JValue, value: Box[JValue], doc: Box[JObject], error: Box[JString])
object QueryResult {
/** Read JSON into a QueryResults instance that holds the rows along with metadata about the query */
def read(json: JValue): Box[QueryResults] =
for {
obj <- Full(json).asA[JObject] ?~ ("query JSON is not a JObject: " + json)
jsonRows <- obj.get[JArray]("rows").map(_.arr) ?~ ("rows not found or wrong type in " + json)
rows <- ((Full(new ArrayBuffer): Box[ArrayBuffer[QueryRow]]) /: jsonRows)((prev, cur) => {
prev flatMap {
buf => readRow(cur).flatMap { res => buf += res; prev }
}
})
} yield {
QueryResults(obj.get[JInt]("total_rows").map(_.num), obj.get[JInt]("offset").map(_.num), rows)
}
/** Read JSON into a QueryRow */
private def readRow(json: JValue): Box[QueryRow] =
for {
obj <- Full(json).asA[JObject] ?~ ("row not a JObject: " + json)
key <- obj.get[JValue]("key") ?~ ("key not found or wrong type in " + json)
} yield QueryRow(obj.get[JString]("id").map(_.s), key, obj.get[JValue]("value"), obj.get[JObject]("doc"), obj.get[JString]("error"))
}
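// Editor's sketch (hedged): a minimal round-trip with the API above, assuming a CouchDB
// instance on 127.0.0.1:5984; the database name, design document ("people") and view
// ("by_age") are hypothetical.
import dispatch.Http
import net.liftweb.common.Box
import net.liftweb.json.JsonAST.{JField, JInt, JObject, JString}
import net.liftmodules.couchdb.{Database, QueryResults}

object CouchDatabaseSketch extends App {
  val http = new Http
  val db = new Database("example_db")
  db.createIfNotCreated(http)

  // POST a new document; the returned copy carries the generated _id and _rev.
  val doc = JObject(List(JField("name", JString("alice")), JField("age", JInt(30))))
  val stored: Box[JObject] = http(db store doc)

  // Query a view, keys >= 18, at most 10 rows.
  val results: Box[QueryResults] = http(db.design("people").view("by_age").from(JInt(18)).limit(10).query)
  println(stored)
  println(results)
}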
|
arashi01/couchdb
|
src/main/scala/net/liftmodules/couchdb/Database.scala
|
Scala
|
apache-2.0
| 11,823
|
package com.rasterfoundry.api.utils.queryparams
import com.rasterfoundry.datamodel._
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.directives.ParameterDirectives.parameters
import akka.http.scaladsl.unmarshalling._
import javax.xml.bind.DatatypeConverter
import java.sql.Timestamp
import java.util.UUID
/** Unmarshalls query parameters to correct type */
trait QueryParameterDeserializers {
implicit val deserializerUUID: Unmarshaller[String, UUID] =
Unmarshaller.strict[String, UUID] { s =>
UUID.fromString(s)
}
implicit val deserializerTimestamp: Unmarshaller[String, Timestamp] =
Unmarshaller.strict[String, Timestamp] { s =>
Timestamp.from(DatatypeConverter.parseDateTime(s).getTime().toInstant())
}
implicit val deserializerGroupType: Unmarshaller[String, GroupType] =
Unmarshaller.strict[String, GroupType] { s =>
GroupType.fromString(s)
}
implicit val deserializerTaskStatus: Unmarshaller[String, TaskStatus] =
Unmarshaller.strict[String, TaskStatus] { s =>
TaskStatus.fromString(s)
}
implicit val deserializerAnnotationProjectType
: Unmarshaller[String, AnnotationProjectType] =
Unmarshaller.strict[String, AnnotationProjectType] { s =>
AnnotationProjectType.fromString(s)
}
implicit val deserializerContinent: Unmarshaller[String, Continent] =
Unmarshaller.strict[String, Continent] { s =>
Continent.fromString(s)
}
implicit val deserializerTaskType: Unmarshaller[String, TaskType] =
Unmarshaller.strict[String, TaskType] { s =>
TaskType.fromString(s)
}
implicit val deserializerActionType: Unmarshaller[String, ActionType] =
Unmarshaller.strict[String, ActionType] { s =>
ActionType.fromString(s)
}
}
trait QueryParametersCommon extends QueryParameterDeserializers {
def projectQueryParameters =
(
orgQueryParams &
userQueryParameters &
timestampQueryParameters &
searchParams &
ownershipTypeQueryParameters &
groupQueryParameters &
tagQueryParameters &
parameters(
'analysisId.as[UUID].?
)
).as(ProjectQueryParameters.apply _)
def orgQueryParams =
parameters(
'organization.as(deserializerUUID).*
).as(OrgQueryParameters.apply _)
def ownerQueryParameters =
parameters(
'owner.as[String].*
).as(OwnerQueryParameters.apply _)
def ownershipTypeQueryParameters =
parameters(
'ownershipType.as[String].?
).as(OwnershipTypeQueryParameters.apply _)
def groupQueryParameters =
parameters(
'groupType.as(deserializerGroupType).?,
'groupId.as(deserializerUUID).?
).as(GroupQueryParameters.apply _)
def userAuditQueryParameters =
parameters(
'createdBy.as[String].?
).as(UserAuditQueryParameters.apply _)
def userQueryParameters =
(
userAuditQueryParameters &
ownerQueryParameters &
activationParams
).as(UserQueryParameters.apply _)
def timestampQueryParameters =
parameters(
'minCreateDatetime.as(deserializerTimestamp).?,
'maxCreateDatetime.as(deserializerTimestamp).?,
'minModifiedDatetime.as(deserializerTimestamp).?,
'maxModifiedDatetime.as(deserializerTimestamp).?
).as(TimestampQueryParameters.apply _)
def annotationQueryParams =
(orgQueryParams &
userQueryParameters &
parameters(
'label.as[String].?,
'machineGenerated.as[Boolean].?,
'minConfidence.as[Double].?,
'maxConfidence.as[Double].?,
'quality.as[String].?,
'annotationGroup.as[UUID].?,
'bbox.as[String].*,
'withOwnerInfo.as[Boolean].?,
'taskId.as[UUID].?
)).as(AnnotationQueryParameters.apply _)
def shapeQueryParams =
(
orgQueryParams &
userQueryParameters &
timestampQueryParameters &
ownershipTypeQueryParameters &
groupQueryParameters &
searchParams
).as(ShapeQueryParameters.apply _)
def searchParams =
parameters(
'search.as[String].?
).as(SearchQueryParameters.apply _)
def activationParams =
parameters(
'isActive.as[Boolean].?
).as(ActivationQueryParameters.apply _)
def platformIdParams =
parameters(
'platformId.as[UUID].?
).as(PlatformIdQueryParameters.apply _)
def tagQueryParameters =
parameters(
'tagsInclude.as[String].*,
'tagsExclude.as[String].*
).as(TagQueryParameters.apply _)
def teamQueryParameters =
(
timestampQueryParameters &
orgQueryParams &
userAuditQueryParameters &
searchParams &
activationParams
).as(TeamQueryParameters.apply _)
def annotationExportQueryParameters =
parameters(
'exportAll.as[Boolean].?
).as(AnnotationExportQueryParameters.apply _)
def taskQueryParameters =
parameters(
'status.as[TaskStatus].*,
'locked.as[Boolean].?,
'lockedBy.as[String].?,
'bbox.as[String].*,
'actionUser.as[String].?,
'actionType.as[TaskStatus].?,
'actionStartTime.as(deserializerTimestamp).?,
'actionEndTime.as(deserializerTimestamp).?,
'actionMinCount.as[Int].?,
'actionMaxCount.as[Int].?,
'taskType.as(deserializerTaskType).?
).as(TaskQueryParameters.apply _)
def userTaskActivityParameters =
parameters(
'actionStartTime.as(deserializerTimestamp).?,
'actionEndTime.as(deserializerTimestamp).?,
'actionUser.as[String].?
).as(UserTaskActivityParameters.apply _)
def stacExportQueryParameters =
(
userAuditQueryParameters &
ownerQueryParameters &
searchParams &
parameters(
'exportStatus.as[String].?,
'annotationProjectId.as[UUID].?,
'campaignId.as[UUID].?
)
).as(StacExportQueryParameters.apply _)
def annotationProjectFilterParameters =
parameters(
'projectType.as(deserializerAnnotationProjectType).?,
'taskStatusesInclude.as(deserializerTaskStatus).*
).as(AnnotationProjectFilterQueryParameters.apply _)
def annotationProjectQueryParameters =
(
ownerQueryParameters &
searchParams &
ownershipTypeQueryParameters &
groupQueryParameters &
annotationProjectFilterParameters &
parameters(
'campaignId.as[UUID].?,
'capturedAt.as(deserializerTimestamp).?,
'isActive.as[Boolean].?
)
).as(AnnotationProjectQueryParameters.apply _)
def campaignQueryParameters =
(
ownerQueryParameters &
searchParams &
ownershipTypeQueryParameters &
groupQueryParameters &
parameters(
'campaignType.as(deserializerAnnotationProjectType).?,
'continent.as(deserializerContinent).?,
'isActive.as[Boolean].?
)
).as(CampaignQueryParameters.apply _)
def campaignRandomTaskQueryParameters =
parameters(
'requestAction.as(deserializerActionType).*
).as(CampaignRandomTaskQueryParameters.apply _)
}
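// Editor's sketch (hedged): one way the bundled parameter directives above can be consumed in
// an akka-http route; the path name and the use of toString are illustrative only.
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import com.rasterfoundry.api.utils.queryparams.QueryParametersCommon

object ProjectRoutesSketch extends QueryParametersCommon {
  val route: Route =
    (path("projects") & get) {
      projectQueryParameters { params =>
        // `params` is a fully-typed ProjectQueryParameters built from the query string
        complete(params.toString)
      }
    }
}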
|
raster-foundry/raster-foundry
|
app-backend/api/src/main/scala/utils/QueryParameters.scala
|
Scala
|
apache-2.0
| 7,090
|
package rebind.syntax.std
import rebind.std.FutureAction
import scala.concurrent.{ExecutionContext, Future}
import scalaz.DisjunctionT
trait FutureSyntax {
implicit def futureSyntax[A](future: => Future[A])(implicit ec: ExecutionContext): FutureOps[A] =
new FutureOps(future)
}
class FutureOps[A](future: => Future[A])(implicit ec: ExecutionContext) {
def action: DisjunctionT[FutureAction, Throwable, A] = FutureAction(future)
}
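// Editor's sketch (hedged): the enrichment above lifts a by-name Future into the
// DisjunctionT/FutureAction stack; the local `extends FutureSyntax` stands in for however the
// library actually exposes the trait.
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scalaz.DisjunctionT
import rebind.std.FutureAction
import rebind.syntax.std.FutureSyntax

object FutureSyntaxSketch extends FutureSyntax {
  val lifted: DisjunctionT[FutureAction, Throwable, Int] = Future(40 + 2).action
}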
|
adelbertc/rebind
|
core/src/main/scala/rebind/syntax/std/FutureOps.scala
|
Scala
|
bsd-3-clause
| 443
|
/*
* Copyright (c) 2016 by its authors. Some rights reserved.
* See the project homepage at: https://sincron.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sincron.atomic
trait AtomicBuilder[T, R <: Atomic[T]] {
def buildInstance(initialValue: T, strategy: PaddingStrategy): R
}
private[atomic] object Implicits {
abstract class Level1 {
implicit def AtomicRefBuilder[T <: AnyRef] = new AtomicBuilder[T, AtomicAny[T]] {
def buildInstance(initialValue: T, strategy: PaddingStrategy) =
AtomicAny(initialValue)(strategy)
}
}
abstract class Level2 extends Level1 {
implicit def AtomicNumberBuilder[T <: AnyRef : Numeric] =
new AtomicBuilder[T, AtomicNumberAny[T]] {
def buildInstance(initialValue: T, strategy: PaddingStrategy) =
AtomicNumberAny(initialValue)(implicitly[Numeric[T]], strategy)
}
}
}
object AtomicBuilder extends Implicits.Level2 {
implicit object AtomicIntBuilder extends AtomicBuilder[Int, AtomicInt] {
def buildInstance(initialValue: Int, strategy: PaddingStrategy): AtomicInt =
AtomicInt(initialValue)(strategy)
}
implicit object AtomicLongBuilder extends AtomicBuilder[Long, AtomicLong] {
def buildInstance(initialValue: Long, strategy: PaddingStrategy): AtomicLong =
AtomicLong(initialValue)(strategy)
}
implicit object AtomicBooleanBuilder extends AtomicBuilder[Boolean, AtomicBoolean] {
def buildInstance(initialValue: Boolean, strategy: PaddingStrategy) =
AtomicBoolean(initialValue)(strategy)
}
implicit object AtomicByteBuilder extends AtomicBuilder[Byte, AtomicByte] {
def buildInstance(initialValue: Byte, strategy: PaddingStrategy): AtomicByte =
AtomicByte(initialValue)(strategy)
}
implicit object AtomicCharBuilder extends AtomicBuilder[Char, AtomicChar] {
def buildInstance(initialValue: Char, strategy: PaddingStrategy): AtomicChar =
AtomicChar(initialValue)(strategy)
}
implicit object AtomicShortBuilder extends AtomicBuilder[Short, AtomicShort] {
def buildInstance(initialValue: Short, strategy: PaddingStrategy): AtomicShort =
AtomicShort(initialValue)(strategy)
}
implicit object AtomicFloatBuilder extends AtomicBuilder[Float, AtomicFloat] {
def buildInstance(initialValue: Float, strategy: PaddingStrategy): AtomicFloat =
AtomicFloat(initialValue)(strategy)
}
implicit object AtomicDoubleBuilder extends AtomicBuilder[Double, AtomicDouble] {
def buildInstance(initialValue: Double, strategy: PaddingStrategy): AtomicDouble =
AtomicDouble(initialValue)(strategy)
}
}
|
monixio/sincron
|
sincron-atomic/jvm/src/main/scala/org/sincron/atomic/AtomicBuilder.scala
|
Scala
|
apache-2.0
| 3,126
|
package com.wavesplatform.transaction.smart
import com.wavesplatform.account.Address
import com.wavesplatform.db.WithDomain
import com.wavesplatform.db.WithState.AddrWithBalance
import com.wavesplatform.lang.directives.values.StdLibVersion
import com.wavesplatform.lang.script.Script
import com.wavesplatform.lang.v1.compiler.TestCompiler
import com.wavesplatform.test._
import com.wavesplatform.transaction.TxHelpers
import com.wavesplatform.utils.JsonMatchers
import play.api.libs.json.Json
class SubInvokeStateChangesSpec extends FlatSpec with WithDomain with JsonMatchers {
val ContractFunction = "default"
val compileV5: String => Script = TestCompiler(StdLibVersion.V5).compileContract(_)
"Invoke state changes" should "include intermediate invokes" in {
// Root DApp, calls addr2s and addr2f
val dAppAddress = TxHelpers.signer(1)
// Success chain
val addr2s = TxHelpers.signer(3) // Calls addr3s
val addr3s = TxHelpers.signer(5) // Finishes successfully
// Failed chain
val addr2f = TxHelpers.signer(2) // Calls addr3f
val addr3f = TxHelpers.signer(4) // Fails
val balances = Seq(dAppAddress, addr2f, addr3f, addr2s, addr3s).map(acc => AddrWithBalance(acc.toAddress, 1.waves)) :+
AddrWithBalance(TxHelpers.defaultAddress)
withDomain(DomainPresets.RideV5, balances) { d =>
{ // Prerequisites
val script1 = compileV5(genScript(Seq(addr2s.toAddress, addr2f.toAddress)))
val script2 = compileV5(genScript(Some(addr3f.toAddress)))
val script3 = compileV5(genScript(None, fail = true))
val script2alt = compileV5(genScript(Some(addr3s.toAddress)))
val script3alt = compileV5(genScript(None))
val setScripts = Seq(
TxHelpers.setScript(dAppAddress, script1),
TxHelpers.setScript(addr2f, script2),
TxHelpers.setScript(addr3f, script3),
TxHelpers.setScript(addr2s, script2alt),
TxHelpers.setScript(addr3s, script3alt)
)
d.appendBlock(setScripts: _*)
}
// Actual test
val invoke = TxHelpers.invoke(dAppAddress.toAddress, Some(ContractFunction))
d.appendBlock(invoke)
val stateChanges = d.commonApi.invokeScriptResult(invoke.id())
val json = Json.toJson(stateChanges)
json should matchJson(
"""{
| "data" : [ ],
| "transfers" : [ ],
| "issues" : [ ],
| "reissues" : [ ],
| "burns" : [ ],
| "sponsorFees" : [ ],
| "leases" : [ ],
| "leaseCancels" : [ ],
| "invokes" : [ {
| "dApp" : "3N4DiVEiZHzcjEhoBx2kmoKKCH7GBZMim3L",
| "call" : {
| "function" : "default",
| "args" : [ ]
| },
| "payment" : [ {
| "assetId" : null,
| "amount" : 17
| } ],
| "stateChanges" : {
| "data" : [ ],
| "transfers" : [ ],
| "issues" : [ ],
| "reissues" : [ ],
| "burns" : [ ],
| "sponsorFees" : [ ],
| "leases" : [ ],
| "leaseCancels" : [ ],
| "invokes" : [ {
| "dApp" : "3MvAdB2DFMf6unzX6czVGgco5rA24End8Jn",
| "call" : {
| "function" : "default",
| "args" : [ ]
| },
| "payment" : [ {
| "assetId" : null,
| "amount" : 17
| } ],
| "stateChanges" : {
| "data" : [ ],
| "transfers" : [ ],
| "issues" : [ ],
| "reissues" : [ ],
| "burns" : [ ],
| "sponsorFees" : [ ],
| "leases" : [ ],
| "leaseCancels" : [ ],
| "invokes" : [ ]
| }
| } ]
| }
| }, {
| "dApp" : "3MsY23LPQnvPZnBKpvs6YcnCvGjLVD42pSy",
| "call" : {
| "function" : "default",
| "args" : [ ]
| },
| "payment" : [ {
| "assetId" : null,
| "amount" : 17
| } ],
| "stateChanges" : {
| "data" : [ ],
| "transfers" : [ ],
| "issues" : [ ],
| "reissues" : [ ],
| "burns" : [ ],
| "sponsorFees" : [ ],
| "leases" : [ ],
| "leaseCancels" : [ ],
| "invokes" : [ {
| "dApp" : "3N87Qja7rNj8z6H7nG9EYtjCXQtZLawaxyM",
| "call" : {
| "function" : "default",
| "args" : [ ]
| },
| "payment" : [ {
| "assetId" : null,
| "amount" : 17
| } ],
| "stateChanges" : {
| "data" : [ ],
| "transfers" : [ ],
| "issues" : [ ],
| "reissues" : [ ],
| "burns" : [ ],
| "sponsorFees" : [ ],
| "leases" : [ ],
| "leaseCancels" : [ ],
| "invokes" : [ ],
| "error" : {
| "code" : 1,
| "text" : "boom"
| }
| }
| } ]
| }
| } ],
| "error" : {
| "code" : 1,
| "text" : "FailedTransactionError(code = 1, error = boom, log =\n\t@p = false\n)"
| }
|}""".stripMargin
)
val allAddresses = Seq(dAppAddress, addr2s, addr3s, addr2f, addr3f).map(_.toAddress)
for ((addr, i) <- allAddresses.zipWithIndex)
withClue(s"Addr #${i + 1}")(d.commonApi.addressTransactions(addr) should contain(invoke))
}
}
def genScript(calls: Iterable[Address], fail: Boolean = false): String =
s"""
|{-# STDLIB_VERSION 5 #-}
|{-# CONTENT_TYPE DAPP #-}
|{-# SCRIPT_TYPE ACCOUNT #-}
|
|@Callable(i)
|func $ContractFunction() = {
| ${calls.zipWithIndex
.map { case (address, i) => s"""strict r$i = invoke(Address(base58'$address'), "$ContractFunction", [], [AttachedPayment(unit, 17)])""" }
.mkString("\n")}
| if ($fail && !(${(1 to 10).map(_ => "sigVerify(base58'', base58'', base58'')").mkString(" || ")})) then throw("boom") else []
|}""".stripMargin
}
|
wavesplatform/Waves
|
node/src/test/scala/com/wavesplatform/transaction/smart/SubInvokeStateChangesSpec.scala
|
Scala
|
mit
| 6,724
|
// create an svg display object
//package svgala
import scala.collection.mutable.ListBuffer
import java.io._
class SVGala(val width: Int, val height: Int) {
// list of objects to display
val displayList = ListBuffer[String]()
// utilities
// get the attribute name from an attribute assignment string
def getAttrName(attr: String): String = attr.split('=')(0)
// move origin to lower left by adjusting y coordinate
def flipY(y: Int, oHeight: Int): Int = height - (y + oHeight)
// uniform DRY handling of presentation attributes
// takes the element specific part of the definition and combines it with
// any presentation attributes added as an array of Strings.
// Completes the svg tag and returns the entire string to add to the document.
  // apply defaults and filter out any default that the caller overrides.
def addPresentationAttr(element: String, attr: Seq[String], defaultAttr: Array[String]): String = {
val attrNames = attr.map((a: String) => getAttrName(a)) // just attr name
val filteredDefaultAttr = defaultAttr.filter((da: String) => !attrNames.contains(getAttrName(da)))
val finalAttr = attr ++ filteredDefaultAttr
element + finalAttr.reduce((a: String, b: String) => a + " " + b) + "/>"
}
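  // A small worked example (hypothetical values, assuming a 500-pixel-high canvas; not part of
  // the original file): a caller-supplied fill overrides the default fill while the remaining
  // defaults are still appended, so
  //   makeRect(0, 0, 10, 10, Seq("""fill="red""""))
  // would produce roughly:
  //   <rect x="0" y="490" width="10" height="10" fill="red" stroke-width="1" stroke="#000000"/>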
// methods which create object tags as strings
// Create an object tag for a rectangle
def makeRect(x: Int, y: Int, width: Int, height: Int, attr: Seq[String]): String = {
val svg1 = s"""<rect x="$x" y="${flipY(y, height)}" width="$width" height="$height" """
val defaultAttr = Array("""fill="0X0000FF"""", """stroke-width="1"""", """stroke="0x000000"""")
addPresentationAttr(svg1, attr, defaultAttr)
}
// Create an object tag for a circle
def makeCircle(cx: Int, cy: Int, r: Int, attr:Seq[String]): String = {
val svg1 = s"""<circle cx="$cx" cy="${flipY(cy, 0)}" r="$r" """
val defaultAttr = Array("""fill="0X0000FF"""", """stroke-width="1"""", """stroke="0x000000"""")
addPresentationAttr(svg1, attr, defaultAttr)
}
// Create an object tag for an ellipse
def makeEllipse(cx: Int, cy: Int, rx: Int, ry: Int, attr:Seq[String]): String = {
val svg1 = s"""<ellipse cx="$cx" cy="${flipY(cy, 0)}" rx="$rx" ry="$ry" """
val defaultAttr = Array("""fill="0X0000FF"""", """stroke-width="1"""", """stroke="0x000000"""")
addPresentationAttr(svg1, attr, defaultAttr)
}
// methods which add objects to the display
// add a rectangle
def addRect(x: Int, y: Int, width: Int, height: Int, attr: String*) {
displayList += makeRect(x, y, width, height, attr)
}
// add a circle
def addCircle(cx: Int, cy: Int, r: Int, attr: String*) {
displayList += makeCircle(cx, cy, r, attr)
}
// add an ellipse
def addEllipse(cx: Int, cy: Int, rx: Int, ry: Int, attr: String*) {
displayList += makeEllipse(cx, cy, rx, ry, attr)
}
// output
def writeSVG(fileName: String) {
val bw = new BufferedWriter(new FileWriter(fileName))
// add the header
val header = s"""<?xml version="1.0"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg width="$width" height="$height" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
"""
bw.write(header)
bw.newLine
// add the display objects
    displayList.foreach(obj => bw.write(obj + "\n"))
    // close the svg doc
    bw.newLine
    bw.write("</svg>\n")
bw.close()
}
}
|
toma63/svgala
|
svgala.scala
|
Scala
|
gpl-3.0
| 3,438
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hbase
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.types._
import org.apache.spark.sql.hbase.catalyst.expressions.PartialPredicateOperations._
import org.apache.spark.sql.hbase.types.Range
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
object CriticalPointType extends Enumeration {
type CriticalPointType = Value
val upInclusive = Value("Up Inclusive: (...)[...)")
val lowInclusive = Value("Low Inclusive: (...](...)")
val bothInclusive = Value("Both Inclusive: (...)[](...)")
}
/**
*
* @param value value of this critical point
* @param ctype type of this critical point
* @param dt the runtime data type of this critical point
* @tparam T the data type parameter of this critical point
*/
case class CriticalPoint[T](value: T, ctype: CriticalPointType.CriticalPointType, dt: AtomicType) {
override def hashCode() = value.hashCode()
override def equals(other: Any): Boolean = other match {
case cp: CriticalPoint[T] => value.equals(cp.value)
case _ => false
}
override def toString = {
s"CriticalPoint: value=$value, ctype=$ctype., dt=${dt.typeName}"
}
}
/**
* Range based on a Critical point
*
* @param start the start of the range in native type; None for an open start
* @param startInclusive inclusive for the start
* @param end the end of the range in native; None for an open end
* @param endInclusive inclusive for the end
* @param dt the data type
* @param pred the associated predicate
* @tparam T the native data type of the critical point range
*
*/
private[hbase] class CriticalPointRange[+T](start: Option[T], startInclusive: Boolean,
end: Option[T], endInclusive: Boolean,
dt: AtomicType, var pred: Expression)
extends Range[T](start, startInclusive, end, endInclusive, dt) {
var nextDimCriticalPointRanges: Seq[CriticalPointRange[_]] = Nil
  // This CPR is invalid, meaning all of its children are excluded; this differs from an
  // empty children list, which means no predicate is applicable to the next dimension.
var invalid = false
/**
* expand on nested critical point ranges of sub-dimensions
* @param prefix the buffer to build prefix on all leading dimensions
* @return a list of Multiple dimensional critical point ranges
*/
private[hbase] def flatten(prefix: ArrayBuffer[(Any, AtomicType)])
: Seq[MDCriticalPointRange[_]] = {
(nextDimCriticalPointRanges, invalid) match {
case (Nil, true) => Nil
case (Nil, false) => Seq(new MDCriticalPointRange(prefix.toSeq, this, dt))
case _ => {
prefix += ((start.get, dt))
require(isPoint, "Internal Logical Error: point range expected")
nextDimCriticalPointRanges.map(_.flatten(prefix.clone())).reduceLeft(_ ++ _)
}
}
}
override def toString = {
val result = new mutable.StringBuilder()
if (startInclusive) result.append("[") else result.append("(")
result.append(s"$start, $end")
if (endInclusive) result.append("]") else result.append(")")
result.append(s" ${dt.typeName} $pred")
result.toString()
}
}
/**
* Multidimensional critical point range. It uses native data types of the dimensions for comparison
*
* @param prefix prefix values and their native types of the leading dimensions;
* Nil for the first dimension ranges
* @param lastRange the range of the last dimension (not necessarily the last dimension of the
* table but just in this invocation!)
* @param dt the data type of the range of the last dimension
* @tparam T the type parameter of the range of the last dimension
*/
private[hbase] case class MDCriticalPointRange[T](prefix: Seq[(Any, AtomicType)],
lastRange: CriticalPointRange[T],
dt: AtomicType) {
/**
* Compare this range's start/end with the partition's end/start
* @param startOrEnd TRUE if compare this start with the partition's end;
* FALSE if compare this end with the partition's start
* @param part the HBase partition to compare with
   * @return -1 if the compared boundary of this critical point range is smaller than the
   *         partition's boundary, 1 if it is bigger, and 0 if the boundaries overlap
*/
private[hbase] def compareWithPartition(startOrEnd: Boolean, part: HBasePartition): Int = {
val (comparePoint, comparePointInclusive, comparePPoint, comparePPointInclusive) =
if (startOrEnd) {
(lastRange.start, part.end) match {
case (None, None) => (null, false, null, false)
case (None, Some(_)) => (null, false, part.endNative, part.endInclusive)
case (Some(start), None) => (start, lastRange.startInclusive, null, false)
case (Some(start), Some(_)) => (start, lastRange.startInclusive,
part.endNative, part.endInclusive)
}
} else {
(lastRange.end, part.start) match {
case (None, None) => (null, false, null, false)
case (None, Some(_)) => (null, false, part.startNative, part.startInclusive)
case (Some(end), None) => (end, lastRange.endInclusive, null, false)
case (Some(end), Some(_)) => (end, lastRange.endInclusive,
part.startNative, part.startInclusive)
}
}
(prefix, comparePoint, comparePPoint) match {
case (_, _, null) => if (startOrEnd) -1 else 1
case (Nil, null, _) => if (startOrEnd) -1 else 1
case _ =>
val zippedPairs = prefix.zip(comparePPoint)
var i = 0
for (zippedPair <- zippedPairs
if zippedPair._1._2.ordering.equiv(
zippedPair._1._1.asInstanceOf[zippedPair._1._2.InternalType],
zippedPair._2.asInstanceOf[zippedPair._1._2.InternalType])) {
i = i + 1
}
if (i < zippedPairs.size) {
val ((prefixPoint, dt), pPoint) = zippedPairs(i)
if (dt.ordering.gt(prefixPoint.asInstanceOf[dt.InternalType],
pPoint.asInstanceOf[dt.InternalType])) {
1
}
else {
-1
}
} else {
(comparePoint, comparePPoint(i)) match {
case (null, _) => if (startOrEnd) {
-1
} else {
1
}
case (_, pend) =>
if (dt.ordering.gt(comparePoint.asInstanceOf[dt.InternalType],
pend.asInstanceOf[dt.InternalType])) {
1
}
else if (dt.ordering.lt(comparePoint.asInstanceOf[dt.InternalType],
pend.asInstanceOf[dt.InternalType])) {
-1
} else {
if (comparePointInclusive && comparePPointInclusive) {
0
} else if ((comparePointInclusive && prefix.size + 1 < comparePPoint.size) ||
(comparePPointInclusive && prefix.size + 1 < comparePPoint.size)) {
// if the inclusive side has smaller dimensionality, there is overlap
0
} else if (startOrEnd) {
1
}
else {
-1
}
}
}
}
}
}
override def toString = {
val result = new mutable.StringBuilder()
for (item <- prefix) {
result.append(s"(${item._1} ${item._2}}) ")
}
result.append(s"${lastRange.toString()} ${dt.typeName}}")
result.toString()
}
}
/**
* find the critical points in the given expression: not really a transformer
* Must be called before reference binding
*/
object RangeCriticalPoint {
/**
* collect all critical points from an expression on a specific dimension key
* @param expression the expression from where the critical points will be identified
* @param key the dimension key for which the critical points will be identified
* @tparam T type parameter of the critical points
   * @return an ascending list of critical points for the dimension key, or Nil if the
   *         expression does not reference the key
*/
private[hbase] def collect[T](expression: Expression, key: AttributeReference)
: Seq[CriticalPoint[T]] = {
if (key.references.subsetOf(expression.references)) {
val pointSet = mutable.Set[CriticalPoint[T]]()
val dt: AtomicType = key.dataType.asInstanceOf[AtomicType]
def checkAndAdd(value: Any, ct: CriticalPointType.CriticalPointType): Unit = {
val cp = CriticalPoint[T](value.asInstanceOf[T], ct, dt)
if (!pointSet.add(cp)) {
val oldCp = pointSet.find(_.value == value).get
if (oldCp.ctype != ct && oldCp.ctype != CriticalPointType.bothInclusive) {
pointSet.remove(cp)
if (ct == CriticalPointType.bothInclusive) {
pointSet.add(cp)
} else {
pointSet.add(CriticalPoint[T](value.asInstanceOf[T],
CriticalPointType.bothInclusive, dt))
}
}
}
}
expression transform {
case a@In(AttributeReference(_, _, _, _), list) =>
if (a.value.equals(key)) {
list.filter(_.isInstanceOf[Literal]).foreach(v =>
checkAndAdd(v.asInstanceOf[Literal].value, CriticalPointType.bothInclusive))
}
a
case a@InSet(AttributeReference(_, _, _, _), list) =>
if (a.value.equals(key)) {
list.foreach(v => checkAndAdd(v, CriticalPointType.bothInclusive))
}
a
case a@EqualTo(AttributeReference(_, _, _, _), Literal(value, _)) =>
if (a.left.equals(key)) checkAndAdd(value, CriticalPointType.bothInclusive)
a
case a@EqualTo(Literal(value, _), AttributeReference(_, _, _, _)) =>
if (a.right.equals(key)) checkAndAdd(value, CriticalPointType.bothInclusive)
a
case a@LessThan(AttributeReference(_, _, _, _), Literal(value, _)) =>
if (a.left.equals(key)) checkAndAdd(value, CriticalPointType.upInclusive)
a
case a@LessThan(Literal(value, _), AttributeReference(_, _, _, _)) =>
if (a.right.equals(key)) checkAndAdd(value, CriticalPointType.lowInclusive)
a
case a@LessThanOrEqual(AttributeReference(_, _, _, _), Literal(value, _)) =>
if (a.left.equals(key)) checkAndAdd(value, CriticalPointType.lowInclusive)
a
case a@LessThanOrEqual(Literal(value, _), AttributeReference(_, _, _, _)) =>
if (a.right.equals(key)) checkAndAdd(value, CriticalPointType.upInclusive)
a
case a@GreaterThanOrEqual(AttributeReference(_, _, _, _), Literal(value, _)) =>
if (a.left.equals(key)) checkAndAdd(value, CriticalPointType.upInclusive)
a
case a@GreaterThanOrEqual(Literal(value, _), AttributeReference(_, _, _, _)) =>
if (a.right.equals(key)) checkAndAdd(value, CriticalPointType.lowInclusive)
a
case a@GreaterThan(AttributeReference(_, _, _, _), Literal(value, _)) =>
if (a.left.equals(key)) checkAndAdd(value, CriticalPointType.lowInclusive)
a
case a@GreaterThan(Literal(value, _), AttributeReference(_, _, _, _)) =>
if (a.right.equals(key)) checkAndAdd(value, CriticalPointType.upInclusive)
a
}
pointSet.toSeq.sortWith((a: CriticalPoint[T], b: CriticalPoint[T])
=> dt.ordering.lt(a.value.asInstanceOf[dt.InternalType], b.value.asInstanceOf[dt.InternalType]))
} else Nil
}
/**
* create partition ranges on a *sorted* list of critical points
* @param cps a sorted list of critical points
* @param dimIndex the dimension index for this set of critical points
* @param dt the runtime data type of this set of critical points
* @tparam T the type parameter of this set of critical points
* @return a list of generated critical point ranges
*/
private[hbase] def generateCriticalPointRange[T](cps: Seq[CriticalPoint[T]],
dimIndex: Int, dt: AtomicType)
: Seq[CriticalPointRange[T]] = {
if (cps.isEmpty) Nil
else {
val discreteType = dt.isInstanceOf[IntegralType]
val result = new ArrayBuffer[CriticalPointRange[T]](cps.size + 1)
var prev: CriticalPoint[T] = null
cps.foreach(cp => {
if (prev == null) {
cp.ctype match {
case CriticalPointType.lowInclusive =>
result += new CriticalPointRange[T](None, false, Some(cp.value), true, cp.dt, null)
case CriticalPointType.upInclusive =>
result += new CriticalPointRange[T](None, false, Some(cp.value), false, cp.dt, null)
case CriticalPointType.bothInclusive =>
result +=(new CriticalPointRange[T](None, false, Some(cp.value), false, cp.dt, null),
new CriticalPointRange[T](Some(cp.value), true, Some(cp.value), true, cp.dt, null))
}
} else {
(prev.ctype, cp.ctype) match {
case (CriticalPointType.lowInclusive, CriticalPointType.lowInclusive) =>
result += new CriticalPointRange[T](Some(prev.value), false, Some(cp.value), true,
cp.dt, null)
case (CriticalPointType.lowInclusive, CriticalPointType.upInclusive) =>
result += new CriticalPointRange[T](Some(prev.value), false, Some(cp.value), false,
cp.dt, null)
case (CriticalPointType.lowInclusive, CriticalPointType.bothInclusive) =>
result +=(new CriticalPointRange[T](Some(prev.value), false, Some(cp.value), false,
cp.dt, null),
new CriticalPointRange[T](Some(cp.value), true, Some(cp.value), true,
cp.dt, null))
case (CriticalPointType.upInclusive, CriticalPointType.lowInclusive) =>
result += new CriticalPointRange[T](Some(prev.value), true, Some(cp.value), true,
cp.dt, null)
case (CriticalPointType.upInclusive, CriticalPointType.upInclusive) =>
result += new CriticalPointRange[T](Some(prev.value), true, Some(cp.value), false,
cp.dt, null)
case (CriticalPointType.upInclusive, CriticalPointType.bothInclusive) =>
result +=(new CriticalPointRange[T](Some(prev.value), true, Some(cp.value), false,
cp.dt, null),
new CriticalPointRange[T](Some(cp.value), true, Some(cp.value), true,
cp.dt, null))
case (CriticalPointType.bothInclusive, CriticalPointType.lowInclusive) =>
result += new CriticalPointRange[T](Some(prev.value), false, Some(cp.value), true,
cp.dt, null)
case (CriticalPointType.bothInclusive, CriticalPointType.upInclusive) =>
result += new CriticalPointRange[T](Some(prev.value), false, Some(cp.value), false,
cp.dt, null)
case (CriticalPointType.bothInclusive, CriticalPointType.bothInclusive) =>
result +=(new CriticalPointRange[T](Some(prev.value), false, Some(cp.value), false,
cp.dt, null),
new CriticalPointRange[T](Some(cp.value), true, Some(cp.value), true,
cp.dt, null))
}
}
prev = cp
})
if (prev != null) {
result += {
prev.ctype match {
case CriticalPointType.lowInclusive =>
new CriticalPointRange[T](Some(prev.value), false,
None, false, prev.dt, null)
case CriticalPointType.upInclusive =>
new CriticalPointRange[T](Some(prev.value), true,
None, false, prev.dt, null)
case CriticalPointType.bothInclusive =>
new CriticalPointRange[T](Some(prev.value), false,
None, false, prev.dt, null)
}
}
}
// remove any redundant ranges for integral type
if (discreteType) {
result.map(r => {
var gotNew = false
val numeric = dt.asInstanceOf[IntegralType]
.numeric.asInstanceOf[Integral[T]]
val (start, startInclusive) = {
if (r.start.isDefined && !r.startInclusive) {
gotNew = true
(Some(numeric.plus(r.start.get, numeric.one)), true)
} else (r.start, r.startInclusive)
}
val (end, endInclusive) = {
if (r.end.isDefined && !r.endInclusive) {
gotNew = true
(Some(numeric.minus(r.end.get, numeric.one)), true)
} else (r.end, r.endInclusive)
}
if (gotNew) {
if (start.isDefined
&& end.isDefined
&& (start.get == numeric.plus(end.get, numeric.one))) {
null
} else new CriticalPointRange[T](start, startInclusive, end, endInclusive, r.dt, null)
} else r
}
).filter(r => r != null)
} else result
}
}
/**
* Step 1: generate critical point ranges for a particular dimension
* @param relation the HBase relation
* @param pred the predicate expression to work on
   * @return whether there are no valid CPRs, plus a list of critical point ranges
*/
private[hbase] def generateCriticalPointRanges(relation: HBaseRelation, pred: Option[Expression])
: (Boolean, Seq[CriticalPointRange[_]]) = {
if (pred.isEmpty) (false, Nil)
else {
val predExpr = pred.get
val predRefs = predExpr.references.toSeq
val row = new GenericMutableRow(predRefs.size)
// Step 1
generateCriticalPointRangesHelper(relation, predExpr, 0, row, predRefs)
}
}
/**
* The workhorse method to generate critical points
* @param relation the hbase relation
* @param predExpr the predicate to work on
* @param dimIndex the dimension index
* @param row a row for partial reduction
* @param predRefs the references in the predicate expression
   * @return whether all of this CPR's children are invalid, plus a list of critical point ranges
*/
private[hbase] def generateCriticalPointRangesHelper(relation: HBaseRelation,
predExpr: Expression,
dimIndex: Int,
row: MutableRow,
predRefs: Seq[Attribute])
: (Boolean, Seq[CriticalPointRange[_]]) = {
val keyDim = relation.partitionKeys(dimIndex)
val boundPred = BindReferences.bindReference(predExpr, predRefs)
val dt: AtomicType = keyDim.dataType.asInstanceOf[AtomicType]
// Step 1.1
val criticalPoints: Seq[CriticalPoint[dt.InternalType]]
= collect(predExpr, relation.partitionKeys(dimIndex))
if (criticalPoints.isEmpty) (false, Nil)
else {
val cpRanges: Seq[CriticalPointRange[dt.InternalType]]
= generateCriticalPointRange[dt.InternalType](criticalPoints, dimIndex, dt)
// Step 1.2
val keyIndex = predRefs.indexWhere(_.exprId == relation.partitionKeys(dimIndex).exprId)
val qualifiedCPRanges = cpRanges.filter(cpr => {
row.update(keyIndex, cpr)
val prRes = boundPred.partialReduce(row, predRefs)
if (prRes._1 == null) cpr.pred = prRes._2
prRes._1 == null || prRes._1.asInstanceOf[Boolean]
})
      if (!cpRanges.isEmpty && qualifiedCPRanges.isEmpty) {
        // all children are disqualified
        return (true, Nil)
      }
// Step 1.3
if (dimIndex < relation.partitionKeys.size - 1) {
// For point range, generate CPs for the next dim
qualifiedCPRanges.foreach(cpr => {
if (cpr.isPoint && cpr.pred != null) {
val (invalid, nextDimCPR) =
generateCriticalPointRangesHelper(relation, cpr.pred, dimIndex + 1, row, predRefs)
cpr.invalid = invalid
cpr.nextDimCriticalPointRanges = nextDimCPR
}
})
// If all child CPRs are invalid, this CPR is invalid.
if (!qualifiedCPRanges.exists(!_.invalid)) return (true, Nil)
}
// Update row(keyIndex) to null for future reuse
row.update(keyIndex, null)
(false, qualifiedCPRanges)
}
}
// Step 3
/**
* Search for a tight, either upper or lower, equality bound
* @param eq the equality point to start search with
* @param limit the limit for the search, exclusive
* @param src the source to search for a match
* @param tgt the list to search on
* @param threshold linear search threshold
* @param comp the comparison function
* @tparam S the source type
* @tparam T the type of the target elements
* @return the index of the target element
*/
private def binarySearchEquality[S, T](eq: Int, limit: Int, src: S, tgt: Seq[T], threshold: Int,
comp: (S, T) => Int): Int = {
val incr = if (eq > limit) -1 else 1 // search direction
var mid = limit
var newLimit = limit
var cmp = 0
var prevEq = eq
while (incr * (newLimit - prevEq) >= 0) {
if (incr * (newLimit - prevEq) < threshold) {
// linear search
mid = prevEq + incr
while (incr * (newLimit - mid) > 0 && cmp == 0) {
prevEq = mid
mid = mid + incr
cmp = comp(src, tgt(mid))
}
} else {
mid = (prevEq + newLimit) / 2
cmp = comp(src, tgt(mid))
if (cmp == 0) prevEq = mid
else newLimit = mid
}
}
prevEq
}
/**
*
   * @param src the source value to search for
* @param tgt the list to be searched on
* @param startIndex the index of the target to start search on
* @param upperBound TRUE for tight upper bound; FALSE for tight lower bound
* @param comp a comparison function
* @tparam S the type of the source
* @tparam T the type of the target elements
* @return the index of the result
*/
private def binarySearchForTightBound[S, T](src: S, tgt: Seq[T],
startIndex: Int,
upperBound: Boolean,
comp: (S, T) => Int,
threshold: Int = 10): Int = {
var left = startIndex
var right = tgt.size - 1
var prevLarger = -1
var prevSmaller = -1
var mid = -1
var cmp: Int = 0
while (right >= left) {
if (right - left < threshold) {
// linear search
cmp = 0
if (upperBound) {
// tight upper bound
var i = right + 1
while (i > left && cmp <= 0) {
i = i - 1
cmp = comp(src, tgt(i))
}
prevLarger = if (i == left && cmp <= 0) i
else i + 1
} else {
// tight lower bound
var i = left - 1
while (i < right && cmp >= 0) {
i = i + 1
cmp = comp(src, tgt(i))
}
prevSmaller = if (i == right && cmp >= 0) i
else i - 1
}
right = left - 1 // break the outer while loop
} else {
// binary search
mid = left + (right - left) / 2
cmp = comp(src, tgt(mid))
if (cmp == 0) {
if (upperBound) {
prevLarger = binarySearchEquality(mid, prevSmaller, src, tgt, threshold, comp)
} else {
prevSmaller = binarySearchEquality(mid, prevLarger, src, tgt, threshold, comp)
}
right = left // break the outer loop
} else if (cmp < 0) {
prevLarger = mid
right = mid - 1
} else {
prevSmaller = mid
left = mid + 1
}
}
}
if (upperBound) {
prevLarger
}
else {
prevSmaller
}
}
/**
* find partitions covered by a critical point range
* @param cpr: the critical point range
* @param partitions the partitions to be qualified
* @param pStartIndex the index of the partition to start the qualification process with
* @return the start and end index of the qualified partitions, inclusive on both boundaries
*/
private[hbase] def getQualifiedPartitions[T](cpr: MDCriticalPointRange[T],
partitions: Seq[HBasePartition],
pStartIndex: Int,
threshold: Int = 10): (Int, Int) = {
val largestStart = binarySearchForTightBound[MDCriticalPointRange[T], HBasePartition](
cpr, partitions, pStartIndex, upperBound = false,
(mdpr: MDCriticalPointRange[T], p: HBasePartition) =>
mdpr.compareWithPartition(startOrEnd = false, p), threshold)
val smallestEnd = binarySearchForTightBound[MDCriticalPointRange[T], HBasePartition](
cpr, partitions, pStartIndex, upperBound = true,
(mdpr: MDCriticalPointRange[T], p: HBasePartition) =>
mdpr.compareWithPartition(startOrEnd = true, p), threshold)
if (largestStart == -1 || smallestEnd == -1 || smallestEnd > largestStart) {
null // no overlapping
}
else {
(smallestEnd, largestStart)
}
}
/**
* Find critical point ranges covered by a partition
* @param partition: the partition
* @param crps the critical point ranges to be qualified
* @param startIndex the index of the crp to start the qualification process with
   * @return the index of the last critical point range possibly covered by the partition, or -1 if none
*/
private[hbase] def getQualifiedCRRanges(partition: HBasePartition,
crps: Seq[MDCriticalPointRange[_]],
startIndex: Int,
threshold: Int = 10): Int = {
val largestStart = binarySearchForTightBound[HBasePartition, MDCriticalPointRange[_]](
partition, crps, startIndex, upperBound = false,
(p: HBasePartition, mdpr: MDCriticalPointRange[_]) =>
-mdpr.compareWithPartition(startOrEnd = true, p), threshold)
// val smallestEnd = binarySearchForTightBound[HBasePartition, MDCriticalPointRange[_]](
// partition, crps, startIndex, upperBound = true,
// (p: HBasePartition, mdpr: MDCriticalPointRange[_]) =>
// -mdpr.compareWithPartition(startOrEnd = false, p), threshold)
largestStart
}
private[hbase] def prunePartitions(cprs: Seq[MDCriticalPointRange[_]],
pred: Option[Expression],
partitions: Seq[HBasePartition],
dimSize: Int,
threshold: Int = 10): Seq[HBasePartition] = {
    // No need to prune when there is only one HBase partition; a single partition
    // generally has no lower or upper bound keys.
if (cprs.isEmpty || partitions.length == 1) {
partitions.map(p => new HBasePartition(p.idx, p.mappedIndex, p.start, p.end, p.server, pred))
} else {
var cprStartIndex = 0
var pStartIndex = 0
var pIndex = 0
var done = false
var result = Seq[HBasePartition]()
while (cprStartIndex < cprs.size && pStartIndex < partitions.size && !done) {
val cpr = cprs(cprStartIndex)
val qualifiedPartitionIndexes =
getQualifiedPartitions(cpr, partitions, pStartIndex)
if (qualifiedPartitionIndexes != null) {
val (pstart, pend) = qualifiedPartitionIndexes
var p = partitions(pstart)
for (i <- pstart to pend) {
p = partitions(i)
result = result :+ new HBasePartition(pIndex, p.idx,
p.start, p.end, p.server, pred)
pIndex += 1
}
pStartIndex = pend + 1
// Step 3.2
// skip any critical point ranges that possibly are covered by
// the last of just-qualified partitions
val qualifiedCPRIndexes = getQualifiedCRRanges(
partitions(pend), cprs, cprStartIndex, threshold)
if (qualifiedCPRIndexes == -1) done = true
else cprStartIndex = if (qualifiedCPRIndexes == cprStartIndex) {
qualifiedCPRIndexes + 1
} else qualifiedCPRIndexes
} else {
done = true
}
}
result
}
}
/*
* Given a HBase relation, generate a sequence of pruned partitions and their
* associated filter predicates that are further subject to slave-side refinement
* The algorithm goes like this:
* 1. For each dimension key (starting from the primary key dimension)
* 1.1 collect the critical points and their sorted ranges
* 1.2 For each range, partial reduce to qualify and generate associated filter predicates
* 1.3 For each "point range", repeat Step 1 for the next key dimension
* 2. For each critical point based range,
* potentially expand the original top-level critical point ranges into multidimensional
* critical point ranges incorporating
* lower level nested critical point ranges for next key dimension(s)
   * 3. For each converted critical point based range, map it to the covering partitions
* 3.1 start the binary search from the last mapped partition
   *    3.2 For the last mapped partition, find the last critical point range it covers
   *        and use that as the starting critical point range when searching for the next
   *        set of mapped partitions. This ping-pong manner
* of searching will continue until either list is exhausted.
*/
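  /* Worked illustration (hypothetical, not taken from the original source; assumes a single
   * integral dimension key): for a predicate like `key = 5 OR key > 100`, Step 1 collects the
   * critical points 5 and 100 and, after partial reduction, keeps the qualified ranges [5, 5]
   * and (100, +inf). Step 2 has nothing to expand because there is no nested dimension. Step 3
   * binary-searches the sorted partitions for [5, 5], emits the covering partition(s), then uses
   * the last emitted partition to skip any remaining ranges it already covers before searching
   * for (100, +inf).
   */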
private[hbase] def generatePrunedPartitions(relation: HBaseRelation, pred: Option[Expression])
: Seq[HBasePartition] = {
if (pred.isEmpty) relation.partitions
else {
// Step 1
val (invalid, cprs): (Boolean, Seq[CriticalPointRange[_]]) = generateCriticalPointRanges(relation, pred)
if (invalid) {
Nil
} else {
// Step 2
val expandedCPRs: Seq[MDCriticalPointRange[_]] =
cprs.flatMap(_.flatten(new ArrayBuffer[(Any, AtomicType)](relation.dimSize)))
// Step 3
prunePartitions(expandedCPRs, pred, relation.partitions, relation.partitionKeys.size)
}
}
}
}
|
nkhuyu/Spark-SQL-on-HBase
|
src/main/scala/org/apache/spark/sql/hbase/HBaseCriticalPoint.scala
|
Scala
|
apache-2.0
| 31,135
|
package org.jetbrains
import _root_.java.io._
import _root_.java.lang.{Boolean => JavaBoolean}
import _root_.java.security.MessageDigest
import com.intellij.ide.plugins.PluginManager
import com.intellij.openapi.application.ApplicationManager
import com.intellij.openapi.extensions.PluginId
import com.intellij.openapi.externalSystem.util.ExternalSystemApiUtil
import com.intellij.openapi.util.io.FileUtil
import com.intellij.openapi.util.{Computable, Pair => IdeaPair}
import com.intellij.openapi.vfs.{VfsUtil, VirtualFile}
import com.intellij.util.{PathUtil, Function => IdeaFunction}
import plugins.scala.extensions._
import scala.annotation.tailrec
import scala.language.implicitConversions
import scala.reflect.ClassTag
/**
* @author Pavel Fatin
*/
package object sbt {
implicit def toIdeaFunction1[A, B](f: A => B): IdeaFunction[A, B] = new IdeaFunction[A, B] {
def fun(a: A): B = f(a)
}
implicit def toIdeaPredicate[A](f: A => Boolean): IdeaFunction[A, JavaBoolean] = new IdeaFunction[A, JavaBoolean] {
def fun(a: A): JavaBoolean = JavaBoolean.valueOf(f(a))
}
implicit def toIdeaFunction2[A, B, C](f: (A, B) => C): IdeaFunction[IdeaPair[A, B], C] = new IdeaFunction[IdeaPair[A, B], C] {
def fun(pair: IdeaPair[A, B]): C = f(pair.getFirst, pair.getSecond)
}
implicit class RichFile(val file: File) {
def /(path: String): File = new File(file, path)
def `<<`: File = << (1)
def `<<`(level: Int): File = RichFile.parent(file, level)
def name: String = file.getName
def path: String = file.getPath
def absolutePath: String = file.getAbsolutePath
def canonicalPath: String = ExternalSystemApiUtil.toCanonicalPath(file.getAbsolutePath)
def canonicalFile: File = new File(canonicalPath)
def parent: Option[File] = Option(file.getParentFile)
def endsWith(parts: String*): Boolean = endsWith0(file, parts.reverse)
private def endsWith0(file: File, parts: Seq[String]): Boolean = if (parts.isEmpty) true else
parts.head == file.getName && Option(file.getParentFile).exists(endsWith0(_, parts.tail))
def url: String = VfsUtil.getUrlForLibraryRoot(file)
def isAncestorOf(aFile: File): Boolean = FileUtil.isAncestor(file, aFile, true)
def isUnder(root: File): Boolean = FileUtil.isAncestor(root, file, true)
def isOutsideOf(root: File): Boolean = !FileUtil.isAncestor(root, file, false)
def write(lines: String*) {
writeLinesTo(file, lines: _*)
}
def copyTo(destination: File) {
copy(file, destination)
}
}
private object RichFile {
@tailrec
def parent(file: File, level: Int): File =
if (level > 0) parent(file.getParentFile, level - 1) else file
}
implicit class RichVirtualFile(val entry: VirtualFile) extends AnyVal {
def containsDirectory(name: String): Boolean = find(name).exists(_.isDirectory)
def containsFile(name: String): Boolean = find(name).exists(_.isFile)
def find(name: String): Option[VirtualFile] = Option(entry.findChild(name))
def isFile: Boolean = !entry.isDirectory
}
implicit class RichString(val str: String) extends AnyVal {
def toFile: File = new File(str)
def shaDigest: String = {
val digest = MessageDigest.getInstance("SHA1").digest(str.getBytes)
digest.map("%02x".format(_)).mkString
}
}
implicit class RichBoolean(val b: Boolean) extends AnyVal {
def option[A](a: => A): Option[A] = if(b) Some(a) else None
def either[A, B](right: => B)(left: => A): Either[A, B] = if (b) Right(right) else Left(left)
def seq[A](a: A*): Seq[A] = if (b) Seq(a: _*) else Seq.empty
}
implicit class RichSeq[T](val xs: Seq[T]) extends AnyVal {
def distinctBy[A](f: T => A): Seq[T] = {
val (_, ys) = xs.foldLeft((Set.empty[A], Vector.empty[T])) {
case ((set, acc), x) =>
val v = f(x)
if (set.contains(v)) (set, acc) else (set + v, acc :+ x)
}
ys
}
}
implicit class RichOption[T](val opt: Option[T]) extends AnyVal {
// Use for safely checking for null in chained calls
@inline def safeMap[A](f: T => A): Option[A] = if (opt.isEmpty) None else Option(f(opt.get))
  }
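  // Usage sketch (hypothetical Java-interop chain, not from the original file):
  //   Option(project).safeMap(_.getBasePath).safeMap(_.trim)
  // Each step yields None as soon as a call returns null, instead of throwing a NullPointerException.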
def jarWith[T : ClassTag]: File = {
val tClass = implicitly[ClassTag[T]].runtimeClass
Option(PathUtil.getJarPathForClass(tClass)).map(new File(_)).getOrElse {
throw new RuntimeException("Jar file not found for class " + tClass.getName)
}
}
def using[A <: Closeable, B](resource: A)(block: A => B): B = {
try {
block(resource)
} finally {
resource.close()
}
}
def writeLinesTo(file: File, lines: String*) {
using(new PrintWriter(new FileWriter(file))) { writer =>
lines.foreach(writer.println)
writer.flush()
}
}
def copy(source: File, destination: File) {
using(new BufferedInputStream(new FileInputStream(source))) { in =>
using(new BufferedOutputStream(new FileOutputStream(destination))) { out =>
var eof = false
while (!eof) {
val b = in.read()
if (b == -1) eof = true else out.write(b)
}
out.flush()
}
}
}
def usingTempFile[T](prefix: String, suffix: Option[String] = None)(block: File => T): T = {
val file = FileUtil.createTempFile(prefix, suffix.orNull, true)
try {
block(file)
} finally {
file.delete()
}
}
  private val NameWithExtension = """(.+)(\..+?)""".r
private def parse(fileName: String): (String, String) = fileName match {
case NameWithExtension(name, extension) => (name, extension)
case name => (name, "")
}
def inWriteAction[T](body: => T): T = {
ApplicationManager.getApplication.runWriteAction(new Computable[T] {
def compute: T = body
})
}
def isIdeaPluginEnabled(id: String): Boolean =
PluginId.findId(id).toOption
.flatMap(PluginManager.getPlugin(_).toOption)
.exists(_.isEnabled)
}
|
triplequote/intellij-scala
|
scala/scala-impl/src/org/jetbrains/sbt/package.scala
|
Scala
|
apache-2.0
| 5,938
|
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.physical.marklogic.qscript
import slamdata.Predef._
import quasar.contrib.matryoshka._
import quasar.contrib.pathy._
import quasar.fp.free._
import quasar.fp.ski.κ
import quasar.qscript._
import quasar.qscript.{MapFuncsCore => MFCore, MFC => _, _}
import quasar.{RenderTree, NonTerminal, Terminal}
import matryoshka.{Hole => _, _}
import matryoshka.data._
import matryoshka.patterns._
import matryoshka.implicits._
import pathy._, Path._
import scalaz._, Scalaz._
/* TODO switch from ADir to AFile
* @tparam A recursive position */
final case class ProjectPath[A](src: A, path: ADir)
object PathProject {
def unapply[T[_[_]], A](pr: PathMapFunc[T, A]): Option[ProjectPath[A]] =
Inject[ProjectPath, PathMapFunc[T, ?]].prj(pr)
}
object MFPath {
def unapply[T[_[_]], A](pr: PathMapFunc[T, A]): Option[MapFuncCore[T, A]] =
Inject[MapFuncCore[T, ?], PathMapFunc[T, ?]].prj(pr)
}
object ProjectPath extends ProjectPathInstances {
def elideGuards[T[_[_]]: RecursiveT](fpm: FreePathMap[T]): FreePathMap[T] = {
val alg: CoPathMapFunc[T, FreePathMap[T]] => CoPathMapFunc[T, FreePathMap[T]] = totally {
      case CoEnv(\/-(MFPath(MFCore.Guard(Embed(CoEnv(\/-(PathProject(_)))), _, cont, _)))) =>
CoEnv(cont.resume.swap)
}
fpm.transCata[FreePathMap[T]](alg)
}
def foldProjectField[T[_[_]]: RecursiveT](fm: FreeMap[T]): FreePathMap[T] = {
val alg: AlgebraicGTransform[(FreeMap[T], ?), FreePathMap[T], CoMapFunc[T, ?], CoPathMapFunc[T, ?]] = {
      case CoEnv(\/-(MFC(MFCore.ProjectField((_, Embed(CoEnv(\/-(PathProject(path))))), (MFCore.StrLit(field), _))))) => {
val dir0 = path.path </> dir(field)
val pp = ProjectPath(path.src, dir0)
CoEnv(Inject[ProjectPath, PathMapFunc[T, ?]].inj(pp).right)
}
      case CoEnv(\/-(MFC(MFCore.ProjectField((Embed(CoEnv(src)), _), (MFCore.StrLit(field), _))))) => {
val dir0 = rootDir[Sandboxed] </> dir(field)
val desc = src.fold(κ(Free.point[PathMapFunc[T, ?], Hole](SrcHole)),
Free.roll(_).mapSuspension(injectNT[MapFunc[T, ?], PathMapFunc[T, ?]]))
val pp = ProjectPath(desc, dir0)
CoEnv(Inject[ProjectPath, PathMapFunc[T, ?]].inj(pp).right)
}
      case CoEnv(\/-(other)) =>
CoEnv(Inject[MapFunc[T, ?], PathMapFunc[T, ?]].inj(other.map(_._2)).right)
      case CoEnv(-\/(h)) => CoEnv(h.left)
}
fm.transPara[FreePathMap[T]](alg)
}
}
sealed abstract class ProjectPathInstances {
implicit def functor: Functor[ProjectPath] =
new Functor[ProjectPath] {
def map[A, B](fa: ProjectPath[A])(f: A => B) = ProjectPath(f(fa.src), fa.path)
}
implicit def show[A]: Delay[Show, ProjectPath] = new Delay[Show, ProjectPath] {
def apply[A](sh: Show[A]): Show[ProjectPath[A]] = Show.show { pp =>
Cord.fromStrings(List("ProjectPath(", sh.shows(pp.src), ")"))
}
}
implicit def equal[A]: Delay[Equal, ProjectPath] = new Delay[Equal, ProjectPath] {
def apply[A](eq: Equal[A]) = Equal.equalBy(_.path)
}
implicit def renderTree[A]: Delay[RenderTree, ProjectPath] =
Delay.fromNT(λ[RenderTree ~> (RenderTree ∘ ProjectPath)#λ](rt =>
RenderTree.make(pp =>
NonTerminal(List("ProjectPath"), none,
List(rt.render(pp.src), Terminal(List("Path"), prettyPrint(pp.path).some))))))
}
|
drostron/quasar
|
marklogic/src/main/scala/quasar/physical/marklogic/qscript/ProjectPath.scala
|
Scala
|
apache-2.0
| 3,938
|
/*
* @author Philip Stutz
*
* Copyright 2012 University of Zurich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.signalcollect.nodeprovisioning.local
import akka.actor.ActorRef
import com.signalcollect.configuration.GraphConfiguration
import com.signalcollect.nodeprovisioning.NodeProvisioner
import com.signalcollect.nodeprovisioning.Node
import akka.actor.ActorSystem
import com.typesafe.config.ConfigFactory
import com.signalcollect.configuration.AkkaConfig
class LocalNodeProvisioner extends NodeProvisioner {
def getNodes: List[Node] = {
List(new LocalNode())
}
}
|
Tjoene/thesis
|
Case_Programs/signal-collect/src/main/scala/com/signalcollect/nodeprovisioning/local/LocalNodeProvisioner.scala
|
Scala
|
gpl-2.0
| 1,149
|
package scala.collection.mutable
import java.util.concurrent.TimeUnit
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra._
import scala.util.Random
@BenchmarkMode(Array(Mode.AverageTime))
@Fork(2)
@Threads(1)
@Warmup(iterations = 10)
@Measurement(iterations = 10)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@State(Scope.Benchmark)
class RedBlackTreeBenchmark {
@Param(Array("0", "1", "10", "100", "1000", "10000"))
var size: Int = _
var nums: Range = _
val rnd = new Random(0)
var set1: TreeSet[Int] = _
var perm: Array[Int] = _ // repeatably pseudo-random permutation
//var map1: TreeMap[Int, Int] = _
@Setup(Level.Trial) def init: Unit = {
nums = 1 to size
set1 = TreeSet.from(nums)
perm = new Array[Int](size)
val rem = scala.collection.mutable.ArrayBuffer.from(nums)
perm = Array.fill(size)(rem.remove(rnd.nextInt(rem.size)))
assert(rem.size == 0)
assert(perm.sum == nums.sum)
//map1 = TreeMap.from(nums.map(i => (i, i)))
}
@Benchmark
def build(bh: Blackhole): Unit =
bh.consume(TreeSet.from(nums))
@Benchmark
def buildRandom(bh: Blackhole): Unit =
bh.consume(TreeSet.from(perm))
@Benchmark
def iterator(bh: Blackhole): Unit = {
val it = set1.iterator
var res = 0
while(it.hasNext)
res += it.next()
bh.consume(res)
}
@Benchmark
def foreach(bh: Blackhole): Unit = {
var i = 0
set1.foreach { x => i += x }
bh.consume(i)
}
@Benchmark
def copy(bh: Blackhole): Unit =
bh.consume(TreeSet.from(set1))
@Benchmark
def copyDrain(bh: Blackhole): Unit = {
var s = TreeSet.from(set1)
perm.foreach(i => s.remove(i))
bh.consume(s)
}
/*
@Benchmark
def transformNone(bh: Blackhole): Unit =
bh.consume(map1.transform((k, v) => v))
@Benchmark
def transformAll(bh: Blackhole): Unit =
bh.consume(map1.transform((k, v) => v+1))
@Benchmark
def transformHalf(bh: Blackhole): Unit =
bh.consume(map1.transform((k, v) => if(k % 2 == 0) v else v+1))
*/
}
|
lrytz/scala
|
test/benchmarks/src/main/scala/scala/collection/mutable/RedBlackTreeBenchmark.scala
|
Scala
|
apache-2.0
| 2,023
|
package io.transwarp.midas.constant.midas.params.outlier
object LOFParams {
val minPts = "minPts"
}
|
transwarpio/rapidminer
|
api-driver/src/main/scala/io/transwarp/midas/constant/midas/params/outlier/LOFParams.scala
|
Scala
|
gpl-3.0
| 103
|
import scala.reflect.runtime.universe._
import scala.tools.reflect.Eval
object Test extends App {
reify {
object Persons {
/** A list of persons. To create a list, we use Predef.List
* which takes a variable number of arguments and constructs
* a list out of them.
*/
val persons = List(
new Person("Bob", 17),
new Person("John", 40),
new Person("Richard", 68)
)
/** A Person class. 'val' constructor parameters become
* public members of the class.
*/
class Person(val name: String, val age: Int)
/** Return an iterator over persons that are older than 20.
*/
def olderThan20(xs: Seq[Person]): Iterator[String] =
olderThan20(xs.iterator)
/** Return an iterator over persons older than 20, given
* an iterator over persons.
*/
def olderThan20(xs: Iterator[Person]): Iterator[String] = {
// The first expression is called a 'generator' and makes
// 'p' take values from 'xs'. The second expression is
// called a 'filter' and it is a boolean expression which
// selects only persons older than 20. There can be more than
// one generator and filter. The 'yield' expression is evaluated
// for each 'p' which satisfies the filters and used to assemble
// the resulting iterator
for (p <- xs if p.age > 20) yield p.name
}
}
/** Some functions over lists of numbers which demonstrate
* the use of for comprehensions.
*/
object Numeric {
/** Return the divisors of n. */
def divisors(n: Int): List[Int] =
for (i <- List.range(1, n+1) if n % i == 0) yield i
/** Is 'n' a prime number? */
def isPrime(n: Int) = divisors(n).length == 2
/** Return pairs of numbers whose sum is prime. */
def findNums(n: Int): Iterable[(Int, Int)] = {
// a for comprehension using two generators
for (i <- 1 until n;
j <- 1 until (i-1);
if isPrime(i + j)) yield (i, j)
}
/** Return the sum of the elements of 'xs'. */
def sum(xs: List[Double]): Double =
xs.foldLeft(0.0) { (x, y) => x + y }
/** Return the sum of pairwise product of the two lists. */
def scalProd(xs: List[Double], ys: List[Double]) =
sum(for((x, y) <- xs zip ys) yield x * y);
/** Remove duplicate elements in 'xs'. */
def removeDuplicates[A](xs: List[A]): List[A] =
if (xs.isEmpty)
xs
else
xs.head :: removeDuplicates(for (x <- xs.tail if x != xs.head) yield x)
}
// import all members of object 'persons' in the current scope
import Persons._
print("Persons over 20:")
olderThan20(persons) foreach { x => print(" " + x) }
println
import Numeric._
println("divisors(34) = " + divisors(34))
print("findNums(15) =")
findNums(15) foreach { x => print(" " + x) }
println
val xs = List(3.5, 5.0, 4.5)
println("average(" + xs + ") = " + sum(xs) / xs.length)
val ys = List(2.0, 1.0, 3.0)
println("scalProd(" + xs + ", " + ys +") = " + scalProd(xs, ys))
}.eval
}
|
felixmulder/scala
|
test/files/run/reify_fors_oldpatmat.scala
|
Scala
|
bsd-3-clause
| 3,204
|
package com.twitter.finagle.stats
import org.scalatest.junit.JUnitRunner
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.mock.MockitoSugar
import com.twitter.finagle.{Codec, CodecFactory, Service}
import com.twitter.util.{Await, Future}
import org.jboss.netty.channel.{Channels, ChannelPipelineFactory}
import org.jboss.netty.handler.codec.frame.{Delimiters, DelimiterBasedFrameDecoder}
import org.jboss.netty.handler.codec.string.{StringEncoder, StringDecoder}
import com.twitter.finagle.builder.{ClientBuilder, ServerBuilder}
import java.net.{InetAddress, InetSocketAddress}
import java.nio.charset.StandardCharsets.UTF_8
import com.twitter.ostrich.stats.Stats
@RunWith(classOf[JUnitRunner])
class FinagleStatsTest extends FunSuite with MockitoSugar {
val dummyService = new Service[String, String] {
def apply(request: String) = Future.value("You said: " + request)
}
class StringCodec extends CodecFactory[String, String] {
def server = Function.const {
new Codec[String, String] {
def pipelineFactory = new ChannelPipelineFactory {
def getPipeline = {
val pipeline = Channels.pipeline()
pipeline.addLast("line", new DelimiterBasedFrameDecoder(100, Delimiters.lineDelimiter: _*))
pipeline.addLast("stringDecoder", new StringDecoder(UTF_8))
pipeline.addLast("stringEncoder", new StringEncoder(UTF_8))
pipeline
}
}
}
}
def client = Function.const {
new Codec[String, String] {
def pipelineFactory = new ChannelPipelineFactory {
def getPipeline = {
val pipeline = Channels.pipeline()
pipeline.addLast("stringEncode", new StringEncoder(UTF_8))
pipeline.addLast("stringDecode", new StringDecoder(UTF_8))
pipeline
}
}
}
}
}
val statsReceiver = new OstrichStatsReceiver
val codec = new StringCodec
val server = ServerBuilder()
.name("server")
.bindTo(new InetSocketAddress(InetAddress.getLoopbackAddress, 0))
.codec(codec)
.reportTo(statsReceiver)
.maxConcurrentRequests(5)
.build(dummyService)
val service = ClientBuilder()
.name("client")
.reportTo(statsReceiver)
.hosts(server.boundAddress.asInstanceOf[InetSocketAddress])
.codec(codec)
.hostConnectionLimit(10)
.build()
test("system should correctly count connections") {
    /*TODO: is this ok? We do not register the connections gauge until a connection
    is needed.
Stats.getGauge("server/connections") must beSome(0.0)
Stats.getGauge("client/connections") must beSome(0.0)*/
    Await.result(service("Hello\n"))
assert(Stats.getGauge("server/connections") == Some(1.0))
assert(Stats.getGauge("client/connections") == Some(1.0))
}
test("system should show symmetric stats on client and server") {
def equalsGauge(name: String) =
assert(Stats.getCounter("server/" + name)() == Stats.getCounter("client/" + name)())
equalsGauge("requests")
equalsGauge("connects")
equalsGauge("success")
}
}
|
adriancole/finagle
|
finagle-ostrich4/src/test/scala/com/twitter/finagle/stats/FinagleStatsTest.scala
|
Scala
|
apache-2.0
| 3,122
|
package paperdoll.core.nondeterminism
import org.junit.Test
import Nondeterminism._
import scalaz.std.list._
import scalaz.syntax.monadPlus._
import org.fest.assertions.Assertions.assertThat
import scala.Vector
import paperdoll.core.effect.Effects
import scala.Predef.intWrapper
class NDetTest {
@Test def testIfte(): Unit = {
val gen = collapse((2 to 30).toList)
val eff = for {
n <- gen
x <- ifte(for {
d <- gen
if(d < n && n % d == 0)
} yield {}, {_: Unit => Zero[Int]}, n.point[Effects.One_[NDet_]#O])
} yield n
val _ = assertThat(runNDetVector(eff).run).isEqualTo(Vector(2, 3, 5, 7, 11, 13, 17, 19, 23, 29))
}
}
|
m50d/paperdoll
|
core/src/test/scala/paperdoll/core/nondeterminism/NDetTest.scala
|
Scala
|
apache-2.0
| 672
|
package lila.i18n
import play.api.libs.json._
import lila.common.PimpedJson._
import play.api.libs.ws.WS
import play.api.Play.current
import tube.translationTube
private[i18n] final class UpstreamFetch(upstreamUrl: Int => String) {
private type Fetched = Fu[List[Translation]]
def apply(from: Int): Fetched =
fetch(upstreamUrl(from)) map parse flatMap {
_.fold(e => fufail(e.toString), fuccess(_))
}
def apply(from: String): Fetched =
parseIntOption(from).fold(fufail("Bad from argument"): Fetched)(apply)
private def fetch(url: String): Fu[JsValue] =
WS.url(url).get() map (_.json)
private def parse(json: JsValue): JsResult[List[Translation]] =
Json.fromJson[List[Translation]](json)
}
|
Happy0/lila
|
modules/i18n/src/main/UpstreamFetch.scala
|
Scala
|
mit
| 732
|
/*
* Copyright 2015 the original author or authors.
* @https://github.com/scouter-project/scouter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package scouter.server.db.xlog;
import java.io.File;
import java.io.IOException;
import java.util.Hashtable;
import java.util.Properties;
import scouter.server.Configure;
import scouter.server.db.io.RealDataFile;
import scouter.server.db.io.zip.GZipStore;
import scouter.util.FileUtil;
import scouter.util.IClose;
object XLogDataWriter {
val table = new Hashtable[String, XLogDataWriter]();
def open(date: String, file: String): XLogDataWriter = {
table.synchronized {
var writer = table.get(file);
if (writer != null) {
writer.reference += 1;
} else {
writer = new XLogDataWriter(date, file);
table.put(file, writer);
}
return writer;
}
}
}
class XLogDataWriter(date: String, file: String) extends IClose {
var reference = 0;
val conf = Configure.getInstance()
var gzip = conf.compress_xlog_enabled
var f = new File(file + ".service.conf");
if (f.exists()) {
val properties = FileUtil.readProperties(f);
gzip = "true".equalsIgnoreCase(properties.getProperty("compress_xlog_enabled", ""+conf.compress_xlog_enabled).trim());
} else {
gzip = conf.compress_xlog_enabled;
val properties = new Properties();
properties.put("compress_xlog_enabled", "" + conf.compress_xlog_enabled);
FileUtil.writeProperties(f, properties);
}
var out:RealDataFile = null
if(gzip==false){
out=new RealDataFile(file + ".service");
}
def write(bytes: Array[Byte]): Long = {
if (gzip) {
return GZipStore.getInstance().write(date, bytes);
}
this.synchronized {
val point = out.getOffset();
out.writeShort(bytes.length.toShort);
out.write(bytes);
out.flush();
return point;
}
}
override def close() {
XLogDataWriter.table.synchronized {
if (this.reference == 0) {
XLogDataWriter.table.remove(this.file);
FileUtil.close(out);
} else {
this.reference -= 1
}
}
}
}
|
scouter-project/scouter
|
scouter.server/src/main/scala/scouter/server/db/xlog/XLogDataWriter.scala
|
Scala
|
apache-2.0
| 2,878
|
/* scala-stm - (c) 2009-2010, Stanford University, PPL */
package scala.concurrent.stm
object MaybeTxn {
implicit val unknown = TxnUnknown
}
/** `MaybeTxn` allows lookup of the implicit `InTxn` instance without failing
* if the `InTxn` is not known at compile time. `implicitly[MaybeTxn]` will
* bind to an implicit `InTxn` if one is available, otherwise it will bind to
* the object `TxnUnkown`. A `MaybeTxn` of `TxnUnknown` should trigger a
* dynamically-scoped `InTxn` search using `Txn.current`.
*
* @author Nathan Bronson
*/
trait MaybeTxn
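/** Illustrative sketch, not part of the original scala-stm source: a method that binds a
  * statically known `InTxn` when one is in implicit scope, and otherwise receives the
  * `TxnUnknown` fallback, at which point a dynamically-scoped lookup would follow.
  */
object MaybeTxnUsageSketch {
  def describe(implicit maybe: MaybeTxn): String = maybe match {
    case txn: InTxn => "statically bound transaction: " + txn
    case _          => "no static transaction; a dynamically-scoped lookup would follow"
  }
}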
|
djspiewak/scala-stm
|
src/main/scala/scala/concurrent/stm/MaybeTxn.scala
|
Scala
|
bsd-3-clause
| 564
|
package de.mukis
import com.typesafe.config.ConfigFactory
import java.util.concurrent.Executors
import java.util.concurrent.TimeUnit
import java.nio.file.Files
import java.nio.charset.Charset
object ConfigApp extends App {
val logFile = Files.createTempFile("mukis-", ".log")
// Creating an executor service to schedule the config parsing
val service = Executors newScheduledThreadPool 1
service.scheduleAtFixedRate(new Runnable() {
override def run() {
// Loading config
ConfigFactory.invalidateCaches
val config = ConfigFactory load () getConfig ("mukis")
// Writing the config content to the logfile
val log = Files.newBufferedWriter(logFile, Charset forName "UTF-8")
log write (s"User:\\t${config.root render ()}")
log.newLine
log.close
}
}, 0, 5, TimeUnit.SECONDS)
}
|
muuki88/sbt-native-packager-examples
|
linux-mappings/src/main/scala/de/mukis/ConfigApp.scala
|
Scala
|
apache-2.0
| 841
|
package lila.swiss
import akka.stream.scaladsl._
import reactivemongo.api.bson._
import lila.db.dsl._
import lila.user.User
// https://www.fide.com/FIDE/handbook/C04Annex2_TRF16.pdf
final class SwissTrf(
sheetApi: SwissSheetApi,
colls: SwissColls,
baseUrl: lila.common.config.BaseUrl
)(implicit ec: scala.concurrent.ExecutionContext) {
private type Bits = List[(Int, String)]
def apply(swiss: Swiss, sorted: Boolean): Source[String, _] = Source futureSource {
fetchPlayerIds(swiss) map { apply(swiss, _, sorted) }
}
def apply(swiss: Swiss, playerIds: PlayerIds, sorted: Boolean): Source[String, _] =
SwissPlayer.fields { f =>
tournamentLines(swiss) concat
forbiddenPairings(swiss, playerIds) concat sheetApi
.source(swiss, sort = sorted.??($doc(f.rating -> -1)))
.map((playerLine(swiss, playerIds) _).tupled)
.map(formatLine)
}
private def tournamentLines(swiss: Swiss) =
Source(
List(
s"012 ${swiss.name}",
s"022 $baseUrl/swiss/${swiss.id}",
s"032 Lichess",
s"042 ${dateFormatter print swiss.startsAt}",
s"052 ${swiss.finishedAt ?? dateFormatter.print}",
s"062 ${swiss.nbPlayers}",
s"092 Individual: Swiss-System",
s"102 $baseUrl/swiss",
s"XXR ${swiss.settings.nbRounds}",
s"XXC ${chess.Color.fromWhite(swiss.id.value(0).toInt % 2 == 0).name}1"
)
)
private def playerLine(
swiss: Swiss,
playerIds: PlayerIds
)(p: SwissPlayer, pairings: Map[SwissRound.Number, SwissPairing], sheet: SwissSheet): Bits =
List(
3 -> "001",
8 -> playerIds.getOrElse(p.userId, 0).toString,
(15 + p.userId.size) -> p.userId,
52 -> p.rating.toString,
84 -> f"${sheet.points.value}%1.1f"
) ::: {
swiss.allRounds.zip(sheet.outcomes).flatMap { case (rn, outcome) =>
val pairing = pairings get rn
List(
95 -> pairing.map(_ opponentOf p.userId).flatMap(playerIds.get).??(_.toString),
97 -> pairing.map(_ colorOf p.userId).??(_.fold("w", "b")),
99 -> {
import SwissSheet._
outcome match {
case Absent => "-"
case Late => "H"
case Bye => "U"
case Draw => "="
case Win => "1"
case Loss => "0"
case Ongoing => "Z"
case ForfeitLoss => "-"
case ForfeitWin => "+"
}
}
).map { case (l, s) => (l + (rn.value - 1) * 10, s) }
}
} ::: {
p.absent && swiss.round.value < swiss.settings.nbRounds
}.?? {
List( // http://www.rrweb.org/javafo/aum/JaVaFo2_AUM.htm#_Unusual_info_extensions
95 -> "0000",
97 -> "",
99 -> "-"
).map { case (l, s) => (l + swiss.round.value * 10, s) }
}
private def formatLine(bits: Bits): String =
bits.foldLeft("") { case (acc, (pos, txt)) =>
s"""$acc${" " * (pos - txt.length - acc.length)}$txt"""
}
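  // Worked example (illustrative, not in the original lila source): the Int in each pair is
  // the TRF column at which that value must end. formatLine(List(3 -> "001", 8 -> "17"))
  // left-pads each value so that "001" ends at column 3 and "17" at column 8, producing
  // "001   17".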
private val dateFormatter = org.joda.time.format.DateTimeFormat forStyle "M-"
def fetchPlayerIds(swiss: Swiss): Fu[PlayerIds] =
SwissPlayer
.fields { p =>
import BsonHandlers._
colls.player
.aggregateOne() { framework =>
import framework._
Match($doc(p.swissId -> swiss.id)) -> List(
Sort(Descending(p.rating)),
Group(BSONNull)("us" -> PushField(p.userId))
)
}
.map {
~_.flatMap(_.getAsOpt[List[User.ID]]("us"))
}
.map {
_.view.zipWithIndex.map { case (userId, index) =>
(userId, index + 1)
}.toMap
}
}
private def forbiddenPairings(swiss: Swiss, playerIds: PlayerIds): Source[String, _] =
if (swiss.settings.forbiddenPairings.isEmpty) Source.empty[String]
else
Source.fromIterator { () =>
swiss.settings.forbiddenPairings.linesIterator.flatMap {
_.trim.toLowerCase.split(' ').map(_.trim) match {
case Array(u1, u2) if u1 != u2 =>
for {
id1 <- playerIds.get(u1)
id2 <- playerIds.get(u2)
} yield s"XXP $id1 $id2"
case _ => none
}
}
}
}
|
luanlv/lila
|
modules/swiss/src/main/SwissTrf.scala
|
Scala
|
mit
| 4,443
|
package com.imadethatcow.hipchat.rooms
import com.imadethatcow.hipchat.common.{Logging, Common}
import Common._
import com.imadethatcow.hipchat.common.caseclass.{HistoriesResponse, HistoryItem}
import scala.concurrent.{ExecutionContext, Future}
class ViewHistory(private[this] val apiToken: String, private[this] val baseUrlOpt: Option[String] = None)(implicit executor: ExecutionContext) extends Logging {
private def url(roomIdOrName: String) = (reqFromBaseUrl(baseUrlOpt) / "room" / roomIdOrName / "history").GET
def roomHistory(
roomIdOrName: String,
date: Option[Any] = None, // Must be either "recent" or conform to ISO-8601, use joda for the latter
timezone: Option[String] = None,
startIndex: Option[Long] = None,
maxResults: Option[Long] = None,
reverse: Option[Boolean] = None
): Future[Seq[HistoryItem]] = {
var req = addToken(url(roomIdOrName), apiToken)
for (d <- date) req = req.addQueryParameter("date", d.toString)
for (tz <- timezone) req = req.addQueryParameter("timezone", tz)
for (si <- startIndex) req = req.addQueryParameter("start-index", si.toString)
for (mr <- maxResults) req = req.addQueryParameter("max-results", mr.toString)
for (r <- reverse) req = req.addQueryParameter("reverse", r.toString)
resolveAndDeserialize[HistoriesResponse](req) map {
response => response.items
}
}
}
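/** Illustrative sketch, not part of the original source: fetches the 10 most recent
  * history items for a room. The API token and room name are placeholders.
  */
object ViewHistoryUsageSketch {
  import scala.concurrent.ExecutionContext.Implicits.global
  def recent(): Future[Seq[HistoryItem]] =
    new ViewHistory("your-api-token").roomHistory("myroom", date = Some("recent"), maxResults = Some(10L))
}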
|
poweld/hipchat-scala
|
src/main/scala/com/imadethatcow/hipchat/rooms/ViewHistory.scala
|
Scala
|
mit
| 1,416
|
package spotlight.model.outlier
import omnibus.commons.util._
trait IsQuorum extends Serializable {
def apply( results: OutlierAlgorithmResults ): Boolean
def totalIssued: Int
protected def evaluateRemainder( results: OutlierAlgorithmResults ): Boolean = results.size >= totalIssued
}
object IsQuorum {
case class AtLeastQuorumSpecification( override val totalIssued: Int, triggerPoint: Int ) extends IsQuorum {
override def apply( results: OutlierAlgorithmResults ): Boolean = {
if ( results.count { _._2.hasAnomalies } >= triggerPoint &&
OutlierAlgorithmResults.tallyMax( results ) >= triggerPoint ) {
true
} else {
evaluateRemainder( results )
}
}
override def toString: String = s"${getClass.safeSimpleName}(trigger:[${triggerPoint}] of total:[${totalIssued}])"
}
case class MajorityQuorumSpecification( override val totalIssued: Int, triggerPoint: Double ) extends IsQuorum {
override def apply( results: OutlierAlgorithmResults ): Boolean = {
val actual = results.count { _._2.hasAnomalies }.toDouble / totalIssued.toDouble
if ( actual > triggerPoint &&
( OutlierAlgorithmResults.tallyMax( results ).toDouble / totalIssued.toDouble ) > triggerPoint ) {
true
} else {
evaluateRemainder( results )
}
}
override def toString: String = s"${getClass.safeSimpleName}(trigger:[${triggerPoint * 100}]% of total:[${totalIssued}])"
}
}
|
dmrolfs/lineup
|
core/src/main/scala/spotlight/model/outlier/IsQuorum.scala
|
Scala
|
mit
| 1,467
|
package org.jetbrains.plugins.scala.lang.parser.parsing.xml
import org.jetbrains.plugins.scala.lang.lexer.ScalaXmlTokenTypes
import org.jetbrains.plugins.scala.lang.parser.parsing.builder.ScalaPsiBuilder
import org.jetbrains.plugins.scala.lang.parser.util.ParserPatcher
import scala.annotation.tailrec
/**
* @author Alexander Podkhalyuzin
* Date: 18.04.2008
*/
/*
* Content ::= [CharData] {Content1 [CharData]}
*
* Content1 ::= XmlContent
* | Reference
* | ScalaExpr
*/
object Content {
def parse(builder: ScalaPsiBuilder): Boolean = {
val contentMarker = builder.mark()
builder.getTokenType match {
case ScalaXmlTokenTypes.XML_DATA_CHARACTERS =>
builder.advanceLexer()
case ScalaXmlTokenTypes.XML_CHAR_ENTITY_REF =>
builder.advanceLexer()
case _ =>
}
val patcher = ParserPatcher.getSuitablePatcher(builder)
@tailrec
def subparse() {
var isReturn = false
if (!XmlContent.parse(builder) &&
!Reference.parse(builder) &&
!ScalaExpr.parse(builder) && !patcher.parse(builder)) isReturn = true
builder.getTokenType match {
case ScalaXmlTokenTypes.XML_DATA_CHARACTERS =>
builder.advanceLexer()
case ScalaXmlTokenTypes.XML_CHAR_ENTITY_REF =>
builder.advanceLexer()
case ScalaXmlTokenTypes.XML_ENTITY_REF_TOKEN => builder.advanceLexer()
case _ =>
if (isReturn) return
}
subparse()
}
subparse()
contentMarker.drop()
true
}
}
|
gtache/intellij-lsp
|
intellij-lsp-dotty/src/org/jetbrains/plugins/scala/lang/parser/parsing/xml/Content.scala
|
Scala
|
apache-2.0
| 1,553
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.execution
import scala.collection.JavaConversions._
import org.apache.hadoop.hive.conf.HiveConf
import org.apache.hadoop.hive.ql.metadata.{Partition => HivePartition}
import org.apache.hadoop.hive.serde.serdeConstants
import org.apache.hadoop.hive.serde2.objectinspector._
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.execution._
import org.apache.spark.sql.hive._
import org.apache.spark.sql.types.{BooleanType, DataType}
/**
* The Hive table scan operator. Column and partition pruning are both handled.
*
* @param requestedAttributes Attributes to be fetched from the Hive table.
 * @param relation The Hive table to be scanned.
 * @param partitionPruningPred An optional partition pruning predicate for a partitioned table.
*/
private[hive]
case class HiveTableScan(
requestedAttributes: Seq[Attribute],
relation: MetastoreRelation,
partitionPruningPred: Seq[Expression])(
@transient val context: HiveContext)
extends LeafNode {
require(partitionPruningPred.isEmpty || relation.hiveQlTable.isPartitioned,
"Partition pruning predicates only supported for partitioned tables.")
// Retrieve the original attributes based on expression ID so that capitalization matches.
val attributes = requestedAttributes.map(relation.attributeMap)
// Bind all partition key attribute references in the partition pruning predicate for later
// evaluation.
private[this] val boundPruningPred = partitionPruningPred.reduceLeftOption(And).map { pred =>
require(
pred.dataType == BooleanType,
s"Data type of predicate $pred must be BooleanType rather than ${pred.dataType}.")
BindReferences.bindReference(pred, relation.partitionKeys)
}
  // Create a local copy of hiveconf, so that scan-specific modifications do not impact
  // other queries.
@transient
private[this] val hiveExtraConf = new HiveConf(context.hiveconf)
// append columns ids and names before broadcast
addColumnMetadataToConf(hiveExtraConf)
@transient
private[this] val hadoopReader =
new HadoopTableReader(attributes, relation, context, hiveExtraConf)
private[this] def castFromString(value: String, dataType: DataType) = {
Cast(Literal(value), dataType).eval(null)
}
private def addColumnMetadataToConf(hiveConf: HiveConf) {
// Specifies needed column IDs for those non-partitioning columns.
val neededColumnIDs = attributes.flatMap(relation.columnOrdinals.get).map(o => o: Integer)
HiveShim.appendReadColumns(hiveConf, neededColumnIDs, attributes.map(_.name))
val tableDesc = relation.tableDesc
val deserializer = tableDesc.getDeserializerClass.newInstance
deserializer.initialize(hiveConf, tableDesc.getProperties)
// Specifies types and object inspectors of columns to be scanned.
val structOI = ObjectInspectorUtils
.getStandardObjectInspector(
deserializer.getObjectInspector,
ObjectInspectorCopyOption.JAVA)
.asInstanceOf[StructObjectInspector]
val columnTypeNames = structOI
.getAllStructFieldRefs
.map(_.getFieldObjectInspector)
.map(TypeInfoUtils.getTypeInfoFromObjectInspector(_).getTypeName)
.mkString(",")
hiveConf.set(serdeConstants.LIST_COLUMN_TYPES, columnTypeNames)
hiveConf.set(serdeConstants.LIST_COLUMNS, relation.attributes.map(_.name).mkString(","))
}
/**
   * Prunes partitions not involved in the query plan.
*
* @param partitions All partitions of the relation.
* @return Partitions that are involved in the query plan.
*/
private[hive] def prunePartitions(partitions: Seq[HivePartition]) = {
boundPruningPred match {
case None => partitions
case Some(shouldKeep) => partitions.filter { part =>
val dataTypes = relation.partitionKeys.map(_.dataType)
val castedValues = for ((value, dataType) <- part.getValues.zip(dataTypes)) yield {
castFromString(value, dataType)
}
// Only partitioned values are needed here, since the predicate has already been bound to
// partition key attribute references.
val row = InternalRow.fromSeq(castedValues)
shouldKeep.eval(row).asInstanceOf[Boolean]
}
}
}
protected override def doExecute(): RDD[InternalRow] = if (!relation.hiveQlTable.isPartitioned) {
hadoopReader.makeRDDForTable(relation.hiveQlTable)
} else {
hadoopReader.makeRDDForPartitionedTable(
prunePartitions(relation.getHiveQlPartitions(partitionPruningPred)))
}
override def output: Seq[Attribute] = attributes
}
|
ArvinDevel/onlineAggregationOnSparkV2
|
sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveTableScan.scala
|
Scala
|
apache-2.0
| 5,633
|
/*
* Copyright 2013 http4s.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.http4s.internal
import javax.net.ssl.SSLContext
import scala.util.control.NonFatal
/** Indicates how to resolve SSLContext.
* * NoSSL = do not use SSL/HTTPS
* * TryDefaultSSLContext = `SSLContext.getDefault()`, or `None` on systems where the default is unavailable
* * Provided = use the explicitly passed SSLContext
*/
private[http4s] sealed trait SSLContextOption extends Product with Serializable
private[http4s] object SSLContextOption {
case object NoSSL extends SSLContextOption
case object TryDefaultSSLContext extends SSLContextOption
final case class Provided(sslContext: SSLContext) extends SSLContextOption
def toMaybeSSLContext(sco: SSLContextOption): Option[SSLContext] =
sco match {
case SSLContextOption.NoSSL => None
case SSLContextOption.TryDefaultSSLContext => tryDefaultSslContext
case SSLContextOption.Provided(context) => Some(context)
}
def tryDefaultSslContext: Option[SSLContext] =
try Some(SSLContext.getDefault())
catch {
case NonFatal(_) => None
}
}
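/** Illustrative sketch, not part of the original http4s source: shows how each
  * SSLContextOption case resolves to an Option[SSLContext] through toMaybeSSLContext.
  */
private[http4s] object SSLContextOptionSketch {
  def resolveAll(): Seq[Option[SSLContext]] =
    Seq(
      SSLContextOption.toMaybeSSLContext(SSLContextOption.NoSSL), // always None
      SSLContextOption.toMaybeSSLContext(SSLContextOption.TryDefaultSSLContext), // Some(default) or None
      SSLContextOption.toMaybeSSLContext(SSLContextOption.Provided(SSLContext.getDefault())) // Some(provided)
    )
}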
|
http4s/http4s
|
core/shared/src/main/scala/org/http4s/internal/SSLContextOption.scala
|
Scala
|
apache-2.0
| 1,680
|
package io.scalac.newspaper.crawler.urls
import java.nio.file.Path
import akka.NotUsed
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{FileIO, Source}
import akka.util.ByteString
import scala.collection.immutable
import scala.concurrent.{ExecutionContext, Future}
import scala.io
class FileURLsStore(urlsFilePath: Path)(implicit ec: ExecutionContext, materializer: ActorMaterializer) extends URLsStore {
private var urls: immutable.Seq[String] = readData(urlsFilePath)
  private def readData(path: Path) =
    io.Source.fromFile(path.toFile).getLines().to[immutable.Seq]
override def getURLs: Source[String, NotUsed] =
Source(urls)
def removeURL(url: String): Future[Boolean] = {
urls = urls.filterNot(_ == url)
Source(urls)
.map(s => ByteString(s + "\\n"))
.runWith(FileIO.toPath(urlsFilePath))
.map(_ => true)
}
}
|
ScalaConsultants/newspaper
|
crawler/src/main/scala/io/scalac/newspaper/crawler/urls/FileURLsStore.scala
|
Scala
|
agpl-3.0
| 887
|
package scalaz.stream.mongodb.query
import org.bson.types.ObjectId
import java.util.Date
/**
* Witness for allowed predicates in query
* @tparam A
*/
trait QueryPredicateWitness[A]
object QueryPredicateWitness {
implicit val boolWitness = new QueryPredicateWitness[Boolean] {}
implicit val stringWitness = new QueryPredicateWitness[String] {}
implicit def optionWitness[A: QueryPredicateWitness] = new QueryPredicateWitness[Option[A]] {}
implicit val intWitness = new QueryPredicateWitness[Int] {}
implicit val longWitness = new QueryPredicateWitness[Long] {}
implicit val doubleWitness = new QueryPredicateWitness[Double] {}
implicit val objectIdWitness = new QueryPredicateWitness[ObjectId] {}
implicit val dateWitness = new QueryPredicateWitness[Date] {}
}
|
Spinoco/scalaz-stream-mongodb
|
core/src/main/scala/scalaz/stream/mongodb/query/QueryPredicateWitness.scala
|
Scala
|
mit
| 794
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.tuning
import java.util.{List => JList, Locale}
import scala.collection.JavaConverters._
import scala.concurrent.Future
import scala.concurrent.duration.Duration
import scala.language.existentials
import org.apache.hadoop.fs.Path
import org.json4s.DefaultFormats
import org.apache.spark.annotation.Since
import org.apache.spark.internal.Logging
import org.apache.spark.ml.{Estimator, Model}
import org.apache.spark.ml.evaluation.Evaluator
import org.apache.spark.ml.param.{DoubleParam, ParamMap, ParamValidators}
import org.apache.spark.ml.param.shared.{HasCollectSubModels, HasParallelism}
import org.apache.spark.ml.util._
import org.apache.spark.sql.{DataFrame, Dataset}
import org.apache.spark.sql.types.StructType
import org.apache.spark.util.ThreadUtils
/**
* Params for [[TrainValidationSplit]] and [[TrainValidationSplitModel]].
*/
private[ml] trait TrainValidationSplitParams extends ValidatorParams {
/**
* Param for ratio between train and validation data. Must be between 0 and 1.
* Default: 0.75
*
* @group param
*/
val trainRatio: DoubleParam = new DoubleParam(this, "trainRatio",
"ratio between training set and validation set (>= 0 && <= 1)", ParamValidators.inRange(0, 1))
/** @group getParam */
def getTrainRatio: Double = $(trainRatio)
setDefault(trainRatio -> 0.75)
}
/**
* Validation for hyper-parameter tuning.
* Randomly splits the input dataset into train and validation sets,
* and uses evaluation metric on the validation set to select the best model.
* Similar to [[CrossValidator]], but only splits the set once.
*/
@Since("1.5.0")
class TrainValidationSplit @Since("1.5.0") (@Since("1.5.0") override val uid: String)
extends Estimator[TrainValidationSplitModel]
with TrainValidationSplitParams with HasParallelism with HasCollectSubModels
with MLWritable with Logging {
@Since("1.5.0")
def this() = this(Identifiable.randomUID("tvs"))
/** @group setParam */
@Since("1.5.0")
def setEstimator(value: Estimator[_]): this.type = set(estimator, value)
/** @group setParam */
@Since("1.5.0")
def setEstimatorParamMaps(value: Array[ParamMap]): this.type = set(estimatorParamMaps, value)
/** @group setParam */
@Since("1.5.0")
def setEvaluator(value: Evaluator): this.type = set(evaluator, value)
/** @group setParam */
@Since("1.5.0")
def setTrainRatio(value: Double): this.type = set(trainRatio, value)
/** @group setParam */
@Since("2.0.0")
def setSeed(value: Long): this.type = set(seed, value)
/**
* Set the maximum level of parallelism to evaluate models in parallel.
* Default is 1 for serial evaluation
*
* @group expertSetParam
*/
@Since("2.3.0")
def setParallelism(value: Int): this.type = set(parallelism, value)
/**
* Whether to collect submodels when fitting. If set, we can get submodels from
* the returned model.
*
   * Note: If this param is set, then when you save the returned model, you can set an option
* "persistSubModels" to be "true" before saving, in order to save these submodels.
* You can check documents of
* {@link org.apache.spark.ml.tuning.TrainValidationSplitModel.TrainValidationSplitModelWriter}
* for more information.
*
* @group expertSetParam
*/
@Since("2.3.0")
def setCollectSubModels(value: Boolean): this.type = set(collectSubModels, value)
@Since("2.0.0")
override def fit(dataset: Dataset[_]): TrainValidationSplitModel = {
val schema = dataset.schema
transformSchema(schema, logging = true)
val est = $(estimator)
val eval = $(evaluator)
val epm = $(estimatorParamMaps)
// Create execution context based on $(parallelism)
val executionContext = getExecutionContext
val instr = Instrumentation.create(this, dataset)
instr.logParams(trainRatio, seed, parallelism)
logTuningParams(instr)
val Array(trainingDataset, validationDataset) =
dataset.randomSplit(Array($(trainRatio), 1 - $(trainRatio)), $(seed))
trainingDataset.cache()
validationDataset.cache()
val collectSubModelsParam = $(collectSubModels)
var subModels: Option[Array[Model[_]]] = if (collectSubModelsParam) {
Some(Array.fill[Model[_]](epm.length)(null))
} else None
// Fit models in a Future for training in parallel
logDebug(s"Train split with multiple sets of parameters.")
val metricFutures = epm.zipWithIndex.map { case (paramMap, paramIndex) =>
Future[Double] {
val model = est.fit(trainingDataset, paramMap).asInstanceOf[Model[_]]
if (collectSubModelsParam) {
subModels.get(paramIndex) = model
}
// TODO: duplicate evaluator to take extra params from input
val metric = eval.evaluate(model.transform(validationDataset, paramMap))
logDebug(s"Got metric $metric for model trained with $paramMap.")
metric
} (executionContext)
}
// Wait for all metrics to be calculated
val metrics = metricFutures.map(ThreadUtils.awaitResult(_, Duration.Inf))
// Unpersist training & validation set once all metrics have been produced
trainingDataset.unpersist()
validationDataset.unpersist()
logInfo(s"Train validation split metrics: ${metrics.toSeq}")
val (bestMetric, bestIndex) =
if (eval.isLargerBetter) metrics.zipWithIndex.maxBy(_._1)
else metrics.zipWithIndex.minBy(_._1)
logInfo(s"Best set of parameters:\n${epm(bestIndex)}")
logInfo(s"Best train validation split metric: $bestMetric.")
val bestModel = est.fit(dataset, epm(bestIndex)).asInstanceOf[Model[_]]
instr.logSuccess(bestModel)
copyValues(new TrainValidationSplitModel(uid, bestModel, metrics)
.setSubModels(subModels).setParent(this))
}
@Since("1.5.0")
override def transformSchema(schema: StructType): StructType = transformSchemaImpl(schema)
@Since("1.5.0")
override def copy(extra: ParamMap): TrainValidationSplit = {
val copied = defaultCopy(extra).asInstanceOf[TrainValidationSplit]
if (copied.isDefined(estimator)) {
copied.setEstimator(copied.getEstimator.copy(extra))
}
if (copied.isDefined(evaluator)) {
copied.setEvaluator(copied.getEvaluator.copy(extra))
}
copied
}
@Since("2.0.0")
override def write: MLWriter = new TrainValidationSplit.TrainValidationSplitWriter(this)
}
@Since("2.0.0")
object TrainValidationSplit extends MLReadable[TrainValidationSplit] {
@Since("2.0.0")
override def read: MLReader[TrainValidationSplit] = new TrainValidationSplitReader
@Since("2.0.0")
override def load(path: String): TrainValidationSplit = super.load(path)
private[TrainValidationSplit] class TrainValidationSplitWriter(instance: TrainValidationSplit)
extends MLWriter {
ValidatorParams.validateParams(instance)
override protected def saveImpl(path: String): Unit =
ValidatorParams.saveImpl(path, instance, sc)
}
private class TrainValidationSplitReader extends MLReader[TrainValidationSplit] {
/** Checked against metadata when loading model */
private val className = classOf[TrainValidationSplit].getName
override def load(path: String): TrainValidationSplit = {
implicit val format = DefaultFormats
val (metadata, estimator, evaluator, estimatorParamMaps) =
ValidatorParams.loadImpl(path, sc, className)
val tvs = new TrainValidationSplit(metadata.uid)
.setEstimator(estimator)
.setEvaluator(evaluator)
.setEstimatorParamMaps(estimatorParamMaps)
metadata.getAndSetParams(tvs, skipParams = Option(List("estimatorParamMaps")))
tvs
}
}
}
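/** Illustrative usage sketch, not part of the original Spark source: shows how the
  * TrainValidationSplit estimator defined above is typically wired together with an
  * estimator, an evaluator and a parameter grid. The estimator/evaluator choices here
  * (LinearRegression, RegressionEvaluator) are assumptions for demonstration only; the
  * caller is expected to supply a DataFrame with "features" and "label" columns.
  */
private[tuning] object TrainValidationSplitUsageSketch {
  import org.apache.spark.ml.evaluation.RegressionEvaluator
  import org.apache.spark.ml.regression.LinearRegression

  def example(data: DataFrame): TrainValidationSplitModel = {
    val lr = new LinearRegression()
    // Candidate hyper-parameter combinations to evaluate on the validation split.
    val grid = new ParamGridBuilder()
      .addGrid(lr.regParam, Array(0.1, 0.01))
      .build()
    val tvs = new TrainValidationSplit()
      .setEstimator(lr)
      .setEvaluator(new RegressionEvaluator())
      .setEstimatorParamMaps(grid)
      .setTrainRatio(0.8) // 80% train / 20% validation
    tvs.fit(data) // refits the best parameter combination on the full dataset
  }
}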
/**
* Model from train validation split.
*
* @param uid Id.
* @param bestModel Estimator determined best model.
* @param validationMetrics Evaluated validation metrics.
*/
@Since("1.5.0")
class TrainValidationSplitModel private[ml] (
@Since("1.5.0") override val uid: String,
@Since("1.5.0") val bestModel: Model[_],
@Since("1.5.0") val validationMetrics: Array[Double])
extends Model[TrainValidationSplitModel] with TrainValidationSplitParams with MLWritable {
/** A Python-friendly auxiliary constructor. */
private[ml] def this(uid: String, bestModel: Model[_], validationMetrics: JList[Double]) = {
this(uid, bestModel, validationMetrics.asScala.toArray)
}
private var _subModels: Option[Array[Model[_]]] = None
private[tuning] def setSubModels(subModels: Option[Array[Model[_]]])
: TrainValidationSplitModel = {
_subModels = subModels
this
}
// A Python-friendly auxiliary method
private[tuning] def setSubModels(subModels: JList[Model[_]])
: TrainValidationSplitModel = {
_subModels = if (subModels != null) {
Some(subModels.asScala.toArray)
} else {
None
}
this
}
/**
   * @return the sub-models represented as an array. The index of the array corresponds to the
   *         ordering of estimatorParamMaps.
* @throws IllegalArgumentException if subModels are not available. To retrieve subModels,
* make sure to set collectSubModels to true before fitting.
*/
@Since("2.3.0")
def subModels: Array[Model[_]] = {
    require(_subModels.isDefined, "subModels not available. To retrieve subModels, make sure " +
      "to set collectSubModels to true before fitting.")
_subModels.get
}
@Since("2.3.0")
def hasSubModels: Boolean = _subModels.isDefined
@Since("2.0.0")
override def transform(dataset: Dataset[_]): DataFrame = {
transformSchema(dataset.schema, logging = true)
bestModel.transform(dataset)
}
@Since("1.5.0")
override def transformSchema(schema: StructType): StructType = {
bestModel.transformSchema(schema)
}
@Since("1.5.0")
override def copy(extra: ParamMap): TrainValidationSplitModel = {
val copied = new TrainValidationSplitModel (
uid,
bestModel.copy(extra).asInstanceOf[Model[_]],
validationMetrics.clone()
).setSubModels(TrainValidationSplitModel.copySubModels(_subModels))
copyValues(copied, extra).setParent(parent)
}
@Since("2.0.0")
override def write: TrainValidationSplitModel.TrainValidationSplitModelWriter = {
new TrainValidationSplitModel.TrainValidationSplitModelWriter(this)
}
}
@Since("2.0.0")
object TrainValidationSplitModel extends MLReadable[TrainValidationSplitModel] {
private[TrainValidationSplitModel] def copySubModels(subModels: Option[Array[Model[_]]])
: Option[Array[Model[_]]] = {
subModels.map(_.map(_.copy(ParamMap.empty).asInstanceOf[Model[_]]))
}
@Since("2.0.0")
override def read: MLReader[TrainValidationSplitModel] = new TrainValidationSplitModelReader
@Since("2.0.0")
override def load(path: String): TrainValidationSplitModel = super.load(path)
/**
* Writer for TrainValidationSplitModel.
* @param instance TrainValidationSplitModel instance used to construct the writer
*
* TrainValidationSplitModel supports an option "persistSubModels", with possible values
* "true" or "false". If you set the collectSubModels Param before fitting, then you can
* set "persistSubModels" to "true" in order to persist the subModels. By default,
* "persistSubModels" will be "true" when subModels are available and "false" otherwise.
* If subModels are not available, then setting "persistSubModels" to "true" will cause
* an exception.
*/
@Since("2.3.0")
final class TrainValidationSplitModelWriter private[tuning] (
instance: TrainValidationSplitModel) extends MLWriter {
ValidatorParams.validateParams(instance)
override protected def saveImpl(path: String): Unit = {
val persistSubModelsParam = optionMap.getOrElse("persistsubmodels",
if (instance.hasSubModels) "true" else "false")
require(Array("true", "false").contains(persistSubModelsParam.toLowerCase(Locale.ROOT)),
s"persistSubModels option value ${persistSubModelsParam} is invalid, the possible " +
"values are \"true\" or \"false\"")
val persistSubModels = persistSubModelsParam.toBoolean
import org.json4s.JsonDSL._
val extraMetadata = ("validationMetrics" -> instance.validationMetrics.toSeq) ~
("persistSubModels" -> persistSubModels)
ValidatorParams.saveImpl(path, instance, sc, Some(extraMetadata))
val bestModelPath = new Path(path, "bestModel").toString
instance.bestModel.asInstanceOf[MLWritable].save(bestModelPath)
if (persistSubModels) {
require(instance.hasSubModels, "When persisting tuning models, you can only set " +
"persistSubModels to true if the tuning was done with collectSubModels set to true. " +
"To save the sub-models, try rerunning fitting with collectSubModels set to true.")
val subModelsPath = new Path(path, "subModels")
for (paramIndex <- 0 until instance.getEstimatorParamMaps.length) {
val modelPath = new Path(subModelsPath, paramIndex.toString).toString
instance.subModels(paramIndex).asInstanceOf[MLWritable].save(modelPath)
}
}
}
}
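  // Illustrative usage (not part of the original source), assuming `model` was fit with
  // collectSubModels enabled and relying on MLWriter's option() method (Spark 2.3+):
  //   model.write.option("persistSubModels", "true").save("/tmp/tvs-model")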
private class TrainValidationSplitModelReader extends MLReader[TrainValidationSplitModel] {
/** Checked against metadata when loading model */
private val className = classOf[TrainValidationSplitModel].getName
override def load(path: String): TrainValidationSplitModel = {
implicit val format = DefaultFormats
val (metadata, estimator, evaluator, estimatorParamMaps) =
ValidatorParams.loadImpl(path, sc, className)
val bestModelPath = new Path(path, "bestModel").toString
val bestModel = DefaultParamsReader.loadParamsInstance[Model[_]](bestModelPath, sc)
val validationMetrics = (metadata.metadata \ "validationMetrics").extract[Seq[Double]].toArray
val persistSubModels = (metadata.metadata \ "persistSubModels")
.extractOrElse[Boolean](false)
val subModels: Option[Array[Model[_]]] = if (persistSubModels) {
val subModelsPath = new Path(path, "subModels")
val _subModels = Array.fill[Model[_]](estimatorParamMaps.length)(null)
for (paramIndex <- 0 until estimatorParamMaps.length) {
val modelPath = new Path(subModelsPath, paramIndex.toString).toString
_subModels(paramIndex) =
DefaultParamsReader.loadParamsInstance(modelPath, sc)
}
Some(_subModels)
} else None
val model = new TrainValidationSplitModel(metadata.uid, bestModel, validationMetrics)
.setSubModels(subModels)
model.set(model.estimator, estimator)
.set(model.evaluator, evaluator)
.set(model.estimatorParamMaps, estimatorParamMaps)
metadata.getAndSetParams(model, skipParams = Option(List("estimatorParamMaps")))
model
}
}
}
|
ddna1021/spark
|
mllib/src/main/scala/org/apache/spark/ml/tuning/TrainValidationSplit.scala
|
Scala
|
apache-2.0
| 15,540
|
package name.orhideous.twicher.error
sealed trait TwicherError extends Exception
object TwicherError {
case object NoSuchQuote extends TwicherError
case object NoQuotes extends TwicherError
}
|
Orhideous/twicher
|
src/main/scala/name/orhideous/twicher/error/TwicherError.scala
|
Scala
|
gpl-3.0
| 202
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs102.calculations
import uk.gov.hmrc.ct.accounts.frs102.boxes._
import uk.gov.hmrc.ct.box.CtTypeConverters
trait NetCurrentAssetsLiabilitiesCalculator extends CtTypeConverters {
def calculateCurrentNetCurrentAssetsLiabilities(ac56: AC56, ac138B: AC138B, ac58: AC58): AC60 = {
(ac56.value, ac138B.value, ac58.value) match {
case (None, None, None) => AC60(None)
case _ => AC60(Some(ac56 + ac138B - ac58))
}
}
def calculatePreviousNetCurrentAssetsLiabilities(ac57: AC57, ac139B: AC139B, ac59: AC59): AC61 = {
(ac57.value, ac139B.value, ac59.value) match {
case (None, None, None) => AC61(None)
case _ => AC61(Some(ac57 + ac139B - ac59))
}
}
}
|
hmrc/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/accounts/frs102/calculations/NetCurrentAssetsLiabilitiesCalculator.scala
|
Scala
|
apache-2.0
| 1,333
|
package scredis.io
import scredis.protocol._
import scredis.{Subscription, Transaction}
import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}
import scala.util.Try
trait Connection {
implicit val dispatcher: ExecutionContext
}
trait NonBlockingConnection {
protected[scredis] def send[A](request: Request[A]): Future[A]
}
trait TransactionEnabledConnection {
protected[scredis] def send(transaction: Transaction): Future[Vector[Try[Any]]]
}
trait BlockingConnection {
protected[scredis] def sendBlocking[A](request: Request[A])(implicit timeout: Duration): Try[A]
}
trait SubscriberConnection {
protected[scredis] def sendAsSubscriber(request: Request[_]): Future[Int]
}
|
scredis/scredis
|
src/main/scala/scredis/io/Connection.scala
|
Scala
|
apache-2.0
| 719
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.bwsw.sj.engine.batch.benchmark.read_kafka.storm
import com.bwsw.sj.common.utils.benchmark.ClassRunner
import com.bwsw.sj.engine.batch.benchmark.read_kafka.storm.StormBenchmarkBatchLiterals._
import com.bwsw.sj.engine.core.testutils.benchmark.batch.{BatchBenchmark, BatchBenchmarkConfig, BatchBenchmarkParameters}
import com.bwsw.sj.engine.core.testutils.benchmark.loader.kafka.KafkaBenchmarkDataSenderConfig
import com.bwsw.sj.engine.regular.benchmark.read_kafka.storm.StormBenchmarkLiterals._
/**
* Provides methods for testing the speed of reading data by [[http://storm.apache.org Apache Storm]] from Kafka in
* windowed mode
*
* Topic deletion must be enabled on the Kafka server.
*
* @param benchmarkConfig configuration of application
* @param senderConfig configuration of Kafka topic
* @author Pavel Tomskikh
*/
class StormBenchmark(benchmarkConfig: BatchBenchmarkConfig,
senderConfig: KafkaBenchmarkDataSenderConfig)
extends BatchBenchmark(benchmarkConfig) {
override protected def runProcess(parameters: BatchBenchmarkParameters, messagesCount: Long): Process = {
val properties = Map(
kafkaTopicProperty -> senderConfig.topic,
outputFilenameProperty -> outputFile.getAbsolutePath,
messagesCountProperty -> messagesCount,
batchSizeProperty -> parameters.batchSize,
windowSizeProperty -> parameters.windowSize,
slidingIntervalProperty -> parameters.slidingInterval)
.map { case (property, value) => property -> value.toString }
new ClassRunner(classOf[StormBenchmarkLocalCluster], properties = properties).start()
}
}
|
bwsw/sj-platform
|
core/sj-engine-core/src/test/scala-2.12/com/bwsw/sj/engine/batch/benchmark/read_kafka/storm/StormBenchmark.scala
|
Scala
|
apache-2.0
| 2,449
|
/*
* The MIT License
*
* Copyright (c) 2017 Fulcrum Genomics LLC
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.fulcrumgenomics.bam
import java.nio.file.Paths
import com.fulcrumgenomics.FgBioDef._
import com.fulcrumgenomics.bam.api.{SamRecord, SamSource, SamWriter}
import com.fulcrumgenomics.testing.UnitSpec
import com.fulcrumgenomics.util.Metric
import com.fulcrumgenomics.vcf.api
import com.fulcrumgenomics.vcf.api.{Genotype, VcfCount, VcfFieldType, VcfFormatHeader, VcfSource, VcfWriter}
import htsjdk.samtools.SAMFileHeader.SortOrder
import htsjdk.samtools.{MergingSamRecordIterator, SamFileHeaderMerger}
import org.scalatest.ParallelTestExecution
class EstimatePoolingFractionsTest extends UnitSpec with ParallelTestExecution {
private val Samples = Seq("HG01879", "HG01112", "HG01583", "HG01500", "HG03742", "HG03052")
private val DataDir = Paths.get("src/test/resources/com/fulcrumgenomics/bam/estimate_pooling_fractions")
private val Bams = Samples.map(s => DataDir.resolve(s + ".bam"))
private val Vcf = DataDir.resolve("variants.vcf.gz")
private val Regions = DataDir.resolve("regions.interval_list")
/** Merges one or more BAMs and returns the path to the merged BAM. */
def merge(bams: Seq[PathToBam]): PathToBam = {
val readers = bams.map(bam => SamSource(bam))
// Mangle the library names in the header so that the merger sees duplicate RGs as different RGs.
readers.zipWithIndex.foreach { case (reader, index) =>
reader.header.getReadGroups.foreach(rg => rg.setLibrary(rg.getLibrary + ":" + index))
}
val headerMerger = new SamFileHeaderMerger(SortOrder.coordinate, readers.iterator.map(_.header).toJavaList, false)
val iterator = new MergingSamRecordIterator(headerMerger, readers.iterator.map(_.toSamReader).toJavaList, true)
val output = makeTempFile("merged.", ".bam")
val out = SamWriter(output, headerMerger.getMergedHeader, compression = 0)
iterator.map(_.asInstanceOf[SamRecord]).foreach { r =>
// Add the RG ID to the read name so we don't have identical read names when merging the same BAM 2+ times
r.name = r.readGroup.getReadGroupId + ":" + r.name
out += r
}
out.close()
readers.foreach(_.safelyClose())
output
}
"EstimatePoolingFractions" should "estimate approximately 50/50 for two samples mixed 50/50" in {
val bam = merge(Bams.take(2))
val out = makeTempFile("pooling_metrics.", ".txt")
new EstimatePoolingFractions(vcf=Vcf, bam=bam, output=out, samples=Samples.take(2)).execute()
val metrics = Metric.read[PoolingFractionMetric](out)
metrics should have size 2
metrics.foreach(m => 0.5 should (be >= m.ci99_low and be <= m.ci99_high))
}
Range.inclusive(3, Samples.size-1).foreach { n =>
it should s"accurately estimate a mixof $n samples" in {
val bam = merge(Bams.take(n))
val out = makeTempFile("pooling_metrics.", ".txt")
new EstimatePoolingFractions(vcf=Vcf, bam=bam, output=out, samples=Samples.take(n)).execute()
val metrics = Metric.read[PoolingFractionMetric](out)
metrics should have size n
metrics.foreach(m => (1/n.toDouble) should (be >= m.ci99_low and be <= m.ci99_high))
}
}
it should "work with an interval list, and also use all samples if no samples are provided" in {
val bam = merge(Bams)
val out = makeTempFile("pooling_metrics.", ".txt")
new EstimatePoolingFractions(vcf=Vcf, bam=bam, output=out, intervals=Seq(Regions)).execute()
val metrics = Metric.read[PoolingFractionMetric](out)
metrics should have size Samples.size
metrics.foreach(m => (1/Samples.size.toDouble) should (be >= m.ci99_low and be <= m.ci99_high))
}
it should "accurately estimate unequal mixes of two samples" in {
val samples = Samples.take(2)
val Seq(bam1, bam2) = Bams.take(2)
val bam = merge(Seq(bam1, bam1, bam1, bam2))
val out = makeTempFile("pooling_metrics.", ".txt")
new EstimatePoolingFractions(vcf=Vcf, bam=bam, output=out, samples=samples).execute()
val metrics = Metric.read[PoolingFractionMetric](out)
metrics should have size 2
metrics.foreach {m =>
val expected = if (m.sample == samples.head) 0.75 else 0.25
expected should (be >= m.ci99_low and be <= m.ci99_high)
}
}
it should "accurately estimate a three sample mixture using the AF genotype field" in {
val samples = Samples.take(3)
val Seq(s1, s2, s3) = samples
val bams = Bams.take(3)
val bam = merge(bams)
val vcf = {
val vcf = makeTempFile("mixture.", ".vcf.gz")
val in = api.VcfSource(Vcf)
val hd = in.header.copy(
samples = IndexedSeq(s1, "two_sample_mixture"),
formats = VcfFormatHeader("AF", VcfCount.OnePerAltAllele, kind=VcfFieldType.Float, description="Allele Frequency") +: in.header.formats
)
val out = VcfWriter(vcf, hd)
in.filter(_.alleles.size == 2).foreach { v =>
val gts = samples.map(v.gt)
// Only bother with sites where all samples have called genotypes and there is variation
if (gts.forall((_.isFullyCalled)) && gts.flatMap(_.calls).toSet.size > 1) {
// Make a mixture of the 2nd and 3rd samples
val (mixCalls, mixAf) = {
val input = gts.drop(1)
if (input.forall(_.isHomRef)) (IndexedSeq(v.alleles.ref, v.alleles.ref), 0.0)
else if (input.forall(_.isHomVar)) (IndexedSeq(v.alleles.alts.head, v.alleles.alts.head), 1.0)
else {
val calls = input.flatMap(_.calls)
(IndexedSeq(v.alleles.ref, v.alleles.alts.head), calls.count(_ != v.alleles.ref) / calls.size.toDouble)
}
}
val mixtureGt = Genotype(
alleles = v.alleles,
sample = "two_sample_mixture",
calls = mixCalls,
attrs = Map("AF" -> IndexedSeq[Float](mixAf.toFloat))
)
out += v.copy(genotypes=Map(s1 -> gts.head, mixtureGt.sample -> mixtureGt))
}
}
in.safelyClose()
out.close()
vcf
}
// Run the estimator and test the outputs
val out = makeTempFile("pooling_metrics.", ".txt")
new EstimatePoolingFractions(vcf=vcf, bam=bam, output=out, minGenotypeQuality = -1).execute()
val metrics = Metric.read[PoolingFractionMetric](out)
metrics should have size 2
metrics.foreach {m =>
val expected = if (m.sample == samples.head) 1/3.0 else 2/3.0
expected should (be >= m.ci99_low and be <= m.ci99_high)
}
}
}
|
fulcrumgenomics/fgbio
|
src/test/scala/com/fulcrumgenomics/bam/EstimatePoolingFractionsTest.scala
|
Scala
|
mit
| 7,640
|
package org.openurp.edu.eams.teach.grade.lesson.service
import org.openurp.edu.base.Student
import org.openurp.edu.teach.code.GradeType
import org.openurp.edu.teach.grade.model.CourseGradeState
import org.openurp.edu.teach.grade.model.ExamGradeState
import org.openurp.edu.teach.lesson.Lesson
trait LessonGradeService {
def getGradeTypes(state: CourseGradeState, userCategoryId: java.lang.Long): List[GradeType]
def getGradeTypes(lesson: Lesson): List[GradeType]
def getCanInputGradeTypes(isOnlyCanInput: Boolean): List[GradeType]
def isCheckEvaluation(std: Student): Boolean
def getState(gradeType: GradeType, gradeState: CourseGradeState, precision: java.lang.Integer): ExamGradeState
}
|
openurp/edu-eams-webapp
|
grade/src/main/scala/org/openurp/edu/eams/teach/grade/lesson/service/LessonGradeService.scala
|
Scala
|
gpl-3.0
| 709
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions.aggregate
import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, DataOutputStream}
import java.util
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.{TypeCheckFailure, TypeCheckSuccess}
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.util._
import org.apache.spark.sql.errors.QueryExecutionErrors
import org.apache.spark.sql.types._
import org.apache.spark.util.collection.OpenHashMap
/**
* The Percentile aggregate function returns the exact percentile(s) of numeric column `expr` at
* the given percentage(s) with value range in [0.0, 1.0].
*
 * Because the number of elements and their partial order cannot be determined in advance,
 * we have to store all the elements in memory, so be aware that too many elements can
 * cause GC pauses and eventually OutOfMemoryErrors.
*
* @param child child expression that produce numeric column value with `child.eval(inputRow)`
* @param percentageExpression Expression that represents a single percentage value or an array of
* percentage values. Each percentage value must be in the range
* [0.0, 1.0].
*/
@ExpressionDescription(
usage =
"""
_FUNC_(col, percentage [, frequency]) - Returns the exact percentile value of numeric column
`col` at the given percentage. The value of percentage must be between 0.0 and 1.0. The
value of frequency should be positive integral
_FUNC_(col, array(percentage1 [, percentage2]...) [, frequency]) - Returns the exact
percentile value array of numeric column `col` at the given percentage(s). Each value
of the percentage array must be between 0.0 and 1.0. The value of frequency should be
positive integral
""",
examples = """
Examples:
> SELECT _FUNC_(col, 0.3) FROM VALUES (0), (10) AS tab(col);
3.0
> SELECT _FUNC_(col, array(0.25, 0.75)) FROM VALUES (0), (10) AS tab(col);
[2.5,7.5]
""",
group = "agg_funcs",
since = "2.1.0")
case class Percentile(
child: Expression,
percentageExpression: Expression,
frequencyExpression : Expression,
mutableAggBufferOffset: Int = 0,
inputAggBufferOffset: Int = 0)
extends TypedImperativeAggregate[OpenHashMap[AnyRef, Long]] with ImplicitCastInputTypes {
def this(child: Expression, percentageExpression: Expression) = {
this(child, percentageExpression, Literal(1L), 0, 0)
}
def this(child: Expression, percentageExpression: Expression, frequency: Expression) = {
this(child, percentageExpression, frequency, 0, 0)
}
override def prettyName: String = "percentile"
override def withNewMutableAggBufferOffset(newMutableAggBufferOffset: Int): Percentile =
copy(mutableAggBufferOffset = newMutableAggBufferOffset)
override def withNewInputAggBufferOffset(newInputAggBufferOffset: Int): Percentile =
copy(inputAggBufferOffset = newInputAggBufferOffset)
// Mark as lazy so that percentageExpression is not evaluated during tree transformation.
@transient
private lazy val returnPercentileArray = percentageExpression.dataType.isInstanceOf[ArrayType]
@transient
private lazy val percentages = percentageExpression.eval() match {
case null => null
case num: Double => Array(num)
case arrayData: ArrayData => arrayData.toDoubleArray()
}
override def children: Seq[Expression] = {
child :: percentageExpression :: frequencyExpression :: Nil
}
// Returns null for empty inputs
override def nullable: Boolean = true
override lazy val dataType: DataType = percentageExpression.dataType match {
case _: ArrayType => ArrayType(DoubleType, false)
case _ => DoubleType
}
override def inputTypes: Seq[AbstractDataType] = {
val percentageExpType = percentageExpression.dataType match {
case _: ArrayType => ArrayType(DoubleType, false)
case _ => DoubleType
}
Seq(NumericType, percentageExpType, IntegralType)
}
// Check the inputTypes are valid, and the percentageExpression satisfies:
// 1. percentageExpression must be foldable;
// 2. percentages(s) must be in the range [0.0, 1.0].
override def checkInputDataTypes(): TypeCheckResult = {
// Validate the inputTypes
val defaultCheck = super.checkInputDataTypes()
if (defaultCheck.isFailure) {
defaultCheck
} else if (!percentageExpression.foldable) {
// percentageExpression must be foldable
TypeCheckFailure("The percentage(s) must be a constant literal, " +
s"but got $percentageExpression")
} else if (percentages == null) {
TypeCheckFailure("Percentage value must not be null")
} else if (percentages.exists(percentage => percentage < 0.0 || percentage > 1.0)) {
// percentages(s) must be in the range [0.0, 1.0]
TypeCheckFailure("Percentage(s) must be between 0.0 and 1.0, " +
s"but got $percentageExpression")
} else {
TypeCheckSuccess
}
}
private def toDoubleValue(d: Any): Double = d match {
case d: Decimal => d.toDouble
case n: Number => n.doubleValue
}
override def createAggregationBuffer(): OpenHashMap[AnyRef, Long] = {
// Initialize new counts map instance here.
new OpenHashMap[AnyRef, Long]()
}
override def update(
buffer: OpenHashMap[AnyRef, Long],
input: InternalRow): OpenHashMap[AnyRef, Long] = {
val key = child.eval(input).asInstanceOf[AnyRef]
val frqValue = frequencyExpression.eval(input)
// Null values are ignored in counts map.
if (key != null && frqValue != null) {
val frqLong = frqValue.asInstanceOf[Number].longValue()
// add only when frequency is positive
if (frqLong > 0) {
buffer.changeValue(key, frqLong, _ + frqLong)
} else if (frqLong < 0) {
throw QueryExecutionErrors.negativeValueUnexpectedError(frequencyExpression)
}
}
buffer
}
override def merge(
buffer: OpenHashMap[AnyRef, Long],
other: OpenHashMap[AnyRef, Long]): OpenHashMap[AnyRef, Long] = {
other.foreach { case (key, count) =>
buffer.changeValue(key, count, _ + count)
}
buffer
}
override def eval(buffer: OpenHashMap[AnyRef, Long]): Any = {
generateOutput(getPercentiles(buffer))
}
private def getPercentiles(buffer: OpenHashMap[AnyRef, Long]): Seq[Double] = {
if (buffer.isEmpty) {
return Seq.empty
}
val sortedCounts = buffer.toSeq.sortBy(_._1)(
child.dataType.asInstanceOf[NumericType].ordering.asInstanceOf[Ordering[AnyRef]])
val accumulatedCounts = sortedCounts.scanLeft((sortedCounts.head._1, 0L)) {
case ((key1, count1), (key2, count2)) => (key2, count1 + count2)
}.tail
val maxPosition = accumulatedCounts.last._2 - 1
percentages.map { percentile =>
getPercentile(accumulatedCounts, maxPosition * percentile)
}
}
private def generateOutput(results: Seq[Double]): Any = {
if (results.isEmpty) {
null
} else if (returnPercentileArray) {
new GenericArrayData(results)
} else {
results.head
}
}
/**
* Get the percentile value.
*
* This function has been based upon similar function from HIVE
* `org.apache.hadoop.hive.ql.udf.UDAFPercentile.getPercentile()`.
*/
private def getPercentile(aggreCounts: Seq[(AnyRef, Long)], position: Double): Double = {
// We may need to do linear interpolation to get the exact percentile
val lower = position.floor.toLong
val higher = position.ceil.toLong
// Use binary search to find the lower and the higher position.
val countsArray = aggreCounts.map(_._2).toArray[Long]
val lowerIndex = binarySearchCount(countsArray, 0, aggreCounts.size, lower + 1)
val higherIndex = binarySearchCount(countsArray, 0, aggreCounts.size, higher + 1)
val lowerKey = aggreCounts(lowerIndex)._1
if (higher == lower) {
// no interpolation needed because position does not have a fraction
return toDoubleValue(lowerKey)
}
val higherKey = aggreCounts(higherIndex)._1
if (higherKey == lowerKey) {
// no interpolation needed because lower position and higher position has the same key
return toDoubleValue(lowerKey)
}
// Linear interpolation to get the exact percentile
(higher - position) * toDoubleValue(lowerKey) + (position - lower) * toDoubleValue(higherKey)
}
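  // Worked example (illustrative, not in the original source): with accumulated counts
  // Seq((1.0, 2L), (5.0, 5L)), the key 1.0 covers positions 0..1 and the key 5.0 covers
  // positions 2..4. For position 2.4 (e.g. percentage 0.6 with maxPosition 4), lower = 2
  // and higher = 3 both resolve to the key 5.0, so 5.0 is returned without interpolation.
  // Had they resolved to different keys k1 and k2, the result would be
  // (higher - position) * k1 + (position - lower) * k2.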
/**
* use a binary search to find the index of the position closest to the current value.
*/
private def binarySearchCount(
countsArray: Array[Long], start: Int, end: Int, value: Long): Int = {
util.Arrays.binarySearch(countsArray, 0, end, value) match {
case ix if ix < 0 => -(ix + 1)
case ix => ix
}
}
override def serialize(obj: OpenHashMap[AnyRef, Long]): Array[Byte] = {
val buffer = new Array[Byte](4 << 10) // 4K
val bos = new ByteArrayOutputStream()
val out = new DataOutputStream(bos)
try {
val projection = UnsafeProjection.create(Array[DataType](child.dataType, LongType))
// Write pairs in counts map to byte buffer.
obj.foreach { case (key, count) =>
val row = InternalRow.apply(key, count)
val unsafeRow = projection.apply(row)
out.writeInt(unsafeRow.getSizeInBytes)
unsafeRow.writeToStream(out, buffer)
}
out.writeInt(-1)
out.flush()
bos.toByteArray
} finally {
out.close()
bos.close()
}
}
override def deserialize(bytes: Array[Byte]): OpenHashMap[AnyRef, Long] = {
val bis = new ByteArrayInputStream(bytes)
val ins = new DataInputStream(bis)
try {
val counts = new OpenHashMap[AnyRef, Long]
// Read unsafeRow size and content in bytes.
var sizeOfNextRow = ins.readInt()
while (sizeOfNextRow >= 0) {
val bs = new Array[Byte](sizeOfNextRow)
ins.readFully(bs)
val row = new UnsafeRow(2)
row.pointTo(bs, sizeOfNextRow)
// Insert the pairs into counts map.
val key = row.get(0, child.dataType)
val count = row.get(1, LongType).asInstanceOf[Long]
counts.update(key, count)
sizeOfNextRow = ins.readInt()
}
counts
} finally {
ins.close()
bis.close()
}
}
}
|
witgo/spark
|
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Percentile.scala
|
Scala
|
apache-2.0
| 11,232
|
package jigg.pipeline
/*
Copyright 2013-2015 Hiroshi Noji
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import scala.annotation.tailrec
/** If you want to define your own Requirement, extend this trait and override `parent` as needed.
*/
trait Requirement {
def parent = Seq[Requirement]()
def allAncestors(): Set[Requirement] = {
@tailrec
def collectAncestors(r: Seq[Requirement], current: Set[Requirement]): Set[Requirement] = {
r.map(_.parent).flatten match {
case Seq() => current ++ r
case seq => collectAncestors(seq, current ++ r)
}
}
collectAncestors(Seq(this), Set[Requirement]())
}
}
object Requirement {
case object Dsplit extends Requirement
case object Ssplit extends Requirement
case object Tokenize extends Requirement
case object POS extends Requirement
case object Lemma extends Requirement
case object Dependencies extends Requirement
case object NER extends Requirement
case object Coreference extends Requirement
// Now this corresponds to Mention in the CoreNLP, which is (probably) used internally
// for saving mention candidates.
case object Mention extends Requirement
case object PredArg extends Requirement
case object Parse extends Requirement
case object Chunk extends Requirement
// mainly prepared for Stanford CoreNLP
case object NormalizedNER extends Requirement
case object StanfordNER extends Requirement {
override val parent = Seq(NER, NormalizedNER)
}
case object BasicDependencies extends Requirement {
override val parent = Seq(Dependencies)
}
case object CollapsedDependencies extends Requirement {
override val parent = Seq(Dependencies)
}
case object CollapsedCCProcessedDependencies extends Requirement {
override val parent = Seq(Dependencies)
}
}
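// Illustrative note (not part of the original source): `allAncestors` returns the requirement
// itself plus everything it implies transitively through `parent`, e.g.
// Requirement.StanfordNER.allAncestors == Set(StanfordNER, NER, NormalizedNER).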
object JaRequirement {
trait JaTokenize extends Requirement {
import Requirement._
override def parent = Seq(Tokenize, POS, Lemma)
}
case object TokenizeWithIPA extends JaTokenize
case object TokenizeWithJumandic extends JaTokenize
case object TokenizeWithUnidic extends JaTokenize
case object Juman extends Requirement {
override def parent = Seq(TokenizeWithJumandic)
}
case object CabochaChunk extends Requirement {
override val parent = Seq(Requirement.Chunk)
}
case object KNPChunk extends Requirement {
override val parent = Seq(Requirement.Chunk)
}
case object KNPPredArg extends Requirement {
override val parent = Seq(Requirement.PredArg)
}
case object BasicPhraseCoreference extends Requirement
case object ChunkDependencies extends Requirement
case object BasicPhrase extends Requirement
case object BasicPhraseDependencies extends Requirement
// case object Coreference extends Requirement
// case object PredArg extends Requirement
case object CCGDerivation extends Requirement
case object CCGDependencies extends Requirement
case object BunsetsuChunk extends Requirement {
override val parent = Seq(Requirement.Chunk)
}
}
/** This set is a specialized set to preserve satisfied requirements. If an element is
* added to this collection, all its ancestor requirements are also added automatically.
*/
sealed trait RequirementSet { self =>
protected val elems: Set[Requirement]
def |(other: RequirementSet): RequirementSet =
this | other.elems.map(_.allAncestors).flatten.toSet
def |(otherElems: Set[Requirement]): RequirementSet = new RequirementSet {
override val elems = self.elems | otherElems
}
/** Elements in `requirements` that are not in this set.
*/
def lackedIn(requirements: RequirementSet): Set[Requirement] =
lackedIn(requirements.elems)
def lackedIn(requirements: Set[Requirement]): Set[Requirement] =
requirements &~ (elems & requirements)
}
object RequirementSet {
def apply(_elems: Requirement*) = new RequirementSet {
override val elems = _elems.map(_.allAncestors).flatten.toSet
}
}
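// Illustrative usage (not part of the original source): ancestors are added automatically, so
// RequirementSet(Requirement.StanfordNER).lackedIn(Set(Requirement.NER, Requirement.Tokenize))
// == Set(Requirement.Tokenize), because NER is already implied by StanfordNER.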
|
tomeken-yoshinaga/jigg
|
src/main/scala/jigg/pipeline/Requirement.scala
|
Scala
|
apache-2.0
| 4,442
|
package fink.web
import org.scalatra.ScalatraBase
import fink.support.{Config, MediaManager}
import java.io.File
import fink.data.RepositorySupport
trait MediaSupport extends ScalatraBase with RepositorySupport {
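// Serves a previously uploaded image variant: looks up the image by hash, matches the requested
// spec and content-type extension, and streams the corresponding file from the media directory;
// if any step fails, the request is answered with 404.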
get("/uploads/images/:hash/:spec/:file") {
(for {
hash <- Option(params("hash"))
image <- imageRepository.byHash(hash)
ext <- MediaManager.imageExtensions.get(image.contentType)
spec <- MediaManager.imageSpecs.filter(_.name == params("spec")).headOption
} yield {
val file = new File("%s/%s-%s.%s".format(Config.mediaDirectory, image.hash, spec.name, ext))
if (!file.exists) halt(404)
response.addHeader("Content-Disposition", "inline;filename=\\"%s\\"".format(image.filename))
response.addHeader("Content-type", image.contentType)
file
}) getOrElse(halt(404))
}
}
|
dozed/fink
|
src/main/scala/fink/web/MediaSupport.scala
|
Scala
|
mit
| 850
|
package co.rc.authmanager.persistence.daos
import co.rc.authmanager.persistence.daos.base.DAO
import io.strongtyped.active.slick.Lens
import io.strongtyped.active.slick.Lens._
import slick.ast.BaseTypedType
/**
* Class that defines DAO implementation for UsersRoles
*/
class UsersRolesDAO extends DAO {
import jdbcProfile.api._
override type Id = Int
override type Entity = UserRole
override type EntityTable = UsersRoles
override val baseTypedType: BaseTypedType[ Int ] = implicitly[ BaseTypedType[ Id ] ]
override val idLens: Lens[ UserRole, Option[ Int ] ] = lens { element: UserRole => element.id } { ( element, id ) => element.copy( id = id ) }
override val tableQuery: TableQuery[ UsersRoles ] = UsersRoles
override def $id( table: UsersRoles ): Rep[ Int ] = table.id
}
|
rodricifuentes1/authentication-manager
|
src/main/scala/co/rc/authmanager/persistence/daos/UsersRolesDAO.scala
|
Scala
|
mit
| 803
|
/* Copyright (C) 2016 Luis Rodero-Merino
*
* This file is part of MxCompanions
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package mxcompanions
import java.util.concurrent.TimeUnit
import java.util.concurrent.TimeUnit._
import mxcompanions.circuitbreaker.CircuitBreakerConfiguration
import mxcompanions.delegate.DelegateConfiguration
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Gen
import scala.concurrent.duration.FiniteDuration
/**Generators to be used for tests of [[mxcompanions.circuitbreaker.CircuitBreaker]],
* [[mxcompanions.circuitbreaker.CircuitBreakerConfiguration]],
* [[mxcompanions.delegate.Delegate]] and
* [[mxcompanions.delegate.DelegateConf]].
*/
object Gens {
val bitRingSize: Gen[Int] = Gen.choose(1,10)
val bitRingFalibleSize: Gen[Int] = Gen.chooseNum(-10,10)
val bitRingInsertions: Gen[List[Boolean]] = for {
amount <- Gen.choose(0,100)
toInsert <- Gen.containerOfN[List,Boolean](amount, arbitrary[Boolean])
} yield toInsert
// We ignore time units smaller than milliseconds
val timeUnit: Gen[TimeUnit] = Gen.oneOf(
DAYS, HOURS, MINUTES, SECONDS, MILLISECONDS
)
val finiteDuration: Gen[FiniteDuration] = for {
nanoseconds <- arbitrary[Int]
timeUnit <- timeUnit
time = timeUnit.convert(nanoseconds, NANOSECONDS)
} yield FiniteDuration(time, timeUnit)
val finiteDurationNullable: Gen[FiniteDuration] =
Gen.oneOf[FiniteDuration](CircuitBreakerConfiguration.default.timeInOpenState, null)
/* We need a generator for `FiniteDuration` instances that have
* positive length. Using `finiteDuration suchThat (_.length > 0)`
* will not do the job because ScalaCheck can give up before getting all
* the instances it needs (not often, but it really happens). Thus,
* we must take the long path to define this generator.
*/
val positiveFiniteDuration: Gen[FiniteDuration] = for {
nanoseconds <- Gen.chooseNum(1, Int.MaxValue)
timeUnit <- timeUnit
time = timeUnit.convert(nanoseconds, NANOSECONDS) if(time > 0)
} yield FiniteDuration(time, timeUnit)
val delay: Gen[FiniteDuration] = for {
nanoseconds <- Gen.chooseNum(1, Int.MaxValue)
timeUnit <- timeUnit
time = timeUnit.convert(nanoseconds, NANOSECONDS) if (time > 0)
} yield FiniteDuration(time, timeUnit)
val failureRate: Gen[Float] = Gen.choose(0.0F, 1.0F) suchThat (_ > 0.0F)
val delegateConf: Gen[DelegateConfiguration] = for {
fd <- Gen.choose(1,5) map (FiniteDuration(_, MILLISECONDS))
tries <- Gen.choose(1,5)
} yield DelegateConfiguration.conf.
delay(fd).
maxTries(tries).
build.get
val circuitBreakerConf: Gen[CircuitBreakerConfiguration] = for {
fr <- failureRate
tios <- delay
rfr <- failureRate
mrto <- Gen.choose(1,Int.MaxValue)
mrtr <- Gen.choose(1,Int.MaxValue)
} yield CircuitBreakerConfiguration.conf.
failureRate(fr).
timeInOpenState(tios).
reopeningFailureRate(rfr).
minRequestsToOpen(mrto).
minRequestsToReopen(mrtr).
build.get
}
|
lrodero/mxcompanions
|
src/test/scala/mxcompanions/Gens.scala
|
Scala
|
gpl-2.0
| 3,756
|
/*
* Copyright 2019 ACINQ SAS
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fr.acinq.eclair.wire.protocol
import fr.acinq.bitcoin.Crypto.{PrivateKey, PublicKey}
import fr.acinq.bitcoin.{ByteVector32, ByteVector64, Satoshi}
import fr.acinq.eclair.blockchain.fee.FeeratePerKw
import fr.acinq.eclair.channel.ChannelFlags
import fr.acinq.eclair.crypto.Mac32
import fr.acinq.eclair.{BlockHeight, CltvExpiry, CltvExpiryDelta, MilliSatoshi, ShortChannelId, TimestampSecond, UInt64}
import org.apache.commons.codec.binary.Base32
import scodec.bits.{BitVector, ByteVector}
import scodec.codecs._
import scodec.{Attempt, Codec, DecodeResult, Err, SizeBound}
import java.net.{Inet4Address, Inet6Address, InetAddress}
import scala.Ordering.Implicits._
import scala.util.Try
/**
* Created by t-bast on 20/06/2019.
*/
object CommonCodecs {
/**
* Discriminator codec with a default fallback codec (of the same type).
*/
def discriminatorWithDefault[A](discriminator: Codec[A], fallback: Codec[A]): Codec[A] = new Codec[A] {
def sizeBound: SizeBound = discriminator.sizeBound | fallback.sizeBound
def encode(e: A): Attempt[BitVector] = discriminator.encode(e).recoverWith { case _ => fallback.encode(e) }
def decode(b: BitVector): Attempt[DecodeResult[A]] = discriminator.decode(b).recoverWith {
case _: KnownDiscriminatorType[_]#UnknownDiscriminator => fallback.decode(b)
}
}
/** byte-aligned boolean codec */
val bool8: Codec[Boolean] = bool(8)
// this codec can be safely used for values < 2^63 and will fail otherwise
// (for something smarter see https://github.com/yzernik/bitcoin-scodec/blob/master/src/main/scala/io/github/yzernik/bitcoinscodec/structures/UInt64.scala)
val uint64overflow: Codec[Long] = int64.narrow(l => if (l >= 0) Attempt.Successful(l) else Attempt.failure(Err(s"overflow for value $l")), l => l)
val uint64: Codec[UInt64] = bytes(8).xmap(b => UInt64(b), a => a.toByteVector.padLeft(8))
val satoshi: Codec[Satoshi] = uint64overflow.xmapc(l => Satoshi(l))(_.toLong)
val millisatoshi: Codec[MilliSatoshi] = uint64overflow.xmapc(l => MilliSatoshi(l))(_.toLong)
val feeratePerKw: Codec[FeeratePerKw] = uint32.xmapc(l => FeeratePerKw(Satoshi(l)))(_.toLong)
val blockHeight: Codec[BlockHeight] = uint32.xmapc(l => BlockHeight(l))(_.toLong)
val cltvExpiry: Codec[CltvExpiry] = blockHeight.as[CltvExpiry]
val cltvExpiryDelta: Codec[CltvExpiryDelta] = uint16.xmapc(CltvExpiryDelta)(_.toInt)
// this is needed because some millisatoshi values are encoded on 32 bits in the BOLTs
// this codec will fail if the amount does not fit on 32 bits
val millisatoshi32: Codec[MilliSatoshi] = uint32.xmapc(l => MilliSatoshi(l))(_.toLong)
val timestampSecond: Codec[TimestampSecond] = uint32.xmapc(TimestampSecond(_))(_.toLong)
/**
* We impose a minimal encoding on some values (such as varint and truncated int) to ensure that signed hashes can be
* re-computed correctly.
* If a value could be encoded with less bytes, it's considered invalid and results in a failed decoding attempt.
*
* @param codec the value codec (depends on the value).
* @param min the minimal value that should be encoded.
*/
def minimalvalue[A: Ordering](codec: Codec[A], min: A): Codec[A] = codec.exmap({
case i if i < min => Attempt.failure(Err("value was not minimally encoded"))
case i => Attempt.successful(i)
}, Attempt.successful)
// Bitcoin-style varint codec (CompactSize).
// See https://bitcoin.org/en/developer-reference#compactsize-unsigned-integers for reference.
val varint: Codec[UInt64] = discriminatorWithDefault(
discriminated[UInt64].by(uint8L)
.\\(0xff) { case i if i >= UInt64(0x100000000L) => i }(minimalvalue(uint64, UInt64(0x100000000L)))
.\\(0xfe) { case i if i >= UInt64(0x10000) => i }(minimalvalue(uint32.xmap(UInt64(_), _.toBigInt.toLong), UInt64(0x10000)))
.\\(0xfd) { case i if i >= UInt64(0xfd) => i }(minimalvalue(uint16.xmap(UInt64(_), _.toBigInt.toInt), UInt64(0xfd))),
uint8L.xmap(UInt64(_), _.toBigInt.toInt)
)
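// Note (added for clarity): values below 0xfd are encoded in a single byte; larger values get a
// one-byte marker (0xfd/0xfe/0xff) followed by the integer itself, and `minimalvalue` rejects
// encodings that could have used a shorter form.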
// This codec can be safely used for values < 2^63 and will fail otherwise.
// It is useful in combination with variableSizeBytesLong to encode/decode TLV lengths because those will always be < 2^63.
val varintoverflow: Codec[Long] = varint.narrow(l => if (l <= UInt64(Long.MaxValue)) Attempt.successful(l.toBigInt.toLong) else Attempt.failure(Err(s"overflow for value $l")), l => UInt64(l))
val bytes32: Codec[ByteVector32] = limitedSizeBytes(32, bytesStrict(32).xmap(d => ByteVector32(d), d => d.bytes))
val bytes64: Codec[ByteVector64] = limitedSizeBytes(64, bytesStrict(64).xmap(d => ByteVector64(d), d => d.bytes))
val sha256: Codec[ByteVector32] = bytes32
val varsizebinarydata: Codec[ByteVector] = variableSizeBytes(uint16, bytes)
val listofsignatures: Codec[List[ByteVector64]] = listOfN(uint16, bytes64)
val channelflags: Codec[ChannelFlags] = (ignore(7) dropLeft bool).as[ChannelFlags]
val ipv4address: Codec[Inet4Address] = bytes(4).xmap(b => InetAddress.getByAddress(b.toArray).asInstanceOf[Inet4Address], a => ByteVector(a.getAddress))
val ipv6address: Codec[Inet6Address] = bytes(16).exmap(b => Attempt.fromTry(Try(Inet6Address.getByAddress(null, b.toArray, null))), a => Attempt.fromTry(Try(ByteVector(a.getAddress))))
def base32(size: Int): Codec[String] = bytes(size).xmap(b => new Base32().encodeAsString(b.toArray).toLowerCase, a => ByteVector(new Base32().decode(a.toUpperCase())))
val nodeaddress: Codec[NodeAddress] =
discriminated[NodeAddress].by(uint8)
.typecase(1, (ipv4address :: uint16).as[IPv4])
.typecase(2, (ipv6address :: uint16).as[IPv6])
.typecase(3, (base32(10) :: uint16).as[Tor2])
.typecase(4, (base32(35) :: uint16).as[Tor3])
// this one is a bit different from most other codecs: the first 'len' element is *not* the number of items
// in the list but rather the number of bytes of the encoded list. The rationale is that once we've read this
// number of bytes, we can just skip to the next field
val listofnodeaddresses: Codec[List[NodeAddress]] = variableSizeBytes(uint16, list(nodeaddress))
val shortchannelid: Codec[ShortChannelId] = int64.xmap(l => ShortChannelId(l), s => s.toLong)
val privateKey: Codec[PrivateKey] = Codec[PrivateKey](
(priv: PrivateKey) => bytes(32).encode(priv.value),
(wire: BitVector) => bytes(32).decode(wire).map(_.map(b => PrivateKey(b)))
)
val publicKey: Codec[PublicKey] = Codec[PublicKey](
(pub: PublicKey) => bytes(33).encode(pub.value),
(wire: BitVector) => bytes(33).decode(wire).map(_.map(b => PublicKey(b)))
)
val rgb: Codec[Color] = bytes(3).xmap(buf => Color(buf(0), buf(1), buf(2)), t => ByteVector(t.r, t.g, t.b))
def zeropaddedstring(size: Int): Codec[String] = fixedSizeBytes(size, utf8).xmap(s => s.takeWhile(_ != '\\u0000'), s => s)
/**
* When encoding, prepend a valid mac to the output of the given codec.
* When decoding, verify that a valid mac is prepended.
*/
def prependmac[A](codec: Codec[A], mac: Mac32) = Codec[A](
(a: A) => codec.encode(a).map(bits => mac.mac(bits.toByteVector).bits ++ bits),
(bits: BitVector) => ("mac" | bytes32).decode(bits) match {
case Attempt.Successful(DecodeResult(msgMac, remainder)) if mac.verify(msgMac, remainder.toByteVector) => codec.decode(remainder)
case Attempt.Successful(_) => Attempt.Failure(scodec.Err("invalid mac"))
case Attempt.Failure(err) => Attempt.Failure(err)
}
)
/**
* All LN protocol messages must be stored as length-delimited, because they may have arbitrary trailing data
*/
def lengthDelimited[T](codec: Codec[T]): Codec[T] = variableSizeBytesLong(varintoverflow, codec)
}
|
ACINQ/eclair
|
eclair-core/src/main/scala/fr/acinq/eclair/wire/protocol/CommonCodecs.scala
|
Scala
|
apache-2.0
| 8,278
|
package com.szadowsz.spark.ml.feature
import org.apache.spark.ml.Transformer
import org.apache.spark.ml.param.shared._
import org.apache.spark.ml.param.{Param, ParamMap}
import org.apache.spark.ml.util.Identifiable
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types._
import org.apache.spark.sql.{DataFrame, Dataset, Row}
import org.slf4j.{Logger, LoggerFactory}
/**
* Created on 11/01/2017.
*/
class ValueCounter(override val uid: String) extends Transformer with HasInputCols with HasOutputCol {
protected val logger: Logger = LoggerFactory.getLogger("com.szadowsz.spark.ml")
protected val value: Param[Double] = new Param[Double](this, "value", "")
protected val countValue: Param[Boolean] = new Param[Boolean](this, "countValue", "")
setDefault(countValue, true)
def this() = this(Identifiable.randomUID("counter"))
override def copy(extra: ParamMap): Transformer = defaultCopy(extra)
override def transformSchema(schema: StructType): StructType = {
require(schema.filter(f => $(inputCols).contains(f.name)).forall(_.dataType.isInstanceOf[NumericType]))
StructType(schema.fields :+ StructField($(outputCol), IntegerType))
}
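// transform (summary added for clarity): reads every numeric input column as Double and appends
// `outputCol`, holding per row the number of inputs equal to `value`, or different from it
// when `countValue` is false.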
override def transform(dataset: Dataset[_]): DataFrame = {
val schema = transformSchema(dataset.schema)
// Data transformation.
val assembleFunc = if ($(countValue))
udf { r: Row => r.toSeq.asInstanceOf[Seq[Double]].count(_ == $(value)) }
else
udf { r: Row => r.toSeq.asInstanceOf[Seq[Double]].count(_ != $(value)) }
val args = $(inputCols).map { c =>
schema(c).dataType match {
case DoubleType => dataset(c)
case _: NumericType | BooleanType => dataset(c).cast(DoubleType).as(s"${c}_double_$uid")
}
}
dataset.select(col("*"), assembleFunc(struct(args: _*)).as($(outputCol)))
}
}
|
zakski/project-cadisainmduit
|
module/spark/src/main/scala/com/szadowsz/spark/ml/feature/ValueCounter.scala
|
Scala
|
apache-2.0
| 1,844
|
import com.google.inject.{AbstractModule, Guice}
import domain.repositories.LigaRepository
import infrastructure.persistence.tables.LigaRepositoryImpl
import org.specs2.mutable._
import org.specs2.runner._
import org.junit.runner._
import play.api.test._
import play.api.test.Helpers._
/**
* Add your spec here.
* You can mock out a whole application including requests, plugins etc.
* For more information, consult the wiki.
*/
@RunWith(classOf[JUnitRunner])
class ApplicationSpec extends Specification {
"Application" should {
"send 404 on a bad request" in new WithApplication{
route(FakeRequest(GET, "/boum")) must beNone
}
"render the index page" in new WithApplication{
val home = route(FakeRequest(GET, "/")).get
status(home) must equalTo(OK)
contentType(home) must beSome.which(_ == "text/html")
contentAsString(home) must contain ("Your new application is ready.")
}
}
"Guice" should {
"inject a oject to java interface" in {
val injector = Guice.createInjector( new AbstractModule {
override def configure(): Unit =
bind(classOf[LigaRepository]).toInstance(LigaRepositoryImpl)
})
val repo: LigaRepository = injector.getInstance(classOf[LigaRepository])
repo must not beNull
}
}
}
|
Bluewolfbr/makecups
|
makecups-backend/test/ApplicationSpec.scala
|
Scala
|
mit
| 1,305
|
/*
* Copyright 2012 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.zipkin.conversions
import com.twitter.algebird.Moments
import com.twitter.conversions.time._
import com.twitter.util.Time
import com.twitter.zipkin.common._
import com.twitter.zipkin.query._
import com.twitter.zipkin.thriftscala
import scala.collection.breakOut
import scala.language.implicitConversions
/**
* Convenience implicits for converting between common classes and Thrift.
*/
object thrift {
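// Illustrative usage (not part of the original source): `import com.twitter.zipkin.conversions.thrift._`
// brings these wrappers into implicit scope, so conversions read as `span.toThrift` and
// `thriftSpan.toSpan` for any pair of domain/thrift types defined below.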
/* Endpoint */
class ThriftEndpoint(e: Endpoint) {
lazy val toThrift = thriftscala.Endpoint(e.ipv4, e.port, e.serviceName)
}
class WrappedEndpoint(e: thriftscala.Endpoint) {
lazy val toEndpoint = {
val serviceName = e.serviceName match {
case (null | "") => Endpoint.UnknownServiceName
case _ => e.serviceName
}
new Endpoint(e.ipv4, e.port, serviceName)
}
}
implicit def endpointToThriftEndpoint(e: Endpoint) = new ThriftEndpoint(e)
implicit def thriftEndpointToEndpoint(e: thriftscala.Endpoint) = new WrappedEndpoint(e)
/* AnnotationType */
class ThriftAnnotationType(a: AnnotationType) {
lazy val toThrift = thriftscala.AnnotationType(a.value)
}
class WrappedAnnotationType(a: thriftscala.AnnotationType) {
lazy val toAnnotationType = AnnotationType(a.value, a.name)
}
implicit def annotationTypeToThriftAnnotationType(a: AnnotationType) = new ThriftAnnotationType(a)
implicit def thriftAnnotationTypeToAnnotationType(a: thriftscala.AnnotationType) = new WrappedAnnotationType(a)
/* Annotation */
class ThriftAnnotation(a: Annotation) {
lazy val toThrift = {
thriftscala.Annotation(a.timestamp, a.value, a.host.map { _.toThrift }, a.duration.map(_.inMicroseconds.toInt))
}
}
class WrappedAnnotation(a: thriftscala.Annotation) {
lazy val toAnnotation = {
if (a.timestamp <= 0)
throw new IllegalArgumentException("Annotation must have a timestamp: %s".format(a.toString))
if ("".equals(a.value))
throw new IllegalArgumentException("Annotation must have a value: %s".format(a.toString))
new Annotation(a.timestamp, a.value, a.host.map { _.toEndpoint }, a.duration.map { _.microseconds })
}
}
implicit def annotationToThriftAnnotation(a: Annotation) = new ThriftAnnotation(a)
implicit def thriftAnnotationToAnnotation(a: thriftscala.Annotation) = new WrappedAnnotation(a)
/* BinaryAnnotation */
class ThriftBinaryAnnotation(b: BinaryAnnotation) {
lazy val toThrift = {
thriftscala.BinaryAnnotation(b.key, b.value, b.annotationType.toThrift, b.host.map { _.toThrift })
}
}
class WrappedBinaryAnnotation(b: thriftscala.BinaryAnnotation) {
lazy val toBinaryAnnotation = {
BinaryAnnotation(b.key, b.value, b.annotationType.toAnnotationType, b.host.map { _.toEndpoint })
}
}
implicit def binaryAnnotationToThriftBinaryAnnotation(b: BinaryAnnotation) = new ThriftBinaryAnnotation(b)
implicit def thriftBinaryAnnotationToBinaryAnnotation(b: thriftscala.BinaryAnnotation) = new WrappedBinaryAnnotation(b)
/* Span */
class ThriftSpan(s: Span) {
lazy val toThrift = {
thriftscala.Span(s.traceId, s.name, s.id, s.parentId, s.annotations.map { _.toThrift },
s.binaryAnnotations.map { _.toThrift }, s.debug)
}
}
class WrappedSpan(s: thriftscala.Span) {
lazy val toSpan = {
s.name match {
case null => throw new IncompleteTraceDataException("No name set in Span")
case _ => ()
}
Span(
s.traceId,
s.name,
s.id,
s.parentId,
s.annotations match {
case null => List.empty[Annotation]
case as => as.map(_.toAnnotation)(breakOut)
},
s.binaryAnnotations match {
case null => List.empty[BinaryAnnotation]
case b => b.map(_.toBinaryAnnotation)(breakOut)
},
s.debug
)
}
}
implicit def spanToThriftSpan(s: Span) = new ThriftSpan(s)
implicit def thriftSpanToSpan(s: thriftscala.Span) = new WrappedSpan(s)
/* Trace */
class WrappedTrace(t: Trace) {
lazy val toThrift = thriftscala.Trace(t.spans.map{ _.toThrift })
}
class ThriftTrace(t: thriftscala.Trace) {
lazy val toTrace = Trace(t.spans.map { _.toSpan })
}
implicit def traceToThrift(t: Trace) = new WrappedTrace(t)
implicit def thriftToTrace(t: thriftscala.Trace) = new ThriftTrace(t)
/* Dependencies */
class WrappedMoments(m: Moments) {
lazy val toThrift = thriftscala.Moments(m.m0, m.m1, nanToNone(m.m2), nanToNone(m.m3), nanToNone(m.m4))
private def nanToNone(d: Double) = if (d.isNaN) None else Some(d) // NaN != NaN, so isNaN is required here
}
class ThriftMoments(m: thriftscala.Moments) {
lazy val toMoments = Moments(m.m0, m.m1, m.m2.getOrElse(Double.NaN), m.m3.getOrElse(Double.NaN), m.m4.getOrElse(Double.NaN))
}
implicit def momentsToThrift(m: Moments) = new WrappedMoments(m)
implicit def thriftToMoments(m: thriftscala.Moments) = new ThriftMoments(m)
class WrappedDependencyLink(dl: DependencyLink) {
lazy val toThrift = {
thriftscala.DependencyLink(dl.parent.name, dl.child.name, dl.durationMoments.toThrift)
}
}
class ThriftDependencyLink(dl: thriftscala.DependencyLink) {
lazy val toDependencyLink = DependencyLink(
Service(dl.parent),
Service(dl.child),
dl.durationMoments.toMoments
)
}
implicit def dependencyLinkToThrift(dl: DependencyLink) = new WrappedDependencyLink(dl)
implicit def thriftToDependencyLink(dl: thriftscala.DependencyLink) = new ThriftDependencyLink(dl)
class WrappedDependencies(d: Dependencies) {
lazy val toThrift = thriftscala.Dependencies(d.startTime.inMicroseconds, d.endTime.inMicroseconds, d.links.map {_.toThrift}.toSeq )
}
class ThriftDependencies(d: thriftscala.Dependencies) {
lazy val toDependencies = Dependencies(
Time.fromMicroseconds(d.startTime),
Time.fromMicroseconds(d.endTime),
d.links.map {_.toDependencyLink}
)
}
implicit def dependenciesToThrift(d: Dependencies) = new WrappedDependencies(d)
implicit def thriftToDependencies(d: thriftscala.Dependencies) = new ThriftDependencies(d)
}
|
gneokleo/zipkin
|
zipkin-scrooge/src/main/scala/com/twitter/zipkin/conversions/thrift.scala
|
Scala
|
apache-2.0
| 6,704
|
//TODO
/**
*/
package object domain {
}
|
rysh/scalatrader
|
store/ticker/src/main/scala/domain/package.scala
|
Scala
|
mit
| 42
|
package me.enkode.j8
import java.util.function.Consumer
import org.scalatest.{Matchers, FlatSpec}
class JavaConsumerSupportTest extends FlatSpec with Matchers {
def fixture(onAccept: (String) ⇒ Unit) = new JavaConsumerSupport[String] {
override def jConsumer: Consumer[String] = new Consumer[String] {
override def accept(t: String): Unit = onAccept(t)
}
}
"JavaConsumerSupport" should "be able to convert a java consumer to a scala function" in {
var consumed = false
val consumption = fixture((value) ⇒ consumed = true).asScala
consumption("foo")
consumed should be (true)
}
}
|
kender/java8-converters
|
src/test/scala/me/enkode/j8/JavaConsumerSupportTest.scala
|
Scala
|
mit
| 625
|
/**
* Copyright (C) 2015-2016 DANS - Data Archiving and Networked Services (info@dans.knaw.nl)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nl.knaw.dans.easy.stage.fileitem
import java.io.File
import java.sql.SQLException
import com.yourmediashelf.fedora.client.FedoraClientException
import nl.knaw.dans.easy.stage.lib.FOXML.{getDirFOXML, getFileFOXML}
import nl.knaw.dans.easy.stage.lib.Util._
import nl.knaw.dans.easy.stage.lib._
import org.apache.commons.configuration.PropertiesConfiguration
import org.slf4j.LoggerFactory
import scala.util.{Failure, Success, Try}
object EasyStageFileItem {
val log = LoggerFactory.getLogger(getClass)
def main(args: Array[String]) {
log.debug(s"app.home = ${System.getProperty("app.home")}")
val props = new PropertiesConfiguration(new File(System.getProperty("app.home"), "cfg/application.properties"))
//props.save(System.out)
Fedora.setFedoraConnectionSettings(props.getString("fcrepo.url"), props.getString("fcrepo.user"), props.getString("fcrepo.password"))
val conf = new FileItemConf(args)
getSettingsRows(conf).map {
_.foreach { settings =>
run(settings)
.map(_ => log.info(s"Staging SUCCESS of $settings"))
.recover { case t: Throwable =>
log.error(s"Staging FAIL of $settings", t)
if (t.isInstanceOf[SQLException] || t.isInstanceOf[FedoraClientException]) return
}
}
}.recover { case t: Throwable => log.error(s"Staging FAIL of $conf with repo url ${props.getString("fcrepo.url")}", t) }
}
def getSettingsRows(conf: FileItemConf): Try[Seq[FileItemSettings]] =
if (conf.datasetId.isDefined)
Success(Seq(FileItemSettings(conf)))
else {
val trailArgs = Seq(conf.sdoSetDir.apply().toString)
CSV(conf.csvFile.apply(), conf.longOptionNames).map {
case (csv, warning) =>
warning.foreach(log.warn)
val rows = csv.getRows
if (rows.isEmpty) log.warn(s"Empty CSV file")
rows.map(options => {
log.info("Options: "+options.mkString(" "))
FileItemSettings(new FileItemConf(options ++ trailArgs))
})
}
}
def run(implicit s: FileItemSettings): Try[Unit] = {
log.debug(s"executing: $s")
for {
datasetId <- getValidDatasetId(s)
sdoSetDir <- mkdirSafe(s.sdoSetDir)
datasetSdoSetDir <- mkdirSafe(new File(sdoSetDir, datasetId.replace(":", "_")))
(parentId, parentPath, newElements) <- getPathElements()
items <- Try { getItemsToStage(newElements, datasetSdoSetDir, parentId) }
_ = log.debug(s"Items to stage: $items")
_ <- Try{items.init.foreach { case (sdo, path, parentRelation) => createFolderSdo(sdo, relPath(parentPath, path), parentRelation) }}
_ <- items.last match {case (sdo, path, parentRelation) => createFileSdo(sdo, parentRelation) }
} yield ()
}
def relPath(parentPath: String, path: String): String =
if (parentPath.isEmpty) new File(path).toString // prevent a leading slash
else new File(parentPath, path).toString
def getPathElements()(implicit s: FileItemSettings): Try[(String, String, Seq[String])] = {
val file = s.pathInDataset.get
s.easyFilesAndFolders.getExistingAncestor(file, s.datasetId.get)
.map { case (parentPath, parentId) =>
log.debug(s"Parent in repository: $parentId $parentPath")
val newItems = file.toString.replaceFirst(s"^$parentPath/", "").split("/")
(parentId, parentPath, newItems.toSeq)
}
}
def getItemsToStage(pathElements: Seq[String], datasetSdoSet: File, existingFolderId: String): Seq[(File, String, (String, String))] = {
getPaths(pathElements)
.foldLeft(Seq[(File, String, (String, String))]())((items, path) => {
items match {
case s@Seq() => s :+ (new File(datasetSdoSet, toSdoName(path)), path, "object" -> s"info:fedora/$existingFolderId")
case seq =>
val parentFolderSdoName = seq.last match { case (sdo, _, _) => sdo.getName}
seq :+ (new File(datasetSdoSet, toSdoName(path)), path, "objectSDO" -> parentFolderSdoName)
}
})
}
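// getPaths expands relative path elements into their cumulative prefixes; for example
// (illustrative) getPaths(Seq("a", "b", "c")) == Seq("a", "a/b", "a/b/c").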
def getPaths(path: Seq[String]): Seq[String] =
if(path.isEmpty) Seq()
else path.tail.scanLeft(path.head)((acc, next) => s"$acc/$next")
def createFileSdo(sdoDir: File, parent: (String,String))(implicit s: FileItemSettings): Try[Unit] = {
log.debug(s"Creating file SDO: ${s.pathInDataset.getOrElse("<no path in dataset?>")}")
sdoDir.mkdir()
for {
mime <- Try{s.format.get}
cfgContent <- Try{ JSON.createFileCfg(s.datastreamLocation.getOrElse(s.unsetUrl), mime, parent, s.subordinate)}
_ <- writeJsonCfg(sdoDir, cfgContent)
foxmlContent <- Try{ getFileFOXML(s.title.getOrElse(s.pathInDataset.get.getName), s.ownerId, mime)}
_ <- writeFoxml(sdoDir, foxmlContent)
fmd <- EasyFileMetadata(s)
_ <- writeFileMetadata(sdoDir, fmd)
_ <- s.isMendeley.filter(b => !b).flatMap(_ => s.file.map(copyFile(sdoDir, _))).getOrElse(Success(()))
} yield ()
}
def createFolderSdo(sdoDir: File, path: String, parent: (String,String))(implicit s: FileItemSettings): Try[Unit] = {
log.debug(s"Creating folder SDO: $path")
sdoDir.mkdir()
for {
_ <- writeJsonCfg(sdoDir,JSON.createDirCfg(parent, s.subordinate))
_ <- writeFoxml(sdoDir, getDirFOXML(path, s.ownerId))
_ <- writeItemContainerMetadata(sdoDir,EasyItemContainerMd(path))
} yield ()
}
private def getValidDatasetId(s: FileItemSettings): Try[String] =
if (s.datasetId.isEmpty)
Failure(new Exception(s"no datasetId provided"))
else if (s.fedora.findObjects(s"pid~${s.datasetId.get}").isEmpty)
Failure(new Exception(s"${s.datasetId.get} does not exist in repository"))
else
Success(s.datasetId.get)
def toSdoName(path: String): String =
path.replaceAll("[/.]", "_").replaceAll("^_", "")
}
|
ekoi/easy-stage-dataset
|
src/main/scala/nl/knaw/dans/easy/stage/fileitem/EasyStageFileItem.scala
|
Scala
|
apache-2.0
| 6,580
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.metadata
import org.apache.flink.table.api.TableException
import org.apache.flink.table.planner.plan.nodes.physical.batch.BatchExecRank
import org.apache.flink.table.planner.plan.stats._
import org.apache.flink.table.planner.plan.utils.ColumnIntervalUtil
import org.apache.flink.table.planner.{JBoolean, JDouble}
import org.apache.flink.table.types.logical.IntType
import org.apache.calcite.rel.RelDistributions
import org.apache.calcite.rel.core.JoinRelType
import org.apache.calcite.rel.logical.LogicalExchange
import org.apache.calcite.rex.{RexCall, RexUtil}
import org.apache.calcite.sql.fun.SqlStdOperatorTable._
import org.apache.calcite.util.{DateString, TimeString, TimestampString}
import org.junit.Assert._
import org.junit.Test
import java.sql.{Date, Time, Timestamp}
import scala.collection.JavaConversions._
class FlinkRelMdColumnIntervalTest extends FlinkRelMdHandlerTestBase {
@Test
def testGetColumnIntervalOnTableScan(): Unit = {
Array(studentLogicalScan, studentFlinkLogicalScan, studentBatchScan, studentStreamScan)
.foreach { scan =>
assertEquals(ValueInterval(bd(0), null), mq.getColumnInterval(scan, 0))
assertNull(mq.getColumnInterval(scan, 1))
assertEquals(ValueInterval(bd(2.7D), bd(4.8D)), mq.getColumnInterval(scan, 2))
assertEquals(ValueInterval(bd(12), bd(18)), mq.getColumnInterval(scan, 3))
assertEquals(ValueInterval(bd(161.0D), bd(172.1D)), mq.getColumnInterval(scan, 4))
assertNull(mq.getColumnInterval(scan, 5))
assertNull(mq.getColumnInterval(scan, 6))
}
Array(empLogicalScan, empFlinkLogicalScan, empBatchScan, empStreamScan).foreach { scan =>
(0 until 8).foreach { index =>
assertNull(mq.getColumnInterval(scan, index))
}
}
}
@Test
def testGetColumnIntervalOnValues(): Unit = {
(0 until emptyValues.getRowType.getFieldCount).foreach { idx =>
assertEquals(ValueInterval.empty, mq.getColumnInterval(emptyValues, idx))
}
assertEquals(ValueInterval(bd(1L), bd(3L)), mq.getColumnInterval(logicalValues, 0))
assertEquals(ValueInterval(false, true), mq.getColumnInterval(logicalValues, 1))
assertEquals(ValueInterval(
new Date(new DateString(2017, 9, 1).getMillisSinceEpoch),
new Date(new DateString(2017, 10, 2).getMillisSinceEpoch)),
mq.getColumnInterval(logicalValues, 2))
assertEquals(ValueInterval(
new Time(new TimeString(9, 59, 59).toCalendar.getTimeInMillis),
new Time(new TimeString(10, 0, 2).toCalendar.getTimeInMillis)),
mq.getColumnInterval(logicalValues, 3))
assertEquals(ValueInterval(
new Timestamp(new TimestampString(2017, 7, 1, 1, 0, 0).getMillisSinceEpoch),
new Timestamp(new TimestampString(2017, 10, 1, 1, 0, 0).getMillisSinceEpoch)),
mq.getColumnInterval(logicalValues, 4))
assertEquals(ValueInterval(bd(-1D), bd(3.12D)), mq.getColumnInterval(logicalValues, 5))
assertEquals(ValueInterval.empty, mq.getColumnInterval(logicalValues, 6))
assertEquals(ValueInterval("F", "xyz"), mq.getColumnInterval(logicalValues, 7))
}
@Test
def testGetColumnIntervalOnSnapshot(): Unit = {
(0 until flinkLogicalSnapshot.getRowType.getFieldCount).foreach { idx =>
assertNull(mq.getColumnInterval(flinkLogicalSnapshot, idx))
}
}
@Test
def testGetColumnIntervalOnProject(): Unit = {
assertEquals(ValueInterval(bd(0), null), mq.getColumnInterval(logicalProject, 0))
assertNull(mq.getColumnInterval(logicalProject, 1))
assertEqualsAsDouble(ValueInterval(bd(2.9), bd(5.0)), mq.getColumnInterval(logicalProject, 2))
assertEqualsAsDouble(ValueInterval(bd(11), bd(17)), mq.getColumnInterval(logicalProject, 3))
assertEqualsAsDouble(
ValueInterval(bd(177.1), bd(189.31)), mq.getColumnInterval(logicalProject, 4))
assertNull(mq.getColumnInterval(logicalProject, 5))
assertEqualsAsDouble(
ValueInterval(bd(161.0D), bd(172.1)), mq.getColumnInterval(logicalProject, 6))
assertEquals(ValueInterval(bd(1), bd(2)), mq.getColumnInterval(logicalProject, 7))
assertEquals(ValueInterval(true, true), mq.getColumnInterval(logicalProject, 8))
assertEquals(ValueInterval(bd(2.1D), bd(2.1D)), mq.getColumnInterval(logicalProject, 9))
assertEquals(ValueInterval(bd(2L), bd(2L)), mq.getColumnInterval(logicalProject, 10))
assertNull(mq.getColumnInterval(logicalProject, 11))
// 3 * (score - 2)
val project = relBuilder.scan("student")
.project(
relBuilder.call(
MULTIPLY,
relBuilder.literal(3),
relBuilder.call(MINUS, relBuilder.field(2), relBuilder.literal(2))
)
).build()
assertEqualsAsDouble(ValueInterval(2.1, 8.4), mq.getColumnInterval(project, 0))
}
@Test
def testGetColumnIntervalOnFilter(): Unit = {
val ts = relBuilder.scan("student").build()
relBuilder.push(ts)
// id > -1
val expr0 = relBuilder.call(GREATER_THAN, relBuilder.field(0), relBuilder.literal(-1))
// id <= 20
val expr1 = relBuilder.call(LESS_THAN_OR_EQUAL, relBuilder.field(0), relBuilder.literal(20))
// id > 10.0 (note: the types of id and literal are different)
val expr2 = relBuilder.call(GREATER_THAN, relBuilder.field(0), relBuilder.literal(10.0))
// DIV(id, 2) > 3
val expr3 = relBuilder.call(GREATER_THAN,
relBuilder.call(DIVIDE, relBuilder.field(0), relBuilder.literal(2)),
relBuilder.literal(3))
// score < 4.1
val expr4 = relBuilder.call(LESS_THAN, relBuilder.field(2), relBuilder.literal(4.1D))
// score > 6.0
val expr5 = relBuilder.call(GREATER_THAN, relBuilder.field(2), relBuilder.literal(6.0))
// score <= 4.0
val expr6 = relBuilder.call(LESS_THAN_OR_EQUAL, relBuilder.field(2), relBuilder.literal(4.0))
// score > 1.9
val expr7 = relBuilder.call(GREATER_THAN, relBuilder.field(2), relBuilder.literal(1.9D))
// id > -1
val filter0 = relBuilder.push(ts).filter(expr0).build
assertEquals(ValueInterval(bd(0), null), mq.getColumnInterval(filter0, 0))
// id <= 20
val filter1 = relBuilder.push(ts).filter(expr1).build
assertEquals(ValueInterval(bd(0), bd(20)), mq.getColumnInterval(filter1, 0))
// id <= 20 AND id > 10 AND DIV(id, 2) > 3
val filter2 = relBuilder.push(ts).filter(expr1, expr2, expr3).build
assertEquals(
ValueInterval(bd(10.0), bd(20), includeLower = false), mq.getColumnInterval(filter2, 0))
// id <= 20 AND id > 10 AND score < 4.1
val filter3 = relBuilder.push(ts).filter(expr1, expr2, expr4).build
assertEquals(
ValueInterval(bd(10.0), bd(20), includeLower = false),
mq.getColumnInterval(filter3, 0))
// score > 6.0 OR score <= 4.0
val filter4 = relBuilder.push(ts).filter(relBuilder.call(OR, expr5, expr6)).build
assertEquals(ValueInterval(bd(2.7), bd(4.0)), mq.getColumnInterval(filter4, 2))
// score > 6.0 OR score <= 4.0 OR id < 20
val filter5 = relBuilder.push(ts).filter(relBuilder.call(OR, expr5, expr6, expr1)).build
assertEquals(ValueInterval(bd(2.7), bd(4.8)), mq.getColumnInterval(filter5, 2))
// (id <= 20 AND score < 4.1) OR NOT(DIV(id, 2) > 3 OR score > 1.9)
val filter6 = relBuilder.push(ts).filter(relBuilder.call(OR,
relBuilder.call(AND, expr1, expr4),
relBuilder.call(NOT, relBuilder.call(OR, expr3, expr7)))).build
assertEquals(ValueInterval(bd(0), null), mq.getColumnInterval(filter6, 0))
// (id <= 20 AND score < 4.1) OR NOT(id > 20 OR score > 1.9)
val filter7 = relBuilder.push(ts).filter(relBuilder.call(OR,
relBuilder.call(AND, expr1, expr4),
relBuilder.call(NOT,
relBuilder.call(OR,
RexUtil.negate(relBuilder.getRexBuilder, expr1.asInstanceOf[RexCall]),
expr7)))).build
assertEquals(ValueInterval(bd(0), bd(20)), mq.getColumnInterval(filter7, 0))
}
@Test
def testGetColumnIntervalOnCalc(): Unit = {
relBuilder.push(studentLogicalScan)
val outputRowType = logicalProject.getRowType
// id, name, score + 0.2, age - 1, height * 1.1 as h1, height / 0.9 as h2,
// case sex = 'M' then 1 else 2, true, 2.1, 2, cast(score as double not null) as s
val projects = logicalProject.getProjects
// id <= 20
val expr1 = relBuilder.call(LESS_THAN_OR_EQUAL, relBuilder.field(0), relBuilder.literal(20))
// id > 10
val expr2 = relBuilder.call(GREATER_THAN, relBuilder.field(0), relBuilder.literal(10))
// DIV(id, 2) > 3
val expr3 = relBuilder.call(GREATER_THAN,
relBuilder.call(DIVIDE, relBuilder.field(0), relBuilder.literal(2)),
relBuilder.literal(3))
// score < 4.1
val expr4 = relBuilder.call(LESS_THAN, relBuilder.field(2), relBuilder.literal(4.1D))
// score > 6.0
val expr5 = relBuilder.call(GREATER_THAN, relBuilder.field(2), relBuilder.literal(6.0))
// score <= 4.0
val expr6 = relBuilder.call(LESS_THAN_OR_EQUAL, relBuilder.field(2), relBuilder.literal(4.0))
// score > 1.9
val expr7 = relBuilder.call(GREATER_THAN, relBuilder.field(2), relBuilder.literal(1.9D))
// calc => projects + filter(id <= 20)
val calc1 = createLogicalCalc(studentLogicalScan, outputRowType, projects, List(expr1))
assertEquals(ValueInterval(bd(0), bd(20)), mq.getColumnInterval(calc1, 0))
assertNull(mq.getColumnInterval(calc1, 1))
assertEqualsAsDouble(ValueInterval(bd(2.9), bd(5.0)), mq.getColumnInterval(calc1, 2))
assertEqualsAsDouble(ValueInterval(bd(11), bd(17)), mq.getColumnInterval(calc1, 3))
assertEqualsAsDouble(ValueInterval(bd(177.1), bd(189.31)), mq.getColumnInterval(calc1, 4))
assertNull(mq.getColumnInterval(calc1, 5))
assertEqualsAsDouble(ValueInterval(bd(161.0D), bd(172.1)), mq.getColumnInterval(calc1, 6))
assertEquals(ValueInterval(bd(1), bd(2)), mq.getColumnInterval(calc1, 7))
assertEquals(ValueInterval(true, true), mq.getColumnInterval(calc1, 8))
assertEquals(ValueInterval(bd(2.1D), bd(2.1D)), mq.getColumnInterval(calc1, 9))
assertEquals(ValueInterval(bd(2L), bd(2L)), mq.getColumnInterval(calc1, 10))
assertNull(mq.getColumnInterval(calc1, 11))
// calc => project + filter(id <= 20 AND id > 10 AND DIV(id, 2) > 3)
val calc2 = createLogicalCalc(
studentLogicalScan, outputRowType, projects, List(expr1, expr2, expr3))
assertEquals(
ValueInterval(bd(10), bd(20), includeLower = false), mq.getColumnInterval(calc2, 0))
assertNull(mq.getColumnInterval(calc2, 1))
// calc => project + filter(id <= 20 AND id > 10 AND score < 4.1)
val calc3 = createLogicalCalc(
studentLogicalScan, outputRowType, projects, List(expr1, expr2, expr4))
assertEquals(
ValueInterval(bd(10), bd(20), includeLower = false), mq.getColumnInterval(calc3, 0))
// calc => project + filter(score > 6.0 OR score <= 4.0)
val calc4 = createLogicalCalc(
studentLogicalScan, outputRowType, projects, List(relBuilder.call(OR, expr5, expr6)))
assertEqualsAsDouble(ValueInterval(bd(2.9), bd(5.0)), mq.getColumnInterval(calc4, 2))
// calc => project + filter(score > 6.0 OR score <= 4.0 OR id < 20)
val calc5 = createLogicalCalc(studentLogicalScan, outputRowType, projects,
List(relBuilder.call(OR, expr5, expr6, expr1)))
assertEqualsAsDouble(ValueInterval(bd(2.9), bd(5.0)), mq.getColumnInterval(calc5, 2))
// calc => project + filter((id <= 20 AND score < 4.1) OR NOT(DIV(id, 2) > 3 OR score > 1.9))
val calc6 = createLogicalCalc(studentLogicalScan, outputRowType, projects,
List(relBuilder.call(OR,
relBuilder.call(AND, expr1, expr4),
relBuilder.call(NOT, relBuilder.call(OR, expr3, expr7)))))
assertEquals(ValueInterval(bd(0), null), mq.getColumnInterval(calc6, 0))
// calc => project + filter((id <= 20 AND score < 4.1) OR NOT(id > 20 OR score > 1.9))
val calc7 = createLogicalCalc(studentLogicalScan, outputRowType, projects,
List(relBuilder.call(OR,
relBuilder.call(AND, expr1, expr4),
relBuilder.call(NOT,
relBuilder.call(OR,
RexUtil.negate(relBuilder.getRexBuilder, expr1.asInstanceOf[RexCall]),
expr7)))))
assertEquals(ValueInterval(bd(0), bd(20)), mq.getColumnInterval(calc7, 0))
relBuilder.push(studentLogicalScan)
val expr8 = relBuilder.call(CASE, expr5, relBuilder.literal(1), relBuilder.literal(0))
val expr9 = relBuilder.call(CASE, expr5, relBuilder.literal(11),
expr7, relBuilder.literal(10), relBuilder.literal(12))
val expr10 = relBuilder.call(CASE, expr2, expr9, expr4, expr8, relBuilder.literal(null))
val expr11 = relBuilder.call(CASE, expr5, relBuilder.literal(1), relBuilder.field(3))
// TODO add tests for IF
val rowType = typeFactory.buildRelNodeRowType(
Array("f0", "f1", "f2", "f3"),
Array(new IntType(), new IntType(), new IntType(), new IntType()))
val calc8 = createLogicalCalc(
studentLogicalScan, rowType, List(expr8, expr9, expr10, expr11), List())
assertEquals(ValueInterval(bd(0), bd(1)), mq.getColumnInterval(calc8, 0))
assertEquals(ValueInterval(bd(10), bd(12)), mq.getColumnInterval(calc8, 1))
assertEquals(ValueInterval(bd(0), bd(12)), mq.getColumnInterval(calc8, 2))
assertEquals(ValueInterval(bd(1), bd(18)), mq.getColumnInterval(calc8, 3))
}
@Test
def testGetColumnIntervalOnExpand(): Unit = {
Array(logicalExpand, flinkLogicalExpand, batchExpand, streamExpand).foreach {
expand =>
assertEquals(ValueInterval(bd(0), null), mq.getColumnInterval(expand, 0))
assertNull(mq.getColumnInterval(expand, 1))
assertEquals(ValueInterval(bd(2.7), bd(4.8)), mq.getColumnInterval(expand, 2))
assertEquals(ValueInterval(bd(12), bd(18)), mq.getColumnInterval(expand, 3))
assertEquals(ValueInterval(bd(161.0), bd(172.1)), mq.getColumnInterval(expand, 4))
assertEquals(null, mq.getColumnInterval(expand, 5))
assertEquals(null, mq.getColumnInterval(expand, 6))
assertEquals(ValueInterval(bd(0), bd(5)), mq.getColumnInterval(expand, 7))
}
}
@Test
def testGetColumnIntervalOnSort(): Unit = {
Array(logicalSort, flinkLogicalSort, batchSort, streamSort,
logicalLimit, flinkLogicalLimit, batchLimit, batchLocalLimit, batchGlobalLimit, streamLimit,
logicalSortLimit, flinkLogicalSortLimit, batchSortLimit, batchLocalSortLimit,
batchGlobalSortLimit, streamSortLimit).foreach {
sort =>
assertEquals(ValueInterval(bd(0), null), mq.getColumnInterval(sort, 0))
assertNull(mq.getColumnInterval(sort, 1))
assertEquals(ValueInterval(bd(2.7D), bd(4.8D)), mq.getColumnInterval(sort, 2))
assertEquals(ValueInterval(bd(12), bd(18)), mq.getColumnInterval(sort, 3))
assertEquals(ValueInterval(bd(161.0D), bd(172.1D)), mq.getColumnInterval(sort, 4))
assertNull(mq.getColumnInterval(sort, 5))
assertNull(mq.getColumnInterval(sort, 6))
}
}
@Test
def testGetColumnIntervalOnRank(): Unit = {
Array(logicalRank, flinkLogicalRank, batchLocalRank, batchGlobalRank, streamRank).foreach {
rank =>
assertEquals(ValueInterval(bd(0), null), mq.getColumnInterval(rank, 0))
assertNull(mq.getColumnInterval(rank, 1))
assertEquals(ValueInterval(bd(2.7D), bd(4.8D)), mq.getColumnInterval(rank, 2))
assertEquals(ValueInterval(bd(12), bd(18)), mq.getColumnInterval(rank, 3))
assertEquals(ValueInterval(bd(161.0D), bd(172.1D)), mq.getColumnInterval(rank, 4))
assertNull(mq.getColumnInterval(rank, 5))
assertNull(mq.getColumnInterval(rank, 6))
rank match {
case r: BatchExecRank if !r.isGlobal => // local batch rank does not output rank function
case _ => assertEquals(ValueInterval(bd(1), bd(5)), mq.getColumnInterval(rank, 7))
}
}
Array(logicalRankWithVariableRange, flinkLogicalRankWithVariableRange,
streamRankWithVariableRange).foreach {
rank =>
assertEquals(ValueInterval(bd(0), null), mq.getColumnInterval(rank, 0))
assertNull(mq.getColumnInterval(rank, 1))
assertEquals(ValueInterval(bd(2.7D), bd(4.8D)), mq.getColumnInterval(rank, 2))
assertEquals(ValueInterval(bd(12), bd(18)), mq.getColumnInterval(rank, 3))
assertEquals(ValueInterval(bd(161.0D), bd(172.1D)), mq.getColumnInterval(rank, 4))
assertNull(mq.getColumnInterval(rank, 5))
assertNull(mq.getColumnInterval(rank, 6))
assertEquals(ValueInterval(bd(1), bd(18)), mq.getColumnInterval(rank, 7))
}
Array(logicalRowNumber, flinkLogicalRowNumber, streamRowNumber).foreach {
rank =>
assertEquals(ValueInterval(bd(0), null), mq.getColumnInterval(rank, 0))
assertNull(mq.getColumnInterval(rank, 1))
assertEquals(ValueInterval(bd(2.7D), bd(4.8D)), mq.getColumnInterval(rank, 2))
assertEquals(ValueInterval(bd(12), bd(18)), mq.getColumnInterval(rank, 3))
assertEquals(ValueInterval(bd(161.0D), bd(172.1D)), mq.getColumnInterval(rank, 4))
assertNull(mq.getColumnInterval(rank, 5))
assertNull(mq.getColumnInterval(rank, 6))
assertEquals(ValueInterval(bd(3), bd(6)), mq.getColumnInterval(rank, 7))
}
}
@Test
def testGetColumnIntervalOnExchange(): Unit = {
val exchange = LogicalExchange.create(studentLogicalScan, RelDistributions.SINGLETON)
assertEquals(ValueInterval(bd(0), null), mq.getColumnInterval(exchange, 0))
assertNull(mq.getColumnInterval(exchange, 1))
assertEquals(ValueInterval(bd(2.7D), bd(4.8D)), mq.getColumnInterval(exchange, 2))
assertEquals(ValueInterval(bd(12), bd(18)), mq.getColumnInterval(exchange, 3))
assertEquals(ValueInterval(bd(161.0D), bd(172.1D)), mq.getColumnInterval(exchange, 4))
assertNull(mq.getColumnInterval(exchange, 5))
assertNull(mq.getColumnInterval(exchange, 6))
}
@Test
def testGetColumnIntervalOnAggregate(): Unit = {
Array(logicalAgg, flinkLogicalAgg).foreach {
agg =>
assertEquals(ValueInterval(bd(12), bd(18)), mq.getColumnInterval(agg, 0))
assertNull(mq.getColumnInterval(agg, 1))
assertEquals(ValueInterval(bd(2.7), null), mq.getColumnInterval(agg, 2))
assertNull(mq.getColumnInterval(agg, 3))
assertNull(mq.getColumnInterval(agg, 4))
assertEquals(ValueInterval(bd(0), null), mq.getColumnInterval(agg, 5))
}
Array(logicalAggWithAuxGroup, flinkLogicalAggWithAuxGroup).foreach {
agg =>
assertEquals(ValueInterval(bd(0), null), mq.getColumnInterval(agg, 0))
assertNull(mq.getColumnInterval(agg, 1))
assertEquals(ValueInterval(bd(161.0), bd(172.1)), mq.getColumnInterval(agg, 2))
assertNull(mq.getColumnInterval(agg, 3))
assertEquals(ValueInterval(bd(2.7), null), mq.getColumnInterval(agg, 4))
assertEquals(ValueInterval(bd(0), null), mq.getColumnInterval(agg, 5))
}
}
@Test
def testGetColumnIntervalOnBatchExecAggregate(): Unit = {
Array(batchGlobalAggWithLocal, batchGlobalAggWithoutLocal).foreach {
agg =>
assertEquals(ValueInterval(bd(12), bd(18)), mq.getColumnInterval(agg, 0))
assertNull(mq.getColumnInterval(agg, 1))
assertEquals(ValueInterval(bd(2.7), null), mq.getColumnInterval(agg, 2))
assertNull(mq.getColumnInterval(agg, 3))
assertNull(mq.getColumnInterval(agg, 4))
assertEquals(ValueInterval(bd(0), null), mq.getColumnInterval(agg, 5))
}
assertEquals(ValueInterval(bd(12), bd(18)), mq.getColumnInterval(batchLocalAgg, 0))
assertNull(mq.getColumnInterval(batchLocalAgg, 1))
assertNull(mq.getColumnInterval(batchLocalAgg, 2))
assertEquals(ValueInterval(bd(2.7), null), mq.getColumnInterval(batchLocalAgg, 3))
assertNull(mq.getColumnInterval(batchLocalAgg, 4))
assertNull(mq.getColumnInterval(batchLocalAgg, 5))
assertEquals(ValueInterval(bd(0), null), mq.getColumnInterval(batchLocalAgg, 6))
assertEquals(ValueInterval(bd(0), null), mq.getColumnInterval(batchLocalAggWithAuxGroup, 0))
assertNull(mq.getColumnInterval(batchLocalAggWithAuxGroup, 1))
assertEquals(ValueInterval(bd(161.0), bd(172.1)),
mq.getColumnInterval(batchLocalAggWithAuxGroup, 2))
assertNull(mq.getColumnInterval(batchLocalAggWithAuxGroup, 3))
assertNull(mq.getColumnInterval(batchLocalAggWithAuxGroup, 4))
assertEquals(ValueInterval(bd(2.7), null), mq.getColumnInterval(batchLocalAggWithAuxGroup, 5))
assertEquals(ValueInterval(bd(0), null), mq.getColumnInterval(batchLocalAggWithAuxGroup, 6))
Array(batchGlobalAggWithLocalWithAuxGroup, batchGlobalAggWithoutLocalWithAuxGroup)
.foreach {
agg =>
assertEquals(ValueInterval(bd(0), null), mq.getColumnInterval(agg, 0))
assertNull(mq.getColumnInterval(agg, 1))
assertEquals(ValueInterval(bd(161.0), bd(172.1)), mq.getColumnInterval(agg, 2))
assertNull(mq.getColumnInterval(agg, 3))
assertEquals(ValueInterval(bd(2.7), null), mq.getColumnInterval(agg, 4))
assertEquals(ValueInterval(bd(0), null), mq.getColumnInterval(agg, 5))
}
}
@Test
def testGetColumnIntervalOnStreamExecAggregate(): Unit = {
Array(streamGlobalAggWithLocal, streamGlobalAggWithoutLocal).foreach {
agg =>
assertEquals(ValueInterval(bd(12), bd(18)), mq.getColumnInterval(agg, 0))
assertNull(mq.getColumnInterval(agg, 1))
assertEquals(ValueInterval(bd(2.7), null), mq.getColumnInterval(agg, 2))
assertNull(mq.getColumnInterval(agg, 3))
assertNull(mq.getColumnInterval(agg, 4))
assertEquals(ValueInterval(bd(0), null), mq.getColumnInterval(agg, 5))
}
assertEquals(ValueInterval(bd(12), bd(18)), mq.getColumnInterval(streamLocalAgg, 0))
assertNull(mq.getColumnInterval(streamLocalAgg, 1))
assertNull(mq.getColumnInterval(streamLocalAgg, 2))
assertEquals(ValueInterval(bd(2.7), null), mq.getColumnInterval(streamLocalAgg, 3))
assertNull(mq.getColumnInterval(streamLocalAgg, 4))
assertNull(mq.getColumnInterval(streamLocalAgg, 5))
assertEquals(ValueInterval(bd(0), null), mq.getColumnInterval(streamLocalAgg, 6))
}
@Test
def testGetColumnIntervalOnTableAggregate(): Unit = {
Array(logicalTableAgg, flinkLogicalTableAgg, streamExecTableAgg).foreach {
agg =>
assertEquals(RightSemiInfiniteValueInterval(bd(0), true), mq.getColumnInterval(agg, 0))
assertNull(mq.getColumnInterval(agg, 1))
assertNull(mq.getColumnInterval(agg, 2))
}
}
@Test
def testGetColumnIntervalOnWindowTableAgg(): Unit = {
Array(logicalWindowTableAgg, flinkLogicalWindowTableAgg, streamWindowTableAgg).foreach { agg =>
assertEquals(ValueInterval(bd(5), bd(45)), mq.getColumnInterval(agg, 0))
assertEquals(null, mq.getColumnInterval(agg, 1))
assertEquals(null, mq.getColumnInterval(agg, 2))
assertEquals(null, mq.getColumnInterval(agg, 3))
assertEquals(null, mq.getColumnInterval(agg, 4))
assertEquals(null, mq.getColumnInterval(agg, 5))
assertEquals(null, mq.getColumnInterval(agg, 6))
}
}
@Test
def testGetColumnIntervalOnWindowAgg(): Unit = {
Array(logicalWindowAgg, flinkLogicalWindowAgg, batchGlobalWindowAggWithLocalAgg,
batchGlobalWindowAggWithoutLocalAgg, streamWindowAgg).foreach { agg =>
assertEquals(ValueInterval(bd(5), bd(45)), mq.getColumnInterval(agg, 0))
assertEquals(null, mq.getColumnInterval(agg, 1))
assertEquals(RightSemiInfiniteValueInterval(bd(0)), mq.getColumnInterval(agg, 2))
assertEquals(null, mq.getColumnInterval(agg, 3))
}
assertEquals(ValueInterval(bd(5), bd(45)), mq.getColumnInterval(batchLocalWindowAgg, 0))
assertEquals(null, mq.getColumnInterval(batchLocalWindowAgg, 1))
assertEquals(null, mq.getColumnInterval(batchLocalWindowAgg, 2))
assertEquals(
RightSemiInfiniteValueInterval(bd(0)), mq.getColumnInterval(batchLocalWindowAgg, 3))
assertEquals(null, mq.getColumnInterval(batchLocalWindowAgg, 4))
Array(logicalWindowAggWithAuxGroup, flinkLogicalWindowAggWithAuxGroup,
batchGlobalWindowAggWithLocalAggWithAuxGroup,
batchGlobalWindowAggWithoutLocalAggWithAuxGroup).foreach { agg =>
assertEquals(ValueInterval(bd(5), bd(55)), mq.getColumnInterval(agg, 0))
assertEquals(ValueInterval(bd(0), bd(50)), mq.getColumnInterval(agg, 1))
assertEquals(ValueInterval(bd(0), null), mq.getColumnInterval(agg, 2))
assertEquals(null, mq.getColumnInterval(agg, 3))
}
assertEquals(
ValueInterval(bd(5), bd(55)), mq.getColumnInterval(batchLocalWindowAggWithAuxGroup, 0))
assertEquals(null, mq.getColumnInterval(batchLocalWindowAggWithAuxGroup, 1))
assertEquals(
ValueInterval(bd(0), bd(50)), mq.getColumnInterval(batchLocalWindowAggWithAuxGroup, 2))
assertEquals(
ValueInterval(bd(0), null), mq.getColumnInterval(batchLocalWindowAggWithAuxGroup, 3))
assertEquals(null, mq.getColumnInterval(batchLocalWindowAggWithAuxGroup, 4))
}
@Test
def testGetColumnIntervalOnOverAgg(): Unit = {
Array(flinkLogicalOverAgg, batchOverAgg).foreach {
agg =>
assertEquals(ValueInterval(bd(0), null), mq.getColumnInterval(agg, 0))
assertEquals(null, mq.getColumnInterval(agg, 1))
assertEquals(ValueInterval(bd(2.7), bd(4.8)), mq.getColumnInterval(agg, 2))
assertEquals(ValueInterval(bd(12), bd(18)), mq.getColumnInterval(agg, 3))
assertNull(mq.getColumnInterval(agg, 4))
assertNull(mq.getColumnInterval(agg, 5))
assertNull(mq.getColumnInterval(agg, 6))
assertNull(mq.getColumnInterval(agg, 7))
assertNull(mq.getColumnInterval(agg, 8))
assertNull(mq.getColumnInterval(agg, 9))
assertNull(mq.getColumnInterval(agg, 10))
}
assertEquals(ValueInterval(bd(0), null), mq.getColumnInterval(streamOverAgg, 0))
assertEquals(null, mq.getColumnInterval(streamOverAgg, 1))
assertEquals(ValueInterval(bd(2.7), bd(4.8)), mq.getColumnInterval(streamOverAgg, 2))
assertEquals(ValueInterval(bd(12), bd(18)), mq.getColumnInterval(streamOverAgg, 3))
assertNull(mq.getColumnInterval(streamOverAgg, 4))
assertNull(mq.getColumnInterval(streamOverAgg, 5))
assertNull(mq.getColumnInterval(streamOverAgg, 6))
assertNull(mq.getColumnInterval(streamOverAgg, 7))
}
@Test
def testGetColumnIntervalOnJoin(): Unit = {
val left = relBuilder.scan("MyTable1").build()
val right = relBuilder.scan("MyTable2").build()
    // join condition is MyTable1.a=MyTable2.a and MyTable1.a > 100 and MyTable2.b <= 1000
val join = relBuilder.push(left).push(right).join(JoinRelType.INNER,
relBuilder.call(EQUALS, relBuilder.field(2, 0, 0), relBuilder.field(2, 1, 0)),
relBuilder.call(GREATER_THAN, relBuilder.field(2, 0, 0), relBuilder.literal(100)),
relBuilder.call(LESS_THAN_OR_EQUAL, relBuilder.field(2, 1, 1),
rexBuilder.makeLiteral(1000L, longType, false))
).build
assertEquals(ValueInterval(bd(100), null, includeLower = false), mq.getColumnInterval(join, 0))
assertEquals(ValueInterval(bd(1L), bd(800000000L)), mq.getColumnInterval(join, 1))
assertNull(mq.getColumnInterval(join, 2))
assertNull(mq.getColumnInterval(join, 3))
assertEquals(ValueInterval(bd(1L), bd(100L)), mq.getColumnInterval(join, 4))
assertNull(mq.getColumnInterval(join, 5))
assertEquals(ValueInterval(bd(8L), bd(1000L)), mq.getColumnInterval(join, 6))
assertNull(mq.getColumnInterval(join, 7))
assertNull(mq.getColumnInterval(join, 8))
assertEquals(ValueInterval(bd(0), null, includeLower = true),
mq.getColumnInterval(logicalSemiJoinNotOnUniqueKeys, 0))
assertEquals(ValueInterval(bd(1L), bd(800000000L)),
mq.getColumnInterval(logicalSemiJoinNotOnUniqueKeys, 1))
assertNull(mq.getColumnInterval(logicalSemiJoinNotOnUniqueKeys, 2))
assertNull(mq.getColumnInterval(logicalSemiJoinNotOnUniqueKeys, 3))
assertEquals(
ValueInterval(bd(1L), bd(100L)), mq.getColumnInterval(logicalSemiJoinNotOnUniqueKeys, 4))
assertEquals(ValueInterval(bd(0), null, includeLower = true),
mq.getColumnInterval(logicalAntiJoinWithoutEquiCond, 0))
assertEquals(ValueInterval(bd(1L), bd(800000000L)),
mq.getColumnInterval(logicalAntiJoinWithoutEquiCond, 1))
assertNull(mq.getColumnInterval(logicalAntiJoinWithoutEquiCond, 2))
assertNull(mq.getColumnInterval(logicalAntiJoinWithoutEquiCond, 3))
assertEquals(
ValueInterval(bd(1L), bd(100L)), mq.getColumnInterval(logicalAntiJoinWithoutEquiCond, 4))
}
@Test
def testGetColumnIntervalOnUnion(): Unit = {
val ts1 = relBuilder.scan("MyTable1").build()
val ts2 = relBuilder.scan("MyTable2").build()
val union = relBuilder.push(ts1).push(ts2).union(true).build()
assertNull(mq.getColumnInterval(union, 0))
assertEquals(ValueInterval(bd(1L), bd(800000000L)), mq.getColumnInterval(union, 1))
assertNull(mq.getColumnInterval(union, 2))
assertNull(mq.getColumnInterval(union, 3))
}
@Test
def testGetColumnIntervalOnDefault(): Unit = {
(0 until testRel.getRowType.getFieldCount).foreach { idx =>
assertNull(mq.getColumnInterval(testRel, idx))
}
}
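  /**
   * Compares two value intervals by converting their bounds to doubles, so that
   * numerically equal bounds of different numeric types are considered equal
   * within `delta`. Two null intervals are also considered equal.
   */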
def assertEqualsAsDouble(
expected: ValueInterval,
actual: ValueInterval,
delta: Double = 1e-6): Unit = {
if (expected == null || actual == null) {
assertTrue(s"expected: $expected, actual: $actual", expected == null && actual == null)
return
}
def toDouble(number: Any): JDouble = {
      val v = ColumnIntervalUtil.convertNumberToString(number)
        .getOrElse(throw new TableException(s"Cannot convert $number to a number string"))
java.lang.Double.valueOf(v)
}
def decompose(v: ValueInterval): (JDouble, JDouble, JBoolean, JBoolean) = {
v match {
case EmptyValueInterval => (null, null, false, false)
case InfiniteValueInterval =>
(Double.NegativeInfinity, Double.PositiveInfinity, false, false)
case f: FiniteValueInterval =>
(toDouble(f.lower), toDouble(f.upper), f.includeLower, f.includeUpper)
case l: LeftSemiInfiniteValueInterval =>
(Double.NegativeInfinity, toDouble(l.upper), false, l.includeUpper)
case r: RightSemiInfiniteValueInterval =>
(toDouble(r.lower), Double.PositiveInfinity, r.includeLower, false)
}
}
val (lower1, upper1, includeLower1, includeUpper1) = decompose(expected)
val (lower2, upper2, includeLower2, includeUpper2) = decompose(actual)
assertEquals(lower1, lower2, delta)
assertEquals(upper1, upper2, delta)
assertEquals(includeLower1, includeLower2)
assertEquals(includeUpper1, includeUpper2)
}
}
|
tzulitai/flink
|
flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/metadata/FlinkRelMdColumnIntervalTest.scala
|
Scala
|
apache-2.0
| 31,466
|
object IfStatementComplex {
def foo(x: Boolean => String => String) = x(false)("45")
foo(if (_) /*start*/_.concat("3")/*end*/ else _.concat("6"))
}
//(String) => String
|
ilinum/intellij-scala
|
testdata/typeInference/expected/placeholder/IfStatementComplex.scala
|
Scala
|
apache-2.0
| 173
|
package org.newsfromthestreets.lib
import net.liftweb.http._
import net.liftweb.util._
import net.liftweb.common._
import java.util.Date
/**
* A factory for generating new instances of Date. You can create
* factories for each kind of thing you want to vend in your application.
* An example is a payment gateway. You can change the default implementation,
* or override the default implementation on a session, request or current call
* stack basis.
*/
object DependencyFactory extends Factory {
implicit object time extends FactoryMaker(Helpers.now _)
/**
* objects in Scala are lazily created. The init()
* method creates a List of all the objects. This
* results in all the objects getting initialized and
* registering their types with the dependency injector
*/
private def init() {
List(time)
}
init()
}
/*
/**
* Examples of changing the implementation
*/
sealed abstract class Changer {
def changeDefaultImplementation() {
DependencyFactory.time.default.set(() => new Date())
}
def changeSessionImplementation() {
DependencyFactory.time.session.set(() => new Date())
}
def changeRequestImplementation() {
DependencyFactory.time.request.set(() => new Date())
}
def changeJustForCall(d: Date) {
DependencyFactory.time.doWith(d) {
// perform some calculations here
}
}
}
*/
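// Illustrative usage sketch, not part of the original source: reading the value
// currently vended by the factory. Assumes Lift's FactoryMaker exposes `vend`.
object DependencyFactoryUsage {
  def currentTime: Date = DependencyFactory.time.vend
}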
|
Rmanolis/newsfromthestreets
|
src/main/scala/org/newsfromthestreets/lib/DependencyFactory.scala
|
Scala
|
apache-2.0
| 1,368
|
trait HList
trait HNil extends HList
trait FromTraversable[Out <: HList]
object FromTraversable {
implicit def hnilFromTraversable[T]: FromTraversable[HNil] =
new FromTraversable[HNil]{}
}
object Filter {
def apply[A <: HList, O <: HList]()(implicit ftA: FromTraversable[A],
ftO: FromTraversable[O]): Unit = ()
}
object Main {
def main = Filter[HNil, HNil]()
}
|
som-snytt/dotty
|
tests/pos/i2981.scala
|
Scala
|
apache-2.0
| 411
|
package cwe.scala.library.audit.listener
import cwe.scala.library.audit._
import scala.collection.mutable.HashSet
/**
* A queued event listener which has an independent working thread to manage the event queue
*/
class QueuedListener extends AuditEventListener {
private val auditor = AuditServiceProvider.createAuditor(this)
// Listeners
private var _listeners: HashSet[AuditEventListener] = null
// AuditEvent queue
private class AuditEventQueue {
var event: AuditEvent = null
var next: AuditEventQueue = null
}
private var lastWritten = new AuditEventQueue
private var written = lastWritten
// Worker
private var thread: Thread = null
private var running: Boolean = false
private val queueWorker = new Runnable {
private val auditor = AuditServiceProvider.createAuditor(this)
def run {
//val auditToken = auditor.begin("run")
var tired = false
while (!tired) {
while (hasSomethingToDo) {
doTheWork
tired = true
}
if (tired) synchronized {
//val wtoken = auditor.begin("wait")
this.wait(10)
//auditor.end(wtoken)
tired = false
}
else tired = true
}
workFinished
//auditor.end(auditToken)
}
}
private def startWorker = if (!this.running) synchronized {
if (!this.running) {
this.thread = new Thread(queueWorker, "queued-listener-worker")
this.running = true
this.thread.start()
}
}
private def workFinished = synchronized { this.running = false }
private def hasSomethingToDo: Boolean = written.next != null
private def doTheWork = {
var event: AuditEvent = null
// gets first element from the queue
synchronized {
if (written.next != null) {
event = written.event
written = written.next
}
}
// dispatches the event
if (event != null && this.hasListeners) synchronized {
if (this.hasListeners) this.listeners.foreach((l) => l.handleAuditEvent(event))
}
}
// Accessors
/**
* Set of registered listeners
*/
protected def listeners: HashSet[AuditEventListener] = {
if (_listeners == null) synchronized { if (_listeners == null) _listeners = new HashSet }
_listeners
}
/**
* Returns true if some listeners have registered themselves
*/
protected def hasListeners: Boolean = _listeners != null && _listeners.size > 0
// AuditEventListener implementation
def handleAuditEvent(e: AuditEvent) = {
// adds element in the queue
synchronized {
lastWritten.event = e
lastWritten.next = new AuditEventQueue
lastWritten = lastWritten.next
}
// starts working thread
this.startWorker
}
def unregistred() = if (this.hasListeners) synchronized {
val ls = this.listeners
ls.foreach({ (l) => l.unregistred() })
ls.clear()
}
/**
* Registers a new AuditEventListener
* if listener already exists, nothing happens
*/
def registerListener(l: AuditEventListener) = synchronized { this.listeners.add(l) }
/**
* Unregisters an AuditEventListener
* if listener does not exists, nothing happens
*/
def unregisterListener(l: AuditEventListener) = if (this.hasListeners) synchronized {
if (this.listeners.remove(l)) l.unregistred()
}
}
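// Illustrative sketch, not part of the original source: wiring a downstream
// AuditEventListener behind the queued dispatcher so events are delivered
// asynchronously by the worker thread.
object QueuedListenerExample {
  def wire(downstream: AuditEventListener): QueuedListener = {
    val queued = new QueuedListener
    queued.registerListener(downstream)
    queued
  }
}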
|
wwwigii-system/research
|
cwe-scala-library/src/cwe/scala/library/audit/listener/QueuedListener.scala
|
Scala
|
gpl-3.0
| 3,139
|
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.precog.common.security
import scalaz._
import scalaz.syntax.monad._
import java.time.LocalDateTime
trait AccessControl[M[+ _]] {
def hasCapability(apiKey: APIKey, perms: Set[Permission], at: Option[LocalDateTime]): M[Boolean]
}
class UnrestrictedAccessControl[M[+ _]: Applicative] extends AccessControl[M] {
def hasCapability(apiKey: APIKey, perms: Set[Permission], at: Option[LocalDateTime]): M[Boolean] = true.point[M]
}
|
drostron/quasar
|
blueeyes/src/main/scala/quasar/precog/common/security/AccessControl.scala
|
Scala
|
apache-2.0
| 1,051
|
package com.typesafe.sbt.packager.jdkpackager
import com.typesafe.sbt.packager.jdkpackager.JDKPackagerPlugin.autoImport._
import org.apache.tools.ant.{ BuildEvent, BuildListener, ProjectHelper }
import sbt.Keys._
import sbt._
import scala.util.Try
import scala.xml.Elem
/**
* Helpers for working with Ant build definitions
*
* @author <a href="mailto:fitch@datamininglab.com">Simeon H.K. Fitch</a>
* @since 5/7/15
*/
object JDKPackagerAntHelper {
  /** Attempts to locate the `ant-javafx.jar` Ant task library within the JDK installation. */
private[jdkpackager] def locateAntTasks(javaHome: Option[File], logger: Logger): Option[File] = {
val jarname = "ant-javafx.jar"
// This approach to getting JDK bits is borrowed from: http://stackoverflow.com/a/25163628/296509
// Starting with an ordered list of possible java directory sources, create derivative and
// then test for the tool. It's nasty looking because there's no canonical way of finding the
// JDK from the JRE, and JDK_HOME isn't always defined.
val searchPoints = Seq(
// Build-defined
javaHome,
// Environment override
sys.env.get("JDK_HOME").map(file),
sys.env.get("JAVA_HOME").map(file),
// MacOS X
Try("/usr/libexec/java_home".!!.trim).toOption.map(file),
// From system properties
sys.props.get("java.home").map(file)
)
// Unlift searchPoint `Option`-s, and for each base directory, add the parent variant to cover nested JREs on Unix.
val entryPoints = searchPoints.flatten.flatMap(f ⇒ Seq(f, f.getAbsoluteFile))
// On Windows we're often running in the JRE and not the JDK. If JDK is installed,
// it's likely to be in a parallel directory, with the "jre" prefix changed to "jdk"
val entryPointsSpecialCaseWindows = entryPoints.flatMap { f ⇒
if (f.getName.startsWith("jre")) Seq(f, f.getParentFile / ("jdk" + f.getName.drop(3)))
else Seq(f)
}
// Now search for the tool
entryPointsSpecialCaseWindows
.map(_ / "lib" / jarname)
.find { f ⇒ logger.debug(s"Looking for '$jarname' in '${f.getParent}'"); f.exists() }
.map { f ⇒ logger.debug(s"Found '$f'!"); f }
}
type PlatformDOM = Elem
/** Creates the `<fx:platform>` definition. */
private[jdkpackager] def platformDOM(jvmArgs: Seq[String], properties: Map[String, String]): PlatformDOM =
// format: OFF
<fx:platform id="platform" javafx="8+" j2se="8+">
{
for {
arg <- jvmArgs
} yield <fx:jvmarg value={arg}/>
}
{
for {
(key, value) <- properties
} yield <fx:property name={key} value={value}/>
}
</fx:platform>
// format: ON
type ApplicationDOM = Elem
/** Create the `<fx:application>` definition. */
private[jdkpackager] def applicationDOM(
name: String,
version: String,
mainClass: Option[String],
toolkit: JDKPackagerToolkit,
appArgs: Seq[String]): ApplicationDOM =
// format: OFF
<fx:application id="app"
name={name}
version={version}
mainClass={mainClass.map(xml.Text.apply)}
toolkit={toolkit.arg}>
{
for {
arg <- appArgs
} yield <fx:argument>{arg}</fx:argument>
}
</fx:application>
// format: ON
type InfoDOM = Elem
/** Create the `<fx:info>` definition. */
private[jdkpackager] def infoDOM(
name: String,
description: String,
maintainer: String,
iconPath: Option[File],
associations: Seq[FileAssociation]): InfoDOM =
// format: OFF
<fx:info id="info" title={name} description={description} vendor={maintainer}>
{
if (iconPath.nonEmpty) <fx:icon href={iconPath.get.getAbsolutePath} kind="default"/>
}
{
for {
fa <- associations
} yield <fx:association extension={fa.extension} mimetype={fa.mimetype}
description={fa.description}
icon={fa.icon.map(_.getAbsolutePath).map(xml.Text.apply)}/>
}
</fx:info>
// format: ON
type DeployDOM = Elem
/** Create the `<fx:deploy>` definition. */
private[jdkpackager] def deployDOM(
basename: String,
packageType: String,
mainJar: File,
outputDir: File,
infoDOM: InfoDOM): DeployDOM =
// format: OFF
<fx:deploy outdir={outputDir.getAbsolutePath}
outfile={basename}
nativeBundles={packageType}
verbose="true">
<fx:preferences install="true" menu="true" shortcut="true"/>
<fx:application refid="app"/>
<fx:platform refid="platform"/>
{infoDOM}
<fx:resources>
<fx:fileset refid="jar.files"/>
<fx:fileset refid="data.files"/>
</fx:resources>
<fx:bundleArgument arg="mainJar" value={"lib/" + mainJar.getName} />
</fx:deploy>
// format: ON
type BuildDOM = xml.Elem
/**
* Create Ant project DOM for building packages, using ant-javafx.jar tasks.
*
* see: https://docs.oracle.com/javase/8/docs/technotes/guides/deploy/javafx_ant_task_reference.html
*/
private[jdkpackager] def makeAntBuild(
antTaskLib: Option[File],
name: String,
sourceDir: File,
mappings: Seq[(File, String)],
platformDOM: PlatformDOM,
applicationDOM: ApplicationDOM,
deployDOM: DeployDOM): BuildDOM = {
if (antTaskLib.isEmpty) {
      sys.error(
        "Please set key `antPackagerTasks in JDKPackager` to `ant-javafx.jar` path, " +
          "which should be found in the `lib` directory of the Oracle JDK 8 installation. For example (Windows):\\n" +
          """(antPackagerTasks in JDKPackager) := Some("C:\\\\Program Files\\\\Java\\\\jdk1.8.0_45\\\\lib\\\\ant-javafx.jar")""")
}
val taskClassPath = Seq(sourceDir.getAbsolutePath, antTaskLib.get, ".")
val (jarFiles, supportFiles) = mappings.partition(_._2.endsWith(".jar"))
// format: OFF
<project name={name} default="default" basedir="." xmlns:fx="javafx:com.sun.javafx.tools.ant">
<target name="default">
<property name="plugin.classpath" value={taskClassPath.mkString(":")}/>
<taskdef resource="com/sun/javafx/tools/ant/antlib.xml"
uri="javafx:com.sun.javafx.tools.ant" classpath="${plugin.classpath}"/>
{platformDOM}
{applicationDOM}
<fx:fileset id="jar.files" dir={sourceDir.getAbsolutePath} type="jar">
{jarFiles.map(_._2).map(f => <include name={f}/> )}
</fx:fileset>
<fx:fileset id="data.files" dir={sourceDir.getAbsolutePath} type="data">
{supportFiles.map(_._2).map(f => <include name={f}/> )}
</fx:fileset>
{deployDOM}
</target>
</project>
// format: ON
}
/**
   * Locate the generated package.
* TODO: replace with something significantly more intelligent.
* @param output output directory
* @return generated file location
*/
private[jdkpackager] def findResult(output: File, s: TaskStreams): Option[File] = {
// Oooof. Need to do better than this to determine what was generated.
val globs = Seq("*.dmg", "*.pkg", "*.app", "*.msi", "*.exe", "*.deb", "*.rpm")
val finder = globs.foldLeft(PathFinder.empty)(_ +++ output ** _)
val result = finder.getPaths.headOption
result.foreach(f ⇒ s.log.info("Wrote " + f))
result.map(file)
}
/** Serialize the Ant DOM to `build.xml`. */
private[jdkpackager] def writeAntFile(outdir: File, dom: xml.Node, s: TaskStreams) = {
if (!outdir.exists()) IO.createDirectory(outdir)
val out = outdir / "build.xml"
scala.xml.XML.save(out.getAbsolutePath, dom, "UTF-8", xmlDecl = true)
s.log.info("Wrote " + out)
out
}
/** Build package via Ant build.xml definition. */
private[jdkpackager] def buildPackageWithAnt(
buildXML: File, target: File, s: TaskStreams): File = {
import org.apache.tools.ant.{ Project ⇒ AntProject }
val ap = new AntProject
ap.setUserProperty("ant.file", buildXML.getAbsolutePath)
val adapter = new AntLogAdapter(s)
ap.addBuildListener(adapter)
ap.init()
val antHelper = ProjectHelper.getProjectHelper
antHelper.parse(ap, buildXML)
ap.executeTarget(ap.getDefaultTarget)
ap.removeBuildListener(adapter)
// Not sure what to do when we can't find the result
findResult(target, s).getOrElse(target)
}
/** For piping Ant messages to sbt logger. */
private class AntLogAdapter(s: TaskStreams) extends BuildListener {
import org.apache.tools.ant.{ Project ⇒ AntProject }
def buildFinished(event: BuildEvent): Unit = ()
def buildStarted(event: BuildEvent): Unit = ()
def targetStarted(event: BuildEvent): Unit = ()
def taskFinished(event: BuildEvent): Unit = ()
def targetFinished(event: BuildEvent): Unit = ()
def taskStarted(event: BuildEvent): Unit = ()
def messageLogged(event: BuildEvent): Unit = event.getPriority match {
case AntProject.MSG_ERR ⇒ s.log.error(event.getMessage)
case AntProject.MSG_WARN ⇒ s.log.warn(event.getMessage)
case AntProject.MSG_INFO ⇒ s.log.info(event.getMessage)
case AntProject.MSG_VERBOSE ⇒ s.log.verbose(event.getMessage)
case _ ⇒ s.log.debug(event.getMessage)
}
}
}
|
bfil/sbt-native-packager
|
src/main/scala/com/typesafe/sbt/packager/jdkpackager/JDKPackagerAntHelper.scala
|
Scala
|
bsd-2-clause
| 9,263
|
/*
* Copyright (c) 2017-2022 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.snowplow.eventpopulator
// Spark
import org.apache.hadoop.io.compress.GzipCodec
/**
* Codec reading archived files with `.gzip` extension instead
* of default `.gz`. Used to output enriched data before R83
*/
class R83Codec extends GzipCodec {
override def getDefaultExtension: String = "gzip"
}
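// Illustrative sketch, not part of the original source: registering the codec via
// the standard Hadoop `io.compression.codecs` property so inputs with a ".gzip"
// extension are decompressed with GzipCodec semantics.
object R83CodecExample {
  import org.apache.hadoop.conf.Configuration
  def register(conf: Configuration): Unit =
    conf.set(
      "io.compression.codecs",
      "org.apache.hadoop.io.compress.GzipCodec,com.snowplowanalytics.snowplow.eventpopulator.R83Codec")
}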
|
snowplow/snowplow
|
5-data-modeling/event-manifest-populator/src/main/scala/com/snowplowanalytics/snowplow/eventpopulator/R83Codec.scala
|
Scala
|
apache-2.0
| 1,038
|
package reactivemongo.api
import reactivemongo.bson.buffer.{ ReadableBuffer, WritableBuffer }
trait SerializationPack { self: Singleton =>
type Value
type ElementProducer
type Document <: Value
type Writer[A]
type Reader[A]
def IdentityWriter: Writer[Document]
def IdentityReader: Reader[Document]
def serialize[A](a: A, writer: Writer[A]): Document
def deserialize[A](document: Document, reader: Reader[A]): A
def writeToBuffer(buffer: WritableBuffer, document: Document): WritableBuffer
def readFromBuffer(buffer: ReadableBuffer): Document
def serializeAndWrite[A](buffer: WritableBuffer, document: A, writer: Writer[A]): WritableBuffer = writeToBuffer(buffer, serialize(document, writer))
def readAndDeserialize[A](buffer: ReadableBuffer, reader: Reader[A]): A =
deserialize(readFromBuffer(buffer), reader)
import reactivemongo.core.protocol.Response
import reactivemongo.core.netty.ChannelBufferReadableBuffer
final def readAndDeserialize[A](response: Response, reader: Reader[A]): A = {
val buf = response.documents
val channelBuf = ChannelBufferReadableBuffer(buf.readBytes(buf.getInt(buf.readerIndex)))
readAndDeserialize(channelBuf, reader)
}
def writer[A](f: A => Document): Writer[A]
def isEmpty(document: Document): Boolean
}
/** The default serialization pack. */
object BSONSerializationPack extends SerializationPack {
import reactivemongo.bson._
import reactivemongo.bson.buffer.DefaultBufferHandler
type Value = BSONValue
type ElementProducer = Producer[BSONElement]
type Document = BSONDocument
type Writer[A] = BSONDocumentWriter[A]
type Reader[A] = BSONDocumentReader[A]
object IdentityReader extends Reader[Document] {
def read(document: Document): Document = document
}
object IdentityWriter extends Writer[Document] {
def write(document: Document): Document = document
}
def serialize[A](a: A, writer: Writer[A]): Document = writer.write(a)
def deserialize[A](document: Document, reader: Reader[A]): A =
reader.read(document)
def writeToBuffer(buffer: WritableBuffer, document: Document): WritableBuffer = DefaultBufferHandler.writeDocument(document, buffer)
def readFromBuffer(buffer: ReadableBuffer): Document =
DefaultBufferHandler.readDocument(buffer).get
def writer[A](f: A => Document): Writer[A] = new BSONDocumentWriter[A] {
def write(input: A): Document = f(input)
}
def isEmpty(document: Document) = document.isEmpty
}
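// Illustrative sketch, not part of the original source: a round trip through the
// default pack using only the identity writer/reader defined above.
object BSONSerializationPackExample {
  import reactivemongo.bson.BSONDocument
  def identityRoundTrip(doc: BSONDocument): BSONDocument = {
    val serialized = BSONSerializationPack.serialize(doc, BSONSerializationPack.IdentityWriter)
    BSONSerializationPack.deserialize(serialized, BSONSerializationPack.IdentityReader)
  }
}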
|
maowug/ReactiveMongo
|
driver/src/main/scala/api/serializationpack.scala
|
Scala
|
apache-2.0
| 2,482
|
package com.sfxcode.sapphire.core.scene
import javafx.beans.property.Property
import javafx.scene.Node
import javafx.scene.control._
import javafx.scene.text.Text
class DefaultResolver extends NodePropertyResolving {
def resolve(node: Node): Option[Property[_]] =
node match {
case label: Label => Some(label.textProperty())
case text: Text => Some(text.textProperty())
case textField: TextField => Some(textField.textProperty())
case textArea: TextArea => Some(textArea.textProperty())
case datePicker: DatePicker => Some(datePicker.valueProperty())
case toggleButton: ToggleButton => Some(toggleButton.selectedProperty())
case checkBox: CheckBox => Some(checkBox.selectedProperty())
case slider: Slider => Some(slider.valueProperty())
case comboBox: ComboBox[_] => Some(comboBox.valueProperty())
case choiceBox: ChoiceBox[_] => Some(choiceBox.valueProperty())
case spinner: Spinner[_] => Some(spinner.getValueFactory.valueProperty())
case _ => None
}
}
object DefaultResolver {
def apply(): DefaultResolver = new DefaultResolver()
}
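// Illustrative usage sketch, not part of the original source: the resolver maps a
// control to its primary value property, or None for unsupported node types.
object DefaultResolverExample {
  import javafx.beans.property.Property
  import javafx.scene.control.Label
  def boundProperty(label: Label): Option[Property[_]] =
    DefaultResolver().resolve(label) // Some(label.textProperty())
}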
|
sfxcode/sapphire-core
|
src/main/scala/com/sfxcode/sapphire/core/scene/DefaultResolver.scala
|
Scala
|
apache-2.0
| 1,125
|
package com.danielasfregola.twitter4s.http.clients.rest.blocks
import com.danielasfregola.twitter4s.entities._
import com.danielasfregola.twitter4s.http.clients.rest.RestClient
import com.danielasfregola.twitter4s.http.clients.rest.blocks.parameters.{
BlockParameters,
BlockedUserIdsParameters,
BlockedUsersParameters
}
import com.danielasfregola.twitter4s.util.Configurations._
import scala.concurrent.Future
/** Implements the available requests for the `blocks` resource.
*/
trait TwitterBlockClient {
protected val restClient: RestClient
private val blocksUrl = s"$apiTwitterUrl/$twitterVersion/blocks"
/** Returns the users that the authenticating user is blocking.
* For more information see
* <a href="https://developer.twitter.com/en/docs/accounts-and-users/mute-block-report-users/api-reference/get-blocks-list" target="_blank">
* https://developer.twitter.com/en/docs/accounts-and-users/mute-block-report-users/api-reference/get-blocks-list</a>.
*
* @param include_entities : By default it is `true`.
* The parameters node will not be included when set to false.
* @param skip_status : By default it is `false`.
* When set to either `true` statuses will not be included in the returned user object.
* @param cursor : By default it is `-1`, which is the first “page”.
* Causes the list of blocked users to be broken into pages of no more than 5000 IDs at a time.
* The number of IDs returned is not guaranteed to be 5000 as suspended users are filtered out after connections are queried.
* @return : The cursored representation of blocked users.
*/
def blockedUsers(include_entities: Boolean = true,
skip_status: Boolean = false,
cursor: Long = -1): Future[RatedData[Users]] = {
import restClient._
val parameters = BlockedUsersParameters(include_entities, skip_status, cursor)
Get(s"$blocksUrl/list.json", parameters).respondAsRated[Users]
}
/** Returns an array of user ids the authenticating user is blocking.
* For more information see
* <a href="https://developer.twitter.com/en/docs/accounts-and-users/mute-block-report-users/api-reference/get-blocks-ids" target="_blank">
* https://developer.twitter.com/en/docs/accounts-and-users/mute-block-report-users/api-reference/get-blocks-ids</a>.
*
* @param cursor : By default it is `-1`, which is the first “page”.
* Causes the list of blocked users to be broken into pages of no more than 5000 IDs at a time.
* The number of IDs returned is not guaranteed to be 5000 as suspended users are filtered out after connections are queried.
* @return : The cursored representation of user ids.
*/
def blockedUserIds(cursor: Long = -1): Future[RatedData[UserIds]] = {
val parameters = BlockedUserIdsParameters(stringify_ids = false, cursor)
genericGetBlockedUserIds[UserIds](parameters)
}
/** Returns an array of user stringified ids the authenticating user is blocking.
* For more information see
* <a href="https://developer.twitter.com/en/docs/accounts-and-users/mute-block-report-users/api-reference/get-blocks-ids" target="_blank">
* https://developer.twitter.com/en/docs/accounts-and-users/mute-block-report-users/api-reference/get-blocks-ids</a>.
*
* @param cursor : By default it is `-1`, which is the first “page”.
* Causes the list of blocked users to be broken into pages of no more than 5000 IDs at a time.
* The number of IDs returned is not guaranteed to be 5000 as suspended users are filtered out after connections are queried.
* @return : The cursored representation of user stringified ids with cursors.
*/
def blockedUserStringifiedIds(cursor: Long = -1): Future[RatedData[UserStringifiedIds]] = {
val parameters = BlockedUserIdsParameters(stringify_ids = true, cursor)
genericGetBlockedUserIds[UserStringifiedIds](parameters)
}
private def genericGetBlockedUserIds[T: Manifest](parameters: BlockedUserIdsParameters): Future[RatedData[T]] = {
import restClient._
Get(s"$blocksUrl/ids.json", parameters).respondAsRated[T]
}
/** Blocks the specified user from following the authenticating user.
* In addition the blocked user will not show in the authenticating users mentions or timeline (unless retweeted by another user).
* If a follow or friend relationship exists it is destroyed.
* For more information see
* <a href="https://developer.twitter.com/en/docs/accounts-and-users/mute-block-report-users/api-reference/post-blocks-create" target="_blank">
* https://developer.twitter.com/en/docs/accounts-and-users/mute-block-report-users/api-reference/post-blocks-create</a>.
*
* @param screen_name : The screen name of the potentially blocked user.
* Helpful for disambiguating when a valid user ID is also a valid screen name.
* @param include_entities : By default it is `true`.
* The parameters node will not be included when set to false.
* @param skip_status : By default it is `false`.
* When set to either `true` statuses will not be included in the returned user object.
* @return : The representation of the blocked user.
*/
def blockUser(screen_name: String, include_entities: Boolean = true, skip_status: Boolean = false): Future[User] = {
val parameters = BlockParameters(user_id = None, Some(screen_name), include_entities, skip_status)
genericBlock(parameters)
}
/** Blocks the specified user id from following the authenticating user.
* In addition the blocked user will not show in the authenticating users mentions or timeline (unless retweeted by another user).
* If a follow or friend relationship exists it is destroyed.
* For more information see
* <a href="https://developer.twitter.com/en/docs/accounts-and-users/mute-block-report-users/api-reference/post-blocks-create" target="_blank">
* https://developer.twitter.com/en/docs/accounts-and-users/mute-block-report-users/api-reference/post-blocks-create</a>.
*
* @param user_id : The ID of the potentially blocked user.
* Helpful for disambiguating when a valid user ID is also a valid screen name.
* @param include_entities : By default it is `true`.
* The parameters node will not be included when set to false.
* @param skip_status : By default it is `false`.
* When set to either `true` statuses will not be included in the returned user object.
* @return : The representation of the blocked user.
*/
def blockUserId(user_id: Long, include_entities: Boolean = true, skip_status: Boolean = false): Future[User] = {
val parameters = BlockParameters(Some(user_id), screen_name = None, include_entities, skip_status)
genericBlock(parameters)
}
private def genericBlock(parameters: BlockParameters): Future[User] = {
import restClient._
Post(s"$blocksUrl/create.json", parameters).respondAs[User]
}
/** Un-blocks the user for the authenticating user.
* Returns the un-blocked user in the requested format when successful.
* If relationships existed before the block was instated, they will not be restored.
* For more information see
* <a href="https://developer.twitter.com/en/docs/accounts-and-users/mute-block-report-users/api-reference/post-blocks-destroy" target="_blank">
* https://developer.twitter.com/en/docs/accounts-and-users/mute-block-report-users/api-reference/post-blocks-destroy</a>.
*
* @param screen_name : The screen name of the potentially blocked user.
* Helpful for disambiguating when a valid user ID is also a valid screen name.
* @param include_entities : By default it is `true`.
* The parameters node will not be included when set to false.
* @param skip_status : By default it is `false`.
* When set to either `true` statuses will not be included in the returned user object.
* @return : The representation of the unblocked user.
*/
def unblockUser(screen_name: String, include_entities: Boolean = true, skip_status: Boolean = false): Future[User] = {
val parameters = BlockParameters(user_id = None, Some(screen_name), include_entities, skip_status)
genericUnblock(parameters)
}
/** Un-blocks the user specified id for the authenticating user.
* Returns the un-blocked user in the requested format when successful.
* If relationships existed before the block was instated, they will not be restored.
* For more information see
* <a href="https://developer.twitter.com/en/docs/accounts-and-users/mute-block-report-users/api-reference/post-blocks-destroy" target="_blank">
* https://developer.twitter.com/en/docs/accounts-and-users/mute-block-report-users/api-reference/post-blocks-destroy</a>.
*
* @param user_id : The ID of the potentially blocked user.
* Helpful for disambiguating when a valid user ID is also a valid screen name.
* @param include_entities : By default it is `true`.
* The parameters node will not be included when set to false.
* @param skip_status : By default it is `false`.
* When set to either `true` statuses will not be included in the returned user object.
* @return : The representation of the unblocked user.
*/
def unblockUserId(user_id: Long, include_entities: Boolean = true, skip_status: Boolean = false): Future[User] = {
val parameters = BlockParameters(Some(user_id), screen_name = None, include_entities, skip_status)
genericUnblock(parameters)
}
private def genericUnblock(parameters: BlockParameters): Future[User] = {
import restClient._
Post(s"$blocksUrl/destroy.json", parameters).respondAs[User]
}
}
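// Illustrative usage sketch, not part of the original source: chaining a block and
// an unblock call; "some_handle" is a hypothetical screen name, and the concrete
// client (e.g. the library's TwitterRestClient) must mix in this trait.
object TwitterBlockClientExample {
  import scala.concurrent.ExecutionContext.Implicits.global
  def blockThenUnblock(client: TwitterBlockClient): Future[User] =
    client.blockUser("some_handle").flatMap(_ => client.unblockUser("some_handle"))
}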
|
DanielaSfregola/twitter4s
|
src/main/scala/com/danielasfregola/twitter4s/http/clients/rest/blocks/TwitterBlockClient.scala
|
Scala
|
apache-2.0
| 10,063
|
package scalax.collection
import GraphPredef._
import GraphEdge._
/* TODO
import edge._
import edge.Implicits._
*/
abstract class TGraph[N, E <: EdgeLike[N], G[N, E <: EdgeLike[N]] <: Graph[N, E] with GraphLike[N, E, G]](
val g: G[N, E]
) {
def node(outer: N): g.NodeT = g get outer
def n(outer: N): g.NodeT = node(outer)
def edge(outer: E): g.EdgeT = g get outer
def e(outer: E): g.EdgeT = edge(outer)
}
/** The Graph for Scala representation of graph pictures located in `scala/test/doc`.
*/
object Data {
// WDi-1.jpg without weights
val outerElemsOfDi_1 = List(1 ~> 2, 2 ~> 3, 4 ~> 3, 3 ~> 5, 1 ~> 5, 1 ~> 3)
val elementsOfDi_1 = List(outerElemsOfDi_1: _*)
/* TODO
// WDi-1.jpg
val elementsOfWDi_1 = List(1~>2 % 4, 2~>3 % 40, 4~>3 % 7, 3~>5 % 50, 1~>5 % 40, 1~>3 % 2)
// WUnDi-1.jpg without weights
val elementsOfUnDi_1 = List(1~2, 2~3, 1~>3, 1~5, 3~5, 3~4, 4~>4, 4~>5)
// WUnDi-1.jpg
val elementsOfWUnDi_1 =
List(1 ~ 2 % 4, 2 ~ 3 % 2, 1 ~> 3 % 5, 1 ~ 5 % 3, 3 ~ 5 % 2, 3 ~ 4 % 1, 4 ~> 4 % 1, 4 ~> 5 % 0)
// WUnDi-2.jpg without weights
val elementsOfUnDi_2 = List(1 ~ 2, 2 ~ 3, 1 ~> 3, 1 ~ 3, 1 ~> 2, 2 ~ 2)
// WUnDi-2.jpg
val elementsOfWUnDi_2 = List(
1 ~ 2 % 4,
2 ~ 3 % -1,
1 ~> 3 % 5,
1 ~ 3 % 4,
1 ~> 2 % 3,
2 ~ 2 % 1
)
*/
}
|
scala-graph/scala-graph
|
core/src/test/scala/scalax/collection/Data.scala
|
Scala
|
apache-2.0
| 1,289
|
package smtlib
package theories
import Ints._
import org.scalatest.FunSuite
class IntsTests extends FunSuite {
override def suiteName = "Ints theory test suite"
test("IntSort is correctly constructed and extracted") {
IntSort() match {
case IntSort() => assert(true)
case _ => assert(false)
}
IntSort() match {
case FixedSizeBitVectors.BitVectorSort(_) => assert(false)
case IntSort() => assert(true)
case _ => assert(false)
}
}
test("NumeralLit is correctly constructed and extracted") {
val l1 = NumeralLit(42)
l1 match {
case NumeralLit(n) => assert(n === 42)
case _ => assert(false)
}
}
test("Divisible is correctly constructed and extracted") {
Divisible(BigInt(3), NumeralLit(9)) match {
case Divisible(d, NumeralLit(n)) =>
assert(n === 9)
assert(d === 3)
case _ => assert(false)
}
}
test("Neg is correctly constructed and extracted") {
Neg(NumeralLit(23)) match {
case Neg(NumeralLit(n)) => assert(n === 23)
case _ => assert(false)
}
}
test("Add is correctly constructed and extracted") {
Add(NumeralLit(23), NumeralLit(112)) match {
case Add(NumeralLit(n1), NumeralLit(n2)) =>
assert(n1 === 23)
assert(n2 === 112)
case _ => assert(false)
}
}
test("Sub is correctly constructed and extracted") {
Sub(NumeralLit(23), NumeralLit(112)) match {
case Sub(NumeralLit(n1), NumeralLit(n2)) =>
assert(n1 === 23)
assert(n2 === 112)
case _ => assert(false)
}
}
test("Mul is correctly constructed and extracted") {
Mul(NumeralLit(23), NumeralLit(112)) match {
case Mul(NumeralLit(n1), NumeralLit(n2)) =>
assert(n1 === 23)
assert(n2 === 112)
case _ => assert(false)
}
}
test("Div is correctly constructed and extracted") {
Div(NumeralLit(10), NumeralLit(2)) match {
case Div(NumeralLit(n1), NumeralLit(n2)) =>
assert(n1 === 10)
assert(n2 === 2)
case _ => assert(false)
}
}
test("Mod is correctly constructed and extracted") {
Mod(NumeralLit(10), NumeralLit(2)) match {
case Mod(NumeralLit(n1), NumeralLit(n2)) =>
assert(n1 === 10)
assert(n2 === 2)
case _ => assert(false)
}
}
test("Abs is correctly constructed and extracted") {
Abs(NumeralLit(23)) match {
case Abs(NumeralLit(n)) => assert(n === 23)
case _ => assert(false)
}
}
test("LessThan is correctly constructed and extracted") {
LessThan(NumeralLit(10), NumeralLit(2)) match {
case LessThan(NumeralLit(n1), NumeralLit(n2)) =>
assert(n1 === 10)
assert(n2 === 2)
case _ => assert(false)
}
}
test("LessEquals is correctly constructed and extracted") {
LessEquals(NumeralLit(10), NumeralLit(2)) match {
case LessEquals(NumeralLit(n1), NumeralLit(n2)) =>
assert(n1 === 10)
assert(n2 === 2)
case _ => assert(false)
}
}
test("GreaterThan is correctly constructed and extracted") {
GreaterThan(NumeralLit(10), NumeralLit(2)) match {
case GreaterThan(NumeralLit(n1), NumeralLit(n2)) =>
assert(n1 === 10)
assert(n2 === 2)
case _ => assert(false)
}
}
test("GreaterEquals is correctly constructed and extracted") {
GreaterEquals(NumeralLit(10), NumeralLit(2)) match {
case GreaterEquals(NumeralLit(n1), NumeralLit(n2)) =>
assert(n1 === 10)
assert(n2 === 2)
case _ => assert(false)
}
}
test("Extractors correctly extract parsed strings") {
import parser.Parser
Parser.fromString("12").parseTerm match {
case NumeralLit(n) => assert(n == 12)
case _ => assert(false)
}
Parser.fromString("(- 13)").parseTerm match {
case Neg(NumeralLit(n)) => assert(n == 13)
case _ => assert(false)
}
Parser.fromString("(- 13 17)").parseTerm match {
case Sub(
NumeralLit(n1),
NumeralLit(n2)
) => assert(n1 == 13 && n2 == 17)
case _ => assert(false)
}
Parser.fromString("(+ 13 17)").parseTerm match {
case Add(
NumeralLit(n1),
NumeralLit(n2)
) => assert(n1 == 13 && n2 == 17)
case _ => assert(false)
}
Parser.fromString("(* 13 17)").parseTerm match {
case Mul(
NumeralLit(n1),
NumeralLit(n2)
) => assert(n1 == 13 && n2 == 17)
case _ => assert(false)
}
}
}
|
manoskouk/scala-smtlib
|
src/test/scala/smtlib/theories/IntsTests.scala
|
Scala
|
mit
| 4,552
|
import sbt._
import sbt.Keys._
object StandardLayout {
def settings: Seq[Setting[_]] = List(
sourceDirectory := baseDirectory.value / "src",
sourceDirectory in Compile := sourceDirectory.value / "main",
sourceDirectory in Test := sourceDirectory.value / "test",
resourceDirectory in Compile := (sourceDirectory in Compile).value / "resources",
resourceDirectory in Test := (sourceDirectory in Test).value / "resources",
scalaSource in Compile := (sourceDirectory in Compile).value / "scala",
scalaSource in Test := (sourceDirectory in Test).value / "scala",
javaSource in Compile := (sourceDirectory in Compile).value / "java",
javaSource in Test := (sourceDirectory in Test).value / "java"
)
}
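// Illustrative sketch, not part of the original source: applying the layout to a
// project definition (assuming this file lives under project/ of an sbt build).
object StandardLayoutExample {
  import sbt._
  lazy val example = Project("example", file("example"))
    .settings(StandardLayout.settings: _*)
}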
|
restfulscala/play-content-negotiation
|
project/StandardLayout.scala
|
Scala
|
mit
| 736
|
/**
* Copyright 2015 Thomson Reuters
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers
import cmwell.ws.Settings
import security.Token
import org.joda.time.DateTime
import play.api.libs.json.Json
import play.api.mvc._
import security.httpauth._
import security.{Authentication, EagerAuthCache}
import javax.inject._
import filters.Attrs
import wsutil.{asyncErrorHandler, exceptionToResponse}
import scala.concurrent.{ExecutionContext, Future}
import scala.concurrent.duration._
import scala.util.{Failure, Success, Try}
@Singleton
class LoginHandler @Inject()(authCache: EagerAuthCache)(implicit ec: ExecutionContext)
extends InjectedController
with BasicHttpAuthentication
with DigestHttpAuthentication {
private val notAuthenticated = Unauthorized("Not authenticated.\n")
def login: Action[AnyContent] = Action.async { implicit req =>
val exp: Option[DateTime] = req.getQueryString("exp").map(parseShortFormatDuration)
def whichAuthType: Option[HttpAuthType] = {
req.headers.get("authorization").map { h =>
if (h.contains("Digest")) Digest else Basic
}
}
def loginDigest =
digestAuthenticate(authCache)(req)
.map(status => if (status.isAuthenticated) grantToken(status.username, exp) else notAuthenticated)
def loginBasic = {
val (username, pass) = decodeBasicAuth(req.headers("authorization"))
authCache.getUserInfoton(username) match {
case Some(user) if Authentication.passwordMatches(user, pass) => grantToken(username, exp)
case _ => notAuthenticated
}
}
// default (`case None` below) is Digest, s.t. client can be provided with the challenge.
// However, Basic Auth is supported, if client initiates it (e.g. `curl -u user:pass` etc.)
//
// This provides two different options to login:
//
// 1.
// ------------[GET]------------>
// <-----[digest Challenge]------
// ------[digest Response]------>
// <---------[200/403]-----------
//
// 2.
// ---[GET + Basic user:pass]--->
// <---------[200/403]-----------
val penalty = Settings.loginPenalty.seconds
(whichAuthType match {
case Some(Basic) => cmwell.util.concurrent.delayedTask(penalty)(loginBasic)
case Some(Digest) => cmwell.util.concurrent.delayedTask(penalty / 2)(loginDigest).flatMap(identity)
case None =>
cmwell.util.concurrent.delayedTask(penalty)(
Unauthorized("Please provide your credentials.\n")
.withHeaders("WWW-Authenticate" -> initialDigestHeader.toString)
)
}).recover { case t => exceptionToResponse(t) }
}
// SAML2 POC
// def sso: Action[AnyContent] = RequiresAuthentication("Saml2Client") { profile =>
// Action { implicit request =>
//
// ???
// // this is only a POC.
// // to make it Production-ready, please first implement configSSO method in Global.scala with proper a callback host (the IdP)
//
// val username = profile.getAttribute("uid").asInstanceOf[util.ArrayList[String]].get(0)
// grantTokenWithHtmlRedirectToSPA(username)
// }
// }
// private def grantToken(username: String) = Future(Ok(s"Token is hereby granted for $username.")
// .withHeaders("X-CM-WELL-TOKEN" -> Authentication.generateToken(username)))
private def grantToken(username: String, expiry: Option[DateTime]) = {
Try(Token.generate(authCache, username, expiry)) match {
case Success(token) => Ok(Json.obj("token" -> token))
case Failure(err) => wsutil.exceptionToResponse(err)
}
}
private def grantTokenWithHtmlRedirectToSPA(username: String) =
Redirect(s"/?token=${Token.generate(authCache, username)}")
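  // Parses a short "XdYhZm" duration (e.g. "1d12h30m" or "7d"); units may be
  // omitted and default to zero, and the result is an expiry relative to now.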
private def parseShortFormatDuration(shortFormatDuration: String): DateTime = {
val durs = Seq("d", "h", "m")
.map(
part => part -> s"(\\d+)(?i)$part".r.findFirstMatchIn(shortFormatDuration).map(_.group(1).toInt).getOrElse(0)
)
.toMap
DateTime.now().plusDays(durs("d")).plusHours(durs("h")).plusMinutes(durs("m"))
}
}
|
thomsonreuters/CM-Well
|
server/cmwell-ws/app/controllers/LoginHandler.scala
|
Scala
|
apache-2.0
| 4,689
|
/** *
* Copyright 2014 Rackspace US, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rackspace.com.papi.components.checker
import com.rackspace.com.papi.components.checker.Converters._
import com.rackspace.com.papi.components.checker.servlet.RequestAttributes._
import org.w3c.dom.Document
abstract class MultiSchemaElementsBaseSuite extends BaseValidatorSuite {
val wadl_SimpleSame =
<application xmlns="http://wadl.dev.java.net/2009/02"
xmlns:tstOne="http://www.rackspace.com/repose/wadl/simple/one/test">
<grammars>
<schema elementFormDefault="qualified"
attributeFormDefault="unqualified"
xmlns="http://www.w3.org/2001/XMLSchema"
xmlns:xsd="http://www.w3.org/2001/XMLSchema"
targetNamespace="http://www.rackspace.com/repose/wadl/simple/one/test">
<simpleType name="Progress">
<restriction base="xsd:integer">
<minInclusive value="0"/>
<maxInclusive value="100"/>
</restriction>
</simpleType>
</schema>
<schema elementFormDefault="qualified"
attributeFormDefault="unqualified"
xmlns="http://www.w3.org/2001/XMLSchema"
xmlns:xsd="http://www.w3.org/2001/XMLSchema"
targetNamespace="http://www.rackspace.com/repose/wadl/simple/one/test">
<simpleType name="UUID">
<restriction base="xsd:string">
<length value="36" fixed="true"/>
<pattern value="[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}"/>
</restriction>
</simpleType>
</schema>
</grammars>
<resources base="https://test.rackspace.com">
<resource id="progress" path="test/progress/{progress}">
<param name="progress" style="template" type="tstOne:Progress"/>
<method href="#getMethod"/>
</resource>
<resource id="uuid" path="test/uuid/{uuid}">
<param name="uuid" style="template" type="tstOne:UUID"/>
<method href="#getMethod"/>
</resource>
</resources>
<method id="getMethod" name="GET">
<response status="200 203"/>
</method>
</application>
val wadl_SimpleDiff =
<application xmlns="http://wadl.dev.java.net/2009/02"
xmlns:tstOne="http://www.rackspace.com/repose/wadl/simple/one/test"
xmlns:tstTwo="http://www.rackspace.com/repose/wadl/simple/two/test">
<grammars>
<schema elementFormDefault="qualified"
attributeFormDefault="unqualified"
xmlns="http://www.w3.org/2001/XMLSchema"
xmlns:xsd="http://www.w3.org/2001/XMLSchema"
targetNamespace="http://www.rackspace.com/repose/wadl/simple/one/test">
<simpleType name="Progress">
<restriction base="xsd:integer">
<minInclusive value="0"/>
<maxInclusive value="100"/>
</restriction>
</simpleType>
</schema>
<schema elementFormDefault="qualified"
attributeFormDefault="unqualified"
xmlns="http://www.w3.org/2001/XMLSchema"
xmlns:xsd="http://www.w3.org/2001/XMLSchema"
targetNamespace="http://www.rackspace.com/repose/wadl/simple/two/test">
<simpleType name="UUID">
<restriction base="xsd:string">
<length value="36" fixed="true"/>
<pattern value="[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}"/>
</restriction>
</simpleType>
</schema>
</grammars>
<resources base="https://test.rackspace.com">
<resource id="progress" path="test/progress/{progress}">
<param name="progress" style="template" type="tstOne:Progress"/>
<method href="#getMethod"/>
</resource>
<resource id="uuid" path="test/uuid/{uuid}">
<param name="uuid" style="template" type="tstTwo:UUID"/>
<method href="#getMethod"/>
</resource>
</resources>
<method id="getMethod" name="GET">
<response status="200 203"/>
</method>
</application>
val wadl_ElementSame =
<application xmlns="http://wadl.dev.java.net/2009/02"
xmlns:tstOne="http://www.rackspace.com/repose/wadl/element/one/test">
<grammars>
<schema elementFormDefault="qualified"
attributeFormDefault="unqualified"
xmlns="http://www.w3.org/2001/XMLSchema"
xmlns:xsd="http://www.w3.org/2001/XMLSchema"
xmlns:tstOne="http://www.rackspace.com/repose/wadl/element/one/test"
targetNamespace="http://www.rackspace.com/repose/wadl/element/one/test">
<element name="e" type="tstOne:SampleElement"/>
<complexType name="SampleElement">
<sequence>
<element name="id" type="xsd:integer" minOccurs="0" default="1"/>
</sequence>
</complexType>
</schema>
<schema elementFormDefault="qualified"
attributeFormDefault="unqualified"
xmlns="http://www.w3.org/2001/XMLSchema"
xmlns:xsd="http://www.w3.org/2001/XMLSchema"
xmlns:tstOne="http://www.rackspace.com/repose/wadl/element/one/test"
targetNamespace="http://www.rackspace.com/repose/wadl/element/one/test">
<element name="a" type="tstOne:SampleAttribute"/>
<complexType name="SampleAttribute">
<attribute name="id" type="xsd:integer" use="optional" default="2"/>
</complexType>
</schema>
</grammars>
<resources base="https://test.rackspace.com">
<resource path="/test">
<method name="PUT">
<request>
<representation mediaType="application/xml"/>
</request>
</method>
<method name="POST">
<request>
<representation mediaType="application/xml"/>
</request>
</method>
</resource>
</resources>
</application>
val wadl_ElementDiff =
<application xmlns="http://wadl.dev.java.net/2009/02"
xmlns:tstOne="http://www.rackspace.com/repose/wadl/element/one/test"
xmlns:tstTwo="http://www.rackspace.com/repose/wadl/element/two/test">
<grammars>
<schema elementFormDefault="qualified"
attributeFormDefault="unqualified"
xmlns="http://www.w3.org/2001/XMLSchema"
xmlns:xsd="http://www.w3.org/2001/XMLSchema"
xmlns:tstOne="http://www.rackspace.com/repose/wadl/element/one/test"
targetNamespace="http://www.rackspace.com/repose/wadl/element/one/test">
<element name="e" type="tstOne:SampleElement"/>
<complexType name="SampleElement">
<sequence>
<element name="id" type="xsd:integer" minOccurs="0" default="1"/>
</sequence>
</complexType>
</schema>
<schema elementFormDefault="qualified"
attributeFormDefault="unqualified"
xmlns="http://www.w3.org/2001/XMLSchema"
xmlns:xsd="http://www.w3.org/2001/XMLSchema"
xmlns:tstTwo="http://www.rackspace.com/repose/wadl/element/two/test"
targetNamespace="http://www.rackspace.com/repose/wadl/element/two/test">
<element name="a" type="tstTwo:SampleAttribute"/>
<complexType name="SampleAttribute">
<attribute name="id" type="xsd:integer" use="optional" default="2"/>
</complexType>
</schema>
</grammars>
<resources base="https://test.rackspace.com">
<resource path="/test">
<method name="PUT">
<request>
<representation mediaType="application/xml"/>
</request>
</method>
<method name="POST">
<request>
<representation mediaType="application/xml"/>
</request>
</method>
</resource>
</resources>
</application>
  def sameDiff(same: Boolean): String =
    if (same) "Same" else "Diff"
def createConfigWithSaxonEE(enabled: Boolean): Config = {
val config = new Config
if (enabled) {
config.xsdEngine = "SaxonEE"
}
config.removeDups = true // -d Wadl2Checker default is different from Config default.
config.checkWellFormed = true // -w
config.checkXSDGrammar = true // -x
config.checkPlainParams = true // -p
config.doXSDGrammarTransform = true // -g
config.joinXPathChecks = true // -j
config.checkHeaders = true // -H
config.validateChecker = true // !-D
config.resultHandler = TestConfig.assertHandler
config
}
def assertions_Simple(validator: Validator, same: Boolean, useSaxon: Boolean) {
test("GET with valid Progress should succeed on Simple-" + sameDiff(same)) {
validator.validate(request("GET", "/test/progress/100"), response, chain)
}
test("GET with invalid Progress should fail on Simple-" + sameDiff(same)) {
assertResultFailed(validator.validate(request("GET", "/test/progress/101"), response, chain), 404)
}
test("GET with illegal Progress should fail on Simple-" + sameDiff(same)) {
assertResultFailed(validator.validate(request("GET", "/test/progress/hello"), response, chain), 404)
}
// NOTE: This is a bug that was identified in Xerces and is being reported, but until it is fixed will cause
// failures when multiple Schema elements are present in the same targetNamespace.
if(useSaxon || !same) {
test("GET with valid UUID should succeed on Simple-" + sameDiff(same)) {
validator.validate(request("GET", "/test/uuid/bbe10c88-6477-11e1-84cf-979e24b1498f"), response, chain)
}
}
test("GET with invalid UUID should fail on Simple-" + sameDiff(same)) {
assertResultFailed(validator.validate(request("GET", "/test/uuid/bbe10c88-6477-11e1-84cf-979e24b1498z"), response, chain), 404)
}
test("GET with illegal UUID should fail on Simple-" + sameDiff(same)) {
assertResultFailed(validator.validate(request("GET", "/test/uuid/bbe10c88-6477-11e1-84cf-979e24b1498"), response, chain), 404)
}
}
def assertions_Element(validator: Validator, same: Boolean, useSaxon: Boolean) {
test("PUT with valid XML One should succeed on Element-" + sameDiff(same)) {
val req = request("PUT", "/test", "application/xml",
<e xmlns="http://www.rackspace.com/repose/wadl/element/one/test">
<id>10</id>
</e>
)
validator.validate(req, response, chain)
val dom = req.getAttribute(PARSED_XML).asInstanceOf[Document]
assert((dom \\ "id").text == "10")
}
// NOTE: This is a bug that was identified in Xerces and is being reported, but until it is fixed will cause
// failures when multiple Schema elements are present in the same targetNamespace.
if(useSaxon || !same) {
test("PUT with valid XML Two should succeed on Element-" + sameDiff(same)) {
val req = if (same) {
request("PUT", "/test", "application/xml",
<a xmlns="http://www.rackspace.com/repose/wadl/element/one/test" id="20"/>
)
} else {
request("PUT", "/test", "application/xml",
<a xmlns="http://www.rackspace.com/repose/wadl/element/two/test" id="20"/>
)
}
validator.validate(req, response, chain)
val dom = req.getAttribute(PARSED_XML).asInstanceOf[Document]
assert((dom \\ "@id").text == "20")
}
}
test("PUT with invalid XML should fail on Element-" + sameDiff(same)) {
assertResultFailed(validator.validate(request("PUT", "/test", "application/xml",
<e xmlns="http://www.rackspace.com/repose/wadl/element/one/test">
<junk/>
</e>
), response, chain), 400)
}
test("POST with valid XML One should succeed and default values should be filled in on Element-" + sameDiff(same)) {
val req = request("POST", "/test", "application/xml",
<e xmlns="http://www.rackspace.com/repose/wadl/element/one/test">
<id/>
</e>
)
validator.validate(req, response, chain)
val dom = req.getAttribute(PARSED_XML).asInstanceOf[Document]
assert((dom \\ "id").text == "1")
}
// NOTE: This is a bug that was identified in Xerces and is being reported, but until it is fixed will cause
// failures when multiple Schema elements are present in the same targetNamespace.
if(useSaxon || !same) {
test("POST with valid XML Two should succeed and default values should be filled in on Element-" + sameDiff(same)) {
val req = if (same) {
request("POST", "/test", "application/xml",
<a xmlns="http://www.rackspace.com/repose/wadl/element/one/test"/>
)
} else {
request("POST", "/test", "application/xml",
<a xmlns="http://www.rackspace.com/repose/wadl/element/two/test"/>
)
}
validator.validate(req, response, chain)
val dom = req.getAttribute(PARSED_XML).asInstanceOf[Document]
assert((dom \\ "@id").text == "2")
}
}
}
}
|
wdschei/api-checker
|
core/src/test/scala/com/rackspace/com/papi/components/checker/MultiSchemaElementsBaseSuite.scala
|
Scala
|
apache-2.0
| 14,011
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.runtime.operators
import org.apache.flink.api.common.state.{ValueState, ValueStateDescriptor}
import org.apache.flink.api.common.time.Time
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.KeyedProcessFunction
import org.apache.flink.streaming.api.operators.KeyedProcessOperator
import org.apache.flink.table.api.StreamQueryConfig
import org.apache.flink.table.runtime.aggregate.KeyedProcessFunctionWithCleanupState
import org.apache.flink.table.runtime.harness.HarnessTestBase
import org.apache.flink.table.runtime.harness.HarnessTestBase.TestStreamQueryConfig
import org.apache.flink.util.Collector
import org.junit.Assert.assertEquals
import org.junit.Test
class KeyedProcessFunctionWithCleanupStateTest extends HarnessTestBase {
@Test
def testStateCleaning(): Unit = {
val queryConfig = new TestStreamQueryConfig(Time.milliseconds(5), Time.milliseconds(10))
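    // With minRetentionTime = 5 ms and maxRetentionTime = 10 ms, the function under test
    // (as exercised below) registers a cleanup timer at processingTime + 10 and only
    // re-registers it once processingTime + 5 passes the currently registered cleanup time,
    // which is why state expires at processing times 11, 15 and 18 in this timeline.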
val func = new MockedKeyedProcessFunction(queryConfig)
val operator = new KeyedProcessOperator(func)
val testHarness = createHarnessTester(operator,
new FirstFieldSelector,
TypeInformation.of(classOf[String]))
testHarness.open()
testHarness.setProcessingTime(1)
// add state for key "a"
testHarness.processElement(("a", "payload"), 1)
// add state for key "b"
testHarness.processElement(("b", "payload"), 1)
// check that we have two states (a, b)
// we check for the double number of states, because KeyedProcessFunctionWithCleanupState
// adds one more state per key to hold the cleanup timestamp.
assertEquals(4, testHarness.numKeyedStateEntries())
// advance time and add state for key "c"
testHarness.setProcessingTime(5)
testHarness.processElement(("c", "payload"), 1)
// add state for key "a". Timer is not reset, because it is still within minRetentionTime
testHarness.processElement(("a", "payload"), 1)
// check that we have three states (a, b, c)
assertEquals(6, testHarness.numKeyedStateEntries())
// advance time and update key "b". Timer for "b" is reset to 18
testHarness.setProcessingTime(8)
testHarness.processElement(("b", "payload"), 1)
// check that we have three states (a, b, c)
assertEquals(6, testHarness.numKeyedStateEntries())
// advance time to clear state for key "a"
testHarness.setProcessingTime(11)
// check that we have two states (b, c)
assertEquals(4, testHarness.numKeyedStateEntries())
// advance time to clear state for key "c"
testHarness.setProcessingTime(15)
// check that we have one state (b)
assertEquals(2, testHarness.numKeyedStateEntries())
    // advance time to clear state for key "b"
testHarness.setProcessingTime(18)
// check that we have no states
assertEquals(0, testHarness.numKeyedStateEntries())
testHarness.close()
}
}
private class MockedKeyedProcessFunction(queryConfig: StreamQueryConfig)
extends KeyedProcessFunctionWithCleanupState[String, (String, String), String](queryConfig) {
var state: ValueState[String] = _
override def open(parameters: Configuration): Unit = {
initCleanupTimeState("CleanUpState")
val stateDesc = new ValueStateDescriptor[String]("testState", classOf[String])
state = getRuntimeContext.getState(stateDesc)
}
override def processElement(
value: (String, String),
ctx: KeyedProcessFunction[String, (String, String), String]#Context,
out: Collector[String]): Unit = {
val curTime = ctx.timerService().currentProcessingTime()
processCleanupTimer(ctx, curTime)
state.update(value._2)
}
override def onTimer(
timestamp: Long,
ctx: KeyedProcessFunction[String, (String, String), String]#OnTimerContext,
out: Collector[String]): Unit = {
if (stateCleaningEnabled) {
val cleanupTime = cleanupTimeState.value()
if (null != cleanupTime && timestamp == cleanupTime) {
// clean up
cleanupState(state)
}
}
}
}
|
fhueske/flink
|
flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/operators/KeyedProcessFunctionWithCleanupStateTest.scala
|
Scala
|
apache-2.0
| 4,908
|
import scala.quoted._
def test(using QuoteContext) = {
val a = '{
def z: Int = 5
Macro.ff(z, 5)
}
}
|
som-snytt/dotty
|
tests/disabled/pos-macros/i3898c/quoted_2.scala
|
Scala
|
apache-2.0
| 112
|
package junto
import junto.io._
import junto.graph._
import junto.util.Evaluator
/**
* Given the edge and seed descriptions, create the graph and run modified adsorption.
*/
object Junto {
def main(args: Array[String]) {
val conf = new JuntoOptions(args)
    val separator = if (conf.tabSeparated()) '\t' else ','
val edges = getEdges(conf.edgeFile(), separator)
val seeds = getLabels(conf.seedLabelFile(), separator)
val parameters = AdsorptionParameters(conf.mu1(), conf.mu2(), conf.mu3())
val beta = 2.0
val numIterations = conf.iterations()
val graph = LabelPropGraph(edges, seeds, false)
val (nodeNames, labelNames, estimatedLabels) =
Junto(graph, parameters, numIterations, beta)
conf.evalLabelFile.toOption match {
case Some(evalLabelFile) => {
val evalLabelSequence = getLabels(evalLabelFile, skipHeader = true)
val evalLabels = (for {
LabelSpec(nodeName, label, strength) <- evalLabelSequence
} yield (nodeName -> label)).toMap
val (accuracy, meanReciprocalRank) =
Evaluator.score(nodeNames, labelNames, estimatedLabels, "L1", evalLabels)
println("Accuracy: " + accuracy)
println("MRR: " + meanReciprocalRank)
}
case None => ; // ignore evaluation when evalLabelFile is not specified
}
// Output predictions if an output file is specified.
    // output predictions are comma separated
conf.outputFile.toOption match {
case Some(outputFile) =>
val out = createWriter(outputFile)
out.write("id," + graph.labelNames.mkString(",") + "\\n")
for ((name, distribution) <- graph.nodeNames.zip(estimatedLabels))
          out.write(name + "," + distribution.mkString(",") + "\n")
out.close
}
}
def apply(
graph: LabelPropGraph,
parameters: AdsorptionParameters = AdsorptionParameters(),
numIterations: Int = 10,
beta: Double = 2.0,
isDirected: Boolean = false
) = {
val estimatedLabels = ModifiedAdsorption(graph, parameters, beta)(numIterations)
(graph.nodeNames, graph.labelNames, estimatedLabels)
}
}
|
scalanlp/junto
|
src/main/scala/junto/Junto.scala
|
Scala
|
apache-2.0
| 2,144
|
package org.eknet.sitebag.rest
import scala.xml.PrettyPrinter
import spray.http._
import spray.http.HttpResponse
import spray.httpx.marshalling.ToResponseMarshaller
import porter.model.Ident
import org.eknet.sitebag.{Failure, Success, Result}
import org.eknet.sitebag.model.PageEntry
object RssSupport {
type RssMarshall[T] = T => xml.Node
class EntryRss(uriMaker: PageEntry => Uri) extends RssMarshall[PageEntry] {
def apply(entry: PageEntry) = {
val url = uriMaker(entry)
<item>
<title>{ entry.title }</title>
<link>{ url.toString() }</link>
<guid>{ url.toString() }</guid>
<pubDate>{ entry.created.toRfc1123DateTimeString }</pubDate>
<description>{ entry.content }</description>
{ entry.tags.map(t => <category>{ t.name }</category> ) }
</item>
}
}
def toRss(title: String, description: String, link: Uri, items: Seq[PageEntry])(uriMaker: PageEntry => Uri): xml.NodeSeq = {
val entryRss = new EntryRss(uriMaker)
<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
<channel>
<title>{ title }</title>
<description>{ description }</description>
<link>{ link.toString() }</link>
<atom:link href={ link.toString() } rel="self" type="application/rss+xml" />
<generator>sitebag</generator>
{ items.map(entryRss) }
</channel>
</rss>
}
def mapRss(url: Uri, user: Ident, search: EntrySearch, result: Result[List[PageEntry]])(uriMaker: PageEntry => Uri): Result[xml.NodeSeq] = {
val title = s"${user.name}'s sitebag feed: ${search.tag.tags.map(_.name).mkString(", ")}"
    val descr = s"sitebag - entries by tags"
result mapmap { list => toRss(title, descr, url, list)(uriMaker) }
}
def rssEntity(xmldata: Result[xml.NodeSeq]) = {
val printer = new PrettyPrinter(80, 2)
val rss = MediaTypes.`text/xml`
xmldata match {
case Success(Some(node), _) =>
HttpResponse(
status = StatusCodes.OK,
entity = HttpEntity(ContentType(rss, HttpCharsets.`UTF-8`), """<?xml version="1.0" encoding="utf-8"?>""" +printer.formatNodes(node)))
case Success(None, _) =>
HttpResponse(status = StatusCodes.NotFound, entity = HttpEntity("RSS feed resource not found."))
case Failure(msg, ex) =>
HttpResponse(status = StatusCodes.InternalServerError, entity = HttpEntity(msg))
}
}
implicit val rssMarshaller = ToResponseMarshaller[Result[xml.NodeSeq]] {
(res, ctx) => ctx.marshalTo(rssEntity(res))
}
}
|
eikek/sitebag
|
src/main/scala/org/eknet/sitebag/rest/RssSupport.scala
|
Scala
|
apache-2.0
| 2,545
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pinot.connector.spark.exceptions
private[pinot] case class HttpStatusCodeException(message: String, statusCode: Int)
extends Exception(message) {
def isStatusCodeNotFound: Boolean = statusCode == 404
}
private[pinot] case class PinotException(message: String, throwable: Throwable = None.orNull)
extends Exception(message, throwable)
|
linkedin/pinot
|
pinot-connectors/pinot-spark-connector/src/main/scala/org/apache/pinot/connector/spark/exceptions/exceptions.scala
|
Scala
|
apache-2.0
| 1,170
|
import scala.annotation.tailrec
object Fact {
def main(args : Array[String]) = {
    if(args.length > 0 && args(0).matches("\\d+"))
Console println factorial(args(0).toInt)
else
Console println factorial(3)
}
def factorial (n: Int) = {
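    // The accumulator keeps the recursive call in tail position, so @tailrec can verify that
    // the compiler turns it into a loop rather than growing the call stack.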
@tailrec
def facto(a : Int, acc: Int): Int =
if (a == 0) acc
else facto(a - 1, a * acc)
facto(n, 1)
}
}
|
splacentino/My-Project-Euler
|
other/Fact.scala
|
Scala
|
mit
| 390
|
package com.seanshubin.concurrency.samples.statemachine
object StateMachineSampleApp extends App {
new DependencyInjection {}.runner.run()
}
|
SeanShubin/concurrency-samples
|
statemachine/src/main/scala/com/seanshubin/concurrency/samples/statemachine/StateMachineSampleApp.scala
|
Scala
|
unlicense
| 144
|
package hevs.especial.utils.tests
// A Tour of Scala: Generic Classes
// http://www.scala-lang.org/old/node/113.html
object Var1 {
// Subtyping of generic types is INVARIANT.
// Stack[T] is only a subtype of Stack[S] iff S = T
val stack = new Stack[Int]
// Mutable stacks of an arbitrary element type T
class Stack[T] {
var elems: List[T] = Nil
def push(x: T) {
elems = x :: elems
}
def top: T = elems.head
def pop() {
elems = elems.tail
}
}
stack.push(1)
println(stack.top)
stack.push('a') // 'a' toInt = 97
println(stack.top)
stack.pop()
println(stack.top)
// The type defined by the class Stack[T] is subject to invariant subtyping
// regarding the type parameter.
  // Result
  // 1
  // 97
  // 1
}
// A Tour of Scala: Variances
// http://www.scala-lang.org/old/node/129.html
object Var2 {
// The annotation +T declares type T to be used only in covariant positions.
// -T would declare T to be used only in contravariant positions.
// Stack[T] is a subtype of Stack[S] if T is a subtype of S
// Opposite holds for type parameters that are tagged with a -
class Stack[+A] {
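    // A method parameter is a contravariant position, so a covariant Stack[+A] cannot take A
    // directly in push; the lower bound B >: A widens the element type of the returned stack instead.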
def push[B >: A](elem: B): Stack[B] = new Stack[B] {
override def top: B = elem
override def pop: Stack[B] = Stack.this
override def toString = elem.toString + " " +
Stack.this.toString()
}
override def toString = ""
def top: A = sys.error("no element on stack")
def pop: Stack[A] = sys.error("no element on stack")
}
object VariancesTest extends App {
var s: Stack[Any] = new Stack().push("hello")
s = s.push(new Object())
s = s.push(7)
println("Stack is: " + s)
}
}
|
hevs-isi/especial-frontend
|
src/main/scala/hevs/especial/utils/tests/Variance.scala
|
Scala
|
mit
| 1,699
|
package com.github.ornicar.paginator.test.adapter
import org.scalatest._
import com.github.ornicar.paginator._
import com.github.ornicar.paginator.InMemoryAdapter
class PaginatorTest extends FunSuite {
test("Empty seq") {
val a = makeAdapter[Int]()
Paginator(a, 1, 10).fold( { f => fail(f) }, { p =>
assert(p.currentPage === 1)
assert(p.maxPerPage === 10)
assert(p.nbResults === 0)
assert(p.currentPageResults === Seq())
assert(p.nbPages === 0)
})
}
test("Big seq") {
val a = makeAdapter('a' to 'z')
Paginator(a, 1, 10).fold( { f => fail(f) }, { p =>
assert(p.currentPageResults === ('a' to 'j'))
assert(p.nbResults === 26)
assert(p.nbPages === 3)
})
}
test("Previous page") {
val a = makeAdapter('a' to 'z')
Paginator(a, 1, 10).fold( { f => fail(f) }, { p =>
assert(p.previousPage === None)
assert(p.hasPreviousPage === false)
})
Paginator(a, 2, 10).fold( { f => fail(f) }, { p =>
assert(p.previousPage === Option(1))
assert(p.hasPreviousPage === true)
})
}
test("Next page") {
val a = makeAdapter('a' to 'z')
Paginator(a, 3, 10).fold( { f => fail(f) }, { p =>
assert(p.nextPage === None)
assert(p.hasNextPage === false)
})
Paginator(a, 2, 10).fold( { f => fail(f) }, { p =>
assert(p.nextPage === Option(3))
assert(p.hasNextPage === true)
})
}
private def makeAdapter[A](s: Seq[A] = Seq()) =
new InMemoryAdapter[A](s)
}
|
ornicar/scala-paginator
|
core/src/test/scala/PaginatorTest.scala
|
Scala
|
mit
| 1,510
|
/*
* Copyright 2014-2022 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.atlas.cloudwatch
import java.time.Duration
import java.util.concurrent.TimeUnit
import com.netflix.atlas.core.model.Query
import com.netflix.atlas.core.model.QueryVocabulary
import com.netflix.atlas.core.stacklang.Interpreter
import com.typesafe.config.Config
import com.typesafe.scalalogging.StrictLogging
import software.amazon.awssdk.services.cloudwatch.model.DimensionFilter
import software.amazon.awssdk.services.cloudwatch.model.ListMetricsRequest
/**
* Category of metrics to fetch from CloudWatch. This will typically correspond with
* a CloudWatch namespace, though there may be multiple categories per namespace if
* there is some variation in the behavior. For example, some namespaces will use a
* different period for some metrics.
*
* @param namespace
* CloudWatch namespace for the data.
* @param period
* How frequently data in this category is updated. Atlas is meant for data that
* is continuously reported and requires a consistent step. To minimize confusion
* for CloudWatch data we use the last reported value in CloudWatch as long as it
* is within one period from the polling time. The period is also needed for
* performing rate conversions on some metrics.
* @param endPeriodOffset
* How many periods back from `now` to set the end of the time range.
* @param periodCount
* How many periods total to retrieve.
* @param timeout
* How long the system should interpolate a base value for unreported CloudWatch
* metrics before ceasing to send them. CloudWatch will return 0 metrics for at
* least two cases:
* - No metrics were recorded.
* - The resource has been removed, metrics still show up when listing metrics due
* to the retention window, but the specified time interval for the metric
* statistics request is after the removal.
* @param dimensions
* The dimensions to query for when getting data from CloudWatch. For the
* GetMetricData calls we have to explicitly specify all of the dimensions. In some
* cases CloudWatch has duplicate data for pre-computed aggregates. For example,
* ELB data is reported overall for the load balancer and by zone. For Atlas it
* is better to map in the most granular form and allow the aggregate to be done
* dynamically at query time.
* @param metrics
* The set of metrics to fetch and metadata for how to convert them.
* @param filter
* Query expression used to select the set of metrics which should get published.
* This can sometimes be useful for debugging or if there are many "spammy" metrics
* for a given category.
*/
case class MetricCategory(
namespace: String,
period: Int,
endPeriodOffset: Int,
periodCount: Int,
timeout: Option[Duration],
dimensions: List[String],
metrics: List[MetricDefinition],
filter: Query
) {
/**
* Returns a set of list requests to fetch the metadata for the metrics matching
* this category. As there may be a lot of data in CloudWatch that we are not
* interested in, the list is used to restrict to the subset we actually care
* about rather than a single request fetching everything for the namespace.
*/
def toListRequests: List[(MetricDefinition, ListMetricsRequest)] = {
import scala.jdk.CollectionConverters._
metrics.map { m =>
m -> ListMetricsRequest
.builder()
.namespace(namespace)
.metricName(m.name)
.dimensions(dimensions.map(d => DimensionFilter.builder().name(d).build()).asJava)
.build()
}
}
}
object MetricCategory extends StrictLogging {
private val interpreter = Interpreter(QueryVocabulary.allWords)
private def parseQuery(query: String): Query = {
interpreter.execute(query).stack match {
case (q: Query) :: Nil => q
case _ =>
logger.warn(s"invalid query '$query', using default of ':true'")
Query.True
}
}
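  /**
   * Builds a category from a Typesafe config block. A minimal, illustrative sketch of the
   * expected shape, inferred from the keys read below (the concrete values and the contents
   * of each entry in `metrics` are assumptions; metric entries are parsed by
   * `MetricDefinition.fromConfig`):
   *
   * {{{
   * namespace = "AWS/ELB"
   * period = 1m
   * dimensions = ["LoadBalancerName", "AvailabilityZone"]
   * metrics = [ ... ]
   * filter = ":true"         // optional, defaults to ':true'
   * timeout = 15m            // optional
   * end-period-offset = 1    // optional, default 1
   * period-count = 6         // optional, default 6
   * }}}
   */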
def fromConfig(config: Config): MetricCategory = {
import scala.jdk.CollectionConverters._
val metrics = config.getConfigList("metrics").asScala.toList
val filter =
if (!config.hasPath("filter")) Query.True
else {
parseQuery(config.getString("filter"))
}
val timeout = if (config.hasPath("timeout")) Some(config.getDuration("timeout")) else None
val endPeriodOffset =
if (config.hasPath("end-period-offset")) config.getInt("end-period-offset") else 1
val periodCount = if (config.hasPath("period-count")) config.getInt("period-count") else 6
apply(
namespace = config.getString("namespace"),
period = config.getDuration("period", TimeUnit.SECONDS).toInt,
endPeriodOffset = endPeriodOffset,
periodCount = periodCount,
timeout = timeout,
dimensions = config.getStringList("dimensions").asScala.toList,
metrics = metrics.flatMap(MetricDefinition.fromConfig),
filter = filter
)
}
}
|
Netflix-Skunkworks/iep-apps
|
atlas-cloudwatch/src/main/scala/com/netflix/atlas/cloudwatch/MetricCategory.scala
|
Scala
|
apache-2.0
| 5,586
|
package io.getquill
import com.datastax.oss.driver.api.core.{ CqlSession, CqlSessionBuilder }
import com.typesafe.config.Config
import io.getquill.context.ExecutionInfo
import io.getquill.context.cassandra.util.FutureConversions._
import io.getquill.monad.ScalaFutureIOMonad
import io.getquill.util.{ ContextLogger, LoadConfig }
import scala.jdk.CollectionConverters._
import scala.compat.java8.FutureConverters._
import scala.concurrent.{ ExecutionContext, Future }
class CassandraAsyncContext[N <: NamingStrategy](
naming: N,
session: CqlSession,
preparedStatementCacheSize: Long
)
extends CassandraCqlSessionContext[N](naming, session, preparedStatementCacheSize)
with ScalaFutureIOMonad {
def this(naming: N, config: CassandraContextConfig) = this(naming, config.session, config.preparedStatementCacheSize)
def this(naming: N, config: Config) = this(naming, CassandraContextConfig(config))
def this(naming: N, configPrefix: String) = this(naming, LoadConfig(configPrefix))
private val logger = ContextLogger(classOf[CassandraAsyncContext[_]])
override type Result[T] = Future[T]
override type RunQueryResult[T] = List[T]
override type RunQuerySingleResult[T] = T
override type RunActionResult = Unit
override type RunBatchActionResult = Unit
override type Runner = Unit
override def performIO[T](io: IO[T, _], transactional: Boolean = false)(implicit ec: ExecutionContext): Result[T] = {
if (transactional) logger.underlying.warn("Cassandra doesn't support transactions, ignoring `io.transactional`")
super.performIO(io)
}
def executeQuery[T](cql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor)(info: ExecutionInfo, dc: Runner)(implicit executionContext: ExecutionContext): Result[RunQueryResult[T]] = {
val statement = prepareAsyncAndGetStatement(cql, prepare, this, logger)
statement.map(st => session.execute(st).asScala.toList.map(row => extractor(row, this)))
}
def executeQuerySingle[T](cql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor)(info: ExecutionInfo, dc: Runner)(implicit executionContext: ExecutionContext): Result[RunQuerySingleResult[T]] = {
executeQuery(cql, prepare, extractor)(info, dc).map(handleSingleResult)
}
def executeAction(cql: String, prepare: Prepare = identityPrepare)(info: ExecutionInfo, dc: Runner)(implicit executionContext: ExecutionContext): Result[RunActionResult] = {
val statement = prepareAsyncAndGetStatement(cql, prepare, this, logger)
statement.flatMap(st => session.executeAsync(st).toCompletableFuture.toScala).map(_ => ())
}
def executeBatchAction(groups: List[BatchGroup])(info: ExecutionInfo, dc: Runner)(implicit executionContext: ExecutionContext): Result[RunBatchActionResult] = {
Future.sequence {
groups.flatMap {
case BatchGroup(cql, prepare) =>
prepare.map(executeAction(cql, _)(info, dc))
}
}.map(_ => ())
}
}
|
getquill/quill
|
quill-cassandra/src/main/scala/io/getquill/CassandraAsyncContext.scala
|
Scala
|
apache-2.0
| 3,026
|
/*
* Copyright (C) 2016 VSCT
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fr.vsct.dt.maze.topology
class NodeGroup(val nodes: Seq[DockerClusterNode]) {
def +(other: NodeGroup): NodeGroup = new NodeGroup(this.nodes ++ other.nodes)
def +(other: DockerClusterNode): NodeGroup = new NodeGroup(this.nodes ++ Seq(other))
}
|
voyages-sncf-technologies/maze
|
src/main/scala/fr/vsct/dt/maze/topology/NodeGroup.scala
|
Scala
|
apache-2.0
| 847
|