code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
/*
* Copyright 2011-2022 GatlingCorp (https://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.core.check.jsonpath
import io.gatling.commons.validation._
import io.gatling.core.util.cache.Cache
import io.gatling.jsonpath.JsonPath
import com.fasterxml.jackson.databind.JsonNode
/**
 * Compiles JsonPath expressions, memoising the compilation result, and runs
 * them against Jackson trees.
 *
 * @param cacheMaxCapacity maximum number of compiled expressions retained
 */
final class JsonPaths(cacheMaxCapacity: Long) {

  // Loading cache: on a miss the expression is compiled once and the
  // resulting Validation (success or failure) is cached for reuse.
  private val jsonPathCache = {
    val compile: String => Validation[JsonPath] =
      expression =>
        JsonPath.compile(expression).fold(
          compileError => compileError.reason.failure,
          compiledPath => compiledPath.success
        )
    Cache.newConcurrentLoadingCache(cacheMaxCapacity, compile)
  }

  /** Compiles `expression` (cached) and returns all matching nodes accepted by the JsonFilter. */
  def extractAll[X: JsonFilter](json: JsonNode, expression: String): Validation[Iterator[X]] =
    compileJsonPath(expression).map(_.query(json).collect(JsonFilter[X].filter))

  /** Returns the (cached) compilation result for `expression`. */
  def compileJsonPath(expression: String): Validation[JsonPath] = jsonPathCache.get(expression)
}
| gatling/gatling | gatling-core/src/main/scala/io/gatling/core/check/jsonpath/JsonPaths.scala | Scala | apache-2.0 | 1,437 |
package me.elrod.ph
import monocle.Macro
import scalaz._, Scalaz._
/** An HTTP status line: numeric code plus reason phrase (e.g. 404 "Not Found"). */
case class HTTPStatus(_code: Int, _message: String)

/** Companion: brings the scalaz type-class instances into implicit scope. */
object HTTPStatus extends HTTPStatusInstances
/** scalaz type-class instances for [[HTTPStatus]], built via lens access. */
sealed trait HTTPStatusInstances {
  import HTTPStatusLenses._

  // Two statuses are equal when both code and message match.
  implicit def HTTPStatusEqual: Equal[HTTPStatus] =
    Equal.equal { (a, b) =>
      code.get(a) === code.get(b) && message.get(a) === message.get(b)
    }

  // Ordering considers the numeric code only (the message is ignored).
  implicit def HTTPStatusOrder: Order[HTTPStatus] =
    Order.order { (a, b) =>
      Order[Int].order(code.get(a), code.get(b))
    }

  // Rendered as "<code> <message>".
  implicit def HTTPStatusShow: Show[HTTPStatus] =
    Show.shows { a =>
      s"${code.get(a)} ${message.get(a)}"
    }
}
/** Monocle lenses over the HTTPStatus fields, generated from the field names. */
object HTTPStatusLenses {
  val code = Macro.mkLens[HTTPStatus, Int]("_code")
  val message = Macro.mkLens[HTTPStatus, String]("_message")
}
| relrod/ph | core/src/main/scala/types/Status.scala | Scala | apache-2.0 | 932 |
/** Copyright 2015 TappingStone, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.predictionio.data.storage.hbase
import org.apache.predictionio.data.storage.BaseStorageClient
import org.apache.predictionio.data.storage.StorageClientConfig
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.MasterNotRunningException
import org.apache.hadoop.hbase.ZooKeeperConnectionException
import org.apache.hadoop.hbase.client.HConnectionManager
import org.apache.hadoop.hbase.client.HConnection
import org.apache.hadoop.hbase.client.HBaseAdmin
import grizzled.slf4j.Logging
/**
 * Bundle of the three HBase handles used by the storage layer: the Hadoop
 * configuration, a shared connection, and the admin interface built on it.
 */
case class HBClient(
  val conf: Configuration,
  val connection: HConnection,
  val admin: HBaseAdmin
)
/**
 * Creates and validates the HBase connection used by the storage layer.
 *
 * Construction is side-effecting: it probes for a reachable HBase master and
 * rethrows (after logging a diagnostic) if the cluster cannot be contacted,
 * so a StorageClient instance always wraps a working connection.
 */
class StorageClient(val config: StorageClientConfig)
  extends BaseStorageClient with Logging {
  val conf = HBaseConfiguration.create()
  if (config.test) {
    // use fewer retries and shorter timeout for test mode
    conf.set("hbase.client.retries.number", "1")
    conf.set("zookeeper.session.timeout", "30000");
    conf.set("zookeeper.recovery.retry", "1")
  }
  // Fail fast with an actionable message rather than letting later calls
  // time out obscurely.
  try {
    HBaseAdmin.checkHBaseAvailable(conf)
  } catch {
    case e: MasterNotRunningException =>
      error("HBase master is not running (ZooKeeper ensemble: " +
        conf.get("hbase.zookeeper.quorum") + "). Please make sure that HBase " +
        "is running properly, and that the configuration is pointing at the " +
        "correct ZooKeeper ensemble.")
      throw e
    case e: ZooKeeperConnectionException =>
      error("Cannot connect to ZooKeeper (ZooKeeper ensemble: " +
        conf.get("hbase.zookeeper.quorum") + "). Please make sure that the " +
        "configuration is pointing at the correct ZooKeeper ensemble. By " +
        "default, HBase manages its own ZooKeeper, so if you have not " +
        "configured HBase to use an external ZooKeeper, that means your " +
        "HBase is not started or configured properly.")
      throw e
    case e: Exception => {
      error("Failed to connect to HBase." +
        " Please check if HBase is running properly.")
      throw e
    }
  }
  // Shared connection plus an admin handle, exposed as a single bundle.
  val connection = HConnectionManager.createConnection(conf)
  val client = HBClient(
    conf = conf,
    connection = connection,
    admin = new HBaseAdmin(connection)
  )
  override
  val prefix = "HB"
}
| alex9311/PredictionIO | data/src/main/scala/org/apache/predictionio/data/storage/hbase/StorageClient.scala | Scala | apache-2.0 | 2,898 |
/*
* Copyright (C) 2012-2013 Age Mooij (http://scalapenos.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.scalapenos.riak
/**
 * Fixture classes (with their serializers, deserializers and indexers) used
 * by the secondary-indexes integration spec.  Every case class carries a
 * plain-text payload; what varies is the set of RiakIndex values attached.
 */
object RiakSecondaryIndexesTestData {

  case class ClassWithOneIntIndex(foo: String)
  object ClassWithOneIntIndex {
    val indexes: Set[RiakIndex] = Set(RiakIndex("foos", 1))
    implicit def serializer = plainTextSerializer[ClassWithOneIntIndex](_.foo)
    implicit def deserializer = planTextDeserializer(ClassWithOneIntIndex.apply)
    implicit def indexer = simpleIndexer[ClassWithOneIntIndex](indexes)
  }

  case class ClassWithOneStringIndex(foo: String)
  object ClassWithOneStringIndex {
    val indexes: Set[RiakIndex] = Set(RiakIndex("foos", "bars"))
    implicit def serializer = plainTextSerializer[ClassWithOneStringIndex](_.foo)
    implicit def deserializer = planTextDeserializer(ClassWithOneStringIndex.apply)
    implicit def indexer = simpleIndexer[ClassWithOneStringIndex](indexes)
  }

  case class ClassWithTwoIntIndexes(foo: String)
  object ClassWithTwoIntIndexes {
    val indexes: Set[RiakIndex] = Set(RiakIndex("foos", 1), RiakIndex("bars", 42))
    implicit def serializer = plainTextSerializer[ClassWithTwoIntIndexes](_.foo)
    implicit def deserializer = planTextDeserializer(ClassWithTwoIntIndexes.apply)
    implicit def indexer = simpleIndexer[ClassWithTwoIntIndexes](indexes)
  }

  case class ClassWithTwoIntIndexesWithTheSameName(foo: String)
  object ClassWithTwoIntIndexesWithTheSameName {
    val indexes: Set[RiakIndex] = Set(RiakIndex("foos", 1), RiakIndex("foos", 42))
    implicit def serializer = plainTextSerializer[ClassWithTwoIntIndexesWithTheSameName](_.foo)
    implicit def deserializer = planTextDeserializer(ClassWithTwoIntIndexesWithTheSameName.apply)
    implicit def indexer = simpleIndexer[ClassWithTwoIntIndexesWithTheSameName](indexes)
  }

  case class ClassWithMixedIndexes(foo: String)
  object ClassWithMixedIndexes {
    val indexes: Set[RiakIndex] = Set(
      RiakIndex("foos", 1),
      RiakIndex("foos", 2),
      RiakIndex("foos", "bars"),
      RiakIndex("foos", "barsbars"),
      RiakIndex("bars", 42)
      // these will not be supported until we find a better way to deal with encoding/decoding them in urls and headers
      // RiakIndex("foos", "foos foos"),
      // RiakIndex("foos", "bars, bars"),
      // RiakIndex("foo bars", "bacon")
    )
    implicit def serializer = plainTextSerializer[ClassWithMixedIndexes](_.foo)
    implicit def deserializer = planTextDeserializer[ClassWithMixedIndexes](apply)
    implicit def indexer = simpleIndexer[ClassWithMixedIndexes](indexes)
  }

  // The two "configurable" fixtures carry the index value inside the instance,
  // so the indexer is hand-written instead of the fixed-set simpleIndexer.
  case class ClassWithConfigurableIntIndex(foo: String, indexedBy: Int)
  object ClassWithConfigurableIntIndex {
    val indexName = "foos"
    implicit def serializer = plainTextSerializer[ClassWithConfigurableIntIndex](_.foo)
    implicit def deserializer = planTextDeserializer[ClassWithConfigurableIntIndex](data ⇒ ClassWithConfigurableIntIndex(data, -1))
    implicit def indexer = new RiakIndexer[ClassWithConfigurableIntIndex] {
      def index(t: ClassWithConfigurableIntIndex) = Set(RiakIndex(indexName, t.indexedBy))
    }
  }

  case class ClassWithConfigurableStringIndex(foo: String, indexedBy: String)
  object ClassWithConfigurableStringIndex {
    val indexName = "bars"
    implicit def serializer = plainTextSerializer[ClassWithConfigurableStringIndex](_.foo)
    implicit def deserializer = planTextDeserializer[ClassWithConfigurableStringIndex](data ⇒ ClassWithConfigurableStringIndex(data, ""))
    implicit def indexer = new RiakIndexer[ClassWithConfigurableStringIndex] {
      def index(t: ClassWithConfigurableStringIndex) = Set(RiakIndex(indexName, t.indexedBy))
    }
  }

  // Shared helpers.  NOTE(review): "planTextDeserializer" looks like a typo
  // for "plainTextDeserializer", but the name is part of this object's public
  // surface, so it is documented here rather than renamed.
  def plainTextSerializer[T](ser: T ⇒ String) = new RiakSerializer[T] {
    def serialize(t: T): (String, ContentType) = (ser(t), ContentTypes.`text/plain`)
  }
  def planTextDeserializer[T](d: String ⇒ T) = new RiakDeserializer[T] {
    def deserialize(data: String, contentType: ContentType): T = d(data)
  }
  def simpleIndexer[T](indexes: Set[RiakIndex]) = new RiakIndexer[T] {
    def index(t: T) = indexes
  }
}
| agemooij/riak-scala-client | src/test/scala/com/scalapenos/riak/RiakSecondaryIndexesTestData.scala | Scala | apache-2.0 | 4,645 |
/*
* Copyright 2015-2016 IBM Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package whisk.core.container.test
import java.time.Instant
import java.nio.charset.StandardCharsets
import scala.concurrent.duration._
import org.junit.runner.RunWith
import org.scalatest.BeforeAndAfter
import org.scalatest.BeforeAndAfterAll
import org.scalatest.FlatSpec
import org.scalatest.Matchers
import org.scalatest.junit.JUnitRunner
import org.apache.http.localserver.LocalServerTestBase
import org.apache.http.protocol.HttpRequestHandler
import org.apache.http.HttpResponse
import org.apache.http.HttpRequest
import org.apache.http.protocol.HttpContext
import org.apache.http.entity.StringEntity
import spray.json.JsObject
import whisk.core.container.HttpUtils
import whisk.core.entity.size._
import whisk.core.entity.ActivationResponse._
/**
 * Unit tests for HttpUtils which communicate with containers.
 *
 * A local HTTP server backs the tests; its /init handler can be made to
 * hang, return 500, or echo a canned body via the three mutable knobs below
 * (reset before each test).
 */
@RunWith(classOf[JUnitRunner])
class ContainerConnectionTests
  extends FlatSpec
  with Matchers
  with BeforeAndAfter
  with BeforeAndAfterAll {

  // Knobs read by the mock /init handler on every request.
  var testHang: FiniteDuration = 0.second
  var testStatusOK: Boolean = true
  var testResponse: String = null

  val mockServer = new LocalServerTestBase {
    override def setUp() = {
      super.setUp()
      this.serverBootstrap.registerHandler("/init", new HttpRequestHandler() {
        override def handle(request: HttpRequest, response: HttpResponse, context: HttpContext) = {
          // Optionally stall to exercise client-side timeouts.
          if (testHang.length > 0) {
            Thread.sleep(testHang.toMillis)
          }
          response.setStatusCode(if (testStatusOK) 200 else 500);
          // A null testResponse means "no entity at all" (distinct from "").
          if (testResponse != null) {
            response.setEntity(new StringEntity(testResponse, StandardCharsets.UTF_8))
          }
        }
      })
    }
  }
  mockServer.setUp()
  val httpHost = mockServer.start()
  val hostWithPort = s"${httpHost.getHostName}:${httpHost.getPort}"

  before {
    testHang = 0.second
    testStatusOK = true
    testResponse = null
  }

  override def afterAll = {
    mockServer.shutDown()
  }

  behavior of "Container HTTP Utils"

  it should "not wait longer than set timeout" in {
    val timeout = 5.seconds
    val connection = new HttpUtils(hostWithPort, timeout, 1.B)
    testHang = timeout * 2
    val start = Instant.now()
    val result = connection.post("/init", JsObject(), retry=true)
    val end = Instant.now()
    val waited = end.toEpochMilli - start.toEpochMilli
    result.isLeft shouldBe true
    // The observed wait must lie between one and two timeout windows: the
    // client gave up after its timeout, not after the handler's 2x hang.
    waited should be > timeout.toMillis
    waited should be < (timeout * 2).toMillis
  }

  it should "not truncate responses within limit" in {
    val timeout = 1.minute.toMillis
    val connection = new HttpUtils(hostWithPort, timeout.millis, 50.B)
    Seq(true, false).foreach { code =>
      Seq(null, "", "abc", """{"a":"B"}""", """["a", "b"]""").foreach { r =>
        testStatusOK = code
        testResponse = r
        val result = connection.post("/init", JsObject(), retry=true)
        // Bodies under the 50-byte cap come back verbatim (null -> "").
        result shouldBe Right {
          ContainerResponse(okStatus = testStatusOK, if (r != null) r else "", None)
        }
      }
    }
  }

  it should "truncate responses that exceed limit" in {
    val timeout = 1.minute.toMillis
    val limit = 1.B
    val excess = limit + 1.B
    val connection = new HttpUtils(hostWithPort, timeout.millis, limit)
    Seq(true, false).foreach { code =>
      Seq("abc", """{"a":"B"}""", """["a", "b"]""").foreach { r =>
        testStatusOK = code
        testResponse = r
        val result = connection.post("/init", JsObject(), retry=true)
        // Bodies above the cap are cut to `limit` bytes and the truncation
        // is reported as (actual size, limit).
        result shouldBe Right {
          ContainerResponse(okStatus = testStatusOK, r.take(limit.toBytes.toInt), Some((r.length.B, limit)))
        }
      }
    }
  }
}
| CrowdFlower/incubator-openwhisk | tests/src/test/scala/whisk/core/container/test/ContainerConnectionTests.scala | Scala | apache-2.0 | 4,610 |
/* Copyright 2014-2015 - Big Data Technologies S.R.L. All Rights Reserved. */
package supler.fields
import app.utils.IDGenerator
import japgolly.scalajs.react.ReactElement
import japgolly.scalajs.react.vdom.prefix_<^._
import supler.{GlobalOptions, Field, RenderHint, Validation}
import scala.scalajs.js
//params to be defined
// Value payload of a static field; params are display parameters (semantics
// to be defined, per the original author's note).
case class StaticValue(key: Option[String] = None, params: List[String] = Nil)

object StaticValue {
  /**
   * Builds a [[StaticValue]] from its JSON representation.
   *
   * Only the "key" and "params" entries are meaningful; any other entry is
   * now skipped instead of aborting deserialization with a MatchError.
   */
  def fromJson(json: js.Dictionary[Any]): StaticValue = {
    var field = new StaticValue()
    for ((fname, value) <- json) {
      fname match {
        case "key" =>
          field = field.copy(key = Some(value.asInstanceOf[String]))
        case "params" =>
          field = field.copy(params = value.asInstanceOf[js.Array[_]].map(_.asInstanceOf[String]).toList)
        case _ => // unknown field: ignore (previously threw scala.MatchError)
      }
    }
    field
  }
}
// The "static" supler field: renders a read-only value with a label and never
// contributes anything back to the serialized form (extractValue is None).
case class StaticField(name:String, label:String="", path:String="", validate:Validation[String]=Validation[String](),
                       value:StaticValue=StaticValue(),
                       renderHint:Option[RenderHint]=None, enabled:Boolean=true,
                       onChange:(Field=>Unit), globalOptions: GlobalOptions) extends Field {
  val `type`=StaticField.NAME
  // Renders a Bootstrap-style form group: a <label> plus the value (or the
  // "[EMPTY]" placeholder when no key is present).
  def render(parentRenderHint:Option[RenderHint])(implicit idGenerator: IDGenerator): TagMod = {
    val id = idGenerator.getId
    <.div( ^.className :="form-group",
      <.label(^.`for` := id, label),
      <.div( ^.className :="form-control-static", value.key.getOrElse("[EMPTY]").toString)
      // <div class="text-danger" id="id68"></div>
    )
  }
  // Static fields carry no user input, so nothing is extracted.
  def extractValue: Option[(String, js.Any)] = None
}
object StaticField {
  // JSON type discriminator for this field kind.
  val NAME = "static"

  /**
   * Builds a [[StaticField]] from its JSON representation.
   *
   * Known entries populate the corresponding field; "type" has already been
   * consumed by the dispatcher and is skipped; any other entry is now
   * ignored instead of aborting deserialization with a MatchError.
   */
  def fromJson(json: js.Dictionary[Any], onChange: (Field => Unit), globalOptions: GlobalOptions): StaticField = {
    var field = new StaticField(name = "", onChange = onChange, globalOptions = globalOptions)
    for ((fname, value) <- json) {
      fname match {
        case "label" =>
          field = field.copy(label = value.asInstanceOf[String])
        case "name" =>
          field = field.copy(name = value.asInstanceOf[String])
        case "enabled" =>
          field = field.copy(enabled = value.asInstanceOf[Boolean])
        case "type" => // discriminator, already handled by the caller
        case "path" =>
          field = field.copy(path = value.asInstanceOf[String])
        case "value" =>
          field = field.copy(value = StaticValue.fromJson(value.asInstanceOf[js.Dictionary[Any]]))
        case "validate" =>
          field = field.copy(validate = Validation.fromJson[String](value.asInstanceOf[js.Dictionary[Any]]))
        case "render_hint" =>
          field = field.copy(renderHint = Some(RenderHint.fromJson(value.asInstanceOf[js.Dictionary[Any]])))
        case _ => // unknown field: ignore (previously threw scala.MatchError)
      }
    }
    field
  }
}
package com.olegych.scastie.api
import play.api.libs.json._
object Severity {
  /** play-json codec mapping each severity to/from its simple name ("Info", "Warning", "Error"). */
  implicit object SeverityFormat extends Format[Severity] {
    private val toName: Map[Severity, String] =
      Map(Info -> "Info", Warning -> "Warning", Error -> "Error")
    private val fromName: Map[String, Severity] = toName.map(_.swap)

    def writes(severity: Severity): JsValue = JsString(toName(severity))

    // Any non-string value or unknown name yields an (unannotated) JsError,
    // exactly as before.
    def reads(json: JsValue): JsResult[Severity] =
      json match {
        case JsString(name) =>
          fromName.get(name).fold[JsResult[Severity]](JsError(Seq()))(JsSuccess(_))
        case _ => JsError(Seq())
      }
  }
}
/** Severity level of a compiler diagnostic. */
sealed trait Severity
case object Info extends Severity
case object Warning extends Severity
case object Error extends Severity
object Problem {
  // play-json codec derived from the case-class fields.
  implicit val formatProblem: OFormat[Problem] = Json.format[Problem]
}

/** A single compiler diagnostic: severity, optional source line, and message. */
case class Problem(
  severity: Severity,
  line: Option[Int],
  message: String
)
| scalacenter/scastie | api/src/main/scala/com.olegych.scastie.api/CompilerInfo.scala | Scala | apache-2.0 | 957 |
package io.udash.i18n
object Utils {
  // Test helper: resolves a parameterless translation key to its translated
  // string.
  // NOTE(review): the double `.get` assumes the translation Future has
  // already completed successfully when called — confirm the test setup
  // guarantees this, otherwise it throws.
  def getTranslatedString(tr: TranslationKey0)(implicit lang: Lang, provider: TranslationProvider): String =
    tr().value.get.get.string
}
| UdashFramework/udash-core | i18n/src/test/scala/io/udash/i18n/Utils.scala | Scala | apache-2.0 | 179 |
package org.json4s
package jackson
import com.fasterxml.jackson.core.Version
import com.fasterxml.jackson.databind.Module
import com.fasterxml.jackson.databind.Module.SetupContext
object Json4sModule {
  import scala.util.control.NonFatal

  // Matches versions like "3.2", "3.2.11" or "3.2.11-SNAPSHOT".
  // Triple-quoted strings do no escape processing, so the regex engine must
  // see single backslashes here ("\\d" would match a literal backslash).
  private val VersionRegex = """(\d+)\.(\d+)(?:\.(\d+)(?:\-(.*))?)?""".r

  /** Jackson [[Version]] of this module, derived from the sbt-generated BuildInfo. */
  val version: Version = try {
    val groupId = BuildInfo.organization
    val artifactId = BuildInfo.name
    BuildInfo.version match {
      case VersionRegex(major, minor, patchOpt, snapOpt) =>
        // The patch component is optional (e.g. "3.2") and defaults to 0.
        val patch = Option(patchOpt).map(_.toInt).getOrElse(0)
        new Version(major.toInt, minor.toInt, patch, snapOpt, groupId, artifactId)
      case _ => Version.unknownVersion()
    }
  } catch {
    // Recover only from non-fatal errors; the previous bare `case _` also
    // swallowed fatal Throwables such as OutOfMemoryError.
    case NonFatal(_) => Version.unknownVersion()
  }
}
/** Jackson [[Module]] that wires the json4s JValue (de)serializers into an ObjectMapper. */
class Json4sScalaModule extends Module {

  def getModuleName: String = "Json4sScalaModule"

  def version(): Version = Json4sModule.version

  // Registers both directions: AST -> JSON and JSON -> AST.
  def setupModule(ctxt: SetupContext): Unit = {
    ctxt.addSerializers(JValueSerializerResolver)
    ctxt.addDeserializers(JValueDeserializerResolver)
  }
}
/** Ready-to-use singleton instance of the module. */
object Json4sScalaModule extends Json4sScalaModule
| nornagon/json4s | jackson/src/main/scala/org/json4s/jackson/Json4sScalaModule.scala | Scala | apache-2.0 | 1,066 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// scalastyle:off println
package com.intel.hibench.sparkbench.ml
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.mllib.tree.GradientBoostedTrees
import org.apache.spark.mllib.tree.configuration.BoostingStrategy
import org.apache.spark.mllib.tree.model.GradientBoostedTreesModel
import org.apache.spark.mllib.util.MLUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.mllib.regression.LabeledPoint
object GradientBoostingTree {

  /**
   * Trains a gradient-boosted-trees binary classifier on a LabeledPoint RDD
   * and prints the held-out test error.
   *
   * args: (0) input path of an RDD[LabeledPoint] object file,
   *       (1) number of boosting iterations.
   */
  def main(args: Array[String]): Unit = {
    // Fall back to the defaults ("" / 3) unless exactly two arguments were
    // supplied, mirroring the original behaviour.
    val (inputPath, numIterations) =
      if (args.length == 2) (args(0), args(1).toInt) else ("", 3)
    val numClasses: Int = 2
    val maxDepth: Int = 5

    val conf = new SparkConf().setAppName("GradientBoostingTree")
    val sc = new SparkContext(conf)

    // Load and parse the data file.
    //val data = MLUtils.loadLibSVMFile(sc, "data/mllib/sample_libsvm_data.txt")
    val data: RDD[LabeledPoint] = sc.objectFile(inputPath)

    // Split the data into training and test sets (30% held out for testing).
    val Array(trainingData, testData) = data.randomSplit(Array(0.7, 0.3))

    // Train a GradientBoostedTrees model.  The Classification defaults use
    // LogLoss; an empty categoricalFeaturesInfo treats all features as
    // continuous.
    val boostingStrategy = BoostingStrategy.defaultParams("Classification")
    boostingStrategy.numIterations = numIterations
    boostingStrategy.treeStrategy.numClasses = numClasses
    boostingStrategy.treeStrategy.maxDepth = maxDepth
    boostingStrategy.treeStrategy.categoricalFeaturesInfo = Map[Int, Int]()

    val model = GradientBoostedTrees.train(trainingData, boostingStrategy)

    // Evaluate on the held-out set: fraction of misclassified points.
    val labelAndPreds = testData.map { point =>
      (point.label, model.predict(point.features))
    }
    val testErr = labelAndPreds.filter { case (label, prediction) => label != prediction }.count.toDouble / testData.count()
    println("Test Error = " + testErr)

    sc.stop()
  }
}
| kimihe/Swallow | swallow-benchmark/HiBench-master/sparkbench/ml/src/main/scala/com/intel/sparkbench/ml/GradientBoostingTree.scala | Scala | apache-2.0 | 2,926 |
package io.gustavoamigo.quill.pgsql.encoding.range.numeric
import io.getquill.source.jdbc.JdbcSource
import io.gustavoamigo.quill.pgsql.encoding.GenericDecoder
import scala.collection.immutable.NumericRange
/** JDBC decoders for PostgreSQL numeric range types. */
trait Decoders extends GenericDecoder {
  this: JdbcSource[_, _] =>

  // Matches range literals such as "[1,10)" or "[1.5,2.5]"; group 1 is the
  // lower bound, group 2 the upper bound.  Single backslashes are required:
  // triple-quoted strings do no escape processing.
  private val rangePattern = """\[(\d+\.*\d*),(\d+\.*\d*)[\]|)]""".r

  /**
   * Builds a decoder that extracts both bounds from a range literal and hands
   * them to `transform`.  A literal that does not match the pattern now fails
   * with an informative IllegalArgumentException instead of the bare
   * MatchError the previous non-exhaustive match produced.
   */
  private def decoder[T](transform: (String, String) => T) = decode(s =>
    rangePattern.findFirstMatchIn(s) match {
      case Some(m) => transform(m.group(1), m.group(2))
      case None =>
        throw new IllegalArgumentException(s"Cannot parse numeric range literal: '$s'")
    }
  )

  // NOTE(review): the `- 1` on the int/long/BigInt tuple upper bounds
  // presumably compensates for PostgreSQL normalising discrete ranges to the
  // inclusive-lower/exclusive-upper form `[a,b)` — confirm against pg docs.
  implicit val intTupleDecoder: Decoder[(Int, Int)] = decoder((s1, s2) => (s1.toInt, s2.toInt - 1))
  implicit val intRangeDecoder: Decoder[NumericRange[Int]] = decoder((s1, s2) => Range.Int(s1.toInt, s2.toInt, 1))
  implicit val bigIntTupleDecoder: Decoder[(BigInt, BigInt)] = decoder((s1, s2) => (BigInt(s1), BigInt(s2) - BigInt(1)))
  implicit val bigIntRangeDecoder: Decoder[NumericRange[BigInt]] =
    decoder((s1, s2) => Range.BigInt(BigInt(s1), BigInt(s2), BigInt(1)))
  implicit val longTupleDecoder: Decoder[(Long, Long)] = decoder((s1, s2) => (s1.toLong, s2.toLong - 1))
  implicit val longRangeDecoder: Decoder[NumericRange[Long]] = decoder((s1, s2) => Range.Long(s1.toLong, s2.toLong, 1))
  implicit val doubleTupleDecoder: Decoder[(Double, Double)] = decoder((s1, s2) => (s1.toDouble, s2.toDouble))
  implicit val bigDecimalTupleDecoder: Decoder[(BigDecimal, BigDecimal)] = decoder((s1, s2) => (BigDecimal(s1), BigDecimal(s2)))
  implicit val bigDecimalRangeDecoder: Decoder[NumericRange[BigDecimal]] = decoder((s1, s2) => {
    val (d1, d2) = (BigDecimal(s1), BigDecimal(s2))
    Range.BigDecimal.inclusive(d1, d2, step(d1, d2))
  })

  // Picks a step derived from the precision of the more precise of the two
  // bounds (one unit in its last decimal place).
  private def step(d1: BigDecimal, d2: BigDecimal): BigDecimal = {
    val fraction1 = d1.remainder(BigDecimal(1)).toString.length
    val fraction2 = d2.remainder(BigDecimal(1)).toString.length
    val fraction = if (fraction1 > fraction2) d1 else d2
    BigDecimal(1) / BigDecimal(10).pow(fraction.precision - 1)
  }
}
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter
/**
 * Scala extensions to the Kryo serialization library.
 *
 * Provides type aliases for the core Kryo classes plus implicit adapters
 * that lift plain functions into KryoInstantiator / IKryoRegistrar values.
 */
package object chill {
  type Kryo = com.esotericsoftware.kryo.Kryo
  type KSerializer[T] = com.esotericsoftware.kryo.Serializer[T]
  type Input = com.esotericsoftware.kryo.io.Input
  type Output = com.esotericsoftware.kryo.io.Output

  // Enriches a Kryo with the combinators defined on RichKryo.
  implicit def toRich(k: Kryo): RichKryo = new RichKryo(k)

  // Lifts a () => Kryo factory into a KryoInstantiator.
  implicit def toInstantiator(fn: Function0[Kryo]): KryoInstantiator = new KryoInstantiator {
    override def newKryo: Kryo = fn.apply
  }

  // Lifts a Kryo => Unit side effect into a registrar.
  implicit def toRegistrar(fn: Function1[Kryo, Unit]): IKryoRegistrar = new IKryoRegistrar {
    def apply(k: Kryo): Unit = fn(k)
  }

  // Composes several registrars, applied in iteration order.
  implicit def toRegistrar(items: Iterable[IKryoRegistrar]): IKryoRegistrar = new IKryoRegistrar {
    def apply(k: Kryo): Unit = items.foreach(_.apply(k))
  }

  // Debug aid: warns on stderr when cls is already registered.
  def printIfRegistered(cls: Class[_]): IKryoRegistrar = new IKryoRegistrar {
    def apply(k: Kryo): Unit =
      if (k.alreadyRegistered(cls)) {
        System.err.printf("%s is already registered.", cls.getName)
      }
  }

  // Hard-failing variant of the above.
  def assertNotRegistered(cls: Class[_]): IKryoRegistrar = new IKryoRegistrar {
    def apply(k: Kryo): Unit =
      assert(!k.alreadyRegistered(cls), String.format("%s is already registered.", cls.getName))
  }
}
| twitter/chill | chill-scala/src/main/scala/com/twitter/package.scala | Scala | apache-2.0 | 1,811 |
/*******************************************************************************
Copyright (c) 2013, S-Core.
All rights reserved.
Use is subject to license terms.
This distribution may include materials developed by third parties.
***************************************************************************** */
package kr.ac.kaist.jsaf.analysis.typing.models.jquery
import kr.ac.kaist.jsaf.analysis.typing.AddressManager._
import kr.ac.kaist.jsaf.analysis.typing.domain._
import kr.ac.kaist.jsaf.analysis.typing.domain.{BoolFalse => F, BoolTrue => T}
import kr.ac.kaist.jsaf.analysis.typing.models._
import kr.ac.kaist.jsaf.analysis.typing.{AccessHelper => AH, _}
import scala.Some
import kr.ac.kaist.jsaf.analysis.typing.domain.Heap
import kr.ac.kaist.jsaf.analysis.typing.domain.Context
import kr.ac.kaist.jsaf.analysis.typing.models.AbsBuiltinFunc
import kr.ac.kaist.jsaf.analysis.cfg.{CFGExpr, CFG}
import scala.Some
import kr.ac.kaist.jsaf.analysis.typing.domain.Heap
import kr.ac.kaist.jsaf.analysis.typing.domain.Context
import kr.ac.kaist.jsaf.analysis.typing.models.AbsBuiltinFunc
import kr.ac.kaist.jsaf.analysis.typing.models.DOMCore.DOMNodeList
object JQueryManipulation extends ModelData {
// jQuery.prototype manipulation API entries, modelled as abstract built-in
// functions: (property name, built-in function id with declared arity).
private val prop_proto: List[(String, AbsProperty)] = List(
  ("after", AbsBuiltinFunc("jQuery.prototype.after", 0)),
  ("append", AbsBuiltinFunc("jQuery.prototype.append", 0)),
  ("appendTo", AbsBuiltinFunc("jQuery.prototype.appendTo", 1)),
  ("before", AbsBuiltinFunc("jQuery.prototype.before", 0)),
  ("clone", AbsBuiltinFunc("jQuery.prototype.clone", 2)),
  ("detach", AbsBuiltinFunc("jQuery.prototype.detach", 1)),
  ("empty", AbsBuiltinFunc("jQuery.prototype.empty", 0)),
  ("insertAfter", AbsBuiltinFunc("jQuery.prototype.insertAfter", 1)),
  ("insertBefore", AbsBuiltinFunc("jQuery.prototype.insertBefore", 1)),
  ("prepend", AbsBuiltinFunc("jQuery.prototype.prepend", 0)),
  ("prependTo", AbsBuiltinFunc("jQuery.prototype.prependTo", 1)),
  ("remove", AbsBuiltinFunc("jQuery.prototype.remove", 2)),
  ("replaceAll", AbsBuiltinFunc("jQuery.prototype.replaceAll", 1)),
  ("replaceWith", AbsBuiltinFunc("jQuery.prototype.replaceWith", 1)),
  ("text", AbsBuiltinFunc("jQuery.prototype.text", 1)),
  ("unwrap", AbsBuiltinFunc("jQuery.prototype.unwrap", 0)),
  ("wrap", AbsBuiltinFunc("jQuery.prototype.wrap", 1)),
  ("wrapAll", AbsBuiltinFunc("jQuery.prototype.wrapAll", 1)),
  ("wrapInner", AbsBuiltinFunc("jQuery.prototype.wrapInner", 1))
)
// Installs the manipulation functions above on the jQuery prototype location.
def getInitList(): List[(Loc, List[(String, AbsProperty)])] = List(
  (JQuery.ProtoLoc, prop_proto)
)
def getSemanticMap(): Map[String, SemanticFun] = {
Map(
("jQuery.prototype.after" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
/* jQuery object */
val lset_this = h(SinglePureLocalLoc)("@this")._1._2._2
/* 1st argument */
val v_arg1 = getArgValue(h, ctx, args, "0")
val lset_arg = v_arg1._2
// ignore function, HTMLString, 2nd argument
// argument is HTMLElement
val lset_html = lset_arg.filter((l) => T <= Helper.HasProperty(h, l, AbsString.alpha("nodeType")))
val h_1 =
if (!lset_html.isEmpty)
lset_this.foldLeft(h)((hh, ll) => {
val n_len = Helper.Proto(hh, ll, AbsString.alpha("length"))._1._4
AbsNumber.concretize(n_len) match {
case Some(len) =>
(0 until len.toInt).foldLeft(hh)((hhh, i) => {
val lset_current = Helper.Proto(hhh, ll, AbsString.alpha(i.toString))._2
val lset_target = lset_current.foldLeft(LocSetBot)((ls, l) =>
ls ++ Helper.Proto(hhh, l, AbsString.alpha("parentNode"))._2)
DOMHelper.appendChild(hhh, lset_target, lset_html)
})
case None =>
if (n_len </ NumBot) {
val lset_current = Helper.Proto(hh, ll, NumStr)._2
val lset_target = lset_current.foldLeft(LocSetBot)((ls, l) =>
ls ++ Helper.Proto(hh, l, AbsString.alpha("parentNode"))._2)
DOMHelper.appendChild(hh, lset_target, lset_html)
}
else
hh
}
})
else
HeapBot
// argument is jQuery object
val lset_jq = lset_arg.filter((l) => T <= Helper.HasProperty(h, l, AbsString.alpha("jquery")))
val h_2 =
if (!lset_jq.isEmpty)
lset_this.foldLeft(h)((h1, l1) => {
val n_len = Helper.Proto(h1, l1, AbsString.alpha("length"))._1._4
AbsNumber.concretize(n_len) match {
case Some(len) =>
(0 until len.toInt).foldLeft(h1)((h2, i) => {
val lset_current = Helper.Proto(h2, l1, AbsString.alpha(i.toString))._2
val lset_parent = lset_current.foldLeft(LocSetBot)((ls, l) =>
ls ++ Helper.Proto(h2, l, AbsString.alpha("parentNode"))._2)
lset_jq.foldLeft(h2)((h3, l3) => {
val n_len = Helper.Proto(h3, l3, AbsString.alpha("length"))._1._4
AbsNumber.concretize(n_len) match {
case Some(len) =>
(0 until len.toInt).foldLeft(h3)((h4, j) => {
val lset_child = Helper.Proto(h4, l3, AbsString.alpha(j.toString))._2
DOMHelper.appendChild(h4, lset_parent, lset_child)
})
case None =>
if (n_len </ NumBot) {
val lset_child = Helper.Proto(h3, l3, NumStr)._2
// now length is top
val h5 = lset_parent.foldLeft(h3)((h4, l4) => Helper.PropStore(h4, l4, AbsString.alpha("length"), Value(UInt)))
DOMHelper.appendChild(h5, lset_parent, lset_child)
}
else
h3
}
})
})
case None =>
if (n_len </ NumBot) {
val lset_current = Helper.Proto(h1, l1, NumStr)._2
val lset_target = lset_current.foldLeft(LocSetBot)((ls, l) =>
ls ++ Helper.Proto(h1, l, AbsString.alpha("parentNode"))._2)
lset_jq.foldLeft(h1)((h2, l2) => {
val n_len = Helper.Proto(h2, l2, AbsString.alpha("length"))._1._4
AbsNumber.concretize(n_len) match {
case Some(len) =>
(0 until len.toInt).foldLeft(h2)((h4, i) => {
val lset_child = Helper.Proto(h4, l2, AbsString.alpha(i.toString))._2
DOMHelper.appendChild(h4, lset_target, lset_child)
})
case None =>
if (n_len </ NumBot) {
val lset_child = Helper.Proto(h2, l2, NumStr)._2
DOMHelper.appendChild(h2, lset_target, lset_child)
}
else
h2
}
})
}
else
h1
}
})
else
HeapBot
if (!lset_html.isEmpty || !lset_jq.isEmpty) {
val h_ret = h_1 + h_2
((Helper.ReturnStore(h_ret, Value(lset_this)), ctx), (he, ctxe))
}
else
((HeapBot, ContextBot), (he, ctxe))
})),
    ("jQuery.prototype.append" -> (
      // Abstract semantics of jQuery append(): inserts the argument content
      // at the end of every element of the matched set (`this`).
      (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
        /* jQuery object */
        val lset_this = h(SinglePureLocalLoc)("@this")._1._2._2
        /* 1st argument */
        val v_arg1 = getArgValue(h, ctx, args, "0")
        val lset_arg = v_arg1._2
        // ignore function, HTMLString, 2nd argument
        // argument is HTMLElement: argument locations carrying a "nodeType" property
        val lset_html = lset_arg.filter((l) => T <= Helper.HasProperty(h, l, AbsString.alpha("nodeType")))
        val h_1 =
          if (!lset_html.isEmpty)
            lset_this.foldLeft(h)((hh, ll) => {
              val n_len = Helper.Proto(hh, ll, AbsString.alpha("length"))._1._4
              AbsNumber.concretize(n_len) match {
                // "length" concretizes to a single number: append the node(s)
                // to each indexed element of the jQuery object.
                case Some(len) =>
                  (0 until len.toInt).foldLeft(hh)((hhh, i) => {
                    val lset_target = Helper.Proto(hhh, ll, AbsString.alpha(i.toString))._2
                    DOMHelper.appendChild(hhh, lset_target, lset_html)
                  })
                case None =>
                  // "length" is abstract but not bottom: conservatively use
                  // every numeric-indexed property as a target.
                  if (n_len </ NumBot) {
                    val lset_target = Helper.Proto(hh, ll, NumStr)._2
                    DOMHelper.appendChild(hh, lset_target, lset_html)
                  }
                  else
                    hh
              }
            })
          // argument is HTMLString
          /*
          else if lset_arg is HTMLString
          //HTML string to insert at the end of each element in the set of matched elements.
          //append:function(){return this.domManip(arguments,!0,function(a){this.nodeType===1&&this.appendChild(a)})},
          */
          else
            HeapBot
        // argument is jQuery object: argument locations carrying a "jquery" property
        val lset_jq = lset_arg.filter((l) => T <= Helper.HasProperty(h, l, AbsString.alpha("jquery")))
        val h_2 =
          if (!lset_jq.isEmpty)
            lset_this.foldLeft(h)((h1, l1) => {
              val n_len = Helper.Proto(h1, l1, AbsString.alpha("length"))._1._4
              AbsNumber.concretize(n_len) match {
                // precise length on `this`: each indexed element is a parent.
                case Some(len) =>
                  (0 until len.toInt).foldLeft(h1)((h2, i) => {
                    val lset_parent = Helper.Proto(h2, l1, AbsString.alpha(i.toString))._2
                    lset_jq.foldLeft(h2)((h3, l3) => {
                      // length of the argument jQuery object (shadows outer n_len)
                      val n_len = Helper.Proto(h3, l3, AbsString.alpha("length"))._1._4
                      AbsNumber.concretize(n_len) match {
                        case Some(len) =>
                          (0 until len.toInt).foldLeft(h3)((h4, j) => {
                            val lset_child = Helper.Proto(h4, l3, AbsString.alpha(j.toString))._2
                            DOMHelper.appendChild(h4, lset_parent, lset_child)
                          })
                        case None =>
                          if (n_len </ NumBot) {
                            val lset_child = Helper.Proto(h3, l3, NumStr)._2
                            // now length is top
                            val h5 = lset_parent.foldLeft(h3)((h4, l4) => Helper.PropStore(h4, l4, AbsString.alpha("length"), Value(UInt)))
                            DOMHelper.appendChild(h5, lset_parent, lset_child)
                          }
                          else
                            h3
                      }
                    })
                  })
                case None =>
                  // abstract length on `this`: every numeric-indexed element
                  // of `this` is treated as a parent.
                  if (n_len </ NumBot) {
                    val lset_target = Helper.Proto(h1, l1, NumStr)._2
                    lset_jq.foldLeft(h1)((h2, l2) => {
                      val n_len = Helper.Proto(h2, l2, AbsString.alpha("length"))._1._4
                      AbsNumber.concretize(n_len) match {
                        case Some(len) =>
                          (0 until len.toInt).foldLeft(h2)((h4, i) => {
                            val lset_child = Helper.Proto(h4, l2, AbsString.alpha(i.toString))._2
                            DOMHelper.appendChild(h4, lset_target, lset_child)
                          })
                        case None =>
                          if (n_len </ NumBot) {
                            val lset_child = Helper.Proto(h2, l2, NumStr)._2
                            DOMHelper.appendChild(h2, lset_target, lset_child)
                          }
                          else
                            h2
                      }
                    })
                  }
                  else
                    h1
              }
            })
          else
            HeapBot
        // Join both alternatives; return bottom when neither argument kind applies.
        if (!lset_html.isEmpty || !lset_jq.isEmpty) {
          val h_ret = h_1 + h_2
          ((Helper.ReturnStore(h_ret, Value(lset_this)), ctx), (he, ctxe))
        }
        else
          ((HeapBot, ContextBot), (he, ctxe))
      })),
    ("jQuery.prototype.appendTo" -> (
      // Abstract semantics of jQuery appendTo(): inserts every matched
      // element (`this`) at the end of each target given as the argument.
      (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
        /* jQuery object */
        val lset_this = h(SinglePureLocalLoc)("@this")._1._2._2
        /* 1st argument */
        val v_arg1 = getArgValue(h, ctx, args, "0")
        val lset_arg = v_arg1._2
        // ignore HTMLString
        // argument is HTMLElement: argument locations carrying a "nodeType" property
        val lset_html = lset_arg.filter((l) => T <= Helper.HasProperty(h, l, AbsString.alpha("nodeType")))
        val h_1 =
          if (!lset_html.isEmpty)
            lset_this.foldLeft(h)((hh, ll) => {
              val n_len = Helper.Proto(hh, ll, AbsString.alpha("length"))._1._4
              AbsNumber.concretize(n_len) match {
                // precise length: append each indexed element of `this`
                // to the target node(s).
                case Some(len) =>
                  (0 until len.toInt).foldLeft(hh)((hhh, i) => {
                    val lset_child = Helper.Proto(hhh, ll, AbsString.alpha(i.toString))._2
                    // append
                    DOMHelper.appendChild(hhh, lset_html, lset_child)
                  })
                case None =>
                  // abstract (non-bottom) length: append all numeric-indexed
                  // elements and widen the targets' "length" to any uint.
                  if (n_len </ NumBot) {
                    val lset_child = Helper.Proto(hh, ll, NumStr)._2
                    // now length is top
                    val _h = lset_html.foldLeft(hh)((_h, l) => Helper.PropStore(_h, l, AbsString.alpha("length"), Value(UInt)))
                    // append
                    DOMHelper.appendChild(_h, lset_html, lset_child)
                  }
                  else
                    hh
              }
            })
          else
            HeapBot
        // argument is jQuery object: argument locations carrying a "jquery" property
        val lset_jq = lset_arg.filter((l) => T <= Helper.HasProperty(h, l, AbsString.alpha("jquery")))
        val h_2 =
          if (!lset_jq.isEmpty)
            lset_this.foldLeft(h)((h1, l1) => {
              val n_len = Helper.Proto(h1, l1, AbsString.alpha("length"))._1._4
              AbsNumber.concretize(n_len) match {
                // precise length on `this`: each indexed element is a child
                // inserted into every element of the argument jQuery object.
                case Some(len) =>
                  (0 until len.toInt).foldLeft(h1)((h2, i) => {
                    val lset_child = Helper.Proto(h2, l1, AbsString.alpha(i.toString))._2
                    lset_jq.foldLeft(h2)((h3, l3) => {
                      // length of the argument jQuery object (shadows outer n_len)
                      val n_len = Helper.Proto(h3, l3, AbsString.alpha("length"))._1._4
                      AbsNumber.concretize(n_len) match {
                        case Some(len) =>
                          (0 until len.toInt).foldLeft(h3)((h4, j) => {
                            val lset_parent = Helper.Proto(h4, l3, AbsString.alpha(j.toString))._2
                            DOMHelper.appendChild(h4, lset_parent, lset_child)
                          })
                        case None =>
                          if (n_len </ NumBot) {
                            val lset_parent = Helper.Proto(h3, l3, NumStr)._2
                            DOMHelper.appendChild(h3, lset_parent, lset_child)
                          }
                          else
                            h3
                      }
                    })
                  })
                case None =>
                  // abstract length on `this`: all numeric-indexed elements
                  // of `this` are children.
                  if (n_len </ NumBot) {
                    val lset_child = Helper.Proto(h1, l1, NumStr)._2
                    lset_jq.foldLeft(h1)((h2, l2) => {
                      val n_len = Helper.Proto(h2, l2, AbsString.alpha("length"))._1._4
                      AbsNumber.concretize(n_len) match {
                        case Some(len) =>
                          (0 until len.toInt).foldLeft(h2)((h4, i) => {
                            val lset_target = Helper.Proto(h4, l2, AbsString.alpha(i.toString))._2
                            DOMHelper.appendChild(h4, lset_target, lset_child)
                          })
                        case None =>
                          if (n_len </ NumBot) {
                            val lset_target = Helper.Proto(h2, l2, NumStr)._2
                            DOMHelper.appendChild(h2, lset_target, lset_child)
                          }
                          else
                            h2
                      }
                    })
                  }
                  else
                    h1
              }
            })
          else
            HeapBot
        // Join both alternatives; return bottom when neither argument kind applies.
        if (!lset_html.isEmpty || !lset_jq.isEmpty) {
          val h_ret = h_1 + h_2
          ((Helper.ReturnStore(h_ret, Value(lset_this)), ctx), (he, ctxe))
        }
        else
          ((HeapBot, ContextBot), (he, ctxe))
      })),
    ("jQuery.prototype.empty" -> (
      // Abstract semantics of jQuery empty(): removes all children of every
      // matched element by replacing each element's child-node list object
      // with a fresh, empty one.
      (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
        /* jQuery object */
        val lset_this = h(SinglePureLocalLoc)("@this")._1._2._2
        val h_ret = lset_this.foldLeft(h)((h1, l1) => {
          // all numeric-indexed elements of the jQuery object
          val lset_parent = Helper.Proto(h1, l1, NumStr)._2
          lset_parent.foldLeft(h1)((h2, l2) => {
            val lset_ns = Helper.Proto(h2, l2, AbsString.alpha("childNodes"))._2
            // overwrite every child-node list with an empty list object
            lset_ns.foldLeft(h2)((h3, l3) => h3.update(l3, DOMHelper.NewChildNodeListObj(0)))
          })
        })
        ((Helper.ReturnStore(h_ret, Value(lset_this)), ctx), (he, ctxe))
      })),
    ("jQuery.prototype.prepend" -> (
      // Abstract semantics of jQuery prepend(): inserts the argument content
      // at the beginning of every element of the matched set (`this`).
      // Mirrors the append() model but uses DOMHelper.prependChild.
      (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
        /* jQuery object */
        val lset_this = h(SinglePureLocalLoc)("@this")._1._2._2
        /* 1st argument */
        val v_arg1 = getArgValue(h, ctx, args, "0")
        val lset_arg = v_arg1._2
        // ignore function, HTMLString, 2nd argument
        // argument is HTMLElement: argument locations carrying a "nodeType" property
        val lset_html = lset_arg.filter((l) => T <= Helper.HasProperty(h, l, AbsString.alpha("nodeType")))
        val h_1 =
          if (!lset_html.isEmpty)
            lset_this.foldLeft(h)((hh, ll) => {
              val n_len = Helper.Proto(hh, ll, AbsString.alpha("length"))._1._4
              AbsNumber.concretize(n_len) match {
                // precise length: prepend the node(s) to each indexed element
                case Some(len) =>
                  (0 until len.toInt).foldLeft(hh)((hhh, i) => {
                    val lset_target = Helper.Proto(hhh, ll, AbsString.alpha(i.toString))._2
                    DOMHelper.prependChild(hhh, lset_target, lset_html)
                  })
                case None =>
                  // abstract (non-bottom) length: use all numeric-indexed targets
                  if (n_len </ NumBot) {
                    val lset_target = Helper.Proto(hh, ll, NumStr)._2
                    DOMHelper.prependChild(hh, lset_target, lset_html)
                  }
                  else
                    hh
              }
            })
          else
            HeapBot
        // argument is jQuery object: argument locations carrying a "jquery" property
        val lset_jq = lset_arg.filter((l) => T <= Helper.HasProperty(h, l, AbsString.alpha("jquery")))
        val h_2 =
          if (!lset_jq.isEmpty)
            lset_this.foldLeft(h)((h1, l1) => {
              val n_len = Helper.Proto(h1, l1, AbsString.alpha("length"))._1._4
              AbsNumber.concretize(n_len) match {
                // precise length on `this`: each indexed element is a parent
                case Some(len) =>
                  (0 until len.toInt).foldLeft(h1)((h2, i) => {
                    val lset_parent = Helper.Proto(h2, l1, AbsString.alpha(i.toString))._2
                    lset_jq.foldLeft(h2)((h3, l3) => {
                      // length of the argument jQuery object (shadows outer n_len)
                      val n_len = Helper.Proto(h3, l3, AbsString.alpha("length"))._1._4
                      AbsNumber.concretize(n_len) match {
                        case Some(len) =>
                          (0 until len.toInt).foldLeft(h3)((h4, j) => {
                            val lset_child = Helper.Proto(h4, l3, AbsString.alpha(j.toString))._2
                            DOMHelper.prependChild(h4, lset_parent, lset_child)
                          })
                        case None =>
                          if (n_len </ NumBot) {
                            val lset_child = Helper.Proto(h3, l3, NumStr)._2
                            // now length is top
                            val h5 = lset_parent.foldLeft(h3)((h4, l4) => Helper.PropStore(h4, l4, AbsString.alpha("length"), Value(UInt)))
                            DOMHelper.prependChild(h5, lset_parent, lset_child)
                          }
                          else
                            h3
                      }
                    })
                  })
                case None =>
                  // abstract length on `this`: all numeric-indexed elements are parents
                  if (n_len </ NumBot) {
                    val lset_target = Helper.Proto(h1, l1, NumStr)._2
                    lset_jq.foldLeft(h1)((h2, l2) => {
                      val n_len = Helper.Proto(h2, l2, AbsString.alpha("length"))._1._4
                      AbsNumber.concretize(n_len) match {
                        case Some(len) =>
                          (0 until len.toInt).foldLeft(h2)((h4, i) => {
                            val lset_child = Helper.Proto(h4, l2, AbsString.alpha(i.toString))._2
                            DOMHelper.prependChild(h4, lset_target, lset_child)
                          })
                        case None =>
                          if (n_len </ NumBot) {
                            val lset_child = Helper.Proto(h2, l2, NumStr)._2
                            DOMHelper.prependChild(h2, lset_target, lset_child)
                          }
                          else
                            h2
                      }
                    })
                  }
                  else
                    h1
              }
            })
          else
            HeapBot
        // Join both alternatives; return bottom when neither argument kind applies.
        if (!lset_html.isEmpty || !lset_jq.isEmpty) {
          val h_ret = h_1 + h_2
          ((Helper.ReturnStore(h_ret, Value(lset_this)), ctx), (he, ctxe))
        }
        else
          ((HeapBot, ContextBot), (he, ctxe))
      })),
    ("jQuery.prototype.remove" -> (
      // Abstract semantics of jQuery remove(): detaches every matched
      // element from its parent node.
      (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
        /* jQuery object */
        val lset_this = h(SinglePureLocalLoc)("@this")._1._2._2
        if (!lset_this.isEmpty) {
          val h_ret = lset_this.foldLeft(h)((h1, l1) => {
            val n_len = Helper.Proto(h1, l1, AbsString.alpha("length"))._1._4
            AbsNumber.concretize(n_len) match {
              // precise length: remove each indexed element from its parent(s)
              case Some(n) =>
                (0 until n.toInt).foldLeft(h1)((h2, i) => {
                  val lset_child = Helper.Proto(h2, l1, AbsString.alpha(i.toString))._2
                  // collect all possible parents of the element being removed
                  val lset_parent = lset_child.foldLeft(LocSetBot)((lset, _l) =>
                    lset ++ Helper.Proto(h2, _l, AbsString.alpha("parentNode"))._2)
                  DOMHelper.removeChild(h2, lset_parent, lset_child)
                })
              case None =>
                // abstract (non-bottom) length: remove all numeric-indexed elements
                if (n_len </ NumBot) {
                  val lset_child = Helper.Proto(h1, l1, NumStr)._2
                  val lset_parent = lset_child.foldLeft(LocSetBot)((lset, _l) =>
                    lset ++ Helper.Proto(h1, _l, AbsString.alpha("parentNode"))._2)
                  DOMHelper.removeChild(h1, lset_parent, lset_child)
                }
                else
                  // NOTE(review): yields bottom (not h1) when "length" is bottom,
                  // discarding the heap accumulated by earlier fold steps — confirm intended.
                  HeapBot
            }
          })
          ((Helper.ReturnStore(h_ret, Value(lset_this)), ctx), (he, ctxe))
        }
        else
          ((HeapBot, ContextBot), (he, ctxe))
      })),
    ("jQuery.prototype.text" -> (
      // Abstract semantics of jQuery text(): without an argument it is a
      // getter (modeled as any string); with an argument it is a setter
      // that returns the jQuery object itself.
      (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
        /* jQuery object */
        val lset_this = h(SinglePureLocalLoc)("@this")._1._2._2
        /* 1st argument */
        val v_arg1 = getArgValue(h, ctx, args, "0")
        // getter case: the argument may be undefined, so the result may be any string
        val v_ret1 =
          if (v_arg1._1._1 </ UndefBot)
            Value(StrTop)
          else
            ValueBot
        // setter case: the argument is definitely not undefined
        val v_ret2 =
          if (v_arg1._1._1 <= UndefBot && v_arg1 </ ValueBot)
            // TODO: unsound, ignore DOM manipulation
            Value(lset_this)
          else
            ValueBot
        val v_ret = v_ret1 + v_ret2
        if (v_ret </ ValueBot)
          ((Helper.ReturnStore(h, v_ret), ctx), (he, ctxe))
        else
          ((HeapBot, ContextBot), (he, ctxe))
      }))
)
}
def getPreSemanticMap(): Map[String, SemanticFun] = {
Map()
}
def getDefMap(): Map[String, AccessFun] = {
Map()
}
def getUseMap(): Map[String, AccessFun] = {
Map()
}
}
| daejunpark/jsaf | src/kr/ac/kaist/jsaf/analysis/typing/models/jquery/JQueryManipulation.scala | Scala | bsd-3-clause | 25,983 |
/*
* Copyright 2012-2013 Stephane Godbillon (@sgodbillon) and Zenexity
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package reactivemongo.api.gridfs
import java.io._
import java.util.Arrays
import core.AkkaByteStringWritableBuffer
import play.api.libs.iteratee._
import reactivemongo.api._
import reactivemongo.bson._
import reactivemongo.api.commands.WriteResult
import reactivemongo.core.AkkaReadableBuffer
import reactivemongo.utils._
import scala.concurrent.{ ExecutionContext, Future }
import reactivemongo.api.collections.GenericCollectionProducer
import reactivemongo.api.collections.GenericCollection
/** Placeholder companion for the package object; currently defines nothing. */
object `package` {
}
object Implicits {
  /** A default `BSONReader` for `ReadFile`. */
  implicit object DefaultReadFileReader extends BSONDocumentReader[ReadFile[BSONValue]] {
    import DefaultBSONHandlers._
    // Materializes the standard GridFS "files" document.
    // Note: "_id", "filename", "chunkSize" and "length" are treated as
    // mandatory — `.get` throws if any of them is missing from the document.
    def read(doc: BSONDocument) = {
      DefaultReadFile(
        doc.getAs[BSONValue]("_id").get,
        doc.getAs[BSONString]("contentType").map(_.value),
        doc.getAs[BSONString]("filename").map(_.value).get,
        doc.getAs[BSONNumberLike]("uploadDate").map(_.toLong),
        doc.getAs[BSONNumberLike]("chunkSize").map(_.toInt).get,
        doc.getAs[BSONNumberLike]("length").map(_.toInt).get,
        doc.getAs[BSONString]("md5").map(_.value),
        doc.getAs[BSONDocument]("metadata").getOrElse(BSONDocument()),
        doc)
    }
  }
}
/** Metadata that cannot be customized. */
trait ComputedMetadata {
  /** Length of the file, in bytes. */
  def length: Int

  /** Size of the chunks of this file, in bytes. */
  def chunkSize: Int

  /** MD5 hash of this file, if one was computed. */
  def md5: Option[String]
}
/**
 * Common metadata.
 * @tparam Id Type of the id of this file (generally `BSONObjectID` or `BSONValue`).
 */
trait BasicMetadata[+Id <: BSONValue] {
  /** Id of this file. */
  def id: Id

  /** Name of this file. */
  def filename: String

  /** Date when this file was uploaded (milliseconds since the epoch), if known. */
  def uploadDate: Option[Long]

  /** Content type of this file, if known. */
  def contentType: Option[String]
}
/** Custom metadata (generic trait) */
trait CustomMetadata {
  /** A BSONDocument holding all the metadata that are not standard. */
  def metadata: BSONDocument
}
/**
 * A file that will be saved in a GridFS store.
 * @tparam Id Type of the id of this file (generally `BSONObjectID` or `BSONValue`).
 */
trait FileToSave[+Id <: BSONValue] extends BasicMetadata[Id] with CustomMetadata
/** A default implementation of `FileToSave[BSONValue]`. A fresh `BSONObjectID` is generated when no id is given. */
case class DefaultFileToSave(
  filename: String,
  contentType: Option[String] = None,
  uploadDate: Option[Long] = None,
  metadata: BSONDocument = BSONDocument(),
  id: BSONValue = BSONObjectID.generate) extends FileToSave[BSONValue]
/**
 * A file read from a GridFS store.
 * @tparam Id Type of the id of this file (generally `BSONObjectID` or `BSONValue`).
 */
trait ReadFile[+Id <: BSONValue] extends BasicMetadata[Id] with CustomMetadata with ComputedMetadata
/** A default implementation of `ReadFile[BSONValue]`. `original` keeps the raw "files" document it was read from. */
case class DefaultReadFile(
  id: BSONValue,
  contentType: Option[String],
  filename: String,
  uploadDate: Option[Long],
  chunkSize: Int,
  length: Int,
  md5: Option[String],
  metadata: BSONDocument,
  original: BSONDocument) extends ReadFile[BSONValue]
/**
 * A GridFS store.
 * @param db The database where this store is located.
 * @param prefix The prefix of this store. The `files` and `chunks` collections will be actually named `prefix.files` and `prefix.chunks`.
 */
class GridFS[P <: SerializationPack with Singleton](db: DB with DBMetaCommands, prefix: String = "fs")(implicit producer: GenericCollectionProducer[P, GenericCollection[P]] = collections.bson.BSONCollectionProducer) {
  import indexes._
  import IndexType._

  /** The `files` collection */
  val files = db(prefix + ".files")(producer)

  /** The `chunks` collection */
  val chunks = db(prefix + ".chunks")(producer)

  val pack: files.pack.type = files.pack

  /**
   * Finds the files matching the given selector.
   *
   * @param selector The document to select the files to return
   *
   * @tparam S The type of the selector document. An implicit `Writer[S]` must be in the scope.
   */ // TODO More generic deserializers ?
  def find[S, T <: ReadFile[_]](selector: S)(implicit sWriter: pack.Writer[S], readFileReader: pack.Reader[T], ctx: ExecutionContext, cp: CursorProducer[T]): cp.ProducedCursor = files.find(selector).cursor

  /**
   * Saves the content provided by the given enumerator with the given metadata.
   *
   * @param enumerator Producer of content.
   * @param file Metadata of the file to store.
   * @param chunkSize Size of the chunks. Defaults to 256kB.
   *
   * @return A future of a ReadFile[Id].
   */
  def save[Id <: BSONValue](enumerator: Enumerator[Array[Byte]], file: FileToSave[Id], chunkSize: Int = 262144)(implicit readFileReader: pack.Reader[ReadFile[Id]], ctx: ExecutionContext): Future[ReadFile[Id]] = {
    // The iteratee yields a Future once the last chunk is flushed; flatten it.
    (enumerator |>>> iteratee(file, chunkSize)).flatMap(f => f)
  }

  import reactivemongo.api.collections.bson.{ BSONCollection, BSONCollectionProducer }

  /**
   * Gets an `Iteratee` that will consume data to put into a GridFS store.
   * @param file Metadata of the file to store.
   * @param chunkSize Size of the chunks. Defaults to 256kB.
   *
   * @tparam Id Type of the id of this file (generally `BSONObjectID` or `BSONValue`).
   *
   * @return An `Iteratee` that will consume data to put into a GridFS store.
   */
  def iteratee[Id <: BSONValue](file: FileToSave[Id], chunkSize: Int = 262144)(implicit readFileReader: pack.Reader[ReadFile[Id]], ctx: ExecutionContext): Iteratee[Array[Byte], Future[ReadFile[Id]]] = {
    implicit val ec = db.connection.actorSystem

    // Accumulator of the streaming upload: `previous` holds the bytes that do
    // not yet fill a whole chunk, `n` the index of the next chunk to write and
    // `length` the total number of bytes consumed so far.
    // NOTE(review): `md` is threaded through but never updated (see the
    // commented-out call below), so no "md5" field is ever written — confirm
    // whether MD5 support is intentionally disabled.
    case class Chunk(
        previous: Array[Byte] = new Array(0),
        n: Int = 0,
        md: java.security.MessageDigest = java.security.MessageDigest.getInstance("MD5"),
        length: Int = 0) {
      /** Consumes one enumerated block, flushing every complete chunk to the store. */
      def feed(chunk: Array[Byte]): Future[Chunk] = {
        val wholeChunk = concat(previous, chunk)
        val normalizedChunkNumber = wholeChunk.length / chunkSize
        // logger.debug("wholeChunk size is " + wholeChunk.length + " => " + normalizedChunkNumber)
        val zipped = for (i <- 0 until normalizedChunkNumber) yield Arrays.copyOfRange(wholeChunk, i * chunkSize, (i + 1) * chunkSize) -> i
        // Bytes that do not fill a complete chunk are carried over to the next feed.
        val left = Arrays.copyOfRange(wholeChunk, normalizedChunkNumber * chunkSize, wholeChunk.length)
        Future.traverse(zipped) { ci =>
          writeChunk(n + ci._2, ci._1)
        }.map { _ =>
          // logger.debug("all futures for the last given chunk are redeemed.")
          Chunk(
            if (left.isEmpty) Array.empty else left,
            n + normalizedChunkNumber,
            md, //{ md.update(chunk) ; md },
            length + chunk.length)
        }
      }

      /** Flushes the trailing partial chunk, then writes the "files" metadata document. */
      def finish(): Future[ReadFile[Id]] = {
        import DefaultBSONHandlers._
        // logger.debug("writing last chunk (n=" + n + ")!")
        val uploadDate = file.uploadDate.getOrElse(System.currentTimeMillis)
        writeChunk(n, previous).flatMap { f =>
          val bson = BSONDocument(
            "_id" -> file.id.asInstanceOf[BSONValue],
            "filename" -> BSONString(file.filename),
            "chunkSize" -> BSONInteger(chunkSize),
            "length" -> BSONInteger(length),
            "uploadDate" -> BSONDateTime(uploadDate),
            "contentType" -> file.contentType.map(BSONString(_)),
            "metadata" -> option(!file.metadata.isEmpty, file.metadata))
          files.as[BSONCollection]().insert(bson).map { _ =>
            // Round-trips the document through a buffer to read it back with
            // the pack's deserializer.
            val buf = AkkaByteStringWritableBuffer()
            BSONSerializationPack.writeToBuffer(buf, bson)
            // TODO: fix
            pack.readAndDeserialize(new AkkaReadableBuffer(buf.builder.result()), readFileReader)
          }
        }
      }

      /** Inserts one "chunks" document (`files_id`, `n`, binary `data`). */
      def writeChunk(n: Int, array: Array[Byte]) = {
        // logger.debug("writing chunk " + n)
        val bson = {
          import DefaultBSONHandlers._
          BSONDocument(
            "files_id" -> file.id.asInstanceOf[BSONValue],
            "n" -> BSONInteger(n),
            "data" -> BSONBinary(array, Subtype.GenericBinarySubtype))
        }
        chunks.as[BSONCollection]().insert(bson)
      }
    }

    Iteratee.foldM(Chunk()) { (previous, chunk: Array[Byte]) =>
      // logger.debug("processing new enumerated chunk from n=" + previous.n + "...\\n")
      previous.feed(chunk)
    }.map(_.finish)
  }

  /** Produces an enumerator of chunks of bytes from the `chunks` collection matching the given file metadata. */
  def enumerate(file: ReadFile[_ <: BSONValue])(implicit ctx: ExecutionContext): Enumerator[Array[Byte]] = {
    import DefaultBSONHandlers._
    // Selects every chunk of the file, ordered by chunk index `n`.
    val selector = BSONDocument(
      "$query" -> BSONDocument(
        "files_id" -> file.id,
        "n" -> BSONDocument(
          "$gte" -> BSONInteger(0),
          "$lte" -> BSONInteger(file.length / file.chunkSize + (if (file.length % file.chunkSize > 0) 1 else 0)))),
      "$orderby" -> BSONDocument(
        "n" -> BSONInteger(1)))

    val cursor = chunks.as[BSONCollection]().find(selector).cursor
    cursor.enumerate() &> Enumeratee.map { doc =>
      doc.get("data").flatMap {
        case BSONBinary(data, _) => {
          val array = new Array[Byte](data.readable)
          data.slice(data.readable).readBytes(array)
          Some(array)
        }
        case _ => None
      }.getOrElse {
        // logger.error("not a chunk! failed assertion: data field is missing")
        throw new RuntimeException("not a chunk! failed assertion: data field is missing")
      }
    }
  }

  /** Reads the given file and writes its contents to the given OutputStream */
  def readToOutputStream(file: ReadFile[_ <: BSONValue], out: OutputStream)(implicit ctx: ExecutionContext): Future[Unit] = {
    enumerate(file) |>>> Iteratee.foreach { chunk =>
      out.write(chunk)
    }
  }

  /** Writes the data provided by the given InputStream to the given file, splitting it into chunks of `chunkSize` bytes. */
  def writeFromInputStream[Id <: BSONValue](file: FileToSave[Id], input: InputStream, chunkSize: Int = 262144)(implicit readFileReader: pack.Reader[ReadFile[Id]], ctx: ExecutionContext): Future[ReadFile[Id]] = {
    // Fix: forward `chunkSize` to `save` as well. Previously it only sized the
    // stream read buffer, so a caller-supplied chunk size was silently ignored
    // when splitting the data into stored GridFS chunks.
    save(Enumerator.fromStream(input, chunkSize), file, chunkSize)
  }

  /**
   * Removes a file from this store.
   * Note that if the file does not actually exist, the returned future will not hold an error.
   *
   * @param file The file entry to remove from this store.
   */
  def remove[Id <: BSONValue](file: BasicMetadata[Id])(implicit ctx: ExecutionContext): Future[WriteResult] = remove(file.id)

  /**
   * Removes a file from this store.
   * Note that if the file does not actually exist, the returned future will not hold an error.
   *
   * @param id The file id to remove from this store.
   */
  def remove(id: BSONValue)(implicit ctx: ExecutionContext): Future[WriteResult] = {
    import DefaultBSONHandlers._
    // Remove the data chunks first, then the metadata document.
    chunks.as[BSONCollection]().remove(BSONDocument("files_id" -> id)).flatMap { _ =>
      files.as[BSONCollection]().remove(BSONDocument("_id" -> id))
    }
  }

  /**
   * Creates the needed index on the `chunks` collection, if none.
   *
   * Please note that you should really consider reading [[http://www.mongodb.org/display/DOCS/Indexes]] before doing this, especially in production.
   *
   * @return A future containing true if the index was created, false if it already exists.
   */
  def ensureIndex()(implicit ctx: ExecutionContext): Future[Boolean] =
    db.indexesManager.onCollection(prefix + ".chunks").ensure(Index(List("files_id" -> Ascending, "n" -> Ascending), unique = true))
}
object GridFS {
  /** Creates a [[GridFS]] store on `db`, naming its collections after `prefix`. */
  def apply[P <: SerializationPack with Singleton](db: DB with DBMetaCommands, prefix: String = "fs")(implicit producer: GenericCollectionProducer[P, GenericCollection[P]] = collections.bson.BSONCollectionProducer) =
    new GridFS(db, prefix)(producer)
}
| sh1ng/ReactiveMongo | driver/src/main/scala/api/gridfs.scala | Scala | apache-2.0 | 12,404 |
package codegen.json.serialization
import codegen.Constants
import codegen.functions._
import datamodel._
object MessageJSONPrettyStringSerializer {

  // Parameter names used in the generated C function's prototype and body.
  private val messageParam = "obj"
  private val jsonOutputParam = "json_out"

  /**
   * Creates a function to serialize a message object to a formatted
   * JSON string. This function is non-static and exported.
   * @param message Message to serialize
   * @return Definition of function to serialize a message to a formatted
   *         JSON string
   */
  def apply(message: Message): FunctionDefinition = {
    FunctionDefinition(
      name = name(message.name),
      documentation(message),
      prototype(message),
      body(message)
    )
  }

  /**
   * Gets the name of the function to serialize messages to a formatted
   * JSON string
   * @param messageName Name of message to serialize
   * @return Name of function to serialize messages to a formatted JSON string
   */
  def name(messageName: String): String = {
    s"${messageName}_json_serialize_pretty"
  }

  /**
   * @param message Message to serialize
   * @return Documentation of function to serialize messages to a formatted JSON string
   */
  private def documentation(message: Message): FunctionDocumentation = {
    FunctionDocumentation(
      shortSummary = s"Pretty print a ${message.name} to JSON",
      description = s"Serializes a ${message.name} to a formatted JSON string. The caller must free $jsonOutputParam."
    )
  }

  /**
   * @param message Message to serialize
   * @return Prototype of function to serialize messages to a formatted JSON string
   */
  private def prototype(message: Message): FunctionPrototype = {
    FunctionPrototype(
      isStatic = false,
      returnType = Constants.defaultBooleanCType,
      parameters = List(
        FunctionParameter(paramType = message.name + " const*", paramName = messageParam),
        FunctionParameter(paramType = Constants.defaultCharacterCType + "**", paramName = jsonOutputParam)
      )
    )
  }

  /**
   * Builds the C function body: it serializes the message into a cJSON tree
   * (via the object serializer), pretty-prints the tree with cJSON_Print
   * into the output parameter, and always frees the tree before returning.
   * @param message Message to serialize
   * @return Body of function to serialize messages to a formatted JSON string
   */
  private def body(message: Message): String = {
    val objectSerializer = MessageJSONObjectSerializer.name(message.name)

    s"""${Constants.defaultBooleanCType} success;
    |cJSON* json_root;
    |
    |json_root = NULL;
    |*$jsonOutputParam = NULL;
    |
    |success = $objectSerializer( $messageParam, &json_root );
    |
    |if( success )
    |    {
    |    *$jsonOutputParam = cJSON_Print( json_root );
    |    success = ( NULL != *$jsonOutputParam );
    |    }
    |
    |cJSON_Delete( json_root );
    |
    |return success;""".stripMargin
  }
}
package com.chobostudy.audtjddld.datastructure
import com.chobostudy.datastructure._
/**
* The why solution for page 55 to be trait.
*
* The reason enables the other packages to use it.
*
* @author audtjddld
* @since 08/23/2017
*/
trait SolutionP55 {

  /*
   A "subsequence" here is contiguous: List(1,2), List(2,3) and List(4) are
   all subsequences of List(1,2,3,4).
   */
  // exercise 3.24
  /**
   * Checks whether `sub` occurs as a contiguous subsequence of `sup`.
   *
   * Fix: the previous implementation advanced through `sub` even on a
   * mismatch, so e.g. hasSubsequence(List(1,2,1,2,3), List(2,3)) wrongly
   * returned false. This version retries the match from every suffix of
   * `sup`. The empty list is considered a subsequence of any list.
   */
  def hasSubsequence[A](sup: List[A], sub: List[A]): Boolean = {
    // Does `l` start with every element of `prefix`, in order?
    @annotation.tailrec
    def startsWithAll(l: List[A], prefix: List[A]): Boolean = (l, prefix) match {
      case (_, Nil) => true
      case (Cons(h1, t1), Cons(h2, t2)) if h1 == h2 => startsWithAll(t1, t2)
      case _ => false
    }

    // Slide over every suffix of `sup` until one starts with `sub`.
    @annotation.tailrec
    def loop(l: List[A]): Boolean = l match {
      case _ if startsWithAll(l, sub) => true
      case Cons(_, t) => loop(t)
      case Nil => false
    }

    loop(sup)
  }

  /**
   * Returns the suffix of `sup` beginning at the first element equal to the
   * head of `sub`, or `Nil` when `sub` is empty or no such element exists.
   * (Behavior kept unchanged for compatibility with existing callers.)
   */
  def startWith[A](sup: List[A], sub: List[A]): List[A] = sub match {
    case Cons(h2, t2) => {
      @annotation.tailrec
      def loop(l: List[A], h2: A): List[A] = l match {
        case Cons(h, t) => {
          if (h == h2) Cons(h, t) else loop(t, h2)
        }
        case Nil => Nil
      }
      loop(sup, h2)
    }
    case Nil => {
      Nil
    }
  }
}
object SolutionP55 extends ListFactory with SolutionP55 {}
| codechobostudy/FPIS | src/main/scala/com/chobostudy/audtjddld/datastructure/SolutionP55.scala | Scala | apache-2.0 | 1,320 |
package org.marxc.ast
abstract class StatementNode extends AstNode | iogr/MarxC | src/main/scala/org/marxc/ast/StatementNode.scala | Scala | apache-2.0 | 67 |
package core
import akka.actor.{Props, ActorSystem}
import scala.annotation.tailrec
object Main extends App {

  import Commands._

  val system = ActorSystem()
  // Performs sentiment analysis on received tweets and prints the results.
  val sentiment = system.actorOf(Props(new SentimentAnalysisActor with CSVLoadedSentimentSets with AnsiConsoleSentimentOutput))
  // Streams tweets from Twitter and forwards them to the sentiment actor.
  val stream = system.actorOf(Props(new TweetStreamerActor(TweetStreamerActor.twitterUri, sentiment) with OAuthTwitterAuthorization))

  /**
   * Reads commands from stdin until "quit" or end-of-stream.
   *
   * Fix: `Console.readLine()` returns null once stdin is exhausted; null
   * matched neither command pattern, so the loop previously fell into the
   * wildcard case and spun forever printing the error line. End-of-stream
   * is now treated as an implicit quit.
   */
  @tailrec
  private def commandLoop(): Unit = {
    Console.readLine() match {
      case null | QuitCommand => return
      case TrackCommand(query) => stream ! query
      case _ => println("WTF??!!")
    }

    commandLoop()
  }

  // start processing the commands
  commandLoop()
}
object Commands {
  // Exact command that terminates the interactive loop.
  val QuitCommand = "quit"
  // Extractor for "track <query>" commands; the group captures the query text.
  val TrackCommand = "track (.*)".r
}
| frankfarrell/irelandweather | src/main/scala/core/main.scala | Scala | mit | 820 |
/*
Copyright 2010 the original author or authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package net.gumbix.hl7dsl.DSL
import org.hl7.rim.{RimObjectFactory, NonPersonLivingSubject}
import org.hl7.types._
/**
 * Wrapper Class for the RIM Class "NonPersonLivingSubject"
 * @author Ahmet Gül (guel.ahmet@hotmail.de)
 */
class NonPersonLivingSubjectDSL(nonLivingSubject: NonPersonLivingSubject)
        extends LivingSubjectDSL(nonLivingSubject) {

  /** Creates a wrapper around a freshly instantiated RIM "NonPersonLivingSubject". */
  def this() = {
    this (RimObjectFactory.getInstance.createRimObject("NonPersonLivingSubject").asInstanceOf[NonPersonLivingSubject])
  }

  /**
   * Delegates to the wrapped subject's strainText property.
   * @return ED
   */
  def strainText: ED = nonLivingSubject.getStrainText

  /** Sets the wrapped subject's strainText property. */
  def strainText_=(v: ED) {
    nonLivingSubject.setStrainText(v)
  }

  /**
   * Delegates to the wrapped subject's genderStatusCode property.
   * @return CE
   */
  def genderStatusCode: CE = nonLivingSubject.getGenderStatusCode

  /** Sets the wrapped subject's genderStatusCode property. */
  def genderStatusCode_=(v: CE) {
    nonLivingSubject.setGenderStatusCode(v)
  }
}
import sbt._
import Keys._
import sbtrelease.ReleaseStateTransformations._
import sbtrelease.ReleasePlugin.autoImport._
import xerial.sbt.Sonatype._
import com.typesafe.sbt.pgp.PgpKeys
object Common {

  /** Hash of the current git commit; falls back to "master" when git is unavailable. */
  private def gitHash: String = scala.util.Try(
    sys.process.Process("git rev-parse HEAD").lines_!.head
  ).getOrElse("master")

  // Warnings only understood by Scala 2.11+; added conditionally below and
  // stripped again for console sessions at the bottom of commonSettings.
  private[this] val unusedWarnings = (
    "-Ywarn-unused" ::
    "-Ywarn-unused-import" ::
    Nil
  )

  private[this] val scala211 = "2.11.8"

  lazy val commonSettings = Seq(
    sonatypeSettings
  ).flatten ++ Seq(
    scalaVersion := scala211,
    crossScalaVersions := Seq("2.10.6", scala211, "2.12.1"),
    resolvers += Opts.resolver.sonatypeReleases,
    scalacOptions ++= (
      "-deprecation" ::
      "-unchecked" ::
      "-Xlint" ::
      "-feature" ::
      "-language:existentials" ::
      "-language:higherKinds" ::
      "-language:implicitConversions" ::
      "-language:reflectiveCalls" ::
      Nil
    ),
    // Enable the unused-warning flags only for Scala 2.11 and later.
    scalacOptions ++= PartialFunction.condOpt(CrossVersion.partialVersion(scalaVersion.value)){
      case Some((2, v)) if v >= 11 => unusedWarnings
    }.toList.flatten,
    // Release pipeline: verify, bump version, sync README, tag, publish
    // signed artifacts for every cross-built Scala version, then push.
    releaseProcess := Seq[ReleaseStep](
      checkSnapshotDependencies,
      inquireVersions,
      runClean,
      runTest,
      setReleaseVersion,
      commitReleaseVersion,
      UpdateReadme.updateReadmeProcess,
      tagRelease,
      ReleaseStep(
        action = { state =>
          val extracted = Project extract state
          extracted.runAggregated(PgpKeys.publishSigned in Global in extracted.get(thisProjectRef), state)
        },
        enableCrossBuild = true
      ),
      setNextVersion,
      commitNextVersion,
      UpdateReadme.updateReadmeProcess,
      pushChanges
    ),
    // Sonatype credentials are picked up from the environment when both
    // SONATYPE_USER and SONATYPE_PASS are set (e.g. on CI).
    credentials ++= PartialFunction.condOpt(sys.env.get("SONATYPE_USER") -> sys.env.get("SONATYPE_PASS")){
      case (Some(user), Some(pass)) =>
        Credentials("Sonatype Nexus Repository Manager", "oss.sonatype.org", user, pass)
    }.toList,
    organization := "com.github.pocketberserker",
    homepage := Some(url("https://github.com/pocketberserker/httpz-http4s")),
    licenses := Seq("MIT License" -> url("http://www.opensource.org/licenses/mit-license.php")),
    pomExtra :=
      <developers>
        <developer>
          <id>pocketberserker</id>
          <name>Yuki Nakayama</name>
          <url>https://github.com/pocketberserker</url>
        </developer>
      </developers>
      <scm>
        <url>git@github.com:pocketberserker/httpz-http4s.git</url>
        <connection>scm:git:git@github.com:pocketberserker/httpz-http4s.git</connection>
        <tag>{if(isSnapshot.value) gitHash else { "v" + version.value }}</tag>
      </scm>
    ,
    description := "httpz interpreters for http4s client",
    // Strip test-scoped dependencies from the published POM.
    pomPostProcess := { node =>
      import scala.xml._
      import scala.xml.transform._
      def stripIf(f: Node => Boolean) = new RewriteRule {
        override def transform(n: Node) =
          if (f(n)) NodeSeq.Empty else n
      }
      val stripTestScope = stripIf { n => n.label == "dependency" && (n \\ "scope").text == "test" }
      new RuleTransformer(stripTestScope).transform(node)(0)
    }
  ) ++ Seq(Compile, Test).flatMap(c =>
    // Drop the unused warnings for REPL (console) sessions.
    scalacOptions in (c, console) ~= {_.filterNot(unusedWarnings.toSet)}
  )
}
| pocketberserker/httpz-http4s | project/Common.scala | Scala | mit | 3,289 |
package com.seanshubin.web.sync.domain
import com.fasterxml.jackson.core.JsonParser
import com.fasterxml.jackson.databind.{DeserializationContext, JsonDeserializer}
/**
 * Jackson deserializer that resolves a JSON string token to a
 * [[DownloadStatus]], failing loudly on any unrecognized value.
 */
class DownloadStatusDeserializer extends JsonDeserializer[DownloadStatus] {
  def deserialize(parser: JsonParser, context: DeserializationContext) =
    DownloadStatus.fromString(parser.getValueAsString).getOrElse {
      throw new RuntimeException(
        s"Cannot convert '${parser.getValueAsString}' to a DownloadStatus, expected one of: ${DownloadStatus.validValuesString}")
    }
}
| SeanShubin/web-sync | domain/src/main/scala/com/seanshubin/web/sync/domain/DownloadStatusDeserializer.scala | Scala | unlicense | 620 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.command
import org.apache.spark.SparkConf
import org.apache.spark.sql.{AnalysisException, QueryTest, Row}
import org.apache.spark.sql.catalyst.util.CharVarcharUtils
import org.apache.spark.sql.connector.catalog.InMemoryPartitionTableCatalog
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.{SharedSparkSession, SQLTestUtils}
import org.apache.spark.sql.types._
trait CharVarcharDDLTestBase extends QueryTest with SQLTestUtils {

  /** Data source format under test, supplied by each concrete suite. */
  def format: String

  /**
   * Checks that the column's visible type is the char/varchar type replaced
   * with string, while the raw char/varchar type is preserved in the field
   * metadata.
   */
  def checkColType(f: StructField, dt: DataType): Unit = {
    assert(f.dataType == CharVarcharUtils.replaceCharVarcharWithString(dt))
    assert(CharVarcharUtils.getRawType(f.metadata).contains(dt))
  }

  test("allow to change column for char(x) to char(y), x == y") {
    withTable("t") {
      sql(s"CREATE TABLE t(i STRING, c CHAR(4)) USING $format")
      sql("ALTER TABLE t CHANGE COLUMN c TYPE CHAR(4)")
      checkColType(spark.table("t").schema(1), CharType(4))
    }
  }

  test("not allow to change column for char(x) to char(y), x != y") {
    withTable("t") {
      sql(s"CREATE TABLE t(i STRING, c CHAR(4)) USING $format")
      val e = intercept[AnalysisException] {
        sql("ALTER TABLE t CHANGE COLUMN c TYPE CHAR(5)")
      }
      // Error wording differs between the v1 and v2 code paths; accept either.
      val v1 = e.getMessage contains "'CharType(4)' to 'c' with type 'CharType(5)'"
      val v2 = e.getMessage contains "char(4) cannot be cast to char(5)"
      assert(v1 || v2)
    }
  }

  test("not allow to change column from string to char type") {
    withTable("t") {
      sql(s"CREATE TABLE t(i STRING, c STRING) USING $format")
      val e = intercept[AnalysisException] {
        sql("ALTER TABLE t CHANGE COLUMN c TYPE CHAR(5)")
      }
      val v1 = e.getMessage contains "'StringType' to 'c' with type 'CharType(5)'"
      val v2 = e.getMessage contains "string cannot be cast to char(5)"
      assert(v1 || v2)
    }
  }

  test("not allow to change column from int to char type") {
    withTable("t") {
      sql(s"CREATE TABLE t(i int, c CHAR(4)) USING $format")
      val e = intercept[AnalysisException] {
        sql("ALTER TABLE t CHANGE COLUMN i TYPE CHAR(5)")
      }
      val v1 = e.getMessage contains "'IntegerType' to 'i' with type 'CharType(5)'"
      val v2 = e.getMessage contains "int cannot be cast to char(5)"
      assert(v1 || v2)
    }
  }

  test("allow to change column for varchar(x) to varchar(y), x == y") {
    withTable("t") {
      sql(s"CREATE TABLE t(i STRING, c VARCHAR(4)) USING $format")
      sql("ALTER TABLE t CHANGE COLUMN c TYPE VARCHAR(4)")
      checkColType(spark.table("t").schema(1), VarcharType(4))
    }
  }

  test("not allow to change column for varchar(x) to varchar(y), x > y") {
    withTable("t") {
      sql(s"CREATE TABLE t(i STRING, c VARCHAR(4)) USING $format")
      val e = intercept[AnalysisException] {
        sql("ALTER TABLE t CHANGE COLUMN c TYPE VARCHAR(3)")
      }
      val v1 = e.getMessage contains "'VarcharType(4)' to 'c' with type 'VarcharType(3)'"
      val v2 = e.getMessage contains "varchar(4) cannot be cast to varchar(3)"
      assert(v1 || v2)
    }
  }

  /** Verifies the char/varchar type strings DESC reports for table `t`. */
  def checkTableSchemaTypeStr(expected: Seq[Row]): Unit = {
    checkAnswer(sql("desc t").selectExpr("data_type").where("data_type like '%char%'"), expected)
  }

  test("SPARK-33901: alter table add columns should not change original table's schema") {
    withTable("t") {
      sql(s"CREATE TABLE t(i CHAR(5), c VARCHAR(4)) USING $format")
      sql("ALTER TABLE t ADD COLUMNS (d VARCHAR(5))")
      checkTableSchemaTypeStr(Seq(Row("char(5)"), Row("varchar(4)"), Row("varchar(5)")))
    }
  }

  test("SPARK-33901: ctas should not change table's schema") {
    withTable("t", "tt") {
      sql(s"CREATE TABLE tt(i CHAR(5), c VARCHAR(4)) USING $format")
      sql(s"CREATE TABLE t USING $format AS SELECT * FROM tt")
      checkTableSchemaTypeStr(Seq(Row("char(5)"), Row("varchar(4)")))
    }
  }
}
/** Runs the shared char/varchar DDL tests against the v1 parquet file source. */
class FileSourceCharVarcharDDLTestSuite extends CharVarcharDDLTestBase with SharedSparkSession {
  override def format: String = "parquet"
  override protected def sparkConf: SparkConf = {
    // Force the v1 code path for parquet so this suite exercises the file source.
    super.sparkConf.set(SQLConf.USE_V1_SOURCE_LIST, "parquet")
  }

  // TODO(SPARK-33902): MOVE TO SUPER CLASS AFTER THE TARGET TICKET RESOLVED
  test("SPARK-33901: create table like should not change table's schema") {
    withTable("t", "tt") {
      sql(s"CREATE TABLE tt(i CHAR(5), c VARCHAR(4)) USING $format")
      sql("CREATE TABLE t LIKE tt")
      checkTableSchemaTypeStr(Seq(Row("char(5)"), Row("varchar(4)")))
    }
  }

  // TODO(SPARK-33903): MOVE TO SUPER CLASS AFTER THE TARGET TICKET RESOLVED
  test("SPARK-33901: cvas should not change view's schema") {
    withTable("tt") {
      sql(s"CREATE TABLE tt(i CHAR(5), c VARCHAR(4)) USING $format")
      withView("t") {
        sql("CREATE VIEW t AS SELECT * FROM tt")
        checkTableSchemaTypeStr(Seq(Row("char(5)"), Row("varchar(4)")))
      }
    }
  }
}
/**
 * Runs the shared char/varchar DDL tests against a DataSource V2 in-memory
 * catalog, plus V2-only cases where widening char/varchar columns is allowed.
 */
class DSV2CharVarcharDDLTestSuite extends CharVarcharDDLTestBase
  with SharedSparkSession {
  override def format: String = "foo"
  protected override def sparkConf = {
    super.sparkConf
      .set("spark.sql.catalog.testcat", classOf[InMemoryPartitionTableCatalog].getName)
      .set(SQLConf.DEFAULT_CATALOG.key, "testcat")
  }

  test("allow to change column from char to string type") {
    withTable("t") {
      sql(s"CREATE TABLE t(i STRING, c CHAR(4)) USING $format")
      sql("ALTER TABLE t CHANGE COLUMN c TYPE STRING")
      assert(spark.table("t").schema(1).dataType === StringType)
    }
  }

  test("allow to change column from char(x) to varchar(y) type x <= y") {
    withTable("t") {
      sql(s"CREATE TABLE t(i STRING, c CHAR(4)) USING $format")
      sql("ALTER TABLE t CHANGE COLUMN c TYPE VARCHAR(4)")
      checkColType(spark.table("t").schema(1), VarcharType(4))
    }
    withTable("t") {
      sql(s"CREATE TABLE t(i STRING, c CHAR(4)) USING $format")
      sql("ALTER TABLE t CHANGE COLUMN c TYPE VARCHAR(5)")
      checkColType(spark.table("t").schema(1), VarcharType(5))
    }
  }

  test("allow to change column from varchar(x) to varchar(y) type x <= y") {
    withTable("t") {
      sql(s"CREATE TABLE t(i STRING, c VARCHAR(4)) USING $format")
      sql("ALTER TABLE t CHANGE COLUMN c TYPE VARCHAR(4)")
      checkColType(spark.table("t").schema(1), VarcharType(4))
      sql("ALTER TABLE t CHANGE COLUMN c TYPE VARCHAR(5)")
      checkColType(spark.table("t").schema(1), VarcharType(5))
    }
  }

  test("not allow to change column from char(x) to varchar(y) type x > y") {
    withTable("t") {
      sql(s"CREATE TABLE t(i STRING, c CHAR(4)) USING $format")
      val e = intercept[AnalysisException] {
        sql("ALTER TABLE t CHANGE COLUMN c TYPE VARCHAR(3)")
      }
      assert(e.getMessage contains "char(4) cannot be cast to varchar(3)")
    }
  }
}
| wangmiao1981/spark | sql/core/src/test/scala/org/apache/spark/sql/execution/command/CharVarcharDDLTestBase.scala | Scala | apache-2.0 | 7,672 |
/**
* Copyright (C) 2017 Pants project contributors (see CONTRIBUTORS.md).
* Licensed under the Apache License, Version 2.0 (see LICENSE).
*/
package org.pantsbuild.zinc.extractor
import java.io.File
import scala.compat.java8.OptionConverters._
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper
import org.pantsbuild.zinc.analysis.{AnalysisMap, PortableAnalysisMappers}
import org.pantsbuild.zinc.options.Parsed
/**
* Command-line main class for analysis extraction.
*/
object Main {
  val Command = "zinc-extractor"
  // Jackson mapper configured for Scala collections; used to emit the summary JSON.
  private val om = {
    val mapper = new ObjectMapper with ScalaObjectMapper
    mapper.registerModule(DefaultScalaModule)
    mapper
  }
  // Entry point: parses CLI options, loads the zinc analysis cache, extracts
  // products and dependencies, and writes them as JSON to the requested file.
  def main(args: Array[String]): Unit = {
    val Parsed(settings, residual, errors) = Settings.parse(args)
    // bail out on any command-line option errors
    if (errors.nonEmpty) {
      for (error <- errors) System.err.println(error)
      System.err.println("See %s -help for information about options" format Command)
      sys.exit(1)
    }
    if (settings.help) {
      Settings.printUsage(Command)
      return
    }
    // Output destination is mandatory; fail loudly if missing.
    val summaryJson =
      settings.summaryJson.getOrElse {
        throw new RuntimeException(s"An output file is required.")
      }
    // Load relevant analysis.
    val analysisMap = AnalysisMap.create(settings.analysis)
    // The cached store returns an Optional; absence means the cache could not be read.
    val analysis =
      analysisMap.cachedStore(settings.analysis.cache)
        .get()
        .asScala
        .getOrElse {
          throw new RuntimeException(s"Failed to load analysis from ${settings.analysis.cache}")
        }
        .getAnalysis
    // Extract products and dependencies.
    val extractor = new Extractor(settings.classpath, analysis, analysisMap)
    om.writeValue(
      summaryJson,
      Summary(
        extractor.products,
        extractor.dependencies
      )
    )
  }
}
/**
 * JSON payload written by [[Main]]: classfile products and file-level
 * dependencies as produced by the Extractor, keyed by file.
 */
case class Summary(
  products: collection.Map[File, collection.Set[File]],
  dependencies: collection.Map[File, collection.Set[File]]
)
| fkorotkov/pants | src/scala/org/pantsbuild/zinc/extractor/Main.scala | Scala | apache-2.0 | 2,109 |
class scala
| som-snytt/dotty | tests/pending/fuzzy/AE-86f761cfdd0d96cda748d11fe449910cdd76e935.scala | Scala | apache-2.0 | 12 |
package spark.examples
import spark._
import spark.rdd.NewHadoopRDD
import org.apache.hadoop.hbase.{HBaseConfiguration, HTableDescriptor}
import org.apache.hadoop.hbase.client.HBaseAdmin
import org.apache.hadoop.hbase.mapreduce.TableInputFormat
object HBaseTest {
  // Example job: scans an HBase table through TableInputFormat and counts its rows.
  // args(0) is the Spark master URL, args(1) the HBase table name.
  def main(args: Array[String]) {
    val sc = new SparkContext(args(0), "HBaseTest",
      System.getenv("SPARK_HOME"), Seq(System.getenv("SPARK_EXAMPLES_JAR")))
    val conf = HBaseConfiguration.create()
    // Other options for configuring scan behavior are available. More information available at
    // http://hbase.apache.org/apidocs/org/apache/hadoop/hbase/mapreduce/TableInputFormat.html
    conf.set(TableInputFormat.INPUT_TABLE, args(1))
    // Initialize hBase table if necessary
    val admin = new HBaseAdmin(conf)
    if(!admin.isTableAvailable(args(1))) {
      // Creates the table with no column families, just so the scan below succeeds.
      val tableDesc = new HTableDescriptor(args(1))
      admin.createTable(tableDesc)
    }
    // Read the table as (row key, result) pairs; count() forces the scan to run.
    val hBaseRDD = sc.newAPIHadoopRDD(conf, classOf[TableInputFormat],
      classOf[org.apache.hadoop.hbase.io.ImmutableBytesWritable],
      classOf[org.apache.hadoop.hbase.client.Result])
    hBaseRDD.count()
    System.exit(0)
  }
}
import annotation.unused
// Regression test: a class and its companion object each define a private
// method `foo` with the same signature; the generated bytecode must keep the
// two methods distinct and pass JVM verification when the object is used.
trait Base {
  @unused private val secure_# = 10L
}
class TestUser extends Base {
  def clsMeth(x: Int) = x
  @unused private def foo(x: Int) = x
}
object TestUser extends TestUser {
  def objMeth = "a"
  @unused private def foo(x: Int) = x
}
object Test {
  def main(args: Array[String]): Unit = {
    TestUser.objMeth
    // no-op, just check that it passes verification
    println("verified!")
  }
}
| scala/scala | test/files/run/duplicate-meth.scala | Scala | apache-2.0 | 434 |
package breeze.util
import breeze.macros.expand
/**
*
* @author dlwh
*/
object Sorting {
  // Index sort adapted from scala.util.Sorting's three-way quicksort,
  // (c) 2006-2009, Ross Judson, Scala API, http://scala-lang.org/
  // The four overloads below delegate to the @expand-generated specializations
  // of the generic indexSort defined at the bottom of this object.
  def indexSort(x: Array[Int], off: Int, len: Int, order: Array[Int]): Array[Int] = {
    indexSort_Int(x, off, len, order)
  }
  def indexSort(x: Array[Int], off: Int, len: Int, order: Array[Long]): Array[Int] = {
    indexSort_Long(x, off, len, order)
  }
  def indexSort(x: Array[Int], off: Int, len: Int, order: Array[Float]): Array[Int] = {
    indexSort_Float(x, off, len, order)
  }
  def indexSort(x: Array[Int], off: Int, len: Int, order: Array[Double]): Array[Int] = {
    indexSort_Double(x, off, len, order)
  }
  /**
   * Sorts, in place, the slice `x(off until off + len)` of the index array `x`
   * so that indices are ordered ascending by their values in `order`.
   * Three-way quicksort with insertion sort for short ranges and
   * median-of-3 / pseudomedian-of-9 pivot selection; returns `x` for chaining.
   * `@expand` generates one specialized copy per element type of `order`.
   */
  @expand
  def indexSort[@expand.args(Int, Long, Float, Double) T](x: Array[Int], off: Int, len: Int, order: Array[T]): Array[Int] = {
    // Swaps the index values at positions a and b of x.
    def swap(a: Int, b: Int) {
      val t = x(a)
      x(a) = x(b)
      x(b) = t
    }
    // Swaps n consecutive elements starting at _a with those starting at _b.
    def vecswap(_a: Int, _b: Int, n: Int) {
      var a = _a
      var b = _b
      var i = 0
      while (i < n) {
        swap(a, b)
        i += 1
        a += 1
        b += 1
      }
    }
    // Returns whichever of the positions a, b, c holds the median order-value.
    def med3(a: Int, b: Int, c: Int) = {
      if (order(x(a)) < order(x(b))) {
        if (order(x(b)) < order(x(c))) b else if (order(x(a)) < order(x(c))) c else a
      } else {
        if (order(x(b)) > order(x(c))) b else if (order(x(a)) > order(x(c))) c else a
      }
    }
    // Recursive three-way quicksort over x(off until off + len).
    def sort2(off: Int, len: Int) {
      // Insertion sort on smallest arrays
      if (len < 7) {
        var i = off
        while (i < len + off) {
          var j = i
          while (j>off && order(x(j-1)) > order(x(j))) {
            swap(j, j-1)
            j -= 1
          }
          i += 1
        }
      } else {
        // Choose a partition element, v
        var m = off + (len >> 1) // Small arrays, middle element
        if (len > 7) {
          var l = off
          var n = off + len - 1
          if (len > 40) { // Big arrays, pseudomedian of 9
            val s = len / 8
            l = med3(l, l+s, l+2*s)
            m = med3(m-s, m, m+s)
            n = med3(n-2*s, n-s, n)
          }
          m = med3(l, m, n) // Mid-size, med of 3
        }
        val v = order(x(m))
        // Establish Invariant: v* (<v)* (>v)* v*
        var a = off
        var b = a
        var c = off + len - 1
        var d = c
        var done = false
        while (!done) {
          while (b <= c && order(x(b)) <= v) {
            if (order(x(b)) == v) {
              swap(a, b)
              a += 1
            }
            b += 1
          }
          while (c >= b && order(x(c)) >= v) {
            if (order(x(c)) == v) {
              swap(c, d)
              d -= 1
            }
            c -= 1
          }
          if (b > c) {
            done = true
          } else {
            swap(b, c)
            c -= 1
            b += 1
          }
        }
        // Swap partition elements back to middle
        val n = off + len
        var s = math.min(a-off, b-a)
        vecswap(off, b-s, s)
        s = math.min(d-c, n-d-1)
        vecswap(b, n-s, s)
        // Recursively sort non-partition-elements
        s = b - a
        if (s > 1)
          sort2(off, s)
        s = d - c
        if (s > 1)
          sort2(n-s, s)
      }
    }
    sort2(off, len)
    x
  }
}
| sheide/breeze | math/src/main/scala/breeze/util/Sorting.scala | Scala | apache-2.0 | 3,830 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.mllib.optimization
import scala.math._
import breeze.linalg.{axpy => brzAxpy, norm => brzNorm, Vector => BV}
import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.mllib.linalg.{Vector, Vectors}
/**
* :: DeveloperApi ::
* Class used to perform steps (weight update) using Gradient Descent methods.
*
* For general minimization problems, or for regularized problems of the form
* min L(w) + regParam * R(w),
* the compute function performs the actual update step, when given some
* (e.g. stochastic) gradient direction for the loss L(w),
* and a desired step-size (learning rate).
*
* The updater is responsible to also perform the update coming from the
* regularization term R(w) (if any regularization is used).
*/
@DeveloperApi
abstract class Updater extends Serializable {
  /**
   * Compute an updated value for weights given the gradient, stepSize, iteration number and
   * regularization parameter. Also returns the regularization value regParam * R(w)
   * computed using the *updated* weights.
   *
   * @param weightsOld - Column matrix of size dx1 where d is the number of features.
   * @param gradient - Column matrix of size dx1 where d is the number of features.
   * @param stepSize - step size across iterations
   * @param iter - Iteration number
   * @param regParam - Regularization parameter
   *
   * @return A tuple of 2 elements. The first element is a column matrix containing updated weights,
   *         and the second element is the regularization value computed using updated weights.
   */
  // Concrete implementations in this file: SimpleUpdater (no regularization),
  // L1Updater (soft-thresholding) and SquaredL2Updater (weight decay).
  def compute(
      weightsOld: Vector,
      gradient: Vector,
      stepSize: Double,
      iter: Int,
      regParam: Double): (Vector, Double)
}
/**
* :: DeveloperApi ::
* A simple updater for gradient descent *without* any regularization.
* Uses a step-size decreasing with the square root of the number of iterations.
*/
@DeveloperApi
class SimpleUpdater extends Updater {
  override def compute(
      weightsOld: Vector,
      gradient: Vector,
      stepSize: Double,
      iter: Int,
      regParam: Double): (Vector, Double) = {
    // Learning rate decays as stepSize / sqrt(iter); regParam is unused here.
    val learningRate = stepSize / math.sqrt(iter)
    // Work on a dense mutable copy: w' = w - learningRate * gradient.
    val updated: BV[Double] = weightsOld.toBreeze.toDenseVector
    brzAxpy(-learningRate, gradient.toBreeze, updated)
    // No regularization, so the reported penalty value is always 0.
    (Vectors.fromBreeze(updated), 0)
  }
}
/**
* :: DeveloperApi ::
* Updater for L1 regularized problems.
* R(w) = ||w||_1
* Uses a step-size decreasing with the square root of the number of iterations.
* Instead of subgradient of the regularizer, the proximal operator for the
* L1 regularization is applied after the gradient step. This is known to
* result in better sparsity of the intermediate solution.
*
* The corresponding proximal operator for the L1 norm is the soft-thresholding
* function. That is, each weight component is shrunk towards 0 by shrinkageVal.
*
* If w > shrinkageVal, set weight component to w-shrinkageVal.
* If w < -shrinkageVal, set weight component to w+shrinkageVal.
* If -shrinkageVal < w < shrinkageVal, set weight component to 0.
*
* Equivalently, set weight component to signum(w) * max(0.0, abs(w) - shrinkageVal)
*/
@DeveloperApi
class L1Updater extends Updater {
  override def compute(
      weightsOld: Vector,
      gradient: Vector,
      stepSize: Double,
      iter: Int,
      regParam: Double): (Vector, Double) = {
    val learningRate = stepSize / math.sqrt(iter)
    // Gradient step on a dense mutable copy: w' = w - learningRate * gradient.
    val weights: BV[Double] = weightsOld.toBreeze.toDenseVector
    brzAxpy(-learningRate, gradient.toBreeze, weights)
    // Proximal step for L1: soft-threshold each component towards zero by
    // shrinkage = regParam * learningRate.
    val shrinkage = regParam * learningRate
    var idx = 0
    while (idx < weights.length) {
      val w = weights(idx)
      weights(idx) = signum(w) * max(0.0, abs(w) - shrinkage)
      idx += 1
    }
    // Regularization value is regParam * ||w'||_1 on the updated weights.
    (Vectors.fromBreeze(weights), brzNorm(weights, 1.0) * regParam)
  }
}
/**
* :: DeveloperApi ::
* Updater for L2 regularized problems.
* R(w) = 1/2 ||w||^2
* Uses a step-size decreasing with the square root of the number of iterations.
*/
@DeveloperApi
class SquaredL2Updater extends Updater {
  override def compute(
      weightsOld: Vector,
      gradient: Vector,
      stepSize: Double,
      iter: Int,
      regParam: Double): (Vector, Double) = {
    val learningRate = stepSize / math.sqrt(iter)
    val weights: BV[Double] = weightsOld.toBreeze.toDenseVector
    // Combine the loss gradient with the regularizer gradient (regParam * w):
    //   w' = w - learningRate * (gradient + regParam * w)
    //      = (1 - learningRate * regParam) * w - learningRate * gradient
    // First shrink the weights in place, then apply the gradient step.
    weights :*= (1.0 - learningRate * regParam)
    brzAxpy(-learningRate, gradient.toBreeze, weights)
    // Penalty value is (regParam / 2) * ||w'||_2^2 on the updated weights.
    val l2 = brzNorm(weights, 2.0)
    (Vectors.fromBreeze(weights), 0.5 * regParam * l2 * l2)
  }
}
| xieguobin/Spark_2.0.0_cn1 | mllib/optimization/Updater.scala | Scala | apache-2.0 | 5,828 |
package com.arcusys.valamis.web.service
import com.arcusys.learn.liferay.model.ValamisBaseOpenSearchImpl
import com.arcusys.valamis.lesson.model.Lesson
class OpenSearchImpl extends ValamisBaseOpenSearchImpl {
  /** Portal path that serves Valamis open-search requests. */
  val SEARCH_PATH = "/c/valamis/open_search"
  /** Prefix prepended to the keywords in search result titles. */
  val TITLE = "Valamis Search: "

  override def getSearchPath = SEARCH_PATH

  override def getPortletId = LessonIndexer.PortletId

  override def getTitle(keywords: String) = s"$TITLE$keywords"

  override def getClassName: String = classOf[Lesson].getName
}
| igor-borisov/valamis | valamis-portlets/src/main/scala/com/arcusys/valamis/web/service/OpenSearchImpl.scala | Scala | gpl-3.0 | 515 |
package tu.coreservice.action.critic.manager
import tu.model.knowledge.{Constant, Resource, Probability, KnowledgeURI}
import tu.model.knowledge.communication.{ContextHelper, ShortTermMemory}
import tu.model.knowledge.helper.URIGenerator
import tu.coreservice.action.critic.{CriticLink, Critic}
/**
* @author alex toschev
* date 2014-09-08
* time: 10:09 PM
*/
class DoNotUnderstandManager(_exclude: List[CriticLink], _include: List[CriticLink], _uri: KnowledgeURI, _probability: Probability = new Probability())
  extends Critic(_exclude, _include, _uri, _probability) {
  // Convenience constructor: no critic links, generated URI.
  def this() = this(List[CriticLink](), List[CriticLink](), URIGenerator.generateURI("DoNotUnderstandManager"))
  // No lifecycle work to perform; both always report false.
  def start() = false
  def stop() = false
  /**
   * Starts DoNotUnderstand that invokes Cry4Help.
   * Dispatch order: an explicit lastError wins; otherwise not-understood
   * concepts, then not-understood phrases, then results to report, then
   * found solutions; an empty context is returned when nothing applies.
   * @param inputContext ShortTermMemory of all inbound parameters
   * @return output ShortTermMemory.
   */
  override def apply(inputContext: ShortTermMemory): ShortTermMemory = {
    inputContext.lastError match {
      case Some(error: Error) => {
        val d = new DoNotUnderstand()
        ContextHelper(List[Resource](), d(error), this.getClass.getName + " result")
      }
      case None => {
        if (inputContext.notUnderstoodConcepts.size > 0) {
          val d = new DoNotUnderstand()
          ContextHelper.createReflectiveContext(d(inputContext.notUnderstoodConcepts), this.getClass.getName + " result")
        } else if (inputContext.notUnderstoodPhrases.size > 0) {
          val d = new DoNotUnderstand()
          ContextHelper.createReflectiveContext(d(), this.getClass.getName + " result")
        } else if (inputContext.resultToReport.size > 0) {
          val d = new DoNotUnderstand()
          ContextHelper.createReflectiveContext(d(inputContext.resultToReport), this.getClass.getName + " " + Constant.RESULT_TO_REPORT)
        } else if (inputContext.solutionsToReport.size > 0) {
          val d = new DoNotUnderstand()
          // NOTE(review): this branch is guarded by solutionsToReport but passes
          // resultToReport to DoNotUnderstand — looks like a copy-paste slip;
          // confirm whether d(inputContext.solutionsToReport) was intended.
          ContextHelper.createReflectiveContext(d(inputContext.resultToReport), this.getClass.getName + " " + Constant.FOUND_SOLUTIONS)
        }else {
          ContextHelper(List[Resource](), this.getClass.getName + " result")
        }
      }
    }
  }
}
| tu-team/2 | coreservice.action.critic/src/main/scala/tu/coreservice/action/critic/manager/DoNotUnderstandManager.scala | Scala | gpl-3.0 | 2,226 |
package com.sksamuel.scrimage.composite
import org.scalatest.{ OneInstancePerTest, BeforeAndAfter, FunSuite }
import com.sksamuel.scrimage.Image
import com.sksamuel.scrimage.canvas.Canvas._
/** @author Stephen Samuel */
class AlphaCompositeTest extends FunSuite with BeforeAndAfter with OneInstancePerTest {
  // Fixtures: a base image scaled to 400x300 and an overlay containing transparency.
  val source = Image(getClass.getResourceAsStream("/colosseum.jpg")).resizeTo(400, 300)
  val transparent = Image(getClass.getResourceAsStream("/transparent_chip.png"))
  // Pre-rendered expected composites for alpha 1.0 and alpha 0.5 respectively.
  val expected1 = Image(getClass.getResourceAsStream("/composite/alpha_composite.png"))
  val expected2 = Image(getClass.getResourceAsStream("/composite/alpha_composite_0.5f.png"))
  test("alpha composite uses transparency of application image") {
    val actual = source.composite(AlphaComposite(1f), transparent)
    assert(expected1 === actual)
  }
  test("alpha composite uses transparency of application image combined with alpha") {
    val actual = source.composite(AlphaComposite(0.5f), transparent)
    assert(expected2 === actual)
  }
}
| carlosFattor/scrimage | scrimage-core/src/test/scala/com/sksamuel/scrimage/composite/AlphaCompositeTest.scala | Scala | apache-2.0 | 1,029 |
package im.actor.server.api.rpc.service.profile
import scala.concurrent.duration._
import scala.concurrent.forkjoin.ThreadLocalRandom
import scala.concurrent.{ ExecutionContext, Future }
import akka.actor.ActorSystem
import akka.util.Timeout
import slick.driver.PostgresDriver.api._
import im.actor.api.rpc.DBIOResult._
import im.actor.api.rpc._
import im.actor.api.rpc.files.FileLocation
import im.actor.api.rpc.misc.{ ResponseBool, ResponseSeq }
import im.actor.api.rpc.profile.{ ProfileService, ResponseEditAvatar }
import im.actor.server.api.ApiConversions._
import im.actor.server.db.DbExtension
import im.actor.server.file.FileErrors
import im.actor.server.persist
import im.actor.server.push.SeqUpdatesExtension
import im.actor.server.sequence.SeqState
import im.actor.server.social.{ SocialExtension, SocialManagerRegion }
import im.actor.server.user._
import im.actor.server.util.{ FileStorageAdapter, ImageUtils, S3StorageExtension, StringUtils }
/** Client-facing RPC errors (code 400) returned by the profile handlers below. */
object ProfileErrors {
  val NicknameInvalid = RpcError(400, "NICKNAME_INVALID",
    "Invalid nickname. Valid nickname should contain from 5 to 32 characters, and may consist of latin characters, numbers and underscores", false, None)
  // Fixed grammar of the user-facing messages: "belongs to some other user",
  // "no longer than 255 characters".
  val NicknameBusy = RpcError(400, "NICKNAME_BUSY", "This nickname already belongs to some other user, we are sorry!", false, None)
  val AboutTooLong = RpcError(400, "ABOUT_TOO_LONG",
    "About is too long. It should be no longer than 255 characters", false, None)
}
class ProfileServiceImpl()(
  implicit
  actorSystem: ActorSystem
) extends ProfileService {
  import FileHelpers._
  import ImageUtils._
  override implicit val ec: ExecutionContext = actorSystem.dispatcher
  private implicit val timeout = Timeout(5.seconds)
  // TODO: configurable
  // Extensions resolved once from the actor system and used implicitly below.
  private implicit val db: Database = DbExtension(actorSystem).db
  private implicit val seqUpdExt: SeqUpdatesExtension = SeqUpdatesExtension(actorSystem)
  private implicit val userProcessorRegion: UserProcessorRegion = UserExtension(actorSystem).processorRegion
  private implicit val userViewRegion: UserViewRegion = UserExtension(actorSystem).viewRegion
  private implicit val socialRegion: SocialManagerRegion = SocialExtension(actorSystem).region
  private implicit val fsAdapter: FileStorageAdapter = S3StorageExtension(actorSystem).s3StorageAdapter
  // Validates the uploaded file against AvatarSizeLimit, scales it into an
  // avatar and stores it for the authenticated user; an invalid file location
  // is surfaced as the LocationInvalid RPC error.
  override def jhandleEditAvatar(fileLocation: FileLocation, clientData: ClientData): Future[HandlerResult[ResponseEditAvatar]] = {
    // TODO: flatten
    val authorizedAction = requireAuth(clientData).map { implicit client ⇒
      withFileLocation(fileLocation, AvatarSizeLimit) {
        scaleAvatar(fileLocation.fileId, ThreadLocalRandom.current()) flatMap {
          case Right(avatar) ⇒
            for {
              UserCommands.UpdateAvatarAck(avatar, SeqState(seq, state)) ← DBIO.from(UserOffice.updateAvatar(client.userId, client.authId, Some(avatar)))
            } yield Ok(ResponseEditAvatar(
              avatar.get,
              seq,
              state.toByteArray
            ))
          case Left(e) ⇒
            // Thrown here and translated to an RPC-level error in the recover below.
            throw FileErrors.LocationInvalid
        }
      }
    }
    db.run(toDBIOAction(authorizedAction)) recover {
      case FileErrors.LocationInvalid ⇒ Error(Errors.LocationInvalid)
    }
  }
  // Clears the authenticated user's avatar and returns the new sequence state.
  override def jhandleRemoveAvatar(clientData: ClientData): Future[HandlerResult[ResponseSeq]] = {
    val authorizedAction = requireAuth(clientData).map { implicit client ⇒
      for {
        UserCommands.UpdateAvatarAck(_, SeqState(seq, state)) ← DBIO.from(UserOffice.updateAvatar(client.userId, client.authId, None))
      } yield Ok(ResponseSeq(seq, state.toByteArray))
    }
    db.run(toDBIOAction(authorizedAction))
  }
  // Renames the authenticated user; no local validation, delegated to UserOffice.
  override def jhandleEditName(name: String, clientData: ClientData): Future[HandlerResult[ResponseSeq]] = {
    val authorizedAction = requireAuth(clientData) map { implicit client ⇒
      DBIO.from(UserOffice.changeName(client.userId, name) map {
        case SeqState(seq, state) ⇒ Ok(ResponseSeq(seq, state.toByteArray))
      })
    }
    db.run(toDBIOAction(authorizedAction))
  }
  // Sets (or clears, when None) the user's nickname. Validates the format and,
  // for a defined nickname, its uniqueness before delegating to UserOffice.
  def jhandleEditNickName(nickname: Option[String], clientData: ClientData): Future[HandlerResult[ResponseSeq]] = {
    val authorizedAction = requireAuth(clientData) map { implicit client ⇒
      val action: Result[ResponseSeq] = for {
        trimmed ← point(nickname.map(_.trim))
        _ ← fromBoolean(ProfileErrors.NicknameInvalid)(trimmed.map(StringUtils.validNickName).getOrElse(true))
        _ ← if (trimmed.isDefined) {
          for {
            checkExist ← fromOption(ProfileErrors.NicknameInvalid)(trimmed)
            _ ← fromDBIOBoolean(ProfileErrors.NicknameBusy)(persist.User.nicknameExists(checkExist).map(exist ⇒ !exist))
          } yield ()
        } else point(())
        SeqState(seq, state) ← fromFuture(UserOffice.changeNickname(client.userId, client.authId, trimmed))
      } yield ResponseSeq(seq, state.toByteArray)
      action.run
    }
    db.run(toDBIOAction(authorizedAction))
  }
  // Reports whether a nickname is both valid and not yet taken.
  def jhandleCheckNickName(nickname: String, clientData: ClientData): Future[HandlerResult[ResponseBool]] = {
    val authorizedAction = requireAuth(clientData) map { implicit client ⇒
      (for {
        _ ← fromBoolean(ProfileErrors.NicknameInvalid)(StringUtils.validNickName(nickname))
        exists ← fromDBIO(persist.User.nicknameExists(nickname.trim))
      } yield ResponseBool(!exists)).run
    }
    db.run(toDBIOAction(authorizedAction))
  }
  //todo: move validation inside of UserOffice
  // Sets (or clears) the user's "about" text after a length check.
  // NOTE(review): `s.nonEmpty & s.length < 255` uses non-short-circuit `&` and
  // rejects an empty (trimmed) about with the AboutTooLong error — confirm
  // whether an empty string should instead be treated as a clear.
  def jhandleEditAbout(about: Option[String], clientData: ClientData): Future[HandlerResult[ResponseSeq]] = {
    val authorizedAction = requireAuth(clientData) map { implicit client ⇒
      val action: Result[ResponseSeq] = for {
        trimmed ← point(about.map(_.trim))
        _ ← fromBoolean(ProfileErrors.AboutTooLong)(trimmed.map(s ⇒ s.nonEmpty & s.length < 255).getOrElse(true))
        SeqState(seq, state) ← fromFuture(UserOffice.changeAbout(client.userId, client.authId, trimmed))
      } yield ResponseSeq(seq, state.toByteArray)
      action.run
    }
    db.run(toDBIOAction(authorizedAction))
  }
}
package com.caffinc.hydrangea.core.filter
import com.caffinc.hydrangea.core.serde.KafkaRecord
import com.typesafe.scalalogging.LazyLogging
/**
* Filters records with null Keys
*
* @author Sriram
*/
object NullKeyFilter extends Filter[KafkaRecord] with LazyLogging {

  /** Alias for [[filter]] so the object can be applied as a predicate directly. */
  def apply(implicit record: KafkaRecord): Boolean = filter

  /** Keeps records with a non-null key; logs and rejects the rest. */
  override def filter(implicit record: KafkaRecord): Boolean = {
    val hasKey = record.key != null
    if (!hasKey) logger.info("Filtering: {}", record)
    hasKey
  }
}
| caffinc/hydrangea | hydrangea/core/src/main/scala/com/caffinc/hydrangea/core/filter/NullKeyFilter.scala | Scala | mit | 523 |
package io.vamp.model.reader
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{ FlatSpec, Matchers }
import scala.util.{ Failure, Success }
@RunWith(classOf[JUnitRunner])
// Parsing specs for the UnitValue types: valid inputs yield Success with the
// normalized value, malformed or out-of-range inputs yield Failure.
class UnitValueSpec extends FlatSpec with Matchers {
  "Percentage" should "parse" in {
    // Whitespace around the number and the '%' sign is tolerated; the unit is required.
    UnitValue.of[Percentage]("0 %") shouldBe Success(Percentage(0))
    UnitValue.of[Percentage](" 50 %") shouldBe Success(Percentage(50))
    UnitValue.of[Percentage]("100%") shouldBe Success(Percentage(100))
    UnitValue.of[Percentage]("x") shouldBe a[Failure[_]]
    UnitValue.of[Percentage]("-50") shouldBe a[Failure[_]]
    UnitValue.of[Percentage]("1.5") shouldBe a[Failure[_]]
    UnitValue.of[Percentage]("100") shouldBe a[Failure[_]]
  }
  "MegaByte" should "parse" in {
    // mb/Mi/m suffixes are case-insensitive; gb/G values are converted to megabytes.
    UnitValue.of[MegaByte]("128mb") shouldBe Success(MegaByte(128))
    UnitValue.of[MegaByte](" 128mb ") shouldBe Success(MegaByte(128))
    UnitValue.of[MegaByte](" 128 mb ") shouldBe Success(MegaByte(128))
    UnitValue.of[MegaByte](" 128 Mi ") shouldBe Success(MegaByte(128))
    UnitValue.of[MegaByte](" 128 mi ") shouldBe Success(MegaByte(128))
    UnitValue.of[MegaByte](".1m") shouldBe Success(MegaByte(0.1))
    UnitValue.of[MegaByte]("10.1Mb") shouldBe Success(MegaByte(10.1))
    UnitValue.of[MegaByte]("64.MB") shouldBe Success(MegaByte(64))
    UnitValue.of[MegaByte](".1gb") shouldBe Success(MegaByte(102.4))
    UnitValue.of[MegaByte]("1GB") shouldBe Success(MegaByte(1024))
    UnitValue.of[MegaByte]("1.5G") shouldBe Success(MegaByte(1536))
    UnitValue.of[MegaByte](".1gB") shouldBe Success(MegaByte(102.4))
    UnitValue.of[MegaByte]("1") shouldBe a[Failure[_]]
    UnitValue.of[MegaByte]("-1") shouldBe a[Failure[_]]
    UnitValue.of[MegaByte]("1kb") shouldBe a[Failure[_]]
    UnitValue.of[MegaByte](".") shouldBe a[Failure[_]]
  }
  "Quantity" should "parse" in {
    // Bare numbers (including negatives) are accepted; the 'm' suffix means milli.
    UnitValue.of[Quantity]("128") shouldBe Success(Quantity(128.0))
    UnitValue.of[Quantity]("-128.5") shouldBe Success(Quantity(-128.5))
    UnitValue.of[Quantity](" 1m ") shouldBe Success(Quantity(0.001))
    UnitValue.of[Quantity](" 0.1 ") shouldBe Success(Quantity(0.1))
    UnitValue.of[Quantity](".1") shouldBe Success(Quantity(0.1))
    UnitValue.of[Quantity]("-0.1 ") shouldBe Success(Quantity(-0.1))
    UnitValue.of[Quantity]("-.1 ") shouldBe Success(Quantity(-.1))
  }
  "Time" should "parse" in {
    // All unit spellings normalize to a second count.
    // Test for second values
    UnitValue.of[Time]("1sec") shouldBe Success(Time(1))
    UnitValue.of[Time]("20s") shouldBe Success(Time(20))
    UnitValue.of[Time]("1second") shouldBe Success(Time(1))
    UnitValue.of[Time]("2seconds") shouldBe Success(Time(2))
    // Test for minute values
    UnitValue.of[Time]("20m") shouldBe Success(Time(20 * 60))
    UnitValue.of[Time]("1min") shouldBe Success(Time(60))
    UnitValue.of[Time]("1minute") shouldBe Success(Time(60))
    UnitValue.of[Time]("5minutes") shouldBe Success(Time(5 * 60))
    // Test for hourly values
    UnitValue.of[Time]("1h") shouldBe Success(Time(3600))
    UnitValue.of[Time]("2hrs") shouldBe Success(Time(2 * 3600))
    UnitValue.of[Time]("1hour") shouldBe Success(Time(3600))
    UnitValue.of[Time]("4hours") shouldBe Success(Time(4 * 3600))
  }
}
| dragoslav/vamp | model/src/test/scala/io/vamp/model/reader/UnitValueSpec.scala | Scala | apache-2.0 | 3,220 |
package org.jetbrains.plugins.scala
package lang
package psi
package api
package base
package types
/**
* @author Alexander Podkhalyuzin
* Date: 13.03.2008
*/
/** PSI element for an existential type, e.g. `T forSome { type T }`. */
trait ScExistentialTypeElement extends ScTypeElement {
  override protected val typeName = "ExistentialType"

  /** The type element that is existentially quantified (the part before `forSome`). */
  def quantified: ScTypeElement = findChildByClassScala(classOf[ScTypeElement])

  /** The `forSome { ... }` clause introducing the existential bindings. */
  def clause: ScExistentialClause = findChildByClassScala(classOf[ScExistentialClause])
}
package com.ee.js
import com.google.javascript.jscomp.{JSError, Compiler, CompilerOptions, JSSourceFile}
import java.io.File
object JavascriptCompiler {

  /** Minifies `source` using the Google Closure Compiler.
    *
    * @param source          the javascript to minify
    * @param name            optional name to attribute to the source in compiler diagnostics
    * @param compilerOptions optional compiler options; defaults to a fresh `CompilerOptions`
    * @return the minified javascript
    * @throws RuntimeException if compilation fails; the source and errors are
    *                          dumped to a folder whose path is in the message
    */
  def minify(source: String, name: Option[String], compilerOptions: Option[CompilerOptions] = None): String = {
    val compiler = new Compiler()
    val extern = JSSourceFile.fromCode("externs.js", "function alert(x) {}")
    val options = compilerOptions.getOrElse(new CompilerOptions())
    val input = JSSourceFile.fromCode(name.getOrElse("unknown"), source)
    // Matching on a Boolean is needlessly indirect - a plain if/else is the idiom.
    if (compiler.compile(extern, input, options).success) {
      compiler.toSource()
    } else {
      val errorFolder = dumpJsAndErrors(source, compiler.getErrors)
      throw new RuntimeException("JS Errors see: " + errorFolder)
    }
  }

  /** Writes the offending javascript and the compiler errors to a dump folder
    * for post-mortem inspection.
    *
    * @return the absolute path of the dump folder
    */
  private def dumpJsAndErrors(source: String, errors: Array[JSError]): String = {
    val errorFolderName = ".Assets-Loader--JavascriptCompiler"
    val errorString = errors.map {
      e =>
        "[" + e.lineNumber + "]" + e.description
    }.mkString("\\n")
    val errorFolder: File = new File(errorFolderName)
    // Result deliberately ignored: the folder may already exist from a previous failure.
    errorFolder.mkdirs()
    com.ee.utils.file.writeToFile(errorFolderName + "/errors.log", errorString)
    com.ee.utils.file.writeToFile(errorFolderName + "/all_errors.js", source)
    errorFolder.getAbsolutePath
  }
}
package com.meongx.pw.logwatcher
import scala.actors.Actor
/** Actor intended to post collected log-watcher data; currently an empty stub. */
object DataPoster extends Actor {
  // NOTE(review): `act` does nothing yet, so starting this actor is a no-op -
  // presumably a placeholder to be filled in later.
  def act = {
  }
}
| meongx/ngANU | src/main/java/com/meongx/pw/logwatcher/DataPoster.scala | Scala | gpl-3.0 | 116 |
// Copyright (c) 2013 Aleksander Bielawski. All rights reserved.
// Use of this source code is governed by a BSD-style license
// that can be found in the LICENSE file.
package com.github.pabzdzdzwiagief.initialization
import scala.reflect.internal.Flags
import tools.nsc.Global
import tools.nsc.plugins.PluginComponent
import tools.nsc.transform.Transform
/** Compiler phase ("initorder") that runs after `cleanup` and attaches `Trace`
  * annotations to class symbols, recording which members each method reads,
  * writes and invokes - and in what order (via depth-first traversal ordinals).
  * The recorded traces can later be analyzed to spot possible
  * initialization-order problems.
  */
private[this] class Order(val global: Global)
  extends PluginComponent with Transform with Annotations {
  import global.{CompilationUnit, Transformer}
  import global.{Tree, ClassDef, DefDef}
  import global.{Select, This, Assign => AssignTree, Apply, Ident, Super}
  import global.{Typed, TypeTree, Annotated, AnnotationInfo}
  import global.definitions.UncheckedClass.{tpe => uncheckedType}
  import global.{reporter => out}
  override final val phaseName = "initorder"
  /** Runs after AST becomes as simple as it can get. */
  override final val runsAfter = List("cleanup")
  override final def newTransformer(unit: CompilationUnit) = new Transformer {
    /** Annotates methods of every class.
      * Annotations inform about anything that can help spotting possible
      * initialization problems, e.g. which class members are used.
      */
    override def transform(tree: Tree): Tree = super.transform(tree) match {
      case classDef: ClassDef => try {
        for {
          (defDef, toAttach) ← infos(classDef)
          method ← defDef.symbol.alternatives if method.isMethod
          annotationInfo ← toAttach
        } {
          classDef.symbol.addAnnotation(annotationInfo)
        }
        classDef
      } catch {
        // Best effort: failing to annotate must not abort the compilation run.
        case e: Exception =>
          out.warning(classDef.pos, s"$phaseName: failed with exception: $e")
          classDef
      }
      case other => other
    }
    /** @return a map from method definitions to annotations that should be
      * attached to them.
      */
    private[this] def infos(c: ClassDef): Map[DefDef, List[AnnotationInfo]] =
      (for {
        defDef@ DefDef(_, _, _, _, _, _) ← c.impl.body
        from = defDef.symbol.asMethod
        // position of each sub-tree within the method in depth-first order
        ordinals = dfsTraverse(defDef).zipWithIndex.toMap
        // trees under an `(expr: @unchecked)` ascription are excluded from tracing
        shouldCheck = (for {
          Typed(expression, _) ← unchecks(defDef)
          child ← expression :: expression.children
        } yield child).toSet.andThen(!_)
        access = for {
          tree ← accesses(defDef) if shouldCheck(tree)
          point = tree.pos.pointOrElse(-1)
        } yield Get(from, tree.symbol.asTerm, point, ordinals(tree))
        invoke = for {
          tree ← invocations(defDef) if shouldCheck(tree)
          invoked = tree.symbol.asMethod
          point = tree.pos.pointOrElse(-1)
        } yield Virtual(from, invoked, point, ordinals(tree))
        special = for {
          tree ← specials(defDef) if shouldCheck(tree)
          invoked = tree.symbol.asMethod
          point = tree.pos.pointOrElse(-1)
        } yield new Static(from, invoked, point, ordinals(tree))
        assign = for {
          tree ← assignments(defDef) if shouldCheck(tree)
          point = tree.pos.pointOrElse(-1)
        } yield Set(from, tree.lhs.symbol.asTerm, point, ordinals(tree))
        toAttach = access ::: invoke ::: special ::: assign
        annotationInfos = toAttach.map(Trace.toAnnotation)
      } yield defDef → annotationInfos).toMap
    /** Works like [[scala.reflect.internal.Trees#Tree.children]], but puts
      * assignments after their subtrees.
      *
      * @return trace of depth-first tree traversal.
      */
    private[this] def dfsTraverse(t: Any): List[Tree] = t match {
      case a@ AssignTree(Select(This(_), _), _) =>
        a.productIterator.toList.flatMap(dfsTraverse) ::: List(a)
      case tree: Tree =>
        tree :: tree.productIterator.toList.flatMap(dfsTraverse)
      case list: List[_] => list.flatMap(dfsTraverse)
      case _ => Nil
    }
    /** @return trees that represent unchecked expressions.
      * Matches trees of form:
      * - (expr: @unchecked)
      */
    private[this] def unchecks(t: DefDef): List[Typed] = t.collect {
      case t@ Typed(_, tpt: TypeTree) if (tpt.original match {
        case a: Annotated => a.tpe match {
          case AnnotatedType(i) => i.exists(_.tpe <:< uncheckedType)
          case _ => false
        }
        case _ => false
      }) => t
    }
    /** @return trees that represent member assignments.
      * Matches trees of form:
      * - Class.this.field = ..., where this.field is immutable
      */
    private[this] def assignments(t: Tree): List[AssignTree] = t.collect {
      case a@ AssignTree(s@ Select(This(_), _), _) if !s.symbol.isMutable => a
    }
    /** @return trees that represent member method invocations.
      * Matches trees of form:
      * - Class.this.method(...)
      * - $this.method(...), where $this is Mixin.$init$ parameter
      * - Trait$class.method(this, ...), where Trait$class is Trait's
      *   implementation module
      * - $outer.$outer1.$outer2.method(...), where $outer is an outer
      *   parameter used in a constructor of an inner class and $outerN
      *   is an outer accessor
      * - this.$outer1.$outer2.method(...), where $outerN is an outer
      *   accessor used in a method of an inner class
      * - this.x.y.z(...), where any of x, y, z is a member
      *   of an inner class
      */
    private[this] def invocations(t: DefDef): List[Apply] = t.collect {
      case a@ Apply(Select(This(_), _), _) => a
      case a@ Apply(Select(i: Ident, _), _)
        if i.hasSymbolWhich(_.name == global.nme.SELF)
        && i.hasSymbolWhich(_.owner.owner.isTrait) => a
      case a@ Apply(_: Select, This(_) :: _)
        if t.hasSymbolWhich(_.hasFlag(Flags.MIXEDIN))
        && a.hasSymbolWhich(_.isMethod)
        && a.hasSymbolWhich(_.owner.isImplClass) => a
      case a@ Apply(Select(outer, _), _)
        if a.hasSymbolWhich(!_.isOuterAccessor)
        && outer.forAll {
             case i@ Ident(global.nme.OUTER)
               if i.hasSymbolWhich(_.isValueParameter)
               && i.hasSymbolWhich(_.owner.isConstructor)
               && i.hasSymbolWhich(_.owner.owner.isLifted) => true
             case This(_) => true
             case accessorChainSelect: Select
               if accessorChainSelect.hasSymbolWhich { s =>
                 s.accessedOrSelf.isOuterField || s.isOuterAccessor
               } => true
             case accessorChainApply: Apply
               if accessorChainApply.hasSymbolWhich { s =>
                 s.accessedOrSelf.isOuterField || s.isOuterAccessor
               } => true
             case _ => false
           } => a
      case a: Apply
        if a.exists { case Select(This(_), _) => true case _ => false }
        && a.hasSymbolWhich(_.owner.isLifted) => a
    }
    /** @return trees that represent special member method invocations.
      * Matches trees of form:
      * - (new anonymous class anonfun$1(Class.this) : FunctionN),
      *   where anonfun$1 is an anonymous function; the returned tree
      *   is however not a reference to constructor, but to apply()
      *   method
      * - Class.super.method(...)
      * - Mixin.$init$(...)
      * - new Class.Inner(...), where Inner is an inner class
      *   enclosed by Class
      */
    private[this] def specials(t: Tree): List[Apply] = t.collect {
      case Typed(a@ Apply(_, This(_) :: _), functionType: TypeTree)
        if a.hasSymbolWhich(_.isConstructor)
        && a.hasSymbolWhich(_.owner.isAnonymousFunction) =>
        // Redirect the reference from the anonfun's constructor to its
        // concrete, final apply() method, so the trace records the call
        // that will actually run user code.
        new Apply(a.fun, a.args) {
          override var symbol = a.symbol.owner.info.findMember(
            global.nme.apply,
            excludedFlags = Flags.BRIDGE | Flags.DEFERRED,
            requiredFlags = Flags.FINAL | Flags.METHOD,
            stableOnly = false
          ).alternatives.headOption.getOrElse(a.symbol)
          setPos(a.pos)
          override def hashCode() = a.hashCode()
          override def equals(b: Any) = a.equals(b)
        }
      case a@ Apply(Select(Super(_, _), _), _) => a
      case a@ Apply(_, _) if a.symbol.isMixinConstructor => a
      case a@ Apply(_, This(_) :: _)
        if a.hasSymbolWhich(_.isConstructor)
        && a.hasSymbolWhich(_.owner.isLifted) => a
    }
    /** @return trees that represent member accesses.
      * Matches trees of form:
      * - Class.this.field, inside stable member accessor def
      * - Class.this, where Class is an outer class
      */
    private[this] def accesses(t: DefDef): List[Select] = t match {
      case d if d.symbol.isAccessor && d.symbol.isStable => d.collect {
        case s@ Select(This(_), _) if s.symbol.isPrivateLocal => s
      }
      case d if d.symbol.isOuterAccessor => d.collect {
        case s@ Select(This(_), _) if s.symbol.isProtected => s
      }
      case _ => Nil
    }
    /** AnnotatedType has different interfaces in 2.10 and 2.11.
      * This extractor is meant to compile under both versions. */
    private object AnnotatedType {
      import global.{AnnotatedType => Type}
      import global.{AnnotationInfo => Info}
      def unapply(annotated: Type): Option[List[Info]] =
        Type.unapply(annotated).map(_._1)
    }
  }
}
| pabzdzdzwiagief/initialization | src/main/scala/com/github/pabzdzdzwiagief/initialization/Order.scala | Scala | bsd-2-clause | 9,695 |
package swing.streams
package examples
import java.awt.event._
import javax.swing._
import scalaz.concurrent.Task
import scalaz.stream._
import scalaz.\\/
import listener._
object Dragging extends App {
  /** Creates and shows a 320x240 window; the work is deferred until the task runs. */
  val frameTask: Task[JFrame] =
    Task.delay {
      val frame = new JFrame
      frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE)
      frame.setSize(320, 240)
      frame.setVisible(true)
      frame
    }
  /** Stream of mouse events delivered via a `MouseListener` on `frame`.
    *
    * NOTE(review): `Process.eval(Task.async(...))` evaluates the task once and
    * the callback completes it on the first event, so each evaluation appears
    * to register a fresh listener and deliver only a single event - confirm
    * against scalaz-stream's intended idiom (e.g. `repeatEval` or an async queue).
    */
  def mouseEvents(frame: JFrame): Process[Task, MouseEvent] = {
    def readEvents(callback: Throwable \\/ MouseEvent => Unit): Unit = {
      println("adding mouse listener")
      frame.addMouseListener(new CallbackMouseListener(callback))
    }
    Process.eval(Task.async(readEvents))
  }
  /** Stream of mouse motion events delivered via a `MouseMotionListener` on `frame`.
    * Same single-event caveat as [[mouseEvents]].
    */
  def mouseMotions(frame: JFrame): Process[Task, MouseEvent] = {
    def readEvents(callback: Throwable \\/ MouseEvent => Unit): Unit = {
      println("adding mouse motion listener")
      frame.addMouseMotionListener(new CallbackMouseMotionListener(callback))
    }
    Process.eval(Task.async(readEvents))
  }
  // Build the frame, attach both event streams to it...
  val p = Process.eval(frameTask).flatMap { frame =>
    Process(mouseEvents(frame), mouseMotions(frame))
  }
  // ...then merge them, print every event to stdout, and run to completion.
  merge.mergeN(p)
    .map(_.toString)
    .through(io.stdOutLines)
    .run.run
}
| fthomas/swing-streams | src/test/scala/swing/streams/examples/Dragging.scala | Scala | gpl-3.0 | 1,224 |
package org.jetbrains.sbt
package project.settings
import com.intellij.util.messages.Topic
/**
* @author Pavel Fatin
*/
/** IntelliJ message-bus topic used to publish changes to SBT-specific project settings. */
object SbtTopic extends Topic[SbtProjectSettingsListener]("SBT-specific settings", classOf[SbtProjectSettingsListener])
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.tools.nsc.util
import collection.mutable, mutable.ListBuffer
import java.lang.System.lineSeparator
private[util] trait StackTracing extends Any {

  /** Format a stack trace, returning the prefix consisting of frames that satisfy a given predicate.
   *
   *  The rendering mimics the typical output of
   *  [[java.lang.Throwable#printStackTrace()*]]: causes are prefixed with
   *  `Caused by:`, suppressed exceptions with `Suppressed:`, and frames shared
   *  with the enclosing trace are omitted. When the predicate cuts a trace
   *  short, a line of the form `... 3 elided` is appended, by analogy with the
   *  `... 3 more` lines that mark shared trace segments.
   *
   *  @param e the exception
   *  @param p the predicate selecting which frames to keep
   */
  def stackTracePrefixString(e: Throwable)(p: StackTraceElement => Boolean): String = {
    type TraceRelation = String
    val Self       = new TraceRelation("")
    val CausedBy   = new TraceRelation("Caused by: ")
    val Suppressed = new TraceRelation("Suppressed: ")

    // "fully.qualified.ClassName: message", falling back to the cause's header
    // when this throwable has no message of its own.
    def header(t: Throwable): String = {
      def fromCause = t.getCause match { case null => null; case c => header(c) }
      val msg = t.getMessage match { case null => fromCause; case s => s }
      val suffix = msg match { case null => ""; case s => s": $s" }
      s"${t.getClass.getName}$suffix"
    }

    // Guard against cycles: render each throwable at most once.
    val visited = mutable.Set.empty[Throwable]
    def firstVisit(t: Throwable): Boolean = {
      val fresh = t != null && !visited(t)
      if (fresh) visited += t
      fresh
    }

    val out = ListBuffer.empty[String]

    // Append the formatted trace of `t`, skipping the tail of frames it shares
    // with the enclosing trace (`shared`); `depth` controls the left margin.
    def emit(t: Throwable, relation: TraceRelation, shared: Array[StackTraceElement], depth: Int): Unit =
      if (firstVisit(t)) {
        val trace = t.getStackTrace
        val frames =
          if (shared.isEmpty) trace
          else {
            val spare = shared.reverseIterator
            trace.reverse.dropWhile(frame => spare.hasNext && spare.next() == frame).reverse
          }
        val prefix = frames.takeWhile(p)
        val margin = " " * depth
        out += s"$margin$relation${header(t)}"
        for (frame <- prefix) out += s"$margin at $frame"

        val shared_ = trace.length - frames.length   // frames shared with the enclosing trace
        val elided  = frames.length - prefix.length  // frames rejected by the predicate
        if (shared_ > 0) {
          if (elided > 0) out += s"$margin ... $elided elided and $shared_ more"
          else out += s"$margin ... $shared_ more"
        } else if (elided > 0) out += s"$margin ... $elided elided"

        emit(t.getCause, CausedBy, trace, depth)
        t.getSuppressed.foreach(emit(_, Suppressed, frames, depth + 1))
      }

    emit(e, Self, shared = Array.empty, depth = 0)
    out.mkString(lineSeparator)
  }
}
| scala/scala | src/compiler/scala/tools/nsc/util/StackTracing.scala | Scala | apache-2.0 | 3,054 |
/*
* JBoss, Home of Professional Open Source
* Copyright 2010 Red Hat Inc. and/or its affiliates and other
* contributors as indicated by the @author tags. All rights reserved.
* See the copyright.txt in the distribution for a full listing of
* individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.infinispan.server.core
import org.infinispan.manager.EmbeddedCacheManager
import java.util.Properties
import org.jboss.netty.handler.codec.oneone.OneToOneEncoder
import org.jboss.netty.channel.ChannelHandler
/**
* Represents a protocol compliant server.
*
* @author Galder Zamarreño
* @since 4.1
*/
trait ProtocolServer {
   /**
    * Starts the server backed by the given cache manager and with the corresponding properties. If properties object
    * is either null or empty, default values depending on the server type are assumed. Note that properties mandate
    * String keys and values. Accepted property keys and default values are listed in {@link Main} class.
    */
   def start(properties: Properties, cacheManager: EmbeddedCacheManager)
   /**
    * Overloaded method that starts the server by using a properties file. This is particularly useful if trying to
    * start the cache through a beans.xml file or similar.
    */
   def start(propertiesFileName: String, cacheManager: EmbeddedCacheManager)
   /**
    * Stops the server. Expected to be called after one of the start overloads.
    */
   // NOTE(review): side-effecting 0-arity member declared without parentheses
   // or an explicit result type; implementers inherit that shape.
   def stop
   /**
    * Gets the encoder for this protocol server. The encoder is responsible for writing back common header responses
    * back to client. This method can return null if the server has no encoder. You can find an example of the server
    * that has no encoder in the Memcached server.
    */
   def getEncoder: OneToOneEncoder
   /**
    * Gets the decoder for this protocol server. The decoder is responsible for reading client requests.
    * This method cannot return null.
    */
   def getDecoder: ChannelHandler
}
| nmldiegues/stibt | infinispan/server/core/src/main/scala/org/infinispan/server/core/ProtocolServer.scala | Scala | apache-2.0 | 2,674 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.columnar.compression
import java.nio.{ByteBuffer, ByteOrder}
import java.nio.charset.StandardCharsets
import org.apache.commons.lang3.RandomStringUtils
import org.apache.commons.math3.distribution.LogNormalDistribution
import org.apache.spark.sql.catalyst.expressions.GenericInternalRow
import org.apache.spark.sql.execution.columnar.{BOOLEAN, INT, LONG, NativeColumnType, SHORT, STRING}
import org.apache.spark.sql.types.AtomicType
import org.apache.spark.util.Benchmark
import org.apache.spark.util.Utils._
/**
* Benchmark to decoders using various compression schemes.
*/
object CompressionSchemeBenchmark extends AllCompressionSchemes {

  /** Allocates a heap byte buffer in the platform's native byte order. */
  private[this] def allocateLocal(size: Int): ByteBuffer = {
    ByteBuffer.allocate(size).order(ByteOrder.nativeOrder)
  }

  // Sampler over a log-normal with sigma = 0.01: values cluster tightly around 1 (low skew).
  private[this] def genLowerSkewData() = {
    val rng = new LogNormalDistribution(0.0, 0.01)
    () => rng.sample
  }

  // Sampler over a log-normal with sigma = 1.0: heavy right tail (higher skew).
  private[this] def genHigherSkewData() = {
    val rng = new LogNormalDistribution(0.0, 1.0)
    () => rng.sample
  }

  /** Feeds `count` values from `input` through `supportedScheme`'s encoder and
    * returns the encoder's compress function, the achieved compression ratio,
    * and an output buffer sized for the compressed data plus 4 leading bytes.
    */
  private[this] def prepareEncodeInternal[T <: AtomicType](
      count: Int,
      tpe: NativeColumnType[T],
      supportedScheme: CompressionScheme,
      input: ByteBuffer): ((ByteBuffer, ByteBuffer) => ByteBuffer, Double, ByteBuffer) = {
    assert(supportedScheme.supports(tpe))

    def toRow(d: Any) = new GenericInternalRow(Array[Any](d))
    val encoder = supportedScheme.encoder(tpe)
    for (i <- 0 until count) {
      encoder.gatherCompressibilityStats(toRow(tpe.extract(input)), 0)
    }
    input.rewind()

    // Some encoders report 0 until they compress; fall back to the raw size.
    val compressedSize = if (encoder.compressedSize == 0) {
      input.remaining()
    } else {
      encoder.compressedSize
    }

    (encoder.compress, encoder.compressionRatio, allocateLocal(4 + compressedSize))
  }

  /** Benchmarks encoding `input` with every registered scheme that supports `tpe`. */
  private[this] def runEncodeBenchmark[T <: AtomicType](
      name: String,
      iters: Int,
      count: Int,
      tpe: NativeColumnType[T],
      input: ByteBuffer): Unit = {
    val benchmark = new Benchmark(name, iters * count.toLong)

    schemes.filter(_.supports(tpe)).foreach { scheme =>
      val (compressFunc, compressionRatio, buf) = prepareEncodeInternal(count, tpe, scheme, input)
      val label = s"${getFormattedClassName(scheme)}(${compressionRatio.formatted("%.3f")})"

      benchmark.addCase(label)({ i: Int =>
        for (n <- 0L until iters) {
          compressFunc(input, buf)
          input.rewind()
          buf.rewind()
        }
      })
    }

    benchmark.run()
  }

  /** Benchmarks decoding of `input` after compressing it once with each supported scheme. */
  private[this] def runDecodeBenchmark[T <: AtomicType](
      name: String,
      iters: Int,
      count: Int,
      tpe: NativeColumnType[T],
      input: ByteBuffer): Unit = {
    val benchmark = new Benchmark(name, iters * count.toLong)

    schemes.filter(_.supports(tpe)).foreach { scheme =>
      val (compressFunc, _, buf) = prepareEncodeInternal(count, tpe, scheme, input)
      val compressedBuf = compressFunc(input, buf)
      val label = s"${getFormattedClassName(scheme)}"

      input.rewind()

      benchmark.addCase(label)({ i: Int =>
        val rowBuf = new GenericInternalRow(1)

        for (n <- 0L until iters) {
          // skip the 4 leading bytes reserved when the buffer was allocated
          compressedBuf.rewind.position(4)
          val decoder = scheme.decoder(compressedBuf, tpe)
          while (decoder.hasNext) {
            decoder.next(rowBuf, 0)
          }
        }
      })
    }

    benchmark.run()
  }

  def bitEncodingBenchmark(iters: Int): Unit = {
    val count = 65536
    val testData = allocateLocal(count * BOOLEAN.defaultSize)

    val g = {
      val rng = genLowerSkewData()
      () => (rng().toInt % 2).toByte
    }
    for (i <- 0 until count) {
      testData.put(i * BOOLEAN.defaultSize, g())
    }

    // Intel(R) Core(TM) i7-4578U CPU @ 3.00GHz
    // BOOLEAN Encode:                     Best/Avg Time(ms)    Rate(M/s)   Per Row(ns)   Relative
    // -------------------------------------------------------------------------------------------
    // PassThrough(1.000)                          3 /    4      19300.2           0.1       1.0X
    // RunLengthEncoding(2.491)                  923 /  939         72.7          13.8       0.0X
    // BooleanBitSet(0.125)                      359 /  363        187.1           5.3       0.0X
    runEncodeBenchmark("BOOLEAN Encode", iters, count, BOOLEAN, testData)

    // Intel(R) Core(TM) i7-4578U CPU @ 3.00GHz
    // BOOLEAN Decode:                     Best/Avg Time(ms)    Rate(M/s)   Per Row(ns)   Relative
    // -------------------------------------------------------------------------------------------
    // PassThrough                               129 /  136        519.8           1.9       1.0X
    // RunLengthEncoding                         613 /  623        109.4           9.1       0.2X
    // BooleanBitSet                            1196 / 1222         56.1          17.8       0.1X
    runDecodeBenchmark("BOOLEAN Decode", iters, count, BOOLEAN, testData)
  }

  def shortEncodingBenchmark(iters: Int): Unit = {
    val count = 65536
    val testData = allocateLocal(count * SHORT.defaultSize)

    val g1 = genLowerSkewData()
    for (i <- 0 until count) {
      testData.putShort(i * SHORT.defaultSize, g1().toShort)
    }

    // Intel(R) Core(TM) i7-4578U CPU @ 3.00GHz
    // SHORT Encode (Lower Skew):          Best/Avg Time(ms)    Rate(M/s)   Per Row(ns)   Relative
    // -------------------------------------------------------------------------------------------
    // PassThrough(1.000)                          6 /    7      10971.4           0.1       1.0X
    // RunLengthEncoding(1.510)                 1526 / 1542         44.0          22.7       0.0X
    runEncodeBenchmark("SHORT Encode (Lower Skew)", iters, count, SHORT, testData)

    // Intel(R) Core(TM) i7-4578U CPU @ 3.00GHz
    // SHORT Decode (Lower Skew):          Best/Avg Time(ms)    Rate(M/s)   Per Row(ns)   Relative
    // -------------------------------------------------------------------------------------------
    // PassThrough                               811 /  837         82.8          12.1       1.0X
    // RunLengthEncoding                        1219 / 1266         55.1          18.2       0.7X
    runDecodeBenchmark("SHORT Decode (Lower Skew)", iters, count, SHORT, testData)

    val g2 = genHigherSkewData()
    for (i <- 0 until count) {
      testData.putShort(i * SHORT.defaultSize, g2().toShort)
    }

    // Intel(R) Core(TM) i7-4578U CPU @ 3.00GHz
    // SHORT Encode (Higher Skew):         Best/Avg Time(ms)    Rate(M/s)   Per Row(ns)   Relative
    // -------------------------------------------------------------------------------------------
    // PassThrough(1.000)                          7 /    7      10112.4           0.1       1.0X
    // RunLengthEncoding(2.009)                 1623 / 1661         41.4          24.2       0.0X
    runEncodeBenchmark("SHORT Encode (Higher Skew)", iters, count, SHORT, testData)

    // Intel(R) Core(TM) i7-4578U CPU @ 3.00GHz
    // SHORT Decode (Higher Skew):         Best/Avg Time(ms)    Rate(M/s)   Per Row(ns)   Relative
    // -------------------------------------------------------------------------------------------
    // PassThrough                               818 /  827         82.0          12.2       1.0X
    // RunLengthEncoding                        1202 / 1237         55.8          17.9       0.7X
    runDecodeBenchmark("SHORT Decode (Higher Skew)", iters, count, SHORT, testData)
  }

  def intEncodingBenchmark(iters: Int): Unit = {
    val count = 65536
    val testData = allocateLocal(count * INT.defaultSize)

    val g1 = genLowerSkewData()
    for (i <- 0 until count) {
      testData.putInt(i * INT.defaultSize, g1().toInt)
    }

    // Intel(R) Core(TM) i7-4578U CPU @ 3.00GHz
    // INT Encode (Lower Skew):            Best/Avg Time(ms)    Rate(M/s)   Per Row(ns)   Relative
    // -------------------------------------------------------------------------------------------
    // PassThrough(1.000)                         18 /   19       3716.4           0.3       1.0X
    // RunLengthEncoding(1.001)                 1992 / 2056         33.7          29.7       0.0X
    // DictionaryEncoding(0.500)                 723 /  739         92.8          10.8       0.0X
    // IntDelta(0.250)                           368 /  377        182.2           5.5       0.0X
    runEncodeBenchmark("INT Encode (Lower Skew)", iters, count, INT, testData)

    // Intel(R) Core(TM) i7-4578U CPU @ 3.00GHz
    // INT Decode (Lower Skew):            Best/Avg Time(ms)    Rate(M/s)   Per Row(ns)   Relative
    // -------------------------------------------------------------------------------------------
    // PassThrough                               821 /  845         81.8          12.2       1.0X
    // RunLengthEncoding                        1246 / 1256         53.9          18.6       0.7X
    // DictionaryEncoding                        757 /  766         88.6          11.3       1.1X
    // IntDelta                                  680 /  689         98.7          10.1       1.2X
    runDecodeBenchmark("INT Decode (Lower Skew)", iters, count, INT, testData)

    val g2 = genHigherSkewData()
    for (i <- 0 until count) {
      testData.putInt(i * INT.defaultSize, g2().toInt)
    }

    // Intel(R) Core(TM) i7-4578U CPU @ 3.00GHz
    // INT Encode (Higher Skew):           Best/Avg Time(ms)    Rate(M/s)   Per Row(ns)   Relative
    // -------------------------------------------------------------------------------------------
    // PassThrough(1.000)                         17 /   19       3888.4           0.3       1.0X
    // RunLengthEncoding(1.339)                 2127 / 2148         31.5          31.7       0.0X
    // DictionaryEncoding(0.501)                 960 /  972         69.9          14.3       0.0X
    // IntDelta(0.250)                           362 /  366        185.5           5.4       0.0X
    runEncodeBenchmark("INT Encode (Higher Skew)", iters, count, INT, testData)

    // Intel(R) Core(TM) i7-4578U CPU @ 3.00GHz
    // INT Decode (Higher Skew):           Best/Avg Time(ms)    Rate(M/s)   Per Row(ns)   Relative
    // -------------------------------------------------------------------------------------------
    // PassThrough                               838 /  884         80.1          12.5       1.0X
    // RunLengthEncoding                        1287 / 1311         52.1          19.2       0.7X
    // DictionaryEncoding                        844 /  859         79.5          12.6       1.0X
    // IntDelta                                  764 /  784         87.8          11.4       1.1X
    runDecodeBenchmark("INT Decode (Higher Skew)", iters, count, INT, testData)
  }

  def longEncodingBenchmark(iters: Int): Unit = {
    val count = 65536
    val testData = allocateLocal(count * LONG.defaultSize)

    val g1 = genLowerSkewData()
    for (i <- 0 until count) {
      testData.putLong(i * LONG.defaultSize, g1().toLong)
    }

    // Intel(R) Core(TM) i7-4578U CPU @ 3.00GHz
    // LONG Encode (Lower Skew):           Best/Avg Time(ms)    Rate(M/s)   Per Row(ns)   Relative
    // -------------------------------------------------------------------------------------------
    // PassThrough(1.000)                         37 /   38       1804.8           0.6       1.0X
    // RunLengthEncoding(0.748)                 2065 / 2094         32.5          30.8       0.0X
    // DictionaryEncoding(0.250)                 950 /  962         70.6          14.2       0.0X
    // LongDelta(0.125)                          475 /  482        141.2           7.1       0.1X
    runEncodeBenchmark("LONG Encode (Lower Skew)", iters, count, LONG, testData)

    // Intel(R) Core(TM) i7-4578U CPU @ 3.00GHz
    // LONG Decode (Lower Skew):           Best/Avg Time(ms)    Rate(M/s)   Per Row(ns)   Relative
    // -------------------------------------------------------------------------------------------
    // PassThrough                               888 /  894         75.5          13.2       1.0X
    // RunLengthEncoding                        1301 / 1311         51.6          19.4       0.7X
    // DictionaryEncoding                        887 /  904         75.7          13.2       1.0X
    // LongDelta                                 693 /  735         96.8          10.3       1.3X
    runDecodeBenchmark("LONG Decode (Lower Skew)", iters, count, LONG, testData)

    val g2 = genHigherSkewData()
    for (i <- 0 until count) {
      testData.putLong(i * LONG.defaultSize, g2().toLong)
    }

    // Intel(R) Core(TM) i7-4578U CPU @ 3.00GHz
    // LONG Encode (Higher Skew):          Best/Avg Time(ms)    Rate(M/s)   Per Row(ns)   Relative
    // -------------------------------------------------------------------------------------------
    // PassThrough(1.000)                         34 /   35       1963.9           0.5       1.0X
    // RunLengthEncoding(0.999)                 2260 / 3021         29.7          33.7       0.0X
    // DictionaryEncoding(0.251)                1270 / 1438         52.8          18.9       0.0X
    // LongDelta(0.125)                          496 /  509        135.3           7.4       0.1X
    runEncodeBenchmark("LONG Encode (Higher Skew)", iters, count, LONG, testData)

    // Intel(R) Core(TM) i7-4578U CPU @ 3.00GHz
    // LONG Decode (Higher Skew):          Best/Avg Time(ms)    Rate(M/s)   Per Row(ns)   Relative
    // -------------------------------------------------------------------------------------------
    // PassThrough                               965 / 1494         69.5          14.4       1.0X
    // RunLengthEncoding                        1350 / 1378         49.7          20.1       0.7X
    // DictionaryEncoding                        892 /  924         75.2          13.3       1.1X
    // LongDelta                                 817 /  847         82.2          12.2       1.2X
    runDecodeBenchmark("LONG Decode (Higher Skew)", iters, count, LONG, testData)
  }

  def stringEncodingBenchmark(iters: Int): Unit = {
    val count = 65536
    val strLen = 8
    val tableSize = 16
    val testData = allocateLocal(count * (4 + strLen))

    val g = {
      val dataTable = (0 until tableSize).map(_ => RandomStringUtils.randomAlphabetic(strLen))
      val rng = genHigherSkewData()
      () => dataTable(rng().toInt % tableSize)
    }
    for (i <- 0 until count) {
      testData.putInt(strLen)
      testData.put(g().getBytes(StandardCharsets.UTF_8))
    }
    testData.rewind()

    // Intel(R) Core(TM) i7-4578U CPU @ 3.00GHz
    // STRING Encode:                      Best/Avg Time(ms)    Rate(M/s)   Per Row(ns)   Relative
    // -------------------------------------------------------------------------------------------
    // PassThrough(1.000)                         56 /   57       1197.9           0.8       1.0X
    // RunLengthEncoding(0.893)                 4892 / 4937         13.7          72.9       0.0X
    // DictionaryEncoding(0.167)                2968 / 2992         22.6          44.2       0.0X
    runEncodeBenchmark("STRING Encode", iters, count, STRING, testData)

    // Intel(R) Core(TM) i7-4578U CPU @ 3.00GHz
    // STRING Decode:                      Best/Avg Time(ms)    Rate(M/s)   Per Row(ns)   Relative
    // -------------------------------------------------------------------------------------------
    // PassThrough                              2422 / 2449         27.7          36.1       1.0X
    // RunLengthEncoding                        2885 / 3018         23.3          43.0       0.8X
    // DictionaryEncoding                       2716 / 2752         24.7          40.5       0.9X
    runDecodeBenchmark("STRING Decode", iters, count, STRING, testData)
  }

  /** Entry point: runs every benchmark with 1024 iterations. */
  def main(args: Array[String]): Unit = {
    bitEncodingBenchmark(1024)
    shortEncodingBenchmark(1024)
    intEncodingBenchmark(1024)
    longEncodingBenchmark(1024)
    stringEncodingBenchmark(1024)
  }
}
| lxsmnv/spark | sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/CompressionSchemeBenchmark.scala | Scala | apache-2.0 | 16,716 |
package scala.meta
package internal
package parsers
import scala.{Seq => _}
import scala.collection.immutable.Seq
import scala.reflect.ClassTag
import org.scalameta.invariants._
import org.scalameta.unreachable
import scala.meta.internal.ast._
import scala.meta.internal.tokenizers.Chars.{isOperatorPart, isScalaLetter}
private[meta] object Helpers {
private[meta] val unaryOps = Set("-", "+", "~", "!")
private[meta] def isUnaryOp(s: String): Boolean = unaryOps contains s
  implicit class XtensionSyntacticTermName(name: Term.Name) {
    import name._
    /** Operators whose name doesn't end in `:` associate to the left. */
    def isLeftAssoc: Boolean = value.last != ':'
    def isUnaryOp: Boolean = Helpers.isUnaryOp(value)
    /** Compound assignment operators like `+=` count as assignments;
      * the comparison operators `!=`, `<=`, `>=` explicitly do not. */
    def isAssignmentOp = value match {
      case "!=" | "<=" | ">=" | "" => false
      case _ => (value.last == '=' && value.head != '='
        && isOperatorPart(value.head))
    }
    // opPrecedence?
    /** Binding strength of this name as an infix operator: assignment
      * operators bind weakest (0), alphanumeric names next (1), then
      * symbolic operators by their first character, up to 10. */
    def precedence: Int =
      if (isAssignmentOp) 0
      else if (isScalaLetter(value.head)) 1
      else (value.head: @scala.annotation.switch) match {
        case '|' => 2
        case '^' => 3
        case '&' => 4
        case '=' | '!' => 5
        case '<' | '>' => 6
        case ':' => 7
        case '+' | '-' => 8
        case '*' | '/' | '%' => 9
        case _ => 10
      }
  }
  implicit class XtensionTermOps(tree: Term) {
    /** Is this term a constructor reference, possibly wrapped in type
      * applications, argument applications or an annotation ascription? */
    def isCtorCall: Boolean = tree match {
      case _: Ctor.Ref => true
      case Term.ApplyType(callee, _) => callee.isCtorCall
      case Term.Apply(callee, _) => callee.isCtorCall
      case Term.Annotate(annottee, _) => annottee.isCtorCall
      case _ => false
    }
    /** The type constructed by this constructor call, rebuilt from the
      * callee reference (e.g. `Ctor.Ref.Select` becomes `Type.Select`). */
    def ctorTpe: Type = {
      def loop(tree: Tree): Type = tree match {
        case Ctor.Ref.Name(value) => Type.Name(value)
        case Ctor.Ref.Select(qual, name) => Type.Select(qual, Type.Name(name.value))
        case Ctor.Ref.Project(qual, name) => Type.Project(qual, Type.Name(name.value))
        // a bare Ctor.Ref.Function is only legal under an ApplyType (handled below)
        case Ctor.Ref.Function(_) => unreachable(debug(XtensionTermOps.this.tree, XtensionTermOps.this.tree.show[Structure]))
        case Term.ApplyType(Ctor.Ref.Function(_), targs) => Type.Function(targs.init, targs.last)
        case Term.ApplyType(callee, targs) => Type.Apply(loop(callee), targs)
        case Term.Apply(callee, _) => callee.ctorTpe
        case Term.Annotate(annottee, annots) => Type.Annotate(loop(annottee), annots)
        case _ => unreachable(debug(XtensionTermOps.this.tree, XtensionTermOps.this.tree.show[Structure], tree, tree.show[Structure]))
      }
      loop(tree)
    }
    /** All argument lists of this constructor call, in source order. */
    def ctorArgss: Seq[Seq[Term.Arg]] = {
      def loop(tree: Tree): Seq[Seq[Term.Arg]] = tree match {
        case _: Ctor.Ref => Nil
        case Term.ApplyType(callee, _) => callee.ctorArgss
        case Term.Apply(callee, args) => callee.ctorArgss :+ args
        case Term.Annotate(annottee, _) => annottee.ctorArgss
        case _ => unreachable(debug(XtensionTermOps.this.tree, XtensionTermOps.this.tree.show[Structure]))
      }
      loop(tree)
    }
    /** Is this term shaped like a constructor body: a super constructor call,
      * or a block whose first statement is one? */
    def isCtorBody: Boolean = {
      def isSuperCall(tree: Tree): Boolean = tree match {
        case _: Ctor.Name => true
        case Term.Apply(fn, _) => isSuperCall(fn)
        case _ => false // you can't write `this[...](...)`
      }
      tree match {
        case Term.Block(superCall +: _) => isSuperCall(superCall)
        case superCall => isSuperCall(superCall)
      }
    }
  }
implicit class XtensionTermRefOps(tree: Term.Ref) {
def isPath: Boolean = tree.isStableId || tree.isInstanceOf[Term.This]
def isQualId: Boolean = tree match {
case _: Term.Name => true
case Term.Select(qual: Term.Ref, _) => qual.isQualId
case _ => false
}
def isStableId: Boolean = tree match {
case _: Term.Name | Term.Select(_: Term.Super, _) => true
case Term.Select(qual: Term.Ref, _) => qual.isPath
case _ => false
}
}
implicit class XtensionMod(mod: Mod) {
def hasAccessBoundary: Boolean = mod match {
case _: Mod.Private => true
case _: Mod.Protected => true
case _ => false
}
}
implicit class XtensionMods(mods: List[Mod]) {
def has[T <: Mod](implicit tag: ClassTag[T]): Boolean =
mods.exists { _.getClass == tag.runtimeClass }
def getAll[T <: Mod](implicit tag: ClassTag[T]): List[T] =
mods.collect { case m if m.getClass == tag.runtimeClass => m.require[T] }
def accessBoundary: Option[Name.Qualifier] = mods.collectFirst{ case Mod.Private(name) => name; case Mod.Protected(name) => name }
}
implicit class XtensionStat(stat: Stat) {
def isTopLevelStat: Boolean = stat match {
case _: Stat.Quasi => true
case _: Import => true
case _: Pkg => true
case _: Defn.Class => true
case _: Defn.Trait => true
case _: Defn.Object => true
case _: Pkg.Object => true
case _ => false
}
def isTemplateStat: Boolean = stat match {
case _: Stat.Quasi => true
case _: Import => true
case _: Term => true
case _: Decl => true
case _: Defn => true
case _: Ctor.Secondary => true
case _ => false
}
def isBlockStat: Boolean = stat match {
case _: Stat.Quasi => true
case _: Import => true
case _: Term => true
case stat: Defn.Var => stat.rhs.isDefined
case _: Defn => true
case _ => false
}
def isRefineStat: Boolean = stat match {
case _: Stat.Quasi => true
case _: Decl => true
case _: Defn.Type => true
case _ => false
}
def isExistentialStat: Boolean = stat match {
case _: Stat.Quasi => true
case _: Decl.Val => true
case _: Decl.Type => true
case _ => false
}
def isEarlyStat: Boolean = stat match {
case _: Stat.Quasi => true
case _: Defn.Val => true
case _: Defn.Var => true
case _ => false
}
}
implicit class XtensionCase(tree: Case) {
def stats: Seq[Stat] = tree.body match {
case Term.Block(stats) => stats
case body => List(body)
}
}
}
| smarter/scalameta | scalameta/src/main/scala/scala/meta/internal/parsers/Helpers.scala | Scala | bsd-3-clause | 6,249 |
package io.mem0r1es.trank.pipeline
import de.l3s.boilerpipe.extractors.ArticleExtractor
import de.l3s.boilerpipe.extractors.DefaultExtractor
object PreProcessor {

  /**
   * Runs the content pre-processing step (e.g., HTML boilerplate removal)
   *
   * @param content raw page content (typically HTML)
   * @return the extracted main text, or the original content unchanged when
   *         no extractor produced a non-empty result
   */
  def preProcess(content: String): String = {
    boilerpipe(content)
  }

  /**
   * Strips HTML boilerplate using Boilerpipe.
   *
   * Gives priority to the ArticleExtractor, allegedly leading to better
   * results; falls back to the DefaultExtractor only when the article
   * extractor yields nothing (lazily, so the fallback is not computed when
   * unneeded — the original ran both eagerly). Extractors may return null,
   * hence the Option wrapping.
   */
  private def boilerpipe(content: String): String = {
    // Wraps a possibly-null extractor result, discarding empty strings.
    def nonEmptyText(extract: => String): Option[String] =
      Option(extract).filter(_.nonEmpty)

    nonEmptyText(ArticleExtractor.INSTANCE.getText(content))
      .orElse(nonEmptyText(DefaultExtractor.INSTANCE.getText(content)))
      .getOrElse(content)
  }
}
| homerquan/TRank | src/main/scala/io/mem0r1es/trank/pipeline/PreProcessor.scala | Scala | apache-2.0 | 784 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.tools.tasty
import java.util.UUID
import TastyFormat.{MajorVersion, MinorVersion, ExperimentalVersion, header}
class TastyHeaderUnpickler(reader: TastyReader) {
  import TastyHeaderUnpickler._
  import reader._
  def this(bytes: Array[Byte]) = this(new TastyReader(bytes))
  /** reads and verifies the TASTy version, extracting the UUID */
  def readHeader(): UUID = {
    // Magic bytes: every TASTy file starts with the fixed `header` sequence.
    for (i <- 0 until header.length)
      check(readByte() == header(i), "not a TASTy file")
    val fileMajor = readNat()
    if (fileMajor <= 27) { // old behavior before `tasty-core` 3.0.0-RC1
      // Old layout has no experimental-version or tooling-string fields.
      val fileMinor = readNat()
      val signature = signatureString(fileMajor, fileMinor, 0)
      throw new UnpickleException(signature + backIncompatAddendum + toolingAddendum)
    }
    else {
      val fileMinor = readNat()
      val fileExperimental = readNat()
      // The header embeds a UTF-8 tooling-version string of `toolingLength`
      // bytes; skip past it but remember where it starts so error messages
      // can report which compiler produced the file.
      val toolingLength = readNat()
      val toolingStart = {
        val start = currentAddr
        val end = start + toolingLength
        goto(end)
        start
      }
      val validVersion = TastyFormat.isVersionCompatible(
        fileMajor            = fileMajor,
        fileMinor            = fileMinor,
        fileExperimental     = fileExperimental,
        compilerMajor        = MajorVersion,
        compilerMinor        = MinorVersion,
        compilerExperimental = ExperimentalVersion
      )
      // Message is built lazily (by-name) — only on version mismatch.
      check(validVersion, {
        val signature = signatureString(fileMajor, fileMinor, fileExperimental)
        val toolingVersion = new String(bytes, toolingStart.index, toolingLength)
        val producedByAddendum = s"\nThe TASTy file was produced by $toolingVersion.$toolingAddendum"
        val msg = (
          if (fileExperimental != 0) unstableAddendum
          else if (fileMajor < MajorVersion) backIncompatAddendum
          else forwardIncompatAddendum
        )
        signature + msg + producedByAddendum
      })
      // The version fields are followed by the 128-bit file UUID.
      new UUID(readUncompressedLong(), readUncompressedLong())
    }
  }
  private def check(cond: Boolean, msg: => String): Unit = {
    if (!cond) throw new UnpickleException(msg)
  }
}
object TastyHeaderUnpickler {
  // Extra hint appended when the *reading* tooling itself is experimental.
  private def toolingAddendum = (
    if (ExperimentalVersion > 0)
      "\nNote that your tooling is currently using an unstable TASTy version."
    else
      ""
  )
  // Renders an expected-vs-found version report for error messages.
  private def signatureString(fileMajor: Int, fileMinor: Int, fileExperimental: Int) = {
    def showMinorVersion(min: Int, exp: Int) = {
      val expStr = if (exp == 0) "" else s" [unstable release: $exp]"
      s"$min$expStr"
    }
    val minorVersion = showMinorVersion(MinorVersion, ExperimentalVersion)
    val fileMinorVersion = showMinorVersion(fileMinor, fileExperimental)
    s"""TASTy signature has wrong version.
      | expected: {majorVersion: $MajorVersion, minorVersion: $minorVersion}
      | found   : {majorVersion: $fileMajor, minorVersion: $fileMinorVersion}
      |
      |""".stripMargin
  }
  private def unstableAddendum =
    """This TASTy file was produced by an unstable release.
      |To read this TASTy file, your tooling must be at the same version.""".stripMargin
  private def backIncompatAddendum =
    """This TASTy file was produced by an earlier release that is not supported anymore.
      |Please recompile this TASTy with a later version.""".stripMargin
  private def forwardIncompatAddendum =
    """This TASTy file was produced by a more recent, forwards incompatible release.
      |To read this TASTy file, please upgrade your tooling.""".stripMargin
}
| scala/scala | src/compiler/scala/tools/tasty/TastyHeaderUnpickler.scala | Scala | apache-2.0 | 3,790 |
package naming
import eu.inn.binders.naming.{SnakeCaseParser, CamelCaseBuilder}
import org.scalatest.{FlatSpec, Matchers}
class TestSnakeCaseParser extends FlatSpec with Matchers {
  // Round-trips a snake_case identifier through the parser into a
  // CamelCase builder and checks the expected camelCase rendering.
  "SnakeCaseParser " should " parse string_like_this " in {
    val parser = new SnakeCaseParser()
    val builder = new CamelCaseBuilder()
    parser.parse("string_like_this", builder)
    val result = builder.toString
    assert(result == "stringLikeThis")
  }
} | InnovaCo/binders | src/test/scala/naming/TestSnakeCaseParser.scala | Scala | bsd-3-clause | 450 |
/*
* Copyright (c) 2017-2022 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.snowplow.eventpopulator
// scalaz
import scalaz.{Ordering => _, _}
import Scalaz._
// Scala
import scala.util.control.NonFatal
import scala.collection.convert.decorateAsScala._
// AWS SDK
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain
import com.amazonaws.services.s3.AmazonS3ClientBuilder
import com.amazonaws.services.s3.model.{ListObjectsV2Request, ListObjectsV2Result}
// Java
import java.nio.charset.StandardCharsets.UTF_8
// Jackson
import com.fasterxml.jackson.databind.{ ObjectMapper, JsonNode }
// Iglu client
import com.snowplowanalytics.iglu.client.Validated
import com.snowplowanalytics.iglu.client.validation.ProcessingMessageMethods._
// Apache Commons Codec
import org.apache.commons.codec.binary.Base64
// Joda time
import org.joda.time.DateTime
import org.joda.time.format.DateTimeFormat
import org.joda.time.format.DateTimeFormatterBuilder
// This project
import Main.JobConf
/**
* Util functions and constant that not directly related to spark job
*/
object Utils {
  // Column indexes in TSV (EnrichedEvent layout)
  val EtlTimestampIndex = 2
  val EventIdIndex = 6
  val FingerprintIndex = 129
  /**
   * Date format in archive buckets: run=YYYY-MM-dd-HH-mm-ss
   * In EmrEtlRunner: "%Y-%m-%d-%H-%M-%S"
   */
  val runIdFormat = DateTimeFormat.forPattern("YYYY-MM-dd-HH-mm-ss")
  /**
   * Shortened `runIdFormat` that can be used for manual input
   */
  val dayFormat = DateTimeFormat.forPattern("YYYY-MM-dd")
  /**
   * Combined `dayFormat` and `runIdFormat` that can be used
   * both for precise and shorten manual input
   */
  val inputFormatter = new DateTimeFormatterBuilder()
    .append(null, List(runIdFormat.getParser, dayFormat.getParser).toArray)
    .toFormatter
  // Chronological ordering for run-id timestamps (used by `filterSince`).
  implicit def dateTimeOrdering: Ordering[DateTime] =
    Ordering.fromLessThan(_ isBefore _)
  private val UrlSafeBase64 = new Base64(true) // true means "url safe"
  private lazy val Mapper = new ObjectMapper
  /**
   * Data extracted from EnrichedEvent and storing in DynamoDB
   */
  case class DeduplicationTriple(eventId: String, fingerprint: String, etlTstamp: String)
  /**
   * Get run ids from `jobConf.enrichedInBucket` happened since `jobConf.since`
   *
   * @param jobConf input configuration
   * @return list of timestamps (run ids) rendered back to their bucket format
   */
  def getRuns(jobConf: JobConf): List[String] = {
    val runIds = listS3(jobConf)
    val filtered = jobConf.since match {
      case Some(start) => filterSince(runIds, start)
      case None => runIds
    }
    filtered.map(runId => runIdFormat.print(runId))
  }
  /**
   * Get list of all folders inside S3 bucket with enriched archive
   * This function doesn't respect S3 truncation (MaxKeys = 1000)
   * and return **all** keys (it follows continuation tokens).
   *
   * @param jobConf parameters parsed from command line
   * @return list of **dates** (without "run") of for arhives
   */
  def listS3(jobConf: JobConf): List[DateTime] = {
    // Initialize mutable buffer (local-only mutation; never escapes)
    val buffer = collection.mutable.ListBuffer.empty[String]
    var result: ListObjectsV2Result = null
    // Initialize S3
    val credentials = new DefaultAWSCredentialsProviderChain()
    val s3Client = AmazonS3ClientBuilder.standard().withCredentials(credentials).build()
    val (bucket, subpath) = splitS3Path(jobConf.enrichedInBucket)
    val req = new ListObjectsV2Request()
      .withBucketName(bucket)
      .withPrefix(subpath)
      .withDelimiter("/")
    do {
      result = s3Client.listObjectsV2(req)
      val objects = result.getCommonPrefixes.asScala.toList
      buffer ++= objects
      req.setContinuationToken(result.getNextContinuationToken)
    } while(result.isTruncated)
    // Keep only prefixes of the shape `...run=<timestamp>/` that parse as run ids
    buffer.toList.flatMap(_.split("=").lift(1)).flatMap(parseRunId)
  }
  /**
   * Parse run id from enriched event archive dir
   *
   * @param date run timestamp (part after `run=`)
   * @return datetime object if parsed successfully
   */
  def parseRunId(date: String): Option[DateTime] = {
    // Drop the trailing slash S3 appends to common prefixes
    val runId = if (date.endsWith("/")) date.dropRight(1) else date
    try {
      Some(DateTime.parse(runId, runIdFormat))
    } catch {
      case _: IllegalArgumentException => None
    }
  }
  /**
   * Parse date from user input. Precise or shorten
   *
   * @param date timestamp string that can be date or datetime
   * @return datetime object
   * @throws IllegalArgumentException if the string matches neither format
   */
  def parseInput(date: String): DateTime =
    DateTime.parse(date, inputFormatter)
  /**
   * Get sorted list of run ids (dates) started after some point in time
   */
  def filterSince(runs: List[DateTime], start: DateTime): List[DateTime] =
    runs.sorted.dropWhile(run => run.isBefore(start))
  /**
   * Split S3 path into bucket name and prefix
   *
   * @param path S3 full path without `s3://` prefix and with trailing slash
   * @return pair of bucket name and remaining path
   */
  private def splitS3Path(path: String): (String, String) = { // TODO: check that it works on root level bucket
    path.split("/").toList match {
      // Unreachable in practice (String.split never yields an empty array),
      // but added to make the match exhaustive and silence the compiler.
      case Nil => ("", "/")
      case head :: Nil => (head, "/")
      case head :: tail => (head, tail.mkString("/") + "/")
    }
  }
  /**
   * Split `EnrichedEvent` TSV line and extract necessary columns
   *
   * @param line plain `EnrichedEvent` TSV
   * @return deduplication triple encapsulated into special class
   * @throws RuntimeException when the line has too few columns
   */
  def lineToTriple(line: String): DeduplicationTriple = {
    val tsv = line.split('\t')
    try {
      DeduplicationTriple(eventId = tsv(EventIdIndex), etlTstamp = tsv(EtlTimestampIndex), fingerprint = tsv(FingerprintIndex))
    } catch {
      case e: IndexOutOfBoundsException => throw new RuntimeException(s"ERROR: Cannot split TSV [$line]\n${e.toString}")
    }
  }
  /**
   * Converts a base64-encoded JSON string into a JsonNode
   *
   * @param str base64-encoded JSON
   * @return a JsonNode on Success, a NonEmptyList of ProcessingMessages on Failure
   */
  def base64ToJsonNode(str: String): Validated[JsonNode] =
    (for {
      raw <- decodeBase64Url(str)
      node <- extractJson(raw)
    } yield node).toProcessingMessage
  // Parses a raw JSON string; failure carries the parse error message.
  def extractJson(instance: String): Validation[String, JsonNode] =
    try {
      Mapper.readTree(instance).success
    } catch {
      case NonFatal(e) => s"Invalid JSON [%s] with parsing error: %s".format(instance, e.getMessage).failure
    }
  // Decodes a URL-safe Base64 string into UTF-8 text.
  def decodeBase64Url(str: String): Validation[String, String] = {
    try {
      val decodedBytes = UrlSafeBase64.decode(str)
      new String(decodedBytes, UTF_8).success
    } catch {
      case NonFatal(e) =>
        "Exception while decoding Base64-decoding string [%s] (URL-safe encoding): [%s]".format(str, e.getMessage).failure
    }
  }
}
| snowplow/snowplow | 5-data-modeling/event-manifest-populator/src/main/scala/com/snowplowanalytics/snowplow/eventpopulator/Utils.scala | Scala | apache-2.0 | 7,341 |
/* __ *\\
** ________ ___ / / ___ __ ____ Scala.js Test Suite **
** / __/ __// _ | / / / _ | __ / // __/ (c) 2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ |/_// /_\\ \\ http://scala-js.org/ **
** /____/\\___/_/ |_/____/_/ | |__/ /____/ **
** |/____/ **
\\* */
package org.scalajs.testsuite.niocharset
import java.nio._
import java.nio.charset._
import scala.scalajs.niocharset.StandardCharsets
import BaseCharsetTest._
// Shared decode/encode test vectors for all UTF-16 flavors; subclasses pick
// the charset (BE, LE, or BOM-aware UTF-16) and may transform the byte data.
abstract class BaseUTF16Test(charset: Charset) extends BaseCharsetTest(charset) {
  describe(charset.name) {
    it("decode") {
      // ASCII characters
      testDecode(bb"0042 006f 006e 006a 006f 0075 0072")(cb"Bonjour")

      // Other characters without surrogate pairs
      testDecode(bb"0047 0072 00fc 00df 0020 0047 006f 0074 0074")(cb"Grüß Gott")
      testDecode(bb"039a 03b1 03bb 03b7 03bc 03ad 03c1 03b1")(cb"Καλημέρα")
      testDecode(bb"0635 0628 0627 062d 0020 0627 0644 062e 064a 0631")(cb"صباح الخير")
      testDecode(bb"3053 3093 306b 3061 306f")(cb"こんにちは")
      testDecode(bb"0414 043e 0431 0440 044b 0439 0020 0434 0435 043d 044c")(cb"Добрый день")
      testDecode(bb"4f60 597d")(cb"你好")

      // 4-byte characters
      testDecode(bb"d835 dcd7 d835 dcee d835 dcf5 d835 dcf5 d835 dcf8")(
          cb"\ud835\udcd7\ud835\udcee\ud835\udcf5\ud835\udcf5\ud835\udcf8")

      testDecode(bb"")(cb"")

      // Here begin the sequences with at least one error

      // Single UTF-16 surrogates
      testDecode(bb"d800")(Malformed(2))
      testDecode(bb"daff")(Malformed(2))
      testDecode(bb"db80")(Malformed(2))
      testDecode(bb"dbff")(Malformed(2))
      testDecode(bb"dc00")(Malformed(2))
      testDecode(bb"df80")(Malformed(2))
      testDecode(bb"dfff")(Malformed(2))

      // High UTF-16 surrogates not followed by low surrogates
      testDecode(bb"d800 0041")(Malformed(2), cb"A")
      testDecode(bb"d800 d800")(Malformed(2), Malformed(2))
      testDecode(bb"d800 d835 dcd7")(Malformed(2), cb"\ud835\udcd7")
      testDecode(bb"dbff 0041")(Malformed(2), cb"A")
      testDecode(bb"dbff db8f")(Malformed(2), Malformed(2))
      testDecode(bb"dbff d835 dcd7")(Malformed(2), cb"\ud835\udcd7")

      // Lonely byte at the end
      testDecode(bb"0041 41")(cb"A", Malformed(1))
    }

    it("encode") {
      // ASCII characters
      testEncode(cb"Bonjour")(bb"0042 006f 006e 006a 006f 0075 0072")

      // Other characters without surrogate pairs
      testEncode(cb"Grüß Gott")(bb"0047 0072 00fc 00df 0020 0047 006f 0074 0074")
      testEncode(cb"Καλημέρα")(bb"039a 03b1 03bb 03b7 03bc 03ad 03c1 03b1")
      testEncode(cb"صباح الخير")(bb"0635 0628 0627 062d 0020 0627 0644 062e 064a 0631")
      testEncode(cb"こんにちは")(bb"3053 3093 306b 3061 306f")
      testEncode(cb"Добрый день")(bb"0414 043e 0431 0440 044b 0439 0020 0434 0435 043d 044c")
      testEncode(cb"你好")(bb"4f60 597d")

      // 4-byte characters
      testEncode(cb"\ud835\udcd7\ud835\udcee\ud835\udcf5\ud835\udcf5\ud835\udcf8")(
          bb"d835 dcd7 d835 dcee d835 dcf5 d835 dcf5 d835 dcf8")

      testEncode(cb"")(bb"")

      // Here begin the sequences with at least one error

      // Single UTF-16 surrogates
      testEncode(cb"\ud800")(Malformed(1))
      testEncode(cb"\udaff")(Malformed(1))
      testEncode(cb"\udb80")(Malformed(1))
      testEncode(cb"\udbff")(Malformed(1))
      testEncode(cb"\udc00")(Malformed(1))
      testEncode(cb"\udf80")(Malformed(1))
      testEncode(cb"\udfff")(Malformed(1))

      // High UTF-16 surrogates not followed by low surrogates
      testEncode(cb"\ud800A")(Malformed(1), bb"0041")
      testEncode(cb"\ud800\ud800")(Malformed(1), Malformed(1))
      testEncode(cb"\ud800\ud835\udcd7")(Malformed(1), bb"d835 dcd7")
      testEncode(cb"\udbffA")(Malformed(1), bb"0041")
      testEncode(cb"\udbff\udb8f")(Malformed(1), Malformed(1))
      testEncode(cb"\udbff\ud835\udcd7")(Malformed(1), bb"d835 dcd7")
    }
  }
}
// Big-endian UTF-16: byte data in the base tests is already big-endian.
object UTF16BETest extends BaseUTF16Test(StandardCharsets.UTF_16BE)
// Little-endian UTF-16: reuses the big-endian vectors by byte-swapping every
// 16-bit unit in the input/expected byte buffers before running the tests.
object UTF16LETest extends BaseUTF16Test(StandardCharsets.UTF_16LE) {
  override protected def testDecode(in: ByteBuffer)(
      outParts: OutPart[CharBuffer]*): Unit = {
    flipByteBuffer(in)
    super.testDecode(in)(outParts: _*)
  }

  override protected def testEncode(in: CharBuffer)(
      outParts: OutPart[ByteBuffer]*): Unit = {
    // Only expected *byte* buffers need swapping; char buffers are unaffected.
    for (BufferPart(buf) <- outParts)
      flipByteBuffer(buf)
    super.testEncode(in)(outParts: _*)
  }

  /** Flips all pairs of bytes in a byte buffer, except a potential lonely
   *  last byte.
   */
  def flipByteBuffer(buf: ByteBuffer): Unit = {
    buf.mark()
    while (buf.remaining() >= 2) {
      val high = buf.get()
      val low = buf.get()
      buf.position(buf.position - 2)
      buf.put(low)
      buf.put(high)
    }
    buf.reset()
  }
}
// BOM-aware UTF-16: each decode vector is exercised three ways (no BOM =>
// big-endian assumed; BE BOM; LE BOM + swapped bytes); encoding always
// prepends a big-endian BOM for non-empty input.
object UTF16Test extends BaseUTF16Test(StandardCharsets.UTF_16) {
  def BigEndianBOM: ByteBuffer =
    ByteBuffer.wrap(Array(0xfe.toByte, 0xff.toByte))

  override protected def testDecode(in: ByteBuffer)(
      outParts: OutPart[CharBuffer]*): Unit = {
    // Without BOM, big endian is assumed
    super.testDecode(in)(outParts: _*)

    // With BOM, big endian
    val inWithBOM = ByteBuffer.allocate(2+in.remaining)
    inWithBOM.put(BigEndianBOM).put(in).flip()
    super.testDecode(inWithBOM)(outParts: _*)

    // With BOM, little endian
    UTF16LETest.flipByteBuffer(inWithBOM)
    super.testDecode(inWithBOM)(outParts: _*)
  }

  override protected def testEncode(in: CharBuffer)(
      outParts: OutPart[ByteBuffer]*): Unit = {
    if (in.remaining == 0) super.testEncode(in)(outParts: _*)
    else super.testEncode(in)(BufferPart(BigEndianBOM) +: outParts: _*)
  }
}
| jmnarloch/scala-js | test-suite/src/test/scala/org/scalajs/testsuite/niocharset/UTF16Test.scala | Scala | bsd-3-clause | 6,000 |
import _root_.io.gatling.core.scenario.Simulation
import ch.qos.logback.classic.{Level, LoggerContext}
import io.gatling.core.Predef._
import io.gatling.http.Predef._
import org.slf4j.LoggerFactory
import scala.concurrent.duration._
/**
* Performance test for the Movie entity.
*/
class MovieGatlingTest extends Simulation {

    val context: LoggerContext = LoggerFactory.getILoggerFactory.asInstanceOf[LoggerContext]

    // Log all HTTP requests
    //context.getLogger("io.gatling.http").setLevel(Level.valueOf("TRACE"))
    // Log failed HTTP requests
    //context.getLogger("io.gatling.http").setLevel(Level.valueOf("DEBUG"))

    // Target host is overridable on the command line via -DbaseURL=...
    val baseURL = Option(System.getProperty("baseURL")) getOrElse """http://127.0.0.1:8080"""

    val httpConf = http
        .baseURL(baseURL)
        .inferHtmlResources()
        .acceptHeader("*/*")
        .acceptEncodingHeader("gzip, deflate")
        .acceptLanguageHeader("fr,fr-fr;q=0.8,en-us;q=0.5,en;q=0.3")
        .connection("keep-alive")
        .userAgentHeader("Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:33.0) Gecko/20100101 Firefox/33.0")

    val headers_http = Map(
        "Accept" -> """application/json"""
    )

    val headers_http_authentication = Map(
        "Content-Type" -> """application/x-www-form-urlencoded""",
        "Accept" -> """application/json"""
    )

    // The token captured during authentication is injected via Gatling EL.
    val headers_http_authenticated = Map(
        "Accept" -> """application/json""",
        "x-auth-token" -> "${x_auth_token}"
    )

    // Scenario: verify auth flow, then CRUD-cycle a Movie entity.
    val scn = scenario("Test the Movie entity")
        .exec(http("First unauthenticated request")
        .get("/api/account")
        .headers(headers_http)
        .check(status.is(401)))
        .pause(10)
        .exec(http("Authentication")
        .post("/api/authenticate")
        .headers(headers_http_authentication)
        .formParam("username", "admin")
        .formParam("password", "admin")
        .check(jsonPath("$.token").saveAs("x_auth_token")))
        .pause(1)
        .exec(http("Authenticated request")
        .get("/api/account")
        .headers(headers_http_authenticated)
        .check(status.is(200)))
        .pause(10)
        .repeat(2) {
            exec(http("Get all movies")
            .get("/api/movies")
            .headers(headers_http_authenticated)
            .check(status.is(200)))
            .pause(10 seconds, 20 seconds)
            .exec(http("Create new movie")
            .put("/api/movies")
            .headers(headers_http_authenticated)
            .body(StringBody("""{"id":null, "title":"SAMPLE_TEXT", "rating":"SAMPLE_TEXT", "director":"SAMPLE_TEXT"}""")).asJSON
            .check(status.is(201))
            .check(headerRegex("Location", "(.*)").saveAs("new_movie_url")))
            .pause(10)
            .repeat(5) {
                exec(http("Get created movie")
                .get("${new_movie_url}")
                .headers(headers_http_authenticated))
                .pause(10)
            }
            .exec(http("Delete created movie")
            .delete("${new_movie_url}")
            .headers(headers_http_authenticated))
            .pause(10)
        }

    val users = scenario("Users").exec(scn)

    // Ramp 100 virtual users over one minute.
    setUp(
        users.inject(rampUsers(100) over (1 minutes))
    ).protocols(httpConf)
}
| ozkangokturk/moviefan | src/test/gatling/simulations/MovieGatlingTest.scala | Scala | apache-2.0 | 3,261 |
/*
* This file is part of the gnieh-pp project.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gnieh.pp
package tests
import org.scalatest._
/** @author Lucas Satabin
*
*/
// The compact renderer is expected to collapse every layout hint
// (line breaks, grouping, nesting, alignment) into single-space output.
class CompactRendererTest extends PpTest {

  val doc = "this" :|: "is" :|: "some" :|: "document" :|: "with" :|: "lines"
  val grouped = group("this" :|: "is" :|: "a" :|: "grouped" :|: "document")
  val nested = nest(4) { "this" :|: "one" :|: "has" :|: "indentation" }
  val aligned = "this" :+: align("document" :|: "is" :|: "aligned")

  "the compact renderer" should "flatten the entire document" in {
    compact(doc) should be("this is some document with lines")
  }

  it should "ignore groups" in {
    compact(grouped) should be("this is a grouped document")
  }

  it should "ignore nesting" in {
    compact(nested) should be("this one has indentation")
  }

  it should "ignore alignment" in {
    compact(aligned) should be("this document is aligned")
  }

}
| gnieh/tekstlib | src/test/scala/gnieh/pp/tests/CompactRendererTest.scala | Scala | apache-2.0 | 1,475 |
import sbt._
import Keys._
import sbtassembly.AssemblyPlugin.autoImport._
import com.typesafe.sbt.SbtScalariform
import com.typesafe.sbt.SbtScalariform.ScalariformKeys
import org.scalastyle.sbt.ScalastylePlugin
// sbt build definition: single-module project with assembly packaging.
object TcpFrontmanBuild extends Build {
  import ProjectSettings._

  /**
   * Main module
   */
  lazy val tcpFrontman = Project("tcp-frontman", file("."))
    .settings(defaultSettings: _*)
    .settings(tcpFrontmanAssemblySettings: _*)
    .settings(libraryDependencies ++= Dependencies.tcpFrontman)

  override lazy val settings = {
    super.settings ++
      buildSettings ++
      Seq(
        // Shell prompt shows the current sbt project id.
        shellPrompt := {
          s => Project.extract(s).currentProject.id + " > "
        }
      )
  }
}
// Common settings shared across modules: versions, compiler flags,
// assembly configuration and Scalariform formatting preferences.
object ProjectSettings {
  val ProjectVersion = "0.1"
  val ScalaVersion = "2.11.7"

  lazy val buildSettings = Seq(
    organization := "io.dmitryivanov.tcpfrontman",
    version := ProjectVersion,
    scalaVersion := ScalaVersion
  )

  lazy val defaultSettings = Defaults.defaultSettings ++
    ScalastylePlugin.Settings ++
    formatSettings ++
    Seq(
      scalacOptions in Compile := Seq(
        "-encoding", "utf8", "-target:jvm-1.8", "-feature", "-language:implicitConversions", "-language:postfixOps", "-unchecked", "-deprecation",
        "-Ywarn-adapted-args", "-Xlog-reflective-calls"
      ))

  // Fat-jar settings for sbt-assembly.
  lazy val tcpFrontmanAssemblySettings = Seq(
    mainClass in assembly := Some("io.dmitryivanov.tcpfrontman.Boot"),
    jarName in assembly := "tcp-frontman.jar")

  lazy val formatSettings = SbtScalariform.scalariformSettings ++ Seq(
    ScalariformKeys.preferences in Compile := formattingPreferences,
    ScalariformKeys.preferences in Test := formattingPreferences
  )

  lazy val formattingPreferences = {
    import scalariform.formatter.preferences._
    FormattingPreferences()
      .setPreference(AlignParameters, true)
      .setPreference(AlignSingleLineCaseStatements, true)
  }
}
// Library dependencies, grouped by configuration (compile vs test).
object Dependencies {
  val nettyV = "4.1.0.Beta7"
  val scalaTestV = "2.2.4"

  object Compile {
    val netty = "io.netty" % "netty-all" % nettyV % "compile"
    val scopt = "com.github.scopt" %% "scopt" % "3.3.0"
  }

  object Test {
    val scalatest = "org.scalatest" %% "scalatest" % scalaTestV % "test"
  }

  val tcpFrontman = Seq(
    Compile.netty,
    Compile.scopt,
    Test.scalatest)
}
| ajantis/tcp-frontman | project/TcpFrontmanBuild.scala | Scala | apache-2.0 | 2,355 |
package io.iohk.ethereum.db.storage
import java.nio.ByteBuffer
import akka.util.ByteString
import boopickle.Default._
import com.google.common.cache.RemovalNotification
import io.iohk.ethereum.db.cache.Cache
import io.iohk.ethereum.db.storage.NodeStorage.{NodeEncoded, NodeHash}
import io.iohk.ethereum.mpt.{ByteArraySerializable, NodesKeyValueStorage}
import scala.collection.mutable
/**
* In-memory pruner - All pruning is done in LRU cache, which means all mpt nodes saved to db, are there permanently.
* There are two occasions where node is saved to disk:
* 1 - When cache becomes full, least recently used nodes are flushed to disk. In normal operation, these nodes
* have already survived several pruning cycles, and still have references pointing at them, which makes them
* unlikely to be pruned in future.
* 2 - Every now and then, cache needs to be flushed to disk to bump up the best block number. It leads to
* saving nodes which were in cache long time and survived many pruning cycles,
* but also some junk nodes from last X Blocks (X - kept history)
* There are two supporting data structures which are saved to database after processing each block:
* DeathRow - List of nodes which reference count drop to 0, and can be potentially deleted in future
* ChangeLog - List of changes to nodes reference counts during processing block. It enables rollbacks of state changes
* made by some block.
*
* It is something between [[ArchiveNodeStorage]] which saves all nodes even if they would become junk right away, but
* is really fast performance wise (only writing data) and [[ReferenceCountNodeStorage]] which tries to clear all junk nodes
* but it requires being in sync with db (constant read operations) which hutrs performance.
*/
class CachedReferenceCountedStorage(
    nodeStorage: NodeStorage,
    cache: Cache[ByteString, HeapEntry],
    changeLog: ChangeLog,
    bn: BigInt
) extends NodesKeyValueStorage {
  // Reads through the cache; on miss, loads from nodeStorage and populates
  // the cache so subsequent reference-count updates see the entry.
  def get(nodeHash: NodeHash): Option[NodeEncoded] = {
    cache
      .get(nodeHash)
      .fold {
        nodeStorage.get(nodeHash).map(enc => HeapEntry.fromBytes(enc)).map { entry =>
          cache.put(nodeHash, entry)
          entry.nodeEncoded
        }
      }(entry => Some(entry.nodeEncoded))
  }

  // All reference-count mutations for block `bn` are recorded in the change
  // log so they can be rolled back if the block is discarded.
  def update(toRemove: Seq[ByteString], toUpsert: Seq[(ByteString, NodeEncoded)]): NodesKeyValueStorage = {
    changeLog.withChangeLog(bn) { blockChangeLog =>
      toUpsert.foreach { case (nodeKey, nodeValue) =>
        val (updatedValue, change) = {
          val fromCache = cache.get(nodeKey)
          // Existing node gains a parent reference; unseen node starts at 1.
          if (fromCache.isDefined)
            (fromCache.get.incrementParents(bn), Increase(nodeKey))
          else
            (HeapEntry(nodeValue, 1, bn), New(nodeKey))
        }
        cache.put(nodeKey, updatedValue)
        blockChangeLog.registerChange(change, updatedValue.numOfParents)
      }

      toRemove.foreach { node =>
        // In normal operation node should be in cache ( to delete node mpt trie need to read from db first)
        cache.get(node).foreach { nodeToDel =>
          val updatedValue = nodeToDel.decrementParents(bn)
          cache.put(node, updatedValue)
          blockChangeLog.registerChange(Decrease(node), updatedValue.numOfParents)
        }
      }
    }
    this
  }

  // Persistence is handled externally (see companion object helpers).
  def persist(): Unit = ()
}
object CachedReferenceCountedStorage {
  // A death-row node is only evictable when its refcount is zero AND it was
  // last touched at or before the block being pruned.
  private def getNodesToPruneInCache(
      cache: Cache[NodeHash, HeapEntry],
      deathRow: List[NodeHash],
      blockToPrune: BigInt
  ): List[NodeHash] = {
    var nodesToDeleteFromCache = List.empty[NodeHash]
    deathRow.foreach { nodeHash =>
      cache.get(nodeHash).foreach { nodeFromCache =>
        if (nodeFromCache.numOfParents == 0 && nodeFromCache.bn <= blockToPrune) {
          nodesToDeleteFromCache = nodeHash :: nodesToDeleteFromCache
        }
      }
    }
    nodesToDeleteFromCache
  }

  // Flushes the whole cache to storage (when it asks to persist, or when
  // `forced`) and clears it. Returns whether a flush happened.
  def persistCache[V](cache: Cache[ByteString, V], storage: NodeStorage, forced: Boolean = false)(implicit
      ser: ByteArraySerializable[V]
  ): Boolean = {
    if (cache.shouldPersist || forced) {
      val values = cache.getValues
      val serialized = values.map { case (key, value) => key -> ser.toBytes(value) }
      storage.update(Nil, serialized)
      cache.clear()
      true
    } else {
      false
    }
  }

  // Removes prunable death-row nodes from the cache (never touches storage).
  def prune(deathRow: List[NodeHash], cache: Cache[NodeHash, HeapEntry], blockToPrune: BigInt): Unit = {
    val toDelFromCache = getNodesToPruneInCache(cache, deathRow, blockToPrune)
    cache.update(toDelFromCache, Nil)
  }

  // Replays a block's change log in reverse to reconstruct each touched
  // node's state before the block. The Boolean marks nodes the block
  // created (deletable on rollback).
  private def getPreviousBlockState(
      cache: Cache[NodeHash, HeapEntry],
      nodeStorage: NodeStorage,
      changeLog: List[Update],
      newBestAfterRollback: BigInt
  ): Map[NodeHash, (HeapEntry, Boolean)] = {
    var newState = Map.empty[NodeHash, (HeapEntry, Boolean)]

    changeLog.foreach { update =>
      val nodeHash = update.hash
      // Prefer already-reverted state, then cache, then backing storage.
      val currentState =
        newState.get(nodeHash) orElse
          cache.get(nodeHash).map((_, false)) orElse
          nodeStorage.get(nodeHash).map(HeapEntry.fromBytes).map((_, false))

      currentState.foreach { case (current, deletable) =>
        val reversedState = update match {
          case Increase(_) => (current.decrementParents(newBestAfterRollback), deletable)
          case Decrease(_) => (current.incrementParents(newBestAfterRollback), deletable)
          case New(_) => (current.decrementParents(newBestAfterRollback), true)
        }

        newState += nodeHash -> reversedState
      }
    }
    newState
  }

  // Splits the reverted state into cache deletions (created-by-block nodes
  // whose refcount dropped to zero) and cache updates (everything else).
  private def gatherCacheUpdates(
      previousState: Map[NodeHash, (HeapEntry, Boolean)]
  ): (List[NodeHash], List[(NodeHash, HeapEntry)]) = {
    previousState.foldLeft(List.empty[NodeHash], List.empty[(NodeHash, HeapEntry)]) {
      case ((toDel, toUpdate), (entryKey, (entryValue, deletable))) =>
        if (entryValue.numOfParents == 0 && deletable)
          (entryKey :: toDel, toUpdate)
        else
          (toDel, (entryKey, entryValue) :: toUpdate)
    }
  }

  // Undoes all reference-count changes made by block `bn` in the cache.
  def rollback(
      cache: Cache[NodeHash, HeapEntry],
      nodeStorage: NodeStorage,
      changeLog: List[Update],
      bn: BigInt
  ): Unit = {
    val newBestAfterRollback = bn - 1
    val previousState = getPreviousBlockState(cache, nodeStorage, changeLog, newBestAfterRollback)
    val (nodesToDelete, nodesToUpdate) = gatherCacheUpdates(previousState)
    cache.update(nodesToDelete, nodesToUpdate)
  }

  // LRU eviction handler: persists the evicted entry so no node is lost.
  def saveOnlyNotificationHandler(
      nodeStorage: NodeStorage
  )(notification: RemovalNotification[ByteString, HeapEntry]): Unit = {
    val entry = notification.getValue
    val key = notification.getKey
    nodeStorage.update(Nil, Seq(key -> HeapEntry.toBytes(entry)))
  }
}
/**
 * Node storage without history tracking: reads prefer the in-memory heap cache and
 * fall back to persistent storage; writes only touch the cache.
 */
class NoHistoryCachedReferenceCountedStorage(nodeStorage: NodeStorage, cache: Cache[ByteString, HeapEntry], bn: BigInt)
    extends NodesKeyValueStorage {

  /** Looks the node up in the cache first; on a miss, decodes it from persistent storage. */
  def get(nodeHash: NodeHash): Option[NodeEncoded] = {
    val fromCache = cache.get(nodeHash).map(_.nodeEncoded)
    // Evaluated lazily so storage is only hit on a cache miss.
    def fromStorage = nodeStorage.get(nodeHash).map(bytes => HeapEntry.fromBytes(bytes).nodeEncoded)
    fromCache orElse fromStorage
  }

  /** Applies upserts first, then removals, entirely in the cache. */
  def update(toRemove: Seq[ByteString], toUpsert: Seq[(ByteString, NodeEncoded)]): NodesKeyValueStorage = {
    for ((hash, encoded) <- toUpsert) cache.put(hash, HeapEntry(encoded, 1, bn))
    for (hash <- toRemove) cache.remove(hash)
    this
  }

  /** No-op: this storage never persists on its own. */
  def persist(): Unit = ()
}
import io.iohk.ethereum.utils.ByteUtils._
/**
 * In-memory representation of a node plus its reference-counting metadata.
 *
 * @param nodeEncoded the serialized node
 * @param numOfParents current number of parent references to this node
 * @param bn block number at which this entry was last touched
 */
final case class HeapEntry(nodeEncoded: NodeEncoded, numOfParents: Int, bn: BigInt) {
  /** Returns a copy with one more parent reference, stamped with the given block number. */
  def incrementParents(incrementationBlock: BigInt): HeapEntry = {
    copy(numOfParents = numOfParents + 1, bn = incrementationBlock)
  }
  /** Returns a copy with one parent reference removed, stamped with the given block number. */
  def decrementParents(decrementationBlock: BigInt): HeapEntry = {
    copy(numOfParents = numOfParents - 1, bn = decrementationBlock)
  }
}
object HeapEntry {
  import boopickle.Default._

  /** Binary pickler for [[HeapEntry]], derived automatically by boopickle. */
  implicit val HeapEntryPickler: Pickler[HeapEntry] = generatePickler[HeapEntry]

  /** Serializes an entry to its compact boopickle wire format. */
  def toBytes(entry: HeapEntry): Array[Byte] = {
    compactPickledBytes(Pickle.intoBytes(entry)).toArray[Byte]
  }

  /** Deserializes an entry previously produced by [[toBytes]]. */
  def fromBytes(asbytes: Array[Byte]): HeapEntry = {
    Unpickle[HeapEntry].fromBytes(ByteBuffer.wrap(asbytes))
  }

  // Explicit type annotation added: implicit definitions should always declare their
  // type to keep implicit resolution stable across refactors (mandatory in Scala 3).
  implicit val heapEntrySerializer: ByteArraySerializable[HeapEntry] = new ByteArraySerializable[HeapEntry] {
    override def toBytes(input: HeapEntry): Array[Byte] = HeapEntry.toBytes(input)
    override def fromBytes(bytes: Array[Byte]): HeapEntry = HeapEntry.fromBytes(bytes)
  }
}
/** A single reference-count change applied to the node identified by `hash`. */
sealed abstract class Update {
  val hash: ByteString
}
/** The node gained a parent reference. */
final case class Increase(hash: ByteString) extends Update
/** The node lost a parent reference. */
final case class Decrease(hash: ByteString) extends Update
/** The node was created in this block. */
final case class New(hash: ByteString) extends Update
object Update {
  // boopickle picklers: ByteString is pickled via its byte array; Update is a
  // composite over its three concrete cases (registration order is part of the
  // wire format, so do not reorder the addConcreteType calls).
  implicit val byteStringPickler: Pickler[ByteString] =
    transformPickler[ByteString, Array[Byte]](ByteString(_))(_.toArray[Byte])
  implicit val updatePickler: Pickler[Update] =
    compositePickler[Update].addConcreteType[Increase].addConcreteType[Decrease].addConcreteType[New]
}
/**
 * Keeps the per-block change logs in memory and persists/loads them from
 * `nodeStorage` under block-number-derived keys.
 *
 * NOTE(review): `logs` is an unsynchronized mutable map — this class assumes
 * single-threaded access; confirm with callers.
 */
class ChangeLog(nodeStorage: NodeStorage) {
  private val logs = mutable.Map.empty[BigInt, BlockChangeLog]
  /**
   * Writes the change log and death row for `forBlock` to storage and drops the
   * in-memory log. No-op when no log exists for that block.
   */
  def persistChangeLog(forBlock: BigInt): Unit = {
    logs.get(forBlock).foreach { changeLog =>
      nodeStorage.update(
        Nil,
        Seq(
          ChangeLog.getLogKey(forBlock) -> ChangeLog.serializeChangeLog(changeLog.getAllChanges),
          ChangeLog.getDrwKey(forBlock) -> ChangeLog.serializeDeathRow(changeLog.getAllToDelete)
        )
      )
      logs.remove(forBlock)
    }
  }
  /** Runs `updates` against the (possibly fresh) change log for block `bn` and stores it back. */
  def withChangeLog(bn: BigInt)(updates: BlockChangeLog => Unit): Unit = {
    val changeLog = getChangeLogForBlock(bn)
    updates(changeLog)
    logs.update(bn, changeLog)
  }
  /** Returns the existing log for `bn`, creating and registering an empty one if absent. */
  def getChangeLogForBlock(bn: BigInt): BlockChangeLog = {
    logs.getOrElse(
      bn, {
        val newChangeLog = new BlockChangeLog(List.empty, Set.empty)
        logs += bn -> newChangeLog
        newChangeLog
      }
    )
  }
  /** Deletes the persisted change log and death row for block `bn` from storage. */
  def removeBlockMetaData(bn: BigInt): Unit = {
    nodeStorage.update(Seq(ChangeLog.getLogKey(bn), ChangeLog.getDrwKey(bn)), Nil)
  }
  /** Loads and deserializes the persisted change log for block `bn`, if present. */
  def getChangeLogFromStorage(bn: BigInt): Option[List[Update]] = {
    nodeStorage.get(ChangeLog.getLogKey(bn)).map(ChangeLog.deserializeChangeLog)
  }
  /** Loads and deserializes the persisted death row for block `bn`, if present. */
  def getDeathRowFromStorage(bn: BigInt): Option[List[NodeHash]] = {
    nodeStorage.get(ChangeLog.getDrwKey(bn)).map(ChangeLog.deserializeDeathRow)
  }
}
object ChangeLog {
  // Storage-key prefixes distinguishing change logs ("log") from death rows ("drw").
  private val changeLogPrefix = "log".getBytes
  private val deathRowPrefix = "drw".getBytes
  /** Storage key for the change log of block `bn`. */
  def getLogKey(bn: BigInt): ByteString = ByteString(changeLogPrefix ++ bn.toByteArray)
  /** Storage key for the death row of block `bn`. */
  def getDrwKey(bn: BigInt): ByteString = ByteString(deathRowPrefix ++ bn.toByteArray)
  import Update.{updatePickler, byteStringPickler}
  import boopickle.Default._
  // boopickle (de)serialization helpers; each pair below must stay symmetric.
  def serializeChangeLog(changeLog: List[Update]): Array[Byte] =
    compactPickledBytes(Pickle.intoBytes(changeLog)).toArray[Byte]
  def serializeDeathRow(deathRow: List[NodeHash]): Array[Byte] =
    compactPickledBytes(Pickle.intoBytes(deathRow)).toArray[Byte]
  def deserializeChangeLog(bytes: Array[Byte]): List[Update] =
    Unpickle[List[Update]].fromBytes(ByteBuffer.wrap(bytes))
  def deserializeDeathRow(bytes: Array[Byte]): List[NodeHash] =
    Unpickle[List[NodeHash]].fromBytes(ByteBuffer.wrap(bytes))
}
/**
 * Mutable accumulator of reference-count changes made while processing one block,
 * plus the set of nodes that became unreferenced and are candidates for deletion.
 */
class BlockChangeLog(private val initialLog: List[Update], private val initialToDel: Set[NodeHash]) {
  // Recorded updates, most recent first.
  private var changeList: List[Update] = initialLog
  // Hashes whose reference count reached zero at some point during the block.
  private var deletionCandidates: Set[NodeHash] = initialToDel

  /** Records one update; a post-update refcount of zero marks the node deletable. */
  def registerChange(update: Update, refCountAfterUpdate: Int): Unit = {
    changeList = update :: changeList
    if (refCountAfterUpdate == 0)
      deletionCandidates += update.hash
  }

  def getAllChanges: List[Update] = changeList

  def getAllToDelete: List[NodeHash] = deletionCandidates.toList
}
| input-output-hk/etc-client | src/main/scala/io/iohk/ethereum/db/storage/CachedReferenceCountedStorage.scala | Scala | mit | 11,746 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution
import java.util.concurrent.atomic.AtomicBoolean
import scala.collection.mutable.ArrayBuffer
import org.apache.spark.Logging
import org.apache.spark.rdd.{RDD, RDDOperationScope}
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.CatalystTypeConverters
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.plans.QueryPlan
import org.apache.spark.sql.catalyst.plans.physical._
import org.apache.spark.sql.execution.metric.{LongSQLMetric, SQLMetric}
import org.apache.spark.sql.types.DataType
object SparkPlan {
  // Thread-local holding the active SQLContext so that SparkPlan constructors
  // (which may run reflectively, e.g. via makeCopy) can pick it up.
  protected[sql] val currentContext = new ThreadLocal[SQLContext]()
}
/**
* The base class for physical operators.
*/
abstract class SparkPlan extends QueryPlan[SparkPlan] with Logging with Serializable {
  /**
   * A handle to the SQL Context that was used to create this plan. Since many operators need
   * access to the sqlContext for RDD operations or configuration this field is automatically
   * populated by the query planning infrastructure.
   */
  @transient
  protected[spark] final val sqlContext = SparkPlan.currentContext.get()
  protected def sparkContext = sqlContext.sparkContext
  // sqlContext will be null when we are being deserialized on the slaves. In this instance
  // the value of codegenEnabled/unsafeEnabled will be set by the desserializer after the
  // constructor has run.
  val codegenEnabled: Boolean = if (sqlContext != null) {
    sqlContext.conf.codegenEnabled
  } else {
    false
  }
  val unsafeEnabled: Boolean = if (sqlContext != null) {
    sqlContext.conf.unsafeEnabled
  } else {
    false
  }
  /**
   * Whether the "prepare" method is called.
   */
  private val prepareCalled = new AtomicBoolean(false)
  /** Overridden make copy also propogates sqlContext to copied plan. */
  override def makeCopy(newArgs: Array[AnyRef]): SparkPlan = {
    SparkPlan.currentContext.set(sqlContext)
    super.makeCopy(newArgs)
  }
  /**
   * Return all metrics containing metrics of this SparkPlan.
   */
  private[sql] def metrics: Map[String, SQLMetric[_, _]] = Map.empty
  /**
   * Return a LongSQLMetric according to the name.
   */
  private[sql] def longMetric(name: String): LongSQLMetric =
    metrics(name).asInstanceOf[LongSQLMetric]
  // TODO: Move to `DistributedPlan`
  /** Specifies how data is partitioned across different nodes in the cluster. */
  def outputPartitioning: Partitioning = UnknownPartitioning(0) // TODO: WRONG WIDTH!
  /** Specifies any partition requirements on the input data for this operator. */
  def requiredChildDistribution: Seq[Distribution] =
    Seq.fill(children.size)(UnspecifiedDistribution)
  /** Specifies how data is ordered in each partition. */
  def outputOrdering: Seq[SortOrder] = Nil
  /** Specifies sort order for each partition requirements on the input data for this operator. */
  def requiredChildOrdering: Seq[Seq[SortOrder]] = Seq.fill(children.size)(Nil)
  /** Specifies whether this operator outputs UnsafeRows */
  def outputsUnsafeRows: Boolean = false
  /** Specifies whether this operator is capable of processing UnsafeRows */
  def canProcessUnsafeRows: Boolean = false
  /**
   * Specifies whether this operator is capable of processing Java-object-based Rows (i.e. rows
   * that are not UnsafeRows).
   */
  def canProcessSafeRows: Boolean = true
  /**
   * Returns the result of this query as an RDD[InternalRow] by delegating to doExecute
   * after adding query plan information to created RDDs for visualization.
   * Concrete implementations of SparkPlan should override doExecute instead.
   */
  final def execute(): RDD[InternalRow] = {
    // Sanity checks: children must agree on a single row format, and this operator
    // must be able to consume that format.
    if (children.nonEmpty) {
      val hasUnsafeInputs = children.exists(_.outputsUnsafeRows)
      val hasSafeInputs = children.exists(!_.outputsUnsafeRows)
      assert(!(hasSafeInputs && hasUnsafeInputs),
        "Child operators should output rows in the same format")
      assert(canProcessSafeRows || canProcessUnsafeRows,
        "Operator must be able to process at least one row format")
      assert(!hasSafeInputs || canProcessSafeRows,
        "Operator will receive safe rows as input but cannot process safe rows")
      assert(!hasUnsafeInputs || canProcessUnsafeRows,
        "Operator will receive unsafe rows as input but cannot process unsafe rows")
    }
    RDDOperationScope.withScope(sparkContext, nodeName, false, true) {
      prepare()
      doExecute()
    }
  }
  /**
   * Prepare a SparkPlan for execution. It's idempotent.
   */
  final def prepare(): Unit = {
    // compareAndSet guarantees doPrepare runs at most once even under concurrent calls.
    if (prepareCalled.compareAndSet(false, true)) {
      doPrepare()
      children.foreach(_.prepare())
    }
  }
  /**
   * Overridden by concrete implementations of SparkPlan. It is guaranteed to run before any
   * `execute` of SparkPlan. This is helpful if we want to set up some state before executing the
   * query, e.g., `BroadcastHashJoin` uses it to broadcast asynchronously.
   *
   * Note: the prepare method has already walked down the tree, so the implementation doesn't need
   * to call children's prepare methods.
   */
  protected def doPrepare(): Unit = {}
  /**
   * Overridden by concrete implementations of SparkPlan.
   * Produces the result of the query as an RDD[InternalRow]
   */
  protected def doExecute(): RDD[InternalRow]
  /**
   * Runs this query returning the result as an array.
   */
  def executeCollect(): Array[InternalRow] = {
    // Rows are copied because operators may reuse the same mutable row instance.
    execute().map(_.copy()).collect()
  }
  /**
   * Runs this query returning the result as an array, using external Row format.
   */
  def executeCollectPublic(): Array[Row] = {
    val converter = CatalystTypeConverters.createToScalaConverter(schema)
    executeCollect().map(converter(_).asInstanceOf[Row])
  }
  /**
   * Runs this query returning the first `n` rows as an array.
   *
   * This is modeled after RDD.take but never runs any job locally on the driver.
   */
  def executeTake(n: Int): Array[InternalRow] = {
    if (n == 0) {
      return new Array[InternalRow](0)
    }
    val childRDD = execute().map(_.copy())
    val buf = new ArrayBuffer[InternalRow]
    val totalParts = childRDD.partitions.length
    var partsScanned = 0
    while (buf.size < n && partsScanned < totalParts) {
      // The number of partitions to try in this iteration. It is ok for this number to be
      // greater than totalParts because we actually cap it at totalParts in runJob.
      var numPartsToTry = 1
      if (partsScanned > 0) {
        // If we didn't find any rows after the first iteration, just try all partitions next.
        // Otherwise, interpolate the number of partitions we need to try, but overestimate it
        // by 50%.
        if (buf.size == 0) {
          numPartsToTry = totalParts - 1
        } else {
          numPartsToTry = (1.5 * n * partsScanned / buf.size).toInt
        }
      }
      numPartsToTry = math.max(0, numPartsToTry)  // guard against negative num of partitions
      val left = n - buf.size
      val p = partsScanned until math.min(partsScanned + numPartsToTry, totalParts)
      val sc = sqlContext.sparkContext
      val res =
        sc.runJob(childRDD, (it: Iterator[InternalRow]) => it.take(left).toArray, p)
      res.foreach(buf ++= _.take(n - buf.size))
      partsScanned += numPartsToTry
    }
    buf.toArray
  }
  /** True when running under Spark's test harness (makes codegen failures fatal below). */
  private[this] def isTesting: Boolean = sys.props.contains("spark.testing")
  /**
   * Creates a projection for `expressions`, code-generated when enabled, falling back
   * to the interpreted implementation if generation fails (except in tests, where the
   * failure is rethrown so it can be diagnosed).
   */
  protected def newProjection(
      expressions: Seq[Expression], inputSchema: Seq[Attribute]): Projection = {
    log.debug(
      s"Creating Projection: $expressions, inputSchema: $inputSchema, codegen:$codegenEnabled")
    if (codegenEnabled) {
      try {
        GenerateProjection.generate(expressions, inputSchema)
      } catch {
        case e: Exception =>
          if (isTesting) {
            throw e
          } else {
            log.error("Failed to generate projection, fallback to interpret", e)
            new InterpretedProjection(expressions, inputSchema)
          }
      }
    } else {
      new InterpretedProjection(expressions, inputSchema)
    }
  }
  /**
   * Like [[newProjection]] but returns a factory of mutable projections (each call
   * produces a fresh MutableProjection, which is not thread-safe).
   */
  protected def newMutableProjection(
      expressions: Seq[Expression],
      inputSchema: Seq[Attribute]): () => MutableProjection = {
    log.debug(
      s"Creating MutableProj: $expressions, inputSchema: $inputSchema, codegen:$codegenEnabled")
    if(codegenEnabled) {
      try {
        GenerateMutableProjection.generate(expressions, inputSchema)
      } catch {
        case e: Exception =>
          if (isTesting) {
            throw e
          } else {
            log.error("Failed to generate mutable projection, fallback to interpreted", e)
            () => new InterpretedMutableProjection(expressions, inputSchema)
          }
      }
    } else {
      () => new InterpretedMutableProjection(expressions, inputSchema)
    }
  }
  /**
   * Creates a row predicate for `expression`, code-generated when enabled with the same
   * interpreted fallback / test-rethrow policy as [[newProjection]].
   */
  protected def newPredicate(
      expression: Expression, inputSchema: Seq[Attribute]): (InternalRow) => Boolean = {
    if (codegenEnabled) {
      try {
        GeneratePredicate.generate(expression, inputSchema)
      } catch {
        case e: Exception =>
          if (isTesting) {
            throw e
          } else {
            log.error("Failed to generate predicate, fallback to interpreted", e)
            InterpretedPredicate.create(expression, inputSchema)
          }
      }
    } else {
      InterpretedPredicate.create(expression, inputSchema)
    }
  }
  /**
   * Creates a row ordering for the given sort order, code-generated when enabled with
   * the same interpreted fallback / test-rethrow policy as [[newProjection]].
   */
  protected def newOrdering(
      order: Seq[SortOrder],
      inputSchema: Seq[Attribute]): Ordering[InternalRow] = {
    if (codegenEnabled) {
      try {
        GenerateOrdering.generate(order, inputSchema)
      } catch {
        case e: Exception =>
          if (isTesting) {
            throw e
          } else {
            log.error("Failed to generate ordering, fallback to interpreted", e)
            new InterpretedOrdering(order, inputSchema)
          }
      }
    } else {
      new InterpretedOrdering(order, inputSchema)
    }
  }
  /**
   * Creates a row ordering for the given schema, in natural ascending order.
   */
  protected def newNaturalAscendingOrdering(dataTypes: Seq[DataType]): Ordering[InternalRow] = {
    val order: Seq[SortOrder] = dataTypes.zipWithIndex.map {
      case (dt, index) => new SortOrder(BoundReference(index, dt, nullable = true), Ascending)
    }
    newOrdering(order, Seq.empty)
  }
}
/** A physical operator with no children. */
private[sql] trait LeafNode extends SparkPlan {
  override def children: Seq[SparkPlan] = Nil
}
/** A physical operator with exactly one child; partitioning is inherited from it. */
private[sql] trait UnaryNode extends SparkPlan {
  def child: SparkPlan
  override def children: Seq[SparkPlan] = child :: Nil
  override def outputPartitioning: Partitioning = child.outputPartitioning
}
/** A physical operator with exactly two children (e.g. joins). */
private[sql] trait BinaryNode extends SparkPlan {
  def left: SparkPlan
  def right: SparkPlan
  override def children: Seq[SparkPlan] = Seq(left, right)
}
| pronix/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala | Scala | apache-2.0 | 11,772 |
package org.apache.spark.api.julia
import scala.collection.JavaConversions._
/**
* Class for execution of Julia scripts on a cluster.
* WARNING: this class isn't used currently, will be utilized later
*/
object JuliaRunner {
  /**
   * Launches the `julia` executable on the given script, forwarding the child's
   * output streams to this process' stderr and failing on a non-zero exit code.
   *
   * @param args first element is the path to the Julia script; any remaining
   *             elements are passed through to the script
   */
  def main(args: Array[String]): Unit = {
    // Fail fast with a clear message instead of an ArrayIndexOutOfBoundsException.
    require(args.nonEmpty, "usage: JuliaRunner <script.jl> [script args...]")
    val juliaScript = args.head
    val scriptArgs = args.drop(1)
    val pb = new ProcessBuilder(Seq("julia", juliaScript) ++ scriptArgs)
    val process = pb.start()
    StreamUtils.redirectStreamsToStderr(process.getInputStream, process.getErrorStream)
    val errorCode = process.waitFor()
    if (errorCode != 0) {
      // Include the exit code to aid debugging of failed runs.
      throw new RuntimeException(s"Julia script exited with an error (exit code $errorCode)")
    }
  }
}
| dfdx/Sparta.jl | jvm/sparkjl/src/main/scala/org/apache/spark/api/julia/JuliaRunner.scala | Scala | mit | 688 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs102.boxes
import uk.gov.hmrc.ct.accounts.frs102.calculations.IntangibleAssetsCalculator
import uk.gov.hmrc.ct.accounts.frs102.retriever.{Frs102AccountsBoxRetriever, FullAccountsBoxRetriever}
import uk.gov.hmrc.ct.box._
/**
 * CT accounts box AC117A: intangible assets — goodwill — cost at the period-of-account end.
 * The value, when present, must be a non-negative whole-pound amount.
 */
case class AC117A(value: Option[Int]) extends CtBoxIdentifier(name = "Intangible assets - Goodwill - Cost - Cost at [POA END]")
  with CtOptionalInteger
  with Input
  with ValidatableBox[Frs102AccountsBoxRetriever]
  with Validators {
  override def validate(boxRetriever: Frs102AccountsBoxRetriever): Set[CtValidation] = {
    collectErrors(
      validateMoney(value, min = 0)
    )
  }
}
/** Derives AC117A for full accounts from the component goodwill-cost boxes. */
object AC117A extends Calculated[AC117A, FullAccountsBoxRetriever]
  with IntangibleAssetsCalculator {
  override def calculate(boxRetriever: FullAccountsBoxRetriever): AC117A = {
    import boxRetriever._
    calculateAC117A(ac114A(), ac115A(), ac116A(), ac209A(), ac210A())
  }
}
| hmrc/ct-calculations | src/main/scala/uk/gov/hmrc/ct/accounts/frs102/boxes/AC117A.scala | Scala | apache-2.0 | 1,540 |
/*
* Licensed to Cloudera, Inc. under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Cloudera, Inc. licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cloudera.hue.livy.server.batch
import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.atomic.AtomicInteger
import scala.collection.JavaConversions._
import scala.concurrent.Future
/**
 * Tracks live batch sessions by id. Ids are allocated from an atomic counter and
 * sessions are kept in a ConcurrentHashMap, so individual operations are thread-safe.
 */
class BatchManager(batchFactory: BatchSessionFactory) {
  private[this] val _idCounter = new AtomicInteger()
  private[this] val _batches = new ConcurrentHashMap[Int, BatchSession]
  /** Looks a session up by id; None when unknown. */
  def getBatch(id: Int): Option[BatchSession] = Option(_batches.get(id))
  /** Snapshot of all currently registered sessions. */
  def getBatches: Array[BatchSession] = _batches.values().iterator().toArray
  /** Allocates a fresh id, creates the session via the factory, and registers it. */
  def createBatch(createBatchRequest: CreateBatchRequest): BatchSession = {
    val id = _idCounter.getAndIncrement
    val batch = batchFactory.create(id, createBatchRequest)
    _batches.put(id, batch)
    batch
  }
  /** Unregisters the session with this id without stopping it. */
  def remove(id: Int): Option[BatchSession] = {
    Option(_batches.remove(id))
  }
  /** Unregisters the session and asynchronously stops it. */
  def delete(batch: BatchSession): Future[Unit] = {
    _batches.remove(batch.id)
    batch.stop()
  }
  // Intentionally a no-op: sessions are stopped individually via delete.
  def shutdown() = {
  }
}
/**
 * Parameters for submitting a batch job; mirrors spark-submit options
 * (main file/class, resource files, and driver/executor sizing).
 */
case class CreateBatchRequest(file: String,
                              proxyUser: Option[String] = None,
                              args: List[String] = List(),
                              className: Option[String] = None,
                              jars: List[String] = List(),
                              pyFiles: List[String] = List(),
                              files: List[String] = List(),
                              driverMemory: Option[String] = None,
                              driverCores: Option[Int] = None,
                              executorMemory: Option[String] = None,
                              executorCores: Option[Int] = None,
                              archives: List[String] = List())
| kalahbrown/HueBigSQL | apps/spark/java/livy-server/src/main/scala/com/cloudera/hue/livy/server/batch/BatchSessionManager.scala | Scala | apache-2.0 | 2,517 |
/**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bdgenomics.adam.rdd.read.recalibration
import org.bdgenomics.adam.models.{
RecordGroup,
RecordGroupDictionary
}
import org.bdgenomics.formats.avro.AlignmentRecord
import org.scalatest.FunSuite
class RecalibratorSuite extends FunSuite {
  // Recalibration table with two covariate entries; quality chars are stored with
  // the standard Phred+33 offset (hence the `+ 33` below).
  val table = RecalibrationTable(new ObservationTable(
    Map((CovariateKey(0,
      (50 + 33).toChar,
      2,
      'A',
      'C') -> new Aggregate(1000000, 1, 10.0)),
      (CovariateKey(0,
        (40 + 33).toChar,
        1,
        'N',
        'N') -> new Aggregate(100000, 1, 10.0)))))
  val rgd = RecordGroupDictionary(Seq(RecordGroup("s", "rg0")))
  // Two-base mapped read with per-base qualities 40 and 50 (Phred+33 encoded).
  val read = AlignmentRecord.newBuilder
    .setContigName("chr1")
    .setRecordGroupName("rg0")
    .setStart(10L)
    .setEnd(12L)
    .setSequence("AC")
    .setReadNegativeStrand(false)
    .setQual(Seq(40, 50).map(i => (i + 33).toChar).mkString)
    .setDuplicateRead(false)
    .setReadMapped(true)
    .setReadPaired(false)
    .setReadInFragment(0)
    .setPrimaryAlignment(true)
    .setCigar("2M")
    .setMismatchingPositions("2")
    .setMapq(40)
    .build
  // Recalibrators differing only in the minimum quality threshold (48 vs 40).
  val hiRecalibrator = Recalibrator(table, (48 + 33).toChar)
  val lowRecalibrator = Recalibrator(table, (40 + 33).toChar)
  test("don't replace quality if quality was null") {
    val qualFreeRead = AlignmentRecord.newBuilder(read)
      .setQual(null)
      .build
    val recalibratedRead = lowRecalibrator(qualFreeRead,
      Array.empty)
    assert(recalibratedRead.getQual === null)
    assert(recalibratedRead.getOrigQual === null)
  }
  test("if no covariates, return alignment") {
    val emptyRead = AlignmentRecord.newBuilder
      .setReadName("emptyRead")
      .build
    val notRecalibratedRead = lowRecalibrator(emptyRead, Array.empty)
    assert(emptyRead === notRecalibratedRead)
  }
  test("skip recalibration if base is below quality threshold") {
    // Threshold 48 is above both base qualities, so only origQual is recorded.
    val recalibratedRead = hiRecalibrator(read,
      BaseQualityRecalibration.observe(read, rgd))
    val expectedRead = AlignmentRecord.newBuilder(read)
      .setOrigQual(read.getQual)
      .build
    assert(recalibratedRead === expectedRead)
  }
  test("recalibrate changed bases above quality threshold") {
    // Threshold 40: the first base (qual 40) is recalibrated to 47, the second kept.
    val recalibratedRead = lowRecalibrator(read,
      BaseQualityRecalibration.observe(read, rgd))
    val expectedRead = AlignmentRecord.newBuilder(read)
      .setQual(Seq(47, 50).map(i => (i + 33).toChar).mkString)
      .setOrigQual(read.getQual)
      .build
    assert(recalibratedRead === expectedRead)
  }
}
| massie/adam | adam-core/src/test/scala/org/bdgenomics/adam/rdd/read/recalibration/RecalibratorSuite.scala | Scala | apache-2.0 | 3,284 |
/*
* This file is part of Kiama.
*
* Copyright (C) 2013-2015 Anthony M Sloane, Macquarie University.
*
* Kiama is free software: you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* Kiama is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
* more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Kiama. (See files COPYING and COPYING.LESSER.) If not, see
* <http://www.gnu.org/licenses/>.
*/
import sbt._
import sbt.Keys._
object KiamaBuild extends Build {

    import sbt.Project.Initialize
    import sbtunidoc.Plugin.{unidocSettings, ScalaUnidoc, TestScalaUnidoc}
    import sbtunidoc.Plugin.Unidoc
    import sbtunidoc.Plugin.UnidocKeys.unidoc
    import scala.collection.immutable.Seq

    // Project configuration:
    //   - core project containing macros and code that they need
    //   - library project containing everything else, including all tests
    //   - kiama (root) project aggregates core and library

    // Root project: aggregates the sub-projects and repackages their artifacts.
    lazy val kiama =
        Project (
            id = "kiama",
            base = file (".")
        ) aggregate (core, library) settings (
            allNewSettings : _*
        )

    lazy val core =
        Project (
            id = "core",
            base = file ("core")
        )

    // library depends on core only internally so core classes are inlined rather
    // than published as a separate dependency.
    lazy val library =
        Project (
            id = "library",
            base = file ("library")
        ) dependsOn (core % "compile-internal, test-internal")

    // All settings that have to be added to the kiama project
    lazy val allNewSettings : Seq[Setting[_]] =
        Seq (
            // Merge core's and library's classes/sources into the root artifacts.
            mappings in (Compile, packageBin) :=
                (mappings in (core, Compile, packageBin)).value ++
                (mappings in (library, Compile, packageBin)).value,
            mappings in (Compile, packageSrc) :=
                (mappings in (core, Compile, packageSrc)).value ++
                (mappings in (library, Compile, packageSrc)).value,
            mappings in (Test, packageBin) :=
                (mappings in (library, Test, packageBin)).value,
            mappings in (Test, packageSrc) :=
                (mappings in (library, Test, packageSrc)).value
        ) ++
        unidocSettings ++
        Seq (
            // Route `doc` through sbt-unidoc so API docs cover all sub-projects.
            doc in Compile := (doc in ScalaUnidoc).value,
            doc in Test := (doc in TestScalaUnidoc).value,
            target in unidoc in ScalaUnidoc := crossTarget.value / "api",
            target in unidoc in TestScalaUnidoc := crossTarget.value / "test-api"
        )

}
| adeze/kiama | project/Build.scala | Scala | gpl-3.0 | 2,856 |
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.physical.mongodb
import slamdata.Predef.{Map => _, _}
import quasar._, Planner._, Type.{Const => _, Coproduct => _, _}
import quasar.common.{PhaseResult, PhaseResults, PhaseResultTell, SortDir}
import quasar.connector.BackendModule
import quasar.contrib.matryoshka._
import quasar.contrib.pathy.{ADir, AFile}
import quasar.contrib.scalaz._, eitherT._
import quasar.ejson.EJson
import quasar.ejson.implicits._
import quasar.fp._
import quasar.fp.ski._
import quasar.fs.{FileSystemError, MonadFsErr}, FileSystemError.qscriptPlanningFailed
import quasar.javascript._
import quasar.jscore, jscore.{JsCore, JsFn}
import quasar.namegen._
import quasar.physical.mongodb.WorkflowBuilder.{Subset => _, _}
import quasar.physical.mongodb.accumulator._
import quasar.physical.mongodb.expression._
import quasar.physical.mongodb.planner._
import quasar.physical.mongodb.planner.common._
import quasar.physical.mongodb.workflow.{ExcludeId => _, IncludeId => _, _}
import quasar.qscript.{Coalesce => _, _}
import quasar.std.StdLib._ // TODO: remove this
import java.time.Instant
import matryoshka.{Hole => _, _}
import matryoshka.data._
import matryoshka.implicits._
import matryoshka.patterns._
import org.bson.BsonDocument
import scalaz._, Scalaz.{ToIdOps => _, _}
// TODO: This is generalizable to an arbitrary `Recursive` type, I think.
/** Locates an input subexpression inside a FreeMap. */
sealed abstract class InputFinder[T[_[_]]] {
  def apply[A](t: FreeMap[T]): FreeMap[T]
}
/** The input is the expression itself. */
final case class Here[T[_[_]]]() extends InputFinder[T] {
  def apply[A](a: FreeMap[T]): FreeMap[T] = a
}
/** Descends into the `index`-th child, then continues with `next`.
  * NOTE(review): `toList.apply(index)` is partial — an out-of-range index throws. */
final case class There[T[_[_]]](index: Int, next: InputFinder[T])
    extends InputFinder[T] {
  def apply[A](a: FreeMap[T]): FreeMap[T] =
    a.resume.fold(fa => next(fa.toList.apply(index)), κ(a))
}
object MongoDbPlanner {
import fixExprOp._
  // A partially-applied rewrite: the match on inputs plus the finders locating them.
  type Partial[T[_[_]], In, Out] = (PartialFunction[List[In], Out], List[InputFinder[T]])
  // Planner results: either a PlannerError or a value.
  type OutputM[A] = PlannerError \/ A
  // Reader capability exposing the query's execution timestamp (used for `Now()`).
  type ExecTimeR[F[_]] = MonadReader_[F, Instant]
  implicit def mongoQScriptToQScriptTotal[T[_[_]]]: Injectable.Aux[fs.MongoQScript[T, ?], QScriptTotal[T, ?]] =
    ::\::[QScriptCore[T, ?]](::/::[T, EquiJoin[T, ?], Const[ShiftedRead[AFile], ?]])
  /**
   * Builds a type-check generator: given `f`, which produces a check for the types it
   * covers directly, recursively decomposes coproduct ("union") types and combines the
   * checks of the halves with `or`. Returns None when no check can be produced.
   */
  @SuppressWarnings(Array("org.wartremover.warts.Recursion"))
  def generateTypeCheck[In, Out](or: (Out, Out) => Out)(f: PartialFunction[Type, In => Out]):
      Type => Option[In => Out] =
    typ => f.lift(typ).fold(
      typ match {
        case Type.Interval => generateTypeCheck(or)(f)(Type.Dec)
        case Type.Arr(_) => generateTypeCheck(or)(f)(Type.AnyArray)
        case Type.Timestamp
           | Type.Timestamp ⨿ Type.Date
           | Type.Timestamp ⨿ Type.Date ⨿ Type.Time =>
          generateTypeCheck(or)(f)(Type.Date)
        case Type.Timestamp ⨿ Type.Date ⨿ Type.Time ⨿ Type.Interval =>
          // Just repartition to match the right cases
          generateTypeCheck(or)(f)(Type.Interval ⨿ Type.Date)
        case Type.Int ⨿ Type.Dec ⨿ Type.Interval ⨿ Type.Str ⨿ (Type.Timestamp ⨿ Type.Date ⨿ Type.Time) ⨿ Type.Bool =>
          // Just repartition to match the right cases
          generateTypeCheck(or)(f)(
            Type.Int ⨿ Type.Dec ⨿ Type.Interval ⨿ Type.Str ⨿ (Type.Date ⨿ Type.Bool))
        case a ⨿ b =>
          // Combine checks for both halves of the coproduct with `or`.
          (generateTypeCheck(or)(f)(a) ⊛ generateTypeCheck(or)(f)(b))(
            (a, b) => ((expr: In) => or(a(expr), b(expr))))
        case _ => None
      })(
      Some(_))
  /**
   * Compiles a FreeMap into a MongoDB aggregation expression. Static (fully-constant)
   * subtrees are handled by `staticHandler` when possible; otherwise the tree is folded
   * with `funcHandler`, recovering leaves via `recovery`.
   */
  def processMapFuncExpr
    [T[_[_]]: BirecursiveT: ShowT, M[_]: Monad: ExecTimeR: MonadFsErr, EX[_]: Traverse, A]
    (funcHandler: MapFunc[T, ?] ~> OptionFree[EX, ?], staticHandler: StaticHandler[T, EX])
    (fm: FreeMapA[T, A])
    (recovery: A => Fix[ExprOp])
    (implicit inj: EX :<: ExprOp)
      : M[Fix[ExprOp]] = {
    val alg: AlgebraM[M, CoEnvMapA[T, A, ?], Fix[ExprOp]] =
      interpretM[M, MapFunc[T, ?], A, Fix[ExprOp]](
        recovery(_).point[M],
        expression(funcHandler))
    def convert(e: EX[FreeMapA[T, A]]): M[Fix[ExprOp]] =
      inj(e.map(_.cataM(alg))).sequence.map(_.embed)
    staticHandler.handle(fm).map(convert) getOrElse fm.cataM(alg)
  }
  /**
   * Derives a MongoDB query selector from a FreeMap, paired with the input finders
   * needed to fill its holes. Holes themselves contribute the default selector.
   */
  def getSelector
    [T[_[_]]: BirecursiveT: ShowT, M[_]: Monad: MonadFsErr, EX[_]: Traverse]
    (v: BsonVersion)
    (fm: FreeMap[T])
    (implicit inj: EX :<: ExprOp)
      : OutputM[PartialSelector[T]] =
    fm.zygo(
      interpret[MapFunc[T, ?], Hole, T[MapFunc[T, ?]]](
        κ(MFC(MapFuncsCore.Undefined[T, T[MapFunc[T, ?]]]()).embed),
        _.embed),
      ginterpret[(T[MapFunc[T, ?]], ?), MapFunc[T, ?], Hole, OutputM[PartialSelector[T]]](
        κ(defaultSelector[T].point[OutputM]),
        selector[T](v)))
  /** Compiles a FreeMap into a JavaScript expression, recovering leaves via `recovery`. */
  def processMapFunc[T[_[_]]: BirecursiveT: ShowT, M[_]: Monad: MonadFsErr: ExecTimeR, A]
    (fm: FreeMapA[T, A])(recovery: A => JsCore)
      : M[JsCore] =
    fm.cataM(interpretM[M, MapFunc[T, ?], A, JsCore](recovery(_).point[M], javascript))
  // FIXME: This is temporary. Should go away when the connector is complete.
  /** Fails planning with an "unimplemented" internal error naming the missing feature. */
  def unimplemented[M[_]: MonadFsErr, A](label: String): M[A] =
    raiseErr(qscriptPlanningFailed(InternalError.fromMsg(s"unimplemented $label")))
  // TODO: Should have a JsFn version of this for $reduce nodes.
  /** Maps QScript reduction functions onto MongoDB $group accumulators. */
  val accumulator: ReduceFunc[Fix[ExprOp]] => AccumOp[Fix[ExprOp]] = {
    import quasar.qscript.ReduceFuncs._
    {
      case Arbitrary(a) => $first(a)
      case First(a) => $first(a)
      case Last(a) => $last(a)
      case Avg(a) => $avg(a)
      // Count ignores its argument: it sums a literal 1 per document.
      case Count(_) => $sum($literal(Bson.Int32(1)))
      case Max(a) => $max(a)
      case Min(a) => $min(a)
      case Sum(a) => $sum(a)
      case UnshiftArray(a) => $push(a)
      // NOTE(review): UnshiftMap has no accumulator yet — `???` throws
      // NotImplementedError at plan time. Confirm whether this path is reachable.
      case UnshiftMap(k, v) => ???
    }
  }
  /** Collapses a `Free[F, T[F]]` into `T[F]` by embedding each `F` layer;
    * leaves (already `T[F]`) pass through unchanged via `ι`. */
  private def unpack[T[_[_]]: BirecursiveT, F[_]: Traverse](t: Free[F, T[F]]): T[F] =
    t.cata(interpret[F, T[F], T[F]](ι, _.embed))
  // NB: it's only safe to emit "core" expr ops here, but we always use the
  // largest type in WorkflowOp, so they're immediately injected into ExprOp.
  /** Runtime type-predicate builders (isString, isArray, …) over `ExprOp`. */
  val check = new Check[Fix[ExprOp], ExprOp]
def ejsonToExpression[M[_]: Applicative: MonadFsErr, EJ]
(v: BsonVersion)(ej: EJ)(implicit EJ: Recursive.Aux[EJ, EJson])
: M[Fix[ExprOp]] =
ej.cataM(BsonCodec.fromEJson(v)).fold(pe => raiseErr(qscriptPlanningFailed(pe)), $literal(_).point[M])
// TODO: Use `JsonCodec.encode` and avoid failing.
def ejsonToJs[M[_]: Applicative: MonadFsErr, EJ: Show]
(ej: EJ)(implicit EJ: Recursive.Aux[EJ, EJson])
: M[JsCore] =
ej.cata(Data.fromEJson).toJs.fold(
raiseErr[M, JsCore](qscriptPlanningFailed(NonRepresentableEJson(ej.shows))))(
_.point[M])
  /** The algebra that plans a single `MapFunc` node into a MongoDB
    * aggregation expression (`ExprOp`).
    *
    * Funcs expressible via `funcHandler` are handled generically in
    * `handleCommon`; the remainder are special-cased below or fail as
    * unimplemented (in which case the planner falls back to JS).
    */
  def expression[
    T[_[_]]: RecursiveT: ShowT,
    M[_]: Monad: ExecTimeR: MonadFsErr,
    EX[_]: Traverse](funcHandler: MapFunc[T, ?] ~> OptionFree[EX, ?])(
    implicit inj: EX :<: ExprOp
  ): AlgebraM[M, MapFunc[T, ?], Fix[ExprOp]] = {
    import MapFuncsCore._
    import MapFuncsDerived._
    // Funcs the handler can express directly, injected into ExprOp.
    def handleCommon(mf: MapFunc[T, Fix[ExprOp]]): Option[Fix[ExprOp]] =
      funcHandler(mf).map(t => unpack(t.mapSuspension(inj)))
    // Current query execution time, or a planner failure when the instant
    // cannot be represented as a BSON date.
    def execTime(implicit ev: ExecTimeR[M]): M[Bson.Date] =
      OptionT[M, Bson.Date](ev.ask.map(Bson.Date.fromInstant(_)))
        .getOrElseF(raiseErr(
          qscriptPlanningFailed(InternalError.fromMsg("Could not get the current timestamp"))))
    val handleSpecialCore: MapFuncCore[T, Fix[ExprOp]] => M[Fix[ExprOp]] = {
      case Constant(v1) => unimplemented[M, Fix[ExprOp]]("Constant expression")
      case Now() => execTime map ($literal(_))
      case Date(a1) => unimplemented[M, Fix[ExprOp]]("Date expression")
      case Time(a1) => unimplemented[M, Fix[ExprOp]]("Time expression")
      case Timestamp(a1) => unimplemented[M, Fix[ExprOp]]("Timestamp expression")
      case Interval(a1) => unimplemented[M, Fix[ExprOp]]("Interval expression")
      case StartOfDay(a1) => unimplemented[M, Fix[ExprOp]]("StartOfDay expression")
      case TemporalTrunc(a1, a2) => unimplemented[M, Fix[ExprOp]]("TemporalTrunc expression")
      case IfUndefined(a1, a2) => unimplemented[M, Fix[ExprOp]]("IfUndefined expression")
      case Within(a1, a2) => unimplemented[M, Fix[ExprOp]]("Within expression")
      case ExtractIsoYear(a1) =>
        unimplemented[M, Fix[ExprOp]]("ExtractIsoYear expression")
      case Integer(a1) => unimplemented[M, Fix[ExprOp]]("Integer expression")
      case Decimal(a1) => unimplemented[M, Fix[ExprOp]]("Decimal expression")
      case ToString(a1) => unimplemented[M, Fix[ExprOp]]("ToString expression")
      case MakeArray(a1) => unimplemented[M, Fix[ExprOp]]("MakeArray expression")
      case MakeMap(a1, a2) => unimplemented[M, Fix[ExprOp]]("MakeMap expression")
      case ConcatMaps(a1, a2) => unimplemented[M, Fix[ExprOp]]("ConcatMap expression")
      // Only projection of a statically-known field name off a doc var is
      // expressible as an ExprOp.
      case ProjectField($var(dv), $literal(Bson.Text(field))) =>
        $var(dv \ BsonField.Name(field)).point[M]
      case ProjectField(a1, a2) => unimplemented[M, Fix[ExprOp]](s"ProjectField expression")
      case ProjectIndex(a1, a2)  => unimplemented[M, Fix[ExprOp]]("ProjectIndex expression")
      case DeleteField(a1, a2)  => unimplemented[M, Fix[ExprOp]]("DeleteField expression")

      // NB: Quasar strings are arrays of characters. However, MongoDB
      //     represent strings and arrays as distinct types. Moreoever, SQL^2
      //     exposes two functions: `array_length` to obtain the length of an
      //     array and `length` to obtain the length of a string. This
      //     distinction, however, is lost when LP is translated into
      //     QScript. There's only one `Length` MapFunc. The workaround here
      //     detects calls to array_length or length indirectly through the
      //     typechecks inserted around calls to `Length` or `ArrayLength` in
      //     LP typechecks.

      case Length(a1) => unimplemented[M, Fix[ExprOp]]("Length expression")
      case Guard(expr, Type.Str, cont @ $strLenCP(_), fallback) =>
        $cond(check.isString(expr), cont, fallback).point[M]
      case Guard(expr, Type.FlexArr(_, _, _), $strLenCP(str), fallback) =>
        $cond(check.isArray(expr), $size(str), fallback).point[M]

      // NB: This is maybe a NOP for Fix[ExprOp]s, as they (all?) safely
      //     short-circuit when given the wrong type. However, our guards may be
      //     more restrictive than the operation, in which case we still want to
      //     short-circuit, so …
      case Guard(expr, typ, cont, fallback) =>
        // NB: Even if certain checks aren’t needed by ExprOps, we have to
        //     maintain them because we may convert ExprOps to JS.
        //     Hopefully BlackShield will eliminate the need for this.
        @SuppressWarnings(Array("org.wartremover.warts.Recursion"))
        def exprCheck: Type => Option[Fix[ExprOp] => Fix[ExprOp]] =
          generateTypeCheck[Fix[ExprOp], Fix[ExprOp]]($or(_, _)) {
            case Type.Null => check.isNull
            case Type.Int
               | Type.Dec
               | Type.Int ⨿ Type.Dec
               | Type.Int ⨿ Type.Dec ⨿ Type.Interval => check.isNumber
            case Type.Str => check.isString
            case Type.Obj(map, _) =>
              ((expr: Fix[ExprOp]) => {
                val basic = check.isObject(expr)
                expr match {
                  case $var(dv) =>
                    // Additionally check each statically-known field of the
                    // object type, when the expression is a doc var.
                    map.foldLeft(
                      basic)(
                      (acc, pair) =>
                        exprCheck(pair._2).fold(
                          acc)(
                          e => $and(acc, e($var(dv \ BsonField.Name(pair._1))))))
                  case _ => basic // FIXME: Check fields
                }
              })
            case Type.FlexArr(_, _, _) => check.isArray
            case Type.Binary => check.isBinary
            case Type.Id => check.isId
            case Type.Bool => check.isBoolean
            case Type.Date => check.isDateOrTimestamp // FIXME: use isDate here when >= 3.0
            // NB: Some explicit coproducts for adjacent types.
            case Type.Int ⨿ Type.Dec ⨿ Type.Str => check.isNumberOrString
            case Type.Int ⨿ Type.Dec ⨿ Type.Interval ⨿ Type.Str => check.isNumberOrString
            case Type.Date ⨿ Type.Bool => check.isDateTimestampOrBoolean
            case Type.Syntaxed => check.isSyntaxed
          }
        exprCheck(typ).fold(cont)(f => $cond(f(expr), cont, fallback)).point[M]
      case Range(_, _)     => unimplemented[M, Fix[ExprOp]]("Range expression")
      case Search(_, _, _) => unimplemented[M, Fix[ExprOp]]("Search expression")
      case Split(_, _)     => unimplemented[M, Fix[ExprOp]]("Split expression")
    }
    val handleSpecialDerived: MapFuncDerived[T, Fix[ExprOp]] => M[Fix[ExprOp]] = {
      case Abs(a1) => unimplemented[M, Fix[ExprOp]]("Abs expression")
      case Ceil(a1) => unimplemented[M, Fix[ExprOp]]("Ceil expression")
      case Floor(a1) => unimplemented[M, Fix[ExprOp]]("Floor expression")
      case Trunc(a1) => unimplemented[M, Fix[ExprOp]]("Trunc expression")
      case Round(a1) => unimplemented[M, Fix[ExprOp]]("Round expression")
      case FloorScale(a1, a2) => unimplemented[M, Fix[ExprOp]]("FloorScale expression")
      case CeilScale(a1, a2) => unimplemented[M, Fix[ExprOp]]("CeilScale expression")
      case RoundScale(a1, a2) => unimplemented[M, Fix[ExprOp]]("RoundScale expression")
    }
    val handleSpecial: MapFunc[T, Fix[ExprOp]] => M[Fix[ExprOp]] = {
      case MFC(mfc) => handleSpecialCore(mfc)
      case MFD(mfd) => handleSpecialDerived(mfd)
    }
    // Prefer the common handler; fall back to the special cases above.
    mf => handleCommon(mf).cata(_.point[M], handleSpecial(mf))
  }
  /** The algebra that plans a single `MapFunc` node into JavaScript
    * (`JsCore`), used when a func cannot be expressed as an `ExprOp`
    * (i.e. for `$where` / map-reduce evaluation).
    */
  def javascript[T[_[_]]: BirecursiveT: ShowT, M[_]: Applicative: MonadFsErr: ExecTimeR]
      : AlgebraM[M, MapFunc[T, ?], JsCore] = {
    import jscore.{
      Add => _, In => _,
      Lt => _, Lte => _, Gt => _, Gte => _, Eq => _, Neq => _,
      And => _, Or => _, Not => _,
      _}

    import MapFuncsCore._
    import MapFuncsDerived._

    val mjs = quasar.physical.mongodb.javascript[JsCore](_.embed)
    import mjs._

    // NB: Math.trunc is not present in MongoDB.
    def trunc(expr: JsCore): JsCore =
      Let(Name("x"), expr,
        BinOp(jscore.Sub,
          ident("x"),
          BinOp(jscore.Mod, ident("x"), Literal(Js.Num(1, false)))))

    // Query execution time rendered as a JS string literal.
    def execTime(implicit ev: ExecTimeR[M]): M[JsCore] =
      ev.ask map (ts => Literal(Js.Str(ts.toString)))

    // Funcs expressible through the generic JS handler.
    def handleCommon(mf: MapFunc[T, JsCore]): Option[JsCore] =
      JsFuncHandler.handle[MapFunc[T, ?]].apply(mf).map(unpack[Fix, JsCoreF])

    val handleSpecialCore: MapFuncCore[T, JsCore] => M[JsCore] = {
      case Constant(v1) => ejsonToJs[M, T[EJson]](v1)
      case Undefined() => ident("undefined").point[M]
      case JoinSideName(n) =>
        raiseErr[M, JsCore](qscriptPlanningFailed(UnexpectedJoinSide(n)))
      case Now() => execTime map (ts => New(Name("ISODate"), List(ts)))
      case Length(a1) =>
        Call(ident("NumberLong"), List(Select(a1, "length"))).point[M]
      // Date/Time/Timestamp parse the input only when it matches the
      // corresponding regex; otherwise they evaluate to `undefined`.
      case Date(a1) =>
        If(Call(Select(Call(ident("RegExp"), List(Literal(Js.Str("^" + string.dateRegex + "$")))), "test"), List(a1)),
          Call(ident("ISODate"), List(a1)),
          ident("undefined")).point[M]
      case Time(a1) =>
        If(Call(Select(Call(ident("RegExp"), List(Literal(Js.Str("^" + string.timeRegex + "$")))), "test"), List(a1)),
          a1,
          ident("undefined")).point[M]
      case Timestamp(a1) =>
        If(Call(Select(Call(ident("RegExp"), List(Literal(Js.Str("^" + string.timestampRegex + "$")))), "test"), List(a1)),
          Call(ident("ISODate"), List(a1)),
          ident("undefined")).point[M]
      case Interval(a1) => unimplemented[M, JsCore]("Interval JS")
      case TimeOfDay(a1) => {
        // Zero-pads to 2 digits (hours/minutes/seconds).
        def pad2(x: JsCore) =
          Let(Name("x"), x,
            If(
              BinOp(jscore.Lt, ident("x"), Literal(Js.Num(10, false))),
              BinOp(jscore.Add, Literal(Js.Str("0")), ident("x")),
              ident("x")))
        // Zero-pads to 3 digits (milliseconds).
        def pad3(x: JsCore) =
          Let(Name("x"), x,
            If(
              BinOp(jscore.Lt, ident("x"), Literal(Js.Num(100, false))),
              BinOp(jscore.Add, Literal(Js.Str("00")), ident("x")),
              If(
                BinOp(jscore.Lt, ident("x"), Literal(Js.Num(10, false))),
                BinOp(jscore.Add, Literal(Js.Str("0")), ident("x")),
                ident("x"))))
        Let(Name("t"), a1,
          binop(jscore.Add,
            pad2(Call(Select(ident("t"), "getUTCHours"), Nil)),
            Literal(Js.Str(":")),
            pad2(Call(Select(ident("t"), "getUTCMinutes"), Nil)),
            Literal(Js.Str(":")),
            pad2(Call(Select(ident("t"), "getUTCSeconds"), Nil)),
            Literal(Js.Str(".")),
            pad3(Call(Select(ident("t"), "getUTCMilliseconds"), Nil)))).point[M]
      }
      case ToTimestamp(a1) => New(Name("Date"), List(a1)).point[M]
      case ExtractCentury(date) =>
        Call(ident("NumberLong"), List(
          Call(Select(ident("Math"), "ceil"), List(
            BinOp(jscore.Div,
              Call(Select(date, "getUTCFullYear"), Nil),
              Literal(Js.Num(100, false))))))).point[M]
      case ExtractDayOfMonth(date) => Call(Select(date, "getUTCDate"), Nil).point[M]
      case ExtractDecade(date) =>
        Call(ident("NumberLong"), List(
          trunc(
            BinOp(jscore.Div,
              Call(Select(date, "getUTCFullYear"), Nil),
              Literal(Js.Num(10, false)))))).point[M]
      case ExtractDayOfWeek(date) =>
        Call(Select(date, "getUTCDay"), Nil).point[M]
      case ExtractDayOfYear(date) =>
        Call(ident("NumberInt"), List(
          Call(Select(ident("Math"), "floor"), List(
            BinOp(jscore.Add,
              BinOp(jscore.Div,
                BinOp(Sub,
                  date,
                  New(Name("Date"), List(
                    Call(Select(date, "getFullYear"), Nil),
                    Literal(Js.Num(0, false)),
                    Literal(Js.Num(0, false))))),
                Literal(Js.Num(86400000, false))),
              Literal(Js.Num(1, false))))))).point[M]
      case ExtractEpoch(date) =>
        Call(ident("NumberLong"), List(
          BinOp(jscore.Div,
            Call(Select(date, "valueOf"), Nil),
            Literal(Js.Num(1000, false))))).point[M]
      case ExtractHour(date) => Call(Select(date, "getUTCHours"), Nil).point[M]
      case ExtractIsoDayOfWeek(date) =>
        // ISO day-of-week is 1..7 with Sunday = 7 (JS getUTCDay is 0..6).
        Let(Name("x"), Call(Select(date, "getUTCDay"), Nil),
          If(
            BinOp(jscore.Eq, ident("x"), Literal(Js.Num(0, false))),
            Literal(Js.Num(7, false)),
            ident("x"))).point[M]
      case ExtractIsoYear(date) =>
        Call(Select(date, "getUTCFullYear"), Nil).point[M]
      case ExtractMicroseconds(date) =>
        BinOp(jscore.Mult,
          BinOp(jscore.Add,
            Call(Select(date, "getUTCMilliseconds"), Nil),
            BinOp(jscore.Mult,
              Call(Select(date, "getUTCSeconds"), Nil),
              Literal(Js.Num(1000, false)))),
          Literal(Js.Num(1000, false))).point[M]
      case ExtractMillennium(date) =>
        Call(ident("NumberLong"), List(
          Call(Select(ident("Math"), "ceil"), List(
            BinOp(jscore.Div,
              Call(Select(date, "getUTCFullYear"), Nil),
              Literal(Js.Num(1000, false))))))).point[M]
      case ExtractMilliseconds(date) =>
        BinOp(jscore.Add,
          Call(Select(date, "getUTCMilliseconds"), Nil),
          BinOp(jscore.Mult,
            Call(Select(date, "getUTCSeconds"), Nil),
            Literal(Js.Num(1000, false)))).point[M]
      case ExtractMinute(date) =>
        Call(Select(date, "getUTCMinutes"), Nil).point[M]
      case ExtractMonth(date) =>
        BinOp(jscore.Add,
          Call(Select(date, "getUTCMonth"), Nil),
          Literal(Js.Num(1, false))).point[M]
      case ExtractQuarter(date) =>
        // `x | 0` truncates the division to an integer before adding 1.
        Call(ident("NumberInt"), List(
          BinOp(jscore.Add,
            BinOp(jscore.BitOr,
              BinOp(jscore.Div,
                Call(Select(date, "getUTCMonth"), Nil),
                Literal(Js.Num(3, false))),
              Literal(Js.Num(0, false))),
            Literal(Js.Num(1, false))))).point[M]
      case ExtractSecond(date) =>
        BinOp(jscore.Add,
          Call(Select(date, "getUTCSeconds"), Nil),
          BinOp(jscore.Div,
            Call(Select(date, "getUTCMilliseconds"), Nil),
            Literal(Js.Num(1000, false)))).point[M]
      case ExtractWeek(date) =>
        Call(ident("NumberInt"), List(
          Call(Select(ident("Math"), "floor"), List(
            BinOp(jscore.Add,
              BinOp(jscore.Div,
                Let(Name("startOfYear"),
                  New(Name("Date"), List(
                    Call(Select(date, "getFullYear"), Nil),
                    Literal(Js.Num(0, false)),
                    Literal(Js.Num(1, false)))),
                  BinOp(jscore.Add,
                    BinOp(Div,
                      BinOp(Sub, date, ident("startOfYear")),
                      Literal(Js.Num(86400000, false))),
                    BinOp(jscore.Add,
                      Call(Select(ident("startOfYear"), "getDay"), Nil),
                      Literal(Js.Num(1, false))))),
                Literal(Js.Num(7, false))),
              Literal(Js.Num(1, false))))))).point[M]
      case ExtractYear(date) => Call(Select(date, "getUTCFullYear"), Nil).point[M]
      case Negate(a1)       => UnOp(Neg, a1).point[M]
      case Add(a1, a2)      => BinOp(jscore.Add, a1, a2).point[M]
      case Multiply(a1, a2) => BinOp(Mult, a1, a2).point[M]
      case Subtract(a1, a2) => BinOp(Sub, a1, a2).point[M]
      case Divide(a1, a2)   => BinOp(Div, a1, a2).point[M]
      case Modulo(a1, a2)   => BinOp(Mod, a1, a2).point[M]
      case Power(a1, a2)    => Call(Select(ident("Math"), "pow"), List(a1, a2)).point[M]
      case Not(a1)     => UnOp(jscore.Not, a1).point[M]
      case Eq(a1, a2)  => BinOp(jscore.Eq, a1, a2).point[M]
      case Neq(a1, a2) => BinOp(jscore.Neq, a1, a2).point[M]
      case Lt(a1, a2)  => BinOp(jscore.Lt, a1, a2).point[M]
      case Lte(a1, a2) => BinOp(jscore.Lte, a1, a2).point[M]
      case Gt(a1, a2)  => BinOp(jscore.Gt, a1, a2).point[M]
      case Gte(a1, a2) => BinOp(jscore.Gte, a1, a2).point[M]
      case IfUndefined(a1, a2) =>
        // TODO: Only evaluate `value` once.
        If(BinOp(jscore.Eq, a1, ident("undefined")), a2, a1).point[M]
      case And(a1, a2) => BinOp(jscore.And, a1, a2).point[M]
      case Or(a1, a2)  => BinOp(jscore.Or, a1, a2).point[M]
      case Between(a1, a2, a3) =>
        // NOTE(review): this emits *calls* to identifiers named "&&" and "<=",
        // which is not valid JavaScript unless JsCore renders these specially —
        // presumably this should be BinOp(jscore.And/Lte, …) like the cases
        // above; confirm against the JsCore renderer before changing.
        Call(ident("&&"), List(
          Call(ident("<="), List(a2, a1)),
          Call(ident("<="), List(a1, a3)))).point[M]
      case Cond(a1, a2, a3) => If(a1, a2, a3).point[M]
      case Within(a1, a2) =>
        BinOp(jscore.Neq,
          Literal(Js.Num(-1, false)),
          Call(Select(a2, "indexOf"), List(a1))).point[M]
      // TODO: move these to JsFuncHandler
      case Lower(a1) => Call(Select(a1, "toLowerCase"), Nil).point[M]
      case Upper(a1) => Call(Select(a1, "toUpperCase"), Nil).point[M]
      case Bool(a1) =>
        If(BinOp(jscore.Eq, a1, Literal(Js.Str("true"))),
          Literal(Js.Bool(true)),
          If(BinOp(jscore.Eq, a1, Literal(Js.Str("false"))),
            Literal(Js.Bool(false)),
            ident("undefined"))).point[M]
      case Integer(a1) =>
        If(Call(Select(Call(ident("RegExp"), List(Literal(Js.Str("^" + string.intRegex + "$")))), "test"), List(a1)),
          Call(ident("NumberLong"), List(a1)),
          ident("undefined")).point[M]
      case Decimal(a1) =>
        If(Call(Select(Call(ident("RegExp"), List(Literal(Js.Str("^" + string.floatRegex + "$")))), "test"), List(a1)),
          Call(ident("parseFloat"), List(a1)),
          ident("undefined")).point[M]
      case Null(a1) =>
        If(BinOp(jscore.Eq, a1, Literal(Js.Str("null"))),
          Literal(Js.Null),
          ident("undefined")).point[M]
      case ToString(a1) =>
        If(isInt(a1),
          // NB: This is a terrible way to turn an int into a string, but the
          //     only one that doesn’t involve converting to a decimal and
          //     losing precision.
          Call(Select(Call(ident("String"), List(a1)), "replace"), List(
            Call(ident("RegExp"), List(
              Literal(Js.Str("[^-0-9]+")),
              Literal(Js.Str("g")))),
            Literal(Js.Str("")))),
          If(binop(jscore.Or, isTimestamp(a1), isDate(a1)),
            Call(Select(a1, "toISOString"), Nil),
            Call(ident("String"), List(a1)))).point[M]
      case Search(a1, a2, a3) =>
        // a3 toggles case-insensitive matching ("im" vs "m" flags).
        Call(
          Select(
            New(Name("RegExp"), List(
              a2,
              If(a3, Literal(Js.Str("im")), Literal(Js.Str("m"))))),
            "test"),
          List(a1)).point[M]
      case Substring(a1, a2, a3) =>
        Call(Select(a1, "substr"), List(a2, a3)).point[M]
      case Split(a1, a2) =>
        Call(Select(a1, "split"), List(a2)).point[M]
      case MakeMap(Embed(LiteralF(Js.Str(str))), a2) => Obj(ListMap(Name(str) -> a2)).point[M]
      // TODO: pull out the literal, and handle this case in other situations
      case MakeMap(a1, a2) => Obj(ListMap(Name("__Quasar_non_string_map") ->
        Arr(List(Arr(List(a1, a2)))))).point[M]
      case ConcatArrays(Embed(ArrF(a1)), Embed(ArrF(a2))) =>
        Arr(a1 |+| a2).point[M]
      case ConcatArrays(a1, a2) =>
        If(BinOp(jscore.Or, isArray(a1), isArray(a2)),
          Call(Select(a1, "concat"), List(a2)),
          BinOp(jscore.Add, a1, a2)).point[M]
      case ConcatMaps(Embed(ObjF(o1)), Embed(ObjF(o2))) =>
        Obj(o1 ++ o2).point[M]
      case ConcatMaps(a1, a2) => SpliceObjects(List(a1, a2)).point[M]
      case ProjectField(a1, a2) => Access(a1, a2).point[M]
      case ProjectIndex(a1, a2) => Access(a1, a2).point[M]
      case DeleteField(a1, a2)  => Call(ident("remove"), List(a1, a2)).point[M]
      case Guard(expr, typ, cont, fallback) =>
        val jsCheck: Type => Option[JsCore => JsCore] =
          generateTypeCheck[JsCore, JsCore](BinOp(jscore.Or, _, _)) {
            case Type.Null             => isNull
            case Type.Dec              => isDec
            case Type.Int
               | Type.Int ⨿ Type.Dec
               | Type.Int ⨿ Type.Dec ⨿ Type.Interval
                => isAnyNumber
            case Type.Str              => isString
            case Type.Obj(_, _) ⨿ Type.FlexArr(_, _, _)
                => isObjectOrArray
            case Type.Obj(_, _)        => isObject
            case Type.FlexArr(_, _, _) => isArray
            case Type.Binary           => isBinary
            case Type.Id               => isObjectId
            case Type.Bool             => isBoolean
            case Type.Date             => isDate
          }
        jsCheck(typ).fold[M[JsCore]](
          raiseErr(qscriptPlanningFailed(InternalError.fromMsg("uncheckable type"))))(
          f => If(f(expr), cont, fallback).point[M])
      // FIXME: Doesn't work for Char.
      case Range(start, end) =>
        // Array.apply(null, Array(n)) builds an n-element array to map over.
        Call(
          Select(
            Call(Select(ident("Array"), "apply"), List(
              Literal(Js.Null),
              Call(ident("Array"), List(BinOp(Sub, end, start))))),
            "map"),
          List(
            Fun(List(Name("element"), Name("index")),
              BinOp(jscore.Add, ident("index"), start)))).point[M]
    }

    val handleSpecialDerived: MapFuncDerived[T, JsCore] => M[JsCore] = {
      case Abs(a1)            => unimplemented[M, JsCore]("Abs JS")
      case Ceil(a1)           => unimplemented[M, JsCore]("Ceil JS")
      case Floor(a1)          => unimplemented[M, JsCore]("Floor JS")
      case Trunc(a1)          => unimplemented[M, JsCore]("Trunc JS")
      case Round(a1)          => unimplemented[M, JsCore]("Round JS")
      case FloorScale(a1, a2) => unimplemented[M, JsCore]("FloorScale JS")
      case CeilScale(a1, a2)  => unimplemented[M, JsCore]("CeilScale JS")
      case RoundScale(a1, a2) => unimplemented[M, JsCore]("RoundScale JS")
    }

    val handleSpecial: MapFunc[T, JsCore] => M[JsCore] = {
      case MFC(mfc) => handleSpecialCore(mfc)
      case MFD(mfd) => handleSpecialDerived(mfd)
    }

    // Prefer the common handler; fall back to the special cases above.
    mf => handleCommon(mf).cata(_.point[M], handleSpecial(mf))
  }
  // TODO: Need this until the old connector goes away and we can redefine
  //       `Selector` as `Selector[A, B]`, where `A` is the field type
  //       (naturally `BsonField`), and `B` is the recursive parameter.
  /** A selector builder awaiting its input `BsonField`s, paired with the
    * positions within the original expression those fields come from. */
  type PartialSelector[T[_[_]]] = Partial[T, BsonField, Selector]
  /** Fallback selector: matches documents where the (single) input field
    * equals `true`. */
  def defaultSelector[T[_[_]]]: PartialSelector[T] = (
    { case List(field) =>
      Selector.Doc(ListMap(
        field -> Selector.Expr(Selector.Eq(Bson.Bool(true)))))
    },
    List(Here[T]()))
  /** The selector phase tries to turn expressions into MongoDB selectors – i.e.
    * Mongo query expressions. Selectors are only used for the filtering
    * pipeline op, so it's quite possible we build more stuff than is needed
    * (but it doesn’t matter, unneeded annotations will be ignored by the
    * pipeline phase).
    *
    * Like the expression op phase, this one requires bson field annotations.
    *
    * Most expressions cannot be turned into selector expressions without using
    * the "\$where" operator, which allows embedding JavaScript
    * code. Unfortunately, using this operator turns filtering into a full table
    * scan. We should do a pass over the tree to identify partial boolean
    * expressions which can be turned into selectors, factoring out the
    * leftovers for conversion using \$where.
    */
  def selector[T[_[_]]: RecursiveT: ShowT](v: BsonVersion):
      GAlgebra[(T[MapFunc[T, ?]], ?), MapFunc[T, ?], OutputM[PartialSelector[T]]] = { node =>
    import MapFuncsCore._

    type Output = OutputM[PartialSelector[T]]

    // Extracts a constant subtree representable as BSON at version `v`.
    object IsBson {
      def unapply(x: (T[MapFunc[T, ?]], Output)): Option[Bson] =
        x._1.project match {
          case MFC(Constant(b)) => b.cataM(BsonCodec.fromEJson(v)).toOption
          case _ => None
        }
    }

    object IsBool {
      def unapply(v: (T[MapFunc[T, ?]], Output)): Option[Boolean] =
        v match {
          case IsBson(Bson.Bool(b)) => b.some
          case _                    => None
        }
    }

    object IsText {
      def unapply(v: (T[MapFunc[T, ?]], Output)): Option[String] =
        v match {
          case IsBson(Bson.Text(str)) => Some(str)
          case _                      => None
        }
    }

    object IsDate {
      def unapply(v: (T[MapFunc[T, ?]], Output)): Option[Data.Date] =
        v._1.project match {
          case MFC(Constant(d @ Data.Date(_))) => Some(d)
          case _                               => None
        }
    }

    // Relational MapFuncs and the selector conditions they map to directly.
    val relFunc: MapFunc[T, _] => Option[Bson => Selector.Condition] = {
      case MFC(Eq(_, _))  => Some(Selector.Eq)
      case MFC(Neq(_, _)) => Some(Selector.Neq)
      case MFC(Lt(_, _))  => Some(Selector.Lt)
      case MFC(Lte(_, _)) => Some(Selector.Lte)
      case MFC(Gt(_, _))  => Some(Selector.Gt)
      case MFC(Gte(_, _)) => Some(Selector.Gte)
      case _              => None
    }

    val default: PartialSelector[T] = defaultSelector[T]

    def invoke(func: MapFunc[T, (T[MapFunc[T, ?]], Output)]): Output = {
      /**
        * All the relational operators require a field as one parameter, and
        * BSON literal value as the other parameter. So we have to try to
        * extract out both a field annotation and a selector and then verify
        * the selector is actually a BSON literal value before we can
        * construct the relational operator selector. If this fails for any
        * reason, it just means the given expression cannot be represented
        * using MongoDB's query operators, and must instead be written as
        * Javascript using the "$where" operator.
        */
      def relop
        (x: (T[MapFunc[T, ?]], Output), y: (T[MapFunc[T, ?]], Output))
        (f: Bson => Selector.Condition, r: Bson => Selector.Condition):
          Output =
        (x, y) match {
          case (_, IsBson(v2)) =>
            \/-(({ case List(f1) => Selector.Doc(ListMap(f1 -> Selector.Expr(f(v2)))) }, List(There(0, Here[T]()))))
          case (IsBson(v1), _) =>
            \/-(({ case List(f2) => Selector.Doc(ListMap(f2 -> Selector.Expr(r(v1)))) }, List(There(1, Here[T]()))))

          case (_, _) => -\/(InternalError fromMsg node.map(_._1).shows)
        }

      // Rewrites a comparison against a date constant as a single-bound
      // condition on a day boundary (startOfDay / startOfNextDay).
      def relDateOp1(f: Bson.Date => Selector.Condition, date: Data.Date, g: Data.Date => Data.Timestamp, index: Int): Output =
        Bson.Date.fromInstant(g(date).value).fold[Output](
          -\/(NonRepresentableData(g(date))))(
          d => \/-((
            { case x :: Nil => Selector.Doc(x -> f(d)) },
            List(There(index, Here[T]())))))

      // Like relDateOp1 but combines two bounds (e.g. Eq ⇒ [startOfDay, startOfNextDay)).
      def relDateOp2(conj: (Selector, Selector) => Selector, f1: Bson.Date => Selector.Condition, f2: Bson.Date => Selector.Condition, date: Data.Date, g1: Data.Date => Data.Timestamp, g2: Data.Date => Data.Timestamp, index: Int): Output =
        ((Bson.Date.fromInstant(g1(date).value) \/> NonRepresentableData(g1(date))) ⊛
          (Bson.Date.fromInstant(g2(date).value) \/> NonRepresentableData(g2(date))))((d1, d2) =>
          (
            { case x :: Nil =>
              conj(
                Selector.Doc(x -> f1(d1)),
                Selector.Doc(x -> f2(d2)))
            },
            List(There(index, Here[T]()))))

      // Combines two partial selectors, splitting the input field list
      // between them by the first selector's arity.
      def invoke2Nel(x: Output, y: Output)(f: (Selector, Selector) => Selector):
          Output =
        (x ⊛ y) { case ((f1, p1), (f2, p2)) =>
          ({ case list =>
            f(f1(list.take(p1.size)), f2(list.drop(p1.size)))
          },
            p1.map(There(0, _)) ++ p2.map(There(1, _)))
        }

      // Mirror image of each relational operator, used when the constant is
      // on the left of the comparison.
      val flipCore: MapFuncCore[T, _] => Option[MapFuncCore[T, _]] = {
        case Eq(a, b)  => Some(Eq(a, b))
        case Neq(a, b) => Some(Neq(a, b))
        case Lt(a, b)  => Some(Gt(a, b))
        case Lte(a, b) => Some(Gte(a, b))
        case Gt(a, b)  => Some(Lt(a, b))
        case Gte(a, b) => Some(Lte(a, b))
        case And(a, b) => Some(And(a, b))
        case Or(a, b)  => Some(Or(a, b))
        case _         => None
      }

      val flip: MapFunc[T, _] => Option[MapFunc[T, _]] = {
        case MFC(mfc) => flipCore(mfc).map(MFC(_))
        case _        => None
      }

      def reversibleRelop(x: (T[MapFunc[T, ?]], Output), y: (T[MapFunc[T, ?]], Output))(f: MapFunc[T, _]): Output =
        (relFunc(f) ⊛ flip(f).flatMap(relFunc))(relop(x, y)(_, _)).getOrElse(-\/(InternalError fromMsg "couldn’t decipher operation"))

      func match {
        case MFC(Constant(_)) => \/-(default)

        // Comparisons against date constants become day-boundary ranges.
        case MFC(Gt(_, IsDate(d2)))  => relDateOp1(Selector.Gte, d2, date.startOfNextDay, 0)
        case MFC(Lt(IsDate(d1), _))  => relDateOp1(Selector.Gte, d1, date.startOfNextDay, 1)

        case MFC(Lt(_, IsDate(d2)))  => relDateOp1(Selector.Lt, d2, date.startOfDay, 0)
        case MFC(Gt(IsDate(d1), _))  => relDateOp1(Selector.Lt, d1, date.startOfDay, 1)

        case MFC(Gte(_, IsDate(d2))) => relDateOp1(Selector.Gte, d2, date.startOfDay, 0)
        case MFC(Lte(IsDate(d1), _)) => relDateOp1(Selector.Gte, d1, date.startOfDay, 1)

        case MFC(Lte(_, IsDate(d2))) => relDateOp1(Selector.Lt, d2, date.startOfNextDay, 0)
        case MFC(Gte(IsDate(d1), _)) => relDateOp1(Selector.Lt, d1, date.startOfNextDay, 1)

        case MFC(Eq(_, IsDate(d2))) => relDateOp2(Selector.And(_, _), Selector.Gte, Selector.Lt, d2, date.startOfDay, date.startOfNextDay, 0)
        case MFC(Eq(IsDate(d1), _)) => relDateOp2(Selector.And(_, _), Selector.Gte, Selector.Lt, d1, date.startOfDay, date.startOfNextDay, 1)

        case MFC(Neq(_, IsDate(d2))) => relDateOp2(Selector.Or(_, _), Selector.Lt, Selector.Gte, d2, date.startOfDay, date.startOfNextDay, 0)
        case MFC(Neq(IsDate(d1), _)) => relDateOp2(Selector.Or(_, _), Selector.Lt, Selector.Gte, d1, date.startOfDay, date.startOfNextDay, 1)

        case MFC(Eq(a, b))  => reversibleRelop(a, b)(func)
        case MFC(Neq(a, b)) => reversibleRelop(a, b)(func)
        case MFC(Lt(a, b))  => reversibleRelop(a, b)(func)
        case MFC(Lte(a, b)) => reversibleRelop(a, b)(func)
        case MFC(Gt(a, b))  => reversibleRelop(a, b)(func)
        case MFC(Gte(a, b)) => reversibleRelop(a, b)(func)

        // NB: workaround patmat exhaustiveness checker bug. Merge with previous `match`
        //     once solved.
        case MFC(Within(a, b)) =>
          relop(a, b)(
            Selector.In.apply _,
            x => Selector.ElemMatch(\/-(Selector.In(Bson.Arr(List(x))))))

        case MFC(Search(_, IsText(patt), IsBool(b))) =>
          \/-(({ case List(f1) =>
            Selector.Doc(ListMap(f1 -> Selector.Expr(Selector.Regex(patt, b, true, false, false)))) },
            List(There(0, Here[T]()))))

        case MFC(Between(_, IsBson(lower), IsBson(upper))) =>
          \/-(({ case List(f) => Selector.And(
            Selector.Doc(f -> Selector.Gte(lower)),
            Selector.Doc(f -> Selector.Lte(upper)))
          },
            List(There(0, Here[T]()))))

        case MFC(And(a, b)) => invoke2Nel(a._2, b._2)(Selector.And.apply _)
        case MFC(Or(a, b)) => invoke2Nel(a._2, b._2)(Selector.Or.apply _)
        case MFC(Not((_, v))) =>
          v.map { case (sel, inputs) => (sel andThen (_.negate), inputs.map(There(0, _))) }

        case MFC(Guard(_, typ, cont, _)) =>
          // Type guards become `$type` checks on the guarded field.
          def selCheck: Type => Option[BsonField => Selector] =
            generateTypeCheck[BsonField, Selector](Selector.Or(_, _)) {
              case Type.Null => ((f: BsonField) =>  Selector.Doc(f -> Selector.Type(BsonType.Null)))
              case Type.Dec => ((f: BsonField) => Selector.Doc(f -> Selector.Type(BsonType.Dec)))
              case Type.Int =>
                ((f: BsonField) => Selector.Or(
                  Selector.Doc(f -> Selector.Type(BsonType.Int32)),
                  Selector.Doc(f -> Selector.Type(BsonType.Int64))))
              case Type.Int ⨿ Type.Dec ⨿ Type.Interval =>
                ((f: BsonField) =>
                  Selector.Or(
                    Selector.Doc(f -> Selector.Type(BsonType.Int32)),
                    Selector.Doc(f -> Selector.Type(BsonType.Int64)),
                    Selector.Doc(f -> Selector.Type(BsonType.Dec))))
              case Type.Str => ((f: BsonField) => Selector.Doc(f -> Selector.Type(BsonType.Text)))
              case Type.Obj(_, _) =>
                ((f: BsonField) => Selector.Doc(f -> Selector.Type(BsonType.Doc)))
              case Type.Binary =>
                ((f: BsonField) => Selector.Doc(f -> Selector.Type(BsonType.Binary)))
              case Type.Id =>
                ((f: BsonField) => Selector.Doc(f -> Selector.Type(BsonType.ObjectId)))
              case Type.Bool => ((f: BsonField) => Selector.Doc(f -> Selector.Type(BsonType.Bool)))
              case Type.Date =>
                ((f: BsonField) => Selector.Doc(f -> Selector.Type(BsonType.Date)))
            }
          selCheck(typ).fold[OutputM[PartialSelector[T]]](
            -\/(InternalError.fromMsg(node.map(_._1).shows)))(
            f =>
              \/-(cont._2.fold[PartialSelector[T]](
                κ(({ case List(field) => f(field) }, List(There(0, Here[T]())))),
                { case (f2, p2) =>
                  ({ case head :: tail => Selector.And(f(head), f2(tail)) },
                    There(0, Here[T]()) :: p2.map(There(1, _)))
                })))

        case _ => -\/(InternalError fromMsg node.map(_._1).shows)
      }
    }

    // Fall back to the trivially-true selector when nothing better applies.
    invoke(node) <+> \/-(default)
  }
  /** Brings a [[WBM]] into our `M`, lifting workflow-builder errors into
    * planner failures. */
  def liftM[M[_]: Monad: MonadFsErr, A](meh: WBM[A]): M[A] =
    meh.fold(
      e => raiseErr(qscriptPlanningFailed(e)),
      _.point[M])
def createFieldName(prefix: String, i: Int): String = prefix + i.toString
  /** Type class of QScript layers `F` that can be planned into a
    * [[WorkflowBuilder]]; `IT` fixes the recursive type used by the layer. */
  trait Planner[F[_]] {
    type IT[G[_]]

    def plan
      [M[_]: Monad: ExecTimeR: MonadFsErr, WF[_]: Functor: Coalesce: Crush: Crystallize, EX[_]: Traverse]
      (cfg: PlannerConfig[IT, EX, WF])
      (implicit
        ev0: WorkflowOpCoreF :<: WF,
        ev1: RenderTree[WorkflowBuilder[WF]],
        ev2: WorkflowBuilder.Ops[WF],
        ev3: EX :<: ExprOp):
        AlgebraM[M, F, WorkflowBuilder[WF]]
  }
object Planner {
    /** A `Planner` whose `IT` is pinned to `T` (the Aux pattern). */
    type Aux[T[_[_]], F[_]] = Planner[F] { type IT[G[_]] = T[G] }
    /** Summons the planner instance for `F` at `T`. */
    def apply[T[_[_]], F[_]](implicit ev: Planner.Aux[T, F]) = ev
    /** Plans a `ShiftedRead` of a file: reads the backing collection and
      * projects `_id` and/or the document value according to `idStatus`. */
    implicit def shiftedReadFile[T[_[_]]: BirecursiveT: ShowT]: Planner.Aux[T, Const[ShiftedRead[AFile], ?]] =
      new Planner[Const[ShiftedRead[AFile], ?]] {
        type IT[G[_]] = T[G]
        def plan
          [M[_]: Monad: ExecTimeR: MonadFsErr, WF[_]: Functor: Coalesce: Crush: Crystallize, EX[_]: Traverse]
          (cfg: PlannerConfig[T, EX, WF])
          (implicit
            ev0: WorkflowOpCoreF :<: WF,
            ev1: RenderTree[WorkflowBuilder[WF]],
            WB: WorkflowBuilder.Ops[WF],
            ev3: EX :<: ExprOp) =
          qs => Collection
            .fromFile(qs.getConst.path)
            .fold(
              e => raiseErr(qscriptPlanningFailed(PlanPathError(e))),
              coll => {
                val dataset = WB.read(coll)
                // TODO: exclude `_id` from the value here?
                qs.getConst.idStatus match {
                  // IdOnly: project just the `_id` field.
                  case IdOnly =>
                    getExprBuilder[T, M, WF, EX](
                      cfg.funcHandler, cfg.staticHandler)(
                      dataset,
                        Free.roll(MFC(MapFuncsCore.ProjectField[T, FreeMap[T]](HoleF[T], MapFuncsCore.StrLit("_id")))))
                  // IncludeId: pair `_id` with the whole document.
                  case IncludeId =>
                    getExprBuilder[T, M, WF, EX](
                      cfg.funcHandler, cfg.staticHandler)(
                      dataset,
                        MapFuncCore.StaticArray(List(
                          Free.roll(MFC(MapFuncsCore.ProjectField[T, FreeMap[T]](HoleF[T], MapFuncsCore.StrLit("_id")))),
                          HoleF)))
                  // ExcludeId: the raw document is already what we want.
                  case ExcludeId => dataset.point[M]
                }
              })
      }
    /** Plans the core QScript operations (Map, LeftShift, Reduce, Sort,
      * Filter, Union, Subset, Unreferenced) into workflow builders. */
    implicit def qscriptCore[T[_[_]]: BirecursiveT: EqualT: ShowT]:
        Planner.Aux[T, QScriptCore[T, ?]] =
      new Planner[QScriptCore[T, ?]] {
        type IT[G[_]] = T[G]

        @SuppressWarnings(Array("org.wartremover.warts.Recursion"))
        def plan
          [M[_]: Monad: ExecTimeR: MonadFsErr,
            WF[_]: Functor: Coalesce: Crush: Crystallize,
            EX[_]: Traverse]
          (cfg: PlannerConfig[T, EX, WF])
          (implicit
            ev0: WorkflowOpCoreF :<: WF,
            ev1: RenderTree[WorkflowBuilder[WF]],
            WB: WorkflowBuilder.Ops[WF],
            ev3: EX :<: ExprOp) = {
          case qscript.Map(src, f) =>
            getExprBuilder[T, M, WF, EX](cfg.funcHandler, cfg.staticHandler)(src, f)
          case LeftShift(src, struct, id, repair) =>
            // When `repair` references the left side we must keep both the
            // source value ("s") and the flattened value ("f") and merge
            // them in JS; otherwise we can flatten the struct directly.
            if (repair.contains(LeftSideF))
              (handleFreeMap[T, M, EX](cfg.funcHandler, cfg.staticHandler, struct) ⊛
                getJsMerge[T, M](
                  repair,
                  jscore.Select(jscore.Ident(JsFn.defaultName), "s"),
                  jscore.Select(jscore.Ident(JsFn.defaultName), "f")))((expr, j) =>
                ExprBuilder(
                  FlatteningBuilder(
                    DocBuilder(
                      src,
                      ListMap(
                        BsonField.Name("s") -> docVarToExpr(DocVar.ROOT()),
                        BsonField.Name("f") -> expr)),
                    // TODO: Handle arrays properly
                    Set(StructureType.Object(DocField(BsonField.Name("f")), id))),
                  -\&/(j)))
            else
              getExprBuilder[T, M, WF, EX](cfg.funcHandler, cfg.staticHandler)(src, struct) >>= (builder =>
                getExprBuilder[T, M, WF, EX](
                  cfg.funcHandler, cfg.staticHandler)(
                  FlatteningBuilder(
                    builder,
                    Set(StructureType.Object(DocVar.ROOT(), id))),
                    repair.as(SrcHole)))
          case Reduce(src, bucket, reducers, repair) =>
            (bucket.traverse(handleFreeMap[T, M, EX](cfg.funcHandler, cfg.staticHandler, _)) ⊛
              reducers.traverse(_.traverse(handleFreeMap[T, M, EX](cfg.funcHandler, cfg.staticHandler, _))))((b, red) => {
                getReduceBuilder[T, M, WF, EX](
                  cfg.funcHandler, cfg.staticHandler)(
                  // TODO: This work should probably be done in `toWorkflow`.
                  semiAlignExpr[λ[α => List[ReduceFunc[α]]]](red)(Traverse[List].compose).fold(
                    WB.groupBy(
                      DocBuilder(
                        src,
                        // FIXME: Doesn’t work with UnshiftMap
                        red.unite.zipWithIndex.map(_.map(i => BsonField.Name(createFieldName("f", i))).swap).toListMap ++
                          b.zipWithIndex.map(_.map(i => BsonField.Name(createFieldName("b", i))).swap).toListMap),
                      b.zipWithIndex.map(p => docVarToExpr(DocField(BsonField.Name(createFieldName("b", p._2))))),
                      red.zipWithIndex.map(ai =>
                        (BsonField.Name(createFieldName("f", ai._2)),
                          accumulator(ai._1.as($field(createFieldName("f", ai._2)))))).toListMap))(
                    exprs => WB.groupBy(src,
                      b,
                      exprs.zipWithIndex.map(ai =>
                        (BsonField.Name(createFieldName("f", ai._2)),
                          accumulator(ai._1))).toListMap)),
                    repair)
              }).join
          case Sort(src, bucket, order) =>
            // Bucket keys sort ascending ahead of the explicit order keys.
            val (keys, dirs) = (bucket.toIList.map((_, SortDir.asc)) <::: order).unzip
            keys.traverse(handleFreeMap[T, M, EX](cfg.funcHandler, cfg.staticHandler, _))
              .map(ks => WB.sortBy(src, ks.toList, dirs.toList))
          case Filter(src, cond) =>
            // Prefer a native selector; fall back to expression/JS filtering.
            getSelector[T, M, EX](cfg.bsonVersion)(cond).fold(
              _ => handleFreeMap[T, M, EX](cfg.funcHandler, cfg.staticHandler, cond).map {
                // TODO: Postpone decision until we know whether we are going to
                //       need mapReduce anyway.
                case cond @ HasThat(_) => WB.filter(src, List(cond), {
                  case f :: Nil => Selector.Doc(f -> Selector.Eq(Bson.Bool(true)))
                })
                case \&/.This(js) => WB.filter(src, Nil, {
                  case Nil => Selector.Where(js(jscore.ident("this")).toJs)
                })
              },
              {
                case (sel, inputs) =>
                  inputs.traverse(f => handleFreeMap[T, M, EX](cfg.funcHandler, cfg.staticHandler, f(cond))).map(WB.filter(src, _, sel))
              })
          case Union(src, lBranch, rBranch) =>
            (rebaseWB[T, M, WF, EX](cfg, lBranch, src) ⊛
              rebaseWB[T, M, WF, EX](cfg, rBranch, src))(
              UnionBuilder(_, _))
          case Subset(src, from, sel, count) =>
            (rebaseWB[T, M, WF, EX](cfg, from, src) ⊛
              (rebaseWB[T, M, WF, EX](cfg, count, src) >>= (HasInt[M, WF](_))))(
              sel match {
                case Drop => WB.skip
                case Take => WB.limit
                // TODO: Better sampling
                case Sample => WB.limit
              })
          case Unreferenced() =>
            CollectionBuilder($pure(Bson.Null), WorkflowBuilder.Root(), none).point[M]
        }
      }
  /** Planner instance for `EquiJoin`: plans both branches against the common
    * source, compiles the left/right key expressions, delegates the join
    * itself to `cfg.joinHandler`, and finally projects the joined document's
    * "left"/"right" fields through the `combine` function.
    */
  @SuppressWarnings(Array("org.wartremover.warts.Recursion"))
  implicit def equiJoin[T[_[_]]: BirecursiveT: EqualT: ShowT]:
      Planner.Aux[T, EquiJoin[T, ?]] =
    new Planner[EquiJoin[T, ?]] {
      type IT[G[_]] = T[G]
      def plan
        [M[_]: Monad: ExecTimeR: MonadFsErr, WF[_]: Functor: Coalesce: Crush: Crystallize, EX[_]: Traverse]
        (cfg: PlannerConfig[T, EX, WF])
        (implicit
          ev0: WorkflowOpCoreF :<: WF,
          ev1: RenderTree[WorkflowBuilder[WF]],
          ev2: WorkflowBuilder.Ops[WF],
          ev3: EX :<: ExprOp) =
        qs =>
          (rebaseWB[T, M, WF, EX](cfg, qs.lBranch, qs.src) ⊛
            rebaseWB[T, M, WF, EX](cfg, qs.rBranch, qs.src))(
            (lb, rb) => {
              val (lKey, rKey) = Unzip[List].unzip(qs.key)
              (lKey.traverse(handleFreeMap[T, M, EX](cfg.funcHandler, cfg.staticHandler, _)) ⊛
                rKey.traverse(handleFreeMap[T, M, EX](cfg.funcHandler, cfg.staticHandler, _)))(
                (lk, rk) =>
                  liftM[M, WorkflowBuilder[WF]](cfg.joinHandler.run(
                    qs.f,
                    JoinSource(lb, lk),
                    JoinSource(rb, rk))) >>=
                    (getExprBuilder[T, M, WF, EX](cfg.funcHandler, cfg.staticHandler)(_, qs.combine >>= {
                      case LeftSide => Free.roll(MFC(MapFuncsCore.ProjectField(HoleF, MapFuncsCore.StrLit("left"))))
                      case RightSide => Free.roll(MFC(MapFuncsCore.ProjectField(HoleF, MapFuncsCore.StrLit("right"))))
                    }))).join
            }).join
    }
implicit def coproduct[T[_[_]], F[_], G[_]](
implicit F: Planner.Aux[T, F], G: Planner.Aux[T, G]):
Planner.Aux[T, Coproduct[F, G, ?]] =
new Planner[Coproduct[F, G, ?]] {
type IT[G[_]] = T[G]
def plan
[M[_]: Monad: ExecTimeR: MonadFsErr, WF[_]: Functor: Coalesce: Crush: Crystallize, EX[_]: Traverse]
(cfg: PlannerConfig[T, EX, WF])
(implicit
ev0: WorkflowOpCoreF :<: WF,
ev1: RenderTree[WorkflowBuilder[WF]],
ev2: WorkflowBuilder.Ops[WF],
ev3: EX :<: ExprOp) =
_.run.fold(
F.plan[M, WF, EX](cfg),
G.plan[M, WF, EX](cfg))
}
  // TODO: All instances below here only need to exist because of `FreeQS`,
  // but can’t actually be called.
  /** Fallback `Planner` that always raises an internal planning error tagged
    * with `label`; these instances exist only to satisfy the `QScriptTotal`
    * coproduct and are expected to be unreachable.
    */
  def default[T[_[_]], F[_]](label: String): Planner.Aux[T, F] =
    new Planner[F] {
      type IT[G[_]] = T[G]
      def plan
        [M[_]: Monad: ExecTimeR: MonadFsErr, WF[_]: Functor: Coalesce: Crush: Crystallize, EX[_]: Traverse]
        (cfg: PlannerConfig[T, EX, WF])
        (implicit
          ev0: WorkflowOpCoreF :<: WF,
          ev1: RenderTree[WorkflowBuilder[WF]],
          ev2: WorkflowBuilder.Ops[WF],
          ev3: EX :<: ExprOp) =
        κ(raiseErr(qscriptPlanningFailed(InternalError.fromMsg(s"should not be reached: $label"))))
    }
  implicit def deadEnd[T[_[_]]]: Planner.Aux[T, Const[DeadEnd, ?]] =
    default("DeadEnd")
  implicit def read[T[_[_]], A]: Planner.Aux[T, Const[Read[A], ?]] =
    default("Read")
  implicit def shiftedReadDir[T[_[_]]]: Planner.Aux[T, Const[ShiftedRead[ADir], ?]] =
    default("ShiftedRead[ADir]")
  implicit def thetaJoin[T[_[_]]]: Planner.Aux[T, ThetaJoin[T, ?]] =
    default("ThetaJoin")
  implicit def projectBucket[T[_[_]]]: Planner.Aux[T, ProjectBucket[T, ?]] =
    default("ProjectBucket")
}
  /** Compiles a `FreeMap` to a pure MongoDB expression, with the hole standing
    * for the whole input document (`$$ROOT`).
    */
  def getExpr[
    T[_[_]]: BirecursiveT: ShowT,
    M[_]: Monad: ExecTimeR: MonadFsErr, EX[_]: Traverse: Inject[?[_], ExprOp]]
    (funcHandler: MapFunc[T, ?] ~> OptionFree[EX, ?], staticHandler: StaticHandler[T, EX])(fm: FreeMap[T]
  ) : M[Fix[ExprOp]] =
    processMapFuncExpr[T, M, EX, Hole](funcHandler, staticHandler)(fm)(κ($$ROOT))
  /** Compiles a `FreeMap` to a one-argument JavaScript function; the hole
    * becomes the function's parameter.
    */
  def getJsFn[T[_[_]]: BirecursiveT: ShowT, M[_]: Monad: MonadFsErr: ExecTimeR]
    (fm: FreeMap[T])
      : M[JsFn] =
    processMapFunc[T, M, Hole](fm)(κ(jscore.Ident(JsFn.defaultName))) ∘
      (JsFn(JsFn.defaultName, _))
  /** Builds a `WorkflowBuilder` from a free map on top of `src`: a statically
    * known object literal (string keys only) becomes a `DocBuilder`, anything
    * else is compiled through `handler` into an `ExprBuilder`.
    */
  def getBuilder
    [T[_[_]]: BirecursiveT: ShowT, M[_]: Monad: MonadFsErr, WF[_], EX[_]: Traverse, A]
    (handler: FreeMapA[T, A] => M[Expr])
    (src: WorkflowBuilder[WF], fm: FreeMapA[T, A])
    (implicit ev: EX :<: ExprOp)
      : M[WorkflowBuilder[WF]] =
    fm.project match {
      case MapFuncCore.StaticMap(elems) =>
        elems.traverse(_.bitraverse({
          case Embed(MapFuncCore.EC(ejson.Str(key))) => BsonField.Name(key).point[M]
          case key => raiseErr[M, BsonField.Name](qscriptPlanningFailed(InternalError.fromMsg(s"Unsupported object key: ${key.shows}")))
        },
          handler)) ∘
          (es => DocBuilder(src, es.toListMap))
      case _ => handler(fm) ∘ (ExprBuilder(src, _))
    }
  /** `getBuilder` specialized to plain `FreeMap`s (holes are the source doc). */
  def getExprBuilder
    [T[_[_]]: BirecursiveT: ShowT, M[_]: Monad: ExecTimeR: MonadFsErr, WF[_], EX[_]: Traverse]
    (funcHandler: MapFunc[T, ?] ~> OptionFree[EX, ?], staticHandler: StaticHandler[T, EX])
    (src: WorkflowBuilder[WF], fm: FreeMap[T])
    (implicit ev: EX :<: ExprOp)
      : M[WorkflowBuilder[WF]] =
    getBuilder[T, M, WF, EX, Hole](handleFreeMap[T, M, EX](funcHandler, staticHandler, _))(src, fm)
  /** `getBuilder` specialized to reduce repairs (holes are `ReduceIndex`es). */
  def getReduceBuilder
    [T[_[_]]: BirecursiveT: ShowT, M[_]: Monad: ExecTimeR: MonadFsErr, WF[_], EX[_]: Traverse]
    (funcHandler: MapFunc[T, ?] ~> OptionFree[EX, ?], staticHandler: StaticHandler[T, EX])
    (src: WorkflowBuilder[WF], fm: FreeMapA[T, ReduceIndex])
    (implicit ev: EX :<: ExprOp)
      : M[WorkflowBuilder[WF]] =
    getBuilder[T, M, WF, EX, ReduceIndex](handleRedRepair[T, M, EX](funcHandler, staticHandler, _))(src, fm)
  /** Compiles a `JoinFunc` to a JS function, substituting `a1`/`a2` for the
    * left/right join sides respectively.
    */
  def getJsMerge[T[_[_]]: BirecursiveT: ShowT, M[_]: Monad: MonadFsErr: ExecTimeR]
    (jf: JoinFunc[T], a1: JsCore, a2: JsCore)
      : M[JsFn] =
    processMapFunc[T, M, JoinSide](
      jf) {
        case LeftSide => a1
        case RightSide => a2
      } ∘ (JsFn(JsFn.defaultName, _))
  /** Attempts both the JS and the ExprOp compilation of `a`: keeps both when
    * both succeed (`Both`), otherwise whichever succeeded, and fails only when
    * both compilations fail.
    */
  def exprOrJs[M[_]: Applicative: MonadFsErr, A]
    (a: A)
    (exf: A => M[Fix[ExprOp]], jsf: A => M[JsFn])
      : M[Expr] = {
    // TODO: Return _both_ errors
    val js = jsf(a)
    val expr = exf(a)
    handleErr[M, Expr](
      (js ⊛ expr)(\&/.Both(_, _)))(
      _ => handleErr[M, Expr](js.map(-\&/))(_ => expr.map(\&/-)))
  }
  /** Compiles a `FreeMap` to an `Expr` (expression and/or JS; see `exprOrJs`). */
  def handleFreeMap[T[_[_]]: BirecursiveT: ShowT, M[_]: Monad: ExecTimeR: MonadFsErr, EX[_]: Traverse]
    (funcHandler: MapFunc[T, ?] ~> OptionFree[EX, ?], staticHandler: StaticHandler[T, EX], fm: FreeMap[T])
    (implicit ev: EX :<: ExprOp)
      : M[Expr] =
    exprOrJs(fm)(getExpr[T, M, EX](funcHandler, staticHandler)(_), getJsFn[T, M])
  /** Like `handleFreeMap`, but for reduce repairs (`ReduceIndex` holes). */
  def handleRedRepair[T[_[_]]: BirecursiveT: ShowT, M[_]: Monad: ExecTimeR: MonadFsErr, EX[_]: Traverse]
    (funcHandler: MapFunc[T, ?] ~> OptionFree[EX, ?], staticHandler: StaticHandler[T, EX], jr: FreeMapA[T, ReduceIndex])
    (implicit ev: EX :<: ExprOp)
      : M[Expr] =
    exprOrJs(jr)(getExprRed[T, M, EX](funcHandler, staticHandler)(_), getJsRed[T, M])
  /** Expression compilation of a reduce repair: a `ReduceIndex` is either a
    * bucket index (a field under `_id`) or a reducer index (field `f<i>`).
    */
  def getExprRed[T[_[_]]: BirecursiveT: ShowT, M[_]: Monad: ExecTimeR: MonadFsErr, EX[_]: Traverse]
    (funcHandler: MapFunc[T, ?] ~> OptionFree[EX, ?], staticHandler: StaticHandler[T, EX])
    (jr: FreeMapA[T, ReduceIndex])
    (implicit ev: EX :<: ExprOp)
      : M[Fix[ExprOp]] =
    processMapFuncExpr[T, M, EX, ReduceIndex](funcHandler, staticHandler)(jr)(_.idx.fold(
      i => $field("_id", i.toString),
      i => $field(createFieldName("f", i))))
  /** JS compilation of a reduce repair; same index convention as `getExprRed`. */
  def getJsRed[T[_[_]]: BirecursiveT: ShowT, M[_]: Monad: MonadFsErr: ExecTimeR]
    (jr: Free[MapFunc[T, ?], ReduceIndex])
      : M[JsFn] =
    processMapFunc[T, M, ReduceIndex](jr)(_.idx.fold(
      i => jscore.Select(jscore.Select(jscore.Ident(JsFn.defaultName), "_id"), i.toString),
      i => jscore.Select(jscore.Ident(JsFn.defaultName), createFieldName("f", i)))) ∘
      (JsFn(JsFn.defaultName, _))
  /** Plans a `FreeQS` branch, with holes standing for the already-planned
    * `src` builder, by folding the branch with the total-QScript planner.
    */
  def rebaseWB
    [T[_[_]]: EqualT, M[_]: Monad: ExecTimeR: MonadFsErr, WF[_]: Functor: Coalesce: Crush: Crystallize, EX[_]: Traverse]
    (cfg: PlannerConfig[T, EX, WF],
      free: FreeQS[T],
      src: WorkflowBuilder[WF])
    (implicit
      F: Planner.Aux[T, QScriptTotal[T, ?]],
      ev0: WorkflowOpCoreF :<: WF,
      ev1: RenderTree[WorkflowBuilder[WF]],
      ev2: WorkflowBuilder.Ops[WF],
      ev3: EX :<: ExprOp)
      : M[WorkflowBuilder[WF]] =
    free.cataM(
      interpretM[M, QScriptTotal[T, ?], qscript.Hole, WorkflowBuilder[WF]](κ(src.point[M]), F.plan(cfg)))
  // TODO: Need `Delay[Show, WorkflowBuilder]`
  /** Requires `wb` to be a literal Bson value, failing with
    * `NonRepresentableEJson` otherwise.
    */
  @SuppressWarnings(Array("org.wartremover.warts.ToString"))
  def HasLiteral[M[_]: Applicative: MonadFsErr, WF[_]]
    (wb: WorkflowBuilder[WF])
    (implicit ev0: WorkflowOpCoreF :<: WF)
      : M[Bson] =
    asLiteral(wb).fold(
      raiseErr[M, Bson](qscriptPlanningFailed(NonRepresentableEJson(wb.toString))))(
      _.point[M])
  /** Requires `wb` to be an integer literal (Int32 or Int64), widened to Long. */
  @SuppressWarnings(Array("org.wartremover.warts.ToString"))
  def HasInt[M[_]: Monad: MonadFsErr, WF[_]]
    (wb: WorkflowBuilder[WF])
    (implicit ev0: WorkflowOpCoreF :<: WF)
      : M[Long] =
    HasLiteral[M, WF](wb) >>= {
      case Bson.Int32(v) => v.toLong.point[M]
      case Bson.Int64(v) => v.point[M]
      case x => raiseErr(qscriptPlanningFailed(NonRepresentableEJson(x.toString)))
    }
  // This is maybe worth putting in Matryoshka?
  /** Coalgebra that short-circuits (left) with the first node matching `f`,
    * otherwise keeps unfolding (right).
    */
  def findFirst[T[_[_]]: RecursiveT, F[_]: Functor: Foldable, A](
    f: PartialFunction[T[F], A]):
      CoalgebraM[A \/ ?, F, T[F]] =
    tf => (f.lift(tf) \/> tf.project).swap
  // TODO: This should perhaps be _in_ PhaseResults or something
  /** Runs `ma` and records its rendered result as a named phase in the
    * compilation log, returning the result unchanged.
    */
  def log[M[_]: Monad, A: RenderTree]
    (label: String, ma: M[A])
    (implicit mtell: MonadTell_[M, PhaseResults])
      : M[A] =
    ma.mproduct(a => mtell.tell(Vector(PhaseResult.tree(label, a)))) ∘ (_._1)
  /** Normalization pipeline applied to a Mongo QScript plan before workflow
    * planning: assumes object-typed reads, elides the Quasar sigil, normalizes
    * EJson (top level and inside branches), shuffles maps around sorts and
    * subsets, and finally prefers projection over JS where possible. Each
    * intermediate plan is recorded as a logged phase.
    */
  def toMongoQScript[
      T[_[_]]: BirecursiveT: EqualT: RenderTreeT: ShowT,
      M[_]: Monad: MonadFsErr: PhaseResultTell]
      (anyDoc: Collection => OptionT[M, BsonDocument],
        qs: T[fs.MongoQScript[T, ?]])
      (implicit BR: Branches[T, fs.MongoQScript[T, ?]])
      : M[T[fs.MongoQScript[T, ?]]] = {
    type MQS[A] = fs.MongoQScript[T, A]
    type QST[A] = QScriptTotal[T, A]
    val O = new Optimize[T]
    val R = new Rewrite[T]
    // TODO: All of these need to be applied through branches. We may also be able to compose
    // them with normalization as the last step and run until fixpoint. Currently plans are
    // too sensitive to the order in which these are applied.
    for {
      mongoQS0 <- qs.transCataM(liftFGM(assumeReadType[M, T, MQS](Type.AnyObject)))
      mongoQS1 <- mongoQS0.transCataM(elideQuasarSigil[T, MQS, M](anyDoc))
      mongoQS2 = mongoQS1.transCata[T[MQS]](R.normalizeEJ[MQS])
      mongoQS3 = BR.branches.modify(
        _.transCata[FreeQS[T]](liftCo(R.normalizeEJCoEnv[QScriptTotal[T, ?]]))
        )(mongoQS2.project).embed
      _ <- BackendModule.logPhase[M](PhaseResult.tree("QScript Mongo", mongoQS3))
      // NB: Normalizing after these appears to revert the effects of `mapBeforeSort`.
      mongoQS4 = Trans(mapBeforeSort[T], mongoQS3)
      mongoQS5 = mongoQS4.transCata[T[MQS]](
        liftFF[QScriptCore[T, ?], MQS, T[MQS]](
          repeatedly(O.subsetBeforeMap[MQS, MQS](
            reflNT[MQS]))))
      _ <- BackendModule.logPhase[M](PhaseResult.tree("QScript Mongo (Shuffle Maps)", mongoQS5))
      // TODO: Once field deletion is implemented for 3.4, this could be selectively applied, if necessary.
      mongoQS6 = PreferProjection.preferProjection[MQS](mongoQS5)
      _ <- BackendModule.logPhase[M](PhaseResult.tree("QScript Mongo (Prefer Projection)", mongoQS6))
    } yield mongoQS6
  }
  /** Shared planning pipeline, parameterized by the target workflow (`WF`) and
    * expression (`EX`) types: normalize to Mongo QScript, fold the plan down
    * to a (normalized) `WorkflowBuilder`, build the raw workflow, then
    * crystallize it. Each stage is recorded via `log`.
    */
  def plan0
    [T[_[_]]: BirecursiveT: EqualT: RenderTreeT: ShowT,
      M[_]: Monad: PhaseResultTell: MonadFsErr: ExecTimeR,
      WF[_]: Functor: Coalesce: Crush: Crystallize,
      EX[_]: Traverse]
    (anyDoc: Collection => OptionT[M, BsonDocument],
      cfg: PlannerConfig[T, EX, WF])
    (qs: T[fs.MongoQScript[T, ?]])
    (implicit
      ev0: WorkflowOpCoreF :<: WF,
      ev1: WorkflowBuilder.Ops[WF],
      ev2: EX :<: ExprOp,
      ev3: RenderTree[Fix[WF]])
      : M[Crystallized[WF]] = {
    for {
      opt <- toMongoQScript[T, M](anyDoc, qs)
      wb <- log(
        "Workflow Builder",
        opt.cataM[M, WorkflowBuilder[WF]](
          Planner[T, fs.MongoQScript[T, ?]].plan[M, WF, EX](cfg).apply(_) ∘
            (_.transCata[Fix[WorkflowBuilderF[WF, ?]]](repeatedly(WorkflowBuilder.normalize[WF, Fix[WorkflowBuilderF[WF, ?]]])))))
      wf1 <- log("Workflow (raw)", liftM[M, Fix[WF]](WorkflowBuilder.build[WBM, WF](wb)))
      wf2 <- log(
        "Workflow (crystallized)",
        Crystallize[WF].crystallize(wf1).point[M])
    } yield wf2
  }
  /** Runs `plan` with the query's execution time supplied through a `ReaderT`,
    * so time-dependent functions observe a single fixed "now" (`execTime`).
    */
  def planExecTime[
      T[_[_]]: BirecursiveT: EqualT: ShowT: RenderTreeT,
      M[_]: Monad: PhaseResultTell: MonadFsErr](
      qs: T[fs.MongoQScript[T, ?]],
      queryContext: fs.QueryContext,
      queryModel: MongoQueryModel,
      anyDoc: Collection => OptionT[M, BsonDocument],
      execTime: Instant)
      : M[Crystallized[WorkflowF]] = {
    // Lift `anyDoc` into the Reader stack so it can be used inside `plan`.
    val peek = anyDoc andThen (r => OptionT(r.run.liftM[ReaderT[?[_], Instant, ?]]))
    plan[T, ReaderT[M, Instant, ?]](qs, queryContext, queryModel, peek).run(execTime)
  }
  /** Translate the QScript plan to an executable MongoDB "physical"
    * plan, taking into account the current runtime environment as captured by
    * the given context.
    *
    * Internally, the type of the plan being built constrains which operators
    * can be used, but the resulting plan uses the largest, common type so that
    * callers don't need to worry about it.
    *
    * @param anyDoc returns any document in the given `Collection`
    */
  def plan[
      T[_[_]]: BirecursiveT: EqualT: ShowT: RenderTreeT,
      M[_]: Monad: PhaseResultTell: MonadFsErr: ExecTimeR](
      qs: T[fs.MongoQScript[T, ?]],
      queryContext: fs.QueryContext,
      queryModel: MongoQueryModel,
      anyDoc: Collection => OptionT[M, BsonDocument])
      : M[Crystallized[WorkflowF]] = {
    import MongoQueryModel._
    val bsonVersion = toBsonVersion(queryModel)
    // Select workflow ops, expression dialect, and join strategy per server
    // version.
    queryModel match {
      case `3.4` =>
        // 3.4: pipeline-based join handler (using collection statistics and
        // indexes) with a map-reduce fallback; 3.4 expression dialect.
        val joinHandler: JoinHandler[Workflow3_2F, WBM] =
          JoinHandler.fallback[Workflow3_2F, WBM](
            JoinHandler.pipeline(queryContext.statistics, queryContext.indexes),
            JoinHandler.mapReduce)
        val cfg = PlannerConfig[T, Expr3_4, Workflow3_2F](
          joinHandler,
          FuncHandler.handle3_4(bsonVersion),
          StaticHandler.v3_2,
          bsonVersion)
        plan0[T, M, Workflow3_2F, Expr3_4](anyDoc, cfg)(qs)
      case `3.2` =>
        // 3.2: same join strategy as 3.4, but the 3.2 expression dialect.
        val joinHandler: JoinHandler[Workflow3_2F, WBM] =
          JoinHandler.fallback[Workflow3_2F, WBM](
            JoinHandler.pipeline(queryContext.statistics, queryContext.indexes),
            JoinHandler.mapReduce)
        val cfg = PlannerConfig[T, Expr3_2, Workflow3_2F](
          joinHandler,
          FuncHandler.handle3_2(bsonVersion),
          StaticHandler.v3_2,
          bsonVersion)
        plan0[T, M, Workflow3_2F, Expr3_2](anyDoc, cfg)(qs)
      case `3.0` =>
        // 3.0: only map-reduce joins; planned against the 2.6 workflow ops and
        // injected into the common `WorkflowF` afterwards.
        val cfg = PlannerConfig[T, Expr3_0, Workflow2_6F](
          JoinHandler.mapReduce[WBM, Workflow2_6F],
          FuncHandler.handle3_0(bsonVersion),
          StaticHandler.v2_6,
          bsonVersion)
        plan0[T, M, Workflow2_6F, Expr3_0](anyDoc, cfg)(qs).map(_.inject[WorkflowF])
      case _ =>
        // Anything older: the 2.6 dialect and map-reduce joins.
        val cfg = PlannerConfig[T, Expr2_6, Workflow2_6F](
          JoinHandler.mapReduce[WBM, Workflow2_6F],
          FuncHandler.handle2_6(bsonVersion),
          StaticHandler.v2_6,
          bsonVersion)
        plan0[T, M, Workflow2_6F, Expr2_6](anyDoc, cfg)(qs).map(_.inject[WorkflowF])
    }
  }
}
| drostron/quasar | mongodb/src/main/scala/quasar/physical/mongodb/MongoDbPlanner.scala | Scala | apache-2.0 | 63,903 |
package com.redhat.et.c9e.analysis.proximity;
import scala.language.implicitConversions
import scala.reflect.ClassTag
import scala.util.Random
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import org.apache.spark.rdd.RDD
object RandomForestClustering {
  /** Histogram of split-feature usage over every non-leaf node of every tree
    * in the forest, returned as (featureIndex, count) pairs sorted by
    * descending count.
    */
  def featureHist(model: ClusteringRandomForestModel): Seq[(Int,Int)] = {
    val raw = model.dtModels.flatMap(_.flatten.filter(!_.isLeaf)).map(_.split.get.feature)
    val hist = mutable.Map.empty[Int, Int]
    raw.foldLeft(hist)((h, x) => {
      val n = h.getOrElse(x, 0)
      h += (x -> (1 + n))
      h
    })
    hist.toSeq.sortBy(_._2).reverse
  }
  /** Merges the per-tree rule maps: for each prediction value, concatenates
    * the predicate sequences contributed by every tree in the forest.
    */
  def rfRules(model: ClusteringRandomForestModel, names: Map[Int, String]): Map[Double, Seq[Seq[Predicate]]] = {
    val dtr = model.dtModels.map(_.rules(names))
    val rfr = mutable.Map.empty[Double, Seq[Seq[Predicate]]]
    dtr.foldLeft(rfr)((r, x) => {
      for (k <- x.keys) {
        val s = r.getOrElse(k, Seq.empty[Seq[Predicate]])
        r += (k -> (s ++ x(k)))
      }
      r
    })
    rfr.toMap
  }
  /** Generates at least `nSynth` synthetic rows: repeatedly samples real rows,
    * then shuffles each column independently, so synthetic rows preserve the
    * per-column marginal distributions but not the joint structure.
    *
    * NOTE(review): `sample.head` will throw if a sample draw comes back empty
    * (tiny `data` or very small `fraction`) — confirm inputs make this
    * impossible.
    */
  def iidSynthetic[T :ClassTag](
    data: RDD[Array[T]],
    nSynth: Int,
    sampleSize: Int = 10000): RDD[Array[T]] = {
    val n = data.count
    val ss = math.min(sampleSize, n).toInt
    val fraction = math.min(1.0, ss.toDouble / n.toDouble)
    val parts = ArrayBuffer.empty[RDD[Array[T]]]
    var nn = 0
    while (nn < nSynth) {
      val sample = data.sample(false, fraction).collect
      val l = sample.length
      val m = sample.head.length
      // Transpose rows into per-column buffers.
      val transposed = Array.fill(m)(ArrayBuffer.empty[T])
      sample.foldLeft(transposed)((t, x) => {
        for (xx <- t.zip(x)) { xx._1 += xx._2 }
        t
      })
      // Shuffle each column independently, then transpose back into rows.
      val permuted = transposed.map(Random.shuffle(_))
      val iid = Array.fill(l)(ArrayBuffer.empty[T])
      permuted.foldLeft(iid)((i, x) => {
        for (xx <- i.zip(x)) { xx._1 += xx._2 }
        i
      })
      parts += data.sparkContext.parallelize(iid.map(_.toArray))
      nn += l
    }
    new org.apache.spark.rdd.UnionRDD(data.sparkContext, parts)
  }
  /** One bounded run of k-medoids refinement over an in-memory sample:
    * reassign each point to its nearest medoid, recompute each cluster's
    * medoid, and stop as soon as the total cost fails to improve (converged)
    * or `maxIterations` is reached.
    *
    * @return (final medoids, final cost, iterations used, converged?)
    */
  private def kMedoidsRefine[T, U >: T](
    data: Seq[T],
    metric: (U, U) => Double,
    initial: Seq[T],
    initialCost: Double,
    cost: (Seq[T], Seq[T]) => Double,
    medoidCost: (T, Seq[T]) => Double,
    maxIterations: Int): (Seq[T], Double, Int, Boolean) = {
    val k = initial.length
    // Index of the medoid nearest to `e`.
    val medoidIdx = (e: T, mv: Seq[T]) => mv.view.map(metric(e, _)).zipWithIndex.min._2
    // Element of `data` minimizing the summed distance to the rest.
    val medoid = (data: Seq[T]) => data.view.minBy(medoidCost(_, data))
    var current = initial
    var currentCost = initialCost
    var converged = false
    var itr = 0
    var halt = itr >= maxIterations
    while (!halt) {
      val next = data.groupBy(medoidIdx(_, current)).toVector.sortBy(_._1).map(_._2).map(medoid)
      val nextCost = cost(next, data)
      if (nextCost >= currentCost) {
        converged = true
        halt = true
      } else {
        current = next
        currentCost = nextCost
      }
      itr += 1
      if (itr >= maxIterations) halt = true
    }
    (current, currentCost, itr, converged)
  }
  /** k-medoids clustering of an RDD, refined on a driver-side sample that is
    * redrawn every `resampleInterval` iterations. Progress is printed to
    * stdout. Returns the medoids and the cost on the most recent sample.
    */
  def kMedoids[T :ClassTag, U >: T :ClassTag](
    data: RDD[T],
    k: Int,
    metric: (U,U) => Double,
    sampleSize: Int = 10000,
    maxIterations: Int = 10,
    resampleInterval: Int = 3
  ): (Seq[T], Double) = {
    val n = data.count
    require(k > 0)
    require(n >= k)
    val ss = math.min(sampleSize, n).toInt
    val fraction = math.min(1.0, ss.toDouble / n.toDouble)
    var sample: Array[T] = data.sample(false, fraction).collect
    // initialize medoids to a set of (k) random and unique elements
    var medoids: Seq[T] = Random.shuffle(sample.toSeq.distinct).take(k)
    require(medoids.length >= k)
    val minDist = (e: T, mv: Seq[T]) => mv.view.map(metric(e, _)).min
    val cost = (mv: Seq[T], data: Seq[T]) => data.view.map(minDist(_, mv)).sum
    val medoidCost = (e: T, data: Seq[T]) => data.view.map(metric(e, _)).sum
    var itr = 1
    var halt = itr > maxIterations
    var lastCost = cost(medoids, sample)
    while (!halt) {
      println(s"\n\nitr= $itr")
      // update the sample periodically
      if (fraction < 1.0 && itr > 1 && (itr % resampleInterval) == 0) {
        sample = data.sample(false, fraction).collect
      }
      // Single refinement step per outer iteration (maxIterations = 1).
      val (nxt, nxtCost, _, _) =
        kMedoidsRefine(
          sample,
          metric,
          medoids,
          lastCost,
          cost,
          medoidCost,
          1)
      // todo: test some function of metric values over time as an optional halting condition
      // when improvement stops
      println(s"last= $lastCost new= $nxtCost")
      lastCost = nxtCost
      medoids = nxt
      itr += 1
      if (itr > maxIterations) halt = true
    }
    // return most recent cluster medoids
    (medoids, lastCost)
  }
  // Cumulative Residual Entropy
  /** Cumulative Residual Entropy of an empirical sample: builds a sorted
    * (value, frequency) histogram, then accumulates
    * -(x_{j+1} - x_j) * P * log(P) over the survival probabilities P.
    * Returns 0 for an empty sample.
    */
  def CRE(data: Seq[Double]): Double = {
    val n = data.length
    if (n < 1) { return 0.0 }
    val sorted = data.sorted
    val hist = ArrayBuffer.empty[(Double, Double)]
    // Run-length encode the sorted values into (value, count) pairs.
    val r = sorted.tail.foldLeft((hist, sorted.head, 1.0))((t, x) => {
      val (h, prev, c) = t
      if (x == prev) (h, prev, c + 1.0) else {
        h += (prev -> c)
        (h, x, 1.0)
      }
    })
    // Don't forget the final run.
    hist += (r._2 -> r._3)
    val dn = n.toDouble
    var cre = 0.0
    var cr = 1.0
    for (j <- 0 until hist.length - 1) {
      val (xj, fj) = hist(j)
      val (xj1, _) = hist(j+1)
      cr -= fj / dn
      cre -= (xj1 - xj)*cr*math.log(cr)
    }
    cre
  }
  /** Cluster-count-free variant: starts from a single medoid and repeatedly
    * tries splitting each cluster at its "antipodes" (the point farthest from
    * the medoid, plus the point farthest from that one), refines locally and
    * globally with `kMedoidsRefine`, and keeps the candidate with the lowest
    * CRE-based cost. Progress and per-candidate diagnostics are printed to
    * stdout.
    */
  def xMedoids[T :ClassTag, U >: T :ClassTag](
    data: RDD[T],
    metric: (U,U) => Double,
    sampleSize: Int = 10000,
    maxIterations: Int = 25,
    resampleInterval: Int = 3
  ): (Seq[T], Double) = {
    val n = data.count
    val ss = math.min(sampleSize, n).toInt
    val fraction = math.min(1.0, ss.toDouble / n.toDouble)
    var sample: Array[T] = data.sample(false, fraction).collect
    val minDist = (e: T, mv: Seq[T]) => mv.view.map(metric(e, _)).min
    val minIdx = (e: T, mv: Seq[T]) => mv.view.map(metric(e, _)).zipWithIndex.min._2
    // val cost = (mv: Seq[T], data: Seq[T]) => CRE(data.map(minDist(_, mv)))
    // val medoidCost = (e: T, data: Seq[T]) => CRE(data.map(metric(e, _)))
    val creCost = (mv: Seq[T], data: Seq[T]) => CRE(data.map(minDist(_, mv)))
    val cost = (mv: Seq[T], data: Seq[T]) => data.map(minDist(_, mv)).sum
    val medoidCost = (e: T, data: Seq[T]) => data.map(metric(e, _)).sum
    val medoid = (data: Seq[T]) => data.view.minBy(medoidCost(_, data))
    // Two far-apart seed points for splitting a cluster.
    val antipodes = (e: T, data: Seq[T]) => {
      val a1 = data.view.maxBy(metric(e, _))
      val a2 = data.view.maxBy(metric(a1, _))
      Seq(a1, a2)
    }
    // initial model is single medoid of entire sample
    var current = Seq(medoid(sample))
    var currentCost = cost(current, sample)
    var currentData = sample.groupBy(minIdx(_, current))
    val initCost = creCost(current, sample)
    println(s"initCost= $initCost")
    var itr = 1
    var halt = itr > maxIterations
    while (!halt) {
      println(s"\n\nitr= $itr")
      // Try splitting each cluster that has at least two distinct members.
      val candidates = for (
        j <- 0 until current.length;
        cdj = currentData(j);
        cdju = cdj.distinct;
        if (cdju.length >= 2);
        (cL, t) = current.splitAt(j);
        (c, cR) = (t.head, t.tail)
      ) yield {
        val cmp = antipodes(c, cdju)
        // Local refinement of the split within cluster j.
        val (rmp, _, _, _) = kMedoidsRefine(
          currentData(j),
          metric,
          cmp,
          cost(cmp, cdj),
          cost,
          medoidCost,
          3)
        val cmm = cL ++ rmp ++ cR
        println(s" ${cL.length} ++ ${rmp.length} ++ ${cR.length} == ${cmm.length}")
        // Global refinement of the whole candidate medoid set.
        val (rmm, _, _, _) = kMedoidsRefine(
          sample,
          metric,
          cmm,
          cost(cmm, sample),
          cost,
          medoidCost,
          5)
        val rmmData = sample.groupBy(minIdx(_, rmm))
        val rmmCost = creCost(rmm, sample)
        val dCost = initCost - rmmCost
        // val penalty = (rmm.length - 1) * math.log(n)
        // Entropy of the cluster-size distribution; computed for diagnostics
        // below but not used in candidate selection.
        val penalty = rmmData.values.map { cx =>
          val p = cx.length.toDouble / sample.length.toDouble
          - p * math.log(p)
        }.sum
        val delta = dCost - penalty
        val gain = dCost / penalty
        println(s"j= $j n= ${rmm.length} cost= $rmmCost dCost= $dCost penalty= $penalty delta= $delta gain= $gain")
        (rmmCost, rmm, rmmData)
      }
      // Keep the candidate with the lowest CRE cost.
      val (nextCost, next, nextData) = candidates.minBy(_._1)
      println(s"next= ${next.length} cost= $nextCost")
      current = next
      currentCost = nextCost
      currentData = nextData
      itr += 1
      if (itr > maxIterations) halt = true
    }
    // return most recent cluster medoids
    (current, currentCost)
  }
  /** Count of positions where the two vectors differ (Hamming distance),
    * widened to Double so it can serve directly as a metric.
    */
  def leafIdDist(a: Vector[Int], b: Vector[Int]): Double = a.zip(b).count(e => (e._1 != e._2))
}
| willb/c9e | analysis/src/main/scala/com/redhat/et/c9e/proximity/RandomForestClustering.scala | Scala | apache-2.0 | 8,854 |
/**
*
* This file is part of Fixbugs.
*
* Fixbugs is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fixbugs is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Fixbugs. If not, see <http://www.gnu.org/licenses/>.
*
**/
package fixbugs.mc
import fixbugs.core.ir.SideCondition
import fixbugs.mc.sets._
import org.objectweb.asm.ClassReader
import org.objectweb.asm.tree.ClassNode
import org.objectweb.asm.tree.MethodNode
import org.objectweb.asm.tree.AbstractInsnNode
import org.objectweb.asm.Type
import scala.collection.mutable.{HashMap => MMap,Map}
import scala.collection.immutable.{Map => IMap}
import java.io.FileInputStream
import fixbugs.util.MapUtil.crossWith
import collection.jcl.MapWrapper
import org.slf4j.{Logger,LoggerFactory}
/**
 * Main entry point to the bytecode analysis component
 */
object ModelCheck {
  val log = LoggerFactory getLogger(ModelCheck getClass)
  /** Copies a `java.util.Map` into an immutable Scala Map via the (pre-2.8)
    * `jcl.MapWrapper` bridge.
    */
  def conMap[X,Y](jm:java.util.Map[X,Y]):IMap[X,Y] = IMap() ++ new MapWrapper[X,Y]() {
    def underlying = jm
  }
  /** JVM-internal class name ("a/b/C") to source form ("a.b.C"). */
  def fromInternal(s:String) = s.replace("/",".")
  /** Model checks every method of the class file at `className` against the
    * side condition `phi`, restricting `domain` to the values tagged with the
    * current "Class#method" under the `_method` key.
    *
    * @return per-method environments of satisfying assignments
    */
  def check(className:String,phi:SideCondition,domain:ClosedDomain[Any]):Map[String,ClosedEnvironment[Any]] = {
    // refine IR
    val psi = Refiner.refineSide(phi)
    // extract line numbers
    val file = new FileInputStream(className)
    val cn = new ClassNode()
    val cr = new ClassReader(file)
    cr.accept(cn, 0);
    val fieldTypes = conMap(TypeExtractor.lookupFieldTypes(cn))
    // foreach method: (messy conversion from java collections)
    var results = new MMap[String,ClosedEnvironment[Any]]
    for (val i <- 0 to cn.methods.size()-1) {
      val mn = cn.methods.get(i).asInstanceOf[MethodNode]
      // extract cfg using asm
      val (succs,preds) = cfg(ControlFlowGraphAnalysis.getControlFlowGraph("fixbugs",mn))
      val lines = LineNumberExtractor.getLineNumberLookup(mn)
      log debug ("lines = {}",lines)
      val varTypes = conMap(TypeExtractor.lookupVarTypes(mn))
      val nodes = Set() ++ lines
      // remove values not for current method
      val localName = fromInternal(cn.name)+"#"+mn.name
      val localDomain = new SetClosedDomain[Any](domain.allValues.filter({ m =>
        val name = m("_method").asInstanceOf[String]
        log debug ("_method = {}, localName = {}",name,localName)
        name.equals(localName)
      }))
      val typeEnv = new TypeEnvironment(fieldTypes,varTypes)
      // model check the method, and add the results
      val eval:Evaluator = new Eval(typeEnv,nodes,localDomain,succs,preds) //minimise(lines,succs),minimise(lines,preds)
      log debug ("calling eval for method: {} with types {}",mn.name,typeEnv)
      results += (mn.name -> eval.eval(mn,psi))
    }
    results
  }
  /** Null-safe copy of a `java.util.Set` into an immutable Scala Set. */
  def convert[X](from:java.util.Set[X]):Set[X] = {
    var s = Set[X]()
    if(from != null) {
      val it = from.iterator
      while(it.hasNext)
        s = s + it.next
    }
    s
  }
  /**
   * generate lookup sets for CFG
   */
  // Builds successor/predecessor maps keyed by position in `nodes`, with edge
  // targets resolved back to indices via `indexOf`.
  def cfg(nodes:Array[ControlFlowGraphNode]) = {
    val succs = new MMap[Int,Set[Int]]
    val preds = new MMap[Int,Set[Int]]
    for (i <- 0 to nodes.length-1) {
      // TODO: check this
      if(nodes(i) != null) {
        succs += (i -> convert(nodes(i).successors).map(nodes.indexOf(_)))
        preds += (i -> convert(nodes(i).predecessors).map(nodes.indexOf(_)))
      }
    }
    log debug ("succs = {}",succs)
    // Fixed copy-paste bug: this previously logged `succs` under the
    // "preds" label.
    log debug ("preds = {}",preds)
    (succs,preds)
  }
  /**
   * Simple Minimise Silhouettes algorithm
   * Simply substitute through the numbers and union all the appropriate sets
   */
  // TODO: remove cycles
  def minimise(lines:Array[Int],cfg:MMap[Int,Set[Int]]) = {
    // Note: `transform` mutates `cfg` in place, mapping edge targets through
    // the line-number lookup.
    cfg.transform((k,v) => v.map(x => lines(x)))
    log debug ("transformed cfg = {}",cfg)
    val acc = new MMap[Int,Set[Int]]
    cfg.foreach(x => {
      val (from,to) = x
      val srcLine = lines(from)
      val toAcc = acc.getOrElse(srcLine,Set())
      acc += (srcLine -> (toAcc ++ to - srcLine))
    })
    // Fixed copy-paste bug: this previously logged `cfg` under the
    // "minimised acc" label.
    log debug ("minimised acc = {}",acc)
    acc
  }
}
| FauxFaux/fixbugs | src/main/java/fixbugs/mc/ModelCheck.scala | Scala | lgpl-3.0 | 4,566 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.yarn.security
import java.util.concurrent.{Executors, TimeUnit}
import scala.util.control.NonFatal
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.hadoop.security.{Credentials, UserGroupInformation}
import org.apache.spark.SparkConf
import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.deploy.yarn.config._
import org.apache.spark.internal.Logging
import org.apache.spark.util.{ThreadUtils, Utils}
private[spark] class CredentialUpdater(
sparkConf: SparkConf,
hadoopConf: Configuration,
credentialManager: YARNHadoopDelegationTokenManager) extends Logging {
@volatile private var lastCredentialsFileSuffix = 0
private val credentialsFile = sparkConf.get(CREDENTIALS_FILE_PATH)
private val freshHadoopConf =
SparkHadoopUtil.get.getConfBypassingFSCache(
hadoopConf, new Path(credentialsFile).toUri.getScheme)
private val credentialUpdater =
Executors.newSingleThreadScheduledExecutor(
ThreadUtils.namedThreadFactory("Credential Refresh Thread"))
// This thread wakes up and picks up new credentials from HDFS, if any.
private val credentialUpdaterRunnable =
new Runnable {
override def run(): Unit = Utils.logUncaughtExceptions(updateCredentialsIfRequired())
}
/** Start the credential updater task */
def start(): Unit = {
val startTime = sparkConf.get(CREDENTIALS_UPDATE_TIME)
val remainingTime = startTime - System.currentTimeMillis()
if (remainingTime <= 0) {
credentialUpdater.schedule(credentialUpdaterRunnable, 1, TimeUnit.MINUTES)
} else {
logInfo(s"Scheduling credentials refresh from HDFS in $remainingTime ms.")
credentialUpdater.schedule(credentialUpdaterRunnable, remainingTime, TimeUnit.MILLISECONDS)
}
}
private def updateCredentialsIfRequired(): Unit = {
val timeToNextUpdate = try {
val credentialsFilePath = new Path(credentialsFile)
val remoteFs = FileSystem.get(freshHadoopConf)
SparkHadoopUtil.get.listFilesSorted(
remoteFs, credentialsFilePath.getParent,
credentialsFilePath.getName, SparkHadoopUtil.SPARK_YARN_CREDS_TEMP_EXTENSION)
.lastOption.map { credentialsStatus =>
val suffix = SparkHadoopUtil.get.getSuffixForCredentialsPath(credentialsStatus.getPath)
if (suffix > lastCredentialsFileSuffix) {
logInfo("Reading new credentials from " + credentialsStatus.getPath)
val newCredentials = getCredentialsFromHDFSFile(remoteFs, credentialsStatus.getPath)
lastCredentialsFileSuffix = suffix
UserGroupInformation.getCurrentUser.addCredentials(newCredentials)
logInfo("Credentials updated from credentials file.")
val remainingTime = (getTimeOfNextUpdateFromFileName(credentialsStatus.getPath)
- System.currentTimeMillis())
if (remainingTime <= 0) TimeUnit.MINUTES.toMillis(1) else remainingTime
} else {
// If current credential file is older than expected, sleep 1 hour and check again.
TimeUnit.HOURS.toMillis(1)
}
}.getOrElse {
// Wait for 1 minute to check again if there's no credential file currently
TimeUnit.MINUTES.toMillis(1)
}
} catch {
// Since the file may get deleted while we are reading it, catch the Exception and come
// back in an hour to try again
case NonFatal(e) =>
logWarning("Error while trying to update credentials, will try again in 1 hour", e)
TimeUnit.HOURS.toMillis(1)
}
logInfo(s"Scheduling credentials refresh from HDFS in $timeToNextUpdate ms.")
credentialUpdater.schedule(
credentialUpdaterRunnable, timeToNextUpdate, TimeUnit.MILLISECONDS)
}
private def getCredentialsFromHDFSFile(remoteFs: FileSystem, tokenPath: Path): Credentials = {
val stream = remoteFs.open(tokenPath)
try {
val newCredentials = new Credentials()
newCredentials.readTokenStorageStream(stream)
newCredentials
} finally {
stream.close()
}
}
private def getTimeOfNextUpdateFromFileName(credentialsPath: Path): Long = {
val name = credentialsPath.getName
val index = name.lastIndexOf(SparkHadoopUtil.SPARK_YARN_CREDS_COUNTER_DELIM)
val slice = name.substring(0, index)
val last2index = slice.lastIndexOf(SparkHadoopUtil.SPARK_YARN_CREDS_COUNTER_DELIM)
name.substring(last2index + 1, index).toLong
}
  /** Shuts down the scheduler; no further credential refreshes will run. */
  def stop(): Unit = {
    credentialUpdater.shutdown()
  }
}
| aokolnychyi/spark | resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/security/CredentialUpdater.scala | Scala | apache-2.0 | 5,380 |
/**
* All rights reserved.
* @author Qiuzhuang.Lian
*/
package com.spark.ades
import java.lang._
import org.apache.spark._
import org.apache.spark.rdd._
sealed trait Model extends Serializable
/**
* Model parser with canonical trimming to avoid surprising result.
*
* It lets any exception fail elegantly since it uses RDD.filter() later to filter invalid data.
*
* It truncates float number data into long for demo's age field.
*/
/**
 * Model parser with canonical trimming to avoid surprising result.
 *
 * Each `parseX` helper returns `null` for lines that are empty, malformed or filtered
 * out; the `loadX` methods then drop those entries with `RDD.filter(_ != null)`.
 *
 * It truncates float number data into long for demo's age field.
 */
object Model {
  // val dataUrlPrefix = "file:///data/ade/aers_ascii_2009q4/ascii/"
  val dataUrlPrefix = ""

  // NOTE(review): presumably required so `compare` on the Long fields below resolves
  // through an Ordering (this file imports java.lang._, which can shadow scala.Long
  // with the boxed java.lang.Long). Do not remove without re-checking compilation.
  import scala.math.Ordered.orderingToOrdered

  /*
  ISR$DRUG_SEQ$ROLE_COD$DRUGNAME$VAL_VBM$ROUTE$DOSE_VBM$DECHAL$RECHAL$LOT_NUM$EXP_DT$NDA_NUM
  6253237$1011824832$PS$ABILIFY$1$INTRAMUSCULAR$$$$$$$
   */
  /** One DRUG record, ordered by its ISR (case report id). */
  case class Drug(isr: Long,
      drugSeq: String,
      name: String) extends Ordered[Drug] {
    override def compare(that: Drug): Int = isr.compare(that.isr)
  }

  /* "6384984$7099178$F$$6384984-0$$20090922$20091002$EXP$DE-BAYER-200929628GPV$BAYER HEALTHCARE PHARMACEUTICALS INC.$47$YR$F$Y$$$20091002$MD$$$$GERMANY$"
   */
  /** One DEMO (patient demographics) record, ordered by its ISR. */
  case class Demo(isr: Long,
      isrCase: Long,
      fdaDt: String,
      age: Long,
      ageCod: String,
      gender: String) extends Ordered[Demo] {
    override def compare(that: Demo): Int = isr.compare(that.isr)
  }

  /** One REACTION record (ISR plus reaction code), ordered by its ISR. */
  case class React(isr: Long,
      code: String) extends Ordered[React] {
    override def compare(that: React): Int = isr.compare(that.isr)
  }

  /**
   * Parses one DEMO line. Returns `null` unless the record has age code "YR",
   * gender M/F, an FDA date of 2008 or later and an age in (0, 100].
   */
  def parseDemo(line: String): Demo = {
    if (line == null || line.length == 0) null
    else {
      try {
        val fields = line.split("\\\\$")
        val ageStr = fields(11)
        // Since age field in data source is float compatible so use float to truncate to long.
        val age = Float.parseFloat(ageStr.trim).toLong
        val isr = Long.parseLong(fields(0).trim)
        val isrCase = Long.parseLong(fields(1).trim)
        val fdaDt = fields(7)
        val ageCode = fields(12)
        val gender = fields(13)
        if ("YR".equalsIgnoreCase(ageCode.trim) &&
          ("M".equalsIgnoreCase(gender.trim) || "F".equalsIgnoreCase(gender.trim)) &&
          (fdaDt != null && Integer.parseInt(fdaDt.trim.substring(0, 4)) >= 2008) &&
          (age > 0 && age <= 100)) {
          Demo(isr, isrCase, fdaDt, age, ageCode, gender)
        } else {
          null
        }
      } catch {
        // Fixed: previously caught Throwable, which also swallows fatal VM errors.
        // NonFatal keeps the "invalid line => null" contract while letting fatal
        // errors propagate.
        case scala.util.control.NonFatal(_) =>
          null
      }
    }
  }

  /** Parses one DRUG line; `null` when the line is empty or malformed. */
  def parseDrug(line: String): Drug = {
    if (line == null || line.length == 0) null
    else {
      val fields = line.split("\\\\$")
      try {
        Drug(Long.parseLong(fields(0).trim), fields(1).trim, fields(3).trim)
      } catch {
        case _: Exception =>
          null
      }
    }
  }

  /** Parses one REACTION line; `null` when the line is empty or malformed. */
  def parseReact(line: String): React = {
    if (line == null || line.length == 0) null
    else {
      val fields = line.split("\\\\$")
      if (fields.length >= 2) {
        try {
          React(Long.parseLong(fields(0).trim), fields(1).trim)
        } catch {
          case _: Exception =>
            null
        }
      } else {
        null
      }
    }
  }

  /**
   * Loads and parses DEMO records, keeping a single record per case id
   * (the one with the smallest ISR).
   */
  def loadDemo(sc: SparkContext): RDD[Demo] = {
    val demoLines = sc.textFile(s"${dataUrlPrefix}aers/demos")
    println(s"total demo lines ${demoLines.count}")
    val demos = demoLines.map(parseDemo(_)).filter(_ != null)
    // De-duplicate by case: keep the minimum-ISR record for each isrCase.
    demos.groupBy(_.isrCase).map(x => x._2.min)
  }

  /** Loads and parses DRUG records, dropping malformed lines. */
  def loadDrug(sc: SparkContext): RDD[Drug] = {
    val drugLines = sc.textFile(s"${dataUrlPrefix}aers/drugs")
    drugLines.map(parseDrug(_)).filter(_ != null)
  }

  /** Loads and parses REACTION records, dropping malformed lines. */
  def loadReact(sc: SparkContext): RDD[React] = {
    val reacLines = sc.textFile(s"${dataUrlPrefix}aers/reactions")
    reacLines.map(parseReact(_)).filter(_ != null)
  }
}
| Qiuzhuang/spark-ades | src/main/scala/com/spark/ades/Model.scala | Scala | apache-2.0 | 3,916 |
/*
* Copyright (c) 2013 Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see http://www.gnu.org/licenses/agpl.html.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package generated.scala
/**
* This is the actual class that gets instantiated in the generated code. Ops corresponding to public operations
* here must have CodeGen methods defined by the DSL on them.
*
* Alternatively, everything in this class could be lifted, and we could generate a concrete class to be instantiated
* in the generated code.
*/
class IntDenseVector(__length: Int, __isRow: Boolean) {
  // Mutable on purpose: the generated code pokes these fields directly.
  var _length = __length
  var _isRow = __isRow
  var _data: Array[Int] = new Array[Int](_length)

  /**
   * Swaps in `xs` as the backing array (no copy) and records `len` as the new
   * length. Temporarily needed because it is hard-coded into DeliteOp code gen.
   */
  def unsafeSetData(xs: Array[Int], len: Int) {
    _length = len
    _data = xs
  }

  /** Copy with the same length/orientation and a cloned backing array. */
  def Clone = {
    val copy = new IntDenseVector(_length, _isRow)
    copy._data = _data.clone
    copy
  }
}
| tesendic/Relite | src/generated/scala/IntDenseVector.scala | Scala | agpl-3.0 | 1,787 |
package chiselutils.math
import collection.mutable.ArrayBuffer
import Chisel._
import chiselutils.utils.MemShiftRegister
/** A convolutional adder, takes in 1 pixel in each cycle
* filterSize is the size of filter for image eg) (7, 7, 64)
* imgSize is the size of the input image eg) ( 128, 128 )
*/
class ConvAdder( filterSize : (Int, Int, Int), imgSize : (Int, Int) ) extends Module {
  val io = new Bundle {
    // High while the multiplier products presented on multIn are valid.
    val validIn = Bool( INPUT )
    // One input per filter tap (filterSize._1 * filterSize._2 of them), each a vector
    // over the filterSize._3 dimension -- presumably output channels; TODO confirm.
    val multIn = Vec( filterSize._1 * filterSize._2, Vec( filterSize._3, Fixed( INPUT, 16, 8 ) ) )
    val validOut = Bool( OUTPUT )
    val convOut = Vec( filterSize._3, Fixed( OUTPUT, 16, 8 ) )
  }

  // Register delays (in cycles) introduced by the row-sum and column-sum stages below;
  // their sum drives the valid shift register, so `latency` is the input-to-output delay.
  val rowSumLat = filterSize._2 + 1 + ( (filterSize._2 - 2) / 2 )
  val colSumLat = imgSize._2 * ( filterSize._1 / 2 ) + 2
  def latency : Int = rowSumLat + colSumLat

  // --- Row-sum stage: accumulate the filterSize._2 taps of each filter row. ---
  // rowAdder(idx) holds, per filter row, the running sums after folding in tap `idx`.
  val rowAdder = ArrayBuffer[ List[ List[ Fixed ] ] ]()
  rowAdder += ( 0 until filterSize._1 ).map( rIdx => {
    io.multIn( rIdx*filterSize._2 ).toList.map( RegNext(_) )
  }).toList
  // One border-masking mux condition per adder stage; wired up by the counters below.
  val muxConds = ArrayBuffer[ Bool ]()
  for ( idx <- 1 until filterSize._2 ) {
    val inputSelected = ( 0 until filterSize._1 ).map( rIdx => {
      io.multIn( rIdx*filterSize._2 + idx ).toList
    })
    // Align tap `idx` with the accumulated sum by delaying it idx-1 cycles.
    val inputDelayed = {
      if ( idx == 1 )
        inputSelected
      else
        inputSelected.map( x => x.map( ShiftRegister( _, idx - 1 ) ) )
    }
    val prevInputs = rowAdder.last
    val adder = ( inputDelayed zip prevInputs ).map( x => {
      ( x._1 zip x._2 ).map( ab => RegNext( ab._1 + ab._2 ) )
    })
    // Bypass value used when the mux condition fires (image border handling).
    val regDelay = {
      if ( idx <= filterSize._2 / 2 )
        inputDelayed.map( x => x.map( RegNext(_) ) )
      else
        prevInputs.map( x => x.map( RegNext(_) ) )
    }
    muxConds += Bool()
    rowAdder += ( adder zip regDelay ).map( x => {
      ( x._1 zip x._2 ).map( ab => {
        RegNext( Mux( muxConds.last, ab._2, ab._1) ) // If true then col not needed
      })
    }).toList
  }
  val noRowMuxs = muxConds.size

  // --- Column-sum stage: combine the filterSize._1 row sums, one image row apart. ---
  // MemShiftRegister of (imgSize._2 - 2) plus the registers here line up values that are
  // one image row apart in the pixel stream.
  val colAdder = ArrayBuffer[ List[ Fixed ] ]()
  colAdder += rowAdder.last( 0 ).map( ShiftRegister( _, 2 ) )
  for ( cIdx <- 1 until filterSize._1 ) {
    val inputSelected = rowAdder.last( cIdx )
    val srIn = colAdder.last.map( MemShiftRegister( _, imgSize._2 - 2 ) )
    val inputDelayed = {
      if ( cIdx <= filterSize._1 / 2 )
        inputSelected.map( RegNext(_) )
      else
        srIn.map( RegNext( _ ) )
    }
    val adder = ( srIn zip inputSelected ).map( ab => {
      RegNext( ab._1 + ab._2 )
    })
    muxConds += Bool()
    val muxOut = ( adder zip inputDelayed ).map( ab => {
      RegNext( Mux( muxConds.last, ab._2, ab._1 ) )
    })
    colAdder += muxOut
  }

  // Delay validIn by `latency` cycles so validOut tracks the pipeline depth.
  val validSR = List( io.validIn ) ++ List.fill( latency ) { RegInit( Bool(false) ) }
  for ( vIdx <- 1 until validSR.size )
    validSR( vIdx ) := validSR( vIdx - 1 )

  // --- Border handling for the row-sum muxes: a column counter per stage, advanced one
  // pixel per valid cycle, flags when the tap would fall outside the image. ---
  for ( mIdx <- 0 until noRowMuxs ) {
    // Initial phase offsets each counter by that stage's pipeline delay.
    val initialVal = {
      if ( mIdx < filterSize._2/2 )
        ( 2*imgSize._2 - mIdx - 1 ) % imgSize._2
      else {
        val cutoff = (filterSize._2 / 2)
        ( 2*imgSize._2 - cutoff - 2*( mIdx - cutoff + 1) ) % imgSize._2
      }
    }
    val cntr = RegInit( UInt( initialVal, log2Up( imgSize._2 ) ) )
    when ( io.validIn ) {
      cntr := cntr + UInt( 1 )
    }
    when ( cntr === UInt( imgSize._2 - 1 ) ) {
      cntr := UInt( 0 )
    }
    // if right of filter
    if ( mIdx >= filterSize._2 / 2 )
      muxConds( mIdx ) := { cntr >= UInt( imgSize._2 - 1 - ( mIdx - (filterSize._2 / 2 ) ) ) }
    else
      muxConds( mIdx ) := { cntr <= UInt( 0 ) }
  }

  // --- Border handling for the column-sum muxes: column + row counters per stage. ---
  for ( cIdx <- 0 until muxConds.size - noRowMuxs ) {
    val mIdx = cIdx + noRowMuxs
    val initialVal = ( 2*imgSize._2 - rowSumLat - 1 ) % imgSize._2
    val initialRow = ( imgSize._1 - 1 - ( rowSumLat / imgSize._2 ) - {
      if ( cIdx < filterSize._1 / 2 )
        0
      else
        cIdx - ( filterSize._1 / 2 ) + 1
    } ) % imgSize._1
    val cntr = RegInit( UInt( initialVal, log2Up( imgSize._2 ) ))
    val rowCntr = RegInit( UInt( initialRow, log2Up( imgSize._1 ) ))
    when ( io.validIn ) {
      cntr := cntr + UInt( 1 )
    }
    when ( cntr === UInt( imgSize._2 - 1 ) ) {
      cntr := UInt( 0 )
      rowCntr := rowCntr + UInt( 1 )
      when ( rowCntr === UInt( imgSize._1 - 1 ) ) {
        rowCntr := UInt( 0 )
      }
    }
    // if below filter
    if ( cIdx >= filterSize._1 / 2 )
      muxConds( mIdx ) := { rowCntr >= UInt( imgSize._1 - 1 - ( cIdx - (filterSize._1 / 2 ) ) ) }
    else
      muxConds( mIdx ) := { rowCntr <= UInt( 0 ) }
  }

  io.convOut := Vec( colAdder.last )
  io.validOut := validSR.last
}
| da-steve101/chisel-utils | src/main/scala/chiselutils/math/ConvAdder.scala | Scala | lgpl-3.0 | 4,590 |
package dbpedia.dataparsers.ontology
import dbpedia.dataparsers.DBpediaNamespace
import dbpedia.dataparsers.util.{Language, RdfNamespace}
import dbpedia.dataparsers.ontology._
/**
* Represents an ontology property.
* There are 2 sub classes of this class: OntologyObjectProperty and OntologyDatatypeProperty.
*
* @param name The name of this entity. e.g. foaf:name
* @param labels The labels of this entity. Map: LanguageCode -> Label
* @param comments Comments describing this entity. Map: LanguageCode -> Comment
* @param range The range of this property
* @param isFunctional Defines whether this is a functional property.
* A functional property is a property that can have only one (unique) value y for each instance x (see: http://www.w3.org/TR/owl-ref/#FunctionalProperty-def)
*/
class OntologyProperty(
  name: String,
  labels: Map[Language, String],
  comments: Map[Language, String],
  val domain: OntologyClass,
  val range: OntologyType,
  val isFunctional: Boolean,
  val equivalentProperties: Set[OntologyProperty],
  val superProperties: Set[OntologyProperty]
)
extends OntologyEntity(name, labels, comments)
{
  // Domain and range are only mandatory for names that pass namespace validation.
  require(!RdfNamespace.validate(name) || domain != null, "missing domain for property " + name)
  require(!RdfNamespace.validate(name) || range != null, "missing range for property " + name)
  require(equivalentProperties != null, "missing equivalent properties for property " + name)
  require(superProperties != null, "missing super properties for property " + name)

  /** Full URI of this property, resolved against the DBpedia ontology namespace. */
  val uri = RdfNamespace.fullUri(DBpediaNamespace.ONTOLOGY, name)

  /** True when the resolved URI lies outside the DBpedia ontology namespace. */
  val isExternalProperty = !uri.startsWith(DBpediaNamespace.ONTOLOGY.namespace)

  override def toString = uri

  // Identity is determined by the name alone, consistent with hashCode below.
  override def equals(other: Any) = other match {
    case that: OntologyProperty => name == that.name
    case _ => false
  }

  override def hashCode = name.hashCode
}
| FnOio/dbpedia-parsing-functions-scala | src/main/scala/dbpedia/dataparsers/ontology/OntologyProperty.scala | Scala | gpl-2.0 | 1,913 |
/*
* Copyright (c) 2016, Innoave.com
* All rights reserved.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL INNOAVE.COM OR ITS CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.innoave.abacus.domain.model
trait Bead
| haraldmaida/AbacusSFX | src/main/scala/com/innoave/abacus/domain/model/Bead.scala | Scala | apache-2.0 | 902 |
package com.twitter.finagle.thriftmux.exp.partitioning
import com.twitter.finagle
import com.twitter.finagle.ThriftMux.ThriftMuxMarshallable
import com.twitter.finagle.context.Contexts
import com.twitter.finagle.loadbalancer.LoadBalancerFactory
import com.twitter.finagle.mux.{Request, Response}
import com.twitter.finagle.partitioning.param
import com.twitter.finagle.thrift.exp.partitioning.{
CustomPartitioningStrategy,
Disabled,
HashingPartitioningStrategy,
PartitioningStrategy,
ThriftCustomPartitioningService,
ThriftHashingPartitioningService
}
import com.twitter.finagle.{Service, ServiceFactory, Stack, Stackable, Status}
import com.twitter.util.{Closable, Future, Time}
import java.util.concurrent.{ConcurrentHashMap, ConcurrentMap}
import java.util.function.{Function => JFunction}
import scala.collection.JavaConverters._
private[thriftmux] object DynamicPartitioningService {

  // Request-local Context slot carrying the strategy chosen for the current request.
  private val strategyKey = new Contexts.local.Key[PartitioningStrategy]()

  /**
   * Sets a per-request partitioning strategy, scoped to `f`. It only works in conjunction
   * with a client using the [[perRequestModule]] installed in its stack.
   */
  def letStrategy[T](strategy: PartitioningStrategy)(f: => T): T =
    Contexts.local.let(strategyKey, strategy) { f }

  /** Stack module that installs a [[DynamicPartitioningService]] as a stack leaf. */
  def perRequestModule: Stackable[ServiceFactory[Request, Response]] =
    new Stack.Module[ServiceFactory[Request, Response]] {
      override def make(
        params: Stack.Params,
        next: Stack[ServiceFactory[Request, Response]]
      ): Stack[ServiceFactory[Request, Response]] = {
        val service = new DynamicPartitioningService(params, next)
        Stack.leaf(role, ServiceFactory.const(service))
      }

      def role: Stack.Role = Stack.Role("DynamicPartitioningService")

      def description: String = "Apply dynamic partition awareness on the ThriftMux MethodBuilder."

      def parameters: Seq[Stack.Param[_]] = Seq(
        implicitly[Stack.Param[LoadBalancerFactory.Dest]],
        implicitly[Stack.Param[finagle.param.Stats]]
      )
    }
}
/**
 * A Service to switch among partitioning services based on the partitioning strategy
 * of each request.
 *
 * One concrete partitioning service is built lazily per distinct strategy and cached
 * in `pool`; requests with no strategy in scope flow through a non-partitioned path.
 */
private[thriftmux] class DynamicPartitioningService(
  params: Stack.Params,
  next: Stack[ServiceFactory[Request, Response]])
  extends Service[Request, Response] {
  import DynamicPartitioningService._

  // Cache of one partitioning service per strategy. Entries are created on first use
  // and deliberately never evicted (see the comment on close()).
  private[this] val pool: ConcurrentMap[
    PartitioningStrategy,
    Service[Request, Response]
  ] = new ConcurrentHashMap()

  // Factory used with computeIfAbsent: builds the service matching a strategy's kind.
  private[this] val strategyFn =
    new JFunction[PartitioningStrategy, Service[Request, Response]] {
      def apply(strategy: PartitioningStrategy): Service[Request, Response] = {
        strategy match {
          case hashingStrategy: HashingPartitioningStrategy =>
            val param.KeyHasher(hasher) = params[param.KeyHasher]
            val param.NumReps(numReps) = params[param.NumReps]
            new ThriftHashingPartitioningService[Request, Response](
              next,
              ThriftMuxMarshallable,
              params,
              hashingStrategy,
              hasher,
              numReps)
          case customStrategy: CustomPartitioningStrategy =>
            new ThriftCustomPartitioningService[Request, Response](
              next,
              ThriftMuxMarshallable,
              params,
              customStrategy
            )
          case Disabled => next.make(params).toService // should not happen
        }
      }
    }

  // Note, we want to initialize the non-partitioned request path eagerly. This allows
  // features like eager connections to work without priming the client with a request.
  private[this] val disabledPartitioningService = next.make(params).toService

  def apply(request: Request): Future[Response] = {
    // A missing Context entry means "no partitioning" for this request.
    val strategy = Contexts.local.getOrElse(strategyKey, () => Disabled)
    val service = strategy match {
      case Disabled => disabledPartitioningService
      case _ => pool.computeIfAbsent(strategy, strategyFn)
    }
    service(request)
  }

  // We don't clear the pool so we won't re-compute a service when
  // the MethodBuilder endpoint is closed.
  override def close(deadline: Time): Future[Unit] = {
    Closable.all(disabledPartitioningService +: pool.values.asScala.toSeq: _*).close(deadline)
  }

  // This status is the MethodBuilder status, it is only Closed if all partitioning
  // services are closed.
  override def status: Status = {
    val services = disabledPartitioningService +: pool.values.asScala.toSeq
    Status.bestOf[Service[Request, Response]](services, service => service.status)
  }

  // exposed for testing
  private[partitioning] def getPool =
    new ConcurrentHashMap[PartitioningStrategy, Service[Request, Response]](pool)
}
| twitter/finagle | finagle-thriftmux/src/main/scala/com/twitter/finagle/thriftmux/exp/partitioning/DynamicPartitioningService.scala | Scala | apache-2.0 | 4,757 |
/*
We get back the original list! Why is that? As we mentioned earlier, one way of thinking about what `foldRight` "does" is it replaces the `Nil` constructor of the list with the `z` argument, and it replaces the `Cons` constructor with the given function, `f`. If we just supply `Nil` for `z` and `Cons` for `f`, then we get back the input list.
foldRight(Cons(1, Cons(2, Cons(3, Nil))), Nil:List[Int])(Cons(_,_))
Cons(1, foldRight(Cons(2, Cons(3, Nil)), Nil:List[Int])(Cons(_,_)))
Cons(1, Cons(2, foldRight(Cons(3, Nil), Nil:List[Int])(Cons(_,_))))
Cons(1, Cons(2, Cons(3, foldRight(Nil, Nil:List[Int])(Cons(_,_)))))
Cons(1, Cons(2, Cons(3, Nil)))
*/ | ud3sh/coursework | functional-programming-in-scala-textbook/answerkey/datastructures/08.answer.scala | Scala | unlicense | 657 |
package com.arcusys.valamis.lesson.scorm.service.sequencing
import com.arcusys.valamis.lesson.scorm.model.manifest.SequencingPermissions
import com.arcusys.valamis.lesson.scorm.model.sequencing.{ NavigationRequestType, NavigationResponseInvalid, NavigationResponseWithTermination }
@org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner])
class PreviousNavigationRequestTest extends NavigationRequestServiceTestBase(NavigationRequestType.Previous) {
  // Numbers such as (4.1.1) in the titles apparently reference cases of the SCORM
  // sequencing navigation-request pseudo-code -- confirm against the spec.

  "Previous navigation request" should "fail for no current activity (4.1.1)" in {
    expectResult(NavigationResponseInvalid,
      rootOnlyTree(),
      rootOnlyTree(hasSuspended = true)
    )
  }

  it should "succeed (with termination = exit) for non-root current activity if it's active and on parent flow is enabled and forward-only is not set (4.2.1.1.1)" in {
    val parentPermissions = new SequencingPermissions(choiceForChildren = true, choiceForNonDescendants = true, flowForChildren = true, forwardOnlyForChildren = false)
    expectResult(NavigationResponseWithTermination,
      twoLevelTree(currentLevel = Some(1), rootPermissions = parentPermissions, currentActive = true),
      threeLevelTree(currentLevel = Some(1), rootPermissions = parentPermissions, currentActive = true),
      threeLevelTree(currentLevel = Some(2), leftPermissions = parentPermissions, currentActive = true)
    )
  }

  // NOTE(review): this test's title says "not active" / "empty termination", yet the
  // fixtures pass currentActive = true and the expected response is
  // NavigationResponseWithTermination -- identical to the test above. Looks like a
  // copy-paste slip; confirm the intended fixture (currentActive = false) and the
  // intended expected response.
  it should "succeed (with empty termination) for non-root current activity if it's not active and on parent flow is enabled and forward-only is not set (4.2.1.2.1)" in {
    val parentPermissions = new SequencingPermissions(choiceForChildren = true, choiceForNonDescendants = true, flowForChildren = true, forwardOnlyForChildren = false)
    expectResult(NavigationResponseWithTermination,
      twoLevelTree(currentLevel = Some(1), rootPermissions = parentPermissions, currentActive = true),
      threeLevelTree(currentLevel = Some(1), rootPermissions = parentPermissions, currentActive = true),
      threeLevelTree(currentLevel = Some(2), leftPermissions = parentPermissions, currentActive = true)
    )
  }

  it should "fail for non-root current activity if on parent activity flow is enabled, but forward-only is set (4.2.2.1)" in {
    val parentPermissions = new SequencingPermissions(choiceForChildren = true, choiceForNonDescendants = true, flowForChildren = true, forwardOnlyForChildren = true)
    expectResult(NavigationResponseInvalid,
      twoLevelTree(currentLevel = Some(1), rootPermissions = parentPermissions),
      threeLevelTree(currentLevel = Some(1), rootPermissions = parentPermissions),
      threeLevelTree(currentLevel = Some(2), leftPermissions = parentPermissions)
    )
  }

  it should "fail for non-root current activity if on parent activity flow is disabled (4.2.2.1)" in {
    val parentPermissions = new SequencingPermissions(choiceForChildren = true, choiceForNonDescendants = true, flowForChildren = false, forwardOnlyForChildren = false)
    expectResult(NavigationResponseInvalid,
      twoLevelTree(currentLevel = Some(1), rootPermissions = parentPermissions),
      threeLevelTree(currentLevel = Some(1), rootPermissions = parentPermissions),
      threeLevelTree(currentLevel = Some(2), leftPermissions = parentPermissions)
    )
  }

  it should "fail for root current activity (4.3.1)" in {
    expectResult(NavigationResponseInvalid,
      rootOnlyTree(hasCurrent = true),
      twoLevelTree(currentLevel = Some(0)),
      threeLevelTree(currentLevel = Some(0))
    )
  }
}
} | igor-borisov/valamis | valamis-scorm-lesson/src/test/scala/com/arcusys/valamis/lesson/scorm/service/sequencing/PreviousNavigationRequestTest.scala | Scala | gpl-3.0 | 3,497 |
package breeze.linalg.support
import breeze.benchmark._
import breeze.linalg._
object CanTraverseValuesBenchmark extends MyRunner(classOf[CanTraverseValuesBenchmark])
/**
 * Benchmarks for row/column-wise reductions over a DenseMatrix. The older vector-sum
 * benchmarks are kept below, commented out, for reference.
 */
class CanTraverseValuesBenchmark extends BreezeBenchmark with BuildsRandomVectors {
  /*
  def timeSumWithCanTraverseValues(reps: Int) = runWith(reps, { randomArray(1024*8) })(arr => {
    val visitor = new ValuesVisitor[Double] {
      var sum: Double = 0
      @inline
      def visit(a: Double) = { sum += a }
      def zeros(numZero: Int, zeroValue: Double) = ()
    }
    DenseVector.canIterateValues[Double].traverse(arr, visitor)
    visitor.sum
  })
  def timeUFuncSum(reps: Int) = runWith(reps, { randomArray(1024*8) })(arr => {
    sum(arr)
  })
  def timePrimitiveSum(reps: Int) = runWith(reps, {randomArray(1024 * 8)}) { arr =>
    val d = arr.data
    var sum = 0.0
    import breeze.macros._
    cforRange(0 until d.length) {
      sum += d(_)
    }
    sum
  }
  def timeUFuncSumStrided(reps: Int) = runWith(reps, { randomArray(1024*8 * 5) })(arr => {
    sum(arr(0 to -1 by 5))
  })
  def timePrimitiveSumStrided(reps: Int) = runWith(reps, {randomArray(1024 * 8 * 5)}) { arr =>
    val d = arr.data
    var sum = 0.0
    import breeze.macros._
    cforRange(0 until d.length by 5) {
      sum += d(_)
    }
    sum
  }
  */
  /*
  def timeSumMatrix(reps: Int) = runWith(reps, {randomMatrix(1024, 40)}) { arr =>
    sum(arr)
  }
  def timeSumMatrixRows(reps: Int) = runWith(reps, {randomMatrix(1024, 40)}) { arr =>
    sum(arr(*, ::))
  }
  def timeSumMatrixRowsLoop(reps: Int) = runWith(reps, {randomMatrix(1024, 40)}) { arr =>
    val result = DenseVector.zeros[Double](1024)
    for (i <- 0 until arr.cols) {
      result += arr(::, i)
    }
    result
  }
  def timeSumMatrixCols(reps: Int) = runWith(reps, {randomMatrix(40, 1024)}) { arr =>
    sum(arr(::, *))
  }
  def timeSumMatrixColsLoop(reps: Int) = runWith(reps, {randomMatrix(40, 1024)}) { arr =>
    val result = DenseVector.zeros[Double](1024)
    for (i <- 0 until arr.rows) {
      result += arr(i, ::).t
    }
    result
  }
  */

  // Note: the four methods below previously read `...}): Unit = { arr =>`, which is not
  // valid Scala; restored to the curried block-application form used elsewhere in this file.

  /** Column-wise max of a 40x1024 matrix. */
  def timeMaxMatrixCols(reps: Int) = runWith(reps, { randomMatrix(40, 1024) }) { arr =>
    max(arr(::, *))
  }

  /** Row-wise max of a 40x1024 matrix. */
  def timeMaxMatrixRows(reps: Int) = runWith(reps, { randomMatrix(40, 1024) }) { arr =>
    max(arr(*, ::))
  }

  /** Column-wise min of a 40x1024 matrix. */
  def timeMinMatrixCols(reps: Int) = runWith(reps, { randomMatrix(40, 1024) }) { arr =>
    min(arr(::, *))
  }

  /** Row-wise min of a 40x1024 matrix. */
  def timeMinMatrixRows(reps: Int) = runWith(reps, { randomMatrix(40, 1024) }) { arr =>
    // Fixed: previously computed max(arr(*, ::)), contradicting the method name and
    // duplicating timeMaxMatrixRows.
    min(arr(*, ::))
  }
}
| scalanlp/breeze | benchmark/src/main/scala/breeze/linalg/support/CanTraverseValues.scala | Scala | apache-2.0 | 2,638 |
package exploration.utils
import java.io.FileWriter
import java.nio.file.{Files, Paths}
import exploration.{HighLevelRewrite, ParameterRewrite}
import rewriting.utils.{DumpToFile, Utils}
import scala.io.Source
/**
* Created by Toomas Remmelg on 09/05/16.
*/
/**
 * Re-hashes stored lambdas: reads every lambda listed in `<topFolder>/index`, dumps it
 * under a SHA-256-named file in `<topFolder>NewHash`, and appends each newly seen hash
 * to that folder's index. Entries that fail to read or rewrite are skipped.
 */
object RenameLambdas {
  def main(args: Array[String]): Unit = {
    import scala.util.control.NonFatal

    val topFolder = if (args.nonEmpty) args(0)
      else "/home/s1042579/Documents/cgo-2016-kernels/mmTransposeA"

    val top = topFolder.split("/").init.mkString("", "/", "/")
    val newTopFolder = topFolder + "NewHash"

    val kernels = Source.fromFile(topFolder + "/index")
    try {
      kernels.getLines().foreach(location => {
        try {
          val fullFilename = top + location
          if (Files.exists(Paths.get(fullFilename))) {
            val lambda = HighLevelRewrite.finishRewriting(ParameterRewrite.readLambdaFromFile(fullFilename))
            val stringRep = DumpToFile.dumpLambdaToString(lambda)
            val sha256 = DumpToFile.Sha256Hash(stringRep)
            println(location.split("/").last + ", " + sha256)

            if (DumpToFile.dumpToFile(stringRep, sha256, newTopFolder)) {
              // Add to index if it was unique
              synchronized {
                val idxFile = new FileWriter(newTopFolder + "/index", true)
                try {
                  idxFile.write(topFolder + "NewHash/" + sha256 + "\\n")
                } finally {
                  idxFile.close() // fixed: close even when the write throws
                }
              }
            }
          }
        } catch {
          // Fixed: was `case _: Throwable =>`, which also swallows fatal VM errors.
          // Unreadable or unparsable entries are still skipped silently, as before.
          case NonFatal(_) =>
        }
      })
    } finally {
      kernels.close() // fixed: the Source was previously never closed
    }
  }
}
| lift-project/lift | src/main/exploration/utils/RenameLambdas.scala | Scala | mit | 1,474 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.runtime.harness
import java.util.{Comparator, Queue => JQueue}
import org.apache.flink.api.common.typeinfo.BasicTypeInfo.{INT_TYPE_INFO, LONG_TYPE_INFO, STRING_TYPE_INFO}
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.java.functions.KeySelector
import org.apache.flink.api.java.typeutils.RowTypeInfo
import org.apache.flink.streaming.api.operators.OneInputStreamOperator
import org.apache.flink.streaming.api.watermark.Watermark
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord
import org.apache.flink.streaming.util.{KeyedOneInputStreamOperatorTestHarness, TestHarnessUtil}
import org.apache.flink.table.codegen.GeneratedAggregationsFunction
import org.apache.flink.table.functions.AggregateFunction
import org.apache.flink.table.functions.utils.UserDefinedFunctionUtils
import org.apache.flink.table.functions.aggfunctions.{IntSumWithRetractAggFunction, LongMaxWithRetractAggFunction, LongMinWithRetractAggFunction}
import org.apache.flink.table.functions.utils.UserDefinedFunctionUtils.getAccumulatorTypeOfAggregateFunction
import org.apache.flink.table.runtime.types.{CRow, CRowTypeInfo}
class HarnessTestBase {
val longMinWithRetractAggFunction: String =
UserDefinedFunctionUtils.serialize(new LongMinWithRetractAggFunction)
val longMaxWithRetractAggFunction: String =
UserDefinedFunctionUtils.serialize(new LongMaxWithRetractAggFunction)
val intSumWithRetractAggFunction: String =
UserDefinedFunctionUtils.serialize(new IntSumWithRetractAggFunction)
protected val MinMaxRowType = new RowTypeInfo(Array[TypeInformation[_]](
LONG_TYPE_INFO,
STRING_TYPE_INFO,
LONG_TYPE_INFO),
Array("rowtime", "a", "b"))
protected val SumRowType = new RowTypeInfo(Array[TypeInformation[_]](
LONG_TYPE_INFO,
INT_TYPE_INFO,
STRING_TYPE_INFO),
Array("a", "b", "c"))
protected val minMaxCRowType = new CRowTypeInfo(MinMaxRowType)
protected val sumCRowType = new CRowTypeInfo(SumRowType)
protected val minMaxAggregates: Array[AggregateFunction[_, _]] =
Array(new LongMinWithRetractAggFunction,
new LongMaxWithRetractAggFunction).asInstanceOf[Array[AggregateFunction[_, _]]]
protected val sumAggregates: Array[AggregateFunction[_, _]] =
Array(new IntSumWithRetractAggFunction).asInstanceOf[Array[AggregateFunction[_, _]]]
protected val minMaxAggregationStateType: RowTypeInfo =
new RowTypeInfo(minMaxAggregates.map(getAccumulatorTypeOfAggregateFunction(_)): _*)
protected val sumAggregationStateType: RowTypeInfo =
new RowTypeInfo(sumAggregates.map(getAccumulatorTypeOfAggregateFunction(_)): _*)
val minMaxCode: String =
s"""
|public class MinMaxAggregateHelper
| extends org.apache.flink.table.runtime.aggregate.GeneratedAggregations {
|
| transient org.apache.flink.table.functions.aggfunctions.LongMinWithRetractAggFunction
| fmin = null;
|
| transient org.apache.flink.table.functions.aggfunctions.LongMaxWithRetractAggFunction
| fmax = null;
|
| public MinMaxAggregateHelper() throws Exception {
|
| fmin = (org.apache.flink.table.functions.aggfunctions.LongMinWithRetractAggFunction)
| org.apache.flink.table.functions.utils.UserDefinedFunctionUtils
| .deserialize("$longMinWithRetractAggFunction");
|
| fmax = (org.apache.flink.table.functions.aggfunctions.LongMaxWithRetractAggFunction)
| org.apache.flink.table.functions.utils.UserDefinedFunctionUtils
| .deserialize("$longMaxWithRetractAggFunction");
| }
|
| public void setAggregationResults(
| org.apache.flink.types.Row accs,
| org.apache.flink.types.Row output) {
|
| org.apache.flink.table.functions.AggregateFunction baseClass0 =
| (org.apache.flink.table.functions.AggregateFunction) fmin;
| output.setField(3, baseClass0.getValue(
| (org.apache.flink.table.functions.aggfunctions.MinWithRetractAccumulator)
| accs.getField(0)));
|
| org.apache.flink.table.functions.AggregateFunction baseClass1 =
| (org.apache.flink.table.functions.AggregateFunction) fmax;
| output.setField(4, baseClass1.getValue(
| (org.apache.flink.table.functions.aggfunctions.MaxWithRetractAccumulator)
| accs.getField(1)));
| }
|
| public void accumulate(
| org.apache.flink.types.Row accs,
| org.apache.flink.types.Row input) {
|
| fmin.accumulate(
| ((org.apache.flink.table.functions.aggfunctions.MinWithRetractAccumulator)
| accs.getField(0)),
| (java.lang.Long) input.getField(2));
|
| fmax.accumulate(
| ((org.apache.flink.table.functions.aggfunctions.MaxWithRetractAccumulator)
| accs.getField(1)),
| (java.lang.Long) input.getField(2));
| }
|
| public void retract(
| org.apache.flink.types.Row accs,
| org.apache.flink.types.Row input) {
|
| fmin.retract(
| ((org.apache.flink.table.functions.aggfunctions.MinWithRetractAccumulator)
| accs.getField(0)),
| (java.lang.Long) input.getField(2));
|
| fmax.retract(
| ((org.apache.flink.table.functions.aggfunctions.MaxWithRetractAccumulator)
| accs.getField(1)),
| (java.lang.Long) input.getField(2));
| }
|
| public org.apache.flink.types.Row createAccumulators() {
|
| org.apache.flink.types.Row accs = new org.apache.flink.types.Row(2);
|
| accs.setField(
| 0,
| fmin.createAccumulator());
|
| accs.setField(
| 1,
| fmax.createAccumulator());
|
| return accs;
| }
|
| public void setForwardedFields(
| org.apache.flink.types.Row input,
| org.apache.flink.types.Row output) {
|
| output.setField(0, input.getField(0));
| output.setField(1, input.getField(1));
| output.setField(2, input.getField(2));
| }
|
| public org.apache.flink.types.Row createOutputRow() {
| return new org.apache.flink.types.Row(5);
| }
|
| public void open(org.apache.flink.api.common.functions.RuntimeContext ctx) {
| }
|
| public void cleanup() {
| }
|
| public void close() {
| }
|/******* This test does not use the following methods *******/
| public org.apache.flink.types.Row mergeAccumulatorsPair(
| org.apache.flink.types.Row a,
| org.apache.flink.types.Row b) {
| return null;
| }
|
| public void resetAccumulator(org.apache.flink.types.Row accs) {
| }
|
| public void setConstantFlags(org.apache.flink.types.Row output) {
| }
|}
""".stripMargin
val sumAggCode: String =
s"""
|public final class SumAggregationHelper
| extends org.apache.flink.table.runtime.aggregate.GeneratedAggregations {
|
|
|transient org.apache.flink.table.functions.aggfunctions.IntSumWithRetractAggFunction
|sum = null;
|private final org.apache.flink.table.runtime.aggregate.SingleElementIterable<org.apache
| .flink.table.functions.aggfunctions.SumWithRetractAccumulator> accIt0 =
| new org.apache.flink.table.runtime.aggregate.SingleElementIterable<org.apache.flink
| .table
| .functions.aggfunctions.SumWithRetractAccumulator>();
|
| public SumAggregationHelper() throws Exception {
|
|sum = (org.apache.flink.table.functions.aggfunctions.IntSumWithRetractAggFunction)
|org.apache.flink.table.functions.utils.UserDefinedFunctionUtils
|.deserialize("$intSumWithRetractAggFunction");
|}
|
| public final void setAggregationResults(
| org.apache.flink.types.Row accs,
| org.apache.flink.types.Row output) {
|
| org.apache.flink.table.functions.AggregateFunction baseClass0 =
| (org.apache.flink.table.functions.AggregateFunction)
| sum;
|
| output.setField(
| 1,
| baseClass0.getValue((org.apache.flink.table.functions.aggfunctions
| .SumWithRetractAccumulator) accs.getField(0)));
| }
|
| public final void accumulate(
| org.apache.flink.types.Row accs,
| org.apache.flink.types.Row input) {
|
| sum.accumulate(
| ((org.apache.flink.table.functions.aggfunctions.SumWithRetractAccumulator) accs
| .getField
| (0)),
| (java.lang.Integer) input.getField(1));
| }
|
|
| public final void retract(
| org.apache.flink.types.Row accs,
| org.apache.flink.types.Row input) {
| }
|
| public final org.apache.flink.types.Row createAccumulators()
| {
|
| org.apache.flink.types.Row accs =
| new org.apache.flink.types.Row(1);
|
| accs.setField(
| 0,
| sum.createAccumulator());
|
| return accs;
| }
|
| public final void setForwardedFields(
| org.apache.flink.types.Row input,
| org.apache.flink.types.Row output)
| {
|
| output.setField(
| 0,
| input.getField(0));
| }
|
| public final void setConstantFlags(org.apache.flink.types.Row output)
| {
|
| }
|
| public final org.apache.flink.types.Row createOutputRow() {
| return new org.apache.flink.types.Row(2);
| }
|
|
| public final org.apache.flink.types.Row mergeAccumulatorsPair(
| org.apache.flink.types.Row a,
| org.apache.flink.types.Row b)
| {
|
| return a;
|
| }
|
| public final void resetAccumulator(
| org.apache.flink.types.Row accs) {
| }
|
| public void open(org.apache.flink.api.common.functions.RuntimeContext ctx) {
| }
|
| public void cleanup() {
| }
|
| public void close() {
| }
|}
|""".stripMargin
  // Class names of the generated aggregation helpers defined by the code strings above.
  protected val minMaxFuncName = "MinMaxAggregateHelper"
  protected val sumFuncName = "SumAggregationHelper"
  // Wrap the generated source + class name so tests can hand them to the code-generating operators.
  protected val genMinMaxAggFunction = GeneratedAggregationsFunction(minMaxFuncName, minMaxCode)
  protected val genSumAggFunction = GeneratedAggregationsFunction(sumFuncName, sumAggCode)
  /**
    * Wraps `operator` in a keyed one-input test harness so records can be pushed
    * in and the emitted output collected without running a full Flink job.
    */
  def createHarnessTester[IN, OUT, KEY](
    operator: OneInputStreamOperator[IN, OUT],
    keySelector: KeySelector[IN, KEY],
    keyType: TypeInformation[KEY]): KeyedOneInputStreamOperatorTestHarness[KEY, IN, OUT] = {
    new KeyedOneInputStreamOperatorTestHarness[KEY, IN, OUT](operator, keySelector, keyType)
  }
def verify(
expected: JQueue[Object],
actual: JQueue[Object],
comparator: Comparator[Object],
checkWaterMark: Boolean = false): Unit = {
if (!checkWaterMark) {
val it = actual.iterator()
while (it.hasNext) {
val data = it.next()
if (data.isInstanceOf[Watermark]) {
actual.remove(data)
}
}
}
TestHarnessUtil.assertOutputEqualsSorted("Verify Error...", expected, actual, comparator)
}
}
object HarnessTestBase {
  /**
    * Return 0 for equal Rows and non zero for different rows.
    *
    * NOTE(review): any Watermark operand yields -1 regardless of argument
    * order, which violates comparator symmetry; per the inline comment,
    * watermarks are simply not expected to reach this comparator.
    */
  class RowResultSortComparator() extends Comparator[Object] with Serializable {
    override def compare(o1: Object, o2: Object): Int = {
      if (o1.isInstanceOf[Watermark] || o2.isInstanceOf[Watermark]) {
        // watermark is not expected
        -1
      } else {
        // order by the textual form of the wrapped CRow
        val row1 = o1.asInstanceOf[StreamRecord[CRow]].getValue
        val row2 = o2.asInstanceOf[StreamRecord[CRow]].getValue
        row1.toString.compareTo(row2.toString)
      }
    }
  }
  /**
    * Tuple row key selector that returns a specified field as the selector function
    */
  class TupleRowKeySelector[T](
    private val selectorField: Int) extends KeySelector[CRow, T] {
    override def getKey(value: CRow): T = {
      value.row.getField(selectorField).asInstanceOf[T]
    }
  }
}
| zohar-mizrahi/flink | flink-libraries/flink-table/src/test/scala/org/apache/flink/table/runtime/harness/HarnessTestBase.scala | Scala | apache-2.0 | 13,277 |
package org.querki
/**
 * Shared helper types for working with Scala.js facades.
 */
package object jsext {
  /**
   * The accumulator used by JSOptionBuilder: option names mapped to the values
   * that have been set so far.
   */
  type OptMap = Map[String, Any]
  /**
   * The starting point for building an options object: an OptMap with nothing
   * set yet.
   */
  val noOpts: OptMap = Map[String, Any]()
}
| aparo/scalajs-joda | src/main/scala/org/querki/jsext/package.scala | Scala | apache-2.0 | 354 |
package org.greencheek.spray.cache.memcached.keyhashing
import java.io.UnsupportedEncodingException
/**
 * Jenkins one-at-a-time hash of a cache key.
 *
 * Created by dominictootell on 28/05/2014.
 */
object JenkinsKeyHashing extends KeyHashing {
  /**
   * Hashes the UTF-8 bytes of `key` with the Jenkins one-at-a-time function and
   * renders the unsigned 32-bit result as a decimal string.
   */
  def hashKey(key: String): String = {
    try {
      // per-byte mixing stage
      val mixed = key.getBytes("UTF-8").foldLeft(0) { (acc, bt) =>
        val withByte = acc + (bt & 0xFF)
        val spread = withByte + (withByte << 10)
        spread ^ (spread >>> 6)
      }
      // final avalanche stage
      val a = mixed + (mixed << 3)
      val b = a ^ (a >>> 11)
      val c = b + (b << 15)
      // widen to Long and mask so the value prints as an unsigned 32-bit number
      (c & 0xFFFFFFFFL).toString
    }
    catch {
      case e: UnsupportedEncodingException =>
        throw new IllegalStateException("Hash function error", e)
    }
  }
}
| tootedom/spray-cache-spymemcached | src/main/scala/org/greencheek/spray/cache/memcached/keyhashing/JenkinsKeyHashing.scala | Scala | apache-2.0 | 681 |
import scala.reflect.runtime.universe._
import scala.tools.reflect.ToolBox
import scala.tools.reflect.Eval
object Test extends App {
  {
    // Reify an expression whose element type T chains through two local type
    // aliases down to the method's free type parameter W.
    def foo[W] = {
      type U = W
      type T = U
      reify {
        List[T](2)
      }
    }
    val code = foo[Int]
    println(code.tree.freeTypes)
    // NOTE(review): index 2 assumes the tree reports at least three free
    // types; only W is visibly free here -- this is a pending test, confirm
    // against the expected check file.
    val W = code.tree.freeTypes(2)
    // evaluate the tree with W substituted by Int via the toolbox
    cm.mkToolBox().eval(code.tree, Map(W -> definitions.IntTpe))
    println(code.eval)
  }
} | felixmulder/scala | test/pending/run/reify_newimpl_09c.scala | Scala | bsd-3-clause | 423 |
package cromwell.server
import akka.actor.{Props, ActorSystem}
import com.typesafe.config.ConfigFactory
import cromwell.engine.backend.CromwellBackend
import cromwell.engine.workflow.{MaterializeWorkflowDescriptorActor, WorkflowManagerActor}
import cromwell.engine.workflow.WorkflowManagerActor
import cromwell.instrumentation.Instrumentation._
import collection.JavaConversions._
trait WorkflowManagerSystem {
  // starts instrumentation as a side effect as soon as the trait is mixed in
  Monitor.start()
  protected def systemName = "cromwell-system"
  protected def newActorSystem(): ActorSystem = ActorSystem(systemName)
  // the single actor system shared by everything mixed into this trait
  implicit final lazy val actorSystem = newActorSystem()
  def shutdownActorSystem(): Unit = {
    actorSystem.shutdown()
  }
  // backend names permitted by configuration (backend.backendsAllowed)
  def allowedBackends: List[String] = ConfigFactory.load.getConfig("backend").getStringList("backendsAllowed").toList
  // backend used when a workflow does not request one explicitly
  def defaultBackend: String = ConfigFactory.load.getConfig("backend").getString("defaultBackend")
  // NOTE(review): runs during trait construction, so backends are registered
  // eagerly on mixin, before any subclass initialization completes.
  CromwellBackend.initBackends(allowedBackends, defaultBackend, actorSystem)
  // For now there's only one WorkflowManagerActor so no need to dynamically name it
  lazy val workflowManagerActor = actorSystem.actorOf(WorkflowManagerActor.props(), "WorkflowManagerActor")
}
| cowmoo/cromwell | engine/src/main/scala/cromwell/server/WorkflowManagerSystem.scala | Scala | bsd-3-clause | 1,171 |
/**
* This file is part of the TA Buddy project.
* Copyright (c) 2014 Alexey Aksenov ezh@ezh.msk.ru
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Affero General Global License version 3
* as published by the Free Software Foundation with the addition of the
* following permission added to Section 15 as permitted in Section 7(a):
* FOR ANY PART OF THE COVERED WORK IN WHICH THE COPYRIGHT IS OWNED
* BY Limited Liability Company «MEZHGALAKTICHESKIJ TORGOVYJ ALIANS»,
* Limited Liability Company «MEZHGALAKTICHESKIJ TORGOVYJ ALIANS» DISCLAIMS
* THE WARRANTY OF NON INFRINGEMENT OF THIRD PARTY RIGHTS.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Affero General Global License for more details.
* You should have received a copy of the GNU Affero General Global License
* along with this program; if not, see http://www.gnu.org/licenses or write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA, 02110-1301 USA, or download the license from the following URL:
* http://www.gnu.org/licenses/agpl.html
*
* The interactive user interfaces in modified source and object code versions
* of this program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU Affero General Global License.
*
* In accordance with Section 7(b) of the GNU Affero General Global License,
* you must retain the producer line in every report, form or document
* that is created or manipulated using TA Buddy.
*
* You can be released from the requirements of the license by purchasing
* a commercial license. Buying such a license is mandatory as soon as you
* develop commercial activities involving the TA Buddy software without
* disclosing the source code of your own applications.
* These activities include: offering paid services to customers,
* serving files in a web or/and network application,
* shipping TA Buddy with a closed source product.
*
* For more information, please contact Digimead Team at this
* address: ezh@ezh.msk.ru
*/
package org.digimead.tabuddy.desktop.logic.ui.support.digest
import org.digimead.tabuddy.desktop.core.support.{ App, WritableValue }
import org.digimead.tabuddy.desktop.logic.Messages
import org.digimead.tabuddy.model.serialization.digest.{ Mechanism, SimpleDigest }
import org.eclipse.core.databinding.DataBindingContext
import org.eclipse.jface.databinding.viewers.ViewersObservables
import org.eclipse.jface.dialogs.{ IDialogConstants, TitleAreaDialog }
import org.eclipse.jface.viewers.{ LabelProvider, StructuredSelection }
import org.eclipse.swt.SWT
import org.eclipse.swt.events.{ DisposeEvent, DisposeListener }
import org.eclipse.swt.layout.GridData
import org.eclipse.swt.widgets.{ Button, Composite, Control, Shell }
/**
 * Adapter that exposes the model.serialization.digest.SimpleDigest mechanism
 * to the UI, providing a configuration composite and dialog for it.
 */
class SimpleDigestAdapter extends DigestAdapter {
  /** Identifier of the digest mechanism. */
  val identifier: Mechanism.Identifier = SimpleDigest.Identifier
  /** Flag indicating whether the parameters are supported. */
  def parameters: Boolean = true
  /** Get composite for the digest configuration. */
  def composite(parent: Composite, default: Option[Mechanism.Parameters]) =
    Option(new SimpleDigestAdapter.SimpleDigestComposite(parent, SWT.NONE, narrow(default)))
  /** Get dialog for the digest configuration. */
  def dialog(parent: Shell, default: Option[Mechanism.Parameters], tag: String = Messages.digest_text) =
    Option(new SimpleDigestAdapter.SimpleDigestDialog(parent, narrow(default), tag))
  /** Narrow generic digest parameters to SimpleDigest parameters or fail fast. */
  private def narrow(default: Option[Mechanism.Parameters]): Option[SimpleDigest.Parameters] =
    default map {
      case p: SimpleDigest.Parameters ⇒ p
      case other ⇒ throw new IllegalArgumentException(s"Expect Base.Parameters, but ${other} found")
    }
}
object SimpleDigestAdapter {
  /** All supported algorithms by mechanism: (name, description, parameters), sorted by name. */
  val algorithms = Seq(("SHA-512", "hash algorithms defined in the FIPS PUB 180-2.", SimpleDigest("SHA-512")),
    ("SHA-384", "hash algorithms defined in the FIPS PUB 180-2.", SimpleDigest("SHA-384")),
    ("SHA-256", "hash algorithms defined in the FIPS PUB 180-2.", SimpleDigest("SHA-256")),
    ("SHA", "hash algorithms defined in the FIPS PUB 180-2.", SimpleDigest("SHA-1")),
    ("MD5", "the MD5 message digest algorithm as defined in RFC 1321.", SimpleDigest("MD5")),
    ("MD2", "the MD2 message digest algorithm as defined in RFC 1319.", SimpleDigest("MD2"))).sortBy(_._1)
  /**
   * SimpleDigest adapter composite: a combo box from which the user picks one
   * of the supported digest algorithms. The (null, null, null) tuple is the
   * "please select" placeholder entry.
   */
  class SimpleDigestComposite(parent: Composite, style: Int, defaultParameters: Option[SimpleDigest.Parameters])
    extends SimpleDigestAdapterSkel(parent, style) with DigestAdapter.Composite {
    /** Binding context. */
    lazy val bindingContext = new DataBindingContext(App.realm)
    /** Digest algorithm field holding the currently selected (name, description, parameters) tuple. */
    lazy val digestAlgorithmField = App.execNGet(WritableValue[(String, String, SimpleDigest.Parameters)])
    /** Composite result: Right(parameters) once a valid algorithm is selected, Left(error) otherwise. */
    @volatile protected var result = Option.empty[Either[String, Mechanism.Parameters]]
    // construction-time wiring; order matters: UI widgets first, then bindings, then defaults
    initializeUI()
    initializeBindings()
    initializeDefaults()
    this.addDisposeListener(new DisposeListener {
      def widgetDisposed(e: DisposeEvent) = onDispose
    })
    /** Get an error or digest parameters. */
    def get(): Option[Either[String, Mechanism.Parameters]] = result
    /** On dispose callback: capture the final result and free databinding resources. */
    protected def onDispose {
      updateResult
      bindingContext.dispose()
      digestAlgorithmField.dispose()
    }
    /** Initialize UI part. */
    protected def initializeUI() {
      App.assertEventThread()
      val comboViewerDigestAlgorithm = getComboViewerDigestAlgorithm()
      comboViewerDigestAlgorithm.setLabelProvider(new LabelProvider() {
        override def getText(element: AnyRef): String = element match {
          case (null, null, null) ⇒ Messages.Adapter_selectDigestAlgorithm_text
          case (name, description: String, length) ⇒ s"${name} - ${description.capitalize}"
          case unknown ⇒ super.getText(unknown)
        }
      })
      // propagate combo selections (debounced by 50ms) into digestAlgorithmField
      val digestAlgorithmFieldBinding = bindingContext.bindValue(ViewersObservables.
        observeDelayedValue(50, ViewersObservables.observeSingleSelection(comboViewerDigestAlgorithm)), digestAlgorithmField)
      comboViewerDigestAlgorithm.getCCombo().addDisposeListener(new DisposeListener {
        def widgetDisposed(e: DisposeEvent) = bindingContext.removeBinding(digestAlgorithmFieldBinding)
      })
      // placeholder entry followed by the supported algorithms
      comboViewerDigestAlgorithm.add((null, null, null))
      SimpleDigestAdapter.algorithms.foreach(comboViewerDigestAlgorithm.add)
    }
    /** Initialize binding part. */
    protected def initializeBindings() {
      digestAlgorithmField.addChangeListener { case ((name, description, length), event) ⇒ updateResult }
    }
    /** Initialize default values: preselect the provided parameters or the placeholder. */
    protected def initializeDefaults() = defaultParameters match {
      case Some(parameters) ⇒
        SimpleDigestAdapter.algorithms.find(_._3 == parameters).foreach(value ⇒ getComboViewerDigestAlgorithm.setSelection(new StructuredSelection(value)))
      case None ⇒
        getComboViewerDigestAlgorithm.setSelection(new StructuredSelection((null, null, null)))
    }
    /** Update result value from the current field selection. */
    protected def updateResult = {
      for {
        digestAlgorithm ← Option(digestAlgorithmField.value) if digestAlgorithm._3 != null
      } yield digestAlgorithm._3
    } match {
      case Some(algorithm) ⇒
        result = Some(Right(algorithm))
      case _ ⇒
        result = Some(Left(Messages.parametersRequired_text))
    }
  }
  /**
   * SimpleDigest adapter dialog: wraps a SimpleDigestComposite in a
   * TitleAreaDialog and keeps the OK button enabled only while the composite
   * holds a valid selection.
   */
  class SimpleDigestDialog(parentShell: Shell, defaultValue: Option[SimpleDigest.Parameters], tag: String)
    extends TitleAreaDialog(parentShell) with DigestAdapter.Dialog {
    /** Private field with content's composite; assigned in createDialogArea. */
    @volatile protected var content: SimpleDigestComposite = null
    /** Get an error or digest parameters. */
    def get(): Option[Either[String, Mechanism.Parameters]] = Option(content) match {
      case Some(content) ⇒ content.get()
      case None ⇒ Some(Left(Messages.parametersRequired_text))
    }
    override protected def configureShell(shell: Shell) {
      super.configureShell(shell)
      shell.setText(Messages.Adapter_selectXParameters_text.format(SimpleDigest.Identifier.name.capitalize, tag))
    }
    override def create() {
      super.create()
      setTitle(SimpleDigest.Identifier.name.capitalize)
      setMessage(SimpleDigest.Identifier.description.capitalize)
    }
    override protected def createButton(parent: Composite, id: Int, label: String, defaultButton: Boolean): Button = {
      val button = super.createButton(parent, id, label, defaultButton)
      if (id == IDialogConstants.OK_ID) {
        // keep OK in sync with the validity of the current selection
        button.setEnabled(content.get().map(_.isRight).getOrElse(false))
        content.digestAlgorithmField.addChangeListener { case (_, event) ⇒ button.setEnabled(content.get().map(_.isRight).getOrElse(false)) }
      }
      button
    }
    override protected def createDialogArea(parent: Composite): Control = {
      content = new SimpleDigestAdapter.SimpleDigestComposite(parent, SWT.NONE, defaultValue)
      content.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 1, 1))
      content
    }
  }
}
| digimead/digi-TABuddy-desktop | part-logic/src/main/scala/org/digimead/tabuddy/desktop/logic/ui/support/digest/SimpleDigestAdapter.scala | Scala | agpl-3.0 | 9,875 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.entity
import java.nio.charset.StandardCharsets
import com.typesafe.config.ConfigValue
import pureconfig._
import pureconfig.generic.auto._
import spray.json._
import ByteSize.formatError
object SizeUnits extends Enumeration {
  /**
   * A storage unit that can convert a magnitude of itself into other units.
   * All downward conversions use integer division, i.e. they truncate.
   */
  sealed abstract class Unit() {
    def toBytes(n: Long): Long
    def toKBytes(n: Long): Long
    def toMBytes(n: Long): Long
    def toGBytes(n: Long): Long
  }
  // identity unit
  case object BYTE extends Unit {
    def toBytes(n: Long): Long = n
    def toKBytes(n: Long): Long = n / 1024
    def toMBytes(n: Long): Long = n / 1024 / 1024
    def toGBytes(n: Long): Long = n / 1024 / 1024 / 1024
  }
  case object KB extends Unit {
    def toBytes(n: Long): Long = n * 1024
    def toKBytes(n: Long): Long = n
    def toMBytes(n: Long): Long = n / 1024
    def toGBytes(n: Long): Long = n / 1024 / 1024
  }
  case object MB extends Unit {
    def toBytes(n: Long): Long = n * 1024 * 1024
    def toKBytes(n: Long): Long = n * 1024
    def toMBytes(n: Long): Long = n
    def toGBytes(n: Long): Long = n / 1024
  }
  case object GB extends Unit {
    def toBytes(n: Long): Long = n * 1024 * 1024 * 1024
    def toKBytes(n: Long): Long = n * 1024 * 1024
    def toMBytes(n: Long): Long = n * 1024
    def toGBytes(n: Long): Long = n
  }
}
/**
 * A non-negative amount of storage expressed as a magnitude plus a unit.
 *
 * Two instances are equal when they denote the same number of bytes,
 * e.g. ByteSize(1, MB) == ByteSize(1024, KB).
 *
 * @param size the magnitude; must not be negative
 * @param unit the unit the magnitude is expressed in
 */
case class ByteSize(size: Long, unit: SizeUnits.Unit) extends Ordered[ByteSize] {
  require(size >= 0, "a negative size of an object is not allowed.")

  /** This size normalized to bytes. */
  def toBytes = unit.toBytes(size)

  /** This size normalized to kilobytes (truncating). */
  def toKB = unit.toKBytes(size)

  /** This size normalized to megabytes (truncating). */
  def toMB = unit.toMBytes(size)

  /** Sum of both sizes, expressed in bytes. */
  def +(other: ByteSize): ByteSize = {
    val commonUnit = SizeUnits.BYTE
    val commonSize = other.toBytes + toBytes
    ByteSize(commonSize, commonUnit)
  }

  /** Difference of both sizes in bytes; trips the constructor require if negative. */
  def -(other: ByteSize): ByteSize = {
    val commonUnit = SizeUnits.BYTE
    val commonSize = toBytes - other.toBytes
    ByteSize(commonSize, commonUnit)
  }

  /** This size scaled by an integer factor, expressed in bytes. */
  def *(other: Int): ByteSize = {
    ByteSize(toBytes * other, SizeUnits.BYTE)
  }

  /** Ratio of the two sizes as a Double. */
  def /(other: ByteSize): Double = {
    // Without throwing the exception the result would be `Infinity` here
    if (other.toBytes == 0) {
      throw new ArithmeticException
    } else {
      (1.0 * toBytes) / (1.0 * other.toBytes)
    }
  }

  /** This size divided by an integer factor, expressed in bytes (truncating). */
  def /(other: Int): ByteSize = {
    ByteSize(toBytes / other, SizeUnits.BYTE)
  }

  def compare(other: ByteSize) = toBytes compare other.toBytes

  // Equality is unit-insensitive: sizes compare by their byte count only.
  override def equals(that: Any): Boolean = that match {
    case t: ByteSize => compareTo(t) == 0
    case _ => false
  }

  // Fix: since equals is unit-insensitive, hashCode must be as well. The
  // case-class default hashes (size, unit), which makes equal values such as
  // 1 MB and 1024 KB hash differently and breaks the equals/hashCode contract
  // (e.g. when used as HashMap keys).
  override def hashCode: Int = toBytes.hashCode

  override def toString = {
    unit match {
      case SizeUnits.BYTE => s"$size B"
      case SizeUnits.KB => s"$size KB"
      case SizeUnits.MB => s"$size MB"
      case SizeUnits.GB => s"$size GB"
    }
  }
}
object ByteSize {
  // Accepts e.g. "15B", "5 MB", "128k": case-insensitive, optional single
  // leading/trailing space. Triple-quoted strings perform no escape
  // processing, so \s and \d reach the regex engine unmangled.
  private val regex = """(?i)\s?(\d+)\s?(GB|MB|KB|B|G|M|K)\s?""".r.pattern
  protected[entity] val formatError = """Size Unit not supported. Only "B", "K[B]", "M[B]" and "G[B]" are supported."""

  /**
   * Parses a textual size such as "10 MB" into a ByteSize.
   *
   * Fix: the previous pattern was written with doubled backslashes ("\\s",
   * "\\d") inside a triple-quoted string; those denote a literal backslash
   * followed by 's'/'d' in the regex, so no ordinary input ("5MB", "10 KB",
   * ...) could ever match and every call threw.
   *
   * @throws IllegalArgumentException if the input does not match the expected format
   */
  def fromString(sizeString: String): ByteSize = {
    val matcher = regex.matcher(sizeString)
    if (matcher.matches()) {
      val size = matcher.group(1).toLong
      // only the first letter of the unit token is significant: B/K/M/G
      val unit = matcher.group(2).charAt(0).toUpper match {
        case 'B' => SizeUnits.BYTE
        case 'K' => SizeUnits.KB
        case 'M' => SizeUnits.MB
        case 'G' => SizeUnits.GB
      }
      ByteSize(size, unit)
    } else {
      throw new IllegalArgumentException(formatError)
    }
  }
}
object size {
  // Lets Int literals be written as sizes, e.g. `5.MB`.
  implicit class SizeInt(n: Int) extends SizeConversion {
    def sizeIn(unit: SizeUnits.Unit): ByteSize = ByteSize(n, unit)
  }
  // Same sugar for Long magnitudes.
  implicit class SizeLong(n: Long) extends SizeConversion {
    def sizeIn(unit: SizeUnits.Unit): ByteSize = ByteSize(n, unit)
  }
  // The size of a String is the length of its UTF-8 encoding.
  // NOTE(review): that byte length is interpreted in `unit` as-is, so
  // "abc".sizeIn(KB) yields 3 KB, not 3 B -- the common use is sizeInBytes.
  implicit class SizeString(n: String) extends SizeConversion {
    def sizeIn(unit: SizeUnits.Unit): ByteSize = ByteSize(n.getBytes(StandardCharsets.UTF_8).length, unit)
  }
  // None counts as a zero size in the given unit.
  implicit class SizeOptionString(n: Option[String]) extends SizeConversion {
    def sizeIn(unit: SizeUnits.Unit): ByteSize =
      n map { s =>
        s.sizeIn(unit)
      } getOrElse {
        ByteSize(0, unit)
      }
  }
  // Creation of an intermediary Config object is necessary here, since "getBytes" is only part of that interface.
  implicit val pureconfigReader =
    ConfigReader[ConfigValue].map(v => ByteSize(v.atKey("key").getBytes("key"), SizeUnits.BYTE))
  // JSON (de)serialization: a ByteSize round-trips through its toString form, e.g. "10 MB".
  protected[core] implicit val serdes = new RootJsonFormat[ByteSize] {
    def write(b: ByteSize) = JsString(b.toString)
    def read(value: JsValue): ByteSize = value match {
      case JsString(s) => ByteSize.fromString(s)
      case _ => deserializationError(formatError)
    }
  }
}
/**
 * Mixed into values that can be read as a storage size. Implementors provide
 * sizeIn; every other member is sugar defined on top of it.
 */
trait SizeConversion {
  /** Interprets this value as a size expressed in `unit`. */
  def sizeIn(unit: SizeUnits.Unit): ByteSize

  // short unit accessors
  def B: ByteSize = sizeIn(SizeUnits.BYTE)
  def KB: ByteSize = sizeIn(SizeUnits.KB)
  def MB: ByteSize = sizeIn(SizeUnits.MB)
  def GB: ByteSize = sizeIn(SizeUnits.GB)

  // spelled-out aliases of the short accessors
  def bytes: ByteSize = B
  def kilobytes: ByteSize = KB
  def megabytes: ByteSize = MB
  def gigabytes: ByteSize = GB

  def sizeInBytes: ByteSize = sizeIn(SizeUnits.BYTE)
}
| style95/openwhisk | common/scala/src/main/scala/org/apache/openwhisk/core/entity/Size.scala | Scala | apache-2.0 | 5,797 |
def f(i: Int) = {} // IDE resolve-test fixture target: one Int parameter; the call below deliberately supplies two
println(/* offset: 4, applicable: false */ f(_: Int, _: Int)) | ilinum/intellij-scala | testdata/resolve2/function/partial/TwoToOne.scala | Scala | apache-2.0 | 81 |
package net.revenj
import java.util
/**
 * An immutable dot-separated label path (e.g. "a.b.c"); `value` is the raw
 * string and `parts` its pre-split labels. Construct via TreePath.create.
 */
final class TreePath private(private val value: String, private val parts: Array[String]) {
  override def hashCode: Int = value.hashCode
  // NOTE(review): equality also holds against a plain String with the same
  // value, which is asymmetric ("a.b" == path is false while path == "a.b"
  // is true) -- confirm callers rely on this before tightening it.
  override def equals(obj: Any): Boolean = obj.isInstanceOf[TreePath] && obj.asInstanceOf[TreePath].value == this.value || obj.isInstanceOf[String] && obj == this.value
  override def toString: String = value
  /** Concatenates `other` below this path; if either side is empty (or other is null) the non-empty side is returned. */
  def plus(other: TreePath): TreePath = {
    if (other == null) this
    else if (this.value.length == 0) other
    else if (other.value.length == 0) this
    else {
      // copy this.parts and append other.parts behind it
      val values = util.Arrays.copyOf(this.parts, this.parts.length + other.parts.length)
      var i = 0
      while (i < other.parts.length) {
        values(this.parts.length + i) = other.parts(i)
        i += 1
      }
      new TreePath(this.value + "." + other.value, values)
    }
  }
  /** True when this path is a (non-strict) prefix of `other`; false for null. */
  def isAncestor(other: TreePath): Boolean = {
    if (other == null) false
    else if (this.value == other.value) true
    else this.parts.length < other.parts.length && TreePath.compare(this.parts, other.parts, this.parts.length)
  }
  /** True when `other` is a (non-strict) prefix of this path; false for null. */
  def isDescendant(other: TreePath): Boolean = {
    if (other == null) false
    else if (this.value == other.value) true
    else this.parts.length > other.parts.length && TreePath.compare(this.parts, other.parts, other.parts.length)
  }
}
object TreePath {
  /** The empty path: no labels, empty string value. */
  val Empty: TreePath = new TreePath("", new Array[String](0))

  /**
   * Builds a TreePath from a dot-separated string; null or "" yield Empty.
   * Every label must consist exclusively of letters and digits.
   */
  def create(value: String): TreePath =
    if (value == null || value.isEmpty) Empty
    else {
      val parts = value.split("\\.")
      checkParts(parts)
      new TreePath(value, parts)
    }

  // Rejects labels containing anything other than letters and digits.
  private def checkParts(parts: Array[String]): Unit =
    for (part <- parts; ch <- part) {
      if (!Character.isLetterOrDigit(ch))
        throw new IllegalArgumentException("Invalid value for part: " + part + ". Only letters and digits allowed for labels")
    }

  // True when the first `count` labels of both arrays match pairwise.
  private def compare(left: Array[String], right: Array[String], count: Int): Boolean = {
    var i = 0
    while (i < count && left(i) == right(i)) {
      i += 1
    }
    i == count
  }
}
| ngs-doo/revenj | scala/revenj-core/src/main/scala/net/revenj/TreePath.scala | Scala | bsd-3-clause | 2,299 |
package blended.itestsupport.jmx
import javax.management.remote.JMXServiceURL
/** Supplies the JMXServiceURL used to reach a remote MBean server. */
trait JMXUrlProvider {
  def serviceUrl : JMXServiceURL
}
/**
 * JMX URL provider for a Karaf-style RMI connector, producing
 * service:jmx:rmi:///jndi/rmi://host:port/jmxrmi.
 *
 * @param host host of the RMI registry
 * @param port port of the RMI registry
 */
case class KarafJMXUrlProvider(host: String = "localhost", port: Integer = 1099) extends JMXUrlProvider {
  def withHost(h: String) = copy( host = h )
  // NOTE: misleadingly named historical overload -- it changes the PORT, not
  // the host. Kept (delegating) for source compatibility; prefer withPort.
  def withHost(p: Int) = withPort(p)
  /** Returns a copy pointing at the given registry port. */
  def withPort(p: Int) = copy( port = p )
  override def serviceUrl =
    new JMXServiceURL(s"service:jmx:rmi:///jndi/rmi://$host:$port/jmxrmi")
} | woq-blended/blended | blended.itestsupport/src/main/scala/blended/itestsupport/jmx/JMXUrlProvider.scala | Scala | apache-2.0 | 441 |
/**
* Copyright (c) 2014 MongoDB, Inc.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
* For questions and comments about this product, please see the project page at:
*
* https://github.com/mongodb/mongo-scala-driver
*
*/
import sbt._
import sbt.Keys._
import sbt.Def.Initialize
import scala.xml.NodeBuffer
object Publish {
  // sbt setting block enabling Maven-style publication to Sonatype OSS.
  // NOTE: `<<=` is the sbt 0.13 dependent-settings operator.
  lazy val settings = Seq(
    crossPaths := false,
    pomExtra := driverPomExtra,
    publishTo <<= sonatypePublishTo,
    credentials += Credentials(Path.userHome / ".ivy2" / ".credentials"),
    pomIncludeRepository := { x => false },
    publishMavenStyle := true,
    publishArtifact in Test := false
  )
  // Picks the Sonatype snapshots or staging repository based on the version suffix.
  def sonatypePublishTo: Initialize[Option[Resolver]] = {
    version { v: String =>
      val nexus = "https://oss.sonatype.org/"
      if (v.trim.endsWith("SNAPSHOT"))
        Some("snapshots" at nexus + "content/repositories/snapshots")
      else
        Some("releases" at nexus + "service/local/staging/deploy/maven2")
    }
  }
  // Extra POM sections (url/license/scm/developers) required by Sonatype, as scala XML literals.
  def driverPomExtra: NodeBuffer = {
    <url>http://github.com/mongodb/mongo-scala-driver</url>
    <licenses>
      <license>
        <name>Apache 2</name>
        <url>http://www.apache.org/licenses/LICENSE-2.0.html</url>
        <distribution>repo</distribution>
      </license>
    </licenses>
    <scm>
      <url>git@github.com:mongodb/mongo-scala-driver.git</url>
      <connection>scm:git:git@github.com:mongodb/mongo-scala-driver.git</connection>
    </scm>
    <developers>
      <developer>
        <id>ross</id>
        <name>Ross Lawley</name>
        <url>http://rosslawley.co.uk</url>
      </developer>
    </developers>
  }
}
| antonnik/code-classifier | naive_bayes/resources/scala/Publish.scala | Scala | apache-2.0 | 2,384 |
package lila.tv
import akka.actor._
import com.typesafe.config.Config
import lila.common.PimpedConfig._
import scala.collection.JavaConversions._
import scala.concurrent.duration._
final class Env(
    config: Config,
    db: lila.db.Env,
    hub: lila.hub.Env,
    lightUser: String => Option[lila.common.LightUser],
    system: ActorSystem,
    scheduler: lila.common.Scheduler,
    isProd: Boolean) {
  // polling intervals read from the "tv" config section
  private val FeaturedSelect = config duration "featured.select"
  private val StreamingSearch = config duration "streaming.search"
  lazy val tv = new Tv(tvActor)
  private val tvActor =
    system.actorOf(
      Props(classOf[TvActor], hub.actor.renderer, hub.socket.round, lightUser),
      name = "tv")
  private lazy val streaming = new Streaming(
    system = system,
    renderer = hub.actor.renderer,
    streamerList = streamerList)
  // streamer list backed by the "flag" mongo collection, document _id "streamer"
  lazy val streamerList = new StreamerList(new {
    import reactivemongo.bson._
    private val coll = db("flag")
    def get = coll.find(BSONDocument("_id" -> "streamer")).one[BSONDocument].map {
      ~_.flatMap(_.getAs[String]("text"))
    }
    def set(text: String) =
      coll.update(BSONDocument("_id" -> "streamer"), BSONDocument("text" -> text), upsert = true).void
  })
  // membership test against the cached set of streamer ids (1 minute TTL)
  object isStreamer {
    private val cache = lila.memo.MixedCache.single[Set[String]](
      f = streamerList.lichessIds,
      timeToLive = 1 minute,
      default = Set.empty)
    def apply(id: String) = cache get true contains id
  }
  def streamsOnAir = streaming.onAir
  // constructor-time side effects: schedule the periodic TV selection and,
  // after a 2 second delay, the recurring streaming search
  {
    import scala.concurrent.duration._
    scheduler.message(FeaturedSelect) {
      tvActor -> TvActor.Select
    }
    scheduler.once(2.seconds) {
      streaming.actor ! Streaming.Search
      scheduler.message(StreamingSearch) {
        streaming.actor -> Streaming.Search
      }
    }
  }
}
object Env {
  // Singleton wired from the global Play application environment; "tv" boot
  // is the module bootstrap helper used across lila modules.
  lazy val current = "tv" boot new Env(
    config = lila.common.PlayApp loadConfig "tv",
    db = lila.db.Env.current,
    hub = lila.hub.Env.current,
    lightUser = lila.user.Env.current.lightUser,
    system = lila.common.PlayApp.system,
    scheduler = lila.common.PlayApp.scheduler,
    isProd = lila.common.PlayApp.isProd)
}
| TangentialAlan/lila | modules/tv/src/main/Env.scala | Scala | mit | 2,166 |
package cvx
import breeze.linalg.{DenseMatrix, DenseVector, norm, sum}
import breeze.numerics.{abs, pow, sqrt}
/** Examples of objective functions (test cases).*/
object ObjectiveFunctions {

  val rng = scala.util.Random

  /** f(x) = 0.5*||x||² with gradient x and identity Hessian; unique minimum at x=0. */
  def normSquared(dim:Int) = new ObjectiveFunction(dim) {
    def valueAt(x:DenseVector[Double]) = 0.5*(x dot x)
    def gradientAt(x:DenseVector[Double]) = x
    def hessianAt(x:DenseVector[Double]) = DenseMatrix.eye[Double](dim)
  }

  /** The objective function f(x) = 0.5*||R(x-x0)||² expanded into the quadratic
    * form r + a'x + 0.5*x'Px with r = ||Rx0||²/2, a = -R'Rx0, P = R'R.
    * This one has a unique global minimum at x=x0 (when R has full rank).
    */
  def quadraticObjectiveFunction(x0:DenseVector[Double],R:DenseMatrix[Double]):QuadraticObjectiveFunction = {
    val dim=x0.length
    assert(R.rows==dim,"\\nDimension mismatch R.rows = "+R.rows+" not equal to x0.length = "+dim+".\\n")
    val Rx0 = R*x0
    val norm_Rx0 = norm(Rx0)
    val r = norm_Rx0*norm_Rx0/2
    val a:DenseVector[Double] = -R.t*Rx0
    val P:DenseMatrix[Double] = R.t*R
    QuadraticObjectiveFunction(dim,r,a,P)
  }

  /** The objective function f(x) = 0.5*||M(x-x0)||² where the matrix M has
    * entries uniformly random in [-1,1].
    * This one has a unique global minimum at x=x0.
    *
    * NOTE: the parameter R is IGNORED; the original implementation shadowed it
    * with a freshly generated random matrix, and that behavior is preserved.
    * The unused parameter is kept only so existing callers keep compiling.
    */
  def randomQuadraticObjectiveFunction(x0:DenseVector[Double],R:DenseMatrix[Double]):QuadraticObjectiveFunction = {
    val dim = x0.length
    // deliberately not the parameter R: this function generates its own matrix
    val randomR = DenseMatrix.tabulate[Double](dim,dim)((i,j) => -1+2*rng.nextDouble())
    quadraticObjectiveFunction(x0,randomR)
  }

  /** The objective function f(x)=0.5*(||Ax-b||² + delta*||A||_F*||x||²) up to
    * the constant term ||b||²/2, which is dropped (r = 0.0 below), i.e. a
    * Tikhonov-regularized least squares residual.
    *
    * @param A an nxm matrix
    * @param b a vector of length n
    * @param delta regularization weight; it is scaled by the Frobenius norm of A
    */
  def regularizedEquationResidual(
    A:DenseMatrix[Double],b:DenseVector[Double], delta:Double
  ):ObjectiveFunction = {
    val dim = A.cols
    val norm_A = sqrt(sum(A:*A))   // Frobenius norm of A
    val I = DenseMatrix.eye[Double](dim)
    val P = A.t*A + I*(delta*norm_A)
    val a = -A.t*b
    val r = 0.0
    QuadraticObjectiveFunction(dim,r,a,P)
  }

  /** The L_p norm f(x)=||x||_p raised to power p, i.e.
    * $f(x)=\\sum|x_j|^^p$.
    *
    * @param p must be >= 2 to ensure sufficient differentiability.
    */
  def p_norm_p(dim:Int,p:Double):ObjectiveFunction = new ObjectiveFunction(dim) {
    assert(p>=2,"\\np-norm needs p>=2 but p="+p+"\\n")
    // sign with a small dead zone so the derivative formulas stay finite at 0
    def sgn(u:Double):Double = if(abs(u)<1e-14) 0 else if (u>0) 1.0 else -1.0
    def valueAt(x:DenseVector[Double]):Double = sum(pow(abs(x),p))
    def gradientAt(x:DenseVector[Double]):DenseVector[Double] =
      DenseVector.tabulate[Double](dim)(j => {val s=sgn(x(j)); s*p*pow(s*x(j),p-1)})
    def hessianAt(x:DenseVector[Double]):DenseMatrix[Double] =
      DenseMatrix.tabulate[Double](dim,dim)(
        (i,j) => if(i==j) p*(p-1)*pow(abs(x(i)),p-2) else 0.0
      )
  }
} | spyqqqdia/cvx | src/main/scala/cvx/ObjectiveFunctions.scala | Scala | mit | 2,747 |
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.index.utils
import java.util.concurrent.TimeUnit
import java.util.concurrent.locks.{Lock, ReentrantLock}
/**
 * Provides per-key mutual exclusion; concrete implementations may coordinate
 * the lock across processes (the key is described as a zookeeper path).
 */
trait DistributedLocking {
  /**
   * Gets and acquires a distributed lock based on the key.
   * Make sure that you 'release' the lock in a finally block.
   *
   * @param key key to lock on - equivalent to a path in zookeeper
   * @return the lock
   */
  protected def acquireDistributedLock(key: String): Releasable
  /**
   * Gets and acquires a distributed lock based on the key.
   * Make sure that you 'release' the lock in a finally block.
   *
   * @param key key to lock on - equivalent to a path in zookeeper
   * @param timeOut how long to wait to acquire the lock, in millis
   * @return the lock, if obtained
   */
  protected def acquireDistributedLock(key: String, timeOut: Long): Option[Releasable]
}
/**
 * In-process implementation of DistributedLocking: keys map to JVM-wide
 * ReentrantLocks, so exclusion only holds within this process.
 */
trait LocalLocking extends DistributedLocking {

  import LocalLocking.locks

  // Look up (or lazily register) the single in-process lock for this key.
  private def lockFor(key: String) =
    locks.synchronized(locks.getOrElseUpdate(key, new ReentrantLock()))

  override protected def acquireDistributedLock(key: String): Releasable = {
    val lock = lockFor(key)
    lock.lock()
    Releasable(lock)
  }

  protected def acquireDistributedLock(key: String, timeOut: Long): Option[Releasable] = {
    val lock = lockFor(key)
    if (lock.tryLock(timeOut, TimeUnit.MILLISECONDS)) Some(Releasable(lock)) else None
  }
}
object LocalLocking {
  // process-wide registry of per-key locks, shared by every LocalLocking mixer;
  // entries are never evicted
  private val locks = scala.collection.mutable.Map.empty[String, Lock]
}
/** Handle for releasing a previously acquired lock. */
trait Releasable {
  def release(): Unit
}
object Releasable {
  /** Wraps a j.u.c. Lock so that release() simply unlocks it. */
  def apply(lock: Lock): Releasable = new Releasable { override def release(): Unit = lock.unlock() }
}
| nagavallia/geomesa | geomesa-index-api/src/main/scala/org/locationtech/geomesa/index/utils/DistributedLocking.scala | Scala | apache-2.0 | 2,156 |
package zzz.akka.avionics
import akka.actor.{Props, Actor, ActorLogging}
import zzz.akka.avionics.Pilots.{Controls, CoPilotReference}
//import zzz.akka.avionics.LeadFlightAttendant
import scala.concurrent.Await
import zzz.akka.avionics.IsolatedLifeCycleSupervisor.WaitForStart
import akka.util.Timeout
import akka.pattern.ask
import java.util.concurrent.TimeUnit
import scala.concurrent.duration.Duration
object Plane {
  // Message protocol understood by the Plane actor.
  // Returns the control surface to the Actor that
  // asks for them
  case object GiveMeControl
  // presumably signals that the current pilot no longer holds the controls -- confirm in Plane.receive
  case object LostControl
  // presumably asks the plane for a reference to its co-pilot -- confirm in Plane.receive
  case object RequestCoPilot
}
// We want the Plane to own the Altimeter and we're going to
// do that by passing in a specific factory we can use to
// build the Altimeter
// Root actor of the aircraft. It builds and supervises two sub-trees -- the
// "Equipment" (altimeter, heading indicator, control surfaces, autopilot,
// weather behaviour) and the "Pilots" crew -- and brokers requests for the
// controls and the co-pilot. Concrete child factories come from the mixed-in
// provider traits declared in the self-type, which keeps this class testable.
class Plane extends Actor with ActorLogging {
  this: AltimeterProvider
    with HeadingIndicatorProvider
    with PilotProvider
    with WeatherBehaviourProvider
    with LeadFlightAttendantProvider =>
  import Altimeter._
  import Plane._
  import EventSource._
  // Root of the flight-crew configuration keys.
  val cfgstr = "zzz.akka.avionics.flightcrew"
  // override val newAltimeter = context.actorOf(
  // Props(Altimeter()), "Altimeter")
  // val controls = context.actorOf(Props(new ControlSurfaces(newAltimeter), "ControlSurfaces")
  val config = context.system.settings.config
  val pilotName = config.getString(s"$cfgstr.pilotName")
  val copilotName = config.getString(s"$cfgstr.copilotName")
  val attendantName = config.getString(s"$cfgstr.leadAttendantName")
  // override val newPilot = context.actorOf(Props[Pilot],
  // config.getString(s"$cfgstr.pilotName"))
  // override val newCoPilot = context.actorOf(Props[CoPilot],
  // config.getString(s"$cfgstr.copilotName"))
  // override val newAutopilot = context.actorOf(
  // Props[AutoPilot], "AutoPilot")
  // Lead flight attendant created directly under the Plane (default supervision).
  val flightAttendant = context.actorOf(
    Props(LeadFlightAttendant()),
    config.getString(s"$cfgstr.leadAttendantName"))
  // Helps us look up Actors within the "Pilots" Supervisor
  def actorForPilots(name: String) =
    context.actorFor("Pilots/" + name)
  // Starts both sub-trees and then bootstraps message flow: the Plane listens to
  // Altimeter events and tells the pilots and autopilot that they are ReadyToGo.
  override def preStart() {
    import EventSource.RegisterListener
    import Pilots.ReadyToGo
    // Get our children going. Order is important here.
    startEquipment()
    startPeople()
    // Bootstrap the system
    actorForControls("Altimeter") ! RegisterListener(self)
    actorForPilots(pilotName) ! ReadyToGo
    actorForPilots(copilotName) ! ReadyToGo
    actorForControls("AutoPilot") ! ReadyToGo
  }
  // There's going to be a couple of asks below and
  // a timeout is necessary for that.
  implicit val askTimeout = Timeout(1, TimeUnit.SECONDS)
  // Builds the "Equipment" sub-tree under a resume-on-failure supervisor and
  // blocks (up to 1 second) until the supervisor reports all children started.
  def startEquipment() {
    val controls = context.actorOf(
      Props(new IsolatedResumeSupervisor
        with OneForOneStrategyFactory {
        def childStarter() {
          val alt = context.actorOf(
            Props(newAltimeter), "Altimeter")
          val head = context.actorOf(
            Props(newHeadingIndicator), "HeadingIndicator")
          // These children get implicitly added to the
          // hierarchy
          context.actorOf(Props(newAutoPilot(self, context.parent)), "AutoPilot")
          context.actorOf(Props(new ControlSurfaces(self, alt, head)),
            "ControlSurfaces")
          context.actorOf(newWeatherBehaviour(alt, head),
            "WeatherBehaviour")
        }
      }), "Equipment")
    Await.result(controls ? WaitForStart, Duration(1, TimeUnit.SECONDS))
  }
  // Helps us look up Actors within the "Equipment" Supervisor
  def actorForControls(name: String) =
    context.actorFor("Equipment/" + name)
  // Builds the "Pilots" sub-tree under a stop-on-failure supervisor, handing the
  // pilots references to the already-started equipment, then blocks (up to 1
  // second) until they have started.
  def startPeople() {
    val plane = self
    // Note how we depend on the Actor structure beneath
    // us here by using actorFor(). This should be
    // resilient to change, since we'll probably be the
    // ones making the changes
    val heading = actorForControls("HeadingIndicator")
    val altimeter = actorForControls("Altimeter")
    val controls = actorForControls("ControlSurfaces")
    val autopilot = actorForControls("AutoPilot")
    val people = context.actorOf(
      Props(new IsolatedStopSupervisor
        with OneForOneStrategyFactory {
        def childStarter() {
          // These children get implicitly added
          // to the hierarchy
          context.actorOf(
            Props(newCoPilot(plane, autopilot, controls)),
            copilotName)
          context.actorOf(
            Props(newPilot(plane, autopilot,
              heading, altimeter)),
            pilotName)
        }
      }), "Pilots")
    // Use the default strategy here, which
    // restarts indefinitely
    context.actorOf(Props(newLeadFlightAttendant), attendantName)
    Await.result(people ? WaitForStart, Duration(1, TimeUnit.SECONDS))
  }
  // val leadAttendantName = context.system.settings.config.getString(
  // "zzz.akka.avionics.flightcrew.leadAttendantName")
  // Altitude updates are logged; control and co-pilot requests are answered with
  // the relevant actor references.
  def receive = {
    case AltitudeUpdate(altitude) =>
      log info(s"Altitude is now: $altitude")
    case GiveMeControl =>
      val controls = actorForControls("ControlSurfaces")
      log info ("Plane giving control.")
      sender ! Controls(controls)
    case RequestCoPilot =>
      val coPilot = actorForPilots(copilotName)
      sender ! CoPilotReference(coPilot)
  }
}
| kevyin/akka-book-wyatt | src/main/scala/zzz/akka/avionics/Plane.scala | Scala | mit | 5,227 |
package com.datawizards.sparklocal.rdd
import com.datawizards.sparklocal.SparkLocalBaseTest
import org.apache.spark.HashPartitioner
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
// Verifies that RDDAPI.intersection behaves consistently for the local Scala
// backend and the Spark backend, including the overloads taking a partition
// count or a custom Partitioner, and that mixing the two backends in a single
// intersection yields the same result as a pure-Scala run.
class IntersectionTest extends SparkLocalBaseTest {
  // NOTE(review): this Scala-backend case uses the unsorted assertion while the
  // Spark cases use the *WithSorted variant -- presumably the local backend
  // preserves element order deterministically; confirm.
  test("Intersection result - Scala") {
    assertRDDOperationResult(
      RDDAPI(Seq(1,2,3,4)) intersection RDDAPI(Seq(5,4,3))
    ) {
      Array(3,4)
    }
  }
  test("Intersection result - Spark") {
    assertRDDOperationResultWithSorted(
      RDDAPI(sc.parallelize(Seq(1,2,3,4))) intersection RDDAPI(sc.parallelize(Seq(5,4,3)))
    ) {
      Array(3,4)
    }
  }
  test("Intersection result with partitions - Spark") {
    assertRDDOperationResultWithSorted(
      RDDAPI(sc.parallelize(Seq(1,2,3,4))).intersection(RDDAPI(sc.parallelize(Seq(5,4,3))), 2)
    ) {
      Array(3,4)
    }
  }
  test("Intersection result with partitioner - Spark") {
    assertRDDOperationResultWithSorted(
      RDDAPI(sc.parallelize(Seq(1,2,3,4))).intersection(RDDAPI(sc.parallelize(Seq(5,4,3))), new HashPartitioner(2))
    ) {
      Array(3,4)
    }
  }
  // The "equal" cases below check that a mixed-backend intersection matches the
  // result the assertion helper computes for the plain Scala collection.
  test("Intersection equal - scala intersection spark") {
    val r2 = RDDAPI(Seq(3,4,5))
    assertRDDOperationReturnsSameResultWithSorted(Seq(1,2,3,4)){
      ds => ds intersection r2
    }
  }
  test("Intersection equal - spark intersection scala") {
    val r2 = RDDAPI(Seq(3,4,5))
    assertRDDOperationReturnsSameResultWithSorted(Seq(1,2,3,4)) {
      ds => r2 intersection ds
    }
  }
  test("Intersection equal with partitions - scala intersection spark") {
    val r2 = RDDAPI(Seq(3,4,5))
    assertRDDOperationReturnsSameResultWithSorted(Seq(1,2,3,4)){
      ds => ds.intersection(r2,2)
    }
  }
  test("Intersection equal with partitions - spark intersection scala") {
    val r2 = RDDAPI(Seq(3,4,5))
    assertRDDOperationReturnsSameResultWithSorted(Seq(1,2,3,4)){
      ds => r2.intersection(ds,2)
    }
  }
  test("Intersection equal with partitioner - scala intersection spark") {
    val r2 = RDDAPI(Seq(3,4,5))
    assertRDDOperationReturnsSameResultWithSorted(Seq(1,2,3,4)){
      ds => ds.intersection(r2,new HashPartitioner(2))
    }
  }
  test("Intersection equal with partitioner - spark intersection scala") {
    val r2 = RDDAPI(Seq(3,4,5))
    assertRDDOperationReturnsSameResultWithSorted(Seq(1,2,3,4)){
      ds => r2.intersection(ds,new HashPartitioner(2))
    }
  }
}
| piotr-kalanski/spark-local | src/test/scala/com/datawizards/sparklocal/rdd/IntersectionTest.scala | Scala | apache-2.0 | 2,442 |
package misc.examples
/** Example domain value with a single maskable string field. */
case class Object1(field: String)
/** Example domain value with a list of maskable string fields. */
case class Object2(fields: List[String])
/** Type class for values whose string content can be rewritten by a masking function `f`. */
trait CanBeMasked[T] {
  def mask(o: T, f: String ⇒ String): T
}
object CanBeMasked {
  /** Instance for [[Object1]]: rewrites its single field. */
  implicit object O1 extends CanBeMasked[Object1] {
    def mask(o: Object1, f: String => String): Object1 =
      o.copy(field = f(o.field))
  }
  /** Instance for [[Object2]]: rewrites every element of its field list. */
  implicit object O2 extends CanBeMasked[Object2] {
    def mask(o: Object2, f: String => String): Object2 =
      o.copy(fields = o.fields map f)
  }
}
trait MaskHelper {
  /** Applies the masking function `f` to `o` via its [[CanBeMasked]] instance. */
  def masked[T: CanBeMasked](o: T, f: String => String): T = {
    val instance = implicitly[CanBeMasked[T]]
    instance.mask(o, f)
  }
}
| 4e6/sandbox | scala/core/src/main/scala/misc/examples/Typeclass.scala | Scala | mit | 582 |
package truerss.dto
/** One page of a paged result set: the overall item count plus this page's items. */
case class Page[T](total: Int, resources: Iterable[T])

object Page {
  /** A page holding no items, with a total of zero. */
  def empty[T]: Page[T] = Page(total = 0, resources = Iterable.empty[T])
}
| truerss/truerss | dtos/src/main/scala/truerss/dto/Page.scala | Scala | mit | 156 |
/**
* Copyright (C) 2009-2011 the original author or authors.
* See the notice.md file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.scalate
import java.io.File
import Asserts._
// Exercises the core TemplateEngine: loading templates from files and strings,
// explicit bindings vs in-template attribute declarations, missing-resource
// errors, and escaping of SSP directives with a leading backslash.
class TemplateEngineTest extends FunSuiteSupport {
  val engine = new TemplateEngine
  // Compiled template artifacts are written under the build's target directory.
  engine.workingDirectory = new File(baseDir, "target/test-data/TemplateEngineTest")
  test("load file template") {
    val template = engine.load(new File(baseDir, "src/test/resources/simple.ssp"))
    val output = engine.layout("foo0.ssp", template).trim
    assertContains(output, "1 + 2 = 3")
  }
  test("string template with custom bindings") {
    val source = "hello ${name}"
    // The binding is declared programmatically rather than inside the template.
    val template = engine.compileSsp(source, List(Binding("name", "String")))
    val output = engine.layout("foo1.ssp", template, Map("name" -> "James"))
    expect("hello James") {output}
  }
  test("string template with attributes") {
    // Here the attribute is declared in the template text itself.
    val source = "<%@ val name: String %> hello ${name}"
    val template = engine.compileSsp(source)
    val output = engine.layout("foo2.ssp", template, Map("name" -> "Hiram"))
    expect("hello Hiram") {output.trim}
  }
  test("load template") {
    val template = engine.compileSsp("""
    <%@ val name: String %>
    Hello ${name}!
    """)
    val output = engine.layout("foo3.ssp", template, Map("name" -> "James")).trim
    assertContains(output, "Hello James")
    debug("template generated: " + output)
  }
  test("throws ResourceNotFoundException if template file does not exist") {
    intercept[ResourceNotFoundException] {
      engine.load("does-not-exist.ssp", Nil)
    }
  }
  test("escape template") {
    // A leading backslash should render directives/expressions as literal text.
    val templateText = """<%@ val t: Class[_] %>
    <%@ val name: String = "it" %>
    \<%@ val ${name} : ${t.getName} %>
    <p>hello \${${name}} how are you?</p>
    """
    val template = engine.compileSsp(templateText)
    val output = engine.layout("foo4.ssp", template, Map("t" -> classOf[String])).trim
    val lines = output.split('\n')
    for (line <- lines) {
      debug("line: " + line)
    }
    expect("<%@ val it : java.lang.String %>") {lines(0)}
    expect("<p>hello ${it} how are you?</p>") {lines(1)}
  }
}
| janurag/scalate | scalate-core/src/test/scala/org/fusesource/scalate/TemplateEngineTest.scala | Scala | apache-2.0 | 2,750 |
package org.genericConfig.admin.client.old.component
import org.genericConfig.admin.client.views.html.HtmlElementIds
import org.scalajs.dom.raw.WebSocket
import org.scalajs.jquery.jQuery
import util.CommonFunction
// Legacy Scala.js view: renders the "create component" form into the page and
// wires its buttons over a websocket. Much of the original save/update logic is
// commented out -- presumably superseded; TODO confirm before reviving/deleting.
class Component(websocket: WebSocket) extends CommonFunction {
  // Replaces the page's main area with the component-creation form and hooks up
  // the "Konfiguration" button. The save handler is currently disabled (below).
  // NOTE(review): the "<dev>" tag looks like a typo for "<div>" -- verify before
  // changing, since it is part of the emitted markup.
  def addComponent(stepId: String, userId: String) = {
// println("stepId" + stepId)
    cleanPage
    val htmlMain =
      "<dev id='main' class='main'>" +
        "<p>Neuen Komponent erstellen</p>" +
        drawInputField(HtmlElementIds.inputStepNameToShowHtml, "nameToShow") +
        drawButton(HtmlElementIds.addStepHtml, "Speichern") +
        drawButton(HtmlElementIds.getConfigsHtml, "Konfiguration") +
        "</dev>"
    drawNewMain(htmlMain)
// jQuery(HtmlElementIds.addStepJQuery).on("click", () => saveComponent(stepId))
    jQuery(HtmlElementIds.getConfigsJQuery).on("click", () => getConfigs(userId))
  }
  // private def saveComponent(stepId: String) = {
  //// println(stepId)
  // val nameToShow: Dynamic = jQuery(HtmlElementIds.inputStepNameToShowJQuery).value()
  //
  // val jsonComponentOut = Json.toJson(JsonComponentIn(
  // json = JsonNames.ADD_COMPONENT,
  // params = JsonComponentParams(
  // stepId = Some(stepId),
  // nameToShow = Some(nameToShow.toString),
  // kind = Some("immutable")
  // )
  // )
  // ).toString
  //
  // println("OUT -> " + jsonComponentOut)
  // websocket.send(jsonComponentOut)
  // }
  // NOTE(review): the payload sent here is an empty string because the JSON
  // construction is commented out -- the server most likely ignores or rejects
  // this message; confirm the intended request format.
  private def getConfigs(userId: String) = {
    val jsonGetConfigs: String = ""
    // Json.toJson(JsonGetConfigsIn(
    // params = JsonGetConfigsParams(
    // userId
    // )
    // )).toString
    websocket.send(jsonGetConfigs)
  }
  // def updateStatus(jsonComponentOut: JsonComponentOut) = {
  // val htmlHeader =
  // s"<dev id='status' class='status'>" +
  // jsonComponentOut.result.status.addComponent.get.status +
  // " , " +
  // jsonComponentOut.result.status.appendComponent.get.status +
  // " ," +
  // jsonComponentOut.result.status.common.get.status +
  // "</dev>"
  //
  // jQuery("#status").remove()
  // jQuery(htmlHeader).appendTo(jQuery("header"))
  // }
}
| gennadij/admin | client/src/main/scala/org/genericConfig/admin/client/old/component/Component.scala | Scala | apache-2.0 | 2,185 |
package connectors.misp
import java.util.Date
import models.Artifact
import org.elastic4play.ErrorWithObject
import org.elastic4play.services.Attachment
import org.elastic4play.utils.Hash
/** Payload of a MISP artifact: inline string data, a locally stored attachment, or an attachment still held on the remote MISP server. */
sealed trait ArtifactData
/** Observable value carried inline as a plain string. */
case class SimpleArtifactData(data: String) extends ArtifactData
/** Artifact backed by a stored attachment; metadata accessors delegate to the wrapped [[Attachment]]. */
case class AttachmentArtifact(attachment: Attachment) extends ArtifactData {
  def name: String = attachment.name
  def hashes: Seq[Hash] = attachment.hashes
  def size: Long = attachment.size
  def contentType: String = attachment.contentType
  def id: String = attachment.id
}
/** Attachment that lives on the remote MISP instance, identified by filename, reference and type. */
case class RemoteAttachmentArtifact(filename: String, reference: String, tpe: String) extends ArtifactData
/** A MISP event as tracked by the synchronization job (alert metadata plus sync state). */
case class MispAlert(
    source: String,
    sourceRef: String,
    date: Date,
    lastSyncDate: Date,
    isPublished: Boolean,
    extendsUuid: Option[String],
    title: String,
    description: String,
    severity: Long,
    tags: Seq[String],
    tlp: Long,
    caseTemplate: String
)
/** A single attribute (observable) of a MISP event. */
case class MispAttribute(id: String, category: String, tpe: String, date: Date, comment: String, value: String, tags: Seq[String], toIds: Boolean)
/** An artifact prepared for export to MISP: MISP type/category/TLP plus either a string value or an attachment. */
case class ExportedMispAttribute(
    artifact: Artifact,
    tpe: String,
    category: String,
    tlp: Long,
    value: Either[String, Attachment],
    comment: Option[String]
)
/** An artifact imported from MISP prior to persistence. */
case class MispArtifact(value: ArtifactData, dataType: String, message: String, tlp: Long, tags: Seq[String], startDate: Date, ioc: Boolean)
/** Raised when exporting an artifact to MISP fails; carries the artifact's attributes for context. */
case class MispExportError(message: String, artifact: Artifact) extends ErrorWithObject(message, artifact.attributes)
| CERT-BDF/TheHive | thehive-misp/app/connectors/misp/MispModel.scala | Scala | agpl-3.0 | 1,581 |
/*
* Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package scalaguide.tests.specs2
import org.specs2.mutable.Specification
import play.api.inject.guice.GuiceApplicationBuilder
import play.api.libs.ws._
import play.api.mvc._
import play.api.routing._
import play.api.routing.sird._
// #scalafunctionaltest-imports
import play.api.test._
// ###replace: import play.api.test.Helpers._
import play.api.test.Helpers.{GET => GET_REQUEST, _}
// #scalafunctionaltest-imports
import play.api.Application
// Base specification for the examples below: specs2 plus Future-await helpers
// and Play's Results for building responses in tests.
trait ExampleSpecification extends Specification
  with DefaultAwaitTimeout
  with FutureAwaits
  with Results
// Sample functional tests for the Play documentation: routing, template
// rendering, in-memory DB apps, browser tests (WithBrowser), server tests
// (WithServer), WS calls and i18n messages. The "// #..." marker comments
// delimit snippets included verbatim in the docs -- do not remove them.
class ScalaFunctionalTestSpec extends ExampleSpecification {
  // lie and make this look like a DB model.
  case class Computer(name: String, introduced: Option[String])
  object Computer {
    def findById(id: Int): Option[Computer] = Some(Computer("Macintosh", Some("1984-01-24")))
  }
  "Scala Functional Test" should {
    // #scalafunctionaltest-application
    val application: Application = GuiceApplicationBuilder().build()
    // #scalafunctionaltest-application
    // App with an ad-hoc router answering GET /Bob with an HTML greeting.
    val applicationWithRouter = GuiceApplicationBuilder().router(Router.from {
      case GET(p"/Bob") => Action {
        Ok("Hello Bob") as "text/html; charset=utf-8"
      }
    }).build()
    // #scalafunctionaltest-respondtoroute
    "respond to the index Action" in new WithApplication(applicationWithRouter) {
      // ###replace: val Some(result) = route(app, FakeRequest(GET, "/Bob"))
      val Some(result) = route(app, FakeRequest(GET_REQUEST, "/Bob"))
      status(result) must equalTo(OK)
      contentType(result) must beSome("text/html")
      charset(result) must beSome("utf-8")
      contentAsString(result) must contain("Hello Bob")
    }
    // #scalafunctionaltest-respondtoroute
    // #scalafunctionaltest-testview
    "render index template" in new WithApplication {
      val html = views.html.index("Coco")
      contentAsString(html) must contain("Hello Coco")
    }
    // #scalafunctionaltest-testview
    // #scalafunctionaltest-testmodel
    def appWithMemoryDatabase = new GuiceApplicationBuilder().configure(inMemoryDatabase("test")).build()
    "run an application" in new WithApplication(appWithMemoryDatabase) {
      val Some(macintosh) = Computer.findById(21)
      macintosh.name must equalTo("Macintosh")
      macintosh.introduced must beSome.which(_ must beEqualTo("1984-01-24"))
    }
    // #scalafunctionaltest-testmodel
    // #scalafunctionaltest-testwithbrowser
    def applicationWithBrowser = new GuiceApplicationBuilder().router(Router.from {
      case GET(p"/") =>
        Action {
          Ok(
            """
            |<html>
            |<body>
            | <div id="title">Hello Guest</div>
            | <a href="/login">click me</a>
            |</body>
            |</html>
            """.stripMargin) as "text/html"
        }
      case GET(p"/login") =>
        Action {
          Ok(
            """
            |<html>
            |<body>
            | <div id="title">Hello Coco</div>
            |</body>
            |</html>
            """.stripMargin) as "text/html"
        }
    }).build()
    "run in a browser" in new WithBrowser(webDriver = WebDriverFactory(HTMLUNIT), app = applicationWithBrowser) {
      browser.goTo("/")
      // Check the page
      browser.$("#title").text() must equalTo("Hello Guest")
      browser.$("a").click()
      browser.url must equalTo("login")
      browser.$("#title").text() must equalTo("Hello Coco")
    }
    // #scalafunctionaltest-testwithbrowser
    val testPort = 19001
    val myPublicAddress = s"localhost:$testPort"
    val testPaymentGatewayURL = s"http://$myPublicAddress"
    // #scalafunctionaltest-testpaymentgateway
    "test server logic" in new WithServer(app = applicationWithBrowser, port = testPort) {
      // The test payment gateway requires a callback to this server before it returns a result...
      val callbackURL = s"http://$myPublicAddress/callback"
      val ws = app.injector.instanceOf[WSClient]
      // await is from play.api.test.FutureAwaits
      val response = await(ws.url(testPaymentGatewayURL).withQueryString("callbackURL" -> callbackURL).get())
      response.status must equalTo(OK)
    }
    // #scalafunctionaltest-testpaymentgateway
    // #scalafunctionaltest-testws
    val appWithRoutes = GuiceApplicationBuilder().router(Router.from {
      case GET(p"/") => Action {
        Ok("ok")
      }
    }).build()
    "test WS logic" in new WithServer(app = appWithRoutes, port = 3333) {
      val ws = app.injector.instanceOf[WSClient]
      await(ws.url("http://localhost:3333").get()).status must equalTo(OK)
    }
    // #scalafunctionaltest-testws
    // #scalafunctionaltest-testmessages
    "messages" should {
      import play.api.i18n._
      implicit val lang = Lang("en-US")
      "provide default messages with the Java API" in new WithApplication() with Injecting {
        val javaMessagesApi = inject[play.i18n.MessagesApi]
        val msg = javaMessagesApi.get(new play.i18n.Lang(lang), "constraint.email")
        msg must ===("Email")
      }
      "provide default messages with the Scala API" in new WithApplication() with Injecting {
        val messagesApi = inject[MessagesApi]
        val msg = messagesApi("constraint.email")
        msg must ===("Email")
      }
    }
    // #scalafunctionaltest-testmessages
  }
}
| Shruti9520/playframework | documentation/manual/working/scalaGuide/main/tests/code/specs2/ScalaFunctionalTestSpec.scala | Scala | apache-2.0 | 5,472 |
package models.database.queries.auth
import com.mohiva.play.silhouette.api.LoginInfo
import com.mohiva.play.silhouette.impl.providers.OpenIDInfo
import models.database.queries.BaseQueries
import models.database.{ Row, Statement }
import play.api.libs.json.{ JsValue, Json }
import utils.DateUtils
/**
 * Database queries for Silhouette OpenID authentication info. Rows are keyed
 * by the (provider, key) pair of the LoginInfo; the OpenID attributes map is
 * persisted as pretty-printed JSON text in the "attributes" column.
 */
object OpenIdInfoQueries extends BaseQueries[OpenIDInfo] {
  override protected val tableName = "openid_info"
  override protected val columns = Seq("provider", "key", "id", "attributes", "created")
  override protected val idColumns = Seq("provider", "key")
  override protected val searchColumns = Seq("key")
  val getById = GetById
  val removeById = RemoveById
  /** Insert statement: (provider, key) followed by the serialized data columns. */
  case class CreateOpenIdInfo(l: LoginInfo, o: OpenIDInfo) extends Statement {
    override val sql = insertSql
    override val values = Seq(l.providerID, l.providerKey) ++ toDataSeq(o)
  }
  /** Update statement for an existing (provider, key) row. */
  case class UpdateOpenIdInfo(l: LoginInfo, o: OpenIDInfo) extends Statement {
    override val sql = s"update $tableName set id = ?, attributes = ?, created = ? where provider = ? and key = ?"
    // NOTE(review): this val is never used -- `values` relies on toDataSeq,
    // which serializes the attributes itself. Dead code; candidate for removal
    // (kept here because it is a public member of this case class).
    val attributes = Json.prettyPrint(Json.toJson(o.attributes))
    override val values = toDataSeq(o) ++ Seq(l.providerID, l.providerKey)
  }
  /** Rebuilds an OpenIDInfo from a row, parsing the attributes JSON back into a string map. */
  override protected def fromRow(row: Row) = {
    val id = row.as[String]("id")
    val attributesString = row.as[String]("attributes")
    val attributes = Json.parse(attributesString).as[Map[String, JsValue]].map(x => x._1 -> x._2.as[String])
    OpenIDInfo(id, attributes)
  }
  /** Serializes in column order: id, attributes-as-JSON, creation timestamp. */
  override protected def toDataSeq(o: OpenIDInfo) = {
    val attributes = Json.prettyPrint(Json.toJson(o.attributes))
    Seq(o.id, attributes, DateUtils.now)
  }
}
| agilemobiledev/boilerplay | app/models/database/queries/auth/OpenIdInfoQueries.scala | Scala | apache-2.0 | 1,645 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.api
import java.time.Duration
import java.util
import java.util.Properties
import org.apache.kafka.clients.consumer._
import org.apache.kafka.clients.producer.{ProducerConfig, ProducerRecord}
import org.apache.kafka.common.record.TimestampType
import org.apache.kafka.common.TopicPartition
import kafka.utils.{ShutdownableThread, TestUtils}
import kafka.server.{BaseRequestTest, KafkaConfig}
import org.junit.Assert._
import org.junit.Before
import scala.collection.JavaConverters._
import scala.collection.mutable.{ArrayBuffer, Buffer}
import org.apache.kafka.clients.producer.KafkaProducer
import org.apache.kafka.common.errors.WakeupException
import scala.collection.mutable
/**
* Extension point for consumer integration tests.
*/
// Shared fixture for consumer integration tests: a 3-broker cluster with a
// 2-partition "topic" plus common producer/consumer configuration defaults.
abstract class AbstractConsumerTest extends BaseRequestTest {
  val epsilon = 0.1
  override def brokerCount: Int = 3
  val topic = "topic"
  val part = 0
  val tp = new TopicPartition(topic, part)
  val part2 = 1
  val tp2 = new TopicPartition(topic, part2)
  val group = "my-test"
  val producerClientId = "ConsumerTestProducer"
  val consumerClientId = "ConsumerTestConsumer"
  val groupMaxSessionTimeoutMs = 30000L
  // Defaults applied to every producer/consumer the tests create: durable
  // writes, earliest reset, manual commits and fast metadata refresh.
  this.producerConfig.setProperty(ProducerConfig.ACKS_CONFIG, "all")
  this.producerConfig.setProperty(ProducerConfig.CLIENT_ID_CONFIG, producerClientId)
  this.consumerConfig.setProperty(ConsumerConfig.CLIENT_ID_CONFIG, consumerClientId)
  this.consumerConfig.setProperty(ConsumerConfig.GROUP_ID_CONFIG, group)
  this.consumerConfig.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")
  this.consumerConfig.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false")
  this.consumerConfig.setProperty(ConsumerConfig.METADATA_MAX_AGE_CONFIG, "100")
  // Broker-side overrides: fast shutdown, a resilient offsets topic, and wide
  // session-timeout bounds so individual tests can pick their own timeouts.
  override protected def brokerPropertyOverrides(properties: Properties): Unit = {
    properties.setProperty(KafkaConfig.ControlledShutdownEnableProp, "false") // speed up shutdown
    properties.setProperty(KafkaConfig.OffsetsTopicReplicationFactorProp, "3") // don't want to lose offset
    properties.setProperty(KafkaConfig.OffsetsTopicPartitionsProp, "1")
    properties.setProperty(KafkaConfig.GroupMinSessionTimeoutMsProp, "100") // set small enough session timeout
    properties.setProperty(KafkaConfig.GroupMaxSessionTimeoutMsProp, groupMaxSessionTimeoutMs.toString)
    properties.setProperty(KafkaConfig.GroupInitialRebalanceDelayMsProp, "10")
  }
  // Per-test setup: start the cluster, then create the shared test topic.
  @Before
  override def setUp() {
    super.setUp()
    // create the test topic with all the brokers as replicas
    createTopic(topic, 2, brokerCount)
  }
  // Rebalance listener that counts assigned/revoked callbacks so tests can
  // observe how many rebalances a consumer has gone through.
  protected class TestConsumerReassignmentListener extends ConsumerRebalanceListener {
    var callsToAssigned = 0
    var callsToRevoked = 0
    def onPartitionsAssigned(partitions: java.util.Collection[TopicPartition]) {
      info("onPartitionsAssigned called.")
      callsToAssigned += 1
    }
    def onPartitionsRevoked(partitions: java.util.Collection[TopicPartition]) {
      info("onPartitionsRevoked called.")
      callsToRevoked += 1
    }
  }
protected def createConsumerWithGroupId(groupId: String): KafkaConsumer[Array[Byte], Array[Byte]] = {
val groupOverrideConfig = new Properties
groupOverrideConfig.setProperty(ConsumerConfig.GROUP_ID_CONFIG, groupId)
createConsumer(configOverrides = groupOverrideConfig)
}
protected def sendRecords(producer: KafkaProducer[Array[Byte], Array[Byte]], numRecords: Int,
tp: TopicPartition): Seq[ProducerRecord[Array[Byte], Array[Byte]]] = {
val records = (0 until numRecords).map { i =>
val record = new ProducerRecord(tp.topic(), tp.partition(), i.toLong, s"key $i".getBytes, s"value $i".getBytes)
producer.send(record)
record
}
producer.flush()
records
}
  /**
   * Consumes exactly `numRecords` records from `consumer` and asserts that each
   * record's topic, partition, offset, key/value payload, serialized sizes and
   * (depending on `timestampType`) timestamp match what `sendRecords` produced.
   */
  protected def consumeAndVerifyRecords(consumer: Consumer[Array[Byte], Array[Byte]],
                                        numRecords: Int,
                                        startingOffset: Int,
                                        startingKeyAndValueIndex: Int = 0,
                                        startingTimestamp: Long = 0L,
                                        timestampType: TimestampType = TimestampType.CREATE_TIME,
                                        tp: TopicPartition = tp,
                                        maxPollRecords: Int = Int.MaxValue) {
    val records = consumeRecords(consumer, numRecords, maxPollRecords = maxPollRecords)
    val now = System.currentTimeMillis()
    for (i <- 0 until numRecords) {
      val record = records(i)
      val offset = startingOffset + i
      assertEquals(tp.topic, record.topic)
      assertEquals(tp.partition, record.partition)
      if (timestampType == TimestampType.CREATE_TIME) {
        // Create-time timestamps are deterministic: startingTimestamp + index.
        assertEquals(timestampType, record.timestampType)
        val timestamp = startingTimestamp + i
        assertEquals(timestamp.toLong, record.timestamp)
      } else
        // Log-append time: only a range check is possible.
        assertTrue(s"Got unexpected timestamp ${record.timestamp}. Timestamp should be between [$startingTimestamp, $now}]",
          record.timestamp >= startingTimestamp && record.timestamp <= now)
      assertEquals(offset.toLong, record.offset)
      val keyAndValueIndex = startingKeyAndValueIndex + i
      assertEquals(s"key $keyAndValueIndex", new String(record.key))
      assertEquals(s"value $keyAndValueIndex", new String(record.value))
      // this is true only because K and V are byte arrays
      assertEquals(s"key $keyAndValueIndex".length, record.serializedKeySize)
      assertEquals(s"value $keyAndValueIndex".length, record.serializedValueSize)
    }
  }
  /**
   * Polls until at least `numRecords` records have been received (failing after
   * a 60s timeout) and returns them in arrival order; also asserts that no
   * single poll returns more than `maxPollRecords`.
   */
  protected def consumeRecords[K, V](consumer: Consumer[K, V],
                                     numRecords: Int,
                                     maxPollRecords: Int = Int.MaxValue): ArrayBuffer[ConsumerRecord[K, V]] = {
    val records = new ArrayBuffer[ConsumerRecord[K, V]]
    def pollAction(polledRecords: ConsumerRecords[K, V]): Boolean = {
      assertTrue(polledRecords.asScala.size <= maxPollRecords)
      records ++= polledRecords.asScala
      records.size >= numRecords
    }
    TestUtils.pollRecordsUntilTrue(consumer, pollAction, waitTimeMs = 60000,
      msg = s"Timed out before consuming expected $numRecords records. " +
        s"The number consumed was ${records.size}.")
    records
  }
  /**
   * Issues an async offset commit (with explicit offsets when `offsetsOpt` is
   * given, otherwise committing the consumed position), transparently retrying
   * on RetriableCommitFailedException, then polls until the callback completes
   * and asserts that the commit finished without error.
   */
  protected def sendAndAwaitAsyncCommit[K, V](consumer: Consumer[K, V],
                                              offsetsOpt: Option[Map[TopicPartition, OffsetAndMetadata]] = None): Unit = {
    def sendAsyncCommit(callback: OffsetCommitCallback) = {
      offsetsOpt match {
        case Some(offsets) => consumer.commitAsync(offsets.asJava, callback)
        case None => consumer.commitAsync(callback)
      }
    }
    // Callback that resubmits the commit on retriable failures and records the
    // terminal outcome otherwise.
    class RetryCommitCallback extends OffsetCommitCallback {
      var isComplete = false
      var error: Option[Exception] = None
      override def onComplete(offsets: util.Map[TopicPartition, OffsetAndMetadata], exception: Exception): Unit = {
        exception match {
          case e: RetriableCommitFailedException =>
            sendAsyncCommit(this)
          case e =>
            isComplete = true
            error = Option(e)
        }
      }
    }
    val commitCallback = new RetryCommitCallback
    sendAsyncCommit(commitCallback)
    // Keep polling so the consumer can invoke the commit callback.
    TestUtils.pollUntilTrue(consumer, () => commitCallback.isComplete,
      "Failed to observe commit callback before timeout", waitTimeMs = 10000)
    assertEquals(None, commitCallback.error)
  }
/**
* Create 'numOfConsumersToAdd' consumers add then to the consumer group 'consumerGroup', and create corresponding
* pollers for these consumers. Wait for partition re-assignment and validate.
*
* Currently, assignment validation requires that total number of partitions is greater or equal to
* number of consumers, so subscriptions.size must be greater or equal the resulting number of consumers in the group
*
* @param numOfConsumersToAdd number of consumers to create and add to the consumer group
* @param consumerGroup current consumer group
* @param consumerPollers current consumer pollers
* @param topicsToSubscribe topics to which new consumers will subscribe to
* @param subscriptions set of all topic partitions
*/
  def addConsumersToGroupAndWaitForGroupAssignment(numOfConsumersToAdd: Int,
                                                   consumerGroup: mutable.Buffer[KafkaConsumer[Array[Byte], Array[Byte]]],
                                                   consumerPollers: mutable.Buffer[ConsumerAssignmentPoller],
                                                   topicsToSubscribe: List[String],
                                                   subscriptions: Set[TopicPartition],
                                                   group: String = group): (mutable.Buffer[KafkaConsumer[Array[Byte], Array[Byte]]], mutable.Buffer[ConsumerAssignmentPoller]) = {
    // Precondition (see scaladoc): at least as many partitions as consumers,
    // otherwise the assignment validation below can never succeed.
    assertTrue(consumerGroup.size + numOfConsumersToAdd <= subscriptions.size)
    addConsumersToGroup(numOfConsumersToAdd, consumerGroup, consumerPollers, topicsToSubscribe, subscriptions, group)
    // wait until topics get re-assigned and validate assignment
    validateGroupAssignment(consumerPollers, subscriptions)
    (consumerGroup, consumerPollers)
  }
/**
* Create 'numOfConsumersToAdd' consumers add then to the consumer group 'consumerGroup', and create corresponding
* pollers for these consumers.
*
*
* @param numOfConsumersToAdd number of consumers to create and add to the consumer group
* @param consumerGroup current consumer group
* @param consumerPollers current consumer pollers
* @param topicsToSubscribe topics to which new consumers will subscribe to
* @param subscriptions set of all topic partitions
*/
def addConsumersToGroup(numOfConsumersToAdd: Int,
consumerGroup: mutable.Buffer[KafkaConsumer[Array[Byte], Array[Byte]]],
consumerPollers: mutable.Buffer[ConsumerAssignmentPoller],
topicsToSubscribe: List[String],
subscriptions: Set[TopicPartition],
group: String = group): (mutable.Buffer[KafkaConsumer[Array[Byte], Array[Byte]]], mutable.Buffer[ConsumerAssignmentPoller]) = {
for (_ <- 0 until numOfConsumersToAdd) {
val consumer = createConsumerWithGroupId(group)
consumerGroup += consumer
consumerPollers += subscribeConsumerAndStartPolling(consumer, topicsToSubscribe)
}
(consumerGroup, consumerPollers)
}
/**
* Wait for consumers to get partition assignment and validate it.
*
* @param consumerPollers consumer pollers corresponding to the consumer group we are testing
* @param subscriptions set of all topic partitions
* @param msg message to print when waiting for/validating assignment fails
*/
  def validateGroupAssignment(consumerPollers: mutable.Buffer[ConsumerAssignmentPoller],
                              subscriptions: Set[TopicPartition],
                              msg: Option[String] = None,
                              waitTime: Long = 10000L): Unit = {
    val assignments = mutable.Buffer[Set[TopicPartition]]()
    TestUtils.waitUntilTrue(() => {
      // Re-sample every poller's current assignment on each retry, then check
      // the collected assignments form a valid partition of `subscriptions`.
      assignments.clear()
      consumerPollers.foreach(assignments += _.consumerAssignment())
      isPartitionAssignmentValid(assignments, subscriptions)
    }, msg.getOrElse(s"Did not get valid assignment for partitions $subscriptions. Instead, got $assignments"), waitTime)
  }
/**
* Subscribes consumer 'consumer' to a given list of topics 'topicsToSubscribe', creates
* consumer poller and starts polling.
* Assumes that the consumer is not subscribed to any topics yet
*
* @param consumer consumer
* @param topicsToSubscribe topics that this consumer will subscribe to
* @return consumer poller for the given consumer
*/
  def subscribeConsumerAndStartPolling(consumer: Consumer[Array[Byte], Array[Byte]],
                                       topicsToSubscribe: List[String],
                                       partitionsToAssign: Set[TopicPartition] = Set.empty[TopicPartition]): ConsumerAssignmentPoller = {
    // Precondition: the consumer must not be subscribed/assigned yet.
    assertEquals(0, consumer.assignment().size)
    // Topic subscription takes precedence; explicit partition assignment is
    // used only when no topics are given.
    val consumerPoller = if (topicsToSubscribe.nonEmpty)
      new ConsumerAssignmentPoller(consumer, topicsToSubscribe)
    else
      new ConsumerAssignmentPoller(consumer, partitionsToAssign)
    consumerPoller.start()
    consumerPoller
  }
  // Polls until the listener records at least one more onPartitionsAssigned
  // callback than it had on entry, i.e. a rebalance has completed.
  protected def awaitRebalance(consumer: Consumer[_, _], rebalanceListener: TestConsumerReassignmentListener): Unit = {
    val numReassignments = rebalanceListener.callsToAssigned
    TestUtils.pollUntilTrue(consumer, () => rebalanceListener.callsToAssigned > numReassignments,
      "Timed out before expected rebalance completed")
  }
  // Asserts that group membership stayed stable: performs an offset commit and
  // checks that no additional revocation callbacks were observed.
  protected def ensureNoRebalance(consumer: Consumer[_, _], rebalanceListener: TestConsumerReassignmentListener): Unit = {
    // The best way to verify that the current membership is still active is to commit offsets.
    // This would fail if the group had rebalanced.
    val initialRevokeCalls = rebalanceListener.callsToRevoked
    sendAndAwaitAsyncCommit(consumer)
    assertEquals(initialRevokeCalls, rebalanceListener.callsToRevoked)
  }
protected class CountConsumerCommitCallback extends OffsetCommitCallback {
var successCount = 0
var failCount = 0
var lastError: Option[Exception] = None
override def onComplete(offsets: util.Map[TopicPartition, OffsetAndMetadata], exception: Exception): Unit = {
if (exception == null) {
successCount += 1
} else {
failCount += 1
lastError = Some(exception)
}
}
}
  // Background thread that continuously polls the given consumer, tracking its
  // current partition assignment, the number of records received, and any error
  // thrown while polling. Supports either topic subscription (group-managed
  // assignment) or manual partition assignment, chosen at construction time.
  protected class ConsumerAssignmentPoller(consumer: Consumer[Array[Byte], Array[Byte]],
                                           topicsToSubscribe: List[String],
                                           partitionsToAssign: Set[TopicPartition])
    extends ShutdownableThread("daemon-consumer-assignment", false) {

    // Subscription-mode constructor: topics only, no manual assignment.
    def this(consumer: Consumer[Array[Byte], Array[Byte]], topicsToSubscribe: List[String]) {
      this(consumer, topicsToSubscribe, Set.empty[TopicPartition])
    }

    // Manual-assignment-mode constructor: fixed partitions, no subscription.
    def this(consumer: Consumer[Array[Byte], Array[Byte]], partitionsToAssign: Set[TopicPartition]) {
      this(consumer, List.empty[String], partitionsToAssign)
    }

    // Last exception thrown by poll(); it is also rethrown to stop the thread.
    @volatile var thrownException: Option[Throwable] = None
    // Total number of records returned by poll() so far.
    @volatile var receivedMessages = 0

    // Assignment as observed via the rebalance listener (or the manual assignment).
    @volatile private var partitionAssignment: Set[TopicPartition] = partitionsToAssign
    // Set by subscribe() on a caller thread; consumed by doWork() on the poller thread.
    @volatile private var subscriptionChanged = false
    private var topicsSubscription = topicsToSubscribe

    val rebalanceListener: ConsumerRebalanceListener = new ConsumerRebalanceListener {
      override def onPartitionsAssigned(partitions: util.Collection[TopicPartition]) = {
        // Snapshot the consumer's full assignment, not just the newly added partitions.
        partitionAssignment = collection.immutable.Set(consumer.assignment().asScala.toArray: _*)
      }

      override def onPartitionsRevoked(partitions: util.Collection[TopicPartition]) = {
        partitionAssignment = Set.empty[TopicPartition]
      }
    }

    // Constructor side effect: subscribe (group-managed) when no manual assignment
    // was supplied, otherwise assign the requested partitions directly.
    if (partitionAssignment.isEmpty) {
      consumer.subscribe(topicsToSubscribe.asJava, rebalanceListener)
    } else {
      consumer.assign(partitionAssignment.asJava)
    }

    def consumerAssignment(): Set[TopicPartition] = {
      partitionAssignment
    }

    /**
     * Subscribes the consumer to a new set of topics.
     * Since this method will most likely be called from a different thread, it only
     * "schedules" the subscription change; the actual call to consumer.subscribe is
     * made in the doWork() method on the polling thread.
     *
     * The subscription may not be changed again until doWork processes the previous
     * call to this method. This avoids race conditions and is enough functionality
     * for testing purposes.
     *
     * @param newTopicsToSubscribe topics to replace the current subscription with
     */
    def subscribe(newTopicsToSubscribe: List[String]): Unit = {
      if (subscriptionChanged)
        throw new IllegalStateException("Do not call subscribe until the previous subscribe request is processed.")
      if (partitionsToAssign.nonEmpty)
        throw new IllegalStateException("Cannot call subscribe when configured to use manual partition assignment")
      topicsSubscription = newTopicsToSubscribe
      subscriptionChanged = true
    }

    // True once doWork() has applied the most recent subscribe() request.
    def isSubscribeRequestProcessed: Boolean = {
      !subscriptionChanged
    }

    override def initiateShutdown(): Boolean = {
      val res = super.initiateShutdown()
      // Interrupt a potentially blocking poll() so the thread can observe shutdown.
      consumer.wakeup()
      res
    }

    override def doWork(): Unit = {
      // Apply any pending subscription change before polling.
      if (subscriptionChanged) {
        consumer.subscribe(topicsSubscription.asJava, rebalanceListener)
        subscriptionChanged = false
      }
      try {
        receivedMessages += consumer.poll(Duration.ofMillis(50)).count()
      } catch {
        case _: WakeupException => // ignore for shutdown
        case e: Throwable =>
          thrownException = Some(e)
          throw e
      }
    }
  }
/**
* Check whether partition assignment is valid
* Assumes partition assignment is valid iff
* 1. Every consumer got assigned at least one partition
* 2. Each partition is assigned to only one consumer
* 3. Every partition is assigned to one of the consumers
*
* @param assignments set of consumer assignments; one per each consumer
* @param partitions set of partitions that consumers subscribed to
* @return true if partition assignment is valid
*/
def isPartitionAssignmentValid(assignments: Buffer[Set[TopicPartition]],
partitions: Set[TopicPartition]): Boolean = {
val allNonEmptyAssignments = assignments.forall(assignment => assignment.nonEmpty)
if (!allNonEmptyAssignments) {
// at least one consumer got empty assignment
return false
}
// make sure that sum of all partitions to all consumers equals total number of partitions
val totalPartitionsInAssignments = (0 /: assignments) (_ + _.size)
if (totalPartitionsInAssignments != partitions.size) {
// either same partitions got assigned to more than one consumer or some
// partitions were not assigned
return false
}
// The above checks could miss the case where one or more partitions were assigned to more
// than one consumer and the same number of partitions were missing from assignments.
// Make sure that all unique assignments are the same as 'partitions'
val uniqueAssignedPartitions = (Set[TopicPartition]() /: assignments) (_ ++ _)
uniqueAssignedPartitions == partitions
}
}
| KevinLiLu/kafka | core/src/test/scala/integration/kafka/api/AbstractConsumerTest.scala | Scala | apache-2.0 | 19,445 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.thriftserver
import java.io.PrintStream
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.internal.Logging
import org.apache.spark.sql.{SparkSession, SQLContext}
import org.apache.spark.sql.hive.{HiveExternalCatalog, HiveUtils}
import org.apache.spark.util.Utils
/**
 * A singleton holding the shared SparkContext/SQLContext used by the Spark SQL
 * CLI driver and the Thrift server. Intended for the driver program only.
 */
private[hive] object SparkSQLEnv extends Logging {
  logDebug("Initializing SparkSQLEnv")

  var sqlContext: SQLContext = _
  var sparkContext: SparkContext = _

  /**
   * Initializes the environment if it has not been initialized yet.
   * Subsequent calls are no-ops once `sqlContext` has been set.
   * (Uses explicit `: Unit =` — procedure syntax is deprecated.)
   */
  def init(): Unit = {
    if (sqlContext == null) {
      val sparkConf = new SparkConf(loadDefaults = true)
      // If user doesn't specify the appName, we want to get [SparkSQL::localHostName] instead of
      // the default appName [SparkSQLCLIDriver] in cli or beeline.
      val maybeAppName = sparkConf
        .getOption("spark.app.name")
        .filterNot(_ == classOf[SparkSQLCLIDriver].getName)
        .filterNot(_ == classOf[HiveThriftServer2].getName)

      sparkConf
        .setAppName(maybeAppName.getOrElse(s"SparkSQL::${Utils.localHostName()}"))

      val sparkSession = SparkSession.builder.config(sparkConf).enableHiveSupport().getOrCreate()
      sparkContext = sparkSession.sparkContext
      sqlContext = sparkSession.sqlContext

      // Route the Hive metastore client's output/info/error streams through the
      // JVM's standard streams using UTF-8.
      val metadataHive = sparkSession
        .sharedState.externalCatalog.asInstanceOf[HiveExternalCatalog]
        .client.newSession()
      metadataHive.setOut(new PrintStream(System.out, true, "UTF-8"))
      metadataHive.setInfo(new PrintStream(System.err, true, "UTF-8"))
      metadataHive.setError(new PrintStream(System.err, true, "UTF-8"))
      sparkSession.conf.set("spark.sql.hive.version", HiveUtils.hiveExecutionVersion)
    }
  }

  /** Cleans up and shuts down the Spark SQL environments. */
  def stop(): Unit = {
    logDebug("Shutting down Spark SQL Environment")
    // Stop the SparkContext and drop the references so init() can run again.
    if (SparkSQLEnv.sparkContext != null) {
      sparkContext.stop()
      sparkContext = null
      sqlContext = null
    }
  }
}
| aokolnychyi/spark | sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala | Scala | apache-2.0 | 2,861 |
package com.karasiq.bittorrent.format
import scala.annotation.tailrec
import scala.collection.mutable.ArrayBuffer
import akka.util.ByteString
// A fixed-size piece of torrent data: its index, byte size, expected SHA-1 hash
// (20 bytes), and the file the piece starts in.
case class TorrentPiece(index: Int, size: Int, sha1: ByteString, file: TorrentFile)

// A sub-range of a piece: starting byte offset within the piece and its length.
case class TorrentPieceBlock(piece: TorrentPiece, offset: Int, size: Int)
object TorrentPiece {
def pieces(files: TorrentContent): IndexedSeq[TorrentPiece] = {
// Total torrent size
val totalSize = files.files.map(_.size).sum
@tailrec
def pieceSequenceRec(buffer: ArrayBuffer[TorrentPiece], offset: Long, fileOffset: Long, pieceIndex: Int, fileSeq: Seq[TorrentFile]): IndexedSeq[TorrentPiece] = fileSeq match {
case Seq(currentFile, fs @ _*) if fs.nonEmpty && fileOffset >= currentFile.size ⇒
pieceSequenceRec(buffer, offset, 0L, pieceIndex, fs)
case fs @ Seq(currentFile, _*) if offset < totalSize ⇒
val length = Array(files.pieceSize.toLong, totalSize - offset).min
require(length <= Int.MaxValue)
val sha1 = files.pieces.slice(pieceIndex * 20, (pieceIndex * 20) + 20)
val piece = TorrentPiece(buffer.length, length.toInt, sha1, currentFile)
pieceSequenceRec(buffer :+ piece, offset + length, fileOffset + length, pieceIndex + 1, fs)
case other ⇒
buffer.result()
}
pieceSequenceRec(new ArrayBuffer[TorrentPiece](files.pieces.length / 20), 0L, 0L, 0, files.files)
}
def blocks(piece: TorrentPiece, sizeLimit: Int): IndexedSeq[TorrentPieceBlock] = {
@tailrec
def pieceBlockRec(buffer: ArrayBuffer[TorrentPieceBlock], offset: Int): IndexedSeq[TorrentPieceBlock] = {
if (offset >= piece.size) {
buffer.result()
} else {
val block = TorrentPieceBlock(piece, offset, Array(sizeLimit, piece.size - offset).min)
pieceBlockRec(buffer :+ block, offset + block.size)
}
}
pieceBlockRec(new ArrayBuffer[TorrentPieceBlock](piece.size / sizeLimit + 1), 0)
}
} | Karasiq/torrentstream | library/src/main/scala/com/karasiq/bittorrent/format/TorrentPiece.scala | Scala | apache-2.0 | 1,950 |
package climadata.raster
import scala.language.implicitConversions
/**
 * Type class describing how to apply an element-wise function to a Raster,
 * translating no-data cells of the source type A into the no-data value of the
 * target type B instead of applying the function to them. Specialized to avoid
 * boxing of primitive cell types.
 */
trait RasterBuilder[@specialized A, @specialized B] {
  def map(r:Raster[A], f:A => B)(implicit nda:NoData[A], ndb:NoData[B]):Raster[B]
}
/**
 * Concrete RasterBuilder instances for every (source, target) pairing of the
 * five supported primitive cell types: Byte, Short, Int, Float and Double
 * (5 x 5 = 25 instances, named SourceInitial2TargetInitial).
 *
 * Every instance follows the same pattern:
 *  - a ConstantRaster is mapped by transforming its single constant value;
 *  - any other Raster is mapped cell-by-cell, where a source no-data cell
 *    becomes the target type's no-data value instead of being passed to `f`.
 * The instances are written out per concrete type pair (rather than one generic
 * implementation) so the @specialized type-class methods stay unboxed.
 */
trait RasterBuilders {
  // --- Byte source ---
  implicit object B2B extends RasterBuilder[Byte, Byte]{
    def map(r: Raster[Byte], f:Byte => Byte)(implicit nda:NoData[Byte], ndb:NoData[Byte]) = r match {
      case r:ConstantRaster[Byte] => Raster(f(r.const))
      case r:Raster[Byte] => Raster(r.data.map(d => if(nda.isNodata(d)) ndb.nodata else f(d)), r.cols, r.rows)
    }
  }

  implicit object B2S extends RasterBuilder[Byte, Short]{
    def map(r: Raster[Byte], f:Byte => Short)(implicit nda:NoData[Byte], ndb:NoData[Short]) = r match {
      case r:ConstantRaster[Byte] => Raster(f(r.const))
      case r:Raster[Byte] => Raster(r.data.map(d => if(nda.isNodata(d)) ndb.nodata else f(d)), r.cols, r.rows)
    }
  }

  implicit object B2I extends RasterBuilder[Byte, Int]{
    def map(r: Raster[Byte], f:Byte => Int)(implicit nda:NoData[Byte], ndb:NoData[Int]) = r match {
      case r:ConstantRaster[Byte] => Raster(f(r.const))
      case r:Raster[Byte] => Raster(r.data.map(d => if(nda.isNodata(d)) ndb.nodata else f(d)), r.cols, r.rows)
    }
  }

  implicit object B2F extends RasterBuilder[Byte, Float]{
    def map(r: Raster[Byte], f:Byte => Float)(implicit nda:NoData[Byte], ndb:NoData[Float]) = r match {
      case r:ConstantRaster[Byte] => Raster(f(r.const))
      case r:Raster[Byte] => Raster(r.data.map(d => if(nda.isNodata(d)) ndb.nodata else f(d)), r.cols, r.rows)
    }
  }

  implicit object B2D extends RasterBuilder[Byte, Double]{
    def map(r: Raster[Byte], f:Byte => Double)(implicit nda:NoData[Byte], ndb:NoData[Double]) = r match {
      case r:ConstantRaster[Byte] => Raster(f(r.const))
      case r:Raster[Byte] => Raster(r.data.map(d => if(nda.isNodata(d)) ndb.nodata else f(d)), r.cols, r.rows)
    }
  }

  // --- Short source ---
  implicit object S2B extends RasterBuilder[Short, Byte]{
    def map(r: Raster[Short], f:Short => Byte)(implicit nda:NoData[Short], ndb:NoData[Byte]) = r match {
      case r:ConstantRaster[Short] => Raster(f(r.const))
      case r:Raster[Short] => Raster(r.data.map(d => if(nda.isNodata(d)) ndb.nodata else f(d)), r.cols, r.rows)
    }
  }

  implicit object S2S extends RasterBuilder[Short, Short]{
    def map(r: Raster[Short], f:Short => Short)(implicit nda:NoData[Short], ndb:NoData[Short]) = r match {
      case r:ConstantRaster[Short] => Raster(f(r.const))
      case r:Raster[Short] => Raster(r.data.map(d => if(nda.isNodata(d)) ndb.nodata else f(d)), r.cols, r.rows)
    }
  }

  implicit object S2I extends RasterBuilder[Short, Int]{
    def map(r: Raster[Short], f:Short => Int)(implicit nda:NoData[Short], ndb:NoData[Int]) = r match {
      case r:ConstantRaster[Short] => Raster(f(r.const))
      case r:Raster[Short] => Raster(r.data.map(d => if(nda.isNodata(d)) ndb.nodata else f(d)), r.cols, r.rows)
    }
  }

  implicit object S2F extends RasterBuilder[Short, Float]{
    def map(r: Raster[Short], f:Short => Float)(implicit nda:NoData[Short], ndb:NoData[Float]) = r match {
      case r:ConstantRaster[Short] => Raster(f(r.const))
      case r:Raster[Short] => Raster(r.data.map(d => if(nda.isNodata(d)) ndb.nodata else f(d)), r.cols, r.rows)
    }
  }

  implicit object S2D extends RasterBuilder[Short, Double]{
    def map(r: Raster[Short], f:Short => Double)(implicit nda:NoData[Short], ndb:NoData[Double]) = r match {
      case r:ConstantRaster[Short] => Raster(f(r.const))
      case r:Raster[Short] => Raster(r.data.map(d => if(nda.isNodata(d)) ndb.nodata else f(d)), r.cols, r.rows)
    }
  }

  // --- Int source ---
  implicit object I2B extends RasterBuilder[Int, Byte]{
    def map(r: Raster[Int], f:Int => Byte)(implicit nda:NoData[Int], ndb:NoData[Byte]) = r match {
      case r:ConstantRaster[Int] => Raster(f(r.const))
      case r:Raster[Int] => Raster(r.data.map(d => if(nda.isNodata(d)) ndb.nodata else f(d)), r.cols, r.rows)
    }
  }

  implicit object I2S extends RasterBuilder[Int, Short]{
    def map(r: Raster[Int], f:Int => Short)(implicit nda:NoData[Int], ndb:NoData[Short]) = r match {
      case r:ConstantRaster[Int] => Raster(f(r.const))
      case r:Raster[Int] => Raster(r.data.map(d => if(nda.isNodata(d)) ndb.nodata else f(d)), r.cols, r.rows)
    }
  }

  implicit object I2I extends RasterBuilder[Int, Int]{
    def map(r: Raster[Int], f:Int => Int)(implicit nda:NoData[Int], ndb:NoData[Int]) = r match {
      case r:ConstantRaster[Int] => Raster(f(r.const))
      case r:Raster[Int] => Raster(r.data.map(d => if(nda.isNodata(d)) ndb.nodata else f(d)), r.cols, r.rows)
    }
  }

  implicit object I2F extends RasterBuilder[Int, Float]{
    def map(r: Raster[Int], f:Int => Float)(implicit nda:NoData[Int], ndb:NoData[Float]) = r match {
      case r:ConstantRaster[Int] => Raster(f(r.const))
      case r:Raster[Int] => Raster(r.data.map(d => if(nda.isNodata(d)) ndb.nodata else f(d)), r.cols, r.rows)
    }
  }

  implicit object I2D extends RasterBuilder[Int, Double]{
    def map(r: Raster[Int], f:Int => Double)(implicit nda:NoData[Int], ndb:NoData[Double]) = r match {
      case r:ConstantRaster[Int] => Raster(f(r.const))
      case r:Raster[Int] => Raster(r.data.map(d => if(nda.isNodata(d)) ndb.nodata else f(d)), r.cols, r.rows)
    }
  }

  // --- Float source ---
  implicit object F2B extends RasterBuilder[Float, Byte]{
    def map(r: Raster[Float], f:Float => Byte)(implicit nda:NoData[Float], ndb:NoData[Byte]) = r match {
      case r:ConstantRaster[Float] => Raster(f(r.const))
      case r:Raster[Float] => Raster(r.data.map(d => if(nda.isNodata(d)) ndb.nodata else f(d)), r.cols, r.rows)
    }
  }

  implicit object F2S extends RasterBuilder[Float, Short]{
    def map(r: Raster[Float], f:Float => Short)(implicit nda:NoData[Float], ndb:NoData[Short]) = r match {
      case r:ConstantRaster[Float] => Raster(f(r.const))
      case r:Raster[Float] => Raster(r.data.map(d => if(nda.isNodata(d)) ndb.nodata else f(d)), r.cols, r.rows)
    }
  }

  implicit object F2I extends RasterBuilder[Float, Int]{
    def map(r: Raster[Float], f:Float => Int)(implicit nda:NoData[Float], ndb:NoData[Int]) = r match {
      case r:ConstantRaster[Float] => Raster(f(r.const))
      case r:Raster[Float] => Raster(r.data.map(d => if(nda.isNodata(d)) ndb.nodata else f(d)), r.cols, r.rows)
    }
  }

  implicit object F2F extends RasterBuilder[Float, Float]{
    def map(r: Raster[Float], f:Float => Float)(implicit nda:NoData[Float], ndb:NoData[Float]) = r match {
      case r:ConstantRaster[Float] => Raster(f(r.const))
      case r:Raster[Float] => Raster(r.data.map(d => if(nda.isNodata(d)) ndb.nodata else f(d)), r.cols, r.rows)
    }
  }

  implicit object F2D extends RasterBuilder[Float, Double]{
    def map(r: Raster[Float], f:Float => Double)(implicit nda:NoData[Float], ndb:NoData[Double]) = r match {
      case r:ConstantRaster[Float] => Raster(f(r.const))
      case r:Raster[Float] => Raster(r.data.map(d => if(nda.isNodata(d)) ndb.nodata else f(d)), r.cols, r.rows)
    }
  }

  // --- Double source ---
  implicit object D2B extends RasterBuilder[Double, Byte]{
    def map(r: Raster[Double], f:Double => Byte)(implicit nda:NoData[Double], ndb:NoData[Byte]) = r match {
      case r:ConstantRaster[Double] => Raster(f(r.const))
      case r:Raster[Double] => Raster(r.data.map(d => if(nda.isNodata(d)) ndb.nodata else f(d)), r.cols, r.rows)
    }
  }

  implicit object D2S extends RasterBuilder[Double, Short]{
    def map(r: Raster[Double], f:Double => Short)(implicit nda:NoData[Double], ndb:NoData[Short]) = r match {
      case r:ConstantRaster[Double] => Raster(f(r.const))
      case r:Raster[Double] => Raster(r.data.map(d => if(nda.isNodata(d)) ndb.nodata else f(d)), r.cols, r.rows)
    }
  }

  implicit object D2I extends RasterBuilder[Double, Int]{
    def map(r: Raster[Double], f:Double => Int)(implicit nda:NoData[Double], ndb:NoData[Int]) = r match {
      case r:ConstantRaster[Double] => Raster(f(r.const))
      case r:Raster[Double] => Raster(r.data.map(d => if(nda.isNodata(d)) ndb.nodata else f(d)), r.cols, r.rows)
    }
  }

  implicit object D2F extends RasterBuilder[Double, Float]{
    def map(r: Raster[Double], f:Double => Float)(implicit nda:NoData[Double], ndb:NoData[Float]) = r match {
      case r:ConstantRaster[Double] => Raster(f(r.const))
      case r:Raster[Double] => Raster(r.data.map(d => if(nda.isNodata(d)) ndb.nodata else f(d)), r.cols, r.rows)
    }
  }

  implicit object D2D extends RasterBuilder[Double, Double]{
    def map(r: Raster[Double], f:Double => Double)(implicit nda:NoData[Double], ndb:NoData[Double]) = r match {
      case r:ConstantRaster[Double] => Raster(f(r.const))
      case r:Raster[Double] => Raster(r.data.map(d => if(nda.isNodata(d)) ndb.nodata else f(d)), r.cols, r.rows)
    }
  }
}
| castovoid/simpleraster | src/main/scala/climadata/raster/RasterBuilder.scala | Scala | mit | 8,798 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.