code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package teststate.example.selenium
import org.openqa.selenium.WebDriver
import scala.concurrent.duration._
import teststate.example.selenium.MyTestState._
import utest._
object SeleniumExample extends TestSuite {
type Ref = WebDriver
def openBrowser(): WebDriver = {
val driver = newChrome()
driver.get("https://japgolly.github.io/scalajs-react/#examples/ajax-1")
driver
}
class Obs($: FastDomZipperSelenium) {
val clickButton: () => Unit =
$("button", 1 of 3).prepare(_.dom().click())
val responseText: Option[String] =
$.collect01("table").map(_("td", 2 of 2).innerText)
// println(s"responseText = ${responseText.toString.replace("\\n", " ").take(60)}")
}
val observer: Observer[Ref, Obs, String] = Observer(ref => new Obs(FastDomZipperSelenium.html(ref)))
val dsl = Dsl[Ref, Obs, Unit]
val clickGet = dsl.action("Click GET")(_.obs.clickButton())
val responseText = dsl.focus("Response text").option(_.obs.responseText)
def runTest() = {
val driver = openBrowser()
val plan = Plan.action(clickGet +> responseText.assert.exists("Response", _ contains "Response"))
val report = plan
.test(observer)
.stateless
.withRef(driver)
.withRetryPolicy(Retry.Policy.fixedIntervalWithTimeout(200 millis, 60 seconds))
.run()
driver.quit()
report.assert()
}
override def tests = CI match {
case None => Tests {
runTest()
}
case Some(_) => Tests {}
}
}
| japgolly/test-state | example-selenium/src/test/scala/teststate/example/selenium/SeleniumExample.scala | Scala | apache-2.0 | 1,486 |
package org.katis.capnproto.runtime
object ReaderOptions {
val DEFAULT_TRAVERSAL_LIMIT_IN_WORDS = 8 * 1024 * 1024
val DEFAULT_NESTING_LIMIT = 64
val DEFAULT_READER_OPTIONS = new ReaderOptions(DEFAULT_TRAVERSAL_LIMIT_IN_WORDS, DEFAULT_NESTING_LIMIT)
}
class ReaderOptions(val traversalLimitInWords: Long, val nestingLimit: Int)
| katis/capnp-scala | runtime/shared/src/main/scala-2.11/org/katis/capnproto/runtime/ReaderOptions.scala | Scala | mit | 338 |
/*
* Copyright (c) 2014-2018 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.internal.builders
import java.util.concurrent.TimeUnit
import monix.execution.cancelables.MultiAssignCancelable
import monix.execution.{Cancelable, Ack}
import monix.execution.Ack.{Stop, Continue}
import monix.reactive.Observable
import monix.reactive.observers.Subscriber
import scala.concurrent.duration._
import scala.util.{Failure, Success, Try}
private[reactive] final
class RepeatedValueObservable[A](initialDelay: FiniteDuration, period: FiniteDuration, unit: A)
extends Observable[A] {
def unsafeSubscribeFn(subscriber: Subscriber[A]): Cancelable = {
val task = MultiAssignCancelable()
val r = runnable(subscriber, task)
if (initialDelay.length <= 0)
r.run()
else {
task := subscriber.scheduler
.scheduleOnce(initialDelay.length, initialDelay.unit, r)
}
task
}
private[this] def runnable(subscriber: Subscriber[A], task: MultiAssignCancelable): Runnable =
new Runnable { self =>
private[this] implicit val s = subscriber.scheduler
private[this] val periodMs = period.toMillis
private[this] var startedAt = 0L
def syncScheduleNext(): Unit = {
val initialDelay = {
val duration = s.clockMonotonic(MILLISECONDS) - startedAt
val d = periodMs - duration
if (d >= 0L) d else 0L
}
// No need to synchronize, since we have a happens-before
// relationship between scheduleOnce invocations.
task := s.scheduleOnce(initialDelay, TimeUnit.MILLISECONDS, self)
}
def asyncScheduleNext(r: Try[Ack]): Unit = r match {
case Success(ack) =>
if (ack == Continue) syncScheduleNext()
case Failure(ex) =>
s.reportFailure(ex)
}
def run(): Unit = {
startedAt = s.clockMonotonic(MILLISECONDS)
val ack = subscriber.onNext(unit)
if (ack == Continue)
syncScheduleNext()
else if (ack != Stop)
ack.onComplete(asyncScheduleNext)
}
}
}
| Wogan/monix | monix-reactive/shared/src/main/scala/monix/reactive/internal/builders/RepeatedValueObservable.scala | Scala | apache-2.0 | 2,693 |
/*
* Copyright 2014-2022 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.atlas.chart
import java.io.OutputStream
import java.io.OutputStreamWriter
import com.netflix.atlas.chart.model._
import com.netflix.atlas.core.model.SummaryStats
/**
* Returns a handful of summary stats instead of all the raw data for a given graph.
*/
class StatsJsonGraphEngine extends GraphEngine {
import com.netflix.atlas.chart.GraphEngine._
def name: String = "stats.json"
def contentType: String = "application/json"
def write(config: GraphDef, output: OutputStream): Unit = {
val writer = new OutputStreamWriter(output, "UTF-8")
val seriesList = config.plots.flatMap(_.lines)
val gen = jsonFactory.createGenerator(writer)
gen.writeStartObject()
gen.writeNumberField("start", config.startTime.toEpochMilli)
gen.writeNumberField("end", config.endTime.toEpochMilli)
gen.writeNumberField("step", config.step)
gen.writeArrayFieldStart("legend")
seriesList.foreach { series =>
val label = series.data.label
gen.writeString(label)
}
gen.writeEndArray()
gen.writeArrayFieldStart("metrics")
seriesList.foreach { series =>
gen.writeStartObject()
series.data.tags.toList.sortWith(_._1 < _._1).foreach { t =>
gen.writeStringField(t._1, t._2)
}
gen.writeEndObject()
}
gen.writeEndArray()
gen.writeArrayFieldStart("stats")
seriesList.foreach { series =>
val stats =
SummaryStats(series.data, config.startTime.toEpochMilli, config.endTime.toEpochMilli)
gen.writeStartObject()
gen.writeNumberField("count", stats.count)
if (seriesList.nonEmpty) {
gen.writeNumberField("avg", stats.avg)
gen.writeNumberField("total", stats.total)
gen.writeNumberField("max", stats.max)
gen.writeNumberField("min", stats.min)
gen.writeNumberField("last", stats.last)
}
gen.writeEndObject()
}
gen.writeEndArray()
gen.writeArrayFieldStart("notices")
config.warnings.foreach(gen.writeString)
gen.writeEndArray()
gen.writeEndObject()
gen.flush()
}
}
| Netflix/atlas | atlas-chart/src/main/scala/com/netflix/atlas/chart/StatsJsonGraphEngine.scala | Scala | apache-2.0 | 2,693 |
package model
trait BiofoodFixtures {
def html(mealNames: Seq[String]) =
<div class="col-md-3 hors-menu text-center">
<h2 id="tillmenyn">Dagens lunch Mån. 10/11</h2>
{
mealNames.map { name =>
<div class="row">
<div class="col-xs-2"></div>
<div class="col-xs-10 text-left">{name}</div>
</div>
}
}
<p class="small"></p>
</div>
def defaultMealNames(nMeals: Int) = (1 to nMeals).map("Meal" + _)
}
object BiofoodFixtures extends BiofoodFixtures
trait FossilenFixtures {
def html(mealName: String = "meal") =
<div class="sv-text-portlet-content">
<h2 class="h2" id="h-Menyvecka7915februari">Meny vecka 7, 9-15 februari</h2>
<h3 class="h3" id="h-Mandag">Måndag</h3>
<p class="brodtext">mon-{mealName}1 </p>
<p class="brodtext">mon-{mealName}2</p>
<p class="brodtext">mon-{mealName}3 </p>
<p class="brodtext">mon-{mealName}4</p>
<h3 class="h3" id="h-Tisdag">Tisdag</h3>
<p class="brodtext">tue-{mealName}1</p>
<p class="brodtext">tue-{mealName}2</p>
<p class="brodtext">tue-{mealName}3</p>
<p class="brodtext">tue-{mealName}4</p>
<h3 class="h3" id="h-Onsdag">Onsdag</h3>
<p class="brodtext">wed-{mealName}1</p>
<p class="brodtext">wed-{mealName}2</p>
<p class="brodtext">wed-{mealName}3</p>
<p class="brodtext">wed-{mealName}4</p>
<h3 class="h3" id="h-Torsdag">Torsdag</h3>
<p class="brodtext">thu-{mealName}1 </p>
<p class="brodtext">thu-{mealName}2 </p>
<p class="brodtext">thu-{mealName}3 </p>
<p class="brodtext">thu-{mealName}4 </p>
<h3 class="h3" id="h-Fredag">Fredag</h3>
<p class="brodtext">fri-{mealName}1</p>
<p class="brodtext">fri-{mealName}2</p>
<p class="brodtext">fri-{mealName}3</p>
<p class="brodtext">fri-{mealName}4</p>
<h3 class="h3" id="h-Lordagsondag">Lördag, söndag</h3>
<p class="brodtext">
sunsat-{mealName}1 <br/><br/>
sunsat-{mealName}2 <br/><br/>
sunsat-{mealName}3 <br/><br/>
sunsat-{mealName}4
</p>
</div>
}
object FossilenFixtures extends FossilenFixtures
trait KraftanFixtures {
def html(mealName: String = "meal") =
<div class="post-content no-thumbnail">
<div class="post-info top">
<span class="post-type-icon-wrap"><span class="post-type-icon"></span></span>
<span class="post-date">16 november, 2014</span>
<span class="no-caps post-autor"> by <a href="http://www.kraftan.nu/author/kraftan/" title="Inlägg av kraftan" rel="author">kraftan</a></span>
</div>
<div class="post-title-wrapper">
<h2 class="post-title"><a href="http://www.kraftan.nu/menyer/lunchmeny-v-47-2/">Lunchmeny v.47</a></h2>
</div>
<div class="clear"></div>
<div class="post-content-content">
Måndag<br/>
mon-{mealName}1<br/>
**<br/>
mon-{mealName}2<br/>
<br/>
Tisdag <br/>
tue-{mealName}1<br/>
**<br/>
tue-{mealName}2<br/>
<br/>
Onsdag<br/>
wed-{mealName}1<br/>
**<br/>
wed-{mealName}2<br/>
<br/>
Torsdag<br/>
thu-{mealName}1<br/>
**<br/>
thu-{mealName}2<br/>
<br/>
Fredag<br/>
fri-{mealName}1<br/>
**<br/>
fri-{mealName}2<br/>
<br/>
foo bar<br/>
<div class="clear"></div>
<div class="post-info bottom">
<span class="post-type-icon-wrap"><span class="post-type-icon"></span></span>
<span class="no-caps">in</span><a href="http://www.kraftan.nu/category/menyer/" rel="category tag">Menyer</a>
<span class="comments-number"><a href="http://www.kraftan.nu/menyer/lunchmeny-v-47-2/#comments">0<span class="no-caps">comments</span></a></span>
</div>
<div class="clear"></div>
</div>
</div>
}
object KraftanFixtures extends KraftanFixtures
trait LantisFixtures {
def html(mealNames: Seq[String]) =
<div class="col-md-3 hors-menu text-center">
<h2 id="tillmenyn">Dagens lunch Mån. 10/11</h2>
{
mealNames.map { name =>
<div class="row">
<div class="col-xs-2"></div>
<div class="col-xs-10 text-left">{name}</div>
</div>
}
}
<p class="small"></p>
</div>
def defaultMealNames(nMeals: Int) = (1 to nMeals).map("Meal" + _)
}
object LantisFixtures extends LantisFixtures
| bwestlin/su-lunch | test/model/Fixtures.scala | Scala | apache-2.0 | 4,630 |
/*
* Copyright 2016 Tamer AbdulRadi
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package troy
package macros
import java.util.UUID
import com.datastax.driver.core._
import scala.concurrent.Future
class DslSpec extends CassandraSpec {
import troy.driver.DSL._
import troy.dsl._
import scala.concurrent.ExecutionContext.Implicits.global
override val testDataFixtures =
"""
INSERT INTO test.posts (author_id, post_id , author_name , post_rating, post_title)
VALUES ( uuid(), now(), 'test author', 5, 'test post') ;
INSERT INTO test.post_details (author_id, id , tags , comment_ids, comment_userIds, comment_bodies , comments)
VALUES ( uuid(), now(), {'test1', 'test2'}, {1, 2}, [1, 2], ['test1', 'test2'], {1: 'test1', 2 : 'test2'}) ;
"""
case class Post(id: UUID, author_name: String, title: String)
"The Macro" should "support no params" in {
val q = withSchema { () =>
cql"SELECT post_id, author_name, post_title FROM test.posts;".prepared.executeAsync.all.as(Post)
}
q(): Future[Seq[Post]]
}
it should "support prepare & execute async" in {
val q = withSchema { () =>
cql"SELECT post_id, author_name, post_title FROM test.posts;".prepared.executeAsync.all.as(Post)
}
q(): Future[Seq[Post]]
}
it should "support single param" in {
val q = withSchema { (title: String) =>
cql"SELECT post_id, author_name, post_title FROM test.posts WHERE post_title = $title;".prepared.executeAsync.all.as(Post)
}
q("test"): Future[Seq[Post]]
}
"The Macro" should "support returning the BoundStatement directly with no params" in {
val q = withSchema { () =>
cql"SELECT post_id, author_name, post_title FROM test.posts;".prepared
}
q(): Statement
}
"The Macro" should "support returning the BoundStatement directly with params" in {
val q = withSchema { (title: String) =>
cql"SELECT post_id, author_name, post_title FROM test.posts WHERE post_title = $title;".prepared
}
q("test"): Statement
}
it should "support returning the ResultSet" in {
val query = withSchema { () =>
cql"SELECT post_id, author_name, post_title FROM test.posts;".prepared.execute
}
val result: ResultSet = query()
result.all().size() shouldBe 1
}
it should "support returning the ResultSet asynchronously" in {
val q = withSchema { () =>
cql"SELECT post_id, author_name, post_title FROM test.posts;".prepared.executeAsync
}
q(): Future[ResultSet]
}
it should "support returning one element" in {
val q = withSchema { () =>
cql"SELECT post_id, author_name, post_title FROM test.posts;".prepared.executeAsync.oneOption
}
q(): Future[Option[Row]]
}
it should "allow specifying consistency level" in {
withSchema { () =>
cql"SELECT post_id, author_name, post_title FROM test.posts;"
.prepared
.setConsistencyLevel(ConsistencyLevel.LOCAL_QUORUM)
.setSerialConsistencyLevel(ConsistencyLevel.LOCAL_SERIAL)
.executeAsync
.all
.as(Post)
}
}
it should "support parsing one row async" in {
val q = withSchema { () =>
cql"SELECT post_id, author_name, post_title FROM test.posts;".prepared.executeAsync.oneOption.as(Post)
}
q(): Future[Option[Post]]
}
it should "support parsing one row sync" in {
val q = withSchema { () =>
cql"SELECT post_id, author_name, post_title FROM test.posts;".prepared.execute.oneOption.as(Post)
}
q(): Option[Post]
}
it should "support select * with no params" in {
val q = withSchema { () =>
cql"SELECT * FROM test.posts;".prepared.execute.oneOption
}
q(): Option[Row]
}
it should "support select with orderby" in {
val listByAuthor = withSchema { (authorId: UUID) =>
cql"""
SELECT author_id, author_name, post_title
FROM test.posts
WHERE author_id = $authorId
ORDER BY post_id DESC ;
"""
.prepared
.executeAsync
.all
.as(Post)
}
listByAuthor(UUID.randomUUID()): Future[Seq[Post]]
}
// TODO https://github.com/tabdulradi/troy/issues/37
// it should "support parsing select * with class/function matching the whole table" in {
// val q = withSchema { () =>
// cql"SELECT * FROM test.posts;".prepared.execute.all.as(AuthorAndPost)
// }
// val res: Seq[AuthorAndPost] = q()
// }
it should "support specifying minimum version" in {
val q = withSchema.minVersion(1) { () =>
cql"SELECT author_id FROM test.posts;".prepared.execute.oneOption
}
q(): Option[Row]
}
it should "support specifying minimum version and maximum version" in {
val q = withSchema.minVersion(1).maxVersion(2) { () =>
cql"SELECT author_id FROM test.posts;".prepared.execute.oneOption
}
q(): Option[Row]
}
}
| schemasafe/troy | troy-macro/src/test/scala/troy/macros/DslSpec.scala | Scala | apache-2.0 | 5,414 |
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.accumulo.index
import java.nio.charset.StandardCharsets
import com.google.common.primitives.{Bytes, Longs}
import com.typesafe.scalalogging.LazyLogging
import org.apache.accumulo.core.data.{Range => aRange}
import org.apache.hadoop.io.Text
import org.geotools.factory.Hints
import org.locationtech.geomesa.accumulo.data.stats.GeoMesaStats
import org.locationtech.geomesa.accumulo.data.tables.{GeoMesaTable, Z2Table}
import org.locationtech.geomesa.accumulo.iterators._
import org.locationtech.geomesa.curve.Z2SFC
import org.locationtech.geomesa.utils.geotools.WholeWorldPolygon
import org.locationtech.geomesa.utils.index.VisibilityLevel
import org.locationtech.sfcurve.zorder.Z2
import org.opengis.feature.simple.SimpleFeatureType
import org.opengis.filter.Filter
import org.opengis.filter.spatial._
class Z2IdxStrategy(val filter: QueryFilter) extends Strategy with LazyLogging with IndexFilterHelpers {
/**
* Plans the query - strategy implementations need to define this
*/
override def getQueryPlan(queryPlanner: QueryPlanner, hints: Hints, output: ExplainerOutputType): QueryPlan = {
import QueryHints.{LOOSE_BBOX, RichHints}
import Z2IdxStrategy._
import org.locationtech.geomesa.filter.FilterHelper._
import org.locationtech.geomesa.filter._
import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType._
val ds = queryPlanner.ds
val sft = queryPlanner.sft
val isInclude = QueryFilterSplitter.isFullTableScan(filter)
if (isInclude) {
// allow for full table scans - we use the z2 index for queries that can't be satisfied elsewhere
filter.secondary.foreach { f =>
logger.warn(s"Running full table scan for schema ${sft.getTypeName} with filter ${filterToString(f)}")
}
}
// TODO GEOMESA-1215 this can handle OR'd geoms, but the query splitter won't currently send them
val geometryToCover =
filter.singlePrimary.flatMap(extractSingleGeometry(_, sft.getGeomField)).getOrElse(WholeWorldPolygon)
output(s"GeomsToCover: $geometryToCover")
val looseBBox = if (hints.containsKey(LOOSE_BBOX)) Boolean.unbox(hints.get(LOOSE_BBOX)) else ds.config.looseBBox
val ecql: Option[Filter] = if (isInclude || !looseBBox || sft.nonPoints) {
// if this is a full table scan, we can just use the filter option to get the secondary ecql
// if the user has requested strict bounding boxes, we apply the full filter
// if this is a non-point geometry, the index is coarse-grained, so we apply the full filter
filter.filter
} else {
// for normal bboxes, the index is fine enough that we don't need to apply the filter on top of it
// this may cause some minor errors at extremely fine resolution, but the performance is worth it
// if we have a complicated geometry predicate, we need to pass it through to be evaluated
val complexGeomFilter = filterListAsAnd(filter.primary.filter(isComplicatedSpatialFilter))
(complexGeomFilter, filter.secondary) match {
case (Some(gf), Some(fs)) => filterListAsAnd(Seq(gf, fs))
case (None, fs) => fs
case (gf, None) => gf
}
}
val (iterators, kvsToFeatures, colFamily, hasDupes) = if (hints.isBinQuery) {
// if possible, use the pre-computed values
// can't use if there are non-st filters or if custom fields are requested
val (iters, cf) =
if (filter.secondary.isEmpty && BinAggregatingIterator.canUsePrecomputedBins(sft, hints)) {
// TODO GEOMESA-1254 per-attribute vis + bins
val idOffset = Z2Table.getIdRowOffset(sft)
(Seq(BinAggregatingIterator.configurePrecomputed(sft, ecql, hints, idOffset, sft.nonPoints)), Z2Table.BIN_CF)
} else {
val iter = BinAggregatingIterator.configureDynamic(sft, ecql, hints, sft.nonPoints)
(Seq(iter), Z2Table.FULL_CF)
}
(iters, BinAggregatingIterator.kvsToFeatures(), cf, false)
} else if (hints.isDensityQuery) {
val iter = Z2DensityIterator.configure(sft, ecql, hints)
(Seq(iter), KryoLazyDensityIterator.kvsToFeatures(), Z2Table.FULL_CF, false)
} else if (hints.isStatsIteratorQuery) {
val iter = KryoLazyStatsIterator.configure(sft, ecql, hints, sft.nonPoints)
(Seq(iter), KryoLazyStatsIterator.kvsToFeatures(sft), Z2Table.FULL_CF, false)
} else if (hints.isMapAggregatingQuery) {
val iter = KryoLazyMapAggregatingIterator.configure(sft, ecql, hints, sft.nonPoints)
(Seq(iter), queryPlanner.defaultKVsToFeatures(hints), Z2Table.FULL_CF, false)
} else {
val iters = KryoLazyFilterTransformIterator.configure(sft, ecql, hints).toSeq
(iters, queryPlanner.defaultKVsToFeatures(hints), Z2Table.FULL_CF, sft.nonPoints)
}
val z2table = ds.getTableName(sft.getTypeName, Z2Table)
val numThreads = ds.getSuggestedThreads(sft.getTypeName, Z2Table)
val (ranges, z2Iter) = if (isInclude) {
val range = if (sft.isTableSharing) {
aRange.prefix(new Text(sft.getTableSharingPrefix.getBytes(StandardCharsets.UTF_8)))
} else {
new aRange()
}
(Seq(range), None)
} else {
// setup Z2 iterator
val env = geometryToCover.getEnvelopeInternal
val (lx, ly, ux, uy) = (env.getMinX, env.getMinY, env.getMaxX, env.getMaxY)
val getRanges: (Seq[Array[Byte]], (Double, Double), (Double, Double)) => Seq[aRange] =
if (sft.isPoints) getPointRanges else getGeomRanges
val prefixes = if (sft.isTableSharing) {
val ts = sft.getTableSharingPrefix.getBytes(StandardCharsets.UTF_8)
Z2Table.SPLIT_ARRAYS.map(ts ++ _)
} else {
Z2Table.SPLIT_ARRAYS
}
val ranges = getRanges(prefixes, (lx, ux), (ly, uy))
// index space values for comparing in the iterator
def decode(x: Double, y: Double): (Int, Int) = if (sft.isPoints) {
Z2SFC.index(x, y).decode
} else {
Z2(Z2SFC.index(x, y).z & Z2Table.GEOM_Z_MASK).decode
}
val (xmin, ymin) = decode(lx, ly)
val (xmax, ymax) = decode(ux, uy)
val zIter = Z2Iterator.configure(sft.isPoints, sft.isTableSharing, xmin, xmax, ymin, ymax, Z2IdxStrategy.Z2_ITER_PRIORITY)
(ranges, Some(zIter))
}
val perAttributeIter = sft.getVisibilityLevel match {
case VisibilityLevel.Feature => Seq.empty
case VisibilityLevel.Attribute => Seq(KryoVisibilityRowEncoder.configure(sft, Z2Table))
}
val cf = if (perAttributeIter.isEmpty) colFamily else GeoMesaTable.AttributeColumnFamily
val iters = perAttributeIter ++ iterators ++ z2Iter
BatchScanPlan(filter, z2table, ranges, iters, Seq(cf), kvsToFeatures, numThreads, hasDupes)
}
def getPointRanges(prefixes: Seq[Array[Byte]], x: (Double, Double), y: (Double, Double)): Seq[aRange] = {
Z2SFC.ranges(x, y).flatMap { case indexRange =>
val startBytes = Longs.toByteArray(indexRange.lower)
val endBytes = Longs.toByteArray(indexRange.upper)
prefixes.map { prefix =>
val start = new Text(Bytes.concat(prefix, startBytes))
val end = aRange.followingPrefix(new Text(Bytes.concat(prefix, endBytes)))
new aRange(start, true, end, false)
}
}
}
def getGeomRanges(prefixes: Seq[Array[Byte]], x: (Double, Double), y: (Double, Double)): Seq[aRange] = {
Z2SFC.ranges(x, y, 8 * Z2Table.GEOM_Z_NUM_BYTES).flatMap { indexRange =>
val startBytes = Longs.toByteArray(indexRange.lower).take(Z2Table.GEOM_Z_NUM_BYTES)
val endBytes = Longs.toByteArray(indexRange.upper).take(Z2Table.GEOM_Z_NUM_BYTES)
prefixes.map { prefix =>
val start = new Text(Bytes.concat(prefix, startBytes))
val end = aRange.followingPrefix(new Text(Bytes.concat(prefix, endBytes)))
new aRange(start, true, end, false)
}
}
}
}
object Z2IdxStrategy extends StrategyProvider {
val Z2_ITER_PRIORITY = 23
val FILTERING_ITER_PRIORITY = 25
override protected def statsBasedCost(sft: SimpleFeatureType,
filter: QueryFilter,
transform: Option[SimpleFeatureType],
stats: GeoMesaStats): Option[Long] = {
filter.singlePrimary match {
case None => Some(Long.MaxValue)
// add one so that we prefer the z3 index even if geometry is the limiting factor, resulting in the same count
case Some(f) => stats.getCount(sft, f, exact = false).map(c => if (c == 0L) 0L else c + 1L)
}
}
/**
* More than id lookups (at 1), high-cardinality attributes (at 1), z3 (at 200).
* Less than spatial-only z3 (at 401), unknown cardinality attributes (at 999).
*/
override protected def indexBasedCost(sft: SimpleFeatureType,
filter: QueryFilter,
transform: Option[SimpleFeatureType]): Long = 400L
def isComplicatedSpatialFilter(f: Filter): Boolean = {
f match {
case _: BBOX => false
case _: DWithin => true
case _: Contains => true
case _: Crosses => true
case _: Intersects => true
case _: Overlaps => true
case _: Within => true
case _ => false // Beyond, Disjoint, DWithin, Equals, Touches
}
}
}
| mdzimmerman/geomesa | geomesa-accumulo/geomesa-accumulo-datastore/src/main/scala/org/locationtech/geomesa/accumulo/index/Z2IdxStrategy.scala | Scala | apache-2.0 | 9,802 |
package net.surguy.runnertrack.scraper
import java.io.File
import java.nio.file.Files
import net.surguy.runnertrack.model._
import net.surguy.runnertrack.TimeUtils._
import org.openqa.selenium.{By, WebDriver}
import scala.collection.JavaConversions._
class CopenhagenMarathonScraper(raceId: String) extends RaceScraper with WebDriverTools {
val distanceParser = new GenericDistanceParser()
// Julius Kiprono Mutai: 1
val baseUrl = s"http://live.ultimate.dk/desktop/front/data.php?eventid=$raceId&mode=participantinfo&language=us&pid=%s"
override def scrape(browser: WebDriver)(runnerId: String) = {
browser.navigate().to(baseUrl.format(runnerId))
// The page content is a JavaScript fragment, containing the HTML within a
// document.getElementById('PARTICIPANTINFO').innerHTML='...'; element.
// So, we extract the HTML, and save it to a local file so Selenium can browse to it
val source = browser.getPageSource
val html = extractHtml(source)
val f = File.createTempFile("runtrack", "_id"+runnerId+".html")
Files.write(f.toPath, html.getBytes)
try {
browser.navigate().to(f.toURI.toURL)
parse(browser)
} finally {
f.delete()
}
}
override def cacheKey: String = "copenhagen"+raceId
private[scraper] def extractHtml(source: String): String = {
val sourceStart = source.indexOf("innerHTML='") + "innerHTML='".length
val sourceEnd = source.lastIndexOf("';")
source.substring(sourceStart, sourceEnd)
}
def parse(implicit browser: WebDriver): Runner = {
val name = $x("(//table[@class='participant_table_data']//span[@class='participant_value_big'])[1]")
val club = $x("(//table[@class='participant_table_data'])[4]//table[1]//tr[1]//td[2]")
val startTimeText = $x("(//table[@class='participant_table_data'][6])//table[1]//tr[2]//td[2]")
val startTime = tryParseTime(startTimeText)
val finish = if (browser.getPageSource.contains("rank overall")) {
val finishTimeText = $x("(//table[@class='participant_table_data'][6])//table[1]//tr[3]//td[2]")
val finishTime = tryParseDuration(finishTimeText.split(" ").headOption.getOrElse(""))
val placeText = $x("((//table[@class='participant_table_data'][6])//table)[2]//tr[1]//td[2]")
val place = if (placeText.contains("of")) placeText.substring(0,placeText.indexOf(" ")).toInt else -1
finishTime.map( t => Finish(place, t))
} else None
val splits = for (row <- browser.findElements(By.xpath("((//table[@class='participant_table_data'][7])//table)//tr[td/@class='split_time']"))) yield {
val distanceText = $x("td[1]", row)
val timeText = $x("td[2]", row)
val duration = tryParseDuration(timeText)
duration.map( d => Split(distanceParser.parseDistance(distanceText), d) )
}
Runner(name, splits.flatten, club, startTime, finish)
}
}
object CopenhagenMarathonScraper {
val RACE_ID_2014 = "2186"
val RACE_ID_2015 = "2601"
} | inigo/runnertrack | app/net/surguy/runnertrack/scraper/CopenhagenMarathonScraper.scala | Scala | agpl-3.0 | 2,962 |
/**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bdgenomics.avocado.algorithms.hmm
import org.bdgenomics.adam.algorithms.prefixtrie.DNAPrefixTrie
import org.bdgenomics.adam.util.PhredUtils
import scala.annotation.tailrec
import scala.math.log10
class FastAligner(reference: String, rLen: Int) extends Aligner {
// cut into a trie
protected val trie = DNAPrefixTrie(reference.sliding(rLen)
.zipWithIndex
.toMap)
protected val refLen = reference.length
protected val matchSeq = "M" * rLen
private def score(stat: (Option[Int], Int), testQualities: String, testSequence: String): Alignment = {
val (mismatchPos, alignPos) = stat
val quals = testQualities.toArray
.zipWithIndex
val (eQual, mQualArray, stateSeq) = if (mismatchPos.isDefined) {
val mp = mismatchPos.get
(log10(PhredUtils.phredToErrorProbability(quals(mp)._1.toInt - 33)),
quals.filter(kv => kv._2 != mp),
("M" * mp) + "X" + ("M" * (rLen - mp - 1)))
} else {
(0.0, quals, matchSeq)
}
val qual = eQual + mQualArray.map(c => log10(PhredUtils.phredToSuccessProbability(c._1.toInt - 33)))
.reduce(_ + _)
val alignedSequence = ("_" * alignPos) + testSequence + ("_" * (refLen - alignPos - rLen))
val stateSequence = ("P" * alignPos) + stateSeq + ("P" * (refLen - alignPos - rLen))
new Alignment(qual, 0.0, reference, alignedSequence, stateSequence)
}
/**
* Aligns sequences.
*
* @param refSequence Reference sequence over the active region.
* @param testSequence Sequence being scored.
* @param testQualities String of qualities. Not currently used.
* @return Alignment which stores the aligned sequences and likelihoods
*/
def alignSequences(refSequence: String, testSequence: String, testQualities: String): Alignment = {
@tailrec def missedAlignmentHelper(pos: Iterator[Int], testSeq: String): (Option[Int], Int) = {
if (!pos.hasNext) {
throw new IllegalArgumentException("Couldn't find alignment")
} else {
val p = pos.next
val entries = trie.search(testSeq.take(p) + "*" + testSeq.drop(p + 1))
if (!entries.isEmpty) {
(Some(p), entries.head._2)
} else {
missedAlignmentHelper(pos, testSeq)
}
}
}
val alignmentStat = trie.getIfExists(testSequence)
.fold(missedAlignmentHelper(testQualities.toSeq
.zipWithIndex
.sortBy(kv => kv._1)
.map(kv => kv._2)
.toIterator, testSequence))(p => (None.asInstanceOf[Option[Int]], p))
score(alignmentStat, testQualities, testSequence)
}
}
| tdanford/avocado | avocado-core/src/main/scala/org/bdgenomics/avocado/algorithms/hmm/FastAligner.scala | Scala | apache-2.0 | 3,371 |
package com.featurefm.riversong.client
import akka.actor.ActorSystem
import akka.event.Logging
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.{HttpRequest, HttpResponse}
import akka.stream.scaladsl.{Flow, Sink, Source}
import scala.concurrent.Future
/**
* Created by yardena on 11/1/15.
*/
class HttpClient private (flow: => Flow[HttpRequest, HttpResponse, Any], host: String, port: Int)(implicit val system: ActorSystem)
extends HttpClientInterface {
protected val log = Logging(system, getClass)
lazy val name: String = s"$host:$port"
def send(request: HttpRequest)(implicit naming: NamedHttpRequest): Future[HttpResponse] = {
Source.single(request).via(flow).runWith(Sink.head)
}
}
object HttpClient extends HttpClientFactory[HttpClient] with MetricImplicits {
def http(host: String, port: Int = 80)(implicit system: ActorSystem) = {
require(host.startsWith("http://") || host.indexOf("://") < 0, "Protocol must be HTTP")
new HttpClient(Http().outgoingConnection(host, port), host, port)
}
def https(host: String, port: Int = 443)(implicit system: ActorSystem) = {
require(host.startsWith("https://") || host.indexOf("://") < 0, "Protocol must be HTTPS")
new HttpClient(Http().outgoingConnectionHttps(host, port), host, port)
}
}
| ListnPlay/RiverSong | src/main/scala/com/featurefm/riversong/client/HttpClient.scala | Scala | mit | 1,299 |
/**
* Copyright (c) 2002-2012 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.pipes.matching
import scala.collection.JavaConverters._
import org.neo4j.graphdb.traversal.{TraversalDescription, Evaluators}
import org.neo4j.graphdb._
import org.neo4j.kernel.{Uniqueness, Traversal}
import org.neo4j.cypher.internal.commands.Predicate
import org.neo4j.cypher.internal.symbols._
import scala.Some
import org.neo4j.cypher.internal.symbols.RelationshipType
class PatternRelationship(key: String,
val startNode: PatternNode,
val endNode: PatternNode,
val relTypes: Seq[String],
val dir: Direction,
val optional: Boolean,
val predicate: Predicate)
extends PatternElement(key) {
def identifiers2: Map[String, CypherType] = Map(startNode.key -> NodeType(), endNode.key -> NodeType(), key -> RelationshipType())
def getOtherNode(node: PatternNode) = if (startNode == node) endNode else startNode
lazy val neo4jRelTypes = relTypes.map(t => DynamicRelationshipType.withName(t))
def getGraphRelationships(node: PatternNode, realNode: Node): Seq[GraphRelationship] = {
val result = (if (relTypes.isEmpty) {
realNode.getRelationships(getDirection(node))
} else {
realNode.getRelationships(getDirection(node), neo4jRelTypes: _*)
}).asScala.toStream.map(new SingleGraphRelationship(_))
if (startNode == endNode)
result.filter(r => r.getOtherNode(realNode) == realNode)
else
result
}
protected def getDirection(node: PatternNode): Direction = {
dir match {
case Direction.OUTGOING => if (node == startNode) Direction.OUTGOING else Direction.INCOMING
case Direction.INCOMING => if (node == endNode) Direction.OUTGOING else Direction.INCOMING
case Direction.BOTH => Direction.BOTH
}
}
override def equals(other: Any): Boolean = other match {
case that: PatternRelationship => this.key == that.key
case _ => false
}
override def toString = key
def traverse[T](shouldFollow: (PatternElement) => Boolean,
visitNode: (PatternNode, T) => T,
visitRelationship: (PatternRelationship, T) => T,
data: T,
comingFrom: PatternNode,
path: Seq[PatternElement]) {
if (!path.contains(this)) {
val moreData = visitRelationship(this, data)
val otherNode = getOtherNode(comingFrom)
if (shouldFollow(otherNode)) {
otherNode.traverse(shouldFollow, visitNode, visitRelationship, moreData, path :+ this)
}
}
}
def traverse[T](shouldFollow: (PatternElement) => Boolean,
visitNode: (PatternNode, T) => T,
visitRelationship: (PatternRelationship, T) => T,
data: T,
path: Seq[PatternElement]) {
if (!path.contains(this)) {
val moreData = visitRelationship(this, data)
Seq(startNode, endNode).filter(shouldFollow).foreach(n => n.traverse(shouldFollow, visitNode, visitRelationship, moreData, path :+ this))
}
}
}
class VariableLengthPatternRelationship(pathName: String,
val start: PatternNode,
val end: PatternNode,
val relIterable: Option[String],
minHops: Option[Int],
maxHops: Option[Int],
relType: Seq[String],
dir: Direction,
optional: Boolean,
predicate: Predicate)
extends PatternRelationship(pathName, start, end, relType, dir, optional, predicate) {
override def identifiers2: Map[String, CypherType] =
Map(startNode.key -> NodeType(),
endNode.key -> NodeType(),
key -> new CollectionType(RelationshipType())) ++ relIterable.map(_ -> new CollectionType(RelationshipType())).toMap
override def getGraphRelationships(node: PatternNode, realNode: Node): Seq[GraphRelationship] = {
val depthEval = (minHops, maxHops) match {
case (None, None) => Evaluators.fromDepth(1)
case (Some(min), None) => Evaluators.fromDepth(min)
case (None, Some(max)) => Evaluators.includingDepths(1, max)
case (Some(min), Some(max)) => Evaluators.includingDepths(min, max)
}
val baseTraversalDescription: TraversalDescription = Traversal.description()
.evaluator(depthEval)
.uniqueness(Uniqueness.RELATIONSHIP_PATH)
val traversalDescription = if (relType.isEmpty) {
baseTraversalDescription.expand(Traversal.expanderForAllTypes(getDirection(node)))
} else {
val emptyExpander = Traversal.emptyExpander()
val dir = getDirection(node)
val expander = relType.foldLeft(emptyExpander) {
case (e, t) => e.add(DynamicRelationshipType.withName(t), dir)
}
baseTraversalDescription.expand(expander)
}
traversalDescription.traverse(realNode).asScala.toStream.map(p => VariableLengthGraphRelationship(p))
}
}
| dksaputra/community | cypher/src/main/scala/org/neo4j/cypher/internal/pipes/matching/PatternRelationship.scala | Scala | gpl-3.0 | 6,035 |
package com.mogproject.mogami.core.move
import com.mogproject.mogami._
import com.mogproject.mogami.core.io._
import com.mogproject.mogami.core.io.kif.{Ki2Factory, Ki2Like}
import com.mogproject.mogami.core.move.Movement.{Dropped, Movement}
import com.mogproject.mogami.core.state.State
import com.mogproject.mogami.util.Implicits._
import scala.annotation.tailrec
import scala.util.{Failure, Success, Try}
/**
*
*/
case class MoveBuilderKi2(player: Player,
to: Option[Square],
oldPtype: Ptype,
movement: Option[Movement],
promote: Option[Boolean]) extends MoveBuilder with Ki2Like {
/**
* @param state state
* @param moveTo move to
* @return None if failed to find 'from'
* Some(None) from hand
* Some(Some(sq)) from board
*/
protected[move] def findMoveFrom(state: State, moveTo: Square): Option[Option[Square]] = movement match {
case None =>
// calculate the from position from the state
val boardCandidate = state.attackBBOnBoard(player).filter { case (sq, bb) => state.board.get(sq).exists(_.ptype == oldPtype) && bb.get(moveTo) }
val canDrop = oldPtype.isHandType && state.attackBBInHand.get(Hand(player, oldPtype)).exists(_.get(moveTo))
(boardCandidate.size, canDrop) match {
case (0, true) => Some(None) // drop
case (1, _) => Some(Some(boardCandidate.head._1)) // move
case _ => None // ambiguous
}
case Some(Dropped) => Some(None)
case Some(mvmt) =>
state.attackBBOnBoard(player).find { case (sq, bb) =>
state.board(sq).ptype == oldPtype && bb.get(moveTo) && getMovement(state, Some(sq), moveTo, oldPtype).exists(compareMovement(_, mvmt))
}.map { case (sq, _) => Some(sq) }
}
private[this] def compareMovement(a: Movement, b: Movement): Boolean = {
/** @note allows ambiguity between Upward and Vertical */
a == b || a == Movement.Upward && b == Movement.Vertical
}
override def toMove(state: State, lastMoveTo: Option[Square] = None, isStrict: Boolean): Option[Move] = for {
moveTo <- to match {
case Some(t) => Some(t)
case None => lastMoveTo
}
from <- findMoveFrom(state, moveTo)
pr = promote.contains(true)
isSame = to.isEmpty
newPtype = pr.fold(oldPtype.promoted, oldPtype)
captured = state.board.get(moveTo).map(_.ptype).filter(_ != KING)
isCheck = isCheckMove(state, from, moveTo, newPtype)
} yield {
Move(player, from, moveTo, newPtype, pr, isSame, movement, captured, isCheck, None, isStrict)
}
override def toKi2String: String = Seq(
player.toSymbolString(false),
to.map(_.toKifString).getOrElse("同"),
oldPtype.toKifString, // note: "龍" is used
movement.map(_.kifString).getOrElse(""),
promote.map(_.fold("成", "不成")).getOrElse("")
).mkString
}
object MoveBuilderKi2 extends Ki2Factory[MoveBuilderKi2] {
private[this] def parseNotation(lineNo: LineNo, s: String): MoveBuilderKi2 = {
def tryTwice[T](parser: NonEmptyLines => T)(s: String): (Int, T) = Try(parser(NonEmptyLines(lineNo, s.take(1)))) match {
case Success(p) => (1, p)
case Failure(_) => (2, parser(NonEmptyLines(lineNo, s.take(2))))
}
@tailrec
def f(stage: Int = 0,
rest: String,
player: Player = BLACK,
to: Option[Square] = None,
oldPtype: Ptype = PAWN,
movement: Option[Movement] = None,
promote: Option[Boolean] = None): MoveBuilderKi2 = (stage, rest) match {
case (n, "") if n >= 3 => MoveBuilderKi2(player, to, oldPtype, movement, promote)
case (_, "") => throw new RecordFormatException(lineNo, s"incomplete move string: ${s}")
case (0, _) => // evaluate player
val pl = Player.constructor.find(p => rest.startsWith(p.toSymbolString(false))).getOrElse(
throw new RecordFormatException(lineNo, s"invalid player expression: ${rest.head}")
)
f(1, rest.tail, pl, to, oldPtype, movement, promote)
case (1, _) => //evaluate destination
val (num, t) = if (rest.startsWith("同")) (1, None) else (2, Some(Square.parseKifString(NonEmptyLines(lineNo, rest.take(2)))))
f(2, rest.drop(num), player, t, oldPtype, movement, promote)
case (2, _) => // evaluate piece type
val (num, p) = tryTwice(Ptype.parseKifString)(rest)
f(3, rest.drop(num), player, to, p, movement, promote)
case (3, _) => //evaluate promote
val (num, pr) = Seq("不成" -> false, "生" -> false, "成" -> true) // 不成 must be prior to 成
.find { case (k, _) => rest.endsWith(k) }
.map[(Int, Option[Boolean])] { case (k, v) => (k.length, Some(v)) }
.getOrElse((0, None))
f(4, rest.dropRight(num), player, to, oldPtype, movement, pr)
case (4, _) => // evaluate movement
Movement.find(rest) match {
case Some(m) => f(5, "", player, to, oldPtype, Some(m), promote)
case None => throw new RecordFormatException(lineNo, s"invalid movement expression: ${rest}")
}
}
f(0, s)
}
override def parseKi2String(nel: NonEmptyLines): MoveBuilderKi2 = {
if (nel.lines.length >= 2) {
throw new RecordFormatException(nel.lines(1)._2, s"too long move expression: ${nel.lines(1)._1}")
} else {
val (mv, n) = nel.lines.head
parseNotation(n, mv)
}
}
}
| mogproject/mog-core-scala | shared/src/main/scala/com/mogproject/mogami/core/move/MoveBuilderKi2.scala | Scala | apache-2.0 | 5,458 |
/* __ *\\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\\___/_/ |_/____/_/ | | **
** |/ **
\\* */
package scala
package util
/** An implementation of Austin Appleby's MurmurHash 3.0 algorithm
* (32 bit version); reference: https://github.com/aappleby/smhasher
*
* This is the hash used by collections and case classes (including
* tuples).
*
* @author Rex Kerr
* @version 2.9
* @since 2.9
*/
import java.lang.Integer.{ rotateLeft => rotl }
import scala.collection.Iterator
/** A class designed to generate well-distributed non-cryptographic
* hashes. It is designed to be passed to a collection's foreach method,
* or can take individual hash values with append. Its own hash code is
* set equal to the hash code of whatever it is hashing.
*/
@deprecated("use the object MurmurHash3 instead", "2.10.0")
class MurmurHash[@specialized(Int,Long,Float,Double) T](seed: Int) extends (T => Unit) {
import MurmurHash._
private var h = startHash(seed)
private var c = hiddenMagicA
private var k = hiddenMagicB
private var hashed = false
private var hashvalue = h
/** Begin a new hash using the same seed. */
def reset() {
h = startHash(seed)
c = hiddenMagicA
k = hiddenMagicB
hashed = false
}
/** Incorporate the hash value of one item. */
def apply(t: T) {
h = extendHash(h,t.##,c,k)
c = nextMagicA(c)
k = nextMagicB(k)
hashed = false
}
/** Incorporate a known hash value. */
def append(i: Int) {
h = extendHash(h,i,c,k)
c = nextMagicA(c)
k = nextMagicB(k)
hashed = false
}
/** Retrieve the hash value */
def hash = {
if (!hashed) {
hashvalue = finalizeHash(h)
hashed = true
}
hashvalue
}
override def hashCode = hash
}
/** An object designed to generate well-distributed non-cryptographic
* hashes. It is designed to hash a collection of integers; along with
* the integers to hash, it generates two magic streams of integers to
* increase the distribution of repetitive input sequences. Thus,
* three methods need to be called at each step (to start and to
* incorporate a new integer) to update the values. Only one method
* needs to be called to finalize the hash.
*/
@deprecated("use the object MurmurHash3 instead", "2.10.0")
// NOTE: Used by SBT 0.13.0-M2 and below
object MurmurHash {
// Magic values used for MurmurHash's 32 bit hash.
// Don't change these without consulting a hashing expert!
final private val visibleMagic = 0x971e137b
final private val hiddenMagicA = 0x95543787
final private val hiddenMagicB = 0x2ad7eb25
final private val visibleMixer = 0x52dce729
final private val hiddenMixerA = 0x7b7d159c
final private val hiddenMixerB = 0x6bce6396
final private val finalMixer1 = 0x85ebca6b
final private val finalMixer2 = 0xc2b2ae35
// Arbitrary values used for hashing certain classes
final private val seedString = 0xf7ca7fd2
final private val seedArray = 0x3c074a61
/** The first 23 magic integers from the first stream are stored here */
val storedMagicA =
Iterator.iterate(hiddenMagicA)(nextMagicA).take(23).toArray
/** The first 23 magic integers from the second stream are stored here */
val storedMagicB =
Iterator.iterate(hiddenMagicB)(nextMagicB).take(23).toArray
/** Begin a new hash with a seed value. */
def startHash(seed: Int) = seed ^ visibleMagic
/** The initial magic integers in the first stream. */
def startMagicA = hiddenMagicA
/** The initial magic integer in the second stream. */
def startMagicB = hiddenMagicB
/** Incorporates a new value into an existing hash.
*
* @param hash the prior hash value
* @param value the new value to incorporate
* @param magicA a magic integer from the stream
* @param magicB a magic integer from a different stream
* @return the updated hash value
*/
def extendHash(hash: Int, value: Int, magicA: Int, magicB: Int) = {
(hash ^ rotl(value*magicA,11)*magicB)*3 + visibleMixer
}
/** Given a magic integer from the first stream, compute the next */
def nextMagicA(magicA: Int) = magicA*5 + hiddenMixerA
/** Given a magic integer from the second stream, compute the next */
def nextMagicB(magicB: Int) = magicB*5 + hiddenMixerB
/** Once all hashes have been incorporated, this performs a final mixing */
def finalizeHash(hash: Int) = {
var i = (hash ^ (hash>>>16))
i *= finalMixer1
i ^= (i >>> 13)
i *= finalMixer2
i ^= (i >>> 16)
i
}
/** Compute a high-quality hash of an array */
def arrayHash[@specialized T](a: Array[T]) = {
var h = startHash(a.length * seedArray)
var c = hiddenMagicA
var k = hiddenMagicB
var j = 0
while (j < a.length) {
h = extendHash(h, a(j).##, c, k)
c = nextMagicA(c)
k = nextMagicB(k)
j += 1
}
finalizeHash(h)
}
/** Compute a high-quality hash of a string */
def stringHash(s: String) = {
var h = startHash(s.length * seedString)
var c = hiddenMagicA
var k = hiddenMagicB
var j = 0
while (j+1 < s.length) {
val i = (s.charAt(j)<<16) + s.charAt(j+1)
h = extendHash(h,i,c,k)
c = nextMagicA(c)
k = nextMagicB(k)
j += 2
}
if (j < s.length) h = extendHash(h,s.charAt(j).toInt,c,k)
finalizeHash(h)
}
/** Compute a hash that is symmetric in its arguments--that is,
* where the order of appearance of elements does not matter.
* This is useful for hashing sets, for example.
*/
def symmetricHash[T](xs: scala.collection.TraversableOnce[T], seed: Int) = {
var a,b,n = 0
var c = 1
xs.seq.foreach(i => {
val h = i.##
a += h
b ^= h
if (h != 0) c *= h
n += 1
})
var h = startHash(seed * n)
h = extendHash(h, a, storedMagicA(0), storedMagicB(0))
h = extendHash(h, b, storedMagicA(1), storedMagicB(1))
h = extendHash(h, c, storedMagicA(2), storedMagicB(2))
finalizeHash(h)
}
}
| jvican/scala | src/library/scala/util/MurmurHash.scala | Scala | bsd-3-clause | 6,420 |
#!/usr/bin/env scala -classpath bin -deprecation -nocompdaemon -Dfile.encoding=UTF-8
//!#
// finds models whose graphics windows' saved sizes don't match the size you should get if you
// compute from the saved patch size and screen-edge-x/y
import sys.process.Process
for{path <- Process("find models -name *.nlogo").lines
lines = io.Source.fromFile(path).getLines.toSeq}
{
val version = lines.find(_.matches("""NetLogo [0-9]\..*""")).get.drop("NetLogo ".size)
val graphics = lines.dropWhile(_ != "GRAPHICS-WINDOW").takeWhile(_ != "")
val (x1, y1, x2, y2) = (graphics(1).toInt, graphics(2).toInt, graphics(3).toInt, graphics(4).toInt)
val patchSize = graphics(7).toDouble
val maxx = if(graphics.size > 18) graphics(18).toInt else graphics(5).toInt
val maxy = if(graphics.size > 20) graphics(20).toInt else graphics(6).toInt
val minx = if(graphics.size > 17) graphics(17).toInt else -maxx
val miny = if(graphics.size > 19) graphics(19).toInt else -maxy
val (worldWidth, worldHeight) = (maxx - minx + 1, maxy - miny + 1)
val (extraWidth, extraHeight) =
// take control strip and gray border into account
if(List("1.3", "2.", "3.", "4.", "5.").exists(version.startsWith(_))) (10, 31) else (0, 0)
val computedWidth = extraWidth + patchSize * worldWidth
val computedHeight = extraHeight + patchSize * worldHeight
if(maxx < 0 || minx > 0 || maxy < 0 || miny > 0)
println(path + " (" + version + "): bad world dimensions: " + (maxx, minx, maxy, miny))
if(computedWidth != x2 - x1)
println(path + " (" + version + "): computed width " + computedWidth + ", actual width " + (x2 - x1))
if(computedHeight != y2 - y1)
println(path + " (" + version + "): computed height " + computedHeight + ", actual height " + (y2 - y1))
}
| oscarmartinezm/Galapagos | public/modelslib/bin/findbadgraphicswindows.scala | Scala | gpl-2.0 | 1,773 |
/*
* Copyright (C) 2009-2018 Lightbend Inc. <https://www.lightbend.com>
*/
package scalaguide.async.scalastream
import java.io.{ByteArrayInputStream, InputStream}
import javax.inject.Inject
import akka.stream.scaladsl.{FileIO, Source, StreamConverters}
import akka.util.ByteString
import play.api.http.HttpEntity
import play.api.mvc.{BaseController, ControllerComponents, ResponseHeader, Result}
import scala.concurrent.ExecutionContext
class ScalaStreamController @Inject()(val controllerComponents: ControllerComponents)(implicit executionContext: ExecutionContext) extends BaseController {
//#by-default
def index = Action {
Ok("Hello World")
}
//#by-default
//#by-default-http-entity
def action = Action {
Result(
header = ResponseHeader(200, Map.empty),
body = HttpEntity.Strict(ByteString("Hello world"), Some("text/plain"))
)
}
//#by-default-http-entity
private def createSourceFromFile = {
//#create-source-from-file
val file = new java.io.File("/tmp/fileToServe.pdf")
val path: java.nio.file.Path = file.toPath
val source: Source[ByteString, _] = FileIO.fromPath(path)
//#create-source-from-file
}
//#streaming-http-entity
def streamed = Action {
val file = new java.io.File("/tmp/fileToServe.pdf")
val path: java.nio.file.Path = file.toPath
val source: Source[ByteString, _] = FileIO.fromPath(path)
Result(
header = ResponseHeader(200, Map.empty),
body = HttpEntity.Streamed(source, None, Some("application/pdf"))
)
}
//#streaming-http-entity
//#streaming-http-entity-with-content-length
def streamedWithContentLength = Action {
val file = new java.io.File("/tmp/fileToServe.pdf")
val path: java.nio.file.Path = file.toPath
val source: Source[ByteString, _] = FileIO.fromPath(path)
val contentLength = Some(file.length())
Result(
header = ResponseHeader(200, Map.empty),
body = HttpEntity.Streamed(source, contentLength, Some("application/pdf"))
)
}
//#streaming-http-entity-with-content-length
//#serve-file
def file = Action {
Ok.sendFile(new java.io.File("/tmp/fileToServe.pdf"))
}
//#serve-file
//#serve-file-with-name
def fileWithName = Action {
Ok.sendFile(
content = new java.io.File("/tmp/fileToServe.pdf"),
fileName = _ => "termsOfService.pdf"
)
}
//#serve-file-with-name
//#serve-file-attachment
def fileAttachment = Action {
Ok.sendFile(
content = new java.io.File("/tmp/fileToServe.pdf"),
inline = false
)
}
//#serve-file-attachment
private def getDataStream: InputStream = new ByteArrayInputStream("hello".getBytes())
private def sourceFromInputStream = {
//#create-source-from-input-stream
val data = getDataStream
val dataContent: Source[ByteString, _] = StreamConverters.fromInputStream(() => data)
//#create-source-from-input-stream
}
//#chunked-from-input-stream
def chunked = Action {
val data = getDataStream
val dataContent: Source[ByteString, _] = StreamConverters.fromInputStream(() => data)
Ok.chunked(dataContent)
}
//#chunked-from-input-stream
//#chunked-from-source
def chunkedFromSource = Action {
val source = Source.apply(List("kiki", "foo", "bar"))
Ok.chunked(source)
}
//#chunked-from-source
}
| Shenker93/playframework | documentation/manual/working/scalaGuide/main/async/code/scalaguide/async/scalastream/ScalaStream.scala | Scala | apache-2.0 | 3,335 |
/* __ *\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2006-2015, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
package scala
package util
import scala.reflect.ClassTag
import scala.math.Ordering
/** The `Sorting` object provides convenience wrappers for `java.util.Arrays.sort`.
* Methods that defer to `java.util.Arrays.sort` say that they do or under what
* conditions that they do.
*
* `Sorting` also implements a general-purpose quicksort and stable (merge) sort
* for those cases where `java.util.Arrays.sort` could only be used at the cost
* of a large memory penalty. If performance rather than memory usage is the
* primary concern, one may wish to find alternate strategies to use
* `java.util.Arrays.sort` directly e.g. by boxing primitives to use
* a custom ordering on them.
*
* `Sorting` provides methods where you can provide a comparison function, or
* can request a sort of items that are [[scala.math.Ordered]] or that
* otherwise have an implicit or explicit [[scala.math.Ordering]].
*
* Note also that high-performance non-default sorts for numeric types
* are not provided. If this is required, it is advisable to investigate
* other libraries that cover this use case.
*
* @author Ross Judson
* @author Adriaan Moors
* @author Rex Kerr
* @version 1.1
*/
object Sorting {
/** Sort an array of Doubles using `java.util.Arrays.sort`. */
def quickSort(a: Array[Double]): Unit = java.util.Arrays.sort(a)
/** Sort an array of Ints using `java.util.Arrays.sort`. */
def quickSort(a: Array[Int]): Unit = java.util.Arrays.sort(a)
/** Sort an array of Floats using `java.util.Arrays.sort`. */
def quickSort(a: Array[Float]): Unit = java.util.Arrays.sort(a)
private final val qsortThreshold = 16
/** Sort array `a` with quicksort, using the Ordering on its elements.
* This algorithm sorts in place, so no additional memory is used aside from
* what might be required to box individual elements during comparison.
*/
def quickSort[K: Ordering](a: Array[K]): Unit = {
// Must have iN >= i0 or math will fail. Also, i0 >= 0.
def inner(a: Array[K], i0: Int, iN: Int, ord: Ordering[K]): Unit = {
if (iN - i0 < qsortThreshold) insertionSort(a, i0, iN, ord)
else {
val iK = (i0 + iN) >>> 1 // Unsigned div by 2
// Find index of median of first, central, and last elements
var pL =
if (ord.compare(a(i0), a(iN - 1)) <= 0)
if (ord.compare(a(i0), a(iK)) < 0)
if (ord.compare(a(iN - 1), a(iK)) < 0) iN - 1 else iK
else i0
else
if (ord.compare(a(i0), a(iK)) < 0) i0
else
if (ord.compare(a(iN - 1), a(iK)) <= 0) iN - 1
else iK
val pivot = a(pL)
// pL is the start of the pivot block; move it into the middle if needed
if (pL != iK) { a(pL) = a(iK); a(iK) = pivot; pL = iK }
// Elements equal to the pivot will be in range pL until pR
var pR = pL + 1
// Items known to be less than pivot are below iA (range i0 until iA)
var iA = i0
// Items known to be greater than pivot are at or above iB (range iB until iN)
var iB = iN
// Scan through everything in the buffer before the pivot(s)
while (pL - iA > 0) {
val current = a(iA)
ord.compare(current, pivot) match {
case 0 =>
// Swap current out with pivot block
a(iA) = a(pL - 1)
a(pL - 1) = current
pL -= 1
case x if x < 0 =>
// Already in place. Just update indices.
iA += 1
case _ if iB > pR =>
// Wrong side. There's room on the other side, so swap
a(iA) = a(iB - 1)
a(iB - 1) = current
iB -= 1
case _ =>
// Wrong side and there is no room. Swap by rotating pivot block.
a(iA) = a(pL - 1)
a(pL - 1) = a(pR - 1)
a(pR - 1) = current
pL -= 1
pR -= 1
iB -= 1
}
}
// Get anything remaining in buffer after the pivot(s)
while (iB - pR > 0) {
val current = a(iB - 1)
ord.compare(current, pivot) match {
case 0 =>
// Swap current out with pivot block
a(iB - 1) = a(pR)
a(pR) = current
pR += 1
case x if x > 0 =>
// Already in place. Just update indices.
iB -= 1
case _ =>
// Wrong side and we already know there is no room. Swap by rotating pivot block.
a(iB - 1) = a(pR)
a(pR) = a(pL)
a(pL) = current
iA += 1
pL += 1
pR += 1
}
}
// Use tail recursion on large half (Sedgewick's method) so we don't blow up the stack if pivots are poorly chosen
if (iA - i0 < iN - iB) {
inner(a, i0, iA, ord) // True recursion
inner(a, iB, iN, ord) // Should be tail recursion
}
else {
inner(a, iB, iN, ord) // True recursion
inner(a, i0, iA, ord) // Should be tail recursion
}
}
}
inner(a, 0, a.length, implicitly[Ordering[K]])
}
private final val mergeThreshold = 32
// Ordering[T] might be slow especially for boxed primitives, so use binary search variant of insertion sort
// Caller must pass iN >= i0 or math will fail. Also, i0 >= 0.
private def insertionSort[@specialized T](a: Array[T], i0: Int, iN: Int, ord: Ordering[T]): Unit = {
val n = iN - i0
if (n < 2) return
if (ord.compare(a(i0), a(i0+1)) > 0) {
val temp = a(i0)
a(i0) = a(i0+1)
a(i0+1) = temp
}
var m = 2
while (m < n) {
// Speed up already-sorted case by checking last element first
val next = a(i0 + m)
if (ord.compare(next, a(i0+m-1)) < 0) {
var iA = i0
var iB = i0 + m - 1
while (iB - iA > 1) {
val ix = (iA + iB) >>> 1 // Use bit shift to get unsigned div by 2
if (ord.compare(next, a(ix)) < 0) iB = ix
else iA = ix
}
val ix = iA + (if (ord.compare(next, a(iA)) < 0) 0 else 1)
var i = i0 + m
while (i > ix) {
a(i) = a(i-1)
i -= 1
}
a(ix) = next
}
m += 1
}
}
// Caller is required to pass iN >= i0, else math will fail. Also, i0 >= 0.
private def mergeSort[@specialized T: ClassTag](a: Array[T], i0: Int, iN: Int, ord: Ordering[T], scratch: Array[T] = null): Unit = {
if (iN - i0 < mergeThreshold) insertionSort(a, i0, iN, ord)
else {
val iK = (i0 + iN) >>> 1 // Bit shift equivalent to unsigned math, no overflow
val sc = if (scratch eq null) new Array[T](iK - i0) else scratch
mergeSort(a, i0, iK, ord, sc)
mergeSort(a, iK, iN, ord, sc)
mergeSorted(a, i0, iK, iN, ord, sc)
}
}
// Must have 0 <= i0 < iK < iN
private def mergeSorted[@specialized T](a: Array[T], i0: Int, iK: Int, iN: Int, ord: Ordering[T], scratch: Array[T]): Unit = {
// Check to make sure we're not already in order
if (ord.compare(a(iK-1), a(iK)) > 0) {
var i = i0
val jN = iK - i0
var j = 0
while (i < iK) {
scratch (j) = a(i)
i += 1
j += 1
}
var k = i0
j = 0
while (i < iN && j < jN) {
if (ord.compare(a(i), scratch(j)) < 0) { a(k) = a(i); i += 1 }
else { a(k) = scratch(j); j += 1 }
k += 1
}
while (j < jN) { a(k) = scratch(j); j += 1; k += 1 }
// Don't need to finish a(i) because it's already in place, k = i
}
}
// Why would you even do this?
private def booleanSort(a: Array[Boolean]): Unit = {
var i = 0
var n = 0
while (i < a.length) {
if (!a(i)) n += 1
i += 1
}
i = 0
while (i < n) {
a(i) = false
i += 1
}
while (i < a.length) {
a(i) = true
i += 1
}
}
// TODO: add upper bound: T <: AnyRef, propagate to callers below (not binary compatible)
// Maybe also rename all these methods to `sort`.
@inline private def sort[T](a: Array[T], ord: Ordering[T]): Unit = a match {
case _: Array[AnyRef] =>
// Note that runtime matches are covariant, so could actually be any Array[T] s.t. T is not primitive (even boxed value classes)
if (a.length > 1 && (ord eq null)) throw new NullPointerException("Ordering")
java.util.Arrays.sort(a, ord)
case a: Array[Int] => if (ord eq Ordering.Int) java.util.Arrays.sort(a) else mergeSort[Int](a, 0, a.length, ord)
case a: Array[Double] => mergeSort[Double](a, 0, a.length, ord) // Because not all NaNs are identical, stability is meaningful!
case a: Array[Long] => if (ord eq Ordering.Long) java.util.Arrays.sort(a) else mergeSort[Long](a, 0, a.length, ord)
case a: Array[Float] => mergeSort[Float](a, 0, a.length, ord) // Because not all NaNs are identical, stability is meaningful!
case a: Array[Char] => if (ord eq Ordering.Char) java.util.Arrays.sort(a) else mergeSort[Char](a, 0, a.length, ord)
case a: Array[Byte] => if (ord eq Ordering.Byte) java.util.Arrays.sort(a) else mergeSort[Byte](a, 0, a.length, ord)
case a: Array[Short] => if (ord eq Ordering.Short) java.util.Arrays.sort(a) else mergeSort[Short](a, 0, a.length, ord)
case a: Array[Boolean] => if (ord eq Ordering.Boolean) booleanSort(a) else mergeSort[Boolean](a, 0, a.length, ord)
// Array[Unit] is matched as an Array[AnyRef] due to covariance in runtime matching. Not worth catching it as a special case.
case null => throw new NullPointerException
}
// TODO: remove unnecessary ClassTag (not binary compatible)
/** Sort array `a` using the Ordering on its elements, preserving the original ordering where possible. Uses `java.util.Arrays.sort` unless `K` is a primitive type. */
def stableSort[K: ClassTag: Ordering](a: Array[K]): Unit = sort(a, Ordering[K])
// TODO: Remove unnecessary ClassTag (not binary compatible)
// TODO: make this fast for primitive K (could be specialized if it didn't go through Ordering)
/** Sort array `a` using function `f` that computes the less-than relation for each element. Uses `java.util.Arrays.sort` unless `K` is a primitive type. */
def stableSort[K: ClassTag](a: Array[K], f: (K, K) => Boolean): Unit = sort(a, Ordering fromLessThan f)
/** A sorted Array, using the Ordering for the elements in the sequence `a`. Uses `java.util.Arrays.sort` unless `K` is a primitive type. */
def stableSort[K: ClassTag: Ordering](a: Seq[K]): Array[K] = {
val ret = a.toArray
sort(ret, Ordering[K])
ret
}
// TODO: make this fast for primitive K (could be specialized if it didn't go through Ordering)
/** A sorted Array, given a function `f` that computes the less-than relation for each item in the sequence `a`. Uses `java.util.Arrays.sort` unless `K` is a primitive type. */
def stableSort[K: ClassTag](a: Seq[K], f: (K, K) => Boolean): Array[K] = {
val ret = a.toArray
sort(ret, Ordering fromLessThan f)
ret
}
/** A sorted Array, given an extraction function `f` that returns an ordered key for each item in the sequence `a`. Uses `java.util.Arrays.sort` unless `K` is a primitive type. */
def stableSort[K: ClassTag, M: Ordering](a: Seq[K], f: K => M): Array[K] = {
val ret = a.toArray
sort(ret, Ordering[M] on f)
ret
}
}
| felixmulder/scala | src/library/scala/util/Sorting.scala | Scala | bsd-3-clause | 12,038 |
package model
import java.time.ZonedDateTime
import play.api.libs.json.{JsPath, Reads}
import play.api.libs.functional.syntax._
/** Encapsulates a TV programme. */
case class Programme(name: String, programmeId: Long, programmeType: String, startDateTime: ZonedDateTime,
runningTime: Int, seriesId: Long)
object Programme {
implicit val programmeReads: Reads[Programme] = (
(JsPath \ "name").read[String] and
(JsPath \ "programmeId").read[Long] and
(JsPath \ "type").read[String] and
(JsPath \ "startDateTime").read[ZonedDateTime] and
(JsPath \ "runningTime").read[Int] and
(JsPath \ "seriesId").read[Long]
)(Programme.apply _)
}
/** Encapsulates a TV channel with listing (possibly empty). */
case class Channel(name: String, id: Long, listing: Seq[Programme])
object Channel {
implicit val channelReads: Reads[Channel] = (
(JsPath \ "name").read[String] and
(JsPath \ "id").read[Long] and
(JsPath \ "listing").read[Seq[Programme]]//.orElse(Reads.pure(Nil))
)(Channel.apply _)
}
| chrisnappin/home-media-microservices | web-front-end/app/model/Programme.scala | Scala | apache-2.0 | 1,050 |
package io.dylemma.spac
package xml
package impl
class XmlParserOptionalAttribute[N: AsQName](attributeName: N) extends Parser.Stateless[XmlEvent, Option[String]] {
def step(in: XmlEvent) = in.asElemStart match {
case Some(elem) => Left(elem.attr(attributeName))
case None => Right(this)
}
def finish() = None
override def toString = s"XmlParser.attrOpt(${AsQName.show(attributeName)})"
}
| dylemma/xml-spac | xml/src/main/scala/io/dylemma/spac/xml/impl/XmlParserOptionalAttribute.scala | Scala | mit | 398 |
package org.openapitools.models
import io.circe._
import io.finch.circe._
import io.circe.generic.semiauto._
import io.circe.java8.time._
import org.openapitools._
import org.openapitools.models.GithubOrganizationlinks
/**
*
* @param Underscoreclass
* @param Underscorelinks
* @param jenkinsOrganizationPipeline
* @param name
*/
case class GithubOrganization(Underscoreclass: Option[String],
Underscorelinks: Option[GithubOrganizationlinks],
jenkinsOrganizationPipeline: Option[Boolean],
name: Option[String]
)
object GithubOrganization {
/**
* Creates the codec for converting GithubOrganization from and to JSON.
*/
implicit val decoder: Decoder[GithubOrganization] = deriveDecoder
implicit val encoder: ObjectEncoder[GithubOrganization] = deriveEncoder
}
| cliffano/swaggy-jenkins | clients/scala-finch/generated/src/main/scala/org/openapitools/models/GithubOrganization.scala | Scala | mit | 858 |
package controllers
import db.{ShasDao, ShasWriteDao}
import io.flow.common.v0.models.UserReference
import io.flow.delta.v0.models.Sha
import io.flow.delta.v0.models.json._
import io.flow.play.controllers.FlowControllerComponents
import io.flow.postgresql.Authorization
import play.api.libs.json._
import play.api.mvc._
@javax.inject.Singleton
class Shas @javax.inject.Inject() (
helpers: Helpers,
shasDao: ShasDao,
shasWriteDao: ShasWriteDao,
val controllerComponents: ControllerComponents,
val flowControllerComponents: FlowControllerComponents
) extends BaseIdentifiedRestController {
def get(
id: Option[Seq[String]],
project: Option[String],
branch: Option[String],
hash: Option[String],
limit: Long,
offset: Long,
sort: String
) = Identified { request =>
helpers.withOrderBy(sort) { orderBy =>
Ok(
Json.toJson(
shasDao.findAll(
authorization(request),
ids = optionals(id),
projectId = project,
branch = branch,
hash = hash,
limit = Some(limit),
offset = offset,
orderBy = orderBy
)
)
)
}
}
def getById(id: String) = Identified { request =>
withSha(request.user, id) { sha =>
Ok(Json.toJson(sha))
}
}
def deleteById(id: String) = Identified { request =>
withSha(request.user, id) { sha =>
shasWriteDao.delete(request.user, sha)
NoContent
}
}
def withSha(user: UserReference, id: String)(
f: Sha => Result
): Result = {
shasDao.findById(Authorization.User(user.id), id) match {
case None => {
Results.NotFound
}
case Some(sha) => {
f(sha)
}
}
}
}
| flowcommerce/delta | api/app/controllers/Shas.scala | Scala | mit | 1,757 |
/**
* Copyright (C) 2011 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.fb
import org.orbeon.oxf.fb.XMLNames._
import org.orbeon.oxf.fr.FormRunner._
import org.orbeon.oxf.fr.NodeInfoCell._
import org.orbeon.oxf.fr.XMLNames._
import org.orbeon.oxf.util.StringUtils._
import org.orbeon.oxf.xforms.NodeInfoFactory
import org.orbeon.oxf.xforms.NodeInfoFactory.elementInfo
import org.orbeon.oxf.xforms.XFormsConstants.APPEARANCE_QNAME
import org.orbeon.oxf.xforms.action.XFormsAPI._
import org.orbeon.oxf.xforms.xbl.BindingDescriptor._
import org.orbeon.saxon.om.NodeInfo
import org.orbeon.scaxon.Implicits._
import org.orbeon.scaxon.NodeConversions._
import org.orbeon.scaxon.SimplePath._
import org.orbeon.xforms.XFormsId
import org.orbeon.oxf.util.CoreUtils._
trait ContainerOps extends ControlOps {
self: GridOps ⇒ // funky dependency, to resolve at some point
def containerById(containerId: String)(implicit ctx: FormBuilderDocContext): NodeInfo =
findContainerById(containerId).get
def findContainerById(containerId: String)(implicit ctx: FormBuilderDocContext): Option[NodeInfo] = {
// Support effective id, to make it easier to use from XForms (i.e. no need to call
// XFormsUtils.getStaticIdFromId every time)
val staticId = XFormsId.getStaticIdFromId(containerId)
findInViewTryIndex(ctx.formDefinitionRootElem, staticId) filter IsContainer
}
def findNestedContainers(containerElem: NodeInfo): Seq[NodeInfo] =
containerElem descendant FRContainerTest
def findNestedControls(containerElem: NodeInfo): Seq[NodeInfo] =
containerElem descendant CellTest child * filter IsControl
// Find all siblings of the given element with the given name, excepting the given element
def findSiblingsWithName(element: NodeInfo, siblingName: String): Seq[NodeInfo] =
element parent * child * filter
(_.name == siblingName) filterNot
(_ == element)
def getInitialIterationsAttribute(controlElem: NodeInfo): Option[String] =
controlElem attValueOpt InitialIterations flatMap trimAllToOpt
// Return all the container controls in the view
def getAllContainerControlsWithIds(inDoc: NodeInfo): Seq[NodeInfo] = getAllControlsWithIds(inDoc) filter IsContainer
def getAllContainerControls(inDoc: NodeInfo): Seq[NodeInfo] = getFormRunnerBodyElem(inDoc) descendant * filter IsContainer
// A container can be removed if it's not the last one at that level
def canDeleteContainer(containerElem: NodeInfo): Boolean =
containerElem sibling FRContainerTest nonEmpty
def containerPosition(containerId: String)(implicit ctx: FormBuilderDocContext): ContainerPosition = {
val container = containerById(containerId)
ContainerPosition(
findAncestorContainersLeafToRoot(container).headOption flatMap getControlNameOpt, // top-level container doesn't have a name
precedingSiblingOrSelfContainers(container).headOption map getControlName
)
}
// Delete the entire container and contained controls
def deleteContainerById(
canDelete : NodeInfo ⇒ Boolean,
containerId : String)(implicit
ctx : FormBuilderDocContext
): Option[UndoAction] = {
val container = containerById(containerId)
canDelete(container) option {
val undo =
UndoAction.DeleteContainer(
containerPosition(containerId),
ToolboxOps.controlOrContainerElemToXcv(container)
)
deleteContainer(container)
undo
}
}
def deleteContainer(containerElem: NodeInfo): Unit = {
implicit val ctx = FormBuilderDocContext()
// Find the new td to select if we are removing the currently selected td
val newCellToSelectOpt = findNewCellToSelect(containerElem descendant CellTest)
def recurse(container: NodeInfo): Seq[NodeInfo] = {
// NOTE: Deleting is tricky because NodeInfo is mutation-averse as it keeps a node's index, among others.
// So deleting a node under a given NodeInfo can cause the position of following siblings to be out of date
// and cause errors. So we delete from back to front. But a safer solution should be found.
// Go depth-first so we delete containers after all their content has been deleted
// NOTE: Use toList to make sure we are not lazy, otherwise items might be deleted as we go!
val children = childrenContainers(container).reverse.toList flatMap recurse
val gridContent =
if (IsGrid(container))
container descendant CellTest child * filter IsControl reverse
else
Nil
children ++ gridContent :+ container
}
// Start with top-level container
val controls = recurse(containerElem)
// Delete all controls in order
controls flatMap controlElementsToDelete foreach (delete(_))
// Update templates
updateTemplatesCheckContainers(findAncestorRepeatNames(containerElem).to[Set])
// Adjust selected td if needed
newCellToSelectOpt foreach selectCell
}
// Move a container based on a move function
def moveContainer(
containerElem : NodeInfo,
otherContainer : NodeInfo,
move : (NodeInfo, NodeInfo) ⇒ NodeInfo)(implicit
ctx : FormBuilderDocContext
): Unit = {
// Get names before moving the container
val nameOption = getControlNameOpt(containerElem)
val otherNameOption = getControlNameOpt(otherContainer)
val doc = containerElem.getDocumentRoot
// Move container control itself
move(containerElem, otherContainer)
// Try to move holders and binds based on name of other element
(nameOption, otherNameOption) match {
case (Some(name), Some(otherName)) ⇒
// Move data holder only
for {
holder ← findDataHolders(name)
otherHolder ← findDataHolders(otherName)
} yield
move(holder, otherHolder)
// Move bind
for {
bind ← findBindByName(doc, name)
otherBind ← findBindByName(doc, otherName)
} yield
move(bind, otherBind)
// Try to move resource elements to a good place
// TODO: We move the container resource holder, but we should also move together the contained controls' resource holders
def firstControl(s: Seq[NodeInfo]) =
s find (getControlNameOpt(_).isDefined)
def tryToMoveHolders(siblingName: String, moveOp: (NodeInfo, NodeInfo) ⇒ NodeInfo): Unit =
findResourceHolders(name) foreach {
holder ⇒
findSiblingsWithName(holder, siblingName).headOption foreach
(moveOp(holder, _))
}
val movedContainer = findInViewTryIndex(doc, containerElem.id).get // must get new reference
(firstControl(movedContainer preceding *), firstControl(movedContainer following *)) match {
case (Some(preceding), _) ⇒ tryToMoveHolders(getControlName(preceding), moveElementAfter)
case (_, Some(following)) ⇒ tryToMoveHolders(getControlName(following), moveElementBefore)
case _ ⇒
}
// Moving sections can impact templates
updateTemplates(None)
case _ ⇒
}
}
// Whether it is possible to move an item into the given container
// Currently: must be a section without section template content
// Later: fr:tab (maybe fr:tabview), wizard
def canMoveInto(containerElem: NodeInfo): Boolean =
IsSection(containerElem) && ! (containerElem / * exists isSectionTemplateContent)
def isCustomIterationName(controlName: String, iterationName: String): Boolean =
defaultIterationName(controlName) != iterationName
def setRepeatProperties(
controlName : String,
repeat : Boolean,
min : String,
max : String,
iterationNameOrEmpty : String,
applyDefaults : Boolean,
initialIterations : String)(implicit
ctx : FormBuilderDocContext
): Unit = {
// TODO: Remove once `ctx` is used everywhere
val inDoc = ctx.formDefinitionRootElem
findControlByName(inDoc, controlName) foreach { control ⇒
val wasRepeat = isRepeat(control)
val oldInitialIterationsAttribute = getInitialIterationsAttribute(control)
val minOpt = minMaxForAttribute(min)
val maxOpt = minMaxForAttribute(max)
val initialIterationsOpt = initialIterations.trimAllToOpt
// Update control attributes first
// A missing or invalid min/max value is taken as the default value: 0 for min, none for max. In both cases, we
// don't set the attribute value. This means that in the end we only set positive integer values.
toggleAttribute(control, "repeat", RepeatContentToken, repeat)
toggleAttribute(control, "min", minOpt.get, repeat && minOpt.isDefined)
toggleAttribute(control, "max", maxOpt.get, repeat && maxOpt.isDefined)
toggleAttribute(control, "template", makeInstanceExpression(templateId(controlName)), repeat)
toggleAttribute(control, "apply-defaults", "true", repeat && applyDefaults)
toggleAttribute(control, InitialIterations, initialIterationsOpt.get, repeat && initialIterationsOpt.isDefined)
if (! wasRepeat && repeat) {
// Insert new bind and template
val iterationName = iterationNameOrEmpty.trimAllToOpt getOrElse defaultIterationName(controlName)
// Make sure there are no nested binds
val oldNestedBinds = findBindByName(inDoc, controlName).toList / *
delete(oldNestedBinds)
// Insert nested iteration bind
findControlByName(inDoc, controlName) foreach { control ⇒
ensureBinds(findContainerNamesForModel(control) :+ controlName :+ iterationName)
}
val controlBind = findBindByName(inDoc, controlName)
val iterationBind = controlBind.toList / *
insert(into = iterationBind, origin = oldNestedBinds)
// Insert nested iteration data holders
// NOTE: There can be multiple existing data holders due to enclosing repeats
findDataHolders(controlName) foreach { holder ⇒
val nestedHolders = holder / *
delete(nestedHolders)
insert(into = holder, origin = elementInfo(iterationName, nestedHolders))
}
// Update existing templates
// NOTE: Could skip if top-level repeat
updateTemplatesCheckContainers(findAncestorRepeatNames(control).to[Set])
// Ensure new template rooted at iteration
ensureTemplateReplaceContent(
controlName,
createTemplateContentFromBind(iterationBind.head, ctx.componentBindings)
)
} else if (wasRepeat && ! repeat) {
// Remove bind, holders and template
// Move bind up
val controlBind = findBindByName(inDoc, controlName).toList
val oldNestedBinds = controlBind / * / *
delete(controlBind / *)
insert(into = controlBind, origin = oldNestedBinds)
// Mover data holders up and keep only the first iteration
findDataHolders(controlName) foreach { holder ⇒
val nestedHolders = holder / * take 1 child *
delete(holder / *)
insert(into = holder, origin = nestedHolders)
}
// Remove template
findTemplateInstance(inDoc, controlName) foreach (delete(_))
// Update existing templates
updateTemplatesCheckContainers(findAncestorRepeatNames(control).to[Set])
} else if (repeat) {
// Template should already exists an should have already been renamed if needed
// MAYBE: Ensure template just in case.
val newInitialIterationsAttribute = getInitialIterationsAttribute(control)
if (oldInitialIterationsAttribute != newInitialIterationsAttribute)
updateTemplatesCheckContainers(findAncestorRepeatNames(control, includeSelf = true).to[Set])
} else if (! repeat) {
// Template should not exist
// MAYBE: Delete template just in case.
}
}
}
def renameTemplate(oldName: String, newName: String)(implicit ctx: FormBuilderDocContext): Unit =
for {
root ← findTemplateRoot(oldName)
instance ← root.parentOption
} locally {
ensureAttribute(instance, "id", templateId(newName))
}
def findTemplateInstance(doc: NodeInfo, controlName: String): Option[NodeInfo] =
instanceElem(doc, templateId(controlName))
def ensureTemplateReplaceContent(
controlName : String,
content : NodeInfo)(implicit
ctx : FormBuilderDocContext
): Unit = {
val templateInstanceId = templateId(controlName)
val modelElement = getModelElem(ctx.formDefinitionRootElem)
modelElement / XFInstanceTest find (_.hasIdValue(templateInstanceId)) match {
case Some(templateInstance) ⇒
// clear existing template instance content
delete(templateInstance / *)
insert(into = templateInstance , origin = content)
case None ⇒
// Insert template instance if not present
val template: NodeInfo =
<xf:instance
xmlns:xf="http://www.w3.org/2002/xforms"
xmlns:fb="http://orbeon.org/oxf/xml/form-builder"
xmlns:xxf="http://orbeon.org/oxf/xml/xforms"
id={templateInstanceId}
fb:readonly="true"
xxf:exclude-result-prefixes="#all">{nodeInfoToElem(content)}</xf:instance>
insert(into = modelElement, after = modelElement / XFInstanceTest takeRight 1, origin = template)
}
}
def createTemplateContentFromBindName(
bindName : String,
bindings : Seq[NodeInfo])(implicit
ctx : FormBuilderDocContext
): Option[NodeInfo] =
findBindByName(ctx.formDefinitionRootElem, bindName) map (createTemplateContentFromBind(_, bindings))
private val AttributeRe = "@(.+)".r
// Create an instance template based on a hierarchy of binds rooted at the given bind
// This checks each control binding in case the control specifies a custom data holder.
def createTemplateContentFromBind(
startBindElem : NodeInfo,
bindings : Seq[NodeInfo])(implicit
ctx : FormBuilderDocContext
): NodeInfo = {
val inDoc = startBindElem.getDocumentRoot
val descriptors = getAllRelevantDescriptors(bindings)
def holderForBind(bind: NodeInfo, topLevel: Boolean): Option[NodeInfo] = {
val controlName = getBindNameOrEmpty(bind)
val controlElemOpt = findControlByName(inDoc, controlName)
// Handle non-standard cases, see https://github.com/orbeon/orbeon-forms/issues/2470
def fromNonStandardRef =
bind attValueOpt "ref" match {
case Some(AttributeRe(att)) ⇒ Some(Some(NodeInfoFactory.attributeInfo(att)))
case Some(".") ⇒ Some(None)
case _ ⇒ None
}
def fromBinding =
for {
controlElem ← controlElemOpt
appearances = controlElem attTokens APPEARANCE_QNAME
descriptor ← findMostSpecificWithoutDatatype(controlElem.uriQualifiedName, appearances, descriptors)
binding ← descriptor.binding
} yield {
Some(FormBuilder.newDataHolder(controlName, binding))
}
def fromPlainControlName =
Some(Some(elementInfo(controlName)))
val elementTemplateOpt = fromNonStandardRef orElse fromBinding orElse fromPlainControlName flatten
elementTemplateOpt foreach { elementTemplate ⇒
val iterationCount = {
// If the current control is a repeated fr:grid or fr:section with the attribute set, find the first occurrence
// in the data of this repeat, and use its concrete initial number of iterations to update the template. We
// can imagine other values for the attribute in the future, maybe an integer value (`0`, `1`, ...) setting
// the initial number of iterations.
// See https://github.com/orbeon/orbeon-forms/issues/2379
def useInitialIterations(controlElem: NodeInfo) =
! topLevel && isRepeat(controlElem) && getInitialIterationsAttribute(controlElem).contains("first")
controlElemOpt match {
case Some(controlElem) if useInitialIterations(controlElem) ⇒
val firstDataHolder = findDataHolders(controlName) take 1
val iterationsHolders = firstDataHolder / *
iterationsHolders.size
case _ ⇒
1
}
}
// Recursively insert elements in the template
if (iterationCount > 0) {
// If iterationCount > 1, we just duplicate the children `iterationCount` times. In practice, this means
// multiple iteration elements:
//
// <repeated-section-2-iteration>
// ...
// </repeated-section-2-iteration>
// <repeated-section-2-iteration>
// ...
// </repeated-section-2-iteration>
val nested = bind / "*:bind" flatMap (holderForBind(_, topLevel = false))
val repeatedNested = (1 to iterationCount) flatMap (_ ⇒ nested)
insert(into = elementTemplate, origin = repeatedNested)
}
}
elementTemplateOpt
}
holderForBind(startBindElem, topLevel = true) getOrElse (throw new IllegalStateException)
}
// Make sure all template instances reflect the current bind structure
def updateTemplates(ancestorContainerNames: Option[Set[String]])(implicit ctx: FormBuilderDocContext): Unit =
for {
templateInstance ← templateInstanceElements(ctx.formDefinitionRootElem)
repeatName = controlNameFromId(templateInstance.id)
if ancestorContainerNames.isEmpty || ancestorContainerNames.exists(_(repeatName))
iterationName ← findRepeatIterationName(ctx.formDefinitionRootElem, repeatName)
template ← createTemplateContentFromBindName(iterationName, ctx.componentBindings)
} locally {
ensureTemplateReplaceContent(repeatName, template)
}
// Update templates but only those which might contain one of specified names
def updateTemplatesCheckContainers(ancestorContainerNames: Set[String])(implicit ctx: FormBuilderDocContext): Unit =
updateTemplates(Some(ancestorContainerNames))
} | brunobuzzi/orbeon-forms | form-builder/jvm/src/main/scala/org/orbeon/oxf/fb/ContainerOps.scala | Scala | lgpl-2.1 | 19,155 |
package com.twitter.finagle.thrift
import com.twitter.conversions.time._
import com.twitter.finagle.Service
import com.twitter.finagle.util.ByteArrays
import com.twitter.util.{Await, Future}
import java.nio.charset.StandardCharsets.UTF_8
import org.apache.thrift.protocol.{TMessage, TMessageType}
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class TTwitterServerFilterTest extends FunSuite {
val protocolFactory = Protocols.binaryFactory()
test("handles legacy client_id headers") {
val filter = new TTwitterServerFilter("test", protocolFactory)
// Upgrade the protocol.
val service = new Service[Array[Byte], Array[Byte]] {
def apply(req: Array[Byte]) =
Future.value(ClientId.current.map(_.name)
.getOrElse("NOCLIENT")
.getBytes(UTF_8))
}
val upgraded = {
val buffer = new OutputBuffer(protocolFactory)
buffer().writeMessageBegin(
new TMessage(ThriftTracing.CanTraceMethodName, TMessageType.CALL, 0))
val options = new thrift.ConnectionOptions
options.write(buffer())
buffer().writeMessageEnd()
filter(buffer.toArray, service)
}
assert(upgraded.isDefined)
Await.result(upgraded, 10.seconds)
val req = {
val buffer = new OutputBuffer(protocolFactory)
buffer().writeMessageBegin(
new TMessage("testrpc", TMessageType.CALL, 0))
buffer().writeMessageEnd()
val header = new thrift.RequestHeader
header.setClient_id(new thrift.ClientId("testclient"))
val bytes = ByteArrays.concat(
OutputBuffer.messageToArray(header, protocolFactory),
buffer.toArray)
filter(bytes, service) map { bytes =>
// Strip the response header.
InputBuffer.peelMessage(bytes, new thrift.ResponseHeader, protocolFactory)
}
}
assert(req.isDefined)
val rep = Await.result(req, 10.seconds)
val clientId = new String(rep, UTF_8)
assert(clientId == "testclient")
}
}
| koshelev/finagle | finagle-thrift/src/test/scala/com/twitter/finagle/thrift/TTwitterServerFilterTest.scala | Scala | apache-2.0 | 2,073 |
/**
*
*/
package db
// Use SQLiteDriver to connect or create a sqlite database
import scala.slick.driver.SQLiteDriver.simple._
// Use the implicit threadLocalSession
import Database.threadLocalSession
/**
* A simple table holding cats
*/
object Cats extends Table[(Int, String)]("CATS") {
def id = column[Int]("ID", O.PrimaryKey) // The primary key column
def name = column[String]("NAME")
// Every table needs a * projection with the same type as the table's type parameter
def * = id ~ name
}
/**
* some cat owners
*/
object Owners extends Table[(String, String, Int)]("OWNERS") {
def owner = column[String]("OWNER")
def cat = column[String]("CAT")
def id = column[Int]("ID")
def * = owner ~ cat ~ id
def pk = primaryKey("PK_OWNER_ID", (owner, id))
}
/**
* @author manuel
*
* Shows how to create a sqlite db, create a table,
* populate table and run a query, do an insert, and an update.
*/
object SampleSQLite extends App {
println("Here we go!")
val fileName = System.getProperty("user.dir") + "/cats.db"
val dbFile = new java.io.File(fileName)
val existed = dbFile.exists()
Database.forURL("jdbc:sqlite:" + fileName,
driver = "org.sqlite.JDBC") withSession {
if (existed) {
println("Dropping the tables")
// drop the table
(Cats.ddl ++ Owners.ddl).drop
}
println("(Re)creating tables")
// create the table
(Cats.ddl ++ Owners.ddl).create
// Insert some cats
Cats.insert(67, "Vlad")
Cats.insert(85, "Igor")
Cats.insert(12, "Felix")
Cats.insert(11, "Tom")
Cats.insert(13, "Tom")
Cats.insert(66, "Fritz")
Owners.insert("MC", "Vlad", 67)
Owners.insert("MC", "Igor", 85)
Owners.insert("Pat", "Felix", 12)
Owners.insert("Jerry", "Tom", 11)
Owners.insert("Bommel", "Tom", 13)
Owners.insert("Robert", "Frits", 66)
// Iterate through all cats
Query(Cats) foreach {
case (id, name) =>
println(" " + name + "\\t" + id)
}
val owners = Owners.map(_.owner).list
println(owners)
val mc = for {
o <- Owners
if (o.owner === "MC")
} yield (o.cat, o.id)
println(mc.list)
// Now do an insert // Now do an update
Cats.insert(77, "Casper")
Owners.insert("Susna", "Casper", 77)
println(Owners.map(_.owner).list)
// Now do an update
val errorRec = for { o <- Owners if o.owner === "Susna" } yield o.owner
errorRec.update("Susan")
println(Owners.map(_.owner).list)
}
} | manree/slickexp | src/main/scala/db/SampleSQLite.scala | Scala | unlicense | 2,578 |
package net.lshift.diffa.kernel.util
import org.junit.Test
import org.junit.Assert._
import scala.collection.JavaConversions._
import org.springframework.mock.web.MockHttpServletRequest
import net.lshift.diffa.participant.scanning._
import net.lshift.diffa.kernel.config._
class CategoryUtilTest {
val baseStringCategory = new SetCategoryDescriptor(Set("a", "b"))
val baseIntCategory = new RangeCategoryDescriptor("integer", "5", "12")
val baseDateCategory = new RangeCategoryDescriptor("date", "2011-01-01", "2011-12-31", "individual")
val stringOverrideCategory = new SetCategoryDescriptor(Set("a"))
val endpointCategories =
Map("someString" -> baseStringCategory, "someInt" -> baseIntCategory, "someDate" -> baseDateCategory)
val stringOverrideCategories = Map("someString" -> stringOverrideCategory)
val dateOverrideCategories = Map("someDate" -> new RangeCategoryDescriptor("date", "2011-05-05", "2011-06-01"))
val dateOverrideWithGranCategories = Map("someDate" -> new RangeCategoryDescriptor("date", "2011-05-05", "2011-06-01", "monthly"))
val views = Seq(
EndpointView(name = "lessStrings", categories = stringOverrideCategories),
EndpointView(name = "lessDates", categories = dateOverrideCategories),
EndpointView(name = "lessDatesMoreGranularity", categories = dateOverrideWithGranCategories)
)
@Test
def shouldReturnEndpointCategoriesWhenViewIsNone() {
val fused = CategoryUtil.fuseViewCategories(endpointCategories, views, None)
assertEquals(endpointCategories, fused)
}
@Test
def shouldApplySetCategoryOverride() {
val fused = CategoryUtil.fuseViewCategories(endpointCategories, views, Some("lessStrings"))
assertEquals(
Map("someString" -> stringOverrideCategory, "someInt" -> baseIntCategory, "someDate" -> baseDateCategory),
fused)
}
@Test
def shouldApplyDateCategoryOverrideAndInheritGranularitySettings() {
val fused = CategoryUtil.fuseViewCategories(endpointCategories, views, Some("lessDates"))
val fusedDateCategory = new RangeCategoryDescriptor("date", "2011-05-05", "2011-06-01", "individual")
assertEquals(
Map("someString" -> baseStringCategory, "someInt" -> baseIntCategory, "someDate" -> fusedDateCategory),
fused)
}
@Test
def shouldApplyDateCategoryOverrideAndOverrideGranularitySettings() {
val fused = CategoryUtil.fuseViewCategories(endpointCategories, views, Some("lessDatesMoreGranularity"))
val fusedDateCategory = new RangeCategoryDescriptor("date", "2011-05-05", "2011-06-01", "monthly")
assertEquals(
Map("someString" -> baseStringCategory, "someInt" -> baseIntCategory, "someDate" -> fusedDateCategory),
fused)
}
@Test
def shouldInferDateBoundsFromUpperToLower() {
val unboundedUpperDateCategory = Map("someDate" -> new RangeCategoryDescriptor("date", "2011-01-01", null, "individual"))
val unboundedLowerDateCategory = Map("someDate" -> new RangeCategoryDescriptor("date", null, "2011-12-31", "individual"))
val undoundedLowerView = Seq(EndpointView(name = "dates", categories = unboundedLowerDateCategory))
val undoundedUpperView = Seq(EndpointView(name = "dates", categories = unboundedUpperDateCategory))
val fusedForwards = CategoryUtil.fuseViewCategories(unboundedUpperDateCategory, undoundedLowerView, Some("dates"))
val fusedBackwards = CategoryUtil.fuseViewCategories(unboundedLowerDateCategory, undoundedUpperView, Some("dates"))
assertEquals(
Map("someDate" -> new RangeCategoryDescriptor("date", "2011-01-01", "2011-12-31", "individual")),
fusedForwards)
assertEquals(
Map("someDate" -> new RangeCategoryDescriptor("date", "2011-01-01", "2011-12-31", "individual")),
fusedBackwards)
}
@Test
def shouldApplyASetConstraintToAConstraintsBuilder() {
val req = new MockHttpServletRequest
req.addParameter("someString", "aaa")
req.addParameter("someString", "bbb")
val builder = new ConstraintsBuilder(req)
CategoryUtil.buildConstraints(builder, Map("someString" -> new SetCategoryDescriptor(Set("aaa", "bbb"))))
assertEquals(
Seq(new SetConstraint("someString", Set("aaa", "bbb"))),
builder.toList.toSeq)
}
@Test
def shouldApplyAPrefixCategoryToAConstraintsBuilder() {
val req = new MockHttpServletRequest
req.addParameter("someString-prefix", "abc")
val builder = new ConstraintsBuilder(req)
CategoryUtil.buildConstraints(builder, Map("someString" -> new PrefixCategoryDescriptor(1, 10, 1)))
assertEquals(
Seq(new StringPrefixConstraint("someString", "abc")),
builder.toList.toSeq)
}
@Test
def shouldApplyADateCategoryToAConstraintsBuilder() {
val req = new MockHttpServletRequest
req.addParameter("bizDate-start", "2011-06-01")
req.addParameter("bizDate-end", "2011-06-30")
val builder = new ConstraintsBuilder(req)
CategoryUtil.buildConstraints(builder, Map("bizDate" -> new RangeCategoryDescriptor("date")))
assertEquals(
Seq(new DateRangeConstraint("bizDate", "2011-06-01", "2011-06-30")),
builder.toList.toSeq)
}
@Test
def shouldApplyATimeCategoryToAConstraintsBuilder() {
val req = new MockHttpServletRequest
req.addParameter("bizTime-start", "2011-06-01T15:14:13.000Z")
req.addParameter("bizTime-end", "2011-06-30T12:31:00.000Z")
val builder = new ConstraintsBuilder(req)
CategoryUtil.buildConstraints(builder, Map("bizTime" -> new RangeCategoryDescriptor("datetime")))
assertEquals(
Seq(new TimeRangeConstraint("bizTime", "2011-06-01T15:14:13.000Z", "2011-06-30T12:31:00.000Z")),
builder.toList.toSeq)
}
@Test
def shouldApplyAnIntegerCategoryToAConstraintsBuilder() {
val req = new MockHttpServletRequest
req.addParameter("someInt-start", "15")
req.addParameter("someInt-end", "32")
val builder = new ConstraintsBuilder(req)
CategoryUtil.buildConstraints(builder, Map("someInt" -> new RangeCategoryDescriptor("int")))
assertEquals(
Seq(new IntegerRangeConstraint("someInt", "15", "32")),
builder.toList.toSeq)
}
@Test
def shouldAcceptValidConstraintsAgainstTheirCategories() {
CategoryUtil.mergeAndValidateConstraints(Map(
"someString" -> new SetCategoryDescriptor(Set("aaa", "bbb")),
"somePString" -> new PrefixCategoryDescriptor(1, 10, 1),
"bizDate" -> new RangeCategoryDescriptor("date", "2011-06-01", "2011-06-30"),
"bizTime" -> new RangeCategoryDescriptor("datetime", "2011-06-01T15:14:13.000Z", "2011-06-30T12:31:00.000Z"),
"someInt" -> new RangeCategoryDescriptor("int", "5", "52")
),
Seq(
new SetConstraint("someString", Set("aaa")),
new StringPrefixConstraint("somePString", "abc"),
new DateRangeConstraint("bizDate", "2011-06-15", "2011-06-30"),
new TimeRangeConstraint("bizTime", "2011-06-01T15:14:13.000Z", "2011-06-18T00:21:15.000Z"),
new IntegerRangeConstraint("someInt", "10", "15")
)
)
}
@Test
def shouldRejectAnInvalidSetConstraint() {
expectConstraintException(
Map("someString" -> new SetCategoryDescriptor(Set("aaa", "bbb"))),
Seq(new SetConstraint("someString", Set("abc"))),
"someString: Not all of the values [abc] are supported by category [aaa, bbb]"
)
}
@Test
def shouldRejectATooShortInvalidPrefixConstraint() {
expectConstraintException(
Map("somePString" -> new PrefixCategoryDescriptor(5, 10, 1)),
Seq(new StringPrefixConstraint("somePString", "abc")),
"somePString: Prefix abc is shorter than configured start length 5")
}
@Test
def shouldRejectATooLongInvalidPrefixConstraint() {
expectConstraintException(
Map("somePString" -> new PrefixCategoryDescriptor(5, 10, 1)),
Seq(new StringPrefixConstraint("somePString", "abcdefabcdef")),
"somePString: Prefix abcdefabcdef is longer than configured max length 10")
}
@Test
def shouldRejectAnOutOfRangeDateConstraint() {
expectConstraintException(
Map("bizDate" -> new RangeCategoryDescriptor("date", "2011-06-01", "2011-06-30")),
Seq(new DateRangeConstraint("bizDate", "2011-05-15", "2011-06-30")),
"bizDate: DateRangeConstraint{name=bizDate, start=2011-05-15, end=2011-06-30} isn't contained within DateRangeConstraint{name=bizDate, start=2011-06-01, end=2011-06-30}")
}
@Test
def shouldRejectAnOutOfRangeTimeConstraint() {
expectConstraintException(
Map("bizTime" -> new RangeCategoryDescriptor("datetime", "2011-06-01T15:14:13.000Z", "2011-06-30T12:31:00.000Z")),
Seq(new TimeRangeConstraint("bizTime", "2011-05-01T15:14:13.000Z", "2011-06-18T00:21:15.000Z")),
"bizTime: TimeRangeConstraint{name=bizTime, start=2011-05-01T15:14:13.000Z, end=2011-06-18T00:21:15.000Z} isn't contained within TimeRangeConstraint{name=bizTime, start=2011-06-01T15:14:13.000Z, end=2011-06-30T12:31:00.000Z}")
}
@Test
def shouldDifferenceEmptyListsOfCategories() {
assertEquals(Seq(), CategoryUtil.differenceCategories(Map(), Map()))
}
@Test
def shouldDetectAddedCategoryDescriptor() {
val sc = new SetCategoryDescriptor(Set("aaa", "bbb"))
assertEquals(Seq(CategoryChange("someSet", None, Some(sc))),
CategoryUtil.differenceCategories(Map(), Map("someSet" -> sc)))
}
@Test
def shouldDetectRemovedCategoryDescriptor() {
val sc = new SetCategoryDescriptor(Set("aaa", "bbb"))
assertEquals(Seq(CategoryChange("someSet", Some(sc), None)),
CategoryUtil.differenceCategories(Map("someSet" -> sc), Map()))
}
@Test
def shouldNotDetectUnchangedCategoryDescriptor() {
val sc = new SetCategoryDescriptor(Set("aaa", "bbb"))
assertEquals(Seq(), CategoryUtil.differenceCategories(Map("someSet" -> sc), Map("someSet" -> sc)))
}
@Test
def shouldDetectChangedCategoryDescriptor() {
val sc = new SetCategoryDescriptor(Set("aaa", "bbb"))
val sc2 = new SetCategoryDescriptor(Set("aaa", "bbb", "ccc"))
assertEquals(Seq(CategoryChange("someSet", Some(sc), Some(sc2))),
CategoryUtil.differenceCategories(Map("someSet" -> sc), Map("someSet" -> sc2)))
}
private def expectConstraintException(categories:Map[String, CategoryDescriptor], constraints:Seq[ScanConstraint], message:String) {
try {
CategoryUtil.mergeAndValidateConstraints(categories, constraints)
} catch {
case e:InvalidConstraintException => assertEquals(message, e.getMessage)
}
}
} | aprescott/diffa | kernel/src/test/scala/net/lshift/diffa/kernel/util/CategoryUtilTest.scala | Scala | apache-2.0 | 10,462 |
package views.html
import play.templates._
import play.templates.TemplateMagic._
import play.api.templates._
import play.api.templates.PlayMagic._
import models._
import controllers._
import play.api.i18n._
import play.api.mvc._
import play.api.data._
import views.html._
/* adminLogin Template File */
object adminLogin extends BaseScalaTemplate[play.api.templates.HtmlFormat.Appendable,Format[play.api.templates.HtmlFormat.Appendable]](play.api.templates.HtmlFormat) with play.api.templates.Template1[Form[AdminFormData],play.api.templates.HtmlFormat.Appendable] {
/* adminLogin Template File */
def apply/*2.2*/(adminForm: Form[AdminFormData]):play.api.templates.HtmlFormat.Appendable = {
_display_ {import helper._
import helper.twitterBootstrap._
import play.api.i18n.Messages
Seq[Any](format.raw/*2.34*/("""
"""),format.raw/*6.1*/("""
"""),_display_(/*7.2*/main("Admin")/*7.15*/{_display_(Seq[Any](format.raw/*7.16*/("""
<fieldset>
<legend>Admin Login</legend>
"""),_display_(/*10.5*/form(action = routes.Auth.adminLoginPost())/*10.48*/{_display_(Seq[Any](format.raw/*10.49*/("""
"""),_display_(/*11.6*/inputText(
adminForm("email"),
'_label -> "Email",
'_showConstraints -> false
)),format.raw/*15.6*/("""
"""),_display_(/*16.6*/inputPassword(
adminForm("password"),
'_label -> "Password",
'_showConstraints -> false
)),format.raw/*20.6*/("""
<button class="btn btn-primary">Proceed</button>
""")))}),format.raw/*22.5*/("""
</fieldset>
"""),_display_(/*24.3*/adminForm/*24.12*/.globalErrors.map/*24.29*/{ error =>_display_(Seq[Any](format.raw/*24.39*/("""
<div class="alert alert-error">"""),_display_(/*25.35*/Messages(error.message)),format.raw/*25.58*/("""</div>
""")))}),format.raw/*26.3*/("""
""")))}),format.raw/*27.2*/("""
"""))}
}
def render(adminForm:Form[AdminFormData]): play.api.templates.HtmlFormat.Appendable = apply(adminForm)
def f:((Form[AdminFormData]) => play.api.templates.HtmlFormat.Appendable) = (adminForm) => apply(adminForm)
def ref: this.type = this
}
/*
-- GENERATED --
DATE: Tue Jul 01 01:00:33 IST 2014
SOURCE: /home/nagarjuna/FooService/app/views/adminLogin.scala.html
HASH: b8527297f3e1565a921ed67d97fd401ec0af4fe2
MATRIX: 626->32|833->64|860->147|887->149|908->162|946->163|1020->211|1072->254|1111->255|1143->261|1261->359|1293->365|1421->473|1508->530|1550->546|1568->555|1594->572|1642->582|1704->617|1748->640|1787->649|1819->651
LINES: 19->2|27->2|28->6|29->7|29->7|29->7|32->10|32->10|32->10|33->11|37->15|38->16|42->20|44->22|46->24|46->24|46->24|46->24|47->25|47->25|48->26|49->27
-- GENERATED --
*/
| pamu/FooService | FooService2/target/scala-2.10/src_managed/main/views/html/adminLogin.template.scala | Scala | apache-2.0 | 2,850 |
println(/* resolved: false */ Double.getClass)
println(classOf[/* */ Double])
1.0D.asInstanceOf[Double]./* */ toFloat | ilinum/intellij-scala | testdata/resolve2/predef/literal/Double.scala | Scala | apache-2.0 | 117 |
/*
* Copyright (c) 2014-2020 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.execution
import scala.collection.generic.CanBuildFrom
import scala.collection.mutable
object compat {
type BuildFrom[-From, -A, +C] = CanBuildFrom[From, A, C]
private[monix] object internal {
type IterableOnce[+X] = scala.collection.GenTraversableOnce[X]
def toIterator[X](i: IterableOnce[X]): Iterator[X] = i.toIterator
def hasDefiniteSize[X](i: IterableOnce[X]): Boolean = i.hasDefiniteSize
def newBuilder[From, A, C](bf: BuildFrom[From, A, C], from: From): mutable.Builder[A, C] = bf.apply(from)
@inline def toSeq[A](array: Array[AnyRef]): Seq[A] =
new scala.collection.mutable.WrappedArray.ofRef(array).toSeq.asInstanceOf[Seq[A]]
}
}
| alexandru/monifu | monix-execution/shared/src/main/scala_2.13-/monix/execution/compat.scala | Scala | apache-2.0 | 1,367 |
/**
* Copyright (C) 2013 Carnegie Mellon University
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tdb.examples
import scala.collection.immutable
import scala.collection.mutable.Map
import tdb._
import tdb.list._
import tdb.TDB._
import tdb.util._
class ColumnPageRankAdjust
(links: ColumnListInput[Int], epsilon: Double, iters: Int)
extends Adjustable[Unit] {
def run(implicit c: Context) = {
for (i <- 0 until iters) {
def mapper(key: Int, column1: Any, column2: Any, c: Context) {
val edges = column1.asInstanceOf[Array[Int]]
val rank = column2.asInstanceOf[Double]
val v = (rank / edges.size) * .85
val values: (String, Iterable[(Int, Any)]) =
(i + 1) + "" ->
((for (edge <- edges) yield (edge, v)) ++ Iterable(key -> .15))
putIn(links, values)(c)
}
links.getAdjustableList().projection2("edges", i + "", mapper, links)
}
}
}
class ColumnChunkPageRankAdjust
(links: ColumnListInput[Int], epsilon: Double, iters: Int)
extends Adjustable[Unit] {
def run(implicit c: Context) = {
for (i <- 0 until iters) {
def mapper(keys: Iterable[Int], column1s: Iterable[Any], column2s: Iterable[Any], c: Context) {
val contribs = Map[Int, Double]()
val keyIter = keys.iterator
val column1Iter = column1s.iterator
val column2Iter = column2s.iterator
while (keyIter.hasNext) {
val key = keyIter.next
val edges = column1Iter.next.asInstanceOf[Array[Int]]
val rank = column2Iter.next.asInstanceOf[Double]
val v = (rank / edges.size) * .85
contribs(key) = .15 + contribs.getOrElse(key, 0.0)
for (edge <- edges) {
contribs(edge) = v + contribs.getOrElse(edge, 0.0)
}
}
putIn(links, (i + 1).toString -> contribs)(c)
}
links.getAdjustableList().projection2Chunk("edges", i.toString, mapper)
}
}
}
class ColumnPageRankAlgorithm(_conf: AlgorithmConf)
extends Algorithm[Unit](_conf) {
var columns = immutable.Map(
"key" -> (StringColumn(), -1),
"edges" -> (StringColumn(), ""),
"0" -> (AggregatedDoubleColumn(), 1.0),
"1" -> (AggregatedDoubleColumn(), 0.0))
for (i <- 2 to conf.iters) {
columns = columns + (i + "" -> (AggregatedDoubleColumn(), 0.0))
}
val columnConf = ColumnListConf(
columns = columns, chunkSize = conf.listConf.chunkSize, partitions = conf.listConf.partitions)
val input = mutator.createList[Int, Array[Int]](columnConf)
.asInstanceOf[ColumnListInput[Int]]
val data = new GraphColumnData(input, conf.file, conf.runs, conf.updateRepeat)
//val data = new LiveJournalData(input)
val adjust = new ColumnPageRankAdjust(
input, conf.epsilon, conf.iters)
var naiveTable: Map[Int, Array[Int]] = _
def generateNaive() {
data.generate()
naiveTable = data.table
}
def runNaive() {
naiveHelper(naiveTable)
}
private def naiveHelper(links: Map[Int, Array[Int]]) = {
var ranks = links.map(pair => (pair._1, 1.0))
for (i <- 0 until conf.iters) {
val joined = Map[Int, (Array[Int], Double)]()
for ((url, rank) <- ranks) {
joined(url) = (links(url), rank)
}
val contribs = joined.toSeq.flatMap { case (page, (links, rank)) =>
val contrib = rank / links.size * .85
links.map(url => (url, contrib)) ++ Iterable((page, .15))
}
val reducedContribs = Map[Int, Double]()
for ((url, contrib) <- contribs) {
reducedContribs(url) = contrib + reducedContribs.getOrElse(url, 0.0)
}
ranks = reducedContribs.map(pair => (pair._1, pair._2))
}
ranks
}
def loadInitial() {
data.load()
}
def hasUpdates() = data.hasUpdates()
def loadUpdate() = data.update()
val epsilon = 0.1
def checkOutput(output: Unit) = {
val buf = input.getAdjustableList().toBuffer(mutator)
val out = buf.map {
case (k, v) => (k, v(conf.iters + "").asInstanceOf[Double])
}
val answer = naiveHelper(data.table)
var check = out.size == answer.size
var error = 0.0
for ((node, rank) <- out) {
error += (answer(node) - rank) / answer(node)
if (!answer.contains(node)) {
check = false
}
}
val averageError = (error / answer.size).abs
println("average error = " + averageError)
//println("output = " + out)
//println("answer = " + answer)
check && averageError < epsilon
}
}
| twmarshall/tdb | core/src/main/scala/tdb/examples/ColumnPageRankAlgorithm.scala | Scala | apache-2.0 | 5,023 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.apollo.broker
import org.apache.activemq.apollo.util._
import scala.collection.immutable.List
import org.apache.activemq.apollo.dto._
import java.util.concurrent.TimeUnit
import org.fusesource.hawtdispatch._
import collection.mutable.{HashSet, HashMap, ListBuffer}
import security.SecuredResource
import java.util.concurrent.atomic.AtomicInteger
/**
* <p>
* A logical messaging topic
* </p>
*
* @author <a href="http://hiramchirino.com">Hiram Chirino</a>
*/
class Topic(val router:LocalRouter, val address:DestinationAddress, var config_updater: ()=>TopicDTO) extends DomainDestination with SecuredResource {
val topic_metrics = new DestMetricsDTO
topic_metrics.enqueue_ts = now
topic_metrics.dequeue_ts = now
val resource_kind =SecuredResource.TopicKind
var proxy_sessions = new HashSet[DeliverySession]()
var topic_queue_consumers = new HashMap[DeliveryConsumer, DeliveryConsumer]()
@transient
var retained_message: Delivery = _
import language.implicitConversions
implicit def from_link(from:LinkDTO):(Long,Long,Long)=(from.enqueue_item_counter, from.enqueue_size_counter, from.enqueue_ts)
implicit def from_session(from:DeliverySession):(Long,Long,Long)=(from.enqueue_item_counter, from.enqueue_size_counter, from.enqueue_ts)
def add_link_counters(to:LinkDTO, from:(Long,Long,Long)):Unit = {
to.enqueue_item_counter += from._1
to.enqueue_size_counter += from._2
to.enqueue_ts = to.enqueue_ts max from._3
}
def add_enqueue_counters(to:DestMetricsDTO, from:(Long,Long,Long)):Unit = {
to.enqueue_item_counter += from._1
to.enqueue_size_counter += from._2
to.enqueue_ts = to.enqueue_ts max from._3
}
def add_dequeue_counters(to:DestMetricsDTO, from:(Long,Long,Long)):Unit = {
to.dequeue_item_counter += from._1
to.dequeue_size_counter += from._2
to.dequeue_ts = to.enqueue_ts max from._3
}
val producer_tracker = new DeliveryConsumer {
def retained() = 0
def retain() {}
def release() {}
def matches(message: Delivery) = true
def is_persistent = false
def dispatch_queue = null
def connect(producer: DeliveryProducer) = ProxyProducerSession(producer)
}
case class ProxyProducerSession(val producer:DeliveryProducer) extends DeliverySession {
dispatch_queue {
proxy_sessions.add(this)
}
def remaining_capacity = 1
var enqueue_ts = 0L
var enqueue_size_counter = 0L
var enqueue_item_counter = 0L
var refiller:Task = null
def offer(value: Delivery) = {
enqueue_item_counter += 1
enqueue_size_counter += value.size
enqueue_ts = now
value.retain match {
case RetainSet =>
// TODO: perhaps persist so that we can recall what was
// retained across broker restarts.
retained_message = value;
case RetainRemove =>
retained_message = null;
case _ =>
}
if( value.ack != null ) {
value.ack(Consumed, value.uow)
}
true
}
def close = {
dispatch_queue {
proxy_sessions.remove(this)
producers.get(producer.asInstanceOf[BindableDeliveryProducer]) match {
case Some(link) => add_link_counters(link, this)
case _ => add_enqueue_counters(topic_metrics, this)
}
}
}
def full = false
def consumer = producer_tracker
}
case class ProxyConsumerSession(proxy:ProxyDeliveryConsumer, session:DeliverySession) extends DeliverySession with SessionSinkFilter[Delivery] {
override def toString = proxy.consumer.toString + " (via "+address+")"
def downstream = session
dispatch_queue {
proxy_sessions.add(this)
}
def close = {
session.close
dispatch_queue {
proxy_sessions.remove(this)
consumers.get(proxy.registered) match {
case Some(proxy) => add_link_counters(proxy.link, this)
case _ =>
proxy.consumer match {
case queue:Queue =>
case _ =>
add_dequeue_counters(topic_metrics, this)
}
}
}
}
def producer = session.producer
def consumer = session.consumer
val ack_pass_through = proxy.link.kind == "dsub"
def offer(value: Delivery) = {
val copy = value.copy();
copy.sender ::= address
if ( ack_pass_through ) {
copy.ack = value.ack
copy.uow = value.uow
}
val accepted = downstream.offer(copy)
// If we don't ack now, then the sender's ack will
// wait for the consumers ack which might be a nice option to give folks.
if( accepted && !ack_pass_through && value.ack!=null ) {
value.ack(Consumed, value.uow)
}
accepted
}
}
case class ProxyDeliveryConsumer(consumer:DeliveryConsumer, link:LinkDTO, registered:DeliveryConsumer) extends DeliveryConsumerFilter(consumer) {
override def connect(producer: DeliveryProducer) = {
new ProxyConsumerSession(this, next.connect(producer))
}
}
val producers = HashMap[BindableDeliveryProducer, LinkDTO]()
val consumers = HashMap[DeliveryConsumer, ProxyDeliveryConsumer]()
var durable_subscriptions = ListBuffer[Queue]()
var idled_at = 0L
val created_at = now
var auto_delete_after = 0
var config:TopicDTO = _
refresh_config
import OptionSupport._
override def toString = address.toString
def virtual_host: VirtualHost = router.virtual_host
def now = virtual_host.broker.now
def dispatch_queue = virtual_host.dispatch_queue
def slow_consumer_policy = config.slow_consumer_policy.getOrElse("block")
def status(show_producers:Boolean, show_consumers:Boolean): FutureResult[TopicStatusDTO] = {
val rc = FutureResult[TopicStatusDTO]()
status(show_producers, show_consumers, x => rc.set(Success(x)))
rc
}
var state = "STARTED"
def status(show_producers:Boolean, show_consumers:Boolean, on_complete:(TopicStatusDTO)=>Unit):Unit = {
dispatch_queue.assertExecuting()
val rc = new TopicStatusDTO
rc.id = this.id
rc.state = state
rc.state_since = this.created_at
rc.config = this.config
rc.metrics.producer_count = producers.size
rc.metrics.consumer_count = consumers.size
this.durable_subscriptions.foreach { q =>
rc.dsubs.add(q.id)
}
def copy(o:LinkDTO) = {
val rc = new LinkDTO()
rc.id = o.id
rc.kind = o.kind
rc.label = o.label
rc.enqueue_ts = o.enqueue_ts
add_link_counters(rc, o);
rc
}
// build the list of producer and consumer links..
val producer_links = HashMap[BindableDeliveryProducer, LinkDTO]()
val consumers_links = HashMap[DeliveryConsumer, LinkDTO]()
this.producers.foreach { case (producer, link) =>
val o = copy(link)
producer_links.put(producer, o)
rc.producers.add(o)
}
this.consumers.foreach { case (consumer, proxy) =>
val o = copy(proxy.link)
consumers_links.put(proxy.consumer, o)
rc.consumers.add(o)
}
if( topic_queue !=null ) {
val link = new LinkDTO()
link.kind = "topic-queue"
link.id = topic_queue.store_id.toString()
link.label = "shared queue"
link.enqueue_ts = now
rc.consumers.add(link)
}
// Add in the counters from the live sessions..
proxy_sessions.foreach{ session =>
val stats = from_session(session)
session match {
case session:ProxyProducerSession =>
for( link <- producer_links.get(session.producer.asInstanceOf[BindableDeliveryProducer]) ) {
add_link_counters(link, stats)
}
case session:ProxyConsumerSession =>
for( link <- consumers_links.get(session.consumer) ) {
add_link_counters(link, stats)
}
}
}
// Now update the topic counters..
rc.metrics.current_time = now
DestinationMetricsSupport.add_destination_metrics(rc.metrics, topic_metrics)
producer_links.values.foreach { link =>
add_enqueue_counters(rc.metrics, link)
}
if( retained_message!=null ) {
rc.retained = 1
}
if( !show_producers ) {
rc.producers = null
}
if( !show_consumers ) {
rc.consumers = null
}
var futures = List[Future[(TopicStatusDTO)=>Unit]]()
if ( topic_queue!=null ) {
val future = Future[(TopicStatusDTO)=>Unit]()
futures ::= future
topic_queue.dispatch_queue {
val metrics = topic_queue.get_queue_metrics
metrics.enqueue_item_counter = 0
metrics.enqueue_size_counter = 0
metrics.enqueue_ts = 0
metrics.producer_counter = 0
metrics.producer_count = 0
// metrics.consumer_counter = 0
// metrics.consumer_count = 0
future.set((rc)=>{
DestinationMetricsSupport.add_destination_metrics(rc.metrics, metrics)
})
}
}
consumers_links.foreach { case (consumer, link) =>
consumer match {
case queue:Queue =>
// aggregate the queue stats instead of the link stats.
val future = Future[(TopicStatusDTO)=>Unit]()
futures ::= future
queue.dispatch_queue {
val metrics = queue.get_queue_metrics
metrics.enqueue_item_counter = 0
metrics.enqueue_size_counter = 0
metrics.enqueue_ts = 0
metrics.producer_counter = 0
metrics.producer_count = 0
metrics.consumer_counter = 0
metrics.consumer_count = 0
future.set((rc)=>{
DestinationMetricsSupport.add_destination_metrics(rc.metrics, metrics)
})
}
case _ =>
// plain link, add it's ats.
add_dequeue_counters(rc.metrics, link)
}
}
Future.all(futures).onComplete{ data=>
data.foreach(_(rc))
on_complete(rc)
}
}
def browse(from_seq:Long, to:Option[Long], max:Long)(func: (BrowseResult)=>Unit):Unit = {
val msg = retained_message
if ( msg==null ) {
func(BrowseResult(0, 0, 0, Array()))
} else {
val status = new EntryStatusDTO()
status.seq = retained_message.seq
status.size = retained_message.size
status.state = "loaded"
status.is_prefetched = true;
func(BrowseResult(status.seq, status.seq, 1, Array((status, retained_message))))
}
}
def update(on_completed:Task) = {
refresh_config
on_completed.run
}
def refresh_config = {
import OptionSupport._
config = config_updater()
auto_delete_after = config.auto_delete_after.getOrElse(30)
if( auto_delete_after!= 0 ) {
// we don't auto delete explicitly configured destinations.
if( !LocalRouter.is_wildcard_destination(config.id) ) {
auto_delete_after = 0
}
}
check_idle
}
def delete:Option[String] = {
dispatch_queue.assertExecuting()
state match {
case "STARTED" =>
if (producers.isEmpty && consumers.isEmpty) {
state = "DELETED"
router.local_topic_domain.remove_destination(address.path, this)
DestinationMetricsSupport.add_destination_metrics(router.virtual_host.dead_topic_metrics, topic_metrics)
None
} else {
Some("Topic is in use.")
}
case _ =>
Some("Topic already deleted.")
}
}
def check_idle {
if (producers.isEmpty && consumers.isEmpty && topic_queue==null) {
if (idled_at==0) {
val previously_idle_at = now
idled_at = previously_idle_at
if( auto_delete_after!=0 ) {
dispatch_queue.after(auto_delete_after, TimeUnit.SECONDS) {
if( previously_idle_at == idled_at ) {
delete
}
}
}
}
} else {
idled_at = 0
}
}
var topic_queue:Queue = null
def bind(address: BindAddress, consumer:DeliveryConsumer, on_bind:()=>Unit):Unit = {
val remaining = new AtomicInteger(1)
var bind_release:()=>Unit = ()=> {
if( remaining.decrementAndGet() == 0 ) {
on_bind()
}
}
def send_retained = {
val r = retained_message
if (r != null) {
val copy = r.copy()
copy.sender ::= address
val producer = new DeliveryProducerRoute(router) {
refiller = NOOP
def dispatch_queue = Topic.this.dispatch_queue
override protected def on_connected = {
copy.ack = (d,x) => consumer.dispatch_queue {
unbind(consumer :: Nil)
}
offer(copy) // producer supports 1 message overflow.
}
}
producer.bind(consumer :: Nil, ()=>{})
producer.connected()
}
}
val target = address.domain match {
case "queue" | "dsub"=>
// durable sub or mirrored queue case.
consumer
case "topic"=>
slow_consumer_policy match {
case "queue" =>
// create a temp queue so that it can spool
if ( topic_queue==null ) {
topic_queue = router._create_queue(new TempQueueBinding(id, Topic.this.address, Option(config.subscription).getOrElse(new QueueSettingsDTO)))
producers.keys.foreach({ r=>
remaining.incrementAndGet()
r.bind(List(topic_queue), bind_release)
})
}
val proxy = new DeliveryConsumerFilter(consumer) {
// Make this consumer act like a continuous queue browser
override def browser = true
override def start_from_tail = true
override def close_on_drain = false
override def exclusive = false
}
topic_queue_consumers.put(consumer, proxy)
topic_queue.bind(List(proxy), bind_release)
send_retained
return
case "block" =>
// just have dispatcher dispatch directly to them..
consumer
}
}
val link = new LinkDTO()
link.kind = "unknown"
link.label = "unknown"
link.enqueue_ts = now
target match {
case queue:Queue =>
queue.binding match {
case x:TempQueueBinding =>
link.kind = "topic-queue"
link.id = queue.store_id.toString()
link.label = "shared queue"
case x:QueueDomainQueueBinding =>
link.kind = "queue"
link.id = queue.id
link.label = queue.id
case x:DurableSubscriptionQueueBinding =>
link.kind = "dsub"
link.id = queue.id
link.label = queue.id
}
case _ =>
for(connection <- target.connection) {
link.kind = "connection"
link.id = connection.id.toString
link.label = connection.transport.getRemoteAddress.toString
}
}
send_retained
val proxy = ProxyDeliveryConsumer(target, link, consumer)
consumers.put(consumer, proxy)
topic_metrics.consumer_counter += 1
val list = proxy :: Nil
producers.keys.foreach({ r=>
remaining.incrementAndGet()
r.bind(list, bind_release)
})
bind_release()
check_idle
}
def unbind (consumer:DeliveryConsumer, persistent:Boolean) = {
val list = topic_queue_consumers.remove(consumer) match {
case Some(consumer)=>
topic_queue.unbind(List(consumer))
// Once we don't have any subscribers.. delete the queue.
if( topic_queue_consumers.isEmpty ) {
val queue = topic_queue
topic_queue = null
queue.dispatch_queue {
if( queue.all_subscriptions.isEmpty ) {
val metrics = queue.get_queue_metrics
router.dispatch_queue {
if(router.service_state.is_started) {
router._destroy_queue(queue)
}
}
dispatch_queue {
topic_metrics.dequeue_item_counter += metrics.dequeue_item_counter
topic_metrics.dequeue_size_counter += metrics.dequeue_size_counter
topic_metrics.dequeue_ts = topic_metrics.dequeue_ts max metrics.dequeue_ts
topic_metrics.nack_item_counter += metrics.nack_item_counter
topic_metrics.nack_size_counter += metrics.nack_size_counter
topic_metrics.nack_ts = topic_metrics.nack_ts max metrics.nack_ts
topic_metrics.expired_item_counter += metrics.expired_item_counter
topic_metrics.expired_size_counter += metrics.expired_size_counter
topic_metrics.expired_ts = topic_metrics.expired_ts max metrics.expired_ts
}
}
}
}
List()
case None =>
consumers.remove(consumer) match {
case Some(consumer)=>
add_dequeue_counters(topic_metrics, consumer.link)
List(consumer.consumer)
case None =>
List()
}
}
for( producer <- producers.keys ) {
producer.unbind(list)
}
check_idle
}
def bind_durable_subscription(address: SubscriptionAddress, queue:Queue) = {
if( !durable_subscriptions.contains(queue) ) {
durable_subscriptions += queue
bind(address, queue, ()=>{})
}
check_idle
}
def unbind_durable_subscription(queue:Queue) = {
if( durable_subscriptions.contains(queue) ) {
unbind(queue, false)
durable_subscriptions -= queue
}
check_idle
}
def connect (address:ConnectAddress, producer:BindableDeliveryProducer) = {
val link = new LinkDTO()
producer.connection match {
case Some(connection) =>
link.kind = "connection"
link.id = connection.id.toString
link.label = connection.transport.getRemoteAddress.toString
case _ =>
link.kind = "unknown"
link.label = "unknown"
}
producers.put(producer, link)
topic_metrics.producer_counter += 1
var targets:List[DeliveryConsumer] = producer_tracker :: consumers.values.toList
if( topic_queue !=null ) {
targets ::= topic_queue
}
producer.bind(targets, ()=>{})
check_idle
}
def disconnect (producer:BindableDeliveryProducer) = {
for(link <- producers.remove(producer) ) {
add_enqueue_counters(topic_metrics, link)
}
var targets:List[DeliveryConsumer] = producer_tracker :: consumers.values.toList
if( topic_queue !=null ) {
targets ::= topic_queue
}
producer.unbind(targets)
check_idle
}
def disconnect_producers:Unit ={
for( (_, link) <- producers ) {
add_enqueue_counters(topic_metrics, link)
}
producers.clear
check_idle
}
}
| chirino/activemq-apollo | apollo-broker/src/main/scala/org/apache/activemq/apollo/broker/Topic.scala | Scala | apache-2.0 | 19,499 |
/**
* Copyright (C) 2015 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.crossdata.connector.cassandra
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class CassandraCreateExternalTableIT extends CassandraWithSharedContext {
"The Cassandra connector" should "execute natively create a External Table" in {
val createTableQUeryString =
s"""|CREATE EXTERNAL TABLE $Catalog.newtable (id Integer, name String)
|USING $SourceProvider
|OPTIONS (
|keyspace '$Catalog',
|cluster '$ClusterName',
|pushdown "true",
|spark_cassandra_connection_host '$CassandraHost',
|primary_key_string 'id'
|)
""".stripMargin.replaceAll("\n", " ")
//Experimentation
val result = sql(createTableQUeryString).collect()
//Expectations
val table = xdContext.table(s"$Catalog.newtable")
table should not be null
table.schema.fieldNames should contain ("name")
}
it should "execute natively create a External Table with no existing Keyspace" in {
val createTableQUeryString =
s"""|CREATE EXTERNAL TABLE newkeyspace.othertable (id Integer, name String)
|USING $SourceProvider
|OPTIONS (
|keyspace 'newkeyspace',
|cluster '$ClusterName',
|pushdown "true",
|spark_cassandra_connection_host '$CassandraHost',
|primary_key_string 'id',
|with_replication "{'class' : 'SimpleStrategy', 'replication_factor' : 3}"
|)
""".stripMargin.replaceAll("\n", " ")
try {
//Experimentation
val result = sql(createTableQUeryString).collect()
//Expectations
val table = xdContext.table(s"newkeyspace.othertable")
table should not be null
table.schema.fieldNames should contain("name")
}finally {
//AFTER
client.get._2.execute(s"DROP KEYSPACE newkeyspace")
}
}
it should "fail execute natively create a External Table with no existing Keyspace without with_replication" in {
val createTableQUeryString =
s"""|CREATE EXTERNAL TABLE NoKeyspaceCreatedBefore.newTable (id Integer, name String)
|USING $SourceProvider
|OPTIONS (
|keyspace 'NoKeyspaceCreatedBefore',
|cluster '$ClusterName',
|pushdown "true",
|spark_cassandra_connection_host '$CassandraHost',
|primary_key_string 'id'
|)
""".stripMargin.replaceAll("\n", " ")
//Experimentation
the [IllegalArgumentException] thrownBy {
sql(createTableQUeryString).collect()
} should have message "requirement failed: with_replication required when use CREATE EXTERNAL TABLE command"
}
it should "fail execute natively create a External Table without keyspace" in {
val createTableQUeryString =
s"""|CREATE EXTERNAL TABLE newtable (id Integer, name String)
|USING $SourceProvider
|OPTIONS (
|keyspace '$Catalog',
|cluster '$ClusterName',
|pushdown "true",
|spark_cassandra_connection_host '$CassandraHost',
|primary_key_string 'id'
|)
""".stripMargin.replaceAll("\n", " ")
//Experimentation
the [IllegalArgumentException] thrownBy {
sql(createTableQUeryString).collect()
} should have message "requirement failed: Catalog is required required when use CREATE EXTERNAL TABLE command"
}
}
| luismcl/crossdata | cassandra/src/test/scala/com/stratio/crossdata/connector/cassandra/CassandraCreateExternalTableIT.scala | Scala | apache-2.0 | 4,048 |
/* __ *\\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2006-2009, Ross Judson **
** __\\ \\/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\\___/_/ |_/____/_/ | | **
** |/ **
\\* */
package scala.util
import scala.reflect.ClassManifest
/** The Sorting object provides functions that can sort various kinds of
* objects. You can provide a comparison function, or you can request a sort
* of items that are viewable as <code>Ordered</code>. Some sorts that
* operate directly on a subset of value types are also provided. These
* implementations are derived from those in the Sun JDK.
*
* Note that stability doesn't matter for value types, so use the quickSort
* variants for those. <code>stableSort</code> is intended to be used with
* objects when the prior ordering should be preserved, where possible.
*
* @author Ross Judson
* @version 1.0
*/
object Sorting {
/** Provides implicit access to sorting on arbitrary sequences of orderable
* items. This doesn't quite work the way that I want yet -- K should be
* bounded as viewable, but the compiler rejects that.
*/
// implicit def seq2RichSort[K <: Ordered[K] : ClassManifest](s: Seq[K]) = new RichSorting[K](s)
/** Quickly sort an array of Doubles. */
def quickSort(a: Array[Double]) { sort1(a, 0, a.length) }
/** Quickly sort an array of items with an implicit Ordering. */
def quickSort[K](a: Array[K])(implicit ord: Ordering[K]) { sort1(a, 0, a.length) }
/** Quickly sort an array of Ints. */
def quickSort(a: Array[Int]) { sort1(a, 0, a.length) }
/** Quickly sort an array of Floats. */
def quickSort(a: Array[Float]) { sort1(a, 0, a.length) }
/** Sort an array of K where K is Ordered, preserving the existing order
* where the values are equal. */
def stableSort[K](a: Array[K])(implicit m: ClassManifest[K], ord: Ordering[K]) {
stableSort(a, 0, a.length-1, new Array[K](a.length), ord.lt _)
}
/** Sorts an array of <code>K</code> given an ordering function
* <code>f</code>. <code>f</code> should return <code>true</code> iff
* its first parameter is strictly less than its second parameter.
*/
def stableSort[K : ClassManifest](a: Array[K], f: (K,K) => Boolean) {
stableSort(a, 0, a.length-1, new Array[K](a.length), f)
}
/** Sorts an arbitrary sequence into an array, given a comparison function
* that should return <code>true</code> iff parameter one is strictly less
* than parameter two.
*
* @param a the sequence to be sorted.
* @param f the comparison function.
* @return the sorted sequence of items.
*/
def stableSort[K : ClassManifest](a: Seq[K], f: (K,K) => Boolean): Array[K] = {
val ret = a.toArray
stableSort(ret, f)
ret
}
/** Sorts an arbitrary sequence of items that are viewable as ordered. */
def stableSort[K](a: Seq[K])(implicit m: ClassManifest[K], ord: Ordering[K]): Array[K] =
stableSort(a, ord.lt _)
/** Stably sorts a sequence of items given an extraction function that will
* return an ordered key from an item.
*
* @param a the sequence to be sorted.
* @param f the comparison function.
* @return the sorted sequence of items.
*/
def stableSort[K, M](a: Seq[K], f: K => M)(implicit m: ClassManifest[K], ord: Ordering[M]): Array[K] =
stableSort(a)(m, ord on f)
private def sort1[K](x: Array[K], off: Int, len: Int)(implicit ord: Ordering[K]) {
import ord._
def swap(a: Int, b: Int) {
val t = x(a)
x(a) = x(b)
x(b) = t
}
def vecswap(_a: Int, _b: Int, n: Int) {
var a = _a
var b = _b
var i = 0
while (i < n) {
swap(a, b)
i += 1
a += 1
b += 1
}
}
def med3(a: Int, b: Int, c: Int) = {
if (x(a) < x(b)) {
if (x(b) < x(c)) b else if (x(a) < x(c)) c else a
} else {
if (x(b) > x(c)) b else if (x(a) > x(c)) c else a
}
}
def sort2(off: Int, len: Int) {
// Insertion sort on smallest arrays
if (len < 7) {
var i = off
while (i < len + off) {
var j = i
while (j > off && x(j-1) > x(j)) {
swap(j, j-1)
j -= 1
}
i += 1
}
} else {
// Choose a partition element, v
var m = off + (len >> 1) // Small arrays, middle element
if (len > 7) {
var l = off
var n = off + len - 1
if (len > 40) { // Big arrays, pseudomedian of 9
var s = len / 8
l = med3(l, l+s, l+2*s)
m = med3(m-s, m, m+s)
n = med3(n-2*s, n-s, n)
}
m = med3(l, m, n) // Mid-size, med of 3
}
val v = x(m)
// Establish Invariant: v* (<v)* (>v)* v*
var a = off
var b = a
var c = off + len - 1
var d = c
var done = false
while (!done) {
while (b <= c && x(b) <= v) {
if (x(b) == v) {
swap(a, b)
a += 1
}
b += 1
}
while (c >= b && x(c) >= v) {
if (x(c) == v) {
swap(c, d)
d -= 1
}
c -= 1
}
if (b > c) {
done = true
} else {
swap(b, c)
c -= 1
b += 1
}
}
// Swap partition elements back to middle
val n = off + len
var s = math.min(a-off, b-a)
vecswap(off, b-s, s)
s = math.min(d-c, n-d-1)
vecswap(b, n-s, s)
// Recursively sort non-partition-elements
s = b - a
if (s > 1)
sort2(off, s)
s = d - c
if (s > 1)
sort2(n-s, s)
}
}
sort2(off, len)
}
private def sort1(x: Array[Int], off: Int, len: Int) {
def swap(a: Int, b: Int) {
val t = x(a)
x(a) = x(b)
x(b) = t
}
def vecswap(_a: Int, _b: Int, n: Int) {
var a = _a
var b = _b
var i = 0
while (i < n) {
swap(a, b)
i += 1
a += 1
b += 1
}
}
def med3(a: Int, b: Int, c: Int) = {
if (x(a) < x(b)) {
if (x(b) < x(c)) b else if (x(a) < x(c)) c else a
} else {
if (x(b) > x(c)) b else if (x(a) > x(c)) c else a
}
}
def sort2(off: Int, len: Int) {
// Insertion sort on smallest arrays
if (len < 7) {
var i = off
while (i < len + off) {
var j = i
while (j>off && x(j-1) > x(j)) {
swap(j, j-1)
j -= 1
}
i += 1
}
} else {
// Choose a partition element, v
var m = off + (len >> 1) // Small arrays, middle element
if (len > 7) {
var l = off
var n = off + len - 1
if (len > 40) { // Big arrays, pseudomedian of 9
var s = len / 8
l = med3(l, l+s, l+2*s)
m = med3(m-s, m, m+s)
n = med3(n-2*s, n-s, n)
}
m = med3(l, m, n) // Mid-size, med of 3
}
val v = x(m)
// Establish Invariant: v* (<v)* (>v)* v*
var a = off
var b = a
var c = off + len - 1
var d = c
var done = false
while (!done) {
while (b <= c && x(b) <= v) {
if (x(b) == v) {
swap(a, b)
a += 1
}
b += 1
}
while (c >= b && x(c) >= v) {
if (x(c) == v) {
swap(c, d)
d -= 1
}
c -= 1
}
if (b > c) {
done = true
} else {
swap(b, c)
c -= 1
b += 1
}
}
// Swap partition elements back to middle
val n = off + len
var s = math.min(a-off, b-a)
vecswap(off, b-s, s)
s = math.min(d-c, n-d-1)
vecswap(b, n-s, s)
// Recursively sort non-partition-elements
s = b - a
if (s > 1)
sort2(off, s)
s = d - c
if (s > 1)
sort2(n-s, s)
}
}
sort2(off, len)
}
private def sort1(x: Array[Double], off: Int, len: Int) {
def swap(a: Int, b: Int) {
val t = x(a)
x(a) = x(b)
x(b) = t
}
def vecswap(_a: Int, _b: Int, n: Int) {
var a = _a
var b = _b
var i = 0
while (i < n) {
swap(a, b)
i += 1
a += 1
b += 1
}
}
def med3(a: Int, b: Int, c: Int) = {
val ab = x(a) compare x(b)
val bc = x(b) compare x(c)
val ac = x(a) compare x(c)
if (ab < 0) {
if (bc < 0) b else if (ac < 0) c else a
} else {
if (bc > 0) b else if (ac > 0) c else a
}
}
def sort2(off: Int, len: Int) {
// Insertion sort on smallest arrays
if (len < 7) {
var i = off
while (i < len + off) {
var j = i
while (j > off && (x(j-1) compare x(j)) > 0) {
swap(j, j-1)
j -= 1
}
i += 1
}
} else {
// Choose a partition element, v
var m = off + (len >> 1) // Small arrays, middle element
if (len > 7) {
var l = off
var n = off + len - 1
if (len > 40) { // Big arrays, pseudomedian of 9
var s = len / 8
l = med3(l, l+s, l+2*s)
m = med3(m-s, m, m+s)
n = med3(n-2*s, n-s, n)
}
m = med3(l, m, n) // Mid-size, med of 3
}
val v = x(m)
// Establish Invariant: v* (<v)* (>v)* v*
var a = off
var b = a
var c = off + len - 1
var d = c
var done = false
while (!done) {
var bv = x(b) compare v
while (b <= c && bv <= 0) {
if (bv == 0) {
swap(a, b)
a += 1
}
b += 1
if (b <= c) bv = x(b) compare v
}
var cv = x(c) compare v
while (c >= b && cv >= 0) {
if (cv == 0) {
swap(c, d)
d -= 1
}
c -= 1
if (c >= b) cv = x(c) compare v
}
if (b > c) {
done = true
} else {
swap(b, c)
c -= 1
b += 1
}
}
// Swap partition elements back to middle
val n = off + len
var s = math.min(a-off, b-a)
vecswap(off, b-s, s)
s = math.min(d-c, n-d-1)
vecswap(b, n-s, s)
// Recursively sort non-partition-elements
s = b - a
if (s > 1)
sort2(off, s)
s = d - c
if (s > 1)
sort2(n-s, s)
}
}
sort2(off, len)
}
private def sort1(x: Array[Float], off: Int, len: Int) {
def swap(a: Int, b: Int) {
val t = x(a)
x(a) = x(b)
x(b) = t
}
def vecswap(_a: Int, _b: Int, n: Int) {
var a = _a
var b = _b
var i = 0
while (i < n) {
swap(a, b)
i += 1
a += 1
b += 1
}
}
def med3(a: Int, b: Int, c: Int) = {
val ab = x(a) compare x(b)
val bc = x(b) compare x(c)
val ac = x(a) compare x(c)
if (ab < 0) {
if (bc < 0) b else if (ac < 0) c else a
} else {
if (bc > 0) b else if (ac > 0) c else a
}
}
def sort2(off: Int, len: Int) {
// Insertion sort on smallest arrays
if (len < 7) {
var i = off
while (i < len + off) {
var j = i
while (j > off && (x(j-1) compare x(j)) > 0) {
swap(j, j-1)
j -= 1
}
i += 1
}
} else {
// Choose a partition element, v
var m = off + (len >> 1) // Small arrays, middle element
if (len > 7) {
var l = off
var n = off + len - 1
if (len > 40) { // Big arrays, pseudomedian of 9
var s = len / 8
l = med3(l, l+s, l+2*s)
m = med3(m-s, m, m+s)
n = med3(n-2*s, n-s, n)
}
m = med3(l, m, n) // Mid-size, med of 3
}
val v = x(m)
// Establish Invariant: v* (<v)* (>v)* v*
var a = off
var b = a
var c = off + len - 1
var d = c
var done = false
while (!done) {
var bv = x(b) compare v
while (b <= c && bv <= 0) {
if (bv == 0) {
swap(a, b)
a += 1
}
b += 1
if (b <= c) bv = x(b) compare v
}
var cv = x(c) compare v
while (c >= b && cv >= 0) {
if (cv == 0) {
swap(c, d)
d -= 1
}
c -= 1
if (c >= b) cv = x(c) compare v
}
if (b > c) {
done = true
} else {
swap(b, c)
c -= 1
b += 1
}
}
// Swap partition elements back to middle
val n = off + len
var s = math.min(a-off, b-a)
vecswap(off, b-s, s)
s = math.min(d-c, n-d-1)
vecswap(b, n-s, s)
// Recursively sort non-partition-elements
s = b - a
if (s > 1)
sort2(off, s)
s = d - c
if (s > 1)
sort2(n-s, s)
}
}
sort2(off, len)
}
private def stableSort[K : ClassManifest](a: Array[K], lo: Int, hi: Int, scratch: Array[K], f: (K,K) => Boolean) {
if (lo < hi) {
val mid = (lo+hi) / 2
stableSort(a, lo, mid, scratch, f)
stableSort(a, mid+1, hi, scratch, f)
var k, t_lo = lo
var t_hi = mid + 1
while (k <= hi) {
if ((t_lo <= mid) && ((t_hi > hi) || (!f(a(t_hi), a(t_lo))))) {
scratch(k) = a(t_lo)
t_lo += 1
} else {
scratch(k) = a(t_hi)
t_hi += 1
}
k += 1
}
k = lo
while (k <= hi) {
a(k) = scratch(k)
k += 1
}
}
}
}
/** <p>
* A <code>RichSorting</code> object is generally created implicitly through
* the use of the <code>sort</code> function on an arbitrary sequence, where
* the items are ordered.
* </p>
*/
class RichSorting[K](s: Seq[K])(implicit m: ClassManifest[K], ord: Ordering[K]) {
/** Returns an array with a sorted copy of the RichSorting's sequence.
*/
def sort = Sorting.stableSort(s)
}
| cran/rkafkajars | java/scala/util/Sorting.scala | Scala | apache-2.0 | 14,952 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.client
import org.apache.spark.{Logging, SparkFunSuite}
import org.apache.spark.sql.catalyst.util.quietly
import org.apache.spark.util.Utils
/**
* A simple set of tests that call the methods of a hive ClientInterface, loading different version
* of hive from maven central. These tests are simple in that they are mostly just testing to make
* sure that reflective calls are not throwing NoSuchMethod error, but the actually functionality
* is not fully tested.
*/
class VersionsSuite extends SparkFunSuite with Logging {
private def buildConf() = {
lazy val warehousePath = Utils.createTempDir()
lazy val metastorePath = Utils.createTempDir()
metastorePath.delete()
Map(
"javax.jdo.option.ConnectionURL" -> s"jdbc:derby:;databaseName=$metastorePath;create=true",
"hive.metastore.warehouse.dir" -> warehousePath.toString)
}
test("success sanity check") {
val badClient = IsolatedClientLoader.forVersion("13", buildConf()).client
val db = new HiveDatabase("default", "")
badClient.createDatabase(db)
}
private def getNestedMessages(e: Throwable): String = {
var causes = ""
var lastException = e
while (lastException != null) {
causes += lastException.toString + "\\n"
lastException = lastException.getCause
}
causes
}
private val emptyDir = Utils.createTempDir().getCanonicalPath
private def partSpec = {
val hashMap = new java.util.LinkedHashMap[String, String]
hashMap.put("key", "1")
hashMap
}
// Its actually pretty easy to mess things up and have all of your tests "pass" by accidentally
// connecting to an auto-populated, in-process metastore. Let's make sure we are getting the
// versions right by forcing a known compatibility failure.
// TODO: currently only works on mysql where we manually create the schema...
ignore("failure sanity check") {
val e = intercept[Throwable] {
val badClient = quietly { IsolatedClientLoader.forVersion("13", buildConf()).client }
}
assert(getNestedMessages(e) contains "Unknown column 'A0.OWNER_NAME' in 'field list'")
}
private val versions = Seq("12", "13")
private var client: ClientInterface = null
versions.foreach { version =>
test(s"$version: create client") {
client = null
client = IsolatedClientLoader.forVersion(version, buildConf()).client
}
test(s"$version: createDatabase") {
val db = HiveDatabase("default", "")
client.createDatabase(db)
}
test(s"$version: createTable") {
val table =
HiveTable(
specifiedDatabase = Option("default"),
name = "src",
schema = Seq(HiveColumn("key", "int", "")),
partitionColumns = Seq.empty,
properties = Map.empty,
serdeProperties = Map.empty,
tableType = ManagedTable,
location = None,
inputFormat =
Some(classOf[org.apache.hadoop.mapred.TextInputFormat].getName),
outputFormat =
Some(classOf[org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat[_, _]].getName),
serde =
Some(classOf[org.apache.hadoop.hive.serde2.`lazy`.LazySimpleSerDe].getName()))
client.createTable(table)
}
test(s"$version: getTable") {
client.getTable("default", "src")
}
test(s"$version: listTables") {
assert(client.listTables("default") === Seq("src"))
}
test(s"$version: currentDatabase") {
assert(client.currentDatabase === "default")
}
test(s"$version: getDatabase") {
client.getDatabase("default")
}
test(s"$version: alterTable") {
client.alterTable(client.getTable("default", "src"))
}
test(s"$version: set command") {
client.runSqlHive("SET spark.sql.test.key=1")
}
test(s"$version: create partitioned table DDL") {
client.runSqlHive("CREATE TABLE src_part (value INT) PARTITIONED BY (key INT)")
client.runSqlHive("ALTER TABLE src_part ADD PARTITION (key = '1')")
}
test(s"$version: getPartitions") {
client.getAllPartitions(client.getTable("default", "src_part"))
}
test(s"$version: loadPartition") {
client.loadPartition(
emptyDir,
"default.src_part",
partSpec,
false,
false,
false,
false)
}
test(s"$version: loadTable") {
client.loadTable(
emptyDir,
"src",
false,
false)
}
test(s"$version: loadDynamicPartitions") {
client.loadDynamicPartitions(
emptyDir,
"default.src_part",
partSpec,
false,
1,
false,
false)
}
}
}
| andrewor14/iolap | sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala | Scala | apache-2.0 | 5,522 |
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.scalding.mathematics
import org.scalacheck.Arbitrary
import org.scalacheck.Properties
import org.scalacheck.Prop.forAll
import org.scalacheck.Gen._
object SizeHintProps extends Properties("SizeHint") {
val noClueGen = const(NoClue)
val finiteHintGen = for {
rows <- choose(-1L, 1000000L)
cols <- choose(-1L, 1000000L)
} yield FiniteHint(rows, cols)
val sparseHintGen = for {
rows <- choose(-1L, 1000000L)
cols <- choose(-1L, 1000000L)
sparsity <- choose(0.0, 1.0)
} yield SparseHint(sparsity, rows, cols)
implicit val finiteArb: Arbitrary[FiniteHint] = Arbitrary(finiteHintGen)
implicit val sparseArb: Arbitrary[SparseHint] = Arbitrary(sparseHintGen)
implicit val genHint: Arbitrary[SizeHint] = Arbitrary(oneOf(noClueGen, finiteHintGen, sparseHintGen))
property("a+b is at least as big as a") = forAll { (a: SizeHint, b: SizeHint) =>
val addT = for {
ta <- a.total
tsum <- (a + b).total
} yield (tsum >= ta)
addT.getOrElse(true)
}
property("a#*#b is at most as big as a") = forAll { (a: SizeHint, b: SizeHint) =>
val addT = for {
ta <- a.total
tsum <- (a #*# b).total
} yield (tsum <= ta)
addT.getOrElse(true)
}
property("ordering makes sense") = forAll { (a: SizeHint, b: SizeHint) =>
(List(a, b).max.total.getOrElse(BigInt(-1L)) >= a.total.getOrElse(BigInt(-1L)))
}
property("addition increases sparsity fraction") = forAll { (a: SparseHint, b: SparseHint) =>
(a + b).asInstanceOf[SparseHint].sparsity >= a.sparsity
}
property("Hadamard product does not increase sparsity fraction") = forAll {
(a: SparseHint, b: SparseHint) =>
(a #*# b).asInstanceOf[SparseHint].sparsity == (a.sparsity.min(b.sparsity))
}
property("transpose preserves size") = forAll { (a: SizeHint) =>
a.transpose.total == a.total
}
property("squaring a finite hint preserves size") = forAll { (a: FiniteHint) =>
val sq = a.setRowsToCols
val sq2 = a.setColsToRows
(sq.total == (sq * sq).total) && (sq2.total == (sq2 * sq2).total)
}
property("adding a finite hint to itself preserves size") = forAll { (a: FiniteHint) =>
(a + a).total == a.total
}
property("hadamard product of a finite hint to itself preserves size") = forAll { (a: FiniteHint) =>
(a #*# a).total == a.total
}
property("adding a sparse matrix to itself doesn't decrease size") = forAll { (a: SparseHint) =>
(for {
doubleSize <- (a + a).total
asize <- a.total
} yield (doubleSize >= asize)).getOrElse(true)
}
property("diagonals are smaller") = forAll { (a: FiniteHint) =>
SizeHint.asDiagonal(a).total.getOrElse(BigInt(-2L)) < a.total.getOrElse(-1L)
}
property("diagonals are about as big as the min(rows,cols)") = forAll { (a: FiniteHint) =>
SizeHint.asDiagonal(a).total.getOrElse(BigInt(-1L)) <= (a.rows.min(a.cols))
SizeHint.asDiagonal(a).total.getOrElse(BigInt(-1L)) >= ((a.rows.min(a.cols)) - 1L)
}
property("transpose law is obeyed in total") = forAll { (a: SizeHint, b: SizeHint) =>
// (A B)^T = B^T A^T
(a * b).transpose.total == ((b.transpose) * (a.transpose)).total
}
}
| twitter/scalding | scalding-core/src/test/scala/com/twitter/scalding/mathematics/SizeHintTest.scala | Scala | apache-2.0 | 3,731 |
/**
* Copyright (C) 2010-2011 LShift Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.lshift.diffa.agent.rest
import javax.ws.rs.core.{UriInfo, Response}
/**
* Helper class to provide commonly used HTTP responses
*/
object ResponseUtils {
/**
* Constructs an HTTP 201 response for the given identifier in the given context
*/
def resourceCreated(id:String, uriInfo:UriInfo) = {
val uri = uriInfo.getAbsolutePathBuilder().path(id).build()
Response.created(uri).build()
}
/**
* Constructs an HTTP 201 response for the given identifier in the given context
*/
def resourceDeleted() = {
Response.noContent().build()
}
} | lshift/diffa | agent/src/main/scala/net/lshift/diffa/agent/rest/ResponseUtils.scala | Scala | apache-2.0 | 1,193 |
package org.zbizaca.mastersvoice.attribute
/**
* Created by zbizaca on 12/18/15.
*/
object AttributeType extends Enumeration {
type AttributeType = Value
val Text, Real, Integer, Empty = Value
}
| zbizaca/mastersvoice | src/main/scala/org/zbizaca/mastersvoice/attribute/AttributeType.scala | Scala | gpl-2.0 | 204 |
object i0 {
1 match {
def this(): Int // error
def this() // error
}
}
| dotty-staging/dotty | tests/neg/i5004.scala | Scala | apache-2.0 | 75 |
import com.github.paulp.optional
import org.scalatest.FunSuite
import org.scalatest.matchers.ShouldMatchers
class StubApp extends optional.Application {
var fish_ : Option[String] = _
var fowl_ : String = _
var args_ : List[String] = _
def main (fish: Option[String], fowl: String) = {
fish_ = fish
fowl_ = fowl
args_ = getArgs()
}
}
class TestSuite extends FunSuite with ShouldMatchers {
test ("absent optional arg is None") {
val app = new StubApp
app.main (Array ("--fowl", "turkey"))
app.fish_ should equal (None)
}
test ("present optional arg is Some") {
val app = new StubApp
app.main (Array ("--fowl", "chicken", "--fish", "halibut"))
app.fish_ should equal (Some("halibut"))
}
test ("present compulsory arg is passed") {
val app = new StubApp
app.main (Array ("--fowl", "chicken", "--fish", "halibut"))
app.fowl_ should equal ("chicken")
}
test ("positional arguments are passed") {
val app = new StubApp
app.main (Array ("taxrises", "spiritlevel", "--fowl", "goose", "trickledown", "spendingcuts"))
app.args_ should equal (List ("taxrises", "spiritlevel", "trickledown", "spendingcuts"))
}
}
| ornicar/optional | src/test/scala/simple.scala | Scala | bsd-3-clause | 1,203 |
package filodb.gateway
import java.net.InetSocketAddress
import java.nio.charset.Charset
import java.util.concurrent.Executors
import scala.concurrent.Future
import scala.concurrent.duration._
import scala.util.control.NonFatal
import com.typesafe.config.{Config, ConfigFactory}
import com.typesafe.scalalogging.StrictLogging
import kamon.Kamon
import monix.eval.Task
import monix.execution.Scheduler
import monix.kafka._
import monix.reactive.Observable
import net.ceedubs.ficus.Ficus._
import org.jboss.netty.bootstrap.ServerBootstrap
import org.jboss.netty.buffer.ChannelBuffer
import org.jboss.netty.channel.{ChannelPipeline, ChannelPipelineFactory, Channels}
import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory
import org.jboss.netty.handler.ssl.SslContext
import org.jboss.netty.handler.ssl.util.SelfSignedCertificate
import org.jctools.queues.MpscGrowableArrayQueue
import org.rogach.scallop._
import filodb.coordinator.{FilodbSettings, ShardMapper, StoreFactory}
import filodb.core.GlobalConfig
import filodb.core.binaryrecord2.RecordBuilder
import filodb.core.metadata.Dataset
import filodb.gateway.conversion._
import filodb.memory.MemFactory
import filodb.timeseries.TestTimeseriesProducer
/**
* Gateway server to ingest source streams of data, shard, batch, and write output to Kafka
* built using high performance Netty TCP code
*
* It usually takes one arg: the source config file which contains # Kafka partitions/shards and other config
* Also pass in -Dconfig.file=.... as usual, with a config that points to the dataset metadata.
* For local setups, simply run `./dev-gateway.sh`.
* For help pass in `--help`.
*
* NOTE: set `kamon.prometheus.embedded-server.port` to avoid conflicting with FiloDB itself.
*
* There are options that can be used to generate test data, such as `--gen-hist-data`. The -n and -p options can
* also be used together to control the # of samples per series and # of time series.
* To generate Histogram schema test data, one must create the following dataset:
* ./filo-cli -Dconfig.file=conf/timeseries-filodb-server.conf --command create --dataset histogram \
* --dataColumns timestamp:ts,sum:long,count:long,h:hist:counter=true --partitionColumns metric:string,tags:map \
* --shardKeyColumns metric --metricColumn metric
* create a Kafka topic:
* kafka-topics --create --zookeeper localhost:2181 --replication-factor 1 --partitions 4 --topic histogram-dev
* and use the `conf/histogram-dev-source.conf` config file.
* Oh, and you have to observe on shards 1 and 3.
*/
object GatewayServer extends StrictLogging {
// Get global configuration using universal FiloDB/Akka-based config
val config = GlobalConfig.systemConfig
val storeFactory = StoreFactory(new FilodbSettings(config), Scheduler.io())
// ==== Metrics ====
val numInfluxMessages = Kamon.counter("num-influx-messages")
val numInfluxParseErrors = Kamon.counter("num-influx-parse-errors")
val numDroppedMessages = Kamon.counter("num-dropped-messages")
val numContainersSent = Kamon.counter("num-containers-sent")
val containersSize = Kamon.histogram("containers-size-bytes")
// Most options are for generating test data
class GatewayOptions(args: Seq[String]) extends ScallopConf(args) {
val samplesPerSeries = opt[Int](short = 'n', default = Some(100),
descr = "# of samples per time series")
val numSeries = opt[Int](short = 'p', default = Some(20), descr = "# of total time series")
val sourceConfigPath = trailArg[String](descr = "Path to source config, eg conf/timeseries-dev-source.conf")
val genHistData = toggle(noshort = true, descrYes = "Generate histogram-schema test data and exit")
val genPromData = toggle(noshort = true, descrYes = "Generate Prometheus-schema test data and exit")
verify()
}
//scalastyle:off method.length
def main(args: Array[String]): Unit = {
Kamon.loadReportersFromConfig()
val userOpts = new GatewayOptions(args)
val numSamples = userOpts.samplesPerSeries() * userOpts.numSeries()
val numSeries = userOpts.numSeries()
val sourceConfig = ConfigFactory.parseFile(new java.io.File(userOpts.sourceConfigPath()))
val numShards = sourceConfig.getInt("num-shards")
val dataset = Dataset.fromConfig(sourceConfig)
// NOTE: the spread MUST match the default spread used in the HTTP module for consistency between querying
// and ingestion sharding
val spread = config.getInt("filodb.spread-default")
val shardMapper = new ShardMapper(numShards)
val queueFullWait = config.as[FiniteDuration]("gateway.queue-full-wait").toMillis
val (shardQueues, containerStream) = shardingPipeline(config, numShards, dataset)
def calcShardAndQueueHandler(buf: ChannelBuffer): Unit = {
val initIndex = buf.readerIndex
val len = buf.readableBytes
numInfluxMessages.increment
InfluxProtocolParser.parse(buf, dataset.options) map { record =>
logger.trace(s"Enqueuing: $record")
val shard = shardMapper.ingestionShard(record.shardKeyHash, record.partitionKeyHash, spread)
if (!shardQueues(shard).offer(record)) {
// Prioritize recent data. This means dropping messages when full, so new data may have a chance.
logger.warn(s"Queue for shard=$shard is full. Dropping data.")
numDroppedMessages.increment
// Thread sleep queueFullWait
}
} getOrElse {
numInfluxParseErrors.increment
logger.warn(s"Could not parse:\n${buf.toString(initIndex, len, Charset.defaultCharset)}")
}
}
// TODO: allow configurable sinks, maybe multiple sinks for say writing to multiple Kafka clusters/DCs
setupKafkaProducer(sourceConfig, containerStream)
val genHist = userOpts.genHistData.getOrElse(false)
val genProm = userOpts.genPromData.getOrElse(false)
if (genHist || genProm) {
val startTime = System.currentTimeMillis
logger.info(s"Generating $numSamples samples starting at $startTime....")
val stream = if (genHist) TestTimeseriesProducer.genHistogramData(startTime, dataset, numSeries)
else TestTimeseriesProducer.timeSeriesData(startTime, numSeries)
stream.take(numSamples).foreach { rec =>
val shard = shardMapper.ingestionShard(rec.shardKeyHash, rec.partitionKeyHash, spread)
if (!shardQueues(shard).offer(rec)) {
// Prioritize recent data. This means dropping messages when full, so new data may have a chance.
logger.warn(s"Queue for shard=$shard is full. Dropping data.")
numDroppedMessages.increment
}
}
Thread sleep 10000
TestTimeseriesProducer.logQueryHelp(numSamples, numSeries, startTime)
logger.info(s"Waited for containers to be sent, exiting...")
sys.exit(0)
} else {
setupTCPService(config, calcShardAndQueueHandler)
}
}
//scalastyle:on method.length
def setupTCPService(config: Config, handler: ChannelBuffer => Unit): Unit = {
val influxPort = config.getInt("gateway.influx-port")
// Configure SSL.
val SSL = config.getBoolean("gateway.tcp.ssl-enabled")
val sslCtx = if (SSL) {
val ssc = new SelfSignedCertificate()
Some(SslContext.newServerContext(ssc.certificate(), ssc.privateKey()))
} else {
None
}
// Configure the bootstrap.
val bootstrap = new ServerBootstrap(
new NioServerSocketChannelFactory(
Executors.newCachedThreadPool(),
Executors.newCachedThreadPool()))
bootstrap.setPipelineFactory(new ChannelPipelineFactory() {
def getPipeline(): ChannelPipeline = {
val p = Channels.pipeline();
sslCtx.foreach { ctx => p.addLast("ssl", ctx.newHandler()) }
p.addLast("influxProtocol", new NettySocketHandler(Some('\n'), handler));
p
}
})
val rcvBufferSize = config.getInt("gateway.tcp.netty-receive-buffer-size")
val sendBufferSize = config.getInt("gateway.tcp.netty-send-buffer-size")
bootstrap.setOption("child.tcpNoDelay", true)
bootstrap.setOption("child.receiveBufferSize", rcvBufferSize)
bootstrap.setOption("child.sendBufferSize", sendBufferSize)
// Bind and start to accept incoming connections.
logger.info(s"Starting GatewayServer with TCP port for Influx data at $influxPort....")
bootstrap.bind(new InetSocketAddress(influxPort))
}
// Returns (Array[Queue] for shards, containerObservable)
def shardingPipeline(config: Config, numShards: Int, dataset: Dataset):
(Array[MpscGrowableArrayQueue[InputRecord]], Observable[(Int, Seq[Array[Byte]])]) = {
val parallelism = config.getInt("gateway.producer-parallelism")
val minQueueSize = config.getInt("gateway.min-queue-size")
val maxQueueSize = config.getInt("gateway.max-queue-size")
// Create queues and RecordBuilders, one per shard
val shardQueues = (0 until numShards).map { _ =>
new MpscGrowableArrayQueue[InputRecord](minQueueSize, maxQueueSize) }.toArray
val lastSendTime = Array.fill(numShards)(0L)
val builders = (0 until numShards).map(s => new RecordBuilder(MemFactory.onHeapFactory, dataset.ingestionSchema))
.toArray
val producing = Array.fill(numShards)(false)
var curShard = 0
// require(parallelism < numShards)
// Create a multithreaded pipeline to read from the shard queues and populate the RecordBuilders.
// The way it works is as follows:
// producing array above keeps track of which shards are being worked on at any time.
// The producing observable produces a stream of the next shard to work on. If a shard is already being worked
// on then it will be skipped -- this ensures that a shard is never worked on in parallel
// Next tasks are created and executed to pull from queue and build records in a parallel pool
// Each Task produces (shard, Container) pairs which get flushed by the sink
val shardIt = Iterator.from(0).map { _ =>
while (producing(curShard)) {
curShard = (curShard + 1) % numShards
Thread sleep 1
} // else keep going. If we have gone around just wait
val shardToWorkOn = curShard
producing(shardToWorkOn) = true
curShard = (curShard + 1) % numShards
shardToWorkOn
}
val containerStream = Observable.fromIterator(shardIt)
.mapAsync(parallelism) { shard =>
buildShardContainers(shard, shardQueues(shard), builders(shard), lastSendTime)
.map { output =>
// Mark this shard as done producing for now to allow another go
producing(shard) = false
output
}
}
logger.info(s"Created $numShards container builder queues with $parallelism parallel workers...")
(shardQueues, containerStream)
}
def buildShardContainers(shard: Int,
queue: MpscGrowableArrayQueue[InputRecord],
builder: RecordBuilder,
sendTime: Array[Long]): Task[(Int, Seq[Array[Byte]])] = Task {
// While there are still messages in the queue and there aren't containers to send, pull and build
while (!queue.isEmpty && builder.allContainers.length <= 1) {
queue.poll().addToBuilder(builder)
// TODO: add metrics
}
// Is there a container to send? Or has the time since the last send been more than a second?
// Send only full containers or if time has elapsed, send and reset current container
val numContainers = builder.allContainers.length
if (numContainers > 1 ||
(numContainers > 0 && !builder.allContainers.head.isEmpty &&
(System.currentTimeMillis - sendTime(shard)) > 1000)) {
sendTime(shard) = System.currentTimeMillis
val out = if (numContainers > 1) { // First container probably full. Send only the first container
numContainersSent.increment(numContainers - 1)
(shard, builder.nonCurrentContainerBytes(reset = true))
} else { // only one container. Get the smallest bytes possible as its probably not full
numContainersSent.increment
(shard, builder.optimalContainerBytes(reset = true))
}
logger.debug(s"Sending ${out._2.length} containers, ${out._2.map(_.size).sum} bytes from shard=$shard")
out
} else {
(shard, Nil)
}
}
def setupKafkaProducer(sourceConf: Config, containerStream: Observable[(Int, Seq[Array[Byte]])]): Future[Unit] = {
// Now create Kafka config, sink
// TODO: use the official KafkaIngestionStream stuff to parse the file. This is just faster for now.
val producerCfg = KafkaProducerConfig.default.copy(
bootstrapServers = sourceConf.getString("sourceconfig.bootstrap.servers").split(',').toList
)
val topicName = sourceConf.getString("sourceconfig.filo-topic-name")
implicit val io = Scheduler.io("kafka-producer")
val sink = new KafkaContainerSink(producerCfg, topicName)
sink.writeTask(containerStream)
.runAsync
.map { _ => logger.info(s"Finished producing messages into topic $topicName") }
// TODO: restart stream in case of failure?
.recover { case NonFatal(e) => logger.error("Error occurred while producing messages to Kafka", e) }
}
} | velvia/FiloDB | gateway/src/main/scala/filodb/gateway/GatewayServer.scala | Scala | apache-2.0 | 13,620 |
/*
* Exercise 8: Implement `exists`
*
* We choose to emit all intermediate values, and not halt.
* See `existsResult` below for a trimmed version.
*/
def exists[I](f: I => Boolean): Process[I,Boolean] =
lift(f) |> any
/* Emits whether a `true` input has ever been received. */
def any: Process[Boolean,Boolean] =
loop(false)((b:Boolean,s) => (s || b, s || b))
/* A trimmed `exists`, containing just the final result. */
def existsResult[I](f: I => Boolean) =
exists(f) |> takeThrough(!_) |> dropWhile(!_) |> echo.orElse(emit(false))
/*
* Like `takeWhile`, but includes the first element that tests
* false.
*/
def takeThrough[I](f: I => Boolean): Process[I,I] =
takeWhile(f) ++ echo
/* Awaits then emits a single value, then halts. */
def echo[I]: Process[I,I] = await(i => emit(i))
| ud3sh/coursework | functional-programming-in-scala-textbook/answerkey/streamingio/08.answer.scala | Scala | unlicense | 803 |
/*
* Copyright 2016 Coral realtime streaming analytics (http://coral-streaming.github.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.coral.api.security
import java.util.UUID
import akka.testkit.{TestActorRef, TestKit}
import com.typesafe.config.ConfigFactory
import io.coral.TestHelper
import io.coral.api.CoralConfig
import io.coral.api.security.Authenticator.{GetUserTable, CleanUpRules, GetAuthorizationTable, Invalidate}
import spray.http.{BasicHttpCredentials, StatusCodes}
import com.github.t3hnar.bcrypt._
import io.coral.TestHelper._
import scala.concurrent.Await
import akka.pattern.ask
class CoralAuthenticatorSpec
extends AuthenticatorSpec("coral") {
override def createActorSystem = {
val c = new CoralConfig(ConfigFactory.parseString(
s"""coral.authentication.mode = "coral"
|akka.actor.provider = "akka.actor.LocalActorRefProvider"
|coral.cluster.enable = false
""".stripMargin)
.withFallback(ConfigFactory.load()))
initiateWithConfig(c)
}
override def afterAll() {
TestKit.shutdownActorSystem(system)
}
"A CoralAuthenticator actor" when {
"Allow multiple sub-paths with a wildcard" in {
checkResponseWithNonOwner(List(
nonOwnerPermission("GET", "/api/runtimes/ab12cd-runtime1/*", allowed = true)),
"/api/runtimes/ab12cd-runtime1/actors",
StatusCodes.OK)
Get("/api/runtimes/runtime1/actors/generator1").withHeaders(AcceptHeader) ~>
addCredentials(BasicHttpCredentials(user2, pass2)) ~> sealRoute(route) ~> check {
// The actual actor does not exist but that is OK as long as it is not forbidden
assert(status != StatusCodes.Forbidden)
}
}
"A non-runtime owner cannot find a runtime created by another user by default" in {
// We start with 3 runtimes already defined but
// clear them out here since we want to start over
deleteAllRuntimes()
// Create runtime runtime1 by user ab12cd
// Access it through /api/runtimes/ab12cd-runtime1/actors with user ab12cd => OK
// Access it through /api/runtimes/runtime1/actors with user ab12cd => OK
// Access it through /api/runtimes/runtime1/actors with user cd34ef => Not found
// Grant access to /api/runtimes/runtime1/actors with user cd34ef
// Access it through /api/runtimes/ab12cd-runtime1/actors with user cd34ef => OK
runtime1NotDefined()
val jsonDef = TestHelper.json(userUUID1, "someruntime")
val uuid1 = postRuntime(jsonDef, StatusCodes.Created)
waitAWhile()
assert(uuid1.isDefined)
runtimeDefined("someruntime", jsonDef, uuid1.get)
runtimeDefined("ab12cd-someruntime", jsonDef, uuid1.get)
runtimeDefined("someruntime", jsonDef, uuid1.get)
runtimeNotDefined("someruntime", user2, pass2)
val permissionHandler = system.actorSelection("/user/root/authenticator/permissionHandler")
TestHelper.insertPermission(permissionHandler, UUID.randomUUID(), userUUID2, uuid1.get,
"GET", "/api/runtimes/ab12cd-someruntime", allowed = true)
authenticator ! Invalidate()
waitAWhile()
runtimeNotDefined("someruntime", user2, pass2)
runtimeDefined("ab12cd-someruntime", jsonDef, user2, pass2, uuid1.get)
}
"Return a filled user table" in {
val actual = Await.result(authenticator.ask(GetUserTable()), timeout.duration)
val expected = Map[String, User](
user1 -> User(
userUUID1,
"Test user 1",
Some("Department of business"),
"user1@ing.nl",
user1,
Some("+3162543556"),
pass1.bcrypt(salt),
timestamp,
Some(timestamp)
), user2 -> User(
userUUID2,
"Test user 2",
Some("Department of business"),
"user2@ing.nl",
user2,
Some("+3162543556"),
pass2.bcrypt(salt),
timestamp,
Some(timestamp)
), user3 -> User(
userUUID3,
"Test user 3",
Some("Department of business"),
"user3@ing.nl",
user3,
Some("+3162543556"),
pass3.bcrypt(salt),
timestamp,
Some(timestamp)
)
)
assert(actual == expected)
}
"Return a filled authorization list" in {
TestHelper.clearAllTables()
val id1 = UUID.randomUUID()
val id2 = UUID.randomUUID()
val id3 = UUID.randomUUID()
insertPermission(permissionHandler, id1, userUUID1,
runtimeUUID1, "*", "/api/runtimes/runtime1/*", allowed = true)
insertPermission(permissionHandler, id2, userUUID2,
runtimeUUID2, "POST", "/api/runtimes/runtime1/actors/actor1/shunt", allowed = false)
insertPermission(permissionHandler, id3, userUUID3,
runtimeUUID3, "DELETE", "/api/runtimes/runtime1/actors/1", allowed = false)
authenticator ! Invalidate()
val actual = Await.result(authenticator.ask(GetAuthorizationTable()), timeout.duration)
.asInstanceOf[List[Permission]].sortBy(_.id)
val expected = List(
Permission(id1, userUUID1, runtimeUUID1, "*",
"/api/runtimes/runtime1/*", allowed = true),
Permission(id2, userUUID2, runtimeUUID2, "POST",
"/api/runtimes/runtime1/actors/actor1/shunt", allowed = false),
Permission(id3, userUUID3, runtimeUUID3, "DELETE",
"/api/runtimes/runtime1/actors/1", allowed = false)).sortBy(_.id)
assert(actual == expected)
}
"Filter the list with duplicate values" in {
val list: List[Permission] = List(
Permission(permissionUUID1, userUUID1, runtimeUUID1,
"GET", "/api/runtimes/runtime1/actors", allowed = true),
Permission(UUID.randomUUID(), userUUID1, runtimeUUID1,
"GET", "/api/runtimes/runtime1/actors", allowed = true),
Permission(UUID.randomUUID(), userUUID1, runtimeUUID1,
"GET", "/api/runtimes/runtime1/actors", allowed = true),
Permission(UUID.randomUUID(), userUUID1, runtimeUUID1,
"GET", "/api/runtimes/runtime1/actors", allowed = true),
Permission(permissionUUID2, userUUID2, runtimeUUID2,
"GET", "/api/runtimes/runtime2/actors", allowed = true))
val actual = Await.result(authenticator.ask(CleanUpRules(list)), timeout.duration)
val expected: List[Permission] = List(
Permission(permissionUUID1, userUUID1, runtimeUUID1,
"GET", "/api/runtimes/runtime1/actors", allowed = true),
Permission(permissionUUID2, userUUID2, runtimeUUID2,
"GET", "/api/runtimes/runtime2/actors", allowed = true))
assert(actual == expected)
}
"Filter the list with allowed = true and allowed = false for the same rule" in {
var list: List[Permission] = List(
Permission(permissionUUID1, userUUID1, runtimeUUID1,
"GET", "/api/runtimes/runtime1/actors", allowed = true),
Permission(permissionUUID2, userUUID1, runtimeUUID1,
"GET", "/api/runtimes/runtime1/actors", allowed = false))
var actual = Await.result(authenticator.ask(CleanUpRules(list)), timeout.duration)
var expected: List[Permission] = List(
Permission(permissionUUID2, userUUID1, runtimeUUID1,
"GET", "/api/runtimes/runtime1/actors", allowed = false))
assert(actual == expected)
list = List(
Permission(permissionUUID1, userUUID1, runtimeUUID1,
"GET", "/api/runtimes/runtime1/actors", allowed = false),
Permission(permissionUUID2, userUUID1, runtimeUUID1,
"GET", "/api/runtimes/runtime1/actors", allowed = true))
actual = Await.result(authenticator.ask(CleanUpRules(list)), timeout.duration)
expected = List(
Permission(permissionUUID1, userUUID1, runtimeUUID1,
"GET", "/api/runtimes/runtime1/actors", allowed = false))
assert(actual == expected)
val uuid3 = UUID.randomUUID()
list = List(
Permission(permissionUUID1, userUUID1, runtimeUUID1,
"GET", "/api/runtimes/runtime1/actors", allowed = false),
Permission(permissionUUID2, userUUID1, runtimeUUID1,
"GET", "/api/runtimes/runtime1/actors", allowed = true),
Permission(permissionUUID3, userUUID2, runtimeUUID1,
"POST", "/api/runtimes/1", allowed = true))
actual = Await.result(authenticator.ask(CleanUpRules(list)), timeout.duration)
expected = List(
Permission(permissionUUID1, userUUID1, runtimeUUID1,
"GET", "/api/runtimes/runtime1/actors", allowed = false),
Permission(permissionUUID3, userUUID2, runtimeUUID1,
"POST", "/api/runtimes/1", allowed = true))
assert(actual == expected)
}
"Filter the list with duplicate values and allowed = true and allowed = false" in {
val list: List[Permission] = List(
Permission(permissionUUID1, userUUID1, runtimeUUID1,
"GET", "/api/runtimes/runtime1/actors", allowed = true),
Permission(permissionUUID2, userUUID1, runtimeUUID1,
"GET", "/api/runtimes/runtime1/actors", allowed = true),
Permission(permissionUUID3, userUUID1, runtimeUUID1,
"GET", "/api/runtimes/runtime1/actors", allowed = false))
val actual = Await.result(authenticator.ask(CleanUpRules(list)), timeout.duration)
val expected: List[Permission] = List(
Permission(permissionUUID3, userUUID1, runtimeUUID1,
"GET", "/api/runtimes/runtime1/actors", allowed = false))
assert(actual == expected)
}
}
} | coral-streaming/coral | src/test/scala/io/coral/api/security/CoralAuthenticatorSpec.scala | Scala | apache-2.0 | 9,423 |
package chandu0101.scalajs.react.components
package util
import scala.scalajs.js.Date
object DateTime {
def addDays( d: Date, days: Int) = {
val newDate = clone(d)
newDate.setDate(d.getDate() + days)
newDate
}
def clone( d: Date) = new Date(d.getTime())
def addMonths( d: Date, months: Int) = {
val newDate = clone(d)
newDate.setMonth(d.getMonth() + months)
newDate
}
def getFirstDayOfMonth( d: Date) = new Date(d.getFullYear(),d.getMonth(),1)
def getDaysInMonth( d: Date) = {
val resultDate = getFirstDayOfMonth(d)
resultDate.setMonth(resultDate.getMonth() + 1)
resultDate.setDate(resultDate.getDate() - 1)
resultDate.getDate()
}
def getFullMonth(d: Date) = {
d.getMonth() match {
case 0 => "January"
case 1 => "February"
case 2 => "March"
case 3 => "April"
case 4 => "May"
case 5 => "June"
case 6 => "July"
case 7 => "August"
case 8 => "September"
case 9 => "October"
case 10 => "November"
case 11 => "December"
case _ => "Unknown"
}
}
def getShortMonth( d: Date) = {
d.getMonth() match {
case 0 => "Jan"
case 1 => "Feb"
case 2 => "Mar"
case 3 => "Apr"
case 4 => "May"
case 5 => "Jun"
case 6 => "Jul"
case 7 => "Aug"
case 8 => "Sep"
case 9 => "Oct"
case 10 => "Nov"
case 11 => "Dec"
case _ => "Unknown"
}
}
def getDayOfWeek( d: Date) = {
d.getDay() match {
case 0 => "Sunday";
case 1 => "Monday";
case 2 => "Tuesday";
case 3 => "Wednesday";
case 4 => "Thursday";
case 5 => "Friday";
case 6 => "Saturday";
case _ => "Unknown"
}
}
def getWeekArray( d: Date) = {
val daysInMonth = getDaysInMonth(d)
val dayArray = (1 to daysInMonth).toList.map(i => new Date(d.getFullYear(), d.getMonth(), i)).toList
getWeeks(dayArray)
}
def getWeeks( days: List[Date]) = {
var i = 0
var weeks: List[List[Date]] = List()
while(i < days.length) {
val firstDayOfWeek = days(i).getDay()
val daysInWeek = 7 - firstDayOfWeek
val emptyDays = 7 - daysInWeek
weeks = weeks.:+(List.fill(emptyDays)(null).++(days.slice(i,i+daysInWeek)))
i = i+daysInWeek
}
weeks
}
def format(d: Date) = {
s"${d.getMonth()+1} / ${d.getDate()} / ${d.getFullYear()}"
}
def isEqualDate( d1: Date ,d2: Date) = {
if(d1 != null && d2 != null) {
(d1.getFullYear() == d2.getFullYear()) &&
(d1.getMonth() == d2.getMonth()) &&
(d1.getDate() == d2.getDate())
} else false
}
}
| tpdi/scalajs-react-components | core/src/main/scala/chandu0101/scalajs/react/components/util/DateTime.scala | Scala | apache-2.0 | 2,706 |
package scala.collection.mutable
import java.util.concurrent.TimeUnit
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra._
@BenchmarkMode(Array(Mode.AverageTime))
@Fork(2)
@Threads(1)
@Warmup(iterations = 10)
@Measurement(iterations = 10)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@State(Scope.Benchmark)
class LinkedHashMapBenchmark {
@Param(Array("10", "100", "1000"))
var size: Int = _
@Param(Array("true"))
var useMissingValues = true
@Param(Array("false"))
var stringsOnly = false
var existingKeys: Array[Any] = _
var missingKeys: Array[Any] = _
@Setup(Level.Trial) def initKeys(): Unit = {
existingKeys = (0 to size).map(i => (i % 4) match {
case _ if stringsOnly => i.toString
case 0 => i.toString
case 1 => i.toChar
case 2 => i.toDouble
case 3 => i.toInt
}).toArray
missingKeys = (size to 2 * size).toArray.map(_.toString)
}
var map: collection.mutable.LinkedHashMap[Any, Any] = null
@Setup(Level.Trial) def initialize = {
map = collection.mutable.LinkedHashMap(existingKeys.map(x => (x, x)) : _*)
}
@Benchmark def updateWith(bh: Blackhole): Unit = {
var i = 0;
while (i < size) {
val res = i % 4 match {
case 0 => map.updateWith(existingKeys(i % existingKeys.length))(_ => None)
case 1 => map.updateWith(existingKeys(i % existingKeys.length))(_ => Some(existingKeys(i % existingKeys.length)))
case 2 => map.updateWith(missingKeys(i % missingKeys.length))(_ => None)
case 3 => map.updateWith(missingKeys(i % missingKeys.length))(_ => Some(existingKeys(i % existingKeys.length)))
}
bh.consume(res)
i += 1
}
}
}
| scala/scala | test/benchmarks/src/main/scala/scala/collection/mutable/LinkedHashMapBenchmark.scala | Scala | apache-2.0 | 1,682 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package models.registrationprogress
import play.api.mvc.Call
case class Section(
name: String,
status: Status,
hasChanged: Boolean,
call: Call
)
| hmrc/amls-frontend | app/models/registrationprogress/Section.scala | Scala | apache-2.0 | 850 |
package pl.pholda.malpompaaligxilo.form
sealed abstract class ValidationResult {
def isSuccess: Boolean
}
case object SuccessValidation extends ValidationResult {
override def isSuccess: Boolean = true
}
case class FailureValidation(errors: Map[Field[_], Seq[FormError]]) extends ValidationResult {
override def isSuccess: Boolean = false
}
| pholda/MalpompaAligxilo | core/shared/src/main/scala/pl/pholda/malpompaaligxilo/form/ValidationResult.scala | Scala | gpl-3.0 | 351 |
package streams
import common._
/**
* This trait represents the layout and building blocks of the game
*
* @TODO: SHOULD RENAME `x` and `y` in class Pos to `row` and `col`. It's
* confusing to have `x` being the vertical axis.
*/
trait GameDef {
/**
* The case class `Pos` encodes positions in the terrain.
*
* IMPORTANT NOTE
* - The `x` coordinate denotes the position on the vertical axis
* - The `y` coordinate is used for the horizontal axis
* - The coordinates increase when moving down and right
*
* Illustration:
*
* 0 1 2 3 <- y axis
* 0 o o o o
* 1 o o o o
* 2 o # o o # is at position Pos(2, 1)
* 3 o o o o
*
* ^
* |
*
* x axis
*/
case class Pos(x: Int, y: Int) {
/** The position obtained by changing the `x` coordinate by `d` */
def dx(d: Int) = copy(x = x + d)
/** The position obtained by changing the `y` coordinate by `d` */
def dy(d: Int) = copy(y = y + d)
}
/**
* The position where the block is located initially.
*
* This value is left abstract, it will be defined in concrete
* instances of the game.
*/
val startPos: Pos
/**
* The target position where the block has to go.
* This value is left abstract.
*/
val goal: Pos
/**
* The terrain is represented as a function from positions to
* booleans. The function returns `true` for every position that
* is inside the terrain.
*
* As explained in the documentation of class `Pos`, the `x` axis
* is the vertical one and increases from top to bottom.
*/
type Terrain = Pos => Boolean
/**
* The terrain of this game. This value is left abstract.
*/
val terrain: Terrain
/**
* In Bloxorz, we can move left, right, Up or down.
* These moves are encoded as case objects.
*/
sealed abstract class Move
case object Left extends Move
case object Right extends Move
case object Up extends Move
case object Down extends Move
/**
* This function returns the block at the start position of
* the game.
*/
def startBlock: Block = ???
/**
* A block is represented by the position of the two cubes that
* it consists of. We make sure that `b1` is lexicographically
* smaller than `b2`.
*/
case class Block(b1: Pos, b2: Pos) {
// checks the requirement mentioned above
require(b1.x <= b2.x && b1.y <= b2.y, "Invalid block position: b1=" + b1 + ", b2=" + b2)
/**
* Returns a block where the `x` coordinates of `b1` and `b2` are
* changed by `d1` and `d2`, respectively.
*/
def dx(d1: Int, d2: Int) = Block(b1.dx(d1), b2.dx(d2))
/**
* Returns a block where the `y` coordinates of `b1` and `b2` are
* changed by `d1` and `d2`, respectively.
*/
def dy(d1: Int, d2: Int) = Block(b1.dy(d1), b2.dy(d2))
/** The block obtained by moving left */
def left = if (isStanding) dy(-2, -1)
else if (b1.x == b2.x) dy(-1, -2)
else dy(-1, -1)
/** The block obtained by moving right */
def right = if (isStanding) dy(1, 2)
else if (b1.x == b2.x) dy(2, 1)
else dy(1, 1)
/** The block obtained by moving up */
def up = if (isStanding) dx(-2, -1)
else if (b1.x == b2.x) dx(-1, -1)
else dx(-1, -2)
/** The block obtained by moving down */
def down = if (isStanding) dx(1, 2)
else if (b1.x == b2.x) dx(1, 1)
else dx(2, 1)
/**
* Returns the list of blocks that can be obtained by moving
* the current block, together with the corresponding move.
*/
def neighbors: List[(Block, Move)] = ???
/**
* Returns the list of positions reachable from the current block
* which are inside the terrain.
*/
def legalNeighbors: List[(Block, Move)] = ???
/**
* Returns `true` if the block is standing.
*/
def isStanding: Boolean = ???
/**
* Returns `true` if the block is entirely inside the terrain.
*/
def isLegal: Boolean = ???
}
}
| TraitRDS/progfun | assignments/streams/src/main/scala/streams/GameDef.scala | Scala | mit | 4,215 |
package petrovich.data
sealed abstract class Case(val index: Int)
object Case {
case object Nominative extends Case(-1)
case object Genitive extends Case(0)
case object Dative extends Case(1)
case object Accusative extends Case(2)
case object Instrumental extends Case(3)
case object Prepositional extends Case(4)
}
| fomkin/petrovich-scala | petrovich-scala/src/main/scala/petrovich/data/Case.scala | Scala | apache-2.0 | 330 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler
import java.util.concurrent.Semaphore
import scala.collection.mutable
import scala.collection.JavaConverters._
import org.scalatest.Matchers
import org.apache.spark._
import org.apache.spark.executor.TaskMetrics
import org.apache.spark.util.{ResetSystemProperties, RpcUtils}
class SparkListenerSuite extends SparkFunSuite with LocalSparkContext with Matchers
with ResetSystemProperties {
/** Length of time to wait while draining listener events. */
val WAIT_TIMEOUT_MILLIS = 10000
val jobCompletionTime = 1421191296660L
val queuingStrategy = List("true", "false")
queuingStrategy.foreach(qS => {
def getConf: SparkConf = {
val conf = new SparkConf()
conf.set("spark.scheduler.listenerbus.eventqueue.drop", qS)
conf
}
def name(testName: String): String = {
if (qS == "true") {
s"dropping event bus: $testName"
} else {
s"Waiting event bus: $testName"
}
}
test(name("don't call sc.stop in listener")) {
sc = new SparkContext("local", "SparkListenerSuite", getConf)
val listener = new SparkContextStoppingListener(sc)
val bus = new LiveListenerBus(sc)
bus.addListener(listener)
// Starting listener bus should flush all buffered events
bus.start()
bus.post(SparkListenerJobEnd(0, jobCompletionTime, JobSucceeded))
bus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
bus.stop()
assert(listener.sparkExSeen)
}
test(name("basic creation and shutdown of LiveListenerBus")) {
sc = new SparkContext("local", "SparkListenerSuite", getConf)
val counter = new BasicJobCounter
val bus = new LiveListenerBus(sc)
bus.addListener(counter)
// Listener bus hasn't started yet, so posting events should not increment counter
(1 to 5).foreach { _ => bus.post(SparkListenerJobEnd(0, jobCompletionTime, JobSucceeded)) }
assert(counter.count === 0)
// Starting listener bus should flush all buffered events
bus.start()
bus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
assert(counter.count === 5)
// After listener bus has stopped, posting events should not increment counter
bus.stop()
(1 to 5).foreach { _ => bus.post(SparkListenerJobEnd(0, jobCompletionTime, JobSucceeded)) }
assert(counter.count === 5)
// Listener bus must not be started twice
intercept[IllegalStateException] {
val bus = new LiveListenerBus(sc)
bus.start()
bus.start()
}
// ... or stopped before starting
intercept[IllegalStateException] {
val bus = new LiveListenerBus(sc)
bus.stop()
}
}
test(name("bus.stop() waits for the event queue to completely drain")) {
@volatile var drained = false
// When Listener has started
val listenerStarted = new Semaphore(0)
// Tells the listener to stop blocking
val listenerWait = new Semaphore(0)
// When stopper has started
val stopperStarted = new Semaphore(0)
// When stopper has returned
val stopperReturned = new Semaphore(0)
class BlockingListener extends SparkListener {
override def onJobEnd(jobEnd: SparkListenerJobEnd): Unit = {
listenerStarted.release()
listenerWait.acquire()
drained = true
}
}
sc = new SparkContext("local", "SparkListenerSuite", getConf)
val bus = new LiveListenerBus(sc)
val blockingListener = new BlockingListener
bus.addListener(blockingListener)
bus.start()
bus.post(SparkListenerJobEnd(0, jobCompletionTime, JobSucceeded))
listenerStarted.acquire()
// Listener should be blocked after start
assert(!drained)
new Thread("ListenerBusStopper") {
override def run() {
stopperStarted.release()
// stop() will block until notify() is called below
bus.stop()
stopperReturned.release()
}
}.start()
stopperStarted.acquire()
// Listener should remain blocked after stopper started
assert(!drained)
// unblock Listener to let queue drain
listenerWait.release()
stopperReturned.acquire()
assert(drained)
}
test(name("basic creation of StageInfo")) {
sc = new SparkContext("local", "SparkListenerSuite", getConf)
val listener = new SaveStageAndTaskInfo
sc.addSparkListener(listener)
val rdd1 = sc.parallelize(1 to 100, 4)
val rdd2 = rdd1.map(_.toString)
rdd2.setName("Target RDD")
rdd2.count()
sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
listener.stageInfos.size should be {1}
val (stageInfo, taskInfoMetrics) = listener.stageInfos.head
stageInfo.rddInfos.size should be {2}
stageInfo.rddInfos.forall(_.numPartitions == 4) should be {true}
stageInfo.rddInfos.exists(_.name == "Target RDD") should be {true}
stageInfo.numTasks should be {4}
stageInfo.submissionTime should be ('defined)
stageInfo.completionTime should be ('defined)
taskInfoMetrics.length should be {4}
}
test(name("basic creation of StageInfo with shuffle")) {
sc = new SparkContext("local", "SparkListenerSuite", getConf)
val listener = new SaveStageAndTaskInfo
sc.addSparkListener(listener)
val rdd1 = sc.parallelize(1 to 100, 4)
val rdd2 = rdd1.filter(_ % 2 == 0).map(i => (i, i))
val rdd3 = rdd2.reduceByKey(_ + _)
rdd1.setName("Un")
rdd2.setName("Deux")
rdd3.setName("Trois")
rdd1.count()
sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
listener.stageInfos.size should be {1}
val stageInfo1 = listener.stageInfos.keys.find(_.stageId == 0).get
stageInfo1.rddInfos.size should be {1} // ParallelCollectionRDD
stageInfo1.rddInfos.forall(_.numPartitions == 4) should be {true}
stageInfo1.rddInfos.exists(_.name == "Un") should be {true}
listener.stageInfos.clear()
rdd2.count()
sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
listener.stageInfos.size should be {1}
val stageInfo2 = listener.stageInfos.keys.find(_.stageId == 1).get
stageInfo2.rddInfos.size should be {3}
stageInfo2.rddInfos.forall(_.numPartitions == 4) should be {true}
stageInfo2.rddInfos.exists(_.name == "Deux") should be {true}
listener.stageInfos.clear()
rdd3.count()
sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
listener.stageInfos.size should be {2} // Shuffle map stage + result stage
val stageInfo3 = listener.stageInfos.keys.find(_.stageId == 3).get
stageInfo3.rddInfos.size should be {1} // ShuffledRDD
stageInfo3.rddInfos.forall(_.numPartitions == 4) should be {true}
stageInfo3.rddInfos.exists(_.name == "Trois") should be {true}
}
test(name("StageInfo with fewer tasks than partitions")) {
sc = new SparkContext("local", "SparkListenerSuite", getConf)
val listener = new SaveStageAndTaskInfo
sc.addSparkListener(listener)
val rdd1 = sc.parallelize(1 to 100, 4)
val rdd2 = rdd1.map(_.toString)
sc.runJob(rdd2, (items: Iterator[String]) => items.size, Seq(0, 1))
sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
listener.stageInfos.size should be {1}
val (stageInfo, _) = listener.stageInfos.head
stageInfo.numTasks should be {2}
stageInfo.rddInfos.size should be {2}
stageInfo.rddInfos.forall(_.numPartitions == 4) should be {true}
}
test(name("local metrics")) {
sc = new SparkContext("local", "SparkListenerSuite", getConf)
val listener = new SaveStageAndTaskInfo
sc.addSparkListener(listener)
sc.addSparkListener(new StatsReportListener)
// just to make sure some of the tasks take a noticeable amount of time
val w = { i: Int =>
if (i == 0) {
Thread.sleep(100)
}
i
}
val numSlices = 16
val d = sc.parallelize(0 to 10000, numSlices).map(w)
d.count()
sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
listener.stageInfos.size should be (1)
val d2 = d.map { i => w(i) -> i * 2 }.setName("shuffle input 1")
val d3 = d.map { i => w(i) -> (0 to (i % 5)) }.setName("shuffle input 2")
val d4 = d2.cogroup(d3, numSlices).map { case (k, (v1, v2)) =>
w(k) -> (v1.size, v2.size)
}
d4.setName("A Cogroup")
d4.collectAsMap()
sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
listener.stageInfos.size should be (4)
listener.stageInfos.foreach { case (stageInfo, taskInfoMetrics) =>
/**
* Small test, so some tasks might take less than 1 millisecond,
* but average should be greater than 0 ms.
*/
checkNonZeroAvg(
taskInfoMetrics.map(_._2.executorRunTime),
stageInfo + " executorRunTime")
checkNonZeroAvg(
taskInfoMetrics.map(_._2.executorDeserializeTime),
stageInfo + " executorDeserializeTime")
/* Test is disabled (SEE SPARK-2208)
if (stageInfo.rddInfos.exists(_.name == d4.name)) {
checkNonZeroAvg(
taskInfoMetrics.map(_._2.shuffleReadMetrics.get.fetchWaitTime),
stageInfo + " fetchWaitTime")
}
*/
taskInfoMetrics.foreach { case (taskInfo, taskMetrics) =>
taskMetrics.resultSize should be > (0L)
if (stageInfo.rddInfos.exists(info => info.name == d2.name || info.name == d3.name)) {
assert(taskMetrics.shuffleWriteMetrics.bytesWritten > 0L)
}
if (stageInfo.rddInfos.exists(_.name == d4.name)) {
assert(taskMetrics.shuffleReadMetrics.totalBlocksFetched == 2 * numSlices)
assert(taskMetrics.shuffleReadMetrics.localBlocksFetched == 2 * numSlices)
assert(taskMetrics.shuffleReadMetrics.remoteBlocksFetched == 0)
assert(taskMetrics.shuffleReadMetrics.remoteBytesRead == 0L)
}
}
}
}
test(name("onTaskGettingResult() called when result fetched remotely")) {
val conf = getConf.set("spark.rpc.message.maxSize", "1")
sc = new SparkContext("local", "SparkListenerSuite", conf)
val listener = new SaveTaskEvents
sc.addSparkListener(listener)
// Make a task whose result is larger than the RPC message size
val maxRpcMessageSize = RpcUtils.maxMessageSizeBytes(conf)
assert(maxRpcMessageSize === 1024 * 1024)
val result = sc.parallelize(Seq(1), 1)
.map { x => 1.to(maxRpcMessageSize).toArray }
.reduce { case (x, y) => x }
assert(result === 1.to(maxRpcMessageSize).toArray)
sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
val TASK_INDEX = 0
assert(listener.startedTasks.contains(TASK_INDEX))
assert(listener.startedGettingResultTasks.contains(TASK_INDEX))
assert(listener.endedTasks.contains(TASK_INDEX))
}
test(name("onTaskGettingResult() not called when result sent directly")) {
sc = new SparkContext("local", "SparkListenerSuite", getConf)
val listener = new SaveTaskEvents
sc.addSparkListener(listener)
// Make a task whose result is larger than the RPC message size
val result = sc.parallelize(Seq(1), 1).map(2 * _).reduce { case (x, y) => x }
assert(result === 2)
sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
val TASK_INDEX = 0
assert(listener.startedTasks.contains(TASK_INDEX))
assert(listener.startedGettingResultTasks.isEmpty)
assert(listener.endedTasks.contains(TASK_INDEX))
}
test(name("onTaskEnd() should be called for all started tasks, even after job has been killed"))
{
sc = new SparkContext("local", "SparkListenerSuite", getConf)
val WAIT_TIMEOUT_MILLIS = 10000
val listener = new SaveTaskEvents
sc.addSparkListener(listener)
val numTasks = 10
val f = sc.parallelize(1 to 10000, numTasks).map { i => Thread.sleep(10); i }.countAsync()
// Wait until one task has started (because we want to make sure that any tasks that are
// started have corresponding end events sent to the listener).
var finishTime = System.currentTimeMillis + WAIT_TIMEOUT_MILLIS
listener.synchronized {
var remainingWait = finishTime - System.currentTimeMillis
while (listener.startedTasks.isEmpty && remainingWait > 0) {
listener.wait(remainingWait)
remainingWait = finishTime - System.currentTimeMillis
}
assert(listener.startedTasks.nonEmpty)
}
f.cancel()
// Ensure that onTaskEnd is called for all started tasks.
finishTime = System.currentTimeMillis + WAIT_TIMEOUT_MILLIS
listener.synchronized {
var remainingWait = finishTime - System.currentTimeMillis
while (listener.endedTasks.size < listener.startedTasks.size && remainingWait > 0) {
listener.wait(finishTime - System.currentTimeMillis)
remainingWait = finishTime - System.currentTimeMillis
}
assert(listener.endedTasks.size === listener.startedTasks.size)
}
}
test(name("SparkListener moves on if a listener throws an exception")) {
val badListener = new BadListener
val jobCounter1 = new BasicJobCounter
val jobCounter2 = new BasicJobCounter
sc = new SparkContext("local", "SparkListenerSuite", getConf)
val bus = new LiveListenerBus(sc)
// Propagate events to bad listener first
bus.addListener(badListener)
bus.addListener(jobCounter1)
bus.addListener(jobCounter2)
bus.start()
// Post events to all listeners, and wait until the queue is drained
(1 to 5).foreach { _ => bus.post(SparkListenerJobEnd(0, jobCompletionTime, JobSucceeded)) }
bus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
// The exception should be caught, and the event should be propagated to other listeners
assert(bus.listenerThreadIsAlive)
assert(jobCounter1.count === 5)
assert(jobCounter2.count === 5)
}
test(name("registering listeners via spark.extraListeners")) {
val listeners = Seq(
classOf[ListenerThatAcceptsSparkConf],
classOf[FirehoseListenerThatAcceptsSparkConf],
classOf[BasicJobCounter])
val conf = getConf.setMaster("local").setAppName("test")
.set("spark.extraListeners", listeners.map(_.getName).mkString(","))
sc = new SparkContext(conf)
sc.listenerBus.listeners.count(_.isInstanceOf[BasicJobCounter]) should be (1)
sc.listenerBus.listeners.count(_.isInstanceOf[ListenerThatAcceptsSparkConf]) should be (1)
sc.listenerBus.listeners
.count(_.isInstanceOf[FirehoseListenerThatAcceptsSparkConf]) should be (1)
}
})
/**
* Assert that the given list of numbers has an average that is greater than zero.
*/
private def checkNonZeroAvg(m: Traversable[Long], msg: String) {
assert(m.sum / m.size.toDouble > 0.0, msg)
}
/**
* A simple listener that saves all task infos and task metrics.
*/
private class SaveStageAndTaskInfo extends SparkListener {
val stageInfos = mutable.Map[StageInfo, Seq[(TaskInfo, TaskMetrics)]]()
var taskInfoMetrics = mutable.Buffer[(TaskInfo, TaskMetrics)]()
override def onTaskEnd(task: SparkListenerTaskEnd) {
val info = task.taskInfo
val metrics = task.taskMetrics
if (info != null && metrics != null) {
taskInfoMetrics += ((info, metrics))
}
}
override def onStageCompleted(stage: SparkListenerStageCompleted) {
stageInfos(stage.stageInfo) = taskInfoMetrics
taskInfoMetrics = mutable.Buffer.empty
}
}
/**
* A simple listener that saves the task indices for all task events.
*/
private class SaveTaskEvents extends SparkListener {
val startedTasks = new mutable.HashSet[Int]()
val startedGettingResultTasks = new mutable.HashSet[Int]()
val endedTasks = new mutable.HashSet[Int]()
override def onTaskStart(taskStart: SparkListenerTaskStart): Unit = synchronized {
startedTasks += taskStart.taskInfo.index
notify()
}
override def onTaskEnd(taskEnd: SparkListenerTaskEnd): Unit = synchronized {
endedTasks += taskEnd.taskInfo.index
notify()
}
override def onTaskGettingResult(taskGettingResult: SparkListenerTaskGettingResult) {
startedGettingResultTasks += taskGettingResult.taskInfo.index
}
}
/**
* A simple listener that throws an exception on job end.
*/
private class BadListener extends SparkListener {
override def onJobEnd(jobEnd: SparkListenerJobEnd): Unit = { throw new Exception }
}
}
// These classes can't be declared inside of the SparkListenerSuite class because we don't want
// their constructors to contain references to SparkListenerSuite:
/**
* A simple listener that counts the number of jobs observed.
*/
private class BasicJobCounter extends SparkListener {
var count = 0
override def onJobEnd(job: SparkListenerJobEnd): Unit = count += 1
}
/**
* A simple listener that tries to stop SparkContext.
*/
private class SparkContextStoppingListener(val sc: SparkContext) extends SparkListener {
@volatile var sparkExSeen = false
override def onJobEnd(job: SparkListenerJobEnd): Unit = {
try {
sc.stop()
} catch {
case se: SparkException =>
sparkExSeen = true
}
}
}
private class ListenerThatAcceptsSparkConf(conf: SparkConf) extends SparkListener {
var count = 0
override def onJobEnd(job: SparkListenerJobEnd): Unit = count += 1
}
private class FirehoseListenerThatAcceptsSparkConf(conf: SparkConf) extends SparkFirehoseListener {
var count = 0
override def onEvent(event: SparkListenerEvent): Unit = event match {
case job: SparkListenerJobEnd => count += 1
case _ =>
}
}
| bOOm-X/spark | core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala | Scala | apache-2.0 | 18,719 |
package org.jetbrains.plugins.scala.lang.psi.uast.expressions
import com.intellij.psi.PsiElement
import org.jetbrains.annotations.Nullable
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScExpression
import org.jetbrains.plugins.scala.lang.psi.uast.baseAdapters.{
ScUAnnotated,
ScUExpression
}
import org.jetbrains.plugins.scala.lang.psi.uast.converter.Scala2UastConverter._
import org.jetbrains.plugins.scala.lang.psi.uast.internals.LazyUElement
import org.jetbrains.uast.{
UExpression,
UNamedExpression,
UNamedExpressionAdapter
}
/**
* [[ScExpression]] adapter for the [[UNamedExpression]].
* Represents expression which has name but it is omitted at the use site
* (e.g. plain parameters without specified name).
*
* Example: ----------V
* {{{@MyAnnotation(value)}}}
*
* @param scExpression Scala PSI element representing expression
* which have name omitted (e.g. argument)
* @see [[ScUNamedExpression]]
*/
final class ScUUnnamedExpression(
override protected val scExpression: ScExpression,
override protected val parent: LazyUElement
) extends UNamedExpressionAdapter
with ScUExpression
with ScUAnnotated {
override def getExpression: UExpression =
scExpression.convertToUExpressionOrEmpty(this)
@Nullable
override def getSourcePsi: PsiElement = null
@Nullable
override def getName: String = null
override def asLogString: String = "UUnnamedExpression"
}
| JetBrains/intellij-scala | scala/uast/src/org/jetbrains/plugins/scala/lang/psi/uast/expressions/ScUUnnamedExpression.scala | Scala | apache-2.0 | 1,452 |
package com.twitter.diffy.lifter
import com.twitter.concurrent.NamedPoolThreadFactory
import com.twitter.util.{ExecutorServiceFuturePool, Future, FuturePool}
import java.util.concurrent.{ArrayBlockingQueue, ThreadPoolExecutor, TimeUnit}
case class Message(endpoint: Option[String], result: FieldMap[Any])
trait MapLifter {
def apply(input: Array[Byte]): Future[Message]
}
object MapLifterPool {
val QueueSizeDefault = 5
def apply(mapLifterFactory: => MapLifter) = {
val executorService =
new ThreadPoolExecutor(
3, // core pool size
10, // max pool size
500, // keep alive time
TimeUnit.MILLISECONDS,
new ArrayBlockingQueue[Runnable](10), // work queue
new NamedPoolThreadFactory("maplifter", makeDaemons = true),
new ThreadPoolExecutor.AbortPolicy()
)
executorService.prestartCoreThread()
new MapLifterPool(mapLifterFactory, new ExecutorServiceFuturePool(executorService))
}
}
class MapLifterPool(underlying: MapLifter, futurePool: FuturePool) extends MapLifter {
override def apply(input: Array[Byte]): Future[Message] =
(futurePool { underlying(input) }).flatten
}
| wzrdtales/diffy | src/main/scala/com/twitter/diffy/lifter/MapLifter.scala | Scala | apache-2.0 | 1,172 |
package zzb.srvbox.task
import zzb.shell.Task
import zzb.srvbox.SrvManageProtocol._
import akka.pattern.ask
import scala.util.{Failure, Success}
import akka.util.Timeout
import scala.concurrent.duration._
import zzb.shell.remote.ShellProtocol.KickAll
/**
* Created with IntelliJ IDEA.
* User: Simon Xiao
* Date: 13-10-21
* Time: 上午9:07
* Copyright baoxian.com 2012~2020
*/
class List extends Task {
implicit val timeout = Timeout(5.seconds)
val boxActor = system.actorSelection("/user/boxActor")
import system.dispatcher
boxActor.ask(RequestList).onComplete {
case Success(Services(services)) =>
Console.withOut(out) {
for (s <- services) {
val status = if (s.running) "Running" else "Stoped"
println(s" ${s.name} \\t\\t ------\\t [$status]")
}
}
case Success(v) => ()
case Failure(ex) => Console.withOut(out) {
println(ex.getMessage)
}
}
}
class Stop extends Task {
implicit val timeout = Timeout(5.seconds)
val boxActor = system.actorSelection("/user/boxActor")
import system.dispatcher
override def usage = "usage: srv-stop serviceName"
override def checkArgs = args.size == 1
val serviceName = args.head
boxActor.ask(RequestStop(serviceName)).onComplete {
case Success(ServiceNotExist) => Console.withOut(out) {
println(s"not fouund service '$serviceName'")
}
case Success(ServiceStatus(name,running)) => Console.withOut(out) {
val status = if (running) "Running" else "Stoped"
println(s" $name \\t\\t ------\\t [$status]")
}
case Success(v) => ()
case Failure(ex) => Console.withOut(out) {
println(ex.getMessage)
}
}
}
class Start extends Task {
implicit val timeout = Timeout(5.seconds)
val boxActor = system.actorSelection("/user/boxActor")
override def usage = "usage: srv-start serviceName"
override def checkArgs = args.size == 1
val serviceName = args.head
import system.dispatcher
boxActor.ask(RequestStart(serviceName)).onComplete {
case Success(ServiceNotExist) => Console.withOut(out) {
println(s"not fouund service '$serviceName'")
}
case Success(ServiceStatus(name,running)) => Console.withOut(out) {
val status = if (running) "Running" else "Stoped"
println(s" $name \\t\\t ------\\t [$status]")
}
case Success(v) => ()
case Failure(ex) => Console.withOut(out) {
println(ex.getMessage)
}
}
}
class Halt extends Task {
implicit val timeout = Timeout(5.seconds)
val boxActor = system.actorSelection("/user/boxActor")
val sessionManager = system.actorSelection("/user/sessionManager")
import system.dispatcher
system.scheduler.scheduleOnce(1.seconds,
new Runnable { override def run() = sessionManager ! KickAll })
system.scheduler.scheduleOnce(5.seconds,
new Runnable { override def run() = boxActor ! zzb.srvbox.SrvManageProtocol.Halt })
}
| stepover/zzb | zzb-box/src/main/scala/zzb/srvbox/task/ServiceManage.scala | Scala | mit | 2,919 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions.objects
import java.lang.reflect.{Method, Modifier}
import scala.collection.JavaConverters._
import scala.collection.mutable.Builder
import scala.reflect.ClassTag
import scala.util.Try
import org.apache.spark.{SparkConf, SparkEnv}
import org.apache.spark.serializer._
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow, ScalaReflection}
import org.apache.spark.sql.catalyst.ScalaReflection.universe.TermName
import org.apache.spark.sql.catalyst.encoders.RowEncoder
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.expressions.codegen.Block._
import org.apache.spark.sql.catalyst.util.{ArrayBasedMapData, ArrayData, GenericArrayData, MapData}
import org.apache.spark.sql.types._
import org.apache.spark.util.Utils
/**
* Common base class for [[StaticInvoke]], [[Invoke]], and [[NewInstance]].
*/
trait InvokeLike extends Expression with NonSQLExpression {
def arguments: Seq[Expression]
def propagateNull: Boolean
protected lazy val needNullCheck: Boolean = propagateNull && arguments.exists(_.nullable)
/**
* Prepares codes for arguments.
*
* - generate codes for argument.
* - use ctx.splitExpressions() to not exceed 64kb JVM limit while preparing arguments.
* - avoid some of nullability checking which are not needed because the expression is not
* nullable.
* - when needNullCheck == true, short circuit if we found one of arguments is null because
* preparing rest of arguments can be skipped in the case.
*
* @param ctx a [[CodegenContext]]
* @return (code to prepare arguments, argument string, result of argument null check)
*/
def prepareArguments(ctx: CodegenContext): (String, String, ExprValue) = {
val resultIsNull = if (needNullCheck) {
val resultIsNull = ctx.addMutableState(CodeGenerator.JAVA_BOOLEAN, "resultIsNull")
JavaCode.isNullGlobal(resultIsNull)
} else {
FalseLiteral
}
val argValues = arguments.map { e =>
val argValue = ctx.addMutableState(CodeGenerator.javaType(e.dataType), "argValue")
argValue
}
val argCodes = if (needNullCheck) {
val reset = s"$resultIsNull = false;"
val argCodes = arguments.zipWithIndex.map { case (e, i) =>
val expr = e.genCode(ctx)
val updateResultIsNull = if (e.nullable) {
s"$resultIsNull = ${expr.isNull};"
} else {
""
}
s"""
if (!$resultIsNull) {
${expr.code}
$updateResultIsNull
${argValues(i)} = ${expr.value};
}
"""
}
reset +: argCodes
} else {
arguments.zipWithIndex.map { case (e, i) =>
val expr = e.genCode(ctx)
s"""
${expr.code}
${argValues(i)} = ${expr.value};
"""
}
}
val argCode = ctx.splitExpressionsWithCurrentInputs(argCodes)
(argCode, argValues.mkString(", "), resultIsNull)
}
/**
* Evaluate each argument with a given row, invoke a method with a given object and arguments,
* and cast a return value if the return type can be mapped to a Java Boxed type
*
* @param obj the object for the method to be called. If null, perform s static method call
* @param method the method object to be called
* @param arguments the arguments used for the method call
* @param input the row used for evaluating arguments
* @param dataType the data type of the return object
* @return the return object of a method call
*/
def invoke(
obj: Any,
method: Method,
arguments: Seq[Expression],
input: InternalRow,
dataType: DataType): Any = {
val args = arguments.map(e => e.eval(input).asInstanceOf[Object])
if (needNullCheck && args.exists(_ == null)) {
// return null if one of arguments is null
null
} else {
val ret = method.invoke(obj, args: _*)
val boxedClass = ScalaReflection.typeBoxedJavaMapping.get(dataType)
if (boxedClass.isDefined) {
boxedClass.get.cast(ret)
} else {
ret
}
}
}
}
/**
* Common trait for [[DecodeUsingSerializer]] and [[EncodeUsingSerializer]]
*/
trait SerializerSupport {
/**
* If true, Kryo serialization is used, otherwise the Java one is used
*/
val kryo: Boolean
/**
* The serializer instance to be used for serialization/deserialization in interpreted execution
*/
lazy val serializerInstance: SerializerInstance = SerializerSupport.newSerializer(kryo)
/**
* Adds a immutable state to the generated class containing a reference to the serializer.
* @return a string containing the name of the variable referencing the serializer
*/
def addImmutableSerializerIfNeeded(ctx: CodegenContext): String = {
val (serializerInstance, serializerInstanceClass) = {
if (kryo) {
("kryoSerializer",
classOf[KryoSerializerInstance].getName)
} else {
("javaSerializer",
classOf[JavaSerializerInstance].getName)
}
}
val newSerializerMethod = s"${classOf[SerializerSupport].getName}$$.MODULE$$.newSerializer"
// Code to initialize the serializer
ctx.addImmutableStateIfNotExists(serializerInstanceClass, serializerInstance, v =>
s"""
|$v = ($serializerInstanceClass) $newSerializerMethod($kryo);
""".stripMargin)
serializerInstance
}
}
object SerializerSupport {
/**
* It creates a new `SerializerInstance` which is either a `KryoSerializerInstance` (is
* `useKryo` is set to `true`) or a `JavaSerializerInstance`.
*/
def newSerializer(useKryo: Boolean): SerializerInstance = {
// try conf from env, otherwise create a new one
val conf = Option(SparkEnv.get).map(_.conf).getOrElse(new SparkConf)
val s = if (useKryo) {
new KryoSerializer(conf)
} else {
new JavaSerializer(conf)
}
s.newInstance()
}
}
/**
* Invokes a static function, returning the result. By default, any of the arguments being null
* will result in returning null instead of calling the function.
*
* @param staticObject The target of the static call. This can either be the object itself
* (methods defined on scala objects), or the class object
* (static methods defined in java).
* @param dataType The expected return type of the function call
* @param functionName The name of the method to call.
* @param arguments An optional list of expressions to pass as arguments to the function.
* @param propagateNull When true, and any of the arguments is null, null will be returned instead
* of calling the function.
* @param returnNullable When false, indicating the invoked method will always return
* non-null value.
*/
case class StaticInvoke(
staticObject: Class[_],
dataType: DataType,
functionName: String,
arguments: Seq[Expression] = Nil,
propagateNull: Boolean = true,
returnNullable: Boolean = true) extends InvokeLike {
val objectName = staticObject.getName.stripSuffix("$")
val cls = if (staticObject.getName == objectName) {
staticObject
} else {
Utils.classForName(objectName)
}
override def nullable: Boolean = needNullCheck || returnNullable
override def children: Seq[Expression] = arguments
lazy val argClasses = ScalaReflection.expressionJavaClasses(arguments)
@transient lazy val method = cls.getDeclaredMethod(functionName, argClasses : _*)
override def eval(input: InternalRow): Any = {
invoke(null, method, arguments, input, dataType)
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val javaType = CodeGenerator.javaType(dataType)
val (argCode, argString, resultIsNull) = prepareArguments(ctx)
val callFunc = s"$objectName.$functionName($argString)"
val prepareIsNull = if (nullable) {
s"boolean ${ev.isNull} = $resultIsNull;"
} else {
ev.isNull = FalseLiteral
""
}
val evaluate = if (returnNullable) {
if (CodeGenerator.defaultValue(dataType) == "null") {
s"""
${ev.value} = $callFunc;
${ev.isNull} = ${ev.value} == null;
"""
} else {
val boxedResult = ctx.freshName("boxedResult")
s"""
${CodeGenerator.boxedType(dataType)} $boxedResult = $callFunc;
${ev.isNull} = $boxedResult == null;
if (!${ev.isNull}) {
${ev.value} = $boxedResult;
}
"""
}
} else {
s"${ev.value} = $callFunc;"
}
val code = code"""
$argCode
$prepareIsNull
$javaType ${ev.value} = ${CodeGenerator.defaultValue(dataType)};
if (!$resultIsNull) {
$evaluate
}
"""
ev.copy(code = code)
}
}
/**
* Calls the specified function on an object, optionally passing arguments. If the `targetObject`
* expression evaluates to null then null will be returned.
*
* In some cases, due to erasure, the schema may expect a primitive type when in fact the method
* is returning java.lang.Object. In this case, we will generate code that attempts to unbox the
* value automatically.
*
* @param targetObject An expression that will return the object to call the method on.
* @param functionName The name of the method to call.
* @param dataType The expected return type of the function.
* @param arguments An optional list of expressions, whose evaluation will be passed to the
* function.
* @param propagateNull When true, and any of the arguments is null, null will be returned instead
* of calling the function.
* @param returnNullable When false, indicating the invoked method will always return
* non-null value.
*/
case class Invoke(
targetObject: Expression,
functionName: String,
dataType: DataType,
arguments: Seq[Expression] = Nil,
propagateNull: Boolean = true,
returnNullable : Boolean = true) extends InvokeLike {
lazy val argClasses = ScalaReflection.expressionJavaClasses(arguments)
override def nullable: Boolean = targetObject.nullable || needNullCheck || returnNullable
override def children: Seq[Expression] = targetObject +: arguments
private lazy val encodedFunctionName = TermName(functionName).encodedName.toString
@transient lazy val method = targetObject.dataType match {
case ObjectType(cls) =>
val m = cls.getMethods.find(_.getName == encodedFunctionName)
if (m.isEmpty) {
sys.error(s"Couldn't find $encodedFunctionName on $cls")
} else {
m
}
case _ => None
}
override def eval(input: InternalRow): Any = {
val obj = targetObject.eval(input)
if (obj == null) {
// return null if obj is null
null
} else {
val invokeMethod = if (method.isDefined) {
method.get
} else {
obj.getClass.getDeclaredMethod(functionName, argClasses: _*)
}
invoke(obj, invokeMethod, arguments, input, dataType)
}
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val javaType = CodeGenerator.javaType(dataType)
val obj = targetObject.genCode(ctx)
val (argCode, argString, resultIsNull) = prepareArguments(ctx)
val returnPrimitive = method.isDefined && method.get.getReturnType.isPrimitive
val needTryCatch = method.isDefined && method.get.getExceptionTypes.nonEmpty
def getFuncResult(resultVal: String, funcCall: String): String = if (needTryCatch) {
s"""
try {
$resultVal = $funcCall;
} catch (Exception e) {
org.apache.spark.unsafe.Platform.throwException(e);
}
"""
} else {
s"$resultVal = $funcCall;"
}
val evaluate = if (returnPrimitive) {
getFuncResult(ev.value, s"${obj.value}.$encodedFunctionName($argString)")
} else {
val funcResult = ctx.freshName("funcResult")
// If the function can return null, we do an extra check to make sure our null bit is still
// set correctly.
val assignResult = if (!returnNullable) {
s"${ev.value} = (${CodeGenerator.boxedType(javaType)}) $funcResult;"
} else {
s"""
if ($funcResult != null) {
${ev.value} = (${CodeGenerator.boxedType(javaType)}) $funcResult;
} else {
${ev.isNull} = true;
}
"""
}
s"""
Object $funcResult = null;
${getFuncResult(funcResult, s"${obj.value}.$encodedFunctionName($argString)")}
$assignResult
"""
}
val code = obj.code + code"""
boolean ${ev.isNull} = true;
$javaType ${ev.value} = ${CodeGenerator.defaultValue(dataType)};
if (!${obj.isNull}) {
$argCode
${ev.isNull} = $resultIsNull;
if (!${ev.isNull}) {
$evaluate
}
}
"""
ev.copy(code = code)
}
override def toString: String = s"$targetObject.$functionName"
}
object NewInstance {
def apply(
cls: Class[_],
arguments: Seq[Expression],
dataType: DataType,
propagateNull: Boolean = true): NewInstance =
new NewInstance(cls, arguments, propagateNull, dataType, None)
}
/**
* Constructs a new instance of the given class, using the result of evaluating the specified
* expressions as arguments.
*
* @param cls The class to construct.
* @param arguments A list of expression to use as arguments to the constructor.
* @param propagateNull When true, if any of the arguments is null, then null will be returned
* instead of trying to construct the object.
* @param dataType The type of object being constructed, as a Spark SQL datatype. This allows you
* to manually specify the type when the object in question is a valid internal
* representation (i.e. ArrayData) instead of an object.
* @param outerPointer If the object being constructed is an inner class, the outerPointer for the
* containing class must be specified. This parameter is defined as an optional
* function, which allows us to get the outer pointer lazily,and it's useful if
* the inner class is defined in REPL.
*/
case class NewInstance(
cls: Class[_],
arguments: Seq[Expression],
propagateNull: Boolean,
dataType: DataType,
outerPointer: Option[() => AnyRef]) extends InvokeLike {
private val className = cls.getName
override def nullable: Boolean = needNullCheck
override def children: Seq[Expression] = arguments
override lazy val resolved: Boolean = {
// If the class to construct is an inner class, we need to get its outer pointer, or this
// expression should be regarded as unresolved.
// Note that static inner classes (e.g., inner classes within Scala objects) don't need
// outer pointer registration.
val needOuterPointer =
outerPointer.isEmpty && cls.isMemberClass && !Modifier.isStatic(cls.getModifiers)
childrenResolved && !needOuterPointer
}
@transient private lazy val constructor: (Seq[AnyRef]) => Any = {
val paramTypes = ScalaReflection.expressionJavaClasses(arguments)
val getConstructor = (paramClazz: Seq[Class[_]]) => {
ScalaReflection.findConstructor(cls, paramClazz).getOrElse {
sys.error(s"Couldn't find a valid constructor on $cls")
}
}
outerPointer.map { p =>
val outerObj = p()
val d = outerObj.getClass +: paramTypes
val c = getConstructor(outerObj.getClass +: paramTypes)
(args: Seq[AnyRef]) => {
c(outerObj +: args)
}
}.getOrElse {
val c = getConstructor(paramTypes)
(args: Seq[AnyRef]) => {
c(args)
}
}
}
override def eval(input: InternalRow): Any = {
val argValues = arguments.map(_.eval(input))
constructor(argValues.map(_.asInstanceOf[AnyRef]))
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val javaType = CodeGenerator.javaType(dataType)
val (argCode, argString, resultIsNull) = prepareArguments(ctx)
val outer = outerPointer.map(func => Literal.fromObject(func()).genCode(ctx))
ev.isNull = resultIsNull
val constructorCall = cls.getConstructors.size match {
// If there are no constructors, the `new` method will fail. In
// this case we can try to call the apply method constructor
// that might be defined on the companion object.
case 0 => s"$className$$.MODULE$$.apply($argString)"
case _ => outer.map { gen =>
s"${gen.value}.new ${cls.getSimpleName}($argString)"
}.getOrElse {
s"new $className($argString)"
}
}
val code = code"""
$argCode
${outer.map(_.code).getOrElse("")}
final $javaType ${ev.value} = ${ev.isNull} ?
${CodeGenerator.defaultValue(dataType)} : $constructorCall;
"""
ev.copy(code = code)
}
override def toString: String = s"newInstance($cls)"
}
/**
* Given an expression that returns on object of type `Option[_]`, this expression unwraps the
* option into the specified Spark SQL datatype. In the case of `None`, the nullbit is set instead.
*
* @param dataType The expected unwrapped option type.
* @param child An expression that returns an `Option`
*/
case class UnwrapOption(
dataType: DataType,
child: Expression) extends UnaryExpression with NonSQLExpression with ExpectsInputTypes {
override def nullable: Boolean = true
override def inputTypes: Seq[AbstractDataType] = ObjectType :: Nil
override def eval(input: InternalRow): Any = {
val inputObject = child.eval(input)
if (inputObject == null) {
null
} else {
inputObject.asInstanceOf[Option[_]].orNull
}
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val javaType = CodeGenerator.javaType(dataType)
val inputObject = child.genCode(ctx)
val code = inputObject.code + code"""
final boolean ${ev.isNull} = ${inputObject.isNull} || ${inputObject.value}.isEmpty();
$javaType ${ev.value} = ${ev.isNull} ? ${CodeGenerator.defaultValue(dataType)} :
(${CodeGenerator.boxedType(javaType)}) ${inputObject.value}.get();
"""
ev.copy(code = code)
}
}
/**
* Converts the result of evaluating `child` into an option, checking both the isNull bit and
* (in the case of reference types) equality with null.
*
* @param child The expression to evaluate and wrap.
* @param optType The type of this option.
*/
case class WrapOption(child: Expression, optType: DataType)
extends UnaryExpression with NonSQLExpression with ExpectsInputTypes {
override def dataType: DataType = ObjectType(classOf[Option[_]])
override def nullable: Boolean = false
override def inputTypes: Seq[AbstractDataType] = optType :: Nil
override def eval(input: InternalRow): Any = Option(child.eval(input))
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val inputObject = child.genCode(ctx)
val code = inputObject.code + code"""
scala.Option ${ev.value} =
${inputObject.isNull} ?
scala.Option$$.MODULE$$.apply(null) : new scala.Some(${inputObject.value});
"""
ev.copy(code = code, isNull = FalseLiteral)
}
}
/**
* A placeholder for the loop variable used in [[MapObjects]]. This should never be constructed
* manually, but will instead be passed into the provided lambda function.
*/
case class LambdaVariable(
value: String,
isNull: String,
dataType: DataType,
nullable: Boolean = true) extends LeafExpression with NonSQLExpression {
private val accessor: (InternalRow, Int) => Any = InternalRow.getAccessor(dataType, nullable)
// Interpreted execution of `LambdaVariable` always get the 0-index element from input row.
override def eval(input: InternalRow): Any = {
assert(input.numFields == 1,
"The input row of interpreted LambdaVariable should have only 1 field.")
accessor(input, 0)
}
override def genCode(ctx: CodegenContext): ExprCode = {
val isNullValue = if (nullable) {
JavaCode.isNullVariable(isNull)
} else {
FalseLiteral
}
ExprCode(value = JavaCode.variable(value, dataType), isNull = isNullValue)
}
// This won't be called as `genCode` is overrided, just overriding it to make
// `LambdaVariable` non-abstract.
override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = ev
}
/**
* When constructing [[MapObjects]], the element type must be given, which may not be available
* before analysis. This class acts like a placeholder for [[MapObjects]], and will be replaced by
* [[MapObjects]] during analysis after the input data is resolved.
* Note that, ideally we should not serialize and send unresolved expressions to executors, but
* users may accidentally do this(e.g. mistakenly reference an encoder instance when implementing
* Aggregator). Here we mark `function` as transient because it may reference scala Type, which is
* not serializable. Then even users mistakenly reference unresolved expression and serialize it,
* it's just a performance issue(more network traffic), and will not fail.
*/
case class UnresolvedMapObjects(
@transient function: Expression => Expression,
child: Expression,
customCollectionCls: Option[Class[_]] = None) extends UnaryExpression with Unevaluable {
override lazy val resolved = false
override def dataType: DataType = customCollectionCls.map(ObjectType.apply).getOrElse {
throw new UnsupportedOperationException("not resolved")
}
}
object MapObjects {
private val curId = new java.util.concurrent.atomic.AtomicInteger()
/**
* Construct an instance of MapObjects case class.
*
* @param function The function applied on the collection elements.
* @param inputData An expression that when evaluated returns a collection object.
* @param elementType The data type of elements in the collection.
* @param elementNullable When false, indicating elements in the collection are always
* non-null value.
* @param customCollectionCls Class of the resulting collection (returning ObjectType)
* or None (returning ArrayType)
*/
def apply(
function: Expression => Expression,
inputData: Expression,
elementType: DataType,
elementNullable: Boolean = true,
customCollectionCls: Option[Class[_]] = None): MapObjects = {
val id = curId.getAndIncrement()
val loopValue = s"MapObjects_loopValue$id"
val loopIsNull = if (elementNullable) {
s"MapObjects_loopIsNull$id"
} else {
"false"
}
val loopVar = LambdaVariable(loopValue, loopIsNull, elementType, elementNullable)
MapObjects(
loopValue, loopIsNull, elementType, function(loopVar), inputData, customCollectionCls)
}
}
/**
* Applies the given expression to every element of a collection of items, returning the result
* as an ArrayType or ObjectType. This is similar to a typical map operation, but where the lambda
* function is expressed using catalyst expressions.
*
* The type of the result is determined as follows:
* - ArrayType - when customCollectionCls is None
* - ObjectType(collection) - when customCollectionCls contains a collection class
*
* The following collection ObjectTypes are currently supported on input:
* Seq, Array, ArrayData, java.util.List
*
* @param loopValue the name of the loop variable that used when iterate the collection, and used
* as input for the `lambdaFunction`
* @param loopIsNull the nullity of the loop variable that used when iterate the collection, and
* used as input for the `lambdaFunction`
* @param loopVarDataType the data type of the loop variable that used when iterate the collection,
* and used as input for the `lambdaFunction`
* @param lambdaFunction A function that take the `loopVar` as input, and used as lambda function
* to handle collection elements.
* @param inputData An expression that when evaluated returns a collection object.
* @param customCollectionCls Class of the resulting collection (returning ObjectType)
* or None (returning ArrayType)
*/
case class MapObjects private(
loopValue: String,
loopIsNull: String,
loopVarDataType: DataType,
lambdaFunction: Expression,
inputData: Expression,
customCollectionCls: Option[Class[_]]) extends Expression with NonSQLExpression {
override def nullable: Boolean = inputData.nullable
override def children: Seq[Expression] = lambdaFunction :: inputData :: Nil
// The data with UserDefinedType are actually stored with the data type of its sqlType.
// When we want to apply MapObjects on it, we have to use it.
lazy private val inputDataType = inputData.dataType match {
case u: UserDefinedType[_] => u.sqlType
case _ => inputData.dataType
}
private def executeFuncOnCollection(inputCollection: Seq[_]): Iterator[_] = {
val row = new GenericInternalRow(1)
inputCollection.toIterator.map { element =>
row.update(0, element)
lambdaFunction.eval(row)
}
}
private lazy val convertToSeq: Any => Seq[_] = inputDataType match {
case ObjectType(cls) if classOf[Seq[_]].isAssignableFrom(cls) =>
_.asInstanceOf[Seq[_]]
case ObjectType(cls) if cls.isArray =>
_.asInstanceOf[Array[_]].toSeq
case ObjectType(cls) if classOf[java.util.List[_]].isAssignableFrom(cls) =>
_.asInstanceOf[java.util.List[_]].asScala
case ObjectType(cls) if cls == classOf[Object] =>
(inputCollection) => {
if (inputCollection.getClass.isArray) {
inputCollection.asInstanceOf[Array[_]].toSeq
} else {
inputCollection.asInstanceOf[Seq[_]]
}
}
case ArrayType(et, _) =>
_.asInstanceOf[ArrayData].toSeq[Any](et)
}
private lazy val mapElements: Seq[_] => Any = customCollectionCls match {
case Some(cls) if classOf[Seq[_]].isAssignableFrom(cls) =>
// Scala sequence
executeFuncOnCollection(_).toSeq
case Some(cls) if classOf[scala.collection.Set[_]].isAssignableFrom(cls) =>
// Scala set
executeFuncOnCollection(_).toSet
case Some(cls) if classOf[java.util.List[_]].isAssignableFrom(cls) =>
// Java list
if (cls == classOf[java.util.List[_]] || cls == classOf[java.util.AbstractList[_]] ||
cls == classOf[java.util.AbstractSequentialList[_]]) {
// Specifying non concrete implementations of `java.util.List`
executeFuncOnCollection(_).toSeq.asJava
} else {
val constructors = cls.getConstructors()
val intParamConstructor = constructors.find { constructor =>
constructor.getParameterCount == 1 && constructor.getParameterTypes()(0) == classOf[Int]
}
val noParamConstructor = constructors.find { constructor =>
constructor.getParameterCount == 0
}
val constructor = intParamConstructor.map { intConstructor =>
(len: Int) => intConstructor.newInstance(len.asInstanceOf[Object])
}.getOrElse {
(_: Int) => noParamConstructor.get.newInstance()
}
// Specifying concrete implementations of `java.util.List`
(inputs) => {
val results = executeFuncOnCollection(inputs)
val builder = constructor(inputs.length).asInstanceOf[java.util.List[Any]]
results.foreach(builder.add(_))
builder
}
}
case None =>
// array
x => new GenericArrayData(executeFuncOnCollection(x).toArray)
case Some(cls) =>
throw new RuntimeException(s"class `${cls.getName}` is not supported by `MapObjects` as " +
"resulting collection.")
}
override def eval(input: InternalRow): Any = {
val inputCollection = inputData.eval(input)
if (inputCollection == null) {
return null
}
mapElements(convertToSeq(inputCollection))
}
override def dataType: DataType =
customCollectionCls.map(ObjectType.apply).getOrElse(
ArrayType(lambdaFunction.dataType, containsNull = lambdaFunction.nullable))
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val elementJavaType = CodeGenerator.javaType(loopVarDataType)
ctx.addMutableState(elementJavaType, loopValue, forceInline = true, useFreshName = false)
val genInputData = inputData.genCode(ctx)
val genFunction = lambdaFunction.genCode(ctx)
val dataLength = ctx.freshName("dataLength")
val convertedArray = ctx.freshName("convertedArray")
val loopIndex = ctx.freshName("loopIndex")
val convertedType = CodeGenerator.boxedType(lambdaFunction.dataType)
// Because of the way Java defines nested arrays, we have to handle the syntax specially.
// Specifically, we have to insert the [$dataLength] in between the type and any extra nested
// array declarations (i.e. new String[1][]).
val arrayConstructor = if (convertedType contains "[]") {
val rawType = convertedType.takeWhile(_ != '[')
val arrayPart = convertedType.reverse.takeWhile(c => c == '[' || c == ']').reverse
s"new $rawType[$dataLength]$arrayPart"
} else {
s"new $convertedType[$dataLength]"
}
// In RowEncoder, we use `Object` to represent Array or Seq, so we need to determine the type
// of input collection at runtime for this case.
val seq = ctx.freshName("seq")
val array = ctx.freshName("array")
val determineCollectionType = inputData.dataType match {
case ObjectType(cls) if cls == classOf[Object] =>
val seqClass = classOf[Seq[_]].getName
s"""
$seqClass $seq = null;
$elementJavaType[] $array = null;
if (${genInputData.value}.getClass().isArray()) {
$array = ($elementJavaType[]) ${genInputData.value};
} else {
$seq = ($seqClass) ${genInputData.value};
}
"""
case _ => ""
}
// `MapObjects` generates a while loop to traverse the elements of the input collection. We
// need to take care of Seq and List because they may have O(n) complexity for indexed accessing
// like `list.get(1)`. Here we use Iterator to traverse Seq and List.
val (getLength, prepareLoop, getLoopVar) = inputDataType match {
case ObjectType(cls) if classOf[Seq[_]].isAssignableFrom(cls) =>
val it = ctx.freshName("it")
(
s"${genInputData.value}.size()",
s"scala.collection.Iterator $it = ${genInputData.value}.toIterator();",
s"$it.next()"
)
case ObjectType(cls) if cls.isArray =>
(
s"${genInputData.value}.length",
"",
s"${genInputData.value}[$loopIndex]"
)
case ObjectType(cls) if classOf[java.util.List[_]].isAssignableFrom(cls) =>
val it = ctx.freshName("it")
(
s"${genInputData.value}.size()",
s"java.util.Iterator $it = ${genInputData.value}.iterator();",
s"$it.next()"
)
case ArrayType(et, _) =>
(
s"${genInputData.value}.numElements()",
"",
CodeGenerator.getValue(genInputData.value, et, loopIndex)
)
case ObjectType(cls) if cls == classOf[Object] =>
val it = ctx.freshName("it")
(
s"$seq == null ? $array.length : $seq.size()",
s"scala.collection.Iterator $it = $seq == null ? null : $seq.toIterator();",
s"$it == null ? $array[$loopIndex] : $it.next()"
)
}
// Make a copy of the data if it's unsafe-backed
def makeCopyIfInstanceOf(clazz: Class[_ <: Any], value: String) =
s"$value instanceof ${clazz.getSimpleName}? ${value}.copy() : $value"
val genFunctionValue: String = lambdaFunction.dataType match {
case StructType(_) => makeCopyIfInstanceOf(classOf[UnsafeRow], genFunction.value)
case ArrayType(_, _) => makeCopyIfInstanceOf(classOf[UnsafeArrayData], genFunction.value)
case MapType(_, _, _) => makeCopyIfInstanceOf(classOf[UnsafeMapData], genFunction.value)
case _ => genFunction.value
}
val loopNullCheck = if (loopIsNull != "false") {
ctx.addMutableState(
CodeGenerator.JAVA_BOOLEAN, loopIsNull, forceInline = true, useFreshName = false)
inputDataType match {
case _: ArrayType => s"$loopIsNull = ${genInputData.value}.isNullAt($loopIndex);"
case _ => s"$loopIsNull = $loopValue == null;"
}
} else {
""
}
val (initCollection, addElement, getResult): (String, String => String, String) =
customCollectionCls match {
case Some(cls) if classOf[Seq[_]].isAssignableFrom(cls) ||
classOf[scala.collection.Set[_]].isAssignableFrom(cls) =>
// Scala sequence or set
val getBuilder = s"${cls.getName}$$.MODULE$$.newBuilder()"
val builder = ctx.freshName("collectionBuilder")
(
s"""
${classOf[Builder[_, _]].getName} $builder = $getBuilder;
$builder.sizeHint($dataLength);
""",
genValue => s"$builder.$$plus$$eq($genValue);",
s"(${cls.getName}) $builder.result();"
)
case Some(cls) if classOf[java.util.List[_]].isAssignableFrom(cls) =>
// Java list
val builder = ctx.freshName("collectionBuilder")
(
if (cls == classOf[java.util.List[_]] || cls == classOf[java.util.AbstractList[_]] ||
cls == classOf[java.util.AbstractSequentialList[_]]) {
s"${cls.getName} $builder = new java.util.ArrayList($dataLength);"
} else {
val param = Try(cls.getConstructor(Integer.TYPE)).map(_ => dataLength).getOrElse("")
s"${cls.getName} $builder = new ${cls.getName}($param);"
},
genValue => s"$builder.add($genValue);",
s"$builder;"
)
case None =>
// array
(
s"""
$convertedType[] $convertedArray = null;
$convertedArray = $arrayConstructor;
""",
genValue => s"$convertedArray[$loopIndex] = $genValue;",
s"new ${classOf[GenericArrayData].getName}($convertedArray);"
)
}
val code = genInputData.code + code"""
${CodeGenerator.javaType(dataType)} ${ev.value} = ${CodeGenerator.defaultValue(dataType)};
if (!${genInputData.isNull}) {
$determineCollectionType
int $dataLength = $getLength;
$initCollection
int $loopIndex = 0;
$prepareLoop
while ($loopIndex < $dataLength) {
$loopValue = ($elementJavaType) ($getLoopVar);
$loopNullCheck
${genFunction.code}
if (${genFunction.isNull}) {
${addElement("null")}
} else {
${addElement(genFunctionValue)}
}
$loopIndex += 1;
}
${ev.value} = $getResult
}
"""
ev.copy(code = code, isNull = genInputData.isNull)
}
}
/**
* Similar to [[UnresolvedMapObjects]], this is a placeholder of [[CatalystToExternalMap]].
*
* @param child An expression that when evaluated returns a map object.
* @param keyFunction The function applied on the key collection elements.
* @param valueFunction The function applied on the value collection elements.
* @param collClass The type of the resulting collection.
*/
case class UnresolvedCatalystToExternalMap(
child: Expression,
@transient keyFunction: Expression => Expression,
@transient valueFunction: Expression => Expression,
collClass: Class[_]) extends UnaryExpression with Unevaluable {
override lazy val resolved = false
override def dataType: DataType = ObjectType(collClass)
}
object CatalystToExternalMap {
private val curId = new java.util.concurrent.atomic.AtomicInteger()
def apply(u: UnresolvedCatalystToExternalMap): CatalystToExternalMap = {
val id = curId.getAndIncrement()
val keyLoopValue = s"CatalystToExternalMap_keyLoopValue$id"
val mapType = u.child.dataType.asInstanceOf[MapType]
val keyLoopVar = LambdaVariable(keyLoopValue, "", mapType.keyType, nullable = false)
val valueLoopValue = s"CatalystToExternalMap_valueLoopValue$id"
val valueLoopIsNull = if (mapType.valueContainsNull) {
s"CatalystToExternalMap_valueLoopIsNull$id"
} else {
"false"
}
val valueLoopVar = LambdaVariable(valueLoopValue, valueLoopIsNull, mapType.valueType)
CatalystToExternalMap(
keyLoopValue, u.keyFunction(keyLoopVar),
valueLoopValue, valueLoopIsNull, u.valueFunction(valueLoopVar),
u.child, u.collClass)
}
}
/**
* Expression used to convert a Catalyst Map to an external Scala Map.
* The collection is constructed using the associated builder, obtained by calling `newBuilder`
* on the collection's companion object.
*
* @param keyLoopValue the name of the loop variable that is used when iterating over the key
* collection, and which is used as input for the `keyLambdaFunction`
* @param keyLambdaFunction A function that takes the `keyLoopVar` as input, and is used as
* a lambda function to handle collection elements.
* @param valueLoopValue the name of the loop variable that is used when iterating over the value
* collection, and which is used as input for the `valueLambdaFunction`
* @param valueLoopIsNull the nullability of the loop variable that is used when iterating over
* the value collection, and which is used as input for the
* `valueLambdaFunction`
* @param valueLambdaFunction A function that takes the `valueLoopVar` as input, and is used as
* a lambda function to handle collection elements.
* @param inputData An expression that when evaluated returns a map object.
* @param collClass The type of the resulting collection.
*/
case class CatalystToExternalMap private(
keyLoopValue: String,
keyLambdaFunction: Expression,
valueLoopValue: String,
valueLoopIsNull: String,
valueLambdaFunction: Expression,
inputData: Expression,
collClass: Class[_]) extends Expression with NonSQLExpression {
override def nullable: Boolean = inputData.nullable
override def children: Seq[Expression] =
keyLambdaFunction :: valueLambdaFunction :: inputData :: Nil
private lazy val inputMapType = inputData.dataType.asInstanceOf[MapType]
private lazy val keyConverter =
CatalystTypeConverters.createToScalaConverter(inputMapType.keyType)
private lazy val valueConverter =
CatalystTypeConverters.createToScalaConverter(inputMapType.valueType)
private lazy val (newMapBuilderMethod, moduleField) = {
val clazz = Utils.classForName(collClass.getCanonicalName + "$")
(clazz.getMethod("newBuilder"), clazz.getField("MODULE$").get(null))
}
private def newMapBuilder(): Builder[AnyRef, AnyRef] = {
newMapBuilderMethod.invoke(moduleField).asInstanceOf[Builder[AnyRef, AnyRef]]
}
override def eval(input: InternalRow): Any = {
val result = inputData.eval(input).asInstanceOf[MapData]
if (result != null) {
val builder = newMapBuilder()
builder.sizeHint(result.numElements())
val keyArray = result.keyArray()
val valueArray = result.valueArray()
var i = 0
while (i < result.numElements()) {
val key = keyConverter(keyArray.get(i, inputMapType.keyType))
val value = valueConverter(valueArray.get(i, inputMapType.valueType))
builder += Tuple2(key, value)
i += 1
}
builder.result()
} else {
null
}
}
override def dataType: DataType = ObjectType(collClass)
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
// The data with PythonUserDefinedType are actually stored with the data type of its sqlType.
// When we want to apply MapObjects on it, we have to use it.
def inputDataType(dataType: DataType) = dataType match {
case p: PythonUserDefinedType => p.sqlType
case _ => dataType
}
val mapType = inputDataType(inputData.dataType).asInstanceOf[MapType]
val keyElementJavaType = CodeGenerator.javaType(mapType.keyType)
ctx.addMutableState(keyElementJavaType, keyLoopValue, forceInline = true, useFreshName = false)
val genKeyFunction = keyLambdaFunction.genCode(ctx)
val valueElementJavaType = CodeGenerator.javaType(mapType.valueType)
ctx.addMutableState(valueElementJavaType, valueLoopValue, forceInline = true,
useFreshName = false)
val genValueFunction = valueLambdaFunction.genCode(ctx)
val genInputData = inputData.genCode(ctx)
val dataLength = ctx.freshName("dataLength")
val loopIndex = ctx.freshName("loopIndex")
val tupleLoopValue = ctx.freshName("tupleLoopValue")
val builderValue = ctx.freshName("builderValue")
val keyArray = ctx.freshName("keyArray")
val valueArray = ctx.freshName("valueArray")
val getKeyLoopVar = CodeGenerator.getValue(keyArray, inputDataType(mapType.keyType), loopIndex)
val getValueLoopVar = CodeGenerator.getValue(
valueArray, inputDataType(mapType.valueType), loopIndex)
// Make a copy of the data if it's unsafe-backed
def makeCopyIfInstanceOf(clazz: Class[_ <: Any], value: String) =
s"$value instanceof ${clazz.getSimpleName}? $value.copy() : $value"
def genFunctionValue(lambdaFunction: Expression, genFunction: ExprCode) =
lambdaFunction.dataType match {
case StructType(_) => makeCopyIfInstanceOf(classOf[UnsafeRow], genFunction.value)
case ArrayType(_, _) => makeCopyIfInstanceOf(classOf[UnsafeArrayData], genFunction.value)
case MapType(_, _, _) => makeCopyIfInstanceOf(classOf[UnsafeMapData], genFunction.value)
case _ => genFunction.value
}
val genKeyFunctionValue = genFunctionValue(keyLambdaFunction, genKeyFunction)
val genValueFunctionValue = genFunctionValue(valueLambdaFunction, genValueFunction)
val valueLoopNullCheck = if (valueLoopIsNull != "false") {
ctx.addMutableState(CodeGenerator.JAVA_BOOLEAN, valueLoopIsNull, forceInline = true,
useFreshName = false)
s"$valueLoopIsNull = $valueArray.isNullAt($loopIndex);"
} else {
""
}
val builderClass = classOf[Builder[_, _]].getName
val constructBuilder = s"""
$builderClass $builderValue = ${collClass.getName}$$.MODULE$$.newBuilder();
$builderValue.sizeHint($dataLength);
"""
val tupleClass = classOf[(_, _)].getName
val appendToBuilder = s"""
$tupleClass $tupleLoopValue;
if (${genValueFunction.isNull}) {
$tupleLoopValue = new $tupleClass($genKeyFunctionValue, null);
} else {
$tupleLoopValue = new $tupleClass($genKeyFunctionValue, $genValueFunctionValue);
}
$builderValue.$$plus$$eq($tupleLoopValue);
"""
val getBuilderResult = s"${ev.value} = (${collClass.getName}) $builderValue.result();"
val code = genInputData.code + code"""
${CodeGenerator.javaType(dataType)} ${ev.value} = ${CodeGenerator.defaultValue(dataType)};
if (!${genInputData.isNull}) {
int $dataLength = ${genInputData.value}.numElements();
$constructBuilder
ArrayData $keyArray = ${genInputData.value}.keyArray();
ArrayData $valueArray = ${genInputData.value}.valueArray();
int $loopIndex = 0;
while ($loopIndex < $dataLength) {
$keyLoopValue = ($keyElementJavaType) ($getKeyLoopVar);
$valueLoopValue = ($valueElementJavaType) ($getValueLoopVar);
$valueLoopNullCheck
${genKeyFunction.code}
${genValueFunction.code}
$appendToBuilder
$loopIndex += 1;
}
$getBuilderResult
}
"""
ev.copy(code = code, isNull = genInputData.isNull)
}
}
object ExternalMapToCatalyst {
private val curId = new java.util.concurrent.atomic.AtomicInteger()
def apply(
inputMap: Expression,
keyType: DataType,
keyConverter: Expression => Expression,
keyNullable: Boolean,
valueType: DataType,
valueConverter: Expression => Expression,
valueNullable: Boolean): ExternalMapToCatalyst = {
val id = curId.getAndIncrement()
val keyName = "ExternalMapToCatalyst_key" + id
val keyIsNull = if (keyNullable) {
"ExternalMapToCatalyst_key_isNull" + id
} else {
"false"
}
val valueName = "ExternalMapToCatalyst_value" + id
val valueIsNull = if (valueNullable) {
"ExternalMapToCatalyst_value_isNull" + id
} else {
"false"
}
ExternalMapToCatalyst(
keyName,
keyIsNull,
keyType,
keyConverter(LambdaVariable(keyName, keyIsNull, keyType, keyNullable)),
valueName,
valueIsNull,
valueType,
valueConverter(LambdaVariable(valueName, valueIsNull, valueType, valueNullable)),
inputMap
)
}
}
/**
* Converts a Scala/Java map object into catalyst format, by applying the key/value converter when
* iterate the map.
*
* @param key the name of the map key variable that used when iterate the map, and used as input for
* the `keyConverter`
* @param keyIsNull the nullability of the map key variable that used when iterate the map, and
* used as input for the `keyConverter`
* @param keyType the data type of the map key variable that used when iterate the map, and used as
* input for the `keyConverter`
* @param keyConverter A function that take the `key` as input, and converts it to catalyst format.
* @param value the name of the map value variable that used when iterate the map, and used as input
* for the `valueConverter`
* @param valueIsNull the nullability of the map value variable that used when iterate the map, and
* used as input for the `valueConverter`
* @param valueType the data type of the map value variable that used when iterate the map, and
* used as input for the `valueConverter`
* @param valueConverter A function that take the `value` as input, and converts it to catalyst
* format.
* @param child An expression that when evaluated returns the input map object.
*/
case class ExternalMapToCatalyst private(
key: String,
keyIsNull: String,
keyType: DataType,
keyConverter: Expression,
value: String,
valueIsNull: String,
valueType: DataType,
valueConverter: Expression,
child: Expression)
extends UnaryExpression with NonSQLExpression {
override def foldable: Boolean = false
override def dataType: MapType = MapType(
keyConverter.dataType, valueConverter.dataType, valueContainsNull = valueConverter.nullable)
private lazy val mapCatalystConverter: Any => (Array[Any], Array[Any]) = {
val rowBuffer = InternalRow.fromSeq(Array[Any](1))
def rowWrapper(data: Any): InternalRow = {
rowBuffer.update(0, data)
rowBuffer
}
child.dataType match {
case ObjectType(cls) if classOf[java.util.Map[_, _]].isAssignableFrom(cls) =>
(input: Any) => {
val data = input.asInstanceOf[java.util.Map[Any, Any]]
val keys = new Array[Any](data.size)
val values = new Array[Any](data.size)
val iter = data.entrySet().iterator()
var i = 0
while (iter.hasNext) {
val entry = iter.next()
val (key, value) = (entry.getKey, entry.getValue)
keys(i) = if (key != null) {
keyConverter.eval(rowWrapper(key))
} else {
throw new RuntimeException("Cannot use null as map key!")
}
values(i) = if (value != null) {
valueConverter.eval(rowWrapper(value))
} else {
null
}
i += 1
}
(keys, values)
}
case ObjectType(cls) if classOf[scala.collection.Map[_, _]].isAssignableFrom(cls) =>
(input: Any) => {
val data = input.asInstanceOf[scala.collection.Map[Any, Any]]
val keys = new Array[Any](data.size)
val values = new Array[Any](data.size)
var i = 0
for ((key, value) <- data) {
keys(i) = if (key != null) {
keyConverter.eval(rowWrapper(key))
} else {
throw new RuntimeException("Cannot use null as map key!")
}
values(i) = if (value != null) {
valueConverter.eval(rowWrapper(value))
} else {
null
}
i += 1
}
(keys, values)
}
}
}
override def eval(input: InternalRow): Any = {
val result = child.eval(input)
if (result != null) {
val (keys, values) = mapCatalystConverter(result)
new ArrayBasedMapData(new GenericArrayData(keys), new GenericArrayData(values))
} else {
null
}
}
override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val inputMap = child.genCode(ctx)
val genKeyConverter = keyConverter.genCode(ctx)
val genValueConverter = valueConverter.genCode(ctx)
val length = ctx.freshName("length")
val index = ctx.freshName("index")
val convertedKeys = ctx.freshName("convertedKeys")
val convertedValues = ctx.freshName("convertedValues")
val entry = ctx.freshName("entry")
val entries = ctx.freshName("entries")
val keyElementJavaType = CodeGenerator.javaType(keyType)
val valueElementJavaType = CodeGenerator.javaType(valueType)
ctx.addMutableState(keyElementJavaType, key, forceInline = true, useFreshName = false)
ctx.addMutableState(valueElementJavaType, value, forceInline = true, useFreshName = false)
val (defineEntries, defineKeyValue) = child.dataType match {
case ObjectType(cls) if classOf[java.util.Map[_, _]].isAssignableFrom(cls) =>
val javaIteratorCls = classOf[java.util.Iterator[_]].getName
val javaMapEntryCls = classOf[java.util.Map.Entry[_, _]].getName
val defineEntries =
s"final $javaIteratorCls $entries = ${inputMap.value}.entrySet().iterator();"
val defineKeyValue =
s"""
final $javaMapEntryCls $entry = ($javaMapEntryCls) $entries.next();
$key = (${CodeGenerator.boxedType(keyType)}) $entry.getKey();
$value = (${CodeGenerator.boxedType(valueType)}) $entry.getValue();
"""
defineEntries -> defineKeyValue
case ObjectType(cls) if classOf[scala.collection.Map[_, _]].isAssignableFrom(cls) =>
val scalaIteratorCls = classOf[Iterator[_]].getName
val scalaMapEntryCls = classOf[Tuple2[_, _]].getName
val defineEntries = s"final $scalaIteratorCls $entries = ${inputMap.value}.iterator();"
val defineKeyValue =
s"""
final $scalaMapEntryCls $entry = ($scalaMapEntryCls) $entries.next();
$key = (${CodeGenerator.boxedType(keyType)}) $entry._1();
$value = (${CodeGenerator.boxedType(valueType)}) $entry._2();
"""
defineEntries -> defineKeyValue
}
val keyNullCheck = if (keyIsNull != "false") {
ctx.addMutableState(
CodeGenerator.JAVA_BOOLEAN, keyIsNull, forceInline = true, useFreshName = false)
s"$keyIsNull = $key == null;"
} else {
""
}
val valueNullCheck = if (valueIsNull != "false") {
ctx.addMutableState(
CodeGenerator.JAVA_BOOLEAN, valueIsNull, forceInline = true, useFreshName = false)
s"$valueIsNull = $value == null;"
} else {
""
}
val arrayCls = classOf[GenericArrayData].getName
val mapCls = classOf[ArrayBasedMapData].getName
val convertedKeyType = CodeGenerator.boxedType(keyConverter.dataType)
val convertedValueType = CodeGenerator.boxedType(valueConverter.dataType)
val code = inputMap.code +
code"""
${CodeGenerator.javaType(dataType)} ${ev.value} = ${CodeGenerator.defaultValue(dataType)};
if (!${inputMap.isNull}) {
final int $length = ${inputMap.value}.size();
final Object[] $convertedKeys = new Object[$length];
final Object[] $convertedValues = new Object[$length];
int $index = 0;
$defineEntries
while($entries.hasNext()) {
$defineKeyValue
$keyNullCheck
$valueNullCheck
${genKeyConverter.code}
if (${genKeyConverter.isNull}) {
throw new RuntimeException("Cannot use null as map key!");
} else {
$convertedKeys[$index] = ($convertedKeyType) ${genKeyConverter.value};
}
${genValueConverter.code}
if (${genValueConverter.isNull}) {
$convertedValues[$index] = null;
} else {
$convertedValues[$index] = ($convertedValueType) ${genValueConverter.value};
}
$index++;
}
${ev.value} = new $mapCls(new $arrayCls($convertedKeys), new $arrayCls($convertedValues));
}
"""
ev.copy(code = code, isNull = inputMap.isNull)
}
}
/**
* Constructs a new external row, using the result of evaluating the specified expressions
* as content.
*
* @param children A list of expression to use as content of the external row.
*/
case class CreateExternalRow(children: Seq[Expression], schema: StructType)
extends Expression with NonSQLExpression {
override def dataType: DataType = ObjectType(classOf[Row])
override def nullable: Boolean = false
override def eval(input: InternalRow): Any = {
val values = children.map(_.eval(input)).toArray
new GenericRowWithSchema(values, schema)
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val rowClass = classOf[GenericRowWithSchema].getName
val values = ctx.freshName("values")
val childrenCodes = children.zipWithIndex.map { case (e, i) =>
val eval = e.genCode(ctx)
s"""
|${eval.code}
|if (${eval.isNull}) {
| $values[$i] = null;
|} else {
| $values[$i] = ${eval.value};
|}
""".stripMargin
}
val childrenCode = ctx.splitExpressionsWithCurrentInputs(
expressions = childrenCodes,
funcName = "createExternalRow",
extraArguments = "Object[]" -> values :: Nil)
val schemaField = ctx.addReferenceObj("schema", schema)
val code =
code"""
|Object[] $values = new Object[${children.size}];
|$childrenCode
|final ${classOf[Row].getName} ${ev.value} = new $rowClass($values, $schemaField);
""".stripMargin
ev.copy(code = code, isNull = FalseLiteral)
}
}
/**
* Serializes an input object using a generic serializer (Kryo or Java).
*
* @param kryo if true, use Kryo. Otherwise, use Java.
*/
case class EncodeUsingSerializer(child: Expression, kryo: Boolean)
extends UnaryExpression with NonSQLExpression with SerializerSupport {
override def nullSafeEval(input: Any): Any = {
serializerInstance.serialize(input).array()
}
override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val serializer = addImmutableSerializerIfNeeded(ctx)
// Code to serialize.
val input = child.genCode(ctx)
val javaType = CodeGenerator.javaType(dataType)
val serialize = s"$serializer.serialize(${input.value}, null).array()"
val code = input.code + code"""
final $javaType ${ev.value} =
${input.isNull} ? ${CodeGenerator.defaultValue(dataType)} : $serialize;
"""
ev.copy(code = code, isNull = input.isNull)
}
override def dataType: DataType = BinaryType
}
/**
* Serializes an input object using a generic serializer (Kryo or Java). Note that the ClassTag
* is not an implicit parameter because TreeNode cannot copy implicit parameters.
*
* @param kryo if true, use Kryo. Otherwise, use Java.
*/
case class DecodeUsingSerializer[T](child: Expression, tag: ClassTag[T], kryo: Boolean)
extends UnaryExpression with NonSQLExpression with SerializerSupport {
override def nullSafeEval(input: Any): Any = {
val inputBytes = java.nio.ByteBuffer.wrap(input.asInstanceOf[Array[Byte]])
serializerInstance.deserialize(inputBytes)
}
override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val serializer = addImmutableSerializerIfNeeded(ctx)
// Code to deserialize.
val input = child.genCode(ctx)
val javaType = CodeGenerator.javaType(dataType)
val deserialize =
s"($javaType) $serializer.deserialize(java.nio.ByteBuffer.wrap(${input.value}), null)"
val code = input.code + code"""
final $javaType ${ev.value} =
${input.isNull} ? ${CodeGenerator.defaultValue(dataType)} : $deserialize;
"""
ev.copy(code = code, isNull = input.isNull)
}
override def dataType: DataType = ObjectType(tag.runtimeClass)
}
/**
* Initialize a Java Bean instance by setting its field values via setters.
*/
case class InitializeJavaBean(beanInstance: Expression, setters: Map[String, Expression])
extends Expression with NonSQLExpression {
override def nullable: Boolean = beanInstance.nullable
override def children: Seq[Expression] = beanInstance +: setters.values.toSeq
override def dataType: DataType = beanInstance.dataType
private lazy val resolvedSetters = {
assert(beanInstance.dataType.isInstanceOf[ObjectType])
val ObjectType(beanClass) = beanInstance.dataType
setters.map {
case (name, expr) =>
// Looking for known type mapping.
// But also looking for general `Object`-type parameter for generic methods.
val paramTypes = ScalaReflection.expressionJavaClasses(Seq(expr)) ++ Seq(classOf[Object])
val methods = paramTypes.flatMap { fieldClass =>
try {
Some(beanClass.getDeclaredMethod(name, fieldClass))
} catch {
case e: NoSuchMethodException => None
}
}
if (methods.isEmpty) {
throw new NoSuchMethodException(s"""A method named "$name" is not declared """ +
"in any enclosing class nor any supertype")
}
methods.head -> expr
}
}
override def eval(input: InternalRow): Any = {
val instance = beanInstance.eval(input)
if (instance != null) {
val bean = instance.asInstanceOf[Object]
resolvedSetters.foreach {
case (setter, expr) =>
val paramVal = expr.eval(input)
// We don't call setter if input value is null.
if (paramVal != null) {
setter.invoke(bean, paramVal.asInstanceOf[AnyRef])
}
}
}
instance
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val instanceGen = beanInstance.genCode(ctx)
val javaBeanInstance = ctx.freshName("javaBean")
val beanInstanceJavaType = CodeGenerator.javaType(beanInstance.dataType)
val initialize = setters.map {
case (setterMethod, fieldValue) =>
val fieldGen = fieldValue.genCode(ctx)
s"""
|${fieldGen.code}
|if (!${fieldGen.isNull}) {
| $javaBeanInstance.$setterMethod(${fieldGen.value});
|}
""".stripMargin
}
val initializeCode = ctx.splitExpressionsWithCurrentInputs(
expressions = initialize.toSeq,
funcName = "initializeJavaBean",
extraArguments = beanInstanceJavaType -> javaBeanInstance :: Nil)
val code = instanceGen.code +
code"""
|$beanInstanceJavaType $javaBeanInstance = ${instanceGen.value};
|if (!${instanceGen.isNull}) {
| $initializeCode
|}
""".stripMargin
ev.copy(code = code, isNull = instanceGen.isNull, value = instanceGen.value)
}
}
/**
* Asserts that input values of a non-nullable child expression are not null.
*
* Note that there are cases where `child.nullable == true`, while we still need to add this
* assertion. Consider a nullable column `s` whose data type is a struct containing a non-nullable
* `Int` field named `i`. Expression `s.i` is nullable because `s` can be null. However, for all
* non-null `s`, `s.i` can't be null.
*/
case class AssertNotNull(child: Expression, walkedTypePath: Seq[String] = Nil)
extends UnaryExpression with NonSQLExpression {
override def dataType: DataType = child.dataType
override def foldable: Boolean = false
override def nullable: Boolean = false
override def flatArguments: Iterator[Any] = Iterator(child)
private val errMsg = "Null value appeared in non-nullable field:" +
walkedTypePath.mkString("\\n", "\\n", "\\n") +
"If the schema is inferred from a Scala tuple/case class, or a Java bean, " +
"please try to use scala.Option[_] or other nullable types " +
"(e.g. java.lang.Integer instead of int/scala.Int)."
override def eval(input: InternalRow): Any = {
val result = child.eval(input)
if (result == null) {
throw new NullPointerException(errMsg)
}
result
}
override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val childGen = child.genCode(ctx)
// Use unnamed reference that doesn't create a local field here to reduce the number of fields
// because errMsgField is used only when the value is null.
val errMsgField = ctx.addReferenceObj("errMsg", errMsg)
val code = childGen.code + code"""
if (${childGen.isNull}) {
throw new NullPointerException($errMsgField);
}
"""
ev.copy(code = code, isNull = FalseLiteral, value = childGen.value)
}
}
/**
* Returns the value of field at index `index` from the external row `child`.
* This class can be viewed as [[GetStructField]] for [[Row]]s instead of [[InternalRow]]s.
*
* Note that the input row and the field we try to get are both guaranteed to be not null, if they
* are null, a runtime exception will be thrown.
*/
case class GetExternalRowField(
child: Expression,
index: Int,
fieldName: String) extends UnaryExpression with NonSQLExpression {
override def nullable: Boolean = false
override def dataType: DataType = ObjectType(classOf[Object])
private val errMsg = s"The ${index}th field '$fieldName' of input row cannot be null."
override def eval(input: InternalRow): Any = {
val inputRow = child.eval(input).asInstanceOf[Row]
if (inputRow == null) {
throw new RuntimeException("The input external row cannot be null.")
}
if (inputRow.isNullAt(index)) {
throw new RuntimeException(errMsg)
}
inputRow.get(index)
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
// Use unnamed reference that doesn't create a local field here to reduce the number of fields
// because errMsgField is used only when the field is null.
val errMsgField = ctx.addReferenceObj("errMsg", errMsg)
val row = child.genCode(ctx)
val code = code"""
${row.code}
if (${row.isNull}) {
throw new RuntimeException("The input external row cannot be null.");
}
if (${row.value}.isNullAt($index)) {
throw new RuntimeException($errMsgField);
}
final Object ${ev.value} = ${row.value}.get($index);
"""
ev.copy(code = code, isNull = FalseLiteral)
}
}
/**
* Validates the actual data type of input expression at runtime. If it doesn't match the
* expectation, throw an exception.
*/
case class ValidateExternalType(child: Expression, expected: DataType)
extends UnaryExpression with NonSQLExpression with ExpectsInputTypes {
override def inputTypes: Seq[AbstractDataType] = Seq(ObjectType(classOf[Object]))
override def nullable: Boolean = child.nullable
override val dataType: DataType = RowEncoder.externalDataTypeForInput(expected)
private val errMsg = s" is not a valid external type for schema of ${expected.catalogString}"
private lazy val checkType: (Any) => Boolean = expected match {
case _: DecimalType =>
(value: Any) => {
value.isInstanceOf[java.math.BigDecimal] || value.isInstanceOf[scala.math.BigDecimal] ||
value.isInstanceOf[Decimal]
}
case _: ArrayType =>
(value: Any) => {
value.getClass.isArray || value.isInstanceOf[Seq[_]]
}
case _ =>
val dataTypeClazz = ScalaReflection.javaBoxedType(dataType)
(value: Any) => {
dataTypeClazz.isInstance(value)
}
}
override def eval(input: InternalRow): Any = {
val result = child.eval(input)
if (checkType(result)) {
result
} else {
throw new RuntimeException(s"${result.getClass.getName}$errMsg")
}
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
// Use unnamed reference that doesn't create a local field here to reduce the number of fields
// because errMsgField is used only when the type doesn't match.
val errMsgField = ctx.addReferenceObj("errMsg", errMsg)
val input = child.genCode(ctx)
val obj = input.value
val typeCheck = expected match {
case _: DecimalType =>
Seq(classOf[java.math.BigDecimal], classOf[scala.math.BigDecimal], classOf[Decimal])
.map(cls => s"$obj instanceof ${cls.getName}").mkString(" || ")
case _: ArrayType =>
s"$obj.getClass().isArray() || $obj instanceof ${classOf[Seq[_]].getName}"
case _ =>
s"$obj instanceof ${CodeGenerator.boxedType(dataType)}"
}
val code = code"""
${input.code}
${CodeGenerator.javaType(dataType)} ${ev.value} = ${CodeGenerator.defaultValue(dataType)};
if (!${input.isNull}) {
if ($typeCheck) {
${ev.value} = (${CodeGenerator.boxedType(dataType)}) $obj;
} else {
throw new RuntimeException($obj.getClass().getName() + $errMsgField);
}
}
"""
ev.copy(code = code, isNull = input.isNull)
}
}
| aosagie/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala | Scala | apache-2.0 | 67,094 |
package marge.map
/**
*
* User: mikio
* Date: 4/14/11
* Time: 2:25 PM
*/
import org.junit._
import Assert._
class MostFrequentTokensTest {
@Test
def simple() {
val base = Seq("a a a a a b b b b c c c d d e f g h i j k l m n")
val f = new MostFrequentTokens[String,String](3, base, x => x.split(" "))
println(f.features)
println(f("a b c d e f g c c c c"))
}
} | mikiobraun/marge | src/test/scala/marge/map/MostFrequentTokensTest.scala | Scala | mit | 389 |
object Test extends App {
class A { class V }
abstract class B[S] {
def foo(t: S, a: A)(v: a.V)
}
val b1 = new B[String] {
def foo(t: String, a: A)(v: a.V) = () // Bridge method required here!
}
b1.foo("", null)(null)
}
| felixmulder/scala | test/files/run/t6135.scala | Scala | bsd-3-clause | 243 |
package io.soheila.um.daos.accounts
import javax.inject.Inject
import com.mohiva.play.silhouette.api.LoginInfo
import io.soheila.um.entities.User
import io.soheila.um.exceptions.UMDAOException
import io.soheila.commons.crud.MongoCRUDDAO
import play.api.libs.json._
import play.modules.reactivemongo.ReactiveMongoApi
import play.modules.reactivemongo.json._
import reactivemongo.api.indexes.IndexType.Geo2DSpherical
import reactivemongo.api.indexes.{ Index, IndexType }
import reactivemongo.play.json.collection.JSONCollection
import scala.concurrent.{ ExecutionContext, Future }
/**
* DAO for User entity in MongoDB.
*/
class MongoUserDAO @Inject() (val reactiveMongoApi: ReactiveMongoApi)(implicit override val ec: ExecutionContext)
extends MongoCRUDDAO[User, String] with UserDAO {
override def collection: Future[JSONCollection] = reactiveMongoApi.database.map(_.collection[JSONCollection]("users"))
def find(loginInfo: LoginInfo): Future[Option[User]] = {
collection.flatMap(_.find(Json.obj("loginInfo" -> loginInfo)).one[User])
}
override def archive(uuid: String): Future[Either[UMDAOException, Boolean]] = {
archive(uuid, archived = true)
}
override def restore(uuid: String): Future[Either[UMDAOException, Boolean]] = {
archive(uuid, archived = false)
}
private def archive(uuid: String, archived: Boolean): Future[Either[UMDAOException, Boolean]] = {
collection.flatMap(col => {
val updateOp = col.updateModifier(Json.obj("$set" -> Json.obj("archived" -> archived)), fetchNewObject = true)
col.findAndModify(Json.obj("uuid" -> uuid), updateOp) map {
entity => Right(true)
}
}).recover {
case err =>
Left(UMDAOException(err.getMessage, err))
}
}
override def indexSet: Set[Index] = Set(
Index(Seq("uuid" -> IndexType.Ascending), unique = true),
Index(Seq("email" -> IndexType.Ascending), unique = true),
Index(Seq("roles" -> IndexType.Ascending)),
Index(List("firstName" -> IndexType.Ascending, "lastName" -> IndexType.Ascending, "fullName" -> IndexType.Text)),
Index(Seq("createdOn" -> IndexType.Descending, "updatedOn" -> IndexType.Descending)),
Index(Seq(("coordinate", Geo2DSpherical)), Some("geo2DSphericalIdx")),
Index(List("attributes.key" -> IndexType.Ascending, "attributes.value" -> IndexType.Ascending))
)
}
| esfand-r/soheila-um | src/main/scala/io/soheila/um/daos/accounts/MongoUserDAO.scala | Scala | apache-2.0 | 2,358 |
package de.htwg.zeta.common.format.project.gdsl.style
import scala.util.parsing.combinator.JavaTokenParsers
import de.htwg.zeta.common.models.project.gdsl.style.Color
import play.api.libs.json.JsError
import play.api.libs.json.JsObject
import play.api.libs.json.JsResult
import play.api.libs.json.JsSuccess
import play.api.libs.json.JsValue
import play.api.libs.json.Json
import play.api.libs.json.Reads
class ColorFormat() extends Reads[Color] {
def writes(color: Color): JsObject = {
val Color(r, g, b, a) = color
Json.obj(
"r" -> r,
"g" -> g,
"b" -> b,
"a" -> a,
"rgb" -> s"rgb($r,$g,$b)",
"rgba" -> s"rgba($r,$g,$b,$a)",
"hex" -> "#%02x%02x%02x".format(color.r, g, b)
)
}
override def reads(json: JsValue): JsResult[Color] = {
val parseResult = ColorParser.parseColor(json.toString)
if (parseResult.successful) {
JsSuccess(parseResult.getOrElse(Color.defaultColor))
} else {
JsError()
}
}
private object ColorParser extends JavaTokenParsers {
def parseColor(input: String): ParseResult[Color] = parseAll(parser, input.trim)
def parser: Parser[Color] = {
val comma = ","
val highComma = "\\""
def natural_number: Parser[Int] = "\\\\d+".r ^^ {
_.toInt
}
def argument_double: Parser[Double] = "[+-]?\\\\d+(\\\\.\\\\d+)?".r ^^ {
_.toDouble
}
(opt(highComma) ~> "rgba(" ~> natural_number) ~ (comma ~> natural_number) ~
(comma ~> natural_number) ~ (comma ~> argument_double <~ (")" ~ opt(highComma))) ^^ {
case r ~ g ~ b ~ alpha => Color(r, g, b, alpha)
}
}
}
}
object ColorFormat {
def apply(): ColorFormat = new ColorFormat()
}
| Zeta-Project/zeta | api/common/src/main/scala/de/htwg/zeta/common/format/project/gdsl/style/ColorFormat.scala | Scala | bsd-2-clause | 1,718 |
package models
import org.mindrot.jbcrypt.BCrypt
import reactivemongo.bson.BSONObjectID
case class User(
var _id: Option[BSONObjectID],
email: String,
password: String,
firstName: String,
lastName: String,
pushToken: Option[String],
showIntro: Option[Boolean] = Some(true),
active: Boolean = true) {
def checkPassword(password: String): Boolean = BCrypt.checkpw(password, this.password)
def userWithoutPassword = copy (password = "")
}
| meip/bettermeeting | app/models/user.scala | Scala | apache-2.0 | 579 |
package io.hydrosphere.mist.job
import java.io.File
import java.net.URLClassLoader
import io.hydrosphere.mist.core.CommonData.Action
import io.hydrosphere.mist.job
import io.hydrosphere.mist.utils.{Err, TryLoad}
class FunctionInstanceLoader(val classLoader: ClassLoader) {
def loadFnInstance(className: String, action: Action): TryLoad[JvmFunctionInstance] = {
loadClass(className).flatMap({
case clz if FunctionInstance.isInstance(clz) =>
TryLoad(job.FunctionInstance.loadObject(clz))
.orElse(TryLoad(job.FunctionInstance.loadClass(clz)))
case _ =>
val e = new IllegalStateException(s"Can not instantiate job class: $className for action $action")
Err(e)
})
}
private def loadClass(name: String): TryLoad[Class[_]] =
TryLoad(Class.forName(name, false, classLoader))
}
object FunctionInstanceLoader {
val Common = new FunctionInstanceLoader(this.getClass.getClassLoader)
def fromJar(file: File): FunctionInstanceLoader = {
val url = file.toURI.toURL
val loader = new URLClassLoader(Array(url), getClass.getClassLoader)
new FunctionInstanceLoader(loader)
}
}
| Hydrospheredata/mist | mist/worker/src/main/scala/io/hydrosphere/mist/job/FunctionInstanceLoader.scala | Scala | apache-2.0 | 1,150 |
package com.github.dcapwell.docker.builder.lang
import com.github.dcapwell.docker.builder.Base
import scalaz.Scalaz._
class TypeCheckerTest extends Base {
private[this] val centos = Trait.unapply(Lexer.lex(source("traits/centos.docker")).get).get
private[this] val java = Trait.unapply(Lexer.lex(source("traits/openjdk-7.docker")).get).get
private[this] val hadoop = Trait.unapply(Lexer.lex(source("traits/hadoop-datanode-24.docker")).get).get
Seq(
List(),
List(centos),
List(centos, java),
List(centos, java, hadoop)
).foreach { input =>
s"${input.mkString(" with ")} should typecheck" in {
val rsp = TypeChecker.typecheck(input)
rsp shouldBe input.successNel[Error]
}
}
Seq(
(List(java), Error("Self requirements not satisfied; Some(Named(java:openjdk-1.7)) requires List(Name(rhel))")),
(List(hadoop), Error("Self requirements not satisfied; Some(Named(datanode:datanode-2.4)) requires List(Name(rhel), Name(java))")),
(List(hadoop, java), Error("Self requirements not fully satisfied")), //TODO improve
(List(hadoop, java, centos), Error("Only root trait can define the FROM instruction; Some(Named(rhel:centos6)) has From(centos:centos6)"))
).foreach {
case (input, error) =>
s"${input.mkString(" with ")} should be rejected typecheck" in {
val rsp = TypeChecker.typecheck(input)
rsp shouldNot be(input.successNel[Error])
rsp shouldBe error.failNel[List[Trait]]
}
}
}
| dcapwell/docker-builder | dockerbuilder/src/test/scala/com/github/dcapwell/docker/builder/lang/TypeCheckerTest.scala | Scala | mit | 1,489 |
package eventstore
package core
package operations
import scala.util.Try
private[eventstore] trait Inspection {
def expected: Class[_]
def pf: PartialFunction[Try[In], Inspection.Decision]
}
private[eventstore] object Inspection {
sealed trait Decision
object Decision {
case object Stop extends Decision
case object Retry extends Decision
case object Unexpected extends Decision
final case class Fail(value: EsException) extends Decision
}
} | EventStore/EventStore.JVM | core/src/main/scala/eventstore/core/operations/Inspection.scala | Scala | bsd-3-clause | 472 |
/*
* Copyright 2016 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations
import uk.gov.hmrc.ct.box.{CtBoxIdentifier, CtOptionalInteger, Input}
case class CP35(value: Option[Int]) extends CtBoxIdentifier(name = "Vehicle expenses") with CtOptionalInteger with Input
object CP35 {
def apply(int: Int): CP35 = CP35(Some(int))
}
| ahudspith-equalexperts/ct-calculations | src/main/scala/uk/gov/hmrc/ct/computations/CP35.scala | Scala | apache-2.0 | 898 |
package wdltool
import java.nio.file.Paths
import cats.data.Validated.{Invalid, Valid}
import wdl4s.wdl.formatter.{AnsiSyntaxHighlighter, HtmlSyntaxHighlighter, SyntaxFormatter}
import wdl4s.wdl._
import spray.json._
import wdltool.graph.{GraphPrint, WomGraph}
import scala.util.{Failure, Success}
object Main extends App {
sealed trait Termination {
def output: String
def returnCode: Int
}
case class SuccessfulTermination(output: String) extends Termination {
override val returnCode = 0
}
case class UnsuccessfulTermination(output: String) extends Termination {
override val returnCode = 1
}
case object BadUsageTermination extends Termination {
override val returnCode = 1
override val output = UsageMessage
}
def dispatchCommand(args: Seq[String]): Termination = {
getAction(args) match {
case Some(x) if x == Actions.Validate => validate(args.tail)
case Some(x) if x == Actions.Highlight => highlight(args.tail)
case Some(x) if x == Actions.Inputs => inputs(args.tail)
case Some(x) if x == Actions.Parse => parse(args.tail)
case Some(x) if x == Actions.Graph => graph(args.tail)
case Some(x) if x == Actions.Womgraph => womGraph(args.tail)
case _ => BadUsageTermination
}
}
def validate(args: Seq[String]): Termination = {
continueIf(args.length == 1) {
loadWdl(args.head) { _ => SuccessfulTermination("") }
}
}
def highlight(args: Seq[String]): Termination = {
continueIf(args.length == 2 && Seq("html", "console").contains(args(1))) {
loadWdl(args.head) { namespace =>
val formatter = new SyntaxFormatter(if (args(1) == "html") HtmlSyntaxHighlighter else AnsiSyntaxHighlighter)
SuccessfulTermination(formatter.format(namespace))
}
}
}
def inputs(args: Seq[String]): Termination = {
continueIf(args.length == 1) {
loadWdl(args.head) { namespace =>
import wdl4s.wdl.types.WdlTypeJsonFormatter._
val msg = namespace match {
case x: WdlNamespaceWithWorkflow => x.workflow.inputs.toJson.prettyPrint
case _ => "WDL does not have a local workflow"
}
SuccessfulTermination(msg)
}
}
}
def parse(args: Seq[String]): Termination = {
continueIf(args.length == 1) {
SuccessfulTermination(AstTools.getAst(Paths.get(args.head)).toPrettyString)
}
}
def graph(args: Seq[String]): Termination = {
continueIf(args.length == 1) {
val file = args.head
val workflowDigraph = GraphPrint.generateWorkflowDigraph(file)
val result = s"""|digraph ${workflowDigraph.workflowName} {
| compound=true;
| ${workflowDigraph.digraph.links.mkString(System.lineSeparator + " ")}
| ${workflowDigraph.digraph.nodes.mkString(System.lineSeparator + " ")}
|}
|"""
SuccessfulTermination(result.stripMargin)
}
}
def womGraph(args: Seq[String]): Termination = {
continueIf(args.nonEmpty) {
val (mainFile, auxFiles) = (args.head, args.tail)
WomGraph.fromFiles(mainFile, auxFiles) match {
case Valid(womGraph) => SuccessfulTermination(womGraph.digraphDot)
case Invalid(errors) => UnsuccessfulTermination("Unable to construct wom graph:" + errors.toList.mkString("\\n", "\\n", "\\n"))
}
}
}
private[this] def continueIf(valid: => Boolean)(block: => Termination): Termination = if (valid) block else BadUsageTermination
private[this] def loadWdl(path: String)(f: WdlNamespace => Termination): Termination = {
WdlNamespace.loadUsingPath(Paths.get(path), None, None) match {
case Success(namespace) => f(namespace)
case Failure(t) => UnsuccessfulTermination(t.getMessage)
}
}
private def getAction(args: Seq[String]): Option[Actions.Value] = for {
arg <- args.headOption
argCapitalized = arg.capitalize
action <- Actions.values find (_.toString == argCapitalized)
} yield action
object Actions extends Enumeration {
val Parse, Validate, Highlight, Inputs, Graph, Womgraph = Value
}
val UsageMessage = """
|java -jar wdltool.jar <action> <parameters>
|
|Actions:
|validate <WDL file>
|
| Performs full validation of the WDL file including syntax
| and semantic checking
|
|inputs <WDL file>
|
| Print a JSON skeleton file of the inputs needed for this
| workflow. Fill in the values in this JSON document and
| pass it in to the 'run' subcommand.
|
|highlight <WDL file> <html|console>
|
| Reformats and colorizes/tags a WDL file. The second
| parameter is the output type. "html" will output the WDL
| file with <span> tags around elements. "console" mode
| will output colorized text to the terminal
|
|parse <WDL file>
|
| Compares a WDL file against the grammar and prints out an
| abstract syntax tree if it is valid, and a syntax error
| otherwise. Note that higher-level AST checks are not done
| via this sub-command and the 'validate' subcommand should
| be used for full validation.
|
|graph <WDL file>
|
| Reads a WDL file against the grammar and prints out a
| .dot of the DAG if it is valid, and a syntax error
| otherwise.
|
|womgraph <WDL or CWL file> [ancillary files]
|
| Reads a WDL or CWL file from the first argument and
| converts it to a WOM representation then prints out a graph
| of the WOM produced.
| Any imported files can be supplied as subsequent arguments.
|
""".stripMargin
val termination = dispatchCommand(args)
termination match {
case SuccessfulTermination(s) => println(s)
case UnsuccessfulTermination(s) => Console.err.println(s)
case BadUsageTermination => Console.err.println(UsageMessage)
}
sys.exit(termination.returnCode)
}
| broadinstitute/wdltool | src/main/scala/wdltool/Main.scala | Scala | bsd-3-clause | 6,782 |
/*
* Copyright 2007-2010 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb {
package widgets {
package autocomplete {
import _root_.scala.xml.{NodeSeq, Node, Elem, PCData, Text, Unparsed}
import _root_.net.liftweb.common._
import _root_.net.liftweb.util._
import _root_.net.liftweb.http._
import _root_.net.liftweb.http.js._
import JsCmds._
import JE._
import S._
import SHtml._
import Helpers._
object AutoComplete {
def apply(start: String,
options: (String, Int) => Seq[String],
onSubmit: String => Unit,
attrs: (String, String)*) = new AutoComplete().render(start, options, onSubmit, attrs:_*)
def autocompleteObj[T](options: Seq[(T, String)],
default: Box[T],
onSubmit: T => Unit): Elem = new AutoComplete().autocompleteObj(options, default, onSubmit)
/**
* register the resources with lift (typically in boot)
*/
def init() {
import net.liftweb.http.ResourceServer
ResourceServer.allow({
case "autocomplete" :: _ => true
})
}
}
class AutoComplete {
/**
* Create an autocomplete form based on a sequence.
*/
def autocompleteObj[T](options: Seq[(T, String)],
default: Box[T],
onSubmit: T => Unit): Elem = {
val (nonces, defaultNonce, secureOnSubmit) = secureOptions(options, default, onSubmit)
val defaultString = default.flatMap(d => options.find(_._1 == d).map(_._2))
autocomplete_*(nonces, defaultString, defaultNonce, secureOnSubmit)
}
private def autocomplete_*(options: Seq[(String, String)], default: Box[String],
defaultNonce: Box[String], onSubmit: AFuncHolder): Elem = {
val id = Helpers.nextFuncName
fmapFunc(onSubmit){hidden =>
val data = JsArray(options.map {
case (nonce, name) => JsObj("name" -> name, "nonce" -> nonce)
} :_*)
val autocompleteOptions = JsRaw("""{
minChars: 0,
matchContains: true,
formatItem: function(row, i, max) { return row.name; },
}""")
val onLoad = JsRaw("""
jQuery(document).ready(function(){
var data = """+data.toJsCmd+""";
jQuery("#"""+id+"""").autocomplete(data, """+autocompleteOptions.toJsCmd+""").result(function(event, dt, formatted) {
jQuery("#"""+hidden+"""").val(formatted);
});
});""")
<span>
<head>
<link rel="stylesheet" href={"/" + LiftRules.resourceServerPath +"/autocomplete/jquery.autocomplete.css"} type="text/css" />
<script type="text/javascript" src={"/" + LiftRules.resourceServerPath +"/autocomplete/jquery.autocomplete.js"} />
<script type="text/javascript">{Unparsed(onLoad.toJsCmd)}</script>
</head>
<input type="text" id={id} value={default.openOr("")} />
<input type="hidden" name={hidden} id={hidden} value={defaultNonce.openOr("")} />
</span>
}
}
private def secureOptions[T](options: Seq[(T, String)], default: Box[T],
onSubmit: T => Unit): (Seq[(String, String)], Box[String], AFuncHolder) = {
val secure = options.map{case (obj, txt) => (obj, randomString(20), txt)}
val defaultNonce = default.flatMap(d => secure.find(_._1 == d).map(_._2))
val nonces = secure.map{case (obj, nonce, txt) => (nonce, txt)}
def process(nonce: String): Unit = secure.find(_._2 == nonce).map(x => onSubmit(x._1))
(nonces, defaultNonce, SFuncHolder(process))
}
/**
* Render a text field with Ajax autocomplete support
*
* @param start - the initial input string
* @param option - the function to be called when user is typing text. The text and th options limit is provided to this functions
* @param attrs - the attributes that can be added to the input text field
*/
def render(start: String, options: (String, Int) => Seq[String],
onSubmit: String => Unit, attrs: (String, String)*): Elem = {
val f = (ignore: String) => {
val q = S.param("q").openOr("")
val limit = S.param("limit").flatMap(asInt).openOr(10)
PlainTextResponse(options(q, limit).map(s => s+"|"+s).mkString("\\n"))
}
fmapFunc(SFuncHolder(f)){ func =>
val what: String = encodeURL(S.contextPath + "/" + LiftRules.ajaxPath+"?"+func+"=foo")
val id = Helpers.nextFuncName
fmapFunc(SFuncHolder(onSubmit)){hidden =>
val autocompleteOptions = JsRaw("""{
minChars: 0,
matchContains: true
}""")
val onLoad = JsRaw("""
jQuery(document).ready(function(){
var data = """+what.encJs+""";
jQuery("#"""+id+"""").autocomplete(data, """+autocompleteOptions.toJsCmd+""").result(function(event, dt, formatted) {
jQuery("#"""+hidden+"""").val(formatted);
});
});""")
<span>
<head>
<link rel="stylesheet" href={"/" + LiftRules.resourceServerPath +"/autocomplete/jquery.autocomplete.css"} type="text/css" />
<script type="text/javascript" src={"/" + LiftRules.resourceServerPath +"/autocomplete/jquery.autocomplete.js"} />
<script type="text/javascript">{Unparsed(onLoad.toJsCmd)}</script>
</head>
{
attrs.foldLeft(<input type="text" id={id} value={start} />)(_ % _)
}
<input type="hidden" name={hidden} id={hidden} value={start} />
</span>
}
}
}
}
}
}
}
| jeppenejsum/liftweb | framework/lift-modules/lift-widgets/src/main/scala/net/liftweb/widgets/autocomplete/AutoComplete.scala | Scala | apache-2.0 | 6,022 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.metadata
import org.apache.flink.table.connector.source.ScanTableSource
import org.apache.flink.table.planner.calcite.FlinkTypeFactory
import org.apache.flink.table.planner.functions.utils.ScalarSqlFunction
import org.apache.flink.table.planner.plan.`trait`.RelModifiedMonotonicity
import org.apache.flink.table.planner.plan.metadata.FlinkMetadata.ModifiedMonotonicity
import org.apache.flink.table.planner.plan.nodes.calcite.{Expand, Rank, TableAggregate, WindowAggregate, WindowTableAggregate}
import org.apache.flink.table.planner.plan.nodes.logical._
import org.apache.flink.table.planner.plan.nodes.physical.batch.{BatchPhysicalCorrelate, BatchPhysicalGroupAggregateBase}
import org.apache.flink.table.planner.plan.nodes.physical.stream._
import org.apache.flink.table.planner.plan.schema.{FlinkPreparingTableBase, TableSourceTable}
import org.apache.flink.table.planner.plan.stats.{WithLower, WithUpper}
import org.apache.flink.table.planner.{JByte, JDouble, JFloat, JList, JLong, JShort}
import org.apache.flink.types.RowKind
import org.apache.calcite.plan.hep.HepRelVertex
import org.apache.calcite.plan.volcano.RelSubset
import org.apache.calcite.rel.core._
import org.apache.calcite.rel.metadata._
import org.apache.calcite.rel.{RelCollation, RelFieldCollation, RelNode}
import org.apache.calcite.rex.{RexCall, RexCallBinding, RexInputRef, RexNode}
import org.apache.calcite.sql.fun.{SqlCountAggFunction, SqlMinMaxAggFunction, SqlSumAggFunction, SqlSumEmptyIsZeroAggFunction}
import org.apache.calcite.sql.validate.SqlMonotonicity
import org.apache.calcite.sql.validate.SqlMonotonicity._
import org.apache.calcite.sql.{SqlKind, SqlOperatorBinding}
import org.apache.calcite.util.Util
import java.math.{BigDecimal => JBigDecimal}
import java.sql.{Date, Time, Timestamp}
import java.util.Collections
import scala.collection.JavaConversions._
/**
* FlinkRelMdModifiedMonotonicity supplies a default implementation of
* [[FlinkRelMetadataQuery#getRelModifiedMonotonicity]] for logical algebra.
*/
class FlinkRelMdModifiedMonotonicity private extends MetadataHandler[ModifiedMonotonicity] {
override def getDef: MetadataDef[ModifiedMonotonicity] = FlinkMetadata.ModifiedMonotonicity.DEF
def getRelModifiedMonotonicity(rel: TableScan, mq: RelMetadataQuery): RelModifiedMonotonicity = {
val monotonicity: RelModifiedMonotonicity = rel match {
case _: FlinkLogicalDataStreamTableScan | _: StreamPhysicalDataStreamScan =>
val table = rel.getTable.unwrap(classOf[FlinkPreparingTableBase])
table.getStatistic.getRelModifiedMonotonicity
case _: FlinkLogicalTableSourceScan | _: StreamPhysicalTableSourceScan =>
val table = rel.getTable.unwrap(classOf[TableSourceTable])
table.tableSource match {
case sts: ScanTableSource if !sts.getChangelogMode.containsOnly(RowKind.INSERT) =>
// changelog source can't produce CONSTANT ModifiedMonotonicity
new RelModifiedMonotonicity(Array.fill(rel.getRowType.getFieldCount)(NOT_MONOTONIC))
case _ => null
}
case _ => null
}
if (monotonicity != null) {
monotonicity
} else {
new RelModifiedMonotonicity(Array.fill(rel.getRowType.getFieldCount)(CONSTANT))
}
}
def getRelModifiedMonotonicity(rel: Project, mq: RelMetadataQuery): RelModifiedMonotonicity = {
getProjectMonotonicity(rel.getProjects, rel.getInput, mq)
}
def getRelModifiedMonotonicity(rel: Calc, mq: RelMetadataQuery): RelModifiedMonotonicity = {
val projects = rel.getProgram.getProjectList.map(rel.getProgram.expandLocalRef)
getProjectMonotonicity(projects, rel.getInput, mq)
}
private def getProjectMonotonicity(
projects: JList[RexNode],
input: RelNode,
mq: RelMetadataQuery): RelModifiedMonotonicity = {
val fmq = FlinkRelMetadataQuery.reuseOrCreate(mq)
// contains delete
if (containsDelete(mq, input)) {
return null
}
// all append
if (allAppend(fmq, input)) {
return new RelModifiedMonotonicity(Array.fill(projects.size)(CONSTANT))
}
// contains update
// init field monotonicities
val fieldMonotonicities = Array.fill(projects.size())(NOT_MONOTONIC)
val inputFieldMonotonicities = fmq.getRelModifiedMonotonicity(input).fieldMonotonicities
def getInputFieldIndex(node: RexNode, indexInProject: Int): Int = {
node match {
case ref: RexInputRef =>
fieldMonotonicities(indexInProject) = inputFieldMonotonicities(ref.getIndex)
ref.getIndex
case a: RexCall if a.getKind == SqlKind.AS || a.getKind == SqlKind.CAST =>
getInputFieldIndex(a.getOperands.get(0), indexInProject)
case c: RexCall if c.getOperands.size() == 1 =>
c.getOperator match {
case ssf: ScalarSqlFunction =>
val inputIndex = getInputFieldIndex(c.getOperands.get(0), indexInProject)
// collations of stream node are empty currently.
val binding = RexCallBinding.create(
input.getCluster.getTypeFactory, c, Collections.emptyList[RelCollation])
val udfMonotonicity = getUdfMonotonicity(ssf, binding)
val inputMono = if (inputIndex > -1) {
inputFieldMonotonicities(inputIndex)
} else {
NOT_MONOTONIC
}
if (inputMono == udfMonotonicity) {
fieldMonotonicities(indexInProject) = inputMono
} else {
fieldMonotonicities(indexInProject) = NOT_MONOTONIC
}
inputIndex
case _ => -1
}
case _ => -1
}
}
// copy child mono
projects.zipWithIndex.foreach { case (expr, idx) =>
getInputFieldIndex(expr, idx)
}
new RelModifiedMonotonicity(fieldMonotonicities)
}
def getRelModifiedMonotonicity(rel: Expand, mq: RelMetadataQuery): RelModifiedMonotonicity = {
getMonotonicity(rel.getInput(0), mq, rel.getRowType.getFieldCount)
}
def getRelModifiedMonotonicity(rel: Rank, mq: RelMetadataQuery): RelModifiedMonotonicity = {
val fmq = FlinkRelMetadataQuery.reuseOrCreate(mq)
val inputMonotonicity = fmq.getRelModifiedMonotonicity(rel.getInput)
// If child monotonicity is null, we should return early.
if (inputMonotonicity == null) {
return null
}
// if partitionBy a update field or partitionBy a field whose mono is null, just return null
if (rel.partitionKey.exists(e => inputMonotonicity.fieldMonotonicities(e) != CONSTANT)) {
return null
}
val fieldCount = rel.getRowType.getFieldCount
// init current mono
val currentMonotonicity = notMonotonic(fieldCount)
// 1. partitionBy field is CONSTANT
rel.partitionKey.foreach(e => currentMonotonicity.fieldMonotonicities(e) = CONSTANT)
// 2. row number filed is CONSTANT
if (rel.outputRankNumber) {
currentMonotonicity.fieldMonotonicities(fieldCount - 1) = CONSTANT
}
// 3. time attribute field is increasing
(0 until fieldCount).foreach(e => {
if (FlinkTypeFactory.isTimeIndicatorType(rel.getRowType.getFieldList.get(e).getType)) {
inputMonotonicity.fieldMonotonicities(e) = INCREASING
}
})
val fieldCollations = rel.orderKey.getFieldCollations
if (fieldCollations.nonEmpty) {
// 4. process the first collation field, we can only deduce the first collation field
val firstCollation = fieldCollations.get(0)
// Collation field index in child node will be same with Rank node,
// see ProjectToLogicalProjectAndWindowRule for details.
val fieldMonotonicity = inputMonotonicity.fieldMonotonicities(firstCollation.getFieldIndex)
val result = fieldMonotonicity match {
case SqlMonotonicity.INCREASING | SqlMonotonicity.CONSTANT
if firstCollation.direction == RelFieldCollation.Direction.DESCENDING => INCREASING
case SqlMonotonicity.DECREASING | SqlMonotonicity.CONSTANT
if firstCollation.direction == RelFieldCollation.Direction.ASCENDING => DECREASING
case _ => NOT_MONOTONIC
}
currentMonotonicity.fieldMonotonicities(firstCollation.getFieldIndex) = result
}
currentMonotonicity
}
def getRelModifiedMonotonicity(
rel: StreamExecDeduplicate,
mq: RelMetadataQuery): RelModifiedMonotonicity = {
if (allAppend(mq, rel.getInput)) {
val mono = new RelModifiedMonotonicity(
Array.fill(rel.getRowType.getFieldCount)(NOT_MONOTONIC))
rel.getUniqueKeys.foreach(e => mono.fieldMonotonicities(e) = CONSTANT)
mono
} else {
null
}
}
def getRelModifiedMonotonicity(
rel: StreamPhysicalChangelogNormalize,
mq: RelMetadataQuery): RelModifiedMonotonicity = {
val mono = new RelModifiedMonotonicity(Array.fill(rel.getRowType.getFieldCount)(NOT_MONOTONIC))
rel.uniqueKeys.foreach(e => mono.fieldMonotonicities(e) = CONSTANT)
mono
}
def getRelModifiedMonotonicity(
rel: StreamPhysicalDropUpdateBefore,
mq: RelMetadataQuery): RelModifiedMonotonicity = {
getMonotonicity(rel.getInput, mq, rel.getRowType.getFieldCount)
}
def getRelModifiedMonotonicity(
rel: StreamPhysicalWatermarkAssigner,
mq: RelMetadataQuery): RelModifiedMonotonicity = {
getMonotonicity(rel.getInput, mq, rel.getRowType.getFieldCount)
}
def getRelModifiedMonotonicity(
rel: StreamPhysicalMiniBatchAssigner,
mq: RelMetadataQuery): RelModifiedMonotonicity = {
getMonotonicity(rel.getInput, mq, rel.getRowType.getFieldCount)
}
def getRelModifiedMonotonicity(rel: Exchange, mq: RelMetadataQuery): RelModifiedMonotonicity = {
// for exchange, get correspond from input
val fmq = FlinkRelMetadataQuery.reuseOrCreate(mq)
fmq.getRelModifiedMonotonicity(rel.getInput)
}
def getRelModifiedMonotonicity(rel: Aggregate, mq: RelMetadataQuery): RelModifiedMonotonicity = {
getRelModifiedMonotonicityOnAggregate(rel.getInput, mq, rel.getAggCallList.toList,
rel.getGroupSet.toArray)
}
def getRelModifiedMonotonicity(
rel: WindowTableAggregate,
mq: RelMetadataQuery): RelModifiedMonotonicity = {
if (allAppend(mq, rel.getInput)) {
constants(rel.getRowType.getFieldCount)
} else {
null
}
}
def getRelModifiedMonotonicity(
rel: TableAggregate, mq: RelMetadataQuery): RelModifiedMonotonicity = {
getRelModifiedMonotonicityOnTableAggregate(
rel.getInput, rel.getGroupSet.toArray, rel.getRowType.getFieldCount, mq)
}
def getRelModifiedMonotonicity(
rel: BatchPhysicalGroupAggregateBase,
mq: RelMetadataQuery): RelModifiedMonotonicity = null
def getRelModifiedMonotonicity(
rel: StreamPhysicalGroupAggregate,
mq: RelMetadataQuery): RelModifiedMonotonicity = {
getRelModifiedMonotonicityOnAggregate(rel.getInput, mq, rel.aggCalls.toList, rel.grouping)
}
def getRelModifiedMonotonicity(
rel: StreamPhysicalGroupTableAggregate,
mq: RelMetadataQuery): RelModifiedMonotonicity = {
getRelModifiedMonotonicityOnTableAggregate(
rel.getInput, rel.grouping, rel.getRowType.getFieldCount, mq)
}
def getRelModifiedMonotonicity(
rel: StreamPhysicalGlobalGroupAggregate,
mq: RelMetadataQuery): RelModifiedMonotonicity = {
// global and local agg should have same update monotonicity
val fmq = FlinkRelMetadataQuery.reuseOrCreate(mq)
fmq.getRelModifiedMonotonicity(rel.getInput)
}
def getRelModifiedMonotonicity(
rel: StreamPhysicalLocalGroupAggregate,
mq: RelMetadataQuery): RelModifiedMonotonicity = {
getRelModifiedMonotonicityOnAggregate(rel.getInput, mq, rel.aggCalls.toList, rel.grouping)
}
def getRelModifiedMonotonicity(
rel: StreamPhysicalIncrementalGroupAggregate,
mq: RelMetadataQuery): RelModifiedMonotonicity = {
getRelModifiedMonotonicityOnAggregate(
rel.getInput, mq, rel.finalAggCalls.toList, rel.finalAggGrouping)
}
def getRelModifiedMonotonicity(
rel: WindowAggregate,
mq: RelMetadataQuery): RelModifiedMonotonicity = null
def getRelModifiedMonotonicity(
rel: StreamPhysicalGroupWindowAggregate,
mq: RelMetadataQuery): RelModifiedMonotonicity = {
if (allAppend(mq, rel.getInput) && !rel.emitStrategy.produceUpdates) {
constants(rel.getRowType.getFieldCount)
} else {
null
}
}
def getRelModifiedMonotonicity(
rel: StreamPhysicalGroupWindowTableAggregate,
mq: RelMetadataQuery): RelModifiedMonotonicity = {
if (allAppend(mq, rel.getInput)) {
constants(rel.getRowType.getFieldCount)
} else {
null
}
}
def getRelModifiedMonotonicity(
rel: FlinkLogicalOverAggregate,
mq: RelMetadataQuery): RelModifiedMonotonicity = constants(rel.getRowType.getFieldCount)
def getRelModifiedMonotonicity(
rel: StreamExecOverAggregate,
mq: RelMetadataQuery): RelModifiedMonotonicity = constants(rel.getRowType.getFieldCount)
def getRelModifiedMonotonicityOnTableAggregate(
input: RelNode,
grouping: Array[Int],
rowSize: Int,
mq: RelMetadataQuery): RelModifiedMonotonicity = {
val fmq = FlinkRelMetadataQuery.reuseOrCreate(mq)
val inputMonotonicity = fmq.getRelModifiedMonotonicity(input)
// if group by an update field or group by a field mono is null, just return null
if (inputMonotonicity == null ||
grouping.exists(e => inputMonotonicity.fieldMonotonicities(e) != CONSTANT)) {
return null
}
val groupCnt = grouping.length
val fieldMonotonicity =
Array.fill(groupCnt)(CONSTANT) ++ Array.fill(rowSize - grouping.length)(NOT_MONOTONIC)
new RelModifiedMonotonicity(fieldMonotonicity)
}
def getRelModifiedMonotonicityOnAggregate(
input: RelNode,
mq: RelMetadataQuery,
aggCallList: List[AggregateCall],
grouping: Array[Int]): RelModifiedMonotonicity = {
val fmq = FlinkRelMetadataQuery.reuseOrCreate(mq)
val inputMonotonicity = fmq.getRelModifiedMonotonicity(input)
// if group by a update field or group by a field mono is null, just return null
if (inputMonotonicity == null ||
grouping.exists(e => inputMonotonicity.fieldMonotonicities(e) != CONSTANT)) {
return null
}
val groupCnt = grouping.length
// init monotonicity for group keys and agg calls
val fieldMonotonicities =
Array.fill(groupCnt)(CONSTANT) ++ Array.fill(aggCallList.size)(NOT_MONOTONIC)
// get original monotonicity ignore input
aggCallList.zipWithIndex.foreach { case (aggCall, idx) =>
val aggCallMonotonicity = getMonotonicityOnAggCall(aggCall, fmq, input)
fieldMonotonicities(idx + groupCnt) = aggCallMonotonicity
}
// need to re-calculate monotonicity if child contains update
if (containsUpdate(fmq, input)) {
aggCallList.zipWithIndex.foreach { case (aggCall, idx) =>
val index = groupCnt + idx
if (aggCall.getArgList.size() > 1) {
fieldMonotonicities(index) = NOT_MONOTONIC
} else if (aggCall.getArgList.size() == 1) {
val childMono = inputMonotonicity.fieldMonotonicities(aggCall.getArgList.head)
val currentMono = fieldMonotonicities(index)
if (childMono != currentMono &&
!aggCall.getAggregation.isInstanceOf[SqlCountAggFunction]) {
// count will Increasing even child is NOT_MONOTONIC
fieldMonotonicities(index) = NOT_MONOTONIC
}
}
}
}
new RelModifiedMonotonicity(fieldMonotonicities)
}
def getMonotonicityOnAggCall(
aggCall: AggregateCall,
mq: RelMetadataQuery,
input: RelNode): SqlMonotonicity = {
val fmq = FlinkRelMetadataQuery.reuseOrCreate(mq)
aggCall.getAggregation match {
case _: SqlCountAggFunction => INCREASING
case minMax: SqlMinMaxAggFunction => minMax.kind match {
case SqlKind.MAX => INCREASING
case SqlKind.MIN => DECREASING
case _ => NOT_MONOTONIC
}
case _: SqlSumAggFunction | _: SqlSumEmptyIsZeroAggFunction =>
val valueInterval = fmq.getFilteredColumnInterval(
input, aggCall.getArgList.head, aggCall.filterArg)
if (valueInterval == null) {
NOT_MONOTONIC
} else {
valueInterval match {
case n1: WithLower =>
val compare = isValueGreaterThanZero(n1.lower)
if (compare >= 0) {
INCREASING
} else {
NOT_MONOTONIC
}
case n2: WithUpper =>
val compare = isValueGreaterThanZero(n2.upper)
if (compare <= 0) {
DECREASING
} else {
NOT_MONOTONIC
}
case _ =>
// value range has no lower end
NOT_MONOTONIC
}
}
case _ => NOT_MONOTONIC
}
}
def getRelModifiedMonotonicity(rel: Join, mq: RelMetadataQuery): RelModifiedMonotonicity = {
val joinType = rel.getJoinType
if (joinType.equals(JoinRelType.ANTI)) {
return null
}
val left = rel.getLeft
val right = rel.getRight
val joinInfo = rel.analyzeCondition
val leftKeys = joinInfo.leftKeys
val rightKeys = joinInfo.rightKeys
val fmq = FlinkRelMetadataQuery.reuseOrCreate(mq)
// if group set contains update return null
val containDelete = containsDelete(fmq, left) || containsDelete(fmq, right)
val containUpdate = containsUpdate(fmq, left) || containsUpdate(fmq, right)
def isAllConstantOnKeys(rel: RelNode, keys: Array[Int]): Boolean = {
val mono = fmq.getRelModifiedMonotonicity(rel)
keys.forall(mono != null && mono.fieldMonotonicities(_) == CONSTANT)
}
val isKeyAllAppend = isAllConstantOnKeys(left, leftKeys.toIntArray) &&
isAllConstantOnKeys(right, rightKeys.toIntArray)
if (!containDelete && isKeyAllAppend && (containUpdate && joinInfo.isEqui || !containUpdate)) {
// output rowtype of semi equals to the rowtype of left child
if (joinType.equals(JoinRelType.SEMI)) {
fmq.getRelModifiedMonotonicity(left)
} else {
val leftFieldMonotonicities = fmq.getRelModifiedMonotonicity(left).fieldMonotonicities
val rightFieldMonotonicities = fmq.getRelModifiedMonotonicity(right).fieldMonotonicities
new RelModifiedMonotonicity(leftFieldMonotonicities ++ rightFieldMonotonicities)
}
} else {
null
}
}
def getRelModifiedMonotonicity(
rel: StreamExecIntervalJoin,
mq: RelMetadataQuery): RelModifiedMonotonicity = {
// window join won't have update
constants(rel.getRowType.getFieldCount)
}
def getRelModifiedMonotonicity(rel: Correlate, mq: RelMetadataQuery): RelModifiedMonotonicity = {
getMonotonicity(rel.getInput(0), mq, rel.getRowType.getFieldCount)
}
def getRelModifiedMonotonicity(
rel: BatchPhysicalCorrelate,
mq: RelMetadataQuery): RelModifiedMonotonicity = null
def getRelModifiedMonotonicity(
rel: StreamPhysicalCorrelate,
mq: RelMetadataQuery): RelModifiedMonotonicity = {
getMonotonicity(rel.getInput(0), mq, rel.getRowType.getFieldCount)
}
// TODO supports temporal table function join
def getRelModifiedMonotonicity(rel: Union, mq: RelMetadataQuery): RelModifiedMonotonicity = {
val fmq = FlinkRelMetadataQuery.reuseOrCreate(mq)
if (rel.getInputs.exists(p => containsDelete(fmq, p))) {
null
} else {
val inputMonotonicities = rel.getInputs.map(fmq.getRelModifiedMonotonicity)
val head = inputMonotonicities.head
if (inputMonotonicities.forall(head.equals(_))) {
head
} else {
notMonotonic(rel.getRowType.getFieldCount)
}
}
}
def getRelModifiedMonotonicity(
hepRelVertex: HepRelVertex,
mq: RelMetadataQuery): RelModifiedMonotonicity = {
val fmq = FlinkRelMetadataQuery.reuseOrCreate(mq)
fmq.getRelModifiedMonotonicity(hepRelVertex.getCurrentRel)
}
def getRelModifiedMonotonicity(
subset: RelSubset,
mq: RelMetadataQuery): RelModifiedMonotonicity = {
val fmq = FlinkRelMetadataQuery.reuseOrCreate(mq)
val rel = Util.first(subset.getBest, subset.getOriginal)
fmq.getRelModifiedMonotonicity(rel)
}
def getRelModifiedMonotonicity(rel: RelNode, mq: RelMetadataQuery): RelModifiedMonotonicity = null
/**
* Utility to create a RelModifiedMonotonicity which all fields is modified constant which
* means all the field's value will not be modified.
*/
def constants(fieldCount: Int): RelModifiedMonotonicity = {
new RelModifiedMonotonicity(Array.fill(fieldCount)(CONSTANT))
}
def notMonotonic(fieldCount: Int): RelModifiedMonotonicity = {
new RelModifiedMonotonicity(Array.fill(fieldCount)(NOT_MONOTONIC))
}
/**
* These operator won't generate update itself
*/
def getMonotonicity(
input: RelNode,
mq: RelMetadataQuery,
fieldCount: Int): RelModifiedMonotonicity = {
val fmq = FlinkRelMetadataQuery.reuseOrCreate(mq)
if (containsDelete(fmq, input)) {
null
} else if (allAppend(fmq, input)) {
new RelModifiedMonotonicity(Array.fill(fieldCount)(CONSTANT))
} else {
new RelModifiedMonotonicity(Array.fill(fieldCount)(NOT_MONOTONIC))
}
}
def containsDelete(mq: RelMetadataQuery, node: RelNode): Boolean = {
val fmq = FlinkRelMetadataQuery.reuseOrCreate(mq)
fmq.getRelModifiedMonotonicity(node) == null
}
def containsUpdate(mq: RelMetadataQuery, node: RelNode): Boolean = {
val fmq = FlinkRelMetadataQuery.reuseOrCreate(mq)
if (containsDelete(fmq, node)) {
false
} else {
val monotonicity = fmq.getRelModifiedMonotonicity(node)
monotonicity.fieldMonotonicities.exists(_ != CONSTANT)
}
}
def allAppend(mq: RelMetadataQuery, node: RelNode): Boolean = {
val fmq = FlinkRelMetadataQuery.reuseOrCreate(mq)
if (containsDelete(fmq, node)) {
false
} else {
val monotonicity = fmq.getRelModifiedMonotonicity(node)
monotonicity.fieldMonotonicities.forall(_ == CONSTANT)
}
}
def getUdfMonotonicity(udf: ScalarSqlFunction, binding: SqlOperatorBinding): SqlMonotonicity = {
// get monotonicity info from ScalarSqlFunction directly.
udf.getMonotonicity(binding)
}
private def isValueGreaterThanZero[T](value: Comparable[T]): Int = {
value match {
case i: Integer => i.compareTo(0)
case l: JLong => l.compareTo(0L)
case db: JDouble => db.compareTo(0d)
case f: JFloat => f.compareTo(0f)
case s: JShort => s.compareTo(0.toShort)
case b: JByte => b.compareTo(0.toByte)
case dec: JBigDecimal => dec.compareTo(JBigDecimal.ZERO)
case _: Date | _: Time | _: Timestamp | _: String =>
//not interested here, just return negative
-1
case _ =>
// other numeric types
value.asInstanceOf[Comparable[Any]].compareTo(0.asInstanceOf[Comparable[Any]])
}
}
}
object FlinkRelMdModifiedMonotonicity {
private val INSTANCE = new FlinkRelMdModifiedMonotonicity
val SOURCE: RelMetadataProvider = ReflectiveRelMetadataProvider.reflectiveSource(
FlinkMetadata.ModifiedMonotonicity.METHOD, INSTANCE)
}
| aljoscha/flink | flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/metadata/FlinkRelMdModifiedMonotonicity.scala | Scala | apache-2.0 | 24,150 |
package lila.round
import scala.concurrent.duration._
import scala.math
import play.api.libs.json._
import lila.common.PimpedJson._
import lila.game.{ Pov, Game, PerfPicker, Source, GameRepo, CorrespondenceClock }
import lila.pref.Pref
import lila.user.{ User, UserRepo }
import chess.format.Forsyth
import chess.{ Color, Clock }
import actorApi.SocketStatus
final class JsonView(
chatApi: lila.chat.ChatApi,
noteApi: NoteApi,
userJsonView: lila.user.JsonView,
getSocketStatus: String => Fu[SocketStatus],
canTakeback: Game => Fu[Boolean],
baseAnimationDuration: Duration,
moretimeSeconds: Int) {
import JsonView._
private def checkCount(game: Game, color: Color) =
(game.variant == chess.variant.ThreeCheck) option game.checkCount(color)
def playerJson(
pov: Pov,
pref: Pref,
apiVersion: Int,
playerUser: Option[User],
initialFen: Option[String],
withBlurs: Boolean): Fu[JsObject] =
getSocketStatus(pov.game.id) zip
(pov.opponent.userId ?? UserRepo.byId) zip
canTakeback(pov.game) zip
getPlayerChat(pov.game, playerUser) map {
case (((socket, opponentUser), takebackable), chat) =>
import pov._
Json.obj(
"game" -> gameJson(game, initialFen),
"clock" -> game.clock.map(clockJson),
"correspondence" -> game.correspondenceClock,
"player" -> Json.obj(
"id" -> playerId,
"color" -> player.color.name,
"version" -> socket.version,
"spectator" -> false,
"user" -> playerUser.map { userJsonView(_, true) },
"rating" -> player.rating,
"ratingDiff" -> player.ratingDiff,
"provisional" -> player.provisional.option(true),
"offeringRematch" -> player.isOfferingRematch.option(true),
"offeringDraw" -> player.isOfferingDraw.option(true),
"proposingTakeback" -> player.isProposingTakeback.option(true),
"onGame" -> (player.isAi || socket.onGame(player.color)),
"checks" -> checkCount(game, player.color),
"hold" -> (withBlurs option hold(player)),
"blurs" -> (withBlurs option blurs(game, player))
).noNull,
"opponent" -> Json.obj(
"color" -> opponent.color.name,
"ai" -> opponent.aiLevel,
"user" -> opponentUser.map { userJsonView(_, true) },
"rating" -> opponent.rating,
"ratingDiff" -> opponent.ratingDiff,
"provisional" -> opponent.provisional.option(true),
"offeringRematch" -> opponent.isOfferingRematch.option(true),
"offeringDraw" -> opponent.isOfferingDraw.option(true),
"proposingTakeback" -> opponent.isProposingTakeback.option(true),
"onGame" -> (opponent.isAi || socket.onGame(opponent.color)),
"isGone" -> (!opponent.isAi && socket.isGone(opponent.color)),
"checks" -> checkCount(game, opponent.color),
"hold" -> (withBlurs option hold(opponent)),
"blurs" -> (withBlurs option blurs(game, opponent))
).noNull,
"url" -> Json.obj(
"socket" -> s"/$fullId/socket/v$apiVersion",
"round" -> s"/$fullId"
),
"pref" -> Json.obj(
"blindfold" -> pref.isBlindfold,
"animationDuration" -> animationDuration(pov, pref),
"highlight" -> (pref.highlight || pref.isBlindfold),
"destination" -> (pref.destination && !pref.isBlindfold),
"coords" -> pref.coords,
"replay" -> pref.replay,
"autoQueen" -> (pov.game.variant == chess.variant.Antichess).fold(Pref.AutoQueen.NEVER, pref.autoQueen),
"clockTenths" -> pref.clockTenths,
"clockBar" -> pref.clockBar,
"clockSound" -> pref.clockSound,
"enablePremove" -> pref.premove,
"showCaptured" -> pref.captured,
"submitMove" -> {
import Pref.SubmitMove._
pref.submitMove match {
case _ if game.hasAi => false
case ALWAYS => true
case CORRESPONDENCE_UNLIMITED if game.isCorrespondence => true
case CORRESPONDENCE_ONLY if game.hasCorrespondenceClock => true
case _ => false
}
},
"confirmResign" -> (pref.confirmResign == Pref.ConfirmResign.YES).option(true)),
"chat" -> chat.map { c =>
JsArray(c.lines map {
case lila.chat.UserLine(username, text, _) => Json.obj(
"u" -> username,
"t" -> text)
case lila.chat.PlayerLine(color, text) => Json.obj(
"c" -> color.name,
"t" -> text)
})
},
"possibleMoves" -> possibleMoves(pov),
"takebackable" -> takebackable).noNull
}
def watcherJson(
pov: Pov,
pref: Pref,
apiVersion: Int,
user: Option[User],
tv: Option[OnTv],
withBlurs: Boolean,
initialFen: Option[String] = None,
withMoveTimes: Boolean) =
getSocketStatus(pov.game.id) zip
getWatcherChat(pov.game, user) zip
UserRepo.pair(pov.player.userId, pov.opponent.userId) map {
case ((socket, chat), (playerUser, opponentUser)) =>
import pov._
Json.obj(
"game" -> {
gameJson(game, initialFen) ++ Json.obj(
"moveTimes" -> withMoveTimes.option(game.moveTimes),
"opening" -> game.opening,
"joinable" -> game.joinable,
"importedBy" -> game.pgnImport.flatMap(_.user)).noNull
},
"clock" -> game.clock.map(clockJson),
"correspondence" -> game.correspondenceClock,
"player" -> Json.obj(
"color" -> color.name,
"version" -> socket.version,
"spectator" -> true,
"ai" -> player.aiLevel,
"user" -> playerUser.map { userJsonView(_, true) },
"name" -> player.name,
"rating" -> player.rating,
"ratingDiff" -> player.ratingDiff,
"provisional" -> player.provisional.option(true),
"onGame" -> (player.isAi || socket.onGame(player.color)),
"checks" -> checkCount(game, player.color),
"berserk" -> player.berserk.option(true),
"hold" -> (withBlurs option hold(player)),
"blurs" -> (withBlurs option blurs(game, player))
).noNull,
"opponent" -> Json.obj(
"color" -> opponent.color.name,
"ai" -> opponent.aiLevel,
"user" -> opponentUser.map { userJsonView(_, true) },
"name" -> opponent.name,
"rating" -> opponent.rating,
"ratingDiff" -> opponent.ratingDiff,
"provisional" -> opponent.provisional.option(true),
"onGame" -> (opponent.isAi || socket.onGame(opponent.color)),
"checks" -> checkCount(game, opponent.color),
"berserk" -> opponent.berserk.option(true),
"hold" -> (withBlurs option hold(opponent)),
"blurs" -> (withBlurs option blurs(game, opponent))
).noNull,
"orientation" -> pov.color.name,
"url" -> Json.obj(
"socket" -> s"/$gameId/${color.name}/socket",
"round" -> s"/$gameId/${color.name}"
),
"pref" -> Json.obj(
"animationDuration" -> animationDuration(pov, pref),
"highlight" -> pref.highlight,
"coords" -> pref.coords,
"replay" -> pref.replay,
"clockTenths" -> pref.clockTenths,
"clockBar" -> pref.clockBar,
"showCaptured" -> pref.captured
),
"tv" -> tv.map { onTv =>
Json.obj("channel" -> onTv.channel, "flip" -> onTv.flip)
},
"chat" -> chat.map { c =>
JsArray(c.lines map {
case lila.chat.UserLine(username, text, _) => Json.obj(
"u" -> username,
"t" -> text)
})
}
).noNull
}
def userAnalysisJson(pov: Pov, pref: Pref, orientation: chess.Color, owner: Boolean) =
(pov.game.pgnMoves.nonEmpty ?? GameRepo.initialFen(pov.game)) map { initialFen =>
import pov._
val fen = Forsyth >> game.toChess
Json.obj(
"game" -> Json.obj(
"id" -> gameId,
"variant" -> game.variant,
"initialFen" -> {
if (pov.game.pgnMoves.isEmpty) fen
else (initialFen | chess.format.Forsyth.initial)
},
"fen" -> fen,
"turns" -> game.turns,
"player" -> game.turnColor.name,
"status" -> game.status),
"player" -> Json.obj(
"id" -> owner.option(pov.playerId),
"color" -> color.name
),
"opponent" -> Json.obj(
"color" -> opponent.color.name
),
"orientation" -> orientation.name,
"pref" -> Json.obj(
"animationDuration" -> animationDuration(pov, pref),
"highlight" -> pref.highlight,
"destination" -> pref.destination,
"coords" -> pref.coords
),
"path" -> pov.game.turns,
"userAnalysis" -> true)
}
private def gameJson(game: Game, initialFen: Option[String]) = Json.obj(
"id" -> game.id,
"variant" -> game.variant,
"speed" -> game.speed.key,
"perf" -> PerfPicker.key(game),
"rated" -> game.rated,
"initialFen" -> (initialFen | chess.format.Forsyth.initial),
"fen" -> (Forsyth >> game.toChess),
"moves" -> game.pgnMoves.mkString(" "),
"player" -> game.turnColor.name,
"winner" -> game.winnerColor.map(_.name),
"turns" -> game.turns,
"startedAtTurn" -> game.startedAtTurn,
"lastMove" -> game.castleLastMoveTime.lastMoveString,
"threefold" -> game.toChessHistory.threefoldRepetition,
"check" -> game.check.map(_.key),
"rematch" -> game.next,
"source" -> game.source.map(sourceJson),
"status" -> game.status,
"boosted" -> game.boosted.option(true),
"tournamentId" -> game.tournamentId).noNull
private def blurs(game: Game, player: lila.game.Player) = {
val percent = game.playerBlurPercent(player.color)
(percent > 30) option Json.obj(
"nb" -> player.blurs,
"percent" -> percent
)
}
private def hold(player: lila.game.Player) = player.holdAlert map { h =>
Json.obj(
"ply" -> h.ply,
"mean" -> h.mean,
"sd" -> h.sd)
}
private def getPlayerChat(game: Game, forUser: Option[User]): Fu[Option[lila.chat.MixedChat]] =
game.hasChat optionFu {
chatApi.playerChat find game.id map (_ forUser forUser)
}
private def getWatcherChat(game: Game, forUser: Option[User]): Fu[Option[lila.chat.UserChat]] =
forUser ?? { user =>
chatApi.userChat find s"${game.id}/w" map (_ forUser user.some) map (_.some)
}
private def getUsers(game: Game) = UserRepo.pair(
game.whitePlayer.userId,
game.blackPlayer.userId)
private def sourceJson(source: Source) = source.name
private def clockJson(clock: Clock): JsObject =
clockWriter.writes(clock) + ("moretime" -> JsNumber(moretimeSeconds))
private def possibleMoves(pov: Pov) = (pov.game playableBy pov.player) option {
pov.game.toChess.situation.destinations map {
case (from, dests) => from.key -> dests.mkString
}
}
private def animationFactor(pref: Pref): Float = pref.animation match {
case 0 => 0
case 1 => 0.5f
case 2 => 1
case 3 => 2
case _ => 1
}
private def animationDuration(pov: Pov, pref: Pref) = math.round {
animationFactor(pref) * baseAnimationDuration.toMillis * pov.game.finished.fold(
1,
math.max(0, math.min(1.2, ((pov.game.estimateTotalTime - 60) / 60) * 0.2))
)
}
}
object JsonView {
implicit val variantWriter: OWrites[chess.variant.Variant] = OWrites { v =>
Json.obj(
"key" -> v.key,
"name" -> v.name,
"short" -> v.shortName,
"title" -> v.title)
}
implicit val statusWriter: OWrites[chess.Status] = OWrites { s =>
Json.obj(
"id" -> s.id,
"name" -> s.name)
}
implicit val clockWriter: OWrites[Clock] = OWrites { c =>
Json.obj(
"running" -> c.isRunning,
"initial" -> c.limit,
"increment" -> c.increment,
"white" -> c.remainingTime(Color.White),
"black" -> c.remainingTime(Color.Black),
"emerg" -> c.emergTime)
}
implicit val correspondenceWriter: OWrites[CorrespondenceClock] = OWrites { c =>
Json.obj(
"daysPerTurn" -> c.daysPerTurn,
"increment" -> c.increment,
"white" -> c.whiteTime,
"black" -> c.blackTime,
"emerg" -> c.emerg)
}
implicit val openingWriter: OWrites[chess.Opening] = OWrites { o =>
Json.obj(
"code" -> o.code,
"name" -> o.name,
"size" -> o.size
)
}
}
| TangentialAlan/lila | modules/round/src/main/JsonView.scala | Scala | mit | 13,267 |
package processes.freeMonads.vanillaScala.multiple
import processes.freeMonads.multiple.CompleteProgramParts
import processes.freeMonads.vanillaScala.MultipleMachinery
import scala.language.higherKinds
/*
* Defined in a separate file because otherwise the lookup of the implicits failed.
*/
trait BranchingUtils { _: MultipleMachinery with CompleteProgramParts =>
trait Brancher[F[_], G[_], L] {
type Out[x]
def branch[R, A](right: F[A])(branch: A => Either[G[L], R]): Free[Out, R]
}
object Brancher {
/*
* This looks like a scary method, and it probably is. It uses the A value in F[A]
* to determine if it needs to branch. If it needs to branch it will secretly
* (using the Branch[L]#Instance[R] type) insert the branch based on the L value
* in G[L].
*
* It becomes a bit complicated because we do not know anything about F[_] and
* G[_], to get at the values that are inside of them we need to wrap them in
* Free. In order to wrap them in Free we need to know a few things:
* - The program type O[_]
* - A way to inject F into O
* - A way to inject G into O
*
* Last but not least we need to inject the branch into O
*/
implicit def forProgram[F[_], G[_], L, O[_]](
implicit programType: ProgramType[O],
injectF: F ~> O,
injectG: G ~> O,
injectBranch: Branch[L]#Instance ~> O) =
new Brancher[F, G, L] {
type Out[x] = O[x]
def branch[R, A](right: F[A])(branch: A => Either[G[L], R]): Free[O, R] =
right.flatMap { a =>
branch(a) match {
case Left(left) => left flatMap Branch[L, R]
case Right(r) => Free(r)
}
}
}
}
} | EECOLOR/scala-clean-code-patterns | src/main/scala/processes/freeMonads/vanillaScala/multiple/BranchingUtils.scala | Scala | mit | 1,751 |
package org.bitcoins.spvnode.store
import org.bitcoins.core.gen.BlockchainElementsGenerator
import org.bitcoins.core.protocol.blockchain.BlockHeader
import org.scalacheck.{Gen, Prop, Properties}
/**
* Created by chris on 9/5/16.
*/
class BlockHeaderStoreSpec extends Properties("BlockHeaderStoreSpec") {
def size = Gen.choose(0,5).sample.get
/* property("serialization symmetry to file") ={
Prop.forAll(Gen.listOfN(size,BlockchainElementsGenerator.blockHeader)) { case headers : Seq[BlockHeader] =>
println("Working")
val file = new java.io.File("src/test/resources/block_header_spec_1.dat")
BlockHeaderStore.append(headers,file)
val headersFromFile = BlockHeaderStore.read(file)
val result = headersFromFile == headers
file.delete()
result
}
}
property("read the last stored blockheader stored in a file") = {
Prop.forAll(Gen.listOfN(size,BlockchainElementsGenerator.blockHeader)) { case headers: Seq[BlockHeader] =>
println("working")
val file = new java.io.File("src/test/resources/block_header_spec_2.dat")
BlockHeaderStore.append(headers,file)
val lastHeader = BlockHeaderStore.lastHeader(file)
val expectedLastHeader = if (headers.isEmpty) None else Some(headers.last)
val result = lastHeader == expectedLastHeader
file.delete()
result
}
}*/
}
| Christewart/bitcoin-s-spv-node | src/test/scala/org/bitcoins/spvnode/store/BlockHeaderStoreSpec.scala | Scala | mit | 1,374 |
package mesosphere.marathon
package raml
trait ResourceLimitsConversion {
import ResourceLimitsConversion.{resourceLimitFromDouble, resourceLimitToDouble}
implicit val ramlResourceLimitsRead = Reads[ResourceLimits, state.ResourceLimits] { resourceLimits =>
state.ResourceLimits(
cpus = resourceLimits.cpus.map(resourceLimitToDouble),
mem = resourceLimits.mem.map(resourceLimitToDouble)
)
}
implicit val ramlResourceLimitsWrite = Writes[state.ResourceLimits, ResourceLimits] { resourceLimits =>
ResourceLimits(
cpus = resourceLimits.cpus.map(resourceLimitFromDouble),
mem = resourceLimits.mem.map(resourceLimitFromDouble)
)
}
implicit val resourceLimitsProtoRamlWrites = Writes[Protos.ResourceLimits, ResourceLimits] { proto =>
ResourceLimits(
cpus = if (proto.hasCpus) Some(resourceLimitFromDouble(proto.getCpus)) else None,
mem = if (proto.hasMem) Some(resourceLimitFromDouble(proto.getMem)) else None
)
}
}
object ResourceLimitsConversion {
def resourceLimitToDouble(resourceLimit: ResourceLimit): Double =
resourceLimit match {
case ResourceLimitUnlimited("unlimited") =>
// This value is understood by protobuf as infinity, and Mesos consequently also understands it
Double.PositiveInfinity
case ResourceLimitUnlimited(text) =>
throw new IllegalStateException(
s"ResourceLimitUnlimited(${text}) encountered, should be ResourceLimitUnlimited(unlimited)"
) // we should never get here
case ResourceLimitNumber(value) =>
value
}
def resourceLimitFromDouble(limit: Double): ResourceLimit =
if (limit == Double.PositiveInfinity)
ResourceLimitUnlimited("unlimited")
else
ResourceLimitNumber(limit)
}
| mesosphere/marathon | src/main/scala/mesosphere/marathon/raml/ResourceLimitsConversion.scala | Scala | apache-2.0 | 1,778 |
package org.scalatra
package atmosphere
import java.io.IOException
import java.util
import javax.servlet.http.{ HttpServletRequest, HttpServletResponse }
import javax.servlet.{ FilterConfig, ServletConfig, ServletContext, ServletException }
import _root_.akka.actor.ActorSystem
import com.typesafe.config.ConfigFactory
import grizzled.slf4j.Logger
import org.apache.catalina.CometProcessor
import org.atmosphere.cache.UUIDBroadcasterCache
import org.atmosphere.client.TrackMessageSizeInterceptor
import org.atmosphere.container.{ JBossWebCometSupport, Tomcat7CometSupport, TomcatCometSupport }
import org.atmosphere.cpr._
import org.atmosphere.interceptor.SessionCreationInterceptor
import org.jboss.servlet.http.{ HttpEvent, HttpEventServlet }
import org.json4s._
import org.scalatra.json.JsonSupport
import org.scalatra.servlet.ScalatraAsyncSupport
import org.scalatra.util.RicherString._
import scala.collection.JavaConverters._
import scala.util.control.Exception.allCatch
trait AtmosphereSupport extends Initializable with Handler with CometProcessor with HttpEventServlet with org.apache.catalina.comet.CometProcessor with ScalatraAsyncSupport { self: ScalatraBase with org.scalatra.SessionSupport with JsonSupport[_] =>
private[this] val logger = Logger[this.type]
private[this] val _defaultWireformat = new JacksonSimpleWireformat
/**
* Override this to use another ScalaBroadcaster like RedisScalatraBroadcaster
*
* Example: RedisScalatraBroadcasterConfig(URI.create("redis://127.0.0.1"), Some("password"))
*/
protected val broadcasterConfig: BroadcasterConf = ScalatraBroadcasterConfig(classOf[DefaultScalatraBroadcaster])
implicit protected def wireFormat: WireFormat = _defaultWireformat
implicit def json2JsonMessage(json: JValue): OutboundMessage = JsonMessage(json)
implicit def string2Outbound(text: String): OutboundMessage = text.blankOption map { txt =>
if (txt.startsWith("{") || txt.startsWith("["))
parseOpt(txt) map JsonMessage.apply getOrElse TextMessage(txt)
else
TextMessage(txt)
} getOrElse TextMessage("")
private[this] def isFilter = self match {
case _: ScalatraFilter => true
case _ => false
}
val atmosphereFramework = new ScalatraAtmosphereFramework(isFilter, false)
implicit protected def scalatraActorSystem: ActorSystem =
servletContext.get(ActorSystemKey).map(_.asInstanceOf[ActorSystem]) getOrElse {
val msg = "Scalatra Actor system not present. Creating a private actor system"
logger.info(msg)
val cfg = ConfigFactory.load
val defRef = ConfigFactory.defaultReference
ActorSystem("scalatra", cfg.getConfig("scalatra").withFallback(defRef))
}
private[this] implicit def filterConfig2servletConfig(fc: FilterConfig): ServletConfig = {
new ServletConfig {
def getInitParameter(name: String): String = getServletContext.getInitParameter(name)
def getInitParameterNames() = getServletContext.getInitParameterNames()
def getServletName() = fc.getFilterName()
def getServletContext() = fc.getServletContext()
}
}
abstract override def initialize(config: ConfigT) {
super.initialize(config)
val cfg: ServletConfig = config match {
case c: FilterConfig => c
case c: ServletConfig => new ServletConfig {
def getInitParameterNames: util.Enumeration[String] = getServletContext.getInitParameterNames
def getServletName: String = c.getServletName
def getInitParameter(name: String): String = getServletContext.getInitParameter(name)
def getServletContext: ServletContext = c.getServletContext
}
}
allCatch.withApply(ex => logger.error(ex.getMessage, ex)) {
atmosphereFramework.enableSessionSupport()
configureBroadcasterCache()
configureBroadcasterFactory()
configureInterceptors(cfg)
atmosphereFramework.init(cfg)
setupAtmosphereHandlerMappings(cfg)
}
}
/**
* Servlets that want to track atmosphere message size should override this.
* @see [[TrackMessageSizeInterceptor]]
*/
protected def trackMessageSize: Boolean = false
protected def configureInterceptors(cfg: ServletConfig) = {
atmosphereFramework.interceptor(new SessionCreationInterceptor)
if (cfg.getInitParameter(ApplicationConfig.PROPERTY_NATIVE_COMETSUPPORT).isBlank)
cfg.getServletContext.setInitParameter(ApplicationConfig.PROPERTY_NATIVE_COMETSUPPORT, "true")
if (trackMessageSize || cfg.getInitParameter(TrackMessageSize).blankOption.map(_.toCheckboxBool).getOrElse(false))
atmosphereFramework.interceptor(new TrackMessageSizeInterceptor)
}
private[this] def setupAtmosphereHandlerMappings(cfg: ServletConfig) {
// TODO: also support filters?
val servletRegistration = ScalatraBase.getServletRegistration(this)
servletRegistration foreach { reg =>
reg.getMappings.asScala foreach { mapping =>
atmosphereFramework.addAtmosphereHandler(mapping, new ScalatraAtmosphereHandler).initAtmosphereHandler(cfg)
}
}
}
/**
* Handles a request and renders a response.
*
* $ 1. If the request lacks a character encoding, `defaultCharacterEncoding`
* is set to the request.
*
* $ 2. Sets the response's character encoding to `defaultCharacterEncoding`.
*
* $ 3. Binds the current `request`, `response`, and `multiParams`, and calls
* `executeRoutes()`.
*/
abstract override def handle(request: HttpServletRequest, response: HttpServletResponse) {
withRequestResponse(request, response) {
val atmoRoute = atmosphereRoute(request)
if (atmoRoute.isDefined) {
request(AtmosphereRouteKey) = atmoRoute.get
request.getSession(true) // force session creation
if (request.get(FrameworkConfig.ATMOSPHERE_HANDLER_WRAPPER).isEmpty)
atmosphereFramework.doCometSupport(AtmosphereRequest.wrap(request), AtmosphereResponse.wrap(response))
} else {
super.handle(request, response)
}
}
}
private[this] def noGetRoute = sys.error("You are using the AtmosphereSupport without defining any Get route," +
"you should get rid of it.")
private[this] def atmosphereRoutes = routes.methodRoutes.getOrElse(Get, noGetRoute).filter(_.metadata.contains('Atmosphere))
private[this] def atmosphereRoute(req: HttpServletRequest) = (for {
route <- atmosphereRoutes.toStream
matched <- route(requestPath)
} yield matched).headOption
private[this] def configureBroadcasterFactory() {
val factory = new ScalatraBroadcasterFactory(
atmosphereFramework.getAtmosphereConfig,
broadcasterConfig)
atmosphereFramework.setDefaultBroadcasterClassName(broadcasterConfig.broadcasterClass.getName)
atmosphereFramework.setBroadcasterFactory(factory)
}
private[this] def configureBroadcasterCache() {
if (atmosphereFramework.getBroadcasterCacheClassName.isBlank)
atmosphereFramework.setBroadcasterCacheClassName(classOf[UUIDBroadcasterCache].getName)
}
private[atmosphere] val Atmosphere: RouteTransformer = { (route: Route) =>
route.copy(metadata = route.metadata + ('Atmosphere -> 'Atmosphere))
}
def atmosphere(transformers: RouteTransformer*)(block: => AtmosphereClient) = {
val newTransformers = transformers :+ Atmosphere
get(newTransformers: _*)(block)
post(newTransformers: _*) { () }
}
/**
* Hack to support Tomcat AIO like other WebServer. This method is invoked
* by Tomcat when it detect a [[javax.servlet.Servlet]] implements the interface
* [[org.apache.catalina.CometProcessor]] without invoking [[javax.servlet.Servlet#service]]
*
* @param cometEvent the [[org.apache.catalina.CometEvent]]
* @throws java.io.IOException
* @throws javax.servlet.ServletException
*/
@throws(classOf[IOException])
@throws(classOf[ServletException])
def event(cometEvent: org.apache.catalina.CometEvent) {
val req = cometEvent.getHttpServletRequest
val res = cometEvent.getHttpServletResponse
req.setAttribute(TomcatCometSupport.COMET_EVENT, cometEvent)
atmosphereFramework.setupTomcat()
handle(req, res)
val transport = cometEvent.getHttpServletRequest.getParameter(HeaderConfig.X_ATMOSPHERE_TRANSPORT)
if (transport != null && transport.equalsIgnoreCase(HeaderConfig.WEBSOCKET_TRANSPORT)) {
cometEvent.close()
}
}
/**
* Hack to support Tomcat 7 AIO
*/
@throws(classOf[IOException])
@throws(classOf[ServletException])
def event(cometEvent: org.apache.catalina.comet.CometEvent) {
val req = cometEvent.getHttpServletRequest
val res = cometEvent.getHttpServletResponse
req.setAttribute(Tomcat7CometSupport.COMET_EVENT, cometEvent)
atmosphereFramework.setupTomcat7()
handle(req, res)
val transport = cometEvent.getHttpServletRequest.getParameter(HeaderConfig.X_ATMOSPHERE_TRANSPORT)
if (transport != null && transport.equalsIgnoreCase(HeaderConfig.WEBSOCKET_TRANSPORT)) {
cometEvent.close()
}
}
/**
* Hack to support JBossWeb AIO like other WebServer. This method is invoked
* by Tomcat when it detect a [[javax.servlet.Servlet]] implements the interface
* [[org.jboss.servlet.http.HttpEventServlet]] without invoking [[javax.servlet.Servlet#service]]
*
* @param httpEvent the [[org.jboss.servlet.http.HttpEvent]]
* @throws java.io.IOException
* @throws javax.servlet.ServletException
*/
@throws(classOf[IOException])
@throws(classOf[ServletException])
def event(httpEvent: HttpEvent) {
val req = httpEvent.getHttpServletRequest
val res = httpEvent.getHttpServletResponse
req.setAttribute(JBossWebCometSupport.HTTP_EVENT, httpEvent)
atmosphereFramework.setupJBoss()
handle(req, res)
}
}
| 0xfaded/scalatra | atmosphere/src/main/scala/org/scalatra/atmosphere/AtmosphereSupport.scala | Scala | bsd-2-clause | 9,747 |
package com.microsoft.partnercatalyst.fortis.spark.dba
import com.microsoft.partnercatalyst.fortis.spark.dto.{BlacklistedItem, SiteSettings}
import com.microsoft.partnercatalyst.fortis.spark.sources.streamprovider.ConnectorConfig
import org.apache.spark.SparkContext
trait ConfigurationManager {
def fetchConnectorConfigs(sparkContext: SparkContext, pipeline: String): List[ConnectorConfig]
def fetchSiteSettings(sparkContext: SparkContext): SiteSettings
def fetchWatchlist(sparkContext: SparkContext): Map[String, Seq[String]]
def fetchBlacklist(sparkContext: SparkContext): Seq[BlacklistedItem]
}
| CatalystCode/project-fortis-spark | src/main/scala/com/microsoft/partnercatalyst/fortis/spark/dba/ConfigurationManager.scala | Scala | mit | 610 |
// Control Structures : For Expressions
// Scala’s for expression is a Swiss army knife.
// It lets you combine a few simple ingredients
// in different ways to express a variety of enumerations.
// For now lets just see a simple example...
for (i <- 1 to 10) {
println(i)
}
// Ok, one neat trick.. loop guards
for {
i <- 0 to 10
if i % 2 == 0
} print(i + " ")
// Which prints...
0 2 4 6 8 10
// 'for' is very powerful, they are in fact
// similar to set comprehensions. More to come..
| agconti/scala-school | 01-intro-to-scala/slides/slide017.scala | Scala | mit | 505 |
object HelloWorld {
def main(args: Array[String]): Unit = {
val message = Option("Scala") match {
case Some(name) ⇒ s"Hello $name"
case _ ⇒ "Hello world"
}
println(message)
}
}
| aiyanbo/sbt-simple-project | src/main/scala/HelloWorld.scala | Scala | apache-2.0 | 221 |
package com.mongo.spark
import java.io.{FileSystem => _, _}
import java.text.SimpleDateFormat
import java.util.{Calendar, Date, Properties}
import com.mongodb.spark.config._
import com.mongodb.spark.sql._
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
object HDFSFileService {
def getOneDayStart_time(date:Date):Long={
val dateFormat = new SimpleDateFormat("yyyy-MM-dd")
val a = dateFormat.parse(dateFormat.format(date)).getTime
val str = a+""
str.toLong
}
def getOneDayEnd_time(date:Date):Long={
var cal = Calendar.getInstance();
cal.setTime(date);
cal.set(Calendar.HOUR_OF_DAY, 23);
cal.set(Calendar.MINUTE, 59);
cal.set(Calendar.SECOND, 59);
cal.set(Calendar.MILLISECOND, 999);
val a = cal.getTime.getTime
val str = a+""
str.toLong
}
def getDt(date:Date) = {
val dateFormat: SimpleDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
val dt = dateFormat.format(date)
dt
}
def getDaysBefore(dt: Date, interval: Int):Date = {
val dateFormat: SimpleDateFormat = new SimpleDateFormat("yyyy-MM-dd")
val cal: Calendar = Calendar.getInstance()
cal.setTime(dt);
cal.add(Calendar.DATE, - interval)
val day = cal.getTime()
day
}
def getSparkSession(mongoUrl: String, sparkUrl:String,asSeedhost:String,asHost:String): SparkSession = {
val uri: String = "mongodb://" + mongoUrl + "/mpush.app?readPreference=primary"
val conf = new SparkConf()
.setMaster("spark://" + sparkUrl)
.setAppName("MongoSparkConnectorTour")
.set("spark.app.id", "MongoSparkConnectorTour")
.set("spark.mongodb.input.uri", uri)
.set("spark.mongodb.output.uri", uri)
.set("aerospike.seedhost", asSeedhost)
.set("aerospike.port", asHost)
.set("aerospike.namespace", "push")
val spark = SparkSession.builder().config(conf).master("spark://" + sparkUrl).appName("scalamongoas").getOrCreate()
spark
}
//../spark-2.1.0-bin-hadoop2.7/bin/./spark-submit --class com.mongo.spark.HDFSFileService --master spark://172.23.5.113:7077 scalamongoas-assembly-1.3.1.jar
//./spark-submit --class com.mongo.spark.HDFSFileService --master spark://172.23.5.113:7077 scalamongoas-assembly-1.3.1.jar
def main(args: Array[String]): Unit = {
//======读取配置文件start======
val filePath =System.getProperty("user.dir")
println("filePath:"+filePath)
val postgprop = new Properties
val ipstream = new BufferedInputStream(new FileInputStream(filePath+"/conf/config.properties"))
postgprop.load(ipstream)
//======读取配置文件end======
val mongoUrl = postgprop.getProperty("mongo.userName")+":"+postgprop.getProperty("mongo.password")+"@"+postgprop.getProperty("mongo.host")
val sparkUrl = postgprop.getProperty("spark.host") + ":" + postgprop.getProperty("spark.port")
val session = getSparkSession(mongoUrl,sparkUrl,postgprop.getProperty("aerospike.seedhost"),postgprop.getProperty("aerospike.port"))
val sqlContext = session.sqlContext
val dfApp = sqlContext.loadFromMongoDB(ReadConfig(Map("uri" -> ("mongodb://" + mongoUrl + "/mpush.app?readPreference=primary"), "partitioner" -> "MongoSplitVectorPartitioner")))
val dfTd = sqlContext.loadFromMongoDB(ReadConfig(Map("uri" -> ("mongodb://" + mongoUrl + "/mpush.td?readPreference=primary"), "partitioner" -> "MongoSplitVectorPartitioner")))
val dfTdAppWithOutId = sqlContext.loadFromMongoDB(ReadConfig(Map("uri" -> ("mongodb://" + mongoUrl + "/mpush.td?readPreference=primary"), "partitioner" -> "MongoSplitVectorPartitioner"))).where("source='app'").drop("_id")
val dfTdGameWithOutId = sqlContext.loadFromMongoDB(ReadConfig(Map("uri" -> ("mongodb://" + mongoUrl + "/mpush.td?readPreference=primary"), "partitioner" -> "MongoSplitVectorPartitioner"))).where("source='game'").drop("_id")
for(a <- 1 to 7){
var oneDay = getDaysBefore(new Date(),a)
val oneDayStart_time = getOneDayStart_time(oneDay)
val oneDayEnd_time = getOneDayEnd_time(oneDay)
val dfPushOneDay = sqlContext.loadFromMongoDB(ReadConfig(Map("uri" -> ("mongodb://" + mongoUrl + "/mpush.push?readPreference=primary"), "partitioner" -> "MongoSplitVectorPartitioner"))).where("ct>"+oneDayStart_time+" and ct<"+oneDayEnd_time)//查询某一天的push总量
val dfActivityOneDay = sqlContext.loadFromMongoDB(ReadConfig(Map("uri" -> ("mongodb://" + mongoUrl + "/mpush.activity?readPreference=primary"), "partitioner" -> "MongoSplitVectorPartitioner"))).where("ct>"+oneDayStart_time+" and ct<"+oneDayEnd_time)//查询某一天的activity总量
val dfDvOneDayWithCt = session.read.format("com.aerospike.spark.sql").option("aerospike.set", "dv").load.filter("ct is not null and (tp='a' or (tp = 'i' and length(token) > 0)) and "+"ct>"+oneDayStart_time+" and ct<"+oneDayEnd_time);
dfDvOneDayWithCt.cache
val dfDvOneDayWithMt = session.read.format("com.aerospike.spark.sql").option("aerospike.set", "dv").load.filter("mt is not null and "+"mt>"+oneDayStart_time+" and mt<"+oneDayEnd_time);
val appApp = dfApp.join(dfTdAppWithOutId,dfApp("_id")===dfTdAppWithOutId("app"))
val appGame = dfApp.join(dfTdGameWithOutId,dfApp("_id")===dfTdGameWithOutId("app"))
dfApp.createOrReplaceTempView("dfApp")
dfTd.createOrReplaceTempView("dfTd")
val appMpush = sqlContext.sql("select * from dfApp where not exists (select * from dfTd WHERE dfApp._id=dfTd.app)")
val dvCount_App = appApp.join(dfDvOneDayWithCt,appApp("_id.oid")===dfDvOneDayWithCt("app")).count()// 每日贡献的新增设备数 6321
val dvCount_Game =appGame.join(dfDvOneDayWithCt,appGame("_id.oid")===dfDvOneDayWithCt("app")).count()// 每日贡献的新增设备数 1200
val dvCount_Mpush =appMpush.join(dfDvOneDayWithCt,appMpush("_id.oid")===dfDvOneDayWithCt("app")).count()
val pushCount_App = appApp.join(dfPushOneDay,appApp("_id")===dfPushOneDay("app")).count()//app每日贡献push数 4838
val pushCount_Game = appGame.join(dfPushOneDay,appGame("_id")===dfPushOneDay("app")).count()//game每日贡献push数 1
val pushCount_Mpush = appMpush.join(dfPushOneDay,appMpush("_id")===dfPushOneDay("app")).count()
val activityCount_App = appApp.join(dfActivityOneDay, appApp("_id") === dfActivityOneDay("app")).count()//app每日贡献activity数 1
val activityCount_Game = appGame.join(dfActivityOneDay,appGame("_id")===dfActivityOneDay("app")).count() //game每日贡献activity数 1
val activityCount_Mpush = appMpush.join(dfActivityOneDay,appMpush("_id")===dfActivityOneDay("app")).count()
val activeDv_App = appApp.join(dfDvOneDayWithMt,appApp("_id.oid")===dfDvOneDayWithMt("app")).count()// 每日贡献的活跃设备数 3800
val activeDv_Game =appGame.join(dfDvOneDayWithMt,appGame("_id.oid")===dfDvOneDayWithMt("app")).count()// 每日贡献的活跃设备数 1200
val activeDv_Mpush =appMpush.join(dfDvOneDayWithMt,appMpush("_id.oid")===dfDvOneDayWithMt("app")).count()
val appAddCount = appApp.where("ct>"+oneDayStart_time+" and ct<"+oneDayEnd_time).count() //app每日新增数 96
val gameAddCount = appGame.where("ct>"+oneDayStart_time+" and ct<"+oneDayEnd_time).count() //game每日新增数 37
val mpushAddCount = appMpush.where("ct>"+oneDayStart_time+" and ct<"+oneDayEnd_time).count() // 0
val appTotalCount = appApp.count() //app累计数
val gameTotalCount = appGame.count() //game累计数
val mpushTotalCount = appMpush.count() //mpush累计数
val fileName = "statistics_result_" + getDt(oneDay) + ".txt"
val writer = new PrintWriter(new File(fileName))
writer.write("app每日贡献push数:"+pushCount_App+"\\n")
writer.write("game每日贡献push数:"+pushCount_Game+"\\n")
writer.write("mpush每日贡献push数:"+pushCount_Mpush+"\\n")
writer.write("app每日贡献activity数:"+activityCount_App+"\\n")
writer.write("game每日贡献activity数:"+activityCount_Game+"\\n")
writer.write("mpush每日贡献activity数:"+activityCount_Mpush+"\\n")
writer.write("app每日贡献的新增token数:"+dvCount_App+"\\n")
writer.write("game每日贡献的新增token数:"+dvCount_Game+"\\n")
writer.write("mpush每日贡献的新增token数:"+dvCount_Mpush+"\\n")
writer.write("app每日贡献的活跃设备数:"+activeDv_App+"\\n")
writer.write("game每日贡献的活跃设备数:"+activeDv_Game+"\\n")
writer.write("mpush每日贡献的活跃设备数:"+activeDv_Mpush+"\\n")
writer.write("app每日新增数:"+appAddCount+",累计数:"+appTotalCount+"\\n")
writer.write("game每日新增数:"+gameAddCount+",累计数:"+gameTotalCount+"\\n")
writer.write("mpush每日新增数:"+mpushAddCount+",累计数:"+mpushTotalCount+"\\n")
writer.close()
}
}
}
| xiaotiejiang888/scalamongoas | src/main/scala/com/mongo/spark/HDFSFileService.scala | Scala | apache-2.0 | 8,871 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.tools.nsc
package reporters
import scala.annotation.unused
import scala.collection.mutable
import scala.reflect.internal
import scala.reflect.internal.util.{Position, ScalaClassLoader}
/** This class exists for sbt compatibility. Global.reporter holds a FilteringReporter.
* The only Reporter that is *not* a FilteringReporter is the one created by sbt.
* The Global.reporter_= setter wraps that in a delegating [[MakeFilteringForwardingReporter]].
*/
abstract class Reporter extends internal.Reporter {
// used by sbt
@deprecated("Use echo, as internal.Reporter does not support unforced info", since="2.13.0")
final def info(pos: Position, msg: String, @unused force: Boolean): Unit = info0(pos, msg, INFO, force = true)
// allow calling info0 in MakeFilteringForwardingReporter
private[reporters] final def nonProtectedInfo0(pos: Position, msg: String, severity: Severity): Unit =
info0(pos, msg, severity, force = true)
// overridden by sbt, IDE -- should not be in the reporting interface
// (IDE receives comments from ScaladocAnalyzer using this hook method)
// TODO: IDE should override a hook method in the parser instead
def comment(pos: Position, msg: String): Unit = ()
// used by sbt (via unit.cancel) to cancel a compile (see hasErrors)
// TODO: figure out how sbt uses this, come up with a separate interface for controlling the build
private[this] var _cancelled: Boolean = false
def cancelled: Boolean = _cancelled
def cancelled_=(b: Boolean): Unit = _cancelled = b
override def hasErrors: Boolean = super.hasErrors || cancelled
override def reset(): Unit = {
super.reset()
cancelled = false
}
}
object Reporter {
/** The usual way to create the configured reporter.
* Errors are reported through `settings.errorFn` and also by throwing an exception.
*/
def apply(settings: Settings): FilteringReporter = {
//val loader = ScalaClassLoader(getClass.getClassLoader) // apply does not make delegate
val loader = new ClassLoader(getClass.getClassLoader) with ScalaClassLoader
loader.create[FilteringReporter](settings.reporter.value, settings.errorFn)(settings)
}
/** Take the message with its explanation, if it has one, but stripping the separator line.
*/
def explanation(msg: String): String =
if (msg == null) {
msg
} else {
val marker = msg.indexOf("\\n----\\n")
if (marker > 0) msg.substring(0, marker + 1) + msg.substring(marker + 6) else msg
}
/** Drop any explanation from the message, including the newline between the message and separator line.
*/
def stripExplanation(msg: String): String =
if (msg == null) {
msg
} else {
val marker = msg.indexOf("\\n----\\n")
if (marker > 0) msg.substring(0, marker) else msg
}
/** Split the message into main message and explanation, as iterators of the text. */
def splitExplanation(msg: String): (Iterator[String], Iterator[String]) = {
val (err, exp) = msg.linesIterator.span(!_.startsWith("----"))
(err, exp.drop(1))
}
}
/** The reporter used in a Global instance.
*
* It filters messages based on
* - settings.nowarn
* - settings.maxerrs / settings.maxwarns
* - positions (only one error at a position, no duplicate messages on a position)
*/
abstract class FilteringReporter extends Reporter {
def settings: Settings
// this should be the abstract method all the way up in reflect.internal.Reporter, but sbt compat
def doReport(pos: Position, msg: String, severity: Severity): Unit
@deprecatedOverriding("override doReport instead", "2.13.1") // overridden in scalameta for example
protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit = doReport(pos, msg, severity)
private lazy val positions = mutable.Map[Position, Severity]() withDefaultValue INFO
private lazy val messages = mutable.Map[Position, List[String]]() withDefaultValue Nil
private def maxErrors: Int = settings.maxerrs.value
private def maxWarnings: Int = settings.maxwarns.value
override def filter(pos: Position, msg: String, severity: Severity): Int = {
import internal.Reporter.{ERROR => Error, WARNING => Warning, _}
def maxOk = severity match {
case Error => maxErrors < 0 || errorCount < maxErrors
case Warning => maxWarnings < 0 || warningCount < maxWarnings
case _ => true
}
// Invoked when an error or warning is filtered by position.
@inline def suppress = {
if (settings.prompt) doReport(pos, msg, severity)
else if (settings.isDebug) doReport(pos, s"[ suppressed ] $msg", severity)
Suppress
}
if (!duplicateOk(pos, severity, msg)) suppress else if (!maxOk) Count else Display
}
/** Returns `true` if the message should be reported. Messages are skipped if:
* - there was already some error at the position. After an error, no further
* messages at that position are issued.
* - the same warning/info message was already issued at the same position.
* Note: two positions are considered identical for logging if they have the same point.
*/
private def duplicateOk(pos: Position, severity: Severity, msg: String): Boolean = {
// was a prefix of the msg already reported at this position for purposes of suppressing repetition?
def matchAt(pos: Position, msg: String): Boolean = messages(pos).exists(msg.startsWith)
// always report at null / NoPosition
pos == null || !pos.isDefined || {
val fpos = pos.focus
val show = positions(fpos) match {
case internal.Reporter.ERROR => false // already error at position
case s if s.id > severity.id => false // already message higher than present severity
case `severity` => !matchAt(fpos, msg) // already issued this (in)exact message
case _ => true // good to go
}
if (show) {
positions(fpos) = severity
messages(fpos) ::= Reporter.stripExplanation(msg) // ignore explanatory suffix for suppressing duplicates
}
show
}
}
override def reset(): Unit = {
super.reset()
positions.clear()
messages.clear()
}
}
| lrytz/scala | src/compiler/scala/tools/nsc/reporters/Reporter.scala | Scala | apache-2.0 | 6,565 |
package component
import core._
import akka.actor.{Actor, ActorRefFactory, ActorLogging, ActorRef, Props}
import akka.http.scaladsl.model._
import akka.http.scaladsl.model.HttpMethods._
import akka.http.scaladsl.model.headers.Accept
import akka.http.scaladsl.server.{Route, RouteResult}
import akka.http.scaladsl.server.Directives._
import akka.pattern.ask
import akka.util.Timeout
import java.util.UUID
import org.joda.time.DateTime
import scala.concurrent.Future
import scala.concurrent.duration._
class UserService(prefix: String) extends Actor with ActorLogging {
import scala.concurrent.ExecutionContext.Implicits.global
implicit val timeout = Timeout(3.seconds)
def receive = {
case UseModel(Some(modelActor)) => context.become(process(modelActor))
case msg => log.warning("Unknown message: {}", msg)
}
def process(modelActor: ActorRef): Receive = {
case GetServiceRoute(optUser) =>
sender ! UserDirectives.userService(prefix, modelActor ? _)(optUser)
case UseModel(None) => context.become(receive)
case msg => log.warning("Unknown message at process: {}", msg)
}
}
object UserService {
def apply(prefix: String)(implicit factory: ActorRefFactory) =
factory.actorOf(Props(new UserService(prefix)))
}
object UserDirectives extends CommonDirectives with UserFormats {
import scala.concurrent.ExecutionContext.Implicits.global
val modelUser = Supervisor.getChild(ModelUser.name)
val userService = (prefix: String, modelFunction: Model.Function) =>
(optUser: Option[User]) => pathPrefix(prefix) {
handleUsers(modelFunction)(optUser)
}
def handleUsers(modelFunction: Model.Function)(optUser: Option[User]) = pathEnd {
val links = Right.mapActions(optUser, Map(
userListLink("self") -> Authenticated
))
respondWithLinks(links:_*) {
headComplete ~
getList[User](modelFunction, User)()()
}
} ~
pathPrefix(Segment)(handleUser(modelFunction: Model.Function))
def handleUser(modelFunction: Model.Function)(userId: String) = pathEnd {
headComplete ~
getEntity[User](modelFunction, userId)() ~
putEntity[User](modelFunction, _.copy(id = userId), userId)() ~
deleteEntity[User](modelFunction, userId)()
}
def optionalUser(token: Option[Token])(route: Option[User] => Route): Route = {
val user = token match {
case Some(t) =>
(modelUser ? GetEntity[User](t.userId)) map {
case Some(user: User) => Some(user)
case _ => None
}
case _ => Future(None)
}
ctx => user flatMap ( u => route(u)(ctx) )
}
def userListLink(rel: String, methods: List[HttpMethod] = List(GET)) =
collectionLink(s"/users", rel, "", "login name", methods:_*)
def userItemLink(rel: String, userId: String = ":userId",
methods: List[HttpMethod] = List(GET)) =
mtLink(s"/users/$userId", rel,
`application/vnd.enpassant.user+json`, methods:_*)
def userMenuLinks(optUser: Option[User]) = {
val links = Right.mapActions(optUser, Map(
collectionLink(s"/users", "users", "List Users", "login name", GET) -> Authenticated
))
respondWithLinks(links:_*)
}
def userItemLinks() = respondWithLinks(
mtLink(s"/users/:userId", "user", `application/vnd.enpassant.user+json`, GET)
)
}
| enpassant/jeeves | src/main/scala/auth/UserDirectives.scala | Scala | apache-2.0 | 3,285 |
package dsmoq.persistence
object GroupAccessLevel {
val Deny = 0
val LimitedPublic = 1
val FullPublic = 2
val Provider = 3
}
| nkawa/dsmoq | server/common/src/main/scala/dsmoq/persistence/GroupAccessLevel.scala | Scala | apache-2.0 | 134 |
package com.twitter.finagle.factory
import com.twitter.finagle._
import com.twitter.finagle.loadbalancer.LoadBalancerFactory
import com.twitter.finagle.param.{Label, Stats}
import com.twitter.finagle.stats.{StatsReceiver, NullStatsReceiver}
import com.twitter.finagle.tracing.Trace
import com.twitter.finagle.util.{Drv, Rng, Showable}
import com.twitter.util._
import scala.collection.immutable
/**
* Proxies requests to the current definiton of 'name', queueing
* requests while it is pending.
*/
private class DynNameFactory[Req, Rep](
name: Activity[NameTree[Name.Bound]],
cache: ServiceFactoryCache[NameTree[Name.Bound], Req, Rep])
extends ServiceFactory[Req, Rep] {
private sealed trait State
private case class Pending(q: immutable.Queue[(ClientConnection, Promise[Service[Req, Rep]])])
extends State
private case class Named(name: NameTree[Name.Bound]) extends State
private case class Failed(exc: Throwable) extends State
private case class Closed() extends State
override def status = state match {
case Pending(_) => Status.Busy
case Named(name) => cache.status(name)
case Failed(_) | Closed() => Status.Closed
}
@volatile private[this] var state: State = Pending(immutable.Queue.empty)
private[this] val sub = name.run.changes respond {
case Activity.Ok(name) => synchronized {
state match {
case Pending(q) =>
state = Named(name)
for ((conn, p) <- q) p.become(apply(conn))
case Failed(_) | Named(_) =>
state = Named(name)
case Closed() =>
}
}
case Activity.Failed(exc) => synchronized {
state match {
case Pending(q) =>
// wrap the exception in a Failure.Naming, so that it can
// be identified for tracing
for ((_, p) <- q) p.setException(Failure.adapt(exc, Failure.Naming))
state = Failed(exc)
case Failed(_) =>
// if already failed, just update the exception; the promises
// must already be satisfied.
state = Failed(exc)
case Named(_) | Closed() =>
}
}
case Activity.Pending =>
}
def apply(conn: ClientConnection): Future[Service[Req, Rep]] = {
state match {
case Named(name) => cache(name, conn)
// wrap the exception in a Failure.Naming, so that it can
// be identified for tracing
case Failed(exc) => Future.exception(Failure.adapt(exc, Failure.Naming))
// don't trace these, since they're not a namer failure
case Closed() => Future.exception(new ServiceClosedException)
case Pending(_) => applySync(conn)
}
}
private[this] def applySync(conn: ClientConnection): Future[Service[Req, Rep]] = synchronized {
state match {
case Pending(q) =>
val p = new Promise[Service[Req, Rep]]
val el = (conn, p)
p setInterruptHandler { case exc =>
synchronized {
state match {
case Pending(q) if q contains el =>
state = Pending(q filter (_ != el))
p.setException(new CancelledConnectionException(exc))
case _ =>
}
}
}
state = Pending(q enqueue el)
p
case other => apply(conn)
}
}
def close(deadline: Time) = {
val prev = synchronized {
val prev = state
state = Closed()
prev
}
prev match {
case Pending(q) =>
val exc = new ServiceClosedException
for ((_, p) <- q)
p.setException(exc)
case _ =>
}
sub.close(deadline)
}
}
/**
* Builds a factory from a [[com.twitter.finagle.NameTree]]. Leaves
* are taken from the given
* [[com.twitter.finagle.factory.ServiceFactoryCache]]; Unions become
* random weighted distributors.
*/
private[finagle] object NameTreeFactory {
def apply[Key, Req, Rep](
path: Path,
tree: NameTree[Key],
factoryCache: ServiceFactoryCache[Key, Req, Rep],
rng: Rng = Rng.threadLocal
): ServiceFactory[Req, Rep] = {
lazy val noBrokersAvailableFactory = Failed(new NoBrokersAvailableException(path.show))
case class Failed(exn: Throwable) extends ServiceFactory[Req, Rep] {
val service: Future[Service[Req, Rep]] = Future.exception(exn)
def apply(conn: ClientConnection) = service
override def status = Status.Closed
def close(deadline: Time) = Future.Done
}
case class Leaf(key: Key) extends ServiceFactory[Req, Rep] {
def apply(conn: ClientConnection) = factoryCache.apply(key, conn)
override def status = factoryCache.status(key)
def close(deadline: Time) = Future.Done
}
case class Weighted(
drv: Drv,
factories: Seq[ServiceFactory[Req, Rep]]
) extends ServiceFactory[Req, Rep] {
def apply(conn: ClientConnection) = factories(drv(rng)).apply(conn)
override def status = Status.worstOf[ServiceFactory[Req, Rep]](factories, _.status)
def close(deadline: Time) = Future.Done
}
def factoryOfTree(tree: NameTree[Key]): ServiceFactory[Req, Rep] =
tree match {
case NameTree.Neg | NameTree.Fail | NameTree.Empty => noBrokersAvailableFactory
case NameTree.Leaf(key) => Leaf(key)
// it's an invariant of Namer.bind that it returns no Alts
case NameTree.Alt(_*) => Failed(new IllegalArgumentException("NameTreeFactory"))
case NameTree.Union(weightedTrees@_*) =>
val (weights, trees) = weightedTrees.unzip { case NameTree.Weighted(w, t) => (w, t) }
Weighted(Drv.fromWeights(weights), trees.map(factoryOfTree))
}
factoryOfTree(tree)
}
}
/**
* A factory that routes to the local binding of the passed-in
* [[com.twitter.finagle.Path Path]]. It calls `newFactory` to mint a
* new [[com.twitter.finagle.ServiceFactory ServiceFactory]] for novel
* name evaluations.
*
* A three-level caching scheme is employed for efficiency:
*
* First, the [[ServiceFactory]] for a [[Path]] is cached by the local
* [[com.twitter.finagle.Dtab Dtab]]. This permits sharing in the
* common case that no local [[Dtab]] is given. (It also papers over the
* mutability of [[Dtab.base]].)
*
* Second, the [[ServiceFactory]] for a [[Path]] (relative to a
* [[Dtab]]) is cached by the [[com.twitter.finagle.NameTree
* NameTree]] it is bound to by that [[Dtab]]. Binding a path results
* in an [[com.twitter.util.Activity Activity]], so this cache permits
* sharing when the same tree is returned in different updates of the
* [[Activity]]. (In particular it papers over nuisance updates of the
* [[Activity]] where the value is unchanged.)
*
* Third, the ServiceFactory for a [[com.twitter.finagle.Name.Bound
* Name.Bound]] appearing in a [[NameTree]] is cached by its
* [[Name.Bound]]. This permits sharing when the same [[Name.Bound]]
* appears in different [[NameTree]]s (or the same [[NameTree]]
* resulting from different bindings of the [[Path]]).
*
* @bug This is far too complicated, though it seems necessary for
* efficiency when namers are occasionally overriden.
*
* @bug 'status' has a funny definition.
*/
private[finagle] class BindingFactory[Req, Rep](
path: Path,
newFactory: Name.Bound => ServiceFactory[Req, Rep],
baseDtab: () => Dtab = BindingFactory.DefaultBaseDtab,
statsReceiver: StatsReceiver = NullStatsReceiver,
maxNameCacheSize: Int = 8,
maxNameTreeCacheSize: Int = 8,
maxNamerCacheSize: Int = 4,
record: (String, String) => Unit = Trace.recordBinary)
extends ServiceFactory[Req, Rep] {
private[this] val tree = NameTree.Leaf(path)
private[this] val nameCache =
new ServiceFactoryCache[Name.Bound, Req, Rep](
bound => new ServiceFactoryProxy(newFactory(bound)) {
private val boundShow = Showable.show(bound)
override def apply(conn: ClientConnection) = {
record("namer.name", boundShow)
super.apply(conn)
}
},
statsReceiver.scope("namecache"),
maxNameCacheSize)
private[this] val nameTreeCache =
new ServiceFactoryCache[NameTree[Name.Bound], Req, Rep](
tree => new ServiceFactoryProxy(NameTreeFactory(path, tree, nameCache)) {
private val treeShow = tree.show
override def apply(conn: ClientConnection) = {
record("namer.tree", treeShow)
super.apply(conn)
}
},
statsReceiver.scope("nametreecache"),
maxNameTreeCacheSize)
private[this] val dtabCache = {
val latencyStat = statsReceiver.stat("bind_latency_us")
val newFactory: ((Dtab, Dtab)) => ServiceFactory[Req, Rep] = { case (baseDtab, localDtab) =>
val factory = new DynNameFactory(
(baseDtab ++ localDtab).bind(tree),
nameTreeCache)
new ServiceFactoryProxy(factory) {
private val pathShow = path.show
private val baseDtabShow = baseDtab.show
override def apply(conn: ClientConnection) = {
val elapsed = Stopwatch.start()
record("namer.path", pathShow)
record("namer.dtab.base", baseDtabShow)
// dtab.local is annotated on the client & server tracers.
super.apply(conn) rescue {
// DynNameFactory wraps naming exceptions for tracing
case f@Failure(maybeExc) if f.isFlagged(Failure.Naming) =>
record("namer.failure", maybeExc.getOrElse(f.show).getClass.getName)
Future.exception(f)
// we don't have the dtabs handy at the point we throw
// the exception; fill them in on the way out
case e: NoBrokersAvailableException =>
Future.exception(new NoBrokersAvailableException(e.name, baseDtab, localDtab))
} respond { _ =>
latencyStat.add(elapsed().inMicroseconds)
}
}
}
}
new ServiceFactoryCache[(Dtab, Dtab), Req, Rep](
newFactory,
statsReceiver.scope("dtabcache"),
maxNamerCacheSize)
}
def apply(conn: ClientConnection): Future[Service[Req, Rep]] =
dtabCache((baseDtab(), Dtab.local), conn)
def close(deadline: Time) =
Closable.sequence(dtabCache, nameTreeCache, nameCache).close(deadline)
override def status = dtabCache.status((baseDtab(), Dtab.local))
}
object BindingFactory {
val role = Stack.Role("Binding")
/**
* A class eligible for configuring a
* [[com.twitter.finagle.Stackable]]
* [[com.twitter.finagle.factory.BindingFactory]] with a destination
* [[com.twitter.finagle.Name]] to bind.
*/
case class Dest(dest: Name) {
def mk(): (Dest, Stack.Param[Dest]) =
(this, Dest.param)
}
object Dest {
implicit val param = Stack.Param(Dest(Name.Path(Path.read("/$/fail"))))
}
private[finagle] val DefaultBaseDtab = () => Dtab.base
/**
* A class eligible for configuring a [[com.twitter.finagle.Stackable]]
* [[com.twitter.finagle.factory.BindingFactory]] with a
* [[com.twitter.finagle.Dtab]].
*/
case class BaseDtab(baseDtab: () => Dtab) {
def mk(): (BaseDtab, Stack.Param[BaseDtab]) =
(this, BaseDtab.param)
}
object BaseDtab {
implicit val param = Stack.Param(BaseDtab(DefaultBaseDtab))
}
/**
* Base type for BindingFactory modules. Implementers may handle
* bound residual paths in a protocol-specific way.
*
* The module creates a new `ServiceFactory` based on the module
* above it for each distinct [[com.twitter.finagle.Name.Bound]]
* resolved from `BindingFactory.Dest` (with caching of previously
* seen `Name.Bound`s).
*/
private[finagle] trait Module[Req, Rep] extends Stack.Module[ServiceFactory[Req, Rep]] {
val role = BindingFactory.role
val description = "Bind destination names to endpoints"
val parameters = Seq(
implicitly[Stack.Param[BaseDtab]],
implicitly[Stack.Param[Dest]],
implicitly[Stack.Param[Label]],
implicitly[Stack.Param[Stats]])
/**
* A request filter that is aware of the bound residual path.
*
* The returned filter is applied around the ServiceFactory built from the rest of the stack.
*/
protected[this] def boundPathFilter(path: Path): Filter[Req, Rep, Req, Rep]
def make(params: Stack.Params, next: Stack[ServiceFactory[Req, Rep]]) = {
val Label(label) = params[Label]
val Stats(stats) = params[Stats]
val Dest(dest) = params[Dest]
def newStack(errorLabel: String, bound: Name.Bound) = {
val client = next.make(
params +
// replace the possibly unbound Dest with the definitely bound
// Dest because (1) it's needed by AddrMetadataExtraction and
// (2) it seems disingenuous not to.
Dest(bound) +
LoadBalancerFactory.Dest(bound.addr) +
LoadBalancerFactory.ErrorLabel(errorLabel))
boundPathFilter(bound.path) andThen client
}
val factory = dest match {
case bound@Name.Bound(addr) => newStack(label, bound)
case Name.Path(path) =>
val BaseDtab(baseDtab) = params[BaseDtab]
new BindingFactory(path, newStack(path.show, _), baseDtab, stats.scope("namer"))
}
Stack.Leaf(role, factory)
}
}
/**
* Creates a [[com.twitter.finagle.Stackable]]
* [[com.twitter.finagle.factory.BindingFactory]].
*
* Ignores bound residual paths.
*/
private[finagle] def module[Req, Rep]: Stackable[ServiceFactory[Req, Rep]] =
new Module[Req, Rep] {
private[this] val f = Filter.identity[Req, Rep]
protected[this] def boundPathFilter(path: Path) = f
}
}
| travisbrown/finagle | finagle-core/src/main/scala/com/twitter/finagle/factory/BindingFactory.scala | Scala | apache-2.0 | 13,553 |
/**
* Copyright (C) 2016 DANS - Data Archiving and Networked Services (info@dans.knaw.nl)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nl.knaw.dans.easy.license
// TODO replace this object with a 'commons-library-call' (see also EASY-Stage-FileItem)
object FileAccessRight extends Enumeration {
type FileAccessRight = Value
val
ANONYMOUS,
KNOWN,
RESTRICTED_REQUEST,
RESTRICTED_GROUP,
NONE
= Value
def valueOf(s: String) = FileAccessRight.values.find(_.toString == s)
}
| rvanheest-DANS-KNAW/easy-license-creator | src/main/scala/nl/knaw/dans/easy/license/FileAccessRight.scala | Scala | apache-2.0 | 1,020 |
package me.yingrui.segment.math
import scala.util.Random
class DenseMatrixBuilder extends MatrixBuilder {
override def vector(data: Seq[Double]): Matrix = new DenseMatrix(1, data.length, data.toArray)
override def apply(data: Array[Double]): Matrix = vector(data)
override def apply(data: Seq[Double]): Matrix = vector(data)
override def apply(row: Int, col: Int): Matrix = new DenseMatrix(row, col, new Array[Double](row * col))
override def apply(size: Int, identity: Boolean): Matrix = {
val m = new DenseMatrix(size, size, new Array[Double](size * size))
if (identity) {
0 until size foreach ((i: Int) => { m(i, i) = 1D })
}
m
}
override def apply(data: Array[Array[Double]]): Matrix = new DenseMatrix(data.length, data(0).length, data.flatten.toArray)
override def apply(row: Int, col: Int, data: Array[Double]): Matrix = new DenseMatrix(row, col, data)
override def applyBoolean(row: Int, col: Int, data: Array[Boolean]): Matrix = new DenseMatrix(row, col, data.map(b => if (b) 1D else -1D))
override def randomize(row: Int, col: Int, min: Double, max: Double) = {
val data = new Array[Double](row * col)
for (i <- 0 until data.length) {
data(i) = (Math.random() * (max - min)) + min
}
apply(row, col, data)
}
override def randomize(row: Int, col: Int): Matrix = {
val data = (0 until row * col).map(i => 1e-5 * Random.nextInt(100).toDouble)
apply(row, col, data.toArray)
}
}
| yingrui/mahjong | lib-segment/src/main/scala/me/yingrui/segment/math/DenseMatrixBuilder.scala | Scala | gpl-3.0 | 1,473 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package org.apache.toree.magic.builtin
import org.scalatest.mock.MockitoSugar
import org.scalatest.{Matchers, FunSpec}
class BuiltinLoaderSpec extends FunSpec with Matchers with MockitoSugar {
describe("BuiltinLoader") {
describe("#getClasses") {
it("should return classes in a package") {
val pkg = this.getClass.getPackage.getName
val classes = new BuiltinLoader().getClasses(pkg)
classes.size shouldNot be(0)
}
}
describe("#loadClasses") {
it("should return class objects for classes in a package") {
val pkg = this.getClass.getPackage.getName
val classes = new BuiltinLoader().loadClasses(pkg).toList
classes.contains(this.getClass) should be (true)
}
}
}
}
| chipsenkbeil/incubator-toree | kernel/src/test/scala/org/apache/toree/magic/builtin/BuiltinLoaderSpec.scala | Scala | apache-2.0 | 1,571 |
package github.gphat
import java.util.concurrent.atomic.AtomicInteger
import scala.concurrent.{ExecutionContext,Future,Promise}
import scala.concurrent.duration._
import scala.util.Try
/** The result of an experiment.
*
* Please note that the Result does not test equality of the underlying `Future`'s
* results, it merely checks that the `Try`s therein didn't fail.
*
* @constructor Create a new result
* @param name the name of the experiment that generated this result
* @param control the future for the control
* @param candidate the future for the candidate
* @param controlDuration the amount of time for the control to complete
* @param candidateDuration the amount of time for the candidate to complete
* @param succeeded convenience boolean that signals if both future's underlying `Try`s were successful
*/
case class Result[A](
name: Option[String],
control: Future[A],
candidate: Future[A],
controlDuration: Duration,
candidateDuration: Duration,
succeeded: Boolean
) {
/** Determine if our result equal each other. Note that this only work if the
* equals method works for the type `A`.
*/
def equalled = succeeded && control.value.get.equals(candidate.value.get)
}
/** An experiment!
*
* @constructor Create a new experiment
* @param name an optional name, good for naming metrics or emitting logs, help future you remember what this is!
* @param control the control future that you want to verify against
* @param candidateEnd the candidate future you are testing out
* @param xc an optional execution context, uses `ExeuectionContext.global` by default
*/
class Experiment[A](
val name: Option[String] = None,
val control: Future[A],
val candidate: Future[A]
)(implicit xc: ExecutionContext = ExecutionContext.global) {
// Keep up with how many have completed
val counter = new AtomicInteger(2)
// times
val experimentBegin = System.currentTimeMillis
var controlEnd: Option[Long] = None
var candidateEnd: Option[Long] = None
// Promises to watch
val promise = Promise[A]()
val experimentPromise = Promise[Result[A]]()
/** Get a Future that is completed when both the control and candidate have
* completed.
*/
def getTotalFuture = experimentPromise.future
/** Begin the experiment, returning a Future that completes when the control
* completes so that you can return a result from it regardless of how slow
* or fast the candidate might be.
*/
def perform = {
// Install a handler on both futures
control.onComplete(measureControl)
candidate.onComplete(measureCandidate)
// Tie our returned future to the control
promise.completeWith(control)
// Give 'em the future
promise.future
}
private def measureCandidate(result: Try[A]) = {
candidateEnd = Some(System.currentTimeMillis)
measure
}
private def measureControl(result: Try[A]) = {
controlEnd = Some(System.currentTimeMillis)
measure
}
private def measure = {
counter.synchronized {
// Increment our counter. If we hit zero then
// we're done and can complete the experiment.
if(counter.decrementAndGet == 0) {
experimentPromise.success(Result[A](
name = name,
control = control,
candidate = candidate,
// Safe cuz we know we set both end times above!
controlDuration = Duration(controlEnd.get - experimentBegin, MILLISECONDS),
candidateDuration = Duration(candidateEnd.get - experimentBegin, MILLISECONDS),
succeeded = control.value.map({ _.isSuccess }).getOrElse(false)
&& candidate.value.map({ _.isSuccess }).getOrElse(false)
))
}
}
}
}
| gphat/provost | src/main/scala/github/gphat/provost/Experiment.scala | Scala | mit | 3,726 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.analysis.SimpleAnalyzer
import org.apache.spark.sql.catalyst.expressions.objects.AssertNotNull
import org.apache.spark.sql.catalyst.plans.logical.{LocalRelation, Project}
import org.apache.spark.sql.types._
class NullExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
def testAllTypes(testFunc: (Any, DataType) => Unit): Unit = {
testFunc(false, BooleanType)
testFunc(1.toByte, ByteType)
testFunc(1.toShort, ShortType)
testFunc(1, IntegerType)
testFunc(1L, LongType)
testFunc(1.0F, FloatType)
testFunc(1.0, DoubleType)
testFunc(Decimal(1.5), DecimalType(2, 1))
testFunc(new java.sql.Date(10), DateType)
testFunc(new java.sql.Timestamp(10), TimestampType)
testFunc("abcd", StringType)
}
test("isnull and isnotnull") {
testAllTypes { (value: Any, tpe: DataType) =>
checkEvaluation(IsNull(Literal.create(value, tpe)), false)
checkEvaluation(IsNotNull(Literal.create(value, tpe)), true)
checkEvaluation(IsNull(Literal.create(null, tpe)), true)
checkEvaluation(IsNotNull(Literal.create(null, tpe)), false)
}
}
test("AssertNotNUll") {
val ex = intercept[RuntimeException] {
evaluate(AssertNotNull(Literal(null), Seq.empty[String]))
}.getMessage
assert(ex.contains("Null value appeared in non-nullable field"))
}
test("IsNaN") {
checkEvaluation(IsNaN(Literal(Double.NaN)), true)
checkEvaluation(IsNaN(Literal(Float.NaN)), true)
checkEvaluation(IsNaN(Literal(math.log(-3))), true)
checkEvaluation(IsNaN(Literal.create(null, DoubleType)), false)
checkEvaluation(IsNaN(Literal(Double.PositiveInfinity)), false)
checkEvaluation(IsNaN(Literal(Float.MaxValue)), false)
checkEvaluation(IsNaN(Literal(5.5f)), false)
}
test("nanvl") {
checkEvaluation(NaNvl(Literal(5.0), Literal.create(null, DoubleType)), 5.0)
checkEvaluation(NaNvl(Literal.create(null, DoubleType), Literal(5.0)), null)
checkEvaluation(NaNvl(Literal.create(null, DoubleType), Literal(Double.NaN)), null)
checkEvaluation(NaNvl(Literal(Double.NaN), Literal(5.0)), 5.0)
checkEvaluation(NaNvl(Literal(Double.NaN), Literal.create(null, DoubleType)), null)
assert(NaNvl(Literal(Double.NaN), Literal(Double.NaN)).
eval(EmptyRow).asInstanceOf[Double].isNaN)
}
test("coalesce") {
testAllTypes { (value: Any, tpe: DataType) =>
val lit = Literal.create(value, tpe)
val nullLit = Literal.create(null, tpe)
checkEvaluation(Coalesce(Seq(nullLit)), null)
checkEvaluation(Coalesce(Seq(lit)), value)
checkEvaluation(Coalesce(Seq(nullLit, lit)), value)
checkEvaluation(Coalesce(Seq(nullLit, lit, lit)), value)
checkEvaluation(Coalesce(Seq(nullLit, nullLit, lit)), value)
}
}
test("SPARK-16602 Nvl should support numeric-string cases") {
def analyze(expr: Expression): Expression = {
val relation = LocalRelation()
SimpleAnalyzer.execute(Project(Seq(Alias(expr, "c")()), relation)).expressions.head
}
val intLit = Literal.create(1, IntegerType)
val doubleLit = Literal.create(2.2, DoubleType)
val stringLit = Literal.create("c", StringType)
val nullLit = Literal.create(null, NullType)
val floatNullLit = Literal.create(null, FloatType)
val floatLit = Literal.create(1.01f, FloatType)
val timestampLit = Literal.create("2017-04-12", TimestampType)
val decimalLit = Literal.create(10.2, DecimalType(20, 2))
assert(analyze(new Nvl(decimalLit, stringLit)).dataType == StringType)
assert(analyze(new Nvl(doubleLit, decimalLit)).dataType == DoubleType)
assert(analyze(new Nvl(decimalLit, doubleLit)).dataType == DoubleType)
assert(analyze(new Nvl(decimalLit, floatLit)).dataType == DoubleType)
assert(analyze(new Nvl(floatLit, decimalLit)).dataType == DoubleType)
assert(analyze(new Nvl(timestampLit, stringLit)).dataType == StringType)
assert(analyze(new Nvl(intLit, doubleLit)).dataType == DoubleType)
assert(analyze(new Nvl(intLit, stringLit)).dataType == StringType)
assert(analyze(new Nvl(stringLit, doubleLit)).dataType == StringType)
assert(analyze(new Nvl(doubleLit, stringLit)).dataType == StringType)
assert(analyze(new Nvl(nullLit, intLit)).dataType == IntegerType)
assert(analyze(new Nvl(doubleLit, nullLit)).dataType == DoubleType)
assert(analyze(new Nvl(nullLit, stringLit)).dataType == StringType)
assert(analyze(new Nvl(floatLit, stringLit)).dataType == StringType)
assert(analyze(new Nvl(floatLit, doubleLit)).dataType == DoubleType)
assert(analyze(new Nvl(floatNullLit, intLit)).dataType == FloatType)
}
test("AtLeastNNonNulls") {
val mix = Seq(Literal("x"),
Literal.create(null, StringType),
Literal.create(null, DoubleType),
Literal(Double.NaN),
Literal(5f))
val nanOnly = Seq(Literal("x"),
Literal(10.0),
Literal(Float.NaN),
Literal(math.log(-2)),
Literal(Double.MaxValue))
val nullOnly = Seq(Literal("x"),
Literal.create(null, DoubleType),
Literal.create(null, DecimalType.USER_DEFAULT),
Literal(Float.MaxValue),
Literal(false))
checkEvaluation(AtLeastNNonNulls(2, mix), true, EmptyRow)
checkEvaluation(AtLeastNNonNulls(3, mix), false, EmptyRow)
checkEvaluation(AtLeastNNonNulls(3, nanOnly), true, EmptyRow)
checkEvaluation(AtLeastNNonNulls(4, nanOnly), false, EmptyRow)
checkEvaluation(AtLeastNNonNulls(3, nullOnly), true, EmptyRow)
checkEvaluation(AtLeastNNonNulls(4, nullOnly), false, EmptyRow)
}
test("Coalesce should not throw 64kb exception") {
val inputs = (1 to 2500).map(x => Literal(s"x_$x"))
checkEvaluation(Coalesce(inputs), "x_1")
}
test("AtLeastNNonNulls should not throw 64kb exception") {
val inputs = (1 to 4000).map(x => Literal(s"x_$x"))
checkEvaluation(AtLeastNNonNulls(1, inputs), true)
}
}
| adrian-ionescu/apache-spark | sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/NullExpressionsSuite.scala | Scala | apache-2.0 | 6,807 |
/* Copyright 2009-2016 EPFL, Lausanne */
object Sequencing4 {
def test(): Int = {
var x = 5
{x = x + 1; x} + {x = x * 2; x}
} ensuring(res => res == 18)
}
| epfl-lara/leon | src/test/resources/regression/verification/xlang/valid/Sequencing4.scala | Scala | gpl-3.0 | 172 |
package com.rikmuld.camping.common.inventory
import com.rikmuld.camping.CampingMod
import com.rikmuld.corerm.gui.slots.{SlotDisable, SlotNot}
import net.minecraft.inventory.{IInventory, Slot}
import net.minecraft.item.Item
class SlotBackpack(inv: IInventory, index: Int, x:Int, y:Int, active: Boolean) extends Slot(inv, index, x, y) with SlotDisable with SlotNot {
if(!active) disable()
override def getBanItems: Vector[Item] =
Vector(CampingMod.OBJ.backpack)
} | Rikmuld/MC-Camping | scala/com/rikmuld/camping/common/inventory/SlotBackpack.scala | Scala | gpl-3.0 | 472 |
package skinny.controller.feature
import skinny.engine.context.SkinnyEngineContext
import skinny._
import skinny.view.velocity._
/**
* Velocity template engine support.
*/
trait VelocityTemplateEngineFeature extends TemplateEngineFeature {
lazy val sbtProjectPath: Option[String] = None
lazy val velocity: Velocity =
Velocity(VelocityViewConfig.viewWithServletContext(servletContext, sbtProjectPath))
val velocityExtension: String = "vm"
override protected def templatePaths(path: String)(implicit format: Format = Format.HTML): List[String] = {
List(templatePath(path)(context, format))
}
protected def templatePath(path: String)(
implicit ctx: SkinnyEngineContext, format: Format = Format.HTML): String = {
s"${path}.${format.name}.${velocityExtension}".replaceAll("//", "/")
}
override protected def templateExists(path: String)(implicit format: Format = Format.HTML): Boolean = {
velocity.templateExists(templatePath(path)(context, format))
}
override protected def renderWithTemplate(path: String)(
implicit ctx: SkinnyEngineContext, format: Format = Format.HTML): String = {
velocity.render(templatePath(path)(ctx, format), requestScope(ctx).toMap, ctx.request, ctx.response)
}
}
| holycattle/skinny-framework | velocity/src/main/scala/skinny/controller/feature/VelocityTemplateEngineFeature.scala | Scala | mit | 1,248 |
package postgresweb.controllers
import ch.wsl.model.shared._
import japgolly.scalajs.react.{CallbackTo, Callback, ReactElement}
import postgresweb.model.Menu
import postgresweb.services.TableClient
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.scalajs.js
/**
* Created by andreaminetti on 14/03/16.
*/
class TableController extends CRUDController{
private def client = TableClient(container.model)
override def load(jq: JSONQuery): Future[Table] = client.Helpers.filter2table(jq)
override def get(id: String): Future[js.Any] = client.get(id)
override def schemaAsString: Future[String] = client.schema
override def uiSchema: Future[JSONSchemaUI] = client.form.map(JSONSchemaUI.fromJSONFields)
override def get: Future[js.Any] = client.get(id)
override def onUpdate(data: js.Any): Callback = CallbackTo(client.update(id,data))
override def onInsert(data: js.Any): Callback = CallbackTo(client.insert(data))
override def leftMenu: Future[Vector[String]] = TableClient.models()
override def leftMenuTitle: String = "Tables"
}
| minettiandrea/postgres-restify | client/src/main/scala/postgresweb/controllers/TableController.scala | Scala | apache-2.0 | 1,118 |
package streams
import common._
/**
* This component implements a parser to define terrains from a
* graphical ASCII representation.
*
* When mixing in that component, a level can be defined by
* defining the field `level` in the following form:
*
* val level =
* """------
* |--ST--
* |--oo--
* |--oo--
* |------""".stripMargin
*
* - The `-` character denotes parts which are outside the terrain
* - `o` denotes fields which are part of the terrain
* - `S` denotes the start position of the block (which is also considered
inside the terrain)
* - `T` denotes the final position of the block (which is also considered
inside the terrain)
*
* In this example, the first and last lines could be omitted, and
* also the columns that consist of `-` characters only.
*/
trait StringParserTerrain extends GameDef {
/**
* A ASCII representation of the terrain. This field should remain
* abstract here.
*/
val level: String
/**
* This method returns terrain function that represents the terrain
* in `levelVector`. The vector contains parsed version of the `level`
* string. For example, the following level
*
* val level =
* """ST
* |oo
* |oo""".stripMargin
*
* is represented as
*
* Vector(Vector('S', 'T'), Vector('o', 'o'), Vector('o', 'o'))
*
* The resulting function should return `true` if the position `pos` is
* a valid position (not a '-' character) inside the terrain described
* by `levelVector`.
*/
def terrainFunction(levelVector: Vector[Vector[Char]]): Pos => Boolean = {
case Pos(x,y) => {
if(levelVector.size <= x||x<0) false
else if(levelVector(x).size <= y||y<0) false
else Array('S','T','o').contains(levelVector(x)(y))
}
}
/**
* This function should return the position of character `c` in the
* terrain described by `levelVector`. You can assume that the `c`
* appears exactly once in the terrain.
*
* Hint: you can use the functions `indexWhere` and / or `indexOf` of the
* `Vector` class
*/
def findChar(c: Char, levelVector: Vector[Vector[Char]]): Pos = {
val ys = levelVector map ( _ indexOf c )
val x = ys indexWhere (_ >= 0)
Pos( x, ys(x) )
}
private lazy val vector: Vector[Vector[Char]] =
Vector(level.split("\\n").map(str => Vector(str: _*)): _*)
lazy val terrain: Terrain = terrainFunction(vector)
lazy val startPos: Pos = findChar('S', vector)
lazy val goal: Pos = findChar('T', vector)
}
| foomorrow/coursera-scala | streams/src/main/scala/streams/StringParserTerrain.scala | Scala | gpl-2.0 | 2,559 |
package logful.server
import java.util.concurrent.TimeUnit
import io.gatling.core.Predef._
import logful.server.config.LogFileReqConfig
import scala.concurrent.duration.FiniteDuration
class SmallLogConstantSimulation extends Simulation {
val usersPerSec = 1000
val time = 60
val during = new FiniteDuration(time, TimeUnit.SECONDS)
val second = during.toSeconds
val c = new LogFileReqConfig((usersPerSec * during.toSeconds).toInt)
setUp(c.scn.inject(constantUsersPerSec(usersPerSec).during(during)).protocols(c.httpProtocol))
}
| foxundermoon/gatling-test | src/gatling/scala/logful/server/SmallLogConstantSimulation.scala | Scala | mit | 546 |
/*
* SelectFactory.scala
* Methods to create factors for Select and Dist elements.
*
* Created By: Glenn Takata (gtakata@cra.com)
* Creation Date: Dec 15, 2014
*
* Copyright 2014 Avrom J. Pfeffer and Charles River Analytics, Inc.
* See http://www.cra.com or email figaro@cra.com for information.
*
* See http://www.github.com/p2t2/figaro for a copy of the software license.
*/
package com.cra.figaro.algorithm.factored.factors.factory
import com.cra.figaro.language._
import com.cra.figaro.algorithm.factored.factors._
import com.cra.figaro.library.compound._
import com.cra.figaro.util._
import com.cra.figaro.algorithm.lazyfactored._
import com.cra.figaro.algorithm.structured.ComponentCollection
import com.cra.figaro.algorithm.lazyfactored.Star
/**
* A Sub-Factory for Select or Dist Elements
*/
object SelectFactory {
/**
* Factor constructor for an AtomicDistFlip
*/
def makeFactors[T](cc: ComponentCollection, dist: AtomicDist[T]): List[Factor[Double]] = {
val (intermed, clauseFactors) = intermedAndClauseFactors(cc, dist)
val intermedFactor = makeSimpleDistribution(intermed, dist.probs)
intermedFactor :: clauseFactors
}
/**
* Factor constructor for a CompoundDist
*/
def makeFactors[T](cc: ComponentCollection, dist: CompoundDist[T]): List[Factor[Double]] = {
val (intermed, clauseFactors) = intermedAndClauseFactors(cc, dist)
val probVars = dist.probs map (Factory.getVariable(cc, _))
val intermedFactor = makeComplexDistribution(cc, intermed, probVars)
intermedFactor :: clauseFactors
}
/**
* Factor constructor for an AtomicSelect
*/
def makeFactors[T](cc: ComponentCollection, select: AtomicSelect[T]): List[Factor[Double]] = {
val selectVar = Factory.getVariable(cc, select)
if (selectVar.range.exists(!_.isRegular)) {
assert(selectVar.range.size == 1) // Select's range must either be a list of regular values or {*}
StarFactory.makeStarFactor(cc, select)
} else {
val probs = getProbs(cc, select)
List(makeSimpleDistribution(selectVar, probs))
}
}
/**
* Factor constructor for a CompoundSelect
*/
def makeFactors[T](cc: ComponentCollection, select: CompoundSelect[T]): List[Factor[Double]] = {
val selectVar = Factory.getVariable(cc, select)
val probs = getProbs(cc, select)
val probVars = probs map (Factory.getVariable(cc, _))
List(makeComplexDistribution(cc, selectVar, probVars))
}
/**
* Factor constructor for a ParameterizedSelect
*/
def makeFactors[T](cc: ComponentCollection, select: ParameterizedSelect[T], parameterized: Boolean): List[Factor[Double]] = {
if (parameterized) {
val selectVar = Factory.getVariable(cc, select)
val probs = parameterizedGetProbs(cc, select)
List(makeSimpleDistribution(selectVar, probs))
} else {
val selectVar: Variable[T] = Factory.getVariable(cc, select)
if (selectVar.range.exists(!_.isRegular)) {
// If the select has * in its range, the parameter must not have been added (because the parameter is an atomic beta)
val factor = new BasicFactor[Double](List(), List(selectVar))
for { (selectXval, i) <- selectVar.range.zipWithIndex } {
val entry = if (selectXval.isRegular) 0.0 else 1.0
factor.set(List(i), entry)
}
List(factor)
} else {
val paramVar: Variable[Array[Double]] = Factory.getVariable(cc, select.parameter)
val factor = new BasicFactor[Double](List(paramVar), List(selectVar))
for {
(paramVal, paramIndex) <- paramVar.range.zipWithIndex
(selectVal, selectIndex) <- selectVar.range.zipWithIndex
} {
val entry = paramVal.value(selectIndex)
factor.set(List(paramIndex, selectIndex), entry)
}
List(factor)
}
}
}
/**
* Factor constructor for an IntSelector
*/
def makeFactors[T](cc: ComponentCollection, select: IntSelector): List[Factor[Double]] = {
val elementVar = Factory.getVariable(cc, select)
val counterVar = Factory.getVariable(cc, select.counter)
val comb = new BasicFactor[Double](List(counterVar), List(elementVar))
comb.fillByRule((l: List[Any]) => {
val counterValue :: elementValue :: _ = l.asInstanceOf[List[Extended[Int]]]
if (counterValue.isRegular && elementValue.isRegular) {
if (elementValue.value < counterValue.value) 1.0 / counterValue.value; else 0.0
} else 1.0
})
List(comb)
}
private def getProbs[U, T](cc: ComponentCollection, select: Select[U, T]): List[U] = getProbs(cc, select, select.clauses)
/**
* Get the potential (probability) for each value of an element, based on supplied rules
*/
def getProbs[U, T](cc: ComponentCollection, elem: Element[T], clauses: List[(U, T)]): List[U] = {
val selectVar = Factory.getVariable(cc, elem)
def getProb(xvalue: Extended[T]): U = {
clauses.find(_._2 == xvalue.value).get._1 // * cannot be a value of a Select
}
val probs =
for { xvalue <- selectVar.range.filter(_.isRegular) } yield getProb(xvalue)
probs
}
private def parameterizedGetProbs[T](cc: ComponentCollection, select: ParameterizedSelect[T]): List[Double] = {
val outcomes = select.outcomes
val map = select.parameter.MAPValue
for {
xvalue <- Factory.getVariable(cc, select).range
} yield {
if (xvalue.isRegular) map(outcomes.indexOf(xvalue.value)) else 0.0
}
}
private def intermedAndClauseFactors[U, T](cc: ComponentCollection, dist: Dist[U, T]): (Variable[Int], List[Factor[Double]]) = {
val intermed = Factory.makeVariable(cc, ValueSet.withoutStar((0 until dist.clauses.size).toSet))
val distVar = Factory.getVariable(cc, dist)
val (pairVar, pairFactor) = Factory.makeTupleVarAndFactor(cc, None, intermed, distVar)
val clauseFactors = dist.outcomes.zipWithIndex map (pair =>
Factory.makeConditionalSelector(pairVar, Regular(pair._2), Factory.getVariable(cc, pair._1), Set()))
(intermed, pairFactor :: clauseFactors)
}
/**
* Constructs a BasicFactor from a probability distribution. It assumes that the probabilities
* are assigned to the Variable in the same order as it's values.
*/
def makeSimpleDistribution[T](target: Variable[T], probs: List[Double]): Factor[Double] = {
val factor = new BasicFactor[Double](List(), List(target))
for { (prob, index) <- probs.zipWithIndex } {
factor.set(List(index), prob)
}
factor
}
/*
* When one of the probability elements includes * in its range, so will the target element.
* This is necessary because when the value of any of the probability elements is *, the normalizing factor is unknown in this case,
* so we cannot assign a specific probability to any of the regular values. Instead, we assign probability 1 to *.
* The code in the method below is designed to take into account this case correctly.
*/
private def makeComplexDistribution[T](cc: ComponentCollection, target: Variable[T], probVars: List[Variable[Double]]): Factor[Double] = {
val nVars = probVars.size
val factor = new BasicFactor[Double](probVars, List(target))
val probVals: List[List[Extended[Double]]] = probVars map (_.range)
if (target.range.forall(_.isRegular)) {
/*
* This is the easy case. For each list of indices to the factor, the first nVars indices will be indices into the range of the
* probability variables, while the last index will be the index into the range of the target variable.
* The correct probability is the value of the probability element in the given position with the appropriate index.
* Because the probabilities may vary, we need to normalize them before putting in the factor.
*
* Note that the variables in the factor are ordered with the probability variables first and the target variable last.
* But in probVals, the first index is the position into the target variable, and only then do we have the probability indices.
*/
for { indices <- factor.getIndices } {
val unnormalized =
for { (probIndex, position) <- indices.toList.take(nVars).zipWithIndex } yield {
val xprob = probVals(position)(probIndex) // The probability of the particular value of the probability element in this position
xprob.value
}
val normalized = normalize(unnormalized).toArray
factor.set(indices, normalized(indices.last))
}
} else {
/*
* In this case, the range of the target includes *. We do not assume any particular location in the range,
* so we set targetStarIndex to the correct location.
* Now, the indices to the factor will have nVar entries for each of the probability variables, plus one for the target.
* When we get the entries for the probability variables, we get nVars. But the range of the target is nVars plus one,
* because it includes *. So we extend the indices to probPlusStarIndices, with the targetStarIndex spliced in the correct place.
*
* Next, we distinguish between two cases. In the first case, one of the probability variables is *, so we assign probability 1
* to the target being * and probability 0 everywhere. In the other case, all probability variables are regular, so we
* assign the appropriate probability to the each regular position. We are careful to get the value of the
* probability variable from its original position in the indices, and make sure we don't get the value of *.
*/
val targetStarIndex: Int = target.range.indexWhere(!_.isRegular)
for { indices <- factor.getIndices } {
val probIndices: List[Int] = indices.toList.take(nVars)
val probPlusStarIndices: List[Int] =
probIndices.slice(0, targetStarIndex) ::: List(indices(targetStarIndex)) ::: probIndices.slice(targetStarIndex, probIndices.length)
val allProbsRegularFlags: List[Boolean] =
for { (probPlusStarIndex, position) <- probPlusStarIndices.zipWithIndex } yield {
if (position < targetStarIndex) probVals(position)(probPlusStarIndex).isRegular
else if (position == targetStarIndex) true
else probVals(position)(probPlusStarIndex - 1).isRegular
}
val allProbsRegular: Boolean = allProbsRegularFlags.forall((b: Boolean) => b)
val unnormalized: List[Double] =
for { (probPlusStarIndex, position) <- probPlusStarIndices.zipWithIndex } yield {
val xprob: Extended[Double] =
if (position < targetStarIndex) probVals(position)(probPlusStarIndex)
else if (position == targetStarIndex) Star[Double]
else probVals(position)(probPlusStarIndex - 1)
if (allProbsRegular) {
if (position != targetStarIndex) xprob.value else 0.0
} else {
if (position == targetStarIndex) 1.0 else 0.0
}
}
val normalized: Array[Double] = normalize(unnormalized).toArray
// The first variable specifies the position of the remaining variables, so indices(0) is the correct probability
factor.set(indices, normalized(indices.last))
}
}
factor
}
}
| scottcb/figaro | Figaro/src/main/scala/com/cra/figaro/algorithm/factored/factors/factory/SelectFactory.scala | Scala | bsd-3-clause | 11,316 |
package akka.persistence.kafka.journal
import akka.actor._
import akka.persistence.kafka.Event
import akka.persistence.kafka.journal.EventFormats.{EventDataFormat, EventFormat}
import akka.serialization._
import com.google.protobuf.ByteString
class KafkaEventSerializer(system: ExtendedActorSystem) extends Serializer {
def identifier: Int = 15443
def includeManifest: Boolean = false
def toBinary(o: AnyRef): Array[Byte] = o match {
case e: Event => eventFormatBuilder(e).build().toByteArray
case _ => throw new IllegalArgumentException(s"Can't serialize object of type ${o.getClass}")
}
def fromBinary(bytes: Array[Byte], manifest: Option[Class[_]]): AnyRef =
event(EventFormat.parseFrom(bytes))
def eventFormatBuilder(event: Event): EventFormat.Builder = {
val builder = EventFormat.newBuilder
builder.setPersistenceId(event.persistenceId)
builder.setSequenceNr(event.sequenceNr)
builder.setData(eventDataFormatBuilder(event.data.asInstanceOf[AnyRef]))
builder
}
def eventDataFormatBuilder(payload: AnyRef): EventDataFormat.Builder = {
val serializer = SerializationExtension(system).findSerializerFor(payload)
val builder = EventDataFormat.newBuilder()
if (serializer.includeManifest)
builder.setDataManifest(ByteString.copyFromUtf8(payload.getClass.getName))
builder.setData(ByteString.copyFrom(serializer.toBinary(payload)))
builder.setSerializerId(serializer.identifier)
builder
}
def event(eventFormat: EventFormat): Event = {
Event(
eventFormat.getPersistenceId,
eventFormat.getSequenceNr,
eventData(eventFormat.getData))
}
def eventData(eventDataFormat: EventDataFormat): Any = {
val eventDataClass = if (eventDataFormat.hasDataManifest)
Some(system.dynamicAccess.getClassFor[AnyRef](eventDataFormat.getDataManifest.toStringUtf8).get) else None
SerializationExtension(system).deserialize(
eventDataFormat.getData.toByteArray,
eventDataFormat.getSerializerId,
eventDataClass).get
}
}
| crispywalrus/akka-persistence-kafka | src/main/scala/akka/persistence/kafka/journal/KafkaEventSerializer.scala | Scala | apache-2.0 | 2,054 |
package oocamp
/**
* Created by twer on 14/11/22.
*/
class ParkingBoy(parkingLots: Array[ParkingLot]) extends BaseParkingBoy(parkingLots) {
def chooseAvailableParkingLot() = parkingLots.find(_.freeCapacity >= 1)
}
| focusj/OOCamp_Scala | src/main/scala/oocamp/ParkingBoy.scala | Scala | gpl-2.0 | 221 |
package ohnosequences.scalaguide
trait Type {
type Me = this.type
type Raw
implicit def typeOf(value: ValueOf[Me]): Me = this
}
final class ValueOf[T <: Type](val value: T#Raw) extends AnyVal {}
object Type {
type RawOf[T <: Type] = T#Raw
implicit class TypeOps[T <: Type](val t: T) {
def =>>(value: Type.RawOf[T]): ValueOf[T] = new ValueOf[T](value)
def apply(value: Type.RawOf[T]): ValueOf[T] = new ValueOf[T](value)
}
implicit def toRaw[T <: Type](value: ValueOf[T]): T#Raw = value.value
implicit class AnyOps[T](val t: T) {
def isA[TT <: Type { type Raw = T } ](tt: TT): ValueOf[TT] = new ValueOf[TT](t)
}
}
trait VertexType extends Type
object User extends VertexType {
type Raw = Int
implicit def userOps(value: ValueOf[User.type]) = new VertexValueOps[User.type](value) {
def sayHi = 23
}
}
object Dog extends VertexType {
type Raw = String
implicit def dogOps(value: ValueOf[Dog.type]) = new VertexValueOps[Dog.type](value) {
def sayHi = "fuck you Tagged"
}
}
abstract class VertexValueOps[V <: VertexType](val value: ValueOf[V]) {
def sayHi: V#Raw
}
trait useUser {
val buh = User =>> 234234
val z: Int = buh.sayHi
val doge = Dog =>> "woof"
val uho = Dog("hola")
println(doge.sayHi)
}
| ohnosequences/scala-guide | src/main/scala/taggedTypes.scala | Scala | agpl-3.0 | 1,292 |
/*
* Copyright (C) 2016 University of Basel, Graphics and Vision Research Group
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package scalismo.ui.model
import breeze.linalg.DenseVector
import scalismo.common.interpolation.NearestNeighborInterpolator3D
import scalismo.common.{DiscreteDomain, UnstructuredPointsDomain}
import scalismo.geometry.{_3D, EuclideanVector, Point}
import scalismo.statisticalmodel.{DiscreteLowRankGaussianProcess, LowRankGaussianProcess}
// This used to be a case class, but since it is extended by the discrete version, it can no longer be.
// Therefore, the copy methods have to be defined manually.
class LowRankGpPointTransformation protected (val gp: LowRankGaussianProcess[_3D, EuclideanVector[_3D]],
val coefficients: DenseVector[Double])
extends PointTransformation {
private lazy val vectorField = gp.instance(coefficients)
override def apply(point: Point[_3D]): Point[_3D] = {
point + vectorField(point)
}
def copy(coefficients: DenseVector[Double]): LowRankGpPointTransformation =
new LowRankGpPointTransformation(gp, coefficients)
}
object LowRankGpPointTransformation {
def apply(gp: LowRankGaussianProcess[_3D, EuclideanVector[_3D]],
coefficients: DenseVector[Double]): LowRankGpPointTransformation =
new LowRankGpPointTransformation(gp, coefficients)
def apply(gp: LowRankGaussianProcess[_3D, EuclideanVector[_3D]]): LowRankGpPointTransformation =
apply(gp, DenseVector.zeros[Double](gp.rank))
}
class DiscreteLowRankGpPointTransformation private (
val dgp: DiscreteLowRankGaussianProcess[_3D, UnstructuredPointsDomain, EuclideanVector[_3D]],
gp: LowRankGaussianProcess[_3D, EuclideanVector[_3D]],
coefficients: DenseVector[Double]
) extends LowRankGpPointTransformation(gp, coefficients) {
protected def this(dgp: DiscreteLowRankGaussianProcess[_3D, UnstructuredPointsDomain, EuclideanVector[_3D]],
coefficients: DenseVector[Double]) = {
this(dgp, dgp.interpolate(NearestNeighborInterpolator3D()), coefficients)
}
// no need to re-interpolate if the gp didn't change
override def copy(coefficients: DenseVector[Double]): DiscreteLowRankGpPointTransformation =
new DiscreteLowRankGpPointTransformation(dgp, gp, coefficients)
}
object DiscreteLowRankGpPointTransformation {
def apply(
dgp: DiscreteLowRankGaussianProcess[_3D, UnstructuredPointsDomain, EuclideanVector[_3D]]
): DiscreteLowRankGpPointTransformation = apply(dgp, DenseVector.zeros[Double](dgp.rank))
def apply(dgp: DiscreteLowRankGaussianProcess[_3D, UnstructuredPointsDomain, EuclideanVector[_3D]],
coefficients: DenseVector[Double]): DiscreteLowRankGpPointTransformation =
new DiscreteLowRankGpPointTransformation(dgp, coefficients)
}
| unibas-gravis/scalismo-ui | src/main/scala/scalismo/ui/model/LowRankGpPointTransformation.scala | Scala | gpl-3.0 | 3,421 |
/** Licensed to Gravity.com under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Gravity.com licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gravity.hbase.schema
import org.apache.hadoop.hbase.util.Bytes
import scala.collection._
import org.joda.time.{DateMidnight, DateTime}
import org.apache.hadoop.io.BytesWritable
import java.io._
/* )\\._.,--....,'``.
.b--. /; _.. \\ _\\ (`._ ,.
`=,-,-'~~~ `----(,_..'--(,_..'`-.;.' */
class AnyNotSupportedException() extends Exception("Any not supported")
trait AnyConverterSignal
/**
* This is the standard set of types that can be auto converted into hbase values (they work as families, columns, and values)
*/
object DefaultConverters {
def toBytesWritable[T](item: T)(implicit c: ByteConverter[T]) = {
c.toBytesWritable(item)
}
def fromBytesWritable[T](bytes: BytesWritable)(implicit c: ByteConverter[T]) = {
c.fromBytesWritable(bytes)
}
type FamilyExtractor[T <: HbaseTable[T, R, _], R, F, K, V] = (T) => ColumnFamily[T, R, F, K, V]
type ColumnExtractor[T <: HbaseTable[T, R, _], R, F, K, V] = (T) => Column[T, R, F, K, V]
implicit object AnyConverter extends ByteConverter[Any] with AnyConverterSignal {
override def toBytes(t: Any) = throw new AnyNotSupportedException()
override def fromBytes(bytes: Array[Byte], offset: Int, length: Int) = throw new AnyNotSupportedException()
}
implicit object StringConverter extends ByteConverter[String] {
override def toBytes(t: String) = Bytes.toBytes(t)
override def fromBytes(bytes: Array[Byte], offset: Int, length: Int) = Bytes.toString(bytes, offset, length)
}
implicit object StringSeqConverter extends SeqConverter[String]
implicit object StringSetConverter extends SetConverter[String]
implicit object IntConverter extends ByteConverter[Int] {
override def toBytes(t: Int) = Bytes.toBytes(t)
override def fromBytes(bytes: Array[Byte], offset: Int, length: Int) = Bytes.toInt(bytes, offset, length)
}
implicit object IntSeqConverter extends SeqConverter[Int]
implicit object IntSetConverter extends SetConverter[Int]
implicit object ShortConverter extends ByteConverter[Short] {
override def toBytes(t: Short) = Bytes.toBytes(t)
override def fromBytes(bytes: Array[Byte], offset: Int, length: Int) = Bytes.toShort(bytes, offset, length)
}
implicit object ShortSeqConverter extends SeqConverter[Short]
implicit object ShortSetConverter extends SetConverter[Short]
implicit object BooleanConverter extends ByteConverter[Boolean] {
override def toBytes(t: Boolean) = Bytes.toBytes(t)
override def fromBytes(bytes: Array[Byte], offset: Int, length: Int) = {
(bytes(offset) != 0)
}
}
implicit object BooleanSeqConverter extends SeqConverter[Boolean]
implicit object BooleanSetConverter extends SetConverter[Boolean]
implicit object LongConverter extends ByteConverter[Long] {
override def toBytes(t: Long) = Bytes.toBytes(t)
override def fromBytes(bytes: Array[Byte], offset: Int, length: Int) = Bytes.toLong(bytes, offset, length)
}
implicit object LongSeqConverter extends SeqConverter[Long]
implicit object LongSetConverter extends SetConverter[Long]
implicit object DoubleConverter extends ByteConverter[Double] {
override def toBytes(t: Double) = Bytes.toBytes(t)
override def fromBytes(bytes: Array[Byte], offset: Int, length: Int) = Bytes.toDouble(bytes, offset)
}
implicit object DoubleSeqConverter extends SeqConverter[Double]
implicit object DoubleSetConverter extends SetConverter[Double]
implicit object FloatConverter extends ByteConverter[Float] {
override def toBytes(t: Float) = Bytes.toBytes(t)
override def fromBytes(bytes: Array[Byte], offset: Int, length: Int) = Bytes.toFloat(bytes, offset)
}
implicit object FloatSeqConverter extends SeqConverter[Float]
implicit object FloatSetConverter extends SetConverter[Float]
implicit object CommaSetConverter extends ByteConverter[CommaSet] {
val SPLITTER = ",".r
override def toBytes(t: CommaSet) = Bytes.toBytes(t.items.mkString(","))
override def fromBytes(bytes: Array[Byte], offset: Int, length: Int) = new CommaSet(SPLITTER.split(Bytes.toString(bytes, offset, length)).toSet)
}
implicit object CommaSetSeqConverter extends SeqConverter[CommaSet]
implicit object CommaSetSetConverter extends SetConverter[CommaSet]
implicit object YearDayConverter extends ByteConverter[YearDay] {
val SPLITTER = "_".r
override def toBytes(t: YearDay) = Bytes.toBytes(t.year.toString + "_" + t.day.toString)
override def fromBytes(bytes: Array[Byte], offset: Int, length: Int) = {
val strRep = Bytes.toString(bytes, offset, length)
val strRepSpl = SPLITTER.split(strRep)
val year = strRepSpl(0).toInt
val day = strRepSpl(1).toInt
YearDay(year, day)
}
}
implicit object YearDaySeqConverter extends SeqConverter[YearDay]
implicit object YearDaySetConverter extends SetConverter[YearDay]
implicit object DateMidnightConverter extends ComplexByteConverter[DateMidnight] {
override def write(dm: DateMidnight, output: PrimitiveOutputStream) {
output.writeLong(dm.getMillis)
}
override def read(input: PrimitiveInputStream) = new DateMidnight(input.readLong())
def apply(year: Int, day: Int) = new DateMidnight().withYear(year).withDayOfYear(day)
}
implicit object DateTimeConverter extends ComplexByteConverter[DateTime] {
override def write(dm: DateTime, output: PrimitiveOutputStream) {
output.writeLong(dm.getMillis)
}
override def read(input: PrimitiveInputStream) = new DateTime(input.readLong())
}
implicit object DateTimeSeqConverter extends SeqConverter[DateTime]
implicit object DateTimeSetConverter extends SetConverter[DateTime]
implicit object DateMidnightSeqConverter extends SeqConverter[DateMidnight]
implicit object DateMidnightSetConverter extends SetConverter[DateMidnight]
implicit object StringLongMap extends MapConverter[String, Long]
implicit object ImmutableStringLongMap extends ImmutableMapConverter[String, Long]
implicit object MutableStringLongMap extends MutableMapConverter[String, Long]
implicit object StringStringMap extends MapConverter[String,String]
implicit object MutableStringStringMap extends MutableMapConverter[String,String]
implicit object ImmutableStringStringMap extends ImmutableMapConverter[String,String]
/*
Helper function to make byte arrays out of arbitrary values.
*/
def makeBytes(writer: (PrimitiveOutputStream) => Unit): Array[Byte] = {
val bos = new ByteArrayOutputStream()
val dataOutput = new PrimitiveOutputStream(bos)
writer(dataOutput)
bos.toByteArray
}
def makeWritable(writer: (PrimitiveOutputStream) => Unit): BytesWritable = new BytesWritable(makeBytes(writer))
def readBytes[T](bytes: Array[Byte])(reader: (PrimitiveInputStream) => T): T = {
val bis = new ByteArrayInputStream(bytes)
val dis = new PrimitiveInputStream(bis)
val results = reader(dis)
dis.close()
results
}
def readWritable[T](bytesWritable: BytesWritable)(reader: (PrimitiveInputStream) => T): T = readBytes(bytesWritable.getBytes)(reader)
} | unicredit/HPaste | src/main/scala/com/gravity/hbase/schema/DefaultConverters.scala | Scala | apache-2.0 | 7,925 |
package ch.epfl.yinyang.api
import reflect.runtime.universe._
/**
* A VarType describes the free variables of a DSL program that are used in
* compilation. Variables which aren't used in any compilation decisions should
* be marked as `NonCompVar` and will result in holes in the compiled DSL.
* There are four `CompVar` types, each with an associated guard function that
* decides when recompilation is triggered.
*
* Firstly, a variable can be static or dynamic. If it is static, then values
* with different executions always need recompilation. But if for example we
* have multiplication variants for sparse and dense matrices, then we only
* need to recompile when the new value doesn't fall into the same category.
* So variables need to be marked as dynamic if some values don't require
* recompilation but should lead to a different execution anyhow (they appear
* as variables in the compiled code instead of being fixed to the value used
* when compiling). In this case, the DSL needs to implement the function:
* LiftEvidence[T: TypeTag, Ret].mixed(v: T, hole: Ret): Ret
* The value v is used for optimization decisions (e.g. sparse vs. dense
* matrix), and the hole as variable in the generated code.
*
* The second characteristic is whether a variable is required for compilation
* or optional. For required variables, optimized code will always be
* generated. For optional variables, runtime statistics are being collected
* and optimized code is only generated when a variable is sufficiently stable,
* otherwise generic code with a variable should be generated. Optional
* variables are also represented as mixed nodes in the DSL body. The stable
* variables are passed as a set of holeIds to the generateCode method and can
* be treated like required variables, respecting their static/dynamic nature,
* and will be guarded with the provided guard function. For the unstable ones,
* generic code only using the hole and not the value of the mixed node has to
* be generated. The value will not be guarded.
*/
abstract class VarType {
/**
* Composes two VarTypes, resulting into the VarType with the higher of each
* characteristic and composed guards:
* CompVar > NonCompVar
* Dynamic > Static
* Required > Optional
* For example:
* - OptionalStaticCompVar(g).and(NonCompVar) -> OptionalStaticCompVar(g)
* - RequiredStaticCompVar(g1).and(OptionalDynamicCompVar(g2))
* -> RequiredDynamicCompVar(g1.and(g2))
*/
def and(other: VarType): VarType
}
case class NonCompVar() extends VarType {
override def and(other: VarType) = other
}
trait CompVar extends VarType {
def guard: Guard
def and(guard: Guard): VarType
}
object CompVar {
def equality(tpe: String) = RequiredStaticCompVar(List(({ v: String => v }, tpe)))
}
trait Optional
trait Required
trait Static
trait Dynamic
case class OptionalStaticCompVar(val guard: Guard) extends CompVar with Optional with Static {
def and(guard: Guard) = OptionalStaticCompVar(guard.and(this.guard))
def and(other: VarType) = other match {
case NonCompVar() => this
case o: CompVar => o.and(guard)
}
}
object OptionalStaticCompVar {
def apply(optKeys: List[(String => String, String)]): OptionalStaticCompVar = OptionalStaticCompVar(Guard(Nil, optKeys))
def equality(tpe: String) = OptionalStaticCompVar(List(({ v: String => v }, tpe)))
}
case class OptionalDynamicCompVar(val guard: Guard) extends CompVar with Optional with Dynamic {
def and(guard: Guard) = OptionalDynamicCompVar(guard.and(this.guard))
def and(other: VarType) = other match {
case NonCompVar() => this
case OptionalStaticCompVar(g) => OptionalDynamicCompVar(guard.and(g))
case o: CompVar => o.and(guard)
}
}
object OptionalDynamicCompVar {
def apply(optKeys: List[(String => String, String)]): OptionalDynamicCompVar = OptionalDynamicCompVar(Guard(Nil, optKeys))
def equality(tpe: String) = OptionalDynamicCompVar(List(({ v: String => v }, tpe)))
}
case class RequiredStaticCompVar(val guard: Guard) extends CompVar with Required with Static {
def and(guard: Guard) = RequiredStaticCompVar(guard.and(this.guard))
def and(other: VarType) = other match {
case NonCompVar() => this
case OptionalStaticCompVar(g) => RequiredStaticCompVar(guard.and(g))
case OptionalDynamicCompVar(g) => RequiredDynamicCompVar(guard.and(g))
case o: CompVar => o.and(guard)
}
}
object RequiredStaticCompVar {
def apply(reqKeys: List[(String => String, String)]): RequiredStaticCompVar = RequiredStaticCompVar(Guard(reqKeys, Nil))
def equality(tpe: String) = RequiredStaticCompVar(List(({ v: String => v }, tpe)))
}
case class RequiredDynamicCompVar(val guard: Guard) extends CompVar with Required with Dynamic {
def and(guard: Guard) = RequiredDynamicCompVar(guard.and(this.guard))
def and(other: VarType) = other match {
case NonCompVar() => this
case v @ (OptionalStaticCompVar(_) | OptionalDynamicCompVar(_) | RequiredStaticCompVar(_)) =>
RequiredDynamicCompVar(guard.and(v.asInstanceOf[CompVar].guard))
case o: CompVar => o.and(guard)
}
}
object RequiredDynamicCompVar {
def apply(reqKeys: List[(String => String, String)]): RequiredDynamicCompVar = RequiredDynamicCompVar(Guard(reqKeys, Nil))
def equality(tpe: String) = RequiredDynamicCompVar(List(({ v: String => v }, tpe)))
}
/**
* A guard defines how compilation variables are guarded, e.g. when
* recompilation is triggered. It encapsulates the body of an anonymous
* function deciding whether two arguments named "t1" and "t2" of type Any are
* equivalent as far as the DSL optimizations are concerned, or whether the
* program has to be recompiled.
*/
case class Guard(private val reqKeys: List[(String => String, String)],
private val optKeys: List[(String => String, String)]) {
/** Composes two guards. */
def and(other: Guard): Guard = other match {
case Guard(rk, ok) => new Guard(reqKeys ++ rk, optKeys ++ ok)
}
override def toString = {
val req = getReqKeys.map({ case (fun, tpe) => s"{ v => (" + fun("v") + s"): $tpe }" }).mkString("(", ", ", ")")
val opt = getOptKeys.map({ case (fun, tpe) => s"{ v => (" + fun("v") + s"): $tpe }" }).mkString("(", ", ", ")")
s"Guard(req: $req, opt: $opt)"
}
lazy val getReqKeys = reqKeys.distinct
lazy val getOptKeys = optKeys.distinct diff reqKeys
}
| amirsh/yin-yang | components/yin-yang/src/api/VarType.scala | Scala | bsd-3-clause | 6,450 |
package com.avsystem.scex
package util
import java.{lang => jl, util => ju}
import com.avsystem.scex.compiler.Markers.{ExpressionUtil, JavaGetterAdapter, ProfileObject, Synthetic}
import com.avsystem.scex.compiler.annotation._
import com.avsystem.scex.util.CommonUtils._
import scala.reflect.api.Universe
trait MacroUtils {
val universe: Universe
import universe._
lazy val adapterType = typeOf[JavaGetterAdapter]
lazy val syntheticType = typeOf[Synthetic]
lazy val expressionUtilType = typeOf[ExpressionUtil]
lazy val profileObjectType = typeOf[ProfileObject]
lazy val inputAnnotType = typeOf[Input]
lazy val rootValueAnnotType = typeOf[RootValue]
lazy val rootAdapterAnnotType = typeOf[RootAdapter]
lazy val notValidatedAnnotType = typeOf[NotValidated]
lazy val any2stringadd = typeOf[Predef.type].member(TermName("any2stringadd"))
lazy val stringAddPlus = typeOf[any2stringadd[_]].member(TermName("+").encodedName)
lazy val stringConcat = typeOf[String].member(TermName("+").encodedName)
lazy val stringTpe = typeOf[String]
lazy val booleanTpe = typeOf[Boolean]
lazy val jBooleanTpe = typeOf[jl.Boolean]
lazy val dynamicTpe = typeOf[Dynamic]
object DecodedTermName {
def unapply(name: TermName) =
Some(name.decodedName.toString)
}
object DecodedTypeName {
def unapply(name: TermName) =
Some(name.decodedName.toString)
}
object LiteralString {
def unapply(tree: Tree) = tree match {
case Literal(Constant(str: String)) =>
Some(str)
case _ =>
None
}
}
// extractor that matches compiler-generated applications of static implicit conversions
object ImplicitlyConverted {
def unapply(tree: Tree): Option[(Tree, Tree)] = tree match {
case Apply(fun, List(prefix))
if isGlobalImplicitConversion(fun) && (tree.pos == NoPosition || prefix.pos == NoPosition || tree.pos == prefix.pos) =>
Some((prefix, fun))
case _ =>
None
}
}
object NewInstance {
def unapply(tree: Tree) = tree match {
case Apply(Select(New(tpeTree), termNames.CONSTRUCTOR), args) =>
Some((tpeTree, args))
case _ =>
None
}
}
object SelectDynamic {
def unapply(tree: Tree) = tree match {
case Apply(Select(qual, TermName("selectDynamic")), List(lit@Literal(Constant(name: String))))
if qual.tpe != null && qual.tpe <:< dynamicTpe && lit.pos.isTransparent =>
Some((qual, name))
case _ => None
}
}
object StringInterpolation {
def unapply(tree: Apply) = tree match {
case Apply(Select(StringContextApply(parts), _), args) => Some((parts, args))
case _ => None
}
}
object StringContextTree {
def unapply(tree: Tree) = tree match {
case Ident(name) if name.decodedName.toString == "StringContext" => true
case Select(_, name) if name.decodedName.toString == "StringContext" => true
case _ => false
}
}
object StringContextApply {
def unapply(tree: Tree) = tree match {
case Apply(Select(StringContextTree(), name), parts) if name.decodedName.toString == "apply" => Some(parts)
case Apply(StringContextTree(), parts) => Some(parts)
case _ => None
}
}
def isProperPosition(pos: Position) =
pos != null && pos != NoPosition
def isModuleOrPackage(symbol: Symbol) = symbol != null &&
(symbol.isModule || symbol.isModuleClass || symbol.isPackage || symbol.isPackageClass)
def isJavaField(symbol: Symbol) =
symbol != null && symbol.isJava && symbol.isTerm && !symbol.isMethod && !isModuleOrPackage(symbol)
def isConstructor(s: Symbol) =
s.isMethod && s.asMethod.isConstructor
def memberSignature(s: Symbol) =
if (s != null) s"${s.fullName}${paramsSignature(s)}" else null
def paramsSignature(s: Symbol) =
s.info.paramLists.map(_.map(_.typeSignature.toString).mkString("(", ",", ")")).mkString
def erasureFullName(tpe: Type) =
tpe.erasure.typeSymbol.fullName
def isStableTerm(s: Symbol) =
s.isTerm && s.asTerm.isStable
def stripTypeApply(tree: Tree): Tree = tree match {
case TypeApply(prefix, _) => stripTypeApply(prefix)
case _ => tree
}
def paramsOf(tpe: Type): (List[List[Symbol]], List[Symbol]) = tpe match {
case PolyType(tp, resultType) =>
paramsOf(resultType)
case MethodType(params, resultType) =>
val (moreParams, implParams) = paramsOf(resultType)
if (params.nonEmpty && params.head.isImplicit)
(moreParams, params ::: implParams)
else
(params :: moreParams, implParams)
case _ => (Nil, Nil)
}
def path(tree: Tree): String = tree match {
case Select(prefix, name) => s"${path(prefix)}.${name.decodedName.toString}"
case Ident(name) => name.decodedName.toString
case This(name) => name.decodedName.toString
case EmptyTree => "<none>"
case _ => throw new IllegalArgumentException("This tree does not represent simple path: " + showRaw(tree))
}
/**
* Is this tree a path that starts with package and goes through stable symbols (vals and objects)?
*
* @return
*/
def isStableGlobalPath(tree: Tree): Boolean = tree match {
case Select(prefix, _) => isStableTerm(tree.symbol) && isStableGlobalPath(prefix)
case Ident(_) => tree.symbol.isStatic && isStableTerm(tree.symbol)
case This(_) => tree.symbol.isPackageClass
case _ => false
}
def isGlobalImplicitConversion(tree: Tree): Boolean = tree match {
case TypeApply(prefix, _) => isGlobalImplicitConversion(prefix)
//TODO handle apply method on implicit function values
case Select(prefix, name) =>
tree.symbol.isMethod && tree.symbol.isImplicit && isStableGlobalPath(prefix)
case _ => false
}
// https://groups.google.com/forum/#!topic/scala-user/IeD2siVXyss
def fixOverride(s: Symbol) =
if(s.isTerm && s.asTerm.isOverloaded) {
s.alternatives.filterNot(_.isSynthetic).head
} else s
def withOverrides(s: Symbol) =
s :: s.overrides.map(fixOverride)
lazy val toplevelSymbols = Set(typeOf[Any], typeOf[AnyRef], typeOf[AnyVal]).map(_.typeSymbol)
def isStaticModule(symbol: Symbol) =
symbol != null && symbol.isModule && symbol.isStatic
def isFromToplevelType(symbol: Symbol) =
withOverrides(symbol).exists(toplevelSymbols contains _.owner)
def isJavaParameterlessMethod(symbol: Symbol) =
symbol != null && symbol.isPublic && symbol.isJava && symbol.isMethod &&
symbol.asMethod.paramLists == List(List()) && !symbol.typeSignature.takesTypeArgs
def isJavaStaticType(tpe: Type) = {
val symbol = tpe.typeSymbol
symbol != null && symbol.isJava && isModuleOrPackage(symbol)
}
def isJavaClass(symbol: Symbol) =
symbol.isJava && symbol.isClass && !symbol.isModuleClass && !symbol.isPackageClass
def isStaticOrConstructor(symbol: Symbol) =
symbol.isStatic || (symbol.isMethod && symbol.asMethod.isConstructor)
def reifyOption[A, B](opt: Option[A], innerReify: A => Expr[B]): Expr[Option[B]] = opt match {
case Some(x) => reify(Some(innerReify(x).splice))
case None => reify(None)
}
def isBooleanType(tpe: Type) =
tpe <:< typeOf[Boolean] || tpe <:< typeOf[jl.Boolean]
lazy val getClassSymbol = typeOf[Any].member(TermName("getClass"))
def isGetClass(symbol: Symbol) =
symbol.name == TermName("getClass") && withOverrides(symbol).contains(getClassSymbol)
def isBeanGetter(symbol: Symbol) = symbol.isMethod && {
val methodSymbol = symbol.asMethod
val name = symbol.name.decodedName.toString
!isGetClass(methodSymbol) && methodSymbol.paramLists == List(List()) && methodSymbol.typeParams.isEmpty &&
(BeanGetterNamePattern.pattern.matcher(name).matches ||
BooleanBeanGetterNamePattern.pattern.matcher(name).matches && isBooleanType(methodSymbol.returnType))
}
def isParameterless(s: TermSymbol) =
!s.isMethod || {
val paramss = s.asMethod.paramLists
paramss == Nil || paramss == List(Nil)
}
def methodTypesMatch(originalTpe: Type, implicitTpe: Type): Boolean = {
def paramsMatch(origParams: List[Symbol], implParams: List[Symbol]): Boolean =
(origParams, implParams) match {
case (origHead :: origTail, implHead :: implTail) =>
implHead.typeSignature <:< origHead.typeSignature && paramsMatch(origTail, implTail)
case (Nil, Nil) => true
case _ => false
}
(originalTpe, implicitTpe) match {
case (MethodType(origParams, origResultType), MethodType(implParams, implResultType)) =>
paramsMatch(origParams, implParams) && methodTypesMatch(origResultType, implResultType)
case (MethodType(_, _), _) | (_, MethodType(_, _)) => false
case (_, _) => true
}
}
def takesSingleParameter(symbol: MethodSymbol) =
symbol.paramLists match {
case List(List(_)) => true
case _ => false
}
def isBeanSetter(symbol: Symbol) =
symbol.isMethod && {
val methodSymbol = symbol.asMethod
val name = symbol.name.decodedName.toString
takesSingleParameter(methodSymbol) && methodSymbol.typeParams.isEmpty &&
methodSymbol.returnType =:= typeOf[Unit] &&
BeanSetterNamePattern.pattern.matcher(name).matches
}
/**
* Accessible members include methods, modules, val/var setters and getters and Java fields.
*/
def accessibleMembers(tpe: Type) =
tpe.members.toList.collect { case s if s.isPublic && s.isTerm &&
(s.isJava || (!s.asTerm.isVal && !s.asTerm.isVar)) && !s.isImplementationArtifact => s.asTerm
}
def hasType[T: TypeTag](tree: Tree) =
tree.tpe <:< typeOf[T]
def toStringSymbol(tpe: Type) =
alternatives(tpe.member(TermName("toString")))
.find(s => s.isTerm && isParameterless(s.asTerm))
.getOrElse(NoSymbol)
lazy val standardStringInterpolations =
Set("s", "raw").map(name => typeOf[StringContext].member(TermName(name)))
def alternatives(sym: Symbol) = sym match {
case termSymbol: TermSymbol => termSymbol.alternatives
case NoSymbol => Nil
case _ => List(sym)
}
def isExpressionUtil(symbol: Symbol): Boolean =
symbol != null && symbol != NoSymbol &&
(isExpressionUtilObject(symbol) || isExpressionUtil(symbol.owner))
def isExpressionUtilObject(symbol: Symbol): Boolean =
nonBottomSymbolType(symbol) <:< expressionUtilType
def isProfileObject(symbol: Symbol) =
nonBottomSymbolType(symbol) <:< profileObjectType
def isScexSynthetic(symbol: Symbol): Boolean =
symbol != null && symbol != NoSymbol &&
(nonBottomSymbolType(symbol) <:< syntheticType || isScexSynthetic(symbol.owner))
def isAdapter(tpe: Type): Boolean =
tpe != null && !isBottom(tpe) && tpe <:< adapterType
def isBottom(tpe: Type) =
tpe <:< definitions.NullTpe || tpe <:< definitions.NothingTpe
/**
* Is this symbol the 'wrapped' field of Java getter adapter?
*/
def isAdapterWrappedMember(symbol: Symbol): Boolean =
if (symbol != null && symbol.isTerm) {
val ts = symbol.asTerm
(ts.isGetter && ts.name == TermName("_wrapped") && ts.owner.isType && isAdapter(ts.owner.asType.toType)
|| ts.isVal && isAdapterWrappedMember(ts.getter))
} else false
def isRootAdapter(tpe: Type) =
tpe != null && isAnnotatedWith(tpe.widen, rootAdapterAnnotType)
def isAnnotatedWith(tpe: Type, annotTpe: Type): Boolean = tpe match {
case AnnotatedType(annots, underlying) =>
annots.exists(_.tree.tpe <:< annotTpe) || isAnnotatedWith(underlying, annotTpe)
case ExistentialType(_, underlying) =>
isAnnotatedWith(underlying, annotTpe)
case _ => false
}
// gets Java getter called by implicit wrapper
def getJavaGetter(symbol: Symbol, javaTpe: Type): Symbol = {
val getterName = "get" + symbol.name.toString.capitalize
val booleanGetterName = "is" + symbol.name.toString.capitalize
def fail = throw new Exception(s"Could not find Java getter for property ${symbol.name} on $javaTpe")
def findGetter(getterName: String) =
alternatives(javaTpe.member(TermName(getterName))).find(isBeanGetter)
if (isBooleanType(symbol.asMethod.returnType)) {
findGetter(booleanGetterName) orElse findGetter(getterName) getOrElse fail
} else {
findGetter(getterName) getOrElse fail
}
}
def symbolType(symbol: Symbol) =
if (symbol == null) NoType
else if (symbol.isType) symbol.asType.toType
else symbol.typeSignature
def nonBottomSymbolType(symbol: Symbol) = {
val tpe = symbolType(symbol)
if (tpe <:< definitions.NullTpe || tpe <:< definitions.NothingTpe) NoType else tpe
}
def isAdapterConversion(symbol: Symbol) =
isProfileObject(symbol.owner) && symbol.isImplicit && symbol.isMethod && isAdapter(symbol.asMethod.returnType)
def annotations(sym: Symbol) =
sym.annotations ++ (if (sym.isTerm) {
val tsym = sym.asTerm
if (tsym.isGetter) tsym.accessed.annotations else Nil
} else Nil)
def debugTree(pref: String, tree: Tree): Unit = {
println(pref)
tree.foreach { t =>
println(show(t.pos).padTo(15, ' ') + ("" + t.tpe).padTo(50, ' ') + show(t))
}
println()
}
}
object MacroUtils {
def apply(u: Universe) = new MacroUtils {
val universe: u.type = u
}
}
| pnf/scex | scex-core/src/main/scala/com/avsystem/scex/util/MacroUtils.scala | Scala | apache-2.0 | 13,256 |
package org.precompiler.scala101
import org.precompiler.scala101.ch04.Currency.Currency
/**
*
* @author Richard Li
*/
package object ch04 {
def convert(money: Money, to: Currency) = {
println(s"converting ${money} to ${to}")
}
}
| precompiler/scala-101 | learning-scala/src/main/scala/org/precompiler/scala101/ch04/package.scala | Scala | apache-2.0 | 245 |
/******************************************************************************
* Copyright (c) 2014, Equal Experts Ltd
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation
* are those of the authors and should not be interpreted as representing
* official policies, either expressed or implied, of the Midas Project.
******************************************************************************/
package com.ee.midas.pipes
import java.io.{OutputStream, InputStream}
import com.ee.midas.utils.Loggable
trait Interceptable extends Loggable {
/**
* Default Interception does nothing, simply copies data from input to output.
* @param src InputStream
* @param tgt OutputStream
* @return number of bytes read or None for EOF (End of stream).
*/
def intercept(src: InputStream, tgt: OutputStream): Int = {
val name = getClass.getName
val data = new Array[Byte](1024 * 16)
val bytesRead = src.read(data)
logInfo(name + " Bytes Read = " + bytesRead)
if (bytesRead > 0) {
tgt.write(data, 0, bytesRead)
logInfo(name + " Bytes Written = " + bytesRead)
tgt.flush
}
bytesRead
}
}
object Interceptable {
def apply() = new Interceptable {}
} | EqualExperts/Midas | src/main/scala/com/ee/midas/pipes/Interceptable.scala | Scala | bsd-2-clause | 2,531 |
package org.coursera.naptime.ari.graphql.resolvers
import com.linkedin.data.schema.RecordDataSchema
import com.typesafe.scalalogging.StrictLogging
import org.coursera.naptime.ResourceName
import org.coursera.naptime.ResponsePagination
import org.coursera.naptime.ari.Request
import org.coursera.naptime.ari.Response
import org.coursera.naptime.ari.graphql.SangriaGraphQlContext
import org.coursera.naptime.ari.graphql.schema.DataMapWithParent
import org.coursera.naptime.ari.graphql.schema.NaptimeResourceUtils
import org.coursera.naptime.ari.graphql.schema.ParentModel
import org.coursera.naptime.schema.AuthOverride
import play.api.libs.json.JsArray
import play.api.libs.json.JsNull
import play.api.libs.json.JsValue
import play.api.mvc.RequestHeader
import sangria.execution.deferred.Deferred
import sangria.execution.deferred.DeferredResolver
import scala.concurrent.ExecutionContext
import scala.concurrent.Future
case class NaptimeRequest(
idx: RequestId,
resourceName: ResourceName,
arguments: Set[(String, JsValue)],
resourceSchema: RecordDataSchema,
paginationOverride: Option[ResponsePagination] = None,
authOverride: Option[AuthOverride] = None)
case class NaptimeResponse(
elements: List[DataMapWithParent],
pagination: Option[ResponsePagination],
url: String,
status: Int = 200,
errorMessage: Option[String] = None)
case class NaptimeError(url: String, status: Int, errorMessage: String)
sealed trait DeferredNaptime {
def toNaptimeRequest(idx: Int): NaptimeRequest
}
case class DeferredNaptimeRequest(
resourceName: ResourceName,
arguments: Set[(String, JsValue)],
resourceSchema: RecordDataSchema,
paginationOverride: Option[ResponsePagination] = None,
authOverride: Option[AuthOverride] = None)
extends Deferred[Either[NaptimeError, NaptimeResponse]]
with DeferredNaptime {
def toNaptimeRequest(idx: Int): NaptimeRequest = {
NaptimeRequest(
RequestId(idx),
resourceName,
arguments,
resourceSchema,
paginationOverride,
authOverride)
}
}
case class DeferredNaptimeElement(
resourceName: ResourceName,
idOpt: Option[JsValue],
arguments: Set[(String, JsValue)],
resourceSchema: RecordDataSchema,
authOverride: Option[AuthOverride] = None)
extends Deferred[Either[NaptimeError, NaptimeResponse]]
with DeferredNaptime {
def toNaptimeRequest(idx: Int): NaptimeRequest = {
NaptimeRequest(
RequestId(idx),
resourceName,
arguments ++ idOpt
.map(id => List("ids" -> JsArray(List(id))))
.getOrElse(List.empty),
resourceSchema,
paginationOverride = None,
authOverride)
}
}
case class RequestId(idx: Int)
class NaptimeResolver extends DeferredResolver[SangriaGraphQlContext] with StrictLogging {
def resolve(deferred: Vector[Deferred[Any]], ctx: SangriaGraphQlContext, queryState: Any)(
implicit ec: ExecutionContext): Vector[Future[Any]] = {
val naptimeRequests = deferred.zipWithIndex.collect {
case (d: DeferredNaptime, idx: Int) => d.toNaptimeRequest(idx)
}
val dataByResource = naptimeRequests
.groupBy(_.resourceName)
.map {
case (resourceName, requests) =>
// Handle MultiGet and Non-Multigets differently, since multigets can be batched
val (multiGetRequests, nonMultiGetRequests) =
requests.partition(_.arguments.exists(_._1 == "ids"))
// partition forward requests by auth type. it will make a separate batched request for
// each distinct auth override type found.
val multiGetRelations = Future
.sequence(multiGetRequests.groupBy(_.authOverride).map {
case (authOverride, selectedRequests) =>
fetchMultiGetRelations(selectedRequests, resourceName, ctx, authOverride)
})
.map(_.flatten.toMap)
val nonMultiGetRelations =
fetchNonMultiGetRelations(nonMultiGetRequests, resourceName, ctx)
val allRelations = List(multiGetRelations, nonMultiGetRelations)
Future.sequence(allRelations).map(_.flatten.toMap)
}
val allData = Future.sequence(dataByResource).map(_.flatten.toMap)
deferred.zipWithIndex.map {
case (_, idx) =>
allData.map(_.getOrElse(RequestId(idx), {
throw new RuntimeException("Error in NaptimeResolver. Could not find outgoing request.")
}))
}
}
/**
* Fetches multiget relations for a specific resource given a list of requests.
* This implementation optimizes fetches by merging multigets into as few requests as possible.
*
* Multiget requests can be merged if all other query parameters are the same.
*
* In the event of an error, a NaptimeError is returned instead of a NaptimeResponse
*
* @param requests list of NaptimeRequests containing the endpoint and arguments (including ids)
* @param resourceName resource that the requests is made against
* @param context request context (includes things like header)
* @return Map of request ids (indexes from the deferred request batching) to either a
* NaptimeError or NaptimeResponse
*/
private[this] def fetchMultiGetRelations(
requests: Vector[NaptimeRequest],
resourceName: ResourceName,
context: SangriaGraphQlContext,
authOverride: Option[AuthOverride])(implicit ec: ExecutionContext)
: Future[Map[RequestId, Either[NaptimeError, NaptimeResponse]]] = {
Future
.sequence {
mergeMultigetRequests(context.requestHeader, requests, resourceName, authOverride)
.map {
case (request, sourceRequests) =>
context.fetcher.data(request, context.debugMode).map {
case Right(successfulResponse) =>
val parsedElements =
parseElements(request, successfulResponse, requests.head.resourceSchema)
val parsedElementsMap = parsedElements.map { element =>
val id = Option(element.element.get("id"))
.map(NaptimeResourceUtils.parseToJson)
.getOrElse(JsNull)
id -> element
}.toMap
sourceRequests.map { sourceRequest =>
// TODO(bryan): Clean this up
val elements = parseIds(sourceRequest).flatMap(parsedElementsMap.get).toList
val url = successfulResponse.url.getOrElse("???")
sourceRequest.idx ->
Right[NaptimeError, NaptimeResponse](
NaptimeResponse(elements, sourceRequest.paginationOverride, url, 200, None))
}.toMap
case Left(error) =>
sourceRequests.map { sourceRequest =>
sourceRequest.idx ->
Left(NaptimeError(error.url.getOrElse("???"), error.code, error.message))
}.toMap
}
}
}
.map(_.flatten.toMap)
}
/**
* Converts a list of forward requests into the most optimal list of requests to execute.
* Multiget requests can be merged into a single request,
* but _only_ if all of the query parameters are the same for the requests.
*
* @param requests A list of NaptimeRequests specifying the resource and arguments
* @return a map of TopLevelRequests -> list of NaptimeRequests that it fulfills
*/
private[this] def mergeMultigetRequests(
header: RequestHeader,
requests: Vector[NaptimeRequest],
resourceName: ResourceName,
authOverride: Option[AuthOverride]): Map[Request, Vector[NaptimeRequest]] = {
requests
.groupBy(_.arguments.filterNot(_._1 == "ids"))
.map {
case (nonIdArguments, innerRequests) =>
// TODO(bryan): Limit multiget requests by number of ids as well, to avoid http limits
Request(
header,
resourceName,
nonIdArguments + ("ids" -> JsArray(parseAndMergeIds(innerRequests))),
authOverride) -> innerRequests
}
}
private[this] def parseAndMergeIds(requests: Vector[NaptimeRequest]): Seq[JsValue] = {
requests.flatMap(parseIds).distinct
}
private[this] def parseIds(request: NaptimeRequest): Seq[JsValue] = {
// .toSeq here is to preserve id ordering in related resource id arrays
request.arguments.toSeq
.filter { case (key, _) => key == "ids" }
.map(_._2)
.flatMap {
case JsArray(idValues) => idValues
case value: JsValue => List(value)
}
.distinct
}
/**
* Fetches non-multi get relations for a specific resource given a list of requests
*
* In the event of an error, a NaptimeError is returned instead of a NaptimeResponse
*
* @param requests list of NaptimeRequests containing the endpoint and arguments (including ids)
* @param resourceName resource that the requests is made against
* @param context request context (includes things like header)
* @return Map of request ids (indexes from the deferred request batching) to either a
* NaptimeError or NaptimeResponse
*/
def fetchNonMultiGetRelations(
requests: Vector[NaptimeRequest],
resourceName: ResourceName,
context: SangriaGraphQlContext)(implicit ec: ExecutionContext)
: Future[Map[RequestId, Either[NaptimeError, NaptimeResponse]]] = {
Future
.sequence {
requests.map { request =>
val fetcherRequest =
Request(context.requestHeader, resourceName, request.arguments, request.authOverride)
context.fetcher
.data(fetcherRequest, context.debugMode)
.map {
case Right(response) =>
val elements = parseElements(fetcherRequest, response, requests.head.resourceSchema)
Right(
NaptimeResponse(
elements,
Some(response.pagination),
response.url.getOrElse("???")))
case Left(error) =>
Left(NaptimeError(error.url.getOrElse("???"), error.code, error.message))
}
.map(res => Map(request.idx -> res))
}
}
.map(_.flatten.toMap)
}
/**
* Helper to parse the elements in a response into a map of JsValue -> DataMapWithParent
* @param response Response from the network call, containing data returned
* @param resourceSchema schema that defines the shape of the response, for later use
* @return Map of JsValue ids to DataMapWithParents
*/
def parseElements(
request: Request,
response: Response,
resourceSchema: RecordDataSchema): List[DataMapWithParent] = {
response.data.map { element =>
DataMapWithParent(element, ParentModel(request.resource, element, resourceSchema))
}
}
/**
* Extracts a resource name from a list of NaptimeRequests.
*
* This method assumes that all requests will be for the same resource,
* otherwise it will return None.
*
* @param requests a list of NaptimeRequests
* @return a ResourceName if all requests are for the same resource, otherwise None
*/
def getResourceName(requests: Vector[NaptimeRequest]): Option[ResourceName] = {
val byResourceName = requests.groupBy(_.resourceName)
if (byResourceName.size > 1) {
logger.error("getResourceName detected a list of requests for more than one resource")
None
} else {
byResourceName.headOption.map(_._1)
}
}
}
| coursera/naptime | naptime-graphql/src/main/scala/org/coursera/naptime/ari/graphql/resolvers/NaptimeResolver.scala | Scala | apache-2.0 | 11,581 |
/**
* Copyright (C) 2012 Typesafe, Inc. <http://www.typesafe.com>
*/
package org.pantsbuild.zinc
import java.io.File
import java.util.{ List => JList }
import sbt.inc.ClassfileManager
import sbt.inc.IncOptions.{ Default => DefaultIncOptions }
import sbt.Level
import sbt.Path._
import scala.collection.JavaConverters._
import scala.util.matching.Regex
import xsbti.compile.CompileOrder
/**
* All parsed command-line options.
*/
case class Settings(
help: Boolean = false,
version: Boolean = false,
consoleLog: ConsoleOptions = ConsoleOptions(),
captureLog: Option[File] = None,
sources: Seq[File] = Seq.empty,
classpath: Seq[File] = Seq.empty,
classesDirectory: File = new File("."),
scala: ScalaLocation = ScalaLocation(),
scalacOptions: Seq[String] = Seq.empty,
javaHome: Option[File] = None,
forkJava: Boolean = false,
javaOnly: Boolean = false,
javacOptions: Seq[String] = Seq.empty,
compileOrder: CompileOrder = CompileOrder.Mixed,
sbt: SbtJars = SbtJars(),
incOptions: IncOptions = IncOptions(),
analysis: AnalysisOptions = AnalysisOptions(),
properties: Seq[String] = Seq.empty
)
/** Due to the limit of 22 elements in a case class, options must get broken down into sub-groups.
* TODO: further break options into sensible subgroups. */
case class ConsoleOptions(
logLevel: Level.Value = Level.Info,
color: Boolean = true,
logPhases: Boolean = false,
printProgress: Boolean = false,
heartbeatSecs: Int = 0,
fileFilters: Seq[Regex] = Seq.empty,
msgFilters: Seq[Regex] = Seq.empty
)
/**
* Alternative ways to locate the scala jars.
*/
case class ScalaLocation(
home: Option[File] = None,
path: Seq[File] = Seq.empty,
compiler: Option[File] = None,
library: Option[File] = None,
extra: Seq[File] = Seq.empty
)
object ScalaLocation {
/**
* Java API for creating ScalaLocation.
*/
def create(
home: File,
path: JList[File],
compiler: File,
library: File,
extra: JList[File]): ScalaLocation =
ScalaLocation(
Option(home),
path.asScala,
Option(compiler),
Option(library),
extra.asScala
)
/**
* Java API for creating ScalaLocation with scala home.
*/
def fromHome(home: File) = ScalaLocation(home = Option(home))
/**
* Java API for creating ScalaLocation with scala path.
*/
def fromPath(path: JList[File]) = ScalaLocation(path = path.asScala)
}
/**
* Locating the sbt jars needed for zinc compile.
*/
case class SbtJars(
sbtInterface: Option[File] = None,
compilerInterfaceSrc: Option[File] = None
)
object SbtJars {
/**
* Select the sbt jars from a path.
*/
def fromPath(path: Seq[File]): SbtJars = {
val sbtInterface = path find (_.getName matches Setup.SbtInterface.pattern)
val compilerInterfaceSrc = path find (_.getName matches Setup.CompilerInterfaceSources.pattern)
SbtJars(sbtInterface, compilerInterfaceSrc)
}
/**
* Java API for selecting sbt jars from a path.
*/
def fromPath(path: JList[File]): SbtJars = fromPath(path.asScala)
}
/**
* Wrapper around incremental compiler options.
*/
case class IncOptions(
transitiveStep: Int = DefaultIncOptions.transitiveStep,
recompileAllFraction: Double = DefaultIncOptions.recompileAllFraction,
relationsDebug: Boolean = DefaultIncOptions.relationsDebug,
apiDebug: Boolean = DefaultIncOptions.apiDebug,
apiDiffContextSize: Int = DefaultIncOptions.apiDiffContextSize,
apiDumpDirectory: Option[File] = DefaultIncOptions.apiDumpDirectory,
transactional: Boolean = false,
backup: Option[File] = None,
recompileOnMacroDef: Boolean = DefaultIncOptions.recompileOnMacroDef,
nameHashing: Boolean = DefaultIncOptions.nameHashing
) {
@deprecated("Use the primary constructor instead.", "0.3.5.2")
def this(
transitiveStep: Int,
recompileAllFraction: Double,
relationsDebug: Boolean,
apiDebug: Boolean,
apiDiffContextSize: Int,
apiDumpDirectory: Option[File],
transactional: Boolean,
backup: Option[File]
) = {
this(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
apiDumpDirectory, transactional, backup, DefaultIncOptions.recompileOnMacroDef,
DefaultIncOptions.nameHashing)
}
def options: sbt.inc.IncOptions = {
sbt.inc.IncOptions(
transitiveStep,
recompileAllFraction,
relationsDebug,
apiDebug,
apiDiffContextSize,
apiDumpDirectory,
classfileManager,
recompileOnMacroDef,
nameHashing
)
}
def classfileManager: () => ClassfileManager = {
if (transactional && backup.isDefined)
ClassfileManager.transactional(backup.get)
else
DefaultIncOptions.newClassfileManager
}
}
/**
* Configuration for sbt analysis and analysis output options.
*/
case class AnalysisOptions(
cache: Option[File] = None,
cacheMap: Map[File, File] = Map.empty,
forceClean: Boolean = false
)
object Settings {
/**
* All available command-line options.
*/
val options = Seq(
header("Output options:"),
boolean( ("-help", "-h"), "Print this usage message", (s: Settings) => s.copy(help = true)),
boolean( "-version", "Print version", (s: Settings) => s.copy(version = true)),
header("Logging Options:"),
boolean( "-debug", "Set log level for stdout to debug",
(s: Settings) => s.copy(consoleLog = s.consoleLog.copy(logLevel = Level.Debug))),
string( "-log-level", "level", "Set log level for stdout (debug|info|warn|error)",
(s: Settings, l: String) => s.copy(consoleLog = s.consoleLog.copy(logLevel = Level.withName(l)))),
boolean( "-no-color", "No color in logging to stdout",
(s: Settings) => s.copy(consoleLog = s.consoleLog.copy(color = false))),
boolean( "-log-phases", "Log phases of compilation for each file to stdout",
(s: Settings) => s.copy(consoleLog = s.consoleLog.copy(logPhases = true))),
boolean( "-print-progress", "Periodically print compilation progress to stdout",
(s: Settings) => s.copy(consoleLog = s.consoleLog.copy(printProgress = true))),
int( "-heartbeat", "interval (sec)", "Print '.' to stdout every n seconds while compiling",
(s: Settings, b: Int) => s.copy(consoleLog = s.consoleLog.copy(heartbeatSecs = b))),
string( "-msg-filter", "regex", "Filter warning messages matching the given regex",
(s: Settings, re: String) => s.copy(consoleLog = s.consoleLog.copy(msgFilters = s.consoleLog.msgFilters :+ re.r))),
string( "-file-filter", "regex", "Filter warning messages from filenames matching the given regex",
(s: Settings, re: String) => s.copy(consoleLog = s.consoleLog.copy(fileFilters = s.consoleLog.fileFilters :+ re.r))),
file( "-capture-log", "file", "Captures all logging (unfiltered) to the given file",
(s: Settings, f: File) => s.copy(captureLog = Some(f))),
header("Compile options:"),
path( ("-classpath", "-cp"), "path", "Specify the classpath", (s: Settings, cp: Seq[File]) => s.copy(classpath = cp)),
file( "-d", "directory", "Destination for compiled classes", (s: Settings, f: File) => s.copy(classesDirectory = f)),
header("Scala options:"),
file( "-scala-home", "directory", "Scala home directory (for locating jars)", (s: Settings, f: File) => s.copy(scala = s.scala.copy(home = Some(f)))),
path( "-scala-path", "path", "Specify all Scala jars directly", (s: Settings, sp: Seq[File]) => s.copy(scala = s.scala.copy(path = sp))),
file( "-scala-compiler", "file", "Specify Scala compiler jar directly" , (s: Settings, f: File) => s.copy(scala = s.scala.copy(compiler = Some(f)))),
file( "-scala-library", "file", "Specify Scala library jar directly" , (s: Settings, f: File) => s.copy(scala = s.scala.copy(library = Some(f)))),
path( "-scala-extra", "path", "Specify extra Scala jars directly", (s: Settings, e: Seq[File]) => s.copy(scala = s.scala.copy(extra = e))),
prefix( "-S", "<scalac-option>", "Pass option to scalac", (s: Settings, o: String) => s.copy(scalacOptions = s.scalacOptions :+ o)),
header("Java options:"),
file( "-java-home", "directory", "Select javac home directory (and fork)", (s: Settings, f: File) => s.copy(javaHome = Some(f))),
boolean( "-fork-java", "Run java compiler in separate process", (s: Settings) => s.copy(forkJava = true)),
string( "-compile-order", "order", "Compile order for Scala and Java sources", (s: Settings, o: String) => s.copy(compileOrder = compileOrder(o))),
boolean( "-java-only", "Don't add scala library to classpath", (s: Settings) => s.copy(javaOnly = true)),
prefix( "-C", "<javac-option>", "Pass option to javac", (s: Settings, o: String) => s.copy(javacOptions = s.javacOptions :+ o)),
header("sbt options:"),
file( "-sbt-interface", "file", "Specify sbt interface jar", (s: Settings, f: File) => s.copy(sbt = s.sbt.copy(sbtInterface = Some(f)))),
file( "-compiler-interface", "file", "Specify compiler interface sources jar", (s: Settings, f: File) => s.copy(sbt = s.sbt.copy(compilerInterfaceSrc = Some(f)))),
header("Incremental compiler options:"),
int( "-transitive-step", "n", "Steps before transitive closure", (s: Settings, i: Int) => s.copy(incOptions = s.incOptions.copy(transitiveStep = i))),
fraction( "-recompile-all-fraction", "x", "Limit before recompiling all sources", (s: Settings, d: Double) => s.copy(incOptions = s.incOptions.copy(recompileAllFraction = d))),
boolean( "-debug-relations", "Enable debug logging of analysis relations", (s: Settings) => s.copy(incOptions = s.incOptions.copy(relationsDebug = true))),
boolean( "-debug-api", "Enable analysis API debugging", (s: Settings) => s.copy(incOptions = s.incOptions.copy(apiDebug = true))),
file( "-api-dump", "directory", "Destination for analysis API dump", (s: Settings, f: File) => s.copy(incOptions = s.incOptions.copy(apiDumpDirectory = Some(f)))),
int( "-api-diff-context-size", "n", "Diff context size (in lines) for API debug", (s: Settings, i: Int) => s.copy(incOptions = s.incOptions.copy(apiDiffContextSize = i))),
boolean( "-transactional", "Restore previous class files on failure", (s: Settings) => s.copy(incOptions = s.incOptions.copy(transactional = true))),
file( "-backup", "directory", "Backup location (if transactional)", (s: Settings, f: File) => s.copy(incOptions = s.incOptions.copy(backup = Some(f)))),
boolean( "-recompileOnMacroDefDisabled", "Disable recompilation of all dependencies of a macro def",
(s: Settings) => s.copy(incOptions = s.incOptions.copy(recompileOnMacroDef = false))),
boolean( "-no-name-hashing", "Disable improved incremental compilation algorithm",
(s: Settings) => s.copy(incOptions = s.incOptions.copy(nameHashing = false))),
header("Analysis options:"),
file( "-analysis-cache", "file", "Cache file for compile analysis", (s: Settings, f: File) => s.copy(analysis = s.analysis.copy(cache = Some(f)))),
fileMap( "-analysis-map", "Upstream analysis mapping (file:file,...)", (s: Settings, m: Map[File, File]) => s.copy(analysis = s.analysis.copy(cacheMap = m))),
boolean( "-force-clean", "Force clean classes on empty analysis", (s: Settings) => s.copy(analysis = s.analysis.copy(forceClean = true))),
header("JVM options:"),
prefix( "-D", "property=value", "Pass property to runtime system", (s: Settings, o: String) => s.copy(properties = s.properties :+ o)),
dummy( "-J<flag>", "Set JVM flag directly for this process")
)
val allOptions: Set[OptionDef[Settings]] = options.toSet
/**
* Print out the usage message.
*/
def printUsage(): Unit = {
val column = options.map(_.length).max + 2
println("Usage: %s <options> <sources>" format Setup.Command)
options foreach { opt => if (opt.extraline) println(); println(opt.usage(column)) }
println()
}
/**
* Anything starting with '-' is considered an option, not a source file.
*/
def isOpt(s: String) = s startsWith "-"
/**
* Parse all args into a Settings object.
* Residual args are either unknown options or source files.
*/
def parse(args: Seq[String]): Parsed[Settings] = {
val Parsed(settings, remaining, errors) = Options.parse(Settings(), allOptions, args, stopOnError = false)
val (unknown, residual) = remaining partition isOpt
val sources = residual map (new File(_))
val unknownErrors = unknown map ("Unknown option: " + _)
Parsed(settings.copy(sources = sources), Seq.empty, errors ++ unknownErrors)
}
/**
* Create a CompileOrder value based on string input.
*/
def compileOrder(order: String): CompileOrder = {
order.toLowerCase match {
case "mixed" => CompileOrder.Mixed
case "java" | "java-then-scala" | "javathenscala" => CompileOrder.JavaThenScala
case "scala" | "scala-then-java" | "scalathenjava" => CompileOrder.ScalaThenJava
}
}
/**
* Normalise all relative paths to the actual current working directory, if provided.
*/
def normalise(settings: Settings, cwd: Option[File]): Settings = {
if (cwd.isEmpty) settings
else {
import settings._
settings.copy(
sources = Util.normaliseSeq(cwd)(sources),
classpath = Util.normaliseSeq(cwd)(classpath),
classesDirectory = Util.normalise(cwd)(classesDirectory),
scala = scala.copy(
home = Util.normaliseOpt(cwd)(scala.home),
path = Util.normaliseSeq(cwd)(scala.path),
compiler = Util.normaliseOpt(cwd)(scala.compiler),
library = Util.normaliseOpt(cwd)(scala.library),
extra = Util.normaliseSeq(cwd)(scala.extra)
),
javaHome = Util.normaliseOpt(cwd)(javaHome),
sbt = sbt.copy(
sbtInterface = Util.normaliseOpt(cwd)(sbt.sbtInterface),
compilerInterfaceSrc = Util.normaliseOpt(cwd)(sbt.compilerInterfaceSrc)
),
incOptions = incOptions.copy(
apiDumpDirectory = Util.normaliseOpt(cwd)(incOptions.apiDumpDirectory),
backup = Util.normaliseOpt(cwd)(incOptions.backup)
),
analysis = analysis.copy(
cache = Util.normaliseOpt(cwd)(analysis.cache),
cacheMap = Util.normaliseMap(cwd)(analysis.cacheMap)
)
)
}
}
// helpers for creating options
def boolean(opt: String, desc: String, action: Settings => Settings) = new BooleanOption[Settings](Seq(opt), desc, action)
def boolean(opts: (String, String), desc: String, action: Settings => Settings) = new BooleanOption[Settings](Seq(opts._1, opts._2), desc, action)
def string(opt: String, arg: String, desc: String, action: (Settings, String) => Settings) = new StringOption[Settings](Seq(opt), arg, desc, action)
def int(opt: String, arg: String, desc: String, action: (Settings, Int) => Settings) = new IntOption[Settings](Seq(opt), arg, desc, action)
def double(opt: String, arg: String, desc: String, action: (Settings, Double) => Settings) = new DoubleOption[Settings](Seq(opt), arg, desc, action)
def fraction(opt: String, arg: String, desc: String, action: (Settings, Double) => Settings) = new FractionOption[Settings](Seq(opt), arg, desc, action)
def file(opt: String, arg: String, desc: String, action: (Settings, File) => Settings) = new FileOption[Settings](Seq(opt), arg, desc, action)
def path(opt: String, arg: String, desc: String, action: (Settings, Seq[File]) => Settings) = new PathOption[Settings](Seq(opt), arg, desc, action)
def path(opts: (String, String), arg: String, desc: String, action: (Settings, Seq[File]) => Settings) = new PathOption[Settings](Seq(opts._1, opts._2), arg, desc, action)
def prefix(pre: String, arg: String, desc: String, action: (Settings, String) => Settings) = new PrefixOption[Settings](pre, arg, desc, action)
def filePair(opt: String, arg: String, desc: String, action: (Settings, (File, File)) => Settings) = new FilePairOption[Settings](Seq(opt), arg, desc, action)
def fileMap(opt: String, desc: String, action: (Settings, Map[File, File]) => Settings) = new FileMapOption[Settings](Seq(opt), desc, action)
def fileSeqMap(opt: String, desc: String, action: (Settings, Map[Seq[File], File]) => Settings) = new FileSeqMapOption[Settings](Seq(opt), desc, action)
def header(label: String) = new HeaderOption[Settings](label)
def dummy(opt: String, desc: String) = new DummyOption[Settings](opt, desc)
}
| areitz/pants | src/scala/org/pantsbuild/zinc/Settings.scala | Scala | apache-2.0 | 17,456 |
package com.programmaticallyspeaking.ncd.nashorn
import com.programmaticallyspeaking.ncd.host.{HitBreakpoint, PrintMessage, ScriptEvent, StackFrame}
import com.programmaticallyspeaking.ncd.messaging.Observer
import com.programmaticallyspeaking.ncd.testing.UnitTest
import org.scalatest.concurrent.Eventually
import scala.concurrent.{ExecutionContext, Promise}
trait PrintTestFixture extends NashornScriptHostTestFixture with Eventually with FairAmountOfPatience {
override implicit val executionContext: ExecutionContext = ExecutionContext.global
def runScriptAndCollectEvents(script: String)(handler: Seq[PrintMessage] => Unit) = {
var events = Seq.empty[PrintMessage]
val obs = Observer.from[ScriptEvent] {
case ev: PrintMessage => events :+= ev
}
observeAndRunScriptSync(script, obs) { host =>
eventually {
assert(events.nonEmpty)
}
handler(events)
}
}
def runScriptAndCollectEventsWhilePaused(code: String)(handler: Seq[PrintMessage] => Unit) = {
var events = Seq.empty[PrintMessage]
val stackframesPromise = Promise[Seq[StackFrame]]()
val obs = Observer.from[ScriptEvent] {
case ev: PrintMessage => events :+= ev
case bp: HitBreakpoint => stackframesPromise.success(bp.stackFrames)
}
observeAndRunScriptAsync("debugger;", obs) { host =>
stackframesPromise.future.map { sf =>
host.evaluateOnStackFrame(sf.head.id, code)
handler(events)
}
}
}
}
class PrintTest extends UnitTest with PrintTestFixture {
"Capture of Nashorn's print extension" - {
"emits a PrintMessage event" in {
expectMessage("print('hello world');", "hello world")
}
"puts space inbetween arguments" in {
expectMessage("print('hello', 'world');", "hello world")
}
"uses JS stringification" in {
expectMessage("print({});", "[object Object]")
}
"handles null" in {
expectMessage("print(null, 'foo');", "null foo")
}
"emits a PrintMessage even if the no-newline version is used" in {
useNashornArguments(Seq("print-no-newline"))
expectMessage("print('hello world');", "hello world")
}
"is ignored when the debugger is paused to avoid deadlock" in {
runScriptAndCollectEventsWhilePaused("print('ignored');") { events =>
events should be ('empty)
}
}
}
private def expectMessage(script: String, message: String): Unit = {
runScriptAndCollectEvents(script) { events =>
expectMessage(events, message)
}
}
private def expectMessage(events: Seq[PrintMessage], message: String): Unit = {
val found = events.find(_.message == message)
found should be ('defined)
}
}
| provegard/ncdbg | src/test/scala/com/programmaticallyspeaking/ncd/nashorn/PrintTest.scala | Scala | bsd-3-clause | 2,711 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.