code
stringlengths
5
1M
repo_name
stringlengths
5
109
path
stringlengths
6
208
language
stringclasses
1 value
license
stringclasses
15 values
size
int64
5
1M
/*
 * Copyright 2013 Marek Radonsky
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
import play.api._
import play.api.mvc._
import play.api.mvc.Results._

import models.Error
import play.extras.iteratees.GzipFilter

/**
 * Play global settings: wraps every request in a gzip filter and renders
 * any unhandled exception as a 500 response with a JSON error body.
 */
object Global extends WithFilters(new GzipFilter()) {

  // Serialize the uncaught exception via the Error model and return it
  // as the Internal Server Error payload.
  override def onError(request: RequestHeader, ex: Throwable) =
    InternalServerError(Error(ex).toJson)
}
radonsky/Taurus
app/Global.scala
Scala
apache-2.0
916
package omnibus.api.exceptions

/** Signals that a request has exceeded its expected processing time. */
class RequestTimeoutException
  extends Exception("The request is taking longer than expected")
agourlay/omnibus
src/main/scala/omnibus/api/exceptions/RequestExceptions.scala
Scala
apache-2.0
125
// ContainersSpec: specs2 specification for the circe codecs of the
// self-describing container types. It checks, for both SelfDescribingData
// and SelfDescribingSchema: parse from Json (e1/e2), normalize back to a
// Json payload with schema/self envelope (e3/e4), and asString compact
// serialization (e5/e6). The code tokens below are kept byte-identical;
// the source is stored flattened on three physical lines.
/* * Copyright (c) 2012-2017 Snowplow Analytics Ltd. All rights reserved. * * This program is licensed to you under the Apache License Version 2.0, * and you may not use this file except in compliance with the Apache License Version 2.0. * You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0. * * Unless required by applicable law or agreed to in writing, * software distributed under the Apache License Version 2.0 is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ package com.snowplowanalytics.iglu.core.circe // specs2 import org.specs2.Specification import cats.syntax.either._ // circe import io.circe._ import io.circe.literal._ // This library import com.snowplowanalytics.iglu.core._ class ContainersSpec extends Specification { def is = s2""" Specification for container types extract SelfDescribingData $e1 extract SelfDescribingSchema $e2 normalize SelfDescribingData $e3 normalize SelfDescribingSchema $e4 stringify SelfDescribingData $e5 stringify SelfDescribingSchema $e6 """ import implicits._ def e1 = { val result: Json = json""" { "schema": "iglu:com.snowplowanalytics.snowplow/geolocation_context/jsonschema/1-1-0", "data": { "latitude": 32.2, "longitude": 53.23, "speed": 40 } } """ val key = SchemaKey("com.snowplowanalytics.snowplow", "geolocation_context", "jsonschema", SchemaVer.Full(1,1,0)) val data = json""" { "latitude": 32.2, "longitude": 53.23, "speed": 40 } """ SelfDescribingData.parse(result) must beRight(SelfDescribingData(key, data)) } def e2 = { val result: Json = json""" { "self": { "vendor": "com.acme", "name": "keyvalue", "format": "jsonschema", "version": "1-1-0" }, "type": "object", "properties": { "name": { "type": "string" }, "value": { "type": "string" } } } """ val self = SchemaMap("com.acme", "keyvalue", "jsonschema", 
SchemaVer.Full(1,1,0)) val schema = json""" { "type": "object", "properties": { "name": { "type": "string" }, "value": { "type": "string" } } } """ // With AttachTo[JValue] with ToData[JValue] in scope .toSchema won't be even available SelfDescribingSchema.parse(result) must beRight(SelfDescribingSchema(self, schema)) } def e3 = { val schema = SchemaKey("com.snowplowanalytics.snowplow", "geolocation_context", "jsonschema", SchemaVer.Full(1,1,0)) val data: Json = json""" { "latitude": 32.2, "longitude": 53.23, "speed": 40 } """ val expected: Json = json""" { "schema": "iglu:com.snowplowanalytics.snowplow/geolocation_context/jsonschema/1-1-0", "data": { "latitude": 32.2, "longitude": 53.23, "speed": 40 } } """ val result = SelfDescribingData(schema, data).normalize result must beEqualTo(expected) } def e4 = { val self = SchemaMap("com.acme", "keyvalue", "jsonschema", SchemaVer.Full(1,1,0)) val schema: Json = json""" { "type": "object", "properties": { "name": { "type": "string" }, "value": { "type": "string" } } } """ val expected: Json = json""" { "self": { "vendor": "com.acme", "name": "keyvalue", "format": "jsonschema", "version": "1-1-0" }, "type": "object", "properties": { "name": { "type": "string" }, "value": { "type": "string" } } } """ val result = SelfDescribingSchema(self, schema) result.normalize must beEqualTo(expected) } def e5 = { val schema = SchemaKey("com.snowplowanalytics.snowplow", "geolocation_context", "jsonschema", SchemaVer.Full(1,1,0)) val data: Json = json""" { "latitude": 32.2, "longitude": 53.23, "speed": 40 } """ val expected: String = """{"schema":"iglu:com.snowplowanalytics.snowplow/geolocation_context/jsonschema/1-1-0","data":{"latitude":32.2,"longitude":53.23,"speed":40}}""" val result = SelfDescribingData(schema, data).asString result must beEqualTo(expected) } def e6 = { val self = SchemaMap("com.acme", "keyvalue", "jsonschema", SchemaVer.Full(1,1,0)) val schema: Json = json""" { "type": "object", "properties": { "name": { "type": 
"string" }, "value": { "type": "string" } } } """ val expected: String = """{"type":"object","properties":{"name":{"type":"string"},"value":{"type":"string"}},"self":{"vendor":"com.acme","name":"keyvalue","format":"jsonschema","version":"1-1-0"}}""" val result = SelfDescribingSchema(self, schema) result.asString must beEqualTo(expected) } }
snowplow/iglu
0-common/scala-core/iglu-core-circe/src/test/scala/com/snowplowanalytics/iglu/core/circe/ContainersSpec.scala
Scala
apache-2.0
5,399
package rere.ql.queries

import java.util.UUID

import io.circe.{Json, JsonObject}

import rere.ql.types._
import rere.ql.values._

/**
 * Implicit converters for standard types.
 *
 * Each `expr` overload lifts a plain Scala, Java or circe value into the
 * matching ReQL query node, so literals can appear directly inside query
 * expressions.
 */
trait ValueQueries {

  implicit def expr(nullRef: Null): ReqlNull = new ReqlNullQuery(nullRef)

  implicit def expr(bool: Boolean): ReqlBoolean = new ReqlBooleanQuery(bool)

  // Integral types of increasing width all lift to ReqlInteger.
  implicit def expr(number: Int): ReqlInteger = new ReqlIntQuery(number)

  implicit def expr(number: Long): ReqlInteger = new ReqlLongQuery(number)

  implicit def expr(number: BigInt): ReqlInteger = new ReqlBigIntQuery(number)

  // Fractional types lift to ReqlFloat.
  implicit def expr(number: Double): ReqlFloat = new ReqlDoubleQuery(number)

  implicit def expr(number: BigDecimal): ReqlFloat = new ReqlBigDecimalQuery(number)

  implicit def expr(string: String): ReqlString = new ReqlStringQuery(string)

  implicit def expr(uuid: UUID): ReqlUUID = new ReqlUUIDQuery(uuid)

  // circe containers and ReQL datum collections.
  implicit def expr(jsonArray: List[Json]): ReqlArray[ReqlJson] = new ReqlJsonArrayQuery(jsonArray)

  implicit def expr[T <: ReqlDatum](iterable: Iterable[T]): ReqlArray[T] = new ReqlMakeArrayFromIterableQuery(iterable)

  implicit def expr(jsonObj: JsonObject): ReqlJsonObject = new ReqlJsonObjectQuery(jsonObj)

  implicit def expr(reqlObj: Map[String, ReqlDatum]): ReqlObject = new ReqlMakeObjFromMapQuery(reqlObj)

  implicit def expr(json: Json): ReqlJson = new ReqlJsonQuery(json)
}
pbaun/rere
modules/ql/src/main/scala/rere/ql/queries/ValueQueries.scala
Scala
apache-2.0
1,392
// ClusterRemapper: periodically polls a Cassandra seed node (describe_ring)
// and republishes host membership changes as a finagle Cluster change Spool.
// Invariant: "hosts" and the "changes" promise are updated together under
// `synchronized` inside the timer task, so `snap` observes a consistent view.
// NOTE(review): the timer task owns all mutation; each fetch builds a
// throw-away ClusterClientProvider that is closed in `ensure`. Code tokens
// below are kept byte-identical; the source is stored flattened.
// Copyright 2012 Twitter, Inc. // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.twitter.cassie import com.twitter.cassie.connection.CCluster import com.twitter.cassie.connection.{ClusterClientProvider, SocketAddressCluster, RetryPolicy} import com.twitter.concurrent.Spool import com.twitter.finagle.builder.{Cluster => FCluster} import com.twitter.finagle.ServiceFactory import com.twitter.finagle.stats.StatsReceiver import com.twitter.finagle.tracing.{ Tracer, NullTracer } import com.twitter.finagle.WriteException import org.slf4j.LoggerFactory import com.twitter.util.{ Duration, Future, Promise, Return, Time, JavaTimer } import java.io.IOException import java.net.{ InetSocketAddress, SocketAddress } import java.util.concurrent.TimeUnit import scala.collection.JavaConversions._ import scala.collection.SeqProxy import scala.util.parsing.json.JSON /** * Given a seed host and port, returns a set of nodes in the cluster. 
 * * @param keyspace the keyspace to map * @param seeds seed node addresses * @param port the Thrift port of client nodes */ object ClusterRemapper { private val log = LoggerFactory.getLogger(this.getClass) } private class ClusterRemapper( keyspace: String, seeds: Seq[InetSocketAddress], remapPeriod: Duration, port: Int = 9160, statsReceiver: StatsReceiver, tracerFactory: Tracer.Factory, username: String, password: String ) extends CCluster[SocketAddress] { import ClusterRemapper._ private[this] var hosts = seeds private[this] var user = username private[this] var pass = password private[this] var changes = new Promise[Spool[FCluster.Change[SocketAddress]]] // Timer keeps updating the host list. Variables "hosts" and "changes" together reflect the cluster consistently // at any time private[cassie] var timer = new JavaTimer(true) timer.schedule(Time.now, remapPeriod) { fetchHosts(hosts) onSuccess { ring => log.debug("Received: %s", ring) val (added, removed) = synchronized { val oldSet = hosts.toSet hosts = ring.flatMap { h => collectionAsScalaIterable(h.endpoints).map { new InetSocketAddress(_, port) } }.toSeq val newSet = hosts.toSet (newSet &~ oldSet, oldSet &~ newSet) } added foreach { host => appendChange(FCluster.Add(host)) } removed foreach { host => appendChange(FCluster.Rem(host)) } } onFailure { error => log.error("error mapping ring", error) statsReceiver.counter("ClusterRemapFailure." 
+ error.getClass().getName()).incr } } private[this] def appendChange(change: FCluster.Change[SocketAddress]) = { val newTail = new Promise[Spool[FCluster.Change[SocketAddress]]] changes() = Return(change *:: newTail) changes = newTail } def close = timer.stop() def snap: (Seq[SocketAddress], Future[Spool[FCluster.Change[SocketAddress]]]) = (hosts, changes) private[this] def fetchHosts(hosts: Seq[SocketAddress]) = { val ccp = new ClusterClientProvider( new SocketAddressCluster(hosts), keyspace, retries = 5, timeout = Duration(5, TimeUnit.SECONDS), requestTimeout = Duration(1, TimeUnit.SECONDS), connectTimeout = Duration(1, TimeUnit.SECONDS), minConnectionsPerHost = 1, maxConnectionsPerHost = 1, hostConnectionMaxWaiters = 100, statsReceiver = statsReceiver, tracerFactory = tracerFactory, retryPolicy = RetryPolicy.Idempotent, true, user, pass ) ccp map { log.info("Mapping cluster...") _.describe_ring(keyspace) } ensure { ccp.close() } } }
lookout/zipkin
zipkin-cassandra/src/main/scala/com/twitter/cassie/ClusterRemapper.scala
Scala
apache-2.0
4,143
// Compiler positive test (t6028 from the dotty/scalac test suite): `foo`
// declares result type Unit, so the function literal `() => a` is dropped
// via value discarding rather than returned — this must compile cleanly.
// Do not "fix" this file; the odd shape is the point of the test.
class C { def foo(a: Int): Unit = () => a }
folone/dotty
tests/untried/pos/t6028/t6028_1.scala
Scala
bsd-3-clause
48
// GOtherChangeInfoSpec: specs2 controller tests for the "Other Change
// Information" circumstances page — covers present(), caching of a valid
// submission, redirect after submit, schema max-length lookup (including a
// bad schema version), and browser-level maxlength/countdown rendering.
// Code tokens below are kept byte-identical; the source is stored flattened
// (the first physical line ends inside a string literal).
package controllers.circs.report_changes import app.ConfigProperties._ import play.api.test.FakeRequest import models.domain.{CircumstancesOtherInfo, MockForm} import models.view.CachedChangeOfCircs import play.api.test.Helpers._ import org.specs2.mutable._ import utils.pageobjects.circumstances.report_changes.GOtherChangeInfoPage import utils.{WithBrowser, WithApplication} class GOtherChangeInfoSpec extends Specification { val otherInfo = "other info" val otherChangeInfoInput = Seq("changeInCircs" -> otherInfo) val otherChangePath = "DWPCAChangeOfCircumstances//OtherChanges//Answer" section("unit", models.domain.CircumstancesOtherInfo.id) "Circumstances - OtherChangeInfo - Controller" should { "present 'Other Change Information' " in new WithApplication with MockForm { val request = FakeRequest() val result = GOtherChangeInfo.present(request) status(result) mustEqual OK } "add submitted form to the cached claim" in new WithApplication with MockForm { val request = FakeRequest() .withFormUrlEncodedBody(otherChangeInfoInput: _*) val result = GOtherChangeInfo.submit(request) val claim = getClaimFromCache(result,CachedChangeOfCircs.key) claim.questionGroup[CircumstancesOtherInfo] must beLike { case Some(f: CircumstancesOtherInfo) => { f.change must equalTo(otherInfo) } } } "redirect to the next page after a valid submission" in new WithApplication with MockForm { val request = FakeRequest() .withFormUrlEncodedBody(otherChangeInfoInput: _*) val result = GOtherChangeInfo.submit(request) status(result) mustEqual SEE_OTHER } "handle gracefully when bad schema number passed to SchemaValidation getRestriction" in new WithApplication { val schemaVersion = "BAD-SCHEMA" schemaMaxLength(schemaVersion, otherChangePath) mustEqual -1 } "pull maxlength from xml commons OK" in new WithApplication { val schemaVersion = getStringProperty("xml.schema.version") schemaVersion must not be "NOT-SET" schemaMaxLength(schemaVersion, otherChangePath) mustEqual 3000 } "have text maxlength set 
correctly in present()" in new WithBrowser { browser.goTo(GOtherChangeInfoPage.url) val anythingElse = browser.$("#changeInCircs") val countdown = browser.$("#changeInCircs + .countdown") anythingElse.getAttribute("maxlength") mustEqual "3000" countdown.getText must contain( "3000 char") browser.pageSource must contain("maxChars:3000") } } section("unit", models.domain.CircumstancesOtherInfo.id) }
Department-for-Work-and-Pensions/ClaimCapture
c3/test/controllers/circs/report_changes/GOtherChangeInfoSpec.scala
Scala
mit
2,647
// NakedAwait: negative compile tests for the stateless-future macro. Each
// case feeds a snippet to expectError and asserts that `await` is rejected
// in a context where the CPS transform cannot reach it: outside a Future
// block, under by-name arguments (including && / ||), inside nested
// objects/traits/classes/functions/methods, in finally blocks, pattern
// guards, after `return`, and inside lazy val initializers. The expected
// error strings are part of the tested contract — do not edit them.
// Code tokens below are kept byte-identical; the source is stored flattened
// (the first physical line ends inside a string literal).
/* * Copyright (C) 2012-2014 Typesafe Inc. <http://www.typesafe.com> */ package com.qifun.statelessFuture package test package neg import org.junit.Test class NakedAwait { @Test def `await only allowed in async neg`() { import _root_.com.qifun.statelessFuture.test.Async._ expectError("`await` must be enclosed in a `Future` block") { """ | import _root_.com.qifun.statelessFuture.test.Async._ | (null: _root_.com.qifun.statelessFuture.Future[Any]).await """.stripMargin } } @Test def `await not allowed in by-name argument`() { expectError("await must not be used under a by-name argument.") { """ | import _root_.com.qifun.statelessFuture.test.internal.AsyncId._ | def foo(a: Int)(b: => Int) = 0 | async { foo(0)(await(0)) } """.stripMargin } } @Test def `await not allowed in boolean short circuit argument 1`() { expectError("await must not be used under a by-name argument.") { """ | import _root_.com.qifun.statelessFuture.test.internal.AsyncId._ | async { true && await(false) } """.stripMargin } } @Test def `await not allowed in boolean short circuit argument 2`() { expectError("await must not be used under a by-name argument.") { """ | import _root_.com.qifun.statelessFuture.test.internal.AsyncId._ | async { true || await(false) } """.stripMargin } } @Test def nestedObject() { expectError("await must not be used under a nested object.") { """ | import _root_.com.qifun.statelessFuture.test.internal.AsyncId._ | async { object Nested { await(false) } } """.stripMargin } } @Test def nestedTrait() { expectError("await must not be used under a nested trait.") { """ | import _root_.com.qifun.statelessFuture.test.internal.AsyncId._ | async { trait Nested { await(false) } } """.stripMargin } } @Test def nestedClass() { expectError("await must not be used under a nested class.") { """ | import _root_.com.qifun.statelessFuture.test.internal.AsyncId._ | async { class Nested { await(false) } } """.stripMargin } } @Test def nestedFunction() { expectError("await must not be used under 
a nested function.") { """ | import _root_.com.qifun.statelessFuture.test.internal.AsyncId._ | async { () => { await(false) } } """.stripMargin } } @Test def nestedPatMatFunction() { expectError("await must not be used under a nested class.") { // TODO more specific error message """ | import _root_.com.qifun.statelessFuture.test.internal.AsyncId._ | async { { case x => { await(false) } } : PartialFunction[Any, Any] } """.stripMargin } } @Test def finallyBody() { expectError("await must not be used under a finally.") { """ | import _root_.com.qifun.statelessFuture.test.internal.AsyncId._ | async { try { () } finally { await(false) } } """.stripMargin } } @Test def guard() { expectError("await must not be used under a pattern guard.") { """ | import _root_.com.qifun.statelessFuture.test.internal.AsyncId._ | async { 1 match { case _ if await(true) => } } """.stripMargin } } @Test def nestedMethod() { expectError("await must not be used under a nested method.") { """ | import _root_.com.qifun.statelessFuture.test.internal.AsyncId._ | async { def foo = await(false) } """.stripMargin } } @Test def returnIllegal() { expectError("return is illegal") { """ | import _root_.com.qifun.statelessFuture.test.internal.AsyncId._ | def foo(): Any = async { return false } | () | |""".stripMargin } } @Test def lazyValIllegal() { expectError("await must not be used under a lazy val initializer") { """ | import _root_.com.qifun.statelessFuture.test.internal.AsyncId._ | def foo(): Any = async { val x = { lazy val y = await(0); y } } | () | |""".stripMargin } } }
Atry/stateless-future-test
test/src/test/scala/com/qifun/statelessFuture/test/neg/NakedAwait.scala
Scala
bsd-3-clause
4,220
/*
 * Copyright 2012 Twitter Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.twitter.zipkin.query

import com.google.common.base.Charsets.UTF_8
import com.twitter.finagle.stats.{DefaultStatsReceiver, Stat, StatsReceiver}
import com.twitter.finagle.tracing.{Trace => FTrace}
import com.twitter.logging.Logger
import com.twitter.util.Future
import com.twitter.zipkin.common.Span
import com.twitter.zipkin.conversions.thrift._
import com.twitter.zipkin.query.adjusters._
import com.twitter.zipkin.query.constants._
import com.twitter.zipkin.storage._
import com.twitter.zipkin.thriftscala
import com.twitter.zipkin.thriftscala.Dependencies
import java.nio.ByteBuffer

/**
 * Thrift-facing query service over the span and dependency stores.
 *
 * Every public method is wrapped in `handle`, which times the call and
 * translates failures into thriftscala.QueryException; query methods are
 * additionally wrapped in `handleQuery`, which rejects requests without a
 * service name and annotates the current finagle trace.
 */
class ThriftQueryService(
  spanStore: SpanStore,
  dependencyStore: DependencyStore = new NullDependencyStore,
  traceDurationFetchBatchSize: Int = 500,
  stats: StatsReceiver = DefaultStatsReceiver.scope("ThriftQueryService"),
  log: Logger = Logger.get("ThriftQueryService")
) extends thriftscala.ZipkinQuery[Future] with thriftscala.DependencyStore[Future] {

  private[this] val methodStats = stats.scope("perMethod")

  private val timeSkewAdjuster = new TimeSkewAdjuster()

  // Treats null and the empty string as "not provided".
  private[this] def opt[T](param: T): Option[T] = param match {
    case null | "" => None
    case s => Some(s)
  }

  /**
   * Builds Trace objects from raw span groups, optionally correcting clock skew.
   *
   * FIX: the previous version evaluated `traces.map(t => timeSkewAdjuster.adjust(_))`
   * and discarded the result — the lambda produced a function value instead of
   * applying the adjuster, and the mapped collection was never returned — so
   * callers always received the *unadjusted* traces regardless of the flag.
   */
  private[this] def adjustedTraces(spans: Seq[Seq[Span]], adjustClockSkew: Boolean): Seq[Trace] = {
    val traces = spans.map(Trace(_))
    if (adjustClockSkew) traces.map(timeSkewAdjuster.adjust) else traces
  }

  // Pads a trace timestamp so spans landing slightly after it are still matched.
  private[this] def padTimestamp(timestamp: Long): Long =
    timestamp + TraceTimestampPadding.inMicroseconds

  private[this] def traceIdsIntersect(idSeqs: Seq[Seq[IndexedTraceId]]): Seq[IndexedTraceId] = {
    /* Find the trace IDs present in all the Seqs */
    val idMaps = idSeqs.map(_.groupBy(_.traceId))
    val traceIds = idMaps.map(_.keys.toSeq)
    val commonTraceIds = traceIds.tail.fold(traceIds(0))(_.intersect(_))

    /*
     * Find the timestamps associated with each trace ID and construct a new IndexedTraceId
     * that has the trace ID's maximum timestamp (ending) as the timestamp
     */
    commonTraceIds map { id =>
      IndexedTraceId(id, idMaps.flatMap(_(id).map(_.timestamp)).max)
    }
  }

  // Truncates the matched trace IDs to the request limit.
  private[this] def queryResponse(
    ids: Seq[IndexedTraceId],
    qr: thriftscala.QueryRequest
  ): Future[Seq[Long]] = {
    Future.value(ids.slice(0, qr.limit).map(_.traceId))
  }

  private trait SliceQuery
  private case class SpanSliceQuery(name: String) extends SliceQuery
  private case class AnnotationSliceQuery(key: String, value: Option[ByteBuffer]) extends SliceQuery

  // Runs each slice query against the span store in parallel.
  private[this] def querySlices(slices: Seq[SliceQuery], qr: thriftscala.QueryRequest): Future[Seq[Seq[IndexedTraceId]]] =
    Future.collect(slices map {
      case SpanSliceQuery(name) =>
        spanStore.getTraceIdsByName(qr.serviceName, Some(name), qr.endTs, qr.limit)
      case AnnotationSliceQuery(key, value) =>
        spanStore.getTraceIdsByAnnotation(qr.serviceName, key, value, qr.endTs, qr.limit)
      case s =>
        // FIX: corrected "Uknown" typo in the error message.
        Future.exception(new Exception("Unknown SliceQuery: %s".format(s)))
    })

  // Times the wrapped computation and funnels failures into per-method
  // error counters plus a QueryException for the client.
  private[this] def handle[T](name: String)(f: => Future[T]): Future[T] = {
    val errorStats = methodStats.scope("errors")
    val ret = try {
      Stat.timeFuture(methodStats.stat(name))(f)
    } catch {
      case e: Exception => Future.exception(e)
    }
    ret rescue { case e: Exception =>
      log.error(e, "%s error".format(name))
      errorStats.counter(name).incr()
      errorStats.scope(name).counter(e.getClass.getName).incr()
      Future.exception(thriftscala.QueryException(e.toString))
    }
  }

  private[this] val noServiceNameError =
    Future.exception(thriftscala.QueryException("No service name provided"))

  // Rejects requests with no service name, otherwise records trace
  // annotations and delegates to `handle`.
  private[this] def handleQuery[T](name: String, qr: thriftscala.QueryRequest)(f: => Future[T]): Future[T] =
    if (!opt(qr.serviceName).isDefined) noServiceNameError
    else {
      FTrace.recordBinary("serviceName", qr.serviceName)
      FTrace.recordBinary("endTs", qr.endTs)
      FTrace.recordBinary("limit", qr.limit)
      handle(name)(f)
    }

  /**
   * Resolves the trace IDs matching a query request. With no slice criteria
   * this is a plain by-service lookup; with one it queries that slice; with
   * several it probes for the earliest matching timestamp, re-queries each
   * slice at that (padded) end timestamp and intersects the results.
   */
  def traceIds(qr: thriftscala.QueryRequest): Future[Seq[Long]] = {
    val sliceQueries = Seq[Option[Seq[SliceQuery]]](
      qr.spanName.map { n => Seq(SpanSliceQuery(n)) },
      qr.annotations.map { _.map { AnnotationSliceQuery(_, None) } },
      qr.binaryAnnotations.map {
        _.map { e =>
          AnnotationSliceQuery(e._1, Some(ByteBuffer.wrap(e._2.getBytes(UTF_8))))
        }(collection.breakOut)
      }
    ).flatten.flatten

    sliceQueries match {
      case Nil =>
        spanStore.getTraceIdsByName(qr.serviceName, None, qr.endTs, qr.limit) flatMap {
          queryResponse(_, qr)
        }

      case slice :: Nil =>
        querySlices(sliceQueries, qr) flatMap { ids => queryResponse(ids.flatten, qr) }

      case _ =>
        // TODO: timestamps endTs is the wrong name for all this
        querySlices(sliceQueries, qr.copy(limit = 1)) flatMap { ids =>
          val ts = padTimestamp(ids.flatMap(_.map(_.timestamp)).reduceOption(_ min _).getOrElse(0))
          querySlices(sliceQueries, qr.copy(endTs = ts)) flatMap { ids =>
            queryResponse(traceIdsIntersect(ids), qr)
          }
        }
    }
  }

  override def getTraces(qr: thriftscala.QueryRequest): Future[Seq[thriftscala.Trace]] =
    handleQuery("getTraces", qr) {
      traceIds(qr).flatMap(getTracesByIds(_, qr.adjustClockSkew))
    }

  // FIX: replaced a nonlocal `return` inside the by-name argument to `handle`
  // (which escapes via NonLocalReturnControl and bypassed the timing stat)
  // with an ordinary if/else expression.
  override def getTracesByIds(traceIds: Seq[Long], adjustClockSkew: Boolean = true): Future[Seq[thriftscala.Trace]] =
    handle("getTracesByIds") {
      if (traceIds.isEmpty) Future.value(Seq.empty)
      else {
        FTrace.recordBinary("numIds", traceIds.length)
        spanStore.getSpansByTraceIds(traceIds) map {
          adjustedTraces(_, adjustClockSkew).map(_.toThrift)
        }
      }
    }

  override def getServiceNames: Future[Set[String]] = handle("getServiceNames") {
    spanStore.getAllServiceNames
  }

  override def getSpanNames(serviceName: String): Future[Set[String]] = handle("getSpanNames") {
    spanStore.getSpanNames(serviceName)
  }

  override def getDependencies(startTime: Option[Long], endTime: Option[Long]) =
    handle("getDependencies") {
      dependencyStore.getDependencies(startTime, endTime).map(_.toThrift)
    }

  override def storeDependencies(dependencies: Dependencies) =
    handle("storeDependencies") {
      dependencyStore.storeDependencies(dependencies.toDependencies)
    }
}
coursera/zipkin
zipkin-query/src/main/scala/com/twitter/zipkin/query/ThriftQueryService.scala
Scala
apache-2.0
6,996
package net.ultrametrics.fractactor

import scala.swing._
import java.awt.image.BufferedImage

/**
 * Swing panel backed by a BufferedImage that renderer actors write into
 * incrementally, scanline by scanline or pixel by pixel.
 */
class RenderPanel() extends Panel {

  // Backing buffer; remains null until create() is invoked.
  var image: BufferedImage = null

  /** Allocates the backing image and paints it white. */
  def create(width: Int, height: Int) {
    image = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB)
    // set initial background white to highlight async progress of actors
    for {
      col <- 0 until width
      row <- 0 until height
    } image.setRGB(col, row, 0xffffff);
  }

  override def paintComponent(g: Graphics2D) = g.drawImage(image, 0, 0, null)

  /** Writes one full horizontal row of pixels at row y. */
  def drawScanline(width: Int, y: Int, scanline: Array[Int]) =
    image.setRGB(0, y, width, 1, scanline, 0, 1)

  /** Writes a single pixel value at (x, y). */
  def drawPixel(x: Int, y: Int, value: Int) = image.setRGB(x, y, value)
}
pchuck/fractactor
src/main/scala/net/ultrametrics/fractactor/RenderPanel.scala
Scala
bsd-2-clause
728
// PartModelAnalysis: indexes the static models of an XForms part by scope,
// by model prefixed id, and by instance prefixed id, and exposes lookups
// over those maps (default model per scope, model for a bind id, instance
// prefixed-id resolution walking up the scope chain). indexModel/deindexModel
// keep the three maps in sync; analyzeModelsXPath and freeTransientState
// fan out to each indexed model. Code tokens below are kept byte-identical;
// the source is stored flattened (the first physical line ends mid-token).
/** * Copyright (C) 2007 Orbeon, Inc. * * This program is free software; you can redistribute it and/or modify it under the terms of the * GNU Lesser General Public License as published by the Free Software Foundation; either version * 2.1 of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. * See the GNU Lesser General Public License for more details. * * The full text of the license is available at http://www.gnu.org/copyleft/lesser.html */ package org.orbeon.oxf.xforms.analysis import collection.JavaConverters._ import model.Model import collection.mutable.{LinkedHashMap, Buffer} import org.orbeon.oxf.xforms.event.EventHandlerImpl import org.orbeon.oxf.xforms.xbl.Scope import org.orbeon.oxf.util.CollectionUtils._ // Part analysis: models and instances information trait PartModelAnalysis extends TransientState { self: PartAnalysisImpl ⇒ private[PartModelAnalysis] val modelsByScope = LinkedHashMap[Scope, Buffer[Model]]() private[PartModelAnalysis] val modelsByPrefixedId = LinkedHashMap[String, Model]() private[PartModelAnalysis] val modelByInstancePrefixedId = LinkedHashMap[String, Model]() def getModel(prefixedId: String) = modelsByPrefixedId.get(prefixedId).orNull def getModelByInstancePrefixedId(prefixedId: String) = modelByInstancePrefixedId.get(prefixedId).orNull def getInstances(modelPrefixedId: String) = modelsByPrefixedId.get(modelPrefixedId).toSeq flatMap (_.instances.values) asJava def defaultModel = getDefaultModelForScope(startScope) def getDefaultModelForScope(scope: Scope) = modelsByScope.get(scope) flatMap (_.headOption) def getModelByScopeAndBind(scope: Scope, bindStaticId: String) = modelsByScope.get(scope) flatMap (_ find (_.bindsById.contains(bindStaticId))) orNull def getModelsForScope(scope: Scope) = modelsByScope.getOrElse(scope, Seq()) def 
findInstancePrefixedId(startScope: Scope, instanceStaticId: String): Option[String] = { val prefixedIdIt = for { scope ← Iterator.iterateOpt(startScope)(_.parent) model ← getModelsForScope(scope) if model.instancesMap.containsKey(instanceStaticId) } yield scope.prefixedIdForStaticId(instanceStaticId) prefixedIdIt.nextOption() } protected def indexModel(model: Model, eventHandlers: Buffer[EventHandlerImpl]): Unit = { val models = modelsByScope.getOrElseUpdate(model.scope, Buffer[Model]()) models += model modelsByPrefixedId += model.prefixedId → model for (instance ← model.instances.values) modelByInstancePrefixedId += instance.prefixedId → model } protected def deindexModel(model: Model): Unit = { modelsByScope.get(model.scope) foreach (_ -= model) modelsByPrefixedId -= model.prefixedId for (instance ← model.instances.values) modelByInstancePrefixedId -= instance.prefixedId } protected def analyzeModelsXPath() = for { models ← modelsByScope.valuesIterator model ← models.iterator } locally { model.analyzeXPath() } override def freeTransientState() = { super.freeTransientState() for (model ← modelsByPrefixedId.values) model.freeTransientState() } }
brunobuzzi/orbeon-forms
xforms/jvm/src/main/scala/org/orbeon/oxf/xforms/analysis/PartModelAnalysis.scala
Scala
lgpl-2.1
3,417
// GraphOpsSuite: Spark GraphX operator tests — joinVertices,
// collectNeighborIds, filter, and collectEdges in Out/In/Either directions
// over cycle and chain graphs built by the private helpers at the bottom.
// NOTE(review): in "collectEdgesChainDirectionEither" the comment copied
// from the In-direction test still says "We expect only 49", but the
// assertion (correctly) expects 50 — every vertex of a 50-node chain has at
// least one incident edge when direction is Either.
// NOTE(review): in "collectEdgesChainDirectionIn" the set is named
// edgeDstIds although it actually collects srcIds.
// Code tokens below are kept byte-identical; the source is stored flattened.
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.graphx import org.apache.spark.SparkContext import org.apache.spark.graphx.Graph._ import org.apache.spark.graphx.impl.EdgePartition import org.apache.spark.rdd._ import org.scalatest.FunSuite class GraphOpsSuite extends FunSuite with LocalSparkContext { test("joinVertices") { withSpark { sc => val vertices = sc.parallelize(Seq[(VertexId, String)]((1, "one"), (2, "two"), (3, "three")), 2) val edges = sc.parallelize((Seq(Edge(1, 2, "onetwo")))) val g: Graph[String, String] = Graph(vertices, edges) val tbl = sc.parallelize(Seq[(VertexId, Int)]((1, 10), (2, 20))) val g1 = g.joinVertices(tbl) { (vid: VertexId, attr: String, u: Int) => attr + u } val v = g1.vertices.collect().toSet assert(v === Set((1, "one10"), (2, "two20"), (3, "three"))) } } test("collectNeighborIds") { withSpark { sc => val graph = getCycleGraph(sc, 100) val nbrs = graph.collectNeighborIds(EdgeDirection.Either).cache() assert(nbrs.count === 100) assert(graph.numVertices === nbrs.count) nbrs.collect.foreach { case (vid, nbrs) => assert(nbrs.size === 2) } nbrs.collect.foreach { case (vid, nbrs) => val s = nbrs.toSet assert(s.contains((vid + 1) % 100)) assert(s.contains(if (vid > 0) vid - 1 else 99)) } } } test 
("filter") { withSpark { sc => val n = 5 val vertices = sc.parallelize((0 to n).map(x => (x:VertexId, x))) val edges = sc.parallelize((1 to n).map(x => Edge(0, x, x))) val graph: Graph[Int, Int] = Graph(vertices, edges).cache() val filteredGraph = graph.filter( graph => { val degrees: VertexRDD[Int] = graph.outDegrees graph.outerJoinVertices(degrees) {(vid, data, deg) => deg.getOrElse(0)} }, vpred = (vid: VertexId, deg:Int) => deg > 0 ).cache() val v = filteredGraph.vertices.collect().toSet assert(v === Set((0,0))) // the map is necessary because of object-reuse in the edge iterator val e = filteredGraph.edges.map(e => Edge(e.srcId, e.dstId, e.attr)).collect().toSet assert(e.isEmpty) } } test("collectEdgesCycleDirectionOut") { withSpark { sc => val graph = getCycleGraph(sc, 100) val edges = graph.collectEdges(EdgeDirection.Out).cache() assert(edges.count == 100) edges.collect.foreach { case (vid, edges) => assert(edges.size == 1) } edges.collect.foreach { case (vid, edges) => val s = edges.toSet val edgeDstIds = s.map(e => e.dstId) assert(edgeDstIds.contains((vid + 1) % 100)) } } } test("collectEdgesCycleDirectionIn") { withSpark { sc => val graph = getCycleGraph(sc, 100) val edges = graph.collectEdges(EdgeDirection.In).cache() assert(edges.count == 100) edges.collect.foreach { case (vid, edges) => assert(edges.size == 1) } edges.collect.foreach { case (vid, edges) => val s = edges.toSet val edgeSrcIds = s.map(e => e.srcId) assert(edgeSrcIds.contains(if (vid > 0) vid - 1 else 99)) } } } test("collectEdgesCycleDirectionEither") { withSpark { sc => val graph = getCycleGraph(sc, 100) val edges = graph.collectEdges(EdgeDirection.Either).cache() assert(edges.count == 100) edges.collect.foreach { case (vid, edges) => assert(edges.size == 2) } edges.collect.foreach { case (vid, edges) => val s = edges.toSet val edgeIds = s.map(e => if (vid != e.srcId) e.srcId else e.dstId) assert(edgeIds.contains((vid + 1) % 100)) assert(edgeIds.contains(if (vid > 0) vid - 1 else 99)) } } 
} test("collectEdgesChainDirectionOut") { withSpark { sc => val graph = getChainGraph(sc, 50) val edges = graph.collectEdges(EdgeDirection.Out).cache() assert(edges.count == 49) edges.collect.foreach { case (vid, edges) => assert(edges.size == 1) } edges.collect.foreach { case (vid, edges) => val s = edges.toSet val edgeDstIds = s.map(e => e.dstId) assert(edgeDstIds.contains(vid + 1)) } } } test("collectEdgesChainDirectionIn") { withSpark { sc => val graph = getChainGraph(sc, 50) val edges = graph.collectEdges(EdgeDirection.In).cache() // We expect only 49 because collectEdges does not return vertices that do // not have any edges in the specified direction. assert(edges.count == 49) edges.collect.foreach { case (vid, edges) => assert(edges.size == 1) } edges.collect.foreach { case (vid, edges) => val s = edges.toSet val edgeDstIds = s.map(e => e.srcId) assert(edgeDstIds.contains((vid - 1) % 100)) } } } test("collectEdgesChainDirectionEither") { withSpark { sc => val graph = getChainGraph(sc, 50) val edges = graph.collectEdges(EdgeDirection.Either).cache() // We expect only 49 because collectEdges does not return vertices that do // not have any edges in the specified direction. 
assert(edges.count === 50) edges.collect.foreach { case (vid, edges) => if (vid > 0 && vid < 49) assert(edges.size == 2) else assert(edges.size == 1) } edges.collect.foreach { case (vid, edges) => val s = edges.toSet val edgeIds = s.map(e => if (vid != e.srcId) e.srcId else e.dstId) if (vid == 0) { assert(edgeIds.contains(1)) } else if (vid == 49) { assert(edgeIds.contains(48)) } else { assert(edgeIds.contains(vid + 1)) assert(edgeIds.contains(vid - 1)) } } } } private def getCycleGraph(sc: SparkContext, numVertices: Int): Graph[Double, Int] = { val cycle = (0 until numVertices).map(x => (x, (x + 1) % numVertices)) getGraphFromSeq(sc, cycle) } private def getChainGraph(sc: SparkContext, numVertices: Int): Graph[Double, Int] = { val chain = (0 until numVertices - 1).map(x => (x, (x + 1))) getGraphFromSeq(sc, chain) } private def getGraphFromSeq(sc: SparkContext, seq: IndexedSeq[(Int, Int)]): Graph[Double, Int] = { val rawEdges = sc.parallelize(seq, 3).map { case (s, d) => (s.toLong, d.toLong) } Graph.fromEdgeTuples(rawEdges, 1.0).cache() } }
sryza/spark
graphx/src/test/scala/org/apache/spark/graphx/GraphOpsSuite.scala
Scala
apache-2.0
7,233
/* * Copyright 2017 HM Revenue & Customs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package uk.gov.hmrc.ct.accounts.approval.boxes import uk.gov.hmrc.ct.accounts.frs10x.retriever.Frs10xAccountsBoxRetriever import uk.gov.hmrc.ct.accounts.retriever.AccountsBoxRetriever import uk.gov.hmrc.ct.box.ValidatableBox._ import uk.gov.hmrc.ct.box._ case class AC199A(value: String) extends CtBoxIdentifier(name = "Approve accounts approver") with CtString with Input with ValidatableBox[AccountsBoxRetriever] { override def validate(boxRetriever: AccountsBoxRetriever): Set[CtValidation] = { validateStringMaxLength("AC199A", this.value, StandardCohoNameFieldLimit) ++ validateCohoNameField("AC199A", this) } }
liquidarmour/ct-calculations
src/main/scala/uk/gov/hmrc/ct/accounts/approval/boxes/AC199A.scala
Scala
apache-2.0
1,247
package com.gilt.pickling.avro import com.gilt.pickling.TestObjs.{SelfReferencingObject, SingleOptionObject} import org.apache.avro.Schema import com.gilt.pickling.TestUtils._ import scala.Some import org.apache.avro.generic.GenericData import org.scalatest.{Assertions, FunSuite} import scala.pickling._ object ComplexObjectsTest { val selfRef = SelfReferencingObject(1, Some(SelfReferencingObject(2, None))) } class ComplexObjectsTest extends FunSuite with Assertions { import ComplexObjectsTest._ test("Pickle a case class with self references obj") { val pckl = selfRef.pickle assert(generateBytesFromAvro(selfRef) === pckl.value) } test("Unpickle a case class with self references obj") { val bytes = generateBytesFromAvro(selfRef) val hydratedObj: SelfReferencingObject = bytes.unpickle[SelfReferencingObject] assert(selfRef === hydratedObj) } test("Round trip a case class with self references obj") { val pckl = selfRef.pickle val hydratedObj: SelfReferencingObject = pckl.unpickle[SelfReferencingObject] assert(hydratedObj === selfRef) } private def generateBytesFromAvro(obj: SelfReferencingObject) = { val schema: Schema = retrieveAvroSchemaFromFile("/avro/object/SelfReferencingObject.avsc") val innerRecord = new GenericData.Record(schema) obj.inner match { case Some(x) => innerRecord.put("id", x.id) innerRecord.put("inner", null) case _ => null } val record = new GenericData.Record(schema) record.put("id", obj.id) record.put("inner", innerRecord) convertToBytes(schema, record) } }
gilt/gfc-avro
src/test/scala/com/gilt/pickling/avro/SelfReferenceObjectsTest.scala
Scala
apache-2.0
1,623
package com.btcontract.wallet import Utils._ import R.string._ import android.net.Uri import android.text._ import android.view._ import android.webkit.URLUtil import android.widget._ import com.google.common.io.Files import org.bitcoinj.core._ import org.bitcoinj.core.listeners._ import android.widget.RadioGroup.OnCheckedChangeListener import info.hoang8f.android.segmented.SegmentedGroup import concurrent.ExecutionContext.Implicits.global import android.view.inputmethod.InputMethodManager import org.bitcoinj.crypto.KeyCrypterException import android.view.View.OnClickListener import org.bitcoinj.store.SPVBlockStore import android.app.AlertDialog.Builder import android.util.DisplayMetrics import org.bitcoinj.uri.BitcoinURI import scala.concurrent.Future import org.bitcoinj.wallet.{SendRequest, Wallet} import org.bitcoinj.wallet.Wallet.{CouldNotAdjustDownwards, ExceededMaxTransactionSize} import org.bitcoinj.wallet.listeners.{WalletChangeEventListener, WalletCoinsReceivedEventListener, WalletCoinsSentEventListener} import R.id.{amtInBit, amtInBtc, amtInSat, typeCNY, typeEUR, typeUSD} import com.btcontract.wallet.helper.{Fee, FiatRates, RandomGenerator} import android.text.method.{DigitsKeyListener, LinkMovementMethod} import android.content.{Context, DialogInterface, Intent} import java.text.{DecimalFormat, DecimalFormatSymbols} import java.util.{Locale, Timer, TimerTask} import scala.util.{Failure, Success, Try} import android.app.{Activity, Dialog} import DialogInterface.BUTTON_POSITIVE import ViewGroup.LayoutParams.WRAP_CONTENT import InputMethodManager.HIDE_NOT_ALWAYS import Transaction.MIN_NONDUST_OUTPUT import Context.INPUT_METHOD_SERVICE import android.os.Bundle import android.view.WindowManager.LayoutParams object Utils { me => type Bytes = Array[Byte] type TryCoin = Try[Coin] // Cannot have lazy var so use this construct var startupAppReference: WalletApp = null lazy val app = startupAppReference val passType = InputType.TYPE_CLASS_TEXT | 
InputType.TYPE_TEXT_VARIATION_PASSWORD val textType = InputType.TYPE_CLASS_TEXT | InputType.TYPE_TEXT_VARIATION_VISIBLE_PASSWORD // Mapping from text to Android id integer val Seq(strDollar, strEuro, strYuan) = List("dollar", "euro", "yuan") val fiatMap = Map(typeUSD -> strDollar, typeEUR -> strEuro, typeCNY -> strYuan) val revFiatMap = Map(strDollar -> typeUSD, strEuro -> typeEUR, strYuan -> typeCNY) val appName = "Bitcoin" val nullFail = Failure(null) val rand = new RandomGenerator lazy val sumIn = app getString txs_sum_in lazy val sumOut = app getString txs_sum_out // Various denom format rules val locale = new Locale("en", "US") val baseFiat = new DecimalFormat("#.##") val baseBtc = new DecimalFormat("#.########") val baseSat = new DecimalFormat("###,###,###") val baseBit = new DecimalFormat("#,###,###.##") val symbols = new DecimalFormatSymbols(locale) baseFiat setDecimalFormatSymbols symbols baseBit setDecimalFormatSymbols symbols baseSat setDecimalFormatSymbols symbols baseBtc setDecimalFormatSymbols symbols def sat(coin: Coin) = baseSat format coin.value def bit(coin: Coin) = baseBit format BigDecimal(coin.value) / 100 def btc(coin: Coin) = baseBtc format BigDecimal(coin.value) / 100000000 // App wide utility functions def btcHuman(coin: Coin) = app getString input_alt_btc format btc(coin) def wrap(run: => Unit)(go: => Unit) = try go catch none finally run def humanAddr(adr: Address) = s"$adr" grouped 4 mkString "\\u0020" def none: PartialFunction[Any, Unit] = { case _ => } def runAnd[T](result: T)(action: Any) = result // Fiat rates related functions, all transform a Try monad def currentFiatName = app.prefs.getString(AbstractKit.CURRENCY, strDollar) def currentRate = for (rates <- FiatRates.rates) yield rates(currentFiatName) // Iff we have rates and amount then fiat price def inFiat(tc: TryCoin) = currentRate flatMap { rt => for (coin <- tc) yield coin.getValue * rt / 100000000 } def fiatSign(amt: Double) = baseFiat format amt match { case amount if 
currentFiatName == strYuan => s"$amount CNY" case amount if currentFiatName == strEuro => s"$amount €" case amount => s"&#36;$amount" } } // Info stack manager abstract class InfoActivity extends AnimatorActivity { me => val tracker = new WalletChangeEventListener with WalletCoinsReceivedEventListener with WalletCoinsSentEventListener with TransactionConfidenceEventListener { def onCoinsReceived(w: Wallet, tx: Transaction, pb: Coin, nb: Coin) = if (nb isGreaterThan pb) anim(me getString tx_received format btc(nb subtract pb), Informer.RECEIVED) def onTransactionConfidenceChanged(w: Wallet, tx: Transaction) = if (tx.getConfidence.getDepthInBlocks == 1) anim(getString(tx_1st_conf), Informer.TXCONFIRMED) def onCoinsSent(w: Wallet, tx: Transaction, pb: Coin, nb: Coin) = anim(me getString tx_sent format btc(pb subtract nb), Informer.DECSEND) def onWalletChanged(w: Wallet) = none } // Peers listeners class CatchTracker extends MyPeerDataListener { def onBlocksDownloaded(peer: Peer, block: Block, fBlock: FilteredBlock, left: Int) = { app.kit.peerGroup addBlocksDownloadedEventListener new NextTracker(blocksNumberLeftOnStart = left) app.kit.peerGroup removeBlocksDownloadedEventListener this } } class NextTracker(blocksNumberLeftOnStart: Int) extends MyPeerDataListener { def onBlocksDownloaded(peer: Peer, block: Block, fBlock: FilteredBlock, left: Int) = { if (blocksNumberLeftOnStart > 144) update(howManyBlocksLeftInPlainText format left, Informer.SYNC) if (left < 1) add(getString(info_progress_done), Informer.SYNC).timer.schedule(me del Informer.SYNC, 5000) if (left < 1) app.kit.peerGroup removeBlocksDownloadedEventListener this if (left < 1) app.kit.wallet saveToFile app.walletFile runOnUiThread(ui) } val howManyBlocksLeftInPlainText = me getString info_progress val howManyBlocksLeftOnStart = howManyBlocksLeftInPlainText format blocksNumberLeftOnStart if (blocksNumberLeftOnStart > 144) add(howManyBlocksLeftOnStart, Informer.SYNC) } val constListener = new 
PeerConnectedEventListener with PeerDisconnectedEventListener { def onPeerDisconnected(p: Peer, pc: Int) = me runOnUiThread update(mkTxt, Informer.PEERS).ui def onPeerConnected(p: Peer, pc: Int) = me runOnUiThread update(mkTxt, Informer.PEERS).ui def mkTxt = app.plurOrZero(peersInfoOpts, app.kit.peerGroup.numConnectedPeers) } lazy val peersInfoOpts = getResources getStringArray R.array.info_peers lazy val requestOpts = getResources getStringArray R.array.dialog_request // Activity lifecycle listeners management override def onOptionsItemSelected(m: MenuItem) = runAnd(true) { if (m.getItemId == R.id.actionRequestPayment) mkRequestForm else if (m.getItemId == R.id.actionTrustedNode) mkTrustedNodeForm else if (m.getItemId == R.id.actionSettings) mkSetsForm else if (m.getItemId == R.id.actionBuyCoins) { val payTo = app.kit.currentAddress.toString val msg = Html fromHtml getString(buy_info).format(payTo) mkForm(negBld(dialog_cancel), getString(action_buy), msg) } } def mkTrustedNodeForm = { val title = Html fromHtml getString(trusted_node_details) val (passAsk, address) = generatePasswordPromptView(textType, trusted_node_address) mkForm(mkChoiceDialog(check(address.getText.toString), none, dialog_ok, dialog_back), title, passAsk) address setText app.prefs.getString(AbstractKit.FULL_NODE_ADDRESS, "") def check(address: String) = if (address.isEmpty) app.prefs.edit.remove(AbstractKit.FULL_NODE_ADDRESS).commit else { app.prefs.edit.putString(AbstractKit.FULL_NODE_ADDRESS, address).commit toast(trusted_node_added) } } override def onCreateOptionsMenu(menu: Menu) = runAnd(true) { getMenuInflater.inflate(R.menu.transactions_ops, menu) } // Top bar reactions def goQRScan(top: View) = me goTo classOf[ScanActivity] def goLNWallet(top: View) = ??? 
//me goTo classOf[LNTxsActivity] def doReceive(top: View) = { val payData = PayData(app.kit.currentAddress, nullFail) app.TransData.value = Option apply payData me goTo classOf[RequestActivity] } def doPay(top: View): SpendManager = { val content = getLayoutInflater.inflate(R.layout.frag_input_spend, null, false) val address = content.findViewById(R.id.addressData).asInstanceOf[EditText] val alert = mkForm(negPosBld(dialog_cancel, dialog_next), null, content) val man = new RateManager(content) val ok = alert getButton BUTTON_POSITIVE ok setOnClickListener new OnClickListener { def onClick(proceed: View) = man.result match { case Failure(amountIsEmpty) => toast(dialog_sum_empty) case Success(cn) if Try(btcAddr).isFailure => toast(dialog_addr_wrong) case Success(cn) if cn isLessThan MIN_NONDUST_OUTPUT => toast(dialog_sum_dusty) case _ => rm(alert)(next.showForm) } } def next = new CompletePay(me) { def errorAction = doPay(top) set pay val pay = PayData(btcAddr, man.result) val title = pay pretty sumOut def confirm = { add(me getString tx_announce, Informer.DECSEND).ui.run <(announceTransaction, errorReact)(none) } } def btcAddr = app getTo address.getText.toString new SpendManager(address, man) } def mkRequestForm = { val requestText = me getString action_request_payment val content = getLayoutInflater.inflate(R.layout.frag_input_receive, null) val alert = mkForm(negPosBld(dialog_cancel, dialog_next), requestText, content) val man = new RateManager(content) val ok = alert getButton BUTTON_POSITIVE ok setOnClickListener new OnClickListener { def onClick(posButtonView: View) = rm(alert) { val pay = PayData(app.kit.currentAddress, man.result) val listCon = getLayoutInflater.inflate(R.layout.frag_center_list, null).asInstanceOf[ListView] val adapter = new ArrayAdapter(me, R.layout.frag_center_text, R.id.textItem, requestOpts) val dialog = mkForm(negBld(dialog_cancel), Html fromHtml pay.pretty(sumIn), listCon) listCon setOnItemClickListener onTap { position => rm(dialog) 
{ /**/ choose(position, pay.string) /**/ } } app.TransData.value = Option(pay) listCon setAdapter adapter } } } def mkSetsForm: Unit = { val walletState = if (app.kit.wallet.isEncrypted) encrypted_yes else encrypted_no val headerText = getString(walletState) + getString(read_settings) val form = getLayoutInflater.inflate(R.layout.frag_settings, null) val dialog = mkForm(me negBld dialog_back, Html fromHtml headerText, form) val rescanWallet = form.findViewById(R.id.rescanWallet).asInstanceOf[Button] val viewMnemonic = form.findViewById(R.id.viewMnemonic).asInstanceOf[Button] val changePass = form.findViewById(R.id.changePass).asInstanceOf[Button] rescanWallet setOnClickListener new OnClickListener { def onClick(restoreWalletView: View) = rm(dialog)(openForm) def openForm = checkPass { pass => val dialog = mkChoiceDialog(go, mkSetsForm, dialog_ok, dialog_back) mkForm(dialog setMessage sets_rescan_ok, null, null) } def go = try { app.chainFile.delete app.kit.wallet.reset app.kit.store = new SPVBlockStore(app.params, app.chainFile) app.kit useCheckPoints app.kit.wallet.getEarliestKeyCreationTime app.kit.wallet saveToFile app.walletFile } catch none finally System exit 0 } changePass setOnClickListener new OnClickListener { def onClick(changePassView: View) = rm(dialog)(openForm) def openForm = checkPass { oldPass => shortCheck(password_new, password_too_short) { newPass => <(rotatePassword, _ => System exit 0)(_ => me toast sets_password_ok) add(app getString pass_changing, Informer.CODECHECK).ui.run timer.schedule(me del Informer.CODECHECK, 5000) def rotatePassword = { app.kit.wallet decrypt oldPass app.kit encryptWallet newPass } } } } viewMnemonic setOnClickListener new OnClickListener { def onClick(viewMnemonic: View) = rm(dialog)(openForm) def openForm: Unit = passPlus(back = mkSetsForm) { password => <(fun = Mnemonic decrypt password, _ => wrong) { walletSeed => getWindow.setFlags(LayoutParams.FLAG_SECURE, LayoutParams.FLAG_SECURE) val bld = new Builder(me) 
setCustomTitle getString(sets_noscreen) bld.setMessage(Mnemonic text walletSeed).show } } } // Check wallet password and inform if wrong def wrong = wrap(me toast password_wrong)(mkSetsForm) def checkPass(next: String => Unit) = passPlus(mkSetsForm) { txt => <(app.kit.wallet checkPassword txt, _ => wrong)(if (_) next(txt) else wrong) } // Check password length before proceeding def shortCheck(txtRes: Int, short: Int)(next: String => Unit) = { val (passwordAsk, secret) = generatePasswordPromptView(textType, txtRes) def check = if (secret.getText.length >= 6) next(secret.getText.toString) else toast(short) mkForm(mkChoiceDialog(check, mkSetsForm, dialog_ok, dialog_back), null, passwordAsk) } } } abstract class AnimatorActivity extends TimerActivity { lazy val ui = anyToRunnable(getActionBar setSubtitle infos.head.value) private[this] var currentAnimation = Option.empty[TimerTask] private[this] var infos = List.empty[Informer] def anim(text: String, infoType: Int) = { new Anim(app.kit.currentBalance, getActionBar.getTitle.toString) add(text, infoType).timer.schedule(this del infoType, 25000) runOnUiThread(ui) } // Informer CRUD def del(delTag: Int) = uiTask { infos = infos.filterNot(_.tag == delTag) ui } def add(text: String, addTag: Int) = { infos = new Informer(text, addTag) :: infos this } def update(text: String, tag: Int) = { for (inf <- infos if inf.tag == tag) inf.value = text this } // Title text animation class Anim(amt: Coin, curText: String) extends Runnable { val txt = if (amt.isZero) getString(wallet_empty) else btc(amt) val max = scala.math.max(txt.length, curText.length) var index = 1 override def run = { getActionBar setTitle s"${txt take index}${curText drop index}".trim if (index < max) index += 1 else for (an <- currentAnimation) an.cancel } for (an <- currentAnimation) an.cancel currentAnimation = Some apply uiTask(this) timer.schedule(currentAnimation.get, 0, 125) } // Password checking popup def passPlus(back: => Unit)(next: String => Unit) = { 
val (passAsk, secret) = generatePasswordPromptView(passType, password_old) mkForm(mkChoiceDialog(infoAndNext, back, dialog_next, dialog_back), null, passAsk) def infoAndNext = { add(app getString pass_checking, Informer.CODECHECK).ui.run timer.schedule(this del Informer.CODECHECK, 2500) next(secret.getText.toString) } } } abstract class TimerActivity extends Activity { me => val goTo: Class[_] => Unit = me startActivity new Intent(me, _) val exitTo: Class[_] => Unit = goto => wrap(finish)(goTo apply goto) val timer = new Timer // Screen size in inches and prefs reference lazy val maxDialog = metrics.densityDpi * 2.1 lazy val scrWidth = metrics.widthPixels.toDouble / metrics.densityDpi lazy val scrHeight = metrics.heightPixels.toDouble / metrics.densityDpi lazy val metrics = new DisplayMetrics match { case metrix => getWindowManager.getDefaultDisplay getMetrics metrix metrix } override def onCreate(savedInstanceState: Bundle): Unit = { Thread setDefaultUncaughtExceptionHandler new UncaughtHandler(me) super.onCreate(savedInstanceState) } // Timer utilities and toast override def onDestroy = wrap(super.onDestroy)(timer.cancel) implicit def anyToRunnable(process: => Unit): Runnable = new Runnable { def run = process } implicit def uiTask(process: => Runnable): TimerTask = new TimerTask { def run = me runOnUiThread process } def toast(message: Int) = Toast.makeText(app, message, Toast.LENGTH_LONG).show // Run computation in Future, deal with results on UI thread def <[T](fun: => T, no: Throwable => Unit)(ok: T => Unit) = <<(Future(fun), no)(ok) def <<[T](future: Future[T], no: Throwable => Unit)(ok: T => Unit) = future onComplete { case Success(rs) => runOnUiThread(ok apply rs) case Failure(ex) => runOnUiThread(no apply ex) } // Option dialog popup def choose(pos: Int, txt: String) = if (pos == 0) me goTo classOf[RequestActivity] else if (pos == 1) app setBuffer txt else share(txt) def share(text: String) = startActivity { val sendIntent = new Intent setType 
"text/plain" sendIntent.putExtra(Intent.EXTRA_TEXT, text) sendIntent.setAction(Intent.ACTION_SEND) } // Basis for dialog forms implicit def str2View(res: CharSequence): LinearLayout = { val view = getLayoutInflater.inflate(R.layout.frag_top_tip, null).asInstanceOf[LinearLayout] val textField = view.findViewById(R.id.actionTip).asInstanceOf[TextView] textField setMovementMethod LinkMovementMethod.getInstance textField setText res view } def rm(prev: Dialog)(fun: => Unit) = { timer.schedule(me anyToRunnable fun, 120) prev.dismiss } def generatePasswordPromptView(inpType: Int, txt: Int) = { val passAsk = getLayoutInflater.inflate(R.layout.frag_changer, null).asInstanceOf[LinearLayout] val secretInputField = passAsk.findViewById(R.id.secretInput).asInstanceOf[EditText] passAsk.findViewById(R.id.secretTip).asInstanceOf[TextView] setText txt secretInputField setInputType inpType (passAsk, secretInputField) } def negBld(neg: Int) = new Builder(me).setNegativeButton(neg, null) def negPosBld(neg: Int, pos: Int) = negBld(neg).setPositiveButton(pos, null) def mkForm(builder: Builder, title: View, content: View) = { val alertDialog = builder.setCustomTitle(title).setView(content).show if (scrWidth > 2.3) alertDialog.getWindow.setLayout(maxDialog.toInt, WRAP_CONTENT) alertDialog setCanceledOnTouchOutside false alertDialog } def mkChoiceDialog(ok: => Unit, no: => Unit, okRes: Int, noRes: Int) = { val cancel = new DialogInterface.OnClickListener { def onClick(x: DialogInterface, w: Int) = no } val again = new DialogInterface.OnClickListener { def onClick(x: DialogInterface, w: Int) = ok } new Builder(me).setPositiveButton(okRes, again).setNegativeButton(noRes, cancel) } def hideKeys(run: => Unit) = try { timer.schedule(me anyToRunnable run, 50) val mgr = getSystemService(INPUT_METHOD_SERVICE).asInstanceOf[InputMethodManager] mgr.hideSoftInputFromWindow(getCurrentFocus.getWindowToken, HIDE_NOT_ALWAYS) } catch none // Wrapper for a ListView onClick listener def onTap(go: Int => 
Unit) = new AdapterView.OnItemClickListener { def onItemClick(p: AdapterView[_], view: View, pos: Int, id: Long) = go(pos) } } class Spinner(tv: TextView) extends Runnable { override def run = tv.getText match { case text => if (text.length > 8) tv setText "★" else tv setText s"$text★" } } class RateManager(content: View) { me => val bitInput = content.findViewById(R.id.inputAmount).asInstanceOf[EditText] val fiatInput = content.findViewById(R.id.fiatInputAmount).asInstanceOf[EditText] val fiatType = content.findViewById(R.id.fiatType).asInstanceOf[SegmentedGroup] val bitType = content.findViewById(R.id.bitType).asInstanceOf[SegmentedGroup] val memoMap = Map(amtInBtc -> "btc", amtInBit -> "bit", amtInSat -> "sat") val memoRevMap = Map("btc" -> amtInBtc, "bit" -> amtInBit, "sat" -> amtInSat) val inputMap: Map[Int, Coin => String] = Map(amtInBtc -> btc, amtInBit -> bit, amtInSat -> sat) val hintMap = Map(amtInBtc -> input_hint_btc, amtInBit -> input_hint_bit, amtInSat -> input_hint_sat) val charMap = Map(amtInBtc -> ".0123456789", amtInBit -> ".,0123456789", amtInSat -> ",0123456789") def setSum(tc: TryCoin) = tc map inputMap(bitType.getCheckedRadioButtonId) map bitInput.setText def memoMode = memoRevMap apply app.prefs.getString(AbstractKit.BTC_DENOMINATION, "bit") def result = Try apply norm(bitType.getCheckedRadioButtonId) def norm(state: Int) = bitInput.getText.toString.replace(",", "") match { case raw if amtInBit == state => Coin valueOf (raw.toDouble * 100).toLong case raw if amtInBtc == state => Coin parseCoin raw case raw => Coin valueOf raw.toLong } val bitListener = new TextChangedWatcher { def upd = fiatInput.setText(inFiat(result) map baseFiat.format getOrElse null) def onTextChanged(s: CharSequence, st: Int, b: Int, c: Int) = if (bitInput.hasFocus) upd // Should have focus because may be filled automatically from QR or link bitInput.requestFocus } val fiatListener = new TextChangedWatcher { def upd = me setSum inBtc.map(Coin valueOf _.toLong) getOrElse 
bitInput.setText(null) def inBtc = currentRate.map(fiatInput.getText.toString.replace(",", "").toDouble / _ * 100000000) def onTextChanged(s: CharSequence, st: Int, b: Int, c: Int) = if (fiatInput.hasFocus) upd } bitType setOnCheckedChangeListener new OnCheckedChangeListener { def onCheckedChanged(radioGroup: RadioGroup, checkedButton: Int) = { bitInput setKeyListener DigitsKeyListener.getInstance(charMap apply bitType.getCheckedRadioButtonId) bitInput.setText(Try apply norm(memoMode) map inputMap(bitType.getCheckedRadioButtonId) getOrElse null) app.prefs.edit.putString(AbstractKit.BTC_DENOMINATION, memoMap apply bitType.getCheckedRadioButtonId).commit bitInput setHint hintMap(bitType.getCheckedRadioButtonId) } } fiatType setOnCheckedChangeListener new OnCheckedChangeListener { def onCheckedChanged(radioGroupView: RadioGroup, newFiatName: Int) = { app.prefs.edit.putString(AbstractKit.CURRENCY, fiatMap apply newFiatName).commit if (fiatInput.hasFocus) fiatListener.upd else bitListener.upd fiatInput setHint currentFiatName } } fiatInput addTextChangedListener fiatListener bitInput addTextChangedListener bitListener fiatType check revFiatMap(currentFiatName) bitType check memoMode } class SpendManager(address: EditText, man: RateManager) { me => def setAddressValue(adr: Address) = address setText adr.toString def set(uri: BitcoinURI) = { me setAddressValue uri.getAddress man setSum Try(uri.getAmount) } def set(data: PayData) = { me setAddressValue data.adr man setSum data.tc } } case class PayData(adr: Address, tc: TryCoin) { def route(sumDirection: String) = sumDirection format humanAddr(adr) def tryUri = for (cn <- tc) yield BitcoinURI.convertToBitcoinURI(adr, cn, null, null) def string = tryUri getOrElse adr.toString def pretty(way: String) = { val bitcoin = tc.map(cn => s"${this route way}<br><br>${Utils btcHuman cn}") getOrElse route(way) val fiat = inFiat(tc).map(amt => s"<br><font color=#999999>≈ ${Utils fiatSign amt}</font>") bitcoin + fiat.getOrElse(new 
String) } } abstract class CompletePay(host: AnimatorActivity) { val (passAsk, secretField) = host.generatePasswordPromptView(passType, wallet_password) val form = host.getLayoutInflater.inflate(R.layout.frag_input_spend_confirm, null) val choiceList = form.findViewById(R.id.choiceList).asInstanceOf[ListView] val dialog = host.mkChoiceDialog(confirm, none, dialog_pay, dialog_cancel) // Wrap fee rates with human readable text val feeDefault = host getString fee_default format sumOut.format(Utils btc Fee.default) val feeLive = host getString fee_live format sumOut.format(Utils btc Fee.rate) val infos = feeLive :: feeDefault :: Nil map Html.fromHtml val slot = android.R.layout.select_dialog_singlechoice def showForm = { form.asInstanceOf[LinearLayout].addView(passAsk, 0) host.mkForm(dialog, host.str2View(Html fromHtml title), form) choiceList setAdapter new ArrayAdapter(host, slot, infos.toArray) choiceList.setItemChecked(0, true) } def announceTransaction = { val all = app.kit.currentBalance subtract Fee.default isLessThan pay.tc.get val request = if (all) SendRequest emptyWallet pay.adr else SendRequest.to(pay.adr, pay.tc.get) request.feePerKb = if (choiceList.getCheckedItemPosition == 0) Fee.rate else Fee.default request.aesKey = app.kit.wallet.getKeyCrypter deriveKey secretField.getText.toString app.kit.wallet completeTx request // Block until at least one peer confirms it got our request app.kit.peerGroup.broadcastTransaction(request.tx, 1).broadcast.get } def errorReact(exc: Throwable): Unit = exc match { case e: InsufficientMoneyException => onError(app getString err_low_funds format btc(e.missing)) case _: ExceededMaxTransactionSize => onError(app getString err_transaction_too_large) case _: CouldNotAdjustDownwards => onError(app getString err_empty_shrunk) case _: KeyCrypterException => onError(app getString err_pass) case _: Throwable => onError(app getString err_general) } def onError(errorMessage: String) = try { val info = host.mkChoiceDialog(errorAction, 
none, dialog_ok, dialog_cancel) host.mkForm(info setMessage errorMessage, null, null) host.del(Informer.DECSEND).run } catch none val pay: PayData val title: String def errorAction def confirm } abstract class TextChangedWatcher extends TextWatcher { override def beforeTextChanged(s: CharSequence, x: Int, y: Int, z: Int) = none override def afterTextChanged(editableCharSequence: Editable) = none } trait MyPeerDataListener extends PeerDataEventListener { def onChainDownloadStarted(peer: Peer, blocksLeft: Int) = none def onPreMessageReceived(peer: Peer, message: Message) = message def getData(peer: Peer, m: GetDataMessage) = null }
btcontract/VisualBitcoinWallet
app/src/main/java/com/btcontract/wallet/Utils.scala
Scala
gpl-3.0
26,114
package com.wangc.fast.p2 import scala.collection.immutable.StringOps /** * Created by wangchao on 2017/6/6. */ object Study2_test { def main(args: Array[String]): Unit = { println( test1(1) ) test2() test4() test6("Hello") var so = new StringOps("asdfghjkl") println(so*3) println( so.canEqual(so) ) println( so.canEqual("a") ) println( so.canEqual("asdfghjkl") ) } def test1(x:Int): Int ={ if(x>0){ 1 }else if(x<0){ -1 }else{ 0 } } def test2(): Unit ={ var a = {} println(a) //值是 () ,类型是Unit } def test4(): Unit ={ // for(int i=10;i>=0;i--)System.out.println(i); // for(i<- 10.to(1).by(-1) ){ //正常的写法,下面一行是简写 for(i<- 10 to 1 by -1 ){ println(i) } } def test5(n:Int): Unit ={ for( i<- n to 0 by -1 ){ println(i) } } def test6(n:String): Unit ={ var total:Int = 1 for(i<-n){ total = total*i } println(total) } }
wang153723482/HelloWorld_my
HelloWorld_scala/src/com/wangc/fast/p2/Study2_test.scala
Scala
apache-2.0
1,058
package org.workcraft.graphics

import java.awt.geom.AffineTransform
import java.awt.geom.CubicCurve2D
import java.awt.geom.NoninvertibleTransformException
import java.awt.geom.Point2D
import java.awt.geom.Rectangle2D
import Java2DDecoration._
import java.awt.Color
import java.awt.Stroke

/** A curve parameterised by t, with the geometric queries needed to render connections. */
trait ParametricCurve {
  /** Point on the curve at parameter t. */
  def pointOnCurve(t: Double): Point2D.Double
  /** Parameter of the curve point nearest to pt. */
  def nearestPointT(pt: Point2D.Double): Double
  def boundingBox: Rectangle2D.Double
  // TODO: subdivide the parametric curve automatically, removing the need for manual Shape creation
  def shape(tStart: Double, tEnd: Double): java.awt.Shape
  def derivativeAt(t: Double): Point2D.Double
  def secondDerivativeAt(t: Double): Point2D.Double
}

/** Arrow-head dimensions in model units. */
case class Arrow(width: Double, length: Double)

case class VisualCurveProperties(color: Color, arrow: Option[Arrow], stroke: Stroke)

/** Visible sub-range [tStart, tEnd] of a connection curve plus arrow-head placement. */
case class PartialCurveInfo(tStart: Double, tEnd: Double, arrowHeadPosition: Point2D.Double, arrowOrientation: Double)

object Geometry {

  /** Linear interpolation between p1 (t = 0) and p2 (t = 1). */
  def lerp(p1: Point2D.Double, p2: Point2D.Double, t: Double): Point2D.Double =
    new Point2D.Double(p1.getX() * (1 - t) + p2.getX() * t, p1.getY() * (1 - t) + p2.getY() * t)

  /** Smallest rectangle containing both corner points. */
  def createRectangle(p1: Point2D, p2: Point2D): Rectangle2D.Double = {
    val rect = new Rectangle2D.Double(p1.getX(), p1.getY(), 0, 0)
    rect.add(p2)
    rect
  }

  def createCubicCurve(p1: Point2D.Double, cp1: Point2D.Double, cp2: Point2D.Double, p2: Point2D.Double) =
    new CubicCurve2D.Double(p1.getX(), p1.getY(), cp1.getX(), cp1.getY(), cp2.getX(), cp2.getY(), p2.getX(), p2.getY())

  /** Inverts the transform, rethrowing the checked NoninvertibleTransformException unchecked. */
  def optimisticInverse(transform: AffineTransform): AffineTransform =
    try {
      transform.createInverse()
    } catch {
      case ex: NoninvertibleTransformException =>
        throw new RuntimeException("Matrix inverse failed! Pessimists win :( ")
    }

  /**
   * Bisects [tStart, tEnd] down to 1e-6 to find the parameter at which the curve crosses
   * the border of collisionNode. Assumes the point at tStart is inside the node
   * (hitTest true) and the point at tEnd is outside — TODO confirm with callers.
   */
  def getBorderPointParameter(collisionNode: Touchable, curve: ParametricCurve, tStart: Double, tEnd: Double): Double = {
    var tstart = tStart
    var tend = tEnd
    while (Math.abs(tend - tstart) > 1e-6) {
      val t = (tstart + tend) * 0.5
      if (collisionNode.hitTest(curve.pointOnCurve(t))) tstart = t else tend = t
    }
    tstart
  }

  /**
   * Computes the visible portion of a connection curve: it is clipped against both
   * endpoint nodes, and, when an arrow is present, further shortened so the arrow
   * head fits between the curve end and the target's border.
   *
   * Fixed: the original match only handled Some(arrow) and threw MatchError for None;
   * without an arrow the curve now simply runs to the border point.
   */
  def buildConnectionCurveInfo(arrow: Option[Arrow], t1: Touchable, t2: Touchable, curve: ParametricCurve, endCutoff: Double): PartialCurveInfo = {
    val tStart = getBorderPointParameter(t1, curve, 0, 1)
    var tEnd = getBorderPointParameter(t2, curve, 1, endCutoff)
    val arrowPos = curve.pointOnCurve(tEnd)
    var arrowOrientation = 0.0

    arrow match {
      case Some(Arrow(width, length)) =>
        // Binary search from t = 0 for the largest t whose point is still at least
        // one arrow length away from the arrow tip; the arrow occupies [t, tEnd].
        val arrowLengthSq = length * length
        var dt = tEnd
        var t = 0.0
        var pt = new Point2D.Double
        while (dt > 1e-6) {
          dt /= 2.0
          t += dt
          pt = curve.pointOnCurve(t)
          if (arrowPos.distanceSq(pt) < arrowLengthSq) t -= dt
        }
        tEnd = t
        arrowOrientation = scala.math.atan2(arrowPos.getY - pt.getY, arrowPos.getX - pt.getX)
      case None =>
        // No arrow head: nothing to shorten, orientation is irrelevant.
    }

    PartialCurveInfo(tStart, tEnd, arrowPos, arrowOrientation)
  }

  /**
   * Interprets points as complex numbers and multiplies them.
   * Can be used for the scale-with-rotate (translates 'a' from the basis of (b, rotate90CCW(b)) to the basis of ((1, 0), (0, 1)))
   */
  def complexMultiply(a: Point2D.Double, b: Point2D.Double): Point2D.Double =
    new Point2D.Double(a.getX() * b.getX() - a.getY() * b.getY(), a.getX() * b.getY() + a.getY() * b.getX())

  /** Complex reciprocal of a; None when a is (numerically) zero. */
  def complexInverse(a: Point2D.Double): Option[Point2D.Double] = {
    val sq = a.distanceSq(0, 0)
    if (sq < 0.0000001) None
    else Some(new Point2D.Double(a.getX() / sq, -a.getY() / sq))
  }

  /** Expresses p in the orthogonal (not necessarily orthonormal) basis (vx, vy). */
  def changeBasis(p: Point2D, vx: Point2D, vy: Point2D): Point2D = {
    if ((vx dot vy) > 0.0000001)
      throw new RuntimeException("Vectors vx and vy must be orthogonal")

    val vysq = vy.distanceSq(0, 0)
    val vxsq = vx.distanceSq(0, 0)

    if (vysq < 0.0000001 || vxsq < 0.0000001)
      throw new RuntimeException("Vectors vx and vy must not have zero length")

    new Point2D.Double((p dot vx) / vxsq, (p dot vy) / vysq)
  }

  /** 2D cross product (z-component of the 3D cross product). */
  def crossProduct(p: Point2D, q: Point2D): Double =
    p.getX() * q.getY() - p.getY() * q.getX()
}
tuura/workcraft-2.2
Graphics/src/main/scala/org/workcraft/graphics/Geometry.scala
Scala
gpl-3.0
4,521
/*
Copyright 2016-17, Hasso-Plattner-Institut fuer Softwaresystemtechnik GmbH

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package de.hpi.ingestion.datalake.models

import java.util.UUID

// Test fixtures for Subject-related unit tests.
// scalastyle:off method.length
object TestData {
    // Stable pool of random UUIDs shared by the fixtures below, so relations can reference them.
    val idList = List.fill(10)(UUID.randomUUID())

    // A subject that is its own master (id == master), with a mix of free-form and normalized keys.
    def subject: Subject = Subject(
        id = idList.head,
        master = idList.head,
        datasource = "test",
        properties = Map(
            "key 1" -> List("value 1.1", "value 1.2"),
            "key 2" -> List("value 2.1"),
            "id_implisense" -> List("1"),
            "gen_urls" -> List("http://gen-urls.de"),
            "geo_coords" -> List("0;0")
        )
    )

    // Expected normalization result: only the normalized property keys of `subject` survive.
    def normalizedProperties: Map[String, List[String]] = {
        Map(
            "id_implisense" -> List("1"),
            "gen_urls" -> List("http://gen-urls.de"),
            "geo_coords" -> List("0;0")
        )
    }

    // Master subject pointing at `slave` below through a "master" relation with score 0.5.
    def master: Subject = Subject(
        id = idList.head,
        master = idList.head,
        datasource = "master",
        properties = Map(
            "key 1" -> List("value 1.1", "value 1.2"),
            "key 2" -> List("value 2.1"),
            "id_implisense" -> List("1"),
            "gen_urls" -> List("http://gen-urls.de"),
            "geo_coords" -> List("0;0")
        ),
        relations = Map(
            idList(1) -> Map("master" -> "0.5")
        )
    )

    // Slave subject whose master field references `master` and which links back via a "slave" relation.
    def slave: Subject = Subject(
        id = idList(1),
        master = idList.head,
        datasource = "test",
        properties = Map(
            "key 1" -> List("value 1.3"),
            "key 3" -> List("value 3.1", "value 3.2"),
            "gen_urls" -> List("http://gen-urls.com")
        ),
        relations = Map(
            idList.head -> Map("slave" -> "0.5")
        )
    )

    // Two implisense subjects used to test TSV export. The first carries every exportable
    // property (values 0..56 in export column order); the second only those whose value is
    // a multiple of 10, and has no name.
    def exportSubjects: List[Subject] = {
        List(
            Subject(
                id = UUID.fromString("224e1a50-13e2-11e7-9a30-7384674b582f"),
                master = UUID.fromString("224e1a50-13e2-11e7-9a30-7384674b582f"),
                datasource = "implisense",
                category = Option("business"),
                name = Option("Testunternehmen 1"),
                aliases = List("Unternehmen 1"),
                properties = Map(
                    "cr_active" -> List("0"),
                    "cr_court" -> List("1"),
                    "cr_number" -> List("2"),
                    "cr_ids" -> List("3"),
                    "cr_formerCourt" -> List("4"),
                    "cr_type" -> List("5"),
                    "id_implisense" -> List("6"),
                    "id_wikidata" -> List("7"),
                    "id_dbpedia" -> List("8"),
                    "id_kompass" -> List("9"),
                    "id_vat" -> List("10"),
                    "id_lei" -> List("11"),
                    "id_ebid" -> List("12"),
                    "id_buergel" -> List("13"),
                    "id_wikipedia" -> List("14"),
                    "id_freebase" -> List("15"),
                    "id_wikimeda_commons" -> List("16"),
                    "id_geonames" -> List("17"),
                    "id_viaf" -> List("18"),
                    "id_lccn" -> List("19"),
                    "id_tax" -> List("20"),
                    "sm_youtube" -> List("21"),
                    "sm_googleplus" -> List("22"),
                    "sm_xing" -> List("23"),
                    "sm_linkedin" -> List("24"),
                    "sm_instagram" -> List("25"),
                    "sm_pinterest" -> List("26"),
                    "sm_kununu" -> List("27"),
                    "sm_flickr" -> List("28"),
                    "sm_github" -> List("29"),
                    "sm_slideshare" -> List("30"),
                    "sm_foursquare" -> List("31"),
                    "sm_twitter" -> List("32"),
                    "sm_facebook" -> List("33"),
                    "geo_postal" -> List("34"),
                    "geo_street" -> List("35"),
                    "geo_city" -> List("36"),
                    "geo_country" -> List("37"),
                    "geo_county" -> List("38"),
                    "geo_coords" -> List("39"),
                    "geo_quality" -> List("40"),
                    "date_founding" -> List("41"),
                    "gen_size" -> List("42"),
                    "gen_employees" -> List("43"),
                    "gen_sectors" -> List("44"),
                    "gen_urls" -> List("45"),
                    "gen_phones" -> List("46"),
                    "gen_emails" -> List("47"),
                    "gen_legal_form" -> List("48"),
                    "gen_founder" -> List("49"),
                    "gen_description" -> List("50"),
                    "gen_turnover" -> List("51"),
                    "gen_ceo" -> List("52"),
                    "gen_capital" -> List("53"),
                    "gen_management" -> List("54"),
                    "gen_revenue" -> List("55"),
                    "gen_sales" -> List("56"))
            ),
            Subject(
                id = UUID.fromString("324e1a50-13e2-11e7-9a30-7384674b582f"),
                master = UUID.fromString("324e1a50-13e2-11e7-9a30-7384674b582f"),
                datasource = "implisense",
                category = Option("organization"),
                aliases = List("Unternehmen 2", "Testunternehmen 2"),
                properties = Map(
                    "cr_active" -> List("0"),
                    "id_vat" -> List("10"),
                    "id_tax" -> List("20"),
                    "sm_slideshare" -> List("30"),
                    "geo_quality" -> List("40"),
                    "gen_description" -> List("50"))
            )
        )
    }

    // Expected TSV serialization of `exportSubjects`: quoted, tab-separated fields
    // (id, name, aliases, category, then the 57 property columns in order).
    def tsvSubjects: List[String] = {
        val quote = "\""
        List(
            s"${quote}224e1a50-13e2-11e7-9a30-7384674b582f$quote\t${quote}Testunternehmen 1$quote\t" +
                s"${quote}Unternehmen 1$quote\t${quote}business$quote\t" +
                (0 to 56).map(value => s"$quote$value$quote").mkString("\t"),
            s"${quote}324e1a50-13e2-11e7-9a30-7384674b582f$quote\t\t${quote}Unternehmen 2$quote," +
                s"${quote}Testunternehmen 2$quote\t${quote}organization$quote\t" +
                // NOTE(review): the predicate ignores `c` — it keeps the entire quoted field when
                // value % 10 == 0 and yields "" otherwise, which matches the sparse second subject
                // above; confirm this is intentional and not a typo for a per-character filter.
                (0 to 56).map(value => s"$quote$value$quote".filter(c => value % 10 == 0)).mkString("\t")
        )
    }
}
// scalastyle:on method.length
bpn1/ingestion
src/test/scala/de/hpi/ingestion/datalake/models/TestData.scala
Scala
apache-2.0
6,856
package elastic

import akka.actor.Actor.Receive
import akka.actor.{Props, Actor, ActorRef, ActorSystem}
import com.google.inject.Inject
import elastic.core.ElasticConnection
import elastic.requester._
import play.api.Logger

import scala.concurrent.Future

/**
 * Factory for Elasticsearch connections, tracking every connection it hands out.
 *
 * @author Artem Arakcheev
 * @since 24.06.15
 */
trait ElasticDriver {
  def close(): Unit

  def connection(node: String): ElasticConnection
}

class DefaultElasticDriver(system: ActorSystem) extends ElasticDriver {

  // All connections ever created by this driver; prepend-only under `synchronized`,
  // volatile so readers see the latest list without locking.
  @volatile private var _connections = List.empty[ElasticConnection]

  def connections: Seq[ElasticConnection] = _connections

  def close() = {
    // TODO: also drop closed connections from the tracking list.
    connections.foreach(_.close())
  }

  /** Builds a connection to `node` with an explicit failover strategy and registers it. */
  def connection(node: String, failoverStrategy: FailoverStrategy) = {
    val requester = system.actorOf(RequesterActor.props(node))
    val conn = new ElasticConnection(system, requester, failoverStrategy)
    this.synchronized {
      _connections = conn :: _connections
    }
    conn
  }

  /** Connection to `node` using the default failover strategy. */
  override def connection(node: String): ElasticConnection =
    connection(node, FailoverStrategy())
}
reactools/play-elastic-scala
app/elastic/ElasticDriver.scala
Scala
apache-2.0
1,114
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.execution.datasources.parquet

import java.io.File

import org.apache.spark.sql.Row
import org.apache.spark.sql.test.SharedSQLContext

// Verifies that Spark's Parquet reader reconciles different physical list encodings
// into one logical Catalyst schema when reading a mixed directory.
class ParquetInteroperabilitySuite extends ParquetCompatibilityTest with SharedSQLContext {
  test("parquet files with different physical schemas but share the same logical schema") {
    import ParquetCompatibilityTest._

    // This test case writes two Parquet files, both representing the following Catalyst schema
    //
    //   StructType(
    //     StructField(
    //       "f",
    //       ArrayType(IntegerType, containsNull = false),
    //       nullable = false))
    //
    // The first Parquet file comes with parquet-avro style 2-level LIST-annotated group, while the
    // other one comes with parquet-protobuf style 1-level unannotated primitive field.
    withTempDir { dir =>
      val avroStylePath = new File(dir, "avro-style").getCanonicalPath
      val protobufStylePath = new File(dir, "protobuf-style").getCanonicalPath

      // 2-level encoding: repeated group "array" nested inside a LIST-annotated group "f".
      val avroStyleSchema =
        """message avro_style {
          |  required group f (LIST) {
          |    repeated int32 array;
          |  }
          |}
        """.stripMargin

      // Writes the single record [0, 1] via the low-level record-consumer API;
      // the nesting of callbacks mirrors the physical schema above exactly.
      writeDirect(avroStylePath, avroStyleSchema, { rc =>
        rc.message {
          rc.field("f", 0) {
            rc.group {
              rc.field("array", 0) {
                rc.addInteger(0)
                rc.addInteger(1)
              }
            }
          }
        }
      })

      logParquetSchema(avroStylePath)

      // 1-level encoding: a bare repeated primitive field, no wrapper group.
      val protobufStyleSchema =
        """message protobuf_style {
          |  repeated int32 f;
          |}
        """.stripMargin

      // Writes the single record [2, 3] in the 1-level encoding.
      writeDirect(protobufStylePath, protobufStyleSchema, { rc =>
        rc.message {
          rc.field("f", 0) {
            rc.addInteger(2)
            rc.addInteger(3)
          }
        }
      })

      logParquetSchema(protobufStylePath)

      // Reading the parent directory picks up both files; both rows must come back
      // as array<int> values despite the differing physical layouts.
      checkAnswer(
        spark.read.parquet(dir.getCanonicalPath),
        Seq(
          Row(Seq(0, 1)),
          Row(Seq(2, 3))))
    }
  }
}
aokolnychyi/spark
sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetInteroperabilitySuite.scala
Scala
apache-2.0
2,874
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// scalastyle:off println
package org.apache.spark.examples.ml

// $example on$
import org.apache.spark.ml.feature.{CountVectorizer, CountVectorizerModel}
// $example off$
import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Example demonstrating [[CountVectorizer]]: fitting a vocabulary from a corpus
 * and, alternatively, constructing a [[CountVectorizerModel]] from an a-priori vocabulary.
 */
object CountVectorizerExample {
  def main(args: Array[String]) {
    // Fixed: app name was previously misspelled as "CounterVectorizerExample".
    val conf = new SparkConf().setAppName("CountVectorizerExample")
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)

    // $example on$
    val df = sqlContext.createDataFrame(Seq(
      (0, Array("a", "b", "c")),
      (1, Array("a", "b", "b", "c", "a"))
    )).toDF("id", "words")

    // fit a CountVectorizerModel from the corpus
    val cvModel: CountVectorizerModel = new CountVectorizer()
      .setInputCol("words")
      .setOutputCol("features")
      .setVocabSize(3)   // keep at most 3 vocabulary terms
      .setMinDF(2)       // a term must appear in at least 2 documents
      .fit(df)

    // alternatively, define CountVectorizerModel with a-priori vocabulary
    val cvm = new CountVectorizerModel(Array("a", "b", "c"))
      .setInputCol("words")
      .setOutputCol("features")

    cvModel.transform(df).select("features").show()
    // $example off$
  }
}
// scalastyle:on println
chenc10/Spark-PAF
examples/src/main/scala/org/apache/spark/examples/ml/CountVectorizerExample.scala
Scala
apache-2.0
1,979
package jsky.app.ot.gemini.editor.auxfile

import edu.gemini.auxfile.api.{AuxFile, AuxFileException}
import jsky.app.ot.OTOptions

import scala.collection.JavaConverters._
import scala.swing.Component

/** Toggles the "checked" flag on the currently selected auxiliary files. */
class CheckAction(c: Component, model: AuxFileModel) extends AuxFileAction("Mark Checked", c, model) {

  // Flip the action label depending on whether every selected file is already checked.
  reactions += {
    case AuxFileStateEvent(e) =>
      val everythingChecked = e.exists(s => s.selection.nonEmpty && allChecked(s.selection))
      title = if (everythingChecked) "Mark Unchecked" else "Mark Checked"
  }

  private def allChecked(files: List[AuxFile]) = files.forall(_.isChecked)

  override def interpret(ex: AuxFileException) =
    s"Sorry, there was a problem checking files: '${ex.getMessage}'"

  // Enabled only for NGO/staff users with a non-empty selection.
  override def currentEnabledState: Boolean =
    super.currentEnabledState &&
      model.currentPid.exists(pid => OTOptions.isNGO(pid) || OTOptions.isStaff(pid)) &&
      model.currentSelection.exists(_.nonEmpty)

  override def apply() {
    exec(model.currentSelection) { (client, pid, selection) =>
      client.setChecked(pid, selection.map(_.getName).asJavaCollection, !allChecked(selection))
    }
  }
}
arturog8m/ocs
bundle/jsky.app.ot/src/main/scala/jsky/app/ot/gemini/editor/auxfile/CheckAction.scala
Scala
bsd-3-clause
1,076
package com.twitter.finagle.factory

import java.util.concurrent.locks.ReentrantReadWriteLock
import java.util.concurrent.atomic.AtomicInteger
import com.twitter.finagle.{Status, Service, ServiceFactory, ClientConnection, ServiceProxy, ServiceFactoryProxy}
import com.twitter.util.{Closable, Future, Stopwatch, Throw, Return, Time, Duration}
import com.twitter.finagle.stats.{StatsReceiver, NullStatsReceiver}
import com.twitter.finagle.tracing.Trace
import scala.collection.immutable

/**
 * A service factory that keeps track of idling times to implement
 * cache eviction.
 */
private class IdlingFactory[Req, Rep](self: ServiceFactory[Req, Rep]) extends ServiceFactoryProxy[Req, Rep](self) {
  // Restarted each time the outstanding-service count drops to zero;
  // measures how long the factory has been idle since then.
  @volatile private[this] var watch = Stopwatch.start()
  // Count of services dispensed but not yet closed.
  private[this] val n = new AtomicInteger(0)

  override def apply(conn: ClientConnection): Future[Service[Req, Rep]] = {
    n.getAndIncrement()

    self(conn) transform {
      case Throw(exc) =>
        // The service never materialized: undo the increment.
        decr()
        Future.exception(exc)
      case Return(service) =>
        // Wrap the service so closing it decrements the outstanding count.
        Future.value(new ServiceProxy(service) {
          override def close(deadline: Time) = {
            decr()
            super.close(deadline)
          }
        })
    }
  }

  @inline private[this] def decr() {
    // When the last outstanding service closes, restart the idle stopwatch.
    if (n.decrementAndGet() == 0) watch = Stopwatch.start()
  }

  /**
   * Returns the duration of time for which this factory has been
   * idle--i.e. has no outstanding services.
   *
   * @bug There is a small race here between checking n.get and
   * reading from the watch. (I.e. the factory can become nonidle
   * between the checks). This is fine.
   */
  def idleFor = if (n.get > 0) Duration.Zero else watch()
}

/**
 * A "read-through" cache of service factories. Eviction is based on
 * idle time -- when no underlying factories are idle, one-shot
 * factories are created. This doesn't necessarily guarantee good
 * performance: one-shots could be created constantly for a hot cache
 * key, but should work well when there are a few hot keys.
 */
private class ServiceFactoryCache[Key, Req, Rep](
    newFactory: Key => ServiceFactory[Req, Rep],
    statsReceiver: StatsReceiver = NullStatsReceiver,
    maxCacheSize: Int = 8)
  extends Closable {
  assert(maxCacheSize > 0)

  // Immutable map swapped atomically; reads are guarded by readLock,
  // all mutations by writeLock (see the downgrade dance in miss()).
  @volatile private[this] var cache = immutable.Map.empty: immutable.Map[Key, IdlingFactory[Req, Rep]]

  private[this] val (readLock, writeLock) = {
    val rw = new ReentrantReadWriteLock()
    (rw.readLock(), rw.writeLock())
  }

  private[this] val nmiss = statsReceiver.counter("misses")
  private[this] val nevict = statsReceiver.counter("evicts")
  private[this] val noneshot = statsReceiver.counter("oneshots")
  private[this] val nidle = statsReceiver.addGauge("idle") {
    cache count { case (_, f) => f.idleFor > Duration.Zero }
  }

  /*
   * This returns a Service rather than a ServiceFactory to avoid
   * complicated bookkeeping around closing ServiceFactories. They can
   * be safely closed when evicted from the cache, when the entire
   * cache is closed, or in the case of one-shot services when the
   * service is closed; in all cases there are no references outside
   * of ServiceFactoryCache.
   */
  def apply(key: Key, conn: ClientConnection): Future[Service[Req, Rep]] = {
    // Fast path: serve a cache hit under the shared read lock.
    readLock.lock()
    try {
      if (cache contains key)
        return cache(key).apply(conn)
    } finally {
      readLock.unlock()
    }

    miss(key, conn)
  }

  private[this] def miss(key: Key, conn: ClientConnection): Future[Service[Req, Rep]] = {
    writeLock.lock()

    // Another thread may have filled this key while we waited for the write
    // lock: downgrade write -> read (acquire read before releasing write) and
    // serve from the cache.
    if (cache contains key) {
      readLock.lock()
      writeLock.unlock()
      try {
        return cache(key).apply(conn)
      } finally {
        readLock.unlock()
      }
    }

    // True miss: build a new factory while still holding the write lock.
    val svc = try {
      nmiss.incr()
      val factory = new IdlingFactory(newFactory(key))

      if (cache.size < maxCacheSize) {
        // Room available: insert and dispense from the cached factory.
        cache += (key -> factory)
        cache(key).apply(conn)
      } else {
        findEvictee() match {
          case Some(evicted) =>
            // Replace an idle factory with the new one.
            nevict.incr()
            cache(evicted).close()
            cache = cache - evicted + (key -> factory)
            cache(key).apply(conn)
          case None =>
            // Nothing idle to evict: fall back to an uncached one-shot factory
            // that is closed together with its single service.
            noneshot.incr()
            oneshot(factory, conn)
        }
      }
    } finally {
      writeLock.unlock()
    }

    svc
  }

  // Dispenses a single service whose close() also closes the backing factory.
  private[this] def oneshot(factory: ServiceFactory[Req, Rep], conn: ClientConnection)
      : Future[Service[Req, Rep]] =
    factory(conn) map { service =>
      new ServiceProxy(service) {
        override def close(deadline: Time) =
          super.close(deadline) transform {
            case _ => factory.close(deadline)
          }
      }
    }

  // Picks the factory that has been idle the longest; None if none is idle.
  private[this] def findEvictee(): Option[Key] = {
    val (evictNamer, evictFactory) = cache maxBy { case (_, fac) => fac.idleFor }
    if (evictFactory.idleFor > Duration.Zero) Some(evictNamer)
    else None
  }

  def close(deadline: Time) =
    Closable.all(cache.values.toSeq:_*).close(deadline)

  def status = Status.bestOf[IdlingFactory[Req, Rep]](cache.values, _.status)

  def status(key: Key): Status = {
    readLock.lock()
    try {
      if (cache.contains(key))
        return cache(key).status
    } finally {
      readLock.unlock()
    }

    // This is somewhat dubious, as the status is outdated
    // pretty much right after we query it.
    val factory = newFactory(key)
    val status = factory.status
    factory.close()
    status
  }
}
lysu/finagle
finagle-core/src/main/scala/com/twitter/finagle/factory/ServiceFactoryCache.scala
Scala
apache-2.0
5,423
import stainless.lang._
import stainless.proof._
import stainless.annotation._

// Stainless verification benchmark: laws (@law) may be strengthened but not
// weakened along the inheritance chain; all cases here are expected to verify.
object Laws1 {

  // Base contract: implementations must provide a non-zero `value`.
  abstract class SomeLaw {
    def value: BigInt

    @law
    def lawNotZero: Boolean = {
      value != 0
    }
  }

  // Strengthens the inherited law: `value > 1` implies `value != 0`.
  abstract class RefinedLaw extends SomeLaw {
    @law
    override def lawNotZero: Boolean = {
      value > 1
    }
  }

  // Concrete instance overriding the law with an even stronger condition (42 == value).
  case class ConcreteOverride() extends RefinedLaw {
    def value = 42

    override def lawNotZero: Boolean = {
      value == 42
    }
  }

  // Concrete instance relying on the inherited refined law; 42 > 1 holds.
  case class ConcreteNoOverride() extends RefinedLaw {
    def value = 42
  }
}
epfl-lara/stainless
frontends/benchmarks/verification/valid/MicroTests/Laws1.scala
Scala
apache-2.0
562
/**
 * Swaggy Jenkins
 * Jenkins API clients generated from Swagger / Open API specification
 *
 * The version of the OpenAPI document: 1.1.2-pre.0
 * Contact: blah@cliffano.com
 *
 * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
 * https://openapi-generator.tech
 * Do not edit the class manually.
 */
package org.openapitools.client.model

// Auto-generated model for the latest run of a pipeline branch; every field is
// optional per the OpenAPI spec. `type`/`class` are backtick-escaped because they
// are Scala keywords / reserved-sounding names coming from the JSON payload.
case class PipelineBranchesitemlatestRun (
  durationInMillis: Option[Integer] = None,
  estimatedDurationInMillis: Option[Integer] = None,
  enQueueTime: Option[String] = None,
  endTime: Option[String] = None,
  id: Option[String] = None,
  organization: Option[String] = None,
  pipeline: Option[String] = None,
  result: Option[String] = None,
  runSummary: Option[String] = None,
  startTime: Option[String] = None,
  state: Option[String] = None,
  `type`: Option[String] = None,
  commitId: Option[String] = None,
  `class`: Option[String] = None
)
cliffano/swaggy-jenkins
clients/scala-httpclient-deprecated/generated/src/main/scala/org/openapitools/client/model/PipelineBranchesitemlatestRun.scala
Scala
mit
945
/**
 * Copyright 2011-2016 GatlingCorp (http://gatling.io)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.gatling.jms.client

import java.util.{ Hashtable => JHashtable }

import io.gatling.core.config.Credentials
import io.gatling.jms.protocol.JmsMessageMatcher
import io.gatling.jms.request._

import com.typesafe.scalalogging.StrictLogging
import javax.jms._
import javax.naming.{ Context, InitialContext }

import scala.util.control.NonFatal

/**
 * Trivial JMS client, allows sending messages and use of a MessageListener
 * @author jasonk@bluedevel.com
 */
class SimpleJmsClient(
    connectionFactoryName: String,
    destination: JmsDestination,
    replyDestination: JmsDestination,
    url: String,
    credentials: Option[Credentials],
    anonymousConnect: Boolean,
    contextFactory: String,
    deliveryMode: Int,
    messageMatcher: JmsMessageMatcher
) extends JmsClient with StrictLogging {

  // create InitialContext: JNDI environment built from the provider URL and,
  // when present, the caller's credentials.
  val properties = new JHashtable[String, String]
  properties.put(Context.INITIAL_CONTEXT_FACTORY, contextFactory)
  properties.put(Context.PROVIDER_URL, url)

  credentials.foreach { credentials =>
    properties.put(Context.SECURITY_PRINCIPAL, credentials.username)
    properties.put(Context.SECURITY_CREDENTIALS, credentials.password)
  }
  val ctx = new InitialContext(properties)
  logger.info(s"Got InitialContext $ctx")

  // create QueueConnectionFactory
  val cf = ctx.lookup(connectionFactoryName).asInstanceOf[ConnectionFactory]
  logger.info(s"Got ConnectionFactory $cf")

  // create QueueConnection; authenticated unless the caller opted into anonymous connect.
  val conn = credentials match {
    case Some(creds) if !anonymousConnect => cf.createConnection(creds.username, creds.password)
    case _ => cf.createConnection
  }
  conn.start()

  val session = conn.createSession(false, Session.AUTO_ACKNOWLEDGE)
  logger.info(s"Got Connection $conn")

  // reply queue and target destination/producer
  val replyJmsDestination = createDestination(replyDestination)
  val producer = session.createProducer(createDestination(destination))

  // delivery mode based on input from caller
  producer.setDeliveryMode(deliveryMode)

  // Resolves the Gatling destination description into a concrete JMS destination.
  private def createDestination(destination: JmsDestination): Destination = destination match {
    case JmsQueue(name)    => session.createQueue(name)
    case JmsTopic(name)    => session.createTopic(name)
    case JmsTemporaryQueue => session.createTemporaryQueue()
    case JmsTemporaryTopic => session.createTemporaryTopic()
  }

  override val replyDestinationName = replyJmsDestination.toString

  /**
   * Gets a new consumer for the reply queue
   *
   * NOTE(review): each call opens a fresh Session that is only reclaimed when the
   * connection is closed — confirm callers do not create these in an unbounded loop.
   */
  def createReplyConsumer(selector: String = null): MessageConsumer =
    conn.createSession(false, Session.AUTO_ACKNOWLEDGE).createConsumer(replyJmsDestination, selector)

  /**
   * Writes a property map to the message properties
   */
  private def writePropsToMessage(props: Map[String, Any], message: Message): Unit =
    props.foreach { case (key, value) => message.setObjectProperty(key, value) }

  /**
   * Wrapper to send a BytesMessage, returns the message ID of the sent message
   */
  def sendBytesMessage(bytes: Array[Byte], props: Map[String, Any], jmsType: Option[String]): Message = {
    val message = session.createBytesMessage
    message.writeBytes(bytes)
    writePropsToMessage(props, message)
    jmsType.foreach(message.setJMSType)
    sendMessage(message)
  }

  /**
   * Wrapper to send a MapMessage, returns the message ID of the sent message
   * <p>
   * Note that map must match the javax.jms.MapMessage contract ie: "This method works only
   * for the objectified primitive object types (Integer, Double, Long ...), String objects,
   * and byte arrays."
   */
  def sendMapMessage(map: Map[String, Any], props: Map[String, Any], jmsType: Option[String]): Message = {
    val message = session.createMapMessage
    map.foreach { case (key, value) => message.setObject(key, value) }
    writePropsToMessage(props, message)
    jmsType.foreach(message.setJMSType)
    sendMessage(message)
  }

  /**
   * Wrapper to send an ObjectMessage, returns the message ID of the sent message
   */
  def sendObjectMessage(o: java.io.Serializable, props: Map[String, Any], jmsType: Option[String]): Message = {
    val message = session.createObjectMessage(o)
    writePropsToMessage(props, message)
    jmsType.foreach(message.setJMSType)
    sendMessage(message)
  }

  /**
   * Wrapper to send a TextMessage, returns the message ID of the sent message
   */
  def sendTextMessage(messageText: String, props: Map[String, Any], jmsType: Option[String]): Message = {
    val message = session.createTextMessage(messageText)
    writePropsToMessage(props, message)
    jmsType.foreach(message.setJMSType)
    sendMessage(message)
  }

  /**
   * Sends a JMS message, returns the message of the sent message
   * <p>
   * Note that exceptions are allowed to bubble up to the caller
   */
  def sendMessage(message: Message): Message = {
    message.setJMSReplyTo(replyJmsDestination)
    messageMatcher.prepareRequest(message)
    producer.send(message)

    // return the message
    message
  }

  def close(): Unit = {
    try {
      producer.close()
      session.close()
      // Fixed: previously only conn.stop() was called, which merely pauses message
      // delivery; Connection.close() releases the connection's resources
      // (and closes any remaining sessions, e.g. those from createReplyConsumer).
      conn.close()
    } catch {
      case NonFatal(e) => logger.debug("Exception while closing SimpleJmsClient: " + e.getMessage)
    }
  }
}
ryez/gatling
gatling-jms/src/main/scala/io/gatling/jms/client/SimpleJmsClient.scala
Scala
apache-2.0
5,942
/*
 * Copyright (C) 09/09/13 Romain Reuillon
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
package fr.geocites.sugarscape

import fr.geocites.simpuzzle._
import fr.geocites.simpuzzle.matrix.Torus2D
import fr.geocites.simpuzzle.state.State

trait SugarScapeState <: State {

  /** Sugar capacity of each grid cell. */
  def maxSugarCells: Seq[Seq[Int]]

  /** One simulation step: agents placed on a toroidal sugar grid. */
  case class SugarScapeState(
      step: Int,
      agents: Seq[(Position, Agent)],
      sugar: Seq[Seq[Sugar]]) extends Torus2D {

    type CELL = Cell

    /** Grid view combining per-cell sugar with the agent (if any) occupying it. */
    def cells: Seq[Seq[Cell]] = {
      // toMap keeps the last entry for a duplicated position, matching the
      // overwrite order of an in-place construction over `agents`.
      val occupant = agents.toMap
      sugar.zipWithIndex.map { case (row, x) =>
        row.zipWithIndex.map { case (Sugar(s, ms), y) =>
          Cell(s, ms, occupant.get((x, y)))
        }
      }
    }
  }

  type Position = (Int, Int)

  case class Sugar(sugar: Double, maxSugar: Double)
  case class Cell(sugar: Double, maxSugar: Double, agent: Option[Agent])
  case class Agent(sugar: Double, metabolism: Double, vision: Int)
}
ISCPIF/PSEExperiments
simpuzzle-src/models/sugarscape/src/main/scala/fr/geocites/sugarscape/SugarScapeState.scala
Scala
agpl-3.0
1,639
package org.jetbrains.plugins.scala.lang.dfa.invocationInfo.tests

import org.jetbrains.plugins.scala.lang.dfa.invocationInfo.InvocationInfoTestBase
import org.jetbrains.plugins.scala.lang.dfa.invocationInfo.arguments.Argument.{PassByName, PassByValue}

/**
 * Tests extraction of invocation information from regular (non-operator) method
 * calls: argument lists, parameter-name mapping and passing mechanisms
 * (by-value vs by-name), for class methods, instance calls, Java statics and
 * local function values.
 *
 * NOTE(review): every expected argument count includes one extra leading slot
 * for the implicit `this` argument, hence the `1 + n` pattern throughout.
 */
class RegularMethodCallInfoTest extends InvocationInfoTestBase {

  // Plain call with four by-value arguments; all mapped positionally.
  def testSimpleMethodCalls(): Unit = {
    val invocationInfo = generateInvocationInfoFor {
      s"""
         |class SomeClass {
         |  def simpleFun(firstArg: Int, secondArg: Boolean, thirdArg: String, fourthArg: Int): Int = {
         |    firstArg + fourthArg
         |  }
         |
         |  def main(): Int = {
         |    ${markerStart}simpleFun(3 + 8, 5 > 9, "Hello", 9 * 4 - 2)${markerEnd}
         |  }
         |}
         |""".stripMargin
    }

    val expectedArgCount = 1 + 4 // implicit "this" argument
    val expectedProperArgsInText = List("3 + 8", "5 > 9", "\"Hello\"", "9 * 4 - 2")
    val expectedMappedParamNames = List("firstArg", "secondArg", "thirdArg", "fourthArg")
    val expectedPassingMechanisms = (1 to expectedArgCount).map(_ => PassByValue).toList
    val expectedParamToArgMapping = (0 until expectedArgCount - 1).toList

    verifyInvokedElement(invocationInfo, "SomeClass#simpleFun")
    verifyArgumentsWithSingleArgList(invocationInfo, expectedArgCount, expectedProperArgsInText,
      expectedMappedParamNames, expectedPassingMechanisms, expectedParamToArgMapping)
  }

  // `=> T` parameters must be reported as PassByName (slots 2 and 4 here;
  // slot 0 is the implicit `this`, which is by-value).
  // NOTE(review): the fixture body references `firstArg`/`fourthArg`, which do
  // not exist in `funWithByNames` — apparently irrelevant to the DFA under
  // test, but confirm it is intentional.
  def testByNameArguments(): Unit = {
    val invocationInfo = generateInvocationInfoFor {
      s"""
         |class AnotherClass {
         |  def funWithByNames(arg0: Int, arg1: => Boolean, arg2: String, arg3: => Int): Int = {
         |    firstArg + fourthArg
         |  }
         |
         |  def main(): Int = {
         |    val x = 3
         |    ${markerStart}funWithByNames(328944 * 22, 5 >= 3 && false, "Hello", -3324 + x)${markerEnd}
         |  }
         |}
         |""".stripMargin
    }

    val expectedArgCount = 1 + 4
    val expectedProperArgsInText = List("328944 * 22", "5 >= 3 && false", "\"Hello\"", "-3324 + x")
    val expectedMappedParamNames = List("arg0", "arg1", "arg2", "arg3")
    val expectedPassingMechanisms = List(PassByValue, PassByValue, PassByName, PassByValue, PassByName)
    val expectedParamToArgMapping = (0 until expectedArgCount - 1).toList

    verifyInvokedElement(invocationInfo, "AnotherClass#funWithByNames")
    verifyArgumentsWithSingleArgList(invocationInfo, expectedArgCount, expectedProperArgsInText,
      expectedMappedParamNames, expectedPassingMechanisms, expectedParamToArgMapping)
  }

  // Call on an explicit receiver: the receiver expression must be reported
  // as the `this` expression of the invocation.
  def testMethodCallsOnInstance(): Unit = {
    val invocationInfo = generateInvocationInfoFor {
      s"""
         |object TestObject {
         |  class Something(z: Double) {
         |    def compareWith(x: Double, y: Double): Boolean = z < x && z < y
         |  }
         |
         |  def main(): Int = {
         |    val newSomething = new Something(12.34)
         |    ${markerStart}newSomething.compareWith(19.52 * 2.5, -11.0034 * (-1))${markerEnd}
         |  }
         |}
         |""".stripMargin
    }

    val expectedArgCount = 1 + 2
    val expectedProperArgsInText = List("19.52 * 2.5", "-11.0034 * (-1)")
    val expectedMappedParamNames = List("x", "y")
    val expectedPassingMechanisms = (1 to expectedArgCount).map(_ => PassByValue).toList
    val expectedParamToArgMapping = (0 until expectedArgCount - 1).toList

    verifyInvokedElement(invocationInfo, "Something#compareWith")
    verifyArgumentsWithSingleArgList(invocationInfo, expectedArgCount, expectedProperArgsInText,
      expectedMappedParamNames, expectedPassingMechanisms, expectedParamToArgMapping)
    verifyThisExpression(invocationInfo, "newSomething")
  }

  // Java static call: parameter names come from the JDK (`LocalDate.of`), and
  // the class reference itself acts as the `this` expression.
  def testJavaStaticMethods(): Unit = {
    val invocationInfo = generateInvocationInfoFor {
      s"""
         |import java.time.LocalDate
         |
         |object TestObject {
         |
         |  def main(): Int = {
         |    val date = ${markerStart}LocalDate.of(2012, 11, 23)${markerEnd}
         |    date.getYear
         |  }
         |}
         |
         |""".stripMargin
    }

    val expectedArgCount = 1 + 3
    val expectedProperArgsInText = List("2012", "11", "23")
    val expectedMappedParamNames = List("year", "month", "dayOfMonth")
    val expectedPassingMechanisms = (1 to expectedArgCount).map(_ => PassByValue).toList
    val expectedParamToArgMapping = (0 until expectedArgCount - 1).toList

    verifyInvokedElement(invocationInfo, "LocalDate#of")
    verifyArgumentsWithSingleArgList(invocationInfo, expectedArgCount, expectedProperArgsInText,
      expectedMappedParamNames, expectedPassingMechanisms, expectedParamToArgMapping)
    verifyThisExpression(invocationInfo, "LocalDate")
  }

  // Function-value call: sugared `local(...)` and desugared `local.apply(...)`
  // must both resolve to `Function2#apply` with synthetic params v1/v2.
  def testLocalFunctionsWithParams(): Unit = {
    val sugaredSyntax = "local(5, 9)"
    val desugaredSyntax = "local.apply(5, 9)"

    val code = (invocationSyntax: String) =>
      s"""
         |object TestObject {
         |
         |  def main(): Int = {
         |    val local = (x: Int, y: Int) => x + y
         |    ${markerStart}${invocationSyntax}${markerEnd}
         |  }
         |}
         |
         |""".stripMargin

    for (invocationSyntax <- List(sugaredSyntax, desugaredSyntax)) {
      val invocationInfo = generateInvocationInfoFor(code(invocationSyntax))

      val expectedArgCount = 1 + 2
      val expectedProperArgsInText = List("5", "9")
      val expectedMappedParamNames = List("v1", "v2")
      val expectedPassingMechanisms = (1 to expectedArgCount).map(_ => PassByValue).toList
      val expectedParamToArgMapping = (0 until expectedArgCount - 1).toList

      verifyInvokedElement(invocationInfo, "Function2#apply")
      verifyArgumentsWithSingleArgList(invocationInfo, expectedArgCount, expectedProperArgsInText,
        expectedMappedParamNames, expectedPassingMechanisms, expectedParamToArgMapping)
      verifyThisExpression(invocationInfo, "local")
    }
  }

  // Nullary function value: `local()`, `local.apply` and `local.apply()` all
  // resolve to `Function0#apply` with no proper arguments.
  def testLocalFunctionsWithoutParams(): Unit = {
    val sugaredSyntax = "local()"
    val desugaredSyntax1 = "local.apply"
    val desugaredSyntax2 = "local.apply()"

    val code = (invocationSyntax: String) =>
      s"""
         |object TestObject {
         |
         |  def main(): Int = {
         |    val local = () => "Hi"
         |    ${markerStart}${invocationSyntax}${markerEnd}
         |    5
         |  }
         |}
         |
         |""".stripMargin

    for (invocationSyntax <- List(sugaredSyntax, desugaredSyntax1, desugaredSyntax2)) {
      val invocationInfo = generateInvocationInfoFor(code(invocationSyntax))

      val expectedArgCount = 1 + 0
      val expectedProperArgsInText = Nil
      val expectedMappedParamNames = Nil
      val expectedPassingMechanisms = (1 to expectedArgCount).map(_ => PassByValue).toList
      val expectedParamToArgMapping = (0 until expectedArgCount - 1).toList

      verifyInvokedElement(invocationInfo, "Function0#apply")
      verifyArgumentsWithSingleArgList(invocationInfo, expectedArgCount, expectedProperArgsInText,
        expectedMappedParamNames, expectedPassingMechanisms, expectedParamToArgMapping)
      verifyThisExpression(invocationInfo, "local")
    }
  }
}
JetBrains/intellij-scala
scala/scala-impl/test/org/jetbrains/plugins/scala/lang/dfa/invocationInfo/tests/RegularMethodCallInfoTest.scala
Scala
apache-2.0
7,217
/**                                                                     **\
**  Copyright (c) 2012 Center for Organic and Medicinal Chemistry       **
**  Zurich University of Applied Sciences                               **
**  Wädenswil, Switzerland                                              **
\**                                                                     **/

package chemf.graph

import scalaz._, Scalaz._, scalacheck.ScalaCheckBinding._
import org.scalacheck._, Prop._

/**
 * ScalaCheck properties for `Edge`: equality, ordering and vertex adjacency.
 *
 * @author Stefan Höck
 */
object EdgeTest extends Properties ("Edge") {

  // generator for edges between two distinct non-negative vertex indices
  val edgeGen = for {
    a ← Gen choose (0, Int.MaxValue)
    b ← Gen choose (0, Int.MaxValue)
    if (a != b)
  } yield Edge(a,b)

  implicit val edgeArbitrary = Arbitrary (edgeGen)

  // structural equality: two edges are equal iff both endpoints agree
  property ("equal") = Prop.forAll {es: (Edge,Edge) ⇒
    val (ea, eb) = es
    (ea ≟ eb) ≟ ((ea.a ≟ eb.a) && (ea.b ≟ eb.b))
  }

  // the scalaz Order instance must agree with Edge's own compare
  property("order") = forAll {es: (Edge,Edge) ⇒
    val (ea, eb) = es
    (ea ?|? eb) ≟ (ea.compare(eb) ?|? 0)
  }

  // invariant: endpoints are normalized so that a <= b
  property ("aSmallerb") = Prop.forAll {e: Edge ⇒ e.a <= e.b }

  // an edge connects a vertex iff the vertex is one of its two endpoints
  property ("isNeighbor") = Prop.forAll {ei: (Edge,Int) ⇒
    val (e, i) = ei
    (e connects i) ≟ ((e.a ≟ i) || (e.b ≟ i))
  }

  // ordering is lexicographic: primarily by endpoint a, ties broken by b
  property ("compare") = Prop.forAll {es: (Edge,Edge) ⇒
    val (a,b) = es

    (if (a.a > b.a) a > b else true) :| "greater by a" &&
    (if (a.a < b.a) a < b else true) :| "smaller by a" &&
    (if (a.a ≟ b.a) (a.b compare b.b) ≟ (a compare b) else true) :| "by b"
  }
}

// vim: set ts=2 sw=2 et:
stefan-hoeck/chemf
src/test/scala/chemf/graph/EdgeTest.scala
Scala
gpl-3.0
1,515
package org.jetbrains.plugins.scala.lang.dataFlow.impl.reachingDefs

import com.intellij.openapi.util.TextRange
import com.intellij.psi.util.PsiTreeUtil
import org.jetbrains.plugins.scala.ScalaFileType
import org.jetbrains.plugins.scala.base.ScalaLightCodeInsightFixtureTestAdapter
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import org.jetbrains.plugins.scala.lang.psi.api.{ScControlFlowOwner, ScalaFile, ScalaPsiElement}
import org.jetbrains.plugins.scala.lang.psi.dataFlow.impl.reachingDefs._
import org.jetbrains.plugins.scala.util.TestUtils
import org.junit.Assert

import scala.util.Sorting

/**
 * Golden-file tests for `ReachingDefinitionsCollector.collectVariableInfo`:
 * for a selected code fragment, checks which variables flow into the fragment
 * (INPUT) and which definitions escape it (OUTPUT). Test data lives in
 * `dataFlow/reachingDefsCollect/&lt;testName&gt;.test`, with the source as the
 * first entry and the expected INPUT/OUTPUT dump as the second.
 */
class ReachingDefinitionsCollectorTest extends ScalaLightCodeInsightFixtureTestAdapter {
  override protected def getBasePath: String = TestUtils.getTestDataPath + "/dataFlow/reachingDefsCollect/"

  override def setUp(): Unit = {
    super.setUp()
    myFixture.setTestDataPath(getBasePath)
  }

  // Loads (source text, expected dump) from the test-data file for this test name.
  def readTestData: (String, String) = {
    val list = TestUtils.readInput(s"$getBasePath${getTestName(true)}.test")
    (list.get(0), list.get(1))
  }

  def doTest(): Unit = {
    val (input, output) = readTestData
    val file = myFixture.configureByText(ScalaFileType.INSTANCE, input).asInstanceOf[ScalaFile]
    val selection = getSelection(file)
    val startElement = file.findElementAt(selection.getStartOffset)
    val endElement = file.findElementAt(selection.getEndOffset)
    val selectedElements = ScalaPsiUtil.getElementsRange(startElement, endElement)
    // scope = the parent of the nearest control-flow owner enclosing the selection
    val scope: ScalaPsiElement = {
      val commonParent = PsiTreeUtil.findCommonParent(startElement, endElement)
      val cfowner = PsiTreeUtil.getParentOfType(commonParent, classOf[ScControlFlowOwner], false)
      cfowner.getParent.asInstanceOf[ScalaPsiElement]
    }
    val infos = ReachingDefinitionsCollector.collectVariableInfo(selectedElements, scope)
    val actualCfOutput = dumpDefInfos(infos)
    Assert.assertEquals(output.trim, actualCfOutput.trim)
  }

  // Uses the editor selection if present, otherwise the whole file.
  // NOTE(review): the end offset is shrunk by 1 in both branches — presumably
  // to make the range end-inclusive for findElementAt; confirm.
  private def getSelection(file: ScalaFile): TextRange = {
    val model = myFixture.getEditor.getSelectionModel
    if (model.hasSelection) {
      TextRange.create(model.getSelectionStart, model.getSelectionEnd - 1)
    } else{
      TextRange.create(0, file.getTextLength - 1)
    }
  }

  // Renders the collected infos as the canonical INPUT/OUTPUT dump,
  // with element names sorted for a stable comparison.
  protected def dumpDefInfos(infos: FragmentVariableInfos): String = {
    def variablesText(info: Iterable[VariableInfo]): String = {
      val elementStrings = info.map(_.element.toString).toSeq
      Sorting.stableSort(elementStrings).mkString("\n")
    }
    val inputElements = variablesText(infos.inputVariables)
    val outputElements = variablesText(infos.outputVariables)
    s"""INPUT:
       |$inputElements
       |OUTPUT:
       |$outputElements""".stripMargin.replace("\r", "")
  }

  def testSimpleFragment(): Unit = doTest()

  def testClosure1(): Unit = doTest()
}
JetBrains/intellij-scala
scala/scala-impl/test/org/jetbrains/plugins/scala/lang/dataFlow/impl/reachingDefs/ReachingDefinitionsCollectorTest.scala
Scala
apache-2.0
2,824
package edu.berkeley.eecs.btrdb.sparkconn.rdd.partitioner

/**
 * Placeholder for the Quasar RDD partitioner.
 *
 * NOTE(review): the class body is empty — presumably a stub awaiting a real
 * Spark `Partitioner` implementation; confirm intended contents.
 */
class QuasarRDDPartitioner {

}
SoftwareDefinedBuildings/quasar-spark-connector
src/main/scala/edu/berkeley/eecs/btrdb/sparkconn/rdd/partitioner/QuasarRDDPartitioner.scala
Scala
gpl-3.0
91
/**
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
  * The ASF licenses this file to You under the Apache License, Version 2.0
  * (the "License"); you may not use this file except in compliance with
  * the License.  You may obtain a copy of the License at
  *
  *    http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
package kafka.server

import kafka.network.SocketServer
import kafka.utils._
import java.io.File

import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.protocol.{ApiKeys, Errors}
import org.apache.kafka.common.requests.{AlterReplicaLogDirsRequest, AlterReplicaLogDirsResponse}
import org.junit.Assert._
import org.junit.Test

import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.util.Random

/**
 * Integration tests for the AlterReplicaLogDirs request on a single broker
 * with several log directories: replica movement before and after topic
 * creation, and the error codes returned for invalid or offline directories.
 */
class AlterReplicaLogDirsRequestTest extends BaseRequestTest {

  override val logDirCount = 5
  override val numBrokers = 1

  val topic = "topic"

  @Test
  def testAlterReplicaLogDirsRequest() {
    val partitionNum = 5

    // Alter replica dir before topic creation
    val logDir1 = new File(servers.head.config.logDirs(Random.nextInt(logDirCount))).getAbsolutePath
    val partitionDirs1 = (0 until partitionNum).map(partition => new TopicPartition(topic, partition) -> logDir1).toMap
    val alterReplicaLogDirsResponse1 = sendAlterReplicaLogDirsRequest(partitionDirs1)

    // The topic does not exist yet, so every partition must report
    // UNKNOWN_TOPIC_OR_PARTITION and no log may have been created.
    (0 until partitionNum).foreach { partition =>
      val tp = new TopicPartition(topic, partition)
      assertEquals(Errors.UNKNOWN_TOPIC_OR_PARTITION, alterReplicaLogDirsResponse1.responses().get(tp))
      assertTrue(servers.head.logManager.getLog(tp).isEmpty)
    }

    createTopic(topic, partitionNum, 1)
    // the pre-creation request must still take effect once the topic exists
    (0 until partitionNum).foreach { partition =>
      assertEquals(logDir1, servers.head.logManager.getLog(new TopicPartition(topic, partition)).get.dir.getParent)
    }

    // Alter replica dir again after topic creation
    val logDir2 = new File(servers.head.config.logDirs(Random.nextInt(logDirCount))).getAbsolutePath
    val partitionDirs2 = (0 until partitionNum).map(partition => new TopicPartition(topic, partition) -> logDir2).toMap
    val alterReplicaLogDirsResponse2 = sendAlterReplicaLogDirsRequest(partitionDirs2)

    // The response should succeed for all partitions, and the replicas should
    // eventually be moved to the new directory.
    (0 until partitionNum).foreach { partition =>
      val tp = new TopicPartition(topic, partition)
      assertEquals(Errors.NONE, alterReplicaLogDirsResponse2.responses().get(tp))
      TestUtils.waitUntilTrue(() => {
        logDir2 == servers.head.logManager.getLog(new TopicPartition(topic, partition)).get.dir.getParent
      }, "timed out waiting for replica movement")
    }
  }

  @Test
  def testAlterReplicaLogDirsRequestErrorCode(): Unit = {
    // NOTE(review): offlineDir == logDirs.tail.head == logDirs(1) == validDir1,
    // i.e. the directory made offline below is the same one used as a "valid"
    // target in the pre-creation step — confirm this overlap is intentional.
    val offlineDir = new File(servers.head.config.logDirs.tail.head).getAbsolutePath
    val validDir1 = new File(servers.head.config.logDirs(1)).getAbsolutePath
    val validDir2 = new File(servers.head.config.logDirs(2)).getAbsolutePath
    val validDir3 = new File(servers.head.config.logDirs(3)).getAbsolutePath

    // Test AlterReplicaDirRequest before topic creation:
    // a non-existent dir yields LOG_DIR_NOT_FOUND, a valid dir for a
    // not-yet-existing partition yields UNKNOWN_TOPIC_OR_PARTITION.
    val partitionDirs1 = mutable.Map.empty[TopicPartition, String]
    partitionDirs1.put(new TopicPartition(topic, 0), "invalidDir")
    partitionDirs1.put(new TopicPartition(topic, 1), validDir1)
    val alterReplicaDirResponse1 = sendAlterReplicaLogDirsRequest(partitionDirs1.toMap)
    assertEquals(Errors.LOG_DIR_NOT_FOUND, alterReplicaDirResponse1.responses().get(new TopicPartition(topic, 0)))
    assertEquals(Errors.UNKNOWN_TOPIC_OR_PARTITION, alterReplicaDirResponse1.responses().get(new TopicPartition(topic, 1)))

    createTopic(topic, 3, 1)

    // Test AlterReplicaDirRequest after topic creation:
    // invalid dir still fails, a valid dir now succeeds.
    val partitionDirs2 = mutable.Map.empty[TopicPartition, String]
    partitionDirs2.put(new TopicPartition(topic, 0), "invalidDir")
    partitionDirs2.put(new TopicPartition(topic, 1), validDir2)
    val alterReplicaDirResponse2 = sendAlterReplicaLogDirsRequest(partitionDirs2.toMap)
    assertEquals(Errors.LOG_DIR_NOT_FOUND, alterReplicaDirResponse2.responses().get(new TopicPartition(topic, 0)))
    assertEquals(Errors.NONE, alterReplicaDirResponse2.responses().get(new TopicPartition(topic, 1)))

    // Test AlterReplicaDirRequest after topic creation and log directory failure:
    // once a dir is offline, any movement touching the affected broker state
    // reports KAFKA_STORAGE_ERROR.
    servers.head.logDirFailureChannel.maybeAddOfflineLogDir(offlineDir, "", new java.io.IOException())
    TestUtils.waitUntilTrue(() => !servers.head.logManager.isLogDirOnline(offlineDir), s"timed out waiting for $offlineDir to be offline", 3000)
    val partitionDirs3 = mutable.Map.empty[TopicPartition, String]
    partitionDirs3.put(new TopicPartition(topic, 0), "invalidDir")
    partitionDirs3.put(new TopicPartition(topic, 1), validDir3)
    partitionDirs3.put(new TopicPartition(topic, 2), offlineDir)
    val alterReplicaDirResponse3 = sendAlterReplicaLogDirsRequest(partitionDirs3.toMap)
    assertEquals(Errors.LOG_DIR_NOT_FOUND, alterReplicaDirResponse3.responses().get(new TopicPartition(topic, 0)))
    assertEquals(Errors.KAFKA_STORAGE_ERROR, alterReplicaDirResponse3.responses().get(new TopicPartition(topic, 1)))
    assertEquals(Errors.KAFKA_STORAGE_ERROR, alterReplicaDirResponse3.responses().get(new TopicPartition(topic, 2)))
  }

  // Builds, sends and parses one AlterReplicaLogDirs round-trip
  // (defaults to the controller's socket server).
  private def sendAlterReplicaLogDirsRequest(partitionDirs: Map[TopicPartition, String], socketServer: SocketServer = controllerSocketServer): AlterReplicaLogDirsResponse = {
    val request = new AlterReplicaLogDirsRequest.Builder(partitionDirs.asJava).build()
    val response = connectAndSend(request, ApiKeys.ALTER_REPLICA_LOG_DIRS, socketServer)
    AlterReplicaLogDirsResponse.parse(response, request.version)
  }
}
mihbor/kafka
core/src/test/scala/unit/kafka/server/AlterReplicaLogDirsRequestTest.scala
Scala
apache-2.0
6,227
/*
 * Copyright 2016 The BigDL Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.intel.analytics.bigdl.nn.keras

import com.intel.analytics.bigdl.nn.abstractnn._
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils.Shape

import scala.reflect.ClassTag

/**
 * Apply multiplicative 1-centered Gaussian noise.
 * As it is a regularization layer, it is only active at training time.
 *
 * When you use this layer as the first layer of a model, you need to provide the argument
 * inputShape (a Single Shape, does not include the batch dimension).
 *
 * The output shape equals the input shape (see [[IdentityOutputShape]]).
 *
 * @param p Double, drop probability (as with 'Dropout').
 *          The multiplicative noise will have standard deviation 'sqrt(p/(1-p))'.
 * @param inputShape shape of the input, excluding the batch dimension; may be
 *                   null when the layer is not the first in the model.
 * @tparam T Numeric type of parameter(e.g. weight, bias). Only support float/double now.
 */
class GaussianDropout[T: ClassTag](
   val p: Double,
   val inputShape: Shape = null)(implicit ev: TensorNumeric[T])
  extends KerasLayer[Tensor[T], Tensor[T], T](KerasLayer.addBatch(inputShape)) with IdentityOutputShape {

  // Delegates to the core (non-Keras) GaussianDropout implementation,
  // using `p` as its drop rate.
  override def doBuild(inputShape: Shape): AbstractModule[Tensor[T], Tensor[T], T] = {
    val layer = com.intel.analytics.bigdl.nn.GaussianDropout(rate = p)
    layer.asInstanceOf[AbstractModule[Tensor[T], Tensor[T], T]]
  }
}

/** Factory for [[GaussianDropout]] layers. */
object GaussianDropout {
  def apply[@specialized(Float, Double) T: ClassTag](
    p: Double,
    inputShape: Shape = null)(implicit ev: TensorNumeric[T]): GaussianDropout[T] = {
    new GaussianDropout[T](p, inputShape)
  }
}
yiheng/BigDL
spark/dl/src/main/scala/com/intel/analytics/bigdl/nn/keras/GaussianDropout.scala
Scala
apache-2.0
2,106
import scala.io.StdIn

/**
 * Solution for <a href="https://www.urionlinejudge.com.br/judge/en/problems/view/1726">1726 Friends</a>.
 *
 * Sets are represented as sorted strings of the form "{abc}". An expression
 * combines sets with '+' (union), '-' (difference) and '*' (intersection);
 * '(' ')' groups sub-expressions, and '*' binds tighter than '+'/'-'.
 *
 * Conventions kept from the original interface:
 *  - every `compute*` helper mutates and returns its first argument;
 *  - every `parse*` helper consumes characters from the front of `expression`.
 *
 * @author ricardo.staroski
 */
object Main {

  /** Reads one expression per line until EOF and prints its evaluation. */
  def main(args: Array[String]): Unit = {
    Iterator.continually(StdIn.readLine())
      .takeWhile(_ != null)
      .foreach(line => println(parseUnionOrDifference(new StringBuilder(line))))
  }

  /** Set difference (-): removes from `set1` every symbol that occurs in `set2`. */
  def computeDifference(set1: StringBuilder, set2: StringBuilder): StringBuilder = {
    // set2 is never mutated, so its bounds are stable while set1 shrinks
    for (i <- 1 until set2.length - 1) {
      val symbol = set2.charAt(i)
      // first occurrence of the symbol between set1's braces, if any
      val at = set1.indexOf(symbol.toString, 1)
      if (at >= 1 && at < set1.length - 1) set1.deleteCharAt(at)
    }
    set1
  }

  /** Set intersection (*): keeps in `set1` only the symbols that occur in `set2`. */
  def computeIntersection(set1: StringBuilder, set2: StringBuilder): StringBuilder = {
    val keep = set2.substring(1, set2.length - 1).toSet
    var i = 1
    while (i < set1.length - 1) {
      if (keep(set1.charAt(i))) i += 1
      else set1.deleteCharAt(i) // do not advance: the next char shifted into slot i
    }
    set1
  }

  /** Set union (+): inserts into `set1`, in sorted order, every symbol of `set2` it lacks. */
  def computeUnion(set1: StringBuilder, set2: StringBuilder): StringBuilder = {
    for (i <- 1 until set2.length - 1) {
      val symbol = set2.charAt(i)
      val at = set1.indexOf(symbol.toString, 1)
      val alreadyPresent = at >= 1 && at < set1.length - 1
      if (!alreadyPresent) insert(symbol, set1, set1.length - 1)
    }
    set1
  }

  /** Inserts `symbol` so the symbols between the braces stay sorted. */
  def insert(symbol: Char, set: StringBuilder, limit: Int): Unit = {
    // first interior position with a larger symbol, or just before the closing brace
    val at = (1 until limit).find(i => symbol < set.charAt(i)).getOrElse(limit)
    set.insert(at, symbol)
  }

  /** Parses a parenthesized sub-expression; returns an empty builder when none starts here. */
  def parseBlock(expression: StringBuilder): StringBuilder = {
    // guard against an exhausted expression (the original would throw here)
    if (expression.isEmpty || expression.charAt(0) != '(') new StringBuilder
    else {
      expression.deleteCharAt(0) // consume '('
      val result = parseUnionOrDifference(expression)
      expression.deleteCharAt(0) // consume ')'
      result
    }
  }

  /** Parses one set: a '(...)' block and/or a literal '{...}'. */
  def parseSet(expression: StringBuilder): StringBuilder = {
    val block = parseBlock(expression)
    if (expression.nonEmpty && expression.charAt(0) == '{') {
      // copy the literal set out of the expression, keeping any block prefix
      val result = new StringBuilder(block.toString)
      var done = false
      while (!done) {
        val symbol = expression.charAt(0)
        result.append(symbol)
        expression.deleteCharAt(0)
        done = symbol == '}'
      }
      result
    } else block
  }

  /** Parses a chain of '*' (intersection) operations — higher precedence. */
  def parseIntersection(expression: StringBuilder): StringBuilder = {
    var value = parseSet(expression)
    while (expression.nonEmpty && expression.charAt(0) == '*') {
      expression.deleteCharAt(0) // consume '*'
      value = computeIntersection(value, parseSet(expression))
    }
    value
  }

  /** Parses a chain of '+' (union) and '-' (difference) operations. */
  def parseUnionOrDifference(expression: StringBuilder): StringBuilder = {
    var value = parseIntersection(expression)
    while (expression.nonEmpty &&
      (expression.charAt(0) == '+' || expression.charAt(0) == '-')) {
      val op = expression.charAt(0)
      expression.deleteCharAt(0) // consume the operator
      val operand = parseIntersection(expression)
      value = if (op == '+') computeUnion(value, operand)
              else computeDifference(value, operand)
    }
    value
  }
}
staroski/URIOnlineJudge
1726-friends-scala/src/Main.scala
Scala
gpl-3.0
5,288
/*
 * Copyright 2001-2015 Artima, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.scalatest.examples.asyncfeaturespec.beforeandafter

import org.scalatest.AsyncFeatureSpec
import org.scalatest.BeforeAndAfter
import scala.concurrent.Future
import scala.concurrent.ExecutionContext

// Defining actor messages: the small protocol understood by StringActor.
sealed abstract class StringOp
case object Clear extends StringOp
case class Append(value: String) extends StringOp
case object GetValue

/**
 * Simulates an actor around a mutable StringBuilder: `!` applies a
 * fire-and-forget operation, `?` asynchronously reads the current value.
 * All access to the builder is synchronized on this instance.
 */
class StringActor { // Simulating an actor
  private final val sb = new StringBuilder
  def !(op: StringOp): Unit =
    synchronized {
      op match {
        case Append(value) => sb.append(value)
        case Clear => sb.clear()
      }
    }
  // reads the value on the caller-supplied ExecutionContext
  def ?(get: GetValue.type)(implicit c: ExecutionContext): Future[String] =
    Future {
      synchronized { sb.toString }
    }
}

/**
 * Example of `BeforeAndAfter` in an async style suite: `before` seeds the
 * shared actor's text, `after` clears it, and each scenario appends to it and
 * asserts on the resulting string inside a mapped Future.
 */
class ExampleSpec extends AsyncFeatureSpec with BeforeAndAfter {

  final val actor = new StringActor

  before {
    actor ! Append("ScalaTest is designed to ") // set up the fixture
  }

  after {
    actor ! Clear // clean up the fixture
  }

  feature("Simplicity") {
    scenario("User needs to read test code written by others") {
      actor ! Append("encourage clear code!")
      val futureString = actor ? GetValue
      futureString map { s =>
        assert(s == "ScalaTest is designed to encourage clear code!")
      }
    }

    scenario("User needs to understand what the tests are doing") {
      actor ! Append("be easy to reason about!")
      val futureString = actor ? GetValue
      futureString map { s =>
        assert(s == "ScalaTest is designed to be easy to reason about!")
      }
    }
  }
}
dotty-staging/scalatest
examples/src/test/scala/org/scalatest/examples/asyncfeaturespec/beforeandafter/ExampleSpec.scala
Scala
apache-2.0
2,179
package org.scalatra

import java.net.URI
import javax.servlet.http.{HttpServletRequest, HttpServletResponse}
import servlet.ServletApiImplicits

/**
 * Redirects unsecured requests to the corresponding secure URL.
 *
 * Mix into a handler chain: a request arriving over plain HTTP is answered
 * with a redirect to the same resource over HTTPS; secure requests pass
 * straight through to the rest of the chain.
 */
trait SslRequirement extends Handler with ServletApiImplicits {

  abstract override def handle(req: HttpServletRequest, res: HttpServletResponse) {
    if (req.isSecure) {
      // already HTTPS — continue down the handler chain
      super.handle(req, res)
    } else {
      val insecureUri = req.uri
      // translate the insecure port to its secure counterpart, defaulting to 443
      val targetPort = securePortMap.applyOrElse(insecureUri.getPort, (_: Int) => 443)
      val secureUri = new URI(
        "https",
        insecureUri.getRawUserInfo,
        insecureUri.getHost,
        targetPort,
        insecureUri.getPath,
        insecureUri.getQuery,
        insecureUri.getFragment)
      res.redirect(secureUri.toString)
    }
  }

  /**
   * Maps unsecured ports to secure ports. By default, 80 redirects to
   * 443, and 8080 to 8443.
   */
  protected def securePortMap: PartialFunction[Int, Int] =
    Map(80 -> 443, 8080 -> 8443)
}
etorreborre/scalatra
core/src/main/scala/org/scalatra/SslRequirement.scala
Scala
bsd-2-clause
928
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package kafka.api

import java.nio.ByteBuffer
import kafka.common.{ErrorMapping, TopicAndPartition}
import kafka.api.ApiUtils._

/**
 * Deserialization of the offset-fetch response.
 *
 * Wire format (all big-endian, via ByteBuffer):
 *   correlationId: int32
 *   topicCount:    int32, then per topic:
 *     topicName:      short string
 *     partitionCount: int32, then per partition:
 *       partitionId: int32
 *       errorCode:   int16
 *       offsetCount: int32, then offsetCount int64 offsets
 */
object OffsetResponse {

  def readFrom(buffer: ByteBuffer): OffsetResponse = {
    val correlationId = buffer.getInt
    val numTopics = buffer.getInt
    val pairs = (1 to numTopics).flatMap(_ => {
      val topic = readShortString(buffer)
      val numPartitions = buffer.getInt
      (1 to numPartitions).map(_ => {
        val partition = buffer.getInt
        val error = buffer.getShort
        val numOffsets = buffer.getInt
        val offsets = (1 to numOffsets).map(_ => buffer.getLong)
        (TopicAndPartition(topic, partition), PartitionOffsetsResponse(error, offsets))
      })
    })
    OffsetResponse(correlationId, Map(pairs:_*))
  }

}

/** Per-partition result: an error code and the list of returned offsets. */
case class PartitionOffsetsResponse(error: Short, offsets: Seq[Long]) {
  override def toString(): String = {
    new String("error: " + ErrorMapping.exceptionFor(error).getClass.getName + " offsets: " + offsets.mkString)
  }
}

/**
 * A full offset response, keyed by topic-partition.
 * Serialization in [[writeTo]] mirrors the wire format documented on the
 * companion object; [[sizeInBytes]] must stay in sync with it.
 */
case class OffsetResponse(override val correlationId: Int,
                          partitionErrorAndOffsets: Map[TopicAndPartition, PartitionOffsetsResponse])
    extends RequestOrResponse(correlationId = correlationId) {

  // grouping by topic drives both sizing and serialization below
  lazy val offsetsGroupedByTopic = partitionErrorAndOffsets.groupBy(_._1.topic)

  /** True when any partition carries a non-success error code. */
  def hasError = partitionErrorAndOffsets.values.exists(_.error != ErrorMapping.NoError)

  val sizeInBytes = {
    4 + /* correlation id */
    4 + /* topic count */
    offsetsGroupedByTopic.foldLeft(0)((foldedTopics, currTopic) => {
      val (topic, errorAndOffsetsMap) = currTopic
      foldedTopics +
      shortStringLength(topic) +
      4 + /* partition count */
      errorAndOffsetsMap.foldLeft(0)((foldedPartitions, currPartition) => {
        foldedPartitions +
        4 + /* partition id */
        2 + /* partition error */
        4 + /* offset array length */
        currPartition._2.offsets.size * 8 /* offset */
      })
    })
  }

  def writeTo(buffer: ByteBuffer) {
    buffer.putInt(correlationId)
    buffer.putInt(offsetsGroupedByTopic.size) // topic count
    offsetsGroupedByTopic.foreach {
      case((topic, errorAndOffsetsMap)) =>
        writeShortString(buffer, topic)
        buffer.putInt(errorAndOffsetsMap.size) // partition count
        errorAndOffsetsMap.foreach {
          case((TopicAndPartition(_, partition), errorAndOffsets)) =>
            buffer.putInt(partition)
            buffer.putShort(errorAndOffsets.error)
            buffer.putInt(errorAndOffsets.offsets.size) // offset array length
            errorAndOffsets.offsets.foreach(buffer.putLong(_))
        }
    }
  }

  override def describe(details: Boolean):String = { toString }

}
unix1986/universe
tool/kafka-0.8.1.1-src/core/src/main/scala/kafka/api/OffsetResponse.scala
Scala
bsd-2-clause
3,523
package models.gitolite import org.joda.time.DateTime import scalikejdbc._ case class UsersProjects( id: Int, userId: Int, projectId: Int, createdAt: DateTime, updatedAt: DateTime, projectAccess: Int, generatedForPublicAccess: Option[Boolean] = None) { def save()(implicit session: DBSession = UsersProjects.autoSession): UsersProjects = UsersProjects.save(this)(session) def destroy()(implicit session: DBSession = UsersProjects.autoSession): Unit = UsersProjects.destroy(this)(session) } object UsersProjects extends SQLSyntaxSupport[UsersProjects] { override val tableName = "users_projects" override val columns = Seq("id", "user_id", "project_id", "created_at", "updated_at", "project_access", "generated_for_public_access") def apply(up: SyntaxProvider[UsersProjects])(rs: WrappedResultSet): UsersProjects = apply(up.resultName)(rs) def apply(up: ResultName[UsersProjects])(rs: WrappedResultSet): UsersProjects = new UsersProjects( id = rs.get(up.id), userId = rs.get(up.userId), projectId = rs.get(up.projectId), createdAt = rs.get(up.createdAt), updatedAt = rs.get(up.updatedAt), projectAccess = rs.get(up.projectAccess), generatedForPublicAccess = rs.get(up.generatedForPublicAccess) ) val up = UsersProjects.syntax("up") override val autoSession = AutoSession def find(id: Int)(implicit session: DBSession = autoSession): Option[UsersProjects] = { withSQL { select.from(UsersProjects as up).where.eq(up.id, id) }.map(UsersProjects(up.resultName)).single.apply() } def findAll()(implicit session: DBSession = autoSession): List[UsersProjects] = { withSQL(select.from(UsersProjects as up)).map(UsersProjects(up.resultName)).list.apply() } def countAll()(implicit session: DBSession = autoSession): Long = { withSQL(select(sqls.count).from(UsersProjects as up)).map(rs => rs.long(1)).single.apply().get } def findBy(where: SQLSyntax)(implicit session: DBSession = autoSession): Option[UsersProjects] = { withSQL { select.from(UsersProjects as up).where.append(where) 
}.map(UsersProjects(up.resultName)).single.apply() } def findAllBy(where: SQLSyntax)(implicit session: DBSession = autoSession): List[UsersProjects] = { withSQL { select.from(UsersProjects as up).where.append(where) }.map(UsersProjects(up.resultName)).list.apply() } def countBy(where: SQLSyntax)(implicit session: DBSession = autoSession): Long = { withSQL { select(sqls.count).from(UsersProjects as up).where.append(where) }.map(_.long(1)).single.apply().get } def create( userId: Int, projectId: Int, createdAt: DateTime, updatedAt: DateTime, projectAccess: Int, generatedForPublicAccess: Option[Boolean] = None)(implicit session: DBSession = autoSession): UsersProjects = { val generatedKey = withSQL { insert.into(UsersProjects).columns( column.userId, column.projectId, column.createdAt, column.updatedAt, column.projectAccess, column.generatedForPublicAccess ).values( userId, projectId, createdAt, updatedAt, projectAccess, generatedForPublicAccess ) }.updateAndReturnGeneratedKey.apply() UsersProjects( id = generatedKey.toInt, userId = userId, projectId = projectId, createdAt = createdAt, updatedAt = updatedAt, projectAccess = projectAccess, generatedForPublicAccess = generatedForPublicAccess) } def save(entity: UsersProjects)(implicit session: DBSession = autoSession): UsersProjects = { withSQL { update(UsersProjects).set( column.id -> entity.id, column.userId -> entity.userId, column.projectId -> entity.projectId, column.createdAt -> entity.createdAt, column.updatedAt -> entity.updatedAt, column.projectAccess -> entity.projectAccess, column.generatedForPublicAccess -> entity.generatedForPublicAccess ).where.eq(column.id, entity.id) }.update.apply() entity } def destroy(entity: UsersProjects)(implicit session: DBSession = autoSession): Unit = { withSQL { delete.from(UsersProjects).where.eq(column.id, entity.id) }.update.apply() } }
thomaschoo/gitolite-to-gitbucket
src/main/scala/models/gitolite/UsersProjects.scala
Scala
mit
4,293
package org.jetbrains.plugins.scala.worksheet.settings.ui import com.intellij.application.options.ModulesComboBox import com.intellij.core.JavaPsiBundle import com.intellij.execution.ui.ConfigurationModuleSelector import com.intellij.openapi.project.Project import com.intellij.openapi.ui.ComboBox import com.intellij.openapi.vfs.VirtualFile import com.intellij.ui.SimpleListCellRenderer import com.intellij.ui.components.JBCheckBox import javax.swing._ import net.miginfocom.layout.CC import net.miginfocom.swing.MigLayout import org.jetbrains.plugins.scala.extensions.ObjectExt import org.jetbrains.plugins.scala.util.ui.TextWithMnemonic.AbstractButtonExt import org.jetbrains.plugins.scala.worksheet.settings.WorksheetExternalRunType import org.jetbrains.plugins.scala.worksheet.settings.ui.WorksheetSettingsPanel.TabTypeData import org.jetbrains.plugins.scala.worksheet.settings.ui.WorksheetSettingsPanel.TabTypeData._ import org.jetbrains.plugins.scala.worksheet.{WorksheetBundle, WorksheetUtils} private final class WorksheetSettingsPanel( tabTypeData: TabTypeData, settingsData: WorksheetSettingsData, availableProfilesProvider: () => Seq[String] ) extends JPanel { private val interactiveModeCheckBox = new JBCheckBox private val makeProjectBeforeRunCheckBox = new JBCheckBox private val moduleComboBox = new ModulesComboBox private val compilerProfileComboBox = new ComboBox[String] private val runTypeComboBox = new ComboBox[WorksheetExternalRunType] private val openCompilerProfileSettingsButton = new ShowCompilerProfileSettingsButton( () => selectedProfile, () => updateProfiles(selectedProfile, listOfProfiles) ).getActionButton locally { initLayout() initData(settingsData) } def filledSettingsData: WorksheetSettingsData = WorksheetSettingsData( interactiveModeCheckBox.isSelected, makeProjectBeforeRunCheckBox.isSelected, runTypeComboBox.getItem, moduleComboBox.getSelectedModule, compilerProfileComboBox.getItem ) private def selectedProfile: String = 
filledSettingsData.compilerProfile private def project: Project = tabTypeData.project private def listOfProfiles: Seq[String] = availableProfilesProvider() private def initData(settingsData: WorksheetSettingsData): Unit = { moduleComboBox.fillModules(project) // NOTE: this allows the selection to be empty only after combo box initialization // FIXME: Currently you can't unselect selected module, see: SCL-18054, IDEA-239791 if (tabTypeData.is[DefaultProjectSettingsTab]) { moduleComboBox.allowEmptySelection(JavaPsiBundle.message("list.item.no.module")) } val module = Option(settingsData.cpModule) module.foreach(moduleComboBox.setSelectedModule) val allowChangingModule = tabTypeData match { case DefaultProjectSettingsTab(_) => true case FileSettingsTab(project, virtualFile) => WorksheetUtils.isScratchWorksheet(project, virtualFile) } moduleComboBox.setEnabled(allowChangingModule) tabTypeData match { case DefaultProjectSettingsTab(_) => val note = WorksheetBundle.message("worksheet.settings.panel.default.setting.is.only.used.in.scratch.files.note") moduleComboBox.setToolTipText(note) case FileSettingsTab(_, _) => if (!allowChangingModule) { val note = WorksheetBundle.message("worksheet.settings.panel.setting.can.be.changed.in.scratch.files.note") moduleComboBox.setToolTipText(note) } } runTypeComboBox.setModel(new DefaultComboBoxModel(WorksheetExternalRunType.getAllRunTypes)) runTypeComboBox.setRenderer(SimpleListCellRenderer.create((label, runType, _: Int) => { label.setText(runType.getMenuText) })) runTypeComboBox.setSelectedItem(settingsData.runType) interactiveModeCheckBox.setSelected(settingsData.isInteractive) makeProjectBeforeRunCheckBox.setSelected(settingsData.isMakeBeforeRun) // setup custom renderer to allow showing unselected (null) item compilerProfileComboBox.setRenderer(new NullableListCellRenderer(WorksheetBundle.message("worksheet.settings.panel.no.profile.selected"))) if (allowDeselectingProfile) 
compilerProfileComboBox.setToolTipText(WorksheetBundle.message("worksheet.settings.panel.compiler.profile.of.worksheet.module.will.be.used.note")) updateProfiles(settingsData.compilerProfile, listOfProfiles) } private def updateProfiles(selectedProfile: String, profiles: Seq[String]): Unit = { compilerProfileComboBox.setSelectedItem(null) val modelItems = if (allowDeselectingProfile) null +: profiles else profiles compilerProfileComboBox.setModel(new DefaultComboBoxModel[String](modelItems.toArray)) compilerProfileComboBox.setSelectedItem(selectedProfile) } private def allowDeselectingProfile: Boolean = tabTypeData.is[TabTypeData.DefaultProjectSettingsTab] private class NullableListCellRenderer(emptySelectionText: String) extends com.intellij.ui.SimpleListCellRenderer[String] { override def customize(list: JList[_ <: String], value: String, index: Int, selected: Boolean, hasFocus: Boolean): Unit = { val displayValue = if (value == null) emptySelectionText else value setText(displayValue) } } private def wrap = new CC().wrap() private def c = new CC() private def initLayout(): Unit = { val root = this root.setLayout(new MigLayout) interactiveModeCheckBox.setTextWithMnemonic(WorksheetBundle.message("worksheet.settings.panel.interactive.mode")) root.add(interactiveModeCheckBox, wrap) makeProjectBeforeRunCheckBox.setTextWithMnemonic(WorksheetBundle.message("worksheet.settings.panel.change.make.button")) root.add(makeProjectBeforeRunCheckBox, wrap) root.add(new JLabel(WorksheetBundle.message("worksheet.settings.panel.run.type"))) root.add(runTypeComboBox, wrap.growX()) root.add(new JLabel(WorksheetBundle.message("worksheet.settings.panel.use.class.path.of.module"))) root.add(moduleComboBox, c.wrap().growX()) root.add(new JLabel(WorksheetBundle.message("worksheet.settings.panel.compiler.profile"))) root.add(compilerProfileComboBox, c.growX()) root.add(openCompilerProfileSettingsButton) } } object WorksheetSettingsPanel { sealed abstract class TabTypeData { def project: 
Project } object TabTypeData { final case class DefaultProjectSettingsTab(override val project: Project) extends TabTypeData final case class FileSettingsTab(override val project: Project, virtualFile: VirtualFile) extends TabTypeData } case class UiData(profiles: Seq[String]) }
JetBrains/intellij-scala
scala/worksheet/src/org/jetbrains/plugins/scala/worksheet/settings/ui/WorksheetSettingsPanel.scala
Scala
apache-2.0
6,682
/* * OneTimeMPE.scala * One-time algorithms that compute the most likely values of elements. * * Created By: Avi Pfeffer (apfeffer@cra.com) * Creation Date: Jan 1, 2009 * * Copyright 2013 Avrom J. Pfeffer and Charles River Analytics, Inc. * See http://www.cra.com or email figaro@cra.com for information. * * See http://www.github.com/p2t2/figaro for a copy of the software license. */ package com.cra.figaro.algorithm import com.cra.figaro.language._ /** * One-time algorithms that compute the most likely values of elements. * A class that implements this trait must implement run and mostLikelyValue methods. */ trait OneTimeMPE extends MPEAlgorithm with OneTime { protected def doMostLikelyValue[T](target: Element[T]): T = mostLikelyValue(target) }
jyuhuan/figaro
Figaro/src/main/scala/com/cra/figaro/algorithm/OneTimeMPE.scala
Scala
bsd-3-clause
783
/* * Copyright 2014 mengke@me.com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.ibntab.eventful.aggregate import akka.actor.{Actor, ActorLogging} import io.ibntab.eventful.aggregate.Aggregator.AggregateStrategy import io.ibntab.eventful.{BaseEvent, EventType} import scala.collection.mutable // Created by ke.meng on 2014/11/17. class Aggregator(group: String, strategy: AggregateStrategy) extends Actor with ActorLogging { val results = mutable.HashMap.empty[EventType, Any] override def receive: Receive = { case BaseEvent(d, g, t) => require(g == group, s"OMG, the event hub send me a wrong event belongs to other group[$g]") results += (t -> d) strategy(results) match { case Some(data) => // send to sb. case None => log.info("A new event [group:{}, type:{}] was received, but it was not able to be" + " aggregated with other events", g, t) } } } object Aggregator { type AggregateStrategy[Data] = (mutable.Map[EventType, Any] => Option[Data]) }
mengke/eventful
eventful-core/src/main/scala/io/ibntab/eventful/aggregate/Aggregator.scala
Scala
apache-2.0
1,571
/* Generated File */ package models.queries.store import com.kyleu.projectile.models.database.{DatabaseField, Row} import com.kyleu.projectile.models.database.DatabaseFieldType._ import com.kyleu.projectile.models.queries.{BaseQueries, ResultFieldHelper} import com.kyleu.projectile.models.result.data.DataField import com.kyleu.projectile.models.result.filter.Filter import com.kyleu.projectile.models.result.orderBy.OrderBy import java.time.ZonedDateTime import models.store.StoreRow object StoreRowQueries extends BaseQueries[StoreRow]("storeRow", "store") { override val fields = Seq( DatabaseField(title = "Store Id", prop = "storeId", col = "store_id", typ = IntegerType), DatabaseField(title = "Manager Staff Id", prop = "managerStaffId", col = "manager_staff_id", typ = IntegerType), DatabaseField(title = "Address Id", prop = "addressId", col = "address_id", typ = IntegerType), DatabaseField(title = "Last Update", prop = "lastUpdate", col = "last_update", typ = TimestampZonedType) ) override val pkColumns = Seq("store_id") override protected val searchColumns = Seq("store_id", "manager_staff_id", "address_id", "last_update") def countAll(filters: Seq[Filter] = Nil) = onCountAll(filters) def getAll(filters: Seq[Filter] = Nil, orderBys: Seq[OrderBy] = Nil, limit: Option[Int] = None, offset: Option[Int] = None) = { new GetAll(filters, orderBys, limit, offset) } def search(q: Option[String], filters: Seq[Filter] = Nil, orderBys: Seq[OrderBy] = Nil, limit: Option[Int] = None, offset: Option[Int] = None) = { new Search(q, filters, orderBys, limit, offset) } def searchCount(q: Option[String], filters: Seq[Filter] = Nil) = new SearchCount(q, filters) def searchExact(q: String, orderBys: Seq[OrderBy], limit: Option[Int], offset: Option[Int]) = new SearchExact(q, orderBys, limit, offset) def getByPrimaryKey(storeId: Int) = new GetByPrimaryKey(Seq(storeId)) def getByPrimaryKeySeq(storeIdSeq: Seq[Int]) = new ColSeqQuery(column = "store_id", values = storeIdSeq) final case class 
CountByAddressId(addressId: Int) extends ColCount(column = "address_id", values = Seq(addressId)) final case class GetByAddressId(addressId: Int, orderBys: Seq[OrderBy] = Nil, limit: Option[Int] = None, offset: Option[Int] = None) extends SeqQuery( whereClause = Some(quote("address_id") + " = ?"), orderBy = ResultFieldHelper.orderClause(fields, orderBys: _*), limit = limit, offset = offset, values = Seq(addressId) ) final case class GetByAddressIdSeq(addressIdSeq: Seq[Int]) extends ColSeqQuery(column = "address_id", values = addressIdSeq) final case class CountByLastUpdate(lastUpdate: ZonedDateTime) extends ColCount(column = "last_update", values = Seq(lastUpdate)) final case class GetByLastUpdate(lastUpdate: ZonedDateTime, orderBys: Seq[OrderBy] = Nil, limit: Option[Int] = None, offset: Option[Int] = None) extends SeqQuery( whereClause = Some(quote("last_update") + " = ?"), orderBy = ResultFieldHelper.orderClause(fields, orderBys: _*), limit = limit, offset = offset, values = Seq(lastUpdate) ) final case class GetByLastUpdateSeq(lastUpdateSeq: Seq[ZonedDateTime]) extends ColSeqQuery(column = "last_update", values = lastUpdateSeq) final case class CountByManagerStaffId(managerStaffId: Int) extends ColCount(column = "manager_staff_id", values = Seq(managerStaffId)) final case class GetByManagerStaffId(managerStaffId: Int, orderBys: Seq[OrderBy] = Nil, limit: Option[Int] = None, offset: Option[Int] = None) extends SeqQuery( whereClause = Some(quote("manager_staff_id") + " = ?"), orderBy = ResultFieldHelper.orderClause(fields, orderBys: _*), limit = limit, offset = offset, values = Seq(managerStaffId) ) final case class GetByManagerStaffIdSeq(managerStaffIdSeq: Seq[Int]) extends ColSeqQuery(column = "manager_staff_id", values = managerStaffIdSeq) final case class CountByStoreId(storeId: Int) extends ColCount(column = "store_id", values = Seq(storeId)) final case class GetByStoreId(storeId: Int, orderBys: Seq[OrderBy] = Nil, limit: Option[Int] = None, offset: 
Option[Int] = None) extends SeqQuery( whereClause = Some(quote("store_id") + " = ?"), orderBy = ResultFieldHelper.orderClause(fields, orderBys: _*), limit = limit, offset = offset, values = Seq(storeId) ) final case class GetByStoreIdSeq(storeIdSeq: Seq[Int]) extends ColSeqQuery(column = "store_id", values = storeIdSeq) def insert(model: StoreRow) = new Insert(model) def insertBatch(models: Seq[StoreRow]) = new InsertBatch(models) def create(dataFields: Seq[DataField]) = new InsertFields(dataFields) def removeByPrimaryKey(storeId: Int) = new RemoveByPrimaryKey(Seq[Any](storeId)) def update(storeId: Int, fields: Seq[DataField]) = new UpdateFields(Seq[Any](storeId), fields) def updateBulk(pks: Seq[Seq[Any]], fields: Seq[DataField]) = new UpdateFieldsBulk(pks, fields) override def fromRow(row: Row) = StoreRow( storeId = IntegerType(row, "store_id"), managerStaffId = IntegerType(row, "manager_staff_id"), addressId = IntegerType(row, "address_id"), lastUpdate = TimestampZonedType(row, "last_update") ) }
KyleU/boilerplay
app/models/queries/store/StoreRowQueries.scala
Scala
cc0-1.0
5,165
import scala.language.dynamics object MyDynamic extends Dynamic { def selectDynamic(name: String): Any = ??? } object Test extends App { locally { import java.lang.String MyDynamic.id } }
som-snytt/dotty
tests/pos/t8364.scala
Scala
apache-2.0
204
package com.karasiq.bittorrent.dispatcher import java.net.InetSocketAddress import akka.util.ByteString import com.karasiq.bittorrent.protocol.extensions.PeerExtensions import scala.collection.BitSet trait TorrentPeerInfo { def id: ByteString def infoHash: ByteString def completed: BitSet } case class PeerData(address: InetSocketAddress, id: ByteString, infoHash: ByteString, extensions: PeerExtensions, choking: Boolean = true, interesting: Boolean = false, chokedBy: Boolean = true, interestedBy: Boolean = false, completed: BitSet = BitSet.empty) extends TorrentPeerInfo { require(id.length == 20, s"Invalid peer id: $id") require(infoHash.length == 20, s"Invalid info hash: $infoHash") } case class SeedData(id: ByteString, infoHash: ByteString, completed: BitSet = BitSet.empty)
Karasiq/torrentstream
library/src/main/scala/com/karasiq/bittorrent/dispatcher/PeerData.scala
Scala
apache-2.0
802
/** MACHINE-GENERATED FROM AVRO SCHEMA. DO NOT EDIT DIRECTLY */ package example.idl.java import other.ns.java.{ExternalDependency, Suit} sealed trait ImportProtocol extends Product with Serializable final case class DependentRecord(dependency: ExternalDependency, number: Int) extends ImportProtocol final case class DependentRecord2(dependency: Suit, name: String) extends ImportProtocol final case class DependentRecord3(dependency: Embedded, value: Boolean) extends ImportProtocol
julianpeeters/avrohugger
avrohugger-core/src/test/expected/standard/example/idl/java/ImportProtocol.scala
Scala
apache-2.0
488
class P { trait S1 val p = new P trait S2 { def f(x: p.S1): Int } } class P2 extends P { object O2 extends S2 { def f(x: S1) = 5 } }
yusuke2255/dotty
tests/untried/neg/abstract-class-2.scala
Scala
bsd-3-clause
155
package chee.cli import com.typesafe.config.{Config, ConfigFactory, ConfigRenderOptions} class ConfigCmd extends ScoptCommand { case class Opts( render: ConfigRenderOptions = ConfigRenderOptions.defaults().setOriginComments(false).setJson(false), all: Boolean = false ) type T = Opts val defaults = Opts() val name = "config" val parser = new Parser { opt[Unit]("origin") action { (_, c) => c.copy(render = c.render.setOriginComments(true)) } text ("Print comments showing the origin of the value.") opt[Unit]("json") action { (_, c) => c.copy(render = c.render.setJson(true).setComments(false)) } text ("Render the configuration in JSON.") opt[Unit]("all") action { (_, c) => c.copy(all = true) } text ("Show complete config, with system properties.") } def exec(cfg: Config, opts: Opts): Unit = { if (opts.all) outln(cfg.root().render(opts.render)) else { val c = ConfigFactory.empty().withValue("chee", cfg.getValue("chee")) outln(c.root().render(opts.render)) } } }
eikek/chee
src/main/scala/chee/cli/ConfigCmd.scala
Scala
gpl-3.0
1,072
package scales.report import scales.Coverage /** @author Stephen Samuel */ trait ScalesWriter { def write(coverage: Coverage) }
crvidya/scales
src/main/scala/scales/report/ScalesWriter.scala
Scala
apache-2.0
135
package org.jetbrains.plugins.scala package codeInspection package scaladoc import com.intellij.codeInspection._ import com.intellij.psi.{PsiElement, PsiElementVisitor, PsiErrorElement} import org.jetbrains.plugins.scala.lang.psi.api.ScalaPsiElement import org.jetbrains.plugins.scala.lang.psi.api.ScalaElementVisitor import org.jetbrains.plugins.scala.lang.scaladoc.psi.api.ScDocComment /** * User: Dmitry Naidanov * Date: 11/19/11 */ class ScalaDocParserErrorInspection extends LocalInspectionTool { override def isEnabledByDefault: Boolean = true override def buildVisitor(holder: ProblemsHolder, isOnTheFly: Boolean): PsiElementVisitor = { new ScalaElementVisitor { override def visitDocComment(s: ScDocComment): Unit = { visitScaladocElement(s) } override def visitScaladocElement(element: ScalaPsiElement): Unit = { for (child <- element.getChildren) { child match { case a: PsiErrorElement => val startElement: PsiElement = if (a.getPrevSibling == null) a else a.getPrevSibling val endElement: PsiElement = if (a.getPrevSibling != null) { a } else if (a.getNextSibling != null) { a.getNextSibling } else { a.getParent } //noinspection ReferencePassedToNls holder.registerProblem(holder.getManager.createProblemDescriptor(startElement, endElement, a.getErrorDescription, ProblemHighlightType.GENERIC_ERROR_OR_WARNING, isOnTheFly)); case b: ScalaPsiElement if b.getChildren.nonEmpty => visitScaladocElement(b) case _ => //do nothing } } } } } }
JetBrains/intellij-scala
scala/scala-impl/src/org/jetbrains/plugins/scala/codeInspection/scaladoc/ScalaDocParserErrorInspection.scala
Scala
apache-2.0
1,747
/************************************************************************** Copyright 2014 Allen Institute for Artificial Intelligence Foundation Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ****************************************************************************/ package org.allenai.ari.solvers.graphmatch.tools import com.tinkerpop.blueprints.impls.tg.TinkerGraph import com.tinkerpop.blueprints.{ Direction, Vertex } import scala.collection.JavaConversions._ import org.allenai.ari.solvers.graphmatch.graph.path.{ PathTrait, MultiNodePath, SingleNodePath } /** Created by TomK on 7/14/14. 
*/ object GraphToPhrase { def apply(g: TinkerGraph): String = NodesToPhrase(g.getVertices.toSeq) } object NodesToPhrase { def apply(nodes: Seq[Vertex]): String = { if (nodes.isEmpty) { "" } else { NodesToPhraseSeq(nodes).reduce(_ + " " + _) } } } object NodesToPhraseSeq { def apply(nodes: Seq[Vertex]): Seq[String] = { nodes.map(n => (n.getProperty("text"): String, n.getProperty("position"): Int)).sortBy[Int](_._2).map(p => p._1) } } object PathToPhraseSeq { def apply(path: PathTrait): Seq[String] = { path match { case path: SingleNodePath => Seq(path.outNode.getProperty("text").toString) case path: MultiNodePath => NodesToPhraseSeq(path.containedNodes) case _ => Seq.empty } } object PathToStemmedPhraseSeq { def apply(path: PathTrait): Seq[String] = { path match { case path: SingleNodePath => Seq(path.outNode.getProperty("stemmedtext").toString) case path: MultiNodePath => NodesToPhraseSeq(path.containedNodes) case _ => Seq.empty } } } } object RootToPhrase { def apply(root: Vertex): String = { getLabels(root).foldLeft("")((s, l) => s + " " + l._1).toString } def getLabels(root: Vertex): Seq[(String, Int)] = { val word = root.getProperty("text").toString val position: Int = root.getProperty("position") Seq((word, position)).++(root.getVertices(Direction.OUT).flatMap(v => getLabels(v))).sortBy(e => e._2) } } object GraphToWords { def apply(root: Vertex): Seq[String] = { root.getVertices(Direction.OUT).flatMap(v => getLabels(v)).toSeq } def getLabels(root: Vertex): Seq[String] = { val word = root.getProperty("text").toString Seq(word).++(root.getVertices(Direction.OUT).flatMap(v => getLabels(v))) } } object GraphToStemmedWords { def apply(root: Vertex): Seq[String] = { root.getVertices(Direction.OUT).flatMap(v => getLabels(v)).toSeq } def getLabels(root: Vertex): Seq[String] = { val word = root.getProperty("stemmedtext").toString Seq(word).++(root.getVertices(Direction.OUT).flatMap(v => getLabels(v))) } }
tomkwiat/dependency-graph-similarity-measure
src/main/scala/org/allenai/ari/solvers/graphmatch/tools/GraphToPhrase.scala
Scala
apache-2.0
3,245
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution.joins import org.apache.spark.annotation.DeveloperApi import org.apache.spark.rdd.RDD import org.apache.spark.sql.Row import org.apache.spark.sql.catalyst.expressions.{Attribute, JoinedRow} import org.apache.spark.sql.execution.{BinaryNode, SparkPlan} import org.apache.spark.sql.metric.SQLMetrics /** * :: DeveloperApi :: */ @DeveloperApi case class CartesianProduct(left: SparkPlan, right: SparkPlan) extends BinaryNode { override def output: Seq[Attribute] = left.output ++ right.output override private[sql] lazy val metrics = Map( "numLeftRows" -> SQLMetrics.createLongMetric(sparkContext, "number of left rows"), "numRightRows" -> SQLMetrics.createLongMetric(sparkContext, "number of right rows"), "numOutputRows" -> SQLMetrics.createLongMetric(sparkContext, "number of output rows")) protected override def doExecute(): RDD[Row] = { val numLeftRows = longMetric("numLeftRows") val numRightRows = longMetric("numRightRows") val numOutputRows = longMetric("numOutputRows") val leftResults = left.execute().map { row => numLeftRows += 1 row.copy() } val rightResults = right.execute().map { row => numRightRows += 1 row.copy() } 
leftResults.cartesian(rightResults).mapPartitions { iter => val joinedRow = new JoinedRow iter.map { r => numOutputRows += 1 joinedRow(r._1, r._2) } } } }
andrewor14/iolap
sql/core/src/main/scala/org/apache/spark/sql/execution/joins/CartesianProduct.scala
Scala
apache-2.0
2,251
package eu.shiftforward.icfpc2015.model case class UnitPos(unit: CellUnit, pos: Cell) { lazy val cells: Set[Cell] = { val cubedPos = pos.cube val cubedUnitPivot = unit.pivot.cube val (x, y, z) = (cubedPos.x - cubedUnitPivot.x, cubedPos.y - cubedUnitPivot.y, cubedPos.z - cubedUnitPivot.z) unit.members.map { cell => val cubedCell = cell.cube Cell(cubedCell.copy(cubedCell.x + x, cubedCell.y + y, cubedCell.z + z)) } } lazy val kernel: Set[Cell] = UnitPos(unit.kernel, pos).cells lazy val topRow: Int = { val (topLeft, _) = unit.boundingBox Math.max(0, pos.y - unit.pivot.row - topLeft.row) } }
ShiftForward/icfpc2015
src/main/scala/eu/shiftforward/icfpc2015/model/UnitPos.scala
Scala
mit
652
/* * Copyright (c) 2014-2020 by The Monix Project Developers. * See the project homepage at: https://monix.io * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package monix.reactive.internal.consumers import monix.execution.Callback import monix.execution.Scheduler import monix.execution.cancelables.AssignableCancelable import monix.reactive.observers.Subscriber import monix.reactive.{Consumer, Observable, Pipe} /** Implementation for [[monix.reactive.Consumer.transformInput]]. */ private[reactive] final class TransformInputConsumer[In2, -In, +R]( source: Consumer[In, R], f: Observable[In2] => Observable[In]) extends Consumer[In2, R] { def createSubscriber(cb: Callback[Throwable, R], s: Scheduler): (Subscriber[In2], AssignableCancelable) = { val (input1, conn) = source.createSubscriber(cb, s) val (input2, output1) = Pipe.publishToOne[In2].transform(f).unicast output1.unsafeSubscribeFn(input1) (Subscriber(input2, input1.scheduler), conn) } }
alexandru/monifu
monix-reactive/shared/src/main/scala/monix/reactive/internal/consumers/TransformInputConsumer.scala
Scala
apache-2.0
1,502
package org.coursera.naptime.ari.graphql.schema

import org.coursera.naptime.ResourceName

/** A problem detected while building the GraphQL schema for a Naptime resource.
  * Each concrete error carries a stable machine-readable `key` and a
  * human-readable `message`.
  */
sealed trait SchemaError {
  // The resource the error was detected on.
  def resourceName: ResourceName
  // Stable identifier suitable for programmatic matching / metrics.
  def key: String
  // Human-readable description of the problem.
  def message: String
}

/** The resource defines GET but lacks the MULTI_GET required for batching. */
case class HasGetButMissingMultiGet(resourceName: ResourceName) extends SchemaError {
  val key = "HAS_GET_BUT_MISSING_MULTIGET"
  val message = "Resource has GET handler, but no MULTI_GET is available."
}

/** No handlers at all were found on the resource. */
case class NoHandlersAvailable(resourceName: ResourceName) extends SchemaError {
  val key = "NO_HANDLERS_AVAILABLE"
  val message = "No handlers were detected on this resource."
}

/** The schemas.v1 endpoint did not provide a merged type for the resource. */
case class MissingMergedType(resourceName: ResourceName) extends SchemaError {
  val key = "MISSING_MERGED_TYPE"
  val message = "No mergedType was available from the schemas.v1 endpoint."
}

/** A forward relation exists on `fieldName` but no MULTI_GET handler backs it. */
case class HasForwardRelationButMissingMultiGet(resourceName: ResourceName, fieldName: String)
  extends SchemaError {
  val key = "HAS_FORWARD_RELATION_BUT_MISSING_MULTIGET"
  val message = s"There is a forward relation on $fieldName, but no MULTI_GET is available."
}

/** A handler type string was encountered that the schema builder does not know. */
case class UnknownHandlerType(resourceName: ResourceName, handlerType: String) extends SchemaError {
  val key = "UNKNOWN_HANDLER_TYPE"
  val message = s"A handler type of $handlerType was not expected."
}

/** No schema could be located when building a resource field. */
case class SchemaNotFound(resourceName: ResourceName) extends SchemaError {
  val key = "SCHEMA_NOT_FOUND"
  val message = "Could not find schema to build resource field."
}

/** A finder relation on `fieldName` is missing the mandatory `q` parameter. */
case class MissingQParameterOnFinderRelation(resourceName: ResourceName, fieldName: String)
  extends SchemaError {
  val key = "MISSING_Q_PARAMETER"
  val message = s"Cannot have a finder relation on field $fieldName without having a `q` parameter"
}

/** Catch-all wrapper for errors that have no dedicated case above. */
case class UnhandledSchemaError(resourceName: ResourceName, error: String) extends SchemaError {
  val key = "UNHANDLED_SCHEMA_ERROR"
  val message = s"Unhandled error: $error"
}

/** Immutable accumulator of schema errors; all operations return a new value. */
case class SchemaErrors(errors: List[SchemaError]) {
  /** Appends a plain list of errors. */
  def ++(that: List[SchemaError]): SchemaErrors = {
    copy(errors = errors ++ that)
  }

  /** Merges another accumulator's errors after this one's. */
  def ++(that: SchemaErrors): SchemaErrors = {
    copy(errors = errors ++ that.errors)
  }

  /** Appends a single error. */
  def +(error: SchemaError): SchemaErrors = {
    copy(errors = errors :+ error)
  }
}

object SchemaErrors {
  // Neutral element for error accumulation.
  val empty = SchemaErrors(List.empty)
}

/** Pairs a computed value with the errors accumulated while producing it. */
case class WithSchemaErrors[T](data: T, errors: SchemaErrors = SchemaErrors.empty)
coursera/naptime
naptime-graphql/src/main/scala/org/coursera/naptime/ari/graphql/schema/SchemaErrors.scala
Scala
apache-2.0
2,338
/*
 * Copyright 2016 The BigDL Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intel.analytics.bigdl.dllib.nn

import com.intel.analytics.bigdl.dllib.tensor.Tensor
import com.intel.analytics.bigdl.dllib.utils.serializer.ModuleSerializationTest

import scala.util.Random

/** Round-trip serialization test for the `Clamp` layer. */
class ClampSerialTest extends ModuleSerializationTest {
  override def test(): Unit = {
    // Module under test: clamps each element to [1, 10].
    val module = Clamp[Float](1, 10).setName("clamp")
    // Random 1-D input tensor of length 10.
    val data = Tensor[Float](10).apply1(_ => Random.nextFloat())
    runSerializationTest(module, data)
  }
}
intel-analytics/BigDL
scala/dllib/src/test/scala/com/intel/analytics/bigdl/dllib/nn/ClampSpec.scala
Scala
apache-2.0
1,059
package com.socrata.teaparty.components

import com.socrata.teaparty.{TeaPot, TeaType}
import com.socrata.teaparty.teadatabase.TeaDatabase

/** Cake-pattern component declaring a read-only view over available teas. */
trait TeaReaderComponent {
  type TeaReader <: TeaReaderLike

  trait TeaReaderLike {
    /** Resolves a tea type to a ready pot, if the tea is known. */
    def lookupTea(tea: TeaType): Option[TeaPot]
    /** All known tea types. */
    def list: List[TeaType]
  }

  def TeaReader(): TeaReader
}

/** Concrete wiring: reads teas out of a [[TeaDatabase]] mixed into the cake. */
trait TeaReaderFromDatabase extends TeaReaderComponent { self: TeaDatabase =>

  class TeaReader() extends TeaReaderLike {
    def lookupTea(tea: TeaType): Option[TeaPot] =
      self.lookup(tea).map(temperature => TeaPot(tea, temperature))

    def list: List[TeaType] = self.list
  }

  def TeaReader() = new TeaReader()
}
socrata-platform/socrata-httparty
src/main/scala/com/socrata/teaparty/components/TeaReaderComponent.scala
Scala
mit
697
package com.github.sstone.amqp.samples

import akka.pattern.ask
import akka.actor.{Actor, Props, ActorSystem}
import akka.util.Timeout
import com.github.sstone.amqp._
import com.github.sstone.amqp.Amqp._
import com.github.sstone.amqp.RpcServer.{ProcessResult, IProcessor}
import com.github.sstone.amqp.RpcClient.Request
import com.rabbitmq.client.ConnectionFactory
import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Failure, Success}

/** Sample: one-to-any RPC over AMQP.
  *
  * Several equivalent RPC servers consume from the same queue; each request
  * published by the client is handled by exactly one of them (classic
  * "work queue" / competing-consumers pattern).
  *
  * Fix vs. original: the import block contained `RpcServer.ProcessResult`,
  * `scala.util.Success` and `scala.util.Failure` twice each, a pointless
  * `import scala.Some`, and several single-member imports already covered by
  * the `Amqp._` wildcard; the duplicates have been removed.
  */
object OneToAnyRpc extends App {
  import ExecutionContext.Implicits.global

  // typical "work queue" pattern, where a job can be picked up by any running node
  implicit val system = ActorSystem("mySystem")

  // create an AMQP connection
  val connFactory = new ConnectionFactory()
  connFactory.setUri("amqp://guest:guest@localhost/%2F")
  val conn = system.actorOf(ConnectionOwner.props(connFactory, 1 second))

  val queueParams = QueueParameters("my_queue", passive = false, durable = false, exclusive = false, autodelete = true)

  // create 2 equivalent servers
  val rpcServers = for (i <- 1 to 2) yield {
    // create a "processor"
    // in real life you would use a serialization framework (json, protobuf, ....), define command messages, etc...
    // check the Akka AMQP proxies project for examples
    val processor = new IProcessor {
      def process(delivery: Delivery) = {
        // assume that the message body is a string
        val response = "response to " + new String(delivery.body)
        Future(ProcessResult(Some(response.getBytes)))
      }
      def onFailure(delivery: Delivery, e: Throwable) = ProcessResult(None) // we don't return anything
    }
    ConnectionOwner.createChildActor(conn, RpcServer.props(queueParams, StandardExchanges.amqDirect, "my_key", processor, ChannelParameters(qos = 1)))
  }
  val rpcClient = ConnectionOwner.createChildActor(conn, RpcClient.props())

  // wait till everyone is actually connected to the broker
  Amqp.waitForConnection(system, rpcServers: _*).await()
  Amqp.waitForConnection(system, rpcClient).await()

  implicit val timeout: Timeout = 2 seconds

  for (i <- 0 to 5) {
    val request = ("request " + i).getBytes
    val f = (rpcClient ? Request(List(Publish("amq.direct", "my_key", request)))).mapTo[RpcClient.Response]
    f.onComplete {
      case Success(response) => println(new String(response.deliveries.head.body))
      case Failure(error) => println(error)
    }
  }

  // wait 10 seconds and shut down
  Thread.sleep(10000)
  system.shutdown()
}
sstone/amqp-client
src/main/scala/com/github/sstone/amqp/samples/OneToAnyRpc.scala
Scala
mit
2,830
package mr.merc.politics

import mr.merc.economics.{Population, WorldConstants, WorldStateParliamentActions}
import mr.merc.log.Logging
import mr.merc.politics.PoliticsAi.{ChangeParty, GiveUpPower, PoliticsAiAction, UsurpPower}
import mr.merc.politics.Regime.{Absolute, Constitutional, Democracy}

/** AI that decides whether a state should change its ruling party and, if so,
  * which kind of transition (peaceful change, giving up power, or usurpation)
  * is required to install the new party's regime.
  */
class PoliticsAi(state: State, worldState: WorldStateParliamentActions) extends Logging {

  /** Returns the action to perform, if any.
    * A party change is considered only when happiness with the current party
    * falls below a world-constant threshold; the transition kind is derived
    * from the (current regime, new regime) pair.
    */
  def changePoliticalSystemIfNeeded(): Option[PoliticsAiAction] = {
    if (needToChangeParty()) {
      selectBestPossibleParty().flatMap { newParty =>
        (state.politicalSystem.rulingParty.regime, newParty.regime) match {
          // Same regime: a plain party swap suffices.
          case (Absolute, Absolute) => Some(ChangeParty(state, newParty))
          // Liberalization: the ruler relinquishes power.
          case (Absolute, Constitutional) | (Constitutional, Democracy) => Some(GiveUpPower(state, newParty))
          // Regression: the new party seizes power.
          case (Constitutional, Absolute) | (Democracy, Constitutional) => Some(UsurpPower(state, newParty))
          // Any other pair is more than one regime step away; log and do nothing.
          case (x, y) =>
            warn(s"impossible to change $x to $y for parties ${state.politicalSystem.rulingParty} and $newParty")
            None
        }
      }
    } else None
  }

  // True when the current ruling party's happiness is below the change threshold.
  private def needToChangeParty(): Boolean = {
    politicalHappiness(state.rulingParty) < WorldConstants.AI.PoliticalHappinessToChangeParty
  }

  // Population-weighted average happiness with `party` across all pops of the state.
  private def politicalHappiness(party: Party): Double = {
    val popsHappiness = worldState.states(state).flatMap(_.regionPopulation.popsList).map(
      p => p.populationCount -> politicalHappinessByPop(p, party))
    val totalPopulation = popsHappiness.map(_._1).sum
    popsHappiness.map { case (count, happy) =>
      count * happy / totalPopulation
    }.sum
  }

  // Happiness of a single pop with `party`: 1 minus the (weighted) political
  // distance between the party's position and the pop's points of view,
  // scaled by a world-constant disagreement multiplier. Literacy selects
  // which set of views the pop currently holds.
  private def politicalHappinessByPop(population: Population, party: Party): Double = {
    val popPositions = population.politicalViews.currentViews(population.literacy).pointsOfView
    val partyPos = party.politicalPosition
    1d - popPositions.map { case (popPos, v) =>
      partyPos.diffWithPosition(popPos) * v
    }.sum * WorldConstants.Population.PoliticalHappinessDisagreementMultiplier
  }

  // Candidate parties: the current party (kept only under Constitutional /
  // Democracy) plus parties of regimes reachable in one transition step.
  private def possibleParties: List[Party] = state.rulingParty.regime match {
    case Regime.Absolute => Party.RegimeParties(Absolute) ++ Party.RegimeParties(Constitutional)
    case Regime.Constitutional => state.rulingParty :: Party.RegimeParties(Absolute) ++ Party.RegimeParties(Democracy)
    case Regime.Democracy => state.rulingParty :: Party.RegimeParties(Constitutional)
  }

  // Best candidate by projected happiness; accepted only when above the
  // new-party threshold. `possibleParties` is never empty, so maxBy is safe.
  private def selectBestPossibleParty(): Option[Party] = {
    val (party, happy) = possibleParties.map(p => p -> politicalHappiness(p)).maxBy(_._2)
    if (happy > WorldConstants.AI.PoliticalHappinessForNewParty) {
      Some(party)
    } else None
  }
}

object PoliticsAi {

  /** An action the AI can take; execution is deferred to the world state. */
  sealed trait PoliticsAiAction {
    def executePoliticsAction(actions: WorldStateParliamentActions): Unit
  }

  /** Forcible regime regression in favor of `newParty`. */
  case class UsurpPower(state: State, newParty: Party) extends PoliticsAiAction {
    override def executePoliticsAction(actions: WorldStateParliamentActions): Unit = {
      actions.usurpPower(state, newParty)
    }
  }

  /** Voluntary liberalization in favor of `newParty`. */
  case class GiveUpPower(state: State, newParty: Party) extends PoliticsAiAction {
    override def executePoliticsAction(actions: WorldStateParliamentActions): Unit = {
      actions.giveUpPower(state, newParty)
    }
  }

  /** Party swap within the same regime. */
  case class ChangeParty(state: State, newParty: Party) extends PoliticsAiAction {
    override def executePoliticsAction(actions: WorldStateParliamentActions): Unit = {
      actions.changeRulingParty(state, newParty)
    }
  }
}
RenualdMarch/merc
src/main/scala/mr/merc/politics/PoliticsAi.scala
Scala
gpl-3.0
3,523
package com.sksamuel.elastic4s.http.search.queries.specialized

import com.sksamuel.elastic4s.json.XContentBuilder
import com.sksamuel.elastic4s.searches.queries.funcscorer._

/** Dispatches a [[ScoreFunction]] to the builder that renders it as JSON
  * content for an Elasticsearch `function_score` query.
  *
  * NOTE(review): there is no catch-all case, so this match assumes
  * `ScoreFunction` has exactly these subtypes (presumably sealed — confirm);
  * adding a new subtype without extending this match would throw a
  * `MatchError` at runtime.
  */
object ScoreFunctionBuilderFn {
  def apply(func: ScoreFunction): XContentBuilder = func match {
    case r: RandomScoreFunction => RandomScoreFunctionBuilderFn(r)
    case g: GaussianDecayScore => GaussianDecayScoreBuilderFn(g)
    case s: ScriptScore => ScriptScoreBuilderFn(s)
    case f: FieldValueFactor => FieldValueFactorBuilderFn(f)
    case e: ExponentialDecayScore => ExponentialDecayScoreBuilderFn(e)
    case w: WeightScore => WeightBuilderFn(w)
    case l: LinearDecayScore => LinearDecayScoreBuilderFn(l)
  }
}
Tecsisa/elastic4s
elastic4s-http/src/main/scala/com/sksamuel/elastic4s/http/search/queries/specialized/ScoreFunctionBuilderFn.scala
Scala
apache-2.0
756
package org.jetbrains.plugins.scala.codeInspection.methodSignature.quickfix

import com.intellij.openapi.project.Project
import com.intellij.codeInspection.ProblemDescriptor
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScMethodCall
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory
import org.jetbrains.plugins.scala.codeInspection.AbstractFix

/**
 * Quick fix that rewrites a call like `foo()` to `foo`, i.e. replaces the
 * whole method-call PSI element with just its invoked expression, dropping
 * the empty parentheses.
 *
 * Fix vs. original: `doApplyFix` used deprecated procedure syntax
 * (`def f(...) { ... }`); it now declares `: Unit =` explicitly.
 *
 * Pavel Fatin
 */
class RemoveCallParentheses(call: ScMethodCall) extends AbstractFix("Remove call parentheses", call) {
  def doApplyFix(project: Project, descriptor: ProblemDescriptor): Unit = {
    // Text of the expression being invoked, without the trailing `()`.
    val text = call.getInvokedExpr.getText
    // Build a fresh PSI expression from that text and swap it in.
    val exp = ScalaPsiElementFactory.createExpressionFromText(text, call.getManager)
    call.replace(exp)
  }
}
consulo/consulo-scala
src/org/jetbrains/plugins/scala/codeInspection/methodSignature/quickfix/RemoveCallParentheses.scala
Scala
apache-2.0
726
/*
 * Copyright 1998-2016 Linux.org.ru
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ru.org.linux.tag

/** Aggregate information about a single tag.
  *
  * @param name       the tag's text
  * @param topicCount number of topics carrying this tag
  * @param id         database identifier of the tag
  */
case class TagInfo(name: String, topicCount: Int, id: Int)
kloun/lorsource
src/main/scala/ru/org/linux/tag/TagInfo.scala
Scala
apache-2.0
708
/**
 * Copyright (C) 2015 Stratio (http://stratio.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.stratio.crossdata.connector.elasticsearch

import com.sksamuel.elastic4s.{ElasticsearchClientUri, ElasticClient}
import com.stratio.crossdata.connector.TableInventory.Table
import com.stratio.crossdata.connector.elasticsearch.DefaultSource._
import org.apache.spark.sql.types._
import org.elasticsearch.client.IndicesAdminClient
import org.elasticsearch.cluster.metadata.MappingMetaData
import org.elasticsearch.common.collect.ImmutableOpenMap
import org.elasticsearch.common.settings.ImmutableSettings
import org.elasticsearch.hadoop.cfg.ConfigurationOptions._

/** Helpers for connecting to Elasticsearch and discovering its index/type
  * catalog as Crossdata tables.
  *
  * Fixes vs. original: `listAllIndexTypes` used the postfix operator form
  * `} toSeq` (requires `scala.language.postfixOps` and warns/errors on modern
  * compilers) — now a normal method call; a stray trailing semicolon in
  * `buildStructType` was removed.
  */
object ElasticSearchConnectionUtils {

  /** Builds a remote [[ElasticClient]] from the connector options
    * (node host, native port and cluster name). */
  def buildClient(parameters: Map[String, String]): ElasticClient = {
    val host: String = parameters.getOrElse(ES_NODES, ES_NODES_DEFAULT)
    // TODO support for multiple host, no documentation found with expected format.
    val port: Int = parameters.getOrElse(ElasticNativePort, "9300").toInt
    val clusterName = parameters(ElasticCluster)
    val uri = ElasticsearchClientUri(s"elasticsearch://$host:$port")
    val settings = ImmutableSettings.settingsBuilder().put("cluster.name", clusterName).build()
    ElasticClient.remote(settings, uri)
  }

  /** Splits the `es.resource` option ("index/type") into its two parts,
    * when the option is present. Fails fast on a malformed value. */
  def extractIndexAndType(options: Map[String, String]): Option[(String, String)] = {
    options.get(ES_RESOURCE).map { indexType =>
      val indexTypeArray = indexType.split("/")
      require(indexTypeArray.size == 2, s"$ES_RESOURCE option has an invalid format")
      (indexTypeArray(0), indexTypeArray(1))
    }
  }

  /** Lists ES types as tables: scoped to the configured index/type when one
    * is given, otherwise across all indexes of the cluster. */
  def listTypes(options: Map[String, String]): Seq[Table] = {
    val adminClient = buildClient(options).admin.indices()
    val indexType: Option[(String, String)] = extractIndexAndType(options)
    val index = indexType.map(_._1).orElse(options.get(ElasticIndex))
    index.fold(listAllIndexTypes(adminClient)) { indexName =>
      listIndexTypes(adminClient, indexName, indexType.map(_._2))
    }
  }

  // NOTE(review): JavaConversions supplies the implicit Java<->Scala wrapping
  // used below; it is deprecated in newer Scala versions, but replacing it
  // with explicit JavaConverters would change every conversion site, so it is
  // deliberately kept as-is here.
  import collection.JavaConversions._

  // All (index, type) pairs of the cluster, as tables.
  private def listAllIndexTypes(adminClient: IndicesAdminClient): Seq[Table] = {
    val mappings: ImmutableOpenMap[String, ImmutableOpenMap[String, MappingMetaData]] =
      adminClient.prepareGetIndex().get().mappings
    mappings.keys().flatMap { index =>
      getIndexDetails(index.value, mappings.get(index.value))
    }.toSeq
  }

  // Types of one index (optionally restricted to a single type), as tables.
  private def listIndexTypes(adminClient: IndicesAdminClient,
                             indexName: String,
                             typeName: Option[String] = None): Seq[Table] = {
    val elasticBuilder = adminClient.prepareGetIndex().addIndices(indexName)
    val elasticBuilderWithTypes = typeName.fold(elasticBuilder)(elasticBuilder.addTypes(_))
    val mappings: ImmutableOpenMap[String, ImmutableOpenMap[String, MappingMetaData]] =
      elasticBuilderWithTypes.get().mappings
    getIndexDetails(indexName, mappings.get(indexName))
  }

  // Turns every type mapping of an index into a Table with a derived schema.
  private def getIndexDetails(indexName: String,
                              indexData: ImmutableOpenMap[String, MappingMetaData]): Seq[Table] = {
    indexData.keys().map(typeES =>
      new Table(typeES.value, Some(indexName),
        Some(buildStructType(indexData.get(typeES.value))))).toSeq
  }

  // Maps an ES field type name to the corresponding Spark SQL DataType.
  private def convertType(typeName: String): DataType = {
    typeName match {
      case "string" => StringType
      case "integer" => IntegerType
      case "date" => DateType
      case "boolean" => BooleanType
      case "double" => DoubleType
      case "long" => LongType
      case "float" => FloatType
      case "null" => NullType
      case _ =>
        throw new RuntimeException(s"The type $typeName isn't supported yet in Elasticsearch connector.")
    }
  }

  // Builds a Spark StructType from the "properties" section of a type mapping.
  // All fields are created as non-nullable, mirroring the original behavior.
  private def buildStructType(mapping: MappingMetaData): StructType = {
    val esFields = mapping.sourceAsMap().get("properties")
      .asInstanceOf[java.util.LinkedHashMap[String, java.util.LinkedHashMap[String, String]]].toMap
    val fields: Seq[StructField] = esFields.map {
      case (colName, propertyValueMap) =>
        StructField(colName, convertType(propertyValueMap.get("type")), false)
    }(collection.breakOut)
    StructType(fields)
  }
}
luismcl/crossdata
elasticsearch/src/main/scala/com/stratio/crossdata/connector/elasticsearch/ElasticSearchConnectionUtils.scala
Scala
apache-2.0
4,563
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package kafka.coordinator.group import kafka.common.OffsetAndMetadata import kafka.server.{DelayedOperationPurgatory, KafkaConfig, ReplicaManager} import kafka.utils._ import kafka.utils.timer.MockTimer import org.apache.kafka.common.TopicPartition import org.apache.kafka.common.protocol.Errors import org.apache.kafka.common.record.{MemoryRecords, RecordBatch} import org.apache.kafka.common.requests.ProduceResponse.PartitionResponse import org.apache.kafka.common.requests.{JoinGroupRequest, OffsetCommitRequest, OffsetFetchResponse, TransactionResult} import org.easymock.{Capture, EasyMock, IAnswer} import java.util.concurrent.TimeUnit import java.util.concurrent.locks.ReentrantLock import kafka.zk.KafkaZkClient import org.apache.kafka.common.internals.Topic import org.junit.Assert._ import org.junit.{After, Assert, Before, Test} import org.scalatest.junit.JUnitSuite import scala.collection._ import scala.concurrent.duration.Duration import scala.concurrent.{Await, Future, Promise, TimeoutException} class GroupCoordinatorTest extends JUnitSuite { type JoinGroupCallback = JoinGroupResult => Unit type SyncGroupCallbackParams = (Array[Byte], Errors) type SyncGroupCallback = (Array[Byte], Errors) => 
Unit type HeartbeatCallbackParams = Errors type HeartbeatCallback = Errors => Unit type CommitOffsetCallbackParams = Map[TopicPartition, Errors] type CommitOffsetCallback = Map[TopicPartition, Errors] => Unit type LeaveGroupCallbackParams = Errors type LeaveGroupCallback = Errors => Unit val ClientId = "consumer-test" val ClientHost = "localhost" val ConsumerMinSessionTimeout = 10 val ConsumerMaxSessionTimeout = 1000 val DefaultRebalanceTimeout = 500 val DefaultSessionTimeout = 500 val GroupInitialRebalanceDelay = 50 var timer: MockTimer = null var groupCoordinator: GroupCoordinator = null var replicaManager: ReplicaManager = null var scheduler: KafkaScheduler = null var zkClient: KafkaZkClient = null private val groupId = "groupId" private val protocolType = "consumer" private val memberId = "memberId" private val metadata = Array[Byte]() private val protocols = List(("range", metadata)) private var groupPartitionId: Int = -1 // we use this string value since its hashcode % #.partitions is different private val otherGroupId = "otherGroup" @Before def setUp() { val props = TestUtils.createBrokerConfig(nodeId = 0, zkConnect = "") props.setProperty(KafkaConfig.GroupMinSessionTimeoutMsProp, ConsumerMinSessionTimeout.toString) props.setProperty(KafkaConfig.GroupMaxSessionTimeoutMsProp, ConsumerMaxSessionTimeout.toString) props.setProperty(KafkaConfig.GroupInitialRebalanceDelayMsProp, GroupInitialRebalanceDelay.toString) // make two partitions of the group topic to make sure some partitions are not owned by the coordinator val ret = mutable.Map[String, Map[Int, Seq[Int]]]() ret += (Topic.GROUP_METADATA_TOPIC_NAME -> Map(0 -> Seq(1), 1 -> Seq(1))) replicaManager = EasyMock.createNiceMock(classOf[ReplicaManager]) zkClient = EasyMock.createNiceMock(classOf[KafkaZkClient]) // make two partitions of the group topic to make sure some partitions are not owned by the coordinator EasyMock.expect(zkClient.getTopicPartitionCount(Topic.GROUP_METADATA_TOPIC_NAME)).andReturn(Some(2)) 
EasyMock.replay(zkClient) timer = new MockTimer val config = KafkaConfig.fromProps(props) val heartbeatPurgatory = new DelayedOperationPurgatory[DelayedHeartbeat]("Heartbeat", timer, config.brokerId, reaperEnabled = false) val joinPurgatory = new DelayedOperationPurgatory[DelayedJoin]("Rebalance", timer, config.brokerId, reaperEnabled = false) groupCoordinator = GroupCoordinator(config, zkClient, replicaManager, heartbeatPurgatory, joinPurgatory, timer.time) groupCoordinator.startup(false) // add the partition into the owned partition list groupPartitionId = groupCoordinator.partitionFor(groupId) groupCoordinator.groupManager.addPartitionOwnership(groupPartitionId) } @After def tearDown() { EasyMock.reset(replicaManager) if (groupCoordinator != null) groupCoordinator.shutdown() } @Test def testOffsetsRetentionMsIntegerOverflow() { val props = TestUtils.createBrokerConfig(nodeId = 0, zkConnect = "") props.setProperty(KafkaConfig.OffsetsRetentionMinutesProp, Integer.MAX_VALUE.toString) val config = KafkaConfig.fromProps(props) val offsetConfig = GroupCoordinator.offsetConfig(config) assertEquals(offsetConfig.offsetsRetentionMs, Integer.MAX_VALUE * 60L * 1000L) } @Test def testJoinGroupWrongCoordinator() { val memberId = JoinGroupRequest.UNKNOWN_MEMBER_ID val joinGroupResult = joinGroup(otherGroupId, memberId, protocolType, protocols) val joinGroupError = joinGroupResult.error assertEquals(Errors.NOT_COORDINATOR, joinGroupError) } @Test def testJoinGroupSessionTimeoutTooSmall() { val memberId = JoinGroupRequest.UNKNOWN_MEMBER_ID val joinGroupResult = joinGroup(groupId, memberId, protocolType, protocols, sessionTimeout = ConsumerMinSessionTimeout - 1) val joinGroupError = joinGroupResult.error assertEquals(Errors.INVALID_SESSION_TIMEOUT, joinGroupError) } @Test def testJoinGroupSessionTimeoutTooLarge() { val memberId = JoinGroupRequest.UNKNOWN_MEMBER_ID val joinGroupResult = joinGroup(groupId, memberId, protocolType, protocols, sessionTimeout = 
ConsumerMaxSessionTimeout + 1) val joinGroupError = joinGroupResult.error assertEquals(Errors.INVALID_SESSION_TIMEOUT, joinGroupError) } @Test def testJoinGroupUnknownConsumerNewGroup() { val joinGroupResult = joinGroup(groupId, memberId, protocolType, protocols) val joinGroupError = joinGroupResult.error assertEquals(Errors.UNKNOWN_MEMBER_ID, joinGroupError) } @Test def testInvalidGroupId() { val groupId = "" val memberId = JoinGroupRequest.UNKNOWN_MEMBER_ID val joinGroupResult = joinGroup(groupId, memberId, protocolType, protocols) assertEquals(Errors.INVALID_GROUP_ID, joinGroupResult.error) } @Test def testValidJoinGroup() { val memberId = JoinGroupRequest.UNKNOWN_MEMBER_ID val joinGroupResult = joinGroup(groupId, memberId, protocolType, protocols) val joinGroupError = joinGroupResult.error assertEquals(Errors.NONE, joinGroupError) } @Test def testJoinGroupInconsistentProtocolType() { val memberId = JoinGroupRequest.UNKNOWN_MEMBER_ID val otherMemberId = JoinGroupRequest.UNKNOWN_MEMBER_ID val joinGroupResult = joinGroup(groupId, memberId, protocolType, protocols) assertEquals(Errors.NONE, joinGroupResult.error) EasyMock.reset(replicaManager) val otherJoinGroupResult = await(sendJoinGroup(groupId, otherMemberId, "connect", protocols), 1) assertEquals(Errors.INCONSISTENT_GROUP_PROTOCOL, otherJoinGroupResult.error) } @Test def testJoinGroupWithEmptyProtocolType() { val memberId = JoinGroupRequest.UNKNOWN_MEMBER_ID val joinGroupResult = joinGroup(groupId, memberId, "", protocols) assertEquals(Errors.INCONSISTENT_GROUP_PROTOCOL, joinGroupResult.error) } @Test def testJoinGroupWithEmptyGroupProtocol() { val memberId = JoinGroupRequest.UNKNOWN_MEMBER_ID val joinGroupResult = joinGroup(groupId, memberId, protocolType, List()) assertEquals(Errors.INCONSISTENT_GROUP_PROTOCOL, joinGroupResult.error) } @Test def testJoinGroupInconsistentGroupProtocol() { val memberId = JoinGroupRequest.UNKNOWN_MEMBER_ID val otherMemberId = JoinGroupRequest.UNKNOWN_MEMBER_ID val 
joinGroupFuture = sendJoinGroup(groupId, memberId, protocolType, List(("range", metadata))) EasyMock.reset(replicaManager) val otherJoinGroupResult = joinGroup(groupId, otherMemberId, protocolType, List(("roundrobin", metadata))) val joinGroupResult = await(joinGroupFuture, 1) assertEquals(Errors.NONE, joinGroupResult.error) assertEquals(Errors.INCONSISTENT_GROUP_PROTOCOL, otherJoinGroupResult.error) } @Test def testJoinGroupUnknownConsumerExistingGroup() { val memberId = JoinGroupRequest.UNKNOWN_MEMBER_ID val otherMemberId = "memberId" val joinGroupResult = joinGroup(groupId, memberId, protocolType, protocols) assertEquals(Errors.NONE, joinGroupResult.error) EasyMock.reset(replicaManager) val otherJoinGroupResult = await(sendJoinGroup(groupId, otherMemberId, protocolType, protocols), 1) assertEquals(Errors.UNKNOWN_MEMBER_ID, otherJoinGroupResult.error) } @Test def testHeartbeatWrongCoordinator() { val heartbeatResult = heartbeat(otherGroupId, memberId, -1) assertEquals(Errors.NOT_COORDINATOR, heartbeatResult) } @Test def testHeartbeatUnknownGroup() { val heartbeatResult = heartbeat(groupId, memberId, -1) assertEquals(Errors.UNKNOWN_MEMBER_ID, heartbeatResult) } @Test def testHeartbeatUnknownConsumerExistingGroup() { val memberId = JoinGroupRequest.UNKNOWN_MEMBER_ID val otherMemberId = "memberId" val joinGroupResult = joinGroup(groupId, memberId, protocolType, protocols) val assignedMemberId = joinGroupResult.memberId val joinGroupError = joinGroupResult.error assertEquals(Errors.NONE, joinGroupError) EasyMock.reset(replicaManager) val syncGroupResult = syncGroupLeader(groupId, joinGroupResult.generationId, assignedMemberId, Map(assignedMemberId -> Array[Byte]())) val syncGroupError = syncGroupResult._2 assertEquals(Errors.NONE, syncGroupError) EasyMock.reset(replicaManager) val heartbeatResult = heartbeat(groupId, otherMemberId, 1) assertEquals(Errors.UNKNOWN_MEMBER_ID, heartbeatResult) } @Test def testHeartbeatRebalanceInProgress() { val memberId = 
JoinGroupRequest.UNKNOWN_MEMBER_ID val joinGroupResult = joinGroup(groupId, memberId, protocolType, protocols) val assignedMemberId = joinGroupResult.memberId val joinGroupError = joinGroupResult.error assertEquals(Errors.NONE, joinGroupError) EasyMock.reset(replicaManager) val heartbeatResult = heartbeat(groupId, assignedMemberId, 2) assertEquals(Errors.REBALANCE_IN_PROGRESS, heartbeatResult) } @Test def testHeartbeatIllegalGeneration() { val memberId = JoinGroupRequest.UNKNOWN_MEMBER_ID val joinGroupResult = joinGroup(groupId, memberId, protocolType, protocols) val assignedMemberId = joinGroupResult.memberId val joinGroupError = joinGroupResult.error assertEquals(Errors.NONE, joinGroupError) EasyMock.reset(replicaManager) val syncGroupResult = syncGroupLeader(groupId, joinGroupResult.generationId, assignedMemberId, Map(assignedMemberId -> Array[Byte]())) val syncGroupError = syncGroupResult._2 assertEquals(Errors.NONE, syncGroupError) EasyMock.reset(replicaManager) val heartbeatResult = heartbeat(groupId, assignedMemberId, 2) assertEquals(Errors.ILLEGAL_GENERATION, heartbeatResult) } @Test def testValidHeartbeat() { val memberId = JoinGroupRequest.UNKNOWN_MEMBER_ID val joinGroupResult = joinGroup(groupId, memberId, protocolType, protocols) val assignedConsumerId = joinGroupResult.memberId val generationId = joinGroupResult.generationId val joinGroupError = joinGroupResult.error assertEquals(Errors.NONE, joinGroupError) EasyMock.reset(replicaManager) val syncGroupResult = syncGroupLeader(groupId, generationId, assignedConsumerId, Map(assignedConsumerId -> Array[Byte]())) val syncGroupError = syncGroupResult._2 assertEquals(Errors.NONE, syncGroupError) EasyMock.reset(replicaManager) val heartbeatResult = heartbeat(groupId, assignedConsumerId, 1) assertEquals(Errors.NONE, heartbeatResult) } @Test def testSessionTimeout() { val memberId = JoinGroupRequest.UNKNOWN_MEMBER_ID val joinGroupResult = joinGroup(groupId, memberId, protocolType, protocols) val 
assignedConsumerId = joinGroupResult.memberId
    val generationId = joinGroupResult.generationId
    val joinGroupError = joinGroupResult.error
    assertEquals(Errors.NONE, joinGroupError)

    EasyMock.reset(replicaManager)
    val (_, syncGroupError) = syncGroupLeader(groupId, generationId, assignedConsumerId, Map(assignedConsumerId -> Array[Byte]()))
    assertEquals(Errors.NONE, syncGroupError)

    // Expiring the session causes the coordinator to persist the updated group
    // metadata, so stub out the offsets-topic partition lookup and magic value.
    EasyMock.reset(replicaManager)
    EasyMock.expect(replicaManager.getPartition(new TopicPartition(Topic.GROUP_METADATA_TOPIC_NAME, groupPartitionId))).andReturn(None)
    EasyMock.expect(replicaManager.getMagic(EasyMock.anyObject())).andReturn(Some(RecordBatch.MAGIC_VALUE_V1)).anyTimes()
    EasyMock.replay(replicaManager)
    timer.advanceClock(DefaultSessionTimeout + 100)

    // After the session timeout the member should have been removed,
    // so a heartbeat from it is rejected.
    EasyMock.reset(replicaManager)
    val heartbeatResult = heartbeat(groupId, assignedConsumerId, 1)
    assertEquals(Errors.UNKNOWN_MEMBER_ID, heartbeatResult)
  }

  // Heartbeats sent within the session timeout should keep the member alive.
  @Test
  def testHeartbeatMaintainsSession() {
    val memberId = JoinGroupRequest.UNKNOWN_MEMBER_ID
    val sessionTimeout = 1000
    val joinGroupResult = joinGroup(groupId, memberId, protocolType, protocols, rebalanceTimeout = sessionTimeout, sessionTimeout = sessionTimeout)
    val assignedConsumerId = joinGroupResult.memberId
    val generationId = joinGroupResult.generationId
    val joinGroupError = joinGroupResult.error
    assertEquals(Errors.NONE, joinGroupError)

    EasyMock.reset(replicaManager)
    val (_, syncGroupError) = syncGroupLeader(groupId, generationId, assignedConsumerId, Map(assignedConsumerId -> Array[Byte]()))
    assertEquals(Errors.NONE, syncGroupError)

    // Two heartbeats, each inside the session timeout window, should both succeed.
    timer.advanceClock(sessionTimeout / 2)

    EasyMock.reset(replicaManager)
    var heartbeatResult = heartbeat(groupId, assignedConsumerId, 1)
    assertEquals(Errors.NONE, heartbeatResult)

    timer.advanceClock(sessionTimeout / 2 + 100)

    EasyMock.reset(replicaManager)
    heartbeatResult = heartbeat(groupId, assignedConsumerId, 1)
    assertEquals(Errors.NONE, heartbeatResult)
  }

  // An offset commit should act like a heartbeat and keep the session alive.
  @Test
  def testCommitMaintainsSession() {
    val sessionTimeout = 1000
    val memberId = JoinGroupRequest.UNKNOWN_MEMBER_ID
    val tp = new TopicPartition("topic", 0)
    val offset = OffsetAndMetadata(0)

    val joinGroupResult = joinGroup(groupId, memberId, protocolType, protocols, rebalanceTimeout = sessionTimeout, sessionTimeout = sessionTimeout)
    val assignedConsumerId = joinGroupResult.memberId
    val generationId = joinGroupResult.generationId
    val joinGroupError = joinGroupResult.error
    assertEquals(Errors.NONE, joinGroupError)

    EasyMock.reset(replicaManager)
    val (_, syncGroupError) = syncGroupLeader(groupId, generationId, assignedConsumerId, Map(assignedConsumerId -> Array[Byte]()))
    assertEquals(Errors.NONE, syncGroupError)

    timer.advanceClock(sessionTimeout / 2)

    // Committing offsets half-way through the session should reset the timer...
    EasyMock.reset(replicaManager)
    val commitOffsetResult = commitOffsets(groupId, assignedConsumerId, generationId, immutable.Map(tp -> offset))
    assertEquals(Errors.NONE, commitOffsetResult(tp))

    // ...so a heartbeat after the original deadline still succeeds.
    timer.advanceClock(sessionTimeout / 2 + 100)

    EasyMock.reset(replicaManager)
    val heartbeatResult = heartbeat(groupId, assignedConsumerId, 1)
    assertEquals(Errors.NONE, heartbeatResult)
  }

  // A member that stops heartbeating during a rebalance should be evicted,
  // and the rebalance should complete with the remaining member only.
  @Test
  def testSessionTimeoutDuringRebalance() {
    // create a group with a single member
    val firstJoinResult = joinGroup(groupId, JoinGroupRequest.UNKNOWN_MEMBER_ID, protocolType, protocols, rebalanceTimeout = 2000, sessionTimeout = 1000)
    val firstMemberId = firstJoinResult.memberId
    val firstGenerationId = firstJoinResult.generationId
    assertEquals(firstMemberId, firstJoinResult.leaderId)
    assertEquals(Errors.NONE, firstJoinResult.error)

    EasyMock.reset(replicaManager)
    val firstSyncResult = syncGroupLeader(groupId, firstGenerationId, firstMemberId, Map(firstMemberId -> Array[Byte]()))
    assertEquals(Errors.NONE, firstSyncResult._2)

    // now have a new member join to trigger a rebalance
    EasyMock.reset(replicaManager)
    val otherJoinFuture = sendJoinGroup(groupId, JoinGroupRequest.UNKNOWN_MEMBER_ID, protocolType, protocols)

    timer.advanceClock(500)

    EasyMock.reset(replicaManager)
    var heartbeatResult = heartbeat(groupId, firstMemberId, firstGenerationId)
    assertEquals(Errors.REBALANCE_IN_PROGRESS, heartbeatResult)

    // letting the session expire should make the member fall out of the group
    timer.advanceClock(1100)

    EasyMock.reset(replicaManager)
    heartbeatResult = heartbeat(groupId, firstMemberId, firstGenerationId)
    assertEquals(Errors.UNKNOWN_MEMBER_ID, heartbeatResult)

    // and the rebalance should complete with only the new member
    val otherJoinResult = await(otherJoinFuture, DefaultSessionTimeout+100)
    assertEquals(Errors.NONE, otherJoinResult.error)
  }

  // A member that heartbeats but never rejoins is kicked out when the
  // rebalance timeout expires, letting the new member complete the rebalance.
  @Test
  def testRebalanceCompletesBeforeMemberJoins() {
    // create a group with a single member
    val firstJoinResult = joinGroup(groupId, JoinGroupRequest.UNKNOWN_MEMBER_ID, protocolType, protocols, rebalanceTimeout = 1200, sessionTimeout = 1000)
    val firstMemberId = firstJoinResult.memberId
    val firstGenerationId = firstJoinResult.generationId
    assertEquals(firstMemberId, firstJoinResult.leaderId)
    assertEquals(Errors.NONE, firstJoinResult.error)

    EasyMock.reset(replicaManager)
    val firstSyncResult = syncGroupLeader(groupId, firstGenerationId, firstMemberId, Map(firstMemberId -> Array[Byte]()))
    assertEquals(Errors.NONE, firstSyncResult._2)

    // now have a new member join to trigger a rebalance
    EasyMock.reset(replicaManager)
    val otherJoinFuture = sendJoinGroup(groupId, JoinGroupRequest.UNKNOWN_MEMBER_ID, protocolType, protocols)

    // send a couple heartbeats to keep the member alive while the rebalance finishes
    timer.advanceClock(500)

    EasyMock.reset(replicaManager)
    var heartbeatResult = heartbeat(groupId, firstMemberId, firstGenerationId)
    assertEquals(Errors.REBALANCE_IN_PROGRESS, heartbeatResult)

    timer.advanceClock(500)

    EasyMock.reset(replicaManager)
    heartbeatResult = heartbeat(groupId, firstMemberId, firstGenerationId)
    assertEquals(Errors.REBALANCE_IN_PROGRESS, heartbeatResult)

    // now timeout the rebalance, which should kick the unjoined member out of the group
    // and let the rebalance finish with only the new member
    timer.advanceClock(500)
    val otherJoinResult = await(otherJoinFuture, DefaultSessionTimeout+100)
    assertEquals(Errors.NONE, otherJoinResult.error)
  }

  // A leader SyncGroup with an empty assignment map should succeed and hand
  // back an empty assignment.
  @Test
  def testSyncGroupEmptyAssignment() {
    val memberId = JoinGroupRequest.UNKNOWN_MEMBER_ID
    val joinGroupResult = joinGroup(groupId, memberId, protocolType, protocols)
    val assignedConsumerId = joinGroupResult.memberId
    val generationId = joinGroupResult.generationId
    val joinGroupError = joinGroupResult.error
    assertEquals(Errors.NONE, joinGroupError)

    EasyMock.reset(replicaManager)
    val syncGroupResult = syncGroupLeader(groupId, generationId, assignedConsumerId, Map())
    val syncGroupError = syncGroupResult._2
    assertEquals(Errors.NONE, syncGroupError)
    assertTrue(syncGroupResult._1.isEmpty)

    EasyMock.reset(replicaManager)
    val heartbeatResult = heartbeat(groupId, assignedConsumerId, 1)
    assertEquals(Errors.NONE, heartbeatResult)
  }

  // SyncGroup addressed to a group this coordinator does not own.
  @Test
  def testSyncGroupNotCoordinator() {
    val generation = 1

    val syncGroupResult = syncGroupFollower(otherGroupId, generation, memberId)
    assertEquals(Errors.NOT_COORDINATOR, syncGroupResult._2)
  }

  // SyncGroup for a group that has never been created.
  @Test
  def testSyncGroupFromUnknownGroup() {
    val generation = 1

    val syncGroupResult = syncGroupFollower(groupId, generation, memberId)
    assertEquals(Errors.UNKNOWN_MEMBER_ID, syncGroupResult._2)
  }

  // SyncGroup from a member id the group has never seen.
  @Test
  def testSyncGroupFromUnknownMember() {
    val memberId = JoinGroupRequest.UNKNOWN_MEMBER_ID
    val joinGroupResult = joinGroup(groupId, memberId, protocolType, protocols)
    val assignedConsumerId = joinGroupResult.memberId
    val generationId = joinGroupResult.generationId
    assertEquals(Errors.NONE, joinGroupResult.error)

    EasyMock.reset(replicaManager)
    val syncGroupResult = syncGroupLeader(groupId, generationId, assignedConsumerId, Map(assignedConsumerId -> Array[Byte]()))
    val syncGroupError = syncGroupResult._2
    assertEquals(Errors.NONE, syncGroupError)

    EasyMock.reset(replicaManager)
    val unknownMemberId = "blah"
    val unknownMemberSyncResult = syncGroupFollower(groupId, generationId, unknownMemberId)
    assertEquals(Errors.UNKNOWN_MEMBER_ID, unknownMemberSyncResult._2)
  }
@Test
  def testSyncGroupFromIllegalGeneration() {
    val memberId = JoinGroupRequest.UNKNOWN_MEMBER_ID
    val joinGroupResult = joinGroup(groupId, memberId, protocolType, protocols)
    val assignedConsumerId = joinGroupResult.memberId
    val generationId = joinGroupResult.generationId
    assertEquals(Errors.NONE, joinGroupResult.error)

    EasyMock.reset(replicaManager)
    // send the sync group with an invalid generation
    val syncGroupResult = syncGroupLeader(groupId, generationId+1, assignedConsumerId, Map(assignedConsumerId -> Array[Byte]()))
    assertEquals(Errors.ILLEGAL_GENERATION, syncGroupResult._2)
  }

  // A follower re-sending an identical JoinGroup must not bump the generation.
  @Test
  def testJoinGroupFromUnchangedFollowerDoesNotRebalance() {
    // to get a group of two members:
    // 1. join and sync with a single member (because we can't immediately join with two members)
    // 2. join and sync with the first member and a new member
    val firstJoinResult = joinGroup(groupId, JoinGroupRequest.UNKNOWN_MEMBER_ID, protocolType, protocols)
    val firstMemberId = firstJoinResult.memberId
    val firstGenerationId = firstJoinResult.generationId
    assertEquals(firstMemberId, firstJoinResult.leaderId)
    assertEquals(Errors.NONE, firstJoinResult.error)

    EasyMock.reset(replicaManager)
    val firstSyncResult = syncGroupLeader(groupId, firstGenerationId, firstMemberId, Map(firstMemberId -> Array[Byte]()))
    assertEquals(Errors.NONE, firstSyncResult._2)

    EasyMock.reset(replicaManager)
    val otherJoinFuture = sendJoinGroup(groupId, JoinGroupRequest.UNKNOWN_MEMBER_ID, protocolType, protocols)

    EasyMock.reset(replicaManager)
    val joinFuture = sendJoinGroup(groupId, firstMemberId, protocolType, protocols)

    val joinResult = await(joinFuture, DefaultSessionTimeout+100)
    val otherJoinResult = await(otherJoinFuture, DefaultSessionTimeout+100)
    assertEquals(Errors.NONE, joinResult.error)
    assertEquals(Errors.NONE, otherJoinResult.error)
    assertTrue(joinResult.generationId == otherJoinResult.generationId)

    assertEquals(firstMemberId, joinResult.leaderId)
    assertEquals(firstMemberId, otherJoinResult.leaderId)

    val nextGenerationId = joinResult.generationId

    // this shouldn't cause a rebalance since protocol information hasn't changed
    EasyMock.reset(replicaManager)
    val followerJoinResult = await(sendJoinGroup(groupId, otherJoinResult.memberId, protocolType, protocols), 1)

    assertEquals(Errors.NONE, followerJoinResult.error)
    assertEquals(nextGenerationId, followerJoinResult.generationId)
  }

  // A re-join from the current leader, even with unchanged protocols, must
  // trigger a rebalance (so the leader can push new assignments).
  @Test
  def testJoinGroupFromUnchangedLeaderShouldRebalance() {
    val firstJoinResult = joinGroup(groupId, JoinGroupRequest.UNKNOWN_MEMBER_ID, protocolType, protocols)
    val firstMemberId = firstJoinResult.memberId
    val firstGenerationId = firstJoinResult.generationId
    assertEquals(firstMemberId, firstJoinResult.leaderId)
    assertEquals(Errors.NONE, firstJoinResult.error)

    EasyMock.reset(replicaManager)
    val firstSyncResult = syncGroupLeader(groupId, firstGenerationId, firstMemberId, Map(firstMemberId -> Array[Byte]()))
    assertEquals(Errors.NONE, firstSyncResult._2)

    // join groups from the leader should force the group to rebalance, which allows the
    // leader to push new assignments when local metadata changes
    EasyMock.reset(replicaManager)
    val secondJoinResult = await(sendJoinGroup(groupId, firstMemberId, protocolType, protocols), 1)

    assertEquals(Errors.NONE, secondJoinResult.error)
    assertNotEquals(firstGenerationId, secondJoinResult.generationId)
  }

  // If the leader never sends its SyncGroup, waiting followers should be told
  // to rejoin once the session timeout kicks the leader out.
  @Test
  def testLeaderFailureInSyncGroup() {
    // to get a group of two members:
    // 1. join and sync with a single member (because we can't immediately join with two members)
    // 2. join and sync with the first member and a new member
    val firstJoinResult = joinGroup(groupId, JoinGroupRequest.UNKNOWN_MEMBER_ID, protocolType, protocols)
    val firstMemberId = firstJoinResult.memberId
    val firstGenerationId = firstJoinResult.generationId
    assertEquals(firstMemberId, firstJoinResult.leaderId)
    assertEquals(Errors.NONE, firstJoinResult.error)

    EasyMock.reset(replicaManager)
    val firstSyncResult = syncGroupLeader(groupId, firstGenerationId, firstMemberId, Map(firstMemberId -> Array[Byte]()))
    assertEquals(Errors.NONE, firstSyncResult._2)

    EasyMock.reset(replicaManager)
    val otherJoinFuture = sendJoinGroup(groupId, JoinGroupRequest.UNKNOWN_MEMBER_ID, protocolType, protocols)

    EasyMock.reset(replicaManager)
    val joinFuture = sendJoinGroup(groupId, firstMemberId, protocolType, protocols)

    val joinResult = await(joinFuture, DefaultSessionTimeout+100)
    val otherJoinResult = await(otherJoinFuture, DefaultSessionTimeout+100)
    assertEquals(Errors.NONE, joinResult.error)
    assertEquals(Errors.NONE, otherJoinResult.error)
    assertTrue(joinResult.generationId == otherJoinResult.generationId)

    assertEquals(firstMemberId, joinResult.leaderId)
    assertEquals(firstMemberId, otherJoinResult.leaderId)

    val nextGenerationId = joinResult.generationId

    // with no leader SyncGroup, the follower's request should fail with an error indicating
    // that it should rejoin
    EasyMock.reset(replicaManager)
    val followerSyncFuture = sendSyncGroupFollower(groupId, nextGenerationId, otherJoinResult.memberId)

    timer.advanceClock(DefaultSessionTimeout + 100)

    val followerSyncResult = await(followerSyncFuture, DefaultSessionTimeout+100)
    assertEquals(Errors.REBALANCE_IN_PROGRESS, followerSyncResult._2)
  }

  // Follower SyncGroup sent after the leader's: the follower should receive
  // its assignment immediately.
  @Test
  def testSyncGroupFollowerAfterLeader() {
    // to get a group of two members:
    // 1. join and sync with a single member (because we can't immediately join with two members)
    // 2. join and sync with the first member and a new member
    val firstJoinResult = joinGroup(groupId, JoinGroupRequest.UNKNOWN_MEMBER_ID, protocolType, protocols)
    val firstMemberId = firstJoinResult.memberId
    val firstGenerationId = firstJoinResult.generationId
    assertEquals(firstMemberId, firstJoinResult.leaderId)
    assertEquals(Errors.NONE, firstJoinResult.error)

    EasyMock.reset(replicaManager)
    val firstSyncResult = syncGroupLeader(groupId, firstGenerationId, firstMemberId, Map(firstMemberId -> Array[Byte]()))
    assertEquals(Errors.NONE, firstSyncResult._2)

    EasyMock.reset(replicaManager)
    val otherJoinFuture = sendJoinGroup(groupId, JoinGroupRequest.UNKNOWN_MEMBER_ID, protocolType, protocols)

    EasyMock.reset(replicaManager)
    val joinFuture = sendJoinGroup(groupId, firstMemberId, protocolType, protocols)

    val joinResult = await(joinFuture, DefaultSessionTimeout+100)
    val otherJoinResult = await(otherJoinFuture, DefaultSessionTimeout+100)
    assertEquals(Errors.NONE, joinResult.error)
    assertEquals(Errors.NONE, otherJoinResult.error)
    assertTrue(joinResult.generationId == otherJoinResult.generationId)

    assertEquals(firstMemberId, joinResult.leaderId)
    assertEquals(firstMemberId, otherJoinResult.leaderId)

    val nextGenerationId = joinResult.generationId

    val leaderId = firstMemberId
    val leaderAssignment = Array[Byte](0)
    val followerId = otherJoinResult.memberId
    val followerAssignment = Array[Byte](1)

    EasyMock.reset(replicaManager)
    val leaderSyncResult = syncGroupLeader(groupId, nextGenerationId, leaderId, Map(leaderId -> leaderAssignment, followerId -> followerAssignment))
    assertEquals(Errors.NONE, leaderSyncResult._2)
    assertEquals(leaderAssignment, leaderSyncResult._1)

    EasyMock.reset(replicaManager)
    val followerSyncResult = syncGroupFollower(groupId, nextGenerationId, otherJoinResult.memberId)
    assertEquals(Errors.NONE, followerSyncResult._2)
    assertEquals(followerAssignment, followerSyncResult._1)
  }

  // Follower SyncGroup sent before the leader's: the follower's response is
  // parked until the leader provides the assignments.
  @Test
  def testSyncGroupLeaderAfterFollower() {
    // to get a group of two members:
    // 1. join and sync with a single member (because we can't immediately join with two members)
    // 2. join and sync with the first member and a new member
    val joinGroupResult = joinGroup(groupId, JoinGroupRequest.UNKNOWN_MEMBER_ID, protocolType, protocols)
    val firstMemberId = joinGroupResult.memberId
    val firstGenerationId = joinGroupResult.generationId
    assertEquals(firstMemberId, joinGroupResult.leaderId)
    assertEquals(Errors.NONE, joinGroupResult.error)

    EasyMock.reset(replicaManager)
    val syncGroupResult = syncGroupLeader(groupId, firstGenerationId, firstMemberId, Map(firstMemberId -> Array[Byte]()))
    val syncGroupError = syncGroupResult._2
    assertEquals(Errors.NONE, syncGroupError)

    EasyMock.reset(replicaManager)
    val otherJoinFuture = sendJoinGroup(groupId, JoinGroupRequest.UNKNOWN_MEMBER_ID, protocolType, protocols)

    EasyMock.reset(replicaManager)
    val joinFuture = sendJoinGroup(groupId, firstMemberId, protocolType, protocols)

    val joinResult = await(joinFuture, DefaultSessionTimeout+100)
    val otherJoinResult = await(otherJoinFuture, DefaultSessionTimeout+100)
    assertEquals(Errors.NONE, joinResult.error)
    assertEquals(Errors.NONE, otherJoinResult.error)
    assertTrue(joinResult.generationId == otherJoinResult.generationId)

    val nextGenerationId = joinResult.generationId

    val leaderId = joinResult.leaderId
    val leaderAssignment = Array[Byte](0)
    val followerId = otherJoinResult.memberId
    val followerAssignment = Array[Byte](1)

    assertEquals(firstMemberId, joinResult.leaderId)
    assertEquals(firstMemberId, otherJoinResult.leaderId)

    // follower syncs first; its future completes only after the leader's sync
    EasyMock.reset(replicaManager)
    val followerSyncFuture = sendSyncGroupFollower(groupId, nextGenerationId, followerId)

    EasyMock.reset(replicaManager)
    val leaderSyncResult = syncGroupLeader(groupId, nextGenerationId, leaderId, Map(leaderId -> leaderAssignment, followerId -> followerAssignment))
    assertEquals(Errors.NONE, leaderSyncResult._2)
    assertEquals(leaderAssignment, leaderSyncResult._1)

    val followerSyncResult = await(followerSyncFuture, DefaultSessionTimeout+100)
assertEquals(Errors.NONE, followerSyncResult._2)
    assertEquals(followerAssignment, followerSyncResult._1)
  }

  // Committing with an unknown member/generation pairing is rejected.
  @Test
  def testCommitOffsetFromUnknownGroup() {
    val generationId = 1
    val tp = new TopicPartition("topic", 0)
    val offset = OffsetAndMetadata(0)

    val commitOffsetResult = commitOffsets(groupId, memberId, generationId, immutable.Map(tp -> offset))
    assertEquals(Errors.ILLEGAL_GENERATION, commitOffsetResult(tp))
  }

  // "Simple" (non-group) commits using the default member id / generation.
  @Test
  def testCommitOffsetWithDefaultGeneration() {
    val tp = new TopicPartition("topic", 0)
    val offset = OffsetAndMetadata(0)

    val commitOffsetResult = commitOffsets(groupId, OffsetCommitRequest.DEFAULT_MEMBER_ID, OffsetCommitRequest.DEFAULT_GENERATION_ID, immutable.Map(tp -> offset))
    assertEquals(Errors.NONE, commitOffsetResult(tp))
  }

  // A committed offset should be returned by a subsequent fetch.
  @Test
  def testFetchOffsets() {
    val tp = new TopicPartition("topic", 0)
    val offset = OffsetAndMetadata(0)

    val commitOffsetResult = commitOffsets(groupId, OffsetCommitRequest.DEFAULT_MEMBER_ID, OffsetCommitRequest.DEFAULT_GENERATION_ID, immutable.Map(tp -> offset))
    assertEquals(Errors.NONE, commitOffsetResult(tp))

    val (error, partitionData) = groupCoordinator.handleFetchOffsets(groupId, Some(Seq(tp)))
    assertEquals(Errors.NONE, error)
    assertEquals(Some(0), partitionData.get(tp).map(_.offset))
  }

  // Transactional commits stay pending (invisible to fetches) until the
  // transaction's commit marker arrives.
  @Test
  def testBasicFetchTxnOffsets() {
    val tp = new TopicPartition("topic", 0)
    val offset = OffsetAndMetadata(0)
    val producerId = 1000L
    val producerEpoch : Short = 2

    val commitOffsetResult = commitTransactionalOffsets(groupId, producerId, producerEpoch, immutable.Map(tp -> offset))
    assertEquals(Errors.NONE, commitOffsetResult(tp))

    val (error, partitionData) = groupCoordinator.handleFetchOffsets(groupId, Some(Seq(tp)))

    // Validate that the offset isn't materialized yet.
    assertEquals(Errors.NONE, error)
    assertEquals(Some(OffsetFetchResponse.INVALID_OFFSET), partitionData.get(tp).map(_.offset))

    val offsetsTopic = new TopicPartition(Topic.GROUP_METADATA_TOPIC_NAME, groupPartitionId)

    // Send commit marker.
    groupCoordinator.handleTxnCompletion(producerId, List(offsetsTopic), TransactionResult.COMMIT)

    // Validate that committed offset is materialized.
    val (secondReqError, secondReqPartitionData) = groupCoordinator.handleFetchOffsets(groupId, Some(Seq(tp)))
    assertEquals(Errors.NONE, secondReqError)
    assertEquals(Some(0), secondReqPartitionData.get(tp).map(_.offset))
  }

  // An abort marker should discard the pending transactional commit.
  @Test
  def testFetchTxnOffsetsWithAbort() {
    val tp = new TopicPartition("topic", 0)
    val offset = OffsetAndMetadata(0)
    val producerId = 1000L
    val producerEpoch : Short = 2

    val commitOffsetResult = commitTransactionalOffsets(groupId, producerId, producerEpoch, immutable.Map(tp -> offset))
    assertEquals(Errors.NONE, commitOffsetResult(tp))

    val (error, partitionData) = groupCoordinator.handleFetchOffsets(groupId, Some(Seq(tp)))
    assertEquals(Errors.NONE, error)
    assertEquals(Some(OffsetFetchResponse.INVALID_OFFSET), partitionData.get(tp).map(_.offset))

    val offsetsTopic = new TopicPartition(Topic.GROUP_METADATA_TOPIC_NAME, groupPartitionId)

    // Validate that the pending commit is discarded.
groupCoordinator.handleTxnCompletion(producerId, List(offsetsTopic), TransactionResult.ABORT)

    val (secondReqError, secondReqPartitionData) = groupCoordinator.handleFetchOffsets(groupId, Some(Seq(tp)))
    assertEquals(Errors.NONE, secondReqError)
    assertEquals(Some(OffsetFetchResponse.INVALID_OFFSET), secondReqPartitionData.get(tp).map(_.offset))
  }

  // A commit marker arriving after the pending transactional offset was
  // already aborted must be ignored: the aborted offset must not reappear.
  @Test
  def testFetchTxnOffsetsIgnoreSpuriousCommit() {
    val tp = new TopicPartition("topic", 0)
    val offset = OffsetAndMetadata(0)
    val producerId = 1000L
    val producerEpoch : Short = 2

    val commitOffsetResult = commitTransactionalOffsets(groupId, producerId, producerEpoch, immutable.Map(tp -> offset))
    assertEquals(Errors.NONE, commitOffsetResult(tp))

    // Pending transactional offsets are not visible to fetches.
    val (error, partitionData) = groupCoordinator.handleFetchOffsets(groupId, Some(Seq(tp)))
    assertEquals(Errors.NONE, error)
    assertEquals(Some(OffsetFetchResponse.INVALID_OFFSET), partitionData.get(tp).map(_.offset))

    val offsetsTopic = new TopicPartition(Topic.GROUP_METADATA_TOPIC_NAME, groupPartitionId)
    groupCoordinator.handleTxnCompletion(producerId, List(offsetsTopic), TransactionResult.ABORT)

    val (secondReqError, secondReqPartitionData) = groupCoordinator.handleFetchOffsets(groupId, Some(Seq(tp)))
    assertEquals(Errors.NONE, secondReqError)
    assertEquals(Some(OffsetFetchResponse.INVALID_OFFSET), secondReqPartitionData.get(tp).map(_.offset))

    // Ignore spurious commit.
    groupCoordinator.handleTxnCompletion(producerId, List(offsetsTopic), TransactionResult.COMMIT)

    val (thirdReqError, thirdReqPartitionData) = groupCoordinator.handleFetchOffsets(groupId, Some(Seq(tp)))
    // Fixed: previously asserted secondReqError here (copy-paste bug), so the
    // third request's error code was never actually checked.
    assertEquals(Errors.NONE, thirdReqError)
    assertEquals(Some(OffsetFetchResponse.INVALID_OFFSET), thirdReqPartitionData.get(tp).map(_.offset))
  }

  @Test
  def testFetchTxnOffsetsOneProducerMultipleGroups() {
    // One producer, two groups located on separate offsets topic partitions.
    // Both groups have pending offset commits.
    // Marker for only one partition is received. That commit should be materialized while the other should not.
val partitions = List(new TopicPartition("topic1", 0), new TopicPartition("topic2", 0))
    val offsets = List(OffsetAndMetadata(10), OffsetAndMetadata(15))
    val producerId = 1000L
    val producerEpoch: Short = 3

    val groupIds = List(groupId, otherGroupId)
    val offsetTopicPartitions = List(new TopicPartition(Topic.GROUP_METADATA_TOPIC_NAME, groupCoordinator.partitionFor(groupId)), new TopicPartition(Topic.GROUP_METADATA_TOPIC_NAME, groupCoordinator.partitionFor(otherGroupId)))

    // Make this coordinator own the second group's offsets partition as well.
    groupCoordinator.groupManager.addPartitionOwnership(offsetTopicPartitions(1).partition)

    val errors = mutable.ArrayBuffer[Errors]()
    val partitionData = mutable.ArrayBuffer[Map[TopicPartition, OffsetFetchResponse.PartitionData]]()
    val commitOffsetResults = mutable.ArrayBuffer[CommitOffsetCallbackParams]()

    // Ensure that the two groups map to different partitions.
    assertNotEquals(offsetTopicPartitions(0), offsetTopicPartitions(1))

    commitOffsetResults.append(commitTransactionalOffsets(groupId, producerId, producerEpoch, immutable.Map(partitions(0) -> offsets(0))))
    assertEquals(Errors.NONE, commitOffsetResults(0)(partitions(0)))
    commitOffsetResults.append(commitTransactionalOffsets(otherGroupId, producerId, producerEpoch, immutable.Map(partitions(1) -> offsets(1))))
    assertEquals(Errors.NONE, commitOffsetResults(1)(partitions(1)))

    // We got a commit for only one __consumer_offsets partition. We should only materialize its group offsets.
    groupCoordinator.handleTxnCompletion(producerId, List(offsetTopicPartitions(0)), TransactionResult.COMMIT)
    groupCoordinator.handleFetchOffsets(groupIds(0), Some(partitions)) match {
      case (error, partData) =>
        errors.append(error)
        partitionData.append(partData)
      case _ => // NOTE(review): unreachable — the tuple pattern above is irrefutable
    }

    groupCoordinator.handleFetchOffsets(groupIds(1), Some(partitions)) match {
      case (error, partData) =>
        errors.append(error)
        partitionData.append(partData)
      case _ =>
    }

    assertEquals(2, errors.size)
    assertEquals(Errors.NONE, errors(0))
    assertEquals(Errors.NONE, errors(1))

    // Exactly one offset commit should have been materialized.
    assertEquals(Some(offsets(0).offset), partitionData(0).get(partitions(0)).map(_.offset))
    assertEquals(Some(OffsetFetchResponse.INVALID_OFFSET), partitionData(0).get(partitions(1)).map(_.offset))
    assertEquals(Some(OffsetFetchResponse.INVALID_OFFSET), partitionData(1).get(partitions(0)).map(_.offset))
    assertEquals(Some(OffsetFetchResponse.INVALID_OFFSET), partitionData(1).get(partitions(1)).map(_.offset))

    // Now we receive the other marker.
    groupCoordinator.handleTxnCompletion(producerId, List(offsetTopicPartitions(1)), TransactionResult.COMMIT)
    errors.clear()
    partitionData.clear()
    groupCoordinator.handleFetchOffsets(groupIds(0), Some(partitions)) match {
      case (error, partData) =>
        errors.append(error)
        partitionData.append(partData)
      case _ =>
    }
    groupCoordinator.handleFetchOffsets(groupIds(1), Some(partitions)) match {
      case (error, partData) =>
        errors.append(error)
        partitionData.append(partData)
      case _ =>
    }

    // Two offsets should have been materialized
    assertEquals(Some(offsets(0).offset), partitionData(0).get(partitions(0)).map(_.offset))
    assertEquals(Some(OffsetFetchResponse.INVALID_OFFSET), partitionData(0).get(partitions(1)).map(_.offset))
    assertEquals(Some(OffsetFetchResponse.INVALID_OFFSET), partitionData(1).get(partitions(0)).map(_.offset))
    assertEquals(Some(offsets(1).offset), partitionData(1).get(partitions(1)).map(_.offset))
  }

  @Test
  def testFetchTxnOffsetsMultipleProducersOneGroup() {
    // One group, two producers
    // Different producers will commit offsets for different partitions.
    // Each partition's offsets should be materialized when the corresponding producer's marker is received.
    val partitions = List(new TopicPartition("topic1", 0), new TopicPartition("topic2", 0))
    val offsets = List(OffsetAndMetadata(10), OffsetAndMetadata(15))
    val producerIds = List(1000L, 1005L)
    val producerEpochs: Seq[Short] = List(3, 4)

    val offsetTopicPartition = new TopicPartition(Topic.GROUP_METADATA_TOPIC_NAME, groupCoordinator.partitionFor(groupId))

    val errors = mutable.ArrayBuffer[Errors]()
    val partitionData = mutable.ArrayBuffer[Map[TopicPartition, OffsetFetchResponse.PartitionData]]()
    val commitOffsetResults = mutable.ArrayBuffer[CommitOffsetCallbackParams]()

    // producer0 commits the offsets for partition0
    commitOffsetResults.append(commitTransactionalOffsets(groupId, producerIds(0), producerEpochs(0), immutable.Map(partitions(0) -> offsets(0))))
    assertEquals(Errors.NONE, commitOffsetResults(0)(partitions(0)))

    // producer1 commits the offsets for partition1
    commitOffsetResults.append(commitTransactionalOffsets(groupId, producerIds(1), producerEpochs(1), immutable.Map(partitions(1) -> offsets(1))))
    assertEquals(Errors.NONE, commitOffsetResults(1)(partitions(1)))

    // producer0 commits its transaction.
    groupCoordinator.handleTxnCompletion(producerIds(0), List(offsetTopicPartition), TransactionResult.COMMIT)
    groupCoordinator.handleFetchOffsets(groupId, Some(partitions)) match {
      case (error, partData) =>
        errors.append(error)
        partitionData.append(partData)
      case _ =>
    }

    assertEquals(Errors.NONE, errors(0))

    // We should only see the offset commit for producer0
    assertEquals(Some(offsets(0).offset), partitionData(0).get(partitions(0)).map(_.offset))
    assertEquals(Some(OffsetFetchResponse.INVALID_OFFSET), partitionData(0).get(partitions(1)).map(_.offset))

    // producer1 now commits its transaction.
    groupCoordinator.handleTxnCompletion(producerIds(1), List(offsetTopicPartition), TransactionResult.COMMIT)
    groupCoordinator.handleFetchOffsets(groupId, Some(partitions)) match {
      case (error, partData) =>
        errors.append(error)
        partitionData.append(partData)
      case _ =>
    }

    assertEquals(Errors.NONE, errors(1))

    // We should now see the offset commits for both producers.
    assertEquals(Some(offsets(0).offset), partitionData(1).get(partitions(0)).map(_.offset))
    assertEquals(Some(offsets(1).offset), partitionData(1).get(partitions(1)).map(_.offset))
  }

  // Fetching an offset for a partition with no commit yields INVALID_OFFSET.
  @Test
  def testFetchOffsetForUnknownPartition(): Unit = {
    val tp = new TopicPartition("topic", 0)
    val (error, partitionData) = groupCoordinator.handleFetchOffsets(groupId, Some(Seq(tp)))
    assertEquals(Errors.NONE, error)
    assertEquals(Some(OffsetFetchResponse.INVALID_OFFSET), partitionData.get(tp).map(_.offset))
  }

  // Offset fetch for a group owned by another coordinator returns no data.
  @Test
  def testFetchOffsetNotCoordinatorForGroup(): Unit = {
    val tp = new TopicPartition("topic", 0)
    val (error, partitionData) = groupCoordinator.handleFetchOffsets(otherGroupId, Some(Seq(tp)))
    assertEquals(Errors.NOT_COORDINATOR, error)
    assertTrue(partitionData.isEmpty)
  }

  // Fetch with no explicit partition list returns every committed offset.
  @Test
  def testFetchAllOffsets() {
    val tp1 = new TopicPartition("topic", 0)
    val tp2 = new TopicPartition("topic", 1)
    val tp3 = new TopicPartition("other-topic", 0)
    val offset1 = OffsetAndMetadata(15)
    val offset2 = OffsetAndMetadata(16)
    val offset3 = OffsetAndMetadata(17)

    assertEquals((Errors.NONE, Map.empty), groupCoordinator.handleFetchOffsets(groupId))

    val commitOffsetResult = commitOffsets(groupId, OffsetCommitRequest.DEFAULT_MEMBER_ID, OffsetCommitRequest.DEFAULT_GENERATION_ID, immutable.Map(tp1 -> offset1, tp2 -> offset2, tp3 -> offset3))
    assertEquals(Errors.NONE, commitOffsetResult(tp1))
    assertEquals(Errors.NONE, commitOffsetResult(tp2))
    assertEquals(Errors.NONE, commitOffsetResult(tp3))

    val (error, partitionData) = groupCoordinator.handleFetchOffsets(groupId)
    assertEquals(Errors.NONE, error)
    assertEquals(3, partitionData.size)
assertTrue(partitionData.forall(_._2.error == Errors.NONE))
    assertEquals(Some(offset1.offset), partitionData.get(tp1).map(_.offset))
    assertEquals(Some(offset2.offset), partitionData.get(tp2).map(_.offset))
    assertEquals(Some(offset3.offset), partitionData.get(tp3).map(_.offset))
  }

  // Commits are rejected while the group is still completing a rebalance
  // (join succeeded but no SyncGroup yet).
  @Test
  def testCommitOffsetInCompletingRebalance() {
    val memberId = JoinGroupRequest.UNKNOWN_MEMBER_ID
    val tp = new TopicPartition("topic", 0)
    val offset = OffsetAndMetadata(0)

    val joinGroupResult = joinGroup(groupId, memberId, protocolType, protocols)
    val assignedMemberId = joinGroupResult.memberId
    val generationId = joinGroupResult.generationId
    val joinGroupError = joinGroupResult.error
    assertEquals(Errors.NONE, joinGroupError)

    EasyMock.reset(replicaManager)
    val commitOffsetResult = commitOffsets(groupId, assignedMemberId, generationId, immutable.Map(tp -> offset))
    assertEquals(Errors.REBALANCE_IN_PROGRESS, commitOffsetResult(tp))
  }

  @Test
  def testHeartbeatDuringRebalanceCausesRebalanceInProgress() {
    // First start up a group (with a slightly larger timeout to give us time to heartbeat when the rebalance starts)
    val joinGroupResult = joinGroup(groupId, JoinGroupRequest.UNKNOWN_MEMBER_ID, protocolType, protocols)
    val assignedConsumerId = joinGroupResult.memberId
    val initialGenerationId = joinGroupResult.generationId
    val joinGroupError = joinGroupResult.error
    assertEquals(Errors.NONE, joinGroupError)

    // Then join with a new consumer to trigger a rebalance
    EasyMock.reset(replicaManager)
    sendJoinGroup(groupId, JoinGroupRequest.UNKNOWN_MEMBER_ID, protocolType, protocols)

    // We should be in the middle of a rebalance, so the heartbeat should return rebalance in progress
    EasyMock.reset(replicaManager)
    val heartbeatResult = heartbeat(groupId, assignedConsumerId, initialGenerationId)
    assertEquals(Errors.REBALANCE_IN_PROGRESS, heartbeatResult)
  }

  // Each completed rebalance should bump the generation id by one.
  @Test
  def testGenerationIdIncrementsOnRebalance() {
    val joinGroupResult = joinGroup(groupId, JoinGroupRequest.UNKNOWN_MEMBER_ID, protocolType, protocols)
    val initialGenerationId = joinGroupResult.generationId
    val joinGroupError = joinGroupResult.error
    val memberId = joinGroupResult.memberId
    assertEquals(1, initialGenerationId)
    assertEquals(Errors.NONE, joinGroupError)

    EasyMock.reset(replicaManager)
    val syncGroupResult = syncGroupLeader(groupId, initialGenerationId, memberId, Map(memberId -> Array[Byte]()))
    val syncGroupError = syncGroupResult._2
    assertEquals(Errors.NONE, syncGroupError)

    EasyMock.reset(replicaManager)
    val joinGroupFuture = sendJoinGroup(groupId, memberId, protocolType, protocols)
    val otherJoinGroupResult = await(joinGroupFuture, 1)

    val nextGenerationId = otherJoinGroupResult.generationId
    val otherJoinGroupError = otherJoinGroupResult.error
    assertEquals(2, nextGenerationId)
    assertEquals(Errors.NONE, otherJoinGroupError)
  }

  // LeaveGroup addressed to a coordinator that does not own the group.
  @Test
  def testLeaveGroupWrongCoordinator() {
    val memberId = JoinGroupRequest.UNKNOWN_MEMBER_ID

    val leaveGroupResult = leaveGroup(otherGroupId, memberId)
    assertEquals(Errors.NOT_COORDINATOR, leaveGroupResult)
  }

  // Leaving a group that has never been created.
  @Test
  def testLeaveGroupUnknownGroup() {
    val leaveGroupResult = leaveGroup(groupId, memberId)
    assertEquals(Errors.UNKNOWN_MEMBER_ID, leaveGroupResult)
  }

  // Leaving with a member id the group does not contain.
  @Test
  def testLeaveGroupUnknownConsumerExistingGroup() {
    val memberId = JoinGroupRequest.UNKNOWN_MEMBER_ID
    val otherMemberId = "consumerId"

    val joinGroupResult = joinGroup(groupId, memberId, protocolType, protocols)
    val joinGroupError = joinGroupResult.error
    assertEquals(Errors.NONE, joinGroupError)

    EasyMock.reset(replicaManager)
    val leaveGroupResult = leaveGroup(groupId, otherMemberId)
    assertEquals(Errors.UNKNOWN_MEMBER_ID, leaveGroupResult)
  }

  // A joined member can leave its group cleanly.
  @Test
  def testValidLeaveGroup() {
    val memberId = JoinGroupRequest.UNKNOWN_MEMBER_ID

    val joinGroupResult = joinGroup(groupId, memberId, protocolType, protocols)
    val assignedMemberId = joinGroupResult.memberId
    val joinGroupError = joinGroupResult.error
    assertEquals(Errors.NONE, joinGroupError)

    EasyMock.reset(replicaManager)
    val leaveGroupResult = leaveGroup(groupId, assignedMemberId)
    assertEquals(Errors.NONE, leaveGroupResult)
  }

  // ListGroups should report a group that has reached the Stable state.
  @Test
  def testListGroupsIncludesStableGroups() {
    val memberId = JoinGroupRequest.UNKNOWN_MEMBER_ID
    val joinGroupResult = joinGroup(groupId, memberId, protocolType, protocols)
    val assignedMemberId = joinGroupResult.memberId
    val generationId = joinGroupResult.generationId
    assertEquals(Errors.NONE, joinGroupResult.error)

    EasyMock.reset(replicaManager)
    val syncGroupResult = syncGroupLeader(groupId, generationId, assignedMemberId, Map(assignedMemberId -> Array[Byte]()))
    val syncGroupError = syncGroupResult._2
    assertEquals(Errors.NONE, syncGroupError)

    val (error, groups) = groupCoordinator.handleListGroups()
    assertEquals(Errors.NONE, error)
    assertEquals(1, groups.size)
    assertEquals(GroupOverview("groupId", "consumer"), groups.head)
  }

  // ListGroups should also report groups still in the middle of a rebalance.
  @Test
  def testListGroupsIncludesRebalancingGroups() {
    val memberId = JoinGroupRequest.UNKNOWN_MEMBER_ID
    val joinGroupResult = joinGroup(groupId, memberId, protocolType, protocols)
    assertEquals(Errors.NONE, joinGroupResult.error)

    val (error, groups) = groupCoordinator.handleListGroups()
    assertEquals(Errors.NONE, error)
    assertEquals(1, groups.size)
    assertEquals(GroupOverview("groupId", "consumer"), groups.head)
  }

  // DescribeGroup for a group owned by another coordinator.
  @Test
  def testDescribeGroupWrongCoordinator() {
    EasyMock.reset(replicaManager)
    val (error, _) = groupCoordinator.handleDescribeGroup(otherGroupId)
    assertEquals(Errors.NOT_COORDINATOR, error)
  }

  // DescribeGroup for a group that does not exist returns the Dead summary.
  @Test
  def testDescribeGroupInactiveGroup() {
    EasyMock.reset(replicaManager)
    val (error, summary) = groupCoordinator.handleDescribeGroup(groupId)
    assertEquals(Errors.NONE, error)
    assertEquals(GroupCoordinator.DeadGroup, summary)
  }

  // DescribeGroup on a Stable group reports protocol and members.
  @Test
  def testDescribeGroupStable() {
    val memberId = JoinGroupRequest.UNKNOWN_MEMBER_ID
    val joinGroupResult = joinGroup(groupId, memberId, protocolType, protocols)
    val assignedMemberId = joinGroupResult.memberId
    val generationId = joinGroupResult.generationId
    val joinGroupError = joinGroupResult.error
    assertEquals(Errors.NONE, joinGroupError)

    EasyMock.reset(replicaManager)
    val syncGroupResult = syncGroupLeader(groupId, generationId, assignedMemberId, Map(assignedMemberId -> Array[Byte]()))

    val syncGroupError = syncGroupResult._2
    assertEquals(Errors.NONE, syncGroupError)

    EasyMock.reset(replicaManager)
    val (error, summary) = groupCoordinator.handleDescribeGroup(groupId)
    assertEquals(Errors.NONE, error)
    assertEquals(protocolType, summary.protocolType)
    assertEquals("range", summary.protocol)
    assertEquals(List(assignedMemberId), summary.members.map(_.memberId))
  }

  // While rebalancing, DescribeGroup reports no selected protocol and no
  // member metadata/assignments.
  @Test
  def testDescribeGroupRebalancing() {
    val memberId = JoinGroupRequest.UNKNOWN_MEMBER_ID
    val joinGroupResult = joinGroup(groupId, memberId, protocolType, protocols)
    val joinGroupError = joinGroupResult.error
    assertEquals(Errors.NONE, joinGroupError)

    EasyMock.reset(replicaManager)
    val (error, summary) = groupCoordinator.handleDescribeGroup(groupId)
    assertEquals(Errors.NONE, error)
    assertEquals(protocolType, summary.protocolType)
    assertEquals(GroupCoordinator.NoProtocol, summary.protocol)
    assertEquals(CompletingRebalance.toString, summary.state)
    assertTrue(summary.members.map(_.memberId).contains(joinGroupResult.memberId))
    assertTrue(summary.members.forall(_.metadata.isEmpty))
    assertTrue(summary.members.forall(_.assignment.isEmpty))
  }

  // The very first rebalance of an empty group is delayed by
  // GroupInitialRebalanceDelay to let more members join.
  @Test
  def shouldDelayInitialRebalanceByGroupInitialRebalanceDelayOnEmptyGroup() {
    val firstJoinFuture = sendJoinGroup(groupId, JoinGroupRequest.UNKNOWN_MEMBER_ID, protocolType, protocols)
    timer.advanceClock(GroupInitialRebalanceDelay / 2)
    verifyDelayedTaskNotCompleted(firstJoinFuture)
    timer.advanceClock((GroupInitialRebalanceDelay / 2) + 1)
    val joinGroupResult = await(firstJoinFuture, 1)
    assertEquals(Errors.NONE, joinGroupResult.error)
  }

  // Helper: asserts the join future has not yet completed by expecting the
  // 1 ms await to time out.
  private def verifyDelayedTaskNotCompleted(firstJoinFuture: Future[JoinGroupResult]) = {
    try {
      await(firstJoinFuture, 1)
      Assert.fail("should have timed out as rebalance delay not expired")
    } catch {
      case _: TimeoutException => // ok
    }
  }

  // A new member joining during the initial delay extends the delay window.
  @Test
  def shouldResetRebalanceDelayWhenNewMemberJoinsGroupInInitialRebalance() {
    val rebalanceTimeout = GroupInitialRebalanceDelay * 3
    val firstMemberJoinFuture = sendJoinGroup(groupId, JoinGroupRequest.UNKNOWN_MEMBER_ID, protocolType, protocols, rebalanceTimeout)
    EasyMock.reset(replicaManager)
    timer.advanceClock(GroupInitialRebalanceDelay - 1)
    val secondMemberJoinFuture = sendJoinGroup(groupId, JoinGroupRequest.UNKNOWN_MEMBER_ID, protocolType, protocols, rebalanceTimeout)
    EasyMock.reset(replicaManager)
    timer.advanceClock(2)

    // advance past initial rebalance delay and make sure that tasks
    // haven't been completed
    timer.advanceClock(GroupInitialRebalanceDelay / 2 + 1)
    verifyDelayedTaskNotCompleted(firstMemberJoinFuture)
    verifyDelayedTaskNotCompleted(secondMemberJoinFuture)

    // advance clock beyond updated delay and make sure the
    // tasks have completed
    timer.advanceClock(GroupInitialRebalanceDelay / 2)
    val firstResult = await(firstMemberJoinFuture, 1)
    val secondResult = await(secondMemberJoinFuture, 1)
    assertEquals(Errors.NONE, firstResult.error)
    assertEquals(Errors.NONE, secondResult.error)
  }

  // The initial rebalance delay cannot be extended past the rebalance timeout.
  @Test
  def shouldDelayRebalanceUptoRebalanceTimeout() {
    val rebalanceTimeout = GroupInitialRebalanceDelay * 2
    val firstMemberJoinFuture = sendJoinGroup(groupId, JoinGroupRequest.UNKNOWN_MEMBER_ID, protocolType, protocols, rebalanceTimeout)
    EasyMock.reset(replicaManager)
    val secondMemberJoinFuture = sendJoinGroup(groupId, JoinGroupRequest.UNKNOWN_MEMBER_ID, protocolType, protocols, rebalanceTimeout)
    timer.advanceClock(GroupInitialRebalanceDelay + 1)
    EasyMock.reset(replicaManager)
    val thirdMemberJoinFuture = sendJoinGroup(groupId, JoinGroupRequest.UNKNOWN_MEMBER_ID, protocolType, protocols, rebalanceTimeout)
    timer.advanceClock(GroupInitialRebalanceDelay)
    EasyMock.reset(replicaManager)
    verifyDelayedTaskNotCompleted(firstMemberJoinFuture)
    verifyDelayedTaskNotCompleted(secondMemberJoinFuture)
    verifyDelayedTaskNotCompleted(thirdMemberJoinFuture)

    // advance clock beyond rebalanceTimeout
    timer.advanceClock(1)

    val firstResult = await(firstMemberJoinFuture, 1)
    val
secondResult = await(secondMemberJoinFuture, 1) val thirdResult = await(thirdMemberJoinFuture, 1) assertEquals(Errors.NONE, firstResult.error) assertEquals(Errors.NONE, secondResult.error) assertEquals(Errors.NONE, thirdResult.error) } private def setupJoinGroupCallback: (Future[JoinGroupResult], JoinGroupCallback) = { val responsePromise = Promise[JoinGroupResult] val responseFuture = responsePromise.future val responseCallback: JoinGroupCallback = responsePromise.success(_) (responseFuture, responseCallback) } private def setupSyncGroupCallback: (Future[SyncGroupCallbackParams], SyncGroupCallback) = { val responsePromise = Promise[SyncGroupCallbackParams] val responseFuture = responsePromise.future val responseCallback: SyncGroupCallback = (assignment, error) => responsePromise.success((assignment, error)) (responseFuture, responseCallback) } private def setupHeartbeatCallback: (Future[HeartbeatCallbackParams], HeartbeatCallback) = { val responsePromise = Promise[HeartbeatCallbackParams] val responseFuture = responsePromise.future val responseCallback: HeartbeatCallback = error => responsePromise.success(error) (responseFuture, responseCallback) } private def setupCommitOffsetsCallback: (Future[CommitOffsetCallbackParams], CommitOffsetCallback) = { val responsePromise = Promise[CommitOffsetCallbackParams] val responseFuture = responsePromise.future val responseCallback: CommitOffsetCallback = offsets => responsePromise.success(offsets) (responseFuture, responseCallback) } private def sendJoinGroup(groupId: String, memberId: String, protocolType: String, protocols: List[(String, Array[Byte])], rebalanceTimeout: Int = DefaultRebalanceTimeout, sessionTimeout: Int = DefaultSessionTimeout): Future[JoinGroupResult] = { val (responseFuture, responseCallback) = setupJoinGroupCallback EasyMock.replay(replicaManager) groupCoordinator.handleJoinGroup(groupId, memberId, "clientId", "clientHost", rebalanceTimeout, sessionTimeout, protocolType, protocols, responseCallback) 
responseFuture } private def sendSyncGroupLeader(groupId: String, generation: Int, leaderId: String, assignment: Map[String, Array[Byte]]): Future[SyncGroupCallbackParams] = { val (responseFuture, responseCallback) = setupSyncGroupCallback val capturedArgument: Capture[Map[TopicPartition, PartitionResponse] => Unit] = EasyMock.newCapture() EasyMock.expect(replicaManager.appendRecords(EasyMock.anyLong(), EasyMock.anyShort(), internalTopicsAllowed = EasyMock.eq(true), isFromClient = EasyMock.eq(false), EasyMock.anyObject().asInstanceOf[Map[TopicPartition, MemoryRecords]], EasyMock.capture(capturedArgument), EasyMock.anyObject().asInstanceOf[Option[ReentrantLock]], EasyMock.anyObject())).andAnswer(new IAnswer[Unit] { override def answer = capturedArgument.getValue.apply( Map(new TopicPartition(Topic.GROUP_METADATA_TOPIC_NAME, groupPartitionId) -> new PartitionResponse(Errors.NONE, 0L, RecordBatch.NO_TIMESTAMP, 0L) ) )}) EasyMock.expect(replicaManager.getMagic(EasyMock.anyObject())).andReturn(Some(RecordBatch.MAGIC_VALUE_V1)).anyTimes() EasyMock.replay(replicaManager) groupCoordinator.handleSyncGroup(groupId, generation, leaderId, assignment, responseCallback) responseFuture } private def sendSyncGroupFollower(groupId: String, generation: Int, memberId: String): Future[SyncGroupCallbackParams] = { val (responseFuture, responseCallback) = setupSyncGroupCallback EasyMock.replay(replicaManager) groupCoordinator.handleSyncGroup(groupId, generation, memberId, Map.empty[String, Array[Byte]], responseCallback) responseFuture } private def joinGroup(groupId: String, memberId: String, protocolType: String, protocols: List[(String, Array[Byte])], sessionTimeout: Int = DefaultSessionTimeout, rebalanceTimeout: Int = DefaultRebalanceTimeout): JoinGroupResult = { val responseFuture = sendJoinGroup(groupId, memberId, protocolType, protocols, rebalanceTimeout, sessionTimeout) timer.advanceClock(GroupInitialRebalanceDelay + 1) // should only have to wait as long as session timeout, but 
allow some extra time in case of an unexpected delay Await.result(responseFuture, Duration(rebalanceTimeout + 100, TimeUnit.MILLISECONDS)) } private def syncGroupFollower(groupId: String, generationId: Int, memberId: String, sessionTimeout: Int = DefaultSessionTimeout): SyncGroupCallbackParams = { val responseFuture = sendSyncGroupFollower(groupId, generationId, memberId) Await.result(responseFuture, Duration(sessionTimeout + 100, TimeUnit.MILLISECONDS)) } private def syncGroupLeader(groupId: String, generationId: Int, memberId: String, assignment: Map[String, Array[Byte]], sessionTimeout: Int = DefaultSessionTimeout): SyncGroupCallbackParams = { val responseFuture = sendSyncGroupLeader(groupId, generationId, memberId, assignment) Await.result(responseFuture, Duration(sessionTimeout + 100, TimeUnit.MILLISECONDS)) } private def heartbeat(groupId: String, consumerId: String, generationId: Int): HeartbeatCallbackParams = { val (responseFuture, responseCallback) = setupHeartbeatCallback EasyMock.replay(replicaManager) groupCoordinator.handleHeartbeat(groupId, consumerId, generationId, responseCallback) Await.result(responseFuture, Duration(40, TimeUnit.MILLISECONDS)) } private def await[T](future: Future[T], millis: Long): T = { Await.result(future, Duration(millis, TimeUnit.MILLISECONDS)) } private def commitOffsets(groupId: String, consumerId: String, generationId: Int, offsets: immutable.Map[TopicPartition, OffsetAndMetadata]): CommitOffsetCallbackParams = { val (responseFuture, responseCallback) = setupCommitOffsetsCallback val capturedArgument: Capture[Map[TopicPartition, PartitionResponse] => Unit] = EasyMock.newCapture() EasyMock.expect(replicaManager.appendRecords(EasyMock.anyLong(), EasyMock.anyShort(), internalTopicsAllowed = EasyMock.eq(true), isFromClient = EasyMock.eq(false), EasyMock.anyObject().asInstanceOf[Map[TopicPartition, MemoryRecords]], EasyMock.capture(capturedArgument), EasyMock.anyObject().asInstanceOf[Option[ReentrantLock]], 
EasyMock.anyObject()) ).andAnswer(new IAnswer[Unit] { override def answer = capturedArgument.getValue.apply( Map(new TopicPartition(Topic.GROUP_METADATA_TOPIC_NAME, groupPartitionId) -> new PartitionResponse(Errors.NONE, 0L, RecordBatch.NO_TIMESTAMP, 0L) ) ) }) EasyMock.expect(replicaManager.getMagic(EasyMock.anyObject())).andReturn(Some(RecordBatch.MAGIC_VALUE_V1)).anyTimes() EasyMock.replay(replicaManager) groupCoordinator.handleCommitOffsets(groupId, consumerId, generationId, offsets, responseCallback) Await.result(responseFuture, Duration(40, TimeUnit.MILLISECONDS)) } private def commitTransactionalOffsets(groupId: String, producerId: Long, producerEpoch: Short, offsets: immutable.Map[TopicPartition, OffsetAndMetadata]): CommitOffsetCallbackParams = { val (responseFuture, responseCallback) = setupCommitOffsetsCallback val capturedArgument: Capture[Map[TopicPartition, PartitionResponse] => Unit] = EasyMock.newCapture() EasyMock.expect(replicaManager.appendRecords(EasyMock.anyLong(), EasyMock.anyShort(), internalTopicsAllowed = EasyMock.eq(true), isFromClient = EasyMock.eq(false), EasyMock.anyObject().asInstanceOf[Map[TopicPartition, MemoryRecords]], EasyMock.capture(capturedArgument), EasyMock.anyObject().asInstanceOf[Option[ReentrantLock]], EasyMock.anyObject()) ).andAnswer(new IAnswer[Unit] { override def answer = capturedArgument.getValue.apply( Map(new TopicPartition(Topic.GROUP_METADATA_TOPIC_NAME, groupCoordinator.partitionFor(groupId)) -> new PartitionResponse(Errors.NONE, 0L, RecordBatch.NO_TIMESTAMP, 0L) ) )}) EasyMock.expect(replicaManager.getMagic(EasyMock.anyObject())).andReturn(Some(RecordBatch.MAGIC_VALUE_V2)).anyTimes() EasyMock.replay(replicaManager) groupCoordinator.handleTxnCommitOffsets(groupId, producerId, producerEpoch, offsets, responseCallback) val result = Await.result(responseFuture, Duration(40, TimeUnit.MILLISECONDS)) EasyMock.reset(replicaManager) result } private def leaveGroup(groupId: String, consumerId: String): 
LeaveGroupCallbackParams = { val (responseFuture, responseCallback) = setupHeartbeatCallback EasyMock.expect(replicaManager.getPartition(new TopicPartition(Topic.GROUP_METADATA_TOPIC_NAME, groupPartitionId))).andReturn(None) EasyMock.expect(replicaManager.getMagic(EasyMock.anyObject())).andReturn(Some(RecordBatch.MAGIC_VALUE_V1)).anyTimes() EasyMock.replay(replicaManager) groupCoordinator.handleLeaveGroup(groupId, consumerId, responseCallback) Await.result(responseFuture, Duration(40, TimeUnit.MILLISECONDS)) } }
themarkypantz/kafka
core/src/test/scala/unit/kafka/coordinator/group/GroupCoordinatorTest.scala
Scala
apache-2.0
65,573
package constants

/**
  * Application-wide numeric constants.
  */
object Properties {
  // NOTE(review): presumably a "past N minutes" lookback window — confirm against callers.
  val PAST_MIN: Int = 5
}
jiancoding/stock-data
app/constants/Properties.scala
Scala
apache-2.0
60
package org.jetbrains.plugins.scala.failed.typeInference

import org.jetbrains.plugins.scala.DependencyManagerBase._
import org.jetbrains.plugins.scala.base.libraryLoaders.{IvyManagedLoader, LibraryLoader}
import org.jetbrains.plugins.scala.lang.typeInference.TypeInferenceTestBase

/**
  * Type-inference regression tests involving scalaz that are currently known to FAIL
  * (note the `failed` package and `shouldPass = false`): each test embeds a code
  * snippet with a `${START}...$END` marked expression and the expected inferred type
  * in a trailing `//` comment.
  *
  * @author mucianm
  * @since 28.03.16.
  */
class ScalaZTest extends TypeInferenceTestBase {

  // These tests document known inference failures; they are expected NOT to pass.
  override protected def shouldPass: Boolean = false

  // Resolve scalaz-core via Ivy so the snippets below compile against real scalaz types.
  override protected def librariesLoaders: Seq[LibraryLoader] =
    super.librariesLoaders :+ IvyManagedLoader("org.scalaz" %% "scalaz-core" % "7.1.0")

  // SCL-5706: `sequence` over a ValidationNel type alias.
  def testSCL5706(): Unit = {
    doTest(
      s"""
         |import scalaz._, Scalaz._
         |object Application {
         |  type Va[+A] = ValidationNel[String, A]
         |
         |  def v[A](field: String, validations: Va[A]*): (String, Va[List[A]]) = {
         |    (field, ${START}validations.toList.sequence$END)
         |  }
         |}
         |
         |
         |//Va[List[A]]
      """.stripMargin
    )
  }

  // SCL-6096: `sequenceU` with an overloaded `foo`; needs the indexedSeqInstance import.
  def testSCL6096(): Unit = {
    doTest(
      s"""
         |import scalaz.Lens.lensg
         |import scalaz.State
         |import scalaz.syntax.traverse.ToTraverseOps
         |import scalaz.std.indexedSeq.indexedSeqInstance
         |// this "unused import" is required! ^^^
         |
         |case class X(y: Int)
         |
         |def foo(x: X):String = x.toString
         |def foo(x: Int): String = "ok"
         |def sequenced(x: X, s: State[X,Any]*) =
         |  foo(${START}s.toIndexedSeq.sequenceU.exec(x)$END)
         |//Int
      """.stripMargin)
  }

  // SCL-9762: type of a `kleisliU` application.
  def testSCL9762(): Unit = {
    doTest(
      s"""
         |import scalaz._
         |import Scalaz._
         |import Kleisli._
         |
         |object BadKleisli {
         |  val k : Kleisli[Option, Int, String] = ${START}kleisliU ((i: Int) => i.toString.some )$END
         |}
         |//Kleisli[Option, Int, String]
      """.stripMargin)
  }

  // SCL-9989: parameter type inside `traverseM` within a for-comprehension over Future.
  def testSCL9989(): Unit = {
    doTest(
      s"""
         |import scala.concurrent.ExecutionContext.Implicits.global
         |import scala.concurrent.Future
         |import scalaz.Scalaz._
         |
         |class Comparison {
         |
         |  def function1(inputParam: String): Future[Option[String]] = ???
         |
         |  def function2(inputParam: String): Future[Option[String]] = ???
         |
         |  def allTogetherWithTraverseM: Future[Option[String]] = {
         |    for {
         |      res1 <- function1("input")
         |      res2 <- res1.traverseM(param => function2(${START}param$END))
         |    } yield res2
         |  }
         |}
         |//_G[Option[_B]]
      """.stripMargin)
  }
}
JetBrains/intellij-scala
scala/scala-impl/test/org/jetbrains/plugins/scala/failed/typeInference/ScalaZTest.scala
Scala
apache-2.0
2,696
package org.podval.families.template.split.instance

import org.podval.families.template.split.family.T1Base

// Instance-side specialization of T1Base in the split-template experiment.
// NOTE(review): presumably a minimal scaffold exploring type projections
// (Instance#T1) — confirm intent against T1Base.
abstract class T1Instance extends T1Base[Instance] {
  // Intentionally unimplemented: `???` throws NotImplementedError if invoked.
  final override def add(t2: Instance#T1): Instance#T1 = ???
}
dubinsky/podval-families
src/main/scala/org/podval/families/template/split/instance/T1Instance.scala
Scala
apache-2.0
226
package com.scalaAsm.x86
package Instructions
package General

// Description: Make Stack Frame for Procedure Parameters
// Category: general/stack

// Marker trait naming the ENTER instruction for the instruction framework.
trait ENTER extends InstructionDefinition {
  val mnemonic = "ENTER"
}

// Entry point: ENTER is a two-operand instruction backed by the impl trait below.
object ENTER extends TwoOperands[ENTER] with ENTERImpl

trait ENTERImpl extends ENTER {
  // Encoding: ENTER imm16, imm8 — single-byte opcode 0xC8.
  implicit object _0 extends TwoOp[imm16, imm8] {
    val opcode: OneOpcode = 0xC8
    // NOTE(review): `format` is left null here — presumably supplied or ignored by the
    // surrounding framework for this instruction; confirm against sibling generated files.
    val format = null
    override def hasImplicitOperand = true
  }
}
bdwashbu/scala-x86-inst
src/main/scala/com/scalaAsm/x86/Instructions/General/ENTER.scala
Scala
apache-2.0
463
package com.ubirch.chain.config

import com.ubirch.util.config.ConfigBase

/**
  * Typed accessors for the chain-service configuration. Every method delegates to the
  * `config` object provided by [[ConfigBase]] using keys declared in `ChainConfigKeys`.
  * All lookups are strict: a missing key raises at call time.
  *
  * author: cvandrei
  * since: 2017-01-19
  */
object ChainConfig extends ConfigBase {

  /**
    * The interface the server runs on.
    *
    * @return interface
    */
  def interface: String = config.getString(ChainConfigKeys.INTERFACE)

  /**
    * Port the server listens on.
    *
    * @return port number
    */
  def port: Int = config.getInt(ChainConfigKeys.PORT)

  // GoCD build-pipeline metadata (name/label/revision of the deployed build).
  def goPipelineName: String = config.getString(ChainConfigKeys.GO_PIPELINE_NAME)

  def goPipelineLabel: String = config.getString(ChainConfigKeys.GO_PIPELINE_LABEL)

  def goPipelineRevision: String = config.getString(ChainConfigKeys.GO_PIPELINE_REVISION)

  /*
   * Akka Related
   ************************************************************************************************/

  /**
    * Default actor timeout.
    *
    * @return timeout in seconds
    */
  def actorTimeout: Int = config.getInt(ChainConfigKeys.ACTOR_TIMEOUT)

  // Number of worker actors to spawn.
  def akkaNumberOfWorkers: Int = config.getInt(ChainConfigKeys.AKKA_NUMBER_OF_WORKERS)

  /*
   * AWS Related
   ************************************************************************************************/

  def awsAccessKey: String = config.getString(ChainConfigKeys.AWS_ACCESS_KEY)

  def awsSecretAccessKey: String = config.getString(ChainConfigKeys.AWS_SECRET_ACCESS_KEY)

  def awsRegion: String = config.getString(ChainConfigKeys.AWS_REGION)

  def awsQueueOwnerId: String = config.getString(ChainConfigKeys.AWS_QUEUE_OWNER_ID)

  // Upper bound of messages fetched per SQS poll.
  def awsSqsMaxMessagesPerPoll: Int = config.getInt(ChainConfigKeys.AWS_SQS_MAX_MESSAGES_PER_POLL)

  // SQS queue names (device data in, device data hashes in, BigchainDB in).
  def awsSqsQueueDeviceDataIn: String = config.getString(ChainConfigKeys.AWS_SQS_QUEUE_DEVICE_DATA_IN)

  def awsSqsQueueDeviceDataHashIn: String = config.getString(ChainConfigKeys.AWS_SQS_QUEUE_DEVICE_DATA_HASH_IN)

  def awsSqsQueueBigchainDbIn: String = config.getString(ChainConfigKeys.AWS_SQS_QUEUE_BIGCHAIN_DB_IN)

  /*
   * Blockchain Anchoring Related
   ************************************************************************************************/

  /**
    * @return true if anchoring is enabled (which chains depends on Notary Service)
    */
  def anchorEnabled: Boolean = config.getBoolean(ChainConfigKeys.ANCHOR_ENABLED)

  /**
    * @return interval (in seconds) between two anchors
    */
  def anchorInterval: Int = config.getInt(ChainConfigKeys.ANCHOR_INTERVAL)

  /**
    * @return during boot [AnchorActor] is being started with this many seconds delay
    */
  def anchorSchedulerOffset: Int = config.getInt(ChainConfigKeys.ANCHOR_SCHEDULER_OFFSET)

  /*
   * Mongo Related (BigchainDb)
   ************************************************************************************************/

  def mongoBigchainCollectionBigchain: String = config.getString(ChainConfigKeys.BIGCHAIN_COLLECTION_BIGCHAIN)

  /*
   * Mongo Related (Chain Service)
   ************************************************************************************************/

  def mongoChainServiceCollectionAnchors: String = config.getString(ChainConfigKeys.CHAIN_SERVICE_COLLECTION_ANCHORS)
}
ubirch/ubirch-chain-service
config/src/main/scala/com/ubirch/chain/config/ChainConfig.scala
Scala
apache-2.0
3,124
package org.mbari.kdtree

import scala.annotation.tailrec

/**
 * Strategy for ordering a multi-dimensional type along a given dimension.
 *
 * @author Brian Schlining
 * @since 2014-09-26T10:57:00
 */
trait DimensionalOrdering[A] {

  /**
   * @return The number of dimensions
   */
  def dimensions: Int

  /**
   * Compare two values along a single dimension.
   *
   * @param x         1st value to compare
   * @param y         2nd value to compare
   * @param dimension the dimension to compare on
   * @return negative if x' < y', positive if x' > y' and zero if x' == y'
   */
  def compareByDimension(x: A, y: A, dimension: Int): Int

  /**
   * An Ordering of A whose primary criterion is the given dimension. When x and y
   * project equally onto that dimension, they are compared on the lowest-numbered
   * dimension on which they differ.
   *
   * @param dimension the primary dimension to compare
   * @return an ordering for A keyed on the given dimension
   */
  def orderingBy(dimension: Int): Ordering[A] = new Ordering[A] {

    override def compare(x: A, y: A): Int = {
      val primary = compareByDimension(x, y, dimension)
      // Equal on the primary dimension: fall back to scanning all dimensions in order.
      if (primary != 0) primary else firstDifference(x, y)
    }

    // Comparison result on the lowest dimension where x and y differ, or 0 if
    // they agree on every dimension.
    private def firstDifference(x: A, y: A): Int =
      (0 until dimensions).iterator
        .map(d => compareByDimension(x, y, d))
        .find(_ != 0)
        .getOrElse(0)
  }
}

object DimensionalOrdering {

  /**
   * Build a DimensionalOrdering for any Product (e.g. a tuple) whose `dim`
   * components are all of type A, comparing component-wise with `ord`.
   */
  def dimensionalOrderingForTuple[T <: Product, A](dim: Int)(implicit ord: Ordering[A]) =
    new DimensionalOrdering[T] {
      val dimensions = dim
      def compareByDimension(x: T, y: T, d: Int) =
        ord.compare(x.productElement(d).asInstanceOf[A], y.productElement(d).asInstanceOf[A])
    }

  // Implicit instances for homogeneous tuples of arity 2 through 5.
  implicit def dimensionalOrderingForTuple2[A](implicit ord: Ordering[A]) =
    dimensionalOrderingForTuple[(A, A), A](2)

  implicit def dimensionalOrderingForTuple3[A](implicit ord: Ordering[A]) =
    dimensionalOrderingForTuple[(A, A, A), A](3)

  implicit def dimensionalOrderingForTuple4[A](implicit ord: Ordering[A]) =
    dimensionalOrderingForTuple[(A, A, A, A), A](4)

  implicit def dimensionalOrderingForTuple5[A](implicit ord: Ordering[A]) =
    dimensionalOrderingForTuple[(A, A, A, A, A), A](5)
}
hohonuuli/opencv-imgofinterest
src/main/scala/org/mbari/kdtree/DimensionalOrdering.scala
Scala
mit
2,489
package de.uni_potsdam.hpi.coheel.programs

import java.lang.Iterable
import de.uni_potsdam.hpi.coheel.io.OutputFiles._
import de.uni_potsdam.hpi.coheel.ml.CoheelClassifier.POS_TAG_GROUPS
import de.uni_potsdam.hpi.coheel.programs.DataClasses._
import de.uni_potsdam.hpi.coheel.util.Util
import de.uni_potsdam.hpi.coheel.wiki.FullInfoWikiPage
import org.apache.flink.api.common.functions.{BroadcastVariableInitializer, RichGroupReduceFunction}
import org.apache.flink.api.scala.{ExecutionEnvironment, _}
import org.apache.flink.configuration.Configuration
import org.apache.flink.core.fs.FileSystem
import org.apache.flink.util.Collector

import scala.collection.JavaConverters._
import scala.collection.mutable

/**
 * Creates the training data from wikipedia.
 *
 * This needs the trie files in two halves under the paths specified in the
 * configuration. bin/prepare-tries.sh can be used to help with downloading
 * the files from hdfs and create two parts, which can then be uploaded
 * manually to the nodes.
 */
class TrainingDataProgram extends CoheelProgram[TrieSelectionStrategy] with Serializable {

  // A page is sampled iff hash(title) % SAMPLE_FRACTION == SAMPLE_NUMBER.
  val SAMPLE_FRACTION = if (runsOffline()) 100 else 5000
//  val SAMPLE_NUMBER = if (runsOffline()) 0 else 632
  val SAMPLE_NUMBER = if (runsOffline()) 0 else 3786

  override def getDescription = "Wikipedia Extraction: Build training data"

  // Offline: single local trie file; cluster: the two configured trie halves.
  def arguments = if (runsOffline()) List(new OneTrieEverywhereStrategy("output/surface-link-probs.wiki"))
    else List(
      new OneTrieEverywhereStrategy(params.config.getString("first_trie_half")),
      new OneTrieEverywhereStrategy(params.config.getString("second_trie_half"))
    )

  /**
   * Assembles the Flink pipeline: sample pages, extract links and trie hits as
   * classifiables, build per-group features, and write the training data.
   */
  override def buildProgram(env: ExecutionEnvironment, trieSelector: TrieSelectionStrategy): Unit = {
    val trieFileName = trieSelector.getTrieFile.getName
    // Deterministic sampling of wiki pages by title hash.
    val wikiPages = readWikiPagesWithFullInfoUnstemmed { pageTitle =>
      Math.abs(pageTitle.hashCode) % SAMPLE_FRACTION == SAMPLE_NUMBER
    }

    // Record which page titles were sampled for this run.
    wikiPages.map { wikiPage =>
      wikiPage.pageTitle
    }.writeAsText(trainingDataPagesPath + s"-$SAMPLE_NUMBER-$trieFileName.wiki", FileSystem.WriteMode.OVERWRITE)

    // For each page, the set of link destinations it contains (broadcast below).
    val linkDestinationsPerEntity = wikiPages.map { wp =>
      LinkDestinations(wp.pageTitle, wp.links.values.map { l => l.destination }.toSet)
    }

    // Classifiables come from both actual links and trie hits (possible links).
    val classifiables = wikiPages
      .flatMap(new LinksAsTrainingDataFlatMap(trieSelector))
      .name("Links and possible links")

    classifiables.map { c =>
      val posTags = c.info.posTags
      (c.id, c.surfaceRepr, c.surfaceLinkProb, c.info.source, c.info.destination, s"PosTags(${posTags.mkString(", ")})", c.context.deep)
    }.writeAsTsv(trainingDataClassifiablesPath + s"-$SAMPLE_NUMBER-$trieFileName.wiki")

    // Fill classifiables with candidates, surface probs and context probs
    val featuresPerGroup = FeatureHelper.buildFeaturesPerGroup(env, classifiables)

    val trainingData = featuresPerGroup
      .reduceGroup(new TrainingDataGroupReduce(TrainingDataStrategies.REMOVE_CANDIDATE_ONLY))
      .withBroadcastSet(linkDestinationsPerEntity, TrainingDataGroupReduce.BROADCAST_LINK_DESTINATIONS_PER_ENTITY)
      .name("Training Data")

    trainingData.writeAsText(trainingDataPath + s"-$SAMPLE_NUMBER-$trieFileName.wiki", FileSystem.WriteMode.OVERWRITE)
  }
}

// Turns the broadcast LinkDestinations records into an entity -> destinations map.
class LinkDestinationsInitializer extends BroadcastVariableInitializer[LinkDestinations, mutable.Map[String, Set[String]]] {
  override def initializeBroadcastVariable(destinations: Iterable[LinkDestinations]): mutable.Map[String, Set[String]] = {
    val destinationsMap = mutable.Map[String, Set[String]]()
    destinations.asScala.foreach { dest =>
      destinationsMap += dest.entity -> dest.destinations
    }
    destinationsMap
  }
}

object TrainingDataGroupReduce {
  // Name of the broadcast variable carrying link destinations per source entity.
  val BROADCAST_LINK_DESTINATIONS_PER_ENTITY = "linkDestinationsPerEntity"
}

/**
 * This creates the training data from the given grouped classifiables by applying
 * the second order functions.
 *
 * It also decides, which classifiables should be output at all.
 */
class TrainingDataGroupReduce(trainingDataStrategy: TrainingDataStrategy) extends RichGroupReduceFunction[Classifiable[TrainInfo], String] {
  import CoheelLogger._

  // entity -> set of destinations linked from that entity (from broadcast variable).
  var linkDestinationsPerEntity: mutable.Map[String, Set[String]] = _

  override def open(params: Configuration): Unit = {
    linkDestinationsPerEntity = getRuntimeContext.getBroadcastVariableWithInitializer(
      TrainingDataGroupReduce.BROADCAST_LINK_DESTINATIONS_PER_ENTITY, new LinkDestinationsInitializer)
  }

  /**
   * @param candidatesIt All link candidates with scores (all Classifiable's have the same id).
   */
  override def reduce(candidatesIt: Iterable[Classifiable[TrainInfo]], out: Collector[String]): Unit = {
    val allCandidates = candidatesIt.asScala.toSeq
    // get all the link destinations from the source entity of this classifiable
    // remember, all classifiables come from the same link/trie hit, hence it is ok to
    // only access the head
    val linkDestinations = if (allCandidates.head.isTrieHit)
      linkDestinationsPerEntity(allCandidates.head.info.source)
    else
      Set[String]()

    // This variable is necessary for the REMOVE_ENTIRE_GROUP training strategy
    // It tracks, whether at least one candidate is linked from the current page
    var containsCandidateFromLinks = false
    val featureLines = new mutable.ArrayBuffer[FeatureLine[TrainInfo]](allCandidates.size)
    FeatureHelper.applyCoheelFunctions(allCandidates) { featureLine =>
      featureLines += featureLine
      if (linkDestinations.nonEmpty && !containsCandidateFromLinks) {
        containsCandidateFromLinks = linkDestinations.contains(featureLine.candidateEntity)
      }
    }
    // Output the group unless the strategy says to drop it entirely because one of
    // its candidates is already linked from the page.
    if ((trainingDataStrategy == TrainingDataStrategies.REMOVE_CANDIDATE_ONLY) ||
        (trainingDataStrategy == TrainingDataStrategies.REMOVE_ENTIRE_GROUP && !containsCandidateFromLinks)) {
      featureLines.foreach { featureLine =>
        import featureLine._
        def stringInfo = List(id, surfaceRepr, candidateEntity) ::: featureLine.info.modelInfo
        val output = s"${stringInfo.mkString("\t")}\t${featureLine.features.mkString("\t")}"
        // Filter out feature lines with a candidate entity, which is also a link in the source.
        // Taking care, that not all links are filtered out (not the original), i.e. only do this for trie hits
        if (id.startsWith(s"${FeatureHelper.TRIE_HIT_MARKER}-")) {
          // This classifiable/feature line came from a trie hit, we might want to remove it from the
          // training data set:
          // Remove the trie hit, if the candidate entity is linked from the current article.
          // Reasoning: Say, an article contains Angela Merkel as a link. Later, it is referred to as
          // the "merkel" with no link. It would be wrong to learn, that this should not be linked, because
          // it is probably only not linked, because it was already linked in the article.
          if (!linkDestinations.contains(featureLine.candidateEntity))
            out.collect(output)
          else {
            log.info(s"Do not output surface `${featureLine.surfaceRepr}` with candidate '${featureLine.candidateEntity}' from ${featureLine.info.modelInfo}")
          }
        } else {
          // we have a link, just output it
          out.collect(output)
        }
      }
    }
  }
}

// Emits a Classifiable for every actual link and every trie hit that is not a link.
class LinksAsTrainingDataFlatMap(trieSelector: TrieSelectionStrategy)
  extends ReadTrieFromDiskFlatMap[FullInfoWikiPage, Classifiable[TrainInfo]](trieSelector) {

  // Counters for the summary logged in close().
  var nrLinks = 0
  var nrLinksFiltered = 0
  var outputtedTrieHits = 0
  var outputtedLinks = 0

  import CoheelLogger._

  override def flatMap(wikiPage: FullInfoWikiPage, out: Collector[Classifiable[TrainInfo]]): Unit = {
    assert(wikiPage.tags.size == wikiPage.plainText.size)
    val linksWithPositions = wikiPage.links

    val trieHits = trie.findAllInWithTrieHit(wikiPage.plainText).toList

    // Trie hits that do NOT coincide with an actual link become "possible link" classifiables.
    trieHits/*.groupBy { th => th.startIndex }.map { ths => ths._2.maxBy { th => th.length } }*/.foreach { trieHit =>
      if (!linksWithPositions.contains(trieHit.startIndex)) {
        val contextOption = Util.extractContext(wikiPage.plainText, trieHit.startIndex)
        contextOption.foreach { context =>
          val tags = wikiPage.tags.slice(trieHit.startIndex, trieHit.startIndex + trieHit.length).toArray
          outputtedTrieHits += 1
          out.collect(Classifiable[TrainInfo](
            // TH for trie hit
            s"${FeatureHelper.TRIE_HIT_MARKER}-${Util.id(wikiPage.pageTitle)}-${trieHit.startIndex}-${trieHit.length}",
            trieHit.s,
            context.toArray,
            surfaceLinkProb = trieHit.prob,
            info = TrainInfo(wikiPage.pageTitle, destination = "",
              POS_TAG_GROUPS.map { group => if (group.exists(tags.contains(_))) 1.0 else 0.0 })))
        }
      } else {
        log.info(s"Ignoring trie hit $trieHit because it stems from link ${linksWithPositions(trieHit.startIndex)}")
      }
    }

    linksWithPositions.foreach { case (index, link) =>
      // In theory, the index of the link should be in the set of indices proposed by the trie:
      // assert(hitPoints.contains(index))
      // After all, if this link was found in the first phase, its surface should be in the trie now.
      // The problem, however, is the different tokenization: When tokenizing link text, we only tokenize
      // the small text of the link, while plain text tokenization works on the entire text
      // This tokenization is sometimes different, see the following example:
      // println(TokenizerHelper.tokenize("Robert V.").mkString(" ")) --> robert v.
      // println(TokenizerHelper.tokenize("Robert V. The Legacy").mkString(" ")) --> robert v the legaci (dot missing)
      //
      // This could be solved by taking the link tokenization directly from the plain text, however, this would
      // require quite a bit of rewriting.
      val contextOption = Util.extractContext(wikiPage.plainText, index)
      val containsResult = trie.contains(link.surfaceRepr)
      nrLinks += 1
      if (containsResult.asEntry) {
        contextOption.foreach { context =>
          outputtedLinks += 1
          out.collect(
            Classifiable[TrainInfo](
              link.id,
              link.surfaceRepr,
              context.toArray,
              surfaceLinkProb = containsResult.prob,
              info = TrainInfo(link.source, link.destination,
                POS_TAG_GROUPS.map { group => if (group.exists(link.posTags.contains(_))) 1.0 else 0.0 })))
        }
      } else {
        nrLinksFiltered += 1
      }
    }
  }

  override def close(): Unit = {
    log.info(s"LinksAsTrainingDataFlatMap summary: # Links/(# Links + # TrieHits) = ${outputtedLinks.toDouble * 100 / (outputtedLinks + outputtedTrieHits)} %")
    log.info(s"LinksAsTrainingDataFlatMap summary: # Links filtered/# Links = $nrLinksFiltered/$nrLinks = ${nrLinksFiltered.toDouble * 100 / nrLinks} %")
  }
}
stratosphere/coheel
src/main/scala/de/uni_potsdam/hpi/coheel/programs/TrainingDataProgram.scala
Scala
apache-2.0
10,551
package extracells.client

import cpw.mods.fml.common.eventhandler.SubscribeEvent
import extracells.common.CommonProxy
import net.minecraftforge.client.event.TextureStitchEvent
import net.minecraftforge.common.MinecraftForge

// Client-side proxy: the CommonProxy variant used when running with a graphical client.
class ClientProxy extends CommonProxy {

  // Register on the Forge event bus so the @SubscribeEvent handler below is invoked.
  MinecraftForge.EVENT_BUS.register(this)

  // Placeholder for client-only renderer registration; currently a no-op.
  override def registerRenderers() {
  }

  // Texture-stitch hook (pre-stitch); currently a no-op placeholder.
  @SubscribeEvent
  def registerTextures(e: TextureStitchEvent.Pre) {
  }

  override def isClient = true

  override def isServer = false
}
ruifung/ExtraCells2
src/main/scala/extracells/client/ClientProxy.scala
Scala
mit
493
val nodupes = Map(1 -> "a", 2-> "b", 3 -> "c") /*start*/nodupes map {_.swap}/*end*/ //Map[String, Int]
ilinum/intellij-scala
testdata/typeInference/bugs/SCL1823.scala
Scala
apache-2.0
102
package com.github.shadowsocks.acl

import java.io.{File, FileNotFoundException, IOException}

import com.github.shadowsocks.ShadowsocksApplication.app
import com.github.shadowsocks.utils.IOUtils
import com.j256.ormlite.field.DatabaseField

import scala.collection.mutable
import scala.io.Source

/**
 * ACL handler compliant with: src/main/jni/shadowsocks-libev/src/acl.c
 *
 * OrmLite integration is unused for now.
 *
 * @author Mygod
 */
class Acl {
  @DatabaseField(generatedId = true)
  var id: Int = _

  // Sorted rule lists backing the ACL. NOTE(review): mutable.SortedList is a
  // project-local collection, not part of the standard library.
  val bypassHostnames = new mutable.SortedList[String]()
  val proxyHostnames = new mutable.SortedList[String]()
  val subnets = new mutable.SortedList[Subnet]()
  val urls = new mutable.SortedList[String]()

  @DatabaseField
  var bypass: Boolean = _

  // Newline-joined accessors used by the OrmLite string columns below.
  def getBypassHostnamesString: String = bypassHostnames.mkString("\\n")
  def getProxyHostnamesString: String = proxyHostnames.mkString("\\n")
  def getSubnetsString: String = subnets.mkString("\\n")

  def setBypassHostnamesString(value: String) {
    bypassHostnames.clear()
    bypassHostnames ++= value.split("\\n")
  }
  def setProxyHostnamesString(value: String) {
    proxyHostnames.clear()
    proxyHostnames ++= value.split("\\n")
  }
  def setSubnetsString(value: String) {
    subnets.clear()
    subnets ++= value.split("\\n").map(Subnet.fromString)
  }
  def setUrlRules(value: String) {
    urls.clear()
    urls ++= value.split("\\n")
  }

  /** Replaces every rule list (and the bypass flag) with `other`'s content. */
  def fromAcl(other: Acl): Acl = {
    bypassHostnames.clear()
    bypassHostnames ++= other.bypassHostnames
    proxyHostnames.clear()
    proxyHostnames ++= other.proxyHostnames
    subnets.clear()
    subnets ++= other.subnets
    urls.clear()
    urls ++= other.urls
    bypass = other.bypass
    this
  }

  /**
   * Parses an ACL file into this instance. Section headers such as
   * [bypass_list]/[proxy_list] switch which lists subsequent entries go into;
   * #URLS_BEGIN/#URLS_END delimit a block of URL rules hidden in comments.
   */
  def fromSource(value: Source, defaultBypass: Boolean = true): Acl = {
    bypassHostnames.clear()
    proxyHostnames.clear()
    this.subnets.clear()
    this.urls.clear()
    bypass = defaultBypass
    lazy val bypassSubnets = new mutable.SortedList[Subnet]()
    lazy val proxySubnets = new mutable.SortedList[Subnet]()
    // Current target lists; re-pointed whenever a section header is seen.
    var hostnames: mutable.SortedList[String] = if (defaultBypass) proxyHostnames else bypassHostnames
    var subnets: mutable.SortedList[Subnet] = if (defaultBypass) proxySubnets else bypassSubnets
    var in_urls = false
    for (line <- value.getLines()) (line.indexOf('#') match {
      case -1 => if (!in_urls) line else ""
      case index => {
        line.indexOf("URLS_BEGIN") match {
          case -1 =>
          case index => in_urls = true
        }
        line.indexOf("URLS_END") match {
          case -1 =>
          case index => in_urls = false
        }
        "" // ignore any comment lines
      }
    }).trim match {
      case "[outbound_block_list]" =>
        hostnames = null
        subnets = null
      case "[black_list]" | "[bypass_list]" =>
        hostnames = bypassHostnames
        subnets = bypassSubnets
      case "[white_list]" | "[proxy_list]" =>
        hostnames = proxyHostnames
        subnets = proxySubnets
      case "[reject_all]" | "[bypass_all]" => bypass = true
      case "[accept_all]" | "[proxy_all]" => bypass = false
      case input if subnets != null && input.nonEmpty =>
        // Entries that fail to parse as a subnet fall back to hostname rules
        // (and additionally URL rules when they look like URLs).
        try subnets += Subnet.fromString(input) catch {
          case _: IllegalArgumentException =>
            if (input.startsWith("http://") || input.startsWith("https://")) {
              urls += input
            }
            hostnames += input
        }
      case _ =>
    }
    this.subnets ++= (if (bypass) proxySubnets else bypassSubnets)
    this
  }

  final def fromId(id: String): Acl = fromSource(Source.fromFile(Acl.getFile(id)))

  /**
   * Serializes this ACL back to its textual form; when `network` is true,
   * each URL rule's remote content is fetched and inlined.
   */
  def getAclString(network: Boolean): String = {
    val result = new StringBuilder()
    if (urls.nonEmpty) {
      result.append("#URLS_BEGIN\\n")
      result.append(urls.mkString("\\n"))
      if (network) {
        try {
          urls.foreach((url: String) => result.append(Source.fromURL(url).mkString))
        } catch {
          case e: IOException => // ignore
        }
      }
      result.append("#URLS_END\\n")
    }
    if (result.isEmpty) {
      result.append(if (bypass) "[bypass_all]\\n" else "[proxy_all]\\n")
    }
    val (bypassList, proxyList) =
      if (bypass) (bypassHostnames.toStream, subnets.toStream.map(_.toString) #::: proxyHostnames.toStream)
      else (subnets.toStream.map(_.toString) #::: bypassHostnames.toStream, proxyHostnames.toStream)
    if (bypassList.nonEmpty) {
      result.append("[bypass_list]\\n")
      result.append(bypassList.mkString("\\n"))
      result.append('\\n')
    }
    if (proxyList.nonEmpty) {
      result.append("[proxy_list]\\n")
      result.append(proxyList.mkString("\\n"))
      result.append('\\n')
    }
    result.toString
  }

  override def toString: String = {
    getAclString(false)
  }

  def isValidCustomRules: Boolean = bypass && bypassHostnames.isEmpty

  // Don't change: dummy fields for OrmLite interaction
  // noinspection ScalaUnusedSymbol
  @DatabaseField(useGetSet = true)
  private val bypassHostnamesString: String = null
  // noinspection ScalaUnusedSymbol
  @DatabaseField(useGetSet = true)
  private val proxyHostnamesString: String = null
  // noinspection ScalaUnusedSymbol
  @DatabaseField(useGetSet = true)
  private val subnetsString: String = null
}

object Acl {
  // Well-known ACL profile identifiers (each maps to a "<id>.acl" file).
  final val ALL = "all"
  final val BYPASS_LAN = "bypass-lan"
  final val BYPASS_CHN = "bypass-china"
  final val BYPASS_LAN_CHN = "bypass-lan-china"
  final val GFWLIST = "gfwlist"
  final val CHINALIST = "china-list"
  final val CUSTOM_RULES = "custom-rules"

  def getFile(id: String) = new File(app.getFilesDir, id + ".acl")

  /** Loads the custom-rules profile; returns an empty Acl if no file exists. */
  def customRules: Acl = {
    val acl = new Acl()
    try acl.fromId(CUSTOM_RULES) catch {
      case _: FileNotFoundException =>
    }
    acl
  }

  def save(id: String, acl: Acl, network: Boolean = false): Unit = {
    IOUtils.writeString(getFile(id), acl.getAclString(network))
  }
}
hangim/shadowsocks-android
mobile/src/main/scala/com/github/shadowsocks/acl/Acl.scala
Scala
gpl-3.0
5,876
package com.github.wakfudecrypt.types.data

import com.github.wakfudecrypt._

// Game-data record for a nation law, decoded from the binary data files.
// NOTE(review): @BinaryDecoder presumably derives the decoder from declared
// field order — do not reorder fields; confirm against the macro.
@BinaryDecoder
case class NationLaw(
  id: Int,
  lawConstantId: Int,
  params: Array[String],
  basePointsModification: Int,
  percentPointsModification: Int,
  lawPointCost: Int,
  lawLocked: Boolean,
  applicableToCitizen: Boolean,
  applicableToAlliedForeigner: Boolean,
  applicableToNeutralForeigner: Boolean,
  restrictedNations: Array[Int]
)

object NationLaw extends BinaryDataCompanion[NationLaw] {
  // Type tag identifying NationLaw records in the binary stream.
  override val dataId = 52
}
jac3km4/wakfudecrypt
types/src/main/scala/com/github/wakfudecrypt/types/data/NationLaw.scala
Scala
mit
516
package json2caseclass.implementation

import json2caseclass.model.CaseClass._

/** Produces case-class names that do not collide with names already in use. */
object CaseClassNameGenerator {

  /**
   * Returns `name` itself if it is free, otherwise the first of `name1`,
   * `name2`, … not contained in `takenClassNames`. The candidate stream is
   * lazy, so only as many suffixed names are built as are actually inspected.
   */
  def makeUnique(takenClassNames: Set[String], name: ClassName): Option[ClassName] = {
    val candidates: Stream[String] = s"$name" #:: Stream.from(1).map(n => s"$name$n")
    candidates
      .find(candidate => !takenClassNames(candidate))
      .map(_.toClassName)
  }
}
battermann/sbt-json
src/main/scala/json2caseclass/implementation/CaseClassNameGenerator.scala
Scala
mit
454
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.scala

import org.apache.camel.Message
import java.util.Map
import java.lang.{Class, String}
import javax.activation.DataHandler

/**
 * Scala-friendly wrapper around a Camel [[Message]]: every operation is
 * delegated to the wrapped `message`.
 */
class RichMessage(val message: Message) extends Message {

  // Delegate methods
  //-------------------------------------------------------------------------

  def setMessageId(messageId: String) { message.setMessageId(messageId) }

  def setHeaders(headers: Map[String, Object]) { message.setHeaders(headers) }

  def setHeader(name: String, value: Any) { message.setHeader(name, value) }

  def setFault(fault: Boolean) { message.setFault(fault) }

  def setBody[T](body: Any, bodyType : Class[T]) { message.setBody(body, bodyType) }

  // Fix: previously called message.setBody() without arguments, silently
  // discarding the supplied body.
  def setBody(body: Any) { message.setBody(body) }

  def setAttachments(attachments: Map[String, DataHandler]) { message.setAttachments(attachments) }

  def removeHeader(name: String) = message.removeHeader(name)

  def removeHeaders(pattern: String) = message.removeHeaders(pattern)

  def removeHeaders(pattern: String, excludePatterns: String*) = message.removeHeaders(pattern, excludePatterns: _*)

  def removeAttachment(id: String) { message.removeAttachment(id) }

  def isFault = message.isFault

  def hasHeaders = message.hasHeaders

  def hasAttachments = message.hasAttachments

  def getMessageId = message.getMessageId

  def getMandatoryBody[T](bodyType : Class[T]) = message.getMandatoryBody(bodyType)

  def getMandatoryBody = message.getMandatoryBody

  def getHeaders = message.getHeaders

  def getHeader[T](name: String, headerType : Class[T]) = message.getHeader(name, headerType)

  def getHeader[T](name: String, defaultValue: Any, headerType : Class[T]) = message.getHeader(name, defaultValue, headerType)

  def getHeader(name: String, defaultValue: Any) = message.getHeader(name, defaultValue)

  def getHeader(name: String) = message.getHeader(name)

  def getExchange = message.getExchange

  def getBody[T](bodyType : Class[T]) = message.getBody(bodyType)

  def getBody = message.getBody

  def getAttachments = message.getAttachments

  def getAttachmentNames = message.getAttachmentNames

  def getAttachment(id: String) = message.getAttachment(id)

  @Deprecated
  def createExchangeId = message.createExchangeId

  // Fix: the parameter shadowed the wrapped `message` field, so the previous
  // body copied the argument onto itself (a no-op). Copy into the wrapped
  // message instead. The parameter name is kept for interface compatibility.
  def copyFrom(message: Message) { this.message.copyFrom(message) }

  def copy = new RichMessage(message.copy)

  def addAttachment(id: String, content: DataHandler) { message.addAttachment(id, content) }
}
shuliangtao/apache-camel-2.13.0-src
components/camel-scala/src/main/scala/org/apache/camel/scala/RichMessage.scala
Scala
apache-2.0
3,243
/*
 * Copyright: Copyright (C) 2015, Jaguar Land Rover
 * License: MPL-2.0
 */
package org.genivi.sota.resolver.db

import java.util.UUID

import akka.stream.ActorMaterializer
import eu.timepit.refined.api.Refined
import eu.timepit.refined.refineV
import eu.timepit.refined.string.Regex
import org.genivi.sota.common.DeviceRegistry
import org.genivi.sota.data.{Namespace, PackageId, Uuid}
import org.genivi.sota.db.Operators._
import org.genivi.sota.refined.PackageIdDatabaseConversions.LiftedPackageId
import org.genivi.sota.resolver.common.Errors
import org.genivi.sota.resolver.components.{Component, ComponentRepository}
import org.genivi.sota.resolver.filters._
import org.genivi.sota.resolver.firmware.Firmware
import slick.driver.MySQLDriver.api._

import scala.concurrent.{ExecutionContext, Future}

/**
 * Slick repository for per-device state: installed firmware, installed
 * packages and installed hardware components, plus a device search.
 */
object DeviceRepository {

  import org.genivi.sota.db.SlickExtensions._
  import org.genivi.sota.refined.SlickRefined._

  // Table of firmware currently installed on a device.
  class InstalledFirmwareTable(tag: Tag) extends Table[(Firmware, Uuid)](tag, "Firmware") {
    def namespace = column[Namespace] ("namespace")
    def module = column[Firmware.Module] ("module")
    def firmware_id = column[Firmware.FirmwareId] ("firmware_id")
    def last_modified = column[Long] ("last_modified")
    def device = column[Uuid] ("device_uuid")

    // insertOrUpdate buggy for composite-keys, see Slick issue #966.
    def pk = primaryKey("pk_installedFirmware", (namespace, module, firmware_id, device))

    def * = (namespace, module, firmware_id, last_modified, device).shaped <>
      (p => (Firmware(p._1, p._2, p._3, p._4), p._5),
        (fw: (Firmware, Uuid)) =>
          Some((fw._1.namespace, fw._1.module, fw._1.firmwareId, fw._1.lastModified, fw._2)))
  }
  // scalastyle:on

  val installedFirmware = TableQuery[InstalledFirmwareTable]

  // Fails the DBIO with Errors.MissingFirmware when no matching row exists.
  def firmwareExists(namespace: Namespace, module: Firmware.Module)
                    (implicit ec: ExecutionContext): DBIO[Firmware.Module] = {
    val res = for {
      ifw <- installedFirmware.filter(i => i.namespace === namespace && i.module === module).result.headOption
    } yield ifw
    res.flatMap(_.fold[DBIO[Firmware.Module]]
      (DBIO.failed(Errors.MissingFirmware))(x => DBIO.successful(x._1.module)))
  }

  def installFirmware
    (namespace: Namespace, module: Firmware.Module,
     firmware_id: Firmware.FirmwareId, last_modified: Long, device: Uuid)
    (implicit ec: ExecutionContext): DBIO[Unit] = {
    for {
      _ <- firmwareExists(namespace, module)
      _ <- installedFirmware.insertOrUpdate((Firmware(namespace, module, firmware_id, last_modified), device))
    } yield()
  }

  def firmwareOnDevice
    (namespace: Namespace, deviceId: Uuid)
    (implicit ec: ExecutionContext): DBIO[Seq[Firmware]] = {
    installedFirmware
      .filter(_.namespace === namespace)
      .filter(_.device === deviceId)
      .result
      .map(_.map(_._1))
  }

  //This method is only intended to be called when the client reports installed firmware.
  //It therefore clears all installed firmware for the given vin and replaces with the reported
  //state instead.
  def updateInstalledFirmware(device: Uuid, firmware: Set[Firmware])
                             (implicit ec: ExecutionContext): DBIO[Unit] = {
    (for {
      _ <- installedFirmware.filter(_.device === device).delete
      _ <- installedFirmware ++= firmware.map(fw =>
        (Firmware(fw.namespace, fw.module, fw.firmwareId, fw.lastModified), device))
    } yield ()).transactionally
  }

  /*
   * Installed packages.
   */

  // scalastyle:off
  class InstalledPackageTable(tag: Tag) extends Table[(Uuid, UUID)](tag, "InstalledPackage") {
    def device = column[Uuid]("device_uuid")
    def packageUuid = column[UUID]("package_uuid")

    // insertOrUpdate buggy for composite-keys, see Slick issue #966.
    def pk = primaryKey("pk_installedPackage", (device, packageUuid))

    def * = (device, packageUuid).shaped <>
      (identity, (vp: (Uuid, UUID)) => Some(vp))
  }
  // scalastyle:on

  val installedPackages = TableQuery[InstalledPackageTable]

  def installPackage(namespace: Namespace, device: Uuid, pkgId: PackageId)
                    (implicit ec: ExecutionContext): DBIO[Unit] =
    for {
      pkg <- PackageRepository.exists(namespace, pkgId)
      _ <- installedPackages.insertOrUpdate((device, pkg.uuid))
    } yield ()

  def uninstallPackage(namespace: Namespace, device: Uuid, pkgId: PackageId)
                      (implicit ec: ExecutionContext): DBIO[Unit] =
    for {
      pkg <- PackageRepository.exists(namespace, pkgId)
      _ <- installedPackages.filter { ip =>
        ip.device === device && ip.packageUuid === pkg.uuid
      }.delete
    } yield ()

  // Reconciles the stored package set with the reported one: inserts newly
  // reported packages (that exist in PackageRepository) and deletes the rest.
  def updateInstalledPackages(namespace: Namespace, device: Uuid, packages: Set[PackageId] )
                             (implicit ec: ExecutionContext): DBIO[Unit] = {

    def filterAvailablePackages(ids: Set[UUID]) : DBIO[Set[UUID]] =
      PackageRepository.loadByUuids(ids).map(_.map(_.uuid).toSet)

    def deleteOld(deletedPackages: Set[UUID]) =
      installedPackages
        .filter(_.device === device)
        .filter(_.packageUuid.inSet(deletedPackages))
        .delete

    def insertNew(newPackages: Set[UUID]) =
      installedPackages ++= newPackages.map((device, _))

    val dbio = for {
      packageUuids <- PackageRepository.toPackageUuids(namespace, packages)
      installedPackages <- DeviceRepository.installedOn(device).map(_.map(_.uuid))
      newPackages = packageUuids -- installedPackages
      deletedPackages = installedPackages -- packageUuids
      newAvailablePackages <- filterAvailablePackages(newPackages)
      _ <- insertNew(newAvailablePackages)
      _ <- deleteOld(deletedPackages)
    } yield ()

    dbio.transactionally
  }

  def installedOn(device: Uuid, regexFilter: Option[String] = None)
                 (implicit ec: ExecutionContext) : DBIO[Set[Package]] = {
    installedPackages
      .join(PackageRepository.packages).on(_.packageUuid === _.uuid)
      .regexFilter(regexFilter)(_._2.name, _._2.version)
      .filter(_._1.device === device)
      .map { case (_, pkg) => pkg }
      .result
      .map(_.toSet)
  }

  def listInstalledPackages: DBIO[Seq[(Namespace, Uuid, PackageId)]] =
    installedPackages
      .join(PackageRepository.packages).on(_.packageUuid === _.uuid)
      .map { case (ip, p) => (p.namespace, ip.device, LiftedPackageId(p.name, p.version))}
      .result

  /*
   * Installed components.
   */

  // scalastyle:off
  class InstalledComponentTable(tag: Tag)
    extends Table[(Namespace, Uuid, Component.PartNumber)](tag, "InstalledComponent") {

    def namespace = column[Namespace]("namespace")
    def device = column[Uuid]("device_uuid")
    def partNumber = column[Component.PartNumber]("partNumber")

    // insertOrUpdate buggy for composite-keys, see Slick issue #966.
    def pk = primaryKey("pk_installedComponent", (namespace, device, partNumber))

    def * = (namespace, device, partNumber)
  }
  // scalastyle:on

  val installedComponents = TableQuery[InstalledComponentTable]

  def listInstalledComponents: DBIO[Seq[(Namespace, Uuid, Component.PartNumber)]] =
    installedComponents.result

  def deleteInstalledComponentById(namespace: Namespace, device: Uuid): DBIO[Int] =
    installedComponents.filter(i => i.namespace === namespace && i.device === device).delete

  def installComponent(namespace: Namespace, device: Uuid, part: Component.PartNumber)
                      (implicit ec: ExecutionContext): DBIO[Unit] =
    for {
      _ <- ComponentRepository.exists(namespace, part)
      _ <- installedComponents += ((namespace, device, part))
    } yield ()

  def uninstallComponent
    (namespace: Namespace, device: Uuid, part: Component.PartNumber)
    (implicit ec: ExecutionContext): DBIO[Unit] =
    for {
      _ <- ComponentRepository.exists(namespace, part)
      _ <- installedComponents.filter { ic =>
        ic.namespace === namespace &&
        ic.device === device &&
        ic.partNumber === part
      }.delete
    } yield ()

  def componentsOnDevice(namespace: Namespace, device: Uuid)
                        (implicit ec: ExecutionContext): DBIO[Seq[Component.PartNumber]] = {
    installedComponents
      .filter(_.namespace === namespace)
      .filter(_.device === device)
      .map(_.partNumber)
      .result
  }

  /**
   * Searches devices in a namespace by VIN regex, installed package
   * name/version and installed component part number; an unparsable regex
   * falls back to ".*" (match everything).
   */
  def search(namespace : Namespace,
             re : Option[Refined[String, Regex]],
             pkgName : Option[PackageId.Name],
             pkgVersion: Option[PackageId.Version],
             part : Option[Component.PartNumber],
             deviceRegistry: DeviceRegistry)
            (implicit db: Database,
             ec: ExecutionContext,
             mat: ActorMaterializer): Future[Seq[Uuid]] = {

    def toRegex[T](r: Refined[String, T]): Refined[String, Regex] =
      refineV[Regex](r.get).right.getOrElse(Refined.unsafeApply(".*"))

    val vins = re.fold[FilterAST](True)(VinMatches)

    val pkgs = (pkgName, pkgVersion) match {
      case (Some(re1), Some(re2)) => HasPackage(toRegex(re1), toRegex(re2))
      case _ => True
    }

    val comps = part.fold[FilterAST](True)(r => HasComponent(toRegex(r)))
    val filter = And(vins, And(pkgs, comps))

    for {
      devices <- deviceRegistry.listNamespace(namespace)
      searchResult <- DbDepResolver.filterDevices(namespace,
        devices.map(d => d.uuid -> d.deviceId).toMap, filter)
    } yield searchResult
  }
}
PDXostc/rvi_sota_server
external-resolver/src/main/scala/org/genivi/sota/resolver/db/DeviceRepository.scala
Scala
mpl-2.0
9,456
/*
 * Copyright 2014 Frugal Mechanic (http://frugalmechanic.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package fm.common

import java.io._
import java.nio.charset.Charset
import java.nio.charset.StandardCharsets.UTF_8

import org.apache.commons.compress.compressors.bzip2.BZip2CompressorOutputStream
import org.apache.commons.compress.compressors.xz.XZCompressorOutputStream

import java.util.zip.{Deflater, GZIPOutputStream}
import java.util.zip.{ZipEntry, ZipOutputStream}
import java.util.jar.{JarEntry, JarOutputStream}

object OutputStreamResource {
  /** Wraps a raw OutputStream in a single-use OutputStreamResource. */
  def wrap(
    os: OutputStream,
    fileName: String = "",
    autoCompress: Boolean = true,
    compressionLevel: Int = Deflater.BEST_SPEED,
    buffered: Boolean = true,
    internalArchiveFileName: Option[String] = None
  ): OutputStreamResource = {
    OutputStreamResource(
      resource = SingleUseResource(os),
      fileName = fileName,
      autoCompress = autoCompress,
      compressionLevel = compressionLevel,
      buffered = buffered,
      internalArchiveFileName = internalArchiveFileName
    )
  }

  // Hacky anonymous inner class with anonymous constructor to set the compression level as seen here:
  // http://weblogs.java.net/blog/mister__m/archive/2003/12/achieving_bette.html
  // (`def` is GZIPOutputStream's protected Deflater field, back-quoted because
  // `def` is a Scala keyword.)
  final private class ConfigurableGzipOutputStream(os: OutputStream, level: Int) extends GZIPOutputStream(os) {
    `def`.setLevel(level)
  }
}

/**
 * An output-stream Resource that, based on the file name's extension, can
 * transparently compress (gzip/bzip2/xz/snappy/zstd) or archive (zip/jar)
 * everything written through it.
 */
final case class OutputStreamResource(
  resource: Resource[OutputStream],
  fileName: String = "",
  autoCompress: Boolean = true,
  compressionLevel: Int = Deflater.BEST_SPEED,
  buffered: Boolean = true,
  internalArchiveFileName: Option[String] = None
) extends Resource[OutputStream] {
  def isUsable: Boolean = resource.isUsable
  def isMultiUse: Boolean = resource.isMultiUse

  // Applies buffering first, then the extension-based compression filter.
  def use[T](f: OutputStream => T): T = filteredResource(bufferedFilter(resource)).use{ os: OutputStream => f(os) }

  def writer(): Resource[Writer] = flatMap{ os => Resource(new OutputStreamWriter(os)) }

  def writer(encoding: String): Resource[Writer] = {
    flatMap{ os: OutputStream =>
      val updatedEncoding: String = if (encoding === UTF_8_BOM.name || UTF_8_BOM.aliases().contains(encoding)) {
        // Write the UTF-8 BOM
        UTF_8_BOM.writeBOM(os)
        // Switch to the normal UTF_8 Charset (even though UTF_8_BOM should work the same)
        "UTF-8"
      } else {
        encoding
      }
      Resource(new OutputStreamWriter(os, updatedEncoding))
    }
  }

  def writer(cs: Charset): Resource[Writer] = {
    flatMap{ os: OutputStream =>
      val updatedCS: Charset = if (cs eq UTF_8_BOM) {
        // Write the UTF-8 BOM
        UTF_8_BOM.writeBOM(os)
        // Switch to the normal UTF_8 Charset (even though UTF_8_BOM should work the same)
        UTF_8
      } else {
        cs
      }
      Resource(new OutputStreamWriter(os, updatedCS))
    }
  }

  def bufferedWriter(): Resource[BufferedWriter] = writer() flatMap { r => Resource(new BufferedWriter(r)) }
  def bufferedWriter(encoding: String): Resource[BufferedWriter] = writer(encoding) flatMap { r => Resource(new BufferedWriter(r)) }
  def bufferedWriter(cs: Charset): Resource[BufferedWriter] = writer(cs) flatMap { r => Resource(new BufferedWriter(r)) }

  def dataOutput(): Resource[DataOutput] = flatMap{ os => Resource(new DataOutputStream(os)) }

  // Chooses the compression/archive wrapper from the (lower-cased) file
  // extension; returns the resource unchanged when autoCompress is off or the
  // extension is unknown.
  private def filteredResource(resource: Resource[OutputStream]): Resource[OutputStream] = {
    val lowerFileName: String = fileName.toLowerCase

    if (!autoCompress) resource
//    else if (lowerFileName.endsWith(".tar.gz")) gzip(tar(resource, ".tar.gz"))
//    else if (lowerFileName.endsWith(".tgz")) gzip(tar(resource, ".tgz"))
//    else if (lowerFileName.endsWith(".tbz2")) bzip2(tar(resource, ".tbz2"))
//    else if (lowerFileName.endsWith(".tbz")) bzip2(tar(resource, ".tbz"))
//    else if (lowerFileName.endsWith(".tar")) tar(resource, ".tar")
    else if (lowerFileName.endsWith(".gz")) gzip(resource)
    else if (lowerFileName.endsWith(".bzip2")) bzip2(resource)
    else if (lowerFileName.endsWith(".bz2")) bzip2(resource)
    else if (lowerFileName.endsWith(".bz")) bzip2(resource)
    else if (lowerFileName.endsWith(".snappy")) snappy(resource)
    else if (lowerFileName.endsWith(".xz")) xz(resource)
    else if (lowerFileName.endsWith(".zip")) zip(resource, ".zip")
    else if (lowerFileName.endsWith(".jar")) jar(resource, ".jar")
    else if (lowerFileName.endsWith(".zst")) zstd(resource)
    else resource
  }

  private def gzip(r: Resource[OutputStream]): Resource[OutputStream] = r.flatMap { new OutputStreamResource.ConfigurableGzipOutputStream(_, compressionLevel) }

  private def snappy(r: Resource[OutputStream]): Resource[OutputStream] = r.flatMap { Snappy.newOutputStream(_) }

  private def bzip2(r: Resource[OutputStream]): Resource[OutputStream] = r.flatMap { new BZip2CompressorOutputStream(_) }

  private def xz(r: Resource[OutputStream]): Resource[OutputStream] = r.flatMap { new XZCompressorOutputStream(_) }

  private def zstd(r: Resource[OutputStream]): Resource[OutputStream] = r.flatMap { ZStandard.newOutputStream(_) }

  private def zip(r: Resource[OutputStream], extension: String): Resource[OutputStream] = r.flatMap { os: OutputStream =>
    val zos: ZipOutputStream = new ZipOutputStream(os)
    zos.setLevel(compressionLevel)
    // Add an entry with the extension stripped off
    val entryName: String = internalArchiveFileName.getOrElse(fileName.substring(0, fileName.length-extension.length))
    zos.putNextEntry(new ZipEntry(entryName))
    zos
  }

  private def jar(r: Resource[OutputStream], extension: String): Resource[OutputStream] = r.flatMap { os: OutputStream =>
    val zos: JarOutputStream = new JarOutputStream(os)
    zos.setLevel(compressionLevel)
    // Add an entry with the extension stripped off
    val entryName: String = internalArchiveFileName.getOrElse(fileName.substring(0, fileName.length-extension.length))
    zos.putNextEntry(new JarEntry(entryName))
    zos
  }

//  // This is SUPER slow for some reason.  Using the native ZIP classes directly is way faster.
//  private def zip(r: Resource[OutputStream], extension: String): Resource[OutputStream] = archive(r, extension, ArchiveStreamFactory.ZIP){ new ZipArchiveEntry(_) }
//
//  // This is SUPER slow for some reason.  Using the native ZIP classes directly is way faster.
//  private def jar(r: Resource[OutputStream], extension: String): Resource[OutputStream] = archive(r, extension, ArchiveStreamFactory.JAR){ new JarArchiveEntry(_) }
//
//  // This is SUPER slow for some reason.  Using the native ZIP classes directly is way faster.
//  private def archive(r: Resource[OutputStream], extension: String, archiverName: String)(createEntry: String => ArchiveEntry): Resource[OutputStream] = r.flatMap { os: OutputStream =>
//    val aos: ArchiveOutputStream = new ArchiveStreamFactory().createArchiveOutputStream(archiverName, os)
//    val entryName: String = internalArchiveFileName.getOrElse(fileName.substring(0, fileName.length-extension.length))
//    aos.putArchiveEntry(createEntry(entryName))
//    val wrappedOutputStream: WrappedArchiveOutputStream = new WrappedArchiveOutputStream(aos)
//    SingleUseResource(wrappedOutputStream)(WrappedArchiveOutputStreamCloseable)
//  }
//
//  private class WrappedArchiveOutputStream(aos: ArchiveOutputStream) extends FilterOutputStream(aos) {
//    override def close(): Unit = { } // Disable the close method
//    def realClose(): Unit = {
//      // closeArchiveEntry() MUST be called before close() so we can't let any other
//      // OutputStreams wrapping this stream call the close method.
//      aos.closeArchiveEntry()
//      aos.close()
//    }
//  }
//
//  private def WrappedArchiveOutputStreamCloseable(os: WrappedArchiveOutputStream): Closeable = new Closeable {
//    def close(): Unit = os.realClose()
//  }

  private def bufferedFilter(resource: Resource[OutputStream]): Resource[OutputStream] = {
    if (buffered) resource.flatMap{ new BufferedOutputStream(_) } else resource
  }
}
frugalmechanic/fm-common
jvm/src/main/scala/fm/common/OutputStreamResource.scala
Scala
apache-2.0
8,641
package ilc
package examples

import longRunning._

// Benchmark-verification suite for the generated histogram (word-count)
// example. base/last/step shape the benchmark input sizes; here the range is
// a single tiny size (2 to 2) so verification stays fast.
class HistogramVerification extends BenchmarkVerification(
  new WordCountBenchData(HistogramGenerated) {
    override def base = 2
    override def last = 2
    override def step = 10
  })
inc-lc/ilc-scala
bigClients/src/test/scala/ilc/examples/verification.scala
Scala
mit
242
//(a - 3 pts)
// Consider the following scala programs.
// For each program state what the program outputs assuming all parameters are call-by-value.
// Write a short explanation for the output.

// Program 1:
def bob(x: Int): Int = { println("Bob"); x + 1 }
def joe(x: Int,y: Int): Int = { println("Joe"); val a = x; val b = y; println(a+b); a+b }
def ron(x: Int, y: Int, z: Int) {
  println("Ron")
  println(x + y)
  println(x + y + z)
}
ron(bob(joe(bob(1),2)),3,4)
// Call-by-value evaluates each argument exactly once, innermost first, before
// the enclosing call runs (call-by-name would instead re-evaluate the passed
// expression at every use).
// Evaluation order: bob(1) prints "Bob" and returns 2; joe(2,2) prints "Joe"
// then 4 and returns 4; bob(4) prints "Bob" and returns 5; ron(5,3,4) prints
// "Ron", 8 and 12.
// Should print:
// Bob
// Joe
// 4
// Bob
// Ron
// 8
// 12

// Program 2:
def buggy(x: Int): Int = { println(x); buggy(x - 1) }
def foo(x: Int, y: Int): Int = {
  println(x)
  x+2
}
println(foo(1,buggy(10)))
// Under call-by-value, buggy(10) is evaluated before foo is ever entered, and
// buggy never terminates: it is tail-recursive, so it loops forever (without
// overflowing the stack), counting down without bound.
// Should print:
// 10
// 9
// 8
// ... counting down forever; foo's body is never reached
xianminx/cs3101-2
homework/week4/Part1a.scala
Scala
apache-2.0
1,190
/*
 * Copyright 2015-2020 Noel Welsh
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package doodle
package algebra
package generic

import doodle.core.{Transform => Tx}

// Generic implementation of the Transform algebra: delegates to
// Finalized.transform, which applies the affine transform to the picture.
// (`?` is kind-projector syntax for a partially-applied type constructor.)
trait GenericTransform[F[_]] extends Transform[Finalized[F, ?]] {
  def transform[A](img: Finalized[F, A], tx: Tx): Finalized[F, A] =
    Finalized.transform(tx)(img)
}
underscoreio/doodle
core/shared/src/main/scala/doodle/algebra/generic/GenericTransform.scala
Scala
apache-2.0
855
package mpsc

import java.time.LocalDateTime
import java.time.temporal.ChronoUnit
import java.util

import org.slf4j.{Logger, LoggerFactory}

import scala.annotation.tailrec

/**
 * Consumer thread of the MPSC task queue: drains all due tasks, then parks on
 * the waiter's monitor until notified of new work.
 *
 * @param queue  shared queue of scheduled tasks
 * @param waiter coordination object providing the monitor and timed waits
 */
class Executor(queue: util.Queue[Task], waiter: Waiter) extends Thread {
  val logger: Logger = LoggerFactory.getLogger(this.getClass)

  override def run(): Unit = {
    while (!isInterrupted) {
      executeTasks()
      waiter.monitor.synchronized {
        try {
          waiter.monitor.wait()
        } catch {
          case e: InterruptedException =>
            // Fix: wait() clears the thread's interrupt status when it throws,
            // so without re-interrupting here `isInterrupted` stayed false and
            // the loop never terminated. Restore the flag so the loop exits.
            interrupt()
            logger.debug("Executor stopped")
        }
      }
    }
  }

  /**
   * Runs every task whose scheduled time has arrived. A not-yet-due task is
   * put back on the queue and we sleep until its scheduled time.
   */
  @tailrec
  private def executeTasks(): Unit = {
    val now = LocalDateTime.now()
    val task = queue.poll()
    if (!isInterrupted && task != null) {
      // if its time to execute then do it
      if (task.scheduledTime.isBefore(now) || task.scheduledTime.isEqual(now)) {
        task.action.call()
        executeTasks()
      } else {
        // else return task into queue and sleep till scheduled time comes
        queue.add(task)
        val delay = ChronoUnit.MILLIS.between(now, task.scheduledTime)
        waiter.waitMillis(delay)
      }
    }
  }
}
algru/mpsc-tasks-executor
src/main/scala/mpsc/Executor.scala
Scala
mit
1,162
/*
 * Copyright 2021 HM Revenue & Customs
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package uk.gov.hmrc.ct.computations

import org.scalatest.{Matchers, WordSpec}
import org.scalatestplus.mockito.MockitoSugar
import uk.gov.hmrc.ct.BoxValidationFixture
import uk.gov.hmrc.ct.computations.retriever.ComputationsBoxRetriever

// Validation spec for computations box CP17: the shared fixture asserts the
// box value must be zero or positive.
class CP17Spec extends WordSpec with MockitoSugar with Matchers with BoxValidationFixture[ComputationsBoxRetriever] {

  val boxRetriever = mock[ComputationsBoxRetriever]

  testBoxIsZeroOrPositive("CP17", CP17.apply)
}
hmrc/ct-calculations
src/test/scala/uk/gov/hmrc/ct/computations/CP17Spec.scala
Scala
apache-2.0
1,065
package com.ecfront.lfp

import java.io.{File, RandomAccessFile}
import java.nio.channels.FileChannel

import akka.actor.{Actor, ActorLogging, ActorRef}
import sun.nio.ch.DirectBuffer

/**
 * Line collector actor: memory-maps a file, extracts the text between the
 * line-aligned start and end of one chunk, splits it into non-blank lines and
 * forwards them to the line-processing actor.
 *
 * @param lineProcess actor that receives the extracted lines (Array[String])
 */
class LineCollector(lineProcess: ActorRef) extends Actor with ActorLogging {

  def receive = {
    case (fileName: String, chunkStart: Int, chunkSize: Int) =>
      val file = new File(fileName)
      val channel = new RandomAccessFile(file, "r").getChannel
      // Map the WHOLE file read-only (not just this chunk).
      val mappedBuff = channel.map(FileChannel.MapMode.READ_ONLY, 0, file.length())
      // Determine the end position, clamped to the last byte of the file.
      // NOTE(review): endP starts at chunkSize, not chunkStart + chunkSize —
      // presumably the caller passes an absolute end offset here; confirm.
      var endP = chunkSize
      if (endP >= file.length()) {
        endP = file.length().intValue - 1
      }
      val start = mappedBuff.get(chunkStart)
      // Align both boundaries to line starts via rowOffsetProcess.
      val startPosition = rowOffsetProcess(chunkStart, mappedBuff, start, endP)
      val end = mappedBuff.get(endP)
      // The final byte of the file needs no alignment.
      val endPosition = if (endP != file.length() - 1) rowOffsetProcess(endP, mappedBuff, end, endP) else endP
      val stringBuilder = new StringBuilder()
      for (i <- startPosition to endPosition) {
        // Byte-to-char numeric conversion: correct only for single-byte
        // encodings — multi-byte (e.g. UTF-8 non-ASCII) text would be garbled.
        stringBuilder.append(mappedBuff.get(i).asInstanceOf[Char])
      }
      // Must be closed manually, otherwise the file remains locked/occupied.
      channel.close()
      // Force-release the mapped buffer; relies on the sun.nio.ch internal API.
      mappedBuff.asInstanceOf[DirectBuffer].cleaner().clean()
      lineProcess ! stringBuilder.toString().split('\n').filter(_.trim != "")
  }

  /**
   * Aligns to row data: scans charBuff until a newline is found, i.e. ensures
   * the returned position is the start of a line.
   *
   * NOTE(review): the scan moves `next` BACKWARD from startP and the loop
   * condition tests `position` (which never changes inside the loop), so if no
   * newline precedes startP, `next` can run below index 0 — verify callers
   * never hit that case.
   *
   * @param startP   starting position
   * @param charBuff memory-mapped byte buffer over the whole file
   * @param start    byte at the starting position
   * @param length   maximum valid position
   * @return the nearest line-start position
   */
  private def rowOffsetProcess(startP: Int, charBuff: java.nio.MappedByteBuffer, start: Byte, length: Int): Int = {
    var s = start.asInstanceOf[Char]
    val position = startP
    var next = position
    if (position <= length) {
      while (s != '\n' && position > 0) {
        s = charBuff.get(next).asInstanceOf[Char]
        next = next - 1
      }
    }
    if (position != next) {
      // Moved at least one byte: next now sits just before the newline found.
      next + 1
    } else {
      position
    }
  }
}
gudaoxuri/large-file-processor
src/main/scala/com/ecfront/lfp/LineCollector.scala
Scala
apache-2.0
2,133
// Exercise 9.13: a reference (location-based) implementation of the Parsers
// algebra from "Functional Programming in Scala", chapter 9.
object ch9_13 {
  import ch9.{ParseError, Location, Parsers}

  // A parser consumes input starting at a Location and yields a Result.
  type Parser[+A] = Location => Result[A]

  sealed trait Result[+A]
  // charsConsumed: how many characters of input this parse consumed.
  case class Success[+A](get: A, charsConsumed: Int) extends Result[A]
  case class Failure(get: ParseError) extends Result[Nothing]

  object Reference extends Parsers[Parser] {
    /** As seen from object Reference, the missing signatures are as follows.
      * For convenience, these are usable as stub implementations. */
    def attempt[A](p: Parser[A]): Parser[A] = ???
    def flatMap[A, B](a: Parser[A])(f: A => Parser[B]): Parser[B] = ???
    def label[A](msg: String)(p: Parser[A]): Parser[A] = ???
    def or[A](s1: Parser[A],s2: => Parser[A]): Parser[A] = ???
    def scope[A](msg: String)(p: Parser[A]): Parser[A] = ???

    // Returns the portion of input consumed by p instead of p's result.
    // NOTE(review): `slice(loc.offset, c)` treats c (chars consumed) as an
    // absolute end index; it likely should be `loc.offset + c` — confirm
    // against the book's reference implementation.
    def slice[A](p: Parser[A]): Parser[String] = loc => p(loc) match {
      case f@Failure(_) => f
      case Success(a, c) => Success(loc.input.slice(loc.offset, c), c)
    }

    // Always succeeds with `a`, consuming no input.
    // (Name "succeded" [sic] presumably mandated by the Parsers trait.)
    def succeded[A](a: A): Parser[A] = l => Success(a, 0)

    // NOTE(review): slicing (0, loc.offset+1) takes a PREFIX of the input up
    // to the current offset rather than the remaining input from the offset —
    // this looks inverted; the usual form is input.substring(loc.offset).
    implicit def regex(r: scala.util.matching.Regex): Parser[String] = loc => {
      val input = loc.input.slice(0,loc.offset+1)
      r.findPrefixOf(input) match {
        case None => Failure(loc.advanceBy(1).toError("regex " + r))
        case Some(m) => Success(m, m.length)
      }
    }

    // Matches the literal string s at the current location.
    // NOTE(review): same offset-slicing concern as `regex` above; the common
    // reference form is loc.input.startsWith(s, loc.offset).
    implicit def string(s: String): Parser[String] = { loc =>
      val input = loc.input.slice(0,loc.offset+1)
      input.startsWith(s) match {
        case true => Success(s, s.length)
        case _ => Failure(loc.advanceBy(1).toError("Expected " + s))
      }
    }

    def run[A](p: Parser[A])(input: String): Either[ch9.ParseError,A] = ???
  }
}

import ch9_13._

/* from repl you can test typing:
   :load src/main/scala/fpinscala/ch9/Parser.scala
   :load src/main/scala/fpinscala/ch9/Exercise13.scala
*/
rucka/fpinscala
src/main/scala/fpinscala/ch9/Exercise13.scala
Scala
gpl-2.0
1,835
/*
 * This file is part of the \BlueLaTeX project.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package gnieh.blue
package web

import tiscaf._

import org.osgi.framework.BundleContext

import com.typesafe.config.Config

/** Web application serving the static web client and its configuration endpoint.
 *
 *  @author Lucas Satabin
 */
class WebApp(context: BundleContext, config: Config) extends HApp {

  // required by the resource let used to serve static content
  override val buffered = true

  // strips any leading/trailing slashes from the configured prefix
  private val Prefix = "/*(.*)/*".r

  private val prefix: String =
    config.getString("blue.client.path-prefix") match {
      case Prefix(p) => p
      case _         => ""
    }

  private val configLet = new ConfigLet(context, config)

  private val webLet = new WebLet(context, prefix)

  // path of the configuration endpoint, prefixed when a client prefix is set
  private val configPath =
    if (prefix.isEmpty) "configuration" else prefix + "/configuration"

  // the configuration endpoint is handled specially; everything else is static content
  def resolve(req: HReqData) =
    Some(if (req.uriPath == configPath) configLet else webLet)
}
tdurieux/bluelatex
blue-web/src/main/scala/gnieh/blue/web/WebApp.scala
Scala
apache-2.0
1,492
/*
 * Copyright (C) 2005, The Beangle Software.
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
package org.beangle.cache

/**
 * Publishes cache invalidation events to remote cache peers.
 *
 * @author chaostone
 */
trait Broadcaster {
  /** Announce that `key` was evicted from `cache`. */
  def publishEviction(cache: String, key: Any): Unit

  /** Announce that `cache` was cleared entirely. */
  def publishClear(cache: String): Unit
}

/** Factory for [[Broadcaster]] instances bound to a channel and local manager. */
trait BroadcasterBuilder {
  def build(channel: String, localManager: CacheManager): Broadcaster
}

object EvictMessage {
  /** Operation code: evict a single key. */
  val Eviction: Byte = 0.toByte
  /** Operation code: clear the whole cache. */
  val Clear: Byte = 1.toByte
  /** Random per-JVM id so locally-issued messages can be recognized and skipped. */
  val LocalIssuer: Int = new scala.util.Random(System.currentTimeMillis).nextInt(1000000)
}

/**
 * Serializable invalidation message broadcast between cache nodes.
 *
 * @param cache name of the affected cache
 * @param key   evicted key (ignored for Clear operations)
 */
class EvictMessage(val cache: String, val key: Any) extends Serializable {
  import EvictMessage._

  var operation: Byte = Eviction
  var issuer: Int = LocalIssuer

  /** True when this message originated from the current JVM. */
  def isIssueByLocal: Boolean = {
    issuer == LocalIssuer
  }

  override def toString: String = {
    // Fixed: the branches were swapped (an Eviction printed the "clear" text
    // while the other branch printed "evict $key in $cache"), and the clear
    // message lacked a separator between "clear" and the cache name.
    if (operation == Clear) s"clear $cache" else s"evict $key in $cache"
  }
}
beangle/cache
api/src/main/scala/org/beangle/cache/Broadcaster.scala
Scala
lgpl-3.0
1,547
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql import java.lang.reflect.ParameterizedType import scala.reflect.runtime.universe.TypeTag import scala.util.Try import org.apache.spark.annotation.Stable import org.apache.spark.api.python.PythonEvalType import org.apache.spark.internal.Logging import org.apache.spark.sql.api.java._ import org.apache.spark.sql.catalyst.{JavaTypeInference, ScalaReflection} import org.apache.spark.sql.catalyst.analysis.FunctionRegistry import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder import org.apache.spark.sql.catalyst.expressions.{Expression, ScalaUDF} import org.apache.spark.sql.catalyst.util.CharVarcharUtils import org.apache.spark.sql.execution.aggregate.ScalaUDAF import org.apache.spark.sql.execution.python.UserDefinedPythonFunction import org.apache.spark.sql.expressions.{SparkUserDefinedFunction, UserDefinedAggregateFunction, UserDefinedAggregator, UserDefinedFunction} import org.apache.spark.sql.types.DataType import org.apache.spark.util.Utils /** * Functions for registering user-defined functions. 
Use `SparkSession.udf` to access this: * * {{{ * spark.udf * }}} * * @since 1.3.0 */ @Stable class UDFRegistration private[sql] (functionRegistry: FunctionRegistry) extends Logging { import UDFRegistration._ protected[sql] def registerPython(name: String, udf: UserDefinedPythonFunction): Unit = { log.debug( s""" | Registering new PythonUDF: | name: $name | command: ${udf.func.command.toSeq} | envVars: ${udf.func.envVars} | pythonIncludes: ${udf.func.pythonIncludes} | pythonExec: ${udf.func.pythonExec} | dataType: ${udf.dataType} | pythonEvalType: ${PythonEvalType.toString(udf.pythonEvalType)} | udfDeterministic: ${udf.udfDeterministic} """.stripMargin) functionRegistry.createOrReplaceTempFunction(name, udf.builder) } /** * Registers a user-defined aggregate function (UDAF). * * @param name the name of the UDAF. * @param udaf the UDAF needs to be registered. * @return the registered UDAF. * * @since 1.5.0 * @deprecated this method and the use of UserDefinedAggregateFunction are deprecated. * Aggregator[IN, BUF, OUT] should now be registered as a UDF via the functions.udaf(agg) method. */ @deprecated("Aggregator[IN, BUF, OUT] should now be registered as a UDF" + " via the functions.udaf(agg) method.", "3.0.0") def register(name: String, udaf: UserDefinedAggregateFunction): UserDefinedAggregateFunction = { def builder(children: Seq[Expression]) = ScalaUDAF(children, udaf, udafName = Some(name)) functionRegistry.createOrReplaceTempFunction(name, builder) udaf } /** * Registers a user-defined function (UDF), for a UDF that's already defined using the Dataset * API (i.e. of type UserDefinedFunction). To change a UDF to nondeterministic, call the API * `UserDefinedFunction.asNondeterministic()`. To change a UDF to nonNullable, call the API * `UserDefinedFunction.asNonNullable()`. 
* * Example: * {{{ * val foo = udf(() => Math.random()) * spark.udf.register("random", foo.asNondeterministic()) * * val bar = udf(() => "bar") * spark.udf.register("stringLit", bar.asNonNullable()) * }}} * * @param name the name of the UDF. * @param udf the UDF needs to be registered. * @return the registered UDF. * * @since 2.2.0 */ def register(name: String, udf: UserDefinedFunction): UserDefinedFunction = { udf.withName(name) match { case udaf: UserDefinedAggregator[_, _, _] => def builder(children: Seq[Expression]) = udaf.scalaAggregator(children) functionRegistry.createOrReplaceTempFunction(name, builder) udaf case other => def builder(children: Seq[Expression]) = other.apply(children.map(Column.apply) : _*).expr functionRegistry.createOrReplaceTempFunction(name, builder) other } } // scalastyle:off line.size.limit /* register 0-22 were generated by this script (0 to 22).foreach { x => val types = (1 to x).foldRight("RT")((i, s) => {s"A$i, $s"}) val typeTags = (1 to x).map(i => s"A$i: TypeTag").foldLeft("RT: TypeTag")(_ + ", " + _) val inputEncoders = (1 to x).foldRight("Nil")((i, s) => {s"Try(ExpressionEncoder[A$i]()).toOption :: $s"}) println(s""" |/** | * Registers a deterministic Scala closure of $x arguments as user-defined function (UDF). | * @tparam RT return type of UDF. 
| * @since 1.3.0 | */ |def register[$typeTags](name: String, func: Function$x[$types]): UserDefinedFunction = { | val outputEncoder = Try(ExpressionEncoder[RT]()).toOption | val ScalaReflection.Schema(dataType, nullable) = outputEncoder.map(outputSchema).getOrElse(ScalaReflection.schemaFor[RT]) | val inputEncoders: Seq[Option[ExpressionEncoder[_]]] = $inputEncoders | val udf = SparkUserDefinedFunction(func, dataType, inputEncoders, outputEncoder).withName(name) | val finalUdf = if (nullable) udf else udf.asNonNullable() | def builder(e: Seq[Expression]) = if (e.length == $x) { | finalUdf.createScalaUDF(e) | } else { | throw new AnalysisException("Invalid number of arguments for function " + name + | ". Expected: $x; Found: " + e.length) | } | functionRegistry.createOrReplaceTempFunction(name, builder) | finalUdf |}""".stripMargin) } (0 to 22).foreach { i => val extTypeArgs = (0 to i).map(_ => "_").mkString(", ") val anyTypeArgs = (0 to i).map(_ => "Any").mkString(", ") val anyCast = s".asInstanceOf[UDF$i[$anyTypeArgs]]" val anyParams = (1 to i).map(_ => "_: Any").mkString(", ") val version = if (i == 0) "2.3.0" else "1.3.0" val funcCall = if (i == 0) s"() => f$anyCast.call($anyParams)" else s"f$anyCast.call($anyParams)" println(s""" |/** | * Register a deterministic Java UDF$i instance as user-defined function (UDF). | * @since $version | */ |def register(name: String, f: UDF$i[$extTypeArgs], returnType: DataType): Unit = { | val replaced = CharVarcharUtils.failIfHasCharVarchar(returnType) | val func = $funcCall | def builder(e: Seq[Expression]) = if (e.length == $i) { | ScalaUDF(func, replaced, e, Nil, udfName = Some(name)) | } else { | throw new AnalysisException("Invalid number of arguments for function " + name + | ". Expected: $i; Found: " + e.length) | } | functionRegistry.createOrReplaceTempFunction(name, builder) |}""".stripMargin) } */ /** * Registers a deterministic Scala closure of 0 arguments as user-defined function (UDF). 
* @tparam RT return type of UDF. * @since 1.3.0 */ def register[RT: TypeTag](name: String, func: Function0[RT]): UserDefinedFunction = { val outputEncoder = Try(ExpressionEncoder[RT]()).toOption val ScalaReflection.Schema(dataType, nullable) = outputEncoder.map(outputSchema).getOrElse(ScalaReflection.schemaFor[RT]) val inputEncoders: Seq[Option[ExpressionEncoder[_]]] = Nil val udf = SparkUserDefinedFunction(func, dataType, inputEncoders, outputEncoder).withName(name) val finalUdf = if (nullable) udf else udf.asNonNullable() def builder(e: Seq[Expression]) = if (e.length == 0) { finalUdf.createScalaUDF(e) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 0; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) finalUdf } /** * Registers a deterministic Scala closure of 1 arguments as user-defined function (UDF). * @tparam RT return type of UDF. * @since 1.3.0 */ def register[RT: TypeTag, A1: TypeTag](name: String, func: Function1[A1, RT]): UserDefinedFunction = { val outputEncoder = Try(ExpressionEncoder[RT]()).toOption val ScalaReflection.Schema(dataType, nullable) = outputEncoder.map(outputSchema).getOrElse(ScalaReflection.schemaFor[RT]) val inputEncoders: Seq[Option[ExpressionEncoder[_]]] = Try(ExpressionEncoder[A1]()).toOption :: Nil val udf = SparkUserDefinedFunction(func, dataType, inputEncoders, outputEncoder).withName(name) val finalUdf = if (nullable) udf else udf.asNonNullable() def builder(e: Seq[Expression]) = if (e.length == 1) { finalUdf.createScalaUDF(e) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 1; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) finalUdf } /** * Registers a deterministic Scala closure of 2 arguments as user-defined function (UDF). * @tparam RT return type of UDF. 
* @since 1.3.0 */ def register[RT: TypeTag, A1: TypeTag, A2: TypeTag](name: String, func: Function2[A1, A2, RT]): UserDefinedFunction = { val outputEncoder = Try(ExpressionEncoder[RT]()).toOption val ScalaReflection.Schema(dataType, nullable) = outputEncoder.map(outputSchema).getOrElse(ScalaReflection.schemaFor[RT]) val inputEncoders: Seq[Option[ExpressionEncoder[_]]] = Try(ExpressionEncoder[A1]()).toOption :: Try(ExpressionEncoder[A2]()).toOption :: Nil val udf = SparkUserDefinedFunction(func, dataType, inputEncoders, outputEncoder).withName(name) val finalUdf = if (nullable) udf else udf.asNonNullable() def builder(e: Seq[Expression]) = if (e.length == 2) { finalUdf.createScalaUDF(e) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 2; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) finalUdf } /** * Registers a deterministic Scala closure of 3 arguments as user-defined function (UDF). * @tparam RT return type of UDF. * @since 1.3.0 */ def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag](name: String, func: Function3[A1, A2, A3, RT]): UserDefinedFunction = { val outputEncoder = Try(ExpressionEncoder[RT]()).toOption val ScalaReflection.Schema(dataType, nullable) = outputEncoder.map(outputSchema).getOrElse(ScalaReflection.schemaFor[RT]) val inputEncoders: Seq[Option[ExpressionEncoder[_]]] = Try(ExpressionEncoder[A1]()).toOption :: Try(ExpressionEncoder[A2]()).toOption :: Try(ExpressionEncoder[A3]()).toOption :: Nil val udf = SparkUserDefinedFunction(func, dataType, inputEncoders, outputEncoder).withName(name) val finalUdf = if (nullable) udf else udf.asNonNullable() def builder(e: Seq[Expression]) = if (e.length == 3) { finalUdf.createScalaUDF(e) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". 
Expected: 3; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) finalUdf } /** * Registers a deterministic Scala closure of 4 arguments as user-defined function (UDF). * @tparam RT return type of UDF. * @since 1.3.0 */ def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag](name: String, func: Function4[A1, A2, A3, A4, RT]): UserDefinedFunction = { val outputEncoder = Try(ExpressionEncoder[RT]()).toOption val ScalaReflection.Schema(dataType, nullable) = outputEncoder.map(outputSchema).getOrElse(ScalaReflection.schemaFor[RT]) val inputEncoders: Seq[Option[ExpressionEncoder[_]]] = Try(ExpressionEncoder[A1]()).toOption :: Try(ExpressionEncoder[A2]()).toOption :: Try(ExpressionEncoder[A3]()).toOption :: Try(ExpressionEncoder[A4]()).toOption :: Nil val udf = SparkUserDefinedFunction(func, dataType, inputEncoders, outputEncoder).withName(name) val finalUdf = if (nullable) udf else udf.asNonNullable() def builder(e: Seq[Expression]) = if (e.length == 4) { finalUdf.createScalaUDF(e) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 4; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) finalUdf } /** * Registers a deterministic Scala closure of 5 arguments as user-defined function (UDF). * @tparam RT return type of UDF. 
* @since 1.3.0 */ def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag](name: String, func: Function5[A1, A2, A3, A4, A5, RT]): UserDefinedFunction = { val outputEncoder = Try(ExpressionEncoder[RT]()).toOption val ScalaReflection.Schema(dataType, nullable) = outputEncoder.map(outputSchema).getOrElse(ScalaReflection.schemaFor[RT]) val inputEncoders: Seq[Option[ExpressionEncoder[_]]] = Try(ExpressionEncoder[A1]()).toOption :: Try(ExpressionEncoder[A2]()).toOption :: Try(ExpressionEncoder[A3]()).toOption :: Try(ExpressionEncoder[A4]()).toOption :: Try(ExpressionEncoder[A5]()).toOption :: Nil val udf = SparkUserDefinedFunction(func, dataType, inputEncoders, outputEncoder).withName(name) val finalUdf = if (nullable) udf else udf.asNonNullable() def builder(e: Seq[Expression]) = if (e.length == 5) { finalUdf.createScalaUDF(e) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 5; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) finalUdf } /** * Registers a deterministic Scala closure of 6 arguments as user-defined function (UDF). * @tparam RT return type of UDF. 
* @since 1.3.0 */ def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag](name: String, func: Function6[A1, A2, A3, A4, A5, A6, RT]): UserDefinedFunction = { val outputEncoder = Try(ExpressionEncoder[RT]()).toOption val ScalaReflection.Schema(dataType, nullable) = outputEncoder.map(outputSchema).getOrElse(ScalaReflection.schemaFor[RT]) val inputEncoders: Seq[Option[ExpressionEncoder[_]]] = Try(ExpressionEncoder[A1]()).toOption :: Try(ExpressionEncoder[A2]()).toOption :: Try(ExpressionEncoder[A3]()).toOption :: Try(ExpressionEncoder[A4]()).toOption :: Try(ExpressionEncoder[A5]()).toOption :: Try(ExpressionEncoder[A6]()).toOption :: Nil val udf = SparkUserDefinedFunction(func, dataType, inputEncoders, outputEncoder).withName(name) val finalUdf = if (nullable) udf else udf.asNonNullable() def builder(e: Seq[Expression]) = if (e.length == 6) { finalUdf.createScalaUDF(e) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 6; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) finalUdf } /** * Registers a deterministic Scala closure of 7 arguments as user-defined function (UDF). * @tparam RT return type of UDF. 
* @since 1.3.0 */ def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag](name: String, func: Function7[A1, A2, A3, A4, A5, A6, A7, RT]): UserDefinedFunction = { val outputEncoder = Try(ExpressionEncoder[RT]()).toOption val ScalaReflection.Schema(dataType, nullable) = outputEncoder.map(outputSchema).getOrElse(ScalaReflection.schemaFor[RT]) val inputEncoders: Seq[Option[ExpressionEncoder[_]]] = Try(ExpressionEncoder[A1]()).toOption :: Try(ExpressionEncoder[A2]()).toOption :: Try(ExpressionEncoder[A3]()).toOption :: Try(ExpressionEncoder[A4]()).toOption :: Try(ExpressionEncoder[A5]()).toOption :: Try(ExpressionEncoder[A6]()).toOption :: Try(ExpressionEncoder[A7]()).toOption :: Nil val udf = SparkUserDefinedFunction(func, dataType, inputEncoders, outputEncoder).withName(name) val finalUdf = if (nullable) udf else udf.asNonNullable() def builder(e: Seq[Expression]) = if (e.length == 7) { finalUdf.createScalaUDF(e) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 7; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) finalUdf } /** * Registers a deterministic Scala closure of 8 arguments as user-defined function (UDF). * @tparam RT return type of UDF. 
* @since 1.3.0 */ def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag](name: String, func: Function8[A1, A2, A3, A4, A5, A6, A7, A8, RT]): UserDefinedFunction = { val outputEncoder = Try(ExpressionEncoder[RT]()).toOption val ScalaReflection.Schema(dataType, nullable) = outputEncoder.map(outputSchema).getOrElse(ScalaReflection.schemaFor[RT]) val inputEncoders: Seq[Option[ExpressionEncoder[_]]] = Try(ExpressionEncoder[A1]()).toOption :: Try(ExpressionEncoder[A2]()).toOption :: Try(ExpressionEncoder[A3]()).toOption :: Try(ExpressionEncoder[A4]()).toOption :: Try(ExpressionEncoder[A5]()).toOption :: Try(ExpressionEncoder[A6]()).toOption :: Try(ExpressionEncoder[A7]()).toOption :: Try(ExpressionEncoder[A8]()).toOption :: Nil val udf = SparkUserDefinedFunction(func, dataType, inputEncoders, outputEncoder).withName(name) val finalUdf = if (nullable) udf else udf.asNonNullable() def builder(e: Seq[Expression]) = if (e.length == 8) { finalUdf.createScalaUDF(e) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 8; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) finalUdf } /** * Registers a deterministic Scala closure of 9 arguments as user-defined function (UDF). * @tparam RT return type of UDF. 
* @since 1.3.0 */ def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag](name: String, func: Function9[A1, A2, A3, A4, A5, A6, A7, A8, A9, RT]): UserDefinedFunction = { val outputEncoder = Try(ExpressionEncoder[RT]()).toOption val ScalaReflection.Schema(dataType, nullable) = outputEncoder.map(outputSchema).getOrElse(ScalaReflection.schemaFor[RT]) val inputEncoders: Seq[Option[ExpressionEncoder[_]]] = Try(ExpressionEncoder[A1]()).toOption :: Try(ExpressionEncoder[A2]()).toOption :: Try(ExpressionEncoder[A3]()).toOption :: Try(ExpressionEncoder[A4]()).toOption :: Try(ExpressionEncoder[A5]()).toOption :: Try(ExpressionEncoder[A6]()).toOption :: Try(ExpressionEncoder[A7]()).toOption :: Try(ExpressionEncoder[A8]()).toOption :: Try(ExpressionEncoder[A9]()).toOption :: Nil val udf = SparkUserDefinedFunction(func, dataType, inputEncoders, outputEncoder).withName(name) val finalUdf = if (nullable) udf else udf.asNonNullable() def builder(e: Seq[Expression]) = if (e.length == 9) { finalUdf.createScalaUDF(e) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 9; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) finalUdf } /** * Registers a deterministic Scala closure of 10 arguments as user-defined function (UDF). * @tparam RT return type of UDF. 
* @since 1.3.0 */ def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag](name: String, func: Function10[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, RT]): UserDefinedFunction = { val outputEncoder = Try(ExpressionEncoder[RT]()).toOption val ScalaReflection.Schema(dataType, nullable) = outputEncoder.map(outputSchema).getOrElse(ScalaReflection.schemaFor[RT]) val inputEncoders: Seq[Option[ExpressionEncoder[_]]] = Try(ExpressionEncoder[A1]()).toOption :: Try(ExpressionEncoder[A2]()).toOption :: Try(ExpressionEncoder[A3]()).toOption :: Try(ExpressionEncoder[A4]()).toOption :: Try(ExpressionEncoder[A5]()).toOption :: Try(ExpressionEncoder[A6]()).toOption :: Try(ExpressionEncoder[A7]()).toOption :: Try(ExpressionEncoder[A8]()).toOption :: Try(ExpressionEncoder[A9]()).toOption :: Try(ExpressionEncoder[A10]()).toOption :: Nil val udf = SparkUserDefinedFunction(func, dataType, inputEncoders, outputEncoder).withName(name) val finalUdf = if (nullable) udf else udf.asNonNullable() def builder(e: Seq[Expression]) = if (e.length == 10) { finalUdf.createScalaUDF(e) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 10; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) finalUdf } /** * Registers a deterministic Scala closure of 11 arguments as user-defined function (UDF). * @tparam RT return type of UDF. 
* @since 1.3.0 */ def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, A11: TypeTag](name: String, func: Function11[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, RT]): UserDefinedFunction = { val outputEncoder = Try(ExpressionEncoder[RT]()).toOption val ScalaReflection.Schema(dataType, nullable) = outputEncoder.map(outputSchema).getOrElse(ScalaReflection.schemaFor[RT]) val inputEncoders: Seq[Option[ExpressionEncoder[_]]] = Try(ExpressionEncoder[A1]()).toOption :: Try(ExpressionEncoder[A2]()).toOption :: Try(ExpressionEncoder[A3]()).toOption :: Try(ExpressionEncoder[A4]()).toOption :: Try(ExpressionEncoder[A5]()).toOption :: Try(ExpressionEncoder[A6]()).toOption :: Try(ExpressionEncoder[A7]()).toOption :: Try(ExpressionEncoder[A8]()).toOption :: Try(ExpressionEncoder[A9]()).toOption :: Try(ExpressionEncoder[A10]()).toOption :: Try(ExpressionEncoder[A11]()).toOption :: Nil val udf = SparkUserDefinedFunction(func, dataType, inputEncoders, outputEncoder).withName(name) val finalUdf = if (nullable) udf else udf.asNonNullable() def builder(e: Seq[Expression]) = if (e.length == 11) { finalUdf.createScalaUDF(e) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 11; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) finalUdf } /** * Registers a deterministic Scala closure of 12 arguments as user-defined function (UDF). * @tparam RT return type of UDF. 
* @since 1.3.0 */ def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, A11: TypeTag, A12: TypeTag](name: String, func: Function12[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, RT]): UserDefinedFunction = { val outputEncoder = Try(ExpressionEncoder[RT]()).toOption val ScalaReflection.Schema(dataType, nullable) = outputEncoder.map(outputSchema).getOrElse(ScalaReflection.schemaFor[RT]) val inputEncoders: Seq[Option[ExpressionEncoder[_]]] = Try(ExpressionEncoder[A1]()).toOption :: Try(ExpressionEncoder[A2]()).toOption :: Try(ExpressionEncoder[A3]()).toOption :: Try(ExpressionEncoder[A4]()).toOption :: Try(ExpressionEncoder[A5]()).toOption :: Try(ExpressionEncoder[A6]()).toOption :: Try(ExpressionEncoder[A7]()).toOption :: Try(ExpressionEncoder[A8]()).toOption :: Try(ExpressionEncoder[A9]()).toOption :: Try(ExpressionEncoder[A10]()).toOption :: Try(ExpressionEncoder[A11]()).toOption :: Try(ExpressionEncoder[A12]()).toOption :: Nil val udf = SparkUserDefinedFunction(func, dataType, inputEncoders, outputEncoder).withName(name) val finalUdf = if (nullable) udf else udf.asNonNullable() def builder(e: Seq[Expression]) = if (e.length == 12) { finalUdf.createScalaUDF(e) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 12; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) finalUdf } /** * Registers a deterministic Scala closure of 13 arguments as user-defined function (UDF). * @tparam RT return type of UDF. 
* @since 1.3.0 */ def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, A11: TypeTag, A12: TypeTag, A13: TypeTag](name: String, func: Function13[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, RT]): UserDefinedFunction = { val outputEncoder = Try(ExpressionEncoder[RT]()).toOption val ScalaReflection.Schema(dataType, nullable) = outputEncoder.map(outputSchema).getOrElse(ScalaReflection.schemaFor[RT]) val inputEncoders: Seq[Option[ExpressionEncoder[_]]] = Try(ExpressionEncoder[A1]()).toOption :: Try(ExpressionEncoder[A2]()).toOption :: Try(ExpressionEncoder[A3]()).toOption :: Try(ExpressionEncoder[A4]()).toOption :: Try(ExpressionEncoder[A5]()).toOption :: Try(ExpressionEncoder[A6]()).toOption :: Try(ExpressionEncoder[A7]()).toOption :: Try(ExpressionEncoder[A8]()).toOption :: Try(ExpressionEncoder[A9]()).toOption :: Try(ExpressionEncoder[A10]()).toOption :: Try(ExpressionEncoder[A11]()).toOption :: Try(ExpressionEncoder[A12]()).toOption :: Try(ExpressionEncoder[A13]()).toOption :: Nil val udf = SparkUserDefinedFunction(func, dataType, inputEncoders, outputEncoder).withName(name) val finalUdf = if (nullable) udf else udf.asNonNullable() def builder(e: Seq[Expression]) = if (e.length == 13) { finalUdf.createScalaUDF(e) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 13; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) finalUdf } /** * Registers a deterministic Scala closure of 14 arguments as user-defined function (UDF). * @tparam RT return type of UDF. 
* @since 1.3.0 */ def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, A11: TypeTag, A12: TypeTag, A13: TypeTag, A14: TypeTag](name: String, func: Function14[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, RT]): UserDefinedFunction = { val outputEncoder = Try(ExpressionEncoder[RT]()).toOption val ScalaReflection.Schema(dataType, nullable) = outputEncoder.map(outputSchema).getOrElse(ScalaReflection.schemaFor[RT]) val inputEncoders: Seq[Option[ExpressionEncoder[_]]] = Try(ExpressionEncoder[A1]()).toOption :: Try(ExpressionEncoder[A2]()).toOption :: Try(ExpressionEncoder[A3]()).toOption :: Try(ExpressionEncoder[A4]()).toOption :: Try(ExpressionEncoder[A5]()).toOption :: Try(ExpressionEncoder[A6]()).toOption :: Try(ExpressionEncoder[A7]()).toOption :: Try(ExpressionEncoder[A8]()).toOption :: Try(ExpressionEncoder[A9]()).toOption :: Try(ExpressionEncoder[A10]()).toOption :: Try(ExpressionEncoder[A11]()).toOption :: Try(ExpressionEncoder[A12]()).toOption :: Try(ExpressionEncoder[A13]()).toOption :: Try(ExpressionEncoder[A14]()).toOption :: Nil val udf = SparkUserDefinedFunction(func, dataType, inputEncoders, outputEncoder).withName(name) val finalUdf = if (nullable) udf else udf.asNonNullable() def builder(e: Seq[Expression]) = if (e.length == 14) { finalUdf.createScalaUDF(e) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 14; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) finalUdf } /** * Registers a deterministic Scala closure of 15 arguments as user-defined function (UDF). * @tparam RT return type of UDF. 
* @since 1.3.0 */ def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, A11: TypeTag, A12: TypeTag, A13: TypeTag, A14: TypeTag, A15: TypeTag](name: String, func: Function15[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, RT]): UserDefinedFunction = { val outputEncoder = Try(ExpressionEncoder[RT]()).toOption val ScalaReflection.Schema(dataType, nullable) = outputEncoder.map(outputSchema).getOrElse(ScalaReflection.schemaFor[RT]) val inputEncoders: Seq[Option[ExpressionEncoder[_]]] = Try(ExpressionEncoder[A1]()).toOption :: Try(ExpressionEncoder[A2]()).toOption :: Try(ExpressionEncoder[A3]()).toOption :: Try(ExpressionEncoder[A4]()).toOption :: Try(ExpressionEncoder[A5]()).toOption :: Try(ExpressionEncoder[A6]()).toOption :: Try(ExpressionEncoder[A7]()).toOption :: Try(ExpressionEncoder[A8]()).toOption :: Try(ExpressionEncoder[A9]()).toOption :: Try(ExpressionEncoder[A10]()).toOption :: Try(ExpressionEncoder[A11]()).toOption :: Try(ExpressionEncoder[A12]()).toOption :: Try(ExpressionEncoder[A13]()).toOption :: Try(ExpressionEncoder[A14]()).toOption :: Try(ExpressionEncoder[A15]()).toOption :: Nil val udf = SparkUserDefinedFunction(func, dataType, inputEncoders, outputEncoder).withName(name) val finalUdf = if (nullable) udf else udf.asNonNullable() def builder(e: Seq[Expression]) = if (e.length == 15) { finalUdf.createScalaUDF(e) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 15; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) finalUdf } /** * Registers a deterministic Scala closure of 16 arguments as user-defined function (UDF). * @tparam RT return type of UDF. 
* @since 1.3.0 */ def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, A11: TypeTag, A12: TypeTag, A13: TypeTag, A14: TypeTag, A15: TypeTag, A16: TypeTag](name: String, func: Function16[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, RT]): UserDefinedFunction = { val outputEncoder = Try(ExpressionEncoder[RT]()).toOption val ScalaReflection.Schema(dataType, nullable) = outputEncoder.map(outputSchema).getOrElse(ScalaReflection.schemaFor[RT]) val inputEncoders: Seq[Option[ExpressionEncoder[_]]] = Try(ExpressionEncoder[A1]()).toOption :: Try(ExpressionEncoder[A2]()).toOption :: Try(ExpressionEncoder[A3]()).toOption :: Try(ExpressionEncoder[A4]()).toOption :: Try(ExpressionEncoder[A5]()).toOption :: Try(ExpressionEncoder[A6]()).toOption :: Try(ExpressionEncoder[A7]()).toOption :: Try(ExpressionEncoder[A8]()).toOption :: Try(ExpressionEncoder[A9]()).toOption :: Try(ExpressionEncoder[A10]()).toOption :: Try(ExpressionEncoder[A11]()).toOption :: Try(ExpressionEncoder[A12]()).toOption :: Try(ExpressionEncoder[A13]()).toOption :: Try(ExpressionEncoder[A14]()).toOption :: Try(ExpressionEncoder[A15]()).toOption :: Try(ExpressionEncoder[A16]()).toOption :: Nil val udf = SparkUserDefinedFunction(func, dataType, inputEncoders, outputEncoder).withName(name) val finalUdf = if (nullable) udf else udf.asNonNullable() def builder(e: Seq[Expression]) = if (e.length == 16) { finalUdf.createScalaUDF(e) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 16; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) finalUdf } /** * Registers a deterministic Scala closure of 17 arguments as user-defined function (UDF). * @tparam RT return type of UDF. 
* @since 1.3.0 */ def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, A11: TypeTag, A12: TypeTag, A13: TypeTag, A14: TypeTag, A15: TypeTag, A16: TypeTag, A17: TypeTag](name: String, func: Function17[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, RT]): UserDefinedFunction = { val outputEncoder = Try(ExpressionEncoder[RT]()).toOption val ScalaReflection.Schema(dataType, nullable) = outputEncoder.map(outputSchema).getOrElse(ScalaReflection.schemaFor[RT]) val inputEncoders: Seq[Option[ExpressionEncoder[_]]] = Try(ExpressionEncoder[A1]()).toOption :: Try(ExpressionEncoder[A2]()).toOption :: Try(ExpressionEncoder[A3]()).toOption :: Try(ExpressionEncoder[A4]()).toOption :: Try(ExpressionEncoder[A5]()).toOption :: Try(ExpressionEncoder[A6]()).toOption :: Try(ExpressionEncoder[A7]()).toOption :: Try(ExpressionEncoder[A8]()).toOption :: Try(ExpressionEncoder[A9]()).toOption :: Try(ExpressionEncoder[A10]()).toOption :: Try(ExpressionEncoder[A11]()).toOption :: Try(ExpressionEncoder[A12]()).toOption :: Try(ExpressionEncoder[A13]()).toOption :: Try(ExpressionEncoder[A14]()).toOption :: Try(ExpressionEncoder[A15]()).toOption :: Try(ExpressionEncoder[A16]()).toOption :: Try(ExpressionEncoder[A17]()).toOption :: Nil val udf = SparkUserDefinedFunction(func, dataType, inputEncoders, outputEncoder).withName(name) val finalUdf = if (nullable) udf else udf.asNonNullable() def builder(e: Seq[Expression]) = if (e.length == 17) { finalUdf.createScalaUDF(e) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 17; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) finalUdf } /** * Registers a deterministic Scala closure of 18 arguments as user-defined function (UDF). * @tparam RT return type of UDF. 
* @since 1.3.0 */ def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, A11: TypeTag, A12: TypeTag, A13: TypeTag, A14: TypeTag, A15: TypeTag, A16: TypeTag, A17: TypeTag, A18: TypeTag](name: String, func: Function18[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, RT]): UserDefinedFunction = { val outputEncoder = Try(ExpressionEncoder[RT]()).toOption val ScalaReflection.Schema(dataType, nullable) = outputEncoder.map(outputSchema).getOrElse(ScalaReflection.schemaFor[RT]) val inputEncoders: Seq[Option[ExpressionEncoder[_]]] = Try(ExpressionEncoder[A1]()).toOption :: Try(ExpressionEncoder[A2]()).toOption :: Try(ExpressionEncoder[A3]()).toOption :: Try(ExpressionEncoder[A4]()).toOption :: Try(ExpressionEncoder[A5]()).toOption :: Try(ExpressionEncoder[A6]()).toOption :: Try(ExpressionEncoder[A7]()).toOption :: Try(ExpressionEncoder[A8]()).toOption :: Try(ExpressionEncoder[A9]()).toOption :: Try(ExpressionEncoder[A10]()).toOption :: Try(ExpressionEncoder[A11]()).toOption :: Try(ExpressionEncoder[A12]()).toOption :: Try(ExpressionEncoder[A13]()).toOption :: Try(ExpressionEncoder[A14]()).toOption :: Try(ExpressionEncoder[A15]()).toOption :: Try(ExpressionEncoder[A16]()).toOption :: Try(ExpressionEncoder[A17]()).toOption :: Try(ExpressionEncoder[A18]()).toOption :: Nil val udf = SparkUserDefinedFunction(func, dataType, inputEncoders, outputEncoder).withName(name) val finalUdf = if (nullable) udf else udf.asNonNullable() def builder(e: Seq[Expression]) = if (e.length == 18) { finalUdf.createScalaUDF(e) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 18; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) finalUdf } /** * Registers a deterministic Scala closure of 19 arguments as user-defined function (UDF). * @tparam RT return type of UDF. 
* @since 1.3.0 */ def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, A11: TypeTag, A12: TypeTag, A13: TypeTag, A14: TypeTag, A15: TypeTag, A16: TypeTag, A17: TypeTag, A18: TypeTag, A19: TypeTag](name: String, func: Function19[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, RT]): UserDefinedFunction = { val outputEncoder = Try(ExpressionEncoder[RT]()).toOption val ScalaReflection.Schema(dataType, nullable) = outputEncoder.map(outputSchema).getOrElse(ScalaReflection.schemaFor[RT]) val inputEncoders: Seq[Option[ExpressionEncoder[_]]] = Try(ExpressionEncoder[A1]()).toOption :: Try(ExpressionEncoder[A2]()).toOption :: Try(ExpressionEncoder[A3]()).toOption :: Try(ExpressionEncoder[A4]()).toOption :: Try(ExpressionEncoder[A5]()).toOption :: Try(ExpressionEncoder[A6]()).toOption :: Try(ExpressionEncoder[A7]()).toOption :: Try(ExpressionEncoder[A8]()).toOption :: Try(ExpressionEncoder[A9]()).toOption :: Try(ExpressionEncoder[A10]()).toOption :: Try(ExpressionEncoder[A11]()).toOption :: Try(ExpressionEncoder[A12]()).toOption :: Try(ExpressionEncoder[A13]()).toOption :: Try(ExpressionEncoder[A14]()).toOption :: Try(ExpressionEncoder[A15]()).toOption :: Try(ExpressionEncoder[A16]()).toOption :: Try(ExpressionEncoder[A17]()).toOption :: Try(ExpressionEncoder[A18]()).toOption :: Try(ExpressionEncoder[A19]()).toOption :: Nil val udf = SparkUserDefinedFunction(func, dataType, inputEncoders, outputEncoder).withName(name) val finalUdf = if (nullable) udf else udf.asNonNullable() def builder(e: Seq[Expression]) = if (e.length == 19) { finalUdf.createScalaUDF(e) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". 
Expected: 19; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) finalUdf } /** * Registers a deterministic Scala closure of 20 arguments as user-defined function (UDF). * @tparam RT return type of UDF. * @since 1.3.0 */ def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, A11: TypeTag, A12: TypeTag, A13: TypeTag, A14: TypeTag, A15: TypeTag, A16: TypeTag, A17: TypeTag, A18: TypeTag, A19: TypeTag, A20: TypeTag](name: String, func: Function20[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, RT]): UserDefinedFunction = { val outputEncoder = Try(ExpressionEncoder[RT]()).toOption val ScalaReflection.Schema(dataType, nullable) = outputEncoder.map(outputSchema).getOrElse(ScalaReflection.schemaFor[RT]) val inputEncoders: Seq[Option[ExpressionEncoder[_]]] = Try(ExpressionEncoder[A1]()).toOption :: Try(ExpressionEncoder[A2]()).toOption :: Try(ExpressionEncoder[A3]()).toOption :: Try(ExpressionEncoder[A4]()).toOption :: Try(ExpressionEncoder[A5]()).toOption :: Try(ExpressionEncoder[A6]()).toOption :: Try(ExpressionEncoder[A7]()).toOption :: Try(ExpressionEncoder[A8]()).toOption :: Try(ExpressionEncoder[A9]()).toOption :: Try(ExpressionEncoder[A10]()).toOption :: Try(ExpressionEncoder[A11]()).toOption :: Try(ExpressionEncoder[A12]()).toOption :: Try(ExpressionEncoder[A13]()).toOption :: Try(ExpressionEncoder[A14]()).toOption :: Try(ExpressionEncoder[A15]()).toOption :: Try(ExpressionEncoder[A16]()).toOption :: Try(ExpressionEncoder[A17]()).toOption :: Try(ExpressionEncoder[A18]()).toOption :: Try(ExpressionEncoder[A19]()).toOption :: Try(ExpressionEncoder[A20]()).toOption :: Nil val udf = SparkUserDefinedFunction(func, dataType, inputEncoders, outputEncoder).withName(name) val finalUdf = if (nullable) udf else udf.asNonNullable() def builder(e: Seq[Expression]) = if (e.length == 20) { 
finalUdf.createScalaUDF(e) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 20; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) finalUdf } /** * Registers a deterministic Scala closure of 21 arguments as user-defined function (UDF). * @tparam RT return type of UDF. * @since 1.3.0 */ def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, A11: TypeTag, A12: TypeTag, A13: TypeTag, A14: TypeTag, A15: TypeTag, A16: TypeTag, A17: TypeTag, A18: TypeTag, A19: TypeTag, A20: TypeTag, A21: TypeTag](name: String, func: Function21[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, RT]): UserDefinedFunction = { val outputEncoder = Try(ExpressionEncoder[RT]()).toOption val ScalaReflection.Schema(dataType, nullable) = outputEncoder.map(outputSchema).getOrElse(ScalaReflection.schemaFor[RT]) val inputEncoders: Seq[Option[ExpressionEncoder[_]]] = Try(ExpressionEncoder[A1]()).toOption :: Try(ExpressionEncoder[A2]()).toOption :: Try(ExpressionEncoder[A3]()).toOption :: Try(ExpressionEncoder[A4]()).toOption :: Try(ExpressionEncoder[A5]()).toOption :: Try(ExpressionEncoder[A6]()).toOption :: Try(ExpressionEncoder[A7]()).toOption :: Try(ExpressionEncoder[A8]()).toOption :: Try(ExpressionEncoder[A9]()).toOption :: Try(ExpressionEncoder[A10]()).toOption :: Try(ExpressionEncoder[A11]()).toOption :: Try(ExpressionEncoder[A12]()).toOption :: Try(ExpressionEncoder[A13]()).toOption :: Try(ExpressionEncoder[A14]()).toOption :: Try(ExpressionEncoder[A15]()).toOption :: Try(ExpressionEncoder[A16]()).toOption :: Try(ExpressionEncoder[A17]()).toOption :: Try(ExpressionEncoder[A18]()).toOption :: Try(ExpressionEncoder[A19]()).toOption :: Try(ExpressionEncoder[A20]()).toOption :: Try(ExpressionEncoder[A21]()).toOption :: Nil val udf = SparkUserDefinedFunction(func, 
dataType, inputEncoders, outputEncoder).withName(name) val finalUdf = if (nullable) udf else udf.asNonNullable() def builder(e: Seq[Expression]) = if (e.length == 21) { finalUdf.createScalaUDF(e) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 21; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) finalUdf } /** * Registers a deterministic Scala closure of 22 arguments as user-defined function (UDF). * @tparam RT return type of UDF. * @since 1.3.0 */ def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, A11: TypeTag, A12: TypeTag, A13: TypeTag, A14: TypeTag, A15: TypeTag, A16: TypeTag, A17: TypeTag, A18: TypeTag, A19: TypeTag, A20: TypeTag, A21: TypeTag, A22: TypeTag](name: String, func: Function22[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, RT]): UserDefinedFunction = { val outputEncoder = Try(ExpressionEncoder[RT]()).toOption val ScalaReflection.Schema(dataType, nullable) = outputEncoder.map(outputSchema).getOrElse(ScalaReflection.schemaFor[RT]) val inputEncoders: Seq[Option[ExpressionEncoder[_]]] = Try(ExpressionEncoder[A1]()).toOption :: Try(ExpressionEncoder[A2]()).toOption :: Try(ExpressionEncoder[A3]()).toOption :: Try(ExpressionEncoder[A4]()).toOption :: Try(ExpressionEncoder[A5]()).toOption :: Try(ExpressionEncoder[A6]()).toOption :: Try(ExpressionEncoder[A7]()).toOption :: Try(ExpressionEncoder[A8]()).toOption :: Try(ExpressionEncoder[A9]()).toOption :: Try(ExpressionEncoder[A10]()).toOption :: Try(ExpressionEncoder[A11]()).toOption :: Try(ExpressionEncoder[A12]()).toOption :: Try(ExpressionEncoder[A13]()).toOption :: Try(ExpressionEncoder[A14]()).toOption :: Try(ExpressionEncoder[A15]()).toOption :: Try(ExpressionEncoder[A16]()).toOption :: Try(ExpressionEncoder[A17]()).toOption :: 
Try(ExpressionEncoder[A18]()).toOption :: Try(ExpressionEncoder[A19]()).toOption :: Try(ExpressionEncoder[A20]()).toOption :: Try(ExpressionEncoder[A21]()).toOption :: Try(ExpressionEncoder[A22]()).toOption :: Nil val udf = SparkUserDefinedFunction(func, dataType, inputEncoders, outputEncoder).withName(name) val finalUdf = if (nullable) udf else udf.asNonNullable() def builder(e: Seq[Expression]) = if (e.length == 22) { finalUdf.createScalaUDF(e) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 22; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) finalUdf } ////////////////////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////////////////////// /** * Register a Java UDF class using reflection, for use from pyspark * * @param name udf name * @param className fully qualified class name of udf * @param returnDataType return type of udf. If it is null, spark would try to infer * via reflection. 
*/ private[sql] def registerJava(name: String, className: String, returnDataType: DataType): Unit = { try { val clazz = Utils.classForName[AnyRef](className) val udfInterfaces = clazz.getGenericInterfaces .filter(_.isInstanceOf[ParameterizedType]) .map(_.asInstanceOf[ParameterizedType]) .filter(e => e.getRawType.isInstanceOf[Class[_]] && e.getRawType.asInstanceOf[Class[_]].getCanonicalName.startsWith("org.apache.spark.sql.api.java.UDF")) if (udfInterfaces.length == 0) { throw new AnalysisException(s"UDF class $className doesn't implement any UDF interface") } else if (udfInterfaces.length > 1) { throw new AnalysisException(s"It is invalid to implement multiple UDF interfaces, UDF class $className") } else { try { val udf = clazz.getConstructor().newInstance() val udfReturnType = udfInterfaces(0).getActualTypeArguments.last var returnType = returnDataType if (returnType == null) { returnType = JavaTypeInference.inferDataType(udfReturnType)._1 } udfInterfaces(0).getActualTypeArguments.length match { case 1 => register(name, udf.asInstanceOf[UDF0[_]], returnType) case 2 => register(name, udf.asInstanceOf[UDF1[_, _]], returnType) case 3 => register(name, udf.asInstanceOf[UDF2[_, _, _]], returnType) case 4 => register(name, udf.asInstanceOf[UDF3[_, _, _, _]], returnType) case 5 => register(name, udf.asInstanceOf[UDF4[_, _, _, _, _]], returnType) case 6 => register(name, udf.asInstanceOf[UDF5[_, _, _, _, _, _]], returnType) case 7 => register(name, udf.asInstanceOf[UDF6[_, _, _, _, _, _, _]], returnType) case 8 => register(name, udf.asInstanceOf[UDF7[_, _, _, _, _, _, _, _]], returnType) case 9 => register(name, udf.asInstanceOf[UDF8[_, _, _, _, _, _, _, _, _]], returnType) case 10 => register(name, udf.asInstanceOf[UDF9[_, _, _, _, _, _, _, _, _, _]], returnType) case 11 => register(name, udf.asInstanceOf[UDF10[_, _, _, _, _, _, _, _, _, _, _]], returnType) case 12 => register(name, udf.asInstanceOf[UDF11[_, _, _, _, _, _, _, _, _, _, _, _]], returnType) case 13 => 
register(name, udf.asInstanceOf[UDF12[_, _, _, _, _, _, _, _, _, _, _, _, _]], returnType) case 14 => register(name, udf.asInstanceOf[UDF13[_, _, _, _, _, _, _, _, _, _, _, _, _, _]], returnType) case 15 => register(name, udf.asInstanceOf[UDF14[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _]], returnType) case 16 => register(name, udf.asInstanceOf[UDF15[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _]], returnType) case 17 => register(name, udf.asInstanceOf[UDF16[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _]], returnType) case 18 => register(name, udf.asInstanceOf[UDF17[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _]], returnType) case 19 => register(name, udf.asInstanceOf[UDF18[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _]], returnType) case 20 => register(name, udf.asInstanceOf[UDF19[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _]], returnType) case 21 => register(name, udf.asInstanceOf[UDF20[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _]], returnType) case 22 => register(name, udf.asInstanceOf[UDF21[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _]], returnType) case 23 => register(name, udf.asInstanceOf[UDF22[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _]], returnType) case n => throw new AnalysisException(s"UDF class with $n type arguments is not supported.") } } catch { case e @ (_: InstantiationException | _: IllegalArgumentException) => throw new AnalysisException(s"Can not instantiate class $className, please make sure it has public non argument constructor") } } } catch { case e: ClassNotFoundException => throw new AnalysisException(s"Can not load class $className, please make sure it is on the classpath") } } /** * Register a Java UDAF class using reflection, for use from pyspark * * @param name UDAF name * @param className fully qualified class name of UDAF */ private[sql] def registerJavaUDAF(name: String, className: String): Unit = { try { val clazz = 
Utils.classForName[AnyRef](className) if (!classOf[UserDefinedAggregateFunction].isAssignableFrom(clazz)) { throw new AnalysisException(s"class $className doesn't implement interface UserDefinedAggregateFunction") } val udaf = clazz.getConstructor().newInstance().asInstanceOf[UserDefinedAggregateFunction] register(name, udaf) } catch { case e: ClassNotFoundException => throw new AnalysisException(s"Can not load class ${className}, please make sure it is on the classpath") case e @ (_: InstantiationException | _: IllegalArgumentException) => throw new AnalysisException(s"Can not instantiate class ${className}, please make sure it has public non argument constructor") } } /** * Register a deterministic Java UDF0 instance as user-defined function (UDF). * @since 2.3.0 */ def register(name: String, f: UDF0[_], returnType: DataType): Unit = { val replaced = CharVarcharUtils.failIfHasCharVarchar(returnType) val func = () => f.asInstanceOf[UDF0[Any]].call() def builder(e: Seq[Expression]) = if (e.length == 0) { ScalaUDF(func, replaced, e, Nil, udfName = Some(name)) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 0; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) } /** * Register a deterministic Java UDF1 instance as user-defined function (UDF). * @since 1.3.0 */ def register(name: String, f: UDF1[_, _], returnType: DataType): Unit = { val replaced = CharVarcharUtils.failIfHasCharVarchar(returnType) val func = f.asInstanceOf[UDF1[Any, Any]].call(_: Any) def builder(e: Seq[Expression]) = if (e.length == 1) { ScalaUDF(func, replaced, e, Nil, udfName = Some(name)) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 1; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) } /** * Register a deterministic Java UDF2 instance as user-defined function (UDF). 
* @since 1.3.0 */ def register(name: String, f: UDF2[_, _, _], returnType: DataType): Unit = { val replaced = CharVarcharUtils.failIfHasCharVarchar(returnType) val func = f.asInstanceOf[UDF2[Any, Any, Any]].call(_: Any, _: Any) def builder(e: Seq[Expression]) = if (e.length == 2) { ScalaUDF(func, replaced, e, Nil, udfName = Some(name)) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 2; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) } /** * Register a deterministic Java UDF3 instance as user-defined function (UDF). * @since 1.3.0 */ def register(name: String, f: UDF3[_, _, _, _], returnType: DataType): Unit = { val replaced = CharVarcharUtils.failIfHasCharVarchar(returnType) val func = f.asInstanceOf[UDF3[Any, Any, Any, Any]].call(_: Any, _: Any, _: Any) def builder(e: Seq[Expression]) = if (e.length == 3) { ScalaUDF(func, replaced, e, Nil, udfName = Some(name)) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 3; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) } /** * Register a deterministic Java UDF4 instance as user-defined function (UDF). * @since 1.3.0 */ def register(name: String, f: UDF4[_, _, _, _, _], returnType: DataType): Unit = { val replaced = CharVarcharUtils.failIfHasCharVarchar(returnType) val func = f.asInstanceOf[UDF4[Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any) def builder(e: Seq[Expression]) = if (e.length == 4) { ScalaUDF(func, replaced, e, Nil, udfName = Some(name)) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 4; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) } /** * Register a deterministic Java UDF5 instance as user-defined function (UDF). 
* @since 1.3.0 */ def register(name: String, f: UDF5[_, _, _, _, _, _], returnType: DataType): Unit = { val replaced = CharVarcharUtils.failIfHasCharVarchar(returnType) val func = f.asInstanceOf[UDF5[Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any) def builder(e: Seq[Expression]) = if (e.length == 5) { ScalaUDF(func, replaced, e, Nil, udfName = Some(name)) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 5; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) } /** * Register a deterministic Java UDF6 instance as user-defined function (UDF). * @since 1.3.0 */ def register(name: String, f: UDF6[_, _, _, _, _, _, _], returnType: DataType): Unit = { val replaced = CharVarcharUtils.failIfHasCharVarchar(returnType) val func = f.asInstanceOf[UDF6[Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any) def builder(e: Seq[Expression]) = if (e.length == 6) { ScalaUDF(func, replaced, e, Nil, udfName = Some(name)) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 6; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) } /** * Register a deterministic Java UDF7 instance as user-defined function (UDF). * @since 1.3.0 */ def register(name: String, f: UDF7[_, _, _, _, _, _, _, _], returnType: DataType): Unit = { val replaced = CharVarcharUtils.failIfHasCharVarchar(returnType) val func = f.asInstanceOf[UDF7[Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any) def builder(e: Seq[Expression]) = if (e.length == 7) { ScalaUDF(func, replaced, e, Nil, udfName = Some(name)) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". 
Expected: 7; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) } /** * Register a deterministic Java UDF8 instance as user-defined function (UDF). * @since 1.3.0 */ def register(name: String, f: UDF8[_, _, _, _, _, _, _, _, _], returnType: DataType): Unit = { val replaced = CharVarcharUtils.failIfHasCharVarchar(returnType) val func = f.asInstanceOf[UDF8[Any, Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any) def builder(e: Seq[Expression]) = if (e.length == 8) { ScalaUDF(func, replaced, e, Nil, udfName = Some(name)) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 8; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) } /** * Register a deterministic Java UDF9 instance as user-defined function (UDF). * @since 1.3.0 */ def register(name: String, f: UDF9[_, _, _, _, _, _, _, _, _, _], returnType: DataType): Unit = { val replaced = CharVarcharUtils.failIfHasCharVarchar(returnType) val func = f.asInstanceOf[UDF9[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any) def builder(e: Seq[Expression]) = if (e.length == 9) { ScalaUDF(func, replaced, e, Nil, udfName = Some(name)) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 9; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) } /** * Register a deterministic Java UDF10 instance as user-defined function (UDF). 
* @since 1.3.0 */ def register(name: String, f: UDF10[_, _, _, _, _, _, _, _, _, _, _], returnType: DataType): Unit = { val replaced = CharVarcharUtils.failIfHasCharVarchar(returnType) val func = f.asInstanceOf[UDF10[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any) def builder(e: Seq[Expression]) = if (e.length == 10) { ScalaUDF(func, replaced, e, Nil, udfName = Some(name)) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 10; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) } /** * Register a deterministic Java UDF11 instance as user-defined function (UDF). * @since 1.3.0 */ def register(name: String, f: UDF11[_, _, _, _, _, _, _, _, _, _, _, _], returnType: DataType): Unit = { val replaced = CharVarcharUtils.failIfHasCharVarchar(returnType) val func = f.asInstanceOf[UDF11[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any) def builder(e: Seq[Expression]) = if (e.length == 11) { ScalaUDF(func, replaced, e, Nil, udfName = Some(name)) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 11; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) } /** * Register a deterministic Java UDF12 instance as user-defined function (UDF). 
* @since 1.3.0 */ def register(name: String, f: UDF12[_, _, _, _, _, _, _, _, _, _, _, _, _], returnType: DataType): Unit = { val replaced = CharVarcharUtils.failIfHasCharVarchar(returnType) val func = f.asInstanceOf[UDF12[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any) def builder(e: Seq[Expression]) = if (e.length == 12) { ScalaUDF(func, replaced, e, Nil, udfName = Some(name)) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 12; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) } /** * Register a deterministic Java UDF13 instance as user-defined function (UDF). * @since 1.3.0 */ def register(name: String, f: UDF13[_, _, _, _, _, _, _, _, _, _, _, _, _, _], returnType: DataType): Unit = { val replaced = CharVarcharUtils.failIfHasCharVarchar(returnType) val func = f.asInstanceOf[UDF13[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any) def builder(e: Seq[Expression]) = if (e.length == 13) { ScalaUDF(func, replaced, e, Nil, udfName = Some(name)) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 13; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) } /** * Register a deterministic Java UDF14 instance as user-defined function (UDF). 
* @since 1.3.0 */ def register(name: String, f: UDF14[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _], returnType: DataType): Unit = { val replaced = CharVarcharUtils.failIfHasCharVarchar(returnType) val func = f.asInstanceOf[UDF14[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any) def builder(e: Seq[Expression]) = if (e.length == 14) { ScalaUDF(func, replaced, e, Nil, udfName = Some(name)) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 14; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) } /** * Register a deterministic Java UDF15 instance as user-defined function (UDF). * @since 1.3.0 */ def register(name: String, f: UDF15[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _], returnType: DataType): Unit = { val replaced = CharVarcharUtils.failIfHasCharVarchar(returnType) val func = f.asInstanceOf[UDF15[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any) def builder(e: Seq[Expression]) = if (e.length == 15) { ScalaUDF(func, replaced, e, Nil, udfName = Some(name)) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 15; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) } /** * Register a deterministic Java UDF16 instance as user-defined function (UDF). 
* @since 1.3.0 */ def register(name: String, f: UDF16[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _], returnType: DataType): Unit = { val replaced = CharVarcharUtils.failIfHasCharVarchar(returnType) val func = f.asInstanceOf[UDF16[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any) def builder(e: Seq[Expression]) = if (e.length == 16) { ScalaUDF(func, replaced, e, Nil, udfName = Some(name)) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 16; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) } /** * Register a deterministic Java UDF17 instance as user-defined function (UDF). * @since 1.3.0 */ def register(name: String, f: UDF17[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _], returnType: DataType): Unit = { val replaced = CharVarcharUtils.failIfHasCharVarchar(returnType) val func = f.asInstanceOf[UDF17[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any) def builder(e: Seq[Expression]) = if (e.length == 17) { ScalaUDF(func, replaced, e, Nil, udfName = Some(name)) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 17; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) } /** * Register a deterministic Java UDF18 instance as user-defined function (UDF). 
* @since 1.3.0 */ def register(name: String, f: UDF18[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _], returnType: DataType): Unit = { val replaced = CharVarcharUtils.failIfHasCharVarchar(returnType) val func = f.asInstanceOf[UDF18[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any) def builder(e: Seq[Expression]) = if (e.length == 18) { ScalaUDF(func, replaced, e, Nil, udfName = Some(name)) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 18; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) } /** * Register a deterministic Java UDF19 instance as user-defined function (UDF). * @since 1.3.0 */ def register(name: String, f: UDF19[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _], returnType: DataType): Unit = { val replaced = CharVarcharUtils.failIfHasCharVarchar(returnType) val func = f.asInstanceOf[UDF19[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any) def builder(e: Seq[Expression]) = if (e.length == 19) { ScalaUDF(func, replaced, e, Nil, udfName = Some(name)) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 19; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) } /** * Register a deterministic Java UDF20 instance as user-defined function (UDF). 
* @since 1.3.0 */ def register(name: String, f: UDF20[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _], returnType: DataType): Unit = { val replaced = CharVarcharUtils.failIfHasCharVarchar(returnType) val func = f.asInstanceOf[UDF20[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any) def builder(e: Seq[Expression]) = if (e.length == 20) { ScalaUDF(func, replaced, e, Nil, udfName = Some(name)) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 20; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) } /** * Register a deterministic Java UDF21 instance as user-defined function (UDF). * @since 1.3.0 */ def register(name: String, f: UDF21[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _], returnType: DataType): Unit = { val replaced = CharVarcharUtils.failIfHasCharVarchar(returnType) val func = f.asInstanceOf[UDF21[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any) def builder(e: Seq[Expression]) = if (e.length == 21) { ScalaUDF(func, replaced, e, Nil, udfName = Some(name)) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 21; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) } /** * Register a deterministic Java UDF22 instance as user-defined function (UDF). 
* @since 1.3.0 */ def register(name: String, f: UDF22[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _], returnType: DataType): Unit = { val replaced = CharVarcharUtils.failIfHasCharVarchar(returnType) val func = f.asInstanceOf[UDF22[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any) def builder(e: Seq[Expression]) = if (e.length == 22) { ScalaUDF(func, replaced, e, Nil, udfName = Some(name)) } else { throw new AnalysisException("Invalid number of arguments for function " + name + ". Expected: 22; Found: " + e.length) } functionRegistry.createOrReplaceTempFunction(name, builder) } // scalastyle:on line.size.limit } private[sql] object UDFRegistration { /** * Obtaining the schema of output encoder for `ScalaUDF`. * * As the serialization in `ScalaUDF` is for individual column, not the whole row, * we just take the data type of vanilla object serializer, not `serializer` which * is transformed somehow for top-level row. */ def outputSchema(outputEncoder: ExpressionEncoder[_]): ScalaReflection.Schema = { ScalaReflection.Schema(outputEncoder.objSerializer.dataType, outputEncoder.objSerializer.nullable) } }
BryanCutler/spark
sql/core/src/main/scala/org/apache/spark/sql/UDFRegistration.scala
Scala
apache-2.0
70,366
/* * Copyright 2016 Guy Van den Broeck and Wannes Meert (UCLA and KU Leuven) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.ucla.cs.starai.forclift.rcr import scala.collection._ import edu.ucla.cs.starai.forclift.compiler._ import edu.ucla.cs.starai.forclift._ import edu.ucla.cs.starai.forclift.examples.models._ import edu.ucla.cs.starai.forclift.inference._ import scala.util.Random._ import util.KLD._ class NoTruthRCR( weightedCNF: WeightedCNF, rcrCompilerBuilder: Compiler.Builder = Compiler.Builder.default, verbose: Boolean = false) extends RelaxCompensateRecover(weightedCNF, rcrCompilerBuilder, verbose) { override def onCompensateRecoverStart( initialWeightFunction: PredicateWeights, compensations: List[Compensation], marginalCircuitsSet: MarginalCircuitsSet) { super.onCompensateRecoverStart(initialWeightFunction, compensations, marginalCircuitsSet) val relaxedCNF: CNF = marginalCircuitsSet.independentZs.map { _.cnf }.foldLeft(new CNF(Nil)) { _ ++ _ } println("Relaxed initial structure:") println(relaxedCNF) println } override def onStartCompensation( initialWeightFunction: PredicateWeights, compensations: List[Compensation], marginalCircuitsSet: MarginalCircuitsSet) { super.onStartCompensation(initialWeightFunction, compensations, marginalCircuitsSet) val relaxedCNF: CNF = marginalCircuitsSet.independentZs.map { _.cnf }.foldLeft(new CNF(Nil)) { _ ++ _ } println("Recovered structure:") println(relaxedCNF) println } override def onEndCompensation( weights: 
PredicateWeights, compensations: List[Compensation], marginalCircuitsSet: MarginalCircuitsSet) { super.onEndCompensation(weights, compensations, marginalCircuitsSet) println("Compensation converged to weights:") for (compensation <- compensations) { println(" - " + compensation.eq.thetaCopy + "=" + weights(compensation.eq.thetaCopy)) println(" - " + compensation.eq.thetaOrig + "=" + weights(compensation.eq.thetaOrig)) } for (marginalCircuit <- marginalCircuitsSet.origMarginals) { println("Probability for class of queries " + marginalCircuit.queryClass + " is " + marginalCircuit.marginal) } println } override def onCompensationIteration( i: Int, compensations: List[Compensation], marginalCircuitsSet: MarginalCircuitsSet, weights: PredicateWeights) { super.onCompensationIteration(i, compensations, marginalCircuitsSet, weights) // println("Compensation iteration " + i) } }
UCLA-StarAI/Forclift
src/main/scala/edu/ucla/cs/starai/forclift/rcr/NoTruthRCR.scala
Scala
apache-2.0
3,077
/* * Copyright 2001-2013 Artima, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.scalatest import org.scalautils._ class ShouldTripleEqualsToleranceSpec extends Spec /* with NonImplicitAssertions */ with Matchers with TripleEquals with Tolerance { val sevenDotOh = 7.0 val minusSevenDotOh = -7.0 val sevenDotOhFloat = 7.0f val minusSevenDotOhFloat = -7.0f val sevenLong = 7L val minusSevenLong = -7L val sevenInt = 7 val minusSevenInt = -7 val sevenShort: Short = 7 val minusSevenShort: Short = -7 val sevenByte: Byte = 7 val minusSevenByte: Byte = -7 /* I decided that for X +- Y, Y can be any numeric type that's implicitly convertible to X. So if X is Double, Y could be Double, Float, Long, Int, Short, Byte. If X is Long, Y could be Long, Int, Short, Byte. If X is Short, Y could be Short or Byte. And if X is Byte, Y must be Byte. assert(minusSevenDotOhFloat === (-6.8f +- 0.2d)) */ /* Chose not to do the symmetry, because no one needs it and implementing it would require an implicit. 
So these fail: (7.1 +- 0.2) should === (sevenDotOh) (7.5 +- 0.2) should !== (sevenDotOh) */ object `The should === syntax` { def `should be true if the number is within the given interval` { // Double +- Double sevenDotOh should === (7.1 +- 0.2) sevenDotOh should === (6.9 +- 0.2) sevenDotOh should === (7.0 +- 0.2) sevenDotOh should === (7.2 +- 0.2) sevenDotOh should === (6.8 +- 0.2) minusSevenDotOh should === (-7.1 +- 0.2) minusSevenDotOh should === (-6.9 +- 0.2) minusSevenDotOh should === (-7.0 +- 0.2) minusSevenDotOh should === (-7.2 +- 0.2) minusSevenDotOh should === (-6.8 +- 0.2) // Double +- Float sevenDotOh should === (7.1 +- 0.2f) sevenDotOh should === (6.9 +- 0.2f) sevenDotOh should === (7.0 +- 0.2f) sevenDotOh should === (7.2 +- 0.2f) sevenDotOh should === (6.8 +- 0.2f) minusSevenDotOh should === (-7.1 +- 0.2f) minusSevenDotOh should === (-6.9 +- 0.2f) minusSevenDotOh should === (-7.0 +- 0.2f) minusSevenDotOh should === (-7.2 +- 0.2f) minusSevenDotOh should === (-6.8 +- 0.2f) // Double +- Long sevenDotOh should === (7.1 +- 2L) sevenDotOh should === (6.9 +- 2L) sevenDotOh should === (7.0 +- 2L) sevenDotOh should === (7.2 +- 2L) sevenDotOh should === (6.8 +- 2L) minusSevenDotOh should === (-7.1 +- 2L) minusSevenDotOh should === (-6.9 +- 2L) minusSevenDotOh should === (-7.0 +- 2L) minusSevenDotOh should === (-7.2 +- 2L) minusSevenDotOh should === (-6.8 +- 2L) // Double +- Int sevenDotOh should === (7.1 +- 2) sevenDotOh should === (6.9 +- 2) sevenDotOh should === (7.0 +- 2) sevenDotOh should === (7.2 +- 2) sevenDotOh should === (6.8 +- 2) minusSevenDotOh should === (-7.1 +- 2) minusSevenDotOh should === (-6.9 +- 2) minusSevenDotOh should === (-7.0 +- 2) minusSevenDotOh should === (-7.2 +- 2) minusSevenDotOh should === (-6.8 +- 2) // Double +- Short sevenDotOh should === (7.1 +- 2.toShort) sevenDotOh should === (6.9 +- 2.toShort) sevenDotOh should === (7.0 +- 2.toShort) sevenDotOh should === (7.2 +- 2.toShort) sevenDotOh should === (6.8 +- 2.toShort) 
minusSevenDotOh should === (-7.1 +- 2.toShort) minusSevenDotOh should === (-6.9 +- 2.toShort) minusSevenDotOh should === (-7.0 +- 2.toShort) minusSevenDotOh should === (-7.2 +- 2.toShort) minusSevenDotOh should === (-6.8 +- 2.toShort) // Double +- Byte sevenDotOh should === (7.1 +- 2.toByte) sevenDotOh should === (6.9 +- 2.toByte) sevenDotOh should === (7.0 +- 2.toByte) sevenDotOh should === (7.2 +- 2.toByte) sevenDotOh should === (6.8 +- 2.toByte) minusSevenDotOh should === (-7.1 +- 2.toByte) minusSevenDotOh should === (-6.9 +- 2.toByte) minusSevenDotOh should === (-7.0 +- 2.toByte) minusSevenDotOh should === (-7.2 +- 2.toByte) minusSevenDotOh should === (-6.8 +- 2.toByte) // Float +- Float sevenDotOhFloat should === (7.1f +- 0.2f) sevenDotOhFloat should === (6.9f +- 0.2f) sevenDotOhFloat should === (7.0f +- 0.2f) sevenDotOhFloat should === (7.2f +- 0.2f) sevenDotOhFloat should === (6.8f +- 0.2f) minusSevenDotOhFloat should === (-7.1f +- 0.2f) minusSevenDotOhFloat should === (-6.9f +- 0.2f) minusSevenDotOhFloat should === (-7.0f +- 0.2f) minusSevenDotOhFloat should === (-7.2f +- 0.2f) minusSevenDotOhFloat should === (-6.8f +- 0.2f) // Float +- Long sevenDotOhFloat should === (7.1f +- 2L) sevenDotOhFloat should === (6.9f +- 2L) sevenDotOhFloat should === (7.0f +- 2L) sevenDotOhFloat should === (7.2f +- 2L) sevenDotOhFloat should === (6.8f +- 2L) minusSevenDotOhFloat should === (-7.1f +- 2L) minusSevenDotOhFloat should === (-6.9f +- 2L) minusSevenDotOhFloat should === (-7.0f +- 2L) minusSevenDotOhFloat should === (-7.2f +- 2L) minusSevenDotOhFloat should === (-6.8f +- 2L) // Float +- Int sevenDotOhFloat should === (7.1f +- 2) sevenDotOhFloat should === (6.9f +- 2) sevenDotOhFloat should === (7.0f +- 2) sevenDotOhFloat should === (7.2f +- 2) sevenDotOhFloat should === (6.8f +- 2) minusSevenDotOhFloat should === (-7.1f +- 2) minusSevenDotOhFloat should === (-6.9f +- 2) minusSevenDotOhFloat should === (-7.0f +- 2) minusSevenDotOhFloat should === (-7.2f +- 2) 
minusSevenDotOhFloat should === (-6.8f +- 2) // Float +- Short sevenDotOhFloat should === (7.1f +- 2.toShort) sevenDotOhFloat should === (6.9f +- 2.toShort) sevenDotOhFloat should === (7.0f +- 2.toShort) sevenDotOhFloat should === (7.2f +- 2.toShort) sevenDotOhFloat should === (6.8f +- 2.toShort) minusSevenDotOhFloat should === (-7.1f +- 2.toShort) minusSevenDotOhFloat should === (-6.9f +- 2.toShort) minusSevenDotOhFloat should === (-7.0f +- 2.toShort) minusSevenDotOhFloat should === (-7.2f +- 2.toShort) minusSevenDotOhFloat should === (-6.8f +- 2.toShort) // Float +- Byte sevenDotOhFloat should === (7.1f +- 2.toByte) sevenDotOhFloat should === (6.9f +- 2.toByte) sevenDotOhFloat should === (7.0f +- 2.toByte) sevenDotOhFloat should === (7.2f +- 2.toByte) sevenDotOhFloat should === (6.8f +- 2.toByte) minusSevenDotOhFloat should === (-7.1f +- 2.toByte) minusSevenDotOhFloat should === (-6.9f +- 2.toByte) minusSevenDotOhFloat should === (-7.0f +- 2.toByte) minusSevenDotOhFloat should === (-7.2f +- 2.toByte) minusSevenDotOhFloat should === (-6.8f +- 2.toByte) // Long +- Long sevenLong should === (9L +- 2L) sevenLong should === (8L +- 2L) sevenLong should === (7L +- 2L) sevenLong should === (6L +- 2L) sevenLong should === (5L +- 2L) minusSevenLong should === (-9L +- 2L) minusSevenLong should === (-8L +- 2L) minusSevenLong should === (-7L +- 2L) minusSevenLong should === (-6L +- 2L) minusSevenLong should === (-5L +- 2L) // Long +- Int sevenLong should === (9L +- 2) sevenLong should === (8L +- 2) sevenLong should === (7L +- 2) sevenLong should === (6L +- 2) sevenLong should === (5L +- 2) minusSevenLong should === (-9L +- 2) minusSevenLong should === (-8L +- 2) minusSevenLong should === (-7L +- 2) minusSevenLong should === (-6L +- 2) minusSevenLong should === (-5L +- 2) // Long +- Short sevenLong should === (9L +- 2.toShort) sevenLong should === (8L +- 2.toShort) sevenLong should === (7L +- 2.toShort) sevenLong should === (6L +- 2.toShort) sevenLong should === (5L +- 
2.toShort) minusSevenLong should === (-9L +- 2.toShort) minusSevenLong should === (-8L +- 2.toShort) minusSevenLong should === (-7L +- 2.toShort) minusSevenLong should === (-6L +- 2.toShort) minusSevenLong should === (-5L +- 2.toShort) // Long +- Byte sevenLong should === (9L +- 2.toByte) sevenLong should === (8L +- 2.toByte) sevenLong should === (7L +- 2.toByte) sevenLong should === (6L +- 2.toByte) sevenLong should === (5L +- 2.toByte) minusSevenLong should === (-9L +- 2.toByte) minusSevenLong should === (-8L +- 2.toByte) minusSevenLong should === (-7L +- 2.toByte) minusSevenLong should === (-6L +- 2.toByte) minusSevenLong should === (-5L +- 2.toByte) // Int +- Int sevenInt should === (9 +- 2) sevenInt should === (8 +- 2) sevenInt should === (7 +- 2) sevenInt should === (6 +- 2) sevenInt should === (5 +- 2) minusSevenInt should === (-9 +- 2) minusSevenInt should === (-8 +- 2) minusSevenInt should === (-7 +- 2) minusSevenInt should === (-6 +- 2) minusSevenInt should === (-5 +- 2) // Int +- Short sevenInt should === (9 +- 2.toShort) sevenInt should === (8 +- 2.toShort) sevenInt should === (7 +- 2.toShort) sevenInt should === (6 +- 2.toShort) sevenInt should === (5 +- 2.toShort) minusSevenInt should === (-9 +- 2.toShort) minusSevenInt should === (-8 +- 2.toShort) minusSevenInt should === (-7 +- 2.toShort) minusSevenInt should === (-6 +- 2.toShort) minusSevenInt should === (-5 +- 2.toShort) // Int +- Byte sevenInt should === (9 +- 2.toByte) sevenInt should === (8 +- 2.toByte) sevenInt should === (7 +- 2.toByte) sevenInt should === (6 +- 2.toByte) sevenInt should === (5 +- 2.toByte) minusSevenInt should === (-9 +- 2.toByte) minusSevenInt should === (-8 +- 2.toByte) minusSevenInt should === (-7 +- 2.toByte) minusSevenInt should === (-6 +- 2.toByte) minusSevenInt should === (-5 +- 2.toByte) // Short +- Short sevenShort should === (9.toShort +- 2.toShort) sevenShort should === (8.toShort +- 2.toShort) sevenShort should === (7.toShort +- 2.toShort) sevenShort should === 
(6.toShort +- 2.toShort) sevenShort should === (5.toShort +- 2.toShort) minusSevenShort should === ((-9).toShort +- 2.toShort) minusSevenShort should === ((-8).toShort +- 2.toShort) minusSevenShort should === ((-7).toShort +- 2.toShort) minusSevenShort should === ((-6).toShort +- 2.toShort) minusSevenShort should === ((-5).toShort +- 2.toShort) // Short +- Byte sevenShort should === (9.toShort +- 2.toByte) sevenShort should === (8.toShort +- 2.toByte) sevenShort should === (7.toShort +- 2.toByte) sevenShort should === (6.toShort +- 2.toByte) sevenShort should === (5.toShort +- 2.toByte) minusSevenShort should === ((-9).toShort +- 2.toByte) minusSevenShort should === ((-8).toShort +- 2.toByte) minusSevenShort should === ((-7).toShort +- 2.toByte) minusSevenShort should === ((-6).toShort +- 2.toByte) minusSevenShort should === ((-5).toShort +- 2.toByte) // Byte +- Byte sevenByte should === (9.toByte +- 2.toByte) sevenByte should === (8.toByte +- 2.toByte) sevenByte should === (7.toByte +- 2.toByte) sevenByte should === (6.toByte +- 2.toByte) sevenByte should === (5.toByte +- 2.toByte) minusSevenByte should === ((-9).toByte +- 2.toByte) minusSevenByte should === ((-8).toByte +- 2.toByte) minusSevenByte should === ((-7).toByte +- 2.toByte) minusSevenByte should === ((-6).toByte +- 2.toByte) minusSevenByte should === ((-5).toByte +- 2.toByte) } def `should throw TFE if the number is outside the given interval` { // Double +- Double val caught = intercept[TestFailedException] { sevenDotOh should === (7.5 +- 0.2) } assert(caught.getMessage === "7.0 did not equal 7.5 plus or minus 0.2") intercept[TestFailedException] { sevenDotOh should === (6.5 +- 0.2) } intercept[TestFailedException] { minusSevenDotOh should === (-7.5 +- 0.2) } intercept[TestFailedException] { minusSevenDotOh should === (-6.5 +- 0.2) } // Double +- Float intercept[TestFailedException] { sevenDotOh should === (7.5 +- 0.2f) } intercept[TestFailedException] { sevenDotOh should === (6.5 +- 0.2f) } 
intercept[TestFailedException] { minusSevenDotOh should === (-7.5 +- 0.2f) } intercept[TestFailedException] { minusSevenDotOh should === (-6.5 +- 0.2f) } // Double +- Long intercept[TestFailedException] { sevenDotOh should === (4.0 +- 2L) } intercept[TestFailedException] { sevenDotOh should === (9.1 +- 2L) } intercept[TestFailedException] { minusSevenDotOh should === (-4.0 +- 2L) } intercept[TestFailedException] { minusSevenDotOh should === (-9.1 +- 2L) } // Double +- Int intercept[TestFailedException] { sevenDotOh should === (4.0 +- 2) } intercept[TestFailedException] { sevenDotOh should === (9.1 +- 2) } intercept[TestFailedException] { minusSevenDotOh should === (-4.0 +- 2) } intercept[TestFailedException] { minusSevenDotOh should === (-9.1 +- 2) } // Double +- Short intercept[TestFailedException] { sevenDotOh should === (4.0 +- 2.toShort) } intercept[TestFailedException] { sevenDotOh should === (9.1 +- 2.toShort) } intercept[TestFailedException] { minusSevenDotOh should === (-4.0 +- 2.toShort) } intercept[TestFailedException] { minusSevenDotOh should === (-9.1 +- 2.toShort) } // Double +- Byte intercept[TestFailedException] { sevenDotOh should === (4.0 +- 2.toByte) } intercept[TestFailedException] { sevenDotOh should === (9.1 +- 2.toByte) } intercept[TestFailedException] { minusSevenDotOh should === (-4.0 +- 2.toByte) } intercept[TestFailedException] { minusSevenDotOh should === (-9.1 +- 2.toByte) } // Float +- Float intercept[TestFailedException] { sevenDotOhFloat should === (7.5f +- 0.2f) } intercept[TestFailedException] { sevenDotOhFloat should === (6.5f +- 0.2f) } intercept[TestFailedException] { minusSevenDotOhFloat should === (-7.5f +- 0.2f) } intercept[TestFailedException] { minusSevenDotOhFloat should === (-6.5f +- 0.2f) } // Float +- Long intercept[TestFailedException] { sevenDotOhFloat should === (4.0f +- 2L) } intercept[TestFailedException] { sevenDotOhFloat should === (9.1f +- 2L) } intercept[TestFailedException] { minusSevenDotOhFloat should === 
(-4.0f +- 2L) } intercept[TestFailedException] { minusSevenDotOhFloat should === (-9.1f +- 2L) } // Float +- Int intercept[TestFailedException] { sevenDotOhFloat should === (4.0f +- 2) } intercept[TestFailedException] { sevenDotOhFloat should === (9.1f +- 2) } intercept[TestFailedException] { minusSevenDotOhFloat should === (-4.0f +- 2) } intercept[TestFailedException] { minusSevenDotOhFloat should === (-9.1f +- 2) } // Float +- Short intercept[TestFailedException] { sevenDotOhFloat should === (4.0f +- 2.toShort) } intercept[TestFailedException] { sevenDotOhFloat should === (9.1f +- 2.toShort) } intercept[TestFailedException] { minusSevenDotOhFloat should === (-4.0f +- 2.toShort) } intercept[TestFailedException] { minusSevenDotOhFloat should === (-9.1f +- 2.toShort) } // Float +- Byte intercept[TestFailedException] { sevenDotOhFloat should === (4.0f +- 2.toByte) } intercept[TestFailedException] { sevenDotOhFloat should === (9.1f +- 2.toByte) } intercept[TestFailedException] { minusSevenDotOhFloat should === (-4.0f +- 2.toByte) } intercept[TestFailedException] { minusSevenDotOhFloat should === (-9.1f +- 2.toByte) } // Long +- Long intercept[TestFailedException] { sevenLong should === (4L +- 2L) } intercept[TestFailedException] { sevenLong should === (10L +- 2L) } intercept[TestFailedException] { minusSevenLong should === (-4L +- 2L) } intercept[TestFailedException] { minusSevenLong should === (-10L +- 2L) } // Long +- Int intercept[TestFailedException] { sevenLong should === (4L +- 2) } intercept[TestFailedException] { sevenLong should === (10L +- 2) } intercept[TestFailedException] { minusSevenLong should === (-4L +- 2) } intercept[TestFailedException] { minusSevenLong should === (-10L +- 2) } // Long +- Short intercept[TestFailedException] { sevenLong should === (4L +- 2.toShort) } intercept[TestFailedException] { sevenLong should === (10L +- 2.toShort) } intercept[TestFailedException] { minusSevenLong should === (-4L +- 2.toShort) } intercept[TestFailedException] 
{ minusSevenLong should === (-10L +- 2.toShort) } // Long +- Byte intercept[TestFailedException] { sevenLong should === (4L +- 2.toByte) } intercept[TestFailedException] { sevenLong should === (10L +- 2.toByte) } intercept[TestFailedException] { minusSevenLong should === (-4L +- 2.toByte) } intercept[TestFailedException] { minusSevenLong should === (-10L +- 2.toByte) } // Int +- Int intercept[TestFailedException] { sevenInt should === (4 +- 2) } intercept[TestFailedException] { sevenInt should === (10 +- 2) } intercept[TestFailedException] { minusSevenInt should === (-4 +- 2) } intercept[TestFailedException] { minusSevenInt should === (-10 +- 2) } // Int +- Short intercept[TestFailedException] { sevenInt should === (4 +- 2.toShort) } intercept[TestFailedException] { sevenInt should === (10 +- 2.toShort) } intercept[TestFailedException] { minusSevenInt should === (-4 +- 2.toShort) } intercept[TestFailedException] { minusSevenInt should === (-10 +- 2.toShort) } // Int +- Byte intercept[TestFailedException] { sevenInt should === (4 +- 2.toByte) } intercept[TestFailedException] { sevenInt should === (10 +- 2.toByte) } intercept[TestFailedException] { minusSevenInt should === (-4 +- 2.toByte) } intercept[TestFailedException] { minusSevenInt should === (-10 +- 2.toByte) } // Short +- Short intercept[TestFailedException] { sevenShort should === (4.toShort +- 2.toShort) } intercept[TestFailedException] { sevenShort should === (10.toShort +- 2.toShort) } intercept[TestFailedException] { minusSevenShort should === ((-4).toShort +- 2.toShort) } intercept[TestFailedException] { minusSevenShort should === ((-10).toShort +- 2.toShort) } // Short +- Byte intercept[TestFailedException] { sevenShort should === (4.toShort +- 2.toByte) } intercept[TestFailedException] { sevenShort should === (10.toShort +- 2.toByte) } intercept[TestFailedException] { minusSevenShort should === ((-4).toShort +- 2.toByte) } intercept[TestFailedException] { minusSevenShort should === ((-10).toShort +- 
2.toByte) } // Byte +- Byte intercept[TestFailedException] { sevenByte should === (4.toByte +- 2.toByte) } intercept[TestFailedException] { sevenByte should === (10.toByte +- 2.toByte) } intercept[TestFailedException] { minusSevenByte should === ((-4).toByte +- 2.toByte) } intercept[TestFailedException] { minusSevenByte should === ((-10).toByte +- 2.toByte) } } } object `The !== syntax` { def `should succeed if the number is outside the given interval` { // Double +- Double sevenDotOh should !== (7.5 +- 0.2) sevenDotOh should !== (6.5 +- 0.2) minusSevenDotOh should !== (-7.5 +- 0.2) minusSevenDotOh should !== (-6.5 +- 0.2) // Double +- Float sevenDotOh should !== (7.5 +- 0.2f) sevenDotOh should !== (6.5 +- 0.2f) minusSevenDotOh should !== (-7.5 +- 0.2f) minusSevenDotOh should !== (-6.5 +- 0.2f) // Double +- Long sevenDotOh should !== (4.0 +- 2L) sevenDotOh should !== (9.1 +- 2L) minusSevenDotOh should !== (-4.0 +- 2L) minusSevenDotOh should !== (-9.1 +- 2L) // Double +- Int sevenDotOh should !== (4.0 +- 2) sevenDotOh should !== (9.1 +- 2) minusSevenDotOh should !== (-4.0 +- 2) minusSevenDotOh should !== (-9.1 +- 2) // Double +- Short sevenDotOh should !== (4.0 +- 2.toShort) sevenDotOh should !== (9.1 +- 2.toShort) minusSevenDotOh should !== (-4.0 +- 2.toShort) minusSevenDotOh should !== (-9.1 +- 2.toShort) // Double +- Byte sevenDotOh should !== (4.0 +- 2.toByte) sevenDotOh should !== (9.1 +- 2.toByte) minusSevenDotOh should !== (-4.0 +- 2.toByte) minusSevenDotOh should !== (-9.1 +- 2.toByte) // Float +- Float sevenDotOhFloat should !== (7.5f +- 0.2f) sevenDotOhFloat should !== (6.5f +- 0.2f) minusSevenDotOhFloat should !== (-7.5f +- 0.2f) minusSevenDotOhFloat should !== (-6.5f +- 0.2f) // Float +- Long sevenDotOhFloat should !== (4.0f +- 2L) sevenDotOhFloat should !== (9.1f +- 2L) minusSevenDotOhFloat should !== (-4.0f +- 2L) minusSevenDotOhFloat should !== (-9.1f +- 2L) // Float +- Int sevenDotOhFloat should !== (4.0f +- 2) sevenDotOhFloat should !== (9.1f +- 2) 
minusSevenDotOhFloat should !== (-4.0f +- 2) minusSevenDotOhFloat should !== (-9.1f +- 2) // Float +- Short sevenDotOhFloat should !== (4.0f +- 2.toShort) sevenDotOhFloat should !== (9.1f +- 2.toShort) minusSevenDotOhFloat should !== (-4.0f +- 2.toShort) minusSevenDotOhFloat should !== (-9.1f +- 2.toShort) // Float +- Byte sevenDotOhFloat should !== (4.0f +- 2.toByte) sevenDotOhFloat should !== (9.1f +- 2.toByte) minusSevenDotOhFloat should !== (-4.0f +- 2.toByte) minusSevenDotOhFloat should !== (-9.1f +- 2.toByte) // Long +- Long sevenLong should !== (4L +- 2L) sevenLong should !== (10L +- 2L) minusSevenLong should !== (-4L +- 2L) minusSevenLong should !== (-10L +- 2L) // Long +- Int sevenLong should !== (4L +- 2) sevenLong should !== (10L +- 2) minusSevenLong should !== (-4L +- 2) minusSevenLong should !== (-10L +- 2) // Long +- Short sevenLong should !== (4L +- 2.toShort) sevenLong should !== (10L +- 2.toShort) minusSevenLong should !== (-4L +- 2.toShort) minusSevenLong should !== (-10L +- 2.toShort) // Long +- Byte sevenLong should !== (4L +- 2.toByte) sevenLong should !== (10L +- 2.toByte) minusSevenLong should !== (-4L +- 2.toByte) minusSevenLong should !== (-10L +- 2.toByte) // Int +- Int sevenInt should !== (4 +- 2) sevenInt should !== (10 +- 2) minusSevenInt should !== (-4 +- 2) minusSevenInt should !== (-10 +- 2) // Int +- Short sevenInt should !== (4 +- 2.toShort) sevenInt should !== (10 +- 2.toShort) minusSevenInt should !== (-4 +- 2.toShort) minusSevenInt should !== (-10 +- 2.toShort) // Int +- Byte sevenInt should !== (4 +- 2.toByte) sevenInt should !== (10 +- 2.toByte) minusSevenInt should !== (-4 +- 2.toByte) minusSevenInt should !== (-10 +- 2.toByte) // Short +- Short sevenShort should !== (4.toShort +- 2.toShort) sevenShort should !== (10.toShort +- 2.toShort) minusSevenShort should !== ((-4).toShort +- 2.toShort) minusSevenShort should !== ((-10).toShort +- 2.toShort) // Short +- Byte sevenShort should !== (4.toShort +- 2.toByte) sevenShort 
should !== (10.toShort +- 2.toByte) minusSevenShort should !== ((-4).toShort +- 2.toByte) minusSevenShort should !== ((-10).toShort +- 2.toByte) // Byte +- Byte sevenByte should !== (4.toByte +- 2.toByte) sevenByte should !== (10.toByte +- 2.toByte) minusSevenByte should !== ((-4).toByte +- 2.toByte) minusSevenByte should !== ((-10).toByte +- 2.toByte) } def `should throw TFE if the number is within the given interval` { // Double +- Double val caught = intercept[TestFailedException] { sevenDotOh should !== (7.1 +- 0.2) } assert(caught.getMessage === "7.0 equaled 7.1 plus or minus 0.2") intercept[TestFailedException] { sevenDotOh should !== (6.9 +- 0.2) } intercept[TestFailedException] { sevenDotOh should !== (7.0 +- 0.2) } intercept[TestFailedException] { sevenDotOh should !== (7.2 +- 0.2) } intercept[TestFailedException] { sevenDotOh should !== (6.8 +- 0.2) } intercept[TestFailedException] { minusSevenDotOh should !== (-7.1 +- 0.2) } intercept[TestFailedException] { minusSevenDotOh should !== (-6.9 +- 0.2) } intercept[TestFailedException] { minusSevenDotOh should !== (-7.0 +- 0.2) } intercept[TestFailedException] { minusSevenDotOh should !== (-7.2 +- 0.2) } intercept[TestFailedException] { minusSevenDotOh should !== (-6.8 +- 0.2) } // Double +- Float intercept[TestFailedException] { sevenDotOh should !== (7.1 +- 0.2f) } intercept[TestFailedException] { sevenDotOh should !== (6.9 +- 0.2f) } intercept[TestFailedException] { sevenDotOh should !== (7.0 +- 0.2f) } intercept[TestFailedException] { sevenDotOh should !== (7.2 +- 0.2f) } intercept[TestFailedException] { sevenDotOh should !== (6.8 +- 0.2f) } intercept[TestFailedException] { minusSevenDotOh should !== (-7.1 +- 0.2f) } intercept[TestFailedException] { minusSevenDotOh should !== (-6.9 +- 0.2f) } intercept[TestFailedException] { minusSevenDotOh should !== (-7.0 +- 0.2f) } intercept[TestFailedException] { minusSevenDotOh should !== (-7.2 +- 0.2f) } intercept[TestFailedException] { minusSevenDotOh should !== 
(-6.8 +- 0.2f) } // Double +- Long intercept[TestFailedException] { sevenDotOh should !== (7.1 +- 2L) } intercept[TestFailedException] { sevenDotOh should !== (6.9 +- 2L) } intercept[TestFailedException] { sevenDotOh should !== (7.0 +- 2L) } intercept[TestFailedException] { sevenDotOh should !== (7.2 +- 2L) } intercept[TestFailedException] { sevenDotOh should !== (6.8 +- 2L) } intercept[TestFailedException] { minusSevenDotOh should !== (-7.1 +- 2L) } intercept[TestFailedException] { minusSevenDotOh should !== (-6.9 +- 2L) } intercept[TestFailedException] { minusSevenDotOh should !== (-7.0 +- 2L) } intercept[TestFailedException] { minusSevenDotOh should !== (-7.2 +- 2L) } intercept[TestFailedException] { minusSevenDotOh should !== (-6.8 +- 2L) } // Double +- Int intercept[TestFailedException] { sevenDotOh should !== (7.1 +- 2) } intercept[TestFailedException] { sevenDotOh should !== (6.9 +- 2) } intercept[TestFailedException] { sevenDotOh should !== (7.0 +- 2) } intercept[TestFailedException] { sevenDotOh should !== (7.2 +- 2) } intercept[TestFailedException] { sevenDotOh should !== (6.8 +- 2) } intercept[TestFailedException] { minusSevenDotOh should !== (-7.1 +- 2) } intercept[TestFailedException] { minusSevenDotOh should !== (-6.9 +- 2) } intercept[TestFailedException] { minusSevenDotOh should !== (-7.0 +- 2) } intercept[TestFailedException] { minusSevenDotOh should !== (-7.2 +- 2) } intercept[TestFailedException] { minusSevenDotOh should !== (-6.8 +- 2) } // Double +- Short intercept[TestFailedException] { sevenDotOh should !== (7.1 +- 2.toShort) } intercept[TestFailedException] { sevenDotOh should !== (6.9 +- 2.toShort) } intercept[TestFailedException] { sevenDotOh should !== (7.0 +- 2.toShort) } intercept[TestFailedException] { sevenDotOh should !== (7.2 +- 2.toShort) } intercept[TestFailedException] { sevenDotOh should !== (6.8 +- 2.toShort) } intercept[TestFailedException] { minusSevenDotOh should !== (-7.1 +- 2.toShort) } intercept[TestFailedException] { 
minusSevenDotOh should !== (-6.9 +- 2.toShort) } intercept[TestFailedException] { minusSevenDotOh should !== (-7.0 +- 2.toShort) } intercept[TestFailedException] { minusSevenDotOh should !== (-7.2 +- 2.toShort) } intercept[TestFailedException] { minusSevenDotOh should !== (-6.8 +- 2.toShort) } // Double +- Byte intercept[TestFailedException] { sevenDotOh should !== (7.1 +- 2.toByte) } intercept[TestFailedException] { sevenDotOh should !== (6.9 +- 2.toByte) } intercept[TestFailedException] { sevenDotOh should !== (7.0 +- 2.toByte) } intercept[TestFailedException] { sevenDotOh should !== (7.2 +- 2.toByte) } intercept[TestFailedException] { sevenDotOh should !== (6.8 +- 2.toByte) } intercept[TestFailedException] { minusSevenDotOh should !== (-7.1 +- 2.toByte) } intercept[TestFailedException] { minusSevenDotOh should !== (-6.9 +- 2.toByte) } intercept[TestFailedException] { minusSevenDotOh should !== (-7.0 +- 2.toByte) } intercept[TestFailedException] { minusSevenDotOh should !== (-7.2 +- 2.toByte) } intercept[TestFailedException] { minusSevenDotOh should !== (-6.8 +- 2.toByte) } // Float +- Float intercept[TestFailedException] { sevenDotOhFloat should !== (7.1f +- 0.2f) } intercept[TestFailedException] { sevenDotOhFloat should !== (6.9f +- 0.2f) } intercept[TestFailedException] { sevenDotOhFloat should !== (7.0f +- 0.2f) } intercept[TestFailedException] { sevenDotOhFloat should !== (7.2f +- 0.2f) } intercept[TestFailedException] { sevenDotOhFloat should !== (6.8f +- 0.2f) } intercept[TestFailedException] { minusSevenDotOhFloat should !== (-7.1f +- 0.2f) } intercept[TestFailedException] { minusSevenDotOhFloat should !== (-6.9f +- 0.2f) } intercept[TestFailedException] { minusSevenDotOhFloat should !== (-7.0f +- 0.2f) } intercept[TestFailedException] { minusSevenDotOhFloat should !== (-7.2f +- 0.2f) } intercept[TestFailedException] { minusSevenDotOhFloat should !== (-6.8f +- 0.2f) } // Float +- Long intercept[TestFailedException] { sevenDotOhFloat should !== (7.1f +- 
2L) } intercept[TestFailedException] { sevenDotOhFloat should !== (6.9f +- 2L) } intercept[TestFailedException] { sevenDotOhFloat should !== (7.0f +- 2L) } intercept[TestFailedException] { sevenDotOhFloat should !== (7.2f +- 2L) } intercept[TestFailedException] { sevenDotOhFloat should !== (6.8f +- 2L) } intercept[TestFailedException] { minusSevenDotOhFloat should !== (-7.1f +- 2L) } intercept[TestFailedException] { minusSevenDotOhFloat should !== (-6.9f +- 2L) } intercept[TestFailedException] { minusSevenDotOhFloat should !== (-7.0f +- 2L) } intercept[TestFailedException] { minusSevenDotOhFloat should !== (-7.2f +- 2L) } intercept[TestFailedException] { minusSevenDotOhFloat should !== (-6.8f +- 2L) } // Float +- Int intercept[TestFailedException] { sevenDotOhFloat should !== (7.1f +- 2) } intercept[TestFailedException] { sevenDotOhFloat should !== (6.9f +- 2) } intercept[TestFailedException] { sevenDotOhFloat should !== (7.0f +- 2) } intercept[TestFailedException] { sevenDotOhFloat should !== (7.2f +- 2) } intercept[TestFailedException] { sevenDotOhFloat should !== (6.8f +- 2) } intercept[TestFailedException] { minusSevenDotOhFloat should !== (-7.1f +- 2) } intercept[TestFailedException] { minusSevenDotOhFloat should !== (-6.9f +- 2) } intercept[TestFailedException] { minusSevenDotOhFloat should !== (-7.0f +- 2) } intercept[TestFailedException] { minusSevenDotOhFloat should !== (-7.2f +- 2) } intercept[TestFailedException] { minusSevenDotOhFloat should !== (-6.8f +- 2) } // Float +- Short intercept[TestFailedException] { sevenDotOhFloat should !== (7.1f +- 2.toShort) } intercept[TestFailedException] { sevenDotOhFloat should !== (6.9f +- 2.toShort) } intercept[TestFailedException] { sevenDotOhFloat should !== (7.0f +- 2.toShort) } intercept[TestFailedException] { sevenDotOhFloat should !== (7.2f +- 2.toShort) } intercept[TestFailedException] { sevenDotOhFloat should !== (6.8f +- 2.toShort) } intercept[TestFailedException] { minusSevenDotOhFloat should !== (-7.1f +- 
2.toShort) } intercept[TestFailedException] { minusSevenDotOhFloat should !== (-6.9f +- 2.toShort) } intercept[TestFailedException] { minusSevenDotOhFloat should !== (-7.0f +- 2.toShort) } intercept[TestFailedException] { minusSevenDotOhFloat should !== (-7.2f +- 2.toShort) } intercept[TestFailedException] { minusSevenDotOhFloat should !== (-6.8f +- 2.toShort) } // Float +- Byte intercept[TestFailedException] { sevenDotOhFloat should !== (7.1f +- 2.toByte) } intercept[TestFailedException] { sevenDotOhFloat should !== (6.9f +- 2.toByte) } intercept[TestFailedException] { sevenDotOhFloat should !== (7.0f +- 2.toByte) } intercept[TestFailedException] { sevenDotOhFloat should !== (7.2f +- 2.toByte) } intercept[TestFailedException] { sevenDotOhFloat should !== (6.8f +- 2.toByte) } intercept[TestFailedException] { minusSevenDotOhFloat should !== (-7.1f +- 2.toByte) } intercept[TestFailedException] { minusSevenDotOhFloat should !== (-6.9f +- 2.toByte) } intercept[TestFailedException] { minusSevenDotOhFloat should !== (-7.0f +- 2.toByte) } intercept[TestFailedException] { minusSevenDotOhFloat should !== (-7.2f +- 2.toByte) } intercept[TestFailedException] { minusSevenDotOhFloat should !== (-6.8f +- 2.toByte) } // Long +- Long intercept[TestFailedException] { sevenLong should !== (9L +- 2L) } intercept[TestFailedException] { sevenLong should !== (8L +- 2L) } intercept[TestFailedException] { sevenLong should !== (7L +- 2L) } intercept[TestFailedException] { sevenLong should !== (6L +- 2L) } intercept[TestFailedException] { sevenLong should !== (5L +- 2L) } intercept[TestFailedException] { minusSevenLong should !== (-9L +- 2L) } intercept[TestFailedException] { minusSevenLong should !== (-8L +- 2L) } intercept[TestFailedException] { minusSevenLong should !== (-7L +- 2L) } intercept[TestFailedException] { minusSevenLong should !== (-6L +- 2L) } intercept[TestFailedException] { minusSevenLong should !== (-5L +- 2L) } // Long +- Int intercept[TestFailedException] { sevenLong 
should !== (9L +- 2) } intercept[TestFailedException] { sevenLong should !== (8L +- 2) } intercept[TestFailedException] { sevenLong should !== (7L +- 2) } intercept[TestFailedException] { sevenLong should !== (6L +- 2) } intercept[TestFailedException] { sevenLong should !== (5L +- 2) } intercept[TestFailedException] { minusSevenLong should !== (-9L +- 2) } intercept[TestFailedException] { minusSevenLong should !== (-8L +- 2) } intercept[TestFailedException] { minusSevenLong should !== (-7L +- 2) } intercept[TestFailedException] { minusSevenLong should !== (-6L +- 2) } intercept[TestFailedException] { minusSevenLong should !== (-5L +- 2) } // Long +- Short intercept[TestFailedException] { sevenLong should !== (9L +- 2.toShort) } intercept[TestFailedException] { sevenLong should !== (8L +- 2.toShort) } intercept[TestFailedException] { sevenLong should !== (7L +- 2.toShort) } intercept[TestFailedException] { sevenLong should !== (6L +- 2.toShort) } intercept[TestFailedException] { sevenLong should !== (5L +- 2.toShort) } intercept[TestFailedException] { minusSevenLong should !== (-9L +- 2.toShort) } intercept[TestFailedException] { minusSevenLong should !== (-8L +- 2.toShort) } intercept[TestFailedException] { minusSevenLong should !== (-7L +- 2.toShort) } intercept[TestFailedException] { minusSevenLong should !== (-6L +- 2.toShort) } intercept[TestFailedException] { minusSevenLong should !== (-5L +- 2.toShort) } // Long +- Byte intercept[TestFailedException] { sevenLong should !== (9L +- 2.toByte) } intercept[TestFailedException] { sevenLong should !== (8L +- 2.toByte) } intercept[TestFailedException] { sevenLong should !== (7L +- 2.toByte) } intercept[TestFailedException] { sevenLong should !== (6L +- 2.toByte) } intercept[TestFailedException] { sevenLong should !== (5L +- 2.toByte) } intercept[TestFailedException] { minusSevenLong should !== (-9L +- 2.toByte) } intercept[TestFailedException] { minusSevenLong should !== (-8L +- 2.toByte) } 
intercept[TestFailedException] { minusSevenLong should !== (-7L +- 2.toByte) } intercept[TestFailedException] { minusSevenLong should !== (-6L +- 2.toByte) } intercept[TestFailedException] { minusSevenLong should !== (-5L +- 2.toByte) } // Int +- Int intercept[TestFailedException] { sevenInt should !== (9 +- 2) } intercept[TestFailedException] { sevenInt should !== (8 +- 2) } intercept[TestFailedException] { sevenInt should !== (7 +- 2) } intercept[TestFailedException] { sevenInt should !== (6 +- 2) } intercept[TestFailedException] { sevenInt should !== (5 +- 2) } intercept[TestFailedException] { minusSevenInt should !== (-9 +- 2) } intercept[TestFailedException] { minusSevenInt should !== (-8 +- 2) } intercept[TestFailedException] { minusSevenInt should !== (-7 +- 2) } intercept[TestFailedException] { minusSevenInt should !== (-6 +- 2) } intercept[TestFailedException] { minusSevenInt should !== (-5 +- 2) } // Int +- Short intercept[TestFailedException] { sevenInt should !== (9 +- 2.toShort) } intercept[TestFailedException] { sevenInt should !== (8 +- 2.toShort) } intercept[TestFailedException] { sevenInt should !== (7 +- 2.toShort) } intercept[TestFailedException] { sevenInt should !== (6 +- 2.toShort) } intercept[TestFailedException] { sevenInt should !== (5 +- 2.toShort) } intercept[TestFailedException] { minusSevenInt should !== (-9 +- 2.toShort) } intercept[TestFailedException] { minusSevenInt should !== (-8 +- 2.toShort) } intercept[TestFailedException] { minusSevenInt should !== (-7 +- 2.toShort) } intercept[TestFailedException] { minusSevenInt should !== (-6 +- 2.toShort) } intercept[TestFailedException] { minusSevenInt should !== (-5 +- 2.toShort) } // Int +- Byte intercept[TestFailedException] { sevenInt should !== (9 +- 2.toByte) } intercept[TestFailedException] { sevenInt should !== (8 +- 2.toByte) } intercept[TestFailedException] { sevenInt should !== (7 +- 2.toByte) } intercept[TestFailedException] { sevenInt should !== (6 +- 2.toByte) } 
intercept[TestFailedException] { sevenInt should !== (5 +- 2.toByte) } intercept[TestFailedException] { minusSevenInt should !== (-9 +- 2.toByte) } intercept[TestFailedException] { minusSevenInt should !== (-8 +- 2.toByte) } intercept[TestFailedException] { minusSevenInt should !== (-7 +- 2.toByte) } intercept[TestFailedException] { minusSevenInt should !== (-6 +- 2.toByte) } intercept[TestFailedException] { minusSevenInt should !== (-5 +- 2.toByte) } // Short +- Short intercept[TestFailedException] { sevenShort should !== (9.toShort +- 2.toShort) } intercept[TestFailedException] { sevenShort should !== (8.toShort +- 2.toShort) } intercept[TestFailedException] { sevenShort should !== (7.toShort +- 2.toShort) } intercept[TestFailedException] { sevenShort should !== (6.toShort +- 2.toShort) } intercept[TestFailedException] { sevenShort should !== (5.toShort +- 2.toShort) } intercept[TestFailedException] { minusSevenShort should !== ((-9).toShort +- 2.toShort) } intercept[TestFailedException] { minusSevenShort should !== ((-8).toShort +- 2.toShort) } intercept[TestFailedException] { minusSevenShort should !== ((-7).toShort +- 2.toShort) } intercept[TestFailedException] { minusSevenShort should !== ((-6).toShort +- 2.toShort) } intercept[TestFailedException] { minusSevenShort should !== ((-5).toShort +- 2.toShort) } // Short +- Byte intercept[TestFailedException] { sevenShort should !== (9.toShort +- 2.toByte) } intercept[TestFailedException] { sevenShort should !== (8.toShort +- 2.toByte) } intercept[TestFailedException] { sevenShort should !== (7.toShort +- 2.toByte) } intercept[TestFailedException] { sevenShort should !== (6.toShort +- 2.toByte) } intercept[TestFailedException] { sevenShort should !== (5.toShort +- 2.toByte) } intercept[TestFailedException] { minusSevenShort should !== ((-9).toShort +- 2.toByte) } intercept[TestFailedException] { minusSevenShort should !== ((-8).toShort +- 2.toByte) } intercept[TestFailedException] { minusSevenShort should !== 
((-7).toShort +- 2.toByte) } intercept[TestFailedException] { minusSevenShort should !== ((-6).toShort +- 2.toByte) } intercept[TestFailedException] { minusSevenShort should !== ((-5).toShort +- 2.toByte) } // Byte +- Byte intercept[TestFailedException] { sevenByte should !== (9.toByte +- 2.toByte) } intercept[TestFailedException] { sevenByte should !== (8.toByte +- 2.toByte) } intercept[TestFailedException] { sevenByte should !== (7.toByte +- 2.toByte) } intercept[TestFailedException] { sevenByte should !== (6.toByte +- 2.toByte) } intercept[TestFailedException] { sevenByte should !== (5.toByte +- 2.toByte) } intercept[TestFailedException] { minusSevenByte should !== ((-9).toByte +- 2.toByte) } intercept[TestFailedException] { minusSevenByte should !== ((-8).toByte +- 2.toByte) } intercept[TestFailedException] { minusSevenByte should !== ((-7).toByte +- 2.toByte) } intercept[TestFailedException] { minusSevenByte should !== ((-6).toByte +- 2.toByte) } intercept[TestFailedException] { minusSevenByte should !== ((-5).toByte +- 2.toByte) } } } object `The X +- Y syntax` { def `should throw IllegalArgumentException if the number passed to the right is 0 or negative` { // Double +- Double val caught1 = intercept[IllegalArgumentException] { sevenDotOh should === (7.1 +- -0.2) } assert(caught1.getMessage === "-0.2 passed to +- was zero or negative. Must be a positive non-zero number.", caught1.getMessage) // Double +- Float val caught2 = intercept[IllegalArgumentException] { sevenDotOh should === (7.1 +- -0.2f) } assert(caught2.getMessage === "-0.20000000298023224 passed to +- was zero or negative. Must be a positive non-zero number.") // Double +- Long val caught3 = intercept[IllegalArgumentException] { sevenDotOh should === (7.1 +- -2L) } assert(caught3.getMessage === "-2.0 passed to +- was zero or negative. 
Must be a positive non-zero number.") // Double +- Int val caught4 = intercept[IllegalArgumentException] { sevenDotOh should === (7.1 +- -2) } assert(caught4.getMessage === "-2.0 passed to +- was zero or negative. Must be a positive non-zero number.") // Double +- Short val caught5 = intercept[IllegalArgumentException] { sevenDotOh should === (7.1 +- (-2).toShort) } assert(caught5.getMessage === "-2.0 passed to +- was zero or negative. Must be a positive non-zero number.") // Double +- Byte val caught6 = intercept[IllegalArgumentException] { sevenDotOh should === (7.1 +- (-2).toByte) } assert(caught6.getMessage === "-2.0 passed to +- was zero or negative. Must be a positive non-zero number.") // Float +- Float val caught7 = intercept[IllegalArgumentException] { sevenDotOhFloat should === (7.1f +- -0.2f) } assert(caught7.getMessage === "-0.2 passed to +- was zero or negative. Must be a positive non-zero number.") // Float +- Long val caught8 = intercept[IllegalArgumentException] { sevenDotOhFloat should === (7.1f +- -2L) } assert(caught8.getMessage === "-2.0 passed to +- was zero or negative. Must be a positive non-zero number.") // Float +- Int val caught9 = intercept[IllegalArgumentException] { sevenDotOhFloat should === (7.1f +- -2) } assert(caught9.getMessage === "-2.0 passed to +- was zero or negative. Must be a positive non-zero number.") // Float +- Short val caught10 = intercept[IllegalArgumentException] { sevenDotOhFloat should === (7.1f +- (-2).toShort) } assert(caught10.getMessage === "-2.0 passed to +- was zero or negative. Must be a positive non-zero number.") // Float +- Byte val caught11 = intercept[IllegalArgumentException] { sevenDotOhFloat should === (7.1f +- (-2).toByte) } assert(caught11.getMessage === "-2.0 passed to +- was zero or negative. 
Must be a positive non-zero number.") // Long +- Long val caught12 = intercept[IllegalArgumentException] { sevenLong should === (9L +- -2L) } assert(caught12.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.") // Long +- Int val caught13 = intercept[IllegalArgumentException] { sevenLong should === (9L +- -2) } assert(caught13.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.") // Long +- Short val caught14 = intercept[IllegalArgumentException] { sevenLong should === (9L +- (-2).toShort) } assert(caught14.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.") // Long +- Byte val caught15 = intercept[IllegalArgumentException] { sevenLong should === (9L +- (-2).toByte) } assert(caught15.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.") // Int +- Int val caught16 = intercept[IllegalArgumentException] { sevenInt should === (9 +- -2) } assert(caught16.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.") // Int +- Short val caught17 = intercept[IllegalArgumentException] { sevenInt should === (9 +- (-2).toShort) } assert(caught17.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.") // Int +- Byte val caught18 = intercept[IllegalArgumentException] { sevenInt should === (9 +- (-2).toByte) } assert(caught18.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.") // Short +- Short val caught19 = intercept[IllegalArgumentException] { sevenShort should === (9.toShort +- (-2).toShort) } assert(caught19.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.") // Short +- Byte val caught20 = intercept[IllegalArgumentException] { sevenShort should === (9.toShort +- (-2).toByte) } assert(caught20.getMessage === "-2 passed to +- was zero or negative. 
Must be a positive non-zero number.") // Byte +- Byte val caught21 = intercept[IllegalArgumentException] { sevenByte should === (9.toByte +- (-2).toByte) } assert(caught21.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.") } } }
svn2github/scalatest
src/test/scala/org/scalatest/ShouldTripleEqualsToleranceSpec.scala
Scala
apache-2.0
48,217
override def toString : String = { return this.textContent }
richnou/vui2
vui2-html/src/gen/Wrapper.body.scala
Scala
agpl-3.0
74
package com.cloudray.scalapress.plugin.variations import org.scalatest.{OneInstancePerTest, FlatSpec} import org.scalatest.mock.MockitoSugar import org.mockito.{Matchers, Mockito} import com.cloudray.scalapress.item.{ItemType, TypeDao} import com.cloudray.scalapress.plugin.variations.controller.DimensionListController /** @author Stephen Samuel */ class DimensionListControllerTest extends FlatSpec with MockitoSugar with OneInstancePerTest { val controller = new DimensionListController controller.dimensionDao = mock[DimensionDao] controller.objectTypeDao = mock[TypeDao] "a dimension list controller" should "persist the dimension when creating a dimension" in { controller.create(45) Mockito.verify(controller.dimensionDao).save(Matchers.any[Dimension]) } it should "return a forward when creating" in { val redirect = controller.create(9) assert("redirect:/backoffice/plugin/variations/dimensions?objectTypeId=9" === redirect) } it should "delete from the database when the delete method is invoked" in { val dimension = new Dimension dimension.objectType = new ItemType Mockito.when(controller.dimensionDao.find(155)).thenReturn(dimension) controller.delete(155) Mockito.verify(controller.dimensionDao).remove(dimension) } it should "forward when deleting" in { val dimension = new Dimension dimension.objectType = new ItemType dimension.objectType.id = 7 Mockito.when(controller.dimensionDao.find(155)).thenReturn(dimension) val redirect = controller.delete(155) assert("redirect:/backoffice/plugin/variations/dimensions?objectTypeId=7" === redirect) } }
vidyacraghav/scalapress
src/test/scala/com/cloudray/scalapress/plugin/variations/DimensionListControllerTest.scala
Scala
apache-2.0
1,653
package japgolly.scalajs.react.core import japgolly.scalajs.react._ import japgolly.univeq.UnivEq import scala.annotation.nowarn import scala.util.NotGiven sealed trait Compilation3Test { import CompilationTest._ import Compilation3Test._ sealed trait TestComponentBuilder { val step1 = ScalaComponent.builder[Int]("") step1.renderBackend[B3b] step1.backend[B3b](new B3b(_)).renderBackend } // Ensure that the ScalaJsReactConfig.Defaults trait contains a default value for every config method class ScalaJsReactConfigDefaults extends ScalaJsReactConfig.Defaults // Reusability derives locally { case class Mono(a: Int) derives Reusability, UnivEq implicitly[Reusability[Mono]] case class Poly[+A](a: A) derives Reusability, UnivEq implicitly[Reusability[Poly[Int]]] implicitly[NotGiven[Reusability[Poly[B3b]]]] } } @nowarn object Compilation3Test { import japgolly.scalajs.react.vdom.html_<^._ import CompilationTest._ class B3b($: BackendScope[Int, Unit])(using i: Imp) { def render: VdomNode = 123 } }
japgolly/scalajs-react
tests/src/test/scala-3/japgolly/scalajs/react/core/Compilation3Test.scala
Scala
apache-2.0
1,075
package ru.makkarpov.scalingua.extract import java.io.{File, FileInputStream, InputStreamReader} import java.nio.charset.StandardCharsets import com.grack.nanojson.{JsonObject, JsonParser, JsonParserException} import ru.makkarpov.scalingua.pofile.Message.{Plural, Singular} import ru.makkarpov.scalingua.pofile._ import ru.makkarpov.scalingua.Compat.CollectionConverters._ object TaggedParser { val TaggedFileName = "tagged-messages.json" case class TaggedMessage(tag: String, msg: String, plural: Option[String], comment: Seq[String]) { def toMessage: Message = { val header = MessageHeader(comment, Nil, MessageLocation(TaggedFileName) :: Nil, MessageFlag.empty, Some(tag)) plural match { case None => Singular(header, None, MultipartString(msg), MultipartString.empty) case Some(p) => Plural(header, None, MultipartString(msg), MultipartString(p), Seq(MultipartString.empty, MultipartString.empty)) } } } /* * Format for tagged JSON file: * * { * "some.message.tag": { * "message": "...", // message itself, mandatory * "plural": "...", // plural version of message, optional * "comments": [ "...", "..."] // comments, optional * }, * * // or, simply: * "some.other.message.tag": "message" * } */ def parse(f: File): Seq[TaggedMessage] = { val ret = Vector.newBuilder[TaggedMessage] try { val obj = { val r = new InputStreamReader(new FileInputStream(f), StandardCharsets.UTF_8) try JsonParser.`object`().from(r) finally r.close() } for (k <- obj.keySet().asScala) obj.get(k) match { case v: JsonObject => if (!v.has("message")) throw TaggedParseException(s"Object with key '$k' has no 'message' field") if (!v.isString("message")) throw TaggedParseException(s"Object with key '$k' has non-string 'message' field") val msg = v.getString("message") val plural = if (v.has("plural")) { if (!v.isString("plural")) throw TaggedParseException(s"Object with key '$k' has non-string 'plural' field") Some(v.getString("plural")) } else None val comments = if (v.has("comments")) { if 
(v.isString("comments")) v.getString("comments") :: Nil else v.getArray("comments").asScala.toList.map(_.asInstanceOf[String]) } else Nil ret += TaggedMessage(k, msg, plural, comments) case v: String => ret += TaggedMessage(k, v, None, Nil) } } catch { case e: JsonParserException => throw new TaggedParseException(s"Tagged JSON syntax error at ${f.getCanonicalPath}:${e.getLinePosition}:${e.getCharPosition}", e) } ret.result() } }
makkarpov/scalingua
scalingua/shared/src/main/scala/ru/makkarpov/scalingua/extract/TaggedParser.scala
Scala
apache-2.0
2,847
import scala.quoted._ object scalatest { inline def assert(condition: => Boolean): Unit = ${ assertImpl('condition, '{""}) } def assertImpl(cond: Expr[Boolean], clue: Expr[Any])(using qctx: QuoteContext) : Expr[Unit] = { import qctx.tasty._ import util._ def isImplicitMethodType(tp: Type): Boolean = tp match case tp: MethodType => tp.isImplicit case _ => false cond.unseal.underlyingArgument match { case t @ Apply(Select(lhs, op), rhs :: Nil) => let(lhs) { left => let(rhs) { right => val app = Select.overloaded(left, op, Nil, right :: Nil) let(app) { result => val l = left.seal val r = right.seal val b = result.seal.cast[Boolean] val code = '{ scala.Predef.assert($b) } code.unseal } } }.seal.cast[Unit] case Apply(f @ Apply(Select(Apply(qual, lhs :: Nil), op), rhs :: Nil), implicits) if isImplicitMethodType(f.tpe) => let(lhs) { left => let(rhs) { right => val app = Select.overloaded(Apply(qual, left :: Nil), op, Nil, right :: Nil) let(Apply(app, implicits)) { result => val l = left.seal val r = right.seal val b = result.seal.cast[Boolean] val code = '{ scala.Predef.assert($b) } code.unseal } } }.seal.cast[Unit] } } }
som-snytt/dotty
tests/run-macros/reflect-select-value-class/assert_1.scala
Scala
apache-2.0
1,479
package nyaya package object util { @inline implicit final class NyayaUtilAnyExt[A](private val a: A) extends AnyVal { @inline def `JVM|JS`(js: => A): A = Platform.choose(a, js) } }
japgolly/nyaya
util/shared/src/main/scala/nyaya/util/package.scala
Scala
lgpl-2.1
192
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.ui.jobs import scala.collection.mutable import scala.xml.Node import org.apache.spark.ui.{ToolTips, UIUtils} import org.apache.spark.ui.jobs.UIData.StageUIData import org.apache.spark.util.Utils /** Stage summary grouped by executors. */ private[ui] class ExecutorTable(stageId: Int, stageAttemptId: Int, parent: StagesTab) { private val listener = parent.progressListener def toNodeSeq: Seq[Node] = { listener.synchronized { executorTable() } } /** Special table which merges two header cells. 
*/ private def executorTable[T](): Seq[Node] = { val stageData = listener.stageIdToData.get((stageId, stageAttemptId)) var hasInput = false var hasOutput = false var hasShuffleWrite = false var hasShuffleRead = false var hasBytesSpilled = false stageData.foreach(data => { hasInput = data.hasInput hasOutput = data.hasOutput hasShuffleRead = data.hasShuffleRead hasShuffleWrite = data.hasShuffleWrite hasBytesSpilled = data.hasBytesSpilled }) <table class={UIUtils.TABLE_CLASS_STRIPED_SORTABLE}> <thead> <th>Executor ID</th> <th>Address</th> <th>Task Time</th> <th>Total Tasks</th> <th>Failed Tasks</th> <th>Succeeded Tasks</th> {if (hasInput) { <th> <span data-toggle="tooltip" title={ToolTips.INPUT}>Input Size / Records</span> </th> }} {if (hasOutput) { <th> <span data-toggle="tooltip" title={ToolTips.OUTPUT}>Output Size / Records</span> </th> }} {if (hasShuffleRead) { <th> <span data-toggle="tooltip" title={ToolTips.SHUFFLE_READ}> Shuffle Read Size / Records</span> </th> }} {if (hasShuffleWrite) { <th> <span data-toggle="tooltip" title={ToolTips.SHUFFLE_WRITE}> Shuffle Write Size / Records</span> </th> }} {if (hasBytesSpilled) { <th>Shuffle Spill (Memory)</th> <th>Shuffle Spill (Disk)</th> }} </thead> <tbody> {createExecutorTable()} </tbody> </table> } private def createExecutorTable() : Seq[Node] = { // Make an executor-id -> address map val executorIdToAddress = mutable.HashMap[String, String]() listener.blockManagerIds.foreach { blockManagerId => val address = blockManagerId.hostPort val executorId = blockManagerId.executorId executorIdToAddress.put(executorId, address) } listener.stageIdToData.get((stageId, stageAttemptId)) match { case Some(stageData: StageUIData) => stageData.executorSummary.toSeq.sortBy(_._1).map { case (k, v) => <tr> <td>{k}</td> <td>{executorIdToAddress.getOrElse(k, "CANNOT FIND ADDRESS")}</td> <td sorttable_customkey={v.taskTime.toString}>{UIUtils.formatDuration(v.taskTime)}</td> <td>{v.failedTasks + v.succeededTasks}</td> 
<td>{v.failedTasks}</td> <td>{v.succeededTasks}</td> {if (stageData.hasInput) { <td sorttable_customkey={v.inputBytes.toString}> {s"${Utils.bytesToString(v.inputBytes)} / ${v.inputRecords}"} </td> }} {if (stageData.hasOutput) { <td sorttable_customkey={v.outputBytes.toString}> {s"${Utils.bytesToString(v.outputBytes)} / ${v.outputRecords}"} </td> }} {if (stageData.hasShuffleRead) { <td sorttable_customkey={v.shuffleRead.toString}> {s"${Utils.bytesToString(v.shuffleRead)} / ${v.shuffleReadRecords}"} </td> }} {if (stageData.hasShuffleWrite) { <td sorttable_customkey={v.shuffleWrite.toString}> {s"${Utils.bytesToString(v.shuffleWrite)} / ${v.shuffleWriteRecords}"} </td> }} {if (stageData.hasBytesSpilled) { <td sorttable_customkey={v.memoryBytesSpilled.toString}> {Utils.bytesToString(v.memoryBytesSpilled)} </td> <td sorttable_customkey={v.diskBytesSpilled.toString}> {Utils.bytesToString(v.diskBytesSpilled)} </td> }} </tr> } case None => Seq.empty[Node] } } }
practice-vishnoi/dev-spark-1
core/src/main/scala/org/apache/spark/ui/jobs/ExecutorTable.scala
Scala
apache-2.0
5,271
package streamz.akka.persistence

import java.io.File

import akka.actor._
import akka.persistence._
import akka.testkit.TestKit

import scalaz._
import Scalaz._
import scalaz.stream.Process

import org.apache.commons.io.FileUtils

import org.scalatest._

// Integration-style spec for the streamz akka-persistence adapters
// (`replay`, `journal`, `snapshot`). Runs against a real actor system with
// the default leveldb journal / local snapshot store, so each test uses a
// fresh persistenceId ("p1".."p6") to stay isolated.
class PersistenceSpec extends TestKit(ActorSystem("test")) with WordSpecLike with Matchers with BeforeAndAfterAll {
  // Shut the actor system down, then wipe the on-disk journal and snapshot
  // directories (paths are read from the test configuration) so reruns start
  // from an empty store.
  override protected def afterAll(): Unit = {
    system.shutdown()
    List(
      "akka.persistence.journal.leveldb.dir",
      "akka.persistence.snapshot-store.local.dir")
      .map(s ⇒ new File(system.settings.config.getString(s)))
      .foreach(FileUtils.deleteDirectory)
  }

  // Minimal persistent actor used as a fixture: appends every persisted
  // String to `state`, snapshots `state` on the "snap" command, and forwards
  // the SaveSnapshotSuccess metadata to `probe` (defaults to the TestKit's
  // testActor) so tests can await snapshot completion.
  class TestPersistentActor(val persistenceId: String, probe: ActorRef = testActor) extends PersistentActor {
    var state: String = ""

    override def receiveRecover = {
      case p: String => state += p
    }

    override def receiveCommand = {
      case "snap" => saveSnapshot(state)
      case SaveSnapshotSuccess(md) => probe ! md
      case p: String => persist(p) { state += _ }
    }
  }

  "A replayer" must {
    "produce a discrete stream of journaled messages" in {
      val p = system.actorOf(Props(new TestPersistentActor("p1")))
      1 to 3 foreach { i => p ! i.toString }
      // take(3) bounds the otherwise-live replay stream before running it.
      replay("p1").take(3).runLog.run should be(Seq(Event("p1", 1L, "1"), Event("p1", 2L, "2"), Event("p1", 3L, "3")))
    }
    "produce a discrete stream of journaled messages from user-defined sequence number" in {
      val p = system.actorOf(Props(new TestPersistentActor("p2")))
      1 to 3 foreach { i => p ! i.toString }
      replay("p2", 2L).take(2).runLog.run should be(Seq(Event("p2", 2L, "2"), Event("p2", 3L, "3")))
    }
  }

  "A journal" must {
    "journal a stream of messages" in {
      // Writes the three elements via the journal sink, then replays them
      // back and checks payload + sequence-number pairing.
      Process("a", "b", "c").journal("p3").run.run
      replay("p3").map(p => (p.data, p.sequenceNr)).take(3).runLog.run should be(Seq(("a", 1L), ("b", 2L), ("c", 3L)))
    }
  }

  "A snapshot loader" must {
    "produce the most recent snapshot" in {
      val p = system.actorOf(Props(new TestPersistentActor("p4")))
      p ! "a"
      p ! "b"
      p ! "snap"
      // Await snapshot completion before reading it back; the actor forwards
      // the SaveSnapshotSuccess metadata to the testActor.
      val metadata = expectMsgPF() { case md: SnapshotMetadata => md }
      snapshot[String]("p4").runLog.run should be(Seq(Snapshot(metadata, "ab")))
    }
    "produce a zero snapshot if there's no snapshot stored" in {
      // No actor ever persisted under "p5": the loader emits an empty-state
      // snapshot with zeroed metadata instead of failing.
      snapshot[String]("p5").runLog.run should be(Seq(Snapshot(SnapshotMetadata("p5", 0L, 0L), "")))
    }
  }

  "A composition of snapshot and replay" must {
    "produce a discrete stream of updated states" in {
      val p = system.actorOf(Props(new TestPersistentActor("p6")))
      p ! "a"
      p ! "b"
      p ! "snap"
      p ! "c"
      p ! "d"
      // Only waits for the snapshot ack; "c"/"d" are journaled events that
      // the replay below picks up from the snapshot's next sequence number.
      expectMsgPF() { case md: SnapshotMetadata => md }
      val c = for {
        s @ Snapshot(meta, data) <- snapshot[String]("p6")
        state <- replay(meta.persistenceId, s.nextSequenceNr).map(_.data).scan(data)((acc,p) => acc + p)
      } yield state
      c.take(3).runLog.run should be(Seq("ab", "abc", "abcd"))
    }
  }
}
Astrac/streamz
streamz-akka-persistence/src/test/scala/streamz/akka/persistence/PersistenceSpec.scala
Scala
apache-2.0
3,063
package fi.pelam.javafxactor

import java.util.concurrent.{CountDownLatch, TimeUnit}
import javafx.application.Application
import javafx.stage.Stage
import javax.annotation.concurrent.GuardedBy

import grizzled.slf4j.Logging

/**
 * Basically a dummy application with static
 * initialization to allow JavaFxDispatcher
 * guarantee that JavaFx Platform.runLater will work.
 *
 * The companion object owns two latches guarded by its own monitor:
 * `javaFxStartedLatch` opens once [[JavaFxApplication.start]] runs on the
 * JavaFX application thread, and `javaFxShutdownLatch` opens when the
 * launcher thread finishes. Both start at count 0 ("not running") until
 * `launch()` resets them.
 */
object JavaFxApplication extends Logging {

  @GuardedBy("this")
  private var javaFxStartedLatch = new CountDownLatch(0)

  @GuardedBy("this")
  private[this] var javaFxShutdownLatch = new CountDownLatch(0)

  // Runs Application.launch on a dedicated thread because launch() blocks
  // until the JavaFX platform exits; the shutdown latch is released on the
  // way out, whether launch returned normally or threw.
  object LauncherRunnable extends Runnable with Logging {
    override def run(): Unit = {
      info("JavaFX thread starting")
      try {
        Application.launch(classOf[JavaFxApplication])
      } catch {
        // NOTE(review): catching Throwable here is deliberate-looking (the
        // thread must still count down the shutdown latch), but it also
        // swallows fatal errors — confirm this is intended.
        case t: Throwable =>
          error("JavaFx launch threw exception", t)
      }
      info("JavaFX thread ending")
      javaFxShutdownLatch.countDown()
    }
  }

  // Re-arm both latches to count 1 before (re)starting the platform.
  def resetLatches() = this.synchronized {
    javaFxStartedLatch = new CountDownLatch(1)
    javaFxShutdownLatch = new CountDownLatch(1)
  }

  def shutdownLatch = this.synchronized {
    javaFxShutdownLatch
  }

  def startedLatch = this.synchronized {
    javaFxStartedLatch
  }

  // True only between the start() callback and launcher-thread termination.
  def isRunning = this.synchronized {
    // started
    javaFxStartedLatch.getCount == 0 &&
      // and not shutdown yet
      javaFxShutdownLatch.getCount == 1
  }

  /**
   * Blocks until JavaFx is up and running and Platform.runLater is ready
   * take orders.
   *
   * No-op if the platform is already running; otherwise spawns the daemon
   * launcher thread and waits up to 10 seconds for the started signal,
   * failing with sys.error on timeout.
   */
  def launch(): Unit = {
    if (!isRunning) {
      info("Starting JavaFX thread.")
      resetLatches()
      // This initializes JavaFX so Platform.runLater does not throw.
      val launcherThread = new Thread(LauncherRunnable, "javafx-launcher")
      launcherThread.setDaemon(true)
      launcherThread.start()
      if (!javaFxStartedLatch.await(10, TimeUnit.SECONDS)) {
        sys.error("JavaFX app did not become initialized.")
      }
      info("JavaFX started signal received.")
    }
  }
}

// Concrete Application subclass instantiated reflectively by
// Application.launch; its only job is to signal the started latch from the
// JavaFX application thread.
class JavaFxApplication extends Application with Logging {
  override def start(primaryStage: Stage): Unit = {
    info("JavaFX running")
    JavaFxApplication.javaFxStartedLatch.countDown()
  }

  override def stop(): Unit = {
    info("JavaFX stop received")
    super.stop()
  }
}
pelamfi/pelam-scala-incubator
src/main/scala/fi/pelam/javafxactor/JavaFxApplication.scala
Scala
apache-2.0
2,324
/*
 * Copyright 2021 HM Revenue & Customs
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package views.businessactivities

import forms.{EmptyForm, Form2, InvalidForm, ValidForm}
import jto.validation.{Path, ValidationError}
import models.businessactivities.{UkAccountantsAddress, WhoIsYourAccountantIsUk, WhoIsYourAccountantName}
import org.scalatest.MustMatchers
import play.api.i18n.Messages
import utils.{AmlsViewSpec, AutoCompleteServiceMocks}
import views.Fixture
import views.html.businessactivities.who_is_your_accountant_is_uk_address

// View spec for the "is your accountant's address in the UK?" page: checks
// title, headings (interpolated with the accountant's name), error rendering
// against the "isUK" field, and the presence of a back link.
class who_is_your_accountant_is_ukSpec extends AmlsViewSpec with MustMatchers {

  trait ViewFixture extends Fixture with AutoCompleteServiceMocks {
    // Twirl template under test, resolved from the running app's injector.
    lazy val address = app.injector.instanceOf[who_is_your_accountant_is_uk_address]
    implicit val requestWithToken = addTokenForView()
  }

  // Shared fixture data.
  // NOTE(review): defaultUkAddress is never referenced by any test below —
  // presumably left over from a sibling spec; confirm before removing.
  val defaultName = WhoIsYourAccountantName("accountantName",Some("tradingName"))
  val defaultIsUkTrue = WhoIsYourAccountantIsUk(true)
  val defaultUkAddress = UkAccountantsAddress("line1","line2",None,None,"AB12CD")

  "who_is_your_accountant_is_uk view" must {
    "have correct title" in new ViewFixture {
      val form2: ValidForm[WhoIsYourAccountantIsUk] = Form2(defaultIsUkTrue)

      def view = address(form2, true, defaultName.accountantsName)

      doc.title must startWith(Messages("businessactivities.whoisyouraccountant.location.title"))
    }

    "have correct headings" in new ViewFixture {
      val form2: ValidForm[WhoIsYourAccountantIsUk] = Form2(defaultIsUkTrue)

      def view = address(form2, true, defaultName.accountantsName)

      heading.html must be(Messages("businessactivities.whoisyouraccountant.location.header", defaultName.accountantsName))
      subHeading.html must include(Messages("summary.businessactivities"))
    }

    "show errors in the correct locations" in new ViewFixture {
      // A non-message-key error string is used so the assertion proves the
      // raw error text is rendered, not a translation of it.
      val form2: InvalidForm = InvalidForm(Map.empty,
        Seq(
          (Path \\ "isUK") -> Seq(ValidationError("third not a message Key"))
        ))

      def view = address(form2, true, defaultName.accountantsName)

      errorSummary.html() must include("third not a message Key")

      doc.getElementById("isUK")
        .getElementsByClass("error-notification").first().html() must include("third not a message Key")
    }

    "have a back link" in new ViewFixture {
      def view = address(EmptyForm, true, defaultName.accountantsName)

      doc.getElementsByAttributeValue("class", "link-back") must not be empty
    }
  }
}
hmrc/amls-frontend
test/views/businessactivities/who_is_your_accountant_is_ukSpec.scala
Scala
apache-2.0
3,027