code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner
import java.lang.{Boolean => JBool}
import org.apache.flink.api.common.functions.MapFunction
import org.apache.flink.api.common.typeinfo.{SqlTimeTypeInfo, TypeInformation}
import org.apache.flink.api.java.tuple.{Tuple2 => JTuple2}
import org.apache.flink.api.java.typeutils._
import org.apache.flink.api.scala.typeutils.CaseClassTypeInfo
import org.apache.flink.streaming.api.datastream.DataStream
import org.apache.flink.table.api._
import org.apache.flink.table.calcite.FlinkTypeFactory
import org.apache.flink.table.runtime.conversion._
import org.apache.flink.table.runtime.types.{CRow, CRowTypeInfo}
import org.apache.flink.table.runtime.{CRowMapRunner, OutputRowtimeProcessFunction}
object DataStreamConversions {

  /**
   * Translates a [[DataStream]] of internal [[CRow]] type into a [[DataStream]] of requested type.
   *
   * @param inputDataStream The input [[DataStream]] for the conversion.
   * @param logicalType The logical row type of the [[DataStream]]. This is needed because
   *                    field naming might be lost during optimization.
   * @param withChangeFlag Set to true to emit records with change flags.
   * @param requestedOutputType The [[TypeInformation]] of the resulting [[DataStream]].
   * @param config The [[TableConfig]] of the current [[TableEnvironment]].
   * @tparam A The type of the resulting [[DataStream]].
   * @return The [[DataStream]] of requested type.
   */
  def convert[A](
      inputDataStream: DataStream[CRow],
      logicalType: TableSchema,
      withChangeFlag: Boolean,
      requestedOutputType: TypeInformation[A],
      config: TableConfig)
    : DataStream[A] = {

    // ((fieldType, fieldName), fieldIndex) for every rowtime (event-time indicator) field.
    val rowtimeFields = logicalType.getFieldTypes.zip(logicalType.getFieldNames).zipWithIndex
      .filter(f => FlinkTypeFactory.isRowtimeIndicatorType(f._1._1))

    // Convert the input type for the conversion mapper.
    // The rowtime value itself will be rewritten in the OutputRowtimeProcessFunction later.
    val convType = if (rowtimeFields.length > 1) {
      // A DataStream record carries at most one event-time timestamp, so the conversion is
      // ambiguous when several rowtime fields remain.
      // Fix: the message previously contained an escaped "\\n", which printed a literal
      // backslash-n instead of a line break.
      throw new TableException(
        s"Found more than one rowtime field: [${rowtimeFields.map(_._1._2).mkString(", ")}] in " +
          "the table that should be converted to a DataStream.\n" +
          "Please select the rowtime field that should be used as event-time timestamp for the " +
          "DataStream by casting all other fields to TIMESTAMP.")
    } else if (rowtimeFields.length == 1) {
      // Replace the single rowtime indicator field type with a plain SQL TIMESTAMP.
      val origRowType = inputDataStream.getType.asInstanceOf[CRowTypeInfo].rowType
      val convFieldTypes = origRowType.getFieldTypes.map { t =>
        if (FlinkTypeFactory.isRowtimeIndicatorType(t)) SqlTimeTypeInfo.TIMESTAMP else t
      }
      CRowTypeInfo(new RowTypeInfo(convFieldTypes, origRowType.getFieldNames))
    } else {
      // No rowtime field: the input type can be used as-is.
      inputDataStream.getType
    }

    // Convert CRow to the requested output type, optionally retaining the change flag.
    val conversion: MapFunction[CRow, A] = if (withChangeFlag) {
      getConversionMapperWithChanges(
        convType,
        logicalType,
        requestedOutputType,
        "DataStreamSinkConversion",
        config)
    } else {
      getConversionMapper(
        convType,
        logicalType,
        requestedOutputType,
        "DataStreamSinkConversion",
        config)
    }

    val rootParallelism = inputDataStream.getParallelism

    val withRowtime = if (rowtimeFields.isEmpty) {
      // No rowtime field to set.
      inputDataStream.map(conversion)
    } else {
      // Set the only rowtime field as event-time timestamp for the DataStream
      // and convert it to a SQL timestamp.
      inputDataStream
        .process(new OutputRowtimeProcessFunction[A](conversion, rowtimeFields.head._2))
    }

    withRowtime
      .returns(requestedOutputType)
      .name(s"to: ${requestedOutputType.getTypeClass.getSimpleName}")
      .setParallelism(rootParallelism)
  }

  /**
   * Creates a final converter that maps the internal row type to external type.
   *
   * @param physicalInputType the input of the sink
   * @param logicalInputSchema the input schema with correct field names (esp. for POJO field
   *                           mapping)
   * @param requestedOutputType the output type of the sink
   * @param functionName name of the map function. Must not be unique but has to be a
   *                     valid Java class identifier.
   * @param config the current table configuration
   */
  private def getConversionMapper[OUT](
      physicalInputType: TypeInformation[CRow],
      logicalInputSchema: TableSchema,
      requestedOutputType: TypeInformation[OUT],
      functionName: String,
      config: TableConfig)
    : MapFunction[CRow, OUT] = {

    val converterFunction = Conversions.generateRowConverterFunction[OUT](
      physicalInputType.asInstanceOf[CRowTypeInfo].rowType,
      logicalInputSchema,
      requestedOutputType,
      functionName,
      config
    )

    converterFunction match {
      case Some(func) =>
        // Generated code path: run the generated converter for each record.
        new CRowMapRunner[OUT](func.name, func.code, func.returnType)
      case _ =>
        // No conversion code was generated: simply unwrap the Row from the CRow.
        new CRowToRowMapFunction().asInstanceOf[MapFunction[CRow, OUT]]
    }
  }

  /**
   * Creates a converter that maps the internal CRow type to Scala or Java Tuple2 with change flag.
   *
   * Note: any requested type other than `(Boolean, _)` or `Tuple2<Boolean, _>` results in a
   * MatchError (unchanged behavior; callers only request these tuple shapes).
   *
   * @param physicalInputType the input of the sink
   * @param logicalInputSchema the input schema with correct field names (esp. for POJO field
   *                           mapping)
   * @param requestedOutputType the output type of the sink.
   * @param functionName name of the map function. Must not be unique but has to be a
   *                     valid Java class identifier.
   * @param config the current table configuration
   */
  private def getConversionMapperWithChanges[OUT](
      physicalInputType: TypeInformation[CRow],
      logicalInputSchema: TableSchema,
      requestedOutputType: TypeInformation[OUT],
      functionName: String,
      config: TableConfig)
    : MapFunction[CRow, OUT] = requestedOutputType match {

    // Scala tuple (Boolean, _)
    case t: CaseClassTypeInfo[_]
        if t.getTypeClass == classOf[(_, _)] && t.getTypeAt(0) == Types.BOOLEAN =>

      val reqType = t.getTypeAt[Any](1)
      // Convert Row into requested type and wrap result in Tuple2.
      val converterFunction = Conversions.generateRowConverterFunction(
        physicalInputType.asInstanceOf[CRowTypeInfo].rowType,
        logicalInputSchema,
        reqType,
        functionName,
        config
      )

      converterFunction match {
        case Some(func) =>
          new CRowToScalaTupleMapRunner(
            func.name,
            func.code,
            requestedOutputType.asInstanceOf[TypeInformation[(Boolean, Any)]]
          ).asInstanceOf[MapFunction[CRow, OUT]]
        case _ =>
          new CRowToScalaTupleMapFunction().asInstanceOf[MapFunction[CRow, OUT]]
      }

    // Java tuple Tuple2<Boolean, _>
    case t: TupleTypeInfo[_]
        if t.getTypeClass == classOf[JTuple2[_, _]] && t.getTypeAt(0) == Types.BOOLEAN =>

      val reqType = t.getTypeAt[Any](1)
      // Convert Row into requested type and wrap result in Tuple2.
      val converterFunction = Conversions.generateRowConverterFunction(
        physicalInputType.asInstanceOf[CRowTypeInfo].rowType,
        logicalInputSchema,
        reqType,
        functionName,
        config
      )

      converterFunction match {
        case Some(func) =>
          new CRowToJavaTupleMapRunner(
            func.name,
            func.code,
            requestedOutputType.asInstanceOf[TypeInformation[JTuple2[JBool, Any]]]
          ).asInstanceOf[MapFunction[CRow, OUT]]
        case _ =>
          new CRowToJavaTupleMapFunction().asInstanceOf[MapFunction[CRow, OUT]]
      }
  }
}
| hequn8128/flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/DataStreamConversions.scala | Scala | apache-2.0 | 8,522 |
package reftree.svg.api
import monocle._
import reftree.geometry._
import reftree.util.Optics
import zipper.Unzip
/**
* SVG API sufficient to implement animations
*/
abstract class BaseSvgApi[Svg] {
  // Fix: the lambda arrows in this block were mojibake'd to "β" (from "⇒") and did not
  // parse; restored here as ASCII "=>".

  /* Basic element properties and selection */
  def elementName: Getter[Svg, String]
  def elementId: Getter[Svg, Option[String]]
  def elementClasses: Getter[Svg, Set[String]]

  /**
   * A prism matching only elements that satisfy the given selector string:
   * at least one clause must agree on the element name (if given) and have all
   * of its classes present on the element.
   */
  final def select(selector: String): Prism[Svg, Svg] = Optics.only { svg =>
    Selector.fromString(selector).clauses.exists { clause =>
      clause.element.forall(_ == elementName.get(svg)) &&
      clause.classes.subsetOf(elementClasses.get(svg))
    }
  }

  /* Translation */
  def translation: Optional[Svg, Point]

  /**
   * Makes sure that translation is propagated when moving up and down the SVG nodes,
   * as well as when obtaining attributes that have coordinates inside.
   *
   * Reading applies the node's translation to the attribute; writing removes it again,
   * so values seen through the optic are in the parent's coordinate space.
   */
  private def translated[A <: Svg, B](optional: Optional[A, B])(implicit translatable: Translatable[B]) =
    Optional[A, B] { svg =>
      val t = translation.getOption(svg).getOrElse(Point.zero)
      optional.getOption(svg).map(translatable.translate(_, t))
    } { value => svg =>
      val t = translation.getOption(svg).getOrElse(Point.zero)
      optional.set(translatable.translate(value, -t))(svg)
    }

  // An SVG node itself is translated by shifting its own translation attribute.
  private implicit lazy val svgTranslatable: Translatable[Svg] =
    Translatable((svg, delta) => translation.modify(_ + delta)(svg))

  /**
   * An optic for a group's position defined via an anchor point: setting the position
   * shifts the group's translation by the difference to the anchor.
   */
  final def groupPosition(anchor: Optional[Svg, Point]): Optional[Svg, Point] =
    Optional[Svg, Point](anchor.getOption) { position => svg =>
      anchor.getOption(svg).fold(svg) { anchorPosition =>
        translation.modify(_ + position - anchorPosition)(svg)
      }
    }

  /* Navigation */
  def immediateChildren: Optional[Svg, List[Svg]]
  final lazy val realImmediateChildren = translated(immediateChildren)

  final implicit lazy val svgUnzip: Unzip[Svg] =
    Optics.unzip(realImmediateChildren)

  /* Misc attributes */
  def shapeRendering: Optional[Svg, Option[String]]
  def textRendering: Optional[Svg, Option[String]]
  def viewBox: Optional[Svg, Rectangle]
  def opacity: Optional[Svg, Double]
  def fillColor: Optional[Svg, Option[Color]]
  def strokeColor: Optional[Svg, Option[Color]]
  def strokeWidth: Optional[Svg, Double]

  /* Prisms for particular SVG elements */
  final val polygons = select("polygon")
  final val paths = select("path")
  final val texts = select("text")
  final val anchors = select("a")

  /* Element-specific attributes */
  def polygonPoints: Optional[Svg, Polyline]
  def pathPath: Optional[Svg, Path]
  def textPosition: Optional[Svg, Point]
  // "real*" variants account for the element's translation (parent coordinate space).
  final lazy val realPolygonPoints = translated(polygonPoints)
  final lazy val realPathPath = translated(pathPath)
  final lazy val realTextPosition = translated(textPosition)

  def anchorTitle: Optional[Svg, Option[String]]
}
| stanch/reftree | core/shared/src/main/scala/reftree/svg/api/BaseSvgApi.scala | Scala | gpl-3.0 | 2,868 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.scala.dsl;
import builder.RouteBuilder
import org.apache.camel.processor.idempotent.MemoryIdempotentRepository
import org.apache.camel.spi.IdempotentRepository
import org.apache.camel.{Processor, Exchange};
/**
 * Test for an idempotent consumer: duplicate message ids must be dropped so each
 * distinct body reaches the mock endpoint exactly once.
 */
class IdempotentConsumerTest extends ScalaTestSupport {
  def testSimple() = doTest("direct:a", "mock:a")
  def testBlock() = doTest("direct:b", "mock:b")

  // Sends bodies 1-3 with repeated "messageId" headers; the idempotent consumer
  // must suppress the repeats, so the mock receives each message only once.
  def doTest(from: String, to: String) = {
    to expect { _.received("message 1", "message 2", "message 3")}
    // Partially applied: the endpoint is fixed, (messageId, body) are supplied per call.
    def send = sendMessage(from, _:String, _:String)
    test {
      send("1", "message 1")
      send("2", "message 2")
      send("1", "message 1")
      send("2", "message 2")
      send("1", "message 1")
      send("3", "message 3")
    }
  }

  // Sends `body` to `from` with the de-duplication key in the "messageId" header.
  def sendMessage(from: String, header: String, body: String) = {
    template.send(from, new Processor() {
      def process(exchange: Exchange) = {
        val in = exchange.getIn()
        in.setBody(body)
        in.setHeader("messageId", header)
      }
    })
  }

  // Two equivalent route definitions: inline DSL ("simple") and block DSL ("block"),
  // both keyed on the "messageId" header with an in-memory cache of 200 entries.
  val builder = new RouteBuilder {
    //START SNIPPET: simple
    "direct:a" idempotentConsumer(_.in("messageId")) memory(200) to ("mock:a")
    //END SNIPPET: simple
    //START SNIPPET: block
    "direct:b" ==> {
      idempotentConsumer(_.in("messageId")) memory(200) apply {
        to ("mock:b")
      }
    }
    //END SNIPPET: block
  }
}
/**
 * Scala DSL equivalent for IdempotentConsumerEagerTest.testEager
 */
class IdempotentConsumerEagerTest extends ScalaTestSupport {
  def testEagerIdempotentConsumer = {
    "mock:result" expect { _.received("one", "two", "three")}
    test {
      sendMessage("1", "one")
      sendMessage("2", "two")
      sendMessage("3", "three")
    }
  }

  // Sends `body` to direct:start with the de-duplication key in the "messageId" header.
  def sendMessage(messageId: Any, body: Any) = {
    template.send("direct:start", new Processor() {
      def process(exchange: Exchange) {
        // now lets fire in a message
        val in = exchange.getIn();
        in.setBody(body);
        in.setHeader("messageId", messageId);
      }
    });
  }

  val builder = new RouteBuilder {
    // Shared repository so the processor inside the route can inspect its contents.
    val repo : IdempotentRepository[String] = MemoryIdempotentRepository.memoryIdempotentRepository(200);
    "direct:start" ==> {
      idempotentConsumer(_.getIn().getHeader("messageId")).repository(repo).eager(true) {
        // With eager(true) the id is added to the repository *before* the exchange is
        // processed, so the processor must already find the current id in `repo`.
        process((exchange : Exchange) =>
          if (repo.contains(exchange.getIn().getHeader("messageId").asInstanceOf[String])) {
            // this is OK with the eager = true
          } else {
            throw new RuntimeException("IdemPotentConsumer eager handling is not working properly")
          }
        )
        to("mock:result")
      }
    }
  }
}
| cexbrayat/camel | components/camel-scala/src/test/scala/org/apache/camel/scala/dsl/IdempotentConsumerTest.scala | Scala | apache-2.0 | 3,564 |
// Minimal one-field case class; appears to be a compiler-test fixture (pos test) — no runtime use.
case class A(x: Int)
| som-snytt/dotty | tests/pos/simpleCaseClass-2.scala | Scala | apache-2.0 | 21 |
// Negative compilation test: the `// error` marker below is checked by the test harness,
// so the code must NOT be "fixed". The assignment fails because `Term[Type, Kind]`
// requires `Type <: Term[Type, Kind]`, which `Type` does not satisfy.
type Id = String
enum Kind {
  case Type
}
enum Term[T <: Term[T, K], K] {
  case Wrap(t: T)
  case Fun(id: Id, tag: K, ret: Term[T, K])
}
enum Type {
  case Var(id: Id)
}
val tExp: Term[Type, Kind] =
  Term.Fun("x", Kind.Type, Term.Wrap(Type.Var("x"))) // error
def main(args: Array[String]): Unit = { } | som-snytt/dotty | tests/neg-custom-args/allow-deep-subtypes/i8464a.scala | Scala | apache-2.0 | 334 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package config

// NOTE(review): everything below is legacy Play/bootstrap wiring that has been commented
// out (presumably during a platform upgrade — TODO confirm) and is kept for reference only.
// If the migration is complete, this dead code can be deleted outright.
//import scheduler.Scheduler
//import uk.gov.hmrc.play.microservice.bootstrap.DefaultMicroserviceGlobal
/*
import com.github.ghik.silencer.silent
import com.typesafe.config.Config
import net.ceedubs.ficus.Ficus._
import play.api.{ Application, Configuration, Play }
import scheduler.Scheduler
import uk.gov.hmrc.play.config.{ AppName, ControllerConfig }
import uk.gov.hmrc.play.microservice.bootstrap.DefaultMicroserviceGlobal
import uk.gov.hmrc.play.microservice.filters.{ AuditFilter, LoggingFilter, MicroserviceFilterSupport }
*/
/*
object ControllerConfiguration extends ControllerConfig with MicroserviceFilterSupport {
@silent lazy val controllerConfigs: Config = Play.current.configuration.underlying.as[Config]("controllers")
}*/
/*
object MicroserviceAuditFilter extends AuditFilter with AppName with MicroserviceFilterSupport {
override val auditConnector = MicroserviceAuditConnector
override def controllerNeedsAuditing(controllerName: String) = false // Disable implicit _inbound_ auditing.
override def appNameConfiguration = Play.current.configuration
}*/
/*
object MicroserviceLoggingFilter extends LoggingFilter with MicroserviceFilterSupport {
override def controllerNeedsLogging(controllerName: String): Boolean = ControllerConfiguration.paramsForController(controllerName).needsLogging
}*/
//object MicroserviceGlobal extends DefaultMicroserviceGlobal with Scheduler {
// override val auditConnector = MicroserviceAuditConnector
// override def microserviceMetricsConfig(implicit app: Application): Option[Configuration] =
// app.configuration.getConfig("microservice.metrics")
// override val loggingFilter = MicroserviceLoggingFilter
// override val microserviceAuditFilter = MicroserviceAuditFilter
// override val authFilter = None
//}
| hmrc/fset-faststream | app/config/microserviceGlobal.scala | Scala | apache-2.0 | 2,406 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.index.planning
import java.io.ByteArrayInputStream
import java.util.Date
import org.geotools.data.Query
import org.geotools.filter.SortByImpl
import org.junit.runner.RunWith
import org.locationtech.geomesa.arrow.io.SimpleFeatureArrowFileReader
import org.locationtech.geomesa.features.ScalaSimpleFeature
import org.locationtech.geomesa.index.conf.QueryHints
import org.locationtech.geomesa.index.stats.NoopStats
import org.locationtech.geomesa.utils.collection.{CloseableIterator, SelfClosingIterator}
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.locationtech.geomesa.utils.io.WithClose
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
import org.opengis.filter.Filter
import org.opengis.filter.sort.SortOrder
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class InMemoryQueryRunnerTest extends Specification {
  import org.locationtech.geomesa.filter.ff

  val typeName = "memory"
  val spec = "name:String,age:Int,dtg:Date,*geom:Point:srid=4326"
  val sft = SimpleFeatureTypes.createType(typeName, spec)

  // Fixture: four features; note "charles" has a null age, which the multi-attribute
  // sort test below expects to order before the non-null ages (ascending).
  val features = Seq(
    Array("alice", 20, "2012-01-01T12:00:00.000Z", "POINT(45.0 49.0)"),
    Array("bill", 20, "2013-01-01T12:00:00.000Z", "POINT(46.0 49.0)"),
    Array("bob", 30, "2014-01-01T12:00:00.000Z", "POINT(47.0 49.0)"),
    Array("charles", null, "2014-01-01T12:30:00.000Z", "POINT(48.0 49.0)")
  ).map {
    entry => ScalaSimpleFeature.create(sft, entry.head.toString, entry: _*)
  }

  // Runner backed directly by the in-memory fixture above; the optional filter is
  // applied eagerly to the iterator.
  val runner = new InMemoryQueryRunner(NoopStats, None) {
    override protected val name: String = "test-runner"
    override protected def features(sft: SimpleFeatureType, filter: Option[Filter]): CloseableIterator[SimpleFeature] = {
      filter match {
        case None => CloseableIterator(InMemoryQueryRunnerTest.this.features.iterator)
        case Some(f) => CloseableIterator(InMemoryQueryRunnerTest.this.features.iterator.filter(f.evaluate))
      }
    }
  }

  "InMemoryQueryRunner" should {
    "not sort" in {
      // Without a sort, insertion order is preserved.
      runner.runQuery(sft, new Query("memory")).map(ScalaSimpleFeature.copy).toSeq mustEqual features
    }
    "sort by an attribute" in {
      val q = new Query("memory")
      q.setSortBy(Array(new SortByImpl(ff.property("name"), SortOrder.ASCENDING)))
      runner.runQuery(sft, q).map(ScalaSimpleFeature.copy).toSeq mustEqual features
      q.setSortBy(Array(new SortByImpl(ff.property("name"), SortOrder.DESCENDING)))
      runner.runQuery(sft, q).map(ScalaSimpleFeature.copy).toSeq mustEqual features.reverse
    }
    "sort by multiple attributes" in {
      val q = new Query("memory")
      q.setSortBy(Array(new SortByImpl(ff.property("age"), SortOrder.ASCENDING),
        new SortByImpl(ff.property("name"), SortOrder.DESCENDING)))
      // charles (null age) first, then ties on age=20 broken by name descending.
      runner.runQuery(sft, q).map(ScalaSimpleFeature.copy).toSeq mustEqual Seq(features(3), features(1), features(0), features(2))
    }
    "sort by projections" in {
      // Sorting on a derived (transform) property must also work.
      val q = new Query("memory", Filter.INCLUDE, Array("derived=strConcat('aa', name)", "geom"))
      q.setSortBy(Array(new SortByImpl(ff.property("derived"), SortOrder.DESCENDING)))
      runner.runQuery(sft, q).map(ScalaSimpleFeature.copy).map(_.getID).toSeq mustEqual features.reverse.map(_.getID)
    }
    "query for arrow" in {
      import org.locationtech.geomesa.arrow.allocator
      val q = new Query("memory", Filter.INCLUDE, Array("name", "dtg", "geom"))
      val expected = runner.runQuery(sft, q).map(ScalaSimpleFeature.copy).toSeq.sortBy(_.getAttribute("dtg").asInstanceOf[Date])
      q.getHints.put(QueryHints.ARROW_ENCODE, java.lang.Boolean.TRUE)
      q.getHints.put(QueryHints.ARROW_SORT_FIELD, "dtg")
      q.getHints.put(QueryHints.ARROW_DICTIONARY_FIELDS, "name")
      // note: need to copy the features as the same object is re-used in the iterator
      val iter = runner.runQuery(sft, q)
      // Arrow results arrive as byte-array chunks; concatenate them into one stream.
      val bytes = iter.map(_.getAttribute(0).asInstanceOf[Array[Byte]]).reduceLeftOption(_ ++ _).getOrElse(Array.empty[Byte])
      WithClose(SimpleFeatureArrowFileReader.streaming(() => new ByteArrayInputStream(bytes))) { reader =>
        SelfClosingIterator(reader.features()).map(ScalaSimpleFeature.copy).toSeq mustEqual expected
      }
    }
  }
}
| ddseapy/geomesa | geomesa-index-api/src/test/scala/org/locationtech/geomesa/index/planning/InMemoryQueryRunnerTest.scala | Scala | apache-2.0 | 4,758 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.graphx.lib
import scala.reflect.ClassTag
import org.apache.spark.graphx._
/** Connected components algorithm. */
object ConnectedComponents {

  /**
   * Compute the connected component membership of each vertex and return a graph with the vertex
   * value containing the lowest vertex id in the connected component containing that vertex.
   *
   * @tparam VD the vertex attribute type (discarded in the computation)
   * @tparam ED the edge attribute type (preserved in the computation)
   * @param graph the graph for which to compute the connected components
   * @return a graph with vertex attributes containing the smallest vertex in each
   *         connected component
   */
  def run[VD: ClassTag, ED: ClassTag](graph: Graph[VD, ED]): Graph[VertexId, ED] = {
    // Seed every vertex with its own id; labels then shrink towards the component minimum.
    val labeled = graph.mapVertices((vid, _) => vid)

    // Push the smaller label across an edge towards the endpoint holding the larger one;
    // send nothing when both endpoints already agree.
    def propagate(triplet: EdgeTriplet[VertexId, ED]): Iterator[(VertexId, VertexId)] =
      if (triplet.srcAttr < triplet.dstAttr) {
        Iterator((triplet.dstId, triplet.srcAttr))
      } else if (triplet.dstAttr < triplet.srcAttr) {
        Iterator((triplet.srcId, triplet.dstAttr))
      } else {
        Iterator.empty
      }

    // Long.MaxValue as the initial message never wins a min, so the first superstep
    // leaves every vertex's own id in place.
    Pregel(labeled, Long.MaxValue, activeDirection = EdgeDirection.Either)(
      vprog = (_, attr, incoming) => math.min(attr, incoming),
      sendMsg = propagate,
      mergeMsg = (a, b) => math.min(a, b))
  }
}
| sjtu-iiot/graphx-algorithm | src/main/scala/org/apache/spark/graphx/lib/ConnectedComponents.scala | Scala | gpl-2.0 | 2,237 |
package de.hpi.asg.breezetestgen.testgeneration
import de.hpi.asg.breezetestgen.testing.TestEvent
/** Operation describing how a new test step relates to previously recorded [[TestEvent]]s. */
sealed trait TestOp
/** Relates the new step to a single preceding [[TestEvent]]. */
case class Follow(after: TestEvent) extends TestOp
/** Relates the new step to a set of preceding [[TestEvent]]s (a merge point). */
case class Merge(after: Set[TestEvent]) extends TestOp
| 0x203/BreezeTestGen | src/main/scala/de/hpi/asg/breezetestgen/testgeneration/TestOp.scala | Scala | mit | 226 |
package gitbucket.core.controller.api
import gitbucket.core.api.{ApiBranchCommit, ApiBranchForHeadCommit, ApiCommits, JsonFormat}
import gitbucket.core.controller.ControllerBase
import gitbucket.core.model.Account
import gitbucket.core.service.{AccountService, CommitsService, ProtectedBranchService}
import gitbucket.core.util.Directory.getRepositoryDir
import gitbucket.core.util.Implicits._
import gitbucket.core.util.JGitUtil.{CommitInfo, getBranches, getBranchesOfCommit}
import gitbucket.core.util.{JGitUtil, ReferrerAuthenticator, RepositoryName}
import org.eclipse.jgit.api.Git
import org.eclipse.jgit.revwalk.RevWalk
import scala.jdk.CollectionConverters._
import scala.util.Using
/** GitHub-compatible REST endpoints for repository commits. */
trait ApiRepositoryCommitControllerBase extends ControllerBase {
  self: AccountService with CommitsService with ProtectedBranchService with ReferrerAuthenticator =>

  /*
   * i. List commits on a repository
   * https://developer.github.com/v3/repos/commits/#list-commits-on-a-repository
   */
  get("/api/v3/repos/:owner/:repository/commits")(referrersOnly { repository =>
    val owner = repository.owner
    val name = repository.name
    // TODO: The following parameters need to be implemented. [:path, :author, :since, :until]
    val sha = params.getOrElse("sha", "refs/heads/master")
    Using.resource(Git.open(getRepositoryDir(owner, name))) { git =>
      val repo = git.getRepository
      Using.resource(new RevWalk(repo)) { revWalk =>
        val objectId = repo.resolve(sha)
        revWalk.markStart(revWalk.parseCommit(objectId))
        // Only the 30 most recent commits are returned; paging is not implemented.
        JsonFormat(revWalk.asScala.take(30).map { commit =>
          val commitInfo = new CommitInfo(commit)
          ApiCommits(
            repositoryName = RepositoryName(repository),
            commitInfo = commitInfo,
            diffs = JGitUtil.getDiffs(git, commitInfo.parents.headOption, commitInfo.id, false, true),
            author = getAccount(commitInfo.authorName, commitInfo.authorEmailAddress),
            committer = getAccount(commitInfo.committerName, commitInfo.committerEmailAddress),
            commentCount = getCommitComment(repository.owner, repository.name, commitInfo.id).size
          )
        })
      }
    }
  })

  /*
   * ii. Get a single commit
   * https://developer.github.com/v3/repos/commits/#get-a-single-commit
   */
  get("/api/v3/repos/:owner/:repository/commits/:sha")(referrersOnly { repository =>
    val owner = repository.owner
    val name = repository.name
    val sha = params("sha")
    Using.resource(Git.open(getRepositoryDir(owner, name))) { git =>
      val repo = git.getRepository
      val objectId = repo.resolve(sha)
      val commitInfo = Using.resource(new RevWalk(repo)) { revWalk =>
        new CommitInfo(revWalk.parseCommit(objectId))
      }
      JsonFormat(
        ApiCommits(
          repositoryName = RepositoryName(repository),
          commitInfo = commitInfo,
          diffs = JGitUtil.getDiffs(git, commitInfo.parents.headOption, commitInfo.id, false, true),
          author = getAccount(commitInfo.authorName, commitInfo.authorEmailAddress),
          committer = getAccount(commitInfo.committerName, commitInfo.committerEmailAddress),
          commentCount = getCommitComment(repository.owner, repository.name, sha).size
        )
      )
    }
  })

  // Resolves the account registered for `email`, falling back to a fabricated
  // placeholder marked as removed (isRemoved = true) when the email is unknown,
  // so commit authors without a GitBucket account can still be rendered.
  private def getAccount(userName: String, email: String): Account = {
    getAccountByMailAddress(email).getOrElse {
      Account(
        userName = userName,
        fullName = userName,
        mailAddress = email,
        password = "xxx",
        isAdmin = false,
        url = None,
        registeredDate = new java.util.Date(),
        updatedDate = new java.util.Date(),
        lastLoginDate = None,
        image = None,
        isGroupAccount = false,
        isRemoved = true,
        description = None
      )
    }
  }

  /*
   * iii. Get the SHA-1 of a commit reference
   * https://developer.github.com/v3/repos/commits/#get-the-sha-1-of-a-commit-reference
   */
  /*
   * iv. Compare two commits
   * https://developer.github.com/v3/repos/commits/#compare-two-commits
   */
  /*
   * v. Commit signature verification
   * https://developer.github.com/v3/repos/commits/#commit-signature-verification
   */

  /*
   * vi. List branches for HEAD commit
   * https://docs.github.com/en/rest/reference/repos#list-branches-for-head-commit
   */
  get("/api/v3/repos/:owner/:repository/commits/:sha/branches-where-head")(referrersOnly { repository =>
    val sha = params("sha")
    Using.resource(Git.open(getRepositoryDir(repository.owner, repository.name))) { git =>
      // For every branch whose head is `sha`, attach its protection status.
      val apiBranchForCommits = for {
        branch <- getBranchesOfCommit(git, sha)
        br <- getBranches(git, branch, repository.repository.originUserName.isEmpty).find(_.name == branch)
      } yield {
        val protection = getProtectedBranchInfo(repository.owner, repository.name, branch)
        ApiBranchForHeadCommit(branch, ApiBranchCommit(br.commitId), protection.enabled)
      }
      JsonFormat(apiBranchForCommits)
    }
  })
}
| imeszaros/gitbucket | src/main/scala/gitbucket/core/controller/api/ApiRepositoryCommitControllerBase.scala | Scala | apache-2.0 | 5,162 |
import scala.tasty.Reflection
import scala.tasty.inspector._
object Test {
  /** Entry point: runs the extractor-printing inspector over the TASTy for class `Foo`. */
  def main(args: Array[String]): Unit = {
    val inspector = new DBInspector()
    inspector.inspect("", List("Foo"))
  }
}
/** TASTy inspector that prints every definition in the tree in extractor form. */
class DBInspector extends TastyInspector {
  protected def processCompilationUnit(reflect: Reflection)(root: reflect.Tree): Unit = {
    import reflect.{_, given _}
    object Traverser extends TreeTraverser {
      override def traverseTree(tree: Tree)(implicit ctx: Context): Unit = tree match {
        case tree: Definition =>
          // Print the definition's extractor representation, then keep walking its children.
          println(tree.showExtractors)
          super.traverseTree(tree)
        case tree =>
          super.traverseTree(tree)
      }
    }
    Traverser.traverseTree(root)(reflect.rootContext)
  }
}
| som-snytt/dotty | tests/run-custom-args/tasty-inspector/tasty-inspector/Test.scala | Scala | apache-2.0 | 711 |
package com.pedrorijo91.yo.model
/** A latitude/longitude pair rendered by [[value]] as a comma-joined string. */
case class Location(private val latitude: String, private val longitude: String) {
  /** The coordinates as "latitude,longitude". */
  def value: String = Seq(latitude, longitude).mkString(",")
}
| pedrorijo91/yo-scala-client | src/main/scala/com/pedrorijo91/yo/model/Location.scala | Scala | mit | 165 |
/*
* Tests if two lambdas defined in the same class do not lead to
* name clashes.
*/
object Test {
  /** Applies the given function to the fixed sample value 12. */
  def takeLambda(f: Int => Int): Int = f(12)

  /** Prints the results of two distinct lambdas defined in this object (13, then 24). */
  def main(args: Array[String]): Unit = {
    val increment: Int => Int = _ + 1
    val double: Int => Int = _ * 2
    println(takeLambda(increment))
    println(takeLambda(double))
  }
}
| yusuke2255/dotty | tests/run/delambdafy-two-lambdas.scala | Scala | bsd-3-clause | 259 |
package org.mbari.smith
import vars.annotation.ui.{StateLookup, ToolBelt}
import com.google.inject.Injector
import scala.collection.JavaConverters._
import vars.annotation.VideoFrame
import org.slf4j.LoggerFactory
import java.net.URL
/**
*
* @author Brian Schlining
* @since 2013-01-29
*/
object CoverageEstimator {
  private[this] val log = LoggerFactory.getLogger(getClass)
  private[this] val toolBelt = StateLookup.GUICE_INJECTOR.getInstance(classOf[ToolBelt])
  // Image dimensions are resolved once, from the first frame processed, and then reused.
  // NOTE(review): this shared mutable state makes the object non-reentrant/non-thread-safe,
  // and assumes every frame across all apply() calls has the same image size — verify.
  private[this] var imageWidth: Int = _
  private[this] var imageHeight: Int = _
  private[this] var gotDimensions = false

  // Pipeline: fetch annotations -> build detrital polygons -> keep only detritus
  // intersecting the FOV -> convert pixel polygons to real-world areas per frame.
  def apply(videoArchiveName: String, camera: Camera): List[RealArea] = {
    val frameAreas0 = CanadianGrid.toDetrialPolygons(fetchAnnotations(videoArchiveName))
    log.debug("Found " + frameAreas0.size + " areaMeasurements")
    val frameAreas1 = frameAreas0.map(keepAnnotationsWithinFOV(_))
    log.debug("Found " + frameAreas1.size + " areaMeasurements within FOV")
    toArea(frameAreas1, camera)
  }

  /**
   * Retrieve the VideoFrames for the VideoArchive
   * @param videoArchiveName name of the archive to look up
   * @return the archive's frames, or an empty list when no archive with that name exists
   */
  def fetchAnnotations(videoArchiveName: String): List[VideoFrame] = {
    val dao = toolBelt.getAnnotationDAOFactory.newVideoArchiveDAO()
    dao.startTransaction()
    val list = Option(dao.findByName(videoArchiveName)) match {
      case Some(videoArchive) => videoArchive.getVideoFrames.asScala.toList
      case None => {
        log.info("No VideoArchive named " + videoArchiveName + " was found in the database")
        List.empty[VideoFrame]
      }
    }
    dao.endTransaction()
    list
  }

  /**
   * Drops any AreaPolygons that do not intersect with the FOV
   * @param frameAreas the polygons of a single frame
   * @return a copy keeping only detritus whose bounding box intersects the FOV polygon
   */
  private def keepAnnotationsWithinFOV(frameAreas: DetritalPolygons) = {
    val fovPolygon = frameAreas.fov.polygon
    // Intersection is tested against each polygon's bounds (bounding box), not its exact shape.
    val intersect = frameAreas.detritus.filter( fa => fovPolygon.intersects(fa.polygon.getBounds2D) )
    log.debug(frameAreas.videoFrame + " has " + intersect.size + " measurements in the FOV")
    frameAreas.copy(detritus = intersect)
  }

  // Produces one RealArea (total FOV area plus summed detrital area) per frame,
  // using CanadianGrid to convert pixel measurements via the camera parameters.
  private def toArea(frameAreas: List[DetritalPolygons], camera: Camera): List[RealArea] = {
    for {
      fa <- frameAreas
    } yield {
      if (!gotDimensions) {
        // Lazily read the image size from the first frame's image reference (see NOTE above).
        val d = CanadianGrid.imageDimensions(new URL(fa.videoFrame.getCameraData.getImageReference))
        imageWidth = d._1
        imageHeight = d._2
        gotDimensions = true
      }
      val totalFovArea = CanadianGrid.calculateArea(fa.fov.areaMeasurement, imageWidth, imageHeight, camera)
      val detritalAreas = fa.detritus.map(ap =>
        CanadianGrid.calculateArea(ap.areaMeasurement, imageWidth, imageHeight, camera))
      log.debug(detritalAreas.sum + "")
      RealArea(fa.videoFrame, totalFovArea, detritalAreas.sum)
    }
  }
}
| hohonuuli/vars | vars-standalone/src/main/scala/org/mbari/smith/CoverageEstimator.scala | Scala | lgpl-2.1 | 2,819 |
package org.scalatra
package oauth2
package service
import scala.util.control.Exception.allCatch
import commands._
import model._
import scalaz._
import Scalaz._
import akka.actor.ActorSystem
import org.scalatra.validation.{ ValidationError, UnknownError, NotImplemented }
/**
 * Validates and executes OAuth2 commands, converting any thrown exception or
 * unhandled command into a failed [[ModelValidation]].
 *
 * NOTE: the original text was encoding-mangled: `⇒` arrows appeared as `β`
 * (not valid Scala here) and `\n` escapes were doubled. Both are restored.
 */
trait CommandHandler { self: Logging =>

  /**
   * Executes a command: if it is valid, dispatches it to [[handle]], catching
   * any non-fatal exception as a server error; otherwise collects the
   * command's validation failures.
   */
  def execute[S: Manifest](cmd: OAuth2Command[S]): ModelValidation[S] = {
    logger.debug("Executing [%s].\n%s" format (cmd.getClass.getName, cmd))
    if (cmd.isValid) {
      val res = (allCatch withApply (serverError(cmd.getClass.getName, _))) {
        // `handle` is partial: unknown commands become a validation failure
        handle.lift(cmd).map(_.map(_.asInstanceOf[S])) | ValidationError("Don't know how to handle: " + cmd.getClass.getName, UnknownError).failNel
      }
      val ftext = "with %d failures\n%s".format(~res.fail.toOption.map(_.list.size), ~res.fail.toOption.map(_.list))
      logger.debug("Command [%s] executed %s." format (cmd.getClass.getName, res.isSuccess ? "successfully." | ftext))
      res
    } else {
      // Gather every field-level failure and return them as a non-empty list
      val f = cmd.errors.map(_.validation) collect {
        case Failure(e) => e
      }
      logger.debug("Command [%s] executed with %d failures.\n%s" format (cmd.getClass.getName, f.size, f.toList))
      nel(f.head, f.tail: _*).fail
    }
  }

  /** Logs an exception raised during command handling and wraps it as a validation error. */
  private[this] def serverError[R](cmdName: String, ex: Throwable): ModelValidation[R] = {
    logger.error("There was an error while executing " + cmdName, ex)
    ValidationError("An error occurred while handling: " + cmdName, UnknownError).failNel[R]
  }

  type Handler = PartialFunction[OAuth2Command[_], ModelValidation[_]]

  /** Dispatch table supplied by the concrete service. */
  protected def handle: Handler
}
/**
 * Authentication facade: wires account operations (login, register, reset…)
 * to auth-session creation. Arrow characters restored from the garbled `β`
 * in the original text.
 */
final class AuthenticationService(oauth: OAuth2Extension) extends Logging with CommandHandler {
  private val accounts = oauth.userProvider
  private val authSessions = oauth.authSessions

  /** Restores an auth session from a remember-me token. */
  def loginFromRemember(token: String): ModelValidation[AuthSession] = authSessions loginFromRemember token

  /** Issues a remember-me token for the given session. */
  def remember(session: AppAuthSession[_]): databinding.FieldValidation[String] = authSessions remember session

  def validate(account: Account) = accounts.validate(account)

  /** Saves the now-complete profile and opens a fresh session for the account. */
  def completedProfile(account: Account, ipAddress: String): ModelValidation[AuthSession] = {
    accounts.save(account)
    authSessions.newSession(ipAddress)(account).liftFailNel
  }

  /** Records a successful login and opens a session bound to the client IP. */
  def loggedIn(account: Account, ipAddress: String): databinding.FieldValidation[AuthSession] =
    authSessions.newSession(ipAddress)(accounts.loggedIn(account, ipAddress))

  def logout(token: String) = authSessions.logout(token)

  // Command dispatch table; each successful account operation opens a new
  // auth session for the caller.
  protected val handle: Handler = {
    case c: LoginCommand => accounts.login(c) flatMap (authSessions.newSession(c) _)
    case c: ActivateAccountCommand => accounts.confirm(c) flatMap (authSessions.newSession(c) _)
    case c: ResetCommand => accounts.resetPassword(c) flatMap (authSessions.newSession(c) _)
    case c: OAuthInfoIncompleteCommand => ValidationError("Not Implemented", NotImplemented).failNel[AuthSession] // TODO: Implement OAuthInfoIncompleteCommand handler
    case c: RegisterCommand => accounts.register(c) flatMap (authSessions.newSession(c) _)
    case c: ForgotCommand => accounts.forgot(c)
    case c: ChangePasswordCommand => accounts.changePassword(c)
  }
}
| scalatra/oauth2-server | src/main/scala/org/scalatra/oauth2/service/AuthenticationService.scala | Scala | mit | 3,245 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.coordinator.transaction
import java.{lang, util}
import java.util.Arrays.asList
import org.apache.kafka.clients.ClientResponse
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.protocol.{ApiKeys, Errors}
import org.apache.kafka.common.requests.{RequestHeader, TransactionResult, WriteTxnMarkersRequest, WriteTxnMarkersResponse}
import org.easymock.{EasyMock, IAnswer}
import org.junit.Assert._
import org.junit.Test
import scala.collection.mutable
/**
 * Unit tests for TransactionMarkerRequestCompletionHandler: verifies how the
 * handler reacts to broker disconnects and to each per-partition error code
 * in a WriteTxnMarkers response (retry, complete, remove, or throw).
 */
class TransactionMarkerRequestCompletionHandlerTest {
  private val brokerId = 0
  private val txnTopicPartition = 0
  private val transactionalId = "txnId1"
  private val producerId = 0.asInstanceOf[Long]
  private val producerEpoch = 0.asInstanceOf[Short]
  private val txnTimeoutMs = 0
  private val coordinatorEpoch = 0
  private val txnResult = TransactionResult.COMMIT
  private val topicPartition = new TopicPartition("topic1", 0)
  private val txnIdAndMarkers = asList(
    TxnIdAndMarkerEntry(transactionalId, new WriteTxnMarkersRequest.TxnMarkerEntry(producerId, producerEpoch, coordinatorEpoch, txnResult, asList(topicPartition))))
  private val txnMetadata = new TransactionMetadata(transactionalId, producerId, producerEpoch, txnTimeoutMs,
    PrepareCommit, mutable.Set[TopicPartition](topicPartition), 0L, 0L)

  private val markerChannelManager: TransactionMarkerChannelManager =
    EasyMock.createNiceMock(classOf[TransactionMarkerChannelManager])

  private val txnStateManager: TransactionStateManager = EasyMock.createNiceMock(classOf[TransactionStateManager])

  private val handler = new TransactionMarkerRequestCompletionHandler(brokerId, txnStateManager, markerChannelManager, txnIdAndMarkers)

  /** Stubs the state manager to return the test metadata at the current coordinator epoch. */
  private def mockCache(): Unit = {
    EasyMock.expect(txnStateManager.partitionFor(transactionalId))
      .andReturn(txnTopicPartition)
      .anyTimes()
    EasyMock.expect(txnStateManager.getTransactionState(EasyMock.eq(transactionalId)))
      .andReturn(Right(Some(CoordinatorEpochAndTxnMetadata(coordinatorEpoch, txnMetadata))))
      .anyTimes()
    EasyMock.replay(txnStateManager)
  }

  @Test
  def shouldReEnqueuePartitionsWhenBrokerDisconnected(): Unit = {
    mockCache()

    EasyMock.expect(markerChannelManager.addTxnMarkersToBrokerQueue(transactionalId,
      producerId, producerEpoch, txnResult, coordinatorEpoch, Set[TopicPartition](topicPartition)))
    EasyMock.replay(markerChannelManager)

    // disconnected = true in the ClientResponse simulates a broker disconnect
    handler.onComplete(new ClientResponse(new RequestHeader(ApiKeys.PRODUCE, 0, "client", 1),
      null, null, 0, 0, true, null, null, null))

    EasyMock.verify(markerChannelManager)
  }

  @Test
  def shouldThrowIllegalStateExceptionIfErrorCodeNotAvailableForPid(): Unit = {
    mockCache()
    EasyMock.replay(markerChannelManager)

    // Response contains no entry for the producer id at all
    val response = new WriteTxnMarkersResponse(new java.util.HashMap[java.lang.Long, java.util.Map[TopicPartition, Errors]]())

    try {
      handler.onComplete(new ClientResponse(new RequestHeader(ApiKeys.PRODUCE, 0, "client", 1),
        null, null, 0, 0, false, null, null, response))
      // Fixed message: the expected (and caught) exception is IllegalStateException,
      // the original message incorrectly said "illegal argument exception".
      fail("should have thrown illegal state exception")
    } catch {
      case _: IllegalStateException => // ok
    }
  }

  @Test
  def shouldCompleteDelayedOperationWhenNoErrors(): Unit = {
    mockCache()

    verifyCompleteDelayedOperationOnError(Errors.NONE)
  }

  @Test
  def shouldCompleteDelayedOperationWhenNotCoordinator(): Unit = {
    EasyMock.expect(txnStateManager.getTransactionState(EasyMock.eq(transactionalId)))
      .andReturn(Left(Errors.NOT_COORDINATOR))
      .anyTimes()
    EasyMock.replay(txnStateManager)

    verifyRemoveDelayedOperationOnError(Errors.NONE)
  }

  @Test
  def shouldCompleteDelayedOperationWhenCoordinatorLoading(): Unit = {
    EasyMock.expect(txnStateManager.getTransactionState(EasyMock.eq(transactionalId)))
      .andReturn(Left(Errors.COORDINATOR_LOAD_IN_PROGRESS))
      .anyTimes()
    EasyMock.replay(txnStateManager)

    verifyRemoveDelayedOperationOnError(Errors.NONE)
  }

  @Test
  def shouldCompleteDelayedOperationWhenCoordinatorEpochChanged(): Unit = {
    // A newer coordinator epoch in the cache means this handler's markers are stale
    EasyMock.expect(txnStateManager.getTransactionState(EasyMock.eq(transactionalId)))
      .andReturn(Right(Some(CoordinatorEpochAndTxnMetadata(coordinatorEpoch+1, txnMetadata))))
      .anyTimes()
    EasyMock.replay(txnStateManager)

    verifyRemoveDelayedOperationOnError(Errors.NONE)
  }

  @Test
  def shouldCompleteDelayedOperationWhenInvalidProducerEpoch(): Unit = {
    mockCache()

    verifyRemoveDelayedOperationOnError(Errors.INVALID_PRODUCER_EPOCH)
  }

  @Test
  def shouldCompleteDelayedOperationWheCoordinatorEpochFenced(): Unit = {
    // NOTE(review): method name has a typo ("Whe"); kept as-is to avoid
    // changing the public test name.
    mockCache()

    verifyRemoveDelayedOperationOnError(Errors.TRANSACTION_COORDINATOR_FENCED)
  }

  @Test
  def shouldThrowIllegalStateExceptionWhenUnknownError(): Unit = {
    verifyThrowIllegalStateExceptionOnError(Errors.UNKNOWN_SERVER_ERROR)
  }

  @Test
  def shouldThrowIllegalStateExceptionWhenCorruptMessageError(): Unit = {
    verifyThrowIllegalStateExceptionOnError(Errors.CORRUPT_MESSAGE)
  }

  @Test
  def shouldThrowIllegalStateExceptionWhenMessageTooLargeError(): Unit = {
    verifyThrowIllegalStateExceptionOnError(Errors.MESSAGE_TOO_LARGE)
  }

  @Test
  def shouldThrowIllegalStateExceptionWhenRecordListTooLargeError(): Unit = {
    verifyThrowIllegalStateExceptionOnError(Errors.RECORD_LIST_TOO_LARGE)
  }

  @Test
  def shouldThrowIllegalStateExceptionWhenInvalidRequiredAcksError(): Unit = {
    verifyThrowIllegalStateExceptionOnError(Errors.INVALID_REQUIRED_ACKS)
  }

  @Test
  def shouldRetryPartitionWhenUnknownTopicOrPartitionError(): Unit = {
    verifyRetriesPartitionOnError(Errors.UNKNOWN_TOPIC_OR_PARTITION)
  }

  @Test
  def shouldRetryPartitionWhenNotLeaderForPartitionError(): Unit = {
    verifyRetriesPartitionOnError(Errors.NOT_LEADER_FOR_PARTITION)
  }

  @Test
  def shouldRetryPartitionWhenNotEnoughReplicasError(): Unit = {
    verifyRetriesPartitionOnError(Errors.NOT_ENOUGH_REPLICAS)
  }

  @Test
  def shouldRetryPartitionWhenNotEnoughReplicasAfterAppendError(): Unit = {
    verifyRetriesPartitionOnError(Errors.NOT_ENOUGH_REPLICAS_AFTER_APPEND)
  }

  @Test
  def shouldRemoveTopicPartitionFromWaitingSetOnUnsupportedForMessageFormat(): Unit = {
    mockCache()
    verifyCompleteDelayedOperationOnError(Errors.UNSUPPORTED_FOR_MESSAGE_FORMAT)
  }

  /** Asserts that a retriable per-partition error re-enqueues the marker to the broker queue. */
  private def verifyRetriesPartitionOnError(error: Errors) = {
    mockCache()

    EasyMock.expect(markerChannelManager.addTxnMarkersToBrokerQueue(transactionalId,
      producerId, producerEpoch, txnResult, coordinatorEpoch, Set[TopicPartition](topicPartition)))
    EasyMock.replay(markerChannelManager)

    val response = new WriteTxnMarkersResponse(createProducerIdErrorMap(error))
    handler.onComplete(new ClientResponse(new RequestHeader(ApiKeys.PRODUCE, 0, "client", 1),
      null, null, 0, 0, false, null, null, response))

    // The partition stays in the pending set since the marker will be retried
    assertEquals(txnMetadata.topicPartitions, mutable.Set[TopicPartition](topicPartition))
    EasyMock.verify(markerChannelManager)
  }

  /** Asserts that a fatal per-partition error makes onComplete throw IllegalStateException. */
  private def verifyThrowIllegalStateExceptionOnError(error: Errors) = {
    mockCache()

    val response = new WriteTxnMarkersResponse(createProducerIdErrorMap(error))
    try {
      handler.onComplete(new ClientResponse(new RequestHeader(ApiKeys.PRODUCE, 0, "client", 1),
        null, null, 0, 0, false, null, null, response))
      fail("should have thrown illegal state exception")
    } catch {
      case _: IllegalStateException => // ok
    }
  }

  /** Asserts the delayed operation completes (markers fully sent) for the given error. */
  private def verifyCompleteDelayedOperationOnError(error: Errors): Unit = {

    var completed = false
    EasyMock.expect(markerChannelManager.completeSendMarkersForTxnId(transactionalId))
      .andAnswer(new IAnswer[Unit] {
        override def answer(): Unit = {
          completed = true
        }
      })
      .once()
    EasyMock.replay(markerChannelManager)

    val response = new WriteTxnMarkersResponse(createProducerIdErrorMap(error))
    handler.onComplete(new ClientResponse(new RequestHeader(ApiKeys.PRODUCE, 0, "client", 1),
      null, null, 0, 0, false, null, null, response))

    assertTrue(txnMetadata.topicPartitions.isEmpty)
    assertTrue(completed)
  }

  /** Asserts the markers are discarded (not completed) for the given error. */
  private def verifyRemoveDelayedOperationOnError(error: Errors): Unit = {

    var removed = false
    EasyMock.expect(markerChannelManager.removeMarkersForTxnId(transactionalId))
      .andAnswer(new IAnswer[Unit] {
        override def answer(): Unit = {
          removed = true
        }
      })
      .once()
    EasyMock.replay(markerChannelManager)

    val response = new WriteTxnMarkersResponse(createProducerIdErrorMap(error))
    handler.onComplete(new ClientResponse(new RequestHeader(ApiKeys.PRODUCE, 0, "client", 1),
      null, null, 0, 0, false, null, null, response))

    assertTrue(removed)
  }

  /** Builds a producerId -> (partition -> error) response map holding the single test partition. */
  private def createProducerIdErrorMap(errors: Errors) = {
    val pidMap = new java.util.HashMap[lang.Long, util.Map[TopicPartition, Errors]]()
    val errorsMap = new util.HashMap[TopicPartition, Errors]()
    errorsMap.put(topicPartition, errors)
    pidMap.put(producerId, errorsMap)
    pidMap
  }
}
| gf53520/kafka | core/src/test/scala/unit/kafka/coordinator/transaction/TransactionMarkerRequestCompletionHandlerTest.scala | Scala | apache-2.0 | 9,877 |
package com.arcusys.learn.liferay.update.version270
import com.arcusys.learn.liferay.LiferayClasses.LUpgradeProcess
import com.arcusys.learn.liferay.update.version240.file.FileTableComponent
import com.arcusys.learn.liferay.update.version270.migrations.PackageMigrationBase
import com.arcusys.valamis.lesson.scorm.storage.ScormManifestTableComponent
import com.arcusys.learn.liferay.update.version270.lesson.LessonTableComponent
import com.arcusys.valamis.lesson.tincan.storage.TincanActivityTableComponent
import com.arcusys.valamis.persistence.common.{SlickDBInfo, SlickProfile}
import com.arcusys.valamis.web.configuration.ioc.Configuration
import scala.slick.driver.JdbcProfile
import scala.slick.jdbc.JdbcBackend
// Liferay upgrade process #2713: creates the lesson-related tables
// (limits, tincan activities, scorm manifests, player lessons, viewers)
// inside one transaction.
class DBUpdater2713(val db: JdbcBackend#DatabaseDef,
                    val driver: JdbcProfile)
  extends LUpgradeProcess
    with LessonTableComponent
    with TincanActivityTableComponent
    with ScormManifestTableComponent
    with FileTableComponent
    with SlickProfile {

  // No-arg constructor used by the upgrade framework; wires in the
  // application-configured database and Slick profile.
  def this() = {
    this(
      Configuration.inject[SlickDBInfo](None).databaseDef,
      Configuration.inject[SlickDBInfo](None).slickProfile
    )
  }

  import driver.simple._

  // Upgrade threshold this process applies at (matches the class suffix).
  override def getThreshold = 2713

  override def doUpgrade(): Unit = {
    // Delegate base lesson-table creation to the shared migration helper,
    // bound to this updater's db/driver.
    val migration = new PackageMigrationBase {
      val db = DBUpdater2713.this.db
      val driver = DBUpdater2713.this.driver
    }

    db.withTransaction { implicit s =>
      migration.createMigrationLessonTables()
      // Create the remaining tables in one combined DDL statement.
      (lessonLimits.ddl
        ++ tincanActivitiesTQ.ddl
        ++ scormManifestsTQ.ddl
        ++ playerLessons.ddl
        ++ lessonViewers.ddl
        ).create
    }
  }
}
| igor-borisov/valamis | learn-portlet/src/main/scala/com/arcusys/learn/liferay/update/version270/DBUpdater2713.scala | Scala | gpl-3.0 | 1,657 |
/*
* Copyright (c) 2013-2014 Snowplow Analytics Ltd.
* All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache
* License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at
* http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied.
*
* See the Apache License Version 2.0 for the specific language
* governing permissions and limitations there under.
*/
package com.snowplowanalytics.snowplow.enrich
package kinesis
package sinks
// Java
import java.nio.ByteBuffer
// Amazon
import com.amazonaws.AmazonServiceException
import com.amazonaws.auth.AWSCredentialsProvider
// Scalazon (for Kinesis interaction)
import io.github.cloudify.scala.aws.kinesis.Client
import io.github.cloudify.scala.aws.kinesis.Client.ImplicitExecution._
import io.github.cloudify.scala.aws.kinesis.Definitions.{
Stream,
PutResult,
Record
}
import io.github.cloudify.scala.aws.kinesis.KinesisDsl._
// Config
import com.typesafe.config.Config
// Concurrent libraries
import scala.concurrent.{Future,Await,TimeoutException}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.util.{Success, Failure}
// Logging
import org.slf4j.LoggerFactory
// Snowplow
import com.snowplowanalytics.snowplow.collectors.thrift._
import common.outputs.CanonicalOutput
/**
 * Kinesis Sink for Scala enrichment.
 *
 * On construction, ensures the configured output stream exists (creating it
 * if necessary) and then writes enriched events to it.
 */
class KinesisSink(provider: AWSCredentialsProvider,
    config: KinesisEnrichConfig) extends ISink {
  private lazy val log = LoggerFactory.getLogger(getClass())
  import log.{error, debug, info, trace}

  // Create a Kinesis client for stream interactions.
  private implicit val kinesis = Client.fromCredentials(provider)

  // The output stream for enriched events.
  private val enrichedStream = createAndLoadStream()

  /**
   * Checks if a stream exists and is active.
   *
   * Fix: DescribeStream fails for a nonexistent stream, so the original code
   * threw instead of returning false; the describe call is now wrapped in a
   * try/catch, and the nonlocal `return` is gone.
   *
   * @param name Name of the stream to look for
   * @param timeout How long to wait for the describe call, in seconds
   * @return whether the stream exists and is active
   */
  def streamExists(name: String, timeout: Int = 60): Boolean = {
    val exists = try {
      val streamDescribeFuture = for {
        s <- Kinesis.stream(name).describe
      } yield s
      val description = Await.result(streamDescribeFuture, Duration(timeout, SECONDS))
      description.isActive
    } catch {
      // Describe throws (e.g. ResourceNotFoundException) when the stream
      // does not exist
      case _: AmazonServiceException => false
    }
    if (exists) {
      info(s"Stream $name exists and is active")
    } else {
      info(s"Stream $name doesn't exist or isn't active")
    }
    exists
  }

  /**
   * Creates a new stream if one doesn't exist, then waits until it is active.
   *
   * @param timeout How long to wait for stream creation/activation, in seconds
   * @return the (existing or newly created) stream
   */
  def createAndLoadStream(timeout: Int = 60): Stream = {
    val name = config.enrichedOutStream
    val size = config.enrichedOutStreamShards
    if (streamExists(name)) {
      Kinesis.stream(name)
    } else {
      info(s"Creating stream $name of size $size")
      val createStream = for {
        s <- Kinesis.streams.create(name)
      } yield s

      try {
        val stream = Await.result(createStream, Duration(timeout, SECONDS))
        info(s"Successfully created stream $name. Waiting until it's active")
        Await.result(stream.waitActive.retrying(timeout),
          Duration(timeout, SECONDS))
        info(s"Stream $name active")
        stream
      } catch {
        case _: TimeoutException =>
          throw new RuntimeException("Error: Timed out")
      }
    }
  }

  /**
   * Side-effecting function to store the CanonicalOutput
   * to the given output stream.
   *
   * CanonicalOutput takes the form of a tab-delimited
   * String until such time as https://github.com/snowplow/snowplow/issues/211
   * is implemented.
   *
   * @param output the tab-delimited enriched event
   * @param key the partition key for the Kinesis record
   */
  def storeCanonicalOutput(output: String, key: String) = {
    val putData = for {
      p <- enrichedStream.put(
        ByteBuffer.wrap(output.getBytes),
        key
      )
    } yield p

    // Fire-and-forget: success/failure is only logged
    putData onComplete {
      case Success(result) => {
        info(s"Writing successful")
        info(s"  + ShardId: ${result.shardId}")
        info(s"  + SequenceNumber: ${result.sequenceNumber}")
      }
      case Failure(f) => {
        error(s"Writing failed.")
        error(s"  + " + f.getMessage)
      }
    }
  }
}
| pkallos/snowplow | 3-enrich/scala-kinesis-enrich/src/main/scala/com.snowplowanalytics.snowplow.enrich.kinesis/sinks/KinesisSink.scala | Scala | apache-2.0 | 4,235 |
/*
* Copyright (c) 2014-2019 by The Minitest Project Developers.
* Some rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package minitest.api
import scala.util.{Try => STry}
import scala.language.experimental.macros
import scala.reflect.macros.whitebox
/**
 * Call-site location captured at compile time by the companion macro.
 *
 * @param fileName the simple file name, if the compiler exposes one
 * @param filePath the full file path, if the compiler exposes one
 * @param line the 1-based line number of the call site
 */
final case class SourceLocation(
  fileName: Option[String],
  filePath: Option[String],
  line: Int
)
object SourceLocation {
  // Materialises a SourceLocation at every call site that requests one
  // implicitly; expansion happens at compile time via the macro below.
  implicit def fromContext: SourceLocation =
    macro Macros.fromContext

  class Macros(val c: whitebox.Context) {
    import c.universe._

    // Builds the tree `SourceLocation(fileName, path, line)` from the
    // enclosing position of the macro expansion.
    def fromContext: Tree = {
      val (fileNameExpr, pathExpr, lineExpr) = getSourceLocation
      val SourceLocationSym = symbolOf[SourceLocation].companion
      q"""$SourceLocationSym($fileNameExpr, $pathExpr, $lineExpr)"""
    }

    // Extracts (fileName, filePath, line) from the enclosing position.
    // The file may be unavailable (e.g. REPL/synthetic sources), hence Option.
    private def getSourceLocation = {
      val line = c.Expr[Int](Literal(Constant(c.enclosingPosition.line)))
      val file = STry(Option(c.enclosingPosition.source.file.file)).toOption.flatten
      (wrapOption(file.map(_.getName)), wrapOption(file.map(_.getPath)), line)
    }

    // Lifts a compile-time Option[A] of literal values into a tree that
    // evaluates to the same Option at runtime.
    private def wrapOption[A](opt: Option[A]): c.Expr[Option[A]] =
      c.Expr[Option[A]](
        opt match {
          case None =>
            q"""_root_.scala.None"""
          case Some(value) =>
            val v = c.Expr[A](Literal(Constant(value)))
            q"""_root_.scala.Option($v)"""
        })
} | monifu/minitest | shared/src/main/scala/minitest/api/SourceLocation.scala | Scala | apache-2.0 | 1,894 |
package com.github.gluthra.fitnessz.app
import com.github.gluthra.fitnessz.app.models.{FitnessEventQueryParams, FitnessEventDAO, FitnessEvent}
import com.github.gluthra.fitnessz.app.models.FitnessEventConversions._
import org.joda.time.DateTime
/** Service layer for reading and creating fitness events. */
class EventService {

  /** Fetches all fitness events belonging to the given user. */
  def getEventsForUser(userId: String): List[FitnessEvent] =
    new FitnessEventDAO()
      .find(FitnessEventQueryParams(userId = Some(userId)))
      .toList

  /** Persists a new "HIIT" event, timestamped now, for the given user. */
  def createEventForUser(userId: String) = {
    val dao = new FitnessEventDAO()
    val newEvent = FitnessEvent(eventName = "HIIT", eventDate = new DateTime(), userId = userId)
    val insertedId = dao.insert(newEvent)
    // Fails fast if the insert did not yield an id
    assert(insertedId.isDefined)
  }
}
| gsluthra/fitnessz | src/main/scala/com/github/gluthra/fitnessz/app/EventService.scala | Scala | apache-2.0 | 726 |
package org.broadinstitute.dsde.workbench.sam.dataAccess
// Enumeration of the supported data-access backends.
// NOTE(review): scala.Enumeration is generally discouraged in favour of a
// sealed trait / Scala 3 enum, but converting would break callers that use
// ConnectionType.Value, so it is kept as-is.
object ConnectionType extends Enumeration {
  type ConnectionType = Value
  val LDAP, Postgres = Value
}
| broadinstitute/sam | src/main/scala/org/broadinstitute/dsde/workbench/sam/dataAccess/ConnectionType.scala | Scala | bsd-3-clause | 163 |
package org.jetbrains.plugins.scala.extensions.implementation.iterator
import com.intellij.psi.PsiElement
import scala.collection.mutable
/**
 * Pavel.Fatin, 09.05.2010
 *
 * Depth-first, pre-order iterator over a PSI tree rooted at `element`.
 * Children of a node are visited only when `predicate` accepts that node;
 * the node itself is always returned. A null root yields an empty iterator.
 */
class DepthFirstIterator(element: PsiElement, predicate: PsiElement => Boolean) extends Iterator[PsiElement] {
  // Work list of elements still to visit; seeded with the root (if any).
  private val stack: mutable.Stack[PsiElement] =
    if (element == null) mutable.Stack()
    else mutable.Stack(element)

  def hasNext: Boolean = stack.nonEmpty

  def next(): PsiElement = {
    val element = stack.pop()
    // Descend only into elements accepted by the predicate
    if (predicate(element)) pushChildren(element)
    element
  }

  /**
   * Pushes the children of `element` last-to-first so they pop in
   * left-to-right order. (Fix: replaced deprecated procedure syntax
   * `def f() {}` with an explicit `: Unit =` result type.)
   */
  def pushChildren(element: PsiElement): Unit = {
    var child = element.getLastChild
    while (child != null) {
      stack.push(child)
      child = child.getPrevSibling
    }
  }
}
package universe
/** Helper class to do simple arithmetic on (Int, Int) tuples */
case class Pos(x: Int, y: Int) {

  /** Euclidean distance between this position and `other`. */
  def distance(other: Pos): Double = (other - this).euclidianNorm

  /** Direction vector (component-wise rounded) pointing from here to `target`. */
  def directionTo(target: Pos): Pos = (target - this).normalize

  /** Component-wise subtraction. */
  def -(other: Pos): Pos = Pos(x - other.x, y - other.y)

  /** Component-wise addition. */
  def +(other: Pos): Pos = Pos(x + other.x, y + other.y)

  /** Component-wise negation. */
  def unary_- : Pos = Pos(-x, -y)

  /** Scales this vector to (roughly) unit length, rounding each component. */
  def normalize: Pos = this / euclidianNorm

  /** Divides both components by `scalar`, rounding to the nearest Int. */
  def /(scalar: Double): Pos = {
    def scaled(component: Int): Int = math.round(component / scalar).toInt
    Pos(scaled(x), scaled(y))
  }

  /** Length of this position seen as a vector from the origin. */
  def euclidianNorm: Double = math.sqrt((x * x + y * y).toDouble)
}
| guidosalva/REScala | Code/Examples/Universe/src/main/scala/universe/Pos.scala | Scala | apache-2.0 | 690 |
package edu.gemini.pit.ui.action
import edu.gemini.model.p1.immutable.ProposalIo
import edu.gemini.pit.model.Model
import edu.gemini.ui.workspace.scala.RichShell
import swing.Dialog
/**
 * Validate the proposal upon demand.
 */
class ValidateAction(shell: RichShell[Model]) extends ShellAction(shell, "Validate Proposal") {

  override def apply(): Unit =
    for (m <- shell.model) {
      // Map the validation outcome to a message and dialog severity
      val (msg, tipe) = ProposalIo.validate(m.proposal) match {
        case Right(_)  => ("Proposal Validates.", Dialog.Message.Info)
        case Left(err) => ("Validation Error: " + err, Dialog.Message.Error)
      }
      Dialog.showMessage(null, msg, "Proposal Validation Result", tipe)
    }
}
/* Copyright (C) 2008-2016 University of Massachusetts Amherst.
This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible)
http://factorie.cs.umass.edu, http://github.com/factorie
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package cc.factorie.optimize
import cc.factorie._
import cc.factorie.infer.GibbsSampler
import cc.factorie.la.{DenseTensor2, DenseTensor3, DenseTensor4, _}
import cc.factorie.model.DotTemplateWithStatistics2
import cc.factorie.variable._
/**
 * Smoke test for SampleRank training on a trivially separable 2-label task,
 * preceded by randomized checks of dense-tensor index arithmetic.
 * Fix: restored `\n` escapes that were doubled to `\\n` in the text.
 */
object TestSampleRank extends cc.factorie.util.FastLogging{
  object LabelDomain extends CategoricalDomain[String]
  class Label(s:String, val instance:Instance) extends LabeledCategoricalVariable(s) { def domain = LabelDomain }
  object InstanceDomain extends CategoricalVectorDomain[String]
  class Instance(labelString:String) extends BinaryFeatureVectorVariable[String] {
    def domain = InstanceDomain
    val label = new Label(labelString, this)
    // Add features that correspond to label exactly
    logger.debug("New Instance with Label "+labelString)
    this += "f1"+labelString; logger.debug("TestSampleRank features "+value+" intArray "+value.asInstanceOf[SparseBinaryTensorLike1].toIntArray.toSeq)
    this += "f2"+labelString; logger.debug("TestSampleRank features "+value+" intArray "+value.asInstanceOf[SparseBinaryTensorLike1].toIntArray.toSeq)
    this += "f3"+labelString; logger.debug("TestSampleRank features "+value+" intArray "+value.asInstanceOf[SparseBinaryTensorLike1].toIntArray.toSeq)
  }

  // One factor per (label, instance) pair over a dense weight matrix.
  val model = new DotTemplateWithStatistics2[Label,Instance] with Parameters {
    val weights = Weights(new la.DenseTensor2(LabelDomain.size, InstanceDomain.dimensionSize))
    def unroll1(l:Label) = Factor(l, l.instance)
    def unroll2(i:Instance) = Factor(i.label, i)
  }

  def main(args:Array[String]): Unit = {
    implicit val random = new scala.util.Random(0)
    // Test Tensor index arithmetic: for random dims, check that flat writes
    // match multi-index writes, and that singleIndex/multiIndex round-trip.
    for (trials <- 1 to 100) {
      val dim1 = random.nextInt(20)+1
      val dim2 = random.nextInt(20)+1
      val dim3 = random.nextInt(20)+1
      val dim4 = random.nextInt(20)+1
      logger.debug(List(dim1, dim2, dim3, dim4))
      var v = 0.0; var rdim1 = 0; var rdim2 = 0; var rdim3 = 0; var rdim4 = 0

      val t2 = new DenseTensor2(dim1,dim2)
      v = 0.0; for (i <- 0 until dim1; j <- 0 until dim2) { t2(i, j) = v; v += 1.0 }
      v = 0.0; for (i <- 0 until dim1*dim2) { assert(t2(i) == v, "dim1="+dim1+" dim2="+dim2+" i="+i+" v="+v+" t(i)="+t2(i)+"\n"+t2); v += 1.0 }
      rdim1 = random.nextInt(dim1)
      rdim2 = random.nextInt(dim2)
      val t2i = t2.singleIndex(rdim1, rdim2)
      assert(t2.multiIndex(t2i) == (rdim1, rdim2))

      val t3 = new DenseTensor3(dim1,dim2,dim3)
      v = 0.0; for (i <- 0 until dim1; j <- 0 until dim2; k <- 0 until dim3) { t3(i, j, k) = v; v += 1.0 }
      v = 0.0; for (i <- 0 until dim1*dim2*dim3) { assert(t3(i) == v); v += 1.0 }
      rdim1 = random.nextInt(dim1)
      rdim2 = random.nextInt(dim2)
      rdim3 = random.nextInt(dim3)
      val t3i = t3.singleIndex(rdim1, rdim2, rdim3)
      assert(t3.multiIndex(t3i) == (rdim1, rdim2, rdim3))

      val t4 = new DenseTensor4(dim1,dim2,dim3,dim4)
      v = 0.0; for (i <- 0 until dim1; j <- 0 until dim2; k <- 0 until dim3; l <- 0 until dim4) { t4(i, j, k, l) = v; v += 1.0 }
      v = 0.0; for (i <- 0 until dim1*dim2*dim3*dim4) { assert(t4(i) == v); v += 1.0 }
      rdim1 = random.nextInt(dim1)
      rdim2 = random.nextInt(dim2)
      rdim3 = random.nextInt(dim3)
      rdim4 = random.nextInt(dim4)
      val t4i = t4.singleIndex(rdim1, rdim2, rdim3, rdim4)
      assert(t4.multiIndex(t4i) == (rdim1, rdim2, rdim3, rdim4))
    }

    // Train SampleRank on two instances whose features encode their labels.
    val labels = List("n", "y").map(s => new Instance(s)).map(_.label)
    logger.debug("feature domain: "+InstanceDomain.dimensionDomain.mkString(" "))
    logger.debug("feature tensors:\n"+labels.map(l => l.instance.value.toString+"\n"))
    val learner = new optimize.SampleRankTrainer(new GibbsSampler(model, HammingObjective), new cc.factorie.optimize.ConstantLearningRate)
    //learner.logLevel = 10
    learner.processContexts(labels)
    labels.foreach(l => l.set(0)(null)); logger.debug("Set to 0")
    labels.foreach(l => logger.debug("feature="+l.instance.value+" value="+l.categoryValue+" target="+l.target.categoryValue+" score="+model.currentScore(l)))
    labels.foreach(l => l.set(1)(null)); logger.debug("Set to 1")
    labels.foreach(l => logger.debug("feature="+l.instance.value+" value="+l.categoryValue+" target="+l.target.categoryValue+" score="+model.currentScore(l)))
    MaximizeDiscrete(labels, model); logger.debug("Set to max")
    labels.foreach(l => logger.debug("feature="+l.instance.value+" value="+l.categoryValue+" target="+l.target.categoryValue+" score="+model.currentScore(l)))
    logger.debug("Train accuracy "+labels.map(l => HammingObjective.currentScore(l)).sum / labels.length)
  }
}
| strubell/factorie | src/test/scala/cc/factorie/optimize/TestSampleRank.scala | Scala | apache-2.0 | 5,384 |
package org.hablapps.meetup
package funmonad
package logic
import common.logic.Domain._
// Deep embedding of the store DSL: each case below is one instruction of a
// program; `A` is the type the instruction yields when interpreted.
sealed abstract class StoreProgram[A]
case class PutUser(user: User) extends StoreProgram[Int]            // persist a user, yield its id
case class PutGroup(group: Group) extends StoreProgram[Int]         // persist a group, yield its id
case class GetUser(uid: Int) extends StoreProgram[User]             // look up a user by id
case class GetGroup(gid: Int) extends StoreProgram[Group]           // look up a group by id
case class PutJoin(join: JoinRequest) extends StoreProgram[JoinRequest]  // persist a join request
case class PutMember(member: Member) extends StoreProgram[Member]   // persist a membership
case class Returns[U](value: U) extends StoreProgram[U]             // pure value (monadic `pure`)
case class Sequence[U,V](inst: StoreProgram[U], next: U => StoreProgram[V])
  extends StoreProgram[V]                                           // sequencing (monadic `flatMap`)
object StoreProgram{
  // Store instance for the deep embedding: each operation simply builds the
  // corresponding instruction node instead of performing any effect.
  implicit object StoreDeep extends Store[StoreProgram] {
    // Operadores de Store (Store operations)
    def putUser(user: User): StoreProgram[Int] = PutUser(user)
    def putGroup(group: Group): StoreProgram[Int] = PutGroup(group)
    def getGroup(gid: Int): StoreProgram[Group] = GetGroup(gid)
    def getUser(uid: Int): StoreProgram[User] = GetUser(uid)
    def putJoin(join: JoinRequest): StoreProgram[JoinRequest] = PutJoin(join)
    def putMember(member: Member): StoreProgram[Member] = PutMember(member)
  }

  // Monad instance: pure/flatMap are reified as Returns/Sequence nodes,
  // to be collapsed later by an interpreter.
  implicit object StoreMonad extends Monad[StoreProgram]{
    // Operadores de Monad (Monad operations)
    def pure[A](a: A): StoreProgram[A] = Returns(a)
    def flatMap[A, B](fa: StoreProgram[A])(f: A => StoreProgram[B]): StoreProgram[B] = Sequence(fa, f)
  }
}
| hablapps/meetapp | app/funMonad/logic/DeepStore.scala | Scala | apache-2.0 | 1,429 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.orc
import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, Row}
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper
import org.apache.spark.sql.execution.datasources.SchemaPruningSuite
import org.apache.spark.sql.execution.datasources.v2.BatchScanExec
import org.apache.spark.sql.execution.datasources.v2.orc.OrcScan
import org.apache.spark.sql.internal.SQLConf
// Schema-pruning tests run against the ORC DataSource V2 path with the
// OAP columnar plugin enabled.
class OrcV2SchemaPruningSuite extends SchemaPruningSuite with AdaptiveSparkPlanHelper {
  override protected val dataSourceName: String = "orc"
  override protected val vectorizedReaderEnabledKey: String =
    SQLConf.ORC_VECTORIZED_READER_ENABLED.key

  // Extends the base suite's conf with the columnar plugin and its tuning
  // knobs; USE_V1_SOURCE_LIST is cleared so ORC goes through the V2 path.
  override protected def sparkConf: SparkConf =
    super.sparkConf
      .setAppName("test")
      .set("spark.sql.parquet.columnarReaderBatchSize", "4096")
      .set("spark.sql.sources.useV1SourceList", "avro")
      .set("spark.sql.extensions", "com.intel.oap.ColumnarPlugin")
      .set("spark.sql.execution.arrow.maxRecordsPerBatch", "4096")
      //.set("spark.shuffle.manager", "org.apache.spark.shuffle.sort.ColumnarShuffleManager")
      .set("spark.memory.offHeap.enabled", "true")
      .set("spark.memory.offHeap.size", "50m")
      .set("spark.sql.join.preferSortMergeJoin", "false")
      .set("spark.sql.columnar.codegen.hashAggregate", "false")
      .set("spark.oap.sql.columnar.wholestagecodegen", "false")
      .set("spark.sql.columnar.window", "false")
      .set("spark.unsafe.exceptionOnMemoryLeak", "false")
      //.set("spark.sql.columnar.tmp_dir", "/codegen/nativesql/")
      .set("spark.sql.columnar.sort.broadcastJoin", "true")
      .set("spark.oap.sql.columnar.preferColumnar", "true")
      .set(SQLConf.USE_V1_SOURCE_LIST, "")

  // Collects the read schemas of all ORC V2 scans in the plan and checks
  // them (count and content) against the expected catalog strings.
  override def checkScanSchemata(df: DataFrame, expectedSchemaCatalogStrings: String*): Unit = {
    val fileSourceScanSchemata =
      collect(df.queryExecution.executedPlan) {
        case BatchScanExec(_, scan: OrcScan) => scan.readDataSchema
      }
    assert(fileSourceScanSchemata.size === expectedSchemaCatalogStrings.size,
      s"Found ${fileSourceScanSchemata.size} file sources in dataframe, " +
        s"but expected $expectedSchemaCatalogStrings")
    fileSourceScanSchemata.zip(expectedSchemaCatalogStrings).foreach {
      case (scanSchema, expectedScanSchemaCatalogString) =>
        val expectedScanSchema = CatalystSqlParser.parseDataType(expectedScanSchemaCatalogString)
        implicit val equality = schemaEquality
        assert(scanSchema === expectedScanSchema)
    }
  }
}
| Intel-bigdata/OAP | oap-native-sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcV2SchemaPruningSuite.scala | Scala | apache-2.0 | 3,433 |
package unfiltered.filter.util
/** Implicit adapters exposing Java-style iterators as Scala `Iterator`s. */
object IteratorConversions {
  import org.apache.commons.{fileupload => fu}
  import fu.{FileItemIterator, FileItemStream}
  import java.util.{Iterator => JIterator}

  /** Adapts a `java.util.Iterator` to a Scala `Iterator`. */
  implicit final class JIteratorWrapper[A](underlying: JIterator[A]) extends Iterator[A] {
    override def hasNext: Boolean = underlying.hasNext
    override def next(): A = underlying.next()
  }

  /** Adapts Apache Commons FileUpload's `FileItemIterator` to a Scala `Iterator`. */
  implicit final class FIIteratorWrapper(underlying: FileItemIterator) extends Iterator[FileItemStream] {
    override def hasNext: Boolean = underlying.hasNext
    override def next(): FileItemStream = underlying.next()
  }
}
| unfiltered/unfiltered | filter-uploads/src/main/scala/request/conversions.scala | Scala | mit | 643 |
package org.jetbrains.plugins.scala.testingSupport.scalatest.singleTest
import org.jetbrains.plugins.scala.testingSupport.scalatest.generators.FunSpecGenerator
/**
 * Verifies that a single FunSpec test can be launched in isolation (rather than
 * the whole suite), in debug mode, and that sibling tests are not run.
 *
 * @author Roman.Shein
 * @since 20.01.2015.
 */
trait FunSpecSingleTestTest extends FunSpecGenerator {
  // Expected path of the single test node in the result tree of the test run.
  val funSpecTestPath = List("[root]", "FunSpecTest", "FunSpecTest", "should launch single test")

  // Explicit `: Unit =` instead of deprecated procedure syntax.
  def testFunSpec(): Unit = {
    addFunSpec()
    // Run the test located at line 5, column 9 of the generated FunSpec file.
    runTestByLocation(5, 9, funSpecFileName,
      checkConfigAndSettings(_, funSpecClassName, "FunSpecTest should launch single test"),
      root => checkResultTreeHasExactNamedPath(root, funSpecTestPath: _*) &&
        checkResultTreeDoesNotHaveNodes(root, "should not launch other tests"),
      debug = true
    )
  }
}
| triggerNZ/intellij-scala | test/org/jetbrains/plugins/scala/testingSupport/scalatest/singleTest/FunSpecSingleTestTest.scala | Scala | apache-2.0 | 735 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// scalastyle:off println
package org.apache.spark.examples.mllib
import org.apache.log4j.{Level, Logger}
import scopt.OptionParser
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.mllib.clustering.KMeans
import org.apache.spark.mllib.linalg.Vectors
/**
 * An example k-means app. Run with
 * {{{
 * ./bin/run-example org.apache.spark.examples.mllib.DenseKMeans [options] <input>
 * }}}
 * If you use it as a template to create your own app, please use `spark-submit` to submit your app.
 */
object DenseKMeans {

  /** Supported cluster-initialization strategies, mapped onto [[KMeans]]' modes in `run`. */
  object InitializationMode extends Enumeration {
    type InitializationMode = Value
    val Random, Parallel = Value
  }

  import InitializationMode._

  /**
   * Command-line parameters.
   *
   * @param input              path(s) to whitespace-separated dense input vectors
   * @param k                  number of clusters (required; -1 means "not set")
   * @param numIterations      maximum number of k-means iterations
   * @param initializationMode how the initial cluster centers are chosen
   */
  case class Params(
      input: String = null,
      k: Int = -1,
      numIterations: Int = 10,
      initializationMode: InitializationMode = Parallel) extends AbstractParams[Params]

  // Explicit `: Unit =` instead of deprecated procedure syntax.
  def main(args: Array[String]): Unit = {
    val defaultParams = Params()

    val parser = new OptionParser[Params]("DenseKMeans") {
      head("DenseKMeans: an example k-means app for dense data.")
      opt[Int]('k', "k")
        .required()
        .text(s"number of clusters, required")
        .action((x, c) => c.copy(k = x))
      opt[Int]("numIterations")
        .text(s"number of iterations, default: ${defaultParams.numIterations}")
        .action((x, c) => c.copy(numIterations = x))
      opt[String]("initMode")
        .text(s"initialization mode (${InitializationMode.values.mkString(",")}), " +
          s"default: ${defaultParams.initializationMode}")
        .action((x, c) => c.copy(initializationMode = InitializationMode.withName(x)))
      arg[String]("<input>")
        .text("input paths to examples")
        .required()
        .action((x, c) => c.copy(input = x))
    }

    parser.parse(args, defaultParams) match {
      case Some(params) => run(params)
      case _ => sys.exit(1)
    }
  }

  /** Loads the input as dense vectors, fits k-means, and prints the clustering cost. */
  def run(params: Params): Unit = {
    val conf = new SparkConf().setAppName(s"DenseKMeans with $params")
    val sc = new SparkContext(conf)

    Logger.getRootLogger.setLevel(Level.WARN)

    // Each line is a whitespace-separated list of doubles; cache since KMeans is iterative.
    val examples = sc.textFile(params.input).map { line =>
      Vectors.dense(line.split(' ').map(_.toDouble))
    }.cache()

    val numExamples = examples.count()

    println(s"numExamples = $numExamples.")

    val initMode = params.initializationMode match {
      case Random => KMeans.RANDOM
      case Parallel => KMeans.K_MEANS_PARALLEL
    }

    val model = new KMeans()
      .setInitializationMode(initMode)
      .setK(params.k)
      .setMaxIterations(params.numIterations)
      .run(examples)

    // Sum of squared distances of points to their nearest center.
    val cost = model.computeCost(examples)

    println(s"Total cost = $cost.")

    sc.stop()
  }
}
// scalastyle:on println
| lhfei/spark-in-action | spark-2.x/src/main/scala/org/apache/spark/examples/mllib/DenseKMeans.scala | Scala | apache-2.0 | 3,643 |
package whilelang
/**
 * Entry point for the While-language verifier: parses a program, lowers it to the
 * minimal language, unrolls loops, generates a verification condition, and checks
 * it with Z3.
 */
object Main {
  def main(args : Array[String]) : Unit = {
    // NOTE(review): execution falls through to args(0) below after reporting —
    // confirm Error(...) terminates the process, otherwise a missing argument
    // still throws ArrayIndexOutOfBoundsException here.
    if(args.length != 1) {
      Error("Please provide an input file.")
    }
    import java.io.{FileInputStream,IOException}
    val parser = new Parser
    try {
      val in = new FileInputStream(args(0))
      // Stage 1: parse the input program.
      val parsed = parser.parseInputStream(in)
      println("Original program:")
      println("*****************\\n")
      TreePrinter(parsed)
      println("*****************\\n")
      // Stage 2: lower to the minimal core language.
      val minimal = Conversion.convert(parsed)
      println("Minimal language program:")
      println("*****************\\n")
      TreePrinter(minimal)
      println("*****************\\n")
      // Stage 3: bounded loop unrolling (depth 3).
      val unrolled = Unrolling.unroll(minimal, 3)
      println("Unrolled program:")
      println("*****************\\n")
      TreePrinter(unrolled)
      println("*****************\\n")
      // Stage 4: compositional verification-condition generation.
      val formula = VCGen.compoVCG(unrolled)
      println("Formula for unrolled program:")
      println("*****************\\n")
      println(formula);
      println("*****************\\n")
      println("Z3 output:")
      println("*****************\\n")
      // SAT means a violating execution exists, i.e. the program is incorrect.
      Formulas.isSat(formula) match {
        case Some(true) => println("Program incorrect!")
        case Some(false) => println("Program correct!")
        case None => println("Error invoking Z3, please review the paramters and input")
      }
      println("*****************\\n")
    } catch {
      case e: IOException => Error(e.getMessage)
    }
  }
}
| sana/WorkAtEPFL | WhiteLanguageProgramVerifier/compiler/src/whilelang/Main.scala | Scala | gpl-3.0 | 1,473 |
/***********************************************************************
* Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.convert2.transforms
import org.apache.commons.lang3.StringUtils
import scala.util.matching.Regex
/**
 * Registers the string-manipulation transformer functions available to GeoMesa
 * converter definitions (strip/trim/case/concat/regex/printf, etc.).
 *
 * All functions receive their arguments as `Array[Any]` and cast positionally;
 * a wrong argument type surfaces as a ClassCastException at evaluation time.
 */
class StringFunctionFactory extends TransformerFunctionFactory {

  override def functions: Seq[TransformerFunction] =
    Seq(stripQuotes, strip, stripPrefix, stripSuffix, replace, remove,
      strLen, trim, capitalize, lowercase, uppercase, regexReplace, concat,
      substr, string, mkstring, emptyToNull, printf)

  // toString(any): string rendering of the first argument.
  private val string = TransformerFunction.pure("toString") { args =>
    args(0).toString
  }

  // stripQuotes(s): removes surrounding single or double quotes.
  private val stripQuotes = TransformerFunction.pure("stripQuotes") { args =>
    StringUtils.strip(args(0).asInstanceOf[String], "'\\"")
  }

  // strip(s[, chars]): one arg strips whitespace, two args strip the given chars.
  private val strip = TransformerFunction.pure("strip") { args =>
    if (args.length == 1) {
      StringUtils.strip(args(0).asInstanceOf[String])
    } else {
      val toStrip = args(1).asInstanceOf[String]
      StringUtils.strip(args(0).asInstanceOf[String], toStrip)
    }
  }

  // stripPrefix(s, chars): strips any of `chars` from the start of `s`.
  private val stripPrefix = TransformerFunction.pure("stripPrefix") { args =>
    val toStrip = args(1).asInstanceOf[String]
    StringUtils.stripStart(args(0).asInstanceOf[String], toStrip)
  }

  // stripSuffix(s, chars): strips any of `chars` from the end of `s`.
  private val stripSuffix = TransformerFunction.pure("stripSuffix") { args =>
    val toStrip = args(1).asInstanceOf[String]
    StringUtils.stripEnd(args(0).asInstanceOf[String], toStrip)
  }

  // replace(s, target, replacement): literal (non-regex) replacement of all occurrences.
  private val replace = TransformerFunction.pure("replace") { args =>
    val toRemove = args(1).asInstanceOf[String]
    val replacement = args(2).asInstanceOf[String]
    args(0).asInstanceOf[String].replaceAllLiterally(toRemove, replacement)
  }

  // remove(s, target): deletes all literal occurrences of `target` from `s`.
  private val remove = TransformerFunction.pure("remove") { args =>
    val toRemove = args(1).asInstanceOf[String]
    StringUtils.remove(args(0).asInstanceOf[String], toRemove)
  }

  private val trim = TransformerFunction.pure("trim") { args =>
    args(0).asInstanceOf[String].trim
  }

  // capitalize(s): upper-cases only the first character.
  private val capitalize = TransformerFunction.pure("capitalize") { args =>
    args(0).asInstanceOf[String].capitalize
  }

  private val lowercase = TransformerFunction.pure("lowercase") { args =>
    args(0).asInstanceOf[String].toLowerCase
  }

  private val uppercase = TransformerFunction.pure("uppercase") { args =>
    args(0).asInstanceOf[String].toUpperCase
  }

  // concat(a, b, ...): joins the string renderings of all arguments with no separator.
  private val concat = TransformerFunction.pure("concat", "concatenate") { args =>
    args.map(_.toString).mkString
  }

  // mkstring(sep, a, b, ...): the *first* argument is the separator.
  private val mkstring = TransformerFunction.pure("mkstring") { args =>
    args.drop(1).map(_.toString).mkString(args(0).toString)
  }

  // emptyToNull(s): maps null/empty/whitespace-only input to null, else passes through.
  private val emptyToNull = TransformerFunction.pure("emptyToNull") { args =>
    Option(args(0)).map(_.toString).filterNot(_.trim.isEmpty).orNull
  }

  // regexReplace(regex, replacement, target): note the argument order —
  // the string being rewritten is the *third* argument.
  private val regexReplace = TransformerFunction.pure("regexReplace") { args =>
    args(0).asInstanceOf[Regex].replaceAllIn(args(2).asInstanceOf[String], args(1).asInstanceOf[String])
  }

  // substr(s, begin, end): half-open [begin, end) index range, like String.substring.
  private val substr = TransformerFunction.pure("substr", "substring") { args =>
    args(0).asInstanceOf[String].substring(args(1).asInstanceOf[Int], args(2).asInstanceOf[Int])
  }

  private val strLen = TransformerFunction.pure("strlen", "stringLength", "length") { args =>
    args(0).asInstanceOf[String].length
  }

  // printf(fmt, a, b, ...): java.util.Formatter semantics.
  // NOTE(review): uses the JVM default locale — confirm locale-sensitive formats are acceptable.
  private val printf = TransformerFunction.pure("printf") { args =>
    String.format(args(0).toString, args.drop(1).asInstanceOf[Array[AnyRef]]: _*)
  }
}
| aheyne/geomesa | geomesa-convert/geomesa-convert-common/src/main/scala/org/locationtech/geomesa/convert2/transforms/StringFunctionFactory.scala | Scala | apache-2.0 | 3,825 |
package edu.gemini.qv.plugin.filter.core
import edu.gemini.pot.sp.SPComponentType
import edu.gemini.qv.plugin.QvStore
import edu.gemini.qv.plugin.QvStore.{BarChart, Histogram, Table}
import edu.gemini.qv.plugin.chart.Axis
import edu.gemini.qv.plugin.filter.core.Filter._
import scala.xml.Node
object FilterXMLFormatter {
  /** Store everything currently held in [[QvStore]]. */
  def formatAll: Node = formatSome(QvStore.filters, QvStore.axes, QvStore.histograms, QvStore.tables, QvStore.visCharts)

  /** Store some data.
    * Only store customer made elements, don't store pre-defined/default ones.
    * This makes sure a fail proof minimal set of axes, charts and tables is always available.
    * @return
    */
  // TODO: The dynamic on-the-fly axes should become part of the default axes, so we don't need to treat them separately.
  def formatSome(filters: Seq[FilterSet] = Seq(), axes: Seq[Axis] = Seq(), histograms: Seq[Histogram] = Seq(), tables: Seq[Table] = Seq(), barCharts: Seq[BarChart] = Seq()): Node =
    <qvTool>
      <filters>{filters.filterNot(QvStore.DefaultFilters.contains(_)).map(format)}</filters>
      <axes>{axes.filterNot((QvStore.DefaultAxes ++ Axis.Dynamics).contains(_)).map(format)}</axes>
      <histograms>{histograms.filterNot(QvStore.DefaultHistograms.contains(_)).map(_.toXml)}</histograms>
      <tables>{tables.filterNot(QvStore.DefaultTables.contains(_)).map(_.toXml)}</tables>
      <barcharts>{barCharts.filterNot(QvStore.DefaultBarCharts.contains(_)).map(_.toXml)}</barcharts>
    </qvTool>
  /** Serializes a named filter set to its `<filter>` XML element. */
  def format(filterSet: FilterSet): Node = {
    <filter>
      <name>{filterSet.label}</name>
      <filterset>
        { filterSet.filters.map(format) }
      </filterset>
    </filter>
  }
  /** Serializes an axis (a labelled sequence of filter groups) to its `<axis>` XML element. */
  def format(axis: Axis): Node = {
    <axis>
      <name>{axis.label}</name>
      <filtergroups>
        { axis.groups.map(format) }
      </filtergroups>
    </axis>
  }
  /**
   * Serializes a single [[Filter]] to its XML element.
   *
   * Each filter type maps to a dedicated element name; these names are the
   * persistence contract with the corresponding XML reader, so they must not be
   * renamed without migrating previously stored files. Unknown filters fail fast.
   */
  def format(input: Filter): Node = {
    // Functions to create inner nodes for standardization and easy maintainability.
    def makeSetNode[A](valueSet: Set[A]) = <set>{valueSet.map(x => <elem>{x.toString}</elem>)}</set>
    def makeRangeNode[A](low: A, high: A) = <range><min>{low.toString}</min><max>{high.toString}</max></range>
    // Uses the enum's stable `name` rather than toString, so display-label changes don't break persistence.
    def makeEnumSetNode[A](valueSet: Set[A]) = <set>{valueSet.map(x => <elem>{x.asInstanceOf[Enum[_]].name}</elem>)}</set>
    input match {
      case RA(min, max) => <rafilter>{makeRangeNode(min, max)}</rafilter>
      case Dec(min, max) => <decfilter>{makeRangeNode(min, max)}</decfilter>
      case IsNonSidereal(value) => <isnonsidereal><boolvalue>{toBoolean(value)}</boolvalue></isnonsidereal>
      case IsActive(value) => <isactive><boolvalue>{toBoolean(value)}</boolvalue></isactive>
      case IsCompleted(value) => <iscompleted><boolvalue>{toBoolean(value)}</boolvalue></iscompleted>
      case IsRollover(value) => <rolloverfilter><boolvalue>{toBoolean(value)}</boolvalue></rolloverfilter>
      case HasTimingConstraints(value) => <timingConstraints><boolvalue>{toBoolean(value)}</boolvalue></timingConstraints>
      case HasElevationConstraints(value) => <elevationConstraints><boolvalue>{toBoolean(value)}</boolvalue></elevationConstraints>
      case HasPreImaging(value) => <preImaging><boolvalue>{toBoolean(value)}</boolvalue></preImaging>
      case HasDummyTarget(value) => <dummyTarget><boolvalue>{toBoolean(value)}</boolvalue></dummyTarget>
      case ProgId(value) => <progidfilter><id>{value}</id></progidfilter>
      case ProgPi(value) => <progpifilter><id>{value}</id></progpifilter>
      case ProgContact(value) => <progcontactfilter><id>{value}</id></progcontactfilter>
      case ObsId(value) => <obsidfilter><id>{value}</id></obsidfilter>
      case RemainingNights(_, min, max, enabled, cur, next) => <remnightsfilter>{makeRangeNode(min, max)}<enabled>{enabled}</enabled><cur>{cur}</cur><next>{next}</next></remnightsfilter>
      case RemainingHours(_, min, max, enabled, cur, next) => <remhoursfilter>{makeRangeNode(min, max)}<enabled>{enabled}</enabled><cur>{cur}</cur><next>{next}</next></remhoursfilter>
      case RemainingHoursFraction(_, min, max, enabled, cur, next) => <remhoursfracfilter>{makeRangeNode(min, max)}<enabled>{enabled}</enabled><cur>{cur}</cur><next>{next}</next></remhoursfracfilter>
      case SetTime(_, min, max) => <settimefilter>{makeRangeNode(min, max)}</settimefilter>
      // EnumFilters are discriminated by their display label — keep these strings in sync with the filter definitions.
      case EnumFilter("Semesters", _, _, selection, _) => <semesterfilter>{makeSetNode(selection)}</semesterfilter>
      case EnumFilter("Partner", _, _, selection, _) => <partnerfilter>{makeSetNode(selection.map(toPartner))}</partnerfilter>
      case EnumFilter("AO", _, _, selection, _) => <aofilter>{makeSetNode(selection)}</aofilter>
      case EnumFilter("Band", _, _, selection, _) => <bandfilter>{makeEnumSetNode(selection)}</bandfilter>
      case EnumFilter("Instruments", _, _, selection, _) => <instrumentfilter>{makeEnumSetNode(selection)}</instrumentfilter>
      case EnumFilter("Priority", _, _, selection, _) => <priorityfilter>{makeEnumSetNode(selection)}</priorityfilter>
      case EnumFilter("Image Quality", _, _, selection, _) => <iqfilter>{makeEnumSetNode(selection)}</iqfilter>
      case EnumFilter("Cloud Cover", _, _, selection, _) => <ccfilter>{makeEnumSetNode(selection)}</ccfilter>
      case EnumFilter("Water Vapor", _, _, selection, _) => <wvfilter>{makeEnumSetNode(selection)}</wvfilter>
      case EnumFilter("Sky Background", _, _, selection, _) => <sbfilter>{makeEnumSetNode(selection)}</sbfilter>
      case EnumFilter("TOO Type", _, _, selection, _) => <toofilter>{makeEnumSetNode(selection)}</toofilter>
      case EnumFilter("Program Types", _, _, selection, _) => <typefilter>{makeSetNode(selection)}</typefilter>
      case EnumFilter("Observation Status", _, _, selection, _) => <obsStatus>{makeEnumSetNode(selection)}</obsStatus>
      case EnumFilter("Observation Class", _, _, selection, _) => <obsClass>{makeEnumSetNode(selection)}</obsClass>
      // GMOSN
      // *** NOTE: Bizarrely, if we use InstGmosNorth.SP_TYPE instead of the exact same value SPComponentType.Instrument_GMOS,
      // we get "warning: unreachable code". This can be remedied instead by changing this to:
      //   case ConfigurationFilter(_, tp, "Dispersers", _, selection, _) if tp == InstGmosNorth.SP_TYPE
      // as well, but this solution is not particularly elegant either.
      case ConfigurationFilter(_, SPComponentType.INSTRUMENT_GMOS, "Dispersers", _, selection, _) => <gmosndisp>{makeEnumSetNode(selection)}</gmosndisp>
      case ConfigurationFilter(_, SPComponentType.INSTRUMENT_GMOS, "Filters", _, selection, _) => <gmosnfilt>{makeEnumSetNode(selection)}</gmosnfilt>
      case ConfigurationFilter(_, SPComponentType.INSTRUMENT_GMOS, "Focal Planes", _, selection, _) => <gmosnfocplane>{makeEnumSetNode(selection)}</gmosnfocplane>
      case ConfigurationFilter(_, SPComponentType.INSTRUMENT_GMOS, "CCD", _, selection, _) => <gmosnccd>{makeEnumSetNode(selection)}</gmosnccd>
      // GMOSS
      case ConfigurationFilter(_, SPComponentType.INSTRUMENT_GMOSSOUTH, "Dispersers", _, selection, _) => <gmossdisp>{makeEnumSetNode(selection)}</gmossdisp>
      case ConfigurationFilter(_, SPComponentType.INSTRUMENT_GMOSSOUTH, "Filters", _, selection, _) => <gmossfilt>{makeEnumSetNode(selection)}</gmossfilt>
      case ConfigurationFilter(_, SPComponentType.INSTRUMENT_GMOSSOUTH, "Focal Planes", _, selection, _) => <gmossfocplane>{makeEnumSetNode(selection)}</gmossfocplane>
      case ConfigurationFilter(_, SPComponentType.INSTRUMENT_GMOSSOUTH, "CCD", _, selection, _) => <gmossccd>{makeEnumSetNode(selection)}</gmossccd>
      // GNIRS
      case ConfigurationFilter(_, SPComponentType.INSTRUMENT_GNIRS, "Dispersers", _, selection, _) => <gnirsdisp>{makeEnumSetNode(selection)}</gnirsdisp>
      case ConfigurationFilter(_, SPComponentType.INSTRUMENT_GNIRS, "Filters", _, selection, _) => <gnirsfilt>{makeEnumSetNode(selection)}</gnirsfilt>
      case ConfigurationFilter(_, SPComponentType.INSTRUMENT_GNIRS, "Cross Dispersers", _, selection, _) => <gnirscrossdisp>{makeEnumSetNode(selection)}</gnirscrossdisp>
      case ConfigurationFilter(_, SPComponentType.INSTRUMENT_GNIRS, "Cameras", _, selection, _) => <gnirscam>{makeEnumSetNode(selection)}</gnirscam>
      case ConfigurationFilter(_, SPComponentType.INSTRUMENT_GNIRS, "Focal Planes", _, selection, _) => <gnirsfocplane>{makeEnumSetNode(selection)}</gnirsfocplane>
      // GSAOI
      case ConfigurationFilter(_, SPComponentType.INSTRUMENT_GSAOI, "Filters", _, selection, _) => <gsaoifilt>{makeEnumSetNode(selection)}</gsaoifilt>
      // F2
      case ConfigurationFilter(_, SPComponentType.INSTRUMENT_FLAMINGOS2, "Dispersers", _, selection, _) => <f2disp>{makeEnumSetNode(selection)}</f2disp>
      case ConfigurationFilter(_, SPComponentType.INSTRUMENT_FLAMINGOS2, "Filters", _, selection, _) => <f2filt>{makeEnumSetNode(selection)}</f2filt>
      case ConfigurationFilter(_, SPComponentType.INSTRUMENT_FLAMINGOS2, "Focal Planes", _, selection, _) => <f2focplane>{makeEnumSetNode(selection)}</f2focplane>
      // NIFS
      case ConfigurationFilter(_, SPComponentType.INSTRUMENT_NIFS, "Dispersers", _, selection, _) => <nifsdisp>{makeEnumSetNode(selection)}</nifsdisp>
      case ConfigurationFilter(_, SPComponentType.INSTRUMENT_NIFS, "Filters", _, selection, _) => <nifsfilt>{makeEnumSetNode(selection)}</nifsfilt>
      case ConfigurationFilter(_, SPComponentType.INSTRUMENT_NIFS, "Masks", _, selection, _) => <nifsmask>{makeEnumSetNode(selection)}</nifsmask>
      // NICI
      case ConfigurationFilter(_, SPComponentType.INSTRUMENT_NICI, "Focal Planes", _, selection, _) => <nicifocplane>{makeEnumSetNode(selection)}</nicifocplane>
      case ConfigurationFilter(_, SPComponentType.INSTRUMENT_NICI, "Dichroic Wheel", _, selection, _) => <niciwheel>{makeEnumSetNode(selection)}</niciwheel>
      case ConfigurationFilter(_, SPComponentType.INSTRUMENT_NICI, "Filter Red Channel", _, selection, _) => <nicired>{makeEnumSetNode(selection)}</nicired>
      case ConfigurationFilter(_, SPComponentType.INSTRUMENT_NICI, "Filter Blue Channel", _, selection, _) => <niciblue>{makeEnumSetNode(selection)}</niciblue>
      // NIRI
      case ConfigurationFilter(_, SPComponentType.INSTRUMENT_NIRI, "Dispersers", _, selection, _) => <niridisp>{makeEnumSetNode(selection)}</niridisp>
      case ConfigurationFilter(_, SPComponentType.INSTRUMENT_NIRI, "Filters", _, selection, _) => <nirifilt>{makeEnumSetNode(selection)}</nirifilt>
      case ConfigurationFilter(_, SPComponentType.INSTRUMENT_NIRI, "Cameras", _, selection, _) => <niricam>{makeEnumSetNode(selection)}</niricam>
      case ConfigurationFilter(_, SPComponentType.INSTRUMENT_NIRI, "Masks", _, selection, _) => <nirimask>{makeEnumSetNode(selection)}</nirimask>
      // TEXES
      case ConfigurationFilter(_, SPComponentType.INSTRUMENT_TEXES, "Dispersers", _, selection, _) => <texesdisp>{makeEnumSetNode(selection)}</texesdisp>
      case EmptyFilter(name) => <filterall><name>{name}</name></filterall>
      case FilterOr(a,b) => <or><first>{FilterXMLFormatter.format(a)}</first><second>{FilterXMLFormatter.format(b)}</second></or>
      case FilterAnd(a,b) => <and><first>{FilterXMLFormatter.format(a)}</first><second>{FilterXMLFormatter.format(b)}</second></and>
      // fail
      case f => throw new IllegalArgumentException("did not recognize filter " + f)
    }
  }
def toBoolean(value: Option[Boolean]): String =
value match {
case None => "none"
case Some(true) => "true"
case Some(false) => "false"
}
  /** Serializes a partner selection: the affiliate's name, or "none" when unaffiliated. */
  def toPartner(partner: Any): String =
    partner match {
      case Partner(None) => "none"
      case Partner(Some(affiliate)) => affiliate.toString
      case _ => throw new IllegalArgumentException("unexpected partner")
    }
} | arturog8m/ocs | bundle/edu.gemini.qv.plugin/src/main/scala/edu/gemini/qv/plugin/filter/core/FilterXMLFormatter.scala | Scala | bsd-3-clause | 11,969 |
package im.actor.server.api.rpc.calls
import java.math.BigInteger
import java.security.MessageDigest
import scala.concurrent.{ ExecutionContext, Future }
import akka.actor.ActorSystem
import slick.driver.PostgresDriver.api._
import im.actor.api.rpc._
import im.actor.api.rpc.calls.{ CallsService, ResponseGetVoxUser, ResponseInitVoxSupport }
import im.actor.api.rpc.peers.UserOutPeer
import im.actor.server.{ models, persist }
import im.actor.server.util.ACLUtils
import im.actor.server.voximplant.VoxImplant
/**
 * RPC service backing VoxImplant-based calls: resolves the VoxImplant account of a
 * peer and lazily provisions an account for the current user on first use.
 *
 * Fix(review): restored ASCII arrows — the previous revision contained mojibake
 * ("β") in place of "=>" / "<-", which does not compile.
 */
final class CallsServiceImpl(voximplant: VoxImplant)(implicit db: Database, actorSystem: ActorSystem) extends CallsService {

  import PeerHelpers._

  override implicit val ec: ExecutionContext = actorSystem.dispatcher

  /** Looks up the VoxImplant username of `userPeer`, validating the peer's access hash first. */
  override def jhandleGetVoxUser(userPeer: UserOutPeer, clientData: ClientData): Future[HandlerResult[ResponseGetVoxUser]] = {
    val authorizedAction = requireAuth(clientData) map { implicit client =>
      withUserOutPeer(userPeer) {
        persist.voximplant.VoxUser.findByUserId(userPeer.userId) map {
          case Some(voxUser) => Ok(ResponseGetVoxUser(s"${voxUser.userName}@${voximplant.appName}"))
          case None => Error(CommonErrors.UserNotFound)
        }
      }
    }
    db.run(toDBIOAction(authorizedAction))
  }

  /** Returns the client's VoxImplant credentials, creating and binding the account on first use. */
  override def jhandleInitVoxSupport(clientData: ClientData): Future[HandlerResult[ResponseInitVoxSupport]] = {
    val authorizedAction = requireAuth(clientData) map { client =>
      persist.voximplant.VoxUser.findByUserId(client.userId) flatMap {
        case Some(voxUser) =>
          // Existing account: password is re-derived from the stored salt, never persisted.
          DBIO.successful(Ok(ResponseInitVoxSupport(voxUser.userName, genPassword(voxUser.salt))))
        case None =>
          val voxUsername = s"user_${client.userId}"
          val salt = ACLUtils.nextAccessSalt()
          val password = genPassword(salt)
          for {
            dbUserName <- persist.User.findName(client.userId) map (_.getOrElse(s"User ${client.userId}"))
            voxUserId <- DBIO.from(voximplant.addUser(voxUsername, password, dbUserName))
            _ <- DBIO.from(voximplant.bindUser(voxUserId))
            voxUser = models.voximplant.VoxUser(client.userId, voxUserId, voxUsername, dbUserName, salt)
            _ <- persist.voximplant.VoxUser.create(voxUser)
          } yield Ok(ResponseInitVoxSupport(voxUser.userName, password))
      }
    }
    // Run the lookup/creation atomically so a half-created account is never persisted.
    db.run(toDBIOAction(authorizedAction map (_.transactionally)))
  }

  /** Derives a deterministic hex password from the per-user salt and the server secret (SHA-256). */
  private def genPassword(salt: String): String = {
    val secret = ACLUtils.secretKey()
    val seed = s"${salt}:${secret}"
    val md = MessageDigest.getInstance("SHA-256")
    md.update(seed.getBytes("UTF-8"))
    val bi = new BigInteger(1, md.digest())
    // Hex encoding of the digest (BigInteger drops leading zero nibbles).
    bi.toString(16)
  }
}
| boneyao/actor-platform | actor-server/actor-rpc-api/src/main/scala/im/actor/server/api/rpc/service/calls/CallsServiceImpl.scala | Scala | mit | 2,717 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.csv
import java.io.InputStream
import java.math.BigDecimal
import scala.util.Try
import scala.util.control.NonFatal
import com.univocity.parsers.csv.CsvParser
import org.apache.spark.internal.Logging
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.GenericInternalRow
import org.apache.spark.sql.catalyst.util.{BadRecordException, DateTimeUtils}
import org.apache.spark.sql.execution.datasources.FailureSafeParser
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String
/**
 * Parses CSV records into Spark [[InternalRow]]s using the univocity CSV tokenizer.
 *
 * `schema` describes the full CSV column layout; `requiredSchema` is the (possibly
 * reordered) subset of columns the query actually needs. Tokens are converted
 * per-field by pre-built `ValueConverter`s and written into a single reused row.
 */
class UnivocityParser(
    schema: StructType,
    requiredSchema: StructType,
    val options: CSVOptions) extends Logging {
  require(requiredSchema.toSet.subsetOf(schema.toSet),
    "requiredSchema should be the subset of schema.")

  def this(schema: StructType, options: CSVOptions) = this(schema, schema, options)

  // A `ValueConverter` is responsible for converting the given value to a desired type.
  private type ValueConverter = String => Any

  private val tokenizer = new CsvParser(options.asParserSettings)

  // Reused output row; `convert` overwrites it for every record (not thread-safe).
  private val row = new GenericInternalRow(requiredSchema.length)

  // Retrieve the raw record string.
  private def getCurrentInput: UTF8String = {
    UTF8String.fromString(tokenizer.getContext.currentParsedContent().stripLineEnd)
  }

  // This parser first picks some tokens from the input tokens, according to the required schema,
  // then parse these tokens and put the values in a row, with the order specified by the required
  // schema.
  //
  // For example, let's say there is CSV data as below:
  //
  //   a,b,c
  //   1,2,A
  //
  // So the CSV data schema is: ["a", "b", "c"]
  // And let's say the required schema is: ["c", "b"]
  //
  // with the input tokens,
  //
  //   input tokens - [1, 2, "A"]
  //
  // Each input token is placed in each output row's position by mapping these. In this case,
  //
  //   output row - ["A", 2]
  private val valueConverters: Array[ValueConverter] =
    schema.map(f => makeConverter(f.name, f.dataType, f.nullable, options)).toArray

  // For each required field, the index of the corresponding token in the input record.
  private val tokenIndexArr: Array[Int] = {
    requiredSchema.map(f => schema.indexOf(f)).toArray
  }

  /**
   * Create a converter which converts the string value to a value according to a desired type.
   * Currently, we do not support complex types (`ArrayType`, `MapType`, `StructType`).
   *
   * For other nullable types, returns null if it is null or equals to the value specified
   * in `nullValue` option.
   */
  def makeConverter(
      name: String,
      dataType: DataType,
      nullable: Boolean = true,
      options: CSVOptions): ValueConverter = dataType match {
    case _: ByteType => (d: String) =>
      nullSafeDatum(d, name, nullable, options)(_.toByte)

    case _: ShortType => (d: String) =>
      nullSafeDatum(d, name, nullable, options)(_.toShort)

    case _: IntegerType => (d: String) =>
      nullSafeDatum(d, name, nullable, options)(_.toInt)

    case _: LongType => (d: String) =>
      nullSafeDatum(d, name, nullable, options)(_.toLong)

    // The `options.*` patterns below are stable-identifier matches: a token equal to
    // the configured nan/inf representation maps to the corresponding special value.
    case _: FloatType => (d: String) =>
      nullSafeDatum(d, name, nullable, options) {
        case options.nanValue => Float.NaN
        case options.negativeInf => Float.NegativeInfinity
        case options.positiveInf => Float.PositiveInfinity
        case datum => datum.toFloat
      }

    case _: DoubleType => (d: String) =>
      nullSafeDatum(d, name, nullable, options) {
        case options.nanValue => Double.NaN
        case options.negativeInf => Double.NegativeInfinity
        case options.positiveInf => Double.PositiveInfinity
        case datum => datum.toDouble
      }

    case _: BooleanType => (d: String) =>
      nullSafeDatum(d, name, nullable, options)(_.toBoolean)

    case dt: DecimalType => (d: String) =>
      nullSafeDatum(d, name, nullable, options) { datum =>
        // Strip thousands separators before parsing.
        val value = new BigDecimal(datum.replaceAll(",", ""))
        Decimal(value, dt.precision, dt.scale)
      }

    case _: TimestampType => (d: String) =>
      nullSafeDatum(d, name, nullable, options) { datum =>
        // This one will lose microseconds parts.
        // See https://issues.apache.org/jira/browse/SPARK-10681.
        Try(options.timestampFormat.parse(datum).getTime * 1000L)
          .getOrElse {
            // If it fails to parse, then tries the way used in 2.0 and 1.x for backwards
            // compatibility.
            DateTimeUtils.stringToTime(datum).getTime * 1000L
          }
      }

    case _: DateType => (d: String) =>
      nullSafeDatum(d, name, nullable, options) { datum =>
        // This one will lose microseconds parts.
        // See https://issues.apache.org/jira/browse/SPARK-10681.x
        Try(DateTimeUtils.millisToDays(options.dateFormat.parse(datum).getTime))
          .getOrElse {
            // If it fails to parse, then tries the way used in 2.0 and 1.x for backwards
            // compatibility.
            DateTimeUtils.millisToDays(DateTimeUtils.stringToTime(datum).getTime)
          }
      }

    case _: StringType => (d: String) =>
      nullSafeDatum(d, name, nullable, options)(UTF8String.fromString)

    // Fix(review): the previous code wrapped this in an extra lambda
    // `(datum: String) => makeConverter(...)`, so the UDT converter returned the
    // nested converter itself instead of the converted value. Delegate directly
    // to the converter for the UDT's underlying SQL type.
    case udt: UserDefinedType[_] =>
      makeConverter(name, udt.sqlType, nullable, options)

    // We don't actually hit this exception though, we keep it for understandability
    case _ => throw new RuntimeException(s"Unsupported type: ${dataType.typeName}")
  }

  /**
   * Applies `converter` to `datum`, mapping the configured null representation (and
   * real nulls) to null; raises if the field is non-nullable.
   */
  private def nullSafeDatum(
      datum: String,
      name: String,
      nullable: Boolean,
      options: CSVOptions)(converter: ValueConverter): Any = {
    if (datum == options.nullValue || datum == null) {
      if (!nullable) {
        throw new RuntimeException(s"null value found but field $name is not nullable.")
      }
      null
    } else {
      converter.apply(datum)
    }
  }

  /**
   * Parses a single CSV string and turns it into either one resulting row or no row (if the
   * the record is malformed).
   */
  def parse(input: String): InternalRow = convert(tokenizer.parseLine(input))

  private def convert(tokens: Array[String]): InternalRow = {
    if (tokens.length != schema.length) {
      // If the number of tokens doesn't match the schema, we should treat it as a malformed record.
      // However, we still have chance to parse some of the tokens, by adding extra null tokens in
      // the tail if the number is smaller, or by dropping extra tokens if the number is larger.
      val checkedTokens = if (schema.length > tokens.length) {
        tokens ++ new Array[String](schema.length - tokens.length)
      } else {
        tokens.take(schema.length)
      }
      def getPartialResult(): Option[InternalRow] = {
        try {
          Some(convert(checkedTokens))
        } catch {
          case _: BadRecordException => None
        }
      }
      // For records with less or more tokens than the schema, tries to return partial results
      // if possible.
      throw BadRecordException(
        () => getCurrentInput,
        () => getPartialResult(),
        new RuntimeException("Malformed CSV record"))
    } else {
      try {
        // Convert only the required fields, reading each from its original token position.
        var i = 0
        while (i < requiredSchema.length) {
          val from = tokenIndexArr(i)
          row(i) = valueConverters(from).apply(tokens(from))
          i += 1
        }
        row
      } catch {
        case NonFatal(e) =>
          // For corrupted records with the number of tokens same as the schema,
          // CSV reader doesn't support partial results. All fields other than the field
          // configured by `columnNameOfCorruptRecord` are set to `null`.
          throw BadRecordException(() => getCurrentInput, () => None, e)
      }
    }
  }
}
private[csv] object UnivocityParser {

  /**
   * Parses a stream that contains CSV strings and turns it into an iterator of tokens.
   */
  def tokenizeStream(
      inputStream: InputStream,
      shouldDropHeader: Boolean,
      tokenizer: CsvParser): Iterator[Array[String]] = {
    convertStream(inputStream, shouldDropHeader, tokenizer)(tokens => tokens)
  }

  /**
   * Parses a stream that contains CSV strings and turns it into an iterator of rows.
   */
  def parseStream(
      inputStream: InputStream,
      shouldDropHeader: Boolean,
      parser: UnivocityParser,
      schema: StructType): Iterator[InternalRow] = {
    val tokenizer = parser.tokenizer
    // Wrap convert() in a FailureSafeParser configured with the parse mode and
    // corrupt-record column from the parser's options.
    val safeParser = new FailureSafeParser[Array[String]](
      input => Seq(parser.convert(input)),
      parser.options.parseMode,
      schema,
      parser.options.columnNameOfCorruptRecord)
    convertStream(inputStream, shouldDropHeader, tokenizer) { tokens =>
      safeParser.parse(tokens)
    }.flatten
  }

  // One-record lookahead iterator over the tokenized stream; a null `nextRecord`
  // is treated as end of stream.
  private def convertStream[T](
      inputStream: InputStream,
      shouldDropHeader: Boolean,
      tokenizer: CsvParser)(convert: Array[String] => T) = new Iterator[T] {
    tokenizer.beginParsing(inputStream)
    private var nextRecord = {
      if (shouldDropHeader) {
        // Consume and discard the header record before buffering the first data record.
        tokenizer.parseNext()
      }
      tokenizer.parseNext()
    }

    override def hasNext: Boolean = nextRecord != null

    override def next(): T = {
      if (!hasNext) {
        throw new NoSuchElementException("End of stream")
      }
      val curRecord = convert(nextRecord)
      nextRecord = tokenizer.parseNext()
      curRecord
    }
  }

  /**
   * Parses an iterator that contains CSV strings and turns it into an iterator of rows.
   */
  def parseIterator(
      lines: Iterator[String],
      shouldDropHeader: Boolean,
      parser: UnivocityParser,
      schema: StructType): Iterator[InternalRow] = {
    val options = parser.options
    val linesWithoutHeader = if (shouldDropHeader) {
      // Note that if there are only comments in the first block, the header would
      // probably not be dropped.
      CSVUtils.dropHeaderLine(lines, options)
    } else {
      lines
    }
    // Strip comment lines and empty lines before parsing.
    val filteredLines: Iterator[String] =
      CSVUtils.filterCommentAndEmpty(linesWithoutHeader, options)
    val safeParser = new FailureSafeParser[String](
      input => Seq(parser.parse(input)),
      parser.options.parseMode,
      schema,
      parser.options.columnNameOfCorruptRecord)
    filteredLines.flatMap(safeParser.parse)
  }
}
| szhem/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/csv/UnivocityParser.scala | Scala | apache-2.0 | 11,048 |
package dotty.tools.dotc
package core
import Types._, Symbols._, Contexts._
import printing.Printer
import printing.Texts.Text
object Constants {

  // Tags identifying the dynamic type of a Constant's value.  The numeric tags
  // ByteTag..DoubleTag are consecutive so the is*Range predicates below can be
  // expressed as inclusive tag intervals.
  final val NoTag = 0
  final val UnitTag = 1
  final val BooleanTag = 2
  final val ByteTag = 3
  final val ShortTag = 4
  final val CharTag = 5
  final val IntTag = 6
  final val LongTag = 7
  final val FloatTag = 8
  final val DoubleTag = 9
  final val StringTag = 10
  final val NullTag = 11
  final val ClazzTag = 12
  // For supporting java enumerations inside java annotations (see ClassfileParser)
  final val EnumTag = 13

  /**
   * A compile-time constant value paired with a tag recording its type.
   * Offers widening/narrowing accessors (e.g. [[intValue]]) and [[convertTo]]
   * for adapting the constant to an expected type.
   */
  class Constant(val value: Any, val tag: Int) extends printing.Showable with Product1[Any] {
    import java.lang.Double.doubleToRawLongBits
    import java.lang.Float.floatToRawIntBits

    // Range predicates: whether this constant is numeric and fits the named type.
    def isByteRange: Boolean = isIntRange && Byte.MinValue <= intValue && intValue <= Byte.MaxValue
    def isShortRange: Boolean = isIntRange && Short.MinValue <= intValue && intValue <= Short.MaxValue
    def isCharRange: Boolean = isIntRange && Char.MinValue <= intValue && intValue <= Char.MaxValue
    def isIntRange: Boolean = ByteTag <= tag && tag <= IntTag
    def isLongRange: Boolean = ByteTag <= tag && tag <= LongTag
    def isFloatRange: Boolean = ByteTag <= tag && tag <= FloatTag
    def isNumeric: Boolean = ByteTag <= tag && tag <= DoubleTag
    def isNonUnitAnyVal: Boolean = BooleanTag <= tag && tag <= DoubleTag
    def isAnyVal: Boolean = UnitTag <= tag && tag <= DoubleTag

    /** The type corresponding to this constant's tag. */
    def tpe(implicit ctx: Context): Type = tag match {
      case UnitTag => defn.UnitType
      case BooleanTag => defn.BooleanType
      case ByteTag => defn.ByteType
      case ShortTag => defn.ShortType
      case CharTag => defn.CharType
      case IntTag => defn.IntType
      case LongTag => defn.LongType
      case FloatTag => defn.FloatType
      case DoubleTag => defn.DoubleType
      case StringTag => defn.StringType
      case NullTag => defn.NullType
      case ClazzTag => defn.ClassType(typeValue)
      case EnumTag => defn.EnumType(symbolValue)
    }

    /** We need the equals method to take account of tags as well as values.
     */
    override def equals(other: Any): Boolean = other match {
      case that: Constant =>
        this.tag == that.tag && equalHashValue == that.equalHashValue
      case _ => false
    }

    def isNaN: Boolean = value match {
      case f: Float => f.isNaN
      case d: Double => d.isNaN
      case _ => false
    }

    /** The value as a Boolean; throws if the tag is not BooleanTag. */
    def booleanValue: Boolean =
      if (tag == BooleanTag) value.asInstanceOf[Boolean]
      else throw new Error("value " + value + " is not a boolean")

    // Numeric accessors below narrow/widen any numeric constant to the requested
    // primitive, mirroring the primitive conversions of the language.
    def byteValue: Byte = tag match {
      case ByteTag => value.asInstanceOf[Byte]
      case ShortTag => value.asInstanceOf[Short].toByte
      case CharTag => value.asInstanceOf[Char].toByte
      case IntTag => value.asInstanceOf[Int].toByte
      case LongTag => value.asInstanceOf[Long].toByte
      case FloatTag => value.asInstanceOf[Float].toByte
      case DoubleTag => value.asInstanceOf[Double].toByte
      case _ => throw new Error("value " + value + " is not a Byte")
    }

    def shortValue: Short = tag match {
      case ByteTag => value.asInstanceOf[Byte].toShort
      case ShortTag => value.asInstanceOf[Short]
      case CharTag => value.asInstanceOf[Char].toShort
      case IntTag => value.asInstanceOf[Int].toShort
      case LongTag => value.asInstanceOf[Long].toShort
      case FloatTag => value.asInstanceOf[Float].toShort
      case DoubleTag => value.asInstanceOf[Double].toShort
      case _ => throw new Error("value " + value + " is not a Short")
    }

    def charValue: Char = tag match {
      case ByteTag => value.asInstanceOf[Byte].toChar
      case ShortTag => value.asInstanceOf[Short].toChar
      case CharTag => value.asInstanceOf[Char]
      case IntTag => value.asInstanceOf[Int].toChar
      case LongTag => value.asInstanceOf[Long].toChar
      case FloatTag => value.asInstanceOf[Float].toChar
      case DoubleTag => value.asInstanceOf[Double].toChar
      case _ => throw new Error("value " + value + " is not a Char")
    }

    def intValue: Int = tag match {
      case ByteTag => value.asInstanceOf[Byte].toInt
      case ShortTag => value.asInstanceOf[Short].toInt
      case CharTag => value.asInstanceOf[Char].toInt
      case IntTag => value.asInstanceOf[Int]
      case LongTag => value.asInstanceOf[Long].toInt
      case FloatTag => value.asInstanceOf[Float].toInt
      case DoubleTag => value.asInstanceOf[Double].toInt
      case _ => throw new Error("value " + value + " is not an Int")
    }

    def longValue: Long = tag match {
      case ByteTag => value.asInstanceOf[Byte].toLong
      case ShortTag => value.asInstanceOf[Short].toLong
      case CharTag => value.asInstanceOf[Char].toLong
      case IntTag => value.asInstanceOf[Int].toLong
      case LongTag => value.asInstanceOf[Long]
      case FloatTag => value.asInstanceOf[Float].toLong
      case DoubleTag => value.asInstanceOf[Double].toLong
      case _ => throw new Error("value " + value + " is not a Long")
    }

    def floatValue: Float = tag match {
      case ByteTag => value.asInstanceOf[Byte].toFloat
      case ShortTag => value.asInstanceOf[Short].toFloat
      case CharTag => value.asInstanceOf[Char].toFloat
      case IntTag => value.asInstanceOf[Int].toFloat
      case LongTag => value.asInstanceOf[Long].toFloat
      case FloatTag => value.asInstanceOf[Float]
      case DoubleTag => value.asInstanceOf[Double].toFloat
      case _ => throw new Error("value " + value + " is not a Float")
    }

    def doubleValue: Double = tag match {
      case ByteTag => value.asInstanceOf[Byte].toDouble
      case ShortTag => value.asInstanceOf[Short].toDouble
      case CharTag => value.asInstanceOf[Char].toDouble
      case IntTag => value.asInstanceOf[Int].toDouble
      case LongTag => value.asInstanceOf[Long].toDouble
      case FloatTag => value.asInstanceOf[Float].toDouble
      case DoubleTag => value.asInstanceOf[Double]
      case _ => throw new Error("value " + value + " is not a Double")
    }

    /** Convert constant value to conform to given type.
     *  Returns `null` when no conversion applies.
     */
    def convertTo(pt: Type)(implicit ctx: Context): Constant = {
      // Resolve abstract types / type variables down to a class type where possible.
      def classBound(pt: Type): Type = pt.dealias.stripTypeVar match {
        case tref: TypeRef if !tref.symbol.isClass && tref.info.exists =>
          classBound(tref.info.bounds.lo)
        case param: TypeParamRef =>
          ctx.typerState.constraint.entry(param) match {
            case TypeBounds(lo, hi) =>
              if (hi.classSymbol.isPrimitiveValueClass) hi //constrain further with high bound
              else classBound(lo)
            case NoType => classBound(param.binder.paramInfos(param.paramNum).lo)
            case inst => classBound(inst)
          }
        case pt => pt
      }
      val target = classBound(pt).typeSymbol
      if (target == tpe.typeSymbol)
        this
      else if ((target == defn.ByteClass) && isByteRange)
        Constant(byteValue)
      else if (target == defn.ShortClass && isShortRange)
        Constant(shortValue)
      else if (target == defn.CharClass && isCharRange)
        Constant(charValue)
      else if (target == defn.IntClass && isIntRange)
        Constant(intValue)
      else if (target == defn.LongClass && isLongRange)
        Constant(longValue)
      else if (target == defn.FloatClass && isFloatRange)
        Constant(floatValue)
      else if (target == defn.DoubleClass && isNumeric)
        Constant(doubleValue)
      else
        null
    }

    def stringValue: String = value.toString

    def toText(printer: Printer): Text = printer.toText(this)

    def typeValue: Type = value.asInstanceOf[Type]

    def symbolValue: Symbol = value.asInstanceOf[Symbol]

    /**
     * Consider two `NaN`s to be identical, despite non-equality
     * Consider -0d to be distinct from 0d, despite equality
     *
     * We use the raw versions (i.e. `floatToRawIntBits` rather than `floatToIntBits`)
     * to avoid treating different encodings of `NaN` as the same constant.
     * You probably can't express different `NaN` varieties as compile time
     * constants in regular Scala code, but it is conceivable that you could
     * conjure them with a macro.
     */
    private def equalHashValue: Any = value match {
      case f: Float => floatToRawIntBits(f)
      case d: Double => doubleToRawLongBits(d)
      case v => v
    }

    override def hashCode: Int = {
      import scala.util.hashing.MurmurHash3._
      val seed = 17
      var h = seed
      h = mix(h, tag.##) // include tag in the hash, otherwise 0, 0d, 0L, 0f collide.
      h = mix(h, equalHashValue.##)
      finalizeHash(h, length = 2)
    }

    override def toString: String = s"Constant($value)"

    // Product1 support, so Constant works with pattern matching machinery.
    def canEqual(x: Any): Boolean = true
    def get: Any = value
    def isEmpty: Boolean = false
    def _1: Any = value
  }

  object Constant {
    // Typed factories: each picks the tag matching the static type of the argument.
    def apply(x: Null): Constant = new Constant(x, NullTag)
    def apply(x: Unit): Constant = new Constant(x, UnitTag)
    def apply(x: Boolean): Constant = new Constant(x, BooleanTag)
    def apply(x: Byte): Constant = new Constant(x, ByteTag)
    def apply(x: Short): Constant = new Constant(x, ShortTag)
    def apply(x: Int): Constant = new Constant(x, IntTag)
    def apply(x: Long): Constant = new Constant(x, LongTag)
    def apply(x: Float): Constant = new Constant(x, FloatTag)
    def apply(x: Double): Constant = new Constant(x, DoubleTag)
    def apply(x: String): Constant = new Constant(x, StringTag)
    def apply(x: Char): Constant = new Constant(x, CharTag)
    def apply(x: Type): Constant = new Constant(x, ClazzTag)
    def apply(x: Symbol): Constant = new Constant(x, EnumTag)

    // Untyped factory: determine the tag by runtime inspection of the value.
    // Throws a MatchError for values of any unsupported type.
    def apply(value: Any): Constant =
      new Constant(value,
        value match {
          case null => NullTag
          case x: Unit => UnitTag
          case x: Boolean => BooleanTag
          case x: Byte => ByteTag
          case x: Short => ShortTag
          case x: Int => IntTag
          case x: Long => LongTag
          case x: Float => FloatTag
          case x: Double => DoubleTag
          case x: String => StringTag
          case x: Char => CharTag
          case x: Type => ClazzTag
          case x: Symbol => EnumTag
        }
      )

    def unapply(c: Constant): Constant = c
  }
}
| som-snytt/dotty | compiler/src/dotty/tools/dotc/core/Constants.scala | Scala | apache-2.0 | 10,851 |
package com.judopay.connect.statsd.config
import com.judopay.connect.statsd.ConnectorParser.{ComparisonContext, LhsContext, RhsContext, StattypeContext}
import com.judopay.connect.statsd.{ConnectorLexer, ConnectorParser, ConnectorParserBaseListener, StatType}
import org.antlr.v4.runtime._
import org.apache.kafka.connect.data.Struct
import org.apache.kafka.connect.sink.SinkRecord
/**
* Created by mark on 08/03/17.
*/
/** A binary predicate over two (possibly null) reference values. */
abstract class BinaryComparisonOp {
  def apply(a: AnyRef, b: AnyRef): Boolean
}

/** Null-safe equality, matching the semantics of Scala's `==` on references. */
object EqualsComparisonOp extends BinaryComparisonOp {
  override def apply(a: AnyRef, b: AnyRef): Boolean =
    if (a == null) b == null else a.equals(b)
}
/** A boolean filter over Kafka Connect sink records. */
abstract class SinkRecordPredicate {
  def apply(sr: SinkRecord): Boolean
}

/** Accepts every record; the default when no filter expression is configured. */
object TrueSinkRecordPredicate extends SinkRecordPredicate {
  override def apply(sr: SinkRecord): Boolean = true
}
/**
 * Predicate comparing a sink record against `rhs` with the supplied `op`.
 *
 * With `field == None` the record's whole value is compared; otherwise the
 * record value must be a [[Struct]] and the named field is compared.
 *
 * Fixed: the original ignored `op` and hardcoded equality (`==` / `.equals`),
 * which also threw a NullPointerException when the struct field was null.
 * Behavior is unchanged for the only existing op, [[EqualsComparisonOp]],
 * except that a null field value now compares as unequal instead of throwing.
 */
case class StructFieldPredicate(field: Option[String], rhs: AnyRef, op: BinaryComparisonOp) extends SinkRecordPredicate {
  def apply(sr: SinkRecord): Boolean = field match {
    case None => op(sr.value().asInstanceOf[AnyRef], rhs)
    case Some(f) => op(sr.value().asInstanceOf[Struct].get(f), rhs)
  }
}
case class ExtractorConfig(topic: String, metric: SinkRecord => String, statType: StatType, field: Option[String], predicate: SinkRecordPredicate = TrueSinkRecordPredicate)
object ExtractorConfig {

  // Convenience factories covering the common argument combinations.
  def apply(topic: String, metric: SinkRecord => String, statType: StatType): ExtractorConfig =
    new ExtractorConfig(topic, metric, statType, None, TrueSinkRecordPredicate)

  def apply(topic: String, metric: SinkRecord => String,
            statType: StatType, field: String): ExtractorConfig =
    new ExtractorConfig(topic, metric, statType, Some(field), TrueSinkRecordPredicate)

  def apply(
      topic: String, metric: SinkRecord => String,
      statType: StatType, field: String,
      predicate: SinkRecordPredicate): ExtractorConfig =
    new ExtractorConfig(topic, metric, statType, Some(field), predicate)

  /**
   * Parses an extractor expression with the ANTLR-generated ConnectorParser and
   * assembles an [[ExtractorConfig]] from the parse events.
   *
   * Throws IllegalArgumentException (wrapping the underlying cause) when the
   * expression cannot be parsed.
   */
  def parse(syntax: String): ExtractorConfig = {
    val lexer = new ConnectorLexer(new ANTLRInputStream(syntax))
    val tokens = new CommonTokenStream(lexer)
    val parser = new ConnectorParser(tokens)
    // Mutable accumulator populated incrementally by the parse listener below.
    var config = ExtractorConfig("", sr => "", StatType.Count)
    // Holds the most recently parsed rhs literal until exitComparison consumes it.
    var nextRhs: Option[AnyRef] = None
    // Turn ANTLR syntax errors into exceptions instead of console warnings.
    parser.addErrorListener(new BaseErrorListener {
      override def syntaxError(recognizer: Recognizer[_, _],
                               offendingSymbol: Any,
                               line: Int,
                               charPositionInLine: Int,
                               msg: String,
                               e: RecognitionException): Unit = throw new IllegalStateException("failed to parse at line " + line + " due to " + msg, e)
    })
    parser.addParseListener(new ConnectorParserBaseListener {
      override def exitTopicname(ctx: ConnectorParser.TopicnameContext) = {
        config = config.copy(topic = ctx.getText)
      }
      override def exitStattype(ctx: ConnectorParser.StattypeContext): Unit = {
        val statType = ctx.getText.toLowerCase() match {
          case "value()" => StatType.Value
          case "count()" => StatType.Count
          case s: String => throw new Exception(s"Unknown stat type ${s}")
        }
        config = config.copy(statType = statType)
      }
      override def exitMetric(ctx: ConnectorParser.MetricContext) = {
        val metric = ctx.getText
        val metricTempalte = ctx.template()
        if (metricTempalte != null) {
          // Metric name contains a ${field} template: resolve it per record
          // from the record's Struct value (null values render as "null").
          val metricField = metricTempalte.field().getText
          config = config.copy(metric = sr => {
            val value = sr.value().asInstanceOf[Struct].get(metricField)
            val metricToReplaceWith = if (value == null) "null" else value.toString
            metric.replace("${" + metricField + "}", metricToReplaceWith)
          })
        } else {
          // Static metric name: same string for every record.
          config = config.copy(metric = sr => metric)
        }
      }
      override def exitSend_field(ctx: ConnectorParser.Send_fieldContext) = {
        // "*" means "use the whole record value", i.e. leave field as None.
        if (ctx.getText != "*") {
          config = config.copy(field = Some(ctx.getText))
        }
      }
      override def exitComparison(ctx: ComparisonContext): Unit = {
        val lhs = ctx.lhs().getText
        config = nextRhs match {
          case Some(s) => config.copy(predicate = StructFieldPredicate(Some(lhs), s, EqualsComparisonOp))
          case None => throw new Exception("Unable to parse rhs of comparison expression " + ctx.getText)
        }
        nextRhs = None
      }
      override def exitRhs(ctx: RhsContext): Unit = {
        // Stash the rhs literal (Double, Int or quote-stripped String) for the
        // enclosing comparison.
        if (ctx.DECIMAL() != null) nextRhs = Some(ctx.getText.toDouble.asInstanceOf[AnyRef])
        else if (ctx.INT() != null) nextRhs = Some(ctx.getText.toInt.asInstanceOf[AnyRef])
        else if (ctx.STRING() != null) nextRhs = Some(ctx.getText.replaceAll("'", "").asInstanceOf[AnyRef])
      }
    })
    try {
      parser.stat()
    } catch {
      case ex: Throwable =>
        throw new IllegalArgumentException("Invalid syntax." + ex.getMessage(), ex);
    }
    config
  }
}
| mtranter/kafka-connect-statsd | kafka-connect-statsd/src/main/scala/com/judopay/connect/statsd/config/ExtractorParser.scala | Scala | mit | 5,061 |
package com.nekopiano.scala.processing.sandbox.poc.physics
import com.nekopiano.scala.processing._
import com.typesafe.scalalogging.LazyLogging
/**
* Created on 09/08/2016.
*/
/**
 * Processing sketch: a set of bouncing balls in 3D that trigger a SuperCollider
 * synth on collision.  The SuperCollider server is booted during `settings`.
 */
class BouncyBubblesIn3DWSoundApp extends ThreeDimensionalCameraPApp with LazyLogging {

  implicit val sp5 = this

  // Number of balls in the simulation.
  val numBalls = 32
  var balls: Set[Ball] = null

  override def settings: Unit = {
    size(1024, 768, P3D)
    pixelDensity(displayDensity())
    //smooth(8)
    SuperColliderSynth.runServer()
  }

  override def setup(): Unit = {
    surface.setResizable(true)
    colorMode(HSB, 360, 100, 100, 100)
    noStroke()
    // Create the balls, then give each one the set of all other balls so it can
    // collision-test against its peers.
    val localBalls = (0.until(numBalls)).map(i => {
      SoundBall.apply(i)
    }).toSet[Ball]
    localBalls.foreach(ball => { ball.others = localBalls - ball })
    balls = localBalls
    logger.info("waiting")
    //Thread.sleep(5000)
  }

  override def drawObjects(): Unit = {
    background(0, 0, 0)
    lights()
    fill(255, 100, 100, 100)
    // Advance and render the simulation one frame.
    balls.foreach(ball => {
      ball.collide()
      ball.move()
      ball.display()
    })
    // Draw a vertical reference line from the top centre down to the "gravity"
    // point at the bottom centre, labelled "G".
    usingMatrix {
      val gravityVector = ScalaPVector.apply(width./(2), height, 0)
      val heavenVector = ScalaPVector.apply(width./(2), 0, 0)
      usingStyle {
        stroke(222, 111, 123, 222)
        line(heavenVector, gravityVector)
        textSize(36)
        text("G", gravityVector)
      }
    }
  }
}
object BouncyBubblesIn3DWSoundApp extends ScalaPAppCompanion {}
/**
 * A [[Ball]] that plays a one-shot SuperCollider sine synth whenever it
 * collides, mapping its z position onto the note's frequency.
 */
class SoundBall(vectorConveryor: ScalaPVector, override val diameter: Float, override val id: Int)(override implicit val sp53d: ThreeDimensionalPApp) extends Ball(vectorConveryor, diameter, id) with LazyLogging {
  // If you use a vector in this SoundBall class, e.g.
  //   println(vectorConveryor)
  // then after compilation the SoundBall class has it as a property.
  // However if you never use it, then the SoundBall doesn't.

  import sp53d._

  //override var others = Set.empty[SoundBall]

  override def actInCollision() {
    //collisionSound(random(220, 880))
    // Map the ball's depth (z over [0, width]) onto a 220-1320 Hz frequency range.
    val mapped = map(vector.z, 0, width, 220, 1320)
    collisionSound(mapped)
  }

  // Fire a one-shot "sineDecay" synth at the given frequency.
  def collisionSound(freq: Double) {
    import de.sciss.synth._
    import Ops._
    import ugen._
    Synth.play(SuperColliderSynth.sineDecaySynth.name, Seq.apply("freq".->(freq)))
  }
}
object SoundBall {
  /**
   * Creates a ball with a random position (x anywhere in the view, y up to
   * twice the view height, z = 0) and a random diameter in [30, 70).
   */
  def apply(id: Int)(implicit sp53d: ThreeDimensionalPApp): SoundBall = {
    import sp53d._
    val vector = ScalaPVector.apply(random(width), random(height.*(2)), 0)
    new SoundBall(vector, random(30, 70), id)
  }
}
/**
 * Boots and configures a local SuperCollider server and holds the synth
 * definitions used by the sketch.
 */
object SuperColliderSynth extends LazyLogging {
  import de.sciss.synth._
  import Ops._
  import ugen._

  // Server configuration.
  val cfg = {
    val conf = Server.Config.apply()
    // the path to scsynth
    // NOTE(review): hardcoded macOS install location — presumably needs to be
    // made configurable for other platforms; confirm before reuse.
    conf.program = "/Applications/SuperCollider.app/Contents/Resources/scsynth"
    conf
  }

  // A percussive sine voice: freq/amp controls, a one-shot envelope that frees
  // the synth when finished (doneAction = 2), panned to centre on bus 0.
  val sineDecaySynth = SynthDef.apply("sineDecay") {
    val freq = "freq".kr(261.6)
    val amp = "amp".kr(.2)
    //val env = EnvGen.kr(Env.linen(0.01, 0.98, 0.01, 1), timeScale = 1.0, doneAction = 2)
    val env = EnvGen.kr(Env.perc, doneAction = 2)
    val src = SinOsc.ar(freq).madd(amp.*(env), 0)
    val pan = Pan2.ar(src.*(env), 0)
    Out.ar(0, pan)
  }

  /**
   * Starts the server, registers [[sineDecaySynth]] on it and blocks (up to
   * 10 seconds) until the asynchronous boot callback has completed.
   */
  def runServer() {
    import scala.concurrent._
    import ExecutionContext.Implicits.global
    // The boot callback runs asynchronously; this promise lets the method block
    // until the server is actually usable.
    val serverPromise = Promise[Server]
    val serverFuture: Future[Server] = serverPromise.future
    serverFuture.foreach(e => println("deferred promised future value = " + e + " : ".+(sourcecode.Line.generate)))
    // Runs on the freshly booted server: install the synth def, then complete
    // the promise.
    val runningCodes = (s: Server) => {
      sineDecaySynth.recv(s)
      // Synth.play (sineDecaySynth.name, Seq ("freq" -> 440) )
      val defaultServer = Server.default
      logger.info("defaultServer=" + defaultServer + " : " + sourcecode.Line.generate)
      serverPromise.success(Server.default)
      logger.info("run ends." + " : " + sourcecode.Line.generate)
      defaultServer
    }
    Server.run(cfg) { s => runningCodes.apply(s) }
    import scala.concurrent.duration._
    // Block until boot completes (or fail after 10 seconds).
    Await.result(serverFuture, 10.seconds)
    logger.info("ready" + " : " + sourcecode.Line.generate)
    logger.info("defaultServer=" + serverPromise + " : " + sourcecode.Line.generate)
    logger.info("end" + " : " + sourcecode.Line.generate)
  }
}
| lamusique/ScalaProcessing | samples/src/test/scala/com/nekopiano/scala/processing/sandbox/poc/physics/BouncyBubblesIn3DWSound.scala | Scala | apache-2.0 | 4,248 |
package squants.electro
import org.scalatest.{FlatSpec, Matchers}
import squants.QuantityParseException
import squants.space.Meters
/**
* @author Nicolas Vinuesa
* @since 1.4
*
*/
class MagneticFieldStrengthSpec extends FlatSpec with Matchers {

  behavior of "MagneticFieldStrength and its Units of Measure"

  it should "create values using UOM factories" in {
    // AmperesPerMeter is the unit under test here; the previous assertion used
    // Permittivity's FaradsPerMeter — a copy-paste slip from PermittivitySpec.
    AmperesPerMeter(1).toAmperesPerMeter should be(1)
  }

  it should "create values from properly formatted Strings" in {
    MagneticFieldStrength("10.22 A/m").get should be(AmperesPerMeter(10.22))
    MagneticFieldStrength("10.22 zz").failed.get should be(QuantityParseException("Unable to parse MagneticFieldStrength", "10.22 zz"))
    MagneticFieldStrength("zz A/m").failed.get should be(QuantityParseException("Unable to parse MagneticFieldStrength", "zz A/m"))
  }

  it should "properly convert to all supported Units of Measure" in {
    val x = AmperesPerMeter(10)
    x.toAmperesPerMeter should be(10.0)
  }

  it should "return properly formatted strings for all supported Units of Measure" in {
    AmperesPerMeter(1).toString(AmperesPerMeter) should be("1.0 A/m")
  }

  it should "return ElectricCurrent when multiplied by Length" in {
    AmperesPerMeter(1) * Meters(1) should be(Amperes(1))
  }

  behavior of "MagneticFieldStrengthConversions"

  it should "provide aliases for single unit values" in {
    import MagneticFieldStrengthConversions._
    amperePerMeter should be(AmperesPerMeter(1))
  }

  it should "provide implicit conversion from Double" in {
    import MagneticFieldStrengthConversions._
    val d = 10.22
    d.amperesPerMeter should be(AmperesPerMeter(d))
  }

  it should "provide Numeric support" in {
    import MagneticFieldStrengthConversions.MagneticFieldStrengthNumeric
    val rs = List(AmperesPerMeter(100), AmperesPerMeter(10))
    rs.sum should be(AmperesPerMeter(110))
  }
}
| underscorenico/squants | shared/src/test/scala/squants/electro/MagneticFieldStrengthSpec.scala | Scala | apache-2.0 | 1,900 |
package fpgatidbits.regfile
import Chisel._
// command bundle for read/writes to AEG/CSR registers
class RegCommand(idBits: Int, dataBits: Int) extends Bundle {
val regID = UInt(width = idBits)
val read = Bool()
val write = Bool()
val writeData = UInt(width = dataBits)
override def clone = { new RegCommand(idBits, dataBits).asInstanceOf[this.type] }
def driveDefaults() = {
regID := UInt(0)
read := Bool(false)
write := Bool(false)
writeData := UInt(0)
}
}
// register file interface
class RegFileSlaveIF(idBits: Int, dataBits: Int) extends Bundle {
// register read/write commands
// the "valid" signal here should be connected to (.read OR .write)
val cmd = Valid(new RegCommand(idBits, dataBits)).flip
// returned read data
val readData = Valid(UInt(width = dataBits))
// number of registers
val regCount = UInt(OUTPUT, width = idBits)
override def clone = { new RegFileSlaveIF(idBits, dataBits).asInstanceOf[this.type] }
}
/**
 * A register file exposing an external command interface (`extIF`) plus direct
 * per-register read taps (`regOut`) and write ports (`regIn`) for internal
 * logic.  External writes take priority over internal writes arriving in the
 * same cycle.  Commands are registered, so responses appear one cycle after
 * the request; out-of-range read IDs return 0.
 */
class RegFile(numRegs: Int, idBits: Int, dataBits: Int) extends Module {
  val io = new Bundle {
    // external command interface
    val extIF = new RegFileSlaveIF(idBits, dataBits)
    // exposed values of all registers, for internal use
    val regOut = Vec.fill(numRegs) { UInt(OUTPUT, width = dataBits) }
    // valid pipes for writing new values for all registers, for internal use
    // (extIF takes priority over this)
    val regIn = Vec.fill(numRegs) { Valid(UInt(width = dataBits)).flip }
  }
  // drive num registers to compile-time constant
  io.extIF.regCount := UInt(numRegs)
  // instantiate the registers in the file
  val regFile = Vec.fill(numRegs) { Reg(init = UInt(0, width = dataBits)) }
  // latch the incoming commands
  val regCommand = Reg(next = io.extIF.cmd.bits)
  val regDoCmd = Reg(init = Bool(false), next = io.extIF.cmd.valid)
  val hasExtReadCommand = (regDoCmd && regCommand.read)
  val hasExtWriteCommand = (regDoCmd && regCommand.write)
  // register read logic
  io.extIF.readData.valid := hasExtReadCommand
  // make sure regID stays within range for memory read
  when (regCommand.regID < UInt(numRegs)) {
    io.extIF.readData.bits := regFile(regCommand.regID)
  } .otherwise {
    // return 0 otherwise
    io.extIF.readData.bits := UInt(0)
  }
  // register write logic
  // to avoid multiple ports, we prioritize the extIF writes over the internal
  // ones (e.g if there is an external write present, the internal write will
  // be ignored if it arrives simultaneously)
  when (hasExtWriteCommand) {
    regFile(regCommand.regID) := regCommand.writeData
  } .otherwise {
    for (i <- 0 until numRegs) {
      when (io.regIn(i).valid) { regFile(i) := io.regIn(i).bits }
    }
  }
  // expose all reg outputs for personality's access
  for (i <- 0 to numRegs-1) {
    io.regOut(i) := regFile(i)
  }
  // TODO add testbench for regfile logic
}
| maltanar/fpga-tidbits | src/main/scala/fpgatidbits/regfile/RegFile.scala | Scala | bsd-2-clause | 2,908 |
package io.taig.android.widget
import android.content.Context
import android.util.AttributeSet
import android.view.MotionEvent
/**
 * An advanced [[android.support.v4.view.ViewPager]] that allows swiping to be enabled or disabled at runtime.
 */
class ViewPager(context: Context, attributes: AttributeSet)
  extends android.support.v4.view.ViewPager(context, attributes) {

  // When false, all touch events are rejected so the current page can only be
  // changed programmatically.
  private var swipe = true

  // Fixed: this constructor previously discarded `attributes` and forwarded null.
  def this(attributes: AttributeSet)(implicit c: Context) = this(c, attributes)

  def this()(implicit c: Context) = this(c, null)

  /** Enables or disables swipe-based page changes. */
  def setSwipeEnabled(enabled: Boolean): Unit = {
    swipe = enabled
  }

  // Only intercept/handle touches while swiping is enabled.  (The original
  // source contained mojibake `β` where `=>` belonged; restored to valid Scala
  // and simplified the Boolean matches to short-circuit expressions.)
  override def onInterceptTouchEvent(event: MotionEvent) =
    swipe && super.onInterceptTouchEvent(event)

  override def onTouchEvent(event: MotionEvent) =
    swipe && super.onTouchEvent(event)
}
| Taig/Toolbelt | widget/src/main/scala/io/taig/android/widget/ViewPager.scala | Scala | mit | 872 |
package fpinscala.parallelism
import java.util.concurrent.atomic.AtomicInteger
import java.util.concurrent.{Callable, ExecutorService, Executors, ThreadFactory, TimeUnit, TimeoutException, Future => JFuture}
import scala.concurrent.{Future => SFuture}
import scala.concurrent.ExecutionContext.Implicits.global
import fpinscala.parallelism.Par.Par
import org.scalacheck.{Arbitrary, Gen}
import org.scalatest.concurrent.{Eventually, ScalaFutures}
import org.scalatest.{BeforeAndAfter, FlatSpec, Matchers}
import org.scalatest.prop.PropertyChecks
/**
* Created by benen on 03/08/17.
*/
/**
 * Exercises the `Par` combinators.  A counting ThreadFactory records how many
 * worker threads the pool spawned, so tests can assert that work actually ran
 * asynchronously (`assertAsync`) rather than on the calling thread.
 */
class ParSpec extends FlatSpec with PropertyChecks with Matchers with BeforeAndAfter with ScalaFutures with Eventually {

  // Incremented each time the executor creates a new worker thread.
  val asyncThreadCount = new AtomicInteger

  val threadFactory: ThreadFactory =
    (r: Runnable) => {
      asyncThreadCount.incrementAndGet
      Executors.defaultThreadFactory.newThread(r)
    }

  var executorService: ExecutorService = _

  before {
    asyncThreadCount.set(0)
    // note that sequence() will not work if we do not provide enough parallel threads! (see exercise 7.9)
    executorService = Executors.newCachedThreadPool(threadFactory)
  }

  behavior of "7.3 map2WithTimeouts"

  it should "apply f correctly" in {
    // given
    val pa = Par.delay { TimeUnit.MILLISECONDS.sleep(10); Par.unit(1) }
    val pb = Par.delay { TimeUnit.MILLISECONDS.sleep(10); Par.unit(2) }
    // when
    val result = Par.map2WithTimeouts(pa, pb)(_ + _)(executorService).get(25L, TimeUnit.MILLISECONDS)
    // then
    result shouldEqual 3
  }

  it should "respect timeouts" in intercept[TimeoutException] {
    // given: two tasks whose combined sleep exceeds the 5ms deadline below
    val pa: Par[Int] = asPar { TimeUnit.MILLISECONDS.sleep(3); 1 }
    val pb: Par[Int] = asPar { TimeUnit.MILLISECONDS.sleep(3); 1 }
    // when
    Par.map2WithTimeouts(pa, pb)(_ + _)(executorService).get(5L, TimeUnit.MILLISECONDS)
    // then BOOM!
  }

  behavior of "7.4 asyncF"

  it should "apply f in a separate thread" in {
    // given
    val op = Par.asyncF((i: Int) => i.toString)(42)
    // when
    val running = op.run
    // then
    whenReady(running.asScala) { result =>
      result shouldEqual "42"
      assertAsync
    }
  }

  behavior of "7.5 sequence"

  it should "wrap the results in a single par" in {
    // given
    val list = List(asPar(1), asPar(2), asPar(3))
    // when
    val result = Par.sequence(list).run
    // then
    whenReady(result.asScala) { inner => inner shouldEqual List(1, 2, 3) }
  }

  behavior of "7.6 filter"

  it should "filter the list in parallel" in {
    // parFilter must agree with the sequential filter for arbitrary inputs
    forAll(Gen.listOf(Arbitrary.arbitrary[Int])) { list =>
      def isEven: Int => Boolean = _ % 2 == 0
      list.filter(isEven) shouldEqual Par.parFilter(list)(isEven).get
      assertAsync
    }
  }

  behavior of "7.11.1 choiceN"

  it should "work asynchronously for non-empty Lists" in {
    val intPars = List(Par.lazyUnit(1), Par.lazyUnit(2), Par.lazyUnit(3))
    val n = Par.lazyUnit(1)
    val parInt = Par.choiceN(n)(intPars).run
    eventually {
      assert(parInt.get == 2)
      assertAsync
    }
  }

  behavior of "7.11.2 choiceViaChoiceN"

  it should "work asynchronously for true case" in {
    val trueChoice = Par.choiceViaChoiceN(Par.lazyUnit(true))(Par.lazyUnit("yes"), Par.lazyUnit("no")).run
    eventually {
      assert(trueChoice.get == "yes")
      assertAsync
    }
  }

  it should "work asynchronously for false case" in {
    val falseChoice = Par.choiceViaChoiceN(Par.lazyUnit(false))(Par.lazyUnit("yes"), Par.lazyUnit("no")).run
    eventually {
      assert(falseChoice.get == "no")
      assertAsync
    }
  }

  behavior of "7.12 choiceMap"

  it should "work asynchronously" in {
    val choices = Map(1 -> Par.lazyUnit(1), 2 -> Par.lazyUnit(2), 3 -> Par.lazyUnit(3))
    val key = Par.lazyUnit(2)
    val parInt = Par.choiceMap(key)(choices).run
    eventually {
      assert(parInt.get == 2)
      assertAsync
    }
  }

  behavior of "7.13.1 chooser"

  it should "work asynchronously for List" in {
    val intPars = List(Par.lazyUnit(1), Par.lazyUnit(2), Par.lazyUnit(3))
    val n = Par.lazyUnit(1)
    val parInt = Par.chooser(n)(intPars).run
    eventually {
      assert(parInt.get == 2)
      assertAsync
    }
  }

  it should "work asynchronously for Map" in {
    val choices = Map(1 -> Par.lazyUnit(1), 2 -> Par.lazyUnit(2), 3 -> Par.lazyUnit(3))
    val key = Par.lazyUnit(2)
    val parInt = Par.chooser(key)(choices).run
    eventually {
      assert(parInt.get == 2)
      assertAsync
    }
  }

  behavior of "7.13.2 choiceViaChooser"

  it should "work asynchronously for true case" in {
    val trueChoice = Par.choiceViaChooser(Par.lazyUnit(true))(Par.lazyUnit("yes"), Par.lazyUnit("no")).run
    eventually {
      assert(trueChoice.get == "yes")
      assertAsync
    }
  }

  it should "work asynchronously for false case" in {
    val falseChoice = Par.choiceViaChooser(Par.lazyUnit(false))(Par.lazyUnit("yes"), Par.lazyUnit("no")).run
    eventually {
      assert(falseChoice.get == "no")
      assertAsync
    }
  }

  behavior of "7.13.3 choiceNViaChooser"

  it should "work asynchronously for List" in {
    val intPars = List(Par.lazyUnit(1), Par.lazyUnit(2), Par.lazyUnit(3))
    val n = Par.lazyUnit(1)
    val parInt = Par.choiceNViaChooser(n)(intPars).run
    eventually {
      assert(parInt.get == 2)
      assertAsync
    }
  }

  behavior of "7.14.1 join"

  it should "work asynchronously" in {
    val ppi = Par.lazyUnit(Par.lazyUnit(42))
    val i = Par.join(ppi)
    eventually {
      assert(i.get == 42)
      assertAsync
    }
  }

  behavior of "7.14.2 flatMapViaJoin"

  it should "work asynchronously" in {
    val pi = Par.lazyUnit(42)
    val i = Par.flatMapViaJoin(pi)(Par.lazyUnit(_))
    eventually {
      assert(i.get == 42)
      assertAsync
    }
  }

  behavior of "7.14.3 joinViaFlatMap"

  it should "work asynchronously" in {
    val ppi = Par.lazyUnit(Par.lazyUnit(42))
    val i = Par.joinViaFlatMap(ppi)
    eventually {
      assert(i.get == 42)
      assertAsync
    }
  }

  // Helpers asserting whether the preceding work spawned any pool threads.
  private def assertAsync = assert(asyncThreadCount.get > 0, "execution must be async")
  private def assertSync = assert(asyncThreadCount.get == 0, "execution must be sync")

  // Lift a by-name computation into a Par by submitting it to the executor.
  private def asPar[A](a: => A): Par[A] = { (es) =>
    val exe = new Callable[A] { def call(): A = a }
    es.submit(exe)
  }

  // Syntax helpers: run/get a Par on the test's executor service.
  implicit class TestParOps[A](p: Par[A]) {
    def run: JFuture[A] = Par.run(executorService)(p)
    def get: A = Par.run(executorService)(p).get
  }

  // Bridge a java.util.concurrent.Future into a scala.concurrent.Future.
  implicit class ToScalaFuture[A](f: JFuture[A]) {
    def asScala: SFuture[A] = SFuture { f.get }
  }
}
| benen/fpinscala | exercises/src/test/scala/fpinscala/parallelism/ParSpec.scala | Scala | mit | 6,604 |
package controllers
import contexts.{CreateInventoryContext, DeleteInventoryContext}
import jsons.InventoryJson
import models.Inventory
import play.api.data.Forms._
import play.api.data._
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import utils.exceptions.InvalidRequest
/** REST endpoints for inventory records (list/create/read/update/delete). */
object InventoriesController extends KiwiERPController {

  /** GET ?partsId=… – paginated inventories of one part.
    * A missing or malformed `partsId` query parameter is rejected as an invalid request.
    */
  def list = AuthorizedAction.async { implicit req =>
    req.getQueryString("partsId") filter isId map { raw =>
      Page(Inventory.findAllByPartsId(raw.toLong)) map { case (inventories, page) =>
        Ok(InventoryJson.index(inventories, page))
      }
    } getOrElse (throw new InvalidRequest)
  }

  /** POST – creates an inventory from url-encoded form data; replies 201 with a Location header. */
  def create = AuthorizedAction.async(parse.urlFormEncoded) { implicit req =>
    case class CreateForm(partsId: Long, description: Option[String], quantity: Int)

    val createForm = Form(
      mapping(
        "partsId" -> longNumber(min = 0, max = MAX_LONG_NUMBER),
        "description" -> optional(text(maxLength = 500)),
        "quantity" -> number(min = 1, max = MAX_NUMBER)
      )(CreateForm.apply)(CreateForm.unapply))

    createForm.bindFromRequestAndCheckErrors { data =>
      CreateInventoryContext(data.partsId, data.description, data.quantity) map { inventory =>
        CreatedWithLocation(InventoryJson.create(inventory))
      }
    }
  }

  /** GET /:id – a single inventory record. */
  def read(id: Long) = AuthorizedAction.async {
    Inventory.find(id) map (inventory => Ok(InventoryJson.read(inventory)))
  }

  /** PUT/PATCH /:id – updates description and quantity; replies 204 on success. */
  def update(id: Long) = AuthorizedAction.async(parse.urlFormEncoded) { implicit req =>
    case class UpdateForm(description: Option[String], quantity: Int)

    val updateForm = Form(
      mapping(
        "description" -> optional(text(maxLength = 500)),
        "quantity" -> number(min = 1, max = MAX_NUMBER)
      )(UpdateForm.apply)(UpdateForm.unapply))

    updateForm.bindFromRequestAndCheckErrors { data =>
      Inventory.save(id)(data.description, data.quantity) map (_ => NoContent)
    }
  }

  /** DELETE /:id – removes the inventory; replies 204 on success. */
  def delete(id: Long) = AuthorizedAction.async {
    DeleteInventoryContext(id) map (_ => NoContent)
  }
}
| KIWIKIGMBH/kiwierp | kiwierp-backend/app/controllers/InventoriesController.scala | Scala | mpl-2.0 | 2,104 |
package org.jetbrains.plugins.scala
package lang
package psi
package impl
package base
package types
import com.intellij.lang.ASTNode
import org.jetbrains.plugins.scala.lang.psi.api.ScalaElementVisitor
import org.jetbrains.plugins.scala.lang.psi.api.base.types._
/**
* @author ilyas, Alexander Podkhalyuzin
*/
/** PSI implementation of a functional type element (e.g. `A => B`).
  * Only specializes visitor dispatch; all other behaviour comes from [[ScalaPsiElementImpl]].
  */
class ScFunctionalTypeElementImpl(node: ASTNode) extends ScalaPsiElementImpl(node) with ScFunctionalTypeElement {
  override protected def acceptScala(visitor: ScalaElementVisitor): Unit =
    visitor.visitFunctionalTypeElement(this)
}
package almhirt.akkax
import scala.concurrent.ExecutionContext
import scala.concurrent.duration._
import almhirt.common._
import akka.event.LoggingAdapter
import akka.actor._
import akka.pattern._
import almhirt.context.HasAlmhirtContext
object FusedActor {
  /** Exposes a fused actor behind the [[CircuitControl]] interface.
    *
    * Each control operation is translated into the matching
    * [[InternalFusedActorMessage]] and fired at `fusedActor`; `state` uses the
    * ask pattern with the supplied `timeout`. Listener registrations return
    * `this` so calls can be chained fluently.
    */
  def wrap(fusedActor: ActorRef)(timeout: FiniteDuration)(implicit executor: ExecutionContext): CircuitControl =
    new CircuitControl {
      import almhirt.almfuture.all._

      // Fire-and-forget a control message and keep the fluent interface.
      private def send(message: Any): CircuitControl = {
        fusedActor ! message
        this
      }

      def attemptClose() { fusedActor ! InternalFusedActorMessage.AttemptClose }
      def removeFuse() { fusedActor ! InternalFusedActorMessage.RemoveFuse }
      def destroy() { fusedActor ! InternalFusedActorMessage.Destroy }
      def circumvent() { fusedActor ! InternalFusedActorMessage.Circumvent }

      def state: AlmFuture[CircuitState] =
        (fusedActor ? InternalFusedActorMessage.ReportState)(timeout).mapCastTo[CircuitState]

      def onOpened(listener: () => Unit): CircuitControl =
        send(InternalFusedActorMessage.OnOpened(listener))
      def onHalfOpened(listener: () => Unit): CircuitControl =
        send(InternalFusedActorMessage.OnHalfOpened(listener))
      def onClosed(listener: () => Unit): CircuitControl =
        send(InternalFusedActorMessage.OnClosed(listener))
      def onFuseRemoved(listener: () => Unit): CircuitControl =
        send(InternalFusedActorMessage.OnFuseRemoved(listener))
      def onDestroyed(listener: () => Unit): CircuitControl =
        send(InternalFusedActorMessage.OnDestroyed(listener))
      def onCircumvented(listener: () => Unit): CircuitControl =
        send(InternalFusedActorMessage.OnCircumvented(listener))
      def onWarning(listener: (Int, Int) => Unit): CircuitControl =
        send(InternalFusedActorMessage.OnWarning(listener))
    }
}
/** Message protocol understood by a fused actor's `internalReceive`. */
private[almhirt] object InternalFusedActorMessage {
  // Queries and commands against the circuit state machine.
  case object ReportState
  case object AttemptClose
  case object RemoveFuse
  case object Destroy
  case object Circumvent

  // Listener registrations, one per observable state transition.
  final case class OnOpened(listener: () => Unit)
  final case class OnHalfOpened(listener: () => Unit)
  final case class OnClosed(listener: () => Unit)
  final case class OnFuseRemoved(listener: () => Unit)
  final case class OnDestroyed(listener: () => Unit)
  final case class OnCircumvented(listener: () => Unit)
  // Listener args: (current failure count, maximum failures before opening).
  final case class OnWarning(listener: (Int, Int) => Unit)
}
/** Mixes synchronous circuit-breaker ("fuse") behaviour into an [[AlmActor]].
  *
  * Calls wrapped in `fused`/`fusedWithSurrogate` run through an internal state
  * machine (Closed -> Open -> HalfOpen -> Closed, plus the manually reachable
  * states FuseRemoved, Destroyed and Circumvented). The hosting actor must
  * chain `internalReceive` into its behaviour via `ContextOps.becomeFused` so
  * that control messages sent by [[FusedActor.wrap]] are handled.
  */
trait SyncFusedActor { me: AlmActor =>
  import java.util.concurrent.CopyOnWriteArrayList
  import AlmCircuitBreaker._

  /** Static configuration: failure limits, timeouts and the initial state. */
  def circuitControlSettings: CircuitControlSettings
  /** Selects the execution context on which transition callbacks are run. */
  def circuitControlCallbackExecutorSelector: ExtendedExecutionContextSelector
  def circuitControlLoggingAdapter: Option[LoggingAdapter] = None
  /** When set (together with a logging adapter), the state is logged repeatedly
    * while the circuit is open or half-open. */
  def circuitStateReportingInterval: Option[FiniteDuration]
  /** When true, every state transition is also published to `self` as an [[ActorMessages]] event. */
  def sendStateChangedEvents: Boolean

  private val CircuitControlSettings(maxFailures, failuresWarnThreshold, callTimeout, resetTimeout, startState) = circuitControlSettings

  private val callbackExecutor = circuitControlCallbackExecutorSelector.select(this.almhirtContext, this.context)

  // Current state of the machine. Mutated only from within the actor.
  private[this] var currentState: InternalState = {
    val state =
      startState match {
        case CircuitStartState.Closed       => InternalClosed
        case CircuitStartState.HalfOpen     => InternalHalfOpen
        case CircuitStartState.Open         => InternalOpen
        case CircuitStartState.FuseRemoved  => InternalFuseRemoved
        case CircuitStartState.Destroyed    => InternalDestroyed
        case CircuitStartState.Circumvented => InternalCircumvented
      }
    state.enter()
    state
  }

  /** Runs `body` through the circuit; when the circuit does not allow the call,
    * a generic [[CircuitOpenProblem]] failure is returned instead. */
  def fused[T](body: => AlmValidation[T]): AlmValidation[T] =
    fusedWithSurrogate(scalaz.Failure(CircuitOpenProblem("The circuit is open.")))(body)

  /** Runs `body` through the circuit; when the circuit does not allow the call,
    * `surrogate` is evaluated and returned instead. */
  def fusedWithSurrogate[T](surrogate: => AlmValidation[T])(body: => AlmValidation[T]): AlmValidation[T] =
    currentState.invoke(surrogate, body)

  /** Registers this actor's circuit control (via the overload supplied by the environment). */
  def registerCircuitControl(): Unit =
    registerCircuitControl(FusedActor.wrap(self)(10.seconds)(almhirtContext.futuresContext))

  /** The externally visible state of the circuit. */
  def state: CircuitState = currentState.publicState

  // Self-message that triggers periodic state logging.
  private object ReportState

  private def moveTo(newState: InternalState) {
    currentState = newState
    newState.enter()
  }

  // Transitions only when the machine is still in `oldState`; logs and refuses otherwise.
  private def attemptTransition(oldState: InternalState, newState: InternalState): Boolean = {
    if (currentState == oldState) {
      moveTo(newState)
      true
    } else {
      circuitControlLoggingAdapter.foreach(log =>
        log.warning(s"""Attempted transition from $oldState to $newState failed. Current state was $currentState."""))
      false
    }
  }

  // Self-message used for scheduled transitions (e.g. Open -> HalfOpen after the reset timeout).
  private case class AttemptTransition(origin: InternalState, target: InternalState)

  private val internalReceive: Receive = {
    case AttemptTransition(origin, target) =>
      // BUGFIX: previously this called attemptTransition(origin, origin), which
      // "transitioned" a state onto itself. In particular the scheduled
      // Open -> HalfOpen recovery never happened, so an open circuit could
      // never close again without manual intervention.
      attemptTransition(origin, target)
    case InternalFusedActorMessage.ReportState =>
      sender() ! currentState.publicState
    case InternalFusedActorMessage.AttemptClose =>
      val res = currentState.attemptManualClose
      circuitControlLoggingAdapter.foreach(log =>
        if (res) log.info("Manual reset attempt succeeded")
        else log.warning(s"""Manual reset attempt failed. Current state is ${currentState.publicState}"""))
    case InternalFusedActorMessage.RemoveFuse =>
      val res = currentState.attemptManualRemoveFuse
      circuitControlLoggingAdapter.foreach(log =>
        if (res) log.warning("Manual remove fuse attempt succeeded")
        else log.warning(s"""Manual remove fuse attempt failed. Current state is ${currentState.publicState}"""))
    case InternalFusedActorMessage.Destroy =>
      val res = currentState.attemptManualDestroyFuse
      circuitControlLoggingAdapter.foreach(log =>
        if (res) log.warning("Manual destroy attempt succeeded")
        else log.warning(s"""Manual destroy failed. Current state is ${currentState.publicState}"""))
    case InternalFusedActorMessage.Circumvent =>
      val res = currentState.manualCircumvent
      // Typo fix: these messages previously read "circumverate".
      circuitControlLoggingAdapter.foreach(log =>
        if (res) log.warning("Manual circumvent attempt succeeded")
        else log.warning(s"""Manual circumvent failed. Current state is ${currentState.publicState}"""))
    case InternalFusedActorMessage.OnOpened(listener) =>
      InternalOpen addListener new Runnable { def run() { listener() } }
    case InternalFusedActorMessage.OnHalfOpened(listener) =>
      InternalHalfOpen addListener new Runnable { def run() { listener() } }
    case InternalFusedActorMessage.OnClosed(listener) =>
      InternalClosed addListener new Runnable { def run() { listener() } }
    case InternalFusedActorMessage.OnFuseRemoved(listener) =>
      InternalFuseRemoved addListener new Runnable { def run() { listener() } }
    case InternalFusedActorMessage.OnDestroyed(listener) =>
      InternalDestroyed addListener new Runnable { def run() { listener() } }
    case InternalFusedActorMessage.OnCircumvented(listener) =>
      InternalCircumvented addListener new Runnable { def run() { listener() } }
    case InternalFusedActorMessage.OnWarning(listener) =>
      InternalClosed addWarningListener (currentFailures => new Runnable { def run() { listener(currentFailures, maxFailures) } })
    case ReportState =>
      // Log the current state and keep re-scheduling while the circuit is not closed.
      circuitControlLoggingAdapter.flatMap(log => circuitStateReportingInterval.map((log, _))).foreach {
        case (log, interval) =>
          log.info(s"Current circuit state: ${currentState.publicState}")
          currentState match {
            case InternalOpen =>
              context.system.scheduler.scheduleOnce(interval, self, ReportState)(callbackExecutor)
            case InternalHalfOpen =>
              context.system.scheduler.scheduleOnce(interval, self, ReportState)(callbackExecutor)
            case _ =>
              ()
          }
      }
  }

  /** Lets the hosting actor install `internalReceive` in front of its own behaviour. */
  protected implicit class ContextOps(self: ActorContext) {
    def becomeFused(handler: Receive, discardOld: Boolean) {
      self.become(internalReceive orElse handler, discardOld)
    }
    def becomeFused(handler: Receive) {
      becomeFused(handler, true)
    }
  }

  private sealed trait InternalState {
    private val transitionListeners = new CopyOnWriteArrayList[Runnable]
    def addListener(listener: Runnable): Unit = transitionListeners add listener
    def publicState: CircuitState
    /** Either runs `body` or the `surrogate`, depending on the state. */
    def invoke[T](surrogate: => AlmValidation[T], body: => AlmValidation[T]): AlmValidation[T]
    /** Executes `body`, recording success/failure. A call that succeeds only
      * after `callTimeout` has elapsed is counted as a failure. Non-fatal
      * exceptions are converted into failed validations. */
    def callThrough[T](body: => AlmValidation[T]): AlmValidation[T] = {
      val deadline = callTimeout.fromNow
      val bodyValidation = try body catch { case scala.util.control.NonFatal(exn) => scalaz.Failure(ExceptionCaughtProblem(exn)) }
      bodyValidation.fold(
        fail => callFails(),
        succ =>
          if (!deadline.isOverdue)
            callSucceeds()
          else callFails())
      bodyValidation
    }
    def callSucceeds(): Unit
    def callFails(): Unit
    /** Entry hook: state-specific setup, then listener notification and event publication. */
    def enter(): Unit = {
      _enter()
      notifyTransitionListeners()
      sendStateChanged()
      self ! ReportState
    }
    private def sendStateChanged() {
      if (sendStateChangedEvents)
        this match {
          case InternalClosed       => self ! ActorMessages.CircuitClosed
          case InternalHalfOpen     => self ! ActorMessages.CircuitHalfOpened
          case InternalOpen         => self ! ActorMessages.CircuitOpened
          case InternalFuseRemoved  => self ! ActorMessages.CircuitFuseRemoved
          case InternalDestroyed    => self ! ActorMessages.CircuitDestroyed
          case InternalCircumvented => self ! ActorMessages.CircuitCircumvented
        }
    }
    protected def _enter(): Unit
    protected def notifyTransitionListeners() {
      if (!transitionListeners.isEmpty()) {
        val iterator = transitionListeners.iterator
        while (iterator.hasNext) {
          val listener = iterator.next
          callbackExecutor.execute(listener)
        }
      }
    }
    def attemptManualClose(): Boolean = false
    def attemptManualDestroyFuse(): Boolean
    def attemptManualRemoveFuse(): Boolean
    def manualCircumvent(): Boolean
  }

  /** Normal operation: calls pass through; `maxFailures` consecutive failures open the circuit. */
  private object InternalClosed extends InternalState {
    private val warningListeners = new CopyOnWriteArrayList[Int => Runnable]
    private var failureCount = 0
    def addWarningListener(listener: Int => Runnable): Unit = warningListeners add listener
    override def publicState = CircuitState.Closed(failureCount, maxFailures, failuresWarnThreshold)
    def invoke[T](surrogate: => AlmValidation[T], body: => AlmValidation[T]): AlmValidation[T] =
      callThrough(body)
    override def callSucceeds() {
      // Any success resets the consecutive-failure counter.
      failureCount = 0
    }
    override def callFails() {
      failureCount += 1
      if (failureCount == maxFailures)
        moveTo(InternalOpen)
      failuresWarnThreshold.foreach(wt =>
        if (failureCount == wt)
          notifyWarningListeners(failureCount))
    }
    protected def notifyWarningListeners(failures: Int) {
      if (!warningListeners.isEmpty()) {
        val iterator = warningListeners.iterator
        while (iterator.hasNext) {
          val listener = iterator.next()(failures)
          callbackExecutor.execute(listener)
        }
      }
    }
    override def attemptManualDestroyFuse(): Boolean = attemptTransition(InternalClosed, InternalDestroyed)
    override def attemptManualRemoveFuse(): Boolean = attemptTransition(InternalClosed, InternalFuseRemoved)
    override def manualCircumvent(): Boolean = attemptTransition(InternalClosed, InternalCircumvented)
    override def _enter() {
      failureCount = 0
    }
  }

  /** Probing state: exactly one call is let through; its outcome decides Closed vs. Open. */
  private object InternalHalfOpen extends InternalState {
    private var recovering = false
    override def publicState = CircuitState.HalfOpen(recovering)
    def invoke[T](surrogate: => AlmValidation[T], body: => AlmValidation[T]): AlmValidation[T] =
      if (!recovering) {
        recovering = true
        callThrough(body)
      } else {
        surrogate
      }
    override def callSucceeds() {
      moveTo(InternalClosed)
    }
    override def callFails() {
      moveTo(InternalOpen)
    }
    override def _enter() {
      recovering = false
    }
    override def attemptManualDestroyFuse(): Boolean = attemptTransition(InternalHalfOpen, InternalDestroyed)
    override def attemptManualRemoveFuse(): Boolean = attemptTransition(InternalHalfOpen, InternalFuseRemoved)
    override def manualCircumvent(): Boolean = attemptTransition(InternalHalfOpen, InternalCircumvented)
  }

  /** Tripped state: all calls get the surrogate; after `resetTimeout` a HalfOpen probe is scheduled. */
  private object InternalOpen extends InternalState {
    private var entered: Long = 0L
    private val myResetTimeout: FiniteDuration = resetTimeout getOrElse (Duration.Zero)
    override def publicState = CircuitState.Open(remainingDuration())
    def invoke[T](surrogate: => AlmValidation[T], body: => AlmValidation[T]): AlmValidation[T] =
      surrogate
    override def callSucceeds() {
    }
    private def remainingDuration(): FiniteDuration = {
      val elapsedNanos = System.nanoTime() - entered
      if (elapsedNanos <= 0L) Duration.Zero
      else myResetTimeout - elapsedNanos.nanos
    }
    override def callFails() {
    }
    override def _enter() {
      entered = System.nanoTime()
      resetTimeout.foreach { rt =>
        context.system.scheduler.scheduleOnce(rt) {
          self ! AttemptTransition(InternalOpen, InternalHalfOpen)
        }(callbackExecutor)
      }
    }
    override def attemptManualClose(): Boolean = attemptTransition(InternalOpen, InternalHalfOpen)
    override def attemptManualDestroyFuse(): Boolean = attemptTransition(InternalOpen, InternalDestroyed)
    override def attemptManualRemoveFuse(): Boolean = attemptTransition(InternalOpen, InternalFuseRemoved)
    override def manualCircumvent(): Boolean = attemptTransition(InternalOpen, InternalCircumvented)
  }

  /**
   * Valid transitions:
   * FuseRemoved -> HalfOpen
   * FuseRemoved -> Destroyed
   * FuseRemoved -> Circumvented
   */
  private object InternalFuseRemoved extends InternalState {
    private var enteredNanos = 0L
    override def publicState = CircuitState.FuseRemoved(forDuration)
    def invoke[T](surrogate: => AlmValidation[T], body: => AlmValidation[T]): AlmValidation[T] =
      surrogate
    override def callSucceeds() {
    }
    override def callFails() {
    }
    private def forDuration(): FiniteDuration = {
      val elapsedNanos = System.nanoTime() - enteredNanos
      if (elapsedNanos <= 0L) Duration.Zero
      else elapsedNanos.nanos
    }
    override def _enter() {
      enteredNanos = System.nanoTime()
    }
    override def attemptManualClose(): Boolean = attemptTransition(InternalFuseRemoved, InternalHalfOpen)
    override def attemptManualDestroyFuse(): Boolean = attemptTransition(InternalFuseRemoved, InternalDestroyed)
    override def attemptManualRemoveFuse(): Boolean = false
    override def manualCircumvent(): Boolean = attemptTransition(InternalFuseRemoved, InternalCircumvented)
  }

  /** Terminal state: no transitions out. */
  private object InternalDestroyed extends InternalState {
    private var enteredNanos = 0L
    override def publicState = CircuitState.Destroyed(forDuration)
    def invoke[T](surrogate: => AlmValidation[T], body: => AlmValidation[T]): AlmValidation[T] =
      surrogate
    override def callSucceeds() {
    }
    override def callFails() {
    }
    private def forDuration(): FiniteDuration = {
      val elapsedNanos = System.nanoTime() - enteredNanos
      if (elapsedNanos <= 0L) Duration.Zero
      else elapsedNanos.nanos
    }
    override def _enter() {
      enteredNanos = System.nanoTime()
    }
    override def attemptManualClose(): Boolean = false
    override def attemptManualDestroyFuse(): Boolean = false
    override def attemptManualRemoveFuse(): Boolean = false
    override def manualCircumvent(): Boolean = false
  }

  /** Bypass state: calls run directly, bypassing failure accounting entirely. */
  private object InternalCircumvented extends InternalState {
    private var enteredNanos = 0L
    override def publicState = CircuitState.Circumvented(forDuration)
    def invoke[T](surrogate: => AlmValidation[T], body: => AlmValidation[T]): AlmValidation[T] =
      body
    override def callSucceeds() {
    }
    override def callFails() {
    }
    private def forDuration(): FiniteDuration = {
      val elapsedNanos = System.nanoTime() - enteredNanos
      if (elapsedNanos <= 0L) Duration.Zero
      else elapsedNanos.nanos
    }
    override def _enter() {
      enteredNanos = System.nanoTime()
    }
    override def attemptManualClose(): Boolean = attemptTransition(InternalCircumvented, InternalHalfOpen)
    override def attemptManualDestroyFuse(): Boolean = attemptTransition(InternalCircumvented, InternalDestroyed)
    override def attemptManualRemoveFuse(): Boolean = attemptTransition(InternalCircumvented, InternalFuseRemoved)
    override def manualCircumvent(): Boolean = false
  }
}
package com.github.mdr.mash.ns.os.pathClass
import java.nio.file.Files
import java.nio.file.attribute.PosixFilePermission
import com.github.mdr.mash.functions._
import com.github.mdr.mash.ns.core.NoArgFunction
import com.github.mdr.mash.ns.core.NoArgFunction.NoArgValue
import com.github.mdr.mash.ns.os.PermissionsClass
import com.github.mdr.mash.os.linux.LinuxFileSystem
import com.github.mdr.mash.runtime.{ MashObject, MashValue }
import scala.collection.JavaConverters._
/** Mash method `path.setExecutable`: toggles the POSIX execute/search bit on a path. */
object SetExecutableMethod extends MashMethod("setExecutable") {

  private val fileSystem = LinuxFileSystem

  object Params {
    // -a/--all: shorthand applying the change to owner, group and others.
    val All: Parameter = Parameter(
      nameOpt = Some("all"),
      summaryOpt = Some("Set the bit for all (owner, group and others) (default false)"),
      shortFlagOpt = Some('a'),
      isFlag = true,
      defaultValueGeneratorOpt = Some(NoArgValue),
      isBooleanFlag = true)
    // -u/--owner: change the owner's execute bit.
    val Owner: Parameter = Parameter(
      nameOpt = Some("owner"),
      summaryOpt = Some("Set the bit for the owner (default false)"),
      shortFlagOpt = Some('u'),
      isFlag = true,
      defaultValueGeneratorOpt = Some(NoArgValue),
      isBooleanFlag = true)
    // -g/--group: change the group's execute bit.
    val Group: Parameter = Parameter(
      nameOpt = Some("group"),
      summaryOpt = Some("Set the bit for the group (default false)"),
      shortFlagOpt = Some('g'),
      isFlag = true,
      defaultValueGeneratorOpt = Some(NoArgValue),
      isBooleanFlag = true)
    // -o/--others: change the execute bit for everyone else.
    val Others: Parameter = Parameter(
      nameOpt = Some("others"),
      summaryOpt = Some("Set the bit for others (default false)"),
      shortFlagOpt = Some('o'),
      isFlag = true,
      defaultValueGeneratorOpt = Some(NoArgValue),
      isBooleanFlag = true)
    // Positional: the boolean value to set the bit(s) to; defaults to true.
    val Value: Parameter = Parameter(
      nameOpt = Some("value"),
      summaryOpt = Some("The value for the bit (default true)"),
      defaultValueGeneratorOpt = Some(true))
  }
  import Params._

  val params = ParameterModel(Owner, Group, Others, All, Value)

  /** Applies the requested execute-bit change to the target path and returns
    * the path's updated permissions as a Mash object. */
  def call(target: MashValue, boundParams: BoundParams): MashObject = {
    val path = FunctionHelpers.interpretAsPath(target)
    val value = boundParams(Value).isTruthy
    val parties = getParties(boundParams)
    val existingPermissions = Files.getPosixFilePermissions(path).asScala.toSet
    val newPermissionSet = calculateNewPermissionSet(value, parties, existingPermissions)
    Files.setPosixFilePermissions(path, newPermissionSet.asJava)
    val summary = fileSystem.getPathSummary(path)
    PermissionsClass.asMashObject(summary.permissions)
  }

  /** Resolves the party flags. --all implies owner, group and others; when no
    * party flag is given at all, only the owner bit is changed. */
  def getParties(boundParams: BoundParams): Parties = {
    val allOpt = NoArgFunction.option(boundParams(All)).map(_.isTruthy)
    val ownerOpt = NoArgFunction.option(boundParams(Owner)).map(_.isTruthy)
    val groupOpt = NoArgFunction.option(boundParams(Group)).map(_.isTruthy)
    val othersOpt = NoArgFunction.option(boundParams(Others)).map(_.isTruthy)
    val all = allOpt getOrElse false
    // Owner defaults to true when no party flag was supplied at all.
    val owner = all || (ownerOpt getOrElse false) || Seq(allOpt, ownerOpt, groupOpt, othersOpt).flatten.isEmpty
    val group = all || (groupOpt getOrElse false)
    val others = all || (othersOpt getOrElse false)
    Parties(owner, group, others)
  }

  // Adds or removes the selected parties' execute permissions from the existing set.
  private def calculateNewPermissionSet(value: Boolean,
                                        parties: Parties,
                                        existingPermissionSet: Set[PosixFilePermission]): Set[PosixFilePermission] =
    if (value)
      existingPermissionSet ++ parties.executePermissionSet
    else
      existingPermissionSet diff parties.executePermissionSet

  override def typeInferenceStrategy = PermissionsClass

  override def summaryOpt = Some("Set the execute/search bit on the given path")

  override def descriptionOpt = Some(
    s"""If none of the --$Owner, --$Group, --$Others or --$All flags are specified, then the owner bit is changed.
       |
       |Returns the updated permissions for the path.
       |
       |Examples:
       |<mash>
       |  path.setExecutable                   # Set permission for the owner to true
       |  path.setExecutable false             # Set permission for the owner to false
       |  path.setExecutable --group --others  # Set permission for group and others to true
       |  path.setExecutable -og               # Set permission for group and others to true
       |  path.setExecutable --all false       # Set permission for everyone to false
       |</mash>""".stripMargin)
}
| mdr/mash | src/main/scala/com/github/mdr/mash/ns/os/pathClass/SetExecutableMethod.scala | Scala | mit | 4,443 |
package org.openmole.site
/*
* Copyright (C) 2015 Romain Reuillon
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/** Static configuration for the OpenMOLE web site generator. */
object Config {

  /** Base URL of the OpenMOLE web site. */
  val baseURL = "http://www.openmole.org"

  /** Maps an OpenMOLE release version to the URL listing the issues closed in
    * that release. An empty string means no milestone page exists for the
    * release. (Entries previously mixed `->` with the Unicode arrow `→`;
    * normalized to `->` for consistency.)
    */
  lazy val closedIssues = Map(
    "12" -> "https://github.com/openmole/openmole/milestone/10",
    "10" -> "https://github.com/openmole/openmole/milestone/8",
    "9" -> "https://github.com/openmole/openmole/milestone/7?closed=1",
    "8" -> "https://github.com/openmole/openmole/milestone/4?closed=1",
    "7" -> "https://github.com/openmole/openmole/milestone/3?closed=1",
    "6.2" -> "",
    "6.1" -> "https://github.com/openmole/openmole/milestone/6?closed=1",
    "6.0" -> "https://github.com/openmole/openmole/milestone/2?closed=1"
  )
}
| openmole/openmole | openmole/bin/org.openmole.site/jvm/src/main/scala/org/openmole/site/Config.scala | Scala | agpl-3.0 | 1,352 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution
import org.apache.spark.sql.catalyst.expressions.UnsafeRow
import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, ExprCode}
import org.apache.spark.sql.execution.columnar.InMemoryTableScanExec
import org.apache.spark.sql.execution.metric.SQLMetrics
import org.apache.spark.sql.execution.vectorized.{ColumnarBatch, ColumnVector}
import org.apache.spark.sql.types.DataType
/**
* Helper trait for abstracting scan functionality using
* [[org.apache.spark.sql.execution.vectorized.ColumnarBatch]]es.
*/
private[sql] trait ColumnarBatchScan extends CodegenSupport {

  // Set by in-memory relation scans; null for file-based scans.
  val inMemoryTableScan: InMemoryTableScanExec = null

  // Optional concrete ColumnVector subclass names, one per output column; when
  // None the generic ColumnVector interface is used in the generated code.
  def vectorTypes: Option[Seq[String]] = None

  override lazy val metrics = Map(
    "numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"),
    "scanTime" -> SQLMetrics.createTimingMetric(sparkContext, "scan time"))

  /**
   * Generate [[ColumnVector]] expressions for our parent to consume as rows.
   * This is called once per [[ColumnarBatch]].
   */
  private def genCodeColumnVector(
      ctx: CodegenContext,
      columnVar: String,
      ordinal: String,
      dataType: DataType,
      nullable: Boolean): ExprCode = {
    val javaType = ctx.javaType(dataType)
    val value = ctx.getValue(columnVar, dataType, ordinal)
    // Nullable columns need a per-row null check; non-nullable ones hard-code "false".
    val isNullVar = if (nullable) { ctx.freshName("isNull") } else { "false" }
    val valueVar = ctx.freshName("value")
    val str = s"columnVector[$columnVar, $ordinal, ${dataType.simpleString}]"
    val code = s"${ctx.registerComment(str)}\n" + (if (nullable) {
      s"""
        boolean $isNullVar = $columnVar.isNullAt($ordinal);
        $javaType $valueVar = $isNullVar ? ${ctx.defaultValue(dataType)} : ($value);
      """
    } else {
      s"$javaType $valueVar = $value;"
    }).trim
    ExprCode(code, isNullVar, valueVar)
  }

  /**
   * Produce code to process the input iterator as [[ColumnarBatch]]es.
   * This produces an [[UnsafeRow]] for each row in each batch.
   */
  // TODO: return ColumnarBatch.Rows instead
  override protected def doProduce(ctx: CodegenContext): String = {
    val input = ctx.freshName("input")
    // PhysicalRDD always just has one input
    ctx.addMutableState("scala.collection.Iterator", input, s"$input = inputs[0];")

    // metrics
    val numOutputRows = metricTerm(ctx, "numOutputRows")
    val scanTimeMetric = metricTerm(ctx, "scanTime")
    val scanTimeTotalNs = ctx.freshName("scanTime")
    ctx.addMutableState("long", scanTimeTotalNs, s"$scanTimeTotalNs = 0;")

    val columnarBatchClz = classOf[ColumnarBatch].getName
    val batch = ctx.freshName("batch")
    ctx.addMutableState(columnarBatchClz, batch, s"$batch = null;")

    // batchIdx tracks the next row to consume within the current batch.
    val idx = ctx.freshName("batchIdx")
    ctx.addMutableState("int", idx, s"$idx = 0;")
    val colVars = output.indices.map(i => ctx.freshName("colInstance" + i))
    val columnVectorClzs = vectorTypes.getOrElse(
      Seq.fill(colVars.size)(classOf[ColumnVector].getName))
    val columnAssigns = colVars.zip(columnVectorClzs).zipWithIndex.map {
      case ((name, columnVectorClz), i) =>
        ctx.addMutableState(columnVectorClz, name, s"$name = null;")
        s"$name = ($columnVectorClz) $batch.column($i);"
    }

    // Fetches the next batch from the iterator and rebinds the column vectors;
    // time spent here is accumulated into the scanTime metric.
    val nextBatch = ctx.freshName("nextBatch")
    val nextBatchFuncName = ctx.addNewFunction(nextBatch,
      s"""
         |private void $nextBatch() throws java.io.IOException {
         |  long getBatchStart = System.nanoTime();
         |  if ($input.hasNext()) {
         |    $batch = ($columnarBatchClz)$input.next();
         |    $numOutputRows.add($batch.numRows());
         |    $idx = 0;
         |    ${columnAssigns.mkString("", "\n", "\n")}
         |  }
         |  $scanTimeTotalNs += System.nanoTime() - getBatchStart;
         |}""".stripMargin)

    ctx.currentVars = null
    val rowidx = ctx.freshName("rowIdx")
    val columnsBatchInput = (output zip colVars).map { case (attr, colVar) =>
      genCodeColumnVector(ctx, colVar, rowidx, attr.dataType, attr.nullable)
    }
    val localIdx = ctx.freshName("localIdx")
    val localEnd = ctx.freshName("localEnd")
    val numRows = ctx.freshName("numRows")
    // When a downstream operator can stop early, persist the resume position
    // before returning from the produce loop.
    val shouldStop = if (isShouldStopRequired) {
      s"if (shouldStop()) { $idx = $rowidx + 1; return; }"
    } else {
      "// shouldStop check is eliminated"
    }
    s"""
       |if ($batch == null) {
       |  $nextBatchFuncName();
       |}
       |while ($batch != null) {
       |  int $numRows = $batch.numRows();
       |  int $localEnd = $numRows - $idx;
       |  for (int $localIdx = 0; $localIdx < $localEnd; $localIdx++) {
       |    int $rowidx = $idx + $localIdx;
       |    ${consume(ctx, columnsBatchInput).trim}
       |    $shouldStop
       |  }
       |  $idx = $numRows;
       |  $batch = null;
       |  $nextBatchFuncName();
       |}
       |$scanTimeMetric.add($scanTimeTotalNs / (1000 * 1000));
       |$scanTimeTotalNs = 0;
     """.stripMargin
  }

}
| minixalpha/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/ColumnarBatchScan.scala | Scala | apache-2.0 | 5,924 |
package org.jetbrains.plugins.scala.debugger.evaluateExpression
import org.jetbrains.plugins.scala.debugger.ScalaDebuggerTestCase
/**
* User: Alefas
* Date: 15.10.11
*/
class ScalaLocalMethodEvaluationTest extends ScalaDebuggerTestCase {
  // Evaluates a no-arg local function with no captured state.
  def testSimple() {
    addFileToProject("Sample.scala",
      """
        |object Sample {
        |  def main(args: Array[String]) {
        |    def foo: Int = 1
        |    "stop here"
        |  }
        |}
      """.stripMargin.trim()
    )
    addBreakpoint("Sample.scala", 3)
    runDebugger("Sample") {
      waitForBreakpoint()
      evalEquals("foo", "1")
    }
  }

  // Evaluates a local function that takes a parameter and captures a local val.
  def testLocalWithParameters() {
    addFileToProject("Sample.scala",
      """
        |object Sample {
        |  def main(args: Array[String]) {
        |    val y = "test"
        |    def foo(x: Int): Int = x + y.length
        |    "stop here"
        |  }
        |}
      """.stripMargin.trim()
    )
    addBreakpoint("Sample.scala", 4)
    runDebugger("Sample") {
      waitForBreakpoint()
      evalEquals("foo(3)", "7")
    }
  }

  // Evaluates a no-arg local function capturing one enclosing local.
  def testSimpleLocalWithParams() {
    addFileToProject("Sample.scala",
      """
        |object Sample {
        |  def main(args: Array[String]) {
        |    val x = 2
        |    def foo: Int = x - 1
        |    "stop here"
        |  }
        |}
      """.stripMargin.trim()
    )
    addBreakpoint("Sample.scala", 4)
    runDebugger("Sample") {
      waitForBreakpoint()
      evalEquals("foo", "1")
    }
  }

  // Captured locals of different types, declared value-first.
  def testSimpleLocalWithDiffParams1() {
    addFileToProject("Sample.scala",
      """
        |object Sample {
        |  def main(args: Array[String]) {
        |    val x = 2
        |    val y = "c"
        |    def foo: Int = x - y.length()
        |    "stop here"
        |  }
        |}
      """.stripMargin.trim()
    )
    addBreakpoint("Sample.scala", 5)
    runDebugger("Sample") {
      waitForBreakpoint()
      evalEquals("foo", "1")
    }
  }

  // Same as above but with the captured locals declared in the opposite order.
  def testSimpleLocalWithDiffParams2() {
    addFileToProject("Sample.scala",
      """
        |object Sample {
        |  def main(args: Array[String]) {
        |    val y = "c"
        |    val x = 2
        |    def foo: Int = x - y.length()
        |    "stop here"
        |  }
        |}
      """.stripMargin.trim()
    )
    addBreakpoint("Sample.scala", 5)
    runDebugger("Sample") {
      waitForBreakpoint()
      evalEquals("foo", "1")
    }
  }

  // Captured locals used in a different order inside the local function body.
  def testSimpleLocalWithDiffParams3() {
    addFileToProject("Sample.scala",
      """
        |object Sample {
        |  def main(args: Array[String]) {
        |    val y = "c"
        |    val x = 2
        |    def foo: Int = - y.length() + x
        |    "stop here"
        |  }
        |}
      """.stripMargin.trim()
    )
    addBreakpoint("Sample.scala", 5)
    runDebugger("Sample") {
      waitForBreakpoint()
      evalEquals("foo", "1")
    }
  }
  // Local function capturing a local object.
  def testLocalWithLocalObject() {
    addFileToProject("Sample.scala",
      """
        |object Sample {
        |  def main(args: Array[String]) {
        |    object y {val y = 1}
        |    val x = 2
        |    def foo: Int = x - y.y
        |    "stop here"
        |  }
        |}
      """.stripMargin.trim()
    )
    addBreakpoint("Sample.scala", 5)
    runDebugger("Sample") {
      waitForBreakpoint()
      evalEquals("foo", "1")
    }
  }

  // Local function capturing an outer local while stopped inside nested lambdas.
  def testLocalWithField() {
    addFileToProject("Sample.scala",
      """
        |object Sample {
        |  def main(args: Array[String]) {
        |    val g = 1
        |    def moo(x: Int) = g + x
        |    val zz = (y: Int) => {
        |      val uu = (x: Int) => {
        |        g
        |        "stop here"
        |      }
        |      uu(1)
        |    }
        |    zz(2)
        |  }
        |}
      """.stripMargin.trim()
    )
    addBreakpoint("Sample.scala", 7)
    runDebugger("Sample") {
      waitForBreakpoint()
      evalEquals("moo(x)", "2")
    }
  }

  // Local function mixing a captured local with an enclosing object field.
  def testLocalFromAnonymous() {
    addFileToProject("Sample.scala",
      """
        |object Sample {
        |  val y = 1
        |  def main(args: Array[String]) {
        |    val x = 2
        |    def foo: Int = x - y
        |    "stop here"
        |  }
        |}
      """.stripMargin.trim()
    )
    addBreakpoint("Sample.scala", 5)
    runDebugger("Sample") {
      waitForBreakpoint()
      evalEquals("foo", "1")
    }
  }

  // Closure over an outer local, evaluated while stopped inside the closure itself.
  def testClojure() {
    addFileToProject("Sample.scala",
      """
        |object Sample {
        |  def main(args: Array[String]) {
        |    def outer() {
        |      val s = "start"
        |      def inner(a: String, b: String): String = {
        |        "stop here"
        |        s + a + b
        |      }
        |      inner("aa", "bb")
        |    }
        |    outer()
        |  }
        |}
      """.stripMargin.trim()
    )
    addBreakpoint("Sample.scala", 6)
    runDebugger("Sample") {
      waitForBreakpoint()
      evalEquals("a", "aa")
      evalEquals("b", "bb")
      evalEquals("s", "start")
      evalEquals("inner(\\"qq\\", \\"ww\\")", "startqqww")
    }
  }

  // Local function with default and named parameters evaluated at the breakpoint.
  def testLocalWithDefaultAndNamedParams() {
    addFileToProject("Sample.scala",
      """
        |object Sample {
        |  def main(args: Array[String]) {
        |    def outer() {
        |      def inner(a: String, b: String = "default", c: String = "other"): String = {
        |        "stop here"
        |        a + b + c
        |      }
        |      inner("aa")
        |    }
        |    outer()
        |  }
        |}
      """.stripMargin.trim()
    )
    addBreakpoint("Sample.scala", 5)
    runDebugger("Sample") {
      waitForBreakpoint()
      evalEquals("a", "aa")
      evalEquals("b", "default")
      evalEquals("c", "other")
      evalEquals("inner(\\"aa\\", \\"bb\\")", "aabbother")
      evalEquals("inner(\\"aa\\")", "aadefaultother")
      evalEquals("inner(\\"aa\\", c = \\"cc\\")", "aadefaultcc")
    }
  }
// def testLocalMethodsWithSameName() {
// addFileToProject("Sample.scala",
// """
// |object Sample {
// | def main(args: Array[String]) {
// | def foo(i: Int = 1) = {
// | def foo(j: Int = 2) = j
// | i
// | }
// | "stop"
// | def other() {
// | def foo(i: Int = 3) = i
// | "stop"
// | }
// | def third() {
// | def foo(i: Int = 4) = i
// | "stop"
// | }
// | foo()
// | other()
// | third()
// | }
// |}
// """.stripMargin.trim())
// addBreakpoint("Sample.scala", 4)
// addBreakpoint("Sample.scala", 6)
// addBreakpoint("Sample.scala", 9)
// addBreakpoint("Sample.scala", 13)
// runDebugger("Sample") {
// //todo test for multiple breakpoints?
// waitForBreakpoint()
// evalEquals("foo()", "1")
// waitForBreakpoint()
// evalEquals("foo()", "2")
//
//
// }
// }
//todo this test should work, but it doesn't (last two assertions)
// def testClojureWithDefaultParameter() {
// addFileToProject("Sample.scala",
// """
// |object Sample {
// | def main(args: Array[String]) {
// | def outer() {
// | val s = "start"
// | val d = "default"
// | def inner(a: String, b: String = d): String = {
// | "stop here"
// | s + a + b
// | }
// | inner("aa")
// | }
// | outer()
// | }
// |}
// """.stripMargin.trim()
// )
// addBreakpoint("Sample.scala", 6)
// runDebugger("Sample") {
// waitForBreakpoint()
// evalEquals("a", "aa")
// evalEquals("b", "default")
// evalEquals("s", "start")
// evalEquals("inner(\\"aa\\", \\"bb\\")", "startaabb")
// evalEquals("inner(\\"aa\\")", "startaadefault")
// }
// }
} | consulo/consulo-scala | test/org/jetbrains/plugins/scala/debugger/evaluateExpression/ScalaLocalMethodEvaluationTest.scala | Scala | apache-2.0 | 7,776 |
/**
* Copyright 2011-2017 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.jms.check
import javax.jms.Message
import scala.collection.mutable
import io.gatling.commons.validation._
import io.gatling.core.check.CheckResult
import io.gatling.core.session.Session
import io.gatling.jms._
/**
 * A [[JmsCheck]] backed by a plain boolean predicate over the received JMS [[Message]].
 *
 * @param func the predicate applied to the response message
 */
case class JmsSimpleCheck(func: Message => Boolean) extends JmsCheck {

  /**
   * Runs the predicate against the response.
   *
   * @return the shared no-op success [[CheckResult]] when the predicate holds,
   *         a [[Failure]] with a generic message otherwise
   */
  override def check(response: Message, session: Session)(implicit cache: mutable.Map[Any, Any]): Validation[CheckResult] =
    // Matching `case true / case _` on a Boolean is non-idiomatic; a plain `if` is clearer
    // and behaviorally identical.
    if (func(response)) CheckResult.NoopCheckResultSuccess
    else Failure("Jms check failed")
}
| timve/gatling | gatling-jms/src/main/scala/io/gatling/jms/check/JmsSimpleCheck.scala | Scala | apache-2.0 | 1,188 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.concurrent
import org.scalactic._
import java.util.concurrent.{TimeUnit, Future => FutureOfJava}
import org.scalatest.Resources
import org.scalatest.Suite.anExceptionThatShouldCauseAnAbort
import org.scalatest.exceptions.StackDepthException
import org.scalatest.exceptions.TestCanceledException
import org.scalatest.exceptions.{TestPendingException, TestFailedException, TimeoutField}
import org.scalatest.time.Span
/**
* Provides an implicit conversion from <code>java.util.concurrent.Future[T]</code> to
* <a href="Futures$FutureConcept.html"><code>FutureConcept[T]</code></a>.
*
* <p>
* This trait enables you to invoke the methods defined on <code>FutureConcept</code> on a Java <code>Future</code>, as well as to pass a Java future
* to the <code>whenReady</code> methods of supertrait <code>Futures</code>.
* See the documentation for supertrait <a href="Futures.html"><code>Futures</code></a> for the details on the syntax this trait provides
* for testing with Java futures.
* </p>
*
* @author Bill Venners
*/
trait JavaFutures extends Futures {

  import scala.language.implicitConversions

  /**
   * Implicitly converts a <code>java.util.concurrent.Future[T]</code> to
   * <code>FutureConcept[T]</code>, allowing you to invoke the methods
   * defined on <code>FutureConcept</code> on a Java <code>Future</code>, as well as to pass a Java future
   * to the <code>whenReady</code> methods of supertrait <a href="Futures.html"><code>Futures</code></a>.
   *
   * <p>
   * See the documentation for supertrait <a href="Futures.html"><code>Futures</code></a> for the details on the syntax this trait provides
   * for testing with Java futures.
   * </p>
   *
   * <p>If the <code>get</code> method of the underlying Java future throws <code>java.util.concurrent.ExecutionException</code>, and this
   * exception contains a non-<code>null</code> cause, that cause will be included in the <code>TestFailedException</code> as its cause. The <code>ExecutionException</code>
   * will be included as the <code>TestFailedException</code>'s cause only if the <code>ExecutionException</code>'s cause is <code>null</code>.
   * </p>
   *
   * <p>
   * The <code>isExpired</code> method of the returned <code>FutureConcept</code> will always return <code>false</code>, because
   * the underlying type, <code>java.util.concurrent.Future</code>, does not support the notion of a timeout. The <code>isCanceled</code>
   * method of the returned <code>FutureConcept</code> will return the result of invoking <code>isCancelled</code> on the underlying
   * <code>java.util.concurrent.Future</code>.
   * </p>
   *
   * @param javaFuture a <code>java.util.concurrent.Future[T]</code> to convert
   * @return a <code>FutureConcept[T]</code> wrapping the passed <code>java.util.concurrent.Future[T]</code>
   */
  implicit def convertJavaFuture[T](javaFuture: FutureOfJava[T]): FutureConcept[T] =
    new FutureConcept[T] {
      // NOTE(review): a Java future that completed with an exception will make
      // javaFuture.get throw ExecutionException here rather than produce a Left —
      // confirm that is the intended contract for eitherValue.
      def eitherValue: Option[Either[Throwable, T]] =
        if (javaFuture.isDone())
          Some(Right(javaFuture.get))
        else
          None
      def isExpired: Boolean = false // Java Futures don't support the notion of a timeout
      def isCanceled: Boolean = javaFuture.isCancelled // Two ll's in Canceled. The verbosity of Java strikes again!
      // TODO: Catch TimeoutException and wrap that in a TFE with ScalaTest's TimeoutException I think.
      // def awaitAtMost(span: Span): T = javaFuture.get(span.totalNanos, TimeUnit.NANOSECONDS)
      // Blocks the calling thread until the Java future yields a value, the configured
      // patience timeout elapses, or the future turns out to have been cancelled.
      override private[concurrent] def futureValueImpl(pos: source.Position)(implicit config: PatienceConfig): T = {
        /*val adjustment =
          if (methodName == "whenReady")
            3
          else
            0*/
        // NOTE(review): java.util.concurrent.Future declares isCancelled (two l's); this
        // one-l `isCanceled` call on the raw Java future appears to resolve by re-applying
        // the enclosing implicit conversion — confirm, and consider calling isCancelled
        // directly for clarity.
        if (javaFuture.isCanceled)
          throw new TestFailedException(
            (_: StackDepthException) => Some(Resources.futureWasCanceled),
            None,
            pos
          )
        try {
          javaFuture.get(config.timeout.totalNanos, TimeUnit.NANOSECONDS)
        }
        catch {
          case e: java.util.concurrent.TimeoutException =>
            // Patience timeout: surface the configured timeout via the TimeoutField mixin.
            // NOTE(review): the attempt count passed to wasNeverReady is hard-coded to 1 —
            // verify against the Resources.wasNeverReady signature.
            throw new TestFailedException(
              (_: StackDepthException) => Some(Resources.wasNeverReady(1, config.interval.prettyString)),
              None,
              pos
            ) with TimeoutField {
              val timeout: Span = config.timeout
            }
          case e: java.util.concurrent.ExecutionException =>
            // Unwrap the ExecutionException: report its cause when present, otherwise the
            // ExecutionException itself.
            val cause = e.getCause
            val exToReport = if (cause == null) e else cause
            // Aborting / pending / canceled exceptions must propagate unchanged so that
            // suite-level control flow (abort, pending, cancel) keeps working.
            if (anExceptionThatShouldCauseAnAbort(exToReport) || exToReport.isInstanceOf[TestPendingException] || exToReport.isInstanceOf[TestCanceledException]) {
              throw exToReport
            }
            throw new TestFailedException(
              (_: StackDepthException) => Some {
                if (exToReport.getMessage == null)
                  Resources.futureReturnedAnException(exToReport.getClass.getName)
                else
                  Resources.futureReturnedAnExceptionWithMessage(exToReport.getClass.getName, exToReport.getMessage)
              },
              Some(exToReport),
              pos
            )
        }
      }
    }
}
| dotty-staging/scalatest | scalatest/src/main/scala/org/scalatest/concurrent/JavaFutures.scala | Scala | apache-2.0 | 5,843 |
package org.deepdive.extraction.datastore
import org.deepdive.Logging
import scala.collection.mutable.{Map => MMap, ArrayBuffer}
import play.api.libs.json._
/* Stores Extraction Results */
trait MemoryExtractionDataStoreComponent extends ExtractionDataStoreComponent{
val dataStore = new MemoryExtractionDataStore
  /**
   * An in-memory extraction data store: rows are kept in a mutable map keyed by output
   * relation name. Intended for testing / small runs; nothing is persisted.
   */
  class MemoryExtractionDataStore extends ExtractionDataStore[JsObject] with Logging {

    // Nominal batch size; this in-memory store does not actually chunk its writes.
    def BatchSize = 100000

    // All stored rows, keyed by output relation name.
    val data = MMap[String, ArrayBuffer[JsObject]]()

    def init() = {
      data.clear()
    }

    // Note: `query` is used as a relation-name key into `data`, not executed as a query.
    // Unknown relations yield an empty iterator.
    def queryAsJson[A](query: String, batchSize: Option[Int] = None)
      (block: Iterator[JsObject] => A) : A = {
      block(data.get(query).map(_.toList).getOrElse(Nil).iterator)
    }

    // Converts JSON leaf values to plain Scala values; anything without a case below
    // (nested arrays/objects) is mapped to null.
    def queryAsMap[A](query: String, batchSize: Option[Int] = None)
      (block: Iterator[Map[String, Any]] => A) : A = {
      queryAsJson(query) { iter =>
        block(iter.map(_.value.toMap.mapValues {
          case JsNull => null
          case JsString(x) => x
          // x is Play's underlying numeric representation (presumably BigDecimal — confirm
          // against the play-json version in use).
          case JsNumber(x) => x
          case JsBoolean(x) => x
          case _ => null
        }))
      }
    }

    // Updates are meaningless for the in-memory store; deliberate no-op.
    def queryUpdate(query: String) {}

    /** Appends a batch of rows to the given relation, creating the relation on first use. */
    def addBatch(result: Iterator[JsObject], outputRelation: String) : Unit = {
      //TODO: Use parallel collection
      // Synchronized on the map itself — presumably addBatch can be called from multiple
      // extractor threads; confirm against callers.
      data.synchronized {
        data.get(outputRelation) match {
          case Some(rows) => rows ++= result.toSeq
          case None => data += Tuple2(outputRelation, ArrayBuffer(result.toList: _*))
        }
      }
    }
  }
} | feiranwang/deepdive | src/main/scala/org/deepdive/extraction/datastore/MemoryExtractionDataStore.scala | Scala | apache-2.0 | 1,522 |
/*
* Copyright (c) 2016 SnappyData, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package org.apache.spark.sql.streaming
import scala.reflect.ClassTag
import org.apache.spark.sql.Row
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.{Duration, Time}
private[streaming]
class MappedDStream[U: ClassTag] (
parent: SchemaDStream,
mapFunc: Row => U
) extends DStream[U](parent.snsc) {
override def dependencies: List[DStream[_]] = List(parent)
override def slideDuration: Duration = parent.slideDuration
override def compute(validTime: Time): Option[RDD[U]] = {
parent.compute(validTime).map(_.map[U](mapFunc))
}
} | vjr/snappydata | core/src/main/scala/org/apache/spark/sql/streaming/MappedDStream.scala | Scala | apache-2.0 | 1,275 |
package drt.shared
import drt.shared.Terminals._
import drt.shared.TimeUtil._
import drt.shared.api.{TerminalWalkTimes, WalkTime, WalkTimes}
import org.specs2.mutable.Specification
/**
 * Specs for walk-time formatting and for resolving an arrival's walk-time string from
 * gate/stand walk-time fixtures.
 */
class WalkTimesSpec extends Specification {

  // Sub-minute remainders are truncated by millisToMinutes (90s -> 1 minute).
  "When formatting a walk time as minutes and seconds" >> {
    "Given a round minute I should get back the minute with nos mentioned" >> {
      val millis = 60000L
      val result: String = MinuteAsNoun(millisToMinutes(millis)).display

      result === "1 minute"
    }

    "Given 90s I should get back 1 minute" >> {
      val millis = 90000L
      val result: String = MinuteAsNoun(millisToMinutes(millis)).display

      result === "1 minute"
    }
  }

  // Shared fixtures: walk times (millis) per stand/gate and terminal.
  val stand1T1 = WalkTime("stand1", T1, 10000L)
  val stand2T1 = WalkTime("stand2", T1, 40000L)
  val stand1T2 = WalkTime("stand1", T2, 20000L)

  val gate1T1 = WalkTime("gate1", T1, 10000L)
  val gate2T1 = WalkTime("gate2", T1, 20000L)
  val gate1T2 = WalkTime("gate1", T2, 40000L)

  "Given a seq of standWalkTimes and a seq of gateWalkTimes I should get back a WalktTimes case class" >> {
    val standWalkTimes = Seq(
      stand1T1,
      stand2T1,
      stand1T2,
    )

    val gateWalkTimes = Seq(
      gate1T1,
      gate2T1,
      gate1T2,
    )

    val result = WalkTimes(gateWalkTimes, standWalkTimes)

    // WalkTimes groups by terminal, then indexes by gate/stand name within the terminal.
    val expected = WalkTimes(
      Map(
        T2 -> TerminalWalkTimes(
          Map(
            "gate1" -> gate1T2
          ),
          Map(
            "stand1" -> stand1T2
          ),
        ),
        T1 -> TerminalWalkTimes(
          Map(
            "gate1" -> gate1T1,
            "gate2" -> gate2T1
          ),
          Map(
            "stand1" -> stand1T1,
            "stand2" -> stand2T1
          ),
        ),
      ),
    )

    result === expected
  }

  "When getting a walk time for an arrival" >> {
    val gateWalkTimes = Seq(
      gate1T1,
      gate2T1,
      gate1T2,
    )

    val standWalkTimes = Seq(
      stand1T1,
      stand2T1,
      stand1T2,
    )

    val wt = WalkTimes(gateWalkTimes, standWalkTimes)

    // 300000ms = 5 minutes: the terminal default used when neither gate nor stand matches.
    val walkTimeProvider: (Option[String], Option[String], Terminal) => String = wt.walkTimeStringForArrival(300000L)

    "Given a gate and no stand I should get back the gate walk time" >> {
      val result = walkTimeProvider(Option("gate1"), None, T1)

      result === s"${gate1T1.inMinutes} minute walk time"
    }

    "Given a stand and no gate I should get back the stand walk time" >> {
      val result = walkTimeProvider(None, Option("stand1"), T1)

      result === s"${stand1T1.inMinutes} minute walk time"
    }

    // The gate takes precedence when both are known.
    "Given a gate and a stand I should get back the gate walk time" >> {
      val result = walkTimeProvider(Option("gate1"), Option("stand1"), T1)

      result === s"${gate1T1.inMinutes} minute walk time"
    }

    "Given no gate or stand I should get back the default walk time" >> {
      val result = walkTimeProvider(None, None, T1)

      result === "5 minutes (default walk time for terminal)"
    }

    "Given a non existent gate I should get back the default walk time" >> {
      val result = walkTimeProvider(Option("notValid"), None, T1)

      result === "5 minutes (default walk time for terminal)"
    }

    "Given a non existent stand I should get back the default walk time" >> {
      val result = walkTimeProvider(None, Option("notValid"), T1)

      result === "5 minutes (default walk time for terminal)"
    }

    "Given a non existent gate and a valid stand I should get back the stand time" >> {
      val result = walkTimeProvider(Option("notValid"), Option("stand1"), T1)

      result === s"${stand1T1.inMinutes} minute walk time"
    }

    "Given a non existent stand and a valid gate I should get back the gate time" >> {
      val result = walkTimeProvider(Option("gate1"), Option("notValid"), T1)

      result === s"${gate1T1.inMinutes} minute walk time"
    }
  }
}
| UKHomeOffice/drt-scalajs-spa-exploration | server/src/test/scala/drt/shared/WalkTimesSpec.scala | Scala | apache-2.0 | 3,903 |
package com.socrata.http.server.util
/**
 * Renders a [[Precondition]] into the HTTP conditional-request headers
 * (`If-Match` / `If-None-Match`) that express it.
 */
object PreconditionRenderer extends (Precondition => Seq[(String, String)]) {
  def apply(precondition: Precondition): Seq[(String, String)] = precondition match {
    case NoPrecondition =>
      Nil
    case IfDoesNotExist =>
      List("If-None-Match" -> "*")
    case IfExists =>
      List("If-Match" -> "*")
    case IfAnyOf(etags) =>
      // Multiple acceptable tags are joined into one comma-separated header value.
      val rendered = etags.map(EntityTagRenderer).mkString(",")
      List("If-Match" -> rendered)
    case IfNoneOf(etags) =>
      val rendered = etags.map(EntityTagRenderer).mkString(",")
      List("If-None-Match" -> rendered)
    case AndPrecondition(a, b) =>
      // A conjunction simply renders both sides' headers.
      apply(a) ++ apply(b)
  }
}
| socrata-platform/socrata-http | socrata-http-server/src/main/scala/com/socrata/http/server/util/PreconditionRenderer.scala | Scala | apache-2.0 | 581 |
/*
* Copyright 2015 Roberto Tyley
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.madgag.rfc5988link
import com.madgag.rfc5988link.LinkParser.linkValues
import fastparse._
import okhttp3.HttpUrl
import org.scalatest.flatspec._
import org.scalatest.matchers._
/** Tests the RFC 5988 `Link` header parser against the spec's examples and a real-world GitHub response. */
class LinkParserTest extends AnyFlatSpec with should.Matchers {

  "FastParse" should "parse examples from RFC 5988 Section 5.5" in {
    // Destructuring Parsed.Success: the test fails with a MatchError if parsing fails.
    val Parsed.Success(value, _) = {
      parse("<http://example.com/TheBook/chapter2>; rel=\"previous\"; title=\"previous chapter\"", linkValues(_))
    }

    value should contain only LinkTarget(
      HttpUrl.parse("http://example.com/TheBook/chapter2"),
      Seq(
        "rel" -> "previous",
        "title" -> "previous chapter"
      )
    )
  }

  it should "parse a typical GitHub pagination response" in {
    val Parsed.Success(value, _) =
      parse("<https://api.github.com/user/52038/repos?page=2>; rel=\"next\", <https://api.github.com/user/52038/repos?page=4>; rel=\"last\"", linkValues(_))

    // Link order is significant for pagination, hence inOrderOnly.
    value should contain inOrderOnly(
      LinkTarget(
        HttpUrl.parse("https://api.github.com/user/52038/repos?page=2"),
        Seq("rel" -> "next")
      ),
      LinkTarget(
        HttpUrl.parse("https://api.github.com/user/52038/repos?page=4"),
        Seq("rel" -> "last")
      )
    )
  }
}
| rtyley/play-git-hub | src/test/scala/com/madgag/rfc5988link/LinkParserTest.scala | Scala | gpl-3.0 | 1,841 |
package inloopio.util.pinyin
import java.io.UnsupportedEncodingException
import inloopio.util.pinyin.format.HanyuPinyinCaseType
import inloopio.util.pinyin.format.HanyuPinyinOutputFormat
import inloopio.util.pinyin.format.HanyuPinyinToneType
import inloopio.util.pinyin.format.HanyuPinyinVCharType
/**
 * Helpers for converting Chinese strings to pinyin spellings: full spellings and
 * first-letter (initials) spellings, each returned as a Set because polyphonic
 * characters have several readings.
 */
object PinYin {

  // Pinyin rendering configuration: 'v' stands in for u-umlaut, upper-case letters, no tone marks.
  private val outFormat = HanyuPinyinOutputFormat(
    HanyuPinyinVCharType.WITH_V,
    HanyuPinyinCaseType.UPPERCASE,
    HanyuPinyinToneType.WITHOUT_TONE)

  /**
   * Returns all pinyin spellings of a single character. Characters that encode to a single
   * GB2312 byte (ASCII/half-width) have no pinyin and are returned as themselves.
   */
  def getCnSpells(cn: Char): Set[String] = {
    try {
      val bytes = String.valueOf(cn).getBytes("GB2312")
      if (bytes.length < 2) {
        // NOTE(review): new String(bytes) decodes with the platform default charset —
        // confirm this is safe for the single-byte case.
        Set(new String(bytes))
      } else {
        val spells = PinyinHelper.toHanyuPinyins(cn, outFormat)
        if (spells.nonEmpty) {
          spells
        } else Set(String.valueOf(cn))
      }
    } catch {
      // Fall back to the raw character if GB2312 is unavailable on this JVM.
      case ex: UnsupportedEncodingException => Set(String.valueOf(cn))
    }
  }

  /**
   * Returns every full-pinyin spelling of the string: the cartesian product of the
   * per-character spelling sets (polyphonic characters multiply the result set).
   * Null/blank input is returned unchanged in a singleton set.
   */
  def getFullSpells(cnStr: String): Set[String] = {
    if (cnStr == null || cnStr.trim == "") return Set(cnStr)

    var allSpells = Set("")
    val chars = cnStr.toCharArray
    var i = 0
    while (i < chars.length) {
      // Full-width characters are normalized to half-width before lookup.
      val spells = getCnSpells(toDBC(chars(i)))
      var newSpells = Set[String]()
      for (spell <- spells) {
        for (prevSpell <- allSpells) {
          newSpells += prevSpell + spell
        }
      }
      allSpells = newSpells
      i += 1
    }
    allSpells
  }

  /**
   * Returns every initials-only spelling of the string (first letter of each character's
   * pinyin). Spaces are skipped; note getFullSpells does not skip them.
   */
  def getFirstSpells(cnStr: String): Set[String] = {
    if (cnStr == null || cnStr.trim == "") return Set(cnStr)

    var allSpells = Set("")
    val chars = cnStr.toCharArray
    var i = 0
    while (i < chars.length) {
      toDBC(chars(i)) match {
        case ' ' =>
        case c =>
          val spells = getCnSpells(c)
          var newSpells = Set[String]()
          for (spell <- spells) {
            // Guard against an empty spelling string before taking its first char.
            val first = if (spell.length > 0) {
              spell.charAt(0)
            } else c
            for (prevSpell <- allSpells) {
              newSpells += prevSpell + first
            }
          }
          allSpells = newSpells
      }
      i += 1
    }
    allSpells
  }

  /**
   * Convert full-width character (SBC case) to half-width character (DBC case)
   */
  private def toDBC(c: Char): Char = c match {
    case '\u3000' => ' '
    // Full-width forms U+FF01..U+FF5E map to ASCII by subtracting 0xFEE0 (65248).
    case c if c > '\uFF00' && c < '\uFF5F' => (c - 65248).toChar
    case c => c
  }

  /**
   * Convert full-width character (SBC case) to half-width character (DBC case)
   */
  private def toDBC(input: String): Array[Char] = {
    val chars = input.toCharArray
    var i = 0
    while (i < chars.length) {
      chars(i) = toDBC(chars(i))
      i += 1
    }
    chars
  }

  // Ad-hoc demo: prints first-letter and full spellings plus timing for sample strings.
  // NOTE(review): the literals below look mis-encoded (mojibake) — confirm the file's
  // source encoding before editing them.
  def main(args: Array[String]) {
    val test = getCnSpells('ι')
    test foreach println

    val t1 = System.currentTimeMillis
    val ss = Array(
      "桦ειΆθ‘",
      "桦 ε ιΆ θ‘",
      "θ₯ΏθηΏδΈ",
      "This is δΏε½",
      "δΈθ―ζζ°",
      "δΈε½η³ζ²Ή",
      "οΌ³οΌ΄οΌδΈζ΅·",
      "ε½εΊοΌοΌοΌ",
      "δΈη§οΌ‘")
    for (s <- ss) {
      val s1 = toDBC(s)
      println(new String(s1) + ": " + getFirstSpells(s).mkString("(", ",", ")"))
      println(new String(s1) + ": " + getFullSpells(s).mkString("(", ",", ")"))
    }
    println("Time used: " + (System.currentTimeMillis - t1) + "ms")
  }
}
| dcaoyuan/inloopio-libs | inloopio-util/src/main/scala/inloopio/util/pinyin/PinYin.scala | Scala | bsd-3-clause | 3,367 |
/* __ *\\
** ________ ___ / / ___ __ ____ Scala.js Test Suite **
** / __/ __// _ | / / / _ | __ / // __/ (c) 2013-2015, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ |/_// /_\\ \\ http://scala-js.org/ **
** /____/\\___/_/ |_/____/_/ | |__/ /____/ **
** |/____/ **
\\* */
package org.scalajs.testsuite.library
import scala.scalajs.js
import scala.scalajs.js.annotation._
import org.scalajs.jasminetest.JasmineTest
import org.scalajs.testsuite.Typechecking._
object UseAsTest extends JasmineTest {
  // Success cases for js.use(x).as[T] where x is a Scala type: each `it` defines a local
  // Scala class with the needed @JSExport annotations and asserts that the macro typecheck
  // succeeds (compilation of js.use(...).as[...] is itself the assertion).
  describe("js.use(x).as[T] - Scala Types - success cases") {

    it("should support basic typechecking") {
      class A {
        @JSExport
        def m(a: Int, b: String): js.Object = ???
      }

      js.use(new A).as[JSBasic]
    }

    it("should support covariance in return types") {
      class A {
        @JSExport
        def m(a: Int, b: String): js.Array[Int] = ???
      }

      js.use(new A).as[JSBasic]
    }

    it("should support contravariance in argument") {
      class A {
        @JSExport
        def m(a: Int, b: Any): js.Object = ???
      }

      js.use(new A).as[JSBasic]
    }

    it("should support explicit names in JSExports") {
      class A {
        @JSExport("m")
        def f(a: Int, b: String): js.Object = ???
      }

      js.use(new A).as[JSBasic]
    }

    it("should support JSName") {
      class A {
        @JSExport
        def m(a: Int, b: String): js.Object = ???
      }

      class B {
        @JSExport("m")
        def bar(a: Int, b: String): js.Object = ???
      }

      js.use(new A).as[JSBasicJSName]
      js.use(new B).as[JSBasicJSName]
    }

    it("should support JSExportAll") {
      @JSExportAll
      class A {
        def m(a: Int, b: String): js.Object = ???
      }

      class B extends A

      js.use(new A).as[JSBasic]
      js.use(new B).as[JSBasic]
    }

    it("should support inherited exports") {
      abstract class A {
        @JSExport
        def m(a: Int, b: String): js.Object
      }

      class B extends A {
        def m(a: Int, b: String): js.Object = ???
      }

      js.use(new B).as[JSBasic]
    }

    it("should support JSExportAll on superclass") {
      @JSExportAll
      abstract class A {
        def m(a: Int, b: String): js.Object
      }

      class B extends A {
        def m(a: Int, b: String): js.Object = ???
      }

      js.use(new B).as[JSBasic]
    }

    // Runtime check (not just typecheck): the same method is reachable both as a call
    // and under its extra exported name.
    it("should work with JSExportAll with an apply method") {
      @JSExportAll
      class A {
        @JSExport("bar")
        def apply(x: Int): Int = x * 2
      }

      val a = js.use(new A).as[JSNamedApply]

      expect(a(2)).toEqual(4)
      expect(a.bar(2)).toEqual(4)
    }

    it("should resolve generics in JSRaw types") {
      class A {
        @JSExport
        def arr: js.Array[Int] = ???
      }

      js.use(new A).as[JSGeneric[Int]]
      js.use(new A).as[JSGenericInt]
    }

    it("should resolve type members in JSRaw types") {
      class A {
        @JSExport
        def foo(x: Int): Int = ???
      }

      js.use(new A).as[JSTypeMember { type R = Int }]
    }

    it("should resolve exports with class-level type parameter") {
      class A[T] {
        @JSExport
        def arr: js.Array[T] = ???
      }

      class B extends A[Int]

      js.use(new A[Int]).as[JSGeneric[Int]]
      js.use(new B).as[JSGeneric[Int]]
    }

    it("should resolve exports with type member") {
      class A {
        type T
        @JSExport
        def arr: js.Array[T] = ???
      }

      class B extends A {
        type T = Int
      }

      js.use(new B).as[JSGeneric[Int]]
    }

    it("should resolve overloading") {
      @JSExportAll
      class A {
        def m(a: Int, b: String): js.Object = ???
        def m(b: String): Int = ???

        @JSExport("m")
        def strangeName(a: Int): js.Object = ???
      }

      js.use(new A).as[JSOverload]
    }

    it("should support vals/getters") {
      @JSExportAll
      class A {
        val a: Int = 1
        def b: String = ???

        // Test covariance as well
        def c: js.Array[Int] = ???
      }

      js.use(new A).as[JSGetters]
    }

    it("should support setters") {
      class A {
        @JSExport("a")
        def fooA_=(x: Int): Unit = ???

        @JSExport
        def b_=(x: String): Unit = ???

        @JSExport("c_=")
        def barC_=(x: js.Object): Unit = ???
      }

      js.use(new A).as[JSSetters]
    }

    it("should support vars") {
      class A {
        @JSExport
        def a: Int = ???
        @JSExport
        def a_=(x: Int): Unit = ???

        @JSExport("b")
        var fooB: String = _

        @JSExport
        var c: js.Object = _
      }

      js.use(new A).as[JSVars]
    }

    it("should support basic default arguments") {
      @JSExportAll
      class A {
        def sum4(a: Int, b: Int = 1, c: Int = 2, d: Int = 3): Int = a + b + c + d
        def sum2(a: Int, b: Int = 1): Int = a + b
      }

      js.use(new A).as[JSDefaultArgs]
    }

    it("should allow additional default arguments at the end of the params") {
      class A {
        @JSExport
        def m(a: Int, b: String, c: Int = ???, d: String = ???): js.Object = ???
      }

      js.use(new A).as[JSBasic]
    }

    it("should support repeated parameter lists") {
      @JSExportAll
      class A {
        def rep(a: Int, b: String*): Unit = ???
        def rep(a: Int*): Unit = ???
      }

      js.use(new A).as[JSRepeated]
    }

    it("should flatten multi parameter lists in raw JS type") {
      @JSExportAll
      class A {
        def multi(a: Int, b: String): Int = ???
      }

      js.use(new A).as[JSMulti]
    }

    it("should flatten multi parameter lists in exported method") {
      @JSExportAll
      class B {
        def m(a: Int)(b: String): js.Object = ???
      }

      js.use(new B).as[JSBasic]
    }

    it("should support anonymous types") {
      js.use(new { @JSExport def m(a: Int, b: String): js.Object = ??? }).as[JSBasic]
    }

    // Only needs to typecheck; the `if (false)` guard prevents evaluating ???.
    it("should allow Nothing") {
      if (false) {
        js.use(???).as[JSBasic]
      }
    }

    it("should allow Null") {
      js.use(null).as[JSBasic]
    }

  }
  // Success cases where x is already a raw JS type: typed nulls are enough, because only
  // the compile-time structural check matters here.
  describe("js.use(x).as[T] - Raw JS Types - success cases") {

    it("should support basic typechecking") {
      js.use(null: JSBasic).as[JSBasicJSName]
      js.use(null: JSBasicJSName).as[JSBasic]
    }

    it("should support generics") {
      js.use(null: JSGeneric[Int]).as[JSGenericInt]
      js.use(null: JSGenericInt).as[JSGeneric[Int]]
    }

    it("should support JS calls") {
      js.use(null: js.Function0[String]).as[JSApplyString]
    }

    it("should support @JSBracketAccess") {
      js.use(new js.Array[Int](0)).as[JSBracketAccessInt]
    }

    it("should support @JSBracketCall") {
      js.use(null: JSBracketCallInt1).as[JSBracketCallInt2]
    }

  }
  // Failure cases independent of whether x is a Scala or raw JS type: typeErrorWithMsg
  // asserts that the given snippet fails to compile with exactly the given message.
  describe("js.use(x).as[T] - general failure cases") {

    it("fails with polymorphic methods") {
      typeErrorWithMsg(
          "js.use(new Object).as[JSPolyMethod]",
          "Polymorphic methods are currently not supported. Offending " +
          "method: org.scalajs.testsuite.library.UseAsTest.JSPolyMethod.poly")
    }

    it("fails with non-type refinements") {
      typeErrorWithMsg(
          "js.use(???).as[JSBasic { def foo: Int }]",
          "Refinement foo is not a type. Only types may be refined with as.")
    }

    it("fails with non trait") {
      typeErrorWithMsg(
          "js.use(???).as[js.Date]",
          "Only traits can be used with as")
    }

    it("fails with class parents") {
      typeErrorWithMsg(
          "js.use(???).as[JSNonClassParent]",
          "Supertype scala.scalajs.js.Date of trait JSNonClassParent is a " +
          "class. Cannot be used with as.")
    }

    it("fails gracefully with existential types - #1841") {
      typeErrorWithMsg(
          "js.use(null: JSTypeMember).as[JSTypeMember]",
          "Methods with existential types are not supported. Offending " +
          "method: org.scalajs.testsuite.library.UseAsTest.JSTypeMember.foo. " +
          "This is likely caused by an abstract type in the method signature")
    }

  }
  // Failure cases where x is a Scala type: either the target raw JS trait demands features
  // no exported Scala type can provide, or the local class is missing/mismatching an export.
  describe("js.use(x).as[T] - Scala Types - failure cases") {

    it("fails with apply in a raw JS type") {
      typeErrorWithMsg(
          "js.use(new Object).as[JSWithApply]",
          "org.scalajs.testsuite.library.UseAsTest.JSWithApply defines an apply " +
          "method. This cannot be implemented by any Scala exported type, " +
          "since it would need to chain Function's prototype.")
    }

    it("fails with @JSBracketAccess in a raw JS type") {
      typeErrorWithMsg(
          "js.use(new Object).as[JSWithBracketAccess]",
          "org.scalajs.testsuite.library.UseAsTest.JSWithBracketAccess " +
          "defines a @JSMemberBracketAccess method. Existence of such a " +
          "method cannot be statically checked for any Scala exported type.")
    }

    it("fails with @JSBracketCall in a raw JS type") {
      typeErrorWithMsg(
          "js.use(new Object).as[JSWithBracketCall]",
          "org.scalajs.testsuite.library.UseAsTest.JSWithBracketCall defines " +
          "a @JSMemberBracketCall method. Existence of such a method cannot " +
          "be statically checked for any Scala exported type.")
    }

    it("fails with a missing method") {
      class A {
        @JSExport
        def e(a: Int, b: String): js.Object = ???
      }

      typeErrorWithMsg(
          "js.use(new A).as[JSBasic]",
          "A does not export a method m(Int, String): scala.scalajs.js.Object.")
    }

    it("fails with a missing overload") {
      class A {
        @JSExport
        def m(a: Int, b: String): js.Object = ???
      }

      typeErrorWithMsg(
          "js.use(new A).as[JSOverload]",
          "A does not export a method m(Int): scala.scalajs.js.Object.")
    }

    it("fails with wrong argument types") {
      class A {
        @JSExport
        def m(a: String, b: Int): js.Object = ???
      }

      typeErrorWithMsg(
          "js.use(new A).as[JSBasic]",
          "A does not export a method m(Int, String): scala.scalajs.js.Object.")
    }

    it("fails with wrong return types") {
      class A {
        @JSExport
        def m(a: Int, b: String): Any = ???
      }

      typeErrorWithMsg(
          "js.use(new A).as[JSBasic]",
          "A does not export a method m(Int, String): scala.scalajs.js.Object.")
    }

    it("fails with a missing default argument") {
      @JSExportAll
      class A {
        def sum4(a: Int, b: Int = 1, c: Int = 2, d: Int = 3): Int = a + b + c + d
        def sum2(a: Int, b: Int): Int = a + b // should have default
      }

      typeErrorWithMsg(
          "js.use(new A).as[JSDefaultArgs]",
          "A does not export a method sum2(Int, Int = ???): Int.")
    }

    // Repeatedness must match exactly in both directions.
    it("fails with a mismatching repeated argument") {
      @JSExportAll
      class A {
        def rep(a: Int, b: String): Unit = ??? // should be repeated
        def rep(a: Int*): Unit = ???
      }

      typeErrorWithMsg(
          "js.use(new A).as[JSRepeated]",
          "A does not export a method rep(Int, String*): Unit.")

      class B {
        @JSExport
        def m(a: Int, b: String*): js.Object = ??? // should not be repeated
      }

      typeErrorWithMsg(
          "js.use(new B).as[JSBasic]",
          "B does not export a method m(Int, String): scala.scalajs.js.Object.")
    }

  }
  // Failure cases where x is a raw JS type missing the requested member; each message also
  // documents the hint about @JSBracketAccess / @JSBracketCall where applicable.
  describe("js.use(x).as[T] - Raw JS Types - failure cases") {

    it("fails with a missing apply") {
      typeErrorWithMsg(
          "js.use(new js.Object).as[JSWithApply]",
          "scala.scalajs.js.Object does not have a method " +
          "<apply>(String): Int. (type is not callable)")
    }

    it("fails with a missing @JSBracketAccess") {
      typeErrorWithMsg(
          "js.use(new js.Object).as[JSWithBracketAccess]",
          "scala.scalajs.js.Object does not have a method " +
          "<bracketaccess>(String): Int. (type doesn't support member " +
          "selection via []). Add @JSBracketAccess to use a method for " +
          "member selection.")
    }

    it("fails with a missing @JSBracketCall") {
      typeErrorWithMsg(
          "js.use(new js.Object).as[JSWithBracketCall]",
          "scala.scalajs.js.Object does not have a method " +
          "<bracketcall>(String, String): Int. (type doesn't support " +
          "dynamically calling methods). Add @JSBracketCall to use a method " +
          "for dynamic calls.")
    }

    it("fails with a missing method") {
      typeErrorWithMsg(
          "js.use(new js.Object).as[JSBasic]",
          "scala.scalajs.js.Object does not have a method " +
          "m(Int, String): scala.scalajs.js.Object.")
    }

    it("fails with a missing overload") {
      typeErrorWithMsg(
          "js.use(null: JSBasic).as[JSOverload]",
          "org.scalajs.testsuite.library.UseAsTest.JSBasic does not have a " +
          "method m(Int): scala.scalajs.js.Object.")
    }

    it("fails with wrongly typed generic") {
      typeErrorWithMsg(
          "js.use(null: JSGeneric[Int]).as[JSGeneric[String]]",
          "org.scalajs.testsuite.library.UseAsTest.JSGeneric[Int] does not " +
          "have a getter arr: scala.scalajs.js.Array[String].")
    }

  }
  // --- Raw JS facade fixtures used as the `as[T]` targets in the tests above ---

  @js.native
  trait JSBasic extends js.Object {
    def m(a: Int, b: String): js.Object = js.native
  }

  // Same member as JSBasic but under a Scala-side name remapped with @JSName.
  @js.native
  trait JSBasicJSName extends js.Object {
    @JSName("m")
    def foo(a: Int, b: String): js.Object = js.native
  }

  @js.native
  trait JSNamedApply extends js.Object {
    @JSName("apply")
    def apply(x: Int): Int = js.native

    def bar(x: Int): Int = js.native
  }

  @js.native
  trait JSGeneric[T] extends js.Object {
    def arr: js.Array[T] = js.native
  }

  @js.native
  trait JSGenericInt extends JSGeneric[Int]

  @js.native
  trait JSTypeMember extends js.Object {
    type R
    def foo(x: R): Int = js.native
  }

  @js.native
  trait JSOverload extends JSBasic {
    def m(b: String): Int = js.native
    def m(a: Int): js.Object = js.native
  }

  @js.native
  trait JSGetters extends js.Object {
    def a: Int = js.native
    val b: String = js.native
    def c: js.Object = js.native
  }

  @js.native
  trait JSSetters extends js.Object {
    def a_=(x: Int): Unit = js.native

    @JSName("b")
    def fooJS_=(x: String): Unit = js.native

    @JSName("c_=")
    def barJS_=(x: js.Array[Int]): Unit = js.native
  }

  @js.native
  trait JSVars extends js.Object {
    var a: Int = js.native

    def b: String = js.native
    def b_=(x: String): Unit = js.native

    @JSName("c")
    var fooJS: js.Object = js.native
  }

  // `= ???` in a facade signature marks a parameter that must have a default on the Scala side.
  @js.native
  trait JSDefaultArgs extends js.Object {
    def sum4(a: Int, b: Int = ???, c: Int = ???, d: Int = ???): Int = js.native
    def sum2(a: Int, b: Int = ???): Int = js.native
  }

  @js.native
  trait JSRepeated extends js.Object {
    def rep(a: Int, b: String*): Unit = js.native
    def rep(a: Int*): Unit = js.native
  }

  @js.native
  trait JSMulti extends js.Object {
    def multi(a: Int)(b: String): Int = js.native
  }

  @js.native
  trait JSPolyMethod extends js.Object {
    def poly[T](a: T): js.Array[T] = js.native
  }

  @js.native
  trait JSWithApply extends js.Object {
    def apply(a: String): Int = js.native
  }

  @js.native
  trait JSWithBracketAccess extends js.Object {
    @JSBracketAccess
    def foo(a: String): Int = js.native
  }

  @js.native
  trait JSWithBracketCall extends js.Object {
    @JSBracketCall
    def foo(name: String, b: String): Int = js.native
  }

  // Extends a JS class (js.Date), which `as` rejects — see the failure tests.
  @js.native
  trait JSNonClassParent extends js.Date

  @js.native
  trait JSApplyString extends js.Object {
    def apply(): String = js.native
  }

  @js.native
  trait JSBracketAccessInt extends js.Object {
    @JSBracketAccess
    def apply(x: Int): Int = js.native
  }

  @js.native
  trait JSBracketCallInt1 extends js.Object {
    @JSBracketCall
    def foo(method: String): Int = js.native
  }

  @js.native
  trait JSBracketCallInt2 extends js.Object {
    @JSBracketCall
    def bar(method: String): Int = js.native
  }
}
| CapeSepias/scala-js | test-suite/src/test/scala/org/scalajs/testsuite/library/UseAsTest.scala | Scala | bsd-3-clause | 16,342 |
import sbt._
import Keys._
// import com.typesafe.sbtosgi.OsgiPlugin._
// Multi-project sbt build definition using the (pre-sbt-1.0) `Build` trait API.
// NOTE(review): `Build`, `Project.defaultSettings` and the `<<=` operator are
// deprecated/removed in later sbt versions — migrating to build.sbt syntax
// would be required on upgrade.
object ConfigBuild extends Build {
// Settings shared by projects that must never publish artifacts.
val unpublished = Seq(
// no artifacts in this project
publishArtifact := false,
// make-pom has a more specific publishArtifact setting already
// so needs specific override
publishArtifact in makePom := false,
// can't seem to get rid of ivy files except by no-op'ing the entire publish task
publish := {},
publishLocal := {}
)
// Sonatype publishing metadata (currently unused — see the commented-out
// `sonatype.settings` below; publishing goes to the Rhinofly repo instead).
object sonatype extends PublishToSonatype(ConfigBuild) {
def projectUrl = "https://github.com/typesafehub/config"
def developerId = "havocp"
def developerName = "Havoc Pennington"
def developerUrl = "http://ometer.com/"
def scmUrl = "git://github.com/typesafehub/config.git"
}
// Treat any version ending in -SNAPSHOT as a snapshot build.
override val settings = super.settings ++ Seq(isSnapshot <<= isSnapshot or version(_ endsWith "-SNAPSHOT"))
// Aggregate root: builds all subprojects, publishes nothing itself.
lazy val root = Project(id = "root",
base = file("."),
settings = Project.defaultSettings ++ unpublished) aggregate(testLib, configLib, simpleLib, simpleApp)
// The main library ("config-ordered" fork), published to an external repository.
lazy val configLib = Project(id = "config",
base = file("config"),
settings =
Project.defaultSettings ++
// sonatype.settings ++
// osgiSettings ++
Seq(
name := "config-ordered",
publishTo := Some("Rhinofly external releases" at "http://maven-repository.rhinofly.net:8081/artifactory/ext-release-local")
// OsgiKeys.exportPackage := Seq("com.typesafe.config"),
// packagedArtifact in (Compile, packageBin) <<= (artifact in (Compile, packageBin), OsgiKeys.bundle).identityMap,
// artifact in (Compile, packageBin) ~= { _.copy(`type` = "bundle") }
)) // dependsOn(testLib % "test->test")
// Test-support library, not published.
lazy val testLib = Project(id = "test-lib",
base = file("test-lib"),
settings = Project.defaultSettings ++ unpublished)
// Example projects exercising the library; none are published.
lazy val simpleLib = Project(id = "simple-lib",
base = file("examples/simple-lib"),
settings = Project.defaultSettings ++ unpublished) dependsOn(configLib)
lazy val simpleApp = Project(id = "simple-app",
base = file("examples/simple-app"),
settings = Project.defaultSettings ++ unpublished) dependsOn(simpleLib)
// NOTE(review): complexApp is not in the root aggregate above — presumably
// intentional, but verify it still builds in CI.
lazy val complexApp = Project(id = "complex-app",
base = file("examples/complex-app"),
settings = Project.defaultSettings ++ unpublished) dependsOn(simpleLib)
}
// from https://raw.github.com/paulp/scala-improving/master/project/PublishToSonatype.scala
// Reusable helper that captures the metadata needed to publish to Sonatype
// OSS: repository resolvers, project/developer/license/SCM info, and the
// extra POM fragments Sonatype requires. Subclasses fill in the abstract
// members (see `ConfigBuild.sonatype`). Most of `settings` is commented out,
// so only the `publishArtifact in Test := false` part is currently active.
abstract class PublishToSonatype(build: Build) {
import build._
// Sonatype target repositories: snapshots vs. staged releases.
val ossSnapshots = "Sonatype OSS Snapshots" at "https://oss.sonatype.org/content/repositories/snapshots/"
val ossStaging = "Sonatype OSS Staging" at "https://oss.sonatype.org/service/local/staging/deploy/maven2/"
// Project-specific metadata supplied by the concrete subclass.
def projectUrl: String
def developerId: String
def developerName: String
def developerUrl: String
// License defaults (Apache 2.0); override if a project uses another license.
def licenseName = "Apache License, Version 2.0"
def licenseUrl = "http://www.apache.org/licenses/LICENSE-2.0"
def licenseDistribution = "repo"
def scmUrl: String
def scmConnection = "scm:git:" + scmUrl
// Builds the <url>/<licenses>/<scm>/<developers> POM fragments Sonatype
// requires. `scalaVersion` is accepted for the sbt task signature but unused.
def generatePomExtra(scalaVersion: String): xml.NodeSeq = {
<url>{ projectUrl }</url>
<licenses>
<license>
<name>{ licenseName }</name>
<url>{ licenseUrl }</url>
<distribution>{ licenseDistribution }</distribution>
</license>
</licenses>
<scm>
<url>{ scmUrl }</url>
<connection>{ scmConnection }</connection>
</scm>
<developers>
<developer>
<id>{ developerId }</id>
<name>{ developerName }</name>
<url>{ developerUrl }</url>
</developer>
</developers>
}
// Settings to mix into a publishable project. The Sonatype routing and POM
// generation are currently disabled (commented out); only test-artifact
// suppression remains active.
def settings: Seq[Setting[_]] = Seq(
// publishMavenStyle := true,
// publishTo <<= (isSnapshot) { (snapshot) => Some(if (snapshot) ossSnapshots else ossStaging) },
publishArtifact in Test := false
// pomIncludeRepository := (_ => false),
// pomExtra <<= (scalaVersion)(generatePomExtra)
)
}
| Kaliber/config-ordered | project/Build.scala | Scala | apache-2.0 | 4,697 |
import com.cave.metrics.data.Role
import data.UserData
import org.joda.time.DateTime
import org.mockito.{Matchers, Mockito}
import org.mockito.Mockito._
import play.api.libs.json.Json
import play.api.mvc.Results
import play.api.test.{FakeHeaders, FakeRequest, PlaySpecification}
import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Failure, Success}
/**
 * Specs for `POST /organizations/$name/users` — adding a user to an organization.
 *
 * Each test resets the shared mocks, arranges the DataManager responses for one
 * scenario, invokes the controller through a FakeRequest, and asserts on the
 * resulting HTTP status and body.
 *
 * Fix: the conflict test's description said "return 209" although it asserts
 * `CONFLICT` (HTTP 409); the description now matches the asserted status.
 */
class AddOrganizationUserApiSpec extends PlaySpecification with Results with AbstractOrganizationApiSpec with UserData {

  "POST /organizations/$name/users" should {

    "return 202 after adding an existing user to an organization" in {
      Mockito.reset(mockAwsWrapper, mockDataManager, mockInfluxClientFactory)
      // Caller is authenticated and is an Admin of the target organization.
      when(mockDataManager.findUserByToken(Matchers.eq(SOME_TOKEN), Matchers.any[DateTime])(Matchers.any[ExecutionContext]))
        .thenReturn(Future.successful(Some(SOME_USER)))
      when(mockDataManager.getOrganization(GiltName)).thenReturn(Success(Some(GiltOrg)))
      when(mockDataManager.getOrganizationsForUser(SOME_USER)).thenReturn(Future.successful(List(GiltName -> Role.Admin)))
      // The user being added exists and is not yet a member (add returns true).
      when(mockDataManager.getUserByEmail(SOME_EMAIL)).thenReturn(Future.successful(Some(USER1)))
      when(mockDataManager.addUserToOrganization(USER1, GiltOrg, Role.Member)).thenReturn(Future.successful(true))

      val result = new TestController().addUser(GiltName)(
        FakeRequest(POST, s"/organizations/$GiltName/users",
          FakeHeaders(Seq(AUTHORIZATION -> Seq(AUTH_TOKEN))),
          Json.obj("email" -> SOME_EMAIL, "role" -> Role.Member)))

      status(result) must equalTo(ACCEPTED)
      contentAsString(result) must equalTo("")
    }

    // Was mislabeled "return 209"; the assertion is CONFLICT, i.e. HTTP 409.
    "return 409 if the user already is in the organization" in {
      Mockito.reset(mockAwsWrapper, mockDataManager, mockInfluxClientFactory)
      when(mockDataManager.findUserByToken(Matchers.eq(SOME_TOKEN), Matchers.any[DateTime])(Matchers.any[ExecutionContext]))
        .thenReturn(Future.successful(Some(SOME_USER)))
      when(mockDataManager.getOrganization(GiltName)).thenReturn(Success(Some(GiltOrg)))
      when(mockDataManager.getOrganizationsForUser(SOME_USER)).thenReturn(Future.successful(List(GiltName -> Role.Admin)))
      when(mockDataManager.getUserByEmail(SOME_EMAIL)).thenReturn(Future.successful(Some(USER1)))
      // addUserToOrganization returning false signals the user is already a member.
      when(mockDataManager.addUserToOrganization(USER1, GiltOrg, Role.Member)).thenReturn(Future.successful(false))

      val result = new TestController().addUser(GiltName)(
        FakeRequest(POST, s"/organizations/$GiltName/users",
          FakeHeaders(Seq(AUTHORIZATION -> Seq(AUTH_TOKEN))),
          Json.obj("email" -> SOME_EMAIL, "role" -> Role.Member)))

      status(result) must equalTo(CONFLICT)
      contentAsString(result) must equalTo("")
    }

    "return 202 after adding a non existing user to an organization" in {
      Mockito.reset(mockAwsWrapper, mockDataManager, mockInfluxClientFactory)
      when(mockDataManager.findUserByToken(Matchers.eq(SOME_TOKEN), Matchers.any[DateTime])(Matchers.any[ExecutionContext]))
        .thenReturn(Future.successful(Some(SOME_USER)))
      when(mockDataManager.getOrganization(GiltName)).thenReturn(Success(Some(GiltOrg)))
      when(mockDataManager.getOrganizationsForUser(SOME_USER)).thenReturn(Future.successful(List(GiltName -> Role.Admin)))
      // Unknown email: the service notifies the address instead of adding a user.
      when(mockDataManager.getUserByEmail(SOME_EMAIL)).thenReturn(Future.successful(None))
      when(mockMailService.sendAttemptedOrganizationAdd(SOME_EMAIL, GiltOrg, SOME_USER)).thenReturn(Future.successful())

      val result = new TestController().addUser(GiltName)(
        FakeRequest(POST, s"/organizations/$GiltName/users",
          FakeHeaders(Seq(AUTHORIZATION -> Seq(AUTH_TOKEN))),
          Json.obj("email" -> SOME_EMAIL, "role" -> Role.Member)))

      status(result) must equalTo(ACCEPTED)
      contentAsString(result) must equalTo("")
    }

    "return 400 if the input data does not contain an email" in {
      Mockito.reset(mockAwsWrapper, mockDataManager, mockInfluxClientFactory)
      when(mockDataManager.findUserByToken(Matchers.eq(SOME_TOKEN), Matchers.any[DateTime])(Matchers.any[ExecutionContext]))
        .thenReturn(Future.successful(Some(SOME_USER)))
      when(mockDataManager.getOrganization(GiltName)).thenReturn(Success(Some(GiltOrg)))
      when(mockDataManager.getOrganizationsForUser(SOME_USER)).thenReturn(Future.successful(List(GiltName -> Role.Admin)))

      val result = new TestController().addUser(GiltName)(
        FakeRequest(POST, s"/organizations/$GiltName/users",
          FakeHeaders(Seq(AUTHORIZATION -> Seq(AUTH_TOKEN))),
          Json.obj("mail" -> SOME_EMAIL, "role" -> Role.Member)))   // "mail" instead of "email"

      status(result) must equalTo(BAD_REQUEST)
      contentAsString(result) must equalTo("Unable to parse request body: must have both 'email' and 'role'.")
    }

    "return 400 if the input data does not contain a role" in {
      Mockito.reset(mockAwsWrapper, mockDataManager, mockInfluxClientFactory)
      when(mockDataManager.findUserByToken(Matchers.eq(SOME_TOKEN), Matchers.any[DateTime])(Matchers.any[ExecutionContext]))
        .thenReturn(Future.successful(Some(SOME_USER)))
      when(mockDataManager.getOrganization(GiltName)).thenReturn(Success(Some(GiltOrg)))
      when(mockDataManager.getOrganizationsForUser(SOME_USER)).thenReturn(Future.successful(List(GiltName -> Role.Admin)))

      val result = new TestController().addUser(GiltName)(
        FakeRequest(POST, s"/organizations/$GiltName/users",
          FakeHeaders(Seq(AUTHORIZATION -> Seq(AUTH_TOKEN))),
          Json.obj("email" -> SOME_EMAIL, "mole" -> "admin")))   // "mole" instead of "role"

      status(result) must equalTo(BAD_REQUEST)
      contentAsString(result) must equalTo("Unable to parse request body: must have both 'email' and 'role'.")
    }

    "return 400 if the input data does not contain a valid role" in {
      Mockito.reset(mockAwsWrapper, mockDataManager, mockInfluxClientFactory)
      when(mockDataManager.findUserByToken(Matchers.eq(SOME_TOKEN), Matchers.any[DateTime])(Matchers.any[ExecutionContext]))
        .thenReturn(Future.successful(Some(SOME_USER)))
      when(mockDataManager.getOrganization(GiltName)).thenReturn(Success(Some(GiltOrg)))
      when(mockDataManager.getOrganizationsForUser(SOME_USER)).thenReturn(Future.successful(List(GiltName -> Role.Admin)))

      val result = new TestController().addUser(GiltName)(
        FakeRequest(POST, s"/organizations/$GiltName/users",
          FakeHeaders(Seq(AUTHORIZATION -> Seq(AUTH_TOKEN))),
          Json.obj("email" -> SOME_EMAIL, "role" -> "leader")))   // not a valid Role

      status(result) must equalTo(BAD_REQUEST)
      contentAsString(result) must equalTo("Unable to parse request body: must have both 'email' and 'role'.")
    }

    "return 401 if no authorization token specified" in {
      Mockito.reset(mockAwsWrapper, mockDataManager, mockInfluxClientFactory)

      val result = new TestController().addUser(GiltName)(
        FakeRequest(POST, s"/organizations/$GiltName/users",
          FakeHeaders(),
          Json.obj("email" -> SOME_EMAIL, "role" -> "leader")))

      status(result) must equalTo(UNAUTHORIZED)
      contentAsString(result) must equalTo("")
    }

    "return 401 if unsupported authorization token specified" in {
      Mockito.reset(mockAwsWrapper, mockDataManager, mockInfluxClientFactory)

      val result = new TestController().addUser(GiltName)(
        FakeRequest(POST, s"/organizations/$GiltName/users",
          FakeHeaders(Seq(AUTHORIZATION -> Seq(GiltOrgBadAuth))),
          Json.obj("email" -> SOME_EMAIL, "role" -> "leader")))

      status(result) must equalTo(UNAUTHORIZED)
      contentAsString(result) must equalTo("")
    }

    "return 403 if user not found" in {
      Mockito.reset(mockAwsWrapper, mockDataManager, mockInfluxClientFactory)
      when(mockDataManager.findUserByToken(Matchers.eq(SOME_TOKEN), Matchers.any[DateTime])(Matchers.any[ExecutionContext]))
        .thenReturn(Future.successful(None))

      val result = new TestController().addUser(GiltName)(
        FakeRequest(POST, s"/organizations/$GiltName/users",
          FakeHeaders(Seq(AUTHORIZATION -> Seq(AUTH_TOKEN))),
          Json.obj("email" -> SOME_EMAIL, "role" -> Role.Member)))

      status(result) must equalTo(FORBIDDEN)
      contentAsString(result) must equalTo("")
    }

    "return 403 if user not admin" in {
      Mockito.reset(mockAwsWrapper, mockDataManager, mockInfluxClientFactory)
      when(mockDataManager.findUserByToken(Matchers.eq(SOME_TOKEN), Matchers.any[DateTime])(Matchers.any[ExecutionContext]))
        .thenReturn(Future.successful(Some(SOME_USER)))
      when(mockDataManager.getOrganization(GiltName)).thenReturn(Success(Some(GiltOrg)))
      // Caller is only a Member — insufficient to add users.
      when(mockDataManager.getOrganizationsForUser(SOME_USER)).thenReturn(Future.successful(List(GiltName -> Role.Member)))

      val result = new TestController().addUser(GiltName)(
        FakeRequest(POST, s"/organizations/$GiltName/users",
          FakeHeaders(Seq(AUTHORIZATION -> Seq(AUTH_TOKEN))),
          Json.obj("email" -> SOME_EMAIL, "role" -> Role.Member)))

      status(result) must equalTo(FORBIDDEN)
      contentAsString(result) must equalTo("")
    }

    "return 500 if there's an error during getOrganization" in {
      Mockito.reset(mockAwsWrapper, mockDataManager, mockInfluxClientFactory)
      when(mockDataManager.findUserByToken(Matchers.eq(SOME_TOKEN), Matchers.any[DateTime])(Matchers.any[ExecutionContext]))
        .thenReturn(Future.successful(Some(SOME_USER)))
      when(mockDataManager.getOrganization(GiltName)).thenReturn(Failure(new RuntimeException(ErrorMessage)))

      val result = new TestController().addUser(GiltName)(
        FakeRequest(POST, s"/organizations/$GiltName/users",
          FakeHeaders(Seq(AUTHORIZATION -> Seq(AUTH_TOKEN))),
          Json.obj("email" -> SOME_EMAIL, "role" -> Role.Member)))

      status(result) must equalTo(INTERNAL_SERVER_ERROR)
      contentAsString(result) must equalTo(InternalErrorMessage)
    }

    "return 500 if there's an error during getOrganizationsForUser" in {
      Mockito.reset(mockAwsWrapper, mockDataManager, mockInfluxClientFactory)
      when(mockDataManager.findUserByToken(Matchers.eq(SOME_TOKEN), Matchers.any[DateTime])(Matchers.any[ExecutionContext]))
        .thenReturn(Future.successful(Some(SOME_USER)))
      when(mockDataManager.getOrganization(GiltName)).thenReturn(Success(Some(GiltOrg)))
      when(mockDataManager.getOrganizationsForUser(SOME_USER)).thenReturn(Future.failed(new RuntimeException(ErrorMessage)))

      val result = new TestController().addUser(GiltName)(
        FakeRequest(POST, s"/organizations/$GiltName/users",
          FakeHeaders(Seq(AUTHORIZATION -> Seq(AUTH_TOKEN))),
          Json.obj("email" -> SOME_EMAIL, "role" -> Role.Member)))

      status(result) must equalTo(INTERNAL_SERVER_ERROR)
      contentAsString(result) must equalTo(InternalErrorMessage)
    }

    "return 500 if there's an error during getUserByEmail" in {
      Mockito.reset(mockAwsWrapper, mockDataManager, mockInfluxClientFactory)
      when(mockDataManager.findUserByToken(Matchers.eq(SOME_TOKEN), Matchers.any[DateTime])(Matchers.any[ExecutionContext]))
        .thenReturn(Future.successful(Some(SOME_USER)))
      when(mockDataManager.getOrganization(GiltName)).thenReturn(Success(Some(GiltOrg)))
      when(mockDataManager.getOrganizationsForUser(SOME_USER)).thenReturn(Future.successful(List(GiltName -> Role.Admin)))
      when(mockDataManager.getUserByEmail(SOME_EMAIL)).thenReturn(Future.failed(new RuntimeException(ErrorMessage)))

      val result = new TestController().addUser(GiltName)(
        FakeRequest(POST, s"/organizations/$GiltName/users",
          FakeHeaders(Seq(AUTHORIZATION -> Seq(AUTH_TOKEN))),
          Json.obj("email" -> SOME_EMAIL, "role" -> Role.Member)))

      status(result) must equalTo(INTERNAL_SERVER_ERROR)
      contentAsString(result) must equalTo(InternalErrorMessage)
    }

    "return 500 if there's an error during addUserToOrganization" in {
      Mockito.reset(mockAwsWrapper, mockDataManager, mockInfluxClientFactory)
      when(mockDataManager.findUserByToken(Matchers.eq(SOME_TOKEN), Matchers.any[DateTime])(Matchers.any[ExecutionContext]))
        .thenReturn(Future.successful(Some(SOME_USER)))
      when(mockDataManager.getOrganization(GiltName)).thenReturn(Success(Some(GiltOrg)))
      when(mockDataManager.getOrganizationsForUser(SOME_USER)).thenReturn(Future.successful(List(GiltName -> Role.Admin)))
      when(mockDataManager.getUserByEmail(SOME_EMAIL)).thenReturn(Future.successful(Some(USER1)))
      when(mockDataManager.addUserToOrganization(USER1, GiltOrg, Role.Member)).thenReturn(Future.failed(new RuntimeException(ErrorMessage)))

      val result = new TestController().addUser(GiltName)(
        FakeRequest(POST, s"/organizations/$GiltName/users",
          FakeHeaders(Seq(AUTHORIZATION -> Seq(AUTH_TOKEN))),
          Json.obj("email" -> SOME_EMAIL, "role" -> Role.Member)))

      status(result) must equalTo(INTERNAL_SERVER_ERROR)
      contentAsString(result) must equalTo(InternalErrorMessage)
    }
  }
}
| gilt/cave | api/test/AddOrganizationUserApiSpec.scala | Scala | mit | 13,100 |
package org.broadinstitute.dsde.firecloud.test.analysis
import org.broadinstitute.dsde.firecloud.fixture.UserFixtures
import org.broadinstitute.dsde.workbench.auth.AuthToken
import org.broadinstitute.dsde.workbench.config.UserPool
import org.broadinstitute.dsde.workbench.fixture._
import org.broadinstitute.dsde.workbench.service.test.WebBrowserSpec
import org.scalatest._
// Browser-driven (Selenium) end-to-end tests for importing a method
// configuration into a workspace, either by copying it from another workspace
// or by importing it from the method repository. Each test provisions a clean
// billing project, workspaces and (where needed) a method via the fixture
// mixins, then drives the UI through page objects.
class MethodImportSpec extends FreeSpec with Matchers with WebBrowserSpec with WorkspaceFixtures
with UserFixtures with MethodFixtures with BillingFixtures with TestReporterFixture {
"import method config" - {
"copy from a workspace" in {
// All API/UI calls run as a project owner.
val user = UserPool.chooseProjectOwner
implicit val authToken: AuthToken = user.makeAuthToken()
withCleanBillingProject(user) { billingProject =>
// Source workspace holds the method config; dest receives the copy.
withWorkspace(billingProject, "MethodImportSpec_workspace_src") { sourceWorkspaceName =>
withWorkspace(billingProject, "MethodImportSpec_workspace_dest") { destWorkspaceName =>
withMethod("MethodImportSpec_import_from_workspace", MethodData.SimpleMethod, 1) { methodName =>
val method = MethodData.SimpleMethod.copy(methodName = methodName)
// Seed the source workspace with a config for the method via the API.
api.methodConfigurations.createMethodConfigInWorkspace(billingProject, sourceWorkspaceName,
method, method.methodNamespace, method.methodName, 1, Map.empty, Map.empty, method.rootEntityType)
api.workspaces.waitForBucketReadAccess(billingProject, destWorkspaceName)
withWebDriver { implicit driver =>
withSignIn(user) { listPage =>
// Copy the config from the source workspace through the UI.
val methodConfigTab = listPage.enterWorkspace(billingProject, destWorkspaceName).goToMethodConfigTab()
val methodConfigDetailsPage = methodConfigTab.copyMethodConfigFromWorkspace(
billingProject, sourceWorkspaceName, method.methodNamespace, method.methodName)
methodConfigDetailsPage.isLoaded shouldBe true
methodConfigDetailsPage.methodConfigName shouldBe method.methodName
// launch modal shows no default entities
val launchModal = methodConfigDetailsPage.openLaunchAnalysisModal()
launchModal.verifyNoRowsMessage() shouldBe true
launchModal.xOut()
}
}
}
}
}
}
}
"import from method repo" in {
val user = UserPool.chooseProjectOwner
implicit val authToken: AuthToken = user.makeAuthToken()
withCleanBillingProject(user) { billingProject =>
withWorkspace(billingProject, "MethodImportSpec_import_from_methodrepo") { workspaceName =>
withWebDriver { implicit driver =>
withSignIn(user) { workspaceListPage =>
// Import a known method config from the shared method repository.
val methodConfigPage = workspaceListPage.enterWorkspace(billingProject, workspaceName).goToMethodConfigTab()
val methodConfigDetailsPage = methodConfigPage.importMethodConfigFromRepo(
MethodData.SimpleMethod.methodNamespace,
MethodData.SimpleMethod.methodName,
MethodData.SimpleMethod.snapshotId,
SimpleMethodConfig.configName)
methodConfigDetailsPage.isLoaded shouldBe true
// Editing inputs after import verifies the imported config is usable.
methodConfigDetailsPage.editMethodConfig(inputs = Some(SimpleMethodConfig.inputs))
}
}
}
}
}
}
}
| broadinstitute/firecloud-ui | automation/src/test/scala/org/broadinstitute/dsde/firecloud/test/analysis/MethodImportSpec.scala | Scala | bsd-3-clause | 3,432 |
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package java.lang
/** Scala.js implementation of `java.lang.StringBuilder`.
 *
 *  Backed by a single immutable `String` (`content`) rather than a char
 *  array: on JS engines, string concatenation is cheap and there is no
 *  benefit to maintaining a separate capacity, which is why the capacity
 *  related methods below are no-ops. Mutating operations rebuild `content`
 *  and return `this` for chaining, matching the JDK contract (including
 *  exception types and "null" rendering of null arguments).
 */
class StringBuilder
extends AnyRef with CharSequence with Appendable with java.io.Serializable {
// The entire builder state: the current character content.
private[this] var content: String = ""
// Per the JDK, the String constructor rejects null explicitly.
def this(str: String) = {
this()
if (str eq null)
throw new NullPointerException
content = str
}
// Capacity is not tracked, but a negative request must still throw.
def this(initialCapacity: Int) = {
this()
if (initialCapacity < 0)
throw new NegativeArraySizeException()
}
def this(seq: CharSequence) = this(seq.toString)
@inline
def append(obj: AnyRef): StringBuilder = {
// If (obj eq null), this appends "null", otherwise obj.toString()
content += obj
this
}
@inline
def append(str: String): StringBuilder = {
content += str // if (str eq null), this appends "null"
this
}
// The remaining append overloads all funnel into the two primitives above.
def append(sb: StringBuffer): StringBuilder = append(sb: AnyRef)
def append(s: CharSequence): StringBuilder = append(s: AnyRef)
def append(s: CharSequence, start: Int, end: Int): StringBuilder =
append((if (s == null) "null" else s).subSequence(start, end))
def append(str: Array[scala.Char]): StringBuilder =
append(String.valueOf(str))
def append(str: Array[scala.Char], offset: Int, len: Int): StringBuilder =
append(String.valueOf(str, offset, len))
def append(b: scala.Boolean): StringBuilder = append(b.toString())
def append(c: scala.Char): StringBuilder = append(c.toString())
def append(i: scala.Int): StringBuilder = append(i.toString())
def append(lng: scala.Long): StringBuilder = append(lng.toString())
def append(f: scala.Float): StringBuilder = append(f.toString())
def append(d: scala.Double): StringBuilder = append(d.toString())
// Supplementary code points become a surrogate pair via the helper.
def appendCodePoint(codePoint: Int): StringBuilder =
append(Character.codePointToString(codePoint))
def delete(start: Int, end: Int): StringBuilder =
replace(start, end, "")
def deleteCharAt(index: Int): StringBuilder = {
/* This is not equivalent to `delete(index, index + 1)` when
 * `index == length`.
 */
val oldContent = content
if (index < 0 || index >= oldContent.length)
throw new StringIndexOutOfBoundsException(index)
content = oldContent.substring(0, index) + oldContent.substring(index + 1)
this
}
// Replaces chars in [start, end) with `str`; per the JDK, `end` may exceed
// the length (it is clamped), but `start` must be in range and <= end.
def replace(start: Int, end: Int, str: String): StringBuilder = {
val oldContent = content
val length = oldContent.length
if (start < 0 || start > length || start > end)
throw new StringIndexOutOfBoundsException(start)
val firstPart = oldContent.substring(0, start) + str
content =
if (end >= length) firstPart
else firstPart + oldContent.substring(end)
this
}
def insert(index: Int, str: Array[scala.Char], offset: Int,
len: Int): StringBuilder = {
insert(index, String.valueOf(str, offset, len))
}
@inline def insert(offset: Int, obj: AnyRef): StringBuilder =
insert(offset, String.valueOf(obj))
// Core insert: all other insert overloads stringify and delegate here.
def insert(offset: Int, str: String): StringBuilder = {
val oldContent = content
if (offset < 0 || offset > oldContent.length)
throw new StringIndexOutOfBoundsException(offset)
content =
oldContent.substring(0, offset) + str + oldContent.substring(offset)
this
}
def insert(offset: Int, str: Array[scala.Char]): StringBuilder =
insert(offset, String.valueOf(str))
def insert(dstOffset: Int, s: CharSequence): StringBuilder =
insert(dstOffset, s: AnyRef)
def insert(dstOffset: Int, s: CharSequence, start: Int,
end: Int): StringBuilder = {
insert(dstOffset, (if (s == null) "null" else s).subSequence(start, end))
}
def insert(offset: Int, b: scala.Boolean): StringBuilder =
insert(offset, b.toString)
def insert(offset: Int, c: scala.Char): StringBuilder =
insert(offset, c.toString)
def insert(offset: Int, i: scala.Int): StringBuilder =
insert(offset, i.toString)
def insert(offset: Int, l: scala.Long): StringBuilder =
insert(offset, l.toString)
def insert(offset: Int, f: scala.Float): StringBuilder =
insert(offset, f.toString)
def insert(offset: Int, d: scala.Double): StringBuilder =
insert(offset, d.toString)
// Search operations delegate directly to the backing String.
def indexOf(str: String): Int = content.indexOf(str)
def indexOf(str: String, fromIndex: Int): Int =
content.indexOf(str, fromIndex)
def lastIndexOf(str: String): Int = content.lastIndexOf(str)
def lastIndexOf(str: String, fromIndex: Int): Int =
content.lastIndexOf(str, fromIndex)
// Reverses char order while keeping surrogate pairs intact (per the JDK,
// code points — not UTF-16 code units — must survive reversal). Walks from
// the end; when a low surrogate preceded by a high surrogate is found, the
// pair is emitted in its original order.
def reverse(): StringBuilder = {
val original = content
var result = ""
var i = original.length - 1
while (i > 0) {
val c = original.charAt(i)
if (Character.isLowSurrogate(c)) {
val c2 = original.charAt(i - 1)
if (Character.isHighSurrogate(c2)) {
result = result + c2.toString + c.toString
i -= 2
} else {
result += c.toString
i -= 1
}
} else {
result += c.toString
i -= 1
}
}
// i == 0 means char 0 was not consumed as part of a surrogate pair.
if (i == 0)
result += original.charAt(0).toString
content = result
this
}
override def toString(): String = content
def length(): Int = content.length()
// No real capacity is tracked (String-backed); these satisfy the API only.
def capacity(): Int = length()
def ensureCapacity(minimumCapacity: Int): Unit = ()
def trimToSize(): Unit = ()
// Truncates, or pads with NUL characters when growing, per the JDK contract.
def setLength(newLength: Int): Unit = {
if (newLength < 0)
throw new StringIndexOutOfBoundsException(newLength)
var newContent = content
val additional = newLength - newContent.length // cannot overflow
if (additional < 0) {
newContent = newContent.substring(0, newLength)
} else {
var i = 0
while (i != additional) {
newContent += "\\u0000"
i += 1
}
}
content = newContent
}
// Read-only accessors delegate to the backing String.
def charAt(index: Int): Char = content.charAt(index)
def codePointAt(index: Int): Int = content.codePointAt(index)
def codePointBefore(index: Int): Int = content.codePointBefore(index)
def codePointCount(beginIndex: Int, endIndex: Int): Int =
content.codePointCount(beginIndex, endIndex)
def offsetByCodePoints(index: Int, codePointOffset: Int): Int =
content.offsetByCodePoints(index, codePointOffset)
def getChars(srcBegin: Int, srcEnd: Int, dst: Array[scala.Char],
dstBegin: Int): Unit = {
content.getChars(srcBegin, srcEnd, dst, dstBegin)
}
def setCharAt(index: Int, ch: scala.Char): Unit = {
val oldContent = content
if (index < 0 || index >= oldContent.length)
throw new StringIndexOutOfBoundsException(index)
content =
oldContent.substring(0, index) + ch + oldContent.substring(index + 1)
}
def substring(start: Int): String = content.substring(start)
def subSequence(start: Int, end: Int): CharSequence = substring(start, end)
def substring(start: Int, end: Int): String = content.substring(start, end)
}
| nicolasstucki/scala-js | javalanglib/src/main/scala/java/lang/StringBuilder.scala | Scala | apache-2.0 | 7,018 |
package nasa.nccs.edas.portal
import java.io.File
import java.lang.management.ManagementFactory
import java.nio.file.{Files, Path, Paths}
import java.sql.{Date, Timestamp}
import scala.xml
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
import nasa.nccs.cdapi.data.{FastMaskedArray, HeapFltArray}
import nasa.nccs.edas.engine.{EDASExecutionManager, ExecutionCallback, TestProcess}
import nasa.nccs.edas.engine.spark.CDSparkContext
import nasa.nccs.edas.rdd.{CDRecord, TestClockProcess, TestDatasetProcess}
import nasa.nccs.edas.sources.netcdf.NetcdfDatasetMgr
import nasa.nccs.esgf.wps.{Job, ProcessManager, wpsObjectParser}
import nasa.nccs.edas.utilities.appParameters
import nasa.nccs.esgf.process._
import nasa.nccs.esgf.wps.edasServiceProvider.getResponseSyntax
import nasa.nccs.utilities.{EDASLogManager, Loggable}
import nasa.nccs.wps.{WPSMergedEventReport, _}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Dataset, SQLContext, SQLImplicits}
import ucar.ma2.{ArrayFloat, IndexIterator}
import ucar.nc2.Variable
import scala.collection.mutable.{ArrayBuffer, ListBuffer}
// import org.apache.spark.implicits._
import org.apache.spark.SparkEnv
import org.apache.spark.api.java.function.MapPartitionsFunction
import org.apache.spark.sql.Row
import ucar.ma2
import ucar.nc2.dataset.{CoordinateAxis1DTime, NetcdfDataset}
import ucar.nc2.time.CalendarDate
import scala.collection.mutable
import scala.io.Source
//import gov.nasa.gsfc.cisto.cds.sia.scala.climatespark.core.EDASDriver
//
//object TestImportApp extends EDASDriver {
//
//}
object EDASapp {
  /** Returns `array(index)` when the array is long enough, else `default`. */
  def elem( array: Array[String], index: Int, default: String = "" ): String =
    if( array.length > index ) array(index) else default

  /** Reads a `key = value` parameter file into a Map.
   *
   *  Lines without an '=' are ignored; keys and values are trimmed. An empty
   *  path yields an empty map; a non-existent path raises an Exception.
   *
   *  Fix: the original leaked the `Source` returned by `Source.fromFile`
   *  (never closed) and built the Map through a potentially lazy `toSeq` of
   *  an iterator. The source is now closed in a `finally` block and the
   *  entries are forced with `toList` before the file is closed.
   */
  def getConfiguration( parameter_file_path: String ): Map[String, String] = {
    if( parameter_file_path.isEmpty ) { Map.empty[String, String] }
    else if( Files.exists( Paths.get(parameter_file_path) ) ) {
      val source = Source.fromFile(parameter_file_path)
      try {
        val params: Iterator[Array[String]] = for ( line <- source.getLines() ) yield { line.split('=') }
        // Force materialization while the file is still open.
        Map( params.filter( _.length > 1 ).map( a => ( a.head.trim, a.last.trim ) ).toList: _* )
      } finally {
        source.close()
      }
    }
    else { throw new Exception( "Can't find parameter file: " + parameter_file_path) }
  }
}
class EDASapp( client_address: String, request_port: Int, response_port: Int, appConfiguration: Map[String,String] ) extends EDASPortal( client_address, request_port, response_port ) {
import EDASapp._
// Delegates WPS request execution; configured from the app-level parameters.
val processManager = new ProcessManager( appConfiguration )
// Service identifier passed to the process manager for every request.
val process = "edas"
// Generator for fallback job ids when the client does not supply one.
val randomIds = new RandomString(16)
// Pretty-printer for all XML responses (width 200, indent 3).
val printer = new scala.xml.PrettyPrinter(200, 3)
// Ensure the portal shuts down cleanly on JVM exit.
Runtime.getRuntime.addShutdownHook( new Thread() { override def run() { term("ShutdownHook Called") } } )
/** Entry point: dispatches to an alternate program when one is named,
 *  otherwise enters the normal request-processing loop.
 */
def start( run_program: String = "" ): Unit =
  if (run_program.nonEmpty) runAltProgram(run_program) else run()
// Placeholder for alternate execution modes selected by name; currently a
// no-op (no alternate programs are implemented).
def runAltProgram( run_program: String ): Unit = {
}
// Utility-request handler: currently a stub that ignores the spec and
// returns an empty Message.
override def execUtility(utilSpec: Array[String]): Message = {
new Message("","","")
}
// def getResult( resultSpec: Array[String], response_syntax: ResponseSyntax.Value ) = {
// val result: xml.Node = processManager.getResult( process, resultSpec(0),response_syntax )
// sendResponse( resultSpec(0), printer.format( result ) )
// }
//
// def getResultStatus( resultSpec: Array[String], response_syntax: ResponseSyntax.Value ) = {
// val result: xml.Node = processManager.getResultStatus( process, resultSpec(0), response_syntax )
// sendResponse( resultSpec(0), printer.format( result ) )
// }
// Extracts the run-argument map from element 4 of the task spec (if present)
// and normalizes the response configuration:
//  - "responseform" == "wps" (the default): ensure a "response" entry exists,
//    defaulting via defaultResponseType.
//  - otherwise "responseform" is "form[:cid]": the form becomes the
//    "response" value, and for form "collection" the suffix becomes "cid".
def getRunArgs( taskSpec: Array[String] ): Map[String,String] = {
val runargs = if( taskSpec.length > 4 ) wpsObjectParser.parseMap( taskSpec(4) ) else Map.empty[String, Any]
val responseForm = runargs.getOrElse("responseform","wps").toString
if( responseForm == "wps" ) {
val responseType = runargs.getOrElse( "response", defaultResponseType(runargs) ).toString
runargs.mapValues(_.toString) + ("response" -> responseType )
} else {
val responseToks = responseForm.split(':')
val new_runargs = runargs.mapValues(_.toString) + ("response" -> responseToks.head )
if( responseToks.head.equalsIgnoreCase("collection") && responseToks.length > 1 ) { new_runargs + ("cid" -> responseToks.last ) } else { new_runargs }
}
}
/** Default "response" form when the client did not specify one: "file" when
 *  status monitoring was requested (`status` arg is true), "xml" otherwise.
 */
def defaultResponseType( runargs: Map[String, Any] ): String =
  if (runargs.getOrElse("status", "false").toString.toBoolean) "file" else "xml"
// Main request handler: parses the task spec (clientId, _, processName,
// dataInputs, runargs), submits the job to the process manager with a
// callback that publishes results/status back to the client, and returns an
// immediate acknowledgment Message. Any failure (parse or submit) is
// reported as an ErrorReport and the job status is set to "error".
override def execute( taskSpec: Array[String] ): Response = {
val clientId = elem(taskSpec,0)
val runargs = getRunArgs( taskSpec )
// Fall back to a random job id when the client did not supply one.
val jobId = runargs.getOrElse( "jobId", randomIds.nextString )
logger.info( " @@E TaskSpec: " + taskSpec.mkString(", ") )
try {
val process_name = elem(taskSpec,2)
val dataInputsSpec = elem(taskSpec,3)
setExeStatus( clientId, jobId, "executing " + process_name + "-> " + dataInputsSpec )
logger.info( " @@E: Executing " + process_name + "-> " + dataInputsSpec + ", jobId = " + jobId + ", runargs = " + runargs.mkString("; "))
val response_syntax = getResponseSyntax(runargs)
val responseType = runargs.getOrElse("response","file")
// Invoked asynchronously when the job finishes; routes the result either
// directly over the wire ("object") or as a file reference ("file"), then
// publishes completion status with the response metadata attached.
val executionCallback: ExecutionCallback = new ExecutionCallback {
override def success( results: xml.Node ): Unit = {
logger.info(s" *** ExecutionCallback: jobId = $jobId, responseType = $responseType *** ")
val metadata = if (responseType == "object") { sendDirectResponse(response_syntax, clientId, jobId, results) }
else if (responseType == "file") { sendFileResponse(response_syntax, clientId, jobId, results) }
else { Map.empty }
setExeStatus(clientId, jobId, "completed|" + ( metadata.map { case (key,value) => key + ":" + value } mkString "," ) )
}
override def failure( msg: String ): Unit = {
logger.error( s"ERROR CALLBACK ($jobId:$clientId): " + msg )
setExeStatus( clientId, jobId, "error" )
}
}
val (rid, responseElem) = processManager.executeProcess( process, Job(jobId, process_name, dataInputsSpec, runargs, 1f ), Some(executionCallback) )
// Acknowledge submission immediately; results arrive via the callback.
new Message(clientId, jobId, printer.format(responseElem))
} catch {
// NOTE(review): catches Throwable (not NonFatal) — fatal VM errors are
// swallowed into an ErrorReport here; confirm this is intentional.
case e: Throwable =>
logger.error( "Caught execution error: " + e.getMessage )
logger.error( "\\n" + e.getStackTrace.mkString("\\n") )
setExeStatus( clientId, jobId, "error" )
new ErrorReport( clientId, jobId, e.getClass.getSimpleName + ": " + e.getMessage )
}
}
/** Formats the exception as a WPS exception report and forwards it to the client. */
def sendErrorReport( response_format: ResponseSyntax.Value, clientId: String, responseId: String, exc: Exception ): Unit = {
  val err = new WPSExceptionReport(exc)
  sendErrorReport( clientId, responseId, printer.format( err.toXml(response_format) ) )
}
/**
 * Error-report variant driven by a raw task spec: extracts the client id and
 * the response syntax from the spec, then reports under the generic id
 * "requestError".
 */
def sendErrorReport( taskSpec: Array[String], exc: Exception ): Unit = {
  // Use the bounds-safe accessor, consistent with execute(); a bare
  // taskSpec(0) would throw on an empty spec before the error could be sent.
  val clientId = elem(taskSpec,0)
  val runargs = getRunArgs( taskSpec )
  val syntax = getResponseSyntax(runargs)
  val err = new WPSExceptionReport(exc)
  sendErrorReport( clientId, "requestError", printer.format( err.toXml(syntax) ) )
}
/** Terminates the underlying process manager when the portal shuts down. */
override def shutdown(): Unit = processManager.term
/**
 * Streams the result arrays of a completed job directly back to the client.
 * The result id is taken from the href attribute of the response's "data"
 * nodes; the matching result variable is looked up and each element of its
 * first record is sent as raw bytes. Missing ids/variables are reported as
 * errors to the client.
 *
 * NOTE(review): always returns an empty metadata map, even on success —
 * unlike sendFileResponse, which reports the published files; confirm intended.
 */
def sendDirectResponse( response_format: ResponseSyntax.Value, clientId: String, responseId: String, response: xml.Node ): Map[String,String] = {
  val refs: xml.NodeSeq = response \\\\ "data"
  val resultHref = refs.flatMap( _.attribute("href") ).find( _.nonEmpty ).map( _.text ) match {
    case Some( href ) =>
      // The result id is the last path segment of the href.
      val rid = href.split("[/]").last
      logger.info( "\\n\\n **** Found result Id: " + rid + " ****** \\n\\n")
      processManager.getResultVariable("edas",rid) match {
        case Some( resultVar ) =>
          // Send every element of the first record of the concatenated slices.
          val slice: CDRecord = resultVar.result.concatSlices.records.head
          slice.elements.foreach { case ( id, array ) =>
            sendArrayData( clientId, rid, array.origin, array.shape, array.toByteArray, resultVar.result.metadata + ( "elem" -> id ) ) // + ("gridfile" -> gridfilename)
          }
          // var gridfilename = ""
          // resultVar.result.slices.foreach { case (key, data) =>
          //   if( gridfilename.isEmpty ) {
          //     val gridfilepath = data.metadata("gridfile")
          //     gridfilename = sendFile( clientId, rid, "gridfile", gridfilepath, true )
          //   }
        case None =>
          logger.error( "Can't find result variable " + rid)
          sendErrorReport( response_format, clientId, rid, new Exception( "Can't find result variable " + rid + " in [ " + processManager.getResultVariables("edas").mkString(", ") + " ]") )
      }
    case None =>
      logger.error( "Can't find result Id in direct response: " + response.toString() )
      sendErrorReport( response_format, clientId, responseId, new Exception( "Can't find result Id in direct response: " + response.toString() ) )
  }
  Map.empty[String,String]
}
/** Returns the text of the first non-empty value of attribute `attrId` on `node`, if any. */
def getNodeAttribute( node: xml.Node, attrId: String ): Option[String] = {
  for {
    values <- node.attribute( attrId )
    hit    <- values.find( _.nonEmpty )
  } yield hit.text
}
/** Renders all attributes of the node as a single string (used in diagnostics). */
def getNodeAttributes( node: xml.Node ): String = node.attributes.toString()
/**
 * Publishes the result files of a completed job. For each "data" node carrying
 * both "href" and "files" attributes, every comma-separated file is sent;
 * "collection" hrefs produce a notification message; anything else is reported
 * back as an error.
 *
 * @return metadata map with the comma-separated list of published files
 */
def sendFileResponse( response_format: ResponseSyntax.Value, clientId: String, jobId: String, response: xml.Node ): Map[String,String] = {
  val refs: xml.NodeSeq = response \\\\ "data"
  var result_files = new ArrayBuffer[String]()
  for( node: xml.Node <- refs; hrefOpt = getNodeAttribute( node,"href"); fileOpt = getNodeAttribute( node,"files") ) {
    if (hrefOpt.isDefined && fileOpt.isDefined) {
      val sharedDataDir = appParameters( "wps.shared.data.dir" )
      // val href = hrefOpt.get
      // val rid = href.split("[/]").last
      fileOpt.get.split(",").foreach( filepath => {
        logger.info(" ****>> Found file node for jobId: " + jobId + ", clientId: " + clientId + ", sending File: " + filepath + " ****** ")
        // When a shared data dir is configured the client reads the file in
        // place, so the payload itself need not be transferred (last arg false).
        result_files += sendFile( clientId, jobId, "publish", filepath, sharedDataDir.isEmpty )
      })
    } else if (hrefOpt.isDefined && hrefOpt.get.startsWith("collection")) {
      responder.sendResponse( new Message( clientId, jobId, "Created " + hrefOpt.get ) )
    } else {
      sendErrorReport( response_format, clientId, jobId, new Exception( "Can't find href or files in attributes: " + getNodeAttributes( node ) ) )
    }
  }
  Map( "files" -> result_files.mkString(",") )
}
/** Handles a WPS getCapabilities request and returns the capabilities document. */
override def getCapabilities(utilSpec: Array[String]): Message = {
  val runargs: Map[String,String] = getRunArgs( utilSpec )
  logger.info(s"Processing getCapabilities request with args ${runargs.toList.mkString(",")}" )
  val result: xml.Elem = processManager.getCapabilities( process, elem(utilSpec,2), runargs )
  new Message( utilSpec(0), "capabilities", printer.format( result ) )
}
/** Handles a WPS describeProcess request and returns the process description. */
override def describeProcess(procSpec: Array[String]): Message = {
  val runargs: Map[String,String] = getRunArgs( procSpec )
  logger.info(s"Processing describeProcess request with args ${runargs.toList.mkString(",")}" )
  val result: xml.Elem = processManager.describeProcess( process, elem(procSpec,2), runargs )
  // Message id fixed: was misspelled "preocesses".
  new Message( procSpec(0), "processes", printer.format( result ) )
}
}
/**
 * Main entry point of the EDAS portal: reads the request/response ports and the
 * configuration file from the command line, starts the portal application, and
 * exits the JVM when it terminates.
 */
object EDASApplication extends Loggable {

  // Explicit ": Unit =" replaces the deprecated procedure syntax.
  def main(args: Array[String]): Unit = {
    import EDASapp._
    EDASLogManager.isMaster()
    logger.info(s"Executing EDAS with args: ${args.mkString(",")}, nprocs: ${Runtime.getRuntime.availableProcessors()}")
    val request_port = elem(args, 0, "0").toInt
    val response_port = elem(args, 1, "0").toInt
    val parameter_file = elem(args, 2)
    val run_program = elem(args, 3)
    val appConfiguration = getConfiguration(parameter_file)
    // Bind to all interfaces unless a specific client address is configured.
    val client_address: String = appConfiguration.getOrElse("client", "*")
    // EDASExecutionManager.addTestProcess( new TestDatasetProcess( "testDataset") )
    // EDASExecutionManager.addTestProcess( new TestClockProcess( "testClock") )
    val app = new EDASapp(client_address, request_port, response_port, appConfiguration)
    app.start(run_program)
    logger.info(s"EXIT EDASApplication")
    sys.exit()
  }
}
/**
 * Maintenance entry point: shuts down any lingering python workers and cleans
 * up Spark worker state left behind by previous runs.
 */
object SparkCleanup extends Loggable {

  def main(args: Array[String]): Unit = {
    EDASExecutionManager.shutdown_python_workers()
    EDASExecutionManager.cleanup_spark_workers()
  }
}
/**
 * Spark diagnostic: runs 500 trivial tasks over 100 partitions and logs, for
 * each task, the executor/JVM it ran on and its start offset (seconds) from
 * job launch.
 */
object TestApplication extends Loggable {

  def main(args: Array[String]): Unit = {
    EDASLogManager.isMaster()
    val sc = CDSparkContext()
    val indices = sc.sparkContext.parallelize( Array.range(0,500), 100 )
    val base_time = System.currentTimeMillis()
    val timings = indices.map( getProfileDiagnostic(base_time) )
    val time_list = timings.collect() mkString "\\n"
    println( " @@@ NPart = " + indices.getNumPartitions.toString )
    println( time_list )
  }

  /**
   * Formats one per-task diagnostic line and logs it.
   * `base_time` is now a Long: the previous `Float` parameter silently
   * truncated the millisecond epoch (Float has a 24-bit mantissa), skewing the
   * reported offsets by up to roughly two minutes. The only caller (main above)
   * already passes a Long.
   */
  def getProfileDiagnostic( base_time: Long )( index: Int ): String = {
    val result = s" T{$index} => E${SparkEnv.get.executorId}:${ManagementFactory.getRuntimeMXBean.getName} -> %.4f".format( (System.currentTimeMillis() - base_time)/1.0E3 )
    logger.info( result )
    result
  }
}
/**
 * Standalone benchmark: reads variable "ta" from a NetCDF reanalysis file,
 * extracts a set of level slices (dimension 1) and merges them into one output
 * array using one of three copy strategies (selected by `test`), logging the
 * read and merge timings.
 */
object TestReadApplication extends Loggable {

  def main(args: Array[String]): Unit = {
    import ucar.ma2.ArrayFloat
    val data_path= "/dass/pubrepo/CREATE-IP/data/reanalysis"
    val dset_address = data_path + "/NASA-GMAO/GEOS-5/MERRA2/6hr/atmos/ta/ta_6hr_reanalysis_MERRA2_1980010100-1980013118.nc"
    // Level indices to extract from dimension 1 of the input variable.
    val indices = List( 1,3,5,7,9,11,13,15,17,19,21,23,25,27,29,31,33,35,37,39,41 )
    val vname = "ta"
    val test = 3 // copy strategy: 1 = System.arraycopy, 2 = index iterator, 3 = manual index arithmetic
    val t0 = System.nanoTime()
    val input_dataset = NetcdfDataset.openDataset(dset_address)
    val input_variable = input_dataset.findVariable(vname)
    val raw_data = input_variable.read()
    val slices = indices.map( raw_data.slice(1,_) )
    val attrs = input_variable.getAttributes.map( _.getStringValue ).mkString(", ")
    val in_shape = input_variable.getShape
    // getShape returns a fresh array each call, so mutating out_shape leaves
    // in_shape intact; `val` suffices since only the contents change.
    val out_shape = input_variable.getShape
    out_shape(1) = indices.length
    val out_array: ArrayFloat = ucar.ma2.Array.factory( ucar.ma2.DataType.FLOAT, out_shape ).asInstanceOf[ArrayFloat]
    val out_buffer: Any = out_array.getStorage
    val out_index = out_array.getIndex
    val out_size = out_array.getSize.toInt
    val nTS = out_shape(0)            // number of time steps
    val nZ = out_shape(1)             // number of extracted levels
    val out_tstride = out_size / nTS  // elements per time step in the output
    val out_zstride = out_tstride / nZ // elements per level plane in the output
    val slice_shape = slices.head.getShape
    val slice_size = slices.head.getSize.toInt
    val copy_size = slice_size / nTS  // elements per (time, level) plane
    val t1 = System.nanoTime()
    logger.info( s"Running test, in_shape : [ ${in_shape.mkString(",")} ], out_shape : [ ${out_shape.mkString(",")} ], slice_shape : [ ${slice_shape.mkString(",")} ], slice_size : [ $slice_size ] , nTS : [ $nTS ] " )
    if( test == 1 ) {
      for (si <- slices.indices; slice = slices(si)) {
        // Hoisted out of the time loop: converting the slice to a Java array is
        // loop-invariant and was previously recomputed for every time step.
        val slice_buffer: Any = slice.get1DJavaArray( slice.getElementType )
        for( iTS <- 0 until nTS ) {
          val slice_element = copy_size*iTS
          // NOTE(review): this writes in level-major order (si*slice_size + ...),
          // unlike test 3's time-major layout — confirm which layout is intended.
          val out_element = si*slice_size + slice_element
          System.arraycopy( slice_buffer, slice_element, out_buffer, out_element, copy_size )
        }
      }
    } else if( test == 2 ) {
      // Element-by-element merge driven by the slice's index iterator.
      for (slice_index <- slices.indices; slice = slices(slice_index); slice_iter = slice.getIndexIterator) {
        logger.info(s"Merging slice $slice_index, shape = [ ${slice.getShape.mkString(", ")} ]")
        while (slice_iter.hasNext) {
          val f0 = slice_iter.getFloatNext
          val counter = slice_iter.getCurrentCounter
          out_index.set(counter(0), slice_index, counter(1), counter(2))
          out_array.setFloat(out_index.currentElement, f0)
        }
      }
    } else if( test == 3 ) {
      // Manual flat-index arithmetic: time-major output layout.
      for ( iZ <- slices.indices; slice = slices(iZ) ) {
        for( iT <- 0 until nTS ) {
          for( iXY <- 0 until copy_size ) {
            val i0 = iT * copy_size + iXY
            val f0 = slice.getFloat(i0)
            val i1 = iT * out_tstride + iZ * out_zstride + iXY
            out_array.setFloat( i1, f0 )
          }
        }
      }
    }
    val t2 = System.nanoTime()
    logger.info( s"Completed test, time = %.4f sec, array join time = %.4f sec".format( (t2 - t0) / 1.0E9, (t2 - t1) / 1.0E9 ) )
  }
}
// val levs = List(100000, 97500, 95000, 92500, 90000, 87500, 85000, 82500, 80000, 77500, 75000, 70000, 65000, 60000, 55000, 50000, 45000, 40000, 35000, 30000, 25000, 20000, 15000, 10000)
// nasa.nccs.edas.portal.TestApplication
// nasa.nccs.edas.portal.TestReadApplication
| nasa-nccs-cds/EDAS | src/main/scala/nasa/nccs/edas/portal/application.scala | Scala | gpl-2.0 | 16,685 |
package com.campudus.tableaux.router
import java.util.UUID
import com.campudus.tableaux.controller.TableauxController
import com.campudus.tableaux.database.domain.{CellAnnotationType, Pagination}
import com.campudus.tableaux.helper.JsonUtils._
import com.campudus.tableaux.{InvalidJsonException, NoJsonFoundException, TableauxConfig}
import io.vertx.scala.ext.web.handler.BodyHandler
import io.vertx.scala.ext.web.{Router, RoutingContext}
import org.vertx.scala.core.json.JsonArray
import scala.concurrent.Future
import scala.util.Try
/** Factory: builds the controller from the config and wires up the router. */
object TableauxRouter {

  def apply(config: TableauxConfig, controllerCurry: TableauxConfig => TableauxController): TableauxRouter = {
    val controller = controllerCurry(config)
    new TableauxRouter(config, controller)
  }
}
class TableauxRouter(override val config: TableauxConfig, val controller: TableauxController) extends BaseRouter {
// --- Route patterns. The interpolated fragments (tableId, columnId, rowId,
// --- linkId, uuidRegex, langtagRegex) are regex snippets from BaseRouter.

// Cell-level link / attachment manipulation
private val attachmentOfCell: String = s"/tables/$tableId/columns/$columnId/rows/$rowId/attachment/$uuidRegex"
private val linkOfCell: String = s"/tables/$tableId/columns/$columnId/rows/$rowId/link/$linkId"
private val linkOrderOfCell: String = s"/tables/$tableId/columns/$columnId/rows/$rowId/link/$linkId/order"
// Unique column values (optionally per language)
private val columnsValues: String = s"/tables/$tableId/columns/$columnId/values"
private val columnsValuesWithLangtag: String = s"/tables/$tableId/columns/$columnId/values/$langtagRegex"
// Cells and their annotations
private val cell: String = s"/tables/$tableId/columns/$columnId/rows/$rowId"
private val cellAnnotations: String = s"/tables/$tableId/columns/$columnId/rows/$rowId/annotations"
private val cellAnnotation: String = s"/tables/$tableId/columns/$columnId/rows/$rowId/annotations/$uuidRegex"
private val cellAnnotationLangtag: String =
  s"/tables/$tableId/columns/$columnId/rows/$rowId/annotations/$uuidRegex/$langtagRegex"
// Rows
private val row: String = s"/tables/$tableId/rows/$rowId"
private val rowDuplicate: String = s"/tables/$tableId/rows/$rowId/duplicate"
private val rowDependent: String = s"/tables/$tableId/rows/$rowId/dependent"
private val rowAnnotations: String = s"/tables/$tableId/rows/$rowId/annotations"
private val rows: String = s"/tables/$tableId/rows"
private val rowsAnnotations: String = s"/tables/$tableId/rows/annotations"
private val rowsOfColumn: String = s"/tables/$tableId/columns/$columnId/rows"
private val rowsOfFirstColumn: String = s"/tables/$tableId/columns/first/rows"
private val rowsOfLinkCell: String = s"/tables/$tableId/columns/$columnId/rows/$rowId/foreignRows"
// Whole-table import/export and table-level metadata
private val completeTable: String = s"/completetable"
private val completeTableId: String = s"/completetable/$tableId"
private val annotationsTable: String = s"/tables/$tableId/annotations"
private val annotationCount: String = s"/tables/annotationCount"
private val translationStatus: String = s"/tables/translationStatus"
// History endpoints (cell / row / table, optionally per language)
private val cellHistory: String = s"/tables/$tableId/columns/$columnId/rows/$rowId/history"
private val cellHistoryWithLangtag: String = s"/tables/$tableId/columns/$columnId/rows/$rowId/history/$langtagRegex"
private val rowHistory: String = s"/tables/$tableId/rows/$rowId/history"
private val rowHistoryWithLangtag: String = s"/tables/$tableId/rows/$rowId/history/$langtagRegex"
private val tableHistory: String = s"/tables/$tableId/history"
private val tableHistoryWithLangtag: String = s"/tables/$tableId/history/$langtagRegex"
/**
 * Builds the vert.x sub-router for all row/cell endpoints.
 * Registration order matters: GET and DELETE routes need no request body, so
 * they are registered first; the BodyHandler is then attached so that every
 * subsequently registered POST/PATCH/PUT handler can read the parsed body.
 */
def route: Router = {
  val router = Router.router(vertx)
  // RETRIEVE
  router.getWithRegex(rows).handler(retrieveRows)
  router.getWithRegex(rowsOfLinkCell).handler(retrieveRowsOfLinkCell)
  router.getWithRegex(rowsOfColumn).handler(retrieveRowsOfColumn)
  router.getWithRegex(rowsOfFirstColumn).handler(retrieveRowsOfFirstColumn)
  router.getWithRegex(row).handler(retrieveRow)
  router.getWithRegex(rowDependent).handler(retrieveDependentRows)
  router.getWithRegex(cell).handler(retrieveCell)
  router.getWithRegex(completeTableId).handler(retrieveCompleteTable)
  router.getWithRegex(annotationsTable).handler(retrieveAnnotations)
  router.getWithRegex(annotationCount).handler(retrieveAnnotationCount)
  router.getWithRegex(translationStatus).handler(retrieveTranslationStatus)
  router.getWithRegex(columnsValues).handler(retrieveUniqueColumnValues)
  router.getWithRegex(columnsValuesWithLangtag).handler(retrieveUniqueColumnValuesWithLangtag)
  router.getWithRegex(cellHistory).handler(retrieveCellHistory)
  router.getWithRegex(cellHistoryWithLangtag).handler(retrieveCellHistoryWithLangtag)
  router.getWithRegex(rowHistory).handler(retrieveRowHistory)
  router.getWithRegex(rowHistoryWithLangtag).handler(retrieveRowHistoryWithLangtag)
  router.getWithRegex(tableHistory).handler(retrieveTableHistory)
  router.getWithRegex(tableHistoryWithLangtag).handler(retrieveTableHistoryWithLangtag)
  // DELETE
  router.deleteWithRegex(cellAnnotation).handler(deleteCellAnnotation)
  router.deleteWithRegex(cellAnnotationLangtag).handler(deleteCellAnnotationLangtag)
  router.deleteWithRegex(row).handler(deleteRow)
  router.deleteWithRegex(cell).handler(clearCell)
  router.deleteWithRegex(attachmentOfCell).handler(deleteAttachmentOfCell)
  router.deleteWithRegex(linkOfCell).handler(deleteLinkOfCell)
  // Body parsing for all mutating routes registered below.
  val bodyHandler = BodyHandler.create()
  router.post("/tables/*").handler(bodyHandler)
  router.patch("/tables/*").handler(bodyHandler)
  router.put("/tables/*").handler(bodyHandler)
  router.post("/completetable").handler(bodyHandler)
  // CREATE
  router.postWithRegex(completeTable).handler(createCompleteTable)
  router.postWithRegex(rows).handler(createRow)
  router.postWithRegex(rowDuplicate).handler(duplicateRow)
  router.postWithRegex(cellAnnotations).handler(createCellAnnotation)
  // UPDATE
  router.patchWithRegex(rowAnnotations).handler(updateRowAnnotations)
  router.patchWithRegex(rowsAnnotations).handler(updateRowsAnnotations)
  // updateCell is deliberately reachable via both PATCH and POST.
  router.patchWithRegex(cell).handler(updateCell)
  router.postWithRegex(cell).handler(updateCell)
  router.putWithRegex(cell).handler(replaceCell)
  router.putWithRegex(linkOrderOfCell).handler(changeLinkOrder)
  router
}
/**
 * GET handler: returns a page of rows for a table, honoring the optional
 * `offset` and `limit` query parameters.
 */
private def retrieveRows(context: RoutingContext): Unit = {
  getTableId(context).foreach { tableId =>
    sendReply(
      context,
      asyncGetReply {
        val limit = getLongParam("limit", context)
        val offset = getLongParam("offset", context)
        controller.retrieveRows(tableId, Pagination(offset, limit))
      }
    )
  }
}
/**
 * GET handler: returns the foreign rows reachable from a link cell,
 * with cardinality in both directions taken into account. Paged via
 * the optional `offset` and `limit` query parameters.
 */
private def retrieveRowsOfLinkCell(context: RoutingContext): Unit = {
  getTableId(context).foreach { tableId =>
    getColumnId(context).foreach { columnId =>
      getRowId(context).foreach { rowId =>
        sendReply(
          context,
          asyncGetReply {
            val limit = getLongParam("limit", context)
            val offset = getLongParam("offset", context)
            controller.retrieveForeignRows(tableId, columnId, rowId, Pagination(offset, limit))
          }
        )
      }
    }
  }
}
/** GET handler: returns a page of rows restricted to one column. */
private def retrieveRowsOfColumn(context: RoutingContext): Unit = {
  getTableId(context).foreach { tableId =>
    getColumnId(context).foreach { columnId =>
      sendReply(
        context,
        asyncGetReply {
          val limit = getLongParam("limit", context)
          val offset = getLongParam("offset", context)
          controller.retrieveRowsOfColumn(tableId, columnId, Pagination(offset, limit))
        }
      )
    }
  }
}
/** GET handler: returns a page of rows restricted to the table's first column. */
private def retrieveRowsOfFirstColumn(context: RoutingContext): Unit = {
  getTableId(context).foreach { tableId =>
    sendReply(
      context,
      asyncGetReply {
        val limit = getLongParam("limit", context)
        val offset = getLongParam("offset", context)
        controller.retrieveRowsOfFirstColumn(tableId, Pagination(offset, limit))
      }
    )
  }
}
/** GET handler: returns a single row. */
private def retrieveRow(context: RoutingContext): Unit = {
  getTableId(context).foreach { tableId =>
    getRowId(context).foreach { rowId =>
      sendReply(
        context,
        asyncGetReply {
          controller.retrieveRow(tableId, rowId)
        }
      )
    }
  }
}
/** GET handler: returns the rows that depend on the given row. */
private def retrieveDependentRows(context: RoutingContext): Unit = {
  getTableId(context).foreach { tableId =>
    getRowId(context).foreach { rowId =>
      sendReply(
        context,
        asyncGetReply {
          controller.retrieveDependentRows(tableId, rowId)
        }
      )
    }
  }
}
/** GET handler: returns a single cell. */
private def retrieveCell(context: RoutingContext): Unit = {
  getTableId(context).foreach { tableId =>
    getColumnId(context).foreach { columnId =>
      getRowId(context).foreach { rowId =>
        sendReply(
          context,
          asyncGetReply {
            controller.retrieveCell(tableId, columnId, rowId)
          }
        )
      }
    }
  }
}
/** GET handler: exports a whole table (columns and rows). */
private def retrieveCompleteTable(context: RoutingContext): Unit = {
  getTableId(context).foreach { tableId =>
    sendReply(
      context,
      asyncGetReply {
        controller.retrieveCompleteTable(tableId)
      }
    )
  }
}
/** GET handler: returns all cell annotations of one table. */
private def retrieveAnnotations(context: RoutingContext): Unit = {
  getTableId(context).foreach { tableId =>
    sendReply(
      context,
      asyncGetReply {
        controller.retrieveTableWithCellAnnotations(tableId)
      }
    )
  }
}
/**
 * GET handler: returns the cell-annotation count for every table.
 * No path parameters are required.
 */
private def retrieveAnnotationCount(context: RoutingContext): Unit = {
  sendReply(
    context,
    asyncGetReply {
      controller.retrieveTablesWithCellAnnotationCount()
    }
  )
}
/**
 * GET handler: returns the translation status for every table.
 * No path parameters are required.
 */
private def retrieveTranslationStatus(context: RoutingContext): Unit = {
  sendReply(
    context,
    asyncGetReply {
      controller.retrieveTranslationStatus()
    }
  )
}
/** GET handler: returns the distinct values of a shorttext column. */
private def retrieveUniqueColumnValues(context: RoutingContext): Unit = {
  getTableId(context).foreach { tableId =>
    getColumnId(context).foreach { columnId =>
      sendReply(
        context,
        asyncGetReply {
          controller.retrieveColumnValues(tableId, columnId, None)
        }
      )
    }
  }
}
/** GET handler: returns the distinct values of a multi-language shorttext column for one langtag. */
private def retrieveUniqueColumnValuesWithLangtag(context: RoutingContext): Unit = {
  getTableId(context).foreach { tableId =>
    getColumnId(context).foreach { columnId =>
      getLangtag(context).foreach { langtag =>
        sendReply(
          context,
          asyncGetReply {
            controller.retrieveColumnValues(tableId, columnId, Some(langtag))
          }
        )
      }
    }
  }
}
/** GET handler: full history of one cell, optionally filtered by the `historyType` query parameter. */
private def retrieveCellHistory(context: RoutingContext): Unit = {
  getTableId(context).foreach { tableId =>
    getColumnId(context).foreach { columnId =>
      getRowId(context).foreach { rowId =>
        val typeOpt = getStringParam("historyType", context)
        sendReply(
          context,
          asyncGetReply {
            controller.retrieveCellHistory(tableId, columnId, rowId, None, typeOpt)
          }
        )
      }
    }
  }
}
/**
 * GET handler: history of one cell restricted to a single language,
 * optionally filtered by the `historyType` query parameter.
 */
private def retrieveCellHistoryWithLangtag(context: RoutingContext): Unit = {
  for {
    tableId <- getTableId(context)
    columnId <- getColumnId(context)
    rowId <- getRowId(context)
    langtag <- getLangtag(context)
    typeOpt = getStringParam("historyType", context)
  } yield {
    sendReply(
      context,
      asyncGetReply {
        // The ids are already Longs (see retrieveCellHistory, which passes them
        // bare to the same controller method); the redundant .toLong were dropped.
        controller.retrieveCellHistory(tableId, columnId, rowId, Some(langtag), typeOpt)
      }
    )
  }
}
/**
 * GET handler: full history of one row, optionally filtered by the
 * `historyType` query parameter.
 */
private def retrieveRowHistory(context: RoutingContext): Unit = {
  for {
    tableId <- getTableId(context)
    rowId <- getRowId(context)
    typeOpt = getStringParam("historyType", context)
  } yield {
    sendReply(
      context,
      asyncGetReply {
        // tableId/rowId are already Longs; the redundant .toLong were dropped.
        controller.retrieveRowHistory(tableId, rowId, None, typeOpt)
      }
    )
  }
}
/**
 * GET handler: history of one row restricted to a single language,
 * optionally filtered by the `historyType` query parameter.
 */
private def retrieveRowHistoryWithLangtag(context: RoutingContext): Unit = {
  for {
    tableId <- getTableId(context)
    rowId <- getRowId(context)
    langtag <- getLangtag(context)
    typeOpt = getStringParam("historyType", context)
  } yield {
    sendReply(
      context,
      asyncGetReply {
        // tableId/rowId are already Longs; the redundant .toLong were dropped.
        controller.retrieveRowHistory(tableId, rowId, Some(langtag), typeOpt)
      }
    )
  }
}
/**
 * GET handler: full history of one table, optionally filtered by the
 * `historyType` query parameter.
 */
private def retrieveTableHistory(context: RoutingContext): Unit = {
  for {
    tableId <- getTableId(context)
    typeOpt = getStringParam("historyType", context)
  } yield {
    sendReply(
      context,
      asyncGetReply {
        // tableId is already a Long; the redundant .toLong was dropped.
        controller.retrieveTableHistory(tableId, None, typeOpt)
      }
    )
  }
}
/**
 * GET handler: history of one table restricted to a single language,
 * optionally filtered by the `historyType` query parameter.
 */
private def retrieveTableHistoryWithLangtag(context: RoutingContext): Unit = {
  for {
    tableId <- getTableId(context)
    langtag <- getLangtag(context)
    typeOpt = getStringParam("historyType", context)
  } yield {
    sendReply(
      context,
      asyncGetReply {
        // tableId is already a Long; the redundant .toLong was dropped.
        controller.retrieveTableHistory(tableId, Some(langtag), typeOpt)
      }
    )
  }
}
/**
 * POST handler: creates a table together with its columns and (optionally)
 * its rows from one JSON document.
 */
private def createCompleteTable(context: RoutingContext): Unit = {
  sendReply(
    context,
    asyncGetReply {
      val json = getJson(context)
      val name = json.getString("name")
      val columns = toCreateColumnSeq(json)
      // Rows are optional in the payload.
      val rows = if (json.containsKey("rows")) toRowValueSeq(json) else Seq()
      controller.createCompleteTable(name, columns, rows)
    }
  )
}
/**
 * POST handler: creates a row. When the body contains both "columns" and
 * "rows" the row is created with those initial values; with no JSON body at
 * all an empty row is created (NoJsonFoundException is deliberately swallowed
 * and mapped to None). Other parse failures still propagate via Try.get.
 */
private def createRow(context: RoutingContext): Unit = {
  // Some(values) when a body with columns+rows is present, None otherwise.
  def getOptionalValues = {
    val json = getJson(context)
    if (json.containsKey("columns") && json.containsKey("rows")) {
      Some(toColumnValueSeq(json))
    } else {
      None
    }
  }
  for {
    tableId <- getTableId(context)
  } yield {
    sendReply(
      context,
      asyncGetReply {
        val optionalValues = Try(getOptionalValues)
          .recover({
            // A missing body is legal: create an empty row.
            case _: NoJsonFoundException => None
          })
          .get
        controller
          .createRow(tableId, optionalValues)
      }
    )
  }
}
/** POST handler: duplicates one row within its table. */
private def duplicateRow(context: RoutingContext): Unit = {
  getTableId(context).foreach { tableId =>
    getRowId(context).foreach { rowId =>
      sendReply(
        context,
        asyncGetReply {
          controller.duplicateRow(tableId, rowId)
        }
      )
    }
  }
}
/**
 * PATCH handler: sets or clears the "final" annotation of one row.
 * The flag is only touched when the body actually contains the "final" key;
 * otherwise finalFlagOpt is None and the annotation is left unchanged.
 */
private def updateRowAnnotations(context: RoutingContext): Unit = {
  for {
    tableId <- getTableId(context)
    rowId <- getRowId(context)
  } yield {
    sendReply(
      context,
      asyncGetReply {
        val json = getJson(context)
        val finalFlagOpt = booleanToValueOption(json.containsKey("final"), json.getBoolean("final", false))
          .map(_.booleanValue())
        for {
          updated <- controller.updateRowAnnotations(tableId, rowId, finalFlagOpt)
        } yield updated
      }
    )
  }
}
/**
 * PATCH handler: sets or clears the "final" annotation on every row of a
 * table. The flag is only touched when the body contains the "final" key.
 */
private def updateRowsAnnotations(context: RoutingContext): Unit = {
  for {
    tableId <- getTableId(context)
  } yield {
    sendReply(
      context,
      asyncGetReply {
        val json = getJson(context)
        val finalFlagOpt = booleanToValueOption(json.containsKey("final"), json.getBoolean("final", false))
          .map(_.booleanValue())
        for {
          // tableId is already a Long (see updateRowAnnotations, which passes it
          // bare); the redundant .toLong was dropped.
          updated <- controller.updateRowsAnnotations(tableId, finalFlagOpt)
        } yield updated
      }
    )
  }
}
/**
 * POST handler: adds a cell annotation (possibly merged with an existing one).
 * Body: "type" (required, parsed into CellAnnotationType), "langtags"
 * (optional string array) and "value" (optional free text).
 */
private def createCellAnnotation(context: RoutingContext): Unit = {
  for {
    tableId <- getTableId(context)
    columnId <- getColumnId(context)
    rowId <- getRowId(context)
  } yield {
    sendReply(
      context,
      asyncGetReply {
        import com.campudus.tableaux.ArgumentChecker._
        val json = getJson(context)
        // checked(...) raises a client-facing validation error on bad input.
        val langtags = checked(asCastedList[String](json.getJsonArray("langtags", new JsonArray())))
        val flagType = checked(hasString("type", json).map(CellAnnotationType(_)))
        val value = json.getString("value")
        for {
          cellAnnotation <- controller.addCellAnnotation(tableId, columnId, rowId, langtags, flagType, value)
        } yield cellAnnotation
      }
    )
  }
}
/** PATCH/POST handler: updates a cell's value, or adds a link/attachment to it. */
private def updateCell(context: RoutingContext): Unit = {
  getTableId(context).foreach { tableId =>
    getColumnId(context).foreach { columnId =>
      getRowId(context).foreach { rowId =>
        sendReply(
          context,
          asyncGetReply {
            val json = getJson(context)
            controller.updateCellValue(tableId, columnId, rowId, json.getValue("value"))
          }
        )
      }
    }
  }
}
/** PUT handler: replaces a cell's value entirely; the body must contain "value". */
private def replaceCell(context: RoutingContext): Unit = {
  getTableId(context).foreach { tableId =>
    getColumnId(context).foreach { columnId =>
      getRowId(context).foreach { rowId =>
        sendReply(
          context,
          asyncGetReply {
            val json = getJson(context)
            if (json.containsKey("value")) {
              controller.replaceCellValue(tableId, columnId, rowId, json.getValue("value"))
            } else {
              Future.failed(InvalidJsonException("request must contain a value", "value_is_missing"))
            }
          }
        )
      }
    }
  }
}
/** PUT handler: moves one link of a link cell to a new position. */
private def changeLinkOrder(context: RoutingContext): Unit = {
  getTableId(context).foreach { tableId =>
    getColumnId(context).foreach { columnId =>
      getRowId(context).foreach { rowId =>
        getLinkId(context).foreach { linkId =>
          sendReply(
            context,
            asyncGetReply {
              val json = getJson(context)
              controller.updateCellLinkOrder(tableId, columnId, rowId, linkId, toLocationType(json))
            }
          )
        }
      }
    }
  }
}
/** DELETE handler: removes one cell annotation, addressed by its UUID. */
private def deleteCellAnnotation(context: RoutingContext): Unit = {
  getTableId(context).foreach { tableId =>
    getColumnId(context).foreach { columnId =>
      getRowId(context).foreach { rowId =>
        getUUID(context).foreach { uuid =>
          sendReply(
            context,
            asyncGetReply {
              controller.deleteCellAnnotation(tableId, columnId, rowId, UUID.fromString(uuid))
            }
          )
        }
      }
    }
  }
}
/** DELETE handler: removes a single langtag from a cell annotation. */
private def deleteCellAnnotationLangtag(context: RoutingContext): Unit = {
  getTableId(context).foreach { tableId =>
    getColumnId(context).foreach { columnId =>
      getRowId(context).foreach { rowId =>
        getUUID(context).foreach { uuid =>
          getLangtag(context).foreach { langtag =>
            sendReply(
              context,
              asyncGetReply {
                controller.deleteCellAnnotation(tableId, columnId, rowId, UUID.fromString(uuid), langtag)
              }
            )
          }
        }
      }
    }
  }
}
/** DELETE handler: removes one row; replies with an empty body on success. */
private def deleteRow(context: RoutingContext): Unit = {
  getTableId(context).foreach { tableId =>
    getRowId(context).foreach { rowId =>
      sendReply(
        context,
        asyncEmptyReply {
          controller.deleteRow(tableId, rowId)
        }
      )
    }
  }
}
/** DELETE handler: clears one cell's value (the cell itself remains). */
private def clearCell(context: RoutingContext): Unit = {
  getTableId(context).foreach { tableId =>
    getColumnId(context).foreach { columnId =>
      getRowId(context).foreach { rowId =>
        sendReply(
          context,
          asyncGetReply {
            controller.clearCellValue(tableId, columnId, rowId)
          }
        )
      }
    }
  }
}
/**
 * DELETE handler: removes one attachment (addressed by its UUID) from a cell;
 * replies with an empty body on success.
 */
private def deleteAttachmentOfCell(context: RoutingContext): Unit = {
  for {
    tableId <- getTableId(context)
    columnId <- getColumnId(context)
    rowId <- getRowId(context)
    uuid <- getUUID(context)
  } yield {
    sendReply(
      context,
      asyncEmptyReply {
        // The ids are already Longs (see deleteLinkOfCell, which passes them
        // bare); the redundant .toLong conversions were dropped.
        controller.deleteAttachment(tableId, columnId, rowId, uuid)
      }
    )
  }
}
/** DELETE handler: removes one link from a link cell. */
private def deleteLinkOfCell(context: RoutingContext): Unit = {
  getTableId(context).foreach { tableId =>
    getColumnId(context).foreach { columnId =>
      getRowId(context).foreach { rowId =>
        getLinkId(context).foreach { linkId =>
          sendReply(
            context,
            asyncGetReply {
              controller.deleteLink(tableId, columnId, rowId, linkId)
            }
          )
        }
      }
    }
  }
}
/** Extracts the `rowId` path parameter as a Long, if present and valid. */
private def getRowId(context: RoutingContext): Option[Long] =
  getLongParam("rowId", context)
/** Extracts the `linkId` path parameter as a Long, if present and valid. */
private def getLinkId(context: RoutingContext): Option[Long] =
  getLongParam("linkId", context)
}
| campudus/tableaux | src/main/scala/com/campudus/tableaux/router/TableauxRouter.scala | Scala | apache-2.0 | 22,352 |
package example
import scalaz.syntax.foldable._
import scalaz.std.list._
import scalaz.syntax.foldable._
import scalaz.std.math.bigDecimal._
import scalaz.std.anyVal._
import monocle.macros._
// Domain entity; @Lenses generates a monocle lens per field, prefixed with "_".
@Lenses("_")
case class Person(name: String, firstName : Option[String], firstAddress : Address , addresses : List[Address], account : Option[Account])
@Lenses("_")
case class Account(number: Int, bookings : List[Booking]) {
  // Balance snapshot: account number, highest booking id (0 when there are no
  // bookings), and the sum of all amounts (scalaz foldMap over the BigDecimal monoid).
  def balance : Balance = Balance(number, bookings.map(_.id).maximum.getOrElse(0), bookings.foldMap(_.amount))
}
// Postal address; lenses generated with "_" prefix.
@Lenses("_")
case class Address(street: String, city: String)
// Single account booking; lenses generated with "_" prefix.
@Lenses("_")
case class Booking(id: Int, amount: BigDecimal)
// Derived balance snapshot of an account; lenses generated with "_" prefix.
// (Stray dataset-extraction metadata that was fused onto this line was removed.)
@Lenses("_")
case class Balance(accountId: Int, id: Int, amount : BigDecimal)
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util
import java.util.concurrent._
import scala.collection.TraversableLike
import scala.collection.generic.CanBuildFrom
import scala.language.higherKinds
import com.google.common.util.concurrent.{MoreExecutors, ThreadFactoryBuilder}
import scala.concurrent.{Awaitable, ExecutionContext, ExecutionContextExecutor, Future}
import scala.concurrent.duration.{Duration, FiniteDuration}
import scala.util.control.NonFatal
import org.apache.spark.SparkException
private[spark] object ThreadUtils {
// Single shared executor that runs every submitted task synchronously on the
// submitting thread (guava's sameThreadExecutor adapted to a Scala context).
private val sameThreadExecutionContext =
  ExecutionContext.fromExecutorService(MoreExecutors.sameThreadExecutor())

/**
 * An `ExecutionContextExecutor` that runs each task in the thread that invokes `execute/submit`.
 * The caller should make sure the tasks running in this `ExecutionContextExecutor` are short and
 * never block.
 */
def sameThread: ExecutionContextExecutor = sameThreadExecutionContext
/**
 * Create a thread factory that names threads with a prefix and also sets the threads to daemon.
 * Thread names follow the pattern "prefix-N" with a sequentially assigned N.
 */
def namedThreadFactory(prefix: String): ThreadFactory = {
  new ThreadFactoryBuilder().setDaemon(true).setNameFormat(prefix + "-%d").build()
}
/**
 * Wrapper over newCachedThreadPool. Thread names are formatted as prefix-ID, where ID is a
 * unique, sequentially assigned integer. All threads are daemons.
 */
def newDaemonCachedThreadPool(prefix: String): ThreadPoolExecutor = {
  val threadFactory = namedThreadFactory(prefix)
  // Executors.newCachedThreadPool is documented to return a ThreadPoolExecutor,
  // so the downcast is safe.
  Executors.newCachedThreadPool(threadFactory).asInstanceOf[ThreadPoolExecutor]
}
/**
 * Create a cached thread pool whose max number of threads is `maxThreadNumber`. Thread names
 * are formatted as prefix-ID, where ID is a unique, sequentially assigned integer.
 */
def newDaemonCachedThreadPool(
    prefix: String, maxThreadNumber: Int, keepAliveSeconds: Int = 60): ThreadPoolExecutor = {
  val threadFactory = namedThreadFactory(prefix)
  val threadPool = new ThreadPoolExecutor(
    maxThreadNumber, // corePoolSize: the max number of threads to create before queuing the tasks
    maxThreadNumber, // maximumPoolSize: because we use an unbounded LinkedBlockingQueue, this one is not used
    keepAliveSeconds,
    TimeUnit.SECONDS,
    new LinkedBlockingQueue[Runnable],
    threadFactory)
  // Let idle core threads die after keepAliveSeconds; otherwise this "cached"
  // pool would pin maxThreadNumber idle threads forever.
  threadPool.allowCoreThreadTimeOut(true)
  threadPool
}
/**
 * Wrapper over newFixedThreadPool. Thread names are formatted as prefix-ID, where ID is a
 * unique, sequentially assigned integer. All threads are daemons.
 */
def newDaemonFixedThreadPool(nThreads: Int, prefix: String): ThreadPoolExecutor = {
  val threadFactory = namedThreadFactory(prefix)
  // Executors.newFixedThreadPool is documented to return a ThreadPoolExecutor.
  Executors.newFixedThreadPool(nThreads, threadFactory).asInstanceOf[ThreadPoolExecutor]
}
/**
 * Wrapper over newSingleThreadExecutor. The single worker is a daemon thread
 * carrying exactly `threadName` (no numeric suffix).
 */
def newDaemonSingleThreadExecutor(threadName: String): ExecutorService = {
  val threadFactory = new ThreadFactoryBuilder().setDaemon(true).setNameFormat(threadName).build()
  Executors.newSingleThreadExecutor(threadFactory)
}
/**
* Wrapper over ScheduledThreadPoolExecutor.
*/
def newDaemonSingleThreadScheduledExecutor(threadName: String): ScheduledExecutorService = {
val threadFactory = new ThreadFactoryBuilder().setDaemon(true).setNameFormat(threadName).build()
val executor = new ScheduledThreadPoolExecutor(1, threadFactory)
// By default, a cancelled task is not automatically removed from the work queue until its delay
// elapses. We have to enable it manually.
executor.setRemoveOnCancelPolicy(true)
executor
}
/**
* Wrapper over ScheduledThreadPoolExecutor.
*/
def newDaemonThreadPoolScheduledExecutor(threadNamePrefix: String, numThreads: Int)
: ScheduledExecutorService = {
val threadFactory = new ThreadFactoryBuilder()
.setDaemon(true)
.setNameFormat(s"$threadNamePrefix-%d")
.build()
val executor = new ScheduledThreadPoolExecutor(numThreads, threadFactory)
// By default, a cancelled task is not automatically removed from the work queue until its delay
// elapses. We have to enable it manually.
executor.setRemoveOnCancelPolicy(true)
executor
}
  /**
   * Run a piece of code in a new thread and return the result. Exception in the new thread is
   * thrown in the caller thread with an adjusted stack trace that removes references to this
   * method for clarity. The exception stack traces will be like the following
   *
   * SomeException: exception-message
   * at CallerClass.body-method (sourcefile.scala)
   * at ... run in separate thread using org.apache.spark.util.ThreadUtils ... ()
   * at CallerClass.caller-method (sourcefile.scala)
   * ...
   *
   * @param threadName name given to the spawned thread.
   * @param isDaemon   whether the spawned thread is marked daemon (default true).
   * @param body       the code to evaluate on the new thread; evaluated exactly once.
   */
  def runInNewThread[T](
      threadName: String,
      isDaemon: Boolean = true)(body: => T): T = {
    // Outcome of `body`, published across the thread boundary; @volatile makes the
    // child thread's writes visible to the caller once join() returns.
    @volatile var exception: Option[Throwable] = None
    @volatile var result: T = null.asInstanceOf[T]
    val thread = new Thread(threadName) {
      override def run(): Unit = {
        try {
          result = body
        } catch {
          // Fatal errors (OOM etc.) deliberately propagate in the child thread.
          case NonFatal(e) =>
            exception = Some(e)
        }
      }
    }
    thread.setDaemon(isDaemon)
    thread.start()
    thread.join()
    exception match {
      case Some(realException) =>
        // Remove the part of the stack that shows method calls into this helper method
        // This means drop everything from the top until the stack element
        // ThreadUtils.runInNewThread(), and then drop that as well (hence the `drop(1)`).
        val baseStackTrace = Thread.currentThread().getStackTrace().dropWhile(
          ! _.getClassName.contains(this.getClass.getSimpleName)).drop(1)
        // Remove the part of the new thread stack that shows methods call from this helper method
        val extraStackTrace = realException.getStackTrace.takeWhile(
          ! _.getClassName.contains(this.getClass.getSimpleName))
        // Combine the two stack traces, with a place holder just specifying that there
        // was a helper method used, without any further details of the helper
        val placeHolderStackElem = new StackTraceElement(
          s"... run in separate thread using ${ThreadUtils.getClass.getName.stripSuffix("$")} ..",
          " ", "", -1)
        val finalStackTrace = extraStackTrace ++ Seq(placeHolderStackElem) ++ baseStackTrace
        // Update the stack trace and rethrow the exception in the caller thread
        realException.setStackTrace(finalStackTrace)
        throw realException
      case None =>
        result
    }
  }
/**
* Construct a new ForkJoinPool with a specified max parallelism and name prefix.
*/
def newForkJoinPool(prefix: String, maxThreadNumber: Int): ForkJoinPool = {
// Custom factory to set thread names
val factory = new ForkJoinPool.ForkJoinWorkerThreadFactory {
override def newThread(pool: ForkJoinPool) =
new ForkJoinWorkerThread(pool) {
setName(prefix + "-" + super.getName)
}
}
new ForkJoinPool(maxThreadNumber, factory,
null, // handler
false // asyncMode
)
}
  // scalastyle:off awaitresult
  /**
   * Preferred alternative to `Await.result()`.
   *
   * This method wraps and re-throws any exceptions thrown by the underlying `Await` call, ensuring
   * that this thread's stack trace appears in logs.
   *
   * In addition, it calls `Awaitable.result` directly to avoid using `ForkJoinPool`'s
   * `BlockingContext`. Codes running in the user's thread may be in a thread of Scala ForkJoinPool.
   * As concurrent executions in ForkJoinPool may see some [[ThreadLocal]] value unexpectedly, this
   * method basically prevents ForkJoinPool from running other tasks in the current waiting thread.
   * In general, we should use this method because many places in Spark use [[ThreadLocal]] and it's
   * hard to debug when [[ThreadLocal]]s leak to other tasks.
   */
  @throws(classOf[SparkException])
  def awaitResult[T](awaitable: Awaitable[T], atMost: Duration): T = {
    try {
      // `awaitPermission` is not actually used anywhere so it's safe to pass in null here.
      // See SPARK-13747.
      val awaitPermission = null.asInstanceOf[scala.concurrent.CanAwait]
      awaitable.result(atMost)(awaitPermission)
    } catch {
      // Presumably wrapped so that NonFatal below does not catch it: unwrap and
      // rethrow the original throwable untouched.
      case e: SparkFatalException =>
        throw e.throwable
      // TimeoutException is thrown in the current thread, so no need to wrap the exception.
      case NonFatal(t) if !t.isInstanceOf[TimeoutException] =>
        throw new SparkException("Exception thrown in awaitResult: ", t)
    }
  }
  // scalastyle:on awaitresult
// scalastyle:off awaitready
/**
* Preferred alternative to `Await.ready()`.
*
* @see [[awaitResult]]
*/
@throws(classOf[SparkException])
def awaitReady[T](awaitable: Awaitable[T], atMost: Duration): awaitable.type = {
try {
// `awaitPermission` is not actually used anywhere so it's safe to pass in null here.
// See SPARK-13747.
val awaitPermission = null.asInstanceOf[scala.concurrent.CanAwait]
awaitable.ready(atMost)(awaitPermission)
} catch {
// TimeoutException is thrown in the current thread, so not need to warp the exception.
case NonFatal(t) if !t.isInstanceOf[TimeoutException] =>
throw new SparkException("Exception thrown in awaitResult: ", t)
}
}
// scalastyle:on awaitready
def shutdown(
executor: ExecutorService,
gracePeriod: Duration = FiniteDuration(30, TimeUnit.SECONDS)): Unit = {
executor.shutdown()
executor.awaitTermination(gracePeriod.toMillis, TimeUnit.MILLISECONDS)
if (!executor.isShutdown) {
executor.shutdownNow()
}
}
  /**
   * Transforms input collection by applying the given function to each element in parallel fashion.
   * Comparing to the map() method of Scala parallel collections, this method can be interrupted
   * at any time. This is useful on canceling of task execution, for example.
   *
   * @param in - the input collection which should be transformed in parallel.
   * @param prefix - the prefix assigned to the underlying thread pool.
   * @param maxThreads - maximum number of thread can be created during execution.
   * @param f - the lambda function will be applied to each element of `in`.
   * @tparam I - the type of elements in the input collection.
   * @tparam O - the type of elements in resulted collection.
   * @return new collection in which each element was given from the input collection `in` by
   * applying the lambda function `f`.
   */
  def parmap[I, O, Col[X] <: TraversableLike[X, Col[X]]]
      (in: Col[I], prefix: String, maxThreads: Int)
      (f: I => O)
      (implicit
        cbf: CanBuildFrom[Col[I], Future[O], Col[Future[O]]], // For in.map
        cbf2: CanBuildFrom[Col[Future[O]], O, Col[O]] // for Future.sequence
      ): Col[O] = {
    // Dedicated pool so the caller controls the parallelism bound; torn down below.
    val pool = newForkJoinPool(prefix, maxThreads)
    try {
      implicit val ec = ExecutionContext.fromExecutor(pool)
      // Start one Future per element on the dedicated pool.
      val futures = in.map(x => Future(f(x)))
      val futureSeq = Future.sequence(futures)
      // Blocks until every element is transformed. Interruptibility comes from the
      // finally block: shutdownNow() interrupts in-flight tasks.
      awaitResult(futureSeq, Duration.Inf)
    } finally {
      pool.shutdownNow()
    }
  }
}
| aosagie/spark | core/src/main/scala/org/apache/spark/util/ThreadUtils.scala | Scala | apache-2.0 | 11,963 |
/************************************************************************\\
** Project **
** ______ ______ __ ______ ____ **
** / ____/ / __ / / / / __ / / __/ (c) 2011-2014 **
** / /__ / /_/ / / / / /_/ / / /_ **
** /___ / / ____/ / / / __ / / __/ Erik Osheim, Tom Switzer **
** ____/ / / / / / / / | | / /__ **
** /_____/ /_/ /_/ /_/ |_| /____/ All rights reserved. **
** **
** Redistribution and use permitted under the MIT license. **
** **
\\************************************************************************/
package spire
package random
package rng
import spire.syntax.cfor._
import spire.util.Pack
import java.nio.ByteBuffer
import java.util
/**
* This is a Scala implementation of the Well44497a PRNG based on WELL44497a.c.
*
* <p>The acronym WELL stands for Well Equidistributed Long-period Linear.
*
* <p><b>Reference: </b>
* FranΓ§ois Panneton, Pierre L'Ecuyer and Makoto Matsumoto:
* "Improved Long-Period Generators Based on Linear Recurrences Modulo 2",
* <i>ACM Transactions on Mathematical Software,</i> Vol. 32, No. 1, January 2006, pp 1--16.
*
* @see <a href="http://www.iro.umontreal.ca/~panneton/well/WELL44497a.c">WELL44497a.c</a>
* @see <a href="http://www.iro.umontreal.ca/~panneton/WELLRNG.html">Well PRNG Home Page</a>
* @see <a href="http://en.wikipedia.org/wiki/Well_Equidistributed_Long-period_Linear">WELL @ Wikipedia</a>
* @author <a href="mailto:dusan.kysel@gmail.com">DuΕ‘an Kysel</a>
*/
final class Well44497a protected[random](state: Array[Int], i0: Int) extends IntBasedGenerator {

  import Well44497a.{UpperMask, LowerMask, R, BYTES, mat0pos, mat0neg, mat1, mat3neg, mat5}

  // Current read/write index into the circular state pool of R ints.
  private var i : Int = i0

  /** Independent copy of this generator (state array is cloned). */
  def copyInit: Well44497a = new Well44497a(state.clone(), i)

  /** Serializes the R state ints followed by the current index (BYTES bytes total). */
  def getSeedBytes(): Array[Byte] = {
    val bytes = new Array[Byte](BYTES)
    val bb = ByteBuffer.wrap(bytes)
    // Note: the lambda parameter `i` shadows the index field inside the loop;
    // the trailing putInt(i) below refers to the field again.
    cfor(0)(_ < R, _ + 1) { i => bb.putInt(state(i)) }
    bb.putInt(i)
    bytes
  }

  /** Restores state from getSeedBytes output; short input is zero-padded to BYTES. */
  def setSeedBytes(bytes: Array[Byte]): Unit = {
    val bs = if (bytes.length < BYTES) util.Arrays.copyOf(bytes, BYTES) else bytes
    val bb = ByteBuffer.wrap(bs)
    cfor(0)(_ < R, _ + 1) { i => state(i) = bb.getInt }
    i = bb.getInt
  }

  // One step of the WELL44497a recurrence (cf. the WELL44497a.c reference
  // implementation linked in the class docs): combines five state words via the
  // matN transforms, rewrites two of them, and walks the circular index backwards.
  def nextInt(): Int = {
    import Well44497abIndexCache._
    val z0: Int = (state(vrm1(i)) & LowerMask) | (state(vrm2(i)) & UpperMask)
    val z1: Int = mat0neg(-24, state(i)) ^ mat0pos(30, state(vm1(i)))
    val z2: Int = mat0neg(-10, state(vm2(i))) ^ mat3neg(-26, state(vm3(i)))
    state(i) = z1 ^ z2
    state(vrm1(i)) = mat1(z0) ^ mat0pos(20, z1) ^ mat5(9, 0xb729fcec, 0xfbffffff, 0x00020000, z2) ^ mat1(state(i))
    i = vrm1(i)
    state(i)
  }
}
object Well44497a extends GeneratorCompanion[Well44497a, (Array[Int], Int)] {

  // Masks used to stitch together the incomplete final word of the 44497-bit pool
  // (cf. MASKU/MASKL in the WELL44497a.c reference implementation).
  @inline private val UpperMask = 0xFFFFFFFF >>> 17
  @inline private val LowerMask = ~UpperMask

  // Number of bits in the pool.
  @inline private final val K : Int = 44497

  // Length of the pool in ints.
  @inline private final val R : Int = (K + 31) / 32

  // Length of the pool in ints -1.
  // @inline private final val R_1 : Int = R - 1

  // Length of the pool in ints -2.
  // @inline private final val R_2 : Int = R - 2

  // Length of the pool and index in bytes
  @inline private final val BYTES = R * 4 + 4

  // First parameter of the algorithm.
  // @inline private final val M1 : Int = 23

  // Second parameter of the algorithm.
  // @inline private final val M2 : Int = 481

  // Third parameter of the algorithm.
  // @inline private final val M3 : Int = 229

  // Linear state transformations M0..M5 from the WELL paper; the commented-out
  // variants are unused by WELL44497a but kept for reference against the C source.
  @inline private final def mat0pos(t: Int, v: Int) = v ^ (v >>> t)
  @inline private final def mat0neg(t: Int, v: Int) = v ^ (v << -t)
  @inline private final def mat1(v: Int) = v
  // @inline private final def mat2(a: Int, v: Int) = if ((v & 1) != 0) (v >>> 1) ^ a else v >>> 1
  // @inline private final def mat3pos(t: Int, v: Int) = v >>> t
  @inline private final def mat3neg(t: Int, v: Int) = v << -t
  // @inline private final def mat4pos(t: Int, b: Int, v: Int) = v ^ ((v >>> t) & b)
  // @inline private final def mat4neg(t: Int, b: Int, v: Int) = v ^ ((v << -t) & b)

  // Rotate-left by r then mask with ds; the constant a is XORed in only when bit dt is set.
  @inline private final def mat5(r: Int, a: Int, ds: Int, dt: Int, v: Int) = {
    if ((v & dt) != 0) {
      (((v << r) ^ (v >>> (32 - r))) & ds) ^ a
    } else {
      ((v << r) ^ (v >>> (32 - r))) & ds
    }
  }

  // A seed is the full R-word state plus the starting index (always 0 for fresh seeds).
  def randomSeed(): (Array[Int], Int) =
    (Utils.seedFromInt(R, Utils.intFromTime()), 0)

  def fromSeed(seed: (Array[Int], Int)): Well44497a =
    seed match {
      case (state, stateIndex) =>
        assert(state.length == R)
        new Well44497a(state, stateIndex)
    }

  /** Expands (or folds) an arbitrary int array into a full R-word seed. */
  def fromArray(arr: Array[Int]): Well44497a =
    fromSeed((Utils.seedFromArray(R, arr), 0))

  def fromBytes(bytes: Array[Byte]): Well44497a =
    fromArray(Pack.intsFromBytes(bytes, bytes.length / 4))

  def fromTime(time: Long = System.nanoTime): Well44497a =
    fromSeed((Utils.seedFromInt(R, Utils.intFromTime(time)), 0))
}
| AlecZorab/spire | core/src/main/scala/spire/random/rng/Well44497a.scala | Scala | mit | 5,341 |
package eu.execom.FabutPresentation.persistence
/** Base trait for string-backed enumeration values persisted by name. */
trait Enum {
  /** Stable textual identifier of this enumeration value. */
  def name: String
}

/** Sort direction for queries. */
case class SortOrder(name: String) extends Enum
object SortOrder {
  val ASC = SortOrder("ASC")
  val DESC = SortOrder("DESC")
  /** All declared values, in declaration order. */
  val values: List[SortOrder] = ASC :: DESC :: Nil
  /**
   * Looks up a value by its name.
   * @throws NoSuchElementException with a descriptive message when `name` is unknown
   *         (previously failed with an opaque "None.get" via `Option.get`).
   */
  def withName(name: String): SortOrder =
    values.find(_.name == name)
      .getOrElse(throw new NoSuchElementException(s"Unknown SortOrder name: $name"))
}
/** Lifecycle state of an invitation. */
case class InvitationStatus(name: String) extends Enum
object InvitationStatus {
  val PENDING = InvitationStatus("PENDING")
  val USED = InvitationStatus("USED")
  /** All declared values, in declaration order. */
  val values: List[InvitationStatus] = PENDING :: USED :: Nil
  /**
   * Looks up a value by its name.
   * @throws NoSuchElementException with a descriptive message when `name` is unknown
   *         (previously failed with an opaque "None.get" via `Option.get`).
   */
  def withName(name: String): InvitationStatus =
    values.find(_.name == name)
      .getOrElse(throw new NoSuchElementException(s"Unknown InvitationStatus name: $name"))
}
/** Progress of a friend-request handshake. */
case class FriendRequestStatus(name: String) extends Enum
object FriendRequestStatus {
  val PENDING = FriendRequestStatus("PENDING")
  val SENT = FriendRequestStatus("SENT")
  val CONNECTED = FriendRequestStatus("CONNECTED")
  /** All declared values, in declaration order. */
  val values: List[FriendRequestStatus] = PENDING :: SENT :: CONNECTED :: Nil
  /**
   * Looks up a value by its name.
   * @throws NoSuchElementException with a descriptive message when `name` is unknown
   *         (previously failed with an opaque "None.get" via `Option.get`).
   */
  def withName(name: String): FriendRequestStatus =
    values.find(_.name == name)
      .getOrElse(throw new NoSuchElementException(s"Unknown FriendRequestStatus name: $name"))
}
| idostanic/FabutPresentation | src/main/scala/eu/execom/FabutPresentation/persistence/Enums.scala | Scala | apache-2.0 | 1,040 |
package jp.sf.amateras.solr.scala
import org.apache.solr.common._
import org.apache.solr.common.util._
import org.apache.solr.client.solrj._
import org.apache.solr.client.solrj.impl.HttpSolrServer
import org.apache.http.impl.client.DefaultHttpClient
import org.apache.http.auth.AuthScope
import org.apache.http.auth.UsernamePasswordCredentials
object SolrServerFactory {

  /**
   * Configure SolrClient for basic authentication.
   *
   * Returns a factory function from a Solr base URL to an [[HttpSolrServer]] whose
   * underlying HTTP client carries the given credentials.
   *
   * {{{
   * implicit val server = SolrServerFactory.basicAuth("username", "password")
   * val client = new SolrClient("http://localhost:8983/solr")
   * }}}
   */
  def basicAuth(username: String, password: String) = (url: String) => {
    val server = new HttpSolrServer(url)
    val jurl = new java.net.URL(server.getBaseURL())
    // NOTE(review): assumes the backing client is a DefaultHttpClient — this cast
    // fails if solrj was configured with a different HttpClient implementation;
    // verify against the solrj version in use.
    val client = server.getHttpClient().asInstanceOf[DefaultHttpClient]
    // Scope the credentials to the host/port parsed from the server's base URL.
    client.getCredentialsProvider().setCredentials(
      new AuthScope(jurl.getHost(), jurl.getPort(), AuthScope.ANY_REALM),
      new UsernamePasswordCredentials(username, password))
    server
  }

  /**
   * Provides the dummy SolrServer for unit testing.
   *
   * Every request is passed to `listener` for inspection and answered with an
   * empty document list; nothing touches the network.
   *
   * {{{
   * implicit val server = SolrServerFactory.dummy { request =>
   *   println(request.getMethod)
   *   println(request.getPath)
   *   println(request.getParams)
   * }
   * val client = new SolrClient("http://localhost:8983/solr")
   * }}}
   */
  def dummy(listener: (SolrRequest) => Unit) = (url: String) => new SolrServer {
    def request(request: SolrRequest): NamedList[Object] = {
      listener(request)
      // Canned response: an empty "response" document list.
      val response = new SimpleOrderedMap[Object]()
      response.add("response", new SolrDocumentList())
      response
    }
    def shutdown() = {
    }
  }
}
} | matthewchartier/solr-scala-client | src/main/scala/jp/sf/amateras/solr/scala/SolrServerFactory.scala | Scala | apache-2.0 | 1,773 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sparklinedata.druid.jscodegen
import org.apache.spark.sql.types._
import org.sparklinedata.druid.jscodegen.JSDateTimeCtx._
// TODO: Track nullability of each JSEXpr & and add null safe casting if rqd.
// Metrics & Time DIM can't be null; Non TIME_DIM DIM or VCOLS of Metric/TIME_DIM
// using conditional exprs can have NULL values.
// Emits the JavaScript needed to cast `from` (whose Spark SQL type is `from.fnDT`)
// to the target type `toDT`. `castCode` is None when the conversion is unsupported.
case class JSCast(from: JSExpr, toDT: DataType, ctx: JSCodeGenerator) {
  private[jscodegen] val castCode: Option[JSExpr] =
    toDT match {
      // Identity cast (or a NullType source, which may be treated as any type).
      // NOTE(review): the result is tagged StringType regardless of `toDT` —
      // looks like it should carry `toDT`/`from.fnDT`; confirm intent.
      case _ if ((toDT == from.fnDT && (!from.timeDim || toDT != StringType)) ||
        from.fnDT == NullType) =>
        Some(JSExpr(None,from.linesSoFar, from.getRef, StringType))
      case BooleanType => castToBooleanCode
      case ShortType => castToShortCode
      case IntegerType => castToNumericCode(IntegerType)
      case FloatType => castToNumericCode(FloatType)
      case LongType => castToNumericCode(LongType)
      case DoubleType => castToNumericCode(DoubleType)
      case StringType => castToStringCode
      case DateType => castToDateCode
      case TimestampType => castToTimestampCode
      case _ => None
    }

  // Cast to Boolean via the JS Boolean() constructor; dates yield null to follow Hive.
  private[this] def castToBooleanCode: Option[JSExpr] = from.fnDT match {
    case StringType | IntegerType | LongType | FloatType | DoubleType =>
      Some(JSExpr(None, from.linesSoFar, s"Boolean(${from.getRef})", BooleanType))
    case DateType =>
      // Hive would return null when cast from date to boolean
      // TODO: review this; Shouldn't this be "" ?
      Some(JSExpr(None, from.linesSoFar, s"null", BooleanType))
    case TimestampType =>
      Some(JSExpr(None, from.linesSoFar, s"Boolean(${from.getRef}.getMillis())", BooleanType))
    case _ => None
  }

  // TODO: enable support for Decimal (if Decimal is not represented by Number)
  // Numeric casts; floats going to an integral target are floored first.
  private[this] def castToNumericCode(outDt: DataType): Option[JSExpr] =
    from.fnDT match {
      case BooleanType | StringType =>
        Some(JSExpr(None, from.linesSoFar, s"Number(${from.getRef})", outDt))
      case (FloatType | DoubleType) if ctx.SPIntegralNumeric(outDt) =>
        Some (JSExpr(None, from.linesSoFar, s"Math.floor(${from.getRef})", outDt))
      case ShortType | IntegerType | LongType| FloatType | DoubleType =>
        Some (JSExpr(None, from.linesSoFar, from.getRef, outDt))
      case DateType =>
        Some(JSExpr(None, from.linesSoFar, s"null", outDt))
      case TimestampType =>
        Some(JSExpr(None, from.linesSoFar, s"Number(${dtToLongCode(from.getRef)})", outDt))
      case _ => None
    }

  // TODO: enable Cast To Short after support for Decimal, Short, TimeStamp Types
  private[this] def castToShortCode: Option[JSExpr] = from.fnDT match {
    case StringType | BooleanType | DateType => castToNumericCode(ShortType)
    case _ => None
  }

  // String casts: temporal values are first rendered through the date/time helpers;
  // time-dimension longs are converted to ISO datetimes before formatting.
  private[this] def castToStringCode: Option[JSExpr] = from.fnDT match {
    case BooleanType | ShortType | IntegerType | LongType | FloatType
         | DoubleType | DecimalType() => nullSafeCastToString(from.getRef)
    case DateType =>
      nullSafeCastToString(dateToStrCode(from.getRef))
    case TimestampType =>
      nullSafeCastToString(dtToStrCode(from.getRef))
    case StringType if from.timeDim =>
      nullSafeCastToString(dtToStrCode(longToISODTCode(from.getRef, ctx.dateTimeCtx)))
    case _ => None
  }

  // TODO: Handle parsing failure as in Spark
  private[this] def castToDateCode: Option[JSExpr] = from.fnDT match {
    case StringType =>
      Some(JSExpr(None, from.linesSoFar,
        if (from.timeDim) {longToDateCode(from.getRef, ctx.dateTimeCtx)}
        else {stringToDateCode(from.getRef, ctx.dateTimeCtx)}, DateType))
    case TimestampType =>
      Some(JSExpr(None, from.linesSoFar, dtToDateCode(from.getRef), DateType))
    case _ => None
  }

  // TODO: Support for DecimalType. Handle Double/Float isNaN/isInfinite
  private[this] def castToTimestampCode: Option[JSExpr] = from.fnDT match {
    case StringType =>
      Some(JSExpr(None, from.linesSoFar,
        if (from.timeDim) {longToISODTCode(from.getRef, ctx.dateTimeCtx)}
        else {stringToISODTCode(from.getRef, ctx.dateTimeCtx)}, TimestampType))
    case BooleanType =>
      // NOTE(review): the generated ternary emits bare T00:00:01Z / T00:00:00Z tokens —
      // quoting looks missing; confirm stringToISODTCode tolerates this.
      Some(JSExpr(None, from.linesSoFar, stringToISODTCode
      (s"((${from.getRef}) == true ? T00:00:01Z : T00:00:00Z)", ctx.dateTimeCtx),
        TimestampType))
    case FloatType | DoubleType | DecimalType() =>
      // Truncate to a long first, then build an ISO datetime from epoch units.
      for (le <- castToNumericCode(LongType)) yield
        JSExpr(None, le.linesSoFar, longToISODTCode(le.getRef, ctx.dateTimeCtx),
          TimestampType)
    case ShortType | IntegerType | LongType =>
      Some(JSExpr(None, from.linesSoFar,
        longToISODTCode(from.getRef, ctx.dateTimeCtx), TimestampType))
    case DateType => Some(JSExpr(None, from.linesSoFar,
      localDateToDTCode(from.getRef, ctx.dateTimeCtx), TimestampType))
    case _ => None
  }

  // Emits a null-guarded toString. When `from` has no backing variable the value is
  // stored in a fresh var first so the expression is not evaluated twice.
  private[this] def nullSafeCastToString(valToCast: String): Option[JSExpr] = {
    if (from.fnVar.isEmpty) {
      val v1 = ctx.makeUniqueVarName
      Some(JSExpr(None, from.linesSoFar + s"var $v1 = $valToCast;",
        s"""(($v1 != null) ? $v1.toString() : "")""".stripMargin, StringType))
    } else {
      Some(JSExpr(None, from.linesSoFar,
        s"""(($valToCast != null) ? $valToCast.toString() : "")""".stripMargin,
        StringType))
    }
  }
}
} | SparklineData/spark-druid-olap | src/main/scala/org/sparklinedata/druid/jscodegen/JSCast.scala | Scala | apache-2.0 | 6,099 |
package lodo
import japgolly.scalajs.react.ReactComponentB
import japgolly.scalajs.react.vdom.prefix_<^._
// Fixed-top navigation bar: undo/redo buttons, search, and toggles for the
// notebook sidebar, completed-item visibility and quick-add mode. Rendered in
// two variants: the collapsed (navbar-toggle) button strip and the expanded
// (navbar-collapse) link list, both driven by the same Props/Backend.
object Header {
  // b: dashboard backend supplying the event handlers; the three booleans mirror
  // the current UI toggle state used for "nav-selected" highlighting.
  case class Props(b: Dashboard.Backend, isShowSidebar: Boolean, isHideComplete: Boolean, isQuickAdd: Boolean)
  val header = ReactComponentB[Props]("Header")
    .render(P => {
      <.nav(^.cls := "navbar navbar-default navbar-fixed-top",
        <.div(^.cls := "container-fluid",
          // Collapsed-mode icon buttons.
          // NOTE(review): undo/redo handlers are wired with `==>` (event callback)
          // while the toggles use `-->`; confirm the mixture is intentional.
          <.div(^.cls := "navbar-header",
            <.button(^.cls := "navbar-toggle collapsed", ^.tpe := "button",
              ^.onClick ==> P.b.performRedo(),
              <.span(^.cls := "glyphicon glyphicon-forward")
            ),
            <.button(^.cls := "navbar-toggle collapsed", ^.tpe := "button",
              ^.onClick ==> P.b.performUndo(),
              <.span(^.cls := "glyphicon glyphicon-backward")
            ),
            <.button(^.cls := "navbar-toggle collapsed", ^.tpe := "button",
              <.span(^.cls := "glyphicon glyphicon-search")
            ),
            <.button(
              ^.classSet1("navbar-toggle collapsed", ("nav-selected", P.isShowSidebar)),
              ^.tpe := "button",
              ^.onClick --> P.b.toggleShowSidebar(),
              <.span(^.cls := "glyphicon glyphicon-book")
            ),
            <.button(
              ^.classSet1("navbar-toggle collapsed", ("nav-selected", !P.isHideComplete)),
              ^.tpe := "button",
              ^.onClick --> P.b.toggleCompleted(),
              <.span(^.cls := "glyphicon glyphicon-ok-circle")
            ),
            <.button(
              ^.classSet1("navbar-toggle collapsed", ("nav-selected", P.isQuickAdd)),
              ^.tpe := "button",
              ^.onClick --> P.b.toggleQuickAdd(),
              <.span(^.cls := "glyphicon glyphicon-time")
            ),
            <.span(^.cls := "navbar-brand",
              <.span(^.cls := "glyphicon glyphicon-check"),
              "Lodo"
            )
          ),
          // Expanded-mode text links mirroring the same toggles and actions.
          <.div(^.cls := "navbar-collapse collapse",
            <.p(^.cls := "navbar-text", "Toggle:"),
            <.ul(^.cls := "nav navbar-nav",
              <.li(
                ^.classSet(("nav-selected", !P.isShowSidebar)),
                <.a(^.href := "#",
                  ^.onClick --> P.b.toggleShowSidebar(), "Notebooks")
              ),
              <.li(
                ^.classSet(("nav-selected", !P.isHideComplete)),
                <.a(^.href := "#",
                  ^.onClick --> P.b.toggleCompleted(), "Completed")
              ),
              <.li(
                ^.classSet(("nav-selected", P.isQuickAdd)),
                <.a(^.href := "#",
                  ^.onClick --> P.b.toggleQuickAdd(), "Quick Add")
              )
            ),
            <.ul(^.cls := "nav navbar-nav navbar-right",
              <.li(<.a(^.href := "#",
                ^.onClick ==> P.b.performUndo(), "Undo")),
              <.li(<.a(^.href := "#",
                ^.onClick ==> P.b.performRedo(), "Redo"))
            ),
            // NOTE(review): placeholder appears to be a private-use glyph codepoint;
            // confirm the escaping ("\\uE003" vs "\uE003") is as intended.
            <.form(^.cls := "navbar-form navbar-right",
              <.input(^.cls := "form-control filter",
                ^.tpe := "text",
                ^.placeholder := "\\uE003"))
          )
        )
      )
    }).build
  // Convenience constructor used by parents.
  def apply(props: Props) = header(props)
}
| k3d3/lodo | lodo/js/src/main/scala/components/Header.scala | Scala | agpl-3.0 | 3,274 |
/*
* Copyright (c) 2013-16 Miles Sabin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shapeless
import org.junit.Test
import org.junit.Assert._
import test._
import testutil._
import ops.coproduct._
import ops.union._
import union._
class CoproductTests {
  // Coproduct of three distinct member types, used throughout the tests.
  type ISB = Int :+: String :+: Boolean :+: CNil
  // Coproduct with repeated members: the image of ISB under `size` below.
  type III = Int :+: Int :+: Int :+: CNil
  case class Foo(msg: String)
  case class Bar(msg: String)
  type FBI = Foo :+: Bar :+: Int :+: CNil
  // Small class hierarchy for testing unification to a common supertype.
  trait Fruit
  case class Apple() extends Fruit
  case class Pear() extends Fruit
  case class Banana() extends Fruit
  type APB = Apple :+: Pear :+: Banana :+: CNil
  // Poly1 mapping every ISB member to an Int (strings map to their length).
  object size extends Poly1 {
    implicit val caseInt = at[Int](_ => 1)
    implicit val caseString = at[String](_.length)
    implicit val caseBoolean = at[Boolean](_ => 1)
  }
  @Test
  def testInject {
    // Inject must resolve regardless of the target type's position or duplication.
    implicitly[Inject[Int :+: CNil, Int]]
    implicitly[Inject[Int :+: Int :+: CNil, Int]]
    implicitly[Inject[Int :+: Int :+: Int :+: CNil, Int]]
    implicitly[Inject[String :+: Int :+: CNil, Int]]
    implicitly[Inject[Int :+: String :+: CNil, Int]]
    val foo1 = Coproduct[ISB](23)
    val foo2 = Coproduct[ISB]("foo")
    val foo3 = Coproduct[ISB](true)
    // Types absent from ISB must be rejected at compile time.
    illTyped("""
      val foo4 = Coproduct[ISB](1.0)
    """)
    illTyped("""
      val foo4 = Coproduct[ISB](CNil)
    """)
  }
  @Test
  def testMatch {
    // Pattern matching over Inl/Inr recovers the precise member type at each depth.
    def cpMatch(v: ISB) = v match {
      case Inl(x) =>
        typed[Int](x)
      case Inr(Inl(x)) =>
        typed[String](x)
      case Inr(Inr(Inl(x))) =>
        typed[Boolean](x)
      case Inr(Inr(Inr(_))) => ??? // This impossible case required for exhaustivity
    }
    val foo1 = Coproduct[ISB](23)
    val foo2 = Coproduct[ISB]("foo")
    val foo3 = Coproduct[ISB](true)
    cpMatch(foo1)
    cpMatch(foo2)
    cpMatch(foo3)
  }
  @Test
  def testSelect {
    // select[T] yields Some only when the injected value is of type T,
    // and must not compile for types absent from the coproduct.
    val foo1 = Coproduct[ISB](23)
    val foo2 = Coproduct[ISB]("foo")
    val foo3 = Coproduct[ISB](true)
    val sel1a = foo1.select[Int]
    typed[Option[Int]](sel1a)
    assertEquals(Some(23), sel1a)
    val sel1b = foo1.select[String]
    typed[Option[String]](sel1b)
    assertEquals(None, sel1b)
    val sel1c = foo1.select[Boolean]
    typed[Option[Boolean]](sel1c)
    assertEquals(None, sel1c)
    illTyped("""
      foo1.select[Double]
    """)
    val sel2a = foo2.select[Int]
    typed[Option[Int]](sel2a)
    assertEquals(None, sel2a)
    val sel2b = foo2.select[String]
    typed[Option[String]](sel2b)
    assertEquals(Some("foo"), sel2b)
    val sel2c = foo2.select[Boolean]
    typed[Option[Boolean]](sel2c)
    assertEquals(None, sel2c)
    illTyped("""
      foo2.select[Double]
    """)
    val sel3a = foo3.select[Int]
    typed[Option[Int]](sel3a)
    assertEquals(None, sel3a)
    val sel3b = foo3.select[String]
    typed[Option[String]](sel3b)
    assertEquals(None, sel3b)
    val sel3c = foo3.select[Boolean]
    typed[Option[Boolean]](sel3c)
    assertEquals(Some(true), sel3c)
    illTyped("""
      foo3.select[Double]
    """)
    // Selection also works for user-defined member types.
    val fbi1 = Coproduct[FBI](Foo("hi"))
    val fbi2 = Coproduct[FBI](Bar("hi"))
    val fbi3 = Coproduct[FBI](23)
    val sel4a = fbi1.select[Foo]
    assertTypedEquals[Option[Foo]](Some(Foo("hi")), sel4a)
    val sel4b = fbi2.select[Bar]
    assertTypedEquals[Option[Bar]](Some(Bar("hi")), sel4b)
    val sel4c = fbi3.select[Int]
    assertTypedEquals[Option[Int]](Some(23), sel4c)
  }
  @Test
  def testFlatMap {
    // flatMap with a Poly1 returning coproducts concatenates the per-member
    // result coproducts into one flattened coproduct type.
    type S = String; type I = Int; type D = Double; type C = Char
    val in1 = Coproduct[I :+: CNil](1)
    val is = Coproduct[I :+: S :+: CNil](1)
    val isd = Coproduct[I :+: S :+: D :+: CNil](1)
    // Identity: each member maps to a singleton coproduct of itself.
    object coIdentity extends Poly1 {
      implicit def default[A] = at[A](a => Coproduct[A :+: CNil](a))
    }
    val r1 = in1.flatMap(coIdentity)
    assertTypedEquals[I :+: CNil](in1, r1)
    val r2 = is.flatMap(coIdentity)
    assertTypedEquals[I :+: S :+: CNil](is, r2)
    // Duplication: each member contributes two copies of its type.
    object coSquare extends Poly1 {
      implicit def default[A] = at[A](a => Coproduct[A :+: A :+: CNil](a))
    }
    val r3 = in1.flatMap(coSquare)
    assertTypedEquals[I :+: I :+: CNil](Coproduct[I :+:I :+: CNil](1), r3)
    val r4 = is.flatMap(coSquare)
    assertTypedEquals[I :+: I :+: S :+: S :+: CNil](
      Coproduct[I :+: I :+: S :+: S :+: CNil](1), r4)
    // Heterogeneous case: each member maps to a differently-shaped coproduct.
    object complex extends Poly1 {
      implicit def caseInt = at[Int](i => Coproduct[S :+: CNil](i.toString))
      implicit def caseString = at[String](s => Coproduct[C :+: D :+: CNil](s(0)))
      implicit def caseDouble = at[Double](d => Coproduct[I :+: S :+: CNil](d.toInt))
    }
    val r5 = isd.flatMap(complex)
    assertTypedEquals[S :+: C :+: D :+: I :+: S :+: CNil](
      Coproduct[S :+: C :+: D :+: I :+: S :+: CNil]("1"), r5)
  }
  @Test
  def testMap {
    // Mapping `size` over ISB yields III with the injection position preserved.
    val foo1 = Coproduct[ISB](23)
    val foo2 = Coproduct[ISB]("foo")
    val foo3 = Coproduct[ISB](true)
    val foo1b = foo1 map size
    typed[III](foo1b)
    assertEquals(Inl(1), foo1b)
    val foo2b = foo2 map size
    typed[III](foo2b)
    assertEquals(Inr(Inl(3)), foo2b)
    val foo3b = foo3 map size
    typed[III](foo3b)
    assertEquals(Inr(Inr(Inl(1))), foo3b)
  }
  @Test
  def testUnify {
    // unify collapses a coproduct to the least upper bound of its member types:
    // an identical type, a common supertype, or an existential/lub for generics.
    val foo1 = Coproduct[ISB](23)
    val foo2 = Coproduct[ISB]("foo")
    val foo3 = Coproduct[ISB](true)
    val foo1b = foo1 map size
    val foo2b = foo2 map size
    val foo3b = foo3 map size
    val foo1c = foo1b.unify
    typed[Int](foo1c)
    assertEquals(1, foo1c)
    val foo2c = foo2b.unify
    typed[Int](foo2c)
    assertEquals(3, foo2c)
    val foo3c = foo3b.unify
    typed[Int](foo3c)
    assertEquals(1, foo3c)
    val f1 = Coproduct[APB](Apple())
    val f2 = Coproduct[APB](Pear())
    val f3 = Coproduct[APB](Banana())
    val f1b = f1.unify
    typed[Fruit](f1b)
    val f2b = f2.unify
    typed[Fruit](f2b)
    val f3b = f3.unify
    typed[Fruit](f3b)
    // See https://github.com/milessabin/shapeless/issues/242
    case class Foo[T](c: T)
    val existentials1 = Coproduct[Foo[Double] :+: Foo[Float] :+: CNil](Foo(23F)).unify
    val existentials2 = Coproduct[Foo[Double] :+: Foo[Float] :+: Foo[Int] :+: CNil](Foo(23F)).unify
    typed[Foo[_ >: Float with Double <: AnyVal]](existentials1)
    typed[Foo[_ >: Int with Float with Double <: AnyVal]](existentials2)
  }
  @Test
  def testFold {
    import poly.identity
    // Poly2 accumulator for foldLeft: adds each member's `size` to the Int accumulator.
    object addSize extends Poly2 {
      implicit def default[T](implicit st: size.Case.Aux[T, Int]) =
        at[Int, T] { (acc, t) => acc + size(t) }
    }
    val foo1 = Coproduct[ISB](23)
    val foo2 = Coproduct[ISB]("foo")
    val foo3 = Coproduct[ISB](true)
    // fold with a Poly1 applies the matching case directly.
    val foo1b = foo1 fold size
    val foo2b = foo2 fold size
    val foo3b = foo3 fold size
    // foldLeft threads the accumulator through the single inhabitant.
    val foo1c = foo1.foldLeft(42)(addSize)
    val foo2c = foo2.foldLeft(42)(addSize)
    val foo3c = foo3.foldLeft(42)(addSize)
    typed[Int](foo1b)
    assertEquals(1, foo1b)
    typed[Int](foo2b)
    assertEquals(3, foo2b)
    typed[Int](foo3b)
    assertEquals(1, foo3b)
    typed[Int](foo1c)
    assertEquals(43, foo1c)
    typed[Int](foo2c)
    assertEquals(45, foo2c)
    typed[Int](foo3c)
    assertEquals(43, foo3c)
    // Folding with identity unifies to the common supertype.
    val f1 = Coproduct[APB](Apple())
    val f2 = Coproduct[APB](Pear())
    val f3 = Coproduct[APB](Banana())
    val f1b = f1 fold identity
    typed[Fruit](f1b)
    val f2b = f2 fold identity
    typed[Fruit](f2b)
    val f3b = f3 fold identity
    typed[Fruit](f3b)
  }
  @Test
  def testZipWith {
    type H = Float :: Double :: String :: HNil
    val h: H = 1000.0f :: 42.0d :: "Hello" :: HNil
    /*
     * Type `C` arises from zipping an `ISB` coproduct with an HList of the above type `H`.
     */
    type C = (Int, Float) :+: (String, Double) :+: (Boolean, String) :+: CNil
    val foo1 = Coproduct[ISB](23)
    val foo2 = Coproduct[ISB]("foo")
    val foo3 = Coproduct[ISB](true)
    // Only the pair at the inhabitant's position is populated; other selects are None.
    val foo1Zipped = foo1.zipWith(h)
    typed[C](foo1Zipped)
    val foo1ZippedSel1 = foo1Zipped.select[(Int, Float)]
    assertTypedEquals[Option[(Int, Float)]](Some((23, 1000.0f)), foo1ZippedSel1)
    val foo1ZippedSel2 = foo1Zipped.select[(String, Double)]
    assertTypedEquals[Option[(String, Double)]](None, foo1ZippedSel2)
    val foo1ZippedSel3 = foo1Zipped.select[(Boolean, String)]
    assertTypedEquals[Option[(Boolean, String)]](None, foo1ZippedSel3)
    val foo2Zipped = foo2.zipWith(h)
    typed[C](foo2Zipped)
    val foo2ZippedSel1 = foo2Zipped.select[(Int, Float)]
    assertTypedEquals[Option[(Int, Float)]](None, foo2ZippedSel1)
    val foo2ZippedSel2 = foo2Zipped.select[(String, Double)]
    assertTypedEquals[Option[(String, Double)]](Some("foo", 42.0d), foo2ZippedSel2)
    val foo2ZippedSel3 = foo2Zipped.select[(Boolean, String)]
    assertTypedEquals[Option[(Boolean, String)]](None, foo2ZippedSel3)
    val foo3Zipped = foo3.zipWith(h)
    typed[C](foo3Zipped)
    val foo3ZippedSel1 = foo3Zipped.select[(Int, Float)]
    assertTypedEquals[Option[(Int, Float)]](None, foo3ZippedSel1)
    val foo3ZippedSel2 = foo3Zipped.select[(String, Double)]
    assertTypedEquals[Option[(String, Double)]](None, foo3ZippedSel2)
    val foo3ZippedSel3 = foo3Zipped.select[(Boolean, String)]
    assertTypedEquals[Option[(Boolean, String)]](Some(true, "Hello"), foo3ZippedSel3)
  }
  @Test
  def testZip {
    import shapeless.Nat._
    // zipWithIndex pairs each member type with its type-level position (Nat).
    val c1 = Coproduct[ISB](42)
    val zi1 = c1.zipWithIndex
    val vz1 = zi1.select[(Int,_0)]
    val vz2 = zi1.select[(String,_1)]
    typed[Option[(Int,_0)]](vz1)
    typed[Option[(String,_1)]](vz2)
    assertEquals(Some((42,_0)), vz1)
    assertEquals(None, vz2)
  }
@Test
def testZipConst {
  // zipConst pairs every alternative with the same constant value; only the
  // pair for the inhabited alternative is selectable at runtime.
  /*
   * Type `R` represents the result of zipping the `ISB` coproduct with a constant of type `Double`.
   */
  type R = (Int, Double) :+: (String, Double) :+: (Boolean, Double) :+: CNil
  val foo1 = Coproduct[ISB](23)
  val foo2 = Coproduct[ISB]("foo")
  val foo3 = Coproduct[ISB](true)
  val foo1Zipped = foo1.zipConst(3.14d)
  typed[R](foo1Zipped)
  val foo1ZippedSel1 = foo1Zipped.select[(Int, Double)]
  assertTypedEquals[Option[(Int, Double)]](Some((23, 3.14d)), foo1ZippedSel1)
  val foo1ZippedSel2 = foo1Zipped.select[(String, Double)]
  assertTypedEquals[Option[(String, Double)]](None, foo1ZippedSel2)
  val foo1ZippedSel3 = foo1Zipped.select[(Boolean, Double)]
  assertTypedEquals[Option[(Boolean, Double)]](None, foo1ZippedSel3)
  val foo2Zipped = foo2.zipConst(3.14d)
  typed[R](foo2Zipped)
  val foo2ZippedSel1 = foo2Zipped.select[(Int, Double)]
  assertTypedEquals[Option[(Int, Double)]](None, foo2ZippedSel1)
  val foo2ZippedSel2 = foo2Zipped.select[(String, Double)]
  assertTypedEquals[Option[(String, Double)]](Some(("foo", 3.14d)), foo2ZippedSel2)
  val foo2ZippedSel3 = foo2Zipped.select[(Boolean, Double)]
  assertTypedEquals[Option[(Boolean, Double)]](None, foo2ZippedSel3)
  val foo3Zipped = foo3.zipConst(3.14d)
  typed[R](foo3Zipped)
  val foo3ZippedSel1 = foo3Zipped.select[(Int, Double)]
  assertTypedEquals[Option[(Int, Double)]](None, foo3ZippedSel1)
  val foo3ZippedSel2 = foo3Zipped.select[(String, Double)]
  assertTypedEquals[Option[(String, Double)]](None, foo3ZippedSel2)
  val foo3ZippedSel3 = foo3Zipped.select[(Boolean, Double)]
  assertTypedEquals[Option[(Boolean, Double)]](Some((true, 3.14d)), foo3ZippedSel3)
}
@Test
def testWithKeys {
  // zipWithKeys turns a plain coproduct into a discriminated union keyed by
  // the given singleton symbols; `get('k)` then selects by key.
  type U = Union.`'i -> Int, 's -> String, 'b -> Boolean`.T
  val cKeys = Keys[U].apply()
  val u1 = Coproduct[ISB](23).zipWithKeys(cKeys)
  val v1 = u1.get('i)
  typed[Option[Int]](v1)
  assertEquals(Some(23), v1)
  assertEquals(None, u1.get('s))
  val u2 = Coproduct[ISB]("foo").zipWithKeys(cKeys)
  val v2 = u2.get('s)
  typed[Option[String]](v2)
  assertEquals(Some("foo"), v2)
  assertEquals(None, u2.get('b))
  val u3 = Coproduct[ISB](true).zipWithKeys(cKeys)
  val v3 = u3.get('b)
  typed[Option[Boolean]](v3)
  assertEquals(Some(true), v3)
  assertEquals(None, u3.get('i))
  illTyped("v3.get('d)")
  // key/value lengths must match up
  // FIX: the snippet previously referenced undefined `uKeys`, so it failed to
  // compile for the wrong reason (unknown identifier) and never exercised the
  // intended length-mismatch check. Use the in-scope `cKeys` instead.
  illTyped("u1.zipWithKeys(cKeys.tail)")
  // Explicit type argument
  {
    val u1 = Coproduct[ISB](23).zipWithKeys[HList.`'i, 's, 'b`.T]
    val v1 = u1.get('i)
    typed[Option[Int]](v1)
    assertEquals(Some(23), v1)
    assertEquals(None, u1.get('s))
  }
  {
    val u2 = Coproduct[ISB]("foo").zipWithKeys[HList.`'i, 's, 'b`.T]
    val v2 = u2.get('s)
    typed[Option[String]](v2)
    assertEquals(Some("foo"), v2)
    assertEquals(None, u2.get('b))
  }
  {
    val u3 = Coproduct[ISB](true).zipWithKeys[HList.`'i, 's, 'b`.T]
    val v3 = u3.get('b)
    typed[Option[Boolean]](v3)
    assertEquals(Some(true), v3)
    assertEquals(None, u3.get('i))
    illTyped("v3.get('d)")
  }
  // key/value lengths must match up for the explicit form too
  illTyped(" Coproduct[ISB](true).zipWithKeys[HList.`'i, 's, 'b, 'd`.T] ")
}
@Test
def testPartialOrdering {
  val (one, two, abc, xyz) =
    (Coproduct[ISB](1), Coproduct[ISB](2), Coproduct[ISB]("abc"), Coproduct[ISB]("xyz"))
  // Values at the same alternative compare by that alternative's ordering
  // (normalised to -1/0/1); values at different alternatives are incomparable.
  def assertPOEquals(expected: Option[Int], l: ISB, r: ISB)(implicit po: PartialOrdering[ISB]) = {
    val sign = po.tryCompare(l, r).map(i => if (i < 0) -1 else if (i > 0) 1 else 0)
    assertEquals(s"${l} ${r}", expected, sign)
  }
  assertPOEquals(Some(0), one, one)
  assertPOEquals(Some(-1), one, two)
  assertPOEquals(Some(1), two, one)
  assertPOEquals(Some(0), abc, abc)
  assertPOEquals(Some(-1), abc, xyz)
  assertPOEquals(Some(1), xyz, abc)
  assertPOEquals(None, one, abc)
  assertPOEquals(None, abc, one)
}
@Test
def testLength {
  // length is determined by the coproduct type alone, not by which
  // alternative the value inhabits.
  val lenOne = Coproduct[Int :+: CNil](123).length
  assertTypedEquals[Nat._1](Nat._1, lenOne)
  val lenTwo = Coproduct[Int :+: String :+: CNil](123).length
  assertTypedEquals[Nat._2](Nat._2, lenTwo)
  val lenThree = Coproduct[Int :+: String :+: Double :+: CNil](123).length
  assertTypedEquals[Nat._3](Nat._3, lenThree)
  val lenFour = Coproduct[Int :+: String :+: Double :+: Char :+: CNil](123).length
  assertTypedEquals[Nat._4](Nat._4, lenFour)
}
@Test
def testExtendRight {
  type S = String; type I = Int; type D = Double; type C = Char
  type CoI = I :+: CNil
  type CoIS = I :+: S :+: CNil
  type CoISD = I :+: S :+: D :+: CNil
  type CoISDC = I :+: S :+: D :+: C :+: CNil
  // extendRight widens the coproduct type with a new rightmost alternative;
  // the injected value (here always the Int) is unchanged.
  val widened1 = Coproduct[CoI](1).extendRight[S]
  assertTypedEquals[CoIS](Coproduct[CoIS](1), widened1)
  val widened2 = Coproduct[CoIS](1).extendRight[D]
  assertTypedEquals[CoISD](Coproduct[CoISD](1), widened2)
  val widened3 = Coproduct[CoISD](1).extendRight[C]
  assertTypedEquals[CoISDC](Coproduct[CoISDC](1), widened3)
}
@Test
def testExtendLeft {
  type S = String; type I = Int; type D = Double; type C = Char
  type CoI = I :+: CNil
  type CoSI = S :+: I :+: CNil
  type CoDSI = D :+: S :+: I :+: CNil
  type CoCDSI = C :+: D :+: S :+: I :+: CNil
  // extendLeft widens the coproduct type with a new leftmost alternative;
  // the injected value (here always the Int) is unchanged.
  val widened1 = Coproduct[CoI](1).extendLeft[S]
  assertTypedEquals[CoSI](Coproduct[CoSI](1), widened1)
  val widened2 = Coproduct[CoSI](1).extendLeft[D]
  assertTypedEquals[CoDSI](Coproduct[CoDSI](1), widened2)
  val widened3 = Coproduct[CoDSI](1).extendLeft[C]
  assertTypedEquals[CoCDSI](Coproduct[CoCDSI](1), widened3)
}
@Test
def testExtendLeftBy {
  // extendLeftBy prepends a whole coproduct type (possibly CNil, a no-op)
  // on the left; the injected value is preserved.
  type S = String; type I = Int; type D = Double; type C = Char
  type CoI = I :+: CNil
  type CoSI = S :+: I :+: CNil
  type CoDSI = D :+: S :+: I :+: CNil
  type CoCDSI = C :+: D :+: S :+: I :+: CNil
  val coi = Coproduct[CoI](1)
  // Extending by CNil leaves the type unchanged.
  val r1 = coi.extendLeftBy[CNil]
  assertTypedEquals[CoI](coi, r1)
  val r2 = coi.extendLeftBy[S :+: CNil]
  assertTypedEquals[CoSI](Coproduct[CoSI](1), r2)
  val r3 = coi.extendLeftBy[D :+: S :+: CNil]
  assertTypedEquals[CoDSI](Coproduct[CoDSI](1), r3)
  val r4 = coi.extendLeftBy[C :+: D :+: S :+: CNil]
  assertTypedEquals[CoCDSI](Coproduct[CoCDSI](1), r4)
}
@Test
def testExtendRightBy {
  // extendRightBy appends a whole coproduct type (possibly CNil, a no-op)
  // on the right; the injected value is preserved.
  type S = String; type I = Int; type D = Double; type C = Char
  type CoI = I :+: CNil
  type CoIS = I :+: S :+: CNil
  type CoISD = I :+: S :+: D :+: CNil
  type CoISDC = I :+: S :+: D :+: C :+: CNil
  val coi = Coproduct[CoI](1)
  // Extending by CNil leaves the type unchanged.
  val r1 = coi.extendRightBy[CNil]
  assertTypedEquals[CoI](coi, r1)
  val r2 = coi.extendRightBy[S :+: CNil]
  assertTypedEquals[CoIS](Coproduct[CoIS](1), r2)
  val r3 = coi.extendRightBy[S :+: D :+: CNil]
  assertTypedEquals[CoISD](Coproduct[CoISD](1), r3)
  val r4 = coi.extendRightBy[S :+: D :+: C :+: CNil]
  assertTypedEquals[CoISDC](Coproduct[CoISDC](1), r4)
}
@Test
def testRotateLeft {
  // Exhaustive matrix for rotateLeft: identity cases (rotation by 0 or by a
  // multiple of the size, checked with same-ness), proper rotations (checked
  // with equality), in both value-level (Int argument) and type-level (Nat
  // type argument) forms.
  import Nat._
  type S = String; type I = Int; type D = Double; type C = Char
  val in1 = Coproduct[I :+: CNil](1)
  val in2 = Coproduct[I :+: S :+: CNil](1)
  val in3 = Coproduct[I :+: S :+: D :+: CNil](1)
  val in4 = Coproduct[I :+: S :+: D :+: C :+: CNil](1)
  { // rotateLeft(0)
    val r1 = in1.rotateLeft(0)
    assertTypedSame[I :+: CNil](in1, r1)
    val r2 = in2.rotateLeft(0)
    assertTypedSame[I :+: S :+: CNil](in2, r2)
    val r3 = in3.rotateLeft(0)
    assertTypedSame[I :+: S :+: D :+: CNil](in3, r3)
    val r4 = in4.rotateLeft(0)
    assertTypedSame[I :+: S :+: D :+: C :+: CNil](in4, r4)
  }
  { // rotateLeft[_0]
    val r1 = in1.rotateLeft[_0]
    assertTypedSame[I :+: CNil](in1, r1)
    val r2 = in2.rotateLeft[_0]
    assertTypedSame[I :+: S :+: CNil](in2, r2)
    val r3 = in3.rotateLeft[_0]
    assertTypedSame[I :+: S :+: D :+: CNil](in3, r3)
    val r4 = in4.rotateLeft[_0]
    assertTypedSame[I :+: S :+: D :+: C :+: CNil](in4, r4)
  }
  { // rotateLeft(n % size == 0)
    val r1 = in1.rotateLeft(1)
    assertTypedSame[I :+: CNil](in1, r1)
    val r2 = in1.rotateLeft(2)
    assertTypedSame[I :+: CNil](in1, r2)
    val r3 = in2.rotateLeft(2)
    assertTypedSame[I :+: S :+: CNil](in2, r3)
    val r4 = in2.rotateLeft(4)
    assertTypedSame[I :+: S :+: CNil](in2, r4)
    val r5 = in3.rotateLeft(3)
    assertTypedSame[I :+: S :+: D :+: CNil](in3, r5)
    val r6 = in3.rotateLeft(6)
    assertTypedSame[I :+: S :+: D :+: CNil](in3, r6)
    val r7 = in4.rotateLeft(4)
    assertTypedSame[I :+: S :+: D :+: C :+: CNil](in4, r7)
    val r8 = in4.rotateLeft(8)
    assertTypedSame[I :+: S :+: D :+: C :+: CNil](in4, r8)
  }
  { // rotateLeft[N % Size == 0]
    val r1 = in1.rotateLeft[_1]
    assertTypedSame[I :+: CNil](in1, r1)
    val r2 = in1.rotateLeft[_2]
    assertTypedSame[I :+: CNil](in1, r2)
    val r3 = in2.rotateLeft[_2]
    assertTypedSame[I :+: S :+: CNil](in2, r3)
    val r4 = in2.rotateLeft[_4]
    assertTypedSame[I :+: S :+: CNil](in2, r4)
    val r5 = in3.rotateLeft[_3]
    assertTypedSame[I :+: S :+: D :+: CNil](in3, r5)
    val r6 = in3.rotateLeft[_6]
    assertTypedSame[I :+: S :+: D :+: CNil](in3, r6)
    val r7 = in4.rotateLeft[_4]
    assertTypedSame[I :+: S :+: D :+: C :+: CNil](in4, r7)
    val r8 = in4.rotateLeft[_8]
    assertTypedSame[I :+: S :+: D :+: C :+: CNil](in4, r8)
  }
  { // other(n)
    val r1 = in2.rotateLeft(1)
    assertTypedEquals[S :+: I :+: CNil](Coproduct[S :+: I :+: CNil](1), r1)
    val r2 = in3.rotateLeft(1)
    assertTypedEquals[S :+: D :+: I :+: CNil](Coproduct[S :+: D :+: I :+: CNil](1), r2)
    val r3 = in4.rotateLeft(1)
    assertTypedEquals[S :+: D :+: C :+: I :+: CNil](Coproduct[S :+: D :+: C :+: I :+: CNil](1), r3)
    val r4 = in4.rotateLeft(2)
    assertTypedEquals[D :+: C :+: I :+: S :+: CNil](Coproduct[D :+: C :+: I :+: S :+: CNil](1), r4)
    val r5 = in4.rotateLeft(3)
    assertTypedEquals[C :+: I :+: S :+: D :+: CNil](Coproduct[C :+: I :+: S :+: D :+: CNil](1), r5)
    val r6 = in4.rotateLeft(5)
    assertTypedEquals[S :+: D :+: C :+: I :+: CNil](Coproduct[S :+: D :+: C :+: I :+: CNil](1), r6)
    val r7 = in4.rotateLeft(6)
    assertTypedEquals[D :+: C :+: I :+: S :+: CNil](Coproduct[D :+: C :+: I :+: S :+: CNil](1), r7)
  }
  { // other[N]
    val r1 = in2.rotateLeft[_1]
    assertTypedEquals[S :+: I :+: CNil](Coproduct[S :+: I :+: CNil](1), r1)
    val r2 = in3.rotateLeft[_1]
    assertTypedEquals[S :+: D :+: I :+: CNil](Coproduct[S :+: D :+: I :+: CNil](1), r2)
    val r3 = in4.rotateLeft[_1]
    assertTypedEquals[S :+: D :+: C :+: I :+: CNil](Coproduct[S :+: D :+: C :+: I :+: CNil](1), r3)
    val r4 = in4.rotateLeft[_2]
    assertTypedEquals[D :+: C :+: I :+: S :+: CNil](Coproduct[D :+: C :+: I :+: S :+: CNil](1), r4)
    val r5 = in4.rotateLeft[_3]
    assertTypedEquals[C :+: I :+: S :+: D :+: CNil](Coproduct[C :+: I :+: S :+: D :+: CNil](1), r5)
    val r6 = in4.rotateLeft[_5]
    assertTypedEquals[S :+: D :+: C :+: I :+: CNil](Coproduct[S :+: D :+: C :+: I :+: CNil](1), r6)
    val r7 = in4.rotateLeft[_6]
    assertTypedEquals[D :+: C :+: I :+: S :+: CNil](Coproduct[D :+: C :+: I :+: S :+: CNil](1), r7)
  }
  {
    // A coproduct with a repeated element type (Int appears twice): the four
    // rotations of the same value are built explicitly with Inl/Inr so the
    // injection position is unambiguous, then each rotation is checked to map
    // onto the next.
    type C1 = Int :+: Boolean :+: String :+: Int :+: CNil
    type C2 = Boolean :+: String :+: Int :+: Int :+: CNil
    type C3 = String :+: Int :+: Int :+: Boolean :+: CNil
    type C4 = Int :+: Int :+: Boolean :+: String :+: CNil
    val i1: C1 = Inl(1)
    val i2: C2 = Inr(Inr(Inr(Inl(1))))
    val i3: C3 = Inr(Inr(Inl(1)))
    val i4: C4 = Inr(Inl(1))
    assertTypedEquals(i1, i1.rotateLeft(0))
    assertTypedEquals(i2, i1.rotateLeft(1))
    assertTypedEquals(i3, i1.rotateLeft(2))
    assertTypedEquals(i4, i1.rotateLeft(3))
    assertTypedEquals(i2, i2.rotateLeft(0))
    assertTypedEquals(i3, i2.rotateLeft(1))
    assertTypedEquals(i4, i2.rotateLeft(2))
    assertTypedEquals(i1, i2.rotateLeft(3))
    assertTypedEquals(i3, i3.rotateLeft(0))
    assertTypedEquals(i4, i3.rotateLeft(1))
    assertTypedEquals(i1, i3.rotateLeft(2))
    assertTypedEquals(i2, i3.rotateLeft(3))
    assertTypedEquals(i4, i4.rotateLeft(0))
    assertTypedEquals(i1, i4.rotateLeft(1))
    assertTypedEquals(i2, i4.rotateLeft(2))
    assertTypedEquals(i3, i4.rotateLeft(3))
  }
}
@Test
def testRotateRight {
  // Mirror of testRotateLeft for rotateRight: identity cases (rotation by 0
  // or a multiple of the size, checked with same-ness), proper rotations
  // (checked with equality), in both value-level and type-level forms.
  import Nat._
  type S = String; type I = Int; type D = Double; type C = Char
  val in1 = Coproduct[I :+: CNil](1)
  val in2 = Coproduct[I :+: S :+: CNil](1)
  val in3 = Coproduct[I :+: S :+: D :+: CNil](1)
  val in4 = Coproduct[I :+: S :+: D :+: C :+: CNil](1)
  { // rotateRight(0)
    val r1 = in1.rotateRight(0)
    assertTypedSame[I :+: CNil](in1, r1)
    val r2 = in2.rotateRight(0)
    assertTypedSame[I :+: S :+: CNil](in2, r2)
    val r3 = in3.rotateRight(0)
    assertTypedSame[I :+: S :+: D :+: CNil](in3, r3)
    val r4 = in4.rotateRight(0)
    assertTypedSame[I :+: S :+: D :+: C :+: CNil](in4, r4)
  }
  { // rotateRight[_0]
    val r1 = in1.rotateRight[_0]
    assertTypedSame[I :+: CNil](in1, r1)
    val r2 = in2.rotateRight[_0]
    assertTypedSame[I :+: S :+: CNil](in2, r2)
    val r3 = in3.rotateRight[_0]
    assertTypedSame[I :+: S :+: D :+: CNil](in3, r3)
    val r4 = in4.rotateRight[_0]
    assertTypedSame[I :+: S :+: D :+: C :+: CNil](in4, r4)
  }
  { // rotateRight(n % size == 0)
    val r1 = in1.rotateRight(1)
    assertTypedSame[I :+: CNil](in1, r1)
    val r2 = in1.rotateRight(2)
    assertTypedSame[I :+: CNil](in1, r2)
    val r3 = in2.rotateRight(2)
    assertTypedSame[I :+: S :+: CNil](in2, r3)
    val r4 = in2.rotateRight(4)
    assertTypedSame[I :+: S :+: CNil](in2, r4)
    val r5 = in3.rotateRight(3)
    assertTypedSame[I :+: S :+: D :+: CNil](in3, r5)
    val r6 = in3.rotateRight(6)
    assertTypedSame[I :+: S :+: D :+: CNil](in3, r6)
    val r7 = in4.rotateRight(4)
    assertTypedSame[I :+: S :+: D :+: C :+: CNil](in4, r7)
    val r8 = in4.rotateRight(8)
    assertTypedSame[I :+: S :+: D :+: C :+: CNil](in4, r8)
  }
  { // rotateRight[N % Size == 0]
    val r1 = in1.rotateRight[_1]
    assertTypedSame[I :+: CNil](in1, r1)
    val r2 = in1.rotateRight[_2]
    assertTypedSame[I :+: CNil](in1, r2)
    val r3 = in2.rotateRight[_2]
    assertTypedSame[I :+: S :+: CNil](in2, r3)
    val r4 = in2.rotateRight[_4]
    assertTypedSame[I :+: S :+: CNil](in2, r4)
    val r5 = in3.rotateRight[_3]
    assertTypedSame[I :+: S :+: D :+: CNil](in3, r5)
    val r6 = in3.rotateRight[_6]
    assertTypedSame[I :+: S :+: D :+: CNil](in3, r6)
    val r7 = in4.rotateRight[_4]
    assertTypedSame[I :+: S :+: D :+: C :+: CNil](in4, r7)
    val r8 = in4.rotateRight[_8]
    assertTypedSame[I :+: S :+: D :+: C :+: CNil](in4, r8)
  }
  { // other(n)
    val r1 = in2.rotateRight(1)
    assertTypedEquals[S :+: I :+: CNil](Coproduct[S :+: I :+: CNil](1), r1)
    val r2 = in3.rotateRight(1)
    assertTypedEquals[D :+: I :+: S :+: CNil](Coproduct[D :+: I :+: S :+: CNil](1), r2)
    val r3 = in4.rotateRight(1)
    assertTypedEquals[C :+: I :+: S :+: D :+: CNil](Coproduct[C :+: I :+: S :+: D :+: CNil](1), r3)
    val r4 = in4.rotateRight(2)
    assertTypedEquals[D :+: C :+: I :+: S :+: CNil](Coproduct[D :+: C :+: I :+: S :+: CNil](1), r4)
    val r5 = in4.rotateRight(3)
    assertTypedEquals[S :+: D :+: C :+: I :+: CNil](Coproduct[S :+: D :+: C :+: I :+: CNil](1), r5)
    val r6 = in4.rotateRight(5)
    assertTypedEquals[C :+: I :+: S :+: D :+: CNil](Coproduct[C :+: I :+: S :+: D :+: CNil](1), r6)
    val r7 = in4.rotateRight(6)
    assertTypedEquals[D :+: C :+: I :+: S :+: CNil](Coproduct[D :+: C :+: I :+: S :+: CNil](1), r7)
  }
  { // other[N]
    val r1 = in2.rotateRight[_1]
    assertTypedEquals[S :+: I :+: CNil](Coproduct[S :+: I :+: CNil](1), r1)
    val r2 = in3.rotateRight[_1]
    assertTypedEquals[D :+: I :+: S :+: CNil](Coproduct[D :+: I :+: S :+: CNil](1), r2)
    val r3 = in4.rotateRight[_1]
    assertTypedEquals[C :+: I :+: S :+: D :+: CNil](Coproduct[C :+: I :+: S :+: D :+: CNil](1), r3)
    val r4 = in4.rotateRight[_2]
    assertTypedEquals[D :+: C :+: I :+: S :+: CNil](Coproduct[D :+: C :+: I :+: S :+: CNil](1), r4)
    val r5 = in4.rotateRight[_3]
    assertTypedEquals[S :+: D :+: C :+: I :+: CNil](Coproduct[S :+: D :+: C :+: I :+: CNil](1), r5)
    val r6 = in4.rotateRight[_5]
    assertTypedEquals[C :+: I :+: S :+: D :+: CNil](Coproduct[C :+: I :+: S :+: D :+: CNil](1), r6)
    val r7 = in4.rotateRight[_6]
    assertTypedEquals[D :+: C :+: I :+: S :+: CNil](Coproduct[D :+: C :+: I :+: S :+: CNil](1), r7)
  }
  {
    // Repeated element type (Int appears twice): rotations are constructed
    // explicitly with Inl/Inr so the injection position is unambiguous.
    type C1 = Int :+: Boolean :+: String :+: Int :+: CNil
    type C2 = Int :+: Int :+: Boolean :+: String :+: CNil
    type C3 = String :+: Int :+: Int :+: Boolean :+: CNil
    type C4 = Boolean :+: String :+: Int :+: Int :+: CNil
    val i1: C1 = Inl(1)
    val i2: C2 = Inr(Inl(1))
    val i3: C3 = Inr(Inr(Inl(1)))
    val i4: C4 = Inr(Inr(Inr(Inl(1))))
    assertTypedEquals(i1, i1.rotateRight(0))
    assertTypedEquals(i2, i1.rotateRight(1))
    assertTypedEquals(i3, i1.rotateRight(2))
    assertTypedEquals(i4, i1.rotateRight(3))
    assertTypedEquals(i2, i2.rotateRight(0))
    assertTypedEquals(i3, i2.rotateRight(1))
    assertTypedEquals(i4, i2.rotateRight(2))
    assertTypedEquals(i1, i2.rotateRight(3))
    assertTypedEquals(i3, i3.rotateRight(0))
    assertTypedEquals(i4, i3.rotateRight(1))
    assertTypedEquals(i1, i3.rotateRight(2))
    assertTypedEquals(i2, i3.rotateRight(3))
    assertTypedEquals(i4, i4.rotateRight(0))
    assertTypedEquals(i1, i4.rotateRight(1))
    assertTypedEquals(i2, i4.rotateRight(2))
    assertTypedEquals(i3, i4.rotateRight(3))
  }
}
@Test
def testHead {
  // head is Some only when the value inhabits the first alternative.
  val fromSingleton = Coproduct[Int :+: CNil](1).head
  assertTypedEquals[Option[Int]](Some(1), fromSingleton)
  val fromFirst = Coproduct[Int :+: String :+: CNil](1).head
  assertTypedEquals[Option[Int]](Some(1), fromFirst)
  val fromSecond = Coproduct[Int :+: String :+: CNil]("foo").head
  assertTypedEquals[Option[Int]](None, fromSecond)
}
@Test
def testTail {
  // tail is Some only when the value inhabits one of the non-head alternatives.
  val fromSingleton = Coproduct[Int :+: CNil](1).tail
  assertTypedEquals[Option[CNil]](None, fromSingleton)
  val fromFirst = Coproduct[Int :+: String :+: CNil](1).tail
  assertTypedEquals[Option[String :+: CNil]](None, fromFirst)
  val fromSecond = Coproduct[Int :+: String :+: CNil]("foo").tail
  assertTypedEquals[Option[String :+: CNil]](Some(Coproduct[String :+: CNil]("foo")), fromSecond)
}
@Test
def testPrepend: Unit = {
  // Prepend concatenates two coproduct types; its apply takes an Either
  // whose Left injects into the first (prefix) part and whose Right injects
  // into the second (suffix) part.
  type S = String; type I = Int; type D = Double; type C = Char
  val in1 = Coproduct[I :+: CNil](1)
  val in2 = Coproduct[I :+: S :+: CNil](1)
  val in3 = Coproduct[I :+: S :+: D :+: CNil](1)
  val in4 = Coproduct[I :+: S :+: D :+: C :+: CNil](1)
  {
    // Prepending CNil - checking for same-ness, not only equality
    val r1 = Prepend[CNil, I :+: CNil].apply(Right(in1))
    assertTypedSame(in1, r1)
    val r2 = Prepend[CNil, I :+: S :+: CNil].apply(Right(in2))
    assertTypedSame(in2, r2)
    val r3 = Prepend[CNil, I :+: S :+: D :+: CNil].apply(Right(in3))
    assertTypedSame(in3, r3)
    val r4 = Prepend[CNil, I :+: S :+: D :+: C :+: CNil].apply(Right(in4))
    assertTypedSame(in4, r4)
  }
  {
    // Appending CNil - checking for same-ness, not only equality
    val r1 = Prepend[I :+: CNil, CNil].apply(Left(in1))
    assertTypedSame(in1, r1)
    val r2 = Prepend[I :+: S :+: CNil, CNil].apply(Left(in2))
    assertTypedSame(in2, r2)
    val r3 = Prepend[I :+: S :+: D :+: CNil, CNil].apply(Left(in3))
    assertTypedSame(in3, r3)
    val r4 = Prepend[I :+: S :+: D :+: C :+: CNil, CNil].apply(Left(in4))
    assertTypedSame(in4, r4)
  }
  {
    // Non-trivial concatenation: Left stays at its original index, Right is
    // shifted past the prefix (hence the extra Inr wrappers).
    val r11_1 = Prepend[I :+: CNil, I :+: CNil].apply(Left(in1))
    assertTypedEquals(Inl(1), r11_1)
    val r11_2 = Prepend[I :+: CNil, I :+: CNil].apply(Right(in1))
    assertTypedEquals(Inr(Inl(1)), r11_2)
    val r12_1 = Prepend[I :+: CNil, I :+: S :+: CNil].apply(Left(in1))
    assertTypedEquals(Inl(1), r12_1)
    val r12_2 = Prepend[I :+: CNil, I :+: S :+: CNil].apply(Right(in2))
    assertTypedEquals(Inr(Inl(1)), r12_2)
    val r34_3 = Prepend[I :+: S :+: D :+: CNil, I :+: S :+: D :+: C :+: CNil].apply(Left(in3))
    assertTypedEquals(Inl(1), r34_3)
    val r34_4 = Prepend[I :+: S :+: D :+: CNil, I :+: S :+: D :+: C :+: CNil].apply(Right(in4))
    assertTypedEquals(Inr(Inr(Inr(Inl(1)))), r34_4)
  }
}
@Test
def testAlign {
  // align reorders a coproduct value to match a target permutation of the
  // same set of alternatives, given either a value (a*) or an explicit type
  // argument (b*). The illTyped checks confirm that alignment is rejected
  // when the target is not a permutation of the source.
  type K0 = Int :+: String :+: Boolean :+: CNil
  type K1 = Int :+: Boolean :+: String :+: CNil
  type K2 = String :+: Int :+: Boolean :+: CNil
  type K3 = String :+: Boolean :+: Int :+: CNil
  type K4 = Boolean :+: Int :+: String :+: CNil
  type K5 = Boolean :+: String :+: Int :+: CNil
  val k0i = Coproduct[K0](13)
  val k0s = Coproduct[K0]("bar")
  val k0b = Coproduct[K0](false)
  val k1i = Coproduct[K1](13)
  val k1s = Coproduct[K1]("bar")
  val k1b = Coproduct[K1](false)
  val k2i = Coproduct[K2](13)
  val k2s = Coproduct[K2]("bar")
  val k2b = Coproduct[K2](false)
  val k3i = Coproduct[K3](13)
  val k3s = Coproduct[K3]("bar")
  val k3b = Coproduct[K3](false)
  val k4i = Coproduct[K4](13)
  val k4s = Coproduct[K4]("bar")
  val k4b = Coproduct[K4](false)
  val k5i = Coproduct[K5](13)
  val k5s = Coproduct[K5]("bar")
  val k5b = Coproduct[K5](false)
  type C = K0
  val ci = Coproduct[C](23)
  val cs = Coproduct[C]("foo")
  val cb = Coproduct[C](true)
  // Value-directed alignment against each permutation K0..K5.
  val a0i = ci.align(k0i)
  assertTypedEquals[K0](Coproduct[K0](23), a0i)
  val a0s = cs.align(k0s)
  assertTypedEquals[K0](Coproduct[K0]("foo"), a0s)
  val a0b = cb.align(k0b)
  assertTypedEquals[K0](Coproduct[K0](true), a0b)
  val a1i = ci.align(k1i)
  assertTypedEquals[K1](Coproduct[K1](23), a1i)
  val a1s = cs.align(k1s)
  assertTypedEquals[K1](Coproduct[K1]("foo"), a1s)
  val a1b = cb.align(k1b)
  assertTypedEquals[K1](Coproduct[K1](true), a1b)
  val a2i = ci.align(k2i)
  assertTypedEquals[K2](Coproduct[K2](23), a2i)
  val a2s = cs.align(k2s)
  assertTypedEquals[K2](Coproduct[K2]("foo"), a2s)
  val a2b = cb.align(k2b)
  assertTypedEquals[K2](Coproduct[K2](true), a2b)
  val a3i = ci.align(k3i)
  assertTypedEquals[K3](Coproduct[K3](23), a3i)
  val a3s = cs.align(k3s)
  assertTypedEquals[K3](Coproduct[K3]("foo"), a3s)
  val a3b = cb.align(k3b)
  assertTypedEquals[K3](Coproduct[K3](true), a3b)
  val a4i = ci.align(k4i)
  assertTypedEquals[K4](Coproduct[K4](23), a4i)
  val a4s = cs.align(k4s)
  assertTypedEquals[K4](Coproduct[K4]("foo"), a4s)
  val a4b = cb.align(k4b)
  assertTypedEquals[K4](Coproduct[K4](true), a4b)
  val a5i = ci.align(k5i)
  assertTypedEquals[K5](Coproduct[K5](23), a5i)
  val a5s = cs.align(k5s)
  assertTypedEquals[K5](Coproduct[K5]("foo"), a5s)
  val a5b = cb.align(k5b)
  assertTypedEquals[K5](Coproduct[K5](true), a5b)
  // Type-directed alignment against each permutation K0..K5.
  val b0i = ci.align[K0]
  assertTypedEquals[K0](Coproduct[K0](23), b0i)
  val b0s = cs.align[K0]
  assertTypedEquals[K0](Coproduct[K0]("foo"), b0s)
  val b0b = cb.align[K0]
  assertTypedEquals[K0](Coproduct[K0](true), b0b)
  val b1i = ci.align[K1]
  assertTypedEquals[K1](Coproduct[K1](23), b1i)
  val b1s = cs.align[K1]
  assertTypedEquals[K1](Coproduct[K1]("foo"), b1s)
  val b1b = cb.align[K1]
  assertTypedEquals[K1](Coproduct[K1](true), b1b)
  val b2i = ci.align[K2]
  assertTypedEquals[K2](Coproduct[K2](23), b2i)
  val b2s = cs.align[K2]
  assertTypedEquals[K2](Coproduct[K2]("foo"), b2s)
  val b2b = cb.align[K2]
  assertTypedEquals[K2](Coproduct[K2](true), b2b)
  val b3i = ci.align[K3]
  assertTypedEquals[K3](Coproduct[K3](23), b3i)
  val b3s = cs.align[K3]
  assertTypedEquals[K3](Coproduct[K3]("foo"), b3s)
  val b3b = cb.align[K3]
  assertTypedEquals[K3](Coproduct[K3](true), b3b)
  val b4i = ci.align[K4]
  assertTypedEquals[K4](Coproduct[K4](23), b4i)
  val b4s = cs.align[K4]
  assertTypedEquals[K4](Coproduct[K4]("foo"), b4s)
  val b4b = cb.align[K4]
  assertTypedEquals[K4](Coproduct[K4](true), b4b)
  val b5i = ci.align[K5]
  assertTypedEquals[K5](Coproduct[K5](23), b5i)
  val b5s = cs.align[K5]
  assertTypedEquals[K5](Coproduct[K5]("foo"), b5s)
  val b5b = cb.align[K5]
  assertTypedEquals[K5](Coproduct[K5](true), b5b)
  // Mismatched element sets / lengths must not align.
  illTyped("""
  (Coproduct[String :+: CNil]).align[Int :+: CNil]
  """)
  illTyped("""
  (Coproduct[String :+: Int :+: CNil]).align[String :+: CNil]
  """)
  illTyped("""
  (Coproduct[Int :+: CNil]).align[Int :+: String :+: CNil]
  """)
}
@Test
def testReverse {
  // reverse flips the order of alternatives; reversing twice is an identity,
  // including on coproducts with a repeated element type.
  {
    type S = String; type I = Int; type D = Double; type C = Char
    type SI = S :+: I :+: CNil; type IS = I :+: S :+: CNil
    val r1 = Coproduct[I :+: CNil](1).reverse
    assertTypedEquals[I :+: CNil](Coproduct[I :+: CNil](1), r1)
    val r2 = Coproduct[IS](1).reverse
    assertTypedEquals[SI](Coproduct[SI](1), r2)
    val r3 = Coproduct[IS]("foo").reverse
    assertTypedEquals[SI](Coproduct[SI]("foo"), r3)
  }
  {
    // Int appears twice, so injections are built explicitly with Inl/Inr to
    // pin the position; reverse.reverse must restore each exactly.
    type C = Int :+: String :+: Double :+: Int :+: Boolean :+: CNil
    val c1: C = Inl(1)
    val c2: C = Inr(Inl("str"))
    val c3: C = Inr(Inr(Inl(3.0)))
    val c4: C = Inr(Inr(Inr(Inl(4))))
    val c5: C = Inr(Inr(Inr(Inr(Inl(true)))))
    assertTypedEquals(c1, c1.reverse.reverse)
    assertTypedEquals(c2, c2.reverse.reverse)
    assertTypedEquals(c3, c3.reverse.reverse)
    assertTypedEquals(c4, c4.reverse.reverse)
    assertTypedEquals(c5, c5.reverse.reverse)
  }
}
@Test
def testInit {
  // init (all but the last alternative) is Some only when the value does NOT
  // inhabit the last alternative.
  val fromSingleton = Coproduct[Int :+: CNil](1).init
  assertTypedEquals[Option[CNil]](None, fromSingleton)
  val fromLast = Coproduct[Int :+: String :+: CNil]("foo").init
  assertTypedEquals[Option[Int :+: CNil]](None, fromLast)
  val fromFirst = Coproduct[Int :+: String :+: CNil](1).init
  assertTypedEquals[Option[Int :+: CNil]](Some(Coproduct[Int :+: CNil](1)), fromFirst)
}
@Test
def testLast {
  // last is Some only when the value inhabits the final alternative.
  val fromSingleton = Coproduct[Int :+: CNil](1).last
  assertTypedEquals[Option[Int]](Some(1), fromSingleton)
  val fromLast = Coproduct[Int :+: String :+: CNil]("foo").last
  assertTypedEquals[Option[String]](Some("foo"), fromLast)
  val fromFirst = Coproduct[Int :+: String :+: CNil](1).last
  assertTypedEquals[Option[String]](None, fromFirst)
}
@Test
def testAt {
  // at(n) returns Some of the element type at index n only when the value
  // actually inhabits that alternative; otherwise None (typed to that
  // alternative's element type). Checked in value-level and Nat-typed forms.
  import Nat._
  type S = String; type I = Int; type D = Double; type C = Char
  val in1 = Coproduct[I :+: CNil](1)
  val in2 = Coproduct[I :+: S :+: CNil](1)
  val in3 = Coproduct[I :+: S :+: D :+: CNil](1)
  {
    val r1 = in1.at(0)
    assertTypedEquals[Option[I]](Some(1), r1)
    val r2 = in2.at(0)
    assertTypedEquals[Option[I]](Some(1), r2)
    val r3 = in3.at(0)
    assertTypedEquals[Option[I]](Some(1), r3)
    val r4 = in2.at(1)
    assertTypedEquals[Option[S]](None, r4)
    val r5 = in3.at(1)
    assertTypedEquals[Option[S]](None, r5)
    val r6 = in3.at(2)
    assertTypedEquals[Option[D]](None, r6)
  }
  {
    val r1 = in1.at[nat._0]
    assertTypedEquals[Option[I]](Some(1), r1)
    val r2 = in2.at[nat._0]
    assertTypedEquals[Option[I]](Some(1), r2)
    val r3 = in3.at[nat._0]
    assertTypedEquals[Option[I]](Some(1), r3)
    val r4 = in2.at[nat._1]
    assertTypedEquals[Option[S]](None, r4)
    val r5 = in3.at[nat._1]
    assertTypedEquals[Option[S]](None, r5)
    val r6 = in3.at[nat._2]
    assertTypedEquals[Option[D]](None, r6)
  }
}
@Test
def testPartition {
  // partition[T] splits the alternatives into those equal to T (Left) and the
  // rest (Right); the side returned is the one the value inhabits.
  type S = String; type I = Int; type D = Double; type C = Char
  val i = Coproduct[I :+: CNil](1)
  val is = Coproduct[I :+: S :+: CNil](1)
  // Int appears twice in isdi; inject at the SECOND Int position (value 2)
  // via an explicit Inr so the position is unambiguous.
  val isdi: I :+: S :+: D :+: I :+: CNil =
    Inr[I, S :+: D :+: I :+: CNil](Coproduct[S :+: D :+: I :+: CNil](2))
  val r1 = i.partition[I]
  assertTypedEquals[Either[I :+: CNil, CNil]](Left(i), r1)
  val r2 = is.partition[I]
  assertTypedEquals[Either[I :+: CNil, S :+: CNil]](Left(i), r2)
  val r3 = i.partition[S]
  assertTypedEquals[Either[CNil, I :+: CNil]](Right(i), r3)
  val r4 = is.partition[S]
  assertTypedEquals[Either[S :+: CNil, I :+: CNil]](Right(i), r4)
  // Partitioning on a type not present puts everything on the Right.
  val r5 = i.partition[C]
  assertTypedEquals[Either[CNil, I :+: CNil]](Right(i), r5)
  val r6 = is.partition[C]
  assertTypedEquals[Either[CNil, I :+: S :+: CNil]](Right(is), r6)
  // Both Int occurrences collect into the Left part, preserving order.
  val r7 = isdi.partition[I]
  assertTypedEquals[Either[I :+: I :+: CNil, S :+: D :+: CNil]](Left(Inr[I, I :+: CNil](Inl[I, CNil](2))), r7)
}
@Test
def testPartitionC {
  // partitionC is like partition but returns a two-alternative coproduct
  // (matches :+: non-matches) instead of an Either.
  type S = String; type I = Int; type D = Double; type C = Char
  val i = Coproduct[I :+: CNil](1)
  val is = Coproduct[I :+: S :+: CNil](1)
  // Inject at the SECOND Int position (value 2) via an explicit Inr.
  val isdi: I :+: S :+: D :+: I :+: CNil =
    Inr[I, S :+: D :+: I :+: CNil](Coproduct[S :+: D :+: I :+: CNil](2))
  val r1 = i.partitionC[I]
  assertTypedEquals[(I :+: CNil) :+: CNil :+: CNil](Inl(i), r1)
  val r2 = is.partitionC[I]
  assertTypedEquals[(I :+: CNil) :+: (S :+: CNil) :+: CNil](Inl(i), r2)
  val r3 = i.partitionC[S]
  assertTypedEquals[CNil :+: (I :+: CNil) :+: CNil](Inr(Inl(i)), r3)
  val r4 = is.partitionC[S]
  assertTypedEquals[(S :+: CNil) :+: (I :+: CNil) :+: CNil](Inr(Inl(i)), r4)
  // Partitioning on an absent type puts everything into the second alternative.
  val r5 = i.partitionC[C]
  assertTypedEquals[CNil :+: (I :+: CNil) :+: CNil](Inr(Inl(i)), r5)
  val r6 = is.partitionC[C]
  assertTypedEquals[CNil :+: (I :+: S :+: CNil) :+: CNil](Inr(Inl(is)), r6)
  val r7 = isdi.partitionC[I]
  assertTypedEquals[(I :+: I :+: CNil) :+: (S :+: D :+: CNil) :+: CNil](
    Inl(Inr[I, I :+: CNil](Inl[I, CNil](2))), r7)
}
@Test
def testFilter {
  // filter[T] keeps only the alternatives equal to T; the result is Some only
  // when the value inhabits one of the kept alternatives.
  type S = String; type I = Int; type D = Double; type C = Char
  val i = Coproduct[I :+: CNil](1)
  val is = Coproduct[I :+: S :+: CNil](1)
  // Inject at the SECOND Int position (value 2) via an explicit Inr.
  val isdi: I :+: S :+: D :+: I :+: CNil =
    Inr[I, S :+: D :+: I :+: CNil](Coproduct[S :+: D :+: I :+: CNil](2))
  val r1 = i.filter[I]
  assertTypedEquals[Option[I :+: CNil]](Some(i), r1)
  val r2 = is.filter[I]
  assertTypedEquals[Option[I :+: CNil]](Some(i), r2)
  val r3 = i.filter[S]
  assertTypedEquals[Option[CNil]](None, r3)
  val r4 = is.filter[S]
  assertTypedEquals[Option[S :+: CNil]](None, r4)
  // Filtering on an absent type can never succeed.
  val r5 = i.filter[C]
  assertTypedEquals[Option[CNil]](None, r5)
  val r6 = is.filter[C]
  assertTypedEquals[Option[CNil]](None, r6)
  // Both Int occurrences are kept; the value stays at the second one.
  val r7 = isdi.filter[I]
  assertTypedEquals[Option[I :+: I :+: CNil]](Some(Inr[I, I :+: CNil](Inl[I, CNil](2))), r7)
}
@Test
def testFilterNot {
  // filterNot[T] removes the alternatives equal to T; the result is Some only
  // when the value inhabits one of the REMAINING alternatives.
  type S = String; type I = Int; type D = Double; type C = Char
  val i = Coproduct[I :+: CNil](1)
  val is = Coproduct[I :+: S :+: CNil](1)
  // Inject at the SECOND Int position (value 2) via an explicit Inr.
  val isdi: I :+: S :+: D :+: I :+: CNil =
    Inr[I, S :+: D :+: I :+: CNil](Coproduct[S :+: D :+: I :+: CNil](2))
  val r1 = i.filterNot[I]
  assertTypedEquals[Option[CNil]](None, r1)
  val r2 = is.filterNot[I]
  assertTypedEquals[Option[S :+: CNil]](None, r2)
  val r4 = i.filterNot[S]
  assertTypedEquals[Option[I :+: CNil]](Some(i), r4)
  val r5 = is.filterNot[S]
  assertTypedEquals[Option[I :+: CNil]](Some(i), r5)
  // Removing an absent type leaves the coproduct (and value) intact.
  val r7 = i.filterNot[D]
  assertTypedEquals[Option[I :+: CNil]](Some(i), r7)
  val r8 = is.filterNot[D]
  assertTypedEquals[Option[I :+: S :+: CNil]](Some(is), r8)
  // The value sits at an Int alternative, so removing Int drops it entirely.
  val r14 = isdi.filterNot[I]
  assertTypedEquals[Option[S :+: D :+: CNil]](None, r14)
}
@Test
def testSplit {
  // split(n) divides the alternatives into a prefix of length n (Left) and
  // the remainder (Right); the side returned is the one the value inhabits.
  // Checked in value-level and Nat-typed forms.
  import Nat._
  type S = String; type I = Int; type D = Double; type C = Char
  val in1 = Coproduct[I :+: CNil](1)
  val is = Coproduct[I :+: S :+: CNil](1)
  val dc = Coproduct[D :+: C :+: CNil](2.0)
  val isd = Coproduct[I :+: S :+: D :+: CNil](1)
  val isdc = Coproduct[I :+: S :+: D :+: C :+: CNil](2.0)
  {
    val r1 = in1.split(0)
    assertTypedEquals[Either[CNil, I :+: CNil]](Right(in1), r1)
    val r2 = is.split(0)
    assertTypedEquals[Either[CNil, I :+: S :+: CNil]](Right(is), r2)
    val r3 = in1.split(1)
    assertTypedEquals[Either[I :+: CNil, CNil]](Left(in1), r3)
    val r4 = is.split(1)
    assertTypedEquals[Either[I :+: CNil, S :+: CNil]](Left(in1), r4)
    val r5 = isd.split(1)
    assertTypedEquals[Either[I :+: CNil, S :+: D :+: CNil]](Left(in1), r5)
    // Cannot split at index 2 a coproduct of length 1
    illTyped(""" in1.split(2) """)
    val r7 = is.split(2)
    assertTypedEquals[Either[I :+: S :+: CNil, CNil]](Left(is), r7)
    val r8 = isd.split(2)
    assertTypedEquals[Either[I :+: S :+: CNil, D :+: CNil]](Left(is), r8)
    // isdc holds a Double, which falls in the suffix after splitting at 2.
    val r9 = isdc.split(2)
    assertTypedEquals[Either[I :+: S :+: CNil, D :+: C :+: CNil]](Right(dc), r9)
  }
  {
    val r1 = in1.split[_0]
    assertTypedEquals[Either[CNil, I :+: CNil]](Right(in1), r1)
    val r2 = is.split[_0]
    assertTypedEquals[Either[CNil, I :+: S :+: CNil]](Right(is), r2)
    val r3 = in1.split[_1]
    assertTypedEquals[Either[I :+: CNil, CNil]](Left(in1), r3)
    val r4 = is.split[_1]
    assertTypedEquals[Either[I :+: CNil, S :+: CNil]](Left(in1), r4)
    val r5 = isd.split[_1]
    assertTypedEquals[Either[I :+: CNil, S :+: D :+: CNil]](Left(in1), r5)
    // Cannot split at index 2 a coproduct of length 1
    illTyped(""" in1.split[_2] """)
    val r7 = is.split[_2]
    assertTypedEquals[Either[I :+: S :+: CNil, CNil]](Left(is), r7)
    val r8 = isd.split[_2]
    assertTypedEquals[Either[I :+: S :+: CNil, D :+: CNil]](Left(is), r8)
    val r9 = isdc.split[_2]
    assertTypedEquals[Either[I :+: S :+: CNil, D :+: C :+: CNil]](Right(dc), r9)
  }
}
@Test
def testSplitC {
  // splitC is like split but returns a two-alternative coproduct
  // (prefix :+: suffix) instead of an Either. Checked in value-level and
  // Nat-typed forms.
  import Nat._
  type S = String; type I = Int; type D = Double; type C = Char
  val in1 = Coproduct[I :+: CNil](1)
  val is = Coproduct[I :+: S :+: CNil](1)
  val dc = Coproduct[D :+: C :+: CNil](2.0)
  val isd = Coproduct[I :+: S :+: D :+: CNil](1)
  val isdc = Coproduct[I :+: S :+: D :+: C :+: CNil](2.0)
  {
    val r1 = in1.splitC(0)
    assertTypedEquals[CNil :+: (I :+: CNil) :+: CNil](
      Coproduct[CNil :+: (I :+: CNil) :+: CNil](in1), r1)
    val r2 = is.splitC(0)
    assertTypedEquals[CNil :+: (I :+: S :+: CNil) :+: CNil](
      Coproduct[CNil :+: (I :+: S :+: CNil) :+: CNil](is), r2)
    val r3 = in1.splitC(1)
    assertTypedEquals[(I :+: CNil) :+: CNil :+: CNil](
      Coproduct[(I :+: CNil) :+: CNil :+: CNil](in1), r3)
    val r4 = is.splitC(1)
    assertTypedEquals[(I :+: CNil) :+: (S :+: CNil) :+: CNil](
      Coproduct[(I :+: CNil) :+: (S :+: CNil) :+: CNil](in1), r4)
    val r5 = isd.splitC(1)
    assertTypedEquals[(I :+: CNil) :+: (S :+: D :+: CNil) :+: CNil](
      Coproduct[(I :+: CNil) :+: (S :+: D :+: CNil) :+: CNil](in1), r5)
    // Cannot split at index 2 a coproduct of length 1
    illTyped(""" in1.splitC(2) """)
    val r7 = is.splitC(2)
    assertTypedEquals[(I :+: S :+: CNil) :+: CNil :+: CNil](
      Coproduct[(I :+: S :+: CNil) :+: CNil :+: CNil](is), r7)
    val r8 = isd.splitC(2)
    assertTypedEquals[(I :+: S :+: CNil) :+: (D :+: CNil) :+: CNil](
      Coproduct[(I :+: S :+: CNil) :+: (D :+: CNil) :+: CNil](is), r8)
    // isdc holds a Double, which lands in the suffix alternative.
    val r9 = isdc.splitC(2)
    assertTypedEquals[(I :+: S :+: CNil) :+: (D :+: C :+: CNil) :+: CNil](
      Coproduct[(I :+: S :+: CNil) :+: (D :+: C :+: CNil) :+: CNil](dc), r9)
  }
  {
    val r1 = in1.splitC[_0]
    assertTypedEquals[CNil :+: (I :+: CNil) :+: CNil](
      Coproduct[CNil :+: (I :+: CNil) :+: CNil](in1), r1)
    val r2 = is.splitC[_0]
    assertTypedEquals[CNil :+: (I :+: S :+: CNil) :+: CNil](
      Coproduct[CNil :+: (I :+: S :+: CNil) :+: CNil](is), r2)
    val r3 = in1.splitC[_1]
    assertTypedEquals[(I :+: CNil) :+: CNil :+: CNil](
      Coproduct[(I :+: CNil) :+: CNil :+: CNil](in1), r3)
    val r4 = is.splitC[_1]
    assertTypedEquals[(I :+: CNil) :+: (S :+: CNil) :+: CNil](
      Coproduct[(I :+: CNil) :+: (S :+: CNil) :+: CNil](in1), r4)
    val r5 = isd.splitC[_1]
    assertTypedEquals[(I :+: CNil) :+: (S :+: D :+: CNil) :+: CNil](
      Coproduct[(I :+: CNil) :+: (S :+: D :+: CNil) :+: CNil](in1), r5)
    // Cannot split at index 2 a coproduct of length 1
    illTyped(""" in1.splitC[_2] """)
    val r7 = is.splitC[_2]
    assertTypedEquals[(I :+: S :+: CNil) :+: CNil :+: CNil](
      Coproduct[(I :+: S :+: CNil) :+: CNil :+: CNil](is), r7)
    val r8 = isd.splitC[_2]
    assertTypedEquals[(I :+: S :+: CNil) :+: (D :+: CNil) :+: CNil](
      Coproduct[(I :+: S :+: CNil) :+: (D :+: CNil) :+: CNil](is), r8)
    val r9 = isdc.splitC[_2]
    assertTypedEquals[(I :+: S :+: CNil) :+: (D :+: C :+: CNil) :+: CNil](
      Coproduct[(I :+: S :+: CNil) :+: (D :+: C :+: CNil) :+: CNil](dc), r9)
  }
}
// Exercises Coproduct#take: keep only the first N alternatives of a coproduct.
// The result is Some(prefixCoproduct) when the stored value lives in the retained
// prefix, None otherwise. Both the value-level (Int literal) and type-level (Nat)
// call styles are verified, including the ill-typed case of over-taking.
@Test
def testTake {
import Nat._
type S = String; type I = Int; type D = Double; type C = Char
// Fixtures: each coproduct holds a value of its *first* alternative, except
// isdc which holds the Double (third) alternative.
val in1 = Coproduct[I :+: CNil](1)
val is = Coproduct[I :+: S :+: CNil](1)
val dc = Coproduct[D :+: C :+: CNil](2.0)
val isd = Coproduct[I :+: S :+: D :+: CNil](1)
val isdc = Coproduct[I :+: S :+: D :+: C :+: CNil](2.0)
// Value-level variant: take(n) with a literal Int.
{
val r1 = in1.take(0)
assertTypedEquals[Option[CNil]](None, r1)
val r2 = is.take(0)
assertTypedEquals[Option[CNil]](None, r2)
val r3 = in1.take(1)
assertTypedEquals[Option[I :+: CNil]](Some(in1), r3)
val r4 = is.take(1)
assertTypedEquals[Option[I :+: CNil]](Some(in1), r4)
val r5 = isd.take(1)
assertTypedEquals[Option[I :+: CNil]](Some(in1), r5)
// Cannot take 2 elements out of a coproduct of length 1
illTyped(""" in1.take(2) """)
val r7 = is.take(2)
assertTypedEquals[Option[I :+: S :+: CNil]](Some(is), r7)
val r8 = isd.take(2)
assertTypedEquals[Option[I :+: S :+: CNil]](Some(is), r8)
// isdc holds a Double, which is outside the first two alternatives -> None.
val r9 = isdc.take(2)
assertTypedEquals[Option[I :+: S :+: CNil]](None, r9)
}
// Type-level variant: take[N] with a shapeless Nat; mirrors the cases above.
{
val r1 = in1.take[_0]
assertTypedEquals[Option[CNil]](None, r1)
val r2 = is.take[_0]
assertTypedEquals[Option[CNil]](None, r2)
val r3 = in1.take[_1]
assertTypedEquals[Option[I :+: CNil]](Some(in1), r3)
val r4 = is.take[_1]
assertTypedEquals[Option[I :+: CNil]](Some(in1), r4)
val r5 = isd.take[_1]
assertTypedEquals[Option[I :+: CNil]](Some(in1), r5)
// Cannot take 2 elements out of a coproduct of length 1
illTyped(""" in1.take[_2] """)
val r7 = is.take[_2]
assertTypedEquals[Option[I :+: S :+: CNil]](Some(is), r7)
val r8 = isd.take[_2]
assertTypedEquals[Option[I :+: S :+: CNil]](Some(is), r8)
val r9 = isdc.take[_2]
assertTypedEquals[Option[I :+: S :+: CNil]](None, r9)
}
}
// Exercises Coproduct#drop: remove the first N alternatives of a coproduct.
// The result is Some(suffixCoproduct) when the stored value lives in the
// remaining suffix, None when it was in the dropped prefix. Both value-level
// (Int literal) and type-level (Nat) call styles are verified.
@Test
def testDrop {
import Nat._
type S = String; type I = Int; type D = Double; type C = Char
// Fixtures: each coproduct holds a value of its *first* alternative, except
// isdc which holds the Double (third) alternative.
val in1 = Coproduct[I :+: CNil](1)
val is = Coproduct[I :+: S :+: CNil](1)
val dc = Coproduct[D :+: C :+: CNil](2.0)
val isd = Coproduct[I :+: S :+: D :+: CNil](1)
val isdc = Coproduct[I :+: S :+: D :+: C :+: CNil](2.0)
// Value-level variant: drop(n) with a literal Int.
{
val r1 = in1.drop(0)
assertTypedEquals[Option[I :+: CNil]](Some(in1), r1)
val r2 = is.drop(0)
assertTypedEquals[Option[I :+: S :+: CNil]](Some(is), r2)
val r3 = in1.drop(1)
assertTypedEquals[Option[CNil]](None, r3)
// The held Int is in the dropped prefix -> None, but the suffix type remains.
val r4 = is.drop(1)
assertTypedEquals[Option[S :+: CNil]](None, r4)
val r5 = isd.drop(1)
assertTypedEquals[Option[S :+: D :+: CNil]](None, r5)
// Cannot drop 2 elements out of a coproduct of length 1
illTyped(""" in1.drop(2) """)
val r7 = is.drop(2)
assertTypedEquals[Option[CNil]](None, r7)
val r8 = isd.drop(2)
assertTypedEquals[Option[D :+: CNil]](None, r8)
// isdc holds a Double, which survives dropping the first two alternatives.
val r9 = isdc.drop(2)
assertTypedEquals[Option[D :+: C :+: CNil]](Some(dc), r9)
}
// Type-level variant: drop[N] with a shapeless Nat; mirrors the cases above.
{
val r1 = in1.drop[_0]
assertTypedEquals[Option[I :+: CNil]](Some(in1), r1)
val r2 = is.drop[_0]
assertTypedEquals[Option[I :+: S :+: CNil]](Some(is), r2)
val r3 = in1.drop[_1]
assertTypedEquals[Option[CNil]](None, r3)
val r4 = is.drop[_1]
assertTypedEquals[Option[S :+: CNil]](None, r4)
val r5 = isd.drop[_1]
assertTypedEquals[Option[S :+: D :+: CNil]](None, r5)
// Cannot drop 2 elements out of a coproduct of length 1
illTyped(""" in1.drop[_2] """)
val r7 = is.drop[_2]
assertTypedEquals[Option[CNil]](None, r7)
val r8 = isd.drop[_2]
assertTypedEquals[Option[D :+: CNil]](None, r8)
val r9 = isdc.drop[_2]
assertTypedEquals[Option[D :+: C :+: CNil]](Some(dc), r9)
}
}
// Exercises removing a single element type from a coproduct:
// - removeElemC[T] rotates T to the head of the coproduct type,
// - removeElem[T] yields Either[T, Remainder]: Left(value) when the held
// value is of type T, Right(remainderCoproduct) otherwise.
// Also checks that a duplicated element type removes only the first
// occurrence (shapeless issue #251).
@Test
def testRemoveElem {
type S = String; type I = Int; type D = Double; type C = Char
val i = Coproduct[I :+: CNil](1)
val is = Coproduct[I :+: S :+: CNil](1)
// Duplicate alternative type, regression fixture for issue #251 below.
val ii = Coproduct[I :+: I :+: CNil](1)
val r1 = i.removeElemC[I]
assertTypedEquals[I :+: CNil](i, r1)
val r2 = i.removeElem[I]
assertTypedEquals[Either[I, CNil]](Left(1), r2)
val r3 = is.removeElemC[I]
assertTypedEquals[I :+: S :+: CNil](is, r3)
val r4 = is.removeElem[I]
assertTypedEquals[Either[I, S :+: CNil]](Left(1), r4)
// Removing S rotates it to the front; the held Int moves to the tail.
val r5 = is.removeElemC[S]
assertTypedEquals[S :+: I :+: CNil](Coproduct[S :+: I :+: CNil](1), r5)
val r6 = is.removeElem[S]
assertTypedEquals[Either[S, I :+: CNil]](Right(i), r6)
// See https://github.com/milessabin/shapeless/issues/251
val r7 = ii.removeElemC[I]
assertTypedEquals[I :+: I :+: CNil](ii, r7)
val r8 = ii.removeElem[I]
assertTypedEquals[Either[I, I :+: CNil]](Left(1), r8)
}
// Exercises Remove#inverse: reconstructing the original coproduct from the
// Either produced by removal. Left(t) re-injects the removed element type
// (at its *first* occurrence), Right(rem) re-embeds the remainder coproduct.
@Test
def testRemoveInverse = {
type S = String; type I = Int; type D = Double; type C = Char
val i = Coproduct[I :+: CNil](1)
val is = Coproduct[I :+: S :+: CNil](1)
val is0 = Coproduct[I :+: S :+: CNil]("a")
val iis = Coproduct[I :+: S :+: I :+: CNil](2)
val iis0 = Coproduct[I :+: S :+: I :+: CNil]("b")
val u1 = Remove[I :+: CNil, I]
val r1 = u1.inverse(Left(1))
assertTypedEquals[I :+: CNil](i, r1)
val u2 = Remove[I :+: S :+: CNil, I]
val r2 = u2.inverse(Left(1))
assertTypedEquals[I :+: S :+: CNil](is, r2)
val r2_0 = u2.inverse(Right(Inl("a")))
assertTypedEquals[I :+: S :+: CNil](is0, r2_0)
// With a duplicated I, Remove targets the first occurrence, so Left(2)
// maps back to the head position (contrast with testRemoveLastInverse).
val u3 = Remove[I :+: S :+: I :+: CNil, I]
val r3 = u3.inverse(Left(2))
assertTypedEquals[I :+: S :+: I :+: CNil](iis, r3)
val r3_0 = u3.inverse(Right(Inl("b")))
assertTypedEquals[I :+: S :+: I :+: CNil](iis0, r3_0)
}
// Exercises RemoveLast#inverse: like Remove#inverse, but the *last*
// occurrence of the element type is targeted, so with a duplicated I the
// reconstructed injection lands in the final I position.
@Test
def testRemoveLastInverse = {
type S = String; type I = Int; type D = Double; type C = Char
val i = Coproduct[I :+: CNil](1)
val is = Coproduct[I :+: S :+: CNil](1)
val is0 = Coproduct[I :+: S :+: CNil]("a")
// Explicitly injected into the *last* I alternative (Inr(Inr(Inl(2)))).
val iis: I :+: S :+: I :+: CNil = Inr(Inr(Inl(2)))
val iis0 = Coproduct[I :+: S :+: I :+: CNil]("b")
val u1 = RemoveLast[I :+: CNil, I]
val r1 = u1.inverse(Left(1))
assertTypedEquals[I :+: CNil](i, r1)
val u2 = RemoveLast[I :+: S :+: CNil, I]
val r2 = u2.inverse(Left(1))
assertTypedEquals[I :+: S :+: CNil](is, r2)
val r2_0 = u2.inverse(Right(Inl("a")))
assertTypedEquals[I :+: S :+: CNil](is0, r2_0)
// These two are different from testRemoveInverse
val u3 = RemoveLast[I :+: S :+: I :+: CNil, I]
val r3 = u3.inverse(Left(2))
assertTypedEquals[I :+: S :+: I :+: CNil](iis, r3)
val r3_0 = u3.inverse(Right(Inr(Inl("b"))))
assertTypedEquals[I :+: S :+: I :+: CNil](iis0, r3_0)
}
// Purely type-level check: ToHList converts a coproduct type into the HList
// with the same element types, in the same order. No runtime assertion is
// needed; the implicitly[=:=] summons fail compilation on mismatch.
@Test
def testToHList {
type CISB = Int :+: String :+: Boolean :+: CNil
type PISBa = Int :: String :: Boolean :: HNil
type PISBb = the.`ToHList[CISB]`.Out
implicitly[PISBa =:= PISBb]
}
// Exercises embed/deembed: injecting a coproduct into a wider coproduct type
// (embed) and projecting it back (deembed). Verifies round trips, rejects
// embeddings into coproducts missing an alternative, and covers duplicated
// alternatives where the exact injection position must be preserved
// (shapeless issue #253).
@Test
def testEmbedDeembed {
type S1 = Int :+: CNil
type S2 = Int :+: String :+: CNil
type S3 = Int :+: String :+: Boolean :+: CNil
type S4 = String :+: Boolean :+: CNil
type S5 = Int :+: Int :+: Int :+: CNil
val c1_0 = Coproduct[S1](5)
val c1_1 = c1_0.embed[S2]
assertTypedEquals[S2](c1_1, Coproduct[S2](5))
assertTypedEquals[S1](c1_0, c1_1.deembed[S1].right.get)
val c1_2 = c1_0.embed[S3]
assertTypedEquals[S3](c1_2, Coproduct[S3](5))
assertTypedEquals[S1](c1_0, c1_2.deembed[S1].right.get)
val c2_0 = Coproduct[S2]("toto")
val c2 = c2_0.embed[S3]
assertTypedEquals[S3](c2, Coproduct[S3]("toto"))
assertTypedEquals[S2](c2_0, c2.deembed[S2].right.get)
// S4 has no Int alternative, so embedding an S1 must not compile.
illTyped("Coproduct[S1](5).embed[S4]")
// See https://github.com/milessabin/shapeless/issues/253
illTyped("Coproduct[S5](3).embed[S1]")
// See https://github.com/milessabin/shapeless/issues/253#issuecomment-59648119
{
type II = Int :+: Int :+: CNil
type IDI = Int :+: Double :+: Int :+: CNil
// Same value (1), injected at different positions: positions must survive
// an embed/deembed round trip and the two must stay distinguishable.
val c1: II = Inr(Inl(1))
val c2: II = Inl(1)
val c1_0 = c1.embed[IDI].deembed[II].right.get
val c2_0 = c2.embed[IDI].deembed[II].right.get
assertTypedEquals[II](c1, c1_0)
assertTypedEquals[II](c2, c2_0)
assert(c2 != c1_0)
}
}
// Exercises the Coproduct.`...`.T string-literal type selector, which builds
// a coproduct type from a comma-separated type list. Covers the empty list
// (CNil), standard types, singleton/literal types (via narrow), and mixes of
// the two; typed(...) asserts each injection is accepted at the built type.
@Test
def testCoproductTypeSelector {
import syntax.singleton._
// Empty selector produces CNil.
{
type C = Coproduct.` `.T
implicitly[C =:= CNil]
}
{
type C = Coproduct.`Int`.T
typed[C](Inl(23))
}
{
type C = Coproduct.`Int, String`.T
typed[C](Inl(23))
typed[C](Inr(Inl("foo")))
}
{
type C = Coproduct.`Int, String, Boolean`.T
typed[C](Inl(23))
typed[C](Inr(Inl("foo")))
typed[C](Inr(Inr(Inl(true))))
}
// Literal types
{
type C = Coproduct.`2`.T
typed[C](Inl(2.narrow))
}
{
type C = Coproduct.`2, "a", true`.T
typed[C](Inl(2.narrow))
typed[C](Inr(Inl("a".narrow)))
typed[C](Inr(Inr(Inl(true.narrow))))
}
// A different literal value (3) must not inhabit the singleton type of 2.
{
type C = Coproduct.`2`.T
illTyped(""" typed[C](Inl(3.narrow)) """)
()
}
// Mix of standard and literal types
{
type C = Coproduct.`2, String, true`.T
typed[C](Inl(2.narrow))
typed[C](Inr(Inl("a")))
typed[C](Inr(Inr(Inl(true.narrow))))
}
}
// Exercises Reify, which materialises a coproduct of singleton/literal types
// as an HList of their values. Reification must fail to compile for
// coproducts containing non-singleton types (String, Int).
@Test
def testReify {
import syntax.singleton._
assertTypedEquals(HNil, Reify[CNil].apply)
val s1 = Coproduct.`'a`
assertTypedEquals('a.narrow :: HNil, Reify[s1.T].apply)
val s2 = Coproduct.`'a, 1, "b", true`
assertEquals('a.narrow :: 1.narrow :: "b".narrow :: true.narrow :: HNil, Reify[s2.T].apply)
illTyped(""" Reify[String :+: Int :+: CNil] """)
illTyped(""" Reify[String :+: Coproduct.`'a, 1, "b", true`.T] """)
}
// Exercises LiftAll over coproducts: summon an instance of type class F for
// every alternative, collected into an HList. Resolution must fail when any
// alternative (Long here) lacks an instance.
@Test
def testLiftAll {
trait F[A]
implicit object FInt extends F[Int]
implicit object FString extends F[String]
assertEquals(HNil, implicitly[LiftAll[F, CNil]].instances)
assertEquals(FInt :: HNil, implicitly[LiftAll[F, Int :+: CNil]].instances)
assertEquals(FString :: FInt :: HNil, implicitly[LiftAll[F, String :+: Int :+: CNil]].instances)
illTyped("implicitly[LiftAll[F, Long :+: String :+: Int :+: CNil]]")
// The value-directed LiftAll[F](coproduct) summoner must behave the same.
assertEquals(FInt :: HNil, LiftAll[F](Coproduct[Int :+: CNil](1)).instances)
}
// Exercises IsCCons#cons: rebuilding a coproduct from Either[Head, Tail] —
// Left(head) becomes Inl, Right(tail) is wrapped in Inr.
@Test
def testIsCCons = {
val isCCons = IsCCons[Int :+: String :+: CNil]
assertEquals(Inl(23), isCCons.cons(Left(23)))
assertEquals(Inr(Inl("bar")), isCCons.cons(Right(Inl("bar"))))
}
// Exercises the isomorphism between a coproduct and right-nested Eithers:
// Coproduct#toEither and (via syntax.std.either) Either#toCoproduct must be
// mutually inverse for every alternative of the coproduct.
@Test
def testToEither = {
type ISBD = Int :+: String :+: Boolean :+: Double :+: CNil
// One coproduct value per alternative.
val i = Coproduct[ISBD](2)
val s = Coproduct[ISBD]("abc")
val b = Coproduct[ISBD](true)
val d = Coproduct[ISBD](3.0)
// The corresponding right-nested Either encoding.
type E = Either[Int, Either[String, Either[Boolean, Double]]]
val ei: E = Left(2)
val es: E = Right(Left("abc"))
val eb: E = Right(Right(Left(true)))
val ed: E = Right(Right(Right(3.0)))
assertEquals(i.toEither, ei)
assertEquals(s.toEither, es)
assertEquals(b.toEither, eb)
assertEquals(d.toEither, ed)
import syntax.std.either._
assertEquals(ei.toCoproduct, i)
assertEquals(es.toCoproduct, s)
assertEquals(eb.toCoproduct, b)
assertEquals(ed.toCoproduct, d)
// Round trips in both directions must be identities.
assertEquals(i.toEither.toCoproduct, i)
assertEquals(s.toEither.toCoproduct, s)
assertEquals(b.toEither.toCoproduct, b)
assertEquals(d.toEither.toCoproduct, d)
assertEquals(ei.toCoproduct.toEither, ei)
assertEquals(es.toCoproduct.toEither, es)
assertEquals(eb.toCoproduct.toEither, eb)
assertEquals(ed.toCoproduct.toEither, ed)
}
// Exercises Coproduct.runtimeInject: injecting an Any into a coproduct by
// runtime type test, yielding Some at the first matching alternative and
// None when no alternative matches (including null). Injecting into CNil is
// statically rejected.
@Test
def runtimeInject = {
import syntax.inject._
val foo1 = Coproduct.runtimeInject[ISB](23: Any)
val foo2 = Coproduct.runtimeInject[ISB]("foo": Any)
val foo3 = Coproduct.runtimeInject[ISB](true: Any)
// A Double is not an alternative of ISB, so injection yields None.
val foo4 = Coproduct.runtimeInject[ISB](1.345: Any)
val foo5 = Coproduct.runtimeInject[ISB](null: Any)
assertTypedEquals[Option[ISB]](Option(Inl(23)), foo1)
assertTypedEquals[Option[ISB]](Option(Inr(Inl("foo"))), foo2)
assertTypedEquals[Option[ISB]](Option(Inr(Inr(Inl(true)))), foo3)
assertTypedEquals[Option[ISB]](Option.empty[ISB], foo4)
assertTypedEquals[Option[ISB]](Option.empty[ISB], foo5)
illTyped("Coproduct.runtimeInject[CNil](23: Any)")
// The postfix syntax extension must agree with the companion method.
assertTypedEquals[Option[ISB]](foo3, true.runtimeInject[ISB])
}
// Exercises the value.inject[C] syntax: static injection of a value into the
// matching alternative of a coproduct, with a compile error when the value's
// type is not an alternative.
@Test
def testInjectSyntax {
type ISBD = Int :+: String :+: Boolean :+: Double :+: CNil
import syntax.inject._
val i = 1.inject[ISBD]
assertEquals(Inl(1), i)
val b = true.inject[ISBD]
assertEquals(Inr(Inr(Inl(true))), b)
illTyped("1.inject[String :+: CNil]")
typed[ISBD](1.inject[ISBD])
}
}
// Compile-time regression fixture, kept outside the test class on purpose:
// object Stuff's body only needs to *compile* to prove that Coproduct#select
// resolves correctly in this package layout.
package CoproductTestAux {
// See https://github.com/milessabin/shapeless/issues/328
case class Foo(msg: String)
case class Bar(msg: String)
object Stuff {
type Blah = Foo :+: Bar :+: Int :+: CNil
val t = Coproduct[Blah](Foo("hi"))
// Result is discarded: successful implicit resolution is the assertion.
t.select[Foo]
}
}
| rorygraves/perf_tester | corpus/shapeless/src/test/scala/shapeless/coproduct.scala | Scala | apache-2.0 | 60,283 |
/*
* A real-time collaborative tool to develop files over the network.
* Copyright (C) 2010 Mauro Ciancio and Leandro Gilioli
* {maurociancio,legilioli} at gmail dot com
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package ar.noxit.paralleleditor.kernel.exceptions
/** Raised when a client attempts to register a username that is already in use.
 *
 * @param message human-readable description of the conflict; also propagated
 *                as the underlying [[RuntimeException]] message
 */
class UsernameAlreadyExistsException(val message: String) extends RuntimeException(message)
| maurociancio/parallel-editor | src/parallel-editor-kernel/src/main/scala/ar/noxit/paralleleditor/kernel/exceptions/UsernameAlreadyExistsException.scala | Scala | gpl-3.0 | 1,003 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package src.main.scala
import org.apache.log4j.Logger
import org.apache.log4j.Level
import com.google.common.primitives.UnsignedBytes
import org.apache.spark.SparkContext._
import org.apache.spark._
import org.apache.spark.{SparkConf, SparkContext}
/**
* This file is modified from com.github.ehiggs.spark.terasort.*
*/
/**
 * Spark driver for TeraSort: reads the Hadoop-format input, range-partitions
 * the records with [[TeraSortPartitioner]], sorts them by key and writes the
 * result back in TeraSort output format.
 *
 * Usage: terasortApp [input-file] [output-file]
 */
object terasortApp {
// Keys are raw byte arrays; compare them as unsigned bytes so the sort order
// matches the TeraSort specification. Picked up (via Ordering.comparatorToOrdering)
// as the implicit Ordering for sortByKey.
implicit val caseInsensitiveOrdering: java.util.Comparator[Array[Byte]] =
UnsignedBytes.lexicographicalComparator
def main(args: Array[String]): Unit = {
// Silence noisy framework logging; keep Spark warnings visible.
Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
Logger.getLogger("org.eclipse.jetty.server").setLevel(Level.OFF)
if (args.length < 2) {
// Usage errors go to stderr and exit with a non-zero status
// (the original exited with 0, reporting success on bad invocation).
Console.err.println("Usage: [input-file] [output-file]")
System.exit(1)
}
// Process command line arguments
val inputFile = args(0)
val outputFile = args(1)
val conf = new SparkConf()
.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
.setAppName("TeraSort")
val sc = new SparkContext(conf)
try {
// Records are (key, value) byte-array pairs in TeraSort's Hadoop format.
val dataset = sc.newAPIHadoopFile[Array[Byte], Array[Byte], TeraInputFormat](inputFile)
// Range-partition first so each partition can be sorted independently,
// yielding a globally sorted output across partition files.
val sorted = dataset.partitionBy(new TeraSortPartitioner(dataset.partitions.length)).sortByKey()
sorted.saveAsNewAPIHadoopFile[TeraOutputFormat](outputFile)
} finally {
// Release cluster resources even if the job fails.
sc.stop()
}
}
}
| ibmsoe/spark-bench | Terasort/src/main/scala/terasortApp.scala | Scala | apache-2.0 | 2,221 |
package io.taig.android.util.syntax
// Aggregates every util syntax extension so callers can mix in the trait or
// `import ...syntax.all._` to get all extensions at once.
trait all extends bitmap with bundle with `class` with file with string
// Ready-made importable instance of the aggregated syntax.
object all extends all
| Taig/Toolbelt | util/src/main/scala/io/taig/android/util/syntax/all.scala | Scala | mit | 133 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.loadBalancer.test
import akka.actor.ActorRef
import akka.actor.ActorRefFactory
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.testkit.TestProbe
import common.{StreamLogging, WhiskProperties}
import java.nio.charset.StandardCharsets
import org.apache.kafka.clients.producer.RecordMetadata
import org.apache.kafka.common.TopicPartition
import org.junit.runner.RunWith
import org.scalamock.scalatest.MockFactory
import org.scalatest.junit.JUnitRunner
import org.scalatest.FlatSpec
import org.scalatest.Matchers
import scala.concurrent.Await
import scala.concurrent.Future
import scala.concurrent.duration._
import org.apache.openwhisk.common.Logging
import org.apache.openwhisk.common.NestedSemaphore
import org.apache.openwhisk.core.entity.FullyQualifiedEntityName
import org.apache.openwhisk.common.TransactionId
import org.apache.openwhisk.core.WhiskConfig
import org.apache.openwhisk.core.connector.ActivationMessage
import org.apache.openwhisk.core.connector.CompletionMessage
import org.apache.openwhisk.core.connector.Message
import org.apache.openwhisk.core.connector.MessageConsumer
import org.apache.openwhisk.core.connector.MessageProducer
import org.apache.openwhisk.core.connector.MessagingProvider
import org.apache.openwhisk.core.entity.ActivationId
import org.apache.openwhisk.core.entity.BasicAuthenticationAuthKey
import org.apache.openwhisk.core.entity.ControllerInstanceId
import org.apache.openwhisk.core.entity.EntityName
import org.apache.openwhisk.core.entity.EntityPath
import org.apache.openwhisk.core.entity.ExecManifest
import org.apache.openwhisk.core.entity.Identity
import org.apache.openwhisk.core.entity.InvokerInstanceId
import org.apache.openwhisk.core.entity.MemoryLimit
import org.apache.openwhisk.core.entity.Namespace
import org.apache.openwhisk.core.entity.Secret
import org.apache.openwhisk.core.entity.Subject
import org.apache.openwhisk.core.entity.UUID
import org.apache.openwhisk.core.entity.WhiskActionMetaData
import org.apache.openwhisk.core.entity.test.ExecHelpers
import org.apache.openwhisk.core.entity.ByteSize
import org.apache.openwhisk.core.entity.size._
import org.apache.openwhisk.core.entity.test.ExecHelpers
import org.apache.openwhisk.core.loadBalancer.FeedFactory
import org.apache.openwhisk.core.loadBalancer.InvokerPoolFactory
import org.apache.openwhisk.core.loadBalancer.InvokerState._
import org.apache.openwhisk.core.loadBalancer._
/**
* Unit tests for the ContainerPool object.
*
* These tests test only the "static" methods "schedule" and "remove"
* of the ContainerPool object.
*/
@RunWith(classOf[JUnitRunner])
class ShardingContainerPoolBalancerTests
extends FlatSpec
with Matchers
with StreamLogging
with ExecHelpers
with MockFactory {
behavior of "ShardingContainerPoolBalancerState"
// Default memory capacity advertised by the fixture invokers below.
val defaultUserMemory: ByteSize = 1024.MB
// Fixture factories producing an InvokerHealth in each of the three states.
def healthy(i: Int, memory: ByteSize = defaultUserMemory) =
new InvokerHealth(InvokerInstanceId(i, userMemory = memory), Healthy)
def unhealthy(i: Int) = new InvokerHealth(InvokerInstanceId(i, userMemory = defaultUserMemory), Unhealthy)
def offline(i: Int) = new InvokerHealth(InvokerInstanceId(i, userMemory = defaultUserMemory), Offline)
// One fresh semaphore per invoker, each with `max` available permits.
def semaphores(count: Int, max: Int): IndexedSeq[NestedSemaphore[FullyQualifiedEntityName]] =
IndexedSeq.fill(count)(new NestedSemaphore[FullyQualifiedEntityName](max))
// Balancer config helper; managed fraction defaults to the complement of the
// blackbox fraction unless given explicitly.
def lbConfig(blackboxFraction: Double, managedFraction: Option[Double] = None) =
ShardingContainerPoolBalancerConfig(managedFraction.getOrElse(1.0 - blackboxFraction), blackboxFraction, 1)
it should "update invoker's state, growing the slots data and keeping valid old data" in {
// start empty
val slots = 10
val memoryPerSlot = MemoryLimit.minMemory
val memory = memoryPerSlot * slots
val state = ShardingContainerPoolBalancerState()(lbConfig(0.5))
state.invokers shouldBe 'empty
state.blackboxInvokers shouldBe 'empty
state.managedInvokers shouldBe 'empty
state.invokerSlots shouldBe 'empty
state.managedStepSizes shouldBe Seq.empty
state.blackboxStepSizes shouldBe Seq.empty
// apply one update, verify everything is updated accordingly
val update1 = IndexedSeq(healthy(0, memory))
state.updateInvokers(update1)
state.invokers shouldBe update1
state.blackboxInvokers shouldBe update1 // fallback to at least one
state.managedInvokers shouldBe update1 // fallback to at least one
state.invokerSlots should have size update1.size
state.invokerSlots.head.availablePermits shouldBe memory.toMB
state.managedStepSizes shouldBe Seq(1)
state.blackboxStepSizes shouldBe Seq(1)
// aquire a slot to alter invoker state
state.invokerSlots.head.tryAcquire(memoryPerSlot.toMB.toInt)
state.invokerSlots.head.availablePermits shouldBe (memory - memoryPerSlot).toMB.toInt
// apply second update, growing the state
val update2 =
IndexedSeq(healthy(0, memory), healthy(1, memory * 2))
state.updateInvokers(update2)
state.invokers shouldBe update2
state.managedInvokers shouldBe IndexedSeq(update2.head)
state.blackboxInvokers shouldBe IndexedSeq(update2.last)
state.invokerSlots should have size update2.size
state.invokerSlots.head.availablePermits shouldBe (memory - memoryPerSlot).toMB.toInt
state.invokerSlots(1).tryAcquire(memoryPerSlot.toMB.toInt)
state.invokerSlots(1).availablePermits shouldBe memory.toMB * 2 - memoryPerSlot.toMB
state.managedStepSizes shouldBe Seq(1)
state.blackboxStepSizes shouldBe Seq(1)
}
it should "allow managed partition to overlap with blackbox for small N" in {
Seq(0.1, 0.2, 0.3, 0.4, 0.5).foreach { bf =>
val state = ShardingContainerPoolBalancerState()(lbConfig(bf))
(1 to 100).toSeq.foreach { i =>
state.updateInvokers((1 to i).map(_ => healthy(1, MemoryLimit.stdMemory)))
withClue(s"invoker count $bf $i:") {
state.managedInvokers.length should be <= i
state.blackboxInvokers should have size Math.max(1, (bf * i).toInt)
val m = state.managedInvokers.length
val b = state.blackboxInvokers.length
bf match {
// written out explicitly for clarity
case 0.1 if i < 10 => m + b shouldBe i + 1
case 0.2 if i < 5 => m + b shouldBe i + 1
case 0.3 if i < 4 => m + b shouldBe i + 1
case 0.4 if i < 3 => m + b shouldBe i + 1
case 0.5 if i < 2 => m + b shouldBe i + 1
case _ => m + b shouldBe i
}
}
}
}
}
it should "return the same pools if managed- and blackbox-pools are overlapping" in {
val state = ShardingContainerPoolBalancerState()(lbConfig(1.0, Some(1.0)))
(1 to 100).foreach { i =>
state.updateInvokers((1 to i).map(_ => healthy(1, MemoryLimit.stdMemory)))
}
state.managedInvokers should have size 100
state.blackboxInvokers should have size 100
state.managedInvokers shouldBe state.blackboxInvokers
}
it should "update the cluster size, adjusting the invoker slots accordingly" in {
val slots = 10
val memoryPerSlot = MemoryLimit.minMemory
val memory = memoryPerSlot * slots
val state = ShardingContainerPoolBalancerState()(lbConfig(0.5))
state.updateInvokers(IndexedSeq(healthy(0, memory), healthy(1, memory * 2)))
state.invokerSlots.head.tryAcquire(memoryPerSlot.toMB.toInt)
state.invokerSlots.head.availablePermits shouldBe (memory - memoryPerSlot).toMB
state.invokerSlots(1).tryAcquire(memoryPerSlot.toMB.toInt)
state.invokerSlots(1).availablePermits shouldBe memory.toMB * 2 - memoryPerSlot.toMB
state.updateCluster(2)
state.invokerSlots.head.availablePermits shouldBe memory.toMB / 2 // state reset + divided by 2
state.invokerSlots(1).availablePermits shouldBe memory.toMB
}
it should "fallback to a size of 1 (alone) if cluster size is < 1" in {
val slots = 10
val memoryPerSlot = MemoryLimit.minMemory
val memory = memoryPerSlot * slots
val state = ShardingContainerPoolBalancerState()(lbConfig(0.5))
state.updateInvokers(IndexedSeq(healthy(0, memory)))
state.invokerSlots.head.availablePermits shouldBe memory.toMB
state.updateCluster(2)
state.invokerSlots.head.availablePermits shouldBe memory.toMB / 2
state.updateCluster(0)
state.invokerSlots.head.availablePermits shouldBe memory.toMB
state.updateCluster(-1)
state.invokerSlots.head.availablePermits shouldBe memory.toMB
}
it should "set the threshold to 1 if the cluster is bigger than there are slots on 1 invoker" in {
val slots = 10
val memoryPerSlot = MemoryLimit.minMemory
val memory = memoryPerSlot * slots
val state = ShardingContainerPoolBalancerState()(lbConfig(0.5))
state.updateInvokers(IndexedSeq(healthy(0, memory)))
state.invokerSlots.head.availablePermits shouldBe memory.toMB
state.updateCluster(20)
state.invokerSlots.head.availablePermits shouldBe MemoryLimit.minMemory.toMB
}
// Shared fixtures for the "schedule" tests below.
val namespace = EntityPath("testspace")
val name = EntityName("testname")
val fqn = FullyQualifiedEntityName(namespace, name)
behavior of "schedule"
implicit val transId = TransactionId.testing
// With no invokers at all, scheduling cannot pick anything.
it should "return None on an empty invoker list" in {
ShardingContainerPoolBalancer.schedule(
1,
fqn,
IndexedSeq.empty,
IndexedSeq.empty,
MemoryLimit.minMemory.toMB.toInt,
index = 0,
step = 2) shouldBe None
}
it should "return None if no invokers are healthy" in {
val invokerCount = 3
val invokerSlots = semaphores(invokerCount, 3)
val invokers = (0 until invokerCount).map(unhealthy)
ShardingContainerPoolBalancer.schedule(
1,
fqn,
invokers,
invokerSlots,
MemoryLimit.minMemory.toMB.toInt,
index = 0,
step = 2) shouldBe None
}
it should "choose the first available invoker, jumping in stepSize steps, falling back to randomized scheduling once all invokers are full" in {
val invokerCount = 3
val slotPerInvoker = 3
val invokerSlots = semaphores(invokerCount + 3, slotPerInvoker) // needs to be offset by 3 as well
val invokers = (0 until invokerCount).map(i => healthy(i + 3)) // offset by 3 to asset InstanceId is returned
val expectedResult = Seq(3, 3, 3, 5, 5, 5, 4, 4, 4)
val result = expectedResult.map { _ =>
ShardingContainerPoolBalancer
.schedule(1, fqn, invokers, invokerSlots, 1, index = 0, step = 2)
.get
._1
.toInt
}
result shouldBe expectedResult
val bruteResult = (0 to 100).map { _ =>
ShardingContainerPoolBalancer
.schedule(1, fqn, invokers, invokerSlots, 1, index = 0, step = 2)
.get
}
bruteResult.map(_._1.toInt) should contain allOf (3, 4, 5)
bruteResult.map(_._2) should contain only true
}
it should "ignore unhealthy or offline invokers" in {
val invokers = IndexedSeq(healthy(0), unhealthy(1), offline(2), healthy(3))
val slotPerInvoker = 3
val invokerSlots = semaphores(invokers.size, slotPerInvoker)
val expectedResult = Seq(0, 0, 0, 3, 3, 3)
val result = expectedResult.map { _ =>
ShardingContainerPoolBalancer
.schedule(1, fqn, invokers, invokerSlots, 1, index = 0, step = 1)
.get
._1
.toInt
}
result shouldBe expectedResult
// more schedules will result in randomized invokers, but the unhealthy and offline invokers should not be part
val bruteResult = (0 to 100).map { _ =>
ShardingContainerPoolBalancer
.schedule(1, fqn, invokers, invokerSlots, 1, index = 0, step = 1)
.get
}
bruteResult.map(_._1.toInt) should contain allOf (0, 3)
bruteResult.map(_._1.toInt) should contain noneOf (1, 2)
bruteResult.map(_._2) should contain only true
}
it should "only take invokers that have enough free slots" in {
val invokerCount = 3
// Each invoker has 4 slots
val invokerSlots = semaphores(invokerCount, 4)
val invokers = (0 until invokerCount).map(i => healthy(i))
// Ask for three slots -> First invoker should be used
ShardingContainerPoolBalancer
.schedule(1, fqn, invokers, invokerSlots, 3, index = 0, step = 1)
.get
._1
.toInt shouldBe 0
// Ask for two slots -> Second invoker should be used
ShardingContainerPoolBalancer
.schedule(1, fqn, invokers, invokerSlots, 2, index = 0, step = 1)
.get
._1
.toInt shouldBe 1
// Ask for 1 slot -> First invoker should be used
ShardingContainerPoolBalancer
.schedule(1, fqn, invokers, invokerSlots, 1, index = 0, step = 1)
.get
._1
.toInt shouldBe 0
// Ask for 4 slots -> Third invoker should be used
ShardingContainerPoolBalancer
.schedule(1, fqn, invokers, invokerSlots, 4, index = 0, step = 1)
.get
._1
.toInt shouldBe 2
// Ask for 2 slots -> Second invoker should be used
ShardingContainerPoolBalancer
.schedule(1, fqn, invokers, invokerSlots, 2, index = 0, step = 1)
.get
._1
.toInt shouldBe 1
invokerSlots.foreach(_.availablePermits shouldBe 0)
}
behavior of "pairwiseCoprimeNumbersUntil"
it should "return an empty set for malformed inputs" in {
ShardingContainerPoolBalancer.pairwiseCoprimeNumbersUntil(0) shouldBe Seq.empty
ShardingContainerPoolBalancer.pairwiseCoprimeNumbersUntil(-1) shouldBe Seq.empty
}
it should "return all coprime numbers until the number given" in {
ShardingContainerPoolBalancer.pairwiseCoprimeNumbersUntil(1) shouldBe Seq(1)
ShardingContainerPoolBalancer.pairwiseCoprimeNumbersUntil(2) shouldBe Seq(1)
ShardingContainerPoolBalancer.pairwiseCoprimeNumbersUntil(3) shouldBe Seq(1, 2)
ShardingContainerPoolBalancer.pairwiseCoprimeNumbersUntil(4) shouldBe Seq(1, 3)
ShardingContainerPoolBalancer.pairwiseCoprimeNumbersUntil(5) shouldBe Seq(1, 2, 3)
ShardingContainerPoolBalancer.pairwiseCoprimeNumbersUntil(9) shouldBe Seq(1, 2, 5, 7)
ShardingContainerPoolBalancer.pairwiseCoprimeNumbersUntil(10) shouldBe Seq(1, 3, 7)
}
behavior of "concurrent actions"
it should "allow concurrent actions to be scheduled to same invoker without affecting memory slots" in {
val invokerCount = 3
// Each invoker has 2 slots, each action has concurrency 3
val slots = 2
val invokerSlots = semaphores(invokerCount, slots)
val concurrency = 3
val invokers = (0 until invokerCount).map(i => healthy(i))
(0 until invokerCount).foreach { i =>
(1 to slots).foreach { s =>
(1 to concurrency).foreach { c =>
ShardingContainerPoolBalancer
.schedule(concurrency, fqn, invokers, invokerSlots, 1, 0, 1)
.get
._1
.toInt shouldBe i
invokerSlots
.lift(i)
.get
.concurrentState(fqn)
.availablePermits shouldBe concurrency - c
}
}
}
}
// Fixtures for the generated activation-batch tests below.
implicit val am = ActorMaterializer()
val config = new WhiskConfig(ExecManifest.requiredProperties)
val invokerMem = 2000.MB
// Intra-container concurrency is only exercised when enabled via system property.
val concurrencyEnabled = Option(WhiskProperties.getProperty("whisk.action.concurrency")).exists(_.toBoolean)
val concurrency = if (concurrencyEnabled) 5 else 1
val actionMem = 256.MB
val actionMetaData =
WhiskActionMetaData(
namespace,
name,
js10MetaData(Some("jsMain"), false),
limits = actionLimits(actionMem, concurrency))
// Containers that fit on one invoker, and the total activations the whole
// pool can hold at once (containers * invokers * per-container concurrency).
val maxContainers = invokerMem.toMB.toInt / actionMetaData.limits.memory.megabytes
val numInvokers = 3
val maxActivations = maxContainers * numInvokers * concurrency
//run a separate test for each variant of 1..n concurrently-ish arriving activations, to exercise:
// - no containers started
// - containers started but no concurrency room
// - no concurrency room and no memory room to launch new containers
//(1 until maxActivations).foreach { i =>
// NOTE(review): the range was narrowed from 1 to 75 — presumably to cut test
// runtime; confirm the skipped small-batch cases are covered elsewhere.
(75 until maxActivations).foreach { i =>
it should s"reflect concurrent processing ${i} state in containerSlots" in {
//each batch will:
// - submit activations concurrently
// - wait for activation submission to messaging system (mostly to detect which invoker was assigned)
// - verify remaining concurrency slots available
// - complete activations concurrently
// - verify concurrency/memory slots are released
testActivationBatch(i)
}
}
// Builds a scalamock MessagingProvider whose producer accepts any send and
// immediately reports success (a fake Kafka RecordMetadata), so the balancer
// under test never touches a real message bus.
def mockMessaging(): MessagingProvider = {
val messaging = stub[MessagingProvider]
val producer = stub[MessageProducer]
val consumer = stub[MessageConsumer]
(messaging
.getProducer(_: WhiskConfig, _: Option[ByteSize])(_: Logging, _: ActorSystem))
.when(*, *, *, *)
.returns(producer)
(messaging
.getConsumer(_: WhiskConfig, _: String, _: String, _: Int, _: FiniteDuration)(_: Logging, _: ActorSystem))
.when(*, *, *, *, *, *, *)
.returns(consumer)
// Every send "succeeds" instantly on a fake topic/partition.
(producer
.send(_: String, _: Message, _: Int))
.when(*, *, *)
.returns(Future.successful(new RecordMetadata(new TopicPartition("fake", 0), 0, 0, 0l, 0l, 0, 0)))
messaging
}
/**
 * End-to-end batch check against a ShardingContainerPoolBalancer with mocked
 * messaging and invoker pool:
 *  1. publishes `numActivations` activations concurrently,
 *  2. verifies that per-invoker concurrency slots reflect the assignments
 *     following the balancer's home-invoker + step-size schedule, and
 *  3. completes every activation and verifies all slots are released.
 */
def testActivationBatch(numActivations: Int): Unit = {
  //setup mock messaging
  val feedProbe = new FeedFactory {
    def createFeed(f: ActorRefFactory, m: MessagingProvider, p: (Array[Byte]) => Future[Unit]) =
      TestProbe().testActor
  }
  val invokerPoolProbe = new InvokerPoolFactory {
    override def createInvokerPool(
      actorRefFactory: ActorRefFactory,
      messagingProvider: MessagingProvider,
      messagingProducer: MessageProducer,
      sendActivationToInvoker: (MessageProducer, ActivationMessage, InvokerInstanceId) => Future[RecordMetadata],
      monitor: Option[ActorRef]): ActorRef =
      TestProbe().testActor
  }
  val balancer =
    new ShardingContainerPoolBalancer(config, ControllerInstanceId("0"), feedProbe, invokerPoolProbe, mockMessaging)
  // All invokers report healthy with identical memory capacity.
  val invokers = IndexedSeq.tabulate(numInvokers) { i =>
    new InvokerHealth(InvokerInstanceId(i, userMemory = invokerMem), Healthy)
  }
  balancer.schedulingState.updateInvokers(invokers)
  val invocationNamespace = EntityName("invocationSpace")
  val fqn = actionMetaData.fullyQualifiedName(true)
  // Recompute the balancer's deterministic schedule (home invoker + step size)
  // so the test can predict which invoker receives each batch of activations.
  val hash =
    ShardingContainerPoolBalancer.generateHash(invocationNamespace, actionMetaData.fullyQualifiedName(false))
  val home = hash % invokers.size
  val stepSizes = ShardingContainerPoolBalancer.pairwiseCoprimeNumbersUntil(invokers.size)
  val stepSize = stepSizes(hash % stepSizes.size)
  val uuid = UUID()
  //initiate activation
  // `.par` publishes concurrently to mimic simultaneous arrivals.
  val published = (0 until numActivations).par.map { _ =>
    val aid = ActivationId.generate()
    val msg = ActivationMessage(
      TransactionId.testing,
      actionMetaData.fullyQualifiedName(true),
      actionMetaData.rev,
      Identity(
        Subject(),
        Namespace(invocationNamespace, uuid),
        BasicAuthenticationAuthKey(uuid, Secret()),
        Set.empty),
      aid,
      ControllerInstanceId("0"),
      blocking = false,
      content = None)
    //send activation to loadbalancer
    aid -> balancer.publish(actionMetaData.toExecutableWhiskAction.get, msg)
  }.toMap
  val activations = published.values
  val ids = published.keys
  //wait for activation submissions
  Await.ready(Future.sequence(activations.toList), 10.seconds)
  val maxActivationsPerInvoker = concurrency * maxContainers
  //verify updated concurrency slots
  // Remaining free permits on a partially filled container: the unused share of
  // the last (partially occupied) concurrency slot group.
  def rem(count: Int) =
    if (count % concurrency > 0) {
      concurrency - (count % concurrency)
    } else {
      0
    }
  //assert available permits per invoker are as expected
  // Walk invokers in schedule order (home, home+step, ...) matching how the
  // balancer spills activations once an invoker is saturated.
  var nextInvoker = home
  ids.toList.grouped(maxActivationsPerInvoker).zipWithIndex.foreach { g =>
    val remaining = rem(g._1.size)
    val concurrentState = balancer.schedulingState._invokerSlots
      .lift(nextInvoker)
      .get
      .concurrentState(fqn)
    concurrentState.availablePermits shouldBe remaining
    concurrentState.counter shouldBe g._1.size
    nextInvoker = (nextInvoker + stepSize) % numInvokers
  }
  //complete all
  val acks = ids.par.map { aid =>
    val invoker = balancer.activationSlots(aid).invokerName
    completeActivation(invoker, balancer, aid)
  }
  Await.ready(Future.sequence(acks.toList), 10.seconds)
  //verify invokers go back to unused state
  invokers.foreach { i =>
    val concurrentState = balancer.schedulingState._invokerSlots
      .lift(i.id.toInt)
      .get
      .concurrentState
      .get(fqn)
    concurrentState shouldBe None
    balancer.schedulingState._invokerSlots.lift(i.id.toInt).map { i =>
      i.availablePermits shouldBe invokerMem.toMB
    }
  }
}
/** Simulates the invoker's completion ack for `aid`, letting the balancer release its slots. */
def completeActivation(invoker: InvokerInstanceId, balancer: ShardingContainerPoolBalancer, aid: ActivationId) = {
  val completion = CompletionMessage(TransactionId.testing, aid, false, invoker)
  val serializedAck = completion.serialize.getBytes(StandardCharsets.UTF_8)
  balancer.processAcknowledgement(serializedAck)
}
}
| csantanapr/incubator-openwhisk | tests/src/test/scala/org/apache/openwhisk/core/loadBalancer/test/ShardingContainerPoolBalancerTests.scala | Scala | apache-2.0 | 22,042 |
package fpinscala.ch03datastructures
// Minimal algebraic data type for singly linked lists (FP in Scala, chapter 3).
sealed trait List[+A] // `List` data type, parameterized on a type, `A`
case object Nil extends List[Nothing] // A `List` data constructor representing the empty list
/* Another data constructor, representing nonempty lists. Note that `tail` is another `List[A]`,
which may be `Nil` or another `Cons`.
*/
case class Cons[+A](head: A, tail: List[A]) extends List[A]
object List { // `List` companion object. Contains functions for creating and working with lists.

  /** Sums a list of integers by straightforward (non-tail) recursion. */
  def sum(ints: List[Int]): Int = ints match {
    case Nil => 0 // The sum of the empty list is 0.
    case Cons(x, xs) => x + sum(xs) // The sum of a list starting with `x` is `x` plus the sum of the rest of the list.
  }

  /** Multiplies a list of doubles, short-circuiting to 0.0 when a 0.0 element is seen. */
  def product(ds: List[Double]): Double = ds match {
    case Nil => 1.0
    case Cons(0.0, _) => 0.0
    case Cons(x, xs) => x * product(xs)
  }

  /** Variadic constructor: `List(1, 2, 3)` builds `Cons(1, Cons(2, Cons(3, Nil)))`. */
  def apply[A](as: A*): List[A] = // Variadic function syntax
    if (as.isEmpty) Nil
    else Cons(as.head, apply(as.tail: _*))

  // Exercise 3.1: the third case is the first one that matches, so x == 1 + 2 == 3.
  val x = List(1, 2, 3, 4, 5) match {
    case Cons(x, Cons(2, Cons(4, _))) => x
    case Nil => 42
    case Cons(x, Cons(y, Cons(3, Cons(4, _)))) => x + y
    case Cons(h, t) => h + sum(t)
    case _ => 101
  }

  /** Appends `a2` after `a1`; runs in O(length of a1). */
  def append[A](a1: List[A], a2: List[A]): List[A] =
    a1 match {
      case Nil => a2
      case Cons(h, t) => Cons(h, append(t, a2))
    }

  /** Right fold. Not stack-safe on long lists (non-tail recursion). */
  def foldRight[A, B](as: List[A], z: B)(f: (A, B) => B): B = // Utility functions
    as match {
      case Nil => z
      case Cons(x, xs) => f(x, foldRight(xs, z)(f))
    }

  def sum2(ns: List[Int]) =
    foldRight(ns, 0)((x, y) => x + y)

  def product2(ns: List[Double]) =
    foldRight(ns, 1.0)(_ * _) // `_ * _` is more concise notation for `(x,y) => x * y`; see sidebar

  /** Drops the first element; yields `Nil` for an empty list. */
  def tail[A](l: List[A]): List[A] = l match {
    case Cons(h, t) => t
    case _ => Nil
  }

  /** Replaces the head element; yields `Nil` when the list is empty. */
  def setHead[A](l: List[A], h: A): List[A] = l match {
    case Cons(o, t) => Cons(h, t)
    case _ => Nil
  }

  // https://github.com/dabd/fpscala/commit/5adb9c03225143d3c0036c4ec44378ed4a67847a#diff-21e741bb07e57f54026a51f23688ed8bR23
  // Optimised to drop out immediatly if list is Nil, instead of doing the recursion til the end
  /** Drops the first `n` elements, stopping early once the list is exhausted. */
  @annotation.tailrec
  def drop[A](l: List[A], n: Int): List[A] = n match {
    case 0 => l
    case m => if (l == Nil) Nil else drop(tail(l), m - 1)
  }

  /** Drops the longest prefix whose elements all satisfy `f`. */
  @annotation.tailrec
  def dropWhile[A](l: List[A], f: A => Boolean): List[A] = l match {
    case Cons(h, t) if f(h) => dropWhile(t, f)
    case _ => l
  }

  /** Everything but the last element; `Nil` for an empty list. Non-tail-recursive. */
  def init[A](l: List[A]): List[A] = l match {
    case Cons(_, Nil) | Nil => Nil
    case Cons(h, t) => Cons(h, init(t))
  }

  def length[A](l: List[A]): Int =
    foldRight(l, 0)((el, res) => res + 1)

  /** Tail-recursive (stack-safe) left fold. */
  @annotation.tailrec
  def foldLeft[A, B](l: List[A], z: B)(f: (B, A) => B): B = l match {
    case Cons(h, t) => foldLeft(t, f(z, h))(f)
    case Nil => z
  }

  def sum3(ns: List[Int]) =
    foldLeft(ns, 0)(_ + _)

  def product3(ns: List[Double]) =
    foldLeft(ns, 1.0)(_ * _)

  def length3[A](l: List[A]): Int =
    foldLeft(l, 0)((res, _) => res + 1)

  def reverse[A](l: List[A]): List[A] =
    foldLeft(l, Nil: List[A])((res, el) => Cons(el, res))

  /** foldLeft expressed via foldRight: fold the reversed list with the flipped combiner. */
  def foldLeft2[A, B](l: List[A], z: B)(f: (B, A) => B): B = {
    def g: (A, B) => B = (a, b) => f(b, a)
    foldRight(reverse(l), z)(g)
  }

  /** foldRight expressed via (stack-safe) foldLeft on the reversed list. */
  def foldRight2[A, B](l: List[A], z: B)(f: (A, B) => B): B = {
    def g: (B, A) => B = (b, a) => f(a, b)
    foldLeft(reverse(l), z)(g)
  }

  def append2[A](a1: List[A], a2: List[A]): List[A] =
    foldRight(a1, a2)(Cons(_, _))

  /** Flattens one level of nesting (concatenates all inner lists in order). */
  def listOfLists[A](l: List[List[A]]): List[A] =
    foldLeft(l, Nil: List[A])((res, el) => append(res, el))
  // foldRight(l, Nil:List[A])((el, res) => append(el, res))

  def addOne(l: List[Int]): List[Int] = l match {
    case Cons(h, t) => Cons(h + 1, addOne(t))
    case _ => Nil
  }

  def listToString(l: List[Double]): List[String] = l match {
    case Cons(h, t) => Cons(h.toString, listToString(t))
    case _ => Nil
  }

  def map[A, B](l: List[A])(f: A => B): List[B] = l match {
    case Cons(h, t) => Cons(f(h), map(t)(f))
    case _ => Nil
  }

  def filter_firstVersion[A](as: List[A])(f: A => Boolean): List[A] = as match {
    case Cons(h, t) => if (f(h)) Cons(h, filter(t)(f)) else filter(t)(f)
    case _ => Nil
  }

  def flatMap[A, B](as: List[A])(f: A => List[B]): List[B] = as match {
    case Cons(h, t) => append(f(h), flatMap(t)(f))
    case _ => Nil
  }

  /** filter via flatMap: keep elements as singleton lists, drop the rest as empties. */
  def filter[A](as: List[A])(f: A => Boolean): List[A] =
    flatMap(as)(a => if (f(a)) List(a) else List())

  /** Element-wise sum; when lengths differ, the leftover tail is kept as-is. */
  def addLists(al: List[Int], bl: List[Int]): List[Int] = (al, bl) match {
    case (Cons(h1, t1), Cons(h2, t2)) => Cons(h1 + h2, addLists(t1, t2))
    case (l1, Nil) => l1
    case (Nil, l2) => l2
  }

  /** Generalized addLists: combine pairwise with `f`; leftover tail kept as-is. */
  def zipWith[A](al: List[A], bl: List[A])(f: (A, A) => A): List[A] = (al, bl) match {
    case (Cons(h1, t1), Cons(h2, t2)) => Cons(f(h1, h2), zipWith(t1, t2)(f))
    case (l1, Nil) => l1
    case (Nil, l2) => l2
  }

  /**
   * True when `sub` occurs contiguously inside `sup`.
   *
   * Fix: the empty list is a subsequence of every list, *including* `Nil`.
   * The previous version guarded with `sup != Nil` first, which made
   * `hasSubsequence(Nil, Nil)` return false.
   */
  def hasSubsequence[A](sup: List[A], sub: List[A]): Boolean = {
    // Does `sup` start with all of `sub`?
    @annotation.tailrec
    def isPrefix(sup: List[A], sub: List[A]): Boolean = (sup, sub) match {
      case (_, Nil) => true
      case (Cons(hsup, tsup), Cons(hsub, tsub)) => if (hsup == hsub) isPrefix(tsup, tsub) else false
      case _ => false
    }
    sub == Nil || (sup != Nil && (isPrefix(sup, sub) || hasSubsequence(tail(sup), sub)))
  }
}
| hugocf/fpinscala | src/main/scala/fpinscala/ch03datastructures/List.scala | Scala | mit | 5,311 |
package com.saikocat.meownificent
import akka.actor.{ActorSystem, Props}
import akka.event.Logging
import akka.io.IO
import spray.can.Http
import com.saikocat.meownificent.configuration.Settings
import com.saikocat.meownificent.service.RootServiceActor
/** Application entry point: boots the actor system and binds the REST service over spray-can. */
object Boot extends App {
  implicit val system = ActorSystem("meownificent-rest")

  val log = Logging(system, getClass)
  val settings = Settings(system)

  log.info("Starting service actor and HTTP server...")

  // Top-level service actor that handles every incoming HTTP request.
  val serviceActor = system.actorOf(
    Props(new RootServiceActor(settings)),
    "meownificent-rest-service")

  // Ask spray-can to bind the service to the configured interface/port.
  IO(Http) ! Http.Bind(serviceActor, settings.interface, settings.port)
}
| saikocat/meownificent | rest/src/main/scala/com/saikocat/meownificent/Boot.scala | Scala | mit | 647 |
//: ----------------------------------------------------------------------------
//: Copyright (C) 2015 Verizon. All Rights Reserved.
//:
//: Licensed under the Apache License, Version 2.0 (the "License");
//: you may not use this file except in compliance with the License.
//: You may obtain a copy of the License at
//:
//: http://www.apache.org/licenses/LICENSE-2.0
//:
//: Unless required by applicable law or agreed to in writing, software
//: distributed under the License is distributed on an "AS IS" BASIS,
//: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//: See the License for the specific language governing permissions and
//: limitations under the License.
//:
//: ----------------------------------------------------------------------------
package funnel
package chemist
import scalaz.Order
import scalaz.std.string._
import scalaz.std.vector._
import scalaz.syntax.monadPlus._
/** Stable identifier of a flask (monitoring agent) instance. */
case class FlaskID(value: String) extends AnyVal

object FlaskID {
  // Flask ids order by their underlying string value.
  implicit val flaskIdOrder: Order[FlaskID] = implicitly[Order[String]].contramap[FlaskID](_.value)
}

/** A flask agent paired with the network location it can be reached at. */
case class Flask(id: FlaskID, location: Location)
import scalaz.Nondeterminism
import scalaz.concurrent.Task
import journal.Logger
import java.net.URI
object Flask {
  import FlaskID._
  import metrics._
  import scalaz.std.set._
  import scalaz.syntax.apply._
  import Sharding.Distribution

  private[this] val log = Logger[Flask.type]

  // Flasks order by their id, so sorted flask collections are deterministic.
  implicit val flaskOrder: Order[Flask] = implicitly[Order[FlaskID]].contramap[Flask](_.id)

  /**
   * Given a collection of flask instances, find out what exactly they are already
   * mirroring and absorb that into the view of the world.
   *
   * If some flasks are down then they will not be returned as part of distribution
   * and should not be allocated work too. This might lead to double work assignment in case of
   * network partitioning but Chemist need to be able to correct double assignment issues anyways.
   */
  def gatherAssignedTargets(flasks: Seq[Flask])(http: dispatch.HttpExecutor): Task[Distribution] = {
    // Query every flask in parallel; each per-flask failure is captured as a
    // (flask, error) pair via `attempt` instead of failing the whole gather.
    val lookups = Nondeterminism[Task].gatherUnordered(
      flasks.map(
        f => requestAssignedTargets(f.location)(http).map(f -> _).attempt.map {
          v => v.leftMap(f -> _)
        }
      )
    )
    val d = lookups.map {
      all =>
        // Partition the responses into unreachable ("dead") and live flasks,
        // and report both counts as gauges.
        val (errors, success) = all.toVector.separate
        metrics.ModelDeadFlasks.set(errors.size)
        metrics.ModelLiveFlasks.set(success.size)
        errors.foreach {
          e => log.error(s"[gatherAssigned] dead flask=${e._1}, problem=${e._2}")
        }
        // Fold each live flask's reported targets into a single distribution.
        val dis = success.foldLeft(Distribution.empty)(
          (a,b) => a.updateAppend(b._1, b._2)
        )
        val cnt = dis.values.map(_.size).sum
        metrics.ModelAssignedSources.set(cnt)
        log.info(s"[gatherAssigned] distribution stats: flasks=${dis.keySet.size} targets=$cnt")
        log.debug(s"[gatherAssigned] distribution details: $dis")
        dis
    }
    // Time the whole gather; if it fails outright, fall back to an empty distribution.
    GatherAssignedLatency.timeTask(d) or Task.now(Distribution.empty)
  }

  import funnel.http.{Cluster,JSON => HJSON}

  /**
   * Call out to the specific location and grab the list of things the flask
   * is already mirroring.
   */
  private def requestAssignedTargets(location: Location)(http: dispatch.HttpExecutor): Task[Set[Target]] = {
    import argonaut._, Argonaut._, JSON._, HJSON._
    import dispatch._, Defaults._

    // Flasks expose the sources they mirror at /mirror/sources.
    val a = location.uriFromTemplate(LocationTemplate(s"http://@host:@port/mirror/sources"))
    val req = Task.delay(url(a.toString)) <* Task.delay(log.debug(s"requesting assigned targets from $a"))
    req flatMap {
      b => http.apply(b OK as.String).map {
        // Decode the JSON cluster list (undecodable payloads yield no targets)
        // and flatten every cluster's URLs into a flat Target set.
        c => Parse.decodeOption[List[Cluster]](c).toList.flatten.foldLeft(Set.empty[Target]){
          (a,b) => a ++ b.urls.map(s => Target(b.label, new URI(s))).toSet
        }
      }
    }
  }
}
| neigor/funnel | chemist/src/main/scala/flask.scala | Scala | apache-2.0 | 3,896 |
package opennlp.scalabha.tag.support
import opennlp.scalabha.util.CollectionUtils._
import opennlp.scalabha.util.CollectionUtil._
import opennlp.scalabha.util.LogNum
import opennlp.scalabha.util.LogNum._
/**
 * Two-outcome frequency distribution: `label0` carries probability `firstProb`
 * and `label1` the complement (1 - firstProb). Backed by a two-entry
 * MultinomialFreqDist, but `sample` is specialized to a single uniform draw.
 */
class BinomialFreqDist[T](label0: T, label1: T, firstProb: LogNum)
  extends MultinomialFreqDist(Map(label0 -> firstProb, label1 -> (1 - firstProb)), LogNum.zero) {

  // Return label0 with probability firstProb, otherwise label1.
  // NOTE(review): `rand` is inherited from MultinomialFreqDist — presumably a
  // shared RNG instance; confirm thread-safety expectations there.
  override def sample(): T = {
    if (rand.uniform.draw.toLogNum < firstProb)
      label0
    else
      label1
  }

  override def toString = "BinomialFreqDist(%s, %s, %s)".format(label0, label1, firstProb.toDouble)
}
/** Factory methods plus a small sampling demo for [[BinomialFreqDist]]. */
object BinomialFreqDist {

  /** Builds a distribution over the two given labels. */
  def apply[T](label0: T, label1: T, firstProb: LogNum) =
    new BinomialFreqDist[T](label0, label1, firstProb)

  /** Builds a distribution from a sequence that must contain exactly two labels. */
  def apply[T](labels: Seq[T], firstProb: LogNum) = {
    require(labels.size == 2, "BinomialFreqDist must have exactly two labels")
    new BinomialFreqDist[T](labels(0), labels(1), firstProb)
  }

  /** Ad-hoc demo: sample a biased coin and print empirical frequencies. */
  def main(args: Array[String]) {
    val dist = new BinomialFreqDist("H", "T", 0.6.toLogNum)
    println(dist.sample)
    val samples = (1 to 100000).map(_ => dist.sample)
    println(samples.counts.normalizeValues)
  }
}
/** Convenience factory for a Boolean-valued binomial distribution. */
object BooleanFreqDist {
  // `propTrue` is the probability mass assigned to `true`.
  def apply(propTrue: LogNum): BinomialFreqDist[Boolean] = BinomialFreqDist(true, false, propTrue)
  def apply(propTrue: Double): BinomialFreqDist[Boolean] = apply(propTrue.toLogNum)
}
| eponvert/Scalabha | src/main/scala/opennlp/scalabha/tag/support/BinomialFreqDist.scala | Scala | apache-2.0 | 1,363 |
package biz.k11i.xgboost.spark.util
import biz.k11i.xgboost.util.FVec
import org.apache.spark.ml.linalg.{DenseVector, SparseVector, Vector}
object FVecMLVector {
  /**
   * Transform feature vector from spark.ml's [[Vector]] to [[FVec]].
   *
   * @param vector feature vector represented by [[Vector]]
   * @param missingValue dense-vector value to be treated as missing (NaN by default)
   * @return [[FVec]] object
   */
  def transform(vector: Vector, missingValue: Double = Double.NaN): FVec = vector match {
    case dv: DenseVector if missingValue.isNaN => new FVecDenseVectorNaN(dv)
    case dv: DenseVector => new FVecDenseVectorMissingValue(dv, missingValue)
    case sv: SparseVector => new FVecSparseVector(sv)
  }
}
/** Dense-vector adapter: in-range values pass through; out-of-range indices read as NaN. */
private class FVecDenseVectorNaN(dv: DenseVector)
  extends FVec {

  override def fvalue(index: Int): Double =
    if (index < dv.values.length) dv.values(index) else Double.NaN
}
/** Dense-vector adapter: values equal to `missingValue` (and out-of-range reads) become NaN. */
private class FVecDenseVectorMissingValue(dv: DenseVector, missingValue: Double)
  extends FVec {

  override def fvalue(index: Int): Double = {
    if (index >= dv.values.length) {
      Double.NaN
    } else {
      val v = dv.values(index)
      if (v == missingValue) Double.NaN else v
    }
  }
}
/** Sparse-vector adapter: stored entries are materialized once into a Map; absent indices read as NaN. */
private class FVecSparseVector(sv: SparseVector)
  extends FVec {

  // Eager index -> value lookup table built from the sparse representation.
  val map: Map[Int, Double] = (sv.indices zip sv.values).toMap

  override def fvalue(index: Int): Double = map.getOrElse(index, Double.NaN)
}
| komiya-atsushi/xgboost-predictor-java | xgboost-predictor-spark/src/main/scala/biz/k11i/xgboost/spark/util/FVecMLVector.scala | Scala | apache-2.0 | 1,427 |
package com.overviewdocs.clustering
import java.io.{BufferedWriter,OutputStream,OutputStreamWriter}
import java.nio.charset.StandardCharsets
import com.overviewdocs.database.HasBlockingDatabase
import com.overviewdocs.models.Document
import com.overviewdocs.models.tables.{DocumentTags,Documents}
/** Iterates over Documents from the database.
*
* The constructor and foreach methods are slow, and they block. Beware.
*
* @param documentSetId Filter by document set ID.
* @param maybeTagId If set, filter by the given tag ID.
*/
class CatDocuments(
  documentSetId: Long,
  maybeTagId: Option[Long],
  pageSize: Int = 50 // max ~1MB/doc
) extends HasBlockingDatabase {
  // Every document id in the set, in the set's stored sort order, read from
  // document_set.sorted_document_ids. Empty when the document set row is missing.
  private val allIds: Seq[Long] = {
    import database.api._
    blockingDatabase.option(
      sql"""SELECT sorted_document_ids FROM document_set WHERE id = $documentSetId""".as[Seq[Long]]
    ).getOrElse(Seq[Long]())
  }

  // `allIds` restricted to documents carrying the requested tag (original sort
  // order preserved); identical to `allIds` when no tag filter was given.
  private val taggedIds = {
    import database.api._
    maybeTagId match {
      case None => allIds
      case Some(tagId) => {
        val usefulIds: Set[Long] = blockingDatabase.seq(
          DocumentTags
            .filter(_.tagId === tagId)
            .map(_.documentId)
        ).toSet
        allIds.filter(usefulIds.contains _)
      }
    }
  }

  // Number of documents `foreach` will yield.
  def length: Int = taggedIds.length

  // Applies `f` to each document in order, fetching `pageSize` documents per
  // query to bound memory usage. Blocks on the database throughout.
  def foreach(f: Document => Unit): Unit = {
    import database.api._

    taggedIds.grouped(pageSize).foreach { someIds =>
      val documents: Map[Long,Document] = {
        blockingDatabase.seq(Documents.filter(_.id inSet someIds)).map(d => (d.id -> d)).toMap
      }

      someIds.foreach { id =>
        val document: Document = documents(id) // Missing? Crash.
        f(document)
      }
    }
  }
}
| overview/overview-server | worker/src/main/scala/com/overviewdocs/clustering/CatDocuments.scala | Scala | agpl-3.0 | 1,717 |
// Copyright: 2010 - 2016 https://github.com/ensime/ensime-server/graphs
// Licence: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.core.javac
import akka.event.slf4j.SLF4JLogging
import com.sun.source.tree.Tree
import javax.lang.model.element.Element
import com.sun.tools.javac.tree.JCTree
import com.sun.tools.javac.tree.JCTree._
/**
* This trait provides behavior missing from jdk 1.6, 1.7, see:
* http://hg.openjdk.java.net/jdk8/jdk8/langtools/rev/8dd528992c15 and
* sadly depends on unsupported implementation classes,
* (com.sun.tools.javac.*). As a result, this may stop working on on
* Java 9. At that point we can either:
* 1) Take the long way around and find Elements ourselves by searching the
* scope/target for the selected name (see my older version of JavaDocFinding).
* 2) Use reflection here.
* 3) Convince Java 9 to let us import these unsafe libs.
*
* -aemon
*/
trait UnsafeHelpers extends SLF4JLogging {
  /**
   * Resolves the javac Element (symbol) backing the given Tree by pattern
   * matching on the unsupported com.sun.tools.javac JCTree implementation
   * classes. Returns None for tree kinds that carry no symbol.
   */
  protected def unsafeGetElement(info: CompilationInfo, t: Tree): Option[Element] = {
    t match {
      case t: JCCompilationUnit => Some(t.packge)
      case t: JCClassDecl => Some(t.sym)
      case t: JCMethodDecl => Some(t.sym)
      case t: JCVariableDecl => Some(t.sym)
      case t: JCIdent => Some(t.sym)
      case t: JCFieldAccess => Some(t.sym)
      // TODO: Workaround for java 6
      // case t: JCMemberReference => Some(t.sym)
      case t: JCNewClass => Some(t.constructor)
      // Invocations and type applications resolve through their target expression.
      case t: JCMethodInvocation => unsafeGetElement(info, t.meth)
      case t: JCTypeApply => unsafeGetElement(info, t.clazz)
      // Fallback: any typed tree yields its type's symbol.
      case t: JCTree => if (t.`type` != null) Some(t.`type`.tsym) else None
      case _ => None
    }
  }
}
| j-mckitrick/ensime-sbt | src/sbt-test/ensime-sbt/ensime-server/core/src/main/scala/org/ensime/core/javac/UnsafeHelpers.scala | Scala | apache-2.0 | 1,694 |
package myorg
import io.prediction.controller.Engine
import io.prediction.controller.IEngineFactory
import io.prediction.engines.itemrec.EventsDataSource
import io.prediction.engines.itemrec.ItemRecPreparator
import io.prediction.engines.itemrec.NCItemBasedAlgorithm
/** Engine factory wiring the item-rec pipeline to the TempFilter serving component. */
object TempFilterEngine extends IEngineFactory {
  def apply() = {
    // Single algorithm, keyed by the name referenced in engine parameters.
    val algorithms = Map("ncMahoutItemBased" -> classOf[NCItemBasedAlgorithm])
    new Engine(
      classOf[EventsDataSource],
      classOf[ItemRecPreparator],
      algorithms,
      classOf[TempFilter])
  }
}
| ch33hau/PredictionIO | examples/experimental/scala-local-movielens-filtering/src/main/scala/Engine.scala | Scala | apache-2.0 | 522 |
package definiti.common.utils
import definiti.common.ast._
/** Convenience builders for AST nodes, with sensible defaults for optional fields. */
object ASTUtils {
  // Root from explicit namespaces.
  def root(namespaces: Namespace*): Root = {
    Root(namespaces)
  }

  // Root wrapping the given elements into a single anonymous ("" named) namespace.
  // The DummyImplicit disambiguates this overload from the one above after erasure.
  def root(namespaceElements: NamespaceElement*)(implicit dummyImplicit: DummyImplicit): Root = {
    Root(Seq(Namespace("", "", namespaceElements)))
  }

  def namespace(name: String, fullName: String, elements: NamespaceElement*): Namespace = {
    Namespace(name, fullName, elements)
  }

  // Variant deriving the short name from the last dotted segment of `fullName`.
  def namespace(fullName: String, elements: NamespaceElement*): Namespace = {
    Namespace(StringUtils.lastPart(fullName), fullName, elements)
  }

  // DefinedType builder; the short name is derived from `fullName`.
  def definedType(
    fullName: String,
    location: Location,
    genericTypes: Seq[String] = Seq.empty,
    parameters: Seq[ParameterDefinition] = Seq.empty,
    attributes: Seq[AttributeDefinition] = Seq.empty,
    verifications: Seq[TypeVerification] = Seq.empty,
    inherited: Seq[VerificationReference] = Seq.empty,
    comment: Option[String] = None
  ): DefinedType = {
    DefinedType(
      name = StringUtils.lastPart(fullName),
      fullName = fullName,
      genericTypes = genericTypes,
      parameters = parameters,
      attributes = attributes,
      verifications = verifications,
      inherited = inherited,
      comment = comment,
      location = location
    )
  }

  // AttributeDefinition builder with empty defaults for optional fields.
  def attributeDefinition(
    name: String,
    typeDeclaration: TypeDeclaration,
    location: Location,
    comment: Option[String] = None,
    verifications: Seq[VerificationReference] = Seq.empty,
    typeName: Option[String] = None
  ): AttributeDefinition = {
    AttributeDefinition(
      name = name,
      typeDeclaration = typeDeclaration,
      comment = comment,
      verifications = verifications,
      typeName = typeName,
      location = location
    )
  }

  // TypeDeclaration builder with empty defaults for generics and parameters.
  def typeDeclaration(
    typeName: String,
    location: Location,
    genericTypes: Seq[TypeDeclaration] = Seq.empty,
    parameters: Seq[AtomicExpression] = Seq.empty
  ): TypeDeclaration = {
    TypeDeclaration(
      typeName = typeName,
      genericTypes = genericTypes,
      parameters = parameters,
      location = location
    )
  }
}
| definiti/definiti-core | src/main/scala/definiti/common/utils/ASTUtils.scala | Scala | mit | 2,112 |
/***********************************************************************
* Copyright (c) 2013-2019 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.process.analytic
import java.text.SimpleDateFormat
import java.util.TimeZone
import org.geotools.factory.CommonFactoryFinder
import org.geotools.feature.simple.SimpleFeatureBuilder
import org.geotools.filter.text.cql2.CQL
import org.geotools.data.util.NullProgressListener
import org.junit.runner.RunWith
import org.locationtech.geomesa.accumulo.TestWithDataStore
import org.locationtech.geomesa.features.avro.AvroSimpleFeatureFactory
import org.locationtech.geomesa.process.analytic.{AttributeVisitor, UniqueProcess}
import org.locationtech.geomesa.utils.collection.SelfClosingIterator
import org.locationtech.geomesa.utils.text.WKTUtils
import org.opengis.filter.Filter
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
import scala.collection.JavaConverters._
@RunWith(classOf[JUnitRunner])
// Specs2 tests for UniqueProcess: unique attribute values (optionally with
// histogram counts and sorting) over a fixture of eight features.
class UniqueProcessTest extends Specification with TestWithDataStore {

  sequential

  override val spec = "name:String:index=join,weight:Double:index=join,ml:List[String],dtg:Date,*geom:Point:srid=4326"

  import java.{util => jl}

  def toJavaList(s: Seq[String]): java.util.List[String] = s.asJava

  // Fixture: 2x alice, 3x bill, 1x bob, 2x charles (one charles has a null weight).
  addFeatures({
    val geom = WKTUtils.read("POINT(45.0 49.0)")
    val builder = new SimpleFeatureBuilder(sft, new AvroSimpleFeatureFactory)
    // NOTE(review): 'SS' is SimpleDateFormat *milliseconds*, not seconds —
    // the pattern was presumably meant to be "yyyyMMdd HH:mm:ss"; harmless
    // here because the trailing field is always "00", but confirm.
    val dtFormat = new SimpleDateFormat("yyyyMMdd HH:mm:SS")
    dtFormat.setTimeZone(TimeZone.getTimeZone("UTC"))
    Seq(
      Seq("alice", 20, toJavaList(Seq()), dtFormat.parse("20120101 12:00:00"), geom),
      Seq("alice", 25, null.asInstanceOf[jl.List[String]], dtFormat.parse("20120101 12:00:00"), geom),
      Seq("bill", 21, toJavaList(Seq("foo", "bar")), dtFormat.parse("20130101 12:00:00"), geom),
      Seq("bill", 22, toJavaList(Seq("foo")), dtFormat.parse("20130101 12:00:00"), geom),
      Seq("bill", 23, toJavaList(Seq("foo")), dtFormat.parse("20130101 12:00:00"), geom),
      Seq("bob", 30, toJavaList(Seq("foo")), dtFormat.parse("20140101 12:00:00"), geom),
      Seq("charles", 40, toJavaList(Seq("foo")), dtFormat.parse("20140101 12:30:00"), geom),
      Seq("charles", null, toJavaList(Seq("foo")), dtFormat.parse("20140101 12:30:00"), geom)
    ).map { case name :: weight :: l :: dtg :: geom :: Nil =>
      val feature = builder.buildFeature(s"$name$weight")
      feature.setDefaultGeometry(geom)
      feature.setAttribute("name", name)
      feature.setAttribute("weight", weight)
      feature.setAttribute("ml", l)
      feature.setAttribute("dtg", dtg)
      feature.setAttribute("geom", geom)
      feature
    }
  })

  val pl = new NullProgressListener()

  "UniqueProcess" should {

    "return things without a filter" in {
      val features = fs.getFeatures()

      val process = new UniqueProcess
      val results = process.execute(features, "name", null, null, null, null, pl)

      val names = SelfClosingIterator(results.features()).map(_.getAttribute("value")).toList
      names must contain(exactly[Any]("alice", "bill", "bob", "charles"))
    }

    "respect a parent filter" in {
      val features = fs.getFeatures(CQL.toFilter("name LIKE 'b%'"))

      val process = new UniqueProcess
      val results = process.execute(features, "name", null, null, null, null, pl)

      val names = SelfClosingIterator(results.features()).map(_.getAttribute("value")).toList
      names must contain(exactly[Any]("bill", "bob"))
    }

    "be able to use its own filter" in {
      val features = fs.getFeatures()

      val process = new UniqueProcess
      val results = process.execute(features, "name", CQL.toFilter("name LIKE 'b%'"), null, null, null, pl)

      val names = SelfClosingIterator(results.features()).map(_.getAttribute("value")).toList
      names must contain(exactly[Any]("bill", "bob"))
    }

    "combine parent and own filter" in {
      val features = fs.getFeatures(CQL.toFilter("name LIKE 'b%'"))

      val process = new UniqueProcess
      val results = process.execute(features, "name", CQL.toFilter("weight > 25"), null, null, null, pl)

      val names = SelfClosingIterator(results.features()).map(_.getAttribute("value")).toList
      names must contain(exactly[Any]("bob"))
    }

    "default to no histogram" in {
      val features = fs.getFeatures()

      val process = new UniqueProcess
      val results = process.execute(features, "name", null, null, null, null, pl)

      val uniques = SelfClosingIterator(results.features()).toList
      val names = uniques.map(_.getAttribute("value"))
      names must contain(exactly[Any]("alice", "bill", "bob", "charles"))

      // No histogram requested, so the "count" attribute must be absent.
      val counts = uniques.flatMap(f => Option(f.getAttribute("count")))
      counts must beEmpty
    }

    "include histogram if requested" in {
      val features = fs.getFeatures()

      val process = new UniqueProcess
      val results = process.execute(features, "name", null, true, null, null, pl)

      val uniques = SelfClosingIterator(results.features()).toList
      val names = uniques.map(_.getAttribute("value"))
      names should contain(exactly[Any]("alice", "bill", "bob", "charles"))

      val counts = uniques.map(_.getAttribute("count"))
      counts should contain(exactly[Any](1L, 2L, 2L, 3L))

      val alice = uniques.find(_.getAttribute("value") == "alice").map(_.getAttribute("count"))
      alice must beSome(2)

      val bill = uniques.find(_.getAttribute("value") == "bill").map(_.getAttribute("count"))
      bill must beSome(3)

      val charles = uniques.find(_.getAttribute("value") == "charles").map(_.getAttribute("count"))
      charles must beSome(2)
    }

    "sort by value" in {
      val features = fs.getFeatures()

      val process = new UniqueProcess
      val results = process.execute(features, "name", null, true, "DESC", null, pl)

      val uniques = SelfClosingIterator(results.features()).toList
      // Descending lexicographic order of the attribute value.
      val names = uniques.map(_.getAttribute("value"))
      names must haveLength(4)
      names(0) mustEqual("charles")
      names(1) mustEqual("bob")
      names(2) mustEqual("bill")
      names(3) mustEqual("alice")

      val counts = uniques.map(_.getAttribute("count"))
      counts should contain(exactly[Any](1L, 2L, 2L, 3L))

      val alice = uniques.find(_.getAttribute("value") == "alice").map(_.getAttribute("count"))
      alice must beSome(2)

      val bill = uniques.find(_.getAttribute("value") == "bill").map(_.getAttribute("count"))
      bill must beSome(3)

      val charles = uniques.find(_.getAttribute("value") == "charles").map(_.getAttribute("count"))
      charles must beSome(2)
    }

    "sort by histogram" in {
      val features = fs.getFeatures()

      val process = new UniqueProcess
      val results = process.execute(features, "name", null, true, "DESC", true, pl)

      val uniques = SelfClosingIterator(results.features()).toList
      // Descending count order: bill(3), then the 2-counts, then bob(1).
      val names = uniques.map(_.getAttribute("value"))
      names must haveLength(4)
      names(0) mustEqual("bill")
      names(1) mustEqual("alice")
      names(2) mustEqual("charles")
      names(3) mustEqual("bob")

      val counts = uniques.map(_.getAttribute("count"))
      counts should contain(exactly[Any](1L, 2L, 2L, 3L))

      val alice = uniques.find(_.getAttribute("value") == "alice").map(_.getAttribute("count"))
      alice must beSome(2)

      val bill = uniques.find(_.getAttribute("value") == "bill").map(_.getAttribute("count"))
      bill must beSome(3)

      val charles = uniques.find(_.getAttribute("value") == "charles").map(_.getAttribute("count"))
      charles must beSome(2)
    }

    "deal with multi-valued properties correctly" >> {
      val features = fs.getFeatures()

      val proc = new UniqueProcess
      val results = proc.execute(features, "ml", null, true, "DESC", false, pl)

      val uniques = SelfClosingIterator(results.features()).toList
      // Multi-valued attributes are flattened: each list element counts once.
      val values = uniques.map(_.getAttribute("value"))

      "contain 'foo' and 'bar'" >> { values must containTheSameElementsAs(Seq("foo", "bar")) }
      "'foo' must have count 6" >> { uniques.find(_.getAttribute("value") == "foo").map(_.getAttribute("count")) must beSome(6) }
      "'bar' must have count 1" >> { uniques.find(_.getAttribute("value") == "bar").map(_.getAttribute("count")) must beSome(1) }
    }
  }
}
| elahrvivaz/geomesa | geomesa-accumulo/geomesa-accumulo-datastore/src/test/scala/org/locationtech/geomesa/process/analytic/UniqueProcessTest.scala | Scala | apache-2.0 | 8,856 |
/*
* Author: Pablo Lalloni <plalloni@gmail.com>
* Created: 24/05/2011 13:48:43
*/
package org.retistruen.instrument
import org.scalatest.FunSpec
import org.scalatest.matchers.ShouldMatchers
class SlidingMaxSpec extends FunSpec with ShouldMatchers {
def newFixture = {
val emt = new SourceEmitter[Int]("emitter")
val max = new SlidingMax[Int]("max3", Prune.bySize(3))
val rec = new RecordingReceiver[Int]("rec")
emt >> max >> rec
(emt, max, rec)
}
describe("A SlidingMax") {
describe("when received 3, 5, 1 datums") {
val (emt, max, rec) = newFixture
emt <<< (3, 5, 1)
it("should emit 5") {
rec.data.last.value should equal(5)
}
it("should have emitted 3, 5, 5") {
rec.data.map(_.value) should equal(Seq(3, 5, 5))
}
}
describe("when received 10, 5, 3, 1") {
val (emt, max, rec) = newFixture
emt <<< (10, 5, 3, 1)
it("should have emitted 5 as last max") {
rec.data.last.value should equal(5)
}
it("should have emitted 10, 10, 10, 5") {
rec.data.map(_.value) should equal(Seq(10, 10, 10, 5))
}
}
}
}
| plalloni/retistruen | src/test/scala/org/retistruen/instrument/SlidingMaxSpec.scala | Scala | mit | 1,156 |
/*
* Copyright 2015 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.version
object HmrcVersions {
case object CT600Version3 extends Version {
override def name: String = "v3"
}
case object CT600Version2 extends Version {
override def name: String = "v2"
}
case object ComputationsCT20150201 extends Version {
override def name: String = "ct-2015-02-01"
}
case object ComputationsCT20141001 extends Version {
override def name: String = "ct-2014-10-01"
}
case object ComputationsCT20130721 extends Version {
override def name: String = "ct-2013-07-21"
}
case object UploadedAccounts extends Version {
override def name: String = "uploaded-1.0"
}
}
| keithhall/ct-calculations | src/main/scala/uk/gov/hmrc/ct/version/HmrcVersions.scala | Scala | apache-2.0 | 1,263 |
/*
* Copyright 2014 porter <https://github.com/eikek/porter>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package porter.app.openid
import porter.model.Ident
import spray.http.Uri
import java.nio.file.{Path, Files}
import scala.util.Try
import porter.app.openid.common.Supplier
import porter.model.PasswordCrypt
import org.eknet.spray.openid.provider.Mustache
/**
 * Configuration contract for the OpenID service: credential handling,
 * template/asset locations, cookie parameters, realms, endpoint URLs and
 * registration options. Template lookup falls back to bundled classpath
 * resources when no file is present in [[templateDir]].
 */
trait OpenIdServiceSettings {

  // Password crypt used by the service.
  def passwordCrypt: PasswordCrypt
  // Authentication decision strategy.
  def decider: porter.auth.Decider
  // Optional contact information.
  def contact: Option[String]

  // Directory containing static resources served by the application.
  def staticResourceDir: java.nio.file.Path
  // Directory searched first for mustache templates (see loadTemplateFile).
  def templateDir: java.nio.file.Path

  // Cookie settings used by the service.
  def cookieName: String
  def cookieKey: Vector[Byte]
  def cookieSecure: Boolean

  // The realms the service accepts, plus the default realm.
  def realms: List[Ident]
  def defaultRealm: Ident
  /** A realm is accepted if it is the default realm or listed in [[realms]]. */
  final def acceptRealm(realm: Ident) = realm == defaultRealm || realms.contains(realm)

  // Base URL under which the two OpenID URLs below are derived.
  def endpointBaseUrl: Uri
  /** The OpenID endpoint URL: endpointBaseUrl + "/openid/ep". */
  final def endpointUrl: Uri = endpointBaseUrl.withPath(endpointBaseUrl.path + "/openid/ep")
  /** The OpenID base URL: endpointBaseUrl + "/openid". */
  final def openIdUrl: Uri = endpointBaseUrl.withPath(endpointBaseUrl.path + "/openid")

  // Registration options.
  def registrationEnabled: Boolean
  def registrationRequiresEmail: Boolean
  def registrationKey: Option[String]

  // Avatar cache settings; avatarCacheDir of None presumably disables the
  // on-disk cache — confirm against the consumer of this setting.
  def avatarCacheDir: Option[Path]
  def avatarCacheDirSize: Int
  def avatarMaxUploadSize: Int

  /**
   * Locates the named template: prefers a readable regular file in
   * [[templateDir]], otherwise falls back to the bundled classpath resource
   * under /porter/app/openid/assets/. Returns None if neither exists.
   * The returned Supplier opens a fresh InputStream on each invocation.
   */
  private def loadTemplateFile(name: String): Option[Supplier] = {
    val tfile = templateDir.resolve(name)
    if (Files.isRegularFile(tfile) && Files.isReadable(tfile))
      Some(() => Try(Files.newInputStream(tfile)))
    else {
      val url = Option(getClass.getResource("/porter/app/openid/assets/" + name))
      url.map(u => () => Try(u.openStream()))
    }
  }

  /**
   * Loads and parses the named template with Mustache.
   * Fails via sys.error if the template cannot be located; note that
   * `s().get` also throws if opening the stream failed.
   */
  private def loadTemplate(name: String) = {
    val in = loadTemplateFile(name)
    in.map(s => Mustache(s().get)).getOrElse(sys.error(s"$name not found"))
  }

  // Templates are loaded lazily on first use and cached for the lifetime of
  // this settings instance.
  lazy val loginTemplate = loadTemplate("login-template.mustache")
  lazy val errorTemplate = loadTemplate("error-template.mustache")
  lazy val continueTemplate = loadTemplate("continue-template.mustache")
  lazy val userTemplate = loadTemplate("user-template.mustache")
  lazy val registerTemplate = loadTemplate("register-template.mustache")
}
| eikek/porter | openid/src/main/scala/porter/app/openid/OpenIdServiceSettings.scala | Scala | apache-2.0 | 2,624 |
package at.logic.gapt.proofs.lk.base
import at.logic.gapt.algorithms.rewriting.NameReplacement
import at.logic.gapt.language.hol.{ HOLPosition, HOLOrdering }
import at.logic.gapt.proofs.occurrences._
import at.logic.gapt.proofs.proofs._
import at.logic.gapt.expr._
import at.logic.gapt.utils.ds.trees._
/**
 * A sequent Γ :- Δ of formulas, where Γ is the antecedent and Δ the succedent.
 *
 * @param antecedent The formulas on the left side of the sequent.
 * @param succedent The formulas on the right side of the sequent.
 */
class FSequent( val antecedent: Seq[HOLFormula], val succedent: Seq[HOLFormula] ) {

  // Maps a symbol name to (arity, replacement name); consumed by renameSymbols.
  type SymbolMap = Map[String, ( Int, String )]

  // Tuple-style accessors so an FSequent can be used like a pair of cedents.
  val _1 = antecedent
  val _2 = succedent

  /**
   * Equality treating each side of the sequent as list, i.e. respecting order and multiplicity.
   */
  override def equals( fs: Any ): Boolean = fs match {
    case FSequent( ant, succ ) => ( this.antecedent equals ant ) && ( this.succedent equals succ )
    case _ => false
  }

  // Consistent with equals above: mixes the hash codes of both cedents.
  override def hashCode: Int = 31 * antecedent.hashCode() + succedent.hashCode()

  // Renders e.g. "A,B :- C".
  override def toString: String =
    this.antecedent.mkString( "," ) + " :- " + this.succedent.mkString( "," )

  /**
   * Equality treating each side of the sequent as a set.
   */
  def setEquals( o: FSequent ) = _1.toSet == o._1.toSet && _2.toSet == o._2.toSet

  /**
   * Equality treating each side of the sequent as a multiset.
   */
  def multiSetEquals( o: FSequent ) =
    _1.diff( o._1 ).isEmpty && _2.diff( o._2 ).isEmpty &&
      o._1.diff( _1 ).isEmpty && o._2.diff( _2 ).isEmpty

  /**
   * The formula on both sides of the sequent, i.e. the concatenation of antecedent and succedent.
   */
  def formulas: Seq[HOLFormula] = antecedent ++ succedent

  /**
   * Takes the multiset difference between two sequents, i.e. each side separately.
   */
  def diff( other: FSequent ) = FSequent( this.antecedent diff other.antecedent, this.succedent diff other.succedent )

  /**
   * Composes two sequents by taking the concatenation of the formulas on the left side, and on the right side.
   */
  def compose( other: FSequent ) = FSequent( antecedent ++ other.antecedent, succedent ++ other.succedent )

  /**
   * Interpretation of the sequent as a formula:
   * the disjunction of the negated antecedent formulas and the succedent formulas.
   */
  def toFormula: HOLFormula = Or( antecedent.toList.map( f => Neg( f ) ) ++ succedent )

  /**
   * Are both sides of the sequent empty?
   */
  def isEmpty = _1.isEmpty && _2.isEmpty

  /**
   * Sorts each side of the sequent by [[HOLOrdering]].
   *
   * @return A copy of this sequent where the two sides are sorted.
   */
  def sorted = FSequent( _1.sorted( HOLOrdering ), _2.sorted( HOLOrdering ) )

  /**
   * Computes the intersection of two sequents, each side separately.
   *
   * @param other The sequent to intersect with.
   * @return The intersected sequent.
   */
  def intersect( other: FSequent ) = FSequent( antecedent intersect other.antecedent, succedent intersect other.succedent )

  /**
   * Computes the union of two sequents, each side separately.
   *
   * @param other The sequent to unite with.
   * @return The united sequent.
   */
  def union( other: FSequent ) = FSequent( antecedent union other.antecedent, succedent union other.succedent )

  /**
   * Removes duplicate formulas from both cedents.
   *
   * @return A copy of this sequent without duplicate formulas.
   */
  def distinct = FSequent( antecedent.distinct, succedent.distinct )

  /**
   * @param other Another FSequent
   * @return True iff this contains other as a pair of multisets.
   */
  def superMultiSet( other: FSequent ) = other subMultiSet this

  /**
   * @param other Another FSequent.
   * @return True iff this contains other as a pair of sets.
   */
  def superSet( other: FSequent ) = other subSet this

  /**
   * @param other Another FSequent.
   * @return True iff other contains this as a pair of multisets.
   */
  def subMultiSet( other: FSequent ) = ( this diff other ).isEmpty

  /**
   * @param other Another FSequent.
   * @return True iff other contains this pair of sets.
   */
  def subSet( other: FSequent ) = ( this.distinct diff other.distinct ).isEmpty

  /**
   *
   * @return The sequent in tuple form.
   */
  def toTuple = ( antecedent, succedent )

  /**
   * Renames symbols in all formulas of this sequent according to the given map.
   */
  def renameSymbols( map: SymbolMap ) =
    FSequent( antecedent map ( NameReplacement( _, map ) ), succedent map ( NameReplacement( _, map ) ) )
}
object FSequent {

  /**
   * Constructs an [[FSequent]] from the given antecedent and succedent.
   */
  def apply( ant: Seq[HOLFormula], succ: Seq[HOLFormula] ): FSequent = new FSequent( ant, succ )

  /**
   * Constructs an [[FSequent]] from a [[Sequent]], by ignoring where the formulas occur.
   */
  def apply( seq: Sequent ): FSequent = FSequent( seq.antecedent map ( _.formula ), seq.succedent map ( _.formula ) )

  /**
   * Destructs an [[FSequent]] into a tuple of its antecedent and succedent.
   */
  def unapply( f: FSequent ): Option[( Seq[HOLFormula], Seq[HOLFormula] )] = Some( ( f.antecedent, f.succedent ) )
}
object FSequentOrdering extends FSequentOrdering

/**
 * Ordering for sequents: first by antecedent size, then by succedent size,
 * and finally lexicographically on the [[HOLOrdering]]-sorted formulas.
 */
class FSequentOrdering extends Ordering[FSequent] {
  def compare( x: FSequent, y: FSequent ): Int = {
    if ( x.antecedent.size < y.antecedent.size ) -1
    else if ( y.antecedent.size < x.antecedent.size ) 1
    else if ( x.succedent.size < y.succedent.size ) -1
    else if ( y.succedent.size < x.succedent.size ) 1
    else {
      assert( x.antecedent.size == y.antecedent.size &&
        x.succedent.size == y.succedent.size, "Implementation error comparing FSequents!" )
      // Sizes coincide: walk the sorted formula lists pairwise and keep the
      // first non-zero comparison result.
      val pairs = x.sorted.formulas zip y.sorted.formulas
      pairs.foldLeft( 0 ) { ( rv, pair ) =>
        if ( rv == 0 ) HOLOrdering.compare( pair._1, pair._2 ) else rv
      }
    }
  }
}
/**
 * Sequent of formulas tracking their occurrences in a proof. Each formula together with its occurrence in a proof is
 * stored as a [[at.logic.gapt.proofs.occurrences.FormulaOccurrence]].
 *
 * @param antecedent The formulas on the left side.
 * @param succedent The formulas on the right side.
 */
class Sequent( val antecedent: Seq[FormulaOccurrence], val succedent: Seq[FormulaOccurrence] ) {

  /**
   * Equality treating each side as a multiset of formulas, ignoring the occurrence.
   */
  def syntacticMultisetEquals( o: Sequent ) = FSequent( this ) multiSetEquals FSequent( o )

  /**
   * Equality treating each side as a multiset of formulas, ignoring the occurrence.
   */
  def syntacticMultisetEquals( o: FSequent ) = FSequent( this ) multiSetEquals o

  /**
   * Symbolic alias for [[syntacticMultisetEquals]].
   */
  def =^( o: Sequent ): Boolean = syntacticMultisetEquals( o )

  /**
   * Removes the specified [[at.logic.gapt.proofs.occurrences.FormulaOccurrence]]s from each side.
   */
  def removeFormulasAtOccurrences( occs: Seq[FormulaOccurrence] ): Sequent = Sequent(
    antecedent.filterNot( x => occs.contains( x ) ),
    succedent.filterNot( x => occs.contains( x ) ) )

  /**
   * Finds the first occurrence in this sequent having the given ancestor.
   */
  def getChildOf( ancestor: FormulaOccurrence ): Option[FormulaOccurrence] = ( antecedent ++ succedent ).find( _.parents.contains( ancestor ) )

  /**
   * Converts to an [[FSequent]], ignoring where the formulas occur.
   */
  def toFSequent: FSequent = FSequent( this )

  /**
   * Interpretation of the sequent as formula.
   */
  def toFormula = toFSequent.toFormula

  /**
   * Is this sequent of the form F :- F?
   */
  def isTaut = antecedent.size == 1 && succedent.size == 1 && antecedent.head.formula == succedent.head.formula

  /**
   * Occurrences on both sides of the sequent, i.e. the concatenation of antecedent and succedent.
   */
  def occurrences = antecedent ++ succedent

  /**
   * Is this sequent of the form :- t = t?
   */
  def isReflexivity = antecedent.isEmpty && succedent.size == 1 && (
    succedent.head.formula match {
      case Eq( s, t ) => ( s == t )
      case _          => false
    } )

  /**
   * Composes with the other sequent. The result is the concatenation of the two antecedents as antecedent, and the
   * two succedents as succedent.
   */
  // Fix: previously `that.antecedent` was (incorrectly) appended to the succedent,
  // contradicting both this documentation and FSequent.compose.
  def compose( that: Sequent ) = Sequent( this.antecedent ++ that.antecedent, this.succedent ++ that.succedent )

  override def toString: String = toFSequent.toString

  /**
   * The free variables of all formulas occurring in this sequent.
   */
  def freeVariables: List[Var] = ( ( antecedent ++ succedent ) flatMap ( ( fo: FormulaOccurrence ) => at.logic.gapt.expr.freeVariables( fo.formula ) ) ).toList
}
object Sequent {
  /** Builds a [[Sequent]] from the given antecedent and succedent occurrences. */
  def apply( antecedent: Seq[FormulaOccurrence], succedent: Seq[FormulaOccurrence] ) =
    new Sequent( antecedent, succedent )

  /** Extractor yielding the antecedent and succedent of a [[Sequent]]. */
  def unapply( so: Sequent ) = Some( ( so.antecedent, so.succedent ) )
}
// Exceptions thrown when constructing or inspecting LK rules.
class LKRuleException( msg: String ) extends RuleException( msg )
class LKRuleCreationException( msg: String ) extends LKRuleException( msg )

// These two classes allow detailed error diagnosis by carrying the rule name,
// the offending parent proof(s) and the auxiliary formulas involved.
case class LKUnaryRuleCreationException( name: String, parent: LKProof, aux: List[HOLFormula] )
  extends LKRuleCreationException( "" ) {
  // Message is assembled on demand from the rule name, parent root sequent and
  // auxiliary formulas.
  override def getMessage = "Could not create lk rule " + name + " from parent " + parent.root + " with auxiliary formulas " + aux.mkString( ", " )
}

case class LKBinaryRuleCreationException( name: String, parent1: LKProof, aux1: HOLFormula, parent2: LKProof, aux2: HOLFormula )
  extends LKRuleCreationException( "" ) {
  override def getMessage = "Could not create lk rule " + name + " from left parent " + parent1.root + " with auxiliary formula " + aux1 +
    " and right parent " + parent2.root + " with auxiliary formula " + aux2
}

class FormulaNotExistsException( msg: String ) extends LKRuleException( msg )
trait LKProof extends TreeProof[Sequent] with Tree[Sequent] {

  /**
   * Finds the formula occurrence in the root sequent that descends from the
   * given occurrence (reflexively, i.e. `fo` counts as its own descendant).
   *
   * @param fo The ancestor occurrence to look for.
   * @return Some(descendant) if exactly one exists, None if there is none.
   * @throws LKRuleException if more than one descendant occurs in the root sequent.
   */
  def getDescendantInLowerSequent( fo: FormulaOccurrence ): Option[FormulaOccurrence] = {
    // Use Seq extractors instead of :: / Nil: the cedents are declared as Seq,
    // so the result of filter is not guaranteed to be a List at runtime, and
    // cons patterns would throw a MatchError on e.g. a Vector.
    ( root.antecedent ++ root.succedent ).filter( _.isDescendantOf( fo, reflexive = true ) ) match {
      case Seq( x ) => Some( x )
      case Seq()    => None
      case _        => throw new LKRuleException( "Illegal lower sequent in rule in application of getDescendantInLowerSequent: More than one such formula exists" )
    }
  }

  /**
   * True iff the root sequent contains a (unique) descendant of `fo`.
   */
  def containsDescendantOf( fo: FormulaOccurrence ): Boolean =
    getDescendantInLowerSequent( fo ).isDefined
}
/** An LK proof with no premises (a leaf of the proof tree). */
trait NullaryLKProof extends LeafTree[Sequent] with LKProof with NullaryTreeProof[Sequent]

/** An LK proof whose last rule has exactly one premise. */
trait UnaryLKProof extends UnaryTree[Sequent] with LKProof with UnaryTreeProof[Sequent] {
  // The unique immediate subproof, cast from the underlying tree node.
  override def uProof = t.asInstanceOf[LKProof]
}

/** An LK proof whose last rule has exactly two premises. */
trait BinaryLKProof extends BinaryTree[Sequent] with LKProof with BinaryTreeProof[Sequent] {
  // The left and right immediate subproofs, cast from the underlying tree nodes.
  override def uProof1 = t1.asInstanceOf[LKProof]
  override def uProof2 = t2.asInstanceOf[LKProof]
}
// Traits denoting rules having auxiliary and main formulas.
trait AuxiliaryFormulas {
  // for each upper sequent we have a list of occurrences
  def aux: List[List[FormulaOccurrence]]
}

trait PrincipalFormulas {
  // The principal (main) formula occurrences of the rule.
  def prin: List[FormulaOccurrence]
}

trait SubstitutionTerm {
  // The substitution term carried by the rule.
  def subst: LambdaExpression
}

trait Eigenvariable {
  // The eigenvariable used by the rule.
  def eigenvar: Var
}

trait TermPositions {
  // The positions of the terms affected by the rule.
  def termPos: List[HOLPosition]
}
// Creates the context of a lower sequent: for every occurrence in the given
// sequence, a fresh occurrence of the same formula is created whose only
// parent is the old occurrence.
object createContext {
  def apply( set: Seq[FormulaOccurrence] ): Seq[FormulaOccurrence] =
    for ( occ <- set )
      yield occ.factory.createFormulaOccurrence( occ.formula.asInstanceOf[HOLFormula], occ :: Nil )
}
| gisellemnr/gapt | src/main/scala/at/logic/gapt/proofs/lk/base.scala | Scala | gpl-3.0 | 11,328 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* With small modifications by MemSQL
*/
package test.util
import org.apache.spark.{SparkConf, SparkContext}
import org.scalatest.BeforeAndAfterEach
import org.scalatest._
/**
 * Mixin that manages a local SparkContext per test: a fresh context running
 * on "local[4]" is created before each test and stopped afterwards.
 */
trait LocalSparkContext extends BeforeAndAfterEach { self: Suite =>

  @transient private var _sc: SparkContext = _

  // Shared base configuration; console progress bars are disabled to keep
  // test output clean.
  val _sparkConf = new SparkConf(false)
    .set("spark.ui.showConsoleProgress", "false")

  // Accessor for the current test's context.
  def sc: SparkContext = _sc

  override def beforeEach() {
    _sc = new SparkContext("local[4]", "test", _sparkConf)
    super.beforeEach()
  }

  override def afterEach() {
    resetSparkContext()
    super.afterEach()
  }

  // Stops the current context (if any) and clears the reference.
  def resetSparkContext(): Unit = {
    LocalSparkContext.stop(_sc)
    _sc = null
  }
}
object LocalSparkContext {

  /** Stops the given context (null-safe) and clears the driver-port property. */
  def stop(sc: SparkContext) {
    Option(sc).foreach(_.stop())
    // To avoid Akka rebinding to the same port, since it doesn't unbind immediately on shutdown
    System.clearProperty("spark.driver.port")
  }

  /** Runs `f` by passing in `sc` and ensures that `sc` is stopped. */
  def withSpark[T](sc: SparkContext)(f: SparkContext => T): T =
    try f(sc) finally stop(sc)
}
| memsql/streamliner-examples | src/test/scala/test/util/LocalSparkContext.scala | Scala | apache-2.0 | 1,955 |
package model
/** Credentials submitted on login: a username or e-mail address plus a password. */
case class LoginUserInput(usernameOrEmail: String, password: String)
| sysgears/apollo-universal-starter-kit | modules/user/server-scala/src/main/scala/model/LoginUserInput.scala | Scala | mit | 84 |
/* Copyright 2009-2016 EPFL, Lausanne */
import leon.lang._
object Postconditions {

  // NOTE(review): judging by the names ("failling_*"), these postconditions are
  // deliberately not provable and serve as negative verification test cases —
  // do not "repair" them into valid contracts.

  // Even though f is positive on positive inputs, the result f(10) need not
  // exceed f(a) for every a.
  def failling_1(f: BigInt => BigInt) = {
    require(forall((a: BigInt) => a > 0 ==> f(a) > 0))
    f(10)
  } ensuring { res => forall((a: BigInt) => res > f(a)) }

  // x >= 0 and f negative on positive inputs does not imply x > f(a) for all a
  // (e.g. non-positive a is unconstrained).
  def failling_2(f: BigInt => BigInt, x: BigInt) = {
    require(x >= 0 && forall((a: BigInt) => a > 0 ==> f(a) < 0))
    x
  } ensuring { res => forall((a: BigInt) => res > f(a)) }
}
| epfl-lara/leon | src/test/resources/regression/verification/purescala/invalid/Postcondition.scala | Scala | gpl-3.0 | 437 |
/*
* Copyright 2010 Michael Fortin <mike@brzy.org>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this
* file except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed
* under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.brzy.webapp
import org.brzy.webapp.application.WebApp
import org.slf4j.LoggerFactory
import javax.servlet.http._
import javax.servlet.{ServletConfig, ServletResponse, ServletRequest}
/**
* Http servlet that finds and call action requested. If the session is expired,
* then the login page is displayed. This also checks the constraints on the
* action and checks them before calling the action.
*
* @author Michael Fortin
*/
class BrzyServlet extends HttpServlet {
  private val log = LoggerFactory.getLogger(classOf[BrzyServlet])

  // No servlet-level initialization needed: the WebApp is looked up from the
  // servlet context on every request.
  override def init(config: ServletConfig) {
  }

  /**
   * Dispatches the request to the matching action of the WebApp stored in the
   * servlet context under the "application" attribute; fails with a
   * RuntimeException if no action matches the request URI.
   */
  override def service(req: ServletRequest, res: ServletResponse) {
    val request = req.asInstanceOf[HttpServletRequest]
    val response = res.asInstanceOf[HttpServletResponse]
    // NOTE(review): assumes bootstrap code placed the WebApp under
    // "application" before the first request — confirm against the listener.
    val webapp = req.getServletContext.getAttribute("application").asInstanceOf[WebApp]
    webapp.serviceAction(request)
      .getOrElse(throw new RuntimeException(s"Action not found: ${request.getRequestURI}"))
      .doService(request, response)
  }
}
// Compiler regression test: the "// error" marker below is checked by the test
// harness and must stay on its line.
trait A[+_X] {
  protected[this] type X = _X // error: variance
  def f: X
}

trait B extends A[B] {
  def f: X = new B {}
}

class C extends B with A[C] {
  // should be required because the inherited f is of type B, not C
  // override def f: X = new C
}

object Test extends App {
  val c1 = new C
  val c2: C = c1.f
}
| som-snytt/dotty | tests/neg/i3989d.scala | Scala | apache-2.0 | 322 |
/*
* Copyright (c) 2014-2020 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.internal.operators
import monix.reactive.Observable
import scala.concurrent.duration._
object SampleRepeatedSuite extends BaseOperatorSuite {
  // The sampling period used throughout these tests.
  def waitNext = 500.millis
  def waitFirst = 500.millis

  def createObservable(sourceCount: Int) = Some {
    // A single value whose completion is pushed far into the future, sampled
    // repeatedly every 500ms; take() bounds the stream to sourceCount items.
    val o = Observable
      .now(1L)
      .delayOnComplete(sourceCount.minutes)
      .sampleRepeated(500.millis)
      .take(sourceCount.toLong)

    Sample(o, sourceCount, sourceCount, waitFirst, waitNext)
  }

  // No error-propagation scenarios are provided for this operator.
  def observableInError(sourceCount: Int, ex: Throwable) = None
  def brokenUserCodeObservable(sourceCount: Int, ex: Throwable) = None

  override def cancelableObservables() = {
    // An effectively never-ending sampled stream, used to exercise cancellation.
    val o = Observable
      .now(1L)
      .delayOnComplete(1.hour)
      .sampleRepeated(500.millis)
    Seq(
      Sample(o, 0, 0, 0.seconds, 0.seconds),
      Sample(o, 2, 2, 1.seconds, 0.seconds)
    )
  }
}
| alexandru/monifu | monix-reactive/shared/src/test/scala/monix/reactive/internal/operators/SampleRepeatedSuite.scala | Scala | apache-2.0 | 1,558 |
package scala.lms
package common
import java.io.PrintWriter
import internal._
import scala.reflect.SourceContext
/**
 * Staged interface for raising runtime exceptions from generated code.
 */
trait ExceptionOps extends Variables {
  // TODO: support virtualization of try-catch-finally blocks
  // for now, we only allow fatal errors (the exception will never be caught in generated code)
  def fatal(m: Rep[String]) = throw_exception(m)
  // Abstract constructor for the staged throw; implemented in ExceptionOpsExp.
  def throw_exception(m: Rep[String]): Rep[Nothing]
}
trait ExceptionOpsExp extends ExceptionOps with EffectExp {
  // IR node representing a thrown exception carrying the message expression.
  case class ThrowException(m: Rep[String]) extends Def[Nothing]

  // Registered as an effect with a Global() summary.
  def throw_exception(m: Exp[String]) = reflectEffect(ThrowException(m), Global())

  // Mirroring support for transformers: rebuilds the reflected ThrowException
  // node with the transformed message, effect summary and dependencies.
  override def mirror[A:Manifest](e: Def[A], f: Transformer)(implicit pos: SourceContext): Exp[A] = (e match {
    case Reflect(ThrowException(s), u, es) => reflectMirrored(Reflect(ThrowException(f(s)), mapOver(f,u), f(es)))(mtype(manifest[A]), pos)
    case _ => super.mirror(e,f)
  }).asInstanceOf[Exp[A]]
}
trait ScalaGenExceptionOps extends ScalaGenBase {
  val IR: ExceptionOpsExp
  import IR._

  // Emits `val sym = throw new Exception(m)` for ThrowException nodes;
  // everything else is delegated to the next generator in the chain.
  override def emitNode(sym: Sym[Any], rhs: Def[Any]) = rhs match {
    case ThrowException(m) => emitValDef(sym, src"throw new Exception($m)")
    case _ => super.emitNode(sym, rhs)
  }
}
trait CLikeGenExceptionOps extends CLikeGenBase {
  val IR: ExceptionOpsExp
  import IR._

  /**
   * Emits the message followed by an assert(false) for ThrowException nodes;
   * everything else is delegated to the next generator in the chain.
   */
  override def emitNode(sym: Sym[Any], rhs: Def[Any]) = rhs match {
    case ThrowException(m) =>
      // Print via a fixed "%s" format string: passing the runtime message as
      // the printf format itself is undefined behavior in the generated code
      // whenever the message contains conversion specifiers such as '%'.
      stream.println("printf(\"%s\", " + quote(m) + ".c_str());")
      stream.println("assert(false);")
    case _ => super.emitNode(sym, rhs)
  }
}
// C code generation reuses the generic C-like emitter unchanged.
trait CGenExceptionOps extends CGenBase with CLikeGenExceptionOps
trait CudaGenExceptionOps extends CudaGenBase with CLikeGenExceptionOps {
  val IR: ExceptionOpsExp
  import IR._

  /**
   * CUDA-specific override: prints the message (no .c_str(), unlike the C++
   * emitter) and then asserts, aborting the kernel.
   */
  override def emitNode(sym: Sym[Any], rhs: Def[Any]) = rhs match {
    case ThrowException(m) =>
      // Print via a fixed "%s" format string: using the runtime message as the
      // device-side printf format is undefined whenever it contains '%'.
      stream.println("printf(\"%s\", " + quote(m) + ");")
      stream.println("assert(false);")
    case _ => super.emitNode(sym, rhs)
  }
}
//OpenCL does not support printf within a kernel
//trait OpenCLGenExceptionOps extends OpenCLGenBase with CLikeGenExceptionOps
| scalan/virtualization-lms-core | src/common/ExceptionOps.scala | Scala | bsd-3-clause | 2,114 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.utils
import org.apache.calcite.avatica.util.TimeUnit
import org.apache.calcite.rel.RelNode
import org.apache.calcite.sql.parser.SqlParserPos
import org.apache.calcite.sql.{SqlExplainLevel, SqlIntervalQualifier}
import org.apache.flink.api.common.BatchShuffleMode
import org.apache.flink.api.common.typeinfo.{AtomicType, TypeInformation}
import org.apache.flink.api.java.typeutils.{PojoTypeInfo, RowTypeInfo, TupleTypeInfo}
import org.apache.flink.api.scala.typeutils.CaseClassTypeInfo
import org.apache.flink.configuration.ExecutionOptions
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.{JsonNode, ObjectMapper}
import org.apache.flink.streaming.api.datastream.DataStream
import org.apache.flink.streaming.api.environment.{LocalStreamEnvironment, StreamExecutionEnvironment}
import org.apache.flink.streaming.api.scala.{StreamExecutionEnvironment => ScalaStreamExecEnv}
import org.apache.flink.streaming.api.{TimeCharacteristic, environment}
import org.apache.flink.table.api._
import org.apache.flink.table.api.bridge.java.{StreamTableEnvironment => JavaStreamTableEnv}
import org.apache.flink.table.api.bridge.scala.{StreamTableEnvironment => ScalaStreamTableEnv}
import org.apache.flink.table.api.config.ExecutionConfigOptions
import org.apache.flink.table.api.internal.{TableEnvironmentImpl, TableEnvironmentInternal, TableImpl}
import org.apache.flink.table.catalog.{CatalogManager, FunctionCatalog, GenericInMemoryCatalog, ObjectIdentifier}
import org.apache.flink.table.data.RowData
import org.apache.flink.table.delegation.{Executor, ExecutorFactory}
import org.apache.flink.table.descriptors.ConnectorDescriptorValidator.CONNECTOR_TYPE
import org.apache.flink.table.descriptors.DescriptorProperties
import org.apache.flink.table.descriptors.Schema.SCHEMA
import org.apache.flink.table.expressions.Expression
import org.apache.flink.table.factories.{FactoryUtil, PlannerFactoryUtil, StreamTableSourceFactory}
import org.apache.flink.table.functions._
import org.apache.flink.table.module.ModuleManager
import org.apache.flink.table.operations.{ModifyOperation, Operation, QueryOperation, SinkModifyOperation}
import org.apache.flink.table.planner.calcite.CalciteConfig
import org.apache.flink.table.planner.delegation.PlannerBase
import org.apache.flink.table.planner.functions.sql.FlinkSqlOperatorTable
import org.apache.flink.table.planner.operations.{InternalDataStreamQueryOperation, PlannerQueryOperation, RichTableSourceQueryOperation}
import org.apache.flink.table.planner.plan.nodes.calcite.LogicalWatermarkAssigner
import org.apache.flink.table.planner.plan.nodes.exec.ExecNodeContext
import org.apache.flink.table.planner.plan.nodes.exec.utils.ExecNodePlanDumper
import org.apache.flink.table.planner.plan.optimize.program._
import org.apache.flink.table.planner.plan.stats.FlinkStatistic
import org.apache.flink.table.planner.plan.utils.FlinkRelOptUtil
import org.apache.flink.table.planner.runtime.utils.{TestingAppendTableSink, TestingRetractTableSink, TestingUpsertTableSink}
import org.apache.flink.table.planner.sinks.CollectRowTableSink
import org.apache.flink.table.planner.utils.PlanKind.PlanKind
import org.apache.flink.table.planner.utils.TableTestUtil.{replaceNodeIdInOperator, replaceStageId, replaceStreamNodeId}
import org.apache.flink.table.runtime.types.TypeInfoLogicalTypeConverter.fromLogicalTypeToTypeInfo
import org.apache.flink.table.sinks._
import org.apache.flink.table.sources.{StreamTableSource, TableSource}
import org.apache.flink.table.types.logical.LogicalType
import org.apache.flink.table.types.utils.TypeConversions
import org.apache.flink.table.typeutils.FieldInfoUtils
import org.apache.flink.types.Row
import org.junit.Assert.{assertEquals, assertTrue, fail}
import org.junit.Rule
import org.junit.rules.{ExpectedException, TemporaryFolder, TestName}
import _root_.java.math.{BigDecimal => JBigDecimal}
import _root_.java.util
import java.io.{File, IOException}
import java.nio.file.{Files, Paths}
import java.time.Duration
import _root_.scala.collection.JavaConversions._
import _root_.scala.io.Source
/**
* Test base for testing Table API / SQL plans.
*/
abstract class TableTestBase {

  // used for accurate exception information checking.
  val expectedException: ExpectedException = ExpectedException.none()

  // used for get test case method name
  val testName: TestName = new TestName

  // Per-test temporary directory, cleaned up automatically by JUnit.
  val _tempFolder = new TemporaryFolder

  @Rule
  def tempFolder: TemporaryFolder = _tempFolder

  @Rule
  def thrown: ExpectedException = expectedException

  @Rule
  def name: TestName = testName

  // Factory methods for the various streaming/batch, Scala/Java test utils.
  def streamTestUtil(conf: TableConfig = new TableConfig): StreamTableTestUtil =
    StreamTableTestUtil(this, conf = conf)

  def scalaStreamTestUtil(): ScalaStreamTableTestUtil = ScalaStreamTableTestUtil(this)

  def javaStreamTestUtil(): JavaStreamTableTestUtil = JavaStreamTableTestUtil(this)

  def batchTestUtil(conf: TableConfig = new TableConfig): BatchTableTestUtil =
    BatchTableTestUtil(this, conf = conf)

  def scalaBatchTestUtil(): ScalaBatchTableTestUtil = ScalaBatchTableTestUtil(this)

  def javaBatchTestUtil(): JavaBatchTableTestUtil = JavaBatchTableTestUtil(this)

  /**
   * Asserts that two tables have the same logical plan, comparing the
   * formatted plan strings with temporary table ids normalized.
   */
  def verifyTableEquals(expected: Table, actual: Table): Unit = {
    val expectedString = FlinkRelOptUtil.toString(TableTestUtil.toRelNode(expected))
    val actualString = FlinkRelOptUtil.toString(TableTestUtil.toRelNode(actual))
    assertEquals(
      "Logical plans do not match",
      LogicalPlanFormatUtils.formatTempTableId(expectedString),
      LogicalPlanFormatUtils.formatTempTableId(actualString))
  }
}
abstract class TableTestUtilBase(test: TableTestBase, isStreamingMode: Boolean) {
protected lazy val diffRepository: DiffRepository = DiffRepository.lookup(test.getClass)
protected val setting: EnvironmentSettings = if (isStreamingMode) {
EnvironmentSettings.newInstance().inStreamingMode().build()
} else {
EnvironmentSettings.newInstance().inBatchMode().build()
}
// a counter for unique table names
private var counter = 0L
private def getNextId: Long = {
counter += 1
counter
}
protected def getTableEnv: TableEnvironment
protected def isBounded: Boolean = !isStreamingMode
def getPlanner: PlannerBase = {
getTableEnv.asInstanceOf[TableEnvironmentImpl].getPlanner.asInstanceOf[PlannerBase]
}
/**
* Creates a table with the given DDL SQL string.
*/
def addTable(ddl: String): Unit = {
getTableEnv.executeSql(ddl)
}
/**
* Create a [[DataStream]] with the given schema,
* and registers this DataStream under given name into the TableEnvironment's catalog.
*
* @param name table name
* @param fields field names
* @tparam T field types
* @return returns the registered [[Table]].
*/
def addDataStream[T: TypeInformation](name: String, fields: Expression*): Table = {
val env = new ScalaStreamExecEnv(new LocalStreamEnvironment())
val dataStream = env.fromElements[T]().javaStream
val tableEnv = getTableEnv
TableTestUtil.createTemporaryView(tableEnv, name, dataStream, Some(fields.toArray))
tableEnv.from(name)
}
/**
* Create a [[TestTableSource]] with the given schema,
* and registers this TableSource under a unique name into the TableEnvironment's catalog.
*
* @param fields field names
* @tparam T field types
* @return returns the registered [[Table]].
*/
def addTableSource[T: TypeInformation](fields: Expression*): Table = {
addTableSource[T](s"Table$getNextId", fields: _*)
}
/**
* Create a [[TestTableSource]] with the given schema,
* and registers this TableSource under given name into the TableEnvironment's catalog.
*
* @param name table name
* @param fields field names
* @tparam T field types
* @return returns the registered [[Table]].
*/
def addTableSource[T: TypeInformation](name: String, fields: Expression*): Table = {
val typeInfo: TypeInformation[T] = implicitly[TypeInformation[T]]
val tableSchema = if (fields.isEmpty) {
val fieldTypes: Array[TypeInformation[_]] = typeInfo match {
case tt: TupleTypeInfo[_] => (0 until tt.getArity).map(tt.getTypeAt).toArray
case ct: CaseClassTypeInfo[_] => (0 until ct.getArity).map(ct.getTypeAt).toArray
case at: AtomicType[_] => Array[TypeInformation[_]](at)
case pojo: PojoTypeInfo[_] => (0 until pojo.getArity).map(pojo.getTypeAt).toArray
case _ => throw new TableException(s"Unsupported type info: $typeInfo")
}
val types = fieldTypes.map(TypeConversions.fromLegacyInfoToDataType)
val names = FieldInfoUtils.getFieldNames(typeInfo)
TableSchema.builder().fields(names, types).build()
} else {
TableSchema.fromResolvedSchema(
FieldInfoUtils.getFieldsInfo(typeInfo, fields.toArray).toResolvedSchema)
}
addTableSource(name, new TestTableSource(isBounded, tableSchema))
}
/**
* Create a [[TestTableSource]] with the given schema, table stats and unique keys,
* and registers this TableSource under given name into the TableEnvironment's catalog.
*
* @param name table name
* @param types field types
* @param fields field names
* @return returns the registered [[Table]].
*/
def addTableSource(
name: String,
types: Array[TypeInformation[_]],
fields: Array[String]): Table = {
val schema = new TableSchema(fields, types)
val tableSource = new TestTableSource(isBounded, schema)
addTableSource(name, tableSource)
}
/**
* Register this TableSource under given name into the TableEnvironment's catalog.
*
* @param name table name
* @param tableSource table source
* @return returns the registered [[Table]].
*/
def addTableSource(
name: String,
tableSource: TableSource[_]): Table = {
getTableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
name, tableSource)
getTableEnv.from(name)
}
  /**
   * Registers a [[ScalarFunction]] under given name into the TableEnvironment's catalog.
   *
   * @deprecated Use [[addTemporarySystemFunction]].
   */
  @deprecated
  @Deprecated
  def addFunction(name: String, function: ScalarFunction): Unit = {
    // uses the legacy (pre-FLIP-65) function registration and type inference
    getTableEnv.registerFunction(name, function)
  }
  /**
   * Registers a [[UserDefinedFunction]] instance according to FLIP-65
   * (new type inference stack).
   */
  def addTemporarySystemFunction(name: String, function: UserDefinedFunction): Unit = {
    getTableEnv.createTemporarySystemFunction(name, function)
  }
  /**
   * Registers a [[UserDefinedFunction]] class according to FLIP-65; the framework
   * instantiates the class itself.
   */
  def addTemporarySystemFunction(name: String, function: Class[_ <: UserDefinedFunction]): Unit = {
    getTableEnv.createTemporarySystemFunction(name, function)
  }
/**
* Verify the AST (abstract syntax tree), the optimized rel plan and the optimized exec plan
* for the given SELECT query.
* Note: An exception will be thrown if the given query can't be translated to exec plan.
*/
def verifyPlan(query: String): Unit = {
doVerifyPlan(
query,
Array.empty[ExplainDetail],
withRowType = false,
Array(PlanKind.AST, PlanKind.OPT_REL, PlanKind.OPT_EXEC))
}
/**
* Verify the AST (abstract syntax tree), the optimized rel plan and the optimized exec plan
* for the given SELECT query. The plans will contain the extra [[ExplainDetail]]s.
* Note: An exception will be thrown if the given query can't be translated to exec plan.
*/
def verifyPlan(query: String, extraDetails: ExplainDetail*): Unit = {
doVerifyPlan(
query,
extraDetails.toArray,
withRowType = false,
Array(PlanKind.AST, PlanKind.OPT_REL, PlanKind.OPT_EXEC))
}
/**
* Verify the AST (abstract syntax tree), the optimized rel plan and the optimized exec plan
* for the given INSERT statement.
*/
def verifyPlanInsert(insert: String): Unit = {
doVerifyPlanInsert(
insert,
Array.empty[ExplainDetail],
withRowType = false,
Array(PlanKind.AST, PlanKind.OPT_REL, PlanKind.OPT_EXEC))
}
/**
* Verify the AST (abstract syntax tree), the optimized rel plan and the optimized exec plan
* for the given INSERT statement. The plans will contain the extra [[ExplainDetail]]s.
*/
def verifyPlanInsert(insert: String, extraDetails: ExplainDetail*): Unit = {
doVerifyPlanInsert(
insert,
extraDetails.toArray,
withRowType = false,
Array(PlanKind.AST, PlanKind.OPT_REL, PlanKind.OPT_EXEC))
}
/**
* Verify the AST (abstract syntax tree), the optimized rel plan and the optimized exec plan
* for the given [[Table]].
* Note: An exception will be thrown if the given sql can't be translated to exec plan.
*/
def verifyPlan(table: Table): Unit = {
doVerifyPlan(
table,
Array.empty[ExplainDetail],
withRowType = false,
Array(PlanKind.AST, PlanKind.OPT_REL, PlanKind.OPT_EXEC))
}
/**
* Verify the AST (abstract syntax tree), the optimized rel plan and the optimized exec plan
* for the given [[Table]]. The plans will contain the extra [[ExplainDetail]]s.
* Note: An exception will be thrown if the given sql can't be translated to exec plan.
*/
def verifyPlan(table: Table, extraDetails: ExplainDetail*): Unit = {
doVerifyPlan(
table,
extraDetails.toArray,
withRowType = false,
Array(PlanKind.AST, PlanKind.OPT_REL, PlanKind.OPT_EXEC))
}
/**
* Verify the AST (abstract syntax tree), the optimized rel plan and the optimized exec plan
* for the given [[Table]] with the given sink table name.
* Note: An exception will be thrown if the given sql can't be translated to exec plan.
*/
def verifyPlanInsert(table: Table, sink: TableSink[_], targetPath: String): Unit = {
val stmtSet = getTableEnv.createStatementSet()
getTableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(targetPath, sink)
stmtSet.addInsert(targetPath, table)
verifyPlan(stmtSet)
}
/**
* Verify the AST (abstract syntax tree), the optimized rel plan and the optimized exec plan
* for the given [[Table]] with the given sink table name.
* The plans will contain the extra [[ExplainDetail]]s.
* Note: An exception will be thrown if the given sql can't be translated to exec plan.
*/
def verifyPlanInsert(
table: Table,
sink: TableSink[_],
targetPath: String,
extraDetails: ExplainDetail*): Unit = {
val stmtSet = getTableEnv.createStatementSet()
getTableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(targetPath, sink)
stmtSet.addInsert(targetPath, table)
verifyPlan(stmtSet, extraDetails: _*)
}
/**
* Verify the AST (abstract syntax tree) and the optimized rel plan and the optimized exec plan
* for the given [[StatementSet]]. The plans will contain the extra [[ExplainDetail]]s.
* Note: An exception will be thrown if the given sql can't be translated to exec plan.
*/
def verifyPlan(stmtSet: StatementSet, extraDetails: ExplainDetail*): Unit = {
doVerifyPlan(
stmtSet,
extraDetails.toArray,
withRowType = false,
Array(PlanKind.AST, PlanKind.OPT_REL, PlanKind.OPT_EXEC),
() => Unit)
}
/**
* Verify the AST (abstract syntax tree).
*/
def verifyAstPlan(stmtSet: StatementSet): Unit = {
doVerifyPlan(
stmtSet,
Array.empty[ExplainDetail],
withRowType = false,
Array(PlanKind.AST),
() => Unit)
}
/**
* Verify the AST (abstract syntax tree). The plans will contain the extra [[ExplainDetail]]s.
*/
def verifyAstPlan(stmtSet: StatementSet, extraDetails: ExplainDetail*): Unit = {
doVerifyPlan(
stmtSet,
extraDetails.toArray,
withRowType = false,
Array(PlanKind.AST),
() => Unit)
}
/**
* Verify the AST (abstract syntax tree) and the optimized rel plan for the given SELECT query.
*/
def verifyRelPlan(query: String): Unit = {
doVerifyPlan(
query,
Array.empty[ExplainDetail],
withRowType = false,
Array(PlanKind.AST, PlanKind.OPT_REL))
}
/**
* Verify the AST (abstract syntax tree) and the optimized rel plan for the given SELECT query.
* The plans will contain the extra [[ExplainDetail]]s.
*/
def verifyRelPlan(query: String, extraDetails: ExplainDetail*): Unit = {
doVerifyPlan(
query,
extraDetails.toArray,
withRowType = false,
Array(PlanKind.AST, PlanKind.OPT_REL))
}
/**
* Verify the AST (abstract syntax tree) and the optimized rel plan
* for the given INSERT statement.
*/
def verifyRelPlanInsert(insert: String): Unit = {
doVerifyPlanInsert(
insert,
Array.empty[ExplainDetail],
withRowType = false,
Array(PlanKind.AST, PlanKind.OPT_REL))
}
/**
* Verify the AST (abstract syntax tree) and the optimized rel plan
* for the given INSERT statement. The plans will contain the extra [[ExplainDetail]]s.
*/
def verifyRelPlanInsert(insert: String, extraDetails: ExplainDetail*): Unit = {
doVerifyPlanInsert(
insert,
extraDetails.toArray,
withRowType = false,
Array(PlanKind.AST, PlanKind.OPT_REL))
}
/**
* Verify the AST (abstract syntax tree) and the optimized rel plan for the given [[Table]].
*/
def verifyRelPlan(table: Table): Unit = {
doVerifyPlan(
table,
Array.empty[ExplainDetail],
withRowType = false,
Array(PlanKind.AST, PlanKind.OPT_REL))
}
/**
* Verify the AST (abstract syntax tree) and the optimized rel plan for the given [[Table]].
* The plans will contain the extra [[ExplainDetail]]s.
*/
def verifyRelPlan(table: Table, extraDetails: ExplainDetail*): Unit = {
doVerifyPlan(
table,
extraDetails.toArray,
withRowType = false,
Array(PlanKind.AST, PlanKind.OPT_REL))
}
/**
* Verify the AST (abstract syntax tree) and the optimized rel plan for the given [[Table]]
* with the given sink table name.
*/
def verifyRelPlanInsert(table: Table, sink: TableSink[_], targetPath: String): Unit = {
val stmtSet = getTableEnv.createStatementSet()
getTableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(targetPath, sink)
stmtSet.addInsert(targetPath, table)
verifyRelPlan(stmtSet)
}
/**
* Verify the AST (abstract syntax tree) and the optimized rel plan for the given [[Table]]
* with the given sink table name. The plans will contain the extra [[ExplainDetail]]s.
*/
def verifyRelPlanInsert(
table: Table,
sink: TableSink[_],
targetPath: String,
extraDetails: ExplainDetail*): Unit = {
val stmtSet = getTableEnv.createStatementSet()
getTableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(targetPath, sink)
stmtSet.addInsert(targetPath, table)
verifyRelPlan(stmtSet, extraDetails: _*)
}
/**
* Verify the AST (abstract syntax tree) and the optimized rel plan
* for the given [[StatementSet]].
*/
def verifyRelPlan(stmtSet: StatementSet): Unit = {
doVerifyPlan(
stmtSet,
Array.empty[ExplainDetail],
withRowType = false,
Array(PlanKind.AST, PlanKind.OPT_REL),
() => Unit)
}
/**
* Verify the AST (abstract syntax tree) and the optimized rel plan
* for the given [[StatementSet]]. The plans will contain the extra [[ExplainDetail]]s.
*/
def verifyRelPlan(stmtSet: StatementSet, extraDetails: ExplainDetail*): Unit = {
doVerifyPlan(
stmtSet,
extraDetails.toArray,
withRowType = false,
Array(PlanKind.AST, PlanKind.OPT_REL),
() => Unit)
}
/**
* Verify the AST (abstract syntax tree) and the optimized rel plan for the given SELECT query.
* The rel plans will contain the output type ([[org.apache.calcite.rel.type.RelDataType]]).
*/
def verifyRelPlanWithType(query: String): Unit = {
doVerifyPlan(
query,
Array.empty[ExplainDetail],
withRowType = true,
Array(PlanKind.AST, PlanKind.OPT_REL))
}
/**
* Verify the AST (abstract syntax tree) and the optimized rel plan for the given [[Table]].
* The rel plans will contain the output type ([[org.apache.calcite.rel.type.RelDataType]]).
*/
def verifyRelPlanWithType(table: Table): Unit = {
doVerifyPlan(
table,
Array.empty[ExplainDetail],
withRowType = true,
Array(PlanKind.AST, PlanKind.OPT_REL))
}
/**
* Verify the AST (abstract syntax tree) and the optimized rel plan
* for the given [[StatementSet]].
* The rel plans will contain the output type ([[org.apache.calcite.rel.type.RelDataType]]).
*/
def verifyRelPlanWithType(stmtSet: StatementSet): Unit = {
doVerifyPlan(
stmtSet,
Array.empty[ExplainDetail],
withRowType = true,
Array(PlanKind.AST, PlanKind.OPT_REL),
() => Unit)
}
/**
* Verify whether the optimized rel plan for the given SELECT query
* does not contain the `notExpected` strings.
*/
def verifyRelPlanNotExpected(query: String, notExpected: String*): Unit = {
verifyRelPlanNotExpected(getTableEnv.sqlQuery(query), notExpected: _*)
}
/**
* Verify whether the optimized rel plan for the given [[Table]]
* does not contain the `notExpected` strings.
*/
def verifyRelPlanNotExpected(table: Table, notExpected: String*): Unit = {
require(notExpected.nonEmpty)
val relNode = TableTestUtil.toRelNode(table)
val optimizedRel = getPlanner.optimize(relNode)
val optimizedPlan = getOptimizedRelPlan(Array(optimizedRel), Array.empty, withRowType = false)
val result = notExpected.forall(!optimizedPlan.contains(_))
val message = s"\\nactual plan:\\n$optimizedPlan\\nnot expected:\\n${notExpected.mkString(", ")}"
assertTrue(message, result)
}
/**
* Verify the AST (abstract syntax tree) and the optimized exec plan for the given SELECT query.
* Note: An exception will be thrown if the given sql can't be translated to exec plan.
*/
def verifyExecPlan(query: String): Unit = {
doVerifyPlan(
query,
Array.empty[ExplainDetail],
withRowType = false,
Array(PlanKind.AST, PlanKind.OPT_EXEC))
}
/**
* Verify the AST (abstract syntax tree) and the optimized exec plan
* for the given INSERT statement.
* Note: An exception will be thrown if the given sql can't be translated to exec plan.
*/
def verifyExecPlanInsert(insert: String): Unit = {
doVerifyPlanInsert(
insert,
Array.empty[ExplainDetail],
withRowType = false,
Array(PlanKind.AST, PlanKind.OPT_EXEC))
}
/**
* Verify the AST (abstract syntax tree) and the optimized exec plan for the given [[Table]].
* Note: An exception will be thrown if the given sql can't be translated to exec plan.
*/
def verifyExecPlan(table: Table): Unit = {
doVerifyPlan(
table,
Array.empty[ExplainDetail],
withRowType = false,
Array(PlanKind.AST, PlanKind.OPT_EXEC))
}
/**
* Verify the AST (abstract syntax tree) and the optimized exec plan
* for the given [[Table]] with the given sink table name.
* Note: An exception will be thrown if the given sql can't be translated to exec plan.
*/
def verifyExecPlanInsert(table: Table, sink: TableSink[_], targetPath: String): Unit = {
getTableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(targetPath, sink)
val stmtSet = getTableEnv.createStatementSet()
stmtSet.addInsert(targetPath, table)
verifyExecPlan(stmtSet)
}
/**
* Verify the AST (abstract syntax tree) and the optimized exec plan
* for the given [[StatementSet]].
* Note: An exception will be thrown if the given sql can't be translated to exec plan.
*/
def verifyExecPlan(stmtSet: StatementSet): Unit = {
doVerifyPlan(
stmtSet,
Array.empty[ExplainDetail],
withRowType = false,
Array(PlanKind.AST, PlanKind.OPT_EXEC),
() => Unit)
}
/**
* Verify the explain result for the given SELECT query. See more about [[Table#explain()]].
*/
def verifyExplain(query: String): Unit = verifyExplain(getTableEnv.sqlQuery(query))
/**
* Verify the explain result for the given SELECT query. The explain result will contain
* the extra [[ExplainDetail]]s. See more about [[Table#explain()]].
*/
def verifyExplain(query: String, extraDetails: ExplainDetail*): Unit = {
val table = getTableEnv.sqlQuery(query)
verifyExplain(table, extraDetails: _*)
}
/**
* Verify the explain result for the given INSERT statement.
* See more about [[StatementSet#explain()]].
*/
def verifyExplainInsert(insert: String): Unit = {
val statSet = getTableEnv.createStatementSet()
statSet.addInsertSql(insert)
verifyExplain(statSet)
}
/**
* Verify the explain result for the given INSERT statement. The explain result will contain
* the extra [[ExplainDetail]]s. See more about [[StatementSet#explain()]].
*/
def verifyExplainInsert(insert: String, extraDetails: ExplainDetail*): Unit = {
val statSet = getTableEnv.createStatementSet()
statSet.addInsertSql(insert)
verifyExplain(statSet, extraDetails: _*)
}
  /**
   * Verify the explain result for the given sql clause which represents a [[ModifyOperation]].
   */
  def verifyExplainSql(sql: String): Unit = {
    // parse the sql; only the first resulting operation is verified and it is
    // expected to be a ModifyOperation (e.g. an INSERT)
    val operations = getTableEnv.asInstanceOf[TableEnvironmentImpl].getParser.parse(sql)
    val relNode = TableTestUtil.toRelNode(
      getTableEnv,
      operations.get(0).asInstanceOf[ModifyOperation])
    assertPlanEquals(
      Array(relNode),
      Array.empty[ExplainDetail],
      withRowType = false,
      Array(PlanKind.AST, PlanKind.OPT_REL),
      // also record/compare the original sql text in the diff repository
      () => assertEqualsOrExpand("sql", sql))
  }
/**
* Verify the explain result for the given [[Table]]. See more about [[Table#explain()]].
*/
def verifyExplain(table: Table): Unit = {
doVerifyExplain(table.explain())
}
/**
* Verify the explain result for the given [[Table]]. The explain result will contain
* the extra [[ExplainDetail]]s. See more about [[Table#explain()]].
*/
def verifyExplain(table: Table, extraDetails: ExplainDetail*): Unit = {
doVerifyExplain(table.explain(extraDetails: _*), extraDetails: _*)
}
/**
* Verify the explain result for the given [[Table]] with the given sink table name.
* See more about [[StatementSet#explain()]].
*/
def verifyExplainInsert(table: Table, sink: TableSink[_], targetPath: String): Unit = {
getTableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(targetPath, sink)
val stmtSet = getTableEnv.createStatementSet()
stmtSet.addInsert(targetPath, table)
verifyExplain(stmtSet)
}
/**
* Verify the explain result for the given [[Table]] with the given sink table name.
* The explain result will contain the extra [[ExplainDetail]]s.
* See more about [[StatementSet#explain()]].
*/
def verifyExplainInsert(
table: Table,
sink: TableSink[_],
targetPath: String,
extraDetails: ExplainDetail*): Unit = {
getTableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(targetPath, sink)
val stmtSet = getTableEnv.createStatementSet()
stmtSet.addInsert(targetPath, table)
verifyExplain(stmtSet, extraDetails: _*)
}
/**
* Verify the explain result for the given [[StatementSet]].
* See more about [[StatementSet#explain()]].
*/
def verifyExplain(stmtSet: StatementSet): Unit = {
doVerifyExplain(stmtSet.explain())
}
/**
* Verify the explain result for the given [[StatementSet]]. The explain result will contain
* the extra [[ExplainDetail]]s. See more about [[StatementSet#explain()]].
*/
def verifyExplain(stmtSet: StatementSet, extraDetails: ExplainDetail*): Unit = {
doVerifyExplain(stmtSet.explain(extraDetails: _*), extraDetails: _*)
}
  // env var: when set to "true", all json plan golden files are regenerated instead of compared
  final val PLAN_TEST_FORCE_OVERWRITE = "PLAN_TEST_FORCE_OVERWRITE"
  /**
   * Verify the json plan for the given insert statement against a golden file stored
   * under `src/test/resources/<TestClass>_jsonplan/<testMethod>.out`.
   * If the file is missing (or overwrite is forced) it is regenerated and the test fails,
   * so a regeneration is never silent.
   */
  def verifyJsonPlan(insert: String): Unit = {
    // reset the id counter so generated exec node ids are stable across tests
    ExecNodeContext.resetIdCounter()
    val jsonPlan = getTableEnv.asInstanceOf[TableEnvironmentInternal].compilePlanSql(insert)
    // mask the Flink version: it changes with every release and would break the comparison
    val jsonPlanWithoutFlinkVersion = TableTestUtil.replaceFlinkVersion(jsonPlan.asJsonString())
    // add the postfix to the path to avoid conflicts
    // between the test class name and the result file name
    val clazz = test.getClass
    val testClassDirPath = clazz.getName.replaceAll("\\.", "/") + "_jsonplan"
    val testMethodFileName = test.testName.getMethodName + ".out"
    val resourceTestFilePath = s"/$testClassDirPath/$testMethodFileName"
    // derive the module's source tree from the compiled test-classes location
    val plannerDirPath = clazz.getResource("/").getFile.replace("/target/test-classes/", "")
    val file = new File(s"$plannerDirPath/src/test/resources$resourceTestFilePath")
    val path = file.toPath
    if (!file.exists() || "true".equalsIgnoreCase(System.getenv(PLAN_TEST_FORCE_OVERWRITE))) {
      // (re)create the golden file, then fail so the regeneration is visible
      Files.deleteIfExists(path)
      file.getParentFile.mkdirs()
      assertTrue(file.createNewFile())
      val prettyJson = TableTestUtil.getPrettyJson(jsonPlanWithoutFlinkVersion)
      Files.write(path, prettyJson.getBytes)
      fail(s"$testMethodFileName regenerated.")
    } else {
      val expected = String.join("\n", Files.readAllLines(path))
      // exec node ids are unstable; normalize them on both sides before comparing
      assertEquals(
        TableTestUtil.replaceExecNodeId(
          TableTestUtil.getFormattedJson(expected)),
        TableTestUtil.replaceExecNodeId(
          TableTestUtil.getFormattedJson(jsonPlanWithoutFlinkVersion)))
    }
  }
  /**
   * Verify the given query and the expected plans translated from the SELECT query.
   *
   * @param query the SELECT query to check
   * @param extraDetails the extra [[ExplainDetail]]s the plans should contain
   * @param withRowType whether the rel plans contain the output type
   * @param expectedPlans the expected [[PlanKind]]s to check
   */
  def doVerifyPlan(
      query: String,
      extraDetails: Array[ExplainDetail],
      withRowType: Boolean,
      expectedPlans: Array[PlanKind]): Unit = {
    val table = getTableEnv.sqlQuery(query)
    val relNode = TableTestUtil.toRelNode(table)
    assertPlanEquals(
      Array(relNode),
      extraDetails,
      withRowType,
      expectedPlans,
      // also record/compare the original sql text in the diff repository
      () => assertEqualsOrExpand("sql", query))
  }
  /**
   * Verify the given query and the expected plans translated from the INSERT statement.
   *
   * @param insert the INSERT statement to check
   * @param extraDetails the extra [[ExplainDetail]]s the plans should contain
   * @param withRowType whether the rel plans contain the output type
   * @param expectedPlans the expected [[PlanKind]]s to check
   */
  def doVerifyPlanInsert(
      insert: String,
      extraDetails: Array[ExplainDetail],
      withRowType: Boolean,
      expectedPlans: Array[PlanKind]): Unit = {
    // wrap the single INSERT into a statement set and reuse the statement-set path
    val stmtSet = getTableEnv.createStatementSet()
    stmtSet.addInsertSql(insert)
    doVerifyPlan(
      stmtSet,
      extraDetails,
      withRowType,
      expectedPlans,
      // also record/compare the original sql text in the diff repository
      () => assertEqualsOrExpand("sql", insert))
  }
  /**
   * Verify the expected plans translated from the given [[Table]].
   *
   * @param table the [[Table]] to check
   * @param extraDetails the extra [[ExplainDetail]]s the plans should contain
   * @param withRowType whether the rel plans contain the output type
   * @param expectedPlans the expected [[PlanKind]]s to check
   */
  def doVerifyPlan(
      table: Table,
      extraDetails: Array[ExplainDetail],
      withRowType: Boolean = false,
      expectedPlans: Array[PlanKind]): Unit = {
    val relNode = TableTestUtil.toRelNode(table)
    // no sql text is associated with a Table, hence the no-op sql-check function
    assertPlanEquals(Array(relNode), extraDetails, withRowType, expectedPlans, () => {})
  }
  /**
   * Verify the expected plans translated from the given [[StatementSet]].
   *
   * @param stmtSet the [[StatementSet]] to check
   * @param extraDetails the extra [[ExplainDetail]]s the plans should contain
   * @param withRowType whether the rel plans contain the output type
   * @param expectedPlans the expected [[PlanKind]]s to check
   * @param assertSqlEqualsOrExpandFunc the function to check whether the sql equals to the expected
   *                                    if the `stmtSet` is only translated from sql
   */
  def doVerifyPlan(
      stmtSet: StatementSet,
      extraDetails: Array[ExplainDetail],
      withRowType: Boolean,
      expectedPlans: Array[PlanKind],
      assertSqlEqualsOrExpandFunc: () => Unit): Unit = {
    // TestingStatementSet exposes the collected modify operations for translation
    val testStmtSet = stmtSet.asInstanceOf[TestingStatementSet]
    val relNodes = testStmtSet.getOperations.map(getPlanner.translateToRel)
    if (relNodes.isEmpty) {
      throw new TableException("No output table have been created yet. " +
        "A program needs at least one output table that consumes data.\n" +
        "Please create output table(s) for your program")
    }
    assertPlanEquals(
      relNodes.toArray,
      extraDetails,
      withRowType,
      expectedPlans,
      assertSqlEqualsOrExpandFunc)
  }
  /**
   * Verify the expected plans translated from the given [[RelNode]]s.
   *
   * @param relNodes the original (un-optimized) [[RelNode]]s to check
   * @param extraDetails the extra [[ExplainDetail]]s the plans should contain
   * @param withRowType whether the rel plans contain the output type
   * @param expectedPlans the expected [[PlanKind]]s to check
   * @param assertSqlEqualsOrExpandFunc the function to check whether the sql equals to the expected
   *                                    if the `relNodes` are translated from sql
   */
  private def assertPlanEquals(
      relNodes: Array[RelNode],
      extraDetails: Array[ExplainDetail],
      withRowType: Boolean,
      expectedPlans: Array[PlanKind],
      assertSqlEqualsOrExpandFunc: () => Unit): Unit = {
    // build ast plan: render every (un-optimized) sink tree, one after another
    val astBuilder = new StringBuilder
    relNodes.foreach { sink =>
      astBuilder
        .append(System.lineSeparator)
        .append(FlinkRelOptUtil.toString(
          sink, SqlExplainLevel.EXPPLAN_ATTRIBUTES, withRowType = withRowType))
    }
    val astPlan = astBuilder.toString()
    // build optimized rel plan (all sinks are optimized together)
    val optimizedRels = getPlanner.optimize(relNodes)
    val optimizedRelPlan = System.lineSeparator +
      getOptimizedRelPlan(optimizedRels.toArray, extraDetails, withRowType = withRowType)
    // build optimized exec plan if `expectedPlans` contains OPT_EXEC
    // (translateToExecNodeGraph may throw, which is part of the verification)
    val optimizedExecPlan = if (expectedPlans.contains(PlanKind.OPT_EXEC)) {
      val execGraph = getPlanner.translateToExecNodeGraph(optimizedRels)
      System.lineSeparator + ExecNodePlanDumper.dagToString(execGraph)
    } else {
      ""
    }
    // check whether the sql equals to the expected if the `relNodes` are translated from sql
    assertSqlEqualsOrExpandFunc()
    // check ast plan
    if (expectedPlans.contains(PlanKind.AST)) {
      assertEqualsOrExpand("ast", astPlan)
    }
    // check optimized rel plan
    if (expectedPlans.contains(PlanKind.OPT_REL)) {
      assertEqualsOrExpand("optimized rel plan", optimizedRelPlan, expand = false)
    }
    // check optimized exec plan
    if (expectedPlans.contains(PlanKind.OPT_EXEC)) {
      assertEqualsOrExpand("optimized exec plan", optimizedExecPlan, expand = false)
    }
  }
private def doVerifyExplain(explainResult: String, extraDetails: ExplainDetail*): Unit = {
def replace(result: String, explainDetail: ExplainDetail): String = {
val replaced = explainDetail match {
case ExplainDetail.ESTIMATED_COST => replaceEstimatedCost(result)
case ExplainDetail.JSON_EXECUTION_PLAN =>
replaceNodeIdInOperator(replaceStreamNodeId(replaceStageId(result)))
case _ => result
}
replaced
}
var replacedResult = explainResult
extraDetails.foreach {
detail =>
replacedResult = replace(replacedResult, detail)
}
assertEqualsOrExpand("explain", TableTestUtil.replaceStageId(replacedResult), expand = false)
}
  /**
   * Renders the given optimized [[RelNode]]s as a single plan string.
   * Estimated costs are masked because they are unstable across runs.
   *
   * @param optimizedRels the optimized rel nodes, must be non-empty
   * @param extraDetails controls the explain level and changelog-trait rendering
   * @param withRowType whether the plan contains the output row type
   */
  protected def getOptimizedRelPlan(
      optimizedRels: Array[RelNode],
      extraDetails: Array[ExplainDetail],
      withRowType: Boolean): String = {
    require(optimizedRels.nonEmpty)
    // ESTIMATED_COST requires the full attribute set to be printed
    val explainLevel = if (extraDetails.contains(ExplainDetail.ESTIMATED_COST)) {
      SqlExplainLevel.ALL_ATTRIBUTES
    } else {
      SqlExplainLevel.EXPPLAN_ATTRIBUTES
    }
    val withChangelogTraits = extraDetails.contains(ExplainDetail.CHANGELOG_MODE)
    val optimizedPlan = optimizedRels.head match {
      case _: RelNode =>
        optimizedRels.map { rel =>
          FlinkRelOptUtil.toString(
            rel,
            detailLevel = explainLevel,
            withChangelogTraits = withChangelogTraits,
            withRowType = withRowType)
        }.mkString("\n")
      // defensive branch: statically unreachable since the array is typed RelNode,
      // kept to give a clear error if the element type ever changes
      case o =>
        throw new TableException("The expected optimized plan is RelNode plan, " +
          s"actual plan is ${o.getClass.getSimpleName} plan.")
    }
    replaceEstimatedCost(optimizedPlan)
  }
/**
* Replace the estimated costs for the given plan, because it may be unstable.
*/
protected def replaceEstimatedCost(s: String): String = {
var str = s.replaceAll("\\\\r\\\\n", "\\n")
val scientificFormRegExpr = "[+-]?[\\\\d]+([\\\\.][\\\\d]*)?([Ee][+-]?[0-9]{0,2})?"
str = str.replaceAll(s"rowcount = $scientificFormRegExpr", "rowcount = ")
str = str.replaceAll(s"$scientificFormRegExpr rows", "rows")
str = str.replaceAll(s"$scientificFormRegExpr cpu", "cpu")
str = str.replaceAll(s"$scientificFormRegExpr io", "io")
str = str.replaceAll(s"$scientificFormRegExpr network", "network")
str = str.replaceAll(s"$scientificFormRegExpr memory", "memory")
str
}
  /**
   * Compares `actual` against the diff-repository entry for `tag`, or records it.
   *
   * @param tag the placeholder name inside the diff repository (compared as `${tag}`)
   * @param actual the actual text produced by the test
   * @param expand if false, only assert; if true, write the actual value into the
   *               repository when no expected value has been recorded yet
   */
  protected def assertEqualsOrExpand(tag: String, actual: String, expand: Boolean = true): Unit = {
    val expected = s"$${$tag}"
    if (!expand) {
      diffRepository.assertEquals(test.name.getMethodName, tag, expected, actual)
      return
    }
    val expanded = diffRepository.expand(test.name.getMethodName, tag, expected)
    if (expanded != null && !expanded.equals(expected)) {
      // expected does exist, check result
      diffRepository.assertEquals(test.name.getMethodName, tag, expected, actual)
    } else {
      // expected does not exist, update
      diffRepository.expand(test.name.getMethodName, tag, actual)
    }
  }
}
/**
 * Base test utility backed by a [[TestingTableEnvironment]]; batch and stream
 * specializations derive from this class.
 */
abstract class TableTestUtil(
    test: TableTestBase,
    // determines if the table environment should work in a batch or streaming mode
    isStreamingMode: Boolean,
    catalogManager: Option[CatalogManager] = None,
    val tableConfig: TableConfig)
  extends TableTestUtilBase(test, isStreamingMode) {
  // the testing environment must be created before `env` below, which is derived from it
  protected val testingTableEnv: TestingTableEnvironment =
    TestingTableEnvironment.create(setting, catalogManager, tableConfig)
  val tableEnv: TableEnvironment = testingTableEnv
  // force pipelined shuffles so plans are stable regardless of the default shuffle mode
  tableEnv.getConfig
    .getConfiguration
    .set(ExecutionOptions.BATCH_SHUFFLE_MODE, BatchShuffleMode.ALL_EXCHANGES_PIPELINED)
  private val env: StreamExecutionEnvironment = getPlanner.getExecEnv
  override def getTableEnv: TableEnvironment = tableEnv
  def getStreamEnv: StreamExecutionEnvironment = env
  /**
   * Create a [[TestTableSource]] with the given schema, table stats and unique keys,
   * and registers this TableSource under given name into the TableEnvironment's catalog.
   *
   * @param name table name
   * @param types field types
   * @param fields field names
   * @param statistic statistic of current table
   * @return returns the registered [[Table]].
   */
  def addTableSource(
      name: String,
      types: Array[TypeInformation[_]],
      fields: Array[String],
      statistic: FlinkStatistic = FlinkStatistic.UNKNOWN): Table = {
    val schema = new TableSchema(fields, types)
    val tableSource = new TestTableSource(isBounded, schema)
    addTableSource(name, tableSource, statistic)
  }
  /**
   * Register this TableSource under given name into the TableEnvironment's catalog.
   *
   * @param name table name
   * @param tableSource table source
   * @param statistic statistic of current table
   * @return returns the registered [[Table]].
   */
  def addTableSource(
      name: String,
      tableSource: TableSource[_],
      statistic: FlinkStatistic): Table = {
    // TODO RichTableSourceQueryOperation should be deleted and use registerTableSourceInternal
    //  method instead of registerTable method here after unique key in TableSchema is ready
    //  and setting catalog statistic to TableSourceTable in DatabaseCalciteSchema is ready
    val identifier = ObjectIdentifier.of(
      testingTableEnv.getCurrentCatalog,
      testingTableEnv.getCurrentDatabase,
      name)
    val operation = new RichTableSourceQueryOperation(
      identifier,
      tableSource,
      statistic)
    val table = testingTableEnv.createTable(operation)
    testingTableEnv.registerTable(name, table)
    testingTableEnv.from(name)
  }
  /**
   * Registers a [[TableFunction]] with the legacy type inference.
   *
   * @deprecated Use [[addTemporarySystemFunction()]] for the new type inference.
   */
  @deprecated
  def addFunction[T: TypeInformation](
      name: String,
      function: TableFunction[T]): Unit = testingTableEnv.registerFunction(name, function)
  /**
   * Registers an [[AggregateFunction]] with the legacy type inference.
   *
   * @deprecated Use [[addTemporarySystemFunction()]] for the new type inference.
   */
  @deprecated
  def addFunction[T: TypeInformation, ACC: TypeInformation](
      name: String,
      function: AggregateFunction[T, ACC]): Unit = testingTableEnv.registerFunction(name, function)
  /**
   * Registers a [[TableAggregateFunction]] with the legacy type inference.
   *
   * @deprecated Use [[addTemporarySystemFunction()]] for the new type inference.
   */
  @deprecated
  def addFunction[T: TypeInformation, ACC: TypeInformation](
      name: String,
      function: TableAggregateFunction[T, ACC]): Unit = {
    testingTableEnv.registerFunction(name, function)
  }
}
/**
 * Base test utility backed by the Scala-specific stream [[TableEnvironment]],
 * using the legacy `registerFunction` stack.
 */
abstract class ScalaTableTestUtil(
    test: TableTestBase,
    isStreamingMode: Boolean)
  extends TableTestUtilBase(test, isStreamingMode) {
  // scala env
  val env = new ScalaStreamExecEnv(new LocalStreamEnvironment())
  // scala tableEnv
  val tableEnv: ScalaStreamTableEnv = ScalaStreamTableEnv.create(env, setting)
  override def getTableEnv: TableEnvironment = tableEnv
  /**
   * Registers a [[TableFunction]] under given name into the TableEnvironment's catalog.
   */
  def addFunction[T: TypeInformation](
      name: String,
      function: TableFunction[T]): Unit = tableEnv.registerFunction(name, function)
  /**
   * Registers a [[AggregateFunction]] under given name into the TableEnvironment's catalog.
   */
  def addFunction[T: TypeInformation, ACC: TypeInformation](
      name: String,
      function: AggregateFunction[T, ACC]): Unit = tableEnv.registerFunction(name, function)
  /**
   * Registers a [[TableAggregateFunction]] under given name into the TableEnvironment's catalog.
   */
  def addFunction[T: TypeInformation, ACC: TypeInformation](
      name: String,
      function: TableAggregateFunction[T, ACC]): Unit = tableEnv.registerFunction(name, function)
}
/**
 * Base test utility backed by the Java-specific stream [[TableEnvironment]],
 * using the legacy `registerFunction` stack.
 */
abstract class JavaTableTestUtil(
    test: TableTestBase,
    isStreamingMode: Boolean)
  extends TableTestUtilBase(test, isStreamingMode) {
  // java env
  val env = new LocalStreamEnvironment()
  // java tableEnv
  val tableEnv: JavaStreamTableEnv = JavaStreamTableEnv.create(env, setting)
  override def getTableEnv: TableEnvironment = tableEnv
  /**
   * Registers a [[TableFunction]] under given name into the TableEnvironment's catalog.
   */
  def addFunction[T: TypeInformation](
      name: String,
      function: TableFunction[T]): Unit = tableEnv.registerFunction(name, function)
  /**
   * Registers a [[AggregateFunction]] under given name into the TableEnvironment's catalog.
   */
  def addFunction[T: TypeInformation, ACC: TypeInformation](
      name: String,
      function: AggregateFunction[T, ACC]): Unit = tableEnv.registerFunction(name, function)
  /**
   * Registers a [[TableAggregateFunction]] under given name into the TableEnvironment's catalog.
   */
  def addFunction[T: TypeInformation, ACC: TypeInformation](
      name: String,
      function: TableAggregateFunction[T, ACC]): Unit = tableEnv.registerFunction(name, function)
}
/**
* Utility for stream table test.
*/
case class StreamTableTestUtil(
    test: TableTestBase,
    catalogManager: Option[CatalogManager] = None,
    conf: TableConfig = new TableConfig)
  extends TableTestUtil(test, isStreamingMode = true, catalogManager, conf) {
  /**
   * Register a table with specific row time field and offset.
   *
   * @param tableName table name
   * @param sourceTable table to register
   * @param rowtimeField row time field
   * @param offset offset to the row time field value
   */
  def addTableWithWatermark(
      tableName: String,
      sourceTable: Table,
      rowtimeField: String,
      offset: Long): Unit = {
    val sourceRel = TableTestUtil.toRelNode(sourceTable)
    val rowtimeFieldIdx = sourceRel.getRowType.getFieldNames.indexOf(rowtimeField)
    if (rowtimeFieldIdx < 0) {
      throw new TableException(s"$rowtimeField does not exist, please check it")
    }
    // build the watermark expression: <rowtimeField> - INTERVAL '<offset>' MILLISECOND
    val rexBuilder = sourceRel.getCluster.getRexBuilder
    val inputRef = rexBuilder.makeInputRef(sourceRel, rowtimeFieldIdx)
    val offsetLiteral = rexBuilder.makeIntervalLiteral(
      JBigDecimal.valueOf(offset),
      new SqlIntervalQualifier(TimeUnit.MILLISECOND, null, SqlParserPos.ZERO))
    val expr = rexBuilder.makeCall(FlinkSqlOperatorTable.MINUS, inputRef, offsetLiteral)
    val watermarkAssigner = new LogicalWatermarkAssigner(
      sourceRel.getCluster,
      sourceRel.getTraitSet,
      sourceRel,
      rowtimeFieldIdx,
      expr
    )
    val queryOperation = new PlannerQueryOperation(watermarkAssigner)
    testingTableEnv.registerTable(tableName, testingTableEnv.createTable(queryOperation))
  }
  // Rebuilds the stream optimize program, dropping `firstProgramNameToRemove`
  // and every program registered after it.
  def buildStreamProgram(firstProgramNameToRemove: String): Unit = {
    val program = FlinkStreamProgram.buildProgram(tableEnv.getConfig.getConfiguration)
    var startRemove = false
    program.getProgramNames.foreach {
      name =>
        if (name.equals(firstProgramNameToRemove)) {
          startRemove = true
        }
        if (startRemove) {
          program.remove(name)
        }
    }
    replaceStreamProgram(program)
  }
  // Installs the given optimize program into the table config's planner config.
  def replaceStreamProgram(program: FlinkChainedProgram[StreamOptimizeContext]): Unit = {
    var calciteConfig = TableConfigUtils.getCalciteConfig(tableEnv.getConfig)
    calciteConfig = CalciteConfig.createBuilder(calciteConfig)
      .replaceStreamProgram(program).build()
    tableEnv.getConfig.setPlannerConfig(calciteConfig)
  }
  // Returns the currently configured stream program, or a freshly built default one.
  def getStreamProgram(): FlinkChainedProgram[StreamOptimizeContext] = {
    val tableConfig = tableEnv.getConfig
    val calciteConfig = TableConfigUtils.getCalciteConfig(tableConfig)
    calciteConfig.getStreamProgram.getOrElse(FlinkStreamProgram.buildProgram(
      tableConfig.getConfiguration))
  }
  // Enables mini-batch execution with a 1s allowed latency and batch size 3.
  def enableMiniBatch(): Unit = {
    tableEnv.getConfig.getConfiguration.setBoolean(
      ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ENABLED, true)
    tableEnv.getConfig.getConfiguration.set(
      ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ALLOW_LATENCY, Duration.ofSeconds(1))
    tableEnv.getConfig.getConfiguration.setLong(
      ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_SIZE, 3L)
  }
  // Creates a testing append-only sink configured with the given fields.
  def createAppendTableSink(
      fieldNames: Array[String],
      fieldTypes: Array[LogicalType]): AppendStreamTableSink[Row] = {
    require(fieldNames.length == fieldTypes.length)
    val typeInfos = fieldTypes.map(fromLogicalTypeToTypeInfo)
    new TestingAppendTableSink().configure(fieldNames, typeInfos)
  }
  // Creates a testing upsert sink keyed by the given field indices.
  def createUpsertTableSink(
      keys: Array[Int],
      fieldNames: Array[String],
      fieldTypes: Array[LogicalType]): UpsertStreamTableSink[RowData] = {
    require(fieldNames.length == fieldTypes.length)
    val typeInfos = fieldTypes.map(fromLogicalTypeToTypeInfo)
    new TestingUpsertTableSink(keys).configure(fieldNames, typeInfos)
  }
  // Creates a testing retract sink configured with the given fields.
  def createRetractTableSink(
      fieldNames: Array[String],
      fieldTypes: Array[LogicalType]): RetractStreamTableSink[Row] = {
    require(fieldNames.length == fieldTypes.length)
    val typeInfos = fieldTypes.map(fromLogicalTypeToTypeInfo)
    new TestingRetractTableSink().configure(fieldNames, typeInfos)
  }
}
/**
 * Test utility for stream table programs written against the Scala API.
 */
case class ScalaStreamTableTestUtil(test: TableTestBase) extends ScalaTableTestUtil(test, true)
/**
 * Test utility for stream table programs written against the Java API.
 */
case class JavaStreamTableTestUtil(test: TableTestBase) extends JavaTableTestUtil(test, true)
/**
 * Test utility for batch table programs.
 */
case class BatchTableTestUtil(
    test: TableTestBase,
    catalogManager: Option[CatalogManager] = None,
    conf: TableConfig = new TableConfig)
  extends TableTestUtil(test, isStreamingMode = false, catalogManager, conf) {

  /**
   * Rebuilds the batch optimization program, dropping `firstProgramNameToRemove`
   * and every program registered after it, then installs the result.
   */
  def buildBatchProgram(firstProgramNameToRemove: String): Unit = {
    val program = FlinkBatchProgram.buildProgram(tableEnv.getConfig.getConfiguration)
    // remove the named program together with everything registered after it
    program.getProgramNames
      .dropWhile(name => !name.equals(firstProgramNameToRemove))
      .foreach(program.remove)
    replaceBatchProgram(program)
  }

  /** Installs the given optimization program into the table config. */
  def replaceBatchProgram(program: FlinkChainedProgram[BatchOptimizeContext]): Unit = {
    val newCalciteConfig = CalciteConfig
      .createBuilder(TableConfigUtils.getCalciteConfig(tableEnv.getConfig))
      .replaceBatchProgram(program)
      .build()
    tableEnv.getConfig.setPlannerConfig(newCalciteConfig)
  }

  /** Returns the configured batch program, or a freshly built default one. */
  def getBatchProgram(): FlinkChainedProgram[BatchOptimizeContext] = {
    val tableConfig = tableEnv.getConfig
    TableConfigUtils.getCalciteConfig(tableConfig)
      .getBatchProgram
      .getOrElse(FlinkBatchProgram.buildProgram(tableConfig.getConfiguration))
  }

  /** Creates a row-collecting testing sink; name and type arrays must align. */
  def createCollectTableSink(
      fieldNames: Array[String],
      fieldTypes: Array[LogicalType]): TableSink[Row] = {
    require(fieldNames.length == fieldTypes.length)
    new CollectRowTableSink().configure(fieldNames, fieldTypes.map(fromLogicalTypeToTypeInfo))
  }
}
/**
 * Test utility for batch table programs written against the Scala API.
 */
case class ScalaBatchTableTestUtil(test: TableTestBase) extends ScalaTableTestUtil(test, false)
/**
 * Test utility for batch table programs written against the Java API.
 */
case class JavaBatchTableTestUtil(test: TableTestBase) extends JavaTableTestUtil(test, false)
/**
 * Batch/Stream [[org.apache.flink.table.sources.TableSource]] for testing.
 * Always produces an empty stream; only the schema matters to the tests.
 */
class TestTableSource(override val isBounded: Boolean, schema: TableSchema)
  extends StreamTableSource[Row] {

  // An empty collection source typed with this source's return type.
  override def getDataStream(execEnv: environment.StreamExecutionEnvironment): DataStream[Row] =
    execEnv.fromCollection(List.empty[Row], getReturnType)

  // Row type derived directly from the schema's field names and types.
  override def getReturnType: TypeInformation[Row] =
    new RowTypeInfo(schema.getFieldTypes, schema.getFieldNames)

  override def getTableSchema: TableSchema = schema
}
object TestTableSource {

  /** Registers a [[TestTableSource]] with the given schema under `tableName`. */
  def createTemporaryTable(
      tEnv: TableEnvironment,
      isBounded: Boolean,
      tableSchema: TableSchema,
      tableName: String): Unit = {
    tEnv.asInstanceOf[TableEnvironmentInternal]
      .registerTableSourceInternal(tableName, new TestTableSource(isBounded, tableSchema))
  }
}
/** Factory that builds [[TestTableSource]] instances from descriptor properties. */
class TestTableSourceFactory extends StreamTableSourceFactory[Row] {

  override def createStreamTableSource(
      properties: util.Map[String, String]): StreamTableSource[Row] = {
    val descriptorProperties = new DescriptorProperties
    descriptorProperties.putProperties(properties)
    // "is-bounded" is optional and defaults to an unbounded (streaming) source
    new TestTableSource(
      descriptorProperties.getOptionalBoolean("is-bounded").orElse(false),
      descriptorProperties.getTableSchema(SCHEMA))
  }

  // Matched by connector.type = "TestTableSource" in the table descriptor.
  override def requiredContext(): util.Map[String, String] = {
    val requiredContext = new util.HashMap[String, String]()
    requiredContext.put(CONNECTOR_TYPE, "TestTableSource")
    requiredContext
  }

  // Wildcard: this test factory accepts any property.
  override def supportedProperties(): util.List[String] = {
    val supported = new util.ArrayList[String]()
    supported.add("*")
    supported
  }
}
/**
 * [[TableEnvironmentImpl]] subclass that exposes function-registration helpers
 * and a testing [[StatementSet]] implementation.
 */
class TestingTableEnvironment private(
    catalogManager: CatalogManager,
    moduleManager: ModuleManager,
    tableConfig: TableConfig,
    executor: Executor,
    functionCatalog: FunctionCatalog,
    planner: PlannerBase,
    isStreamingMode: Boolean,
    userClassLoader: ClassLoader)
  extends TableEnvironmentImpl(
    catalogManager,
    moduleManager,
    tableConfig,
    executor,
    functionCatalog,
    planner,
    isStreamingMode,
    userClassLoader) {

  // Test-only helper; to be removed once
  // `<T> void registerFunction(String name, TableFunction<T> tableFunction);`
  // is part of TableEnvironment.
  def registerFunction[T: TypeInformation](name: String, tf: TableFunction[T]): Unit = {
    val resultType = UserDefinedFunctionHelper
      .getReturnTypeOfTableFunction(tf, implicitly[TypeInformation[T]])
    functionCatalog.registerTempSystemTableFunction(name, tf, resultType)
  }

  // Test-only helper; to be removed once
  // `<T, ACC> void registerFunction(String name, AggregateFunction<T, ACC> aggregateFunction);`
  // is part of TableEnvironment.
  def registerFunction[T: TypeInformation, ACC: TypeInformation](
      name: String,
      f: AggregateFunction[T, ACC]): Unit =
    registerImperativeAggregateFunction(name, f)

  // Test-only helper; to be removed once
  // `<T, ACC> void registerFunction(String name, TableAggregateFunction<T, ACC> tableAggFunc);`
  // is part of TableEnvironment.
  def registerFunction[T: TypeInformation, ACC: TypeInformation](
      name: String,
      f: TableAggregateFunction[T, ACC]): Unit =
    registerImperativeAggregateFunction(name, f)

  // Shared registration path for (table) aggregate functions: derives the result
  // and accumulator type information, then registers a temp system function.
  private def registerImperativeAggregateFunction[T: TypeInformation, ACC: TypeInformation](
      name: String,
      f: ImperativeAggregateFunction[T, ACC]): Unit = {
    val resultType = UserDefinedFunctionHelper
      .getReturnTypeOfAggregateFunction(f, implicitly[TypeInformation[T]])
    val accType = UserDefinedFunctionHelper
      .getAccumulatorTypeOfAggregateFunction(f, implicitly[TypeInformation[ACC]])
    functionCatalog.registerTempSystemAggregateFunction(name, f, resultType, accType)
  }

  // Widened to public so tests can build tables from query operations directly.
  override def createTable(tableOperation: QueryOperation): TableImpl =
    super.createTable(tableOperation)

  override def createStatementSet(): StatementSet = new TestingStatementSet(this)
}
/** [[StatementSet]] implementation that records modify operations for inspection in tests. */
class TestingStatementSet(tEnv: TestingTableEnvironment) extends StatementSet {

  // Accumulated modify operations, in insertion order.
  private val operations: util.List[ModifyOperation] = new util.ArrayList[ModifyOperation]

  def getOperations: util.List[ModifyOperation] = operations

  override def addInsertSql(statement: String): StatementSet = {
    val parsed = tEnv.getParser.parse(statement)
    if (parsed.size != 1) {
      throw new TableException("Only single statement is supported.")
    }
    parsed.get(0) match {
      case modify: ModifyOperation =>
        operations.add(modify)
      case _ =>
        throw new TableException("Only insert statement is supported now.")
    }
    this
  }

  override def addInsert(targetPath: String, table: Table): StatementSet =
    this.addInsert(targetPath, table, overwrite = false)

  override def addInsert(targetPath: String, table: Table, overwrite: Boolean): StatementSet = {
    val identifier = tEnv.getCatalogManager.qualifyIdentifier(
      tEnv.getParser.parseIdentifier(targetPath))
    // The target table must already exist in the catalog.
    val targetTable = tEnv.getCatalogManager.getTable(identifier).get()
    operations.add(
      new SinkModifyOperation(
        targetTable,
        table.getQueryOperation,
        util.Collections.emptyMap[String, String],
        overwrite,
        util.Collections.emptyMap[String, String]))
    this
  }

  override def addInsert(descriptor: TableDescriptor, table: Table): StatementSet =
    throw new TableException("Not implemented")

  override def addInsert(
      targetDescriptor: TableDescriptor,
      table: Table,
      overwrite: Boolean): StatementSet =
    throw new TableException("Not implemented")

  override def explain(extraDetails: ExplainDetail*): String =
    tEnv.explainInternal(operations.map(_.asInstanceOf[Operation]), extraDetails: _*)

  override def execute(): TableResult = {
    // Recorded operations are always cleared, even when execution fails.
    try {
      tEnv.executeInternal(operations)
    } finally {
      operations.clear()
    }
  }
}
object TestingTableEnvironment {

  /**
   * Creates a [[TestingTableEnvironment]] from the given settings and config,
   * building a default in-memory catalog manager when none is supplied.
   */
  def create(
      settings: EnvironmentSettings,
      catalogManager: Option[CatalogManager] = None,
      tableConfig: TableConfig): TestingTableEnvironment = {
    tableConfig.addConfiguration(settings.toConfiguration)
    // temporary solution until FLINK-15635 is fixed
    val classLoader = Thread.currentThread.getContextClassLoader
    val moduleManager = new ModuleManager
    // Fall back to a generic in-memory catalog when the caller did not provide one.
    val catalogMgr = catalogManager.getOrElse(
      CatalogManager.newBuilder
        .classLoader(classLoader)
        .config(tableConfig.getConfiguration)
        .defaultCatalog(
          settings.getBuiltInCatalogName,
          new GenericInMemoryCatalog(
            settings.getBuiltInCatalogName,
            settings.getBuiltInDatabaseName))
        .build)
    val functionCatalog = new FunctionCatalog(tableConfig, catalogMgr, moduleManager)
    val executorFactory =
      FactoryUtil.discoverFactory(classLoader, classOf[ExecutorFactory], settings.getExecutor)
    val executor = executorFactory.create(tableConfig.getConfiguration)
    val planner = PlannerFactoryUtil.createPlanner(settings.getPlanner, executor, tableConfig,
      moduleManager, catalogMgr, functionCatalog).asInstanceOf[PlannerBase]
    new TestingTableEnvironment(
      catalogMgr,
      moduleManager,
      tableConfig,
      executor,
      functionCatalog,
      planner,
      settings.isStreamingMode,
      classLoader)
  }
}
/**
 * [[PlanKind]] defines the types of plans to check in test cases.
 *
 * Note: values are declared as an [[Enumeration]]; their declaration order
 * determines their ids, so new kinds should be appended at the end.
 */
object PlanKind extends Enumeration {
  type PlanKind = Value

  /** Abstract Syntax Tree */
  val AST: Value = Value("AST")

  /** Optimized Rel Plan */
  val OPT_REL: Value = Value("OPT_REL")

  /** Optimized Execution Plan */
  val OPT_EXEC: Value = Value("OPT_EXEC")
}
object TableTestUtil {

  // Shared environment settings for stream / batch test setups.
  val STREAM_SETTING: EnvironmentSettings =
    EnvironmentSettings.newInstance().inStreamingMode().build()
  val BATCH_SETTING: EnvironmentSettings = EnvironmentSettings.newInstance().inBatchMode().build()

  /**
   * Convert operation tree in the given table to a RelNode tree.
   */
  def toRelNode(table: Table): RelNode = {
    table.asInstanceOf[TableImpl]
      .getTableEnvironment.asInstanceOf[TableEnvironmentImpl]
      .getPlanner.asInstanceOf[PlannerBase]
      .getRelBuilder.queryOperation(table.getQueryOperation).build()
  }

  /**
   * Convert modify operation to a RelNode tree.
   *
   * @param tEnv            table environment whose planner performs the translation
   * @param modifyOperation the sink/modify operation to translate
   */
  def toRelNode(
      tEnv: TableEnvironment,
      modifyOperation: ModifyOperation): RelNode = {
    val planner = tEnv.asInstanceOf[TableEnvironmentImpl].getPlanner.asInstanceOf[PlannerBase]
    planner.translateToRel(modifyOperation)
  }

  /**
   * Registers the given [[DataStream]] as a temporary view named `name`.
   *
   * @param tEnv           target table environment
   * @param name           view name in the current catalog/database
   * @param dataStream     input data stream
   * @param fields         optional field expressions overriding the derived schema
   * @param fieldNullables optional per-field nullability (defaults to all-nullable)
   * @param statistic      optional statistic (defaults to [[FlinkStatistic.UNKNOWN]])
   * @throws ValidationException if a rowtime attribute is declared but the
   *                             execution environment is not in event-time mode
   */
  def createTemporaryView[T](
      tEnv: TableEnvironment,
      name: String,
      dataStream: DataStream[T],
      fields: Option[Array[Expression]] = None,
      fieldNullables: Option[Array[Boolean]] = None,
      statistic: Option[FlinkStatistic] = None): Unit = {
    val planner = tEnv.asInstanceOf[TableEnvironmentImpl].getPlanner.asInstanceOf[PlannerBase]
    val execEnv = planner.getExecEnv
    val streamType = dataStream.getType
    // get field names and types for all non-replaced fields
    val typeInfoSchema = fields.map((f: Array[Expression]) => {
      val fieldsInfo = FieldInfoUtils.getFieldsInfo(streamType, f)
      // check if event-time is enabled
      if (fieldsInfo.isRowtimeDefined &&
        (execEnv.getStreamTimeCharacteristic ne TimeCharacteristic.EventTime)) {
        throw new ValidationException(String.format(
          "A rowtime attribute requires an EventTime time characteristic in stream " +
            "environment. But is: %s",
          execEnv.getStreamTimeCharacteristic))
      }
      fieldsInfo
    }).getOrElse(FieldInfoUtils.getFieldsInfo(streamType))
    val fieldCnt = typeInfoSchema.getFieldTypes.length
    val dataStreamQueryOperation = new InternalDataStreamQueryOperation(
      ObjectIdentifier.of(tEnv.getCurrentCatalog, tEnv.getCurrentDatabase, name),
      dataStream,
      typeInfoSchema.getIndices,
      typeInfoSchema.toResolvedSchema,
      fieldNullables.getOrElse(Array.fill(fieldCnt)(true)),
      statistic.getOrElse(FlinkStatistic.UNKNOWN)
    )
    val table = tEnv.asInstanceOf[TableEnvironmentImpl].createTable(dataStreamQueryOperation)
    tEnv.registerTable(name, table)
  }

  /**
   * Reads a classpath resource as a single string. `path` may be given with or
   * without a leading slash; it is resolved against the test classpath root.
   */
  def readFromResource(path: String): String = {
    val basePath = getClass.getResource("/").getFile
    val fullPath = if (path.startsWith("/")) {
      s"$basePath${path.substring(1)}"
    } else {
      s"$basePath$path"
    }
    val source = Source.fromFile(fullPath)
    val str = source.mkString
    source.close()
    str
  }

  /**
   * Reads all lines under `path`. Directories are traversed recursively;
   * hidden files contribute no lines.
   */
  def readFromFile(path: String): Seq[String] = {
    val file = new File(path)
    if (file.isDirectory) {
      file.listFiles().foldLeft(Seq.empty[String]) {
        (lines, p) => lines ++ readFromFile(p.getAbsolutePath)
      }
    } else if (file.isHidden) {
      Seq.empty[String]
    } else {
      Files.readAllLines(Paths.get(file.toURI)).toSeq
    }
  }

  /** Parses `json` and returns its compact single-line rendering. */
  @throws[IOException]
  def getFormattedJson(json: String): String = {
    val parser = new ObjectMapper().getFactory.createParser(json)
    val jsonNode: JsonNode = parser.readValueAsTree[JsonNode]
    jsonNode.toString
  }

  /** Parses `json` and returns its pretty-printed rendering. */
  @throws[IOException]
  def getPrettyJson(json: String): String = {
    val parser = new ObjectMapper().getFactory.createParser(json)
    val jsonNode: JsonNode = parser.readValueAsTree[JsonNode]
    jsonNode.toPrettyString
  }

  /**
   * Stage {id} is ignored, because id keeps incrementing in test class
   * while StreamExecutionEnvironment is up
   */
  def replaceStageId(s: String): String = {
    s.replaceAll("\\\\r\\\\n", "\\n").replaceAll("Stage \\\\d+", "")
  }

  /**
   * Stream node {id} is ignored, because id keeps incrementing in test class
   * while StreamExecutionEnvironment is up
   */
  def replaceStreamNodeId(s: String): String = {
    s.replaceAll("\\"id\\" : \\\\d+", "\\"id\\" : ").trim
  }

  /**
   * ExecNode {id} is ignored, because id keeps incrementing in test class.
   * Also normalizes the edge "source"/"target" node references to 0.
   */
  def replaceExecNodeId(s: String): String = {
    s.replaceAll("\\"id\\"\\\\s*:\\\\s*\\\\d+", "\\"id\\": 0")
      .replaceAll("\\"source\\"\\\\s*:\\\\s*\\\\d+", "\\"source\\": 0")
      .replaceAll("\\"target\\"\\\\s*:\\\\s*\\\\d+", "\\"target\\": 0")
  }

  /**
   * Ignore flink version value.
   */
  def replaceFlinkVersion(s: String): String = {
    s.replaceAll("\\"flinkVersion\\":\\"[\\\\w.-]*\\"", "\\"flinkVersion\\":\\"\\"")
  }

  /**
   * Ignore exec node in operator name and description.
   */
  def replaceNodeIdInOperator(s: String): String = {
    s.replaceAll("\\"contents\\" : \\"\\\\[\\\\d+\\\\]:", "\\"contents\\" : \\"[]:")
      .replaceAll("(\\"type\\" : \\".*?)\\\\[\\\\d+\\\\]", "$1[]")
  }
}
| zentol/flink | flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/utils/TableTestBase.scala | Scala | apache-2.0 | 64,123 |
package com.karasiq.nanoboard

import scala.language.postfixOps

// Aggregates the table definitions and the config/post/container query helpers
// so they are all available via a single `model._` import.
package object model extends Tables with ConfigQueries with PostQueries with ContainerQueries
package lila

// Exposes the shared members of PackageObject throughout `lila.evaluation`.
package object evaluation extends PackageObject
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.