code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package satisfaction
package hadoop
package hive.ms
import org.apache.hadoop.hive.ql.metadata.Partition
import hive.ms._
import org.joda.time._
import fs._
import hdfs.HdfsImplicits._
import collection.JavaConversions._
case class HiveTablePartition(
part: Partition)
(implicit val ms : MetaStore,
val hdfs : FileSystem ) extends DataInstance with Markable {
/** Total size in bytes of this partition, as reported by the metastore. */
def size: Long = ms.getPartitionSize(part)

/**
 * Creation time derived from the "created" metadata entry, falling back to
 * "transient_lastDdlTime"; null when neither entry exists (legacy contract).
 */
def created: DateTime = {
  val seconds = getMetaData("created") orElse getMetaData("transient_lastDdlTime")
  seconds.map(sec => msDateTime(sec.toLong)).orNull
}

/// SIC ... is that OK ??? (original author note: access time deliberately aliases modification time)
def lastAccessedTime: DateTime = lastModifiedTime

/** Last modification time from metastore metadata; null when the key is absent. */
def lastModifiedTime: DateTime =
  getMetaData(MetaStore.LastModifiedMetaDataKey).map(sec => msDateTime(sec.toLong)).orNull

/**
 * Converts an epoch value to a DateTime.
 * NOTE(review): despite the parameter name, the input is treated as seconds —
 * it is multiplied by 1000 before being handed to DateTime. Confirm callers
 * always pass seconds.
 */
def msDateTime(msLong: Long): DateTime = new DateTime(msLong * 1000)
/** Witness built from the raw partition parameter map. */
def witness: Witness = Witness(part.getParameters)

/** Last access time recorded on the partition object itself. */
def lastAccessed: DateTime = msDateTime(part.getLastAccessTime)

/** Filesystem location holding this partition's data. */
def path: fs.Path = part.getDataLocation

/** User recorded under "last_modified_by"; null when the entry is absent. */
def lastModifiedBy: String = getMetaData("last_modified_by").orNull

/** Reads one metadata value for this partition from the metastore. */
def getMetaData(key: String): Option[String] = ms.getPartitionMetaData(part, key)

/** Writes one metadata value for this partition to the metastore. */
def setMetaData(key: String, md: String): Unit = ms.setPartitionMetaData(part, key, md)

/** Drops this partition via the metastore (trailing flag presumably requests data deletion — TODO confirm). */
def drop: Unit = ms.dropPartition(part.getTable().getDbName, part.getTable().getTableName(), part, true)
/**
 * Mark that the producer of this DataInstance fully completed.
 */
def markCompleted: Unit = setMetaData(MetaStore.IsCompleteMetaDataKey, "true")

/** Mark that the producer of this DataInstance did not complete. */
def markIncomplete: Unit = setMetaData(MetaStore.IsCompleteMetaDataKey, "false")

/**
 * Check that the Data instance has been marked completed, according to the
 * test of the markable. Consults the current marker key first, then the
 * legacy "isComplete" key; absent both, the partition counts as incomplete.
 */
def isMarkedCompleted: Boolean =
  getMetaData(MetaStore.IsCompleteMetaDataKey)
    .orElse(getMetaData("isComplete"))
    .exists(_.toBoolean)
} | jeromebanks/satisfaction | modules/hive-ms/src/main/scala/satisfaction/hive/ms/HiveTablePartition.scala | Scala | apache-2.0 | 2,892 |
/*
* Copyright 2008, Mark Harrah
*
* This file is part of Frostbridge.
*
* Frostbridge is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 2.1 of the License, or
* (at your option) any later version.
*
* Frostbridge is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Frostbridge. If not, see <http://www.gnu.org/licenses/>.
*/
package net.frostbridge.util
/** Utility methods for truncating a string to a maximum length.*/
object TruncateString
{
/** The maximum number of characters of a string returned from truncate. */
val MaximumLength = 30
/** Middle-elision marker; the truncation arithmetic in apply must be adjusted if the length of this is changed. */
val Separator = " ... "
assume(MaximumLength > 3*Separator.length, "Truncated length must be reasonably larger than the size of the separator.")
/** Ensures that the string 's' is no longer than MaximumLength characters.
 * Longer strings are shortened by removing middle characters and splicing in
 * the Separator, keeping roughly two thirds of the head and one third of the
 * tail of the usable budget.
 */
def apply(s: String): String =
  if (s.length <= MaximumLength) s
  else {
    val budget = MaximumLength - Separator.length
    val tailLength = budget / 3
    val head = s.substring(0, budget - tailLength)
    val tail = s.substring(s.length - tailLength, s.length)
    head + Separator + tail
  }
} | yzhhui/frostbridge | src/main/scala/net/frostbridge/util/Truncate.scala | Scala | lgpl-3.0 | 1,694 |
/*
*
*/
package see.nodes
import see.EvalError
import see.Illegal
import see.Scope
import see.Unary
import see.Unknown
import see.Unresolved
import see.builtins
import see.operations.Symbolize
import see.values._
import see.Scope.CONSTNAME
/** Prototype for local-variable declarations; demands a parameter list. */
private[see] class LocalP extends Proto {
  override def precedence = 0

  // Accepts a single variable or a vector node that qualifies as a parameter
  // list; anything else is not a suitable parameter list.
  override def finish(n: Node): Option[Local] = n match {
    case v: Variable                  => Some(new Local(List(v)))
    case vn: Vnode if vn.isParamList  => Some(new Local(vn.asArgs.get))
    case _                            => None
  }
}
// Node that declares its arguments as local variables,
// which are still undefined at declaration time.
private[see] class Local(locals: Seq[Variable]) extends Leaf {

  // Registers each variable in the scope with a NullVal placeholder and
  // evaluates to false.
  override def evalIn(s: Scope) = {
    locals.foreach(v => s.setLocal(v.name, NullVal))
    Bool.False
  }

  override def simplifyIn(s: Scope): Node = this
  override def isDefinedIn(s: Scope) = true
  override def toString() = "[local(" + locals + ")]"
}
// Prototype for the "defined" operator: wraps the parsed operand in a Defined node.
private[see] class DefinedP extends Proto{
// Precedence 0, matching the other prefix-style prototypes in this file.
override def precedence = 0
override def finish(n: Node) = Some(new Defined(n))
}
// Node that checks, whether its operand is defined within current scope.
private[see] class Defined(o: Node) extends Atom(o) {

  override def evalIn(s: Scope) = Bool(opd isDefinedIn s)

  // A constant operand is always defined, so the whole node folds to true;
  // otherwise keep a Defined wrapper around the simplified operand.
  override def simplifyIn(s: Scope): Node = opd.simplifyIn(s) match {
    case _: Constant => Constant(Bool(true))
    case simple      => new Defined(simple)
  }

  override def isDefinedIn(s: Scope) = true
  override def toString() = "[defined(" + opd + ")]"
}
// Node that defines operation with a single parameter, which may be a vector.
// In fact, all functions are represented as Prefix node with a vector argument.
private[see] class PrefixP(val operator: Unary) extends Proto {
override def precedence = 0
// Symbolize gets a dedicated Indirection node (which is also assignable,
// see Indirection below); every other unary operator becomes a plain Prefix.
override def finish(n: Node) = operator match {
case Symbolize => Some(new Indirection(n))
case _ => Some(new Prefix(n, operator))
}
}
/** Applies a unary operator to the value of its (possibly vector) operand. */
private[see] class Prefix(o: Node, val operator: Unary)
  extends Atom(o) {

  // Evaluate the operand, then apply the operator to the result.
  override def evalIn(s: Scope): Val = operator(s, opd evalIn s)

  // Simplify the operand first; when it reduces to a constant, fold the whole
  // application into a constant (after a stability check on the scope).
  override def simplifyIn(s: Scope): Node = {
    val simpleOpd = opd.simplifyIn(s)
    val rewritten = new Prefix(simpleOpd, operator)
    simpleOpd match {
      case _: Constant => new Constant(s checkStability rewritten)
      case _           => rewritten
    }
  }

  override def isDefinedIn(s: Scope) = opd isDefinedIn s
  override def toString() = "["+ operator.opCode + "(" + opd + ")]"
}
// Indirect (symbolized) reference: the operand evaluates to a name, and the
// node may also appear on the left-hand side of an assignment (LvNode).
private[see] class Indirection(o: Node)
extends Prefix(o, Symbolize) with LvNode
{
override def evalIn(s: Scope) = operator(s, opd evalIn s)
// Assignment target: coerce the operand to a value, use its string form as
// the indirect name, and set it in the scope.
override def setIn(s: Scope, value: Val) {
val iname = s.coerce(opd)
s.iset(iname.toStr, value)
}
// An indirect name may change between evaluations, so it never survives a
// stability check; otherwise only the operand is simplified.
override def simplifyIn(s: Scope): Node = {
if (s.isStableCheck) throw new EvalError("Indirect name is unstable")
val simple = opd.simplifyIn(s)
new Indirection(simple)
}
override def toString() = "[indirect(" + opd + ")]"
}
// Function call node: fname applied to an argument node (a vector for
// multi-argument calls). Resolution prefers scope variables over built-ins.
private[see] class Fcall(val fname: String, args: Node)
extends Atom(args)
{
def argList = opd.asInstanceOf[Vnode]
// Resolution order: a scope variable that already is a Callable, then any
// other scope value coerced to a Callable, finally the built-in table.
override def evalIn(s: Scope): Val = {
// evaluate operand
val arg = opd evalIn s
// we try function variables first:
(s getVar fname) match {
case Some(func: Callable) => func.call(s, arg)
case Some(x) => x.coerce match {
case f: Callable => f.call(s, arg)
case other => throw new Illegal(
"Tried to call '" + fname + "'["+ other.getClass() +
"], which is not callable.")
}
case None => builtins(fname) match {
case None => throw new Unknown(fname)
case Some(op: Unary) => op(s, arg)
}
}
}
// Constant-folds a call only for const-named patterns or built-ins applied to
// constant arguments; "rnd" is never folded because it must stay random.
override def simplifyIn(s: Scope): Node = {
val simple = opd.simplifyIn(s)
val simplified = new Fcall(fname, simple)
if (fname == "rnd") { // never simplify a random generator
if (s.isStableCheck)
throw new EvalError("rnd is unstable")
else return simplified
}
// We cannot simplify user function calls in general,
// because they may be redefined at any time.
// So restrict to const patterns and built-ins.
// In principle, built-ins wouldn't be allowed either,
// but we keep them for performance reasons.
// So if you insist on overwriting built-in functions,
// at least do so early and within a separate expression or live
// with the consequences.
//
if (simple.isInstanceOf[Constant]) {
if (CONSTNAME.matches(fname) || builtins.contains(fname) ) try {
return new Constant(s checkStability simplified)
} catch {
case ex: Exception => if (s.isStableCheck) throw ex // fail ongoing check
}
}
// for simplicity, we only allow built-ins to be eliminated
if (s.isStableCheck && (s.contains(fname) || !builtins.contains(fname)))
throw new Unresolved("Unstable function: " + fname)
simplified
}
// Defined iff the argument is defined and the name resolves to a Callable
// variable or a built-in; a non-callable scope binding counts as undefined.
override def isDefinedIn(s: Scope) = {
(opd isDefinedIn s) && ((s getVar fname) match {
case Some(func: Callable) => func.isDefinedIn(s)
case Some(x) => false
case None => builtins.contains(fname)
} )
}
override def toString() = "["+ fname + "(" + opd + ")]"
}
| RayRacine/scee | src/main/scala/see/nodes/Atoms.scala | Scala | bsd-3-clause | 5,298 |
package org.sapia.mediafire.core
/**
* Holds the Mediafire credentials.
*
* @author yduchesne
*/
class MfCredentials(apiKey: String, email: String, password: String, applicationId: String) {
/** The user's Mediafire API key. */
def getApiKey(): String = apiKey

/** The user's account email. */
def getEmail(): String = email

/** The user's account password. */
def getPassword(): String = password

/** The user's application ID. */
def getApplicationId(): String = applicationId
} | sapia-oss/mediafire_maven_plugin | core/src/main/scala/org/sapia/mediafire/core/MfCredentials.scala | Scala | apache-2.0 | 511 |
package org.jetbrains.plugins.scala
package lang
package completion
package filters
package expression
import com.intellij.psi._
import com.intellij.psi.filters.ElementFilter
import org.jetbrains.annotations.NonNls
import org.jetbrains.plugins.scala.extensions.ObjectExt
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.psi.api.expr._
/**
* @author Alexander Podkhalyuzin
* @since 22.05.2008
*/
class ElseFilter extends ElementFilter {
import ScalaCompletionUtil._
// Accepts the completion position iff appending "else" there would parse:
// the surrounding if-expression's text is reconstructed, " else true" is
// appended, and checkElseWith decides whether the result is valid.
override def isAcceptable(element: Object, context: PsiElement): Boolean = {
if (context.is[PsiComment]) return false
val leaf = getLeafByOffset(context.getTextRange.getStartOffset, context)
if (leaf != null) {
var parent = leaf.getParent
if (parent.is[ScExpression] && parent.getPrevSibling != null &&
parent.getPrevSibling.getPrevSibling != null) {
// The `if` may be the immediate previous sibling, or separated from the
// caret expression by a single whitespace element.
val ifStmt = parent.getPrevSibling match {
case x: ScIf => x
case x if x.is[PsiWhiteSpace] || x.getNode.getElementType == ScalaTokenTypes.tWHITE_SPACE_IN_LINE =>
x.getPrevSibling match {
case x: ScIf => x
case _ => null
}
case _ => null
}
var text = ""
if (ifStmt == null) {
// No sibling `if`: climb to an enclosing ScIf and splice " else true"
// into its literal text instead.
while (parent != null && !parent.is[ScIf]) parent = parent.getParent
if (parent == null) return false
text = parent.getText
text = replaceLiteral(text, " else true")
} else {
text = ifStmt.getText + " else true"
}
return checkElseWith(text, context = parent)
}
}
false
}
// Any hint class may host this filter; acceptance is decided per-element above.
override def isClassAcceptable(hintClass: java.lang.Class[_]): Boolean = true
@NonNls
override def toString: String = "else keyword filter"
} | JetBrains/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/lang/completion/filters/expression/ElseFilter.scala | Scala | apache-2.0 | 1,813 |
package com.stulsoft.kafka3
import java.util.{Collections, Properties}
import com.typesafe.scalalogging.LazyLogging
import org.apache.kafka.clients.consumer.KafkaConsumer
import org.apache.kafka.common.TopicPartition
import scala.concurrent.Future
/** With manual offset setting
* @author Yuriy Stul.
*/
/** Kafka consumer with manual offset setting ("enable.auto.commit" is false).
 *
 * @param topic topic to subscribe to
 * @author Yuriy Stul.
 */
final class Consumer2(val topic: String) extends LazyLogging {
  // Poll-loop flag; @volatile so stop() called from another thread is
  // guaranteed to be observed by the loop running on the execution context.
  @volatile private var continueExecuting = false

  import scala.concurrent.ExecutionContext.Implicits.global

  /**
   * Starts the poll loop asynchronously and logs every received record.
   * The returned Future completes after stop() flips the flag; the consumer
   * is always closed, even when polling throws.
   */
  def start(): Future[Unit] = Future {
    logger.info("Started Consumer2")
    continueExecuting = true
    val props = new Properties
    props.put("bootstrap.servers", "localhost:9092,localhost:9093")
    props.put("group.id", "test")
    // props.put("enable.auto.commit", "true")
    props.put("enable.auto.commit", "false")
    props.put("auto.offset.reset", "earliest")
    props.put("auto.commit.interval.ms", "1000")
    props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    val consumer = new KafkaConsumer[String, String](props)
    try {
      consumer.subscribe(Collections.singletonList(topic))
      val partition = new TopicPartition(topic, 0)
      // Manual positioning examples kept from experiments:
      // consumer.seek(partition, 62)
      // consumer.seekToBeginning(Collections.singletonList(partition))
      while (continueExecuting) {
        val records = consumer.poll(100)
        records.forEach(record => {
          val resultText = s"""Received message.\\n\\tPartition = ${record.partition()}, offset is ${record.offset}, topic is "${record.topic()}" key is "${record.key}", value is "${record.value}""""
          logger.info(resultText)
        })
        // consumer.commitSync()
      }
    } finally {
      // Fix: the consumer was previously never closed, leaking sockets and
      // buffers; close it on normal exit and on error alike.
      consumer.close()
    }
    logger.info("Stopped Consumer2")
  }

  /** Requests the poll loop to terminate after the current poll cycle. */
  def stop(): Unit = {
    logger.info("Stopping Consumer2")
    continueExecuting = false
  }
}
| ysden123/poc | pkafka/kafka3/src/main/scala/com/stulsoft/kafka3/Consumer2.scala | Scala | mit | 1,881 |
package ir.bama
/**
* @author ahmad
*/
package object repositories {
// Sort direction for repository queries; values render as "ASC" / "DESC".
// Kept as an Enumeration (not a sealed ADT) so existing withName/toString
// usages stay intact.
object SortOrder extends Enumeration {
val ASC = Value("ASC")
val DESC = Value("DESC")
type SortOrder = Value
}
}
| ahmadmo/bama-api-demo | app/ir/bama/repositories/package.scala | Scala | apache-2.0 | 206 |
package com.readytalk.metrics
import org.slf4j.LoggerFactory
object CromwellStatsD {
// Shared logger for debug output about metrics dropped by the filter below.
val logger = LoggerFactory.getLogger("StatsDLogger")
}
/**
 * Filters out unwanted timing metrics.
 * The package is funky on purpose because the StatsD constructor is package private so we need to be in this package to invoke it.
 * Taken from rawls.
 */
case class CromwellStatsD(hostname: String, port: Int) extends StatsD(hostname, port) {
  // Metric-name suffixes that must never be forwarded to StatsD.
  val MetricSuffixesToFilter = Set("max", "min", "p50", "p75", "p98", "p99", "p999", "mean_rate", "m5_rate", "m15_rate")

  override def send(name: String, value: String): Unit =
    if (MetricSuffixesToFilter.exists(sfx => name.endsWith(sfx))) {
      CromwellStatsD.logger.debug(s"Filtering metric with name [$name] and value [$value]")
    } else {
      super.send(name, value)
    }
}
| ohsu-comp-bio/cromwell | services/src/main/scala/cromwell/services/instrumentation/impl/statsd/CromwellStatsD.scala | Scala | bsd-3-clause | 823 |
package io.hydrosphere.mist.api.ml.clustering
import io.hydrosphere.mist.api.ml._
import org.apache.spark.ml.clustering.KMeansModel
import org.apache.spark.mllib.clustering.{KMeansModel => OldKMeansModel}
import org.apache.spark.mllib.clustering.{KMeansModel => MLlibKMeans}
import org.apache.spark.mllib.linalg.{Vectors, Vector => MLlibVec}
import scala.collection.immutable.ListMap
import scala.reflect.runtime.universe
// Local (Spark-runtime-free) executor for a fitted Spark ML KMeansModel.
class LocalKMeansModel(override val sparkTransformer: KMeansModel) extends LocalTransformer[KMeansModel] {
// The underlying mllib KMeansModel backing the ml wrapper, extracted via
// runtime reflection because the `parentModel` field is not publicly
// accessible on KMeansModel.
lazy val parent: OldKMeansModel = {
val mirror = universe.runtimeMirror(sparkTransformer.getClass.getClassLoader)
val parentTerm = universe.typeOf[KMeansModel].decl(universe.TermName("parentModel")).asTerm
mirror.reflect(sparkTransformer).reflectField(parentTerm).get.asInstanceOf[OldKMeansModel]
}
// Adds a prediction column with a cluster index per row of the features
// column; assumes each feature cell holds an Array[Double] (cast below) —
// TODO confirm against LocalData producers.
override def transform(localData: LocalData): LocalData = {
localData.column(sparkTransformer.getFeaturesCol) match {
case Some(column) =>
val newColumn = LocalDataColumn(sparkTransformer.getPredictionCol, column.data.map(f => Vectors.dense(f.asInstanceOf[Array[Double]])).map { vector =>
parent.predict(vector)
})
localData.withColumn(newColumn)
case None => localData
}
}
}
object LocalKMeansModel extends LocalModel[KMeansModel] {
// Rebuilds a KMeansModel from serialized metadata plus cluster-center data.
// Centers arrive keyed by a numeric index, so entries are sorted numerically
// to restore their original order before reconstruction.
override def load(metadata: Metadata, data: Map[String, Any]): KMeansModel = {
val sorted = ListMap(data.toSeq.sortBy { case (key: String, _: Any) => key.toInt}: _*)
val centers = sorted map {
case (_: String, value: Any) =>
val center = value.asInstanceOf[Map[String, Any]]
Vectors.dense(center("values").asInstanceOf[List[Double]].to[Array])
}
// Both constructors are invoked reflectively with setAccessible(true),
// presumably because they are not public — order of the set(...) calls
// below mirrors the serialized paramMap keys.
val parentConstructor = classOf[MLlibKMeans].getDeclaredConstructor(classOf[Array[MLlibVec]])
parentConstructor.setAccessible(true)
val mlk = parentConstructor.newInstance(centers.toArray)
val constructor = classOf[KMeansModel].getDeclaredConstructor(classOf[String], classOf[MLlibKMeans])
constructor.setAccessible(true)
var inst = constructor
.newInstance(metadata.uid, mlk)
.setFeaturesCol(metadata.paramMap("featuresCol").asInstanceOf[String])
.setPredictionCol(metadata.paramMap("predictionCol").asInstanceOf[String])
inst = inst.set(inst.k, metadata.paramMap("k").asInstanceOf[Number].intValue())
inst = inst.set(inst.initMode, metadata.paramMap("initMode").asInstanceOf[String])
inst = inst.set(inst.maxIter, metadata.paramMap("maxIter").asInstanceOf[Number].intValue())
inst = inst.set(inst.initSteps, metadata.paramMap("initSteps").asInstanceOf[Number].intValue())
inst = inst.set(inst.seed, metadata.paramMap("seed").toString.toLong)
inst = inst.set(inst.tol, metadata.paramMap("tol").asInstanceOf[Double])
inst
}
override implicit def getTransformer(transformer: KMeansModel): LocalTransformer[KMeansModel] = new LocalKMeansModel(transformer)
}
| KineticCookie/mist | mist-lib/src/main/scala-2.11/io/hydrosphere/mist/api/ml/clustering/LocalKMeansModel.scala | Scala | apache-2.0 | 2,955 |
/*
* Copyright 2013 Akiyoshi Sugiki, University of Tsukuba
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kumoi.shell.aaa.bill
import kumoi.shell.aaa._
import kumoi.shell.aaa.ops._
import kumoi.shell.aaa.resource._
import kumoi.shell.or._
import kumoi.shell.cache._
/**
*
* @author Akiyoshi SUGIKI
*/
@billtype
@remote trait HotRate extends ORMapper[HotRate] {
//@invalidate(Array("name")) @nocache @update def name_=(n: String)
// Setter carrying the acting AAA principal alongside the new name; the
// annotations presumably invalidate the cached "name" entry and mark this as
// an update operation — semantics are defined by the aaa/cache packages.
@invalidate(Array("name")) @nocache @update def name_=(na: (String, AAA))
} | axi-sugiki/kumoi | src/kumoi/shell/aaa/bill/HotRate.scala | Scala | apache-2.0 | 1,037 |
package beyond.plugin
import beyond.config.BeyondConfiguration
import beyond.engine.javascript.AssetsModuleSourceProvider
import beyond.engine.javascript.BeyondGlobal
import beyond.engine.javascript.BeyondJavaScriptEngine
import beyond.engine.javascript.lib.ScriptableConsole
import beyond.engine.javascript.lib.ScriptableFuture
import beyond.engine.javascript.lib.http.ScriptableRequest
import beyond.engine.javascript.lib.http.ScriptableResponse
import com.typesafe.scalalogging.slf4j.{ StrictLogging => Logging }
import org.mozilla.javascript.Context
import org.mozilla.javascript.Function
import play.api.mvc.Request
import play.api.mvc.Result
import scala.concurrent.Future
import scalax.file.Path
// Thrown when a plugin's main module does not export a "handle" function.
class NoHandlerFunctionFoundException extends Exception
// Routes Play requests to per-plugin Rhino JavaScript engines. Each engine's
// main.js is expected to export a "handle" function.
object GamePlugin extends Logging {
import com.beyondframework.rhino.RhinoConversions._
import scala.concurrent.ExecutionContext.Implicits.global
private val library: AssetsModuleSourceProvider = new AssetsModuleSourceProvider
// One engine per configured plugin prefix.
private val engines: Map[String, BeyondJavaScriptEngine] =
BeyondConfiguration.pluginPaths.map {
case (prefix: String, paths: Seq[Path]) =>
prefix -> new BeyondJavaScriptEngine(new BeyondGlobal(library), paths)
}
// Fallback engine for requests whose prefix matches no configured plugin.
private lazy val defaultEngine: BeyondJavaScriptEngine =
new BeyondJavaScriptEngine(new BeyondGlobal(library), BeyondConfiguration.deprecatedPluginPaths)
ScriptableConsole.setRedirectConsoleToLogger(true)
// Loads main.js in the given engine and extracts its exported "handle"
// function; throws NoHandlerFunctionFoundException when it is missing.
private def makeHandler(engine: BeyondJavaScriptEngine): Function =
engine.contextFactory.call { cx: Context =>
val mainFilename = "main.js"
val exports = engine.loadMain(mainFilename)
// FIXME: Don't hardcode the name of handler function.
exports.get("handle", exports) match {
case handler: Function =>
handler
case _ /* Scriptable.NOT_FOUND */ =>
logger.error("No handler function is found")
throw new NoHandlerFunctionFoundException
}
}.asInstanceOf[Function]
private val handlers: Map[String, Function] = engines.map {
case (prefix: String, engine: BeyondJavaScriptEngine) =>
prefix -> makeHandler(engine)
}
private lazy val defaultHandler: Function =
makeHandler(defaultEngine)
// Invokes the JS handler with a wrapped request; the handler may return a
// response directly or a future that resolves to one.
private def handle[A](engine: BeyondJavaScriptEngine, handler: Function)(request: Request[A]): Future[Result] =
engine.contextFactory.call { cx: Context =>
val scope = engine.global
val args: Array[AnyRef] = Array(ScriptableRequest(cx, request))
val response = handler.call(cx, scope, scope, args)
response match {
case f: ScriptableFuture =>
f.future.mapTo[ScriptableResponse].map(_.result)
case res: ScriptableResponse =>
Future.successful(res.result)
}
}.asInstanceOf[Future[Result]]
// Picks the engine by the second URI segment and strips that segment before
// dispatch. NOTE(review): uris(1) throws IndexOutOfBoundsException for URIs
// with fewer than two segments — confirm upstream routing guarantees this.
def handle[A](request: Request[A]): Future[Result] = {
val (pluginName, pluginNameRemovedReq): (String, Request[A]) = {
val uris = request.uri.split("/").drop(1) // `request.uri` starts with "/".
val pluginName = uris(1)
val pluginNameRemovedUri = (uris(0) +: uris.drop(2).toSeq).mkString("/", "/", "")
pluginName -> Request(request.copy(uri = pluginNameRemovedUri), request.body)
}
engines.get(pluginName) match {
case None =>
handle(defaultEngine, defaultHandler)(request)
case Some(engine: BeyondJavaScriptEngine) =>
handle(engine, handlers(pluginName))(pluginNameRemovedReq)
}
}
}
| SollmoStudio/beyond | core/app/beyond/plugin/GamePlugin.scala | Scala | apache-2.0 | 3,465 |
/*
* Copyright (c) 2017-2022 Lymia Alusyia <lymia@lymiahugs.com>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package moe.lymia.princess.svg.scripting
import moe.lymia.lua._
import moe.lymia.princess.core.gamedata.LuaLibrary
import moe.lymia.princess.svg._
/** A units-per-viewport scale factor paired with its physical unit. */
case class PhysicalScale(unPerViewport: Double, unit: PhysicalUnit)
trait LuaTemplateImplicits {
// Exposes PhysicalScale to Lua as userdata; the __mul metamethod lets
// scripts compute number * scale, producing a new PhysicalScale that keeps
// the same unit.
implicit object LuaPhysicalScale extends LuaUserdataType[PhysicalScale] {
metatable { (L, mt) =>
L.register(mt, "__mul", (scale: Double, s: PhysicalScale) => PhysicalScale(scale * s.unPerViewport, s.unit))
}
}
}
object TemplateLib extends LuaLibrary {
// Registers a "PhysicalUnit" table (mm/in scales with factor 1) into the
// library table handed to Lua scripts.
def open(L: LuaState, table: LuaTable) = {
val unit = L.newTable()
L.rawSet(unit, "mm", PhysicalScale(1, PhysicalUnit.mm))
L.rawSet(unit, "in", PhysicalScale(1, PhysicalUnit.in))
L.rawSet(table, "PhysicalUnit", unit)
}
} | Lymia/PrincessEdit | modules/princess-edit/src/main/scala/moe/lymia/princess/svg/scripting/TemplateLib.scala | Scala | mit | 1,903 |
/*^
===========================================================================
TwoBinManager
===========================================================================
Copyright (C) 2016-2017 Gianluca Costa
===========================================================================
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public
License along with this program. If not, see
<http://www.gnu.org/licenses/gpl-3.0.html>.
===========================================================================
*/
package info.gianlucacosta.twobinmanager.analytics.providers
import info.gianlucacosta.twobinmanager.sdk.analytics.filteringby.framemode.AverageTargetNumberGroupingProvider
// Analytics provider that groups average target numbers by each problem's
// total block count (label shown as "Total block count").
class AverageTargetByTotalBlockCount extends AverageTargetNumberGroupingProvider(
"Total block count",
solution =>
solution.problem.frameTemplate.blockPool.totalBlockCount
)
| giancosta86/TwoBinManager | src/main/scala/info/gianlucacosta/twobinmanager/analytics/providers/AverageTargetByTotalBlockCount.scala | Scala | gpl-3.0 | 1,381 |
package de.zalando.apifirst.generators
import de.zalando.apifirst.Application._
import de.zalando.apifirst.Domain._
import de.zalando.apifirst.ScalaName._
import de.zalando.apifirst.generators.DenotationNames.DenotationTable
import PlayScalaControllerAnalyzer._
import de.zalando.apifirst.StringUtil
import scala.collection.Iterable
/**
* @author slasch
* @since 16.11.2015.
*/
class ScalaGenerator(val strictModel: StrictModel, customTemplateLocation: Option[String] = None) {
// Denotation table computed once per model; drives all template lookups below.
val denotationTable = AstScalaPlayEnricher(strictModel)
// Destructure the model once; overridenPackageName and securityDefinitions
// feed directly into the template-selection methods further down.
val StrictModel(modelCalls, modelTypes, modelParameters, discriminators, _, overridenPackageName, stateTransitions, securityDefinitions) = strictModel
// Template names, resolved against the template loader in renderTemplate.
val testsTemplateName = "play_scala_test"
val validatorsTemplateName = "play_validation"
val generatorsTemplateName = "generators"
val modelTemplateName = "model"
val controllersTemplateName = "play_scala_controller"
val controllerBaseTemplateName = "play_scala_controller_base"
val marschallersTemplateName = "play_scala_response_writers"
val securityTemplateName = "play_scala_controller_security"
val securityExtractorsTemplateName = "play_scala_security_extractors"
val formParsersTemplateName = "play_scala_form_parser"
// Each generator below is a (fileName, packageName, currentController)
// function returning one rendered artefact per target file; an empty string
// in the result means "nothing to generate" for that artefact.
def generateBase: (String, String, String) => Seq[String] = (fileName, packageName, currentController) => Seq(
generateModel(fileName, packageName),
playValidators(fileName, packageName),
playScalaSecurity(fileName, packageName),
playScalaControllerBases(fileName, packageName),
playScalaFormParsers(fileName, packageName)
)
def generateTest: (String, String, String) => Seq[String] = (fileName, packageName, currentController) => Seq(
generateGenerators(fileName, packageName),
playScalaTests(fileName, packageName)
)
def generateMarshallers: (String, String, String) => Seq[String] = (fileName, packageName, currentController) => Seq(
playScalaMarshallers(fileName, packageName)
)
def generateExtractors: (String, String, String) => Seq[String] = (fileName, packageName, currentController) => Seq(
playScalaSecurityExtractors(fileName, packageName)
)
def generateControllers: (String, String, String) => Seq[String] = (fileName, packageName, currentController) => Seq(
playScalaControllers(fileName, packageName, currentController)
)
/** Model classes; skipped (empty string) when every model type is primitive. */
def generateModel(fileName: String, packageName: String): String =
  if (modelTypes.values.exists(t => !t.isInstanceOf[PrimitiveType]))
    apply(fileName, packageName, modelTemplateName)
  else ""

/** Test-data generators; skipped when there are no model types at all. */
def generateGenerators(fileName: String, packageName: String): String =
  if (modelTypes.nonEmpty) apply(fileName, packageName, generatorsTemplateName) else ""

/** Validators; skipped when no call carries any handler parameter. */
def playValidators(fileName: String, packageName: String): String =
  if (modelCalls.map(_.handler.parameters.size).sum != 0)
    apply(fileName, packageName, validatorsTemplateName)
  else ""

/** Specs; skipped when no call carries any handler parameter. */
def playScalaTests(fileName: String, packageName: String): String =
  if (modelCalls.map(_.handler.parameters.size).sum != 0)
    apply(fileName, packageName, testsTemplateName)
  else ""

/** Controllers; skipped when the model defines no calls. */
def playScalaControllers(fileName: String, packageName: String, currentController: String): String =
  if (modelCalls.nonEmpty) apply(fileName, packageName, controllersTemplateName, currentController) else ""

/** Controller base traits; skipped when the model defines no calls. */
def playScalaControllerBases(fileName: String, packageName: String): String =
  if (modelCalls.nonEmpty) apply(fileName, packageName, controllerBaseTemplateName) else ""

/** Form parsers; skipped when the model defines no calls. */
def playScalaFormParsers(fileName: String, packageName: String): String =
  if (modelCalls.nonEmpty) apply(fileName, packageName, formParsersTemplateName) else ""

/** Response writers; skipped when the model defines no calls. */
def playScalaMarshallers(fileName: String, packageName: String): String =
  if (modelCalls.nonEmpty) apply(fileName, packageName, marschallersTemplateName) else ""

/** Security extractors; skipped when the model defines no calls. */
def playScalaSecurityExtractors(fileName: String, packageName: String): String =
  if (modelCalls.nonEmpty) apply(fileName, packageName, securityExtractorsTemplateName) else ""

/** Security traits; skipped when no security definitions exist. */
def playScalaSecurity(fileName: String, packageName: String): String =
  if (securityDefinitions.nonEmpty) apply(fileName, packageName, securityTemplateName) else ""
/** Thin indirection to the real renderer; kept to preserve the call sites above. */
private def apply(fileName: String, packageName: String, templateName: String,
                  currentController: String = ""): String =
  nonEmptyTemplate(fileName, packageName, templateName, currentController)
// Assembles the full template model (validators, bindings, marshallers,
// security extractors, controllers, unmanaged code sections) from the
// denotation table and renders the named template with it.
private def nonEmptyTemplate(fileName: String, packageName: String, templateName: String, currentController: String): String = {
assert(packageName.contains('-') == packageName.contains('`'), packageName)
val validations = ReShaper.filterByType("validators", denotationTable)
val validationsByType = ReShaper.groupByType(validations.toSeq).toMap
val bindings = ReShaper.filterByType("bindings", denotationTable)
val grouppedBindings = ReShaper.groupByType(bindings.toSeq.distinct)
// Bindings are ordered so that entries with fewer dependencies come first;
// bracketed formats are pushed later via the second case.
val sortedBindings = grouppedBindings.map {
case (x, y: Seq[Map[String, Any]@unchecked]) =>
val sorted = y.sortWith { (a, b) =>
(a.get("dependencies"), b.get("dependencies")) match {
case (Some(aa: Int), Some(bb: Int)) => aa < bb
case _ if b("format").toString.contains("[") => true
case _ => false
}
}
x -> sorted
}
val bindingsByType = sortedBindings.toMap
val modelBindings = bindingsByType.flatMap {
case (k, b) =>
b.map(_ ("format"))
}.toSeq.distinct.map { b => Map("full_name" -> b) }
val forms = ReShaper.filterByType("form_data", denotationTable)
val marshallers = ReShaper.filterByType("marshallers", denotationTable)
val grouppedMarshallers = ReShaper.groupByType(marshallers.toSeq).toMap
val unmarshallers = ReShaper.filterByType("unmarshallers", denotationTable)
val grouppedunMarshallers = ReShaper.groupByType(unmarshallers.toSeq).toMap
val securityExtractors = ReShaper.filterByType("security_extractors", denotationTable)
val extractors = ReShaper.groupByType(securityExtractors.toSeq).toMap
// Preserve hand-written controller code: sections the analyzer does not
// recognise as managed markers become "dead code" carried along verbatim.
val codeParts = collectImplementations(currentController.split("\\n"), sof, eof)
val unmanagedParts = analyzeController(modelCalls, codeParts)
val deadCode = codeParts.filterNot { cp =>
unmanagedParts.keys.map(asMarker).exists(_ == cp._1)
}.map { case (k,v) =>
k -> v.mkString("\\n")
}
val userImports = unmanagedImports(currentController, modelTypes)
val pckg = overridenPackageName.getOrElse(packageName)
val formParsersRequired = forms.exists(_ ("form_parameters").asInstanceOf[Seq[_]].nonEmpty)
val packages = Map(
"main_package" -> pckg,
"main_package_prefix" -> pckg.split('.').init.mkString("."),
"main_package_suffix" -> pckg.split('.').last,
"spec_name" -> escape(StringUtil.capitalize("\\\\.", fileName) + "Spec"),
"form_parsers_required" -> formParsersRequired
)
val controllersList = PlayScalaControllersGenerator.controllers(modelCalls, unmanagedParts, pckg, deadCode)(denotationTable)
val stdImports = standardImports(modelTypes).map(i => Map("name" -> i))
val controllersMap = Map(
"controllers" -> controllersList,
"controller_imports" -> (controllerImports.map(i => Map("name" -> i)) ++ stdImports),
"unmanaged_imports" -> userImports.map(i => Map("name" -> i))
)
val singlePackage: Map[String, Iterable[Any]] = Map(
"classes" -> ReShaper.filterByType("classes", denotationTable),
"aliases" -> ReShaper.filterByType("aliases", denotationTable),
"traits" -> ReShaper.filterByType("traits", denotationTable),
"enums" -> ReShaper.filterByType("enums", denotationTable),
"test_data_classes" -> ReShaper.filterByType("test_data_classes", denotationTable),
"test_data_aliases" -> ReShaper.filterByType("test_data_aliases", denotationTable),
"tests" -> ReShaper.filterByType("tests", denotationTable),
"marshallers" -> grouppedMarshallers,
"unmarshallers" -> grouppedunMarshallers,
"security_extractors" -> extractors,
"bindings" -> bindingsByType,
"forms" -> forms,
"model_bindings" -> modelBindings
)
val rawAllPackages = singlePackage ++ validationsByType ++ controllersMap
val allPackages = enrichWithStructuralInfo(rawAllPackages)
renderTemplate(packages, templateName, allPackages)
}
/** Compiles the named mustache template and renders it with the merged data maps.
  *
  * Templates are resolved from a user-configured directory when one is set and
  * the target template file is readable there; otherwise they are loaded from
  * the classpath root. Returns the rendered template as a string.
  */
def renderTemplate(map: Map[String, Any], templateName: String,
  allPackages: Map[String, Any]): String = {
  import de.zalando.beard.renderer._
  val suffix = ".mustache"
  val prefix = "/"
  // Prefer the custom template directory, but only if it actually holds a readable template.
  val customLoader = customTemplateLocation.collect {
    case dir if new java.io.File(dir + prefix + templateName + suffix).canRead =>
      new FileTemplateLoader(dir, suffix)
  }
  val loader = customLoader.getOrElse(new ClasspathTemplateLoader(prefix, suffix))
  val compiler = new CustomizableTemplateCompiler(loader)
  val compiled = compiler.compile(TemplateName(templateName)).get
  new BeardTemplateRenderer(compiler)
    .render(compiled, StringWriterRenderResult(), map ++ allPackages, None)
    .toString
}
/** Augments the raw package data with cross-cutting structural information:
  * last-element markers for list rendering, "needed part" flags derived from
  * the collected imports, and de-duplicated import name maps for the templates.
  */
def enrichWithStructuralInfo(rawAllPackages: Map[String, Iterable[Any]]): Map[String, Any] = {
  def nameMaps(names: Seq[String]) = names.distinct.map(n => Map("name" -> n))
  val collectedImports = KeyCollector.collect("imports")(rawAllPackages)
  val bindingImports = KeyCollector.collect("binding_imports")(rawAllPackages)
  LastListElementMarks.set(rawAllPackages) ++
    neededParts(collectedImports) +
    ("imports" -> nameMaps(collectedImports)) +
    ("binding_imports" -> nameMaps(bindingImports))
}
// Maps each template "part" flag to a marker type name; a part is considered
// needed when any collected import mentions its marker type (see neededParts).
private val partsMapping = Map(
"lists_part" -> "ArrayWrapper",
"maps_part" -> "Map",
"date_part" -> "LocalDate",
"date_time_part" -> "DateTime",
"binary_string_part" -> "BinaryString",
"base64_string_part" -> "Base64String",
"file_part" -> "File",
"uuid_part" -> "UUID"
)
/** Flags, for every known template part, whether any of the collected imports
  * references that part's marker type (as declared in partsMapping).
  */
private def neededParts(imports: Seq[String]): Map[String, Boolean] =
  partsMapping.map { case (part, marker) => part -> imports.exists(_.contains(marker)) }
}
/**
 * Assembles the per-controller template data used to generate Play Scala controllers.
 */
object PlayScalaControllersGenerator {
// Suffixes appended to the controller name for the generated base class / security trait.
val baseControllersSuffix = "Base"
val securityTraitSuffix = "Security"
/**
 * Groups all API calls by their (package, controller) handler pair and builds one
 * template-data map per controller: its methods, optional security trait name,
 * and any preserved user code ("dead code") that no longer maps to a live call.
 */
def controllers(allCalls: Seq[ApiCall], unmanagedParts: Map[ApiCall, UnmanagedPart], packageName: String, deadCode: Map[String, String])
(table: DenotationTable): Iterable[Map[String, Object]] = {
allCalls groupBy { c =>
(c.handler.packageName, c.handler.controller)
} map {
case (controller, calls) =>
val methods = calls map {
singleMethod(unmanagedParts, table)
}
// Only a single package per specification is supported; warn when a handler declares another.
if (packageName != controller._1) {
println(s"WARN: Ignoring package part of the handler name '${controller._1}', using '$packageName' instead. \\n\\t" +
"Current plugin version only supports single package definition per specification.\\n\\t" +
"Play's route files will fail to compile.")
}
// A security trait is only emitted when at least one call declares security constraints.
val securityTrait = calls.find(_.security.nonEmpty).map(_ => escape(controller._2 + securityTraitSuffix))
val deadCodeParts = deadCode.toSeq.map { case (k,v) =>
Map("name" -> k, "code" -> v)
}
Map(
"effective_package" -> packageName,
"controller" -> escape(controller._2),
"base" -> escape(controller._2 + baseControllersSuffix),
"methods" -> methods,
"security_trait" -> securityTrait,
"dead_code" -> deadCodeParts
)
}
}
/**
 * Builds the template data for a single API call: the denotation-table entry for
 * its controller method plus either the user's preserved implementation or a
 * "NotImplementedYet" placeholder.
 */
def singleMethod(unmanagedParts: Map[ApiCall, UnmanagedPart], table: DenotationTable): ApiCall => Map[String, Any] =
call => {
val method = table(call.asReference)("controller")
val methodWithCode = method + (
"implementation" -> unmanagedParts.get(call).map(_.relevantCode.mkString("\\n").trim).getOrElse("NotImplementedYet")
)
methodWithCode
}
}
| zalando/play-swagger | play-scala-generator/src/main/scala/de/zalando/apifirst/generators/playScala.scala | Scala | mit | 12,160 |
package com.twitter.finatra.http.marshalling
import com.twitter.finagle.http.Status._
import com.twitter.finagle.http.{Request, Response, Status}
import com.twitter.finatra.http.internal.marshalling.CallbackConverter
import com.twitter.finatra.http.modules.{CallbackConverterModule, DocRootModule, MessageBodyModule, MustacheModule}
import com.twitter.finatra.http.response.SimpleResponse
import com.twitter.finatra.json.modules.FinatraJacksonModule
import com.twitter.inject.app.TestInjector
import com.twitter.inject.{Mockito, Test}
import com.twitter.util.{Await, Future}
import org.jboss.netty.handler.codec.http.HttpResponseStatus
/**
 * Integration test for CallbackConverter: verifies that route callbacks with a
 * variety of return types (Future-wrapped options, products, traits, sequences,
 * responses and plain values) are converted to HTTP responses with the expected
 * status and body.
 */
class CallbackConverterIntegrationTest extends Test with Mockito {
// Wire up the minimal module set the converter needs (message bodies, JSON, mustache).
val injector = TestInjector(
MessageBodyModule, FinatraJacksonModule,
MustacheModule, CallbackConverterModule, DocRootModule)
val callbackConverter = injector.instance[CallbackConverter]
val request = mock[Request]
// Shared fixtures: a product/trait instance and a canned 200 response.
val ford = Car("Ford")
val okResponse = SimpleResponse(Ok, "bob")
"Future Some String" in {
assertOk(
callbackConverter.convertToFutureResponse(futureSomeString),
withBody = "hello")
}
// A None inside a Future is mapped to 404.
"Future None String" in {
assertStatus(
callbackConverter.convertToFutureResponse(futureNoneString),
expectedStatus = NotFound)
}
"Future Some Product" in {
assertOk(
callbackConverter.convertToFutureResponse(futureSomeProduct),
withBody = """{"name":"Ford"}""")
}
"Future Some Trait" in {
assertOk(
callbackConverter.convertToFutureResponse(futureSomeTrait),
withBody = """{"name":"Ford"}""")
}
"Future String" in {
assertOk(
callbackConverter.convertToFutureResponse(futureString),
withBody = "bob")
}
"Future Response" in {
assertOk(
callbackConverter.convertToFutureResponse(futureResponse),
withBody = "bob")
}
"Future Some Response" in {
assertOk(
callbackConverter.convertToFutureResponse(futureSomeResponse),
withBody = "bob")
}
"Future None Response" in {
assertStatus(
callbackConverter.convertToFutureResponse(futureNoneResponse),
expectedStatus = NotFound)
}
"Future Seq String" in {
assertOk(
callbackConverter.convertToFutureResponse(futureSeqString),
withBody = """["bob"]""")
}
"Future Seq Car" in {
assertOk(
callbackConverter.convertToFutureResponse(futureSeqCar),
withBody = """[{"name":"Ford"}]""")
}
"Future Seq CarTrait" in {
assertOk(
callbackConverter.convertToFutureResponse(futureSeqCarTrait),
withBody = """[{"name":"Ford"}]""")
}
"Object" in {
assertOk(
callbackConverter.convertToFutureResponse(objectCallback),
withBody = """asdf""")
}
"None" in {
assertStatus(
callbackConverter.convertToFutureResponse(noneCallback),
expectedStatus = NotFound)
}
"Some" in {
assertOk(
callbackConverter.convertToFutureResponse(someCallback),
withBody = """asdf""")
}
// Disabled: null-returning callbacks are not yet specified.
"Null" in {
pending
assertOk(
callbackConverter.convertToFutureResponse(nullCallback),
withBody = "")
}
// ---- Callback fixtures: one per supported return type ----
def objectCallback(request: Request): Object = {
"asdf"
}
def noneCallback(request: Request): Option[String] = {
None
}
def someCallback(request: Request): Option[String] = {
Some("asdf")
}
def nullCallback(request: Request) = {
null
}
def futureSomeString(request: Request): Future[Option[String]] = {
Future(Some("hello"))
}
def futureNoneString(request: Request): Future[Option[String]] = {
Future(None)
}
def futureSomeProduct(request: Request): Future[Option[Car]] = {
Future(Some(ford))
}
def futureSomeTrait(request: Request): Future[Option[CarTrait]] = {
Future(Some(ford))
}
def futureString(request: Request): Future[String] = {
Future("bob")
}
def futureResponse(request: Request): Future[Response] = {
Future(okResponse)
}
def futureSomeResponse(request: Request): Future[Option[Response]] = {
Future(Some(okResponse))
}
def futureNoneResponse(request: Request): Future[Option[Response]] = {
Future.None
}
def futureSeqString(request: Request): Future[Seq[String]] = {
Future(Seq("bob"))
}
def futureSeqCar(request: Request): Future[Seq[Car]] = {
Future(Seq(ford))
}
def futureSeqCarTrait(request: Request): Future[Seq[CarTrait]] = {
Future(Seq(ford))
}
// ---- Assertion helpers ----
private def assertOk(response: Response, expectedBody: String) {
response.status should equal(Status.Ok)
response.contentString should equal(expectedBody)
}
private def assertOk(convertedFunc: (Request) => Future[Response], withBody: String) {
val response = Await.result(convertedFunc(request))
assertOk(response, withBody)
}
// NOTE(review): expectedStatus is netty's HttpResponseStatus while response.status
// is a finagle Status — this relies on finagle's implicit bridging/equality between
// the two; confirm against the finagle-http version in use.
private def assertStatus(convertedFunc: (Request) => Future[Response], expectedStatus: HttpResponseStatus) {
val response = Await.result(convertedFunc(request))
response.status should equal(expectedStatus)
}
}
// Simple fixture types used to exercise JSON serialization of case classes
// (products) and trait-typed values in the tests above.
case class Car(name: String) extends CarTrait
trait CarTrait {
val name: String
}
| tom-chan/finatra | http/src/test/scala/com/twitter/finatra/http/marshalling/CallbackConverterIntegrationTest.scala | Scala | apache-2.0 | 5,094 |
package org.nexbook.performance
import org.nexbook.domain.Order
import org.nexbook.tags.Performance
import org.nexbook.testutils.OrderProvider
import org.scalatest.{Matchers, WordSpec}
import org.slf4j.LoggerFactory
import scala.collection.mutable
import scala.concurrent.duration._
/**
* Created by milczu on 04.01.16.
*/
/**
 * Compares sequential vs. parallel fold performance when summing order volume
 * (leaveQty) over a sorted set, for several order-book sizes.
 */
class VolumeSumTest extends WordSpec with Matchers with StopWatch {

  val logger = LoggerFactory.getLogger(classOf[VolumeSumTest])
  val ordersCounts = Seq(5000, 50000, 100000)

  "Seq and Par Set processing" should {
    // Load the largest data set once and reuse prefixes of it for the smaller runs.
    val allOrders = OrderProvider.get(ordersCounts.max)
    for (ordersCount <- ordersCounts) {
      s"sum volume $ordersCount orders" taggedAs Performance in {
        logger.info(s"====== Performance test for add operation for OrderInMemoryRepository, orders count: $ordersCount")
        val orders = allOrders.take(ordersCount)
        orders should have size ordersCount
        logger.info(s"Loaded $ordersCount orders as test data. Test starting")
        testCountOperation(orders)
      }
    }
  }

  /**
   * Measures the average wall-clock time of summing leaveQty over the orders,
   * once with a sequential fold and once with a parallel fold. The first
   * `attemptsForWarmCpuCache` measurements are discarded as warm-up.
   */
  def testCountOperation(orders: List[Order]) {
    val attemptsForWarmCpuCache = 2
    val repeats = attemptsForWarmCpuCache + 15
    // Deterministic iteration order so seq/par runs fold exactly the same elements.
    val ordersSet: mutable.SortedSet[Order] = mutable.TreeSet.empty[Order](Ordering.fromLessThan(_.tradeID < _.tradeID))
    orders.foreach(ordersSet.+=)
    ordersSet should have size orders.size

    def measureSeq: Long = stopwatch {
      ordersSet.foldLeft(0.00)(_ + _.leaveQty)
    }
    def measurePar: Long = stopwatch {
      ordersSet.par.foldLeft(0.00)(_ + _.leaveQty)
    }

    // BUG FIX: the warm-up attempts are dropped from the sample, so the average
    // must divide by the number of *retained* measurements, not by `repeats`
    // (the old code divided a 15-sample sum by 17, understating both averages).
    val measuredRuns = repeats - attemptsForWarmCpuCache
    val avgExecTimeSeq = (Seq.fill(repeats)(measureSeq).drop(attemptsForWarmCpuCache).sum / measuredRuns).nanoseconds
    val avgExecTimePar = (Seq.fill(repeats)(measurePar).drop(attemptsForWarmCpuCache).sum / measuredRuns).nanoseconds
    // Duration.toString already includes the time unit; no suffix needed.
    logger.info(s"avgExecTimeSeq: $avgExecTimeSeq")
    logger.info(s"avgExecTimePar: $avgExecTimePar")
  }
}
| milczarekIT/nexbook | src/test/scala/org/nexbook/performance/VolumeSumTest.scala | Scala | apache-2.0 | 1,836 |
/*
* Copyright 2010-2011 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb
package mongodb
import org.bson.types.ObjectId
import json.{DefaultFormats, Formats}
import json.JsonAST.JObject
import com.mongodb.{BasicDBObject, DB, DBCollection, DBObject}
/** Mixes in the lift-json Formats used when (de)serializing Mongo documents. */
trait JsonFormats {
// override this for custom Formats
def formats: Formats = DefaultFormats.lossless
implicit lazy val _formats: Formats = formats
// Lossless formats extended with serializers for BSON-native types
// (ObjectId, Date, regex Pattern, UUID).
lazy val allFormats = DefaultFormats.lossless + new ObjectIdSerializer + new DateSerializer + new PatternSerializer + new UUIDSerializer
}
/*
 * This is used by both MongoDocumentMeta and MongoMetaRecord.
 * Provides the collection-level operations (count, delete, index management
 * and update) shared by document- and record-based Mongo meta objects.
 */
trait MongoMeta[BaseDocument] extends JsonFormats {

  // class name has a $ at the end.
  private lazy val _collectionName = {
    getClass.getName.split("\\\\.").toList.last.replace("$", "")+"s"
  }

  /*
   * Collection names should begin with letters or an underscore and may include
   * numbers; $ is reserved. Collections can be organized in namespaces; these
   * are named groups of collections defined using a dot notation. For example,
   * you could define collections blog.posts and blog.authors, both reside under
   * "blog". Note that this is simply an organizational mechanism for the user
   * -- the collection namespace is flat from the database's perspective.
   * From: http://www.mongodb.org/display/DOCS/Collections
   */
  def fixCollectionName = _collectionName.toLowerCase match {
    case name if (name.contains("$")) => name.replace("$", "_d_")
    case name => name
  }

  /**
   * The name of the database collection. Override this method if you
   * want to change the collection to something other than the name of
   * the class with an 's' appended to the end.
   */
  def collectionName: String = fixCollectionName

  // override this to specify a MongoIdentifier for this MongoDocument type
  def mongoIdentifier: MongoIdentifier = DefaultMongoIdentifier

  /* Use the collection associated with this Meta. */
  def useColl[T](f: DBCollection => T) =
    MongoDB.useCollection(mongoIdentifier, collectionName)(f)

  /* Use the db associated with this Meta. */
  def useDb[T](f: DB => T) = MongoDB.use(mongoIdentifier)(f)

  /* Count all documents */
  def count: Long = useColl { coll => coll.getCount }

  /* Count documents by DBObject query */
  def count(qry: DBObject): Long = useColl { coll => coll.getCount(qry) }

  /* Count documents by JObject query */
  def count(qry: JObject): Long = count(JObjectParser.parse(qry))

  /* Count distinct records on a given field */
  def countDistinct(key: String, query: DBObject): Long =
    useColl { coll => coll.distinct(key, query).size }

  /* Delete documents by a DBObject query */
  def delete(qry: DBObject): Unit =
    useColl { coll => coll.remove(qry) }

  // Delete a document by a single key/value pair. String values that are valid
  // ObjectIds are converted first, so id-based deletes work with either form.
  def delete(k: String, v: Any) {
    delete(new BasicDBObject(k, v match {
      case s: String if (ObjectId.isValid(s)) => new ObjectId(s)
      case _ => v
    }))
  }

  /* Delete documents by a JObject query */
  def delete(qry: JObject): Unit = delete(JObjectParser.parse(qry))

  /* Drop this document collection */
  def drop: Unit = useColl { coll => coll.drop }

  /* Ensure an index exists */
  def ensureIndex(keys: JObject): Unit =
    useColl { coll => coll.ensureIndex(JObjectParser.parse(keys)) }

  /* Ensure an index exists and make unique */
  def ensureIndex(keys: JObject, unique: Boolean): Unit = {
    val options = new BasicDBObject
    if (unique) options.put("unique", true)
    useColl { coll =>
      coll.ensureIndex(JObjectParser.parse(keys), options)
    }
  }

  /* Ensure an index exists with options */
  def ensureIndex(keys: JObject, opts: JObject): Unit =
    useColl { coll =>
      coll.ensureIndex(JObjectParser.parse(keys), JObjectParser.parse(opts))
    }

  /*
   * Update document with a DBObject query using the given Mongo instance.
   * The driver's upsert/multi flags are derived from the passed UpdateOptions.
   */
  def update(qry: DBObject, newobj: DBObject, db: DB, opts: UpdateOption*) {
    val dboOpts = opts.toList
    db.getCollection(collectionName).update(
      qry,
      newobj,
      dboOpts.contains(Upsert),
      dboOpts.contains(Multi)
    )
  }

  /* Update document with a JObject query using the given Mongo instance. */
  def update(qry: JObject, newobj: JObject, db: DB, opts: UpdateOption*) {
    update(
      JObjectParser.parse(qry),
      JObjectParser.parse(newobj),
      db,
      opts :_*
    )
  }

  /* Update document with a JObject query. */
  def update(qry: JObject, newobj: JObject, opts: UpdateOption*) {
    useDb { db => update(qry, newobj, db, opts :_*) }
  }
}
/*
 * For passing in options to the find function
 */
abstract sealed class FindOption {
def value: Int
}
// Maximum number of documents to return.
case class Limit(value: Int) extends FindOption
// Number of documents to skip before returning results.
case class Skip(value: Int) extends FindOption
/*
 * For passing in options to the update function
 */
abstract sealed class UpdateOption
// Insert the document if no document matches the query.
case object Upsert extends UpdateOption
// Update every document matching the query, not just the first.
case object Multi extends UpdateOption
| pbrant/framework | persistence/mongodb/src/main/scala/net/liftweb/mongodb/MongoMeta.scala | Scala | apache-2.0 | 5,613 |
/* Generated File */
package services.address
import com.kyleu.projectile.models.result.data.DataField
import com.kyleu.projectile.models.result.filter.Filter
import com.kyleu.projectile.models.result.orderBy.OrderBy
import com.kyleu.projectile.services.ModelServiceHelper
import com.kyleu.projectile.services.database.JdbcDatabase
import com.kyleu.projectile.util.{Credentials, CsvUtils}
import com.kyleu.projectile.util.tracing.{TraceData, TracingService}
import java.sql.Connection
import java.time.ZonedDateTime
import models.address.CityRow
import models.queries.address.CityRowQueries
import scala.concurrent.{ExecutionContext, Future}
// Projectile-generated CRUD service for the "city" table. Comments below describe
// the generated API; regeneration will overwrite them.
@javax.inject.Singleton
class CityRowService @javax.inject.Inject() (val db: JdbcDatabase, override val tracing: TracingService)(implicit ec: ExecutionContext) extends ModelServiceHelper[CityRow]("cityRow", "address" -> "CityRow") {
// --- Primary-key lookups; every call is permission-checked and traced. ---
def getByPrimaryKey(creds: Credentials, cityId: Int, conn: Option[Connection] = None)(implicit trace: TraceData) = checkPerm(creds, "view") {
traceF("get.by.primary.key")(td => db.queryF(CityRowQueries.getByPrimaryKey(cityId), conn)(td))
}
def getByPrimaryKeyRequired(creds: Credentials, cityId: Int, conn: Option[Connection] = None)(implicit trace: TraceData) = getByPrimaryKey(creds, cityId, conn).map { opt =>
opt.getOrElse(throw new IllegalStateException(s"Cannot load cityRow with cityId [$cityId]"))
}
def getByPrimaryKeySeq(creds: Credentials, cityIdSeq: Seq[Int], conn: Option[Connection] = None)(implicit trace: TraceData) = checkPerm(creds, "view") {
if (cityIdSeq.isEmpty) {
Future.successful(Nil)
} else {
traceF("get.by.primary.key.seq")(td => db.queryF(CityRowQueries.getByPrimaryKeySeq(cityIdSeq), conn)(td))
}
}
override def countAll(creds: Credentials, filters: Seq[Filter] = Nil, conn: Option[Connection] = None)(implicit trace: TraceData) = checkPerm(creds, "view") {
traceF("get.all.count")(td => db.queryF(CityRowQueries.countAll(filters), conn)(td))
}
override def getAll(creds: Credentials, filters: Seq[Filter] = Nil, orderBys: Seq[OrderBy] = Nil, limit: Option[Int] = None, offset: Option[Int] = None, conn: Option[Connection] = None)(implicit trace: TraceData) = checkPerm(creds, "view") {
traceF("get.all")(td => db.queryF(CityRowQueries.getAll(filters, orderBys, limit, offset), conn)(td))
}
// Search
override def searchCount(creds: Credentials, q: Option[String], filters: Seq[Filter] = Nil, conn: Option[Connection] = None)(implicit trace: TraceData) = checkPerm(creds, "view") {
traceF("search.count")(td => db.queryF(CityRowQueries.searchCount(q, filters), conn)(td))
}
override def search(
creds: Credentials, q: Option[String], filters: Seq[Filter] = Nil, orderBys: Seq[OrderBy] = Nil, limit: Option[Int] = None, offset: Option[Int] = None, conn: Option[Connection] = None
)(implicit trace: TraceData) = checkPerm(creds, "view") {
traceF("search")(td => db.queryF(CityRowQueries.search(q, filters, orderBys, limit, offset), conn)(td))
}
def searchExact(
creds: Credentials, q: String, orderBys: Seq[OrderBy] = Nil, limit: Option[Int] = None, offset: Option[Int] = None, conn: Option[Connection] = None
)(implicit trace: TraceData) = checkPerm(creds, "view") {
traceF("search.exact")(td => db.queryF(CityRowQueries.searchExact(q, orderBys, limit, offset), conn)(td))
}
// --- Per-column count/get helpers; the *Seq variants short-circuit on empty input. ---
def countByCity(creds: Credentials, city: String, conn: Option[Connection] = None)(implicit trace: TraceData) = checkPerm(creds, "view") {
traceF("count.by.city")(td => db.queryF(CityRowQueries.CountByCity(city), conn)(td))
}
def getByCity(creds: Credentials, city: String, orderBys: Seq[OrderBy] = Nil, limit: Option[Int] = None, offset: Option[Int] = None, conn: Option[Connection] = None)(implicit trace: TraceData) = checkPerm(creds, "view") {
traceF("get.by.city")(td => db.queryF(CityRowQueries.GetByCity(city, orderBys, limit, offset), conn)(td))
}
def getByCitySeq(creds: Credentials, citySeq: Seq[String], conn: Option[Connection] = None)(implicit trace: TraceData) = checkPerm(creds, "view") {
if (citySeq.isEmpty) {
Future.successful(Nil)
} else {
traceF("get.by.city.seq") { td =>
db.queryF(CityRowQueries.GetByCitySeq(citySeq), conn)(td)
}
}
}
def countByCityId(creds: Credentials, cityId: Int, conn: Option[Connection] = None)(implicit trace: TraceData) = checkPerm(creds, "view") {
traceF("count.by.cityId")(td => db.queryF(CityRowQueries.CountByCityId(cityId), conn)(td))
}
def getByCityId(creds: Credentials, cityId: Int, orderBys: Seq[OrderBy] = Nil, limit: Option[Int] = None, offset: Option[Int] = None, conn: Option[Connection] = None)(implicit trace: TraceData) = checkPerm(creds, "view") {
traceF("get.by.cityId")(td => db.queryF(CityRowQueries.GetByCityId(cityId, orderBys, limit, offset), conn)(td))
}
def getByCityIdSeq(creds: Credentials, cityIdSeq: Seq[Int], conn: Option[Connection] = None)(implicit trace: TraceData) = checkPerm(creds, "view") {
if (cityIdSeq.isEmpty) {
Future.successful(Nil)
} else {
traceF("get.by.cityId.seq") { td =>
db.queryF(CityRowQueries.GetByCityIdSeq(cityIdSeq), conn)(td)
}
}
}
def countByCountryId(creds: Credentials, countryId: Int, conn: Option[Connection] = None)(implicit trace: TraceData) = checkPerm(creds, "view") {
traceF("count.by.countryId")(td => db.queryF(CityRowQueries.CountByCountryId(countryId), conn)(td))
}
def getByCountryId(creds: Credentials, countryId: Int, orderBys: Seq[OrderBy] = Nil, limit: Option[Int] = None, offset: Option[Int] = None, conn: Option[Connection] = None)(implicit trace: TraceData) = checkPerm(creds, "view") {
traceF("get.by.countryId")(td => db.queryF(CityRowQueries.GetByCountryId(countryId, orderBys, limit, offset), conn)(td))
}
def getByCountryIdSeq(creds: Credentials, countryIdSeq: Seq[Int], conn: Option[Connection] = None)(implicit trace: TraceData) = checkPerm(creds, "view") {
if (countryIdSeq.isEmpty) {
Future.successful(Nil)
} else {
traceF("get.by.countryId.seq") { td =>
db.queryF(CityRowQueries.GetByCountryIdSeq(countryIdSeq), conn)(td)
}
}
}
def countByLastUpdate(creds: Credentials, lastUpdate: ZonedDateTime, conn: Option[Connection] = None)(implicit trace: TraceData) = checkPerm(creds, "view") {
traceF("count.by.lastUpdate")(td => db.queryF(CityRowQueries.CountByLastUpdate(lastUpdate), conn)(td))
}
def getByLastUpdate(creds: Credentials, lastUpdate: ZonedDateTime, orderBys: Seq[OrderBy] = Nil, limit: Option[Int] = None, offset: Option[Int] = None, conn: Option[Connection] = None)(implicit trace: TraceData) = checkPerm(creds, "view") {
traceF("get.by.lastUpdate")(td => db.queryF(CityRowQueries.GetByLastUpdate(lastUpdate, orderBys, limit, offset), conn)(td))
}
def getByLastUpdateSeq(creds: Credentials, lastUpdateSeq: Seq[ZonedDateTime], conn: Option[Connection] = None)(implicit trace: TraceData) = checkPerm(creds, "view") {
if (lastUpdateSeq.isEmpty) {
Future.successful(Nil)
} else {
traceF("get.by.lastUpdate.seq") { td =>
db.queryF(CityRowQueries.GetByLastUpdateSeq(lastUpdateSeq), conn)(td)
}
}
}
// Mutations
def insert(creds: Credentials, model: CityRow, conn: Option[Connection] = None)(implicit trace: TraceData) = checkPerm(creds, "edit") {
traceF("insert")(td => db.executeF(CityRowQueries.insert(model), conn)(td).flatMap {
case 1 => getByPrimaryKey(creds, model.cityId, conn)(td)
case _ => throw new IllegalStateException("Unable to find newly-inserted City")
})
}
def insertBatch(creds: Credentials, models: Seq[CityRow], conn: Option[Connection] = None)(implicit trace: TraceData) = checkPerm(creds, "edit") {
traceF("insertBatch")(td => if (models.isEmpty) {
Future.successful(0)
} else {
db.executeF(CityRowQueries.insertBatch(models), conn)(td)
})
}
def create(creds: Credentials, fields: Seq[DataField], conn: Option[Connection] = None)(implicit trace: TraceData) = checkPerm(creds, "edit") {
traceF("create")(td => db.executeF(CityRowQueries.create(fields), conn)(td).flatMap { _ =>
getByPrimaryKey(creds, fieldVal(fields, "cityId").toInt, conn)
})
}
// Remove returns the deleted row, or throws if no row matched.
def remove(creds: Credentials, cityId: Int, conn: Option[Connection] = None)(implicit trace: TraceData) = checkPerm(creds, "edit") {
traceF("remove")(td => getByPrimaryKey(creds, cityId, conn)(td).flatMap {
case Some(current) =>
db.executeF(CityRowQueries.removeByPrimaryKey(cityId), conn)(td).map(_ => current)
case None => throw new IllegalStateException(s"Cannot find CityRow matching [$cityId]")
})
}
// Update re-reads the row afterwards, honoring a possibly-changed primary key.
def update(creds: Credentials, cityId: Int, fields: Seq[DataField], conn: Option[Connection] = None)(implicit trace: TraceData) = checkPerm(creds, "edit") {
traceF("update")(td => getByPrimaryKey(creds, cityId, conn)(td).flatMap {
case Some(current) if fields.isEmpty => Future.successful(current -> s"No changes required for City [$cityId]")
case Some(_) => db.executeF(CityRowQueries.update(cityId, fields), conn)(td).flatMap { _ =>
getByPrimaryKey(creds, fields.find(_.k == "cityId").flatMap(_.v).map(s => s.toInt).getOrElse(cityId), conn)(td).map {
case Some(newModel) =>
newModel -> s"Updated [${fields.size}] fields of City [$cityId]"
case None => throw new IllegalStateException(s"Cannot find CityRow matching [$cityId]")
}
}
case None => throw new IllegalStateException(s"Cannot find CityRow matching [$cityId]")
})
}
def updateBulk(creds: Credentials, pks: Seq[Int], fields: Seq[DataField], conn: Option[Connection] = None)(implicit trace: TraceData) = checkPerm(creds, "edit") {
Future.sequence(pks.map(pk => update(creds, pk, fields, conn))).map { x =>
s"Updated [${fields.size}] fields for [${x.size} of ${pks.size}] CityRow"
}
}
// CSV export of a result page.
def csvFor(totalCount: Int, rows: Seq[CityRow])(implicit trace: TraceData) = {
traceB("export.csv")(td => CsvUtils.csvFor(Some(key), totalCount, rows, CityRowQueries.fields)(td))
}
}
| KyleU/boilerplay | app/services/address/CityRowService.scala | Scala | cc0-1.0 | 10,174 |
/**
* Copyright 2015 Adrian Hurtado (adrianhurt)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package views.html.b4
/** Bootstrap 4 "inline" form helpers: a field constructor rendering fields for
  * `form-inline` layouts, plus shortcut helpers to build forms with it.
  */
package object inline {
import play.twirl.api.Html
import play.api.mvc.{ Call, RequestHeader }
import play.api.i18n.Messages
import views.html.helper._
import views.html.bs.Args.{ inner, isTrue }
/**
* Declares the class for the Inline FieldConstructor.
*/
class InlineFieldConstructor(val isCustom: Boolean = false, val withFeedbackTooltip: Boolean = false) extends B4FieldConstructor {
/* Define the class of the corresponding form */
val formClass = "form-inline"
/* Renders the corresponding template of the field constructor */
def apply(fieldInfo: B4FieldInfo, inputHtml: Html)(implicit messages: Messages) = bsFieldConstructor(fieldInfo, inputHtml)(this, messages)
/* Renders the corresponding template of the form group */
def apply(contentHtml: Html, argsMap: Map[Symbol, Any])(implicit messages: Messages) = bsFormGroup(contentHtml, argsMap)(messages)
}
/**
* Creates a new InlineFieldConstructor to use for specific forms or scopes (don't use it as a default one).
* If a default B4FieldConstructor and a specific InlineFieldConstructor are within the same scope, the more
* specific will be chosen.
*/
def fieldConstructorSpecific(isCustom: Boolean = false, withFeedbackTooltip: Boolean = false): InlineFieldConstructor =
new InlineFieldConstructor(isCustom, withFeedbackTooltip)
/**
* Returns it as a B4FieldConstructor to use it as default within a template
*/
def fieldConstructor(isCustom: Boolean = false, withFeedbackTooltip: Boolean = false): B4FieldConstructor =
fieldConstructorSpecific(isCustom, withFeedbackTooltip)
/**
* **********************************************************************************************************************************
* SHORTCUT HELPERS
* *********************************************************************************************************************************
*/
// Renders a form with an implicit inline field constructor. The '_custom and
// '_feedbackTooltip args configure the constructor; the rest are forwarded
// (via inner) to the underlying form template.
def form(action: Call, args: (Symbol, Any)*)(body: InlineFieldConstructor => Html) = {
val ifc = fieldConstructorSpecific(isCustom = isTrue(args, '_custom), withFeedbackTooltip = isTrue(args, '_feedbackTooltip))
views.html.b4.form(action, inner(args): _*)(body(ifc))(ifc)
}
// Same as form, but also renders a CSRF token taken from the implicit request.
def formCSRF(action: Call, args: (Symbol, Any)*)(body: InlineFieldConstructor => Html)(implicit request: RequestHeader) = {
val ifc = fieldConstructorSpecific(isCustom = isTrue(args, '_custom), withFeedbackTooltip = isTrue(args, '_feedbackTooltip))
views.html.b4.formCSRF(action, inner(args): _*)(body(ifc))(ifc, request)
}
}
} | adrianhurt/play-bootstrap | play25-bootstrap4/module/app/views/b4/inline/package.scala | Scala | apache-2.0 | 3,187 |
/*
* Copyright 2016 rdbc contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.rdbc.implbase
import java.time._
import java.util.UUID
import io.rdbc.sapi.{DecimalNumber, Row}
import org.reactivestreams.Subscription
import org.reactivestreams.tck.SubscriberWhiteboxVerification.{SubscriberPuppet, WhiteboxSubscriberProbe}
import org.reactivestreams.tck.{SubscriberWhiteboxVerification, TestEnvironment}
import org.scalatest.testng.TestNGSuiteLike
import scala.reflect.ClassTag
/**
 * Reactive Streams TCK whitebox verification of HeadSubscriber. Each Subscriber
 * signal is handled by the real implementation and simultaneously reported to
 * the TCK probe so the spec rules can be asserted.
 */
class HeadSubscriberVerification
extends SubscriberWhiteboxVerification[Row](new TestEnvironment())
with TestNGSuiteLike {
// Wrap a HeadSubscriber (limited to 100 elements) so every signal is both
// processed normally (super call) and registered with the TCK probe.
def createSubscriber(probe: WhiteboxSubscriberProbe[Row]): HeadSubscriber = {
new HeadSubscriber(n = Some(100)) {
override def onSubscribe(s: Subscription): Unit = {
super.onSubscribe(s)
probe.registerOnSubscribe(new SubscriberPuppet() {
def signalCancel(): Unit = s.cancel()
def triggerRequest(elements: Long): Unit = s.request(elements)
})
}
override def onNext(elem: Row): Unit = {
super.onNext(elem)
probe.registerOnNext(elem)
}
override def onError(t: Throwable): Unit = {
super.onError(t)
probe.registerOnError(t)
}
override def onComplete(): Unit = {
super.onComplete()
probe.registerOnComplete()
}
}
}
def createElement(element: Int): Row = dummyRow
// Stream-element stub: the verification only passes rows through and never
// reads columns, so every accessor is deliberately unimplemented (???).
private def dummyRow: Row = new Row {
def localDateTime(name: String): LocalDateTime = ???
def localDateTime(idx: Int): LocalDateTime = ???
def col[A: ClassTag](idx: Int): A = ???
def col[A: ClassTag](name: String): A = ???
def shortOpt(name: String): Option[Short] = ???
def shortOpt(idx: Int): Option[Short] = ???
def bool(name: String): Boolean = ???
def bool(idx: Int): Boolean = ???
def decimal(name: String): DecimalNumber = ???
def decimal(idx: Int): DecimalNumber = ???
def float(name: String): Float = ???
def float(idx: Int): Float = ???
def uuid(name: String): UUID = ???
def uuid(idx: Int): UUID = ???
def long(name: String): Long = ???
def long(idx: Int): Long = ???
def instant(name: String): Instant = ???
def instant(idx: Int): Instant = ???
def doubleOpt(name: String): Option[Double] = ???
def doubleOpt(idx: Int): Option[Double] = ???
def localDateOpt(name: String): Option[LocalDate] = ???
def localDateOpt(idx: Int): Option[LocalDate] = ???
def bigDecimalOpt(name: String): Option[BigDecimal] = ???
def bigDecimalOpt(idx: Int): Option[BigDecimal] = ???
def strOpt(name: String): Option[String] = ???
def strOpt(idx: Int): Option[String] = ???
def instantOpt(name: String): Option[Instant] = ???
def instantOpt(idx: Int): Option[Instant] = ???
def localDate(name: String): LocalDate = ???
def localDate(idx: Int): LocalDate = ???
def bigDecimal(name: String): BigDecimal = ???
def bigDecimal(idx: Int): BigDecimal = ???
def longOpt(name: String): Option[Long] = ???
def longOpt(idx: Int): Option[Long] = ???
def double(name: String): Double = ???
def double(idx: Int): Double = ???
def colOpt[A: ClassTag](idx: Int): Option[A] = ???
def colOpt[A: ClassTag](name: String): Option[A] = ???
def localTimeOpt(name: String): Option[LocalTime] = ???
def localTimeOpt(idx: Int): Option[LocalTime] = ???
def uuidOpt(name: String): Option[UUID] = ???
def uuidOpt(idx: Int): Option[UUID] = ???
def int(name: String): Int = ???
def int(idx: Int): Int = ???
def decimalOpt(name: String): Option[DecimalNumber] = ???
def decimalOpt(idx: Int): Option[DecimalNumber] = ???
def str(name: String): String = ???
def str(idx: Int): String = ???
def localTime(name: String): LocalTime = ???
def localTime(idx: Int): LocalTime = ???
def localDateTimeOpt(name: String): Option[LocalDateTime] = ???
def localDateTimeOpt(idx: Int): Option[LocalDateTime] = ???
def charOpt(name: String): Option[Char] = ???
def charOpt(idx: Int): Option[Char] = ???
def boolOpt(name: String): Option[Boolean] = ???
def boolOpt(idx: Int): Option[Boolean] = ???
def intOpt(name: String): Option[Int] = ???
def intOpt(idx: Int): Option[Int] = ???
def bytesOpt(name: String): Option[Array[Byte]] = ???
def bytesOpt(idx: Int): Option[Array[Byte]] = ???
def bytes(name: String): Array[Byte] = ???
def bytes(idx: Int): Array[Byte] = ???
def char(name: String): Char = ???
def char(idx: Int): Char = ???
def floatOpt(name: String): Option[Float] = ???
def floatOpt(idx: Int): Option[Float] = ???
def short(name: String): Short = ???
def short(idx: Int): Short = ???
def instant(name: String, zoneId: ZoneId): Instant = ???
def instantOpt(name: String, zoneId: ZoneId): Option[Instant] = ???
def instant(idx: Int, zoneId: ZoneId): Instant = ???
def instantOpt(idx: Int, zoneId: ZoneId): Option[Instant] = ???
def zonedDateTime(name: String): ZonedDateTime = ???
def zonedDateTimeOpt(name: String): Option[ZonedDateTime] = ???
def zonedDateTime(idx: Int): ZonedDateTime = ???
def zonedDateTimeOpt(idx: Int): Option[ZonedDateTime] = ???
}
}
| rdbc-io/rdbc | rdbc-implbase/src/test/scala/io/rdbc/implbase/HeadSubscriberVerification.scala | Scala | apache-2.0 | 5,778 |
// Generated by <a href="http://scalaxb.org/">scalaxb</a>.
package eveapi.xml.account.char.ContractItems
/** Root of the EVE API `char/ContractItems` response (scalaxb-generated binding).
  * `attributes` maps XML attribute names (keyed as `"@name"`) to their raw records;
  * `version` reads the `@version` attribute lazily and throws `NoSuchElementException`
  * if the attribute is absent.
  */
case class Eveapi(currentTime: String,
                  result: eveapi.xml.account.char.ContractItems.Result,
                  cachedUntil: String,
                  attributes: Map[String, scalaxb.DataRecord[Any]] = Map()) {
  lazy val version = attributes("@version").as[BigInt]
}
case class Result(rowset: eveapi.xml.account.char.ContractItems.Rowset)
/** A `<rowset>` element: its `<row>` children plus the `@columns`/`@key`/`@name`
  * attributes, each read lazily from the attribute map (throws if absent).
  */
case class Rowset(row: Seq[eveapi.xml.account.char.ContractItems.Row] = Nil,
                  attributes: Map[String, scalaxb.DataRecord[Any]] = Map()) {
  lazy val columns = attributes("@columns").as[String]
  lazy val key = attributes("@key").as[String]
  lazy val name = attributes("@name").as[String]
}
/** A contract-item `<row>`; every field is carried as an XML attribute and
  * decoded lazily as `BigInt` (throws if the attribute is absent).
  */
case class Row(attributes: Map[String, scalaxb.DataRecord[Any]] = Map()) {
  lazy val included = attributes("@included").as[BigInt]
  lazy val quantity = attributes("@quantity").as[BigInt]
  lazy val recordID = attributes("@recordID").as[BigInt]
  lazy val singleton = attributes("@singleton").as[BigInt]
  lazy val typeID = attributes("@typeID").as[BigInt]
}
| scala-eveapi/eveapi | xml/src/main/scala/eveapi/xml/char/ContractItems/ContractItems.scala | Scala | mit | 1,134 |
package com.bryanjswift.web.resources
import com.sun.jersey.spi.resource.Singleton
import javax.ws.rs.{GET, Produces, Path, PathParam}
import javax.ws.rs.core.MediaType.{APPLICATION_XML, TEXT_PLAIN, TEXT_XML, TEXT_HTML}
import velocity.VelocityView
@Singleton
@Path("/index")
class IndexResource {

  /** GET /index — returns a fixed HTML greeting built from an XML literal. */
  @GET
  @Produces(Array(TEXT_HTML))
  def message: String =
    (<h1>Hello Bryan, From Jersey</h1>).toString

  /** GET /index/velocity — renders the Velocity template against an empty model. */
  @GET
  @Path("/velocity")
  @Produces(Array(TEXT_HTML))
  def velocity: String =
    new VelocityView("templates/index.vm").merge(Map[String, Any]())
}
| bryanjswift/bryanjswift.com | src/main/scala/com/bryanjswift/web/resources/IndexResource.scala | Scala | mit | 638 |
package com.nulabinc.backlog.migration.common.service
import javax.inject.Inject
import com.nulabinc.backlog.migration.common.client.BacklogAPIClient
import com.nulabinc.backlog.migration.common.domain.{
BacklogCustomStatus,
BacklogProjectKey,
BacklogStatus,
BacklogStatuses,
Id
}
import com.nulabinc.backlog.migration.common.utils.Logging
import com.nulabinc.backlog4j.BacklogAPIException
import com.nulabinc.backlog4j.Project.CustomStatusColor
import com.nulabinc.backlog4j.api.option.{AddStatusParams, UpdateOrderOfStatusParams}
import scala.jdk.CollectionConverters._
import scala.util.Try
/**
* @author
* uchida
*/
class StatusServiceImpl @Inject() (
    backlog: BacklogAPIClient,
    projectKey: BacklogProjectKey
) extends StatusService
    with Logging {

  /** Fetches the project's statuses, falling back to space-wide defaults.
    *
    * NOTE(review): the `case ex => throw ex` branch re-wraps the exception into a
    * `Failure`, which the trailing `.getOrElse(defaultStatuses())` then swallows —
    * so *every* failure (not only "No such project") currently falls back to the
    * defaults. Confirm whether other errors were meant to propagate.
    */
  override def allStatuses(): BacklogStatuses =
    Try {
      BacklogStatuses(
        backlog.getStatuses(projectKey).asScala.toSeq.map(BacklogStatus.from)
      )
    }.recover {
      case ex: BacklogAPIException if ex.getMessage.contains("No such project") =>
        defaultStatuses()
      case ex =>
        throw ex
    }.getOrElse(defaultStatuses())

  /** Creates a custom status in the target project and returns the created value. */
  override def add(status: BacklogCustomStatus): BacklogCustomStatus = {
    val added = backlog.addStatus(
      new AddStatusParams(
        projectKey.value,
        status.name.trimmed,
        CustomStatusColor.strValueOf(status.color)
      )
    )
    BacklogCustomStatus.from(added)
  }

  /** Reorders the project's statuses to match `ids`.
    *
    * NOTE(review): as in `allStatuses`, the rethrow in the second `recover` case is
    * neutralized by `.getOrElse(())`, so API errors are logged but never propagated.
    */
  override def updateOrder(ids: Seq[Id[BacklogStatus]]): Unit =
    Try {
      backlog.updateOrderOfStatus(
        new UpdateOrderOfStatusParams(projectKey.value, ids.map(_.value).asJava)
      )
    }.recover {
      case ex: BacklogAPIException if ex.getMessage.contains("Undefined resource") =>
        logger.warn("Your backlog doesn't support the updateOrder API", ex)
      case ex =>
        logger.error(s"UpdateOrder API error. Message: ${ex.getMessage}", ex)
        throw ex
    }.getOrElse(())

  /** Deletes the status, substituting status id 1 for issues that still use it. */
  override def remove(id: Id[BacklogStatus]): Unit =
    backlog.removeStatus(projectKey.value, id.value, 1) // Any status id is OK

  /** Space-wide default statuses (no project scoping). */
  private def defaultStatuses(): BacklogStatuses =
    BacklogStatuses(
      backlog.getStatuses.asScala.toSeq.map(BacklogStatus.from)
    )
}
| nulab/backlog-migration-common | core/src/main/scala/com/nulabinc/backlog/migration/common/service/StatusServiceImpl.scala | Scala | mit | 2,230 |
package gitbucket.core.service
import gitbucket.core.model.{Session => _, _}
import gitbucket.core.plugin.ReceiveHook
import gitbucket.core.model.Profile._
import gitbucket.core.model.Profile.profile.blockingApi._
import org.eclipse.jgit.transport.{ReceiveCommand, ReceivePack}
trait ProtectedBranchService {
  import ProtectedBranchService._

  // Loads the protection row plus its required status-check contexts (left join),
  // collapsing the join rows into a single ProtectedBranchInfo when one exists.
  private def getProtectedBranchInfoOpt(owner: String, repository: String, branch: String)(implicit session: Session): Option[ProtectedBranchInfo] =
    ProtectedBranches
      .joinLeft(ProtectedBranchContexts)
      .on { case (pb, c) => pb.byBranch(c.userName, c.repositoryName, c.branch) }
      .map { case (pb, c) => pb -> c.map(_.context) }
      .filter(_._1.byPrimaryKey(owner, repository, branch))
      .list
      .groupBy(_._1)
      .headOption
      .map { p => p._1 -> p._2.flatMap(_._2) }
      .map { case (t1, contexts) =>
        new ProtectedBranchInfo(t1.userName, t1.repositoryName, true, contexts, t1.statusCheckAdmin)
      }

  /** Protection settings for the branch, or a disabled placeholder when none exist. */
  def getProtectedBranchInfo(owner: String, repository: String, branch: String)(implicit session: Session): ProtectedBranchInfo =
    getProtectedBranchInfoOpt(owner, repository, branch).getOrElse(ProtectedBranchInfo.disabled(owner, repository))

  /** Names of all protected branches in the repository. */
  def getProtectedBranchList(owner: String, repository: String)(implicit session: Session): List[String] =
    ProtectedBranches.filter(_.byRepository(owner, repository)).map(_.branch).list

  /** Replaces any existing protection for the branch with the given settings.
    * Note: includeAdministrators is only persisted when at least one context is given.
    */
  def enableBranchProtection(owner: String, repository: String, branch:String, includeAdministrators: Boolean, contexts: Seq[String])
                            (implicit session: Session): Unit = {
    disableBranchProtection(owner, repository, branch)
    ProtectedBranches.insert(new ProtectedBranch(owner, repository, branch, includeAdministrators && contexts.nonEmpty))
    contexts.map{ context =>
      ProtectedBranchContexts.insert(new ProtectedBranchContext(owner, repository, branch, context))
    }
  }

  /** Removes protection; the contexts table is expected to cascade — TODO confirm schema. */
  def disableBranchProtection(owner: String, repository: String, branch:String)(implicit session: Session): Unit =
    ProtectedBranches.filter(_.byPrimaryKey(owner, repository, branch)).delete

}
object ProtectedBranchService {

  /** Git receive hook that enforces branch protection before a push is applied. */
  class ProtectedBranchReceiveHook extends ReceiveHook with ProtectedBranchService with RepositoryService with AccountService {
    override def preReceive(owner: String, repository: String, receivePack: ReceivePack, command: ReceiveCommand, pusher: String)
                           (implicit session: Session): Option[String] = {
      val branch = command.getRefName.stripPrefix("refs/heads/")
      // stripPrefix changed the name => the ref really is a branch (not a tag etc.).
      if(branch != command.getRefName){
        val repositoryInfo = getRepository(owner, repository)
        // The default branch may never be deleted, protected or not.
        if(command.getType == ReceiveCommand.Type.DELETE && repositoryInfo.exists(_.repository.defaultBranch == branch)){
          Some(s"refusing to delete the branch: ${command.getRefName}.")
        } else {
          getProtectedBranchInfo(owner, repository, branch).getStopReason(receivePack.isAllowNonFastForwards, command, pusher)
        }
      } else {
        None
      }
    }
  }

  case class ProtectedBranchInfo(
    owner: String,
    repository: String,
    enabled: Boolean,
    /**
     * Require status checks to pass before merging
     * Choose which status checks must pass before branches can be merged into test.
     * When enabled, commits must first be pushed to another branch,
     * then merged or pushed directly to test after status checks have passed.
     */
    contexts: Seq[String],
    /**
     * Include administrators
     * Enforce required status checks for repository administrators.
     */
    includeAdministrators: Boolean) extends AccountService with RepositoryService with CommitStatusService {

    /** True when the pusher is the owner, a managing group member, or an ADMIN collaborator
      * (directly or through group membership).
      */
    def isAdministrator(pusher: String)(implicit session: Session): Boolean =
      pusher == owner || getGroupMembers(owner).exists(gm => gm.userName == pusher && gm.isManager) ||
      getCollaborators(owner, repository).exists { case (collaborator, isGroup) =>
        if(collaborator.role == Role.ADMIN.name){
          if(isGroup){
            getGroupMembers(collaborator.collaboratorName).exists(gm => gm.userName == pusher)
          } else {
            collaborator.collaboratorName == pusher
          }
        } else false
      }

    /**
     * Can't be force pushed
     * Can't be deleted
     * Can't have changes merged into them until required status checks pass
     *
     * Returns Some(reason) when the command must be rejected, None to allow it.
     */
    def getStopReason(isAllowNonFastForwards: Boolean, command: ReceiveCommand, pusher: String)(implicit session: Session): Option[String] = {
      if(enabled){
        command.getType() match {
          // When the ReceivePack would allow a non-fast-forward, protection still blocks it;
          // when it would not, JGit itself rejects the push before reaching here.
          case ReceiveCommand.Type.UPDATE_NONFASTFORWARD if isAllowNonFastForwards =>
            Some("Cannot force-push to a protected branch")
          case ReceiveCommand.Type.UPDATE|ReceiveCommand.Type.UPDATE_NONFASTFORWARD if needStatusCheck(pusher) =>
            unSuccessedContexts(command.getNewId.name) match {
              case s if s.size == 1 => Some(s"""Required status check "${s.toSeq(0)}" is expected""")
              // Reached only for size >= 2 (size == 1 matched above).
              case s if s.size >= 1 => Some(s"${s.size} of ${contexts.size} required status checks are expected")
              case _ => None
            }
          case ReceiveCommand.Type.DELETE =>
            Some("Cannot delete a protected branch")
          case _ => None
        }
      } else {
        None
      }
    }

    /** Required contexts that are not yet in SUCCESS state for the given commit. */
    def unSuccessedContexts(sha1: String)(implicit session: Session): Set[String] = if(contexts.isEmpty){
      Set.empty
    } else {
      contexts.toSet -- getCommitStatues(owner, repository, sha1).filter(_.state == CommitState.SUCCESS).map(_.context).toSet
    }

    /** Whether the pusher is subject to the required status checks. */
    def needStatusCheck(pusher: String)(implicit session: Session): Boolean = pusher match {
      case _ if !enabled => false
      case _ if contexts.isEmpty => false
      case _ if includeAdministrators => true
      case p if isAdministrator(p) => false
      case _ => true
    }
  }

  object ProtectedBranchInfo{
    /** Placeholder for a branch with no protection configured. */
    def disabled(owner: String, repository: String): ProtectedBranchInfo = ProtectedBranchInfo(owner, repository, false, Nil, false)
  }
}
| gencer/gitbucket | src/main/scala/gitbucket/core/service/ProtectedBranchService.scala | Scala | apache-2.0 | 6,168 |
package ru.pavkin.todoist.api.core
import org.scalacheck.Gen.alphaStr
import org.scalacheck.Gen.posNum
import org.scalatest.{Matchers, FunSuite}
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import shapeless.test.illTyped
import shapeless.{::, HNil}
class ToRawRequestSpec extends FunSuite with Matchers with GeneratorDrivenPropertyChecks {

  // Command encoders for the element types exercised by the derivation tests below.
  implicit val intCommand = ToRawRequest.command[Int]((n: Int) => List(n.toString))
  implicit val stringCommand = ToRawRequest.command[String]((s: String) => List(s))

  test("ToRawRequest") {
    // Instances resolve for the supported element types...
    ToRawRequest[Int]
    ToRawRequest[String]
    // ...and for an HList of them, concatenating rendered commands in order.
    ToRawRequest[Int :: String :: HNil].rawRequest(2 :: "abc" :: HNil) shouldBe
      Map("commands" -> List("2", "abc"))
    // Unsupported element types must fail to compile, alone or inside an HList.
    illTyped("""ToRawRequest[Boolean]""")
    illTyped("""ToRawRequest[Boolean :: Int :: HNil]""")
    illTyped("""ToRawRequest[Int :: Boolean :: HNil]""")
  }

  test("ToRawRequest combinates") {
    // Property: arbitrary Int/String pairs render to the COMMANDS key in order.
    forAll(posNum[Int], alphaStr) { (num: Int, text: String) =>
      ToRawRequest[Int :: String :: HNil].rawRequest(num :: text :: HNil) shouldEqual
        Map(ToRawRequest.COMMANDS -> List(num.toString, text))
    }
  }
}
| vpavkin/scalist | tests/src/test/scala/ru/pavkin/todoist/api/core/ToRawRequestSpec.scala | Scala | mit | 1,108 |
import org.junit.runner._
import org.specs2.mutable._
import org.specs2.runner._
import play.api.test.Helpers._
import play.api.test._
/**
* Add your spec here.
* You can mock out a whole application including requests, plugins etc.
* For more information, consult the wiki.
*/
@RunWith(classOf[JUnitRunner])
class ApplicationSpec extends Specification {

  "Application" should {

    "send 404 on a bad request" in new WithApplication {
      // An unmapped path must be answered with NOT_FOUND.
      val result = route(FakeRequest(GET, "/boum"))
      result must beSome.which(r => status(r) == NOT_FOUND)
    }

    "render the index page" in new WithApplication {
      // The root route must respond 200 with an HTML body.
      val page = route(FakeRequest(GET, "/")).get
      status(page) must equalTo(OK)
      contentType(page) must beSome.which(_ == "text/html")
    }
  }
}
| tarugo07/play-markdown | test/ApplicationSpec.scala | Scala | mit | 744 |
// NOTE(review): appears to be an IntelliJ Scala plugin type-inference fixture (SCL-2014);
// the /*start*/../*end*/ markers delimit the expression under test and the final
// comment line is the expected inferred type — none of them may be altered.
def main(args: Array[String]) {
  val int = 100
  def bug(f: () => Double) = {
    println(f())
  }
  bug(/*start*/() => int/*end*/) // <- plugin confused by conversion from int to double
}
//() => Double
package japgolly.scalajs.react.util
import java.time.Duration
import scala.scalajs.js
import scala.scalajs.js.|
import scala.util.{Failure, Success, Try}
object JsUtil {

  // TODO: Add to microlibs and maybe ReactTestUtils
  /** Sets V8's `Error.stackTraceLimit`, controlling how many frames JS stack traces keep. */
  @inline def setStackTraceLimit(n: Int): Unit =
    js.constructorOf[js.Error].stackTraceLimit = n

  /** Extractor matching any value whose JS `typeof` is "symbol". */
  object JsSymbol {
    def unapply(a: Any): Option[js.Symbol] =
      js.typeOf(a.asInstanceOf[js.Any]) match {
        case "symbol" => Some(a.asInstanceOf[js.Symbol])
        case _ => None
      }
  }

  /** `toString` that never throws: symbols get special handling, failures yield "?". */
  def safeToString(a: Any): String =
    try
      a match {
        case JsSymbol(s) => symbolToString(s)
        case _ => a.toString
      }
    catch {
      case _: Throwable => "?"
    }

  /** Renders a JS symbol via its own toString, falling back to its registry key. */
  def symbolToString(s: js.Symbol): String =
    try
      s.asInstanceOf[js.Dynamic].applyDynamic("toString")().asInstanceOf[String]
    catch {
      case _: Throwable =>
        js.Symbol.keyFor(s).toOption match {
          case Some(k) => s"Symbol($k)"
          case None => "Symbol(?)"
        }
    }

  /** Debug-renders a value: strings are JSON-quoted, JS objects dumped property-by-property,
    * everything else annotated with its JS `typeof`.
    */
  def inspectValue(a: Any): String =
    a match {
      case s: String => js.JSON.stringify(s)
      case o: js.Object => inspectObject(o)
      case () | null => "" + a
      case JsSymbol(s) => symbolToString(s)
      case _ => s"${safeToString(a)}: ${js.typeOf(a.asInstanceOf[js.Any])}"
    }

  /** Iterates (propertyName, value) pairs of a JS object; property reads that throw
    * are replaced by the rendered exception text.
    */
  def objectIterator(o: js.Object): Iterator[(String, js.Any)] = {
    val d = o.asInstanceOf[js.Dynamic]
    js.Object.properties(o).iterator.map { n =>
      val v = (try d.selectDynamic(n) catch { case t: Throwable => safeToString(t) }).asInstanceOf[js.Any]
      n -> v
    }
  }

  /** Multi-line, column-aligned dump of an object's properties (sorted by name);
    * only the first line of each value's string form is shown.
    */
  def inspectObject(o: js.Object): String = {
    val s = objectIterator(o).toVector.sortBy(_._1)
    if (s.isEmpty)
      "Value has no object properties: " + o
    else {
      val ss = s.map { case (k, v) =>
        (k, js.typeOf(v), safeToString(v).split('\\n')(0))
      }
      val sz = s.size
      val nlen = sz.toString.length
      val klen = ss.map(_._1.length).max
      val tlen = ss.map(_._2.length).max
      val fmt = s"  [%${nlen}d/$sz] %-${klen}s : %-${tlen}s = %s"
      var i = 0
      ss.map { case (k, t, v) =>
        i = i + 1
        fmt.format(i, k, t, v)
      }.mkString(s"$o\\n", "\\n", "")
    }
  }

  /** Builds a js.Array from varargs. */
  def jsArray[A](as: A*): js.Array[A] = {
    val array = new js.Array[A]
    array.push(as: _*)
    array
  }

  /** Builds a js.Array by draining any Scala IterableOnce. */
  def jsArrayFromTraversable[A](as: IterableOnce[A]): js.Array[A] = {
    val array = new js.Array[A]
    as.iterator.foreach(array push _)
    array
  }

  /** Unsafe cast away of `| Null`; caller asserts non-nullness. */
  @inline def notNull[A](a: A | Null): A =
    a.asInstanceOf[A]

  /** `null` becomes None via `Option.apply`. */
  def jsNullToOption[A](an: A | Null): Option[A] =
    Option(an.asInstanceOf[A])

  /** Inverse of [[jsNullToOption]]: None becomes `null`. */
  def optionToJsNull[A](oa: Option[A]): A | Null =
    oa match {
      case Some(a) => a
      case None => null
    }

  /** Converts a DOMHighResTimeStamp (fractional milliseconds) to a java.time.Duration. */
  def durationFromDOMHighResTimeStamp(ms: Double): Duration =
    Duration.ofNanos((ms * 1000000).toLong)

  /** Creates an unresolved js.Promise plus a completion function: feed it a Try
    * and invoke the returned thunk to resolve (Success) or reject (Failure).
    * A wrapped JavaScriptException is unwrapped to its underlying JS value on rejection.
    */
  def newPromise[A](): (js.Promise[A], Try[A] => js.Function0[Unit]) = {
    var complete: Try[A] => js.Function0[Unit] = null
    // The Promise executor runs synchronously, so `complete` is assigned before we return.
    val p = new js.Promise[A]((respond: js.Function1[A | js.Thenable[A], _], reject: js.Function1[Any, _]) => {
      def fail(t: Throwable) =
        reject(t match {
          case js.JavaScriptException(e) => e
          case e => e
        })
      complete = {
        case Success(a) => () => respond(a)
        case Failure(e) => () => fail(e)
      }
    })
    (p, complete)
  }

  /** Runs a thenable and routes its outcome through `complete`, keeping any exception
    * thrown by the callback contained in a secondary promise.
    */
  def runPromiseAsync[A](pa: => js.Thenable[A])(complete: Try[A] => js.Function0[Unit]): Unit = {
    def next(ta: Try[A]): js.Thenable[Unit] = {
      val (p, pc) = newPromise[Unit]()
      pc(Try(complete(ta)()))()
      p
    }
    type R = Unit | js.Thenable[Unit]
    val ok: A => R = a => next(Success(a))
    // Non-Throwable rejection values are wrapped so Scala code sees a Throwable.
    val ko: Any => R = e => next(Failure(e match {
      case t: Throwable => t
      case _ => js.JavaScriptException(e)
    }))
    pa.`then`[Unit](ok, ko: js.Function1[Any, R])
  }

  /** Adapts callback-style async (given a completer, return a start thunk) into a
    * promise factory; each call starts the async work immediately.
    */
  def asyncToPromise[A](async: (Try[A] => js.Function0[Unit]) => js.Function0[Unit]): () => js.Promise[A] =
    () => {
      val (p, pc) = newPromise[A]()
      async(pc)()
      p
    }
}
| japgolly/scalajs-react | util/src/main/scala/japgolly/scalajs/react/util/JsUtil.scala | Scala | apache-2.0 | 4,173 |
package com.github.diegopacheco.sandbox.scala.akka.timeout
import akka.actor.Actor
import akka.actor.ReceiveTimeout
import scala.concurrent.duration._
import akka.actor.ActorSystem
import akka.actor.Props
/** Demo actor: on "Hello" it arms a 1000 ms inactivity timeout; once no message
  * arrives within that window, Akka delivers ReceiveTimeout to this actor, which
  * this handler converts into a RuntimeException (handled by the actor's
  * supervisor, not by the sender).
  */
class ActorReceiveTimeOut extends Actor {
  // sets initial delay
  //context.setReceiveTimeout(1 milliseconds)
  def receive = {
    case "works" => println("OK")
    case "Hello" => context.setReceiveTimeout(1000 milliseconds);
    case ReceiveTimeout => throw new RuntimeException("Receive timed out")
  }
}
object ActorReceiveTimeOutMainApp extends App {
  val system = ActorSystem("ActorReceiveTimeOutActorSystem")
  val actor = system.actorOf(Props[ActorReceiveTimeOut],"actor01")
  // NOTE(review): `!` (tell) is asynchronous and fire-and-forget — exceptions thrown
  // inside the actor are handled by its supervisor, so this try/catch can never fire.
  // NOTE(review): the ActorSystem is never terminated, so the JVM keeps running.
  try{
    actor ! "works"
    actor ! "Hello"
  }catch{
    case e:RuntimeException => println(e.getMessage())
  }
}
/*
* Copyright 2014 Treode, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.treode.disk
import java.nio.file.Path
import com.treode.async.Async
/** The recovery builder: registers log replayers, then either reattaches existing
  * drives or initializes a fresh system. Both terminal methods close the builder.
  */
trait DiskRecovery {

  /** Register a method to replay a log entry.
    *
    * A replayer must be registered for every type of log entry that may be in the write log. The
    * disk system will refuse to recover if it cannot identify all log entries.
    */
  def replay [R] (desc: RecordDescriptor [R]) (f: R => Any)

  /** Reattach one or more disk drives.
    *
    * You need to provide only some of the paths previously attached to this disk system. The
    * recovery mechanism will find the complete list of paths in the superblock.
    *
    * Call `reattach` after registering all replayers. This method closes the recovery builder.
    */
  def reattach (items: Path*): Async [DiskLaunch]

  /** Initialize the system without any disk drives.
    *
    * Since there are no logs to replay, there's no need to attach replayers. This method closes
    * the recovery builder.
    */
  def init (sysid: SystemId): Async [DiskLaunch]
}
| Treode/store | disk/src/com/treode/disk/DiskRecovery.scala | Scala | apache-2.0 | 1,649 |
package breeze.collection.mutable
/*
Copyright 2010 David Hall, Daniel Ramage
Licensed under the Apache License, Version 2.0 (the "License")
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import scala.collection.mutable.Seq
import scala.reflect.ClassTag
/**
* A TriangularArray is a jagged 2-d array where for every row r,
* we have an array of size dim - r.
*
* These are useful for parse charts.
*
* @author dlwh
* @param dimension: The size of the array
*/
@SerialVersionUID(1L)
final class TriangularArray[T:ClassTag](val dimension: Int) extends Serializable { outer =>
  import TriangularArray._

  /** Number of stored cells: dimension * (dimension + 1) / 2. */
  private def numElems = dimension * (dimension+1) / 2

  /** Flat backing storage, addressed via [[TriangularArray.index]]. */
  val data = new Array[T](numElems)

  /** Writes entry (r, c); requires r <= c. */
  def update(r: Int, c: Int, t: T) { data(index(r,c)) = t }

  /** Reads entry (r, c); requires r <= c. */
  @inline
  def apply(r: Int, c: Int) = data(index(r,c))

  /** View of row r as a mutable Seq (writes go through to this array). */
  @inline
  def apply(r: Int) = slice(r)

  // NOTE(review): positional access on this view is inconsistent — `iterator` yields
  // columns r..dimension-1, but `apply(i)` reads column i directly, so indices < r
  // throw via index()'s require. Confirm intended indexing before relying on apply.
  private def slice(r: Int):Seq[T] = new Seq[T] {
    def apply(c: Int) = outer.apply(r,c)
    def update(c: Int, t: T) = outer.update(r,c,t)
    def length = (dimension - r)
    def iterator = Iterator.range(r,dimension).map(apply _ )
  }

  /** Iterates the rows of the array.
    * Fixed: previously ranged over `numElems` (the cell count), producing slices for
    * out-of-range rows with negative lengths; rows are 0 until `dimension`.
    */
  def iterator = Iterator.range(0, dimension) map slice

  /** Applies f to every stored cell (upper triangle only). */
  def foreach(f: T=>Unit) { data foreach f }

  /** Builds a new TriangularArray by mapping every (i, j) entry through f. */
  def map[U:ClassTag](f: T=>U) = tabulate(dimension)((i,j) => f(apply(i,j)))

  override def toString = {
    val buffer = new StringBuilder()
    for ( r <- 0 until dimension ) {
      // Lower-triangle positions are rendered as "----" since they are not stored.
      val columns = for(c <- 0 until dimension) yield {
        if(c <= r) "----" else Option(apply(r,c)).map(_.toString).getOrElse("null")
      }
      buffer ++= columns.mkString("[",", ","]\\n")
    }
    buffer.toString()
  }
}
object TriangularArray {

  /** Builds a dim x dim triangular array, filling entry (r, c) with fill(r, c) for r <= c. */
  def tabulate[T:ClassTag](dim: Int)(fill: (Int,Int)=>T) = {
    val array = new TriangularArray[T](dim)
    for( c <- 0 until dim; r <- 0 to c) {
      array.data(index(r,c)) = fill(r,c)
    }
    array
  }

  /** Builds a dim x dim triangular array with every entry set to the by-name value. */
  def fill[T:ClassTag](dim: Int)(fill: =>T) = {
    val array = new TriangularArray[T](dim)
    for( c <- 0 until dim; r <- 0 to c) {
      array.data(index(r,c)) = fill
    }
    array
  }

  /** Flat index of entry (r, c) in column-major triangular layout.
    * Fixed: the old dead-conditional guard `if(r > c) require(r <= c, ...)` always
    * failed when entered; the check is now a direct require with an accurate message
    * (equality IS allowed). Throws IllegalArgumentException when r > c, as before.
    */
  @inline
  def index(r: Int, c: Int) = {
    require(r <= c, "row must be less than or equal to column!")
    (c * (c+1) /2 + r)
  }

  /** Raw backing array for a dim-sized triangular array, each cell set to the by-name value. */
  def raw[T:ClassTag](dim: Int, fill: =>T) = {
    val numElems = arraySize(dim)
    val data = Array.fill[T](numElems)(fill)
    data
  }

  /** Number of cells stored for a dim x dim triangular array. */
  def arraySize(dim: Int): Int = {
    dim * (dim + 1) / 2
  }
}
| ktakagaki/breeze | src/main/scala/breeze/collection/mutable/TriangularArray.scala | Scala | apache-2.0 | 2,811 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.storage
import scala.collection.mutable
import scala.util.Random
import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.internal.Logging
/**
* ::DeveloperApi::
* BlockReplicationPrioritization provides logic for prioritizing a sequence of peers for
* replicating blocks. BlockManager will replicate to each peer returned in order until the
* desired replication order is reached. If a replication fails, prioritize() will be called
* again to get a fresh prioritization.
*/
@DeveloperApi
trait BlockReplicationPolicy {

  /**
   * Method to prioritize a bunch of candidate peers of a block
   *
   * @param blockManagerId Id of the current BlockManager for self identification
   * @param peers A list of peers of a BlockManager
   * @param peersReplicatedTo Set of peers already replicated to
   * @param blockId BlockId of the block being replicated. This can be used as a source of
   *                randomness if needed.
   * @param numReplicas Number of peers we need to replicate to
   * @return A prioritized list of peers. Lower the index of a peer, higher its priority.
   *         This returns a list of size at most `numReplicas`.
   */
  def prioritize(
      blockManagerId: BlockManagerId,
      peers: Seq[BlockManagerId],
      peersReplicatedTo: mutable.HashSet[BlockManagerId],
      blockId: BlockId,
      numReplicas: Int): List[BlockManagerId]
}
object BlockReplicationUtils {

  /**
   * Uses sampling algorithm by Robert Floyd. Finds a random sample in O(n) while
   * minimizing space usage. Please see <a href="https://math.stackexchange.com/q/178690">
   * here</a>.
   *
   * @param n total number of indices
   * @param m number of samples needed
   * @param r random number generator
   * @return list of m random unique indices
   */
  private def getSampleIds(n: Int, m: Int, r: Random): List[Int] = {
    val start = n - m + 1
    // LinkedHashSet keeps insertion order, so the sample order is deterministic for a seed.
    val picked = (start to n).foldLeft(mutable.LinkedHashSet.empty[Int]) { (acc, i) =>
      val candidate = r.nextInt(i) + 1
      if (acc.contains(candidate)) acc + i else acc + candidate
    }
    // Shift from 1-based draws to 0-based indices.
    picked.toList.map(_ - 1)
  }

  /**
   * Get a random sample of size m from the elems
   *
   * @param elems candidate elements
   * @param m number of samples needed
   * @param r random number generator
   * @tparam T element type
   * @return a random list of size m; if elems has at most m elements, a random
   *         shuffle of all of them
   */
  def getRandomSample[T](elems: Seq[T], m: Int, r: Random): List[T] =
    if (elems.size > m) getSampleIds(elems.size, m, r).map(elems(_))
    else r.shuffle(elems).toList
}
@DeveloperApi
class RandomBlockReplicationPolicy
  extends BlockReplicationPolicy
  with Logging {

  /**
   * Method to prioritize a bunch of candidate peers of a block. This is a basic implementation,
   * that just makes sure we put blocks on different hosts, if possible
   *
   * @param blockManagerId Id of the current BlockManager for self identification
   * @param peers A list of peers of a BlockManager
   * @param peersReplicatedTo Set of peers already replicated to
   * @param blockId BlockId of the block being replicated. This can be used as a source of
   *                randomness if needed.
   * @param numReplicas Number of peers we need to replicate to
   * @return A prioritized list of peers. Lower the index of a peer, higher its priority
   */
  override def prioritize(
      blockManagerId: BlockManagerId,
      peers: Seq[BlockManagerId],
      peersReplicatedTo: mutable.HashSet[BlockManagerId],
      blockId: BlockId,
      numReplicas: Int): List[BlockManagerId] = {
    // Seed from the block id so the same block always yields the same ordering.
    val random = new Random(blockId.hashCode)
    logDebug(s"Input peers : ${peers.mkString(", ")}")
    val sampled = peers.size match {
      case count if count > numReplicas =>
        BlockReplicationUtils.getRandomSample(peers, numReplicas, random)
      case count =>
        if (count < numReplicas) {
          logWarning(s"Expecting ${numReplicas} replicas with only ${peers.size} peer/s.")
        }
        random.shuffle(peers).toList
    }
    logDebug(s"Prioritized peers : ${sampled.mkString(", ")}")
    sampled
  }
}
@DeveloperApi
class BasicBlockReplicationPolicy
  extends BlockReplicationPolicy
  with Logging {

  /**
   * Method to prioritize a bunch of candidate peers of a block manager. This implementation
   * replicates the behavior of block replication in HDFS. For a given number of replicas needed,
   * we choose a peer within the rack, one outside and remaining blockmanagers are chosen at
   * random, in that order till we meet the number of replicas needed.
   * This works best with a total replication factor of 3, like HDFS.
   *
   * @param blockManagerId Id of the current BlockManager for self identification
   * @param peers A list of peers of a BlockManager
   * @param peersReplicatedTo Set of peers already replicated to
   * @param blockId BlockId of the block being replicated. This can be used as a source of
   *                randomness if needed.
   * @param numReplicas Number of peers we need to replicate to
   * @return A prioritized list of peers. Lower the index of a peer, higher its priority
   */
  override def prioritize(
      blockManagerId: BlockManagerId,
      peers: Seq[BlockManagerId],
      peersReplicatedTo: mutable.HashSet[BlockManagerId],
      blockId: BlockId,
      numReplicas: Int): List[BlockManagerId] = {
    logDebug(s"Input peers : $peers")
    logDebug(s"BlockManagerId : $blockManagerId")
    // Deterministic per block: seeded from the block id.
    val random = new Random(blockId.hashCode)
    // if block doesn't have topology info, we can't do much, so we randomly shuffle
    // if there is, we see what's needed from peersReplicatedTo and based on numReplicas,
    // we choose whats needed
    if (blockManagerId.topologyInfo.isEmpty || numReplicas == 0) {
      // no topology info for the block. The best we can do is randomly choose peers
      BlockReplicationUtils.getRandomSample(peers, numReplicas, random)
    } else {
      // we have topology information, we see what is left to be done from peersReplicatedTo
      val doneWithinRack = peersReplicatedTo.exists(_.topologyInfo == blockManagerId.topologyInfo)
      val doneOutsideRack = peersReplicatedTo.exists { p =>
        p.topologyInfo.isDefined && p.topologyInfo != blockManagerId.topologyInfo
      }
      if (doneOutsideRack && doneWithinRack) {
        // we are done, we just return a random sample
        BlockReplicationUtils.getRandomSample(peers, numReplicas, random)
      } else {
        // we separate peers within and outside rack; the local host itself is excluded
        val (inRackPeers, outOfRackPeers) = peers
          .filter(_.host != blockManagerId.host)
          .partition(_.topologyInfo == blockManagerId.topologyInfo)
        // At most one in-rack peer, chosen at random (none if already satisfied).
        val peerWithinRack = if (doneWithinRack) {
          // we are done with in-rack replication, so don't need anymore peers
          Seq.empty
        } else {
          if (inRackPeers.isEmpty) {
            Seq.empty
          } else {
            Seq(inRackPeers(random.nextInt(inRackPeers.size)))
          }
        }
        // At most one out-of-rack peer, only if budget remains after the in-rack pick.
        val peerOutsideRack = if (doneOutsideRack || numReplicas - peerWithinRack.size <= 0) {
          Seq.empty
        } else {
          if (outOfRackPeers.isEmpty) {
            Seq.empty
          } else {
            Seq(outOfRackPeers(random.nextInt(outOfRackPeers.size)))
          }
        }
        val priorityPeers = peerWithinRack ++ peerOutsideRack
        // Fill the remaining budget with a random sample of the untouched peers.
        val numRemainingPeers = numReplicas - priorityPeers.size
        val remainingPeers = if (numRemainingPeers > 0) {
          val rPeers = peers.filter(p => !priorityPeers.contains(p))
          BlockReplicationUtils.getRandomSample(rPeers, numRemainingPeers, random)
        } else {
          Seq.empty
        }
        (priorityPeers ++ remainingPeers).toList
      }
    }
  }
}
| lxsmnv/spark | core/src/main/scala/org/apache/spark/storage/BlockReplicationPolicy.scala | Scala | apache-2.0 | 8,672 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.orc
import java.io.File
import org.apache.spark.sql.{AnalysisException, Row}
import org.apache.spark.sql.TestingUDT.{IntervalData, IntervalUDT}
import org.apache.spark.sql.execution.datasources.orc.OrcSuite
import org.apache.spark.sql.hive.HiveUtils
import org.apache.spark.sql.hive.test.TestHiveSingleton
import org.apache.spark.sql.types._
import org.apache.spark.util.Utils
class HiveOrcSourceSuite extends OrcSuite with TestHiveSingleton {
override val orcImp: String = "hive"
  // Creates an external ORC table backed by orcTableAsDir, populates it from
  // orc_temp_table, and registers two temporary views over the same location
  // via the Hive ORC data source.
  override def beforeAll(): Unit = {
    super.beforeAll()
    sql(
      s"""CREATE EXTERNAL TABLE normal_orc(
         |  intField INT,
         |  stringField STRING
         |)
         |STORED AS ORC
         |LOCATION '${orcTableAsDir.toURI}'
       """.stripMargin)
    sql(
      s"""INSERT INTO TABLE normal_orc
         |SELECT intField, stringField FROM orc_temp_table
       """.stripMargin)
    spark.sql(
      s"""CREATE TEMPORARY VIEW normal_orc_source
         |USING org.apache.spark.sql.hive.orc
         |OPTIONS (
         |  PATH '${new File(orcTableAsDir.getAbsolutePath).toURI}'
         |)
       """.stripMargin)
    spark.sql(
      s"""CREATE TEMPORARY VIEW normal_orc_as_source
         |USING org.apache.spark.sql.hive.orc
         |OPTIONS (
         |  PATH '${new File(orcTableAsDir.getAbsolutePath).toURI}'
         |)
       """.stripMargin)
  }
  // Writes CHAR/VARCHAR columns through Hive, then reads the same files back via
  // both a Hive-created and a Spark-created table over the shared location, and
  // checks the CHAR padding semantics: CHAR(10) pads "b" to 10 chars, CHAR(3)
  // pads "d" to 3.
  test("SPARK-19459/SPARK-18220: read char/varchar column written by Hive") {
    val location = Utils.createTempDir()
    val uri = location.toURI
    try {
      hiveClient.runSqlHive("USE default")
      hiveClient.runSqlHive(
        """
          |CREATE EXTERNAL TABLE hive_orc(
          |  a STRING,
          |  b CHAR(10),
          |  c VARCHAR(10),
          |  d ARRAY<CHAR(3)>)
          |STORED AS orc""".stripMargin)
      // Hive throws an exception if I assign the location in the create table statement.
      hiveClient.runSqlHive(
        s"ALTER TABLE hive_orc SET LOCATION '$uri'")
      hiveClient.runSqlHive(
        """
          |INSERT INTO TABLE hive_orc
          |SELECT 'a', 'b', 'c', ARRAY(CAST('d' AS CHAR(3)))
          |FROM (SELECT 1) t""".stripMargin)
      // We create a different table in Spark using the same schema which points to
      // the same location.
      spark.sql(
        s"""
           |CREATE EXTERNAL TABLE spark_orc(
           |  a STRING,
           |  b CHAR(10),
           |  c VARCHAR(10),
           |  d ARRAY<CHAR(3)>)
           |STORED AS orc
           |LOCATION '$uri'""".stripMargin)
      val result = Row("a", "b         ", "c", Seq("d  "))
      checkAnswer(spark.table("hive_orc"), result)
      checkAnswer(spark.table("spark_orc"), result)
    } finally {
      // Always drop both tables and delete the shared location, even on failure.
      hiveClient.runSqlHive("DROP TABLE IF EXISTS hive_orc")
      hiveClient.runSqlHive("DROP TABLE IF EXISTS spark_orc")
      Utils.deleteRecursively(location)
    }
  }
test("SPARK-24204 error handling for unsupported data types") {
withTempDir { dir =>
val orcDir = new File(dir, "orc").getCanonicalPath
// write path
var msg = intercept[AnalysisException] {
sql("select interval 1 days").write.mode("overwrite").orc(orcDir)
}.getMessage
assert(msg.contains("Cannot save interval data type into external storage."))
msg = intercept[AnalysisException] {
sql("select null").write.mode("overwrite").orc(orcDir)
}.getMessage
assert(msg.contains("ORC data source does not support null data type."))
msg = intercept[AnalysisException] {
spark.udf.register("testType", () => new IntervalData())
sql("select testType()").write.mode("overwrite").orc(orcDir)
}.getMessage
assert(msg.contains("ORC data source does not support interval data type."))
// read path
msg = intercept[AnalysisException] {
val schema = StructType(StructField("a", CalendarIntervalType, true) :: Nil)
spark.range(1).write.mode("overwrite").orc(orcDir)
spark.read.schema(schema).orc(orcDir).collect()
}.getMessage
assert(msg.contains("ORC data source does not support interval data type."))
msg = intercept[AnalysisException] {
val schema = StructType(StructField("a", new IntervalUDT(), true) :: Nil)
spark.range(1).write.mode("overwrite").orc(orcDir)
spark.read.schema(schema).orc(orcDir).collect()
}.getMessage
assert(msg.contains("ORC data source does not support interval data type."))
}
}
test("Check BloomFilter creation") {
Seq(true, false).foreach { convertMetastore =>
withSQLConf(HiveUtils.CONVERT_METASTORE_ORC.key -> s"$convertMetastore") {
if (HiveUtils.isHive23) {
testBloomFilterCreation(org.apache.orc.OrcProto.Stream.Kind.BLOOM_FILTER_UTF8)
} else {
// Before ORC-101
testBloomFilterCreation(org.apache.orc.OrcProto.Stream.Kind.BLOOM_FILTER)
}
}
}
}
test("Enforce direct encoding column-wise selectively") {
Seq(true, false).foreach { convertMetastore =>
withSQLConf(HiveUtils.CONVERT_METASTORE_ORC.key -> s"$convertMetastore") {
testSelectiveDictionaryEncoding(isSelective = false, isHive23 = HiveUtils.isHive23)
}
}
}
test("SPARK-11412 read and merge orc schemas in parallel") {
testMergeSchemasInParallel(OrcFileOperator.readOrcSchemasInParallel)
}
test("SPARK-25993 CREATE EXTERNAL TABLE with subdirectories") {
Seq(true, false).foreach { convertMetastore =>
withSQLConf(HiveUtils.CONVERT_METASTORE_ORC.key -> s"$convertMetastore") {
withTempDir { dir =>
withTable("orc_tbl1", "orc_tbl2", "orc_tbl3") {
val orcTblStatement1 =
s"""
|CREATE EXTERNAL TABLE orc_tbl1(
| c1 int,
| c2 int,
| c3 string)
|STORED AS orc
|LOCATION '${s"${dir.getCanonicalPath}/l1/"}'""".stripMargin
sql(orcTblStatement1)
val orcTblInsertL1 =
s"INSERT INTO TABLE orc_tbl1 VALUES (1, 1, 'orc1'), (2, 2, 'orc2')".stripMargin
sql(orcTblInsertL1)
val orcTblStatement2 =
s"""
|CREATE EXTERNAL TABLE orc_tbl2(
| c1 int,
| c2 int,
| c3 string)
|STORED AS orc
|LOCATION '${s"${dir.getCanonicalPath}/l1/l2/"}'""".stripMargin
sql(orcTblStatement2)
val orcTblInsertL2 =
s"INSERT INTO TABLE orc_tbl2 VALUES (3, 3, 'orc3'), (4, 4, 'orc4')".stripMargin
sql(orcTblInsertL2)
val orcTblStatement3 =
s"""
|CREATE EXTERNAL TABLE orc_tbl3(
| c1 int,
| c2 int,
| c3 string)
|STORED AS orc
|LOCATION '${s"${dir.getCanonicalPath}/l1/l2/l3/"}'""".stripMargin
sql(orcTblStatement3)
val orcTblInsertL3 =
s"INSERT INTO TABLE orc_tbl3 VALUES (5, 5, 'orc5'), (6, 6, 'orc6')".stripMargin
sql(orcTblInsertL3)
withTable("tbl1", "tbl2", "tbl3", "tbl4", "tbl5", "tbl6") {
val topDirStatement =
s"""
|CREATE EXTERNAL TABLE tbl1(
| c1 int,
| c2 int,
| c3 string)
|STORED AS orc
|LOCATION '${s"${dir.getCanonicalPath}"}'""".stripMargin
sql(topDirStatement)
val topDirSqlStatement = s"SELECT * FROM tbl1"
if (convertMetastore) {
checkAnswer(sql(topDirSqlStatement), Nil)
} else {
checkAnswer(sql(topDirSqlStatement), (1 to 6).map(i => Row(i, i, s"orc$i")))
}
val l1DirStatement =
s"""
|CREATE EXTERNAL TABLE tbl2(
| c1 int,
| c2 int,
| c3 string)
|STORED AS orc
|LOCATION '${s"${dir.getCanonicalPath}/l1/"}'""".stripMargin
sql(l1DirStatement)
val l1DirSqlStatement = s"SELECT * FROM tbl2"
if (convertMetastore) {
checkAnswer(sql(l1DirSqlStatement), (1 to 2).map(i => Row(i, i, s"orc$i")))
} else {
checkAnswer(sql(l1DirSqlStatement), (1 to 6).map(i => Row(i, i, s"orc$i")))
}
val l2DirStatement =
s"""
|CREATE EXTERNAL TABLE tbl3(
| c1 int,
| c2 int,
| c3 string)
|STORED AS orc
|LOCATION '${s"${dir.getCanonicalPath}/l1/l2/"}'""".stripMargin
sql(l2DirStatement)
val l2DirSqlStatement = s"SELECT * FROM tbl3"
if (convertMetastore) {
checkAnswer(sql(l2DirSqlStatement), (3 to 4).map(i => Row(i, i, s"orc$i")))
} else {
checkAnswer(sql(l2DirSqlStatement), (3 to 6).map(i => Row(i, i, s"orc$i")))
}
val wildcardTopDirStatement =
s"""
|CREATE EXTERNAL TABLE tbl4(
| c1 int,
| c2 int,
| c3 string)
|STORED AS orc
|LOCATION '${new File(s"${dir}/*").toURI}'""".stripMargin
sql(wildcardTopDirStatement)
val wildcardTopDirSqlStatement = s"SELECT * FROM tbl4"
if (convertMetastore) {
checkAnswer(sql(wildcardTopDirSqlStatement), (1 to 2).map(i => Row(i, i, s"orc$i")))
} else {
checkAnswer(sql(wildcardTopDirSqlStatement), Nil)
}
val wildcardL1DirStatement =
s"""
|CREATE EXTERNAL TABLE tbl5(
| c1 int,
| c2 int,
| c3 string)
|STORED AS orc
|LOCATION '${new File(s"${dir}/l1/*").toURI}'""".stripMargin
sql(wildcardL1DirStatement)
val wildcardL1DirSqlStatement = s"SELECT * FROM tbl5"
if (convertMetastore) {
checkAnswer(sql(wildcardL1DirSqlStatement), (1 to 4).map(i => Row(i, i, s"orc$i")))
} else {
checkAnswer(sql(wildcardL1DirSqlStatement), Nil)
}
val wildcardL2Statement =
s"""
|CREATE EXTERNAL TABLE tbl6(
| c1 int,
| c2 int,
| c3 string)
|STORED AS orc
|LOCATION '${new File(s"${dir}/l1/l2/*").toURI}'""".stripMargin
sql(wildcardL2Statement)
val wildcardL2SqlStatement = s"SELECT * FROM tbl6"
if (convertMetastore) {
checkAnswer(sql(wildcardL2SqlStatement), (3 to 6).map(i => Row(i, i, s"orc$i")))
} else {
checkAnswer(sql(wildcardL2SqlStatement), Nil)
}
}
}
}
}
}
}
}
| ptkool/spark | sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/HiveOrcSourceSuite.scala | Scala | apache-2.0 | 12,071 |
object ch12_16 {
  // Placeholder for exercise 12.16: `???` throws scala.NotImplementedError
  // as soon as this object's body is initialized (i.e. on first access).
  ???
}
import ch12_16._
/*
from repl you can test typing:
:load src/main/scala/fpinscala/ch12/Exercise16.scala
*/
| rucka/fpinscala | src/main/scala/fpinscala/ch12/Exercise16.scala | Scala | gpl-2.0 | 132 |
import com.github.raduba.gis._
import org.scalatest._
// TODO: add more tests
/**
 * Specs for [[WKTParser]]: parsing WKT (Well-Known Text) geometry
 * definitions — points, linestrings, polygons, and their multi- variants.
 */
class WKTParserSpec extends FunSpec with Matchers {

  /**
   * Runs `parser` over `in`, returning the parsed geometry, or None when
   * the parser does not succeed.
   */
  def gparse[A](in: String, parser: WKTParser.Parser[A]): Option[A] = {
    WKTParser.parse(parser, in) match {
      case WKTParser.Success(g, _) => Some(g)
      case _ => None
    }
  }

  describe("WKTParser should parse point definition") {
    it("should parse valid point definitions") {
      val toParse = List("1 2", "-1 -2", " -1 2", "1 -2", "1.123 2.123", "-1.123 2.123", "-1.123 -2.123", "1.123 -2.123", "0 1", "0 0", "1 0", "0 -1", "-1 0")
      val expected = List(Point2D(1, 2), Point2D(-1, -2), Point2D(-1, 2), Point2D(1, -2), Point2D(1.123, 2.123), Point2D(-1.123, 2.123), Point2D(-1.123, -2.123), Point2D(1.123, -2.123), Point2D(0, 1), Point2D(0, 0), Point2D(1, 0), Point2D(0, -1), Point2D(-1, 0))
      val result = toParse.flatMap(s => gparse(s"POINT ($s)", WKTParser.point))
      result should contain theSameElementsAs expected
    }
    it("should parse empty point definition") {
      // "POINT EMPTY" parses to the origin rather than failing.
      val result = gparse(s"POINT EMPTY", WKTParser.point)
      result.isEmpty should equal(false)
      result.get should equal(Point2D(0, 0))
    }
    it ("should fail on invalid point definition") {
      // "-1 0" is the only valid definition in the list, so it is the only
      // element expected to survive the flatMap over gparse.
      val toParse = List("1 2a", " -2", "a", "a b", "", " ", "a1 -2.123", "-1 0")
      val expected = List(Point2D(-1, 0))
      val result = toParse.flatMap(s => gparse(s"POINT ($s)", WKTParser.point))
      result should contain theSameElementsAs expected
    }
  }

  describe("WKTParser should parse linestring definition") {
    it("should parse valid linestring definitions") {
      // A linestring needs at least two points; a single point must fail.
      val validLineString = gparse("LINESTRING (30 10, 10 30, 40 40)", WKTParser.lineString)
      val minValidLineString = gparse("LINESTRING(30 10, 10 30)", WKTParser.lineString)
      val invalidLineString = gparse("LINESTRING (30 10)", WKTParser.lineString)
      validLineString should equal(Some(Line(List(Point2D(30.0,10.0), Point2D(10.0,30.0), Point2D(40.0,40.0)))))
      minValidLineString should equal(Some(Line(List(Point2D(30.0,10.0), Point2D(10.0,30.0)))))
      invalidLineString should equal(None)
    }
  }

  describe("WKTParser should parse polygon definition") {
    it("should parse valid polygon definitions") {
      // poly2 includes an inner ring (hole) in addition to the outer ring.
      val poly1 = gparse("POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))", WKTParser.polygon)
      val poly2 = gparse("POLYGON((35 10, 45 45, 15 40, 10 20, 35 10), (20 30, 35 35, 30 20, 20 30))", WKTParser.polygon)
      poly1 should equal(Some(Polygon(List(Line(List(Point2D(30.0,10.0), Point2D(40.0,40.0), Point2D(20.0,40.0), Point2D(10.0,20.0), Point2D(30.0,10.0)))))))
      poly2 should equal(Some(Polygon(List(Line(List(Point2D(35.0,10.0), Point2D(45.0,45.0), Point2D(15.0,40.0), Point2D(10.0,20.0), Point2D(35.0,10.0))), Line(List(Point2D(20.0,30.0), Point2D(35.0,35.0), Point2D(30.0,20.0), Point2D(20.0,30.0)))))))
    }
  }

  describe("WKTParser should parse multipoint definition") {
    it("should parse valid multipoint definitions") {
      // Both WKT multipoint syntaxes are accepted: parenthesized and bare.
      val mp1 = gparse("MULTIPOINT ((10 40), (40 30), (20 20), (30 10))", WKTParser.multiPoint)
      val mp2 = gparse("MULTIPOINT(10 40, 40 30, 20 20, 30 10)", WKTParser.multiPoint)
      mp1 should equal(Some(MultiPoint(List(Point2D(10.0,40.0), Point2D(40.0,30.0), Point2D(20.0,20.0), Point2D(30.0,10.0)))))
      mp2 should equal(Some(MultiPoint(List(Point2D(10.0,40.0), Point2D(40.0,30.0), Point2D(20.0,20.0), Point2D(30.0,10.0)))))
    }
  }

  describe("WKTParser should parse multilinestring definition") {
    it("should parse valid multilinestring definitions") {
      val mls = gparse("MULTILINESTRING ((10 10, 20 20, 10 40), (40 40, 30 30, 40 20, 30 10))", WKTParser.multiLineString)
      mls should equal(Some(MultiLine(List(Line(List(Point2D(10.0,10.0), Point2D(20.0,20.0), Point2D(10.0,40.0))), Line(List(Point2D(40.0,40.0), Point2D(30.0,30.0), Point2D(40.0,20.0), Point2D(30.0,10.0)))))))
    }
  }

  describe("WKTParser should parse multipolygon definition") {
    it("should parse valid multipolygon definitions") {
      val mpoly1 = gparse("MULTIPOLYGON (((30 20, 45 40, 10 40, 30 20)), ((15 5, 40 10, 10 20, 5 10, 15 5)))", WKTParser.multiPolygon)
      val mpoly2 = gparse("MULTIPOLYGON(((40 40, 20 45, 45 30, 40 40)), ((20 35, 10 30, 10 10, 30 5, 45 20, 20 35), (30 20, 20 15, 20 25, 30 20)))", WKTParser.multiPolygon)
      mpoly1 should equal(Some(MultiPolygon(List(Polygon(List(Line(List(Point2D(30.0,20.0), Point2D(45.0,40.0), Point2D(10.0,40.0), Point2D(30.0,20.0))))), Polygon(List(Line(List(Point2D(15.0,5.0), Point2D(40.0,10.0), Point2D(10.0,20.0), Point2D(5.0,10.0), Point2D(15.0,5.0)))))))))
      mpoly2 should equal(Some(MultiPolygon(List(Polygon(List(Line(List(Point2D(40.0,40.0), Point2D(20.0,45.0), Point2D(45.0,30.0), Point2D(40.0,40.0))))), Polygon(List(Line(List(Point2D(20.0,35.0), Point2D(10.0,30.0), Point2D(10.0,10.0), Point2D(30.0,5.0), Point2D(45.0,20.0), Point2D(20.0,35.0))), Line(List(Point2D(30.0,20.0), Point2D(20.0,15.0), Point2D(20.0,25.0), Point2D(30.0,20.0)))))))))
    }
  }
}
| raduba/wkt-geometry-scala-parser | src/test/scala/WKTParserSpec.scala | Scala | apache-2.0 | 5,059 |
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.api
import slamdata.Predef._
import quasar.{Data, DataCodec}
import quasar.csv._
import quasar.fp._
import quasar.fp.ski._
import quasar.main.Prettify
import eu.timepit.refined.api.Refined
import eu.timepit.refined.numeric.Positive
import eu.timepit.refined.auto._
import org.http4s._
import org.http4s.parser.HttpHeaderParser
import org.http4s.headers._
import scalaz._, Scalaz._
import scalaz.concurrent.Task
import scalaz.stream.Process
/**
 * Selects which [[DataCodec]] is used when rendering and parsing JSON
 * values; `name` is the value of the "mode" media-type extension.
 */
sealed abstract class JsonPrecision extends Product with Serializable {
  def codec: DataCodec
  def name: String
}
object JsonPrecision {
  // Human-friendly rendering backed by DataCodec.Readable.
  case object Readable extends JsonPrecision {
    val codec = DataCodec.Readable
    val name = "readable"
  }
  // Type-preserving rendering backed by DataCodec.Precise.
  case object Precise extends JsonPrecision {
    val codec = DataCodec.Precise
    val name = "precise"
  }
}
/** Structural layout of a JSON body; each layout has its own media type. */
sealed abstract class JsonFormat extends Product with Serializable {
  def mediaType: MediaType
}
object JsonFormat {
  // One JSON value per line.
  case object LineDelimited extends JsonFormat {
    val mediaType = new MediaType("application", "ldjson", true, true) // ldjson => line delimited json
  }
  // All values wrapped in a single top-level JSON array.
  case object SingleArray extends JsonFormat {
    val mediaType = MediaType.`application/json`
  }
}
/** Human-readable description of why a body could not be decoded. */
final case class DecodeError(msg: String)
object DecodeError {
  // Renders a DecodeError as its bare message.
  implicit val show: Show[DecodeError] = Show.shows(_.msg)
}
/**
 * Decodes a textual message body into a stream of [[Data]] values, each of
 * which may individually fail with a [[DecodeError]].
 */
trait Decoder {
  /** The media type this decoder accepts. */
  def mediaType: MediaType
  @SuppressWarnings(Array("org.wartremover.warts.Overloading"))
  def decode(txt: String): DecodeError \\/ Process[Task, DecodeError \\/ Data]
  /* Does not decode in a streaming fashion */
  @SuppressWarnings(Array("org.wartremover.warts.Overloading"))
  def decode(txtStream: Process[Task,String]): Task[DecodeError \\/ Process[Task, DecodeError \\/ Data]] =
    // Materializes the whole stream into one string before delegating.
    txtStream.runLog.map(_.mkString).map(decode(_))
  /* Not a streaming decoder */
  def decoder: EntityDecoder[Process[Task, DecodeError \\/ Data]] = EntityDecoder.decodeBy(mediaType) { msg =>
    EitherT(decode(msg.bodyAsText).map(_.leftMap(err => InvalidMessageBodyFailure(err.msg))))
  }
}
/**
 * A bidirectional wire format: decodes request bodies (via [[Decoder]]) and
 * encodes streams of [[Data]] into response bodies.
 */
sealed abstract class MessageFormat extends Decoder {
  /** Optional Content-Disposition to advertise through the media type. */
  def disposition: Option[`Content-Disposition`]
  /** Renders a stream of data values into their textual representation. */
  def encode[F[_]](data: Process[F, Data]): Process[F, String]
  // Exposes the disposition (when present) as a media-type extension.
  protected def dispositionExtension: Map[String, String] =
    disposition.map(disp => Map("disposition" -> disp.value)).getOrElse(Map.empty)
}
object MessageFormat {
  /**
   * JSON in either line-delimited or single-array layout, with readable or
   * precise value encoding.
   */
  final case class JsonContentType(
    mode: JsonPrecision,
    format: JsonFormat,
    disposition: Option[`Content-Disposition`]
  ) extends MessageFormat {
    val LineSep = "\\r\\n"
    def mediaType = {
      val extensions = Map("mode" -> mode.name)
      format.mediaType.withExtensions(extensions)
    }
    override def encode[F[_]](data: Process[F, Data]): Process[F, String] = {
      // Values that fail to render are dropped by `unite`.
      val encodedData = data.map(DataCodec.render(_)(mode.codec)).unite
      format match {
        case JsonFormat.SingleArray =>
          Process.emit("[" + LineSep) ++ encodedData.intersperse("," + LineSep) ++ Process.emit(LineSep + "]" + LineSep)
        case JsonFormat.LineDelimited =>
          encodedData.map(_ + LineSep)
      }
    }
    def decode(txt: String): DecodeError \\/ Process[Task, DecodeError \\/ Data] =
      if (txt.isEmpty) Process.empty.right
      else {
        implicit val codec: DataCodec = mode.codec
        format match {
          case JsonFormat.SingleArray =>
            // The whole body must parse as one JSON array.
            DataCodec.parse(txt).fold(
              err => DecodeError("parse error: " + err.message).left,
              {
                case Data.Arr(data) => Process.emitAll(data.map(_.right)).right
                case _ => DecodeError("Provided body is not a json array").left
              }
            )
          case JsonFormat.LineDelimited =>
            // Each line parses independently; bad lines become per-line errors.
            val jsonByLine = txt.split("\\n").map(line => DataCodec.parse(line).leftMap(
              e => DecodeError(s"parse error: ${e.message} in the following line: $line")
            )).toList
            Process.emitAll(jsonByLine).right
        }
      }
  }
  object JsonContentType {
    @SuppressWarnings(Array("org.wartremover.warts.Overloading"))
    def apply(mode: JsonPrecision, format: JsonFormat): JsonContentType = JsonContentType(mode, format, disposition = None)
  }

  // Fallback format used when the client expresses no usable preference.
  val Default = JsonContentType(JsonPrecision.Readable, JsonFormat.LineDelimited, None)

  /** CSV with configurable delimiter, quote, escape, and row terminator. */
  final case class Csv(format: CsvParser.Format, disposition: Option[`Content-Disposition`]) extends MessageFormat {
    import Csv._

    // Number of initial rows inspected to derive the rendered column set.
    val CsvColumnsFromInitialRowsCount: Int Refined Positive = 1000

    def mediaType = {
      // Newlines are escaped so the format's characters survive header transport.
      val alwaysExtensions = Map(
        "columnDelimiter" -> escapeNewlines(format.delimiter.toString),
        "rowDelimiter" -> escapeNewlines(format.lineTerminator),
        "quoteChar" -> escapeNewlines(format.quoteChar.toString),
        "escapeChar" -> escapeNewlines(format.escapeChar.toString))
      val extensions = alwaysExtensions ++ dispositionExtension
      Csv.mediaType.withExtensions(extensions)
    }
    override def encode[F[_]](data: Process[F, Data]): Process[F, String] = {
      val writer = CsvWriter(Some(format))
      Prettify.renderStream(data, CsvColumnsFromInitialRowsCount).map(writer(_))
    }
    override def decode(txt: String): DecodeError \\/ Process[Task, DecodeError \\/ Data] = {
      val lines = CsvParser.TototoshiCsvParser(format).parse(txt)
      // The first row is treated as the header naming each column's path;
      // subsequent rows are zipped against it and unflattened back into Data.
      val data = lines.headOption.map { header =>
        val paths = header.fold(κ(Nil), _.fields.map(Prettify.Path.parse(_).toOption))
        lines.drop(1).map(_.bimap(
          err => DecodeError("parse error: " + err),
          rec => {
            // Cells whose path or value fails to parse are silently dropped.
            val pairs = paths zip rec.fields.map(Prettify.parse)
            val good = pairs.map { case (p, s) => (p |@| s).tupled }.foldMap(_.toList)
            Prettify.unflatten(good.toListMap)
          }
        ))
      }.getOrElse(Stream.empty)
      Process.emitAll(data).right
    }
  }
  object Csv {
    val mediaType = MediaType.`text/csv`

    val Default = Csv(CsvParser.Format(',', '"', '"', "\\r\\n"), None)

    def escapeNewlines(str: String): String =
      str.replace("\\r", "\\\\r").replace("\\n", "\\\\n")

    def unescapeNewlines(str: String): String =
      str.replace("\\\\r", "\\r").replace("\\\\n", "\\n")
  }

  // Media ranges that forMessage/fromMediaType can resolve to a format.
  val supportedMediaTypes: Set[MediaRange] = Set(
    JsonFormat.LineDelimited.mediaType,
    new MediaType("application", "x-ldjson"),
    JsonFormat.SingleArray.mediaType,
    Csv.mediaType)

  /** Chooses a format from a message's Content-Type header, or fails. */
  def forMessage(msg: Message): DecodeFailure \\/ MessageFormat =
    for {
      cType <- msg.headers.get(`Content-Type`) \\/> (MediaTypeMissing(supportedMediaTypes): DecodeFailure)
      fmt <- fromMediaType(cType.mediaType) \\/> MediaTypeMismatch(cType.mediaType, supportedMediaTypes)
    } yield fmt

  /** Maps a media range (plus its extensions) to a concrete format, if supported. */
  def fromMediaType(mediaType: MediaRange): Option[MessageFormat] = {
    val disposition = mediaType.extensions.get("disposition").flatMap(str =>
      HttpHeaderParser.CONTENT_DISPOSITION(str).toOption)
    if (mediaType satisfies Csv.mediaType) {
      def toChar(str: String): Option[Char] = str.toList match {
        case c :: Nil => Some(c)
        case _ => None
      }
      // Extension values override the default CSV format characters.
      val format = CsvParser.Format(
        delimiter = mediaType.extensions.get("columnDelimiter").map(Csv.unescapeNewlines).flatMap(toChar).getOrElse(','),
        quoteChar = mediaType.extensions.get("quoteChar").map(Csv.unescapeNewlines).flatMap(toChar).getOrElse('"'),
        escapeChar = mediaType.extensions.get("escapeChar").map(Csv.unescapeNewlines).flatMap(toChar).getOrElse('"'),
        lineTerminator = mediaType.extensions.get("rowDelimiter").map(Csv.unescapeNewlines).getOrElse("\\r\\n"))
      Some(Csv(format, disposition))
    }
    else {
      val format =
        if (mediaType satisfies JsonFormat.SingleArray.mediaType)
          // boundary=NL selects line-delimited output even under application/json.
          if (mediaType.extensions.get("boundary") =/= Some("NL")) Some(JsonFormat.SingleArray)
          else Some(JsonFormat.LineDelimited)
        else if ((mediaType satisfies JsonFormat.LineDelimited.mediaType) ||
                 (mediaType satisfies new MediaType("application", "x-ldjson"))) Some(JsonFormat.LineDelimited)
        else None
      format.map { f =>
        val precision =
          if (mediaType.extensions.get("mode") ≟ Some(JsonPrecision.Precise.name))
            JsonPrecision.Precise
          else
            JsonPrecision.Readable
        JsonContentType(precision, f, disposition)
      }
    }
  }

  /** Picks the best supported format from an Accept header, defaulting to [[Default]]. */
  def fromAccept(accept: Option[Accept]): MessageFormat =
    // TODO: MediaRange needs an Order instance – combining QValue ordering
    // with specificity (EG, application/json sorts before
    // application/* if they have the same q-value).
    accept.flatMap(_.values.sortBy(_.qValue).list.map(_.mediaRange).map(fromMediaType).flatten(Option.option2Iterable).lastOption)
      .getOrElse(MessageFormat.Default)
}
| drostron/quasar | web/src/main/scala/quasar/api/MessageFormat.scala | Scala | apache-2.0 | 9,396 |
package com.ybrikman.ping.scalaapi.dedupe
import Cache._
import play.api.Configuration
import java.util.concurrent.ConcurrentHashMap
import collection.JavaConverters._
/**
* A Scala wrapper for a Java's ConcurrentHashMap (CHM). Exposes the basic underlying methods of CHM and adds a
* getOrElseUpdate(key, value) method that lazily evaluates the value parameter only if the key is not already present
* in the cache.
*
* You may be asking, why not just use Scala's ConcurrentMap interface, which already has a getOrElseUpdate method?
*
* val cache = new ConcurrentHashMap().asScala
* cache.getOrElseUpdate("foo", "bar") // BAD idea
*
* The answer is because this method is inherited from the MapLike trait, and is NOT a thread safe (atomic) operation!
*
* The strategy used in the class below is to wrap all values with a LazyWrapper class that only evaluates the value
* when explicitly accessed. In the getOrElseUpdate method, we avoid accessing the passed in value unless we know it
* was the one actually inserted into the cache.
*
* For more info, see: http://boundary.com/blog/2011/05/
*
* TODO: investigate if boundary's NonBlockingHashMap is as good as they say it is (and what tests they have to prove
* it).
*
* TODO: Java-friendly API
*
* @param initialCapacity
* @param concurrencyLevel
* @param loadFactor
* @tparam K
* @tparam V
*/
class Cache[K, V](initialCapacity: Int, loadFactor: Float, concurrencyLevel: Int) {
  /**
   * Overloaded constructor that creates the cache with initial capacity, concurrency level, and load factor read from
   * config
   *
   * @param config
   * @return
   */
  def this(config: Configuration) = this(
    config.getInt(CONFIG_KEY_INITIAL_CAPACITY).getOrElse(DEFAULT_INITIAL_CAPACITY),
    config.getDouble(CONFIG_KEY_LOAD_FACTOR).map(_.toFloat).getOrElse(DEFAULT_LOAD_FACTOR),
    config.getInt(CONFIG_KEY_CONCURRENCY_LEVEL).getOrElse(DEFAULT_CONCURRENCY_LEVEL)
  )
  /**
   * Empty constructor that uses default values for initial capacity, concurrency level, and load factor
   * @return
   */
  def this() = this(
    DEFAULT_INITIAL_CAPACITY,
    DEFAULT_LOAD_FACTOR,
    DEFAULT_CONCURRENCY_LEVEL
  )
  // Values are stored wrapped in LazyWrapper so getOrElseUpdate can insert a
  // not-yet-evaluated value atomically (see the class scaladoc above).
  private val cache = new ConcurrentHashMap[K, LazyWrapper[V]](initialCapacity, loadFactor, concurrencyLevel).asScala
  /**
   * Returns all elements of the cache. Use this method only if you really need all of the elements. Calling it will cause
   * all lazy values to be calculated.
   */
  def getAll: Map[K, V] = {
    val mutable = cache.map { case(key, wrapper) => key -> unwrap(wrapper) }
    mutable.toMap
  }
  /**
   * Returns true if this key is associated with a value in the cache and false otherwise.
   *
   * @param key
   * @return
   */
  def contains(key: K): Boolean = {
    cache.contains(key)
  }
  /**
   * Optionally return the value associated with the given key
   *
   * @param key
   * @return
   */
  def get(key: K): Option[V] = {
    cache.get(key).map(unwrap)
  }
  /**
   * Associate the given key with the given value. Optionally return any value previously associated with the key.
   *
   * @param key
   * @param value
   * @return
   */
  def put(key: K, value: V): Option[V] = {
    cache.put(key, wrap(value)).map(unwrap)
  }
  /**
   * If the given key is already associated with a value, return that value. Otherwise, associate the key with the
   * given value and return None.
   *
   * @param key
   * @param value
   * @return
   */
  def putIfAbsent(key: K, value: V): Option[V] = {
    cache.putIfAbsent(key, wrap(value)).map(unwrap)
  }
  /**
   * Get the value associated with the given key. If no value is already associated, then associate the given value
   * with the key and use it as the return value.
   *
   * Like Scala's ConcurrentMap, the value parameter will be lazily evaluated: that is, it'll only be evaluated if
   * there wasn't already a value associated with the given key. However, unlike Scala's ConcurrentMap, this method is
   * a thread safe (atomic) operation.
   *
   * @param key
   * @param value
   * @return
   */
  def getOrElseUpdate(key: K, value: => V): V = {
    val newWrapper = wrap(value)
    // If there was no previous value, we'll end up calling the .value on newWrapper, which will evaluate it for the
    // first (and last) time
    cache.putIfAbsent(key, newWrapper).getOrElse(newWrapper).value
  }
  /**
   * Remove the key and any associated value from the cache. Optionally return any previously associated value.
   *
   * @param key
   * @return
   */
  def remove(key: K): Option[V] = {
    cache.remove(key).map(unwrap)
  }
  /**
   * Remove all keys and values from the cache
   */
  def clear() {
    cache.clear()
  }
  /**
   * Return how many elements are in the cache
   *
   * @return
   */
  def size: Int = {
    cache.size
  }
  // Wraps a by-name value without evaluating it.
  private def wrap[T](value: => T): LazyWrapper[T] = {
    new LazyWrapper[T](value)
  }
  // Forces and returns the wrapped value.
  private def unwrap[T](lazyWrapper: LazyWrapper[T]): T = {
    lazyWrapper.value
  }
  override def toString: String = "Cache(%s)".format(cache)
  override def hashCode(): Int = cache.hashCode()
  override def equals(other: Any): Boolean = {
    Option(other) match {
      case Some(otherCache: Cache[_, _]) => cache.equals(otherCache.cache)
      case _ => false
    }
  }
}
/**
* A wrapper that avoids evaluating the value until explicitly accessed by calling either .value, .equals, .hashCode,
* or .toString.
*
* @param wrapped
* @tparam T
*/
/**
 * Wraps an expression so that it is not evaluated until first accessed via
 * `.value`, `.equals`, `.hashCode`, or `.toString`; the result is then
 * memoized and reused for every later access.
 *
 * @param wrapped the by-name expression to evaluate on demand
 * @tparam T type of the wrapped value
 */
class LazyWrapper[T](wrapped: => T) {

  // `lazy val` guarantees the by-name argument is forced at most once.
  lazy val value: T = wrapped

  override def toString: String = s"LazyWrapper($value)"

  override def hashCode(): Int = value.hashCode()

  override def equals(other: Any): Boolean =
    other match {
      // A type pattern never matches null, so null compares unequal here
      // just as it did with the Option-based original.
      case that: LazyWrapper[_] => value.equals(that.value)
      case _ => false
    }
}
/** Default settings and configuration keys for the Cache class. */
object Cache {
  // Defaults mirror java.util.concurrent.ConcurrentHashMap's own defaults.
  val DEFAULT_INITIAL_CAPACITY: Int = 16
  val DEFAULT_CONCURRENCY_LEVEL: Int = 16
  val DEFAULT_LOAD_FACTOR: Float = 0.75f

  // Keys read by the Cache(config: Configuration) constructor.
  val CONFIG_KEY_INITIAL_CAPACITY: String = "initialCapacity"
  val CONFIG_KEY_CONCURRENCY_LEVEL: String = "concurrencyLevel"
  val CONFIG_KEY_LOAD_FACTOR: String = "loadFactor"
}
package com.arcusys.learn.models.report
/**
 * One row of the "most active students" report.
 *
 * @param id        user id
 * @param fullname  user's display name
 * @param avatarUrl URL of the user's avatar image
 * @param stmtCount number of statements recorded for this user
 */
case class StudentMostActiveResponse(
  id: Long,
  fullname: String,
  avatarUrl: String,
  stmtCount: Int)
| ViLPy/Valamis | learn-portlet/src/main/scala/com/arcusys/learn/models/report/StudentMostActiveResponse.scala | Scala | lgpl-3.0 | 147 |
package thistle.core
/**
 * A query over a sequence of elements: a predicate for the first element
 * followed by match predicates for each subsequent step. Behaves as a Seq
 * of its predicates, with the head predicate at index 0.
 */
case class Query[T](
  headPredicate: ElementPredicate[T],
  tailPredicates: MatchPredicate[T]*) extends Seq[MatchPredicate[T]] {

  /** Total number of predicates: the head plus every tail predicate. */
  def length: Int = 1 + tailPredicates.length

  /** Yields the head predicate first, then the tail predicates in order. */
  def iterator: Iterator[MatchPredicate[T]] =
    Iterator.single(headPredicate) ++ tailPredicates.iterator

  /** Returns the predicate at `idx`; index 0 is the head predicate. */
  def apply(idx: Int): MatchPredicate[T] =
    idx match {
      case 0 => headPredicate
      case i => tailPredicates(i - 1)
    }
}
package org.apache.spark.ml.bundle.ops.feature
import ml.combust.bundle.BundleContext
import ml.combust.bundle.dsl._
import ml.combust.bundle.op.OpModel
import ml.combust.mleap.core.types.TensorShape
import org.apache.spark.ml.bundle.{ParamSpec, SimpleParamSpec, SimpleSparkOp, SparkBundleContext}
import org.apache.spark.ml.feature.ChiSqSelectorModel
import org.apache.spark.mllib.feature
import org.apache.spark.sql.mleap.TypeConverters.sparkToMleapDataShape
/**
* Created by hollinwilkins on 12/27/16.
*/
/**
 * MLeap bundle (de)serialization for Spark's [[ChiSqSelectorModel]]: stores
 * the selected feature indices (plus the input vector size) and restores the
 * model from them.
 */
class ChiSqSelectorOp extends SimpleSparkOp[ChiSqSelectorModel] {
  override val Model: OpModel[SparkBundleContext, ChiSqSelectorModel] = new OpModel[SparkBundleContext, ChiSqSelectorModel] {
    override val klazz: Class[ChiSqSelectorModel] = classOf[ChiSqSelectorModel]

    override def opName: String = Bundle.BuiltinOps.feature.chi_sq_selector

    override def store(model: Model, obj: ChiSqSelectorModel)
                      (implicit context: BundleContext[SparkBundleContext]): Model = {
      // The input size is derived from the features column's tensor shape in
      // the dataset attached to the bundle context. `.get` assumes a dataset
      // was provided; storing without one fails here.
      val dataset = context.context.dataset.get
      val inputShape = sparkToMleapDataShape(dataset.schema(obj.getFeaturesCol), dataset).asInstanceOf[TensorShape]

      model.withValue("filter_indices", Value.longList(obj.selectedFeatures.map(_.toLong).toSeq))
        .withValue("input_size", Value.int(inputShape.dimensions.get.head))
    }

    override def load(model: Model)
                     (implicit context: BundleContext[SparkBundleContext]): ChiSqSelectorModel = {
      // Only the selected indices are needed to rebuild the model; the uid is
      // re-assigned by sparkLoad.
      new ChiSqSelectorModel(uid = "",
        selectedFeatures = model.value("filter_indices").getLongList.map(_.toInt).toArray)
    }
  }

  override def sparkLoad(uid: String, shape: NodeShape, model: ChiSqSelectorModel): ChiSqSelectorModel = {
    new ChiSqSelectorModel(uid = uid, selectedFeatures = model.selectedFeatures)
  }

  override def sparkInputs(obj: ChiSqSelectorModel): Seq[ParamSpec] = {
    Seq("input" -> obj.featuresCol)
  }

  override def sparkOutputs(obj: ChiSqSelectorModel): Seq[SimpleParamSpec] = {
    Seq("output" -> obj.outputCol)
  }
}
| combust/mleap | mleap-spark/src/main/scala/org/apache/spark/ml/bundle/ops/feature/ChiSqSelectorOp.scala | Scala | apache-2.0 | 2,064 |
/**
* Copyright 2015 Thomson Reuters
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cmwell.domain
/**
* User: israel
* Date: 11/12/14
* Time: 22:07
*/
/**
 * Base type for all aggregation filters.
 *
 * `type` is derived reflectively from the concrete class name by dropping
 * the trailing "Filter" (e.g. TermAggregationFilter -> "TermAggregation").
 */
trait AggregationFilter extends Formattable {
  def name: String
  def field: Field
  def `type`: String = this.getClass.getSimpleName.dropRight(6) // 6 == "Filter".length
  /** Returns a copy of this filter with `suffix` appended to its name. */
  def copyWithSuffix(suffix: String): AggregationFilter
}
/**
 * An aggregation filter that groups results into buckets and can carry
 * nested sub-aggregations applied within each bucket.
 */
trait BucketAggregationFilter extends AggregationFilter with Formattable {
  def subFilters: Seq[AggregationFilter]
}
// Whether an aggregation targets the analyzed or the non-analyzed form of a
// field. NOTE(review): "FieldValeOperator" looks like a typo for
// "FieldValueOperator"; renaming would break external references.
sealed trait FieldValeOperator
case object AnalyzedField extends FieldValeOperator
case object NonAnalyzedField extends FieldValeOperator
/** A field name paired with how its values should be addressed. */
case class Field(operator: FieldValeOperator, value: String)
/**
 * Requests a statistics aggregation over `field`; the statistics themselves
 * are computed by the consumer of this filter. A metric aggregation, so it
 * carries no sub-filters.
 */
case class StatsAggregationFilter(name: String = "Statistics Aggregation", override val field: Field)
  extends AggregationFilter {
  override def copyWithSuffix(suffix: String): AggregationFilter = this.copy(name = name + suffix)
}
/**
 * Buckets results by distinct values (terms) of `field`.
 *
 * @param name       display name of this aggregation
 * @param field      the field whose terms form the buckets
 * @param size       maximum number of buckets to return
 * @param subFilters nested aggregations applied within each bucket
 */
case class TermAggregationFilter(name: String = "Term Aggregation",
                                 override val field: Field,
                                 size: Int = 10,
                                 subFilters: Seq[AggregationFilter] = Seq.empty)
  extends BucketAggregationFilter {
  override def copyWithSuffix(suffix: String): AggregationFilter =
    // Give each sub-filter a distinct, 1-based numeric suffix. The previous
    // implementation ran `counter += 1` once while constructing the mapped
    // function (the block yields the lambda), so every sub-filter received
    // the same "<suffix>1" suffix.
    this.copy(
      name = name + suffix,
      subFilters = subFilters.zipWithIndex.map { case (f, i) => f.copyWithSuffix(suffix + (i + 1)) })
}
/**
 * Buckets numeric values of `field` into fixed-width intervals.
 *
 * @param name        display name of this aggregation
 * @param field       the numeric field to bucket
 * @param interval    width of each histogram bucket
 * @param minDocCount minimum count a bucket must reach to be returned
 * @param extMin      optional explicit lower bound for the histogram range
 * @param extMax      optional explicit upper bound for the histogram range
 * @param subFilters  nested aggregations applied within each bucket
 */
case class HistogramAggregationFilter(name: String = "Histogram Aggregation",
                                      override val field: Field,
                                      interval: Int,
                                      minDocCount: Int,
                                      extMin: Option[Double],
                                      extMax: Option[Double],
                                      subFilters: Seq[AggregationFilter] = Seq.empty)
  extends BucketAggregationFilter {
  override def copyWithSuffix(suffix: String): AggregationFilter =
    // Give each sub-filter a distinct, 1-based numeric suffix. The previous
    // implementation incremented its counter only once, outside the mapped
    // function, so all sub-filters shared the same "<suffix>1" suffix.
    this.copy(
      name = name + suffix,
      subFilters = subFilters.zipWithIndex.map { case (f, i) => f.copyWithSuffix(suffix + (i + 1)) })
}
/**
 * Buckets results by terms of `field` that are statistically significant
 * relative to an optional background term.
 *
 * NOTE(review): the default name "Signigicant" is a typo for "Significant",
 * but it is left unchanged because the name may appear in serialized output
 * that consumers match on — confirm before renaming.
 *
 * @param name           display name of this aggregation
 * @param field          the field whose significant terms form the buckets
 * @param backgroundTerm optional (field, value) pair defining the background set
 * @param minDocCount    minimum count a bucket must reach to be returned
 * @param size           maximum number of buckets to return
 * @param subFilters     nested aggregations applied within each bucket
 */
case class SignificantTermsAggregationFilter(name: String = "Signigicant Terms Aggregation",
                                             override val field: Field,
                                             backgroundTerm: Option[(String, String)],
                                             minDocCount: Int,
                                             size: Int,
                                             subFilters: Seq[AggregationFilter] = Seq.empty)
  extends BucketAggregationFilter {
  override def copyWithSuffix(suffix: String): AggregationFilter =
    // Give each sub-filter a distinct, 1-based numeric suffix. The previous
    // implementation incremented its counter only once, outside the mapped
    // function, so all sub-filters shared the same "<suffix>1" suffix.
    this.copy(
      name = name + suffix,
      subFilters = subFilters.zipWithIndex.map { case (f, i) => f.copyWithSuffix(suffix + (i + 1)) })
}
/** Cardinality aggregation: approximate count of distinct values of a field.
  *
  * @param precisionThreshold optional accuracy/memory trade-off threshold
  */
case class CardinalityAggregationFilter(name: String, override val field: Field, precisionThreshold: Option[Long])
  extends AggregationFilter {
  /** Returns a copy of this filter with `suffix` appended to its name. */
  override def copyWithSuffix(suffix: String): AggregationFilter =
    this.copy(name = s"$name$suffix")
}
| thomsonreuters/CM-Well | server/cmwell-domain/src/main/scala/cmwell/domain/AggregationFilter.scala | Scala | apache-2.0 | 3,511 |
/*-------------------------------------------------------------------------*\\
** ScalaCheck **
** Copyright (c) 2007-2021 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
\\*------------------------------------------------------------------------ */
package org.scalacheck
import language.higherKinds
import language.implicitConversions
import rng.Seed
import util.Buildable
import util.SerializableCanBuildFroms._
import ScalaVersionSpecific._
import scala.annotation.tailrec
import scala.collection.immutable.TreeMap
import scala.collection.mutable.ArrayBuffer
import scala.concurrent.duration.{Duration, FiniteDuration}
import java.util.{ Calendar, UUID }
import java.math.{BigInteger, BigDecimal => JavaDecimal}
/**
* A generator produces values for [[Prop]]s
*
* This module provides:
* 1. Definitions for non-arbitrary generators,
* 1. Factories to construct generators,
* 1. Methods to modify a generator, and
* 1. Various combinators for producing generators of values for more
* complex data types.
*
* Explicit generators aren't required to write [[Prop]]s:
*
* {{{
* Prop.forAll { (n: Int) =>
* n == n
* }
* }}}
*
* The [[Prop]] above is defined with parameters only and without an
* explicit generator, because generators are implicitly provided by
* [[Arbitrary]] for various data types.
*
* However, it's not uncommon to need to write explicit custom
* generators:
*
* {{{
* val genInt: Gen[Int] = Gen.choose(1,10)
* Prop.forAll(genInt) { (n: Int) =>
* 1 <= n && n <= 10
* }
* }}}
*
* This is a simple definition of a generator for booleans:
* {{{
* val genBool: Gen[Boolean] = Gen.oneOf(true,false)
* }}}
*
* The above definition isn't necessary, though. The same boolean
* generator is defined in [[Arbitrary]] as an implicit declaration
* for automatically parameterizing [[Prop]]s. Instead, use the
* generator that is defined in [[Arbitrary]] and available from the
* polymorphic method [[Arbitrary.arbitrary]] with an explicit type
* parameter:
*
* {{{
* val genBool: Gen[Boolean] = Arbitrary.arbitrary[Boolean]
* }}}
*
* Alternatively, this is a boolean generator, but one that always
* produces true:
* {{{
* val genBool = Gen.const(true)
* }}}
*
* This is a generator of booleans that is true at a 2-to-1 ratio:
* {{{
* val genBool = Gen.frequency(2 -> true, 1 -> false)
* }}}
*
* This is a boolean generator that will produce true 75% of the time:
* {{{
* val genBool = Gen.prob(0.75)
* }}}
*
* For more information on designing custom generators and the
* motivations for doing so, see chapter 6, ''Generators in Detail'',
* of the book ''ScalaCheck: The Definitive Guide'' (2013) by Rickard
* Nilsson published by Artima Press.
*
* This is an example of a custom generator for integers:
* {{{
* val genSmallInt: Gen[Int] = Gen.choose(-100,100)
* }}}
*
* This can be used to generate different collections of zero or more small integers:
* {{{
* val genListOfInts: Gen[List[Int]] = Gen.listOf(genSmallInt)
*
* val genSeqOfInts: Gen[Seq[Int]] = Gen.someOf(-100 to 100)
*
* val genVectorOfInts: Gen[Vector[Int]] = Gen.containerOf[Vector,Int](genSmallInt)
*
* val genMap: Gen[Map[Int,Boolean]] = Gen.mapOf(Gen.zip(genSmallInt, genBool))
*
* val genOptionalInt: Gen[Option[Int]] = Gen.option(genSmallInt)
* }}}
*
* Or collections of one or more small integers:
* {{{
* val genListOfInts: Gen[List[Int]] = Gen.nonEmptyListOf(genSmallInt)
*
* val genSeqOfInts: Gen[Seq[Int]] = Gen.atLeastOne(-100 to 100)
*
* val genVectorOfInts: Gen[Vector[Int]] = Gen.nonEmptyContainerOf[Vector,Int](genSmallInt)
*
* val genMap: Gen[Map[Int,Boolean]] = Gen.nonEmptyMap(Gen.zip(genSmallInt, genBool))
*
* val genOptionalInt: Gen[Option[Int]] = Gen.some(genSmallInt)
* }}}
*
* The class methods for [[Gen]] should be familiar with those in the
* [[scala.collection Scala collections API]]:
*
* - [[map]] - Apply a function to generated values
* - [[flatMap]] - Apply a function that returns a generator
* - [[filter]] - Use values that satisfy a predicate
*
* The [[Gen]] class also supports for-comprehensions to compose
* complex generators:
*
* {{{
* val genPerson = for {
* firstName <- Gen.oneOf("Alan", "Ada", "Alonzo")
* lastName <- Gen.oneOf("Lovelace", "Turing", "Church")
* age <- Gen.choose(0,100) if (age >= 18)
* } yield Person(firstName, lastName, age)
* }}}
*
* Constructors and factories for generators:
* - [[Gen$.const const]] - Always generates a single value
* - [[Gen$.oneOf[T](t0* oneOf]] - Generate a value from a list of values
* - [[Gen$.atLeastOne[T](g1* atLeastOne]] - Generate a collection with at least one value from a list
* - [[Gen$.someOf[T](l* someOf]] - Generate a collection with zero or more values from a list
* - [[Gen$.choose choose]] - Generate numeric values in an (inclusive) range
* - [[Gen$.frequency frequency]] - Choose from multiple values with a weighted distribution
*
* Combinators of generators:
* - [[Gen$.buildableOf buildableOf]] - Generates a collection with a generator
* - [[Gen$.buildableOfN buildableOfN]] - Generates a collection of at most ''n'' elements
* - [[Gen$.nonEmptyBuildableOf nonEmptyBuildableOf]] - Generates a non-empty collection
* - [[Gen$.containerOf containerOf]] - Generates a collection with a generator
* - [[Gen$.containerOfN containerOfN]] - Generates a collection of at most ''n'' elements
* - [[Gen$.nonEmptyContainerOf nonEmptyContainerOf]] - Generates a non-empty collection
* - [[Gen$.either either]] - Generate a disjoint union of [[scala.util.Either]]
* - infiniteLazyList - Generates an infinite lazy list
* - [[Gen$.infiniteStream infiniteStream]] - Generates an infinite stream
* - [[Gen$.listOf listOf]] - Generates a list of random length
* - [[Gen$.listOfN listOfN]] - Generates a list of at most ''n'' elements
* - [[Gen$.nonEmptyListOf nonEmptyListOf]] - Generates a non-empty list of random length.
* - [[Gen$.mapOf mapOf]] - Generates a [[scala.collection.Map]]
* - [[Gen$.mapOfN mapOfN]] - Generates a [[scala.collection.Map]] with at most ''n'' elements
* - [[Gen$.nonEmptyMap nonEmptyMap]] - Generates a non-empty map of random length
* - [[Gen$.option option]] - Generate values of [[scala.Some]] and [[scala.None]]
* - [[Gen$.pick[T](n:Int,g1* pick]] - A generator that randomly picks ''n'' elements from a list
* - [[Gen$.sequence sequence]] - Sequences generators.
* - [[Gen$.some some]] - A generator of [[scala.Some]]
* - [[Gen.someOf[T](g1* someOf]] - A generator that picks a random number of elements from a list
* - [[Gen$.stringOf stringOf]] - Generate string of characters
* - [[Gen$.stringOfN stringOfN]] - Generate string of at most ''n'' characters
*
* Methods for working with [[Gen]] internals:
* - [[Gen$.resize resize]] - Creates a resized version of a generator
* - [[Gen$.parameterized parameterized]] - Generator with the parameters
* - [[Gen$.size size]] - Generate with the value of the default size parameter
* - [[Gen$.sized sized]] - Build a generator using the default size parameter
* Methods for probabilistic generators:
* - [[Gen$.exponential exponential]] - Generate numbers according to an exponential distribution
* - [[Gen$.gaussian gaussian]] - Generates numbers according to a Gaussian distribution
* - [[Gen$.geometric geometric]] - Generates numbers according to a geometric distribution
* - [[Gen$.poisson poisson]] - Generates numbers according to a Poisson distribution
* - [[Gen$.prob prob]] - Generates a boolean for the probability of true
*
* Definitions for generating various, non-arbitrary, common values of
* strings and characters:
* - [[Gen$.alphaChar alphaChar]] - Generates an alpha character
* - [[Gen$.alphaStr alphaStr]] - Generates a string of alpha characters
* - [[Gen$.numChar numChar]] - Generates a numerical character
* - [[Gen$.numStr numStr]] - Generates a string of digits
* - [[Gen$.alphaNumChar alphaNumChar]] - Generates an alphanumerical character
* - [[Gen$.alphaNumStr alphaNumStr]] - Generates a string of alphanumerical characters
* - [[Gen$.alphaLowerChar alphaLowerChar]] - Generates a lower-case alpha character
* - [[Gen$.alphaLowerStr alphaLowerStr]] - Generates a string of lower-case alpha characters
* - [[Gen$.alphaUpperChar alphaUpperChar]] - Generates an upper-case alpha character
* - [[Gen$.alphaUpperStr alphaUpperStr]] - Generates a string of upper-case alpha characters
* - [[Gen$.asciiChar asciiChar]] - Generates an ASCII character
* - [[Gen$.asciiStr asciiStr]] - Generates a string of ASCII characters
* - [[Gen$.identifier identifier]] - Generates an identifier
* - [[Gen$.uuid uuid]] - Generates a UUID
* - [[Gen$.hexChar hexChar]] - Generates a character of a hexadecimal digit
* - [[Gen$.hexStr hexStr]] - Generates a string of hexadecimal digits
*
* Definitions for generating arbitrary values of commonly used types in
* Scala are defined elsewhere, see [[Arbitrary]].
*
* There are a couple of factory methods that are for advanced uses of generators:
* - [[Gen$.delay delay]] - Generate a value of an expression by-name
* - [[Gen$.lzy lzy]] - Lazily generate a value of an expression
* - [[Gen$.fail fail]] - Fail to generate any values of a type
* - [[Gen$.recursive recursive]] - A fixed point generator
* - [[Gen$.resultOf[T,R0](f* resultOf]] - Generate values with a function or class
* - [[Gen$.zip[T1](g1* zip]] - Generate tuples
*/
sealed abstract class Gen[+T] extends Serializable { self =>

  //// Private interface ////

  import Gen.{R, gen}

  // This is no longer used but preserved here for binary compatibility.
  private[scalacheck] def sieveCopy(x: Any): Boolean = true

  // If you implement new Gen[_] directly (instead of using
  // combinators), make sure to use p.initialSeed or p.useInitialSeed
  // in the implementation, instead of using seed directly.
  private[scalacheck] def doApply(p: Gen.Parameters, seed: Seed): R[T]

  //// Public interface ////

  /** A class supporting filtered operations. */
  final class WithFilter(p: T => Boolean) {
    def map[U](f: T => U): Gen[U] = self.suchThat(p).map(f)
    def flatMap[U](f: T => Gen[U]): Gen[U] = self.suchThat(p).flatMap(f)
    // Conjoin predicates so chained withFilter calls build one filtered view.
    def withFilter(q: T => Boolean): WithFilter = self.withFilter(x => p(x) && q(x))
  }

  /** Evaluate this generator with the given parameters */
  def apply(p: Gen.Parameters, seed: Seed): Option[T] =
    doApply(p, seed).retrieve

  /** Evaluate this generator, feeding each failed attempt's resulting seed
   *  into the next attempt, up to `retries` additional tries; throws
   *  [[Gen.RetrievalError]] when no attempt produces a value. */
  def doPureApply(p: Gen.Parameters, seed: Seed, retries: Int = 100): Gen.R[T] = {
    @tailrec def loop(r: Gen.R[T], i: Int): Gen.R[T] =
      if (r.retrieve.isDefined) r
      else if (i > 0) loop(doApply(p, r.seed), i - 1)
      else throw new Gen.RetrievalError()
    loop(doApply(p, seed), retries)
  }

  /**
   * Evaluate this generator with the given parameters.
   *
   * The generator will attempt to generate a valid `T` value. If a
   * valid value is not produced it may retry several times,
   * determined by the `retries` parameter (which defaults to 100).
   *
   * If all the retries fail it will throw a `Gen.RetrievalError`
   * exception.
   */
  def pureApply(p: Gen.Parameters, seed: Seed, retries: Int = 100): T =
    doPureApply(p, seed, retries).retrieve.get

  /** Create a new generator by mapping the result of this generator */
  def map[U](f: T => U): Gen[U] = gen { (p, seed) => doApply(p, seed).map(f) }

  /** Create a new generator by flat-mapping the result of this generator */
  def flatMap[U](f: T => Gen[U]): Gen[U] = gen { (p, seed) =>
    val rt = doApply(p, seed)
    // Thread the seed: the inner generator continues from rt's resulting seed.
    rt.flatMap(t => f(t).doApply(p, rt.seed))
  }

  /** Create a new generator that uses this generator to produce a value
   *  that fulfills the given condition. If the condition is not fulfilled,
   *  the generator fails (returns None). Also, make sure that the provided
   *  test property is side-effect free, e.g. it should not use external vars. */
  def filter(p: T => Boolean): Gen[T] = suchThat(p)

  /** Create a new generator that uses this generator to produce a value
   *  that doesn't fulfill the given condition. If the condition is fulfilled,
   *  the generator fails (returns None). Also, make sure that the provided
   *  test property is side-effect free, e.g. it should not use external vars. */
  def filterNot(p: T => Boolean): Gen[T] = suchThat(x => !p(x))

  /** Creates a non-strict filtered version of this generator. */
  def withFilter(p: T => Boolean): WithFilter = new WithFilter(p)

  /** Create a new generator that uses this generator to produce a value
   *  that fulfills the given condition. If the condition is not fulfilled,
   *  the generator fails (returns None). Also, make sure that the provided
   *  test property is side-effect free, e.g. it should not use external vars.
   *  This method is identical to [Gen.filter]. */
  def suchThat(f: T => Boolean): Gen[T] =
    new Gen[T] {
      def doApply(p: Gen.Parameters, seed: Seed): Gen.R[T] =
        p.useInitialSeed(seed) { (p0, s0) =>
          val r = self.doApply(p0, s0)
          // Keep the result only if it satisfies the predicate.
          r.copy(r = r.retrieve.filter(f))
        }
    }

  /** Thrown by `retryUntil` when its predicate was not satisfied within the
   *  allowed number of attempts. */
  case class RetryUntilException(n: Int) extends RuntimeException(s"retryUntil failed after $n attempts")

  /**
   * Create a generator that calls this generator repeatedly until the
   * given condition is fulfilled. The generated value is then
   * returned. Make sure that the provided test property is
   * side-effect free (it should not use external vars).
   *
   * If the generator fails more than maxTries, a RetryUntilException
   * will be thrown.
   */
  def retryUntil(p: T => Boolean, maxTries: Int): Gen[T] = {
    require(maxTries > 0)
    def loop(params: Gen.Parameters, seed: Seed, tries: Int): R[T] =
      if (tries > maxTries) throw RetryUntilException(tries) else {
        val r = self.doApply(params, seed)
        if (r.retrieve.exists(p)) r else loop(params, r.seed, tries + 1)
      }
    Gen.gen((params, seed) => loop(params, seed, 1))
  }

  /**
   * Create a generator that calls this generator repeatedly until the
   * given condition is fulfilled. The generated value is then
   * returned. Make sure that the provided test property is
   * side-effect free (it should not use external vars).
   *
   *
   * If the generator fails more than 10000 times, a
   * RetryUntilException will be thrown. You can call `retryUntil`
   * with a second parameter to change this number.
   */
  def retryUntil(p: T => Boolean): Gen[T] =
    retryUntil(p, 10000)

  /** Generate a single sample value using default parameters and a fresh
   *  random seed; returns None if generation failed. */
  def sample: Option[T] =
    doApply(Gen.Parameters.default, Seed.random()).retrieve

  /** Returns a new property that holds if and only if both this
   *  and the given generator generates the same result, or both
   *  generators generate no result. */
  def ==[U](g: Gen[U]): Prop = Prop { prms =>
    // test equality using a random seed
    val seed = Seed.random()
    val lhs = doApply(prms, seed).retrieve
    val rhs = g.doApply(prms, seed).retrieve
    if (lhs == rhs) Prop.proved(prms) else Prop.falsified(prms)
  }

  /** Returns a property that holds when no value of this generator equals
   *  any value of the given generator. */
  def !=[U](g: Gen[U]): Prop =
    Prop.forAll(this)(r => Prop.forAll(g)(_ != r))

  def !==[U](g: Gen[U]): Prop = Prop { prms =>
    // test inequality using a random seed
    val seed = Seed.random()
    val lhs = doApply(prms, seed).retrieve
    val rhs = g.doApply(prms, seed).retrieve
    if (lhs != rhs) Prop.proved(prms) else Prop.falsified(prms)
  }

  /** Put a label on the generator to make test reports clearer */
  def label(l: String): Gen[T] = new Gen[T] {
    def doApply(p: Gen.Parameters, seed: Seed) =
      p.useInitialSeed(seed) { (p0, s0) =>
        val r = self.doApply(p0, s0)
        r.copy(l = r.labels + l)
      }
  }

  /** Put a label on the generator to make test reports clearer */
  def :|(l: String): Gen[T] = label(l)

  /** Put a label on the generator to make test reports clearer */
  def |:(l: String): Gen[T] = label(l)

  /** Put a label on the generator to make test reports clearer */
  def :|(l: Symbol): Gen[T] = label(l.name)

  /** Put a label on the generator to make test reports clearer */
  def |:(l: Symbol): Gen[T] = label(l.name)

  /** Perform some RNG perturbation before generating */
  def withPerturb(f: Seed => Seed): Gen[T] =
    Gen.gen((p, seed) => doApply(p, f(seed)))
}
object Gen extends GenArities with GenVersionSpecific {
//// Private interface ////

import Arbitrary.arbitrary

/** Just an alias */
private type P = Parameters

/** Thrown when a generator cannot produce a value within the allowed number
 *  of retries (see `Gen.pureApply` / `Gen.doPureApply`). */
class RetrievalError extends RuntimeException("couldn't generate value")

// Internal result of a single generation step: an optional value, the
// labels accumulated so far, and the seed to use for the next step.
private[scalacheck] trait R[+T] {
  def labels: Set[String] = Set()

  // sieve is no longer used but preserved for binary compatibility
  final def sieve[U >: T]: U => Boolean = (_: U) => true

  protected def result: Option[T]

  def seed: Seed

  def retrieve: Option[T] = result

  def copy[U >: T](
    l: Set[String] = this.labels,
    // s is no longer used but preserved for binary compatibility
    s: U => Boolean = this.sieve,
    r: Option[U] = this.result,
    sd: Seed = this.seed
  ): R[U] = new R[U] {
    override val labels = l
    val seed = sd
    val result = r
  }

  def map[U](f: T => U): R[U] =
    r(retrieve.map(f), seed).copy(l = labels)

  def flatMap[U](f: T => R[U]): R[U] =
    retrieve match {
      case None =>
        // Failure short-circuits; keep the labels and seed for reporting.
        r(None, seed).copy(l = labels)
      case Some(t) =>
        val r = f(t)
        // Merge labels from both steps.
        r.copy(l = labels | r.labels)
    }
}

// Constructs a plain R with the given optional value and next seed.
private[scalacheck] def r[T](r: Option[T], sd: Seed): R[T] = new R[T] {
  val result = r
  val seed = sd
}

/** Generator factory method */
private[scalacheck] def gen[T](f: (P, Seed) => R[T]): Gen[T] = new Gen[T] {
  // useInitialSeed honors Parameters.initialSeed exactly once, then
  // falls back to the evolving seed.
  def doApply(p: P, seed: Seed): R[T] = p.useInitialSeed(seed)(f)
}
//// Public interface ////

/** Generator parameters, used by [[org.scalacheck.Gen.apply]] */
sealed abstract class Parameters extends Serializable { outer =>

  override def toString: String = {
    val sb = new StringBuilder
    sb.append("Parameters(")
    sb.append(s"size=$size, ")
    sb.append(s"initialSeed=$initialSeed, ")
    sb.append(s"useLegacyShrinking=$useLegacyShrinking)")
    sb.toString
  }

  /**
   * The size of the generated value. Generator implementations are
   * allowed to freely interpret (or ignore) this value. During test
   * execution, the value of this parameter is controlled by
   * [[Test.Parameters.minSize]] and [[Test.Parameters.maxSize]].
   */
  val size: Int

  // Single private copy constructor that all public withX methods
  // funnel through.
  private[this] def cpy(
    size0: Int = outer.size,
    initialSeed0: Option[Seed] = outer.initialSeed,
    useLegacyShrinking0: Boolean = outer.useLegacyShrinking
  ): Parameters =
    new Parameters {
      val size: Int = size0
      val initialSeed: Option[Seed] = initialSeed0
      override val useLegacyShrinking: Boolean = useLegacyShrinking0
    }

  /**
   * Create a copy of this [[Gen.Parameters]] instance with
   * [[Gen.Parameters.size]] set to the specified value.
   */
  def withSize(size: Int): Parameters =
    cpy(size0 = size)

  /**
   * If set, the seed to use for the next generation; consumed (cleared)
   * by `useInitialSeed` so subsequent generations use the evolving seed.
   */
  val initialSeed: Option[Seed]

  def withInitialSeed(o: Option[Seed]): Parameters =
    cpy(initialSeed0 = o)

  def withInitialSeed(seed: Seed): Parameters =
    cpy(initialSeed0 = Some(seed))

  def withInitialSeed(n: Long): Parameters =
    cpy(initialSeed0 = Some(Seed(n)))

  def withNoInitialSeed: Parameters =
    cpy(initialSeed0 = None)

  // Runs f with the initial seed if one is set (clearing it so it is used
  // only once), otherwise with the supplied seed.
  def useInitialSeed[A](seed: Seed)(f: (Parameters, Seed) => A): A =
    initialSeed match {
      case Some(s) => f(this.withNoInitialSeed, s)
      case None => f(this, seed)
    }

  val useLegacyShrinking: Boolean = true

  def disableLegacyShrinking: Parameters =
    withLegacyShrinking(false)

  def enableLegacyShrinking: Parameters =
    withLegacyShrinking(true)

  def withLegacyShrinking(b: Boolean): Parameters =
    cpy(useLegacyShrinking0 = b)

  // no longer used, but preserved for binary compatibility
  @deprecated("cp is deprecated. use cpy.", "1.14.1")
  private case class cp(size: Int = size, initialSeed: Option[Seed] = None) extends Parameters
}
/** Provides methods for creating [[org.scalacheck.Gen.Parameters]] values */
object Parameters {
  /** Default generator parameters instance: size 100, no fixed initial seed. */
  val default: Parameters = new Parameters {
    val size: Int = 100
    val initialSeed: Option[Seed] = None
  }
}

/** A wrapper type for range types */
trait Choose[T] extends Serializable {
  /** Creates a generator that returns a value in the given inclusive range */
  def choose(min: T, max: T): Gen[T]
}
/** Provides implicit [[org.scalacheck.Gen.Choose]] instances */
object Choose extends time.JavaTimeChoose {

  class IllegalBoundsError[A](low: A, high: A)
    extends IllegalArgumentException(s"invalid bounds: low=$low, high=$high")

  /**
   * This method gets a ton of use -- so we want it to be as fast as
   * possible for many of our common cases.
   */
  private def chLng(l: Long, h: Long)(p: P, seed: Seed): R[Long] = {
    if (h < l) {
      throw new IllegalBoundsError(l, h)
    } else if (h == l) {
      const(l).doApply(p, seed)
    } else if (l == Long.MinValue && h == Long.MaxValue) {
      // Full Long range: any random long is in bounds.
      val (n, s) = seed.long
      r(Some(n), s)
    } else if (l == Int.MinValue && h == Int.MaxValue) {
      val (n, s) = seed.long
      r(Some(n.toInt.toLong), s)
    } else if (l == Short.MinValue && h == Short.MaxValue) {
      val (n, s) = seed.long
      r(Some(n.toShort.toLong), s)
    } else if (l == 0L && h == Char.MaxValue) {
      val (n, s) = seed.long
      r(Some(n.toChar.toLong), s)
    } else if (l == Byte.MinValue && h == Byte.MaxValue) {
      val (n, s) = seed.long
      r(Some(n.toByte.toLong), s)
    } else {
      val d = h - l + 1
      if (d <= 0) {
        // The span h - l + 1 overflowed a Long; fall back to rejection
        // sampling until a value lands inside [l, h].
        var tpl = seed.long
        var n = tpl._1
        var s = tpl._2
        while (n < l || n > h) {
          tpl = s.long
          n = tpl._1
          s = tpl._2
        }
        r(Some(n), s)
      } else {
        // Mask off the sign bit, then reduce modulo the span.
        val (n, s) = seed.long
        r(Some(l + (n & 0x7fffffffffffffffL) % d), s)
      }
    }
  }

  private def chDbl(l: Double, h: Double)(p: P, seed: Seed): R[Double] = {
    val d = h - l
    if (d < 0) {
      throw new IllegalBoundsError(l, h)
    } else if (d > Double.MaxValue) {
      // Span too wide to sample in one step: pick one half at random
      // and recurse into it.
      val (x, seed2) = seed.long
      if (x < 0) chDbl(l, 0d)(p, seed2) else chDbl(0d, h)(p, seed2)
    } else if (d == 0) {
      r(Some(l), seed)
    } else {
      val (n, s) = seed.double
      r(Some(n * (h-l) + l), s)
    }
  }

  /**
   * Generate a random BigInt within [lower, lower + span).
   *
   * Note that unlike the choose method, whose bounds are inclusive,
   * this method's upper bound is exclusive. We determine how many
   * random bits we need (bitLen), and then round up to the nearest
   * number of bytes (byteLen). We generate the bytes, possibly
   * truncating the most significant byte (bytes(0)) if bitLen is
   * not evenly-divisible by 8.
   *
   * Finally, we check to see if the BigInt we ended up with is in
   * our range. If it is not, we restart this method. The likelihood
   * of needing to restart depends on span. In the worst case we
   * have almost a 50% chance of this (which occurs when span is a
   * power of 2 + 1) and in the best case we never restart (which
   * occurs when span is a power of 2).
   */
  private def chBigInteger(lower: BigInteger, span: BigInteger, seed0: Seed): R[BigInteger] = {
    val bitLen = span.bitLength
    val byteLen = (bitLen + 7) / 8
    val bytes = new Array[Byte](byteLen)
    var seed = seed0
    var i = 0
    while (i < bytes.length) {
      // generate a random long value (i.e. 8 random bytes)
      val (x0, seed1) = seed.long
      var x = x0
      seed = seed1

      // extract each byte in turn and add them to our byte array
      var j = 0
      while (j < 8 && i < bytes.length) {
        val b = (x & 0xff).toByte
        bytes(i) = b
        x = x >>> 8
        i += 1
        j += 1
      }
    }

    // we may not need all 8 bits of our most significant byte. if
    // not, mask off any unneeded upper bits.
    val bitRem = bitLen & 7
    if (bitRem != 0) {
      val mask = 0xff >>> (8 - bitRem)
      bytes(0) = (bytes(0) & mask).toByte
    }

    // construct a BigInteger and see if its valid. if so, we're
    // done. otherwise, we need to restart using our new seed.
    val big = new BigInteger(1, bytes)
    if (big.compareTo(span) < 0) {
      r(Some(big.add(lower)), seed)
    } else {
      chBigInteger(lower, span, seed)
    }
  }

  implicit val chooseLong: Choose[Long] =
    new Choose[Long] {
      def choose(low: Long, high: Long): Gen[Long] =
        if (low > high) throw new IllegalBoundsError(low, high)
        else gen(chLng(low,high))
    }

  // The integral instances below are all derived from chooseLong via xmap.
  implicit val chooseInt: Choose[Int] =
    Choose.xmap[Long, Int](_.toInt, _.toLong)

  implicit val chooseShort: Choose[Short] =
    Choose.xmap[Long, Short](_.toShort, _.toLong)

  implicit val chooseChar: Choose[Char] =
    Choose.xmap[Long, Char](_.toChar, _.toLong)

  implicit val chooseByte: Choose[Byte] =
    Choose.xmap[Long, Byte](_.toByte, _.toLong)

  implicit val chooseDouble: Choose[Double] =
    new Choose[Double] {
      def choose(low: Double, high: Double) =
        if (low > high) throw new IllegalBoundsError(low, high)
        // Infinite endpoints are produced explicitly 10% of the time,
        // otherwise the bound is clamped to the finite extreme.
        else if (low == Double.NegativeInfinity)
          frequency(1 -> const(Double.NegativeInfinity),
                    9 -> choose(Double.MinValue, high))
        else if (high == Double.PositiveInfinity)
          frequency(1 -> const(Double.PositiveInfinity),
                    9 -> choose(low, Double.MaxValue))
        else gen(chDbl(low,high))
    }

  implicit val chooseFloat: Choose[Float] =
    Choose.xmap[Double, Float](_.toFloat, _.toDouble)

  implicit val chooseFiniteDuration: Choose[FiniteDuration] =
    Choose.xmap[Long, FiniteDuration](Duration.fromNanos, _.toNanos)

  implicit object chooseBigInt extends Choose[BigInt] {
    def choose(low: BigInt, high: BigInt): Gen[BigInt] =
      chooseBigInteger
        .choose(low.bigInteger, high.bigInteger)
        .map(BigInt(_))
  }

  implicit object chooseBigInteger extends Choose[BigInteger] {
    def choose(low: BigInteger, high: BigInteger): Gen[BigInteger] =
      (low.compareTo(high)) match {
        case n if n > 0 => throw new IllegalBoundsError(low, high)
        case 0 => Gen.const(low)
        case _ => /* n < 0 */
          // chBigInteger's upper bound is exclusive, hence span = high - low + 1.
          val span = high.subtract(low).add(BigInteger.ONE)
          gen((_, seed) => chBigInteger(low, span, seed))
      }
  }

  /**
   * Choose a BigDecimal number between two given numbers.
   *
   * The minimum scale used will be 34. That means that the
   * fractional part will have at least 34 digits (more if one of
   * the given numbers has a scale larger than 34).
   *
   * The minimum scale was chosen based on Scala's default scale for
   * expanding infinite fractions:
   *
   *     BigDecimal(1) / 3   // 0.3333333333333333333333333333333333
   *
   * See chooseBigDecimalScale for more information about scale.
   */
  implicit val chooseBigDecimal: Choose[BigDecimal] =
    chooseBigDecimalScale(minScale = 34)

  /**
   * The "scale" of a decimal number refers to the number of digits
   * in the fractional part. For example, 3.0000 has a scale of 4.
   *
   * We can generate an arbitrary number of digits in the decimal
   * expansion of a number, so if a user calls choose(0, 1) we need
   * to decide "how much" work to do. The minScale ensures that we
   * do "enough" work to generate interesting numbers.
   *
   * The implicit instance fixes this value, but since users may
   * want to use other scales we expose this method as well.
   */
  private[this] def chooseBigDecimalScale(minScale: Int): Choose[BigDecimal] =
    new Choose[BigDecimal] {
      private val c = chooseJavaBigDecimalScale(minScale)
      def choose(low: BigDecimal, high: BigDecimal): Gen[BigDecimal] =
        c.choose(low.bigDecimal, high.bigDecimal).map(BigDecimal(_))
    }

  /**
   * Choose a java.math.BigDecimal number between two given numbers.
   *
   * See chooseBigDecimal and chooseBigDecimalScale for more comments.
   */
  implicit val chooseJavaBigDecimal: Choose[JavaDecimal] =
    chooseJavaBigDecimalScale(minScale = 34)

  /**
   * See chooseBigDecimalScale for comments.
   */
  private[this] def chooseJavaBigDecimalScale(minScale: Int): Choose[JavaDecimal] =
    new Choose[JavaDecimal] {
      def choose(low: JavaDecimal, high: JavaDecimal): Gen[JavaDecimal] =
        (low.compareTo(high)) match {
          case n if n > 0 => throw new IllegalBoundsError(low, high)
          case 0 => Gen.const(low)
          case _ => /* n < 0 */
            // Normalize both bounds to a common scale, then choose the
            // unscaled value as a BigInteger and reattach the scale.
            val s = (low.scale max high.scale) max minScale
            val x = if (low.scale < s) low.setScale(s) else low
            val y = if (high.scale < s) high.setScale(s) else high
            chooseBigInteger
              .choose(x.unscaledValue, y.unscaledValue)
              .map(n => new JavaDecimal(n, s))
        }
    }

  /** Transform a Choose[T] to a Choose[U] where T and U are two isomorphic
   *  types whose relationship is described by the provided transformation
   *  functions. (exponential functor map) */
  def xmap[T, U](from: T => U, to: U => T)(implicit c: Choose[T]): Choose[U] =
    new Choose[U] {
      def choose(low: U, high: U): Gen[U] =
        c.choose(to(low), to(high)).map(from)
    }
}
//// Various Generator Combinators ////

/** A generator that always generates the given value */
implicit def const[T](x: T): Gen[T] = gen((p, seed) => r(Some(x), seed))

/** A generator that never generates a value */
def fail[T]: Gen[T] = gen((p, seed) => failed[T](seed))

/**
 * A fixed point generator. This is useful for making recursive structures
 * e.g.
 *
 * Gen.recursive[List[Int]] { recurse =>
 *   Gen.choose(0, 10).flatMap { idx =>
 *     if (idx < 5) recurse.map(idx :: _)
 *     else Gen.const(idx :: Nil)
 *   }
 * }
 */
def recursive[A](fn: Gen[A] => Gen[A]): Gen[A] = {
  // lzy ties the knot: `result` can be referenced inside fn without
  // evaluating fn(result) until the generator is actually run.
  lazy val result: Gen[A] = lzy(fn(result))
  result
}

/** A result that never contains a value */
private[scalacheck] def failed[T](seed0: Seed): R[T] =
  new R[T] {
    val result: Option[T] = None
    val seed = seed0
  }
/** A generator that generates a random value in the given (inclusive)
 *  range. If the range is invalid, an IllegalBoundsError exception will be
 *  thrown. */
def choose[T](min: T, max: T)(implicit c: Choose[T]): Gen[T] =
  c.choose(min, max)

/** Sequences generators. If any of the given generators fails, the
 *  resulting generator will also fail. */
def sequence[C,T](gs: Traversable[Gen[T]])(implicit b: Buildable[T, C]): Gen[C] = {
  val g = gen { (p, seed) =>
    // Fold left-to-right, threading each result's seed into the next
    // generator; one failed element makes the whole result None.
    gs.foldLeft(r(Some(Vector.empty[T]), seed)) {
      case (rs,g) =>
        val rt = g.doApply(p, rs.seed)
        rt.flatMap(t => rs.map(_ :+ t)).copy(sd = rt.seed)
    }
  }
  g.map(b.fromIterable)
}
/** Monadic recursion on Gen
 * This is a stack-safe loop that is the same as:
 *
 * {{{
 *
 * fn(a).flatMap {
 *   case Left(a) => tailRec(a)(fn)
 *   case Right(b) => Gen.const(b)
 * }
 *
 * }}}
 *
 * which is useful for doing monadic loops without blowing up the
 * stack
 */
def tailRecM[A, B](a0: A)(fn: A => Gen[Either[A, B]]): Gen[B] = {
  @tailrec
  def tailRecMR(a: A, seed: Seed, labs: Set[String])(fn: (A, Seed) => R[Either[A, B]]): R[B] = {
    val re = fn(a, seed)
    // Accumulate labels across all iterations of the loop.
    val nextLabs = labs | re.labels
    re.retrieve match {
      case None => r(None, re.seed).copy(l = nextLabs)
      case Some(Right(b)) => r(Some(b), re.seed).copy(l = nextLabs)
      case Some(Left(a)) => tailRecMR(a, re.seed, nextLabs)(fn)
    }
  }

  // This is the "Reader-style" approach to making a stack-safe loop:
  // we put one outer closure around an explicitly tailrec loop
  gen[B] { (p: P, seed: Seed) =>
    tailRecMR(a0, seed, Set.empty) { (a, seed) => fn(a).doApply(p, seed) }
  }
}
/** Wraps a generator lazily. The given parameter is only evaluated once,
 *  and not until the wrapper generator is evaluated. */
def lzy[T](g: => Gen[T]): Gen[T] = {
  // lazy val memoizes the by-name argument: evaluated at most once.
  lazy val h = g
  gen { (p, seed) => h.doApply(p, seed) }
}

/** Wraps a generator for later evaluation. The given parameter is
 *  evaluated each time the wrapper generator is evaluated. */
def delay[T](g: => Gen[T]): Gen[T] =
  gen { (p, seed) => g.doApply(p, seed) }

/** Creates a generator that can access its generation parameters */
def parameterized[T](f: Parameters => Gen[T]): Gen[T] =
  gen { (p, seed) => f(p).doApply(p, seed) }

/** Creates a generator that can access its generation size */
def sized[T](f: Int => Gen[T]): Gen[T] =
  gen { (p, seed) => f(p.size).doApply(p, seed) }

/** A generator that returns the current generation size */
lazy val size: Gen[Int] = sized { sz => sz }

/** Creates a resized version of a generator */
def resize[T](s: Int, g: Gen[T]) = gen((p, seed) => g.doApply(p.withSize(s), seed))
/** Picks a random value from a list. */
def oneOf[T](xs: Iterable[T]): Gen[T] =
  if (xs.isEmpty) {
    throw new IllegalArgumentException("oneOf called on empty collection")
  } else {
    val elems = xs.toVector
    choose(0, elems.size - 1).map(elems.apply)
  }

/** Picks a random value from a list.
 *  @todo Remove this overloaded method in the next major release. See #438.
 */
def oneOf[T](xs: Seq[T]): Gen[T] =
  oneOf(xs: Iterable[T])

/** Picks a random value from a list */
def oneOf[T](t0: T, t1: T, tn: T*): Gen[T] = oneOf(t0 +: t1 +: tn)

/** Picks a random generator from a list */
def oneOf[T](g0: Gen[T], g1: Gen[T], gn: Gen[T]*): Gen[T] = {
  val all = g0 +: g1 +: gn
  choose(0, all.size - 1).flatMap(all.apply)
}
/** Makes a generator result optional. Either `Some(T)` or `None` will be provided. */
def option[T](g: Gen[T]): Gen[Option[T]] =
  // None is produced with weight 1 out of 10.
  frequency(1 -> const(None), 9 -> some(g))

/** A generator that returns `Some(T)` */
def some[T](g: Gen[T]): Gen[Option[T]] =
  g.map(Some.apply)

/** Generates a `Left` of `T` or a `Right` of `U` with equal probability. */
def either[T, U](gt: Gen[T], gu: Gen[U]): Gen[Either[T, U]] =
  oneOf(gt.map(Left(_)), gu.map(Right(_)))
/** Chooses one of the given generators with a weighted random distribution */
def frequency[T](gs: (Int, Gen[T])*): Gen[T] = {
val filtered = gs.iterator.filter(_._1 > 0).toVector
if (filtered.isEmpty) {
throw new IllegalArgumentException("no items with positive weights")
} else {
var total = 0L
val builder = TreeMap.newBuilder[Long, Gen[T]]
filtered.foreach { case (weight, value) =>
total += weight
builder += ((total, value))
}
val tree = builder.result
choose(1L, total).flatMap(r => tree.rangeFrom(r).head._2)
}
}
/** Implicit convenience method for using the `frequency` method
* like this:
* {{{
* frequency((1, "foo"), (3, "bar"))
* }}}
*/
implicit def freqTuple[T](t: (Int,T)): (Int,Gen[T]) = (t._1, const(t._2))
//// List Generators ////
/** Generates a container of any Traversable type for which there exists an
* implicit [[org.scalacheck.util.Buildable]] instance. The elements in the
* container will be generated by the given generator. The size of the
* generated container is limited by `n`. Depending on what kind of container
* that is generated, the resulting container may contain fewer elements than
* `n`, but not more. If the given generator fails generating a value, the
* complete container generator will also fail. */
def buildableOfN[C,T](n: Int, g: Gen[T])(implicit
evb: Buildable[T,C], evt: C => Traversable[T]
): Gen[C] = {
require(n >= 0, s"invalid size given: $n")
new Gen[C] {
def doApply(p: P, seed0: Seed): R[C] = {
// An explicit initial seed on the parameters takes precedence.
var seed: Seed = p.initialSeed.getOrElse(seed0)
val bldr = evb.builder
// Budget of tolerated element-generation failures before the whole
// container generation is abandoned.
val allowedFailures = Gen.collectionRetries(n)
var failures = 0
var count = 0
while (count < n) {
val res = g.doApply(p, seed)
res.retrieve match {
case Some(t) =>
bldr += t
count += 1
case None =>
failures += 1
if (failures >= allowedFailures) return r(None, res.seed)
}
// Always advance the seed, even after a failed attempt, so retries
// do not replay the same random state.
seed = res.seed
}
r(Some(bldr.result), seed)
}
}
}
/** Generates a container of any Traversable type for which there exists an
* implicit [[org.scalacheck.util.Buildable]] instance. The elements in the
* container will be generated by the given generator. The size of the
* container is bounded by the size parameter used when generating values. */
def buildableOf[C,T](g: Gen[T])(implicit
evb: Buildable[T,C], evt: C => Traversable[T]
): Gen[C] =
sized(s => choose(0, Integer.max(s, 0)))
.flatMap(n => buildableOfN(n, g)(evb, evt))
/** Generates a non-empty container of any Traversable type for which there
* exists an implicit [[org.scalacheck.util.Buildable]] instance. The
* elements in the container will be generated by the given generator. The
* size of the container is bounded by the size parameter used when
* generating values. */
def nonEmptyBuildableOf[C,T](g: Gen[T])(implicit
evb: Buildable[T,C], evt: C => Traversable[T]
): Gen[C] =
// Lower bound of 1 guarantees non-emptiness even for size 0.
sized(s => choose(1, Integer.max(s, 1)))
.flatMap(n => buildableOfN(n, g)(evb, evt))
/** A convenience method for calling `buildableOfN[C[T],T](n,g)`. */
def containerOfN[C[_],T](n: Int, g: Gen[T])(implicit
evb: Buildable[T,C[T]], evt: C[T] => Traversable[T]
): Gen[C[T]] = buildableOfN[C[T], T](n, g)(evb, evt)
/** A convenience method for calling `buildableOf[C[T],T](g)`. */
def containerOf[C[_],T](g: Gen[T])(implicit
evb: Buildable[T,C[T]], evt: C[T] => Traversable[T]
): Gen[C[T]] = buildableOf[C[T], T](g)(evb, evt)
/** A convenience method for calling `nonEmptyBuildableOf[C[T],T](g)`. */
def nonEmptyContainerOf[C[_],T](g: Gen[T])(implicit
evb: Buildable[T,C[T]], evt: C[T] => Traversable[T]
): Gen[C[T]] = nonEmptyBuildableOf[C[T], T](g)(evb, evt)
/** Generates a list of random length. The maximum length depends on the
* size parameter. This method is equal to calling
* `containerOf[List,T](g)`. */
def listOf[T](g: => Gen[T]) = buildableOf[List[T], T](g)
/** Generates a non-empty list of random length. The maximum length depends
* on the size parameter. This method is equal to calling
* `nonEmptyContainerOf[List,T](g)`. */
def nonEmptyListOf[T](g: => Gen[T]) = nonEmptyBuildableOf[List[T], T](g)
/** Generates a list with at most the given number of elements. This method
* is equal to calling `containerOfN[List,T](n,g)`. */
def listOfN[T](n: Int, g: Gen[T]) = buildableOfN[List[T], T](n, g)
/** Generates a map of random length. The maximum length depends on the
* size parameter. This method is equal to calling
* <code>containerOf[Map,(T,U)](g)</code>. */
def mapOf[T, U](g: => Gen[(T, U)]) = buildableOf[Map[T, U], (T, U)](g)
/** Generates a non-empty map of random length. The maximum length depends
* on the size parameter. This method is equal to calling
* <code>nonEmptyContainerOf[Map,(T,U)](g)</code>. */
def nonEmptyMap[T,U](g: => Gen[(T,U)]) = nonEmptyBuildableOf[Map[T, U],(T, U)](g)
/** Generates a map with at most the given number of elements. This method
* is equal to calling <code>containerOfN[Map,(T,U)](n,g)</code>. */
def mapOfN[T,U](n: Int, g: Gen[(T, U)]) = buildableOfN[Map[T, U],(T, U)](n, g)
/**
* Generates an infinite stream.
*
* Failures in the underlying generator may terminate the stream.
* Otherwise it will continue forever.
*/
def infiniteStream[T](g: => Gen[T]): Gen[Stream[T]] = {
// Each element gets up to this many attempts; only this many
// *consecutive* failures end the (otherwise infinite) stream.
val attemptsPerItem = 10
def unfold(p: P, seed: Seed, attemptsLeft: Int): Stream[T] =
if (attemptsLeft <= 0) {
Stream.empty
} else {
// doPureApply: evaluated lazily as the stream is forced.
val r = g.doPureApply(p, seed)
r.retrieve match {
case Some(t) => t #:: unfold(p, r.seed, attemptsPerItem)
case None => unfold(p, r.seed, attemptsLeft - 1)
}
}
gen { (p, seed0) =>
val stream = unfold(p, seed0, attemptsPerItem)
// `slide` decorrelates the returned seed from the one the lazy stream
// will consume as it is forced later.
r(Some(stream), seed0.slide)
}
}
/** A generator that picks a random number of elements from a list
* (possibly zero). */
def someOf[T](l: Iterable[T]) =
choose(0, l.size).flatMap(pick(_,l))
/** A generator that picks a random number of elements from a list
* (possibly zero). */
def someOf[T](g1: Gen[T], g2: Gen[T], gs: Gen[T]*) =
// gs.length + 2 accounts for the two mandatory generators g1 and g2.
choose(0, gs.length+2).flatMap(pick(_, g1, g2, gs: _*))
/** A generator that picks at least one element from a list */
def atLeastOne[T](l: Iterable[T]) = {
require(l.size > 0, "There has to be at least one option to choose from")
choose(1,l.size).flatMap(pick(_,l))
}
/** A generator that picks at least one element from a list */
def atLeastOne[T](g1: Gen[T], g2: Gen[T], gs: Gen[T]*) =
choose(1, gs.length+2).flatMap(pick(_, g1, g2, gs: _*))
/** A generator that randomly picks a given number of elements from a list
* using reservoir sampling (Algorithm R), so each element of `l` is chosen
* with equal probability.
*
* The elements are not guaranteed to be permuted in random order.
*
* @param n number of elements to pick; must satisfy 0 <= n <= l.size
* @param l the source collection
* @throws IllegalArgumentException if `n` is negative or larger than `l.size`
*/
def pick[T](n: Int, l: Iterable[T]): Gen[collection.Seq[T]] = {
if (n > l.size || n < 0) throw new IllegalArgumentException(s"invalid choice: $n")
else if (n == 0) Gen.const(Nil)
else gen { (p, seed0) =>
val buf = ArrayBuffer.empty[T]
val it = l.iterator
var seed = seed0
var count = 0
while (it.hasNext) {
val t = it.next
count += 1
if (count <= n) {
// Fill the reservoir with the first n elements.
buf += t
} else {
// Replace a random reservoir slot with probability n/count.
val (x, s) = seed.long
// NOTE: parentheses matter — `%` binds tighter than `&` in Scala, so
// the previous `x & Long.MaxValue % count` masked with
// (Long.MaxValue % count) instead of computing a uniform index in
// [0, count), heavily biasing the selection. `x & Long.MaxValue`
// clears the sign bit before taking the modulus (cf. charSample).
val i = ((x & Long.MaxValue) % count).toInt
if (i < n) buf(i) = t
seed = s
}
}
r(Some(buf), seed)
}
}
/** A generator that randomly picks a given number of elements from a list
* of generators, then sequences the chosen generators.
*
* The elements are not guaranteed to be permuted in random order.
*/
def pick[T](n: Int, g1: Gen[T], g2: Gen[T], gn: Gen[T]*): Gen[Seq[T]] =
pick(n, g1 +: g2 +: gn).flatMap(sequence[Seq[T], T](_))
/** Takes a function and returns a generator that generates arbitrary
* results of that function by feeding it with arbitrarily generated input
* parameters. */
def resultOf[T,R0](f: T => R0)(implicit a: Arbitrary[T]): Gen[R0] =
arbitrary[T].map(f)
/** Creates a Function0 generator. The generated thunk always returns the
* same pre-generated value. */
def function0[A](g: Gen[A]): Gen[() => A] =
g.map(a => () => a)
//// Character Generators ////
/** Uniformly samples one character from the given (non-empty) array. */
private def charSample(cs: Array[Char]): Gen[Char] =
new Gen[Char] {
def doApply(p: P, seed0: Seed): Gen.R[Char] = {
val seed1 = p.initialSeed.getOrElse(seed0)
val (x, seed2) = seed1.long
// Clear the sign bit, then reduce modulo the array length for a
// (near-)uniform index in [0, cs.length).
val i = ((x & Long.MaxValue) % cs.length).toInt
r(Some(cs(i)), seed2)
}
}
/** Generates a numerical character */
val numChar: Gen[Char] =
charSample(('0' to '9').toArray)
/** Generates an upper-case alpha character */
val alphaUpperChar: Gen[Char] =
charSample(('A' to 'Z').toArray)
/** Generates a lower-case alpha character */
val alphaLowerChar: Gen[Char] =
charSample(('a' to 'z').toArray)
/** Generates an alpha character */
val alphaChar: Gen[Char] =
charSample((('A' to 'Z') ++ ('a' to 'z')).toArray)
/** Generates an alphanumerical character */
val alphaNumChar: Gen[Char] =
charSample((('0' to '9') ++ ('A' to 'Z') ++ ('a' to 'z')).toArray)
/** Generates a ASCII character, with extra weighting for printable characters */
val asciiChar: Gen[Char] =
charSample((0.toChar to 127.toChar).toArray)
/** Generates a ASCII printable character */
val asciiPrintableChar: Gen[Char] =
charSample((32.toChar to 126.toChar).toArray)
/** Generates a character that can represent a valid hexadecimal digit. This
* includes both upper and lower case values.
*/
val hexChar: Gen[Char] =
// Digits appear in both halves of the sample string, so '0'-'9' keep the
// same overall weight as in a single case-insensitive alphabet.
charSample("0123456789abcdef0123456789ABCDEF".toArray)
//// String Generators ////
/** Appends `n` characters generated by `gc` to `sb`, retrying failed
* character generations up to a budget (see collectionRetries); fails the
* whole string generation once the budget is exhausted. */
private def mkString(n: Int, sb: StringBuilder, gc: Gen[Char], p: P, seed0: Seed): R[String] = {
var seed: Seed = seed0
val allowedFailures = Gen.collectionRetries(n)
var failures = 0
var count = 0
while (count < n) {
val res = gc.doApply(p, seed)
res.retrieve match {
case Some(c) =>
sb += c
count += 1
case None =>
failures += 1
if (failures >= allowedFailures) return r(None, res.seed)
}
// Advance the seed after every attempt, including failures.
seed = res.seed
}
r(Some(sb.toString), seed)
}
/** Generates a string of exactly `n` characters drawn from `gc`
* (empty string for n <= 0). */
def stringOfN(n: Int, gc: Gen[Char]): Gen[String] =
if (n <= 0) Gen.const("")
else gen { (p, seed) =>
mkString(n, new StringBuilder(n), gc, p, seed)
}
/** Generates a string of random length (bounded by the size parameter)
* with characters drawn from `gc`. */
def stringOf(gc: Gen[Char]): Gen[String] =
gen { (p, seed0) =>
val (n, seed1) = Gen.mkSize(p, seed0)
if (n <= 0) r(Some(""), seed1)
else mkString(n, new StringBuilder(n), gc, p, seed1)
}
/** Generates a string that starts with a lower-case alpha character,
* and only contains alphanumerical characters */
val identifier: Gen[String] =
gen { (p, seed0) =>
val (n, seed1) = Gen.mkSize(p, seed0)
val sb = new StringBuilder
val res1 = alphaLowerChar.doApply(p, seed1)
// alphaLowerChar samples from a fixed non-empty array, so retrieve
// always yields a value here.
sb += res1.retrieve.get
mkString(n - 1, sb, alphaNumChar, p, res1.seed)
}
/** Generates a string of digits */
val numStr: Gen[String] =
stringOf(numChar)
/** Generates a string of upper-case alpha characters */
val alphaUpperStr: Gen[String] =
stringOf(alphaUpperChar)
/** Generates a string of lower-case alpha characters */
val alphaLowerStr: Gen[String] =
stringOf(alphaLowerChar)
/** Generates a string of alpha characters */
val alphaStr: Gen[String] =
stringOf(alphaChar)
/** Generates a string of alphanumerical characters */
val alphaNumStr: Gen[String] =
stringOf(alphaNumChar)
/** Generates a string of ASCII characters, with extra weighting for printable characters */
val asciiStr: Gen[String] =
stringOf(asciiChar)
/** Generates a string of ASCII printable characters */
val asciiPrintableStr: Gen[String] =
stringOf(asciiPrintableChar)
/** Generates a string that can represent a valid hexadecimal digit. This
* includes both upper and lower case values.
*/
val hexStr: Gen[String] =
stringOf(hexChar)
//// Number Generators ////
/**
* Generate a uniformly-distributed Long.
*
* This method has an equally likely method of generating every
* possible Long value.
*/
val long: Gen[Long] =
gen { (_, s0) =>
val (n, s1) = s0.long
r(Some(n), s1)
}
/**
* Generate a Double uniformly-distributed in [0, 1).
*
* This method will generate one of 2^53 distinct Double values in
* the unit interval.
*/
val double: Gen[Double] =
gen { (_, s0) =>
val (x, s1) = s0.double
r(Some(x), s1)
}
/**
* Generates a Boolean which has the given chance to be true.
*
* - prob(1.0) is always true
* - prob(0.5) is true 50% of the time
* - prob(0.1) is true 10% of the time
* - prob(0.0) is never true
*/
def prob(chance: Double): Gen[Boolean] =
// Degenerate chances short-circuit to constants, avoiding seed use.
if (chance <= 0.0) Gen.const(false)
else if (chance >= 1.0) Gen.const(true)
else gen { (_, s0) =>
val (x, s1) = s0.double
r(Some(x < chance), s1)
}
/**
* Generates Double values according to the given gaussian
* distribution, specified by its mean and standard deviation.
*
* Gaussian distributions are also called normal distributions.
*
* The range of values is theoretically (-∞, ∞) but 99.7% of all
* values will be contained within (mean ± 3 * stdDev).
*/
def gaussian(mean: Double, stdDev: Double): Gen[Double] = {
// Marsaglia polar method: rejection-sample a point (x, y) uniformly in
// the unit disc, then transform it into a normal deviate.
def loop(s0: Seed): R[Double] = {
val (x0, s1) = s0.double
val (y0, s2) = s1.double
// Map [0,1) samples onto [-1, 1).
val x = x0 * 2.0 - 1.0
val y = y0 * 2.0 - 1.0
val s = x * x + y * y
if (s >= 1.0 || s == 0.0) {
// Outside the unit disc (or degenerate at the origin): retry.
loop(s2)
} else {
val scale = stdDev * Math.sqrt(-2.0 * Math.log(s) / s)
val res = x * scale + mean // dropping y * scale + mean
r(Some(res), s2)
}
}
gen((_, seed) => loop(seed))
}
/**
* Generates Double values according to the given exponential
* distribution, specified by its rate parameter.
*
* The mean and standard deviation are both equal to 1/rate.
*
* The range of values is [0, ∞).
*/
def exponential(rate: Double): Gen[Double] = {
require(rate > 0.0, s"rate must be positive (got: $rate)")
val mean = 1.0 / rate
gen { (_, s0) =>
val (x, s1) = s0.double
// Inverse-transform sampling: -ln(U)/rate is exponentially distributed.
// NOTE(review): x can be exactly 0.0 (double is in [0, 1)), which yields
// Double.PositiveInfinity — confirm whether callers tolerate that.
r(Some(-Math.log(x) * mean), s1)
}
}
/**
* Generates Int values according to the given geometric
* distribution, specified by its mean.
*
* This distribution represents the expected number of failures
* before a successful test, where the probability of a successful
* test is p = 1 / (mean + 1).
*
* The ideal range of values is [0, ∞), although the largest value
* that can be produced here is 2147483647 (Int.MaxValue).
*/
def geometric(mean: Double): Gen[Int] = {
require(mean > 0.0, s"mean must be positive (got: $mean)")
val p = 1.0 / (mean + 1.0)
val lognp = Math.log1p(-p) // log(1 - p)
gen { (_, s0) =>
val (u, s1) = s0.double
// Inverse-transform sampling: floor(ln(U) / ln(1 - p)).
r(Some(Math.floor(Math.log(u) / lognp).toInt), s1)
}
}
/**
* Generates Int values according to the given Poisson distribution,
* specified by its rate parameters.
*
* The mean equals the rate; the standard deviation is sqrt(rate).
*
* In principle any positive value is a valid rate parameter.
* However, our method of generating values cannot handle large
* rates, so we require rate <= 745.
*/
def poisson(rate: Double): Gen[Int] = {
require(0 < rate && rate <= 745.0, s"rate must be between 0 and 745 (got $rate)")
// Knuth's algorithm: multiply uniform samples until the running product
// drops below e^(-rate). For rate > ~745, e^(-rate) underflows to 0.0,
// which is why the rate is bounded above.
val L = Math.exp(-rate)
def loop(s0: Seed, k: Int, p: Double): R[Int] =
if (p <= L) {
r(Some(k - 1), s0)
} else {
val (x, s1) = s0.double
loop(s1, k + 1, p * x)
}
gen((_, s) => loop(s, 0, 1.0))
}
/**
* Generates Int values according to the given binomial
* distribution, specified by the number of trials to conduct, and
* the probability of a true test.
*
* This distribution counts the number of trials which were
* successful according to a given test probability.
*
* The range of values is [0, trials].
*/
def binomial(test: Gen[Boolean], trials: Int): Gen[Int] = {
// Direct simulation: run `trials` independent Bernoulli tests and count
// successes, threading the seed through each trial.
def loop(ps: Gen.Parameters, s: Seed, i: Int, n: Int): R[Int] =
if (i >= trials) {
r(Some(n), s)
} else {
val r = test.doPureApply(ps, s)
val success = r.retrieve.get
loop(ps, r.seed, i + 1, if (success) n + 1 else n)
}
gen((ps, s) => loop(ps, s, 0, 0))
}
/** Generates positive numbers of uniform distribution, with an
* upper bound of the generation size parameter. */
def posNum[T](implicit num: Numeric[T], c: Choose[T]): Gen[T] = {
import num._
num match {
// Fractional types: allow values arbitrarily close to zero, but filter
// out exact zero to keep the result strictly positive.
case _: Fractional[_] => sized(n => c.choose(zero, max(fromInt(n), one)).suchThat(_ != zero))
// Integral types: start at one, so no filtering is needed.
case _ => sized(n => c.choose(one, max(fromInt(n), one)))
}
}
/** Generates negative numbers of uniform distribution, with an
* lower bound of the negated generation size parameter. */
def negNum[T](implicit num: Numeric[T], c: Choose[T]): Gen[T] = posNum.map(num.negate(_))
/** Generates numbers within the given inclusive range, with
* extra weight on zero, +/- unity, both extremities, and any special
* numbers provided. The special numbers must lie within the given range,
* otherwise they won't be included. */
def chooseNum[T](minT: T, maxT: T, specials: T*)(
implicit num: Numeric[T], c: Choose[T]
): Gen[T] = {
import num._
val basics = List(minT, maxT, zero, one, -one)
// Keep only special/basic values that fall inside [minT, maxT].
val basicsAndSpecials = for {
t <- specials ++ basics if t >= minT && t <= maxT
} yield (1, const(t))
// The plain uniform choice gets the same total weight as all the special
// values combined, i.e. a 50/50 split.
val other = (basicsAndSpecials.length, c.choose(minT, maxT))
val allGens = basicsAndSpecials :+ other
frequency(allGens: _*)
}
//// Misc Generators ////
/** Generates a version 4 (random) UUID. */
lazy val uuid: Gen[UUID] = for { // FIXME: Remove lazy
l1 <- Gen.choose(Long.MinValue, Long.MaxValue)
l2 <- Gen.choose(Long.MinValue, Long.MaxValue)
// Variant character: RFC 4122 variant-1 UUIDs start the clock-seq octet
// with one of 8, 9, a, b.
y <- Gen.oneOf('8', '9', 'a', 'b')
} yield UUID.fromString(
// Index 14 is the version nibble ('4' = random), index 19 the variant.
new UUID(l1,l2).toString.updated(14, '4').updated(19, y)
)
/** Generates a Calendar, weighted toward interesting boundary dates
* (first/last day of year/month, leap days, start/end of day). */
lazy val calendar: Gen[Calendar] = { // FIXME: Remove lazy
import Calendar._
// Apply a mutation to the calendar and return it, for use in map/yield.
def adjust(c: Calendar)(f: Calendar => Unit): Calendar = { f(c); c }
// We want to be sure we always initialize the calendar's time. By
// default, Calendar.getInstance uses the system time. We always
// overwrite it with a deterministically-generated time to be sure
// that calendar generation is also deterministic.
//
// We limit the time (in milliseconds) because extreme values will
// cause Calendar.getTime calls to fail. This range is relatively
// large but safe:
//
// -62135751600000 is 1 CE
// 64087186649116 is 4000 CE
val calendar: Gen[Calendar] =
Gen.chooseNum(-62135751600000L, 64087186649116L).map { t =>
adjust(Calendar.getInstance)(_.setTimeInMillis(t))
}
def yearGen(c: Calendar): Gen[Int] =
Gen.chooseNum(c.getGreatestMinimum(YEAR), c.getLeastMaximum(YEAR))
def moveToNearestLeapDate(c: Calendar, year: Int): Calendar = {
// Walk forward year by year until a leap year (366 days) is found.
@tailrec def loop(y: Int): Calendar = {
c.set(YEAR, y)
if (c.getActualMaximum(DAY_OF_YEAR) > 365) c else loop(y + 1)
}
// Back off near the upper bound so the forward search cannot overflow
// the calendar's maximum year.
loop(if (year + 4 > c.getLeastMaximum(YEAR)) year - 5 else year)
}
val beginningOfDayGen: Gen[Calendar] =
calendar.map(c => adjust(c) { c =>
c.set(HOUR_OF_DAY, 0)
c.set(MINUTE, 0)
c.set(SECOND, 0)
c.set(MILLISECOND, 0)
})
val endOfDayGen: Gen[Calendar] =
calendar.map(c => adjust(c) { c =>
c.set(HOUR_OF_DAY, 23)
c.set(MINUTE, 59)
c.set(SECOND, 59)
// NOTE(review): millisecond is set to 59, not 999 — confirm whether
// this is intentional for "end of day".
c.set(MILLISECOND, 59)
})
val firstDayOfYearGen: Gen[Calendar] =
for { c <- calendar; y <- yearGen(c) } yield adjust(c)(_.set(y, JANUARY, 1))
val lastDayOfYearGen: Gen[Calendar] =
for { c <- calendar; y <- yearGen(c) } yield adjust(c)(_.set(y, DECEMBER, 31))
val closestLeapDateGen: Gen[Calendar] =
for { c <- calendar; y <- yearGen(c) } yield moveToNearestLeapDate(c, y)
val lastDayOfMonthGen: Gen[Calendar] =
calendar.map(c => adjust(c)(_.set(DAY_OF_MONTH, c.getActualMaximum(DAY_OF_MONTH))))
val firstDayOfMonthGen: Gen[Calendar] =
calendar.map(c => adjust(c)(_.set(DAY_OF_MONTH, 1)))
// Half the weight goes to plain random calendars, the rest to boundaries.
Gen.frequency(
(1, firstDayOfYearGen),
(1, lastDayOfYearGen),
(1, closestLeapDateGen),
(1, beginningOfDayGen),
(1, endOfDayGen),
(1, firstDayOfMonthGen),
(1, lastDayOfMonthGen),
(7, calendar))
}
/** Generates an arbitrary FiniteDuration, spanning the full nanosecond range
* minus Long.MinValue (see below). */
val finiteDuration: Gen[FiniteDuration] =
// Duration.fromNanos doesn't allow Long.MinValue since it would create a
// duration that cannot be negated.
chooseNum(Long.MinValue + 1, Long.MaxValue).map(Duration.fromNanos)
/**
* Generates instance of Duration.
*
* In addition to `FiniteDuration` values, this can generate `Duration.Inf`,
* `Duration.MinusInf`, and `Duration.Undefined`.
*/
val duration: Gen[Duration] = frequency(
1 -> const(Duration.Inf),
1 -> const(Duration.MinusInf),
1 -> const(Duration.Undefined),
1 -> const(Duration.Zero),
6 -> finiteDuration)
// used to compute a uniformly-distributed size
private def mkSize(p: Gen.Parameters, seed0: Seed): (Int, Seed) = {
// +1 makes the upper bound inclusive; max(…, 1) guards against size 0 or
// negative sizes, which would make the modulus below invalid.
val maxSize = Integer.max(p.size + 1, 1)
val (x, seed1) = seed0.long
(((x & Long.MaxValue) % maxSize).toInt, seed1)
}
// used to calculate how many per-item retries we should allow.
private def collectionRetries(n: Int): Int =
Integer.max(10, n / 10)
}
| rickynils/scalacheck | src/main/scala/org/scalacheck/Gen.scala | Scala | bsd-3-clause | 57,289 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.play.views.layouts
import org.scalatest.{Matchers, WordSpec}
import org.scalatestplus.play.guice.GuiceOneAppPerSuite
import play.api.Application
import play.api.inject.guice.GuiceApplicationBuilder
/*
* Verifies that old pre play 2.6 way of accessing templates
* continues to work after we've made them injectable.
*/
// Verifies that the static (pre-DI) template singletons are still instances
// of their injectable counterparts, i.e. legacy call sites keep working.
class BackwardsCompatibilityDIAndStaticSpec extends WordSpec with Matchers with GuiceOneAppPerSuite {
import uk.gov.hmrc.play.views.html.layouts._
// Static template access relies on the deprecated global Application, so it
// must be explicitly enabled for this suite.
override def fakeApplication(): Application =
new GuiceApplicationBuilder().configure(Map("play.allowGlobalApplication" -> "true")).build()
"It should be possible to access templates without DI" in {
article shouldBe an[Article]
attorney_banner shouldBe an[AttorneyBanner]
betaBanner shouldBe a[BetaBanner]
footer_links shouldBe a[FooterLinks]
header_nav shouldBe a[HeaderNav]
loginStatus shouldBe a[LoginStatus]
main_content shouldBe a[MainContent]
main_content_header shouldBe a[MainContentHeader]
serviceInfo shouldBe a[ServiceInfo]
sidebar shouldBe a[Sidebar]
}
}
| hmrc/play-ui | src/test/scala/uk/gov/hmrc/play/views/layouts/BackwardsCompatibilityDIAndStaticSpec.scala | Scala | apache-2.0 | 1,793 |
/*
* Copyright © 2014 Teo Klestrup, Carl Dybdahl
*
* This file is part of Republix.
*
* Republix is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Republix is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Republix. If not, see <http://www.gnu.org/licenses/>.
*/
package republix
import java.io._
// Minimal asynchronous stream abstractions (In/Out channels) backed by
// blocking java.io streams and worker threads.
package object io {
// A readable channel of values of type A.
trait In[+A] { self =>
// will build up backlog if not called
def setReceive(f: A => Unit): Unit
def close(): Unit
// Re-registers itself after each element, so f is invoked for every
// subsequent value until the channel closes.
def listen(f: A => Unit): Unit = setReceive { x =>
f(x)
listen(f)
}
def map[B](f: A => B): In[B] = new In[B] {
def setReceive(g: B => Unit) = self.setReceive(f andThen g)
def close() = self.close()
}
}
// A writable channel of values of type A.
trait Out[-A] { self =>
// Number of queued-but-unsent values.
def backlog: Int
//non-blocking
def send(x: A): Unit
def close(): Unit
// Contravariant map: pre-transform values before sending.
def comap[B](f: B => A): Out[B] = new Out[B] {
def backlog = self.backlog
def send(x: B) = self.send(f(x))
def close() = self.close()
}
}
// Creates an In channel plus its producer function. The producer blocks
// (polling in 1s slices) until a receiver has been registered or the
// channel is closed; a value produced while closing may be dropped.
def makeIn[A](toClose: () => Unit): (In[A], A => Unit) = {
val listeners = new java.util.concurrent.LinkedBlockingQueue[A => Unit]
@volatile var open = true
def produce(x: A) = {
var done = false
while (!done && open) {
val listen = listeners.poll(1, java.util.concurrent.TimeUnit.SECONDS)
if (listen ne null) {
listen(x)
done = true
}
}
}
(new In[A] {
def setReceive(f: A => Unit): Unit = { listeners.add(f) }
def close(): Unit = { open = false; toClose() }
}, produce _)
}
// Builds an In channel by handing the producer function to `generator`.
def generate[A](toClose: () => Unit)(generator: (A => Unit) => Unit): In[A] = {
val (in, produce) = makeIn[A](toClose)
generator(produce)
in
}
// Like `generate`, but runs the generator on a dedicated thread and
// swallows (logs) IOExceptions from both the generator and the close hook.
def generateIO[A](toClose: () => Unit)(generator: (A => Unit) => Unit): In[A] = {
generate(() =>
try { toClose() }
catch {
case ex: IOException =>
ex.printStackTrace
}) { produce =>
new Thread {
override def run() = {
try {
generator(produce)
}
catch {
case ex: IOException =>
ex.printStackTrace
}
}
}.start()
}
}
// Adapts a blocking InputStream into an In[ByteString] on a reader thread.
// NOTE(review): when `is.read` returns -1 (end of stream), `len` is -1 and
// `bytes.take(-1)` is empty, so nothing is produced and the `while (true)`
// loop spins forever at EOF until the stream is closed (at which point
// read throws IOException). Confirm whether the loop should break on -1.
def fromInputStream(is: InputStream): In[ByteString] = generateIO(is.close _) { produce =>
while (true) {
val bytes = new Array[Byte](1024)
val len = is.read(bytes)
val res = ByteString(bytes.take(len))
if (res.length > 0) {
produce(res)
}
}
}
// Adapts a blocking OutputStream into an Out[ByteString]; a writer thread
// drains a queue (polling in 1s slices) and flushes after each chunk.
def fromOutputStream(os: OutputStream): Out[ByteString] = {
val queue = new java.util.concurrent.LinkedBlockingQueue[ByteString]
@volatile var open = true
new Thread {
override def run() = {
try {
while (open) {
val elem = queue.poll(1, java.util.concurrent.TimeUnit.SECONDS)
if (elem ne null) {
os.write(elem.toVector.toArray)
os.flush()
}
}
}
catch {
case ex: IOException =>
ex.printStackTrace
}
}
}.start
new Out[ByteString] {
def backlog = queue.size
def send(x: ByteString) = { queue.add(x) }
// Closing stops the writer loop; chunks still queued may never be
// written since the loop can exit before draining the queue.
def close() = {
open = false
try {
os.close()
}
catch {
case ex: IOException =>
ex.printStackTrace
}
}
}
}
}
} | teozkr/republix-online | republix/src/main/scala/republix/io/package.scala | Scala | agpl-3.0 | 3,504 |
package ch.epfl.scala.index
package client
import autowire._
import api._
import rpc.AutowireClient
import org.scalajs.dom
import org.scalajs.dom.ext.KeyCode
import org.scalajs.dom.{Event, KeyboardEvent, document}
import org.scalajs.dom.raw.{Element, HTMLInputElement, HTMLUListElement, Node}
import scalatags.JsDom.all._
import scalajs.concurrent.JSExecutionContext.Implicits.queue
import scalajs.js.annotation.JSExport
import scalajs.js.JSApp
import scala.concurrent.Future
import scala.util.Try
// Browser-side search/autocomplete logic for the Scaladex index page:
// reads the query from the search box, fetches completions over Autowire
// RPC, renders the result list, and supports keyboard navigation.
trait ClientBase {
// DOM ids of the search input and the result list container.
val searchId = "search"
val resultElementId = "list-result"
// Current completion candidates plus the highlighted index (mutable UI state).
var completionSelection: CompletionSelection = CompletionSelection.empty
def getResultList: Option[Element] = getElement(resultElementId)
def getSearchBox: Option[Element] =
getElement(searchId)
def getSearchInput: Option[HTMLInputElement] =
getSearchBox.map(_.getInput)
// Safe lookup: getElementById may return null, hence the Try/Option wrap.
def getElement(id: String): Option[Element] =
Try(document.getElementById(id)).toOption
// Appends one rendered project entry to the result list, if it exists.
def appendResult(owner: String, repo: String, description: String): Option[Node] = {
for {
resultContainer <- getResultList
newItem = newProjectItem(owner, repo, description)
} yield resultContainer.appendChild(newItem)
}
// Renders a single "<li><a>owner / repo …</a></li>" entry.
def newProjectItem(owner: String, repo: String, description: String): Element = {
li(
a(href := s"/$owner/$repo")(
p(s"$owner / $repo"),
span(description)
)
).render
}
// Only queries of at least two characters trigger a search.
def getQuery(input: Option[HTMLInputElement]): Option[String] = input match {
case Some(i) if i.value.length > 1 => Option(i.value)
case _ => None
}
// Autowire RPC call to the server-side autocomplete endpoint.
def getProjects(query: String): Future[List[Autocompletion]] =
AutowireClient[Api].autocomplete(query).call()
// Replaces the selection state and appends each completion to the DOM.
def showResults(projects: List[Autocompletion]): List[Option[Node]] = {
completionSelection = CompletionSelection(None, projects)
projects.map {
case Autocompletion(organization, repository, description) =>
appendResult(
organization,
repository,
description
)
}
}
// Clears both the selection state and the rendered list.
def cleanResults(): Unit = {
completionSelection = CompletionSelection.empty
getResultList.fold()(_.innerHTML = "")
}
@JSExport
def runSearch(event: dom.Event): Future[List[Option[Node]]] = {
cleanResults()
getQuery(getSearchInput)
.fold(
Future.successful(List.empty[Autocompletion])
)(getProjects)
.map(showResults)
}
// Keyboard handler: Up/Down move the highlight, Enter navigates to the
// selected project, Escape clears the results.
def navigate(event: KeyboardEvent): Unit = {
if (event.keyCode == KeyCode.Up && completionSelection.choices.nonEmpty) {
// Moving above the first entry deselects (None), not wrap-around.
moveSelection(
completionSelection.selected.map(_ - 1).filter(_ >= 0)
)
} else if (event.keyCode == KeyCode.Down && completionSelection.choices.nonEmpty) {
moveSelection(
completionSelection.selected.fold[Option[Int]](Some(0))(i =>
Some(math.min(i + 1, completionSelection.choices.size - 1)))
)
} else if (event.keyCode == KeyCode.Enter) {
completionSelection.selected.foreach { selected =>
event.preventDefault()
val Autocompletion(owner, repo, _) = completionSelection.choices(selected)
dom.window.location.assign(s"/$owner/$repo")
}
} else if (event.keyCode == KeyCode.Escape) {
cleanResults()
} else ()
def moveSelection(newSelected: Option[Int]): Unit = {
event.preventDefault()
completionSelection = completionSelection.copy(selected = newSelected)
updateSelection()
}
// Syncs the "selected" CSS class on the rendered list items with the
// current selection index.
def updateSelection(): Unit = {
getResultList.foreach { resultList =>
for (i <- 0 until resultList.childElementCount) {
val resultElement = resultList.childNodes(i).asInstanceOf[HTMLUListElement]
if (completionSelection.selected.contains(i)) {
resultElement.classList.add("selected")
} else {
resultElement.classList.remove("selected")
}
}
}
}
}
// Convenience casts for DOM elements (unchecked, caller must know the tag).
implicit class ElementOps(e: Element) {
def getInput: HTMLInputElement = get[HTMLInputElement]
def get[A <: Element]: A = e.asInstanceOf[A]
}
// selected: highlighted index into choices, if any.
case class CompletionSelection(selected: Option[Int], choices: List[Autocompletion])
object CompletionSelection {
val empty = CompletionSelection(None, Nil)
}
}
/** Scala.js entry point: wires the search box's input and keyboard events
* to the ClientBase handlers, if the search box exists in the DOM. */
object Client extends JSApp with ClientBase {
  override def main(): Unit =
    for (searchBox <- getSearchBox) {
      searchBox.addEventListener[Event]("input", runSearch _)
      searchBox.addEventListener[KeyboardEvent]("keydown", navigate _)
    }
}
| adamwy/scaladex | client/src/main/scala/ch.epfl.scala.index.client/Client.scala | Scala | bsd-3-clause | 4,494 |
package io.scalajs.npm.mongodb
import io.scalajs.nodejs.buffer.Buffer
import scala.scalajs.js
import scala.scalajs.js.Array
/**
* mongodb/gridfs package object
* @author lawrence.daniels@gmail.com
*/
package object gridfs {
/**
* GridFS Bucket Read Stream Extensions
* @author lawrence.daniels@gmail.com
*/
implicit class GridFSBucketReadStreamExtensions(val stream: GridFSBucketReadStream) extends AnyVal {
/**
* Fires when the stream loaded the file document corresponding to the provided id.
*/
@inline
def onFile(callback: js.Function): stream.type = stream.on("file", callback)
}
/**
* Grid Store Extensions
* @author lawrence.daniels@gmail.com
*/
implicit class GridStoreExtensions(val gridStore: GridStore) extends AnyVal {
/**
* Retrieve this file’s chunks collection.
*/
@inline
def chunkCollectionFuture(): js.Promise[Collection] = promiseMongoCallback1[Collection](gridStore.chunkCollection)
/**
* Saves this file to the database. This will overwrite the old entry if it already exists. This will work
* properly only if mode was initialized to “w” or “w+”.
*/
@inline
def closeFuture(): js.Promise[js.Any] = promiseMongoCallback1[js.Any](gridStore.close)
/**
* Retrieves the file collection associated with this object.
*/
@inline
def collectionFuture(): js.Promise[js.Any] = promiseMongoCallback1[js.Any](gridStore.collection)
/**
* Retrieves a single character from this file.
*/
@inline
def getcFuture(): js.Promise[String] = promiseMongoCallback1[String](gridStore.getc)
/**
* Opens the file from the database and initialize this object. Also creates a new one if file does not exist.
*/
@inline
def openFuture(): js.Promise[Db] = promiseMongoCallback1[Db](gridStore.open)
/**
* Writes a string to the file with a newline character appended at the end if the given string does not have one.
*/
@inline
def putsFuture(string: String): js.Promise[GridStore] = promiseMongoCallback1[GridStore](gridStore.puts(string, _))
/**
* Retrieves the contents of this file and advances the read/write head. Works with Buffers only.
*/
@inline
def readFuture[T <: js.Any](length: Int, buffer: Buffer): js.Promise[Array[T]] =
promiseMongoCallback1[js.Array[T]](gridStore.read(length, buffer, _))
/**
* Retrieves the contents of this file and advances the read/write head. Works with Buffers only.
*/
@inline
def readFuture[T <: js.Any](buffer: Buffer): js.Promise[Array[T]] =
promiseMongoCallback1[js.Array[T]](gridStore.read(buffer, _))
/**
* Retrieves the contents of this file and advances the read/write head. Works with Buffers only.
*/
@inline
def readFuture[T <: js.Any](): js.Promise[Array[T]] = promiseMongoCallback1[js.Array[T]](gridStore.read)
/**
* Reads the data of this file.
*/
@inline
def readlinesFuture(separator: String): js.Promise[Array[String]] =
promiseMongoCallback1[js.Array[String]](gridStore.readlines(separator, _))
/**
* Reads the data of this file.
*/
@inline
def readlinesFuture: js.Promise[Array[String]] = promiseMongoCallback1[js.Array[String]](gridStore.readlines)
    /**
      * Deletes all the chunks of this file in the database if mode was set to “w” or “w+” and resets the read/write
      * head to the initial position.
      */
    @inline
    def rewindFuture(): js.Promise[js.Any] = promiseMongoCallback1[js.Any](gridStore.rewind)
    /**
      * Moves the read/write head to a new location, relative to the given seek location.
      */
    @inline
    def seekFuture(position: Int, seekLocation: Int): js.Promise[GridStore] =
      promiseMongoCallback1[GridStore](gridStore.seek(position, seekLocation, _))
    /**
      * Moves the read/write head to a new location.
      */
    @inline
    def seekFuture(seekLocation: Int): js.Promise[GridStore] =
      promiseMongoCallback1[GridStore](gridStore.seek(seekLocation, _))
    /**
      * Moves the read/write head to a new location (driver defaults apply).
      */
    @inline
    def seekFuture(): js.Promise[GridStore] = promiseMongoCallback1[GridStore](gridStore.seek)
    /**
      * Deletes all the chunks of this file in the database.
      */
    @inline
    def unlinkFuture(): js.Promise[Boolean] = promiseMongoCallback1[Boolean](gridStore.unlink)
    /**
      * Writes some data. This method will work properly only if initialized with mode “w” or “w+”.
      * @param data  the buffer to write
      * @param close if true, the file is closed after the write completes
      */
    @inline
    def writeFuture(data: Buffer, close: Boolean): js.Promise[GridStore] =
      promiseMongoCallback1[GridStore](gridStore.write(data, close, _))
    /**
      * Writes some data. This method will work properly only if initialized with mode “w” or “w+”.
      * @param data  the string to write
      * @param close if true, the file is closed after the write completes
      */
    @inline
    def writeFuture(data: String, close: Boolean): js.Promise[GridStore] =
      promiseMongoCallback1[GridStore](gridStore.write(data, close, _))
    /**
      * Writes some data. This method will work properly only if initialized with mode “w” or “w+”.
      */
    @inline
    def writeFuture(data: Buffer): js.Promise[GridStore] =
      promiseMongoCallback1[GridStore](gridStore.write(data, _))
    /**
      * Writes some data. This method will work properly only if initialized with mode “w” or “w+”.
      */
    @inline
    def writeFuture(data: String): js.Promise[GridStore] =
      promiseMongoCallback1[GridStore](gridStore.write(data, _))
    /**
      * Stores a file from the file system to the GridFS database.
      * @param file path of the local file to upload
      */
    @inline
    def writeFileFuture(file: String): js.Promise[GridStore] = {
      promiseMongoCallback1[GridStore](gridStore.writeFile(file, _))
    }
}
}
| scalajs-io/mongodb | src/main/scala/io/scalajs/npm/mongodb/gridfs/package.scala | Scala | apache-2.0 | 5,813 |
package org.scalajs.openui5.sap.ui.unified
import org.scalajs.openui5.sap.ui.core.{Control, URI}
import org.scalajs.openui5.util.{Settings, SettingsMap, noSettings}
import scala.scalajs.js
import scala.scalajs.js.annotation.{JSName, ScalaJSDefined}
/** Settings bag for [[Shell]]; inherits all layout settings from the parent trait. */
@ScalaJSDefined
trait ShellSettings extends ShellLayoutSettings
/** Entry point for building [[ShellSettings]] fluently, starting from empty settings. */
object ShellSettings extends ShellSettingsBuilder(noSettings)
/** Immutable builder: each setter returns a new builder wrapping an extended settings map. */
class ShellSettingsBuilder(val dict: SettingsMap)
  extends Settings[ShellSettings, ShellSettingsBuilder](new ShellSettingsBuilder(_))
    with ShellSetters[ShellSettings, ShellSettingsBuilder]
/**
  * Fluent setters for the properties and aggregations of `sap.ui.unified.Shell`.
  * Each method records a key/value pair in the underlying settings map via `setting`.
  */
trait ShellSetters[T <: js.Object, B <: Settings[T, _]] extends ShellLayoutSetters[T, B] {
  /** Icon shown in the shell header. */
  def icon(v: URI) = setting("icon", v)
  def showCurtain(v: Boolean) = setting("showCurtain", v)
  def showCurtainPane(v: Boolean) = setting("showCurtainPane", v)
  def searchVisible(v: Boolean) = setting("searchVisible", v)
  @deprecated(message = "Curtain is deprecated and replaced by ShellOverlay " +
    "mechanism", since = "1.16.3")
  def curtainContent(v: js.Array[Control]) = setting("curtainContent", v)
  @deprecated(message = "Curtain is deprecated and replaced by ShellOverlay " +
    "mechanism", since = "1.16.3")
  def curtainPaneContent(v: js.Array[Control]) = setting("curtainPaneContent", v)
  /** Items placed at the start of the shell header. */
  def headItems(v: js.Array[ShellHeadItem]) = setting("headItems", v)
  /** Items placed at the end of the shell header. */
  def headEndItems(v: js.Array[ShellHeadItem]) = setting("headEndItems", v)
  def search(v: Control) = setting("search", v)
  def user(v: ShellHeadUserItem) = setting("user", v)
}
/**
  * Facade for the native `sap.ui.unified.Shell` control.
  * Both constructor arguments are optional on the JS side; the auxiliary
  * constructors cover the id-only and settings-only call patterns.
  */
@JSName("sap.ui.unified.Shell")
@js.native
class Shell(id: js.UndefOr[String] = js.native,
            settings: js.UndefOr[ShellSettings] = js.native)
  extends ShellLayout {
  def this(id: String) = this(id, js.undefined)
  def this(settings: ShellSettings) = this(js.undefined, settings)
}
| lastsys/scalajs-openui5 | src/main/scala/org/scalajs/openui5/sap/ui/unified/Shell.scala | Scala | mit | 1,823 |
package core
import akka.util.Timeout
import scala.slick.jdbc.JdbcBackend.Database
import scala.concurrent.{Future, Await, ExecutionContext}
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global
import akka.actor.{ActorRef, Actor}
import akka.pattern.{ask, pipe}
import core.ProteinActor._
import db.{ProductionDB, DAL, Protein}
/** Message protocol understood by [[ProteinActor]]. */
object ProteinActor {
  // Free-text search request; currently not handled by the actor (see TODO in receive).
  case class ProteinSearch(query: String)
  // Lookup request for a batch of protein identifiers.
  case class BatchProteinQuery(query: List[String])
}
/** Actor that answers protein queries by delegating to the database actor `db`. */
class ProteinActor(db: ActorRef) extends Actor with ProductionDB {
  // Upper bound for the ask (`?`) sent to the database actor in receive.
  implicit val timeout = Timeout(5 seconds)
def receive: Receive = {
case b@BatchProteinQuery(query) => {
val future = for {
x <- (db ? b).mapTo[List[Protein]]
} yield x
future pipeTo sender
}
// TODO: Implement this
// case Search(query) => {
// "Nothing"
// }
}
} | yumyai/fasta-search | src/main/scala/core/protein.scala | Scala | apache-2.0 | 880 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.codegen.agg
import org.apache.flink.api.common.typeutils.TypeSerializer
import org.apache.flink.table.api.TableException
import org.apache.flink.table.data.GenericRowData
import org.apache.flink.table.expressions._
import org.apache.flink.table.functions.ImperativeAggregateFunction
import org.apache.flink.table.planner.JLong
import org.apache.flink.table.planner.codegen.CodeGenUtils.{ROW_DATA, _}
import org.apache.flink.table.planner.codegen.Indenter.toISC
import org.apache.flink.table.planner.codegen._
import org.apache.flink.table.planner.codegen.agg.AggsHandlerCodeGenerator._
import org.apache.flink.table.planner.expressions.DeclarativeExpressionResolver.toRexInputRef
import org.apache.flink.table.planner.expressions._
import org.apache.flink.table.planner.functions.aggfunctions.DeclarativeAggregateFunction
import org.apache.flink.table.planner.plan.utils.AggregateInfoList
import org.apache.flink.table.planner.typeutils.DataViewUtils.{DataViewSpec, ListViewSpec, MapViewSpec}
import org.apache.flink.table.planner.utils.JavaScalaConversionUtil.toScala
import org.apache.flink.table.runtime.dataview.{StateListView, StateMapView}
import org.apache.flink.table.runtime.generated._
import org.apache.flink.table.runtime.operators.window.slicing.SliceAssigner
import org.apache.flink.table.runtime.types.LogicalTypeDataTypeConverter.fromDataTypeToLogicalType
import org.apache.flink.table.types.DataType
import org.apache.flink.table.types.logical.utils.LogicalTypeUtils
import org.apache.flink.table.types.logical.{BooleanType, IntType, LogicalType, RowType}
import org.apache.flink.util.Collector
import org.apache.calcite.rex.RexLiteral
import org.apache.calcite.tools.RelBuilder
import java.util.Optional
import java.time.ZoneId
/**
 * A code generator for generating [[AggsHandleFunction]].
 *
 * @param copyInputField copy input field element if true (only mutable type will be copied),
 *                       set to true if field will be buffered (such as local aggregate)
 */
class AggsHandlerCodeGenerator(
    ctx: CodeGeneratorContext,
    relBuilder: RelBuilder,
    inputFieldTypes: Seq[LogicalType],
    copyInputField: Boolean) {
  private val inputType = RowType.of(inputFieldTypes: _*)
  /** constant expressions that act like a second input in the parameter indices. */
  private var constants: Seq[RexLiteral] = Seq()
  private var constantExprs: Seq[GeneratedExpression] = Seq()
  /** window properties like window_start and window_end, only used in window aggregates */
  private var namespaceClassName: String = _
  private var windowProperties: Seq[PlannerWindowProperty] = Seq()
  private var hasNamespace: Boolean = false
  private var sliceAssignerTerm: String = _
  private var shiftTimeZone: ZoneId = _
  /** Aggregate information, filled in by initialAggregateInformation(...) */
  private var accTypeInfo: RowType = _
  private var aggBufferSize: Int = _
  private var mergedAccExternalTypes: Array[DataType] = _
  private var mergedAccOffset: Int = 0
  private var mergedAccOnHeap: Boolean = false
  private var ignoreAggValues: Array[Int] = Array()
  private var isAccumulateNeeded = false
  private var isRetractNeeded = false
  private var isMergeNeeded = false
  var valueType: RowType = _
  /**
   * The [[aggBufferCodeGens]] and [[aggActionCodeGens]] will be both created when code generate
   * an [[AggsHandleFunction]] or [[NamespaceAggsHandleFunction]]. They both contain all the
   * same AggCodeGens, but are different in the organizational form. The [[aggBufferCodeGens]]
   * flatten all the AggCodeGens in a flat format. The [[aggActionCodeGens]] organize all the
   * AggCodeGens in a tree format. If there is no distinct aggregate, the [[aggBufferCodeGens]]
   * and [[aggActionCodeGens]] are totally the same.
   *
   * When different aggregates distinct on the same field but on different filter conditions,
   * they will share the same distinct state, see DistinctAggCodeGen.DistinctValueGenerator
   * for more information.
   */
  /**
   * The aggBufferCodeGens is organized according to the agg buffer order, which is in a flat
   * format, and is only used to generate the methods relative to accumulators, Such as
   * [[genCreateAccumulators()]], [[genGetAccumulators()]], [[genSetAccumulators()]].
   *
   * For example if we have :
   * count(*), count(distinct a), count(distinct a) filter d > 5, sum(a), sum(distinct a)
   *
   * then the members of aggBufferCodeGens are organized looks like this:
   * +----------+-----------+-----------+---------+---------+----------------+
   * | count(*) | count(a') | count(a') | sum(a)  | sum(a') | distinct(a) a' |
   * +----------+-----------+-----------+---------+---------+----------------+
   * */
  private var aggBufferCodeGens: Array[AggCodeGen] = _
  /**
   * The aggActionCodeGens is organized according to the aggregate calling order, which is in
   * a tree format. Such as the aggregates distinct on the same fields should be accumulated
   * together when distinct is satisfied. And this is only used to generate the methods relative
   * to aggregate action. Such as [[genAccumulate()]], [[genRetract()]], [[genMerge()]].
   *
   * For example if we have :
   * count(*), count(distinct a), count(distinct a) filter d > 5, sum(a), sum(distinct a)
   *
   * then the members of aggActionCodeGens are organized looks like this:
   *
   * +----------------------------------------------------+
   * | count(*) | sum(a) | distinct(a) a'                 |
   * |          |        |   |-- count(a')                |
   * |          |        |   |-- count(a') (filter d > 5) |
   * |          |        |   |-- sum(a')                  |
   * +----------------------------------------------------+
   */
  private var aggActionCodeGens: Array[AggCodeGen] = _
  /**
   * Adds constant expressions that act like a second input in the parameter indices.
   * The constants are generated once and registered as reusable members so that
   * every generated method can reference them.
   *
   * @return this generator, for fluent chaining
   */
  def withConstants(literals: Seq[RexLiteral]): AggsHandlerCodeGenerator = {
    // create constants
    this.constants = literals
    val exprGenerator = new ExprCodeGenerator(ctx, INPUT_NOT_NULL)
    val exprs = literals.map(exprGenerator.generateExpression)
    this.constantExprs = exprs.map(ctx.addReusableConstant(_, nullCheck = true))
    this
  }
  /**
   * Tells the generator to generate `accumulate(..)` method for the [[AggsHandleFunction]] and
   * [[NamespaceAggsHandleFunction]]. By default no `accumulate(..)` method is generated
   * (the generated body then throws at runtime).
   */
  def needAccumulate(): AggsHandlerCodeGenerator = {
    this.isAccumulateNeeded = true
    this
  }
  /**
   * Tells the generator to generate `retract(..)` method for the [[AggsHandleFunction]] and
   * [[NamespaceAggsHandleFunction]]. By default no `retract(..)` method is generated.
   *
   * @return this generator, for fluent chaining
   */
  def needRetract(): AggsHandlerCodeGenerator = {
    this.isRetractNeeded = true
    this
  }
  /**
   * Tells the generator to generate `merge(..)` method with the merged accumulator information
   * for the [[AggsHandleFunction]] and [[NamespaceAggsHandleFunction]].
   * Default not generate `merge(..)` method.
   *
   * @param mergedAccOffset the mergedAcc may come from local aggregate,
   *                        this is the first buffer offset in the row
   * @param mergedAccOnHeap true if the mergedAcc is on heap, otherwise
   * @param mergedAccExternalTypes the merged acc types; when null, the acc types from the
   *                               aggregate info list are used later during initialization
   */
  def needMerge(
      mergedAccOffset: Int,
      mergedAccOnHeap: Boolean,
      mergedAccExternalTypes: Array[DataType] = null): AggsHandlerCodeGenerator = {
    this.mergedAccOffset = mergedAccOffset
    this.mergedAccOnHeap = mergedAccOnHeap
    this.mergedAccExternalTypes = mergedAccExternalTypes
    this.isMergeNeeded = true
    this
  }
  /**
   * Adds window properties such as window_start, window_end.
   * Switches the generator into "namespaced" mode: the generated handler will
   * receive a window instance (of `windowClass`) as its namespace argument.
   */
  private def initialWindowProperties(
      windowProperties: Seq[PlannerWindowProperty],
      windowClass: Class[_],
      shiftTimeZone: ZoneId): Unit = {
    this.windowProperties = windowProperties
    this.namespaceClassName = windowClass.getCanonicalName
    this.hasNamespace = true
    this.shiftTimeZone = shiftTimeZone
  }
  /**
   * Adds aggregate infos into context: builds one code generator per aggregate
   * (declarative or imperative) plus one per distinct group, and records which
   * accumulator fields / value positions must be ignored in the final result.
   */
  private def initialAggregateInformation(aggInfoList: AggregateInfoList): Unit = {
    this.accTypeInfo = RowType.of(
      aggInfoList.getAccTypes.map(fromDataTypeToLogicalType): _*)
    this.aggBufferSize = accTypeInfo.getFieldCount
    // running offset of each aggregate's accumulator fields inside the acc row
    var aggBufferOffset: Int = 0
    if (mergedAccExternalTypes == null) {
      mergedAccExternalTypes = aggInfoList.getAccTypes
    }
    val aggCodeGens = aggInfoList.aggInfos.map { aggInfo =>
      val filterExpr = createFilterExpression(
        aggInfo.agg.filterArg,
        aggInfo.aggIndex,
        aggInfo.agg.name)
      // dispatch on the aggregate function flavor: expression-based vs UDF-based
      val codegen = aggInfo.function match {
        case _: DeclarativeAggregateFunction =>
          new DeclarativeAggCodeGen(
            ctx,
            aggInfo,
            filterExpr,
            mergedAccOffset,
            aggBufferOffset,
            aggBufferSize,
            inputFieldTypes,
            constants,
            relBuilder)
        case _: ImperativeAggregateFunction[_, _] =>
          new ImperativeAggCodeGen(
            ctx,
            aggInfo,
            filterExpr,
            mergedAccOffset,
            aggBufferOffset,
            aggBufferSize,
            inputFieldTypes,
            constantExprs,
            relBuilder,
            hasNamespace,
            mergedAccOnHeap,
            mergedAccExternalTypes(aggBufferOffset),
            copyInputField)
      }
      aggBufferOffset = aggBufferOffset + aggInfo.externalAccTypes.length
      codegen
    }
    val distinctCodeGens = aggInfoList.distinctInfos.zipWithIndex.map {
      case (distinctInfo, index) =>
        val innerCodeGens = distinctInfo.aggIndexes.map(aggCodeGens(_)).toArray
        val distinctIndex = aggCodeGens.length + index
        val filterExpr = distinctInfo.filterArgs.map(
          createFilterExpression(_, distinctIndex, "distinct aggregate"))
        val codegen = new DistinctAggCodeGen(
          ctx,
          distinctInfo,
          index,
          innerCodeGens,
          filterExpr.toArray,
          constantExprs,
          mergedAccOffset,
          aggBufferOffset,
          aggBufferSize,
          hasNamespace,
          isMergeNeeded,
          mergedAccOnHeap,
          distinctInfo.consumeRetraction,
          copyInputField,
          relBuilder)
        // distinct agg buffer occupies only one field
        aggBufferOffset += 1
        codegen
    }
    val distinctAggIndexes = aggInfoList.distinctInfos.flatMap(_.aggIndexes)
    val nonDistinctAggIndexes = aggCodeGens.indices.filter(!distinctAggIndexes.contains(_)).toArray
    this.aggBufferCodeGens = aggCodeGens ++ distinctCodeGens
    this.aggActionCodeGens = nonDistinctAggIndexes.map(aggCodeGens(_)) ++ distinctCodeGens
    // when input contains retractions, we inserted a count1 agg in the agg list
    // the count1 agg value shouldn't be in the aggregate result
    if (aggInfoList.indexOfCountStar.nonEmpty && aggInfoList.countStarInserted) {
      ignoreAggValues ++= Array(aggInfoList.indexOfCountStar.get)
    }
    // the distinct value shouldn't be in the aggregate result
    if (aggInfoList.distinctInfos.nonEmpty) {
      ignoreAggValues ++= distinctCodeGens.indices.map(_ + aggCodeGens.length)
    }
  }
/**
* Creates filter argument access expression, none if no filter
*/
private def createFilterExpression(
filterArg: Int,
aggIndex: Int,
aggName: String): Option[Expression] = {
if (filterArg > 0) {
val name = s"agg_${aggIndex}_filter"
val filterType = inputFieldTypes(filterArg)
if (!filterType.isInstanceOf[BooleanType]) {
throw new TableException(s"filter arg must be boolean, but is $filterType, " +
s"the aggregate is $aggName.")
}
Some(toRexInputRef(relBuilder, filterArg, inputFieldTypes(filterArg)))
} else {
None
}
}
  /**
   * Generate [[GeneratedAggsHandleFunction]] with the given function name and aggregate infos.
   * All method bodies are generated first so that reusable member/init/open code
   * accumulates in the context before the class template is assembled.
   */
  def generateAggsHandler(
      name: String,
      aggInfoList: AggregateInfoList): GeneratedAggsHandleFunction = {
    initialAggregateInformation(aggInfoList)
    // generates all methods body first to add necessary reuse code to context
    val createAccumulatorsCode = genCreateAccumulators()
    val getAccumulatorsCode = genGetAccumulators()
    val setAccumulatorsCode = genSetAccumulators()
    val resetAccumulatorsCode = genResetAccumulators()
    val accumulateCode = genAccumulate()
    val retractCode = genRetract()
    val mergeCode = genMerge()
    val getValueCode = genGetValue()
    val functionName = newName(name)
    val functionCode =
      j"""
        public final class $functionName implements $AGGS_HANDLER_FUNCTION {
          ${ctx.reuseMemberCode()}
          private $STATE_DATA_VIEW_STORE store;
          public $functionName(java.lang.Object[] references) throws Exception {
            ${ctx.reuseInitCode()}
          }
          private $RUNTIME_CONTEXT getRuntimeContext() {
            return store.getRuntimeContext();
          }
          @Override
          public void open($STATE_DATA_VIEW_STORE store) throws Exception {
            this.store = store;
            ${ctx.reuseOpenCode()}
          }
          @Override
          public void accumulate($ROW_DATA $ACCUMULATE_INPUT_TERM) throws Exception {
            $accumulateCode
          }
          @Override
          public void retract($ROW_DATA $RETRACT_INPUT_TERM) throws Exception {
            $retractCode
          }
          @Override
          public void merge($ROW_DATA $MERGED_ACC_TERM) throws Exception {
            $mergeCode
          }
          @Override
          public void setAccumulators($ROW_DATA $ACC_TERM) throws Exception {
            $setAccumulatorsCode
          }
          @Override
          public void resetAccumulators() throws Exception {
            $resetAccumulatorsCode
          }
          @Override
          public $ROW_DATA getAccumulators() throws Exception {
            $getAccumulatorsCode
          }
          @Override
          public $ROW_DATA createAccumulators() throws Exception {
            $createAccumulatorsCode
          }
          @Override
          public $ROW_DATA getValue() throws Exception {
            $getValueCode
          }
          @Override
          public void cleanup() throws Exception {
            ${ctx.reuseCleanupCode()}
          }
          @Override
          public void close() throws Exception {
            ${ctx.reuseCloseCode()}
          }
        }
      """.stripMargin
    new GeneratedAggsHandleFunction(
      functionName, functionCode, ctx.references.toArray, ctx.tableConfig.getConfiguration)
  }
  /**
   * Generate [[GeneratedTableAggsHandleFunction]] with the given function name and aggregate
   * infos. Unlike the scalar variant, a table aggregate emits multiple rows via
   * an inner converting collector that joins each emitted record to the key.
   */
  def generateTableAggsHandler(
      name: String,
      aggInfoList: AggregateInfoList): GeneratedTableAggsHandleFunction = {
    initialAggregateInformation(aggInfoList)
    // generates all methods body first to add necessary reuse code to context
    val createAccumulatorsCode = genCreateAccumulators()
    val getAccumulatorsCode = genGetAccumulators()
    val setAccumulatorsCode = genSetAccumulators()
    val resetAccumulatorsCode = genResetAccumulators()
    val accumulateCode = genAccumulate()
    val retractCode = genRetract()
    val mergeCode = genMerge()
    val emitValueCode = genEmitValue()
    // gen converter: table aggregates emit user objects that must be converted to RowData
    val aggExternalType = aggInfoList.getActualAggregateInfos(0).externalResultType
    val recordInputName = newName("recordInput")
    val recordToRowDataCode = genRecordToRowData(aggExternalType, recordInputName)
    val functionName = newName(name)
    val functionCode =
      j"""
        public final class $functionName implements ${className[TableAggsHandleFunction]} {
          ${ctx.reuseMemberCode()}
          private $STATE_DATA_VIEW_STORE store;
          private $CONVERT_COLLECTOR_TYPE_TERM $MEMBER_COLLECTOR_TERM;
          public $functionName(java.lang.Object[] references) throws Exception {
            ${ctx.reuseInitCode()}
            $MEMBER_COLLECTOR_TERM = new $CONVERT_COLLECTOR_TYPE_TERM(references);
          }
          private $RUNTIME_CONTEXT getRuntimeContext() {
            return store.getRuntimeContext();
          }
          @Override
          public void open($STATE_DATA_VIEW_STORE store) throws Exception {
            this.store = store;
            ${ctx.reuseOpenCode()}
          }
          @Override
          public void accumulate($ROW_DATA $ACCUMULATE_INPUT_TERM) throws Exception {
            $accumulateCode
          }
          @Override
          public void retract($ROW_DATA $RETRACT_INPUT_TERM) throws Exception {
            $retractCode
          }
          @Override
          public void merge($ROW_DATA $MERGED_ACC_TERM) throws Exception {
            $mergeCode
          }
          @Override
          public void setAccumulators($ROW_DATA $ACC_TERM) throws Exception {
            $setAccumulatorsCode
          }
          @Override
          public void resetAccumulators() throws Exception {
            $resetAccumulatorsCode
          }
          @Override
          public $ROW_DATA getAccumulators() throws Exception {
            $getAccumulatorsCode
          }
          @Override
          public $ROW_DATA createAccumulators() throws Exception {
            $createAccumulatorsCode
          }
          @Override
          public void emitValue(
            $COLLECTOR<$ROW_DATA> $COLLECTOR_TERM, $ROW_DATA key, boolean isRetract)
            throws Exception {
            $MEMBER_COLLECTOR_TERM.reset(key, isRetract, $COLLECTOR_TERM);
            $emitValueCode
          }
          @Override
          public void cleanup() throws Exception {
            ${ctx.reuseCleanupCode()}
          }
          @Override
          public void close() throws Exception {
            ${ctx.reuseCloseCode()}
          }
          private class $CONVERT_COLLECTOR_TYPE_TERM implements $COLLECTOR {
            private $COLLECTOR<$ROW_DATA> $COLLECTOR_TERM;
            private $ROW_DATA key;
            private $JOINED_ROW result;
            private boolean isRetract = false;
            ${ctx.reuseMemberCode()}
            public $CONVERT_COLLECTOR_TYPE_TERM(java.lang.Object[] references) throws Exception {
              ${ctx.reuseInitCode()}
              result = new $JOINED_ROW();
            }
            public void reset(
              $ROW_DATA key, boolean isRetract, $COLLECTOR<$ROW_DATA> $COLLECTOR_TERM) {
              this.key = key;
              this.isRetract = isRetract;
              this.$COLLECTOR_TERM = $COLLECTOR_TERM;
            }
            public $ROW_DATA convertToRowData(Object $recordInputName) throws Exception {
              $recordToRowDataCode
            }
            @Override
            public void collect(Object $recordInputName) throws Exception {
              $ROW_DATA tempRowData = convertToRowData($recordInputName);
              result.replace(key, tempRowData);
              if (isRetract) {
                result.setRowKind($ROW_KIND.DELETE);
              } else {
                result.setRowKind($ROW_KIND.INSERT);
              }
              $COLLECTOR_TERM.collect(result);
            }
            @Override
            public void close() {
              $COLLECTOR_TERM.close();
            }
          }
        }
      """.stripMargin
    new GeneratedTableAggsHandleFunction(
      functionName, functionCode, ctx.references.toArray, ctx.tableConfig.getConfiguration)
  }
  /**
   * Generate [[NamespaceAggsHandleFunction]] with the given function name and aggregate infos
   * and window properties. Convenience overload for sliced windows: registers the
   * [[SliceAssigner]] as a reusable member and uses the window-end timestamp
   * (a `Long`) as the namespace type.
   */
  def generateNamespaceAggsHandler(
      name: String,
      aggInfoList: AggregateInfoList,
      windowProperties: Seq[PlannerWindowProperty],
      sliceAssigner: SliceAssigner,
      shiftTimeZone: ZoneId): GeneratedNamespaceAggsHandleFunction[JLong] = {
    this.sliceAssignerTerm = newName("sliceAssigner")
    ctx.addReusableObjectWithName(sliceAssigner, sliceAssignerTerm)
    // we use window end timestamp to indicate a window, see SliceAssigner
    generateNamespaceAggsHandler(
      name,
      aggInfoList,
      windowProperties,
      classOf[JLong],
      shiftTimeZone)
  }
  /**
   * Generate [[NamespaceAggsHandleFunction]] with the given function name and aggregate infos
   * and window properties. The generated methods that depend on the window
   * receive the namespace object and cast it to `windowClass` before use.
   */
  def generateNamespaceAggsHandler[N](
      name: String,
      aggInfoList: AggregateInfoList,
      windowProperties: Seq[PlannerWindowProperty],
      windowClass: Class[N],
      shiftTimeZone: ZoneId): GeneratedNamespaceAggsHandleFunction[N] = {
    initialWindowProperties(windowProperties, windowClass, shiftTimeZone)
    initialAggregateInformation(aggInfoList)
    // generates all methods body first to add necessary reuse code to context
    val createAccumulatorsCode = genCreateAccumulators()
    val getAccumulatorsCode = genGetAccumulators()
    val setAccumulatorsCode = genSetAccumulators()
    val accumulateCode = genAccumulate()
    val retractCode = genRetract()
    val mergeCode = genMerge()
    val getValueCode = genGetValue()
    val functionName = newName(name)
    val functionCode =
      j"""
        public final class $functionName
          implements $NAMESPACE_AGGS_HANDLER_FUNCTION<$namespaceClassName> {
          ${ctx.reuseMemberCode()}
          private $STATE_DATA_VIEW_STORE store;
          private $namespaceClassName $NAMESPACE_TERM;
          public $functionName(Object[] references) throws Exception {
            ${ctx.reuseInitCode()}
          }
          private $RUNTIME_CONTEXT getRuntimeContext() {
            return store.getRuntimeContext();
          }
          @Override
          public void open($STATE_DATA_VIEW_STORE store) throws Exception {
            this.store = store;
            ${ctx.reuseOpenCode()}
          }
          @Override
          public void accumulate($ROW_DATA $ACCUMULATE_INPUT_TERM) throws Exception {
            $accumulateCode
          }
          @Override
          public void retract($ROW_DATA $RETRACT_INPUT_TERM) throws Exception {
            $retractCode
          }
          @Override
          public void merge(Object ns, $ROW_DATA $MERGED_ACC_TERM) throws Exception {
            $NAMESPACE_TERM = ($namespaceClassName) ns;
            $mergeCode
          }
          @Override
          public void setAccumulators(Object ns, $ROW_DATA $ACC_TERM)
          throws Exception {
            $NAMESPACE_TERM = ($namespaceClassName) ns;
            $setAccumulatorsCode
          }
          @Override
          public $ROW_DATA getAccumulators() throws Exception {
            $getAccumulatorsCode
          }
          @Override
          public $ROW_DATA createAccumulators() throws Exception {
            $createAccumulatorsCode
          }
          @Override
          public $ROW_DATA getValue(Object ns) throws Exception {
            $NAMESPACE_TERM = ($namespaceClassName) ns;
            $getValueCode
          }
          @Override
          public void cleanup(Object ns) throws Exception {
            $NAMESPACE_TERM = ($namespaceClassName) ns;
            ${ctx.reuseCleanupCode()}
          }
          @Override
          public void close() throws Exception {
            ${ctx.reuseCloseCode()}
          }
        }
      """.stripMargin
    new GeneratedNamespaceAggsHandleFunction[N](
      functionName, functionCode, ctx.references.toArray, ctx.tableConfig.getConfiguration)
  }
  /**
   * Generate [[NamespaceTableAggsHandleFunction]] with the given function name and aggregate infos
   * and window properties. Combines the namespaced (windowed) handler with the
   * table-aggregate emit path: emitted records are converted to RowData and
   * joined with the window time properties and the grouping key.
   */
  def generateNamespaceTableAggsHandler[N](
      name: String,
      aggInfoList: AggregateInfoList,
      windowProperties: Seq[PlannerWindowProperty],
      windowClass: Class[N],
      shiftedTimeZone: ZoneId): GeneratedNamespaceTableAggsHandleFunction[N] = {
    initialWindowProperties(windowProperties, windowClass, shiftedTimeZone)
    initialAggregateInformation(aggInfoList)
    // generates all methods body first to add necessary reuse code to context
    val createAccumulatorsCode = genCreateAccumulators()
    val getAccumulatorsCode = genGetAccumulators()
    val setAccumulatorsCode = genSetAccumulators()
    val accumulateCode = genAccumulate()
    val retractCode = genRetract()
    val mergeCode = genMerge()
    val emitValueCode = genEmitValue(isWindow = true)
    // gen converter: table aggregates emit user objects that must be converted to RowData
    val aggExternalType = aggInfoList.getActualAggregateInfos(0).externalResultType
    val recordInputName = newName("recordInput")
    val recordToRowDataCode = genRecordToRowData(aggExternalType, recordInputName)
    val functionName = newName(name)
    val functionCode =
      j"""
        public final class $functionName
          implements ${className[NamespaceTableAggsHandleFunction[_]]}<$namespaceClassName> {
          ${ctx.reuseMemberCode()}
          private $STATE_DATA_VIEW_STORE store;
          private $namespaceClassName $NAMESPACE_TERM;
          private $CONVERT_COLLECTOR_TYPE_TERM $MEMBER_COLLECTOR_TERM;
          public $functionName(Object[] references) throws Exception {
            ${ctx.reuseInitCode()}
            $MEMBER_COLLECTOR_TERM = new $CONVERT_COLLECTOR_TYPE_TERM(references);
          }
          private $RUNTIME_CONTEXT getRuntimeContext() {
            return store.getRuntimeContext();
          }
          @Override
          public void open($STATE_DATA_VIEW_STORE store) throws Exception {
            this.store = store;
            ${ctx.reuseOpenCode()}
          }
          @Override
          public void accumulate($ROW_DATA $ACCUMULATE_INPUT_TERM) throws Exception {
            $accumulateCode
          }
          @Override
          public void retract($ROW_DATA $RETRACT_INPUT_TERM) throws Exception {
            $retractCode
          }
          @Override
          public void merge(Object ns, $ROW_DATA $MERGED_ACC_TERM) throws Exception {
            $NAMESPACE_TERM = ($namespaceClassName) ns;
            $mergeCode
          }
          @Override
          public void setAccumulators(Object ns, $ROW_DATA $ACC_TERM)
          throws Exception {
            $NAMESPACE_TERM = ($namespaceClassName) ns;
            $setAccumulatorsCode
          }
          @Override
          public $ROW_DATA getAccumulators() throws Exception {
            $getAccumulatorsCode
          }
          @Override
          public $ROW_DATA createAccumulators() throws Exception {
            $createAccumulatorsCode
          }
          @Override
          public void emitValue(Object ns, $ROW_DATA $KEY_TERM,
            $COLLECTOR<$ROW_DATA> $COLLECTOR_TERM) throws Exception {
            $MEMBER_COLLECTOR_TERM.$COLLECTOR_TERM = $COLLECTOR_TERM;
            $NAMESPACE_TERM = ($namespaceClassName) ns;
            $emitValueCode
          }
          @Override
          public void cleanup(Object ns) throws Exception {
            $NAMESPACE_TERM = ($namespaceClassName) ns;
            ${ctx.reuseCleanupCode()}
          }
          @Override
          public void close() throws Exception {
            ${ctx.reuseCloseCode()}
          }
          private class $CONVERT_COLLECTOR_TYPE_TERM implements $COLLECTOR {
            public $COLLECTOR<$ROW_DATA> $COLLECTOR_TERM;
            private $ROW_DATA timeProperties;
            private $ROW_DATA key;
            private $JOINED_ROW outerResult;
            private $JOINED_ROW innerResult;
            ${ctx.reuseMemberCode()}
            public $CONVERT_COLLECTOR_TYPE_TERM(java.lang.Object[] references) throws Exception {
              ${ctx.reuseInitCode()}
              outerResult = new $JOINED_ROW();
              innerResult = new $JOINED_ROW();
            }
            public void reset($ROW_DATA $KEY_TERM, $ROW_DATA timeProperties) {
              this.timeProperties = timeProperties;
              this.key = $KEY_TERM;
            }
            public $ROW_DATA convertToRowData(Object $recordInputName) throws Exception {
              $recordToRowDataCode
            }
            @Override
            public void collect(Object $recordInputName) throws Exception {
              $ROW_DATA tempRowData = convertToRowData($recordInputName);
              innerResult.replace(tempRowData, timeProperties);
              outerResult.replace(key, innerResult);
              $COLLECTOR_TERM.collect(outerResult);
            }
            @Override
            public void close() {
              $COLLECTOR_TERM.close();
            }
          }
        }
      """.stripMargin
    new GeneratedNamespaceTableAggsHandleFunction[N](
      functionName, functionCode, ctx.references.toArray, ctx.tableConfig.getConfiguration)
  }
  /**
   * Generates the body of `createAccumulators()`: evaluates every aggregate's
   * initializer expression and packs the results into a fresh accumulator row.
   */
  private def genCreateAccumulators(): String = {
    val methodName = "createAccumulators"
    ctx.startNewLocalVariableStatement(methodName)
    // not need to bind input for ExprCodeGenerator
    val exprGenerator = new ExprCodeGenerator(ctx, INPUT_NOT_NULL)
    val initAccExprs = aggBufferCodeGens.flatMap(_.createAccumulator(exprGenerator))
    val accTerm = newName("acc")
    val resultExpr = exprGenerator.generateResultExpression(
      initAccExprs,
      accTypeInfo,
      classOf[GenericRowData],
      outRow = accTerm,
      reusedOutRow = false)
    s"""
       |${ctx.reuseLocalVariableCode(methodName)}
       |${ctx.reusePerRecordCode()}
       |${resultExpr.code}
       |return ${resultExpr.resultTerm};
    """.stripMargin
  }
  /**
   * Generates the body of `getAccumulators()`: reads each aggregate's current
   * accumulator state and materializes it into a new accumulator row.
   */
  private def genGetAccumulators(): String = {
    val methodName = "getAccumulators"
    ctx.startNewLocalVariableStatement(methodName)
    // no need to bind input
    val exprGenerator = new ExprCodeGenerator(ctx, INPUT_NOT_NULL)
    val accExprs = aggBufferCodeGens.flatMap(_.getAccumulator(exprGenerator))
    val accTerm = newName("acc")
    // always create a new accumulator row
    val resultExpr = exprGenerator.generateResultExpression(
      accExprs,
      accTypeInfo,
      classOf[GenericRowData],
      outRow = accTerm,
      reusedOutRow = false)
    s"""
       |${ctx.reuseLocalVariableCode(methodName)}
       |${resultExpr.code}
       |return ${resultExpr.resultTerm};
    """.stripMargin
  }
  /**
   * Generates the body of `setAccumulators(acc)`: restores every aggregate's
   * state from the given accumulator row (bound as the expression input).
   */
  private def genSetAccumulators(): String = {
    val methodName = "setAccumulators"
    ctx.startNewLocalVariableStatement(methodName)
    // bind input1 as accumulators
    val exprGenerator = new ExprCodeGenerator(ctx, INPUT_NOT_NULL)
      .bindInput(accTypeInfo, inputTerm = ACC_TERM)
    val body = aggBufferCodeGens.map(_.setAccumulator(exprGenerator)).mkString("\n")
    s"""
       |${ctx.reuseLocalVariableCode(methodName)}
       |${ctx.reuseInputUnboxingCode(ACC_TERM)}
       |$body
    """.stripMargin
  }
private def genResetAccumulators(): String = {
val methodName = "resetAccumulators"
ctx.startNewLocalVariableStatement(methodName)
val exprGenerator = new ExprCodeGenerator(ctx, INPUT_NOT_NULL)
val body = aggBufferCodeGens.map(_.resetAccumulator(exprGenerator)).mkString("\n")
s"""
|${ctx.reuseLocalVariableCode(methodName)}
|$body
""".stripMargin
}
  /**
   * Generates the body of `accumulate(input)`. If accumulation was not requested
   * via [[needAccumulate()]], the generated body throws at runtime instead.
   */
  private def genAccumulate(): String = {
    if (isAccumulateNeeded) {
      // validation check
      checkNeededMethods(needAccumulate = true)
      val methodName = "accumulate"
      ctx.startNewLocalVariableStatement(methodName)
      // bind input1 as inputRow
      val exprGenerator = new ExprCodeGenerator(ctx, INPUT_NOT_NULL)
        .bindInput(inputType, inputTerm = ACCUMULATE_INPUT_TERM)
      // note: aggActionCodeGens (tree form) is used here, not aggBufferCodeGens
      val body = aggActionCodeGens.map(_.accumulate(exprGenerator)).mkString("\n")
      s"""
         |${ctx.reuseLocalVariableCode(methodName)}
         |${ctx.reuseInputUnboxingCode(ACCUMULATE_INPUT_TERM)}
         |${ctx.reusePerRecordCode()}
         |$body
         |""".stripMargin
    } else {
      genThrowException(
        "This function not require accumulate method, but the accumulate method is called.")
    }
  }
/**
 * Generates the body of the handler's `retract(input)` method.
 * Mirrors [[genAccumulate]] but emits the retraction code of every
 * aggregate action. When retraction was not requested, the generated
 * method throws at runtime.
 */
private def genRetract(): String = {
  if (isRetractNeeded) {
    // validation check
    checkNeededMethods(needRetract = true)
    val methodName = "retract"
    ctx.startNewLocalVariableStatement(methodName)
    // bind input1 as inputRow
    val exprGenerator = new ExprCodeGenerator(ctx, INPUT_NOT_NULL)
      .bindInput(inputType, inputTerm = RETRACT_INPUT_TERM)
    val body = aggActionCodeGens.map(_.retract(exprGenerator)).mkString("\n")
    s"""
       |${ctx.reuseLocalVariableCode(methodName)}
       |${ctx.reuseInputUnboxingCode(RETRACT_INPUT_TERM)}
       |${ctx.reusePerRecordCode()}
       |$body
    """.stripMargin
  } else {
    // fixed grammar of the generated error message ("not require" -> "does not require")
    genThrowException(
      "This function does not require retract method, but the retract method is called.")
  }
}
/**
 * Generates the body of the handler's `merge(otherAcc)` method.
 * The merged input row may contain leading fields before the accumulators
 * (e.g. the grouping key in local-global aggregation); `mergedAccOffset`
 * many int-typed padding fields are prepended to the bound row type so the
 * accumulator positions line up while the padding itself is ignored.
 * When merging was not requested, the generated method throws at runtime.
 */
private def genMerge(): String = {
  if (isMergeNeeded) {
    // validation check
    checkNeededMethods(needMerge = true)
    val methodName = "merge"
    ctx.startNewLocalVariableStatement(methodName)
    // the mergedAcc is partial of mergedInput, such as <key, acc> in local-global, ignore keys
    val internalAccTypes = mergedAccExternalTypes.map(fromDataTypeToLogicalType)
    val mergedAccType = if (mergedAccOffset > 0) {
      // concat padding types and acc types, use int type as padding
      // the padding types will be ignored
      val padding = Array.range(0, mergedAccOffset).map(_ => new IntType())
      RowType.of(padding ++ internalAccTypes: _*)
    } else {
      RowType.of(internalAccTypes: _*)
    }
    // bind input1 as otherAcc
    val exprGenerator = new ExprCodeGenerator(ctx, INPUT_NOT_NULL)
      .bindInput(mergedAccType, inputTerm = MERGED_ACC_TERM)
    val body = aggActionCodeGens.map(_.merge(exprGenerator)).mkString("\n")
    s"""
       |${ctx.reuseLocalVariableCode(methodName)}
       |${ctx.reuseInputUnboxingCode(MERGED_ACC_TERM)}
       |${ctx.reusePerRecordCode()}
       |$body
    """.stripMargin
  } else {
    // fixed grammar of the generated error message ("not require" -> "does not require")
    genThrowException(
      "This function does not require merge method, but the merge method is called.")
  }
}
/**
 * Generates one expression per requested window property (start, end,
 * rowtime, proctime).
 *
 * Two namespace encodings are handled: when the namespace class is
 * `java.lang.Long` the slicing optimization is active and the namespace
 * value itself is the window-end timestamp (window start is recomputed via
 * the slice assigner); otherwise the namespace is a window object exposing
 * `getStart()`/`getEnd()`. In both branches the rowtime is `end - 1`,
 * shifted by [[getShiftEpochMills]], and the proctime attribute is emitted
 * as a null placeholder (nullTerm "true") to be filled later.
 */
private def getWindowExpressions(
    windowProperties: Seq[PlannerWindowProperty]): Seq[GeneratedExpression] = {
  if (namespaceClassName.equals(classOf[JLong].getCanonicalName)) {
    // slicing optimization, we are using window end timestamp to indicate a window
    windowProperties.map {
      case w: PlannerWindowStart =>
        // return a Timestamp(Internal is TimestampData)
        GeneratedExpression(
          s"$TIMESTAMP_DATA.fromEpochMillis($sliceAssignerTerm.getWindowStart($NAMESPACE_TERM))",
          "false",
          "",
          w.getResultType)
      case w: PlannerWindowEnd =>
        // return a Timestamp(Internal is TimestampData)
        GeneratedExpression(
          s"$TIMESTAMP_DATA.fromEpochMillis($NAMESPACE_TERM)",
          "false",
          "",
          w.getResultType)
      case r: PlannerRowtimeAttribute =>
        // return a rowtime, use TimestampData as internal type
        GeneratedExpression(
          s"""
             |$TIMESTAMP_DATA.fromEpochMillis(
             |${getShiftEpochMills(s"$NAMESPACE_TERM - 1")})
          """.stripMargin,
          "false",
          "",
          r.getResultType)
      case p: PlannerProctimeAttribute =>
        // ignore this property, it will be null at the position later
        GeneratedExpression(s"$TIMESTAMP_DATA.fromEpochMillis(-1L)", "true", "", p.getResultType)
    }
  } else {
    windowProperties.map {
      case w: PlannerWindowStart =>
        // return a Timestamp(Internal is TimestampData)
        GeneratedExpression(
          s"$TIMESTAMP_DATA.fromEpochMillis($NAMESPACE_TERM.getStart())",
          "false",
          "",
          w.getResultType)
      case w: PlannerWindowEnd =>
        // return a Timestamp(Internal is TimestampData)
        GeneratedExpression(
          s"$TIMESTAMP_DATA.fromEpochMillis($NAMESPACE_TERM.getEnd())",
          "false",
          "",
          w.getResultType)
      case r: PlannerRowtimeAttribute =>
        // return a rowtime, use TimestampData as internal type
        GeneratedExpression(
          s"""
             |$TIMESTAMP_DATA.fromEpochMillis(
             |${getShiftEpochMills(s"$NAMESPACE_TERM.getEnd() - 1")})
          """.stripMargin,
          "false",
          "",
          r.getResultType)
      case p: PlannerProctimeAttribute =>
        // ignore this property, it will be null at the position later
        GeneratedExpression(s"$TIMESTAMP_DATA.fromEpochMillis(-1L)", "true", "", p.getResultType)
    }
  }
}
/**
 * Wraps a millisecond expression so it is shifted from the session time zone
 * back to epoch time. For UTC no shifting is needed and the expression is
 * returned unchanged; otherwise a reusable time-zone term is registered and
 * the conversion is delegated to TimeWindowUtil in the generated code.
 */
private def getShiftEpochMills(itemExpr: String): String = {
  if ("UTC".equals(shiftTimeZone.getId)) {
    itemExpr
  } else {
    val timeZoneId = ctx.addReusableShiftTimeZone(shiftTimeZone)
    s"""
       |$TIME_WINDOW_UTIL.toEpochMills($itemExpr, $timeZoneId)
    """.stripMargin
  }
}
/**
 * Generates the body of the handler's `getValue()` method.
 * Collects one value expression per aggregate buffer (skipping indices in
 * `ignoreAggValues`, e.g. the auxiliary count1 / distinct aggregates),
 * appends window-property expressions when a namespace is present, and
 * materializes everything into a freshly created GenericRowData.
 * Side effect: `valueType` is set to the row type of the produced values.
 */
private def genGetValue(): String = {
  val methodName = "getValue"
  ctx.startNewLocalVariableStatement(methodName)
  // no need to bind input
  val exprGenerator = new ExprCodeGenerator(ctx, INPUT_NOT_NULL)
  var valueExprs = aggBufferCodeGens.zipWithIndex.filter { case (_, index) =>
    // ignore the count1 agg codegen and distinct agg codegen
    ignoreAggValues.isEmpty || !ignoreAggValues.contains(index)
  }.map { case (codegen, _) =>
    codegen.getValue(exprGenerator)
  }
  if (hasNamespace) {
    // append window property results
    val windowExprs = getWindowExpressions(windowProperties)
    valueExprs = valueExprs ++ windowExprs
  }
  val aggValueTerm = newName("aggValue")
  valueType = RowType.of(valueExprs.map(_.resultType): _*)
  // always create a new result row
  val resultExpr = exprGenerator.generateResultExpression(
    valueExprs,
    valueType,
    classOf[GenericRowData],
    outRow = aggValueTerm,
    reusedOutRow = false)
  s"""
     |${ctx.reuseLocalVariableCode(methodName)}
     |${resultExpr.code}
     |return ${resultExpr.resultTerm};
  """.stripMargin
}
/**
 * Generates the body of the handler's `emitValue(...)` method for
 * table aggregates. For windowed emission the window-property row is built
 * first and handed to the member collector via `reset`, then the actual
 * emit code of the (single) imperative aggregate is appended.
 * NOTE(review): assumes emitValue is only used with exactly one imperative
 * aggregate — `aggBufferCodeGens(0)` is cast unconditionally.
 */
private def genEmitValue(isWindow: Boolean = false): String = {
  // validation check
  checkNeededMethods(needEmitValue = true)
  val methodName = "emitValue"
  ctx.startNewLocalVariableStatement(methodName)
  val windowCode =
    if (isWindow) {
      // no need to bind input
      val exprGenerator = new ExprCodeGenerator(ctx, INPUT_NOT_NULL)
      val valueExprs = getWindowExpressions(windowProperties)
      val aggValueTerm = newName("windowProperties")
      valueType = RowType.of(valueExprs.map(_.resultType): _*)
      // always create a new result row
      val resultExpr = exprGenerator.generateResultExpression(
        valueExprs,
        valueType,
        classOf[GenericRowData],
        outRow = aggValueTerm,
        reusedOutRow = false)
      s"""
         |${ctx.reuseLocalVariableCode(methodName)}
         |${resultExpr.code}
         |$MEMBER_COLLECTOR_TERM.reset($KEY_TERM, ${resultExpr.resultTerm});
      """.stripMargin
    } else {
      ""
    }
  windowCode + aggBufferCodeGens(0).asInstanceOf[ImperativeAggCodeGen].emitValue
}
/**
 * Generates code that converts an external aggregate record into an internal
 * RowData. A fresh CodeGeneratorContext is used so the converter's member /
 * local-variable sections do not collide with the enclosing handler's
 * reusable sections; only the to-internal converter itself is registered on
 * the outer `ctx`.
 */
private def genRecordToRowData(aggExternalType: DataType, recordInputName: String): String = {
  val resultType = fromDataTypeToLogicalType(aggExternalType)
  val resultRowType = LogicalTypeUtils.toRowType(resultType)
  val newCtx = CodeGeneratorContext(ctx.tableConfig)
  val exprGenerator = new ExprCodeGenerator(newCtx, false).bindInput(resultType)
  val resultExpr = exprGenerator.generateConverterResultExpression(
    resultRowType, classOf[GenericRowData], "convertResult")
  val converterCode = CodeGenUtils.genToInternalConverter(ctx, aggExternalType, recordInputName)
  val resultTypeClass = boxedTypeTermForType(resultType)
  s"""
     |${newCtx.reuseMemberCode()}
     |$resultTypeClass ${exprGenerator.input1Term} = ($resultTypeClass) $converterCode;
     |${newCtx.reuseLocalVariableCode()}
     |${newCtx.reuseInputUnboxingCode()}
     |${resultExpr.code}
     |return ${resultExpr.resultTerm};
  """.stripMargin
}
/**
 * Delegates validation to every aggregate-buffer codegen: each one checks
 * that it can actually generate the methods flagged as needed and is
 * expected to fail otherwise.
 */
private def checkNeededMethods(
    needAccumulate: Boolean = false,
    needRetract: Boolean = false,
    needMerge: Boolean = false,
    needReset: Boolean = false,
    needEmitValue: Boolean = false): Unit = {
  // check and validate the needed methods
  aggBufferCodeGens.foreach(
    _.checkNeededMethods(needAccumulate, needRetract, needMerge, needReset, needEmitValue))
}
/**
 * Generates a Java statement that unconditionally throws a RuntimeException
 * with the given message. Used as the body of handler methods that were not
 * requested for this aggregation.
 */
private def genThrowException(msg: String): String = {
  s"""
     |throw new java.lang.RuntimeException("$msg");
  """.stripMargin
}
}
/**
 * Companion object holding the fixed term names used by the generated
 * aggregation handler code, plus helpers for wiring state-backed DataViews
 * (MapView/ListView) into a [[CodeGeneratorContext]].
 */
object AggsHandlerCodeGenerator {

  /** static terms **/
  val ACC_TERM = "acc"
  val MERGED_ACC_TERM = "otherAcc"
  val ACCUMULATE_INPUT_TERM = "accInput"
  val RETRACT_INPUT_TERM = "retractInput"
  val DISTINCT_KEY_TERM = "distinctKey"

  val NAMESPACE_TERM = "namespace"
  val STORE_TERM = "store"

  val COLLECTOR: String = className[Collector[_]]
  val COLLECTOR_TERM = "out"
  val MEMBER_COLLECTOR_TERM = "convertCollector"
  val CONVERT_COLLECTOR_TYPE_TERM = "ConvertCollector"
  val KEY_TERM = "groupKey"

  // expressions are generated assuming inputs may be null
  val INPUT_NOT_NULL = false

  /**
   * Create DataView term, for example, acc1_map_dataview.
   *
   * @return term to access MapView or ListView
   */
  def createDataViewTerm(spec: DataViewSpec): String = {
    s"${spec.getStateId}_dataview"
  }

  /**
   * Creates RawValueData term which wraps the specific DataView term.
   */
  def createDataViewRawValueTerm(spec: DataViewSpec): String = {
    s"${createDataViewTerm(spec)}_raw_value"
  }

  /**
   * Create DataView backup term, for example, acc1_map_dataview_backup.
   * The backup dataview term is used for merging two statebackend
   * dataviews, e.g. session window.
   *
   * @return term to access backup MapView or ListView
   */
  def createDataViewBackupTerm(spec: DataViewSpec): String = {
    s"${spec.getStateId}_dataview_backup"
  }

  /**
   * Creates RawValueData term which wraps the specific DataView backup term.
   */
  def createDataViewBackupRawValueTerm(spec: DataViewSpec): String = {
    s"${createDataViewBackupTerm(spec)}_raw_value"
  }

  /**
   * Registers member fields, open statements and cleanup statements for every
   * DataView used by the aggregates. For namespaced (windowed) state the
   * cleanup sets the current namespace before clearing; when
   * `enableBackupDataView` is set a second view over the same state is created
   * for merge scenarios (the backup view is never cleaned up here).
   */
  def addReusableStateDataViews(
      ctx: CodeGeneratorContext,
      viewSpecs: Array[DataViewSpec],
      hasNamespace: Boolean,
      enableBackupDataView: Boolean): Unit = {
    // add reusable dataviews to context
    viewSpecs.foreach { spec =>
      // state id is embedded in generated code as a quoted Java string literal
      val stateId = '"' + spec.getStateId + '"'
      val (viewTypeTerm, stateStoreCall) = spec match {
        case spec: ListViewSpec =>
          val viewTypeTerm = className[StateListView[_, _]]
          val elementSerializerTerm = addReusableDataViewSerializer(
            ctx,
            spec.getElementSerializer,
            () => spec.getElementDataType)
          val stateStoreCall =
            s"getStateListView($stateId, $elementSerializerTerm)"
          (viewTypeTerm, stateStoreCall)
        case spec: MapViewSpec =>
          val viewTypeTerm = className[StateMapView[_, _, _]]
          val withNullKey = spec.containsNullKey()
          val keySerializerTerm = addReusableDataViewSerializer(
            ctx,
            spec.getKeySerializer,
            () => spec.getKeyDataType
          )
          val valueSerializerTerm = addReusableDataViewSerializer(
            ctx,
            spec.getValueSerializer,
            () => spec.getValueDataType)
          val stateStoreCall =
            s"getStateMapView($stateId, $withNullKey, $keySerializerTerm, $valueSerializerTerm)"
          (viewTypeTerm, stateStoreCall)
      }

      val viewFieldTerm = createDataViewTerm(spec)
      val viewFieldInternalTerm = createDataViewRawValueTerm(spec)

      ctx.addReusableMember(s"private $viewTypeTerm $viewFieldTerm;")
      ctx.addReusableMember(s"private $BINARY_RAW_VALUE $viewFieldInternalTerm;")

      val openCode =
        s"""
           |$viewFieldTerm = ($viewTypeTerm) $STORE_TERM.$stateStoreCall;
           |$viewFieldInternalTerm = $BINARY_RAW_VALUE.fromObject($viewFieldTerm);
        """.stripMargin
      ctx.addReusableOpenStatement(openCode)

      // only cleanup dataview term, do not need to cleanup backup
      val cleanupCode = if (hasNamespace) {
        s"""
           |$viewFieldTerm.setCurrentNamespace($NAMESPACE_TERM);
           |$viewFieldTerm.clear();
        """.stripMargin
      } else {
        s"""
           |$viewFieldTerm.clear();
        """.stripMargin
      }
      ctx.addReusableCleanupStatement(cleanupCode)

      // generate backup dataview codes
      if (enableBackupDataView) {
        val backupViewTerm = createDataViewBackupTerm(spec)
        val backupViewInternalTerm = createDataViewBackupRawValueTerm(spec)
        // create backup dataview
        ctx.addReusableMember(s"private $viewTypeTerm $backupViewTerm;")
        ctx.addReusableMember(s"private $BINARY_RAW_VALUE $backupViewInternalTerm;")
        val backupOpenCode =
          s"""
             |$backupViewTerm = ($viewTypeTerm) $STORE_TERM.$stateStoreCall;
             |$backupViewInternalTerm = $BINARY_RAW_VALUE.fromObject($backupViewTerm);
          """.stripMargin
        ctx.addReusableOpenStatement(backupOpenCode)
      }
    }
  }

  /**
   * Registers a serializer for a DataView and returns the term referencing it.
   * A legacy TypeSerializer (if present) is added as a reusable object;
   * otherwise an external serializer is derived from the data type.
   */
  private def addReusableDataViewSerializer(
      ctx: CodeGeneratorContext,
      legacySerializer: Optional[TypeSerializer[_]],
      dataType: () => DataType)
    : String = {
    toScala(legacySerializer) match {
      case Some(serializer) =>
        ctx.addReusableObject(serializer, "serializer")
      case None =>
        ctx.addReusableExternalSerializer(dataType())
    }
  }
}
| StephanEwen/incubator-flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/codegen/agg/AggsHandlerCodeGenerator.scala | Scala | apache-2.0 | 47,026 |
package example
import akka.actor.{ Actor, ActorSystem, Props, ActorLogging }
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server.Directives
import akka.pattern.ask
import akka.stream.ActorMaterializer
import akka.util.Timeout
import de.heikoseeberger.akkahttpcirce.FailFastCirceSupport
import scala.concurrent.Future
import scala.concurrent.duration._
import scala.io.StdIn
// Actor protocol for the auction:
// Bid     — place an offer on behalf of a user (fire-and-forget).
case class Bid(userId: String, offer: Int)
// GetBids — request the bids collected so far; answered with a Bids message.
case object GetBids
// Bids    — reply carrying a snapshot of all bids in arrival order.
case class Bids(bids: List[Bid])
/**
 * Actor that collects bids for a single auction.
 * Bids are appended in arrival order; `GetBids` replies with the current list.
 * Any other message is logged and dropped.
 */
class Auction extends Actor with ActorLogging {
  var bids = List.empty[Bid]

  def receive = {
    case newBid: Bid =>
      bids = bids :+ newBid
      log.info(s"Bid complete: ${newBid.userId}, ${newBid.offer}")
    case GetBids =>
      sender() ! Bids(bids)
    case _ =>
      log.info("Invalid message")
  }
}
/**
 * Entry point: starts an HTTP server on localhost:8080 exposing a single
 * `/auction` route. PUT places a bid (fire-and-forget tell to the actor);
 * GET asks the actor for the current bids and marshals them as JSON.
 * The server runs until RETURN is pressed, then unbinds and terminates
 * the actor system.
 */
object Auction {
  // Declared result type: the deprecated procedure syntax `def main(...) { }`
  // is replaced with an explicit `: Unit =`.
  def main(args: Array[String]): Unit = {
    import Directives._
    import FailFastCirceSupport._
    import io.circe.generic.auto._

    implicit val system = ActorSystem()
    implicit val materializer = ActorMaterializer()
    // needed for the future flatMap/onComplete in the end
    implicit val executionContext = system.dispatcher

    val auction = system.actorOf(Props[Auction], "auction")

    val route =
      path("auction") {
        concat(
          put {
            parameter("bid".as[Int], "user") { (bid, user) =>
              // place a bid, fire-and-forget
              auction ! Bid(user, bid)
              complete((StatusCodes.Accepted, "bid placed"))
            }
          },
          get {
            implicit val timeout: Timeout = 5.seconds
            // query the actor for the current auction state
            val bids: Future[Bids] = (auction ? GetBids).mapTo[Bids]
            complete(bids)
          }
        )
      }

    val bindingFuture = Http().bindAndHandle(route, "localhost", 8080)
    println(s"Server online at http://localhost:8080/\\nPress RETURN to stop...")
    StdIn.readLine() // let it run until user presses return
    bindingFuture
      .flatMap(_.unbind()) // trigger unbinding from the port
      .onComplete(_ => system.terminate()) // and shutdown when done
  }
}
| t-mochizuki/scala-study | akka-http-example/src/main/scala/example/Auction.scala | Scala | mit | 2,181 |
/* Copyright 2009-2016 EPFL, Lausanne */
/**
 * Naive doubly-recursive Fibonacci over BigInt, used as a termination
 * benchmark. `fib(0) = 0`, `fib(1) = 1`, `fib(n) = fib(n-1) + fib(n-2)`.
 */
object Fibonacci {

  def fib(x: BigInt) : BigInt = {
    require(x >= 0)
    // base cases 0 and 1 return x itself; otherwise recurse on both
    // predecessors (addition order is irrelevant for the sum)
    if (x <= 1) x
    else fib(x - 2) + fib(x - 1)
  }

  // requires that fib is universally quantified to work...
  def check() : Boolean = {
    fib(5) == BigInt(5)
  } ensuring(_ == true)
}
| regb/leon | src/test/resources/regression/termination/valid/Fibonacci.scala | Scala | gpl-3.0 | 331 |
package edu.gemini.model.p1.immutable
trait TrecsBlueprintBase extends GeminiBlueprintBase {
def instrument = Instrument.Trecs
} | spakzad/ocs | bundle/edu.gemini.model.p1/src/main/scala/edu/gemini/model/p1/immutable/TrecsBlueprintBase.scala | Scala | bsd-3-clause | 131 |
package urlshortener
import scala.concurrent.duration._
import io.gatling.core.Predef._
import io.gatling.http.Predef._
/**
 * Shared configuration for the url-shortener simulations.
 */
object Options {
  /** Target base URL; resolution fails fast if GATLING_ENDPOINT is not set. */
  val endpoint: String = sys.env.apply("GATLING_ENDPOINT")
}
| roadrunners/gatling-tests | simulations/urlshortener/Options.scala | Scala | bsd-3-clause | 186 |
import stainless.collection._
import stainless.lang._
// Should work with --full-imperative even though we don't mention AnyHeapRef
// Regression object: a no-op generic function over a stainless List.
// Verifies (per the note above) that --full-imperative handles code that
// never mentions AnyHeapRef.
object StainlessIssueFI {
  // Intentionally does nothing; only the signature matters for the check.
  def foo[T](l: List[T]): Unit = {
    ()
  }
}
| epfl-lara/stainless | frontends/benchmarks/full-imperative/valid/NoAnyHeapRef.scala | Scala | apache-2.0 | 207 |
/*§
===========================================================================
KnapScal - Core
===========================================================================
Copyright (C) 2015-2016 Gianluca Costa
===========================================================================
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
===========================================================================
*/
package info.gianlucacosta.knapscal.knapsack
/**
 * Canonical knapsack problem instances used by the tests.
 * Each Problem pairs a sequence of Item(profit, weight) with a capacity;
 * the names reference the exercises they were taken from (III.8 and III.9).
 */
object TestProblems {
  // Capacity 16, six items.
  val problem_III_8 = new Problem(
    Seq(
      Item(28, 12),
      Item(23, 8),
      Item(11, 4),
      Item(6, 4),
      Item(2, 2),
      Item(3, 1)
    ),

    16
  )

  // Capacity 20, five items.
  val problem_III_9 = new Problem(
    Seq(
      Item(52, 17),
      Item(40, 5),
      Item(38, 13),
      Item(9, 3),
      Item(1, 1)
    ),

    20
  )
}
| giancosta86/KnapScal-core | src/test/scala/info/gianlucacosta/knapscal/knapsack/TestProblems.scala | Scala | apache-2.0 | 1,324 |
// Copyright (c) 2011 Thomas Suckow
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// which accompanies this distribution, and is available at
// http://www.eclipse.org/legal/epl-v10.html
package net.codingwell.weave.languages.silk.ast
// ---------------------------------------------------------------------------
// Abstract syntax tree for the Silk language.
// A File is a sequence of global statements (modules, imports, type decls).
// ---------------------------------------------------------------------------

case class File( val members:Seq[GlobalStatement])

sealed abstract class Statement
class GlobalStatement() extends Statement {}

//case class Module( val identifier:Identifier ) extends GlobalStatement {}
case class Module( val identifier:Identifier, val parameters:Seq[Parameter] , val scope:Scope ) extends GlobalStatement {}
class ImportStatement( val packagespec:PackageSpecification ) extends GlobalStatement {}
class TypeDeclaration( val typespec:TypeSpecification ) extends GlobalStatement {}
class TypeIsDeclaration( typespec:TypeSpecification ) extends TypeDeclaration( typespec ) {}
class TypeExtendsDeclaration( typespec:TypeSpecification ) extends TypeDeclaration( typespec ) {}

// Type references: plain named types, literals, numbers, and arrays thereof.
class TypeSpecification() {}
case class PlainType( val indentifier:Identifier ) extends TypeSpecification {}
case class LiteralType() extends TypeSpecification {}
case class NumberType() extends TypeSpecification {}
case class ArrayType( val basetype:TypeSpecification ) extends TypeSpecification {}

// Import and ImportViral are plain classes with hand-written companions
// providing apply/unapply (case-class-like pattern matching).
object Import extends Function1[PackageSpecification,Import] {
  def apply( packagespec:PackageSpecification ):Import = new Import( packagespec )
  def unapply( i:Import ):Option[PackageSpecification] = Some(i.packagespec)
}

class Import( packagespec:PackageSpecification ) extends ImportStatement( packagespec ) {}

object ImportViral extends Function1[PackageSpecification,ImportViral] {
  def apply( packagespec:PackageSpecification ) = new ImportViral( packagespec )
  def unapply( i:ImportViral ):Option[PackageSpecification] = Some(i.packagespec)
}

class ImportViral( packagespec:PackageSpecification ) extends ImportStatement( packagespec ) {}

case class PackageSpecification( val identifiers:Seq[Identifier] ) {}

case class Parameter( val direction:Direction, val typespec:TypeSpecification, val identifier:Identifier ) {}
case class Identifier( val name:String ) extends SimpleExpression {}
case class Direction( val value:String ) {} //TODO:Should this have subclasses for directions?
case class Scope( val statements:Seq[Statement] ) extends Statement {}

// Expression nodes: an Expression is a simple head plus an optional chain
// (member dereference or array indexing) hanging off it.
case class ExpressionStatement( val body:ExpressionGroup ) extends Statement {}
case class ExpressionGroup( val expressions:Seq[Expression] ) extends SimpleExpression {}
case class Expression( val simple:SimpleExpression, val next:Option[ChainExpression] ) {}
sealed abstract class SimpleExpression {}
sealed abstract class ChainExpression {}
case class MemberDereference( val member:Identifier, val next:Option[ChainExpression] ) extends ChainExpression {}
case class ArrayExpression( val index:ExpressionGroup, val next:Option[ChainExpression] ) extends ChainExpression {}
//case class ArrayExpression( val base:Expression, val index:Expression ) extends Expression {}
//case class MemberDereference( val base:Expression, val member:Identifier ) extends Expression {}

// Statement-level constructs: instantiation, loops and conditionals.
case class Instantiation( instancetype:TypeSpecification, val identifier:Identifier ) extends Statement {}
case class ForLoop( val init:Expression, val conditional:Expression, val post:Expression, val body:Statement ) extends Statement {}
case class WhileLoop( val conditional:Expression, val body:Statement ) extends Statement {}
case class IfElse( val conditional:Expression, val body_true:Statement, val body_false:Statement ) extends Statement {}
| codingwell/Weave | lang-silk/src/main/scala/net/codingwell/weave/languages/silk/ast/ast.scala | Scala | epl-1.0 | 3,629 |
import scala.deriving.Mirror
// Negative compilation test: refining a Mirror with element types / labels
// that do not match the mirrored tuple must make the summon fail.
// The `// error` markers are checked by the test harness and must stay on
// the same lines as the failing summons.
object Test {
  summon[Mirror.Of[(Int, String)] {
    type MirroredElemTypes = (Int, Int, Int)
  }] // error
  // MirroredElemTypes missmatch, expected: (Int, String), found: (Int, Int, Int).

  summon[Mirror.Of[(Int, String)] {
    type MirroredElemLabels = ("_1", "_2", "_3")
  }] // error
  // MirroredElemLabels missmatch, expected: (("_1" : String), ("_2" : String)),
  // found: (("_1" : String), ("_2" : String), ("_3" : String)).
}
| dotty-staging/dotty | tests/neg/7380.scala | Scala | apache-2.0 | 471 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming
import java.io.File
import java.nio.ByteBuffer
import scala.collection.mutable.ArrayBuffer
import scala.concurrent.duration._
import scala.language.{implicitConversions, postfixOps}
import scala.util.Random
import org.apache.hadoop.conf.Configuration
import org.mockito.Matchers.any
import org.mockito.Mockito.{doThrow, reset, spy}
import org.scalatest.{BeforeAndAfter, Matchers}
import org.scalatest.concurrent.Eventually._
import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.internal.Logging
import org.apache.spark.storage.StreamBlockId
import org.apache.spark.streaming.receiver.BlockManagerBasedStoreResult
import org.apache.spark.streaming.scheduler.{AllocatedBlocks, _}
import org.apache.spark.streaming.util._
import org.apache.spark.streaming.util.WriteAheadLogSuite._
import org.apache.spark.util.{Clock, ManualClock, SystemClock, Utils}
class ReceivedBlockTrackerSuite
extends SparkFunSuite with BeforeAndAfter with Matchers with Logging {
// Shared test fixtures.
val hadoopConf = new Configuration()
// single receiver stream id used by all tests
val streamId = 1
// trackers created via createTracker, stopped in after()
var allReceivedBlockTrackers = new ArrayBuffer[ReceivedBlockTracker]()
var checkpointDirectory: File = null
var conf: SparkConf = null

before {
  // fresh conf and checkpoint dir per test
  conf = new SparkConf().setMaster("local[2]").setAppName("ReceivedBlockTrackerSuite")
  checkpointDirectory = Utils.createTempDir()
}

after {
  // stop every tracker and wipe the checkpoint dir
  allReceivedBlockTrackers.foreach { _.stop() }
  Utils.deleteRecursively(checkpointDirectory)
}
// Without a checkpoint dir the WAL is off; verifies blocks move from the
// unallocated queue into batch allocations and that re-allocating an old
// batch is a no-op returning the originally allocated blocks.
test("block addition, and block to batch allocation") {
  val receivedBlockTracker = createTracker(setCheckpointDir = false)
  receivedBlockTracker.isWriteAheadLogEnabled should be (false) // should be disable by default
  receivedBlockTracker.getUnallocatedBlocks(streamId) shouldEqual Seq.empty

  val blockInfos = generateBlockInfos()
  blockInfos.map(receivedBlockTracker.addBlock)

  // Verify added blocks are unallocated blocks
  receivedBlockTracker.getUnallocatedBlocks(streamId) shouldEqual blockInfos
  receivedBlockTracker.hasUnallocatedReceivedBlocks should be (true)

  // Allocate the blocks to a batch and verify that all of them have been allocated
  receivedBlockTracker.allocateBlocksToBatch(1)
  receivedBlockTracker.getBlocksOfBatchAndStream(1, streamId) shouldEqual blockInfos
  receivedBlockTracker.getBlocksOfBatch(1) shouldEqual Map(streamId -> blockInfos)
  receivedBlockTracker.getUnallocatedBlocks(streamId) shouldBe empty
  receivedBlockTracker.hasUnallocatedReceivedBlocks should be (false)

  // Allocate no blocks to another batch
  receivedBlockTracker.allocateBlocksToBatch(2)
  receivedBlockTracker.getBlocksOfBatchAndStream(2, streamId) shouldBe empty
  receivedBlockTracker.getBlocksOfBatch(2) shouldEqual Map(streamId -> Seq.empty)

  // Verify that older batches have no operation on batch allocation,
  // will return the same blocks as previously allocated.
  receivedBlockTracker.allocateBlocksToBatch(1)
  receivedBlockTracker.getBlocksOfBatchAndStream(1, streamId) shouldEqual blockInfos

  blockInfos.map(receivedBlockTracker.addBlock)
  receivedBlockTracker.allocateBlocksToBatch(2)
  receivedBlockTracker.getBlocksOfBatchAndStream(2, streamId) shouldBe empty
  receivedBlockTracker.getUnallocatedBlocks(streamId) shouldEqual blockInfos
}
// Writes addition events for two groups of blocks but an allocation event for
// only the first group, then recovers into a new tracker: allocated blocks
// must be restored as allocated, the rest back in the received queue.
test("recovery with write ahead logs should remove only allocated blocks from received queue") {
  val manualClock = new ManualClock
  val batchTime = manualClock.getTimeMillis()

  val tracker1 = createTracker(clock = manualClock)
  tracker1.isWriteAheadLogEnabled should be (true)

  val allocatedBlockInfos = generateBlockInfos()
  val unallocatedBlockInfos = generateBlockInfos()
  val receivedBlockInfos = allocatedBlockInfos ++ unallocatedBlockInfos
  receivedBlockInfos.foreach { b => tracker1.writeToLog(BlockAdditionEvent(b)) }
  val allocatedBlocks = AllocatedBlocks(Map(streamId -> allocatedBlockInfos))
  tracker1.writeToLog(BatchAllocationEvent(batchTime, allocatedBlocks))
  tracker1.stop()

  val tracker2 = createTracker(clock = manualClock, recoverFromWriteAheadLog = true)
  tracker2.getBlocksOfBatch(batchTime) shouldEqual allocatedBlocks.streamIdToAllocatedBlocks
  tracker2.getUnallocatedBlocks(streamId) shouldEqual unallocatedBlockInfos
  tracker2.stop()
}
// Forces the BatchAllocationEvent WAL write to throw via a Mockito spy:
// on failure the blocks must remain in the received queue; after resetting
// the spy, allocation succeeds and survives recovery from the WAL.
test("block allocation to batch should not loose blocks from received queue") {
  val tracker1 = spy(createTracker())
  tracker1.isWriteAheadLogEnabled should be (true)
  tracker1.getUnallocatedBlocks(streamId) shouldEqual Seq.empty

  // Add blocks
  val blockInfos = generateBlockInfos()
  blockInfos.map(tracker1.addBlock)
  tracker1.getUnallocatedBlocks(streamId) shouldEqual blockInfos

  // Try to allocate the blocks to a batch and verify that it's failing
  // The blocks should stay in the received queue when WAL write failing
  doThrow(new RuntimeException("Not able to write BatchAllocationEvent"))
    .when(tracker1).writeToLog(any(classOf[BatchAllocationEvent]))
  val errMsg = intercept[RuntimeException] {
    tracker1.allocateBlocksToBatch(1)
  }
  assert(errMsg.getMessage === "Not able to write BatchAllocationEvent")
  tracker1.getUnallocatedBlocks(streamId) shouldEqual blockInfos
  tracker1.getBlocksOfBatch(1) shouldEqual Map.empty
  tracker1.getBlocksOfBatchAndStream(1, streamId) shouldEqual Seq.empty

  // Allocate the blocks to a batch and verify that all of them have been allocated
  reset(tracker1)
  tracker1.allocateBlocksToBatch(2)
  tracker1.getUnallocatedBlocks(streamId) shouldEqual Seq.empty
  tracker1.hasUnallocatedReceivedBlocks should be (false)
  tracker1.getBlocksOfBatch(2) shouldEqual Map(streamId -> blockInfos)
  tracker1.getBlocksOfBatchAndStream(2, streamId) shouldEqual blockInfos

  tracker1.stop()

  // Recover from WAL to see the correctness
  val tracker2 = createTracker(recoverFromWriteAheadLog = true)
  tracker2.getUnallocatedBlocks(streamId) shouldEqual Seq.empty
  tracker2.hasUnallocatedReceivedBlocks should be (false)
  tracker2.getBlocksOfBatch(2) shouldEqual Map(streamId -> blockInfos)
  tracker2.getBlocksOfBatchAndStream(2, streamId) shouldEqual blockInfos
  tracker2.stop()
}
// End-to-end WAL lifecycle: add blocks, restart with and without recovery,
// allocate two batches across rotated log files, clean up the first batch
// and verify old log files disappear while the second batch survives a
// final recovery. A ManualClock drives log-file rotation deterministically.
test("recovery and cleanup with write ahead logs") {
  val manualClock = new ManualClock
  // Set the time increment level to twice the rotation interval so that every increment creates
  // a new log file

  def incrementTime() {
    val timeIncrementMillis = 2000L
    manualClock.advance(timeIncrementMillis)
  }

  // Generate and add blocks to the given tracker
  def addBlockInfos(tracker: ReceivedBlockTracker): Seq[ReceivedBlockInfo] = {
    val blockInfos = generateBlockInfos()
    blockInfos.map(tracker.addBlock)
    blockInfos
  }

  // Print the data present in the log ahead files in the log directory
  def printLogFiles(message: String) {
    val fileContents = getWriteAheadLogFiles().map { file =>
      (s"\\n>>>>> $file: <<<<<\\n${getWrittenLogData(file).mkString("\\n")}")
    }.mkString("\\n")
    logInfo(s"\\n\\n=====================\\n$message\\n$fileContents\\n=====================\\n")
  }

  // Set WAL configuration
  conf.set("spark.streaming.driver.writeAheadLog.rollingIntervalSecs", "1")
  require(WriteAheadLogUtils.getRollingIntervalSecs(conf, isDriver = true) === 1)

  // Start tracker and add blocks
  val tracker1 = createTracker(clock = manualClock)
  tracker1.isWriteAheadLogEnabled should be (true)
  val blockInfos1 = addBlockInfos(tracker1)
  tracker1.getUnallocatedBlocks(streamId).toList shouldEqual blockInfos1

  // Verify whether write ahead log has correct contents
  val expectedWrittenData1 = blockInfos1.map(BlockAdditionEvent)
  getWrittenLogData() shouldEqual expectedWrittenData1
  getWriteAheadLogFiles() should have size 1
  tracker1.stop()

  incrementTime()

  // Recovery without recovery from WAL and verify list of unallocated blocks is empty
  val tracker1_ = createTracker(clock = manualClock, recoverFromWriteAheadLog = false)
  tracker1_.getUnallocatedBlocks(streamId) shouldBe empty
  tracker1_.hasUnallocatedReceivedBlocks should be (false)
  tracker1_.stop()

  // Restart tracker and verify recovered list of unallocated blocks
  val tracker2 = createTracker(clock = manualClock, recoverFromWriteAheadLog = true)
  val unallocatedBlocks = tracker2.getUnallocatedBlocks(streamId).toList
  unallocatedBlocks shouldEqual blockInfos1
  unallocatedBlocks.foreach { block =>
    block.isBlockIdValid() should be (false)
  }

  // Allocate blocks to batch and verify whether the unallocated blocks got allocated
  val batchTime1 = manualClock.getTimeMillis()
  tracker2.allocateBlocksToBatch(batchTime1)
  tracker2.getBlocksOfBatchAndStream(batchTime1, streamId) shouldEqual blockInfos1
  tracker2.getBlocksOfBatch(batchTime1) shouldEqual Map(streamId -> blockInfos1)

  // Add more blocks and allocate to another batch
  incrementTime()
  val batchTime2 = manualClock.getTimeMillis()
  val blockInfos2 = addBlockInfos(tracker2)
  tracker2.allocateBlocksToBatch(batchTime2)
  tracker2.getBlocksOfBatchAndStream(batchTime2, streamId) shouldEqual blockInfos2
  tracker2.stop()

  // Verify whether log has correct contents
  val expectedWrittenData2 = expectedWrittenData1 ++
    Seq(createBatchAllocation(batchTime1, blockInfos1)) ++
    blockInfos2.map(BlockAdditionEvent) ++
    Seq(createBatchAllocation(batchTime2, blockInfos2))
  getWrittenLogData() shouldEqual expectedWrittenData2

  // Restart tracker and verify recovered state
  incrementTime()
  val tracker3 = createTracker(clock = manualClock, recoverFromWriteAheadLog = true)
  tracker3.getBlocksOfBatchAndStream(batchTime1, streamId) shouldEqual blockInfos1
  tracker3.getBlocksOfBatchAndStream(batchTime2, streamId) shouldEqual blockInfos2
  tracker3.getUnallocatedBlocks(streamId) shouldBe empty

  // Cleanup first batch but not second batch
  val oldestLogFile = getWriteAheadLogFiles().head
  incrementTime()
  tracker3.cleanupOldBatches(batchTime2, waitForCompletion = true)

  // Verify that the batch allocations have been cleaned, and the act has been written to log
  tracker3.getBlocksOfBatchAndStream(batchTime1, streamId) shouldEqual Seq.empty
  getWrittenLogData(getWriteAheadLogFiles().last) should contain(createBatchCleanup(batchTime1))

  // Verify that at least one log file gets deleted
  eventually(timeout(10 seconds), interval(10 millisecond)) {
    getWriteAheadLogFiles() should not contain oldestLogFile
  }
  printLogFiles("After clean")
  tracker3.stop()

  // Restart tracker and verify recovered state, specifically whether info about the first
  // batch has been removed, but not the second batch
  incrementTime()
  val tracker4 = createTracker(clock = manualClock, recoverFromWriteAheadLog = true)
  tracker4.getUnallocatedBlocks(streamId) shouldBe empty
  tracker4.getBlocksOfBatchAndStream(batchTime1, streamId) shouldBe empty // should be cleaned
  tracker4.getBlocksOfBatchAndStream(batchTime2, streamId) shouldEqual blockInfos2
  tracker4.stop()
}
// With no checkpoint directory configured the tracker must not enable a WAL.
test("disable write ahead log when checkpoint directory is not set") {
  // When checkpoint is disabled, then the write ahead log is disabled
  val tracker1 = createTracker(setCheckpointDir = false)
  tracker1.isWriteAheadLogEnabled should be (false)
}
test("parallel file deletion in FileBasedWriteAheadLog is robust to deletion error") {
  // Force one WAL file per second so each manual write below lands in its own file.
  conf.set("spark.streaming.driver.writeAheadLog.rollingIntervalSecs", "1")
  require(WriteAheadLogUtils.getRollingIntervalSecs(conf, isDriver = true) === 1)

  val addBlocks = generateBlockInfos()
  val batch1 = addBlocks.slice(0, 1)
  val batch2 = addBlocks.slice(1, 3)
  val batch3 = addBlocks.slice(3, addBlocks.length)

  assert(getWriteAheadLogFiles().length === 0)

  // list of timestamps for files
  val t = Seq.tabulate(5)(i => i * 1000)

  writeEventsManually(getLogFileName(t(0)), Seq(createBatchCleanup(t(0))))
  assert(getWriteAheadLogFiles().length === 1)

  // The goal is to create several log files which should have been cleaned up.
  // If we face any issue during recovery, because these old files exist, then we need to make
  // deletion more robust rather than a parallelized operation where we fire and forget
  val batch1Allocation = createBatchAllocation(t(1), batch1)
  writeEventsManually(getLogFileName(t(1)), batch1.map(BlockAdditionEvent) :+ batch1Allocation)
  writeEventsManually(getLogFileName(t(2)), Seq(createBatchCleanup(t(1))))
  val batch2Allocation = createBatchAllocation(t(3), batch2)
  writeEventsManually(getLogFileName(t(3)), batch2.map(BlockAdditionEvent) :+ batch2Allocation)
  // batch3 has additions only, no allocation event yet.
  writeEventsManually(getLogFileName(t(4)), batch3.map(BlockAdditionEvent))

  // We should have 5 different log files as we called `writeEventsManually` with 5 different
  // timestamps
  assert(getWriteAheadLogFiles().length === 5)

  // Create the tracker to recover from the log files. We're going to ask the tracker to clean
  // things up, and then we're going to rewrite that data, and recover using a different tracker.
  // They should have identical data no matter what
  val tracker = createTracker(recoverFromWriteAheadLog = true, clock = new ManualClock(t(4)))

  // Asserts that `subject` recovered the same per-batch state as `base`.
  def compareTrackers(base: ReceivedBlockTracker, subject: ReceivedBlockTracker): Unit = {
    subject.getBlocksOfBatchAndStream(t(3), streamId) should be(
      base.getBlocksOfBatchAndStream(t(3), streamId))
    subject.getBlocksOfBatchAndStream(t(1), streamId) should be(
      base.getBlocksOfBatchAndStream(t(1), streamId))
    subject.getBlocksOfBatchAndStream(t(0), streamId) should be(Nil)
  }

  // ask the tracker to clean up some old files
  tracker.cleanupOldBatches(t(3), waitForCompletion = true)
  assert(getWriteAheadLogFiles().length === 3)

  val tracker2 = createTracker(recoverFromWriteAheadLog = true, clock = new ManualClock(t(4)))
  compareTrackers(tracker, tracker2)

  // rewrite first file
  writeEventsManually(getLogFileName(t(0)), Seq(createBatchCleanup(t(0))))
  assert(getWriteAheadLogFiles().length === 4)
  // make sure trackers are consistent
  val tracker3 = createTracker(recoverFromWriteAheadLog = true, clock = new ManualClock(t(4)))
  compareTrackers(tracker, tracker3)

  // rewrite second file
  writeEventsManually(getLogFileName(t(1)), batch1.map(BlockAdditionEvent) :+ batch1Allocation)
  assert(getWriteAheadLogFiles().length === 5)
  // make sure trackers are consistent
  val tracker4 = createTracker(recoverFromWriteAheadLog = true, clock = new ManualClock(t(4)))
  compareTrackers(tracker, tracker4)
}
/**
 * Create tracker object with the optional provided clock. Use fake clock if you
 * want to control time by manually incrementing it to test log clean.
 *
 * @param setCheckpointDir when false, no checkpoint dir is passed, which disables the WAL
 * @param recoverFromWriteAheadLog whether the tracker should replay existing WAL files on start
 * @param clock pass a ManualClock to control time in tests; defaults to the system clock
 * @return the new tracker; it is also registered in `allReceivedBlockTrackers`
 *         so it can be stopped during suite teardown
 */
def createTracker(
    setCheckpointDir: Boolean = true,
    recoverFromWriteAheadLog: Boolean = false,
    clock: Clock = new SystemClock): ReceivedBlockTracker = {
  val cpDirOption = if (setCheckpointDir) Some(checkpointDirectory.toString) else None
  // `val` rather than `var`: the reference is never reassigned.
  val tracker = new ReceivedBlockTracker(
    conf, hadoopConf, Seq(streamId), clock, recoverFromWriteAheadLog, cpDirOption)
  allReceivedBlockTrackers += tracker
  tracker
}
/** Generate blocks infos using random ids */
def generateBlockInfos(): Seq[ReceivedBlockInfo] = {
  // Five infos for the configured stream, each with a fresh random block id.
  Seq.fill(5) {
    val blockId = StreamBlockId(streamId, math.abs(Random.nextInt))
    ReceivedBlockInfo(streamId, Some(0L), None, BlockManagerBasedStoreResult(blockId, Some(0L)))
  }
}
/**
 * Write received block tracker events to a file manually, bypassing the tracker's
 * own WAL machinery. Each record is a length-prefixed serialized event.
 */
def writeEventsManually(filePath: String, events: Seq[ReceivedBlockTrackerLogEvent]): Unit = {
  val out = HdfsUtils.getOutputStream(filePath, hadoopConf)
  for (event <- events) {
    val serialized = Utils.serialize(event)
    // Length prefix first, then the payload, mirroring the WAL record format.
    out.writeInt(serialized.size)
    out.write(serialized)
  }
  out.close()
}
/** Get all the data written in the given write ahead log file. */
def getWrittenLogData(logFile: String): Seq[ReceivedBlockTrackerLogEvent] = {
  // Convenience overload: delegates to the Seq-based reader in this suite.
  getWrittenLogData(Seq(logFile))
}
/**
 * Get the log file name for the given log start time. The file name encodes the
 * interval [time, time + rollingIntervalSecs * 1000] just like the real WAL writer.
 */
def getLogFileName(time: Long, rollingIntervalSecs: Int = 1): String = {
  val endTime = time + rollingIntervalSecs * 1000
  Seq(checkpointDirectory.toString, "receivedBlockMetadata", s"log-$time-$endTime")
    .mkString(File.separator)
}
/**
 * Get all the data written in the given write ahead log files. By default, it will read all
 * files in the test log directory.
 *
 * When WAL batching is enabled, every on-disk record is itself an array of serialized
 * events, so each record is first deserialized to Array[Array[Byte]] and then each
 * element is deserialized to a ReceivedBlockTrackerLogEvent; otherwise one record
 * corresponds to exactly one event.
 */
def getWrittenLogData(logFiles: Seq[String] = getWriteAheadLogFiles)
  : Seq[ReceivedBlockTrackerLogEvent] = {
  logFiles.flatMap {
    file => new FileBasedWriteAheadLogReader(file, hadoopConf).toSeq
  }.flatMap { byteBuffer =>
    // Unwrap batched records into individual event buffers when batching is on.
    val validBuffer = if (WriteAheadLogUtils.isBatchingEnabled(conf, isDriver = true)) {
      Utils.deserialize[Array[Array[Byte]]](byteBuffer.array()).map(ByteBuffer.wrap)
    } else {
      Array(byteBuffer)
    }
    validBuffer.map(b => Utils.deserialize[ReceivedBlockTrackerLogEvent](b.array()))
  }.toList
}
/** Get all the write ahead log files in the test directory */
def getWriteAheadLogFiles(): Seq[String] = {
  import ReceivedBlockTracker._
  // The WAL directory location is derived from the checkpoint dir the same way
  // the tracker itself derives it (checkpointDirToLogDir).
  val logDir = checkpointDirToLogDir(checkpointDirectory.toString)
  getLogFilesInDirectory(logDir).map { _.toString }
}
/** Create batch allocation object from the given info */
def createBatchAllocation(time: Long, blockInfos: Seq[ReceivedBlockInfo])
  : BatchAllocationEvent = {
  // All blocks are allocated to this suite's single stream id.
  val allocated = AllocatedBlocks(Map(streamId -> blockInfos))
  BatchAllocationEvent(time, allocated)
}
/** Create batch clean object from the given info; accepts one or more batch times. */
def createBatchCleanup(time: Long, moreTimes: Long*): BatchCleanupEvent = {
  val cleanupTimes = (time +: moreTimes).map(Time.apply)
  BatchCleanupEvent(cleanupTimes)
}
// Implicit conversions so test code can pass raw millisecond Longs where the
// streaming API expects Time values, and vice versa.
implicit def millisToTime(milliseconds: Long): Time = Time(milliseconds)
implicit def timeToMillis(time: Time): Long = time.milliseconds
}
| tejasapatil/spark | streaming/src/test/scala/org/apache/spark/streaming/ReceivedBlockTrackerSuite.scala | Scala | apache-2.0 | 19,022 |
package filodb.cassandra.metastore
import com.typesafe.config.ConfigFactory
import org.scalatest.concurrent.PatienceConfiguration.Timeout
import org.scalatest.FlatSpec
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.language.postfixOps
import filodb.cassandra.AsyncTest
import filodb.core._
import filodb.core.metadata.{Column, DataColumn}
// Integration spec for ColumnTable against a live Cassandra keyspace; connection
// settings come from application_test.conf ("filodb.cassandra" section).
class ColumnTableSpec extends FlatSpec with AsyncTest {
  import Column.ColumnType
  import scala.concurrent.ExecutionContext.Implicits.global

  val firstColumn = DataColumn(0, "first", "foo", 1, ColumnType.StringColumn)
  val fooRef = DatasetRef("foo", Some("unittest2"))
  val config = ConfigFactory.load("application_test.conf").getConfig("filodb.cassandra")
  val columnTable = new ColumnTable(config)
  // Generous timeout: schema operations on a fresh Cassandra node can be slow.
  val timeout = Timeout(30 seconds)

  // First create the columns table
  override def beforeAll() {
    super.beforeAll()
    columnTable.createKeyspace(columnTable.keyspace)
    columnTable.initialize().futureValue(timeout)
  }

  // Start every test from an empty columns table.
  before {
    columnTable.clearAll().futureValue(timeout)
  }

  "ColumnTable" should "return empty schema if a dataset does not exist in columns table" in {
    columnTable.getSchema(fooRef, 1).futureValue(timeout) should equal (Map())
  }

  it should "add the first column and read it back as a schema" in {
    // Explicit timeout added for consistency with the other futureValue calls.
    columnTable.insertColumns(Seq(firstColumn), fooRef).futureValue(timeout) should equal (Success)
    columnTable.getSchema(fooRef, 2).futureValue(timeout) should equal (Map("first" -> firstColumn))
    // Check that limiting the getSchema query to version 0 does not return the version 1 column
    columnTable.getSchema(fooRef, 0).futureValue(timeout) should equal (Map())
  }

  it should "return MetadataException if illegal column type encoded in Cassandra" in {
    // Bypass the API and write a bogus column type directly via CQL.
    columnTable.execCql(s"""INSERT INTO ${columnTable.tableString}
                           |(dataset, database, name, version, columntype, id) VALUES (
                           | 'bar', 'unittest2', 'age', 5, '_so_not_a_real_type', 0)""".stripMargin)
               .futureValue(timeout)
    val barRef = DatasetRef("bar", Some("unittest2"))
    columnTable.getSchema(barRef, 7).failed.futureValue(timeout) shouldBe a [MetadataException]
  }
}
} | markhamstra/FiloDB | cassandra/src/test/scala/filodb.cassandra/metastore/ColumnTableSpec.scala | Scala | apache-2.0 | 2,287 |
package com.wlangiewicz.workouttracker.api
import akka.http.scaladsl.model.StatusCodes._
import com.wlangiewicz.workouttracker.domain._
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._
// API tests for the report endpoints. `routes`, `validCredentials` and
// `validCredentialsUserWithoutWorkouts` are provided by ApiSpec.
class ReportApiSpec extends ApiSpec {
  it should "return report for all workouts" in {
    Get("/report/all") ~> validCredentials ~> routes ~> check {
      status shouldBe OK
      val report = responseAs[Report]
      report.totalDistanceMeters shouldBe 40000
    }
  }

  it should "not return report if user has no workouts" in {
    Get("/report/all") ~> validCredentialsUserWithoutWorkouts ~> routes ~> check {
      status shouldBe OK
      val weeklyReport = responseAs[Report]
      weeklyReport.totalDistanceMeters shouldBe 0
    }
  }

  it should "return weekly report for all workouts" in {
    Get("/report/weekly") ~> validCredentials ~> routes ~> check {
      status shouldBe OK
      // Keys are "<year>-<week>" strings, one entry per week with workouts.
      val report = responseAs[Map[String, Report]]
      report("2016-6").totalDistanceMeters shouldBe 30000
      report("2016-7").totalDistanceMeters shouldBe 10000
    }
  }

  it should "not return weekly report if user has no workouts" in {
    Get("/report/weekly") ~> validCredentialsUserWithoutWorkouts ~> routes ~> check {
      status shouldBe OK
      val report = responseAs[Map[String, Report]]
      report.isEmpty shouldBe true
    }
  }

  it should "return report for workouts in given range" in {
    // Range given as epoch-second bounds covering all fixture workouts.
    Get("/report/range/1355354589/1465354589") ~> validCredentials ~> routes ~> check {
      status shouldBe OK
      val report = responseAs[Report]
      report.totalDistanceMeters shouldBe 40000
    }
  }

  it should "return weekly report for workouts in given range" in {
    // NOTE(review): the description says "weekly report" but this hits the plain
    // range endpoint and decodes a single Report — likely a copy-paste; confirm intent.
    Get("/report/range/1/2") ~> validCredentials ~> routes ~> check {
      status shouldBe OK
      val report = responseAs[Report]
      report.totalDistanceMeters shouldBe 0
    }
  }
}
| wlk/workout-tracker-akka-http | src/test/scala/com/wlangiewicz/workouttracker/api/ReportApiSpec.scala | Scala | mit | 1,885 |
import co.technius.scalajs.mithril._
import org.scalatest._
import org.scalajs.dom
import scala.scalajs.js
// Smoke tests for the Scala.js Mithril facade; `mountApp` comes from TestUtils.
class MithrilTest extends FlatSpec with Matchers with TestUtils {
  "The facade" should "work" in {
    // if the mithril functions don't work, this test will fail
    m.version shouldNot be (js.undefined)
    val comp = Component.viewOnly[js.Object] { vnode =>
      m("div", "test")
    }
    mountApp(comp)
  }

  it should "mount components successfully" in {
    val comp = Component.viewOnly[js.Object] { vnode =>
      m("div", js.Array(
        m("span", js.Dynamic.literal(id = "should-appear"))
      ))
    }
    mountApp(comp)
    // The span rendered by the component must be reachable in the real DOM.
    dom.document.getElementById("should-appear") shouldNot be (null)
  }

  it should "support streams" in {
    val s = MithrilStream[Int](5)
    s() shouldBe (5)
    // Derived streams must update when the parent stream is written to.
    val child: MStream[Int] = s.map((_: Int) + 5)
    child() shouldBe (10)
    s() = 10
    child() shouldBe(15)

    // fold accumulates across successive writes.
    val nums = MithrilStream[Int](1)
    val sum = nums.fold(0)(_ + _)
    sum() shouldBe (1)
    nums() = 2
    sum() shouldBe (3)
    nums() = 3
    sum() shouldBe (6)

    // A stream created without an initial value reads back as None.
    val emptyStream = MithrilStream[String]()
    emptyStream.toOption shouldBe (None)
  }

  it should "ensure stream type signatures are sound" in {
    // Writing an Int through an MStream[Any] view of an MStream[String]
    // must be rejected at compile time.
    """
    val stream = MithrilStream[String]("foo")
    val stream2: MStream[Any] = stream
    stream2() = 5
    """.stripMargin shouldNot typeCheck
  }
}
| Technius/scalajs-mithril | tests/src/test/scala/MithrilTest.scala | Scala | mit | 1,408 |
package cdgp
import fuel.util.{Collector, Options, TRandom}
import scala.collection.mutable
/**
* Manages the set of test cases during evolution run. If test output is None, then
* the desired output for test's input is not known yet or can assume many possible values.
*
* @param newTests Set of counterexamples collected from the current generation. To be reset after each iteration.
*/
class TestsManagerCDGP[I,O](val tests: mutable.ArrayBuffer[(I, Option[O])],
                            val newTests: mutable.ArrayBuffer[(I, Option[O])],
                            val testsHistory: Boolean = false,
                            val printAddedTests: Boolean = false,
                            val saveTests: Boolean = false) {
  // Number of times flushHelpers has been invoked so far.
  private var flushNo = 0
  def getNumFlushes: Int = flushNo

  /**
   * Stores inputs of the tests used so that duplicates can be quickly detected when needed.
   * Tests from newTests are not taken into account.
   */
  private val keysIndex = mutable.HashSet[I](tests.map(_._1):_*)

  /** Stores the number of tests added after each use of flushHelpers. **/
  val history: mutable.Map[Int, Int] = mutable.Map[Int, Int]()

  /** Returns collected 'old' tests. */
  def getTests(): Seq[(I, Option[O])] = tests
  /** Returns collected 'new' tests. */
  def getNewTests(): Seq[(I, Option[O])] = newTests
  /** Returns both collected 'new' and 'old' tests. */
  def getAllCollectedTests: List[(I, Option[O])] = tests.toList ++: newTests.toList

  def getNumberOfTests: Int = tests.size
  // An output is "known" when it is Some(_); None means not yet established.
  def getNumberOfKnownOutputs: Int = tests.count{ case (in, out) => out.isDefined}
  def getNumberOfUnknownOutputs: Int = getNumberOfTests - getNumberOfKnownOutputs

  // Adds a batch of candidate tests: duplicates are first removed within the batch
  // itself, then each survivor is checked against the already accumulated tests.
  def addNewTests(ts: Seq[(I, Option[O])], allowInputDuplicates: Boolean = true, allowTestDuplicates: Boolean = false) {
    // Remove duplicates within the provided set of tests
    val ts2 = TestsManagerCDGP.removeDuplicates(ts, allowInputDuplicates, allowTestDuplicates)
    // Try to add tests and remove duplicates with the earlier accumulated tests
    ts2.foreach(addNewTest(_, allowInputDuplicates, allowTestDuplicates))
  }

  /** This method takes into account only newTests and inputs of the tests. Duplicates of already accepted tests are not checked. **/
  def addNewTest(t: (I, Option[O]), allowInputDuplicates: Boolean = true, allowTestDuplicates: Boolean = false) {
    //println("** Trying to add new test: " + t)
    (allowInputDuplicates, allowTestDuplicates) match {
      case (true, true) => newTests.append(t)
      case (false, _) => if (!isInputInIndex(t) && !newTests.exists(_._1.equals(t._1))) newTests.append(t) // every test duplicate is also an input duplicate
      case (_, false) => if (!isInputInIndex(t) && !newTests.contains(t)) newTests.append(t)
    }
  }

  // True when the input of `t` already exists among the flushed ('old') tests.
  def isInputInIndex(t: (I, Option[O]), index: mutable.HashSet[I] = keysIndex): Boolean = keysIndex.contains(t._1)

  /** Updates test for the given index. **/
  def updateTest(index: Int, t: (I, Option[O])) {
    //println(s"** Updated test #$index: $t")
    val prevKey = tests(index)._1
    tests(index) = t
    // updating keyIndex: drop the old input only if no other test still uses it.
    if (!tests.exists(_._1.equals(prevKey)))
      keysIndex.remove(prevKey) // key is not present, remove it
    keysIndex += t._1
  }

  /**
   * Returns a list of tests with n first element dropped.
   */
  def dropFromTests(n: Int): Seq[(I, Option[O])] = {
    if (tests.size == n) Seq() else tests.drop(n).toList
  }

  /**
   * Moves elements from newTests to a global tests pool, and prepares manager for the next iteration
   * by clearing newTests. No test redundancy checks are performed - such check was already conducted
   * when the test was added to the newTests list.
   */
  def flushHelpers() {
    for (test <- newTests) {
      tests.append(test) // append test
      keysIndex.add(test._1)
      if (printAddedTests) println(s"Added test: $test")
    }
    // Record how many tests this flush contributed (only when history is enabled).
    if (testsHistory && newTests.nonEmpty)
      history.put(flushNo, newTests.size)
    newTests.clear
    flushNo += 1
  }

  /**
   * Saves tests-related info and statistics in the collector.
   */
  def reportData(coll: Collector, prefix: String = "tests") {
    coll.set(s"$prefix.total", tests.size)
    coll.set(s"$prefix.testsHistory", history.toList.sorted.mkString(", "))
    coll.set(s"$prefix.totalKnownOutputs", getNumberOfKnownOutputs)
    coll.set(s"$prefix.totalUnknownOutputs", getNumberOfUnknownOutputs)
    if (saveTests)
      coll.set(s"$prefix.collected", tests.toString)
  }
}
object TestsManagerCDGP {
  /** Creates an empty manager with explicit logging/saving flags. */
  def apply[I,O](testsHistory: Boolean = false, printAddedTests: Boolean = false, saveTests: Boolean = false)
                (implicit opt: Options, rng: TRandom): TestsManagerCDGP[I,O] = {
    val tests = mutable.ArrayBuffer[(I, Option[O])]()
    val newTests = mutable.ArrayBuffer[(I, Option[O])]()
    new TestsManagerCDGP(tests, newTests, testsHistory, printAddedTests, saveTests)
  }

  /** Creates an empty manager, reading the flags from the fuel Options. */
  def apply[I,O]()(implicit opt: Options, rng: TRandom): TestsManagerCDGP[I,O] = {
    val testsHistory = opt('logTestsHistory, false)
    val printAddedTests = opt('printTests, false)
    val saveTests = opt('saveTests, false)
    TestsManagerCDGP(testsHistory, printAddedTests, saveTests)
  }

  /**
   * Removes, as specified by the parameters, redundant tests in the provided list of tests.
   * Two types of redundancy are distinguished:
   * - input duplicates - two tests with the same input
   * - test duplicates - two tests with the same input and output
   */
  def removeDuplicates[I,O](ts: Seq[(I, Option[O])],
                            allowInputDuplicates: Boolean = true,
                            allowTestDuplicates: Boolean = false): Seq[(I, Option[O])] = {
    (allowInputDuplicates, allowTestDuplicates) match {
      case (true, true) => ts
      case (false, _) => removeInputDuplicates(ts) // every test duplicate is also an input duplicate
      case (_, false) => removeTestDuplicates(ts)
    }
  }

  /**
   * Removes all tests with the same input as some other test earlier in the sequence,
   * keeping the first occurrence. Runs in O(n) by tracking seen inputs in a hash set;
   * the previous foldLeft + exists + :+ implementation was O(n^2).
   * (Relies on inputs having consistent equals/hashCode, as all hash collections do.)
   */
  def removeInputDuplicates[I,O](ts: Seq[(I, Option[O])]): Seq[(I, Option[O])] = {
    val seen = mutable.HashSet[I]()
    // HashSet.add returns false when the element was already present.
    ts.filter { case (input, _) => seen.add(input) }
  }

  /** Removes all tests which are exactly equivalent to one another, leaving only one copy of such tests. **/
  def removeTestDuplicates[I,O](ts: Seq[(I, Option[O])]): Seq[(I, Option[O])] = ts.distinct
}
object NoiseAdderStdDev {
  /**
   * Returns a new instance of the tests manager with noise added to the tests. Noise can be added both
   * to the dependent variable (noiseY) and independent variables (noiseX). Noise on the certain variable is
   * generated from the normal distribution with the mean 0 and with the standard deviation equal to
   * delta * standard deviation of the variable in question (computed from the sample).
   *
   * @param manager tests manager.
   * @param deltaY Factor of the noise on the dependent variable. 0.0 means no noise.
   * @param deltaX Factor of the noise on the independent variables. 0.0 means no noise.
   * @return new tests manager instance with noise added to tests.
   */
  def apply(manager: TestsManagerCDGP[Map[String, Any], Any], deltaY: Double, deltaX: Double = 0.0)
           (implicit rng: TRandom): TestsManagerCDGP[Map[String, Any], Any] = {
    if ((manager.tests.isEmpty && manager.newTests.isEmpty) || (deltaX == 0.0 && deltaY == 0.0)) {
      manager // if no tests or both delta=0 then return manager
    }
    else {
      // Standard deviations are computed from the union of old and new tests.
      val allTestsSet = manager.newTests.toSet ++ manager.tests.toSet
      val keys = if (manager.tests.nonEmpty) manager.tests.head._1.keys else manager.newTests.head._1.keys
      // Create a vector of std deviations for each column
      val stdDevs = keys.map{ k: String => stdDevForInputVar(allTestsSet, k) }.toSeq
      val stdDevOut = stdDevForOutput(allTestsSet)
      // Randomizing tests
      val tests2 = randomizeTests(manager.tests, stdDevs, stdDevOut, deltaY=deltaY, deltaX=deltaX)
      val newTests2 = randomizeTests(manager.newTests, stdDevs, stdDevOut, deltaY=deltaY, deltaX=deltaX)
      // Adding tests
      val tests3 = mutable.ArrayBuffer[(Map[String, Any], Option[Any])]()
      val newTests3 = mutable.ArrayBuffer[(Map[String, Any], Option[Any])]()
      tests2.foreach(t => tests3.append(t))
      newTests2.foreach(t => newTests3 += t)
      // println("Tests before:\\n" + manager.getTests().mkString("\\n"))
      // println("New tests before:\\n" + manager.newTests.mkString("\\n"))
      val t = new TestsManagerCDGP[Map[String, Any], Any](tests3, newTests3, manager.testsHistory, manager.printAddedTests, manager.saveTests)
      // println("Tests after:\\n" + t.getTests().mkString("\\n"))
      // println("New tests after:\\n" + t.newTests.mkString("\\n"))
      t
    }
  }

  // Convenience overload: delta factors are read from the fuel Options.
  def apply(manager: TestsManagerCDGP[Map[String, Any], Any])
           (implicit rng: TRandom, opt: Options): TestsManagerCDGP[Map[String, Any], Any] = {
    val deltaY = opt('noiseDeltaY, 0.0)
    val deltaX = opt('noiseDeltaX, 0.0)
    apply(manager, deltaY=deltaY, deltaX=deltaX)
  }

  // Applies Gaussian noise to every test with a defined output; tests with an
  // undefined output are passed through unchanged.
  // NOTE(review): inputs and outputs are cast to Double — assumes a regression
  // problem with purely numeric tests.
  def randomizeTests(tests: Seq[(Map[String, Any], Option[Any])],
                     stdDevs: Seq[Double], stdDevOut: Double,
                     deltaY: Double, deltaX: Double)
                    (implicit rng: TRandom):
                    Seq[(Map[String, Any], Option[Any])] = {
    tests.map { case (input, output) =>
      if (output.isEmpty) (input, output)
      else {
        // Each input variable is perturbed with its own column std deviation.
        val newInput = input.toSeq.zip(stdDevs).map{case ((k, value), dev) =>
          (k, addNoise(value.asInstanceOf[Double], dev, deltaX))
        }.toMap
        val newOutput = Some(addNoise(output.get.asInstanceOf[Double], stdDevOut, deltaY))
        (newInput, newOutput)
      }
    }
  }

  /** Standard deviation for a set of tests where tests without defined output are ignored. */
  def stdDevForInputVar(tests: Set[(Map[String, Any], Option[Any])], key: String): Double = {
    val col = tests.filter(_._2.isDefined).map(_._1(key)).toSeq.asInstanceOf[Seq[Double]]
    Tools.stddev(col)
  }

  /** Standard deviation for a set of tests where tests without defined output are ignored. */
  def stdDevForOutput(tests: Set[(Map[String, Any], Option[Any])]): Double = {
    val col = tests.filter(_._2.isDefined).map(_._2.get).toSeq.asInstanceOf[Seq[Double]]
    Tools.stddev(col)
  }

  // x + N(0, stdDev * delta): zero-mean Gaussian noise scaled by the delta factor.
  def addNoise(x: Double, stdDev: Double, delta: Double)(implicit rng: TRandom): Double = {
    x + rng.nextGaussian() * stdDev * delta
  }
} | kkrawiec/CDGP | src/main/scala/cdgp/TestsManager.scala | Scala | mit | 10,596 |
package controllers.quiz.derivativegraph
import com.artclod.mathml.{Match, Yes, No, Inconclusive}
import com.artclod.mathml._
import com.artclod.slick.JodaUTC
import controllers.quiz.{QuestionForms, QuizzesController}
import controllers.quiz.derivative.DerivativeQuestionForm
import controllers.quiz.tangent.{TangentQuestionForm, TangentAnswerForm}
import controllers.support.SecureSocialConsented
import models.quiz.question._
import models.quiz.question.support.DerivativeOrder
import models.support._
import models.user.User
import play.api.data.Form
import play.api.data.Forms._
import play.api.libs.json.{JsError, Json}
import play.api.mvc.{Action, Controller}
import play.api.data.format.Formats._
import models.quiz.question.support.DerivativeOrder.derivativeOrderFormatter
import com.artclod.mathml.MathMLEq.tightRange
import scala.util.{Success, Failure}
trait DerivativeGraphQuestionsControllon extends Controller with SecureSocialConsented {

  // POST handler: binds the derivative-graph question form and, on success,
  // persists the new question and redirects back to the quiz view; on binding
  // errors the quiz view is re-rendered with the failed form.
  def createDerivativeGraph(organizationId: OrganizationId, courseId: CourseId, quizId: QuizId) = ConsentedAction { implicit request => implicit user => implicit session =>
    QuizzesController(organizationId, courseId, quizId) match {
      case Left(notFoundResult) => notFoundResult
      case Right((organization, course, quiz)) => {
        DerivativeGraphQuestionForm.values.bindFromRequest.fold(
          errors => BadRequest(views.html.quiz.quizView(course.access, course, quiz, None, controllers.quiz.QuestionForms.derivativeGraph(errors))),
          form => {
            DerivativeGraphQuestions.create(DerivativeGraphQuestionForm.toQuestion(user, form), quizId)
            Redirect(controllers.quiz.routes.QuizzesController.view(organization.id, course.id, quiz.id, None))
          })
      }
    }
  }

  // JSON payloads for the difficulty-estimation endpoint below.
  case class DerivativeGraphDifficultyRequest(functionStr: String, function: String, derivativeOrder: String, showFunction: Boolean, partnerSkill: Double)
  case class DerivativeGraphDifficultyResponse(functionStr: String, function: String, derivativeOrder: String, showFunction: Boolean, difficulty: Double, correctPoints: Double, incorrectPoints: Double)
  implicit val formatDerivativeGraphDifficultyRequest = Json.format[DerivativeGraphDifficultyRequest]
  implicit val formatDerivativeGraphDifficultyResponse = Json.format[DerivativeGraphDifficultyResponse]

  // JSON endpoint: parses the request body, computes the question's difficulty and
  // the teacher scores for a correct/incorrect answer, and echoes them back as JSON.
  // NOTE(review): e.getStackTraceString and JsError.toFlatJson are deprecated in
  // newer Scala/Play versions — consider replacing when upgrading.
  def derivativeGraphQuestionDifficulty = Action { request =>
    request.body.asJson.map { configJson =>
      configJson.validate[DerivativeGraphDifficultyRequest]
        .map { difficultyRequest =>
          (MathML(difficultyRequest.function)) match {
            case (Failure(e)) => BadRequest("Could not parse function [" + difficultyRequest.function + "] as mathml\\n" + e.getStackTraceString)
            case (Success(function)) => {
              val diff = DerivativeGraphQuestionDifficulty(function, difficultyRequest.showFunction)
              val correct = QuestionScoring.teacherScore(diff, true, difficultyRequest.partnerSkill)
              val incorrect = QuestionScoring.teacherScore(diff, false, difficultyRequest.partnerSkill)
              Ok(Json.toJson(DerivativeGraphDifficultyResponse(difficultyRequest.functionStr, difficultyRequest.function, difficultyRequest.derivativeOrder, difficultyRequest.showFunction, diff, correct, incorrect)))
            }
          }
        }.recoverTotal { e => BadRequest("Detected error:" + JsError.toFlatJson(e)) }
    }.getOrElse(BadRequest("Expecting Json data"))
  }
}
object DerivativeGraphQuestionForm {
  // Field Names
  val function = "function"
  val functionStr = "functionStr"
  val derivativeOrder = "derivativeOrder"
  val showFunction = "showFunction"

  // Validation Check Names
  val rangeValid = "rangeValid"
  val functionInvalid = "functionInvalid"
  val functionsSame = "functionsSame"
  val functionsDisplayNicely = "functionsDisplayNicely"

  // Play form mapping; the first verifying on `function` guarantees the MathML
  // parses, which is what makes `functionMathML`'s .get safe for bound values.
  val values = Form(
    mapping(function -> nonEmptyText.verifying(f => MathML(f).isSuccess),
      functionStr -> nonEmptyText,
      derivativeOrder -> of[DerivativeOrder],
      showFunction -> boolean)
    (DerivativeGraphQuestionForm.apply)(DerivativeGraphQuestionForm.unapply)
    verifying(rangeValid, fields => verifyRangeValid(fields) )
    verifying(functionInvalid, fields => QuestionForms.verifyFunctionValid(fields.functionMathML))
    verifying(functionsSame, fields => verifyFunctionsDifferent(fields))
    verifying(functionsDisplayNicely, fields => QuestionForms.verifyFunctionDisplaysNicely(fields.functionMathML))
  )

  // Builds the persistent question entity from a validated form (id left null for the DB layer).
  def toQuestion(user: User, form: DerivativeGraphQuestionForm) = DerivativeGraphQuestion(null, user.id, form.functionMathML, form.functionStr, form.showFunction, JodaUTC.now, form.derivativeOrder, DerivativeGraphQuestionDifficulty(form.functionMathML, form.showFunction))

  private def verifyRangeValid(f: DerivativeGraphQuestionForm) = f.rangeLow < f.rangeHigh

  // Rejects functions where f, f' and f'' are not pairwise distinct — otherwise
  // the "identify which curve is which" question would be ambiguous.
  private def verifyFunctionsDifferent(form: DerivativeGraphQuestionForm) = {
    val f = form.functionMathML
    val fp = f.dx
    val fpp = fp.dx
    if ( (f ?= fp) == Yes ) { false }
    else if ( (f ?= fpp) == Yes ) { false }
    else if ( (fp ?= fpp) == Yes ) { false }
    else { true }
  }

  // Pre-populates the form from an existing question (used when editing).
  def fromQuestion(question: DerivativeGraphQuestion): Form[DerivativeGraphQuestionForm] = {
    val formFill = DerivativeGraphQuestionForm(question.function.toString, question.functionStr, question.derivativeOrder, question.showFunction)
    values.fill(formFill)
  }
}
// Form-backing value for a derivative-graph question; `function` holds the MathML
// source and `functionStr` the raw user-entered expression.
case class DerivativeGraphQuestionForm(function: String, functionStr : String, derivativeOrder: DerivativeOrder, showFunction: Boolean) {
  // Graphing range is fixed, symmetric around zero (from MathMLEq.tightRange).
  val rangeLow = -1d * tightRange
  val rangeHigh = tightRange

  // TODO handle errors for .get
  // NOTE(review): .get is safe for instances produced by DerivativeGraphQuestionForm.values
  // (its mapping verifies MathML(f).isSuccess), but will throw for directly
  // constructed instances with unparsable `function` strings.
  def functionMathML = MathML(function).get
} | kristiankime/web-education-games | app/controllers/quiz/derivativegraph/DerivativeGraphQuestionsControllon.scala | Scala | mit | 5,756 |
/*
* Copyright (C) 2017-present, Chenai Nakam(chenai.nakam@gmail.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package hobby.chenai.nakam.autotx.core.coin
import scala.language.{existentials, implicitConversions, postfixOps}
/**
* @author Chenai Nakam(chenai.nakam@gmail.com)
* @version 1.0, 30/05/2017
*/
// Token group for Ether; the smallest representable unit here is GWei.
object EthGroup extends AbsTokenGroup {
  override type COIN = Token
  override type UNIT = COIN with Unt

  // ETH is the standard display unit for this group.
  override def unitStd = ETH

  override def make(count: Long, unt: UNIT) = new Token(count) {
    override def unit = unt
  }

  abstract class Token private[EthGroup](count: Long) extends AbsToken(count: Long) {
    // Equality is by internal count only (units of one group share the same scale base).
    override def equals(obj: Any) = obj match {
      case that: Token => that.canEqual(this) && that.count == this.count
      case _ => false
    }

    override def canEqual(that: Any) = that.isInstanceOf[Token]
  }

  // Serves both as a unit value and as an enum-like constant.
  lazy val GWei: UNIT = new Token(1) with Unt {
    override val name = "GWei"
  }

  // Ether's unit system is rather messy; for now precision is kept down to GWei
  // (1e9 Wei, i.e. 1e-9 Ether). Keeping too many decimal places would reduce the
  // number of digits left for the integer part.
  lazy val ETH: UNIT = new Token(1000000000) with Unt {
    override val name = "ETH"
    // override val decmlFmt: Int = super.decmlFmt - 1 // A previous version used 7 zeros but kept only 6 significant digits, hence this setting.
  }

  // DSL wrapper enabling literals like `1.5 ETH` / `20 GWei`.
  class ImpDsl(count: Double) {
    implicit def GWei: COIN = EthGroup.GWei * count
    implicit def ETH: COIN = EthGroup.ETH * count
  }

  implicit def wrapEthNum(count: Double): ImpDsl = new ImpDsl(count)
}
| chenakam/AutoTX | src/main/scala/hobby/chenai/nakam/autotx/core/coin/EthGroup.scala | Scala | apache-2.0 | 2,068 |
package com.citypay.pan.search
import java.nio.ByteBuffer
import com.citypay.pan.search.io.IndexedByteBuffer
import com.citypay.pan.search.nio.NioFileSystemScanner.Stats._
import com.citypay.pan.search.source.ScanListener
import com.citypay.pan.search.util.{LuhnCheck, Tracer}
import scala.annotation.tailrec
import scala.collection.mutable.ListBuffer
import scala.util.control.NonFatal
/**
* Trait which reviews data for a match
*/
trait ScanReviewer {
// Identifier of the scanning subsystem, used when constructing Match results.
def system: String
// Execution context for this scan run; supplies the SearchContext and report.
def sec: ScanExecutionContext
def ctx: SearchContext = sec.sc
def scanListener: ScanListener
protected def report: ScanReport = sec.report

// Working buffer for analysis, sized from configuration.
protected lazy val analysisBuffer: ByteBuffer = ByteBuffer.allocate(ctx.config.analysisBufferSz)
// Candidate buffer sized at twice the longest PAN spec.
// NOTE(review): the rationale for the *2 factor is not visible here — presumably
// so a candidate spanning a refill boundary still fits; confirm.
protected lazy val inspectionBuffer = new IndexedByteBuffer(sec.sc.maximumLength * 2)
/**
 * Runs a level-1 scan of the inspection buffer and promotes each resulting
 * [[Match]] through the stricter level-2 check to filter false positives;
 * other results (e.g. still-potential matches) pass through unchanged.
 *
 * @param inspectionBuffer the inspection buffer to analyse, collated by review
 * @param previous previous reviews that have been collated through the review process and may match, these
 *                 are returned if not enough data has been established at the time of review and the [[PanSpec]]
 *                 has a `length` greater than the value of `limit`
 * @param location human-readable description of where the data came from, used in match messages
 * @param offset an offset from the buffer
 * @param limit the limit to which the review should hold, this is generally to where we
 *              have yet gathered data to
 * @return level-2 checked matches plus any carried-over potential matches
 */
def review(inspectionBuffer: IndexedByteBuffer,
           previous: List[PotentialMatch],
           location: String,
           offset: Int,
           limit: Int): List[MatchResult] = {
  // level 1 review
  val result = reviewBufferLevel1(inspectionBuffer, location, previous, offset, limit)
  result.collect {
    // a level 1 match helps to speed up searches of probable matches i.e 20 ranges vs 1000 ranges to check
    // now we have a possible match, filter by level2 matches
    case m: Match => level42Match(m)
    case r: MatchResult => r
  }
}
/**
 * Reviews the underlying inspection buffer and returns any matches if found.
 * Scans both the configured level-1 [[PanSpec]]s (from a fresh offset) and any
 * previously-recorded potential matches (from their own saved offsets).
 *
 * @param inspectionBuffer the inspection buffer to analyse, collated by review
 * @param location human-readable description of the scanned data, used in match messages
 * @param previous potential matches carried over from earlier review passes
 * @param offset an offset from the buffer
 * @param limit the limit to which the review should hold, this is generally to where we
 *              have yet gathered data to
 * @return matches and still-potential matches discovered in this pass
 */
def reviewBufferLevel1(inspectionBuffer: IndexedByteBuffer,
                       location: String,
                       previous: List[PotentialMatch],
                       offset: Int,
                       limit: Int): List[MatchResult] = {

  // Walks the level-1 specs recursively, accumulating potentials and matches.
  def findInSpec(specs: List[PanSpec], accum: List[MatchResult]): List[MatchResult] = {
    specs.headOption.fold(accum) { spec =>
      InspectionScanner("level1", spec, inspectionBuffer, offset, limit) match {
        case i: InspectedPotential =>
          // Not enough data yet — record a potential and keep scanning other specs.
          findInSpec(specs.tail, accum :+ PotentialMatch(spec, i.offset, i.from, i.to))
        case i: InspectedProposedFind =>
          // A concrete find terminates the spec walk for this buffer.
          accum :+ Match(spec, s"Found ${spec.name} in $location at position ${i.from}", system, i.from, i.to, i.offset, location, i.value.getBytes)
        case _ =>
          findInSpec(specs.tail, accum)
      }
    }
  }

  // Re-checks previously recorded potentials now that more data may be available.
  def findInPrev(specs: List[PotentialMatch], accum: List[MatchResult]): List[MatchResult] = {
    specs.headOption.fold(accum) { prev =>
      InspectionScanner("previo", prev.spec, inspectionBuffer, prev.offset, limit) match {
        case i: InspectedPotential =>
          // Still undecided — carry the potential forward unchanged.
          findInPrev(specs.tail, accum :+ prev)
        case i: InspectedProposedFind =>
          accum :+ Match(prev.spec, s"Found ${prev.spec.name} in $location at position ${i.from}", system, i.from, i.to, i.offset, location, i.value.getBytes)
        case _ =>
          findInPrev(specs.tail, accum)
      }
    }
  }

  findInSpec(ctx.level1, List()) ++ findInPrev(previous, List())
}
  /**
   * Returns true when the candidate value is NOT present in the context's
   * false positive registry, i.e. the value may be treated as a genuine match.
   * Note the negation: a registered false positive yields false.
   */
  def checkFalsePositiveRegistry(str: String): Boolean = {
    !ctx.falsePositives.contains(str)
  }
/**
* Runs a level 2 match (with a bad reference to Mark King to show my age!)
* against a level 1 match, this aids in reducing false-positives
*
* @param matched the matched value
* @return an updated match value if found to exist or None otherwise
*/
def level42Match(matched: Match): MatchResult = {
val _tracer = new Tracer("analysisBufferL2")
import _tracer._
traceEnd(s"Reviewing match $matched")
@tailrec
def rec(spec: PanSpec, i: Int): Boolean = {
trace(s"InspectionScanner: level2, spec=$spec, i=$i")
val str = new String(matched.expectedValue)
// if we are equal or over the min length of the spec, run a luhn check on the whole value as a shortcut,
// return only if a valid match
if (i >= spec.length && LuhnCheck(str)) {
traceEnd(", match, luhncheck")
// check for false positive matches
checkFalsePositiveRegistry(str)
}
// use the expected length to restrict the search, return false if we have exceeded or matched it
else if (i >= matched.expectedLen) {
traceEnd(", exhausted")
false
}
// check the leading digits
else if (i < spec.leadingLen && spec.leadingBytes(i) != matched.expectedValue(i)) {
traceEnd(", no match")
false
}
// iterate recursively as apt
else {
traceEnd(", match...")
rec(spec, i + 1)
}
}
// only attempt level 2 checks against schemes with the same id and return the first one found
val declaredMatch = ctx.level2.filter(spec => spec.id == matched.searchSpec.id).find(spec => rec(spec, 0))
declaredMatch.fold[MatchResult](NoMatch())(d => matched.copy(searchSpec = d))
}
  /**
   * Handles a ruled-out digit sequence: traces the decision, resets the
   * inspection buffer, and returns 0 — the caller assigns this as the new
   * review position.
   */
  private def falsePos(tracer: Tracer) = {
    import tracer._
    trace(", inspection buffer > 2 (reset)")
    // do not add it doesn't look like a card number based on sequence of entered digits
    trace(", reset-buffer")
    inspectionBuffer.reset()
    0
  }
  /**
   * Emits a periodic trace header: closes the current trace line every 16
   * bytes past the offset, then logs the current analysis position. Purely
   * diagnostic — has no effect on review state.
   */
  private def traceReset(i: Int, offset: Int, location: String, reviewPos: Int, tracer: Tracer) = {
    if (i > offset && i % 16 == 0) {
      tracer.traceEnd()
    }
    tracer.trace("\\n%s, analysis-buffer: i=%06d, offset=%06d, pos=%06d", location, i, offset + i, reviewPos)
  }
  /**
   * Reviews `read` bytes of the analysis buffer starting at `offset`. ASCII
   * digits are streamed into the inspection buffer; once enough digits have
   * accumulated, `review` is invoked and its matches collected. Line and
   * column counters are carried across calls via `accumulativeResult`.
   *
   * @param offset base offset into the analysis buffer
   * @param read number of bytes to review from that offset
   * @param location description of the scanned location, included in match messages
   * @param accumulativeResult carries line/column counters from the previous buffer review
   * @return the matches found in this pass plus the updated line/column position;
   *         returns early with only the current matches when ctx.stopOnFirstMatch is set
   */
  def reviewAnalysisBuffer(offset: Int,
                           read: Int,
                           location: String,
                           accumulativeResult: ReviewResult
                          ): ReviewResult = {
    // mutable scan state, local to this pass
    var potentialMatch = List[PotentialMatch]()
    var matches = ListBuffer[Match]()
    var reviewPos = 0
    var lineNo = accumulativeResult.lineNo
    var colNo = accumulativeResult.colNo
    val _tracer = new Tracer("analysisBuffer")
    import _tracer._
    try {
      // iterate an index from 0 to read
      for (i <- 0 until read) {
        colNo = colNo + 1
        traceReset(i, offset, location, reviewPos, _tracer)
        val byte = analysisBuffer.get(offset + i) // review the incoming bytes and push to the inspection buffer if numeric
        trace(", byte=0x%02X", byte)
        // restrict further analysis based on an int value
        // (0x30..0x39 are the ASCII digits '0'..'9')
        if (byte >= 0x30 && byte <= 0x39) {
          // we can say that if the last recorded value's index > 2 (i.e. allow 4000-000)
          // then it is immediately a false positive result
          if (inspectionBuffer.position() > 0 && (offset + i - inspectionBuffer.lastEnteredIndex) > 2) {
            reviewPos = falsePos(_tracer)
            // if this is the first inspected numeric then add
            // but only add if it matches possible leading pan digits
          } else if (inspectionBuffer.position() == 0 && ctx.leadingPanDigits.contains(byte)) {
            val j = inspectionBuffer.put(byte, offset + i, lineNo, colNo)
            trace(", L, put(idx=%s, i=%02d)", offset + i, j)
          } else if (inspectionBuffer.position() == 0) {
            trace(", not leadingPanDigit, ignoring")
            // otherwise it seems to be middle numerics that may make up a pan, add and continue
          } else {
            val j = inspectionBuffer.put(byte, offset + i, lineNo, colNo)
            trace(", d, put(idx=%s, i=%02d)", offset + i, j)
            trace(s", ins-pos=${inspectionBuffer.position()}")
            // check inspection buffer length to see if it is in a position for
            // inspection which we can say is the minimum length digits or more
            if (inspectionBuffer.position() >= ctx.minimumLength) {
              traceEnd(s" ---> Inspect (ins-pos=${inspectionBuffer.position()}, rev-pos=$reviewPos) <--- ${potentialMatch.mkString("\\n", ",\\n", "")}")
              val result = review(inspectionBuffer, potentialMatch, location, reviewPos, inspectionBuffer.position())
              val matched = result.collect { case m: Match => m }
              matched.foreach(m => sec.report.incStat(Matched))
              // early exit: the `return` escapes the for-loop and the try
              // (the finally block still runs before returning)
              if (ctx.stopOnFirstMatch && matched.nonEmpty) {
                return ReviewResult(matched, lineNo, colNo)
              }
              matches ++= matched
              trace("Matched=%d", matched.size)
              potentialMatch = result.collect { case pm: PotentialMatch => pm }
              trace(", Potential=%d", potentialMatch.size)
              val noMatches = result.collect { case n: NoMatch => n }
              traceEnd(", Nomatch=%d".format(noMatches.size))
              // if we have no potential matches, we can reset the inspection buffer
              if (potentialMatch.isEmpty) {
                inspectionBuffer.shiftLeft(1)
                trace(s"shift-buffer (${inspectionBuffer.position()})")
                reviewPos = 0
              } else {
                reviewPos = reviewPos + 1
              }
            }
          }
        } else {
          trace(", out-of-range")
          // increment new line
          if (byte == 0x0A) {
            lineNo = lineNo + 1
            colNo = 0
          }
          // offset added to 3 so we can consider windows end of line patterns
          if ((offset + i - inspectionBuffer.lastEnteredIndex) > 3) {
            trace(", inspection buffer > 2 (reset)")
            // do not add it doesn't look like a card number based on sequence of entered digits
            trace(", reset-buffer")
            inspectionBuffer.reset()
            reviewPos = 0
          } else {
            // check for grouping characters (space, -, new line or carriage return) as these are allowed
            if (byte == 0x20 || byte == 0x2D) {
              trace(", acceptable delimiter"); // continue
            } else {
              trace(", reset-buffer")
              inspectionBuffer.reset()
              reviewPos = 0
            }
          }
        }
      }
      ReviewResult(matches.toList, lineNo, colNo)
    } catch {
      // log the failure for diagnostics, then rethrow so the caller still sees it
      case NonFatal(e) => e.printStackTrace()
        throw e
    } finally {
      traceEnd()
    }
  }
}
| citypay/citypay-pan-search | src/main/scala/com/citypay/pan/search/ScanReviewer.scala | Scala | mit | 11,145 |
import scala.language.{ reflectiveCalls }
// Compiler run-test: exercises structural (reflective) method calls against a
// concrete object through a structural type alias. The printed output is the
// checked expectation of the test, so no behavioral change is permitted here.
object test1 {
  val o1 = new Object { override def toString = "ohone" }
  val o2 = new Object { override def toString = "ohtwo" }
  val t1 = new Tata("tieone")
  val t2 = new Tata("tietwo")
  class Tata(name: String) {
    override def toString = name
    def tatMe = "oy"
  }
  class Titi extends Tata("titi")
  // Rec structurally conforms to the `rt` refinement type below.
  object Rec {
    val a = 1
    val b = 2
    val c = "hey"
    def d(x: AnyRef) = new Object { override def toString = "dee" }
    def e(x: Tata) = new Tata("iei")
    def f(x: Int) = x + 1
    def g(x: Int) = { v = x }
    def h(x: Unit) = new Object { override def toString = "eitch" }
    def i(x: Array[Int]) = x(0)
    def j(x: Array[AnyRef]) = x(0)
    def k(x: Array[Char]) = x(0)
    def l(x: Array[Unit]) = x(0)
    def m(x: Array[String]) = x(0)
    def n(x: Array[Tata]) = x(0)
    def o: Array[Int] = Array(1, 2, 3)
    def p: Array[AnyRef] = Array(o1, o2)
    def q: Array[Char] = Array('1', '2')
    def r: Array[Unit] = Array((), ())
    def s: Array[String] = Array("one", "two")
    def t: Array[Tata] = Array(t1, t2)
    def u[T](f: T=>T, v:T): T = f(v)
    var v = 4
    var w = 11
    val x = t1
    val y: Tata = null
    def z(t: Tata) = ()
  }
  // Structural refinement covering vals, vars and methods over primitive,
  // reference, array and polymorphic signatures.
  type rt = Object {
    val a: Int;
    val c: String;
    def d(x: AnyRef): AnyRef
    def e(x: Tata): Tata
    def f(x: Int): Int;
    def h(x: Unit): AnyRef;
    def i(x: Array[Int]): Int
    def j(x: Array[AnyRef]): AnyRef
    def k(x: Array[Char]): Char
    def l(x: Array[Unit]): Unit
    def m(x: Array[String]): String
    def n(x: Array[Tata]): Tata
    def o: Array[Int]
    def p: Array[AnyRef]
    def q: Array[Char]
    def r: Array[Unit]
    def s: Array[String]
    def t: Array[Tata]
    def u[T](f: T=>T, v:T): T
    var v: Int
    val y: Tata
  }
  // Drives every member of the structural type; each println line is part of
  // the checked test output.
  def l (r: rt) {
    println(" 1. " + r.c)
    println(" 2. " + r.a + 1)
    println(" 3. " + r.d(o1))
    println(" 4. " + r.e(t1))
    println(" 5. " + (r.f(4) + 1))
    println(" 6. " + r.f(4) + 1)
    println(" 7. " + r.f(r.a))
    println(" 8. " + r.v)
    r.v = r.v + 1
    println("10. " + r.v)
    println("11. " + r.h(()))
    println("12. " + r.i(Array(1, 2, 3)))
    println("13. " + r.j(Array(o1, o2)))
    println("14. " + r.k(Array('1', '2')))
    println("15. " + r.l(Array((), ())))
    println("16. " + r.m(Array("one", "two")))
    println("17. " + r.n(Array(t1, t2)))
    println("18. " + (r.o(0) + 1))
    println("19. " + (r.p(0).hashCode() > 0))
    println("20. " + r.q(0))
    println("21. " + r.r(0))
    println("22. " + r.m(r.s))
    println("23. " + r.t(0).tatMe)
    println("24. " + r.u[Int](_+1,0))
    println("25. " + r.y)
    println("26. " + r.e(null))
  }
  /*def ma[T](r: Object{def e(x: T): T; val x: T}) {
    println("30. " + r.e(r.x)) // static error
  }*/
  def mb(r: Object { def e[T](x: T): T }) {
    println("31. " + r.e[Int](4)) // while this is ok
  }
  def m1(r: Object { def z(x: Tata): Unit }) {
    println("32. " + r.z(new Titi)) // while this is ok
  }
  def m2[T](r: Object { def e(x: Tata): T; val x: Tata }) {
    println("33. " + r.e(r.x)) // and this too
  }
  class Rec3[T] {
    def e(x: T): T = x
  }
  def m3[T](r: Rec3[T], x: T) {
    println("33. " + r.e(x)) // and this too
  }
  Rec.g(11)
  this.l(Rec)
  this.mb(new Object{def e[T](x: T): T = x})
  this.m1(Rec)
  this.m2[Tata](Rec)
  this.m3[Tata](new Rec3[Tata], t1)
}
// Compiler run-test: structural types via anonymous/refinement classes and
// early-definition-like `extends { ... }` syntax. Output is checked.
object test2 {
  class C extends { def f() { println("1") } }
  val x1 = new C
  x1.f()
  abstract class D extends { def f() }
  val x2 = new D { def f() { println("2") } }
  x2.f()
  val x3 = new { def f() { println("3") } }
  def run(x: { def f() }) { x.f() }
  run(x3)
  type T = { def f() }
  val x4 = new AnyRef { def f() { println("4") } } // ok!
  //val x4 = new T { def f() { println("4") } } // error! (bug #1241)
  x4.f()
  val x5: T = new { def f() { println("5") } }
  x5.f()
}
// Compiler run-test: exceptions thrown through a structural call site are
// catchable by their concrete type. Output is checked ("caught" expected).
object test3 {
  case class Exc() extends Exception
  object Rec {
    def f = throw Exc()
  }
  def m(r: { def f: Nothing }) =
    try {
      r.f
    }
    catch {
      case e: Exc => println("caught")
      case e: Throwable => println(e)
    }
  m(Rec)
}
// Compiler run-test: arrays conforming to a structural `{def size: Int}`
// parameter. The bounded/view-bounded variants remain commented as known
// open issues. Output is checked.
object test4 {
  class A
  val aar = Array(new A, new A, new A)
  val nar = Array(1, 2)
  def f(p: {def size: Int}) = println(p.size)
  //def g[T <: {def size: Int}](p: T) = println(p.size) // open issue
  //def h[T <% {def size: Int}](p: T) = println(p.size) // open issue
  f(aar)
  f(nar)
  //g(aar)
  //g(nar)
  //h(aar)
  //h(nar)
}
// Test entry point: referencing each object forces its initializer (the test
// bodies above live in object constructors), producing the checked output.
object Test extends App {
  test1
  test2
  test3
  test4
}
| felixmulder/scala | test/files/run/structural.scala | Scala | bsd-3-clause | 4,571 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.predictionio.workflow
import java.net.URI
import java.util.ServiceLoader
import akka.event.LoggingAdapter
import com.google.common.io.ByteStreams
import grizzled.slf4j.Logging
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.fs.Path
import org.json4s.DefaultFormats
import org.json4s.Formats
import org.json4s.JObject
import org.json4s.JValue
import org.json4s.native.JsonMethods._
import scala.collection.JavaConversions._
import scala.collection.mutable
/**
 * Context holding the engine server's resolved plugins (grouped by plugin
 * type, then keyed by plugin name), the per-plugin parameters, and the
 * logging adapter the server uses.
 */
class EngineServerPluginContext(
    val plugins: mutable.Map[String, mutable.Map[String, EngineServerPlugin]],
    val pluginParams: mutable.Map[String, JValue],
    val log: LoggingAdapter) {

  /** Plugins registered as output blockers, as an immutable snapshot. */
  def outputBlockers: Map[String, EngineServerPlugin] =
    pluginsOfType(EngineServerPlugin.outputBlocker)

  /** Plugins registered as output sniffers, as an immutable snapshot. */
  def outputSniffers: Map[String, EngineServerPlugin] =
    pluginsOfType(EngineServerPlugin.outputSniffer)

  // Shared lookup: a missing plugin type yields an empty immutable map.
  private def pluginsOfType(pluginType: String): Map[String, EngineServerPlugin] =
    plugins.get(pluginType).fold(Map.empty[String, EngineServerPlugin])(_.toMap)
}
object EngineServerPluginContext extends Logging {
  implicit val formats: Formats = DefaultFormats

  /**
   * Builds a plugin context for the given engine variant: discovers all
   * EngineServerPlugin implementations via the JVM ServiceLoader, reads the
   * per-plugin parameters from the "plugins" section of the variant JSON
   * file, and registers only plugins whose params contain "enabled": true.
   * Plugins without params, or with enabled missing/false, are logged as
   * disabled and skipped.
   */
  def apply(log: LoggingAdapter, engineVariant: String): EngineServerPluginContext = {
    val plugins = mutable.Map[String, mutable.Map[String, EngineServerPlugin]](
      EngineServerPlugin.outputBlocker -> mutable.Map(),
      EngineServerPlugin.outputSniffer -> mutable.Map())
    val pluginParams = mutable.Map[String, JValue]()
    val serviceLoader = ServiceLoader.load(classOf[EngineServerPlugin])
    // collect plugin params from the variant JSON, if the file exists and has a "plugins" object
    stringFromFile(engineVariant).foreach { variantJson =>
      (parse(variantJson) \ "plugins").extractOpt[JObject].foreach { pluginDefs =>
        pluginDefs.obj.foreach { pluginParams += _ }
      }
    }
    serviceLoader foreach { service =>
      pluginParams.get(service.pluginName) map { params =>
        if ((params \ "enabled").extractOrElse(false)) {
          info(s"Plugin ${service.pluginName} is enabled.")
          plugins(service.pluginType) += service.pluginName -> service
        } else {
          info(s"Plugin ${service.pluginName} is disabled.")
        }
      } getOrElse {
        // no params at all for this plugin: treated the same as disabled
        info(s"Plugin ${service.pluginName} is disabled.")
      }
    }
    new EngineServerPluginContext(
      plugins,
      pluginParams,
      log)
  }

  /**
   * Reads the file at the given URI from the configured Hadoop FileSystem
   * and returns its contents, or None when the file does not exist.
   * NOTE(review): an IOException here terminates the whole JVM via
   * sys.exit(1) rather than propagating — confirm this is intended.
   */
  private def stringFromFile(filePath: String): Option[String] = {
    try {
      val fs = FileSystem.get(new Configuration())
      val path = new Path(new URI(filePath))
      if (fs.exists(path)) {
        Some(new String(ByteStreams.toByteArray(fs.open(path)).map(_.toChar)))
      } else {
        None
      }
    } catch {
      case e: java.io.IOException =>
        error(s"Error reading from file: ${e.getMessage}. Aborting.")
        sys.exit(1)
    }
  }
}
| takezoe/incubator-predictionio | core/src/main/scala/org/apache/predictionio/workflow/EngineServerPluginContext.scala | Scala | apache-2.0 | 3,535 |
package at.linuxhacker.procmetrics.values
import scala.language.implicitConversions
/** A process identifier together with its command line. */
case class Pid( pid: String, cmdline: String )

/**
 * A generic metric value; the abstract type member A fixes the concrete
 * value type carried by each implementation.
 */
trait ProcGenValue {
  type A
  def value: A
}

/** String-valued metric wrapper. */
case class ProcStringValue( value: String ) extends ProcGenValue { type A = String }
/** Float-valued metric wrapper. */
case class ProcFloatValue( value: Float ) extends ProcGenValue { type A = Float }
/** Int-valued metric wrapper. */
case class ProcIntValue( value: Int ) extends ProcGenValue { type A = Int }
/**
 * Implicit widening conversions between the metric value wrappers:
 * float/int to string form, and int to float. Scoped to these wrapper
 * types only, so they cannot leak into unrelated code.
 */
object ValueConverters {
  implicit def f2s( x: ProcFloatValue ): ProcStringValue = ProcStringValue( x.value.toString )
  implicit def i2s( x: ProcIntValue ): ProcStringValue = ProcStringValue( x.value.toString )
  implicit def i2f( x: ProcIntValue ): ProcFloatValue = ProcFloatValue( x.value )
}
/** Overloaded factory returning the matching ProcGenValue wrapper for a raw value. */
object ValueFactory {
  def create( x: String ): ProcGenValue = ProcStringValue( x )
  def create( x: Float ): ProcGenValue = ProcFloatValue( x )
  def create( x: Int ): ProcGenValue = ProcIntValue( x )
}
/** A named metric holding one or more values. */
case class ProcValue( name: String, values: List[ProcGenValue] )
/** Metrics for a single process, grouped under a category label. */
case class ProcCategory( pid: Pid, category: String, values: List[ProcValue] )
/** System-wide (non-process) metrics grouped under a category label. */
case class ProcGlobal( category: String, values: List[ProcValue] )
/** Alternative per-process grouping reusing ProcGlobal as its payload. */
case class ProcCategory2( pid: Pid, keyValue: ProcGlobal )
/** Convenience constructors for [[ProcValue]], accepting one value or a list. */
object ProcValueFactory {
  def create( name: String, value: ProcGenValue ): ProcValue = ProcValue( name, List( value ) )
  def create( name: String, values: List[ProcGenValue] ): ProcValue = ProcValue( name, values )
}
| hstraub/ProcMetrics | src/main/scala/at/linuxhacker/procmetrics/values/Values.scala | Scala | gpl-3.0 | 1,443 |
/*
* Copyright 2001-2011 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalatest.Inside._
import org.scalatest.matchers.ShouldMatchers
import org.scalatest.SharedHelpers.thisLineNumber
/* Uncomment after remove type aliases in org.scalatest package object
import org.scalatest.exceptions.TestFailedException
*/
// Spec for the `inside` construct. NOTE: several assertions use
// thisLineNumber with fixed offsets, so the line layout inside each
// `evaluating { ... }` block is significant and must not be reflowed.
class InsideSpec extends FunSpec with ShouldMatchers with OptionValues {
  case class Address(street: String, city: String, state: String, zip: String)
  case class Name(first: String, middle: String, last: String)
  case class Record(name: Name, address: Address, age: Int)
  describe("The inside construct") {
    val rec = Record(
      Name("Sally", "Mary", "Jones"),
      Address("123 Main St", "Bluesville", "KY", "12345"),
      29
    )
    it("should return normally when nested properties are inspected with matcher expressions that all succeed") {
      inside (rec) { case Record(name, address, age) =>
        inside (name) { case Name(first, middle, last) =>
          first should be ("Sally")
          middle should startWith ("Ma")
          last should endWith ("nes")
        }
        inside (address) { case Address(street, city, state, zip) =>
          street should be ("123 Main St")
          city should be ("Bluesville")
          state.toLowerCase should be ("ky")
          zip should be ("12345")
        }
        age should be >= 21
      }
    }
    it("should throw a TFE when the partial function isn't defined at the passed value") {
      val caught = evaluating {
        inside (rec) { case Record(name, _, 99) =>
          name.first should be ("Sally")
        }
      } should produce [TestFailedException]
      caught.message.value should be (Resources("insidePartialFunctionNotDefined", rec.toString))
      caught.failedCodeLineNumber.value should equal (thisLineNumber - 5)
      caught.failedCodeFileName.value should be ("InsideSpec.scala")
    }
    it("should include an inside clause when a matcher fails inside") {
      val caught = evaluating {
        inside (rec) { case Record(_, _, age) =>
          age should be <= 21
        }
      } should produce [TestFailedException]
      caught.message.value should be (Resources("insidePartialFunctionAppendSomeMsg", Resources("wasNotLessThanOrEqualTo", "29", "21"), rec.toString))
      caught.failedCodeLineNumber.value should equal (thisLineNumber - 4)
      caught.failedCodeFileName.value should be ("InsideSpec.scala")
    }
    it("should include a nested inside clause when a matcher fails inside a nested inside") {
      val caught = evaluating {
        inside (rec) { case Record(name, _, _) =>
          inside (name) { case Name(first, _, _) =>
            first should be ("Harry")
          }
        }
      } should produce [TestFailedException]
      caught.message.value should be (Resources("insidePartialFunctionAppendSomeMsg", Resources("insidePartialFunctionAppendSomeMsg", Resources("wasNotEqualTo", "\\"[Sall]y\\"", "\\"[Harr]y\\""), rec.name.toString), rec.toString))
      caught.failedCodeLineNumber.value should equal (thisLineNumber - 5)
      caught.failedCodeFileName.value should be ("InsideSpec.scala")
    }
    it("should throw a TFE when matcher fails inside due to exception") {
      val caught = evaluating {
        inside (rec) { case Record(name, address, age) =>
          throw new TestFailedException(None, None, 0)
        }
      } should produce [TestFailedException]
      caught.message.value should be (Resources("insidePartialFunctionAppendNone", rec))
      caught.failedCodeLineNumber.value should equal (thisLineNumber - 4)
      caught.failedCodeFileName.value should be ("InsideSpec.scala")
    }
  }
}
| hubertp/scalatest | src/test/scala/org/scalatest/InsideSpec.scala | Scala | apache-2.0 | 4,253 |
package chess
package variant
import scala.util.Random
case object Chess960 extends Variant(
id = 2,
key = "chess960",
name = "Chess960",
shortName = "960",
title = "Starting position of the home rank pieces is randomized",
standardInitialPosition = false) {
override def pieces = Variant.symmetricRank {
positions(scala.util.Random.nextInt(960)) flatMap Role.allByForsyth.get
}
def positionNumber(fen: String): Option[Int] =
positions.indexOf(fen.takeWhile('/'!=)).some.filter(-1!=)
private val positions = Array(
"bbqnnrkr",
"bqnbnrkr",
"bqnnrbkr",
"bqnnrkrb",
"qbbnnrkr",
"qnbbnrkr",
"qnbnrbkr",
"qnbnrkrb",
"qbnnbrkr",
"qnnbbrkr",
"qnnrbbkr",
"qnnrbkrb",
"qbnnrkbr",
"qnnbrkbr",
"qnnrkbbr",
"qnnrkrbb",
"bbnqnrkr",
"bnqbnrkr",
"bnqnrbkr",
"bnqnrkrb",
"nbbqnrkr",
"nqbbnrkr",
"nqbnrbkr",
"nqbnrkrb",
"nbqnbrkr",
"nqnbbrkr",
"nqnrbbkr",
"nqnrbkrb",
"nbqnrkbr",
"nqnbrkbr",
"nqnrkbbr",
"nqnrkrbb",
"bbnnqrkr",
"bnnbqrkr",
"bnnqrbkr",
"bnnqrkrb",
"nbbnqrkr",
"nnbbqrkr",
"nnbqrbkr",
"nnbqrkrb",
"nbnqbrkr",
"nnqbbrkr",
"nnqrbbkr",
"nnqrbkrb",
"nbnqrkbr",
"nnqbrkbr",
"nnqrkbbr",
"nnqrkrbb",
"bbnnrqkr",
"bnnbrqkr",
"bnnrqbkr",
"bnnrqkrb",
"nbbnrqkr",
"nnbbrqkr",
"nnbrqbkr",
"nnbrqkrb",
"nbnrbqkr",
"nnrbbqkr",
"nnrqbbkr",
"nnrqbkrb",
"nbnrqkbr",
"nnrbqkbr",
"nnrqkbbr",
"nnrqkrbb",
"bbnnrkqr",
"bnnbrkqr",
"bnnrkbqr",
"bnnrkqrb",
"nbbnrkqr",
"nnbbrkqr",
"nnbrkbqr",
"nnbrkqrb",
"nbnrbkqr",
"nnrbbkqr",
"nnrkbbqr",
"nnrkbqrb",
"nbnrkqbr",
"nnrbkqbr",
"nnrkqbbr",
"nnrkqrbb",
"bbnnrkrq",
"bnnbrkrq",
"bnnrkbrq",
"bnnrkrqb",
"nbbnrkrq",
"nnbbrkrq",
"nnbrkbrq",
"nnbrkrqb",
"nbnrbkrq",
"nnrbbkrq",
"nnrkbbrq",
"nnrkbrqb",
"nbnrkrbq",
"nnrbkrbq",
"nnrkrbbq",
"nnrkrqbb",
"bbqnrnkr",
"bqnbrnkr",
"bqnrnbkr",
"bqnrnkrb",
"qbbnrnkr",
"qnbbrnkr",
"qnbrnbkr",
"qnbrnkrb",
"qbnrbnkr",
"qnrbbnkr",
"qnrnbbkr",
"qnrnbkrb",
"qbnrnkbr",
"qnrbnkbr",
"qnrnkbbr",
"qnrnkrbb",
"bbnqrnkr",
"bnqbrnkr",
"bnqrnbkr",
"bnqrnkrb",
"nbbqrnkr",
"nqbbrnkr",
"nqbrnbkr",
"nqbrnkrb",
"nbqrbnkr",
"nqrbbnkr",
"nqrnbbkr",
"nqrnbkrb",
"nbqrnkbr",
"nqrbnkbr",
"nqrnkbbr",
"nqrnkrbb",
"bbnrqnkr",
"bnrbqnkr",
"bnrqnbkr",
"bnrqnkrb",
"nbbrqnkr",
"nrbbqnkr",
"nrbqnbkr",
"nrbqnkrb",
"nbrqbnkr",
"nrqbbnkr",
"nrqnbbkr",
"nrqnbkrb",
"nbrqnkbr",
"nrqbnkbr",
"nrqnkbbr",
"nrqnkrbb",
"bbnrnqkr",
"bnrbnqkr",
"bnrnqbkr",
"bnrnqkrb",
"nbbrnqkr",
"nrbbnqkr",
"nrbnqbkr",
"nrbnqkrb",
"nbrnbqkr",
"nrnbbqkr",
"nrnqbbkr",
"nrnqbkrb",
"nbrnqkbr",
"nrnbqkbr",
"nrnqkbbr",
"nrnqkrbb",
"bbnrnkqr",
"bnrbnkqr",
"bnrnkbqr",
"bnrnkqrb",
"nbbrnkqr",
"nrbbnkqr",
"nrbnkbqr",
"nrbnkqrb",
"nbrnbkqr",
"nrnbbkqr",
"nrnkbbqr",
"nrnkbqrb",
"nbrnkqbr",
"nrnbkqbr",
"nrnkqbbr",
"nrnkqrbb",
"bbnrnkrq",
"bnrbnkrq",
"bnrnkbrq",
"bnrnkrqb",
"nbbrnkrq",
"nrbbnkrq",
"nrbnkbrq",
"nrbnkrqb",
"nbrnbkrq",
"nrnbbkrq",
"nrnkbbrq",
"nrnkbrqb",
"nbrnkrbq",
"nrnbkrbq",
"nrnkrbbq",
"nrnkrqbb",
"bbqnrknr",
"bqnbrknr",
"bqnrkbnr",
"bqnrknrb",
"qbbnrknr",
"qnbbrknr",
"qnbrkbnr",
"qnbrknrb",
"qbnrbknr",
"qnrbbknr",
"qnrkbbnr",
"qnrkbnrb",
"qbnrknbr",
"qnrbknbr",
"qnrknbbr",
"qnrknrbb",
"bbnqrknr",
"bnqbrknr",
"bnqrkbnr",
"bnqrknrb",
"nbbqrknr",
"nqbbrknr",
"nqbrkbnr",
"nqbrknrb",
"nbqrbknr",
"nqrbbknr",
"nqrkbbnr",
"nqrkbnrb",
"nbqrknbr",
"nqrbknbr",
"nqrknbbr",
"nqrknrbb",
"bbnrqknr",
"bnrbqknr",
"bnrqkbnr",
"bnrqknrb",
"nbbrqknr",
"nrbbqknr",
"nrbqkbnr",
"nrbqknrb",
"nbrqbknr",
"nrqbbknr",
"nrqkbbnr",
"nrqkbnrb",
"nbrqknbr",
"nrqbknbr",
"nrqknbbr",
"nrqknrbb",
"bbnrkqnr",
"bnrbkqnr",
"bnrkqbnr",
"bnrkqnrb",
"nbbrkqnr",
"nrbbkqnr",
"nrbkqbnr",
"nrbkqnrb",
"nbrkbqnr",
"nrkbbqnr",
"nrkqbbnr",
"nrkqbnrb",
"nbrkqnbr",
"nrkbqnbr",
"nrkqnbbr",
"nrkqnrbb",
"bbnrknqr",
"bnrbknqr",
"bnrknbqr",
"bnrknqrb",
"nbbrknqr",
"nrbbknqr",
"nrbknbqr",
"nrbknqrb",
"nbrkbnqr",
"nrkbbnqr",
"nrknbbqr",
"nrknbqrb",
"nbrknqbr",
"nrkbnqbr",
"nrknqbbr",
"nrknqrbb",
"bbnrknrq",
"bnrbknrq",
"bnrknbrq",
"bnrknrqb",
"nbbrknrq",
"nrbbknrq",
"nrbknbrq",
"nrbknrqb",
"nbrkbnrq",
"nrkbbnrq",
"nrknbbrq",
"nrknbrqb",
"nbrknrbq",
"nrkbnrbq",
"nrknrbbq",
"nrknrqbb",
"bbqnrkrn",
"bqnbrkrn",
"bqnrkbrn",
"bqnrkrnb",
"qbbnrkrn",
"qnbbrkrn",
"qnbrkbrn",
"qnbrkrnb",
"qbnrbkrn",
"qnrbbkrn",
"qnrkbbrn",
"qnrkbrnb",
"qbnrkrbn",
"qnrbkrbn",
"qnrkrbbn",
"qnrkrnbb",
"bbnqrkrn",
"bnqbrkrn",
"bnqrkbrn",
"bnqrkrnb",
"nbbqrkrn",
"nqbbrkrn",
"nqbrkbrn",
"nqbrkrnb",
"nbqrbkrn",
"nqrbbkrn",
"nqrkbbrn",
"nqrkbrnb",
"nbqrkrbn",
"nqrbkrbn",
"nqrkrbbn",
"nqrkrnbb",
"bbnrqkrn",
"bnrbqkrn",
"bnrqkbrn",
"bnrqkrnb",
"nbbrqkrn",
"nrbbqkrn",
"nrbqkbrn",
"nrbqkrnb",
"nbrqbkrn",
"nrqbbkrn",
"nrqkbbrn",
"nrqkbrnb",
"nbrqkrbn",
"nrqbkrbn",
"nrqkrbbn",
"nrqkrnbb",
"bbnrkqrn",
"bnrbkqrn",
"bnrkqbrn",
"bnrkqrnb",
"nbbrkqrn",
"nrbbkqrn",
"nrbkqbrn",
"nrbkqrnb",
"nbrkbqrn",
"nrkbbqrn",
"nrkqbbrn",
"nrkqbrnb",
"nbrkqrbn",
"nrkbqrbn",
"nrkqrbbn",
"nrkqrnbb",
"bbnrkrqn",
"bnrbkrqn",
"bnrkrbqn",
"bnrkrqnb",
"nbbrkrqn",
"nrbbkrqn",
"nrbkrbqn",
"nrbkrqnb",
"nbrkbrqn",
"nrkbbrqn",
"nrkrbbqn",
"nrkrbqnb",
"nbrkrqbn",
"nrkbrqbn",
"nrkrqbbn",
"nrkrqnbb",
"bbnrkrnq",
"bnrbkrnq",
"bnrkrbnq",
"bnrkrnqb",
"nbbrkrnq",
"nrbbkrnq",
"nrbkrbnq",
"nrbkrnqb",
"nbrkbrnq",
"nrkbbrnq",
"nrkrbbnq",
"nrkrbnqb",
"nbrkrnbq",
"nrkbrnbq",
"nrkrnbbq",
"nrkrnqbb",
"bbqrnnkr",
"bqrbnnkr",
"bqrnnbkr",
"bqrnnkrb",
"qbbrnnkr",
"qrbbnnkr",
"qrbnnbkr",
"qrbnnkrb",
"qbrnbnkr",
"qrnbbnkr",
"qrnnbbkr",
"qrnnbkrb",
"qbrnnkbr",
"qrnbnkbr",
"qrnnkbbr",
"qrnnkrbb",
"bbrqnnkr",
"brqbnnkr",
"brqnnbkr",
"brqnnkrb",
"rbbqnnkr",
"rqbbnnkr",
"rqbnnbkr",
"rqbnnkrb",
"rbqnbnkr",
"rqnbbnkr",
"rqnnbbkr",
"rqnnbkrb",
"rbqnnkbr",
"rqnbnkbr",
"rqnnkbbr",
"rqnnkrbb",
"bbrnqnkr",
"brnbqnkr",
"brnqnbkr",
"brnqnkrb",
"rbbnqnkr",
"rnbbqnkr",
"rnbqnbkr",
"rnbqnkrb",
"rbnqbnkr",
"rnqbbnkr",
"rnqnbbkr",
"rnqnbkrb",
"rbnqnkbr",
"rnqbnkbr",
"rnqnkbbr",
"rnqnkrbb",
"bbrnnqkr",
"brnbnqkr",
"brnnqbkr",
"brnnqkrb",
"rbbnnqkr",
"rnbbnqkr",
"rnbnqbkr",
"rnbnqkrb",
"rbnnbqkr",
"rnnbbqkr",
"rnnqbbkr",
"rnnqbkrb",
"rbnnqkbr",
"rnnbqkbr",
"rnnqkbbr",
"rnnqkrbb",
"bbrnnkqr",
"brnbnkqr",
"brnnkbqr",
"brnnkqrb",
"rbbnnkqr",
"rnbbnkqr",
"rnbnkbqr",
"rnbnkqrb",
"rbnnbkqr",
"rnnbbkqr",
"rnnkbbqr",
"rnnkbqrb",
"rbnnkqbr",
"rnnbkqbr",
"rnnkqbbr",
"rnnkqrbb",
"bbrnnkrq",
"brnbnkrq",
"brnnkbrq",
"brnnkrqb",
"rbbnnkrq",
"rnbbnkrq",
"rnbnkbrq",
"rnbnkrqb",
"rbnnbkrq",
"rnnbbkrq",
"rnnkbbrq",
"rnnkbrqb",
"rbnnkrbq",
"rnnbkrbq",
"rnnkrbbq",
"rnnkrqbb",
"bbqrnknr",
"bqrbnknr",
"bqrnkbnr",
"bqrnknrb",
"qbbrnknr",
"qrbbnknr",
"qrbnkbnr",
"qrbnknrb",
"qbrnbknr",
"qrnbbknr",
"qrnkbbnr",
"qrnkbnrb",
"qbrnknbr",
"qrnbknbr",
"qrnknbbr",
"qrnknrbb",
"bbrqnknr",
"brqbnknr",
"brqnkbnr",
"brqnknrb",
"rbbqnknr",
"rqbbnknr",
"rqbnkbnr",
"rqbnknrb",
"rbqnbknr",
"rqnbbknr",
"rqnkbbnr",
"rqnkbnrb",
"rbqnknbr",
"rqnbknbr",
"rqnknbbr",
"rqnknrbb",
"bbrnqknr",
"brnbqknr",
"brnqkbnr",
"brnqknrb",
"rbbnqknr",
"rnbbqknr",
"rnbqkbnr",
"rnbqknrb",
"rbnqbknr",
"rnqbbknr",
"rnqkbbnr",
"rnqkbnrb",
"rbnqknbr",
"rnqbknbr",
"rnqknbbr",
"rnqknrbb",
"bbrnkqnr",
"brnbkqnr",
"brnkqbnr",
"brnkqnrb",
"rbbnkqnr",
"rnbbkqnr",
"rnbkqbnr",
"rnbkqnrb",
"rbnkbqnr",
"rnkbbqnr",
"rnkqbbnr",
"rnkqbnrb",
"rbnkqnbr",
"rnkbqnbr",
"rnkqnbbr",
"rnkqnrbb",
"bbrnknqr",
"brnbknqr",
"brnknbqr",
"brnknqrb",
"rbbnknqr",
"rnbbknqr",
"rnbknbqr",
"rnbknqrb",
"rbnkbnqr",
"rnkbbnqr",
"rnknbbqr",
"rnknbqrb",
"rbnknqbr",
"rnkbnqbr",
"rnknqbbr",
"rnknqrbb",
"bbrnknrq",
"brnbknrq",
"brnknbrq",
"brnknrqb",
"rbbnknrq",
"rnbbknrq",
"rnbknbrq",
"rnbknrqb",
"rbnkbnrq",
"rnkbbnrq",
"rnknbbrq",
"rnknbrqb",
"rbnknrbq",
"rnkbnrbq",
"rnknrbbq",
"rnknrqbb",
"bbqrnkrn",
"bqrbnkrn",
"bqrnkbrn",
"bqrnkrnb",
"qbbrnkrn",
"qrbbnkrn",
"qrbnkbrn",
"qrbnkrnb",
"qbrnbkrn",
"qrnbbkrn",
"qrnkbbrn",
"qrnkbrnb",
"qbrnkrbn",
"qrnbkrbn",
"qrnkrbbn",
"qrnkrnbb",
"bbrqnkrn",
"brqbnkrn",
"brqnkbrn",
"brqnkrnb",
"rbbqnkrn",
"rqbbnkrn",
"rqbnkbrn",
"rqbnkrnb",
"rbqnbkrn",
"rqnbbkrn",
"rqnkbbrn",
"rqnkbrnb",
"rbqnkrbn",
"rqnbkrbn",
"rqnkrbbn",
"rqnkrnbb",
"bbrnqkrn",
"brnbqkrn",
"brnqkbrn",
"brnqkrnb",
"rbbnqkrn",
"rnbbqkrn",
"rnbqkbrn",
"rnbqkrnb",
"rbnqbkrn",
"rnqbbkrn",
"rnqkbbrn",
"rnqkbrnb",
"rbnqkrbn",
"rnqbkrbn",
"rnqkrbbn",
"rnqkrnbb",
"bbrnkqrn",
"brnbkqrn",
"brnkqbrn",
"brnkqrnb",
"rbbnkqrn",
"rnbbkqrn",
"rnbkqbrn",
"rnbkqrnb",
"rbnkbqrn",
"rnkbbqrn",
"rnkqbbrn",
"rnkqbrnb",
"rbnkqrbn",
"rnkbqrbn",
"rnkqrbbn",
"rnkqrnbb",
"bbrnkrqn",
"brnbkrqn",
"brnkrbqn",
"brnkrqnb",
"rbbnkrqn",
"rnbbkrqn",
"rnbkrbqn",
"rnbkrqnb",
"rbnkbrqn",
"rnkbbrqn",
"rnkrbbqn",
"rnkrbqnb",
"rbnkrqbn",
"rnkbrqbn",
"rnkrqbbn",
"rnkrqnbb",
"bbrnkrnq",
"brnbkrnq",
"brnkrbnq",
"brnkrnqb",
"rbbnkrnq",
"rnbbkrnq",
"rnbkrbnq",
"rnbkrnqb",
"rbnkbrnq",
"rnkbbrnq",
"rnkrbbnq",
"rnkrbnqb",
"rbnkrnbq",
"rnkbrnbq",
"rnkrnbbq",
"rnkrnqbb",
"bbqrknnr",
"bqrbknnr",
"bqrknbnr",
"bqrknnrb",
"qbbrknnr",
"qrbbknnr",
"qrbknbnr",
"qrbknnrb",
"qbrkbnnr",
"qrkbbnnr",
"qrknbbnr",
"qrknbnrb",
"qbrknnbr",
"qrkbnnbr",
"qrknnbbr",
"qrknnrbb",
"bbrqknnr",
"brqbknnr",
"brqknbnr",
"brqknnrb",
"rbbqknnr",
"rqbbknnr",
"rqbknbnr",
"rqbknnrb",
"rbqkbnnr",
"rqkbbnnr",
"rqknbbnr",
"rqknbnrb",
"rbqknnbr",
"rqkbnnbr",
"rqknnbbr",
"rqknnrbb",
"bbrkqnnr",
"brkbqnnr",
"brkqnbnr",
"brkqnnrb",
"rbbkqnnr",
"rkbbqnnr",
"rkbqnbnr",
"rkbqnnrb",
"rbkqbnnr",
"rkqbbnnr",
"rkqnbbnr",
"rkqnbnrb",
"rbkqnnbr",
"rkqbnnbr",
"rkqnnbbr",
"rkqnnrbb",
"bbrknqnr",
"brkbnqnr",
"brknqbnr",
"brknqnrb",
"rbbknqnr",
"rkbbnqnr",
"rkbnqbnr",
"rkbnqnrb",
"rbknbqnr",
"rknbbqnr",
"rknqbbnr",
"rknqbnrb",
"rbknqnbr",
"rknbqnbr",
"rknqnbbr",
"rknqnrbb",
"bbrknnqr",
"brkbnnqr",
"brknnbqr",
"brknnqrb",
"rbbknnqr",
"rkbbnnqr",
"rkbnnbqr",
"rkbnnqrb",
"rbknbnqr",
"rknbbnqr",
"rknnbbqr",
"rknnbqrb",
"rbknnqbr",
"rknbnqbr",
"rknnqbbr",
"rknnqrbb",
"bbrknnrq",
"brkbnnrq",
"brknnbrq",
"brknnrqb",
"rbbknnrq",
"rkbbnnrq",
"rkbnnbrq",
"rkbnnrqb",
"rbknbnrq",
"rknbbnrq",
"rknnbbrq",
"rknnbrqb",
"rbknnrbq",
"rknbnrbq",
"rknnrbbq",
"rknnrqbb",
"bbqrknrn",
"bqrbknrn",
"bqrknbrn",
"bqrknrnb",
"qbbrknrn",
"qrbbknrn",
"qrbknbrn",
"qrbknrnb",
"qbrkbnrn",
"qrkbbnrn",
"qrknbbrn",
"qrknbrnb",
"qbrknrbn",
"qrkbnrbn",
"qrknrbbn",
"qrknrnbb",
"bbrqknrn",
"brqbknrn",
"brqknbrn",
"brqknrnb",
"rbbqknrn",
"rqbbknrn",
"rqbknbrn",
"rqbknrnb",
"rbqkbnrn",
"rqkbbnrn",
"rqknbbrn",
"rqknbrnb",
"rbqknrbn",
"rqkbnrbn",
"rqknrbbn",
"rqknrnbb",
"bbrkqnrn",
"brkbqnrn",
"brkqnbrn",
"brkqnrnb",
"rbbkqnrn",
"rkbbqnrn",
"rkbqnbrn",
"rkbqnrnb",
"rbkqbnrn",
"rkqbbnrn",
"rkqnbbrn",
"rkqnbrnb",
"rbkqnrbn",
"rkqbnrbn",
"rkqnrbbn",
"rkqnrnbb",
"bbrknqrn",
"brkbnqrn",
"brknqbrn",
"brknqrnb",
"rbbknqrn",
"rkbbnqrn",
"rkbnqbrn",
"rkbnqrnb",
"rbknbqrn",
"rknbbqrn",
"rknqbbrn",
"rknqbrnb",
"rbknqrbn",
"rknbqrbn",
"rknqrbbn",
"rknqrnbb",
"bbrknrqn",
"brkbnrqn",
"brknrbqn",
"brknrqnb",
"rbbknrqn",
"rkbbnrqn",
"rkbnrbqn",
"rkbnrqnb",
"rbknbrqn",
"rknbbrqn",
"rknrbbqn",
"rknrbqnb",
"rbknrqbn",
"rknbrqbn",
"rknrqbbn",
"rknrqnbb",
"bbrknrnq",
"brkbnrnq",
"brknrbnq",
"brknrnqb",
"rbbknrnq",
"rkbbnrnq",
"rkbnrbnq",
"rkbnrnqb",
"rbknbrnq",
"rknbbrnq",
"rknrbbnq",
"rknrbnqb",
"rbknrnbq",
"rknbrnbq",
"rknrnbbq",
"rknrnqbb",
"bbqrkrnn",
"bqrbkrnn",
"bqrkrbnn",
"bqrkrnnb",
"qbbrkrnn",
"qrbbkrnn",
"qrbkrbnn",
"qrbkrnnb",
"qbrkbrnn",
"qrkbbrnn",
"qrkrbbnn",
"qrkrbnnb",
"qbrkrnbn",
"qrkbrnbn",
"qrkrnbbn",
"qrkrnnbb",
"bbrqkrnn",
"brqbkrnn",
"brqkrbnn",
"brqkrnnb",
"rbbqkrnn",
"rqbbkrnn",
"rqbkrbnn",
"rqbkrnnb",
"rbqkbrnn",
"rqkbbrnn",
"rqkrbbnn",
"rqkrbnnb",
"rbqkrnbn",
"rqkbrnbn",
"rqkrnbbn",
"rqkrnnbb",
"bbrkqrnn",
"brkbqrnn",
"brkqrbnn",
"brkqrnnb",
"rbbkqrnn",
"rkbbqrnn",
"rkbqrbnn",
"rkbqrnnb",
"rbkqbrnn",
"rkqbbrnn",
"rkqrbbnn",
"rkqrbnnb",
"rbkqrnbn",
"rkqbrnbn",
"rkqrnbbn",
"rkqrnnbb",
"bbrkrqnn",
"brkbrqnn",
"brkrqbnn",
"brkrqnnb",
"rbbkrqnn",
"rkbbrqnn",
"rkbrqbnn",
"rkbrqnnb",
"rbkrbqnn",
"rkrbbqnn",
"rkrqbbnn",
"rkrqbnnb",
"rbkrqnbn",
"rkrbqnbn",
"rkrqnbbn",
"rkrqnnbb",
"bbrkrnqn",
"brkbrnqn",
"brkrnbqn",
"brkrnqnb",
"rbbkrnqn",
"rkbbrnqn",
"rkbrnbqn",
"rkbrnqnb",
"rbkrbnqn",
"rkrbbnqn",
"rkrnbbqn",
"rkrnbqnb",
"rbkrnqbn",
"rkrbnqbn",
"rkrnqbbn",
"rkrnqnbb",
"bbrkrnnq",
"brkbrnnq",
"brkrnbnq",
"brkrnnqb",
"rbbkrnnq",
"rkbbrnnq",
"rkbrnbnq",
"rkbrnnqb",
"rbkrbnnq",
"rkrbbnnq",
"rkrnbbnq",
"rkrnbnqb",
"rbkrnnbq",
"rkrbnnbq",
"rkrnnbbq",
"rkrnnqbb")
}
| psuter/scalachess | src/main/scala/variant/Chess960.scala | Scala | mit | 15,910 |
// IDE type-inference regression testdata (SCL-8119): the expression between
// the /*start*/ and /*end*/ markers must infer to the type named in the
// trailing expected-type comment. Markers and the trailing comment are read
// by the test harness — do not move or reformat them.
object SCL8119A {
  trait A[+This] {
    def foo[TT >: DV[Int]](implicit z: Z[TT]): TT = ???
  }
  class V[T] extends A[V[T]]
  class DV[T] extends V[T] with A[DV[T]] {
    def x = 1
  }
  class Z[T]
  object V {
    implicit def repr[T]: Z[V[T]] = ???
    implicit def repr1[T]: Z[DV[T]] = ???
  }
  val dv: DV[Int] = ???
  /*start*/dv.foo.x/*end*/
}
//Int | ilinum/intellij-scala | testdata/typeInference/bugs5/SCL8119A.scala | Scala | apache-2.0 | 361 |
package org.oxbow.codebridge.ui.common
import javafx.scene.layout.BorderPane
import javafx.scene.Node
import javafx.scene.layout.StackPane
import javafx.beans.property.SimpleBooleanProperty
import javafx.beans.property.BooleanProperty
import javafx.beans.value.ObservableValue
import org.oxbow.codebridge.util.JFXImplicits._
import java.lang.{Boolean=>JBoolean}
import javafx.geometry.Insets
import javafx.beans.property.SimpleObjectProperty
import javafx.beans.property.ObjectProperty
import javafx.concurrent.Service
import javafx.concurrent.Worker
import javafx.animation.Timeline
import javafx.animation.KeyFrame
import javafx.util.Duration
import javafx.event.ActionEvent
import javafx.concurrent.Task
/** A [[BorderPane]] wrapping arbitrary content that can be covered by a
  * progress overlay while background work runs.
  *
  * NOTE(review): the (misspelled) class name `ProgressContaner` is kept
  * because external callers reference it by this name.
  *
  * @param content the node to display; must not be null
  */
class ProgressContaner[T <: Node]( val content: T ) extends BorderPane {

    // Fixed typo in the error message ("contanier" -> "container").
    require( content != null, "Progress container content cannot be null" )

    // The content lives in a StackPane so the overlay can be layered on top.
    private val stackPane = new StackPane
    setCenter(stackPane)
    stackPane.getChildren.add(content)

    // overlayVisible property: toggles the progress overlay over the content.
    private val overlayVisible: BooleanProperty = new SimpleBooleanProperty(false)
    overlayVisible.addListener{ visible: JBoolean =>
        // The Boolean returned by add/remove is intentionally discarded
        // (previously captured in an unused local).
        if (visible) stackPane.getChildren.add(progressOverlay.get)
        else stackPane.getChildren.remove(progressOverlay.get)
        ()
    }
    def overlayVisibleProperty() = overlayVisible
    def setOverlayVisible(visible: Boolean) = overlayVisible.set(visible)
    def isOverlayVisible = overlayVisible.get

    // progressOverlay property: the node shown while work is in progress.
    private val progressOverlay: ObjectProperty[Node] = new SimpleObjectProperty(new ProgressOverlay)
    def progressOverlayProperty() = progressOverlay
    def setProgressOverlay( overlay: Node ) = progressOverlay.set(overlay)
    def getProgressOverlay: Node = progressOverlay.get

    /** Service implementation that is aware of the progress container:
      * it shows the overlay when the service starts (after a short delay so
      * quick tasks do not flash it) and hides it when the service stops.
      */
    abstract class ProgressAwareService[T] extends Service[T] {

        // Delay (ms) before the overlay becomes visible.
        val progressDelayMs = 100

        lazy val timeline = new Timeline(
            new KeyFrame( Duration.millis(progressDelayMs), { setOverlayVisible(true)} ))

        import Worker.State._
        stateProperty.addListener{ state: Worker.State =>
            state match {
                case RUNNING => timeline.play
                case CANCELLED | SUCCEEDED | FAILED => {
                    setOverlayVisible(false)
                    timeline.stop
                }
                case _ =>
            }
        }

        // The actual work is delegated to the abstract `task` member.
        protected final def createTask = new Task[T]{
            protected def call = task
        }

        def task: T
    }
}
/**
* Copyright 2012-2013 StackMob
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stackmob.newman.test.concurrent
import org.specs2.Specification
import com.stackmob.newman.concurrent.InMemoryAsyncMutex
import scala.concurrent._
import java.util.concurrent.{Executors, CountDownLatch}
import java.util.concurrent.atomic.AtomicBoolean
/** Specs for InMemoryAsyncMutex: futures queued on the mutex must execute one
  * at a time, each waiting for the previous holder to finish.
  */
class InMemoryAsyncMutexSpecs extends Specification { def is =
  "InMemoryAsyncMutexSpecs".title ^ end ^
  "InMemoryAsyncMutex is the AsyncMutex implementation that's based on Twitter's AsyncMutex class" ^ end ^
  "a future should wait until a previous future holding the lock finishes" ! waitsForLock ^ end ^
  end

  // Drives two futures through the mutex, gating each one with start/end
  // latches, and samples the `*Started` flags at three stages.
  //
  // NOTE(review): the flags are read immediately after the countDown calls,
  // without waiting for the worker threads to actually be scheduled, so the
  // `oneStarted`/`bothStarted` expectations look timing-sensitive (flaky) —
  // confirm and consider gating the assertions on additional latches.
  // NOTE(review): the executor service created below is never shut down.
  private def waitsForLock = {
    val mutex = new InMemoryAsyncMutex
    val f1StartLatch = new CountDownLatch(1)
    val f1Started = new AtomicBoolean(false)
    val f1EndLatch = new CountDownLatch(1)
    val f2StartLatch = new CountDownLatch(1)
    val f2Started = new AtomicBoolean(false)
    val f2EndLatch = new CountDownLatch(1)
    implicit lazy val ctx = ExecutionContext.fromExecutorService(Executors.newCachedThreadPool)
    // Lazy so the futures only begin once handed to the mutex below.
    lazy val f1 = Future {
      f1StartLatch.await()
      f1Started.set(true)
      f1EndLatch.await()
    }
    lazy val f2 = Future {
      f2StartLatch.await()
      f2Started.set(true)
      f2EndLatch.await()
    }
    mutex(f1)
    mutex(f2)
    // Stage 1: neither future has been released yet.
    val noneStarted = (f1Started.get must beEqualTo(false)) and (f2Started.get must beEqualTo(false))
    f1StartLatch.countDown()
    f2StartLatch.countDown()
    // Stage 2: at least one future should have begun running.
    val oneStarted = (f1Started.get must beEqualTo(true)) or (f2Started.get must beEqualTo(true))
    f1EndLatch.countDown()
    f2EndLatch.countDown()
    // Stage 3: with both end latches released, both futures should have run.
    val bothStarted = (f1Started.get must beEqualTo(true)) and (f2Started.get must beEqualTo(true))
    noneStarted and oneStarted and bothStarted
  }
}
| indykish/newman | src/test/scala/com/stackmob/newman/test/concurrent/InMemoryAsyncMutexSpecs.scala | Scala | apache-2.0 | 2,484 |
package ru.primetalk.synapse.slick.lifted
import slick.ast._
import slick.lifted._
import scala.reflect.ClassTag
import slick.lifted.{TupleShape, FlatShapeLevel, MappedProductShape, Shape}
import slick.util.{TupleSupport, ProductWrapper}
import scala.language.implicitConversions
import ru.primetalk.synapse.map._
/**
* Shape - represents types of rows in 3 different forms
* - unpacked - actual row values
* - packed - column names
* - mixed - mixture of column names and ordinary types. This is used to represent constant column values within column sequence.
* buildParams is only available for Shapes where Mixed = Unpacked
* encodeRef is only available for Shapes where Mixed = Packed.
* The conversion to tuple and from tuple is represented with TypeMapping (returned by #toNode)
* @author zhizhelev, 31.01.15.
*/
// A Slick Shape whose unpacked representation is a TypedMap[E] rather than a
// tuple: `keys` gives the TypedMap key for each product element, `shapes` the
// element Shapes, in the same order.
class TypedMapShape[E,Level <: ShapeLevel, P <: Product](val keys: Seq[Key0])(
  /** The Shapes for the product elements. */
  val shapes: Seq[Shape[_, _, _, _]])
  extends Shape[Level, P, TypedMap[E], P] {
  /** Get an Iterator of a record value's element values. The default
    * implementation repeatedly calls `getElement`. */
  def getIterator(value: Any): Iterator[Any] =
    shapes.iterator.zipWithIndex.map(t => getElement(value, t._2))
  /** Convert a value of this Shape's (mixed) type to the fully packed type */
  def pack(value: Mixed) = {
    // Pack each element with its own element Shape, then rebuild the record.
    val elems = shapes.iterator.zip(getIterator(value)).
      map {
      case (p, f) => p.pack(f.asInstanceOf[p.Mixed])
    }
    buildValue(elems.toIndexedSeq).asInstanceOf[Packed]
  }
  /** Return the fully packed Shape. */
  def packedShape: Shape[Level, Packed, Unpacked, Packed] =
    new TypedMapShape(keys)(shapes.map(_.packedShape.asInstanceOf[Shape[_ <: ShapeLevel, _, _, _]])).asInstanceOf[Shape[Level, Packed, Unpacked, Packed]]
  /** Build a packed representation containing QueryParameters that can extract
    * data from the unpacked representation later.
    * This method is not available for Shapes where Mixed and Unpacked are
    * different types. */
  def buildParams(extract: Any => Unpacked): Packed = {
    val elems = shapes.iterator.zipWithIndex.map { case (p, idx) =>
      // Each element Shape extracts its own slot from the unpacked record.
      def chExtract(u: Any): p.Unpacked = getElement(u, idx).asInstanceOf[p.Unpacked]
      p.buildParams(extract.andThen(chExtract))
    }
    buildValue(elems.toIndexedSeq).asInstanceOf[Packed]
  }
  /** Encode a reference into a value of this Shape.
    * This method may not be available for shapes where Mixed and Packed are
    * different types. */
  def encodeRef(value: Mixed, path: Node) = {
    val elems = shapes.iterator.zip(getIterator(value)).zipWithIndex.map {
      case ((p, x), pos) => p.encodeRef(x.asInstanceOf[p.Mixed], path)//???)///*new SimplyTypedNode(ElementSymbol(pos + 1)) ::*/ path)
    }
    buildValue(elems.toIndexedSeq)
  }
  /** Return an AST Node representing a mixed value. */
  def toNode(value: Mixed) =
    TypeMapping(
      ProductNode(shapes.iterator.zip(getIterator(value)).map {
        case (p, f) => p.toNode(f.asInstanceOf[p.Mixed])
      }.toSeq),
      MappedScalaType.Mapper(toBase, toMapped, None), classTag)
  // toBase: record -> positional product; toMapped: positional product -> record.
  def toBase(v: Any) = new ProductWrapper(getIterator(v).toIndexedSeq)
  def toMapped(v: Any) = buildValue(TupleSupport.buildIndexedSeq(v.asInstanceOf[Product]))
  def classTag = implicitly[ClassTag[TypedMap[E]]]
  /** Build a record value represented by this Shape from its element values.
    * Optional keys whose element is None are omitted from the resulting map;
    * a non-Option element under an optional key is an error. */
  def buildValue(elems: IndexedSeq[Any]): Any =
    TypedMap(
      keys.zip(elems).flatMap {
        case (OptionalKey0(key), Some(value)) =>
          Some(KeyValue(key.asInstanceOf[Key[E,Any]], value))
        case (OptionalKey0(key), None) =>
          None
        case (OptionalKey0(key), other) =>
          throw new IllegalArgumentException(s"Cannot convert $other to Option")
        case (key: Key[_,_], value) =>
          Some(KeyValue(key.asInstanceOf[Key[Any,Any]], value))
      }: _*
    )
  /** Get the element value from a record value at the specified index.
    * Handles both representations: a TypedMap (looked up by key; `.get`
    * assumes the key is present) and a positional Product. */
  def getElement(value: Any, idx: Int): Any = value match {
    case t: TypedMap[_] =>
      t.get0(keys(idx)).get
    case p: Product =>
      p.productElement(idx)
  }
}
object TypedMapShape {
  /** Enriches a tuple of columns with conversions into TypedMap-based shapes. */
  implicit class ColumnsTupleEx[P <: Product](columns: P)(implicit shape: Shape[FlatShapeLevel, P, _, P]) {
    /** A shape materializing rows of `columns` into a TypedMap keyed by `keys`. */
    def typedMap[E](keys: Key[E,_]*) = {
      val elementShapes = shape.asInstanceOf[TupleShape[_, _, _, _]].shapes
      new TypedMapShape[E, FlatShapeLevel, P](keys.toSeq)(elementShapes)
    }
    /** A ProvenShape for `columns` based on the TypedMap shape above. */
    def toProvenShape[E](keys: Key[E,_]*) = ProvenShape.proveShapeOf(columns)(typedMap(keys: _*))
  }
  /** Build a TypedMap from its element values.
    * Optional keys with a None element are dropped; a non-Option element under
    * an optional key is rejected. */
  def buildTypedMap(keys: Seq[Key0], elems: IndexedSeq[Any]): TypedMap[Any] = {
    val keyValues = keys.zip(elems).map {
      case (OptionalKey0(key), Some(value)) =>
        Some(KeyValue(key.asInstanceOf[Key[Any,Any]], value))
      case (OptionalKey0(_), None) =>
        None
      case (OptionalKey0(_), other) =>
        throw new IllegalArgumentException(s"Cannot convert $other to Option")
      case (key: Key[_,_], value) =>
        Some(KeyValue(key.asInstanceOf[Key[Any,Any]], value))
    }
    TypedMap(keyValues.flatten: _*)
  }
  /** Extract, in key order, the values stored under `keys` in `map`;
    * optional keys yield Options, plain keys yield the bare value. */
  def typedMapToValues(keys: Seq[Key0])(map:TypedMap[_]):IndexedSeq[Any] =
    keys.iterator.map {
      case OptionalKey0(key) => map.get0(key)
      case key: Key[_,_] => map.get0(key).get
    }.toIndexedSeq
}
// Provides `.toTypedMap(keys...)` on any shaped value, yielding a
// MappedProjection that round-trips between positional tuples and TypedMaps.
object TypedMapProjection{
  implicit def toShapedValue2[T](value:T):ToShapedValue2[T] = new ToShapedValue2[T](value)
  // Work-around for SI-3346
  final class ToShapedValue2[T](val value: T) extends AnyVal {
    @inline def toTypedMap[E,U<:Product](keys: Key0*)(//f: (U => R), g: (R => U))(
      implicit shape: Shape[_ <: FlatShapeLevel, T, U, _]) = {
      // g: TypedMap -> tuple (passed as the first Mapper argument);
      // f: tuple -> TypedMap (passed as the second Mapper argument).
      val g:TypedMap[_]=>U = (m:TypedMap[_]) => TupleSupport.buildTuple(TypedMapShape.typedMapToValues(keys)(m)).asInstanceOf[U]
      val f:U=>TypedMap[_] = u=> TypedMapShape.buildTypedMap(keys, u.productIterator.toIndexedSeq)
      // The Any => Any casts are required by Mapper's untyped signature.
      new MappedProjection[TypedMap[E], U](shape.toNode(value),
        MappedScalaType.Mapper(g.asInstanceOf[Any => Any], f.asInstanceOf[Any => Any], None), implicitly[ClassTag[TypedMap[E]]])
    }
  }
}
package lila.tournament
// A tournament paired with an optional snapshot of its ranked standings.
case class MiniStanding(
    tour: Tournament,
    standing: Option[RankedPlayers])
/** A player's rank within a tournament and whether they have withdrawn. */
case class PlayerInfo(rank: Int, withdraw: Boolean) {

  /** 1-based page of the standings this player appears on (10 players per page).
    * `(rank - 1) / 10` already truncates via integer division, so the previous
    * round-trip through `math.floor`/`Double`/`.toInt` was redundant. */
  def page: Int = (rank - 1) / 10 + 1
}
// Tournaments to display, bucketed by lifecycle phase.
case class VisibleTournaments(
    created: List[Tournament],
    started: List[Tournament],
    finished: List[Tournament])
// A tournament player enriched with their user record and their games (povs).
case class PlayerInfoExt(
    tour: Tournament,
    user: lila.user.User,
    player: Player,
    povs: List[lila.game.Pov])
| terokinnunen/lila | modules/tournament/src/main/model.scala | Scala | mit | 461 |
package com.olegych.scastie.util
import akka.actor.ActorRef
import akka.stream.{Outlet, SourceShape}
import akka.stream.stage.{GraphStageLogic, OutHandler}
import scala.collection.mutable.{Queue => MQueue}
import scala.reflect.runtime.universe._
// Stage logic that registers its stage actor with `coordinator` (tagged by
// `graphId`) and forwards any element sent to that actor into `out`,
// buffering while there is no downstream demand.
class GraphStageLogicForwarder[T: TypeTag, U: TypeTag](out: Outlet[T],
                                                       shape: SourceShape[T],
                                                       coordinator: ActorRef,
                                                       graphId: U)
  extends GraphStageLogic(shape) {
  setHandler(
    out,
    new OutHandler {
      override def onPull(): Unit = {
        // Downstream asked for an element: flush one from the buffer if any.
        deliver()
      }
    }
  )
  override def preStart(): Unit = {
    // Announce this stage's actor to the coordinator so it can send elements.
    val thisGraphStageActorRef = getStageActor(bufferElement).ref
    coordinator ! ((graphId, thisGraphStageActorRef))
  }
  private val buffer = MQueue.empty[T]
  // Push at most one buffered element, and only when the outlet has demand.
  private def deliver(): Unit =
    if (isAvailable(out) && !buffer.isEmpty)
      push[T](out, buffer.dequeue)
  // Stage-actor receive: enqueue the incoming element and try to deliver.
  // NOTE(review): `element: T` is unchecked at runtime due to type erasure
  // (the TypeTag context bounds are not used to verify it), and any message
  // that fails the match raises a MatchError — confirm senders only ever send
  // elements of type T.
  private def bufferElement(receive: (ActorRef, Any)): Unit =
    receive match {
      case (_, element: T) =>
        buffer.enqueue(element)
        deliver()
    }
}
| OlegYch/scastie | utils/src/main/scala/com.olegych.scastie/util/GraphStageLogicForwarder.scala | Scala | apache-2.0 | 1,185 |
/*
* Copyright (c) 2017 Magomed Abdurakhmanov, Hypertino
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*
*/
package com.hypertino.facade.utils
import java.net.URLDecoder
import com.hypertino.binders.value.{Obj, Text}
import com.hypertino.hyperbus.model.HRL
import com.hypertino.hyperbus.utils.uri._
import scala.collection.mutable
// todo: this also needs total refactoring
// Matches a concrete HRL (resource locator) against HRL patterns whose path
// segments may contain parameters; on success returns the pattern with the
// captured parameter values filled into its query.
object ResourcePatternMatcher {
  // First pattern (in iteration order) that matches `resource`, if any.
  def matchResource(resource: HRL, resourcePatterns: Set[HRL]): Option[HRL] = {
    resourcePatterns
      .iterator
      .map(matchResource(resource, _))
      .find(_.nonEmpty)
      .flatten
  }
  // Walks the path tokens of `source` and `pattern` in lock-step, collecting
  // parameter bindings into `args`. Parameter tokens may themselves carry a
  // sub-pattern in the pattern's query, which is matched recursively.
  def matchResource(source: HRL, pattern: HRL): Option[HRL] = {
    if (source.authority == pattern.authority) {
      val patternPathUri = pattern.path
      val sourceTokens = UriPathParser.tokens(source.path).toSeq
      // NOTE(review): `var` is unnecessary here — MutableList is mutated in
      // place and never reassigned.
      var args = mutable.MutableList[(String, String)]()
      val patternTokens = UriPathParser.tokens(patternPathUri).toSeq
      val patternTokenIter = patternTokens.iterator
      val sourceUriTokenIter = sourceTokens.iterator
      // Both token streams must be exhausted at the same time for a match;
      // the repeated `hasNext == hasNext` checks enforce equal lengths.
      var matchesCorrectly = patternTokenIter.hasNext == sourceUriTokenIter.hasNext
      var previousSourceUriToken: Option[Token] = None
      val patternQuery = pattern.query.toMap
      val sourceQuery = source.query.toMap
      while (patternTokenIter.hasNext && sourceUriTokenIter.hasNext && matchesCorrectly) {
        // normalizePath collapses runs of consecutive slashes in the source.
        val sourceUriToken = normalizePath(sourceUriTokenIter, previousSourceUriToken)
        val nextPatternToken = patternTokenIter.next()
        nextPatternToken match {
          case SlashToken ⇒
            matchesCorrectly = (SlashToken == sourceUriToken) &&
              (patternTokenIter.hasNext == sourceUriTokenIter.hasNext)
          case t: TextToken ⇒
            matchesCorrectly = (t == sourceUriToken) &&
              (patternTokenIter.hasNext == sourceUriTokenIter.hasNext)
          case ParameterToken(patternParamName) ⇒
            sourceUriToken match {
              case TextToken(value) ⇒
                // Capture the (URL-decoded) source segment under the parameter
                // name; if the pattern's query constrains it, match recursively.
                val sourceParamValue = URLDecoder.decode(value, "UTF-8")
                args += patternParamName → sourceParamValue
                matchesCorrectly = patternQuery.get(patternParamName).map { paramPattern ⇒
                  matchResource(HRL(sourceParamValue.toString), HRL(paramPattern.toString)).isDefined &&
                    patternTokenIter.hasNext == sourceUriTokenIter.hasNext
                } getOrElse {
                  patternTokenIter.hasNext == sourceUriTokenIter.hasNext
                }
              case ParameterToken(paramName) ⇒
                // Parameter-vs-parameter: compare through the query values.
                matchesCorrectly = patternQuery.get(paramName).map { paramPattern ⇒
                  sourceQuery.get(paramName).map { sourceParamValue ⇒
                    matchResource(HRL(sourceParamValue.toString), HRL(paramPattern.toString)).isDefined
                  } getOrElse {
                    false
                  }
                } getOrElse {
                  patternTokenIter.hasNext == sourceUriTokenIter.hasNext
                }
              case _ ⇒
                matchesCorrectly = false
            }
        }
        previousSourceUriToken = Some(sourceUriToken)
      }
      if (!matchesCorrectly) None
      else
        // Success: return the pattern with captured args folded into its query.
        Some(pattern.copy(query = Obj(args.map(kv ⇒ kv._1 → Text(kv._2)).toMap)))
    }
    else {
      // Different authorities can never match.
      None
    }
  }
  /**
    * This method removes multiple slashes in request
    */
  def normalizePath(iter: Iterator[Token], previousToken: Option[Token]): Token = {
    iter.next() match {
      case SlashToken ⇒ previousToken match {
        case Some(SlashToken) ⇒
          // Skip repeated slashes until a non-slash token (or the last slash).
          if (iter.hasNext) normalizePath(iter, previousToken)
          else SlashToken
        case Some(_) ⇒ SlashToken
        case None ⇒ SlashToken
      }
      case other ⇒ other
    }
  }
}
| hypertino/hyperfacade | src/main/scala/com/hypertino/facade/utils/ResourcePatternMatcher.scala | Scala | mpl-2.0 | 3,993 |
package scrapers
import play.api.libs.json.Json
/** Ad-hoc manual smoke tests for the news/profile scrapers.
  *
  * Each `testX` method scrapes a fixed list of URLs and prints the scraped
  * result as JSON on stdout; nothing is asserted — output is checked by eye.
  */
object TestScrapers {

  def main(args: Array[String]): Unit = {
    testInfobae()
    println("---------------------------------------\n")
    testClarin()
    println("---------------------------------------\n")
    testCronista()
    println("---------------------------------------\n")
    testNacion()
    // NOTE(review): testLinkedIn() is deliberately not wired into main; it is
    // kept below for manual runs only.
  }

  private def testInfobae() : Unit = {
    val infobaeLinks : List[String] = List(
      "http://www.infobae.com/america/america-latina/2017/04/24/paso-a-paso-como-fue-el-meticuloso-plan-para-desvalijar-la-sede-de-prosegur-en-ciudad-del-este/",
      "http://www.infobae.com/america/america-latina/2017/04/24/como-era-el-impresionante-arsenal-utilizado-por-la-banda-que-ataco-una-empresa-de-caudales-en-paraguay/",
      "http://www.infobae.com/politica/2017/04/24/la-familia-macri-vende-su-parte-de-autopistas-del-sol-para-evitar-conflictos-politicos/")
    val infobaeScraper : InfobaeScraper = new InfobaeScraper()
    // Option.foreach replaces the previous isDefined/get pair.
    infobaeLinks.foreach { link =>
      infobaeScraper.getArticleData(link, Option("Mauricio Macri"), 0)
        .foreach(article => println(Json.toJson(article).toString()))
    }
  }

  private def testClarin() : Unit = {
    val clarinLinks : List[String] = List(
      "https://www.clarin.com/politica/ruta-dinero-pidieron-indagatoria-cristina_0_Hkxl-io0x.html",
      "https://www.clarin.com/politica/gobierno-alicia-kirchner-presento-denuncia-penal-danos-atentado-orden-constitucional-sedicion-violacion-domicilio_0_BJZpsvjRl.html",
      "https://www.clarin.com/mundo/emmanuel-macron-nuevo-presidente-frances_0_HyFLdyTyZ.html",
      "https://www.clarin.com/policiales/ataque-comando-pueblo-brasil-20-ladrones-armas-guerra-robo-millonario_0_HyII9jsAg.html")
    val clarinScraper : ClarinScraper = new ClarinScraper()
    clarinLinks.foreach { link =>
      clarinScraper.getArticleData(link, Option("Emmanuel Macron"), 0)
        .foreach(article => println(Json.toJson(article).toString()))
    }
  }

  private def testCronista() : Unit = {
    val cronistaLinks : List[String] = List(
      "http://www.cronista.com/economiapolitica/Efecto-Macron-Wall-Street-crece-en-linea-con-las-bolsas-del-mundo--20170424-0060.html",
      "http://www.cronista.com/economiapolitica/Asalto-con-explosivos-a-sede-de-Prosegur-en-Paraguay-para-robar-us-40-millones-20170424-0067.html",
      "http://www.cronista.com/cartelera/Como-es-Planetario-Fest-el-festival-al-aire-libre-para-los-chicos-en-Pilar-20170418-0097.html",
      "http://www.cronista.com/columnistas/El-Gobierno-jugara-a-la-politica-mientras-espera-que-menor-inflacion-y-mas-consumo-sean-noticia-en-mayo-20170424-0057.html")
    val elCronistaScraper : ElCronistaScraper = new ElCronistaScraper()
    cronistaLinks.foreach { link =>
      elCronistaScraper.getArticleData(link, Option("Denise Duarte"), 0)
        .foreach(article => println(Json.toJson(article).toString()))
    }
  }

  private def testNacion() : Unit = {
    val laNacionLinks : List[String] = List(
      "http://www.lanacion.com.ar/2016294-el-gobierno-de-alicia-kirchner-denuncio-a-los-manifestantes-por-los-incidentes-frente-a-la-residencia-oficial",
      "http://www.lanacion.com.ar/2016308-lanzan-ba-taxi-la-aplicacion-para-viajar-en-taxi-en-buenos-aires",
      "http://www.lanacion.com.ar/2016301-el-fiscal-campagnoli-pidio-la-detencion-del-suspendido-jefe-de-la-policia-de-la-ciudad")
    val laNacionScraper : LaNacionScraper = new LaNacionScraper()
    laNacionLinks.foreach { link =>
      laNacionScraper.getArticleData(link, Option("Ricardo Farias"), 0)
        .foreach(article => println(Json.toJson(article).toString()))
    }
  }

  private def testLinkedIn() : Unit = {
    val linkedInScraper : LinkedinUserProfileScraper = new LinkedinUserProfileScraper()
    val linkedInLinks : List[String] = List(
      "https://ar.linkedin.com/in/ignacio-cassol-894a0935",
      "https://ar.linkedin.com/in/javier-isoldi-5a937091?trk=pub-pbmap",
      "https://ar.linkedin.com/in/andres-scoccimarro-303412",
      "https://ar.linkedin.com/in/santiagofuentes?trk=pub-pbmap",
      "https://ar.linkedin.com/in/kevstessens?trk=pub-pbmap",
      "https://ar.linkedin.com/in/ignacio-juan-nu%C3%B1ez-560183b7?trk=pub-pbmap",
      "https://ar.linkedin.com/in/joaqu%C3%ADn-bucca-04428278?trk=pub-pbmap",
      "https://ar.linkedin.com/in/sofiabraun?trk=pub-pbmap",
      "https://ar.linkedin.com/in/tmsmateus?trk=pub-pbmap")
    linkedInLinks.foreach { link =>
      println(Json.toJson(linkedInScraper.getLinkedinProfile(link, 0)))
    }
    // The previous trailing re-scrape of the first URL was redundant
    // copy-paste leftover and has been removed.
  }
}
| TVilaboa/Egresados | app/scrapers/TestScrapers.scala | Scala | gpl-3.0 | 4,740 |
package com.nefariouszhen.khronos
/** A nanosecond-resolution time value (zero-allocation value class). */
case class Time(nanos: Long) extends AnyVal with Ordered[Time] {
  /** This time expressed as fractional seconds. */
  def toSeconds: Double = nanos / 1e9
  /** The raw nanosecond count widened to a Double. */
  def toDouble: Double = nanos.toDouble
  override def compare(that: Time): Int = java.lang.Long.compare(nanos, that.nanos)
}

object Time {
  /** Builds a Time from fractional seconds, truncating below one nanosecond. */
  def fromSeconds(tm: Double): Time = Time((tm * 1e9).toLong)
}
| khronos-metrics/khronos | khronos-core/src/main/scala/com/nefariouszhen/khronos/Time.scala | Scala | apache-2.0 | 327 |
//import scala.collection.mutable.Set
import scala.collection.immutable.Set
import scala.io.StdIn
/**
* Auto-generated code below aims at helping you parse
* the standard input according to the problem statement.
**/
// CodinGame puzzle driver: reads `n` adjacency relations from stdin, builds
// the graph, and prints the minimal propagation time on stdout. Elapsed
// milliseconds for each phase are reported on stderr.
object Solution extends App {
  val n = StdIn.readInt // the number of adjacency relations
  val time0 = System.currentTimeMillis()
  //Console.err.println(time0)
  val graph = new Graph(n)
  for (i <- 0 until n) {
    // xi: the ID of a person which is adjacent to yi
    // yi: the ID of a person which is adjacent to xi
    val Array(id1, id2) = for (id <- StdIn.readLine split " ") yield id.toInt
    graph.addEdges(id1, id2)
  }
  val time1 = System.currentTimeMillis()
  //Console.err.println(time1)
  // Graph-construction time (ms).
  Console.err.println(time1 - time0)
  //Console.err.println(graph)
  // Write an action using println
  // To debug: Console.err.println("Debug messages...")
  // The minimal amount of steps required to completely propagate the advertisement
  val result = graph.computeMinimalTime()
  val time2 = System.currentTimeMillis()
  //Console.err.println(time2)
  // Computation time (ms).
  Console.err.println(time2 - time1)
  println(result.toString)
}
/** Undirected graph over small integer node ids, sized for the puzzle input. */
class Graph(val adjacencyRelations: Int) {
  // Adjacency sets indexed by node id; slots stay null until a node is seen.
  // The +3 head-room mirrors the original sizing for ids slightly above n.
  private val nodesById: Array[Set[Int]] = new Array[Set[Int]](adjacencyRelations + 3)
  // Every node id mentioned by at least one edge.
  private var nodes: Set[Int] = Set.empty[Int]

  /** Registers the undirected edge between `source` and `destination`. */
  def addEdges(source: Int, destination: Int): Unit = {
    def link(from: Int, to: Int): Unit = {
      val current = nodesById(from)
      nodesById(from) = if (current == null) Set[Int](to) else current + to
    }
    link(source, destination)
    link(destination, source)
    nodes = nodes + source + destination
  }

  /** Number of BFS steps needed from node `n` to reach every connected node. */
  def computeAdjacencyReachRec(n: Int): Int = {
    @annotation.tailrec
    def bfs(steps: Int, visited: Set[Int], frontier: Set[Int]): Int =
      if (frontier.isEmpty) steps
      else {
        val seen = visited ++ frontier
        bfs(steps + 1, seen, frontier.flatMap(node => nodesById(node)) -- seen)
      }
    bfs(0, Set[Int](n), nodesById(n))
  }

  /** Minimum propagation time over all nodes (the graph's radius). */
  def computeMinimalTime(): Int =
    nodes.collect { case n if nodesById(n) != null => computeAdjacencyReachRec(n) }.min
}
package com.socialthingy.plusf.p2p.discovery
import java.net.InetSocketAddress
import akka.actor.ActorSystem
import org.scalatest.{FlatSpec, Matchers}
import scala.language.postfixOps
import scala.concurrent.duration._
import scala.language.postfixOps
/** Behavioural tests for DiscoveryRepository's session lifecycle:
  * creation, peer pairing, post-pairing cleanup, scheduled expiry, and
  * explicit cancellation.
  */
class DiscoveryRepositorySpec extends FlatSpec with Matchers {
  // Two distinct peer addresses used throughout the tests.
  val testAddr1 = new InetSocketAddress("localhost", 7000)
  val testAddr2 = new InetSocketAddress("localhost", 7001)
  "DiscoveryRepository" should "create a new session under a session ID it hasn't seen before" in new TestDiscoveryRepository {
    connectToSession("newId", testAddr1) shouldBe None
  }
  it should "return the session initiator's address when a new client connects to a prior session" in new TestDiscoveryRepository {
    connectToSession("newId", testAddr1)
    connectToSession("newId", testAddr2) shouldBe Some(testAddr1)
  }
  it should "do nothing when the initiator attempts to connect to a session it has already initiated" in new TestDiscoveryRepository {
    connectToSession("newId", testAddr1)
    connectToSession("newId", testAddr1) shouldBe None
  }
  it should "clean out a session once two peers have connected" in new TestDiscoveryRepository {
    connectToSession("newId", testAddr1)
    connectToSession("newId", testAddr2)
    // Third connect behaves like a fresh session, proving cleanup occurred.
    connectToSession("newId", testAddr2) shouldBe None
  }
  // NOTE(review): this test blocks the suite for 6 seconds and relies on
  // wall-clock timing of the scheduled cleanup — slow and potentially flaky.
  it should "clean out expired sessions when scheduled cleanup takes place" in new TestDiscoveryRepository {
    scheduleCleanup(1 second, 5 seconds)
    connectToSession("newId", testAddr1) shouldBe None
    Thread.sleep(6000)
    connectToSession("newId", testAddr2) shouldBe None
  }
  it should "clean out a session when told to cancel it" in new TestDiscoveryRepository {
    connectToSession("toBeCancelled", testAddr1) shouldBe None
    cancelSession("toBeCancelled", testAddr1)
    connectToSession("toBeCancelled", testAddr2) shouldBe None
  }
  // Fresh repository fixture per test.
  // NOTE(review): each fixture spins up an ActorSystem that is never
  // terminated — consider shutting it down after each test.
  trait TestDiscoveryRepository extends DiscoveryRepository {
    val system = ActorSystem()
  }
}
| alangibson27/plus-f | discovery/src/test/scala/com/socialthingy/plusf/p2p/discovery/DiscoveryRepositorySpec.scala | Scala | mit | 2,002 |
package dotty.tools
package dotc
package interactive
import scala.annotation.tailrec
import scala.collection._
import ast.{NavigateAST, Trees, tpd, untpd}
import core._, core.Decorators._
import Contexts._, Flags._, Names._, NameOps._, Symbols._, Trees._, Types._
import transform.SymUtils.decorateSymbol
import util.Spans._, util.SourceFile, util.SourcePosition
import core.Denotations.SingleDenotation
import NameKinds.SimpleNameKind
import config.Printers.interactiv
import StdNames.nme
/** High-level API to get information out of typed trees, designed to be used by IDEs.
*
* @see `InteractiveDriver` to get typed trees from code.
*/
object Interactive {
import ast.tpd._
  // A tiny bit-set of inclusion flags controlling what the queries below
  // return (references vs. definitions, overriding/overridden symbols, etc.).
  object Include {
    case class Set private[Include] (val bits: Int) extends AnyVal {
      def | (that: Set): Set = Set(bits | that.bits)
      def except(that: Set): Set = Set(bits & ~that.bits)
      def isEmpty: Boolean = bits == 0
      def isOverridden: Boolean = (bits & overridden.bits) != 0
      def isOverriding: Boolean = (bits & overriding.bits) != 0
      def isReferences: Boolean = (bits & references.bits) != 0
      def isDefinitions: Boolean = (bits & definitions.bits) != 0
      def isLinkedClass: Boolean = (bits & linkedClass.bits) != 0
      def isImports: Boolean = (bits & imports.bits) != 0
      def isLocal: Boolean = (bits & local.bits) != 0
    }
    /** The empty set */
    val empty: Set = Set(0)
    /** Include trees whose symbol is overridden by `sym` */
    val overridden: Set = Set(1 << 0)
    /** Include trees whose symbol overrides `sym` */
    val overriding: Set = Set(1 << 1)
    /** Include references */
    val references: Set = Set(1 << 2)
    /** Include definitions */
    val definitions: Set = Set(1 << 3)
    /** Include `sym.linkedClass` */
    val linkedClass: Set = Set(1 << 4)
    /** Include imports in the results */
    val imports: Set = Set(1 << 5)
    /** Include local symbols, inspect local trees */
    val local: Set = Set(1 << 6)
    /** All the flags */
    val all: Set = Set(~0)
  }
/** Does this tree define a symbol ? */
def isDefinition(tree: Tree): Boolean =
tree.isInstanceOf[NamedDefTree]
/** The type of the closest enclosing tree with a type containing position `pos`. */
def enclosingType(trees: List[SourceTree], pos: SourcePosition)(implicit ctx: Context): Type = {
val path = pathTo(trees, pos)
if (path.isEmpty) NoType
else path.head.tpe
}
/** The closest enclosing tree with a symbol containing position `pos`, or the `EmptyTree`.
*/
def enclosingTree(trees: List[SourceTree], pos: SourcePosition)(implicit ctx: Context): Tree =
enclosingTree(pathTo(trees, pos))
/** The closes enclosing tree with a symbol, or the `EmptyTree`.
*/
def enclosingTree(path: List[Tree])(implicit ctx: Context): Tree =
path.dropWhile(!_.symbol.exists).headOption.getOrElse(tpd.EmptyTree)
  /**
   * The source symbols that are the closest to `path`.
   *
   * If this path ends in an import, then this returns all the symbols that are imported by this
   * import statement.
   *
   * @param path The path to the tree whose symbols to extract.
   * @return The source symbols that are the closest to `path`.
   *
   * @see sourceSymbol
   */
  def enclosingSourceSymbols(path: List[Tree], pos: SourcePosition)(implicit ctx: Context): List[Symbol] = {
    val syms = path match {
      // For a named arg, find the target `DefDef` and jump to the param
      case NamedArg(name, _) :: Apply(fn, _) :: _ =>
        val funSym = fn.symbol
        // Synthetic case-class `copy`: resolve the name against the case class
        // itself rather than the synthetic method.
        if (funSym.name == StdNames.nme.copy
          && funSym.is(Synthetic)
          && funSym.owner.is(CaseClass))
          List(funSym.owner.info.member(name).symbol)
        else {
          // Otherwise locate the parameter definition inside the function's
          // DefDef tree; fall back to the function symbol when not found.
          val classTree = funSym.topLevelClass.asClass.rootTree
          val paramSymbol =
            for {
              DefDef(_, _, paramss, _, _) <- tpd.defPath(funSym, classTree).lastOption
              param <- paramss.flatten.find(_.name == name)
            }
            yield param.symbol
          List(paramSymbol.getOrElse(fn.symbol))
        }
      // For constructor calls, return the `<init>` that was selected
      case _ :: (_: New) :: (select: Select) :: _ =>
        List(select.symbol)
      // Imports: return all symbols brought in by the (selector of the) import
      // that covers `pos`.
      case (_: untpd.ImportSelector) :: (imp: Import) :: _ =>
        importedSymbols(imp, _.span.contains(pos.span))
      case (imp: Import) :: _ =>
        importedSymbols(imp, _.span.contains(pos.span))
      case _ =>
        List(enclosingTree(path).symbol)
    }
    // Map to source symbols and drop any that do not exist.
    syms.map(_.sourceSymbol).filter(_.exists)
  }
  /** Check if `tree` matches `sym`.
   *  This is the case if the symbol defined by `tree` equals `sym`,
   *  or the source symbol of tree equals sym,
   *  or `include` is `overridden`, and `tree` is overridden by `sym`,
   *  or `include` is `overriding`, and `tree` overrides `sym`.
   */
  def matchSymbol(tree: Tree, sym: Symbol, include: Include.Set)(implicit ctx: Context): Boolean = {

    // True when sym1 (re)defines sym2 in a subclass of sym2's owner.
    def overrides(sym1: Symbol, sym2: Symbol) =
      sym1.owner.derivesFrom(sym2.owner) && sym1.overriddenSymbol(sym2.owner.asClass) == sym2

    // The override branch only applies to same-named symbols with distinct
    // owners, and only when some inclusion flag is set.
    (  sym == tree.symbol
    || sym.exists && sym == tree.symbol.sourceSymbol
    || !include.isEmpty && sym.name == tree.symbol.name && sym.maybeOwner != tree.symbol.maybeOwner
       && (   include.isOverridden && overrides(sym, tree.symbol)
           || include.isOverriding && overrides(tree.symbol, sym)
          )
    )
  }
/** Find named trees with a non-empty position whose symbol match `sym` in `trees`.
*
* Note that nothing will be found for symbols not defined in source code,
* use `sourceSymbol` to get a symbol related to `sym` that is defined in
* source code.
*/
def namedTrees(trees: List[SourceTree], include: Include.Set, sym: Symbol)
(implicit ctx: Context): List[SourceTree] =
if (!sym.exists)
Nil
else
namedTrees(trees, include, matchSymbol(_, sym, include))
  /** Find named trees with a non-empty position satisfying `treePredicate` in `trees`.
   *
   * @param trees         The trees to inspect.
   * @param include       Whether to include references, definitions, etc.
   * @param treePredicate An additional predicate that the trees must match.
   * @return The trees with a non-empty position satisfying `treePredicate`.
   */
  def namedTrees(trees: List[SourceTree],
                 include: Include.Set,
                 treePredicate: NameTree => Boolean = util.common.alwaysTrue
                )(implicit ctx: Context): List[SourceTree] = safely {
    val buf = new mutable.ListBuffer[SourceTree]
    def traverser(source: SourceFile) =
      new untpd.TreeTraverser {
        // Records a candidate NameTree if it passes all inclusion filters:
        // real symbol, not an error name, not synthetic, a usable span, and
        // either a definition or (when enabled) a reference.
        private def handle(utree: untpd.NameTree): Unit = {
          val tree = utree.asInstanceOf[tpd.NameTree]
          if (tree.symbol.exists
               && tree.name != StdNames.nme.ERROR
               && !tree.symbol.is(Synthetic)
               && !tree.symbol.isPrimaryConstructor
               && tree.span.exists
               && !tree.span.isZeroExtent
               && (include.isReferences || isDefinition(tree))
               && treePredicate(tree))
            buf += SourceTree(tree, source)
        }
        override def traverse(tree: untpd.Tree)(implicit ctx: Context) =
          tree match {
            // Imports are expanded into their individual selections.
            case imp: untpd.Import if include.isImports && tree.hasType =>
              val tree = imp.asInstanceOf[tpd.Import]
              val selections = tpd.importSelections(tree)
              traverse(imp.expr)
              selections.foreach(traverse)
            // Val/def bodies are only descended into when `local` is set.
            case utree: untpd.ValOrDefDef if tree.hasType =>
              handle(utree)
              if (include.isLocal) traverseChildren(tree)
            case utree: untpd.NameTree if tree.hasType =>
              handle(utree)
              traverseChildren(tree)
            // For inlined code, only the original call site is considered.
            case tree: untpd.Inlined =>
              traverse(tree.call)
            case _ =>
              traverseChildren(tree)
          }
      }
    trees.foreach(t => traverser(t.source).traverse(t.tree))
    buf.toList
  }
  /**
   * Find trees that match `symbol` in `trees`.
   *
   * @param trees     The trees to inspect.
   * @param includes  Whether to include references, definitions, etc.
   * @param symbol    The symbol for which we want to find references.
   * @param predicate An additional predicate that the trees must match.
   */
  def findTreesMatching(trees: List[SourceTree],
                        includes: Include.Set,
                        symbol: Symbol,
                        predicate: NameTree => Boolean = util.common.alwaysTrue
                       )(implicit ctx: Context): List[SourceTree] = {
    val linkedSym = symbol.linkedClass
    // A tree qualifies when: definitions are requested or it is not one;
    // it matches the symbol itself or (when requested and present) the
    // symbol's linked class; and it satisfies the caller's extra predicate.
    val fullPredicate: NameTree => Boolean = tree =>
      (  (includes.isDefinitions || !Interactive.isDefinition(tree))
      && (  Interactive.matchSymbol(tree, symbol, includes)
         || (  includes.isLinkedClass
            && linkedSym.exists
            && Interactive.matchSymbol(tree, linkedSym, includes)
            )
         )
      && predicate(tree)
      )
    namedTrees(trees, includes, fullPredicate)
  }
/** The reverse path to the node that closest encloses position `pos`,
 *  or `Nil` if no such path exists. If a non-empty path is returned it starts with
 *  the tree closest enclosing `pos` and ends with an element of `trees`.
 */
def pathTo(trees: List[SourceTree], pos: SourcePosition)(implicit ctx: Context): List[Tree] =
  trees
    .find(_.pos.contains(pos))
    .fold(List.empty[Tree])(enclosing => pathTo(enclosing.tree, pos.span))
/** The reverse path from the node closest enclosing `span` up to `tree`,
 *  restricted to typed trees; `Nil` when `span` lies outside `tree`.
 */
def pathTo(tree: Tree, span: Span)(implicit ctx: Context): List[Tree] =
  if (tree.span.contains(span))
    // Navigate down to the span, keep only trees, then drop the untyped
    // prefix of the path so callers see typed trees only.
    NavigateAST.pathTo(span, tree, skipZeroExtent = true)
      .collect { case t: untpd.Tree => t }
      .dropWhile(!_.hasType).asInstanceOf[List[tpd.Tree]]
  else Nil
/** The context at statement `stat` inside the statement list `stats`,
 *  accumulating the effect of the imports that precede `stat`.
 */
def contextOfStat(stats: List[Tree], stat: Tree, exprOwner: Symbol, ctx: Context): Context = stats match {
  case Nil =>
    ctx
  case first :: _ if first eq stat =>
    ctx.exprContext(stat, exprOwner)
  case (imp: Import) :: rest =>
    // Imports appearing before `stat` are in scope at `stat`.
    contextOfStat(rest, stat, exprOwner, ctx.importContext(imp, imp.symbol(ctx)))
  case _ :: rest =>
    contextOfStat(rest, stat, exprOwner, ctx)
}
/** Compute the context enclosing the innermost tree of `path`, starting from a
 *  fresh run context and refining it for every enclosing node on the path.
 */
def contextOfPath(path: List[Tree])(implicit ctx: Context): Context = path match {
  case Nil | _ :: Nil =>
    ctx.run.runContext.fresh.setCompilationUnit(ctx.compilationUnit)
  case nested :: encl :: rest =>
    val outer = contextOfPath(encl :: rest)
    try encl match {
      case tree @ PackageDef(pkg, stats) =>
        assert(tree.symbol.exists)
        if (nested `eq` pkg) outer
        else contextOfStat(stats, nested, pkg.symbol.moduleClass, outer.packageContext(tree, tree.symbol))
      case tree: DefDef =>
        assert(tree.symbol.exists)
        // Type and value parameters are visible inside the method body.
        val localCtx = outer.localContext(tree, tree.symbol).setNewScope
        for (tparam <- tree.tparams) localCtx.enter(tparam.symbol)
        for (vparams <- tree.vparamss; vparam <- vparams) localCtx.enter(vparam.symbol)
        // Note: this overapproximates visibility a bit, since value parameters are only visible
        // in subsequent parameter sections
        localCtx
      case tree: MemberDef =>
        assert(tree.symbol.exists)
        outer.localContext(tree, tree.symbol)
      case tree @ Block(stats, expr) =>
        // Definitions in a block are visible throughout the block.
        val localCtx = outer.fresh.setNewScope
        stats.foreach {
          case stat: MemberDef => localCtx.enter(stat.symbol)
          case _ =>
        }
        contextOfStat(stats, nested, ctx.owner, localCtx)
      case tree @ CaseDef(pat, guard, rhs) if nested `eq` rhs =>
        // Pattern-bound variables are visible only in the case body.
        val localCtx = outer.fresh.setNewScope
        pat.foreachSubTree {
          case bind: Bind => localCtx.enter(bind.symbol)
          case _ =>
        }
        localCtx
      case tree @ Template(constr, parents, self, _) =>
        if ((constr :: self :: parents).contains(nested)) ctx
        else contextOfStat(tree.body, nested, tree.symbol, outer.inClassContext(self.symbol))
      case _ =>
        outer
    }
    catch {
      // Looking at a symbol of an erroneous node may cycle; fall back to
      // the enclosing context.
      case ex: CyclicReference => outer
    }
}
/** The first tree in the path that is a definition, or `EmptyTree` if none is. */
def enclosingDefinitionInPath(path: List[Tree])(implicit ctx: Context): Tree =
  path.collectFirst { case defTree: DefTree => defTree }.getOrElse(EmptyTree)
/**
 * Find the definitions of the symbol at the end of `path`. In the case of an import node,
 * all imported symbols will be considered.
 *
 * @param path The path to the symbol for which we want the definitions.
 * @param driver The driver responsible for `path`.
 * @return The definitions for the symbol at the end of `path`.
 */
def findDefinitions(path: List[Tree], pos: SourcePosition, driver: InteractiveDriver): List[SourceTree] = {
  implicit val ctx = driver.currentCtx
  val enclTree = enclosingTree(path)
  // Starting from a definition also surfaces the definitions it overrides.
  val includeOverridden = enclTree.isInstanceOf[MemberDef]
  val symbols = enclosingSourceSymbols(path, pos)
  // Non-local symbols may be defined outside the currently open sources.
  val includeExternal = symbols.exists(!_.isLocal)
  findDefinitions(symbols, driver, includeOverridden, includeExternal)
}
/**
 * Find the definitions of `symbols`.
 *
 * @param symbols The list of symbols for which to find a definition.
 * @param driver The driver responsible for the given symbols.
 * @param includeOverridden If true, also include the symbols overridden by any of `symbols`.
 * @param includeExternal If true, also look for definitions on the classpath.
 * @return The definitions for the symbols in `symbols`, and if `includeOverridden` is set, the
 *         definitions for the symbols that they override.
 */
def findDefinitions(symbols: List[Symbol],
                    driver: InteractiveDriver,
                    includeOverridden: Boolean,
                    includeExternal: Boolean): List[SourceTree] = {
  implicit val ctx = driver.currentCtx
  val include = Include.definitions | Include.overriding |
    (if (includeOverridden) Include.overridden else Include.empty)
  symbols.flatMap { sym =>
    // Search by the source-module name so module vals and classes are both found.
    val name = sym.name.sourceModuleName.toString
    val includeLocal = if (sym.exists && sym.isLocal) Include.local else Include.empty
    val trees =
      if (includeExternal) driver.allTreesContaining(name)
      else driver.sourceTreesContaining(name)
    findTreesMatching(trees, include | includeLocal, sym)
  }
}
/**
 * Given `sym`, originating from `sourceDriver`, find its representation in
 * `targetDriver`.
 *
 * @param symbol The symbol to express in the new driver.
 * @param sourceDriver The driver from which `symbol` originates.
 * @param targetDriver The driver in which we want to get a representation of `symbol`.
 * @return A representation of `symbol` in `targetDriver`, or `NoSymbol` if the
 *         owner chain cannot be re-resolved there.
 */
def localize(symbol: Symbol, sourceDriver: InteractiveDriver, targetDriver: InteractiveDriver): Symbol = {
  // Run `fn` with the given driver's context as the contextual Context.
  def in[T](driver: InteractiveDriver)(fn: Context ?=> T): T =
    fn(using driver.currentCtx)
  if (sourceDriver == targetDriver) symbol
  else {
    // Record the owner chain as names in the source driver, outermost first.
    val owners = in(sourceDriver) {
      symbol.ownersIterator.toList.reverse.map(_.name)
    }
    // Re-resolve the chain member-by-member from the root in the target driver;
    // a missing member anywhere yields NoSymbol.
    in(targetDriver) {
      val base: Symbol = defn.RootClass
      owners.tail.foldLeft(base) { (prefix, symbolName) =>
        if (prefix.exists) prefix.info.member(symbolName).symbol
        else NoSymbol
      }
    }
  }
}
/**
 * Return a predicate function that determines whether a given `NameTree` is an implementation of
 * `sym`.
 *
 * @param sym The symbol whose implementations to find.
 * @return A function that determines whether a `NameTree` is an implementation of `sym`.
 */
def implementationFilter(sym: Symbol)(implicit ctx: Context): NameTree => Boolean =
  if (sym.isClass) {
    // For a class/trait: a subtype whose own symbol is concrete (the queried
    // symbol itself only counts when it is neither abstract nor a trait).
    case td: TypeDef =>
      val treeSym = td.symbol
      (treeSym != sym || !treeSym.isOneOf(AbstractOrTrait)) && treeSym.derivesFrom(sym)
    case _ =>
      false
  }
  else {
    // For a member: a non-deferred definition that overrides it.
    case md: MemberDef =>
      matchSymbol(md, sym, Include.overriding) && !md.symbol.is(Deferred)
    case _ =>
      false
  }
/**
 * Is this tree using a renaming introduced by an import statement or an alias for `this`?
 *
 * @param tree The tree to inspect
 * @return True, if this tree's name is different than its symbol's name, indicating that
 *         it uses a renaming introduced by an import statement or an alias for `this`.
 */
def isRenamed(tree: NameTree)(implicit ctx: Context): Boolean =
  tree.symbol.exists && !sameName(tree.name, tree.symbol.name)
/** Are the two names the same?
 *
 *  Module-class suffixes are stripped first so a class and its companion
 *  module compare equal. NOTE(review): `eq` presumes term names are interned
 *  (reference-comparable) — confirm against the Names implementation.
 */
def sameName(n0: Name, n1: Name): Boolean =
  n0.stripModuleClassSuffix.toTermName eq n1.stripModuleClassSuffix.toTermName
/** Evaluate `op`, returning `Nil` when it fails with a `TypeError`;
 *  any other exception propagates.
 */
private[interactive] def safely[T](op: => List[T]): List[T] =
  try op catch { case ex: TypeError => Nil }
}
| som-snytt/dotty | compiler/src/dotty/tools/dotc/interactive/Interactive.scala | Scala | apache-2.0 | 17,070 |
package com.reactific.jfxtend.animation
import javafx.animation.Animation
import com.reactific.jfxtend.Extension
/** Extension point for enriching JavaFX [[Animation]] subtypes via [[Extension]].
 *  (Note: the original header claimed this was a unit test; it is a trait.)
 */
trait AnimationExtensions[T <: Animation] extends Extension[T] {
}
| reactific/jfxtensions | src/main/scala/com/reactific/jfxtend/animation/AnimationExtensions.scala | Scala | apache-2.0 | 226 |
package kofre.rga
import kofre.sets.TwoPSet
/** Type class abstracting over set-like containers `F` of elements `A`:
 *  only insertion and membership are required.
 */
trait SetLike[A, F] {
  // Return `set` with `value` added.
  def add(set: F, value: A): F
  // Is `value` a member of `set`?
  def contains(set: F, value: A): Boolean
}
/** Built-in [[SetLike]] instances: plain immutable `Set`s and project `TwoPSet`s. */
object SetLike {
  given setLike[A]: SetLike[A, Set[A]] with
    override def add(set: Set[A], value: A): Set[A] = set + value
    override def contains(set: Set[A], value: A): Boolean = set.contains(value)

  given twoPSetLike[A]: SetLike[A, TwoPSet[A]] with
    override def add(set: TwoPSet[A], value: A): TwoPSet[A] = set.add(value)
    override def contains(set: TwoPSet[A], value: A): Boolean = set.contains(value)
}
| guidosalva/REScala | Code/Extensions/Kofre/src/main/scala/kofre/rga/SetLike.scala | Scala | apache-2.0 | 579 |
package au.com.dius.pact.provider.sbtsupport
import java.util.concurrent.Executors
import au.com.dius.pact.model._
import au.com.dius.pact.provider.{EnterStateRequest, ServiceInvokeRequest}
import org.scalatest.exceptions.TestFailedException
import org.scalatest.{Assertions, FreeSpec}
import scala.collection.JavaConversions
import scala.concurrent.duration._
import scala.concurrent.{Await, ExecutionContext, Future}
/** Derives one ScalaTest example per interaction in `pact`: each example
 *  optionally moves the provider into the interaction's state, replays the
 *  interaction's request against the provider, and fails on any mismatch
 *  between the expected and actual response.
 *
 *  @param config  provider/state-change endpoints
 *  @param pact    the pact whose interactions are verified
 *  @param timeout how long to wait for each provider round-trip
 */
class PactSpec(config: PactConfiguration, pact: RequestResponsePact)(implicit timeout: Duration = 10.seconds) extends FreeSpec with Assertions {
  implicit val executionContext = ExecutionContext.fromExecutor(Executors.newCachedThreadPool)
  JavaConversions.asScalaBuffer(pact.getInteractions).toList.foreach { interaction =>
    s"""pact for consumer ${pact.getConsumer.getName}
       |provider ${pact.getProvider.getName}
       |interaction "${interaction.getDescription}"
       |in state: "${interaction.getProviderState}" """.stripMargin in {
      // Enter the provider state first when both a state-change URL and a
      // state are present; otherwise skip with a synthetic 200 response.
      val stateChangeFuture = (Option.apply(config.getStateChangeUrl), Option.apply(interaction.getProviderState)) match {
        case (Some(stateChangeUrl), Some(providerState)) => HttpClient.run(EnterStateRequest(stateChangeUrl.url, providerState))
        case (_, _) => Future.successful(new Response(200))
      }
      // Sequence: state change, then the actual request to the provider.
      val pactResponseFuture: Future[Response] = for {
        _ <- stateChangeFuture
        response <- HttpClient.run(ServiceInvokeRequest(config.getProviderRoot.url, interaction.getRequest))
      } yield response
      val actualResponse = Await.result(pactResponseFuture, timeout)
      val responseMismatches = ResponseMatching.responseMismatches(interaction.getResponse, actualResponse)
      if (responseMismatches.nonEmpty) {
        throw new TestFailedException(s"There were response mismatches: \\n${responseMismatches.mkString("\\n")}", 10)
      }
    }
  }
}
| olga-vasylchenko/pact-jvm | pact-jvm-provider/src/main/scala/au/com/dius/pact/provider/sbtsupport/PactSpec.scala | Scala | apache-2.0 | 1,903 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.thriftserver
import org.apache.commons.logging.LogFactory
import org.apache.hadoop.hive.ql.session.SessionState
import org.apache.hive.service.cli.{RowSet, RowSetFactory, TableSchema}
/**
 * Various utilities for hive-thriftserver used to upgrade the built-in Hive.
 */
private[thriftserver] object ThriftserverShimUtils {

  // Aliases for the Hive thrift CLI types, so callers stay source-compatible
  // across shim versions.
  private[thriftserver] type TProtocolVersion = org.apache.hive.service.cli.thrift.TProtocolVersion
  private[thriftserver] type Client = org.apache.hive.service.cli.thrift.TCLIService.Client
  private[thriftserver] type TOpenSessionReq = org.apache.hive.service.cli.thrift.TOpenSessionReq
  private[thriftserver] type TGetSchemasReq = org.apache.hive.service.cli.thrift.TGetSchemasReq
  private[thriftserver] type TGetTablesReq = org.apache.hive.service.cli.thrift.TGetTablesReq

  // Console helper bound to the CLI driver's log category.
  private[thriftserver] def getConsole: SessionState.LogHelper = {
    val LOG = LogFactory.getLog(classOf[SparkSQLCLIDriver])
    new SessionState.LogHelper(LOG)
  }

  // Build a RowSet matching the given result schema and thrift protocol version.
  private[thriftserver] def resultRowSet(
      getResultSetSchema: TableSchema,
      getProtocolVersion: TProtocolVersion): RowSet = {
    RowSetFactory.create(getResultSetSchema, getProtocolVersion)
  }
}
| aosagie/spark | sql/hive-thriftserver/v1.2.1/src/main/scala/org/apache/spark/sql/hive/thriftserver/ThriftserverShimUtils.scala | Scala | apache-2.0 | 2,020 |
/* Copyright 2013 Nest Labs
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package nest.sparkle.loader.avro
import nest.sparkle.loader.ArrayRecordColumns
import nest.sparkle.store.Event
import org.apache.avro.generic.GenericData
import org.scalatest.{Matchers, FunSuite}
/** Round-trip test: a single Avro record goes through AvroSingleRecordDecoder
 *  and comes back as one id plus one single-event column.
 */
class TestAvroSingleRecordDecoder extends FunSuite with Matchers {
  test("trip through the AvroSingleRecordDecoder") {
    // Build an Avro record carrying an id, a key ("time") and a value.
    val schema = MillisDoubleAvro.schema
    val record = new GenericData.Record(schema)
    record.put("id", "abc")
    record.put("time", 1L)
    record.put("value", 2.1)

    // Decode, keying on "time" and identifying the series by "id".
    val decoder = AvroSingleRecordDecoder.decoder(schema = schema,
      idFields = Seq(("id", None)),
      keyField = "time")
    val resultColumns: ArrayRecordColumns = decoder.decodeRecord(record)

    // Expect exactly one id, with the original value.
    resultColumns.ids.size shouldBe 1
    resultColumns.ids(0) match {
      case Some(id) => id.toString() shouldBe "abc"
      case _ => throw new Exception("id not in the ArrayRecordColumn")
    }
    // Expect exactly one column holding the single decoded event.
    resultColumns.columns.size shouldBe 1
    resultColumns.columns(0).size shouldBe 1
    val resultEvent = resultColumns.columns(0)(0).asInstanceOf[Event[Long,Double]]
    resultEvent.key shouldBe 1L
    resultEvent.value shouldBe 2.1
  }
}
| mighdoll/sparkle | avro-loader/src/it/scala/nest/sparkle/loader/avro/TestAvroSingleRecordDecoder.scala | Scala | apache-2.0 | 1,727 |
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar
import slamdata.Predef._
import quasar.fp._
import quasar.fp.ski._
import argonaut._, Argonaut._
import matryoshka._
import scalaz._, Scalaz._
/** A language-agnostic rendering of a tree: a node type, an optional label,
 *  and child trees. Used for diff-friendly display of ASTs.
 */
final case class RenderedTree(nodeType: List[String], label: Option[String], children: List[RenderedTree]) {
  // The head of `nodeType`, when present.
  def simpleType: Option[String] = nodeType.headOption

  // Rewrite the node type list with `f`, keeping label and children.
  def retype(f: List[String] => List[String]) = this.copy(nodeType = f(nodeType))

  /** A tree that describes differences between two trees:
   * - If the two trees are identical, the result is the same as (either) input.
   * - If the trees differ only in the labels on nodes, then the result has those
   *   nodes decorated with "[Changed] old -> new".
   * - If a single node is unmatched on either side, it is decorated with "[Added]"
   *   or "[Deleted]".
   * As soon as a difference is found and decorated, the subtree(s) beneath the
   * decorated nodes are not inspected.
   *
   * Node types are not compared or necessarily preserved.
   */
  def diff(that: RenderedTree): RenderedTree = {
    // Prepend marker `p` to the first node-type component (or use it alone).
    def prefixedType(t: RenderedTree, p: String): List[String] = t.nodeType match {
      case first :: rest => (p + " " + first) :: rest
      case Nil => p :: Nil
    }
    def prefixType(t: RenderedTree, p: String): RenderedTree = t.copy(nodeType = prefixedType(t, p))
    val deleted = ">>>"
    val added = "<<<"
    (this, that) match {
      case (RenderedTree(nodeType1, l1, children1), RenderedTree(nodeType2, l2, children2)) => {
        if (nodeType1 =/= nodeType2 || l1 =/= l2)
          // Roots differ: show both whole trees, marked deleted/added.
          RenderedTree(List("[Root differs]"), None,
            prefixType(this, deleted) ::
            prefixType(that, added) ::
            Nil)
        else {
          // Align children pairwise, allowing a one-element skip on either
          // side so a single insertion/deletion does not cascade.
          @SuppressWarnings(Array("org.wartremover.warts.Recursion"))
          def matchChildren(children1: List[RenderedTree], children2: List[RenderedTree]): List[RenderedTree] = (children1, children2) match {
            case (Nil, Nil) => Nil
            case (x :: xs, Nil) => prefixType(x, deleted) :: matchChildren(xs, Nil)
            case (Nil, x :: xs) => prefixType(x, added) :: matchChildren(Nil, xs)
            case (a :: as, b :: bs) if a.typeAndLabel ≟ b.typeAndLabel => a.diff(b) :: matchChildren(as, bs)
            case (a1 :: a2 :: as, b :: bs) if a2.typeAndLabel ≟ b.typeAndLabel => prefixType(a1, deleted) :: a2.diff(b) :: matchChildren(as, bs)
            case (a :: as, b1 :: b2 :: bs) if a.typeAndLabel ≟ b2.typeAndLabel => prefixType(b1, added) :: a.diff(b2) :: matchChildren(as, bs)
            case (a :: as, b :: bs) => prefixType(a, deleted) :: prefixType(b, added) :: matchChildren(as, bs)
          }
          RenderedTree(nodeType1, l1, matchChildren(children1, children2))
        }
      }
    }
  }

  /**
  A 2D String representation of this Tree, separated into lines. Based on
  scalaz Tree's show, but improved to use a single line per node, use
  unicode box-drawing glyphs, and to handle newlines in the rendered
  nodes.
  */
  def draw: Stream[String] = {
    @SuppressWarnings(Array("org.wartremover.warts.Recursion"))
    def drawSubTrees(s: List[RenderedTree]): Stream[String] = s match {
      case Nil => Stream.Empty
      case t :: Nil => shift("╰─ ", " ", t.draw)
      case t :: ts => shift("├─ ", "│ ", t.draw) append drawSubTrees(ts)
    }
    // Prepend `first` to the first line of `s` and `other` to the rest.
    def shift(first: String, other: String, s: Stream[String]): Stream[String] =
      (first #:: Stream.continually(other)).zip(s).map {
        case (a, b) => a + b
      }
    val (prefix, body, suffix) = (simpleType, label) match {
      case (None, None) => ("", "", "")
      case (None, Some(label)) => ("", label, "")
      case (Some(simpleType), None) => ("", simpleType, "")
      case (Some(simpleType), Some(label)) => (simpleType + "(", label, ")")
    }
    val indent = " " * (prefix.length-2)
    val lines = body.split("\\n")
    // Multi-line labels: only the first line gets the prefix and only the
    // last line gets the suffix; middle lines are indented to align.
    lines.zipWithIndex.map { case (a, index) =>
      def first = index == 0
      def last = index == lines.length - 1
      val pre = if (first) prefix else indent
      val suf = if (last) suffix else ""
      pre + a + suf
    } ++: drawSubTrees(children)
  }

  // Compact "type(label)" key used when aligning children in `diff`.
  private def typeAndLabel: String = (simpleType, label) match {
    case (None, None) => ""
    case (None, Some(label)) => label
    case (Some(simpleType), None) => simpleType
    case (Some(simpleType), Some(label)) => simpleType + "(" + label + ")"
  }
}
object RenderedTree {
  // Render via `draw`, one line per node.
  implicit val RenderedTreeShow: Show[RenderedTree] = new Show[RenderedTree] {
    override def show(t: RenderedTree) = t.draw.mkString("\\n")
  }

  // JSON shape: "type" only when a node type exists (components joined
  // innermost-first with "/"), "label" always, "children" only when non-empty.
  implicit val RenderedTreeEncodeJson: EncodeJson[RenderedTree] = EncodeJson {
    case RenderedTree(nodeType, label, children) =>
      Json.obj((
        (nodeType match {
          case Nil => None
          case _ => Some("type" := nodeType.reverse.mkString("/"))
        }) ::
        Some("label" := label) ::
        {
          // `children.empty` is scalaz Foldable syntax returning Boolean
          // (true when the list has no elements) — not the collection builder.
          if (children.empty) None
          else Some("children" := children.map(RenderedTreeEncodeJson.encode(_)))
        } ::
        Nil).foldMap(_.toList): _*)
  }

  // A RenderedTree renders as itself.
  implicit val renderTree: RenderTree[RenderedTree] = RenderTree.make(ι)
}
/** Constructs a leaf: a [[RenderedTree]] with no children. */
object Terminal {
  def apply(nodeType: List[String], label: Option[String]): RenderedTree =
    RenderedTree(nodeType, label, children = Nil)
}
/** Constructs an inner node: a [[RenderedTree]] with the given children. */
object NonTerminal {
  def apply(nodeType: List[String], label: Option[String], children: List[RenderedTree]): RenderedTree =
    RenderedTree(nodeType = nodeType, label = label, children = children)
}
| drostron/quasar | foundation/src/main/scala/quasar/RenderedTree.scala | Scala | apache-2.0 | 6,172 |
package org.alitouka.spark.dbscan.util.io
import org.apache.spark.SparkContext
import scala.collection.mutable.WrappedArray.ofDouble
import org.alitouka.spark.dbscan.{DbscanModel, RawDataSet, ClusterId, PointCoordinates}
import org.apache.spark.rdd.RDD
import org.alitouka.spark.dbscan.spatial.Point
/** Contains functions for reading and writing data
 *
 */
object IOHelper {

  /** Reads a dataset from a CSV file. That file should contain double values separated by commas
   *
   * @param sc A SparkContext into which the data should be loaded
   * @param path A path to the CSV file
   * @return A [[org.alitouka.spark.dbscan.RawDataSet]] populated with points
   */
  def readDataset (sc: SparkContext, path: String): RawDataSet = {
    val rawData = sc.textFile (path)

    // Each CSV line becomes one Point whose coordinates are the parsed doubles.
    // NOTE(review): a malformed line will throw NumberFormatException lazily,
    // when an action is run on the RDD — confirm this is the intended behavior.
    rawData.map (
      line => {
        new Point (line.split(separator).map( _.toDouble ))
      }
    )
  }

  /** Saves clustering result into a CSV file. The resulting file will contain the same data as the input file,
    * with a cluster ID appended to each record. The order of records is not guaranteed to be the same as in the
    * input file
    *
    * @param model A [[org.alitouka.spark.dbscan.DbscanModel]] obtained from Dbscan.train method
    * @param outputPath Path to a folder where results should be saved. The folder will contain multiple
    *                   partXXXX files
    */
  def saveClusteringResult (model: DbscanModel, outputPath: String) {
    model.allPoints.map ( pt => {
      pt.coordinates.mkString(separator) + separator + pt.clusterId
    } ).saveAsTextFile(outputPath)
  }

  // Writes (x, y, id) triples as CSV lines; used internally by the dbscan package.
  private [dbscan] def saveTriples (data: RDD[(Double, Double, Long)], outputPath: String) {
    data.map ( x => x._1 + separator + x._2 + separator + x._3 ).saveAsTextFile(outputPath)
  }

  // Field separator for all CSV input/output in this object.
  private def separator = ","
}
| alitouka/spark_dbscan | src/src/main/scala/org/alitouka/spark/dbscan/util/io/IOHelper.scala | Scala | apache-2.0 | 1,832 |
/*
* Copyright 2017 PayPal
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.squbs.pattern.stream
import akka.actor.ActorSystem
import akka.stream.scaladsl.{GraphDSL, RunnableGraph}
import akka.stream.{ActorMaterializer, ClosedShape}
import com.typesafe.config.ConfigFactory
import org.scalatest.concurrent.Eventually
import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers}
import org.squbs.testkit.Timeouts._
import scala.concurrent.Await
/** Verifies PersistentBufferAtLeastOnce behavior when elements are committed
 *  out of order: `strict` policy must fail the stream, `lenient` must not.
 */
class PersistentBufferCommitOrderSpec extends FlatSpec with Matchers with BeforeAndAfterAll with Eventually {

  implicit val system = ActorSystem("PersistentBufferCommitOrderSpec")
  implicit val mat = ActorMaterializer()
  implicit val serializer = QueueSerializer[Int]()
  import StreamSpecUtil._

  override def afterAll = {
    Await.ready(system.terminate(), awaitMax)
  }

  it should "fail when an out of order commit is attempted and commit-order-policy = strict" in {
    val util = new StreamSpecUtil[Int, Event[Int]]
    import util._
    val buffer = PersistentBufferAtLeastOnce[Int](ConfigFactory.parseString("commit-order-policy = strict").withFallback(config))
    val commit = buffer.commit[Int]
    // Dropping a random element between buffer and commit creates the
    // out-of-order commit the strict policy must reject.
    val streamGraph = RunnableGraph.fromGraph(GraphDSL.create(flowCounter) { implicit builder =>
      sink =>
        import GraphDSL.Implicits._
        in ~> buffer.async ~> filterARandomElement ~> commit ~> sink
        ClosedShape
    })
    val sinkF = streamGraph.run()
    Await.result(sinkF.failed, awaitMax) shouldBe an[CommitOrderException]
    clean()
  }

  it should "not fail when an out of order commit is attempted and commit-order-policy = lenient" in {
    val util = new StreamSpecUtil[Int, Event[Int]]
    import util._
    val buffer = PersistentBufferAtLeastOnce[Int](ConfigFactory.parseString("commit-order-policy = lenient").withFallback(config))
    val commit = buffer.commit[Int]
    val streamGraph = RunnableGraph.fromGraph(GraphDSL.create(flowCounter) { implicit builder =>
      sink =>
        import GraphDSL.Implicits._
        in ~> buffer.async ~> filterARandomElement ~> commit ~> sink
        ClosedShape
    })
    val countFuture = streamGraph.run()
    val count = Await.result(countFuture, awaitMax)
    // One element was filtered out, so the sink sees all but one.
    count shouldBe elementCount - 1
    eventually { buffer.queue shouldBe 'closed }
    clean()
  }
}
| SarathChandran/squbs | squbs-pattern/src/test/scala/org/squbs/pattern/stream/PersistentBufferCommitOrderSpec.scala | Scala | apache-2.0 | 2,833 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler
import org.apache.spark.annotation.DeveloperApi
@DeveloperApi
object TaskLocality extends Enumeration {
  // Process local is expected to be used ONLY within TaskSetManager for now.
  // Declaration order defines Enumeration ids, so the ordering is
  // PROCESS_LOCAL < NODE_LOCAL < RACK_LOCAL < ANY (most to least local).
  val PROCESS_LOCAL, NODE_LOCAL, RACK_LOCAL, ANY = Value

  type TaskLocality = Value

  /** True when `condition` is at least as local as `constraint` permits. */
  def isAllowed(constraint: TaskLocality, condition: TaskLocality): Boolean = {
    condition <= constraint
  }
}
| yelshater/hadoop-2.3.0 | spark-core_2.10-1.0.0-cdh5.1.0/src/main/scala/org/apache/spark/scheduler/TaskLocality.scala | Scala | apache-2.0 | 1,222 |
package com.monochromeroad.play.xwiki.rendering.plugin
import com.monochromeroad.play.xwiki.rendering.XWikiRendererConfiguration
/** Reads the default-macro settings for the XWiki rendering plugin from the
 *  current Play application configuration.
 *
 * @author Masatoshi Hayashi
 */
object DefaultXWikiRenderingPluginConfiguration {
  private final val KEY_MACRO: String = "xwiki.rendering.default.macros"
  private final val KEY_MACRO_ENABLED: String = KEY_MACRO + ".enabled"

  private val configuration = play.api.Play.current.configuration

  // Every key under KEY_MACRO except the ".enabled" flag names a macro entry.
  private val macroListKeys = configuration.keys filter(p => p.startsWith(KEY_MACRO) && !p.startsWith(KEY_MACRO_ENABLED))

  // NOTE(review): `.get` presumes each matched key holds a string value; a
  // non-string value would throw here at load time — confirm intended.
  val macroList = macroListKeys.map(configuration.getString(_).get)

  // Honor the ".enabled" flag when present; otherwise fall back to the
  // renderer configuration's default.
  val rendererConfiguration: XWikiRendererConfiguration = configuration.getBoolean(KEY_MACRO_ENABLED) match {
    case Some(macrosEnabled) => new XWikiRendererConfiguration(macrosEnabled = macrosEnabled)
    case _ => new XWikiRendererConfiguration()
  }
}
| literalice/play-xwiki-rendering | src/main/scala/com/monochromeroad/play/xwiki/rendering/plugin/DefaultXWikiRenderingPluginConfiguration.scala | Scala | lgpl-2.1 | 889 |
package uk.co.morleydev.zander.client.test.spec.update
import uk.co.morleydev.zander.client.test.spec.{ResponseCodes, SpecTest}
import uk.co.morleydev.zander.client.util.using
import uk.co.morleydev.zander.client.test.gen.GenNative
import java.io.File
/** Integration test for `update` when the cache holds the sources plus stale
 *  artefacts while the locally installed artefacts are already at the required
 *  version: the full git + cmake pipeline must still run and re-tag the local
 *  install with the new version and file list.
 */
class UpdateWithExistingOutOfDateArtefactsAndSourcesInTheCacheAndInstalledArtefactsThatAreUpToDateTests extends SpecTest {

  override def cmakeTestCase(compiler : String, mode: String, cmakeBuildType: String, generator: String) = {
    describe("Given the project/compiler endpoint exists and the cache already contains the source but out-of-date artefacts") {
      describe("When up-to-date artefacts are installed and update is carried out for %s.%s".format(compiler, mode)) {
        using(this.start()) {
          testHarness =>
            val artefactVersion = GenNative.genAlphaNumericString(10, 100)
            val gitUrl = "http://git_url/request/at_me"
            // Representative spread of headers and libraries across
            // include/, lib/ and bin/ (with nested subdirectories).
            val expectedFiles = Seq[String]("include/" + GenNative.genAlphaNumericString(1, 20),
              "include/sub_dir/" + GenNative.genAlphaNumericString(1, 20) + ".h",
              "lib/" + GenNative.genAlphaNumericString(1, 20) + ".a",
              "lib/subdir/" + GenNative.genAlphaNumericString(1, 20) + ".a",
              "lib/" + GenNative.genAlphaNumericString(1, 20) + ".dll",
              "lib/" + GenNative.genAlphaNumericString(1, 20) + ".so",
              "lib/" + GenNative.genAlphaNumericString(1, 20) + ".so.12.2",
              "lib/subdir2/" + GenNative.genAlphaNumericString(1, 20) + ".so",
              "lib/subdir2/" + GenNative.genAlphaNumericString(1, 20) + ".so.12.32",
              "lib/subdir2/" + GenNative.genAlphaNumericString(1, 20) + ".dll",
              "bin/" + GenNative.genAlphaNumericString(1, 20) + ".dll",
              "bin/" + GenNative.genAlphaNumericString(1, 20) + ".so",
              "bin/" + GenNative.genAlphaNumericString(1, 20) + ".so.12.25.a",
              "bin/subdir/" + GenNative.genAlphaNumericString(1, 20) + ".dll",
              "bin/subdir2/" + GenNative.genAlphaNumericString(1, 20) + ".so",
              "bin/subdir/" + GenNative.genAlphaNumericString(1, 20) + ".so.12.25.a")
              .map(s => new File(s).toString)
            testHarness
              .givenAServer()
              .givenFullGitPipelineIsPossible(artefactVersion, isUpdate = true)
              .givenFullCMakePipelineIsPossible(expectedFiles)
              .whenUpdating(compiler = compiler, mode = mode)
              .whenTheCacheAlreadyContainsTheSourceCode()
              // Cached artefacts are tagged with a *different* version to force the rebuild.
              .whenTheCacheAlreadyContainsArtefacts(GenNative.genAlphaNumericStringExcluding(1, 20, Seq[String](artefactVersion)),
                expectedFiles)
              .whenTheArtefactsAreLocallyInstalled(artefactVersion, expectedFiles)
              .expectSuccessfulRequest(gitUrl)
              .invokeMain()
              .thenTheExpectedServerRequestsWereHandled()
              .thenAGitUpdateWasInvoked()
              .thenAGitCheckoutWasInvoked()
              .thenTheGitVersionWasRetrieved()
              .thenACMakePreBuildWasInvoked(cmakeBuildType, generator)
              .thenACMakeBuildWasInvoked(cmakeBuildType)
              .thenACMakeInstallWasInvoked(cmakeBuildType)
              .thenExpectedResponseCodeWasReturned(ResponseCodes.Success)
              .thenTheLocalArtefactsWereTaggedWithTheExpectedVersion(artefactVersion)
              .thenTheLocalArtefactsWereTaggedWithTheExpectedFiles(expectedFiles)
              .thenTheExpectedFilesWereInstalledLocally(expectedFiles)
        }
      }
    }
  }

  runSingleCmakeCase()
}
| MorleyDev/zander.client | src/test/scala/uk/co/morleydev/zander/client/test/spec/update/UpdateWithExistingOutOfDateArtefactsAndSourcesInTheCacheAndInstalledArtefactsThatAreUpToDateTests.scala | Scala | mit | 3,643 |
// see the following discussions to understand what's being tested here:
// * https://github.com/scala/bug/issues/6992
// * https://github.com/scala/bug/issues/8048
// * https://stackoverflow.com/questions/14370842/getting-a-structural-type-with-an-anonymous-classs-methods-from-a-macro
// * https://stackoverflow.com/questions/18480707/method-cannot-be-accessed-in-macro-generated-class/18485004#18485004
// * https://groups.google.com/forum/#!topic/scala-internals/eXQt-BPm4i8
import scala.language.experimental.macros
import scala.reflect.macros.whitebox.Context
/** Macros whose expansions introduce a locally-defined type (trait, class, or
 *  anonymous refinement) — exercising how such types are exposed to callers
 *  (see the discussion links at the top of this file).
 */
object Macros {
  // Expansion defines a local trait and instantiates an anonymous subclass of it.
  def impl1(c: Context) = {
    import c.universe._
    q"""
      trait Foo { def x = 2 }
      new Foo {}
    """
  }
  def foo1: Any = macro impl1

  // Expansion defines a local class and instantiates it directly.
  def impl2(c: Context) = {
    import c.universe._
    q"""
      class Foo { def x = 2 }
      new Foo
    """
  }
  def foo2: Any = macro impl2

  // Expansion is an anonymous structural refinement.
  def impl3(c: Context) = {
    import c.universe._
    q"""
      new { def x = 2 }
    """
  }
  def foo3: Any = macro impl3
}
| scala/scala | test/files/run/t8048b/Macros_1.scala | Scala | apache-2.0 | 1,012 |
package models
/** ADT describing the displayable content of a file. */
sealed trait FileDescriptor
/** No content to display. */
case object Empty extends FileDescriptor
/** Binary content, addressed by the `href` link rather than inlined. */
case class Binary(href: String) extends FileDescriptor
/** Image content, addressed by the `href` link. */
case class Image(href: String) extends FileDescriptor
/** Plain text content, carried inline. */
case class Text(text: String) extends FileDescriptor
/** Markdown content, carried as already-rendered `html`. */
case class Markdown(html: String) extends FileDescriptor
| sne11ius/archetypes | app/models/FileDescriptor.scala | Scala | gpl-3.0 | 305 |
package org.pinky.example
import servlets._
import org.pinky.comet.CometServlet
import org.eclipse.jetty.continuation.ContinuationFilter
import com.google.inject.{Scopes, AbstractModule}
import org.pinky.guice.{CakeServletModule, ScalaServletModule, ScalaModule, PinkyServletContextListener, RepresentationModule}
import com.google.inject.servlet.ServletModule
import org.pinky.actor.ActorClient
/**
* Listener example which demonstrates how to configure guice managed filters, servlets and other components the "pinky way"
*
* @author peter hausel gmail com (Peter Hausel)
*
*/
class ExampleListener extends PinkyServletContextListener
{
  // Guice modules installed by the listener, in declaration order.
  modules = Array (
    // Standard representation (response rendering) bindings.
    new RepresentationModule(),
    new ScalaModule{
      override def configure {
        // Back the comet servlet's actor client with the ping-pong example.
        bind[ActorClient].to[PingPongClient]
        // Jetty continuation filter bound as a singleton.
        bind[ContinuationFilter].in(Scopes.SINGLETON)
      }
    },
    new ServletModule {
      override def configureServlets {
        // Plain guice-servlet style mapping for the comet endpoint.
        serve("/comet*") by (classOf[ExampleCometServlet])
      }
    },
    new ScalaServletModule {
      override def configureServlets {
        // "Pinky way" bindings: filters/servlets mapped via bindFilter/bindServlet.
        bindFilter[ExampleFilter].toUrl("/hello/*")
        bindFilter[ContinuationFilter].toUrl("/comet*")
        bindServlet[ExampleRssServlet].toUrl("*.rss")
        //bindServlet[ExampleServlet].toUrl("/hello/*")
      }
    },
    // Cake-pattern variant: the servlet instance is constructed by hand with
    // its dependencies mixed in, then registered.
    new CakeServletModule with CakeExampleContainer with ExampleServletCakeContainer {
      val example = new Eater
      override def configureServlets {
        bindServlet(new ExampleServletCake).toUrl("/hello/*")
      }
    }
  )
}
| d5nguyenvan/pinky | src/main/scala/org/pinky/example/ExampleListener.scala | Scala | bsd-3-clause | 1,553 |
package register_simulator.nodes
/**
* Created by Domenico on 10/12/2015.
*/
/**
 * Register-machine HALT instruction.
 *
 * Executing it produces no output; its textual form is the literal "HALT".
 */
class HaltNode extends Instruction {
  override def toString: String = "HALT"
  override def execute(): String = ""
}
| helicopter88/register_machine_simulator | src/main/scala/register_simulator/nodes/HaltNode.scala | Scala | gpl-2.0 | 186 |
package se.uprise.graphql_alt2.starwars
// An example for how to map a GraphQL schema on to Scala's type system.
// Work in progress and still lots of unknowns but the great benefit we are after is true type safety at compile time.
// Tries to mimic the original Star Wars schema found in graphql-js reference impl.
// The schema below does not deal with Futures at all but it something we definitively need to support.
import se.uprise.graphql_alt2.GraphQLInterface
import scala.annotation.StaticAnnotation
// Test fixture data
/** In-memory Star Wars fixture data plus the lookup helpers used by resolvers. */
object Data {

  val luke = new Human(
    id = "1000",
    name = "Luke Skywalker",
    friendIds = List("1002", "1003", "2000", "2001"),
    appearsInIds = List(4, 5, 6),
    _homePlanet = Some("Tatooine")
  )

  val vader = new Human(
    id = "1001",
    name = "Darth Vader",
    friendIds = List("1004"),
    appearsInIds = List(4, 5, 6),
    _homePlanet = Some("Tatooine")
  )

  val han = new Human(
    id = "1002",
    name = "Han Solo",
    friendIds = List("1000", "1003", "2001"),
    appearsInIds = List(4, 5, 6)
  )

  // Fixed: Leia's id was "1002", which duplicated Han's id and disagreed with
  // both her humanData key and the friend lists that refer to her as "1003".
  val leia = new Human(
    id = "1003",
    name = "Leia Organa",
    friendIds = List("1000", "1002", "2000", "2001"),
    appearsInIds = List(4, 5, 6),
    _homePlanet = Some("Alderaan")
  )

  val tarkin = new Human(
    id = "1004",
    name = "Wilhuff Tarkin",
    friendIds = List("1001"),
    appearsInIds = List(4)
  )

  val humanData = Map(
    "1000" -> luke,
    "1001" -> vader,
    "1002" -> han,
    "1003" -> leia,
    "1004" -> tarkin
  )

  val threepio = new Droid(
    id = "2000",
    name = "C-3PO",
    friendIds = List("1000", "1002", "1003", "2001"),
    appearsInIds = List(4, 5, 6),
    _primaryFunction = Some("Protocol")
  )

  val artoo = new Droid(
    id = "2001",
    name = "R2-D2",
    friendIds = List("1000", "1002", "1003"),
    appearsInIds = List(4, 5, 6),
    _primaryFunction = Some("Astromech")
  )

  val droidData = Map(
    "2000" -> threepio,
    "2001" -> artoo
  )

  /**
   * Helper function to get a character by ID. Humans are consulted first,
   * then droids; an unknown ID yields None (no nulls involved).
   */
  def getCharacter(id: String): Option[Character] =
    humanData.get(id).orElse(droidData.get(id))

  /**
   * Helper function to get an episode by ID, None when the ID is unknown.
   * Fixed: Option(Episode.lookup(id)) used Map.apply, which throws
   * NoSuchElementException for unknown ids instead of returning None.
   */
  def getEpisode(id: Int): Option[Episode] = Episode.lookup.get(id)

  /** Resolves a character's friend IDs, silently dropping unknown IDs. */
  def getFriends(friendsIds: List[String]): List[Character] = friendsIds.flatMap(getCharacter)

  /** Resolves episode IDs to episodes, silently dropping unknown IDs. */
  def getEpisodes(episodeIds: List[Int]): List[Episode] = episodeIds.flatMap(getEpisode)
}
// Placeholders for now. For Scala to retain annotations at runtime, they need to be declared in Java.
// Annotations can be used to express metadata needed by the GraphQL introspection.
// Placeholders for now. For Scala to retain annotations at runtime, they need to be declared in Java.
// Annotations can be used to express metadata needed by GraphQL introspection.
// The "__notset" sentinel marks a description that was never provided.
case class Interface(desc: String = "__notset") extends StaticAnnotation
case class Field(desc: String = "__notset") extends StaticAnnotation
case class Object(desc: String = "__notset") extends StaticAnnotation
// A NonNull annotation might be preferable to Scala's Option here, since GraphQL can
// guarantee that inputs are non-null — removing the need to handle absence in end-user code.
case class NonNull() extends StaticAnnotation
// Ghetto implementation of a GraphQL Enum. Consider it a placeholder.
// TBD: Use Scala Enumeration or a GraphQLEnum?
/** Minimal stand-in for a GraphQL enum member: a wire value plus a description. */
case class Episode(value: Any, description: String)

object Episode {
  val NEWHOPE = Episode(4, "Released in 1977.")
  val EMPIRE = Episode(5, "Released in 1980.")
  val JEDI = Episode(6, "Released in 1983.")

  /** Every member keyed by its wire value, for reverse lookup. */
  val lookup: Map[Any, Episode] =
    List(NEWHOPE, EMPIRE, JEDI).map(episode => episode.value -> episode).toMap
}
// Example below uses Scala native data types instead of GraphQLString, GraphQLList etc.
// Still something we need to figure out.
@Interface(desc="A character in the Star Wars Trilogy")
trait Character extends GraphQLInterface {

  @Field(desc="The id of the character.")
  def id: String

  @Field(desc="The name of the character.")
  def name: String

  // Implementors provide the actual resolver bodies (see Human and Droid).
  @Field(desc="The friends of the character, or an empty list if they have none.")
  def friends: List[Character]

  @Field(desc="Which movies they appear in.")
  def appearsIn: List[Episode]

  // TBD: How to handle resolveType?
  // In graphql-js, Character is aware of its implementors (why?).
}
// Human and Droid are actual classes and instances unlike graphql-js. The reason for this is to enforce type safety.
// Rather than relying on the untyped "source" parameter JS object/Scala Map,
// the data required for Human/Droid to do it's job is passed to the constructor.
// It is of course a fine balance how much data you should require in the constructor.
// Preferably as little a possible to avoid overfetching from underlying data sources (other services or a database).
// Additional nesting of fields/objects will likely help address this problem.
@Object(desc="A humanoid creature in the Star Wars universe.")
case class Human(id: String, name: String, friendIds: List[String], appearsInIds: List[Int], _homePlanet: Option[String] = None) extends Character {

  // Note how friends and episodes are only resolved when asked for (by calling
  // the method, i.e. the resolver). Right now these are fast in-memory lookups,
  // but they could be something slow, say a database query.
  override def friends = Data.getFriends(friendIds)
  override def appearsIn = Data.getEpisodes(appearsInIds)

  // Exposes the Option-typed constructor argument as a nullable GraphQL field.
  @Field(desc="The home planet of the human, or null if unknown.")
  def homePlanet: String = _homePlanet.orNull
}
@Object(desc="A mechanical creature in the Star Wars universe.")
case class Droid(id: String, name: String, friendIds: List[String], appearsInIds: List[Int], _primaryFunction: Option[String] = None) extends Character {

  // Same as in Human. The resolvers are duplicated in each GraphQL object class
  // because a GraphQLInterface cannot carry resolve functions itself.
  override def friends = Data.getFriends(friendIds)
  override def appearsIn = Data.getEpisodes(appearsInIds)

  // Exposes the Option-typed constructor argument as a nullable GraphQL field.
  @Field(desc="The primary function of the droid.")
  def primaryFunction: String = _primaryFunction.orNull
}
@Object
class Query {

  /** The hero of the saga (R2-D2, as in the reference implementation). */
  def hero: Character = Data.artoo

  /**
   * Looks up a human by ID.
   *
   * Replaced the orNull + match-with-null pattern (flagged "not ideal" above)
   * with collect over the Option, which also sidesteps the type-erasure issue
   * mentioned in the original comment.
   *
   * @throws IllegalArgumentException if the ID does not refer to a Human.
   */
  def human(@NonNull @Field("ID of the human") id: String): Human =
    Data.getCharacter(id).collect { case h: Human => h }
      .getOrElse(throw new IllegalArgumentException("Invalid human ID given"))

  /**
   * Looks up a droid by ID.
   *
   * @throws IllegalArgumentException if the ID does not refer to a Droid.
   */
  def droid(@NonNull id: String): Droid =
    Data.getCharacter(id).collect { case d: Droid => d }
      .getOrElse(throw new IllegalArgumentException("Invalid droid ID given"))
}
/** Tiny smoke-test entry point that exercises the Query resolvers via println. */
object Test {
  def main(args: Array[String]): Unit = {
    val query = new Query()
    println(s"The true hero in Star Wars is ${query.hero.name}")
    println(s"A friend of the hero is ${query.hero.friends(0).name}")
    println(s"Darth Vader appears in ${query.human("1001").appearsIn}")
    println(s"C3-PO's primary function is ${query.droid("2000").primaryFunction}")
  }
}
} | hrosenhorn/graphql-scala | src/test/scala/se/uprise/graphql_alt2/starwars/Schema.scala | Scala | mit | 7,006 |
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
* */
package io.github.mandar2812.dynaml.models.bayes
import breeze.stats.distributions.ContinuousDistr
import io.github.mandar2812.dynaml.algebra.PartitionedVector
import io.github.mandar2812.dynaml.models.StochasticProcessModel
import io.github.mandar2812.dynaml.pipes.MetaPipe
import io.github.mandar2812.dynaml.probability.{ContinuousRVWithDistr, RandomVarWithDistr}
import spire.algebra.InnerProductSpace
/**
* Represents a stochastic process with a
* defined prior distribution.
*
* @author mandar2812 date: 16/02/2017.
*
* */
trait StochasticProcessPrior[
  I, Y, Y1,
  D <: RandomVarWithDistr[Y1, _], W,
  StochasticModel <: StochasticProcessModel[Seq[(I, Y)], I, Y, W]] extends Serializable {

  // Continuous prior distributions over hyper-parameters, keyed by name.
  protected var hyperPrior: Map[String, ContinuousRVWithDistr[Double, ContinuousDistr[Double]]] = Map()

  /** Replaces the current hyper-parameter prior map. */
  def hyperPrior_(h: Map[String, ContinuousRVWithDistr[Double, ContinuousDistr[Double]]]) = {
    hyperPrior = h
  }

  /** Conditions the prior on observed data, yielding the posterior model. */
  def posteriorModel(data: Seq[(I, Y)]): StochasticModel

  /** Predictive distribution of the posterior model over the test inputs. */
  def posteriorRV[U <: Seq[I]](data: Seq[(I, Y)])(test: U): W =
    posteriorModel(data).predictiveDistribution(test)

  /** Prior distribution of the process evaluated at the given inputs. */
  def priorDistribution[U <: Seq[I]](d: U): D
}
/**
* A template for general stochastic priors
* having a linear trend function (or mean function)
*
* */
trait LinearTrendStochasticPrior[
  I, D <: RandomVarWithDistr[PartitionedVector, _], W,
  StochasticModel <: StochasticProcessModel[Seq[(I, Double)], I, Double, W]]
  extends StochasticProcessPrior[
    I, Double, PartitionedVector,
    D, W, StochasticModel] {

  // Inner product on the index set, used to evaluate the linear trend.
  val innerProduct: InnerProductSpace[I, Double]

  // Trend parameters: a slope element of the index set and a scalar intercept.
  protected var params: (I, Double)

  // mean(x) = <slope, x> + intercept, for any (slope, intercept) tuple.
  val meanFunctionPipe = MetaPipe(
    (parameters: (I, Double)) => (x: I) => innerProduct.dot(parameters._1, x) + parameters._2
  )
}
/*
* Copyright 2018 Analytics Zoo Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.zoo.pipeline.api.keras.utils
import com.intel.analytics.bigdl.nn.abstractnn.Activity
import com.intel.analytics.bigdl.nn.keras.{Input, KerasLayer, Model}
import com.intel.analytics.bigdl.nn.{Graph, Linear}
import com.intel.analytics.bigdl.utils.{Engine, Shape}
import com.intel.analytics.zoo.common.NNContext
import com.intel.analytics.zoo.pipeline.api.keras.ZooSpecHelper
import com.intel.analytics.zoo.pipeline.api.keras.layers.Dense
import com.intel.analytics.zoo.pipeline.api.keras.layers.utils.{AbstractModuleRef, EngineRef, GraphRef, KerasLayerRef}
import org.apache.spark.{SparkConf, SparkContext}
import scala.collection.mutable
/**
 * Smoke tests for the reflective KerasLayerRef wrapper: each wrapped private
 * method should be invokable without throwing.
 */
class KerasLayerRefSpec extends ZooSpecHelper {

  "invokeMethod excludeInvalidLayers" should "work properly" in {
    new KerasLayerRef[Float](Dense[Float](2).asInstanceOf[KerasLayer[Activity, Activity, Float]])
      .excludeInvalidLayers(Seq(Dense[Float](2)))
  }

  "invokeMethod setInputShape" should "work properly" in {
    new KerasLayerRef[Float](Dense[Float](2).asInstanceOf[KerasLayer[Activity, Activity, Float]])
      .setInputShape(Shape(3))
  }

  "invokeMethod setOutputShape" should "work properly" in {
    new KerasLayerRef[Float](Dense[Float](2).asInstanceOf[KerasLayer[Activity, Activity, Float]])
      .setOutShape(Shape(3))
  }

  "invokeMethod checkWithCurrentInputShape" should "work properly" in {
    new KerasLayerRef[Float](Dense[Float](2).asInstanceOf[KerasLayer[Activity, Activity, Float]])
      .checkWithCurrentInputShape(Shape(3))
  }

  "invokeMethod validateInput" should "work properly" in {
    // A Keras-style layer is accepted...
    new KerasLayerRef[Float](Dense[Float](2).asInstanceOf[KerasLayer[Activity, Activity, Float]])
      .validateInput(Seq(Dense[Float](3)))
    // ...while a plain (non-Keras) Linear layer must be rejected.
    intercept[Exception] {
      new KerasLayerRef[Float](Dense[Float](2).asInstanceOf[KerasLayer[Activity, Activity, Float]])
        .validateInput(Seq(Linear[Float](2, 3)))
    }
  }

  "invokeMethod checkDuplicate" should "work properly" in {
    new KerasLayerRef[Float](Dense[Float](2).asInstanceOf[KerasLayer[Activity, Activity, Float]])
      .checkDuplicate(mutable.HashSet(2))
  }
}
/** Verifies the reflective build wrapper propagates shapes through a layer. */
class AbstractModuleRefSpec extends ZooSpecHelper {

  "invokeMethod build" should "work properly" in {
    // Dense(2) built on a (batch, 3) input must report a (batch, 2) output.
    val outputShape = new AbstractModuleRef[Float](Dense[Float](2)
      .asInstanceOf[KerasLayer[Activity, Activity, Float]])
      .build(Shape(-1, 3))
    assert(outputShape == Shape(-1, 2))
  }
}
/** Verifies the reflective graph wrapper exposes the model's output nodes. */
class GraphRefSpec extends ZooSpecHelper {

  "invokeMethod getOutputs" should "work properly" in {
    val input = Input[Float](inputShape = Shape(3))
    // A single Dense head means exactly one graph output.
    val model = Model(input, Dense[Float](2).inputs(input))
    val outputs = new GraphRef[Float](model.labor.asInstanceOf[Graph[Float]])
      .getOutputs()
    assert(outputs.length == 1)
  }
}
/** Smoke test for the reflective EngineRef accessors, run under a local Spark context. */
class EngineRefSpec extends ZooSpecHelper {

  // Spark context shared by the tests in this suite; created/torn down per run.
  private var sc: SparkContext = _

  override def doBefore(): Unit = {
    val conf = new SparkConf()
      .setMaster("local[4]")
    sc = NNContext.initNNContext(conf, appName = "TrainingSpec")
  }

  override def doAfter(): Unit = {
    // Guard against doBefore having failed before the context was created.
    if (sc != null) {
      sc.stop()
    }
  }

  "invokeMethod set and coreNumber" should "work properly" in {
    Engine.init
    // The reflective core-number accessor should return without throwing.
    val num = EngineRef.getCoreNumber()
    print(num)
  }
}
| intel-analytics/analytics-zoo | zoo/src/test/scala/com/intel/analytics/zoo/pipeline/api/keras/utils/ReflectionSpec.scala | Scala | apache-2.0 | 3,867 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.