code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution
import scala.collection.mutable.{ArrayBuffer, BitSet}
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.expressions.{Expression, PlanExpression}
import org.apache.spark.sql.catalyst.plans.QueryPlan
import org.apache.spark.sql.execution.adaptive.{AdaptiveSparkPlanExec, AdaptiveSparkPlanHelper, QueryStageExec}
object ExplainUtils extends AdaptiveSparkPlanHelper {
/**
* Given a input physical plan, performs the following tasks.
* 1. Computes the whole stage codegen id for current operator and records it in the
* operator by setting a tag.
* 2. Generate the two part explain output for this plan.
* 1. First part explains the operator tree with each operator tagged with an unique
* identifier.
* 2. Second part explains each operator in a verbose manner.
*
* Note : This function skips over subqueries. They are handled by its caller.
*
* @param plan Input query plan to process
* @param append function used to append the explain output
* @param collectedOperators The IDs of the operators that are already collected and we shouldn't
* collect again.
*/
private def processPlanSkippingSubqueries[T <: QueryPlan[T]](
plan: T,
append: String => Unit,
collectedOperators: BitSet): Unit = {
try {
generateWholeStageCodegenIds(plan)
QueryPlan.append(
plan,
append,
verbose = false,
addSuffix = false,
printOperatorId = true)
append("\\n")
val operationsWithID = ArrayBuffer.empty[QueryPlan[_]]
collectOperatorsWithID(plan, operationsWithID, collectedOperators)
operationsWithID.foreach(p => append(p.verboseStringWithOperatorId()))
} catch {
case e: AnalysisException => append(e.toString)
}
}
/**
* Given a input physical plan, performs the following tasks.
* 1. Generates the explain output for the input plan excluding the subquery plans.
* 2. Generates the explain output for each subquery referenced in the plan.
*/
def processPlan[T <: QueryPlan[T]](plan: T, append: String => Unit): Unit = {
try {
var currentOperatorID = 0
currentOperatorID = generateOperatorIDs(plan, currentOperatorID)
val subqueries = ArrayBuffer.empty[(SparkPlan, Expression, BaseSubqueryExec)]
getSubqueries(plan, subqueries)
subqueries.foldLeft(currentOperatorID) {
(curId, plan) => generateOperatorIDs(plan._3.child, curId)
}
val collectedOperators = BitSet.empty
processPlanSkippingSubqueries(plan, append, collectedOperators)
var i = 0
for (sub <- subqueries) {
if (i == 0) {
append("\\n===== Subqueries =====\\n\\n")
}
i = i + 1
append(s"Subquery:$i Hosting operator id = " +
s"${getOpId(sub._1)} Hosting Expression = ${sub._2}\\n")
// For each subquery expression in the parent plan, process its child plan to compute
// the explain output. In case of subquery reuse, we don't print subquery plan more
// than once. So we skip [[ReusedSubqueryExec]] here.
if (!sub._3.isInstanceOf[ReusedSubqueryExec]) {
processPlanSkippingSubqueries(sub._3.child, append, collectedOperators)
}
append("\\n")
}
} finally {
removeTags(plan)
}
}
/**
* Traverses the supplied input plan in a bottom-up fashion and records the operator id via
* setting a tag in the operator.
* Note :
* - Operator such as WholeStageCodegenExec and InputAdapter are skipped as they don't
* appear in the explain output.
* - Operator identifier starts at startOperatorID + 1
*
* @param plan Input query plan to process
* @param startOperatorID The start value of operation id. The subsequent operations will be
* assigned higher value.
* @return The last generated operation id for this input plan. This is to ensure we always
* assign incrementing unique id to each operator.
*/
private def generateOperatorIDs(plan: QueryPlan[_], startOperatorID: Int): Int = {
var currentOperationID = startOperatorID
// Skip the subqueries as they are not printed as part of main query block.
if (plan.isInstanceOf[BaseSubqueryExec]) {
return currentOperationID
}
def setOpId(plan: QueryPlan[_]): Unit = if (plan.getTagValue(QueryPlan.OP_ID_TAG).isEmpty) {
currentOperationID += 1
plan.setTagValue(QueryPlan.OP_ID_TAG, currentOperationID)
}
plan.foreachUp {
case _: WholeStageCodegenExec =>
case _: InputAdapter =>
case p: AdaptiveSparkPlanExec =>
currentOperationID = generateOperatorIDs(p.executedPlan, currentOperationID)
if (!p.executedPlan.fastEquals(p.initialPlan)) {
currentOperationID = generateOperatorIDs(p.initialPlan, currentOperationID)
}
setOpId(p)
case p: QueryStageExec =>
currentOperationID = generateOperatorIDs(p.plan, currentOperationID)
setOpId(p)
case other: QueryPlan[_] =>
setOpId(other)
other.innerChildren.foldLeft(currentOperationID) {
(curId, plan) => generateOperatorIDs(plan, curId)
}
}
currentOperationID
}
/**
* Traverses the supplied input plan in a bottom-up fashion and collects operators with assigned
* ids.
*
* @param plan Input query plan to process
* @param operators An output parameter that contains the operators.
* @param collectedOperators The IDs of the operators that are already collected and we shouldn't
* collect again.
*/
private def collectOperatorsWithID(
plan: QueryPlan[_],
operators: ArrayBuffer[QueryPlan[_]],
collectedOperators: BitSet): Unit = {
// Skip the subqueries as they are not printed as part of main query block.
if (plan.isInstanceOf[BaseSubqueryExec]) {
return
}
def collectOperatorWithID(plan: QueryPlan[_]): Unit = {
plan.getTagValue(QueryPlan.OP_ID_TAG).foreach { id =>
if (collectedOperators.add(id)) operators += plan
}
}
plan.foreachUp {
case _: WholeStageCodegenExec =>
case _: InputAdapter =>
case p: AdaptiveSparkPlanExec =>
collectOperatorsWithID(p.executedPlan, operators, collectedOperators)
if (!p.executedPlan.fastEquals(p.initialPlan)) {
collectOperatorsWithID(p.initialPlan, operators, collectedOperators)
}
collectOperatorWithID(p)
case p: QueryStageExec =>
collectOperatorsWithID(p.plan, operators, collectedOperators)
collectOperatorWithID(p)
case other: QueryPlan[_] =>
collectOperatorWithID(other)
other.innerChildren.foreach(collectOperatorsWithID(_, operators, collectedOperators))
}
}
/**
* Traverses the supplied input plan in a top-down fashion and records the
* whole stage code gen id in the plan via setting a tag.
*/
private def generateWholeStageCodegenIds(plan: QueryPlan[_]): Unit = {
var currentCodegenId = -1
def setCodegenId(p: QueryPlan[_], children: Seq[QueryPlan[_]]): Unit = {
if (currentCodegenId != -1) {
p.setTagValue(QueryPlan.CODEGEN_ID_TAG, currentCodegenId)
}
children.foreach(generateWholeStageCodegenIds)
}
// Skip the subqueries as they are not printed as part of main query block.
if (plan.isInstanceOf[BaseSubqueryExec]) {
return
}
plan.foreach {
case p: WholeStageCodegenExec => currentCodegenId = p.codegenStageId
case _: InputAdapter => currentCodegenId = -1
case p: AdaptiveSparkPlanExec => setCodegenId(p, Seq(p.executedPlan))
case p: QueryStageExec => setCodegenId(p, Seq(p.plan))
case other: QueryPlan[_] => setCodegenId(other, other.innerChildren)
}
}
/**
* Generate detailed field string with different format based on type of input value
*/
def generateFieldString(fieldName: String, values: Any): String = values match {
case iter: Iterable[_] if (iter.size == 0) => s"${fieldName}: []"
case iter: Iterable[_] => s"${fieldName} [${iter.size}]: ${iter.mkString("[", ", ", "]")}"
case str: String if (str == null || str.isEmpty) => s"${fieldName}: None"
case str: String => s"${fieldName}: ${str}"
case _ => throw new IllegalArgumentException(s"Unsupported type for argument values: $values")
}
/**
* Given a input plan, returns an array of tuples comprising of :
* 1. Hosting operator id.
* 2. Hosting expression
* 3. Subquery plan
*/
private def getSubqueries(
plan: => QueryPlan[_],
subqueries: ArrayBuffer[(SparkPlan, Expression, BaseSubqueryExec)]): Unit = {
plan.foreach {
case a: AdaptiveSparkPlanExec =>
getSubqueries(a.executedPlan, subqueries)
case p: SparkPlan =>
p.expressions.foreach (_.collect {
case e: PlanExpression[_] =>
e.plan match {
case s: BaseSubqueryExec =>
subqueries += ((p, e, s))
getSubqueries(s, subqueries)
case _ =>
}
})
}
}
/**
* Returns the operator identifier for the supplied plan by retrieving the
* `operationId` tag value.
*/
def getOpId(plan: QueryPlan[_]): String = {
plan.getTagValue(QueryPlan.OP_ID_TAG).map(v => s"$v").getOrElse("unknown")
}
def removeTags(plan: QueryPlan[_]): Unit = {
def remove(p: QueryPlan[_], children: Seq[QueryPlan[_]]): Unit = {
p.unsetTagValue(QueryPlan.OP_ID_TAG)
p.unsetTagValue(QueryPlan.CODEGEN_ID_TAG)
children.foreach(removeTags)
}
plan foreach {
case p: AdaptiveSparkPlanExec => remove(p, Seq(p.executedPlan, p.initialPlan))
case p: QueryStageExec => remove(p, Seq(p.plan))
case plan: QueryPlan[_] => remove(plan, plan.innerChildren)
}
}
}
| wangmiao1981/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/ExplainUtils.scala | Scala | apache-2.0 | 10,794 |
/*
* Copyright (c) 2014 Contributor. All rights reserved.
*/
package methods
class Methods {
val foo = 1
def bar = 2
def baz() = 3
}
object Methods {
val a = new Methods
a.foo
a.bar
a.baz()
} | Kwestor/scala-ide | org.scala-ide.sdt.core.tests/test-workspace/custom-highlighting/src/custom/Methods.scala | Scala | bsd-3-clause | 214 |
/*
Copyright (c) 2013, Noel Raymond Cower
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.spifftastic.spastic.graphics
/**
* ColorSpace classes used to coerce Color components from one ColorSpace to another. Currently
* only provides HSV and RGB ColorSpaces.
*/
sealed abstract class ColorSpace {
/** Coerces a color array from its previous ColorSpace to this ColorSpace by modifying it. */
final def coerce(previousColorSpace: ColorSpace, color: Array[Float], colorOffset: Int): Unit =
coerce(previousColorSpace: ColorSpace, color, colorOffset, color, colorOffset)
/**
* Coerces a color array from its previous ColorSpace to this ColorSpace and writes the results
* to a given output array.
*
* Implementations must respect that both color and output may be the same array and act
* accordingly to avoid trashing inputs (just compute the results before you writing to output).
*/
def coerce(previousColorSpace: ColorSpace, color: Array[Float], colorOffset: Int, out: Array[Float], outOffset: Int): Unit
}
object ColorSpace {
@inline
private def clamped(f: Float): Float =
if (f < 0f) 0f
else if (f > 1f) 1f
else f
@inline
private def wrapped(f: Float): Float =
if (f < 0f) (f % 1f) + 1f
else if (f > 1f) f % 1f
else f
/**
* HSV (hue, saturation, value) ColorSpace for Color objects.
*/
case object HSV extends ColorSpace {
import RGB.SaturationEpsilon
import Math.{min, max}
val OneOverSix = 1f / 6f
def coerce(previousColorSpace: ColorSpace, color: Array[Float], colorOffset: Int, out: Array[Float], outOffset: Int): Unit = {
previousColorSpace match {
case HSV =>
case RGB =>
val r = clamped(color(colorOffset))
val g = clamped(color(colorOffset + 1))
val b = clamped(color(colorOffset + 2))
val lower = min(min(r, g), b)
val upper = max(max(r, g), b)
val chroma = upper - lower
if (chroma <= 0f) {
out(outOffset) = 0f
out(outOffset + 1) = 0f
} else {
if (upper == r) {
val proposed = ((g - b) / chroma) * OneOverSix
if (proposed < 0f) out(outOffset) = proposed + 1f
else out(outOffset) = proposed
} else if (upper == g) {
out(outOffset) = (2f + (b - r) / chroma) * OneOverSix
} else {
out(outOffset) = (4f + (r - g) / chroma) * OneOverSix
}
out(outOffset + 1) = chroma / upper
}
out(outOffset + 2) = upper
}
}
}
/**
* RGB (red, green, blue) ColorSpace for Color objects.
*/
case object RGB extends ColorSpace {
val SaturationEpsilon = Color.ConversionError / 2f
def coerce(previousColorSpace: ColorSpace, color: Array[Float], colorOffset: Int, out: Array[Float], outOffset: Int): Unit = {
previousColorSpace match {
case RGB =>
case HSV =>
val hue = color(colorOffset) * 6f
val saturation = clamped(color(colorOffset + 1))
val value = clamped(color(colorOffset + 2))
if (saturation < 0.001f) {
out(outOffset) = value
out(outOffset + 1) = value
out(outOffset + 2) = value
} else {
val face: Int = hue.toInt
val huePrime = hue - face
val common: Float = value * (1f - saturation)
face % 6 match {
case 0 | 6 =>
out(outOffset) = value
out(outOffset + 1) = value * (1f - saturation * (1f - huePrime))
out(outOffset + 2) = common
case 1 =>
out(outOffset) = value * (1f - huePrime * saturation)
out(outOffset + 1) = value
out(outOffset + 2) = common
case 2 =>
out(outOffset) = common
out(outOffset + 1) = value
out(outOffset + 2) = value * (1f - saturation * (1f - huePrime))
case 3 =>
out(outOffset) = common
out(outOffset + 1) = value * (1f - huePrime * saturation)
out(outOffset + 2) = value
case 4 =>
out(outOffset) = value * (1f - saturation * (1f - huePrime))
out(outOffset + 1) = common
out(outOffset + 2) = value
case 5 =>
out(outOffset) = value
out(outOffset + 1) = common
out(outOffset + 2) = value * (1f - huePrime * saturation)
}
}
}
}
}
}
| nilium/spastic | src/main/scala/graphics/ColorSpace.scala | Scala | bsd-2-clause | 5,926 |
package rere.ql.options
import rere.ql.queries.values
trait IndexTypeOptions {
sealed trait IndexMultiplicityOptions extends ComposableOptions
case object SimpleIndex extends IndexMultiplicityOptions with DefaultOption
case object MultiIndex extends IndexMultiplicityOptions with NonDefaultOption {
def view = "multi" -> values.expr(true) :: Nil
}
sealed trait IndexNatureOptions extends ComposableOptions
case object RangeIndex extends IndexNatureOptions with DefaultOption
case object GeoIndex extends IndexNatureOptions with NonDefaultOption {
def view = "geo" -> values.expr(true) :: Nil
}
}
| pbaun/rere | modules/ql/src/main/scala/rere/ql/options/IndexTypeOptions.scala | Scala | apache-2.0 | 629 |
package demo.components
import japgolly.scalajs.react._
import scala.scalajs.js
object CallbackDebug {
trait Print[T] {
def print(t: T): String
}
trait PrintLower {
final implicit def PrintAny[T]: Print[T] =
new Print[T] {
override def print(t: T): String =
if (t == js.undefined) "undefined"
else if (t == null) "null"
else t.toString
}
}
object Print extends PrintLower {
def apply[T: Print](t: T): String =
implicitly[Print[T]].print(t)
implicit def PrintEvent[E <: ReactEvent]: Print[E] =
new Print[E] {
override def print(e: E): String = {
val d = e.asInstanceOf[js.Dynamic]
val u = js.undefined.asInstanceOf[js.Dynamic]
val event =
if (d.clipboardData != u) "ReactClipboardEvent"
else if (d.data != u) "ReactCompositionEvent"
else if (d.dataTransfer != u) "ReactDragEvent"
else if (d.relatedTarget != u) "ReactFocusEvent"
else if (d.locale != u) "ReactKeyboardEvent"
else if (d.buttons != u) "ReactMouseEvent"
else if (d.touches != u) "ReactTouchEvent"
else if (d.detail != u) "ReactUIEvent"
else if (d.deltaZ != u) "ReactWheelEvent"
else "ReactEvent"
val t = e.target.asInstanceOf[js.Dynamic]
val target =
if (t.value != u) "I"
else if (t.offsetTop != u) "H"
else ""
val values = js.Object.keys(e).map{
key =>
val valueU: js.Any = e.asInstanceOf[js.Dictionary[js.Any]](key)
val valueS = if (js.isUndefined(valueU)) "empty"
else if (js.typeOf(valueU) == "function") "function"
else util.Try(js.JSON.stringify(valueU)).getOrElse("circular")
s"$key: $valueS"
}
s"$event$target: ${values.mkString("{", ", ", "}")}"
}
}
}
private def base(name: String, params: String*): Callback =
Callback.info(s"Event handler: $name(${params.mkString(", ")})")
def f0(name: String): Callback =
base(name)
def f1[T1: Print](name: String): js.UndefOr[T1 => Callback] =
(_1: T1) => base(name, Print(_1))
def f2[T1: Print, T2: Print](name: String): js.UndefOr[(T1, T2) => Callback] =
(_1: T1, _2: T2) => base(name, Print(_1), Print(_2))
def f3[T1: Print, T2: Print, T3: Print](name: String): js.UndefOr[(T1, T2, T3) => Callback] =
(_1: T1, _2: T2, _3: T3) => base(name, Print(_1), Print(_2), Print(_3))
}
| aparo/scalajs-react-components | demo/src/main/scala/demo/components/CallbackDebug.scala | Scala | apache-2.0 | 2,561 |
package org.scalawiki.wlx.dto
import java.time.ZonedDateTime
import com.typesafe.config.{Config, ConfigFactory, ConfigParseOptions, ConfigResolveOptions}
import org.scalawiki.wlx.stat.RateConfig
import scala.util.Try
/**
* Represents Wiki Loves X contest
*
* @param contestType
* @param country
* @param year
* @param startDate
* @param endDate
* @param uploadConfigs
* @param specialNominations
*/
case class Contest(
contestType: ContestType,
country: Country,
year: Int,
startDate: String = "",
endDate: String = "",
uploadConfigs: Seq[UploadConfig] = Nil,
specialNominations: Seq[SpecialNomination] = Nil,
rateConfig: RateConfig = RateConfig(),
config: Option[Config] = None
) extends HasImagesCategory {
def campaign = contestType.code + "-" + country.code
def name = s"${contestType.name} $year" + countryName.fold("")(" in " + _)
def countryName: Option[String] =
if (country != NoAdmDivision)
Some(country.name)
else
None
/**
* @return Name of category containing contest images
*/
override def imagesCategory: String = s"Category:Images from $name".replaceAll(" ", "_")
/**
* @return name of template that monument lists consist of
*/
def listTemplate: Option[String] = uploadConfigs.headOption.map(_.listTemplate)
/**
* @return name of template that marks a contest image with monument id
*/
def fileTemplate: Option[String] = uploadConfigs.headOption.map(_.fileTemplate)
def listsHost: Option[String] = {
uploadConfigs.head.listsHost
.orElse(
country.languageCodes.headOption.map(_ + ".wikipedia.org")
)
}
}
object Contest {
val opts = ConfigParseOptions.defaults.setAllowMissing(false)
def load(name: String): Option[Contest] = {
Try {
ConfigFactory.load(name, opts, ConfigResolveOptions.defaults)
}.map(fromConfig)
.getOrElse {
val Campaign = "(\\\\w+)_(\\\\w+).conf".r
name match {
case Campaign(typeCode, countryCode) =>
for (contestType <- ContestType.byCode(typeCode);
country <- Country.byCode(countryCode)
) yield
Contest(contestType, country, ZonedDateTime.now.getYear)
case _ => None
}
}
}
def byCampaign(campaign: String): Option[Contest] = {
load(campaign.replace("-", "_") + ".conf")
}
def fromConfig(config: Config): Option[Contest] = {
val (typeStr, countryStr, year) = (
config.getString("type"),
config.getString("country"),
config.getInt("year"))
val uploadConfig = UploadConfig.fromConfig(config)
for (contestType <- ContestType.byCode(typeStr.toLowerCase);
country <- Country.fromJavaLocales.find(country => country.name == countryStr || country.code == countryStr))
yield new Contest(contestType, country, year, uploadConfigs = Seq(uploadConfig), config = Some(config))
}
def ESPCUkraine(year: Int, startDate: String = "01-09", endDate: String = "30-09") =
new Contest(ContestType.ESPC, Country.Ukraine, year, startDate, endDate, Nil)
def WLMUkraine(year: Int) =
load("wlm_ua.conf").get.copy(year = year)
def WLEUkraine(year: Int) =
load("wle_ua.conf").get.copy(year = year)
}
| intracer/scalawiki | scalawiki-wlx/src/main/scala/org/scalawiki/wlx/dto/Contest.scala | Scala | apache-2.0 | 3,444 |
package engine.api
import akka.actor.{ActorRef, ActorSystem, Props}
import engine.api.MapReduce.{MapReduceUpB, ReducerT}
import engine.util.EngineLogger
import akka.pattern._
import akka.util.Timeout
import scala.concurrent.Future
/**
* Entry and management class to control the engine, creating new Jobs, adding, removing them, and logging information about the environment
*/
object EngineContext extends EngineLogger {
private val akkaSysName = "TypedMapReduceSystem"
private val actorSystem = ActorSystem(akkaSysName)
def submit(j: Job): Future[Any] = {
val input = j.getInputDataSetPaths
val mapper = j.getSingleMapperClass.newInstance
val reducer = if (mapper.isInstanceOf[MapReduceUpB]) mapper.asInstanceOf[ReducerT] else j.getReducer.get.newInstance
val outputDataSet = j.getOutput.get
val jobId = Job.newJobId(j)
val mapReduceCoordinator = actorSystem.actorOf(Props(new engine.executors.MapReduceCoordinatorActor(input, mapper, reducer, outputDataSet, j.getName, jobId)), j.getName)
implicit val timeout = Timeout(j.getTimeout._1, j.getTimeout.unit)
val f = mapReduceCoordinator ? Start
f
}
def newJob(jobName: String): Job = {
val j = Job(jobName)
logger.debug(s"created fresh Job by name $jobName in EngineContext")
j
}
}
| filipegmiranda/typed-mapreduce | src/main/scala/engine/api/EngineContext.scala | Scala | apache-2.0 | 1,305 |
package katas.scala
import scala.concurrent.duration.{FiniteDuration, MILLISECONDS}
object Util {
def measureDuration(f: => Unit): FiniteDuration = {
val start = System.currentTimeMillis()
f
val end = System.currentTimeMillis()
FiniteDuration(end - start, MILLISECONDS)
}
} | dkandalov/katas | scala/src/katas/scala/Util.scala | Scala | unlicense | 285 |
import com.krrrr38.play.autodoc.AutodocHelpers
object TestHelper extends AutodocHelpers {
def myhelper = "foo"
}
| krrrr38/play-autodoc | example/test/TestHelper.scala | Scala | mit | 116 |
/*
* Copyright (c) 2014-2021 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.tail
import cats.laws._
import cats.laws.discipline._
import monix.eval.Coeval
import monix.execution.exceptions.DummyException
object IterantScanSuite extends BaseTestSuite {
test("scan evolves state") { implicit s =>
check1 { (source: Iterant[Coeval, Int]) =>
sealed trait State[+A] { def count: Int }
case object Init extends State[Nothing] { def count = 0 }
case class Current[A](current: A, count: Int) extends State[A]
val scanned = source.scan(Init: State[Int]) { (acc, a) =>
acc match {
case Init => Current(a, 1)
case Current(_, count) => Current(a, count + 1)
}
}
val fa = scanned
.takeWhile(_.count < 10)
.collect { case Current(a, _) => a }
fa.toListL <-> source.take(10).toListL.map(_.take(9))
}
}
test("scan protects against exceptions initial") { implicit s =>
val dummy = DummyException("dummy")
val fa = Iterant[Coeval].of(1, 2, 3)
val r = fa.scan((throw dummy): Int)((_, e) => e).attempt.toListL
assertEquals(r.value(), List(Left(dummy)))
}
test("scan protects against exceptions in f") { implicit s =>
val dummy = DummyException("dummy")
val fa = Iterant[Coeval].of(1, 2, 3)
val r = fa.scan(0)((_, _) => throw dummy).attempt.toListL
assertEquals(r.value(), List(Left(dummy)))
}
test("scan0 emits seed as first element") { implicit s =>
check2 { (source: Iterant[Coeval, Int], seed: Int) =>
source.scan0(seed)(_ + _).headOptionL <-> Coeval.pure(Some(seed))
}
}
test("scan0.drop(1) <-> scan") { implicit s =>
check2 { (source: Iterant[Coeval, Int], seed: Int) =>
source.scan0(seed)(_ + _).drop(1) <-> source.scan(seed)(_ + _)
}
}
}
| monix/monix | monix-tail/shared/src/test/scala/monix/tail/IterantScanSuite.scala | Scala | apache-2.0 | 2,430 |
package scala.slick.ql
import scala.slick.session.Session
/**
* A DDL object contains the SQL statements for creating and dropping
* database entities. DDLs can be combined for creating or dropping multiple
* entities together, even if they have circular dependencies.
*/
trait DDL { self =>
/** Statements to execute first for create(), e.g. creating tables and indexes. */
protected def createPhase1: Iterable[String]
/** Statements to execute after createPhase1, e.g. creating foreign keys. */
protected def createPhase2: Iterable[String]
/** All statements to execute for create() */
def createStatements: Iterator[String] = createPhase1.iterator ++ createPhase2.iterator
/** Create the entities described by this DDL object */
def create(implicit session: Session): Unit = session.withTransaction {
for(s <- createStatements)
session.withPreparedStatement(s)(_.execute)
}
/** Statements to execute first for drop(), e.g. removing connections from other entities. */
protected def dropPhase1: Iterable[String]
/** Statements to execute after dropPhase1, e.g. actually dropping a table. */
protected def dropPhase2: Iterable[String]
/** All statements to execute for drop() */
def dropStatements: Iterator[String] = dropPhase1.iterator ++ dropPhase2.iterator
/** Drop the entities described by this DDL object */
def drop(implicit session: Session): Unit = session.withTransaction {
for(s <- dropStatements)
session.withPreparedStatement(s)(_.execute)
}
/** Create a new DDL object which combines this and the other DDL object. */
def ++(other: DDL): DDL = new DDL {
protected lazy val createPhase1 = self.createPhase1 ++ other.createPhase1
protected lazy val createPhase2 = self.createPhase2 ++ other.createPhase2
protected lazy val dropPhase1 = self.dropPhase1 ++ other.dropPhase1
protected lazy val dropPhase2 = self.dropPhase2 ++ other.dropPhase2
}
}
| szeiger/scala-query | src/main/scala/scala/slick/ql/DDL.scala | Scala | bsd-2-clause | 1,950 |
package com.productfoundry.akka.messaging
import akka.actor.Actor
/**
* Indicates this actor handles published messages.
*/
trait MessageSubscriber extends Actor | odd/akka-cqrs | core/src/main/scala/com/productfoundry/akka/messaging/MessageSubscriber.scala | Scala | apache-2.0 | 165 |
package atto.syntax.stream
import scala.language.implicitConversions
import scalaz.syntax.Ops
import atto._
trait ParserOps[A] extends Ops[Parser[A]] {
}
trait ToParserOps {
// N.B. ensure this name doesn't shadow the one from core :-\\
implicit def toStreamParserOps[A](p: Parser[A]): ParserOps[A] =
new ParserOps[A] {
val self = p
}
} | coltfred/atto | stream/src/main/scala/atto/syntax/stream/ParserOps.scala | Scala | mit | 360 |
/***********************************************************************
* Copyright (c) 2013-2019 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.hbase.jobs
import java.nio.charset.StandardCharsets
import com.typesafe.scalalogging.LazyLogging
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.hadoop.hbase.client.Put
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2
import org.apache.hadoop.hbase.security.visibility.CellVisibility
import org.apache.hadoop.hbase.{HBaseConfiguration, HConstants, TableName}
import org.apache.hadoop.io.Writable
import org.apache.hadoop.mapreduce._
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat
import org.locationtech.geomesa.hbase.data.{HBaseConnectionPool, HBaseDataStore, HBaseIndexAdapter}
import org.locationtech.geomesa.index.api.WritableFeature.FeatureWrapper
import org.locationtech.geomesa.index.api.{MultiRowKeyValue, SingleRowKeyValue, WritableFeature, WriteConverter}
import org.locationtech.geomesa.index.conf.partition.TablePartition
import org.locationtech.geomesa.jobs.GeoMesaConfigurator
import org.locationtech.geomesa.jobs.mapreduce.GeoMesaOutputFormat
import org.locationtech.geomesa.utils.index.IndexMode
import org.locationtech.geomesa.utils.io.WithStore
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
import scala.util.control.NonFatal
/**
* Converts simple features to HBase mutations
*/
class HBaseIndexFileMapper extends Mapper[Writable, SimpleFeature, ImmutableBytesWritable, Put] with LazyLogging {
private var sft: SimpleFeatureType = _
private var wrapper: FeatureWrapper[WritableFeature] = _
private var writer: WriteConverter[_] = _
private var features: Counter = _
private var entries: Counter = _
private var failed: Counter = _
private val bytes = new ImmutableBytesWritable
override def setup(context: HBaseIndexFileMapper.MapContext): Unit = {
WithStore[HBaseDataStore](GeoMesaConfigurator.getDataStoreOutParams(context.getConfiguration)) { ds =>
require(ds != null, "Could not find data store - check your configuration and hbase-site.xml")
sft = ds.getSchema(HBaseIndexFileMapper.getTypeName(context.getConfiguration))
require(sft != null, "Could not find schema - check your configuration")
wrapper = WritableFeature.wrapper(sft, ds.adapter.groups)
writer = GeoMesaConfigurator.getIndicesOut(context.getConfiguration) match {
case Some(Seq(idx)) => ds.manager.index(sft, idx, IndexMode.Write).createConverter()
case _ => throw new IllegalArgumentException("Could not find write index - check your configuration")
}
}
features = context.getCounter(GeoMesaOutputFormat.Counters.Group, GeoMesaOutputFormat.Counters.Written)
entries = context.getCounter(GeoMesaOutputFormat.Counters.Group, "entries")
failed = context.getCounter(GeoMesaOutputFormat.Counters.Group, GeoMesaOutputFormat.Counters.Failed)
}
override def cleanup(context: HBaseIndexFileMapper.MapContext): Unit = {}
override def map(key: Writable, value: SimpleFeature, context: HBaseIndexFileMapper.MapContext): Unit = {
// TODO create a common writer that will create mutations without writing them
try {
val feature = wrapper.wrap(value)
writer.convert(feature) match {
case kv: SingleRowKeyValue[_] =>
kv.values.foreach { value =>
val put = new Put(kv.row)
put.addImmutable(value.cf, value.cq, value.value)
if (!value.vis.isEmpty) {
put.setCellVisibility(new CellVisibility(new String(value.vis, StandardCharsets.UTF_8)))
}
put.setDurability(HBaseIndexAdapter.durability)
bytes.set(put.getRow)
context.write(bytes, put)
entries.increment(1L)
}
case mkv: MultiRowKeyValue[_] =>
mkv.rows.foreach { row =>
mkv.values.foreach { value =>
val put = new Put(row)
put.addImmutable(value.cf, value.cq, value.value)
if (!value.vis.isEmpty) {
put.setCellVisibility(new CellVisibility(new String(value.vis, StandardCharsets.UTF_8)))
}
put.setDurability(HBaseIndexAdapter.durability)
bytes.set(put.getRow)
context.write(bytes, put)
entries.increment(1L)
}
}
}
features.increment(1L)
} catch {
case NonFatal(e) =>
logger.error(s"Error writing feature ${Option(value).orNull}", e)
failed.increment(1L)
}
}
}
object HBaseIndexFileMapper {
type MapContext = Mapper[Writable, SimpleFeature, ImmutableBytesWritable, Put]#Context
private val TypeNameKey = "org.locationtech.geomesa.hbase.type"
private def setTypeName(conf: Configuration, typeName: String): Unit = conf.set(TypeNameKey, typeName)
private def getTypeName(conf: Configuration): String = conf.get(TypeNameKey)
/**
* Sets mapper class, reducer class, output format and associated options
*
* @param job job
* @param params data store params for output data
* @param typeName feature type name to write (schema must exist already)
* @param index index table to write
* @param output output path for HFiles
*/
def configure(
job: Job,
params: Map[String, String],
typeName: String,
index: String,
output: Path): Unit = {
WithStore[HBaseDataStore](params) { ds =>
require(ds != null, s"Could not find data store with provided parameters ${params.mkString(",")}")
val sft = ds.getSchema(typeName)
require(sft != null, s"Schema $typeName does not exist, please create it first")
require(!TablePartition.partitioned(sft), "Writing to partitioned tables is not currently supported")
val idx = ds.manager.index(sft, index, IndexMode.Write)
val tableName = idx.getTableNames(None) match {
case Seq(t) => TableName.valueOf(t) // should always be writing to a single table here
case tables => throw new IllegalStateException(s"Expected a single table but got: ${tables.mkString(", ")}")
}
val table = ds.connection.getTable(tableName)
GeoMesaConfigurator.setDataStoreOutParams(job.getConfiguration, params)
GeoMesaConfigurator.setIndicesOut(job.getConfiguration, Seq(idx.identifier))
GeoMesaConfigurator.setSerialization(job.getConfiguration, sft)
setTypeName(job.getConfiguration, sft.getTypeName)
FileOutputFormat.setOutputPath(job, output)
// this defaults to /user/<user>/hbase-staging, which generally doesn't exist...
job.getConfiguration.set(HConstants.TEMPORARY_FS_DIRECTORY_KEY,
s"${System.getProperty("java.io.tmpdir")}/hbase-staging")
// this requires a connection to hbase, which we don't always have
// TODO allow this as an option
job.getConfiguration.set(HFileOutputFormat2.LOCALITY_SENSITIVE_CONF_KEY, "false")
job.setMapperClass(classOf[HBaseIndexFileMapper])
job.setMapOutputKeyClass(classOf[ImmutableBytesWritable])
job.setMapOutputValueClass(classOf[Put])
// set hbase config
HBaseConfiguration.merge(job.getConfiguration, HBaseConfiguration.create(job.getConfiguration))
HBaseConnectionPool.configureSecurity(job.getConfiguration)
// sets reducer, output classes and num-reducers
// override the libjars hbase tries to set, as they end up conflicting with the ones we set
val libjars = job.getConfiguration.get("tmpjars")
HFileOutputFormat2.configureIncrementalLoad(job, table, ds.connection.getRegionLocator(tableName))
job.getConfiguration.set("tmpjars", libjars)
}
}
}
| elahrvivaz/geomesa | geomesa-hbase/geomesa-hbase-jobs/src/main/scala/org/locationtech/geomesa/hbase/jobs/HBaseIndexFileMapper.scala | Scala | apache-2.0 | 8,153 |
package scalaxy.fx
import scala.language.implicitConversions
import javafx.beans.binding._
import javafx.beans.property._
import javafx.collections._
import scala.language.experimental.macros
private[fx] trait Properties
{
/** Creates a simple property of type T. */
def newProperty
[T, J, B <: Binding[J], P <: Property[J]]
(value: T)
(implicit ev: GenericType[T, J, B, P]): P =
macro impl.PropertyMacros.newProperty[T, P]
implicit def propertyValue(p: SimpleIntegerProperty): Int =
macro impl.PropertyMacros.propertyValue[Int, SimpleIntegerProperty]
implicit def propertyValue(p: SimpleLongProperty): Long =
macro impl.PropertyMacros.propertyValue[Long, SimpleLongProperty]
implicit def propertyValue(p: SimpleFloatProperty): Float =
macro impl.PropertyMacros.propertyValue[Float, SimpleFloatProperty]
implicit def propertyValue(p: SimpleDoubleProperty): Double =
macro impl.PropertyMacros.propertyValue[Double, SimpleDoubleProperty]
implicit def propertyValue(p: SimpleBooleanProperty): Boolean =
macro impl.PropertyMacros.propertyValue[Boolean, SimpleBooleanProperty]
implicit def propertyValue[A](p: SimpleListProperty[A]): ObservableList[A] =
macro impl.PropertyMacros.propertyValue[ObservableList[A], SimpleListProperty[A]]
implicit def propertyValue[A, B](p: SimpleMapProperty[A, B]): ObservableMap[A, B] =
macro impl.PropertyMacros.propertyValue[ObservableMap[A, B], SimpleMapProperty[A, B]]
implicit def bindingValue(b: IntegerBinding): Int =
macro impl.PropertyMacros.bindingValue[Int, IntegerBinding]
implicit def bindingValue(b: LongBinding): Long =
macro impl.PropertyMacros.bindingValue[Long, LongBinding]
implicit def bindingValue(b: FloatBinding): Float =
macro impl.PropertyMacros.bindingValue[Float, FloatBinding]
implicit def bindingValue(b: DoubleBinding): Double =
macro impl.PropertyMacros.bindingValue[Double, DoubleBinding]
implicit def bindingValue(b: BooleanBinding): Boolean =
macro impl.PropertyMacros.bindingValue[Boolean, BooleanBinding]
implicit def bindingValue[A](b: SimpleListProperty[A]): ObservableList[A] =
macro impl.PropertyMacros.bindingValue[ObservableList[A], SimpleListProperty[A]]
implicit def bindingValue[A, B](b: SimpleMapProperty[A, B]): ObservableMap[A, B] =
macro impl.PropertyMacros.bindingValue[ObservableMap[A, B], SimpleMapProperty[A, B]]
}
| nativelibs4java/Scalaxy | Fx/src/main/scala/scalaxy/fx/Properties.scala | Scala | bsd-3-clause | 2,426 |
/*
* Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.internal.persistence.cluster
import akka.actor.ExtendedActorSystem
import akka.serialization.SerializationExtension
import com.lightbend.lagom.persistence.ActorSystemSpec
class ClusterStartupTaskSerializerSpec extends ActorSystemSpec {
val serializer = new ClusterStartupTaskSerializer(system.asInstanceOf[ExtendedActorSystem])
def checkSerialization(obj: AnyRef): Unit = {
// check that it is configured
SerializationExtension(system).serializerFor(obj.getClass).getClass should be(classOf[ClusterStartupTaskSerializer])
// verify serialization-deserialization round trip
val blob = serializer.toBinary(obj)
val obj2 = serializer.fromBinary(blob, serializer.manifest(obj))
obj2 should be(obj)
}
"ClusterStartupTaskSerializerSpec" must {
"serialize Execute" in {
checkSerialization(ClusterStartupTaskActor.Execute)
}
}
}
| rstento/lagom | persistence/core/src/test/scala/com/lightbend/lagom/internal/persistence/cluster/ClusterStartupTaskSerializerSpec.scala | Scala | apache-2.0 | 980 |
package japgolly.scalajs.react
package extra
import scalaz.effect.IO
import utest._
import vdom.prefix_<^._
import test._
import ScalazReact._
object ReusabilityTest extends TestSuite {
object SampleComponent1 {
case class Picture(id: Long, url: String, title: String)
case class Props(name: String, age: Option[Int], pic: Picture)
implicit val picReuse = Reusability.by((_: Picture).id)
implicit val propsReuse = Reusability.caseclass3(Props.unapply)
var renderCount = 0
val component = ReactComponentB[Props]("Demo")
.getInitialState(identity)
.render { (_, *) =>
renderCount += 1
<.div(
<.p("Name: ", *.name),
<.p("Age: ", *.age.fold("Unknown")(_.toString)),
<.img(^.src := *.pic.url, ^.title := *.pic.title))
}
.configure(Reusability.shouldComponentUpdate)
.build
}
object SampleComponent2 {
var outerRenderCount = 0
var innerRenderCount = 0
type M = Map[Int, String]
val outerComponent = ReactComponentB[M]("Demo")
.getInitialState(identity)
.backend(new Backend(_))
.render(_.backend.render)
.build
class Backend($: BackendScope[_, M]) {
val updateUser = ReusableFn((id: Int, data: String) =>
$.modStateIO(_.updated(id, data)))
def render = {
outerRenderCount += 1
<.div(
$.state.map { case (id, name) =>
innerComponent.withKey(id)(InnerProps(name, updateUser(id)))
}.toJsArray)
}
}
case class InnerProps(name: String, update: String ~=> IO[Unit])
implicit val propsReuse = Reusability.caseclass2(InnerProps.unapply)
val innerComponent = ReactComponentB[InnerProps]("PersonEditor")
.stateless
.render { (p, _) =>
innerRenderCount += 1
<.input(
^.`type` := "text",
^.value := p.name,
^.onChange ~~> ((e: ReactEventI) => p.update(e.target.value)))
}
.configure(Reusability.shouldComponentUpdate)
.build
}
val tests = TestSuite {
'shouldComponentUpdate {
'reusableState {
import SampleComponent1._
val pic1a = Picture(1, "asdf", "qer")
val pic1b = Picture(1, "eqwrg", "seafr")
val pic2 = Picture(2, "asdf", "qer")
val c = ReactTestUtils renderIntoDocument component(Props("n", None, pic1a))
def test(expectDelta: Int, s: Props): Unit = {
val a = renderCount
c setState s
assert(renderCount == a + expectDelta)
}
val (update,ignore) = (1,0)
test(ignore, Props("n", None, pic1a))
test(update, Props("!", None, pic1a))
test(ignore, Props("!", None, pic1a))
test(ignore, Props("!", None, pic1b))
test(update, Props("!", None, pic2))
test(ignore, Props("!", None, pic2))
test(update, Props("!", Some(3), pic2))
test(update, Props("!", Some(4), pic2))
test(ignore, Props("!", Some(4), pic2))
test(update, Props("!", Some(5), pic2))
}
'reusableProps {
import SampleComponent2._
val data1: M = Map(1 -> "One", 2 -> "Two", 3 -> "Three")
val data2: M = Map(1 -> "One", 2 -> "Two", 3 -> "33333")
val c = ReactTestUtils renderIntoDocument outerComponent(data1)
assert(outerRenderCount == 1, innerRenderCount == 3)
c.forceUpdate()
assert(outerRenderCount == 2, innerRenderCount == 3)
c setState data2
assert(outerRenderCount == 3, innerRenderCount == 4)
}
}
'option {
def test(vs: Option[Boolean]*) =
for {a <- vs; b <- vs}
assert((a ~=~ b) == (a == b))
test(None, Some(true), Some(false))
}
'fns {
type F1[A] = Int ~=> A
type F2[A] = Int ~=> F1[A]
type F3[A] = Int ~=> F2[A]
def test1[A](f: F1[A], g: F1[A]): Unit = {
f ~=~ f
f ~/~ g
}
def test2[A](f: F2[A], g: F2[A]): Unit = {
test1(f, g)
f(1) ~=~ f(1)
f(1) ~/~ f(2)
f(1) ~/~ g(1)
}
def test3[A](f: F3[A], g: F3[A]): Unit = {
test2(f, g)
f(1)(2) ~=~ f(1)(2)
f(1)(2) ~/~ f(1)(3)
f(1)(2) ~/~ f(2)(2)
f(1)(2) ~/~ f(2)(1)
f(2)(1) ~=~ f(2)(1)
f(1)(2) ~/~ g(1)(2)
}
'fn1 {
val f = ReusableFn((i: Int) => i + 1)
val g = ReusableFn((i: Int) => i + 10)
test1(f, g)
assert(f(5) == 6)
}
'fn2 {
val f = ReusableFn((a: Int, b: Int) => a + b)
val g = ReusableFn((a: Int, b: Int) => a * b)
test2(f, g)
assert(f(1)(2) == 3)
}
'fn3 {
val f = ReusableFn((a: Int, b: Int, c: Int) => a + b + c)
val g = ReusableFn((a: Int, b: Int, c: Int) => a * b * c)
test3(f, g)
assert(f(1)(2)(3) == 6)
}
'overComponent {
import TestUtil.Inference._
test[BackendScope[A, S] ]($ => ReusableFn($).modState ).expect[(S => S) ~=> Unit]
test[ReactComponentM[A, S, B, N]]($ => ReusableFn($).modStateIO).expect[(S => S) ~=> IO[Unit]]
test[CompStateFocus[S] ]($ => ReusableFn($).setStateIO).expect[S ~=> IO[Unit]]
}
'endoOps {
import TestUtil.Inference._
case class Counter(count: Int) {
def add(i: Int): Counter = copy(count = count + i)
}
test[BackendScope[A, S] ]($ => ReusableFn($).modStateIO.endoZoom(st_s) ).expect[T ~=> IO[Unit]]
test[BackendScope[A, Counter] ]($ => ReusableFn($).modState .endoCall(_.add) ).expect[Int ~=> Unit]
test[BackendScope[A, Map[Int, S]]]($ => ReusableFn($).modState .endoCall2(_.updated)).expect[Int ~=> (S ~=> Unit)]
}
'byName {
var state = 10
val fn = ReusableFn.byName((_: Int) + state)
assert(fn(2) == 12)
state = 20
assert(fn(2) == 22)
}
}
}
}
| elacin/scalajs-react | test/src/test/scala/japgolly/scalajs/react/extra/ReusabilityTest.scala | Scala | apache-2.0 | 5,959 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.nn
import com.intel.analytics.bigdl.dllib.nn.abstractnn.TensorModule
import com.intel.analytics.bigdl.dllib.tensor.Tensor
import com.intel.analytics.bigdl.dllib.tensor.TensorNumericMath.TensorNumeric
import scala.reflect.ClassTag
/**
* [[Masking]] Use a mask value to skip timesteps for a sequence
*
* @param maskValue mask value
*/
class Masking[T: ClassTag](maskValue: Double = 0.0)
(implicit ev: TensorNumeric[T]) extends TensorModule[T]{
val batchDim = 1
val timeDim = 2
override def updateOutput(input: Tensor[T]): Tensor[T] = {
output.resizeAs(input)
var timeIndex = 1
var batchIndex = 1
val fillValue = ev.fromType(0.0)
while(batchIndex <= input.size(batchDim)) {
val batchInput = input.select(batchDim, batchIndex)
val batchOutput = output.select(batchDim, batchIndex)
while(timeIndex <= input.size(timeDim)) {
val slicedTensor = batchInput.select(timeDim - 1, timeIndex)
if (!slicedTensor.notEqualValue(maskValue)) {
batchOutput.select(timeDim - 1, timeIndex).fill(fillValue)
} else {
batchOutput.select(timeDim - 1, timeIndex).copy(slicedTensor)
}
timeIndex += 1
}
batchIndex += 1
timeIndex = 1
}
output
}
override def updateGradInput(input: Tensor[T], gradOutput: Tensor[T]): Tensor[T] = {
require(input.isSameSizeAs(gradOutput),
"Input should have the same size as gradOutput" +
s"input size(${input.size().foreach(x => x)})" +
s"gradOutput size(${gradOutput.size().foreach(x => x)})")
gradInput.resizeAs(input)
var timeIndex = 1
var batchIndex = 1
val fillValue = ev.fromType(0.0)
while(batchIndex <= input.size(batchDim)) {
val batchInput = input.select(batchDim, batchIndex)
val batchgradOutput = gradOutput.select(batchDim, batchIndex)
val batchgradInput = gradInput.select(batchDim, batchIndex)
while(timeIndex <= input.size(timeDim)) {
val slicedTensor = batchInput.select(timeDim - 1, timeIndex)
if (!slicedTensor.notEqualValue(maskValue)) {
batchgradInput.select(timeDim - 1, timeIndex).fill(fillValue)
} else {
batchgradInput.select(timeDim - 1, timeIndex).copy(
batchgradOutput.select(timeDim - 1, timeIndex))
}
timeIndex += 1
}
batchIndex += 1
timeIndex = 1
}
gradInput
}
}
object Masking {
def apply[T : ClassTag](maskValue: Double)(implicit ev: TensorNumeric[T]): Masking[T]
= new Masking[T](maskValue)
}
| intel-analytics/BigDL | scala/dllib/src/main/scala/com/intel/analytics/bigdl/dllib/nn/Masking.scala | Scala | apache-2.0 | 3,193 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.serializer
import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
import java.nio.ByteBuffer
import com.esotericsoftware.kryo.io.{Output, Input}
import org.apache.avro.{SchemaBuilder, Schema}
import org.apache.avro.generic.GenericData.Record
import org.apache.spark.{SparkFunSuite, SharedSparkContext}
class GenericAvroSerializerSuite extends SparkFunSuite with SharedSparkContext {
conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
val schema : Schema = SchemaBuilder
.record("testRecord").fields()
.requiredString("data")
.endRecord()
val record = new Record(schema)
record.put("data", "test data")
test("schema compression and decompression") {
val genericSer = new GenericAvroSerializer(conf.getAvroSchema)
assert(schema === genericSer.decompress(ByteBuffer.wrap(genericSer.compress(schema))))
}
test("record serialization and deserialization") {
val genericSer = new GenericAvroSerializer(conf.getAvroSchema)
val outputStream = new ByteArrayOutputStream()
val output = new Output(outputStream)
genericSer.serializeDatum(record, output)
output.flush()
output.close()
val input = new Input(new ByteArrayInputStream(outputStream.toByteArray))
assert(genericSer.deserializeDatum(input) === record)
}
test("uses schema fingerprint to decrease message size") {
val genericSerFull = new GenericAvroSerializer(conf.getAvroSchema)
val output = new Output(new ByteArrayOutputStream())
val beginningNormalPosition = output.total()
genericSerFull.serializeDatum(record, output)
output.flush()
val normalLength = output.total - beginningNormalPosition
conf.registerAvroSchemas(schema)
val genericSerFinger = new GenericAvroSerializer(conf.getAvroSchema)
val beginningFingerprintPosition = output.total()
genericSerFinger.serializeDatum(record, output)
val fingerprintLength = output.total - beginningFingerprintPosition
assert(fingerprintLength < normalLength)
}
test("caches previously seen schemas") {
val genericSer = new GenericAvroSerializer(conf.getAvroSchema)
val compressedSchema = genericSer.compress(schema)
val decompressedScheam = genericSer.decompress(ByteBuffer.wrap(compressedSchema))
assert(compressedSchema.eq(genericSer.compress(schema)))
assert(decompressedScheam.eq(genericSer.decompress(ByteBuffer.wrap(compressedSchema))))
}
}
| ArvinDevel/onlineAggregationOnSparkV2 | core/src/test/scala/org/apache/spark/serializer/GenericAvroSerializerSuite.scala | Scala | apache-2.0 | 3,259 |
package org.kimbasoft.akka.extension
import akka.actor.{Props, Actor}
import org.kimbasoft.akka.extension.ExtensionActor.Messages.{ManualAction, ConfigAction}
/**
* Missing documentation.
*
* @author <a href="steffen.krause@soabridge.com">Steffen Krause</a>
* @since 1.0
*/
class ExtensionActor extends Actor with Counting {
import org.kimbasoft.akka.extension.ExtensionActor.Messages.CountAction
val name = self.path.name
// Accessing ConfigExtension registered with this ActorSystem
val configExt = ConfigExtension(context.system)
// Accessing ManualExtension registered with this ActorSystem
val manualExt = ManualExtension(context.system)
override def receive: Receive = {
case ConfigAction =>
println(s" $name - ConfigAction: ${configExt.configMsg}")
case CountAction =>
println(s" $name - CountAction : Current Counter state is ${increment()}")
case ManualAction =>
println(s" $name - ManualAction: ${manualExt.manualMsg}")
case action =>
println(s" !! Unrecognized action $action")
}
}
object ExtensionActor {
val props = Props[ExtensionActor]
object Messages {
case object ConfigAction
case object CountAction
case object ManualAction
}
}
| kimba74/sandbox-scala | src/main/scala/org/kimbasoft/akka/extension/ExtensionActor.scala | Scala | gpl-3.0 | 1,244 |
/*
* Copyright 2014 Commonwealth Computer Research, Inc.
*
* Licensed under the Apache License, Version 2.0 (the License);
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an AS IS BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.locationtech.geomesa.core
import org.apache.accumulo.core.client.IteratorSetting
import org.opengis.feature.simple.SimpleFeatureType
import scala.collection.JavaConversions._
package object iterators {
val FEATURE_ENCODING = "geomesa.feature.encoding"
val USER_DATA = ".userdata."
implicit class RichIteratorSetting(cfg: IteratorSetting) {
/**
* Copy UserData entries taken from a SimpleFeatureType into an IteratorSetting for later transfer back into
* a SimpleFeatureType
*
* This works around the fact that SimpleFeatureTypes.encodeType ignores the UserData
*
*/
def encodeUserData(userData: java.util.Map[AnyRef,AnyRef], keyPrefix: String) {
val fullPrefix = keyPrefix + USER_DATA
userData.foreach { case (k, v) => cfg.addOption(fullPrefix + k.toString, v.toString)}
}
}
implicit class RichIteratorSimpleFeatureType(sft: SimpleFeatureType) {
/**
* Copy UserData entries taken from an IteratorSetting/Options back into
* a SimpleFeatureType
*
* This works around the fact that SimpleFeatureTypes.encodeType ignores the UserData
*
*/
def decodeUserData(options: java.util.Map[String,String], keyPrefix:String) {
val fullPrefix = keyPrefix + USER_DATA
options
.filter { case (k, _) => k.startsWith(fullPrefix) }
.foreach { case (k, v) => sft.getUserData.put(k.stripPrefix(fullPrefix), v) }
}
}
}
| mmatz-ccri/geomesa | geomesa-core/src/main/scala/org/locationtech/geomesa/core/iterators/package.scala | Scala | apache-2.0 | 2,075 |
/*
* Copyright 2017-2017 PayPal
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.squbs.actorregistry.japi
import java.util.Optional
import java.util.concurrent.CompletionStage
import akka.actor.{ActorRef, ActorRefFactory}
import akka.util.Timeout
import org.squbs.actorregistry.{ActorLookup => SActorLookup}
import scala.compat.java8.FutureConverters._
import scala.compat.java8.OptionConverters._
import scala.concurrent.duration._
/**
* Java API: Factory for obtaining a lookup object.
*/
object ActorLookup {
/**
* Creates a reusable ActorLookup
*
* @param refFactory The ActorSystem or ActorContext
* @return An ActorLookup object
*/
def create(refFactory: ActorRefFactory): ActorLookup[AnyRef] =
new ActorLookup(None, None, classOf[AnyRef])(refFactory)
}
/**
* Java API: The reusable ActorLookup instance, only to be created from ActorLookup object.
*
* @param refFactory The ActorSystem or ActorContext
*/
class ActorLookup[T <: AnyRef] private[japi] (private[japi] val name: Option[String],
private[japi] val requestType: Option[Class[_]],
private[japi] val responseType: Class[T])
(private[japi] implicit val refFactory: ActorRefFactory) {
private def getLookup(msg: Any) =
new SActorLookup(responseType, Some(msg.getClass), name, responseType != classOf[AnyRef])
/**
* Sends a message to an ActorRef
*
* @param msg The message to send
* @param sender The sender's actor reference
*/
def tell(msg: Any, sender: ActorRef): Unit = getLookup(msg).tell(msg, sender)
/**
* Sends a message to an ActorRef, obtaining a future for the response
*
* @param msg The message to send
* @param timeout The response timeout, in milliseconds
* @return The CompletionStage of the response
*/
def ask(msg: Any, timeout: Long): CompletionStage[T] = ask(msg, timeout.milliseconds)
/**
* Sends a message to an ActorRef
* @param msg The message to send
* @param timeout The response timeout
* @return The CompletionStage of the response
*/
def ask(msg: Any, timeout: Timeout): CompletionStage[T] = getLookup(msg).ask(msg)(timeout, refFactory).toJava
def resolveOne(timeout: FiniteDuration): CompletionStage[ActorRef] =
new SActorLookup(responseType, requestType, name, responseType != classOf[AnyRef]).resolveOne(timeout).toJava
/**
* Creates an ActorLookup based on a response type
*
* @param responseType The class representing the response type
* @tparam U The response type for the lookup
* @return The ActorLookup looking up matching the response type
*/
def lookup[U <: AnyRef](responseType: Class[U]): ActorLookup[U] =
new ActorLookup(None, None, responseType)(refFactory)
/**
* Creates an ActorLookup looking up an actor by name.
*
* @param name The actor name, as registered
* @return The ActorLookup matching the actor name
*/
def lookup(name: String): ActorLookup[AnyRef] =
new ActorLookup(Some(name), None, classOf[AnyRef])(refFactory)
/**
* Creates an ActorLookup looking up an actor by both name and response type.
*
* @param name The actor name, as registered
* @param responseType The class representing the response type
* @tparam U The response type for the lookup
* @return The ActorLookup matching the name and the response type
*/
def lookup[U <: AnyRef](name: String, responseType: Class[U]): ActorLookup[U] =
new ActorLookup(Some(name), None, responseType)(refFactory)
/**
* Creates an ActorLookup looking up an actor by name.
*
* @param name The actor name, if any, as registered
* @return The ActorLookup matching the actor name
*/
def lookup(name: Optional[String]): ActorLookup[AnyRef] =
new ActorLookup(name.asScala, None, classOf[AnyRef])(refFactory)
/**
* Creates an ActorLookup looking up an actor by both name and response type.
*
* @param name The actor name, if any, as registered
* @param responseType The class representing the response type
* @tparam U The response type for the lookup
* @return The ActorLookup matching the name and the response type
*/
def lookup[U <: AnyRef](name: Optional[String], responseType: Class[U]): ActorLookup[U] =
new ActorLookup(name.asScala, None, responseType)(refFactory)
def lookup[U <: AnyRef](name: Optional[String], requestType: Optional[Class[_]],
responseType: Class[U]): ActorLookup[U] =
new ActorLookup(name.asScala, requestType.asScala, responseType)(refFactory)
}
| akara/squbs | squbs-actorregistry/src/main/scala/org/squbs/actorregistry/japi/ActorLookup.scala | Scala | apache-2.0 | 5,235 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.utils
import kafka.cluster.{Broker, Cluster}
import kafka.consumer.TopicCount
import org.I0Itec.zkclient.{IZkDataListener, ZkClient}
import org.I0Itec.zkclient.exception.{ZkNodeExistsException, ZkNoNodeException, ZkMarshallingError}
import org.I0Itec.zkclient.serialize.ZkSerializer
import collection._
import kafka.api.LeaderAndIsr
import mutable.ListBuffer
import org.apache.zookeeper.data.Stat
import java.util.concurrent.locks.{ReentrantLock, Condition}
import kafka.admin._
import kafka.common.{KafkaException, NoEpochForPartitionException}
import kafka.controller.ReassignedPartitionsContext
import kafka.controller.PartitionAndReplica
import kafka.controller.KafkaController
import scala.{collection, Some}
import kafka.controller.LeaderIsrAndControllerEpoch
import kafka.common.TopicAndPartition
import kafka.utils.Utils.inLock
import scala.collection
object ZkUtils extends Logging {
val ConsumersPath = "/consumers"
val BrokerIdsPath = "/brokers/ids"
val BrokerTopicsPath = "/brokers/topics"
val TopicConfigPath = "/config/topics"
val TopicConfigChangesPath = "/config/changes"
val ControllerPath = "/controller"
val ControllerEpochPath = "/controller_epoch"
val ReassignPartitionsPath = "/admin/reassign_partitions"
val DeleteTopicsPath = "/admin/delete_topics"
val PreferredReplicaLeaderElectionPath = "/admin/preferred_replica_election"
def getTopicPath(topic: String): String = {
BrokerTopicsPath + "/" + topic
}
def getTopicPartitionsPath(topic: String): String = {
getTopicPath(topic) + "/partitions"
}
def getTopicConfigPath(topic: String): String =
TopicConfigPath + "/" + topic
def getDeleteTopicPath(topic: String): String =
DeleteTopicsPath + "/" + topic
def getController(zkClient: ZkClient): Int = {
readDataMaybeNull(zkClient, ControllerPath)._1 match {
case Some(controller) => KafkaController.parseControllerId(controller)
case None => throw new KafkaException("Controller doesn't exist")
}
}
def getTopicPartitionPath(topic: String, partitionId: Int): String =
getTopicPartitionsPath(topic) + "/" + partitionId
def getTopicPartitionLeaderAndIsrPath(topic: String, partitionId: Int): String =
getTopicPartitionPath(topic, partitionId) + "/" + "state"
def getSortedBrokerList(zkClient: ZkClient): Seq[Int] =
ZkUtils.getChildren(zkClient, BrokerIdsPath).map(_.toInt).sorted
def getAllBrokersInCluster(zkClient: ZkClient): Seq[Broker] = {
val brokerIds = ZkUtils.getChildrenParentMayNotExist(zkClient, ZkUtils.BrokerIdsPath).sorted
brokerIds.map(_.toInt).map(getBrokerInfo(zkClient, _)).filter(_.isDefined).map(_.get)
}
def getLeaderIsrAndEpochForPartition(zkClient: ZkClient, topic: String, partition: Int):Option[LeaderIsrAndControllerEpoch] = {
val leaderAndIsrPath = getTopicPartitionLeaderAndIsrPath(topic, partition)
val leaderAndIsrInfo = readDataMaybeNull(zkClient, leaderAndIsrPath)
val leaderAndIsrOpt = leaderAndIsrInfo._1
val stat = leaderAndIsrInfo._2
leaderAndIsrOpt match {
case Some(leaderAndIsrStr) => parseLeaderAndIsr(leaderAndIsrStr, topic, partition, stat)
case None => None
}
}
def getLeaderAndIsrForPartition(zkClient: ZkClient, topic: String, partition: Int):Option[LeaderAndIsr] = {
getLeaderIsrAndEpochForPartition(zkClient, topic, partition).map(_.leaderAndIsr)
}
def setupCommonPaths(zkClient: ZkClient) {
for(path <- Seq(ConsumersPath, BrokerIdsPath, BrokerTopicsPath, TopicConfigChangesPath, TopicConfigPath, DeleteTopicsPath))
makeSurePersistentPathExists(zkClient, path)
}
def parseLeaderAndIsr(leaderAndIsrStr: String, topic: String, partition: Int, stat: Stat)
: Option[LeaderIsrAndControllerEpoch] = {
Json.parseFull(leaderAndIsrStr) match {
case Some(m) =>
val leaderIsrAndEpochInfo = m.asInstanceOf[Map[String, Any]]
val leader = leaderIsrAndEpochInfo.get("leader").get.asInstanceOf[Int]
val epoch = leaderIsrAndEpochInfo.get("leader_epoch").get.asInstanceOf[Int]
val isr = leaderIsrAndEpochInfo.get("isr").get.asInstanceOf[List[Int]]
val controllerEpoch = leaderIsrAndEpochInfo.get("controller_epoch").get.asInstanceOf[Int]
val zkPathVersion = stat.getVersion
debug("Leader %d, Epoch %d, Isr %s, Zk path version %d for partition [%s,%d]".format(leader, epoch,
isr.toString(), zkPathVersion, topic, partition))
Some(LeaderIsrAndControllerEpoch(LeaderAndIsr(leader, epoch, isr, zkPathVersion), controllerEpoch))
case None => None
}
}
def getLeaderForPartition(zkClient: ZkClient, topic: String, partition: Int): Option[Int] = {
val leaderAndIsrOpt = readDataMaybeNull(zkClient, getTopicPartitionLeaderAndIsrPath(topic, partition))._1
leaderAndIsrOpt match {
case Some(leaderAndIsr) =>
Json.parseFull(leaderAndIsr) match {
case Some(m) =>
Some(m.asInstanceOf[Map[String, Any]].get("leader").get.asInstanceOf[Int])
case None => None
}
case None => None
}
}
/**
* This API should read the epoch in the ISR path. It is sufficient to read the epoch in the ISR path, since if the
* leader fails after updating epoch in the leader path and before updating epoch in the ISR path, effectively some
* other broker will retry becoming leader with the same new epoch value.
*/
def getEpochForPartition(zkClient: ZkClient, topic: String, partition: Int): Int = {
val leaderAndIsrOpt = readDataMaybeNull(zkClient, getTopicPartitionLeaderAndIsrPath(topic, partition))._1
leaderAndIsrOpt match {
case Some(leaderAndIsr) =>
Json.parseFull(leaderAndIsr) match {
case None => throw new NoEpochForPartitionException("No epoch, leaderAndISR data for partition [%s,%d] is invalid".format(topic, partition))
case Some(m) => m.asInstanceOf[Map[String, Any]].get("leader_epoch").get.asInstanceOf[Int]
}
case None => throw new NoEpochForPartitionException("No epoch, ISR path for partition [%s,%d] is empty"
.format(topic, partition))
}
}
/**
* Gets the in-sync replicas (ISR) for a specific topic and partition
*/
def getInSyncReplicasForPartition(zkClient: ZkClient, topic: String, partition: Int): Seq[Int] = {
val leaderAndIsrOpt = readDataMaybeNull(zkClient, getTopicPartitionLeaderAndIsrPath(topic, partition))._1
leaderAndIsrOpt match {
case Some(leaderAndIsr) =>
Json.parseFull(leaderAndIsr) match {
case Some(m) => m.asInstanceOf[Map[String, Any]].get("isr").get.asInstanceOf[Seq[Int]]
case None => Seq.empty[Int]
}
case None => Seq.empty[Int]
}
}
/**
* Gets the assigned replicas (AR) for a specific topic and partition
*/
def getReplicasForPartition(zkClient: ZkClient, topic: String, partition: Int): Seq[Int] = {
val jsonPartitionMapOpt = readDataMaybeNull(zkClient, getTopicPath(topic))._1
jsonPartitionMapOpt match {
case Some(jsonPartitionMap) =>
Json.parseFull(jsonPartitionMap) match {
case Some(m) => m.asInstanceOf[Map[String, Any]].get("partitions") match {
case Some(replicaMap) => replicaMap.asInstanceOf[Map[String, Seq[Int]]].get(partition.toString) match {
case Some(seq) => seq
case None => Seq.empty[Int]
}
case None => Seq.empty[Int]
}
case None => Seq.empty[Int]
}
case None => Seq.empty[Int]
}
}
def registerBrokerInZk(zkClient: ZkClient, id: Int, host: String, port: Int, timeout: Int, jmxPort: Int) {
val brokerIdPath = ZkUtils.BrokerIdsPath + "/" + id
val timestamp = SystemTime.milliseconds.toString
val brokerInfo = Json.encode(Map("version" -> 1, "host" -> host, "port" -> port, "jmx_port" -> jmxPort, "timestamp" -> timestamp))
val expectedBroker = new Broker(id, host, port)
try {
createEphemeralPathExpectConflictHandleZKBug(zkClient, brokerIdPath, brokerInfo, expectedBroker,
(brokerString: String, broker: Any) => Broker.createBroker(broker.asInstanceOf[Broker].id, brokerString).equals(broker.asInstanceOf[Broker]),
timeout)
} catch {
case e: ZkNodeExistsException =>
throw new RuntimeException("A broker is already registered on the path " + brokerIdPath
+ ". This probably " + "indicates that you either have configured a brokerid that is already in use, or "
+ "else you have shutdown this broker and restarted it faster than the zookeeper "
+ "timeout so it appears to be re-registering.")
}
info("Registered broker %d at path %s with address %s:%d.".format(id, brokerIdPath, host, port))
}
def getConsumerPartitionOwnerPath(group: String, topic: String, partition: Int): String = {
val topicDirs = new ZKGroupTopicDirs(group, topic)
topicDirs.consumerOwnerDir + "/" + partition
}
def leaderAndIsrZkData(leaderAndIsr: LeaderAndIsr, controllerEpoch: Int): String = {
Json.encode(Map("version" -> 1, "leader" -> leaderAndIsr.leader, "leader_epoch" -> leaderAndIsr.leaderEpoch,
"controller_epoch" -> controllerEpoch, "isr" -> leaderAndIsr.isr))
}
/**
* Get JSON partition to replica map from zookeeper.
*/
def replicaAssignmentZkData(map: Map[String, Seq[Int]]): String = {
Json.encode(Map("version" -> 1, "partitions" -> map))
}
/**
* make sure a persistent path exists in ZK. Create the path if not exist.
*/
def makeSurePersistentPathExists(client: ZkClient, path: String) {
if (!client.exists(path))
client.createPersistent(path, true) // won't throw NoNodeException or NodeExistsException
}
/**
* create the parent path
*/
private def createParentPath(client: ZkClient, path: String): Unit = {
val parentDir = path.substring(0, path.lastIndexOf('/'))
if (parentDir.length != 0)
client.createPersistent(parentDir, true)
}
/**
* Create an ephemeral node with the given path and data. Create parents if necessary.
*/
private def createEphemeralPath(client: ZkClient, path: String, data: String): Unit = {
try {
client.createEphemeral(path, data)
} catch {
case e: ZkNoNodeException => {
createParentPath(client, path)
client.createEphemeral(path, data)
}
}
}
/**
* Create an ephemeral node with the given path and data.
* Throw NodeExistException if node already exists.
*/
def createEphemeralPathExpectConflict(client: ZkClient, path: String, data: String): Unit = {
try {
createEphemeralPath(client, path, data)
} catch {
case e: ZkNodeExistsException => {
// this can happen when there is connection loss; make sure the data is what we intend to write
var storedData: String = null
try {
storedData = readData(client, path)._1
} catch {
case e1: ZkNoNodeException => // the node disappeared; treat as if node existed and let caller handles this
case e2: Throwable => throw e2
}
if (storedData == null || storedData != data) {
info("conflict in " + path + " data: " + data + " stored data: " + storedData)
throw e
} else {
// otherwise, the creation succeeded, return normally
info(path + " exists with value " + data + " during connection loss; this is ok")
}
}
case e2: Throwable => throw e2
}
}
/**
* Create an ephemeral node with the given path and data.
* Throw NodeExistsException if node already exists.
* Handles the following ZK session timeout bug:
*
* https://issues.apache.org/jira/browse/ZOOKEEPER-1740
*
* Upon receiving a NodeExistsException, read the data from the conflicted path and
* trigger the checker function comparing the read data and the expected data,
* If the checker function returns true then the above bug might be encountered, back off and retry;
* otherwise re-throw the exception
*/
def createEphemeralPathExpectConflictHandleZKBug(zkClient: ZkClient, path: String, data: String, expectedCallerData: Any, checker: (String, Any) => Boolean, backoffTime: Int): Unit = {
while (true) {
try {
createEphemeralPathExpectConflict(zkClient, path, data)
return
} catch {
case e: ZkNodeExistsException => {
// An ephemeral node may still exist even after its corresponding session has expired
// due to a Zookeeper bug, in this case we need to retry writing until the previous node is deleted
// and hence the write succeeds without ZkNodeExistsException
ZkUtils.readDataMaybeNull(zkClient, path)._1 match {
case Some(writtenData) => {
if (checker(writtenData, expectedCallerData)) {
info("I wrote this conflicted ephemeral node [%s] at %s a while back in a different session, ".format(data, path)
+ "hence I will backoff for this node to be deleted by Zookeeper and retry")
Thread.sleep(backoffTime)
} else {
throw e
}
}
case None => // the node disappeared; retry creating the ephemeral node immediately
}
}
case e2: Throwable => throw e2
}
}
}
/**
* Create an persistent node with the given path and data. Create parents if necessary.
*/
def createPersistentPath(client: ZkClient, path: String, data: String = ""): Unit = {
try {
client.createPersistent(path, data)
} catch {
case e: ZkNoNodeException => {
createParentPath(client, path)
client.createPersistent(path, data)
}
}
}
def createSequentialPersistentPath(client: ZkClient, path: String, data: String = ""): String = {
client.createPersistentSequential(path, data)
}
/**
* Update the value of a persistent node with the given path and data.
* create parrent directory if necessary. Never throw NodeExistException.
* Return the updated path zkVersion
*/
def updatePersistentPath(client: ZkClient, path: String, data: String) = {
try {
client.writeData(path, data)
} catch {
case e: ZkNoNodeException => {
createParentPath(client, path)
try {
client.createPersistent(path, data)
} catch {
case e: ZkNodeExistsException =>
client.writeData(path, data)
case e2: Throwable => throw e2
}
}
case e2: Throwable => throw e2
}
}
/**
* Conditional update the persistent path data, return (true, newVersion) if it succeeds, otherwise (the path doesn't
* exist, the current version is not the expected version, etc.) return (false, -1)
*/
def conditionalUpdatePersistentPath(client: ZkClient, path: String, data: String, expectVersion: Int): (Boolean, Int) = {
try {
val stat = client.writeDataReturnStat(path, data, expectVersion)
debug("Conditional update of path %s with value %s and expected version %d succeeded, returning the new version: %d"
.format(path, data, expectVersion, stat.getVersion))
(true, stat.getVersion)
} catch {
case e: Exception =>
error("Conditional update of path %s with data %s and expected version %d failed due to %s".format(path, data,
expectVersion, e.getMessage))
(false, -1)
}
}
/**
* Conditional update the persistent path data, return (true, newVersion) if it succeeds, otherwise (the current
* version is not the expected version, etc.) return (false, -1). If path doesn't exist, throws ZkNoNodeException
*/
def conditionalUpdatePersistentPathIfExists(client: ZkClient, path: String, data: String, expectVersion: Int): (Boolean, Int) = {
try {
val stat = client.writeDataReturnStat(path, data, expectVersion)
debug("Conditional update of path %s with value %s and expected version %d succeeded, returning the new version: %d"
.format(path, data, expectVersion, stat.getVersion))
(true, stat.getVersion)
} catch {
case nne: ZkNoNodeException => throw nne
case e: Exception =>
error("Conditional update of path %s with data %s and expected version %d failed due to %s".format(path, data,
expectVersion, e.getMessage))
(false, -1)
}
}
/**
* Update the value of a persistent node with the given path and data.
* create parrent directory if necessary. Never throw NodeExistException.
*/
def updateEphemeralPath(client: ZkClient, path: String, data: String): Unit = {
try {
client.writeData(path, data)
} catch {
case e: ZkNoNodeException => {
createParentPath(client, path)
client.createEphemeral(path, data)
}
case e2: Throwable => throw e2
}
}
def deletePath(client: ZkClient, path: String): Boolean = {
try {
client.delete(path)
} catch {
case e: ZkNoNodeException =>
// this can happen during a connection loss event, return normally
info(path + " deleted during connection loss; this is ok")
false
case e2: Throwable => throw e2
}
}
def deletePathRecursive(client: ZkClient, path: String) {
try {
client.deleteRecursive(path)
} catch {
case e: ZkNoNodeException =>
// this can happen during a connection loss event, return normally
info(path + " deleted during connection loss; this is ok")
case e2: Throwable => throw e2
}
}
def maybeDeletePath(zkUrl: String, dir: String) {
try {
val zk = new ZkClient(zkUrl, 30*1000, 30*1000, ZKStringSerializer)
zk.deleteRecursive(dir)
zk.close()
} catch {
case _: Throwable => // swallow
}
}
def readData(client: ZkClient, path: String): (String, Stat) = {
val stat: Stat = new Stat()
val dataStr: String = client.readData(path, stat)
(dataStr, stat)
}
def readDataMaybeNull(client: ZkClient, path: String): (Option[String], Stat) = {
val stat: Stat = new Stat()
val dataAndStat = try {
(Some(client.readData(path, stat)), stat)
} catch {
case e: ZkNoNodeException =>
(None, stat)
case e2: Throwable => throw e2
}
dataAndStat
}
def getChildren(client: ZkClient, path: String): Seq[String] = {
import scala.collection.JavaConversions._
// triggers implicit conversion from java list to scala Seq
client.getChildren(path)
}
def getChildrenParentMayNotExist(client: ZkClient, path: String): Seq[String] = {
import scala.collection.JavaConversions._
// triggers implicit conversion from java list to scala Seq
try {
client.getChildren(path)
} catch {
case e: ZkNoNodeException => return Nil
case e2: Throwable => throw e2
}
}
/**
* Check if the given path exists
*/
def pathExists(client: ZkClient, path: String): Boolean = {
client.exists(path)
}
def getCluster(zkClient: ZkClient) : Cluster = {
val cluster = new Cluster
val nodes = getChildrenParentMayNotExist(zkClient, BrokerIdsPath)
for (node <- nodes) {
val brokerZKString = readData(zkClient, BrokerIdsPath + "/" + node)._1
cluster.add(Broker.createBroker(node.toInt, brokerZKString))
}
cluster
}
def getPartitionLeaderAndIsrForTopics(zkClient: ZkClient, topicAndPartitions: Set[TopicAndPartition])
: mutable.Map[TopicAndPartition, LeaderIsrAndControllerEpoch] = {
val ret = new mutable.HashMap[TopicAndPartition, LeaderIsrAndControllerEpoch]
for(topicAndPartition <- topicAndPartitions) {
ZkUtils.getLeaderIsrAndEpochForPartition(zkClient, topicAndPartition.topic, topicAndPartition.partition) match {
case Some(leaderIsrAndControllerEpoch) => ret.put(topicAndPartition, leaderIsrAndControllerEpoch)
case None =>
}
}
ret
}
def getReplicaAssignmentForTopics(zkClient: ZkClient, topics: Seq[String]): mutable.Map[TopicAndPartition, Seq[Int]] = {
val ret = new mutable.HashMap[TopicAndPartition, Seq[Int]]
topics.foreach { topic =>
val jsonPartitionMapOpt = readDataMaybeNull(zkClient, getTopicPath(topic))._1
jsonPartitionMapOpt match {
case Some(jsonPartitionMap) =>
Json.parseFull(jsonPartitionMap) match {
case Some(m) => m.asInstanceOf[Map[String, Any]].get("partitions") match {
case Some(repl) =>
val replicaMap = repl.asInstanceOf[Map[String, Seq[Int]]]
for((partition, replicas) <- replicaMap){
ret.put(TopicAndPartition(topic, partition.toInt), replicas)
debug("Replicas assigned to topic [%s], partition [%s] are [%s]".format(topic, partition, replicas))
}
case None =>
}
case None =>
}
case None =>
}
}
ret
}
def getPartitionAssignmentForTopics(zkClient: ZkClient, topics: Seq[String]): mutable.Map[String, collection.Map[Int, Seq[Int]]] = {
val ret = new mutable.HashMap[String, Map[Int, Seq[Int]]]()
topics.foreach{ topic =>
val jsonPartitionMapOpt = readDataMaybeNull(zkClient, getTopicPath(topic))._1
val partitionMap = jsonPartitionMapOpt match {
case Some(jsonPartitionMap) =>
Json.parseFull(jsonPartitionMap) match {
case Some(m) => m.asInstanceOf[Map[String, Any]].get("partitions") match {
case Some(replicaMap) =>
val m1 = replicaMap.asInstanceOf[Map[String, Seq[Int]]]
m1.map(p => (p._1.toInt, p._2))
case None => Map[Int, Seq[Int]]()
}
case None => Map[Int, Seq[Int]]()
}
case None => Map[Int, Seq[Int]]()
}
debug("Partition map for /brokers/topics/%s is %s".format(topic, partitionMap))
ret += (topic -> partitionMap)
}
ret
}
def getPartitionsForTopics(zkClient: ZkClient, topics: Seq[String]): mutable.Map[String, Seq[Int]] = {
getPartitionAssignmentForTopics(zkClient, topics).map { topicAndPartitionMap =>
val topic = topicAndPartitionMap._1
val partitionMap = topicAndPartitionMap._2
debug("partition assignment of /brokers/topics/%s is %s".format(topic, partitionMap))
(topic -> partitionMap.keys.toSeq.sortWith((s,t) => s < t))
}
}
def getPartitionsBeingReassigned(zkClient: ZkClient): Map[TopicAndPartition, ReassignedPartitionsContext] = {
// read the partitions and their new replica list
val jsonPartitionMapOpt = readDataMaybeNull(zkClient, ReassignPartitionsPath)._1
jsonPartitionMapOpt match {
case Some(jsonPartitionMap) =>
val reassignedPartitions = parsePartitionReassignmentData(jsonPartitionMap)
reassignedPartitions.map(p => (p._1 -> new ReassignedPartitionsContext(p._2)))
case None => Map.empty[TopicAndPartition, ReassignedPartitionsContext]
}
}
def parsePartitionReassignmentData(jsonData: String): Map[TopicAndPartition, Seq[Int]] = {
val reassignedPartitions: mutable.Map[TopicAndPartition, Seq[Int]] = mutable.Map()
Json.parseFull(jsonData) match {
case Some(m) =>
m.asInstanceOf[Map[String, Any]].get("partitions") match {
case Some(partitionsSeq) =>
partitionsSeq.asInstanceOf[Seq[Map[String, Any]]].foreach(p => {
val topic = p.get("topic").get.asInstanceOf[String]
val partition = p.get("partition").get.asInstanceOf[Int]
val newReplicas = p.get("replicas").get.asInstanceOf[Seq[Int]]
reassignedPartitions += TopicAndPartition(topic, partition) -> newReplicas
})
case None =>
}
case None =>
}
reassignedPartitions
}
def parseTopicsData(jsonData: String): Seq[String] = {
var topics = List.empty[String]
Json.parseFull(jsonData) match {
case Some(m) =>
m.asInstanceOf[Map[String, Any]].get("topics") match {
case Some(partitionsSeq) =>
val mapPartitionSeq = partitionsSeq.asInstanceOf[Seq[Map[String, Any]]]
mapPartitionSeq.foreach(p => {
val topic = p.get("topic").get.asInstanceOf[String]
topics ++= List(topic)
})
case None =>
}
case None =>
}
topics
}
def getPartitionReassignmentZkData(partitionsToBeReassigned: Map[TopicAndPartition, Seq[Int]]): String = {
Json.encode(Map("version" -> 1, "partitions" -> partitionsToBeReassigned.map(e => Map("topic" -> e._1.topic, "partition" -> e._1.partition,
"replicas" -> e._2))))
}
def updatePartitionReassignmentData(zkClient: ZkClient, partitionsToBeReassigned: Map[TopicAndPartition, Seq[Int]]) {
val zkPath = ZkUtils.ReassignPartitionsPath
partitionsToBeReassigned.size match {
case 0 => // need to delete the /admin/reassign_partitions path
deletePath(zkClient, zkPath)
info("No more partitions need to be reassigned. Deleting zk path %s".format(zkPath))
case _ =>
val jsonData = getPartitionReassignmentZkData(partitionsToBeReassigned)
try {
updatePersistentPath(zkClient, zkPath, jsonData)
info("Updated partition reassignment path with %s".format(jsonData))
} catch {
case nne: ZkNoNodeException =>
ZkUtils.createPersistentPath(zkClient, zkPath, jsonData)
debug("Created path %s with %s for partition reassignment".format(zkPath, jsonData))
case e2: Throwable => throw new AdminOperationException(e2.toString)
}
}
}
def getPartitionsUndergoingPreferredReplicaElection(zkClient: ZkClient): Set[TopicAndPartition] = {
// read the partitions and their new replica list
val jsonPartitionListOpt = readDataMaybeNull(zkClient, PreferredReplicaLeaderElectionPath)._1
jsonPartitionListOpt match {
case Some(jsonPartitionList) => PreferredReplicaLeaderElectionCommand.parsePreferredReplicaElectionData(jsonPartitionList)
case None => Set.empty[TopicAndPartition]
}
}
def deletePartition(zkClient : ZkClient, brokerId: Int, topic: String) {
val brokerIdPath = BrokerIdsPath + "/" + brokerId
zkClient.delete(brokerIdPath)
val brokerPartTopicPath = BrokerTopicsPath + "/" + topic + "/" + brokerId
zkClient.delete(brokerPartTopicPath)
}
def getConsumersInGroup(zkClient: ZkClient, group: String): Seq[String] = {
val dirs = new ZKGroupDirs(group)
getChildren(zkClient, dirs.consumerRegistryDir)
}
def getConsumersPerTopic(zkClient: ZkClient, group: String) : mutable.Map[String, List[String]] = {
val dirs = new ZKGroupDirs(group)
val consumers = getChildrenParentMayNotExist(zkClient, dirs.consumerRegistryDir)
val consumersPerTopicMap = new mutable.HashMap[String, List[String]]
for (consumer <- consumers) {
val topicCount = TopicCount.constructTopicCount(group, consumer, zkClient)
for ((topic, consumerThreadIdSet) <- topicCount.getConsumerThreadIdsPerTopic) {
for (consumerThreadId <- consumerThreadIdSet)
consumersPerTopicMap.get(topic) match {
case Some(curConsumers) => consumersPerTopicMap.put(topic, consumerThreadId :: curConsumers)
case _ => consumersPerTopicMap.put(topic, List(consumerThreadId))
}
}
}
for ( (topic, consumerList) <- consumersPerTopicMap )
consumersPerTopicMap.put(topic, consumerList.sortWith((s,t) => s < t))
consumersPerTopicMap
}
/**
* This API takes in a broker id, queries zookeeper for the broker metadata and returns the metadata for that broker
* or throws an exception if the broker dies before the query to zookeeper finishes
* @param brokerId The broker id
* @param zkClient The zookeeper client connection
* @return An optional Broker object encapsulating the broker metadata
*/
def getBrokerInfo(zkClient: ZkClient, brokerId: Int): Option[Broker] = {
ZkUtils.readDataMaybeNull(zkClient, ZkUtils.BrokerIdsPath + "/" + brokerId)._1 match {
case Some(brokerInfo) => Some(Broker.createBroker(brokerId, brokerInfo))
case None => None
}
}
def getAllTopics(zkClient: ZkClient): Seq[String] = {
val topics = ZkUtils.getChildrenParentMayNotExist(zkClient, BrokerTopicsPath)
if(topics == null)
Seq.empty[String]
else
topics
}
def getAllPartitions(zkClient: ZkClient): Set[TopicAndPartition] = {
val topics = ZkUtils.getChildrenParentMayNotExist(zkClient, BrokerTopicsPath)
if(topics == null) Set.empty[TopicAndPartition]
else {
topics.map { topic =>
getChildren(zkClient, getTopicPartitionsPath(topic)).map(_.toInt).map(TopicAndPartition(topic, _))
}.flatten.toSet
}
}
}
class LeaderExistsOrChangedListener(topic: String,
partition: Int,
leaderLock: ReentrantLock,
leaderExistsOrChanged: Condition,
oldLeaderOpt: Option[Int] = None,
zkClient: ZkClient = null) extends IZkDataListener with Logging {
@throws(classOf[Exception])
def handleDataChange(dataPath: String, data: Object) {
val t = dataPath.split("/").takeRight(3).head
val p = dataPath.split("/").takeRight(2).head.toInt
inLock(leaderLock) {
if(t == topic && p == partition){
if(oldLeaderOpt == None){
trace("In leader existence listener on partition [%s, %d], leader has been created".format(topic, partition))
leaderExistsOrChanged.signal()
}
else {
val newLeaderOpt = ZkUtils.getLeaderForPartition(zkClient, t, p)
if(newLeaderOpt.isDefined && newLeaderOpt.get != oldLeaderOpt.get){
trace("In leader change listener on partition [%s, %d], leader has been moved from %d to %d".format(topic, partition, oldLeaderOpt.get, newLeaderOpt.get))
leaderExistsOrChanged.signal()
}
}
}
}
}
@throws(classOf[Exception])
def handleDataDeleted(dataPath: String) {
inLock(leaderLock) {
leaderExistsOrChanged.signal()
}
}
}
object ZKStringSerializer extends ZkSerializer {
@throws(classOf[ZkMarshallingError])
def serialize(data : Object) : Array[Byte] = data.asInstanceOf[String].getBytes("UTF-8")
@throws(classOf[ZkMarshallingError])
def deserialize(bytes : Array[Byte]) : Object = {
if (bytes == null)
null
else
new String(bytes, "UTF-8")
}
}
class ZKGroupDirs(val group: String) {
def consumerDir = ZkUtils.ConsumersPath
def consumerGroupDir = consumerDir + "/" + group
def consumerRegistryDir = consumerGroupDir + "/ids"
}
class ZKGroupTopicDirs(group: String, topic: String) extends ZKGroupDirs(group) {
def consumerOffsetDir = consumerGroupDir + "/offsets/" + topic
def consumerOwnerDir = consumerGroupDir + "/owners/" + topic
}
class ZKConfig(props: VerifiableProperties) {
/** ZK host string */
val zkConnect = props.getString("zookeeper.connect")
/** zookeeper session timeout */
val zkSessionTimeoutMs = props.getInt("zookeeper.session.timeout.ms", 6000)
/** the max time that the client waits to establish a connection to zookeeper */
val zkConnectionTimeoutMs = props.getInt("zookeeper.connection.timeout.ms",zkSessionTimeoutMs)
/** how far a ZK follower can be behind a ZK leader */
val zkSyncTimeMs = props.getInt("zookeeper.sync.time.ms", 2000)
}
| unix1986/universe | tool/kafka-0.8.1.1-src/core/src/main/scala/kafka/utils/ZkUtils.scala | Scala | bsd-2-clause | 32,618 |
package de.leanovate.swaggercheck.fixtures.model
import java.util.UUID
import org.scalacheck.{Arbitrary, Gen}
import play.api.libs.json.Json
case class OtherBase(
id: UUID,
firstName: Option[String],
lastName: String
)
object OtherBase {
implicit val jsonFormat = Json.format[OtherBase]
implicit val arbitrary = Arbitrary(for {
id <- Gen.uuid
firstName <- Arbitrary.arbitrary[Option[String]]
lastName <- Arbitrary.arbitrary[String]
} yield OtherBase(id, firstName, lastName))
} | leanovate/swagger-check | swagger-check-core/src/test/scala/de/leanovate/swaggercheck/fixtures/model/OtherBase.scala | Scala | mit | 588 |
package com.learn.spark.mllib
import breeze.linalg.{DenseVector, DenseMatrix}
import scala.io.Source
/**
* Created by xiaojie on 17/7/27.
* 支持向量机
* 参考文档:图解机器学习 机器学习实战
* http://www.cnblogs.com/wsine/p/5180615.html
*/
object LearnSVM {
def main(args: Array[String]): Unit = {
val (xMatrix, yMatrix) = loadDataSet
}
//读取数据
def loadDataSet(): (DenseMatrix[Double], DenseMatrix[Double]) = {
val path = Thread.currentThread().getContextClassLoader.getResource("testSet.txt").getPath
val dataSet = Source.fromFile(path).getLines().map(_.split("\\t").map(_.toDouble)).toArray
val xDataSet = dataSet.map(i => {
Array(i(0), i(1))
})
val yDataSet = dataSet.map(i => {
Array(i(2))
})
val m = dataSet.length
val n = dataSet.head.length - 1
val xMatrix = DenseMatrix.rand[Double](m, n)
val yMatrix = DenseMatrix.rand[Double](m, 1)
for (i <- 0 until m) {
xMatrix(i, ::) := DenseVector.apply[Double](xDataSet(i)).t
yMatrix(i, ::) := DenseVector.apply[Double](yDataSet(i)).t
}
(xMatrix, yMatrix)
}
}
| xiaoJacky/sparkLearning | sparkLearning/src/main/scala/com/learn/spark/mllib/LearnSVM.scala | Scala | apache-2.0 | 1,238 |
package AXIDefs
{
import Chisel._
import Literal._
import Node._
// Part I: Definitions for the actual data carried over AXI channels
// in part II we will provide definitions for the actual AXI interfaces
// by wrapping the part I types in Decoupled (ready/valid) bundles
// AXI channel data definitions
class AXIAddress(addrWidthBits: Int, idBits: Int) extends Bundle {
// address for the transaction, should be burst aligned if bursts are used
val addr = UInt(width = addrWidthBits)
// size of data beat in bytes
// set to UInt(log2Up((dataBits/8)-1)) for full-width bursts
val size = UInt(width = 3)
// number of data beats -1 in burst: max 255 for incrementing, 15 for wrapping
val len = UInt(width = 8)
// burst mode: 0 for fixed, 1 for incrementing, 2 for wrapping
val burst = UInt(width = 2)
// transaction ID for multiple outstanding requests
val id = UInt(width = idBits)
// set to 1 for exclusive access
val lock = Bool()
// cachability, set to 0010 or 0011
val cache = UInt(width = 4)
// generally ignored, set to to all zeroes
val prot = UInt(width = 3)
// not implemented, set to zeroes
val qos = UInt(width = 4)
override def clone = { new AXIAddress(addrWidthBits, idBits).asInstanceOf[this.type] }
}
class AXIWriteData(dataWidthBits: Int) extends Bundle {
val data = UInt(width = dataWidthBits)
val strb = UInt(width = dataWidthBits/8)
val last = Bool()
override def clone = { new AXIWriteData(dataWidthBits).asInstanceOf[this.type] }
}
class AXIWriteResponse(idBits: Int) extends Bundle {
val id = UInt(width = idBits)
val resp = UInt(width = 2)
override def clone = { new AXIWriteResponse(idBits).asInstanceOf[this.type] }
}
class AXIReadData(dataWidthBits: Int, idBits: Int) extends Bundle {
val data = UInt(width = dataWidthBits)
val id = UInt(width = idBits)
val last = Bool()
val resp = UInt(width = 2)
override def clone = { new AXIReadData(dataWidthBits, idBits).asInstanceOf[this.type] }
}
// Part II: Definitions for the actual AXI interfaces
// TODO add full slave interface definition
class AXIMasterIF(addrWidthBits: Int, dataWidthBits: Int, idBits: Int) extends Bundle {
// write address channel
val writeAddr = Decoupled(new AXIAddress(addrWidthBits, idBits))
// write data channel
val writeData = Decoupled(new AXIWriteData(dataWidthBits))
// write response channel (for memory consistency)
val writeResp = Decoupled(new AXIWriteResponse(idBits)).flip
// read address channel
val readAddr = Decoupled(new AXIAddress(addrWidthBits, idBits))
// read data channel
val readData = Decoupled(new AXIReadData(dataWidthBits, idBits)).flip
// rename signals to be compatible with those in the Xilinx template
def renameSignals() {
// write address channel
writeAddr.bits.addr.setName("M_AXI_AWADDR")
writeAddr.bits.prot.setName("M_AXI_AWPROT")
writeAddr.bits.size.setName("M_AXI_AWSIZE")
writeAddr.bits.len.setName("M_AXI_AWLEN")
writeAddr.bits.burst.setName("M_AXI_AWBURST")
writeAddr.bits.lock.setName("M_AXI_AWLOCK")
writeAddr.bits.cache.setName("M_AXI_AWCACHE")
writeAddr.bits.qos.setName("M_AXI_AWQOS")
writeAddr.bits.id.setName("M_AXI_AWID")
writeAddr.valid.setName("M_AXI_AWVALID")
writeAddr.ready.setName("M_AXI_AWREADY")
// write data channel
writeData.bits.data.setName("M_AXI_WDATA")
writeData.bits.strb.setName("M_AXI_WSTRB")
writeData.bits.last.setName("M_AXI_WLAST")
writeData.valid.setName("M_AXI_WVALID")
writeData.ready.setName("M_AXI_WREADY")
// write response channel
writeResp.bits.resp.setName("M_AXI_BRESP")
writeResp.bits.id.setName("M_AXI_BID")
writeResp.valid.setName("M_AXI_BVALID")
writeResp.ready.setName("M_AXI_BREADY")
// read address channel
readAddr.bits.addr.setName("M_AXI_ARADDR")
readAddr.bits.prot.setName("M_AXI_ARPROT")
readAddr.bits.size.setName("M_AXI_ARSIZE")
readAddr.bits.len.setName("M_AXI_ARLEN")
readAddr.bits.burst.setName("M_AXI_ARBURST")
readAddr.bits.lock.setName("M_AXI_ARLOCK")
readAddr.bits.cache.setName("M_AXI_ARCACHE")
readAddr.bits.qos.setName("M_AXI_ARQOS")
readAddr.bits.id.setName("M_AXI_ARID")
readAddr.valid.setName("M_AXI_ARVALID")
readAddr.ready.setName("M_AXI_ARREADY")
// read data channel
readData.bits.id.setName("M_AXI_RID")
readData.bits.data.setName("M_AXI_RDATA")
readData.bits.resp.setName("M_AXI_RRESP")
readData.bits.last.setName("M_AXI_RLAST")
readData.valid.setName("M_AXI_RVALID")
readData.ready.setName("M_AXI_RREADY")
}
override def clone = { new AXIMasterIF(addrWidthBits, dataWidthBits, idBits).asInstanceOf[this.type] }
}
}
| maltanar/axi-in-chisel | AXIDefs.scala | Scala | mit | 4,788 |
/**
* Created by oji on 7/11/16.
*/
/**
* A semi-functioning, purely-bodged spaghetti of code, for a pure object-oriented implementation of Integers
* DISCLAIMER, the code is a spaghetti of a mess, and may not be the most efficient or pretty, or cleaned up of tests scraps xD
**/
abstract class Integer {
def isZero: Boolean
def isNegative: Boolean
def isPositive: Boolean = !isNegative
def predecessor: Integer
def successor: Integer // = new Successor(this)
def + (that: Integer): Integer
def toNegative (acc: Integer = Zero): Integer
def toPositive (acc: Integer = Zero): Integer
def - (that: Integer): Integer = if (that isZero) this else this + (-that)
def * (that: Integer): Integer
def mult (that: Integer, acc: Integer): Integer
def / (that: Integer): Integer = ???
def % (that: Integer): Integer = ???
def unary_- : Integer
def toInt (acc: Int): Int
def toInt (acc: Int = 0, op: (Int, Int) => Int): Int // = if (isZero) acc else this.predecessor.toInt(op(acc, 1), op)
override def toString: String = "Int(" + toInt(0) + ")"
def > (that: Integer): Boolean = (this - that) isPositive
def < (that: Integer): Boolean = (this - that) isNegative
def == (that: Integer): Boolean = (this - that) isZero
def >= (that: Integer): Boolean = (this > that) || (this == that)
def <= (that: Integer): Boolean = (this < that) || (this == that)
}
object Zero extends Integer {
override def isZero: Boolean = true
override def isNegative: Boolean = false
override def successor: Integer = new Positive(this)
override def predecessor: Integer = new Negative(this)
override def toInt (acc: Int): Int = 0
override def + (that: Integer): Integer = that
def unary_- : Integer = this
override def - (that: Integer): Integer = -that
override def toNegative (acc: Integer = Zero): Integer = this
override def toPositive (acc: Integer = Zero): Integer = this
override def toInt (acc: Int, op: (Int, Int) => Int): Int = 0
override def * (that: Integer) = Zero
override def mult (that: Integer, acc: Integer) = Zero
}
class Positive (val predecessor: Integer) extends Integer {
override def isZero: Boolean = false
override def isNegative: Boolean = false
override def successor: Integer = new Positive(this)
override def + (that: Integer): Integer = if (that isZero) this else if (that isNegative) this - (-that) else new Positive(this + that.predecessor)
def toNegative (acc: Integer = Zero): Integer = if (this.predecessor isZero) acc.predecessor else this.predecessor.toNegative(acc.predecessor)
def toPositive (acc: Integer = Zero): Integer = this
def unary_- : Integer = toNegative()
override def - (that: Integer): Integer = if (that isZero) this else if (that isNegative) this + (-that) else this.predecessor - that.predecessor
override def toInt (acc: Int): Int = if (this.predecessor.isZero) acc + 1 else this.predecessor.toInt(acc + 1) //toInt(0, (x, y) => x + y)
override def > (that: Integer): Boolean = if (that.isNegative || that.isZero) true else super.>(that)
override def < (that: Integer): Boolean = if (that.isNegative || that.isZero) false else super.<(that)
override def toInt (acc: Int, op: (Int, Int) => Int): Int = ???
override def * (that: Integer): Integer = if (that isNegative) -mult(-that, this) else mult(that, this)
override def mult (that: Integer, acc: Integer): Integer = if (that.predecessor isZero) acc else mult(that - Zero.successor, acc + this)
}
class Negative (val successor: Integer) extends Integer {
override def isZero: Boolean = false
override def isNegative: Boolean = true
override def predecessor: Integer = new Negative(this)
override def toInt (acc: Int): Int = toInt(0, (x, y) => x - y)
override def + (that: Integer): Integer = if (that isZero) this else if (that isNegative) -((-this) + (-that)) else that + this
override def - (that: Integer): Integer = if (that isZero) this else if (that isNegative) (-that) + this else -((-this) + that) //this + (-that)
def toPositive (acc: Integer = Zero): Integer = if (this.successor isZero) acc.successor else this.successor.toPositive(acc.successor)
def toNegative (acc: Integer = Zero): Integer = this
def unary_- : Integer = toPositive()
override def > (that: Integer): Boolean = if (that.isPositive || that.isZero) false else super.>(that)
override def < (that: Integer): Boolean = if (that.isPositive || that.isZero) true else super.<(that)
override def toInt (acc: Int, op: (Int, Int) => Int): Int = if (this.successor.isZero) acc - 1 else this.successor.toInt(op(acc, 1), op)
override def * (that: Integer): Integer = if (that isPositive) mult(that, this) else -mult(-that, this)
override def mult (that: Integer, acc: Integer): Integer = if (that.predecessor isZero) acc else mult(that - Zero.successor, acc + this)
}
object MainInts extends App {
val five = new Positive(new Positive(new Positive(new Positive(Zero.successor))))
val minus_two = new Negative(Zero.predecessor)
val positive_two = new Positive(Zero.successor)
println(minus_two * minus_two)
}
| ojizero/random_stuff | ScalaTests/integers.scala | Scala | gpl-3.0 | 5,093 |
/*
* This file is part of CoAnSys project.
* Copyright (c) 2012-2015 ICM-UW
*
* CoAnSys is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* CoAnSys is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with CoAnSys. If not, see <http://www.gnu.org/licenses/>.
*/
package pl.edu.icm.coansys.citations.converters
import scala.collection.JavaConversions.asScalaBuffer
import pl.edu.icm.coansys.citations.data.CitationMatchingProtos.MatchableEntityData
import pl.edu.icm.coansys.citations.data.MatchableEntity
import pl.edu.icm.coansys.models.DocumentProtos.BasicMetadata
/**
* Converter of BasicMetadata object to MatchableEntity object
*
* @author Mateusz Fedoryszak (m.fedoryszak@icm.edu.pl)
* @author madryk
*/
class BasicMetadataToEntityConverter extends Serializable {
def convert(id: String, meta: BasicMetadata): MatchableEntity = {
val data = MatchableEntityData.newBuilder()
data.setId(id)
fillUsingBasicMetadata(data, meta)
new MatchableEntity(data.build())
}
private def fillUsingBasicMetadata(data: MatchableEntityData.Builder, meta: BasicMetadata) {
if (meta.getAuthorCount > 0)
data.setAuthor(meta.getAuthorList.map(a => if (a.hasName) a.getName else a.getForenames + " " + a.getSurname).mkString(", "))
if (meta.hasJournal)
data.setSource(meta.getJournal)
if (meta.getTitleCount > 0)
data.setTitle(meta.getTitleList.map(_.getText).mkString(". "))
if (meta.hasPages)
data.setPages(meta.getPages)
if (meta.hasIssue)
data.setIssue(meta.getIssue)
if (meta.hasVolume)
data.setVolume(meta.getVolume)
if (meta.hasYear)
data.setYear(meta.getYear)
}
} | pdendek/CoAnSys | citation-matching/citation-matching-coansys-code/src/main/scala/pl/edu/icm/coansys/citations/converters/BasicMetadataToEntityConverter.scala | Scala | agpl-3.0 | 2,143 |
package com.nthportal
package versions
package v2
/**
* A version of the form `major`.`minor` (such as, for example, `1.3`).
*
* @param major the major version number
* @param minor the minor version number
*/
final case class Version(major: Int, minor: Int) extends VersionBase[Version, ExtendedVersion] {
// Validate values
require(major >= 0 && minor >= 0, "major and minor values must be >= 0")
override private[versions] def companion = Version
override private[versions] def extendedCompanion = ExtendedVersion
override def toString = s"$major.$minor"
}
object Version extends VersionCompanion[Version, ExtendedVersion] with Of[Dot[Version]] {
override private[versions] val ordering: Ordering[Version] =
Ordering
.by[Version, Int](_.major)
.orElseBy(_.minor)
override def of(major: Int): Dot[Version] = minor => apply(major, minor)
override protected def versionFromArray = { case Array(major, minor) => apply(major, minor) }
/**
* Extracts a version from a string.
*
* @param version the string from which to extract a version
* @return an [[Option]] containing the major and minor version numbers;
* [[None]] if the string did not represent a valid version
*/
def unapply(version: String): Option[(Int, Int)] = parseAsOption(version) flatMap unapply
}
| NthPortal/versions | src/main/scala/com/nthportal/versions/v2/Version.scala | Scala | apache-2.0 | 1,336 |
package scalan.staged
import scalan.{DFunc, Nullable, Scalan}
import debox.{Buffer => DBuffer}
import scalan.util.GraphUtil
import spire.syntax.all.cfor
import scala.collection.mutable
trait ProgramGraphs extends AstGraphs { self: Scalan =>
type PGraph = ProgramGraph
/** Deboxed function to obtain usages of a given node.
* Represents adjacency matrix of the reversed graph `g`.
* @param g original graph whose usages are computed */
class PGraphUsages(g: AstGraph) extends DFunc[Int, DBuffer[Int]] {
override def apply(nodeId: Int) = {
val us = g.usagesOf(nodeId)
us
}
}
/** Immutable graph collected from `roots` following Ref.node.deps links. */
case class ProgramGraph(roots: Seq[Sym], mapping: Nullable[Transformer], filterNode: Nullable[Sym => Boolean])
extends AstGraph {
def this(roots: Seq[Sym], filterNode: Nullable[Sym => Boolean] = Nullable.None) { this(roots, Nullable.None, filterNode) }
def this(root: Sym) { this(List(root)) }
override lazy val rootIds: DBuffer[Int] = super.rootIds
override def boundVars = Nil
override def isIdentity: Boolean = false
override def freeVars = mutable.WrappedArray.empty[Sym]
override lazy val scheduleIds = {
val neighbours: DFunc[Int, DBuffer[Int]] = filterNode match {
case Nullable(pred) =>
new DFunc[Int, DBuffer[Int]] { def apply(id: Int) = {
val deps = getSym(id).node.deps
val len = deps.length
val res = DBuffer.ofSize[Int](len)
cfor(0)(_ < len, _ + 1) { i =>
val sym = deps(i)
if (pred(sym) && !sym.isVar) // TODO optimize: remove isVar condition here and below
res += sym.node.nodeId
}
res
}}
case _ =>
new DFunc[Int, DBuffer[Int]] { def apply(id: Int) = {
val deps = getSym(id).node.deps
val len = deps.length
val res = DBuffer.ofSize[Int](len)
cfor(0)(_ < len, _ + 1) { i =>
val sym = deps(i)
if (!sym.isVar)
res += sym.node.nodeId
}
res
}}
}
val sch = GraphUtil.depthFirstOrderFrom(rootIds, neighbours)
sch
}
/** Mirror all the nodes of this graph applying transformer and performing rewriting.
* @param m mirror instance to be used for mirroring of nodes
* @param rw rewriter to be tried for each new created mirrored node
* @param t transformer of symbols, to be used for substitution of symbols in the new nodes.
* @return new graph which is not necessary clone of this graph, but should be semantically
* equivalent to this graph (provided all rw rules preserve equivalence).
* If rw is identity, then the resulting graph is alpha-equivalent to this graph
* as long as t is bijection.
*/
def transform(m: Mirror, rw: Rewriter, t: Transformer): ProgramGraph = {
val t0 = mapping match {
case Nullable(mapping) => t merge mapping
case _ => t
}
val t1 = m.mirrorSymbols(t0, rw, this, scheduleIds)
val newRoots = roots map { t1(_) }
new ProgramGraph(newRoots, Nullable(t1), filterNode)
}
/** Remove transformer component of the graph. */
def withoutContext = ProgramGraph(roots, Nullable.None, filterNode)
override def toString: String = {
val mappingStr = if (mapping.isEmpty) "None" else mapping.toString
val filterNodeStr = if (filterNode.isDefined) filterNode.toString else "None"
s"ProgramGraph($roots, $mappingStr, $filterNodeStr)"
}
}
object ProgramGraph {
def transform[A](s: Ref[A], rw: Rewriter = NoRewriting, t: MapTransformer = MapTransformer.empty()): Ref[A] = {
val g = ProgramGraph(List(s), Nullable.None, Nullable.None)
val g1 = g.transform(DefaultMirror, rw, t)
g1.roots(0).asInstanceOf[Ref[A]]
}
}
}
| ScorexFoundation/sigmastate-interpreter | core/src/main/scala/scalan/staged/ProgramGraphs.scala | Scala | mit | 3,995 |
package org.plummtw.jinrou.data
import scala.xml._
import net.liftweb._
import net.liftweb.mapper._
import http._
import js._
import util._
import S._
import SHtml._
import Helpers._
import org.plummtw.jinrou.model._
import org.plummtw.jinrou.enum._
import org.plummtw.jinrou.util._
class ActionData(action: MTypeEnum.Value, str: String, name: String, targetable_boolean: Boolean) {
def action_enum = action
def tag_string = str
def command_name = name
def targetable = targetable_boolean
def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= true
def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
user_entrys.filter(x=>(x.uname.is != "dummy_boy") && (x.id.is != user.id.is) && (x.live.is))
}
override def toString(): String = "[" + tag_string + "]"
}
trait NoActionTrait
object ActionKick extends ActionData(MTypeEnum.VOTE_KICK, "踢人", "kick", true)
object ActionStartGame extends ActionData(MTypeEnum.VOTE_STARTGAME, "開始遊戲!", "start_game", false)
object ActionVote extends ActionData(MTypeEnum.VOTE_HANG, "投票", "vote", true) {
override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
val result = user_entrys.filter(x=>(x.uname.is != "dummy_boy") && (x.id.is != user.id.is) && (x.live.is) &&
(x.hasnt_flag(UserEntryFlagEnum.HIDED)))
val result2 = if ((user.has_flag(UserEntryFlagEnum.RELIGION)) ||
(user.subrole.is == SubroleEnum.SUBPONTIFF.toString))
result.filter(x=>x.hasnt_flag(UserEntryFlagEnum.PONTIFF_AURA))
else
result
if (user.current_role == RoleEnum.MADMAN) {
val duel_messages = SystemMessage.findAll(By(SystemMessage.roomday_id, room_day.id.is),
By(SystemMessage.actioner_id, user.id.is),
By(SystemMessage.mtype, MTypeEnum.VOTE_MADMAN_DUEL.toString))
if (duel_messages.length != 0) {
result2.filter(x=>(x.id != duel_messages(0).actionee_id.is))
} else
result2
}
else
result2
}
}
object ActionBecomeMob extends ActionData(MTypeEnum.VOTE_BECOMEMOB, "暴民模式!", "becomemob", false) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean = {
return ((room.has_flag(RoomFlagEnum.MOB_MODE)) && (user_entrys.length>=22) && (room_day.day_no.is == 11))
}
}
object ActionHide extends ActionData(MTypeEnum.VOTE_HIDE, "神隱!", "hide", false) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return (room_day.day_no.is != 1)
}
}
object ActionReverseVote extends ActionData(MTypeEnum.VOTE_REVERSEVOTE, "逆轉投票!", "reversemob", false) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean = {
return (user.hasnt_flag(UserEntryFlagEnum.REVERSE_USED))
}
}
object ActionAugure extends ActionData(MTypeEnum.VOTE_AUGURER, "占卜", "augure", true)
object ActionVillagerDetect extends ActionData(MTypeEnum.VOTE_VILLAGER, "推理", "detect", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean = {
return ((room.has_flag(RoomFlagEnum.VILLAGER_DETECT)) && (room_day.day_no.is == 7))
}
}
object ActionGuard extends ActionData(MTypeEnum.VOTE_HUNTER, "護衛", "guard", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return (room_day.day_no.is != 1)
}
override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
if (room.has_flag(RoomFlagEnum.HUNTER_OPTION2))
user_entrys.filter(x=>(x.uname.is != "dummy_boy") && (x.live.is))
else
user_entrys.filter(x=>(x.uname.is != "dummy_boy") && (x.id.is != user.id.is) && (x.live.is))
}
}
object ActionAugHunterAugure extends ActionData(MTypeEnum.VOTE_AUGURER, "占卜", "aughunter_augure", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return (room_day.day_no.is %4 == 1)
}
}
object ActionAugHunterGuard extends ActionData(MTypeEnum.VOTE_HUNTER, "護衛", "aughunter_guard", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return (room_day.day_no.is %4 == 3)
}
}
object ActionRun extends ActionData(MTypeEnum.VOTE_RUNNER, "逃亡", "run", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return (room_day.day_no.is != 1)
}
}
object ActionClericBless extends ActionData(MTypeEnum.VOTE_CLERIC_BLESS, "祝福術", "cleric_bless", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return ((room.has_flag(RoomFlagEnum.CLERIC_OPTION1)) ||
(user_entrys.filter{x=>(x.current_role == RoleEnum.DEMON)&&(x.has_flag(UserEntryFlagEnum.BITED))}.length != 0))
}
}
object ActionClericSancture extends ActionData(MTypeEnum.VOTE_CLERIC_SANCTURE, "聖域術!", "cleric_sancture", false) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return (room_day.day_no.is != 1)
}
}
object ActionHerbalistElixir extends ActionData(MTypeEnum.VOTE_HERBALIST_ELIXIR, "使用 治療藥", "herbalist_elixir", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return ((room_day.day_no.is != 1) && (user.hasnt_flag(UserEntryFlagEnum.ELIXIR_USED)))
}
}
object ActionHerbalistPoison extends ActionData(MTypeEnum.VOTE_HERBALIST_POISON, "使用 毒藥", "herbalist_poison", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return ((room_day.day_no.is != 1) && (user.hasnt_flag(UserEntryFlagEnum.POISON_USED)))
}
override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
val result = user_entrys.filter(x=>(x.uname.is != "dummy_boy") && (x.id.is != user.id.is) && (x.live.is))
if ((user.has_flag(UserEntryFlagEnum.RELIGION)) ||
(user.subrole.is == SubroleEnum.SUBPONTIFF.toString))
result.filter(x=>x.hasnt_flag(UserEntryFlagEnum.PONTIFF_AURA))
else
result
}
}
object ActionHerbalistMix extends ActionData(MTypeEnum.VOTE_HERBALIST_MIX, "調製藥品!", "herbalist_mix", false) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return ((user.has_flag(UserEntryFlagEnum.ELIXIR_USED)) &&
(user.has_flag(UserEntryFlagEnum.POISON_USED)) &&
(room.has_flag(RoomFlagEnum.HERBALIST_MIX)))
}
}
object ActionHerbalistDrop extends ActionData(MTypeEnum.VOTE_HERBALIST_DROP, "放棄藥品!", "herbalist_drop", false) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return (((user.hasnt_flag(UserEntryFlagEnum.ELIXIR_USED)) ||
(user.hasnt_flag(UserEntryFlagEnum.POISON_USED))) &&
(room.has_flag(RoomFlagEnum.HERBALIST_DROP)))
}
}
object ActionAlchemistElixir extends ActionData(MTypeEnum.VOTE_ALCHEMIST_ELIXIR, "治療藥(水風)", "alchemist_elixir", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return ((room_day.day_no.is != 1) && (user.has_flag(UserEntryFlagEnum.WATER)) &&
(user.has_flag(UserEntryFlagEnum.AIR)))
}
}
object ActionAlchemistPoison extends ActionData(MTypeEnum.VOTE_ALCHEMIST_POISON, "毒藥(地火)", "alchemist_poison", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return ((room_day.day_no.is != 1) && (user.has_flag(UserEntryFlagEnum.EARTH)) &&
(user.has_flag(UserEntryFlagEnum.FIRE)))
}
override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
val result = user_entrys.filter(x=>(x.uname.is != "dummy_boy") && (x.id.is != user.id.is) && (x.live.is))
if ((user.has_flag(UserEntryFlagEnum.RELIGION)) ||
(user.subrole.is == SubroleEnum.SUBPONTIFF.toString))
result.filter(x=>x.hasnt_flag(UserEntryFlagEnum.PONTIFF_AURA))
else
result
}
}
object ActionScholarExamine extends ActionData(MTypeEnum.VOTE_SCHOLAR_EXAMINE, "個案調查", "scholar_examine", true) {
override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
user_entrys.filter(x=> (x.id.is != user.id.is))
}
}
object ActionScholarExamine2 extends ActionData(MTypeEnum.VOTE_SCHOLAR_EXAMINE2, "強力調查", "scholar_examine2", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return ((!user.has_flag(UserEntryFlagEnum.ALTERNATE)) &&
(room.has_flag(RoomFlagEnum.SCHOLAR_OPTION4)))
}
override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
user_entrys.filter(x=> (x.id.is != user.id.is))
}
}
object ActionScholarAnalyze extends ActionData(MTypeEnum.VOTE_SCHOLAR_ANALYZE, "事件分析!", "scholar_analyze", false) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return ((room_day.day_no.is != 1) && (user.hasnt_flag(UserEntryFlagEnum.ANALYZED)) &&
(room.has_flag(RoomFlagEnum.SCHOLAR_OPTION3)))
}
}
object ActionScholarReport extends ActionData(MTypeEnum.VOTE_SCHOLAR_REPORT, "現況報告!", "scholar_report", false) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return ((room_day.day_no.is != 1) && (user.hasnt_flag(UserEntryFlagEnum.REPORTED)))
}
}
object ActionDispell extends ActionData(MTypeEnum.VOTE_ARCHMAGE_DISPELL, "解除魔法", "archmage_dispell", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return (user.action_point.is >= 3)
}
override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
val result = user_entrys.filter(x=>(x.uname.is != "dummy_boy") && (x.id.is != user.id.is) && (x.live.is))
if ((user.has_flag(UserEntryFlagEnum.RELIGION)) ||
(user.subrole.is == SubroleEnum.SUBPONTIFF.toString))
result.filter(x=>x.hasnt_flag(UserEntryFlagEnum.PONTIFF_AURA))
else
result
}
}
object ActionSummon extends ActionData(MTypeEnum.VOTE_ARCHMAGE_SUMMON, "召喚水元素!", "archmage_summon", false) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return ((user.action_point.is >= 3) && (user.has_flag(UserEntryFlagEnum.WATER_ELEM_USED)))
}
}
object ActionWerewolf extends ActionData(MTypeEnum.VOTE_WEREWOLF, "咬人", "wolf_eat", true) {
override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
val result =
if ((room_day.day_no.is == 1) && (!room.has_flag(RoomFlagEnum.NO_DUMMY)))
user_entrys.filter(x=>(x.uname.is == "dummy_boy"))
else
user_entrys.filter(x=>(x.current_role != RoleEnum.WEREWOLF) && (x.live.is))
if ((user.has_flag(UserEntryFlagEnum.RELIGION)) ||
(user.subrole.is == SubroleEnum.SUBPONTIFF.toString))
result.filter(x=>x.hasnt_flag(UserEntryFlagEnum.PONTIFF_AURA))
else
result
}
}
object ActionWolfcub extends ActionData(MTypeEnum.VOTE_WOLFCUB, "咬人", "wolfcub_eat", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return ((room_day.day_no.is == 15) ||
((room_day.day_no.is == 17) && (room.has_flag(RoomFlagEnum.WOLFCUB_OPTION1))))
}
override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
val result = user_entrys.filter(x=>(x.uname.is != "dummy_boy") && (x.id.is != user.id.is) && (x.live.is))
if ((user.has_flag(UserEntryFlagEnum.RELIGION)) ||
(user.subrole.is == SubroleEnum.SUBPONTIFF.toString))
result.filter(x=>x.hasnt_flag(UserEntryFlagEnum.PONTIFF_AURA))
else
result
}
}
object ActionMadmanStun1 extends ActionData(MTypeEnum.VOTE_MADMAN_STUN1, "擊昏1", "madman_stun1", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return ((room.has_flag(RoomFlagEnum.MADMAN_STUN)) && (user.action_point.is >= 1))
}
override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
val result = user_entrys.filter(x=>(x.uname.is != "dummy_boy") && (x.id.is != user.id.is) && (x.live.is))
if ((user.has_flag(UserEntryFlagEnum.RELIGION)) ||
(user.subrole.is == SubroleEnum.SUBPONTIFF.toString))
result.filter(x=>x.hasnt_flag(UserEntryFlagEnum.PONTIFF_AURA))
else
result
}
}
object ActionMadmanStun3 extends ActionData(MTypeEnum.VOTE_MADMAN_STUN3, "擊昏3", "madman_stun3", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return ((room.has_flag(RoomFlagEnum.MADMAN_STUN)) && (user.action_point.is >= 2))
}
override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
val result = user_entrys.filter(x=>(x.uname.is != "dummy_boy") && (x.id.is != user.id.is) && (x.live.is))
if ((user.has_flag(UserEntryFlagEnum.RELIGION)) ||
(user.subrole.is == SubroleEnum.SUBPONTIFF.toString))
result.filter(x=>x.hasnt_flag(UserEntryFlagEnum.PONTIFF_AURA))
else
result
}
}
object ActionMadmanStun extends ActionData(MTypeEnum.VOTE_MADMAN_STUN, "擊忘", "madman_stun", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return ((room.has_flag(RoomFlagEnum.MADMAN_STUN)) && (user.action_point.is >= 2))
}
override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
val result = user_entrys.filter(x=>(x.uname.is != "dummy_boy") && (x.id.is != user.id.is) && (x.live.is))
if ((user.has_flag(UserEntryFlagEnum.RELIGION)) ||
(user.subrole.is == SubroleEnum.SUBPONTIFF.toString))
result.filter(x=>x.hasnt_flag(UserEntryFlagEnum.PONTIFF_AURA))
else
result
}
}
object ActionMadmanSuicide extends ActionData(MTypeEnum.VOTE_MADMAN_SUICIDE, "自爆!", "madman_suicide", false) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return (room.has_flag(RoomFlagEnum.MADMAN_SUICIDE))
}
}
object ActionMadmanDuel extends ActionData(MTypeEnum.VOTE_MADMAN_DUEL, "單挑", "madman_duel", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return ((room.has_flag(RoomFlagEnum.MADMAN_DUEL)) && (room_day.day_no.is % 4 == 3))
}
override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
val result = user_entrys.filter(x=>(x.uname.is != "dummy_boy") && (x.id.is != user.id.is) && (x.live.is))
if ((user.has_flag(UserEntryFlagEnum.RELIGION)) ||
(user.subrole.is == SubroleEnum.SUBPONTIFF.toString))
result.filter(x=>x.hasnt_flag(UserEntryFlagEnum.PONTIFF_AURA))
else
result
}
}
object ActionSorcerorAugure extends ActionData(MTypeEnum.VOTE_SORCEROR_AUGURE, "占卜術", "sorceror_augure", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return (user.action_point.is >= 2)
}
}
object ActionSorcerorWhisper extends ActionData(MTypeEnum.VOTE_SORCEROR_WHISPER, "密言術!", "sorceror_whisper", false) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return ((user.action_point.is >= 3)) // ||
// (room.has_flag(RoomFlagEnum.SORCEROR_WHISPER1) && (user.action_point.is >= 2)))
}
}
object ActionSorcerorConjure extends ActionData(MTypeEnum.VOTE_SORCEROR_CONJURE, "咒殺術", "sorceror_conjure", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return (user.action_point.is >= 4)
}
}
object ActionSorcerorShout extends ActionData(MTypeEnum.VOTE_SORCEROR_SHOUT, "鼓舞術!", "sorceror_shout", false) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return ((user.action_point.is >= 5) && (user.hasnt_flag(UserEntryFlagEnum.SHOUTED)))
}
}
object ActionSorcerorBelieve extends ActionData(MTypeEnum.VOTE_SORCEROR_BELIEVE, "狼信化", "sorceror_believe", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return ((room.has_flag(RoomFlagEnum.SORCEROR_BELIEVE)) && (user.action_point.is >= 5))
}
}
object ActionSorcerorSear extends ActionData(MTypeEnum.VOTE_SORCEROR_SEAR, "灼熱", "sorceror_sear", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return ((room.has_flag(RoomFlagEnum.SORCEROR_SEAR)) && (user.action_point.is >= 3))
}
override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
val result = user_entrys.filter(x=>(x.uname.is != "dummy_boy") && (x.id.is != user.id.is) && (x.live.is) &&
(x.hasnt_flag(UserEntryFlagEnum.SEAR)))
if ((user.has_flag(UserEntryFlagEnum.RELIGION)) ||
(user.subrole.is == SubroleEnum.SUBPONTIFF.toString))
result.filter(x=>x.hasnt_flag(UserEntryFlagEnum.PONTIFF_AURA))
else
result
}
}
object ActionSorcerorSummon extends ActionData(MTypeEnum.VOTE_SORCEROR_SUMMON, "召喚狼元素!", "sorceror_summon", false) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return ((room.has_flag(RoomFlagEnum.SORCEROR_SUMMON)) && (user.action_point.is >= 5))
}
}
object ActionFox extends ActionData(MTypeEnum.VOTE_FOX, "指定背德", "fox_choose_betrayer", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean = {
return ((room.has_flag(RoomFlagEnum.FOX_OPTION1)) &&
(room.has_flag(RoomFlagEnum.ROLE_BETRAYER)) &&
(user_entrys.length >= 20) &&
(room_day.day_no.is == 1))
}
override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
user_entrys.filter(x=> (x.live.is) && (x.current_role == RoleEnum.VILLAGER))
// (x.uname.is != "dummy_boy") &&
}
}
object ActionFox1 extends ActionData(MTypeEnum.VOTE_FOX1, "指定背德且結界", "fox_betrayer_barrier", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean = {
return ((room.has_flag(RoomFlagEnum.FOX_OPTION1)) &&
(room.has_flag(RoomFlagEnum.ROLE_BETRAYER)) &&
(user_entrys.length >= 20) &&
(room_day.day_no.is == 1) &&
(room.has_flag(RoomFlagEnum.FOX_OPTION3)) &&
(user.hasnt_flag(UserEntryFlagEnum.FOX_SPECIAL)))
}
override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
user_entrys.filter(x=> (x.live.is) && (x.current_role == RoleEnum.VILLAGER))
// (x.uname.is != "dummy_boy") &&
}
}
object ActionFox2 extends ActionData(MTypeEnum.VOTE_FOX2, "結界!", "fox_barrier", false) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean = {
return ((room.has_flag(RoomFlagEnum.FOX_OPTION3)) &&
(user.hasnt_flag(UserEntryFlagEnum.FOX_SPECIAL)) &&
!((room.has_flag(RoomFlagEnum.FOX_OPTION1)) &&
(room.has_flag(RoomFlagEnum.ROLE_BETRAYER)) &&
(user_entrys.length >= 20) &&
(room_day.day_no.is == 1)))
}
}
object ActionFoxDisguise extends ActionData(MTypeEnum.VOTE_BETRAYER_DISGUISE, "偽裝", "fox_disguise", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean = {
if (room.room_flags.is.indexOf(RoomFlagEnum.FOX_OPTION4.toString) == -1)
return false
if (!RoleFox.betrayer_mimic(user_entrys))
return false
if (room.room_flags.is.indexOf(RoomFlagEnum.CLERIC_OPTION2.toString) == -1)
return (user.action_point.is >= 3)
return (user.action_point.is >= 2)
}
}
object ActionBetrayerDisguise extends ActionData(MTypeEnum.VOTE_BETRAYER_DISGUISE, "偽裝", "betrayer_disguise", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean = {
if (room.room_flags.is.indexOf(RoomFlagEnum.BETRAYER_OPTION1.toString) == -1)
return false
if (room.room_flags.is.indexOf(RoomFlagEnum.CLERIC_OPTION2.toString) == -1)
return (user.action_point.is >= 3)
return (user.action_point.is >= 2)
}
override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
val result = user_entrys.filter(x=>(x.uname.is != "dummy_boy") && (x.id.is != user.id.is) && (x.live.is))
if ((user.has_flag(UserEntryFlagEnum.RELIGION)) ||
(user.subrole.is == SubroleEnum.SUBPONTIFF.toString))
result.filter(x=>x.hasnt_flag(UserEntryFlagEnum.PONTIFF_AURA))
else
result
}
}
object ActionBetrayerChange extends ActionData(MTypeEnum.VOTE_BETRAYER_CHANGE, "變化", "betrayer_change", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean = {
if (room.room_flags.is.indexOf(RoomFlagEnum.BETRAYER_OPTION2.toString) == -1)
return false
if (user.subrole.is != "")
return false
return (user.action_point.is >= 2)
}
}
object ActionBetrayerFog extends ActionData(MTypeEnum.VOTE_BETRAYER_FOG, "粉紅迷霧!", "betrayer_fog", false) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean = {
if (room.room_flags.is.indexOf(RoomFlagEnum.BETRAYER_OPTION3.toString) == -1)
return false
return (user.action_point.is >= 4)
}
}
object ActionGodfatSpecial1 extends ActionData(MTypeEnum.VOTE_GODFAT_SPECIAL1, "咒術特化!", "godfat_special1", false) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean = {
return ((room_day.day_no.is == 1) && (room.has_flag(RoomFlagEnum.GODFAT_SPECIAL1)))
}
}
object ActionGodfatSpecial2 extends ActionData(MTypeEnum.VOTE_GODFAT_SPECIAL2, "方陣特化!", "godfat_special2", false) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean = {
return ((room_day.day_no.is == 1) && (room.has_flag(RoomFlagEnum.GODFAT_SPECIAL2)))
}
}
object ActionGodfatSpecial3 extends ActionData(MTypeEnum.VOTE_GODFAT_SPECIAL3, "秘術特化!", "godfat_special3", false) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean = {
return ((room_day.day_no.is == 1) && (room.has_flag(RoomFlagEnum.GODFAT_SPECIAL3)) &&
(user.subrole.is != SubroleEnum.WOLFBELIEVER.toString) &&
(user.subrole.is != SubroleEnum.SUBPONTIFF.toString))
}
}
object ActionGodfatSpecial4 extends ActionData(MTypeEnum.VOTE_GODFAT_SPECIAL4, "預言特化!", "godfat_special4", false) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean = {
return ((room_day.day_no.is == 1) && (room.has_flag(RoomFlagEnum.GODFAT_SPECIAL4)) &&
(user.subrole.is != SubroleEnum.WOLFBELIEVER.toString) &&
(user.subrole.is != SubroleEnum.SUBPONTIFF.toString))
}
}
object ActionGodfatDeathGaze extends ActionData(MTypeEnum.VOTE_GODFAT_DEATHGAZE, "絕望視線", "godfat_deathgaze", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean = {
return ((user.has_flag(UserEntryFlagEnum.GODFAT_SPECIAL1)) &&
(user.hasnt_flag(UserEntryFlagEnum.GODFAT_SPECIAL_USED)) &&
(targetable_users(room, room_day, user, user_entrys).length != 0))
}
override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
val result = user_entrys.filter(x=>(x.uname.is != "dummy_boy") && (x.id.is != user.id.is) && (x.live.is) &&
(x.has_flag(UserEntryFlagEnum.GODFAT_TARGETED)))
if ((user.has_flag(UserEntryFlagEnum.RELIGION)) ||
(user.subrole.is == SubroleEnum.SUBPONTIFF.toString))
result.filter(x=>x.hasnt_flag(UserEntryFlagEnum.PONTIFF_AURA))
else
result
}
}
object ActionGodfatHellword extends ActionData(MTypeEnum.VOTE_GODFAT_HELLWORD, "言咒!", "godfat_hellword", false) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean = {
return ((user.has_flag(UserEntryFlagEnum.GODFAT_SPECIAL1)) &&
(user.hasnt_flag(UserEntryFlagEnum.GODFAT_SPECIAL2_USED)))
}
}
object ActionGodfatColorSpray extends ActionData(MTypeEnum.VOTE_GODFAT_COLORSPRAY, "七彩噴射", "godfat_colorspray", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean = {
return ((user.has_flag(UserEntryFlagEnum.GODFAT_SPECIAL2)) &&
(user.hasnt_flag(UserEntryFlagEnum.GODFAT_SPECIAL_USED)))
}
override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
val result = user_entrys.filter(x=>(x.uname.is != "dummy_boy") && (x.id.is != user.id.is) && (x.live.is))
if ((user.has_flag(UserEntryFlagEnum.RELIGION)) ||
(user.subrole.is == SubroleEnum.SUBPONTIFF.toString))
result.filter(x=>x.hasnt_flag(UserEntryFlagEnum.PONTIFF_AURA))
else
result
}
}
object ActionGodfatBlind extends ActionData(MTypeEnum.VOTE_GODFAT_BLIND, "眩光", "godfat_blind", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean = {
return ((user.has_flag(UserEntryFlagEnum.GODFAT_SPECIAL2)) &&
(user.hasnt_flag(UserEntryFlagEnum.GODFAT_SPECIAL2_USED)))
}
override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
val result = user_entrys.filter(x=>(x.uname.is != "dummy_boy") && (x.id.is != user.id.is) && (x.live.is))
if ((user.has_flag(UserEntryFlagEnum.RELIGION)) ||
(user.subrole.is == SubroleEnum.SUBPONTIFF.toString))
result.filter(x=>x.hasnt_flag(UserEntryFlagEnum.PONTIFF_AURA))
else
result
}
}
object ActionGodfatBlind2 extends ActionData(MTypeEnum.VOTE_GODFAT_BLIND2, "眩光!", "godfat_blind2", false) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean = {
return ((user.has_flag(UserEntryFlagEnum.GODFAT_SPECIAL2)) &&
(user.hasnt_flag(UserEntryFlagEnum.GODFAT_SPECIAL2_USED)))
}
}
object ActionGodfatExchange extends ActionData(MTypeEnum.VOTE_GODFAT_EXCHANGE, "秘術換身", "godfat_exchange", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean = {
return ((user.has_flag(UserEntryFlagEnum.GODFAT_SPECIAL3)) &&
(user.hasnt_flag(UserEntryFlagEnum.GODFAT_SPECIAL_USED)) &&
(targetable_users(room, room_day, user, user_entrys).length != 0))
}
override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
return user_entrys.filter(x=>(x.uname.is != "dummy_boy") && (x.id.is != user.id.is) && (x.live.is) &&
(x.current_role == RoleEnum.FOX) && (x.has_flag(UserEntryFlagEnum.GODFAT_TARGETED)))
}
}
object ActionGodfatNecromancer extends ActionData(MTypeEnum.VOTE_GODFAT_NECROMANCER, "靈能", "godfat_necromancer", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean = {
return (user.has_flag(UserEntryFlagEnum.GODFAT_SPECIAL4))
}
override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
return user_entrys.filter(x=> (x.id.is != user.id.is) && (x.hasnt_flag(UserEntryFlagEnum.GODFAT_PREDICTED)))
}
}
object ActionGodfatHunter extends ActionData(MTypeEnum.VOTE_GODFAT_HUNTER, "獵人", "godfat_hunter", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean = {
return (user.has_flag(UserEntryFlagEnum.GODFAT_SPECIAL4))
}
override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
return user_entrys.filter(x=> (x.id.is != user.id.is) && (x.hasnt_flag(UserEntryFlagEnum.GODFAT_PREDICTED)))
}
}
object ActionGodfatHerbalist extends ActionData(MTypeEnum.VOTE_GODFAT_HERBALIST, "藥師", "godfat_herbalist", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean = {
return (user.has_flag(UserEntryFlagEnum.GODFAT_SPECIAL4))
}
override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
return user_entrys.filter(x=> (x.id.is != user.id.is) && (x.hasnt_flag(UserEntryFlagEnum.GODFAT_PREDICTED)))
}
}
object ActionGodfatPoisoner extends ActionData(MTypeEnum.VOTE_GODFAT_POISONER, "埋毒", "godfat_poisoner", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean = {
return (user.has_flag(UserEntryFlagEnum.GODFAT_SPECIAL4))
}
override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
return user_entrys.filter(x=> (x.id.is != user.id.is) && (x.hasnt_flag(UserEntryFlagEnum.GODFAT_PREDICTED)))
}
}
object ActionGodfatScholar extends ActionData(MTypeEnum.VOTE_GODFAT_SCHOLAR, "學者", "godfat_scholar", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean = {
return (user.has_flag(UserEntryFlagEnum.GODFAT_SPECIAL4))
}
override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
return user_entrys.filter(x=> (x.id.is != user.id.is) && (x.hasnt_flag(UserEntryFlagEnum.GODFAT_PREDICTED)))
}
}
object ActionDemonChaos extends ActionData(MTypeEnum.VOTE_DEMON_CHAOS, "混沌", "demon_chaos", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return ((user.hasnt_flag(UserEntryFlagEnum.BITED)) &&
((room.has_flag(RoomFlagEnum.NO_DUMMY)) || (room_day.day_no.is != 1)))
// (room_day.day_no.is %4 == 3))
}
}
object ActionDemonDominate extends ActionData(MTypeEnum.VOTE_DEMON_DOMINATE, "支配", "demon_donimate", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return ((user.has_flag(UserEntryFlagEnum.BITED)))
}
override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
val result = user_entrys.filter(x=>(x.uname.is != "dummy_boy") && (x.id.is != user.id.is) && (x.live.is))
if ((user.has_flag(UserEntryFlagEnum.RELIGION)) ||
(user.subrole.is == SubroleEnum.SUBPONTIFF.toString))
result.filter(x=>x.hasnt_flag(UserEntryFlagEnum.PONTIFF_AURA))
else
result
}
}
object ActionDemonCurse extends ActionData(MTypeEnum.VOTE_DEMON_CURSE, "詛咒!", "demon_curse", false) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return ((user.has_flag(UserEntryFlagEnum.BITED)) &&
(room.room_flags.is.indexOf(RoomFlagEnum.DEMON_OPTION2.toString) == -1))
// (user.hasnt_flag(UserEntryFlagEnum.CURSE_USED)))
}
}
object ActionDemonCurse2 extends ActionData(MTypeEnum.VOTE_DEMON_CURSE2, "詛咒", "demon_curse2", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return ((room.has_flag(RoomFlagEnum.DEMON_OPTION2)))
}
override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
val result = user_entrys.filter(x=>(x.uname.is != "dummy_boy") && (x.live.is))
if ((user.has_flag(UserEntryFlagEnum.RELIGION)) ||
(user.subrole.is == SubroleEnum.SUBPONTIFF.toString))
result.filter(x=>x.hasnt_flag(UserEntryFlagEnum.PONTIFF_AURA))
else
result
}
}
object ActionDemonVortex extends ActionData(MTypeEnum.VOTE_DEMON_VORTEX, "斗轉星移", "demon_vortex", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return ((room.has_flag(RoomFlagEnum.DEMON_OPTION3)) &&
(user.hasnt_flag(UserEntryFlagEnum.VORTEX_USED)) &&
(user.role.is.length == 1))
}
}
object ActionFallenAngelFallen extends ActionData(MTypeEnum.VOTE_FALLENANGEL_FALLEN, "墮落", "fallenangel_fallen", true) {
}
object ActionPenguinIce extends ActionData(MTypeEnum.VOTE_PENGUIN_ICE, "冰凍", "penguin_ice", true) {
}
object ActionPenguinChill extends ActionData(MTypeEnum.VOTE_PENGUIN_CHILL, "冰凍且寒冰圍繞", "penguin_chill", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return (user.hasnt_flag(UserEntryFlagEnum.CHILL_USED) && (!room.has_flag(RoomFlagEnum.PENGUIN_OPTION3)))
}
}
object ActionPontiff extends ActionData(MTypeEnum.VOTE_PONTIFF, "拉人入教", "pontiff", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
if ((user.role.is.length != 1) && (room_day.day_no.is % 4 == 1))
return false
if ((room_day.day_no.is == 1) &&
((room.has_flag(RoomFlagEnum.PONTIFF_OPTION2)) ||
(room.has_flag(RoomFlagEnum.SUBROLE_SUBPONTIFF))))
return false
return ((user_entrys.filter(x=>(x.uname.is != "dummy_boy") && (x.current_role != RoleEnum.PONTIFF) && (x.live.is) &&
(x.hasnt_flag(UserEntryFlagEnum.RELIGION))).length != 0) &&
(user.hasnt_flag(UserEntryFlagEnum.PONTIFF_STUNNED)))
}
override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
user_entrys.filter(x=>(x.uname.is != "dummy_boy") && (x.current_role != RoleEnum.PONTIFF) && (x.live.is) &&
(x.hasnt_flag(UserEntryFlagEnum.RELIGION)))
}
}
object ActionPontiffCommand extends ActionData(MTypeEnum.VOTE_PONTIFF_COMMAND, "指定投票", "pontiff_command", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
if (user.has_flag(UserEntryFlagEnum.PONTIFF_COMMAND_USED))
return false
return ((user_entrys.filter(x=>(x.uname.is != "dummy_boy") && (x.current_role != RoleEnum.PONTIFF) && (x.live.is) &&
(x.hasnt_flag(UserEntryFlagEnum.RELIGION))).length != 0) &&
(user.hasnt_flag(UserEntryFlagEnum.PONTIFF_STUNNED)))
}
override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
user_entrys.filter(x=>(x.uname.is != "dummy_boy") && (x.current_role != RoleEnum.PONTIFF) && (x.live.is) &&
(x.hasnt_flag(UserEntryFlagEnum.RELIGION)))
}
}
object ActionPontiffAura extends ActionData(MTypeEnum.VOTE_PONTIFF_AURA, "教主光環!", "pontiff_aura", false) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return ((user.hasnt_flag(UserEntryFlagEnum.PONTIFF_AURA)) &&
(room_day.day_no.is >= 13) &&
(user.hasnt_flag(UserEntryFlagEnum.PONTIFF_STUNNED)))
}
}
object ActionInheriter extends ActionData(MTypeEnum.VOTE_INHERITER, "繼承", "inherit", true)
object ActionShifter extends ActionData(MTypeEnum.VOTE_SHIFTER, "模仿", "shift", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return (room_day.day_no.is == 1)
}
}
object ActionShifterDemon extends ActionData(MTypeEnum.VOTE_SHIFTER2, "模仿惡魔!", "shift_demon", false) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return (room.has_flag(RoomFlagEnum.ROLE_PENGUIN) && (room_day.day_no.is == 1))
}
}
object ActionCardFool extends ActionData(MTypeEnum.VOTE_CARD_FOOL, "愚者", "card_fool", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return (user.has_flag(UserEntryFlagEnum.CARD_FOOL))
}
}
object ActionCardMagician extends ActionData(MTypeEnum.VOTE_CARD_MAGICIAN, "魔術師", "card_magician", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return (user.has_flag(UserEntryFlagEnum.CARD_MAGICIAN))
}
}
object ActionCardChariot extends ActionData(MTypeEnum.VOTE_CARD_CHARIOT, "戰車", "card_chariot", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return (user.has_flag(UserEntryFlagEnum.CARD_CHARIOT))
}
}
object ActionCardHermit extends ActionData(MTypeEnum.VOTE_CARD_HERMIT, "隱者", "card_hermit", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return (user.has_flag(UserEntryFlagEnum.CARD_HERMIT))
}
override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
val result = user_entrys.filter(x=>(x.uname.is != "dummy_boy") && (x.id.is != user.id.is) && (x.live.is))
if ((user.has_flag(UserEntryFlagEnum.RELIGION)) ||
(user.subrole.is == SubroleEnum.SUBPONTIFF.toString))
result.filter(x=>x.hasnt_flag(UserEntryFlagEnum.PONTIFF_AURA))
else
result
}
}
object ActionCardStrength extends ActionData(MTypeEnum.VOTE_CARD_STRENGTH, "力", "card_strength", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return (user.has_flag(UserEntryFlagEnum.CARD_STRENGTH))
}
override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
val result = user_entrys.filter(x=>(x.uname.is != "dummy_boy") && (x.id.is != user.id.is) && (x.live.is))
if ((user.has_flag(UserEntryFlagEnum.RELIGION)) ||
(user.subrole.is == SubroleEnum.SUBPONTIFF.toString))
result.filter(x=>x.hasnt_flag(UserEntryFlagEnum.PONTIFF_AURA))
else
result
}
}
object ActionCardJustice extends ActionData(MTypeEnum.VOTE_CARD_JUSTICE, "正義", "card_justice", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return (user.has_flag(UserEntryFlagEnum.CARD_JUSTICE))
}
}
object ActionCardTower extends ActionData(MTypeEnum.VOTE_CARD_TOWER, "塔", "card_tower", true) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return (user.has_flag(UserEntryFlagEnum.CARD_TOWER))
}
}
object ActionCardSun extends ActionData(MTypeEnum.VOTE_CARD_SUN, "太陽!", "card_sun", false) {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return (user.has_flag(UserEntryFlagEnum.CARD_SUN))
}
}
object ActionNoAction extends ActionData(MTypeEnum.VOTE_NO_ACTION, "不行動!", "no_action", false) with NoActionTrait
object ActionNoAction2 extends ActionData(MTypeEnum.VOTE_NO_ACTION, "不行動!", "no_action", false) with NoActionTrait {
override def enabled(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : Boolean= {
return (!((room.has_flag(RoomFlagEnum.FOX_OPTION1)) &&
(room.has_flag(RoomFlagEnum.ROLE_BETRAYER)) &&
(user_entrys.length >= 20) &&
(room_day.day_no.is == 1)))
}
}
| Plummtw/jinrou_Lift | src/main/scala/org/plummtw/jinrou/data/ActionData.scala | Scala | apache-2.0 | 42,837 |
/* Copyright (C) 2008-2014 University of Massachusetts Amherst.
This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible)
http://factorie.cs.umass.edu, http://github.com/factorie
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package cc.factorie.app.nlp.lemma
import java.io.{FileInputStream, InputStream}
import cc.factorie.app.nlp._
import cc.factorie.app.nlp.pos.{PennPosDomain, PennPosTag}
import cc.factorie.app.nlp.wordnet.WordNet
import cc.factorie.util.ClasspathURL
// TODO Rather than reading the WordNet files here, I think this object should simply depend on newly-written methods in wordnet.WordNet. -akm
class WordNetLemmatizer(val inputStreamFactory: String=>InputStream) extends DocumentAnnotator {
def this(wordNetDir:java.io.File) = this((string:String) => new FileInputStream(new java.io.File(wordNetDir, string)))
val resourcePath = "dict/"
def sourceFactory(string:String): io.Source = io.Source.fromInputStream(inputStreamFactory(resourcePath+string))
val NOUN = "n"
val VERB = "v"
val ADJC = "aj"
val ADVB = "av"
/* Wordnet suffixes - from file morph.c in wordnet
* Look at each word, check its suffix, and change its suffix to
* the corresponding "end" as defined by wordnet (if you can find
* the suffix in one of the following suffix lists)*/
val nounSufx = List("s", "ses", "xes", "zes", "ches", "shes", "men", "ies")
val nounEnds = List("", "s", "x", "z", "ch", "sh", "man", "y")
val verbSufx = List("s", "es", "es", "ed", "ed", "ies", "ing", "ing")
val verbEnds = List("", "e", "", "e", "", "y", "e", "")
val adjcSufx = List("er", "est")
val adjcEnds = List("", "", "e", "e")
val sufxMap = Map (NOUN -> nounSufx.zip(nounEnds), VERB -> verbSufx.zip(verbEnds), ADJC -> adjcSufx.zip(adjcEnds))
/* Store wordnet exceptions */
val exceptionMap = Map[String, scala.collection.mutable.HashMap[String,String]](
NOUN -> new scala.collection.mutable.HashMap[String, String]() { override def default(key:String): String = key },
VERB -> new scala.collection.mutable.HashMap[String, String]() { override def default(key:String): String = key },
ADJC -> new scala.collection.mutable.HashMap[String, String]() { override def default(key:String): String = key },
ADVB -> new scala.collection.mutable.HashMap[String, String]() { override def default(key:String): String = key }
)
val wordNetWords = Map[String, scala.collection.mutable.HashSet[String]](
NOUN -> new scala.collection.mutable.HashSet[String](),
VERB -> new scala.collection.mutable.HashSet[String](),
ADJC -> new scala.collection.mutable.HashSet[String](),
ADVB -> new scala.collection.mutable.HashSet[String]()
)
for ((f, pos) <- Seq(("adj", ADJC), ("adv", ADVB), ("noun", NOUN), ("verb", VERB))) {
for (line <- sourceFactory(f + ".exc").getLines()) {
val fields = line.split(" ")
if (fields(0).indexOf('_') == -1) // For now skip multi-word phrases (indicated by underscore in WordNet)
exceptionMap(pos)(fields(0)) = fields(1)
}
for (line <- sourceFactory("index." + f).getLines()) {
val word = line.split(" ")(0)
if (!word.contains('_')) wordNetWords(pos) += word.toLowerCase
}
}
def lemma(raw:String, partOfSpeech:String): String = {
val rawlc = raw.toLowerCase
val pos = {
if (PennPosDomain.isAdjective(partOfSpeech)) ADJC
else if (PennPosDomain.isNoun(partOfSpeech)) NOUN
else if (PennPosDomain.isVerb(partOfSpeech)) VERB
else ADVB
}
if (exceptionMap(pos).contains(rawlc)) exceptionMap(pos)(rawlc)
else if (wordNetWords(pos).contains(rawlc)) rawlc
else if (pos == ADVB && rawlc.endsWith("ly")) rawlc.dropRight(2) /* this rule does not appear in wordnet */
else if (pos == ADVB) rawlc /* wordnet contains many unlemmatized adverbs */
else if (rawlc.length <= 2) rawlc
else if (PennPosDomain.isNoun(pos) && rawlc.endsWith("ss")) rawlc
else if (PennPosDomain.isNoun(pos) && rawlc.endsWith("ful")) this.wordbase(rawlc.dropRight(3), NOUN) + "ful"
else wordbase(rawlc, pos)
}
def process(document:Document): Document = {
for (token <- document.tokens) token.attr += new WordNetTokenLemma(token, lemma(token.string, token.posTag.categoryValue))
document
}
override def tokenAnnotationString(token:Token): String = { val l = token.attr[WordNetTokenLemma]; if (l ne null) l.value else "_"}
def prereqAttrs: Iterable[Class[_]] = List(classOf[PennPosTag])
def postAttrs: Iterable[Class[_]] = List(classOf[WordNetTokenLemma])
private def wordbase(w: String, pos: String): String = {
val candidates = this.sufxMap(pos).filter(sufxAndEnd => w.endsWith(sufxAndEnd._1))
val transformed = candidates.map(sufxAndEnd => w.dropRight(sufxAndEnd._1.length).concat(sufxAndEnd._2))
val areDefined = transformed.filter(word => wordNetWords(pos).contains(word))
if (areDefined.length <= 0 ) w
else areDefined.last /* TODO: be smarter than taking the last? this at least take longest endsWith match */
}
}
object WordNetLemmatizer extends WordNetLemmatizer(string => ClasspathURL.fromDirectory[WordNet](string).openConnection().getInputStream)
//string => {
//import java.io.File
//import java.util.jar.JarFile
// val propertyName = "cc.factorie.app.nlp.wordnet.jar"
// val jarLocationProperty = System.getProperty(propertyName, null)
// if (jarLocationProperty ne null) {
// // Try to load from .jar in filesystem location specified by System property cc.factorie.app.nlp.lexicon.jar
// val file = new File(jarLocationProperty)
// if (!file.exists) throw new Error("File not found at System Property "+propertyName+" value: "+jarLocationProperty)
// try {
// val jarFile = new JarFile(file)
// val jarEntry = jarFile.getJarEntry(string)
// jarFile.getInputStream(jarEntry)
// } catch {
// case e:Exception => throw new Error("Error loading resource '"+string+"' from jar '"+file+"'", e)
// }
// } else {
// // Try to load from .jar on classpath
// try {
// wordnet.WordNet.getClass.getResourceAsStream(string)
// } catch {
// case e:Exception => throw new Error("Could not find resource for cc.factorie.app.nlp.lexicon: "+string+". \\nDownload factorie-nlp-lexicon.jar and then either add it classpath or set Java System Property 'cc.factorie.app.nlp.lexicon.jar' to its file system location.", e)
// }
// }
//})
//object WordNetLemmatizer {
// // TODO Move this to a JUnit test. -akm
// def main(args: Array[String]) {
// val wnl = new cc.factorie.app.nlp.lemma.WordNetLemmatizer(new java.io.File(args(0)))
// val testWords = List(
// ("grass", "N"),
// ("blue", "J"),
// ("makings", "N"),
// ("gooey", "J"),
// ("aklsdjflk", "N"),
// ("aggressively", "adverb"),
// ("sparked", "V"),
// ("walking", "V"),
// ("loves", "V"),
// ("polyhedra", "N"),
// ("orogami", "N"),
// ("watches", "V"),
// ("watches", "N"),
// ("spying", "V"),
// ("news", "N"),
// ("mathematics", "N"),
// ("POLITICS", "N"),
// ("wonderful", "J")
// )
//
// testWords.map(x => println(x._1 + " -> " + wnl.lemma(x._1, x._2)))
// }
//}
class WordNetTokenLemma(token:Token, s:String) extends TokenLemma(token, s)
| patverga/factorie | src/main/scala/cc/factorie/app/nlp/lemma/WordNetLemmatizer.scala | Scala | apache-2.0 | 7,853 |
package controller
import org.scalatest._
import skinny.test._
class FileUploadControllerSpec extends FunSpec with Matchers {
def createMockController = new FileUploadController with MockServlet
describe("FileUploadController") {
it("should work with MockServlet") {
try {
val controller = createMockController
controller.form
controller.status should equal(200)
} catch {
case e: Exception =>
e.printStackTrace
throw e
}
}
}
}
| seratch/skinny-framework | example/src/test/scala/controller/FileUploadControllerSpec.scala | Scala | mit | 512 |
//
// Analysis.scala -- Scala class/trait/object Analysis
// Project OrcScala
//
// Created by amp on Jun 2, 2013.
//
// Copyright (c) 2018 The University of Texas at Austin. All rights reserved.
//
// Use and redistribution of this file is governed by the license terms in
// the LICENSE file found in the project's top-level directory and also found at
// URL: http://orc.csres.utexas.edu/license.shtml .
//
package orc.ast.porc
import scala.collection.mutable
import orc.values.ValueMetadata
case class AnalysisResults(
isNotFuture: Boolean,
doesNotThrowHalt: Boolean,
fastTerminating: Boolean,
siteMetadata: Option[ValueMetadata]) {
}
sealed trait AnalysisProvider[E <: PorcAST] {
outer =>
def apply(e: PorcAST.Z): AnalysisResults
def get(e: PorcAST.Z): Option[AnalysisResults]
object ImplicitResults {
import scala.language.implicitConversions
implicit def expressionCtxWithResults(e: PorcAST.Z): AnalysisResults = apply(e)
}
def withDefault: AnalysisProvider[E] = {
new AnalysisProvider[E] {
def apply(e: PorcAST.Z): AnalysisResults = get(e).getOrElse(AnalysisResults(false, false, false, None))
def get(e: PorcAST.Z): Option[AnalysisResults] = outer.get(e)
}
}
}
/** A cache for storing all the results of a bunch of expressions.
*/
class Analyzer extends AnalysisProvider[PorcAST] {
val cache = mutable.Map[PorcAST.Z, AnalysisResults]()
// TODO: Somehow this is running the system out of memory on some programs. For example, /OrcExamples/OrcSites/simanim/baboon.orc
def apply(e: PorcAST.Z) = {
cache.get(e) match {
case Some(r) => {
r
}
case None => {
val r = analyze(e)
cache += e -> r
r
}
}
}
def get(e: PorcAST.Z) = Some(apply(e))
def analyze(e: PorcAST.Z): AnalysisResults = {
AnalysisResults(nonFuture(e), nonHalt(e), fastTerminating(e), siteMetadata(e))
}
def translateArguments(vs: List[Argument], formals: List[Variable], s: Set[Variable]): Set[Variable] = {
val m = (formals zip vs).toMap
s.collect(m).collect { case v: Variable => v }
}
def nonFuture(e: PorcAST.Z): Boolean = {
???
}
def siteMetadata(e: PorcAST.Z): Option[ValueMetadata] = {
???
}
def nonHalt(e: PorcAST.Z): Boolean = {
???
}
// TODO: detect more fast cases for defcall and call. This is important for eliminating spawns at def calls when they are not needed.
def fastTerminating(e: PorcAST.Z): Boolean = {
???
}
}
object Analysis {
val closureCost = 5
val spawnCost = 4
val forceCost = 3
val killCost = 2
val callkillhandlersCost = 5
val callCost = 1
val externalCallCost = 5
val atomicOperation = 2
def cost(e: PorcAST): Int = {
val cs = e.subtrees.asInstanceOf[Iterable[PorcAST]]
(e match {
case _: Spawn => spawnCost
case _: Force => forceCost
case _: Kill => killCost
case _: Continuation | _: MethodCPS | _: MethodDirect => closureCost
case _: NewTerminator => closureCost
case _: CallContinuation => callCost
case _: MethodDirectCall => externalCallCost
case _: MethodCPSCall => externalCallCost + spawnCost
case _: Kill | _: HaltToken | _: NewToken => atomicOperation
case _ => 0
}) + (cs.map(cost).sum)
}
def count(t: PorcAST, p: (Expression => Boolean)): Int = {
val cs = t.subtrees.asInstanceOf[Iterable[PorcAST]]
(t match {
case e: Expression if p(e) => 1
case _ => 0
}) +
(cs.map(count(_, p)).sum)
}
}
| orc-lang/orc | OrcScala/src/orc/ast/porc/Analysis.scala | Scala | bsd-3-clause | 3,525 |
// This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
package ducttape.util
import collection._
object MultiSet {
// TODO: Can we make this more efficient
def empty[A] = new MultiSet[A]
}
class MultiSet[A] extends ImmutableMultiSet[A] {
private val map = new mutable.HashMap[A,Int]
def this(other: MultiSet[A]) {
this()
map ++= other.map
}
def +=(a: A) {
map.get(a) match {
case Some(n) => map.put(a, n+1)
case None => map.put(a, 1)
}
}
def -=(a: A) {
map.get(a) match {
case Some(n) if n == 1 => map.remove(a)
case Some(n) if n > 1 => map.put(a, n-1)
case None => throw new NoSuchElementException(a.toString)
}
}
def find(func: A => Boolean): Option[A] = map.keys.find(func)
def removeAll(a: A) {
map.remove(a)
}
def ++=(xs: TraversableOnce[A]) = for (x <- xs) this += x
def --=(xs: TraversableOnce[A]) = for (x <- xs) this -= x
def apply(a: A) = map.contains(a)
def contains(a: A) = map.contains(a)
def keys() = map.keys
def view() = map.keys.view
def toList(): List[A] = map.keys.toList
override def toString() = map.toString
}
| jhclark/ducttape | src/main/scala/ducttape/util/MultiSet.scala | Scala | mpl-2.0 | 1,287 |
package org.bitcoins.feeprovider
import org.bitcoins.crypto.StringFactory
sealed abstract class FeeProviderName
object FeeProviderName extends StringFactory[FeeProviderName] {
final case object BitcoinerLive extends FeeProviderName
final case object BitGo extends FeeProviderName
final case object Constant extends FeeProviderName
final case object MempoolSpace extends FeeProviderName
val all: Vector[FeeProviderName] =
Vector(BitcoinerLive, BitGo, Constant, MempoolSpace)
override def fromStringOpt(str: String): Option[FeeProviderName] = {
all.find(_.toString.toLowerCase == str.toLowerCase)
}
override def fromString(string: String): FeeProviderName = {
fromStringOpt(string) match {
case Some(state) => state
case None =>
sys.error(s"Could not find FeeProviderName for string=$string")
}
}
}
| bitcoin-s/bitcoin-s | fee-provider/src/main/scala/org/bitcoins/feeprovider/FeeProviderName.scala | Scala | mit | 861 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.admin
import junit.framework.Assert._
import org.junit.Test
import org.scalatest.junit.JUnit3Suite
import java.util.Properties
import kafka.utils._
import kafka.log._
import kafka.zk.ZooKeeperTestHarness
import kafka.utils.{Logging, ZkUtils, TestUtils}
import kafka.common.{TopicExistsException, TopicAndPartition}
import kafka.server.{KafkaServer, KafkaConfig}
import java.io.File
import TestUtils._
class AdminTest extends JUnit3Suite with ZooKeeperTestHarness with Logging {
@Test
def testReplicaAssignment() {
val brokerList = List(0, 1, 2, 3, 4)
// test 0 replication factor
intercept[AdminOperationException] {
AdminUtils.assignReplicasToBrokers(brokerList, 10, 0)
}
// test wrong replication factor
intercept[AdminOperationException] {
AdminUtils.assignReplicasToBrokers(brokerList, 10, 6)
}
// correct assignment
val expectedAssignment = Map(
0 -> List(0, 1, 2),
1 -> List(1, 2, 3),
2 -> List(2, 3, 4),
3 -> List(3, 4, 0),
4 -> List(4, 0, 1),
5 -> List(0, 2, 3),
6 -> List(1, 3, 4),
7 -> List(2, 4, 0),
8 -> List(3, 0, 1),
9 -> List(4, 1, 2))
val actualAssignment = AdminUtils.assignReplicasToBrokers(brokerList, 10, 3, 0)
val e = (expectedAssignment.toList == actualAssignment.toList)
assertTrue(expectedAssignment.toList == actualAssignment.toList)
}
@Test
def testManualReplicaAssignment() {
val brokers = List(0, 1, 2, 3, 4)
TestUtils.createBrokersInZk(zkClient, brokers)
// duplicate brokers
intercept[IllegalArgumentException] {
AdminUtils.createOrUpdateTopicPartitionAssignmentPathInZK(zkClient, "test", Map(0->Seq(0,0)))
}
// inconsistent replication factor
intercept[IllegalArgumentException] {
AdminUtils.createOrUpdateTopicPartitionAssignmentPathInZK(zkClient, "test", Map(0->Seq(0,1), 1->Seq(0)))
}
// good assignment
val assignment = Map(0 -> List(0, 1, 2),
1 -> List(1, 2, 3))
AdminUtils.createOrUpdateTopicPartitionAssignmentPathInZK(zkClient, "test", assignment)
val found = ZkUtils.getPartitionAssignmentForTopics(zkClient, Seq("test"))
assertEquals(assignment, found("test"))
}
@Test
def testTopicCreationInZK() {
val expectedReplicaAssignment = Map(
0 -> List(0, 1, 2),
1 -> List(1, 2, 3),
2 -> List(2, 3, 4),
3 -> List(3, 4, 0),
4 -> List(4, 0, 1),
5 -> List(0, 2, 3),
6 -> List(1, 3, 4),
7 -> List(2, 4, 0),
8 -> List(3, 0, 1),
9 -> List(4, 1, 2),
10 -> List(1, 2, 3),
11 -> List(1, 3, 4)
)
val leaderForPartitionMap = Map(
0 -> 0,
1 -> 1,
2 -> 2,
3 -> 3,
4 -> 4,
5 -> 0,
6 -> 1,
7 -> 2,
8 -> 3,
9 -> 4,
10 -> 1,
11 -> 1
)
val topic = "test"
TestUtils.createBrokersInZk(zkClient, List(0, 1, 2, 3, 4))
// create the topic
AdminUtils.createOrUpdateTopicPartitionAssignmentPathInZK(zkClient, topic, expectedReplicaAssignment)
// create leaders for all partitions
TestUtils.makeLeaderForPartition(zkClient, topic, leaderForPartitionMap, 1)
val actualReplicaList = leaderForPartitionMap.keys.toArray.map(p => (p -> ZkUtils.getReplicasForPartition(zkClient, topic, p))).toMap
assertEquals(expectedReplicaAssignment.size, actualReplicaList.size)
for(i <- 0 until actualReplicaList.size)
assertEquals(expectedReplicaAssignment.get(i).get, actualReplicaList(i))
intercept[TopicExistsException] {
// shouldn't be able to create a topic that already exists
AdminUtils.createOrUpdateTopicPartitionAssignmentPathInZK(zkClient, topic, expectedReplicaAssignment)
}
}
private def getBrokersWithPartitionDir(servers: Iterable[KafkaServer], topic: String, partitionId: Int): Set[Int] = {
servers.filter(server => new File(server.config.logDirs.head, topic + "-" + partitionId).exists)
.map(_.config.brokerId)
.toSet
}
@Test
def testPartitionReassignmentWithLeaderInNewReplicas() {
val expectedReplicaAssignment = Map(0 -> List(0, 1, 2))
val topic = "test"
// create brokers
val servers = TestUtils.createBrokerConfigs(4, false).map(b => TestUtils.createServer(KafkaConfig.fromProps(b)))
// create the topic
AdminUtils.createOrUpdateTopicPartitionAssignmentPathInZK(zkClient, topic, expectedReplicaAssignment)
// reassign partition 0
val newReplicas = Seq(0, 2, 3)
val partitionToBeReassigned = 0
val topicAndPartition = TopicAndPartition(topic, partitionToBeReassigned)
val reassignPartitionsCommand = new ReassignPartitionsCommand(zkClient, Map(topicAndPartition -> newReplicas))
assertTrue("Partition reassignment attempt failed for [test, 0]", reassignPartitionsCommand.reassignPartitions())
// wait until reassignment is completed
TestUtils.waitUntilTrue(() => {
val partitionsBeingReassigned = ZkUtils.getPartitionsBeingReassigned(zkClient).mapValues(_.newReplicas);
ReassignPartitionsCommand.checkIfPartitionReassignmentSucceeded(zkClient, topicAndPartition, newReplicas,
Map(topicAndPartition -> newReplicas), partitionsBeingReassigned) == ReassignmentCompleted;
},
"Partition reassignment should complete")
val assignedReplicas = ZkUtils.getReplicasForPartition(zkClient, topic, partitionToBeReassigned)
// in sync replicas should not have any replica that is not in the new assigned replicas
checkForPhantomInSyncReplicas(zkClient, topic, partitionToBeReassigned, assignedReplicas)
assertEquals("Partition should have been reassigned to 0, 2, 3", newReplicas, assignedReplicas)
ensureNoUnderReplicatedPartitions(zkClient, topic, partitionToBeReassigned, assignedReplicas, servers)
TestUtils.waitUntilTrue(() => getBrokersWithPartitionDir(servers, topic, 0) == newReplicas.toSet,
"New replicas should exist on brokers")
servers.foreach(_.shutdown())
}
@Test
def testPartitionReassignmentWithLeaderNotInNewReplicas() {
val expectedReplicaAssignment = Map(0 -> List(0, 1, 2))
val topic = "test"
// create brokers
val servers = TestUtils.createBrokerConfigs(4, false).map(b => TestUtils.createServer(KafkaConfig.fromProps(b)))
// create the topic
AdminUtils.createOrUpdateTopicPartitionAssignmentPathInZK(zkClient, topic, expectedReplicaAssignment)
// reassign partition 0
val newReplicas = Seq(1, 2, 3)
val partitionToBeReassigned = 0
val topicAndPartition = TopicAndPartition(topic, partitionToBeReassigned)
val reassignPartitionsCommand = new ReassignPartitionsCommand(zkClient, Map(topicAndPartition -> newReplicas))
assertTrue("Partition reassignment failed for test, 0", reassignPartitionsCommand.reassignPartitions())
// wait until reassignment is completed
TestUtils.waitUntilTrue(() => {
val partitionsBeingReassigned = ZkUtils.getPartitionsBeingReassigned(zkClient).mapValues(_.newReplicas);
ReassignPartitionsCommand.checkIfPartitionReassignmentSucceeded(zkClient, topicAndPartition, newReplicas,
Map(topicAndPartition -> newReplicas), partitionsBeingReassigned) == ReassignmentCompleted;
},
"Partition reassignment should complete")
val assignedReplicas = ZkUtils.getReplicasForPartition(zkClient, topic, partitionToBeReassigned)
assertEquals("Partition should have been reassigned to 0, 2, 3", newReplicas, assignedReplicas)
checkForPhantomInSyncReplicas(zkClient, topic, partitionToBeReassigned, assignedReplicas)
ensureNoUnderReplicatedPartitions(zkClient, topic, partitionToBeReassigned, assignedReplicas, servers)
TestUtils.waitUntilTrue(() => getBrokersWithPartitionDir(servers, topic, 0) == newReplicas.toSet,
"New replicas should exist on brokers")
servers.foreach(_.shutdown())
}
@Test
def testPartitionReassignmentNonOverlappingReplicas() {
val expectedReplicaAssignment = Map(0 -> List(0, 1))
val topic = "test"
// create brokers
val servers = TestUtils.createBrokerConfigs(4, false).map(b => TestUtils.createServer(KafkaConfig.fromProps(b)))
// create the topic
AdminUtils.createOrUpdateTopicPartitionAssignmentPathInZK(zkClient, topic, expectedReplicaAssignment)
// reassign partition 0
val newReplicas = Seq(2, 3)
val partitionToBeReassigned = 0
val topicAndPartition = TopicAndPartition(topic, partitionToBeReassigned)
val reassignPartitionsCommand = new ReassignPartitionsCommand(zkClient, Map(topicAndPartition -> newReplicas))
assertTrue("Partition reassignment failed for test, 0", reassignPartitionsCommand.reassignPartitions())
// wait until reassignment is completed
TestUtils.waitUntilTrue(() => {
val partitionsBeingReassigned = ZkUtils.getPartitionsBeingReassigned(zkClient).mapValues(_.newReplicas);
ReassignPartitionsCommand.checkIfPartitionReassignmentSucceeded(zkClient, topicAndPartition, newReplicas,
Map(topicAndPartition -> newReplicas), partitionsBeingReassigned) == ReassignmentCompleted;
},
"Partition reassignment should complete")
val assignedReplicas = ZkUtils.getReplicasForPartition(zkClient, topic, partitionToBeReassigned)
assertEquals("Partition should have been reassigned to 2, 3", newReplicas, assignedReplicas)
checkForPhantomInSyncReplicas(zkClient, topic, partitionToBeReassigned, assignedReplicas)
ensureNoUnderReplicatedPartitions(zkClient, topic, partitionToBeReassigned, assignedReplicas, servers)
TestUtils.waitUntilTrue(() => getBrokersWithPartitionDir(servers, topic, 0) == newReplicas.toSet,
"New replicas should exist on brokers")
servers.foreach(_.shutdown())
}
@Test
def testReassigningNonExistingPartition() {
val topic = "test"
// create brokers
val servers = TestUtils.createBrokerConfigs(4, false).map(b => TestUtils.createServer(KafkaConfig.fromProps(b)))
// reassign partition 0
val newReplicas = Seq(2, 3)
val partitionToBeReassigned = 0
val topicAndPartition = TopicAndPartition(topic, partitionToBeReassigned)
val reassignPartitionsCommand = new ReassignPartitionsCommand(zkClient, Map(topicAndPartition -> newReplicas))
assertTrue("Partition reassignment failed for test, 0", reassignPartitionsCommand.reassignPartitions())
val reassignedPartitions = ZkUtils.getPartitionsBeingReassigned(zkClient)
assertFalse("Partition should not be reassigned", reassignedPartitions.contains(topicAndPartition))
servers.foreach(_.shutdown())
}
@Test
def testResumePartitionReassignmentThatWasCompleted() {
val expectedReplicaAssignment = Map(0 -> List(0, 1))
val topic = "test"
// create the topic
AdminUtils.createOrUpdateTopicPartitionAssignmentPathInZK(zkClient, topic, expectedReplicaAssignment)
// put the partition in the reassigned path as well
// reassign partition 0
val newReplicas = Seq(0, 1)
val partitionToBeReassigned = 0
val topicAndPartition = TopicAndPartition(topic, partitionToBeReassigned)
val reassignPartitionsCommand = new ReassignPartitionsCommand(zkClient, Map(topicAndPartition -> newReplicas))
reassignPartitionsCommand.reassignPartitions
// create brokers
val servers = TestUtils.createBrokerConfigs(2, false).map(b => TestUtils.createServer(KafkaConfig.fromProps(b)))
// wait until reassignment completes
TestUtils.waitUntilTrue(() => !checkIfReassignPartitionPathExists(zkClient),
"Partition reassignment should complete")
val assignedReplicas = ZkUtils.getReplicasForPartition(zkClient, topic, partitionToBeReassigned)
assertEquals("Partition should have been reassigned to 0, 1", newReplicas, assignedReplicas)
checkForPhantomInSyncReplicas(zkClient, topic, partitionToBeReassigned, assignedReplicas)
// ensure that there are no under replicated partitions
ensureNoUnderReplicatedPartitions(zkClient, topic, partitionToBeReassigned, assignedReplicas, servers)
TestUtils.waitUntilTrue(() => getBrokersWithPartitionDir(servers, topic, 0) == newReplicas.toSet,
"New replicas should exist on brokers")
servers.foreach(_.shutdown())
}
@Test
def testPreferredReplicaJsonData() {
// write preferred replica json data to zk path
val partitionsForPreferredReplicaElection = Set(TopicAndPartition("test", 1), TopicAndPartition("test2", 1))
PreferredReplicaLeaderElectionCommand.writePreferredReplicaElectionData(zkClient, partitionsForPreferredReplicaElection)
// try to read it back and compare with what was written
val preferredReplicaElectionZkData = ZkUtils.readData(zkClient,
ZkUtils.PreferredReplicaLeaderElectionPath)._1
val partitionsUndergoingPreferredReplicaElection =
PreferredReplicaLeaderElectionCommand.parsePreferredReplicaElectionData(preferredReplicaElectionZkData)
assertEquals("Preferred replica election ser-de failed", partitionsForPreferredReplicaElection,
partitionsUndergoingPreferredReplicaElection)
}
@Test
def testBasicPreferredReplicaElection() {
val expectedReplicaAssignment = Map(1 -> List(0, 1, 2))
val topic = "test"
val partition = 1
val preferredReplica = 0
// create brokers
val serverConfigs = TestUtils.createBrokerConfigs(3, false).map(KafkaConfig.fromProps)
// create the topic
AdminUtils.createOrUpdateTopicPartitionAssignmentPathInZK(zkClient, topic, expectedReplicaAssignment)
val servers = serverConfigs.reverse.map(s => TestUtils.createServer(s))
// broker 2 should be the leader since it was started first
val currentLeader = TestUtils.waitUntilLeaderIsElectedOrChanged(zkClient, topic, partition, oldLeaderOpt = None).get
// trigger preferred replica election
val preferredReplicaElection = new PreferredReplicaLeaderElectionCommand(zkClient, Set(TopicAndPartition(topic, partition)))
preferredReplicaElection.moveLeaderToPreferredReplica()
val newLeader = TestUtils.waitUntilLeaderIsElectedOrChanged(zkClient, topic, partition, oldLeaderOpt = Some(currentLeader)).get
assertEquals("Preferred replica election failed", preferredReplica, newLeader)
servers.foreach(_.shutdown())
}
@Test
def testShutdownBroker() {
val expectedReplicaAssignment = Map(1 -> List(0, 1, 2))
val topic = "test"
val partition = 1
// create brokers
val serverConfigs = TestUtils.createBrokerConfigs(3, false).map(KafkaConfig.fromProps)
val servers = serverConfigs.reverse.map(s => TestUtils.createServer(s))
// create the topic
TestUtils.createTopic(zkClient, topic, partitionReplicaAssignment = expectedReplicaAssignment, servers = servers)
val controllerId = ZkUtils.getController(zkClient)
val controller = servers.find(p => p.config.brokerId == controllerId).get.kafkaController
var partitionsRemaining = controller.shutdownBroker(2)
var activeServers = servers.filter(s => s.config.brokerId != 2)
try {
// wait for the update metadata request to trickle to the brokers
TestUtils.waitUntilTrue(() =>
activeServers.foldLeft(true)(_ && _.apis.metadataCache.getPartitionInfo(topic,partition).get.leaderIsrAndControllerEpoch.leaderAndIsr.isr.size != 3),
"Topic test not created after timeout")
assertEquals(0, partitionsRemaining.size)
var partitionStateInfo = activeServers.head.apis.metadataCache.getPartitionInfo(topic,partition).get
var leaderAfterShutdown = partitionStateInfo.leaderIsrAndControllerEpoch.leaderAndIsr.leader
assertEquals(0, leaderAfterShutdown)
assertEquals(2, partitionStateInfo.leaderIsrAndControllerEpoch.leaderAndIsr.isr.size)
assertEquals(List(0,1), partitionStateInfo.leaderIsrAndControllerEpoch.leaderAndIsr.isr)
partitionsRemaining = controller.shutdownBroker(1)
assertEquals(0, partitionsRemaining.size)
activeServers = servers.filter(s => s.config.brokerId == 0)
partitionStateInfo = activeServers.head.apis.metadataCache.getPartitionInfo(topic,partition).get
leaderAfterShutdown = partitionStateInfo.leaderIsrAndControllerEpoch.leaderAndIsr.leader
assertEquals(0, leaderAfterShutdown)
assertTrue(servers.foldLeft(true)(_ && _.apis.metadataCache.getPartitionInfo(topic,partition).get.leaderIsrAndControllerEpoch.leaderAndIsr.leader == 0))
partitionsRemaining = controller.shutdownBroker(0)
assertEquals(1, partitionsRemaining.size)
// leader doesn't change since all the replicas are shut down
assertTrue(servers.foldLeft(true)(_ && _.apis.metadataCache.getPartitionInfo(topic,partition).get.leaderIsrAndControllerEpoch.leaderAndIsr.leader == 0))
}
finally {
servers.foreach(_.shutdown())
}
}
/**
* This test creates a topic with a few config overrides and checks that the configs are applied to the new topic
* then changes the config and checks that the new values take effect.
*/
@Test
def testTopicConfigChange() {
val partitions = 3
val topic = "my-topic"
val server = TestUtils.createServer(KafkaConfig.fromProps(TestUtils.createBrokerConfig(0)))
def makeConfig(messageSize: Int, retentionMs: Long) = {
var props = new Properties()
props.setProperty(LogConfig.MaxMessageBytesProp, messageSize.toString)
props.setProperty(LogConfig.RententionMsProp, retentionMs.toString)
props
}
def checkConfig(messageSize: Int, retentionMs: Long) {
TestUtils.retry(10000) {
for(part <- 0 until partitions) {
val logOpt = server.logManager.getLog(TopicAndPartition(topic, part))
assertTrue(logOpt.isDefined)
assertEquals(retentionMs, logOpt.get.config.retentionMs)
assertEquals(messageSize, logOpt.get.config.maxMessageSize)
}
}
}
try {
// create a topic with a few config overrides and check that they are applied
val maxMessageSize = 1024
val retentionMs = 1000*1000
AdminUtils.createTopic(server.zkClient, topic, partitions, 1, makeConfig(maxMessageSize, retentionMs))
checkConfig(maxMessageSize, retentionMs)
// now double the config values for the topic and check that it is applied
AdminUtils.changeTopicConfig(server.zkClient, topic, makeConfig(2*maxMessageSize, 2 * retentionMs))
checkConfig(2*maxMessageSize, 2 * retentionMs)
} finally {
server.shutdown()
server.config.logDirs.map(Utils.rm(_))
}
}
}
| WillCh/cs286A | dataMover/kafka/core/src/test/scala/unit/kafka/admin/AdminTest.scala | Scala | bsd-2-clause | 19,432 |
package eu.timepit.refined.scalacheck
import eu.timepit.refined.api.{RefType, Refined}
import eu.timepit.refined.collection.{NonEmpty, Size}
import eu.timepit.refined.string._
import eu.timepit.refined.types.string.TrimmedString
import org.scalacheck.Arbitrary
import shapeless.Witness
/**
* Module that provides `Arbitrary` instances for `String` related
* predicates.
*/
object string extends StringInstances with StringInstancesBinCompat1
trait StringInstances {
implicit def endsWithArbitrary[F[_, _], S <: String](implicit
rt: RefType[F],
ws: Witness.Aux[S]
): Arbitrary[F[String, EndsWith[S]]] =
arbitraryRefType(Arbitrary.arbString.arbitrary.map(_ + ws.value))
implicit def startsWithArbitrary[F[_, _], S <: String](implicit
rt: RefType[F],
ws: Witness.Aux[S]
): Arbitrary[F[String, StartsWith[S]]] =
arbitraryRefType(Arbitrary.arbString.arbitrary.map(ws.value + _))
implicit def nonEmptyStringArbitrary[F[_, _]](implicit
rt: RefType[F]
): Arbitrary[F[String, NonEmpty]] =
collection.buildableNonEmptyArbitrary[F, String, Char]
implicit def stringSizeArbitrary[F[_, _]: RefType, P](implicit
arbChar: Arbitrary[Char],
arbSize: Arbitrary[Int Refined P]
): Arbitrary[F[String, Size[P]]] =
collection.buildableSizeArbitrary[F, String, Char, P]
implicit def uuidStringArbitrary[F[_, _]](implicit
rt: RefType[F]
): Arbitrary[F[String, Uuid]] =
arbitraryRefType(Arbitrary.arbUuid.arbitrary.map(_.toString))
implicit def validByteStringArbitrary[F[_, _]](implicit
rt: RefType[F]
): Arbitrary[F[String, ValidByte]] =
arbitraryRefType(Arbitrary.arbByte.arbitrary.map(_.toString))
implicit def validShortStringArbitrary[F[_, _]](implicit
rt: RefType[F]
): Arbitrary[F[String, ValidShort]] =
arbitraryRefType(Arbitrary.arbShort.arbitrary.map(_.toString))
implicit def validIntStringArbitrary[F[_, _]](implicit
rt: RefType[F]
): Arbitrary[F[String, ValidInt]] =
arbitraryRefType(Arbitrary.arbInt.arbitrary.map(_.toString))
implicit def validLongStringArbitrary[F[_, _]](implicit
rt: RefType[F]
): Arbitrary[F[String, ValidLong]] =
arbitraryRefType(Arbitrary.arbLong.arbitrary.map(_.toString))
implicit def validFloatStringArbitrary[F[_, _]](implicit
rt: RefType[F]
): Arbitrary[F[String, ValidFloat]] =
arbitraryRefType(Arbitrary.arbFloat.arbitrary.map(_.toString))
implicit def validDoubleStringArbitrary[F[_, _]](implicit
rt: RefType[F]
): Arbitrary[F[String, ValidDouble]] =
arbitraryRefType(Arbitrary.arbDouble.arbitrary.map(_.toString))
implicit def validBigIntStringArbitrary[F[_, _]](implicit
rt: RefType[F]
): Arbitrary[F[String, ValidBigInt]] =
arbitraryRefType(Arbitrary.arbBigInt.arbitrary.map(_.toString))
implicit def validBigDecimalStringArbitrary[F[_, _]](implicit
rt: RefType[F]
): Arbitrary[F[String, ValidBigDecimal]] =
arbitraryRefType(Arbitrary.arbBigDecimal.arbitrary.map(_.toString))
}
trait StringInstancesBinCompat1 {
implicit def trimmedStringArbitrary[F[_, _]](implicit
rt: RefType[F]
): Arbitrary[F[String, Trimmed]] =
arbitraryRefType(Arbitrary.arbString.arbitrary.map(TrimmedString.trim(_).value))
}
| fthomas/refined | modules/scalacheck/shared/src/main/scala/eu/timepit/refined/scalacheck/string.scala | Scala | mit | 3,260 |
/*
* Copyright 2014 IBM Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ibm.spark.kernel.protocol.v5.content
import com.ibm.spark.kernel.protocol.v5.{KernelMessageContent, Data, Metadata}
import play.api.libs.json._
case class DisplayData(
source: String,
data: Data,
metadata: Metadata
) extends KernelMessageContent {
override def content : String =
Json.toJson(this)(DisplayData.displayDataWrites).toString
}
object DisplayData {
implicit val displayDataReads = Json.reads[DisplayData]
implicit val displayDataWrites = Json.writes[DisplayData]
}
| bpburns/spark-kernel | protocol/src/main/scala/com/ibm/spark/kernel/protocol/v5/content/DisplayData.scala | Scala | apache-2.0 | 1,102 |
package org.jetbrains.plugins.scala
package annotator
import org.jetbrains.plugins.scala.base.ScalaLightPlatformCodeInsightTestCaseAdapter
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.ScPattern
import org.jetbrains.plugins.scala.extensions.toPsiElementExt
import org.junit.Assert.assertEquals
import org.jetbrains.plugins.scala.ScalaBundle
/**
* Nikolay.Tropin
* 2014-04-03
*/
class PatternAnnotatorTest extends ScalaLightPlatformCodeInsightTestCaseAdapter {
private def fruitless(exprType: String, patType: String) = ScalaBundle.message("fruitless.type.test", exprType, patType)
private def incompatible(exprType: String, patType: String) = ScalaBundle.message("scrutinee.incompatible.pattern.type", exprType, patType)
private def cannotBeUsed(typeText: String) = s"type $typeText cannot be used in a type pattern or isInstanceOf test"
private def collectAnnotatorMessages(text: String) = {
configureFromFileTextAdapter("dummy.scala", text)
val mock = new AnnotatorHolderMock
val annotator = new PatternAnnotator {}
val patterns = getFileAdapter.depthFirst.collect {
case p: ScPattern => p
}
patterns.foreach(p => annotator.annotatePattern(p, mock, true))
mock.annotations
}
private def emptyMessages(text: String) {
assertEquals(Nil, collectAnnotatorMessages(text))
}
private def checkWarning(text: String, element: String, expectedMsg: String) {
collectAnnotatorMessages(text).foreach {
case Warning(elem, msg) =>
assertEquals(element, elem)
assertEquals(expectedMsg, msg)
case _ => false
}
}
private def checkError(text: String, element: String, expectedMsg: String) {
collectAnnotatorMessages(text).foreach {
case Error(elem, msg) =>
assertEquals(element, elem)
assertEquals(expectedMsg, msg)
case _ => false
}
}
def testConstructorPatternFruitless() = {
checkWarning("val Some(x) = None", "Some(x)", fruitless("None.type", "Some[A]"))
checkWarning("val Vector(a) = Nil", "Vector(a)", fruitless("Nil.type", "Vector[A]"))
checkWarning("val Vector(a) = List(1)", "Vector(a)", fruitless("List[Int]", "Vector[A]"))
checkWarning("val List(seq: Seq[Int]) = List(List(\\"\\"))", "seq: Seq[Int]",
fruitless("List[String]", "Seq[Int]") + ScalaBundle.message("erasure.warning"))
emptyMessages("val Seq(a) = List(1)")
emptyMessages("val Vector(a) = Seq(1)")
}
def testStableIdPattern() {
checkWarning("val xs = List(\\"\\"); val a :: `xs` = 1 :: List(1)", "`xs`", fruitless("List[Int]", "List[String]"))
}
def testLiteralPattern() {
checkWarning("val \\"a\\" :: xs = 1 :: Nil", "\\"a\\"", fruitless("Int", "String"))
}
def testNullLiteralPattern() {
checkWarning("val null :: xs = 1 :: Nil", "null", fruitless("Int", "AnyRef"))
emptyMessages("val null :: xs = \\"1\\" :: Nil")
}
def testTuplePattern() = {
checkWarning("val (x, y) = (1, 2, 3)", "(x, y)", fruitless("(Int, Int, Int)", "(Int, Int)"))
checkError("val (x: String, y) = (1, 2)", "x: String", incompatible("Int", "String"))
emptyMessages("def a: AnyRef = null; val (x, y) = a")
}
def testIncompatible() {
checkError("val Some(x: Int) = \\"\\"", "x: Int", incompatible("String", "Int"))
checkError("val (x: Int) :: xs = List(\\"1\\", \\"2\\")", "x: Int", incompatible("String", "Int"))
}
def testCannotBeUsed() {
checkError("""x match {
| case _: AnyVal =>
|}""".stripMargin.replace("\\r", ""), "_: AnyVal", cannotBeUsed("AnyVal"))
checkError("""x match {
| case n: Null =>
|}""".stripMargin.replace("\\r", ""), "n: Null", cannotBeUsed("Null"))
checkError("""x match {
| case n: Nothing =>
|}""".stripMargin.replace("\\r", ""), "n: Nothing", cannotBeUsed("Nothing"))
}
def testUncheckedRefinement() {
checkWarning("val Some(x: AnyRef{def foo(i: Int): Int}) = Some(new AnyRef())", "AnyRef{def foo(i: Int): Int}",
ScalaBundle.message("pattern.on.refinement.unchecked"))
}
}
| consulo/consulo-scala | test/org/jetbrains/plugins/scala/annotator/PatternAnnotatorTest.scala | Scala | apache-2.0 | 4,105 |
/* __ *\\
** ________ ___ / / ___ __ ____ Scala.js Test Suite **
** / __/ __// _ | / / / _ | __ / // __/ (c) 2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ |/_// /_\\ \\ http://scala-js.org/ **
** /____/\\___/_/ |_/____/_/ | |__/ /____/ **
** |/____/ **
\\* */
package org.scalajs.testsuite.javalib.math
import java.math.BigDecimal
import org.junit.Test
import org.junit.Assert._
class BigDecimalTest {
// java.lang.Math.BigDecimal Int/Long Constructors
@Test def `should accept 3 as aLong`(): Unit = {
val bd = BigDecimal.valueOf(3L)
assertEquals(3, bd.intValue())
assertTrue(bd.longValue == 3L)
}
@Test def `should accept 999999999 as aLong`(): Unit = {
val bd = BigDecimal.valueOf(999999999L)
assertEquals(999999999, bd.intValue())
assertTrue(bd.longValue == 999999999L)
}
@Test def `should accept 9999999999 as aLong`(): Unit = {
val bd = BigDecimal.valueOf(9999999999L)
assertTrue(bd.longValue == 9999999999L)
}
@Test def `should accept -999999999 as aLong`(): Unit = {
val bd = BigDecimal.valueOf(-999999999L)
assertEquals(-999999999, bd.intValue())
assertTrue(bd.longValue == -999999999L)
}
@Test def `should accept -9999999999 as aLong`(): Unit = {
val bd = BigDecimal.valueOf(-9999999999L)
assertTrue(bd.longValue == -9999999999L)
}
@Test def `should accept 3 as a string`(): Unit = {
val bd = new BigDecimal("3")
assertEquals(3, bd.intValue())
assertTrue(bd.longValue == 3L)
}
@Test def `should accept 99 as a string`(): Unit = {
val bd = new BigDecimal("99")
assertEquals(99, bd.intValue())
assertTrue(bd.longValue == 99L)
}
@Test def `should accept 999999999 as string`(): Unit = {
val bd = new BigDecimal("999999999")
assertEquals(999999999, bd.intValue())
assertTrue(bd.longValue == 999999999L)
}
@Test def `should accept 9999999999 as a string`(): Unit = {
val bd = new BigDecimal("9999999999")
assertTrue(bd.longValue == 9999999999L)
}
@Test def `should accept -99 as a string`(): Unit = {
val bd = new BigDecimal("-99")
assertEquals(-99, bd.intValue())
assertTrue(bd.longValue == -99L)
}
@Test def `should accept -999999999 as sting`(): Unit = {
val bd = new BigDecimal("-999999999")
assertEquals(-999999999, bd.intValue())
assertTrue(bd.longValue == -999999999L)
}
@Test def `should accept -9999999999 as a string`(): Unit = {
val bd = new BigDecimal("-9999999999")
assertTrue(bd.longValue == -9999999999L)
}
@Test def `should accept 9.9 as a string`(): Unit = {
val bd = new BigDecimal("9.9")
assertEquals("9.9", bd.toString)
assertEquals(9.9, bd.doubleValue(), 0.0)
}
@Test def `should accept 99.99 as a string`(): Unit = {
val bd = new BigDecimal("99.99")
assertEquals(99.99, bd.doubleValue(), 0.0)
}
@Test def `should accept 999.999 as a string`(): Unit = {
val bd = new BigDecimal("999.999")
assertEquals(999.999, bd.doubleValue(), 0.0)
}
@Test def `should accept 9999.9999 as a string`(): Unit = {
val bd = new BigDecimal("9999.9999")
assertEquals(9999.9999, bd.doubleValue(), 0.0)
}
// java.lang.Math.BigDecimal double Constructors
@Test def `should accept 3.3 as a double`(): Unit = {
val d = 3.3
val bd = new BigDecimal(d)
assertEquals(d, bd.doubleValue(), 0.0)
}
@Test def `should accept 99.99 as a double`(): Unit = {
val d = 99.99
val bd = new BigDecimal(d)
assertEquals(d, bd.doubleValue(), 0.0)
}
@Test def `should accept 9999.9999 as a double`(): Unit = {
val d:Double = 9999.9999
val bd = new BigDecimal(d)
assertEquals(d, bd.doubleValue(), 0.0)
}
@Test def `should accept 99999999.99999999 as a double`(): Unit = {
val d = 99999999.99999999
val bd = new BigDecimal(d)
assertEquals(d, bd.doubleValue(), 0.0)
}
@Test def `should accept 999999999.999999999 as a double`(): Unit = {
val d = 999999999.999999999
val bd = new BigDecimal(d)
assertEquals(d, bd.doubleValue(), 0.0)
}
@Test def `should accept 9999999999.9999999999 as a double`(): Unit = {
val d = 9999999999.9999999999
val bd = new BigDecimal(d)
assertEquals(d, bd.doubleValue(), 0.0)
}
@Test def `should accept -3.3 as a double`(): Unit = {
val d = -3.3
val bd = new BigDecimal(d)
assertEquals(d, bd.doubleValue(), 0.0)
}
@Test def `should accept -99.99 as a double`(): Unit = {
val d = -99.99
val bd = new BigDecimal(d)
assertEquals(d, bd.doubleValue(), 0.0)
}
@Test def `should accept -99999999.99999999 as a double`(): Unit = {
val d = -99999999.99999999
val bd = new BigDecimal(d)
assertEquals(d, bd.doubleValue(), 0.0)
}
@Test def `should accept -999999999.999999999 as a double`(): Unit = {
val d = -999999999.999999999
val bd = new BigDecimal(d)
assertEquals(d, bd.doubleValue(), 0.0)
}
@Test def `should accept -9999999999.9999999999 as a double`(): Unit = {
val d = -9999999999.9999999999
val bd = new BigDecimal(d)
assertEquals(d, bd.doubleValue(), 0.0)
}
}
| lrytz/scala-js | test-suite/shared/src/test/scala/org/scalajs/testsuite/javalib/math/BigDecimalTest.scala | Scala | bsd-3-clause | 5,367 |
package com.twitter.util
import org.specs.SpecificationWithJUnit
class CancellableSpec extends SpecificationWithJUnit {
"CancellableSink" should {
"cancel once" in {
var count = 0
val s = new CancellableSink { count += 1 }
s.cancel()
count must be_==(1)
s.cancel()
count must be_==(1)
}
}
}
| mosesn/util | util-core/src/test/scala/com/twitter/util/CancellableSpec.scala | Scala | apache-2.0 | 341 |
package org.vaadin.addons.rinne
import com.vaadin.ui.Panel
import org.vaadin.addons.rinne.mixins.PanelMixin
class VPanel extends Panel with PanelMixin | LukaszByczynski/rinne | src/main/scala/org/vaadin/addons/rinne/VPanel.scala | Scala | apache-2.0 | 152 |
package com.github.j5ik2o.forseti.adaptor.validator
import com.github.j5ik2o.forseti.domain.GrantType
import com.github.j5ik2o.forseti.domain.client.Client
import com.github.j5ik2o.forseti.domain.exception.{OAuthException, UnsupportedGrantTypeException}
import com.github.j5ik2o.forseti.infrastructure.util.EitherTUtil._
import scala.concurrent.{ExecutionContext, Future}
import scalaz._
trait GrantTypeValidator {
def validate(
grantType: GrantType.Value,
client: Client,
targetGrantType: Maybe[GrantType.Value]
)(
implicit ec: ExecutionContext
): EitherT[Future, OAuthException, Unit]
}
object GrantTypeValidator {
def ofDefault: GrantTypeValidator = new Default
private class Default extends GrantTypeValidator {
def validate(
grantType: GrantType.Value,
client: Client,
targetGrantType: Maybe[GrantType.Value]
)(
implicit ec: ExecutionContext
): EitherT[Future, OAuthException, Unit] = {
if (!client.grantTypes.contains(grantType) || targetGrantType.toOption.fold(false)(
_ != grantType
))
createLeftOfEitherT[OAuthException, Unit](
new UnsupportedGrantTypeException(Maybe.just(s"Unsupported grant_type: $grantType"))
)
else
().toRightTFuture[OAuthException]
}
}
}
| j5ik2o/forseti | server/server-use-case-port/src/main/scala/com/github/j5ik2o/forseti/adaptor/validator/GrantTypeValidator.scala | Scala | mit | 1,332 |
package cn.scala.chapter13
//全部是抽象成员,与java的interface等同
trait MySQLDAO{
def delete(id:String):Boolean
def add(o:Any):Boolean
def update(o:Any):Int
def query(id:String):List[Any]
} | Dax1n/Scala | ScalaLearning/src/main/scala/cn/scala/chapter13/MySQLDAO.scala | Scala | apache-2.0 | 211 |
import sbt._
object Version {
val akka = "2.3.11"
val scalaz = "7.1.3"
val scalatest = "2.2.5"
val scalacheck = "1.12.2"
}
object Library {
val akka = "com.typesafe.akka" %% "akka-actor" % Version.akka
val scalaz = "org.scalaz" %% "scalaz-core" % Version.scalaz
val akkatest = "com.typesafe.akka" %% "akka-testkit" % Version.akka % "test"
val scalatest = "org.scalatest" %% "scalatest" % Version.scalatest % "test"
val scalacheck = "org.scalacheck" %% "scalacheck" % Version.scalacheck % "test"
}
| nikhilRP/akka_requests | project/Dependencies.scala | Scala | apache-2.0 | 570 |
package de.htwg.zeta.common.format.entity
import java.util.UUID
import de.htwg.zeta.common.models.entity.TimedTask
import play.api.libs.json.JsObject
import play.api.libs.json.Json
import play.api.libs.json.JsResult
import play.api.libs.json.JsValue
import play.api.libs.json.OFormat
/**
* Parse JsValue to TimedTask and TimedTask to JsValue
*/
@SuppressWarnings(Array("org.wartremover.warts.DefaultArguments"))
class TimedTaskFormat(
sId: String = "id",
sName: String = "name",
sGeneratorId: String = "generatorId",
sFilterId: String = "filterId",
sInterval: String = "interval",
sStart: String = "start",
sDeleted: String = "deleted"
) extends OFormat[TimedTask] {
override def writes(o: TimedTask): JsObject = Json.obj(
sId -> o.id.toString,
sName -> o.name,
sGeneratorId -> o.generatorId,
sFilterId -> o.filterId,
sInterval -> o.interval,
sStart -> o.start
)
override def reads(json: JsValue): JsResult[TimedTask] = for {
id <- (json \\ sId).validateOpt[UUID]
name <- (json \\ sName).validate[String]
generator <- (json \\ sGeneratorId).validate[UUID]
filter <- (json \\ sFilterId).validate[UUID]
interval <- (json \\ sInterval).validate[Int]
start <- (json \\ sStart).validate[String]
} yield {
TimedTask(id.getOrElse(UUID.randomUUID()), name, generator, filter, interval, start)
}
}
| Zeta-Project/zeta | api/common/src/main/scala/de/htwg/zeta/common/format/entity/TimedTaskFormat.scala | Scala | bsd-2-clause | 1,381 |
package scalacookbook.chapter20
/**
* Created by liguodong on 2016/8/27.
*/
object ExpressionOrientedProgram extends App{
// understand the difference between a statement and an expression.
//Statements do not return results and are executed solely for their side effects,
//while expressions always return a result and often do not have side effects at all.
//statements
/*
order.calculateTaxes()
order.updatePrices()
*/
//Expressions
/*
val tax = calculateTax(order)
val price = calculatePrice(order)
*/
import section03._
val stock = new Stock("GOOG", "Google", "", "", "", "")
val url = stock.buildUrl(stock.symbol)
stock.html = stock.getUrlContent(url)
// a series of calls on an object ('statements')
stock.setPriceUsingHtml
stock.setVolumeUsingHtml
stock.setHighUsingHtml
stock.setLowUsingHtml
import section01._
// a series of expressions
val url2 = StockUtils.buildUrl(stock.symbol)
val html = NetUtils.getUrlContent(url2)
val price = StockUtils.getPrice(html)
val volume = StockUtils.getVolume(stock.symbol,html)
val high = StockUtils.getHigh(stock.symbol,html)
val low = StockUtils.getLow(stock.symbol,html)
val date = DateUtils.currentDate
val stockInstance = StockInstance2(stock.symbol, date, price, high, low)
//Discussion
println(2 + 2)
println(List(1,2,3,4,5).filter(_ > 2))
val a=1
val b=2
//an if/else expression returns a value
val greater = if (a > b) a else b
//Match expressions also return a result
val evenOrOdd = b match {
case 1 | 3 | 5 | 7 | 9 => println("odd")
case 2 | 4 | 6 | 8 | 10 => println("even")
}
//Even a try/catch block returns a value
val result = try {
"1".toInt
} catch {
case _ => 0
}
/*
Because expressions always return a result, and generally don’t have side effects, there
are several benefits to EOP:
• The code is easier to reason about. Inputs go in,
a result is returned, and there are no side effects.
• The code is easier to test.
• Combined with Scala’s syntax, EOP also results in concise, expressive code.
• Although it has only been hinted at in these examples, expressions can often be
executed in any order. This subtle feature lets you execute expressions in parallel,
which can be a big help when you’re trying to take advantage of modern multicore CPUs.
*/
}
package section03{
// an intentionally bad example
class Stock (var symbol: String,
var company: String,
var price: String,
var volume: String,
var high: String,
var low: String) {
var html: String = _
def buildUrl(stockSymbol: String): String = { "" }
def getUrlContent(url: String):String = { "" }
def setPriceUsingHtml() { this.price = "" }
def setVolumeUsingHtml() { this.volume = "" }
def setHighUsingHtml() { this.high = "" }
def setLowUsingHtml() { this.low = "" }
}
}
| liguodongIOT/java-scala-mix-sbt | src/main/scala/scalacookbook/chapter20/ExpressionOrientedProgram.scala | Scala | apache-2.0 | 2,981 |
package com.cloudray.scalapress.item.controller.admin
import org.scalatest.{OneInstancePerTest, FunSuite}
import org.scalatest.mock.MockitoSugar
import org.mockito.Mockito
import org.springframework.ui.ModelMap
import com.cloudray.scalapress.theme.{ThemeDao, Theme}
/** @author Stephen Samuel */
class ThemePopulatorTest extends FunSuite with MockitoSugar with OneInstancePerTest {
val t1 = new Theme
t1.id = 3
t1.name = "Murdock"
val t2 = new Theme
t2.id = 4
t2.name = "Face man"
val t3 = new Theme
t3.id = 2
t3.name = "hannibal"
val populator = new ThemePopulator {
val themeDao: ThemeDao = mock[ThemeDao]
}
Mockito.when(populator.themeDao.findAll).thenReturn(List(t1, t2, t3))
test("that themes are populated in order") {
val model = new ModelMap
populator.themes(model)
val themes = model.get("themesMap").asInstanceOf[java.util.Map[String, String]]
assert(4 === themes.size)
val it = themes.entrySet().iterator()
assert("-None-" === it.next().getValue)
assert("#2 hannibal" === it.next().getValue)
assert("#3 Murdock" === it.next().getValue)
assert("#4 Face man" === it.next().getValue)
}
}
| vidyacraghav/scalapress | src/test/scala/com/cloudray/scalapress/item/controller/admin/ThemePopulatorTest.scala | Scala | apache-2.0 | 1,173 |
// cf. pos/t8300-patmat.scala
trait Universe {
type Name >: Null <: AnyRef with NameApi
trait NameApi
type TermName >: Null <: TermNameApi with Name
trait TermNameApi extends NameApi
}
object Test extends App {
val u: Universe = ???
import u._
val ScalaName: TermName = ???
locally {
??? match {
case Test.ScalaName => ???
}
import Test.ScalaName._
??? match {
case ScalaName => ???
}
import ScalaName._
// both the pattern and import led to
// stable identifier required, but SN found. Note that value SN
// is not stable because its type, Test.u.TermName, is volatile.
val SN = ScalaName
??? match {
case SN => ???
}
import SN._
}
}
| AlexSikia/dotty | tests/pending/pos/t8301b.scala | Scala | bsd-3-clause | 737 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.mllib.clustering
import scala.collection.mutable.ArrayBuffer
import org.apache.spark.Logging
import org.apache.spark.annotation.{Experimental, Since}
import org.apache.spark.mllib.linalg.{Vector, Vectors}
import org.apache.spark.mllib.linalg.BLAS.{axpy, scal}
import org.apache.spark.mllib.util.MLUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.storage.StorageLevel
import org.apache.spark.util.Utils
import org.apache.spark.util.random.XORShiftRandom
/**
* K-means clustering with support for multiple parallel runs and a k-means++ like initialization
* mode (the k-means|| algorithm by Bahmani et al). When multiple concurrent runs are requested,
* they are executed together with joint passes over the data for efficiency.
*
* This is an iterative algorithm that will make multiple passes over the data, so any RDDs given
* to it should be cached by the user.
*/
@Since("0.8.0")
class JSKMeans private (
private var k: Int,
private var maxIterations: Int,
private var runs: Int,
private var initializationMode: String,
private var initializationSteps: Int,
private var epsilon: Double,
private var seed: Long) extends Serializable with Logging {
/**
* Constructs a KMeans instance with default parameters: {k: 2, maxIterations: 20, runs: 1,
* initializationMode: "k-means||", initializationSteps: 5, epsilon: 1e-4, seed: random}.
*/
@Since("0.8.0")
def this() = this(2, 20, 1, JSKMeans.K_MEANS_PARALLEL, 5, 1e-4, Utils.random.nextLong())
/**
* Number of clusters to create (k).
*/
@Since("1.4.0")
def getK: Int = k
/**
* Set the number of clusters to create (k). Default: 2.
*/
@Since("0.8.0")
def setK(k: Int): this.type = {
this.k = k
this
}
/**
* Maximum number of iterations to run.
*/
@Since("1.4.0")
def getMaxIterations: Int = maxIterations
/**
* Set maximum number of iterations to run. Default: 20.
*/
@Since("0.8.0")
def setMaxIterations(maxIterations: Int): this.type = {
this.maxIterations = maxIterations
this
}
/**
* The initialization algorithm. This can be either "random" or "k-means||".
*/
@Since("1.4.0")
def getInitializationMode: String = initializationMode
/**
* Set the initialization algorithm. This can be either "random" to choose random points as
* initial cluster centers, or "k-means||" to use a parallel variant of k-means++
* (Bahmani et al., Scalable K-Means++, VLDB 2012). Default: k-means||.
*/
@Since("0.8.0")
def setInitializationMode(initializationMode: String): this.type = {
JSKMeans.validateInitMode(initializationMode)
this.initializationMode = initializationMode
this
}
/**
* :: Experimental ::
* Number of runs of the algorithm to execute in parallel.
*/
@Since("1.4.0")
@deprecated("Support for runs is deprecated. This param will have no effect in 1.7.0.", "1.6.0")
def getRuns: Int = runs
/**
* :: Experimental ::
* Set the number of runs of the algorithm to execute in parallel. We initialize the algorithm
* this many times with random starting conditions (configured by the initialization mode), then
* return the best clustering found over any run. Default: 1.
*/
@Since("0.8.0")
@deprecated("Support for runs is deprecated. This param will have no effect in 1.7.0.", "1.6.0")
def setRuns(runs: Int): this.type = {
if (runs <= 0) {
throw new IllegalArgumentException("Number of runs must be positive")
}
this.runs = runs
this
}
/**
* Number of steps for the k-means|| initialization mode
*/
@Since("1.4.0")
def getInitializationSteps: Int = initializationSteps
/**
* Set the number of steps for the k-means|| initialization mode. This is an advanced
* setting -- the default of 5 is almost always enough. Default: 5.
*/
@Since("0.8.0")
def setInitializationSteps(initializationSteps: Int): this.type = {
if (initializationSteps <= 0) {
throw new IllegalArgumentException("Number of initialization steps must be positive")
}
this.initializationSteps = initializationSteps
this
}
/**
* The distance threshold within which we've consider centers to have converged.
*/
@Since("1.4.0")
def getEpsilon: Double = epsilon
/**
* Set the distance threshold within which we've consider centers to have converged.
* If all centers move less than this Euclidean distance, we stop iterating one run.
*/
@Since("0.8.0")
def setEpsilon(epsilon: Double): this.type = {
this.epsilon = epsilon
this
}
/**
* The random seed for cluster initialization.
*/
@Since("1.4.0")
def getSeed: Long = seed
/**
* Set the random seed for cluster initialization.
*/
@Since("1.4.0")
def setSeed(seed: Long): this.type = {
this.seed = seed
this
}
// Initial cluster centers can be provided as a KMeansModel object rather than using the
// random or k-means|| initializationMode
private var initialModel: Option[KMeansModel] = None
/**
* Set the initial starting point, bypassing the random initialization or k-means||
* The condition model.k == this.k must be met, failure results
* in an IllegalArgumentException.
*/
@Since("1.4.0")
def setInitialModel(model: KMeansModel): this.type = {
require(model.k == k, "mismatched cluster count")
initialModel = Some(model)
this
}
/**
* Train a K-means model on the given set of points; `data` should be cached for high
* performance, because this is an iterative algorithm.
*/
@Since("0.8.0")
def run(data: RDD[Vector]): KMeansModel = {
if (data.getStorageLevel == StorageLevel.NONE) {
logWarning("The input data is not directly cached, which may hurt performance if its"
+ " parent RDDs are also uncached.")
}
// Compute squared norms and cache them.
val norms = data.map(Vectors.norm(_, 2.0))
norms.persist()
val zippedData = data.zip(norms).map { case (v, norm) =>
new VectorWithNorm(v, norm)
}
val model = runAlgorithm(zippedData)
norms.unpersist()
// Warn at the end of the run as well, for increased visibility.
if (data.getStorageLevel == StorageLevel.NONE) {
logWarning("The input data was not directly cached, which may hurt performance if its"
+ " parent RDDs are also uncached.")
}
model
}
/**
* Implementation of K-Means algorithm.
*/
private def runAlgorithm(data: RDD[VectorWithNorm]): KMeansModel = {
val sc = data.sparkContext
val initStartTime = System.nanoTime()
// Only one run is allowed when initialModel is given
val numRuns = if (initialModel.nonEmpty) {
if (runs > 1) logWarning("Ignoring runs; one run is allowed when initialModel is given.")
1
} else {
runs
}
val centers = initialModel match {
case Some(kMeansCenters) => {
Array(kMeansCenters.clusterCenters.map(s => new VectorWithNorm(s)))
}
case None => {
if (initializationMode == JSKMeans.RANDOM) {
initRandom(data)
} else {
initKMeansParallel(data)
}
}
}
val initTimeInSeconds = (System.nanoTime() - initStartTime) / 1e9
logInfo(s"Initialization with $initializationMode took " + "%.3f".format(initTimeInSeconds) +
" seconds.")
val active = Array.fill(numRuns)(true)
val costs = Array.fill(numRuns)(0.0)
var activeRuns = new ArrayBuffer[Int] ++ (0 until numRuns)
var iteration = 0
val iterationStartTime = System.nanoTime()
// Execute iterations of Lloyd's algorithm until all runs have converged
while (iteration < maxIterations && !activeRuns.isEmpty) {
type WeightedPoint = (Vector, Long)
def mergeContribs(x: WeightedPoint, y: WeightedPoint): WeightedPoint = {
axpy(1.0, x._1, y._1)
(y._1, x._2 + y._2)
}
val activeCenters = activeRuns.map(r => centers(r)).toArray
val costAccums = activeRuns.map(_ => sc.accumulator(0.0))
val bcActiveCenters = sc.broadcast(activeCenters)
// Find the sum and count of points mapping to each center
val totalContribs = data.mapPartitions { points =>
val thisActiveCenters = bcActiveCenters.value
val runs = thisActiveCenters.length
val k = thisActiveCenters(0).length
val dims = thisActiveCenters(0)(0).vector.size
val sums = Array.fill(runs, k)(Vectors.zeros(dims))
val counts = Array.fill(runs, k)(0L)
points.foreach { point =>
(0 until runs).foreach { i =>
val (bestCenter, cost) = JSKMeans.findClosest(thisActiveCenters(i), point)
costAccums(i) += cost
val sum = sums(i)(bestCenter)
axpy(1.0, point.vector, sum)
counts(i)(bestCenter) += 1
}
}
val contribs = for (i <- 0 until runs; j <- 0 until k) yield {
((i, j), (sums(i)(j), counts(i)(j)))
}
contribs.iterator
}.reduceByKey(mergeContribs).collectAsMap()
bcActiveCenters.unpersist(blocking = false)
// Update the cluster centers and costs for each active run
for ((run, i) <- activeRuns.zipWithIndex) {
var changed = false
var j = 0
while (j < k) {
val (sum, count) = totalContribs((i, j))
if (count != 0) {
scal(1.0 / count, sum)
val newCenter = new VectorWithNorm(sum)
if (JSKMeans.fastSquaredDistance(newCenter, centers(run)(j)) > epsilon * epsilon) {
changed = true
}
centers(run)(j) = newCenter
}
j += 1
}
if (!changed) {
active(run) = false
logInfo("Run " + run + " finished in " + (iteration + 1) + " iterations")
}
costs(run) = costAccums(i).value
}
activeRuns = activeRuns.filter(active(_))
iteration += 1
}
val iterationTimeInSeconds = (System.nanoTime() - iterationStartTime) / 1e9
logInfo(s"Iterations took " + "%.3f".format(iterationTimeInSeconds) + " seconds.")
if (iteration == maxIterations) {
logInfo(s"KMeans reached the max number of iterations: $maxIterations.")
} else {
logInfo(s"KMeans converged in $iteration iterations.")
}
val (minCost, bestRun) = costs.zipWithIndex.min
logInfo(s"The cost for the best run is $minCost.")
new KMeansModel(centers(bestRun).map(_.vector))
}
/**
* Initialize `runs` sets of cluster centers at random.
*/
private def initRandom(data: RDD[VectorWithNorm])
: Array[Array[VectorWithNorm]] = {
// Sample all the cluster centers in one pass to avoid repeated scans
val sample = data.takeSample(true, runs * k, new XORShiftRandom(this.seed).nextInt()).toSeq
Array.tabulate(runs)(r => sample.slice(r * k, (r + 1) * k).map { v =>
new VectorWithNorm(Vectors.dense(v.vector.toArray), v.norm)
}.toArray)
}
/**
* Initialize `runs` sets of cluster centers using the k-means|| algorithm by Bahmani et al.
* (Bahmani et al., Scalable K-Means++, VLDB 2012). This is a variant of k-means++ that tries
* to find with dissimilar cluster centers by starting with a random center and then doing
* passes where more centers are chosen with probability proportional to their squared distance
* to the current cluster set. It results in a provable approximation to an optimal clustering.
*
* The original paper can be found at http://theory.stanford.edu/~sergei/papers/vldb12-kmpar.pdf.
*/
private def initKMeansParallel(data: RDD[VectorWithNorm])
: Array[Array[VectorWithNorm]] = {
// Initialize empty centers and point costs.
val centers = Array.tabulate(runs)(r => ArrayBuffer.empty[VectorWithNorm])
var costs = data.map(_ => Array.fill(runs)(Double.PositiveInfinity))
// Initialize each run's first center to a random point.
val seed = new XORShiftRandom(this.seed).nextInt()
val sample = data.takeSample(true, runs, seed).toSeq
val newCenters = Array.tabulate(runs)(r => ArrayBuffer(sample(r).toDense))
/** Merges new centers to centers. */
def mergeNewCenters(): Unit = {
var r = 0
while (r < runs) {
centers(r) ++= newCenters(r)
newCenters(r).clear()
r += 1
}
}
// On each step, sample 2 * k points on average for each run with probability proportional
// to their squared distance from that run's centers. Note that only distances between points
// and new centers are computed in each iteration.
var step = 0
while (step < initializationSteps) {
val bcNewCenters = data.context.broadcast(newCenters)
val preCosts = costs
costs = data.zip(preCosts).map { case (point, cost) =>
Array.tabulate(runs) { r =>
math.min(JSKMeans.pointCost(bcNewCenters.value(r), point), cost(r))
}
}.persist(StorageLevel.MEMORY_AND_DISK)
val sumCosts = costs
.aggregate(new Array[Double](runs))(
seqOp = (s, v) => {
// s += v
var r = 0
while (r < runs) {
s(r) += v(r)
r += 1
}
s
},
combOp = (s0, s1) => {
// s0 += s1
var r = 0
while (r < runs) {
s0(r) += s1(r)
r += 1
}
s0
}
)
bcNewCenters.unpersist(blocking = false)
preCosts.unpersist(blocking = false)
val chosen = data.zip(costs).mapPartitionsWithIndex { (index, pointsWithCosts) =>
val rand = new XORShiftRandom(seed ^ (step << 16) ^ index)
pointsWithCosts.flatMap { case (p, c) =>
val rs = (0 until runs).filter { r =>
rand.nextDouble() < 2.0 * c(r) * k / sumCosts(r)
}
if (rs.length > 0) Some(p, rs) else None
}
}.collect()
mergeNewCenters()
chosen.foreach { case (p, rs) =>
rs.foreach(newCenters(_) += p.toDense)
}
step += 1
}
mergeNewCenters()
costs.unpersist(blocking = false)
// Finally, we might have a set of more than k candidate centers for each run; weigh each
// candidate by the number of points in the dataset mapping to it and run a local k-means++
// on the weighted centers to pick just k of them
val bcCenters = data.context.broadcast(centers)
val weightMap = data.flatMap { p =>
Iterator.tabulate(runs) { r =>
((r, JSKMeans.findClosest(bcCenters.value(r), p)._1), 1.0)
}
}.reduceByKey(_ + _).collectAsMap()
bcCenters.unpersist(blocking = false)
val finalCenters = (0 until runs).par.map { r =>
val myCenters = centers(r).toArray
val myWeights = (0 until myCenters.length).map(i => weightMap.getOrElse((r, i), 0.0)).toArray
LocalKMeans.kMeansPlusPlus(r, myCenters, myWeights, k, 30)
}
finalCenters.toArray
}
}
/**
* Top-level methods for calling K-means clustering.
*/
@Since("0.8.0")
object JSKMeans {
// Initialization mode names
@Since("0.8.0")
val RANDOM = "random"
@Since("0.8.0")
val K_MEANS_PARALLEL = "k-means||"
/**
* Trains a k-means model using the given set of parameters.
*
* @param data training points stored as `RDD[Vector]`
* @param k number of clusters
* @param maxIterations max number of iterations
* @param runs number of parallel runs, defaults to 1. The best model is returned.
* @param initializationMode initialization model, either "random" or "k-means||" (default).
* @param seed random seed value for cluster initialization
*/
@Since("1.3.0")
def train(
data: RDD[Vector],
k: Int,
maxIterations: Int,
runs: Int,
initializationMode: String,
seed: Long): KMeansModel = {
new JSKMeans().setK(k)
.setMaxIterations(maxIterations)
.setRuns(runs)
.setInitializationMode(initializationMode)
.setSeed(seed)
.run(data)
}
/**
* Trains a k-means model using the given set of parameters.
*
* @param data training points stored as `RDD[Vector]`
* @param k number of clusters
* @param maxIterations max number of iterations
* @param runs number of parallel runs, defaults to 1. The best model is returned.
* @param initializationMode initialization model, either "random" or "k-means||" (default).
*/
@Since("0.8.0")
def train(
data: RDD[Vector],
k: Int,
maxIterations: Int,
runs: Int,
initializationMode: String): KMeansModel = {
new JSKMeans().setK(k)
.setMaxIterations(maxIterations)
.setRuns(runs)
.setInitializationMode(initializationMode)
.run(data)
}
/**
* Trains a k-means model using specified parameters and the default values for unspecified.
*/
@Since("0.8.0")
def train(
data: RDD[Vector],
k: Int,
maxIterations: Int): KMeansModel = {
train(data, k, maxIterations, 1, K_MEANS_PARALLEL)
}
/**
* Trains a k-means model using specified parameters and the default values for unspecified.
*/
@Since("0.8.0")
def train(
data: RDD[Vector],
k: Int,
maxIterations: Int,
runs: Int): KMeansModel = {
train(data, k, maxIterations, runs, K_MEANS_PARALLEL)
}
/**
* Returns the index of the closest center to the given point, as well as the squared distance.
*/
private[mllib] def findClosest(
centers: TraversableOnce[VectorWithNorm],
point: VectorWithNorm): (Int, Double) = {
var bestDistance = Double.PositiveInfinity
var bestIndex = 0
var i = 0
centers.foreach { center =>
// Since `\\|a - b\\| \\geq |\\|a\\| - \\|b\\||`, we can use this lower bound to avoid unnecessary
// distance computation.
var lowerBoundOfSqDist = center.norm - point.norm
lowerBoundOfSqDist = lowerBoundOfSqDist * lowerBoundOfSqDist
if (lowerBoundOfSqDist < bestDistance) {
val distance: Double = fastSquaredDistance(center, point)
if (distance < bestDistance) {
bestDistance = distance
bestIndex = i
}
}
i += 1
}
(bestIndex, bestDistance)
}
/**
* Returns the K-means cost of a given point against the given cluster centers.
*/
private[mllib] def pointCost(
centers: TraversableOnce[VectorWithNorm],
point: VectorWithNorm): Double =
findClosest(centers, point)._2
/**
* Returns the squared Euclidean distance between two vectors computed by
* [[org.apache.spark.mllib.util.MLUtils#fastSquaredDistance]].
*/
private[clustering] def fastSquaredDistance(
v1: VectorWithNorm,
v2: VectorWithNorm): Double = {
MLUtils.fastSquaredDistance(v1.vector, v1.norm, v2.vector, v2.norm)
}
private[spark] def validateInitMode(initMode: String): Boolean = {
initMode match {
case JSKMeans.RANDOM => true
case JSKMeans.K_MEANS_PARALLEL => true
case _ => false
}
}
}
/**
* A vector with its norm for fast distance computation.
*
* @see [[org.apache.spark.mllib.clustering.KMeans#fastSquaredDistance]]
*/
private[clustering]
class VectorWithNorm(val vector: Vector, val norm: Double) extends Serializable {
def this(vector: Vector) = this(vector, Vectors.norm(vector, 2.0))
def this(array: Array[Double]) = this(Vectors.dense(array))
/** Converts the vector to a dense vector. */
def toDense: VectorWithNorm = new VectorWithNorm(Vectors.dense(vector.toArray), norm)
} | librairy/modeler-lda | src/main/scala/org/apache/spark/mllib/clustering/JSKMeans.scala | Scala | apache-2.0 | 20,898 |
/*
* Copyright 2016 Tamer AbdulRadi
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package troy
package schema
import org.scalatest._
import troy.cql.ast.{ SelectStatement, _ }
import troy.cql.ast.ddl.{ Keyspace => CqlKeyspace, Table => CqlTable }
import troy.cql.ast.dml.Select
class DeleteSpec extends FlatSpec with Matchers {
import VTestUtils._
val schemaStatements = CqlParser.parseSchema("""
CREATE KEYSPACE test WITH replication = {'class': 'SimpleStrategy' , 'replication_factor': '1'};
CREATE TABLE test.posts (
author_id uuid,
post_id uuid,
author_name text static,
post_rating int,
post_title text,
comments map<int, text>,
PRIMARY KEY ((author_id), post_id)
);
CREATE TABLE test.post_details (
author_id uuid,
id uuid,
rating int,
title text,
tags set<text>,
comment_ids set<int>,
comment_userIds list<uuid>,
comment_bodies list<text>,
comments map<int, text>,
PRIMARY KEY ((author_id), id)
);
""").get
val schema = SchemaEngine(schemaStatements).get
"Schema" should "support simple delete statement" ignore {
val statement = parse("DELETE FROM test.posts WHERE author_id = uuid();")
val (rowType, variableTypes) = schema(statement).get
rowType.asInstanceOf[SchemaEngine.Columns].types.isEmpty shouldBe true
variableTypes.isEmpty shouldBe true
}
it should "delete statement with variables" in {
val statement = parse("DELETE FROM test.posts WHERE author_id = ?; ")
val (rowType, variableTypes) = schema(statement).get
rowType.asInstanceOf[SchemaEngine.Columns].types.isEmpty shouldBe true
variableTypes.size shouldBe 1
variableTypes(0) shouldBe DataType.Uuid
}
it should "delete statement with IF EXISTS flag" in {
val statement = parse("DELETE FROM test.posts WHERE author_id = ? IF EXISTS;")
val (rowType, variableTypes) = schema(statement).get
variableTypes shouldBe Seq(DataType.Uuid)
val columnTypes = rowType.asInstanceOf[SchemaEngine.Columns].types
columnTypes.size shouldBe 1
columnTypes(0) shouldBe DataType.Boolean
}
it should "delete statement with IF simple condition variables" in {
val statement = parse("DELETE FROM test.posts WHERE author_id = ? IF post_title = ?;")
val (rowType, variableTypes) = schema(statement).get
variableTypes shouldBe Seq(DataType.Uuid, DataType.Text)
val columnTypes = rowType.asInstanceOf[SchemaEngine.Columns].types
columnTypes.size shouldBe 1
columnTypes(0) shouldBe DataType.Boolean
}
it should "delete statement with IF IN condition variables" in {
val statement = parse("DELETE FROM test.posts WHERE author_id = ? IF post_title IN ?;")
val (rowType, variableTypes) = schema(statement).get
variableTypes shouldBe Seq(DataType.Uuid, DataType.Tuple(Seq(DataType.Text)))
val columnTypes = rowType.asInstanceOf[SchemaEngine.Columns].types
columnTypes.size shouldBe 1
columnTypes(0) shouldBe DataType.Boolean
}
it should "delete statement with IF CONTAINS KEY condition" in {
val statement = parse("DELETE FROM test.posts WHERE author_id = ? IF comments CONTAINS KEY ?;")
val (rowType, variableTypes) = schema(statement).get
variableTypes shouldBe Seq(DataType.Uuid, DataType.Text)
val columnTypes = rowType.asInstanceOf[SchemaEngine.Columns].types
columnTypes.size shouldBe 1
columnTypes(0) shouldBe DataType.Boolean
}
it should "delete statement with IF complex condition" in {
val statement = parse("DELETE FROM test.post_details WHERE author_id = ? IF comment_userIds CONTAINS ? AND comment_bodies CONTAINS ? AND rating = ?;")
val (rowType, variableTypes) = schema(statement).get
variableTypes shouldBe Seq(DataType.Uuid, DataType.Uuid, DataType.Text, DataType.Int)
val columnTypes = rowType.asInstanceOf[SchemaEngine.Columns].types
columnTypes.size shouldBe 1
columnTypes(0) shouldBe DataType.Boolean
}
def parse(s: String) = CqlParser.parseDML(s) match {
case CqlParser.Success(result, _) =>
result
case CqlParser.Failure(msg, _) =>
fail(msg)
}
}
| schemasafe/troy | troy-schema/src/test/scala/troy/schema/DeleteSpec.scala | Scala | apache-2.0 | 4,702 |
package org.scalaide.core.internal
import scala.collection.mutable
import scala.tools.nsc.settings.ScalaVersion
import org.eclipse.core.resources.IFile
import org.eclipse.core.resources.IProject
import org.eclipse.core.resources.IResourceChangeEvent
import org.eclipse.core.resources.IResourceChangeListener
import org.eclipse.core.resources.IResourceDelta
import org.eclipse.core.resources.IResourceDeltaVisitor
import org.eclipse.core.resources.ResourcesPlugin
import org.eclipse.core.runtime.Platform
import org.eclipse.core.runtime.content.IContentType
import org.eclipse.jdt.core.ElementChangedEvent
import org.eclipse.jdt.core.IClassFile
import org.eclipse.jdt.core.ICompilationUnit
import org.eclipse.jdt.core.IElementChangedListener
import org.eclipse.jdt.core.IJavaElement
import org.eclipse.jdt.core.IJavaElementDelta
import org.eclipse.jdt.core.IJavaProject
import org.eclipse.jdt.core.JavaCore
import org.eclipse.ui.IEditorInput
import org.eclipse.ui.PlatformUI
import org.osgi.framework.BundleContext
import org.scalaide.core.IScalaInstallation
import org.scalaide.core.IScalaPlugin
import org.scalaide.core.SdtConstants
import org.scalaide.core.internal.builder.zinc.CompilerBridgeStore
import org.scalaide.core.internal.jdt.model.ScalaClassFile
import org.scalaide.core.internal.jdt.model.ScalaCompilationUnit
import org.scalaide.core.internal.jdt.model.ScalaSourceFile
import org.scalaide.core.internal.project._
import org.scalaide.core.internal.project.ScalaInstallation.platformInstallation
import org.scalaide.logging.HasLogger
import org.scalaide.logging.PluginLogConfigurator
import org.scalaide.ui.internal.diagnostic
import org.scalaide.ui.internal.editor.ScalaDocumentProvider
import org.scalaide.ui.internal.migration.RegistryExtender
import org.scalaide.ui.internal.templates.ScalaTemplateManager
import org.scalaide.util.Utils.WithAsInstanceOfOpt
import org.scalaide.core.internal.statistics.Statistics
import org.scalaide.util.eclipse.OSGiUtils
import org.scalaide.util.internal.CompilerUtils._
import org.scalaide.util.internal.FixedSizeCache
import org.eclipse.jdt.internal.ui.JavaPlugin
object ScalaPlugin {
@volatile private var plugin: ScalaPlugin = _
def apply(): ScalaPlugin = plugin
}
class ScalaPlugin extends IScalaPlugin with PluginLogConfigurator with IResourceChangeListener with IElementChangedListener with HasLogger {
/**
* Check if the given version is compatible with the current plug-in version.
* Check on the major/minor number, discard the maintenance number.
*
* For example 2.9.1 and 2.9.2-SNAPSHOT are compatible versions whereas
* 2.8.1 and 2.9.0 aren't.
*/
def isCompatibleVersion(version: ScalaVersion, project: ScalaProject): Boolean = project.getCompatibilityMode match {
case Same ⇒
isBinarySame(ScalaVersion.current, version) // don't treat 2 unknown versions as equal
case Previous ⇒
isBinaryPrevious(ScalaVersion.current, version)
case Subsequent ⇒
isBinarySubsequent(ScalaVersion.current, version)
}
private lazy val sdtCoreBundle = getBundle()
lazy val zincCompilerBundle = Platform.getBundle(SdtConstants.ZincPluginId)
lazy val zincCompilerBridgeBundle = Platform.getBundle(SdtConstants.ZincCompilerBridgePluginId)
lazy val zincCompilerBridge = OSGiUtils.pathInBundle(zincCompilerBridgeBundle, "/")
lazy val templateManager = new ScalaTemplateManager()
lazy val scalaSourceFileContentType: IContentType =
Platform.getContentTypeManager().getContentType("scala.tools.eclipse.scalaSource")
lazy val scalaClassFileContentType: IContentType =
Platform.getContentTypeManager().getContentType("scala.tools.eclipse.scalaClass")
/**
* The document provider needs to exist only a single time because it caches
* compilation units (their working copies). Each `ScalaSourceFileEditor` is
* associated with this document provider.
*/
private[scalaide] lazy val documentProvider = new ScalaDocumentProvider
override def start(context: BundleContext) = {
ScalaPlugin.plugin = this
super.start(context)
if (!headlessMode) {
PlatformUI.getWorkbench.getEditorRegistry.setDefaultEditor("*.scala", SdtConstants.EditorId)
diagnostic.StartupDiagnostics.run
new RegistryExtender().perform()
}
ResourcesPlugin.getWorkspace.addResourceChangeListener(this, IResourceChangeEvent.PRE_CLOSE | IResourceChangeEvent.POST_CHANGE)
JavaCore.addElementChangedListener(this)
logger.info("Scala compiler bundle: " + platformInstallation.compiler.classJar.toOSString())
// force creation of statistics tracker
statistics
logger.info("Statistics tracker started")
}
override def stop(context: BundleContext) = {
ResourcesPlugin.getWorkspace.removeResourceChangeListener(this)
for {
iProject <- ResourcesPlugin.getWorkspace.getRoot.getProjects
if iProject.isOpen
scalaProject <- asScalaProject(iProject)
} scalaProject.projectSpecificStorage.save()
super.stop(context)
ScalaPlugin.plugin = null
}
/** The compiler-bridge store, located in this plugin configuration area (usually inside the metadata directory */
lazy val compilerBridgeStore: CompilerBridgeStore = new CompilerBridgeStore(Platform.getStateLocation(sdtCoreBundle), this)
/** A LRU cache of class loaders for Scala builders */
lazy val classLoaderStore: FixedSizeCache[IScalaInstallation,ClassLoader] = new FixedSizeCache(initSize = 2, maxSize = 3)
// TODO: eventually scala plugin should have its own image description registry
lazy val imageDescriptorRegistry = JavaPlugin.getImageDescriptorRegistry
// Scala project instances
private val projects = new mutable.HashMap[IProject, ScalaProject]
private lazy val stats = new Statistics
/** Returns the statistics tracker. */
def statistics = stats
override def scalaCompilationUnit(input: IEditorInput): Option[ScalaCompilationUnit] = {
def unitOfSourceFile = Option(documentProvider.getWorkingCopy(input)) map (ScalaCompilationUnit.castFrom)
def unitOfClassFile = input.getAdapter(classOf[IClassFile]) match {
case tr: ScalaClassFile => Some(tr)
case _ => None
}
unitOfSourceFile orElse unitOfClassFile
}
def getJavaProject(project: IProject) = JavaCore.create(project)
override def getScalaProject(project: IProject): ScalaProject = projects.synchronized {
projects.get(project) getOrElse {
val scalaProject = ScalaProject(project)
projects(project) = scalaProject
scalaProject
}
}
override def asScalaProject(project: IProject): Option[ScalaProject] = {
if (ScalaProject.isScalaProject(project)) {
Some(getScalaProject(project))
} else {
None
}
}
def disposeProject(project: IProject): Unit = {
projects.synchronized {
projects.get(project) foreach { (scalaProject) =>
projects.remove(project)
scalaProject.dispose()
}
}
}
/** Restart all presentation compilers in the workspace. Need to do it in order
* for them to pick up the new std out/err streams.
*/
def resetAllPresentationCompilers(): Unit = {
for {
iProject <- ResourcesPlugin.getWorkspace.getRoot.getProjects
if iProject.isOpen
scalaProject <- asScalaProject(iProject)
} scalaProject.presentationCompiler.askRestart()
}
override def resourceChanged(event: IResourceChangeEvent): Unit = {
(event.getResource, event.getType) match {
case (project: IProject, IResourceChangeEvent.PRE_CLOSE) =>
disposeProject(project)
case _ =>
}
Option(event.getDelta()) foreach (_.accept(new IResourceDeltaVisitor() {
override def visit(delta: IResourceDelta): Boolean = {
// This is obtained at project opening or closing, meaning the 'openness' state changed
if (delta.getFlags == IResourceDelta.OPEN) {
val resource = delta.getResource().asInstanceOfOpt[IProject]
resource foreach { r =>
// that particular classpath check can set the Installation (used, e.g., for sbt-eclipse imports)
// setting the Installation triggers a recursive check
asScalaProject(r) foreach { p =>
try {
// It's important to save this /before/ checking classpath : classpath
// checks create their own preference modifications under some conditions.
// Doing them concurrently can wreak havoc.
p.projectSpecificStorage.save()
} finally {
p.checkClasspath(true)
}
}
}
false
}
else
true
}
}))
}
override def elementChanged(event: ElementChangedEvent): Unit = {
import scala.collection.mutable.ListBuffer
import IJavaElement._
import IJavaElementDelta._
// check if the changes are linked with the build path
val modelDelta = event.getDelta()
// check that the notification is about a change (CHANGE) of some elements (F_CHILDREN) of the java model (JAVA_MODEL)
if (modelDelta.getElement().getElementType() == JAVA_MODEL && modelDelta.getKind() == CHANGED && (modelDelta.getFlags() & F_CHILDREN) != 0) {
for (innerDelta <- modelDelta.getAffectedChildren()) {
// check that the notification no the child is about a change (CHANDED) relative to a resolved classpath change (F_RESOLVED_CLASSPATH_CHANGED)
if (innerDelta.getKind() == CHANGED && (innerDelta.getFlags() & IJavaElementDelta.F_RESOLVED_CLASSPATH_CHANGED) != 0) {
innerDelta.getElement() match {
// classpath change should only impact projects
case javaProject: IJavaProject =>
asScalaProject(javaProject.getProject()) foreach (_.classpathHasChanged(queue = false))
case _ =>
}
}
}
}
// process deleted files
val buff = new ListBuffer[ScalaSourceFile]
val changed = new ListBuffer[ICompilationUnit]
val projectsToReset = new mutable.HashSet[ScalaProject]
def findRemovedSources(delta: IJavaElementDelta): Unit = {
val isChanged = delta.getKind == CHANGED
val isRemoved = delta.getKind == REMOVED
val isAdded = delta.getKind == ADDED
def hasFlag(flag: Int) = (delta.getFlags & flag) != 0
val elem = delta.getElement
val processChildren: Boolean = elem.getElementType match {
case JAVA_MODEL =>
true
case JAVA_PROJECT if isRemoved =>
disposeProject(elem.getJavaProject.getProject)
false
case JAVA_PROJECT if !hasFlag(F_CLOSED) =>
true
case PACKAGE_FRAGMENT_ROOT =>
val hasContentChanged = isRemoved || hasFlag(F_REMOVED_FROM_CLASSPATH | F_ADDED_TO_CLASSPATH | F_ARCHIVE_CONTENT_CHANGED)
if (hasContentChanged) {
logger.info("package fragment root changed (resetting presentation compiler): " + elem.getElementName())
asScalaProject(elem.getJavaProject().getProject).foreach(projectsToReset += _)
}
!hasContentChanged
case PACKAGE_FRAGMENT =>
val hasContentChanged = isAdded || isRemoved
if (hasContentChanged) {
logger.debug("package fragment added or removed: " + elem.getElementName())
asScalaProject(elem.getJavaProject().getProject).foreach(projectsToReset += _)
}
// stop recursion here, we need to reset the PC anyway
!hasContentChanged
// TODO: the check should be done with isInstanceOf[ScalaSourceFile] instead of
// endsWith(scalaFileExtn), but it is not working for Play 2.0 because of #1000434
case COMPILATION_UNIT if isChanged && elem.getResource != null && elem.getResource.getName.endsWith(SdtConstants.ScalaFileExtn) =>
val hasContentChanged = hasFlag(IJavaElementDelta.F_CONTENT)
if (hasContentChanged)
// mark the changed Scala files to be refreshed in the presentation compiler if needed
changed += elem.asInstanceOf[ICompilationUnit]
false
case COMPILATION_UNIT if elem.isInstanceOf[ScalaSourceFile] && isRemoved =>
buff += elem.asInstanceOf[ScalaSourceFile]
false
case COMPILATION_UNIT if isAdded =>
logger.debug("added compilation unit " + elem.getElementName())
asScalaProject(elem.getJavaProject().getProject).foreach(projectsToReset += _)
false
case _ =>
false
}
if (processChildren)
delta.getAffectedChildren foreach findRemovedSources
}
findRemovedSources(event.getDelta)
// ask for the changed scala files to be refreshed in each project presentation compiler if needed
if (changed.nonEmpty) {
changed.toList groupBy (_.getJavaProject.getProject) foreach {
case (project, units) =>
asScalaProject(project) foreach { p =>
if (project.isOpen && !projectsToReset(p)) {
p.presentationCompiler(_.refreshChangedFiles(units.map(_.getResource.asInstanceOf[IFile])))
}
}
}
}
projectsToReset.foreach(_.presentationCompiler.askRestart())
if (buff.nonEmpty) {
buff.toList groupBy (_.getJavaProject.getProject) foreach {
case (project, srcs) =>
asScalaProject(project) foreach { p =>
if (project.isOpen && !projectsToReset(p))
p.presentationCompiler.internal (_.filesDeleted(srcs))
}
}
}
}
}
| sschaef/scala-ide | org.scala-ide.sdt.core/src/org/scalaide/core/internal/ScalaPlugin.scala | Scala | bsd-3-clause | 13,566 |
// Copyright (c) 2010, Stephen D. Strowes
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * The author named in the above copyright notice may not be used to
// endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
package com.sdstrowes.util
import scala.collection.immutable._
object BGPPathsToLinks {
import scala.io.Source
def deriveASNum(in: String) = {
val asN = in.split('.')
if (asN.size > 1)
asN(0).toInt * 65536 + asN(1).toInt
else
asN(0).toInt
}
def main(args: Array[String]) : Unit = {
val lines = Source.fromInputStream(System.in).getLines()
var nodes = HashMap[Int, Set[Int]]()
for (line <- lines) {
val asnums = line.trim.split(" ")
for (i <- 0 until asnums.size-1) {
try {
val as1 = deriveASNum(asnums(i))
val as2 = deriveASNum(asnums(i+1))
if (as1 != as2) {
try {
nodes = nodes + (as1 -> (nodes(as1) + as2))
} catch {
case e:NoSuchElementException => {
nodes = nodes + (as1 -> Set(as2))
}
}
try {
nodes = nodes + (as2 -> (nodes(as2) + as1))
} catch {
case e:NoSuchElementException => {
nodes = nodes + (as2 -> Set(as1))
}
}
}
} catch {
/* deriveASNum can throw an exception if the input is not an
* integer. (A strange pre-pending bug noticed via
* routeviews.eqix appears to give an AS number 3699236992,
* which fails.) Numbers that are not numbers will be
* skipped, but the rest of the path will be output. */
case e:NumberFormatException => {}
}
}
}
for (key <- nodes.keys) {
for (value <- nodes(key)) {
println(key+" "+value)
}
}
}
}
| sdstrowes/scala-util | BGPPathsToLinks.scala | Scala | bsd-3-clause | 3,647 |
/* Copyright (C) 2009-2010 Univ of Massachusetts Amherst, Computer Science Dept
This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible)
http://factorie.cs.umass.edu, http://code.google.com/p/factorie/
This software is provided under the terms of the Eclipse Public License 1.0
as published by http://www.opensource.org. For further information,
see the file `LICENSE.txt' included with this distribution. */
package cc.factorie
import scala.collection.mutable.{ListBuffer,HashSet,ArrayBuffer}
/** Similar to the definition of "gate" from Tom Minka and Jon Winn.
A discrete variable that indicates which one among a finite number relations are currently in effect.
Here can be thought of as a collection of @ref{GatedRefVariable}s.
Primarily used to implement a "mixture choice" in finite mixture models.
@see MixtureChoice
@author Andrew McCallum */
trait Gate extends DiscreteVariable {
/** The collection of variable references controlled by the gate. */
private var _gatedRefs: List[AbstractGatedRefVariable] = Nil
def gatedRefs: List[AbstractGatedRefVariable] = _gatedRefs
def +=(v:AbstractGatedRefVariable): this.type = {
//println("Gate.+= "+v)
assert(_gatedRefs ne null)
assert(v.domainSize == domainSize)
_gatedRefs = v :: _gatedRefs
assert(v.gate == this)
//println("Gate.+= setByIndex="+this.intValue) // xxx
v.setByIndex(this.intValue)(null)
this
}
override def set(newIndex:Int)(implicit d:DiffList): Unit = {
super.set(newIndex)
//println("Gate.setByIndex _gatedRefs="+_gatedRefs) // xxx
//new Exception().printStackTrace()
if (_gatedRefs ne null) for (ref <- _gatedRefs) {
//println("Gate.setByIndex ref="+ref)
ref.setByIndex(newIndex)
}
}
def setToNull(implicit d:DiffList): Unit = {
super.set(-1)
for (ref <- _gatedRefs) ref.setToNull
}
}
/** Abstract stand-in for GatedRefVariable that doesn't take type parameters.
Among other things, this avoids impossible contravariant typing in MixtureComponentRef.
@author Andrew McCallum */
trait AbstractGatedRefVariable {
def gate: Gate
//def gate_=(g:Gate): Unit
def domainSize: Int
def setToNull(implicit d:DiffList): Unit
def setByIndex(newIndex:Int)(implicit d:DiffList): Unit
def abstractValue: AnyRef
}
/** A RefVariable whose value is controled by a Gate. This is used as a reference to the Distribution of samples generated from a Mixture.
@author Andrew McCallum */
trait GatedRefVariable[A<:AnyRef] extends RefVariable[A] with AbstractGatedRefVariable {
type VariableType <: GatedRefVariable[A]
//private var _gate: Gate = null
def gate: Gate // TODO Are we sure we need to know who our gate is? Can we save memory by deleting this?
//def gate_=(g:Gate): Unit = if (_gate == null) _gate = g else throw new Error("Gate already set.")
// Not the current value of this GatedRefVariable.
// Returns the value associated with a certain integer index value of the gate.
// The gate uses this to call grf.set(grf.value(this.intValue)).
def valueForIndex(index:Int): A
def setByIndex(index:Int)(implicit d:DiffList): Unit = set(valueForIndex(index))
def setToNull(implicit d:DiffList): Unit = set(null.asInstanceOf[A])
def domainSize: Int
}
| andrewmilkowski/factorie | src/main/scala/cc/factorie/Gate.scala | Scala | epl-1.0 | 3,306 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.execution
import org.apache.spark.sql.execution.adaptive.DisableAdaptiveExecutionSuite
import org.apache.spark.sql.execution.command.DataWritingCommandExec
import org.apache.spark.sql.execution.metric.SQLMetricsTestUtils
import org.apache.spark.sql.hive.HiveUtils
import org.apache.spark.sql.hive.test.TestHiveSingleton
// Disable AQE because metric info is different with AQE on/off
class SQLMetricsSuite extends SQLMetricsTestUtils with TestHiveSingleton
with DisableAdaptiveExecutionSuite {
test("writing data out metrics: hive") {
testMetricsNonDynamicPartition("hive", "t1")
}
test("writing data out metrics dynamic partition: hive") {
withSQLConf(("hive.exec.dynamic.partition.mode", "nonstrict")) {
testMetricsDynamicPartition("hive", "hive", "t1")
}
}
test("SPARK-34567: Add metrics for CTAS operator") {
Seq(false, true).foreach { canOptimized =>
withSQLConf(HiveUtils.CONVERT_METASTORE_CTAS.key -> canOptimized.toString) {
withTable("t") {
val df = sql(s"CREATE TABLE t STORED AS PARQUET AS SELECT 1 as a")
val dataWritingCommandExec =
df.queryExecution.executedPlan.asInstanceOf[DataWritingCommandExec]
dataWritingCommandExec.executeCollect()
val createTableAsSelect = dataWritingCommandExec.cmd
if (canOptimized) {
assert(createTableAsSelect.isInstanceOf[OptimizedCreateHiveTableAsSelectCommand])
} else {
assert(createTableAsSelect.isInstanceOf[CreateHiveTableAsSelectCommand])
}
assert(createTableAsSelect.metrics.contains("numFiles"))
assert(createTableAsSelect.metrics("numFiles").value == 1)
assert(createTableAsSelect.metrics.contains("numOutputBytes"))
assert(createTableAsSelect.metrics("numOutputBytes").value > 0)
assert(createTableAsSelect.metrics.contains("numOutputRows"))
assert(createTableAsSelect.metrics("numOutputRows").value == 1)
}
}
}
}
}
| BryanCutler/spark | sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLMetricsSuite.scala | Scala | apache-2.0 | 2,847 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.graphx.lib
import scala.reflect.ClassTag
import org.apache.spark.graphx._
/** Label Propagation algorithm. */
object LabelPropagation {
/**
* Run static Label Propagation for detecting communities in networks.
*
* Each node in the network is initially assigned to its own community. At every superstep, nodes
* send their community affiliation to all neighbors and update their state to the mode community
* affiliation of incoming messages.
*
* LPA is a standard community detection algorithm for graphs. It is very inexpensive
* computationally, although (1) convergence is not guaranteed and (2) one can end up with
* trivial solutions (all nodes are identified into a single community).
*
* @tparam ED the edge attribute type (not used in the computation)
*
* @param graph the graph for which to compute the community affiliation
* @param maxSteps the number of supersteps of LPA to be performed. Because this is a static
* implementation, the algorithm will run for exactly this many supersteps.
*
* @return a graph with vertex attributes containing the label of community affiliation
*/
def run[VD, ED: ClassTag](graph: Graph[VD, ED], maxSteps: Int): Graph[VertexId, ED] = {
require(maxSteps > 0, s"Maximum of steps must be greater than 0, but got ${maxSteps}")
val lpaGraph = graph.mapVertices { case (vid, _) => vid }
def sendMessage(e: EdgeTriplet[VertexId, ED]): Iterator[(VertexId, Map[VertexId, Long])] = {
Iterator((e.srcId, Map(e.dstAttr -> 1L)), (e.dstId, Map(e.srcAttr -> 1L)))
}
def mergeMessage(count1: Map[VertexId, Long], count2: Map[VertexId, Long])
: Map[VertexId, Long] = {
(count1.keySet ++ count2.keySet).map { i =>
val count1Val = count1.getOrElse(i, 0L)
val count2Val = count2.getOrElse(i, 0L)
i -> (count1Val + count2Val)
}.toMap
}
def vertexProgram(vid: VertexId, attr: Long, message: Map[VertexId, Long]): VertexId = {
if (message.isEmpty) attr else message.maxBy(_._2)._1
}
val initialMessage = Map[VertexId, Long]()
Pregel(lpaGraph, initialMessage, maxIterations = maxSteps)(
vprog = vertexProgram,
sendMsg = sendMessage,
mergeMsg = mergeMessage)
}
}
| aokolnychyi/spark | graphx/src/main/scala/org/apache/spark/graphx/lib/LabelPropagation.scala | Scala | apache-2.0 | 3,086 |
/**
* Copyright 2011-2017 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import io.gatling.core.session.Session
import scala.concurrent.duration._
import io.gatling.core.Predef._
import io.gatling.http.Predef._
class Scenario {
//#bootstrapping
scenario("My Scenario")
//#bootstrapping
//#exec-example
scenario("My Scenario")
.exec(http("Get Homepage").get("http://github.com/gatling/gatling"))
//#exec-example
//#session-lambda
exec { session =>
// displays the content of the session in the console (debugging only)
println(session)
// return the original session
session
}
exec { session =>
// return a new session instance with a new "foo" attribute whose value is "bar"
session.set("foo", "bar")
}
//#session-lambda
def someSessionBasedCondition(session: Session): Boolean = true
//#session-improper
exec { session =>
if (someSessionBasedCondition(session)) {
// just create a builder that is immediately discarded, hence doesn't do anything
// you should be using a doIf here
http("Get Homepage").get("http://github.com/gatling/gatling")
}
session
}
//#session-improper
//#flattenMapIntoAttributes
// assuming the Session contains an attribute named "theMap" whose content is :
// Map("foo" -> "bar", "baz" -> "qix")
exec(flattenMapIntoAttributes("${theMap}"))
// the Session contains 2 new attributes "foo" and "baz".
//#flattenMapIntoAttributes
//#pace
forever(
pace(5 seconds)
.exec(
pause(1 second, 4 seconds) // Will be run every 5 seconds, irrespective of what pause time is used
)
)
//#pace
val times = 4
val counterName, sequenceName, elementName = "foo"
val myChain = exec(Session.Identity(_))
val condition, exitASAP = true
val duration = 5 seconds
//#repeat-example
repeat(times, counterName) {
myChain
}
//#repeat-example
//#repeat-variants
repeat(20) {myChain} // will loop on myChain 20 times
repeat("${myKey}") {myChain} // will loop on myChain (Int value of the Session attribute myKey) times
repeat(session => session("foo").as[Int] /* or anything that returns an Int*/) {myChain}
//#repeat-variants
//#foreach
foreach(sequenceName, elementName, counterName) {
myChain
}
//#foreach
//#during
during(duration, counterName, exitASAP) {
myChain
}
//#during
//#asLongAs
asLongAs(condition, counterName, exitASAP) {
myChain
}
//#asLongAs
//#doWhile
doWhile(condition, counterName) {
myChain
}
//#doWhile
//#asLongAsDuring
asLongAsDuring(condition, duration, counterName) {
myChain
}
//#asLongAsDuring
//#doWhileDuring
doWhileDuring(condition, duration, counterName) {
myChain
}
//#doWhileDuring
//#forever
forever(counterName) {
myChain
}
//#forever
//#doIf
doIf("${myBoolean}") {
// executed if the session value stored in "myBoolean" is true
exec(http("...").get("..."))
}
//#doIf
//#doIf-session
doIf(session => session("myKey").as[String].startsWith("admin")) {
// executed if the session value stored in "myKey" starts with "admin"
exec(http("if true").get("..."))
}
//#doIf-session
//#doIfEquals
doIfEquals("${actualValue}", "expectedValue") {
// executed if the session value stored in "actualValue" is equal to "expectedValue"
exec(http("...").get("..."))
}
//#doIfEquals
//#doIfOrElse
doIfOrElse(session => session("myKey").as[String].startsWith("admin")) {
// executed if the session value stored in "myKey" starts with "admin"
exec(http("if true").get("..."))
} {
// executed if the session value stored in "myKey" does not start with "admin"
exec(http("if false").get("..."))
}
//#doIfOrElse
//#doIfEqualsOrElse
doIfEqualsOrElse(session => session("actualValue").as[String], "expectedValue") {
// executed if the session value stored in "actualValue" equals to "expectedValue"
exec(http("if true").get("..."))
} {
// executed if the session value stored in "actualValue" is not equal to "expectedValue"
exec(http("if false").get("..."))
}
//#doIfEqualsOrElse
val chain1, chain2, myFallbackChain = myChain
val key1, key2 = "foo"
val percentage1, percentage2 = .50
//#doSwitch
doSwitch("${myKey}") ( // beware: use parentheses, not curly braces!
key1 -> chain1,
key1-> chain2
)
//#doSwitch
//#doSwitchOrElse
doSwitchOrElse("${myKey}") ( // beware: use parentheses, not curly braces!
key1 -> chain1,
key1-> chain2
) (
myFallbackChain
)
//#doSwitchOrElse
//#randomSwitch
randomSwitch( // beware: use parentheses, not curly braces!
percentage1 -> chain1,
percentage2 -> chain2
)
//#randomSwitch
//#randomSwitchOrElse
randomSwitchOrElse( // beware: use parentheses, not curly braces!
percentage1 -> chain1,
percentage2 -> chain2
) {
myFallbackChain
}
//#randomSwitchOrElse
//#uniformRandomSwitch
uniformRandomSwitch( // beware: use parentheses, not curly braces!
chain1,
chain2
)
//#uniformRandomSwitch
//#roundRobinSwitch
roundRobinSwitch( // beware: use parentheses, not curly braces!
chain1,
chain2
)
//#roundRobinSwitch
//#tryMax
tryMax(times, counterName) {
myChain
}
//#tryMax
//#exitBlockOnFail
exitBlockOnFail {
myChain
}
//#exitBlockOnFail
//#exitHereIfFailed
exitHereIfFailed
//#exitHereIfFailed
val groupName = "foo"
//#group
group(groupName) {
myChain
}
//#group
val scn = scenario("foo")
val httpConf = http
//#protocol
scn.inject(atOnceUsers(5)).protocols(httpConf)
//#protocol
//#throttling
scn.inject(rampUsers(500) over (10 minutes)).throttle(reachRps(100) in (10 seconds), holdFor(10 minutes))
//#throttling
}
| MykolaB/gatling | src/sphinx/general/code/Scenario.scala | Scala | apache-2.0 | 6,373 |
package org.opendata.data
import play.api.test._
import play.api.test.Helpers._
import org.mockito.Mockito._
import org.junit._
import org.junit.Assert._
import play.api.libs.json.Json
import play.api.libs.json._
import play.api.libs.iteratee._
import org.opengraph.data._
trait TestData {
import DataFormatter._
val USERID = 0
val newGraph = NewGraph("simpleGraph", None)
val simpleGraph = CreateGraph(newGraph, None)
val simpleDataPoint = DataPoint("10", 4, None)
def dump[T](point: T) = Json.toJson(simpleGraph)
def loadGraphs(store: DataStore) = {
store.createGraph(USERID, newGraph)
}
}
class RestControllerTest extends TestData {
var store: DataStore = _
var controller: DataController = _
@Before
def setup = {
// store = mock(classOf[DataStore])
controller = new DataController()
}
def request(method: String, location:String, json: JsValue) =
FakeRequest(method, location)
.withHeaders(("Content-Type", "application/json"))
.withJsonBody(json)
def restRequest(method: String, json: JsValue) = request(method, "/v1/data/0", json)
@Test
def basicPost = {
running(FakeApplication()) {
val req = route(restRequest(POST, dump(simpleGraph))).get
assertEquals(status(req), 200)
// assertTrue(contentAsText(req) contains "graph recieved")
// assertTrue(req.body contains "graph recieved")
}
}
// @Test
// def variousPost = {
// val r = controller.post(restRequest(POST, dump(simpleGraph)))
// }
}
| mresposito/openData | test/RestControllerTest.scala | Scala | apache-2.0 | 1,526 |
package im.actor.server.api.rpc.service.sequence
import scala.util.Try
import com.typesafe.config.{ ConfigFactory, Config }
case class SequenceServiceConfig(maxDifferenceSize: Long)
object SequenceServiceConfig {
def load(config: Config): Try[SequenceServiceConfig] =
for {
maxDifferenceSize ← Try(config.getBytes("max-difference-size"))
} yield SequenceServiceConfig(maxDifferenceSize)
def load(): Try[SequenceServiceConfig] =
load(ConfigFactory.load().getConfig("enabled-modules.sequence"))
} | luoxiaoshenghustedu/actor-platform | actor-server/actor-rpc-api/src/main/scala/im/actor/server/api/rpc/service/sequence/SequenceServiceConfig.scala | Scala | mit | 523 |
/*
* Copyright 2014 IBM Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ibm.spark.kernel.protocol.v5.content
import org.scalatest.{FunSpec, Matchers}
import play.api.data.validation.ValidationError
import play.api.libs.json._
class HistoryRequestSpec extends FunSpec with Matchers {
val historyRequestJson: JsValue = Json.parse("""
{
"output": true,
"ras": true,
"hist_access_type": "<STRING>",
"session": 1,
"start": 0,
"stop": 5,
"n": 1,
"pattern": "<STRING>",
"unique": true
}
""")
val historyRequest = HistoryRequest(
true, true, "<STRING>", 1, 0, 5, 1, "<STRING>", true
)
describe("HistoryRequest") {
describe("implicit conversions") {
it("should implicitly convert from valid json to a HistoryRequest instance") {
// This is the least safe way to convert as an error is thrown if it fails
historyRequestJson.as[HistoryRequest] should be (historyRequest)
}
it("should also work with asOpt") {
// This is safer, but we lose the error information as it returns
// None if the conversion fails
val newCompleteRequest = historyRequestJson.asOpt[HistoryRequest]
newCompleteRequest.get should be (historyRequest)
}
it("should also work with validate") {
// This is the safest as it collects all error information (not just first error) and reports it
val CompleteRequestResults = historyRequestJson.validate[HistoryRequest]
CompleteRequestResults.fold(
(invalid: Seq[(JsPath, Seq[ValidationError])]) => println("Failed!"),
(valid: HistoryRequest) => valid
) should be (historyRequest)
}
it("should implicitly convert from a HistoryRequest instance to valid json") {
Json.toJson(historyRequest) should be (historyRequestJson)
}
}
}
}
| bpburns/spark-kernel | protocol/src/test/scala/com/ibm/spark/kernel/protocol/v5/content/HistoryRequestSpec.scala | Scala | apache-2.0 | 2,396 |
package br.unb.cic.poo.gol
import scala.io.StdIn.{readInt, readLine}
/**
* Representa o componente View do GoL
*
* @author Breno Xavier (baseado na implementacao Java de rbonifacio@unb.br
*/
object GameView {
private final val CONWAY = 1
private final val HIGHLIFE = 2
//private final val SEEDS = 3
private final val LINE = "+-----+"
private final val DEAD_CELL = "| |"
private final val ALIVE_CELL = "| o |"
private final val INVALID_OPTION = 0
private final val MAKE_CELL_ALIVE = 1
private final val NEXT_GENERATION = 2
private final val HALT = 3
/**
* Keeper of rules: define o conjunto de regras a serem utilizados e injeta as regras na classe Rules, que controlara
* o jogo a partir deste ponto.
*/
def configureRules: Unit = {
var option = 0
do{
println("Which set of rules do you wish to use?\\n\\n")
println("[1] Conway")
println("[2] HighLife")
//println("[3] Seeds")
print("\\n\\n Rule: ")
option = parseOptions(scala.io.StdIn.readLine())
}while (option == 0)
option match {
case CONWAY => { }
case HIGHLIFE => GameController.rules = new RuleBook(new HighLifeRules)
//case SEEDS => return SEEDS
case _ => 0
}
def parseOptions(option: String): Int = option match {
case "1" => CONWAY
case "2" => HIGHLIFE
//case "3" => SEEDS
case _ => INVALID_OPTION
}
}
/**
* Atualiza o componente view (representado pela classe GameBoard),
* possivelmente como uma resposta a uma atualizacao do jogo.
*/
def update {
printFirstRow
printLine
for(i <- (0 until Main.height)) {
for(j <- (0 until Main.width)) {
print(if (GameController.rules.isCellAlive(i, j)) ALIVE_CELL else DEAD_CELL);
}
println(" " + i)
printLine
}
printOptions
}
private def printOptions {
var option = 0
println("\\n\\n")
do{
println("Select one of the options: \\n \\n");
println("[1] Make a cell alive");
println("[2] Next generation");
println("[3] Halt");
print("\\n \\n Option: ");
option = parseOption(readLine)
}while(option == 0)
option match {
case MAKE_CELL_ALIVE => makeCellAlive
case NEXT_GENERATION => nextGeneration
case HALT => halt
}
}
private def makeCellAlive {
var i = 0
var j = 0
do {
print("\\n Inform the row number (0 - " + (Main.height - 1) + "): ")
i = readInt
print("\\n Inform the column number (0 - " + (Main.width - 1) + "): ")
j = readInt
} while(!validPosition(i,j))
GameController.makeCellAlive(i, j)
}
private def nextGeneration = GameController.nextGeneration
private def halt = GameController.halt
private def validPosition(i: Int, j: Int): Boolean = {
println(i);
println(j);
i >= 0 && i < Main.height && j >= 0 && j < Main.width
}
def parseOption(option: String): Int = option match {
case "1" => MAKE_CELL_ALIVE
case "2" => NEXT_GENERATION
case "3" => HALT
case _ => INVALID_OPTION
}
/* Imprime uma linha usada como separador das linhas do tabuleiro */
private def printLine() {
for(j <- (0 until Main.width)) {
print(LINE)
}
println()
}
/*
* Imprime os identificadores das colunas na primeira linha do tabuleiro
*/
private def printFirstRow {
println("\\n \\n");
for(j <- (0 until Main.width)) {
print(" " + j + " ")
}
println()
}
} | PeterTowers/TP1-022017 | GoLScala/GoLScala_ID/src/br/unb/cic/poo/gol/GameView.scala | Scala | mit | 3,463 |
/**
* Copyright 2011-2017 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.http.action.ws2
import io.gatling.commons.validation.Validation
import io.gatling.core.action.{ Action, ExitableAction, RequestAction }
import io.gatling.core.session.{ Session, _ }
import io.gatling.core.stats.StatsEngine
import io.gatling.core.util.NameGen
import io.gatling.http.action.async.ws.WsAction
import io.gatling.http.action.ws2.fsm.ClientCloseRequest
class WsClose(
override val requestName: Expression[String],
wsName: String,
override val statsEngine: StatsEngine,
val next: Action
) extends RequestAction with WsAction with ExitableAction with NameGen {
override val name = genName("wsClose")
override def sendRequest(requestName: String, session: Session): Validation[Unit] =
for {
wsActor <- fetchActor(wsName, session)
} yield {
logger.info(s"Closing websocket '$wsName': Scenario '${session.scenario}', UserId #${session.userId}")
wsActor ! ClientCloseRequest(requestName, session, next)
}
}
| MykolaB/gatling | gatling-http/src/main/scala/io/gatling/http/action/ws2/WsClose.scala | Scala | apache-2.0 | 1,637 |
package ch.ethz.dalab.dissolve.app
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import breeze.linalg.Vector
import ch.ethz.dalab.dissolve.classification.StructSVMModel
import ch.ethz.dalab.dissolve.classification.StructSVMWithDBCFW
import ch.ethz.dalab.dissolve.optimization.DissolveFunctions
import ch.ethz.dalab.dissolve.optimization.GapThresholdCriterion
import ch.ethz.dalab.dissolve.optimization.SolverOptions
import ch.ethz.dalab.dissolve.regression.LabeledObject
/**
* This defines the x-part of the training example
* For example, in case of sequence OCR this would be a (d x n) matrix, with
* each column containing the pixel representation of a character
*/
case class Pattern() {
}
/**
* This defined the y-part of the training example
* Once again, in case of OCR this would be a n-dimensional vector, with the
* i-th element containing label for i-th character of x.
*/
case class Label() {
}
/**
* This is the core of your Structured SVM application.
* In here, you'll find three functions and a driver program that you'll need
* to fill in to get your application running.
*
* The interface is inspired by SVM^struct by Joachims et al.
* (http://www.cs.cornell.edu/people/tj/svm_light/svm_struct.html)
*/
object DSApp extends DissolveFunctions[Pattern, Label] {
/**
* ============== Joint feature Map: \\phi(x, y) ==============
*
* This encodes the complex input-output (x, y) pair in a Vector space (done
* using vectors from the Breeze library)
*/
def featureFn(x: Pattern, y: Label): Vector[Double] = {
// Insert code here for Joint feature map here
???
}
/**
* ============== Structured Loss Function: \\Delta(y, y^m) ==============
*
* Loss for predicting <yPredicted> instead of <yTruth>.
* This needs to be 0 if <yPredicted> == <yTruth>
*/
def lossFn(yPredicted: Label, yTruth: Label): Double = {
// Insert code for Loss function here
???
}
/**
* ============== Maximization Oracle: H^m(w) ==============
*
* Finds the most violating constraint by solving the loss-augmented decoding
* subproblem.
* This is equivalent to predicting
* y* = argmax_{y} \\Delta(y, y^m) + < w, \\phi(x^m, y) >
* for some training example (x^m, y^m) and parameters w
*
* Make sure the loss-augmentation is consistent with the \\Delta defined above.
*
* By default, the prediction function calls this oracle with y^m = null.
* In which case, the loss-augmentation can be skipped using a simple check
* on y^m.
*
* For examples, or common oracle/decoding functions (like BP Loopy, Viterbi
* or BP on Chain CF) refer to the examples package.
*/
def oracleFn(model: StructSVMModel[Pattern, Label], x: Pattern, y: Label): Label = {
val weightVec = model.weights
// Insert code for maximization Oracle here
???
}
/**
* ============== Prediction Function ==============
*
* Finds the best output candidate for x, given parameters w.
* This is equivalent to solving:
* y* = argmax_{y} < w, \\phi(x^m, y) >
*
* Note that this is very similar to the maximization oracle, but without
* the loss-augmentation. So, by default, we call the oracle function by
* setting y as null.
*/
def predictFn(model: StructSVMModel[Pattern, Label], x: Pattern): Label =
oracleFn(model, x, null)
/**
* ============== Driver ==============
*
* This is the entry point into the program.
* In here, we initialize the SparkContext, set the parameters and call the
* optimization routine.
*
* To begin with the training, we'll need three things:
* a. A SparkContext instance (Defaults provided)
* b. Solver Parameters (Defaults provided)
* c. Data
*
* To execute, you should package this into a jar and provide it using
* spark-submit (http://spark.apache.org/docs/latest/submitting-applications.html).
*
* Alternately, you can right-click and Run As -> Scala Application to run
* within Eclipse.
*/
def main(args: Array[String]): Unit = {
val appname = "DSApp"
/**
* ============== Initialize Spark ==============
*
* Alternately, use:
* val conf = new SparkConf().setAppName(appname).setMaster("local[4]")
* if you're planning to execute within Eclipse using 4 cores
*/
val conf = new SparkConf().setAppName(appname)
val sc = new SparkContext(conf)
sc.setCheckpointDir("checkpoint-files")
/**
* ============== Set Solver parameters ==============
*/
val solverOptions = new SolverOptions[Pattern, Label]()
// Regularization paramater
solverOptions.lambda = 0.01
// Stopping criterion
solverOptions.stoppingCriterion = GapThresholdCriterion
solverOptions.gapThreshold = 1e-3
solverOptions.gapCheck = 25 // Checks for gap every gapCheck rounds
// Set the fraction of data to be used in training during each round
// In this case, 50% of the data is uniformly sampled for training at the
// beginning of each round
solverOptions.sampleFrac = 0.5
// Set how many partitions you want to split the data into.
// These partitions will be local to each machine and the respective dual
// variables associated with these partitions will reside locally.
// Ideally, you want to set this to: #cores x #workers x 2.
// If this is disabled, Spark decides on the partitioning, which be may
// be suboptimal.
solverOptions.enableManualPartitionSize = true
solverOptions.NUM_PART = 8
// Optionally, you can enable obtaining additional statistics like the
// the training, test errors w.r.t to rounds, along with the gap
// This is expensive as it involves a complete pass through the data.
solverOptions.debug = false
// This computes the statistics every debugMultiplier^i rounds.
// So, in this case, it does so in 1, 2, 4, 8, ...
// Beyond the 50th round, statistics is collected every 10 rounds.
solverOptions.debugMultiplier = 2
// Writes the statistics in CSV format in the provided path
solverOptions.debugInfoPath = "path/to/statistics.csv"
/**
* ============== Provide Data ==============
*/
val trainDataRDD: RDD[LabeledObject[Pattern, Label]] = {
// Insert code to load TRAIN data here
???
}
val testDataRDD: RDD[LabeledObject[Pattern, Label]] = {
// Insert code to load TEST data here
???
}
// Optionally, set to None in case you don't want statistics on test data
solverOptions.testDataRDD = Some(testDataRDD)
/**
* ============== Training ==============
*/
val trainer: StructSVMWithDBCFW[Pattern, Label] =
new StructSVMWithDBCFW[Pattern, Label](
trainDataRDD,
DSApp,
solverOptions)
val model: StructSVMModel[Pattern, Label] = trainer.trainModel()
/**
* ============== Store Model ==============
*
* Optionally, you can store the model's weight parameters.
*
* To load a model, you can use
* val weights = breeze.linalg.csvread(new java.io.File(weightOutPath))
* val model = new StructSVMModel[Pattern, Label](weights, 0.0, null, DSApp)
*/
val weightOutPath = "path/to/weights.csv"
val weights = model.weights.toDenseVector.toDenseMatrix
breeze.linalg.csvwrite(new java.io.File(weightOutPath), weights)
}
} | dalab/dissolve-struct | dissolve-struct-application/src/main/scala/ch/ethz/dalab/dissolve/app/DSApp.scala | Scala | apache-2.0 | 7,449 |
/***********************************************************************
* Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.index.filters
import java.util.Date
import com.typesafe.scalalogging.LazyLogging
import org.joda.time.Period
import org.locationtech.geomesa.features.SerializationOption.SerializationOptions
import org.locationtech.geomesa.features.kryo.KryoBufferSimpleFeature
import org.locationtech.geomesa.index.api.{GeoMesaFeatureIndex, GeoMesaIndexManager}
import org.locationtech.geomesa.index.iterators.IteratorCache
import org.locationtech.geomesa.utils.conf.GeoMesaSystemProperties.SystemProperty
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.opengis.feature.simple.SimpleFeatureType
import scala.util.control.NonFatal
/**
* Age-off a feature based on an attribute time
*/
trait DtgAgeOffFilter extends AgeOffFilter with LazyLogging {
protected def manager: GeoMesaIndexManager[_, _, _]
protected var sft: SimpleFeatureType = _
protected var index: GeoMesaFeatureIndex[_, _, _] = _
protected var reusableSf: KryoBufferSimpleFeature = _
protected var dtgIndex: Int = -1
override def init(options: Map[String, String]): Unit = {
import DtgAgeOffFilter.Configuration.{DtgOpt, IndexOpt, SftOpt}
super.init(options)
val spec = options(SftOpt)
sft = IteratorCache.sft(spec)
index = try { manager.index(options(IndexOpt)) } catch {
case NonFatal(_) => throw new RuntimeException(s"Index option not configured correctly: ${options.get(IndexOpt)}")
}
// noinspection ScalaDeprecation
val withId = if (index.serializedWithId) { SerializationOptions.none } else { SerializationOptions.withoutId }
reusableSf = IteratorCache.serializer(spec, withId).getReusableFeature
dtgIndex = options(DtgOpt).toInt // note: keep this last, for back-compatibility with DtgAgeOffIterator
}
override def accept(row: Array[Byte],
rowOffset: Int,
rowLength: Int,
value: Array[Byte],
valueOffset: Int,
valueLength: Int,
timestamp: Long): Boolean = {
try {
reusableSf.setBuffer(value, valueOffset, valueLength)
reusableSf.getDateAsLong(dtgIndex) > expiry
} catch {
case NonFatal(e) =>
logger.error(s"Error checking age-off for " +
Option(value).getOrElse(Array.empty).mkString("[", ",", s"], offset: $valueOffset, length: $valueLength"))
false
}
}
}
object DtgAgeOffFilter {
// configuration keys
object Configuration {
val SftOpt = "sft"
val IndexOpt = "index"
val DtgOpt = "dtg"
}
def configure(sft: SimpleFeatureType,
index: GeoMesaFeatureIndex[_, _, _],
expiry: Period,
dtgField: Option[String]): Map[String, String] = {
import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
require(!sft.isTableSharing || SystemProperty("geomesa.age-off.override").option.exists(_.toBoolean),
"DtgAgeOff filter should only be applied to features that don't use table sharing. You may override this check" +
"by setting the system property 'geomesa.age-off.override=true', however please note that age-off" +
"will affect all shared feature types in the same catalog, and may not work correctly with multiple feature" +
"types")
val dtgIndex = dtgField match {
case None =>
sft.getDtgIndex.getOrElse {
throw new IllegalArgumentException("Simple feature type does not have a valid date field")
}
case Some(dtg) =>
val i = sft.indexOf(dtg)
if (i == -1 || !classOf[Date].isAssignableFrom(sft.getDescriptor(i).getType.getBinding)) {
throw new IllegalArgumentException(s"Simple feature type does not have a valid date field '$dtg'")
}
i
}
AgeOffFilter.configure(sft, expiry) ++ Map (
Configuration.SftOpt -> SimpleFeatureTypes.encodeType(sft),
Configuration.IndexOpt -> index.identifier,
Configuration.DtgOpt -> dtgIndex.toString
)
}
}
| ronq/geomesa | geomesa-index-api/src/main/scala/org/locationtech/geomesa/index/filters/DtgAgeOffFilter.scala | Scala | apache-2.0 | 4,558 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.UnresolvedException
import org.apache.spark.sql.types.{DataType, NumericType}
/**
* The trait of the Window Specification (specified in the OVER clause or WINDOW clause) for
* Window Functions.
*/
sealed trait WindowSpec
/**
* The specification for a window function.
* @param partitionSpec It defines the way that input rows are partitioned.
* @param orderSpec It defines the ordering of rows in a partition.
* @param frameSpecification It defines the window frame in a partition.
*/
case class WindowSpecDefinition(
partitionSpec: Seq[Expression],
orderSpec: Seq[SortOrder],
frameSpecification: WindowFrame) extends Expression with WindowSpec with Unevaluable {
def validate: Option[String] = frameSpecification match {
case UnspecifiedFrame =>
Some("Found a UnspecifiedFrame. It should be converted to a SpecifiedWindowFrame " +
"during analysis. Please file a bug report.")
case frame: SpecifiedWindowFrame => frame.validate.orElse {
def checkValueBasedBoundaryForRangeFrame(): Option[String] = {
if (orderSpec.length > 1) {
// It is not allowed to have a value-based PRECEDING and FOLLOWING
// as the boundary of a Range Window Frame.
Some("This Range Window Frame only accepts at most one ORDER BY expression.")
} else if (orderSpec.nonEmpty && !orderSpec.head.dataType.isInstanceOf[NumericType]) {
Some("The data type of the expression in the ORDER BY clause should be a numeric type.")
} else {
None
}
}
(frame.frameType, frame.frameStart, frame.frameEnd) match {
case (RangeFrame, vp: ValuePreceding, _) => checkValueBasedBoundaryForRangeFrame()
case (RangeFrame, vf: ValueFollowing, _) => checkValueBasedBoundaryForRangeFrame()
case (RangeFrame, _, vp: ValuePreceding) => checkValueBasedBoundaryForRangeFrame()
case (RangeFrame, _, vf: ValueFollowing) => checkValueBasedBoundaryForRangeFrame()
case (_, _, _) => None
}
}
}
override def children: Seq[Expression] = partitionSpec ++ orderSpec
override lazy val resolved: Boolean =
childrenResolved && checkInputDataTypes().isSuccess &&
frameSpecification.isInstanceOf[SpecifiedWindowFrame]
override def toString: String = simpleString
override def nullable: Boolean = true
override def foldable: Boolean = false
override def dataType: DataType = throw new UnsupportedOperationException
}
/**
* A Window specification reference that refers to the [[WindowSpecDefinition]] defined
* under the name `name`.
*/
case class WindowSpecReference(name: String) extends WindowSpec
/**
* The trait used to represent the type of a Window Frame.
*/
sealed trait FrameType
/**
* RowFrame treats rows in a partition individually. When a [[ValuePreceding]]
* or a [[ValueFollowing]] is used as its [[FrameBoundary]], the value is considered
* as a physical offset.
* For example, `ROW BETWEEN 1 PRECEDING AND 1 FOLLOWING` represents a 3-row frame,
* from the row precedes the current row to the row follows the current row.
*/
case object RowFrame extends FrameType
/**
* RangeFrame treats rows in a partition as groups of peers.
* All rows having the same `ORDER BY` ordering are considered as peers.
* When a [[ValuePreceding]] or a [[ValueFollowing]] is used as its [[FrameBoundary]],
* the value is considered as a logical offset.
* For example, assuming the value of the current row's `ORDER BY` expression `expr` is `v`,
* `RANGE BETWEEN 1 PRECEDING AND 1 FOLLOWING` represents a frame containing rows whose values
* `expr` are in the range of [v-1, v+1].
*
* If `ORDER BY` clause is not defined, all rows in the partition is considered as peers
* of the current row.
*/
case object RangeFrame extends FrameType
/**
* The trait used to represent the type of a Window Frame Boundary.
*/
sealed trait FrameBoundary {
def notFollows(other: FrameBoundary): Boolean
}
/** UNBOUNDED PRECEDING boundary. */
case object UnboundedPreceding extends FrameBoundary {
def notFollows(other: FrameBoundary): Boolean = other match {
case UnboundedPreceding => true
case vp: ValuePreceding => true
case CurrentRow => true
case vf: ValueFollowing => true
case UnboundedFollowing => true
}
override def toString: String = "UNBOUNDED PRECEDING"
}
/** <value> PRECEDING boundary. */
case class ValuePreceding(value: Int) extends FrameBoundary {
def notFollows(other: FrameBoundary): Boolean = other match {
case UnboundedPreceding => false
case ValuePreceding(anotherValue) => value >= anotherValue
case CurrentRow => true
case vf: ValueFollowing => true
case UnboundedFollowing => true
}
override def toString: String = s"$value PRECEDING"
}
/** CURRENT ROW boundary. */
case object CurrentRow extends FrameBoundary {
def notFollows(other: FrameBoundary): Boolean = other match {
case UnboundedPreceding => false
case vp: ValuePreceding => false
case CurrentRow => true
case vf: ValueFollowing => true
case UnboundedFollowing => true
}
override def toString: String = "CURRENT ROW"
}
/** <value> FOLLOWING boundary. */
case class ValueFollowing(value: Int) extends FrameBoundary {
def notFollows(other: FrameBoundary): Boolean = other match {
case UnboundedPreceding => false
case vp: ValuePreceding => false
case CurrentRow => false
case ValueFollowing(anotherValue) => value <= anotherValue
case UnboundedFollowing => true
}
override def toString: String = s"$value FOLLOWING"
}
/** UNBOUNDED FOLLOWING boundary. */
case object UnboundedFollowing extends FrameBoundary {
def notFollows(other: FrameBoundary): Boolean = other match {
case UnboundedPreceding => false
case vp: ValuePreceding => false
case CurrentRow => false
case vf: ValueFollowing => false
case UnboundedFollowing => true
}
override def toString: String = "UNBOUNDED FOLLOWING"
}
/**
* The trait used to represent the a Window Frame.
*/
sealed trait WindowFrame
/** Used as a place holder when a frame specification is not defined. */
case object UnspecifiedFrame extends WindowFrame
/** A specified Window Frame. */
case class SpecifiedWindowFrame(
frameType: FrameType,
frameStart: FrameBoundary,
frameEnd: FrameBoundary) extends WindowFrame {
/** If this WindowFrame is valid or not. */
def validate: Option[String] = (frameType, frameStart, frameEnd) match {
case (_, UnboundedFollowing, _) =>
Some(s"$UnboundedFollowing is not allowed as the start of a Window Frame.")
case (_, _, UnboundedPreceding) =>
Some(s"$UnboundedPreceding is not allowed as the end of a Window Frame.")
// case (RowFrame, start, end) => ??? RowFrame specific rule
// case (RangeFrame, start, end) => ??? RangeFrame specific rule
case (_, start, end) =>
if (start.notFollows(end)) {
None
} else {
val reason =
s"The end of this Window Frame $end is smaller than the start of " +
s"this Window Frame $start."
Some(reason)
}
}
override def toString: String = frameType match {
case RowFrame => s"ROWS BETWEEN $frameStart AND $frameEnd"
case RangeFrame => s"RANGE BETWEEN $frameStart AND $frameEnd"
}
}
object SpecifiedWindowFrame {
/**
*
* @param hasOrderSpecification If the window spec has order by expressions.
* @param acceptWindowFrame If the window function accepts user-specified frame.
* @return
*/
def defaultWindowFrame(
hasOrderSpecification: Boolean,
acceptWindowFrame: Boolean): SpecifiedWindowFrame = {
if (hasOrderSpecification && acceptWindowFrame) {
// If order spec is defined and the window function supports user specified window frames,
// the default frame is RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW.
SpecifiedWindowFrame(RangeFrame, UnboundedPreceding, CurrentRow)
} else {
// Otherwise, the default frame is
// ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING.
SpecifiedWindowFrame(RowFrame, UnboundedPreceding, UnboundedFollowing)
}
}
}
/**
* Every window function needs to maintain a output buffer for its output.
* It should expect that for a n-row window frame, it will be called n times
* to retrieve value corresponding with these n rows.
*/
trait WindowFunction extends Expression {
def init(): Unit
def reset(): Unit
def prepareInputParameters(input: InternalRow): AnyRef
def update(input: AnyRef): Unit
def batchUpdate(inputs: Array[AnyRef]): Unit
def evaluate(): Unit
def get(index: Int): Any
def newInstance(): WindowFunction
}
case class UnresolvedWindowFunction(
name: String,
children: Seq[Expression])
extends Expression with WindowFunction with Unevaluable {
override def dataType: DataType = throw new UnresolvedException(this, "dataType")
override def foldable: Boolean = throw new UnresolvedException(this, "foldable")
override def nullable: Boolean = throw new UnresolvedException(this, "nullable")
override lazy val resolved = false
override def init(): Unit = throw new UnresolvedException(this, "init")
override def reset(): Unit = throw new UnresolvedException(this, "reset")
override def prepareInputParameters(input: InternalRow): AnyRef =
throw new UnresolvedException(this, "prepareInputParameters")
override def update(input: AnyRef): Unit = throw new UnresolvedException(this, "update")
override def batchUpdate(inputs: Array[AnyRef]): Unit =
throw new UnresolvedException(this, "batchUpdate")
override def evaluate(): Unit = throw new UnresolvedException(this, "evaluate")
override def get(index: Int): Any = throw new UnresolvedException(this, "get")
override def toString: String = s"'$name(${children.mkString(",")})"
override def newInstance(): WindowFunction = throw new UnresolvedException(this, "newInstance")
}
case class UnresolvedWindowExpression(
child: UnresolvedWindowFunction,
windowSpec: WindowSpecReference) extends UnaryExpression with Unevaluable {
override def dataType: DataType = throw new UnresolvedException(this, "dataType")
override def foldable: Boolean = throw new UnresolvedException(this, "foldable")
override def nullable: Boolean = throw new UnresolvedException(this, "nullable")
override lazy val resolved = false
}
case class WindowExpression(
windowFunction: WindowFunction,
windowSpec: WindowSpecDefinition) extends Expression with Unevaluable {
override def children: Seq[Expression] = windowFunction :: windowSpec :: Nil
override def dataType: DataType = windowFunction.dataType
override def foldable: Boolean = windowFunction.foldable
override def nullable: Boolean = windowFunction.nullable
override def toString: String = s"$windowFunction $windowSpec"
}
/**
* Extractor for making working with frame boundaries easier.
*/
object FrameBoundaryExtractor {
def unapply(boundary: FrameBoundary): Option[Int] = boundary match {
case CurrentRow => Some(0)
case ValuePreceding(offset) => Some(-offset)
case ValueFollowing(offset) => Some(offset)
case _ => None
}
}
| ArvinDevel/onlineAggregationOnSparkV2 | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/windowExpressions.scala | Scala | apache-2.0 | 12,147 |
package com.lookout.borderpatrol
import java.net.URL
import com.twitter.finagle.httpx.path.Path
/**
*
* @param name
* @param path
* @param hosts List of URLs to upstream manager
*/
case class Manager(name: String, path: Path, hosts: Set[URL])
/**
*
* @param name
* @param path
* @param hosts list of URLs to upstream manager
* @param loginPath
* @param identityManager
* @param accessManager
*/
case class LoginManager(name: String, path: Path, hosts: Set[URL],
loginPath: Path, identityManager: Manager, accessManager: Manager)
| jamescway/borderpatrol | core/src/main/scala/com/lookout/borderpatrol/Manager.scala | Scala | mit | 569 |
package com.eigengo.lift.exercise.classifiers.workflows
import akka.stream.stage.{TerminationDirective, Directive, Context, PushPullStage}
import scala.collection.mutable
/**
* Streaming stage that buffers events and slides a window over streaming input data. Transmits each observed window
* downstream.
*
* @param size size of the internal buffer and so the sliding window size
*/
class SlidingWindow[A] private (size: Int) extends PushPullStage[A, List[A]] {
require(size > 0)
private val buffer = mutable.Queue[A]()
private var isSaturated = false
override def onPush(elem: A, ctx: Context[List[A]]): Directive = {
if (buffer.length == size) {
// Buffer is full, so push new window
buffer.dequeue()
buffer.enqueue(elem)
ctx.push(buffer.toList)
} else {
// Buffer is not yet full, so keep consuming from our upstream
buffer.enqueue(elem)
if (buffer.length == size) {
// Buffer has become full, so push new window and record saturation
isSaturated = true
ctx.push(buffer.toList)
} else {
ctx.pull()
}
}
}
override def onPull(ctx: Context[List[A]]): Directive = {
if (ctx.isFinishing) {
// Streaming stage is shutting down, so we ensure that all buffer elements are flushed prior to finishing
if (buffer.isEmpty) {
// Buffer is empty, so we simply finish
ctx.finish()
} else if (buffer.length == 1) {
// Buffer is non-empty, so empty it by sending undersized (non-empty) truncated window sequence and finish
if (isSaturated) {
// Buffer was previously saturated, so head element has already been seen
buffer.dequeue()
ctx.finish()
} else {
// Buffer was never saturated, so head element needs to be pushed
ctx.pushAndFinish(List(buffer.dequeue()))
}
} else {
// Buffer is non-empty, so empty it by sending undersized (non-empty) truncated window sequence - we will eventually finish here
if (isSaturated) {
// Buffer was previously saturated, so head element has already been seen
buffer.dequeue()
ctx.push(buffer.toList)
} else {
// Buffer was never saturated, so head element should be part of truncated window
val window = buffer.toList
buffer.dequeue()
ctx.push(window)
}
}
} else {
ctx.pull()
}
}
override def onUpstreamFinish(ctx: Context[List[A]]): TerminationDirective = {
ctx.absorbTermination()
}
}
object SlidingWindow {
def apply[A](size: Int): SlidingWindow[A] = {
new SlidingWindow(size)
}
}
| lachatak/lift | server/exercise/src/main/scala/com/eigengo/lift/exercise/classifiers/workflows/SlidingWindow.scala | Scala | apache-2.0 | 2,695 |
/*
* Copyright 2015 University of Basel, Graphics and Vision Research Group
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package scalismo.mesh
import java.io.File
import java.net.URLDecoder
import scalismo.ScalismoTestSuite
import scalismo.geometry.EuclideanVector3D
import scalismo.io.MeshIO
import scalismo.utils.Random
import scala.language.implicitConversions
class MeshLineIntersectionTests extends ScalismoTestSuite {
implicit val rng: Random = Random(1024L)
describe("A intersection of a line with a tetrahedral mesh") {
object Fixture {
val path = getClass.getResource("/tetraMesh.vtk").getPath
val testMesh = MeshIO.readTetrahedralMesh(new File(URLDecoder.decode(path, "UTF-8"))).get
}
it("should contain a known intersection point") {
val mesh = Fixture.testMesh
for (_ <- 0 until 100) {
val tetId = TetrahedronId(rng.scalaRandom.nextInt(mesh.tetrahedralization.tetrahedrons.size))
// an intersection point will lie within the triangle
val tet = mesh.tetrahedralization.tetrahedron(tetId)
val tri = tet.triangles(rng.scalaRandom.nextInt(4))
val bc3 = BarycentricCoordinates.randomUniform
val v = mesh.pointSet.point(tri.ptId1).toVector * bc3.a +
mesh.pointSet.point(tri.ptId2).toVector * bc3.b +
mesh.pointSet.point(tri.ptId3).toVector * bc3.c
val intersectionPoint = v.toPoint
// random direction
val direction = EuclideanVector3D(
rng.scalaRandom.nextGaussian(),
rng.scalaRandom.nextGaussian(),
rng.scalaRandom.nextGaussian()
).normalize
// select point on the line given by direction an intersection point
val anchorPoint = intersectionPoint + direction * rng.scalaRandom.nextDouble() * 100
// try to find the intersection point
val intersections = mesh.operations.getIntersectionPoints(anchorPoint, direction)
val distances = intersections.map(ip => (ip - intersectionPoint).norm)
val closestDistanceToTrueIntersectionPoint = distances.min
closestDistanceToTrueIntersectionPoint should be < 1.0e-8
}
}
}
}
| unibas-gravis/scalismo | src/test/scala/scalismo/mesh/MeshLineIntersectionTests.scala | Scala | apache-2.0 | 2,691 |
package scutil.color
import scutil.color.extension._
object literals extends literals
trait literals
extends StringContextImplicits
| ritschwumm/scutil | modules/core/src/main/scala/scutil/color/literals.scala | Scala | bsd-2-clause | 136 |
package io.sqooba.oss.timeseries.stats
import org.scalatest.{FlatSpec, Matchers}
import scala.util.Random
class ThreadUnsafeDoubleUniformReservoirSpec extends FlatSpec with Matchers {
"A ThreadUnsafeDoubleUniformReservoir" should "report its size as at most the size of the underlying array" in {
val res = new ThreadUnsafeDoubleUniformReservoir(2)
res.size shouldBe 0
res.update(1.0)
res.size shouldBe 1
res.update(2.0)
res.size shouldBe 2
res.update(3.0)
res.size shouldBe 2
}
it should "properly handle adding many more values than the reservoir's size" in {
val res = new ThreadUnsafeDoubleUniformReservoir(10)
val rand = new Random()
noException should be thrownBy {
for (_ <- 0 to 1000) {
res.update(rand.nextDouble())
}
res.snapshot()
}
}
it should "properly add all values to the reservoir as long as it is not filled" in {
val res = new ThreadUnsafeDoubleUniformReservoir(2)
res.update(1.0)
res.update(2.0)
res.values shouldBe Array(1.0, 2.0)
}
it should "properly pass the real min and max to the snapshot" in {
val snap = new ThreadUnsafeDoubleUniformReservoir(2)
.update(1.0)
.update(2.0)
.update(3.0)
.snapshot()
snap.min shouldBe 1.0
snap.max shouldBe 3.0
}
it should "return a zero-snapshot if no value was sampled" in {
new ThreadUnsafeDoubleUniformReservoir(2)
.snapshot()
.shouldBe(Stats(.0, .0, .0, .0, .0))
}
it should "properly copy the reservoir's relevant entries if it was not filled" in {
val res = new ThreadUnsafeDoubleUniformReservoir(3)
res
.update(2.0)
.update(1.0)
res.snapshot() shouldBe Stats(1.0, 2.0, 1.5, 0.7071067811865476, 1.5)
res.values shouldBe Array(2.0, 1.0, .0)
// Fill up the reservoir
res.update(3.0)
res.values shouldBe Array(2.0, 1.0, 3.0)
// Now the snapshot function passes the whole reservoir to underlying logic
res.snapshot() shouldBe Stats(1.0, 3.0, 2.0, 1.0, 2.0)
res.values shouldBe Array(1.0, 2.0, 3.0)
}
}
| Shastick/tslib | src/test/scala/io/sqooba/oss/timeseries/stats/ThreadUnsafeDoubleUniformReservoirSpec.scala | Scala | mit | 2,092 |
/*
* Copyright (c) 2012-2017 by its authors. Some rights reserved.
* See the project homepage at: https://github.com/monix/shade
*
* Licensed under the MIT License (the "License"); you may not use this
* file except in compliance with the License. You may obtain a copy
* of the License at:
*
* https://github.com/monix/shade/blob/master/LICENSE.txt
*/
package shade.memcached
import java.io._
import scala.annotation.implicitNotFound
import scala.language.implicitConversions
import scala.reflect.ClassTag
import scala.util.control.NonFatal
/**
* Represents a type class that needs to be implemented
* for serialization/deserialization to work.
*/
@implicitNotFound("Could not find any Codec implementation for type ${T}. Please provide one or import shade.memcached.MemcachedCodecs._")
trait Codec[T] {
def serialize(value: T): Array[Byte]
def deserialize(data: Array[Byte]): T
}
object Codec extends BaseCodecs
trait BaseCodecs {
implicit object IntBinaryCodec extends Codec[Int] {
def serialize(value: Int): Array[Byte] =
Array(
(value >>> 24).asInstanceOf[Byte],
(value >>> 16).asInstanceOf[Byte],
(value >>> 8).asInstanceOf[Byte],
value.asInstanceOf[Byte]
)
def deserialize(data: Array[Byte]): Int =
(data(0).asInstanceOf[Int] & 255) << 24 |
(data(1).asInstanceOf[Int] & 255) << 16 |
(data(2).asInstanceOf[Int] & 255) << 8 |
data(3).asInstanceOf[Int] & 255
}
implicit object DoubleBinaryCodec extends Codec[Double] {
import java.lang.{ Double => JvmDouble }
def serialize(value: Double): Array[Byte] = {
val l = JvmDouble.doubleToLongBits(value)
LongBinaryCodec.serialize(l)
}
def deserialize(data: Array[Byte]): Double = {
val l = LongBinaryCodec.deserialize(data)
JvmDouble.longBitsToDouble(l)
}
}
implicit object FloatBinaryCodec extends Codec[Float] {
import java.lang.{ Float => JvmFloat }
def serialize(value: Float): Array[Byte] = {
val i = JvmFloat.floatToIntBits(value)
IntBinaryCodec.serialize(i)
}
def deserialize(data: Array[Byte]): Float = {
val i = IntBinaryCodec.deserialize(data)
JvmFloat.intBitsToFloat(i)
}
}
implicit object LongBinaryCodec extends Codec[Long] {
def serialize(value: Long): Array[Byte] =
Array(
(value >>> 56).asInstanceOf[Byte],
(value >>> 48).asInstanceOf[Byte],
(value >>> 40).asInstanceOf[Byte],
(value >>> 32).asInstanceOf[Byte],
(value >>> 24).asInstanceOf[Byte],
(value >>> 16).asInstanceOf[Byte],
(value >>> 8).asInstanceOf[Byte],
value.asInstanceOf[Byte]
)
def deserialize(data: Array[Byte]): Long =
(data(0).asInstanceOf[Long] & 255) << 56 |
(data(1).asInstanceOf[Long] & 255) << 48 |
(data(2).asInstanceOf[Long] & 255) << 40 |
(data(3).asInstanceOf[Long] & 255) << 32 |
(data(4).asInstanceOf[Long] & 255) << 24 |
(data(5).asInstanceOf[Long] & 255) << 16 |
(data(6).asInstanceOf[Long] & 255) << 8 |
data(7).asInstanceOf[Long] & 255
}
implicit object BooleanBinaryCodec extends Codec[Boolean] {
def serialize(value: Boolean): Array[Byte] =
Array((if (value) 1 else 0).asInstanceOf[Byte])
def deserialize(data: Array[Byte]): Boolean =
data.isDefinedAt(0) && data(0) == 1
}
implicit object CharBinaryCodec extends Codec[Char] {
def serialize(value: Char): Array[Byte] = Array(
(value >>> 8).asInstanceOf[Byte],
value.asInstanceOf[Byte]
)
def deserialize(data: Array[Byte]): Char =
((data(0).asInstanceOf[Int] & 255) << 8 |
data(1).asInstanceOf[Int] & 255)
.asInstanceOf[Char]
}
implicit object ShortBinaryCodec extends Codec[Short] {
def serialize(value: Short): Array[Byte] = Array(
(value >>> 8).asInstanceOf[Byte],
value.asInstanceOf[Byte]
)
def deserialize(data: Array[Byte]): Short =
((data(0).asInstanceOf[Short] & 255) << 8 |
data(1).asInstanceOf[Short] & 255)
.asInstanceOf[Short]
}
implicit object StringBinaryCodec extends Codec[String] {
def serialize(value: String): Array[Byte] = value.getBytes("UTF-8")
def deserialize(data: Array[Byte]): String = new String(data, "UTF-8")
}
implicit object ArrayByteBinaryCodec extends Codec[Array[Byte]] {
def serialize(value: Array[Byte]): Array[Byte] = value
def deserialize(data: Array[Byte]): Array[Byte] = data
}
}
trait GenericCodec {
private[this] class GenericCodec[S <: Serializable](classTag: ClassTag[S]) extends Codec[S] {
def using[T <: Closeable, R](obj: T)(f: T => R): R =
try
f(obj)
finally
try obj.close() catch {
case NonFatal(_) => // does nothing
}
def serialize(value: S): Array[Byte] =
using (new ByteArrayOutputStream()) { buf =>
using (new ObjectOutputStream(buf)) { out =>
out.writeObject(value)
out.close()
buf.toByteArray
}
}
def deserialize(data: Array[Byte]): S =
using (new ByteArrayInputStream(data)) { buf =>
val in = new GenericCodecObjectInputStream(classTag, buf)
using (in) { inp =>
inp.readObject().asInstanceOf[S]
}
}
}
implicit def AnyRefBinaryCodec[S <: Serializable](implicit ev: ClassTag[S]): Codec[S] =
new GenericCodec[S](ev)
}
trait MemcachedCodecs extends BaseCodecs with GenericCodec
object MemcachedCodecs extends MemcachedCodecs
| lloydmeta/shade | src/main/scala/shade/memcached/Codec.scala | Scala | mit | 5,575 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.streaming
import java.io._
import java.net.URI
import scala.util.Random
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs._
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.util.quietly
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.util.Utils
abstract class CheckpointFileManagerTests extends SparkFunSuite {
def createManager(path: Path): CheckpointFileManager
test("mkdirs, list, createAtomic, open, delete, exists") {
withTempPath { p =>
val basePath = new Path(p.getAbsolutePath)
val fm = createManager(basePath)
// Mkdirs
val dir = new Path(s"$basePath/dir/subdir/subsubdir")
assert(!fm.exists(dir))
fm.mkdirs(dir)
assert(fm.exists(dir))
fm.mkdirs(dir)
// List
val acceptAllFilter = new PathFilter {
override def accept(path: Path): Boolean = true
}
val rejectAllFilter = new PathFilter {
override def accept(path: Path): Boolean = false
}
assert(fm.list(basePath, acceptAllFilter).exists(_.getPath.getName == "dir"))
assert(fm.list(basePath, rejectAllFilter).length === 0)
// Create atomic without overwrite
var path = new Path(s"$dir/file")
assert(!fm.exists(path))
fm.createAtomic(path, overwriteIfPossible = false).cancel()
assert(!fm.exists(path))
fm.createAtomic(path, overwriteIfPossible = false).close()
assert(fm.exists(path))
quietly {
intercept[IOException] {
// should throw exception since file exists and overwrite is false
fm.createAtomic(path, overwriteIfPossible = false).close()
}
}
// Create atomic with overwrite if possible
path = new Path(s"$dir/file2")
assert(!fm.exists(path))
fm.createAtomic(path, overwriteIfPossible = true).cancel()
assert(!fm.exists(path))
fm.createAtomic(path, overwriteIfPossible = true).close()
assert(fm.exists(path))
fm.createAtomic(path, overwriteIfPossible = true).close() // should not throw exception
// Open and delete
fm.open(path).close()
fm.delete(path)
assert(!fm.exists(path))
intercept[IOException] {
fm.open(path)
}
fm.delete(path) // should not throw exception
}
}
protected def withTempPath(f: File => Unit): Unit = {
val path = Utils.createTempDir()
path.delete()
try f(path) finally Utils.deleteRecursively(path)
}
}
class CheckpointFileManagerSuite extends SparkFunSuite with SharedSparkSession {
test("CheckpointFileManager.create() should pick up user-specified class from conf") {
withSQLConf(
SQLConf.STREAMING_CHECKPOINT_FILE_MANAGER_CLASS.parent.key ->
classOf[CreateAtomicTestManager].getName) {
val fileManager =
CheckpointFileManager.create(new Path("/"), spark.sessionState.newHadoopConf)
assert(fileManager.isInstanceOf[CreateAtomicTestManager])
}
}
test("CheckpointFileManager.create() should fallback from FileContext to FileSystem") {
import CheckpointFileManagerSuiteFileSystem.scheme
spark.conf.set(s"fs.$scheme.impl", classOf[CheckpointFileManagerSuiteFileSystem].getName)
quietly {
withTempDir { temp =>
val metadataLog = new HDFSMetadataLog[String](spark, s"$scheme://${temp.toURI.getPath}")
assert(metadataLog.add(0, "batch0"))
assert(metadataLog.getLatest() === Some(0 -> "batch0"))
assert(metadataLog.get(0) === Some("batch0"))
assert(metadataLog.get(None, Some(0)) === Array(0 -> "batch0"))
val metadataLog2 = new HDFSMetadataLog[String](spark, s"$scheme://${temp.toURI.getPath}")
assert(metadataLog2.get(0) === Some("batch0"))
assert(metadataLog2.getLatest() === Some(0 -> "batch0"))
assert(metadataLog2.get(None, Some(0)) === Array(0 -> "batch0"))
}
}
}
}
class FileContextBasedCheckpointFileManagerSuite extends CheckpointFileManagerTests {
override def createManager(path: Path): CheckpointFileManager = {
new FileContextBasedCheckpointFileManager(path, new Configuration())
}
}
class FileSystemBasedCheckpointFileManagerSuite extends CheckpointFileManagerTests {
override def createManager(path: Path): CheckpointFileManager = {
new FileSystemBasedCheckpointFileManager(path, new Configuration())
}
}
/** A fake implementation to test different characteristics of CheckpointFileManager interface */
class CreateAtomicTestManager(path: Path, hadoopConf: Configuration)
extends FileSystemBasedCheckpointFileManager(path, hadoopConf) {
import CheckpointFileManager._
override def createAtomic(path: Path, overwrite: Boolean): CancellableFSDataOutputStream = {
if (CreateAtomicTestManager.shouldFailInCreateAtomic) {
CreateAtomicTestManager.cancelCalledInCreateAtomic = false
}
val originalOut = super.createAtomic(path, overwrite)
new CancellableFSDataOutputStream(originalOut) {
override def close(): Unit = {
if (CreateAtomicTestManager.shouldFailInCreateAtomic) {
throw new IOException("Copy failed intentionally")
}
super.close()
}
override def cancel(): Unit = {
CreateAtomicTestManager.cancelCalledInCreateAtomic = true
originalOut.cancel()
}
}
}
}
object CreateAtomicTestManager {
@volatile var shouldFailInCreateAtomic = false
@volatile var cancelCalledInCreateAtomic = false
}
/**
* CheckpointFileManagerSuiteFileSystem to test fallback of the CheckpointFileManager
* from FileContext to FileSystem API.
*/
private class CheckpointFileManagerSuiteFileSystem extends RawLocalFileSystem {
import CheckpointFileManagerSuiteFileSystem.scheme
override def getUri: URI = {
URI.create(s"$scheme:///")
}
}
private object CheckpointFileManagerSuiteFileSystem {
val scheme = s"CheckpointFileManagerSuiteFileSystem${math.abs(Random.nextInt)}"
}
| tejasapatil/spark | sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/CheckpointFileManagerSuite.scala | Scala | apache-2.0 | 6,850 |
package at.logic.gapt.formats.tip
import at.logic.gapt.expr._
import at.logic.gapt.expr.hol.{ existentialClosure, universalClosure }
import at.logic.gapt.proofs.{ Context, Sequent }
case class TipConstructor( constr: Const, projectors: Seq[Const] ) {
val FunctionType( datatype, fieldTypes ) = constr.ty
require( fieldTypes.size == projectors.size )
projectors foreach { case Const( _, FunctionType( to, from ) ) => require( from == Seq( datatype ) ) }
def arity = projectors size
def projectorDefinitions: Seq[Formula] = {
val fieldVars = fieldTypes.zipWithIndex.map { case ( t, i ) => Var( s"x$i", t ) }
( projectors, fieldVars ).zipped map { ( p, f ) => p( constr( fieldVars: _* ) ) === f }
}
}
case class TipDatatype( t: TBase, constructors: Seq[TipConstructor] ) {
constructors foreach { ctr => require( ctr.datatype == t ) }
}
case class TipFun( fun: Const, definitions: Seq[Formula] )
case class TipProblem(
ctx: Context,
sorts: Seq[TBase], datatypes: Seq[TipDatatype],
uninterpretedConsts: Seq[Const], functions: Seq[TipFun],
assumptions: Seq[Formula], goal: Formula
) {
def constructorInjectivity =
for {
TipDatatype( ty, ctrs ) <- datatypes
if ty != To // FIXME
( TipConstructor( ctr1, _ ), i1 ) <- ctrs.zipWithIndex
( TipConstructor( ctr2, _ ), i2 ) <- ctrs.zipWithIndex
if i1 < i2 // ignore symmetric pairs
FunctionType( _, args1 ) = ctr1.ty
FunctionType( _, args2 ) = ctr2.ty
} yield universalClosure(
ctr1( ( for ( ( t, j ) <- args1.zipWithIndex ) yield Var( s"x$j", t ) ): _* ) !==
ctr2( ( for ( ( t, j ) <- args2.zipWithIndex ) yield Var( s"y$j", t ) ): _* )
)
def toSequent = existentialClosure(
datatypes.flatMap( _.constructors ).flatMap( _.projectorDefinitions ) ++:
functions.flatMap( _.definitions ) ++:
constructorInjectivity ++:
assumptions ++:
Sequent()
:+ goal
)
def context: Context = ctx
override def toString: String = toSequent.toSigRelativeString( context )
}
| gebner/gapt | core/src/main/scala/at/logic/gapt/formats/tip/problem.scala | Scala | gpl-3.0 | 2,052 |
package com.github.agourlay.cornichon.steps.wrapped
import com.github.agourlay.cornichon.core._
import com.github.agourlay.cornichon.steps.StepUtilSpec
import com.github.agourlay.cornichon.steps.regular.assertStep.{ AssertStep, GenericEqualityAssertion }
import org.scalatest.{ Matchers, AsyncWordSpec }
class RepeatStepSpec extends AsyncWordSpec with Matchers with StepUtilSpec {
"RepeatStep" must {
"fail if 'repeat' block contains a failed step" in {
val nested = AssertStep(
"always fails",
s ⇒ GenericEqualityAssertion(true, false)
) :: Nil
val repeatStep = RepeatStep(nested, 5, None)
val s = Scenario("scenario with Repeat", repeatStep :: Nil)
engine.runScenario(Session.newEmpty)(s).map(_.isSuccess should be(false))
}
"repeat steps inside a 'repeat' block" in {
var uglyCounter = 0
val loop = 5
val nested = AssertStep(
"increment captured counter",
s ⇒ {
uglyCounter = uglyCounter + 1
GenericEqualityAssertion(true, true)
}
) :: Nil
val repeatStep = RepeatStep(nested, loop, None)
val s = Scenario("scenario with Repeat", repeatStep :: Nil)
engine.runScenario(Session.newEmpty)(s).map { res ⇒
res.isSuccess should be(true)
uglyCounter should be(loop)
}
}
"expose indice in session" in {
var uglyCounter = 0
val loop = 5
val indiceKeyName = "my-counter"
val nested = AssertStep(
"increment captured counter",
s ⇒ {
uglyCounter = uglyCounter + 1
GenericEqualityAssertion(s.getUnsafe(indiceKeyName), uglyCounter.toString)
}
) :: Nil
val repeatStep = RepeatStep(nested, loop, Some(indiceKeyName))
val s = Scenario("scenario with Repeat", repeatStep :: Nil)
engine.runScenario(Session.newEmpty)(s).map { res ⇒
res.isSuccess should be(true)
uglyCounter should be(loop)
}
}
}
}
| OlegIlyenko/cornichon | cornichon-core/src/test/scala/com/github/agourlay/cornichon/steps/wrapped/RepeatStepSpec.scala | Scala | apache-2.0 | 1,992 |
package com.twitter.finatra.validation.constraints
import com.twitter.finatra.validation.ErrorCode
import com.twitter.util.validation.constraintvalidation.TwitterConstraintValidatorContext
import jakarta.validation.{ConstraintValidator, ConstraintValidatorContext}
@deprecated("Users should prefer to use standard constraints.", "2021-03-05")
private[validation] class AssertTrueConstraintValidator
extends ConstraintValidator[AssertTrue, Boolean] {
override def isValid(
obj: Boolean,
constraintValidatorContext: ConstraintValidatorContext
): Boolean = {
val valid = obj
if (!valid) {
TwitterConstraintValidatorContext
.withDynamicPayload(ErrorCode.InvalidBooleanValue(obj))
.withMessageTemplate("must be true")
.addConstraintViolation(constraintValidatorContext)
}
valid
}
}
| twitter/finatra | validation/src/main/scala/com/twitter/finatra/validation/constraints/AssertTrueConstraintValidator.scala | Scala | apache-2.0 | 846 |
package com.sksamuel.elastic4s.search.suggestions
import com.sksamuel.elastic4s.Indexable
import com.sksamuel.elastic4s.testkit.ElasticSugar
import org.elasticsearch.common.unit.Fuzziness
import org.scalatest.{Matchers, WordSpec}
class CompletionSuggestionsTest extends WordSpec with Matchers with ElasticSugar {
implicit object SongIndexable extends Indexable[Song] {
override def json(t: Song): String = s"""{"name":"${t.name}", "artist":"${t.artist}"}"""
}
private val Index = "complsuggest"
private val indexType = Index / "music"
client.execute {
createIndex(Index).mappings(
mapping("music").fields(
completionField("name")
)
)
}.await
client.execute(
bulk(
indexInto(indexType) doc Song("Rocket Man", "Kate Bush"),
indexInto(indexType) doc Song("Rubberband Girl", "Kate Bush"),
indexInto(indexType) doc Song("Running Up that Hill", "Kate Bush"),
indexInto(indexType) doc Song("The Fog", "Kate Bush"),
indexInto(indexType) doc Song("The Red Shoes", "Kate Bush"),
indexInto(indexType) doc Song("The Dreaming", "Kate Bush"),
indexInto(indexType) doc Song("The Big Sky", "Kate Bush")
)
).await
blockUntilCount(7, Index)
val resp = client.execute {
search(indexType).suggestions {
completionSuggestion("a").on("name").prefix("Ru")
}
}.await
val result = resp.suggestion("a")
println(result)
val entries = result.entries.toList
println(entries)
"completion suggestions" should {
"support lookups by text" in {
val resp = client.execute {
search(indexType).suggestions {
completionSuggestion("a").on("name").text("The B")
}
}.await
val entry = resp.suggestion("a").entries.head
entry.optionsText shouldBe List("The Big Sky")
}
"support max results" in {
val resp = client.execute {
search(indexType).suggestions {
completionSuggestion("a").on("name").prefix("r").size(1)
}
}.await
val entry = resp.suggestion("a").entries.head
entry.optionsText shouldBe List("Rocket Man")
}
"support lookups by prefix" in {
val resp = client.execute {
search(indexType).suggestions {
completionSuggestion("a").on("name").prefix("ru")
}
}.await
val entry = resp.suggestion("a").entries.head
entry.optionsText shouldBe List("Rubberband Girl", "Running Up that Hill")
}
"support fuzzy prefix lookups" in {
val resp = client.execute {
search(indexType).suggestions {
completionSuggestion("a").on("name").prefix("Rabber", Fuzziness.ONE)
}
}.await
val entry = resp.suggestion("a").entries.head
entry.optionsText shouldBe List("Rubberband Girl")
}
}
}
| aroundus-inc/elastic4s | elastic4s-tests/src/test/scala/com/sksamuel/elastic4s/search/suggestions/CompletionSuggestionsTest.scala | Scala | apache-2.0 | 2,810 |
/*
* Copyright (c) 2012-2017 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0, * and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.snowplow.rdbloader
package config
import org.specs2.Specification
import cats.syntax.either._
import io.circe._
import io.circe.yaml.parser
// This project
import SnowplowConfig.Codecs._
class SnowplowConfigSpec extends Specification { def is = s2"""
Parse storage without folder, but with comment, using auto decoder $e1
Parse monitoring.snowplow with custom decoder $e2
Parse monitoring with custom decoder $e3
Parse enrich with custom decoder $e4
Parse s3 with custom decoder $e7
Parse whole configuration using parse method $e8
Get correct path in decoding failure $e9
"""
def e1 = {
val storageYaml =
"""
|versions:
| rdb_shredder: 1.8.0
| hadoop_elasticsearch: 0.1.0
""".stripMargin
val ast: Either[Error, Json] = parser.parse(storageYaml)
val storage = ast.flatMap(_.as[SnowplowConfig.Storage])
storage must beRight(SnowplowConfig.Storage(SnowplowConfig.StorageVersions(Semver(1,8,0), Semver(0,1,0))))
}
def e2 = {
val monitoringYaml =
"""
|method: get
|app_id: ADD HERE # e.g. snowplow
|collector: ADD HERE # e.g. d3rkrsqld9gmqf.cloudfront.net
""".stripMargin
val ast: Either[Error, Json] = parser.parse(monitoringYaml)
val storage = ast.flatMap(_.as[SnowplowConfig.SnowplowMonitoring])
storage must beRight(SnowplowConfig.SnowplowMonitoring(Some(SnowplowConfig.GetMethod), Some("ADD HERE"), Some("ADD HERE")))
}
def e3 = {
val monitoringYaml =
"""
|tags: {fromString: bar} # Name-value pairs describing this job
|logging:
| level: DEBUG # You can optionally switch to INFO for production
|snowplow:
| method: get
| app_id: ADD HERE # e.g. snowplow
| collector: ADD HERE # e.g. d3rkrsqld9gmqf.cloudfront.net
""".stripMargin
val ast: Either[Error, Json] = parser.parse(monitoringYaml)
val storage = ast.flatMap(_.as[SnowplowConfig.Monitoring])
val snowplow = SnowplowConfig.SnowplowMonitoring(Some(SnowplowConfig.GetMethod), Some("ADD HERE"), Some("ADD HERE"))
val logging = SnowplowConfig.Logging(SnowplowConfig.DebugLevel)
val expected = SnowplowConfig.Monitoring(Map("fromString" -> "bar"), logging, Some(snowplow))
storage must beRight(expected)
}
def e4 = {
val enrichYaml =
"""
|versions:
| spark_enrich: 1.8.0 # Version of the Hadoop Enrichment process
|continue_on_unexpected_error: false # Set to 'true' (and set :out_errors: above) if you don't want any exceptions thrown from ETL
|output_compression: NONE # Compression only supported with Redshift, set to NONE if you have Postgres targets. Allowed formats: NONE, GZIP
""".stripMargin
val ast: Either[Error, Json] = parser.parse(enrichYaml)
val storage = ast.flatMap(_.as[SnowplowConfig.Enrich])
val versions = SnowplowConfig.EnrichVersions(Semver(1,8,0))
val expected = SnowplowConfig.Enrich(versions, SnowplowConfig.NoneCompression)
storage must beRight(expected)
}
def e7 = {
val s3Yaml =
"""
|region: ADD HERE
|buckets:
| assets: s3://snowplow-hosted-assets # DO NOT CHANGE unless you are hosting the jarfiles etc yourself in your own bucket
| jsonpath_assets: s3://myownassets/foo # If you have defined your own JSON Schemas, add the s3:// path to your own JSON Path files in your own bucket here
| log: s3://logs
| raw:
| in: # Multiple in buckets are permitted
| - s3n://my-in-bucket
| processing: s3://processing
| archive: s3n://archive # e.g. s3://my-archive-bucket/raw
| enriched:
| good: s3://enriched-good # e.g. s3://my-out-bucket/enriched/good
| bad: s3://enriched-bad # e.g. s3://my-out-bucket/enriched/bad
| errors: s3://errors # Leave blank unless :continue_on_unexpected_error: set to true below
| archive: s3://path/to/archive # Where to archive enriched events to, e.g. s3://my-archive-bucket/enriched
| shredded:
| good: s3://shredded # e.g. s3://my-out-bucket/shredded/good
| bad: s3://shredded/bad # e.g. s3://my-out-bucket/shredded/bad
| errors: s3://errors # Leave blank unless :continue_on_unexpected_error: set to true below
| archive: s3://archive # Where to archive shredded events to, e.g. s3://my-archive-bucket/shredded
""".stripMargin
val ast: Either[Error, Json] = parser.parse(s3Yaml)
val s3 = ast.flatMap(_.as[SnowplowConfig.SnowplowS3])
s3 must beRight
}
def e8 = {
val configYaml =
"""
|aws:
| # Credentials can be hardcoded or set in environment variables
| access_key_id: <%= ENV['AWS_SNOWPLOW_ACCESS_KEY'] %>
| secret_access_key: <%= ENV['AWS_SNOWPLOW_SECRET_KEY'] %>
| s3:
| region: ADD HERE
| buckets:
| assets: s3://snowplow-hosted-assets # DO NOT CHANGE unless you are hosting the jarfiles etc yourself in your own bucket
| jsonpath_assets: # If you have defined your own JSON Schemas, add the s3:// path to your own JSON Path files in your own bucket here
| log: s3://log-bucket
| raw:
| in: # Multiple in buckets are permitted
| - s3://in-first/ # e.g. s3://my-in-bucket
| - s3://in-second/path/to/logs
| processing: s3://processing-logs/
| archive: s3://my-archive/ # e.g. s3://my-archive-bucket/raw
| enriched:
| good: s3://enriched-good/path/to/1 # e.g. s3://my-out-bucket/enriched/good
| bad: s3://snowplow-bad/PATH/to/1 # e.g. s3://my-out-bucket/enriched/bad
| errors: # Leave blank unless :continue_on_unexpected_error: set to true below
| archive: s3://enriched/ # Where to archive enriched events to, e.g. s3://my-archive-bucket/enriched
| shredded:
| good: s3://my-shredded-events/path/to/some/destination # e.g. s3://my-out-bucket/shredded/good
| bad: s3://my_not_shredded-events/ # e.g. s3://my-out-bucket/shredded/bad
| errors: s3://bucket/with/errors # Leave blank unless :continue_on_unexpected_error: set to true below
| archive: s3://path # Where to archive shredded events to, e.g. s3://my-archive-bucket/shredded
| emr:
| ami_version: 4.5.0
| region: ADD HERE # Always set this
| jobflow_role: EMR_EC2_DefaultRole # Created using $ aws emr create-default-roles
| service_role: EMR_DefaultRole # Created using $ aws emr create-default-roles
| placement: ADD HERE # Set this if not running in VPC. Leave blank otherwise
| ec2_subnet_id: ADD HERE # Set this if running in VPC. Leave blank otherwise
| ec2_key_name: ADD HERE
| bootstrap: [] # Set this to specify custom boostrap actions. Leave empty otherwise
| software:
| hbase: # Optional. To launch on cluster, provide version, "0.92.0", keep quotes. Leave empty otherwise.
| lingual: # Optional. To launch on cluster, provide version, "1.1", keep quotes. Leave empty otherwise.
| # Adjust your Hadoop cluster below
| jobflow:
| job_name: Snowplow ETL # Give your job a name
| master_instance_type: m1.medium
| core_instance_count: 2
| core_instance_type: m1.medium
| task_instance_count: 0 # Increase to use spot instances
| task_instance_type: m1.medium
| task_instance_bid: 0.015 # In USD. Adjust bid, or leave blank for non-spot-priced (i.e. on-demand) task instances
| bootstrap_failure_tries: 3 # Number of times to attempt the job in the event of bootstrap failures
| additional_info: # Optional JSON string for selecting additional features
|collectors:
| format: cloudfront # For example: 'clj-tomcat' for the Clojure Collector, 'thrift' for Thrift records, 'tsv/com.amazon.aws.cloudfront/wd_access_log' for Cloudfront access logs or 'ndjson/urbanairship.connect/v1' for UrbanAirship Connect events
|enrich:
| versions:
| spark_enrich: 1.8.0 # Version of the Hadoop Enrichment process
| continue_on_unexpected_error: false # Set to 'true' (and set :out_errors: above) if you don't want any exceptions thrown from ETL
| output_compression: NONE # Compression only supported with Redshift, set to NONE if you have Postgres targets. Allowed formats: NONE, GZIP
|storage:
| download:
| folder: # Postgres-only config option. Where to store the downloaded files. Leave blank for Redshift
| versions:
| rdb_shredder: 0.10.0 # Version of the Hadoop Shredding process
| hadoop_elasticsearch: 0.1.0 # Version of the Hadoop to Elasticsearch copying process
|monitoring:
| tags: {} # Name-value pairs describing this job
| logging:
| level: DEBUG # You can optionally switch to INFO for production
| snowplow:
| method: get
| app_id: ADD HERE # e.g. snowplow
| collector: ADD HERE # e.g. d3rkrsqld9gmqf.cloudfront.net
""".stripMargin
val result = SnowplowConfig.parse(configYaml)
result must beRight
}
def e9 = {
// buckets.shredded.good cannot be integer
val s3Yaml =
"""
|region: ADD HERE
|buckets:
| assets: s3://snowplow-hosted-assets # DO NOT CHANGE unless you are hosting the jarfiles etc yourself in your own bucket
| jsonpath_assets: # If you have defined your own JSON Schemas, add the s3:// path to your own JSON Path files in your own bucket here
| log: s3://log-bucket/
| raw:
| in: # Multiple in buckets are permitted
| - s3://in-first/ # e.g. s3://my-in-bucket
| - s3://in-second/path/to/logs
| processing: s3://processing-logs/
| archive: s3://my-archive/ # e.g. s3://my-archive-bucket/raw
| shredded:
| good: 0 # e.g. s3://my-out-bucket/shredded/good
| bad: s3://foo # e.g. s3://my-out-bucket/shredded/bad
| errors: # Leave blank unless :continue_on_unexpected_error: set to true below
| archive: s3://bar # Where to archive shredded events to, e.g. s3://my-archive-bucket/shredded
""".stripMargin
val ast: Either[Error, Json] = parser.parse(s3Yaml)
val s3 = ast.flatMap(_.as[SnowplowConfig.SnowplowS3])
val path = List(CursorOp.DownField("good"), CursorOp.DownField("shredded"), CursorOp.DownField("buckets"))
s3.leftMap(_.asInstanceOf[DecodingFailure].history) must beLeft(path)
}
}
| Propertyfinder/snowplow | 4-storage/rdb-loader/src/test/scala/com/snowplowanalytics/snowplow/rdbloader/config/SnowplowConfigSpec.scala | Scala | apache-2.0 | 11,919 |
package com.peterpotts.common.sample
class SampleIterable[A](iterable: Iterable[Sample[A]]) extends Sample[Iterable[A]] {
def next(): Iterable[A] = iterable.map(_.next())
}
object SampleIterable {
def apply[A](iterable: Iterable[Sample[A]]) = new SampleIterable(iterable)
def apply[A](sampleA: Sample[A], size: Int = defaultSampleSize): Sample[Iterable[A]] =
apply(Iterable.fill(size)(sampleA))
def apply[A](sampleA: Sample[A], sizes: IndexedSeq[Int]): Sample[Iterable[A]] =
SamplePick(sizes).flatMap(size => apply(sampleA, size))
}
| peterpotts/mobius | src/main/scala/com/peterpotts/common/sample/SampleIterable.scala | Scala | mit | 553 |
package library
sealed trait List[+T] {
def head: T
def tail: List[T]
def zip[U](l: List[U]): List[Tuple2[T, U]] = if (this == Nil) {
Nil
} else {
new Cons[Tuple2[T, U]](
new Tuple2(
head,
l.head),
tail.zip(l.tail))
}
def zipWithIndex: List[Tuple2[T, Int]] = zipWithStartingIndex(0)
def zipWithStartingIndex(starting: Int): List[Tuple2[T, Int]] = if (this == Nil) {
Nil
} else {
new Cons(
new Tuple2(
head,
starting),
tail.zipWithStartingIndex(starting + 1))
}
def map[U](f: T => U): List[U] = if (this == Nil) {
Nil
} else {
new Cons(f(head), tail.map(f))
}
def foldLeft[U](z: U, f: (U, T) => U): U = {
if (this == Nil)
z
else
tail.foldLeft(f(z, head), f)
}
def length: Int = if (this == Nil) 0 else tail.length + 1
}
class Cons[T](val head: T, val tail: List[T]) extends List[T] {
override def equals(that: Any): Boolean = {
val thatList = that.asInstanceOf[List[Any]]
thatList.head == head && thatList.tail == tail
}
}
object Nil extends List[Nothing] {
def head: Nothing = throw new RuntimeException("Head of the empty list!")
def tail: List[Nothing] = throw new RuntimeException("Tail of the empty list!")
}
/*
* Does not work with the interpreter!
*/
object List {
def apply[T](x1: T, x2: T, x3: T, x4: T): List[T] =
new Cons(x1, new Cons(x2, new Cons(x3, new Cons(x4, Nil))))
def apply[T](x1: T, x2: T, x3: T): List[T] =
new Cons(x1, new Cons(x2, new Cons(x3, Nil)))
def apply[T](x1: T, x2: T): List[T] =
new Cons(x1, new Cons(x2, Nil))
def apply[T](x1: T): List[T] =
new Cons(x1, Nil)
def apply[T](): List[T] =
Nil
}
class Tuple2[+T, +U](val _1: T, val _2: U) {
override def equals(x: Any): Boolean = {
if (x.isInstanceOf[Tuple2[Any, Any]]) {
val that = x.asInstanceOf[Tuple2[Any, Any]]
(this._1 equals that._1) && (this._2 == that._2)
} else false
}
override def toString: String = s"(${_1},${_2})"
}
import ch.epfl.scalact._
object Numeric {
@ct implicit def dnum(): Numeric[Double] @ct = DoubleNumeric
@ct implicit def dnumct(): Numeric[Double @ct] @ct = DoubleNumericCT
}
trait Numeric[T] {
def plus(x: T, y: T): T
def minus(x: T, y: T): T
def times(x: T, y: T): T
def fromInt(x: Int): T
def zero(): T
def one(): T
class Ops(lhs: T) {
@ct def +(rhs: T) = plus(lhs, rhs)
@ct def -(rhs: T) = minus(lhs, rhs)
@ct def *(rhs: T) = times(lhs, rhs)
}
@ct implicit def mkNumericOps(lhs: T): Ops = new (Ops @ ct)(lhs)
}
object DoubleNumeric extends Numeric[Double] @ct {
@ct def plus(x: Double, y: Double): Double = x + y
@ct def minus(x: Double, y: Double): Double = x - y
@ct def times(x: Double, y: Double): Double = x * y
@ct def fromInt(x: Int): Double = x
@ct def one: Double = 1.0
@ct def zero: Double = 0.0
}
object DoubleNumericCT extends Numeric[Double @ct] @ct {
@ct def plus(x: Double @ct, y: Double @ct): Double @ct = x + y
@ct def minus(x: Double @ct, y: Double @ct): Double @ct = x - y
@ct def times(x: Double @ct, y: Double @ct): Double @ct = x * y
@ct def fromInt(x: Int @ct): Double = x
@ct def one: Double @ct = ct(1.0)
@ct def zero: Double @ct = ct(0.0)
}
| scala-ct/scala-ct | test/library/Library.scala | Scala | bsd-3-clause | 3,260 |
package is.hail.expr.ir
import is.hail.annotations.{Region, StagedRegionValueBuilder}
import is.hail.asm4s._
import is.hail.types.physical.{PCode, PType}
import is.hail.utils._
object StagedRegion {
def apply(
r: Value[Region],
allowSubregions: Boolean = false,
parents: Seq[ParentStagedRegion] = Seq(),
description: String = "root"
): ParentStagedRegion =
new ParentStagedRegion(r, parents, allowSubregions, description)
def apply(r: Value[Region]): StagedRegion = new StagedRegion {
val code = r
def <=(that: ParentStagedRegion): Boolean = false
def assertSubRegion(that: ParentStagedRegion) {
assert(this <= that, s"root\\n${that.description}")
}
}
def swap(mb: EmitMethodBuilder[_], x: OwnedStagedRegion, y: OwnedStagedRegion): Code[Unit] = {
x.parent assertEqual y.parent
(x, y) match {
case (x: RealOwnedStagedRegion, y: RealOwnedStagedRegion) =>
val temp = mb.newLocal[Region]("sr_swap")
Code(temp := x.r, x.r := y.r, y.r := temp)
case (x: DummyOwnedStagedRegion, y: DummyOwnedStagedRegion) =>
Code._empty
}
}
}
abstract class StagedRegion {
def code: Value[Region]
final def asParent(allowAllocations: Boolean, description: String): ParentStagedRegion = {
val parents = this match {
case child: ChildStagedRegion => child.otherAncestors :+ child.parent
case parent: ParentStagedRegion => Seq(parent)
case _ => Seq()
}
StagedRegion(code, allowAllocations, parents, description)
}
def <=(that: ParentStagedRegion): Boolean
def assertSubRegion(that: ParentStagedRegion): Unit
}
class ParentStagedRegion(
val code: Value[Region],
val parents: Seq[ParentStagedRegion],
val allowSubregions: Boolean,
desc: String
) extends StagedRegion { self =>
final def description: String = parents match {
case Seq() => desc
case Seq(p) => s"$desc < ${ p.description }"
case ps => s"$desc < ${ ps.map(_.description).mkString(" {", " | ", " }") }"
}
def createChildRegion(mb: EmitMethodBuilder[_]): OwnedStagedRegion =
if (allowSubregions) {
val newR = mb.genFieldThisRef[Region]("staged_region_child")
new RealOwnedStagedRegion(newR, this)
} else {
new DummyOwnedStagedRegion(code, this)
}
def createChildRegionArray(mb: EmitMethodBuilder[_], length: Int): OwnedStagedRegionArray =
if (allowSubregions) {
val regionArray = mb.genFieldThisRef[Array[Region]]("staged_region_child_array")
def get(i: Value[Int]): Settable[Region] = new Settable[Region] {
def get: Code[Region] = regionArray(i)
def store(rhs: Code[Region]): Code[Unit] = regionArray.update(i, rhs)
}
new OwnedStagedRegionArray {
def apply(i: Value[Int]): OwnedStagedRegion = new RealOwnedStagedRegion(get(i), self)
def allocateRegions(mb: EmitMethodBuilder[_], size: Int): Code[Unit] = {
val i = mb.newLocal[Int]("sora_alloc_i")
Code(
regionArray := Code.newArray(length),
Code.forLoop(i := 0, i < length, i := i + 1, apply(i).allocateRegion(size)))
}
def freeAll(mb: EmitMethodBuilder[_]): Code[Unit] = {
val i = mb.newLocal[Int]("sora_free_i")
Code(
Code.forLoop(i := 0, i < length, i := i + 1, apply(i).free()),
regionArray := Code._null)
}
}
} else {
new OwnedStagedRegionArray {
def apply(i: Value[Int]): OwnedStagedRegion = new DummyOwnedStagedRegion(code, self)
def allocateRegions(mb: EmitMethodBuilder[_], size: Int): Code[Unit] =
Code._empty
def freeAll(mb: EmitMethodBuilder[_]): Code[Unit] =
Code._empty
}
}
override def equals(that: Any): Boolean = that match {
case that: ParentStagedRegion =>
(this.allowSubregions == that.allowSubregions) && (this.code eq that.code)
case _ => false
}
final def <=(that: ParentStagedRegion): Boolean =
(this == that) || parents.exists(_ <= that)
def assertEqual(that: ParentStagedRegion) {
assert(this == that, s"${this.description}\\n${that.description}")
}
def assertSubRegion(that: ParentStagedRegion) {
assert(this <= that, s"${this.description}\\n${that.description}")
}
}
abstract class ChildStagedRegion extends StagedRegion {
def parent: ParentStagedRegion
def otherAncestors: Seq[ParentStagedRegion]
def asSubregionOf(that: ParentStagedRegion): ChildStagedRegion
def createSiblingRegion(mb: EmitMethodBuilder[_]): OwnedStagedRegion
final def createSiblingRegionArray(mb: EmitMethodBuilder[_], length: Int): OwnedStagedRegionArray =
parent.createChildRegionArray(mb, length)
final def copyToParent(mb: EmitMethodBuilder[_], value: PCode, destType: PType): PCode =
copyTo(mb, value, parent, destType)
final def copyToParent(mb: EmitMethodBuilder[_], value: PCode): PCode =
copyTo(mb, value, parent)
def copyTo(mb: EmitMethodBuilder[_], value: PCode, dest: StagedRegion, destType: PType): PCode
def copyTo(mb: EmitMethodBuilder[_], value: PCode, dest: StagedRegion): PCode
final def <=(that: ParentStagedRegion): Boolean =
(this.parent <= that) || otherAncestors.exists(_ <= that)
def description: String = if (otherAncestors.isEmpty)
parent.description
else
otherAncestors.map(_.description).mkString(s"{ ${parent.description} | ", " | ", "}")
def assertSubRegion(that: ParentStagedRegion) {
assert(this <= that, s"${this.parent.description}\\n${that.description}")
}
}
trait OwnedStagedRegion extends ChildStagedRegion {
def allocateRegion(size: Int): Code[Unit]
def free(): Code[Unit]
def clear(): Code[Unit]
def giveToParent(): Code[Unit]
def giveToSibling(dest: ChildStagedRegion): Code[Unit]
def shareWithSibling(dest: ChildStagedRegion): Code[Unit]
def addToParentRVB(srvb: StagedRegionValueBuilder, value: PCode): Code[Unit]
}
abstract class OwnedStagedRegionArray {
def apply(i: Value[Int]): OwnedStagedRegion
def allocateRegions(mb: EmitMethodBuilder[_], size: Int): Code[Unit]
def freeAll(mb: EmitMethodBuilder[_]): Code[Unit]
}
class RealOwnedStagedRegion(
val r: Settable[Region],
val parent: ParentStagedRegion,
val otherAncestors: Seq[ParentStagedRegion] = Seq()
) extends OwnedStagedRegion {
assert(parent.allowSubregions)
def code: Value[Region] = r
def asSubregionOf(that: ParentStagedRegion): ChildStagedRegion =
new RealOwnedStagedRegion(r, parent, otherAncestors :+ that)
def createSiblingRegion(mb: EmitMethodBuilder[_]): OwnedStagedRegion = {
val newR = mb.genFieldThisRef[Region]("staged_region_child")
new RealOwnedStagedRegion(newR, parent, otherAncestors)
}
def allocateRegion(size: Int): Code[Unit] = r := Region.stagedCreate(size)
def free(): Code[Unit] = Code(r.invalidate(), r := Code._null)
def clear(): Code[Unit] = (r: Value[Region]).clear()
def giveToParent(): Code[Unit] = r.invoke[Region, Unit]("move", parent.code)
def copyTo(mb: EmitMethodBuilder[_], value: PCode, dest: StagedRegion, destType: PType): PCode = {
dest assertSubRegion parent
value.copyToRegion(mb, dest.code, destType)
}
def copyTo(mb: EmitMethodBuilder[_], value: PCode, dest: StagedRegion): PCode =
copyTo(mb, value, dest, value.pt)
def giveToSibling(dest: ChildStagedRegion): Code[Unit] = {
dest assertSubRegion parent
r.invoke[Region, Unit]("move", dest.code)
}
def shareWithSibling(dest: ChildStagedRegion): Code[Unit] = {
dest assertSubRegion parent
dest.code.invoke[Region, Unit]("addReferenceTo", r)
}
def addToParentRVB(srvb: StagedRegionValueBuilder, value: PCode): Code[Unit] =
srvb.addIRIntermediate(value, deepCopy = true)
}
class DummyOwnedStagedRegion(
val code: Value[Region],
val parent: ParentStagedRegion,
val otherAncestors: Seq[ParentStagedRegion] = Seq()
) extends OwnedStagedRegion {
assert(!parent.allowSubregions)
def asSubregionOf(that: ParentStagedRegion): ChildStagedRegion =
new DummyOwnedStagedRegion(code, parent, otherAncestors :+ that)
def createSiblingRegion(mb: EmitMethodBuilder[_]): OwnedStagedRegion = {
new DummyOwnedStagedRegion(code, parent, otherAncestors)
}
def allocateRegion(size: Int): Code[Unit] = Code._empty
def free(): Code[Unit] = Code._empty
def clear(): Code[Unit] = Code._empty
def giveToParent(): Code[Unit] = Code._empty
def copyTo(mb: EmitMethodBuilder[_], value: PCode, dest: StagedRegion, destType: PType): PCode = {
dest assertSubRegion parent
value.castTo(mb, parent.code, destType)
}
def copyTo(mb: EmitMethodBuilder[_], value: PCode, dest: StagedRegion): PCode = {
dest assertSubRegion parent
value
}
def giveToSibling(dest: ChildStagedRegion): Code[Unit] = {
dest assertSubRegion parent
Code._empty
}
def shareWithSibling(dest: ChildStagedRegion): Code[Unit] = {
dest assertSubRegion parent
Code._empty
}
def addToParentRVB(srvb: StagedRegionValueBuilder, value: PCode): Code[Unit] =
srvb.addIRIntermediate(value, deepCopy = false)
}
| cseed/hail | hail/src/main/scala/is/hail/expr/ir/StagedRegion.scala | Scala | mit | 9,103 |
object Test extends App {
val emptyTuple: Tuple = Tuple()
val tuple: Tuple = ("1", "2", "3", "4", "5")
val tupleXXL: Tuple = ("11", "12", "13", "14", "15", "16", "17", "18", "19", "20", "21", "22", "23", "24", "25", "26", "27", "28", "29", "30", "31", "32", "33", "34", "35")
type Id[X] = X
val f: [t] => t => Id[t] = [t] => (x: t) => {
val str = x.asInstanceOf[String]
str.updated(0, (str(0) + 1).toChar).asInstanceOf[t]
}
// Test all possible combinations of making
println(emptyTuple.map(f))
println(tuple.map(f))
println(tupleXXL.map(f))
}
| dotty-staging/dotty | tests/run/tuple-map.scala | Scala | apache-2.0 | 575 |
package io.github.shogowada.scala.jsonrpc.server
import io.github.shogowada.scala.jsonrpc.server.JSONRPCServer.RequestJSONHandler
class JSONRPCRequestJSONHandlerRepository {
private var methodNameToHandlerMap: Map[String, RequestJSONHandler] = Map.empty
def add(methodName: String, requestJSONHandler: RequestJSONHandler): Unit = this.synchronized {
methodNameToHandlerMap = methodNameToHandlerMap + (methodName -> requestJSONHandler)
}
def addIfAbsent(methodName: String, handlerFactory: () => (RequestJSONHandler)): Unit = this.synchronized {
if (!methodNameToHandlerMap.contains(methodName)) {
add(methodName, handlerFactory())
}
}
def get(methodName: String): Option[RequestJSONHandler] = {
methodNameToHandlerMap.get(methodName)
}
def remove(methodName: String): Unit = this.synchronized {
methodNameToHandlerMap = methodNameToHandlerMap - methodName
}
}
| shogowada/scala-json-rpc | shared/src/main/scala/io/github/shogowada/scala/jsonrpc/server/JSONRPCRequestJSONHandlerRepository.scala | Scala | mit | 910 |
import scala.quoted._
inline def isTrue: Boolean = true
inline def oneOf(): String = {
inline if isTrue then
"foo"
else
"bar"
}
def test1 = oneOf()
| lampepfl/dotty | tests/pos/i10107c.scala | Scala | apache-2.0 | 163 |
package mstparser
import java.io.BufferedOutputStream
import java.io.File
import java.io.FileOutputStream
import java.io.IOException
import java.io.ObjectInputStream
import java.io.ObjectOutputStream
import mstparser.io.DependencyReader
import mstparser.io.DependencyWriter
class DependencyPipe(
protected val options: ParserOptions,
val dataAlphabet: Alphabet[String],
val typeAlphabet: Alphabet[String],
var labeled: Boolean
) {
def this(options: ParserOptions) = this(options, new Alphabet, new Alphabet, false)
private val depReader = DependencyReader.createDependencyReader(this.options.format, this.options.discourseMode)
private var depWriter: DependencyWriter = _
private var instances: IndexedSeq[DependencyInstance] = _
def initInputFile(file: String) {
this.labeled = this.depReader.startReading(file)
}
def initOutputFile(file: String) {
this.depWriter = DependencyWriter.createDependencyWriter(this.options.format, this.labeled)
this.depWriter.startWriting(file)
}
def outputInstance(instance: DependencyInstance) {
this.depWriter.write(instance)
}
def close() {
if (this.depWriter != null) this.depWriter.finishWriting()
}
def add(f: String, fv: FeatureVector) {
this.add(f, 1.0, fv)
}
def add(f: String, v: Double, fv: FeatureVector) {
val i = this.dataAlphabet.lookupIndex(f)
if (i >= 0) fv.add(i, v)
}
def createAlphabets(file: String) {
print("Creating Alphabet ... ")
this.labeled = this.depReader.startReading(file)
depReader.foreach { instance =>
instance.deprels.foreach(this.typeAlphabet.lookupIndex(_))
this.createFeatureVector(instance)
}
this.dataAlphabet.stopGrowth()
this.typeAlphabet.stopGrowth()
println("Done.")
}
def createFeatureVector(instance: DependencyInstance) = {
val fv = new FeatureVector
instance.heads.zip(instance.deprels).zipWithIndex.filter(_._1._1 > -1).foreach {
case ((h, l), i) =>
val small = math.min(h, i)
val large = math.max(h, i)
val attR = i >= h
this.addCoreFeatures(instance, small, large, attR, fv)
if (this.labeled) {
this.addLabeledFeatures(instance, i, l, attR, true, fv)
this.addLabeledFeatures(instance, h, l, attR, false, fv)
}
}
this.addExtendedFeatures(instance, fv)
fv
}
protected def addExtendedFeatures(instance: DependencyInstance, fv: FeatureVector) {}
def createInstances(file: String, featFile: File, createForest: Boolean) = {
this.labeled = this.depReader.startReading(file)
val out = if (createForest)
new ObjectOutputStream(new BufferedOutputStream(new FileOutputStream(featFile), 65536))
else null
println("Creating Feature Vector Instances: ")
this.instances = this.depReader.zipWithIndex.map { case (instance, i) =>
print(i + " ")
instance.featureVector = this.createFeatureVector(instance)
if (createForest) this.writeInstance(instance, out)
instance
}.toIndexedSeq
println()
if (createForest) out.close()
this.instances
}
protected def writeInstance(instance: DependencyInstance, out: ObjectOutputStream) {
(0 until instance.length).foreach { w1 =>
((w1 + 1) until instance.length).foreach { w2 =>
val prodFV1 = new FeatureVector
this.addCoreFeatures(instance, w1, w2, true, prodFV1)
out.writeObject(prodFV1.keys)
val prodFV2 = new FeatureVector
this.addCoreFeatures(instance, w1, w2, false, prodFV2)
out.writeObject(prodFV2.keys)
}
}
out.writeInt(-3)
if (this.labeled) {
(0 until instance.length).foreach { w1 =>
this.typeAlphabet.values.foreach { t =>
val prodFV1 = new FeatureVector
this.addLabeledFeatures(instance, w1, t, true, true, prodFV1)
out.writeObject(prodFV1.keys)
val prodFV2 = new FeatureVector
this.addLabeledFeatures(instance, w1, t, true, false, prodFV2)
out.writeObject(prodFV2.keys)
val prodFV3 = new FeatureVector
this.addLabeledFeatures(instance, w1, t, false, true, prodFV3)
out.writeObject(prodFV3.keys)
val prodFV4 = new FeatureVector
this.addLabeledFeatures(instance, w1, t, false, false, prodFV4)
out.writeObject(prodFV4.keys)
}
}
out.writeInt(-3)
}
this.writeExtendedFeatures(instance, out)
out.reset()
}
protected def writeExtendedFeatures(instance: DependencyInstance, out: ObjectOutputStream) {}
def fillFeatureVectors(
instance: DependencyInstance,
fvs: Array[Array[Array[FeatureVector]]],
probs: Array[Array[Array[Double]]],
fvsTr: Array[Array[Array[FeatureVector]]],
probsTr: Array[Array[Array[Double]]],
fvsSi: Array[Array[Array[FeatureVector]]],
probsSi: Array[Array[Array[Double]]],
fvsNt: Array[Array[Array[Array[FeatureVector]]]],
probsNt: Array[Array[Array[Array[Double]]]],
params: Parameters
) {
(0 until instance.length).foreach { w1 =>
((w1 + 1) until instance.length).foreach { w2 =>
val prodFV1 = new FeatureVector
this.addCoreFeatures(instance, w1, w2, true, prodFV1)
fvs(w1)(w2)(0) = prodFV1
probs(w1)(w2)(0) = params.getScore(prodFV1)
val prodFV2 = new FeatureVector
this.addCoreFeatures(instance, w1, w2, false, prodFV2)
fvs(w1)(w2)(1) = prodFV2
probs(w1)(w2)(1) = params.getScore(prodFV2)
}
}
if (this.labeled) {
(0 until instance.length).foreach { w1 =>
this.typeAlphabet.values.zipWithIndex.foreach { case (t, i) =>
val prodFV1 = new FeatureVector
this.addLabeledFeatures(instance, w1, t, true, true, prodFV1)
fvsNt(w1)(i)(0)(0) = prodFV1
probsNt(w1)(i)(0)(0) = params.getScore(prodFV1)
val prodFV2 = new FeatureVector
this.addLabeledFeatures(instance, w1, t, true, false, prodFV2)
fvsNt(w1)(i)(0)(1) = prodFV2
probsNt(w1)(i)(0)(1) = params.getScore(prodFV2)
val prodFV3 = new FeatureVector
this.addLabeledFeatures(instance, w1, t, false, true, prodFV3)
fvsNt(w1)(i)(1)(0) = prodFV3
probsNt(w1)(i)(1)(0) = params.getScore(prodFV3)
val prodFV4 = new FeatureVector
this.addLabeledFeatures(instance, w1, t, false, false, prodFV4)
fvsNt(w1)(i)(1)(1) = prodFV4
probsNt(w1)(i)(1)(1) = params.getScore(prodFV4)
}
}
}
}
def readInstance(
in: ObjectInputStream, len: Int,
fvs: Array[Array[Array[FeatureVector]]],
probs: Array[Array[Array[Double]]],
fvsTr: Array[Array[Array[FeatureVector]]],
probsTr: Array[Array[Array[Double]]],
fvsSi: Array[Array[Array[FeatureVector]]],
probsSi: Array[Array[Array[Double]]],
fvsNt: Array[Array[Array[Array[FeatureVector]]]],
probsNt: Array[Array[Array[Array[Double]]]],
params: Parameters
) {
try {
(0 until len).foreach { w1 =>
((w1 + 1) until len).foreach { w2 =>
val prodFV1 = FeatureVector.fromKeys(in.readObject().asInstanceOf[Array[Int]])
fvs(w1)(w2)(0) = prodFV1
probs(w1)(w2)(0) = params.getScore(prodFV1)
val prodFV2 = FeatureVector.fromKeys(in.readObject().asInstanceOf[Array[Int]])
fvs(w1)(w2)(1) = prodFV2
probs(w1)(w2)(1) = params.getScore(prodFV2)
}
}
if (in.readInt() != -3) { println("Error reading file."); sys.exit(0) }
if (this.labeled) {
(0 until len).foreach { w1 =>
this.typeAlphabet.values.zipWithIndex.foreach { case (t, i) =>
val prodFV1 = FeatureVector.fromKeys(in.readObject().asInstanceOf[Array[Int]])
fvsNt(w1)(i)(0)(0) = prodFV1
probsNt(w1)(i)(0)(0) = params.getScore(prodFV1)
val prodFV2 = FeatureVector.fromKeys(in.readObject().asInstanceOf[Array[Int]])
fvsNt(w1)(i)(0)(1) = prodFV2
probsNt(w1)(i)(0)(1) = params.getScore(prodFV2)
val prodFV3 = FeatureVector.fromKeys(in.readObject().asInstanceOf[Array[Int]])
fvsNt(w1)(i)(1)(0) = prodFV3
probsNt(w1)(i)(1)(0) = params.getScore(prodFV3)
val prodFV4 = FeatureVector.fromKeys(in.readObject().asInstanceOf[Array[Int]])
fvsNt(w1)(i)(1)(1) = prodFV4
probsNt(w1)(i)(1)(1) = params.getScore(prodFV4)
}
}
if (in.readInt() != -3) { println("Error reading file."); sys.exit(0) }
}
} catch { case e: IOException => println("Error reading file."); sys.exit(0) }
}
private def addCoreFeatures(instance: DependencyInstance, small: Int, large: Int, attR: Boolean, fv: FeatureVector) {
val attDist = "&%s&%d".format(
if (attR) "RA" else "LA",
math.abs(large - small) match {
case dist if dist > 10 => 10
case dist if dist > 5 => 5
case dist => dist - 1
}
)
this.addLinearFeatures("POS", instance.postags, small, large, attDist, fv)
this.addLinearFeatures("CPOS", instance.cpostags, small, large, attDist, fv)
val (headIndex, childIndex) = if (attR) (small, large) else (large, small)
this.addTwoObsFeatures("HC",
instance.forms(headIndex),
instance.postags(headIndex),
instance.forms(childIndex),
instance.postags(childIndex),
attDist,
fv
)
if (this.options.format == "CONLL") {
this.addTwoObsFeatures("HCA",
instance.forms(headIndex),
instance.cpostags(headIndex),
instance.forms(childIndex),
instance.cpostags(childIndex),
attDist,
fv
)
this.addTwoObsFeatures("HCC",
instance.lemmas(headIndex),
instance.postags(headIndex),
instance.lemmas(childIndex),
instance.postags(childIndex),
attDist,
fv
)
this.addTwoObsFeatures("HCD",
instance.lemmas(headIndex),
instance.cpostags(headIndex),
instance.lemmas(childIndex),
instance.cpostags(childIndex),
attDist,
fv
)
if (options.discourseMode) {
/* Note: The features invoked here are designed for discourse parsing
* (as opposed to sentential parsing). It is conceivable that they
* could help for sentential parsing, but current testing indicates
* that they hurt sentential parsing performance.
*/
this.addDiscourseFeatures(instance, small, large, headIndex, childIndex, attDist, fv)
} else {
/* Add in features from the feature lists. It assumes the feature
* lists can have different lengths for each item. For example, nouns
* might have a different number of morphological features than verbs.
*/
for {
i <- 0 until instance.feats(headIndex).length
j <- 0 until instance.feats(childIndex).length
} {
this.addTwoObsFeatures("FF" + i + "*" + j,
instance.forms(headIndex),
instance.feats(headIndex)(i),
instance.forms(childIndex),
instance.feats(childIndex)(j),
attDist, fv
)
this.addTwoObsFeatures("LF" + i + "*" + j,
instance.lemmas(headIndex),
instance.feats(headIndex)(i),
instance.lemmas(childIndex),
instance.feats(childIndex)(j),
attDist, fv
)
}
}
} else {
/* We are using the old MST format. Pick up stem features the way they
* used to be done. This is kept for replicability of results for old
* versions.
*/
val hL = instance.forms(headIndex).length
val cL = instance.forms(childIndex).length
if (hL > 5 || cL > 5) {
this.addOldMSTStemFeatures(
instance.lemmas(headIndex),
instance.postags(headIndex),
instance.lemmas(childIndex),
instance.postags(childIndex),
attDist, hL, cL, fv
)
}
}
}
protected def addLinearFeatures(
label: String,
vals: IndexedSeq[String],
first: Int,
second: Int,
attDist: String,
fv: FeatureVector
) {
val pL = if (first > 0) vals(first - 1) else "STR"
val pR = if (second < vals.length - 1) vals(second + 1) else "END"
val pLR = if (first < second - 1) vals(first + 1) else "MID"
val pRL = if (second > first + 1) vals(second - 1) else "MID"
val pos = label + "PC=" + vals(first) + " " + vals(second)
vals.drop(first + 1).take(second - first - 1).map(pos + " " + _).foreach { allPos =>
this.add(allPos, fv)
this.add(allPos + attDist, fv)
}
this.addCorePosFeatures(label + "PT", pL, vals(first), pLR, pRL, vals(second), pR, attDist, fv)
}
private def addLabeledFeatures(
instance: DependencyInstance,
word: Int,
label: String,
attR: Boolean,
childFeatures: Boolean,
fv: FeatureVector
) {
val att = (if (attR) "RA" else "LA") + "&" + childFeatures
val w = instance.forms(word)
val wP = instance.postags(word)
val wPm1 = if (word > 0) instance.postags(word - 1) else "STR"
val wPp1 = if (word < instance.size) instance.postags(word + 1) else "END"
this.add("NTS1=" + label + "&" + att, fv)
this.add("ANTS1=" + label, fv)
Seq("&" + att, "").map("&" + label + _).foreach { suff =>
this.add("NTH=" + w + " " + wP + suff, fv)
this.add("NTI=" + wP + suff, fv)
this.add("NTIA=" + wPm1 + " " + wP + suff, fv)
this.add("NTIB=" + wP + " " + wPp1 + suff, fv)
this.add("NTIC=" + wPm1 + " " + wP + " " + wPp1 + suff, fv)
this.add("NTJ=" + w + suff, fv)
}
}
private def addWithAtt(feat: String, att: String, fv: FeatureVector) {
this.add(feat, fv)
this.add(feat + att, fv)
}
private def addCorePosFeatures(
prefix: String,
leftOf1: String, one: String, rightOf1: String,
leftOf2: String, two: String, rightOf2: String,
attDistance: String,
fv: FeatureVector
) {
val att = "*" + attDistance
this.add(prefix + "=" + leftOf1 + " " + one + " " + two + att, fv)
var feat = prefix + "1=" + leftOf1+ " " +one+ " " +two
this.add(feat, fv)
feat += " " + rightOf2
this.addWithAtt(feat, att, fv)
this.addWithAtt(prefix + "2=" + leftOf1 + " " + two + " " + rightOf2, att, fv)
this.addWithAtt(prefix + "3=" + leftOf1 + " " + one + " " + rightOf2, att, fv)
this.addWithAtt(prefix + "4=" + one + " " + two + " " + rightOf2, att, fv)
val prefix2 = "A" + prefix
feat = prefix2 + "1=" + one + " " + rightOf1 + " " + leftOf2
this.addWithAtt(feat, att, fv)
feat += " " + two
this.addWithAtt(feat, att, fv)
this.addWithAtt(prefix2 + "2=" + one + " " + rightOf1 + " " + two, att, fv)
this.addWithAtt(prefix2 + "3=" + one + " " + leftOf2 + " " + two, att, fv)
this.addWithAtt(prefix2 + "4=" + rightOf1 + " " + leftOf2 + " " + two, att, fv)
val prefix3 = "B" + prefix2
this.addWithAtt(prefix3 + "1=" + leftOf1 + " " + one + " " + leftOf2 + " " + two, att, fv)
this.addWithAtt(prefix3 + "2=" + one + " " + rightOf1 + " " + two + " " + rightOf2, att, fv)
}
protected def addTwoObsFeatures(
prefix: String,
item1F1: String, item1F2: String,
item2F1: String, item2F2: String,
attDistance: String,
fv: FeatureVector
) {
val att = "*" + attDistance
this.addWithAtt(prefix + "2FF1=" + item1F1, att, fv)
this.addWithAtt(prefix + "2FF1=" + item1F1 + " " + item1F2, att, fv)
this.addWithAtt(prefix + "2FF1=" + item1F1 + " " + item1F2 + " " + item2F2, att, fv)
this.addWithAtt(prefix + "2FF1=" + item1F1 + " " + item1F2 + " " + item2F2 + " " + item2F1, att, fv)
this.addWithAtt(prefix + "2FF2=" + item1F1 + " " + item2F1, att, fv)
this.addWithAtt(prefix + "2FF3=" + item1F1 + " " + item2F2, att, fv)
this.addWithAtt(prefix + "2FF4=" + item1F2 + " " + item2F1, att, fv)
this.addWithAtt(prefix + "2FF4=" + item1F2 + " " + item2F1 + " " + item2F2, att, fv)
this.addWithAtt(prefix + "2FF5=" + item1F2 + " " + item2F2, att, fv)
this.addWithAtt(prefix + "2FF6=" + item2F1 + " " + item2F2, att, fv)
this.addWithAtt(feat = prefix + "2FF7=" + item1F2, att, fv)
this.addWithAtt(prefix + "2FF8=" + item2F1, att, fv)
this.addWithAtt(prefix + "2FF9=" + item2F2, att, fv)
}
private def addDiscourseFeatures(
instance: DependencyInstance,
small: Int,
large: Int,
headIndex: Int,
childIndex: Int,
attDist: String,
fv: FeatureVector
) {
addLinearFeatures("FORM", instance.forms, small, large, attDist, fv);
addLinearFeatures("LEMMA", instance.lemmas, small, large, attDist, fv);
addTwoObsFeatures("HCB1", instance.forms(headIndex),
instance.lemmas(headIndex),
instance.forms(childIndex),
instance.lemmas(childIndex),
attDist, fv);
addTwoObsFeatures("HCB2", instance.forms(headIndex),
instance.lemmas(headIndex),
instance.forms(childIndex),
instance.postags(childIndex),
attDist, fv);
addTwoObsFeatures("HCB3", instance.forms(headIndex),
instance.lemmas(headIndex),
instance.forms(childIndex),
instance.cpostags(childIndex),
attDist, fv);
addTwoObsFeatures("HC2", instance.forms(headIndex),
instance.postags(headIndex),
instance.forms(childIndex),
instance.cpostags(childIndex), attDist, fv);
addTwoObsFeatures("HCC2", instance.lemmas(headIndex),
instance.postags(headIndex),
instance.lemmas(childIndex),
instance.cpostags(childIndex),
attDist, fv);
//// Use this if your extra feature lists all have the same length.
(0 until instance.feats.size).foreach { i =>
addLinearFeatures("F" +i, instance.feats(i), small, large, attDist, fv);
addTwoObsFeatures("FF" +i,
instance.forms(headIndex),
instance.feats(i)(headIndex),
instance.forms(childIndex),
instance.feats(i)(childIndex),
attDist, fv);
addTwoObsFeatures("LF" +i,
instance.lemmas(headIndex),
instance.feats(i)(headIndex),
instance.lemmas(childIndex),
instance.feats(i)(childIndex),
attDist, fv);
addTwoObsFeatures("PF" +i,
instance.postags(headIndex),
instance.feats(i)(headIndex),
instance.postags(childIndex),
instance.feats(i)(childIndex),
attDist, fv);
addTwoObsFeatures("CPF" +i,
instance.cpostags(headIndex),
instance.feats(i)(headIndex),
instance.cpostags(childIndex),
instance.feats(i)(childIndex),
attDist, fv);
(i + 1 until instance.feats.size).foreach { j =>
addTwoObsFeatures("CPF" +i+ "_" +j,
instance.feats(i)(headIndex),
instance.feats(j)(headIndex),
instance.feats(i)(childIndex),
instance.feats(j)(childIndex),
attDist, fv);
}
(0 until instance.feats.size).foreach { j =>
addTwoObsFeatures("XFF" +i+ "_" +j,
instance.forms(headIndex),
instance.feats(i)(headIndex),
instance.forms(childIndex),
instance.feats(j)(childIndex),
attDist, fv);
addTwoObsFeatures("XLF" +i+ "_" +j,
instance.lemmas(headIndex),
instance.feats(i)(headIndex),
instance.lemmas(childIndex),
instance.feats(j)(childIndex),
attDist, fv);
addTwoObsFeatures("XPF" +i+ "_" +j,
instance.postags(headIndex),
instance.feats(i)(headIndex),
instance.postags(childIndex),
instance.feats(j)(childIndex),
attDist, fv);
addTwoObsFeatures("XCF" +i+ "_" +j,
instance.cpostags(headIndex),
instance.feats(i)(headIndex),
instance.cpostags(childIndex),
instance.feats(j)(childIndex),
attDist, fv);
}
}
if (options.useRelationalFeatures) {
//for (int rf_index=0; rf_index<2; rf_index++) {
(0 until instance.relFeats.size).foreach { rf_index =>
val headToChild =
"H2C" +rf_index+instance.relFeats(rf_index).getFeature(headIndex, childIndex);
addTwoObsFeatures("RFA1",
instance.forms(headIndex),
instance.lemmas(headIndex),
instance.postags(childIndex),
headToChild,
attDist, fv);
addTwoObsFeatures("RFA2",
instance.postags(headIndex),
instance.cpostags(headIndex),
instance.forms(childIndex),
headToChild,
attDist, fv);
addTwoObsFeatures("RFA3",
instance.lemmas(headIndex),
instance.postags(headIndex),
instance.forms(childIndex),
headToChild,
attDist, fv);
addTwoObsFeatures("RFB1",
headToChild,
instance.postags(headIndex),
instance.forms(childIndex),
instance.lemmas(childIndex),
attDist, fv);
addTwoObsFeatures("RFB2",
headToChild,
instance.forms(headIndex),
instance.postags(childIndex),
instance.cpostags(childIndex),
attDist, fv);
addTwoObsFeatures("RFB3",
headToChild,
instance.forms(headIndex),
instance.lemmas(childIndex),
instance.postags(childIndex),
attDist, fv);
}
}
}
private def addOldMSTStemFeatures(
hLemma: String,
headP: String,
cLemma: String,
childP: String,
attDist: String,
hL: Int,
cL: Int,
fv: FeatureVector
) {
val all = hLemma + " " + headP + " " + cLemma + " " + childP
val hPos = headP + " " + cLemma + " " + childP
val cPos = hLemma + " " + headP + " " + childP
val hP = headP + " " + cLemma
val cP = hLemma + " " + childP
val oPos = headP + " " + childP
val oLex = hLemma + " " + cLemma
this.add("SA=" + all + attDist, fv)
this.add("SF=" + oLex + attDist, fv)
this.add("SAA=" + all, fv)
this.add("SFF=" + oLex, fv)
if (cL > 5) {
this.add("SB=" + hPos + attDist, fv)
this.add("SD=" + hP + attDist, fv)
this.add("SK=" + cLemma + " " + childP+attDist, fv)
this.add("SM=" + cLemma+ attDist, fv)
this.add("SBB=" + hPos, fv)
this.add("SDD=" + hP, fv)
this.add("SKK=" + cLemma + " " + childP, fv)
this.add("SMM=" + cLemma, fv)
}
if (hL > 5) {
this.add("SC=" + cPos + attDist, fv)
this.add("SE=" + cP + attDist, fv)
this.add("SH=" + hLemma + " " + headP + attDist, fv)
this.add("SJ=" + hLemma + attDist, fv)
this.add("SCC=" + cPos, fv)
this.add("SEE=" + cP, fv)
this.add("SHH=" + hLemma + " " + headP, fv)
this.add("SJJ=" + hLemma, fv)
}
}
}
| travisbrown/mstparser | src/main/scala/mstparser/DependencyPipe.scala | Scala | epl-1.0 | 22,964 |
/* **************************************************************************
* *
* Copyright (C) 2011 Christian Krause *
* *
* Christian Krause <kizkizzbangbang@googlemail.com> *
* *
****************************************************************************
* *
* This file is part of 'scadulix'. *
* *
* This project is free software: you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation, either version 3 of the License, or *
* any later version. *
* *
* This project is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
* GNU General Public License for more details. *
* *
* You should have received a copy of the GNU General Public License *
* along with this project. If not, see <http://www.gnu.org/licenses/>. *
* *
****************************************************************************/
package scadulix
/** Holds [[scadulix.Organisation]] instances. */
object Organisation extends util.Values {
lazy val OSI = Organisation("Open Source Initiative", "http://opensource.org/")
}
/** License-approving organisation.
*
* @param name Returns the name of this organisation.
* @param website Returns the website of this organisation.
*/
case class Organisation(name: String, website: URL) {
/** Returns the name of this organisation. */
override def toString = name
}
| wookietreiber/scadulix | src/main/scala/organisations.scala | Scala | gpl-3.0 | 2,369 |
package io.buoyant.linkerd
package protocol
import com.twitter.conversions.time._
import com.twitter.finagle.Failure
import com.twitter.finagle.http.{Status, Request}
import io.buoyant.linkerd.protocol.TlsUtils._
import io.buoyant.test.Awaits
import org.scalatest.FunSuite
class TlsStaticValidationTest extends FunSuite with Awaits {
val init = Linker.Initializers(
protocol = Seq(HttpInitializer)
)
test("tls router + plain upstream with static validation") {
withCerts("linkerd") { certs =>
val dog = Downstream.constTls("dogs", "woof", certs.serviceCerts("linkerd").cert,
certs.serviceCerts("linkerd").key)
try {
val linkerConfig =
s"""
|routers:
|- protocol: http
| dtab: |
| /p/dog => /$$/inet/127.1/${dog.port} ;
| /svc/clifford => /p/dog ;
| servers:
| - port: 0
| client:
| tls:
| commonName: linkerd
| trustCerts:
| - ${certs.caCert.getPath}
|""".stripMargin
val linker = init.load(linkerConfig)
val router = linker.routers.head.initialize()
try {
val server = router.servers.head.serve()
try {
val client = upstream(server)
try {
val rsp = {
val req = Request()
req.host = "clifford"
await(client(req))
}
assert(rsp.contentString == "woof")
()
} finally await(client.close())
} finally await(server.close())
} finally await(router.close())
} finally await(dog.server.close())
}
}
test("tls router + plain upstream with static validation and incorrect common name") {
withCerts("linkerd") { certs =>
val dog = Downstream.constTls("dogs", "woof", certs.serviceCerts("linkerd").cert,
certs.serviceCerts("linkerd").key)
try {
val linkerConfig =
s"""
|routers:
|- protocol: http
| dtab: |
| /p/dog => /$$/inet/127.1/${dog.port} ;
| /svc/clifford => /p/dog ;
| servers:
| - port: 0
| service:
| retries:
| budget:
| minRetriesPerSec: 0
| percentCanRetry: 0.0
| client:
| tls:
| commonName: wrong
| trustCerts:
| - ${certs.caCert.getPath}
|""".stripMargin
val linker = init.load(linkerConfig)
val router = linker.routers.head.initialize()
try {
val server = router.servers.head.serve()
try {
val client = upstream(server)
try {
val rsp = {
val req = Request()
req.host = "clifford"
assert(await(client(req)).status == Status.BadGateway)
}
} finally await(client.close())
} finally await(server.close())
} finally await(router.close())
} finally await(dog.server.close())
}
}
}
| denverwilliams/linkerd | linkerd/protocol/http/src/integration/scala/io/buoyant/linkerd/protocol/TlsStaticValidationTest.scala | Scala | apache-2.0 | 3,264 |
package functional
import org.scalatest.{FunSpec, Matchers}
class TestNoJacketWhenHot extends FunSpec with Matchers {
describe("NoJacketWhenHot") {
it("should pass if it is cold and you try to put a jacket on") {
NoJacketWhenHot(PJsOnly, PutOnJacket, COLD) should be (Pass)
}
it("should fail if it is hot and you try to put a jacket on") {
NoJacketWhenHot(PJsOnly, PutOnJacket, HOT) should be (Fail)
}
it("should pass if you are not trying to put a jacket on") {
NoJacketWhenHot(PJsOnly, LeaveHouse, HOT) should be (Pass)
}
}
}
| dkettlestrings/adornment | src/test/scala/functional/TestNoJacketWhenHot.scala | Scala | mit | 588 |
package com.overviewdocs.models
import java.nio.charset.Charset
import java.time.Instant
case class CsvImport(
id: Long,
documentSetId: Long,
filename: String,
charsetName: String, // Charset isn't serializable
lang: String,
loid: Long,
nBytes: Long,
nBytesProcessed: Long,
nDocuments: Int,
cancelled: Boolean,
estimatedCompletionTime: Option[Instant],
createdAt: Instant
) {
def charset: Charset = Charset.forName(charsetName)
}
object CsvImport {
case class CreateAttributes(
documentSetId: Long,
filename: String,
charsetName: String,
lang: String,
loid: Long,
nBytes: Long,
nBytesProcessed: Long = 0L,
nDocuments: Int = 0,
cancelled: Boolean = false,
estimatedCompletionTime: Option[Instant] = None,
createdAt: Instant = Instant.now
)
}
| overview/overview-server | common/src/main/scala/com/overviewdocs/models/CsvImport.scala | Scala | agpl-3.0 | 818 |
package com.cloudera.hue.livy.repl
import com.cloudera.hue.livy.repl.python.PythonSession
import org.json4s.JsonAST.JValue
import org.json4s.{Extraction, DefaultFormats}
import org.scalatest.{BeforeAndAfter, FunSpec}
import org.scalatest.matchers.ShouldMatchers
import _root_.scala.concurrent.Await
import _root_.scala.concurrent.duration.Duration
class PythonSessionSpec extends FunSpec with ShouldMatchers with BeforeAndAfter {
implicit val formats = DefaultFormats
var session: Session = null
before {
session = PythonSession.createPython()
}
after {
session.close()
}
describe("A python session") {
it("should start in the starting or idle state") {
session.state should (equal (Session.Starting()) or equal (Session.Idle()))
}
it("should eventually become the idle state") {
session.waitForStateChange(Session.Starting())
session.state should equal (Session.Idle())
}
it("should execute `1 + 2` == 3") {
val result = Await.result(session.execute("1 + 2"), Duration.Inf)
val expectedResult = Extraction.decompose(Map(
"status" -> "ok",
"execution_count" -> 0,
"data" -> Map(
"text/plain" -> "3"
)
))
result should equal (expectedResult)
}
it("should execute `x = 1`, then `y = 2`, then `x + y`") {
var result = Await.result(session.execute("x = 1"), Duration.Inf)
var expectedResult = Extraction.decompose(Map(
"status" -> "ok",
"execution_count" -> 0,
"data" -> Map(
"text/plain" -> ""
)
))
result should equal (expectedResult)
result = Await.result(session.execute("y = 2"), Duration.Inf)
expectedResult = Extraction.decompose(Map(
"status" -> "ok",
"execution_count" -> 1,
"data" -> Map(
"text/plain" -> ""
)
))
result should equal (expectedResult)
result = Await.result(session.execute("x + y"), Duration.Inf)
expectedResult = Extraction.decompose(Map(
"status" -> "ok",
"execution_count" -> 2,
"data" -> Map(
"text/plain" -> "3"
)
))
result should equal (expectedResult)
}
it("should do table magic") {
val result = Await.result(session.execute("x = [[1, 'a'], [3, 'b']]\\n%table x"), Duration.Inf)
val expectedResult = Extraction.decompose(Map(
"status" -> "ok",
"execution_count" -> 1,
"data" -> Map(
"application/vnd.livy.table.v1+json" -> Map(
"headers" -> List(
Map("type" -> "INT_TYPE", "name" -> "0"),
Map("type" -> "STRING_TYPE", "name" -> "1")),
"data" -> List(List(1, "a"), List(3, "b"))
)
)
))
result should equal (expectedResult)
}
it("should capture stdout") {
val result = Await.result(session.execute("""print 'Hello World'"""), Duration.Inf)
val expectedResult = Extraction.decompose(Map(
"status" -> "ok",
"execution_count" -> 0,
"data" -> Map(
"text/plain" -> "Hello World"
)
))
result should equal (expectedResult)
}
it("should report an error if accessing an unknown variable") {
val result = Await.result(session.execute("""x"""), Duration.Inf)
val expectedResult = Extraction.decompose(Map(
"status" -> "error",
"execution_count" -> 0,
"traceback" -> List(
"Traceback (most recent call last):\\n",
"NameError: name 'x' is not defined\\n"
),
"ename" -> "NameError",
"evalue" -> "name 'x' is not defined"
))
result should equal (expectedResult)
}
it("should report an error if exception is thrown") {
val result = Await.result(session.execute(
"""def foo():
| raise Exception()
|foo()
|""".stripMargin), Duration.Inf)
val expectedResult = Extraction.decompose(Map(
"status" -> "error",
"execution_count" -> 0,
"traceback" -> List(
"Traceback (most recent call last):\\n",
"Exception\\n"
),
"ename" -> "Exception",
"evalue" -> ""
))
result should equal (expectedResult)
}
}
}
| vitan/hue | apps/spark/java/livy-repl/src/test/scala/com/cloudera/hue/livy/repl/PythonSessionSpec.scala | Scala | apache-2.0 | 4,313 |
package nxt
import nxt.util.Logger
import scala.util.Try
import java.sql.ResultSet
import resource._
class TransactionQueryBuilder {
protected val sql = "SELECT * FROM transaction WHERE amount >= 0 "
private def withSuffix(suffix:String) = {
val s = this.sql
new TransactionQueryBuilder{
override protected val sql = s"$s $suffix "
}
}
def withSender(accountId:Long) = withSuffix(s"AND sender_id = $accountId")
def withRecipient(accountId:Long) = withSuffix(s"AND recipient_id = $accountId")
def withId(id:Long) = withSuffix(s"AND id = $id")
def withHeightMoreThan(height:Int) = withSuffix(s"AND height > $height")
def withHeightLessThan(height:Int) = withSuffix(s"AND height < $height")
def withPlainMessage() = withSuffix("AND has_message = true")
def withType(txType:Byte) = withSuffix(s"AND type = $txType")
def withType(txType:Byte, subType:Byte) = withSuffix(s"AND type = $txType AND subtype = $subType")
def withReferenceToTransaction(tx:Transaction) = withSuffix(s"AND referenced_transaction_full_hash = '${tx.getFullHash}'")
def query():Try[Seq[Transaction]] = {
Logger.logDebugMessage(s"Going to execute query: $sql")
Try{
managed(Db.db.getConnection).map {con=>
val pstmt = con.prepareStatement(sql)
val rs: ResultSet = pstmt.executeQuery
new Iterator[Transaction] {
def hasNext = rs.next()
def next() = TransactionDb.loadTransaction(con,rs)
}.toList
}.opt.get
}
}
} | kushti/NxtScala | src/main/scala/nxt/TransactionQueryBuilder.scala | Scala | cc0-1.0 | 1,513 |
package controllers
import play.api.mvc.{Action, Controller}
import play.api.libs.json._
import play.api.Logger
import apidoc.models.json._
import apidoc.models.sample.SampleImpl
object Samples extends Controller {
private final val logger = Logger
def getGuid(guid: String) = Action {
logger.info(s"GUID: $guid")
val sampleObj = SampleImpl(guid, "SIX Team")
Ok(Json.toJson(sampleObj))
}
}
| riccardomerolla/activator-six-app | svc/app/controllers/Samples.scala | Scala | apache-2.0 | 411 |
package model
/**
* Created by einevea on 19/03/2014.
*/
object StoryType extends Enumeration{
class MyValue(override val id: Int, val url:String) extends Val(i=id)
object MyValue{
def apply(url: String) = new MyValue(nextId, url)
}
final def usingName(s: String): StoryType = {
withName(s) match {
case st: StoryType.StoryType => st
case _ => throw new ClassCastException
}
}
type StoryType = MyValue
val bug = MyValue("bug.svg")
val improvement = MyValue("improvement.svg")
val feature = MyValue("feature.svg")
val task = MyValue("task.svg")
}
| einevea/eKanban | app/model/StoryType.scala | Scala | mit | 602 |
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2015-2021 Andre White.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.truthencode.ddo.model.alignment
import enumeratum.{EnumEntry, Enum => SmartEnum}
import io.truthencode.ddo.model.alignment.LawAxis.{Chaotic, Lawful, Neutral}
import io.truthencode.ddo.model.alignment.MoralAxis.{Evil, Good}
import io.truthencode.ddo.support.SearchPrefix
/**
* Represents the dual axis Alignments
*
* @param law
* The range between Lawful to Chaotic
* @param moral
* The moral range between Good and evil
*/
sealed class Alignments(
override val law: _root_.io.truthencode.ddo.model.alignment.LawAxis,
override val moral: MoralAxis
) extends EnumEntry with AlignmentCombination
object Alignments extends SmartEnum[Alignments] with SearchPrefix {
val values = findValues
/**
* Used when qualifying a search with a prefix. Examples include finding "HalfElf" from qualified
* "Race:HalfElf"
*
* @return
* A default or applied prefix
*/
override def searchPrefixSource: String = "Alignment"
case object ChaoticGood extends Alignments(Chaotic, Good)
case object ChaoticNeutral extends Alignments(Chaotic, MoralAxis.Neutral)
case object ChaoticEvil extends Alignments(Chaotic, Evil)
case object NeutralGood extends Alignments(Neutral, Good)
case object TrueNeutral extends Alignments(Neutral, MoralAxis.Neutral)
case object NeutralEvil extends Alignments(Neutral, Evil)
case object LawfulGood extends Alignments(Lawful, Good)
case object LawfulNeutral extends Alignments(Lawful, MoralAxis.Neutral)
case object LawfulEvil extends Alignments(Lawful, Evil)
}
| adarro/ddo-calc | subprojects/common/ddo-core/src/main/scala/io/truthencode/ddo/model/alignment/Alignments.scala | Scala | apache-2.0 | 2,261 |
/*
*************************************************************************************
* Copyright 2013 Normation SAS
*************************************************************************************
*
* This file is part of Rudder.
*
* Rudder is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU General Public License version 3, the copyright holders add
* the following Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU General
* Public License version 3, when you create a Related Module, this
* Related Module is not considered as a part of the work and may be
* distributed under the license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* Rudder is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Rudder. If not, see <http://www.gnu.org/licenses/>.
*
*************************************************************************************
*/
package com.normation.rudder.web.rest
import com.normation.rudder.rule.category._
import net.liftweb.common._
import com.normation.rudder.domain.nodes.NodeGroupCategoryId
import com.normation.rudder.repository.FullNodeGroupCategory
sealed trait DetailLevel {
def value : String
}
case object FullDetails extends DetailLevel {
val value = "full"
}
case object MinimalDetails extends DetailLevel {
val value = "minimal"
}
case class RestRuleCategory(
name : Option[String] = None
, description : Option[String] = None
, parent : Option[RuleCategoryId] = None
) {
def update(ruleCategory:RuleCategory) = {
val updateName = name.getOrElse(ruleCategory.name)
val updateDescription = description.getOrElse(ruleCategory.description)
ruleCategory.copy(
name = updateName
, description = updateDescription
)
}
def create(id : RuleCategoryId) : Box[RuleCategory]= {
name match {
case Some(name) =>
Full(
RuleCategory(
id
, name
, description.getOrElse("")
, Nil
)
)
case None =>
Failure("Could not create Rule Category, cause name is not defined")
}
}
}
case class RestGroupCategory(
name : Option[String] = None
, description : Option[String] = None
, parent : Option[NodeGroupCategoryId] = None
) {
def update(category:FullNodeGroupCategory) = {
val updateName = name.getOrElse(category.name)
val updateDescription = description.getOrElse(category.description)
category.copy(
name = updateName
, description = updateDescription
)
}
def create(id : NodeGroupCategoryId) : Box[FullNodeGroupCategory]= {
name match {
case Some(name) =>
Full(
FullNodeGroupCategory(
id
, name
, description.getOrElse("")
, Nil
, Nil
)
)
case None =>
Failure("Could not create Group Category, cause name is not defined")
}
}
} | armeniaca/rudder | rudder-web/src/main/scala/com/normation/rudder/web/rest/RestData.scala | Scala | gpl-3.0 | 3,781 |
package svoysh.util
object Strings extends Strings
trait Strings {
/**
* Returns <code>true</code> if <code>string</code> either <code>null</code>
* or equals to <code>""</code> (empty string) or consists of white spaces
* only.
*/
def isEmpty(string: String): Boolean = (
string == null || string.trim.length == 0
)
/**
* Trim trailing whitespaces.
*/
def rtrim(str: String): String = {
if (str == null) return null
var len = str.length
while (len > 0) {
len -= 1
if (!Character.isWhitespace(str.charAt(len))) {
return str.substring(0, len + 1)
}
}
""
}
} | Svoysh/svoysh-util | src/main/scala/svoysh/util/Strings.scala | Scala | apache-2.0 | 603 |
package org.openmole.buildsystem
import sbt._
import Keys._
import OMKeys._
import com.typesafe.sbt.osgi.{ OsgiKeys, SbtOsgi }
trait OsgiBundler {
self: BuildSystemDefaults ⇒
protected val bundleMap = Map("Bundle-ActivationPolicy" -> "lazy")
protected def osgiSettings = SbtOsgi.osgiSettings ++ Seq(
OsgiKeys.bundleSymbolicName <<= (name, OSGi.singleton) { case (name, singleton) ⇒ name + ";singleton:=" + singleton },
autoAPIMappings := true,
bundleProj := true,
OsgiKeys.bundleVersion <<= version,
OsgiKeys.exportPackage <<= name { n ⇒ Seq(n + ".*") },
OsgiKeys.bundleActivator := None,
install in Compile <<= publishLocal in Compile,
installRemote in Compile <<= publish in Compile,
OsgiKeys.bundle <<= OsgiKeys.bundle tag Tags.Disk,
(update in install) <<= update in install tag Tags.Network,
bundleType := Set("default"),
test in (Test, test) <<= test in (Test, test) tag (Tags.Disk),
publishTo <<= isSnapshot(if (_) Some("OpenMOLE Nexus" at "http://maven.openmole.org/snapshots") else Some("OpenMOLE Nexus" at "http://maven.openmole.org/releases"))
) ++ scalariformDefaults
def OsgiProject(artifactSuffix: String,
pathFromDir: String = "",
exports: Seq[String] = Seq(),
privatePackages: Seq[String] = Seq(),
singleton: Boolean = false,
settings: Seq[Setting[_]] = Nil,
bundleActivator: Option[String] = None,
dynamicImports: Seq[String] = Seq(),
imports: Seq[String] = Seq("*;resolution:=optional"))(implicit artifactPrefix: Option[String] = None) = {
require(artifactPrefix.forall(!_.endsWith(".")), "Do not end your artifact prefix with ., it will be added automatically.")
val artifactId = artifactPrefix map (_ + "." + artifactSuffix) getOrElse artifactSuffix
val base = dir / (if (pathFromDir == "") artifactId else pathFromDir)
val exportedPackages = if (exports.isEmpty) Seq(artifactId + ".*") else exports
Project(artifactId.replace('.', '-'), base, settings = settings).settings(commonsSettings ++ osgiSettings: _*).settings(
name := artifactId,
organization := org,
OSGi.singleton := singleton,
OSGi.openMOLEScope := None,
OsgiKeys.exportPackage := exportedPackages,
OsgiKeys.additionalHeaders <<=
(OSGi.openMOLEScope) {
omScope ⇒ omScope.map(os ⇒ Map("OpenMOLE-Scope" -> os)).getOrElse(Map()) ++ Map("Bundle-ActivationPolicy" -> "lazy")
},
OsgiKeys.privatePackage := privatePackages,
OsgiKeys.dynamicImportPackage := dynamicImports,
OsgiKeys.importPackage := imports,
OsgiKeys.bundleActivator <<= OsgiKeys.bundleActivator { bA ⇒ bundleActivator.orElse(bA) }
)
}
def OsgiGUIProject(
name: String,
ext: ClasspathDep[ProjectReference],
client: ClasspathDep[ProjectReference],
server: ClasspathDep[ProjectReference]) = OsgiProject(name) dependsOn (ext, client, server)
}
| ISCPIF/PSEExperiments | openmole-src/build-system/src/main/scala/org/openmole/buildsystem/OsgiBundler.scala | Scala | agpl-3.0 | 3,042 |
package dotty.tools.dotc
package transform
import core._
import MegaPhase.MiniPhase
import dotty.tools.dotc.core.Contexts._
import ast._
import Flags._
import Symbols._
import ExplicitOuter.isOuterParamAccessor
import collection.mutable
object CountOuterAccesses:
val name: String = "countOuterAccesses"
val description: String = "identify outer accessors that can be dropped"
/** Characterizes outer accessors and outer fields that can be dropped
* if there are no references to them from within the toplevel class
* where they are defined.
*/
def mightBeDropped(sym: Symbol)(using Context) =
def isLocal(cls: Symbol) =
cls.isAnonymousClass
|| cls.owner.isTerm
|| cls.accessBoundary(defn.RootClass).isContainedIn(cls.topLevelClass)
(sym.is(OuterAccessor) || sym.isOuterParamAccessor) && isLocal(sym.owner)
/** Counts number of accesses to outer accessors and outer fields of
* classes that are visible only within one source file. The info
* is collected in `outerAccessCount` and used in the subsequent
* DropOuterAccessors phase
*/
class CountOuterAccesses extends MiniPhase:
thisPhase =>
import tpd._
override def phaseName: String = CountOuterAccesses.name
override def description: String = CountOuterAccesses.description
override def runsAfter: Set[String] = Set(LambdaLift.name)
// LambdaLift can create outer paths. These need to be known in this phase.
/** The number of times an outer accessor that might be dropped is accessed */
val outerAccessCount = new mutable.HashMap[Symbol, Int] {
override def default(s: Symbol): Int = 0
}
private def markAccessed(tree: RefTree)(using Context): Tree =
val sym = tree.symbol
if CountOuterAccesses.mightBeDropped(sym) then outerAccessCount(sym) += 1
tree
override def transformIdent(tree: Ident)(using Context): Tree =
markAccessed(tree)
override def transformSelect(tree: Select)(using Context): Tree =
markAccessed(tree)
| dotty-staging/dotty | compiler/src/dotty/tools/dotc/transform/CountOuterAccesses.scala | Scala | apache-2.0 | 1,989 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dbis.pig.plan.rewriting.dsl.words
import dbis.pig.op.PigOperator
import dbis.pig.plan.rewriting.dsl.builders.PigOperatorBuilder
import dbis.pig.plan.rewriting.dsl.traits.{CheckWordT, BuilderT, EndWordT}
class ReplaceWord[FROM <: PigOperator](override val b: BuilderT[FROM, PigOperator])
extends EndWordT[FROM, PigOperator] with CheckWordT[FROM, PigOperator] {
}
| ksattler/piglet | src/main/scala/dbis/pig/plan/rewriting/dsl/words/ReplaceWord.scala | Scala | apache-2.0 | 1,175 |
package org.trustedanalytics.sparktk.saveload
import org.apache.spark.SparkContext
import org.json4s.JsonAST.JValue
/**
* Trait for companion objects of classes that want to work with the SaveLoad methodology
*/
trait TkSaveableObject {
/**
*
* @param sc active spark context
* @param path the source path
* @param formatVersion the version of the format for the tk metadata that should be recorded.
* @param tkMetadata the data to save (should be a case class), must be serializable to JSON using json4s
* @return
*/
def load(sc: SparkContext, path: String, formatVersion: Int, tkMetadata: JValue): Any
/**
* ID for the format of how the object is save/load-ed. By default it is the object's type name
* @return
*/
def formatId: String = this.getClass.getName
/**
* helper which validates a given version is in the list of candidates
* @param version version to validate
* @param validCandidates valid versions
*/
def validateFormatVersion(version: Int, validCandidates: Int*) = {
require(validCandidates.contains(version),
s"Mismatched format version during load for $formatId. Expected $validCandidates 1, got $version")
}
}
| shibanis1/spark-tk | core/src/main/scala/org/trustedanalytics/sparktk/saveload/TkSaveableObject.scala | Scala | apache-2.0 | 1,203 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.