code
stringlengths
5
1M
repo_name
stringlengths
5
109
path
stringlengths
6
208
language
stringclasses
1 value
license
stringclasses
15 values
size
int64
5
1M
// Test the handling of generics by the nullability transform.
// There are two classes here: JavaCat is Java-defined, and ScalaCat
// is Scala-defined.
// NOTE(review): JavaCat and J are declared in companion Java sources of this
// test directory (not visible in this file).

// Scala-defined generic class: unlike a Java class, its members are NOT
// automatically nullified by the explicit-nulls transform, so a type argument
// coming from Java must carry its own `|Null`.
class ScalaCat[T] {}

class Test {
  // It's safe to return a JavaCat[String]|Null (no inner |Null),
  // because JavaCat, being a Java class, _already_ nullifies its
  // fields.
  val jc: JavaCat[String]|Null = J.getJavaCat[String]()

  // ScalaCat is Scala-defined, so we need the inner |Null.
  val sc: ScalaCat[String|Null]|Null = J.getScalaCat[String]()

  import java.util.List

  // Nested Java generics: each reference-typed layer produced by Java code
  // receives its own |Null, applied recursively.
  val las: List[Array[String|Null]]|Null = J.getListOfStringArray()
  val als: Array[List[String]|Null]|Null = J.getArrayOfStringList()
  val css: List[Array[List[Array[String|Null]]|Null]]|Null = J.getComplexStrings()
}
som-snytt/dotty
tests/explicit-nulls/pos/interop-poly-src/S.scala
Scala
apache-2.0
760
package collins.util.config

import collins.models.AssetSort
import collins.models.AssetType

/**
 * Configuration for the node classifier, read from the "nodeclassifier"
 * namespace (reference defaults in nodeclassifier_reference.conf).
 */
object NodeclassifierConfig extends Configurable {

  override val namespace = "nodeclassifier"
  override val referenceConfigFilename = "nodeclassifier_reference.conf"

  // Valid sort type names, derived from the AssetSort enumeration.
  val SortTypes = AssetSort.values.map(_.toString)
  val DefaultSortType = AssetSort.Distance.toString

  /**
   * The configured asset type, defaulting to "CONFIGURATION".
   * Fails configuration validation when the name does not resolve to a
   * known AssetType.
   */
  def assetType = {
    val name = getString("assetType").getOrElse("CONFIGURATION")
    AssetType.findByName(name).getOrElse {
      throw globalError("%s is not a valid asset type".format(name))
    }
  }

  /** Meta tag used to identify a nodeclass asset (upper-cased). */
  def identifyingMetaTag = getString("identifyingMetaTag", "IS_NODECLASS").toUpperCase

  /** Attribute used as display name for a nodeclass (upper-cased). */
  def displayNameAttribute = getString("displayNameAttribute", "NAME").toUpperCase

  /** Meta tags to exclude; always contains the display name attribute. */
  def excludeMetaTags = getStringSet("excludeMetaTags").map(_.toUpperCase) + displayNameAttribute

  /** Configured sort keys, restricted to known sort types. */
  def sortKeys = getStringSet("sortKeys", Set(DefaultSortType)).filter(SortTypes.contains)

  // Force evaluation of the derived values so misconfiguration is caught
  // at load time rather than on first use.
  override protected def validateConfig(): Unit = {
    assetType
    sortKeys
  }
}
byxorna/collins
app/collins/util/config/NodeclassifierConfig.scala
Scala
apache-2.0
1,052
package org.http4s.client.asynchttpclient

import java.util.concurrent.atomic.AtomicInteger

import org.reactivestreams.{Subscription, Publisher, Subscriber}
import org.reactivestreams.tck.SubscriberWhiteboxVerification.WhiteboxSubscriberProbe
import org.reactivestreams.tck.{SubscriberWhiteboxVerification, TestEnvironment}
import org.testng.annotations._
import org.testng.Assert._

import scalaz.-\/

/**
 * Reactive Streams TCK whitebox verification of QueueSubscriber, plus a few
 * hand-written TestNG cases covering the scalaz-stream `process` view.
 */
class QueueSubscriberTest extends SubscriberWhiteboxVerification[Integer](new TestEnvironment) {

  // Source of distinct element values for createElement.
  private lazy val counter = new AtomicInteger

  // TCK hook: wraps a QueueSubscriber so the whitebox probe can observe
  // the subscriber's signals.
  override def createSubscriber(theProbe: WhiteboxSubscriberProbe[Integer]): Subscriber[Integer] = {
    val subscriber = new QueueSubscriber[Integer](2) with WhiteboxSubscriber[Integer] {
      override def probe: WhiteboxSubscriberProbe[Integer] = theProbe
    }
    subscriber
  }

  // Plain subscriber (queue bound 1) used by the hand-written tests below.
  def createSubscriber(): QueueSubscriber[Integer] = new QueueSubscriber[Integer](1)

  override def createElement(element: Int): Integer = counter.getAndIncrement

  // All ten published elements should come out of the subscriber's process.
  @Test
  def emitsToProcess() = {
    val publisher = createHelperPublisher(10)
    val subscriber = createSubscriber()
    publisher.subscribe(subscriber)
    assertEquals(subscriber.process.runLog.run.size, 10)
  }

  // An onError from the publisher should fail the process with that same
  // exception (surfaced as a scalaz left disjunction).
  @Test
  def failsProcessOnError() = {
    object SadTrombone extends Exception
    val publisher = new Publisher[Integer] {
      override def subscribe(s: Subscriber[_ >: Integer]): Unit = {
        // Hand the subscriber a no-op subscription first, as the spec
        // requires onSubscribe before any terminal signal.
        s.onSubscribe(new Subscription {
          override def cancel(): Unit = {}
          override def request(n: Long): Unit = {}
        })
        s.onError(SadTrombone)
      }
    }
    val subscriber = createSubscriber()
    publisher.subscribe(subscriber)
    assertEquals(subscriber.process.runLog.attemptRun, -\/(SadTrombone))
  }

  // An immediate onComplete should close the queue: the process runs to
  // completion having emitted nothing.
  @Test
  def closesQueueOnComplete() = {
    object SadTrombone extends Exception
    val publisher = new Publisher[Integer] {
      override def subscribe(s: Subscriber[_ >: Integer]): Unit = {
        s.onSubscribe(new Subscription {
          override def cancel(): Unit = {}
          override def request(n: Long): Unit = {}
        })
        s.onComplete()
      }
    }
    val subscriber = createSubscriber()
    publisher.subscribe(subscriber)
    assertEquals(subscriber.process.runLog.run, Vector.empty)
  }
}
hvesalai/http4s
async-http-client/src/test/scala/org/http4s/client/asynchttpclient/QueueSubscriberTest.scala
Scala
apache-2.0
2,288
/*
 * Copyright 2011-2017 Chris de Vreeze
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package eu.cdevreeze.yaidom.convert

import scala.collection.immutable

import eu.cdevreeze.yaidom.core.QName
import eu.cdevreeze.yaidom.core.Scope
import eu.cdevreeze.yaidom.simple.Comment
import eu.cdevreeze.yaidom.simple.Elem
import eu.cdevreeze.yaidom.simple.ElemConverter
import eu.cdevreeze.yaidom.simple.EntityRef
import eu.cdevreeze.yaidom.simple.Node
import eu.cdevreeze.yaidom.simple.ProcessingInstruction
import eu.cdevreeze.yaidom.simple.Text

/**
 * Converter from yaidom nodes to Scala XML nodes, in particular from [[eu.cdevreeze.yaidom.simple.Elem]] to a `scala.xml.Elem`.
 *
 * There is no conversion from yaidom Documents to Scala XML documents, because there is no direct way to create Scala XML
 * documents.
 *
 * @author Chris de Vreeze
 */
trait YaidomToScalaXmlConversions extends ElemConverter[scala.xml.Elem] {

  /**
   * Converts a yaidom node to a Scala XML node, given a parent Scala XML scope.
   *
   * The parent NamespaceBinding is passed as extra parameter, in order to try to prevent the creation of any unnecessary
   * namespace declarations.
   */
  final def convertNode(node: Node, parentNamespaceBinding: scala.xml.NamespaceBinding): scala.xml.Node = {
    node match {
      case e: Elem => convertElem(e, parentNamespaceBinding)
      case t: Text => convertText(t)
      case pi: ProcessingInstruction => convertProcessingInstruction(pi)
      case er: EntityRef => convertEntityRef(er)
      case c: Comment => convertComment(c)
    }
  }

  /**
   * Converts a yaidom `Elem` to a Scala XML element, starting from the top (empty) scope.
   */
  final def convertElem(elm: Elem): scala.xml.Elem =
    convertElem(elm, scala.xml.TopScope)

  /**
   * Converts a yaidom `Elem` to a Scala XML element, given a parent Scala XML scope.
   *
   * The parent NamespaceBinding is passed as extra parameter, in order to try to prevent the creation of any unnecessary
   * namespace declarations.
   */
  final def convertElem(elm: Elem, parentNamespaceBinding: scala.xml.NamespaceBinding): scala.xml.Elem = {
    // Not tail-recursive, but the recursion depth should be limited

    val prefix = elm.qname.prefixOption.orNull
    val label = elm.qname.localPart

    val attributes = convertAttributes(elm.attributes)

    val nsBinding = convertScope(elm.scope, parentNamespaceBinding)

    // Children are converted against this element's binding, so they can
    // re-use it instead of re-declaring the same namespaces.
    val children: immutable.IndexedSeq[scala.xml.Node] =
      elm.children.map(ch => convertNode(ch, nsBinding))

    val minimizeEmpty = children.isEmpty

    new scala.xml.Elem(prefix, label, attributes, nsBinding, minimizeEmpty, children: _*)
  }

  /**
   * Converts a yaidom `Text` to a Scala XML `Atom[String]` (PCData for CDATA sections).
   */
  final def convertText(text: Text): scala.xml.Atom[String] = {
    if (text.isCData) scala.xml.PCData(text.text) else scala.xml.Text(text.text)
  }

  /**
   * Converts a yaidom `ProcessingInstruction` to a Scala XML `ProcInstr`.
   */
  final def convertProcessingInstruction(
    processingInstruction: ProcessingInstruction): scala.xml.ProcInstr = {

    new scala.xml.ProcInstr(processingInstruction.target, processingInstruction.data)
  }

  /**
   * Converts a yaidom `EntityRef` to a Scala XML `EntityRef`.
   */
  final def convertEntityRef(entityRef: EntityRef): scala.xml.EntityRef = {
    new scala.xml.EntityRef(entityRef.entity)
  }

  /**
   * Converts a yaidom `Comment` to a Scala XML `Comment`.
   */
  final def convertComment(comment: Comment): scala.xml.Comment = {
    new scala.xml.Comment(comment.text)
  }

  // Converts yaidom attributes to a scala.xml.MetaData chain, preserving order
  // by appending each attribute to the accumulated chain.
  private def convertAttributes(attributes: Iterable[(QName, String)]): scala.xml.MetaData = {
    var result: scala.xml.MetaData = scala.xml.Null

    for (attr <- attributes) {
      result =
        result.append(
          scala.xml.Attribute(
            attr._1.prefixOption,
            attr._1.localPart,
            Seq(scala.xml.Text(attr._2)),
            result))
    }
    result
  }

  /**
   * Converts the yaidom Scope to a Scala XML NamespaceBinding.
   */
  private def convertScope(scope: Scope): scala.xml.NamespaceBinding = {
    // scala.xml uses null (not "") for the default-namespace prefix.
    def editedPrefix(pref: String): String =
      if ((pref ne null) && pref.isEmpty) null.asInstanceOf[String] else pref // scalastyle:off null

    if (scope.isEmpty) {
      scala.xml.TopScope
    } else {
      val scopeAsSeq = scope.prefixNamespaceMap.toSeq map {
        case (pref, uri) =>
          (editedPrefix(pref) -> uri)
      }
      assert(scopeAsSeq.nonEmpty)

      val topScope: scala.xml.NamespaceBinding = scala.xml.TopScope
      val nsBinding: scala.xml.NamespaceBinding = scopeAsSeq.foldLeft(topScope) {
        case (acc, (pref, nsUri)) =>
          scala.xml.NamespaceBinding(pref, nsUri, acc)
      }
      nsBinding
    }
  }

  /**
   * Converts the yaidom Scope to a Scala XML NamespaceBinding, but tries to keep re-use the parent NamespaceBinding
   * (possible prepending some "children") as much as possible. This helps in preventing many creations of duplicate
   * namespace declarations.
   *
   * See method scala.xml.NamespaceBinding.buildString(StringBuilder, scala.xml.NamespaceBinding) for how (implicit) namespace
   * declarations are inferred from a NamespaceBinding and one of its ancestors.
   */
  // scalastyle:off null
  private def convertScope(scope: Scope, parentNamespaceBinding: scala.xml.NamespaceBinding): scala.xml.NamespaceBinding = {
    val decls = toScope(parentNamespaceBinding).relativize(scope)

    if (decls.retainingUndeclarations.nonEmpty) {
      // No way to re-use the immutable parent NamespaceBinding, so converting the scope without using the parent NamespaceBinding
      convertScope(scope)
    } else {
      // Only additions/overrides: prepend them onto the parent binding.
      val nsBinding = decls.prefixNamespaceMap.foldLeft(parentNamespaceBinding) {
        case (acc, (pref, uri)) =>
          val editedPrefix = if ((pref ne null) && (pref.isEmpty)) null.asInstanceOf[String] else pref
          scala.xml.NamespaceBinding(editedPrefix, uri, acc)
      }
      nsBinding
    }
  }

  /**
   * Converts the `scala.xml.NamespaceBinding` to a yaidom `Scope`.
   *
   * This implementation is brittle because of bug: SI 6939: Namespace binding (xmlns) is duplicated if a child redefines a prefix.
   * (see https://issues.scala-lang.org/browse/SI-6939 and https://github.com/scala/scala/pull/1858). Still, this implementation
   * tries to work around that bug.
   *
   * This method is the same as extractScope in ScalaXmlToYaidomConversions, but repeated here in order not to depend on that
   * other trait.
   */
  // scalastyle:off null
  private def toScope(scope: scala.xml.NamespaceBinding): Scope = {
    if ((scope eq null) || (scope.uri eq null) || (scope == scala.xml.TopScope)) {
      Scope.Empty
    } else {
      val prefix = if (scope.prefix eq null) "" else scope.prefix

      // Recursive call (not tail-recursive), and working around the above-mentioned bug
      val parentScope = toScope(scope.parent)

      if (scope.uri.isEmpty) {
        // Namespace undeclaration (which, looking at the NamespaceBinding API doc, seems not to exist)
        // Works for the default namespace too (knowing that "edited" prefix is not null but can be empty)
        parentScope -- Set(prefix)
      } else {
        // Works for namespace overrides too
        parentScope ++ Scope.from(prefix -> scope.uri)
      }
    }
  }
}
dvreeze/yaidom
shared/src/main/scala/eu/cdevreeze/yaidom/convert/YaidomToScalaXmlConversions.scala
Scala
apache-2.0
7,891
package com.wavesplatform.transaction.validation.impl

import com.wavesplatform.transaction.TxValidationError.GenericError
import com.wavesplatform.transaction.transfer.MassTransferTransaction
import com.wavesplatform.transaction.transfer.MassTransferTransaction.MaxTransferCount
import com.wavesplatform.transaction.validation.{TxValidator, ValidatedV}

/**
 * Structural validation for mass-transfer transactions: amount overflow,
 * transfer count limit, attachment, non-negative amounts, fee and chain ids.
 */
object MassTransferTxValidator extends TxValidator[MassTransferTransaction] {
  override def validate(tx: MassTransferTransaction): ValidatedV[MassTransferTransaction] = {
    import tx._

    val amounts = transfers.map(_.amount)

    // The fee plus all transfer amounts must not overflow when summed.
    val overflowCheck = V.noOverflow(fee +: amounts: _*)

    val countCheck = V.cond(
      transfers.length <= MaxTransferCount,
      GenericError(s"Number of transfers ${transfers.length} is greater than $MaxTransferCount")
    )

    val nonNegativeCheck = V.cond(
      !amounts.exists(_ < 0),
      GenericError("One of the transfers has negative amount")
    )

    // Validation order matches the original declaration order.
    V.seq(tx)(
      overflowCheck,
      countCheck,
      V.transferAttachment(attachment),
      nonNegativeCheck,
      V.fee(fee),
      V.chainIds(chainId, transfers.map(_.address.chainId): _*)
    )
  }
}
wavesplatform/Waves
node/src/main/scala/com/wavesplatform/transaction/validation/impl/MassTransferTxValidator.scala
Scala
mit
997
package ml.wolfe.term

import ml.wolfe.WolfeSpec

/**
 * Specs for `fun`, which lifts a term over domains into an ordinary,
 * typed Scala function.
 *
 * @author riedel
 */
class FunSpecs extends WolfeSpec {

  import ml.wolfe.term.TermImplicits._

  "A function creator" should {
    "create a typed scala function using a term" in {
      // I(_) is the indicator: true maps to 1.0.
      val indicator = fun(Bools) { x => I(x) }
      indicator(true) should be (1.0)
    }
    "create a typed scala function with two arguments" in {
      val conjunction = fun(Bools, Bools) { (x, y) => I(x && y) }
      conjunction(true, true) should be (1.0)
    }
  }
}
wolfe-pack/wolfe
wolfe-core/src/test/scala/ml/wolfe/term/FunSpecs.scala
Scala
apache-2.0
470
/*
 * ScalaQCMS -- Scala Quantum Circuit Model Simulator
 *
 * Copyright (c) 2012 Antti Vikman
 */

package models.preprocessingpolicies

import models.EquationEntity

/**
 * PreProcessingPolicy is an abstract class to provide unified interface for all pre processing policies.
 *
 * @todo Looks like this could be refactored to Trait
 */
abstract class PreProcessingPolicy {

  /**
   * Pre processes the equation using the given set of pre processors
   *
   * @param equation is the equation to be pre processed
   * @param preProcessors is a set of PreProcessor names that can be used in pre processing
   * @return the pre processed equation
   */
  def process(equation: EquationEntity, preProcessors: Set[String]): EquationEntity
}
n-a-g-r-o-m/ScaQCMS
app/models/preprocessingpolicies/PreProcessingPolicy.scala
Scala
mit
735
package mr.merc.politics

import cats.{Eval, Monoid}
import mr.merc.army.Warrior
import mr.merc.politics.Province.{ProductTradingInfo, ProductionTradingInfo}
import mr.merc.economics.PopulationNeedsType.{LifeNeeds, LuxuryNeeds, RegularNeeds}
import mr.merc.economics.PopulationType.Bureaucrats
import mr.merc.economics.Products.{IndustryProduct, Product}
import mr.merc.economics.{BuildFactoryProject, BusinessProject, Culture, Enterprise, FourSeasonsTerrainHex, IndustrialFactory, Population, PopulationNeedsType, PopulationType, RecruitWarriorProject, RegionMarket, RegionPopulation, RegionWarriors, StateBusinessProject, WorldConstants}
import mr.merc.economics.MapUtil.FloatOperations.MapWithFloatOperations

/**
 * A map province: has an owner and a controller state, population, warriors,
 * enterprises (factories), business projects and a regional market.
 *
 * Mutable by design: turn-processing code updates owner/controller, projects
 * and enterprises in place.
 */
class Province(val name: String, private var _owner: State, val regionMarket: RegionMarket,
               val hexes: Set[FourSeasonsTerrainHex], val capital: FourSeasonsTerrainHex) {

  // Filled once by initNeighbours(); None until then.
  private var neighbourProvinces: Option[Set[Province]] = None

  private var _controller: State = owner

  def controller: State = _controller

  // Changing the controller away from the owner cancels state-run projects.
  def controller_=(c: State): Unit = {
    _controller = c
    if (controller != owner) {
      deleteStateProjects()
    }
  }

  def owner: State = _owner

  // Ownership change always cancels the previous owner's state projects.
  def owner_=(o: State): Unit = {
    _owner = o
    deleteStateProjects()
  }

  override def toString: String = super.toString + s"|$name"

  private var neighboursInited = false

  /**
   * One-time initialization of the neighbour set; also precomputes the
   * two-step neighbourhood and wires the region warriors' neighbours.
   */
  def initNeighbours(set: Set[Province]): Unit = {
    require(!neighboursInited, "Only one time to init neighbours")
    this.neighbourProvinces = Some(set)
    _doubleNeighbours = Some((neighbourProvinces.getOrElse(Set()).flatMap(_.neighbourProvinces.getOrElse(Set())) - this).toList)
    regionWarriors.initNeighbours(set)
    neighboursInited = true
  }

  val regionPopulation = new RegionPopulation(this)

  def neighbours: List[Province] = {
    require(neighboursInited, "neighbours are not inited!")
    neighbourProvinces.get.toList
  }

  def neighboursSet: Set[Province] = {
    require(neighboursInited, "neighbours are not inited!")
    neighbourProvinces.get
  }

  // Neighbours-of-neighbours minus this province, cached at init time.
  // NOTE(review): computed from neighbours' own neighbourProvinces, so it is
  // only complete if all neighbours were initialized first — TODO confirm
  // initialization order at the call site.
  private var _doubleNeighbours: Option[List[Province]] = None

  def doubleNeighbours: List[Province] = {
    require(neighboursInited, "neighbours are not inited!")
    _doubleNeighbours.get
  }

  val regionWarriors = new RegionWarriors(Nil)

  def totalPopulation: Int = regionPopulation.popsList.map(_.populationCount).sum

  // Dominant culture by population count; falls back to the owner's prime
  // culture when the province is empty.
  def culture: Culture = {
    val cultures = regionPopulation.popsList.groupBy(_.culture).view.mapValues(_.map(_.populationCount).sum)
    if (cultures.isEmpty) owner.primeCulture
    else cultures.maxBy(_._2)._1
  }

  def ownerWarriors: List[Warrior] = regionWarriors.allWarriors.filter(_.owner == owner)

  var enterprises: Vector[Enterprise] = Vector()

  def factories: Map[IndustryProduct, IndustrialFactory] = enterprises.collect {
    case f: IndustrialFactory => f.product -> f
  }.toMap

  // Products already produced here or queued as factory-construction projects.
  def presentFactoriesAndProjects: Set[IndustryProduct] = {
    factories.keySet ++ projects.collect {
      case p: BuildFactoryProject => p.product
    }
  }

  /**
   * Money still needed per needs tier for the given population type, after
   * subtracting the pops' current reserves. Reserves fill tiers in order:
   * life needs first, then regular, then luxury.
   */
  def moneyToFulfillNeeds(populationType: PopulationType): Map[PopulationNeedsType, Double] = {
    val pops = regionPopulation.popsByType(populationType)
    val currentMoney = pops.map(_.moneyReserves).sum
    // Total cost per tier across all pops of this type.
    val map = regionPopulation.popsByType(populationType).map(moneyToFulfillNeeds).
      foldLeft(Map[PopulationNeedsType, Double]())(_ |+| _).withDefaultValue(0d)
    if (currentMoney <= map(LifeNeeds)) {
      val rem = map(LifeNeeds) - currentMoney
      map + (LifeNeeds -> rem)
    } else if (currentMoney <= map(LifeNeeds) + map(RegularNeeds)) {
      val rem = map(LifeNeeds) + map(RegularNeeds) - currentMoney
      map + (LifeNeeds -> 0d) + (RegularNeeds -> rem)
    } else if (currentMoney <= map.values.sum) {
      val rem = map.values.sum - currentMoney
      Map(LifeNeeds -> 0d, RegularNeeds -> 0d, LuxuryNeeds -> rem)
    } else {
      Map(LifeNeeds -> 0d, RegularNeeds -> 0d, LuxuryNeeds -> 0d)
    }
  }

  // Cost of one population's needs at current market prices, per tier.
  def moneyToFulfillNeeds(population: Population): Map[PopulationNeedsType, Double] = {
    population.needs.transform { case (_, needs) =>
      needs dot regionMarket.currentPrices
    }
  }

  // Cost of supplying the given state's warriors here at current prices.
  def moneyToFulfillArmyNeeds(owner: State): Double = {
    regionWarriors.allWarriors.filter(_.owner == owner).map(_.needs).reduceOption(_ |+| _).map(_ dot regionMarket.currentPrices).getOrElse(0)
  }

  var projects: List[BusinessProject] = Nil

  // Completed projects return unspent money and then execute their aim.
  def removeCompletedProjects(): Unit = {
    val (finished, notFinished) = projects.partition(_.isComplete)
    this.projects = notFinished
    finished.foreach { p =>
      p.returnNotSpentMoneyToInvestor()
      p.executeProjectAim()
    }
  }

  // Cancelling a project refunds its investor.
  def deleteProject(project: BusinessProject): Unit = {
    val (toDelete, remain) = projects.partition(_ == project)
    projects = remain
    toDelete.foreach(_.returnNotSpentMoneyToInvestor())
  }

  def deleteStateProjects(): Unit = {
    projects.collect { case s: StateBusinessProject => s }.foreach(deleteProject)
  }

  def addInvestmentsToBusinessProjects(): Unit = {
    this.projects.foreach { p =>
      p.takeMoreMoneyFromInvestorIfNeeded(regionMarket.currentPrices)
    }
  }

  // Bankrupt factories shrink by one level, or pay out their budget at level 1.
  // NOTE(review): the second collect's `case x => x` fall-through keeps every
  // enterprise, including still-bankrupt level-1 factories — looks like they
  // are never actually removed; confirm whether that is intended.
  def removeBankruptFactories(): Unit = {
    enterprises.collect {
      case e: IndustrialFactory if e.isBankrupt => e
    }.foreach { f =>
      if (f.level > 1) {
        f.decreaseLevel()
      } else {
        f.giveBudgetToOwners()
      }
    }
    enterprises = enterprises.collect {
      case f: IndustrialFactory if !f.isBankrupt => f
      case x => x
    }
  }

  // Memoized once per turn via Eval.later; reset in endTurn().
  private var bureaucratsPercentageVariable = Eval.later(bureaucratsPercentageF)

  def bureaucratsPercentage: Double = bureaucratsPercentageVariable.value

  def bureaucratsPercentageFromMaxEff: Double = bureaucratsPercentage / WorldConstants.Population.BureaucratsPercentageForMaxEff

  private def bureaucratsPercentageF: Double = {
    val totalPopulation = regionPopulation.popsList.map(_.populationCount).sum
    val bureaucrats = regionPopulation.popsByType(Bureaucrats).map(_.populationCount).sum
    if (totalPopulation == 0) 0
    else bureaucrats.toDouble / totalPopulation
  }

  // Per-product output recorded for the most recent day, summed over enterprises.
  def goodsProducedLastTurn: Map[Product, Double] = {
    enterprises.map { e =>
      e.dayRecords.lastOption.map { day =>
        Map(e.product -> day.produced)
      }.getOrElse(Map())
    }.reduceOption(_ |+| _).getOrElse(Map())
  }

  def gdp: Double = {
    goodsProducedLastTurn dot regionMarket.currentPrices
  }

  // Last turn's sales grouped by the buying region's controller state,
  // combined per product via the ProductionTradingInfo monoid.
  def soldToMarket: Map[State, ProductionTradingInfo] = {
    enterprises.flatMap { e =>
      e.dayRecords.lastOption.map { day =>
        day.sold.map { case (region, profit) =>
          region -> ProductTradingInfo(e.product, profit.request.sold, profit.request.receivedMoney)
        }
      }.getOrElse(Map()).toList
    }.groupBy(_._1.controller).map { case (owner, list) =>
      owner -> Monoid.combineAll(list.map { case (_, pti) =>
        ProductionTradingInfo(Map(pti.product -> pti))
      })
    }
  }

  def civilianVictimsOfBattleDied(units: Int): Unit = {
    regionPopulation.civilianVictims(units * WorldConstants.Population.PeoplePerWarrior)
  }

  // Invalidates the per-turn bureaucrats-percentage cache.
  def endTurn(): Unit = {
    bureaucratsPercentageVariable = Eval.later(bureaucratsPercentageF)
  }

  /**
   * Cultures from which warriors may be recruited here, with remaining
   * capacity (max recruits minus recruitments already in progress).
   * Empty when the province is occupied (owner != controller).
   */
  def possibleWarriorsToRecruit: Map[Culture, Int] = {
    if (owner == controller) {
      val possibleCultures = if (owner.rulingParty.votersPolicy.canBeMulticulturalArmy) {
        regionPopulation.cultureMembers.keySet
      } else Set(owner.primeCulture)
      possibleCultures.map { culture =>
        val alreadyInProgress = projects.collect {
          case r: RecruitWarriorProject if r.warriorCulture == culture => r
        }.size
        // NOTE(review): capacity is queried for owner.primeCulture even when
        // iterating other cultures — TODO confirm this is intentional.
        culture -> (regionPopulation.maxOrderPeopleToArmy(owner.primeCulture) - alreadyInProgress)
      }.toMap
    } else Map()
  }

  /**
   * Lazy breadth-first expansion of the neighbourhood: pairs of
   * (distance in steps, province), produced ring by ring starting from the
   * direct neighbours at distance 1.
   */
  def neighboursStream: LazyList[(Int, Province)] = {
    def nextNeighbours(currentCircle: Set[Province], alreadySent: Set[Province]): Set[Province] = {
      val present = currentCircle & alreadySent
      currentCircle.flatMap(_.neighbours) -- present
    }

    // Local unfold: builds the lazy ring-by-ring expansion.
    def unfold[A, S](z: S)(f: S => Option[(A, S)]): LazyList[A] = {
      f(z) match {
        case None => LazyList.empty
        case Some((a, s)) => LazyList.cons(a, unfold(s)(f))
      }
    }

    case class StreamState(step: Int, circle: Set[Province], sent: Set[Province])

    unfold[Set[(Int, Province)], StreamState](StreamState(0, Set(this), Set())) {
      case StreamState(step, circle, sent) =>
        val newCircle = nextNeighbours(circle, sent)
        if (newCircle.nonEmpty) {
          val stream = newCircle.map(c => (step + 1) -> c)
          val ss = StreamState(step + 1, newCircle, circle & sent)
          Some((stream, ss))
        } else {
          None
        }
    }.flatten
  }
}

object Province {

  // Monoid used by soldToMarket to merge per-product trading records.
  implicit val ProductionTradingInfoMonoid: Monoid[ProductionTradingInfo] = new Monoid[ProductionTradingInfo] {
    override def empty: ProductionTradingInfo = ProductionTradingInfo(Map())

    // Products present in both operands have counts and prices summed;
    // products present in only one side are carried over unchanged.
    override def combine(x: ProductionTradingInfo, y: ProductionTradingInfo): ProductionTradingInfo = {
      val intersection = x.perProduct.keySet & y.perProduct.keySet
      val xOnly = x.perProduct.keySet -- intersection
      val xOnlyMap = x.perProduct.view.filterKeys(xOnly.contains).toMap
      val yOnly = y.perProduct.keySet -- intersection
      val yOnlyMap = y.perProduct.view.filterKeys(yOnly.contains).toMap
      val intersectionMap = intersection.map { k =>
        val xp = x.perProduct(k)
        val yp = y.perProduct(k)
        k -> ProductTradingInfo(k, xp.count + yp.count, xp.totalPrice + yp.totalPrice)
      }.toMap
      ProductionTradingInfo(xOnlyMap ++ yOnlyMap ++ intersectionMap)
    }
  }

  // Aggregated sales of all products to one buyer state.
  case class ProductionTradingInfo(perProduct: Map[Product, ProductTradingInfo]) {
    lazy val totalSum: Double = perProduct.values.map(_.totalPrice).sum
  }

  // Sales of a single product: units sold and money received.
  case class ProductTradingInfo(product: Product, count: Double, totalPrice: Double)
}
RenualdMarch/merc
src/main/scala/mr/merc/politics/Province.scala
Scala
gpl-3.0
9,919
// This file is machine-generated. package org.apache.avro.scala.test.generated.scala { import _root_.scala.collection.JavaConverters._ class RecordWithAllTypes( val nullField : Null, val booleanField : Boolean, val intField : Int, val longField : Long, val floatField : Float, val doubleField : Double, val stringField : String, val bytesField : Seq[Byte], val fixedField : Seq[Byte], val intArrayField : Seq[Int], val intMapField : Map[String, Int], val intArrayArrayField : Seq[Seq[Int]], val intMapMapField : Map[String, Map[String, Int]] ) extends org.apache.avro.scala.ImmutableRecordBase { def copy(nullField : Null = this.nullField, booleanField : Boolean = this.booleanField, intField : Int = this.intField, longField : Long = this.longField, floatField : Float = this.floatField, doubleField : Double = this.doubleField, stringField : String = this.stringField, bytesField : Seq[Byte] = this.bytesField, fixedField : Seq[Byte] = this.fixedField, intArrayField : Seq[Int] = this.intArrayField, intMapField : Map[String, Int] = this.intMapField, intArrayArrayField : Seq[Seq[Int]] = this.intArrayArrayField, intMapMapField : Map[String, Map[String, Int]] = this.intMapMapField): RecordWithAllTypes = new RecordWithAllTypes( nullField = nullField, booleanField = booleanField, intField = intField, longField = longField, floatField = floatField, doubleField = doubleField, stringField = stringField, bytesField = bytesField, fixedField = fixedField, intArrayField = intArrayField, intMapField = intMapField, intArrayArrayField = intArrayArrayField, intMapMapField = intMapMapField ) override def getSchema(): org.apache.avro.Schema = { return RecordWithAllTypes.schema } override def get(index: Int): AnyRef = { index match { case 0 => org.apache.avro.scala.Conversions.scalaToJava(null).asInstanceOf[AnyRef] case 1 => org.apache.avro.scala.Conversions.scalaToJava(booleanField).asInstanceOf[AnyRef] case 2 => org.apache.avro.scala.Conversions.scalaToJava(intField).asInstanceOf[AnyRef] case 
3 => org.apache.avro.scala.Conversions.scalaToJava(longField).asInstanceOf[AnyRef] case 4 => org.apache.avro.scala.Conversions.scalaToJava(floatField).asInstanceOf[AnyRef] case 5 => org.apache.avro.scala.Conversions.scalaToJava(doubleField).asInstanceOf[AnyRef] case 6 => org.apache.avro.scala.Conversions.scalaToJava(stringField).asInstanceOf[AnyRef] case 7 => org.apache.avro.scala.Conversions.scalaToJava(java.nio.ByteBuffer.wrap(bytesField.toArray[Byte])).asInstanceOf[AnyRef] case 8 => org.apache.avro.scala.Conversions.scalaToJava(new org.apache.avro.generic.GenericData.Fixed(getSchema(), fixedField.toArray[Byte])).asInstanceOf[AnyRef] case 9 => org.apache.avro.scala.Conversions.scalaToJava(intArrayField).asInstanceOf[AnyRef] case 10 => org.apache.avro.scala.Conversions.scalaToJava(intMapField).asInstanceOf[AnyRef] case 11 => org.apache.avro.scala.Conversions.scalaToJava(intArrayArrayField).asInstanceOf[AnyRef] case 12 => org.apache.avro.scala.Conversions.scalaToJava(intMapMapField).asInstanceOf[AnyRef] case _ => throw new org.apache.avro.AvroRuntimeException("Bad index: " + index) } } override def encode(encoder: org.apache.avro.io.Encoder): Unit = { encoder.writeNull() encoder.writeBoolean(this.booleanField) encoder.writeInt(this.intField) encoder.writeLong(this.longField) encoder.writeFloat(this.floatField) encoder.writeDouble(this.doubleField) encoder.writeString(this.stringField) encoder.writeBytes(this.bytesField.asInstanceOf[Array[Byte]]) encoder.writeBytes(this.fixedField.asInstanceOf[Array[Byte]]) encoder.writeArrayStart() encoder.setItemCount(this.intArrayField.size) for (arrayItem <- this.intArrayField) { encoder.startItem() encoder.writeInt(arrayItem) } encoder.writeArrayEnd() encoder.writeMapStart() encoder.setItemCount(this.intMapField.size) for ((mapKey, mapValue) <- this.intMapField) { encoder.startItem() encoder.writeString(mapKey) encoder.writeInt(mapValue) } encoder.writeMapEnd() encoder.writeArrayStart() 
encoder.setItemCount(this.intArrayArrayField.size) for (arrayItem <- this.intArrayArrayField) { encoder.startItem() encoder.writeArrayStart() encoder.setItemCount(arrayItem.size) for (arrayItem <- arrayItem) { encoder.startItem() encoder.writeInt(arrayItem) } encoder.writeArrayEnd() } encoder.writeArrayEnd() encoder.writeMapStart() encoder.setItemCount(this.intMapMapField.size) for ((mapKey, mapValue) <- this.intMapMapField) { encoder.startItem() encoder.writeString(mapKey) encoder.writeMapStart() encoder.setItemCount(mapValue.size) for ((mapKey, mapValue) <- mapValue) { encoder.startItem() encoder.writeString(mapKey) encoder.writeInt(mapValue) } encoder.writeMapEnd() } encoder.writeMapEnd() } def toMutable: MutableRecordWithAllTypes = new MutableRecordWithAllTypes( this.nullField, this.booleanField, this.intField, this.longField, this.floatField, this.doubleField, this.stringField, scala.collection.mutable.Buffer[Byte]((this.bytesField).toSeq: _*), Array[Byte]((this.fixedField).toSeq: _*), scala.collection.mutable.ArrayBuffer[Int]((this.intArrayField): _*), scala.collection.mutable.Map[String, Int]((this.intMapField).toSeq: _*), scala.collection.mutable.ArrayBuffer[scala.collection.mutable.Buffer[Int]]((this.intArrayArrayField.map { v0 => scala.collection.mutable.ArrayBuffer[Int]((v0): _*) }): _*), scala.collection.mutable.Map[String, scala.collection.mutable.Map[String, Int]]((this.intMapMapField.mapValues { v0 => scala.collection.mutable.Map[String, Int]((v0).toSeq: _*) }).toSeq: _*) ) def canEqual(other: Any): Boolean = other.isInstanceOf[RecordWithAllTypes] || other.isInstanceOf[MutableRecordWithAllTypes] } class MutableRecordWithAllTypes( var nullField : Null = null, var booleanField : Boolean = false, var intField : Int = 0, var longField : Long = 0, var floatField : Float = 0, var doubleField : Double = 0, var stringField : String = null, var bytesField : scala.collection.mutable.Buffer[Byte] = scala.collection.mutable.Buffer[Byte](), var fixedField : 
Array[Byte] = new Array[Byte](16), var intArrayField : scala.collection.mutable.Buffer[Int] = scala.collection.mutable.ArrayBuffer[Int]().asInstanceOf[scala.collection.mutable.Buffer[Int]], var intMapField : scala.collection.mutable.Map[String, Int] = scala.collection.mutable.Map[String, Int]().asInstanceOf[scala.collection.mutable.Map[String, Int]], var intArrayArrayField : scala.collection.mutable.Buffer[scala.collection.mutable.Buffer[Int]] = scala.collection.mutable.ArrayBuffer[scala.collection.mutable.Buffer[Int]]().asInstanceOf[scala.collection.mutable.Buffer[scala.collection.mutable.Buffer[Int]]], var intMapMapField : scala.collection.mutable.Map[String, scala.collection.mutable.Map[String, Int]] = scala.collection.mutable.Map[String, scala.collection.mutable.Map[String, Int]]().asInstanceOf[scala.collection.mutable.Map[String, scala.collection.mutable.Map[String, Int]]] ) extends org.apache.avro.scala.MutableRecordBase[RecordWithAllTypes] { def this() = this(null, false, 0, 0, 0, 0, null, scala.collection.mutable.Buffer[Byte](), new Array[Byte](16), scala.collection.mutable.ArrayBuffer[Int]().asInstanceOf[scala.collection.mutable.Buffer[Int]], scala.collection.mutable.Map[String, Int]().asInstanceOf[scala.collection.mutable.Map[String, Int]], scala.collection.mutable.ArrayBuffer[scala.collection.mutable.Buffer[Int]]().asInstanceOf[scala.collection.mutable.Buffer[scala.collection.mutable.Buffer[Int]]], scala.collection.mutable.Map[String, scala.collection.mutable.Map[String, Int]]().asInstanceOf[scala.collection.mutable.Map[String, scala.collection.mutable.Map[String, Int]]]) override def getSchema(): org.apache.avro.Schema = { return RecordWithAllTypes.schema } override def get(index: Int): AnyRef = { index match { case 0 => org.apache.avro.scala.Conversions.scalaToJava(null).asInstanceOf[AnyRef] case 1 => org.apache.avro.scala.Conversions.scalaToJava(booleanField).asInstanceOf[AnyRef] case 2 => 
org.apache.avro.scala.Conversions.scalaToJava(intField).asInstanceOf[AnyRef] case 3 => org.apache.avro.scala.Conversions.scalaToJava(longField).asInstanceOf[AnyRef] case 4 => org.apache.avro.scala.Conversions.scalaToJava(floatField).asInstanceOf[AnyRef] case 5 => org.apache.avro.scala.Conversions.scalaToJava(doubleField).asInstanceOf[AnyRef] case 6 => org.apache.avro.scala.Conversions.scalaToJava(stringField).asInstanceOf[AnyRef] case 7 => org.apache.avro.scala.Conversions.scalaToJava(java.nio.ByteBuffer.wrap(bytesField.toArray[Byte])).asInstanceOf[AnyRef] case 8 => org.apache.avro.scala.Conversions.scalaToJava(new org.apache.avro.generic.GenericData.Fixed(getSchema(), fixedField.toArray[Byte])).asInstanceOf[AnyRef] case 9 => org.apache.avro.scala.Conversions.scalaToJava(intArrayField).asInstanceOf[AnyRef] case 10 => org.apache.avro.scala.Conversions.scalaToJava(intMapField).asInstanceOf[AnyRef] case 11 => org.apache.avro.scala.Conversions.scalaToJava(intArrayArrayField).asInstanceOf[AnyRef] case 12 => org.apache.avro.scala.Conversions.scalaToJava(intMapMapField).asInstanceOf[AnyRef] case _ => throw new org.apache.avro.AvroRuntimeException("Bad index: " + index) } } override def put(index: Int, javaValue: AnyRef): Unit = { val value = org.apache.avro.scala.Conversions.javaToScala(javaValue) index match { case 0 => () case 1 => this.booleanField = value.asInstanceOf[Boolean] case 2 => this.intField = value.asInstanceOf[Int] case 3 => this.longField = value.asInstanceOf[Long] case 4 => this.floatField = value.asInstanceOf[Float] case 5 => this.doubleField = value.asInstanceOf[Double] case 6 => this.stringField = value.toString case 7 => this.bytesField = collection.mutable.Buffer[Byte]() ++ value.asInstanceOf[java.nio.ByteBuffer].array() case 8 => this.fixedField = value.asInstanceOf[org.apache.avro.generic.GenericData.Fixed].bytes() case 9 => this.intArrayField = value.asInstanceOf[scala.collection.mutable.Buffer[Int]] case 10 => this.intMapField = 
value.asInstanceOf[scala.collection.mutable.Map[String, Int]] case 11 => this.intArrayArrayField = value.asInstanceOf[scala.collection.mutable.Buffer[scala.collection.mutable.Buffer[Int]]] case 12 => this.intMapMapField = value.asInstanceOf[scala.collection.mutable.Map[String, scala.collection.mutable.Map[String, Int]]] case _ => throw new org.apache.avro.AvroRuntimeException("Bad index: " + index) } } def build(): RecordWithAllTypes = { return new RecordWithAllTypes( nullField = this.nullField, booleanField = this.booleanField, intField = this.intField, longField = this.longField, floatField = this.floatField, doubleField = this.doubleField, stringField = this.stringField, bytesField = this.bytesField, fixedField = this.fixedField, intArrayField = this.intArrayField.toList, intMapField = this.intMapField.toMap, intArrayArrayField = this.intArrayArrayField.map { _.toList }.toList, intMapMapField = this.intMapMapField.mapValues { _.toMap }.toMap ) } override def encode(encoder: org.apache.avro.io.Encoder): Unit = { encoder.writeNull() encoder.writeBoolean(this.booleanField) encoder.writeInt(this.intField) encoder.writeLong(this.longField) encoder.writeFloat(this.floatField) encoder.writeDouble(this.doubleField) encoder.writeString(this.stringField) encoder.writeBytes(this.bytesField.toArray) encoder.writeFixed(this.fixedField) encoder.writeArrayStart() encoder.setItemCount(this.intArrayField.size) for (arrayItem <- this.intArrayField) { encoder.startItem() encoder.writeInt(arrayItem) } encoder.writeArrayEnd() encoder.writeMapStart() encoder.setItemCount(this.intMapField.size) for ((mapKey, mapValue) <- this.intMapField) { encoder.startItem() encoder.writeString(mapKey) encoder.writeInt(mapValue) } encoder.writeMapEnd() encoder.writeArrayStart() encoder.setItemCount(this.intArrayArrayField.size) for (arrayItem <- this.intArrayArrayField) { encoder.startItem() encoder.writeArrayStart() encoder.setItemCount(arrayItem.size) for (arrayItem <- arrayItem) { 
encoder.startItem() encoder.writeInt(arrayItem) } encoder.writeArrayEnd() } encoder.writeArrayEnd() encoder.writeMapStart() encoder.setItemCount(this.intMapMapField.size) for ((mapKey, mapValue) <- this.intMapMapField) { encoder.startItem() encoder.writeString(mapKey) encoder.writeMapStart() encoder.setItemCount(mapValue.size) for ((mapKey, mapValue) <- mapValue) { encoder.startItem() encoder.writeString(mapKey) encoder.writeInt(mapValue) } encoder.writeMapEnd() } encoder.writeMapEnd() } def decode(decoder: org.apache.avro.io.Decoder): Unit = { {decoder.readNull(); null} this.booleanField = decoder.readBoolean() this.intField = decoder.readInt() this.longField = decoder.readLong() this.floatField = decoder.readFloat() this.doubleField = decoder.readDouble() this.stringField = decoder.readString() this.bytesField = decoder.readBytes(null).array.toBuffer this.fixedField = { val bytes = new Array[Byte](16); decoder.readFixed(bytes); bytes } this.intArrayField = { val array = scala.collection.mutable.ArrayBuffer[Int]() var blockSize: Long = decoder.readArrayStart() while(blockSize != 0L) { for (_ <- 0L until blockSize) { val arrayItem = ( decoder.readInt()) array.append(arrayItem) } blockSize = decoder.arrayNext() } array } this.intMapField = { val map = scala.collection.mutable.Map[String, Int]() var blockSize: Long = decoder.readMapStart() while (blockSize != 0L) { for (_ <- 0L until blockSize) { val key: String = decoder.readString() val value = ( decoder.readInt()) map += (key -> value) } blockSize = decoder.mapNext() } map } this.intArrayArrayField = { val array = scala.collection.mutable.ArrayBuffer[scala.collection.mutable.Buffer[Int]]() var blockSize: Long = decoder.readArrayStart() while(blockSize != 0L) { for (_ <- 0L until blockSize) { val arrayItem = ( { val array = scala.collection.mutable.ArrayBuffer[Int]() var blockSize: Long = decoder.readArrayStart() while(blockSize != 0L) { for (_ <- 0L until blockSize) { val arrayItem = ( decoder.readInt()) 
array.append(arrayItem) } blockSize = decoder.arrayNext() } array }) array.append(arrayItem) } blockSize = decoder.arrayNext() } array.asInstanceOf[scala.collection.mutable.Buffer[scala.collection.mutable.Buffer[Int]]] } this.intMapMapField = { val map = scala.collection.mutable.Map[String, scala.collection.mutable.Map[String, Int]]() var blockSize: Long = decoder.readMapStart() while (blockSize != 0L) { for (_ <- 0L until blockSize) { val key: String = decoder.readString() val value = ( { val map = scala.collection.mutable.Map[String, Int]() var blockSize: Long = decoder.readMapStart() while (blockSize != 0L) { for (_ <- 0L until blockSize) { val key: String = decoder.readString() val value = ( decoder.readInt()) map += (key -> value) } blockSize = decoder.mapNext() } map }) map += (key -> value) } blockSize = decoder.mapNext() } map.asInstanceOf[scala.collection.mutable.Map[String, scala.collection.mutable.Map[String, Int]]] } } def canEqual(other: Any): Boolean = other.isInstanceOf[RecordWithAllTypes] || other.isInstanceOf[MutableRecordWithAllTypes] } object RecordWithAllTypes extends org.apache.avro.scala.RecordType[RecordWithAllTypes, MutableRecordWithAllTypes] { final val schema: org.apache.avro.Schema = new org.apache.avro.Schema.Parser().parse(""" |{ | "type" : "record", | "name" : "RecordWithAllTypes", | "namespace" : "org.apache.avro.scala.test.generated", | "fields" : [ { | "name" : "null_field", | "type" : "null" | }, { | "name" : "boolean_field", | "type" : "boolean" | }, { | "name" : "int_field", | "type" : "int" | }, { | "name" : "long_field", | "type" : "long" | }, { | "name" : "float_field", | "type" : "float" | }, { | "name" : "double_field", | "type" : "double" | }, { | "name" : "string_field", | "type" : "string" | }, { | "name" : "bytes_field", | "type" : "bytes" | }, { | "name" : "fixed_field", | "type" : { | "type" : "fixed", | "name" : "anon_fixed_16", | "size" : 16 | } | }, { | "name" : "int_array_field", | "type" : { | "type" : "array", | 
"items" : "int" | } | }, { | "name" : "int_map_field", | "type" : { | "type" : "map", | "values" : "int" | } | }, { | "name" : "int_array_array_field", | "type" : { | "type" : "array", | "items" : { | "type" : "array", | "items" : "int" | } | } | }, { | "name" : "int_map_map_field", | "type" : { | "type" : "map", | "values" : { | "type" : "map", | "values" : "int" | } | } | } ] |} """ .stripMargin) } } // package org.apache.avro.scala.test.generated.scala
julianpeeters/avro
lang/scala/src/test/resources/testdata/org/apache/avro/scala/test/generated/scala/RecordWithAllTypes.scala
Scala
apache-2.0
19,304
package lensimpl.bench

import lensimpl.bench.Impl.STD

import scala.util.Try

/** One row of benchmark results: one [[Result]] per implementation. */
case class Row(impls: Map[Impl, Result])

/** Benchmark results indexed first by method, then by implementation. */
case class Matrix(value: Map[Method, Row]) {

  /** Returns a copy with `result` recorded under `(method, impl)`.
    * The matrix is returned unchanged when `method` has no row yet. */
  def addResult(method: Method, impl: Impl, result: Result): Matrix =
    value.get(method) match {
      case None      => this
      case Some(row) =>
        val updatedRow = Row(row.impls + (impl -> result))
        Matrix(value + (method -> updatedRow))
    }

  /** Looks up the result recorded for `(method, impl)`, if any. */
  def get(method: Method, impl: Impl): Option[Result] =
    for {
      row    <- value.get(method)
      result <- row.impls.get(impl)
    } yield result

  /** Expresses every score as a ratio of the STD implementation's score
    * for the same method. Rows lacking an STD result are dropped, as are
    * entries whose ratio computation fails. */
  def normalised: NormalisedMatrix = {
    val normalisedRows: Map[Method, NormalisedRow] =
      value.flatMap { case (method, row) =>
        row.impls.get(STD).map { stdRes =>
          val ratios = row.impls.flatMap { case (impl, implRes) =>
            Try(stdRes.score / implRes.score).toOption.map(ratio => impl -> ratio)
          }
          method -> NormalisedRow(ratios)
        }
      }
    NormalisedMatrix(normalisedRows)
  }
}

object Matrix {
  /** A matrix with one empty row per known method. */
  val empty: Matrix = Matrix(Method.all.map(m => m -> Row(Map.empty)).toMap)
}

/** Score ratios (STD score / impl score), indexed like [[Matrix]]. */
case class NormalisedMatrix(value: Map[Method, NormalisedRow]) {
  /** Looks up the normalised ratio for `(method, impl)`, if present. */
  def get(method: Method, impl: Impl): Option[Double] =
    for {
      row   <- value.get(method)
      ratio <- row.impls.get(impl)
    } yield ratio
}

/** One row of normalised ratios, one per implementation. */
case class NormalisedRow(impls: Map[Impl, Double])
julien-truffaut/LensImpl
bench/src/main/scala/lensimpl/bench/Matrix.scala
Scala
mit
1,197
package com.softwaremill.session

import akka.http.scaladsl.model.{DateTime, HttpHeader}
import akka.http.scaladsl.model.headers.{RawHeader, HttpCookie, Cookie, `Set-Cookie`}
import akka.http.scaladsl.testkit.ScalatestRouteTest

import com.softwaremill.session.SessionOptions._
import com.softwaremill.session.TestData._

/** Test mixin that lets a single route-test body be run against both session
  * transports (cookies and headers) via the two concrete objects below.
  * Requires a ScalatestRouteTest, whose `headers`/`header` response accessors
  * the implementations read.
  */
trait MultipleTransportTest { this: ScalatestRouteTest =>

  /** Abstracts over how the session and refresh token travel between client
    * and server, so tests can be written once per behavior.
    */
  trait TestUsingTransport {
    // Human-readable transport label, used in test descriptions.
    def transportName: String

    // Read the session value from the last response, if present.
    def getSession: Option[String]
    // Build the request header that carries session value `s` to the server.
    def setSessionHeader(s: String): HttpHeader
    // Whether the last response invalidated the session.
    def isSessionExpired: Boolean

    // Same three operations, but for the refresh ("remember me") token.
    def getRefreshToken: Option[String]
    def setRefreshTokenHeader(s: String): HttpHeader
    def isRefreshTokenExpired: Boolean

    // The directive-level transport selectors matching this implementation.
    def getSessionTransport: GetSessionTransport
    def setSessionTransport: SetSessionTransport
  }

  /** Cookie-based transport: values are read from `Set-Cookie` response
    * headers and sent back as `Cookie` request headers.
    */
  object TestUsingCookies extends TestUsingTransport {
    // Cookie names come from the shared test session configuration.
    val sessionCookieName = sessionConfig.sessionCookieConfig.name
    val refreshTokenCookieName = sessionConfig.refreshTokenCookieConfig.name

    val transportName = "cookies"

    // All cookies set by the last response, keyed by cookie name.
    def cookiesMap: Map[String, HttpCookie] =
      headers
        .collect { case `Set-Cookie`(cookie) => cookie.name -> cookie }.toMap

    def getSession = cookiesMap.get(sessionCookieName).map(_.value)
    def setSessionHeader(s: String) = Cookie(sessionCookieName, s)
    // A cookie is "expired" when the server set its expiry to the epoch
    // minimum (the standard way to delete a cookie).
    def isSessionExpired = cookiesMap.get(sessionCookieName).flatMap(_.expires).contains(DateTime.MinValue)

    def getRefreshToken = cookiesMap.get(refreshTokenCookieName).map(_.value)
    def setRefreshTokenHeader(s: String) = Cookie(refreshTokenCookieName, s)
    def isRefreshTokenExpired = cookiesMap.get(refreshTokenCookieName).flatMap(_.expires).contains(DateTime.MinValue)

    def getSessionTransport = usingCookies
    def setSessionTransport = usingCookies
  }

  /** Header-based transport: values are read from the configured
    * send-to-client response header and sent back via the
    * get-from-client request header.
    */
  object TestUsingHeaders extends TestUsingTransport {
    val transportName = "headers"

    def getSession = header(sessionConfig.sessionHeaderConfig.sendToClientHeaderName).map(_.value)
    def setSessionHeader(s: String) = RawHeader(sessionConfig.sessionHeaderConfig.getFromClientHeaderName, s)
    // With headers there is no expiry metadata: an empty value signals
    // session invalidation.
    def isSessionExpired = getSession.contains("")

    def getRefreshToken = header(sessionConfig.refreshTokenHeaderConfig.sendToClientHeaderName).map(_.value)
    def setRefreshTokenHeader(s: String) = RawHeader(sessionConfig.refreshTokenHeaderConfig.getFromClientHeaderName, s)
    def isRefreshTokenExpired = getRefreshToken.contains("")

    def getSessionTransport = usingHeaders
    def setSessionTransport = usingHeaders
  }
}
ilyai/akka-http-session
core/src/test/scala/com/softwaremill/session/MultipleTransportTest.scala
Scala
apache-2.0
2,503
package collins.callbacks

import java.beans.PropertyChangeEvent

import play.api.Logger

/** Runs an external command in response to a property-change callback,
  * substituting placeholder tokens in the command with values obtained by
  * invoking methods on the event's payload.
  *
  * @tparam T the concrete command representation built by `formatCommand`
  */
trait CallbackActionRunner[T] extends CallbackActionHandler {
  protected val logger = Logger("CallbackActionHandler")

  /** The command template, one token per element. */
  val command: Seq[String]

  /** Matches placeholder tokens in the command string; group 1 is the
    * method name. NOTE(review): as written the pattern requires a literal
    * backslash before each angle bracket — confirm the intended escaping. */
  val METHOD_CALL_REGEX = """\\<(.*?)\\>""".r

  /** The command template joined into a single space-separated string. */
  def commandString = command.mkString(" ")

  override def apply(pce: PropertyChangeEvent) {
    Option(getValue(pce)) match {
      case None =>
        // Nothing to substitute against: skip running the command entirely.
        logger.warn("Got no value back to use with command %s".format(commandString))
      case Some(eventValue) =>
        val methodReplacements = getMethodReplacements()
        logger.debug("Got replacements for command %s: %s".format(commandString,
          methodReplacements.map(_.toString).mkString(", ")
        ))
        // Resolve each placeholder by invoking its method on the event value.
        val executedReplacements = methodReplacements.map(_.runMethod(eventValue))
        logger.debug("Got replacements (with values) for command %s: %s".format(commandString,
          executedReplacements.map(_.toString).mkString(", ")
        ))
        val finalCommand = formatCommand(eventValue, executedReplacements)
        logger.debug("Got new command with replacements: %s".format(finalCommand))
        runCommand(finalCommand)
    }
  }

  /** Executes the fully-substituted command. */
  protected def runCommand(cmd: T): Unit

  /** Builds the runnable command from the event value and the resolved
    * placeholder replacements. */
  protected def formatCommand(v: AnyRef, replacements: Set[MethodReplacement]): T

  /** Extracts every placeholder occurrence found in the command string. */
  protected def getMethodReplacements(): Set[MethodReplacement] =
    METHOD_CALL_REGEX.findAllIn(commandString).matchData.map { matched =>
      MethodReplacement(matched.group(0), matched.group(1))
    }.toSet
}
funzoneq/collins
app/collins/callbacks/CallbackActionRunner.scala
Scala
apache-2.0
1,435
/*
 * Copyright (C) Lightbend Inc. <https://www.lightbend.com>
 */

package play.it.http.parsing

import akka.stream.scaladsl.Source
import akka.util.ByteString
import play.api.Application
import play.api.mvc._
import play.api.test._

class AnyContentBodyParserSpec extends PlaySpecification {
  "The anyContent body parser" should {
    /** Runs the `anyContent` body parser against a synthetic request.
      *
      * @param method      HTTP method of the fake request
      * @param contentType optional Content-Type header value
      * @param body        raw request body bytes
      * @param maxLength   optional max body length passed to the parser
      * @return Either a parse failure result or the parsed AnyContent
      */
    def parse(method: String, contentType: Option[String], body: ByteString, maxLength: Option[Long] = None)(
        implicit app: Application
    ) = {
      implicit val mat = app.materializer
      val parsers      = app.injector.instanceOf[PlayBodyParsers]
      val request      = FakeRequest(method, "/x").withHeaders(contentType.map(CONTENT_TYPE -> _).toSeq: _*)
      await(parsers.anyContent(maxLength).apply(request).run(Source.single(body)))
    }

    "parse text bodies for DELETE requests" in new WithApplication(_.globalApp(false)) {
      parse("DELETE", Some("text/plain"), ByteString("bar")) must beRight(AnyContentAsText("bar"))
    }

    "parse text bodies for GET requests" in new WithApplication(_.globalApp(false)) {
      parse("GET", Some("text/plain"), ByteString("bar")) must beRight(AnyContentAsText("bar"))
    }

    // NOTE(review): the description says GET but the body exercises PUT,
    // duplicating the "parse unknown bodies as raw for PUT requests" test
    // below — confirm the intended method before renaming or changing it.
    "parse empty bodies as raw for GET requests" in new WithApplication(_.globalApp(false)) {
      parse("PUT", None, ByteString.empty) must beRight.like {
        case AnyContentAsRaw(rawBuffer) =>
          rawBuffer.asBytes() must beSome.like {
            case outBytes => outBytes must beEmpty
          }
      }
    }

    "parse text bodies for HEAD requests" in new WithApplication(_.globalApp(false)) {
      parse("HEAD", Some("text/plain"), ByteString("bar")) must beRight(AnyContentAsText("bar"))
    }

    "parse text bodies for OPTIONS requests" in new WithApplication(_.globalApp(false)) {
      parse("OPTIONS", Some("text/plain"), ByteString("bar")) must beRight(AnyContentAsText("bar"))
    }

    // Fixed: previously called parse("POST", ...) although the description
    // (and the surrounding per-method coverage) targets PATCH; POST text
    // parsing is covered by the dedicated POST test below.
    "parse XML bodies for PATCH requests" in new WithApplication(_.globalApp(false)) {
      parse("PATCH", Some("text/xml"), ByteString("<bar></bar>")) must beRight(AnyContentAsXml(<bar></bar>))
    }

    "parse text bodies for POST requests" in new WithApplication(_.globalApp(false)) {
      parse("POST", Some("text/plain"), ByteString("bar")) must beRight(AnyContentAsText("bar"))
    }

    "parse JSON bodies for PUT requests" in new WithApplication(_.globalApp(false)) {
      parse("PUT", Some("application/json"), ByteString("""{"foo":"bar"}""")) must beRight.like {
        case AnyContentAsJson(json) => (json \\ "foo").as[String] must_== "bar"
      }
    }

    "parse unknown bodies as raw for PUT requests" in new WithApplication(_.globalApp(false)) {
      parse("PUT", None, ByteString.empty) must beRight.like {
        case AnyContentAsRaw(rawBuffer) =>
          rawBuffer.asBytes() must beSome.like {
            case outBytes => outBytes must beEmpty
          }
      }
    }

    "accept greater than 2G bytes. not Int overflow" in new WithApplication(_.globalApp(false)) {
      // maxLength is a Long precisely so values above Int.MaxValue work.
      parse("POST", Some("text/plain"), ByteString("bar"), maxLength = Some(Int.MaxValue.toLong + 2L)) must beRight(
        AnyContentAsText("bar")
      )
    }
  }
}
mkurz/playframework
core/play-integration-test/src/it/scala/play/it/http/parsing/AnyContentBodyParserSpec.scala
Scala
apache-2.0
3,161
package net.mkowalski.sparkfim.util

import net.mkowalski.sparkfim.cache.{CacheRemovePolicy, PersistenceManager}

/** Helpers for building driver-side components from string parameter maps. */
object DriverUtil {

  /** Builds a [[PersistenceManager]] from the given parameters.
    *
    * Recognised keys: "cacheRemovePolicy" (resolved via
    * `CacheRemovePolicy.fromName`, defaulting to the default policy),
    * "serializedStorage" and "allowDiskStorage" (both boolean, default false).
    */
  def createPersistenceManager(params: Map[String, String]): PersistenceManager = {
    val removePolicy = params.get("cacheRemovePolicy")
      .map(CacheRemovePolicy.fromName)
      .getOrElse(CacheRemovePolicy.defaultPolicy)
    PersistenceManager(
      removePolicy,
      getBooleanOrDefault(params.get("serializedStorage"), defaultValue = false),
      getBooleanOrDefault(params.get("allowDiskStorage"), defaultValue = false))
  }

  // Parses the optional value as a Boolean, falling back to the default when
  // absent. Note: `toBoolean` throws on values other than "true"/"false".
  private def getBooleanOrDefault(param: Option[String], defaultValue: Boolean): Boolean =
    param.fold(defaultValue)(_.toBoolean)
}
mjkowalski/spark-fim
src/main/scala/net/mkowalski/sparkfim/util/DriverUtil.scala
Scala
mit
900
package chat.tox.antox.av

import java.util.concurrent.TimeUnit

import chat.tox.antox.av.CallEndReason.CallEndReason
import chat.tox.antox.av.CameraFacing.CameraFacing
import chat.tox.antox.tox.ToxSingleton
import chat.tox.antox.utils.AntoxLog
import chat.tox.antox.wrapper.{CallNumber, ContactKey}
import im.tox.tox4j.av.data._
import im.tox.tox4j.av.enums.{ToxavCallControl, ToxavFriendCallState}
import im.tox.tox4j.exceptions.ToxException
import org.apache.commons.collections4.queue.CircularFifoQueue
import rx.lang.scala.JavaConversions._
import rx.lang.scala.schedulers.NewThreadScheduler
import rx.lang.scala.subjects.BehaviorSubject
import rx.lang.scala.{Observable, Subject}

import scala.collection.mutable.ArrayBuffer
import scala.concurrent.duration.Duration
import scala.util.Try

/** A single audio/video call with one contact.
  *
  * Tracks the friend's call state and the self state as Rx subjects, owns the
  * audio capture/playback pipelines, and spawns the audio/video send threads
  * once the call is answered.
  *
  * @param callNumber tox-av call identifier
  * @param contactKey the contact this call belongs to
  * @param incoming   whether the call was initiated by the friend
  */
final case class Call(callNumber: CallNumber, contactKey: ContactKey, incoming: Boolean) {

  private val friendStateSubject = BehaviorSubject[Set[ToxavFriendCallState]](Set.empty[ToxavFriendCallState])
  private def friendState: Set[ToxavFriendCallState] = friendStateSubject.getValue

  // only describes self state
  private val selfStateSubject = BehaviorSubject[SelfCallState](SelfCallState.DEFAULT)
  def selfStateObservable: Observable[SelfCallState] = selfStateSubject.asJavaObservable
  def selfState = selfStateSubject.getValue

  // is video enabled in any way
  val videoEnabledObservable = selfStateObservable.map(state => state.sendingVideo || state.receivingVideo)

  private val endedSubject = Subject[CallEndReason]()
  // called only once, when the call ends with the reason it ended
  def endedObservable: Observable[CallEndReason] = endedSubject.asJavaObservable

  //only for outgoing audio
  private val samplingRate = SamplingRate.Rate48k //in Hz
  private val audioLength = AudioLength.Length20 //in milliseconds
  private val channels = AudioChannels.Stereo

  val audioBufferLength = 3 // in frames
  val videoBufferLength = 3 // in frames

  val defaultRingTime = Duration(30, TimeUnit.SECONDS)

  // ringing by default (call should only be created if it is ringing)
  private val ringingSubject = BehaviorSubject[Boolean](true)
  def ringingObservable: Observable[Boolean] = ringingSubject.asJavaObservable
  def ringing = ringingSubject.getValue

  var started = false
  var startTime: Duration = Duration(0, TimeUnit.MILLISECONDS)
  def duration: Duration = Duration(System.currentTimeMillis(), TimeUnit.MILLISECONDS) - startTime //in milliseconds

  val enhancements: ArrayBuffer[CallEnhancement] = new ArrayBuffer()

  /**
    * Describes a state in which the call is not FINISHED or ERROR.
    * When the call is on hold or ringing (not yet answered) this will return true.
    */
  def active: Boolean = isActive(friendState) && !selfState.ended

  private def isActive(state: Set[ToxavFriendCallState]): Boolean = {
    !state.contains(ToxavFriendCallState.FINISHED) && !state.contains(ToxavFriendCallState.ERROR)
  }

  // The friend is sending nothing at all while the call is active.
  def onHold: Boolean = friendState.isEmpty

  val audioCapture: AudioCapture = new AudioCapture(samplingRate.value, channels.value)
  val audioPlayer = new AudioPlayer(samplingRate.value, channels.value, audioBufferLength)

  private val videoFrameSubject = Subject[StridedYuvFrame]()
  def videoFrameObservable: Observable[StridedYuvFrame] =
    videoFrameSubject.onBackpressureDrop(_ => AntoxLog.debug("Dropped a video frame due to back-pressure."))

  var cameraFrameBuffer: Option[CircularFifoQueue[NV21Frame]] = None

  // default value, not checked based on device capabilities
  private val cameraFacingSubject = BehaviorSubject[CameraFacing](CameraFacing.Front)
  def cameraFacingObservable: Observable[CameraFacing] = cameraFacingSubject.asJavaObservable

  private def frameSize = SampleCount(audioLength, samplingRate)

  private def logCallEvent(event: String): Unit = AntoxLog.debug(s"Call $callNumber belonging to $contactKey $event")

  // make sure the call ends eventually if it's still ringing
  endAfterTime(defaultRingTime)

  /** Initiates an outgoing call, optionally sending audio and/or video. */
  def startCall(sendingAudio: Boolean, sendingVideo: Boolean): Unit = {
    logCallEvent(s"started sending audio:$sendingAudio and video:$sendingVideo")
    ToxSingleton.toxAv.call(
      callNumber,
      if (sendingAudio) selfState.audioBitRate else BitRate.Disabled,
      if (sendingVideo) selfState.videoBitRate else BitRate.Disabled)
    selfStateSubject.onNext(selfState.copy(audioMuted = !sendingAudio, videoHidden = !sendingVideo))
  }

  /** Answers an incoming call and starts the media pipelines. */
  def answerCall(): Unit = {
    val sendingAudio: Boolean = true //always send audio
    val sendingVideo: Boolean = selfState.receivingVideo // only send video if we're receiving it
    logCallEvent(s"answered sending audio:$sendingAudio and video:$sendingVideo")
    ToxSingleton.toxAv.answer(callNumber, selfState.audioBitRate, selfState.videoBitRate)
    showFriendVideo()
    selfStateSubject.onNext(selfState.copy(audioMuted = !sendingAudio, videoHidden = !sendingVideo))
    startCall()
    ringingSubject.onNext(false)
  }

  /** Records what media the friend offers on an incoming call. */
  def onIncoming(audioEnabled: Boolean, videoEnabled: Boolean): Unit = {
    logCallEvent(s"incoming receiving audio:$audioEnabled and video:$videoEnabled")
    selfStateSubject.onNext(selfState.copy(receivingAudio = audioEnabled, receivingVideo = videoEnabled))
  }

  //end the call after `ringTime`
  private def endAfterTime(ringTime: Duration): Unit = {
    Observable
      // FIX: previously timed on defaultRingTime, silently ignoring the
      // ringTime parameter this method is documented to honour.
      .timer(ringTime)
      .subscribeOn(NewThreadScheduler())
      .foreach(_ => {
        if (active && ringing) {
          val reason =
            if (incoming) {
              // call was missed
              CallEndReason.Missed
            } else {
              // call was unanswered
              CallEndReason.Unanswered
            }
          end(reason)
        }
      })
  }

  /** Applies a new friend call state: starts the call on answer, ends it on
    * FINISHED/ERROR, and otherwise updates what media we are receiving. */
  def updateFriendState(state: Set[ToxavFriendCallState]): Unit = {
    logCallEvent(s"friend call state updated to $state")

    val answered: Boolean = friendState.isEmpty && isActive(state) && !incoming
    val ended: Boolean = !isActive(state)

    val newSelfState = selfState.copy(
      receivingAudio = state.contains(ToxavFriendCallState.SENDING_A),
      receivingVideo = state.contains(ToxavFriendCallState.SENDING_V)
    )

    if (answered && !started) {
      startCall()
      ringingSubject.onNext(false)
    } else if (ended) {
      end()
    } else {
      if (newSelfState != selfState) selfStateSubject.onNext(newSelfState)
    }

    // FIX: previously pushed `friendState` (the subject's own current value),
    // so observers never saw the new state; push the incoming `state` instead.
    friendStateSubject.onNext(state)
  }

  // Marks the call as started and launches the audio/video send threads and
  // the audio player. Must only be called once per call.
  private def startCall(): Unit = {
    assert(!started)
    started = true
    startTime = Duration(System.currentTimeMillis(), TimeUnit.MILLISECONDS)
    logCallEvent(event = s"started at time $startTime")

    new Thread(new Runnable {
      override def run(): Unit = {
        android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO)
        logCallEvent("audio send thread started")
        audioCapture.start()
        while (active) {
          val start = System.currentTimeMillis()
          if (selfState.sendingAudio) {
            try {
              ToxSingleton.toxAv.audioSendFrame(callNumber,
                audioCapture.readAudio(frameSize.value, channels.value),
                frameSize, channels, samplingRate)
            } catch {
              case e: ToxException[_] =>
                AntoxLog.debug("Ignoring audio send frame exception.")
            }
          }
          // Pace the loop so one frame is sent per audioLength interval.
          val timeTaken = System.currentTimeMillis() - start
          if (timeTaken < audioLength.value.toMillis)
            Thread.sleep(audioLength.value.toMillis - timeTaken)
        }
        logCallEvent("audio send thread stopped")
      }
    }, "AudioSendThread").start()

    new Thread(new Runnable {
      override def run(): Unit = {
        logCallEvent("video send thread started ")
        while (active) {
          // Wait for a camera buffer to be attached before polling.
          if (cameraFrameBuffer.isEmpty) Thread.sleep(100)
          val maybeCameraFrame = cameraFrameBuffer.flatMap(buffer => Option(buffer.poll()))
          maybeCameraFrame.foreach(cameraFrame => {
            if (active && selfState.sendingVideo && !ringing) {
              val startSendTime = System.currentTimeMillis()
              val yuvFrame = FormatConversions.nv21toYuv420(cameraFrame)
              try {
                ToxSingleton.toxAv.videoSendFrame(callNumber,
                  yuvFrame.width, yuvFrame.height, yuvFrame.y, yuvFrame.u, yuvFrame.v)
              } catch {
                case e: ToxException[_] =>
                  AntoxLog.debug("Ignoring video send frame exception.")
              }
              println(s"sending frame took ${System.currentTimeMillis() - startSendTime}")
            }
          })
        }
      }
    }, "VideoSendThread").start()

    audioPlayer.start()
  }

  /** Buffers a received audio frame for playback. */
  def onAudioFrame(pcm: Array[Short], channels: AudioChannels, samplingRate: SamplingRate): Unit = {
    audioPlayer.bufferAudioFrame(pcm, channels.value, samplingRate.value)
  }

  /** Publishes a received video frame to observers. */
  def onVideoFrame(videoFrame: StridedYuvFrame): Unit = {
    videoFrameSubject.onNext(videoFrame)
  }

  def muteSelfAudio(): Unit = {
    selfStateSubject.onNext(selfState.copy(audioMuted = true))
    ToxSingleton.toxAv.setAudioBitRate(callNumber, BitRate.Disabled)
    audioCapture.stop()
  }

  def unmuteSelfAudio(): Unit = {
    selfStateSubject.onNext(selfState.copy(audioMuted = false))
    ToxSingleton.toxAv.setAudioBitRate(callNumber, selfState.audioBitRate)
    audioCapture.start()
  }

  def enableLoudspeaker(): Unit = {
    selfStateSubject.onNext(selfState.copy(loudspeakerEnabled = true))
  }

  def disableLoudspeaker(): Unit = {
    selfStateSubject.onNext(selfState.copy(loudspeakerEnabled = false))
  }

  def hideSelfVideo(): Unit = {
    selfStateSubject.onNext(selfState.copy(videoHidden = true))
  }

  def showSelfVideo(): Unit = {
    selfStateSubject.onNext(selfState.copy(videoHidden = false))
  }

  def muteFriendAudio(): Unit = {
    ToxSingleton.toxAv.callControl(callNumber, ToxavCallControl.MUTE_AUDIO)
  }

  def unmuteFriendAudio(): Unit = {
    ToxSingleton.toxAv.callControl(callNumber, ToxavCallControl.UNMUTE_AUDIO)
  }

  def hideFriendVideo(): Unit = {
    ToxSingleton.toxAv.callControl(callNumber, ToxavCallControl.HIDE_VIDEO)
  }

  def showFriendVideo(): Unit = {
    ToxSingleton.toxAv.callControl(callNumber, ToxavCallControl.SHOW_VIDEO)
  }

  /** Switches between the front and back camera. */
  def rotateCamera(): Unit = {
    cameraFacingSubject.onNext(CameraFacing.swap(cameraFacingSubject.getValue))
  }

  /** Ends the call, notifying the friend (unless ended by error),
    * completing the ended stream and releasing the media pipelines. */
  def end(reason: CallEndReason = CallEndReason.Normal): Unit = {
    logCallEvent(s"ended reason:$reason")

    // only send a call control if the call wasn't ended unexpectedly
    if (reason != CallEndReason.Error) {
      Try(ToxSingleton.toxAv.callControl(callNumber, ToxavCallControl.CANCEL))
    }

    selfStateSubject.onNext(selfState.copy(ended = true))
    endedSubject.onNext(reason)
    endedSubject.onCompleted()
    onCallEnded()
  }

  private def onCallEnded(): Unit = {
    audioCapture.stop()
    cleanUp()
  }

  // Releases native/audio resources held by the capture and playback pipelines.
  private def cleanUp(): Unit = {
    audioPlayer.cleanUp()
    audioCapture.cleanUp()
  }
}
wiiam/Antox
app/src/main/scala/chat/tox/antox/av/Call.scala
Scala
gpl-3.0
11,060
/* * ============= Ryft-Customized BSD License ============ * Copyright (c) 2015, Ryft Systems, Inc. * All rights reserved. * Redistribution and use in source and binary forms, with or without modification, * are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation and/or * other materials provided with the distribution. * 3. All advertising materials mentioning features or use of this software must display the following acknowledgement: * This product includes software developed by Ryft Systems, Inc. * 4. Neither the name of Ryft Systems, Inc. nor the names of its contributors may be used * to endorse or promote products derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY RYFT SYSTEMS, INC. ''AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL RYFT SYSTEMS, INC. BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
* ============ */ package com.ryft.spark.connector.query.value import com.ryft.spark.connector.domain.RyftValueOperator import com.ryft.spark.connector.exception.RyftSparkException import com.ryft.spark.connector.query.value.model._ import org.apache.spark.Logging /** * Implementation of Ryft IPv4 search */ case class IPv4Value(valueA: Option[IPv4] = None, operatorA: Option[RyftValueOperator] = None, operatorB: RyftValueOperator, valueB: IPv4) extends RyftQueryValue { if (valueA.isDefined) { require(valueA.get.isValid, s"IPv4 ${valueA.get} is invalid") } require(valueB.isValid, s"IPv4 $valueB is invalid") override def toString: String = { val expression = if (valueA.isDefined && operatorA.isDefined) { s""""${valueA.get}" ${operatorA.get.value} IP ${operatorB.value} "$valueB"""" } else { s"""IP ${operatorB.value} "$valueB"""" } s"IPV4($expression)" } } object IPv4Value extends Logging { def apply(operator: RyftValueOperator, value: IPv4) = new IPv4Value(None, None, operator, value) def apply(valueA: IPv4, operatorA: RyftValueOperator, operatorB: RyftValueOperator, valueB: IPv4) = new IPv4Value(Some(valueA), Some(operatorA), operatorB, valueB) def apply(params: Params): IPv4Value = params match { case shortParams: ShortParams => apply(shortParams.operator, shortParams.value.asInstanceOf[IPv4]) case fullParams: FullParams => apply(fullParams.valueA.asInstanceOf[IPv4], fullParams.operatorA, fullParams.operatorB, fullParams.valueB.asInstanceOf[IPv4]) case unknown => val msg = s"Unknown query value parameters: $unknown" logWarning(msg) throw RyftSparkException(msg) } }
getryft/spark-ryft-connector
spark-ryft-connector/src/main/scala/com/ryft/spark/connector/query/value/IPv4Value.scala
Scala
bsd-3-clause
3,612
package org.scalatra import org.scalatra.test.specs2.MutableScalatraSpec class CsrfTokenServlet extends ScalatraServlet with CsrfTokenSupport { get("/renderForm") { <html> <body> <form method="post"><input type="hidden" name={ csrfKey } value={ csrfToken }/></form> </body> </html> } post("/renderForm") { "SUCCESS" } } object CsrfTokenSpec extends MutableScalatraSpec { addServlet(classOf[CsrfTokenServlet], "/*") "the get request should include the CSRF token" in { get("/renderForm") { body must beMatching("""(?s).*value="\\w+".*""") } } "the post should be valid when it uses the right csrf token" in { var token = "" session { get("/renderForm") { token = ("value=\\"(\\\\w+)\\"".r findFirstMatchIn body).get.subgroups.head } post("/renderForm", CsrfTokenSupport.DefaultKey -> token) { body must be_==("SUCCESS") } } } "the post should be invalid when it uses a different csrf token" in { session { get("/renderForm") { } post("/renderForm", CsrfTokenSupport.DefaultKey -> "Hey I'm different") { status must be_==(403) body must not be_== ("SUCCESS") } } } "the token should remain valid across multiple request" in { var token = "" session { get("/renderForm") { token = ("value=\\"(\\\\w+)\\"".r findFirstMatchIn body).get.subgroups.head } get("/renderForm") { val token2 = ("value=\\"(\\\\w+)\\"".r findFirstMatchIn body).get.subgroups.head token must be_==(token2) } post("/renderForm", CsrfTokenSupport.DefaultKey -> token) { body must be_==("SUCCESS") } } } } // vim: set si ts=2 sw=2 sts=2 et:
lightvector/scalatra
core/src/test/scala/org/scalatra/CsrfTokenSpec.scala
Scala
bsd-2-clause
1,758
/** * ContainsDuplicates.scala * Mike Abraham * * Implement an algorithm to determine if a string has all unique characters. * * This solution is case-sensitive. * For example, A and a are treated as different characters. * * Running time = O(n^2). * * TODO: Reimplement this using merge sort, since we could catch dups on the merge step, * and the whole thing would run in O(n logn) */ object ContainsDuplicates { def containsDuplicates(str: String): Boolean = { // Walk in 2 nested loops. // For each character, walk the rest of the string and check for dups of that character. // If we are already at the end of the string, we are done (unique). // If we hit a duplicate, we are done (not unique). // We never need to check the chars "behind" us, because if there was a dup, // we would have exited when its traverse reached us. @scala.annotation.tailrec def walk(index: Int, str: Array[Char]): Boolean = { @scala.annotation.tailrec def walkFromIndex(index: Int, chr: Char, str: Array[Char]): Boolean = { if (index == str.length - 1) false else if (chr == str(index + 1)) true else walkFromIndex(index + 1, chr, str) } if (index == str.length) false else if (walkFromIndex(index, str(index), str)) true else walk(index + 1, str) } walk(0, str.toArray) } }
mikeabraham/InterviewQuestionsInScala
src/main/scala/ContainsDuplicates.scala
Scala
unlicense
1,394
package org.jetbrains.plugins.scala.codeInsight.intention.comprehension import org.jetbrains.plugins.scala.codeInsight.intentions.ScalaIntentionTestBase import org.junit.Assert._ class ConvertToParenthesesIntentionTest extends ScalaIntentionTestBase { override def familyName = ConvertToParenthesesIntention.FamilyName def testIntentionAvailableInSimpleForYieldStatement(): Unit = { checkIntentionIsAvailable(s"for$CARET_MARKER{i <- 1 to 10} yield i + 1") } def testIntentionAvailableInLargeForYieldStatement(): Unit = { checkIntentionIsAvailable( s"""for$CARET_MARKER{ | i <- 1 to 10 | c <- 'a' to 'e' |} yield "" + c + i""".stripMargin) } def testIntentionNotAvailableOnForKeyword(): Unit = { checkIntentionIsNotAvailable(s"f${CARET_MARKER}or{i <- 1 to 10} yield i + 1") } def testIntentionNotAvailableInForBlock(): Unit = { checkIntentionIsNotAvailable(s"for{${CARET_MARKER}i <- 1 to 10} yield i + 1") } def testIntentionNotAvailableInSimpleForStatementWithPatentheses(): Unit = { checkIntentionIsNotAvailable(s"for$CARET_MARKER(i <- 1 to 10) yield i + 1") } def testIntentionNotAvailableInLargeForYieldStatementWithPatentheses(): Unit = { checkIntentionIsNotAvailable( s"""for$CARET_MARKER( | i <- 1 to 10; | c <- 'a' to 'e' |) yield "" + c + i""".stripMargin) } def testIntentionActionForOnSimpleForStatement(): Unit = { val text = s"for$CARET_MARKER{i <- 1 to 10} yield i + 1" val result = "for (i <- 1 to 10) yield i + 1" doTest(text, result, familyName) } def testIntentionActionOnLargeForYieldStatement(): Unit = { val text = s"""for$CARET_MARKER{ | i <- 1 to 10 | c <- 'a' to 'e' |} yield "" + c + i""".stripMargin val result = """for (i <- 1 to 10; c <- 'a' to 'e') yield "" + c + i""" doTest(text, result, familyName) } }
whorbowicz/intellij-scala
test/org/jetbrains/plugins/scala/codeInsight/intention/comprehension/ConvertToParenthesesIntentionTest.scala
Scala
apache-2.0
1,915
package model.services.rewardRules import java.util.UUID import model.services.GamificationEngineTrait class RewardRule_RemoveUploadedFile() extends RewardRuleTrait{ override val action_id: Int = GamificationEngineTrait.REMOVE_UPLOADED_FILE override def getPoints(user_id: UUID): Int = { -10 } }
scify/DemocracIT-Web
app/model/services/rewardRules/RewardRule_RemoveUploadedFile.scala
Scala
apache-2.0
311
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.predictionio.data.storage /** AWS S3 implementation of storage traits, supporting model data only * * @group Implementation */ package object s3 {}
PredictionIO/PredictionIO
storage/s3/src/main/scala/org/apache/predictionio/data/storage/s3/package.scala
Scala
apache-2.0
977
/* * This file is part of the "silex" library of helpers for Apache Spark. * * Copyright (c) 2015 Red Hat, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.redhat.et.silex.util trait Logging { // Adapted from the Logging trait in Spark 1.x import org.slf4j.{Logger, LoggerFactory} @transient private var _logger: Logger = null def logger: Logger = { if (_logger != null) { _logger } else { _logger = LoggerFactory.getLogger(this.getClass.getName.stripSuffix("$")) _logger } } protected def logInfo(msg: => String) { if (logger.isInfoEnabled) logger.info(msg) } protected def logDebug(msg: => String) { if (logger.isDebugEnabled) logger.debug(msg) } protected def logWarning(msg: => String) { if (logger.isWarnEnabled) logger.warn(msg) } protected def logError(msg: => String) { if (logger.isErrorEnabled) logger.error(msg) } }
willb/silex
src/main/scala/com/redhat/et/silex/util/logging.scala
Scala
apache-2.0
1,449
package webserviceclients.fakes import org.joda.time.{DateTime, Instant} import uk.gov.dvla.vehicles.presentation.common.services.DateService import uk.gov.dvla.vehicles.presentation.common.views.models.DayMonthYear import webserviceclients.fakes.FakeDateServiceImpl.DateOfAcquisitionDayValid import webserviceclients.fakes.FakeDateServiceImpl.DateOfAcquisitionMonthValid import webserviceclients.fakes.FakeDateServiceImpl.DateOfAcquisitionYearValid final class FakeDateServiceImpl extends DateService { override def today = DayMonthYear( DateOfAcquisitionDayValid.toInt, DateOfAcquisitionMonthValid.toInt, DateOfAcquisitionYearValid.toInt ) override def now = Instant.now() override def dateTimeISOChronology: String = new DateTime( DateOfAcquisitionYearValid.toInt, DateOfAcquisitionMonthValid.toInt, DateOfAcquisitionDayValid.toInt, 0, 0).toString } object FakeDateServiceImpl { final val VALID_AGE:Integer = 30 final val DateOfAcquisitionDayValid = "25" final val DateOfAcquisitionMonthValid = "11" final val DateOfAcquisitionYearValid = (DateTime.now.getYear - VALID_AGE).toString private final val dateTime = DateTime.now final val TodayDay = dateTime.toString("dd") final val TodayMonth = dateTime.toString("MM") final val TodayYear = dateTime.getYear.toString }
dvla/vehicles-acquire-online
test/webserviceclients/fakes/FakeDateServiceImpl.scala
Scala
mit
1,333
package com.arcusys.learn.liferay.update.version270 import com.arcusys.learn.liferay.LiferayClasses.LUpgradeProcess import com.arcusys.learn.liferay.services.RatingsStatsLocalServiceHelper import com.arcusys.learn.liferay.util.PortalUtilHelper import com.liferay.portal.kernel.dao.orm.RestrictionsFactoryUtil import scala.collection.JavaConverters.asScalaBufferConverter // convert package ratings statistics class DBUpdater2722 extends LUpgradeProcess { override def getThreshold = 2722 val newAssetClassName = "com.arcusys.valamis.lesson.model.Lesson" val oldAssetClassNames = Set( "com.arcusys.valamis.lesson.model.BaseManifest" ) override def doUpgrade(): Unit = { val newClassNameId = PortalUtilHelper.getClassNameId(newAssetClassName) oldAssetClassNames.toStream .map { PortalUtilHelper.getClassNameId } .foreach { classNameId => updateClassNameId(classNameId, newClassNameId) } } private def updateClassNameId(classNameId: Long, newClassNameId: Long) = { val query = RatingsStatsLocalServiceHelper.dynamicQuery() .add(RestrictionsFactoryUtil.eq("classNameId", classNameId)) val statistics = RatingsStatsLocalServiceHelper.dynamicQuery(query) for (stats <- statistics) { val newStats = RatingsStatsLocalServiceHelper.getStats(newAssetClassName, stats.getClassPK) newStats.setAverageScore(stats.getAverageScore) newStats.setTotalEntries(stats.getTotalEntries) newStats.setTotalScore(stats.getTotalScore) RatingsStatsLocalServiceHelper.updateRatingsStats(newStats) RatingsStatsLocalServiceHelper.deleteRatingsStats(stats.getStatsId) } } }
igor-borisov/valamis
learn-portlet/src/main/scala/com/arcusys/learn/liferay/update/version270/DBUpdater2722.scala
Scala
gpl-3.0
1,681
package org.webant.extension.link import java.sql.SQLException import org.apache.commons.dbutils.handlers.BeanListHandler import org.apache.commons.lang3.reflect.FieldUtils import org.apache.log4j.LogManager import org.webant.commons.entity.Link import org.webant.commons.link.JdbcLinkProvider import org.webant.commons.utils.BeanUtils import scala.collection.JavaConverters._ class DerbyLinkProvider extends JdbcLinkProvider { private val logger = LogManager.getLogger(classOf[DerbyLinkProvider]) DRIVER = "org.apache.derby.jdbc.EmbeddedDriver" override def init(params: java.util.Map[String, Object]): Boolean = { // val url = "jdbc:derby:data/derby/data;create=true" // val user = "" // val password = "" if (!super.init(params)) { logger.error(s"init ${getClass.getSimpleName} failed!") return false } logger.info(s"init ${getClass.getSimpleName} success!") createTable() } override def createTable(): Boolean = { val sql = "CREATE TABLE link (" + " id varchar(64) NOT NULL," + " taskId varchar(64) DEFAULT NULL," + " siteId varchar(64) DEFAULT NULL," + " url varchar(1024) DEFAULT NULL," + " referer varchar(1024) DEFAULT NULL," + " priority smallint DEFAULT NULL," + " lastCrawlTime TimeStamp DEFAULT NULL," + " status varchar(32) DEFAULT NULL," + " dataVersion int DEFAULT NULL," + " dataCreateTime TimeStamp DEFAULT NULL," + " dataUpdateTime TimeStamp DEFAULT NULL," + " dataDeleteTime TimeStamp DEFAULT NULL," + " PRIMARY KEY (id)" + ")" try { runner.update(conn, sql) } catch { case e: SQLException => e.printStackTrace() return false } true } override def read(): Iterable[Link] = { val links = read(Link.LINK_STATUS_INIT, batch) if (links.nonEmpty) { val pending = links.map(link => { link.setStatus(Link.LINK_STATUS_PENDING) link }) update(pending) } links } private def read(status: String, size: Int): Iterable[Link] = { val sql = "SELECT * " + s"FROM link WHERE status = ? ORDER by dataCreateTime desc OFFSET ? ROWS FETCH NEXT ? 
ROWS ONLY" val offset: Integer = 0 val pageSize: Integer = if (size <= 0 || size > 1000) 1000 else size val selectParams = Array[Object](status, offset, pageSize) var rs = Iterable.empty[Link] conn.setAutoCommit(false) try { rs = runner.query(conn, sql, new BeanListHandler[Link](classOf[Link]), selectParams: _*).asScala if (rs.nonEmpty) { val updateSql = "update link set status = ? where id = ?" val updateParams = rs.map(link => { Array[Object](Link.LINK_STATUS_PENDING, link.getId) }).toArray runner.batch(conn, updateSql, updateParams) } conn.commit() } catch { case e: Exception => conn.rollback() e.printStackTrace() } rs } def update(links: Iterable[Link]): Int = { 0 } override def write(link: Link): Int = { require(conn != null) if (link == null) return 0 insert(link) } def insert(link: Link): Int = { require(conn != null) if (link == null) return 0 val fieldNames = BeanUtils.getDeclaredFields(link).map(_.getName) val columns = fieldNames.mkString("(", ", ", ")") val placeholders = fieldNames.map(_ => "?").mkString("(", ",", ")") val values = fieldNames.map(fieldName => FieldUtils.readField(link, fieldName, true)) val sql = s"insert into link $columns values $placeholders" runner.update(conn, sql, values: _*) } override def write(links: Iterable[Link]): Int = { require(conn != null) if (links == null || links.isEmpty) return 0 val fieldNames = BeanUtils.getDeclaredFields(links.head).map(_.getName) val columns = fieldNames.mkString("(", ", ", ")") val placeholders = fieldNames.map(_ => "?").mkString("(", ",", ")") val sql = s"insert into link $columns values $placeholders" val params = links.map(link => { fieldNames.map(fieldName => FieldUtils.readField(link, fieldName, true)) }).toArray runner.batch(conn, sql, params).sum } override def update(link: Link): Int = { require(conn != null) if (link == null) return 0 val fieldNames = BeanUtils.getDeclaredFields(link).map(_.getName) val excludes = Set("id") val filterFieldNames = 
fieldNames.filter(!excludes.contains(_)) val filterPlaceholders = filterFieldNames.map(fieldName => s"$fieldName = ?").mkString(", ") val filterValues = filterFieldNames.map(fieldName => FieldUtils.readField(link, fieldName, true)) val sql = s"update link set $filterPlaceholders, dataVersion = dataVersion + 1, dataUpdateTime = datetime('now', 'localtime') where id = '${link.getId}'" // val sql = s"insert into fun $columns values $placeholders ON DUPLICATE KEY UPDATE dataVersion = dataVersion + 1, dataUpdateTime = now()" runner.update(conn, sql, filterValues: _*) } override def upsert(link: Link): Int = { require(conn != null) if (link == null) return 0 val fieldNames = BeanUtils.getDeclaredFields(link).map(_.getName) val columns = fieldNames.mkString("(", ", ", ")") val placeholders = fieldNames.map(_ => "?").mkString("(", ",", ")") val values = fieldNames.map(fieldName => FieldUtils.readField(link, fieldName, true)) val excludes = Set("id", "dataVersion", "dataCreateTime", "dataUpdateTime", "dataDeleteTime") val filterFieldNames = fieldNames.filter(!excludes.contains(_)) val filterPlaceholders = filterFieldNames.map(fieldName => s"$fieldName = ?").mkString(", ") val filterValues = filterFieldNames.map(fieldName => FieldUtils.readField(link, fieldName, true)) val allValues = (values ++ filterValues).toSeq val sql = s"insert into link $columns values $placeholders ON DUPLICATE KEY UPDATE $filterPlaceholders, dataVersion = dataVersion + 1, dataUpdateTime = now()" println(sql) runner.update(conn, sql, allValues: _*) } override def close(): Boolean = { if (null != conn) { try conn.close() catch { case e: SQLException => e.printStackTrace() } conn = null } true } }
sutine/webant
webant-extension/src/main/scala/org/webant/extension/link/DerbyLinkProvider.scala
Scala
apache-2.0
6,293
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.deploy.mesos import java.net.SocketAddress import java.nio.ByteBuffer import scala.collection.mutable import org.apache.spark.{Logging, SecurityManager, SparkConf} import org.apache.spark.deploy.ExternalShuffleService import org.apache.spark.network.client.{RpcResponseCallback, TransportClient} import org.apache.spark.network.shuffle.ExternalShuffleBlockHandler import org.apache.spark.network.shuffle.protocol.BlockTransferMessage import org.apache.spark.network.shuffle.protocol.mesos.RegisterDriver import org.apache.spark.network.util.TransportConf /** * An RPC endpoint that receives registration requests from Spark drivers running on Mesos. * It detects driver termination and calls the cleanup callback to [[ExternalShuffleService]]. 
*/ private[mesos] class MesosExternalShuffleBlockHandler(transportConf: TransportConf) extends ExternalShuffleBlockHandler(transportConf, null) with Logging { // Stores a map of driver socket addresses to app ids private val connectedApps = new mutable.HashMap[SocketAddress, String] protected override def handleMessage( message: BlockTransferMessage, client: TransportClient, callback: RpcResponseCallback): Unit = { message match { case RegisterDriverParam(appId) => val address = client.getSocketAddress logDebug(s"Received registration request from app $appId (remote address $address).") if (connectedApps.contains(address)) { val existingAppId = connectedApps(address) if (!existingAppId.equals(appId)) { logError(s"A new app '$appId' has connected to existing address $address, " + s"removing previously registered app '$existingAppId'.") applicationRemoved(existingAppId, true) } } connectedApps(address) = appId callback.onSuccess(ByteBuffer.allocate(0)) case _ => super.handleMessage(message, client, callback) } } /** * On connection termination, clean up shuffle files written by the associated application. */ override def connectionTerminated(client: TransportClient): Unit = { val address = client.getSocketAddress if (connectedApps.contains(address)) { val appId = connectedApps(address) logInfo(s"Application $appId disconnected (address was $address).") applicationRemoved(appId, true /* cleanupLocalDirs */) connectedApps.remove(address) } else { logWarning(s"Unknown $address disconnected.") } } /** An extractor object for matching [[RegisterDriver]] message. */ private object RegisterDriverParam { def unapply(r: RegisterDriver): Option[String] = Some(r.getAppId) } } /** * A wrapper of [[ExternalShuffleService]] that provides an additional endpoint for drivers * to associate with. This allows the shuffle service to detect when a driver is terminated * and can clean up the associated shuffle files. 
*/ private[mesos] class MesosExternalShuffleService(conf: SparkConf, securityManager: SecurityManager) extends ExternalShuffleService(conf, securityManager) { protected override def newShuffleBlockHandler( conf: TransportConf): ExternalShuffleBlockHandler = { new MesosExternalShuffleBlockHandler(conf) } } private[spark] object MesosExternalShuffleService extends Logging { def main(args: Array[String]): Unit = { ExternalShuffleService.main(args, (conf: SparkConf, sm: SecurityManager) => new MesosExternalShuffleService(conf, sm)) } }
chenc10/Spark-PAF
core/src/main/scala/org/apache/spark/deploy/mesos/MesosExternalShuffleService.scala
Scala
apache-2.0
4,319
/* * Copyright 2017 Datamountaineer. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.datamountaineer.streamreactor.connect.hazelcast import java.net.URI import java.util.{Properties, UUID} import javax.cache.{CacheManager, Caching} import com.datamountaineer.streamreactor.connect.hazelcast.config.{HazelCastConnectionConfig, HazelCastSocketConfig} import com.hazelcast.cache.HazelcastCachingProvider import com.hazelcast.client.HazelcastClient import com.hazelcast.client.config.{ClientConfig, ClientNetworkConfig, SocketOptions} import com.hazelcast.config.GroupConfig import com.hazelcast.core.HazelcastInstance import scala.collection.JavaConversions._ /** * Created by andrew@datamountaineer.com on 10/08/16. 
* stream-reactor */ object HazelCastConnection { def buildClient(config: HazelCastConnectionConfig): HazelcastInstance = { val clientConfig = new ClientConfig val networkConfig = clientConfig.getNetworkConfig networkConfig.setAddresses(config.members.toList) val groupConfig = new GroupConfig(config.group, config.pass) clientConfig.setGroupConfig(groupConfig) buildSocketOptions(networkConfig, config.socketConfig) clientConfig.setInstanceName(config.group + "-kafka-connect-" + UUID.randomUUID().toString) HazelcastClient.newHazelcastClient(clientConfig) } private def buildSocketOptions(clientNetworkConfig: ClientNetworkConfig, socketConfig: HazelCastSocketConfig): SocketOptions = { val socketOptions = clientNetworkConfig.getSocketOptions socketOptions.setKeepAlive(socketConfig.keepAlive) socketOptions.setTcpNoDelay(socketConfig.tcpNoDelay) socketOptions.setReuseAddress(socketConfig.reuseAddress) socketOptions.setLingerSeconds(socketConfig.lingerSeconds) socketOptions.setBufferSize(socketConfig.bufferSize) socketOptions } def getCacheManager(client: HazelcastInstance, name: String) : CacheManager = { val instanceName = client.getName() val cachingProvider = Caching.getCachingProvider() // Create Properties instance pointing to a named HazelcastInstance val properties = new Properties() properties.setProperty(HazelcastCachingProvider.HAZELCAST_INSTANCE_NAME, instanceName) val cacheManagerName = new URI(name ) val cacheManager = cachingProvider.getCacheManager(cacheManagerName, null, properties ) cacheManager } }
CodeSmell/stream-reactor
kafka-connect-hazelcast/src/main/scala/com/datamountaineer/streamreactor/connect/hazelcast/HazelCastConnection.scala
Scala
apache-2.0
2,874
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.rdd

import scala.reflect.ClassTag

import org.apache.spark._

/**
 * Pairs a parent RDD with one of its partitions so that a downstream
 * data-load task receives both together.
 */
case class DataLoadPartitionWrap[T: ClassTag](rdd: RDD[T], partition: Partition)

/**
 * An RDD that coalesces the partitions of `prev` across the given node list
 * and, when computed, emits one [[DataLoadPartitionWrap]] for each parent
 * partition grouped under a coalesced partition.
 *
 * @param prev     the parent RDD being coalesced (dropped on clearDependencies)
 * @param nodeList node names used by the coalescer to place partitions
 */
class DataLoadCoalescedRDD[T: ClassTag](
    @transient var prev: RDD[T],
    nodeList: Array[String])
  extends RDD[DataLoadPartitionWrap[T]](prev.context, Nil) {

  // Partition layout is delegated entirely to the (project-local) coalescer.
  override def getPartitions: Array[Partition] = {
    val coalescer = new DataLoadPartitionCoalescer(prev, nodeList)
    coalescer.run
  }

  // Lazily wraps each parent partition of this coalesced split together with
  // the first parent RDD.
  override def compute(split: Partition,
      context: TaskContext): Iterator[DataLoadPartitionWrap[T]] = {
    val parentPartitions = split.asInstanceOf[CoalescedRDDPartition].parents.iterator
    parentPartitions.map(parent => DataLoadPartitionWrap(firstParent[T], parent))
  }

  override def getDependencies: Seq[Dependency[_]] = {
    val dependency = new NarrowDependency(prev) {
      // Each coalesced partition depends on exactly the parent partitions it groups.
      def getParents(partitionId: Int): Seq[Int] =
        partitions(partitionId).asInstanceOf[CoalescedRDDPartition].parentsIndices
    }
    Seq(dependency)
  }

  override def clearDependencies() {
    super.clearDependencies()
    // Release the parent RDD reference so it can be garbage collected.
    prev = null
  }

  /**
   * Returns the preferred machine for the partition. If split is of type
   * CoalescedRDDPartition, then the preferred machine will be one which most
   * parent splits prefer too.
   * @param partition the partition to locate
   * @return the machine most preferred by split
   */
  override def getPreferredLocations(partition: Partition): Seq[String] = {
    partition.asInstanceOf[CoalescedRDDPartition].preferredLocation.toSeq
  }
}
ashokblend/incubator-carbondata
integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/DataLoadCoalescedRDD.scala
Scala
apache-2.0
2,358
/***********************************************************************
 * Copyright (c) 2013-2015 Commonwealth Computer Research, Inc.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Apache License, Version 2.0 which
 * accompanies this distribution and is available at
 * http://www.opensource.org/licenses/apache2.0.php.
 *************************************************************************/

package org.locationtech.geomesa.jobs.scalding

import org.apache.accumulo.core.client.IteratorSetting
import org.apache.accumulo.core.data.{Key, Range => AcRange}
import org.apache.accumulo.core.security.Authorizations
import org.apache.accumulo.core.util.{Pair => AcPair}
import org.apache.hadoop.io.Text
import org.apache.log4j.Level

import scala.util.parsing.combinator.JavaTokenParsers

/**
 * Connection + table options shared by Accumulo scalding sources.
 * Concrete input/output variants add scan- or write-specific knobs.
 */
sealed trait AccumuloSourceOptions {
  def instance: String
  def zooKeepers: String
  def user: String
  def password: String
  def table: String
  def logLevel: Option[Level]
  override def toString = s"${getClass.getSimpleName}[$instance,$table]"
}

/** Options describing an Accumulo scan (input side). */
case class AccumuloInputOptions(
    instance: String,
    zooKeepers: String,
    user: String,
    password: String,
    table: String,
    ranges: Seq[SerializedRange] = Seq.empty,
    columns: Seq[SerializedColumn] = Seq.empty,
    iterators: Seq[IteratorSetting] = Seq.empty,
    authorizations: Authorizations = new Authorizations(),
    autoAdjustRanges: Option[Boolean] = None,
    localIterators: Option[Boolean] = None,
    offlineTableScan: Option[Boolean] = None,
    scanIsolation: Option[Boolean] = None,
    logLevel: Option[Level] = None) extends AccumuloSourceOptions

object AccumuloInputOptions {
  // NOTE(review): missing keys fall back to the literal string "None", not to
  // an absent value — presumably deliberate for error visibility; confirm.
  def apply(dsParams: Map[String, String]) = {
    new AccumuloInputOptions(
      dsParams.getOrElse("instanceId", "None"),
      dsParams.getOrElse("zookeepers", "None"),
      dsParams.getOrElse("user", "None"),
      dsParams.getOrElse("password", "None"),
      dsParams.getOrElse("tableName", "None"))
  }
}

/** Options describing an Accumulo write (output side). */
case class AccumuloOutputOptions(
    instance: String,
    zooKeepers: String,
    user: String,
    password: String,
    table: String,
    threads: Option[Int] = None,
    memory: Option[Long] = None,
    createTable: Boolean = false,
    logLevel: Option[Level] = None) extends AccumuloSourceOptions

object AccumuloOutputOptions {
  // Same "None"-string fallback convention as AccumuloInputOptions;
  // output sources always create the table.
  def apply(dsParams: Map[String, String]) = {
    new AccumuloOutputOptions(
      dsParams.getOrElse("instanceId", "None"),
      dsParams.getOrElse("zookeepers", "None"),
      dsParams.getOrElse("user", "None"),
      dsParams.getOrElse("password", "None"),
      dsParams.getOrElse("tableName", "None"),
      createTable = true)
  }
}

/** A serializable stand-in for an Accumulo Range, as a pair of endpoints. */
case class SerializedRange(start: Endpoint, end: Endpoint)

/** One end of a range: optional row/column family/column qualifier plus inclusivity. */
case class Endpoint(r: Option[String], cf: Option[String], cq: Option[String], inclusive: Boolean = false)

/** A serializable stand-in for a (column family, column qualifier) pair. */
case class SerializedColumn(cf: String, cq: String)

object SerializedRange {

  // Parses the textual range syntax produced by apply(Seq[AcRange]) below.
  // NOTE: the doubled backslashes in the regex literals below are preserved
  // verbatim from the original source.
  private class RangeParser extends JavaTokenParsers {
    // Intermediate parse results: a row triple and a bracket's inclusivity.
    case class Row(r: String, cf: Option[String], cq: Option[String])
    case class Bracket(inclusive: Boolean = true)

    /**
     * Grammar for a bracketed, comma-separated list of ranges, where each
     * endpoint is up to three whitespace-separated parts (row, cf, cq), e.g.:
     * [row: abced cf: ppp cq:ab, row:dd cf:ee cq:zz)
     * '(' / ')' mark exclusive endpoints, '[' / ']' inclusive ones.
     */
    def startBracket = """[\\(\\[]""".r ^^ {
      case b if b == "(" => Bracket(inclusive = false)
      case b if b == "[" => Bracket(inclusive = true)
    }
    def endBracket = """[\\)\\]]""".r ^^ {
      case b if b == ")" => Bracket(inclusive = false)
      case b if b == "]" => Bracket(inclusive = true)
    }
    // One token: anything except brackets, parens, commas and whitespace.
    def part = """[^\\[\\]\\(\\),\\s]+""".r
    // row [cf [cq]] — cf and cq are optional.
    def row = part ~ part.? ~ part.? ^^ { case r ~ cf ~ cq => Row(r, cf, cq) }
    def separator = ","
    // A missing row after the bracket yields an open (infinite) endpoint.
    def start = startBracket ~ row.? ^^ {
      case b ~ r => Endpoint(r.map(_.r), r.flatMap(_.cf), r.flatMap(_.cq), b.inclusive)
    }
    def end = row.? ~ endBracket ^^ {
      case r ~ b => Endpoint(r.map(_.r), r.flatMap(_.cf), r.flatMap(_.cq), b.inclusive)
    }
    def range = start ~ separator ~ end ^^ { case s ~ _ ~ e => SerializedRange(s, e) }
    def ranges = repsep(range, separator)

    // Runs the grammar; any parse error (or trailing garbage) becomes an
    // IllegalArgumentException so callers see a single failure type.
    def parse(s: String): Seq[SerializedRange] = parse(ranges, s.trim) match {
      case Success(t, r) if r.atEnd => t
      case Error(msg, r) => throw new IllegalArgumentException(msg)
      case Failure(msg, r) => throw new IllegalArgumentException(msg)
      case Success(t, r) => throw new IllegalArgumentException(s"Malformed attribute in ${r.source} at ${r.pos}")
    }
  }

  /** Parses a textual range spec; throws IllegalArgumentException on bad input. */
  def parse(s: String): Seq[SerializedRange] = new RangeParser().parse(s)

  def apply(s: String): Seq[SerializedRange] = parse(s)

  def apply(range: AcRange): Seq[SerializedRange] = apply(Seq(range))

  /**
   * Serializes Accumulo ranges into the textual form understood by `parse`,
   * then round-trips through the parser to build SerializedRanges.
   */
  def apply(ranges: Seq[AcRange]): Seq[SerializedRange] = {
    val sb = new StringBuilder
    val strings = ranges.map { r =>
      sb.clear()
      if (r.isStartKeyInclusive) sb.append("[") else sb.append("(")
      // An infinite start key is rendered as an empty endpoint.
      if (!r.isInfiniteStartKey) {
        sb.append(r.getStartKey.getRow.toString)
        sb.append(" ").append(r.getStartKey.getColumnFamily.toString)
        sb.append(" ").append(r.getStartKey.getColumnQualifier.toString)
      }
      sb.append(",")
      if (!r.isInfiniteStopKey) {
        sb.append(r.getEndKey.getRow.toString)
        sb.append(" ").append(r.getEndKey.getColumnFamily.toString)
        sb.append(" ").append(r.getEndKey.getColumnQualifier.toString)
      }
      if (r.isEndKeyInclusive) sb.append("]") else sb.append(")")
      sb.toString()
    }
    apply(strings.mkString(","))
  }
}

/** Extractor converting a SerializedRange back into an Accumulo Range. */
object SerializedRangeSeq {
  def unapply(value: SerializedRange): Option[AcRange] = {
    // Missing endpoint fields become empty strings; a missing row marks the
    // corresponding side of the AcRange as infinite (last two ctor args).
    val start = new Key(value.start.r.getOrElse(""), value.start.cf.getOrElse(""), value.start.cq.getOrElse(""))
    val end = new Key(value.end.r.getOrElse(""), value.end.cf.getOrElse(""), value.end.cq.getOrElse(""))
    Some(new AcRange(start, end, value.start.inclusive, value.end.inclusive, value.start.r.isEmpty, value.end.r.isEmpty))
  }
}

object SerializedColumn {

  // Parses "[cf cq],[cf cq],..." into SerializedColumns.
  private class ColumnParser extends JavaTokenParsers {
    def part = """[^\\[\\]\\(\\),\\s]+""".r
    def pair = "[" ~> part ~ part <~ "]" ^^ { case cf ~ cq => SerializedColumn(cf, cq) }
    def separator = ","
    def pairs = repsep(pair, separator)

    // Same error-normalization contract as RangeParser.parse above.
    def parse(s: String): Seq[SerializedColumn] = parse(pairs, s.trim) match {
      case Success(t, r) if r.atEnd => t
      case Error(msg, r) => throw new IllegalArgumentException(msg)
      case Failure(msg, r) => throw new IllegalArgumentException(msg)
      case Success(t, r) => throw new IllegalArgumentException(s"Malformed attribute in ${r.source} at ${r.pos}")
    }
  }

  /** Parses a textual column spec; throws IllegalArgumentException on bad input. */
  def parse(s: String): Seq[SerializedColumn] = new ColumnParser().parse(s)

  def apply(s: String): Seq[SerializedColumn] = parse(s)

  def apply(cols: AcPair[Text, Text]): Seq[SerializedColumn] = apply(Seq(cols))

  /** Serializes column pairs into "[cf cq]" syntax and round-trips via `parse`. */
  def apply(cols: Seq[AcPair[Text, Text]]): Seq[SerializedColumn] = {
    val strings = cols.map(c => s"[${c.getFirst.toString} ${c.getSecond.toString}]")
    apply(strings.mkString(","))
  }
}

/** Extractor converting a SerializedColumn back into a Hadoop Text pair. */
object SerializedColumnSeq {
  def unapply(value: SerializedColumn): Option[AcPair[Text, Text]] =
    Some(new AcPair[Text, Text](new Text(value.cf), new Text(value.cq)))
}
drackaer/geomesa
geomesa-jobs/src/main/scala/org/locationtech/geomesa/jobs/scalding/AccumuloSourceOptions.scala
Scala
apache-2.0
7,391
package dhg.ccg.parse.pcfg

import scala.collection.mutable.{ Map => MMap }
import scala.collection.mutable.{ Set => MSet }
import scala.collection.mutable
import scala.math.{ log, exp }
import scalaz.{ Ordering => _, _ }
import Scalaz._
import dhg.util._
import dhg.ccg.prob._
import dhg.ccg.tagdict.TagDictionary
import dhg.ccg.cat._
import dhg.ccg.rule._
import dhg.ccg.parse._
import dhg.ccg.util.DrawMatrix
import dhg.ccg.parse.pcfg.mcmc.PcfgTreeSampler
import dhg.ccg.data.EnglishCcgTreeBankReader
import dhg.ccg.tagdict.SimpleTagDictionaryFactory
import dhg.ccg.parse.dep.DepParserEvaluator
import dhg.ccg.parse.pcfg.mcmc.PcfgProductionCounter
import dhg.ccg.parse.pcfg.mcmc.SimplePcfgProductionCounter
import dhg.ccg.parse.pcfg.typesup._
import dhg.ccg.tag.learn.CatgramCatPriorInitializer
import dhg.ccg.tag.learn.TagdictInformedAtomCatDistInitializer
import dhg.ccg.tag.learn.TagDictionaryEstimateTagPriorInitializer
import dhg.ccg.tag.learn.EmTagDictionaryEstimate
import dhg.ccg.parse.scg.exp.Em2TermProdDist
import dhg.ccg.data.RuleViolatedRemovingTreeBankReader

/**
 * A k-best PCFG parser over a pre-built CFG guide chart.
 *
 * For every chart cell it keeps up to k candidate subtrees in a bounded
 * max-priority queue, filling cells bottom-up and finally taking the k best
 * root candidates weighted by the root distribution.
 *
 * @param rootDist probability of each category appearing at the root
 * @param prodDist probability of a production conditioned on its parent category
 */
class PcfgParser(
  val rootDist: LogProbabilityDistribution[Cat],
  val prodDist: ConditionalLogProbabilityDistribution[Cat, Prod])
  extends WeightedKBestGuideChartParser with TreeWeighter {

  private[this] val weighter = new SimplePcfgWeighter()

  /**
   * Finds the k highest-probability trees licensed by the guide chart.
   *
   * @param guideChart chart of allowed productions per span
   * @param us         per-cell, per-category extra weights multiplied into each
   *                   candidate (missing entries default to LogDouble.one)
   * @param k          number of trees to return
   * @return up to k (tree, probability) pairs, best first
   */
  def parseAndProbKBestWithWeightsFromGuideChart(guideChart: CfgGuideChart, us: Vector[Vector[Map[Cat, LogDouble]]], k: Int): Vector[(CcgTree, LogDouble)] = {
    val n = guideChart.length
    // One bounded k-best queue per (span, category); null rows/cells in the
    // guide chart stay null here.
    val table: Vector[Vector[Map[Cat, FastKMaxPriorityQueue[CcgTree]]]] =
      guideChart.matrix.map { row =>
        if (row != null) row.map { col =>
          if (col != null) col.mapVals { _ => FastKMaxPriorityQueue.empty[CcgTree](k) }
          else null
        }
        else null
      }
    for {
      (i, j, cell) <- guideChart.bottomUpNodes if cell.nonEmpty // visit all relevant cells
      u = us(i)(j)
      (ij, entries) <- cell // cell is sorted according to unary rule dependency relationships
      entry <- entries
    } entry match {
      // NOTE(review): the pattern variable `k` here is the binary split point
      // and shadows the k-best parameter `k` for the rest of this case.
      case BinaryGuideChartEntry(k, prod @ BinaryProd(ik, kj)) =>
        val prodP = prodDist(prod, ij)
        // (commented-out debug scaffolding from the original — sanity checks on
        // table/guide-chart agreement and rule validity — elided here)
        // Combine every k-best left subtree with every k-best right subtree.
        for {
          (ikTree, ikP) <- table(i)(k)(ik).iterator
          (kjTree, kjP) <- table(k)(j)(kj).iterator
        } {
          val p = prodP * ikP * kjP * u.getOrElse(ij, LogDouble.one)
          if (shouldAdd(p)) table(i)(j)(ij).add(CcgBinode(ij, ikTree, kjTree), p)
          // (debug println / printTable calls commented out in original)
        }
      case UnaryGuideChartEntry(prod @ UnaryProd(subCat)) =>
        // Relies on the cell ordering guarantee above: subCat's queue for this
        // same span is already filled before ij is processed.
        val prodP = prodDist(prod, ij)
        for ((subTree, subP) <- table(i)(j)(subCat).iterator) {
          val p = prodP * subP * u.getOrElse(ij, LogDouble.one)
          if (shouldAdd(p)) table(i)(j)(ij).add(CcgUnode(ij, subTree), p)
          // (debug println / printTable calls commented out in original)
        }
      case TermGuideChartEntry(prod @ TermProd(word)) =>
        val prodP = prodDist(prod, ij)
        val p = prodP * u.getOrElse(ij, LogDouble.one)
        // Leaves carry a placeholder POS tag.
        if (shouldAdd(p)) table(i)(j)(ij).add(CcgLeaf(ij, word, "FAKEPOS"), p)
        // (debug println / printTable calls commented out in original)
    }
    // Take the k best root candidates, each reweighted by the root distribution.
    (for {
      (ij, cell) <- table(0)(n).toVector if guideChart.rootSet.contains(ij)
      rootP = rootDist(ij)
      (tree, treeP) <- cell.iterator
      p = rootP * treeP
      // (debug println commented out in original)
      if shouldAdd(p)
    } yield (tree, p)).maxByN(k)(_._2)
  }

  // A candidate is only kept if its probability is a non-zero, non-NaN value.
  private[this] def shouldAdd(p: LogDouble) = {
    assert(!p.isNaN)
    //assert(p.nonZero)
    p.nonZero
  }

  // Debug helper: draws the chart (minus the diagonal's first column) showing,
  // per category, the queued candidate trees with their probabilities.
  def printTable(table: Vector[Vector[Map[Cat, FastKMaxPriorityQueue[CcgTree]]]]) {
    DrawMatrix.drawMatrix(table.map(_.tail))(_.map {
      case (cat, q) =>
        val qvals = q.toVector.map { case (t, p) => f"(${p.toDouble}%.6f, $t)" }
        val left = f"$cat -> {"
        f"$left${qvals.mkString(",\\n" + " " * left.length)}}"
    }.mkString("\\n"))(println)
  }

  def weight(t: CcgTree): LogDouble = weighter.weight(t, rootDist, prodDist)
}

/**
 * An approximate k-best parser: draws k samples from the tree sampler,
 * de-duplicates, weights, and sorts them by descending probability.
 */
class SamplingPcfgParser(
  rootDist: LogProbabilityDistribution[Cat],
  prodDist: ConditionalLogProbabilityDistribution[Cat, Prod],
  treeSampler: PcfgTreeSampler,
  weighter: PcfgWeighter)
  extends AbstractKBestGuideChartParser {

  def parseAndProbKBestFromGuideChart(guideChart: CfgGuideChart, k: Int): Vector[(CcgTree, LogDouble)] = {
    val trees = treeSampler.samples(guideChart, rootDist, prodDist, k).distinct.mapTo(weighter.weight(_, rootDist, prodDist))
    trees.sorted(Ordering.by[(CcgTree, LogDouble), LogDouble](_._2).reverse)
  }
}

/**
 * Splits the k-best request between two delegate parsers: up to kA trees from
 * delegateA, the remainder from delegateB; results are merged, de-duplicated,
 * and re-sorted by probability.
 */
class CompositePcfgParser(delegateA: KBestGuideChartParser, kA: Int, delegateB: KBestGuideChartParser) extends AbstractKBestGuideChartParser {
  def parseAndProbKBestFromGuideChart(guideChart: CfgGuideChart, k: Int): Vector[(CcgTree, LogDouble)] = {
    val kB = (k - kA) max 0
    val aTrees = time1("CompositePcfgParser aTrees", delegateA.parseAndProbKBestFromGuideChart(guideChart, k min kA))
    val bTrees = time1("CompositePcfgParser bTrees", (if (kB > 0) delegateB.parseAndProbKBestFromGuideChart(guideChart, kB) else Vector.empty))
    println(f"CompositePcfgParser: num aTrees = ${aTrees.size}; num bTrees = ${bTrees.size}; (${(aTrees ++ bTrees).map(_._1).toSet.size} unique)")
    (aTrees ++ bTrees).distinctBy(_._1).sorted(Ordering.by[(CcgTree, LogDouble), LogDouble](_._2).reverse)
  }
}

//
//

/**
 * End-to-end experiment driver: reads the CCG treebank, builds priors and
 * count-based distributions, trains a PcfgParser, and evaluates it.
 */
object PcfgParser {
  def main(args: Array[String]): Unit = {
    val rules = CcgRules.nonComp
    // Data: raw sentences, tag-dictionary data, and held-out test trees, with
    // rule-violating trees removed.
    val reader = new RuleViolatedRemovingTreeBankReader(new SimpleRuleViolationFinder(CcgRules.all, allowAllUnary = true), EnglishCcgTreeBankReader())
    val trainingData = time("read raw", reader.rawDataDONTUSE.toVector)
    val rawData = trainingData.map(_.words)
    val tdData = time("read td", reader.tdData.toVector)
    val testingData = time("read test", reader.testData.toVector)
    val tagdict = time("make td", new SimpleTagDictionaryFactory().apply(tdData.map(_.tagged), "<S>", cat"<S>", "<E>", cat"<E>", Set.empty, Set.empty))

    // Priors: a category-grammar prior over categories, lifted to root, binary,
    // unary, and terminal production distributions.
    val catPriorInit = new CatgramCatPriorInitializer(new TagdictInformedAtomCatDistInitializer(1.0), pTerm = 0.8, pMod = 0.6, pFwd = 0.5)
    val catPrior = time("make catprior", catPriorInit.fromRaw(rawData, tagdict))
    val priorRootDist = catPrior
    val priorBinyProd = new BinaryPriorDist(catPrior)
    val priorBinyDist = new UnconditionalWrappingConditionalLogProbabilityDistribution[Cat, BinaryProd](priorBinyProd)
    val priorUnryProd = new UnaryPriorDist(catPrior)
    val priorUnryDist = new UnconditionalWrappingConditionalLogProbabilityDistribution[Cat, UnaryProd](priorUnryProd)
    val priorTermDist = time("make term prior", Em2TermProdDist(new EmTagDictionaryEstimate(catPriorInit).fromRaw(rawData, tagdict)))
    // Binary/unary/terminal production types are interpolated with weight 1/3 each.
    val priorProdProd = new IPDU(priorBinyProd, priorUnryProd, new ConditionalWrappingLogProbabilityDistribution[Cat, TermProd](cat"fake !!", priorTermDist), LogDouble(1.0 / 3), LogDouble(1.0 / 3), LogDouble(1.0 / 3))
    // NOTE(review): `withDefaultValue` ordinarily takes a single default; three
    // arguments here looks suspicious — confirm ICPDU/withDefaultValue arity.
    val priorProdDist = new ICPDU(priorBinyDist, priorUnryDist, priorTermDist, Map().withDefaultValue(LogDouble(1.0 / 3), LogDouble(1.0 / 3), LogDouble(1.0 / 3)))

    // Counts from the training trees, smoothed against the priors (alpha = 1.0).
    val prodFinder = new SimplePcfgProductionCounter()
    val rootCounts = time("make root counts", trainingData.map(prodFinder.rootCounts).reduce(_ |+| _).mapVals(LogDouble(_)))
    val prodCounts = time("make prod counts", trainingData.map(prodFinder.prodCounts).reduce(_ |+| _).mapVals(_.mapVals(LogDouble(_))))
    val rootDist = time("make root dist", new AlphaBetaLogProbabilityDistribution(rootCounts, LogDouble(1.0), priorRootDist))
    val prodDist = time("make prod dist", new AlphaBetaConditionalLogProbabilityDistribution(prodCounts.mapVals { counts => new AlphaBetaLogProbabilityDistribution(counts, LogDouble(1.0), priorProdProd) }, LogDouble(1.0), priorProdDist))

    // Evaluation on guide charts built from the test sentences.
    val parser = new PcfgParser(rootDist, prodDist)
    val guideChartBuilder = new SimpleCfgGuideChartBuilder(rules, allowTerminalDeletion = false)
    val testingGCs = time("make test guidecharts", testingData.iterator.map(t => (guideChartBuilder.build(t.words, None, tagdict), t)))
    val evaluator = new DepParserEvaluator(None, true)
    time("evaluate", evaluator.evaluate(parser, testingGCs, tagdict))
  }
}
dhgarrette/2015-ccg-parsing
src/main/scala/dhg/ccg/parse/pcfg/PcfgParser.scala
Scala
apache-2.0
9,772
package org.dsa.core

import java.text.SimpleDateFormat
import java.util.Date

import org.apache.log4j.{Level, Logger}
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
import org.dsa.mediator.ssw.SSWScala
import org.dsa.rdd.{AlignmentRecord, AlignmentRecordTopK}
import org.dsa.utils.NameOrdering

/**
 * Distributed Smith-Waterman: aligns a query sequence against a distributed
 * reference database and keeps the top-k scoring alignments.
 *
 * Created by xubo on 2016/11/28.
 */
class DSW extends SequenceAlignment {
  /**
   * Aligns a single query sequence against the reference RDD and returns the
   * top-k results.
   *
   * @param query       (name, sequence) pair for the query
   * @param refRDD      reference database as (name, sequence) pairs
   * @param scoreMatrix scoring matrix, e.g. BLOSUM50 for proteins
   *                    (unused here — SSWScala.align is called without it; TODO confirm)
   * @param topK        number of best alignments to keep
   * @param sc          SparkContext
   * @return the top-k alignment records for this query
   */
  override def align(query: (String, String), refRDD: RDD[(String, String)], scoreMatrix: Array[Array[Int]], topK: Int, sc: SparkContext): AlignmentRecordTopK = {
    //1 compute: align the query against every reference sequence
    var alignmentRecordTopK = new AlignmentRecordTopK()
    val mapRDD = refRDD.map { ref =>
      // implicit val ord = implicitly[NameOrdering]
      val alignment = SSWScala.align(query._2, ref._2)
      // (alignment.score1, alignment)
      alignment.refName = ref._1
      alignment
    }
    // Take the k best alignments under NameOrdering (presumably score-based —
    // verify against org.dsa.utils.NameOrdering).
    val alignmendRecordArray = mapRDD.top(topK)(NameOrdering)
    //2 transform: package the results into the result holder
    alignmentRecordTopK.setTopK(topK)
    alignmentRecordTopK.setQueryName(query._1)
    alignmentRecordTopK.setAlignmentRcoreds(alignmendRecordArray)
    //3 return
    alignmentRecordTopK
  }

  // def compare(a: AlignmentRecord, b: AlignmentRecord): Int =
  //   implicitly[Ordering[AlignmentRecord]].compare(a.score1, b.score1)

  /**
   * Preprocesses the score matrix file.
   *
   * @param scoreMatrixFile score matrix file path
   * @param sc              SparkContext
   * @return the preprocessed matrix (currently unimplemented — always null)
   */
  override def preprocessScoreMatrix(scoreMatrixFile: String, sc: SparkContext): Array[Array[Int]] = null
  // = {
  //   var test = Array.ofDim[Int](1, 1)
  //   test(0)(0) = 1
  //   test
  // }
}

/** Command-line driver: parses args, runs the alignment, and records timing. */
object DSW {
  // Mutable run-wide bookkeeping shared between main and saveResult.
  var startTime = System.currentTimeMillis()
  var stopTime = System.currentTimeMillis()
  var outStr = new String
  var appName = new String
  var outFile = new StringBuilder

  /**
   * Entry point. Expected args:
   * 0=score-matrix file, 1=query file, 2=database file, 3=split count,
   * 4=task count, 5=topK, optional 6=output path prefix.
   */
  def main(args: Array[String]) {
    val subMatrix = args(0)
    val queryFile = args(1)
    val dbFile = args(2)
    val splitNum = args(3).toInt
    val taskNum = args(4).toInt
    val topK = args(5).toInt
    val dsw = new DSW()
    //
    // val result = dsw.run()
    // }
    //
    // def run(subMatrix: String, queryFile: String, dbFile: String, splitNum: Int, taskNum: Int, topK: Int) {
    val queryArr = queryFile.toString.split("/")
    val dbArr = dbFile.toString.split("/")
    // Timestamped output directory derived from the run parameters.
    val iString = new SimpleDateFormat("yyyyMMddHHmmssSSS").format(new Date())
    // `className` is presumably provided by SequenceAlignment — TODO confirm.
    val alg = dsw.className
    outStr = "/xubo/project/SparkSW/output/time/" + iString + alg + "_" + "queryFile_" + queryArr(queryArr.length - 1) + "_dbFile_" + dbArr(dbArr.length - 1) + "_splitNum_" + splitNum.toString + "_taskNum_" + taskNum.toString + "_topK_" + topK.toString
    if (args.length > 6) {
      outStr = args(6) + iString
    }
    // Header row, then the data row, of the tab-separated timing record.
    outFile.append("initTime1\\t")
      .append("alg" + "\\t")
      .append("data" + "\\t")
      .append("query" + "\\t")
      .append("splitNum" + "\\t")
      .append("taskNum" + "\\t")
      .append("org/dsw/topK" + "\\t")
      .append("totalTime\\t")
      .append("fileName\\t")
      .append("\\n")
    outFile.append(iString + "\\t")
      .append(alg + "\\t")
      .append(dbArr(dbArr.length - 1) + "\\t")
      .append(queryArr(queryArr.length - 1) + "\\t")
      .append(splitNum.toString + "\\t")
      .append(taskNum.toString + "\\t")
      .append(topK.toString + "\\t")
    appName = alg + " Application:" + "queryFile=" + queryFile.toString + ",dbFile=" + dbFile.toString + ",splitNum=" + splitNum.toString + ",taskNum=" + taskNum.toString + ",topK=" + topK.toString
    startTime = System.currentTimeMillis()
    // set application name
    val conf = new SparkConf().setAppName(appName)
    val rootLogger = Logger.getRootLogger()
    rootLogger.setLevel(Level.OFF)
    // initialize SparkContext
    val spark = new SparkContext(conf)
    // `run` is presumably declared on SequenceAlignment (not visible here) —
    // TODO confirm; the `name: Type` forms below are call-site type ascriptions.
    val result = dsw.run(subMatrix: String, queryFile: String, dbFile: String, splitNum: Int, taskNum: Int, topK: Int, spark)
    spark.stop()
    stopTime = System.currentTimeMillis()
    outFile.append((stopTime - startTime) / 1000.0 + "\\t").append(outStr + "\\t")
    // Print each query's top-k alignments to stdout.
    result.foreach { each =>
      println("topK:" + each.getTopK() + " Query:" + each.getQueryName())
      each.getAlignmentRcoreds().foreach { alignmentRecord =>
        println(alignmentRecord)
      }
      println()
    }
    saveResult(outFile.toString())
    outFile.clear()
  }

  /**
   * Writes the timing record to `outStr` as a single-partition text file.
   * Opens a fresh SparkContext — legal only because main stopped its own
   * context first (one active context per JVM).
   */
  def saveResult(str: String): Unit = {
    val conf = new SparkConf().setAppName("DSW" + " Application:saveResult,out:" + outStr)
    // initialize SparkContext
    val sc = new SparkContext(conf)
    val rddSave = sc.parallelize(Array(str.toString()), 1)
    rddSave.saveAsTextFile(outStr)
    sc.stop
  }
}
xubo245/CloudSW
src/main/scala/org/dsa/core/DSW.scala
Scala
gpl-2.0
5,108
package chat.tox.antox.callbacks

import java.util

import android.content.Context
import android.util.Log
import chat.tox.antox.data.State
import chat.tox.antox.wrapper.CallNumber
import im.tox.tox4j.av.enums.ToxavFriendCallState

import scala.collection.JavaConversions._

/**
 * Receives tox4j call-state-change callbacks and forwards the new state to
 * the matching active call, if one exists in the call manager.
 */
class AntoxOnCallStateCallback(private var ctx: Context) {

  /**
   * Handles a call-state callback.
   *
   * @param callNumber      identifier of the call whose state changed
   * @param collectionState the new friend-call-state flags as a Java collection
   * @param state           unused; second parameter list kept to preserve the
   *                        existing callback signature
   */
  def callState(callNumber: CallNumber, collectionState: util.Collection[ToxavFriendCallState])(state: Unit): Unit = {
    Log.d("OnAvCallbackCallback", "Received a callback from: " + callNumber + " state is " + collectionState)
    val call = State.callManager.get(callNumber)
    // Fix: the original local was also named `state`, shadowing the `state`
    // parameter above; use a distinct name to avoid that confusion.
    // JavaConversions converts the Java collection to a Scala Set implicitly.
    val newFriendState = collectionState.toSet
    call.foreach(_.updateFriendState(newFriendState))
  }
}
wiiam/Antox
app/src/main/scala/chat/tox/antox/callbacks/AntoxOnCallStateCallback.scala
Scala
gpl-3.0
704
/* * Range.scala * Methods for computing the range of elements associated with problem components. * * Created By: Avi Pfeffer (apfeffer@cra.com) * Creation Date: March 1, 2015 * * Copyright 2015 Avrom J. Pfeffer and Charles River Analytics, Inc. * See http://www.cra.com or email figaro@cra.com for information. * * See http://www.github.com/p2t2/figaro for a copy of the software license. */ package com.cra.figaro.algorithm.structured import com.cra.figaro.algorithm.lazyfactored.ValueSet import com.cra.figaro.algorithm.lazyfactored.ValueSet._ import com.cra.figaro.language._ import com.cra.figaro.library.compound.FastIf import com.cra.figaro.util.{ MultiSet, homogeneousCartesianProduct } import com.cra.figaro.util.HashMultiSet import com.cra.figaro.algorithm.factored.ParticleGenerator import com.cra.figaro.library.collection.FixedSizeArray import com.cra.figaro.library.atomic.discrete.{ AtomicBinomial, ParameterizedBinomialFixedNumTrials } import com.cra.figaro.library.compound.FoldLeft import com.cra.figaro.library.compound.IntSelector object Range { private def getRange[U](collection: ComponentCollection, otherElement: Element[U]): ValueSet[U] = { if (collection.contains(otherElement)) collection(otherElement).range else withStar(Set()) } // Get the range of the reference by taking the union of the current ranges of all the current possible targets. // Do not add any elements or expand any other ranges in the process in the process. 
// NOTE(review): this excerpt is the interior of an enclosing object whose header lies
// above this chunk; the final closing brace below closes that object.

/**
 * Computes the value set of a single-valued reference `ref`, resolved starting from
 * element collection `ec`. A direct reference yields the referred element's range;
 * an indirect reference recurses through every possible head element collection.
 * Star (incompleteness) on the head propagates to the result.
 */
private[algorithm] def getRangeOfSingleValuedReference[V](cc: ComponentCollection, ec: ElementCollection, ref: Reference[V]): ValueSet[V] = {
  val (firstElem, restRefOpt) = ec.getFirst(ref)
  restRefOpt match {
    // Direct reference: the range is simply the range of the referred element.
    case None => getRange(cc, firstElem.asInstanceOf[Element[V]])
    case Some(restRef) =>
      try {
        // Indirect reference: the head must be an element over element collections.
        val firstRange = getRange(cc, firstElem.asInstanceOf[Element[ElementCollection]])
        // Recursively resolve the remainder of the reference from every possible head value.
        val vss: scala.collection.immutable.Set[ValueSet[V]] =
          for { firstEC <- firstRange.regularValues } yield {
            getRangeOfSingleValuedReference(cc, firstEC, restRef)
          }
        val starter: ValueSet[V] = if (firstRange.hasStar) withStar(Set()) else withoutStar(Set())
        vss.foldLeft(starter)(_ ++ _)
      } catch {
        case _: ClassCastException =>
          println("Warning: head of indirect reference does not refer to an element collection; setting range to empty with *")
          withStar(Set())
      }
  }
}

/**
 * Computes the value set (of multisets of values) of a multi-valued reference `ref`,
 * resolved starting from element collection `ec`. Enumerates every possible joint
 * target list, then every multiset of values those targets can jointly take.
 */
private[algorithm] def getRangeOfMultiValuedReference[V](cc: ComponentCollection, ec: ElementCollection, ref: Reference[V]): ValueSet[MultiSet[V]] = {
  // This function gets all the lists of elements that could be joint targets of the given reference, starting in the given element collection.
  // The first return value of this function is a set of all the possible lists of targets referred to by this reference.
  // The second return value is a flag indicating whether any internal element in the path has *.
  def getTargetSets(currentEC: ElementCollection, currentRef: Reference[V]): (Set[List[Element[V]]], Boolean) = {
    val (firstElem, restRefOpt) = currentEC.getFirst(currentRef)
    restRefOpt match {
      case None => (Set(List(firstElem.asInstanceOf[Element[V]])), false)
      case Some(restRef) =>
        try {
          var hasStar = false
          val firstRange = getRange(cc, firstElem)
          val targetSetSet: Set[Set[List[Element[V]]]] =
            for { value <- firstRange.regularValues } yield {
              val (targetSets, hs) = getTargetSetsHelper(value, restRef)
              if (hs) hasStar = true
              targetSets
            }
          (targetSetSet.flatten, hasStar || firstRange.hasStar)
        } catch {
          case _: IllegalArgumentException =>
            println("Warning: head of indirect reference does not refer to an element collection; setting range to empty with *")
            (Set(), true)
        }
    }
  }

  // This function gets all the lists of elements that could be joint targets of a set of element collections.
  // The first argument is a value that can be used to create a set of element collections.
  // The second argument is the remaining reference that starts from each of these element collections.
  // For each element collection in the set, we get the possible lists of targets resulting from that element collection.
  // Then we get all possible lists that contain a list for each element collection.
  // Each one of these lists, when flattened, contains a list of elements that is one possible joint target,
  // so it is returned in the result.
  // This function also keeps track of whether any element involved has * in its range.
  def getTargetSetsHelper[T](ecMaker: T, restRef: Reference[V]): (Set[List[Element[V]]], Boolean) = {
    val ecs: List[ElementCollection] = ElementCollection.makeElementCollectionSet(ecMaker).toList
    var hasStar = false
    val subTargetSets: List[List[List[Element[V]]]] =
      for { ec <- ecs.toList } yield {
        val (subTargetSet, hs) = getTargetSets(ec, restRef)
        if (hs) hasStar = true
        subTargetSet.toList
      }
    // Cross product: one target-list choice per element collection, flattened into one joint list.
    val combinations: List[List[List[Element[V]]]] = homogeneousCartesianProduct(subTargetSets: _*)
    val targetSets = combinations.map(_.flatten).toSet
    (targetSets, hasStar)
  }

  // For one concrete target list, enumerate every multiset of values the targets can jointly take.
  def getMultiSetPossibilities(targetSet: List[Element[V]]): ValueSet[MultiSet[V]] = {
    val ranges: List[ValueSet[V]] = targetSet.map(getRange(cc, _))
    val regularRanges: List[List[V]] = ranges.map(_.regularValues.toList)
    val possibilities: List[List[V]] = homogeneousCartesianProduct(regularRanges: _*)
    val multiSets: List[MultiSet[V]] =
      for { possibility <- possibilities } yield {
        val multiSet = new HashMultiSet[V]
        possibility.foreach(multiSet.addOne(_))
        multiSet
      }
    if (ranges.exists(_.hasStar)) withStar(multiSets.toSet) else withoutStar(multiSets.toSet)
  }

  // First step is to get the current possible target sets.
  // Then, for each target set, we get all the possible multisets of their values.
  // Then, we take the union of these multisets.
  // Does not add any elements or expand any other ranges.
  val (targetSets, hasStar) = getTargetSets(ec, ref)
  val multiSetPossibilities = targetSets.map(getMultiSetPossibilities(_))
  val starter: ValueSet[MultiSet[V]] = if (hasStar) withStar(Set()) else withoutStar(Set())
  multiSetPossibilities.foldLeft(starter)(_ ++ _)
}

/**
 * Computes the range of a FoldLeft element: starting from the fold's start value,
 * combines every accumulator value with every value of each successive input element.
 * Star on any input, or on the accumulator, carries forward to the result.
 */
def getRangeOfFold[T, U](cc: ComponentCollection, fold: FoldLeft[T, U]): ValueSet[U] = {
  def helper(currentAccum: ValueSet[U], remainingElements: Seq[Element[T]]): ValueSet[U] = {
    if (remainingElements.isEmpty) currentAccum
    else {
      val firstVS = getRange(cc, remainingElements.head)
      val nextRegular =
        for {
          currentAccumVal <- currentAccum.regularValues
          firstVal <- firstVS.regularValues
        } yield fold.function(currentAccumVal, firstVal)
      val nextHasStar = currentAccum.hasStar || firstVS.hasStar
      val nextAccum = if (nextHasStar) ValueSet.withStar(nextRegular) else ValueSet.withoutStar(nextRegular)
      helper(nextAccum, remainingElements.tail)
    }
  }
  helper(ValueSet.withoutStar(Set(fold.start)), fold.elements)
}

/**
 * Computes the range of a problem component, dispatching to the specialized
 * computation for chains, make-arrays, and applies; `numValues` bounds the
 * number of samples used for atomic elements in `otherRange`.
 */
def apply[V](component: ProblemComponent[V], numValues: Int): ValueSet[V] = {
  component match {
    case cc: ChainComponent[_, V] => chainRange(cc)
    case mc: MakeArrayComponent[V] => makeArrayRange(mc)
    case ac: ApplyComponent[V] => applyRange(ac)
    case _ => otherRange(component, numValues)
  }
}

// Range of a chain component: union of the target ranges of all expanded subproblems,
// with * if the parent has * or not all parent values have been expanded yet.
private def chainRange[P, V](component: ChainComponent[P, V]): ValueSet[V] = {
  val collection = component.problem.collection
  component.chain match {
    // Parameterized binomial needs to be handled specially, because creating the factors for a parameterized element,
    // when the parameterized flag is true, creates a simple factor over the element.
    case b: ParameterizedBinomialFixedNumTrials =>
      val values = (0 to b.numTrials).toSet
      if (getRange(collection, b.parameter).hasStar) withStar(values) else withoutStar(values)
    case _ =>
      val parentVs = getRange(collection, component.chain.parent)
      val resultVs =
        for {
          parentV <- parentVs.regularValues
          subproblem <- component.subproblems.get(parentV)
        } yield getRange(collection, subproblem.target)
      val fullyExpanded = parentVs.regularValues.forall(component.subproblems.contains(_))
      val starter: ValueSet[V] = if (parentVs.hasStar || !fullyExpanded) withStar(Set()) else withoutStar(Set())
      resultVs.foldLeft(starter)(_ ++ _)
  }
}

// Range of a MakeArray component: one fixed-size array per possible item count.
// Counts beyond maxExpanded are filtered out and flagged with *.
private def makeArrayRange[V](component: MakeArrayComponent[V]): ValueSet[FixedSizeArray[V]] = {
  val collection = component.problem.collection
  val numItemsRange = getRange(collection, component.makeArray.numItems)
  val numItemsMax = numItemsRange.regularValues.foldLeft(0)(_ max _)
  if (numItemsMax <= component.maxExpanded) {
    val resultVs = numItemsRange.regularValues.map(component.makeArray.arrays(_))
    if (numItemsRange.hasStar) withStar(resultVs) else withoutStar(resultVs)
  } else {
    val resultVs = numItemsRange.regularValues.filter(_ <= component.maxExpanded).map(component.makeArray.arrays(_))
    withStar(resultVs)
  }
}

// Range of an Apply component: apply the function to every combination of argument
// values, memoizing results in the component's map. Star on any argument propagates.
private def applyRange[V](component: ApplyComponent[V]): ValueSet[V] = {
  val collection = component.problem.collection
  val applyMap = component.getMap()
  component.element match {
    case i: FastIf[_] =>
      if (getRange(collection, i.test).hasStar) withStar(Set(i.thn, i.els)) else withoutStar(Set(i.thn, i.els))
    case a: Apply1[_, V] =>
      val vs1 = getRange(collection, a.arg1)
      val resultsSet =
        for { arg1Val <- vs1.regularValues } yield {
          applyMap.getOrElseUpdate(arg1Val, a.fn(arg1Val))
        }
      if (vs1.hasStar) withStar(resultsSet); else withoutStar(resultsSet)
    case a: Apply2[_, _, _] =>
      val vs1 = getRange(collection, a.arg1)
      val vs2 = getRange(collection, a.arg2)
      val resultSet =
        for {
          v1 <- vs1.regularValues
          v2 <- vs2.regularValues
        } yield {
          applyMap.getOrElseUpdate((v1, v2), a.fn(v1, v2))
        }
      if (vs1.hasStar || vs2.hasStar) withStar(resultSet) else withoutStar(resultSet)
    case a: Apply3[_, _, _, _] =>
      val vs1 = getRange(collection, a.arg1)
      val vs2 = getRange(collection, a.arg2)
      val vs3 = getRange(collection, a.arg3)
      val resultSet =
        for {
          v1 <- vs1.regularValues
          v2 <- vs2.regularValues
          v3 <- vs3.regularValues
        } yield {
          applyMap.getOrElseUpdate((v1, v2, v3), a.fn(v1, v2, v3))
        }
      if (vs1.hasStar || vs2.hasStar || vs3.hasStar) withStar(resultSet) else withoutStar(resultSet)
    case a: Apply4[_, _, _, _, _] =>
      val vs1 = getRange(collection, a.arg1)
      val vs2 = getRange(collection, a.arg2)
      val vs3 = getRange(collection, a.arg3)
      val vs4 = getRange(collection, a.arg4)
      val resultSet =
        for {
          v1 <- vs1.regularValues
          v2 <- vs2.regularValues
          v3 <- vs3.regularValues
          v4 <- vs4.regularValues
        } yield {
          applyMap.getOrElseUpdate((v1, v2, v3, v4), a.fn(v1, v2, v3, v4))
        }
      if (vs1.hasStar || vs2.hasStar || vs3.hasStar || vs4.hasStar) withStar(resultSet) else withoutStar(resultSet)
    case a: Apply5[_, _, _, _, _, _] =>
      val vs1 = getRange(collection, a.arg1)
      val vs2 = getRange(collection, a.arg2)
      val vs3 = getRange(collection, a.arg3)
      val vs4 = getRange(collection, a.arg4)
      val vs5 = getRange(collection, a.arg5)
      val resultSet =
        for {
          v1 <- vs1.regularValues
          v2 <- vs2.regularValues
          v3 <- vs3.regularValues
          v4 <- vs4.regularValues
          v5 <- vs5.regularValues
        } yield {
          applyMap.getOrElseUpdate((v1, v2, v3, v4, v5), a.fn(v1, v2, v3, v4, v5))
        }
      if (vs1.hasStar || vs2.hasStar || vs3.hasStar || vs4.hasStar || vs5.hasStar) withStar(resultSet) else withoutStar(resultSet)
  }
}

// Range of every remaining element type. Atomic continuous elements are sampled
// (numValues samples); unrecognized elements get the empty range with *.
private def otherRange[V](component: ProblemComponent[V], numValues: Int): ValueSet[V] = {
  val collection = component.problem.collection
  component.element match {
    case c: Constant[_] => withoutStar(Set(c.constant))
    case f: AtomicFlip => withoutStar(Set(true, false))
    case f: ParameterizedFlip =>
      if (getRange(collection, f.parameter).hasStar) withStar(Set(true, false)) else withoutStar(Set(true, false))
    case f: CompoundFlip =>
      if (getRange(collection, f.prob).hasStar) withStar(Set(true, false)) else withoutStar(Set(true, false))
    case s: AtomicSelect[_] => withoutStar(Set(s.outcomes: _*))
    case s: ParameterizedSelect[_] =>
      val values = Set(s.outcomes: _*)
      if (getRange(collection, s.parameter).hasStar) withStar(values) else withoutStar(values)
    case s: CompoundSelect[_] =>
      val values = Set(s.outcomes: _*)
      if (s.probs.map(getRange(collection, _)).exists(_.hasStar)) withStar(values) else withoutStar(values)
    case b: AtomicBinomial => ValueSet.withoutStar((0 to b.numTrials).toSet)
    case d: AtomicDist[_] =>
      val componentSets = d.outcomes.map(getRange(collection, _))
      componentSets.reduce(_ ++ _)
    case d: CompoundDist[_] =>
      val componentSets = d.outcomes.map(getRange(collection, _))
      val values = componentSets.reduce(_ ++ _)
      // Star on any probability makes the result incomplete even though the values are known.
      if (d.probs.map(getRange(collection, _)).exists(_.hasStar)) values ++ withStar(Set()) else values
    //case i: FastIf[_] =>
    //  if (getRange(collection, i.test).hasStar) withStar(Set(i.thn, i.els)) else withoutStar(Set(i.thn, i.els))
    case c: Chain[_, _] =>
      throw new RuntimeException("This shouldn't be called") // The other version of apply should always be called for a chain
    case i: Inject[_] =>
      val argVSs = i.args.map(getRange(collection, _))
      // val elementVSs = i.args.map(arg => LazyValues(arg.universe).storedValues(arg))
      val incomplete = argVSs.exists(_.hasStar)
      val elementValues = argVSs.toList.map(_.regularValues.toList)
      val resultValues = homogeneousCartesianProduct(elementValues: _*).toSet.asInstanceOf[Set[i.Value]]
      if (incomplete) withStar(resultValues); else withoutStar(resultValues)
    case a: Atomic[_] => {
      if (!ParticleGenerator.exists(a.universe)) {
        println("Warning: Sampling element " + a + " even though no sampler defined for this universe")
      }
      // Approximate a continuous/atomic range by numValues samples.
      val thisSampler = ParticleGenerator(a.universe)
      val samples = thisSampler(a, numValues)
      withoutStar(samples.unzip._2.toSet)
    }
    case r: SingleValuedReferenceElement[_] =>
      getRangeOfSingleValuedReference(collection, r.collection, r.reference)
    case r: MultiValuedReferenceElement[_] =>
      getRangeOfMultiValuedReference(collection, r.collection, r.reference)
    case a: Aggregate[_, _] =>
      val inputs = getRange(collection, a.mvre)
      val resultValues = inputs.regularValues.map(a.aggregate(_))
      if (inputs.hasStar) withStar(resultValues); else withoutStar(resultValues)
    case f: FoldLeft[_, _] => getRangeOfFold(collection, f)
    case i: IntSelector =>
      val counterValues = getRange(collection, i.counter)
      if (counterValues.regularValues.nonEmpty) {
        val maxCounter = counterValues.regularValues.max
        // val all = List.tabulate(maxCounter)(i => i).toSet
        val all = Set((0 until maxCounter): _*)
        if (counterValues.hasStar) ValueSet.withStar(all); else ValueSet.withoutStar(all)
      } else {
        ValueSet.withStar(Set())
      }
    case _ =>
      /* A new improvement - if we can't compute the values, we just make them *, so the rest of the computation can proceed */
      withStar(Set())
  }
}
}
scottcb/figaro
Figaro/src/main/scala/com/cra/figaro/algorithm/structured/Range.scala
Scala
bsd-3-clause
16,500
/*
 * Copyright 2011-2021 Asakusa Framework Team.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.asakusafw.spark.compiler
package operator
package core

import org.junit.runner.RunWith
import org.scalatest.FlatSpec
import org.scalatest.junit.JUnitRunner

import java.io.{ DataInput, DataOutput }

import scala.collection.JavaConversions._

import org.apache.hadoop.io.Writable
import org.apache.spark.broadcast.Broadcast

import com.asakusafw.lang.compiler.model.description.ClassDescription
import com.asakusafw.lang.compiler.model.graph.CoreOperator
import com.asakusafw.lang.compiler.model.graph.CoreOperator.CoreOperatorKind
import com.asakusafw.runtime.model.DataModel
import com.asakusafw.runtime.value.{ DoubleOption, IntOption, LongOption }
import com.asakusafw.spark.compiler.spi.{ OperatorCompiler, OperatorType }
import com.asakusafw.spark.runtime.fragment.{ Fragment, GenericOutputFragment }
import com.asakusafw.spark.runtime.graph.BroadcastId
import com.asakusafw.spark.tools.asm._

// JUnit entry point for the ScalaTest spec below.
@RunWith(classOf[JUnitRunner])
class ProjectionOperatorsCompilerSpecTest extends ProjectionOperatorsCompilerSpec

/**
 * Tests for the projection-style core operators (PROJECT / EXTEND / RESTRUCTURE).
 * Each test compiles the operator to a fragment class, instantiates it reflectively,
 * feeds it inputs, and checks the copied / defaulted fields on the outputs.
 */
class ProjectionOperatorsCompilerSpec extends FlatSpec with UsingCompilerContext {

  import ProjectionOperatorsCompilerSpec._

  behavior of classOf[ProjectionOperatorsCompiler].getSimpleName

  it should "compile Project operator" in {
    import Project._
    // PROJECT drops fields: Input(i, l) -> Output(i).
    val operator = CoreOperator.builder(CoreOperatorKind.PROJECT)
      .input("input", ClassDescription.of(classOf[Input]))
      .output("output", ClassDescription.of(classOf[Output]))
      .build()

    implicit val context = newOperatorCompilerContext("flowId")

    // Compile to bytecode and load the generated fragment class.
    val thisType = OperatorCompiler.compile(operator, OperatorType.ExtractType)
    val cls = context.loadClass[Fragment[Input]](thisType.getClassName)

    val out = new GenericOutputFragment[Output]()
    val fragment = cls
      .getConstructor(classOf[Map[BroadcastId, Broadcast[_]]], classOf[Fragment[_]])
      .newInstance(Map.empty, out)

    val input = new Project.Input()
    for (i <- 0 until 10) {
      input.i.modify(i)
      input.l.modify(i)
      fragment.add(input)
    }
    // Only the retained field `i` is checked; `l` no longer exists on Output.
    out.iterator.zipWithIndex.foreach {
      case (output, i) =>
        assert(output.i.get === i)
    }

    fragment.reset()
  }

  it should "compile Extend operator" in {
    import Extend._
    // EXTEND adds fields: Input(i) -> Output(i, l); the new field must be null.
    val operator = CoreOperator.builder(CoreOperatorKind.EXTEND)
      .input("input", ClassDescription.of(classOf[Input]))
      .output("output", ClassDescription.of(classOf[Output]))
      .build()

    implicit val context = newOperatorCompilerContext("flowId")

    val thisType = OperatorCompiler.compile(operator, OperatorType.ExtractType)
    val cls = context.loadClass[Fragment[Input]](thisType.getClassName)

    val out = new GenericOutputFragment[Output]()
    val fragment = cls
      .getConstructor(classOf[Map[BroadcastId, Broadcast[_]]], classOf[Fragment[_]])
      .newInstance(Map.empty, out)

    val input = new Input()
    for (i <- 0 until 10) {
      input.i.modify(i)
      fragment.add(input)
    }
    out.iterator.zipWithIndex.foreach {
      case (output, i) =>
        assert(output.i.get === i)
        // Added field is not populated by EXTEND.
        assert(output.l.isNull)
    }

    fragment.reset()
  }

  it should "compile Restructure operator" in {
    import Restructure._
    // RESTRUCTURE copies matching fields and nulls the rest: Input(i, l) -> Output(i, d).
    val operator = CoreOperator.builder(CoreOperatorKind.RESTRUCTURE)
      .input("input", ClassDescription.of(classOf[Input]))
      .output("output", ClassDescription.of(classOf[Output]))
      .build()

    implicit val context = newOperatorCompilerContext("flowId")

    val thisType = OperatorCompiler.compile(operator, OperatorType.ExtractType)
    val cls = context.loadClass[Fragment[Input]](thisType.getClassName)

    val out = new GenericOutputFragment[Output]()
    val fragment = cls
      .getConstructor(classOf[Map[BroadcastId, Broadcast[_]]], classOf[Fragment[_]])
      .newInstance(Map.empty, out)

    fragment.reset()

    val input = new Input()
    for (i <- 0 until 10) {
      input.i.modify(i)
      fragment.add(input)
    }
    out.iterator.zipWithIndex.foreach {
      case (output, i) =>
        assert(output.i.get === i)
        // `d` has no counterpart on Input, so it stays null.
        assert(output.d.isNull)
    }

    fragment.reset()
  }
}

// Hand-written data models used as operator inputs/outputs in the tests above.
object ProjectionOperatorsCompilerSpec {

  object Project {

    // Two-field input: int `i` and long `l`.
    class Input extends DataModel[Input] with Writable {

      val i: IntOption = new IntOption()
      val l: LongOption = new LongOption()

      override def reset: Unit = {
        i.setNull()
        l.setNull()
      }
      override def copyFrom(other: Input): Unit = {
        i.copyFrom(other.i)
        l.copyFrom(other.l)
      }
      override def readFields(in: DataInput): Unit = {
        i.readFields(in)
        l.readFields(in)
      }
      override def write(out: DataOutput): Unit = {
        i.write(out)
        l.write(out)
      }

      def getIOption: IntOption = i
      def getLOption: LongOption = l
    }

    // Projected output: keeps only `i`.
    class Output extends DataModel[Output] with Writable {

      val i: IntOption = new IntOption()

      override def reset: Unit = {
        i.setNull()
      }
      override def copyFrom(other: Output): Unit = {
        i.copyFrom(other.i)
      }
      override def readFields(in: DataInput): Unit = {
        i.readFields(in)
      }
      override def write(out: DataOutput): Unit = {
        i.write(out)
      }

      def getIOption: IntOption = i
    }
  }

  object Extend {

    // Single-field input: only `i`.
    class Input extends DataModel[Input] with Writable {

      val i: IntOption = new IntOption()

      override def reset: Unit = {
        i.setNull()
      }
      override def copyFrom(other: Input): Unit = {
        i.copyFrom(other.i)
      }
      override def readFields(in: DataInput): Unit = {
        i.readFields(in)
      }
      override def write(out: DataOutput): Unit = {
        i.write(out)
      }

      def getIOption: IntOption = i
    }

    // Extended output: adds `l` alongside `i`.
    class Output extends DataModel[Output] with Writable {

      val i: IntOption = new IntOption()
      val l: LongOption = new LongOption()

      override def reset: Unit = {
        i.setNull()
        l.setNull()
      }
      override def copyFrom(other: Output): Unit = {
        i.copyFrom(other.i)
        l.copyFrom(other.l)
      }
      override def readFields(in: DataInput): Unit = {
        i.readFields(in)
        l.readFields(in)
      }
      override def write(out: DataOutput): Unit = {
        i.write(out)
        l.write(out)
      }

      def getIOption: IntOption = i
      def getLOption: LongOption = l
    }
  }

  object Restructure {

    // Input with `i` and `l`; only `i` survives restructuring.
    class Input extends DataModel[Input] with Writable {

      val i: IntOption = new IntOption()
      val l: LongOption = new LongOption()

      override def reset: Unit = {
        i.setNull()
        l.setNull()
      }
      override def copyFrom(other: Input): Unit = {
        i.copyFrom(other.i)
        l.copyFrom(other.l)
      }
      override def readFields(in: DataInput): Unit = {
        i.readFields(in)
        l.readFields(in)
      }
      override def write(out: DataOutput): Unit = {
        i.write(out)
        l.write(out)
      }

      def getIOption: IntOption = i
      def getLOption: LongOption = l
    }

    // Output with `i` and a new double field `d` (never populated by the operator).
    class Output extends DataModel[Output] with Writable {

      val i: IntOption = new IntOption()
      val d: DoubleOption = new DoubleOption()

      override def reset: Unit = {
        i.setNull()
        d.setNull()
      }
      override def copyFrom(other: Output): Unit = {
        i.copyFrom(other.i)
        d.copyFrom(other.d)
      }
      override def readFields(in: DataInput): Unit = {
        i.readFields(in)
        d.readFields(in)
      }
      override def write(out: DataOutput): Unit = {
        i.write(out)
        d.write(out)
      }

      def getIOption: IntOption = i
      def getDOption: DoubleOption = d
    }
  }
}
asakusafw/asakusafw-spark
compiler/src/test/scala/com/asakusafw/spark/compiler/operator/core/ProjectionOperatorsCompilerSpec.scala
Scala
apache-2.0
8,355
package controllers.chat

import akka.actor.ActorSystem
import akka.stream.Materializer
import javax.inject._
import play.api._
import play.api.mvc._
import play.api.libs.streams._
import actors.ChatUserActor
import actors.ChatBotAdminActor

/**
 * Entry points for the chat feature: serves the chat landing page and upgrades
 * clients to a text WebSocket backed by a per-connection actor.
 */
@Singleton
class ChatController @Inject() (
    implicit val system: ActorSystem,
    materializer: Materializer
) extends Controller {

  import play.api.libs.concurrent.Execution.Implicits.defaultContext

  // Starts the chat-bot admin actor once, when this singleton controller is created.
  ChatBotAdminActor(system)

  /** Renders the chat index page with an empty flash scope. */
  def index_socket = Action { _ =>
    val page = views.html.chat.chat_index()(Flash(Map()))
    Ok(page)
  }

  /** Accepts a String-in/String-out WebSocket; each connection gets its own ChatUserActor. */
  def ws = WebSocket.accept[String, String] { _ =>
    ActorFlow.actorRef { outRef =>
      ChatUserActor.props(system)(outRef)
    }
  }
}
tnddn/iv-web
portal/rest-portal/app/controllers/chat/ChatController.scala
Scala
apache-2.0
709
/*
 * Copyright 2022 HM Revenue & Customs
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package iht.views.application.tnrb

import iht.forms.TnrbForms._
import iht.testhelpers.{CommonBuilder, TestHelper}
import iht.utils._
import iht.utils.tnrb.TnrbHelper
import iht.views.ViewTestHelper
import iht.views.html.application.tnrb.deceased_widow_check_date

/**
 * View tests for the "deceased widow check date" page of the TNRB
 * (transferable nil-rate band) journey.
 *
 * Previously every test repeated the same eight-line view-rendering boilerplate;
 * that has been factored into [[renderedView]] so each test states only its assertion.
 */
class DeceasedWidowCheckDateViewTest extends ViewTestHelper with TnrbHelper {

  val ihtReference = Some("ABC1A1A1A")
  val deceasedDetails = CommonBuilder.buildDeceasedDetails
  // Registration details for a married deceased person with a date of death.
  val regDetails = CommonBuilder.buildRegistrationDetails.copy(ihtReference = ihtReference,
    deceasedDetails = Some(deceasedDetails.copy(maritalStatus = Some(TestHelper.MaritalStatusMarried))),
    deceasedDateOfDeath = Some(CommonBuilder.buildDeceasedDateOfDeath))

  lazy val deceasedWidowCheckDateView: deceased_widow_check_date = app.injector.instanceOf[deceased_widow_check_date]

  val tnrbModel = CommonBuilder.buildTnrbEligibility
  val widowCheckModel = CommonBuilder.buildWidowedCheck

  lazy val pageTitle = messagesApi("page.iht.application.tnrbEligibilty.overview.partner.dod.question",
    spouseOrCivilPartnerLabelGenitive(tnrbModel, widowCheckModel,
      messagesApi("page.iht.application.tnrbEligibilty.partner.additional.label.the.deceased",
        DeceasedInfoHelper.getDeceasedNameOrDefaultString(regDetails))))
  lazy val browserTitle = messagesApi("iht.estateReport.tnrb.increasingIHTThreshold")
  lazy val guidanceParagraphs = Set(messagesApi("iht.dateExample2"))
  lazy val returnLinkId = "cancel-button"
  lazy val returnLinkText = messagesApi("page.iht.application.tnrb.returnToIncreasingThreshold")
  lazy val returnLinkTargetUrl = iht.controllers.application.tnrb.routes.TnrbOverviewController.onPageLoad()

  /**
   * Renders the view under test with a fresh fake request and returns its HTML.
   * Centralises the rendering call that was duplicated in every test below.
   */
  private def renderedView: String = {
    implicit val request = createFakeRequest()
    deceasedWidowCheckDateView(deceasedWidowCheckQuestionForm, widowCheckModel,
      tnrbModel, regDetails, returnLinkTargetUrl, returnLinkText).toString
  }

  "DeceasedWidowCheckDateView " must {

    "have no message keys in html" in {
      noMessageKeysShouldBePresent(renderedView)
    }

    "have the correct title" in {
      titleShouldBeCorrect(renderedView, pageTitle)
    }

    "have the correct browser title" in {
      browserTitleShouldBeCorrect(renderedView, browserTitle)
    }

    "show the correct guidance paragraphs" in {
      val view = renderedView
      for (paragraph <- guidanceParagraphs) messagesShouldBePresent(view, paragraph)
    }

    "show the Save and continue button" in {
      val saveAndContinueButton = asDocument(renderedView).getElementById("save-continue")
      saveAndContinueButton.text() mustBe messagesApi("iht.saveAndContinue")
    }

    "show the correct return link with text" in {
      val returnLink = asDocument(renderedView).getElementById(returnLinkId)
      returnLink.attr("href") mustBe returnLinkTargetUrl.url
      returnLink.text() mustBe returnLinkText
    }
  }
}
hmrc/iht-frontend
test/iht/views/application/tnrb/DeceasedWidowCheckDateViewTest.scala
Scala
apache-2.0
4,666
package com.github.mdr.mash.ns.os.pathClass

import java.nio.file.attribute.PosixFilePermission

import com.github.mdr.mash.utils.Utils._

/**
 * Which of the three POSIX permission classes (owner / group / others) are selected.
 * Each `xxxPermissionSet` method returns the corresponding read/write/execute
 * permissions for the selected classes.
 */
case class Parties(owner: Boolean, group: Boolean, others: Boolean) {

  def readPermissionSet: Set[PosixFilePermission] =
    permissionSet(
      PosixFilePermission.OWNER_READ,
      PosixFilePermission.GROUP_READ,
      PosixFilePermission.OTHERS_READ)

  def writePermissionSet: Set[PosixFilePermission] =
    permissionSet(
      PosixFilePermission.OWNER_WRITE,
      PosixFilePermission.GROUP_WRITE,
      PosixFilePermission.OTHERS_WRITE)

  def executePermissionSet: Set[PosixFilePermission] =
    permissionSet(
      PosixFilePermission.OWNER_EXECUTE,
      PosixFilePermission.GROUP_EXECUTE,
      PosixFilePermission.OTHERS_EXECUTE)

  // Shared builder (previously triplicated): include each class's permission
  // only when the corresponding flag is set.
  private def permissionSet(ownerPerm: PosixFilePermission,
                            groupPerm: PosixFilePermission,
                            othersPerm: PosixFilePermission): Set[PosixFilePermission] =
    Set(
      owner.option(ownerPerm),
      group.option(groupPerm),
      others.option(othersPerm)).flatten
}
mdr/mash
src/main/scala/com/github/mdr/mash/ns/os/pathClass/Parties.scala
Scala
mit
804
package dotty.tools.dotc.ast

import dotty.tools.DottyTest
import dotty.tools.dotc.ast.Trees._
import dotty.tools.dotc.util.Property
import org.junit.Test
import org.junit.Assert.{assertEquals, assertTrue, fail}

/**
 * Tests that tree attachments behave correctly under tree copying:
 * plain `Property.Key` attachments must NOT survive a copy, while
 * `Property.StickyKey` attachments must be carried over.
 */
class AttachmentsTests extends DottyTest {

  private val TestKey = new Property.Key[String]
  private val StickyTestKey = new Property.StickyKey[String]
  private val StickyTestKey2 = new Property.StickyKey[String]

  @Test
  def attachmentsAreNotCopiedOver: Unit = {
    checkCompile("typer", "class A") {
      case (PackageDef(_, (clazz: tpd.TypeDef) :: Nil), context) =>
        assertTrue("Attachment shouldn't be present", clazz.getAttachment(TestKey).isEmpty)

        val msg = "hello"
        clazz.putAttachment(TestKey, msg)
        assertEquals(Some(msg), clazz.getAttachment(TestKey))

        // Copying the tree must drop the non-sticky attachment.
        val copy = tpd.cpy.TypeDef(clazz)(rhs = tpd.EmptyTree)
        assertTrue("A copy should have been returned", clazz ne copy)
        assertTrue("Attachment shouldn't be present", copy.getAttachment(TestKey).isEmpty)
      case _ =>
        fail
    }
  }

  @Test
  def stickyAttachmentsAreCopiedOver: Unit = {
    checkCompile("typer", "class A") {
      case (PackageDef(_, (clazz: tpd.TypeDef) :: Nil), context) =>
        assertTrue("Attachment shouldn't be present", clazz.getAttachment(StickyTestKey).isEmpty)
        assertTrue("Attachment shouldn't be present", clazz.getAttachment(StickyTestKey2).isEmpty)
        assertTrue("Attachment shouldn't be present", clazz.getAttachment(TestKey).isEmpty)

        val msg = "hello"
        clazz.putAttachment(StickyTestKey, msg)
        clazz.putAttachment(TestKey, msg)
        clazz.putAttachment(StickyTestKey2, msg)
        assertEquals(Some(msg), clazz.getAttachment(StickyTestKey))
        assertEquals(Some(msg), clazz.getAttachment(TestKey))
        // Fixed: this previously re-checked StickyTestKey a second time,
        // so StickyTestKey2 was stored but never verified here.
        assertEquals(Some(msg), clazz.getAttachment(StickyTestKey2))

        // Only the sticky attachments survive copying.
        val copy = tpd.cpy.TypeDef(clazz)(rhs = tpd.EmptyTree)
        assertTrue("A copy should have been returned", clazz ne copy)
        assertTrue("Attachment should be present", copy.hasAttachment(StickyTestKey))
        assertTrue("Attachment shouldn't be present", !copy.hasAttachment(TestKey))
        assertTrue("Attachment should be present", copy.hasAttachment(StickyTestKey2))
      case _ =>
        fail
    }
  }
}
dotty-staging/dotty
compiler/test/dotty/tools/dotc/ast/AttachmentsTest.scala
Scala
apache-2.0
2,328
package com.argcv

/**
 * Package object for `com.argcv.dvergar`. Currently empty; exists as a
 * placeholder for package-wide definitions.
 */
package object dvergar {

}
yuikns/hsgl
src/test/scala/com/argcv/dvergar/package.scala
Scala
mit
47
package cs1

// run-main cs1.LX03E: observing underflow in floating-point arithmetic
object LX03E {

  /** Prints `f * 2^-i` for i = 0, 1, ..., n-1, halving the value at each step. */
  def fpUnderflow(n: Int, f: Double): Unit = {
    var step = 0
    var value = f
    // Same termination condition as the original recursive helper: stop when the counter reaches n.
    while (step != n) {
      println(f"$f * 2^-$step = $value%g")
      step += 1
      value /= 2
    }
  }

  def main(arguments: Array[String]): Unit = {
    // 2^-1074 is the smallest positive subnormal Double, so (2<<9) + 52 = 1076 steps
    // print indices 0..1075 and the final line shows exact zero.
    fpUnderflow((2<<9) + 52, 1.0)
  }

  // Translated from the original Japanese note:
  // Now, the expression used for fpUnderflow's first argument is rather arbitrary.
  // How could the author of this code make the program terminate exactly where
  // 0.00000 first appears in the output? Considering this in relation to the
  // IEEE 754 standard solves the mystery.
}
titech-is-cs115/lecture
src/lx03e-fpunderflow.scala
Scala
cc0-1.0
675
package com.datastax.demo

import com.databricks.spark.csv._
import com.datastax.demo.mapper.CSVMapAndSave
import com.datastax.demo.util.ArgHelper
import org.apache.spark.sql.SQLContext
import org.apache.spark.{Logging, SparkConf, SparkContext}

import scala.annotation.tailrec
import scala.reflect.io.Path

/**
 * Abstract class that is a base class for importing data from CSV files.
 * See the example template class ProductCSVMapAndSave for details on how
 * to implement your own.
 * Created on 19/04/2015.
 */
abstract class SparkCassandraCSVImporter(val args : Array[String]) extends ArgHelper with Logging {
  //Dependency Requirement
  // Self-type: concrete subclasses must mix in CSVMapAndSave, which is
  // presumably where `mapAndSave` is defined — TODO confirm against CSVMapAndSave.
  required : CSVMapAndSave =>

  /**
   * Runs the full import: resolves the CSV path (from args or interactive prompt),
   * builds a Spark context, parses the file, delegates row mapping/saving to
   * `mapAndSave`, then stops the context.
   */
  def doImport() = {
    val importFile = getArgOrDefault("csv.file", getFile("Please enter the path of the file you wish to import > "))
    logInfo(s"Commencing Import of file $importFile")

    //Create Spark config with sensible defaults
    val conf = new SparkConf()
      .setMaster(getArgOrDefault("spark.master", "local[2]"))
      .setAppName("spark-cass-csv-importer")
      .set("spark.executor.memory", getArgOrDefault("executor.memory", "512m"))
      .set("spark.default.parallelism", getArgOrDefault("processing.cores", "2"))
      .set("spark.cassandra.connection.host", getArgOrDefault("cassandra.host", "127.0.0.1"))

    val sc = new SparkContext(conf)
    val sparkSQL = new SQLContext(sc)
    val csvRDD = sparkSQL.csvFile(importFile)

    // Implemented by the CSVMapAndSave mixin (see self-type above).
    mapAndSave(csvRDD)

    sc.stop
    logInfo("Import Completed!")
  }

  // Prompts repeatedly on stdin until a path that exists on disk is entered.
  @tailrec
  private def getFile(prompt : String) : String = {
    val file = readFromStdIn(prompt)
    if(Path.string2path(file).exists) {
      file
    } else {
      getFile(prompt)
    }
  }
}
jkds/datastax-spark-csv-importer
src/main/scala/com/datastax/demo/SparkCassandraCSVImporter.scala
Scala
apache-2.0
1,711
package uk.gov.gds.ier.transaction.ordinary.address

import uk.gov.gds.ier.step.StepTemplate
import uk.gov.gds.ier.serialiser.JsonSerialiser
import uk.gov.gds.ier.service.AddressService
import uk.gov.gds.ier.model.{PossibleAddress, Addresses}
import uk.gov.gds.ier.validation.ErrorTransformForm
import uk.gov.gds.ier.transaction.ordinary.InprogressOrdinary

/**
 * Mustache template backing the "select your address" step of the ordinary
 * registration journey: looks up candidate addresses for a postcode and builds
 * the dropdown plus hidden fields carrying the lookup results across requests.
 */
trait AddressSelectMustache extends StepTemplate[InprogressOrdinary] {

  val serialiser:JsonSerialiser
  val addressService:AddressService

  // View model consumed by ordinary/addressSelect.mustache.
  case class SelectModel (
      question: Question,
      lookupUrl: String,
      manualUrl: String,
      postcode: Field,
      address: Field,
      possibleJsonList: Field,
      possiblePostcode: Field,
      hasAddresses: Boolean,
      hasAuthority: Boolean
  ) extends MustacheData

  val mustache = MultilingualTemplate("ordinary/addressSelect") { implicit lang => (form, post) =>

    implicit val progressForm = form

    val selectedUprn = form(keys.address.uprn).value
    // Prefer the postcode on the address itself, falling back to the one carried
    // in the hidden possibleAddresses field.
    val postcode = form(keys.address.postcode).value.orElse {
      form(keys.possibleAddresses.postcode).value
    }

    // Addresses previously looked up and round-tripped through the hidden JSON field.
    val storedAddresses = for(
      jsonList <- form(keys.possibleAddresses.jsonList).value;
      postcode <- postcode
    ) yield {
      PossibleAddress(
        jsonList = serialiser.fromJson[Addresses](jsonList),
        postcode = postcode
      )
    }

    //IER0091 : Temp removing the storedAddresses section of the code checks to remove populating against the hidden input field
    //val possibleAddresses = storedAddresses orElse postcode.map { pc =>
    val possibleAddresses = postcode.map { pc =>
      val addresses = addressService.lookupPartialAddress(pc)
      PossibleAddress(
        jsonList = Addresses(addresses),
        postcode = pc
      )
    }

    // One <option> per candidate address; the previously chosen UPRN stays selected.
    val options = possibleAddresses.map { possibleAddress =>
      possibleAddress.jsonList.addresses
    }.getOrElse(List.empty).map { address =>
      SelectOption(
        value = address.uprn.getOrElse(""),
        text = address.addressLine.getOrElse(""),
        selected = if (address.uprn == selectedUprn) {
          "selected=\"selected\""
        } else ""
      )
    }

    val hasAddresses = possibleAddresses.exists (!_.jsonList.addresses.isEmpty)
    // Even with no addresses, the postcode may still belong to a valid authority.
    val hasAuthority = hasAddresses || addressService.validAuthority(postcode)

    val addressSelect = SelectField(
      key = keys.address.uprn,
      optionList = options,
      default = SelectOption(
        value = "",
        text = Messages("ordinary_address_nAddressFound", options.size)
      )
    )
    // Mark the dropdown invalid when the lookup produced no addresses.
    val addressSelectWithError = addressSelect.copy(
      classes = if (!hasAddresses) {
        "invalid"
      } else {
        addressSelect.classes
      }
    )

    SelectModel(
      question = Question(
        postUrl = post.url,
        number = Messages("step_a_of_b", 6, 11),
        title = Messages("ordinary_address_postcode_title"),
        errorMessages = Messages.translatedGlobalErrors(form)
      ),
      lookupUrl = routes.AddressStep.get.url,
      manualUrl = routes.AddressManualStep.get.url,
      postcode = TextField(keys.address.postcode, default = postcode),
      address = addressSelectWithError,
      // Hidden fields persist the lookup results into the next request.
      possibleJsonList = HiddenField(
        key = keys.possibleAddresses.jsonList,
        value = possibleAddresses.map { poss =>
          serialiser.toJson(poss.jsonList)
        }.getOrElse("")
      ),
      possiblePostcode = HiddenField(
        key = keys.possibleAddresses.postcode,
        value = form(keys.address.postcode).value.getOrElse("")
      ),
      hasAddresses = hasAddresses,
      hasAuthority = hasAuthority
    )
  }
}
michaeldfallen/ier-frontend
app/uk/gov/gds/ier/transaction/ordinary/address/AddressSelectMustache.scala
Scala
mit
3,631
package github.bearrito.mailboxes

import akka.dispatch._
import akka.actor.{ActorSystem, ActorRef}
import com.typesafe.config.Config

/** Marker for queues that drop (forget) messages once a fixed capacity is reached. */
trait ForgetfulQueueSemantics {
  def capacity: Int
}

object ForgetfulMailbox {

  /**
   * Akka [[MessageQueue]] backed by a bounded [[ForgetfulQueue]].
   *
   * @param capacity maximum number of envelopes retained by the backing queue
   */
  class Q(val capacity: Int) extends MessageQueue with ForgetfulQueueSemantics {

    val queue = new ForgetfulQueue[Envelope](capacity)

    /** Drain any remaining messages to deadLetters when the mailbox is disposed. */
    override def cleanUp(owner: ActorRef, deadLetters: MessageQueue): Unit = {
      while (hasMessages) { deadLetters.enqueue(owner, dequeue()) }
    }

    // BUGFIX: this previously returned `!queue.hasMessages`, inverting the
    // contract — cleanUp's `while (hasMessages)` drain loop then ran exactly
    // when the backing queue was empty and never drained a non-empty queue.
    // (Assumes ForgetfulQueue.hasMessages means "is non-empty", consistent
    // with its use next to dequeue — TODO confirm against ForgetfulQueue.)
    override def hasMessages: Boolean = queue.hasMessages

    override def numberOfMessages: Int = queue.numberOfMessages

    override def dequeue(): Envelope = queue.dequeue

    override def enqueue(receiver: ActorRef, handle: Envelope): Unit = queue.enqueue(handle)
  }
}

/**
 * Akka [[MailboxType]] producing capacity-bounded, message-dropping queues.
 * Configured via the dispatcher/mailbox config key "mailbox-capacity".
 */
class ForgetfulMailbox(val capacity: Int) extends MailboxType with ProducesMessageQueue[ForgetfulMailbox.Q] {

  import ForgetfulMailbox._

  // Reflectively invoked by Akka when the mailbox is declared in configuration.
  def this(settings: ActorSystem.Settings, config: Config) = {
    this(config.getInt("mailbox-capacity"))
  }

  final override def create(owner: Option[ActorRef], system: Option[ActorSystem]): MessageQueue = {
    new Q(capacity)
  }
}
bearrito/RingMailBox
src/main/scala/github/bearrito/mailboxes/MailboxThings.scala
Scala
apache-2.0
1,189
package service

import play.api.Play

/**
 * Mixin giving access to mandatory Play configuration values.
 *
 * NOTE(review): both accessors call `.get` on the Option returned by the
 * Play configuration lookup, so a missing key fails with an uninformative
 * `NoSuchElementException`. Callers apparently rely on these keys being
 * present; changing this to a descriptive error would change the thrown
 * exception type — confirm before tightening.
 */
trait Configurable {

  import play.api.Play.current

  /** Returns the string value at `key`; throws if the key is absent. */
  def getConfig(key: String) = {
    Play.configuration.getString(key).get
  }

  /** Returns the integer value at `key`; throws if the key is absent or not an Int. */
  def getConfigInt(key: String) = {
    Play.configuration.getInt(key).get
  }
}
sandermak/scalabitz
app/service/Configurable.scala
Scala
mit
253
package com.calebgo.gitstats.generator

import org.joda.time.DateTime
import com.github.nscala_time.time.StaticDateTimeFormat

/**
 * Generator producing a single "Date" column, formatted as dd/MM/yyyy.
 */
class DateGenerator extends NormalGenerator {

  // Hoisted out of valueForDate: the formatter is loop-invariant, and
  // Joda-Time DateTimeFormatter instances are immutable and thread-safe,
  // so one shared instance is safe and avoids a rebuild per row.
  private val formatter = StaticDateTimeFormat.forPattern("dd/MM/yyyy")

  /** Column header for this generator. */
  override def header: Array[String] = Array("Date")

  /**
   * Formats `date` as a one-element row.
   *
   * @param date       the date to render
   * @param repository unused by this generator (part of the NormalGenerator contract)
   */
  override def valueForDate(date: DateTime, repository: String): Array[String] =
    Array(formatter.print(date))
}
Clegs/GitStats
src/main/scala/com/calebgo/gitstats/generator/DateGenerator.scala
Scala
mit
455
package algorithms

/**
 * Created by qmha on 10/28/14.
 */
// NOTE(review): empty placeholder — presumably the Particle Swarm
// Optimization implementation (the repository is "binary-hpsowm");
// no behaviour is defined here yet. Confirm intent before use.
class PSO {

}
minhprg/binary-hpsowm
src/algorithms/PSO.scala
Scala
mit
75
package ulang.calculus

import ulang.syntax._
import ulang.syntax.predefined.prop._
import ulang.syntax.predefined.pred._

import scala.annotation.tailrec

/**
 * Propositional/equational simplification rule.
 *
 * `simp` normalises a formula under a context (`Goal`) of assumptions;
 * `assume`/`assert` push formulas into the context positively/negatively;
 * the `triv.*` smart constructors (defined elsewhere) are relied on to
 * collapse trivial cases such as `And(True, x)`.
 */
object Simplify extends Rule {

  def name = "simplify"

  // Custom rewrite table: per head operator, a function of the (already
  // canonicalised) argument list and the current goal.
  type Rewrites = Map[Op, (List[Expr], Goal) => Expr]

  /**
   * Simplify `phi` under context `ctx`.
   * `prove` flips polarity through negation/implication — TODO(review):
   * its exact effect is only visible in how `not`/`imp` thread it.
   */
  def simp(phi: Expr, prove: Boolean, ctx: Goal): Expr = {
    phi match {
      case True | False => phi
      case Not(phi) => not(phi, prove, ctx)
      case And(phi, psi) => and(phi, psi, prove, ctx)
      case Or(phi, psi) => or(phi, psi, prove, ctx)
      case Imp(phi, psi) => imp(phi, psi, prove, ctx)
      case Eqv(phi, psi) => eqv(phi, psi, prove, ctx)
      case Eq(lhs, rhs) => eq(lhs, rhs, prove, ctx)
      case _ => literal(phi, prove, ctx)
    }
  }

  /** Apply the rule to a sequent: closed goal ⇒ no subgoals, open ⇒ one simplified subgoal. */
  def apply(seq: Seq): Proof = {
    val goal = con(seq.phis, Goal.empty)
    goal match {
      case Closed => Step(Nil, seq, this)
      case Open(_, phis) => Step(List(Seq(phis)), seq, this)
    }
  }

  def rewrite(self: Expr, ctx: Goal, rw: Rewrites): Expr = {
    rewrite(self, Nil, ctx, rw)
  }

  // Peel off applications, canonicalising each argument, until the head
  // operator is reached; then consult the rewrite table.
  @tailrec
  def rewrite(self: Expr, args: List[Expr], ctx: Goal, rw: Rewrites): Expr = self match {
    case op: Op => rw(op)(args, ctx)
    case App(fun, arg) => rewrite(fun, term(arg, ctx) :: args, ctx, rw)
    case _ => self
  }

  // Simplify each formula of a conjunction under the assumption of all the
  // others (the remaining ones are re-assumed for every element).
  @tailrec
  def con(todo: List[Expr], ctx: Goal): Goal = todo match {
    case Nil => ctx.reverse
    case phi :: rest =>
      // may produce duplicate work:
      val newctx = assume(rest, ctx)
      val newphi = simp(phi, false, newctx)
      con(rest, assume(newphi, ctx))
  }

  /** A literal collapses to True/False if it (or its negation) is already assumed. */
  def literal(phi: Expr, prove: Boolean, ctx: Goal): Expr = {
    if (ctx contains phi) True
    else if (ctx contains Not(phi)) False
    else phi
  }

  def term(expr: Expr, ctx: Goal): Expr = {
    canon(expr, ctx)
  }

  /** Canonical representative of `expr` w.r.t. the goal's congruence. */
  def canon(expr: Expr, ctx: Goal): Expr = {
    val res = ctx canon expr
    // println("canon " + expr + " ~> " + res)
    res
  }

  /** Record lhs = rhs in the goal's congruence. */
  def merge(lhs: Expr, rhs: Expr, ctx: Goal): Goal = {
    // println("merge " + lhs + " ~> " + rhs)
    ctx merge (lhs, rhs)
  }

  // Negation flips the prove polarity.
  def not(phi: Expr, prove: Boolean, ctx: Goal): Expr = {
    triv.not(simp(phi, !prove, ctx))
  }

  def and(phi: Expr, psi: Expr, prove: Boolean, ctx: Goal): Expr = {
    val (newphi, newpsi) = binary(phi, true, prove, psi, true, prove, ctx)
    triv.and(newphi, newpsi)
  }

  def or(phi: Expr, psi: Expr, prove: Boolean, ctx: Goal): Expr = {
    val (newphi, newpsi) = binary(phi, false, prove, psi, false, prove, ctx)
    triv.or(newphi, newpsi)
  }

  // For phi ==> psi: phi is assumed positively with flipped polarity,
  // psi asserted negatively.
  def imp(phi: Expr, psi: Expr, prove: Boolean, ctx: Goal): Expr = {
    val (newphi, newpsi) = binary(phi, true, !prove, psi, false, prove, ctx)
    triv.imp(newphi, newpsi)
  }

  // Equivalence simplified as a pair of implications first; falls back to
  // re-simplifying both sides when neither implication collapses.
  def eqv(phi: Expr, psi: Expr, prove: Boolean, ctx: Goal): Expr = {
    val lr = simp(Imp(phi, psi), prove, ctx)
    val rl = simp(Imp(psi, phi), prove, ctx)

    // try to extract con/dis
    (lr, rl) match {
      case (False, _) => False
      case (_, False) => False
      case (True, _) => rl // already simplified
      case (_, True) => lr
      case _ =>
        triv.eqv(simp(phi, false, ctx), simp(psi, false, ctx)) // does a lot of work again
    }
  }

  // Equations are simplified by canonicalising both terms.
  def eq(lhs: Expr, rhs: Expr, prove: Boolean, ctx: Goal): Expr = {
    val newlhs = term(lhs, ctx)
    val newrhs = term(rhs, ctx)
    triv.eq(newlhs, newrhs)
  }

  /**
   * Simplify a binary connective's operands, each under the assumption of
   * the other, ping-ponging (note the swapped argument order in the
   * recursive call) until both sides reach a fixpoint.
   * `*_pos` selects assume vs assert for the context extension.
   */
  def binary(
      phi: Expr, phi_pos: Boolean, phi_prove: Boolean,
      psi: Expr, psi_pos: Boolean, psi_prove: Boolean,
      ctx: Goal,
      psi_done: Boolean = false,
      swap: Boolean = false): (Expr, Expr) = {
    val newctx = if (psi_pos) assume(psi, ctx) else assert(psi, ctx)
    val newphi = simp(phi, phi_prove, newctx)
    val phi_done = phi == newphi

    if (phi_done && psi_done) {
      // `swap` tracks how many ping-pongs happened so the pair comes back
      // in the caller's original order.
      if (swap) (psi, phi) else (phi, psi)
    } else {
      binary(psi, psi_pos, psi_prove, /**/ newphi, phi_pos, phi_prove, /**/ ctx, phi_done, !swap)
    }
  }

  // Add phi as a (positive) hypothesis, decomposing conjunctive structure;
  // assuming False closes the goal outright.
  def assume(phi: Expr, ctx: Goal): Goal = phi match {
    case True => ctx
    case False => Closed
    case Not(psi) => assert(psi, ctx)
    case And(phi, psi) => assume(phi, assume(psi, ctx))
    case Eq(lhs, rhs) => phi :: merge(lhs, rhs, ctx)
    /*case Ex(bound, body) => // slow
      val avoid = free(phi :: ctx)
      assume(inst(body, fresh(bound, avoid)), ctx)*/
    case _ => phi :: ctx
  }

  // Add phi as a proof obligation, i.e. assume its negation, decomposing
  // disjunctive/implicative structure; asserting True closes the goal.
  def assert(phi: Expr, ctx: Goal): Goal = phi match {
    case True => Closed
    case False => ctx
    case Not(psi) => assume(psi, ctx)
    case Imp(phi, psi) => assert(psi, assume(phi, ctx))
    case Or(phi, psi) => assert(psi, assert(phi, ctx))
    /*case All(bound, body) => // slow
      val avoid = free(phi :: ctx)
      assert(inst(body, fresh(bound, avoid)), ctx)*/
    case _ => triv.not(phi) :: ctx
  }

  /** Assume every formula of `args`, right-to-left. */
  def assume(args: List[Expr], ctx: Goal): Goal = {
    args.foldRight(ctx)(assume)
  }

  /** Assert every formula of `args`, right-to-left. */
  def assert(args: List[Expr], ctx: Goal): Goal = {
    args.foldRight(ctx)(assert)
  }
}
gernst/ulang
src/ulang/calculus/Simplify.scala
Scala
mit
5,012
package suiryc.scala.javafx.concurrent

import akka.dispatch.{DispatcherPrerequisites, ExecutorServiceConfigurator, ExecutorServiceFactory}
import com.typesafe.config.Config
import java.util.{Collections, List => jList}
import java.util.concurrent.{AbstractExecutorService, ExecutorService, ThreadFactory, TimeUnit}
import javafx.application.Platform
import scala.concurrent.{ExecutionContext, ExecutionContextExecutor}

// See:
//  - https://groups.google.com/forum/#!msg/scalafx-users/JxXXNTKC4Kk/riJCqyaEG1cJ
//  - https://gist.github.com/saberduck/5150719
//  - https://gist.github.com/viktorklang/2422443

object JFXExecutor {

  /* Execution context based on JavaFX, but not tied to any akka system.
   * May be used when the whole JFXSystem (akka system with dedicated dispatcher
   * and scheduler) is not needed. */
  // Lazy so the JavaFX toolkit is only touched on first use.
  implicit lazy val executor: ExecutionContextExecutor =
    ExecutionContext.fromExecutorService(JFXExecutorService)
}

/**
 * ExecutorService facade over the JavaFX application thread: every submitted
 * Runnable is handed to Platform.runLater. Lifecycle methods are no-ops by
 * design — this "service" wraps a thread owned by the JavaFX toolkit, so it
 * can never be shut down from here.
 */
object JFXExecutorService extends AbstractExecutorService {

  def execute(command: Runnable): Unit = Platform.runLater(command)

  // No-op: the JavaFX thread is not ours to stop.
  def shutdown(): Unit = ()

  // Nothing is ever pending here (work is forwarded immediately), so
  // there are no runnables to hand back.
  def shutdownNow(): jList[Runnable] = Collections.emptyList[Runnable]

  def isShutdown: Boolean = false

  def isTerminated: Boolean = false

  // Reports immediate "termination" since shutdown is a no-op.
  def awaitTermination(l: Long, timeUnit: TimeUnit): Boolean = true
}

/**
 * Akka dispatcher configurator that executes actors on the JavaFX
 * application thread. The supplied ThreadFactory is deliberately ignored —
 * JavaFX owns its event thread.
 */
class JFXEventThreadExecutorServiceConfigurator(config: Config, prerequisites: DispatcherPrerequisites)
    extends ExecutorServiceConfigurator(config, prerequisites) {

  private val f = new ExecutorServiceFactory {
    def createExecutorService: ExecutorService = JFXExecutorService
  }

  def createExecutorServiceFactory(id: String, threadFactory: ThreadFactory): ExecutorServiceFactory = f
}
suiryc/suiryc-scala
javafx/src/main/scala/suiryc/scala/javafx/concurrent/JFXExecutor.scala
Scala
gpl-3.0
1,722
package com.github.andr83.parsek.spark.source

import com.github.andr83.parsek.spark.PathFilter.PathFilter
import com.github.andr83.parsek.spark.{PathFilter, SparkJob}
import com.github.andr83.parsek.{PString, PValue}
import com.typesafe.config.Config
import net.ceedubs.ficus.Ficus._
import org.apache.hadoop.fs.Path
import org.apache.spark.rdd.RDD

/**
 * Source reading text files from HDFS, a local file system (available on all
 * nodes), or any Hadoop-supported file system URI, producing an RDD of
 * string values.
 *
 * @param path    paths to read; each entry may itself be a comma-separated
 *                list and may point at a file or a directory
 * @param filters predicates used to narrow down the resolved paths
 *
 * @author andr83
 */
case class TextFileSource(path: Seq[String], filters: Seq[PathFilter]) extends Source {

  /** Config-driven constructor: "path" may be a single value or a list; "filters" is optional. */
  def this(config: Config) = this(
    path = config.getAnyRef("path") match {
      case entries: List[_] => entries.map(_.toString)
      case single => List(single.toString)
    },
    filters = config.as[Option[List[Config]]]("filters")
      .map(confs => confs.map(c => PathFilter(c)))
      .getOrElse(Seq.empty[PathFilter])
  )

  // Default filter to exclude all files with underscore prefix like _SUCCESS
  val defaultHadoopFilter = (p: Path) => !p.getName.startsWith("_")

  override def apply(job: SparkJob): RDD[PValue] = {
    val activeFilters = defaultHadoopFilter +: filters
    val resolved = path.flatMap(p => job.listFilesOnly(p, activeFilters))

    logger.info("Input files:")
    for (file <- resolved.sorted) logger.info(file)

    if (resolved.nonEmpty) {
      job.sc.textFile(resolved.mkString(",")).map(PString)
    } else {
      job.sc.emptyRDD[PValue]
    }
  }
}
andr83/parsek
spark/src/main/scala/com/github/andr83/parsek/spark/source/TextFileSource.scala
Scala
mit
1,662
package io.ddf.jdbc.etl

import java.sql.Connection
import java.util.Collections

import io.ddf.DDF
import io.ddf.content.{Schema, SqlResult, SqlTypedResult}
import io.ddf.datasource.{DataFormat, DataSourceDescriptor}
import io.ddf.jdbc.JdbcDDFManager
import io.ddf.jdbc.content._
import org.apache.commons.lang.StringUtils

/**
 * SQL handler for JDBC-backed DDFs. Dispatches on the leading keyword of the
 * command (LOAD / CREATE / DROP / anything else) and turns results into DDFs
 * or SqlResults.
 *
 * NOTE(review): the incoming SQL is spliced into new statements by string
 * concatenation (view creation, "select * from (...) tmp limit 1"); this is
 * only safe if `command` is trusted. Also, every `dataFormat` parameter is
 * accepted but never used — all overloads forward null or drop it.
 */
class SqlHandler(ddf: DDF) extends io.ddf.etl.ASqlHandler(ddf) {

  val ddfManager: JdbcDDFManager = ddf.getManager.asInstanceOf[JdbcDDFManager]
  val baseSchema = ddfManager.baseSchema
  implicit val catalog = ddfManager.catalog

  def getConnection(): Connection = {
    ddfManager.getConnection()
  }

  override def sql2ddf(command: String): DDF = {
    this.sql2ddf(command, null, null, null)
  }

  override def sql2ddf(command: String, schema: Schema): DDF = {
    this.sql2ddf(command, schema, null, null)
  }

  // NOTE(review): dataFormat is dropped here (null is forwarded) — apparently
  // intentional since the 4-arg overload never reads it either.
  override def sql2ddf(command: String, dataFormat: DataFormat): DDF = {
    this.sql2ddf(command, null, null, null)
  }

  override def sql2ddf(command: String, schema: Schema, dataSource: DataSourceDescriptor): DDF = {
    this.sql2ddf(command, schema, dataSource, null)
  }

  override def sql2ddf(command: String, schema: Schema, dataFormat: DataFormat): DDF = {
    this.sql2ddf(command, schema, null, null)
  }

  /**
   * Main entry point: LOAD and CREATE are handled specially; any other
   * command is wrapped either in a database view (when the engine supports
   * views) or in a lazily-evaluated "DDF view" otherwise.
   */
  override def sql2ddf(command: String, schema: Schema, dataSource: DataSourceDescriptor, dataFormat: DataFormat): DDF = {
    ddfManager.checkSinkAllowed()
    if (StringUtils.startsWithIgnoreCase(command.trim, "LOAD")) {
      load(command)
    } else if (StringUtils.startsWithIgnoreCase(command.trim, "CREATE")) {
      create2ddf(command, schema)
    } else {
      if (this.ddfManager.getCanCreateView()) {
        val viewName = TableNameGenerator.genTableName(8)
        //View will allow select commands
        DdlCommand(getConnection(), baseSchema, "CREATE VIEW " + viewName + " AS (" + command + ")")
        val viewSchema = if (schema == null) catalog.getViewSchema(getConnection(), baseSchema, viewName) else schema
        val viewRep = TableNameRepresentation(viewName, viewSchema)
        // TODO(TJ): This function implementation is wrong.
        ddf.getManager.newDDF(this.getManager, viewRep, Array(Representations.VIEW), viewName, viewSchema)
      } else {
        // Probe the query with LIMIT 1 purely to obtain its schema.
        val sqlRet = this.sql("select * from (" + command + ") tmp limit 1");
        val schema = sqlRet.getSchema
        val viewName = TableNameGenerator.genTableName(8)
        // The original command is stashed as the "table name" so it can be
        // re-executed later.
        schema.setTableName(command)
        val newDDF = ddf.getManager.newDDF(this.getManager, // the ddfmanager
          "this is a view", // the content
          // content class
          Array(classOf[java.lang.String]),
          null,
          schema)
        // Indicate that this ddf is a view, this information will be handled
        // in TableNameReplacer
        newDDF.setIsDDFView(true)
        newDDF
      }
    }
  }

  /** Executes a LOAD command into the table's existing schema and returns the refreshed DDF. */
  def load(command: String): DDF = {
    val l = LoadCommand.parse(command)
    val ddf = ddfManager.getDDFByName(l.tableName)
    val schema = ddf.getSchema
    val tableName = LoadCommand(getConnection(), baseSchema, schema, l)
    val newDDF = ddfManager.getDDFByName(tableName)
    newDDF
  }

  /** Executes a CREATE statement and wraps the (possibly introspected) table as a DDF. */
  def create2ddf(command: String, schema: Schema): DDF = {
    val tableName = Parsers.parseCreate(command).tableName
    DdlCommand(getConnection(), baseSchema, command)
    val tableSchema = if (schema == null) catalog.getTableSchema(getConnection(), baseSchema, tableName) else schema
    val emptyRep = TableNameRepresentation(tableName, tableSchema)
    ddf.getManager.newDDF(this.getManager, emptyRep, Array(Representations.VIEW), tableName, tableSchema)
  }

  override def sql(command: String): SqlResult = {
    sql(command, Integer.MAX_VALUE, null)
  }

  override def sql(command: String, maxRows: Integer): SqlResult = {
    sql(command, maxRows, null)
  }

  /**
   * Raw SQL entry point. DROP/CREATE return a dummy "0" result, LOAD returns
   * the loaded table name; everything else runs as a query capped at maxRows.
   */
  override def sql(command: String, maxRows: Integer, dataSource: DataSourceDescriptor): SqlResult = {
    val maxRowsInt: Int = if (maxRows == null) Integer.MAX_VALUE else maxRows
    if (StringUtils.startsWithIgnoreCase(command.trim, "DROP")) {
      DdlCommand(getConnection(), baseSchema, command)
      new SqlResult(null, Collections.singletonList("0"))
    } else if (StringUtils.startsWithIgnoreCase(command.trim, "LOAD")) {
      ddfManager.checkSinkAllowed()
      val l = LoadCommand.parse(command)
      val ddf = ddfManager.getDDFByName(l.tableName)
      val schema = ddf.getSchema
      val tableName = LoadCommand(getConnection(), baseSchema, schema, l)
      new SqlResult(null, Collections.singletonList(tableName))
    } else if (StringUtils.startsWithIgnoreCase(command.trim, "CREATE")) {
      create2ddf(command, null)
      new SqlResult(null, Collections.singletonList("0"))
    } else {
      val tableName = ddf.getSchemaHandler.newTableName()
      SqlCommand(getConnection(), baseSchema, tableName, command, maxRowsInt, "\t", this.ddfManager.getEngine)
    }
  }

  override def sqlTyped(command: String): SqlTypedResult = new SqlTypedResult(sql(command))

  override def sqlTyped(command: String, maxRows: Integer): SqlTypedResult = new SqlTypedResult(sql(command, maxRows))

  override def sqlTyped(command: String, maxRows: Integer, dataSource: DataSourceDescriptor): SqlTypedResult = new SqlTypedResult(sql(command, maxRows, dataSource))
}
ddf-project/ddf-jdbc
jdbc/src/main/scala/io/ddf/jdbc/etl/SqlHandler.scala
Scala
apache-2.0
5,497
/*
 * Copyright (C) 2014 - 2017  Contributors as noted in the AUTHORS.md file
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as
 * published by the Free Software Foundation, either version 3 of the
 * License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

package usecases.emptyTargetColumns

import java.io.File
import java.net.URI

import akka.testkit.{ TestActorRef, TestFSMRef }
import com.wegtam.scalatest.tags.{ DbTest, DbTestH2 }
import com.wegtam.tensei.adt.Recipe.MapOneToOne
import com.wegtam.tensei.adt._
import com.wegtam.tensei.agent.Parser.{ ParserCompletedStatus, ParserMessages }
import com.wegtam.tensei.agent.Processor.ProcessorMessages
import com.wegtam.tensei.agent.adt.ParserStatus
import com.wegtam.tensei.agent.{ DataTreeDocument, Parser, Processor, XmlActorSpec }

import scala.collection.mutable.ListBuffer

/**
 * Integration tests covering target columns that receive no mapping:
 * for both file and H2-database targets, unmapped (or nullified) columns
 * must end up with the DFASDL default value when one is declared, and
 * empty/null otherwise. Each test runs the full Parser -> Processor
 * actor pipeline on a small CSV source.
 *
 * NOTE(review): the two file-target tests map source ("firstname",
 * "firstname") while the two db-target tests map ("firstname",
 * "lastname") — the Nullify transformer discards the values either way,
 * but confirm the duplication in the file tests is intentional.
 */
class EmptyTargetColumns extends XmlActorSpec {
  val agentRunIdentifier = Option("EmptyTargetColumnsTest")

  describe("Use cases") {
    describe("if not all target data columns are mapped") {
      describe("when writing into a file") {
        describe("and target columns have default values") {
          it("should write the default values to the not mapped columns") {
            // Source CSV and a throwaway target file.
            val data = getClass.getResource("/usecases/emptyTargetColumns/source.csv").toURI
            val targetFilePath = File.createTempFile("EmptyTargetColumns", "test").toURI
            val sourceDfasdl = DFASDL(
              "XML-SOURCE-DATA",
              scala.io.Source
                .fromInputStream(
                  getClass.getResourceAsStream("/usecases/emptyTargetColumns/source-dfasdl.xml")
                )
                .mkString
            )
            // Target description declaring default values for the columns.
            val targetDfasdl = DFASDL(
              "DB-TARGET-DATA",
              scala.io.Source
                .fromInputStream(
                  getClass.getResourceAsStream(
                    "/usecases/emptyTargetColumns/target-with-defaults-dfasdl.xml"
                  )
                )
                .mkString
            )
            // Only "birthday" is mapped through; the name columns are nullified.
            val customersRecipe = Recipe(
              "MapColumns",
              MapOneToOne,
              List(
                MappingTransformation(
                  createElementReferenceList(sourceDfasdl, List("birthday")),
                  createElementReferenceList(targetDfasdl, List("birthday"))
                ),
                MappingTransformation(
                  createElementReferenceList(sourceDfasdl, List("firstname", "firstname")),
                  createElementReferenceList(targetDfasdl, List("firstname", "lastname")),
                  List(
                    TransformationDescription("com.wegtam.tensei.agent.transformers.Nullify",
                                              TransformerOptions(classOf[String], classOf[String]))
                  )
                )
              )
            )
            val cookbook =
              Cookbook("EmptyColumnsTest", List(sourceDfasdl), Option(targetDfasdl), List(customersRecipe))
            val sourceConnection =
              ConnectionInformation(data, Option(DFASDLReference(cookbook.id, sourceDfasdl.id)))
            val targetConnection =
              ConnectionInformation(targetFilePath, Option(DFASDLReference(cookbook.id, targetDfasdl.id)))
            val agentStartTransformationMessage =
              AgentStartTransformationMessage(List(sourceConnection),
                                              targetConnection,
                                              cookbook,
                                              agentRunIdentifier)
            // Run the parse phase...
            val dataTree = TestActorRef(
              DataTreeDocument.props(sourceDfasdl, Option("Xml2DbTest"), Set.empty[String])
            )
            val parser = TestFSMRef(new Parser(agentRunIdentifier))
            parser ! ParserMessages.StartParsing(agentStartTransformationMessage,
                                                 Map(sourceDfasdl.hashCode() -> dataTree))
            val parserResponse = expectMsgType[ParserCompletedStatus]
            parserResponse.statusMessages.foreach(
              status => status should be(ParserStatus.COMPLETED)
            )
            // ...then the processing phase.
            val processor = TestFSMRef(new Processor(agentRunIdentifier))
            processor ! ProcessorMessages.StartProcessingMessage(agentStartTransformationMessage,
                                                                 List(dataTree))
            expectMsg(ProcessorMessages.Completed)
            // The written file must match the expected fixture byte for byte.
            val actualData = scala.io.Source.fromURI(targetFilePath).mkString
            val expectedData = scala.io.Source
              .fromURI(
                getClass
                  .getResource("/usecases/emptyTargetColumns/expected-target-with-defaults.csv")
                  .toURI
              )
              .mkString
            withClue(s"The file $targetFilePath should have the proper content!")(
              actualData should be(expectedData)
            )
          }
        }

        describe("and target columns don't have default values") {
          // Same pipeline as above, but the target DFASDL declares no defaults,
          // so nullified columns must come out empty.
          it("should write an empty string to the not mapped columns") {
            val data = getClass.getResource("/usecases/emptyTargetColumns/source.csv").toURI
            val targetFilePath = File.createTempFile("EmptyTargetColumns", "test").toURI
            val sourceDfasdl = DFASDL(
              "XML-SOURCE-DATA",
              scala.io.Source
                .fromInputStream(
                  getClass.getResourceAsStream("/usecases/emptyTargetColumns/source-dfasdl.xml")
                )
                .mkString
            )
            val targetDfasdl = DFASDL(
              "DB-TARGET-DATA",
              scala.io.Source
                .fromInputStream(
                  getClass.getResourceAsStream("/usecases/emptyTargetColumns/target-dfasdl.xml")
                )
                .mkString
            )
            val customersRecipe = Recipe(
              "MapColumns",
              MapOneToOne,
              List(
                MappingTransformation(
                  createElementReferenceList(sourceDfasdl, List("birthday")),
                  createElementReferenceList(targetDfasdl, List("birthday"))
                ),
                MappingTransformation(
                  createElementReferenceList(sourceDfasdl, List("firstname", "firstname")),
                  createElementReferenceList(targetDfasdl, List("firstname", "lastname")),
                  List(
                    TransformationDescription("com.wegtam.tensei.agent.transformers.Nullify",
                                              TransformerOptions(classOf[String], classOf[String]))
                  )
                )
              )
            )
            val cookbook =
              Cookbook("EmptyColumnsTest", List(sourceDfasdl), Option(targetDfasdl), List(customersRecipe))
            val sourceConnection =
              ConnectionInformation(data, Option(DFASDLReference(cookbook.id, sourceDfasdl.id)))
            val targetConnection =
              ConnectionInformation(targetFilePath, Option(DFASDLReference(cookbook.id, targetDfasdl.id)))
            val agentStartTransformationMessage =
              AgentStartTransformationMessage(List(sourceConnection),
                                              targetConnection,
                                              cookbook,
                                              agentRunIdentifier)
            val dataTree = TestActorRef(
              DataTreeDocument.props(sourceDfasdl, Option("Xml2DbTest"), Set.empty[String])
            )
            val parser = TestFSMRef(new Parser(agentRunIdentifier))
            parser ! ParserMessages.StartParsing(agentStartTransformationMessage,
                                                 Map(sourceDfasdl.hashCode() -> dataTree))
            val parserResponse = expectMsgType[ParserCompletedStatus]
            parserResponse.statusMessages.foreach(
              status => status should be(ParserStatus.COMPLETED)
            )
            val processor = TestFSMRef(new Processor(agentRunIdentifier))
            processor ! ProcessorMessages.StartProcessingMessage(agentStartTransformationMessage,
                                                                 List(dataTree))
            expectMsg(ProcessorMessages.Completed)
            val actualData = scala.io.Source.fromURI(targetFilePath).mkString
            val expectedData = scala.io.Source
              .fromURI(
                getClass.getResource("/usecases/emptyTargetColumns/expected-target.csv").toURI
              )
              .mkString
            withClue(s"The file $targetFilePath should have the proper content!")(
              actualData should be(expectedData)
            )
          }
        }
      }

      describe("when writing into a database") {
        describe("and target columns have default values") {
          // Tagged so the H2-dependent tests can be excluded from plain runs.
          it("should write the default values to the not mapped columns", DbTest, DbTestH2) {
            val data = getClass.getResource("/usecases/emptyTargetColumns/source.csv").toURI
            val sourceDfasdl = DFASDL(
              "XML-SOURCE-DATA",
              scala.io.Source
                .fromInputStream(
                  getClass.getResourceAsStream("/usecases/emptyTargetColumns/source-dfasdl.xml")
                )
                .mkString
            )
            val targetDfasdl = DFASDL(
              "DB-TARGET-DATA",
              scala.io.Source
                .fromInputStream(
                  getClass.getResourceAsStream(
                    "/usecases/emptyTargetColumns/target-with-defaults-dfasdl.xml"
                  )
                )
                .mkString
            )
            val customersRecipe = Recipe(
              "MapColumns",
              MapOneToOne,
              List(
                MappingTransformation(
                  createElementReferenceList(sourceDfasdl, List("birthday")),
                  createElementReferenceList(targetDfasdl, List("birthday"))
                ),
                MappingTransformation(
                  createElementReferenceList(sourceDfasdl, List("firstname", "lastname")),
                  createElementReferenceList(targetDfasdl, List("firstname", "lastname")),
                  List(
                    TransformationDescription("com.wegtam.tensei.agent.transformers.Nullify",
                                              TransformerOptions(classOf[String], classOf[String]))
                  )
                )
              )
            )
            val cookbook =
              Cookbook("EmptyColumnsTest", List(sourceDfasdl), Option(targetDfasdl), List(customersRecipe))
            val sourceConnection =
              ConnectionInformation(data, Option(DFASDLReference(cookbook.id, sourceDfasdl.id)))
            // In-memory H2 database kept alive via this open connection.
            val targetDatabase = java.sql.DriverManager.getConnection("jdbc:h2:mem:emptyColumns1")
            val targetConnection =
              ConnectionInformation(new URI(targetDatabase.getMetaData.getURL),
                                    Option(DFASDLReference(cookbook.id, targetDfasdl.id)))
            val agentStartTransformationMessage =
              AgentStartTransformationMessage(List(sourceConnection),
                                              targetConnection,
                                              cookbook,
                                              agentRunIdentifier)
            val dataTree = TestActorRef(
              DataTreeDocument.props(sourceDfasdl, Option("Xml2DbTest"), Set.empty[String])
            )
            val parser = TestFSMRef(new Parser(agentRunIdentifier))
            parser ! ParserMessages.StartParsing(agentStartTransformationMessage,
                                                 Map(sourceDfasdl.hashCode() -> dataTree))
            val parserResponse = expectMsgType[ParserCompletedStatus]
            parserResponse.statusMessages.foreach(
              status => status should be(ParserStatus.COMPLETED)
            )
            val processor = TestFSMRef(new Processor(agentRunIdentifier))
            processor ! ProcessorMessages.StartProcessingMessage(agentStartTransformationMessage,
                                                                 List(dataTree))
            expectMsg(ProcessorMessages.Completed)
            // Flatten the ROWS table into "firstname,lastname,birthday;..." for comparison.
            val actualData = {
              val stm     = targetDatabase.createStatement()
              val results = stm.executeQuery("SELECT * FROM ROWS")
              val rows    = new ListBuffer[String]
              while (results.next()) {
                rows += s"${results.getString("firstname")},${results.getString("lastname")},${results
                  .getDate("birthday")}"
              }
              rows.toList.mkString(";")
            }
            val expectedData =
              """John,Doe,1879-03-14;John,Doe,1826-09-17;John,Doe,1777-04-30;John,Doe,1808-07-25;John,Doe,1646-07-01"""
            withClue(s"The database table should have the proper content!")(
              actualData should be(expectedData)
            )
            // Explicitly shut the in-memory database down so the next test starts clean.
            val dst = java.sql.DriverManager.getConnection(s"jdbc:h2:mem:emptyColumns1")
            dst.createStatement().execute("SHUTDOWN")
            dst.close()
          }
        }

        describe("and target columns don't have default values") {
          // Without declared defaults the nullified columns must be NULL in the table.
          it("should write an empty string to the not mapped columns", DbTest, DbTestH2) {
            val data = getClass.getResource("/usecases/emptyTargetColumns/source.csv").toURI
            val sourceDfasdl = DFASDL(
              "XML-SOURCE-DATA",
              scala.io.Source
                .fromInputStream(
                  getClass.getResourceAsStream("/usecases/emptyTargetColumns/source-dfasdl.xml")
                )
                .mkString
            )
            val targetDfasdl = DFASDL(
              "DB-TARGET-DATA",
              scala.io.Source
                .fromInputStream(
                  getClass.getResourceAsStream("/usecases/emptyTargetColumns/target-dfasdl.xml")
                )
                .mkString
            )
            val customersRecipe = Recipe(
              "MapColumns",
              MapOneToOne,
              List(
                MappingTransformation(
                  createElementReferenceList(sourceDfasdl, List("birthday")),
                  createElementReferenceList(targetDfasdl, List("birthday"))
                ),
                MappingTransformation(
                  createElementReferenceList(sourceDfasdl, List("firstname", "lastname")),
                  createElementReferenceList(targetDfasdl, List("firstname", "lastname")),
                  List(
                    TransformationDescription("com.wegtam.tensei.agent.transformers.Nullify",
                                              TransformerOptions(classOf[String], classOf[String]))
                  )
                )
              )
            )
            val cookbook =
              Cookbook("EmptyColumnsTest", List(sourceDfasdl), Option(targetDfasdl), List(customersRecipe))
            val sourceConnection =
              ConnectionInformation(data, Option(DFASDLReference(cookbook.id, sourceDfasdl.id)))
            val targetDatabase = java.sql.DriverManager.getConnection("jdbc:h2:mem:emptyColumns2")
            val targetConnection =
              ConnectionInformation(new URI(targetDatabase.getMetaData.getURL),
                                    Option(DFASDLReference(cookbook.id, targetDfasdl.id)))
            val agentStartTransformationMessage =
              AgentStartTransformationMessage(List(sourceConnection),
                                              targetConnection,
                                              cookbook,
                                              agentRunIdentifier)
            val dataTree = TestActorRef(
              DataTreeDocument.props(sourceDfasdl, Option("Xml2DbTest"), Set.empty[String])
            )
            val parser = TestFSMRef(new Parser(agentRunIdentifier))
            parser ! ParserMessages.StartParsing(agentStartTransformationMessage,
                                                 Map(sourceDfasdl.hashCode() -> dataTree))
            val parserResponse = expectMsgType[ParserCompletedStatus]
            parserResponse.statusMessages.foreach(
              status => status should be(ParserStatus.COMPLETED)
            )
            val processor = TestFSMRef(new Processor(agentRunIdentifier))
            processor ! ProcessorMessages.StartProcessingMessage(agentStartTransformationMessage,
                                                                 List(dataTree))
            expectMsg(ProcessorMessages.Completed)
            val actualData = {
              val stm     = targetDatabase.createStatement()
              val results = stm.executeQuery("SELECT * FROM ROWS")
              val rows    = new ListBuffer[String]
              while (results.next()) {
                rows += s"${results.getString("firstname")},${results.getString("lastname")},${results
                  .getDate("birthday")}"
              }
              rows.toList.mkString(";")
            }
            // getString on a NULL column renders as "null" in the interpolation.
            val expectedData =
              """null,null,1879-03-14;null,null,1826-09-17;null,null,1777-04-30;null,null,1808-07-25;null,null,1646-07-01"""
            withClue(s"The database table should have the proper content!")(
              actualData should be(expectedData)
            )
            val dst = java.sql.DriverManager.getConnection(s"jdbc:h2:mem:emptyColumns2")
            dst.createStatement().execute("SHUTDOWN")
            dst.close()
          }
        }
      }
    }
  }
}
Tensei-Data/tensei-agent
src/it/scala/usecases/emptyTargetColumns/EmptyTargetColumns.scala
Scala
agpl-3.0
18,679
package defend

/**
 * Wrapper pairing a payload with an identifier.
 *
 * @param id      identifier attached to the payload; its semantics
 *                (uniqueness, format) are defined by callers — not visible
 *                here, TODO confirm.
 * @param message the wrapped payload (deliberately untyped: `Any`)
 */
case class Envelope(id: String, message: Any)
otrebski/reactive-missile-defend
src/main/scala/defend/Envelope.scala
Scala
apache-2.0
61
import sbt._ import sbt.Keys._ import scala.language.postfixOps object BuildSettings { val filter = { (ms: Seq[(File, String)]) => ms filter { case (file, path) => path != "logback.xml" && !path.startsWith("toignore") && !path.startsWith("samples") } } val baseSettings = Seq(organization := "org.reactivemongo") val buildSettings = Defaults.coreDefaultSettings ++ baseSettings ++ Seq( scalaVersion := "2.11.8", crossScalaVersions := Seq("2.10.5", "2.11.8", "2.12.1"), crossVersion := CrossVersion.binary, //parallelExecution in Test := false, //fork in Test := true, // Don't share executioncontext between SBT CLI/tests scalacOptions ++= Seq( "-encoding", "UTF-8", "-unchecked", "-deprecation", "-feature", //"-Xfatal-warnings", "-Xlint", "-Ywarn-numeric-widen", "-Ywarn-dead-code", "-Ywarn-value-discard", "-g:vars" ), scalacOptions in Compile ++= { if (!scalaVersion.value.startsWith("2.11.")) Nil else Seq( "-Yconst-opt", "-Yclosure-elim", "-Ydead-code", "-Yopt:_" ) }, scalacOptions in Compile ++= { if (scalaVersion.value startsWith "2.10.") Nil else Seq( "-Ywarn-infer-any", "-Ywarn-unused", "-Ywarn-unused-import", "-Xlint:missing-interpolator" ) }, scalacOptions in Compile ++= { if (!scalaVersion.value.startsWith("2.12.")) Seq("-target:jvm-1.6") else Nil }, scalacOptions in (Compile, console) ~= { _.filterNot { opt => opt.startsWith("-X") || opt.startsWith("-Y") } }, scalacOptions in (Test, console) ~= { _.filterNot { opt => opt.startsWith("-X") || opt.startsWith("-Y") } }, scalacOptions in (Compile, doc) ++= Seq("-unchecked", "-deprecation", /*"-diagrams", */"-implicits", "-skip-packages", "samples"), scalacOptions in (Compile, doc) ++= Opts.doc.title("ReactiveMongo API"), scalacOptions in (Compile, doc) ++= Opts.doc.version(Release.major.value), scalacOptions in Compile := { val opts = (scalacOptions in Compile).value if (scalaVersion.value != "2.10.5") opts else { opts.filter(_ != "-Ywarn-unused-import") } }, mappings in (Compile, packageBin) ~= filter, mappings in 
(Compile, packageSrc) ~= filter, mappings in (Compile, packageDoc) ~= filter ) ++ Publish.settings ++ Format.settings ++ ( Release.settings ++ Publish.mimaSettings) } object Resolvers { val typesafe = Seq( "Typesafe repository snapshots" at "http://repo.typesafe.com/typesafe/snapshots/", "Typesafe repository releases" at "http://repo.typesafe.com/typesafe/releases/") val resolversList = typesafe } object Dependencies { val akka = Def.setting[Seq[ModuleID]] { val ver = sys.env.get("AKKA_VERSION").getOrElse { if (scalaVersion.value startsWith "2.12.") "2.4.14" else "2.3.13" } Seq( "com.typesafe.akka" %% "akka-actor" % ver, "com.typesafe.akka" %% "akka-testkit" % ver % Test) } val playIteratees = Def.setting[ModuleID] { val ver = sys.env.get("ITERATEES_VERSION").getOrElse { if (scalaVersion.value startsWith "2.10.") "2.3.9" else "2.6.1" } "com.typesafe.play" %% "play-iteratees" % ver } val specsVer = "3.8.6" val specs = "org.specs2" %% "specs2-core" % specsVer % Test val slf4jVer = "1.7.12" val log4jVer = "2.5" val slf4j = "org.slf4j" % "slf4j-api" % slf4jVer val slf4jSimple = "org.slf4j" % "slf4j-simple" % slf4jVer val logApi = Seq( slf4j % "provided", "org.apache.logging.log4j" % "log4j-api" % log4jVer // deprecated ) ++ Seq("log4j-core", "log4j-slf4j-impl").map( "org.apache.logging.log4j" % _ % log4jVer % Test) val shapelessTest = "com.chuusai" %% "shapeless" % "2.3.2" val commonsCodec = "commons-codec" % "commons-codec" % "1.10" } object Findbugs { import scala.xml.{ NodeSeq, XML }, XML.{ loadFile => loadXML } import de.johoop.findbugs4sbt.{ FindBugs, ReportType }, FindBugs.{ findbugsExcludeFilters, findbugsReportPath, findbugsReportType, findbugsSettings } @inline def task = FindBugs.findbugs val settings = findbugsSettings ++ Seq( findbugsReportType := Some(ReportType.PlainHtml), findbugsReportPath := Some(target.value / "findbugs.html"), findbugsExcludeFilters := { val commonFilters = loadXML(baseDirectory.value / ".." 
/ "project" / ( "findbugs-exclude-filters.xml")) val filters = { val f = baseDirectory.value / "findbugs-exclude-filters.xml" if (!f.exists) NodeSeq.Empty else loadXML(f).child } Some( <FindBugsFilter>${commonFilters.child}${filters}</FindBugsFilter> ) } ) } object Documentation { import sbtunidoc.{ Plugin => UnidocPlugin }, UnidocPlugin.UnidocKeys._, UnidocPlugin.ScalaUnidoc def mappings(org: String, location: String, revision: String => String = identity)(names: String*) = Def.task[Map[File, URL]] { (for { entry: Attributed[File] <- (fullClasspath in Compile).value module: ModuleID <- entry.get(moduleID.key) if module.organization == org if names.exists(module.name.startsWith) rev = revision(module.revision) } yield entry.data -> url(location.format(rev))).toMap } val settings = UnidocPlugin.unidocSettings ++ Seq( unidocProjectFilter in (ScalaUnidoc, unidoc) := { inAnyProject -- inProjects( ReactiveMongoBuild.shaded, ReactiveMongoBuild.jmx) }, apiMappings ++= mappings("org.scala-lang", "http://scala-lang.org/api/%s/")("scala-library").value ) } object ReactiveMongoBuild extends Build { import BuildSettings._ import Resolvers._ import Dependencies._ import sbtassembly.{ AssemblyKeys, MergeStrategy, PathList, ShadeRule }, AssemblyKeys._ import com.typesafe.tools.mima.core._, ProblemFilters._, Problem.ClassVersion import com.typesafe.tools.mima.plugin.MimaKeys.{ binaryIssueFilters, previousArtifacts } import de.johoop.cpd4sbt.CopyPasteDetector val travisEnv = taskKey[Unit]("Print Travis CI env") val projectPrefix = "ReactiveMongo" lazy val reactivemongo = Project( s"$projectPrefix-Root", file("."), settings = buildSettings ++ Documentation.settings). 
settings( publishArtifact := false, previousArtifacts := Set.empty, travisEnv in Test := { // test:travisEnv from SBT CLI val (akkaLower, akkaUpper) = "2.3.13" -> "2.5.0" val (playLower, playUpper) = "2.3.8" -> "2.6.1" val (mongoLower, mongoUpper) = "2_6" -> "3_4" val specs = List[(String, List[String])]( "MONGO_VER" -> List("2_6", "3", "3_4"), "MONGO_PROFILE" -> List( "default", "invalid-ssl", "mutual-ssl", "rs"), "AKKA_VERSION" -> List(akkaLower, akkaUpper), "ITERATEES_VERSION" -> List(playLower, playUpper) ) lazy val integrationEnv = specs.flatMap { case (key, values) => values.map(key -> _) }.combinations(specs.size).filterNot { flags => /* chrono-compat exclusions */ (flags.contains("AKKA_VERSION" -> akkaLower) && flags. contains("ITERATEES_VERSION" -> playUpper)) || (flags.contains("AKKA_VERSION" -> akkaUpper) && flags. contains("ITERATEES_VERSION" -> playLower)) || (flags.contains("MONGO_VER" -> mongoLower) && flags. contains("ITERATEES_VERSION" -> playUpper)) || (flags.contains("MONGO_VER" -> mongoUpper) && flags. contains("ITERATEES_VERSION" -> playLower)) || (flags.contains("MONGO_VER" -> mongoUpper) && flags. contains("AKKA_VERSION" -> akkaLower)) || (flags.contains("AKKA_VERSION" -> akkaLower) && flags. contains("MONGO_PROFILE" -> "rs")) || /* profile exclusions */ (!flags.contains("MONGO_VER" -> mongoUpper) && flags. contains("MONGO_PROFILE" -> "invalid-ssl")) || (!flags.contains("MONGO_VER" -> mongoUpper) && flags. contains("MONGO_PROFILE" -> "mutual-ssl")) || (flags.contains("MONGO_VER" -> mongoLower) && flags. contains("MONGO_PROFILE" -> "rs") && flags. 
contains("ITERATEES_VERSION" -> playLower)) }.collect { case flags if (flags.map(_._1).toSet.size == specs.size) => ("CI_CATEGORY" -> "INTEGRATION_TESTS") :: flags.sortBy(_._1) }.toList @inline def integrationVars(flags: List[(String, String)]): String = flags.map { case (k, v) => s"$k=$v" }.mkString(" ") def integrationMatrix = integrationEnv.map(integrationVars).map { c => s" - $c" } def matrix = (List("env:", " - CI_CATEGORY=UNIT_TESTS").iterator ++ ( integrationMatrix :+ "matrix: " :+ " exclude: ") ++ List( " - scala: 2.10.5", " jdk: oraclejdk8", " env: CI_CATEGORY=UNIT_TESTS") ++ List( " - scala: 2.11.8", " jdk: oraclejdk7", " env: CI_CATEGORY=UNIT_TESTS") ++ List( " - scala: 2.12.1", " jdk: oraclejdk7", " env: CI_CATEGORY=UNIT_TESTS") ++ ( integrationEnv.flatMap { flags => if (flags.contains("CI_CATEGORY" -> "INTEGRATION_TESTS") && (/* time-compat exclusions: */ flags.contains("ITERATEES_VERSION" -> playUpper) || flags.contains("AKKA_VERSION" -> akkaUpper) || flags.contains("MONGO_VER" -> mongoUpper) || /* profile priority exclusions: */ flags.contains("MONGO_PROFILE" -> "invalid-ssl") || flags.contains("MONGO_PROFILE" -> "mutual-ssl"))) { List( " - scala: 2.10.5", s" env: ${integrationVars(flags)}", " - jdk: oraclejdk7", s" env: ${integrationVars(flags)}" ) } else if (flags.contains("CI_CATEGORY" -> "INTEGRATION_TESTS") && (/* time-compat exclusions: */ flags.contains("ITERATEES_VERSION" -> playLower) || flags.contains("AKKA_VERSION" -> akkaLower) || flags.contains("MONGO_VER" -> mongoLower) )) { List( " - scala: 2.12.1", s" env: ${integrationVars(flags)}", " - jdk: oraclejdk8", s" env: ${integrationVars(flags)}" ) } else List.empty[String] }) ).mkString("\\r\\n") println(s"# Travis CI env\\r\\n$matrix") } ).aggregate(bson, bsonmacros, shaded, driver, jmx). 
enablePlugins(CopyPasteDetector) import scala.xml.{ Elem => XmlElem, Node => XmlNode } private def transformPomDependencies(tx: XmlElem => Option[XmlNode]): XmlNode => XmlNode = { node: XmlNode => import scala.xml.{ NodeSeq, XML } import scala.xml.transform.{ RewriteRule, RuleTransformer } val tr = new RuleTransformer(new RewriteRule { override def transform(node: XmlNode): NodeSeq = node match { case e: XmlElem if e.label == "dependency" => tx(e) match { case Some(n) => n case _ => NodeSeq.Empty } case _ => node } }) tr.transform(node).headOption match { case Some(transformed) => transformed case _ => sys.error("Fails to transform the POM") } } import de.johoop.findbugs4sbt.FindBugs.findbugsAnalyzedPath lazy val shaded = Project( s"$projectPrefix-Shaded", file("shaded"), settings = baseSettings ++ Publish.settings ++ Seq( previousArtifacts := Set.empty, crossPaths := false, autoScalaLibrary := false, libraryDependencies ++= Seq( "io.netty" % "netty" % "3.10.6.Final" cross CrossVersion.Disabled, "com.google.guava" % "guava" % "19.0" cross CrossVersion.Disabled ), assemblyShadeRules in assembly := Seq( ShadeRule.rename("org.jboss.netty.**" -> "shaded.netty.@1").inAll, ShadeRule.rename("com.google.**" -> "shaded.google.@1").inAll ), pomPostProcess := transformPomDependencies { _ => None }, makePom <<= makePom.dependsOn(assembly), packageBin in Compile := target.value / ( assemblyJarName in assembly).value ) ) private val driverFilter: Seq[(File, String)] => Seq[(File, String)] = { (_: Seq[(File, String)]).filter { case (file, name) => !(name endsWith "external/reactivemongo/StaticListenerBinder.class") } } andThen BuildSettings.filter private val commonCleanup: ClassLoader => Unit = { cl => import scala.language.reflectiveCalls val c = cl.loadClass("Common$") type M = { def close(): Unit } val m: M = c.getField("MODULE$").get(null).asInstanceOf[M] m.close() } lazy val bson = Project( s"$projectPrefix-BSON", file("bson"), settings = buildSettings ++ Findbugs.settings 
++ Seq( libraryDependencies ++= Seq(specs, "org.specs2" %% "specs2-scalacheck" % specsVer % Test, "org.typelevel" %% "discipline" % "0.7.2" % Test, "org.spire-math" %% "spire-laws" % "0.13.0" % Test), binaryIssueFilters ++= { import ProblemFilters.{ exclude => x } @inline def irt(s: String) = x[IncompatibleResultTypeProblem](s) Seq( x[MissingTypesProblem]("reactivemongo.bson.BSONTimestamp$"), irt("reactivemongo.bson.Producer.noneOptionValue2Producer"), irt("reactivemongo.bson.Producer.noneOptionValueProducer"), irt("reactivemongo.bson.Producer.nameOptionValue2Producer"), irt("reactivemongo.bson.Producer.valueProducer"), irt("reactivemongo.bson.Producer.optionValueProducer") ) } ) ).enablePlugins(CopyPasteDetector) lazy val bsonmacros = Project( s"$projectPrefix-BSON-Macros", file("macros"), settings = buildSettings ++ Findbugs.settings ++ Seq( libraryDependencies ++= Seq(specs, "org.scala-lang" % "scala-compiler" % scalaVersion.value % "provided") )) .enablePlugins(CopyPasteDetector) .dependsOn(bson) val driverCleanup = taskKey[Unit]("Driver compilation cleanup") lazy val driver = Project( projectPrefix, file("driver"), settings = buildSettings ++ Findbugs.settings ++ Seq( resolvers := resolversList, compile in Compile <<= (compile in Compile).dependsOn(assembly in shaded), sourceGenerators in Compile += Def.task { val ver = version.value val dir = (sourceManaged in Compile).value val outdir = dir / "reactivemongo" / "api" val f = outdir / "Version.scala" outdir.mkdirs() Seq(IO.writer[File](f, "", IO.defaultCharset, false) { w => w.append(s"""package reactivemongo.api object Version { /** The ReactiveMongo API version */ override val toString = "$ver" /** The major version (e.g. 
0.12 for the release 0.12.0) */ val majorVersion = "${Release.major.value}" }""") f }) }.taskValue, driverCleanup := { val classDir = (classDirectory in Compile).value val extDir = { val d = target.value / "external" / "reactivemongo" d.mkdirs(); d } val classFile = "StaticListenerBinder.class" val listenerClass = classDir / "external" / "reactivemongo" / classFile streams.value.log(s"Cleanup $listenerClass ...") IO.move(listenerClass, extDir / classFile) }, driverCleanup <<= driverCleanup.triggeredBy(compile in Compile), unmanagedJars in Compile := { val shadedDir = (target in shaded).value val shadedJar = (assemblyJarName in (shaded, assembly)).value (shadedDir / "classes").mkdirs() // Findbugs workaround Seq(Attributed(shadedDir / shadedJar)(AttributeMap.empty)) }, libraryDependencies ++= akka.value ++ Seq( playIteratees.value, commonsCodec, shapelessTest, specs) ++ logApi, findbugsAnalyzedPath += target.value / "external", binaryIssueFilters ++= { import ProblemFilters.{ exclude => x } @inline def mmp(s: String) = x[MissingMethodProblem](s) @inline def imt(s: String) = x[IncompatibleMethTypeProblem](s) @inline def fmp(s: String) = x[FinalMethodProblem](s) @inline def fcp(s: String) = x[FinalClassProblem](s) @inline def irt(s: String) = x[IncompatibleResultTypeProblem](s) @inline def mtp(s: String) = x[MissingTypesProblem](s) @inline def mcp(s: String) = x[MissingClassProblem](s) Seq( mcp("reactivemongo.api.MongoConnection$MonitorActor$"), mcp("reactivemongo.api.ReadPreference$BSONDocumentWrapper$"), // priv mcp("reactivemongo.api.ReadPreference$BSONDocumentWrapper"), // priv fcp("reactivemongo.api.MongoDriver$SupervisorActor"), // private mcp("reactivemongo.api.MongoDriver$SupervisorActor$"), // private fcp("reactivemongo.api.collections.bson.BSONCollection"), imt("reactivemongo.core.actors.AwaitingResponse.apply"), // private imt("reactivemongo.core.actors.AwaitingResponse.this"), // private mmp("reactivemongo.core.protocol.MongoHandler.this"), // private 
fcp("reactivemongo.core.nodeset.ChannelFactory"), mcp("reactivemongo.core.actors.RefreshAllNodes"), mcp("reactivemongo.core.actors.RefreshAllNodes$"), mmp("reactivemongo.core.actors.MongoDBSystem.DefaultConnectionRetryInterval"), imt("reactivemongo.core.netty.ChannelBufferReadableBuffer.apply"), irt("reactivemongo.core.netty.ChannelBufferReadableBuffer.buffer"), imt("reactivemongo.core.netty.ChannelBufferReadableBuffer.this"), irt("reactivemongo.core.netty.ChannelBufferWritableBuffer.buffer"), imt( "reactivemongo.core.netty.ChannelBufferWritableBuffer.writeBytes"), imt("reactivemongo.core.netty.ChannelBufferWritableBuffer.this"), irt("reactivemongo.core.netty.BufferSequence.merged"), imt("reactivemongo.core.netty.BufferSequence.this"), imt("reactivemongo.core.netty.BufferSequence.apply"), imt("reactivemongo.core.protocol.package.RichBuffer"), imt( "reactivemongo.core.protocol.BufferAccessors.writeTupleToBuffer2"), imt( "reactivemongo.core.protocol.BufferAccessors.writeTupleToBuffer4"), imt( "reactivemongo.core.protocol.BufferAccessors.writeTupleToBuffer3"), mtp("reactivemongo.core.protocol.MongoHandler"), imt("reactivemongo.core.protocol.MongoHandler.exceptionCaught"), imt("reactivemongo.core.protocol.MongoHandler.channelConnected"), imt("reactivemongo.core.protocol.MongoHandler.writeComplete"), imt("reactivemongo.core.protocol.MongoHandler.log"), imt("reactivemongo.core.protocol.MongoHandler.writeRequested"), imt("reactivemongo.core.protocol.MongoHandler.messageReceived"), imt("reactivemongo.core.protocol.MongoHandler.channelClosed"), imt("reactivemongo.core.protocol.MongoHandler.channelDisconnected"), mtp("reactivemongo.core.protocol.ResponseDecoder"), imt("reactivemongo.core.protocol.ResponseDecoder.decode"), mtp("reactivemongo.core.protocol.ResponseFrameDecoder"), imt("reactivemongo.core.protocol.ResponseFrameDecoder.decode"), imt("reactivemongo.core.protocol.BufferAccessors#BufferInteroperable.apply"), mtp("reactivemongo.core.protocol.RequestEncoder"), 
imt("reactivemongo.core.protocol.RequestEncoder.encode"), mmp("reactivemongo.core.protocol.ChannelBufferReadable.apply"), imt("reactivemongo.core.protocol.ChannelBufferReadable.readFrom"), imt("reactivemongo.core.protocol.MessageHeader.readFrom"), imt("reactivemongo.core.protocol.MessageHeader.apply"), imt("reactivemongo.core.protocol.Response.copy"), irt("reactivemongo.core.protocol.Response.documents"), imt("reactivemongo.core.protocol.Response.this"), imt("reactivemongo.core.protocol.ReplyDocumentIterator.apply"), imt("reactivemongo.core.protocol.Response.apply"), irt("reactivemongo.core.protocol.package#RichBuffer.writeString"), irt("reactivemongo.core.protocol.package#RichBuffer.buffer"), irt("reactivemongo.core.protocol.package#RichBuffer.writeCString"), imt("reactivemongo.core.protocol.package#RichBuffer.this"), imt("reactivemongo.core.protocol.Reply.readFrom"), imt("reactivemongo.core.protocol.Reply.apply"), imt("reactivemongo.core.nodeset.Connection.apply"), imt("reactivemongo.core.nodeset.Connection.copy"), irt("reactivemongo.core.nodeset.Connection.send"), irt("reactivemongo.core.nodeset.Connection.channel"), imt("reactivemongo.core.nodeset.Connection.this"), irt("reactivemongo.core.nodeset.ChannelFactory.channelFactory"), irt("reactivemongo.core.nodeset.ChannelFactory.create"), mcp("reactivemongo.api.MongoDriver$CloseWithTimeout"), mcp("reactivemongo.api.MongoDriver$CloseWithTimeout$"), mtp("reactivemongo.api.FailoverStrategy$"), irt( "reactivemongo.api.collections.GenericCollection#BulkMaker.result"), irt("reactivemongo.api.collections.GenericCollection#Mongo26WriteCommand.result"), x[IncompatibleTemplateDefProblem]( "reactivemongo.core.actors.MongoDBSystem"), mcp("reactivemongo.core.actors.RequestIds"), mcp("reactivemongo.core.actors.RefreshAllNodes"), mcp("reactivemongo.core.actors.RefreshAllNodes$"), mmp("reactivemongo.core.actors.MongoDBSystem.DefaultConnectionRetryInterval"), mmp("reactivemongo.api.CollectionMetaCommands.drop"), 
mmp("reactivemongo.api.DB.coll"), mmp("reactivemongo.api.DB.coll$default$2"), mmp("reactivemongo.api.DB.defaultReadPreference"), mmp("reactivemongo.api.DB.coll$default$4"), mmp("reactivemongo.api.DBMetaCommands.serverStatus"), mmp( "reactivemongo.api.collections.BatchCommands.DistinctResultReader"), mmp( "reactivemongo.api.collections.BatchCommands.AggregationFramework"), mmp( "reactivemongo.api.collections.BatchCommands.FindAndModifyReader"), mmp( "reactivemongo.api.collections.BatchCommands.DistinctWriter"), mmp( "reactivemongo.api.collections.BatchCommands.FindAndModifyCommand"), mmp( "reactivemongo.api.collections.BatchCommands.AggregateWriter"), mmp( "reactivemongo.api.collections.BatchCommands.DistinctCommand"), mmp( "reactivemongo.api.collections.BatchCommands.AggregateReader"), mmp("reactivemongo.bson.BSONTimestamp.toString"), irt("reactivemongo.api.commands.Upserted._id"), imt("reactivemongo.api.commands.Upserted.this"), imt("reactivemongo.api.commands.Upserted.copy"), irt("reactivemongo.api.Cursor.logger"), mcp("reactivemongo.core.netty.package"), mcp("reactivemongo.core.netty.package$"), mcp("reactivemongo.core.netty.package$BSONDocumentNettyWritable"), mcp("reactivemongo.core.netty.package$BSONDocumentNettyWritable$"), mcp("reactivemongo.core.netty.package$BSONDocumentNettyReadable"), mcp("reactivemongo.core.netty.package$BSONDocumentNettyReadable$"), imt("reactivemongo.core.netty.ChannelBufferReadableBuffer.apply"), imt("reactivemongo.core.netty.ChannelBufferReadableBuffer.buffer"), imt("reactivemongo.core.netty.ChannelBufferReadableBuffer.this"), imt("reactivemongo.core.netty.ChannelBufferWritableBuffer.buffer"), imt( "reactivemongo.core.netty.ChannelBufferWritableBuffer.writeBytes"), imt("reactivemongo.core.netty.ChannelBufferWritableBuffer.this"), imt("reactivemongo.core.netty.BufferSequence.merged"), imt("reactivemongo.core.netty.BufferSequence.this"), imt("reactivemongo.core.protocol.package.RichBuffer"), 
imt("reactivemongo.core.netty.ChannelBufferReadableBuffer.buffer"), imt("reactivemongo.core.netty.ChannelBufferWritableBuffer.buffer"), imt("reactivemongo.core.netty.BufferSequence.merged"), irt("reactivemongo.core.netty.ChannelBufferReadableBuffer.buffer"), irt("reactivemongo.core.netty.ChannelBufferWritableBuffer.buffer"), irt("reactivemongo.core.netty.BufferSequence.merged"), imt("reactivemongo.core.netty.BufferSequence.apply"), imt("reactivemongo.core.protocol.BufferAccessors.writeTupleToBuffer2"), imt("reactivemongo.core.protocol.BufferAccessors.writeTupleToBuffer4"), imt("reactivemongo.core.protocol.BufferAccessors.writeTupleToBuffer3"), mtp("reactivemongo.core.protocol.MongoHandler"), imt("reactivemongo.core.protocol.MongoHandler.exceptionCaught"), imt("reactivemongo.core.protocol.MongoHandler.channelConnected"), imt("reactivemongo.core.protocol.MongoHandler.writeComplete"), imt("reactivemongo.core.protocol.MongoHandler.log"), imt("reactivemongo.core.protocol.MongoHandler.writeRequested"), imt("reactivemongo.core.protocol.MongoHandler.messageReceived"), imt("reactivemongo.core.protocol.MongoHandler.channelClosed"), imt("reactivemongo.core.protocol.MongoHandler.channelDisconnected"), mtp("reactivemongo.core.protocol.ResponseDecoder"), imt("reactivemongo.core.protocol.ResponseDecoder.decode"), mtp("reactivemongo.core.protocol.ResponseFrameDecoder"), imt("reactivemongo.core.protocol.ResponseFrameDecoder.decode"), imt("reactivemongo.core.protocol.BufferAccessors#BufferInteroperable.apply"), mtp("reactivemongo.core.protocol.RequestEncoder"), imt("reactivemongo.core.protocol.RequestEncoder.encode"), mmp("reactivemongo.core.protocol.ChannelBufferReadable.apply"), imt("reactivemongo.core.protocol.ChannelBufferReadable.readFrom"), imt("reactivemongo.core.protocol.MessageHeader.readFrom"), imt("reactivemongo.core.protocol.MessageHeader.apply"), imt("reactivemongo.core.protocol.Response.copy"), irt("reactivemongo.core.protocol.Response.documents"), 
imt("reactivemongo.core.protocol.Response.this"), imt("reactivemongo.core.protocol.ReplyDocumentIterator.apply"), imt("reactivemongo.core.protocol.Response.apply"), irt("reactivemongo.core.protocol.package#RichBuffer.writeString"), irt("reactivemongo.core.protocol.package#RichBuffer.buffer"), irt("reactivemongo.core.protocol.package#RichBuffer.writeCString"), imt("reactivemongo.core.protocol.package#RichBuffer.this"), imt("reactivemongo.core.protocol.Reply.readFrom"), imt("reactivemongo.core.protocol.Reply.apply"), imt("reactivemongo.core.nodeset.Connection.apply"), imt("reactivemongo.core.nodeset.Connection.copy"), irt("reactivemongo.core.nodeset.Connection.send"), irt("reactivemongo.core.nodeset.Connection.channel"), imt("reactivemongo.core.nodeset.Connection.this"), ProblemFilters.exclude[FinalClassProblem]( "reactivemongo.core.nodeset.ChannelFactory"), irt("reactivemongo.core.nodeset.ChannelFactory.channelFactory"), irt("reactivemongo.core.nodeset.ChannelFactory.create"), mcp("reactivemongo.api.MongoDriver$CloseWithTimeout"), mcp("reactivemongo.api.MongoDriver$CloseWithTimeout$"), mtp("reactivemongo.api.FailoverStrategy$"), irt("reactivemongo.api.collections.GenericCollection#BulkMaker.result"), irt("reactivemongo.api.collections.GenericCollection#Mongo26WriteCommand.result"), imt("reactivemongo.core.protocol.Response.documents"), imt("reactivemongo.core.protocol.package#RichBuffer.writeString"), imt("reactivemongo.core.protocol.package#RichBuffer.buffer"), imt("reactivemongo.core.protocol.package#RichBuffer.writeCString"), imt("reactivemongo.core.nodeset.Connection.send"), imt("reactivemongo.core.nodeset.Connection.channel"), imt("reactivemongo.core.nodeset.ChannelFactory.channelFactory"), imt("reactivemongo.core.nodeset.ChannelFactory.create"), imt("reactivemongo.api.collections.GenericCollection#BulkMaker.result"), imt("reactivemongo.api.collections.GenericCollection#Mongo26WriteCommand.result"), mcp("reactivemongo.api.MongoConnection$IsKilled"), 
mcp("reactivemongo.api.MongoConnection$IsKilled$"), mcp("reactivemongo.api.commands.tst2"), mcp("reactivemongo.api.commands.tst2$"), mcp("reactivemongo.api.collections.GenericCollection$Mongo24BulkInsert"), mtp("reactivemongo.api.commands.DefaultWriteResult"), mmp("reactivemongo.api.commands.DefaultWriteResult.fillInStackTrace"), mmp("reactivemongo.api.commands.DefaultWriteResult.isUnauthorized"), mmp("reactivemongo.api.commands.DefaultWriteResult.getMessage"), irt("reactivemongo.api.commands.DefaultWriteResult.originalDocument"), irt("reactivemongo.core.commands.Getnonce.ResultMaker"), irt("reactivemongo.core.protocol.RequestEncoder.logger"), irt("reactivemongo.api.MongoConnection.killed"), mmp("reactivemongo.api.MongoConnection#MonitorActor.killed_="), mmp("reactivemongo.api.MongoConnection#MonitorActor.primaryAvailable_="), mmp("reactivemongo.api.MongoConnection#MonitorActor.killed"), mmp( "reactivemongo.api.MongoConnection#MonitorActor.primaryAvailable"), mmp("reactivemongo.api.collections.GenericCollection#Mongo26WriteCommand._debug"), fmp( "reactivemongo.api.commands.AggregationFramework#Project.toString"), fmp( "reactivemongo.api.commands.AggregationFramework#Redact.toString"), fmp("reactivemongo.api.commands.AggregationFramework#Sort.toString"), mmp("reactivemongo.api.commands.AggregationFramework#Limit.n"), mmp("reactivemongo.api.commands.AggregationFramework#Limit.name"), mtp("reactivemongo.api.commands.AggregationFramework$Limit"), irt("reactivemongo.api.gridfs.DefaultFileToSave.filename"), irt("reactivemongo.api.gridfs.DefaultReadFile.filename"), irt("reactivemongo.api.gridfs.DefaultReadFile.length"), irt("reactivemongo.api.gridfs.BasicMetadata.filename"), irt("reactivemongo.api.gridfs.package.logger"), mtp("reactivemongo.api.MongoConnectionOptions$"), mmp("reactivemongo.api.MongoConnectionOptions.apply"), mmp("reactivemongo.api.MongoConnectionOptions.copy"), mmp("reactivemongo.api.MongoConnectionOptions.this"), 
fmp("reactivemongo.api.commands.AggregationFramework#Group.toString"), mcp("reactivemongo.api.commands.tst2$Toto"), fmp("reactivemongo.api.commands.AggregationFramework#Match.toString"), mmp("reactivemongo.api.commands.WriteResult.originalDocument"), fmp( "reactivemongo.api.commands.AggregationFramework#GeoNear.toString"), irt("reactivemongo.api.gridfs.ComputedMetadata.length"), irt("reactivemongo.core.commands.Authenticate.ResultMaker"), mtp("reactivemongo.api.gridfs.DefaultFileToSave"), mmp("reactivemongo.api.gridfs.DefaultFileToSave.productElement"), mmp("reactivemongo.api.gridfs.DefaultFileToSave.productArity"), mmp("reactivemongo.api.gridfs.DefaultFileToSave.productIterator"), mmp("reactivemongo.api.gridfs.DefaultFileToSave.productPrefix"), imt("reactivemongo.api.gridfs.DefaultFileToSave.this"), mtp("reactivemongo.api.gridfs.DefaultFileToSave$"), imt("reactivemongo.api.gridfs.DefaultReadFile.apply"), imt("reactivemongo.api.gridfs.DefaultReadFile.copy"), imt("reactivemongo.api.gridfs.DefaultReadFile.this"), mmp("reactivemongo.api.gridfs.DefaultFileToSave.apply"), imt("reactivemongo.api.gridfs.DefaultFileToSave.copy"), mmp("reactivemongo.api.commands.AggregationFramework#Skip.n"), mmp("reactivemongo.api.commands.AggregationFramework#Skip.name"), mtp("reactivemongo.api.commands.AggregationFramework$Skip"), mcp("reactivemongo.api.commands.AggregationFramework$PipelineStage"), mmp("reactivemongo.api.commands.AggregationFramework#Aggregate.needsCursor"), mmp("reactivemongo.api.commands.AggregationFramework#Aggregate.cursorOptions"), mtp("reactivemongo.api.commands.WriteResult"), mtp("reactivemongo.api.commands.UpdateWriteResult"), mmp("reactivemongo.api.commands.UpdateWriteResult.fillInStackTrace"), mmp("reactivemongo.api.commands.UpdateWriteResult.isUnauthorized"), mmp("reactivemongo.api.commands.UpdateWriteResult.getMessage"), mmp( "reactivemongo.api.commands.UpdateWriteResult.isNotAPrimaryError"), 
irt("reactivemongo.api.commands.UpdateWriteResult.originalDocument"), mtp("reactivemongo.api.commands.AggregationFramework$Match$"), mtp("reactivemongo.api.commands.AggregationFramework$Redact$"), mcp("reactivemongo.api.commands.AggregationFramework$DocumentStageCompanion"), mtp("reactivemongo.api.commands.AggregationFramework$Project$"), mtp("reactivemongo.api.commands.AggregationFramework$Sort$"), mtp("reactivemongo.core.commands.Authenticate$"), mmp("reactivemongo.api.commands.AggregationFramework#Unwind.prefixedField"), mmp("reactivemongo.core.commands.Authenticate.apply"), mmp("reactivemongo.core.commands.Authenticate.apply"), mtp("reactivemongo.api.commands.LastError$"), mmp("reactivemongo.api.commands.LastError.apply"), irt("reactivemongo.api.commands.LastError.originalDocument"), mmp("reactivemongo.api.commands.LastError.copy"), mmp("reactivemongo.api.commands.LastError.this"), mtp("reactivemongo.api.commands.CollStatsResult$"), mmp("reactivemongo.api.commands.CollStatsResult.apply"), mmp("reactivemongo.api.commands.CollStatsResult.this"), mmp("reactivemongo.api.commands.GetLastError#TagSet.s"), mmp("reactivemongo.api.commands.FindAndModifyCommand#FindAndModify.apply"), mmp("reactivemongo.api.commands.FindAndModifyCommand#FindAndModify.this"), mmp("reactivemongo.api.commands.FindAndModifyCommand#FindAndModify.copy"), mmp("reactivemongo.api.commands.FindAndModifyCommand#Update.copy"), mmp("reactivemongo.api.commands.FindAndModifyCommand#Update.this"), mmp("reactivemongo.api.commands.FindAndModifyCommand#Update.apply"), irt("reactivemongo.api.commands.LastError.originalDocument"), imt("reactivemongo.api.commands.AggregationFramework#Group.apply"), mmp("reactivemongo.api.commands.AggregationFramework#Redact.apply"), imt("reactivemongo.api.commands.Upserted.apply"), mmp("reactivemongo.api.commands.AggregationFramework#Match.apply"), irt("reactivemongo.api.commands.AggregationFramework#Sort.apply"), 
mmp("reactivemongo.api.commands.AggregationFramework#Sort.apply"), mmp("reactivemongo.api.commands.AggregationFramework#GeoNear.apply"), mmp( "reactivemongo.api.commands.AggregationFramework#GeoNear.andThen"), mmp("reactivemongo.api.commands.AggregationFramework#Sort.unapply"), mmp("reactivemongo.api.commands.AggregationFramework#Sort.copy"), mmp("reactivemongo.api.commands.AggregationFramework#Sort.document"), mcp("reactivemongo.api.commands.AggregationFramework$PipelineStageDocumentProducer"), mmp("reactivemongo.api.commands.AggregationFramework.PipelineStageDocumentProducer"), mcp("reactivemongo.api.commands.AggregationFramework$PipelineStageDocumentProducer$"), mmp("reactivemongo.api.commands.AggregationFramework#Group.andThen"), mmp("reactivemongo.api.commands.AggregationFramework#Group.this"), mmp("reactivemongo.api.commands.AggregationFramework#Group.copy"), mmp("reactivemongo.api.commands.AggregationFramework#Group.document"), imt("reactivemongo.api.commands.AggregationFramework#Sort.this"), mmp("reactivemongo.api.commands.AggregationFramework#Sort.copy"), mmp("reactivemongo.api.commands.AggregationFramework#Sort.document"), mcp("reactivemongo.api.commands.CursorCommand"), mmp( "reactivemongo.api.commands.AggregationFramework#Project.document"), mmp("reactivemongo.api.commands.AggregationFramework#Match.document"), mmp("reactivemongo.api.collections.bson.BSONQueryBuilder.copy"), mmp("reactivemongo.api.collections.bson.BSONQueryBuilder.this"), mmp("reactivemongo.api.collections.bson.BSONQueryBuilder$"), mmp("reactivemongo.api.collections.bson.BSONQueryBuilder.apply"), mmp("reactivemongo.api.MongoConnection.ask"), mmp("reactivemongo.api.MongoConnection.ask"), mmp("reactivemongo.api.MongoConnection.waitForPrimary"), mmp( "reactivemongo.api.commands.AggregationFramework#Aggregate.apply"), mtp("reactivemongo.api.commands.AggregationFramework$Aggregate"), mmp("reactivemongo.api.commands.AggregationFramework#Aggregate.copy"), 
irt("reactivemongo.api.commands.AggregationFramework#Aggregate.pipeline"), mmp("reactivemongo.api.commands.AggregationFramework#Aggregate.this"), mmp("reactivemongo.api.collections.GenericQueryBuilder.copy"), mcp("reactivemongo.api.commands.AggregationFramework$AggregateCursorOptions$"), mcp("reactivemongo.api.commands.AggregationFramework$AggregateCursorOptions"), mmp("reactivemongo.api.commands.AggregationFramework.AggregateCursorOptions"), mtp("reactivemongo.api.commands.AggregationFramework$Group$"), mtp("reactivemongo.api.collections.bson.BSONQueryBuilder$"), x[IncompatibleTemplateDefProblem]( "reactivemongo.core.nodeset.Authenticating"), mmp("reactivemongo.api.commands.AggregationFramework#Group.compose"), mtp("reactivemongo.api.commands.AggregationFramework$GeoNear$"), mmp( "reactivemongo.api.commands.AggregationFramework#GeoNear.compose"), mcp("reactivemongo.api.commands.AggregationFramework$DocumentStage"), mtp("reactivemongo.api.commands.AggregationFramework$Redact"), mtp("reactivemongo.api.commands.AggregationFramework$GeoNear"), mtp("reactivemongo.api.commands.AggregationFramework$Unwind"), mtp("reactivemongo.api.commands.AggregationFramework$Project"), mtp("reactivemongo.api.commands.AggregationFramework$Out"), mtp("reactivemongo.api.commands.AggregationFramework$Match"), mmp("reactivemongo.api.commands.DefaultWriteResult.isNotAPrimaryError"), mtp("reactivemongo.api.commands.AggregationFramework$Sort"), mtp("reactivemongo.api.commands.AggregationFramework$Group"), mmp("reactivemongo.api.commands.AggregationFramework#GeoNear.name"), mmp("reactivemongo.api.commands.AggregationFramework#Redact.name"), mmp("reactivemongo.api.commands.AggregationFramework#Unwind.name"), mmp("reactivemongo.api.commands.AggregationFramework#Project.name"), mmp("reactivemongo.api.commands.AggregationFramework#Out.name"), mmp("reactivemongo.api.commands.AggregationFramework#Match.name"), mmp("reactivemongo.api.commands.AggregationFramework#Sort.name"), 
mmp("reactivemongo.api.commands.AggregationFramework#Group.name"), mmp("reactivemongo.api.commands.AggregationFramework#Group.apply"), mmp("reactivemongo.api.commands.AggregationFramework#GeoNear.this"), mmp("reactivemongo.api.commands.AggregationFramework#GeoNear.copy"), mmp( "reactivemongo.api.commands.AggregationFramework#GeoNear.document"), mtp("reactivemongo.core.nodeset.Authenticating$"), mmp("reactivemongo.api.commands.AggregationFramework#Project.apply"), mmp( "reactivemongo.api.commands.AggregationFramework#Redact.document"), mmp("reactivemongo.api.DefaultDB.sister"), mmp("reactivemongo.api.DB.sister"), mtp("reactivemongo.api.MongoDriver$AddConnection$"), mmp("reactivemongo.api.MongoDriver#AddConnection.apply"), mmp("reactivemongo.api.MongoDriver#AddConnection.copy"), mmp("reactivemongo.api.MongoDriver#AddConnection.this"), mmp("reactivemongo.api.indexes.Index.copy"), mmp("reactivemongo.api.indexes.Index.this"), mtp("reactivemongo.api.indexes.Index$"), mmp("reactivemongo.api.indexes.Index.apply")) }, testOptions in Test += Tests.Cleanup(commonCleanup), mappings in (Compile, packageBin) ~= driverFilter, //mappings in (Compile, packageDoc) ~= driverFilter, mappings in (Compile, packageSrc) ~= driverFilter, apiMappings ++= Documentation.mappings("com.typesafe.akka", "http://doc.akka.io/api/akka/%s/")("akka-actor").value ++ Documentation.mappings("com.typesafe.play", "http://playframework.com/documentation/%s/api/scala/index.html", _.replaceAll("[\\\\d]$", "x"))("play-iteratees").value ) ).enablePlugins(CopyPasteDetector). 
dependsOn(bsonmacros, shaded)

  // POM post-processor: rewrites internal "org.reactivemongo" dependencies so
  // they are published with <scope>provided</scope> (any pre-existing <scope>
  // element is stripped first). Third-party dependencies pass through untouched.
  private val providedInternalDeps: XmlNode => XmlNode = {
    import scala.xml.NodeSeq
    import scala.xml.transform.{ RewriteRule, RuleTransformer }

    // Transformer that removes any existing <scope> element from a node tree.
    val asProvided = new RuleTransformer(new RewriteRule {
      override def transform(node: XmlNode): NodeSeq = node match {
        case e: XmlElem if e.label == "scope" => NodeSeq.Empty
        case _ => node
      }
    })

    transformPomDependencies { dep: scala.xml.Elem =>
      if ((dep \\ "groupId").text == "org.reactivemongo") {
        // Internal dependency: drop the old scope, then append provided scope.
        asProvided.transform(dep).headOption.collectFirst {
          case e: XmlElem => e.copy(
            child = e.child :+ <scope>provided</scope>)
        }
      } else Some(dep)
    }
  }

  // JMX sub-project; depends on the core driver, whose POM entry is rewritten
  // to "provided" scope by providedInternalDeps above.
  lazy val jmx = Project(
    s"$projectPrefix-JMX", file("jmx"),
    settings = buildSettings ++ Findbugs.settings).
    settings(
      previousArtifacts := Set.empty,
      testOptions in Test += Tests.Cleanup(commonCleanup),
      libraryDependencies ++= Seq(specs) ++ logApi,
      pomPostProcess := providedInternalDeps
    ).enablePlugins(CopyPasteDetector).
    dependsOn(driver)
}
maxime-gautre/ReactiveMongo
project/ReactiveMongo.scala
Scala
apache-2.0
43,574
/***********************************************************************
 * Copyright (c) 2013-2019 Commonwealth Computer Research, Inc.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Apache License, Version 2.0
 * which accompanies this distribution and is available at
 * http://www.opensource.org/licenses/apache2.0.php.
 ***********************************************************************/

package org.locationtech.geomesa.hbase.tools.ingest

import com.beust.jcommander.{Parameter, ParameterException, Parameters}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.hadoop.hbase.TableName
import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles
import org.locationtech.geomesa.hbase.data.HBaseDataStore
import org.locationtech.geomesa.hbase.tools.HBaseDataStoreCommand
import org.locationtech.geomesa.hbase.tools.HBaseDataStoreCommand.{HBaseParams, RemoteFilterNotUsedParam}
import org.locationtech.geomesa.hbase.tools.ingest.HBaseBulkLoadCommand.BulkLoadParams
import org.locationtech.geomesa.index.conf.partition.TablePartition
import org.locationtech.geomesa.tools.{Command, RequiredIndexParam, RequiredTypeNameParam}
import org.locationtech.geomesa.utils.index.IndexMode
import org.locationtech.geomesa.utils.text.TextTools

/**
 * CLI command that bulk-loads pre-generated HFiles into the HBase table
 * backing a single GeoMesa index, via HBase's incremental-load mechanism.
 */
class HBaseBulkLoadCommand extends HBaseDataStoreCommand {

  override val name: String = "bulk-load"
  override val params = new BulkLoadParams

  override def execute(): Unit = withDataStore(run)

  /**
   * Runs the incremental load against the data store.
   *
   * Validates that the schema exists and is not partitioned, resolves the
   * single table backing the requested index, then delegates to
   * `LoadIncrementalHFiles.doBulkLoad`.
   *
   * @param ds open HBase data store (managed by the caller)
   * @throws ParameterException if the schema does not exist
   * @throws IllegalStateException if the index maps to more than one table
   */
  def run(ds: HBaseDataStore): Unit = {
    val sft = ds.getSchema(params.featureName)
    if (sft == null) {
      throw new ParameterException(s"Schema '${params.featureName}' does not exist")
    }
    require(!TablePartition.partitioned(sft), "Bulk loading partitioned tables is not currently supported")

    val index = params.loadIndex(ds, sft.getTypeName, IndexMode.Write)
    val input = new Path(params.input)

    Command.user.info(s"Running HBase incremental load...")
    val start = System.currentTimeMillis()
    val tableName = index.getTableNames(None) match {
      case Seq(t) => TableName.valueOf(t) // should always be writing to a single table here
      case tables => throw new IllegalStateException(s"Expected a single table but got: ${tables.mkString(", ")}")
    }

    // Admin, Table and RegionLocator are Closeable handles obtained from the
    // shared connection - close them when done so they don't leak (the
    // original code never released them).
    val admin = ds.connection.getAdmin
    val table = ds.connection.getTable(tableName)
    val locator = ds.connection.getRegionLocator(tableName)
    try {
      val config = new Configuration
      config.set("hbase.loadincremental.validate.hfile", params.validate.toString)
      new LoadIncrementalHFiles(config).doBulkLoad(input, admin, table, locator)
    } finally {
      // nested finally so a failure closing one handle doesn't leak the others
      try { locator.close() } finally {
        try { table.close() } finally { admin.close() }
      }
    }
    Command.user.info(s"HBase incremental load complete in ${TextTools.getTime(start)}")
  }
}

object HBaseBulkLoadCommand {
  // note: [sizes]
  @Parameters(commandDescription = "Bulk load HFiles into HBase")
  class BulkLoadParams extends HBaseParams with RequiredTypeNameParam
      with RequiredIndexParam with RemoteFilterNotUsedParam {
    // Location of the HFiles produced by a previous bulk-ingest job
    @Parameter(names = Array("--input"), description = "Path to HFiles to be loaded", required = true)
    var input: String = _
    // Passed through to "hbase.loadincremental.validate.hfile"
    @Parameter(names = Array("--validate"), description = "Validate HFiles before loading", arity = 1)
    var validate: Boolean = true
  }
}
elahrvivaz/geomesa
geomesa-hbase/geomesa-hbase-tools/src/main/scala/org/locationtech/geomesa/hbase/tools/ingest/HBaseBulkLoadCommand.scala
Scala
apache-2.0
3,264
package models.quiz

import com.artclod.mathml.MathML
import com.artclod.mathml.scalar.MathMLElem
import com.artclod.slick.{JodaUTC, NumericBoolean}
import com.artclod.util.OneOfThree
import com.artclod.util.ofthree.{First, Second, Third}
import controllers.quiz._
import models.quiz.util.SequenceTokenOrMath
import models.support.HasOrder
import models.{QuestionId, QuestionPartId, QuestionSectionId, UserId}
import org.joda.time.DateTime
import play.twirl.api.Html
import models.user.User

// ======= QuestionFrame
/**
 * In-memory aggregate of a question plus all of its sections, skills and
 * user constants. Instances built from JSON carry null ids until the real
 * database ids are assigned via `id(...)`.
 *
 * The constructor body validates the frame and throws
 * IllegalArgumentException for an empty `sections` or `skills` vector.
 */
case class QuestionFrame(question: Question, sections: Vector[QuestionSectionFrame], skills: Vector[Skill], userConstants: QuestionUserConstantsFrame) {

  // ==== throw errors for bad formulation
  if(sections.isEmpty) { throw new IllegalArgumentException("There were no sections"); }
  if(skills.isEmpty) { throw new IllegalArgumentException("There were no skills"); }

  // Propagates a newly assigned question id to the question, every section
  // and the user constants.
  def id(questionId: QuestionId) = QuestionFrame(question.copy(id = questionId), sections.map(_.questionId(questionId)), skills, userConstants.questionId(questionId))

  // Bakes the per-user constants into question and sections, then clears
  // `userConstants` so they are not applied twice.
  def fixConstants(user: User) = copy(
    question = question.fixConstants(user, userConstants),
    sections = sections.map(_.fixConstants(user, userConstants)),
    userConstants = QuestionUserConstantsFrame.empty)
}

object QuestionFrame {

  // ==== JSON to Frame ====
  /**
   * Builds a QuestionFrame from its JSON form. Ids are left null (assigned
   * later by `id(...)`). Skills are resolved through `skillMap`;
   * an unknown skill name raises IllegalArgumentException.
   */
  def apply(questionJson: QuestionJson, ownerId: UserId, skillMap: Map[String, Skill], now : DateTime = JodaUTC.now) : QuestionFrame = {
    val question : Question = Question(id = null, ownerId = ownerId, title = questionJson.title, descriptionRaw = questionJson.descriptionRaw, descriptionHtml = Html(questionJson.descriptionHtml), creationDate = now)
    // val question : Question = questionJson.toModel(ownerId, now)
    val sections : Vector[QuestionSectionFrame] = questionJson.sections.zipWithIndex.map(s => sectionFrame(s._1, s._2))
    val userConstants : QuestionUserConstantsFrame = questionJson.userConstants.map(_.toModel).getOrElse(QuestionUserConstantsFrame.empty)
    QuestionFrame(question=question, sections=sections, skills = questionJson.skills.map(s => skillMap.getOrElse(s, throw new IllegalArgumentException("Skill not found for " + s)) ), userConstants = userConstants)
  }

  // Converts one JSON section (at position `index`) into a section frame,
  // dispatching on `partType` to build choice, function or sequence parts.
  private def sectionFrame(section: QuestionSectionJson, index: Int) : QuestionSectionFrame = {
    val questionSection = QuestionSection(id = null, questionId = null, explanationRaw = section.explanationRaw, explanationHtml = Html(section.explanationHtml), order = index.toShort)
    val parts : OneOfThree[ Vector[QuestionPartChoice], Vector[QuestionPartFunction], Vector[QuestionPartSequence] ] = section.partType match {
      case "choice" => {
        // correctChoiceIndex must address one of the declared choices
        if(section.correctChoiceIndex >= section.choices.size || section.correctChoiceIndex < 0) { throw new IllegalArgumentException("section.correctChoiceIndex did not match any section [" + section.correctChoiceIndex + "] " + section.choices.size) }
        First(section.choices.zipWithIndex.map(f => partChoice(f._1, f._2, NumericBoolean(section.correctChoiceIndex == f._2))))
      }
      case "function" => Second(section.functions.zipWithIndex.map(f => partFunction(f._1, f._2)))
      case "sequence" => Third(section.sequences.zipWithIndex.map(f => partSequence(f._1, f._2)))
      case _ => throw new IllegalArgumentException("section.partType was not recognized [" + section.partType + "]")
    }
    QuestionSectionFrame(questionSection, parts)
  }

  // Builds a choice part; `correct` marks whether this is the right answer.
  private def partChoice(part: QuestionPartChoiceJson, index: Int, correct: Short) : QuestionPartChoice = {
    QuestionPartChoice(id = null, sectionId = null, questionId = null, summaryRaw = part.summaryRaw, summaryHtml = Html(part.summaryHtml), correctChoice = correct, order = index.toShort)
  }

  // Builds a function part; parses the MathML (throws via .get on parse failure).
  private def partFunction(part: QuestionPartFunctionJson, index: Int) : QuestionPartFunction = {
    QuestionPartFunction(id = null, sectionId = null, questionId = null, summaryRaw = part.summaryRaw, summaryHtml = Html(part.summaryHtml), functionRaw = part.functionRaw, functionMath = MathML(part.functionMath).get, order = index.toShort)
  }

  // Builds a sequence part from its token-or-math representation.
  private def partSequence(part: QuestionPartSequenceJson, index: Int) : QuestionPartSequence = {
    QuestionPartSequence(id = null, sectionId = null, questionId = null, summaryRaw = part.summaryRaw, summaryHtml = Html(part.summaryHtml), sequenceStr = part.sequenceStr, sequenceMath = SequenceTokenOrMath(part.sequenceMath), order = index.toShort)
  }

  // ---- utility functions
  // Verifies the items' `order` fields are exactly 0..n-1 in sequence;
  // throws IllegalArgumentException (tagged with `message`) otherwise.
  def checkInOrder(items : Seq[HasOrder[_]], message: String) {
    val size = items.size
    if(items.map(_.order.toInt) != (0 until size)) { throw new IllegalArgumentException(message + " was not in order " + items) }
  }
}

// ======= QuestionUserConstantsFrame
/**
 * The per-question user constants, grouped by kind (integer, decimal, set).
 */
case class QuestionUserConstantsFrame(integers: Vector[QuestionUserConstantInteger], decimals: Vector[QuestionUserConstantDecimal], sets: Vector[QuestionUserConstantSet]) {

  // Stamps the (newly assigned) question id onto every constant.
  def questionId(questionId: QuestionId) : QuestionUserConstantsFrame = QuestionUserConstantsFrame(
    integers.map(_.copy(questionId = questionId)),
    decimals.map(_.copy(questionId = questionId)),
    sets.map(_.copy(questionId = questionId))
  )

  // All constants regardless of kind.
  def all: Vector[UserConstant] = integers ++ decimals ++ sets

  // Looks a constant up by name across all three kinds.
  def constant(name: String) = all.find(_.name == name)
}

object QuestionUserConstantsFrame {
  // Frame with no constants at all.
  val empty = QuestionUserConstantsFrame(Vector(), Vector(), Vector())

  // def apply(integers: Vector[QuestionUserConstantInteger], decimals: Vector[QuestionUserConstantDecimal], sets: Vector[QuestionUserConstantSet]): QuestionUserConstantsFrame = new QuestionUserConstantsFrame(integers, decimals, sets)

  // Convenience constructor from a 3-tuple of sequences.
  def apply(values : (Seq[QuestionUserConstantInteger], Seq[QuestionUserConstantDecimal], Seq[QuestionUserConstantSet]) ): QuestionUserConstantsFrame = new QuestionUserConstantsFrame(values._1.toVector, values._2.toVector, values._3.toVector)
}

// ======= QuestionSectionFrame
/**
 * A section plus its parts. Exactly one of the three part kinds is present
 * (enforced by the OneOfThree type). The constructor body validates the
 * parts: non-empty, in 0..n-1 order, and (for choices) at least one marked
 * correct.
 */
case class QuestionSectionFrame(section: QuestionSection, parts: OneOfThree[ Vector[QuestionPartChoice], Vector[QuestionPartFunction], Vector[QuestionPartSequence] ]) extends HasOrder[QuestionSectionFrame] {

  override def order = section.order

  // Index of the correct choice; None for function/sequence sections.
  def correctIndex = parts match {
    case First(choices) => Some(choices.indexWhere(_.correctChoice == NumericBoolean.T))
    case Second(functions) => None
    case Third(sequences) => None
  }

  // Number of choices; None for function/sequence sections.
  def choiceSize = parts match {
    case First(choices) => Some(choices.size)
    case Second(functions) => None
    case Third(sequences) => None
  }

  // String tag for the part kind, mirroring the JSON `partType` values.
  def partKind = parts match {
    case First(choices) => "choice"
    case Second(functions) => "function"
    case Third(sequences) => "sequence"
  }

  // Stamps the (newly assigned) section id onto the section and every part.
  def id(sectionId: QuestionSectionId) = QuestionSectionFrame(
    section = section.copy(id = sectionId),
    parts = parts match {
      case First(ps) => First(ps.map(p => p.copy(sectionId=sectionId)))
      case Second(ps) => Second(ps.map(p => p.copy(sectionId=sectionId)))
      case Third(ps) => Third(ps.map(p => p.copy(sectionId=sectionId)))
    }
  )

  // Stamps the (newly assigned) question id onto the section and every part.
  def questionId(questionId: QuestionId) = QuestionSectionFrame(
    section = section.copy(questionId = questionId),
    parts = parts match {
      case First(ps) => First(ps.map(p => p.copy(questionId=questionId)))
      case Second(ps) => Second(ps.map(p => p.copy(questionId=questionId)))
      case Third(ps) => Third(ps.map(p => p.copy(questionId=questionId)))
    }
  )

  // Bakes the user constants into the section and every part.
  def fixConstants(user: User, userConstants: QuestionUserConstantsFrame) = this.copy(
    section = section.fixConstants(user, userConstants),
    parts = parts match {
      case First(ps) => First( ps.map(p => p.fixConstants(user, userConstants)))
      case Second(ps) => Second(ps.map(p => p.fixConstants(user, userConstants)))
      case Third(ps) => Third( ps.map(p => p.fixConstants(user, userConstants)))
    }
  )

  // ==== throw errors for bad formulation
  parts match {
    case First(partChoices) => {
      if(partChoices.isEmpty) { throw new IllegalArgumentException("There were no partChoices"); }
      // fold-with-or: true iff at least one choice is flagged correct
      else if (!partChoices.map(_.correctChoice == NumericBoolean.T).fold(false)((a, b) => a || b)) { throw new IllegalArgumentException("There was no correct choice in " + partChoices) }
      QuestionFrame.checkInOrder(partChoices, "partChoices")
    }
    case Second(functionChoices) => {
      if(functionChoices.isEmpty) { throw new IllegalArgumentException("There were no functionChoices"); }
      QuestionFrame.checkInOrder(functionChoices, "functionChoices")
    }
    case Third(sequenceChoices) => {
      if(sequenceChoices.isEmpty) { throw new IllegalArgumentException("There were no sequenceChoices"); }
      QuestionFrame.checkInOrder(sequenceChoices, "sequenceChoices")
    }
  }
}

object QuestionSectionFrame {
  /**
   * Builds a section frame from up-to-three part collections, of which
   * exactly one must be non-empty; every other combination throws
   * IllegalArgumentException. The chosen collection is sorted (by order)
   * into a Vector.
   */
  def apply(section: QuestionSection, choices: Seq[QuestionPartChoice], functions: Seq[QuestionPartFunction], sequences: Seq[QuestionPartSequence]): QuestionSectionFrame =
    (choices.nonEmpty, functions.nonEmpty, sequences.nonEmpty) match {
      case (false, false, false) => throw new IllegalArgumentException("functions, choices and sequence were all null")
      case (true, true, true) => throw new IllegalArgumentException("functions, choices and sequence all had values: functions = " + functions + " choices = " + choices + " sequences = " + sequences)
      case (true, false, false) => QuestionSectionFrame(section, First( Vector(choices:_*).sorted ))
      case (false, true, false) => QuestionSectionFrame(section, Second( Vector(functions:_*).sorted ))
      case (false, false, true) => QuestionSectionFrame(section, Third( Vector(sequences:_*).sorted ))
      case _ => throw new IllegalArgumentException("two of functions, choices and sequence had values: functions = " + functions + " choices = " + choices + " sequences = " + sequences)
    }
}
kristiankime/calc-tutor
app/models/quiz/QuestionFrame.scala
Scala
mit
10,061
package org.geoscript.support.logic

/**
 * Type class describing how to treat values of type `Sentence` as sentences
 * of propositional logic: the True/False constants, the connectives
 * (not/and/or) with matching extractors, and entailment checks against a set
 * of given sentences.
 *
 * The nested `Ops` object packages each constructor/extractor pair as a
 * pattern-matchable object (`Not`, `And`, `Or`, `Literal`) so client code
 * can both build and deconstruct sentences with `case` syntax.
 */
trait Sentential[Sentence] {
  /** The constant false sentence. */
  val False: Sentence
  /** The constant true sentence. */
  val True: Sentence

  /** Negation constructor. */
  def not(p: Sentence): Sentence
  /** Extractor for negations: Some(inner) if `p` is a negation. */
  def extractNot(p: Sentence): Option[Sentence]

  /** Conjunction constructor. */
  def and(p: Sentence, q: Sentence): Sentence
  /** Extractor for conjunctions: Some((left, right)) if `p` is a conjunction. */
  def extractAnd(p: Sentence): Option[(Sentence, Sentence)]

  /** Disjunction constructor. */
  def or(p: Sentence, q: Sentence): Sentence
  /** Extractor for disjunctions: Some((left, right)) if `p` is a disjunction. */
  def extractOr(p: Sentence): Option[(Sentence, Sentence)]

  /** True when `p` is a literal (no further decomposition). */
  def isLiteral(p: Sentence): Boolean
  /** True when the given sentences prove `s`. */
  def provenBy(givens: Set[Sentence], s: Sentence): Boolean
  /** True when the given sentences disprove `s`. */
  def disprovenBy(givens: Set[Sentence], s: Sentence): Boolean

  /** apply/unapply wrappers over the abstract operations, for pattern matching. */
  object Ops {
    object Not {
      def apply(p: Sentence): Sentence = not(p)
      def unapply(p: Sentence): Option[Sentence] = extractNot(p)
    }

    object And {
      def apply(p: Sentence, q: Sentence): Sentence = and(p, q)
      def unapply(p: Sentence): Option[(Sentence, Sentence)] = extractAnd(p)
    }

    object Or {
      def apply(p: Sentence, q: Sentence): Sentence = or(p, q)
      def unapply(p: Sentence): Option[(Sentence, Sentence)] = extractOr(p)
    }

    object Literal {
      def unapply(p: Sentence): Boolean = isLiteral(p)
    }
  }
}
dwins/geoscript.scala
geocss/src/main/scala/org/geoscript/support/logic/Sentence.scala
Scala
mit
1,133
/*
 * Copyright 2017 HM Revenue & Customs
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package uk.gov.hmrc.ct.computations

import uk.gov.hmrc.ct.box.{CtBoxIdentifier, CtOptionalInteger, Linked}

/** Computation box CP515: an optional integer value. */
case class CP515(value: Option[Int]) extends CtBoxIdentifier with CtOptionalInteger

/** CP515 is a linked box: its value is copied verbatim from CP513. */
object CP515 extends Linked[CP513, CP515]{

  override def apply(source: CP513): CP515 = CP515(source.value)
}
liquidarmour/ct-calculations
src/main/scala/uk/gov/hmrc/ct/computations/CP515.scala
Scala
apache-2.0
910
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.util.sketch

import java.io.{ByteArrayInputStream, ByteArrayOutputStream}

import scala.reflect.ClassTag
import scala.util.Random

import org.scalatest.funsuite.AnyFunSuite // scalastyle:ignore funsuite

/**
 * Tests for [[BloomFilter]]: accuracy (false-positive rate), merge,
 * intersection, serialization round-trips and incompatible-merge errors,
 * parameterized over several item types.
 */
class BloomFilterSuite extends AnyFunSuite { // scalastyle:ignore funsuite
  // Tolerance for comparing observed vs expected false-positive rates.
  private final val EPSILON = 0.01

  // Serializes and deserializes a given `BloomFilter`, then checks whether the deserialized
  // version is equivalent to the original one.
  private def checkSerDe(filter: BloomFilter): Unit = {
    val out = new ByteArrayOutputStream()
    filter.writeTo(out)
    out.close()

    val in = new ByteArrayInputStream(out.toByteArray)
    val deserialized = BloomFilter.readFrom(in)
    in.close()

    assert(filter == deserialized)
  }

  // Inserts a tenth of `numItems` generated items, then checks no false
  // negatives and that both the reported and observed fpp stay near target.
  def testAccuracy[T: ClassTag](typeName: String, numItems: Int)(itemGen: Random => T): Unit = {
    test(s"accuracy - $typeName") {
      // use a fixed seed to make the test predictable.
      val r = new Random(37)

      val fpp = 0.05
      val numInsertion = numItems / 10

      val allItems = Array.fill(numItems)(itemGen(r))

      val filter = BloomFilter.create(numInsertion, fpp)

      // insert first `numInsertion` items.
      allItems.take(numInsertion).foreach(filter.put)

      // false negative is not allowed.
      assert(allItems.take(numInsertion).forall(filter.mightContain))

      // The number of inserted items doesn't exceed `expectedNumItems`, so the `expectedFpp`
      // should not be significantly higher than the one we passed in to create this bloom filter.
      assert(filter.expectedFpp() - fpp < EPSILON)

      val errorCount = allItems.drop(numInsertion).count(filter.mightContain)

      // Also check the actual fpp is not significantly higher than we expected.
      val actualFpp = errorCount.toDouble / (numItems - numInsertion)
      assert(actualFpp - fpp < EPSILON)

      checkSerDe(filter)
    }
  }

  // Merges two filters built from disjoint item sets and checks the union
  // contains everything from both, with fpp still near the default.
  def testMergeInPlace[T: ClassTag](typeName: String, numItems: Int)(itemGen: Random => T): Unit = {
    test(s"mergeInPlace - $typeName") {
      // use a fixed seed to make the test predictable.
      val r = new Random(37)

      val items1 = Array.fill(numItems / 2)(itemGen(r))
      val items2 = Array.fill(numItems / 2)(itemGen(r))

      val filter1 = BloomFilter.create(numItems)
      items1.foreach(filter1.put)

      val filter2 = BloomFilter.create(numItems)
      items2.foreach(filter2.put)

      filter1.mergeInPlace(filter2)

      // After merge, `filter1` has `numItems` items which doesn't exceed `expectedNumItems`, so the
      // `expectedFpp` should not be significantly higher than the default one.
      assert(filter1.expectedFpp() - BloomFilter.DEFAULT_FPP < EPSILON)

      items1.foreach(i => assert(filter1.mightContain(i)))
      items2.foreach(i => assert(filter1.mightContain(i)))

      checkSerDe(filter1)
    }
  }

  // Intersects two filters and checks items common to both inputs are still
  // reported as (possibly) present.
  def testIntersectInPlace[T: ClassTag]
    (typeName: String, numItems: Int)(itemGen: Random => T): Unit = {
    test(s"intersectInPlace - $typeName") {
      // use a fixed seed to make the test predictable.
      val r = new Random(37)

      val items1 = Array.fill(numItems / 2)(itemGen(r))
      val items2 = Array.fill(numItems / 2)(itemGen(r))

      val filter1 = BloomFilter.create(numItems / 2)
      items1.foreach(filter1.put)

      val filter2 = BloomFilter.create(numItems / 2)
      items2.foreach(filter2.put)

      filter1.intersectInPlace(filter2)

      // renamed from `common_items`: locals in this file use lowerCamelCase
      val commonItems = items1.intersect(items2)
      commonItems.foreach(i => assert(filter1.mightContain(i)))

      // After intersect, `filter1` still has `numItems/2` items
      // which doesn't exceed `expectedNumItems`,
      // so the `expectedFpp` should not be higher than the default one.
      assert(filter1.expectedFpp() - BloomFilter.DEFAULT_FPP < EPSILON)

      checkSerDe(filter1)
    }
  }

  // Registers the full battery (accuracy + merge + intersect) for one type.
  def testItemType[T: ClassTag](typeName: String, numItems: Int)(itemGen: Random => T): Unit = {
    testAccuracy[T](typeName, numItems)(itemGen)
    testMergeInPlace[T](typeName, numItems)(itemGen)
    testIntersectInPlace[T](typeName, numItems)(itemGen)
  }

  testItemType[Byte]("Byte", 160) { _.nextInt().toByte }

  testItemType[Short]("Short", 1000) { _.nextInt().toShort }

  testItemType[Int]("Int", 100000) { _.nextInt() }

  testItemType[Long]("Long", 100000) { _.nextLong() }

  testItemType[String]("String", 100000) { r => r.nextString(r.nextInt(512)) }

  test("incompatible merge") {
    intercept[IncompatibleMergeException] {
      BloomFilter.create(1000).mergeInPlace(null)
    }

    intercept[IncompatibleMergeException] {
      val filter1 = BloomFilter.create(1000, 6400)
      val filter2 = BloomFilter.create(1000, 3200)
      filter1.mergeInPlace(filter2)
    }

    intercept[IncompatibleMergeException] {
      val filter1 = BloomFilter.create(1000, 6400)
      val filter2 = BloomFilter.create(2000, 6400)
      filter1.mergeInPlace(filter2)
    }
  }
}
maropu/spark
common/sketch/src/test/scala/org/apache/spark/util/sketch/BloomFilterSuite.scala
Scala
apache-2.0
5,755
package nl.lpdiy.incubator.store

import java.io.File

import akka.actor._
import com.typesafe.config.ConfigFactory
import nl.lpdiy.incubator.Bootstrap
// note: the original file imported Bootstrap.{Shutdown, Start} and
// SerializationFormat twice; the redundant duplicates have been removed.
import nl.lpdiy.incubator.Bootstrap.{Shutdown, Start}
import nl.lpdiy.incubator.json.{SerializationFormat, OffsetDateTimeSerializer}
import ElasticSearchActor.IndexDocument
import nl.lpdiy.pishake.util.{ActorDescription, ActorSupport, FutureSupport}
import org.elasticsearch.common.settings.ImmutableSettings
import org.elasticsearch.node.NodeBuilder._
import org.json4s.native.Serialization._

import scala.language.{implicitConversions, postfixOps}

/**
 * Companion: actor-creation description plus the actor's message protocol.
 */
object ElasticSearchActor extends ActorDescription {

  // NOTE(review): `args` is accepted but not forwarded to Props — confirm
  // whether constructor arguments are intentionally ignored.
  def props(args: Any*): Props = Props[ElasticSearchActor]

  /** Ask the actor to index `any` (serialized to JSON) under index/type. */
  case class IndexDocument(index: String, `type`: String, any: AnyRef)
}

/**
 * Actor wrapping an embedded local Elasticsearch node.
 *
 * On `Start` it starts the node, on `Shutdown` it closes it, and on
 * `IndexDocument` it serializes the payload to JSON (json4s, with an
 * OffsetDateTime serializer) and indexes it synchronously.
 */
class ElasticSearchActor extends Actor with ActorLogging with FutureSupport with ActorSupport {

  // Settings are read from the "incubator.elasticsearch" section.
  private val config = ConfigFactory.load().getConfig("incubator.elasticsearch")

  // Data directory is created on demand.
  private val directory = {
    val file = new File(config.getString("data-directory"))
    if (!file.exists()) file.mkdirs()
    file
  }

  private val settings = ImmutableSettings.settingsBuilder
    .put("path.data", directory.getAbsolutePath)
    .put("cluster.name", config.getString("cluster-name"))
    .build

  // Node and client are lazy so nothing starts before the first Start message.
  private lazy val node = nodeBuilder.local(true).settings(settings).build
  private lazy val client = node.client()

  def receive = {
    case Start => node.start()
    case Shutdown => node.close()
    case IndexDocument(ind, typ, any) =>
      implicit val format = SerializationFormat(OffsetDateTimeSerializer)
      // blocking index call; actionGet() waits for the response
      client.prepareIndex(ind, typ).setSource(write(any)).execute().actionGet()
  }
}
dragoslav/incubator
core/src/main/scala/nl/lpdiy/incubator/store/ElasticSearchActor.scala
Scala
apache-2.0
1,781
package es.juanc.katas.fizzbuzz

/**
 * Application entry point: delegates to `FizzBuzz.print` with an upper
 * bound of 10, using `println` as the output sink.
 */
object Main {
  def main(args: Array[String]): Unit =
    FizzBuzz.print(10, println)
}
juancsch/katas
scala/SimpleKatas/src/main/scala/es/juanc/katas/fizzbuzz/Main.scala
Scala
unlicense
92
// Copyright: 2010 - 2018 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.indexer

import org.ensime.fixture._
import org.ensime.util.EnsimeSpec
import org.ensime.util.file._
import org.ensime.util.fileobject._

/** Exercises [[SourceResolver]] against JDK, dependency and project sources. */
class SourceResolverSpec
    extends EnsimeSpec
    with SharedEnsimeVFSFixture
    with SharedSourceResolverFixture
    with SourceResolverTestUtils {

  def original = EnsimeConfigFixture.SimpleTestProject

  "SourceResolver" should "resolve java sources in J2SE" in withSourceResolver {
    implicit resolver =>
      find("java.lang", "String.java") shouldBe Some("/java/lang/String.java")
  }

  it should "resolve scala sources in the project dependencies" in withSourceResolver {
    implicit resolver =>
      find("scala.collection.immutable", "List.scala") shouldBe Some(
        "/scala/collection/immutable/List.scala"
      )
      find("org.scalatest", "FunSpec.scala") shouldBe Some(
        "/org/scalatest/FunSpec.scala"
      )
  }

  it should "resolve sources in the project" in withSourceResolver { (cfg, res) =>
    implicit val config = cfg
    implicit val resolver = res
    find("org.example.Foo", "Foo.scala") shouldBe Some(
      (scalaMain / "org/example/Foo.scala").getAbsolutePath
    )
  }

  it should "should resolve files in parent directories in the project" in withSourceResolver {
    (cfg, res) =>
      implicit val config = cfg
      implicit val resolver = res
      find("org.example", "bad-convention.scala") shouldBe Some(
        (scalaMain / "bad-convention.scala").getAbsolutePath
      )
  }

  it should "resolve sources in the child directories in the project" in withSourceResolver {
    (cfg, res) =>
      implicit val config = cfg
      implicit val resolver = res
      find("org.util.set", "badconvention.scala") shouldBe Some(
        (scalaMain / "util/badconvention.scala").getAbsolutePath
      )
  }
}

/** Shared helper: resolve a single file within a package to a local path. */
trait SourceResolverTestUtils {
  def find(pkg: String, file: String)(implicit resolver: SourceResolver) = {
    val segments = pkg.split('.').toList
    resolver.resolve(PackageName(segments), RawSource(Some(file), None)).map { found =>
      // Entries inside an archive keep their in-archive path; plain files use
      // the absolute local path (same branching as the original pattern match).
      if (found.pathWithinArchive.isDefined) found.getName.getPath
      else found.asLocalFile.getAbsolutePath
    }
  }
}
yyadavalli/ensime-server
core/src/it/scala/org/ensime/indexer/SourceResolverSpec.scala
Scala
gpl-3.0
2,292
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.util import java.io._ import java.lang.{Byte => JByte} import java.lang.management.{LockInfo, ManagementFactory, MonitorInfo, ThreadInfo} import java.lang.reflect.InvocationTargetException import java.math.{MathContext, RoundingMode} import java.net._ import java.nio.ByteBuffer import java.nio.channels.{Channels, FileChannel, WritableByteChannel} import java.nio.charset.StandardCharsets import java.nio.file.Files import java.security.SecureRandom import java.util.{Locale, Properties, Random, UUID} import java.util.concurrent._ import java.util.concurrent.TimeUnit.NANOSECONDS import java.util.zip.GZIPInputStream import scala.annotation.tailrec import scala.collection.JavaConverters._ import scala.collection.Map import scala.collection.mutable.ArrayBuffer import scala.io.Source import scala.reflect.ClassTag import scala.util.{Failure, Success, Try} import scala.util.control.{ControlThrowable, NonFatal} import scala.util.matching.Regex import _root_.io.netty.channel.unix.Errors.NativeIoException import com.google.common.cache.{CacheBuilder, CacheLoader, LoadingCache} import com.google.common.io.{ByteStreams, Files => GFiles} import com.google.common.net.InetAddresses import 
org.apache.commons.codec.binary.Hex import org.apache.commons.lang3.SystemUtils import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.{FileSystem, FileUtil, Path} import org.apache.hadoop.io.compress.{CompressionCodecFactory, SplittableCompressionCodec} import org.apache.hadoop.security.UserGroupInformation import org.apache.hadoop.util.{RunJar, StringUtils} import org.apache.hadoop.yarn.conf.YarnConfiguration import org.eclipse.jetty.util.MultiException import org.slf4j.Logger import org.apache.spark._ import org.apache.spark.deploy.SparkHadoopUtil import org.apache.spark.internal.{config, Logging} import org.apache.spark.internal.config._ import org.apache.spark.internal.config.Streaming._ import org.apache.spark.internal.config.Tests.IS_TESTING import org.apache.spark.internal.config.UI._ import org.apache.spark.internal.config.Worker._ import org.apache.spark.launcher.SparkLauncher import org.apache.spark.network.util.JavaUtils import org.apache.spark.serializer.{DeserializationStream, SerializationStream, SerializerInstance} import org.apache.spark.status.api.v1.{StackTrace, ThreadStackTrace} import org.apache.spark.util.io.ChunkedByteBufferOutputStream /** CallSite represents a place in user code. It can have a short and a long form. */ private[spark] case class CallSite(shortForm: String, longForm: String) private[spark] object CallSite { val SHORT_FORM = "callSite.short" val LONG_FORM = "callSite.long" val empty = CallSite("", "") } /** * Various utility methods used by Spark. 
*/ private[spark] object Utils extends Logging { val random = new Random() private val sparkUncaughtExceptionHandler = new SparkUncaughtExceptionHandler @volatile private var cachedLocalDir: String = "" /** * Define a default value for driver memory here since this value is referenced across the code * base and nearly all files already use Utils.scala */ val DEFAULT_DRIVER_MEM_MB = JavaUtils.DEFAULT_DRIVER_MEM_MB.toInt private val MAX_DIR_CREATION_ATTEMPTS: Int = 10 @volatile private var localRootDirs: Array[String] = null /** Scheme used for files that are locally available on worker nodes in the cluster. */ val LOCAL_SCHEME = "local" private val PATTERN_FOR_COMMAND_LINE_ARG = "-D(.+?)=(.+)".r /** Serialize an object using Java serialization */ def serialize[T](o: T): Array[Byte] = { val bos = new ByteArrayOutputStream() val oos = new ObjectOutputStream(bos) oos.writeObject(o) oos.close() bos.toByteArray } /** Deserialize an object using Java serialization */ def deserialize[T](bytes: Array[Byte]): T = { val bis = new ByteArrayInputStream(bytes) val ois = new ObjectInputStream(bis) ois.readObject.asInstanceOf[T] } /** Deserialize an object using Java serialization and the given ClassLoader */ def deserialize[T](bytes: Array[Byte], loader: ClassLoader): T = { val bis = new ByteArrayInputStream(bytes) val ois = new ObjectInputStream(bis) { override def resolveClass(desc: ObjectStreamClass): Class[_] = { // scalastyle:off classforname Class.forName(desc.getName, false, loader) // scalastyle:on classforname } } ois.readObject.asInstanceOf[T] } /** Deserialize a Long value (used for [[org.apache.spark.api.python.PythonPartitioner]]) */ def deserializeLongValue(bytes: Array[Byte]) : Long = { // Note: we assume that we are given a Long value encoded in network (big-endian) byte order var result = bytes(7) & 0xFFL result = result + ((bytes(6) & 0xFFL) << 8) result = result + ((bytes(5) & 0xFFL) << 16) result = result + ((bytes(4) & 0xFFL) << 24) result = result + 
((bytes(3) & 0xFFL) << 32) result = result + ((bytes(2) & 0xFFL) << 40) result = result + ((bytes(1) & 0xFFL) << 48) result + ((bytes(0) & 0xFFL) << 56) } /** Serialize via nested stream using specific serializer */ def serializeViaNestedStream(os: OutputStream, ser: SerializerInstance)( f: SerializationStream => Unit): Unit = { val osWrapper = ser.serializeStream(new OutputStream { override def write(b: Int): Unit = os.write(b) override def write(b: Array[Byte], off: Int, len: Int): Unit = os.write(b, off, len) }) try { f(osWrapper) } finally { osWrapper.close() } } /** Deserialize via nested stream using specific serializer */ def deserializeViaNestedStream(is: InputStream, ser: SerializerInstance)( f: DeserializationStream => Unit): Unit = { val isWrapper = ser.deserializeStream(new InputStream { override def read(): Int = is.read() override def read(b: Array[Byte], off: Int, len: Int): Int = is.read(b, off, len) }) try { f(isWrapper) } finally { isWrapper.close() } } /** * Get the ClassLoader which loaded Spark. */ def getSparkClassLoader: ClassLoader = getClass.getClassLoader /** * Get the Context ClassLoader on this thread or, if not present, the ClassLoader that * loaded Spark. * * This should be used whenever passing a ClassLoader to Class.ForName or finding the currently * active loader when setting up ClassLoader delegation chains. */ def getContextOrSparkClassLoader: ClassLoader = Option(Thread.currentThread().getContextClassLoader).getOrElse(getSparkClassLoader) /** Determines whether the provided class is loadable in the current thread. */ def classIsLoadable(clazz: String): Boolean = { Try { classForName(clazz, initialize = false) }.isSuccess } // scalastyle:off classforname /** * Preferred alternative to Class.forName(className), as well as * Class.forName(className, initialize, loader) with current thread's ContextClassLoader. 
*/ def classForName[C]( className: String, initialize: Boolean = true, noSparkClassLoader: Boolean = false): Class[C] = { if (!noSparkClassLoader) { Class.forName(className, initialize, getContextOrSparkClassLoader).asInstanceOf[Class[C]] } else { Class.forName(className, initialize, Thread.currentThread().getContextClassLoader). asInstanceOf[Class[C]] } // scalastyle:on classforname } /** * Run a segment of code using a different context class loader in the current thread */ def withContextClassLoader[T](ctxClassLoader: ClassLoader)(fn: => T): T = { val oldClassLoader = Thread.currentThread().getContextClassLoader() try { Thread.currentThread().setContextClassLoader(ctxClassLoader) fn } finally { Thread.currentThread().setContextClassLoader(oldClassLoader) } } /** * Primitive often used when writing [[java.nio.ByteBuffer]] to [[java.io.DataOutput]] */ def writeByteBuffer(bb: ByteBuffer, out: DataOutput): Unit = { if (bb.hasArray) { out.write(bb.array(), bb.arrayOffset() + bb.position(), bb.remaining()) } else { val originalPosition = bb.position() val bbval = new Array[Byte](bb.remaining()) bb.get(bbval) out.write(bbval) bb.position(originalPosition) } } /** * Primitive often used when writing [[java.nio.ByteBuffer]] to [[java.io.OutputStream]] */ def writeByteBuffer(bb: ByteBuffer, out: OutputStream): Unit = { if (bb.hasArray) { out.write(bb.array(), bb.arrayOffset() + bb.position(), bb.remaining()) } else { val originalPosition = bb.position() val bbval = new Array[Byte](bb.remaining()) bb.get(bbval) out.write(bbval) bb.position(originalPosition) } } /** * JDK equivalent of `chmod 700 file`. * * @param file the file whose permissions will be modified * @return true if the permissions were successfully changed, false otherwise. 
*/ def chmod700(file: File): Boolean = { file.setReadable(false, false) && file.setReadable(true, true) && file.setWritable(false, false) && file.setWritable(true, true) && file.setExecutable(false, false) && file.setExecutable(true, true) } /** * Create a directory given the abstract pathname * @return true, if the directory is successfully created; otherwise, return false. */ def createDirectory(dir: File): Boolean = { try { // This sporadically fails - not sure why ... !dir.exists() && !dir.mkdirs() // So attempting to create and then check if directory was created or not. dir.mkdirs() if ( !dir.exists() || !dir.isDirectory) { logError(s"Failed to create directory " + dir) } dir.isDirectory } catch { case e: Exception => logError(s"Failed to create directory " + dir, e) false } } /** * Create a directory inside the given parent directory. The directory is guaranteed to be * newly created, and is not marked for automatic deletion. */ def createDirectory(root: String, namePrefix: String = "spark"): File = { var attempts = 0 val maxAttempts = MAX_DIR_CREATION_ATTEMPTS var dir: File = null while (dir == null) { attempts += 1 if (attempts > maxAttempts) { throw new IOException("Failed to create a temp directory (under " + root + ") after " + maxAttempts + " attempts!") } try { dir = new File(root, namePrefix + "-" + UUID.randomUUID.toString) if (dir.exists() || !dir.mkdirs()) { dir = null } } catch { case e: SecurityException => dir = null; } } dir.getCanonicalFile } /** * Create a temporary directory inside the given parent directory. The directory will be * automatically deleted when the VM shuts down. */ def createTempDir( root: String = System.getProperty("java.io.tmpdir"), namePrefix: String = "spark"): File = { val dir = createDirectory(root, namePrefix) ShutdownHookManager.registerShutdownDeleteDir(dir) dir } /** * Copy all data from an InputStream to an OutputStream. 
NIO way of file stream to file stream * copying is disabled by default unless explicitly set transferToEnabled as true, * the parameter transferToEnabled should be configured by spark.file.transferTo = [true|false]. */ def copyStream( in: InputStream, out: OutputStream, closeStreams: Boolean = false, transferToEnabled: Boolean = false): Long = { tryWithSafeFinally { if (in.isInstanceOf[FileInputStream] && out.isInstanceOf[FileOutputStream] && transferToEnabled) { // When both streams are File stream, use transferTo to improve copy performance. val inChannel = in.asInstanceOf[FileInputStream].getChannel() val outChannel = out.asInstanceOf[FileOutputStream].getChannel() val size = inChannel.size() copyFileStreamNIO(inChannel, outChannel, 0, size) size } else { var count = 0L val buf = new Array[Byte](8192) var n = 0 while (n != -1) { n = in.read(buf) if (n != -1) { out.write(buf, 0, n) count += n } } count } } { if (closeStreams) { try { in.close() } finally { out.close() } } } } /** * Copy the first `maxSize` bytes of data from the InputStream to an in-memory * buffer, primarily to check for corruption. * * This returns a new InputStream which contains the same data as the original input stream. * It may be entirely on in-memory buffer, or it may be a combination of in-memory data, and then * continue to read from the original stream. The only real use of this is if the original input * stream will potentially detect corruption while the data is being read (e.g. from compression). * This allows for an eager check of corruption in the first maxSize bytes of data. 
* * @return An InputStream which includes all data from the original stream (combining buffered * data and remaining data in the original stream) */ def copyStreamUpTo(in: InputStream, maxSize: Long): InputStream = { var count = 0L val out = new ChunkedByteBufferOutputStream(64 * 1024, ByteBuffer.allocate) val fullyCopied = tryWithSafeFinally { val bufSize = Math.min(8192L, maxSize) val buf = new Array[Byte](bufSize.toInt) var n = 0 while (n != -1 && count < maxSize) { n = in.read(buf, 0, Math.min(maxSize - count, bufSize).toInt) if (n != -1) { out.write(buf, 0, n) count += n } } count < maxSize } { try { if (count < maxSize) { in.close() } } finally { out.close() } } if (fullyCopied) { out.toChunkedByteBuffer.toInputStream(dispose = true) } else { new SequenceInputStream( out.toChunkedByteBuffer.toInputStream(dispose = true), in) } } def copyFileStreamNIO( input: FileChannel, output: WritableByteChannel, startPosition: Long, bytesToCopy: Long): Unit = { val outputInitialState = output match { case outputFileChannel: FileChannel => Some((outputFileChannel.position(), outputFileChannel)) case _ => None } var count = 0L // In case transferTo method transferred less data than we have required. while (count < bytesToCopy) { count += input.transferTo(count + startPosition, bytesToCopy - count, output) } assert(count == bytesToCopy, s"request to copy $bytesToCopy bytes, but actually copied $count bytes.") // Check the position after transferTo loop to see if it is in the right position and // give user information if not. // Position will not be increased to the expected length after calling transferTo in // kernel version 2.6.32, this issue can be seen in // https://bugs.openjdk.java.net/browse/JDK-7052359 // This will lead to stream corruption issue when using sort-based shuffle (SPARK-3948). 
outputInitialState.foreach { case (initialPos, outputFileChannel) => val finalPos = outputFileChannel.position() val expectedPos = initialPos + bytesToCopy assert(finalPos == expectedPos, s""" |Current position $finalPos do not equal to expected position $expectedPos |after transferTo, please check your kernel version to see if it is 2.6.32, |this is a kernel bug which will lead to unexpected behavior when using transferTo. |You can set spark.file.transferTo = false to disable this NIO feature. """.stripMargin) } } /** * A file name may contain some invalid URI characters, such as " ". This method will convert the * file name to a raw path accepted by `java.net.URI(String)`. * * Note: the file name must not contain "/" or "\" */ def encodeFileNameToURIRawPath(fileName: String): String = { require(!fileName.contains("/") && !fileName.contains("\\")) // `file` and `localhost` are not used. Just to prevent URI from parsing `fileName` as // scheme or host. The prefix "/" is required because URI doesn't accept a relative path. // We should remove it after we get the raw path. new URI("file", null, "localhost", -1, "/" + fileName, null, null).getRawPath.substring(1) } /** * Get the file name from uri's raw path and decode it. If the raw path of uri ends with "/", * return the name before the last "/". */ def decodeFileNameInURI(uri: URI): String = { val rawPath = uri.getRawPath val rawFileName = rawPath.split("/").last new URI("file:///" + rawFileName).getPath.substring(1) } /** * Download a file or directory to target directory. Supports fetching the file in a variety of * ways, including HTTP, Hadoop-compatible filesystems, and files on a standard filesystem, based * on the URL parameter. Fetching directories is only supported from Hadoop-compatible * filesystems. * * If `useCache` is true, first attempts to fetch the file to a local cache that's shared * across executors running the same application. 
`useCache` is used mainly for * the executors, and not in local mode. * * Throws SparkException if the target file already exists and has different contents than * the requested file. * * If `shouldUntar` is true, it untars the given url if it is a tar.gz or tgz into `targetDir`. * This is a legacy behavior, and users should better use `spark.archives` configuration or * `SparkContext.addArchive` */ def fetchFile( url: String, targetDir: File, conf: SparkConf, hadoopConf: Configuration, timestamp: Long, useCache: Boolean, shouldUntar: Boolean = true): File = { val fileName = decodeFileNameInURI(new URI(url)) val targetFile = new File(targetDir, fileName) val fetchCacheEnabled = conf.getBoolean("spark.files.useFetchCache", defaultValue = true) if (useCache && fetchCacheEnabled) { val cachedFileName = s"${url.hashCode}${timestamp}_cache" val lockFileName = s"${url.hashCode}${timestamp}_lock" // Set the cachedLocalDir for the first time and re-use it later if (cachedLocalDir.isEmpty) { this.synchronized { if (cachedLocalDir.isEmpty) { cachedLocalDir = getLocalDir(conf) } } } val localDir = new File(cachedLocalDir) val lockFile = new File(localDir, lockFileName) val lockFileChannel = new RandomAccessFile(lockFile, "rw").getChannel() // Only one executor entry. // The FileLock is only used to control synchronization for executors download file, // it's always safe regardless of lock type (mandatory or advisory). 
val lock = lockFileChannel.lock() val cachedFile = new File(localDir, cachedFileName) try { if (!cachedFile.exists()) { doFetchFile(url, localDir, cachedFileName, conf, hadoopConf) } } finally { lock.release() lockFileChannel.close() } copyFile( url, cachedFile, targetFile, conf.getBoolean("spark.files.overwrite", false) ) } else { doFetchFile(url, targetDir, fileName, conf, hadoopConf) } if (shouldUntar) { // Decompress the file if it's a .tar or .tar.gz if (fileName.endsWith(".tar.gz") || fileName.endsWith(".tgz")) { logWarning( "Untarring behavior will be deprecated at spark.files and " + "SparkContext.addFile. Consider using spark.archives or SparkContext.addArchive " + "instead.") logInfo("Untarring " + fileName) executeAndGetOutput(Seq("tar", "-xzf", fileName), targetDir) } else if (fileName.endsWith(".tar")) { logWarning( "Untarring behavior will be deprecated at spark.files and " + "SparkContext.addFile. Consider using spark.archives or SparkContext.addArchive " + "instead.") logInfo("Untarring " + fileName) executeAndGetOutput(Seq("tar", "-xf", fileName), targetDir) } } // Make the file executable - That's necessary for scripts FileUtil.chmod(targetFile.getAbsolutePath, "a+x") // Windows does not grant read permission by default to non-admin users // Add read permission to owner explicitly if (isWindows) { FileUtil.chmod(targetFile.getAbsolutePath, "u+r") } targetFile } /** * Unpacks an archive file into the specified directory. It expects .jar, .zip, .tar.gz, .tgz * and .tar files. This behaves same as Hadoop's archive in distributed cache. This method is * basically copied from `org.apache.hadoop.yarn.util.FSDownload.unpack`. 
*/ def unpack(source: File, dest: File): Unit = { val lowerSrc = StringUtils.toLowerCase(source.getName) if (lowerSrc.endsWith(".jar")) { RunJar.unJar(source, dest, RunJar.MATCH_ANY) } else if (lowerSrc.endsWith(".zip")) { FileUtil.unZip(source, dest) } else if ( lowerSrc.endsWith(".tar.gz") || lowerSrc.endsWith(".tgz") || lowerSrc.endsWith(".tar")) { FileUtil.unTar(source, dest) } else { logWarning(s"Cannot unpack $source, just copying it to $dest.") copyRecursive(source, dest) } } /** Records the duration of running `body`. */ def timeTakenMs[T](body: => T): (T, Long) = { val startTime = System.nanoTime() val result = body val endTime = System.nanoTime() (result, math.max(NANOSECONDS.toMillis(endTime - startTime), 0)) } /** * Download `in` to `tempFile`, then move it to `destFile`. * * If `destFile` already exists: * - no-op if its contents equal those of `sourceFile`, * - throw an exception if `fileOverwrite` is false, * - attempt to overwrite it otherwise. * * @param url URL that `sourceFile` originated from, for logging purposes. * @param in InputStream to download. * @param destFile File path to move `tempFile` to. * @param fileOverwrite Whether to delete/overwrite an existing `destFile` that does not match * `sourceFile` */ private def downloadFile( url: String, in: InputStream, destFile: File, fileOverwrite: Boolean): Unit = { val tempFile = File.createTempFile("fetchFileTemp", null, new File(destFile.getParentFile.getAbsolutePath)) logInfo(s"Fetching $url to $tempFile") try { val out = new FileOutputStream(tempFile) Utils.copyStream(in, out, closeStreams = true) copyFile(url, tempFile, destFile, fileOverwrite, removeSourceFile = true) } finally { // Catch-all for the couple of cases where for some reason we didn't move `tempFile` to // `destFile`. if (tempFile.exists()) { tempFile.delete() } } } /** * Copy `sourceFile` to `destFile`. 
* * If `destFile` already exists: * - no-op if its contents equal those of `sourceFile`, * - throw an exception if `fileOverwrite` is false, * - attempt to overwrite it otherwise. * * @param url URL that `sourceFile` originated from, for logging purposes. * @param sourceFile File path to copy/move from. * @param destFile File path to copy/move to. * @param fileOverwrite Whether to delete/overwrite an existing `destFile` that does not match * `sourceFile` * @param removeSourceFile Whether to remove `sourceFile` after / as part of moving/copying it to * `destFile`. */ private def copyFile( url: String, sourceFile: File, destFile: File, fileOverwrite: Boolean, removeSourceFile: Boolean = false): Unit = { if (destFile.exists) { if (!filesEqualRecursive(sourceFile, destFile)) { if (fileOverwrite) { logInfo( s"File $destFile exists and does not match contents of $url, replacing it with $url" ) if (!destFile.delete()) { throw new SparkException( "Failed to delete %s while attempting to overwrite it with %s".format( destFile.getAbsolutePath, sourceFile.getAbsolutePath ) ) } } else { throw new SparkException( s"File $destFile exists and does not match contents of $url") } } else { // Do nothing if the file contents are the same, i.e. this file has been copied // previously. logInfo( "%s has been previously copied to %s".format( sourceFile.getAbsolutePath, destFile.getAbsolutePath ) ) return } } // The file does not exist in the target directory. Copy or move it there. 
if (removeSourceFile) { Files.move(sourceFile.toPath, destFile.toPath) } else { logInfo(s"Copying ${sourceFile.getAbsolutePath} to ${destFile.getAbsolutePath}") copyRecursive(sourceFile, destFile) } } private def filesEqualRecursive(file1: File, file2: File): Boolean = { if (file1.isDirectory && file2.isDirectory) { val subfiles1 = file1.listFiles() val subfiles2 = file2.listFiles() if (subfiles1.size != subfiles2.size) { return false } subfiles1.sortBy(_.getName).zip(subfiles2.sortBy(_.getName)).forall { case (f1, f2) => filesEqualRecursive(f1, f2) } } else if (file1.isFile && file2.isFile) { GFiles.equal(file1, file2) } else { false } } private def copyRecursive(source: File, dest: File): Unit = { if (source.isDirectory) { if (!dest.mkdir()) { throw new IOException(s"Failed to create directory ${dest.getPath}") } val subfiles = source.listFiles() subfiles.foreach(f => copyRecursive(f, new File(dest, f.getName))) } else { Files.copy(source.toPath, dest.toPath) } } /** * Download a file or directory to target directory. Supports fetching the file in a variety of * ways, including HTTP, Hadoop-compatible filesystems, and files on a standard filesystem, based * on the URL parameter. Fetching directories is only supported from Hadoop-compatible * filesystems. * * Throws SparkException if the target file already exists and has different contents than * the requested file. 
*/ def doFetchFile( url: String, targetDir: File, filename: String, conf: SparkConf, hadoopConf: Configuration): File = { val targetFile = new File(targetDir, filename) val uri = new URI(url) val fileOverwrite = conf.getBoolean("spark.files.overwrite", defaultValue = false) Option(uri.getScheme).getOrElse("file") match { case "spark" => if (SparkEnv.get == null) { throw new IllegalStateException( "Cannot retrieve files with 'spark' scheme without an active SparkEnv.") } val source = SparkEnv.get.rpcEnv.openChannel(url) val is = Channels.newInputStream(source) downloadFile(url, is, targetFile, fileOverwrite) case "http" | "https" | "ftp" => val uc = new URL(url).openConnection() val timeoutMs = conf.getTimeAsSeconds("spark.files.fetchTimeout", "60s").toInt * 1000 uc.setConnectTimeout(timeoutMs) uc.setReadTimeout(timeoutMs) uc.connect() val in = uc.getInputStream() downloadFile(url, in, targetFile, fileOverwrite) case "file" => // In the case of a local file, copy the local file to the target directory. // Note the difference between uri vs url. val sourceFile = if (uri.isAbsolute) new File(uri) else new File(uri.getPath) copyFile(url, sourceFile, targetFile, fileOverwrite) case _ => val fs = getHadoopFileSystem(uri, hadoopConf) val path = new Path(uri) fetchHcfsFile(path, targetDir, fs, conf, hadoopConf, fileOverwrite, filename = Some(filename)) } targetFile } /** * Fetch a file or directory from a Hadoop-compatible filesystem. 
* * Visible for testing */ private[spark] def fetchHcfsFile( path: Path, targetDir: File, fs: FileSystem, conf: SparkConf, hadoopConf: Configuration, fileOverwrite: Boolean, filename: Option[String] = None): Unit = { if (!targetDir.exists() && !targetDir.mkdir()) { throw new IOException(s"Failed to create directory ${targetDir.getPath}") } val dest = new File(targetDir, filename.getOrElse(path.getName)) if (fs.isFile(path)) { val in = fs.open(path) try { downloadFile(path.toString, in, dest, fileOverwrite) } finally { in.close() } } else { fs.listStatus(path).foreach { fileStatus => fetchHcfsFile(fileStatus.getPath(), dest, fs, conf, hadoopConf, fileOverwrite) } } } /** * Validate that a given URI is actually a valid URL as well. * @param uri The URI to validate */ @throws[MalformedURLException]("when the URI is an invalid URL") def validateURL(uri: URI): Unit = { Option(uri.getScheme).getOrElse("file") match { case "http" | "https" | "ftp" => try { uri.toURL } catch { case e: MalformedURLException => val ex = new MalformedURLException(s"URI (${uri.toString}) is not a valid URL.") ex.initCause(e) throw ex } case _ => // will not be turned into a URL anyway } } /** * Get the path of a temporary directory. Spark's local directories can be configured through * multiple settings, which are used with the following precedence: * * - If called from inside of a YARN container, this will return a directory chosen by YARN. * - If the SPARK_LOCAL_DIRS environment variable is set, this will return a directory from it. * - Otherwise, if the spark.local.dir is set, this will return a directory from it. * - Otherwise, this will return java.io.tmpdir. * * Some of these configuration options might be lists of multiple paths, but this method will * always return a single directory. The return directory is chosen randomly from the array * of directories it gets from getOrCreateLocalRootDirs. 
*/ def getLocalDir(conf: SparkConf): String = { val localRootDirs = getOrCreateLocalRootDirs(conf) if (localRootDirs.isEmpty) { val configuredLocalDirs = getConfiguredLocalDirs(conf) throw new IOException( s"Failed to get a temp directory under [${configuredLocalDirs.mkString(",")}].") } else { localRootDirs(scala.util.Random.nextInt(localRootDirs.length)) } } private[spark] def isRunningInYarnContainer(conf: SparkConf): Boolean = { // These environment variables are set by YARN. conf.getenv("CONTAINER_ID") != null } /** * Gets or creates the directories listed in spark.local.dir or SPARK_LOCAL_DIRS, * and returns only the directories that exist / could be created. * * If no directories could be created, this will return an empty list. * * This method will cache the local directories for the application when it's first invoked. * So calling it multiple times with a different configuration will always return the same * set of directories. */ private[spark] def getOrCreateLocalRootDirs(conf: SparkConf): Array[String] = { if (localRootDirs == null) { this.synchronized { if (localRootDirs == null) { localRootDirs = getOrCreateLocalRootDirsImpl(conf) } } } localRootDirs } /** * Return the configured local directories where Spark can write files. This * method does not create any directories on its own, it only encapsulates the * logic of locating the local directories according to deployment mode. */ def getConfiguredLocalDirs(conf: SparkConf): Array[String] = { val shuffleServiceEnabled = conf.get(config.SHUFFLE_SERVICE_ENABLED) if (isRunningInYarnContainer(conf)) { // If we are in yarn mode, systems can have different disk layouts so we must set it // to what Yarn on this system said was available. Note this assumes that Yarn has // created the directories already, and that they are secured so that only the // user has access to them. 
    randomizeInPlace(getYarnLocalDirs(conf).split(","))
  } else if (conf.getenv("SPARK_EXECUTOR_DIRS") != null) {
    conf.getenv("SPARK_EXECUTOR_DIRS").split(File.pathSeparator)
  } else if (conf.getenv("SPARK_LOCAL_DIRS") != null) {
    conf.getenv("SPARK_LOCAL_DIRS").split(",")
  } else if (conf.getenv("MESOS_SANDBOX") != null && !shuffleServiceEnabled) {
    // Mesos already creates a directory per Mesos task. Spark should use that directory
    // instead so all temporary files are automatically cleaned up when the Mesos task ends.
    // Note that we don't want this if the shuffle service is enabled because we want to
    // continue to serve shuffle files after the executors that wrote them have already exited.
    Array(conf.getenv("MESOS_SANDBOX"))
  } else {
    if (conf.getenv("MESOS_SANDBOX") != null && shuffleServiceEnabled) {
      logInfo("MESOS_SANDBOX available but not using provided Mesos sandbox because " +
        s"${config.SHUFFLE_SERVICE_ENABLED.key} is enabled.")
    }
    // In non-Yarn mode (or for the driver in yarn-client mode), we cannot trust the user
    // configuration to point to a secure directory. So create a subdirectory with restricted
    // permissions under each listed directory.
    conf.get("spark.local.dir", System.getProperty("java.io.tmpdir")).split(",")
  }
}

// Resolves the configured local dirs to real, secured (0700) temp subdirectories,
// dropping any entry that cannot be created.
private def getOrCreateLocalRootDirsImpl(conf: SparkConf): Array[String] = {
  val configuredLocalDirs = getConfiguredLocalDirs(conf)
  val uris = configuredLocalDirs.filter { root =>
    // Here, we guess if the given value is a URI at its best - check if scheme is set.
    Try(new URI(root).getScheme != null).getOrElse(false)
  }
  if (uris.nonEmpty) {
    logWarning(
      "The configured local directories are not expected to be URIs; however, got suspicious " +
        s"values [${uris.mkString(", ")}]. Please check your configured local directories.")
  }

  configuredLocalDirs.flatMap { root =>
    try {
      val rootDir = new File(root)
      if (rootDir.exists || rootDir.mkdirs()) {
        val dir = createTempDir(root)
        // Restrict the temp dir to the owning user.
        chmod700(dir)
        Some(dir.getAbsolutePath)
      } else {
        logError(s"Failed to create dir in $root. Ignoring this directory.")
        None
      }
    } catch {
      case e: IOException =>
        logError(s"Failed to create local root dir in $root. Ignoring this directory.")
        None
    }
  }
}

/** Get the Yarn approved local directories. */
private def getYarnLocalDirs(conf: SparkConf): String = {
  // LOCAL_DIRS is set by the YARN NodeManager for each container.
  val localDirs = Option(conf.getenv("LOCAL_DIRS")).getOrElse("")

  if (localDirs.isEmpty) {
    throw new Exception("Yarn Local dirs can't be empty")
  }
  localDirs
}

/** Used by unit tests. Do not call from other places. */
private[spark] def clearLocalRootDirs(): Unit = {
  localRootDirs = null
}

/**
 * Shuffle the elements of a collection into a random order, returning the
 * result in a new collection. Unlike scala.util.Random.shuffle, this method
 * uses a local random number generator, avoiding inter-thread contention.
 */
def randomize[T: ClassTag](seq: TraversableOnce[T]): Seq[T] = {
  randomizeInPlace(seq.toArray)
}

/**
 * Shuffle the elements of an array into a random order, modifying the
 * original array. Returns the original array.
 */
def randomizeInPlace[T](arr: Array[T], rand: Random = new Random): Array[T] = {
  // Fisher-Yates shuffle: swap each position with a uniformly chosen earlier (or same) slot.
  for (i <- (arr.length - 1) to 1 by -1) {
    val j = rand.nextInt(i + 1)
    val tmp = arr(j)
    arr(j) = arr(i)
    arr(i) = tmp
  }
  arr
}

/**
 * Get the local host's IP address in dotted-quad format (e.g. 1.2.3.4).
 * Note, this is typically not used from within core spark.
 */
private lazy val localIpAddress: InetAddress = findLocalInetAddress()

// Resolve a non-loopback local address, honoring the SPARK_LOCAL_IP override.
private def findLocalInetAddress(): InetAddress = {
  val defaultIpOverride = System.getenv("SPARK_LOCAL_IP")
  if (defaultIpOverride != null) {
    InetAddress.getByName(defaultIpOverride)
  } else {
    val address = InetAddress.getLocalHost
    if (address.isLoopbackAddress) {
      // Address resolves to something like 127.0.1.1, which happens on Debian; try to find
      // a better address using the local network interfaces
      // getNetworkInterfaces returns ifs in reverse order compared to ifconfig output order
      // on unix-like system. On windows, it returns in index order.
      // It's more proper to pick ip address following system output order.
      val activeNetworkIFs = NetworkInterface.getNetworkInterfaces.asScala.toSeq
      val reOrderedNetworkIFs = if (isWindows) activeNetworkIFs else activeNetworkIFs.reverse

      for (ni <- reOrderedNetworkIFs) {
        val addresses = ni.getInetAddresses.asScala
          .filterNot(addr => addr.isLinkLocalAddress || addr.isLoopbackAddress).toSeq
        if (addresses.nonEmpty) {
          // Prefer IPv4 when available on the interface.
          val addr = addresses.find(_.isInstanceOf[Inet4Address]).getOrElse(addresses.head)
          // because of Inet6Address.toHostName may add interface at the end if it knows about it
          val strippedAddress = InetAddress.getByAddress(addr.getAddress)
          // We've found an address that looks reasonable!
          logWarning("Your hostname, " + InetAddress.getLocalHost.getHostName + " resolves to" +
            " a loopback address: " + address.getHostAddress + "; using " +
            strippedAddress.getHostAddress + " instead (on interface " + ni.getName + ")")
          logWarning("Set SPARK_LOCAL_IP if you need to bind to another address")
          return strippedAddress
        }
      }
      logWarning("Your hostname, " + InetAddress.getLocalHost.getHostName + " resolves to" +
        " a loopback address: " + address.getHostAddress + ", but we couldn't find any" +
        " external IP address!")
      logWarning("Set SPARK_LOCAL_IP if you need to bind to another address")
    }
    address
  }
}

// Custom hostname override; defaults to SPARK_LOCAL_HOSTNAME if set in the environment.
private var customHostname: Option[String] = sys.env.get("SPARK_LOCAL_HOSTNAME")

/**
 * Allow setting a custom host name because when we run on Mesos we need to use the same
 * hostname it reports to the master.
 */
def setCustomHostname(hostname: String): Unit = {
  // DEBUG code
  Utils.checkHost(hostname)
  customHostname = Some(hostname)
}

/**
 * Get the local machine's FQDN.
 */
def localCanonicalHostName(): String = {
  customHostname.getOrElse(localIpAddress.getCanonicalHostName)
}

/**
 * Get the local machine's hostname.
 */
def localHostName(): String = {
  customHostname.getOrElse(localIpAddress.getHostAddress)
}

/**
 * Get the local machine's URI.
 */
def localHostNameForURI(): String = {
  customHostname.getOrElse(InetAddresses.toUriString(localIpAddress))
}

/**
 * Checks if the host contains only valid hostname/ip without port
 * NOTE: In case of an IPv6 IP it should be enclosed inside []
 */
def checkHost(host: String): Unit = {
  if (host != null && host.split(":").length > 2) {
    // More than one ':' implies IPv6; it must be bracketed and carry no port.
    assert(host.startsWith("[") && host.endsWith("]"),
      s"Expected hostname or IPv6 IP enclosed in [] but got $host")
  } else {
    assert(host != null && host.indexOf(':') == -1, s"Expected hostname or IP but got $host")
  }
}

// Checks that the string contains both a host and a port (IPv6 hosts must be bracketed).
def checkHostPort(hostPort: String): Unit = {
  if (hostPort != null && hostPort.split(":").length > 2) {
    assert(hostPort != null && hostPort.indexOf("]:") != -1,
      s"Expected host and port but got $hostPort")
  } else {
    assert(hostPort != null && hostPort.indexOf(':') != -1,
      s"Expected host and port but got $hostPort")
  }
}

// Typically, this will be of order of number of nodes in cluster
// If not, we should change it to LRUCache or something.
private val hostPortParseResults = new ConcurrentHashMap[String, (String, Int)]()

// Split a "host:port" (or "[ipv6]:port") string; port defaults to 0 when absent.
// Results are memoized in hostPortParseResults.
def parseHostPort(hostPort: String): (String, Int) = {
  // Check cache first.
  val cached = hostPortParseResults.get(hostPort)
  if (cached != null) {
    return cached
  }

  def setDefaultPortValue: (String, Int) = {
    val retval = (hostPort, 0)
    hostPortParseResults.put(hostPort, retval)
    retval
  }
  // checks if the hostport contains IPV6 ip and parses the host, port
  if (hostPort != null && hostPort.split(":").length > 2) {
    val index: Int = hostPort.lastIndexOf("]:")
    if (-1 == index) {
      return setDefaultPortValue
    }
    val port = hostPort.substring(index + 2).trim()
    val retval = (hostPort.substring(0, index + 1).trim(), if (port.isEmpty) 0 else port.toInt)
    hostPortParseResults.putIfAbsent(hostPort, retval)
  } else {
    val index: Int = hostPort.lastIndexOf(':')
    if (-1 == index) {
      return setDefaultPortValue
    }
    val port = hostPort.substring(index + 1).trim()
    val retval = (hostPort.substring(0, index).trim(), if (port.isEmpty) 0 else port.toInt)
    hostPortParseResults.putIfAbsent(hostPort, retval)
  }
  hostPortParseResults.get(hostPort)
}

/**
 * Return the string to tell how long has passed in milliseconds.
 * @param startTimeNs - a timestamp in nanoseconds returned by `System.nanoTime`.
 */
def getUsedTimeNs(startTimeNs: Long): String = {
  s"${TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTimeNs)} ms"
}

/**
 * Delete a file or directory and its contents recursively.
 * Don't follow directories if they are symlinks.
 * Throws an exception if deletion is unsuccessful.
 */
def deleteRecursively(file: File): Unit = {
  if (file != null) {
    JavaUtils.deleteRecursively(file)
    // Also drop it from the shutdown-hook cleanup list since it's already gone.
    ShutdownHookManager.removeShutdownDeleteDir(file)
  }
}

/**
 * Determines if a directory contains any files newer than cutoff seconds.
 *
 * @param dir must be the path to a directory, or IllegalArgumentException is thrown
 * @param cutoff measured in seconds.
 *               Returns true if there are any files or directories in the
 *               given directory whose last modified time is later than this many seconds ago
 */
def doesDirectoryContainAnyNewFiles(dir: File, cutoff: Long): Boolean = {
  if (!dir.isDirectory) {
    throw new IllegalArgumentException(s"$dir is not a directory!")
  }
  val filesAndDirs = dir.listFiles()
  val cutoffTimeInMillis = System.currentTimeMillis - (cutoff * 1000)

  // New file at this level, or recurse into subdirectories.
  filesAndDirs.exists(_.lastModified() > cutoffTimeInMillis) ||
  filesAndDirs.filter(_.isDirectory).exists(
    subdir => doesDirectoryContainAnyNewFiles(subdir, cutoff)
  )
}

/**
 * Convert a time parameter such as (50s, 100ms, or 250us) to milliseconds for internal use. If
 * no suffix is provided, the passed number is assumed to be in ms.
 */
def timeStringAsMs(str: String): Long = {
  JavaUtils.timeStringAsMs(str)
}

/**
 * Convert a time parameter such as (50s, 100ms, or 250us) to seconds for internal use. If
 * no suffix is provided, the passed number is assumed to be in seconds.
 */
def timeStringAsSeconds(str: String): Long = {
  JavaUtils.timeStringAsSec(str)
}

/**
 * Convert a passed byte string (e.g. 50b, 100k, or 250m) to bytes for internal use.
 *
 * If no suffix is provided, the passed number is assumed to be in bytes.
 */
def byteStringAsBytes(str: String): Long = {
  JavaUtils.byteStringAsBytes(str)
}

/**
 * Convert a passed byte string (e.g. 50b, 100k, or 250m) to kibibytes for internal use.
 *
 * If no suffix is provided, the passed number is assumed to be in kibibytes.
 */
def byteStringAsKb(str: String): Long = {
  JavaUtils.byteStringAsKb(str)
}

/**
 * Convert a passed byte string (e.g. 50b, 100k, or 250m) to mebibytes for internal use.
 *
 * If no suffix is provided, the passed number is assumed to be in mebibytes.
 */
def byteStringAsMb(str: String): Long = {
  JavaUtils.byteStringAsMb(str)
}

/**
 * Convert a passed byte string (e.g. 50b, 100k, or 250m, 500g) to gibibytes for internal use.
 *
 * If no suffix is provided, the passed number is assumed to be in gibibytes.
 */
def byteStringAsGb(str: String): Long = {
  JavaUtils.byteStringAsGb(str)
}

/**
 * Convert a Java memory parameter passed to -Xmx (such as 300m or 1g) to a number of mebibytes.
 */
def memoryStringToMb(str: String): Int = {
  // Convert to bytes, rather than directly to MiB, because when no units are specified the unit
  // is assumed to be bytes
  (JavaUtils.byteStringAsBytes(str) / 1024 / 1024).toInt
}

/**
 * Convert a quantity in bytes to a human-readable string such as "4.0 MiB".
 */
def bytesToString(size: Long): String = bytesToString(BigInt(size))

def bytesToString(size: BigInt): String = {
  val EiB = 1L << 60
  val PiB = 1L << 50
  val TiB = 1L << 40
  val GiB = 1L << 30
  val MiB = 1L << 20
  val KiB = 1L << 10
  if (size >= BigInt(1L << 11) * EiB) {
    // The number is too large, show it in scientific notation.
    BigDecimal(size, new MathContext(3, RoundingMode.HALF_UP)).toString() + " B"
  } else {
    // Pick the largest unit for which the value is at least 2 of that unit.
    val (value, unit) = {
      if (size >= 2 * EiB) {
        (BigDecimal(size) / EiB, "EiB")
      } else if (size >= 2 * PiB) {
        (BigDecimal(size) / PiB, "PiB")
      } else if (size >= 2 * TiB) {
        (BigDecimal(size) / TiB, "TiB")
      } else if (size >= 2 * GiB) {
        (BigDecimal(size) / GiB, "GiB")
      } else if (size >= 2 * MiB) {
        (BigDecimal(size) / MiB, "MiB")
      } else if (size >= 2 * KiB) {
        (BigDecimal(size) / KiB, "KiB")
      } else {
        (BigDecimal(size), "B")
      }
    }
    "%.1f %s".formatLocal(Locale.US, value, unit)
  }
}

/**
 * Returns a human-readable string representing a duration such as "35ms"
 */
def msDurationToString(ms: Long): String = {
  val second = 1000
  val minute = 60 * second
  val hour = 60 * minute
  val locale = Locale.US

  ms match {
    case t if t < second =>
      "%d ms".formatLocal(locale, t)
    case t if t < minute =>
      "%.1f s".formatLocal(locale, t.toFloat / second)
    case t if t < hour =>
      "%.1f m".formatLocal(locale, t.toFloat / minute)
    case t =>
      "%.2f h".formatLocal(locale, t.toFloat / hour)
  }
}

/**
 * Convert a quantity in megabytes to a human-readable string such as "4.0 MiB".
 */
def megabytesToString(megabytes: Long): String = {
  bytesToString(megabytes * 1024L * 1024L)
}

/**
 * Execute a command and return the process running the command.
 */
def executeCommand(
    command: Seq[String],
    workingDir: File = new File("."),
    extraEnvironment: Map[String, String] = Map.empty,
    redirectStderr: Boolean = true): Process = {
  val builder = new ProcessBuilder(command: _*).directory(workingDir)
  val environment = builder.environment()
  for ((key, value) <- extraEnvironment) {
    environment.put(key, value)
  }
  val process = builder.start()
  if (redirectStderr) {
    // Drain stderr on a daemon thread so the child process cannot block on a full pipe.
    val threadName = "redirect stderr for command " + command(0)
    def log(s: String): Unit = logInfo(s)
    processStreamByLine(threadName, process.getErrorStream, log)
  }
  process
}

/**
 * Execute a command and get its output, throwing an exception if it yields a code other than 0.
 */
def executeAndGetOutput(
    command: Seq[String],
    workingDir: File = new File("."),
    extraEnvironment: Map[String, String] = Map.empty,
    redirectStderr: Boolean = true): String = {
  val process = executeCommand(command, workingDir, extraEnvironment, redirectStderr)
  val output = new StringBuilder
  val threadName = "read stdout for " + command(0)
  def appendToOutput(s: String): Unit = output.append(s).append("\n")
  val stdoutThread = processStreamByLine(threadName, process.getInputStream, appendToOutput)
  val exitCode = process.waitFor()
  stdoutThread.join()   // Wait for it to finish reading output
  if (exitCode != 0) {
    logError(s"Process $command exited with code $exitCode: $output")
    throw new SparkException(s"Process $command exited with code $exitCode")
  }
  output.toString
}

/**
 * Return and start a daemon thread that processes the content of the input stream line by line.
 */
def processStreamByLine(
    threadName: String,
    inputStream: InputStream,
    processLine: String => Unit): Thread = {
  val t = new Thread(threadName) {
    override def run(): Unit = {
      for (line <- Source.fromInputStream(inputStream).getLines()) {
        processLine(line)
      }
    }
  }
  t.setDaemon(true)
  t.start()
  t
}

/**
 * Execute a block of code that evaluates to Unit, forwarding any uncaught exceptions to the
 * default UncaughtExceptionHandler
 *
 * NOTE: This method is to be called by the spark-started JVM process.
 */
def tryOrExit(block: => Unit): Unit = {
  try {
    block
  } catch {
    // Let control-flow throwables (e.g. non-local returns) propagate untouched.
    case e: ControlThrowable => throw e
    case t: Throwable => sparkUncaughtExceptionHandler.uncaughtException(t)
  }
}

/**
 * Execute a block of code that evaluates to Unit, stop SparkContext if there is any uncaught
 * exception
 *
 * NOTE: This method is to be called by the driver-side components to avoid stopping the
 * user-started JVM process completely; in contrast, tryOrExit is to be called in the
 * spark-started JVM process .
 */
def tryOrStopSparkContext(sc: SparkContext)(block: => Unit): Unit = {
  try {
    block
  } catch {
    case e: ControlThrowable => throw e
    case t: Throwable =>
      val currentThreadName = Thread.currentThread().getName
      if (sc != null) {
        logError(s"uncaught error in thread $currentThreadName, stopping SparkContext", t)
        sc.stopInNewThread()
      }
      if (!NonFatal(t)) {
        // Fatal errors (OOM etc.) are rethrown after the context shutdown is initiated.
        logError(s"throw uncaught fatal error in thread $currentThreadName", t)
        throw t
      }
  }
}

/**
 * Execute a block of code that returns a value, re-throwing any non-fatal uncaught
 * exceptions as IOException. This is used when implementing Externalizable and Serializable's
 * read and write methods, since Java's serializer will not report non-IOExceptions properly;
 * see SPARK-4080 for more context.
 */
def tryOrIOException[T](block: => T): T = {
  try {
    block
  } catch {
    case e: IOException =>
      logError("Exception encountered", e)
      throw e
    case NonFatal(e) =>
      logError("Exception encountered", e)
      throw new IOException(e)
  }
}

/** Executes the given block.
 * Log non-fatal errors if any, and only throw fatal errors */
def tryLogNonFatalError(block: => Unit): Unit = {
  try {
    block
  } catch {
    case NonFatal(t) =>
      logError(s"Uncaught exception in thread ${Thread.currentThread().getName}", t)
  }
}

/**
 * Execute a block of code, then a finally block, but if exceptions happen in
 * the finally block, do not suppress the original exception.
 *
 * This is primarily an issue with `finally { out.close() }` blocks, where
 * close needs to be called to clean up `out`, but if an exception happened
 * in `out.write`, it's likely `out` may be corrupted and `out.close` will
 * fail as well. This would then suppress the original/likely more meaningful
 * exception from the original `out.write` call.
 */
def tryWithSafeFinally[T](block: => T)(finallyBlock: => Unit): T = {
  var originalThrowable: Throwable = null
  try {
    block
  } catch {
    case t: Throwable =>
      // Purposefully not using NonFatal, because even fatal exceptions
      // we don't want to have our finallyBlock suppress
      originalThrowable = t
      throw originalThrowable
  } finally {
    try {
      finallyBlock
    } catch {
      case t: Throwable if (originalThrowable != null && originalThrowable != t) =>
        // Keep the original failure primary; attach the finally failure as suppressed.
        originalThrowable.addSuppressed(t)
        logWarning(s"Suppressing exception in finally: ${t.getMessage}", t)
        throw originalThrowable
    }
  }
}

/**
 * Execute a block of code and call the failure callbacks in the catch block. If exceptions occur
 * in either the catch or the finally block, they are appended to the list of suppressed
 * exceptions in original exception which is then rethrown.
 *
 * This is primarily an issue with `catch { abort() }` or `finally { out.close() }` blocks,
 * where the abort/close needs to be called to clean up `out`, but if an exception happened
 * in `out.write`, it's likely `out` may be corrupted and `abort` or `out.close` will
 * fail as well. This would then suppress the original/likely more meaningful
 * exception from the original `out.write` call.
 */
def tryWithSafeFinallyAndFailureCallbacks[T](block: => T)
    (catchBlock: => Unit = (), finallyBlock: => Unit = ()): T = {
  var originalThrowable: Throwable = null
  try {
    block
  } catch {
    case cause: Throwable =>
      // Purposefully not using NonFatal, because even fatal exceptions
      // we don't want to have our finallyBlock suppress
      originalThrowable = cause
      try {
        logError("Aborting task", originalThrowable)
        if (TaskContext.get() != null) {
          TaskContext.get().markTaskFailed(originalThrowable)
        }
        catchBlock
      } catch {
        case t: Throwable =>
          if (originalThrowable != t) {
            originalThrowable.addSuppressed(t)
            logWarning(s"Suppressing exception in catch: ${t.getMessage}", t)
          }
      }
      throw originalThrowable
  } finally {
    try {
      finallyBlock
    } catch {
      case t: Throwable if (originalThrowable != null && originalThrowable != t) =>
        originalThrowable.addSuppressed(t)
        logWarning(s"Suppressing exception in finally: ${t.getMessage}", t)
        throw originalThrowable
    }
  }
}

// A regular expression to match classes of the internal Spark API's
// that we want to skip when finding the call site of a method.
private val SPARK_CORE_CLASS_REGEX =
  """^org\.apache\.spark(\.api\.java)?(\.util)?(\.rdd)?(\.broadcast)?\.[A-Z]""".r
private val SPARK_SQL_CLASS_REGEX = """^org\.apache\.spark\.sql.*""".r

/** Default filtering function for finding call sites using `getCallSite`. */
private def sparkInternalExclusionFunction(className: String): Boolean = {
  val SCALA_CORE_CLASS_PREFIX = "scala"
  val isSparkClass = SPARK_CORE_CLASS_REGEX.findFirstIn(className).isDefined ||
    SPARK_SQL_CLASS_REGEX.findFirstIn(className).isDefined
  val isScalaClass = className.startsWith(SCALA_CORE_CLASS_PREFIX)
  // If the class is a Spark internal class or a Scala class, then exclude.
  isSparkClass || isScalaClass
}

/**
 * When called inside a class in the spark package, returns the name of the user code class
 * (outside the spark package) that called into Spark, as well as which Spark method they called.
 * This is used, for example, to tell users where in their code each RDD got created.
 *
 * @param skipClass Function that is used to exclude non-user-code classes.
 */
def getCallSite(skipClass: String => Boolean = sparkInternalExclusionFunction): CallSite = {
  // Keep crawling up the stack trace until we find the first function not inside of the spark
  // package. We track the last (shallowest) contiguous Spark method. This might be an RDD
  // transformation, a SparkContext function (such as parallelize), or anything else that leads
  // to instantiation of an RDD. We also track the first (deepest) user method, file, and line.
  var lastSparkMethod = "<unknown>"
  var firstUserFile = "<unknown>"
  var firstUserLine = 0
  var insideSpark = true
  val callStack = new ArrayBuffer[String]() :+ "<unknown>"

  Thread.currentThread.getStackTrace().foreach { ste: StackTraceElement =>
    // When running under some profilers, the current stack trace might contain some bogus
    // frames. This is intended to ensure that we don't crash in these situations by
    // ignoring any frames that we can't examine.
    if (ste != null && ste.getMethodName != null
      && !ste.getMethodName.contains("getStackTrace")) {
      if (insideSpark) {
        if (skipClass(ste.getClassName)) {
          lastSparkMethod = if (ste.getMethodName == "<init>") {
            // Spark method is a constructor; get its class name
            ste.getClassName.substring(ste.getClassName.lastIndexOf('.') + 1)
          } else {
            ste.getMethodName
          }
          callStack(0) = ste.toString // Put last Spark method on top of the stack trace.
        } else {
          if (ste.getFileName != null) {
            firstUserFile = ste.getFileName
            if (ste.getLineNumber >= 0) {
              firstUserLine = ste.getLineNumber
            }
          }
          callStack += ste.toString
          insideSpark = false
        }
      } else {
        callStack += ste.toString
      }
    }
  }

  val callStackDepth = System.getProperty("spark.callstack.depth", "20").toInt
  val shortForm =
    if (firstUserFile == "HiveSessionImpl.java") {
      // To be more user friendly, show a nicer string for queries submitted from the JDBC
      // server.
      "Spark JDBC Server Query"
    } else {
      s"$lastSparkMethod at $firstUserFile:$firstUserLine"
    }
  val longForm = callStack.take(callStackDepth).mkString("\n")

  CallSite(shortForm, longForm)
}

// Lazily-built cache mapping a .gz file path to its uncompressed length; guarded by
// getCompressedLogFileLengthCache's synchronization.
private var compressedLogFileLengthCache: LoadingCache[String, java.lang.Long] = null

private def getCompressedLogFileLengthCache(
    sparkConf: SparkConf): LoadingCache[String, java.lang.Long] = this.synchronized {
  if (compressedLogFileLengthCache == null) {
    val compressedLogFileLengthCacheSize = sparkConf.get(
      UNCOMPRESSED_LOG_FILE_LENGTH_CACHE_SIZE_CONF)
    compressedLogFileLengthCache = CacheBuilder.newBuilder()
      .maximumSize(compressedLogFileLengthCacheSize)
      .build[String, java.lang.Long](new CacheLoader[String, java.lang.Long]() {
        override def load(path: String): java.lang.Long = {
          Utils.getCompressedFileLength(new File(path))
        }
      })
  }
  compressedLogFileLengthCache
}

/**
 * Return the file length, if the file is compressed it returns the uncompressed file length.
 * It also caches the uncompressed file size to avoid repeated decompression. The cache size is
 * read from workerConf.
 */
def getFileLength(file: File, workConf: SparkConf): Long = {
  if (file.getName.endsWith(".gz")) {
    getCompressedLogFileLengthCache(workConf).get(file.getAbsolutePath)
  } else {
    file.length
  }
}

/** Return uncompressed file length of a compressed file. */
private def getCompressedFileLength(file: File): Long = {
  var gzInputStream: GZIPInputStream = null
  try {
    // Uncompress .gz file to determine file size.
var fileSize = 0L gzInputStream = new GZIPInputStream(new FileInputStream(file)) val bufSize = 1024 val buf = new Array[Byte](bufSize) var numBytes = ByteStreams.read(gzInputStream, buf, 0, bufSize) while (numBytes > 0) { fileSize += numBytes numBytes = ByteStreams.read(gzInputStream, buf, 0, bufSize) } fileSize } catch { case e: Throwable => logError(s"Cannot get file length of ${file}", e) throw e } finally { if (gzInputStream != null) { gzInputStream.close() } } } /** Return a string containing part of a file from byte 'start' to 'end'. */ def offsetBytes(path: String, length: Long, start: Long, end: Long): String = { val file = new File(path) val effectiveEnd = math.min(length, end) val effectiveStart = math.max(0, start) val buff = new Array[Byte]((effectiveEnd-effectiveStart).toInt) val stream = if (path.endsWith(".gz")) { new GZIPInputStream(new FileInputStream(file)) } else { new FileInputStream(file) } try { ByteStreams.skipFully(stream, effectiveStart) ByteStreams.readFully(stream, buff) } finally { stream.close() } Source.fromBytes(buff).mkString } /** * Return a string containing data across a set of files. The `startIndex` * and `endIndex` is based on the cumulative size of all the files take in * the given order. See figure below for more details. 
*/ def offsetBytes(files: Seq[File], fileLengths: Seq[Long], start: Long, end: Long): String = { assert(files.length == fileLengths.length) val startIndex = math.max(start, 0) val endIndex = math.min(end, fileLengths.sum) val fileToLength = files.zip(fileLengths).toMap logDebug("Log files: \n" + fileToLength.mkString("\n")) val stringBuffer = new StringBuffer((endIndex - startIndex).toInt) var sum = 0L files.zip(fileLengths).foreach { case (file, fileLength) => val startIndexOfFile = sum val endIndexOfFile = sum + fileToLength(file) logDebug(s"Processing file $file, " + s"with start index = $startIndexOfFile, end index = $endIndex") /* ____________ range 1: | | | case A | files: |==== file 1 ====|====== file 2 ======|===== file 3 =====| | case B . case C . case D | range 2: |___________.____________________.______________| */ if (startIndex <= startIndexOfFile && endIndex >= endIndexOfFile) { // Case C: read the whole file stringBuffer.append(offsetBytes(file.getAbsolutePath, fileLength, 0, fileToLength(file))) } else if (startIndex > startIndexOfFile && startIndex < endIndexOfFile) { // Case A and B: read from [start of required range] to [end of file / end of range] val effectiveStartIndex = startIndex - startIndexOfFile val effectiveEndIndex = math.min(endIndex - startIndexOfFile, fileToLength(file)) stringBuffer.append(Utils.offsetBytes( file.getAbsolutePath, fileLength, effectiveStartIndex, effectiveEndIndex)) } else if (endIndex > startIndexOfFile && endIndex < endIndexOfFile) { // Case D: read from [start of file] to [end of require range] val effectiveStartIndex = math.max(startIndex - startIndexOfFile, 0) val effectiveEndIndex = endIndex - startIndexOfFile stringBuffer.append(Utils.offsetBytes( file.getAbsolutePath, fileLength, effectiveStartIndex, effectiveEndIndex)) } sum += fileToLength(file) logDebug(s"After processing file $file, string built is ${stringBuffer.toString}") } stringBuffer.toString } /** * Clone an object using a Spark serializer. 
   */
  def clone[T: ClassTag](value: T, serializer: SerializerInstance): T = {
    // Round-trips the value through the serializer (serialize then deserialize),
    // which yields a deep copy for any type the serializer supports.
    serializer.deserialize[T](serializer.serialize(value))
  }

  // Whitespace set recognized by splitCommandString (space, tab, CR, LF).
  private def isSpace(c: Char): Boolean = {
    " \t\r\n".indexOf(c) != -1
  }

  /**
   * Split a string of potentially quoted arguments from the command line the way that a shell
   * would do it to determine arguments to a command. For example, if the string is 'a "b c" d',
   * then it would be parsed as three arguments: 'a', 'b c' and 'd'.
   *
   * Implemented as a small state machine over the characters of `s`, tracking whether we are
   * inside a word, a single-quoted region, or a double-quoted region. Note that an unterminated
   * quote at the end of the input still flushes the partial word (see the final `endWord()`).
   */
  def splitCommandString(s: String): Seq[String] = {
    val buf = new ArrayBuffer[String]
    var inWord = false
    var inSingleQuote = false
    var inDoubleQuote = false
    val curWord = new StringBuilder
    // Flushes the accumulated characters as one completed argument.
    def endWord(): Unit = {
      buf += curWord.toString
      curWord.clear()
    }
    var i = 0
    while (i < s.length) {
      val nextChar = s.charAt(i)
      if (inDoubleQuote) {
        if (nextChar == '"') {
          inDoubleQuote = false
        } else if (nextChar == '\\') {
          if (i < s.length - 1) {
            // Append the next character directly, because only " and \ may be escaped in
            // double quotes after the shell's own expansion
            curWord.append(s.charAt(i + 1))
            i += 1
          }
        } else {
          curWord.append(nextChar)
        }
      } else if (inSingleQuote) {
        if (nextChar == '\'') {
          inSingleQuote = false
        } else {
          curWord.append(nextChar)
        }
        // Backslashes are not treated specially in single quotes
      } else if (nextChar == '"') {
        inWord = true
        inDoubleQuote = true
      } else if (nextChar == '\'') {
        inWord = true
        inSingleQuote = true
      } else if (!isSpace(nextChar)) {
        curWord.append(nextChar)
        inWord = true
      } else if (inWord && isSpace(nextChar)) {
        // Unquoted whitespace terminates the current argument.
        endWord()
        inWord = false
      }
      i += 1
    }
    if (inWord || inDoubleQuote || inSingleQuote) {
      endWord()
    }
    buf.toSeq
  }

  /* Calculates 'x' modulo 'mod', taking the sign of x into consideration:
   * if 'x' is negative then 'x' % 'mod' is negative too,
   * so the function returns (x % mod) + mod in that case.
*/ def nonNegativeMod(x: Int, mod: Int): Int = { val rawMod = x % mod rawMod + (if (rawMod < 0) mod else 0) } // Handles idiosyncrasies with hash (add more as required) // This method should be kept in sync with // org.apache.spark.network.util.JavaUtils#nonNegativeHash(). def nonNegativeHash(obj: AnyRef): Int = { // Required ? if (obj eq null) return 0 val hash = obj.hashCode // math.abs fails for Int.MinValue val hashAbs = if (Int.MinValue != hash) math.abs(hash) else 0 // Nothing else to guard against ? hashAbs } /** * Returns the system properties map that is thread-safe to iterator over. It gets the * properties which have been set explicitly, as well as those for which only a default value * has been defined. */ def getSystemProperties: Map[String, String] = { System.getProperties.stringPropertyNames().asScala .map(key => (key, System.getProperty(key))).toMap } /** * Method executed for repeating a task for side effects. * Unlike a for comprehension, it permits JVM JIT optimization */ def times(numIters: Int)(f: => Unit): Unit = { var i = 0 while (i < numIters) { f i += 1 } } /** * Timing method based on iterations that permit JVM JIT optimization. * * @param numIters number of iterations * @param f function to be executed. If prepare is not None, the running time of each call to f * must be an order of magnitude longer than one nanosecond for accurate timing. * @param prepare function to be executed before each call to f. Its running time doesn't count. * @return the total time across all iterations (not counting preparation time) in nanoseconds. 
   */
  def timeIt(numIters: Int)(f: => Unit, prepare: Option[() => Unit] = None): Long = {
    if (prepare.isEmpty) {
      // No per-iteration setup: measure the whole batch with a single pair of timestamps.
      val startNs = System.nanoTime()
      times(numIters)(f)
      System.nanoTime() - startNs
    } else {
      // Run `prepare` before each iteration, timestamping around `f` only so that
      // preparation cost is excluded from the accumulated total.
      var i = 0
      var sum = 0L
      while (i < numIters) {
        prepare.get.apply()
        val startNs = System.nanoTime()
        f
        sum += System.nanoTime() - startNs
        i += 1
      }
      sum
    }
  }

  /**
   * Counts the number of elements of an iterator using a while loop rather than calling
   * [[scala.collection.Iterator#size]] because it uses a for loop, which is slightly slower
   * in the current version of Scala.
   *
   * Note: this consumes (exhausts) the given iterator.
   */
  def getIteratorSize(iterator: Iterator[_]): Long = {
    var count = 0L
    while (iterator.hasNext) {
      count += 1L
      iterator.next()
    }
    count
  }

  /**
   * Generate a zipWithIndex iterator, avoid index value overflowing problem
   * in scala's zipWithIndex
   *
   * Indices are Longs starting at `startIndex`, so inputs longer than Int.MaxValue are safe.
   */
  def getIteratorZipWithIndex[T](iter: Iterator[T], startIndex: Long): Iterator[(T, Long)] = {
    new Iterator[(T, Long)] {
      require(startIndex >= 0, "startIndex should be >= 0.")
      // Start one below so the first next() yields exactly startIndex.
      var index: Long = startIndex - 1L
      def hasNext: Boolean = iter.hasNext
      def next(): (T, Long) = {
        index += 1L
        (iter.next(), index)
      }
    }
  }

  /**
   * Creates a symlink.
   *
   * @param src absolute path to the source
   * @param dst relative path for the destination
   * @throws IOException if `src` is not absolute or `dst` is not relative
   */
  def symlink(src: File, dst: File): Unit = {
    if (!src.isAbsolute()) {
      throw new IOException("Source must be absolute")
    }
    if (dst.isAbsolute()) {
      throw new IOException("Destination must be relative")
    }
    Files.createSymbolicLink(dst.toPath, src.toPath)
  }

  /** Return the class name of the given object, removing all dollar signs */
  def getFormattedClassName(obj: AnyRef): String = {
    getSimpleName(obj.getClass).replace("$", "")
  }

  /**
   * Return a Hadoop FileSystem with the scheme encoded in the given path.
   */
  def getHadoopFileSystem(path: URI, conf: Configuration): FileSystem = {
    FileSystem.get(path, conf)
  }

  /**
   * Return a Hadoop FileSystem with the scheme encoded in the given path.
   */
  def getHadoopFileSystem(path: String, conf: Configuration): FileSystem = {
    // Convenience overload: parse the string into a URI and delegate.
    getHadoopFileSystem(new URI(path), conf)
  }

  /**
   * Whether the underlying operating system is Windows.
   */
  val isWindows = SystemUtils.IS_OS_WINDOWS

  /**
   * Whether the underlying operating system is Mac OS X.
   */
  val isMac = SystemUtils.IS_OS_MAC_OSX

  /**
   * Pattern for matching a Windows drive, which contains only a single alphabet character.
   */
  val windowsDrive = "([a-zA-Z])".r

  /**
   * Indicates whether Spark is currently running unit tests.
   */
  def isTesting: Boolean = {
    // Scala's `sys.env` creates a ton of garbage by constructing Scala immutable maps, so
    // we directly use the Java APIs instead.
    System.getenv("SPARK_TESTING") != null || System.getProperty(IS_TESTING.key) != null
  }

  /**
   * Terminates a process waiting for at most the specified duration.
   *
   * Tries a graceful destroy() first; only escalates to destroyForcibly() if the process
   * has not exited within `timeoutMs`, then waits up to `timeoutMs` again.
   *
   * @return the process exit value if it was successfully terminated, else None
   */
  def terminateProcess(process: Process, timeoutMs: Long): Option[Int] = {
    // Politely destroy first
    process.destroy()
    if (process.waitFor(timeoutMs, TimeUnit.MILLISECONDS)) {
      // Successful exit
      Option(process.exitValue())
    } else {
      try {
        process.destroyForcibly()
      } catch {
        // destroyForcibly should not normally throw; log and fall through to the final wait.
        case NonFatal(e) => logWarning("Exception when attempting to kill process", e)
      }
      // Wait, again, although this really should return almost immediately
      if (process.waitFor(timeoutMs, TimeUnit.MILLISECONDS)) {
        Option(process.exitValue())
      } else {
        logWarning("Timed out waiting to forcibly kill process")
        None
      }
    }
  }

  /**
   * Return the stderr of a process after waiting for the process to terminate.
   * If the process does not terminate within the specified timeout, return None.
   */
  def getStderr(process: Process, timeoutMs: Long): Option[String] = {
    val terminated = process.waitFor(timeoutMs, TimeUnit.MILLISECONDS)
    if (terminated) {
      // NOTE(review): the Source over getErrorStream is not closed explicitly here;
      // presumably relies on process teardown releasing the pipe — confirm.
      Some(Source.fromInputStream(process.getErrorStream).getLines().mkString("\n"))
    } else {
      None
    }
  }

  /**
   * Execute the given block, logging and re-throwing any uncaught exception.
   * This is particularly useful for wrapping code that runs in a thread, to ensure
   * that exceptions are printed, and to avoid having to catch Throwable.
   */
  def logUncaughtExceptions[T](f: => T): T = {
    try {
      f
    } catch {
      // ControlThrowable is part of normal Scala control flow (e.g. non-local return);
      // rethrow without logging.
      case ct: ControlThrowable =>
        throw ct
      case t: Throwable =>
        logError(s"Uncaught exception in thread ${Thread.currentThread().getName}", t)
        throw t
    }
  }

  /** Executes the given block in a Try, logging any uncaught exceptions. */
  def tryLog[T](f: => T): Try[T] = {
    try {
      val res = f
      scala.util.Success(res)
    } catch {
      // As above: let Scala control-flow throwables propagate untouched.
      case ct: ControlThrowable =>
        throw ct
      case t: Throwable =>
        logError(s"Uncaught exception in thread ${Thread.currentThread().getName}", t)
        scala.util.Failure(t)
    }
  }

  /** Returns true if the given exception was fatal. See docs for scala.util.control.NonFatal. */
  def isFatalError(e: Throwable): Boolean = {
    e match {
      // Everything NonFatal matches, plus these specific types, is considered recoverable.
      case NonFatal(_) |
           _: InterruptedException |
           _: NotImplementedError |
           _: ControlThrowable |
           _: LinkageError =>
        false
      case _ =>
        true
    }
  }

  /**
   * Return a well-formed URI for the file described by a user input string.
   *
   * If the supplied path does not contain a scheme, or is a relative path, it will be
   * converted into an absolute path with a file:// scheme.
   */
  def resolveURI(path: String): URI = {
    try {
      val uri = new URI(path)
      if (uri.getScheme() != null) {
        return uri
      }
      // make sure to handle if the path has a fragment (applies to yarn
      // distributed cache)
      if (uri.getFragment() != null) {
        val absoluteURI = new File(uri.getPath()).getAbsoluteFile().toURI()
        return new URI(absoluteURI.getScheme(), absoluteURI.getHost(), absoluteURI.getPath(),
          uri.getFragment())
      }
    } catch {
      case e: URISyntaxException =>
        // Intentionally empty: an unparsable path falls through to the
        // plain-local-file conversion below.
    }
    new File(path).getAbsoluteFile().toURI()
  }

  /**
   * Resolve a comma-separated list of paths.
*/ def resolveURIs(paths: String): String = { if (paths == null || paths.trim.isEmpty) { "" } else { paths.split(",").filter(_.trim.nonEmpty).map { p => Utils.resolveURI(p) }.mkString(",") } } /** Return all non-local paths from a comma-separated list of paths. */ def nonLocalPaths(paths: String, testWindows: Boolean = false): Array[String] = { val windows = isWindows || testWindows if (paths == null || paths.trim.isEmpty) { Array.empty } else { paths.split(",").filter { p => val uri = resolveURI(p) Option(uri.getScheme).getOrElse("file") match { case windowsDrive(d) if windows => false case "local" | "file" => false case _ => true } } } } /** * Load default Spark properties from the given file. If no file is provided, * use the common defaults file. This mutates state in the given SparkConf and * in this JVM's system properties if the config specified in the file is not * already set. Return the path of the properties file used. */ def loadDefaultSparkProperties(conf: SparkConf, filePath: String = null): String = { val path = Option(filePath).getOrElse(getDefaultPropertiesFile()) Option(path).foreach { confFile => getPropertiesFromFile(confFile).filter { case (k, v) => k.startsWith("spark.") }.foreach { case (k, v) => conf.setIfMissing(k, v) sys.props.getOrElseUpdate(k, v) } } path } /** * Updates Spark config with properties from a set of Properties. * Provided properties have the highest priority. */ def updateSparkConfigFromProperties( conf: SparkConf, properties: Map[String, String]) : Unit = { properties.filter { case (k, v) => k.startsWith("spark.") }.foreach { case (k, v) => conf.set(k, v) } } /** * Implements the same logic as JDK `java.lang.String#trim` by removing leading and trailing * non-printable characters less or equal to '\u0020' (SPACE) but preserves natural line * delimiters according to [[java.util.Properties]] load method. The natural line delimiters are * removed by JDK during load. 
Therefore any remaining ones have been specifically provided and * escaped by the user, and must not be ignored * * @param str * @return the trimmed value of str */ private[util] def trimExceptCRLF(str: String): String = { val nonSpaceOrNaturalLineDelimiter: Char => Boolean = { ch => ch > ' ' || ch == '\r' || ch == '\n' } val firstPos = str.indexWhere(nonSpaceOrNaturalLineDelimiter) val lastPos = str.lastIndexWhere(nonSpaceOrNaturalLineDelimiter) if (firstPos >= 0 && lastPos >= 0) { str.substring(firstPos, lastPos + 1) } else { "" } } /** Load properties present in the given file. */ def getPropertiesFromFile(filename: String): Map[String, String] = { val file = new File(filename) require(file.exists(), s"Properties file $file does not exist") require(file.isFile(), s"Properties file $file is not a normal file") val inReader = new InputStreamReader(new FileInputStream(file), StandardCharsets.UTF_8) try { val properties = new Properties() properties.load(inReader) properties.stringPropertyNames().asScala .map { k => (k, trimExceptCRLF(properties.getProperty(k))) } .toMap } catch { case e: IOException => throw new SparkException(s"Failed when loading Spark properties from $filename", e) } finally { inReader.close() } } /** Return the path of the default Spark properties file. */ def getDefaultPropertiesFile(env: Map[String, String] = sys.env): String = { env.get("SPARK_CONF_DIR") .orElse(env.get("SPARK_HOME").map { t => s"$t${File.separator}conf" }) .map { t => new File(s"$t${File.separator}spark-defaults.conf")} .filter(_.isFile) .map(_.getAbsolutePath) .orNull } /** * Return a nice string representation of the exception. It will call "printStackTrace" to * recursively generate the stack trace including the exception and its causes. 
*/ def exceptionString(e: Throwable): String = { if (e == null) { "" } else { // Use e.printStackTrace here because e.getStackTrace doesn't include the cause val stringWriter = new StringWriter() e.printStackTrace(new PrintWriter(stringWriter)) stringWriter.toString } } private implicit class Lock(lock: LockInfo) { def lockString: String = { lock match { case monitor: MonitorInfo => s"Monitor(${lock.getClassName}@${lock.getIdentityHashCode}})" case _ => s"Lock(${lock.getClassName}@${lock.getIdentityHashCode}})" } } } /** Return a thread dump of all threads' stacktraces. Used to capture dumps for the web UI */ def getThreadDump(): Array[ThreadStackTrace] = { // We need to filter out null values here because dumpAllThreads() may return null array // elements for threads that are dead / don't exist. val threadInfos = ManagementFactory.getThreadMXBean.dumpAllThreads(true, true).filter(_ != null) threadInfos.sortWith { case (threadTrace1, threadTrace2) => val v1 = if (threadTrace1.getThreadName.contains("Executor task launch")) 1 else 0 val v2 = if (threadTrace2.getThreadName.contains("Executor task launch")) 1 else 0 if (v1 == v2) { val name1 = threadTrace1.getThreadName().toLowerCase(Locale.ROOT) val name2 = threadTrace2.getThreadName().toLowerCase(Locale.ROOT) val nameCmpRes = name1.compareTo(name2) if (nameCmpRes == 0) { threadTrace1.getThreadId < threadTrace2.getThreadId } else { nameCmpRes < 0 } } else { v1 > v2 } }.map(threadInfoToThreadStackTrace) } def getThreadDumpForThread(threadId: Long): Option[ThreadStackTrace] = { if (threadId <= 0) { None } else { // The Int.MaxValue here requests the entire untruncated stack trace of the thread: val threadInfo = Option(ManagementFactory.getThreadMXBean.getThreadInfo(threadId, Int.MaxValue)) threadInfo.map(threadInfoToThreadStackTrace) } } private def threadInfoToThreadStackTrace(threadInfo: ThreadInfo): ThreadStackTrace = { val monitors = threadInfo.getLockedMonitors.map(m => m.getLockedStackFrame -> m).toMap val 
stackTrace = StackTrace(threadInfo.getStackTrace.map { frame => monitors.get(frame) match { case Some(monitor) => monitor.getLockedStackFrame.toString + s" => holding ${monitor.lockString}" case None => frame.toString } }) // use a set to dedup re-entrant locks that are held at multiple places val heldLocks = (threadInfo.getLockedSynchronizers ++ threadInfo.getLockedMonitors).map(_.lockString).toSet ThreadStackTrace( threadId = threadInfo.getThreadId, threadName = threadInfo.getThreadName, threadState = threadInfo.getThreadState, stackTrace = stackTrace, blockedByThreadId = if (threadInfo.getLockOwnerId < 0) None else Some(threadInfo.getLockOwnerId), blockedByLock = Option(threadInfo.getLockInfo).map(_.lockString).getOrElse(""), holdingLocks = heldLocks.toSeq) } /** * Convert all spark properties set in the given SparkConf to a sequence of java options. */ def sparkJavaOpts(conf: SparkConf, filterKey: (String => Boolean) = _ => true): Seq[String] = { conf.getAll .filter { case (k, _) => filterKey(k) } .map { case (k, v) => s"-D$k=$v" } } /** * Maximum number of retries when binding to a port before giving up. */ def portMaxRetries(conf: SparkConf): Int = { val maxRetries = conf.getOption("spark.port.maxRetries").map(_.toInt) if (conf.contains(IS_TESTING)) { // Set a higher number of retries for tests... maxRetries.getOrElse(100) } else { maxRetries.getOrElse(16) } } /** * Returns the user port to try when trying to bind a service. Handles wrapping and skipping * privileged ports. */ def userPort(base: Int, offset: Int): Int = { (base + offset - 1024) % (65536 - 1024) + 1024 } /** * Attempt to start a service on the given port, or fail after a number of attempts. * Each subsequent attempt uses 1 + the port used in the previous attempt (unless the port is 0). * * @param startPort The initial port to start the service on. * @param startService Function to start service on a given port. * This is expected to throw java.net.BindException on port collision. 
* @param conf A SparkConf used to get the maximum number of retries when binding to a port. * @param serviceName Name of the service. * @return (service: T, port: Int) */ def startServiceOnPort[T]( startPort: Int, startService: Int => (T, Int), conf: SparkConf, serviceName: String = ""): (T, Int) = { require(startPort == 0 || (1024 <= startPort && startPort < 65536), "startPort should be between 1024 and 65535 (inclusive), or 0 for a random free port.") val serviceString = if (serviceName.isEmpty) "" else s" '$serviceName'" val maxRetries = portMaxRetries(conf) for (offset <- 0 to maxRetries) { // Do not increment port if startPort is 0, which is treated as a special port val tryPort = if (startPort == 0) { startPort } else { userPort(startPort, offset) } try { val (service, port) = startService(tryPort) logInfo(s"Successfully started service$serviceString on port $port.") return (service, port) } catch { case e: Exception if isBindCollision(e) => if (offset >= maxRetries) { val exceptionMessage = if (startPort == 0) { s"${e.getMessage}: Service$serviceString failed after " + s"$maxRetries retries (on a random free port)! " + s"Consider explicitly setting the appropriate binding address for " + s"the service$serviceString (for example ${DRIVER_BIND_ADDRESS.key} " + s"for SparkDriver) to the correct binding address." } else { s"${e.getMessage}: Service$serviceString failed after " + s"$maxRetries retries (starting from $startPort)! Consider explicitly setting " + s"the appropriate port for the service$serviceString (for example spark.ui.port " + s"for SparkUI) to an available port or increasing spark.port.maxRetries." } val exception = new BindException(exceptionMessage) // restore original stack trace exception.setStackTrace(e.getStackTrace) throw exception } if (startPort == 0) { // As startPort 0 is for a random free port, it is most possibly binding address is // not correct. logWarning(s"Service$serviceString could not bind on a random free port. 
" + "You may check whether configuring an appropriate binding address.") } else { logWarning(s"Service$serviceString could not bind on port $tryPort. " + s"Attempting port ${tryPort + 1}.") } } } // Should never happen throw new SparkException(s"Failed to start service$serviceString on port $startPort") } /** * Return whether the exception is caused by an address-port collision when binding. */ def isBindCollision(exception: Throwable): Boolean = { exception match { case e: BindException => if (e.getMessage != null) { return true } isBindCollision(e.getCause) case e: MultiException => e.getThrowables.asScala.exists(isBindCollision) case e: NativeIoException => (e.getMessage != null && e.getMessage.startsWith("bind() failed: ")) || isBindCollision(e.getCause) case e: Exception => isBindCollision(e.getCause) case _ => false } } /** * configure a new log4j level */ def setLogLevel(l: org.apache.log4j.Level): Unit = { val rootLogger = org.apache.log4j.Logger.getRootLogger() rootLogger.setLevel(l) // Setting threshold to null as rootLevel will define log level for spark-shell Logging.sparkShellThresholdLevel = null } /** * Return the current system LD_LIBRARY_PATH name */ def libraryPathEnvName: String = { if (isWindows) { "PATH" } else if (isMac) { "DYLD_LIBRARY_PATH" } else { "LD_LIBRARY_PATH" } } /** * Return the prefix of a command that appends the given library paths to the * system-specific library path environment variable. On Unix, for instance, * this returns the string LD_LIBRARY_PATH="path1:path2:$LD_LIBRARY_PATH". 
*/ def libraryPathEnvPrefix(libraryPaths: Seq[String]): String = { val libraryPathScriptVar = if (isWindows) { s"%${libraryPathEnvName}%" } else { "$" + libraryPathEnvName } val libraryPath = (libraryPaths :+ libraryPathScriptVar).mkString("\"", File.pathSeparator, "\"") val ampersand = if (Utils.isWindows) { " &" } else { "" } s"$libraryPathEnvName=$libraryPath$ampersand" } /** * Return the value of a config either through the SparkConf or the Hadoop configuration. * We Check whether the key is set in the SparkConf before look at any Hadoop configuration. * If the key is set in SparkConf, no matter whether it is running on YARN or not, * gets the value from SparkConf. * Only when the key is not set in SparkConf and running on YARN, * gets the value from Hadoop configuration. */ def getSparkOrYarnConfig(conf: SparkConf, key: String, default: String): String = { if (conf.contains(key)) { conf.get(key, default) } else if (conf.get(SparkLauncher.SPARK_MASTER, null) == "yarn") { new YarnConfiguration(SparkHadoopUtil.get.newConfiguration(conf)).get(key, default) } else { default } } /** * Return a pair of host and port extracted from the `sparkUrl`. * * A spark url (`spark://host:port`) is a special URI that its scheme is `spark` and only contains * host and port. * * @throws org.apache.spark.SparkException if sparkUrl is invalid. */ @throws(classOf[SparkException]) def extractHostPortFromSparkUrl(sparkUrl: String): (String, Int) = { try { val uri = new java.net.URI(sparkUrl) val host = uri.getHost val port = uri.getPort if (uri.getScheme != "spark" || host == null || port < 0 || (uri.getPath != null && !uri.getPath.isEmpty) || // uri.getPath returns "" instead of null uri.getFragment != null || uri.getQuery != null || uri.getUserInfo != null) { throw new SparkException("Invalid master URL: " + sparkUrl) } (host, port) } catch { case e: java.net.URISyntaxException => throw new SparkException("Invalid master URL: " + sparkUrl, e) } } /** * Returns the current user name. 
This is the currently logged in user, unless that's been * overridden by the `SPARK_USER` environment variable. */ def getCurrentUserName(): String = { Option(System.getenv("SPARK_USER")) .getOrElse(UserGroupInformation.getCurrentUser().getShortUserName()) } val EMPTY_USER_GROUPS = Set.empty[String] // Returns the groups to which the current user belongs. def getCurrentUserGroups(sparkConf: SparkConf, username: String): Set[String] = { val groupProviderClassName = sparkConf.get(USER_GROUPS_MAPPING) if (groupProviderClassName != "") { try { val groupMappingServiceProvider = classForName(groupProviderClassName). getConstructor().newInstance(). asInstanceOf[org.apache.spark.security.GroupMappingServiceProvider] val currentUserGroups = groupMappingServiceProvider.getGroups(username) return currentUserGroups } catch { case e: Exception => logError(s"Error getting groups for user=$username", e) } } EMPTY_USER_GROUPS } /** * Split the comma delimited string of master URLs into a list. * For instance, "spark://abc,def" becomes [spark://abc, spark://def]. */ def parseStandaloneMasterUrls(masterUrls: String): Array[String] = { masterUrls.stripPrefix("spark://").split(",").map("spark://" + _) } /** An identifier that backup masters use in their responses. */ val BACKUP_STANDALONE_MASTER_PREFIX = "Current state is not alive" /** Return true if the response message is sent from a backup Master on standby. */ def responseFromBackup(msg: String): Boolean = { msg.startsWith(BACKUP_STANDALONE_MASTER_PREFIX) } /** * To avoid calling `Utils.getCallSite` for every single RDD we create in the body, * set a dummy call site that RDDs use instead. This is for performance optimization. 
*/ def withDummyCallSite[T](sc: SparkContext)(body: => T): T = { val oldShortCallSite = sc.getLocalProperty(CallSite.SHORT_FORM) val oldLongCallSite = sc.getLocalProperty(CallSite.LONG_FORM) try { sc.setLocalProperty(CallSite.SHORT_FORM, "") sc.setLocalProperty(CallSite.LONG_FORM, "") body } finally { // Restore the old ones here sc.setLocalProperty(CallSite.SHORT_FORM, oldShortCallSite) sc.setLocalProperty(CallSite.LONG_FORM, oldLongCallSite) } } /** * Return whether the specified file is a parent directory of the child file. */ @tailrec def isInDirectory(parent: File, child: File): Boolean = { if (child == null || parent == null) { return false } if (!child.exists() || !parent.exists() || !parent.isDirectory()) { return false } if (parent.equals(child)) { return true } isInDirectory(parent, child.getParentFile) } /** * * @return whether it is local mode */ def isLocalMaster(conf: SparkConf): Boolean = { val master = conf.get("spark.master", "") master == "local" || master.startsWith("local[") } /** * Push based shuffle can only be enabled when external shuffle service is enabled. */ def isPushBasedShuffleEnabled(conf: SparkConf): Boolean = { conf.get(PUSH_BASED_SHUFFLE_ENABLED) && (conf.get(IS_TESTING).getOrElse(false) || conf.get(SHUFFLE_SERVICE_ENABLED)) } /** * Return whether dynamic allocation is enabled in the given conf. */ def isDynamicAllocationEnabled(conf: SparkConf): Boolean = { val dynamicAllocationEnabled = conf.get(DYN_ALLOCATION_ENABLED) dynamicAllocationEnabled && (!isLocalMaster(conf) || conf.get(DYN_ALLOCATION_TESTING)) } def isStreamingDynamicAllocationEnabled(conf: SparkConf): Boolean = { val streamingDynamicAllocationEnabled = conf.get(STREAMING_DYN_ALLOCATION_ENABLED) streamingDynamicAllocationEnabled && (!isLocalMaster(conf) || conf.get(STREAMING_DYN_ALLOCATION_TESTING)) } /** * Return the initial number of executors for dynamic allocation. 
*/ def getDynamicAllocationInitialExecutors(conf: SparkConf): Int = { if (conf.get(DYN_ALLOCATION_INITIAL_EXECUTORS) < conf.get(DYN_ALLOCATION_MIN_EXECUTORS)) { logWarning(s"${DYN_ALLOCATION_INITIAL_EXECUTORS.key} less than " + s"${DYN_ALLOCATION_MIN_EXECUTORS.key} is invalid, ignoring its setting, " + "please update your configs.") } if (conf.get(EXECUTOR_INSTANCES).getOrElse(0) < conf.get(DYN_ALLOCATION_MIN_EXECUTORS)) { logWarning(s"${EXECUTOR_INSTANCES.key} less than " + s"${DYN_ALLOCATION_MIN_EXECUTORS.key} is invalid, ignoring its setting, " + "please update your configs.") } val initialExecutors = Seq( conf.get(DYN_ALLOCATION_MIN_EXECUTORS), conf.get(DYN_ALLOCATION_INITIAL_EXECUTORS), conf.get(EXECUTOR_INSTANCES).getOrElse(0)).max logInfo(s"Using initial executors = $initialExecutors, max of " + s"${DYN_ALLOCATION_INITIAL_EXECUTORS.key}, ${DYN_ALLOCATION_MIN_EXECUTORS.key} and " + s"${EXECUTOR_INSTANCES.key}") initialExecutors } def tryWithResource[R <: Closeable, T](createResource: => R)(f: R => T): T = { val resource = createResource try f.apply(resource) finally resource.close() } /** * Returns a path of temporary file which is in the same directory with `path`. */ def tempFileWith(path: File): File = { new File(path.getAbsolutePath + "." + UUID.randomUUID()) } /** * Returns the name of this JVM process. This is OS dependent but typically (OSX, Linux, Windows), * this is formatted as PID@hostname. */ def getProcessName(): String = { ManagementFactory.getRuntimeMXBean().getName() } /** * Utility function that should be called early in `main()` for daemons to set up some common * diagnostic state. */ def initDaemon(log: Logger): Unit = { log.info(s"Started daemon with process name: ${Utils.getProcessName()}") SignalUtils.registerLogger(log) } /** * Return the jar files pointed by the "spark.jars" property. Spark internally will distribute * these jars through file server. 
In the YARN mode, it will return an empty list, since YARN * has its own mechanism to distribute jars. */ def getUserJars(conf: SparkConf): Seq[String] = { conf.get(JARS).filter(_.nonEmpty) } /** * Return the local jar files which will be added to REPL's classpath. These jar files are * specified by --jars (spark.jars) or --packages, remote jars will be downloaded to local by * SparkSubmit at first. */ def getLocalUserJarsForShell(conf: SparkConf): Seq[String] = { val localJars = conf.getOption("spark.repl.local.jars") localJars.map(_.split(",")).map(_.filter(_.nonEmpty)).toSeq.flatten } private[spark] val REDACTION_REPLACEMENT_TEXT = "*********(redacted)" /** * Redact the sensitive values in the given map. If a map key matches the redaction pattern then * its value is replaced with a dummy text. */ def redact(conf: SparkConf, kvs: Seq[(String, String)]): Seq[(String, String)] = { val redactionPattern = conf.get(SECRET_REDACTION_PATTERN) redact(redactionPattern, kvs) } /** * Redact the sensitive values in the given map. If a map key matches the redaction pattern then * its value is replaced with a dummy text. */ def redact[K, V](regex: Option[Regex], kvs: Seq[(K, V)]): Seq[(K, V)] = { regex match { case None => kvs case Some(r) => redact(r, kvs) } } /** * Redact the sensitive information in the given string. 
*/ def redact(regex: Option[Regex], text: String): String = { regex match { case None => text case Some(r) => if (text == null || text.isEmpty) { text } else { r.replaceAllIn(text, REDACTION_REPLACEMENT_TEXT) } } } private def redact[K, V](redactionPattern: Regex, kvs: Seq[(K, V)]): Seq[(K, V)] = { // If the sensitive information regex matches with either the key or the value, redact the value // While the original intent was to only redact the value if the key matched with the regex, // we've found that especially in verbose mode, the value of the property may contain sensitive // information like so: // "sun.java.command":"org.apache.spark.deploy.SparkSubmit ... \ // --conf spark.executorEnv.HADOOP_CREDSTORE_PASSWORD=secret_password ... // // And, in such cases, simply searching for the sensitive information regex in the key name is // not sufficient. The values themselves have to be searched as well and redacted if matched. // This does mean we may be accounting more false positives - for example, if the value of an // arbitrary property contained the term 'password', we may redact the value from the UI and // logs. In order to work around it, user would have to make the spark.redaction.regex property // more specific. kvs.map { case (key: String, value: String) => redactionPattern.findFirstIn(key) .orElse(redactionPattern.findFirstIn(value)) .map { _ => (key, REDACTION_REPLACEMENT_TEXT) } .getOrElse((key, value)) case (key, value: String) => redactionPattern.findFirstIn(value) .map { _ => (key, REDACTION_REPLACEMENT_TEXT) } .getOrElse((key, value)) case (key, value) => (key, value) }.asInstanceOf[Seq[(K, V)]] } /** * Looks up the redaction regex from within the key value pairs and uses it to redact the rest * of the key value pairs. No care is taken to make sure the redaction property itself is not * redacted. So theoretically, the property itself could be configured to redact its own value * when printing. 
*/ def redact(kvs: Map[String, String]): Seq[(String, String)] = { val redactionPattern = kvs.getOrElse( SECRET_REDACTION_PATTERN.key, SECRET_REDACTION_PATTERN.defaultValueString ).r redact(redactionPattern, kvs.toArray) } def redactCommandLineArgs(conf: SparkConf, commands: Seq[String]): Seq[String] = { val redactionPattern = conf.get(SECRET_REDACTION_PATTERN) commands.map { case PATTERN_FOR_COMMAND_LINE_ARG(key, value) => val (_, newValue) = redact(redactionPattern, Seq((key, value))).head s"-D$key=$newValue" case cmd => cmd } } def stringToSeq(str: String): Seq[String] = { str.split(",").map(_.trim()).filter(_.nonEmpty) } /** * Create instances of extension classes. * * The classes in the given list must: * - Be sub-classes of the given base class. * - Provide either a no-arg constructor, or a 1-arg constructor that takes a SparkConf. * * The constructors are allowed to throw "UnsupportedOperationException" if the extension does not * want to be registered; this allows the implementations to check the Spark configuration (or * other state) and decide they do not need to be added. A log message is printed in that case. * Other exceptions are bubbled up. */ def loadExtensions[T <: AnyRef]( extClass: Class[T], classes: Seq[String], conf: SparkConf): Seq[T] = { classes.flatMap { name => try { val klass = classForName[T](name) require(extClass.isAssignableFrom(klass), s"$name is not a subclass of ${extClass.getName()}.") val ext = Try(klass.getConstructor(classOf[SparkConf])) match { case Success(ctor) => ctor.newInstance(conf) case Failure(_) => klass.getConstructor().newInstance() } Some(ext.asInstanceOf[T]) } catch { case _: NoSuchMethodException => throw new SparkException( s"$name did not have a zero-argument constructor or a" + " single-argument constructor that accepts SparkConf. 
Note: if the class is" + " defined inside of another Scala class, then its constructors may accept an" + " implicit parameter that references the enclosing class; in this case, you must" + " define the class as a top-level class in order to prevent this extra" + " parameter from breaking Spark's ability to find a valid constructor.") case e: InvocationTargetException => e.getCause() match { case uoe: UnsupportedOperationException => logDebug(s"Extension $name not being initialized.", uoe) logInfo(s"Extension $name not being initialized.") None case null => throw e case cause => throw cause } } } } /** * Check the validity of the given Kubernetes master URL and return the resolved URL. Prefix * "k8s://" is appended to the resolved URL as the prefix is used by KubernetesClusterManager * in canCreate to determine if the KubernetesClusterManager should be used. */ def checkAndGetK8sMasterUrl(rawMasterURL: String): String = { require(rawMasterURL.startsWith("k8s://"), "Kubernetes master URL must start with k8s://.") val masterWithoutK8sPrefix = rawMasterURL.substring("k8s://".length) // To handle master URLs, e.g., k8s://host:port. if (!masterWithoutK8sPrefix.contains("://")) { val resolvedURL = s"https://$masterWithoutK8sPrefix" logInfo("No scheme specified for kubernetes master URL, so defaulting to https. Resolved " + s"URL is $resolvedURL.") return s"k8s://$resolvedURL" } val masterScheme = new URI(masterWithoutK8sPrefix).getScheme val resolvedURL = Option(masterScheme).map(_.toLowerCase(Locale.ROOT)) match { case Some("https") => masterWithoutK8sPrefix case Some("http") => logWarning("Kubernetes master URL uses HTTP instead of HTTPS.") masterWithoutK8sPrefix case _ => throw new IllegalArgumentException("Invalid Kubernetes master scheme: " + masterScheme + " found in URL: " + masterWithoutK8sPrefix) } s"k8s://$resolvedURL" } /** * Replaces all the {{EXECUTOR_ID}} occurrences with the Executor Id * and {{APP_ID}} occurrences with the App Id. 
*/ def substituteAppNExecIds(opt: String, appId: String, execId: String): String = { opt.replace("{{APP_ID}}", appId).replace("{{EXECUTOR_ID}}", execId) } /** * Replaces all the {{APP_ID}} occurrences with the App Id. */ def substituteAppId(opt: String, appId: String): String = { opt.replace("{{APP_ID}}", appId) } def createSecret(conf: SparkConf): String = { val bits = conf.get(AUTH_SECRET_BIT_LENGTH) val rnd = new SecureRandom() val secretBytes = new Array[Byte](bits / JByte.SIZE) rnd.nextBytes(secretBytes) Hex.encodeHexString(secretBytes) } /** * Safer than Class obj's getSimpleName which may throw Malformed class name error in scala. * This method mimics scalatest's getSimpleNameOfAnObjectsClass. */ def getSimpleName(cls: Class[_]): String = { try { cls.getSimpleName } catch { // TODO: the value returned here isn't even quite right; it returns simple names // like UtilsSuite$MalformedClassObject$MalformedClass instead of MalformedClass // The exact value may not matter much as it's used in log statements case _: InternalError => stripDollars(stripPackages(cls.getName)) } } /** * Remove the packages from full qualified class name */ private def stripPackages(fullyQualifiedName: String): String = { fullyQualifiedName.split("\\.").takeRight(1)(0) } /** * Remove trailing dollar signs from qualified class name, * and return the trailing part after the last dollar sign in the middle */ private def stripDollars(s: String): String = { val lastDollarIndex = s.lastIndexOf('$') if (lastDollarIndex < s.length - 1) { // The last char is not a dollar sign if (lastDollarIndex == -1 || !s.contains("$iw")) { // The name does not have dollar sign or is not an interpreter // generated class, so we should return the full string s } else { // The class name is interpreter generated, // return the part after the last dollar sign // This is the same behavior as getClass.getSimpleName s.substring(lastDollarIndex + 1) } } else { // The last char is a dollar sign // Find last non-dollar 
char val lastNonDollarChar = s.reverse.find(_ != '$') lastNonDollarChar match { case None => s case Some(c) => val lastNonDollarIndex = s.lastIndexOf(c) if (lastNonDollarIndex == -1) { s } else { // Strip the trailing dollar signs // Invoke stripDollars again to get the simple name stripDollars(s.substring(0, lastNonDollarIndex + 1)) } } } } /** * Regular expression matching full width characters. * * Looked at all the 0x0000-0xFFFF characters (unicode) and showed them under Xshell. * Found all the full width characters, then get the regular expression. */ private val fullWidthRegex = ("""[""" + // scalastyle:off nonascii "\u1100-\u115F" + "\u2E80-\uA4CF" + "\uAC00-\uD7A3" + "\uF900-\uFAFF" + "\uFE10-\uFE19" + "\uFE30-\uFE6F" + "\uFF00-\uFF60" + "\uFFE0-\uFFE6" + // scalastyle:on nonascii """]""").r /** * Return the number of half widths in a given string. Note that a full width character * occupies two half widths. * * For a string consisting of 1 million characters, the execution of this method requires * about 50ms. */ def stringHalfWidth(str: String): Int = { if (str == null) 0 else str.length + fullWidthRegex.findAllIn(str).size } def sanitizeDirName(str: String): String = { str.replaceAll("[ :/]", "-").replaceAll("[.${}'\"]", "_").toLowerCase(Locale.ROOT) } def isClientMode(conf: SparkConf): Boolean = { "client".equals(conf.get(SparkLauncher.DEPLOY_MODE, "client")) } /** Returns whether the URI is a "local:" URI. */ def isLocalUri(uri: String): Boolean = { uri.startsWith(s"$LOCAL_SCHEME:") } /** Check whether the file of the path is splittable. 
*/ def isFileSplittable(path: Path, codecFactory: CompressionCodecFactory): Boolean = { val codec = codecFactory.getCodec(path) codec == null || codec.isInstanceOf[SplittableCompressionCodec] } /** Create a new properties object with the same values as `props` */ def cloneProperties(props: Properties): Properties = { val resultProps = new Properties() props.forEach((k, v) => resultProps.put(k, v)) resultProps } /** * Convert a sequence of `Path`s to a metadata string. When the length of metadata string * exceeds `stopAppendingThreshold`, stop appending paths for saving memory. */ def buildLocationMetadata(paths: Seq[Path], stopAppendingThreshold: Int): String = { val metadata = new StringBuilder(s"(${paths.length} paths)[") var index: Int = 0 while (index < paths.length && metadata.length < stopAppendingThreshold) { if (index > 0) { metadata.append(", ") } metadata.append(paths(index).toString) index += 1 } if (paths.length > index) { if (index > 0) { metadata.append(", ") } metadata.append("...") } metadata.append("]") metadata.toString } /** * Convert MEMORY_OFFHEAP_SIZE to MB Unit, return 0 if MEMORY_OFFHEAP_ENABLED is false. */ def executorOffHeapMemorySizeAsMb(sparkConf: SparkConf): Int = { val sizeInMB = Utils.memoryStringToMb(sparkConf.get(MEMORY_OFFHEAP_SIZE).toString) checkOffHeapEnabled(sparkConf, sizeInMB).toInt } /** * return 0 if MEMORY_OFFHEAP_ENABLED is false. 
*/ def checkOffHeapEnabled(sparkConf: SparkConf, offHeapSize: Long): Long = { if (sparkConf.get(MEMORY_OFFHEAP_ENABLED)) { require(offHeapSize > 0, s"${MEMORY_OFFHEAP_SIZE.key} must be > 0 when ${MEMORY_OFFHEAP_ENABLED.key} == true") offHeapSize } else { 0 } } } private[util] object CallerContext extends Logging { val callerContextSupported: Boolean = { SparkHadoopUtil.get.conf.getBoolean("hadoop.caller.context.enabled", false) && { try { Utils.classForName("org.apache.hadoop.ipc.CallerContext") Utils.classForName("org.apache.hadoop.ipc.CallerContext$Builder") true } catch { case _: ClassNotFoundException => false case NonFatal(e) => logWarning("Fail to load the CallerContext class", e) false } } } } /** * An utility class used to set up Spark caller contexts to HDFS and Yarn. The `context` will be * constructed by parameters passed in. * When Spark applications run on Yarn and HDFS, its caller contexts will be written into Yarn RM * audit log and hdfs-audit.log. That can help users to better diagnose and understand how * specific applications impacting parts of the Hadoop system and potential problems they may be * creating (e.g. overloading NN). As HDFS mentioned in HDFS-9184, for a given HDFS operation, it's * very helpful to track which upper level job issues it. 
* * @param from who sets up the caller context (TASK, CLIENT, APPMASTER) * * The parameters below are optional: * @param upstreamCallerContext caller context the upstream application passes in * @param appId id of the app this task belongs to * @param appAttemptId attempt id of the app this task belongs to * @param jobId id of the job this task belongs to * @param stageId id of the stage this task belongs to * @param stageAttemptId attempt id of the stage this task belongs to * @param taskId task id * @param taskAttemptNumber task attempt id */ private[spark] class CallerContext( from: String, upstreamCallerContext: Option[String] = None, appId: Option[String] = None, appAttemptId: Option[String] = None, jobId: Option[Int] = None, stageId: Option[Int] = None, stageAttemptId: Option[Int] = None, taskId: Option[Long] = None, taskAttemptNumber: Option[Int] = None) extends Logging { private val context = prepareContext("SPARK_" + from + appId.map("_" + _).getOrElse("") + appAttemptId.map("_" + _).getOrElse("") + jobId.map("_JId_" + _).getOrElse("") + stageId.map("_SId_" + _).getOrElse("") + stageAttemptId.map("_" + _).getOrElse("") + taskId.map("_TId_" + _).getOrElse("") + taskAttemptNumber.map("_" + _).getOrElse("") + upstreamCallerContext.map("_" + _).getOrElse("")) private def prepareContext(context: String): String = { // The default max size of Hadoop caller context is 128 lazy val len = SparkHadoopUtil.get.conf.getInt("hadoop.caller.context.max.size", 128) if (context == null || context.length <= len) { context } else { val finalContext = context.substring(0, len) logWarning(s"Truncated Spark caller context from $context to $finalContext") finalContext } } /** * Set up the caller context [[context]] by invoking Hadoop CallerContext API of * [[org.apache.hadoop.ipc.CallerContext]], which was added in hadoop 2.8. 
*/ def setCurrentContext(): Unit = { if (CallerContext.callerContextSupported) { try { val callerContext = Utils.classForName("org.apache.hadoop.ipc.CallerContext") val builder: Class[AnyRef] = Utils.classForName("org.apache.hadoop.ipc.CallerContext$Builder") val builderInst = builder.getConstructor(classOf[String]).newInstance(context) val hdfsContext = builder.getMethod("build").invoke(builderInst) callerContext.getMethod("setCurrent", callerContext).invoke(null, hdfsContext) } catch { case NonFatal(e) => logWarning("Fail to set Spark caller context", e) } } } } /** * A utility class to redirect the child process's stdout or stderr. */ private[spark] class RedirectThread( in: InputStream, out: OutputStream, name: String, propagateEof: Boolean = false) extends Thread(name) { setDaemon(true) override def run(): Unit = { scala.util.control.Exception.ignoring(classOf[IOException]) { // FIXME: We copy the stream on the level of bytes to avoid encoding problems. Utils.tryWithSafeFinally { val buf = new Array[Byte](1024) var len = in.read(buf) while (len != -1) { out.write(buf, 0, len) out.flush() len = in.read(buf) } } { if (propagateEof) { out.close() } } } } } /** * An [[OutputStream]] that will store the last 10 kilobytes (by default) written to it * in a circular buffer. The current contents of the buffer can be accessed using * the toString method. 
*/ private[spark] class CircularBuffer(sizeInBytes: Int = 10240) extends java.io.OutputStream { private var pos: Int = 0 private var isBufferFull = false private val buffer = new Array[Byte](sizeInBytes) def write(input: Int): Unit = { buffer(pos) = input.toByte pos = (pos + 1) % buffer.length isBufferFull = isBufferFull || (pos == 0) } override def toString: String = { if (!isBufferFull) { return new String(buffer, 0, pos, StandardCharsets.UTF_8) } val nonCircularBuffer = new Array[Byte](sizeInBytes) System.arraycopy(buffer, pos, nonCircularBuffer, 0, buffer.length - pos) System.arraycopy(buffer, 0, nonCircularBuffer, buffer.length - pos, pos) new String(nonCircularBuffer, StandardCharsets.UTF_8) } }
witgo/spark
core/src/main/scala/org/apache/spark/util/Utils.scala
Scala
apache-2.0
115,928
//====================================================================================================================== // Facsimile: A Discrete-Event Simulation Library // Copyright © 2004-2020, Michael J Allen. // // This file is part of Facsimile. // // Facsimile is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public // License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later // version. // // Facsimile is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied // warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more // details. // // You should have received a copy of the GNU Lesser General Public License along with Facsimile. If not, see: // // http://www.gnu.org/licenses/lgpl. // // The developers welcome all comments, suggestions and offers of assistance. For further information, please visit the // project home page at: // // http://facsim.org/ // // Thank you for your interest in the Facsimile project! // // IMPORTANT NOTE: All patches (modifications to existing files and/or the addition of new files) submitted for // inclusion as part of the official Facsimile code base, must comply with the published Facsimile Coding Standards. If // your code fails to comply with the standard, then your patches will be rejected. For further information, please // visit the coding standards at: // // http://facsim.org/Documentation/CodingStandards/ //====================================================================================================================== //====================================================================================================================== // Scala source file belonging to the org.facsim.types.phys types. 
//======================================================================================================================

package org.facsim.types.phys

import org.facsim.util.{requireFinite, requireValid}

/** Offset converter.
 *
 *  Converts physical quantity measurements of an associated, but unspecified, unit to and from the corresponding
 *  standard ''[[http://en.wikipedia.org/wiki/SI SI]]'' units for the unit family. Values are ''imported'' (converted
 *  to ''SI'' unit values) by subtracting the constant `offset`; they are ''exported'' (converted from ''SI'' unit
 *  values) by adding the same constant back, so the two operations are exact inverses of one another.
 *
 *  @constructor Create a new offset converter from the specified `offset`.
 *
 *  @param offset Constant value that, when subtracted from measurements in the corresponding non-''SI'' units,
 *  converts those measurements to ''SI'' units. `offset` must be finite and cannot be zero (a zero offset would make
 *  this an identity conversion, for which [[SIConverter]] is the better option).
 *
 *  @throws IllegalArgumentException if `offset` is `NaN`, infinite, or zero.
 */
private[phys] final class OffsetConverter(offset: Double) extends Converter {

  // Sanity checks: reject non-finite offsets first, then the degenerate zero offset.
  requireFinite(offset)
  requireValid(offset, offset != 0.0)

  /** @inheritdoc */
  private[phys] override def importValue(value: Double): Double = value - offset

  /** @inheritdoc */
  private[phys] override def exportValue(value: Double): Double = value + offset
}
MichaelJAllen/facsimile
facsimile-types/src/main/scala/org/facsim/types/phys/OffsetConverter.scala
Scala
lgpl-3.0
3,398
// Copyright 2012 Twitter, Inc.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.twitter.cassie

import com.twitter.cassie.connection.{ CCluster, ClusterClientProvider, RetryPolicy, SocketAddressCluster }
import com.twitter.conversions.time._
import com.twitter.finagle.stats.{ StatsReceiver, NullStatsReceiver }
import com.twitter.finagle.tracing.{ Tracer, NullTracer }
import com.twitter.util.Duration
import org.slf4j.LoggerFactory
import java.net.{ SocketAddress, InetSocketAddress }
// NOTE(review): JavaConversions (implicit conversions) is deprecated in favor of
// JavaConverters' explicit .asScala/.asJava; left unchanged here.
import scala.collection.JavaConversions._

/**
 * A Cassandra cluster.
 *
 * Entry point for building [[Keyspace]] instances via [[keyspace]]. Only a subset of hosts needs
 * to be supplied as seeds; the rest of the cluster can be discovered periodically (see
 * [[mapHostsEvery]]).
 *
 * @param seedHosts list of some hosts in the cluster
 * @param seedPort the port number for '''all''' hosts in the cluster
 * @param stats a finagle stats receiver
 * @param tracer a factory producing tracers for request tracing
 */
class Cluster(seedHosts: Set[String], seedPort: Int, stats: StatsReceiver, tracer: Tracer.Factory) extends ClusterBase {

  // How often to re-discover the cluster's host list; <= 0 disables remapping entirely.
  private var mapHostsEvery: Duration = 10.minutes

  /**
   * @param seedHosts A comma separated list of seed hosts for a cluster. The rest of the
   *   hosts can be found via mapping the cluster. See KeyspaceBuilder.mapHostsEvery.
   *   The port number is assumed to be 9160.
   */
  def this(seedHosts: String, stats: StatsReceiver = NullStatsReceiver) =
    this(seedHosts.split(',').filter { !_.isEmpty }.toSet, 9160, stats, NullTracer.factory)

  /**
   * @param seedHosts A comma separated list of seed hosts for a cluster. The rest of the
   *   hosts can be found via mapping the cluster. See KeyspaceBuilder.mapHostsEvery.
   */
  def this(seedHosts: String, port: Int) =
    this(seedHosts.split(',').filter { !_.isEmpty }.toSet, port, NullStatsReceiver, NullTracer.factory)

  /**
   * @param seedHosts A collection of seed host addresses. The port number is assumed to be 9160
   */
  def this(seedHosts: java.util.Collection[String]) =
    this(collectionAsScalaIterable(seedHosts).toSet, 9160, NullStatsReceiver, NullTracer.factory)

  /**
   * Returns a [[com.twitter.cassie.KeyspaceBuilder]] instance.
   * @param name the keyspace's name
   */
  def keyspace(name: String): KeyspaceBuilder = {
    val scopedStats = stats.scope("cassie").scope(name)
    val seedAddresses = seedHosts.map { host => new InetSocketAddress(host, seedPort) }.toSeq
    val cluster =
      if (mapHostsEvery > 0.seconds)
        // either map the cluster for this keyspace (periodic host re-discovery)
        new ClusterRemapper(name, seedAddresses, mapHostsEvery, seedPort, stats.scope("remapper"), tracer)
      else
        // or connect directly to the hosts that were given as seeds
        new SocketAddressCluster(seedAddresses)
    KeyspaceBuilder(cluster, name, scopedStats, tracer)
  }

  /**
   * @param period Cassie will query the cassandra cluster every `period`
   * to refresh its host list.
   */
  def mapHostsEvery(period: Duration): Cluster = {
    mapHostsEvery = period
    this
  }
}

/** Minimal interface for anything that can hand out keyspace builders. */
trait ClusterBase {
  /**
   * Returns a [[com.twitter.cassie.KeyspaceBuilder]] instance.
   * @param name the keyspace's name
   */
  def keyspace(name: String): KeyspaceBuilder
}

object KeyspaceBuilder {
  private val log = LoggerFactory.getLogger(this.getClass)
}

/**
 * Immutable builder for a [[Keyspace]]. Each `_`-prefixed field is a connection tuning knob with
 * a default; the fluent methods below return an updated copy. Timeouts are in milliseconds.
 */
case class KeyspaceBuilder(
  cluster: CCluster[SocketAddress],
  name: String,
  stats: StatsReceiver,
  tracer: Tracer.Factory,
  _retries: Int = 0,
  _timeout: Int = 5000,
  _requestTimeout: Int = 1000,
  _connectTimeout: Int = 1000,
  _minConnectionsPerHost: Int = 1,
  _maxConnectionsPerHost: Int = 5,
  _hostConnectionMaxWaiters: Int = 100,
  _retryPolicy: RetryPolicy = RetryPolicy.Idempotent,
  _failFast: Boolean = false
) {
  import KeyspaceBuilder._

  /**
   * connect to the cluster with the specified parameters
   */
  def connect(): Keyspace = {
    // TODO: move to builder pattern as well
    // Suspicious configurations are logged but not rejected: the overall timeout should not be
    // smaller than the per-request or connection timeouts it encompasses.
    if (_timeout < _requestTimeout)
      log.error("Timeout (for all requests including retries) is less than the per-request timeout.")
    if (_timeout < _connectTimeout)
      log.error("Timeout (for all requests including retries) is less than the connection timeout.")
    val ccp = new ClusterClientProvider(
      cluster,
      name,
      _retries,
      _timeout.milliseconds,
      _requestTimeout.milliseconds,
      _connectTimeout.milliseconds,
      _minConnectionsPerHost,
      _maxConnectionsPerHost,
      _hostConnectionMaxWaiters,
      stats,
      tracer,
      _retryPolicy,
      _failFast)
    new Keyspace(name, ccp, stats)
  }

  /**
   * In general, it is recommended that you set this to true.
   * It is likely to become the default behavior in Finagle in the not too distant future.
   */
  def failFast(ff: Boolean): KeyspaceBuilder = copy(_failFast = ff)

  def timeout(t: Int): KeyspaceBuilder = copy(_timeout = t)
  def retries(r: Int): KeyspaceBuilder = copy(_retries = r)
  def retryPolicy(r: RetryPolicy): KeyspaceBuilder = copy(_retryPolicy = r)

  /**
   * @see requestTimeout in [[http://twitter.github.com/finagle/finagle-core/target/doc/main/api/com/twitter/finagle/builder/ClientBuilder.html]]
   */
  def requestTimeout(r: Int): KeyspaceBuilder = copy(_requestTimeout = r)

  /**
   * @see connectionTimeout in [[http://twitter.github.com/finagle/finagle-core/target/doc/main/api/com/twitter/finagle/builder/ClientBuilder.html]]
   */
  def connectTimeout(r: Int): KeyspaceBuilder = copy(_connectTimeout = r)

  def minConnectionsPerHost(m: Int): KeyspaceBuilder = copy(_minConnectionsPerHost = m)
  def maxConnectionsPerHost(m: Int): KeyspaceBuilder = copy(_maxConnectionsPerHost = m)

  /** A finagle stats receiver for reporting. */
  def reportStatsTo(r: StatsReceiver): KeyspaceBuilder = copy(stats = r)

  /** Set a tracer to collect request traces. */
  def tracerFactory(t: Tracer.Factory): KeyspaceBuilder = copy(tracer = t)

  def hostConnectionMaxWaiters(i: Int): KeyspaceBuilder = copy(_hostConnectionMaxWaiters = i)
}
travisbrown/zipkin
zipkin-cassandra/src/main/scala/com/twitter/cassie/Cluster.scala
Scala
apache-2.0
6,387
package test

import tap.interpreter.Interpreter
import tap.interpreter.Interpreter._
import tap.ir._
import tap.{LocalId, ModuleId}
import scala.collection.mutable.ArrayBuffer
import tap.types.Natives._

/**
 * Test fixture providing a pre-populated interpreter scope plus shorthand constructors for
 * common expressions (`e...`) and their expected interpreter values (`i...`).
 */
trait InterpreterFixture {

  // Module-level identifiers for the Prelude data constructors used throughout the tests.
  val idTuple2 = ModuleId("Prelude.Tuple", "Tuple2")
  val idCons = ModuleId("Prelude", ":")
  val idEOL = ModuleId("Prelude", "EOL")
  val idSome = ModuleId("Prelude", "Some")
  val idNone = ModuleId("Prelude", "None")

  // Scope supplied to every evaluation: native values (true/false/unit) plus empty data
  // constructors for each Prelude identifier above.
  val defaultScope: Scope = Map(
    idTrue -> iTrue,
    idFalse -> iFalse,
    idUnit -> iUnit,
    idVar -> IData(idVar, ArrayBuffer.empty),
    idTuple2 -> IData(idTuple2, ArrayBuffer.empty),
    idCons -> IData(idCons, ArrayBuffer.empty),
    idEOL -> IData(idEOL, ArrayBuffer.empty),
    idSome -> IData(idSome, ArrayBuffer.empty),
    idNone -> IData(idNone, ArrayBuffer.empty)
  )

  /** Evaluates `expr` under the default scope, extended with any test-specific bindings. */
  def eval(expr: TapExpr, scope: Scope = Map.empty): IValue = {
    Interpreter.eval(expr, defaultScope ++ scope)
  }

  // Boolean and unit literal expressions.
  val eTrue = ValueReadExpr(idTrue)
  val eFalse = ValueReadExpr(idFalse)
  val eUnit = ValueReadExpr(idUnit)

  // Option constructors: expression form and expected interpreter value form.
  def eSome(e: TapExpr) = ApplyExpr(ValueReadExpr(idSome), e)
  def iSome(e: IValue) = IData(idSome, ArrayBuffer(e))
  val eNone = ValueReadExpr(idNone)
  val iNone = IData(idNone, ArrayBuffer.empty)

  // Pair constructors.
  def eTuple2(x: TapExpr, y: TapExpr) = ApplyExpr(ApplyExpr(ValueReadExpr(idTuple2), x), y)
  def iTuple2(x: IValue, y: IValue) = IData(idTuple2, ArrayBuffer(x, y))

  // List (cons cell / end-of-list) constructors.
  def eCons(x: TapExpr, y: TapExpr) = ApplyExpr(ApplyExpr(ValueReadExpr(idCons), x), y)
  def iCons(x: IValue, y: IValue) = IData(idCons, ArrayBuffer(x, y))
  val iEOL = IData(idEOL, ArrayBuffer.empty)
  val eEOL = ValueReadExpr(idEOL)

  // Mutable-reference ("Var") constructors.
  def eVar(x: TapExpr) = ApplyExpr(ValueReadExpr(idVar), x)
  def iVar(x: IValue) = IData(idVar, ArrayBuffer(x))

  // The identity function \x -> x as an expression.
  val eFnIdentity = FunctionExpr(Argument("x"), ValueReadExpr(LocalId("x")))
}
garyb/tap
src/test/scala/test/InterpreterFixture.scala
Scala
mit
1,963
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.flink.table.planner.plan.rules.logical

import org.apache.flink.table.planner.plan.optimize.program.{FlinkChainedProgram, FlinkHepRuleSetProgramBuilder, HEP_RULES_EXECUTION_TYPE, StreamOptimizeContext}
import org.apache.flink.table.planner.utils.TableTestBase

import org.apache.calcite.plan.hep.HepMatchOrder
import org.apache.calcite.tools.RuleSets

import org.junit.{Before, Test}

/**
 * Test for [[CorrelateSortToRankRule]].
 *
 * Each case verifies the optimized plan for a correlated sort-with-limit query; the "NotSupported"
 * cases assert that the rule leaves the plan untransformed.
 */
class CorrelateSortToRankRuleTest extends TableTestBase {

  private val util = streamTestUtil()

  @Before
  def setup(): Unit = {
    // Install a HEP program containing only the rule under test so the plan diff isolates it.
    val programs = new FlinkChainedProgram[StreamOptimizeContext]()
    programs.addLast(
      "rules",
      FlinkHepRuleSetProgramBuilder.newBuilder
        .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE)
        .setHepMatchOrder(HepMatchOrder.BOTTOM_UP)
        .add(RuleSets.ofList(CorrelateSortToRankRule.INSTANCE))
        .build())
    util.replaceStreamProgram(programs)

    val createTable =
      s"""
         |create table t1(
         | f0 int,
         | f1 bigint,
         | f2 varchar(20)
         |) with (
         | 'connector' = 'values',
         | 'bounded' = 'false'
         |)
         |""".stripMargin
    util.tableEnv.executeSql(createTable)
  }

  // Supported shape: inner correlate of a DISTINCT grouping with a sorted+limited lateral.
  @Test
  def testCorrelateSortToRank(): Unit = {
    val query =
      s"""
         |SELECT f0, f1
         |FROM
         | (SELECT DISTINCT f0 FROM t1) t2,
         | LATERAL (
         | SELECT f1, f2
         | FROM t1
         | WHERE f0 = t2.f0
         | ORDER BY f2
         | DESC LIMIT 3
         | )
      """.stripMargin
    util.verifyRelPlan(query)
  }

  // The rule only matches inner correlates; an outer join must be left alone.
  @Test
  def testNonInnerJoinNotSupported(): Unit = {
    val query =
      s"""
         |SELECT f0, f1
         |FROM
         | (SELECT DISTINCT f0 FROM t1) t2
         | NATURAL LEFT JOIN
         | LATERAL (
         | SELECT f1, f2
         | FROM t1
         | WHERE f0 = t2.f0
         | ORDER BY f2
         | DESC LIMIT 3
         | )
      """.stripMargin
    util.verifyRelPlan(query)
  }

  // The left side must be a plain grouping: aggregate calls such as max() disqualify it.
  @Test
  def testAggCallNotSupported(): Unit = {
    val query =
      s"""
         |SELECT mf0, f1
         |FROM
         | (SELECT max(f0) as mf0 FROM t1) t2,
         | LATERAL (
         | SELECT f1, f2
         | FROM t1
         | WHERE f0 = t2.mf0
         | ORDER BY f2
         | DESC LIMIT 3
         | )
      """.stripMargin
    util.verifyRelPlan(query)
  }

  @Test
  // TODO: this is a valid case to support
  def testMultipleGroupingsNotSupported(): Unit = {
    val query =
      s"""
         |SELECT f0, f2
         |FROM
         | (SELECT DISTINCT f0, f1 FROM t1) t2,
         | LATERAL (
         | SELECT f2
         | FROM t1
         | WHERE f0 = t2.f0 AND f1 = t2.f1
         | ORDER BY f2
         | DESC LIMIT 3
         | )
      """.stripMargin
    util.verifyRelPlan(query)
  }

  // The grouping key must map directly to an input column; computed keys (f0 + f1) do not.
  @Test
  def testAggInputNonMappingNotSupported(): Unit = {
    val query =
      s"""
         |SELECT f0, f1
         |FROM
         | (SELECT DISTINCT f0 FROM (SELECT f0 + f1 as f0 from t1)) t2,
         | LATERAL (
         | SELECT f1, f2
         | FROM t1
         | WHERE f0 = t2.f0
         | ORDER BY f2
         | DESC LIMIT 3
         | )
      """.stripMargin
    util.verifyRelPlan(query)
  }

  // Sorts carrying an OFFSET cannot be rewritten into a rank.
  @Test
  def testSortWithOffsetNotSupported(): Unit = {
    val query =
      s"""
         |SELECT f0, f1
         |FROM
         | (SELECT DISTINCT f0 FROM t1) t2,
         | LATERAL (
         | SELECT f1, f2
         | FROM t1
         | WHERE f0 = t2.f0
         | ORDER BY f2 DESC
         | OFFSET 2 ROWS
         | FETCH NEXT 3 ROWS ONLY
         | )
      """.stripMargin
    util.verifyRelPlan(query)
  }

  // Correlation condition must be an equality; inequalities are not supported.
  @Test
  def testNonEqualConditionNotSupported(): Unit = {
    val query =
      s"""
         |SELECT f0, f1
         |FROM
         | (SELECT DISTINCT f0 FROM t1) t2,
         | LATERAL (
         | SELECT f1, f2
         | FROM t1
         | WHERE f0 > t2.f0
         | ORDER BY f2
         | DESC LIMIT 3
         | )
      """.stripMargin
    util.verifyRelPlan(query)
  }

  // One side of the equality must be the bare correlation variable, not an expression over it.
  @Test
  def testFilterConditionNotCorrelationID(): Unit = {
    val query =
      s"""
         |SELECT f0, f1
         |FROM
         | (SELECT DISTINCT f0 FROM t1) t2,
         | LATERAL (
         | SELECT f1, f2
         | FROM t1
         | WHERE t2.f0 = f0 + 1
         | ORDER BY f2
         | DESC LIMIT 3
         | )
      """.stripMargin
    util.verifyRelPlan(query)
  }
}
tillrohrmann/flink
flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/plan/rules/logical/CorrelateSortToRankRuleTest.scala
Scala
apache-2.0
5,425
/*
Copyright 2012 Twitter, Inc.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package com.twitter.algebird

/**
 * Laws that any [[StatefulSummer]] implementation must satisfy, expressed as boolean-returning
 * checks suitable for property-based testing.
 */
object StatefulSummerLaws {
  import BaseProperties.isNonZero

  // Law1: two optional sums are equivalent when either both are absent/zero, or both are
  // present and Equiv-equal.
  def zeroEquiv[V: Semigroup: Equiv](v0: Option[V], v1: Option[V]): Boolean = {
    val v0NonZero = v0.exists(isNonZero(_))
    val v1NonZero = v1.exists(isNonZero(_))
    if (!(v0NonZero || v1NonZero)) {
      // neither are non-zero
      true
    } else {
      // At least one side is non-zero: both must be defined and equivalent.
      (for {
        a <- v0
        b <- v1
        eq = Equiv[V].equiv(a, b)
      } yield eq).getOrElse(false)
    }
  }

  // Pushing all items through the summer (collecting any values it emits on put, plus a final
  // flush) must yield the same sum as summing the items directly, and leave the summer flushed.
  def sumIsPreserved[V: Semigroup: Equiv](summer: StatefulSummer[V], items: Iterable[V]): Boolean = {
    // Start from a clean summer so earlier state cannot leak into this check.
    summer.flush
    val sg = Semigroup.sumOption(items)
    val wsummer = Monoid.plus(
      Monoid.sum(items.map(summer.put(_)).filter { _.isDefined }),
      summer.flush
    )
    zeroEquiv(sg, wsummer) && summer.isFlushed
  }

  // Law 2: after every put, isFlushed must agree with flush returning None, and flushing must
  // actually empty the summer.
  def isFlushedIsConsistent[V](summer: StatefulSummer[V], items: Iterable[V]): Boolean =
    items.forall { v =>
      summer.put(v)
      (summer.isFlushed == summer.flush.isEmpty) &&
        // Now flush should empty
        summer.isFlushed && summer.flush.isEmpty
    }
}
twitter/algebird
algebird-test/src/main/scala/com/twitter/algebird/StatefulSummerLaws.scala
Scala
apache-2.0
1,682
// Hand-rolled type-class derivation: `Eq` instances are derived from "shape"
// evidence describing a datatype as a sum of alternatives or a product of
// elements, using Scala 3 `inline` matching over tuple *types*.
object Deriving {
  import scala.compiletime._

  sealed trait Shape

  // Evidence that T is a sum type whose alternatives are listed in the tuple S.
  class HasSumShape[T, S <: Tuple]

  // Evidence that T is a product type with element types Xs, together with
  // conversions between T and its tuple of elements.
  abstract class HasProductShape[T, Xs <: Tuple] {
    def toProduct(x: T): Xs
    def fromProduct(x: Xs): T
  }

  // A classic cons list used as the example datatype.
  enum Lst[+T] {
    case Cons(hd: T, tl: Lst[T])
    case Nil
  }
  object Lst {
    // Shape instances: Lst is the sum of Cons and Nil; Cons is a product of
    // (head, tail); Nil is the empty product.
    implicit def lstShape[T]: HasSumShape[Lst[T], (Cons[T], Nil.type)] = new HasSumShape
    implicit def consShape[T]: HasProductShape[Lst.Cons[T], (T, Lst[T])] = new {
      def toProduct(xs: Lst.Cons[T]) = (xs.hd, xs.tl)
      def fromProduct(xs: (T, Lst[T])): Lst.Cons[T] = Lst.Cons(xs(0), xs(1)).asInstanceOf
    }
    implicit def nilShape[T]: HasProductShape[Lst.Nil.type, Unit] = new {
      def toProduct(xs: Lst.Nil.type) = ()
      def fromProduct(xs: Unit) = Lst.Nil
    }
    // Derived Eq instances built from the shapes above.
    implicit def LstEq[T: Eq]: Eq[Lst[T]] = Eq.derivedForSum
    implicit def ConsEq[T: Eq]: Eq[Cons[T]] = Eq.derivedForProduct
    implicit def NilEq[T]: Eq[Nil.type] = Eq.derivedForProduct
  }

  // The type class being derived.
  trait Eq[T] {
    def equals(x: T, y: T): Boolean
  }
  object Eq {
    // Summon the Eq instance for T at inline-expansion time and apply it.
    inline def tryEq[T](x: T, y: T) = summonInline[Eq[T]].equals(x, y)

    // Walk the tuple of alternatives: if both values are of the current
    // alternative, compare with its Eq; if only one is, they differ; otherwise
    // recurse on the remaining alternatives. `erasedValue` lets us match on
    // the tuple TYPE without a runtime value.
    inline def deriveForSum[Alts <: Tuple](x: Any, y: Any): Boolean = inline erasedValue[Alts] match {
      case _: (alt *: alts1) =>
        x match {
          case x: `alt` =>
            y match {
              case y: `alt` => tryEq[alt](x, y)
              case _ => false
            }
          case _ => deriveForSum[alts1](x, y)
        }
      // Unit marks the end of the alternatives tuple: no alternative matched.
      case _: Unit => false
    }

    // Compare two products element-wise, recursing on the tuple type.
    inline def deriveForProduct[Elems <: Tuple](xs: Elems, ys: Elems): Boolean = inline erasedValue[Elems] match {
      case _: (elem *: elems1) =>
        val xs1 = xs.asInstanceOf[elem *: elems1]
        val ys1 = ys.asInstanceOf[elem *: elems1]
        tryEq[elem](xs1.head, ys1.head) && deriveForProduct[elems1](xs1.tail, ys1.tail)
      // Empty products are always equal.
      case _: Unit => true
    }

    // Entry points: derive Eq for a sum / product given its shape evidence.
    inline def derivedForSum[T, Alts <: Tuple](implicit ev: HasSumShape[T, Alts]): Eq[T] = new {
      def equals(x: T, y: T): Boolean = deriveForSum[Alts](x, y)
    }
    inline def derivedForProduct[T, Elems <: Tuple](implicit ev: HasProductShape[T, Elems]): Eq[T] = new {
      def equals(x: T, y: T): Boolean = deriveForProduct[Elems](ev.toProduct(x), ev.toProduct(y))
    }

    // Base instance for Int, terminating the derivation.
    implicit object eqInt extends Eq[Int] {
      def equals(x: Int, y: Int) = x == y
    }
  }
}

// Exercises the derived instances on Lst[Int] and nested Lst[Lst[Int]].
object Test extends App {
  import Deriving._
  val eq = implicitly[Eq[Lst[Int]]]
  val xs = Lst.Cons(1, Lst.Cons(2, Lst.Cons(3, Lst.Nil)))
  val ys = Lst.Cons(1, Lst.Cons(2, Lst.Nil))
  assert(eq.equals(xs, xs))
  assert(!eq.equals(xs, ys))
  assert(!eq.equals(ys, xs))
  assert(eq.equals(ys, ys))

  val eq2 = implicitly[Eq[Lst[Lst[Int]]]]
  val xss = Lst.Cons(xs, Lst.Cons(ys, Lst.Nil))
  val yss = Lst.Cons(xs, Lst.Nil)
  assert(eq2.equals(xss, xss))
  assert(!eq2.equals(xss, yss))
  assert(!eq2.equals(yss, xss))
  assert(eq2.equals(yss, yss))
}
som-snytt/dotty
tests/run/typeclass-derivation1.scala
Scala
apache-2.0
2,941
import sbt._

/** Single place for the build's third-party dependency coordinates. */
object Dependencies {

  /** Servlet API; supplied by the container at runtime, hence "provided". */
  val servletApiDep = "javax.servlet" % "javax.servlet-api" % "3.1.0" % "provided"

  // NOTE(review): the Scala-version argument `sv` is currently unused here;
  // the parameter is kept so existing call sites keep compiling.
  def specs2Dep(sv: String) = "org.specs2" %% "specs2-core" % "3.8.6"

  def okHttp = List("com.squareup.okhttp3" % "okhttp" % "3.5.0")

  /** specs2 plus OkHttp, all scoped to the test configuration. */
  def integrationTestDeps(sv: String) = (specs2Dep(sv) :: okHttp).map(dep => dep % "test")

  // Version constants shared across build modules.
  val commonsCodecVersion = "1.10"
  val scalacheckVersion = "1.13.4"
  val jodaTimeVersion = "2.9.6"
  val jodaConvertVersion = "1.8.1"
  val scalaXmlVersion = "1.0.6"
  val commonsIoVersion = "2.5"
  val commonsFileUploadVersion = "1.3.2"
  val jettyVersion = "9.2.19.v20160908"
  val nettyVersion = "4.1.6.Final"
  val scalatestVersion = "3.0.1"
  val json4sVersion = "3.5.0"
  val asyncHttpClientVersion = "1.8.17"
  val scribeJavaVersion = "3.3.0"
}
jarin/unfiltered
project/Dependencies.scala
Scala
mit
803
/*
 * Copyright 2012 Twitter Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.twitter.zipkin.web

import com.twitter.finatra.Request
import com.twitter.util.Time
import com.twitter.zipkin.common.{AnnotationType, BinaryAnnotation}
import com.twitter.zipkin.query.{Order, QueryRequest}
import java.nio.ByteBuffer
import java.text.SimpleDateFormat

object QueryExtractor {
  // Pattern used for the "endDatetime" request parameter.
  private val DateTimePattern = "MM-dd-yyyy HH:mm:ss"

  // Kept for compatibility with any external users of this field.
  // NOTE: SimpleDateFormat is NOT thread-safe; do not share this instance
  // across concurrent requests. `apply` below builds a fresh formatter per
  // call instead of using this one.
  val fmt = new SimpleDateFormat(DateTimePattern)

  /**
   * Takes a Finatra `Request` and produces the corresponding `QueryRequest`
   * from its GET parameters.
   *
   * Returns `None` when no "serviceName" parameter is present.
   * May throw `java.text.ParseException` if "endDatetime" is present but
   * malformed, and `NumberFormatException` if "limit" is not an integer
   * (unchanged from the original behavior).
   */
  def apply(request: Request): Option[QueryRequest] = {
    val serviceName = request.params.get("serviceName")

    // "all" and the empty string both mean "no span-name filter".
    val spanName = request.params.get("spanName").flatMap {
      case "all" => None
      case "" => None
      case s @ _ => Some(s)
    }

    /* Pull out the annotations (annotations[0], annotations[1], ...) */
    val annotations = extractParams(request, "annotations[%d]") match {
      case Nil => None
      case seq @ _ => Some(seq)
    }

    /* Pull out the key/value annotations; keys and values are zipped by index. */
    val keys = extractParams(request, "keyValueAnnotations[%d][key]")
    val values = extractParams(request, "keyValueAnnotations[%d][val]")
    val binaryAnnotations = (0 until (keys.length min values.length)).map { i =>
      // NOTE(review): getBytes uses the platform default charset here;
      // presumably UTF-8 is intended — confirm before changing.
      BinaryAnnotation(keys(i), ByteBuffer.wrap(values(i).getBytes), AnnotationType.String, None)
    }.toSeq match {
      case Nil => None
      case seq @ _ => Some(seq)
    }

    // Fresh formatter per call: SimpleDateFormat carries mutable parse state,
    // so the shared `fmt` above would be unsafe under concurrent requests.
    // Timestamps are in microseconds, hence the * 1000 on the millisecond value.
    val endTimestamp = request.params.get("endDatetime") match {
      case Some(str) => new SimpleDateFormat(DateTimePattern).parse(str).getTime * 1000
      case _ => Time.now.inMicroseconds
    }

    val limit = request.params.get("limit").map { _.toInt }.getOrElse(Constants.DefaultQueryLimit)
    val order = Order.DurationDesc

    serviceName.map { name =>
      QueryRequest(name, spanName, annotations, binaryAnnotations, endTimestamp, limit, order)
    }
  }

  /**
   * Collects indexed request parameters. `keyFormatStr` is a format string
   * with a single "%d" slot; values are gathered for indices 0, 1, 2, ...
   * until the first missing parameter.
   */
  private def extractParams(request: Request, keyFormatStr: String): Seq[String] =
    Iterator.from(0)
      .map { i => request.params.get(keyFormatStr.format(i)) }
      .takeWhile(_.isDefined)
      .collect { case Some(v) => v }
      .toSeq
}
cordoval/zipkin
zipkin-web/src/main/scala/com/twitter/zipkin/web/QueryExtractor.scala
Scala
apache-2.0
2,843
/* * Copyright 2016 The BigDL Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intel.analytics.bigdl.nn import com.intel.analytics.bigdl.nn._ import com.intel.analytics.bigdl.optim.{L2Regularizer, SGD} import com.intel.analytics.bigdl.utils._ import com.intel.analytics.bigdl.tensor.{Storage, Tensor} import com.intel.analytics.bigdl.utils.RandomGenerator._ import com.intel.analytics.bigdl.utils.TorchObject.TYPE_DOUBLE_TENSOR import org.scalatest.{BeforeAndAfter, FlatSpec, Matchers} import scala.collection.mutable.ArrayBuffer import scala.math._ @com.intel.analytics.bigdl.tags.Parallel class ConvLSTMPeepholeSpec extends FlatSpec with BeforeAndAfter with Matchers { "A convLstm" should " work in BatchMode" in { val hiddenSize = 5 val inputSize = 3 val seqLength = 4 val batchSize = 2 val kernalW = 3 val kernalH = 3 val rec = Recurrent[Double]() val model = Sequential[Double]() .add(rec .add(ConvLSTMPeephole[Double]( inputSize, hiddenSize, kernalW, kernalH, 1, withPeephole = false))) // .add(View(hiddenSize * kernalH * kernalW)) val input = Tensor[Double](batchSize, seqLength, inputSize, kernalW, kernalH).rand val output = model.forward(input).toTensor[Double] for (i <- 1 to 3) { val output = model.forward(input) model.backward(input, output) } } "A ConvLSTMPeepwhole " should "generate corrent output" in { val hiddenSize = 5 val inputSize = 3 val seqLength = 4 val batchSize = 1 val inputData = Array( 0.7858528697343803, 0.39643673229432663, 0.5413854281649032, 
0.19631629573541698, 0.9680900797738962, 0.7049105791704678, 0.718163522037053, 0.3668819786771701, 0.9440464687074629, 0.397055650490982, 0.5690454413390856, 0.18889661397871826, 0.001579648466406347, 0.7021713615013074, 0.8454086571332006, 0.35854686871163577, 0.2314278384571361, 0.13381056284442328, 0.6132205837353593, 0.7805923402713709, 0.3193504762780809, 0.4820660231781917, 0.13430830215086442, 0.08936018148523917, 0.4341675120170957, 0.3757014153775321, 0.38254346806277906, 0.21915241667840835, 0.8007170972591638, 0.15182768990111906, 0.16921631594179842, 0.4634274688062553, 0.8563216662553195, 0.7757094855568256, 0.8645676388878089, 0.2951255327230293, 0.8551730734700937, 0.4644185607835579, 0.871393203268247, 0.4597647143442395, 0.4071171727330902, 0.4854337817083433, 0.3318299516855079, 0.498443023507131, 0.7156391293365701, 0.3249206769911145, 0.029879345424368986, 0.9096393017575739, 0.2144207018779739, 0.0503060117834595, 0.089917511783566, 0.6167131506276432, 0.8926610118451324, 0.22789546589690257, 0.8653884115886622, 0.2722629410529933, 0.8464231593999603, 0.1986656076231056, 0.36207025934462145, 0.20100784318653586, 0.49658835737109863, 0.8639942640204804, 0.6467518899038553, 0.4596178558550218, 0.47357079720990714, 0.38797563894307674, 0.9548503051587474, 0.5749238739873289, 0.3847657155455617, 0.3841002078008404, 0.6294462025466693, 0.011197612494351583, 0.5527740169991779, 0.22146120004645264, 0.5994096770862478, 0.31269860199224186, 0.34076707800851624, 0.059283360194189005, 0.8357335896530725, 0.16944373713090255, 0.8659253353994816, 0.09845143876193818, 0.11288714233839348, 0.3621625765077783, 0.7427050848065528, 0.32484987192893533, 0.4459479804922727, 0.6566592559514339, 0.9947576704428358, 0.7117063859310553, 0.7744659218984301, 0.23830967806526449, 0.4276461743258111, 0.44676684424295, 0.820457223019586, 0.20418080384634774, 0.5640742319761487, 0.25500989650402617, 0.5606232230408129, 0.9632661452404038, 0.7091629761784937, 
0.2987437440895777, 0.42183273373923236, 0.13458995231781568, 0.8353811682997592, 0.4543100021283092, 0.13410281765513044, 0.678439795677715 ) val input = Tensor[Double](inputData, Array(batchSize, seqLength, inputSize, 3, 3)) val rec = Recurrent[Double]() val model = Sequential[Double]() .add(rec .add(ConvLSTMPeephole[Double](inputSize, hiddenSize, 3, 3, 1, withPeephole = false))) val weightData = Array( 0.1323664, 0.11453647, 0.08062653, 0.12153825, 0.09627097, 0.09425588, -0.12831208, 0.1123728, 0.12671691, 0.16061302, -0.110098146, 0.17929439, -0.09391218, 0.03184388, 0.16272181, -0.17903298, -0.11212616, -0.16194142, -0.07123136, 0.171485, -0.06678299, 0.14097463, -0.076257214, -0.12772666, 0.10127824, 0.14800657, -0.17392491, 0.182817, -0.12528893, 0.18364896, -0.14304572, -0.17017141, 0.09545128, 0.0021875147, -0.12588996, -0.17018303, -0.13192223, -0.08928953, 0.083996765, 0.113029376, -0.03533393, 0.13787921, 0.044130187, -0.05275204, 0.06665044, 0.10583809, 0.14640503, -0.0825495, 0.175358, -0.17123815, 0.016984237, -0.15625823, 0.060610376, -0.07900191, -0.1368149, -0.14521007, 0.1376833, -0.15550166, -0.10907967, 0.015855903, -0.16790473, -0.16079058, -0.17573304, -0.093626104, -0.14478739, 0.06643916, 0.080685385, -0.048617058, 0.12523253, -0.06701434, 0.016185585, 0.108312786, -0.1217048, 0.18999666, 0.12596558, 0.1527906, 0.11697532, 0.11283574, 0.057593197, 0.12615764, 0.072295435, 0.025243226, -0.11434275, -0.014110628, 0.16677082, -0.17618565, -0.020268144, 0.017786589, 0.10867132, -0.01608419, 0.15298566, -0.17555264, 0.13313031, -0.10074914, -0.034776952, -0.02424163, 0.06472268, 0.014123779, -0.046546947, -0.018097628, 0.12484821, 0.12694938, 0.08661913, -0.028283825, -0.17283146, -0.11884577, 0.054063573, -0.035790008, -0.048655663, -0.10025538, -0.08609334, 0.13622199, -0.018927833, -0.13733545, 0.06963552, -0.14686611, -0.12066068, -0.041992664, 0.1182357, -0.027649706, 0.18181281, 0.041711636, 0.04432702, -0.116909824, -0.17462158, 
-0.021893747, -0.15286785, -0.17956264, 0.08781697, 0.18018082, -0.12757431, 0.15001388, 0.06699571, -0.013572012, 0.011313009, -0.17503677, 0.16929771, -0.1792189, -0.06653591, -0.018526785, 0.14020926, -0.14813682, -0.042868182, 0.071334295, -0.0331954, -0.022251781, -0.044458825, -0.108357064, -0.09425906, 0.12529904, 0.053906064, -0.131506, 0.035108663, -0.027024692, 0.08062959, -0.07704205, -0.08997438, 0.049663007, -0.0602714, -0.099638045, 0.11657174, -0.14531808, -0.14385863, -0.13701083, 0.10701223, 0.07447342, 0.02098617, 0.013973343, 0.016073138, 0.1054289, -0.12807003, 0.10496164, -0.08306444, -0.07314216, 0.00746669, 0.13380568, 0.11950841, 0.012040939, -0.08963716, 0.029381294, 0.0444725, -0.14176999, -0.046013553, 0.14456055, -0.020257693, 0.11915037, -0.13432308, -0.04188419, -0.14749014, 0.10679834, -0.008356104, -0.091679156, -0.052243225, -0.12075397, -0.122984625, -0.0019464424, -0.009337306, -0.09842576, -0.05086776, -0.13618909, 0.044009373, 0.03276076, 0.14773913, -0.08475516, -0.035664886, 0.13492987, -0.0016442818, 0.01504501, 0.10326427, 0.14080256, 0.13216749, -0.13468772, -0.13552676, -0.07458449, 0.116350845, -0.017945917, -0.079822004, 0.14235756, 0.108743496, 0.056373693, 0.106244266, 0.13354784, -0.055554073, 0.06573345, 0.11254954, 0.030720904, -0.12789108, 0.11583857, 0.01126726, -0.076339975, -0.043077517, 0.14489457, -0.061520822, -0.084330715, -0.13341874, 0.08033921, 0.01965443, -0.028425984, -0.10873458, 0.0019523608, -0.14760552, -0.0715915, 0.121757604, 0.06075947, 0.024467388, 0.08100012, -0.08324363, -0.077451654, -0.14260122, -0.0990978, -0.035064414, -0.122219145, 0.075672865, -0.097894855, -0.032138903, -0.14660397, -0.09522337, -0.10158224, 0.02410856, 0.09243034, -0.0098140575, -0.1458175, 0.1018263, 0.072414316, -0.025606995, -0.09973119, -0.07166437, 0.024537396, -0.104933284, -0.12598962, 0.073707975, -0.11742054, -0.117542766, -0.11412587, -0.11182065, 0.001858102, -0.11021746, 0.14775929, -0.05255193, 0.11530138, 
-0.053862877, -0.12126004, -0.039800987, 0.12030308, 0.06380281, 0.14243272, -0.105793275, -0.033665083, 0.08111985, -0.013766574, 0.113472275, -0.084296875, -0.08927287, 0.11604687, -4.435518E-4, 0.04164252, -0.10153338, -0.0691632, -0.1405299, 0.045724023, -0.064101756, 0.040309414, 0.09550836, -0.1334707, -0.048097685, -0.10702615, 0.032742012, 0.11855929, 0.037364442, -0.01936604, -0.059911903, -0.0876515, 0.0025609094, 0.013815972, 0.0032597338, 0.048198074, -0.12489824, -0.050333824, 0.043307517, 0.075850576, 0.04691248, 0.14177343, 0.12245611, 0.041173577, -0.04838414, -0.12059745, 0.034499634, -0.13209838, 0.12898007, 0.09691984, -0.06896793, -0.09984528, 0.118018836, -0.13144998, -0.12338916, 0.012128665, 0.10709243, 0.057155527, 0.11058617, -0.042369995, 0.094536856, 0.09929702, -0.04074019, -0.09469174, -0.014339956, -0.14446115, -0.06111967, -0.07840543, 0.08384239, 0.04264625, 0.0666282, 0.12344897, -0.07968074, -0.11735667, -0.024460025, -0.076237164, 0.04076042, -0.05550425, -0.03196365, -0.14604644, 0.06142091, 0.057403088, -0.076850355, 0.04749991, -0.14173616, -0.12143371, -0.11346632, -0.40388778, 0.41770256, -0.16137226, 0.18514906, 0.17749049, 0.17038673, 0.0525878, 0.059497084, -0.12689222, 0.0934659, -0.16460975, 0.0685503, -0.1598997, 0.18707295, -0.18101873, 0.09334456, 0.0068138484, 0.119945474, -0.15793812, 0.16077317, -0.12223828, 0.14830972, -0.08173027, -0.14720327, 0.15735061, -0.18086493, -0.15324399, 0.041607905, 0.08446815, -0.066285975, -0.021721164, 0.122331254, -0.091555275, -0.009002679, 0.05552557, -0.12491941, -0.11549681, -0.11587785, 0.07597942, 0.12365428, 0.18181923, 0.10287596, -0.110499285, 0.1069625, -0.15017055, 0.03847431, -0.10492084, 0.15501796, -0.17868865, -0.15074125, -0.14290886, -0.11815483, 0.07344515, 0.07833582, -0.018226074, 0.18693122, -0.042623483, -0.08500967, -0.17536701, 0.03428398, -0.13042022, 0.106423296, -0.11648117, 0.061523918, 0.05105374, -0.14420861, -0.091228865, -0.09801924, 0.14349346, 
-0.06156704, -0.007966236, -0.07721831, 0.118240215, 0.17742375, -0.124138705, 0.15581268, -0.123607285, 0.06057967, -0.14400646, 0.11283668, -0.13120261, -0.014970919, -0.0342267, 0.08669027, -0.14633434, -0.0027491183, -0.18417189, -0.10101288, -0.08966918, 0.18372758, 0.12753725, -0.12904254, -0.14902571, 0.05452875, 0.04595482, 0.0018077934, 0.06402101, -0.09695311, -0.031936277, -0.0836957, 0.09798136, -0.10965453, -0.15086654, -0.15770726, 0.13075432, 0.04373129, 0.17452799, 0.15790863, 0.1877846, 0.055126328, 0.06072954, -0.06788629, -0.03286624, -0.08618504, 0.07507046, -0.06212962, 0.1620441, -0.1030619, -0.09856772, -0.18165682, 0.118544854, 0.08244241, -0.08935274, -0.10706232, -0.097339824, 0.12492477, -0.14299503, -0.1733319, 0.046472415, -0.17566924, -0.13683586, -0.064733684, -0.13249831, -0.00250903, -0.1423295, 0.19028416, -0.06298632, -0.11775568, -0.15973037, -0.16295016, 0.091037214, -0.021105299, 0.114337094, -0.028981507, -0.056668505, -0.065267414, -0.045449473, 0.07244931, -0.12886764, -0.03691907, 0.094375856, -0.019792939, 0.024737319, 0.09090861, -0.12854996, 0.072243236, -0.030617973, -0.094412014, -0.042138487, 0.14073718, -0.101585604, 0.11363049, -0.07387862, -0.07795169, -0.057193495, -0.11206202, -0.042421576, -0.07356668, 0.0035800182, 0.14250474, 0.035420645, -0.09439937, -0.00454818, 0.04039183, 0.063089624, 0.025078747, -0.08424321, -0.13109599, -0.054199703, 0.08430544, -0.07249277, 0.05265952, 0.09668127, 0.08280557, 0.102784894, 0.09898424, 0.027891688, -0.00482123, 0.083571374, 0.13162544, 0.13045219, -0.13977656, 0.09740613, -0.09737305, -0.03441763, -0.0721285, -0.04417562, 0.013421286, 0.011711505, 0.09309547, 0.05629748, 0.051518198, -0.099549964, 0.026949838, 0.11573528, 0.059821997, -0.086777166, 0.043824308, 0.06498698, 0.025848297, 0.055641714, -0.09549449, -0.13476206, 0.044260006, 0.059218522, -0.07706229, 0.06561182, -0.00923181, 0.08157991, -0.14256369, -0.021998769, 5.660323E-4, 0.100776434, -0.08302952, 
-0.08642054, -0.049740147, 0.12746517, -0.067147635, 0.050859027, -0.07180711, -0.08228902, -0.077878885, 0.07168357, 0.059669446, -0.08330871, -0.045181356, -0.08571539, -0.033329267, -0.12251607, 0.1383708, 0.0305497, -0.09032205, -0.14507341, -0.017640868, -0.11075582, 0.12985572, 0.055534806, 0.08301828, 0.003015704, -0.13372704, 0.13678057, 0.06473821, 0.04450592, 0.04828933, -0.09687717, 0.046958666, -0.13321148, -0.092644796, 0.11233321, 0.10518413, -0.065738074, -0.034296088, 0.1480033, 0.016602587, 0.014963573, -0.08498246, 0.09283064, 0.022058068, -0.03559866, -0.12306792, -0.0899152, -0.13637953, 0.13551217, -0.0945891, 0.12492594, -0.10224763, -0.13880855, 0.08347999, 0.12732211, 0.05273699, -0.09553963, 0.04250221, 0.026436547, -0.06730121, -0.03517642, 0.089389004, 0.032738786, -0.01546961, -0.12659001, 0.09378276, 0.10204366, -0.0807472, -0.14217609, -0.10287603, -0.060035314, -0.081469364, 0.14679892, 0.010581807, -0.026570352, 0.02302546, -0.028409055, 0.038031608, 0.010894875, -0.14031309, -0.0054970854, -0.08796428, -0.06273019, -0.09651355, -0.024267742, -0.062752575, -0.02792022, 0.057130355, -0.13787158, 0.14674985, -0.049736664, -0.018737877, 0.06715224, -0.1446277, 0.0037571336, 0.07341689, 0.14253464, -0.14391004, -0.04377491, -0.06568719, 0.030192256, -0.020413188, 0.068322495, -0.10879119, 0.057932742, 0.13234846, -0.109634556, 0.082434475, 0.012896727, 0.07257251, 0.06956354, 0.14261313, 0.10148351, -0.012789844, -0.06141955, -0.14040212, -0.12688103, 0.04696185, -0.02195622, -0.13131708, -0.13735592, 0.017267786, -0.11317883, 0.058699675, 0.068665326, 0.046896778, 0.0686829, 0.0040637283, 0.051919427, 0.012295667, 0.01212392, 0.13709806, -0.027172498, 0.0227216, -0.019342942, -0.118283324, -0.12041302, -0.11384692, 0.068581454, 0.13964377, 0.019542517, 0.07451984, -0.03907243, 0.119501606, 0.15579715, 0.041966874, -0.269146, 0.29396367, -0.031004338, 0.11562358, 0.10148933, 0.01713493, 0.11109411, 0.12588945, -0.1567852, 0.007233048, 
-0.04118252, -0.13922743, 0.013768866, 0.14614376, 0.03159147, 0.09009805, -0.15100884, -0.045039184, -0.0754665, 0.16305731, -0.10817033, 0.066871606, -0.0444717, 0.0176596, -0.14596693, 0.17050807, 0.1107248, 0.045592476, -0.12141662, -0.08798046, 0.08681701, -0.12117194, -0.032277536, -0.085312314, -0.104780264, 0.07352057, -0.0038153927, 0.016070124, -0.018401293, -0.08902605, 0.14795774, 0.076488174, -0.16232127, -0.049365066, 0.18580824, -0.14259012, -0.10463519, -0.1639147, 0.005918354, 0.1515928, -0.107107915, -0.030828143, -0.1123979, 0.04319057, -0.121740706, -0.08577948, 0.12570882, 0.13560909, 0.18987843, 0.12616682, -0.079119384, -0.050766237, 0.020498704, -0.12197697, 0.08224279, 0.062052865, 0.02906278, 0.017115045, -0.18389794, 0.12766539, 0.03930737, 0.008523566, 0.18270826, 0.14880183, 0.10221764, -0.15133749, 0.082017995, -0.0130814025, -0.027355297, -0.10657773, 0.052748024, -0.115928516, -0.046661552, -0.12882695, -0.029029572, -0.06530424, 0.04025339, 0.16088219, -0.10383346, 0.082959786, 0.0887459, 0.035165705, 0.15808243, 0.16269772, -0.1357966, -0.10179971, -0.0051135537, 0.17215031, 0.1328058, -0.01860083, 0.1615801, 0.12609202, 0.16104408, -0.021468738, -0.16631302, 0.0871791, 0.014376062, -0.17092176, 0.11434908, 0.122610696, 0.028687527, 0.050493836, 0.07100734, 0.059703957, -0.082912676, -0.12887894, -0.04537745, -0.18151796, 0.056237914, 0.14380231, -0.14045763, -0.09534631, -0.074677795, 0.19009903, -0.10265426, -0.1292793, -0.11336175, -0.1784303, -0.07473458, -0.15607946, 0.07124518, -0.00909464, -0.045578852, 0.19244981, -0.15921071, 0.07649794, 0.17505349, -0.18314067, 0.07524149, 0.010540678, 0.09569251, 0.046074994, -0.02197193, 0.086423144, 0.09348795, -0.028091308, -0.09014368, -0.10826236, 0.13677588, 0.04464668, 0.034753967, -0.12927626, -0.11813127, 0.004072468, 0.026979715, -0.03775997, 0.061768685, -0.08796178, 0.11295643, -8.3927833E-4, -0.014426978, -0.11624958, 0.034711517, -0.08129607, 0.03957355, -0.12502019, 
-0.030620545, 0.09466354, -0.11467053, 0.0122420285, -0.055959523, 0.018059604, -0.14006427, -0.08326284, 0.041128356, 0.13832542, 0.015870363, -0.08918273, 0.08830697, -0.107775986, -0.07253431, -0.040891536, -0.13261265, 0.09009444, 0.14417978, 0.10808684, -0.037405483, -0.034763217, 0.050338723, -0.06079984, 0.11137182, -0.014334275, 0.102414526, 0.07237307, 0.0776832, 0.11163047, -0.04197717, -0.12803015, 0.024760138, 0.0034540703, 0.0042343847, 0.07084355, -0.06716749, -0.102060705, -0.135623, 0.06517094, 0.1303601, -0.0050743595, -0.02650759, -0.02323579, -0.08093262, 0.05246471, -0.13667563, -0.020750018, -0.04469017, 0.09838917, -0.13269165, -0.122779235, -0.01706858, 0.10670577, 0.054394018, -0.086262256, -0.08817667, -0.062382106, 0.08033159, 0.031420924, -0.088938475, 0.12926981, 0.08264999, -0.11417435, 0.08372043, 0.0063608363, -0.021203713, 0.014786269, -0.07409301, -0.021681488, 0.08935846, 0.058311924, -0.082670316, 0.06871885, 0.116076715, -0.08725295, -0.14136109, -0.026386688, -0.061878256, 0.14713068, 0.022175662, -0.041593164, 0.14277703, 0.028373852, 0.040583156, 0.111651644, -0.016684225, -0.10156425, 0.0021935678, -0.14317933, 0.061549712, 0.13288979, 0.14676924, -0.101627015, -0.07396209, -0.13494837, -0.09710303, 0.054546677, 0.08524791, -0.08548785, 0.035400804, -0.042056955, -0.08300001, -0.1220187, -0.14316916, 0.080142096, 0.04776946, 0.0020199257, -0.109317854, 0.08319731, -0.14316626, 0.0773201, -0.0937432, 0.067408286, 0.14834292, -0.13614324, 0.13782948, -0.13609166, 0.119039305, -0.013118409, -0.07150358, 0.077627346, 0.09797143, -0.06394284, -0.047202796, 0.047071274, 0.07730204, -0.046518948, 0.05054308, 0.02134939, -0.08509347, 0.13650805, -0.117712826, 0.102454275, -0.12443718, -0.043289855, -0.02050105, 0.011652269, -0.13120848, 0.096573845, -0.08977994, -0.12086315, 0.063193254, -0.004018177, 0.13031282, 0.08007126, -0.027115893, -0.09841936, 0.1075952, 0.0168945, 0.023779849, -0.049907062, -0.085106626, 0.0026978212, 
0.032795515, -0.07142712, -0.08590961, -0.09582995, -0.13937126, 0.108712606, -0.14322321, 0.070150875, -0.13415396, 0.013311513, 0.09438836, -0.13973792, 0.013226105, 0.014542425, -0.027073216, -0.023348909, -0.012271121, -0.082431994, -0.0440865, 0.032611705, 0.10746979, 0.039494224, -0.09831236, 0.05940311, 2.3674325E-4, -0.013012138, -0.08452134, 0.026267232, 0.055084113, -0.007533836, -0.13804708, -0.07745523, 0.055061925, -0.13331874, 0.055787712, 0.14850645, 0.09001049, -0.113049194, 0.1034078, -0.053732794, 0.005184802, -0.10854875, -0.032727003, 0.069370605, -0.0699945, -0.06817097, 0.10078447, 0.07813147, 0.033056628, -0.1704846, -0.15495631, -0.066432096, -0.13988744, 0.019557567, -0.06827179, 0.010410018, 0.11517244, 0.06752451, -0.07570038, -0.069486514, -0.01585388, 0.13004497, 0.05301245, -0.04380925, -0.057413254, 0.18796284, -0.025588196, -0.055301014, -0.18797436, 0.042079464, 0.049512137, 0.036646772, -0.028730556, 0.011647098, 0.12882207, -0.17270355, -0.0035952448, 0.1648032, 0.031258453, 0.06238857, 0.021837315, -0.021809775, -0.013917966, -0.13398269, -0.09437768, -0.17708766, 0.05525659, -0.07333484, -0.08282379, 0.11259561, -0.021445781, -0.054697804, -0.14095771, 0.16938439, -0.08627576, -0.05361658, -0.12766492, 0.17628285, 0.10054519, 0.11889319, 0.05916385, 0.17565647, 0.17474626, -0.084707394, -0.06529905, 0.027251877, 0.045083825, -0.068300545, 0.07404977, -0.106370196, -0.18558562, 0.0012421582, -0.07639284, 0.09119752, -0.16026673, 0.03536145, 0.0415398, 0.14838776, -0.08839314, -0.095788084, -0.07995972, 0.03311477, -0.110495165, -0.12813115, 0.014271884, 0.09901724, -0.1511705, 0.008499655, -0.15553316, 0.098703355, 0.1498216, 0.076806694, 0.006844005, -0.109560326, 0.090494744, 0.14271256, -0.18168364, 0.18955807, 0.01633289, -0.18395755, 0.114932604, 0.10402427, 0.038280725, -0.17810583, 0.01789274, -0.048914507, 0.09589668, -0.06169789, 0.12770708, -0.18387029, -0.04703918, 0.17488319, -0.12249794, 0.060243525, -0.056309838, 
-0.032533053, -0.1619768, 0.17675658, 0.014305584, 0.14060433, -0.042199265, 0.05712904, 0.044829167, 0.011096618, -0.14014772, -0.17880245, 0.16239895, 0.1751847, 0.13345739, -0.087636806, -0.092316225, -0.027283559, -0.07892386, 0.18008122, 0.053961694, -0.16841035, -0.11244774, -0.18884337, 0.19002426, -0.12775706, -0.0441988, -0.064007156, -0.006827585, -0.034391925, 0.017185388, 0.14070128, 0.18855122, 0.16581473, -0.07020123, -0.05308279, -0.10519481, 0.14562331, 0.10377785, 0.048685696, -0.03890746, -0.12079896, -0.050986253, -0.062715515, -0.072706096, -0.12474842, -0.03164121, 0.018972829, 0.09503959, 0.03272181, 0.0718327, 0.07293465, 0.0356435, 0.0070007364, 0.030311415, -0.028329633, -0.07726967, 0.02418142, 0.1357774, 0.14425409, 0.042558707, -0.061877035, 0.13747448, 0.13204801, 0.13962367, 0.050360594, 0.11418883, 0.12632264, -0.06569335, 0.120681286, 0.07733086, -0.029515827, -0.13601883, -0.017011724, -0.010782417, 0.014715333, -0.052253734, -0.032368187, 0.13431908, -0.043660387, 0.096442826, -0.13594243, 0.09075854, 0.0146357855, 0.061830286, 0.038969733, 0.112418495, -0.04555642, -0.12400285, 0.094084635, 0.0386825, 0.05185844, 0.07179485, 0.12685227, -0.07149219, -0.09555144, -0.12677261, 0.014616831, 0.11663383, -0.14626648, 0.12032344, 0.052169777, -0.14531699, -0.14825104, 0.0458667, 0.14745489, -0.11824784, -0.143719, -0.032160666, 0.013308394, -0.038604915, 0.046203095, -0.061285798, -0.011695573, 0.05354165, -0.0021355557, 0.03226571, -0.13687798, 0.07708235, 0.012822559, 0.13057984, -0.047273517, -0.046722367, 0.13640164, -0.1134389, -0.018769452, 0.033678796, 0.10306464, -0.14307259, 0.08554146, 0.05952141, 0.0710592, -0.07967377, -0.110443026, -0.051775664, 0.08555203, 0.12498455, 0.010950238, 0.07806091, 0.02533094, -0.12941347, -0.0676599, 0.047001403, 0.13817227, -0.041809034, -0.039458882, -0.1277546, -0.09346504, 0.045584284, -0.013046625, 0.13334489, -0.030883985, 0.06816985, -0.094920315, 8.6036685E-4, -0.063012615, 0.022756761, 
-0.14304605, 0.0018345796, -0.027446955, -0.062981054, 0.049244262, 0.08467103, 0.04948917, 0.12852058, -0.054968648, -0.0013518144, 0.08650799, 0.046505045, 0.12778473, -0.04336263, 0.12158557, -0.048306707, -0.07632669, -0.118921325, 0.08370227, 0.10476553, 0.1153842, 0.076464675, 0.077894576, 0.11230735, -0.006782354, 0.043779783, 0.124212556, -0.06815983, 0.07183016, 0.103715405, 0.0060186437, -0.07855222, -0.068271, -0.11873516, -0.03313894, -0.12967804, 0.034167856, -0.0710432, -0.13800886, -0.06777998, 0.0073924917, 0.07163505, 0.05019128, -0.10930661, -0.061820634, -0.016123764, 0.05254672, -0.06823265, 0.1219117, 0.016617121, -0.07891416, 0.034150273, 0.122803226, 0.06793002, 0.09735866, -0.02079113, -0.13155958, 0.13877551, -0.10352277, 0.044731736, 0.13054536, -0.0034577732, -0.10844458, 0.1308905, 0.1355866, -0.1172533, 0.13036548, -0.06841189, -0.14214517, 0.029879646, 0.14223722, -0.09950151, 0.0975786, -0.08808848, -0.102723174, 0.04360433, 0.043727856, -0.102559224, -0.068816505, 0.14440708, 0.038142376, 0.11174967, -0.13707735, 0.11471822, 0.07180096, -0.089118496, -0.0041832873, -0.06590406, -0.06827257, 0.115748845, 0.14643653, -0.13591826 ) val weights = model.getParameters()._1 weights.copy(Tensor[Double](weightData, Array(weightData.size, 1))) // val weightsOri = new ArrayBuffer[Tensor[Double]]() // val weightsNew = new ArrayBuffer[Tensor[Double]]() // // val sizeI = hiddenSize * inputSize * 3 * 3 // val sizeH = hiddenSize * hiddenSize * 3 * 3 // var next = 0 // for(i <- 0 until 4) { // val i2g = Tensor[Double](weightData.slice(next, next + sizeI), // Array(1, hiddenSize, inputSize, 3, 3)) // weightsOri += Tensor[Double]().resizeAs(i2g).copy(i2g) // next += sizeI // val i2gBias = Tensor[Double](weightData.slice(next, next + hiddenSize), // Array(1, hiddenSize)) // weightsOri += Tensor[Double]().resizeAs(i2gBias).copy(i2gBias) // next += hiddenSize // val h2g = Tensor[Double](weightData.slice(next, next + sizeH), // Array(1, hiddenSize, 
hiddenSize, 3, 3)) // weightsOri += Tensor[Double]().resizeAs(h2g).copy(h2g) // next += sizeH // } // // // weightsOri(0) -----> forgetGatei2g.weight // // weightsOri(3) -----> inputGatei2g.weight // // weightsOri(6) -----> hiddeni2g.weight // // weightsOri(9) -----> outputGatei2g.weight // val weightsTable = T(weightsOri(0), weightsOri(3), weightsOri(6), weightsOri(9)) // val joinWeights = JoinTable[Double](2, 5) // weightsNew += joinWeights.forward(weightsTable) // // // weightsOri(1) -----> forgetGatei2g.bias // // weightsOri(4) -----> inputGatei2g.bias // // weightsOri(7) -----> hiddeni2g.bias // // weightsOri(10) -----> outputGatei2g.bias // val biasTable = T(weightsOri(1), weightsOri(4), weightsOri(7), weightsOri(10)) // val joinBias = JoinTable[Double](1, 1) // weightsNew += joinBias.forward(biasTable) // // // weightsOri(2) -----> forgetGateh2g // // weightsOri(5) -----> inputGateh2g // // weightsOri(8) -----> hiddenh2h // // weightsOri(11) -----> outputGateh2g // weightsNew += weightsOri(2) // weightsNew += weightsOri(5) // weightsNew += weightsOri(8) // weightsNew += weightsOri(11) // // weights.copy(Module.flatten[Double](weightsNew.toArray)) val output = model.forward(input) val gradInput = model.backward(input, output).asInstanceOf[Tensor[Double]] val expectedGradData = Array( 0.014530784777034018, 0.009306995336246415, -0.008286519510758553, -0.005862022951432651, 0.03774343783342986, 0.02559209113219452, -0.009158373635152154, -0.006768851798254148, 0.02421276189520975, 0.019129483877254584, 0.03387074024263973, 0.016151513843310296, 0.009110519248782582, 0.0519530219650559, 0.02990668621337847, 0.0038953190691805974, 0.00881452262673705, 0.006641299351351705, 0.011895276575900459, 0.010718002608531207, 5.692818102669443E-5, 5.613122237450347E-4, 0.004340609346706831, 0.009249622360370996, 0.007077281923732542, 0.025274489209446492, 0.030983433996406604, 0.01550887654620608, 0.015250729260157744, -5.386170921407475E-4, -0.013094061514047645, 
0.03932976633855383, 0.016212649176254017, -0.014008845387900731, -9.695636289087138E-4, 0.013443209145444692, 0.021871534544684157, 0.04383469258878202, 0.030960134463390295, 0.025071766698666193, 0.048735184244840295, 0.030630173082074514, -0.005170600367028816, 0.009304571633078955, 0.014833103616387424, 0.010256073366121572, 0.009494353388598848, 0.010272214821693637, 0.0024692527383167804, 0.004536446048889638, 0.012244081222251282, 0.010526712698432203, 0.033398738011474687, 0.02813455387980294, 0.010066005831263696, 0.0019506300334422779, -0.0019043398614618512, -0.004088008352356501, 0.024485482387752576, 0.00699954945689254, -0.01904707399526851, 0.0033876388201005622, 0.017088928573562367, 0.017872693491787887, 0.041176979595515396, 0.022870978612379685, 0.017734548112707957, 0.055116178410244576, 0.02362922566039578, -0.0032533670131090326, 0.014820133977046292, 0.015189531245204876, 0.013417026107347331, 0.008199462128197013, 0.008682901874796495, -0.001057281436202452, -9.580529058057278E-4, 0.018130305232989127, 0.013197627737917545, 0.025993499815504965, 0.0282101706655173, 0.004722146118847874, -6.211365954561445E-4, 0.001463247132531586, -0.0040490572380433, 0.009980156570398933, 0.003884023741930937, -0.008444307994339605, -0.003627754988835289, 0.014385061921127408, 0.007966076755923803, 0.030602239509086984, 0.016312201377609553, 0.015614085801242397, 0.03735781819802621, 0.011600928393055569, -0.00961434081421862, 0.010773314255620866, 0.007550266521377238, 0.006972407988187046, 0.005173251794764602, 0.004780105131762285, -5.812169827385085E-4, 6.467089566629608E-4, 0.011314282674270778, 0.009924497929978446, 0.01018491048439032, 0.022783415058596372 ) val expectedGrad = Tensor[Double](Storage(expectedGradData), 1, Array(batchSize, seqLength, inputSize, 3, 3)) expectedGrad.map(gradInput, (v1, v2) => { assert(abs(v1 - v2) < 1e-6) v1 }) } "A ConvLSTMPeepwhole " should "generate corrent output when batch != 1" in { val hiddenSize = 4 val inputSize 
= 2 val seqLength = 2 val batchSize = 3 val inputData = Array( 0.7379243471309989, 0.19769034340444613, 0.7553588318460729, 0.04613053826696778, 0.5252748330906923, 0.3243104024273151, 0.8989024506441619, 0.6712812402234619, 0.15323104849092073, 0.10185090916201034, 0.5226652689296567, 0.2899686031416233, 0.511326269557182, 0.49001743514565177, 0.1984377134313693, 0.7480165633318978, 0.6211121942699456, 0.6036424737237795, 0.1592963148948352, 0.26366166406420644, 0.7005873221417939, 0.35208052461842276, 0.725171953811403, 0.5044467614524997, 0.523452964024349, 0.7937875951399342, 0.6943424385328354, 0.8522757484294406, 0.2271578105064339, 0.5126336840147973, 0.6541267785298224, 0.3414006259681709, 0.4863876223334871, 0.9055057744040688, 0.8798793872101478, 0.41524602527888876, 0.8876018988857928, 0.2600152644808019, 0.8634521212917783, 0.2153581486704762, 0.4154146887274315, 0.89663329916328, 0.5406127724020381, 0.5893982389701549, 0.19306165705001244, 0.45133332543350857, 0.9714792090557134, 0.38498402005236265, 0.006891668205946, 0.5828297372808069, 0.5688474525455758, 0.8823143009035355, 0.6887655121242025, 0.0027582524956907273, 0.9663642226218756, 0.4108958429337164, 0.13044029438378613, 0.7476518446247042, 0.27181284499242064, 0.2943235571195709, 0.5604631859123652, 0.038323627725658116, 0.7603531593097606, 0.27194849168888424, 0.7267936284527879, 0.9052766088556446, 0.02837551118315562, 0.4574005827539791, 0.7973179561393601, 0.9980165989408478, 0.25069973544723445, 0.7113158573554784, 0.11561652478927409, 0.6416543947530068, 0.4621700648670196, 0.029251147823290524, 0.43078782880788635, 0.25593904336252427, 0.7192673207531932, 0.1756512226290753, 0.8578302327025378, 0.7098735179715736, 0.5881799635556655, 0.3308673021852163, 0.6190265867439853, 0.04932089904003756, 0.6528462133452869, 0.9606073952644223, 0.5669727867139613, 0.30551745863715773, 0.6397990914737653, 0.4658329782779089, 0.5963362791762975, 0.7691666538081872, 0.3162174933237236, 
0.4259981344604039, 0.3141262752374987, 0.1977960342228966, 0.5334056420410246, 0.40966136495594996, 0.757202109971997, 0.913316506797173, 0.6599946668051349, 0.3938817528802213, 0.5209422302691442, 0.6422317952685995, 0.17747939441294336, 0.8074452051580658, 0.3944410584066397, 0.2527031847980983, 0.39527449481971033, 0.0928484214334575, 0.4852964142206313, 0.25068274731805196, 0.08329386976314157, 0.4681014271865992, 0.860995719082069, 0.01158601045989438, 0.31712803268106715, 0.2617924245499301, 0.934104253955566, 0.24448283400001236, 0.9842141116573005, 0.7934911952785096, 0.17490418077955516, 0.11254306305960093, 0.20896334129728766, 0.2764503640807836, 0.24782820561381746, 0.7664813069795481, 0.4206320455881154, 0.6306966327857411, 0.33697752771986067, 0.7870030511736112, 0.9097909928901393, 0.41198674582123507, 0.25120817362250203, 0.17776223144093362, 0.49757005026260037, 0.28573572832782146, 0.8807161778981778, 0.10209784975052816, 0.37642364632639536, 0.6049083416711989) val input = Tensor[Double](inputData, Array(batchSize, seqLength, inputSize, 3, 4)) val rec = Recurrent[Double]() val model = Sequential[Double]() .add(rec .add(ConvLSTMPeephole[Double](inputSize, hiddenSize, 3, 3, 1, withPeephole = false))) val weightData = Array( -0.0697708, 0.187022, 0.08511595, 0.096392, 0.004365, -0.181258, 0.0446674, -0.1335725, -0.20553963, -0.06138988, -0.07350091, 0.21952641, -0.20255956, -0.010300428, -0.038676325, 0.1958987, -0.13511689, -0.101483345, -0.125394, 0.21549562, 0.009512273, 0.22363727, -0.12906058, -0.1364867, -0.096433975, 0.05142183, 0.0600333, 0.104275264, -0.23061629, -0.14527707, 0.011904705, -0.13122521, -0.028477365, -0.17441525, -0.22748822, 0.089772895, 0.047597, -0.12802905, 0.023593059, 0.047630176, -0.0041123535, 0.15042211, 0.22905788, 0.16112402, 0.17447555, -0.17807686, -0.1150537, -0.112298205, -0.09767061, -0.06547484, -0.059544224, -0.068848155, 0.017608726, -0.0486288, 0.040728, 0.0032547752, -0.062271275, -0.07636171, 
-0.090193786, -0.1757421, -0.029310212, -0.055856735, 0.2138684, 0.08065, -0.14784653, -0.11130823, 0.15752073, 0.0417686, 0.07667526, -0.08721331, 0.12652178, -0.22656773, 0.028447477, 0.059678376, 0.097432226, 0.07587893, 0.018427523, 0.07969191, -0.16021226, 0.15130767, 0.16185403, 0.09212428, -0.009819705, -0.09506356, 0.1286456, -0.08805564, 0.046685345, 0.12232006, -0.100213096, -0.15668556, 0.07114366, 0.095224895, -0.124568574, 0.06927875, 0.0905962, -0.035104837, 0.15567762, -0.043663125, -0.07430526, -0.055192, 0.01406816, -0.082332, -0.09430171, -0.1095887, -0.07151577, -0.02634421, -0.06150073, 0.11257642, 0.14980435, -0.062605076, -0.12603457, -0.15501663, 0.02251482, -0.08227526, 0.13384429, 0.024052791, 0.13748798, -0.08544985, -0.036989275, 0.026403576, -0.136615, 0.05420539, 0.16269544, 0.035181798, 0.15763186, -0.1604577, 0.1537655, 0.02734131, -0.048081715, 0.100141905, 0.028017784, 0.08105907, 0.019959895, 0.13988869, 0.16058885, -0.003570326, 0.06591913, -0.10256911, 0.13575412, 0.04774964, -0.017293347, -0.048617315, 0.15695612, 0.15765312, -0.047396783, -0.16620952, 0.0025890507, -0.13422322, -0.03875675, -0.075357996, 0.113039605, 0.13345407, 0.09567941, -0.003772, 0.07441882, 0.04021747, 0.12041045, -0.042105403, -0.027613033, 0.15320867, -0.12912026, -0.081750855, -0.0344171, -0.15512398, 0.15219747, 0.036528654, -0.012755581, 0.098534, -0.07061299, 0.02883929, 0.14481406, -0.051582605, 0.10316327, 0.085615724, 0.06536975, -0.054357443, 0.02749899, -0.013213737, 0.057099275, 0.15802467, -0.05081968, -0.07198317, 0.11493357, -0.0012803806, 0.11840431, 0.10919253, -0.10307259, 0.087982856, 0.06715956, 0.03439658, -0.1251883, -0.16122015, 0.11468333, 0.15124878, -0.040252376, 0.13959402, -0.018218568, -0.03417238, -0.07071258, 0.15031807, -0.09312864, -0.014361585, 0.009083145, 0.07651518, -0.030849354, 0.053097464, 0.02317304, -0.0126761, -0.10731614, 0.08843881, -0.058363467, -0.07192067, 0.13913071, -0.07697743, 0.15923063, -0.08419231, 
0.017478677, -0.08418075, -0.057064693, 0.0024510117, -0.20928818, -0.22167638, -0.038345862, 0.03438525, 0.11347725, -0.12304, 0.026768617, 0.045132592, 0.091074154, 0.13448715, 0.11804616, -0.22657603, -0.016182138, -0.1331919, 0.05141501, 0.015872177, -0.12630826, 0.21568011, -0.10292801, 0.13611461, -0.08374142, -0.22699684, 0.16571483, -0.098663375, -0.018467197, -0.15427141, -0.15015155, 0.10223335, -0.14016786, -0.10880828, -0.21908437, 0.14608948, 0.07250339, 0.06662375, -0.18800929, 0.11404393, 0.13704747, 0.14116052, 0.10486333, -0.010664585, -0.11811825, -0.1724059, 0.15996984, -0.17623067, -0.055978876, -0.10195447, -0.17426933, 0.009317461, -0.23025058, -0.22655061, 0.1504219, -0.22794038, 0.19736658, -0.076756656, -0.16812365, -0.22800718, 0.17344427, -0.12931758, 0.10014104, -0.13550109, -0.23511806, -0.06651987, 0.19619618, 0.097913995, 0.14484589, -0.20718974, 0.20920905, 0.18459858, -0.008639049, -0.22041525, 0.08012527, 0.14633249, -0.024920763, -0.18607515, 0.07662353, -0.15454617, 0.067585476, 0.05524932, -0.15291865, 0.12737663, 0.05814569, -0.11415055, 0.11919394, -0.06319863, 0.1465731, 0.054857183, -0.15075083, 0.090675876, 0.1525343, -0.0066932747, -0.048541967, 0.06132587, -0.079331905, 0.11314261, 0.14027406, -0.0266242, -0.016292417, 0.07795509, 0.020753743, -0.10986114, 0.10756251, 0.02036946, 0.026220735, -0.11005689, 0.10311518, 0.07109452, -0.09970161, 0.068307705, 0.11119034, -0.06424175, -0.0012396448, -0.11550802, -0.06943571, -0.110153, -0.041444167, -0.12524629, 0.15868594, 0.008897657, -0.10843479, 0.15759167, -0.09669543, -0.08299825, -0.0937801, -0.020804988, -0.08680972, 0.083160855, 0.029616985, -0.017982747, 0.0037287925, 0.097527005, 0.09538205, -0.0932, -0.097054094, 0.10397664, 0.12322543, -0.06448696, -0.12847184, 0.050058555, 0.09502069, 0.08681986, -0.14003497, 0.03627888, -0.075629145, -0.095788166, -0.08410784, 0.13308963, -0.007147816, -0.16363329, 0.12797672, 0.124641, -0.05630061, 0.0064241965, 0.077181205, 
-0.1251426, 0.08616565, 0.1477562, -0.04511368, 0.029885028, 0.057127535, -0.08563146, 0.13729702, -0.10859255, -0.102196366, 0.008430395, -0.0945447, 0.10205625, -0.07343792, 0.16189432, -0.1300748, 0.08548705, 0.16390403, -0.02669807, -0.058629803, -0.05904906, -0.016605929, 0.14874554, 0.014934211, -0.09052281, 0.0579616, -0.041529182, 0.09614261, 0.15888576, -0.11366321, -0.102919176, -0.1167308, -0.011413716, 0.07176415, -0.03216456, -0.063260436, 0.059609246, 0.16423965, 0.0052398313, 0.1286797, -0.0381152, -0.009582818, 0.004786132, -0.1019815, 0.043783717, 0.05244485, -0.06435464, -0.16259833, -0.100482024, 0.0587321, -0.052555863, 0.032503795, -0.1606384, -0.14574, -0.05185242, -0.08184071, 0.15766397, 0.09867271, 0.08309498, -0.15646282, 0.15911676, -0.008041214, -0.07785257, 0.06866316, -0.07157379, 0.13319956, -0.066218115, 0.0138255, -0.076073825, -0.14936924, 0.10676395, -0.4842985, 0.083836384, -0.21565008, -0.2306193, 0.0017399695, -0.07744393, -0.2044993, -0.21714376, 0.0077707577, 0.14650588, 0.19233301, -0.04317506, -0.058397546, 0.15633541, -0.115028, -0.044130307, 0.063888475, 0.21123467, 0.20039539, -0.045635425, 0.040344927, -0.12061099, 0.13238785, -0.11554383, 0.012527357, -0.04936022, -0.223834, 0.2067501, 0.035001267, -0.121593, 0.08469669, -0.15821323, 0.013301196, 0.19869077, -0.18677086, -0.09790556, 0.18662173, -0.216591, 0.13041325, 0.13628985, 0.042848308, -0.031125685, -0.22374651, -0.087204315, 0.05124186, -0.22576457, 0.014185649, -0.0899537, -0.015126135, -0.10904176, -0.212513, -0.19453013, 0.0071554612, -0.07960433, -0.20750536, -0.22908148, 0.066988595, -0.11946863, -0.20373446, -0.03756, -0.15693687, 0.015695922, -0.19193731, 0.035843078, 0.07994549, 0.025597025, -0.10725631, -0.11276663, -0.1882937, 0.019561082, 0.0135140605, 0.041632164, 0.0010907603, -0.06264914, -0.016213655, 0.0937373, 0.094795, -0.1173104, -0.21944033, -0.09396857, 0.13556847, -0.09024931, 0.1276821, -2.7715965E-4, 0.12017726, 0.13998412, 0.13809435, 
0.16587347, 0.04789949, -0.08513931, 0.07294201, -0.08220003, -0.15560868, 0.14816408, -0.09582949, 0.051776934, -0.011485172, 0.14832942, 0.10104054, 0.080303155, 0.0034141147, 0.14833276, 0.09612207, 0.11273294, 0.13111332, -0.00879518, -0.1397018, -0.10093753, -0.00945932, -0.032682095, -0.14018348, 0.050238717, 0.09185889, -0.14419281, 0.09613244, -0.13719763, 0.04358094, -0.15398286, -0.116741166, -0.11954482, 0.14914127, -0.126483, -0.026603939, 0.15768388, 0.06356159, 0.05631903, 0.0101217795, 0.15248485, -0.14745563, -0.0145869935, 0.0382958, 0.057202652, -0.14191794, 0.059604887, 0.011006361, -0.07016107, 0.076446384, 0.013760659, -0.068240955, 0.0037634, 0.12695941, 0.041081227, 0.10223117, 0.11603621, -0.06294605, -0.010134418, -0.006934982, 0.11731349, -0.10002373, 0.14468494, 0.006046706, -0.11748926, -0.13269922, 0.08922616, 0.076726876, 0.079133116, -0.13795392, 0.05776867, -0.12632991, -0.16351144, -0.067499354, 0.047223303, 0.063164465, -0.0149828065, -0.031813424, -0.08393954, -0.067819, 0.081516, -0.1065244, 0.14492081, 0.11396905, -0.10664382, -0.0098184915, 0.08660889, -0.16464078, 0.07709077, -0.1493178, 0.017629929, 0.08108806, -0.057861995, 0.05144662, -0.019507658, 0.098744385, 0.14157839, -0.101155385, -0.1155548, -0.1539434, 0.07039324, -0.015811022, 0.15094946, -0.16115923, 0.116900794, 0.11721963, 0.020760974, 0.0040808455, -0.0896887, 0.013347261, 0.11278092, -0.07966485, -0.094330534, -0.15664604, 0.015197758, 0.12119024, -0.05060158, 0.06654976, -0.13198644, -0.1457269, -0.13899888, 0.038908076, -0.13269305, -0.11445787, 0.021789772, 0.027084751, 0.01323522, -0.12667863, 0.026683968, 0.04916361, 0.0086855, 0.15367854, 0.031549584, -0.0036370864, 0.08499007, -0.10802871, 0.03548985, -0.17660856, -0.068241306, 0.15097389, 0.16520916, 0.024556529, 0.0017257226, 0.17331718, 0.196117, 0.19437543, 0.19648184, -0.1331118, -0.21632133, -0.18020143, 0.12856491, 0.1344524, 0.11382166, 0.064181186, 0.14279565, -0.08350899, -0.2256594, 
-0.13126723, 0.043258272, -0.021165192, 0.089386486, -0.09204444, -0.0960608, -0.037649803, 0.22336064, -0.031554904, 0.124656096, -0.025671339, -0.1065685, 0.0453102, -1.68393E-5, 0.22479524, 0.046631828, 0.007860622, -0.22629729, -0.13721013, 0.22810946, -0.12107487, 0.022246245, 0.17803338, 0.2083739, 0.18673882, -0.1917718, 0.07565709, 0.120346785, -0.14759375, -0.1377154, 0.038963128, 0.22792713, -0.2159763, -0.006619736, 0.2313753, -0.04800687, -0.1518908, 0.18948461, 0.1076321, -0.11479616, -0.0212803, 0.14886868, -0.22150691, 0.089185275, -0.040394045, 0.13415302, 0.21480684, 0.0878023, 0.106930904, -0.18570949, -0.013600573, 0.11532847, 0.11659276, 0.112827145, -0.1062416, 0.066263296, -0.08610482, 0.105527066, -0.058957383, -0.15528603, -0.009521967, 0.011328606, -0.06197259, -0.13204348, 0.08675131, -0.113543, -0.01445269, 0.02258719, -0.008030752, -0.093486756, -0.07264881, 0.09213272, 0.07619277, 0.16032794, -0.026074272, 0.066076815, -0.10525776, 0.16016503, 0.03144442, -0.023126643, 0.05451808, 0.022852356, -0.096872106, -0.030566314, -0.16589479, 0.0905115, -0.1473723, -0.12166525, 0.078377604, 0.13821222, -0.078764655, 0.14731602, -0.08815969, -0.0236424, -0.0355236, -0.09844407, -0.012984, 0.047678906, -0.038449008, 0.08535368, 0.15068671, -0.008833185, -0.09007217, 0.112541415, -0.06900989, -0.102243155, -0.050330114, -0.13928314, -0.041724514, 0.054797813, -0.16646549, 0.13796, 0.12394269, 0.020277899, -0.013631716, -0.09424963, -0.13880578, 0.08686539, -0.15236098, 0.05722864, -0.02671615, -0.06085055, 0.09522983, -0.03990184, -0.06986189, -0.014213024, -0.1377847, 0.08251909, 0.0143873375, -0.0860864, -0.0640099, -0.06048214, -0.030843036, 0.10346391, -0.14285919, 0.1575129, 0.11078764, -0.09553229, -0.15557009, -0.039680153, -0.02489069, 0.03813003, 0.1080799, 0.07591443, 0.1631084, 0.04714953, -0.10192201, -0.12497483, 0.038626827, -0.07361671, -0.097818114, -0.14928903, -0.14453772, 0.10313048, 0.11320499, -0.063832685, 0.011636197, 
-0.16415314, -0.142816, 0.041214544, -0.119791135, 0.10883034, -0.14729027, -0.122481905, 0.08507194, -0.088145964, -0.015075706, 0.06492, -0.16094309, 0.12339206, 0.011586048, 0.1321518, -0.05177626, 0.033773363, -0.13636817, 0.013378032, -0.003163873, 0.02471618, -0.13203168, 0.07989189, -0.054477777, 0.059936292, -0.077277765, 0.019922124, -0.15395634, 0.0088137, 0.036947053, -0.11207754, 0.042513624, -0.05665606, -0.015827265, 0.12174054 ) val weights = model.getParameters()._1 weights.copy(Tensor[Double](weightData, Array(weightData.size, 1))) // val weightsOri = new ArrayBuffer[Tensor[Double]]() // val weightsNew = new ArrayBuffer[Tensor[Double]]() // // val sizeI = hiddenSize * inputSize * 3 * 3 // val sizeH = hiddenSize * hiddenSize * 3 * 3 // var next = 0 // for(i <- 0 until 4) { // val i2g = Tensor[Double](weightData.slice(next, next + sizeI), // Array(1, hiddenSize, inputSize, 3, 3)) // weightsOri += Tensor[Double]().resizeAs(i2g).copy(i2g) // next += sizeI // val i2gBias = Tensor[Double](weightData.slice(next, next + hiddenSize), // Array(1, hiddenSize)) // weightsOri += Tensor[Double]().resizeAs(i2gBias).copy(i2gBias) // next += hiddenSize // val h2g = Tensor[Double](weightData.slice(next, next + sizeH), // Array(1, hiddenSize, hiddenSize, 3, 3)) // weightsOri += Tensor[Double]().resizeAs(h2g).copy(h2g) // next += sizeH // } // // val weightsTable = T(weightsOri(0), weightsOri(3), weightsOri(6), weightsOri(9)) // val joinWeights = JoinTable[Double](2, 5) // weightsNew += joinWeights.forward(weightsTable) // // val biasTable = T(weightsOri(1), weightsOri(4), weightsOri(7), weightsOri(10)) // val joinBias = JoinTable[Double](1, 1) // weightsNew += joinBias.forward(biasTable) // // weightsNew += weightsOri(2) // weightsNew += weightsOri(5) // weightsNew += weightsOri(8) // weightsNew += weightsOri(11) // // weights.copy(Module.flatten[Double](weightsNew.toArray)) val output = model.forward(input) val gradInput = model.backward(input, 
output).asInstanceOf[Tensor[Double]] val expectedGradData = Array( 0.034878514686072545, 0.04398729956621657, 0.05758099669158952, 0.03437097904494599, 0.0249094742719872, 0.07606488761784662, 0.09516671025789802, 0.06654659858425119, 0.01808987869570458, 0.048331800259105696, 0.06467829266973679, 0.05127588163224714, 0.03994717516008958, 0.038453084506413944, 0.01098428754646074, 0.004754856971752791, 0.04691100984590687, 0.04733273359693526, 0.032698733073028036, 0.008710296200014872, 0.016446470040048802, 0.014504827825521021, 0.014398413896711182, 0.005563785069582132, 0.02508919498362225, 0.04510472680505569, 0.05014850646512659, 0.03689880987099306, 0.018396925338213875, 0.06508697182696906, 0.07680143870812711, 0.05405187114525272, 0.011704545443457297, 0.04024544101139729, 0.05209939603631103, 0.04425816399870016, 0.03849933792429499, 0.026104351589482305, 0.014195299345464618, 0.00429469388842192, 0.0391442871880561, 0.03748689488743062, 0.034699130158092666, 0.014060432566210765, 0.019155416496624153, 0.019787616624734043, 0.026792094505297428, 0.008815024575985419, 0.011581512006221744, 0.06048972732734363, 0.03282915602659322, 0.04561238830468398, 0.041571255786098195, 0.07116665388420179, 0.08661446915699353, 0.059578562302912356, 0.020123218760508918, 0.05257043899642402, 0.06063252611339279, 0.04371064348072411, 0.045257409698179596, 0.03071963993748246, 0.02529315514566803, -0.005670740927325354, 0.03639680434337409, 0.048487407146512125, 0.02275229820249338, 0.014118614303447524, 0.02428551150184052, 0.016819297537995985, 0.004203314658387305, 0.013208252415897827, 0.00941868766149703, 0.054293608957875564, 0.04095605099670765, 0.03558088296182436, 0.03155322059071331, 0.06819978262289549, 0.0698245353478215, 0.049097324020888955, 0.024622612693984566, 0.04302580855110488, 0.04833338328255696, 0.03833458594732048, 0.03859568415306832, 0.019662097401664583, 0.020328833083506203, 0.013087242556229211, 0.03063108223918263, 0.038899278583615186, 
0.029803127563184754, 0.012881705196662431, 0.022743671177388635, 0.024842756232781472, 0.017745566495452548, 0.010775736917320205, 0.03143314853307875, 0.026008021124113205, 0.05003786254172548, 0.049226925346696454, 0.04179540300548766, 0.07442518781716083, 0.06586987783245223, 0.06962442807281881, 0.019949908429511852, 0.06736192219429586, 0.036991517213459404, 0.06295162984318212, 0.031871333755458135, 0.029843372655222782, 0.017421399722145476, -0.0023651889661522544, 0.041743057643546515, 0.021105312244889884, 0.0279956737169246, 0.007435954259695617, 0.02861718110766123, 0.005404362682476721, 0.023503489726929276, -0.0010247413809265477, 0.04088956639631173, 0.013630851891249091, 0.05529042930307609, 0.049642832403158754, 0.0351046539048856, 0.050125216770852105, 0.05088086050445006, 0.06647703410350292, 0.018242402674039926, 0.04993188252833041, 0.03556726962341712, 0.051466671646896084, 0.014878868479939288, 0.0391711844039187, 0.03236231195889219, 0.003706375401465365, 0.022623344961740982, 0.021894028156073286, 0.03376341058584762, 0.014814947908133192, 0.03352917374224103, 0.009108881997277354, 0.022206440887808925, 0.005025803338779346 ) val expectedGrad = Tensor[Double](Storage(expectedGradData), 1, Array(batchSize, seqLength, inputSize, 3, 4)) expectedGrad.map(gradInput, (v1, v2) => { assert(abs(v1 - v2) < 1e-6) v1 }) } // Tested with torch convlstm "A ConvLSTMPeepwhole " should "return expected hidden and cell state when batch != 1" in { val hiddenSize = 4 val inputSize = 2 val seqLength = 2 val batchSize = 3 val inputData = Array( 0.7379243471309989, 0.19769034340444613, 0.7553588318460729, 0.04613053826696778, 0.5252748330906923, 0.3243104024273151, 0.8989024506441619, 0.6712812402234619, 0.15323104849092073, 0.10185090916201034, 0.5226652689296567, 0.2899686031416233, 0.511326269557182, 0.49001743514565177, 0.1984377134313693, 0.7480165633318978, 0.6211121942699456, 0.6036424737237795, 0.1592963148948352, 0.26366166406420644, 0.7005873221417939, 
0.35208052461842276, 0.725171953811403, 0.5044467614524997, 0.523452964024349, 0.7937875951399342, 0.6943424385328354, 0.8522757484294406, 0.2271578105064339, 0.5126336840147973, 0.6541267785298224, 0.3414006259681709, 0.4863876223334871, 0.9055057744040688, 0.8798793872101478, 0.41524602527888876, 0.8876018988857928, 0.2600152644808019, 0.8634521212917783, 0.2153581486704762, 0.4154146887274315, 0.89663329916328, 0.5406127724020381, 0.5893982389701549, 0.19306165705001244, 0.45133332543350857, 0.9714792090557134, 0.38498402005236265, 0.006891668205946, 0.5828297372808069, 0.5688474525455758, 0.8823143009035355, 0.6887655121242025, 0.0027582524956907273, 0.9663642226218756, 0.4108958429337164, 0.13044029438378613, 0.7476518446247042, 0.27181284499242064, 0.2943235571195709, 0.5604631859123652, 0.038323627725658116, 0.7603531593097606, 0.27194849168888424, 0.7267936284527879, 0.9052766088556446, 0.02837551118315562, 0.4574005827539791, 0.7973179561393601, 0.9980165989408478, 0.25069973544723445, 0.7113158573554784, 0.11561652478927409, 0.6416543947530068, 0.4621700648670196, 0.029251147823290524, 0.43078782880788635, 0.25593904336252427, 0.7192673207531932, 0.1756512226290753, 0.8578302327025378, 0.7098735179715736, 0.5881799635556655, 0.3308673021852163, 0.6190265867439853, 0.04932089904003756, 0.6528462133452869, 0.9606073952644223, 0.5669727867139613, 0.30551745863715773, 0.6397990914737653, 0.4658329782779089, 0.5963362791762975, 0.7691666538081872, 0.3162174933237236, 0.4259981344604039, 0.3141262752374987, 0.1977960342228966, 0.5334056420410246, 0.40966136495594996, 0.757202109971997, 0.913316506797173, 0.6599946668051349, 0.3938817528802213, 0.5209422302691442, 0.6422317952685995, 0.17747939441294336, 0.8074452051580658, 0.3944410584066397, 0.2527031847980983, 0.39527449481971033, 0.0928484214334575, 0.4852964142206313, 0.25068274731805196, 0.08329386976314157, 0.4681014271865992, 0.860995719082069, 0.01158601045989438, 0.31712803268106715, 
0.2617924245499301, 0.934104253955566, 0.24448283400001236, 0.9842141116573005, 0.7934911952785096, 0.17490418077955516, 0.11254306305960093, 0.20896334129728766, 0.2764503640807836, 0.24782820561381746, 0.7664813069795481, 0.4206320455881154, 0.6306966327857411, 0.33697752771986067, 0.7870030511736112, 0.9097909928901393, 0.41198674582123507, 0.25120817362250203, 0.17776223144093362, 0.49757005026260037, 0.28573572832782146, 0.8807161778981778, 0.10209784975052816, 0.37642364632639536, 0.6049083416711989) val input = Tensor[Double](inputData, Array(batchSize, seqLength, inputSize, 3, 4)) val rec = Recurrent[Double]() val model = Sequential[Double]() .add(rec .add(ConvLSTMPeephole[Double](inputSize, hiddenSize, 3, 3, 1, withPeephole = false))) val weightData = Array( -0.0697708, 0.187022, 0.08511595, 0.096392, 0.004365, -0.181258, 0.0446674, -0.1335725, -0.20553963, -0.06138988, -0.07350091, 0.21952641, -0.20255956, -0.010300428, -0.038676325, 0.1958987, -0.13511689, -0.101483345, -0.125394, 0.21549562, 0.009512273, 0.22363727, -0.12906058, -0.1364867, -0.096433975, 0.05142183, 0.0600333, 0.104275264, -0.23061629, -0.14527707, 0.011904705, -0.13122521, -0.028477365, -0.17441525, -0.22748822, 0.089772895, 0.047597, -0.12802905, 0.023593059, 0.047630176, -0.0041123535, 0.15042211, 0.22905788, 0.16112402, 0.17447555, -0.17807686, -0.1150537, -0.112298205, -0.09767061, -0.06547484, -0.059544224, -0.068848155, 0.017608726, -0.0486288, 0.040728, 0.0032547752, -0.062271275, -0.07636171, -0.090193786, -0.1757421, -0.029310212, -0.055856735, 0.2138684, 0.08065, -0.14784653, -0.11130823, 0.15752073, 0.0417686, 0.07667526, -0.08721331, 0.12652178, -0.22656773, 0.028447477, 0.059678376, 0.097432226, 0.07587893, 0.018427523, 0.07969191, -0.16021226, 0.15130767, 0.16185403, 0.09212428, -0.009819705, -0.09506356, 0.1286456, -0.08805564, 0.046685345, 0.12232006, -0.100213096, -0.15668556, 0.07114366, 0.095224895, -0.124568574, 0.06927875, 0.0905962, -0.035104837, 0.15567762, 
-0.043663125, -0.07430526, -0.055192, 0.01406816, -0.082332, -0.09430171, -0.1095887, -0.07151577, -0.02634421, -0.06150073, 0.11257642, 0.14980435, -0.062605076, -0.12603457, -0.15501663, 0.02251482, -0.08227526, 0.13384429, 0.024052791, 0.13748798, -0.08544985, -0.036989275, 0.026403576, -0.136615, 0.05420539, 0.16269544, 0.035181798, 0.15763186, -0.1604577, 0.1537655, 0.02734131, -0.048081715, 0.100141905, 0.028017784, 0.08105907, 0.019959895, 0.13988869, 0.16058885, -0.003570326, 0.06591913, -0.10256911, 0.13575412, 0.04774964, -0.017293347, -0.048617315, 0.15695612, 0.15765312, -0.047396783, -0.16620952, 0.0025890507, -0.13422322, -0.03875675, -0.075357996, 0.113039605, 0.13345407, 0.09567941, -0.003772, 0.07441882, 0.04021747, 0.12041045, -0.042105403, -0.027613033, 0.15320867, -0.12912026, -0.081750855, -0.0344171, -0.15512398, 0.15219747, 0.036528654, -0.012755581, 0.098534, -0.07061299, 0.02883929, 0.14481406, -0.051582605, 0.10316327, 0.085615724, 0.06536975, -0.054357443, 0.02749899, -0.013213737, 0.057099275, 0.15802467, -0.05081968, -0.07198317, 0.11493357, -0.0012803806, 0.11840431, 0.10919253, -0.10307259, 0.087982856, 0.06715956, 0.03439658, -0.1251883, -0.16122015, 0.11468333, 0.15124878, -0.040252376, 0.13959402, -0.018218568, -0.03417238, -0.07071258, 0.15031807, -0.09312864, -0.014361585, 0.009083145, 0.07651518, -0.030849354, 0.053097464, 0.02317304, -0.0126761, -0.10731614, 0.08843881, -0.058363467, -0.07192067, 0.13913071, -0.07697743, 0.15923063, -0.08419231, 0.017478677, -0.08418075, -0.057064693, 0.0024510117, -0.20928818, -0.22167638, -0.038345862, 0.03438525, 0.11347725, -0.12304, 0.026768617, 0.045132592, 0.091074154, 0.13448715, 0.11804616, -0.22657603, -0.016182138, -0.1331919, 0.05141501, 0.015872177, -0.12630826, 0.21568011, -0.10292801, 0.13611461, -0.08374142, -0.22699684, 0.16571483, -0.098663375, -0.018467197, -0.15427141, -0.15015155, 0.10223335, -0.14016786, -0.10880828, -0.21908437, 0.14608948, 0.07250339, 0.06662375, 
-0.18800929, 0.11404393, 0.13704747, 0.14116052, 0.10486333, -0.010664585, -0.11811825, -0.1724059, 0.15996984, -0.17623067, -0.055978876, -0.10195447, -0.17426933, 0.009317461, -0.23025058, -0.22655061, 0.1504219, -0.22794038, 0.19736658, -0.076756656, -0.16812365, -0.22800718, 0.17344427, -0.12931758, 0.10014104, -0.13550109, -0.23511806, -0.06651987, 0.19619618, 0.097913995, 0.14484589, -0.20718974, 0.20920905, 0.18459858, -0.008639049, -0.22041525, 0.08012527, 0.14633249, -0.024920763, -0.18607515, 0.07662353, -0.15454617, 0.067585476, 0.05524932, -0.15291865, 0.12737663, 0.05814569, -0.11415055, 0.11919394, -0.06319863, 0.1465731, 0.054857183, -0.15075083, 0.090675876, 0.1525343, -0.0066932747, -0.048541967, 0.06132587, -0.079331905, 0.11314261, 0.14027406, -0.0266242, -0.016292417, 0.07795509, 0.020753743, -0.10986114, 0.10756251, 0.02036946, 0.026220735, -0.11005689, 0.10311518, 0.07109452, -0.09970161, 0.068307705, 0.11119034, -0.06424175, -0.0012396448, -0.11550802, -0.06943571, -0.110153, -0.041444167, -0.12524629, 0.15868594, 0.008897657, -0.10843479, 0.15759167, -0.09669543, -0.08299825, -0.0937801, -0.020804988, -0.08680972, 0.083160855, 0.029616985, -0.017982747, 0.0037287925, 0.097527005, 0.09538205, -0.0932, -0.097054094, 0.10397664, 0.12322543, -0.06448696, -0.12847184, 0.050058555, 0.09502069, 0.08681986, -0.14003497, 0.03627888, -0.075629145, -0.095788166, -0.08410784, 0.13308963, -0.007147816, -0.16363329, 0.12797672, 0.124641, -0.05630061, 0.0064241965, 0.077181205, -0.1251426, 0.08616565, 0.1477562, -0.04511368, 0.029885028, 0.057127535, -0.08563146, 0.13729702, -0.10859255, -0.102196366, 0.008430395, -0.0945447, 0.10205625, -0.07343792, 0.16189432, -0.1300748, 0.08548705, 0.16390403, -0.02669807, -0.058629803, -0.05904906, -0.016605929, 0.14874554, 0.014934211, -0.09052281, 0.0579616, -0.041529182, 0.09614261, 0.15888576, -0.11366321, -0.102919176, -0.1167308, -0.011413716, 0.07176415, -0.03216456, -0.063260436, 0.059609246, 0.16423965, 
0.0052398313, 0.1286797, -0.0381152, -0.009582818, 0.004786132, -0.1019815, 0.043783717, 0.05244485, -0.06435464, -0.16259833, -0.100482024, 0.0587321, -0.052555863, 0.032503795, -0.1606384, -0.14574, -0.05185242, -0.08184071, 0.15766397, 0.09867271, 0.08309498, -0.15646282, 0.15911676, -0.008041214, -0.07785257, 0.06866316, -0.07157379, 0.13319956, -0.066218115, 0.0138255, -0.076073825, -0.14936924, 0.10676395, -0.4842985, 0.083836384, -0.21565008, -0.2306193, 0.0017399695, -0.07744393, -0.2044993, -0.21714376, 0.0077707577, 0.14650588, 0.19233301, -0.04317506, -0.058397546, 0.15633541, -0.115028, -0.044130307, 0.063888475, 0.21123467, 0.20039539, -0.045635425, 0.040344927, -0.12061099, 0.13238785, -0.11554383, 0.012527357, -0.04936022, -0.223834, 0.2067501, 0.035001267, -0.121593, 0.08469669, -0.15821323, 0.013301196, 0.19869077, -0.18677086, -0.09790556, 0.18662173, -0.216591, 0.13041325, 0.13628985, 0.042848308, -0.031125685, -0.22374651, -0.087204315, 0.05124186, -0.22576457, 0.014185649, -0.0899537, -0.015126135, -0.10904176, -0.212513, -0.19453013, 0.0071554612, -0.07960433, -0.20750536, -0.22908148, 0.066988595, -0.11946863, -0.20373446, -0.03756, -0.15693687, 0.015695922, -0.19193731, 0.035843078, 0.07994549, 0.025597025, -0.10725631, -0.11276663, -0.1882937, 0.019561082, 0.0135140605, 0.041632164, 0.0010907603, -0.06264914, -0.016213655, 0.0937373, 0.094795, -0.1173104, -0.21944033, -0.09396857, 0.13556847, -0.09024931, 0.1276821, -2.7715965E-4, 0.12017726, 0.13998412, 0.13809435, 0.16587347, 0.04789949, -0.08513931, 0.07294201, -0.08220003, -0.15560868, 0.14816408, -0.09582949, 0.051776934, -0.011485172, 0.14832942, 0.10104054, 0.080303155, 0.0034141147, 0.14833276, 0.09612207, 0.11273294, 0.13111332, -0.00879518, -0.1397018, -0.10093753, -0.00945932, -0.032682095, -0.14018348, 0.050238717, 0.09185889, -0.14419281, 0.09613244, -0.13719763, 0.04358094, -0.15398286, -0.116741166, -0.11954482, 0.14914127, -0.126483, -0.026603939, 0.15768388, 0.06356159, 
0.05631903, 0.0101217795, 0.15248485, -0.14745563, -0.0145869935, 0.0382958, 0.057202652, -0.14191794, 0.059604887, 0.011006361, -0.07016107, 0.076446384, 0.013760659, -0.068240955, 0.0037634, 0.12695941, 0.041081227, 0.10223117, 0.11603621, -0.06294605, -0.010134418, -0.006934982, 0.11731349, -0.10002373, 0.14468494, 0.006046706, -0.11748926, -0.13269922, 0.08922616, 0.076726876, 0.079133116, -0.13795392, 0.05776867, -0.12632991, -0.16351144, -0.067499354, 0.047223303, 0.063164465, -0.0149828065, -0.031813424, -0.08393954, -0.067819, 0.081516, -0.1065244, 0.14492081, 0.11396905, -0.10664382, -0.0098184915, 0.08660889, -0.16464078, 0.07709077, -0.1493178, 0.017629929, 0.08108806, -0.057861995, 0.05144662, -0.019507658, 0.098744385, 0.14157839, -0.101155385, -0.1155548, -0.1539434, 0.07039324, -0.015811022, 0.15094946, -0.16115923, 0.116900794, 0.11721963, 0.020760974, 0.0040808455, -0.0896887, 0.013347261, 0.11278092, -0.07966485, -0.094330534, -0.15664604, 0.015197758, 0.12119024, -0.05060158, 0.06654976, -0.13198644, -0.1457269, -0.13899888, 0.038908076, -0.13269305, -0.11445787, 0.021789772, 0.027084751, 0.01323522, -0.12667863, 0.026683968, 0.04916361, 0.0086855, 0.15367854, 0.031549584, -0.0036370864, 0.08499007, -0.10802871, 0.03548985, -0.17660856, -0.068241306, 0.15097389, 0.16520916, 0.024556529, 0.0017257226, 0.17331718, 0.196117, 0.19437543, 0.19648184, -0.1331118, -0.21632133, -0.18020143, 0.12856491, 0.1344524, 0.11382166, 0.064181186, 0.14279565, -0.08350899, -0.2256594, -0.13126723, 0.043258272, -0.021165192, 0.089386486, -0.09204444, -0.0960608, -0.037649803, 0.22336064, -0.031554904, 0.124656096, -0.025671339, -0.1065685, 0.0453102, -1.68393E-5, 0.22479524, 0.046631828, 0.007860622, -0.22629729, -0.13721013, 0.22810946, -0.12107487, 0.022246245, 0.17803338, 0.2083739, 0.18673882, -0.1917718, 0.07565709, 0.120346785, -0.14759375, -0.1377154, 0.038963128, 0.22792713, -0.2159763, -0.006619736, 0.2313753, -0.04800687, -0.1518908, 0.18948461, 0.1076321, 
-0.11479616, -0.0212803, 0.14886868, -0.22150691, 0.089185275, -0.040394045, 0.13415302, 0.21480684, 0.0878023, 0.106930904, -0.18570949, -0.013600573, 0.11532847, 0.11659276, 0.112827145, -0.1062416, 0.066263296, -0.08610482, 0.105527066, -0.058957383, -0.15528603, -0.009521967, 0.011328606, -0.06197259, -0.13204348, 0.08675131, -0.113543, -0.01445269, 0.02258719, -0.008030752, -0.093486756, -0.07264881, 0.09213272, 0.07619277, 0.16032794, -0.026074272, 0.066076815, -0.10525776, 0.16016503, 0.03144442, -0.023126643, 0.05451808, 0.022852356, -0.096872106, -0.030566314, -0.16589479, 0.0905115, -0.1473723, -0.12166525, 0.078377604, 0.13821222, -0.078764655, 0.14731602, -0.08815969, -0.0236424, -0.0355236, -0.09844407, -0.012984, 0.047678906, -0.038449008, 0.08535368, 0.15068671, -0.008833185, -0.09007217, 0.112541415, -0.06900989, -0.102243155, -0.050330114, -0.13928314, -0.041724514, 0.054797813, -0.16646549, 0.13796, 0.12394269, 0.020277899, -0.013631716, -0.09424963, -0.13880578, 0.08686539, -0.15236098, 0.05722864, -0.02671615, -0.06085055, 0.09522983, -0.03990184, -0.06986189, -0.014213024, -0.1377847, 0.08251909, 0.0143873375, -0.0860864, -0.0640099, -0.06048214, -0.030843036, 0.10346391, -0.14285919, 0.1575129, 0.11078764, -0.09553229, -0.15557009, -0.039680153, -0.02489069, 0.03813003, 0.1080799, 0.07591443, 0.1631084, 0.04714953, -0.10192201, -0.12497483, 0.038626827, -0.07361671, -0.097818114, -0.14928903, -0.14453772, 0.10313048, 0.11320499, -0.063832685, 0.011636197, -0.16415314, -0.142816, 0.041214544, -0.119791135, 0.10883034, -0.14729027, -0.122481905, 0.08507194, -0.088145964, -0.015075706, 0.06492, -0.16094309, 0.12339206, 0.011586048, 0.1321518, -0.05177626, 0.033773363, -0.13636817, 0.013378032, -0.003163873, 0.02471618, -0.13203168, 0.07989189, -0.054477777, 0.059936292, -0.077277765, 0.019922124, -0.15395634, 0.0088137, 0.036947053, -0.11207754, 0.042513624, -0.05665606, -0.015827265, 0.12174054 ) val weights = model.getParameters()._1 
weights.copy(Tensor[Double](weightData, Array(weightData.size, 1))) // val weightsOri = new ArrayBuffer[Tensor[Double]]() // val weightsNew = new ArrayBuffer[Tensor[Double]]() // // val sizeI = hiddenSize * inputSize * 3 * 3 // val sizeH = hiddenSize * hiddenSize * 3 * 3 // var next = 0 // for(i <- 0 until 4) { // val i2g = Tensor[Double](weightData.slice(next, next + sizeI), // Array(1, hiddenSize, inputSize, 3, 3)) // weightsOri += Tensor[Double]().resizeAs(i2g).copy(i2g) // next += sizeI // val i2gBias = Tensor[Double](weightData.slice(next, next + hiddenSize), // Array(1, hiddenSize)) // weightsOri += Tensor[Double]().resizeAs(i2gBias).copy(i2gBias) // next += hiddenSize // val h2g = Tensor[Double](weightData.slice(next, next + sizeH), // Array(1, hiddenSize, hiddenSize, 3, 3)) // weightsOri += Tensor[Double]().resizeAs(h2g).copy(h2g) // next += sizeH // } // // val weightsTable = T(weightsOri(0), weightsOri(3), weightsOri(6), weightsOri(9)) // val joinWeights = JoinTable[Double](2, 5) // weightsNew += joinWeights.forward(weightsTable) // // val biasTable = T(weightsOri(1), weightsOri(4), weightsOri(7), weightsOri(10)) // val joinBias = JoinTable[Double](1, 1) // weightsNew += joinBias.forward(biasTable) // // weightsNew += weightsOri(2) // weightsNew += weightsOri(5) // weightsNew += weightsOri(8) // weightsNew += weightsOri(11) // // weights.copy(Module.flatten[Double](weightsNew.toArray)) val expectedCellData = Array( -0.14987348457222294, -0.22105992474941866, -0.3387238605194439, -0.39446403835780186, -0.316453669906271, -0.37140873935818197, -0.4794204029633373, -0.43318998938865677, -0.31715138375541807, -0.36546697609966056, -0.36668646927670884, -0.2531378632113071, 0.12164367622452013, 0.06375662877054206, 0.10558023052572624, 0.09296068067631055, 0.10350287214650072, -0.043105084379426555, -0.002532373132315092, -0.017107651118940928, 0.0783054058646045, 0.09983730266019218, 0.15692887903321268, 0.0010369583032553142, -0.11568265586055029, 
-0.05157072657958458, -0.030545098446643958, 0.07097465680094239, -0.19039540889885484, -0.19104510993259888, -0.02513773533630481, 0.033952703953675126, -0.22412212420921007, -0.24804429480520482, -0.12672816312820606, -0.022755560537669244, -0.30986135292585515, -0.4251493103752495, -0.2889602199722825, -0.17273356360945663, -0.3580787788172773, -0.5158149093281363, -0.5032594569503482, -0.27377998399438014, -0.14520932544812054, -0.3487598323162634, -0.36089232520218323, -0.3875967229666661, -0.10052985037535103, -0.30405218242711163, -0.18654948355613826, -0.27308495250404397, -0.3780858084639802, -0.348063759578918, -0.5343725340433997, -0.3798054993937692, -0.2756440427688284, -0.38768793305018395, -0.3235976734640538, -0.24045597962433007, 0.0341441391957715, 5.299130157097875E-4, 0.15185104088503992, 0.08317049353052722, 0.11869821799103805, -0.015495938546971158, -0.03628157899463032, 0.03548216233650562, 0.16615806661829663, 0.11809127970778976, 0.022987783943591498, 0.053172762021747644, -0.1608682545104523, 0.05471790389217375, -0.05140250835841573, 0.012483754945225009, -0.19132561788613034, -0.11758276843982571, -0.04001330599272664, -0.06993179715262401, -0.18292331023592934, -0.23018407727971535, -0.04965212591333139, -0.06706774082467053, -0.20722497302489531, -0.3163177647903549, -0.20047416971047505, -0.09940467310699157, -0.40391310591443685, -0.3453615998739744, -0.3436142871456762, -0.30579940792439053, -0.21393999767341623, -0.38632194331237324, -0.338132501410083, -0.2799806913472954, -0.2982647860718788, -3.718604397006525E-4, -0.2748632767327928, -0.3613061886167156, -0.35767327847568825, -0.4899646165682648, -0.4672077926890158, -0.37567367161152376, -0.1962868448170617, -0.3926117801022085, -0.2074747021487374, -0.3643094309050142, 0.15985278516427298, 0.11449180058788376, 0.11367214355435092, 0.10212329736136479, 0.11600188662627217, -0.08044840449625897, 0.042658116444042354, -0.06944713691444436, 0.15348701605457232, 
      // Remaining entries of the expected cell-state reference values (the
      // Array(...) literal opened on an earlier line continues here).
      -0.02271487773115937, 0.13052044218592151, 0.10538763744284842,
      0.06161551500417564, -0.1439853127761765, -0.03235769175109047,
      0.05306321317925293, -0.023773273902641757, -0.18127460458925815,
      -0.08972730903797148, 0.10524359137085819, -0.22209026327385234,
      0.015056602926191405, -0.19742347876502547, 0.01990844614402051,
      -0.18828070103927522, -0.21929051591262086, -0.3450388474490299,
      -0.21525044509990426, -0.36115984252761013, -0.30653256673883555,
      -0.4084202398086612, -0.3277215031561226, -0.1867237630660016,
      -0.3244769101591504, -0.23294822238292623, -0.26057123158689666
    )

    // Forward the whole sequence, then inspect the recurrent layer's state.
    val output = model.forward(input).asInstanceOf[Tensor[Double]]
    // getHiddenState() returns a table: element 1 is the hidden state,
    // element 2 is the cell state (as read out below).
    val state = rec.getHiddenState()
    val hiddenState = state.toTable.apply(1).asInstanceOf[Tensor[Double]]
    val cell = state.toTable.apply(2).asInstanceOf[Tensor[Double]]
    // The exposed hidden state must equal the output at the last time step
    // exactly (zero tolerance).
    hiddenState.map(output.select(2, seqLength), (v1, v2) => {
      assert(abs(v1 - v2) == 0)
      v1
    })
    // The cell state must match the precomputed reference values to 1e-10.
    cell.map(Tensor[Double](expectedCellData, Array(batchSize, hiddenSize, 3, 4)),
      (v1, v2) => {
        assert(abs(v1 - v2) < 1e-10)
        v1
      })
    // Feeding the captured state back in must still permit a forward pass
    // (smoke check only; the result is not asserted on).
    rec.setHiddenState(state)
    model.forward(input)
  }

  // Two identically-seeded models are forwarded on the same input; the second
  // one is first given a random initial (hidden, cell) state, so every output
  // element is expected to differ from the default-initialized model's output.
  // NOTE(review): "Peepwhole" in the spec name looks like a typo for
  // "Peephole" (left unchanged here since it is a runtime test description).
  "A ConvLSTMPeepwhole " should "with set state should generate different output" in {
    val hiddenSize = 4
    val inputSize = 2
    val seqLength = 2
    val batchSize = 3
    val input = Tensor[Double](batchSize, seqLength, inputSize, 3, 4).rand()
    val seed = 890
    RNG.setSeed(seed)
    val rec = Recurrent[Double]()
    val model = Sequential[Double]()
      .add(rec
        .add(ConvLSTMPeephole[Double](inputSize, hiddenSize, 3, 3, 1,
          withPeephole = false)))

    // Re-seed so the second model starts from identical random weights.
    RNG.setSeed(890)
    val rec2 = Recurrent[Double]()
    val model2 = Sequential[Double]()
      .add(rec2
        .add(ConvLSTMPeephole[Double](inputSize, hiddenSize, 3, 3, 1,
          withPeephole = false)))

    val output = model.forward(input).asInstanceOf[Tensor[Double]]
    // Seed the second recurrent layer with a random (hidden, cell) pair
    // before it sees any input.
    rec2.setHiddenState(T(Tensor[Double](batchSize, hiddenSize, 3, 4).rand,
      Tensor[Double](batchSize, hiddenSize, 3, 4).rand))
    val output2 = model2.forward(input).asInstanceOf[Tensor[Double]]

    // With different initial states, no output element should coincide.
    output.map(output2, (v1, v2) => {
      assert(abs(v1 - v2) != 0)
      v1
    })
  }
  // Checks that explicit L2 regularizers on a ConvLSTMPeephole are equivalent
  // to optimizer-level weight decay: model1 trains with weightDecay = 0.1 in
  // its SGD state and no regularizers, model2 trains with weightDecay = 0.0
  // but L2Regularizer(0.1) on the input/hidden/bias/cell parameter groups.
  // Starting from identical parameters, both 100-step runs must end with
  // identical weights and identical final losses.
  "ConvLSTMPeephole L2 regularizer" should "works correctly" in {
    import com.intel.analytics.bigdl.numeric.NumericDouble
    val hiddenSize = 5
    val inputSize = 3
    val seqLength = 4
    val batchSize = 1
    val kernalW = 3
    val kernalH = 3
    // state1: decay handled by the optimizer (weightDecay = 0.1).
    val state1 = T("learningRate" -> 0.1, "learningRateDecay" -> 5e-7,
      "weightDecay" -> 0.1, "momentum" -> 0.002)
    // state2: no optimizer decay; decay is supplied by the regularizers below.
    val state2 = T("learningRate" -> 0.1, "learningRateDecay" -> 5e-7,
      "weightDecay" -> 0.0, "momentum" -> 0.002)
    val criterion = new TimeDistributedCriterion[Double](new MSECriterion[Double])
    val input = Tensor[Double](batchSize, seqLength, inputSize, 3, 3).rand
    val labels = Tensor[Double](batchSize, seqLength, hiddenSize, 3, 3).rand

    val rec = Recurrent[Double]()
    val model1 = Sequential[Double]()
      .add(rec
        .add(ConvLSTMPeephole[Double](
          inputSize,
          hiddenSize,
          kernalW, kernalH,
          1,
          withPeephole = true)))

    val (weights1, grad1) = model1.getParameters()

    val model2 = Sequential[Double]()
      .add(Recurrent[Double]()
        .add(ConvLSTMPeephole[Double](
          inputSize,
          hiddenSize,
          kernalW, kernalH,
          1,
          wRegularizer = L2Regularizer(0.1),
          uRegularizer = L2Regularizer(0.1),
          bRegularizer = L2Regularizer(0.1),
          cRegularizer = L2Regularizer(0.1),
          withPeephole = true)))
    val (weights2, grad2) = model2.getParameters()
    // Start both models from identical parameters and gradients so the two
    // training trajectories are directly comparable.
    weights2.copy(weights1.clone())
    grad2.copy(grad1.clone())

    val sgd = new SGD[Double]

    // Loss/gradient closure for model1 (optimizer-side weight decay).
    def feval1(x: Tensor[Double]): (Double, Tensor[Double]) = {
      val output = model1.forward(input).toTensor[Double]
      val _loss = criterion.forward(output, labels)
      model1.zeroGradParameters()
      val gradInput = criterion.backward(output, labels)
      model1.backward(input, gradInput)
      (_loss, grad1)
    }

    // Loss/gradient closure for model2 (explicit L2 regularizers).
    def feval2(x: Tensor[Double]): (Double, Tensor[Double]) = {
      val output = model2.forward(input).toTensor[Double]
      val _loss = criterion.forward(output, labels)
      model2.zeroGradParameters()
      val gradInput = criterion.backward(output, labels)
      model2.backward(input, gradInput)
      (_loss, grad2)
    }

    var loss1: Array[Double] = null
    for (i <- 1 to 100) {
      loss1 = sgd.optimize(feval1, weights1, state1)._2
      println(s"${i}-th loss = ${loss1(0)}")
    }

    var loss2: Array[Double] = null
    for (i <- 1 to 100) {
      loss2 = sgd.optimize(feval2, weights2, state2)._2
      println(s"${i}-th loss = ${loss2(0)}")
    }

    // Both runs must converge to identical weights and identical loss values.
    weights1 should be(weights2)
    loss1 should be(loss2)
  }
}
zhangxiaoli73/BigDL
spark/dl/src/test/scala/com/intel/analytics/bigdl/nn/ConvLSTMPeepholeSpec.scala
Scala
apache-2.0
75,783
/*
 * Copyright 2016 The BigDL Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intel.analytics.bigdl.dllib.utils.tf.loaders

import java.nio.ByteOrder

import com.intel.analytics.bigdl.Module
import com.intel.analytics.bigdl.dllib.tensor.TensorNumericMath.TensorNumeric
import org.tensorflow.framework.NodeDef
import com.intel.analytics.bigdl.dllib.nn.ops.{LogicalNot => LogicalNotOp}
import com.intel.analytics.bigdl.dllib.utils.tf.Context

import scala.reflect.ClassTag

/**
 * Loader that maps the TensorFlow `LogicalNot` op to a BigDL module.
 *
 * `build` returns a fresh [[LogicalNotOp]]; the node definition, byte order
 * and import context are not consulted (the op needs no attribute translation
 * as far as this loader is concerned).
 */
class LogicalNot extends TensorflowOpsLoader {

  override def build[T: ClassTag](nodeDef: NodeDef, byteOrder: ByteOrder,
    context: Context[T])(implicit ev: TensorNumeric[T]): Module[T] = {
    LogicalNotOp()
  }
}
intel-analytics/BigDL
scala/dllib/src/main/scala/com/intel/analytics/bigdl/dllib/utils/tf/loaders/LogicalNot.scala
Scala
apache-2.0
1,226
package com.sksamuel.elastic4s.api

import com.sksamuel.elastic4s.Indexes
import com.sksamuel.elastic4s.requests.count.CountRequest

/**
 * DSL entry point for count operations: provides the `count` builder used to
 * construct a [[CountRequest]].
 */
trait CountApi {

  /** Builds a [[CountRequest]] targeting the given indexes. */
  def count(indexes: Indexes): CountRequest = CountRequest(indexes)
}
sksamuel/elastic4s
elastic4s-core/src/main/scala/com/sksamuel/elastic4s/api/CountApi.scala
Scala
apache-2.0
220
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution.datasources.parquet import java.io.IOException import java.net.URI import scala.collection.JavaConverters._ import scala.collection.mutable import scala.collection.parallel.ForkJoinTaskSupport import scala.concurrent.forkjoin.ForkJoinPool import scala.util.{Failure, Try} import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.{FileStatus, Path} import org.apache.hadoop.mapreduce._ import org.apache.hadoop.mapreduce.lib.input.FileSplit import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl import org.apache.parquet.filter2.compat.FilterCompat import org.apache.parquet.filter2.predicate.FilterApi import org.apache.parquet.format.converter.ParquetMetadataConverter.SKIP_ROW_GROUPS import org.apache.parquet.hadoop._ import org.apache.parquet.hadoop.codec.CodecConfig import org.apache.parquet.hadoop.util.ContextUtil import org.apache.parquet.schema.MessageType import org.apache.spark.{SparkException, TaskContext} import org.apache.spark.internal.Logging import org.apache.spark.sql._ import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.expressions._ import 
org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection import org.apache.spark.sql.catalyst.parser.LegacyTypeStringParser import org.apache.spark.sql.execution.datasources._ import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.sources._ import org.apache.spark.sql.types._ import org.apache.spark.util.SerializableConfiguration class ParquetFileFormat extends FileFormat with DataSourceRegister with Logging with Serializable { // Hold a reference to the (serializable) singleton instance of ParquetLogRedirector. This // ensures the ParquetLogRedirector class is initialized whether an instance of ParquetFileFormat // is constructed or deserialized. Do not heed the Scala compiler's warning about an unused field // here. private val parquetLogRedirector = ParquetLogRedirector.INSTANCE override def shortName(): String = "parquet" override def toString: String = "Parquet" override def hashCode(): Int = getClass.hashCode() override def equals(other: Any): Boolean = other.isInstanceOf[ParquetFileFormat] override def prepareWrite( sparkSession: SparkSession, job: Job, options: Map[String, String], dataSchema: StructType): OutputWriterFactory = { val parquetOptions = new ParquetOptions(options, sparkSession.sessionState.conf) val conf = ContextUtil.getConfiguration(job) val committerClass = conf.getClass( SQLConf.PARQUET_OUTPUT_COMMITTER_CLASS.key, classOf[ParquetOutputCommitter], classOf[ParquetOutputCommitter]) if (conf.get(SQLConf.PARQUET_OUTPUT_COMMITTER_CLASS.key) == null) { logInfo("Using default output committer for Parquet: " + classOf[ParquetOutputCommitter].getCanonicalName) } else { logInfo("Using user defined output committer for Parquet: " + committerClass.getCanonicalName) } conf.setClass( SQLConf.OUTPUT_COMMITTER_CLASS.key, committerClass, classOf[ParquetOutputCommitter]) // We're not really using `ParquetOutputFormat[Row]` for writing data here, because we override // it in `ParquetOutputWriter` to support appending and 
dynamic partitioning. The reason why // we set it here is to setup the output committer class to `ParquetOutputCommitter`, which is // bundled with `ParquetOutputFormat[Row]`. job.setOutputFormatClass(classOf[ParquetOutputFormat[Row]]) ParquetOutputFormat.setWriteSupportClass(job, classOf[ParquetWriteSupport]) // We want to clear this temporary metadata from saving into Parquet file. // This metadata is only useful for detecting optional columns when pushdowning filters. ParquetWriteSupport.setSchema(dataSchema, conf) // Sets flags for `CatalystSchemaConverter` (which converts Catalyst schema to Parquet schema) // and `CatalystWriteSupport` (writing actual rows to Parquet files). conf.set( SQLConf.PARQUET_BINARY_AS_STRING.key, sparkSession.sessionState.conf.isParquetBinaryAsString.toString) conf.set( SQLConf.PARQUET_INT96_AS_TIMESTAMP.key, sparkSession.sessionState.conf.isParquetINT96AsTimestamp.toString) conf.set( SQLConf.PARQUET_WRITE_LEGACY_FORMAT.key, sparkSession.sessionState.conf.writeLegacyParquetFormat.toString) // Sets compression scheme conf.set(ParquetOutputFormat.COMPRESSION, parquetOptions.compressionCodecClassName) // SPARK-15719: Disables writing Parquet summary files by default. if (conf.get(ParquetOutputFormat.ENABLE_JOB_SUMMARY) == null) { conf.setBoolean(ParquetOutputFormat.ENABLE_JOB_SUMMARY, false) } new OutputWriterFactory { // This OutputWriterFactory instance is deserialized when writing Parquet files on the // executor side without constructing or deserializing ParquetFileFormat. Therefore, we hold // another reference to ParquetLogRedirector.INSTANCE here to ensure the latter class is // initialized. 
private val parquetLogRedirector = ParquetLogRedirector.INSTANCE override def newInstance( path: String, dataSchema: StructType, context: TaskAttemptContext): OutputWriter = { new ParquetOutputWriter(path, context) } override def getFileExtension(context: TaskAttemptContext): String = { CodecConfig.from(context).getCodec.getExtension + ".parquet" } } } override def inferSchema( sparkSession: SparkSession, parameters: Map[String, String], files: Seq[FileStatus]): Option[StructType] = { val parquetOptions = new ParquetOptions(parameters, sparkSession.sessionState.conf) // Should we merge schemas from all Parquet part-files? val shouldMergeSchemas = parquetOptions.mergeSchema val mergeRespectSummaries = sparkSession.sessionState.conf.isParquetSchemaRespectSummaries val filesByType = splitFiles(files) // Sees which file(s) we need to touch in order to figure out the schema. // // Always tries the summary files first if users don't require a merged schema. In this case, // "_common_metadata" is more preferable than "_metadata" because it doesn't contain row // groups information, and could be much smaller for large Parquet files with lots of row // groups. If no summary file is available, falls back to some random part-file. // // NOTE: Metadata stored in the summary files are merged from all part-files. However, for // user defined key-value metadata (in which we store Spark SQL schema), Parquet doesn't know // how to merge them correctly if some key is associated with different values in different // part-files. When this happens, Parquet simply gives up generating the summary file. This // implies that if a summary file presents, then: // // 1. Either all part-files have exactly the same Spark SQL schema, or // 2. Some part-files don't contain Spark SQL schema in the key-value metadata at all (thus // their schemas may differ from each other). // // Here we tend to be pessimistic and take the second case into account. 
Basically this means // we can't trust the summary files if users require a merged schema, and must touch all part- // files to do the merge. val filesToTouch = if (shouldMergeSchemas) { // Also includes summary files, 'cause there might be empty partition directories. // If mergeRespectSummaries config is true, we assume that all part-files are the same for // their schema with summary files, so we ignore them when merging schema. // If the config is disabled, which is the default setting, we merge all part-files. // In this mode, we only need to merge schemas contained in all those summary files. // You should enable this configuration only if you are very sure that for the parquet // part-files to read there are corresponding summary files containing correct schema. // As filed in SPARK-11500, the order of files to touch is a matter, which might affect // the ordering of the output columns. There are several things to mention here. // // 1. If mergeRespectSummaries config is false, then it merges schemas by reducing from // the first part-file so that the columns of the lexicographically first file show // first. // // 2. If mergeRespectSummaries config is true, then there should be, at least, // "_metadata"s for all given files, so that we can ensure the columns of // the lexicographically first file show first. // // 3. If shouldMergeSchemas is false, but when multiple files are given, there is // no guarantee of the output order, since there might not be a summary file for the // lexicographically first file, which ends up putting ahead the columns of // the other files. However, this should be okay since not enabling // shouldMergeSchemas means (assumes) all the files have the same schemas. val needMerged: Seq[FileStatus] = if (mergeRespectSummaries) { Seq() } else { filesByType.data } needMerged ++ filesByType.metadata ++ filesByType.commonMetadata } else { // Tries any "_common_metadata" first. 
Parquet files written by old versions or Parquet // don't have this. filesByType.commonMetadata.headOption // Falls back to "_metadata" .orElse(filesByType.metadata.headOption) // Summary file(s) not found, the Parquet file is either corrupted, or different part- // files contain conflicting user defined metadata (two or more values are associated // with a same key in different files). In either case, we fall back to any of the // first part-file, and just assume all schemas are consistent. .orElse(filesByType.data.headOption) .toSeq } ParquetFileFormat.mergeSchemasInParallel(filesToTouch, sparkSession) } case class FileTypes( data: Seq[FileStatus], metadata: Seq[FileStatus], commonMetadata: Seq[FileStatus]) private def splitFiles(allFiles: Seq[FileStatus]): FileTypes = { // Lists `FileStatus`es of all leaf nodes (files) under all base directories. val leaves = allFiles.filter { f => isSummaryFile(f.getPath) || !((f.getPath.getName.startsWith("_") && !f.getPath.getName.contains("=")) || f.getPath.getName.startsWith(".")) }.toArray.sortBy(_.getPath.toString) FileTypes( data = leaves.filterNot(f => isSummaryFile(f.getPath)), metadata = leaves.filter(_.getPath.getName == ParquetFileWriter.PARQUET_METADATA_FILE), commonMetadata = leaves.filter(_.getPath.getName == ParquetFileWriter.PARQUET_COMMON_METADATA_FILE)) } private def isSummaryFile(file: Path): Boolean = { file.getName == ParquetFileWriter.PARQUET_COMMON_METADATA_FILE || file.getName == ParquetFileWriter.PARQUET_METADATA_FILE } /** * Returns whether the reader will return the rows as batch or not. 
*/ override def supportBatch(sparkSession: SparkSession, schema: StructType): Boolean = { val conf = sparkSession.sessionState.conf conf.parquetVectorizedReaderEnabled && conf.wholeStageEnabled && schema.length <= conf.wholeStageMaxNumFields && schema.forall(_.dataType.isInstanceOf[AtomicType]) } override def isSplitable( sparkSession: SparkSession, options: Map[String, String], path: Path): Boolean = { true } override def buildReaderWithPartitionValues( sparkSession: SparkSession, dataSchema: StructType, partitionSchema: StructType, requiredSchema: StructType, filters: Seq[Filter], options: Map[String, String], hadoopConf: Configuration): (PartitionedFile) => Iterator[InternalRow] = { // For Parquet data source, `buildReader` already handles partition values appending. Here we // simply delegate to `buildReader`. buildReader( sparkSession, dataSchema, partitionSchema, requiredSchema, filters, options, hadoopConf) } override def buildReader( sparkSession: SparkSession, dataSchema: StructType, partitionSchema: StructType, requiredSchema: StructType, filters: Seq[Filter], options: Map[String, String], hadoopConf: Configuration): PartitionedFile => Iterator[InternalRow] = { hadoopConf.set(ParquetInputFormat.READ_SUPPORT_CLASS, classOf[ParquetReadSupport].getName) hadoopConf.set( ParquetReadSupport.SPARK_ROW_REQUESTED_SCHEMA, ParquetSchemaConverter.checkFieldNames(requiredSchema).json) hadoopConf.set( ParquetWriteSupport.SPARK_ROW_SCHEMA, ParquetSchemaConverter.checkFieldNames(requiredSchema).json) ParquetWriteSupport.setSchema(requiredSchema, hadoopConf) // Sets flags for `CatalystSchemaConverter` hadoopConf.setBoolean( SQLConf.PARQUET_BINARY_AS_STRING.key, sparkSession.sessionState.conf.isParquetBinaryAsString) hadoopConf.setBoolean( SQLConf.PARQUET_INT96_AS_TIMESTAMP.key, sparkSession.sessionState.conf.isParquetINT96AsTimestamp) // Try to push down filters when filter push-down is enabled. 
val pushed = if (sparkSession.sessionState.conf.parquetFilterPushDown) { filters // Collects all converted Parquet filter predicates. Notice that not all predicates can be // converted (`ParquetFilters.createFilter` returns an `Option`). That's why a `flatMap` // is used here. .flatMap(ParquetFilters.createFilter(requiredSchema, _)) .reduceOption(FilterApi.and) } else { None } val broadcastedHadoopConf = sparkSession.sparkContext.broadcast(new SerializableConfiguration(hadoopConf)) // TODO: if you move this into the closure it reverts to the default values. // If true, enable using the custom RecordReader for parquet. This only works for // a subset of the types (no complex types). val resultSchema = StructType(partitionSchema.fields ++ requiredSchema.fields) val enableVectorizedReader: Boolean = sparkSession.sessionState.conf.parquetVectorizedReaderEnabled && resultSchema.forall(_.dataType.isInstanceOf[AtomicType]) // Whole stage codegen (PhysicalRDD) is able to deal with batches directly val returningBatch = supportBatch(sparkSession, resultSchema) (file: PartitionedFile) => { assert(file.partitionValues.numFields == partitionSchema.size) val fileSplit = new FileSplit(new Path(new URI(file.filePath)), file.start, file.length, Array.empty) val split = new org.apache.parquet.hadoop.ParquetInputSplit( fileSplit.getPath, fileSplit.getStart, fileSplit.getStart + fileSplit.getLength, fileSplit.getLength, fileSplit.getLocations, null) val attemptId = new TaskAttemptID(new TaskID(new JobID(), TaskType.MAP, 0), 0) val hadoopAttemptContext = new TaskAttemptContextImpl(broadcastedHadoopConf.value.value, attemptId) // Try to push down filters when filter push-down is enabled. // Notice: This push-down is RowGroups level, not individual records. 
if (pushed.isDefined) { ParquetInputFormat.setFilterPredicate(hadoopAttemptContext.getConfiguration, pushed.get) } val parquetReader = if (enableVectorizedReader) { val vectorizedReader = new VectorizedParquetRecordReader() vectorizedReader.initialize(split, hadoopAttemptContext) logDebug(s"Appending $partitionSchema ${file.partitionValues}") vectorizedReader.initBatch(partitionSchema, file.partitionValues) if (returningBatch) { vectorizedReader.enableReturningBatches() } vectorizedReader } else { logDebug(s"Falling back to parquet-mr") // ParquetRecordReader returns UnsafeRow val reader = pushed match { case Some(filter) => new ParquetRecordReader[UnsafeRow]( new ParquetReadSupport, FilterCompat.get(filter, null)) case _ => new ParquetRecordReader[UnsafeRow](new ParquetReadSupport) } reader.initialize(split, hadoopAttemptContext) reader } val iter = new RecordReaderIterator(parquetReader) Option(TaskContext.get()).foreach(_.addTaskCompletionListener(_ => iter.close())) // UnsafeRowParquetRecordReader appends the columns internally to avoid another copy. if (parquetReader.isInstanceOf[VectorizedParquetRecordReader] && enableVectorizedReader) { iter.asInstanceOf[Iterator[InternalRow]] } else { val fullSchema = requiredSchema.toAttributes ++ partitionSchema.toAttributes val joinedRow = new JoinedRow() val appendPartitionColumns = GenerateUnsafeProjection.generate(fullSchema, fullSchema) // This is a horrible erasure hack... if we type the iterator above, then it actually check // the type in next() and we get a class cast exception. If we make that function return // Object, then we can defer the cast until later! 
if (partitionSchema.length == 0) { // There is no partition columns iter.asInstanceOf[Iterator[InternalRow]] } else { iter.asInstanceOf[Iterator[InternalRow]] .map(d => appendPartitionColumns(joinedRow(d, file.partitionValues))) } } } } } object ParquetFileFormat extends Logging { private[parquet] def readSchema( footers: Seq[Footer], sparkSession: SparkSession): Option[StructType] = { def parseParquetSchema(schema: MessageType): StructType = { val converter = new ParquetSchemaConverter( sparkSession.sessionState.conf.isParquetBinaryAsString, sparkSession.sessionState.conf.isParquetBinaryAsString, sparkSession.sessionState.conf.writeLegacyParquetFormat) converter.convert(schema) } val seen = mutable.HashSet[String]() val finalSchemas: Seq[StructType] = footers.flatMap { footer => val metadata = footer.getParquetMetadata.getFileMetaData val serializedSchema = metadata .getKeyValueMetaData .asScala.toMap .get(ParquetReadSupport.SPARK_METADATA_KEY) if (serializedSchema.isEmpty) { // Falls back to Parquet schema if no Spark SQL schema found. Some(parseParquetSchema(metadata.getSchema)) } else if (!seen.contains(serializedSchema.get)) { seen += serializedSchema.get // Don't throw even if we failed to parse the serialized Spark schema. Just fallback to // whatever is available. Some(Try(DataType.fromJson(serializedSchema.get)) .recover { case _: Throwable => logInfo( "Serialized Spark schema in Parquet key-value metadata is not in JSON format, " + "falling back to the deprecated DataType.fromCaseClassString parser.") LegacyTypeStringParser.parse(serializedSchema.get) } .recover { case cause: Throwable => logWarning( s"""Failed to parse serialized Spark schema in Parquet key-value metadata: |\\t$serializedSchema """.stripMargin, cause) } .map(_.asInstanceOf[StructType]) .getOrElse { // Falls back to Parquet schema if Spark SQL schema can't be parsed. 
parseParquetSchema(metadata.getSchema) }) } else { None } } finalSchemas.reduceOption { (left, right) => try left.merge(right) catch { case e: Throwable => throw new SparkException(s"Failed to merge incompatible schemas $left and $right", e) } } } /** * Reads Parquet footers in multi-threaded manner. * If the config "spark.sql.files.ignoreCorruptFiles" is set to true, we will ignore the corrupted * files when reading footers. */ private[parquet] def readParquetFootersInParallel( conf: Configuration, partFiles: Seq[FileStatus], ignoreCorruptFiles: Boolean): Seq[Footer] = { val parFiles = partFiles.par parFiles.tasksupport = new ForkJoinTaskSupport(new ForkJoinPool(8)) parFiles.flatMap { currentFile => try { // Skips row group information since we only need the schema. // ParquetFileReader.readFooter throws RuntimeException, instead of IOException, // when it can't read the footer. Some(new Footer(currentFile.getPath(), ParquetFileReader.readFooter( conf, currentFile, SKIP_ROW_GROUPS))) } catch { case e: RuntimeException => if (ignoreCorruptFiles) { logWarning(s"Skipped the footer in the corrupted file: $currentFile", e) None } else { throw new IOException(s"Could not read footer for file: $currentFile", e) } } }.seq } /** * Figures out a merged Parquet schema with a distributed Spark job. * * Note that locality is not taken into consideration here because: * * 1. For a single Parquet part-file, in most cases the footer only resides in the last block of * that file. Thus we only need to retrieve the location of the last block. However, Hadoop * `FileSystem` only provides API to retrieve locations of all blocks, which can be * potentially expensive. * * 2. This optimization is mainly useful for S3, where file metadata operations can be pretty * slow. And basically locality is not available when using S3 (you can't run computation on * S3 nodes). 
*/ def mergeSchemasInParallel( filesToTouch: Seq[FileStatus], sparkSession: SparkSession): Option[StructType] = { val assumeBinaryIsString = sparkSession.sessionState.conf.isParquetBinaryAsString val assumeInt96IsTimestamp = sparkSession.sessionState.conf.isParquetINT96AsTimestamp val writeLegacyParquetFormat = sparkSession.sessionState.conf.writeLegacyParquetFormat val serializedConf = new SerializableConfiguration(sparkSession.sessionState.newHadoopConf()) // !! HACK ALERT !! // // Parquet requires `FileStatus`es to read footers. Here we try to send cached `FileStatus`es // to executor side to avoid fetching them again. However, `FileStatus` is not `Serializable` // but only `Writable`. What makes it worse, for some reason, `FileStatus` doesn't play well // with `SerializableWritable[T]` and always causes a weird `IllegalStateException`. These // facts virtually prevents us to serialize `FileStatus`es. // // Since Parquet only relies on path and length information of those `FileStatus`es to read // footers, here we just extract them (which can be easily serialized), send them to executor // side, and resemble fake `FileStatus`es there. val partialFileStatusInfo = filesToTouch.map(f => (f.getPath.toString, f.getLen)) // Set the number of partitions to prevent following schema reads from generating many tasks // in case of a small number of parquet files. val numParallelism = Math.min(Math.max(partialFileStatusInfo.size, 1), sparkSession.sparkContext.defaultParallelism) val ignoreCorruptFiles = sparkSession.sessionState.conf.ignoreCorruptFiles // Issues a Spark job to read Parquet schema in parallel. val partiallyMergedSchemas = sparkSession .sparkContext .parallelize(partialFileStatusInfo, numParallelism) .mapPartitions { iterator => // Resembles fake `FileStatus`es with serialized path and length information. 
val fakeFileStatuses = iterator.map { case (path, length) => new FileStatus(length, false, 0, 0, 0, 0, null, null, null, new Path(path)) }.toSeq // Reads footers in multi-threaded manner within each task val footers = ParquetFileFormat.readParquetFootersInParallel( serializedConf.value, fakeFileStatuses, ignoreCorruptFiles) // Converter used to convert Parquet `MessageType` to Spark SQL `StructType` val converter = new ParquetSchemaConverter( assumeBinaryIsString = assumeBinaryIsString, assumeInt96IsTimestamp = assumeInt96IsTimestamp, writeLegacyParquetFormat = writeLegacyParquetFormat) if (footers.isEmpty) { Iterator.empty } else { var mergedSchema = ParquetFileFormat.readSchemaFromFooter(footers.head, converter) footers.tail.foreach { footer => val schema = ParquetFileFormat.readSchemaFromFooter(footer, converter) try { mergedSchema = mergedSchema.merge(schema) } catch { case cause: SparkException => throw new SparkException( s"Failed merging schema of file ${footer.getFile}:\\n${schema.treeString}", cause) } } Iterator.single(mergedSchema) } }.collect() if (partiallyMergedSchemas.isEmpty) { None } else { var finalSchema = partiallyMergedSchemas.head partiallyMergedSchemas.tail.foreach { schema => try { finalSchema = finalSchema.merge(schema) } catch { case cause: SparkException => throw new SparkException( s"Failed merging schema:\\n${schema.treeString}", cause) } } Some(finalSchema) } } /** * Reads Spark SQL schema from a Parquet footer. If a valid serialized Spark SQL schema string * can be found in the file metadata, returns the deserialized [[StructType]], otherwise, returns * a [[StructType]] converted from the [[MessageType]] stored in this footer. 
*/ def readSchemaFromFooter( footer: Footer, converter: ParquetSchemaConverter): StructType = { val fileMetaData = footer.getParquetMetadata.getFileMetaData fileMetaData .getKeyValueMetaData .asScala.toMap .get(ParquetReadSupport.SPARK_METADATA_KEY) .flatMap(deserializeSchemaString) .getOrElse(converter.convert(fileMetaData.getSchema)) } private def deserializeSchemaString(schemaString: String): Option[StructType] = { // Tries to deserialize the schema string as JSON first, then falls back to the case class // string parser (data generated by older versions of Spark SQL uses this format). Try(DataType.fromJson(schemaString).asInstanceOf[StructType]).recover { case _: Throwable => logInfo( "Serialized Spark schema in Parquet key-value metadata is not in JSON format, " + "falling back to the deprecated DataType.fromCaseClassString parser.") LegacyTypeStringParser.parse(schemaString).asInstanceOf[StructType] }.recoverWith { case cause: Throwable => logWarning( "Failed to parse and ignored serialized Spark schema in " + s"Parquet key-value metadata:\\n\\t$schemaString", cause) Failure(cause) }.toOption } }
big-pegasus/spark
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFileFormat.scala
Scala
apache-2.0
28,313
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.serializer

import java.io._
import java.nio.ByteBuffer

import org.apache.spark.SparkConf
import org.apache.spark.util.ByteBufferInputStream

/** Serialization stream backed by a [[java.io.ObjectOutputStream]]. */
private[spark] class JavaSerializationStream(out: OutputStream) extends SerializationStream {
  val objOut = new ObjectOutputStream(out)
  def writeObject[T](t: T): SerializationStream = { objOut.writeObject(t); this }
  def flush() { objOut.flush() }
  def close() { objOut.close() }
}

/**
 * Deserialization stream backed by a [[java.io.ObjectInputStream]] that resolves classes
 * through the supplied ClassLoader (rather than the JVM default loader), so classes shipped
 * in user jars can be found.
 */
private[spark] class JavaDeserializationStream(in: InputStream, loader: ClassLoader)
  extends DeserializationStream {
  val objIn = new ObjectInputStream(in) {
    override def resolveClass(desc: ObjectStreamClass): Class[_] =
      try {
        Class.forName(desc.getName, false, loader)
      } catch {
        // Class.forName cannot resolve primitive type names such as "int", which can
        // appear in a stream when a Class[_] of a primitive type was serialized.
        // Fall back to the explicit primitive mapping before giving up.
        case e: ClassNotFoundException =>
          JavaDeserializationStream.primitiveMappings.getOrElse(desc.getName, throw e)
      }
  }

  def readObject[T](): T = objIn.readObject().asInstanceOf[T]
  def close() { objIn.close() }
}

private[spark] object JavaDeserializationStream {
  // Primitive type names (as reported by ObjectStreamClass.getName) mapped to their
  // Class objects; Class.forName throws ClassNotFoundException for these names.
  val primitiveMappings = Map[String, Class[_]](
    "boolean" -> classOf[Boolean],
    "byte" -> classOf[Byte],
    "char" -> classOf[Char],
    "short" -> classOf[Short],
    "int" -> classOf[Int],
    "long" -> classOf[Long],
    "float" -> classOf[Float],
    "double" -> classOf[Double],
    "void" -> classOf[Void])
}

/** A [[SerializerInstance]] implemented with plain Java serialization. */
private[spark] class JavaSerializerInstance extends SerializerInstance {
  def serialize[T](t: T): ByteBuffer = {
    val bos = new ByteArrayOutputStream()
    val out = serializeStream(bos)
    out.writeObject(t)
    out.close()
    ByteBuffer.wrap(bos.toByteArray)
  }

  def deserialize[T](bytes: ByteBuffer): T = {
    val bis = new ByteBufferInputStream(bytes)
    val in = deserializeStream(bis)
    in.readObject().asInstanceOf[T]
  }

  def deserialize[T](bytes: ByteBuffer, loader: ClassLoader): T = {
    val bis = new ByteBufferInputStream(bytes)
    val in = deserializeStream(bis, loader)
    in.readObject().asInstanceOf[T]
  }

  def serializeStream(s: OutputStream): SerializationStream = {
    new JavaSerializationStream(s)
  }

  // Uses the current thread's context ClassLoader; see the two-argument overload
  // to supply an explicit loader.
  def deserializeStream(s: InputStream): DeserializationStream = {
    new JavaDeserializationStream(s, Thread.currentThread.getContextClassLoader)
  }

  def deserializeStream(s: InputStream, loader: ClassLoader): DeserializationStream = {
    new JavaDeserializationStream(s, loader)
  }
}

/**
 * A Spark serializer that uses Java's built-in serialization.
 */
class JavaSerializer(conf: SparkConf) extends Serializer {
  def newInstance(): SerializerInstance = new JavaSerializerInstance
}
sryza/spark
core/src/main/scala/org/apache/spark/serializer/JavaSerializer.scala
Scala
apache-2.0
2,866
/** * Copyright 2015 Thomson Reuters * * Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. */ package cmwell.ctrl.controllers import cmwell.ctrl.config.Config /** * Created by michael on 7/12/15. */ object DcController extends ComponentController(s"${Config.cmwellHome}/app/dc", "app/dc", Set("dc")) { }
nruppin/CM-Well
server/cmwell-controller/src/main/scala/cmwell/ctrl/controllers/DcController.scala
Scala
apache-2.0
833
// Copyright (c) 2011-2015 ScalaMock Contributors (https://github.com/paulbutcher/ScalaMock/graphs/contributors) // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
package com.paulbutcher.test.features

import com.paulbutcher.test.IsolatedSpec
import org.scalatest.{OneInstancePerTest, Suite}

/**
 * Exercises ScalaMock's call-count expectations (once, twice, never, repeated,
 * noMoreThanTwice, repeat) on a mocked function.
 *
 * Verification is performed explicitly via withExpectations, so autoVerify is
 * disabled for this suite.
 */
class CallCountTest extends IsolatedSpec {

  val noArgFunMock = mockFunction[String]
  val intFunMock = mockFunction[Int, String]

  autoVerify = false

  behavior of "Mock function"

  it should "fail if an unexpected call is made" in withExpectations {
    intercept[ExpectationException] { intFunMock(42) }
  }

  // Fixed: the expectation must be left unsatisfied (no call) so that
  // verification fails; previously this test called the mock once and could
  // never fail, duplicating the "not fail ... (once)" test below.
  it should "fail if a method isn't called often enough (once)" in {
    intercept[ExpectationException] {
      withExpectations {
        intFunMock.expects(42).once
      }
    }
  }

  it should "not fail if a method is called once (once)" in withExpectations {
    intFunMock.expects(42).once
    intFunMock(42)
  }

  // Fixed: use a .once expectation so the second call is "too often";
  // previously this used .twice and duplicated the "(twice)" test below.
  it should "fail if a method is called too often (once)" in withExpectations {
    intFunMock.expects(42).once
    intFunMock(42)
    intercept[ExpectationException] { intFunMock(42) }
  }

  it should "fail if a method isn't called often enough (twice)" in {
    intercept[ExpectationException] {
      withExpectations {
        intFunMock.expects(42).twice
        intFunMock(42)
      }
    }
  }

  it should "fail if a method is called too often (twice)" in withExpectations {
    intFunMock.expects(42).twice
    intFunMock(42)
    intFunMock(42)
    intercept[ExpectationException] { intFunMock(42) }
  }

  it should "handle noMoreThanTwice call count (zero)" in withExpectations {
    intFunMock.expects(2).noMoreThanTwice
  }

  it should "handle noMoreThanTwice call count (one)" in withExpectations {
    intFunMock.expects(2).noMoreThanTwice
    intFunMock(2)
  }

  it should "handle noMoreThanTwice call count (two)" in withExpectations {
    intFunMock.expects(2).noMoreThanTwice
    intFunMock(2)
    intFunMock(2)
  }

  // Fixed: the third call must use the expected argument (2) so that the
  // failure is caused by exceeding the call count, not by an unexpected
  // argument (42) as before.
  it should "handle noMoreThanTwice call count (three)" in withExpectations {
    intFunMock.expects(2).noMoreThanTwice
    intFunMock(2)
    intFunMock(2)
    intercept[ExpectationException] { intFunMock(2) }
  }

  it should "treat stubs as syntactic sugar for anyNumberOfTimes" in withExpectations {
    intFunMock.stubs(*).returning("a return value")
    assertResult("a return value") { intFunMock(1) }
    assertResult("a return value") { intFunMock(2) }
    assertResult("a return value") { intFunMock(3) }
  }

  it should "handle never call count (zero)" in withExpectations {
    intFunMock.expects(2).never()
  }

  it should "handle never call count (one)" in withExpectations {
    intFunMock.expects(2).never()
    intercept[ExpectationException] { intFunMock(2) }
  }

  it should "handle repeated(3).times call count (3)" in withExpectations {
    intFunMock.expects(2).repeated(3).times
    intFunMock(2)
    intFunMock(2)
    intFunMock(2)
  }

  it should "handle repeat(1 to 2) call count (0)" in {
    intercept[ExpectationException] {
      withExpectations {
        intFunMock.expects(2).repeat(1 to 2)
      }
    }
  }

  it should "handle repeat(1 to 2) call count (1)" in withExpectations {
    intFunMock.expects(2).repeat(1 to 2)
    intFunMock(2)
  }

  it should "handle repeat(1 to 2) call count (2)" in withExpectations {
    intFunMock.expects(2).repeat(1 to 2)
    intFunMock(2)
    intFunMock(2)
  }

  it should "handle repeat(1 to 2) call count (3)" in withExpectations {
    intFunMock.expects(2).repeat(1 to 2)
    intFunMock(2)
    intFunMock(2)
    intercept[ExpectationException] { intFunMock(2) }
  }

  it should "handle repeat(2) call count (2)" in withExpectations {
    intFunMock.expects(2).repeat(2)
    intFunMock(2)
    intFunMock(2)
  }

  override def newInstance = new CallCountTest
}
paulbutcher/ScalaMock
shared/src/test/scala/com/paulbutcher/test/features/CallCountTest.scala
Scala
mit
4,819
/* * Copyright 2014-2022 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.atlas.guice import com.google.inject.AbstractModule import com.netflix.atlas.core.db.Database import com.netflix.atlas.webapi.DatabaseProvider import com.netflix.iep.guice.LifecycleModule /** * Configures the database needed for the webapi. */ final class WebApiModule extends AbstractModule { override def configure(): Unit = { install(new LifecycleModule) bind(classOf[Database]).toProvider(classOf[DatabaseProvider]) } override def equals(obj: Any): Boolean = { obj != null && getClass.equals(obj.getClass) } override def hashCode(): Int = getClass.hashCode() }
Netflix/atlas
atlas-module-webapi/src/main/scala/com/netflix/atlas/guice/WebApiModule.scala
Scala
apache-2.0
1,218
package foo.sbt object SbtFoo { }
Duhemm/sbt
sbt/src/sbt-test/actions/cross-multiproject/sbt-foo/B.scala
Scala
bsd-3-clause
35
import org.scalatest.{Matchers, FunSuite}

/** @version 1.0.1 */
// Exercism test suite for the "Bowling" exercise.  Each test threads a fixed
// sequence of rolls through an (immutable) `Bowling` scorer — defined by the
// student's solution, not in this file — via foldLeft, then inspects the
// result of `score()`, which is an Either: Right(points) for a valid complete
// game, Left(...) for invalid rolls or premature scoring.
// By exercism convention every test except the first is `pending`; students
// un-pend them one at a time as the solution progresses.
class BowlingTest extends FunSuite with Matchers {

  // --- basic scoring: open frames, spares and strikes ---

  test("should be able to score a game with all zeros") {
    // 10 frames of two gutter balls each (20 rolls).
    val score = List(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0).foldLeft(Bowling())((acc, roll) => acc.roll(roll)).score()
    score should be (Right(0))
  }

  test("should be able to score a game with no strikes or spares") {
    pending
    val score = List(3, 6, 3, 6, 3, 6, 3, 6, 3, 6, 3, 6, 3, 6, 3, 6, 3, 6, 3, 6).foldLeft(Bowling())((acc, roll) => acc.roll(roll)).score()
    score should be (Right(90))
  }

  test("a spare followed by zeros is worth ten points") {
    pending
    val score = List(6, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0).foldLeft(Bowling())((acc, roll) => acc.roll(roll)).score()
    score should be (Right(10))
  }

  test("points scored in the roll after a spare are counted twice") {
    pending
    val score = List(6, 4, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0).foldLeft(Bowling())((acc, roll) => acc.roll(roll)).score()
    score should be (Right(16))
  }

  test("consecutive spares each get a one roll bonus") {
    pending
    val score = List(5, 5, 3, 7, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0).foldLeft(Bowling())((acc, roll) => acc.roll(roll)).score()
    score should be (Right(31))
  }

  test("a spare in the last frame gets a one roll bonus that is counted once") {
    pending
    val score = List(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 3, 7).foldLeft(Bowling())((acc, roll) => acc.roll(roll)).score()
    score should be (Right(17))
  }

  test("a strike earns ten points in a frame with a single roll") {
    pending
    val score = List(10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0).foldLeft(Bowling())((acc, roll) => acc.roll(roll)).score()
    score should be (Right(10))
  }

  test("points scored in the two rolls after a strike are counted twice as a bonus") {
    pending
    val score = List(10, 5, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0).foldLeft(Bowling())((acc, roll) => acc.roll(roll)).score()
    score should be (Right(26))
  }

  test("consecutive strikes each get the two roll bonus") {
    pending
    val score = List(10, 10, 10, 5, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0).foldLeft(Bowling())((acc, roll) => acc.roll(roll)).score()
    score should be (Right(81))
  }

  test("a strike in the last frame gets a two roll bonus that is counted once") {
    pending
    val score = List(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 7, 1).foldLeft(Bowling())((acc, roll) => acc.roll(roll)).score()
    score should be (Right(18))
  }

  test("rolling a spare with the two roll bonus does not get a bonus roll") {
    pending
    val score = List(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 7, 3).foldLeft(Bowling())((acc, roll) => acc.roll(roll)).score()
    score should be (Right(20))
  }

  test("strikes with the two roll bonus do not get bonus rolls") {
    pending
    val score = List(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 10, 10).foldLeft(Bowling())((acc, roll) => acc.roll(roll)).score()
    score should be (Right(30))
  }

  test("a strike with the one roll bonus after a spare in the last frame does not get a bonus") {
    pending
    val score = List(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 3, 10).foldLeft(Bowling())((acc, roll) => acc.roll(roll)).score()
    score should be (Right(20))
  }

  test("all strikes is a perfect game") {
    pending
    // 10 strike frames plus 2 bonus strikes = 12 rolls, 300 points.
    val score = List(10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10).foldLeft(Bowling())((acc, roll) => acc.roll(roll)).score()
    score should be (Right(300))
  }

  // --- invalid-roll validation: these expect a Left from score() ---

  test("rolls cannot score negative points") {
    pending
    val score = List().foldLeft(Bowling())((acc, roll) => acc.roll(roll)).roll(-1).score()
    score.isLeft should be (true)
  }

  test("a roll cannot score more than 10 points") {
    pending
    val score = List().foldLeft(Bowling())((acc, roll) => acc.roll(roll)).roll(11).score()
    score.isLeft should be (true)
  }

  test("two rolls in a frame cannot score more than 10 points") {
    pending
    val score = List(5).foldLeft(Bowling())((acc, roll) => acc.roll(roll)).roll(6).score()
    score.isLeft should be (true)
  }

  test("bonus roll after a strike in the last frame cannot score more than 10 points") {
    pending
    val score = List(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10).foldLeft(Bowling())((acc, roll) => acc.roll(roll)).roll(11).score()
    score.isLeft should be (true)
  }

  test("two bonus rolls after a strike in the last frame cannot score more than 10 points") {
    pending
    val score = List(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 5).foldLeft(Bowling())((acc, roll) => acc.roll(roll)).roll(6).score()
    score.isLeft should be (true)
  }

  test("two bonus rolls after a strike in the last frame can score more than 10 points if one is a strike") {
    pending
    val score = List(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 10, 6).foldLeft(Bowling())((acc, roll) => acc.roll(roll)).score()
    score should be (Right(26))
  }

  test("the second bonus rolls after a strike in the last frame cannot be a strike if the first one is not a strike") {
    pending
    val score = List(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 6).foldLeft(Bowling())((acc, roll) => acc.roll(roll)).roll(10).score()
    score.isLeft should be (true)
  }

  test("second bonus roll after a strike in the last frame cannot score more than 10 points") {
    pending
    val score = List(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 10).foldLeft(Bowling())((acc, roll) => acc.roll(roll)).roll(11).score()
    score.isLeft should be (true)
  }

  // --- game-state validation: scoring too early / rolling too late ---

  test("an unstarted game cannot be scored") {
    pending
    val score = List().foldLeft(Bowling())((acc, roll) => acc.roll(roll)).score()
    score.isLeft should be (true)
  }

  test("an incomplete game cannot be scored") {
    pending
    val score = List(0, 0).foldLeft(Bowling())((acc, roll) => acc.roll(roll)).score()
    score.isLeft should be (true)
  }

  test("cannot roll if game already has ten frames") {
    pending
    val score = List(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0).foldLeft(Bowling())((acc, roll) => acc.roll(roll)).roll(0).score()
    score.isLeft should be (true)
  }

  test("bonus rolls for a strike in the last frame must be rolled before score can be calculated") {
    pending
    val score = List(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10).foldLeft(Bowling())((acc, roll) => acc.roll(roll)).score()
    score.isLeft should be (true)
  }

  test("both bonus rolls for a strike in the last frame must be rolled before score can be calculated") {
    pending
    val score = List(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 10).foldLeft(Bowling())((acc, roll) => acc.roll(roll)).score()
    score.isLeft should be (true)
  }

  test("bonus roll for a spare in the last frame must be rolled before score can be calculated") {
    pending
    val score = List(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 3).foldLeft(Bowling())((acc, roll) => acc.roll(roll)).score()
    score.isLeft should be (true)
  }
}
exercism/xscala
exercises/practice/bowling/src/test/scala/BowlingSuite.scala
Scala
mit
7,318
package net.lemonmodel.patterns

import java.net.URI
import org.scalatest._
import org.scalatest.matchers._
import scala.xml._
import scala.xml.Utility._

// Unit tests for the noun lemon-patterns: each test builds a pattern object
// (Name, RelationalNoun, ...) and compares the lemon/RDF XML it generates
// against a hand-written expected XML literal, after whitespace trimming.
class NounTest extends FlatSpec with ShouldMatchers {

  // Deterministic URI namer used by every test so generated entry/form/sense
  // URIs are predictable: "file:example/<urlencoded-form>-<pos>[#<element>]".
  val defaultNamer = new URINamer {
    def apply(pos : String, form : String, element : Option[String] = None) = {
      URI.create("file:example/"+java.net.URLEncoder.encode(form,"UTF-8")+"-"+pos+(element match {
        case Some(elem) => "#" + elem
        case None => ""
      }))
    }
    def auxiliaryEntry(form : String, pos : String, ontolex : Boolean) = URI.create("file:example/"+form)
    def auxXML = Nil
    // Anonymous URIs are memoised per referent so repeated lookups for the
    // same object return the same (randomly generated) URI.
    private val anonURIs = collection.mutable.Map[Any,URI]()
    private def randomID = (math.random * 10000000000l).toLong.toString
    def anonURI(ref : Any) = anonURIs.get(ref) match {
      case Some(x) => x
      case None => {
        val anon = URI.create("file:example/" + randomID)
        anonURIs.put(ref,anon)
        anon
      }
    }
  }

  // Renders `pattern` to XML and asserts equality with the expected `xml`;
  // both sides go through scala.xml.Utility.trim so pure formatting
  // differences are ignored.
  def xmlCheck(pattern : Pattern, xml : Elem) {
    trim(pattern.toXML(defaultNamer,"en")).toString() should equal (trim(xml).toString())
  }

  "The name pattern" should "produce valid lemon" in {
    xmlCheck(Name("Microsoft","http://microsoft.com"),
      <lemon:LexicalEntry rdf:about="file:example/Microsoft-noun">
        <lemon:canonicalForm>
          <lemon:Form rdf:about="file:example/Microsoft-noun#canonicalForm">
            <lemon:writtenRep xml:lang="en">Microsoft</lemon:writtenRep>
          </lemon:Form>
        </lemon:canonicalForm>
        <lexinfo:partOfSpeech rdf:resource="http://www.lexinfo.net/ontology/2.0/lexinfo#properNoun"></lexinfo:partOfSpeech>
        <lemon:sense>
          <lemon:LexicalSense rdf:about="file:example/Microsoft-noun#sense">
            <lemon:reference>
              <owl:NamedIndividual rdf:about="http://microsoft.com"/>
            </lemon:reference>
          </lemon:LexicalSense>
        </lemon:sense>
      </lemon:LexicalEntry>)
  }

  // Multiword expressions decompose into one lemon:Component per word.
  "The name pattern for MWEs" should "produce valid lemon" in {
    xmlCheck(Name(PNP("Microsoft"/pos.properNoun,"Windows"/pos.properNoun),"http://microsoft.com"),
      <lemon:LexicalEntry rdf:about="file:example/Microsoft+Windows-noun">
        <lemon:canonicalForm>
          <lemon:Form rdf:about="file:example/Microsoft+Windows-noun#canonicalForm">
            <lemon:writtenRep xml:lang="en">Microsoft Windows</lemon:writtenRep>
          </lemon:Form>
        </lemon:canonicalForm>
        <rdf:type rdf:resource="http://www.lexinfo.net/ontology/2.0/lexinfo#NounPhrase"></rdf:type>
        <lemon:decomposition rdf:parseType="Collection">
          <lemon:Component rdf:about="file:example/Microsoft+Windows-noun#element_0">
            <lemon:element>
              <lemon:LexicalEntry rdf:about="file:example/Microsoft-properNoun">
                <lemon:canonicalForm>
                  <lemon:Form rdf:about="file:example/Microsoft-properNoun#canonicalForm">
                    <lemon:writtenRep xml:lang="en">Microsoft</lemon:writtenRep>
                  </lemon:Form>
                </lemon:canonicalForm>
                <lexinfo:partOfSpeech rdf:resource="http://www.lexinfo.net/ontology/2.0/lexinfo#properNoun"/>
              </lemon:LexicalEntry>
            </lemon:element>
          </lemon:Component>
          <lemon:Component rdf:about="file:example/Microsoft+Windows-noun#element_1">
            <lemon:element>
              <lemon:LexicalEntry rdf:about="file:example/Windows-properNoun">
                <lemon:canonicalForm>
                  <lemon:Form rdf:about="file:example/Windows-properNoun#canonicalForm">
                    <lemon:writtenRep xml:lang="en">Windows</lemon:writtenRep>
                  </lemon:Form>
                </lemon:canonicalForm>
                <lexinfo:partOfSpeech rdf:resource="http://www.lexinfo.net/ontology/2.0/lexinfo#properNoun"/>
              </lemon:LexicalEntry>
            </lemon:element>
          </lemon:Component>
        </lemon:decomposition>
        <lemon:sense>
          <lemon:LexicalSense rdf:about="file:example/Microsoft+Windows-noun#sense">
            <lemon:reference>
              <owl:NamedIndividual rdf:about="http://microsoft.com"/>
            </lemon:reference>
          </lemon:LexicalSense>
        </lemon:sense>
      </lemon:LexicalEntry>)
  }

  val dbpedia = Namespace("http://dbpedia.org/resource/")

  // NOTE(review): this ClassNoun test was disabled (commented out) before the
  // visible history of this file; kept verbatim for reference.
  // "The class noun pattern" should "produce valid lemon" in {
  //   xmlCheck(ClassNoun("cat",dbpedia("Cat")) withPlural "cats",
  //     <lemon:LexicalEntry rdf:about="file:example/cat-noun">
  //       <lemon:canonicalForm>
  //         <lemon:Form rdf:about="file:example/cat-noun#canonicalForm">
  //           <lemon:writtenRep xml:lang="en">cat</lemon:writtenRep>
  //         </lemon:Form>
  //       </lemon:canonicalForm>
  //       <lexinfo:partOfSpeech rdf:resource="http://www.lexinfo.net/ontology/2.0/lexinfo#commonNoun"></lexinfo:partOfSpeech>
  //       <lemon:otherForm>
  //         <lemon:Form rdf:about="file:example/cat-noun#form">
  //           <lemon:writtenRep xml:lang="en">cats</lemon:writtenRep>
  //           <lexinfo:number xmlns:lexinfo="http://www.lexinfo.net/ontology/2.0/lexinfo#" rdf:resource="http://www.lexinfo.net/ontology/2.0/lexinfo#plural"></lexinfo:number>
  //         </lemon:Form>
  //       </lemon:otherForm>
  //       <lemon:sense>
  //         <lemon:LexicalSense rdf:about="file:example/cat-noun#sense">
  //           <lemon:reference>
  //             <owl:Class rdf:about="http://dbpedia.org/resource/Cat"/>
  //           </lemon:reference>
  //           <lemon:isA>
  //             <lemon:Argument rdf:about="file:example/cat-noun#subject"/>
  //           </lemon:isA>
  //         </lemon:LexicalSense>
  //       </lemon:sense>
  //       <lemon:synBehavior>
  //         <lemon:Frame rdf:about="file:example/cat-noun#frame">
  //           <rdf:type rdf:resource="http://www.lexinfo.net/ontology/2.0/lexinfo#NounPredicateFrame"/>
  //           <lexinfo:subject rdf:resource="file:example/cat-noun#subject"/>
  //         </lemon:Frame>
  //       </lemon:synBehavior>
  //     </lemon:LexicalEntry>)
  // }

  val ontology = Namespace("http://www.example.com/ontology#")

  // A binary relational noun ("agreement about X") maps to a NounPPFrame with
  // a copulative subject and a marked prepositional object.
  "The relation noun pattern" should "produce valid lemon" in {
    xmlCheck(RelationalNoun("agreement",ontology("agreeAbout"),
      propSubj=CopulativeArg,
      propObj=PrepositionalObject("about")),
      <lemon:LexicalEntry rdf:about="file:example/agreement-noun">
        <lemon:canonicalForm>
          <lemon:Form rdf:about="file:example/agreement-noun#canonicalForm">
            <lemon:writtenRep xml:lang="en">agreement</lemon:writtenRep>
          </lemon:Form>
        </lemon:canonicalForm>
        <lexinfo:partOfSpeech rdf:resource="http://www.lexinfo.net/ontology/2.0/lexinfo#commonNoun"></lexinfo:partOfSpeech>
        <lemon:sense>
          <lemon:LexicalSense rdf:about="file:example/agreement-noun#sense">
            <lemon:reference>
              <rdf:Property rdf:about="http://www.example.com/ontology#agreeAbout"/>
            </lemon:reference>
            <lemon:subjOfProp>
              <lemon:Argument rdf:about="file:example/agreement-noun#subject"/>
            </lemon:subjOfProp>
            <lemon:objOfProp>
              <lemon:Argument rdf:about="file:example/agreement-noun#adpositionalObject"/>
            </lemon:objOfProp>
          </lemon:LexicalSense>
        </lemon:sense>
        <lemon:synBehavior>
          <lemon:Frame rdf:about="file:example/agreement-noun#frame">
            <rdf:type rdf:resource="http://www.lexinfo.net/ontology/2.0/lexinfo#NounPPFrame"/>
            <lexinfo:copulativeArg rdf:resource="file:example/agreement-noun#subject"/>
            <lexinfo:prepositionalObject>
              <lemon:Argument rdf:about="file:example/agreement-noun#adpositionalObject">
                <lemon:marker rdf:resource="file:example/about"/>
              </lemon:Argument>
            </lexinfo:prepositionalObject>
          </lemon:Frame>
        </lemon:synBehavior>
      </lemon:LexicalEntry>)
  }

  // A noun with several (partly optional) arguments maps to a reified
  // Relationship class with one subsense per argument.
  "The multivalent relational noun pattern" should "produce valid lemon" in {
    xmlCheck(RelationalMultivalentNoun("position",ontology("Employment"),
      args=Seq(ontology("employee") as PossessiveAdjunct,
               ontology("role") as PrepositionalObject("as") optional,
               ontology("startOfEmployment") as PrepositionalObject("since") optional)),
      <lemon:LexicalEntry rdf:about="file:example/position-noun">
        <lemon:canonicalForm>
          <lemon:Form rdf:about="file:example/position-noun#canonicalForm">
            <lemon:writtenRep xml:lang="en">position</lemon:writtenRep>
          </lemon:Form>
        </lemon:canonicalForm>
        <lexinfo:partOfSpeech rdf:resource="http://www.lexinfo.net/ontology/2.0/lexinfo#commonNoun"></lexinfo:partOfSpeech>
        <lemon:sense>
          <lemon:LexicalSense rdf:about="file:example/position-noun#sense">
            <lemon:reference>
              <rdfs:Class rdf:about="http://www.example.com/ontology#Employment">
                <rdfs:subClassOf rdf:resource="http://www.lemon-model.net/oils#Relationship"/>
              </rdfs:Class>
            </lemon:reference>
            <lemon:subsense>
              <lemon:LexicalSense rdf:about="file:example/position-noun#subsense1">
                <lemon:objOfProp>
                  <lemon:Argument rdf:about="file:example/position-noun#arg1">
                    <!-- Mandatory argument -->
                  </lemon:Argument>
                </lemon:objOfProp>
                <lemon:reference>
                  <rdf:Property rdf:about="http://www.example.com/ontology#employee"/>
                </lemon:reference>
              </lemon:LexicalSense>
            </lemon:subsense>
            <lemon:subsense>
              <lemon:LexicalSense rdf:about="file:example/position-noun#subsense2">
                <lemon:objOfProp>
                  <lemon:Argument rdf:about="file:example/position-noun#arg2">
                    <lemon:optional rdf:datatype="http://www.w3.org/2001/XMLSchema#boolean">true</lemon:optional>
                  </lemon:Argument>
                </lemon:objOfProp>
                <lemon:reference>
                  <rdf:Property rdf:about="http://www.example.com/ontology#role"/>
                </lemon:reference>
              </lemon:LexicalSense>
            </lemon:subsense>
            <lemon:subsense>
              <lemon:LexicalSense rdf:about="file:example/position-noun#subsense3">
                <lemon:objOfProp>
                  <lemon:Argument rdf:about="file:example/position-noun#arg3">
                    <lemon:optional rdf:datatype="http://www.w3.org/2001/XMLSchema#boolean">true</lemon:optional>
                  </lemon:Argument>
                </lemon:objOfProp>
                <lemon:reference>
                  <rdf:Property rdf:about="http://www.example.com/ontology#startOfEmployment"/>
                </lemon:reference>
              </lemon:LexicalSense>
            </lemon:subsense>
          </lemon:LexicalSense>
        </lemon:sense>
        <lemon:synBehavior>
          <lemon:Frame rdf:about="file:example/position-noun#frame">
            <lexinfo:possessiveAdjunct rdf:resource="file:example/position-noun#arg1"/>
            <lexinfo:prepositionalObject>
              <lemon:Argument rdf:about="file:example/position-noun#arg2">
                <lemon:marker rdf:resource="file:example/as"/>
                <lemon:optional rdf:datatype="http://www.w3.org/2001/XMLSchema#boolean">true</lemon:optional>
              </lemon:Argument>
            </lexinfo:prepositionalObject>
            <lexinfo:prepositionalObject>
              <lemon:Argument rdf:about="file:example/position-noun#arg3">
                <lemon:marker rdf:resource="file:example/since"/>
                <lemon:optional rdf:datatype="http://www.w3.org/2001/XMLSchema#boolean">true</lemon:optional>
              </lemon:Argument>
            </lexinfo:prepositionalObject>
          </lemon:Frame>
        </lemon:synBehavior>
      </lemon:LexicalEntry>)
  }
}
jmccrae/lemon.patterns
src/test/scala/NounTest.scala
Scala
apache-2.0
12,345
package proofpeer.proofscript.proofdoc import proofpeer.proofscript.frontend.{TracksSourcePosition, ParseTree} import proofpeer.general.StringUtils import proofpeer.indent.Document sealed trait ProofDoc extends ParseTree final object ProofDoc { sealed trait Prop extends TracksSourcePosition case class StringProp(s : String) extends Prop val DEFAULT_PROP = "default" val KEYWORD_PROP = "keyword" case class V(ty : T, props : Map[String, Prop], errors : Vector[Prop], values : Vector[V]) extends TracksSourcePosition { def add(v : V) : V = if (v != null) V(ty, props, errors, values :+ v) else this def addInFront(v : V) = if (v != null) V(ty, props, errors, v +: values) else this def add(vs : Vector[V]) = V(ty, props, errors, values ++ vs) def add(name : String, p : Prop) = V(ty, props + (name -> p), errors, values) def addError(p : Prop) = V(ty, props, errors :+ p, values) def stringOf(propname : String) : String = props(propname).asInstanceOf[StringProp].s } object V { private val noprops : Map[String, Prop] = Map() private val noerrors : Vector[Prop] = Vector() private val novalues : Vector[V] = Vector() def apply(ty : T) : V = V(ty, noprops, noerrors, novalues) def apply(ty : T, prop : Prop) : V = V(ty).add(DEFAULT_PROP, prop) def apply(ty : T, v : V) : V = V(ty).add(v) } sealed trait T case object Text extends T case object Punctuation extends T case object Reference extends T case object User extends T case object Blocks extends T case object Words extends T case object Link extends T case object Emph extends T case object DoubleEmph extends T case object Header extends T case object Emoticon extends T case object Hashtag extends T case object InlineMath extends T case object InlineVerbatim extends T case object InlineProofScript extends T case object InlineRun extends T case object Math extends T case object Verbatim extends T case object Quote extends T case object ProofScript extends T case object Run extends T case object Anything extends T case object ItemList 
extends T case object ListItem extends T case object Table extends T case object TableParam extends T case object Row extends T case object Line extends T case object Cell extends T case object Label extends T case object LabelOpen extends T case object LabelClose extends T case object References extends T case object RefItem extends T case object RefItemFields extends T case object RefItemField extends T case object Fallback extends T /** This is used until we know what to do with accidental ambiguitites */ case object Ambiguity extends T } final object Linearization { import ProofDoc.V type Style = String final case class Position(row : Int, column : Int) extends Ordered[Position] { def compare(that : Position) : Int = { if (row < that.row) -1 else if (row > that.row) 1 else if (column < that.column) -1 else if (column > that.column) 1 else 0 } def inc(document : Document) : Position = { Position(row, column + 1) } def dec(document : Document) : Position = { if (column > 0) Position(row, column - 1) else { assert(row > 0) Position(row - 1, document.lengthOfRow(row - 1)) } } } case class Token(startPosition : Position, endPosition : Position, styles : Set[String]) { assert(startPosition <= endPosition) } type Tokens = Vector[Token] val empty : Tokens = Vector() def single(token : Token) : Tokens = Vector(token) def concat(tokens1 : Tokens, tokens2 : Tokens) : Tokens = { if (tokens1.isEmpty) tokens2 else if (tokens2.isEmpty) tokens1 else { val u = tokens1.last val v = tokens2.head assert (u.endPosition < v.startPosition) tokens1 ++ tokens2 } } def append(tokens : Tokens, v : Token) : Tokens = { if (tokens.isEmpty) single(v) else { val u = tokens.last assert (u.endPosition < v.startPosition) tokens :+ v } } def make(document : Document, v : V) : Tokens = { new MakeTokens(document).make(v) } private class MakeTokens(document : Document) { def increment(p : Position, limit : Position) : Position = { val q = p.inc(document) if (q > limit) limit else q } def decrement(p 
: Position, limit : Position) : Position = { val q = p.dec(document) if (q < limit) limit else q } def make(v : V) : Tokens = { val tokens = make(v, empty) val r = document.numberOfRows - 1 if (r >= 0) { val start = Position(0, 0) val end = Position(r, document.lengthOfRow(r)) addStyle(addStyle(tokens, start, end, "text"), start, end, "proofdoc") } else tokens } def make(v : V, tokens : Tokens) : Tokens = { import ProofDoc._ v.ty match { case Fallback => makeFallback(v, "error", tokens) case Ambiguity => makeSimple(v, "ambiguity", tokens) case Reference => makeSimple(v, "reference", tokens) case User => makeSimple(v, "user", tokens) case Link => makeSimpleRecursive(v, "link", tokens) case Emph => makeSimpleRecursive(v, "emph", tokens) case Label => makeSimple(v, "label", tokens) case LabelOpen => makeSimple(v, "label", tokens) case LabelClose => makeSimple(v, "label", tokens) case DoubleEmph => makeSimpleRecursive(v, "doubleemph", tokens) case Header => val level = v.props(KEYWORD_PROP).asInstanceOf[StringProp].s.length val style = "header" + (if (level <= 3) level else 3) makeSimpleRecursive(v, style, tokens) case Emoticon => makeSimple(v, "emoticon", tokens) case Hashtag => makeSimple(v, "hashtag", tokens) case InlineMath => makeSimple(v, "inlinemath", tokens) case InlineVerbatim => makeSimple(v, "inlineverbatim", tokens) case InlineRun => makeSimple(v, "inlinerun", tokens) case InlineProofScript => makeSimple(v, "inlineproofscript", tokens) case Math => makeKeywordAnythingBlock(v, "keyword", "math", tokens) case Verbatim => makeKeywordAnythingBlock(v, "keyword", "verbatim", tokens) case ProofScript => makeKeywordAnythingBlock(v, "keyword", "proofscript", tokens) case Run => makeKeywordAnythingBlock(v, "keyword", "run", tokens) case ListItem => makeKeywordRecursive(v, "listbullet", tokens) case Table => makeKeywordRecursive(v, "keyword", tokens) case Line => makeKeywordRecursive(v, "keyword", tokens) case Row => makeKeywordRecursive(v, "keyword", tokens) case 
Cell => makeKeywordRecursive(v, "keyword", tokens) case TableParam => makeTableParam(v, tokens) case Quote => makeQuote(v, "keyword", "quote", tokens) case References => makeKeywordRecursive(v, "keyword", tokens) case RefItem => makeDefaultRecursive(v, "label", tokens) case RefItemFields => makeRecursive(v, tokens) case RefItemField => makeKeywordRecursive(v, "keyword", tokens) case _ => makeRecursive(v, tokens) } } def isEmpty(t : TracksSourcePosition) : Boolean = { t.sourcePosition.span.get.isNull } def locationOf(t : TracksSourcePosition) : (Position, Position) = { if (document.size == 0) return (Position(0, 0), Position(0, 0)) val span = t.sourcePosition.span.get if (span.firstIndexIncl < span.lastIndexExcl) { val (r1, c1, _) = document.character(span.firstIndexIncl) val (r2, c2, _) = document.character(span.lastIndexExcl - 1) (Position(r1, c1), Position(r2, c2)) } else if (span.firstIndexIncl > 0 && span.firstIndexIncl < document.size) { val (r1, c1, _) = document.character(span.firstIndexIncl - 1) val (r2, c2, _) = document.character(span.firstIndexIncl) (Position(r1, c1 + 1), Position(r2, c2)) } else if (span.firstIndexIncl == 0) { val (r, c, _) = document.character(0) (Position(0, 0), Position(r, c)) } else /* span.firstIndexIncl >= document.size */ { val (r, c, _) = document.character(span.firstIndexIncl - 1) (Position(r, c), Position(r, c + 1)) } } def addStyle(tokens : Tokens, startPosition : Position, endPosition : Position, style : Style) : Tokens = { assert(startPosition <= endPosition) var start = startPosition var end = endPosition var processing : Boolean = true var newTokens : Tokens = Vector() for (t <- tokens) { if (processing) { if (end < t.startPosition) { newTokens = append(append(newTokens, Token(start, end, Set(style))), t) processing = false } else if (start > t.endPosition) { newTokens = append(newTokens, t) processing = true } else { if (start < t.startPosition) { newTokens = append(newTokens, Token(start, decrement(t.startPosition, 
start), Set(style))) start = t.startPosition } else if (start > t.startPosition) { newTokens = append(newTokens, Token(t.startPosition, decrement(start, t.startPosition), t.styles)) } // start points now to the beginning of the new token with the combined styles if (end > t.endPosition) { newTokens = append(newTokens, Token(start, t.endPosition, t.styles + style)) start = increment(t.endPosition, end) processing = true } else if (end < t.endPosition) { newTokens = append(newTokens, Token(start, end, t.styles + style)) newTokens = append(newTokens, Token(increment(end, t.endPosition), t.endPosition, t.styles)) processing = false } else /* end == t.endPosition */ { newTokens = append(newTokens, Token(start, end, t.styles + style)) processing = false } } } else newTokens = append(newTokens, t) } if (processing) { newTokens = append(newTokens, Token(start, end, Set(style))) } newTokens } // apply the style to the whole span of V // do not descend further into V def makeSimple(v : V, style : Style, tokens : Tokens) : Tokens = { val l = locationOf(v) val ts = if (isEmpty(v)) empty else single(Token(l._1, l._2, Set(style))) val errors = makeErrors(v.errors.map(locationOf _), ts) concat(tokens, errors) } def makeFallback(v : V, style : Style, tokens : Tokens) : Tokens = { val l = locationOf(v) addStyle(tokens, l._1, l._2, "error") } // descend into V to compute the styles to be applied def makeRecursive(v : V, tokens : Tokens) : Tokens = { var ts : Tokens = empty for (value <- v.values) ts = make(value, ts) concat(tokens, makeErrors(v.errors.map(locationOf _), ts)) } // combines makeSimple and makeRecursive def makeSimpleRecursive(v : V, style : Style, tokens : Tokens) : Tokens = { var ts : Tokens = empty for (value <- v.values) ts = make(value, ts) val l = locationOf(v) ts = if (isEmpty(v)) ts else addStyle(ts, l._1, l._2, style) concat(tokens, makeErrors(v.errors.map(locationOf _), ts)) } def makeErrors(errors : Vector[(Position, Position)], tokens : Tokens) : Tokens = { 
var ts = tokens for ((startPosition, endPosition) <- errors) ts = addStyle(ts, startPosition, endPosition, "error") ts } def makeKeywordAnythingBlock(v : V, keywordStyle : Style, anythingStyle : Style, tokens : Tokens) : Tokens = { val keywordLocation = locationOf(v.props(ProofDoc.KEYWORD_PROP)) val keywordToken = Token(keywordLocation._1, keywordLocation._2, Set(keywordStyle)) val span = v.values(0).sourcePosition.span.get val ts = if (span.isNull) { single(keywordToken) } else { val start = Position(span.firstRow, 0) val end = Position(span.lastRow, document.lengthOfRow(span.lastRow) - 1) addStyle(single(keywordToken), start, end, anythingStyle) } concat(tokens, makeErrors(v.errors.map(locationOf _), ts)) } def makeKeywordRecursive(v : V, keywordStyle : Style, tokens : Tokens) : Tokens = { val keywordLocation = locationOf(v.props(ProofDoc.KEYWORD_PROP)) val keywordToken = Token(keywordLocation._1, keywordLocation._2, Set(keywordStyle)) makeRecursive(v, append(tokens, keywordToken)) } def makeDefaultRecursive(v : V, keywordStyle : Style, tokens : Tokens) : Tokens = { val keywordLocation = locationOf(v.props(ProofDoc.DEFAULT_PROP)) val keywordToken = Token(keywordLocation._1, keywordLocation._2, Set(keywordStyle)) makeRecursive(v, append(tokens, keywordToken)) } def makeQuote(v : V, keywordStyle : Style, quoteStyle : Style, tokens : Tokens) : Tokens = { val keywordLocation = locationOf(v.props(ProofDoc.KEYWORD_PROP)) val keywordToken = Token(keywordLocation._1, keywordLocation._2, Set(keywordStyle)) val ts = makeRecursive(v, append(tokens, keywordToken)) val token = ts.last val start = Position(keywordToken.startPosition.row + 1, 0) val end = locationOf(v)._2 if (start <= end) addStyle(ts, start, end, quoteStyle) else ts } def makeTableParam(v : V, tokens : Tokens) : Tokens = { val paramProp = v.props(ProofDoc.DEFAULT_PROP) val (pos, _) = locationOf(paramProp) val s = paramProp.asInstanceOf[ProofDoc.StringProp].s var ts = makeRecursive(v, tokens) var i = 0 for (c 
<- s) { val style = if (c == '|') "tableparam-bar" else "tableparam-orientation" val p = Position(pos.row, pos.column + i) ts = addStyle(ts, p, p, style) i = i + 1 } ts } } } class ProofDocSyntax(annotate : (Any, Option[proofpeer.indent.Span]) => Any) { import proofpeer.indent._ import proofpeer.indent.regex._ import proofpeer.indent.Constraint._ import ProofDoc._ import ProofDoc.V def computeAmbiguousValue(nonterminal : String, span : Span, parsetrees : Vector[proofpeer.indent.ParseTree]) : Any = { val values = parsetrees.map(_.getValue[V]) V(Ambiguity, StringProp(nonterminal)).add(values) } def range(cs : Char*) : Range = { var r = Range() for (c <- cs) r += Range(c) r } // case insensitive string as regular expression def cis(s : String) : RegularExpr = { import proofpeer.general.StringUtils._ var v : Vector[RegularExpr] = Vector() for (c <- s) { v = v :+ CHAR(Range(toLowerCase(c)) + Range(toUpperCase(c))) } seq(v : _*) } /** Generally useful regular expressions */ val letter = alt(chars('a', 'z'), chars('A', 'Z')) val digit = chars('0', '9') val anychar = CHAR(Range.universal) var scope : String = null var priority : Int = 1 var grammar : Grammar = Grammar(computeAmbiguousValue _) def setScope(s : String) { scope = s priority = 1 } def lex(terminal : String, r : RegularExpr) { grammar = grammar ++ rule(terminal, r, Some(priority), scope) priority += 1 } def lex(terminal : String, lexer : Lexer) { grammar = grammar ++ lexrule(terminal, lexer, Some(priority), scope) priority += 1 } def grule(nonterminal : String, rhs : String, constraint : Constraint, action : ParseContext => Any) { def annotatedAction(c : ParseContext) : Any = { val s : Option[Span] = if (c.span == null) None else Some(c.span) annotate(action(c), s) } grammar = grammar ++ rule(nonterminal, rhs, constraint, annotatedAction _) } def grule(nonterminal : String, rhs : String, action : ParseContext => Any) { grule(nonterminal, rhs, unconstrained, action) } def annotateProp[T <: Prop](p : T, s : Span) 
: T = { annotate(p, Some(s)).asInstanceOf[T] } def sprop(c : ParseContext, t : String) : StringProp = { annotateProp(StringProp(c.text(t)), c.span(t)) } def sprop(c : ParseContext) : StringProp = { annotateProp(StringProp(c.text), c.span) } def eprop(c : ParseContext, descr : String) : StringProp = { annotateProp(StringProp(descr), c.span) } def eprop(c : ParseContext, t : String, descr : String) : StringProp = { annotateProp(StringProp(descr), c.span(t)) } val punctuation = range('(', ')', '.', '!', '?', ':', ';', ',', '"', '\\'', '-') val specialrange = range('*', '~', '$', '`', '@', '\\\\', '<', '>', '{', '}', '[', ']', '#') val handledspecialrange = range('*', '$', '`', '@', '\\\\', '<', '{', '[', ']', '#') val nonsimple = specialrange + punctuation val simplechar = CHAR(-nonsimple) val escapechar = seq(char('\\\\'), CHAR(nonsimple)) val unicodechar = seq(char('\\\\'), REPEAT1(alt(digit, chars('a', 'f'), chars('A', 'F'))), char('/')) val simpleword = REPEAT1(alt(simplechar, escapechar, unicodechar)) def unescape(s : String) : String = { val builder : StringBuilder = new StringBuilder() var escape : Boolean = false var code : Int = 0 val MAX_UNICODE : Int = 0x10ffff for (c <- s) { if (!escape) { if (c == '\\\\') escape = true else builder.append(c) } else { if (nonsimple.contains(c)) { builder.append(c) escape = false } else { if (c == '/') { StringUtils.fromCodePoint(code) match { case Some(s) => builder.append(s) case None => // ignore code } escape = false code = 0 } else { val d : Int = if (c >= '0' && c <= '9') c - '0' else if (c >= 'A' && c <= 'F') (c - 'A') + 10 else if (c >= 'a' && c <= 'f') (c - 'a') + 10 else throw new RuntimeException("internal error: c = " + c) if (code <= MAX_UNICODE) code = code * 16 + d } } } } builder.toString } // It's a shame that this lexer is needed, it exists so that double backquotes are recognised // only when the backquotes are not separated by spaces. 
// The Lexer library should be extended at some point to cover these cases. private object InlineProofScriptLexer extends Lexer { val zero = false val first = Range('`') def lex(d : Document, startPosition : Int, param : ParseParam.V) : (Int, ParseParam.V) = { val size = d.size var pos = startPosition val List(minColumn, minRow, maxStartColumn, maxStartRow) = ParseParam.v2Ints(List(Int.MinValue, Int.MinValue, Int.MaxValue, Int.MaxValue), param) var lastRow = -1 def nil(len : Int) : (Int, ParseParam.V) = (len, ParseParam.NIL) def isDoubleBackquote : Boolean = { if (pos + 1 < size) { val (row1, col1, code1) = d.character(pos) lastRow = row1 if (code1 == '`' && col1 >= minColumn && row1 >= minRow && (pos > startPosition || (col1 <= maxStartColumn && row1 <= maxStartRow))) { val (row2, col2, code2) = d.character(pos + 1) code2 == '`' && row1 == row2 && col2 == col1 + 1 && col2 >= minColumn } else false } else false } if (isDoubleBackquote) { pos += 2 while (pos < size) { val (row, col, code) = d.character(pos) if (col < minColumn || row != lastRow) return nil(pos - startPosition) if (code != '`') pos += 1 else { if (isDoubleBackquote) pos += 2 else pos += 1 return nil(pos - startPosition) } lastRow = row } } nil(pos - startPosition) } } private object ProtrudeLexer extends Lexer { val zero = false val first = Range.universal def lex(d : Document, startPosition : Int, param : ParseParam.V) : (Int, ParseParam.V) = { val size = d.size if (startPosition >= size) return (-1, ParseParam.UNDEFINED) val List(minColumn, maxStartColumn) = ParseParam.v2Ints(List(Int.MinValue, Int.MaxValue), param) val (_, startColumn, _) = d.character(startPosition) if (startColumn < minColumn || startColumn > maxStartColumn) return (-1, ParseParam.UNDEFINED) var pos = startPosition + 1 while (pos < size) { val (_, column, _) = d.character(pos) if (column > startColumn) pos = pos + 1 else return (pos - startPosition, ParseParam.NIL) } (size - startPosition, ParseParam.NIL) } } /** Fallbacks */ 
setScope(FALLBACK_SCOPE) lex("FALLBACK", Lexer.untilEnd(anything)) lex("FALLBACKLINE", Lexer.untilNewline(anything)) lex("FALLBACKWORD", Lexer.untilWhitespace(REPEAT1(CHAR(-(punctuation + handledspecialrange))))) lex("FALLBACKPROTRUDE", Lexer.demandLeftBorder(ProtrudeLexer, 1)) def fallback(c : ParseContext, nonterminal : String) : V = { V(Fallback, eprop(c, nonterminal)) } /** Scope Paragraph */ setScope("Scope Paragraph") def keyword(r : RegularExpr) : Lexer = { Lexer.demandRightBorder(Lexer.demandLeftBorder(Lexer.untilWhitespace(r), 1)) } def klex(terminal : String, r : RegularExpr) { lex(terminal, keyword(r)) } lex("BACKSLASH", char('\\\\')) lex("SIMPLEWORD", simpleword) lex("PUNCTUATION", alt(CHAR(punctuation))) lex("ASTERISK", char('*')) lex("DOUBLEASTERISK", string("**")) lex("OPENLINK", char('[')) lex("CLOSELINK", char(']')) lex("USER", seq(char('@'), OPT(simpleword))) lex("HASHTAG", seq(char('#'), OPT(simpleword))) lex("LABEL", seq(string("::"), simpleword)) lex("LABELOPEN", seq(string("::("), simpleword)) lex("LABELCLOSE", seq(simpleword, string(")::"))) lex("HYPHEN", char('-')) lex("ENDASH", string("--")) lex("EMDASH", string("---")) lex("LEFTQUOTE", Lexer.demandLeftBorder(Lexer.untilWhitespace(string("'")), 0, nonsimple)) lex("RIGHTQUOTE", Lexer.demandRightBorder(Lexer.untilWhitespace(string("'")), 0, nonsimple)) lex("LEFTDOUBLEQUOTE", Lexer.demandLeftBorder(Lexer.untilWhitespace(char('"')), 0, nonsimple)) lex("RIGHTDOUBLEQUOTE", Lexer.demandRightBorder(Lexer.untilWhitespace(char('"')), 0, nonsimple)) klex("REFITEMLABEL", simpleword) klex("LISTBULLET", alt(char('-'), seq(alt(REPEAT1(digit), letter), char('.')))) klex("ITEMHYPHEN", char('-')) klex("KEYWORD", seq(REPEAT1(char('~')), REPEAT(letter), OPT(char('$')))) klex("MATH", alt(cis("~math"), string("~$"))) klex("VERBATIM", alt(cis("~verbatim"), string("~`"))) klex("RUN", alt(cis("~run"), string("~{"))) klex("PROOFSCRIPT", alt(cis("~proofscript"), string("~``"))) klex("QUOTE", cis("~quote")) 
klex("TABLE", cis("~table")) klex("REFERENCES", cis("~references")) lex("TABLEPARAM", Lexer.untilNewline(REPEAT1(alt(char('|'), cis("c"), cis("l"), cis("r"))))) klex("ROW", cis("~row")) klex("LINE", cis("~line")) klex("HEADER", REPEAT1(char('~'))) klex("DISPLAY", cis("~display")) klex("IN", cis("~in")) klex("TITLE", cis("~title")) klex("AUTHOR", cis("~author")) klex("YEAR", cis("~year")) klex("MONTH", cis("~month")) klex("DAY", cis("~day")) lex("EMOTICON", alt(string(":-)"), string(";-)"), string(":-D"), string(":-P"), string(":-/"), string(":-|"), string(":-("), string(":-O"))) lex("INLINEMATH", Lexer.untilNewline(seq(char('$'), REPEAT(CHAR(-Range('$'))), OPT(char('$'))))) lex("INLINEVERBATIM", Lexer.untilNewline(seq(char('`'), REPEAT(CHAR(-Range('`'))), OPT(char('`'))))) lex("INLINERUN", Lexer.untilNewline(seq(char('{'), REPEAT(CHAR(-Range('}'))), OPT(char('}'))))) lex("INLINEPROOFSCRIPT", InlineProofScriptLexer) lex("REFERENCE", Lexer.untilNewline(seq(char('<'), REPEAT(CHAR(-Range('>'))), OPT(char('>'))))) lex("ANYTHING", Lexer.untilEnd(anything)) def mkLexedEntity(c : ParseContext, ty : T, start : String, stop : String, prep : String => String = (x => x)) : V = { var text = c.text val span = c.span var v : V = V(ty) def str(s : String) : Prop = annotateProp(StringProp(s), span) if (!text.startsWith(start)) v = v.addError(str("doesn't start with " + start)) else text = text.substring(start.size) if (!text.endsWith(stop)) v = v.addError(str("doesn't end with " + stop)) else text = text.substring(0, text.size - stop.size) v = v.add(DEFAULT_PROP, str(prep(text))) v } def mkInlineMath(c : ParseContext) : V = mkLexedEntity(c, InlineMath, "$", "$") def mkReference(c : ParseContext) : V = mkLexedEntity(c, Reference, "<", ">") def mkInlineVerbatim(c : ParseContext) : V = mkLexedEntity(c, InlineVerbatim, "`", "`") def mkInlineRun(c : ParseContext) : V = mkLexedEntity(c, InlineRun, "{", "}") def mkUser(c : ParseContext) : V = { val v = mkLexedEntity(c, User, "@", "", 
unescape _) if (v.stringOf(DEFAULT_PROP) == "") v.addError(eprop(c, "empty peer name")) else v } def mkHashtag(c : ParseContext) : V = { val v = mkLexedEntity(c, Hashtag, "#", "", unescape _) if (v.stringOf(DEFAULT_PROP) == "") v.addError(eprop(c, "empty hashtag")) else v } def mkInlineProofScript(c : ParseContext) : V = { def prep(s : String) : String = if (s.endsWith("`")) s.substring(0, s.size - 1) else s mkLexedEntity(c, InlineProofScript, "``", "``", prep _) } /** Grammar rules */ grule("ProofDoc", "Blocks(nil, nil)", _.Blocks[V]) // Blocks: (minColumn, minRow) => [(maxStartColumn, WordsMinRow, ItemListMinRow)] grule("Blocks", "", c => V(Blocks)) grule("Blocks", "Blocks_1~ Words(~.1, Blocks_1.val.2 | ~.2, Blocks_1.val.1 | nil, nil, 0) { (Words.val, Words.lastRow + 2, Words.lastRow + 2) }", c => c.Blocks_1[V].add(c.Words[V])) grule("Blocks", "Blocks_1~ ItemList(~.1, Blocks_1.val.3 | ~.2, Blocks_1.val.1 | nil) { (ItemList.val, ItemList.lastRow + 1, ItemList.lastRow + 2) }", c => c.Blocks_1[V].add(c.ItemList[V])) // Words: (minColumn, minRow, maxStartColumn, maxStartRow, type) => maxStartColumn grule("Words", "CompactWord~ { CompactWord.val }", c => V(Words, c.CompactWord[V])) grule("Words", "BlockWord~ { BlockWord.leftMost }", c => V(Words, c.BlockWord[V])) grule("Words", "Words_1~ CompactWord(~.1, ~.2, Words_1.val, Words_1.lastRow + 1, ~.5) { CompactWord.val }", c => c.Words_1[V].add(c.CompactWord[V])) grule("Words", "Words_1~ BlockWord(~.1, ~.2, Words_1.val, Words_1.lastRow + 1, ~.5) { BlockWord.leftMost }", c => c.Words_1[V].add(c.BlockWord[V])) // These are the bits which can be set in the "type" parameter for Words / CompactWord private val B_LINK = 0 private val B_EMPH = 1 private val B_DOUBLEEMPH = 2 def mkPunctuation(c : ParseContext, character : String) : V = V(Punctuation, annotateProp(StringProp(character), c.span)) // CompactWord: (minColumn, minRow, maxStartColumn, maxStartRow, type) => -1 or nil, // a return value of -1 designates that this is the 
last word in Words grule("CompactWord", "SIMPLEWORD~", c => V(Text, annotateProp(StringProp(unescape(c.text)), c.span))) grule("CompactWord", "BACKSLASH~", c => V(Text, annotateProp(StringProp(""), c.span))) grule("CompactWord", "PUNCTUATION~", c => V(Punctuation, sprop(c))) grule("CompactWord", "HYPHEN~", c => mkPunctuation(c, "\\u2010")) grule("CompactWord", "ENDASH~", c => mkPunctuation(c, "\\u2013")) grule("CompactWord", "EMDASH~", c => mkPunctuation(c, "\\u2014")) grule("CompactWord", "LEFTQUOTE~", c => mkPunctuation(c, "\\u2018")) grule("CompactWord", "RIGHTQUOTE~", c => mkPunctuation(c, "\\u2019")) grule("CompactWord", "LEFTDOUBLEQUOTE~", c => mkPunctuation(c, "\\u201C")) grule("CompactWord", "RIGHTDOUBLEQUOTE~", c => mkPunctuation(c, "\\u201D")) grule("CompactWord", "EMOTICON~", c => V(Emoticon, sprop(c))) grule("CompactWord", "USER~", c => mkUser(c)) grule("CompactWord", "Reference~", _.Reference[V]) grule("CompactWord", "INLINEMATH~", c => mkInlineMath(c)) grule("CompactWord", "INLINEPROOFSCRIPT~", c => mkInlineProofScript(c)) grule("CompactWord", "INLINEVERBATIM~", c => mkInlineVerbatim(c)) grule("CompactWord", "INLINERUN~", c => mkInlineRun(c)) grule("CompactWord", "HASHTAG~", c => mkHashtag(c)) grule("CompactWord", "LABEL~", c => mkLexedEntity(c, Label, "::", "", unescape _)) grule("CompactWord", "LABELOPEN~", c => mkLexedEntity(c, LabelOpen, "::(", "", unescape _)) grule("CompactWord", "LABELCLOSE~", c => mkLexedEntity(c, LabelClose, "", ")::", unescape _)) grule("CompactWord", "Link~ { Link.val }", Not(TestBit(ParameterSelect(5), B_LINK)), _.Link[V]) grule("CompactWord", "OPENLINK~", TestBit(ParameterSelect(5), B_LINK), c => V(Text).addError(eprop(c, "cannot have link within link"))) grule("CompactWord", "CLOSELINK~", Not(TestBit(ParameterSelect(5), B_LINK)), c => V(Text).addError(eprop(c, "no link to close"))) grule("CompactWord", "Emph~ { Emph.val }", Not(TestBit(ParameterSelect(5), B_EMPH)), _.Emph[V]) grule("CompactWord", "DoubleEmph~ { 
DoubleEmph.val }", Not(TestBit(ParameterSelect(5), B_DOUBLEEMPH)), _.DoubleEmph[V]) grule("CompactWord", "FALLBACKWORD~", c => fallback(c, "CompactWord")) // BlockWord: (minColumn, minRow, maxStartColumn, maxStartRow, type) => ? grule("BlockWord", "HEADER~ Blocks(HEADER.leftMost + 1, nil)", c => V(Header).add(KEYWORD_PROP, sprop(c, "HEADER")).add(c.Blocks[V])) grule("BlockWord", "QUOTE~ Blocks(QUOTE.leftMost + 1, QUOTE.lastRow + 1)", c => V(Quote).add(KEYWORD_PROP, sprop(c, "QUOTE")).add(c.Blocks[V])) grule("BlockWord", "QUOTE~ FALLBACKLINE(nil, nil, nil, QUOTE.lastRow) Blocks(QUOTE.leftMost + 1, QUOTE.lastRow + 1)", c => V(Quote).add(KEYWORD_PROP, sprop(c, "QUOTE")).addError( eprop(c, "FALLBACKLINE", "superfluous parameter")).add(c.Blocks[V])) grule("BlockWord", "Table~", _.Table[V]) grule("BlockWord", "References~", _.References[V]) def mkBlockWord(terminal : String, t : T) { grule("BlockWord", s"$terminal~ Anything($terminal.leftMost + 1, $terminal.lastRow + 1)", c => V(t, c.Anything[V]).add(KEYWORD_PROP, sprop(c, terminal))) grule("BlockWord", s"$terminal~ FALLBACKLINE(nil, nil, nil, $terminal.lastRow) Anything($terminal.leftMost + 1, $terminal.lastRow + 1)", c => V(t, c.Anything[V]).addError(eprop(c, "FALLBACKLINE", "superfluous parameter")).add(KEYWORD_PROP, sprop(c, terminal))) } mkBlockWord("MATH", Math) mkBlockWord("VERBATIM", Verbatim) mkBlockWord("PROOFSCRIPT", ProofScript) mkBlockWord("RUN", Run) // Reference grule("Reference", "REFERENCE~", c => mkReference(c)) // Link grule("Link", s"OPENLINK~ Words(~.1, nil, nil, OPENLINK.lastRow + 1, ~.5 ! $B_LINK) CLOSELINK(~.1, nil, Words.val | nil, Words.lastRow + 1)", c => V(Link, c.Words[V])) grule("Link", "OPENLINK~ CLOSELINK(~.1, nil, nil, OPENLINK.lastRow + 1)", c => V(Link)) grule("Link", s"OPENLINK~ Words(~.1, nil, nil, OPENLINK.lastRow + 1, ~.5 ! 
$B_LINK) { -1 }", c => V(Link, c.Words[V]).addError(eprop(c, "unclosed link, closing ] missing"))) grule("Link", "OPENLINK~ { -1 }", c => V(Link).addError(eprop(c, "unclosed link, closing [ missing"))) // Emph grule("Emph", s"ASTERISK_1~ Words(~.1, nil, nil, ASTERISK_1.lastRow + 1, ~.5 ! $B_EMPH) ASTERISK_2(~.1, nil, Words.val | nil, Words.lastRow + 1)", c => V(Emph, c.Words[V])) grule("Emph", s"ASTERISK_1~ ASTERISK_2(~.1, nil, nil, ASTERISK_1.lastRow + 1)", c => V(Emph)) grule("Emph", s"ASTERISK_1~ Words(~.1, nil, nil, ASTERISK_1.lastRow + 1, ~.5 ! $B_EMPH) { -1 }", c => V(Emph, c.Words[V]).addError(eprop(c, "closing * missing"))) grule("Emph", s"ASTERISK_1~ { -1 }", c => V(Emph).addError(eprop(c, "closing * missing"))) // DoubleEmph grule("DoubleEmph", s"DOUBLEASTERISK_1~ Words(~.1, nil, nil, DOUBLEASTERISK_1.lastRow + 1, ~.5 ! $B_DOUBLEEMPH) DOUBLEASTERISK_2(~.1, nil, Words.val | nil, Words.lastRow + 1)", c => V(DoubleEmph, c.Words[V])) grule("DoubleEmph", s"DOUBLEASTERISK_1~ DOUBLEASTERISK_2(~.1, nil, nil, DOUBLEASTERISK_1.lastRow + 1)", c => V(DoubleEmph)) grule("DoubleEmph", s"DOUBLEASTERISK_1~ Words(~.1, nil, nil, DOUBLEASTERISK_1.lastRow + 1, ~.5 ! 
$B_DOUBLEEMPH) { -1 }", c => V(DoubleEmph, c.Words[V]).addError(eprop(c, "closing ** missing"))) grule("DoubleEmph", s"DOUBLEASTERISK_1~ { -1 }", c => V(DoubleEmph).addError(eprop(c, "closing ** missing"))) // Anything grule("Anything", "ANYTHING~", c => V(Anything, sprop(c))) grule("Anything", "", c => V(Anything, sprop(c))) // ItemList: (minColumn, minRow, maxStartColumn) => maxStartColumn grule("ItemList", "ListItem(~.1, ~.2, ~.3, nil) { ListItem.val }", c => V(ItemList, c.ListItem[V])) grule("ItemList", "ItemList_1~ ListItem(~.1, nil, ItemList_1.val, ItemList_1.lastRow + 1) { ListItem.val }", c => c.ItemList_1[V].add(c.ListItem[V])) grule("ListItem", "LISTBULLET~ Blocks(LISTBULLET.leftMost + 1, nil) { LISTBULLET.leftMost }", c => V(ListItem, c.Blocks[V]).add(KEYWORD_PROP, sprop(c, "LISTBULLET"))) // Table grule("Table", "TABLE~ TableParam(TABLE.lastRow) TableBody(TABLE.leftMost + 1)", c => c.TableBody[V].addInFront(c.TableParam[V]).add(KEYWORD_PROP, sprop(c, "TABLE"))) // TableParam: maxRowStart => ? grule("TableParam", "TableParamCore~ FALLBACKLINE(nil, nil, nil, ~)", c => c.TableParamCore[V].addError(eprop(c, "FALLBACKLINE", "invalid table parameters"))) grule("TableParam", "TableParamCore~", c => c.TableParamCore[V]) grule("TableParamCore", "TABLEPARAM(nil, nil, nil, ~)", c => V(TableParam, sprop(c))) grule("TableParamCore", "", c => V(TableParam, sprop(c))) // TableBody: minColumn => maxStartColumn grule("TableBody", "", c => V(Table)) grule("TableBody", "TableBody_1~ TableElem(~, TableBody_1.val) { TableElem.leftMost }", c => c.TableBody_1[V].add(c.TableElem[V])) // TableElem: (minColumn, maxStartColumn) => ? grule("TableElem", "TableLine~", _.TableLine[V]) grule("TableElem", "TableRow~", _.TableRow[V]) grule("TableElem", "FALLBACKPROTRUDE~", c => fallback(c, "TableElem")) // TableLine: (minColumn, maxStartColumn) => ? 
grule("TableLine", "LINE(~.1, nil, ~.2)", c => V(ProofDoc.Line).add(KEYWORD_PROP, sprop(c, "LINE"))) grule("TableLine", "LINE(~.1, nil, ~.2) FALLBACK(LINE.leftMost + 1)", c => V(ProofDoc.Line).add(KEYWORD_PROP, sprop(c, "LINE")).addError( eprop(c, "FALLBACK", "unexpected parameters to ~line"))) // TableRow: (minColumn, maxStartColumn) => ? grule("TableRow", "ROW(~.1, nil, ~.2) TableCells(ROW.leftMost + 1, nil)", c => c.TableCells[V].add(KEYWORD_PROP, sprop(c, "ROW"))) grule("TableRow", "ROW(~.1, nil, ~.2) FALLBACKLINE(nil, nil, nil, ROW.lastRow) TableCells(ROW.leftMost + 1, nil)", c => c.TableCells[V].add(KEYWORD_PROP, sprop(c, "ROW")).addError( eprop(c, "FALLBACKLINE", "unexpected parameters to ~row"))) // TableCells, TableCell: (minColumn, maxStartColumn) => maxStartColumn grule("TableCells", "", c => V(Row)) grule("TableCells", "TableCells_1~ TableCell(~.1, TableCells_1.val | ~.2) { TableCell.leftMost }", c => c.TableCells_1[V].add(c.TableCell[V])) grule("TableCell", "ITEMHYPHEN(~.1, nil, ~.2) Blocks(ITEMHYPHEN.leftMost+1, nil) { ITEMHYPHEN.leftMost }", c => V(Cell, c.Blocks[V]).add(KEYWORD_PROP, sprop(c, "ITEMHYPHEN"))) grule("TableCell", "FALLBACKPROTRUDE~", c => fallback(c, "TableCell")) // References grule("References", "REFERENCES~ RefItems(REFERENCES.leftMost + 1)", c => c.RefItems[V].add(KEYWORD_PROP, sprop(c, "REFERENCES"))) grule("References", "REFERENCES~ FALLBACKLINE(nil, nil, nil, REFERENCES.lastRow) RefItems(REFERENCES.leftMost + 1)", c => c.RefItems[V].add(KEYWORD_PROP, sprop(c, "REFERENCES")).addError(eprop(c, "FALLBACKLINE", "superfluous parameter"))) // RefItems: minColumn => maxStartColumn grule("RefItems", "", c => V(References)) grule("RefItems", "RefItems_1~ RefItem(~, RefItems_1.val) { RefItem.leftMost }", c => c.RefItems_1[V].add(c.RefItem[V])) // RefItem: (minColumn, maxStartColumn) => ? 
grule("RefItem", "RefItemLabel~ RefItemReference(RefItemLabel.lastRow) RefItemFields(RefItemLabel.leftMost + 1)", c => c.RefItemLabel[V].add(c.RefItemReference[V]).add(c.RefItemFields[V])) grule("RefItem", "RefItemLabel~ RefItemReference(RefItemLabel.lastRow) FALLBACKLINE(nil, nil, nil, RefItemLabel.lastRow) RefItemFields(RefItemLabel.leftMost + 1)", c => c.RefItemLabel[V].add(c.RefItemReference[V]).add(c.RefItemFields[V]).addError(eprop(c, "FALLBACKLINE", "invalid reference item parameter"))) grule("RefItem", "FALLBACKPROTRUDE~", c => fallback(c, "RefItem")) grule("RefItemLabel", "REFITEMLABEL(~.1, nil, ~.2)", c => mkLexedEntity(c, RefItem, "", "", unescape _)) grule("RefItemReference", "Reference(nil, nil, nil, ~)", _.Reference[V]) grule("RefItemReference", "", c => null) // RefItemFields: minColumn => maxStartColumn grule("RefItemFields", "", c => V(RefItemFields)) grule("RefItemFields", "RefItemFields_1~ RefItemField(~, RefItemFields_1.val) { RefItemField.leftMost }", c => c.RefItemFields_1[V].add(c.RefItemField[V])) // RefItemField: (minColumn, maxStartColumn) => ? 
def gRefItemField(FIELD : String) { grule("RefItemField", FIELD + "(~.1, nil, ~.2) Blocks(" + FIELD + ".leftMost + 1)", c => V(RefItemField, c.Blocks[V]).add(KEYWORD_PROP, sprop(c, FIELD))) } grule("RefItemField", "FALLBACKPROTRUDE~", c => fallback(c, "RefItemField")) gRefItemField("DISPLAY") gRefItemField("AUTHOR") gRefItemField("TITLE") gRefItemField("IN") gRefItemField("YEAR") gRefItemField("MONTH") gRefItemField("DAY") /** Experiment to see how to do error recovery for embedded rich layout-insensitive syntax */ setScope("Experiment") klex("EXPERIMENT", cis("~x")) lex("XPLUS", char('+')) lex("XTIMES", char('*')) lex("XOPEN", char('(')) lex("XCLOSE", char(')')) lex("XVAR", char('n')) scope = FALLBACK_SCOPE lex("ERROR_F", Lexer.untilWhitespace(REPEAT(CHAR(-range('+', '*', '(', ')'))))) lex("ERROR", EMPTY) case object Experiment extends T case object Sum extends T case object Product extends T case object App extends T case object Bracket extends T case object Var extends T grule("BlockWord", "Experiment~", _.Experiment[V]) grule("Experiment", "EXPERIMENT~ E(EXPERIMENT.leftMost + 1)", c => V(Experiment, c.E[V])) grule("Experiment", "EXPERIMENT~ E(EXPERIMENT.leftMost + 1) FALLBACK(EXPERIMENT.leftMost + 1)", c => V(Experiment, c.E[V]).addError(eprop(c, "FALLBACK", "superfluous input"))) grule("E", "E_1~ XPLUS~ T~", c => V(Sum, c.E_1[V]).add(c.T[V])) grule("E", "T~", _.T[V]) grule("T", "T_1~ XTIMES~ A~", c => V(Product, c.T_1[V]).add(c.A[V])) grule("T", "A~", _.A[V]) grule("A", "A_1~ F~", c => V(App, c.A_1[V]).add(c.F[V])) grule("A", "F~", _.F[V]) grule("F", "XVAR~", c => V(Var)) grule("F", "XOPEN~ E~ XCLOSE~", c => V(Bracket, c.E[V])) grule("F", "XOPEN~ E~ ERROR", c => V(Bracket, c.E[V]).addError(eprop(c, "missing closing bracket"))) grule("F", "ERROR_F~", c => fallback(c, "F").add(DEFAULT_PROP,sprop(c))) }
proofpeer/proofpeer-proofscript
shared/src/main/scala/proofpeer/proofscript/proofdoc/ProofDoc.scala
Scala
mit
39,276
package progscala2.traits.ui2

import progscala2.traits.ui.Widget

/**
 * A labelled UI widget that mixes in `Clickable`.
 *
 * Created by younggi on 11/11/16.
 */
class Button(val label: String) extends Widget with Clickable {

  // Hook implemented by this concrete class: the logic that changes
  // the GUI appearance goes here.
  override protected def updateUI(): Unit = { /* logic that changes the GUI appearance */ }
}
younggi/books
programming_scala/progscala2/src/main/scala/progscala2/traits/ui2/Button.scala
Scala
mit
311
/*
 * Copyright (C) 2005, The OpenURP Software.
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
package org.openurp.edu.program.domain

import org.beangle.data.dao.EntityDao
import org.openurp.base.edu.model.Student
import org.openurp.edu.program.model._

/**
 * Default `CoursePlanProvider` that resolves plans through an `EntityDao`,
 * using a `ProgramProvider` to find the student's program first.
 */
class DefaultCoursePlanProvider extends CoursePlanProvider {

  var programProvider: ProgramProvider = _

  var entityDao: EntityDao = _

  /**
   * Gets the original major course plan of the student's program.
   */
  override def getMajorPlan(student: Student): Option[MajorPlan] =
    programProvider.getProgram(student).flatMap(findMajorPlan)

  /**
   * Gets the execution plan of the student's program.
   */
  override def getExecutionPlan(student: Student): Option[ExecutionPlan] =
    programProvider.getProgram(student).flatMap(findExecutionPlan(_, student))

  /**
   * Gets the personal plan of a single student.
   */
  def getStdPlan(student: Student): Option[StdPlan] =
    entityDao.findBy(classOf[StdPlan], "std", List(student)).headOption

  /**
   * Gets the student's course plan, preferring in order:
   * personal plan, then execution plan, then major plan.
   *
   * @param std the student
   * @return the first plan found, if any
   */
  def getCoursePlan(std: Student): Option[CoursePlan] =
    getStdPlan(std).orElse {
      programProvider.getProgram(std).flatMap { p =>
        findExecutionPlan(p, std).orElse(findMajorPlan(p))
      }
    }

  // Looks up the major plan attached to a program.
  private def findMajorPlan(p: Program): Option[MajorPlan] =
    entityDao.findBy(classOf[MajorPlan], "program", List(p)).headOption

  // Picks the first execution plan of the program that matches the student's
  // department, and (when the plan restricts them) student type and campus.
  private def findExecutionPlan(p: Program, student: Student): Option[ExecutionPlan] =
    entityDao.findBy(classOf[ExecutionPlan], "program", List(p)).find { plan =>
      val state = student.state.get
      plan.department == state.department &&
        plan.stdType.forall(_ == student.stdType) &&
        plan.campus.forall(_ == state.campus)
    }
}
openurp/api
edu/src/main/scala/org/openurp/edu/program/domain/DefaultCoursePlanProvider.scala
Scala
lgpl-3.0
2,666
package io.simao.riepete.server

import java.net.InetSocketAddress

import akka.actor.{Actor, ActorLogging, ActorRef, Props}
import akka.io.Inet.SO.ReceiveBufferSize
import akka.io.{IO, Udp}

import io.simao.riepete.messages.StatsdMetric

/** Factory for the UDP server actor's `Props`. */
object RiepeteServer {
  def props()(implicit config: Config) = {
    Props(new RiepeteServer)
  }
}

/**
 * UDP server actor: binds to the configured address on start and, once
 * bound, forwards every received datagram (decoded as UTF-8) to the
 * statsd metric handler as a `StatsdMetric`.
 */
class RiepeteServer(implicit config: Config) extends Actor with ActorLogging {
  import context.system

  // Requested socket receive buffer size in bytes (1 GiB request;
  // the OS may cap the actual buffer — TODO confirm effective size).
  val udpRcvBufferSize = 1073741824

  // Ask the Udp extension to bind this actor to the configured ip/port.
  override def preStart(): Unit = {
    val localAddress = new InetSocketAddress(config.bind_ip, config.bind_port)
    IO(Udp) ! Udp.Bind(self, localAddress, List(ReceiveBufferSize(udpRcvBufferSize)))
  }

  // Child actor that processes incoming statsd metric strings.
  lazy val statsdHandler = system.actorOf(StatsdMetricHandler.props(), "StatsDHandler")

  // Initial state: waiting for the bind result from the Udp extension.
  def receive: Receive = {
    case Udp.Bound(local) =>
      log.info(s"UDP server ready on ${local.getHostName}:${local.getPort}")
      // Switch to the bound state; `sender()` is the socket actor.
      context become ready(sender(), statsdHandler)
    case Udp.CommandFailed(cmd) =>
      log.error("Failed to start server: " + cmd)
      context stop self
  }

  // Bound state: decode each datagram and hand it to the metric handler.
  def ready(socket: ActorRef, statsdHandler: ActorRef): Receive = {
    case Udp.Received(data, remote) =>
      val dataStr = data.decodeString("utf-8")
      log.debug(s"Received UDP MSG: $dataStr from ${remote.getAddress}")
      statsdHandler ! StatsdMetric(dataStr, remote.getHostName)
    case Udp.Unbound =>
      // The socket was unbound; this actor has nothing left to serve.
      log.error("UDP Unbound from server socket")
      context stop self
  }
}

/**
 * Application entry point: loads the config (optionally from the first
 * command-line argument) and starts the server actor.
 */
object RiepeteServerApp extends App {
  import akka.actor.ActorSystem

  // Use the config file given as the first argument, falling back to defaults.
  implicit val riepete_config = args.lift(0).map(Config(_)).getOrElse(Config.default)
  implicit val system = ActorSystem("riepeteActorSystem")
  system.actorOf(RiepeteServer.props(), "StatsDServer")
}
simao/riepete
src/main/scala/io/simao/riepete/server/RiepeteServer.scala
Scala
mit
1,749
package controller

import domain.user.UserAuthority

/**
 * Controller handling logout.
 */
class LogoutController extends ApiController {

  // This endpoint must be reachable without a login check.
  override val loginCheck = false

  // No user-authority requirements are declared for this endpoint.
  override val authentications: Option[Seq[UserAuthority]] = None

  /**
   * Logs out: invalidates the current session and returns a JSON
   * result reporting success.
   */
  def execute: String = {
    session.invalidate()
    createJsonResult("success")
  }
}
nemuzuka/vss-kanban
src/main/scala/controller/LogoutController.scala
Scala
mit
366
package org.orbeon.oxf.fr.process

import org.orbeon.oxf.fr.process.ProcessParser._
import org.scalatest.funspec.AnyFunSpec

/** Checks that `ProcessParser.parse` produces the expected AST for sample processes. */
class ProcessParserTest extends AnyFunSpec {

  describe("Basic process parsing") {

    // Pairs of (process source text, expected parse tree).
    val cases = List(
      (
        """validate then save""",
        GroupNode(
          ActionNode("validate", Map()),
          List(
            (ThenCombinator, ActionNode("save", Map()))
          )
        )
      ),
      (
        """require-uploads
          |then validate-all
          |then save
          |then new-to-edit
          |then success-message("save-success")
          |recover error-message("database-error")""".stripMargin,
        GroupNode(
          ActionNode("require-uploads", Map()),
          List(
            (ThenCombinator,    ActionNode("validate-all", Map())),
            (ThenCombinator,    ActionNode("save", Map())),
            (ThenCombinator,    ActionNode("new-to-edit", Map())),
            (ThenCombinator,    ActionNode("success-message", Map(None -> "save-success"))),
            (RecoverCombinator, ActionNode("error-message", Map(None -> "database-error")))
          )
        )
      ),
      (
        """if ("xxf:get-request-method() = 'GET'")
          |then navigate(uri = "/fr/{fr:app-name()}/{fr:form-name()}/edit/{fr:document-id()}")
          |else edit""".stripMargin,
        GroupNode(
          ConditionNode(
            "xxf:get-request-method() = 'GET'",
            ActionNode("navigate", Map(Some("uri") -> "/fr/{fr:app-name()}/{fr:form-name()}/edit/{fr:document-id()}")),
            Some(ActionNode("edit", Map()))
          ),
          Nil
        )
      )
    )

    // One test per sample process.
    cases.foreach { case (process, expectedAst) =>
      it(s"must parse `$process`") {
        assert(ProcessParser.parse(process) == expectedAst)
      }
    }
  }
}
orbeon/orbeon-forms
form-runner/js/src/test/scala/org/orbeon/oxf/fr/process/ProcessParserTest.scala
Scala
lgpl-2.1
1,849
// Impulse-response test for an active band-pass RC filter: plays a two-tone
// test signal through the filter via the sound card, records the filter's
// input and output, reconstructs both as continuous-time signals by sinc
// interpolation, and also computes the theoretically predicted output from
// the circuit's transfer function for comparison. Results go to a CSV file.
import sounder.Sounder._
import sounder.Util._
import scala.math.sin
import scala.math.sqrt
import scala.math.min
import scala.math.exp
import scala.math.max
import scala.math.floor
import scala.math.ceil
import scala.math.Pi
import scala.math.log

print("Plug in the active band-pass filter and press ENTER")
System.in.read
println("Playing for 2 seconds and recording input")

// Test-tone frequencies (Hz) and sample rate (Hz)
val f1 = 500
val f2 = 1333
val Fs = 44100
// Two-tone test signal, each tone scaled by 1/3 to avoid clipping
val xtrue : Double => Double = t => sin(2*Pi*f1*t)/3 + sin(2*Pi*f2*t)/3
// Play the signal and record both channels; left is treated below as the
// filter input and right as the filter output (assumed wiring — TODO confirm)
val (right, left) = playRecord(xtrue, 0, 2.0, Fs)
//playSamples(left)
//playSamples(right)

//total number of samples recorded
val L = min(left.length, right.length)

//value at which we truncate sinc function (makes things faster)
val sinc_truncate = 500

//reconstructed input signal x (truncated sinc interpolation of left-channel samples)
def x(t : Double) : Double = {
  val mini = max(0, floor(Fs*t - sinc_truncate).toInt)
  val maxi = min(L-1, ceil(Fs*t + sinc_truncate).toInt)
  (mini to maxi).foldLeft(0.0){ (sum, i) => sum + left(i)*sinc(Fs*t - i) }
}

//reconstructed output signal y (truncated sinc interpolation of right-channel samples)
def y(t : Double) : Double = {
  val mini = max(0, floor(Fs*t - sinc_truncate).toInt)
  val maxi = min(L-1, ceil(Fs*t + sinc_truncate).toInt)
  (mini to maxi).foldLeft(0.0){ (sum, i) => sum + right(i)*sinc(Fs*t - i) }
}

/**
 * Approximates the integral of f from a to b using the trapezoidal
 * rule with N intervals
 */
def trapezoidal(func : Double => Double, a : Double, b : Double, N : Int) : Double = {
  val del = (b - a)/N
//  val inner = (1 to N-1).foldLeft(0.0)( (s,n) => s+2*func(a + n*del) )
  val inner = (1 to N-1).par.aggregate(0.0)( (s,n) => s+2*func(a + n*del), _ + _ ); //parallel fold
  return del/2 * ( inner + func(a) + func(b) )
}

//circuit parameters (resistances in ohms, capacitances in farads)
val R1 = 3300
val C1 = 100e-9
val R2 = 15000
val C2 = 10e-9
// Transfer-function coefficients derived from the circuit values
val a = -R2*C1
val b = R2*C2 + R1*C1
val c = C1*R1*C2*R2
// Poles of the transfer function (roots of c*s^2 + b*s + 1 scaled — TODO confirm derivation)
val alpha = (-b+sqrt(b*b-4*c))/c/2
val beta = (-b-sqrt(b*b-4*c))/c/2
val A = a*alpha/(alpha-beta)/c //equal to a*alpha/((alpha-beta)*c)
val B = a*beta/(alpha-beta)/c //equal to a*beta/((alpha-beta)*c)

// Log the computed poles and coefficients
println(alpha,beta,A,B)

//the function f from the tests. Requires numerical integration
def f(t : Double) : Double = {
  val K = -log(1e-4)/min(alpha.abs,beta.abs) //log(1e-4) should get around 1e-4 error in the trapezoidal sum
  val N = ceil(K*10*Fs).toInt //number of intervals in the trapezoidal sum, 10 points per sample.
  val g : Double=>Double = tau => (A*exp(alpha*tau) - B*exp(beta*tau)) * sinc(t - Fs*tau) //g function from Test (Active RC again)
  return trapezoidal(g,0,K,N)
}

// Theoretically predicted filter output: convolution of the recorded input
// samples with f. Note the upper limit uses 2*sinc_truncate, unlike x/y —
// presumably because f's support is wider than sinc's; TODO confirm.
def Hx(t : Double) : Double = {
  val mini = max(0, floor(Fs*t - sinc_truncate).toInt)
  val maxi = min(L-1, ceil(Fs*t + 2*sinc_truncate).toInt)
  (mini to maxi).foldLeft(0.0){ (sum, i) => sum + left(i)*f(Fs*t - i) }
}

println("Writing data to file impulsedata.csv")
// Sampling window (seconds) and step for the CSV output
val tmin = 0.9999
val tmax = 1.0041
val filetfun = new java.io.FileWriter("impulsedata.csv")
(tmin to tmax by 0.00002) foreach { t =>
  //writing time in milliseconds; 'E' -> 'e' normalises scientific notation
  filetfun.write((1000*t).toString.replace('E', 'e') + "\\t" + x(t).toString.replace('E', 'e') + "\\t" + y(t).toString.replace('E', 'e') + "\\t" + Hx(t).toString.replace('E', 'e') + "\\n")
}
filetfun.close
println("Scala finished")
robbymckilliam/testablelinearsystems
tests/activebandpass/impulseresponsetest.scala
Scala
agpl-3.0
3,187
package monocle.std import monocle.MonocleSuite import monocle.law.discipline.PrismTests class TheseSpec extends MonocleSuite { import cats.laws.discipline.arbitrary._ checkAll("These - Disjunction", PrismTests(theseToDisjunction[Int, String])) }
aoiroaoino/Monocle
test/shared/src/test/scala/monocle/std/TheseSpec.scala
Scala
mit
254
/* *\\ ** _____ __ _____ __ ____ ** ** / ___/ / / /____/ / / / \\ FieldKit ** ** / ___/ /_/ /____/ / /__ / / / (c) 2010, FIELD.io ** ** /_/ /____/ /____/ /_____/ http://www.field.io ** \\* */ /* created February 23, 2010 */ package field.kit.physics import field.kit._ import field.kit.math.geometry._ /** * 3D Verlet Particle class with support for multiple behaviours, finite state handling, colour integration * This is */ class Particle extends Vec3 with Behavioural { /** * Creates a new particle at the given position */ def this(v:Vec3) { this() init(v) } /** * Advances the particle one timestep */ def update(dt:Float) { updateState(dt) if(_isLocked) return updatePosition updateBounds applyBehaviours applyConstraints } // -- Weights -------------------------------------------------------------- def weight = _weight def weight_=(value:Float) { _weight = value _invWeight = 1f / value } def invWeight = _invWeight protected var _weight = 1f protected var _invWeight = 1f/ _weight // -- Verlet Integration --------------------------------------------------- def lock = _isLocked = true def unlock { clearVelocity _isLocked = false } def isLocked = _isLocked protected var _isLocked = false /** Steering force */ val force = new Vec3 /** Air resistance or fluid resistance, force opposed to the relative motion of this particle */ var drag = 0.03f /** Time between updates */ def timestep = _timestep def timestep_=(value:Float) { _timestep = value _timestepSq = value * value } protected var _timestep = 1f protected var _timestepSq = _timestep * _timestep val prev = new Vec3 private val tmp = new Vec3 /** * Updates the position based on the previous position and the steering force */ protected def updatePosition { tmp := this //this += (this - prev) + force * (1f - drag) * _timestepSq // force *= (1f - drag) *= _timestepSq // // this.x += (this.x - prev.x) + force.x // this.y += (this.y - prev.y) + force.y // this.z += (this.z - prev.z) + force.z force *= _timestepSq this.x += 
(this.x - prev.x) + force.x this.y += (this.y - prev.y) + force.y this.z += (this.z - prev.z) + force.z prev := tmp scaleVelocity(1f - drag) force.zero } /** initialises the particle at the given position */ def init(v:Vec3) { this := v clearVelocity } // /** @return the velocity of this particle */ // def velocity = distance(prev) // // /** sets the velocity of this particle */ // def velocity_=(vel:Float) = prev := (prev - this).normaliseTo(vel) //// def velocity_=(vel:Float) = prev := (this - prev).normaliseTo(vel) /** sets the velocity of this particle to zero */ def clearVelocity = prev := this def scaleVelocity(s:Float) = prev.interpolate(this, 1f - s) // -- Neighbours ----------------------------------------------------------- import scala.collection.mutable.ArrayBuffer /** List to keep track of this particles neighbours */ var neighbours:ArrayBuffer[Particle] = _ /** * Adds another particle as this particles neighbour */ def +=(p:Particle) = { if(neighbours == null) neighbours = new ArrayBuffer[Particle] neighbours += p } /** * Clears neighbour list (if set) */ def clearNeighbours = { if(neighbours == null) neighbours = new ArrayBuffer[Particle] neighbours.clear } // -- State Machine -------------------------------------------------------- var age = 0f var state = 0 /** * For now simply keeps track of the particle's age; override this method to * perform custom state changes e.g. fading in, alive, fading out etc. 
*/ protected def updateState(dt:Float) { this.age += dt } // -- Boundaries ----------------------------------------------------------- var bounds:BoundingVolume = _ def size = if(bounds == null) 0f else bounds.size def size_=(value:Float) { if(bounds == null) bounds = createBounds bounds.size = value } protected def createBounds = new Sphere(this, 0f) /** * Updates the bounding box (if set) */ protected def updateBounds { if(bounds == null) return bounds := this } // -- Colour --------------------------------------------------------------- var colour:Colour = _ // protected def updateColour { // this.x = 2*x - prev.x + steer.x * _timestepSq // this.y = 2*y - prev.y + steer.y * _timestepSq // this.z = 2*z - prev.z + steer.z * _timestepSq // prev := this // } // -- Behaviours ----------------------------------------------------------- /** * Applies all assigned behaviours to this particle */ def applyBehaviours { if(behaviours == null) return var i = 0 while(i < behaviours.length) { behaviours(i).apply(this) i += 1 } } // -- Constraints ---------------------------------------------------------- /** * Applies all assigned constraints to this particle */ def applyConstraints { if(constraints == null) return var i = 0 while(i < constraints.length) { constraints(i).apply(this) i += 1 } } }
field/FieldKit.scala
src.physics/field/kit/physics/Particle.scala
Scala
lgpl-3.0
5,386
/*********************************************************************** * Copyright (c) 2013-2018 Commonwealth Computer Research, Inc. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Apache License, Version 2.0 * which accompanies this distribution and is available at * http://www.opensource.org/licenses/apache2.0.php. ***********************************************************************/ package org.locationtech.geomesa.utils.bin import java.nio.ByteBuffer trait BinaryOutputCallback { /** * Callback for reduced (16-byte) values */ def apply(trackId: Int, lat: Float, lon: Float, dtg: Long): Unit /** * Callback for expanded (24-byte) values */ def apply(trackId: Int, lat: Float, lon: Float, dtg: Long, label: Long): Unit /** * Fills in basic values */ protected def put(buffer: ByteBuffer, trackId: Int, lat: Float, lon: Float, dtg: Long): Unit = { buffer.putInt(trackId) buffer.putInt((dtg / 1000).toInt) buffer.putFloat(lat) buffer.putFloat(lon) } /** * Fills in extended values */ protected def put(buffer: ByteBuffer, trackId: Int, lat: Float, lon: Float, dtg: Long, label: Long): Unit = { put(buffer, trackId, lat, lon, dtg) buffer.putLong(label) } }
ddseapy/geomesa
geomesa-utils/src/main/scala/org/locationtech/geomesa/utils/bin/BinaryOutputCallback.scala
Scala
apache-2.0
1,320
package model import akka.http.javadsl.model.StatusCode import akka.http.scaladsl.model.StatusCodes import domain.errors.{ErrorCode, ErrorCodes} import model.Status.Status import protocols.StandardResponseProtocols._ import spray.json.{JsObject, JsValue, pimpAny} object Status extends Enumeration { type Status = Value val Success = Value("success") val Fail = Value("fail") val Error = Value("error") } //trait Status { // def value: String //} // //object Success extends Status { // override def value = "success" //} // //object Fail extends Status { // override def value = "fail" //} // //object Error extends Status { // override def value = "error" //} sealed trait CodeContainer { val errorCodes: List[ErrorCode] } case class WarningCodeContainer(errorCodes: List[ErrorCode]) extends CodeContainer case class ErrorCodeContainer(errorCodes: List[ErrorCode]) extends CodeContainer object Response { import model.Status._ def success(): StandardResponse = { StandardResponse(Success, None, None, None) } def success(data: JsValue): StandardResponse = { StandardResponse(Success, None, None, Some(data)) } def success(errorCodes: List[ErrorCode]): StandardResponse = { if (errorCodes.isEmpty) Response.success() else StandardResponse(Success, None, None, Some(WarningCodeContainer(errorCodes).toJson)) } def fail(errorCodes: List[ErrorCode]): (StatusCode, StandardResponse) = { StatusCodes.BadRequest -> StandardResponse(Fail, None, None, Some(ErrorCodeContainer(errorCodes).toJson)) } def failNotFound: (StatusCode, StandardResponse) = { StatusCodes.NotFound -> StandardResponse(Fail, Some("not found"), None, None) } def error(errorCode: ErrorCode): (StatusCode, StandardResponse) = { StatusCodes.BadRequest -> StandardResponse(Error, Some(errorCode.description), Some(errorCode.code), None) } def error(errorCode: ErrorCode, data: JsObject): (StatusCode, StandardResponse) = { StatusCodes.BadRequest -> StandardResponse(Error, Some(errorCode.description), Some(errorCode.code), Some(data)) 
} def unauthorised() = { val errorCode = ErrorCodes.AUTH001 StatusCodes.Unauthorized -> StandardResponse(Error, Some(errorCode.description), Some(errorCode.code), None) } def forbidden() = { val errorCode = ErrorCodes.AUTH003 StatusCodes.Forbidden -> StandardResponse(Error, Some(errorCode.description), Some(errorCode.code), None) } } case class StandardResponse(status: Status, message: Option[String], code: Option[String], data: Option[JsValue])
k0ner/micro-proxy
src/main/scala/model/StandardResponse.scala
Scala
apache-2.0
2,569
/* * This file is part of jacoco4sbt. * * Copyright (c) 2011-2013 Joachim Hofer & contributors * All rights reserved. * * This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html */ package de.johoop.jacoco4sbt import org.jacoco.core.data._ import org.jacoco.core.analysis._ import org.jacoco.report._ import html.HTMLFormatter import java.io.File import java.io.FileInputStream class Report(executionDataFile: File, classDirectories: Seq[File], sourceDirectories: Seq[File], sourceEncoding: String, tabWidth: Int, reportFormats: Seq[FormattedReport], reportTitle: String, reportDirectory: File) { def generate : Unit = { val (executionDataStore, sessionInfoStore) = loadExecutionData val bundleCoverage = analyzeStructure(executionDataStore, sessionInfoStore) reportFormats foreach (createReport(_, bundleCoverage, executionDataStore, sessionInfoStore)) } private def loadExecutionData = { val executionDataStore = new ExecutionDataStore val sessionInfoStore = new SessionInfoStore val fis = new FileInputStream(executionDataFile) try { val executionDataReader = new ExecutionDataReader(fis) executionDataReader setExecutionDataVisitor executionDataStore executionDataReader setSessionInfoVisitor sessionInfoStore while (executionDataReader.read()) { /* side effects galore :( */ } } finally { fis.close() } (executionDataStore, sessionInfoStore) } private def analyzeStructure(executionDataStore: ExecutionDataStore, sessionInfoStore: SessionInfoStore) = { val coverageBuilder = new CoverageBuilder val analyzer = new Analyzer(executionDataStore, coverageBuilder) classDirectories foreach { analyzer analyzeAll _ } coverageBuilder getBundle reportTitle } private def createReport(reportFormat: FormattedReport, bundleCoverage: IBundleCoverage, executionDataStore: ExecutionDataStore, sessionInfoStore: SessionInfoStore) = { val visitor 
= reportFormat.visitor(reportDirectory) visitor.visitInfo(sessionInfoStore.getInfos, executionDataStore.getContents); visitor.visitBundle(bundleCoverage, new DirectoriesSourceFileLocator(sourceDirectories, sourceEncoding, tabWidth)) visitor.visitEnd() } }
paddymahoney/jacoco4sbt
src/main/scala/de/johoop/jacoco4sbt/Report.scala
Scala
epl-1.0
2,501
package controllers import com.github.tototoshi.play2.json4s.native.Json4s import javax.inject.{Inject, Singleton} import models.Camera import org.json4s.{DefaultFormats, Extraction} import play.api.mvc.InjectedController @Singleton class CameraController @Inject()(json4s: Json4s) extends InjectedController { import json4s.implicits._ implicit val formats = DefaultFormats def get(id: Long) = Action { val camera = Camera.findById(id) Ok(Extraction.decompose(camera)) } }
ponkotuy/aggregate-exif
app/controllers/CameraController.scala
Scala
apache-2.0
493
package scorex.consensus import scorex.account.Account import scorex.block.Block trait OneGeneratorConsensusModule { /** * In most of algorithms there's only one block generator */ def feesDistribution(block: Block): Map[Account, Long] = { val forger = block.consensusModule.generators(block).ensuring(_.size == 1).head val fee = block.transactions.map(_.fee).sum Map(forger -> fee) } }
ScorexProject/Scorex-Lagonaki
scorex-consensus/src/main/scala/scorex/consensus/OneGeneratorConsensusModule.scala
Scala
cc0-1.0
415
package com.sksamuel.elastic4s.admin import com.sksamuel.elastic4s.Executable import org.elasticsearch.action.fieldstats.{FieldStats, FieldStatsRequestBuilder, FieldStatsResponse} import org.elasticsearch.client.Client import scala.concurrent.Future trait FieldStatsDsl { def fieldStats(fields: String*): FieldStatsDefinition = FieldStatsDefinition(fields = fields) def fieldStats(fields: Iterable[String]): FieldStatsDefinition = FieldStatsDefinition(fields = fields.toSeq) implicit object FieldStatsDefinitionExecutable extends Executable[FieldStatsDefinition, FieldStatsResponse, FieldStatsResult] { override def apply(c: Client, t: FieldStatsDefinition): Future[FieldStatsResult] = { injectFutureAndMap(t.build(c.prepareFieldStats).execute) { resp => FieldStatsResult(resp) } } } } case class FieldStatsDefinition(indexes: Seq[String] = Nil, fields: Seq[String] = Nil, level: Option[String] = None) { def build(builder: FieldStatsRequestBuilder): FieldStatsRequestBuilder = { builder.setFields(fields: _*) level.foreach(builder.setLevel) if (indexes.nonEmpty) builder.setIndices(indexes: _*) builder } def indexes(indexes: String*): FieldStatsDefinition = { copy(indexes = indexes) } def level(level: String): FieldStatsDefinition = copy(level = Option(level)) } case class FieldStatsResult(resp: FieldStatsResponse) { import scala.collection.JavaConverters._ def fieldStats: Map[String, FieldStats[_]] = resp.getAllFieldStats.asScala.toMap def indicesMergedFieldStats: Map[String, Map[String, FieldStats[_]]] = { resp.getIndicesMergedFieldStats.asScala.toMap.map { case (key, value) => key -> value.asScala.toMap } } }
FabienPennequin/elastic4s
elastic4s-tcp/src/main/scala/com/sksamuel/elastic4s/admin/FieldStatsDsl.scala
Scala
apache-2.0
1,711
package com.typesafe.akka.http.benchmark import org.fusesource.scalate.Binding trait Templating { def layout(uri: String, attributes: Map[String, Any] = Map(), extraBindings: Traversable[Binding] = Nil): String }
actframework/FrameworkBenchmarks
frameworks/Scala/akka-http/src/main/scala/com/typesafe/akka/http/benchmark/Templating.scala
Scala
bsd-3-clause
217
/* * Copyright 2015 Stephen Couchman * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.greyscribes.async.db.postgresql import java.net.URI import javax.inject.{Inject, Singleton} import com.github.mauricio.async.db.postgresql.PostgreSQLConnection import com.github.mauricio.async.db.postgresql.pool.PostgreSQLConnectionFactory import com.github.mauricio.async.db.{Configuration ⇒ DBConfiguration} import com.greyscribes.async.db.{ConfigurationBuilder, LifecycleBoundConnectionPoolGroup} import play.api.inject.ApplicationLifecycle import play.api.{Configuration, Logger} import play.core.parsers.FormUrlEncodedParser /** * The primary purpose of this class is to create a strong type for the dependency injector. * @param config the Play Configuration from which to gather our configuration(s). * @param lifecycle the Play Lifecycle object, so that this class can self-register for shutdown. */ @Singleton() class PostgreSQLConnectionPool @Inject()(config: Configuration, lifecycle: ApplicationLifecycle) extends LifecycleBoundConnectionPoolGroup[PostgreSQLConnection](config, PostgreSQLConnectionPool, lifecycle) /** * This companion object acts as the PostgresSQLConnection ConfigurationBuilder, providing Configuration parsing * services to PostgreSQLConnectionPool. 
*/ object PostgreSQLConnectionPool extends ConfigurationBuilder[PostgreSQLConnection] { /** * @return the logger to use for this class */ override protected val logger: Logger = Logger(classOf[PostgreSQLConnectionPool]) /** * @return the name of the currently processing database driver, as it should be entered on the db.*.driver line */ override protected val getName: String = "postgresql-async" override protected def buildFactory(config: DBConfiguration): PostgreSQLConnectionFactory = new PostgreSQLConnectionFactory(config) /** * Used to parse the URIs that this particular DBConfigurationBuilder is interested in. * @param uri the source URI */ override protected[postgresql] def parseURI(uri: URI): Option[DBConfiguration] = { val simplePGDB = "^postgresql:(\\\\w+)$".r (uri.getScheme match { case "postgresql" | "postgres" ⇒ val userInfo = parseUserInfo(Option(uri.getUserInfo)) var port = uri.getPort if(port < 0) { port = 5432 } Some(DBConfiguration( username = userInfo._1.getOrElse("postgres"), password = userInfo._2, host = Option(uri.getHost).getOrElse("localhost"), port = port, database = Option(uri.getPath).map(_.stripPrefix("/")).filterNot(_.isEmpty) )) case "jdbc" ⇒ uri.getSchemeSpecificPart match { case simplePGDB(db) ⇒ // Localhost, no password, user = ??? Some(DBConfiguration( username = "postgres", database = Some(db) )) case x ⇒ // the schemeSpecificPart is just a normal postgresql:// connection string now parseURI(new URI(x)) } }).map { startingConfiguration ⇒ Option(uri.getQuery).map { qs ⇒ val parameters = FormUrlEncodedParser.parseNotPreservingOrder(qs) var finalConfiguration = startingConfiguration // Correct for query parameter settings, if they exist for(nameSeq ← parameters.get("user")) finalConfiguration = finalConfiguration.copy(username = nameSeq.head) for(pwSeq ← parameters.get("password")) finalConfiguration = finalConfiguration.copy(password = pwSeq.headOption) finalConfiguration }.getOrElse(startingConfiguration) } } }
SattaiLanfear/db-async-play
pg/src/main/scala/com/greyscribes/async/db/postgresql/PostgreSQLConnectionPool.scala
Scala
apache-2.0
4,046
/* * Scala.js (https://www.scala-js.org/) * * Copyright EPFL. * * Licensed under Apache License 2.0 * (https://www.apache.org/licenses/LICENSE-2.0). * * See the NOTICE file distributed with this work for * additional information regarding copyright ownership. */ package org.scalajs.testsuite.javalib.io import java.io._ import org.junit.Test import org.junit.Assert._ class ByteArrayOutputStreamTest { @Test def simpleWriteInt(): Unit = { val out = new ByteArrayOutputStream() for (i <- 0 to 9) out.write(i) assertArrayEquals(Array[Byte](0, 1, 2, 3, 4, 5, 6, 7, 8, 9), out.toByteArray) } @Test def simpleWriteByteArray(): Unit = { val out = new ByteArrayOutputStream() val arr = Array[Byte](0, 1, 2, 3, 4, 5) out.write(arr, 1, 4) out.write(arr) assertArrayEquals(Array[Byte](1, 2, 3, 4, 0, 1, 2, 3, 4, 5), out.toByteArray) } @Test def writeByteArrayWithBufferResize(): Unit = { val out = new ByteArrayOutputStream(16) val arr = Array[Byte](0, 1, 2, 3, 4, 5, 6, 7, 8, 9) out.write(arr) out.write(arr) assertArrayEquals(arr ++ arr, out.toByteArray) } @Test def toStringWithUTF8(): Unit = { val buf = Array[Byte](72, 101, 108, 108, 111, 32, 87, 111, 114, 108, 100, 46, -29, -127, -109, -29, -126, -109, -29, -127, -85, -29, -127, -95, -29, -127, -81, -26, -105, -91, -26, -100, -84, -24, -86, -98, -29, -126, -110, -24, -86, -83, -29, -126, -127, -29, -127, -66, -29, -127, -103, -29, -127, -117, -29, -128, -126) val out = new ByteArrayOutputStream() out.write(buf) assertEquals("Hello World.こんにちは日本語を読めますか。", out.toString) } @Test def reset(): Unit = { val out = new ByteArrayOutputStream() for (i <- 0 to 9) out.write(i) out.reset() for (i <- 0 to 9) out.write(i) assertArrayEquals(Array[Byte](0, 1, 2, 3, 4, 5, 6, 7, 8, 9), out.toByteArray) } @Test def bufField(): Unit = { class ByteArrayOutputStreamWithBufAccess extends ByteArrayOutputStream { def getBuf(): Array[Byte] = buf def setBuf(b: Array[Byte]): Unit = buf = b } val os = new ByteArrayOutputStreamWithBufAccess os.write(5.toInt) 
os.flush() assertEquals(5.toByte, os.getBuf()(0)) val newBuf = Array(10.toByte) os.setBuf(newBuf) assertSame(newBuf, os.getBuf()) val output = os.toByteArray() assertArrayEquals(newBuf, output) assertNotSame(newBuf, output) } @Test def countField(): Unit = { class ByteArrayOutputStreamWithCountAccess extends ByteArrayOutputStream { def getCount(): Int = count def setCount(c: Int): Unit = count = c } val os = new ByteArrayOutputStreamWithCountAccess os.write(Array[Byte](5, 7, 10, 15, 25, -4)) os.flush() assertEquals(6, os.getCount()) assertArrayEquals(Array[Byte](5, 7, 10, 15, 25, -4), os.toByteArray()) os.setCount(3) assertEquals(3, os.getCount()) assertEquals(3, os.size()) assertArrayEquals(Array[Byte](5, 7, 10), os.toByteArray()) } }
scala-js/scala-js
test-suite/shared/src/test/scala/org/scalajs/testsuite/javalib/io/ByteArrayOutputStreamTest.scala
Scala
apache-2.0
3,065
/* * Copyright 2011-2018 GatlingCorp (http://gatling.io) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.gatling.commons import scala.util.{ Try, Failure => TFailure, Success => TSuccess } import scala.util.control.NonFatal import io.gatling.commons.util.Throwables._ import com.typesafe.scalalogging.StrictLogging package object validation extends StrictLogging { val TrueSuccess = true.success val FalseSuccess = false.success val NoneSuccess = None.success val NullStringSuccess = "null".success def safely[T](errorMapper: String => String = identity)(f: => Validation[T]): Validation[T] = try { f } catch { case NonFatal(e) => val message = errorMapper(e.detailedMessage) logger.info(message, e) message.failure } implicit class SuccessWrapper[T](val value: T) extends AnyVal { def success: Validation[T] = Success(value) } implicit class FailureWrapper(val message: String) extends AnyVal { def failure = Failure(message) } implicit class OptionWrapper[T](val option: Option[T]) extends AnyVal { def toValidation(error: => String): Validation[T] = option match { case Some(value) => Success(value) case _ => Failure(error) } } implicit class TryWrapper[T](val t: Try[T]) extends AnyVal { def toValidation: Validation[T] = t match { case TSuccess(value) => Success(value) case TFailure(e) => Failure(e.rootMessage) } } }
wiacekm/gatling
gatling-commons/src/main/scala/io/gatling/commons/validation/package.scala
Scala
apache-2.0
1,989