| code (string, 5-1M chars) | repo_name (string, 5-109 chars) | path (string, 6-208 chars) | language (string, 1 class) | license (string, 15 classes) | size (int64, 5-1M) |
|---|---|---|---|---|---|
package gapt.formats.tptp
import gapt.expr._
import gapt.expr.formula.And
import gapt.expr.formula.Bottom
import gapt.expr.formula.Formula
import gapt.expr.formula.Imp
import gapt.expr.formula.Neg
import gapt.expr.formula.fol.FOLAtom
import gapt.expr.formula.fol.FOLConst
import gapt.expr.formula.fol.FOLFormula
import gapt.expr.formula.fol.FOLVar
import gapt.expr.formula.hol.{ containsStrongQuantifier, universalClosure }
import gapt.expr.util.freeVariables
import gapt.formats.InputFile
import gapt.logic.Polarity
import gapt.logic.clauseSubsumption
import gapt.logic.hol.CNFn
import gapt.logic.hol.CNFp
import gapt.proofs.resolution.{ AvatarDefinition, AvatarGroundComp, AvatarNonGroundComp, AvatarSplit }
import gapt.proofs.sketch._
import gapt.proofs.{ FOLClause, HOLClause, HOLSequent, Sequent }
import scala.collection.mutable
/**
* Represents a malformed input file, e.g. one that contains an unknown parent step.
*/
class MalformedInputFileException( s: String ) extends IllegalArgumentException( s )
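// Illustrative usage sketch, not part of the original gapt source: the public `parse`
// overloads below turn a TPTP refutation (e.g. a Vampire output whose AVATAR splitting
// steps are handled further down) into an end-sequent plus a RefutationSketch. The value
// `tptpOutput` is a hypothetical InputFile used only for illustration.
//
//   val ( endSequent, sketch ) = TptpProofParser.parse( tptpOutput, ignoreStrongQuants = true )
//   // A proof step that refers to an unknown parent raises MalformedInputFileException.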
object TptpProofParser {
def parse( out: InputFile, labelledCNF: Map[String, Seq[FOLClause]] ): RefutationSketch =
parseSteps( TptpImporter.loadWithoutIncludes( out ), labelledCNF )
def removeStrongQuants( tptpFile: TptpFile ): TptpFile = {
val stepsWithStrongQuants = tptpFile.inputs.filter {
case AnnotatedFormula( _, _, _, _, TptpTerm( "introduced", TptpTerm( sat_splitting ), _ ) +: _
) if sat_splitting.startsWith( "sat_splitting" ) =>
false
case AnnotatedFormula( _, _, _, _, TptpTerm( "introduced", FOLVar( avatar ), _ ) +: _
) if avatar.startsWith( "AVATAR" ) =>
false
case AnnotatedFormula( _, _, _, _, TptpTerm( "introduced", FOLConst( avatar ), _ ) +: _
) if avatar.startsWith( "avatar" ) =>
false
case AnnotatedFormula( _, label, "conjecture", formula, _ ) =>
containsStrongQuantifier( formula, Polarity.InSuccedent )
case AnnotatedFormula( _, label, _, formula, _ ) =>
containsStrongQuantifier( formula, Polarity.InAntecedent )
case _ => false
}.collect { case f: AnnotatedFormula => f.name }.toSet
if ( stepsWithStrongQuants.isEmpty )
tptpFile
else
TptpFile( tptpFile.inputs.collect { case f: AnnotatedFormula if !stepsWithStrongQuants( f.name ) => f }.map {
case f @ AnnotatedFormula( _, _, _, _, just +: _ ) if getParents( just ).
toSet.intersect( stepsWithStrongQuants ).isEmpty => f
case f @ AnnotatedFormula( _, label, "conjecture", formula, _ ) =>
AnnotatedFormula( "fof", label, "conjecture", formula, Seq() )
case f => AnnotatedFormula( "fof", f.name, "axiom", f.formula, Seq() )
} )
}
def parse( out: InputFile, ignoreStrongQuants: Boolean = false ): ( Sequent[FOLFormula], RefutationSketch ) = {
var tptpFile = TptpImporter.loadWithoutIncludes( out )
if ( ignoreStrongQuants ) tptpFile = removeStrongQuants( tptpFile )
tptpFile = inventSources( tptpFile )
val ( endSequent, labelledCNF ) = extractEndSequentAndCNF( tptpFile )
endSequent -> parseSteps( tptpFile, labelledCNF )
}
def inventSources( stepList: TptpFile ): TptpFile = TptpFile( stepList.inputs map {
case af @ AnnotatedFormula( lang, label,
role @ ( "axiom" | "hypothesis" | "conjecture" | "negated_conjecture" ), formula, Seq() ) =>
af.copy( annotations = Seq( TptpTerm( "file", TptpTerm( "unknown" ), TptpTerm( s"source_$label" ) ) ) )
case af @ AnnotatedFormula( lang, label,
role @ ( "axiom" | "hypothesis" | "conjecture" | "negated_conjecture" ), formula,
Seq( TptpTerm( "file", _, TptpTerm( "unknown" ) ), _* ) ) =>
af.copy( annotations = Seq( TptpTerm( "file", TptpTerm( "unknown" ), TptpTerm( s"source_$label" ) ) ) )
case other => other
} )
def extractEndSequentAndCNF( stepList: TptpFile ): ( Sequent[FOLFormula], Map[String, Seq[FOLClause]] ) = {
var endSequent = Sequent[FOLFormula]()
val labelledCNF = mutable.Map[String, Seq[FOLClause]]().withDefaultValue( Seq() )
stepList.inputs foreach {
case AnnotatedFormula( "fof", _, "conjecture", formula: FOLFormula,
Seq( TptpTerm( "file", _, TptpTerm( label ) ) ) ) =>
endSequent :+= formula
labelledCNF( label ) ++= CNFn( formula ).toSeq
case AnnotatedFormula( lang, _, _, formula: FOLFormula, Seq( TptpTerm( "file", _, TptpTerm( label ) ) ) ) =>
endSequent +:= ( if ( lang == "cnf" ) universalClosure( formula ) else formula )
labelledCNF( label ) ++= CNFp( formula ).toSeq
case _ =>
}
endSequent -> labelledCNF.toMap
}
def getParents( justification: GeneralTerm ): Seq[String] = justification match {
case TptpTerm( "file", _, _ ) => Seq()
case TptpTerm( "inference", _, _, GeneralList( parents @ _* ) ) => parents flatMap getParents
case TptpTerm( "introduced", _, _ ) => Seq()
case TptpTerm( "theory", TptpTerm( "equality", _* ), _* ) => Seq()
case GeneralColon( TptpTerm( label ), _ ) => Seq( label )
case TptpTerm( dagSource ) => Seq( dagSource )
}
def findClauseRenaming( from: HOLSequent, to: HOLSequent ): Option[Map[Var, Var]] =
if ( from.sizes != to.sizes )
None
else for {
subst <- clauseSubsumption( from, to )
// FIXME: this would only be correct if we considered all subsumptions...
if subst.isInjectiveRenaming
} yield subst.map.map { case ( l, r ) => l -> r.asInstanceOf[Var] }
def parseSteps( stepList: TptpFile, labelledCNF: Map[String, Seq[FOLClause]] ): RefutationSketch = {
val steps = ( for ( input @ AnnotatedFormula( _, name, _, _, _ ) <- stepList.inputs )
yield name -> input ).toMap
val memo = mutable.Map[String, Seq[RefutationSketch]]()
val alreadyVisited = mutable.Set[String]()
val splDefs = mutable.Map[( FOLAtom, Boolean ), AvatarDefinition]()
val splAtoms = mutable.Set[FOLAtom]()
def filterVampireSplits( clause: FOLClause ): FOLClause = clause.filterNot( splAtoms )
def convertAvatarDefinition( defn: Formula, splAtom: FOLAtom ): Seq[RefutationSketch] = {
splAtoms += splAtom
val comps = defn match {
case splAtom @ FOLAtom( _, _ ) if freeVariables( splAtom ).isEmpty =>
Polarity.values.map {
AvatarGroundComp( splAtom, _ )
}
case Neg( splAtom @ FOLAtom( _, _ ) ) if freeVariables( splAtom ).isEmpty =>
Polarity.values.map {
AvatarGroundComp( splAtom, _ )
}
case _ =>
Seq( AvatarNonGroundComp( splAtom, AvatarNonGroundComp.DefinitionFormula.canonize( defn ) ) )
}
comps map { comp =>
splDefs( ( splAtom, comp.assertion.succedent.nonEmpty ) ) = comp
SketchComponentIntro( comp )
}
}
def haveAlreadyVisited( stepName: String ): Boolean = {
val res = alreadyVisited( stepName )
alreadyVisited += stepName
res
}
def convert( stepName: String ): Seq[RefutationSketch] = {
val step = steps.getOrElse( stepName, throw new MalformedInputFileException( s"unknown step $stepName" ) )
memo.getOrElseUpdate( stepName, step match {
case _ if haveAlreadyVisited( stepName ) =>
throw new IllegalArgumentException( s"Cyclic inference: ${steps( stepName )}" )
case AnnotatedFormula( "fof", _, "plain", And( Imp( defn, Neg( splAtom: FOLAtom ) ), _ ),
TptpTerm( "introduced", TptpTerm( "sat_splitting_component" ), _ ) +: _ ) =>
convertAvatarDefinition( defn, splAtom )
case AnnotatedFormula( "fof", _, "plain", Bottom(),
( justification @ TptpTerm( "inference", TptpTerm( "sat_splitting_refutation" ),
_, _ ) ) +: _ ) =>
val sketchParents = getParents( justification ) flatMap convert
val splitParents = sketchParents map { parent0 =>
var parent = parent0
for {
clauseComponent <- AvatarSplit.getComponents( parent0.conclusion )
comp <- splDefs.values
renaming <- findClauseRenaming( comp.clause, clauseComponent )
} parent = SketchComponentElim( parent, comp match {
case comp @ AvatarNonGroundComp( _, _, vars ) => comp.copy( vars = vars.map( renaming ) )
case AvatarGroundComp( _, _ ) => comp
} )
require( parent.conclusion.isEmpty )
parent
}
Seq( SketchSplitCombine( splitParents ) )
case AnnotatedFormula( "fof", _, "plain", And( Imp( splAtom: FOLAtom, defn ), _ ),
TptpTerm( "introduced", FOLVar( "AVATAR_definition" ) | FOLConst( "avatar_definition" ), _ ) +: _ ) =>
convertAvatarDefinition( defn, splAtom )
case AnnotatedFormula( "fof", _, "plain", disj,
( justification @ TptpTerm( "inference", FOLVar( "AVATAR_split_clause" ) | FOLConst( "avatar_split_clause" ),
_, _ ) ) +: _ ) =>
val Seq( assertion ) = CNFp( disj ).toSeq
val Seq( splittedClause, _* ) = getParents( justification ) flatMap convert
var p = splittedClause
for {
clauseComponent <- AvatarSplit.getComponents( splittedClause.conclusion )
( splAtom: FOLAtom, i ) <- assertion.zipWithIndex
comp <- splDefs.get( ( splAtom, i.isSuc ) )
renaming <- findClauseRenaming( comp.clause, clauseComponent )
} p = SketchComponentElim( p, comp match {
case comp @ AvatarNonGroundComp( _, _, vars ) => comp.copy( vars = vars.map( renaming ) )
case AvatarGroundComp( _, _ ) => comp
} )
require( p.conclusion.isEmpty, s"$assertion\n$splittedClause\n$splDefs" )
Seq( p )
case AnnotatedFormula( "fof", _, "plain", Bottom(),
( justification @ TptpTerm( "inference", FOLVar( "AVATAR_sat_refutation" ) |
FOLConst( "avatar_sat_refutation" ), _, _ ) ) +: _ ) =>
Seq( SketchSplitCombine( getParents( justification ).flatMap( convert ) ) )
case AnnotatedFormula( "fof", _, "conjecture", _, TptpTerm( "file", _, TptpTerm( label ) ) +: _ ) =>
labelledCNF( label ) map SketchAxiom
case AnnotatedFormula( _, _, _, axiom: FOLFormula, TptpTerm( "file", _, TptpTerm( label ) ) +: _ ) =>
CNFp( axiom ).toSeq match {
case Seq( axiomClause ) =>
Seq( SketchInference(
axiomClause,
labelledCNF( label ) map SketchAxiom ) )
case clauses => labelledCNF( label ) map SketchAxiom
}
case AnnotatedFormula( "cnf", _, "axiom", axiom: FOLFormula, Seq() ) =>
val label = stepName
CNFp( axiom ).toSeq match {
case Seq( axiomClause ) =>
Seq( SketchInference(
axiomClause,
labelledCNF( label ) map SketchAxiom ) )
case clauses => labelledCNF( label ) map SketchAxiom
}
case AnnotatedFormula( _, _, _, conclusion: FOLFormula, justification +: _ ) =>
CNFp( conclusion ).toSeq match {
case Seq( conclusionClause ) =>
val sketchParents = getParents( justification ) flatMap convert
val conclusionClause_ = filterVampireSplits( conclusionClause )
val sketchParents_ = sketchParents.
find( p => clauseSubsumption( p.conclusion, conclusionClause_ ).isDefined ).
fold( sketchParents )( Seq( _ ) )
Seq( SketchInference( conclusionClause_, sketchParents_ ) )
case clauses => getParents( justification ) flatMap convert
}
} )
}
val emptyClauseLabel = stepList.inputs.collect {
case AnnotatedFormula( _, label, _, Bottom(), _ ) => label
}.head
convert( emptyClauseLabel ).head
}
}
| gapt/gapt | core/src/main/scala/gapt/formats/tptp/TptpProofParser.scala | Scala | gpl-3.0 | 11,983 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.bark
import org.apache.jackrabbit.oak.bark.web.home.Index
import org.apache.jackrabbit.oak.bark.web.login.Login
import org.apache.jackrabbit.oak.bark.web.view.View
import org.apache.jackrabbit.oak.bark.web.viewadmin.ViewAdmin
import org.apache.wicket.protocol.http.WebApplication
import org.apache.wicket.request.{ Request, Response }
import javax.jcr.Repository
import org.apache.jackrabbit.oak.api.ContentRepository
import org.apache.jackrabbit.oak.spi.state.NodeStore
class BarkApp extends WebApplication {
var repository: ContentRepository = null
var store: NodeStore = null
override def getHomePage = classOf[Index];
override def newSession(request: Request, response: Response) = new BaseSession(request, repository);
override def init() = {
super.init();
// markup settings
getMarkupSettings().setStripWicketTags(true);
getMarkupSettings().setDefaultMarkupEncoding("UTF-8");
// page settings
getPageSettings().setVersionPagesByDefault(false);
mountPage("/login", classOf[Login]);
mountPage("/view", classOf[View]);
mountPage("/viewadmin", classOf[ViewAdmin]);
store = OakRepositorySupport.newNodeStore();
repository = OakRepositorySupport.createRepository(store);
}
}
| stillalex/bark-oak | src/main/scala/org/apache/jackrabbit/oak/bark/BarkApp.scala | Scala | apache-2.0 | 2,078 |
/*
* Copyright 2012-2014 Comcast Cable Communications Management, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.comcast.xfinity.sirius.api.impl.paxos
import com.comcast.xfinity.sirius.NiceTest
import akka.testkit.{TestActorRef, TestProbe}
import akka.actor.{Terminated, ActorContext, ActorSystem, ActorRef}
import com.comcast.xfinity.sirius.api.impl.paxos.LeaderWatcher._
import com.comcast.xfinity.sirius.api.impl.paxos.PaxosMessages.Preempted
import com.comcast.xfinity.sirius.api.impl.paxos.LeaderWatcher.DifferentLeader
import org.scalatest.BeforeAndAfterAll
import com.comcast.xfinity.sirius.api.SiriusConfiguration
class LeaderWatcherTest extends NiceTest with BeforeAndAfterAll {
implicit val actorSystem = ActorSystem("LeaderPingerTest")
case class PingersCreated(num: Int)
def makeWatcher(leaderToWatch: ActorRef = TestProbe().ref,
ballot: Ballot = Ballot(1, TestProbe().ref.path.toString),
pinger: ActorRef = TestProbe().ref,
replyTo: ActorRef = TestProbe().ref,
pingerCreationNotifier: ActorRef = TestProbe().ref) = {
val childProvider = new ChildProvider(leaderToWatch, ballot, new SiriusConfiguration) {
var pingersCreated = 0
override def createPinger(replyTo: ActorRef)
(implicit context: ActorContext) = {
pingersCreated += 1
pingerCreationNotifier ! PingersCreated(pingersCreated)
pinger
}
}
TestActorRef(new LeaderWatcher(replyTo, childProvider, new SiriusConfiguration))
}
override def afterAll(): Unit = {
actorSystem.terminate()
}
describe ("on instantiation") {
it ("creates a pinger") {
val pingerCreationNotifier = TestProbe()
makeWatcher(pingerCreationNotifier = pingerCreationNotifier.ref)
pingerCreationNotifier.expectMsg(PingersCreated(1))
}
}
describe ("upon receiving a CheckLeader message") {
it ("creates a pinger") {
val pingerCreationNotifier = TestProbe()
val watcher = makeWatcher(pingerCreationNotifier = pingerCreationNotifier.ref)
pingerCreationNotifier.expectMsg(PingersCreated(1))
watcher ! CheckLeader
pingerCreationNotifier.expectMsg(PingersCreated(2))
}
}
describe ("upon receiving a LeaderGone message") {
it ("tells replyTo to seek leadership and stops") {
val terminationProbe = TestProbe()
val replyTo = TestProbe()
val watcher = makeWatcher(replyTo = replyTo.ref)
terminationProbe.watch(watcher) // who watches the watchmen?
watcher ! LeaderGone
replyTo.expectMsg(LeaderGone)
terminationProbe.expectMsgClass(classOf[Terminated])
}
}
describe ("upon receiving a DifferentLeader message") {
it ("preempts replyTo with the new ballot") {
val terminationProbe = TestProbe()
val replyTo = TestProbe()
val watcher = makeWatcher(replyTo = replyTo.ref)
val newBallot = Ballot(1, TestProbe().ref.path.toString)
terminationProbe.watch(watcher)
watcher ! DifferentLeader(newBallot)
replyTo.expectMsg(Preempted(newBallot))
terminationProbe.expectMsgClass(classOf[Terminated])
}
}
describe ("upon receiving a Close message") {
it ("dies quietly") {
val terminationProbe = TestProbe()
val watcher = makeWatcher()
terminationProbe.watch(watcher)
watcher ! Close
terminationProbe.expectMsgClass(classOf[Terminated])
}
}
}
| Comcast/sirius | src/test/scala/com/comcast/xfinity/sirius/api/impl/paxos/LeaderWatcherTest.scala | Scala | apache-2.0 | 4,019 |
//======================================================================================================================
// Facsimile: A Discrete-Event Simulation Library
// Copyright © 2004-2020, Michael J Allen.
//
// This file is part of Facsimile.
//
// Facsimile is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later
// version.
//
// Facsimile is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
// warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
// details.
//
// You should have received a copy of the GNU Lesser General Public License along with Facsimile. If not, see:
//
// http://www.gnu.org/licenses/lgpl.
//
// The developers welcome all comments, suggestions and offers of assistance. For further information, please visit the
// project home page at:
//
// http://facsim.org/
//
// Thank you for your interest in the Facsimile project!
//
// IMPORTANT NOTE: All patches (modifications to existing files and/or the addition of new files) submitted for
// inclusion as part of the official Facsimile code base, must comply with the published Facsimile Coding Standards. If
// your code fails to comply with the standard, then your patches will be rejected. For further information, please
// visit the coding standards at:
//
// http://facsim.org/Documentation/CodingStandards/
//======================================================================================================================
//======================================================================================================================
// Scala source file belonging to the org.facsim.sim.model package.
//======================================================================================================================
package org.facsim.sim.model
import com.typesafe.config.Config
import scala.jdk.CollectionConverters._
import squants.time.Time
/** Simulation model parameters, obtained from configuration files.
*
* @constructor Create a new simulation model parameters instance.
*
* @param config ''Lightbend Config'' configuration instance. It includes all configuration elements from constituent
* libraries and applications, as well as custom configuration identified on the command line.
*
* @since 0.2
*/
final class Parameters private[sim](config: Config) {
/** Retrieve a parameter value as a string.
*
* In normal usage, this function must complete without an exception; exceptions will only be thrown if the parameter
* configuration is invalid.
*
* There is no guarantee that the returned value will be valid; for example, the returned value may be outside of a
* valid range, etc. It is the responsibility of the caller to ensure the validity of the returned value.
*
* @param path Configuration path of the associated parameter. If the path does not identify a parameter value, or if
* the path does not identify a string value, then an exception will be thrown.
*
* @return Value of the associated parameter as a string.
*
* @throws com.typesafe.config.ConfigException if an exception occurs while retrieving the parameter value.
*/
def stringParam(path: String): String = config.getString(path)
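// Hedged example, not part of the original file: assuming a configuration entry such as
//   model.name = "assembly-line"
// (the key and the `params` instance are hypothetical), the accessor above is used as
//   val name: String = params.stringParam("model.name")
// and a missing or non-string entry surfaces as a com.typesafe.config.ConfigException.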
/** Retrieve a parameter value as a list of strings.
*
* In normal usage, this function must complete without an exception; exceptions will only be thrown if the parameter
* configuration is invalid.
*
* There is no guarantee that the returned values are valid; for example, the returned values may be outside of a
* valid range, etc. It is the responsibility of the caller to ensure the validity of the returned values.
*
* @param path Configuration path of the associated parameter. If the path does not identify a parameter value, or if
* the path does not identify a list of strings, then an exception will be thrown.
*
* @return Value of the associated parameter as a list of strings.
*
* @throws com.typesafe.config.ConfigException if an exception occurs while retrieving the parameter value.
*/
def stringListParam(path: String): List[String] = config.getStringList(path).asScala.toList
/** Retrieve a parameter value as an integer.
*
* In normal usage, this function must complete without an exception; exceptions will only be thrown if the parameter
* configuration is invalid.
*
* There is no guarantee that the returned value will be valid; for example, the returned value may be outside of a
* valid range, etc. It is the responsibility of the caller to ensure the validity of the returned value.
*
* @param path Configuration path of the associated parameter. If the path does not identify a parameter value, or if
* the path does not identify an integer value, then an exception will be thrown.
*
* @return Value of the associated parameter as an integer.
*
* @throws com.typesafe.config.ConfigException if an exception occurs while retrieving the parameter value.
*/
def intParam(path: String): Int = config.getInt(path)
/** Retrieve a parameter value as a list of integers.
*
* In normal usage, this function must complete without an exception; exceptions will only be thrown if the parameter
* configuration is invalid.
*
* There is no guarantee that the returned values are valid; for example, the returned values may be outside of a
* valid range, etc. It is the responsibility of the caller to ensure the validity of the returned values.
*
* @param path Configuration path of the associated parameter. If the path does not identify a parameter value, or if
* the path does not identify a list of integers, then an exception will be thrown.
*
* @return Value of the associated parameter as a list of integers.
*
* @throws com.typesafe.config.ConfigException if an exception occurs while retrieving the parameter value.
*/
def intListParam(path: String): List[Int] = config.getIntList(path).asScala.map(_.intValue()).toList
/** Retrieve a parameter value as a Boolean.
*
* In normal usage, this function must complete without an exception; exceptions will only be thrown if the parameter
* configuration is invalid.
*
* @param path Configuration path of the associated parameter. If the path does not identify a parameter value, or if
* the path does not identify a Boolean value, then an exception will be thrown.
*
* @return Value of the associated parameter as a Boolean.
*
* @throws com.typesafe.config.ConfigException if an exception occurs while retrieving the parameter value.
*/
def boolParam(path: String): Boolean = config.getBoolean(path)
/** Retrieve a parameter value as a list of Booleans.
*
* In normal usage, this function must complete without an exception; exceptions will only be thrown if the parameter
* configuration is invalid.
*
* @param path Configuration path of the associated parameter. If the path does not identify a parameter value, or if
* the path does not identify a list of Booleans, then an exception will be thrown.
*
* @return Value of the associated parameter as a list of Booleans.
*
* @throws com.typesafe.config.ConfigException if an exception occurs while retrieving the parameter value.
*/
def boolListParam(path: String): List[Boolean] = config.getBooleanList(path).asScala.map(_.booleanValue()).toList
/** Retrieve a parameter value as a double precision value.
*
* In normal usage, this function must complete without an exception; exceptions will only be thrown if the parameter
* configuration is invalid.
*
* There is no guarantee that the returned value will be valid; for example, the returned value may be outside of a
* valid range, etc. It is the responsibility of the caller to ensure the validity of the returned value.
*
* @note Consider using an alternative type, such as a `Time`, etc., which may reflect the usage of the parameter
* better. Plain double precision values should only be used for unitless types, such as scale factors, ratios, etc.
*
* @param path Configuration path of the associated parameter. If the path does not identify a parameter value, or if
* the path does not identify a double value, then an exception will be thrown.
*
* @return Value of the associated parameter as a double precision value.
*
* @throws com.typesafe.config.ConfigException if an exception occurs while retrieving the parameter value.
*/
def doubleParam(path: String): Double = config.getDouble(path)
/** Retrieve a parameter value as a list of double precision values.
*
* In normal usage, this function must complete without an exception; exceptions will only be thrown if the parameter
* configuration is invalid.
*
* There is no guarantee that the returned values are valid; for example, the returned values may be outside of a
* valid range, etc. It is the responsibility of the caller to ensure the validity of the returned values.
*
* @note Consider using an alternative type, such as a `Time`, etc., which may reflect the usage of the parameter
* better. Plain double precision values should only be used for unitless types, such as scale factors, ratios, etc.
*
* @param path Configuration path of the associated parameter. If the path does not identify a parameter value, or if
* the path does not identify a list of doubles, then an exception will be thrown.
*
* @return Value of the associated parameter as a list of double precision values.
*
* @throws com.typesafe.config.ConfigException if an exception occurs while retrieving the parameter value.
*/
def doubleListParam(path: String): List[Double] = config.getDoubleList(path).asScala.map(_.doubleValue()).toList
/** Retrieve a parameter value as a time value.
*
* In normal usage, this function must complete without an exception; exceptions will only be thrown if the parameter
* configuration is invalid.
*
* There is no guarantee that the returned value will be valid; for example, the returned value may be outside of a
* valid range, etc. It is the responsibility of the caller to ensure the validity of the returned value.
*
* @param path Configuration path of the associated parameter. If the path does not identify a parameter value, or if
* the path does not identify a time value, then an exception will be thrown.
*
* @return Value of the associated parameter as a time value.
*
* @throws com.typesafe.config.ConfigException if an exception occurs while retrieving the parameter value.
*
* @throws x.y.z if the value cannot be parsed as a time value.
*/
def timeParam(path: String): Time = {
// Get the value as a string first.
val strVal = stringParam(path)
// Now parse the value as a time.
Time.parseString(strVal).get
}
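// Hedged example, not part of the original file: with a hypothetical HOCON entry
//   warm-up-time = "30 min"
// the two-step retrieval above (read the string, then parse it with squants) gives
//   val warmUp: Time = params.timeParam("warm-up-time")
// The key name and the `params` instance are assumptions for illustration; a string that
// squants cannot parse makes the final `.get` throw, as flagged by the `@throws` note above.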
/** Retrieve a parameter value as a list of time values.
*
* In normal usage, this function must complete without an exception; exceptions will only be thrown if the parameter
* configuration is invalid.
*
* There is no guarantee that the returned values are valid; for example, the returned values may be outside of a
* valid range, etc. It is the responsibility of the caller to ensure the validity of the returned values.
*
* @param path Configuration path of the associated parameter. If the path does not identify a parameter value, or if
* the path does not identify a list of time values, then an exception will be thrown.
*
* @return Value of the associated parameter as a list of time values.
*
* @throws com.typesafe.config.ConfigException if an exception occurs while retrieving the parameter value.
*
* @throws x.y.z if a value cannot be parsed as a time value.
*/
def timeListParam(path: String): List[Time] = {
// Get the list of values as strings first.
val strVals = stringListParam(path)
// Now parse each value as a time.
strVals.map(s => Time.parseString(s).get)
}
}
| Facsimile/facsimile | facsimile-simulation/src/main/scala/org/facsim/sim/model/Parameters.scala | Scala | lgpl-3.0 | 12,412 |
/******************************************************************************
Copyright (c) 2012-2014, KAIST, S-Core.
All rights reserved.
Use is subject to license terms.
This distribution may include materials developed by third parties.
******************************************************************************/
package kr.ac.kaist.jsaf.bug_detector
import kr.ac.kaist.jsaf.analysis.cfg._
import kr.ac.kaist.jsaf.analysis.typing.domain._
import kr.ac.kaist.jsaf.analysis.typing.models.ModelManager
import kr.ac.kaist.jsaf.analysis.typing._
import kr.ac.kaist.jsaf.nodes.IROp
import kr.ac.kaist.jsaf.nodes_util.Span
import kr.ac.kaist.jsaf.nodes_util.EJSOp
import kr.ac.kaist.jsaf.analysis.typing.CState
import kr.ac.kaist.jsaf.analysis.typing.{SemanticsExpr => SE}
import kr.ac.kaist.jsaf.{Shell, ShellParameters}
class ExprDetect(bugDetector: BugDetector) {
val cfg = bugDetector.cfg
val typing = bugDetector.typing
val bugStorage = bugDetector.bugStorage
val bugOption = bugDetector.bugOption
val varManager = bugDetector.varManager
val stateManager = bugDetector.stateManager
val CommonDetect = bugDetector.CommonDetect
////////////////////////////////////////////////////////////////
// Bug Detection Main (check CFGExpr)
////////////////////////////////////////////////////////////////
def check(inst: CFGInst, expr: CFGExpr, cstate: CState, typeof: Boolean): Unit = {
val node = cfg.findEnclosingNode(inst)
//val state = typing.mergeState(cstate)
//val heap = state._1
//val context = state._2
//if (heap <= HeapBot) Unit
//else {
expr match {
case CFGBin(info, first, op, second) =>
val opStr = op.getText
opStr match {
case "*" | "/" | "%" =>
//defaultValueCheck2(inst, opStr, first, second)
case "+" | "<" | ">" | "<=" | ">=" =>
//defaultValueCheck2(inst, opStr, first, second)
convertToNumberCheck2(opStr, first, second)
case "|" | "&" | "^" | "<<" | ">>" | ">>>" | "-" | "/" | "%" | "*" =>
convertToNumberCheck2(opStr, first, second)
case "==" =>
//defaultValueCheck2(inst, opStr, first, second)
convertToNumberCheck2(opStr, first, second)
implicitTypeConversionEqualityComparison(info.getSpan, opStr, first, second)
case "!=" =>
convertToNumberCheck2(opStr, first, second)
implicitTypeConversionEqualityComparison(info.getSpan, opStr, first, second)
case "in" =>
//CommonDetect.defaultValueCheck(inst, first, "String")
binaryOpSecondTypeCheck(info.getSpan, op, second)
case "instanceof" =>
binaryOpSecondTypeCheck(info.getSpan, op, second)
case _ => Unit
}
case CFGLoad(info, obj, index) =>
if(!(Shell.params.command == ShellParameters.CMD_WEBAPP_BUG_DETECTOR))
absentReadPropertyCheck(info.getSpan, obj, index)
case CFGThis(info) =>
globalThisCheck(info.getSpan, node._1)
case CFGUn(info, op, expr) =>
val opStr = op.getText
opStr match {
// 11.4.6 Unary + Operator
// 11.4.7 Unary - Operator
case "+" | "-" =>
//CommonDetect.defaultValueCheck(inst, expr, "Number")
CommonDetect.convertToNumberCheck(node, inst, expr, null, true, null)
//convertToNumberCheck1(expr)
// 11.4.8 Bitwise NOT Operator ( ~ )
case "~" =>
CommonDetect.convertToNumberCheck(node, inst, expr, null, true, null)
//convertToNumberCheck1(expr)
case _ => Unit
}
case CFGVarRef(info, id) =>
if (!typeof) absentReadVariableCheck(info.getSpan, id)
case _ => Unit
}
//}
////////////////////////////////////////////////////////////////
// AbsentRead Check (Property check)
////////////////////////////////////////////////////////////////
def absentReadPropertyCheck(span: Span, obj: CFGExpr, index: CFGExpr): Unit = {
if(!bugOption.AbsentReadProperty_Check) return
// Don't check if this instruction is "LHS = <>fun<>["prototype"]".
if (obj.isInstanceOf[CFGVarRef] && obj.asInstanceOf[CFGVarRef].id.contains("<>fun<>") &&
index.isInstanceOf[CFGString] && index.asInstanceOf[CFGString].str == "prototype") return
// Get the object name and property name
val objId: String = varManager.getUserVarAssign(obj) match {
case bv: BugVar0 => bv.toString
case _ => "an object"
}
val propId: String = varManager.getUserVarAssign(index) match {
case bv: BugVar0 => bv.toString
case _ => null
}
// Check for each CState
val bugCheckInstance = new BugCheckInstance()
val mergedCState = stateManager.getInputCState(node, inst.getInstId, bugOption.contextSensitive(AbsentReadProperty))
for ((callContext, state) <- mergedCState) {
val objLocSet = SE.V(obj, state.heap, state.context)._1.locset
val propValue = SE.V(index, state.heap, state.context)._1.pvalue
// Check for each object location
for (objLoc <- objLocSet) {
// Check for each primitive value
for (absValue <- propValue) {
objLocSet.foreach((loc) => {
for (fid <- state.heap(loc)("@construct").funid) {
ModelManager.getFIdMap("Builtin").get(fid) match {
case Some(funName) if funName == "RegExp.constructor" =>
val propValue = SE.V(index, state.heap, state.context)._1.pvalue
propValue.foreach((absValue) => if (regExpDeprecated contains BugHelper.getPropName(absValue.toAbsString)) {
bugStorage.addMessage(span, RegExpDeprecated, inst, callContext, "'" + objId + "." + BugHelper.getPropName(absValue.toAbsString) + "'")
return
})
case _ => // pass
}
}
})
if(!absValue.isBottom) {
val isBug = if(absValue.isConcrete || bugOption.AbsentReadProperty_CheckAbstractIndexValue) {
val propStr = absValue.toAbsString
val propExist = Helper.HasProperty(state.heap, objLoc, propStr)
// Collect property's existence
bugOption.AbsentReadProperty_PropertyMustExistDefinitely match {
case true => propExist != BoolTrue
case false => propExist <= BoolFalse
}
}
else false
val checkInstance = bugCheckInstance.insert(isBug, span, callContext, state)
checkInstance.loc1 = objLoc
checkInstance.absValue = absValue
//println(callContext + "objLoc = " + objLoc + ", " + objId + "[" + absValue + "] => isBug = " + isBug)
}
}
}
}
// Filter out bugs depending on options
if (!bugOption.AbsentReadProperty_PropertyMustExistInEveryState) {
//bugCheckInstance.filter((bug, notBug) => (bug.loc1 == notBug.loc1 && bug.absValue == notBug.absValue))
bugCheckInstance.filter((bug, notBug) => true)
}
if (!bugOption.AbsentReadProperty_PropertyMustExistInEveryLocation) {
bugCheckInstance.filter((bug, notBug) => (bug.callContext == notBug.callContext && bug.state == notBug.state && bug.absValue == notBug.absValue))
}
if( !bugOption.AbsentReadProperty_PropertyMustExistForAllValue) {
bugCheckInstance.filter((bug, notBug) => (bug.callContext == notBug.callContext && bug.state == notBug.state && bug.loc1 == notBug.loc1))
}
// Report bugs
// If the index(propId) is CFGUserId (show possible values of the index variable)
if (propId != null) {
// Group by CState to collect values of the index variable
bugCheckInstance.group(checkInstance => (checkInstance.callContext, checkInstance.state).hashCode)
for ((_, checkInstanceList) <- bugCheckInstance.groupedBugList) {
// Collect values of the index variable
var concreteValues: List[AbsDomain] = List()
checkInstanceList.foreach((ci) => if (!concreteValues.contains(ci.absValue)) concreteValues = concreteValues :+ ci.absValue)
val msg = if (concreteValues.isEmpty) "." else ", where property '" + propId + "' can be " + concreteValues.tail.foldLeft(concreteValues.head.toString)((str, s) => str + ", " + s.toString) + "."
bugStorage.addMessage(checkInstanceList.head.span, AbsentReadProperty, inst, checkInstanceList.head.callContext, propId, "'" + objId + "'", msg)
}
}
else bugCheckInstance.bugList.foreach((e) => bugStorage.addMessage(e.span, AbsentReadProperty, inst, e.callContext, BugHelper.getPropName(e.absValue.toAbsString), "'" + objId + "'", "."))
}
////////////////////////////////////////////////////////////////
// AbsentRead Check (Variable check)
////////////////////////////////////////////////////////////////
def absentReadVariableCheck(span: Span, id: CFGId): Unit = {
if(!bugOption.AbsentReadVariable_Check) return
// Check for user variable only
if (!id.isInstanceOf[CFGUserId]) return
val idAbsString = AbsString.alpha(id.getText)
// Check for each CState
val bugCheckInstance = new BugCheckInstance()
val mergedCState = stateManager.getInputCState(node, inst.getInstId, bugOption.contextSensitive(AbsentReadVariable))
for ((callContext, state) <- mergedCState) {
val doesExist: AbsBool = id.getVarKind match {
case PureLocalVar => BoolTrue
case CapturedVar => BoolTrue
case CapturedCatchVar => BoolTrue
case GlobalVar => Helper.HasProperty(state.heap, GlobalLoc, idAbsString)
}
// Collect variable's existence
val isBug = bugOption.AbsentReadVariable_VariableMustExistDefinitely match {
case true => doesExist != BoolTrue
case false => doesExist <= BoolFalse
}
bugCheckInstance.insert(isBug, span, callContext, state)
}
// Filter out bugs depending on options
if (!bugOption.AbsentReadVariable_VariableMustExistInEveryState) bugCheckInstance.filter((bug, notBug) => true)
// Report bugs
for (b <- bugCheckInstance.bugList) bugStorage.addMessage(b.span, AbsentReadVariable, inst, b.callContext, id.getText)
}
////////////////////////////////////////////////////////////////
// BinaryOpSecondType Check (in & instanceof)
////////////////////////////////////////////////////////////////
def binaryOpSecondTypeCheck(span: Span, op: IROp, second: CFGExpr): Unit = {
if(!bugOption.BinaryOpSecondType_Check) return
// Check for each CState
val bugCheckInstance = new BugCheckInstance()
val mergedCState = stateManager.getInputCState(node, inst.getInstId, bugOption.contextSensitive(BinaryOpSecondType))
for ((callContext, state) <- mergedCState) {
val value = SE.V(second, state.heap, state.context)._1
val pvalue = value.pvalue
// Check object type (in & instanceof)
val isBug = bugOption.BinaryOpSecondType_OperandMustBeCorrectForAllValue match {
case true =>
value.locset.isEmpty || !pvalue.undefval.isBottom ||
!pvalue.nullval.isBottom || !pvalue.boolval.isBottom ||
!pvalue.numval.isBottom || !pvalue.strval.isBottom
case false => value.locset.isEmpty
}
val checkInstance = bugCheckInstance.insertWithStrings(isBug, span, callContext, state, "non-object")
checkInstance.loc1 = Integer.MIN_VALUE
checkInstance.pValue = pvalue
// Check function type (instanceof)
if (op.getKind == EJSOp.BIN_COMP_REL_INSTANCEOF) {
value.locset.foreach(loc => {
val isBug = bugOption.BinaryOpSecondType_OperandMustBeCorrectForAllValue match {
case true =>
Helper.IsCallable(state.heap, loc) != BoolTrue ||
!pvalue.undefval.isBottom || !pvalue.nullval.isBottom || !pvalue.boolval.isBottom ||
!pvalue.numval.isBottom || !pvalue.strval.isBottom
case false =>
Helper.IsCallable(state.heap, loc) == BoolFalse
}
val checkInstance = bugCheckInstance.insertWithStrings(isBug, span, callContext, state, "non-function object")
checkInstance.loc1 = loc
checkInstance.pValue = pvalue
})
}
}
// Filter out bugs depending on options
if (!bugOption.BinaryOpSecondType_OperandMustBeCorrectInEveryState) {
bugCheckInstance.filter((bug, notBug) => bug.loc1 == notBug.loc1 && bug.string1 == notBug.string1)
}
if (!bugOption.BinaryOpSecondType_OperandMustBeCorrectInEveryLocation) {
bugCheckInstance.filter((bug, notBug) => bug.callContext == notBug.callContext && bug.state == notBug.state && bug.string1 == notBug.string1)
}
// Report bugs
for (checkInstance <- bugCheckInstance.bugList) {
var string = "."
if (second.isInstanceOf[CFGVarRef]) {
val concreteValue = BugHelper.pvalueToString(checkInstance.pValue)
if (concreteValue.length > 0) string = ", where operand '" + second.toString + "' can be " + concreteValue + "."
}
bugStorage.addMessage(checkInstance.span, BinaryOpSecondType, inst, checkInstance.callContext, second.toString, op.getText, checkInstance.string1, string)
}
}
////////////////////////////////////////////////////////////////
// ConvertToNumber Check (# of args: 2)
////////////////////////////////////////////////////////////////
def convertToNumberCheck2(op: String, expr1: CFGExpr, expr2: CFGExpr): Unit = {
if(!bugOption.ConvertUndefToNum_Check) return
op match {
// 11.5 Multiplicative Operators
// 11.6.2 The Subtraction Operator ( - )
// 11.7 Bitwise Shift Operators
case "*" | "/" | "%" | "-" | "<<" | ">>" | ">>>" | "&" | "^" | "|" =>
CommonDetect.convertToNumberCheck(node, inst, expr1, expr2, false, null)
// 11.6.1 The Addition operator ( + )
case "+" =>
CommonDetect.convertToNumberCheck(node, inst, expr1, expr2, true, (pvalue1: PValue, pvalue2: PValue) => {
// "7. If Type(lprim) is String or Type(rprim) is String," does not call ToNumber function.
if (bugOption.ConvertUndefToNum_ToNumberMustBeCalledForExactValue) {
pvalue1.strval == StrBot && pvalue2.strval == StrBot
}
else {
!(pvalue1.typeCount == 1 && pvalue1.strval != StrBot ||
pvalue2.typeCount == 1 && pvalue2.strval != StrBot)
}
}: Boolean)
// 11.8 Relational Operators
case "<" | ">" | "<=" | ">=" =>
CommonDetect.convertToNumberCheck(node, inst, expr1, expr2, true, (pvalue1: PValue, pvalue2: PValue) => {
var conditionResult = false
// 11.8.5 The Abstract Relational Comparison Algorithm
// "4. Else, both px and py are Strings" does not call ToNumber function.
if (bugOption.ConvertUndefToNum_ToNumberMustBeCalledForExactValue) {
pvalue1.strval == StrBot && pvalue2.strval == StrBot
}
else {
!(pvalue1.typeCount == 1 && pvalue1.strval != StrBot ||
pvalue2.typeCount == 1 && pvalue2.strval != StrBot)
}
}: Boolean)
// 11.9 Equality Operators
case "==" | "!=" =>
CommonDetect.convertToNumberCheck(node, inst, expr1, expr2, false, (pvalue1: PValue, pvalue2: PValue) => {
// 11.9.3 The Abstract Equality Comparison Algorithm
if (bugOption.ConvertUndefToNum_ToNumberMustBeCalledForExactValue) {
val pvalue1TypeCount = pvalue1.typeCount
val pvalue2TypeCount = pvalue2.typeCount
// "4. If Type(x) is Number and Type(y) is String,"
pvalue1TypeCount == 1 && pvalue2TypeCount == 1 && pvalue1.numval != NumBot && pvalue2.strval != StrBot ||
// "5. If Type(x) is String and Type(y) is Number,"
pvalue1TypeCount == 1 && pvalue2TypeCount == 1 && pvalue1.strval != StrBot && pvalue2.numval != NumBot ||
// "6. If Type(x) is Boolean,"
pvalue1TypeCount == 1 && pvalue1.boolval != BoolBot ||
// "7. If Type(y) is Boolean,"
pvalue2TypeCount == 1 && pvalue2.boolval != BoolBot
// "8. If Type(x) is either String or Number and Type(y) is Object,"
// "9. If Type(x) is Object and Type(y) is either String or Number,"
}
else {
// "4. If Type(x) is Number and Type(y) is String,"
pvalue1.numval != NumBot && pvalue2.strval != StrBot ||
// "5. If Type(x) is String and Type(y) is Number,"
pvalue1.strval != StrBot && pvalue2.numval != NumBot ||
// "6. If Type(x) is Boolean,"
pvalue1.boolval != BoolBot ||
// "7. If Type(y) is Boolean,"
pvalue2.boolval != BoolBot
// TODO: (doToPrimitive parameter must be true)
// "8. If Type(x) is either String or Number and Type(y) is Object,"
// "9. If Type(x) is Object and Type(y) is either String or Number,"
}
}: Boolean)
}
}
////////////////////////////////////////////////////////////////
// DefaultValue (called by main function)
////////////////////////////////////////////////////////////////
/*
def defaultValueCheck2(inst: CFGInst, op: String, expr1: CFGExpr, expr2: CFGExpr): Unit = {
op match {
case "==" =>
val v1 = SE.V(expr1, heap, context)._1
val v2 = SE.V(expr2, heap, context)._1
if (!definite_only && (v1.pvalue.undefval </ UndefBot || v1.pvalue.nullval </ NullBot || v1.pvalue.boolval </ BoolBot)) Unit // Maybe
else if ((NumBot <= v1.pvalue.numval || StrBot <= v1.pvalue.strval) && (!v2.locset.subsetOf(LocSetBot))) CommonDetect.defaultValueCheck(inst, expr2, "Number");
else if ((!v1.locset.subsetOf(LocSetBot)) && (NumBot <= v2.pvalue.numval || StrBot <= v2.pvalue.strval)) CommonDetect.defaultValueCheck(inst, expr1, "Number");
case _ =>
CommonDetect.defaultValueCheck(inst, expr1, "Number")
CommonDetect.defaultValueCheck(inst, expr2, "Number")
}
}
*/
////////////////////////////////////////////////////////////////
// GlobalThis Check
////////////////////////////////////////////////////////////////
def globalThisCheck(span: Span, fid: Int): Unit = {
if(!bugOption.GlobalThis_Check) return
// Check for each CState
val bugCheckInstance = new BugCheckInstance()
val mergedCState = stateManager.getInputCState(node, inst.getInstId, bugOption.contextSensitive(GlobalThis))
for((callContext, state) <- mergedCState) {
val thisLocSet = state.heap(SinglePureLocalLoc)("@this").objval.value.locset
val isGlobalCode = (fid == cfg.getGlobalFId) // Is current instruction in the global code?
val referGlobal = bugOption.GlobalThis_MustReferExactly match { // Does 'this' refer global object?
case true => thisLocSet.contains(GlobalLoc) && thisLocSet.size == 1
case false => thisLocSet.contains(GlobalLoc)
}
val isBug = !isGlobalCode && referGlobal
bugCheckInstance.insert(isBug, span, callContext, state)
// Debug
//println("fid = " + fid + ", isGlobalCode = " + isGlobalCode + ", referGlobal = " + referGlobal + ", thisLocSet.size = " + thisLocSet.size)
}
// Filter out bugs depending on options
if(!bugOption.GlobalThis_MustReferInEveryState) bugCheckInstance.filter((bug, notBug) => true)
// Report bugs
for(b <- bugCheckInstance.bugList) bugStorage.addMessage(b.span, GlobalThis, inst, b.callContext)
/* Previous code
val lset_this = heap(SinglePureLocalLoc)("@this")._2._2
val notGlobal = (fid != cfg.getGlobalFId) // true: current function is not the global object.
val mayGlobal = lset_this.contains(GlobalLoc) // true: "MAYBE" this refers the global object.
val defGlobal = lset_this.size == 1 // true: "DEFINITELY" this refers the global object.
/* bug check */
if (!definite_only && !defGlobal) Unit // maybe
else if (notGlobal && mayGlobal) bugStorage.addMessage(span, (if (defGlobal) GlobalThisDefinite else GlobalThisMaybe), inst, null)
*/
}
////////////////////////////////////////////////////////////////
// Implicit Type Conversion Check for "11.9.3 The Abstract Equality Comparison Algorithm"
////////////////////////////////////////////////////////////////
def implicitTypeConversionEqualityComparison(span: Span, op: String, expr1: CFGExpr, expr2: CFGExpr): Unit = {
if(!bugOption.ImplicitTypeConvert_Check) return
if (inst.isInstanceOf[CFGAssert] && !inst.asInstanceOf[CFGAssert].flag) return
// Check for each CState
val bugCheckInstance = new BugCheckInstance()
val mergedCState = stateManager.getInputCState(node, inst.getInstId, bugOption.contextSensitive(ImplicitTypeConvert))
for ((callContext, state) <- mergedCState) {
// expr1, expr2
val value1: Value = SE.V(expr1, state.heap, state.context)._1
val value2: Value = SE.V(expr2, state.heap, state.context)._1
val pvalue1: PValue = value1.pvalue
val pvalue2: PValue = value2.pvalue
def nonBugCase(): Boolean = {
// undefined == undefined ?
pvalue1.undefval != UndefBot && pvalue2.undefval != UndefBot ||
// null == null ?
pvalue1.nullval != NullBot && pvalue2.nullval != NullBot ||
// number == undefined ?
pvalue1.numval != NumBot && pvalue2.undefval != UndefBot ||
pvalue1.undefval != UndefBot && pvalue2.numval != NumBot ||
// number == null ?
pvalue1.numval != NumBot && pvalue2.nullval != NullBot ||
pvalue1.nullval != NullBot && pvalue2.numval != NumBot ||
// number == number ?
pvalue1.numval != NumBot && pvalue2.numval != NumBot ||
// string == undefined ?
pvalue1.strval != StrBot && pvalue2.undefval != UndefBot ||
pvalue1.undefval != UndefBot && pvalue2.strval != StrBot ||
// string == null ?
pvalue1.strval != StrBot && pvalue2.nullval != NullBot ||
pvalue1.nullval != NullBot && pvalue2.strval != StrBot ||
// string == string ?
pvalue1.strval != StrBot && pvalue2.strval != StrBot ||
// boolean == boolean ?
pvalue1.boolval != BoolBot && pvalue2.boolval != BoolBot ||
// object == undefined ?
!value1.locset.isEmpty && pvalue2.undefval != UndefBot ||
pvalue1.undefval != UndefBot && !value2.locset.isEmpty ||
// object == null ?
!value1.locset.isEmpty && pvalue2.nullval != NullBot ||
pvalue1.nullval != NullBot && !value2.locset.isEmpty ||
// object == object ?
!value1.locset.isEmpty && !value2.locset.isEmpty
}
// Insert a bug check instance
def insertBugCheckInstance(isBug: Boolean, value1Type: String, value2Type: String, absValue1: String, absValue2: String): Unit = {
val checkInstance = bugCheckInstance.insert(isBug, span, callContext, state)
checkInstance.value1 = value1
checkInstance.value2 = value2
checkInstance.string1 = value1Type
checkInstance.string3 = value2Type
if (absValue1.length > 0) checkInstance.string2 = "(" + absValue1 + ")" else checkInstance.string2 = ""
if (absValue2.length > 0) checkInstance.string4 = "(" + absValue2 + ")" else checkInstance.string4 = ""
}
var isBug = false
// null == undefined ?
if (bugOption.ImplicitTypeConvert_CheckNullAndUndefined) {
if (pvalue1.nullval != NullBot && pvalue2.undefval != UndefBot ||
pvalue1.undefval != UndefBot && pvalue2.nullval != NullBot) {
if (!bugOption.ImplicitTypeConvert_MustBeConvertedForAllValue || !nonBugCase) {
isBug = true
if (pvalue1.nullval != NullBot) insertBugCheckInstance(isBug, "null", "undefined", "", "")
else insertBugCheckInstance(isBug, "undefined", "null", "", "")
}
}
}
// string == number ?
if (bugOption.ImplicitTypeConvert_CheckStringAndNumber) {
if (pvalue1.strval != StrBot && pvalue2.numval != NumBot ||
pvalue1.numval != NumBot && pvalue2.strval != StrBot) {
if (!bugOption.ImplicitTypeConvert_MustBeConvertedForAllValue || !nonBugCase) {
isBug = true
if (pvalue1.strval != StrBot) insertBugCheckInstance(isBug, "string", "number", pvalue1.strval.getConcreteValueAsString(), pvalue2.numval.getConcreteValueAsString())
else insertBugCheckInstance(isBug, "number", "string", pvalue1.numval.getConcreteValueAsString(), pvalue2.strval.getConcreteValueAsString())
}
}
}
// boolean == undefined ?
if (bugOption.ImplicitTypeConvert_CheckBooleanAndUndefined) {
if (pvalue1.boolval != BoolBot && pvalue2.undefval != UndefBot ||
pvalue1.numval != NumBot && pvalue2.boolval != BoolBot) {
if (!bugOption.ImplicitTypeConvert_MustBeConvertedForAllValue || !nonBugCase) {
isBug = true
if (pvalue1.boolval != BoolBot) insertBugCheckInstance(isBug, "boolean", "undefined", pvalue1.boolval.getConcreteValueAsString(), "")
else insertBugCheckInstance(isBug, "undefined", "boolean", "", pvalue2.boolval.getConcreteValueAsString())
}
}
}
// boolean == null ?
if (bugOption.ImplicitTypeConvert_CheckBooleanAndNull) {
if (pvalue1.boolval != BoolBot && pvalue2.nullval != NullBot ||
pvalue1.nullval != NullBot && pvalue2.boolval != BoolBot) {
if (!bugOption.ImplicitTypeConvert_MustBeConvertedForAllValue || !nonBugCase) {
isBug = true
if (pvalue1.boolval != BoolBot) insertBugCheckInstance(isBug, "boolean", "null", pvalue1.boolval.getConcreteValueAsString(), "")
else insertBugCheckInstance(isBug, "null", "boolean", "", pvalue2.boolval.getConcreteValueAsString())
}
}
}
// boolean == number ?
if (bugOption.ImplicitTypeConvert_CheckBooleanAndNumber) {
if (pvalue1.boolval != BoolBot && pvalue2.numval != NumBot ||
pvalue1.numval != NumBot && pvalue2.boolval != BoolBot) {
if (!bugOption.ImplicitTypeConvert_MustBeConvertedForAllValue || !nonBugCase) {
isBug = true
if (pvalue1.boolval != BoolBot) insertBugCheckInstance(isBug, "boolean", "number", pvalue1.boolval.getConcreteValueAsString(), pvalue2.numval.getConcreteValueAsString())
else insertBugCheckInstance(isBug, "number", "boolean", pvalue1.numval.getConcreteValueAsString(), pvalue2.boolval.getConcreteValueAsString())
}
}
}
// boolean == string ?
if (bugOption.ImplicitTypeConvert_CheckBooleanAndString) {
if (pvalue1.boolval != BoolBot && pvalue2.strval != StrBot ||
pvalue1.strval != StrBot && pvalue2.boolval != BoolBot) {
if (!bugOption.ImplicitTypeConvert_MustBeConvertedForAllValue || !nonBugCase) {
isBug = true
if (pvalue1.boolval != BoolBot) insertBugCheckInstance(isBug, "boolean", "string", pvalue1.boolval.getConcreteValueAsString(), pvalue2.strval.getConcreteValueAsString())
else insertBugCheckInstance(isBug, "string", "boolean", pvalue1.strval.getConcreteValueAsString(), pvalue2.boolval.getConcreteValueAsString())
}
}
}
// object == number ?
if (bugOption.ImplicitTypeConvert_CheckObjectAndNumber) {
if (!value1.locset.isEmpty && pvalue2.numval != NumBot ||
pvalue1.numval != NumBot && !value2.locset.isEmpty) {
if (!bugOption.ImplicitTypeConvert_MustBeConvertedForAllValue || !nonBugCase) {
isBug = true
if (!value1.locset.isEmpty) insertBugCheckInstance(isBug, "object", "number", "", pvalue2.numval.getConcreteValueAsString())
else insertBugCheckInstance(isBug, "number", "object", pvalue1.numval.getConcreteValueAsString(), "")
}
}
}
// object == string ?
if (bugOption.ImplicitTypeConvert_CheckObjectAndString) {
if (!value1.locset.isEmpty && pvalue2.strval != StrBot ||
pvalue1.strval != StrBot && !value2.locset.isEmpty) {
if (!bugOption.ImplicitTypeConvert_MustBeConvertedForAllValue || !nonBugCase) {
isBug = true
if (!value1.locset.isEmpty) insertBugCheckInstance(isBug, "object", "string", "", pvalue2.strval.getConcreteValueAsString())
else insertBugCheckInstance(isBug, "string", "object", pvalue1.strval.getConcreteValueAsString(), "")
}
}
}
// object == boolean ?
if (bugOption.ImplicitTypeConvert_CheckObjectAndBoolean) {
if (!value1.locset.isEmpty && pvalue2.boolval != BoolBot ||
pvalue1.boolval != BoolBot && !value2.locset.isEmpty) {
if (!bugOption.ImplicitTypeConvert_MustBeConvertedForAllValue || !nonBugCase) {
isBug = true
if (!value1.locset.isEmpty) insertBugCheckInstance(isBug, "object", "boolean", "", pvalue2.boolval.getConcreteValueAsString())
else insertBugCheckInstance(isBug, "boolean", "object", pvalue1.boolval.getConcreteValueAsString(), "")
}
}
}
// Insert a bug check instance
if (!isBug) insertBugCheckInstance(isBug, "", "", "", "")
}
// Filter out bugs depending on options
if (!bugOption.ImplicitTypeConvert_MustBeConvertedInEveryState) bugCheckInstance.filter((bug, notBug) => true)
// Report bugs
bugCheckInstance.bugList.foreach((e) => bugStorage.addMessage(e.span, ImplicitTypeConvert, inst, e.callContext, e.string1, e.string2, op, e.string3, e.string4))
}
}
}
| darkrsw/safe | src/main/scala/kr/ac/kaist/jsaf/bug_detector/ExprDetect.scala | Scala | bsd-3-clause | 30,902 |
package ch.uzh.ifi.pdeboer.pplib.patterns
import ch.uzh.ifi.pdeboer.pplib.process.entities._
import ch.uzh.ifi.pdeboer.pplib.util.CollectionUtils._
import ch.uzh.ifi.pdeboer.pplib.util.LazyLogger
import scala.xml.NodeSeq
/**
* Created by pdeboer on 14/12/14.
*/
class FixPatchExecuter(@transient var driver: FixPatchDriver,
val allOrderedPatches: List[Patch],
val indicesOfPatchesToFix: List[Int],
val patchesToIncludeBeforeAndAfterMain: (Int, Int) = (2, 2),
@transient var memoizer: ProcessMemoizer = new NoProcessMemoizer(),
val memoizerPrefix: String = "") extends Serializable with LazyLogger {
lazy val allFixedPatches: List[(Int, Patch)] = {
indicesOfPatchesToFix.mpar.map(i => (i,
memoizer.mem(memoizerPrefix + "fixpatch" + i)(getFixForPatchAtIndex(i))
)).toList
}
lazy val allPatches: List[Patch] = {
logger.info("fixing patches")
allOrderedPatches.zipWithIndex.map(p => {
val possibleFix: Option[Patch] = allFixedPatches.find(_._1 == p._2).map(_._2)
possibleFix.getOrElse(p._1)
})
}
def getFixForPatchAtIndex(index: Int) = driver.fix(allOrderedPatches(index),
allOrderedPatches.slice(Math.max(0, index - patchesToIncludeBeforeAndAfterMain._1), index),
allOrderedPatches.slice(index + 1, Math.min(allOrderedPatches.length, index + 1 + patchesToIncludeBeforeAndAfterMain._2)))
}
trait FixPatchDriver {
def fix(patch: Patch, patchesBefore: List[Patch] = Nil, patchesAfterwards: List[Patch] = Nil): Patch
}
class FixVerifyFPDriver(val process: PassableProcessParam[CreateProcess[Patch, Patch]],
val beforeAfterHandler: FixVerifyFPDriver.FVFPDBeforeAfterHandler = FixVerifyFPDriver.DEFAULT_BEFORE_AFTER_HANDLER) extends FixPatchDriver with LazyLogger {
override def fix(patch: Patch, patchesBefore: List[Patch], patchesAfterwards: List[Patch]): Patch = {
logger.info(s"Fixing patch $patch")
val memPrefixInParams: Option[String] = process.getParam[Option[String]](
DefaultParameters.MEMOIZER_NAME.key).flatten
val higherPriorityParams = Map(
DefaultParameters.MEMOIZER_NAME.key -> memPrefixInParams.map(m => m.hashCode + "fixprocess")
)
val fixProcess = process.create(higherPrioParams = higherPriorityParams)
if (beforeAfterHandler.isDefined) beforeAfterHandler.get.apply(fixProcess, patchesBefore, patchesAfterwards)
fixProcess.process(patch)
}
}
object FixVerifyFPDriver {
type FVFPDBeforeAfterHandler = Option[(ProcessStub[Patch, Patch], List[Patch], List[Patch]) => Unit]
val DEFAULT_BEFORE_AFTER_HANDLER = None // beforeAfterInstructions()
def beforeAfterInstructions(targetNameSingular: String = "sentence", targetNamePlural: String = "sentences", joiner: String = ". ", targetField: ProcessParameter[Option[NodeSeq]] = DefaultParameters.QUESTION_AUX) = Some((p: ProcessStub[Patch, Patch], before: List[Patch], after: List[Patch]) => {
val beforeXML = <before>
<p>The following information is just provided such that you get a better feel for the whole. Please
<b>do not</b>
copy
&amp;
paste it into your answer. The
{" " + (if (before.length > 1) targetNamePlural else targetNameSingular) + " "}
before this
{targetNameSingular}{if (before.length > 1) " are " else " is "}
listed below
{if (before.length > 1) " according to their order of appearance"}
</p> <p>
<i>
{before.mkString(joiner)}
</i>
</p>
</before>.child
val afterXML = <after>
<p>The
{" " + (if (after.length > 1) targetNamePlural else targetNameSingular) + " "}
after this
{targetNameSingular}{if (after.length > 1) " are " else " is "}
listed below
{if (after.length > 1) " according to their order of appearance"}
</p> <p>
<i>
{after.mkString(joiner)}
</i>
</p>
</after>.child
val xml: NodeSeq = <all>
{if (before.length > 0) beforeXML}{if (after.length > 0) afterXML}
</all>.child
p.params += targetField.key -> Some(xml)
})
}
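// Hypothetical usage sketch (not part of the original file): wiring the executer
// with a no-op FixPatchDriver. A real driver (e.g. FixVerifyFPDriver above) would
// delegate each patch to a crowd process; here every patch is returned unchanged
// so the sketch stays self-contained.
object FixPatchExecuterSketch {
	def run(patches: List[Patch]): List[Patch] = {
		val noOpDriver = new FixPatchDriver {
			override def fix(patch: Patch, patchesBefore: List[Patch], patchesAfterwards: List[Patch]): Patch = patch
		}
		// "fix" every patch, keeping the default two-patch context window on each side
		new FixPatchExecuter(noOpDriver, patches, patches.indices.toList).allPatches
	}
}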
| uzh/PPLib | src/main/scala/ch/uzh/ifi/pdeboer/pplib/patterns/FixPatch.scala | Scala | mit | 3,945 |
package org.jetbrains.plugins.scala
package format
import org.jetbrains.plugins.scala.base.ScalaLightCodeInsightFixtureTestAdapter
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScExpression
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory.createExpressionFromText
import org.junit.Assert._
/**
* Pavel Fatin
*/
class InterpolatedStringFormatterTest extends ScalaLightCodeInsightFixtureTestAdapter {
def testEmpty(): Unit = {
assertEquals("", formatS())
assertEquals("", formatF())
assertEquals("", formatRaw())
}
def testText(): Unit = {
assertEquals("foo", formatS(Text("foo")))
assertEquals("foo", formatF(Text("foo")))
assertEquals("foo", formatRaw(Text("foo")))
}
def testEscapeChar(): Unit = {
    val text = Text("a \\ \n \t b")
    assertEquals("a \\\\ \\n \\t b", formatS(text))
    assertEquals(quoted("a \\ \n \t b", multiline = true), formatFull(text))
}
def testSlash(): Unit = {
    assertEquals("\\\\ \\\\\\\\", formatS(Text("\\ \\\\")))
    assertEquals("\\\\ \\\\\\\\", formatF(Text("\\ \\\\")))
    assertEquals("\\ \\\\", formatRaw(Text("\\ \\\\")))
}
def testDollar(): Unit = {
assertEquals("$$", formatS(Text("$")))
assertEquals(quoted("$"), formatFull(Text("$")))
val parts = Seq(Text("$ "), Injection(exp("amount"), None))
assertEquals("$$ $amount", formatS(parts: _*))
assertEquals(quoted("$$ $amount", prefix = "s"), formatFull(parts: _*))
}
def testPlainExpression(): Unit = {
val injection = Injection(exp("foo"), None)
assertEquals("$foo", formatS(injection))
assertEquals(quoted("$foo", prefix = "s"), formatFull(injection))
}
def testExpressionWithDispensableFormat(): Unit = {
val injection = Injection(exp("foo"), Some(Specifier(null, "%d")))
assertEquals(quoted("$foo", prefix = "s"), formatFull(injection))
}
  def testExpressionWithMandatoryFormat(): Unit = {
val injection = Injection(exp("foo"), Some(Specifier(null, "%2d")))
assertEquals(quoted("$foo%2d", prefix = "f"), formatFull(injection))
}
def testPlainLiteral(): Unit = {
assertEquals(quoted("123"), formatFull(Injection(exp("123"), None)))
}
def testLiteralWithDispensableFormat(): Unit = {
val injection = Injection(exp("123"), Some(Specifier(null, "%d")))
assertEquals(quoted("123"), formatFull(injection))
}
  def testLiteralWithMandatoryFormat(): Unit = {
val injection = Injection(exp("123"), Some(Specifier(null, "%2d")))
assertEquals(quoted("${123}%2d", prefix = "f"), formatFull(injection))
}
def testPlainComplexExpression(): Unit = {
val injection = Injection(exp("foo.bar"), None)
assertEquals(quoted("${foo.bar}", prefix = "s"), formatFull(injection))
}
def testComplexExpressionWithDispensableFormat(): Unit = {
val injection = Injection(exp("foo.bar"), Some(Specifier(null, "%d")))
assertEquals(quoted("${foo.bar}", prefix = "s"), formatFull(injection))
}
  def testComplexExpressionWithMandatoryFormat(): Unit = {
val injection = Injection(exp("foo.bar"), Some(Specifier(null, "%2d")))
assertEquals(quoted("${foo.bar}%2d", prefix = "f"), formatFull(injection))
}
def testPlainBlockExpression(): Unit = {
val injection = Injection(exp("{foo.bar}"), None)
assertEquals(quoted("${foo.bar}", prefix = "s"), formatFull(injection))
}
def testBlockExpressionWithDispensableFormat(): Unit = {
val injection = Injection(exp("{foo.bar}"), Some(Specifier(null, "%d")))
assertEquals(quoted("${foo.bar}", prefix = "s"), formatFull(injection))
}
def testBlockExpressionWithMandatoryFormat(): Unit = {
val injection = Injection(exp("{foo.bar}"), Some(Specifier(null, "%2d")))
assertEquals(quoted("${foo.bar}%2d", prefix = "f"), formatFull(injection))
}
def testMixedParts(): Unit = {
val parts = Seq(Text("foo "), Injection(exp("exp"), None), Text(" bar"))
assertEquals(quoted("foo $exp bar", prefix = "s"), formatFull(parts: _*))
}
def testLiterals(): Unit = {
val stringLiteral = exp(quoted("foo"))
assertEquals(quoted("foo"), formatFull(Injection(stringLiteral, None)))
val longLiteralInjection = Injection(exp("123L"), None)
assertEquals(quoted("123"), formatFull(longLiteralInjection))
val booleanLiteralInjection = Injection(exp("true"), None)
assertEquals(quoted("true"), formatFull(booleanLiteralInjection))
}
def testOther(): Unit = {
assertEquals("", formatS(UnboundExpression(exp("foo"))))
}
private def formatS(parts: StringPart*): String =
InterpolatedStringFormatter.formatContent(parts, "s", toMultiline = false)
private def formatF(parts: StringPart*): String =
InterpolatedStringFormatter.formatContent(parts, "f", toMultiline = false)
private def formatRaw(parts: StringPart*): String =
InterpolatedStringFormatter.formatContent(parts, "raw", toMultiline = false)
//with prefix and quotes
private def formatFull(parts: StringPart*): String = {
InterpolatedStringFormatter.format(parts)
}
private def quoted(content: String, multiline: Boolean = false, prefix: String = "") = {
    val quote = if (multiline) "\"\"\"" else "\""
s"$prefix$quote$content$quote"
}
private def exp(s: String): ScExpression = {
createExpressionFromText(s)(getProject)
}
}
| JetBrains/intellij-scala | scala/scala-impl/test/org/jetbrains/plugins/scala/format/InterpolatedStringFormatterTest.scala | Scala | apache-2.0 | 5,314 |
package filodb.coordinator.client
import scala.concurrent.{Await, Future}
import scala.concurrent.duration._
import scala.language.postfixOps
import scala.reflect.ClassTag
import akka.actor.{ActorRef, ActorSystem, Address}
import akka.pattern.ask
import akka.util.Timeout
import com.typesafe.scalalogging.StrictLogging
import filodb.coordinator.{ActorName, NodeClusterActor}
import filodb.core._
object Client {
implicit val context = GlobalScheduler.globalImplicitScheduler
def parse[T, B](cmd: => Future[T], awaitTimeout: FiniteDuration = 30 seconds)(func: T => B): B = {
func(Await.result(cmd, awaitTimeout))
}
/**
* Synchronous ask of an actor, parsing the result with a PartialFunction
*/
def actorAsk[B](actor: ActorRef, msg: Any,
askTimeout: FiniteDuration = 30 seconds)(f: PartialFunction[Any, B]): B = {
implicit val timeout = Timeout(askTimeout)
parse(actor ? msg, askTimeout)(f)
}
def asyncAsk(actor: ActorRef, msg: Any, askTimeout: FiniteDuration = 30 seconds): Future[Any] = {
implicit val timeout = Timeout(askTimeout)
actor ? msg
}
def asyncTypedAsk[T: ClassTag](actor: ActorRef, msg: Any, askTimeout: FiniteDuration = 30 seconds): Future[T] = {
implicit val timeout = Timeout(askTimeout)
(actor ? msg).mapTo[T]
}
def standardResponse[B](partial: PartialFunction[Any, B]): PartialFunction[Any, B] =
(partial orElse {
case other: ErrorResponse => throw ClientException(other)
case other: Any => throw new RuntimeException(s"Unexpected response message: $other")
})
def actorsAsk[B](actors: Seq[ActorRef], msg: Any,
askTimeout: FiniteDuration = 30 seconds)(f: PartialFunction[Any, B]): Seq[B] = {
implicit val timeout = Timeout(askTimeout)
val fut = Future.sequence(actors.map(_ ? msg))
Await.result(fut, askTimeout).map(f)
}
/**
* Creates a LocalClient that remotely connects to a standalone FiloDB node NodeCoordinator.
* @param host the full host string (without port) or IP address where the FiloDB standalone node resides
* @param port the Akka port number for remote connectivity
* @param system the ActorSystem to connect to
*/
def standaloneClient(system: ActorSystem,
host: String,
port: Int = 2552,
askTimeout: FiniteDuration = 30 seconds): LocalClient = {
val addr = Address("akka.tcp", "filo-standalone", host, port)
val refFuture = system.actorSelection(ActorName.nodeCoordinatorPath(addr))
.resolveOne(askTimeout)
new LocalClient(Await.result(refFuture, askTimeout))
}
}
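// Hypothetical illustration (not part of the original file): a synchronous ask
// against an already-resolved coordinator ActorRef, as described by the actorAsk
// scaladoc above. The "status" message and the String reply type are invented for
// the sketch; a real deployment has its own command protocol.
object ActorAskSketch {
  def statusOf(coordinator: ActorRef): String =
    Client.actorAsk(coordinator, "status", 10.seconds) {
      case s: String => s
    }
}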
case class ClientException(error: ErrorResponse) extends Exception(error.toString)
trait ClientBase {
/**
* Convenience standard function for sending a message to one NodeCoordinator and parsing responses.
* (Which one depends on the specific client)
* @param msg the message to send
* @param askTimeout timeout for expecting a response
* @param f the partialFunction for processing responses. Does not need to deal with ErrorResponses,
* as that will automatically be handled by the fallback function defined in standardResponse -
* unless it is desired to override that
*/
def askCoordinator[B](msg: Any, askTimeout: FiniteDuration = 30 seconds)(f: PartialFunction[Any, B]): B
/**
* Sends a message to ALL the coordinators, parsing the responses and returning a sequence
*/
def askAllCoordinators[B](msg: Any, askTimeout: FiniteDuration = 30 seconds)(f: PartialFunction[Any, B]):
Seq[B]
/**
* Sends a message to ALL coordinators without waiting for a response
*/
def sendAllIngestors(msg: Any): Unit
def clusterActor: Option[ActorRef]
}
trait AllClientOps extends IngestionOps with QueryOps with ClusterOps
/**
* Standard client for a local FiloDB coordinator actor, which takes reference to a single NodeCoordinator
* For example, this would be used by the CLI.
*/
class LocalClient(val nodeCoordinator: ActorRef) extends AllClientOps {
def askCoordinator[B](msg: Any, askTimeout: FiniteDuration = 30 seconds)(f: PartialFunction[Any, B]): B =
Client.actorAsk(nodeCoordinator, msg, askTimeout)(Client.standardResponse(f))
def askAllCoordinators[B](msg: Any, askTimeout: FiniteDuration = 30 seconds)(f: PartialFunction[Any, B]):
Seq[B] = Seq(askCoordinator(msg, askTimeout)(f))
def sendAllIngestors(msg: Any): Unit = { nodeCoordinator ! msg }
// Always get the cluster actor ref anew. Cluster actor may move around the cluster!
def clusterActor: Option[ActorRef] =
askCoordinator(MiscCommands.GetClusterActor) { case x: Option[ActorRef] @unchecked => x }
}
/**
* A client for connecting to a cluster of NodeCoordinators.
* @param nodeClusterActor ActorRef to an instance of NodeClusterActor
* @param ingestionRole the role of the cluster members doing the ingestion
* @param metadataRole the role of the cluster member handling metadata updates
*/
class ClusterClient(nodeClusterActor: ActorRef,
ingestionRole: String,
metadataRole: String) extends AllClientOps with StrictLogging {
import NodeClusterActor._
def askCoordinator[B](msg: Any, askTimeout: FiniteDuration = 30 seconds)(f: PartialFunction[Any, B]): B =
Client.actorAsk(nodeClusterActor, ForwardToOne(metadataRole, msg), askTimeout)(
Client.standardResponse(f))
def askAllCoordinators[B](msg: Any, askTimeout: FiniteDuration = 30 seconds)(f: PartialFunction[Any, B]):
Seq[B] = {
implicit val timeout = Timeout(askTimeout)
val coords: Set[ActorRef] = Await.result(nodeClusterActor ? GetRefs(ingestionRole), askTimeout) match {
case refs: Set[ActorRef] @unchecked => refs
case NoSuchRole => throw ClientException(NoSuchRole)
}
logger.debug(s"Sending message $msg to coords $coords, addresses ${coords.map(_.path.address)}...")
Client.actorsAsk(coords.toSeq, msg, askTimeout)(Client.standardResponse(f))
}
def sendAllIngestors(msg: Any): Unit = nodeClusterActor ! Broadcast(ingestionRole, msg)
val clusterActor = Some(nodeClusterActor)
}
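// Hypothetical usage sketch (not part of the original file): connecting to a
// standalone FiloDB node and issuing one ask through the LocalClient, as the
// standaloneClient and askCoordinator scaladocs above describe. The host name is
// illustrative only.
object LocalClientSketch {
  def clusterActorOf(system: ActorSystem, host: String): Option[ActorRef] = {
    val client = Client.standaloneClient(system, host)
    // askCoordinator wraps the partial function with standardResponse, so an
    // ErrorResponse reply surfaces as a ClientException instead of a silent timeout.
    client.askCoordinator(MiscCommands.GetClusterActor) {
      case x: Option[ActorRef] @unchecked => x
    }
  }
}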
| filodb/FiloDB | coordinator/src/main/scala/filodb.coordinator/client/Client.scala | Scala | apache-2.0 | 6,208 |
package com.artclod.mathml
import org.junit.runner.RunWith
import org.specs2.runner.JUnitRunner
import scala.xml._
import play.api.test._
import play.api.test.Helpers._
import org.specs2.mutable._
import com.artclod.mathml.scalar._
import com.artclod.mathml.scalar.apply._
import com.artclod.mathml.scalar.apply.trig._
import com.artclod.mathml.Match._
// LATER try out http://rlegendi.github.io/specs2-runner/ and remove RunWith
@RunWith(classOf[JUnitRunner])
class MathMLSpec extends Specification {
"apply" should {
"fail to parse non MathML" in {
MathML(<not_math_ml_tag> </not_math_ml_tag>).isFailure must beTrue
}
"be able to parse numbers" in {
val xml = <cn>5</cn>
val mathML = `5`
MathML(xml).get must beEqualTo(mathML)
}
"be able to parse variables" in {
val xml = <ci>x</ci>
val mathML = x
MathML(xml).get must beEqualTo(mathML)
}
"be able to parse plus with one argument" in {
val xml = <apply> <plus/> <cn>5</cn> </apply>
val mathML = ApplyPlus(`5`)
MathML(xml).get must beEqualTo(mathML)
}
"be able to parse plus with two arguments" in {
val xml = <apply> <plus/> <cn>5</cn> <cn>5</cn> </apply>
val mathML = ApplyPlus(`5`, `5`)
MathML(xml).get must beEqualTo(mathML)
}
"be able to parse plus with more then two arguments" in {
val xml = <apply> <plus/> <cn>5</cn> <cn>4</cn> <cn>3</cn> </apply>
val mathML = ApplyPlus(`5`, `4`, `3`)
MathML(xml).get must beEqualTo(mathML)
}
"be able to parse minus with one argument" in {
val xml = <apply> <minus/> <cn>5</cn> </apply>
val mathML = ApplyMinusU(`5`)
MathML(xml).get must beEqualTo(mathML)
}
"be able to parse minus with two arguments" in {
val xml = <apply> <minus/> <cn>5</cn> <cn>5</cn> </apply>
val mathML = ApplyMinusB(`5`, `5`)
MathML(xml).get must beEqualTo(mathML)
}
"fail to parse minus with more then two arguments" in {
MathML(<apply> <minus/> <cn>5</cn> <cn>4</cn> <cn>3</cn> </apply>).isFailure must beTrue
}
"be able to parse times" in {
val xml = <apply> <times/> <cn>5</cn> <cn>5</cn> </apply>
val mathML = ApplyTimes(`5`, `5`)
MathML(xml).get must beEqualTo(mathML)
}
"be able to parse times with more then two arguments" in {
val xml = <apply> <times/> <cn>5</cn> <cn>4</cn> <cn>3</cn> </apply>
val mathML = ApplyTimes(`5`, `4`, `3`)
MathML(xml).get must beEqualTo(mathML)
}
"be able to parse divide" in {
val xml = <apply> <divide/> <cn>5</cn> <cn>5</cn> </apply>
val mathML = ApplyDivide(`5`, `5`)
MathML(xml).get must beEqualTo(mathML)
}
"fail to parse divide with more then two arguments" in {
MathML(<apply> <divide/> <cn>5</cn> <cn>4</cn> <cn>3</cn> </apply>).isFailure must beTrue
}
"be able to parse power" in {
val xml = <apply> <power/> <cn>5</cn> <cn>5</cn> </apply>
val mathML = ApplyPower(`5`, `5`)
MathML(xml).get must beEqualTo(mathML)
}
"fail to parse power with more then two arguments" in {
MathML(<apply> <power/> <cn>5</cn> <cn>4</cn> <cn>3</cn> </apply>).isFailure must beTrue
}
"be able to parse nested applys" in {
val xml = <apply> <plus/> <apply> <plus/> <cn>4</cn> <cn>4</cn> </apply> <cn>5</cn> <cn>5</cn> </apply>
val mathML = ApplyPlus(ApplyPlus(`4`, `4`), `5`, `5`)
MathML(xml).get must beEqualTo(mathML)
}
"be able to parse log with base" in {
val xml = <apply> <log/> <logbase> <cn>4</cn> </logbase> <cn>16</cn> </apply>
val mathML = ApplyLog(4, `16`)
MathML(xml).get must beEqualTo(mathML)
}
"fail to parse log with a cn instead of a logbase" in {
MathML(<apply> <log/> <cn>4</cn> <cn>16</cn> </apply>).isFailure must beTrue
}
"parse log without base as log 10" in {
val xml = <apply> <log/> <cn>16</cn> </apply>
val mathML = ApplyLog10(`16`)
MathML(xml).get must beEqualTo(mathML)
}
"parse e" in {
val xml = <exponentiale/>
MathML(xml).get must beEqualTo(ExponentialE)
}
"parse e nested" in {
val xml = <apply> <plus/> <ci>x</ci> <exponentiale/> </apply>
MathML(xml).get must beEqualTo(x + e)
}
"parse pi" in {
val xml = <pi/>
MathML(xml).get must beEqualTo(π)
}
"be able to parse cos" in {
MathML(<apply> <cos/> <pi/> </apply>).get must beEqualTo(ApplyCos(π))
}
"be able to parse cot" in {
MathML(<apply> <cot/> <pi/> </apply>).get must beEqualTo(ApplyCot(π))
}
"be able to parse csc" in {
MathML(<apply> <csc/> <pi/> </apply>).get must beEqualTo(ApplyCsc(π))
}
"be able to parse sec" in {
MathML(<apply> <sec/> <pi/> </apply>).get must beEqualTo(ApplySec(π))
}
"be able to parse sin" in {
MathML(<apply> <sin/> <pi/> </apply>).get must beEqualTo(ApplySin(π))
}
"be able to parse tan" in {
MathML(<apply> <tan/> <pi/> </apply>).get must beEqualTo(ApplyTan(π))
}
"be able to parse root with a degree" in {
MathML(<apply> <root/> <degree> <cn>3</cn> </degree> <ci>x</ci> </apply>).get must beEqualTo(ApplyRoot(3, x))
}
"be able to parse root with no specified degree as sqrt" in {
MathML(<apply> <root/> <ci>x</ci> </apply>).get must beEqualTo(ApplySqrt(x))
}
}
}
| kristiankime/web-education-games | test/com/artclod/mathml/MathMLSpec.scala | Scala | mit | 5,123 |
package chat.tox.antox.av
import android.content.Context
import android.content.res.AssetFileDescriptor
import android.media.MediaPlayer.OnCompletionListener
import android.media.{AudioManager, MediaPlayer, RingtoneManager}
import chat.tox.antox.R
import chat.tox.antox.utils.MediaUtils
/**
* Attach to a call and add sounds for the appropriate call events.
*/
class CallSounds(val call: Call, val context: Context) extends CallEnhancement {
val ended = MediaUtils.setupSound(context, R.raw.end_call, AudioManager.STREAM_VOICE_CALL, looping = false)
val ringback: MediaPlayer = MediaUtils.setupSound(context, R.raw.ringback_tone, AudioManager.STREAM_VOICE_CALL, looping = true)
val maybeRingtone = findRingtone()
def findRingtone(): Option[MediaPlayer] = {
val maybeRingtoneUri = Option(RingtoneManager.getActualDefaultRingtoneUri(context, RingtoneManager.TYPE_RINGTONE))
maybeRingtoneUri.map(ringtoneUri => {
val ringtone =
try {
val tempRingtone = new MediaPlayer()
tempRingtone.setDataSource(context, ringtoneUri)
tempRingtone
} catch {
case e: Exception =>
val afd: AssetFileDescriptor = context.getResources.openRawResourceFd(R.raw.incoming_call)
val backupRingtone: MediaPlayer = new MediaPlayer()
backupRingtone.setDataSource(afd.getFileDescriptor, afd.getStartOffset, afd.getLength)
backupRingtone
}
ringtone.setAudioStreamType(AudioManager.STREAM_RING)
ringtone.setLooping(true)
ringtone.prepare()
ringtone
})
}
// Add subscriptions for call events mapping to sounds
subscriptions +=
call.ringingObservable.distinctUntilChanged.subscribe(ringing => {
if (call.incoming) {
if (ringing) {
maybeRingtone.foreach(_.start())
} else {
maybeRingtone.foreach(_.stop())
}
} else {
if (ringing) {
ringback.start()
} else {
ringback.stop()
}
}
})
subscriptions +=
call.endedObservable.subscribe(_ => {
onEnd()
})
private def onEnd(): Unit = {
ended.start()
ended.setOnCompletionListener(new OnCompletionListener {
override def onCompletion(mp: MediaPlayer): Unit = {
mp.release()
}
})
release()
}
private def release(): Unit = {
subscriptions.unsubscribe()
ringback.release()
maybeRingtone.foreach(_.release())
}
}
| wiiam/Antox | app/src/main/scala/chat/tox/antox/av/CallSounds.scala | Scala | gpl-3.0 | 2,485 |
object O {
def foo = {}
}
println(O /* */ foo)
println(O /* applicable: false */ foo())
println(O /* applicable: false */ foo 1)
println(O /* applicable: false */ foo (1))
println(O /* applicable: false */ foo (1, 2))
| ilinum/intellij-scala | testdata/resolve2/function/operator/ParametersNone.scala | Scala | apache-2.0 | 220 |
package BIDMat
import scala.collection.mutable.HashMap
import java.lang.ref._
import jcuda.NativePointerObject
class Mat(nr:Int, nc:Int) {
val nrows = nr
val ncols = nc
def dims:(Int, Int) = (nr, nc)
def length = nr*nc
def llength = 1L*nr*nc
private var _GUID = Mat.myrand.nextLong
def setGUID(v:Long):Unit = {_GUID = v}
def GUID:Long = _GUID
def notImplemented0(s:String):Mat = {
throw new RuntimeException("operator "+s+" not implemented for "+this.mytype)
}
def notImplemented1(s:String,that:Mat):Mat = {
throw new RuntimeException("operator "+s+" not implemented for "+this.mytype+" and "+that.mytype)
}
def notImplemented2(s:String,that:Float):Mat = {
throw new RuntimeException("operator "+s+" not implemented for "+this.mytype+" and Float")
}
def notImplemented2(s:String,that:Double):Mat = {
throw new RuntimeException("operator "+s+" not implemented for "+this.mytype+" and Double")
}
def notImplementedf(s:String):Float = {
throw new RuntimeException("operator "+s+" not implemented for "+this.mytype)
}
def notImplementedd(s:String):Double = {
throw new RuntimeException("operator "+s+" not implemented for "+this.mytype)
}
def notImplementedi(s:String):Int = {
throw new RuntimeException("operator "+s+" not implemented for "+this.mytype)
}
def notImplementedl(s:String):Long = {
throw new RuntimeException("operator "+s+" not implemented for "+this.mytype)
}
def t = notImplemented0("t")
def dv:Double = throw new RuntimeException("operator dv not implemented for "+this.mytype)
def mytype = "Mat"
def copyTo(a:Mat) = notImplemented0("copyTo");
def copy = notImplemented0("copy");
def newcopy = notImplemented0("newcopy");
def set(v:Float) = notImplemented0("set");
def set(v:Double) = notImplemented0("set");
def zeros(nr:Int, nc:Int) = notImplemented0("zeros");
def ones(nr:Int, nc:Int) = notImplemented0("ones");
def izeros(nr:Int, nc:Int) = notImplemented0("izeros");
def iones(nr:Int, nc:Int) = notImplemented0("iones");
def clearUpper(i:Int) = notImplemented0("clearUpper");
def clearLower(i:Int) = notImplemented0("clearLower");
def clearUpper = notImplemented0("clearUpper");
def clearLower = notImplemented0("clearLower");
def free = notImplemented0("free");
def view(nr:Int, nc:Int):Mat = notImplemented0("view");
def view(nr:Int, nc:Int, setGUID:Boolean):Mat = notImplemented0("view");
def nnz:Int = {notImplemented0("nnz"); 0}
def clear = notImplemented0("clear");
def zeros(nr:Int, nc:Int, nnz:Int):Mat = zeros(nr, nc)
def recycle(nr:Int, nc:Int, nnz:Int):Mat = notImplemented0("recycle");
def contents:Mat = notImplemented0("contents");
def colslice(a:Int, b:Int, out:Mat):Mat = notImplemented0("colslice");
def colslice(a:Int, b:Int, out:Mat, c:Int):Mat = notImplemented0("colslice");
def rowslice(a:Int, b:Int, out:Mat):Mat = notImplemented0("rowslice");
def rowslice(a:Int, b:Int, out:Mat, c:Int):Mat = notImplemented0("rowslice");
def colslice(a:Int, b:Int):Mat = notImplemented0("colslice");
def rowslice(a:Int, b:Int):Mat = notImplemented0("rowslice");
def apply(a:IMat):Mat = notImplemented0("linear array access");
def apply(a:IMat, b:IMat):Mat = notImplemented0("block array access");
def apply(a:IMat, b:Int):Mat = notImplemented0("block array access");
def apply(a:Int, b:IMat):Mat = notImplemented0("block array access");
def apply(a:GIMat):Mat = notImplemented0("linear array access");
def apply(a:GIMat, b:GIMat):Mat = notImplemented0("block array access");
def apply(a:GIMat, b:Int):Mat = notImplemented0("block array access");
def apply(a:Int, b:GIMat):Mat = notImplemented0("block array access");
def apply(a:IMat, b:GIMat):Mat = notImplemented0("block array access");
def apply(a:GIMat, b:IMat):Mat = notImplemented0("block array access");
def apply(a:Mat):Mat = notImplemented0("linear array access");
def apply(a:Mat, b:Mat):Mat = notImplemented0("block array access");
def apply(a:Mat, b:Int):Mat = notImplemented0("block array access");
def apply(a:Int, b:Mat):Mat = notImplemented0("block array access");
def update(a:IMat, b:Mat) = notImplemented0("linear update");
def update(a:IMat, b:IMat, m:Mat) = notImplemented0("block update");
def update(a:IMat, b:Int, m:Mat) = notImplemented0("block update");
def update(a:Int, b:IMat, m:Mat) = notImplemented0("block update");
def update(a:IMat, b:Int) = notImplemented0("linear update");
def update(a:IMat, b:IMat, c:Int) = notImplemented0("block update");
def update(a:IMat, b:Int, c:Int) = notImplemented0("block update");
def update(a:Int, b:IMat, c:Int) = notImplemented0("block update");
def update(a:IMat, b:Float) = notImplemented0("linear update");
def update(a:IMat, b:IMat, c:Float) = notImplemented0("block update");
def update(a:IMat, b:Int, c:Float) = notImplemented0("block update");
def update(a:Int, b:IMat, c:Float) = notImplemented0("block update");
def update(a:IMat, b:Double) = notImplemented0("linear update");
def update(a:IMat, b:IMat, c:Double) = notImplemented0("block update");
def update(a:IMat, b:Int, c:Double) = notImplemented0("block update");
def update(a:Int, b:IMat, c:Double) = notImplemented0("block update");
def update(a:IMat, b:Long) = notImplemented0("linear update");
def update(a:IMat, b:IMat, c:Long) = notImplemented0("block update");
def update(a:IMat, b:Int, c:Long) = notImplemented0("block update");
def update(a:Int, b:IMat, c:Long) = notImplemented0("block update");
def update(a:GIMat, b:Mat) = notImplemented0("linear update");
def update(a:GIMat, b:GIMat, m:Mat) = notImplemented0("block update");
def update(a:GIMat, b:Int, m:Mat) = notImplemented0("block update");
def update(a:Int, b:GIMat, m:Mat) = notImplemented0("block update");
def update(a:GIMat, b:IMat, m:Mat) = notImplemented0("block update");
def update(a:IMat, b:GIMat, m:Mat) = notImplemented0("block update");
def update(a:GIMat, b:Int) = notImplemented0("linear update");
def update(a:GIMat, b:GIMat, c:Int) = notImplemented0("block update");
def update(a:GIMat, b:Int, c:Int) = notImplemented0("block update");
def update(a:Int, b:GIMat, c:Int) = notImplemented0("block update");
def update(a:GIMat, b:IMat, c:Int) = notImplemented0("block update");
def update(a:IMat, b:GIMat, c:Int) = notImplemented0("block update");
def update(a:GIMat, b:Float) = notImplemented0("linear update");
def update(a:GIMat, b:GIMat, c:Float) = notImplemented0("block update");
def update(a:GIMat, b:Int, c:Float) = notImplemented0("block update");
def update(a:Int, b:GIMat, c:Float) = notImplemented0("block update");
def update(a:GIMat, b:IMat, c:Float) = notImplemented0("block update");
def update(a:IMat, b:GIMat, c:Float) = notImplemented0("block update");
def update(a:GIMat, b:Double) = notImplemented0("linear update");
def update(a:GIMat, b:GIMat, c:Double) = notImplemented0("block update");
def update(a:GIMat, b:Int, c:Double) = notImplemented0("block update");
def update(a:Int, b:GIMat, c:Double) = notImplemented0("block update");
def update(a:GIMat, b:IMat, c:Double) = notImplemented0("block update");
def update(a:IMat, b:GIMat, c:Double) = notImplemented0("block update");
def update(a:Mat, v:Mat):Mat = notImplemented0("linear update");
def update(a:Mat, b:Mat, v:Mat):Mat = notImplemented0("block update");
def update(a:Mat, b:Int, v:Mat):Mat = notImplemented0("block update");
def update(a:Int, b:Mat, v:Mat):Mat = notImplemented0("block update");
def update(a:Mat, v:Int):Mat = notImplemented0("linear update");
def update(a:Mat, b:Mat, v:Int):Mat = notImplemented0("block update");
def update(a:Mat, b:Int, v:Int):Mat = notImplemented0("block update");
def update(a:Int, b:Mat, v:Int):Mat = notImplemented0("block update");
def update(a:Mat, v:Float):Mat = notImplemented0("linear update");
def update(a:Mat, b:Mat, v:Float):Mat = notImplemented0("block update");
def update(a:Mat, b:Int, v:Float):Mat = notImplemented0("block update");
def update(a:Int, b:Mat, v:Float):Mat = notImplemented0("block update");
def update(a:Mat, v:Double):Mat = notImplemented0("linear update");
def update(a:Mat, b:Mat, v:Double):Mat = notImplemented0("block update");
def update(a:Mat, b:Int, v:Double):Mat = notImplemented0("block update");
def update(a:Int, b:Mat, v:Double):Mat = notImplemented0("block update");
def update(a:Mat, v:Long):Mat = notImplemented0("linear update");
def update(a:Mat, b:Mat, v:Long):Mat = notImplemented0("block update");
def update(a:Mat, b:Int, v:Long):Mat = notImplemented0("block update");
def update(a:Int, b:Mat, v:Long):Mat = notImplemented0("block update");
def update(a:Int, v:Float):Mat = notImplemented0("linear update");
def update(a:Int, v:Double):Mat = notImplemented0("linear update");
def update(a:Int, v:Int):Mat = notImplemented0("linear update");
def update(a:Int, v:Long):Mat = notImplemented0("linear update");
def update(a:Int, b:Int, v:Float):Mat = notImplemented0("update");
def update(a:Int, b:Int, v:Double):Mat = notImplemented0("update");
def update(a:Int, b:Int, v:Int):Mat = notImplemented0("update");
def update(a:Int, b:Int, v:Long):Mat = notImplemented0("update");
def tileMult(nr:Int, nc:Int, kk:Int, aroff:Int, acoff:Int, b:Mat, broff:Int, bcoff:Int, c:Mat, croff:Int, ccoff:Int):Mat =
notImplemented0("tileMult");
def tileMultT(nr:Int, nc:Int, kk:Int, aroff:Int, acoff:Int, b:Mat, broff:Int, bcoff:Int, c:Mat, croff:Int, ccoff:Int):Mat =
notImplemented0("tileMultT");
def blockGemm(transa:Int, transb:Int, nr:Int, nc:Int, reps:Int, aoff:Int, lda:Int, astep:Int,
b:Mat, boff:Int, ldb:Int, bstep:Int, c:Mat, coff:Int, ldc:Int, cstep:Int):Mat = notImplemented0("blockGemm");
def unary_-():Mat = notImplemented1("-", this)
def + (b : Mat):Mat = notImplemented1("+", b)
def - (b : Mat):Mat = notImplemented1("-", b)
def * (b : Mat):Mat = notImplemented1("*", b)
def *^ (b : Mat):Mat = notImplemented1("*^", b)
def xT (b : Mat):Mat = notImplemented1("*", b)
def Tx (b : Mat):Mat = notImplemented1("*", b)
def ^* (b : Mat):Mat = notImplemented1("*^", b)
def ** (b : Mat):Mat = notImplemented1("**", b)
def ⊗ (b : Mat):Mat = notImplemented1("⊗", b) // unicode 8855, 0x2297
def /< (b : Mat):Mat = notImplemented1("/<", b)
def ∘ (b : Mat):Mat = notImplemented1("∘", b) // unicode 8728, 0x2218
def *@ (b : Mat):Mat = notImplemented1("*@", b)
def / (b : Mat):Mat = notImplemented1("/", b)
  def \\ (b : Mat):Mat = notImplemented1("\\\\", b)
def ^ (b : Mat):Mat = notImplemented1("^", b)
def ◁ (b : Mat):Mat = notImplemented1("◁", b) // unicode 9665, 0x25C1
def ▷ (b : Mat):Mat = notImplemented1("▷", b) // unicode 9666, 0x25C2
def dot (b : Mat):Mat = notImplemented1("dot", b)
def dotr (b : Mat):Mat = notImplemented1("dotr", b)
def ∙ (b : Mat):Mat = notImplemented1("dot", b) // unicode 8729, 0x2219
def ∙→ (b : Mat):Mat = notImplemented1("dotr", b) // unicode (8729, 8594) (0x2219, 0x2192)
def > (b : Mat):Mat = notImplemented1(">", b)
def < (b : Mat):Mat = notImplemented1("<", b)
def >= (b : Mat):Mat = notImplemented1(">=", b)
def <= (b : Mat):Mat = notImplemented1("<=", b)
def == (b : Mat):Mat = notImplemented1("==", b)
def === (b : Mat):Mat = notImplemented1("===", b)
def != (b : Mat):Mat = notImplemented1("!=", b)
def * (b : Float):Mat = notImplemented2("*", b)
def + (b : Float):Mat = notImplemented2("+", b)
def - (b : Float):Mat = notImplemented2("-", b)
def *@ (b : Float):Mat = notImplemented2("*@", b)
def ∘ (b : Float):Mat = notImplemented2("∘", b)
def / (b : Float):Mat = notImplemented2("/", b)
def ^ (b : Float):Mat = notImplemented2("^", b)
def > (b : Float):Mat = notImplemented2(">", b)
def < (b : Float):Mat = notImplemented2("<", b)
def >= (b : Float):Mat = notImplemented2(">=", b)
def <= (b : Float):Mat = notImplemented2("<=", b)
def == (b : Float):Mat = notImplemented2("==", b)
def === (b : Float):Mat = notImplemented2("===", b)
def != (b : Float):Mat = notImplemented2("!=", b)
def * (b : Int):Mat = notImplemented2("*", b)
def + (b : Int):Mat = notImplemented2("+", b)
def - (b : Int):Mat = notImplemented2("-", b)
def *@ (b : Int):Mat = notImplemented2("*@", b)
def ∘ (b : Int):Mat = notImplemented2("∘", b)
def / (b : Int):Mat = notImplemented2("/", b)
def ^ (b : Int):Mat = notImplemented2("^", b)
def > (b : Int):Mat = notImplemented2(">", b)
def < (b : Int):Mat = notImplemented2("<", b)
def >= (b : Int):Mat = notImplemented2(">=", b)
def <= (b : Int):Mat = notImplemented2("<=", b)
def == (b : Int):Mat = notImplemented2("==", b)
def === (b : Int):Mat = notImplemented2("===", b)
def != (b : Int):Mat = notImplemented2("!=", b)
def * (b : Double):Mat = notImplemented2("*", b)
def + (b : Double):Mat = notImplemented2("+", b)
def - (b : Double):Mat = notImplemented2("-", b)
def *@ (b : Double):Mat = notImplemented2("*@", b)
def ∘ (b : Double):Mat = notImplemented2("∘", b)
def / (b : Double):Mat = notImplemented2("/", b)
def ^ (b : Double):Mat = notImplemented2("^", b)
def > (b : Double):Mat = notImplemented2(">", b)
def < (b : Double):Mat = notImplemented2("<", b)
def >= (b : Double):Mat = notImplemented2(">=", b)
def <= (b : Double):Mat = notImplemented2("<=", b)
def == (b : Double):Mat = notImplemented2("==", b)
def === (b : Double):Mat = notImplemented2("===", b)
def != (b : Double):Mat = notImplemented2("!=", b)
def <-- (b : Mat):Mat = b.copyTo(this)
  def \ (b : Mat):Mat = notImplemented1("\\", b)
def on (b : Mat):Mat = notImplemented1("on", b)
def ~ (b : Mat):Pair = b match {
case bb:FMat => new FPair(this, bb)
case bb:DMat => new DPair(this, bb)
case bb:IMat => new IPair(this, bb)
case bb:SMat => new SPair(this, bb)
case bb:SDMat => new SDPair(this, bb)
case bb:CMat => new CPair(this, bb)
case bb:GMat => new GPair(this, bb)
case bb:GIMat => new GIPair(this, bb)
case bb:GDMat => new GDPair(this, bb)
case bb:GLMat => new GLPair(this, bb)
}
def ddot (b : Mat):Double = {notImplemented1("ddot", b); 0}
def ∙∙ (b : Mat):Double = {notImplemented1("ddot", b); 0}
def ^* (b : DSPair):Mat = notImplemented0("^*")
def Tx (b : DSPair):Mat = notImplemented0("Tx")
def @@ (b : Mat):DSPair = (this, b) match {
case (aa:FMat, bb:SMat) => new FDSPair(aa, bb)
case (aa:GMat, bb:GSMat) => new GDSPair(aa, bb)
}
}
abstract class DSPair {}
abstract class Pair {
def notImplemented0(s:String):Mat = {
throw new RuntimeException("operator "+s+" not implemented for "+this)
}
def notImplemented1(s:String,that:Mat):Mat = {
throw new RuntimeException("operator "+s+" not implemented for "+this+" and "+that.mytype)
}
def t = notImplemented0("t")
def + (b : Mat):Mat = notImplemented1("+", b)
def - (b : Mat):Mat = notImplemented1("-", b)
def * (b : Mat):Mat = notImplemented1("*", b)
def xT (b : Mat):Mat = notImplemented1("xT", b)
def *^ (b : Mat):Mat = notImplemented1("*^", b)
def Tx (b : Mat):Mat = notImplemented1("Tx", b)
def ^* (b : Mat):Mat = notImplemented1("*^", b)
def /< (b : Mat):Mat = notImplemented1("/<", b)
def *@ (b : Mat):Mat = notImplemented1("*@", b)
def ∘ (b : Mat):Mat = notImplemented1("∘", b)
def / (b : Mat):Mat = notImplemented1("/", b)
  def \\ (b : Mat):Mat = notImplemented1("\\\\", b)
def ^ (b : Mat):Mat = notImplemented1("^", b)
def ◁ (b : Mat):Mat = notImplemented1("◁", b)
def ▷ (b : Mat):Mat = notImplemented1("▷", b)
def dot (b : Mat):Mat = notImplemented1("dot", b)
def dotr (b : Mat):Mat = notImplemented1("dotr", b)
def ∙ (b : Mat):Mat = notImplemented1("dot", b)
def ∙→ (b : Mat):Mat = notImplemented1("dotr", b)
def ** (b : Mat):Mat = notImplemented1("**", b)
def ⊗ (b : Mat):Mat = notImplemented1("⊗", b)
def > (b : Mat):Mat = notImplemented1(">", b)
def < (b : Mat):Mat = notImplemented1("<", b)
def >= (b : Mat):Mat = notImplemented1(">=", b)
def <= (b : Mat):Mat = notImplemented1("<=", b)
def == (b : Mat):Mat = notImplemented1("==", b)
def === (b : Mat):Mat = notImplemented1("===", b)
def != (b : Mat):Mat = notImplemented1("!=", b)
  def \ (b : Mat):Mat = notImplemented1("\\", b)
def on (b : Mat):Mat = notImplemented1("on", b)
def + (b : Float):Mat = notImplemented0("+")
def - (b : Float):Mat = notImplemented0("-")
def * (b : Float):Mat = notImplemented0("*")
def xT (b : Float):Mat = notImplemented0("xT")
def *^ (b : Float):Mat = notImplemented0("*^")
def Tx (b : Float):Mat = notImplemented0("Tx")
def ^* (b : Float):Mat = notImplemented0("*^")
def /< (b : Float):Mat = notImplemented0("/<")
def *@ (b : Float):Mat = notImplemented0("*@")
def ∘ (b : Float):Mat = notImplemented0("∘")
def / (b : Float):Mat = notImplemented0("/")
  def \\ (b : Float):Mat = notImplemented0("\\\\")
def ^ (b : Float):Mat = notImplemented0("^")
def ◁ (b : Float):Mat = notImplemented0("◁")
def ▷ (b : Float):Mat = notImplemented0("▷")
def dot (b : Float):Mat = notImplemented0("dot")
def dotr (b : Float):Mat = notImplemented0("dotr")
def ∙ (b : Float):Mat = notImplemented0("dot")
def ∙→ (b : Float):Mat = notImplemented0("dotr")
def > (b : Float):Mat = notImplemented0(">")
def < (b : Float):Mat = notImplemented0("<")
def >= (b : Float):Mat = notImplemented0(">=")
def <= (b : Float):Mat = notImplemented0("<=")
def == (b : Float):Mat = notImplemented0("==")
def === (b : Float):Mat = notImplemented0("===")
def != (b : Float):Mat = notImplemented0("!=")
  def \ (b : Float):Mat = notImplemented0("\\")
def on (b : Float):Mat = notImplemented0("on")
def + (b : Int):Mat = notImplemented0("+")
def - (b : Int):Mat = notImplemented0("-")
def * (b : Int):Mat = notImplemented0("*")
def xT (b : Int):Mat = notImplemented0("xT")
def *^ (b : Int):Mat = notImplemented0("*^")
def Tx (b : Int):Mat = notImplemented0("Tx")
def ^* (b : Int):Mat = notImplemented0("*^")
def /< (b : Int):Mat = notImplemented0("/<")
def *@ (b : Int):Mat = notImplemented0("*@")
def ∘ (b : Int):Mat = notImplemented0("∘")
def / (b : Int):Mat = notImplemented0("/")
  def \\ (b : Int):Mat = notImplemented0("\\\\")
def ^ (b : Int):Mat = notImplemented0("^")
def ◁ (b : Int):Mat = notImplemented0("◁")
def ▷ (b : Int):Mat = notImplemented0("▷")
def dot (b : Int):Mat = notImplemented0("dot")
def dotr (b : Int):Mat = notImplemented0("dotr")
def ∙ (b : Int):Mat = notImplemented0("dot")
def ∙→ (b : Int):Mat = notImplemented0("dotr")
def > (b : Int):Mat = notImplemented0(">")
def < (b : Int):Mat = notImplemented0("<")
def >= (b : Int):Mat = notImplemented0(">=")
def <= (b : Int):Mat = notImplemented0("<=")
def == (b : Int):Mat = notImplemented0("==")
def === (b : Int):Mat = notImplemented0("===")
def != (b : Int):Mat = notImplemented0("!=")
  def \ (b : Int):Mat = notImplemented0("\\")
def on (b : Int):Mat = notImplemented0("on")
def + (b : Long):Mat = notImplemented0("+")
def - (b : Long):Mat = notImplemented0("-")
def * (b : Long):Mat = notImplemented0("*")
def xT (b : Long):Mat = notImplemented0("xT")
def *^ (b : Long):Mat = notImplemented0("*^")
def Tx (b : Long):Mat = notImplemented0("Tx")
def ^* (b : Long):Mat = notImplemented0("*^")
def /< (b : Long):Mat = notImplemented0("/<")
def *@ (b : Long):Mat = notImplemented0("*@")
def ∘ (b : Long):Mat = notImplemented0("∘")
def / (b : Long):Mat = notImplemented0("/")
  def \\ (b : Long):Mat = notImplemented0("\\\\")
def ^ (b : Long):Mat = notImplemented0("^")
def ◁ (b : Long):Mat = notImplemented0("◁")
def ▷ (b : Long):Mat = notImplemented0("▷")
def dot (b : Long):Mat = notImplemented0("dot")
def dotr (b : Long):Mat = notImplemented0("dotr")
def ∙ (b : Long):Mat = notImplemented0("dot")
def ∙→ (b : Long):Mat = notImplemented0("dotr")
def > (b : Long):Mat = notImplemented0(">")
def < (b : Long):Mat = notImplemented0("<")
def >= (b : Long):Mat = notImplemented0(">=")
def <= (b : Long):Mat = notImplemented0("<=")
def == (b : Long):Mat = notImplemented0("==")
def === (b : Long):Mat = notImplemented0("===")
def != (b : Long):Mat = notImplemented0("!=")
  def \ (b : Long):Mat = notImplemented0("\\")
def on (b : Long):Mat = notImplemented0("on")
def + (b : Double):Mat = notImplemented0("+")
def - (b : Double):Mat = notImplemented0("-")
def * (b : Double):Mat = notImplemented0("*")
def xT (b : Double):Mat = notImplemented0("xT")
def *^ (b : Double):Mat = notImplemented0("*^")
def Tx (b : Double):Mat = notImplemented0("Tx")
def ^* (b : Double):Mat = notImplemented0("*^")
def /< (b : Double):Mat = notImplemented0("/<")
def *@ (b : Double):Mat = notImplemented0("*@")
def ∘ (b : Double):Mat = notImplemented0("∘")
def / (b : Double):Mat = notImplemented0("/")
  def \\ (b : Double):Mat = notImplemented0("\\\\")
def ^ (b : Double):Mat = notImplemented0("^")
def ◁ (b : Double):Mat = notImplemented0("◁")
def ▷ (b : Double):Mat = notImplemented0("▷")
def dot (b : Double):Mat = notImplemented0("dot")
def dotr (b : Double):Mat = notImplemented0("dotr")
def ∙ (b : Double):Mat = notImplemented0("dot")
def ∙→ (b : Double):Mat = notImplemented0("dotr")
def > (b : Double):Mat = notImplemented0(">")
def < (b : Double):Mat = notImplemented0("<")
def >= (b : Double):Mat = notImplemented0(">=")
def <= (b : Double):Mat = notImplemented0("<=")
def == (b : Double):Mat = notImplemented0("==")
def === (b : Double):Mat = notImplemented0("===")
def != (b : Double):Mat = notImplemented0("!=")
  def \ (b : Double):Mat = notImplemented0("\\")
def on (b : Double):Mat = notImplemented0("on")
}
object Mat {
import Ordered._
import jline.TerminalFactory;
var terminal = TerminalFactory.create;
def terminalWidth = math.max(terminal.getWidth,80);
var useCache = false // Use matrix caching
var recycleGrow = 1.2 // For caching, amount to grow re-allocated matrices
var hasCUDA = 0 // Number of available CUDA GPUs
var useMKL:Boolean = true // Use MKL libs
var debugMem = false // Debug GPU mem calls
var debugMemThreshold = 1000;
var compressType = 1 // For HDF5 I/O, 0=none, 1=zlib, 2=szip
var compressionLevel = 3 // for HDF5 zlib
var chunkSize = 1024*1024 // for HDF5 compression
var szipBlock = 32 // HDF5 szip block size
var numThreads = Runtime.getRuntime().availableProcessors();
var numOMPthreads = numThreads;
var nflops = 0L
var oneBased = 0 // Whether matrix indices are 0: zero-based (like C) or 1: one-based (like Matlab)
var ioneBased = 1 // Whether sparse matrix *internal* indices are zero 0: or one-based 1:
var useGPUsort = true
var hostAllocSize = 0xffffffffL
final val MSEED:Int = 1452462553
final val myrand = new java.util.Random(MSEED)
val opcodes = HashMap.empty[String, Int]
val _opcode = 1
var useStdio = (! System.getProperty("os.name").startsWith("Windows")) // HDF5 directive
private val _cache2 = HashMap.empty[Tuple2[Long,Int], Mat] // Matrix caches
private val _cache3 = HashMap.empty[Tuple3[Long,Long,Int], Mat]
private val _cache4 = HashMap.empty[Tuple4[Long,Long,Long,Int], Mat]
def cache2(key:Tuple2[Long,Int]):Mat = {
_cache2.synchronized {
if (_cache2.contains(key)) {
_cache2(key)
} else {
null
}
}
}
def cache3(key:Tuple3[Long,Long,Int]):Mat = {
_cache3.synchronized {
if (_cache3.contains(key)) {
_cache3(key)
} else {
null
}
}
}
def cache4(key:Tuple4[Long,Long,Long,Int]):Mat = {
_cache4.synchronized {
if (_cache4.contains(key)) {
_cache4(key)
} else {
null
}
}
}
def cache2put(key:Tuple2[Long,Int], m:Mat):Unit = {
_cache2.synchronized {
_cache2(key) = m
}
}
def cache3put(key:Tuple3[Long,Long,Int], m:Mat):Unit = {
_cache3.synchronized {
_cache3(key) = m
}
}
def cache4put(key:Tuple4[Long,Long,Long,Int], m:Mat):Unit = {
_cache4.synchronized {
_cache4(key) = m
}
}
def clearCaches = {
_cache2.clear
_cache3.clear
_cache4.clear
ND.clearCaches
}
def trimCache2(ithread:Int) = {
_cache2.synchronized {
val keys = _cache2.keySet
keys.foreach((key:Tuple2[Long,Int]) => {
val toremove:Boolean = _cache2.get(key).get match {
case aa:GMat => (aa.myGPU == ithread)
case aa:GSMat => (aa.myGPU == ithread)
case _ => false
}
if (toremove) _cache2.remove(key)
})
}
}
def trimCache3(ithread:Int) = {
_cache3.synchronized {
val keys = _cache3.keySet
keys.foreach((key:Tuple3[Long,Long,Int]) => {
val toremove:Boolean = _cache3.get(key).get match {
case aa:GMat => (aa.myGPU == ithread)
case aa:GSMat => (aa.myGPU == ithread)
case _ => false
}
if (toremove) _cache3.remove(key)
})
}
}
def trimCache4(ithread:Int) = {
_cache3.synchronized {
val keys = _cache4.keySet
keys.foreach((key:Tuple4[Long,Long,Long,Int]) => {
val toremove:Boolean = _cache4.get(key).get match {
case aa:GMat => (aa.myGPU == ithread)
case aa:GSMat => (aa.myGPU == ithread)
case _ => false
}
if (toremove) _cache4.remove(key)
})
}
}
def trimCaches(ithread:Int) = {
trimCache2(ithread)
trimCache3(ithread)
trimCache4(ithread)
}
def getJARdir:String = {
val path = Mat.getClass.getProtectionDomain().getCodeSource().getLocation().getPath()
val jstr = java.net.URLDecoder.decode(path, "UTF-8")
path.replace("BIDMat.jar","")
}
def checkMKL:Unit = {
if (useMKL) {
try {
jcuda.LibUtils.loadLibrary("bidmatmkl")
} catch {
case _:Throwable => {
println("Cant find native CPU libraries")
useMKL = false
}
}
}
try {
// jcuda.LibUtils.loadLibrary("jhdf5")
System.loadLibrary("jhdf5")
} catch {
case _:Throwable => {
println("Cant find native HDF5 library")
}
}
}
def checkCUDA:Unit = checkCUDA(false)
def checkCUDA(verbose:Boolean):Unit = {
if (hasCUDA == 0) {
val os = System.getProperty("os.name")
try {
if (os.equals("Linux") || os.equals("Mac OS X")) {
System.loadLibrary("cudart")
} else {
val libnames = List("cudart64_70", "cudart64_65", "cudart64_55", "cudart64_50_35", "cudart64_42_9").iterator
var found = false
while (!found && libnames.hasNext) {
found = true
try{
System.loadLibrary(libnames.next)
} catch {
case _:Throwable => found = false
}
}
if (!found) throw new RuntimeException("Couldnt find a cudart lib")
}
} catch {
case x:Throwable => {
println("Couldnt load CUDA runtime");
if (verbose) {
val msg = x.getMessage;
if (msg != null) println(msg);
}
hasCUDA = -1
}
}
if (hasCUDA >= 0) {
try {
jcuda.LibUtils.loadLibrary("JCudaRuntime")
} catch {
case y:Throwable => {
println("Couldnt load JCuda");
if (verbose) {
val msg = y.getMessage;
if (msg != null) println(msg);
}
hasCUDA = -1
}
}
}
}
if (hasCUDA >= 0) {
try {
var cudanum = new Array[Int](1)
jcuda.runtime.JCuda.cudaGetDeviceCount(cudanum)
hasCUDA = cudanum(0)
printf("%d CUDA device%s found", hasCUDA, if (hasCUDA == 1) "" else "s")
if (hasCUDA > 0) {
jcuda.runtime.JCuda.cudaRuntimeGetVersion(cudanum)
println(", CUDA version %d.%d" format (cudanum(0)/1000, (cudanum(0)%100) / 10))
} else {
println("")
}
} catch {
case e:NoClassDefFoundError => println("Couldn't load the JCUDA driver")
case e:Exception => println("Exception while initializing JCUDA driver")
case z:Throwable => println("Something went wrong while loading JCUDA driver" + z.getMessage)
}
if (hasCUDA > 0) {
try {
jcuda.LibUtils.loadLibrary("bidmatcuda")
} catch {
case z:Throwable => println("Something went wrong while loading BIDMat CUDA library" + z.getMessage)
}
}
}
}
def copyToIntArray[@specialized(Double, Float, Long, Byte, Short) T](data:Array[T], i0:Int, idata:Array[Int], d0:Int, n:Int)
(implicit numeric : Numeric[T]) = {
var i = 0
while (i < n) {
idata(i+d0) = numeric.toInt(data(i+i0));
i += 1
}
}
def copyToDoubleArray[@specialized(Int, Float, Long, Byte, Short) T](data:Array[T], i0:Int, ddata:Array[Double], d0:Int, n:Int)
(implicit numeric : Numeric[T]) = {
var i = 0
while (i < n) {
ddata(i+d0) = numeric.toDouble(data(i+i0));
i += 1
}
}
def copyToFloatArray[@specialized(Int, Double, Long, Byte, Short) T](data:Array[T], i0:Int, fdata:Array[Float], d0:Int, n:Int)
(implicit numeric : Numeric[T]) = {
var i = 0
while (i < n) {
fdata(i+d0) = numeric.toFloat(data(i+i0));
i += 1
}
}
def copyToLongArray[@specialized(Int, Double, Float, Byte, Short) T](data:Array[T], i0:Int, fdata:Array[Long], d0:Int, n:Int)
(implicit numeric : Numeric[T]) = {
var i = 0
while (i < n) {
fdata(i+d0) = numeric.toLong(data(i+i0));
i += 1
}
}
def copyListToFloatArray[T](a:List[T], b:Array[Float])(implicit numeric : Numeric[T]) = {
var i = 0;
var todo = a.iterator
val alen = a.length
while (i < alen) {
val h = todo.next
b(i) = numeric.toFloat(h)
i += 1
}
}
def ibinsearch(v:Int, x:Array[Int], istartp:Int, iendp:Int):Int = {
var istart = istartp
var iend = iendp
while (iend - istart > 1) {
var mid:Int = (istart + iend)/2
if (v < x(mid)) iend = mid else istart = mid
}
if (iend > istart && v == x(istart)) istart else -1
}
def binsearch[T : Ordering](v:T, x:Array[T], istartp:Int, iendp:Int):Int = {
var istart = istartp
var iend = iendp
while (iend - istart > 1) {
var mid:Int = (istart + iend)/2
if (v < x(mid)) iend = mid else istart = mid
}
if (v == x(istart)) istart else -1
}
def lexsort[T :Ordering](a:List[Array[T]]):Array[Int] = {
val n = a(0).length
val ind = new Array[Int](n)
var i = 0; while(i < n) {ind(i) = i; i += 1}
def comp(i:Int, j:Int):Int = {
val alen = a.length;
val ip = ind(i)
val jp = ind(j)
var c0 = 0
var k = 0;
while (k < alen && c0 == 0) {
c0 = a(k)(ip) compare a(k)(jp)
k += 1
}
if (c0 != 0) {
c0
} else {
ip compare jp
}
}
def swap(i:Int, j:Int):Unit = {
val tmp = ind(i)
ind(i) = ind(j)
ind(j) = tmp
}
BIDMat.Sorting.quickSort(comp, swap, 0, n)
ind
}
def ilexsort(a:List[Array[Int]]):Array[Int] = {
val n = a(0).length
val ind = new Array[Int](n)
var i = 0; while(i < n) {ind(i) = i; i += 1}
def comp(i:Int, j:Int):Int = {
var k = 0;
val alen = a.length;
var c0 = 0
val ip = ind(i)
val jp = ind(j)
while (k < alen && c0 == 0) {
c0 = a(k)(ip) compare a(k)(jp)
k += 1
}
if (c0 != 0) {
c0
} else {
ip compare jp
}
}
def swap(i:Int, j:Int):Unit = {
val tmp = ind(i)
ind(i) = ind(j)
ind(j) = tmp
}
BIDMat.Sorting.quickSort(comp, swap, 0, n)
ind
}
def ilexsort2(a:Array[Int], b:Array[Int]):Array[Int] = {
val n = a.length
val ind = new Array[Int](n)
var i = 0; while(i < n) {ind(i) = i; i += 1}
def comp(i:Int, j:Int):Int = {
val c0 = a(i) compare a(j)
if (c0 != 0) {
c0
} else {
val c1 = b(i) compare b(j)
if (c1 != 0) {
c1
} else {
ind(i) compare ind(j)
}
}
}
def swap(i:Int, j:Int):Unit = {
val tmpa = a(i)
a(i) = a(j)
a(j) = tmpa
val tmpb = b(i)
b(i) = b(j)
b(j) = tmpb
val tmpi = ind(i)
ind(i) = ind(j)
ind(j) = tmpi
}
BIDMat.Sorting.quickSort(comp, swap, 0, n)
ind
}
def ilexsort3[T](a:Array[Int], b:Array[Int], c:Array[T]):Unit = {
val n = a.length
def comp(i:Int, j:Int):Int = {
val c0 = a(i) compare a(j)
if (c0 != 0) {
c0
} else {
b(i) compare b(j)
}
}
def swap(i:Int, j:Int):Unit = {
val tmpa = a(i)
a(i) = a(j)
a(j) = tmpa
val tmpb = b(i)
b(i) = b(j)
b(j) = tmpb
val tmpc = c(i)
c(i) = c(j)
c(j) = tmpc
}
BIDMat.Sorting.quickSort(comp, swap, 0, n)
}
def ilexsort(args:Array[Int]*):Array[Int] = {
ilexsort(args.toList)
}
def lexsort[T : Ordering](args:Array[T]*):Array[Int] = {
lexsort(args.toList)
}
}
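// Hypothetical illustration (not part of the original file): the (GUID, opcode)
// cache2/cache2put pattern that concrete matrix classes follow when Mat.useCache
// is enabled. The opcode constant below is invented for the sketch; cache2
// returns null on a miss, so the result is recomputed and stored.
object MatCacheSketch {
  def cachedCopy(a: Mat): Mat = {
    val key = (a.GUID, 0x7e57)
    Mat.cache2(key) match {
      case null =>
        val fresh = a.copy // concrete subclasses implement copy; the base Mat throws
        Mat.cache2put(key, fresh)
        fresh
      case hit => hit
    }
  }
}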
| codeaudit/BIDMat | src/main/scala/BIDMat/Mat.scala | Scala | bsd-3-clause | 34,640 |
package uk.gov.gds.ier.transaction.crown.contact
import uk.gov.gds.ier.validation.ErrorTransformForm
import uk.gov.gds.ier.step.StepTemplate
import uk.gov.gds.ier.transaction.crown.InprogressCrown
trait ContactMustache extends StepTemplate[InprogressCrown] {
case class ContactModel (
question:Question,
contactFieldSet: FieldSet,
contactEmailCheckbox: Field,
contactPhoneCheckbox: Field,
contactPostCheckbox: Field,
contactEmailText: Field,
contactPhoneText: Field,
showEmailFieldFlag: Text) extends MustacheData
val mustache = MustacheTemplate("crown/contact") { (form, postUrl) =>
implicit val progressForm = form
    val emailAddress = form(keys.contact.email.detail).value
      .orElse(form(keys.postalOrProxyVote.deliveryMethod.emailAddress).value)
val title = "If we have questions about your application, how should we contact you?"
ContactModel(
question = Question(
postUrl = postUrl.url,
errorMessages = form.globalErrors.map{ _.message },
title = title
),
contactFieldSet = FieldSet(
classes = if (progressForm(keys.contact).hasErrors) "invalid" else ""
),
contactEmailCheckbox = CheckboxField(
key = keys.contact.email.contactMe, value = "true"
),
contactPhoneCheckbox = CheckboxField(
key = keys.contact.phone.contactMe, value = "true"
),
contactPostCheckbox = CheckboxField(
key = keys.contact.post.contactMe, value = "true"
),
contactEmailText = TextField(
key = keys.contact.email.detail,
default = emailAddress
),
contactPhoneText = TextField(
key = keys.contact.phone.detail
),
showEmailFieldFlag = Text (
value = if (!form(keys.contact.email.detail).value.isEmpty) "selected" else if
(!form(keys.postalOrProxyVote.deliveryMethod.emailAddress).value.isEmpty) "selected" else if
(!form(keys.postalVote.deliveryMethod.emailAddress).value.isEmpty) "selected" else ""
)
)
}
}
| alphagov/ier-frontend | app/uk/gov/gds/ier/transaction/crown/contact/ContactMustache.scala | Scala | mit | 2,114 |
package jp.co.cyberagent.aeromock.template.thymeleaf
import jp.co.cyberagent.aeromock.config.TemplateConfig
/**
 * Configuration class for Thymeleaf.
* @author stormcat24
*/
case class ThymeleafConfig(
suffix: Option[String],
characterEncoding: Option[String],
templateAliases: Option[Map[String, String]],
templateMode: Option[String],
legacyHtml5TemplateModePatterns: Option[List[String]],
validXhtmlTemplateModePatterns: Option[List[String]],
validXmlTemplateModePatterns: Option[List[String]],
xhtmlTemplateModePatterns: Option[List[String]],
xmlTemplateModePatterns: Option[List[String]]
) extends TemplateConfig
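// Hypothetical illustration (not part of the original file): constructing a
// minimal ThymeleafConfig. Every field is an Option, so anything left as None
// falls back to whatever default the surrounding template service applies; the
// concrete values below are invented for the sketch.
object ThymeleafConfigSketch {
  val example = ThymeleafConfig(
    suffix = Some(".html"),
    characterEncoding = Some("UTF-8"),
    templateAliases = None,
    templateMode = Some("HTML5"),
    legacyHtml5TemplateModePatterns = None,
    validXhtmlTemplateModePatterns = None,
    validXmlTemplateModePatterns = None,
    xhtmlTemplateModePatterns = None,
    xmlTemplateModePatterns = None
  )
}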
| CyberAgent/aeromock | aeromock-thymeleaf/src/main/scala/jp/co/cyberagent/aeromock/template/thymeleaf/ThymeleafConfig.scala | Scala | mit | 643 |
package pl.touk.nussknacker.restmodel
import io.circe.Decoder
import io.circe.generic.JsonCodec
import io.circe.generic.extras.semiauto.deriveConfiguredDecoder
import pl.touk.nussknacker.engine.api.CirceUtil._
import pl.touk.nussknacker.engine.api.component.ComponentType.ComponentType
import pl.touk.nussknacker.engine.api.component.{ComponentGroupName, SingleComponentConfig}
import pl.touk.nussknacker.engine.api.definition.{MandatoryParameterValidator, ParameterEditor, ParameterValidator}
import pl.touk.nussknacker.engine.api.deployment.CustomAction
import pl.touk.nussknacker.engine.api.typed.typing.TypingResult
import pl.touk.nussknacker.engine.graph.evaluatedparam
import pl.touk.nussknacker.engine.graph.node.NodeData
import pl.touk.nussknacker.restmodel.displayedgraph.displayablenode.EdgeType
import java.net.URI
package object definition {
@JsonCodec(encodeOnly = true) case class UIProcessObjects(componentGroups: List[ComponentGroup],
processDefinition: UIProcessDefinition,
componentsConfig: Map[String, SingleComponentConfig],
additionalPropertiesConfig: Map[String, UiAdditionalPropertyConfig],
edgesForNodes: List[NodeEdges],
customActions: List[UICustomAction],
defaultAsyncInterpretation: Boolean)
@JsonCodec(encodeOnly = true) case class UIProcessDefinition(services: Map[String, UIObjectDefinition],
sourceFactories: Map[String, UIObjectDefinition],
sinkFactories: Map[String, UIObjectDefinition],
customStreamTransformers: Map[String, UIObjectDefinition],
signalsWithTransformers: Map[String, UIObjectDefinition],
globalVariables: Map[String, UIObjectDefinition],
typesInformation: Set[UIClazzDefinition],
subprocessInputs: Map[String, UIObjectDefinition]) {
// skipping exceptionHandlerFactory
val allDefinitions: Map[String, UIObjectDefinition] = services ++ sourceFactories ++ sinkFactories ++
customStreamTransformers ++ signalsWithTransformers ++ globalVariables ++ subprocessInputs
}
@JsonCodec(encodeOnly = true) case class UIClazzDefinition(clazzName: TypingResult, methods: Map[String, UIMethodInfo], staticMethods: Map[String, UIMethodInfo])
@JsonCodec(encodeOnly = true) case class UIMethodInfo(parameters: List[UIBasicParameter], refClazz: TypingResult, description: Option[String], varArgs: Boolean)
@JsonCodec(encodeOnly = true) case class UIBasicParameter(name: String, refClazz: TypingResult)
@JsonCodec(encodeOnly = true) case class UIParameter(name: String, typ: TypingResult, editor: ParameterEditor, validators: List[ParameterValidator], defaultValue: String, additionalVariables: Map[String, TypingResult], variablesToHide: Set[String], branchParam: Boolean) {
def isOptional: Boolean = !validators.contains(MandatoryParameterValidator)
}
@JsonCodec(encodeOnly = true) case class UIObjectDefinition(parameters: List[UIParameter],
returnType: Option[TypingResult],
categories: List[String],
componentConfig: SingleComponentConfig) {
def hasNoReturn: Boolean = returnType.isEmpty
}
@JsonCodec case class NodeTypeId(`type`: String, id: Option[String] = None)
@JsonCodec case class NodeEdges(nodeId: NodeTypeId, edges: List[EdgeType], canChooseNodes: Boolean, isForInputDefinition: Boolean)
import pl.touk.nussknacker.engine.graph.NodeDataCodec._
object ComponentTemplate {
def create(`type`: ComponentType, node: NodeData, categories: List[String], branchParametersTemplate: List[evaluatedparam.Parameter] = List.empty): ComponentTemplate =
ComponentTemplate(`type`, `type`.toString, node, categories, branchParametersTemplate)
}
@JsonCodec(encodeOnly = true) case class ComponentTemplate(`type`: ComponentType, label: String, node: NodeData, categories: List[String], branchParametersTemplate: List[evaluatedparam.Parameter] = List.empty)
@JsonCodec(encodeOnly = true) case class ComponentGroup(name: ComponentGroupName, components: List[ComponentTemplate])
@JsonCodec case class UiAdditionalPropertyConfig(defaultValue: Option[String],
editor: ParameterEditor,
validators: List[ParameterValidator],
label: Option[String])
object UIParameter {
implicit def decoder(implicit typing: Decoder[TypingResult]): Decoder[UIParameter] = deriveConfiguredDecoder[UIParameter]
}
object UICustomAction {
import pl.touk.nussknacker.restmodel.codecs.URICodecs.{uriDecoder, uriEncoder}
def apply(action: CustomAction): UICustomAction = UICustomAction(
name = action.name, allowedStateStatusNames = action.allowedStateStatusNames, icon = action.icon, parameters =
action.parameters.map(p => UICustomActionParameter(p.name, p.editor))
)
}
@JsonCodec case class UICustomAction(name: String,
allowedStateStatusNames: List[String],
icon: Option[URI],
parameters: List[UICustomActionParameter])
@JsonCodec case class UICustomActionParameter(name: String, editor: ParameterEditor)
}
|
TouK/nussknacker
|
ui/restmodel/src/main/scala/pl/touk/nussknacker/restmodel/definition/package.scala
|
Scala
|
apache-2.0
| 6,098
|
// Generated by the Scala Plugin for the Protocol Buffer Compiler.
// Do not edit!
//
// Protofile syntax: PROTO2
package com.google.protobuf.descriptor
/** Describes a method of a service.
*
* @param inputType
* Input and output type names. These are resolved in the same way as
* FieldDescriptorProto.type_name, but must refer to a message type.
* @param clientStreaming
* Identifies if client streams multiple client messages
* @param serverStreaming
* Identifies if server streams multiple server messages
*/
@SerialVersionUID(0L)
final case class MethodDescriptorProto(
name: _root_.scala.Option[_root_.scala.Predef.String] = _root_.scala.None,
inputType: _root_.scala.Option[_root_.scala.Predef.String] = _root_.scala.None,
outputType: _root_.scala.Option[_root_.scala.Predef.String] = _root_.scala.None,
options: _root_.scala.Option[com.google.protobuf.descriptor.MethodOptions] = _root_.scala.None,
clientStreaming: _root_.scala.Option[_root_.scala.Boolean] = _root_.scala.None,
serverStreaming: _root_.scala.Option[_root_.scala.Boolean] = _root_.scala.None
) extends scalapb.GeneratedMessage with scalapb.Message[MethodDescriptorProto] with scalapb.lenses.Updatable[MethodDescriptorProto] {
@transient
private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
private[this] def __computeSerializedValue(): _root_.scala.Int = {
var __size = 0
if (name.isDefined) {
val __value = name.get
__size += _root_.com.google.protobuf.CodedOutputStream.computeStringSize(1, __value)
};
if (inputType.isDefined) {
val __value = inputType.get
__size += _root_.com.google.protobuf.CodedOutputStream.computeStringSize(2, __value)
};
if (outputType.isDefined) {
val __value = outputType.get
__size += _root_.com.google.protobuf.CodedOutputStream.computeStringSize(3, __value)
};
if (options.isDefined) {
val __value = options.get
__size += 1 + _root_.com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
};
if (clientStreaming.isDefined) {
val __value = clientStreaming.get
__size += _root_.com.google.protobuf.CodedOutputStream.computeBoolSize(5, __value)
};
if (serverStreaming.isDefined) {
val __value = serverStreaming.get
__size += _root_.com.google.protobuf.CodedOutputStream.computeBoolSize(6, __value)
};
__size
}
final override def serializedSize: _root_.scala.Int = {
var read = __serializedSizeCachedValue
if (read == 0) {
read = __computeSerializedValue()
__serializedSizeCachedValue = read
}
read
}
def writeTo(`_output__`: _root_.com.google.protobuf.CodedOutputStream): _root_.scala.Unit = {
name.foreach { __v =>
val __m = __v
_output__.writeString(1, __m)
};
inputType.foreach { __v =>
val __m = __v
_output__.writeString(2, __m)
};
outputType.foreach { __v =>
val __m = __v
_output__.writeString(3, __m)
};
options.foreach { __v =>
val __m = __v
_output__.writeTag(4, 2)
_output__.writeUInt32NoTag(__m.serializedSize)
__m.writeTo(_output__)
};
clientStreaming.foreach { __v =>
val __m = __v
_output__.writeBool(5, __m)
};
serverStreaming.foreach { __v =>
val __m = __v
_output__.writeBool(6, __m)
};
}
def mergeFrom(`_input__`: _root_.com.google.protobuf.CodedInputStream): com.google.protobuf.descriptor.MethodDescriptorProto = {
var __name = this.name
var __inputType = this.inputType
var __outputType = this.outputType
var __options = this.options
var __clientStreaming = this.clientStreaming
var __serverStreaming = this.serverStreaming
var _done__ = false
while (!_done__) {
val _tag__ = _input__.readTag()
_tag__ match {
case 0 => _done__ = true
case 10 =>
__name = Option(_input__.readString())
case 18 =>
__inputType = Option(_input__.readString())
case 26 =>
__outputType = Option(_input__.readString())
case 34 =>
__options = Option(_root_.scalapb.LiteParser.readMessage(_input__, __options.getOrElse(com.google.protobuf.descriptor.MethodOptions.defaultInstance)))
case 40 =>
__clientStreaming = Option(_input__.readBool())
case 48 =>
__serverStreaming = Option(_input__.readBool())
case tag => _input__.skipField(tag)
}
}
com.google.protobuf.descriptor.MethodDescriptorProto(
name = __name,
inputType = __inputType,
outputType = __outputType,
options = __options,
clientStreaming = __clientStreaming,
serverStreaming = __serverStreaming
)
}
def getName: _root_.scala.Predef.String = name.getOrElse("")
def clearName: MethodDescriptorProto = copy(name = _root_.scala.None)
def withName(__v: _root_.scala.Predef.String): MethodDescriptorProto = copy(name = Option(__v))
def getInputType: _root_.scala.Predef.String = inputType.getOrElse("")
def clearInputType: MethodDescriptorProto = copy(inputType = _root_.scala.None)
def withInputType(__v: _root_.scala.Predef.String): MethodDescriptorProto = copy(inputType = Option(__v))
def getOutputType: _root_.scala.Predef.String = outputType.getOrElse("")
def clearOutputType: MethodDescriptorProto = copy(outputType = _root_.scala.None)
def withOutputType(__v: _root_.scala.Predef.String): MethodDescriptorProto = copy(outputType = Option(__v))
def getOptions: com.google.protobuf.descriptor.MethodOptions = options.getOrElse(com.google.protobuf.descriptor.MethodOptions.defaultInstance)
def clearOptions: MethodDescriptorProto = copy(options = _root_.scala.None)
def withOptions(__v: com.google.protobuf.descriptor.MethodOptions): MethodDescriptorProto = copy(options = Option(__v))
def getClientStreaming: _root_.scala.Boolean = clientStreaming.getOrElse(false)
def clearClientStreaming: MethodDescriptorProto = copy(clientStreaming = _root_.scala.None)
def withClientStreaming(__v: _root_.scala.Boolean): MethodDescriptorProto = copy(clientStreaming = Option(__v))
def getServerStreaming: _root_.scala.Boolean = serverStreaming.getOrElse(false)
def clearServerStreaming: MethodDescriptorProto = copy(serverStreaming = _root_.scala.None)
def withServerStreaming(__v: _root_.scala.Boolean): MethodDescriptorProto = copy(serverStreaming = Option(__v))
def getFieldByNumber(__fieldNumber: _root_.scala.Int): _root_.scala.Any = {
(__fieldNumber: @_root_.scala.unchecked) match {
case 1 => name.orNull
case 2 => inputType.orNull
case 3 => outputType.orNull
case 4 => options.orNull
case 5 => clientStreaming.orNull
case 6 => serverStreaming.orNull
}
}
def getField(__field: _root_.scalapb.descriptors.FieldDescriptor): _root_.scalapb.descriptors.PValue = {
_root_.scala.Predef.require(__field.containingMessage eq companion.scalaDescriptor)
(__field.number: @_root_.scala.unchecked) match {
case 1 => name.map(_root_.scalapb.descriptors.PString).getOrElse(_root_.scalapb.descriptors.PEmpty)
case 2 => inputType.map(_root_.scalapb.descriptors.PString).getOrElse(_root_.scalapb.descriptors.PEmpty)
case 3 => outputType.map(_root_.scalapb.descriptors.PString).getOrElse(_root_.scalapb.descriptors.PEmpty)
case 4 => options.map(_.toPMessage).getOrElse(_root_.scalapb.descriptors.PEmpty)
case 5 => clientStreaming.map(_root_.scalapb.descriptors.PBoolean).getOrElse(_root_.scalapb.descriptors.PEmpty)
case 6 => serverStreaming.map(_root_.scalapb.descriptors.PBoolean).getOrElse(_root_.scalapb.descriptors.PEmpty)
}
}
def toProtoString: _root_.scala.Predef.String = _root_.scalapb.TextFormat.printToUnicodeString(this)
def companion = com.google.protobuf.descriptor.MethodDescriptorProto
}
object MethodDescriptorProto extends scalapb.GeneratedMessageCompanion[com.google.protobuf.descriptor.MethodDescriptorProto] with scalapb.JavaProtoSupport[com.google.protobuf.descriptor.MethodDescriptorProto, com.google.protobuf.DescriptorProtos.MethodDescriptorProto] {
implicit def messageCompanion: scalapb.GeneratedMessageCompanion[com.google.protobuf.descriptor.MethodDescriptorProto] with scalapb.JavaProtoSupport[com.google.protobuf.descriptor.MethodDescriptorProto, com.google.protobuf.DescriptorProtos.MethodDescriptorProto] = this
def toJavaProto(scalaPbSource: com.google.protobuf.descriptor.MethodDescriptorProto): com.google.protobuf.DescriptorProtos.MethodDescriptorProto = {
val javaPbOut = com.google.protobuf.DescriptorProtos.MethodDescriptorProto.newBuilder
scalaPbSource.name.foreach(javaPbOut.setName)
scalaPbSource.inputType.foreach(javaPbOut.setInputType)
scalaPbSource.outputType.foreach(javaPbOut.setOutputType)
scalaPbSource.options.map(com.google.protobuf.descriptor.MethodOptions.toJavaProto).foreach(javaPbOut.setOptions)
scalaPbSource.clientStreaming.foreach(javaPbOut.setClientStreaming)
scalaPbSource.serverStreaming.foreach(javaPbOut.setServerStreaming)
javaPbOut.build
}
def fromJavaProto(javaPbSource: com.google.protobuf.DescriptorProtos.MethodDescriptorProto): com.google.protobuf.descriptor.MethodDescriptorProto = com.google.protobuf.descriptor.MethodDescriptorProto(
name = if (javaPbSource.hasName) Some(javaPbSource.getName) else _root_.scala.None,
inputType = if (javaPbSource.hasInputType) Some(javaPbSource.getInputType) else _root_.scala.None,
outputType = if (javaPbSource.hasOutputType) Some(javaPbSource.getOutputType) else _root_.scala.None,
options = if (javaPbSource.hasOptions) Some(com.google.protobuf.descriptor.MethodOptions.fromJavaProto(javaPbSource.getOptions)) else _root_.scala.None,
clientStreaming = if (javaPbSource.hasClientStreaming) Some(javaPbSource.getClientStreaming.booleanValue) else _root_.scala.None,
serverStreaming = if (javaPbSource.hasServerStreaming) Some(javaPbSource.getServerStreaming.booleanValue) else _root_.scala.None
)
def fromFieldsMap(__fieldsMap: scala.collection.immutable.Map[_root_.com.google.protobuf.Descriptors.FieldDescriptor, _root_.scala.Any]): com.google.protobuf.descriptor.MethodDescriptorProto = {
_root_.scala.Predef.require(__fieldsMap.keys.forall(_.getContainingType() == javaDescriptor), "FieldDescriptor does not match message type.")
val __fields = javaDescriptor.getFields
com.google.protobuf.descriptor.MethodDescriptorProto(
__fieldsMap.get(__fields.get(0)).asInstanceOf[_root_.scala.Option[_root_.scala.Predef.String]],
__fieldsMap.get(__fields.get(1)).asInstanceOf[_root_.scala.Option[_root_.scala.Predef.String]],
__fieldsMap.get(__fields.get(2)).asInstanceOf[_root_.scala.Option[_root_.scala.Predef.String]],
__fieldsMap.get(__fields.get(3)).asInstanceOf[_root_.scala.Option[com.google.protobuf.descriptor.MethodOptions]],
__fieldsMap.get(__fields.get(4)).asInstanceOf[_root_.scala.Option[_root_.scala.Boolean]],
__fieldsMap.get(__fields.get(5)).asInstanceOf[_root_.scala.Option[_root_.scala.Boolean]]
)
}
implicit def messageReads: _root_.scalapb.descriptors.Reads[com.google.protobuf.descriptor.MethodDescriptorProto] = _root_.scalapb.descriptors.Reads{
case _root_.scalapb.descriptors.PMessage(__fieldsMap) =>
_root_.scala.Predef.require(__fieldsMap.keys.forall(_.containingMessage == scalaDescriptor), "FieldDescriptor does not match message type.")
com.google.protobuf.descriptor.MethodDescriptorProto(
__fieldsMap.get(scalaDescriptor.findFieldByNumber(1).get).flatMap(_.as[_root_.scala.Option[_root_.scala.Predef.String]]),
__fieldsMap.get(scalaDescriptor.findFieldByNumber(2).get).flatMap(_.as[_root_.scala.Option[_root_.scala.Predef.String]]),
__fieldsMap.get(scalaDescriptor.findFieldByNumber(3).get).flatMap(_.as[_root_.scala.Option[_root_.scala.Predef.String]]),
__fieldsMap.get(scalaDescriptor.findFieldByNumber(4).get).flatMap(_.as[_root_.scala.Option[com.google.protobuf.descriptor.MethodOptions]]),
__fieldsMap.get(scalaDescriptor.findFieldByNumber(5).get).flatMap(_.as[_root_.scala.Option[_root_.scala.Boolean]]),
__fieldsMap.get(scalaDescriptor.findFieldByNumber(6).get).flatMap(_.as[_root_.scala.Option[_root_.scala.Boolean]])
)
case _ => throw new RuntimeException("Expected PMessage")
}
def javaDescriptor: _root_.com.google.protobuf.Descriptors.Descriptor = DescriptorProtoCompanion.javaDescriptor.getMessageTypes.get(9)
def scalaDescriptor: _root_.scalapb.descriptors.Descriptor = DescriptorProtoCompanion.scalaDescriptor.messages(9)
def messageCompanionForFieldNumber(__number: _root_.scala.Int): _root_.scalapb.GeneratedMessageCompanion[_] = {
var __out: _root_.scalapb.GeneratedMessageCompanion[_] = null
(__number: @_root_.scala.unchecked) match {
case 4 => __out = com.google.protobuf.descriptor.MethodOptions
}
__out
}
lazy val nestedMessagesCompanions: Seq[_root_.scalapb.GeneratedMessageCompanion[_ <: _root_.scalapb.GeneratedMessage]] = Seq.empty
def enumCompanionForFieldNumber(__fieldNumber: _root_.scala.Int): _root_.scalapb.GeneratedEnumCompanion[_] = throw new MatchError(__fieldNumber)
lazy val defaultInstance = com.google.protobuf.descriptor.MethodDescriptorProto(
)
implicit class MethodDescriptorProtoLens[UpperPB](_l: _root_.scalapb.lenses.Lens[UpperPB, com.google.protobuf.descriptor.MethodDescriptorProto]) extends _root_.scalapb.lenses.ObjectLens[UpperPB, com.google.protobuf.descriptor.MethodDescriptorProto](_l) {
def name: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Predef.String] = field(_.getName)((c_, f_) => c_.copy(name = Option(f_)))
def optionalName: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Option[_root_.scala.Predef.String]] = field(_.name)((c_, f_) => c_.copy(name = f_))
def inputType: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Predef.String] = field(_.getInputType)((c_, f_) => c_.copy(inputType = Option(f_)))
def optionalInputType: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Option[_root_.scala.Predef.String]] = field(_.inputType)((c_, f_) => c_.copy(inputType = f_))
def outputType: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Predef.String] = field(_.getOutputType)((c_, f_) => c_.copy(outputType = Option(f_)))
def optionalOutputType: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Option[_root_.scala.Predef.String]] = field(_.outputType)((c_, f_) => c_.copy(outputType = f_))
def options: _root_.scalapb.lenses.Lens[UpperPB, com.google.protobuf.descriptor.MethodOptions] = field(_.getOptions)((c_, f_) => c_.copy(options = Option(f_)))
def optionalOptions: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Option[com.google.protobuf.descriptor.MethodOptions]] = field(_.options)((c_, f_) => c_.copy(options = f_))
def clientStreaming: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Boolean] = field(_.getClientStreaming)((c_, f_) => c_.copy(clientStreaming = Option(f_)))
def optionalClientStreaming: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Option[_root_.scala.Boolean]] = field(_.clientStreaming)((c_, f_) => c_.copy(clientStreaming = f_))
def serverStreaming: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Boolean] = field(_.getServerStreaming)((c_, f_) => c_.copy(serverStreaming = Option(f_)))
def optionalServerStreaming: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Option[_root_.scala.Boolean]] = field(_.serverStreaming)((c_, f_) => c_.copy(serverStreaming = f_))
}
final val NAME_FIELD_NUMBER = 1
final val INPUT_TYPE_FIELD_NUMBER = 2
final val OUTPUT_TYPE_FIELD_NUMBER = 3
final val OPTIONS_FIELD_NUMBER = 4
final val CLIENT_STREAMING_FIELD_NUMBER = 5
final val SERVER_STREAMING_FIELD_NUMBER = 6
def of(
name: _root_.scala.Option[_root_.scala.Predef.String],
inputType: _root_.scala.Option[_root_.scala.Predef.String],
outputType: _root_.scala.Option[_root_.scala.Predef.String],
options: _root_.scala.Option[com.google.protobuf.descriptor.MethodOptions],
clientStreaming: _root_.scala.Option[_root_.scala.Boolean],
serverStreaming: _root_.scala.Option[_root_.scala.Boolean]
): _root_.com.google.protobuf.descriptor.MethodDescriptorProto = _root_.com.google.protobuf.descriptor.MethodDescriptorProto(
name,
inputType,
outputType,
options,
clientStreaming,
serverStreaming
)
}
|
dotty-staging/ScalaPB
|
scalapb-runtime/jvm/src/main/scala/com/google/protobuf/descriptor/MethodDescriptorProto.scala
|
Scala
|
apache-2.0
| 16,815
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive
import java.io.File
import java.util.concurrent.{Executors, TimeUnit}
import scala.util.Random
import org.scalatest.BeforeAndAfterEach
import org.apache.spark.metrics.source.HiveCatalogMetrics
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.catalog._
import org.apache.spark.sql.execution.datasources.FileStatusCache
import org.apache.spark.sql.QueryTest
import org.apache.spark.sql.hive.client.HiveClient
import org.apache.spark.sql.hive.test.TestHiveSingleton
import org.apache.spark.sql.internal.{HiveSerDe, SQLConf}
import org.apache.spark.sql.internal.SQLConf.HiveCaseSensitiveInferenceMode.{Value => InferenceMode, _}
import org.apache.spark.sql.test.SQLTestUtils
import org.apache.spark.sql.types._
class HiveSchemaInferenceSuite
extends QueryTest with TestHiveSingleton with SQLTestUtils with BeforeAndAfterEach {
import HiveSchemaInferenceSuite._
import HiveExternalCatalog.DATASOURCE_SCHEMA_PREFIX
override def beforeEach(): Unit = {
super.beforeEach()
FileStatusCache.resetForTesting()
}
override def afterEach(): Unit = {
super.afterEach()
spark.sessionState.catalog.tableRelationCache.invalidateAll()
FileStatusCache.resetForTesting()
}
private val externalCatalog = spark.sharedState.externalCatalog.asInstanceOf[HiveExternalCatalog]
private val client = externalCatalog.client
// Return a copy of the given schema with all field names converted to lower case.
private def lowerCaseSchema(schema: StructType): StructType = {
StructType(schema.map(f => f.copy(name = f.name.toLowerCase)))
}
// Create a Hive external test table containing the given field and partition column names.
// Returns a case-sensitive schema for the table.
private def setupExternalTable(
fileType: String,
fields: Seq[String],
partitionCols: Seq[String],
dir: File): StructType = {
// Treat all table fields as bigints...
val structFields = fields.map { field =>
StructField(
name = field,
dataType = LongType,
nullable = true,
metadata = new MetadataBuilder().putString(HIVE_TYPE_STRING, "bigint").build())
}
// and all partition columns as ints
val partitionStructFields = partitionCols.map { field =>
StructField(
// Partition column case isn't preserved
name = field.toLowerCase,
dataType = IntegerType,
nullable = true,
metadata = new MetadataBuilder().putString(HIVE_TYPE_STRING, "int").build())
}
val schema = StructType(structFields ++ partitionStructFields)
// Write some test data (partitioned if specified)
val writer = spark.range(NUM_RECORDS)
.selectExpr((fields ++ partitionCols).map("id as " + _): _*)
.write
.partitionBy(partitionCols: _*)
.mode("overwrite")
fileType match {
case ORC_FILE_TYPE =>
writer.orc(dir.getAbsolutePath)
case PARQUET_FILE_TYPE =>
writer.parquet(dir.getAbsolutePath)
}
// Create Hive external table with lowercased schema
val serde = HiveSerDe.serdeMap(fileType)
client.createTable(
CatalogTable(
identifier = TableIdentifier(table = TEST_TABLE_NAME, database = Option(DATABASE)),
tableType = CatalogTableType.EXTERNAL,
storage = CatalogStorageFormat(
locationUri = Option(new java.net.URI(dir.getAbsolutePath)),
inputFormat = serde.inputFormat,
outputFormat = serde.outputFormat,
serde = serde.serde,
compressed = false,
properties = Map("serialization.format" -> "1")),
schema = schema,
provider = Option("hive"),
partitionColumnNames = partitionCols.map(_.toLowerCase),
properties = Map.empty),
true)
// Add partition records (if specified)
if (!partitionCols.isEmpty) {
spark.catalog.recoverPartitions(TEST_TABLE_NAME)
}
// Check that the table returned by HiveExternalCatalog has schemaPreservesCase set to false
// and that the raw table returned by the Hive client doesn't have any Spark SQL properties
// set (table needs to be obtained from client since HiveExternalCatalog filters these
// properties out).
assert(!externalCatalog.getTable(DATABASE, TEST_TABLE_NAME).schemaPreservesCase)
val rawTable = client.getTable(DATABASE, TEST_TABLE_NAME)
assert(rawTable.properties.filterKeys(_.startsWith(DATASOURCE_SCHEMA_PREFIX)) == Map.empty)
schema
}
private def withTestTables(
fileType: String)(f: (Seq[String], Seq[String], StructType) => Unit): Unit = {
// Test both a partitioned and unpartitioned Hive table
val tableFields = Seq(
(Seq("fieldOne"), Seq("partCol1", "partCol2")),
(Seq("fieldOne", "fieldTwo"), Seq.empty[String]))
tableFields.foreach { case (fields, partCols) =>
withTempDir { dir =>
val schema = setupExternalTable(fileType, fields, partCols, dir)
withTable(TEST_TABLE_NAME) { f(fields, partCols, schema) }
}
}
}
private def withFileTypes(f: (String) => Unit): Unit
= Seq(ORC_FILE_TYPE, PARQUET_FILE_TYPE).foreach(f)
private def withInferenceMode(mode: InferenceMode)(f: => Unit): Unit = {
withSQLConf(
HiveUtils.CONVERT_METASTORE_ORC.key -> "true",
SQLConf.HIVE_CASE_SENSITIVE_INFERENCE.key -> mode.toString)(f)
}
private val inferenceKey = SQLConf.HIVE_CASE_SENSITIVE_INFERENCE.key
private def testFieldQuery(fields: Seq[String]): Unit = {
if (!fields.isEmpty) {
val query = s"SELECT * FROM ${TEST_TABLE_NAME} WHERE ${Random.shuffle(fields).head} >= 0"
assert(spark.sql(query).count == NUM_RECORDS)
}
}
private def testTableSchema(expectedSchema: StructType): Unit
= assert(spark.table(TEST_TABLE_NAME).schema == expectedSchema)
withFileTypes { fileType =>
test(s"$fileType: schema should be inferred and saved when INFER_AND_SAVE is specified") {
withInferenceMode(INFER_AND_SAVE) {
withTestTables(fileType) { (fields, partCols, schema) =>
testFieldQuery(fields)
testFieldQuery(partCols)
testTableSchema(schema)
// Verify the catalog table now contains the updated schema and properties
val catalogTable = externalCatalog.getTable(DATABASE, TEST_TABLE_NAME)
assert(catalogTable.schemaPreservesCase)
assert(catalogTable.schema == schema)
assert(catalogTable.partitionColumnNames == partCols.map(_.toLowerCase))
}
}
}
}
withFileTypes { fileType =>
test(s"$fileType: schema should be inferred but not stored when INFER_ONLY is specified") {
withInferenceMode(INFER_ONLY) {
withTestTables(fileType) { (fields, partCols, schema) =>
val originalTable = externalCatalog.getTable(DATABASE, TEST_TABLE_NAME)
testFieldQuery(fields)
testFieldQuery(partCols)
testTableSchema(schema)
// Catalog table shouldn't be altered
assert(externalCatalog.getTable(DATABASE, TEST_TABLE_NAME) == originalTable)
}
}
}
}
withFileTypes { fileType =>
test(s"$fileType: schema should not be inferred when NEVER_INFER is specified") {
withInferenceMode(NEVER_INFER) {
withTestTables(fileType) { (fields, partCols, schema) =>
val originalTable = externalCatalog.getTable(DATABASE, TEST_TABLE_NAME)
// Only check the table schema as the test queries will break
testTableSchema(lowerCaseSchema(schema))
assert(externalCatalog.getTable(DATABASE, TEST_TABLE_NAME) == originalTable)
}
}
}
}
test("mergeWithMetastoreSchema() should return expected results") {
// Field type conflict resolution
assertResult(
StructType(Seq(
StructField("lowerCase", StringType),
StructField("UPPERCase", DoubleType, nullable = false)))) {
HiveMetastoreCatalog.mergeWithMetastoreSchema(
StructType(Seq(
StructField("lowercase", StringType),
StructField("uppercase", DoubleType, nullable = false))),
StructType(Seq(
StructField("lowerCase", BinaryType),
StructField("UPPERCase", IntegerType, nullable = true))))
}
    // Metastore schema is a subset of the Parquet schema
assertResult(
StructType(Seq(
StructField("UPPERCase", DoubleType, nullable = false)))) {
HiveMetastoreCatalog.mergeWithMetastoreSchema(
StructType(Seq(
StructField("uppercase", DoubleType, nullable = false))),
StructType(Seq(
StructField("lowerCase", BinaryType),
StructField("UPPERCase", IntegerType, nullable = true))))
}
// Metastore schema contains additional non-nullable fields.
assert(intercept[Throwable] {
HiveMetastoreCatalog.mergeWithMetastoreSchema(
StructType(Seq(
StructField("uppercase", DoubleType, nullable = false),
StructField("lowerCase", BinaryType, nullable = false))),
StructType(Seq(
StructField("UPPERCase", IntegerType, nullable = true))))
}.getMessage.contains("Detected conflicting schemas"))
// Conflicting non-nullable field names
intercept[Throwable] {
HiveMetastoreCatalog.mergeWithMetastoreSchema(
StructType(Seq(StructField("lower", StringType, nullable = false))),
StructType(Seq(StructField("lowerCase", BinaryType))))
}
// Check that merging missing nullable fields works as expected.
assertResult(
StructType(Seq(
StructField("firstField", StringType, nullable = true),
StructField("secondField", StringType, nullable = true),
StructField("thirdfield", StringType, nullable = true)))) {
HiveMetastoreCatalog.mergeWithMetastoreSchema(
StructType(Seq(
StructField("firstfield", StringType, nullable = true),
StructField("secondfield", StringType, nullable = true),
StructField("thirdfield", StringType, nullable = true))),
StructType(Seq(
StructField("firstField", StringType, nullable = true),
StructField("secondField", StringType, nullable = true))))
}
// Merge should fail if the Metastore contains any additional fields that are not
// nullable.
assert(intercept[Throwable] {
HiveMetastoreCatalog.mergeWithMetastoreSchema(
StructType(Seq(
StructField("firstfield", StringType, nullable = true),
StructField("secondfield", StringType, nullable = true),
StructField("thirdfield", StringType, nullable = false))),
StructType(Seq(
StructField("firstField", StringType, nullable = true),
StructField("secondField", StringType, nullable = true))))
}.getMessage.contains("Detected conflicting schemas"))
// Schema merge should maintain metastore order.
assertResult(
StructType(Seq(
StructField("first_field", StringType, nullable = true),
StructField("second_field", StringType, nullable = true),
StructField("third_field", StringType, nullable = true),
StructField("fourth_field", StringType, nullable = true),
StructField("fifth_field", StringType, nullable = true)))) {
HiveMetastoreCatalog.mergeWithMetastoreSchema(
StructType(Seq(
StructField("first_field", StringType, nullable = true),
StructField("second_field", StringType, nullable = true),
StructField("third_field", StringType, nullable = true),
StructField("fourth_field", StringType, nullable = true),
StructField("fifth_field", StringType, nullable = true))),
StructType(Seq(
StructField("fifth_field", StringType, nullable = true),
StructField("third_field", StringType, nullable = true),
StructField("second_field", StringType, nullable = true))))
}
}
}
object HiveSchemaInferenceSuite {
private val NUM_RECORDS = 10
private val DATABASE = "default"
private val TEST_TABLE_NAME = "test_table"
private val ORC_FILE_TYPE = "orc"
private val PARQUET_FILE_TYPE = "parquet"
}
|
sachintyagi22/spark
|
sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSchemaInferenceSuite.scala
|
Scala
|
apache-2.0
| 12,976
|
/**
* Copyright 2011-2017 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.commons.util
import scala.concurrent.duration.FiniteDuration
import io.gatling.commons.util.ClockSingleton.nowMillis
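/**
 * Sliding-window retry counter (descriptive summary inferred from the code below):
 * `newRetry` records the current timestamp and drops entries older than `retryWindow`,
 * while `isLimitReached` reports whether at least `maxRetryLimit` retries remain inside the window.
 *
 * Example (illustrative sketch, only usable from within the gatling package since the class is
 * private[gatling]; the window values are arbitrary):
 * {{{
 *   import scala.concurrent.duration._
 *   val retry = new Retry(maxRetryLimit = 3, retryWindow = 1.minute)
 *   if (!retry.isLimitReached) retry.newRetry // returns an updated Retry instance
 * }}}
 */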
private[gatling] class Retry private (maxRetryLimit: Int, retryWindow: FiniteDuration, retries: List[Long]) {
def this(maxRetryLimit: Int, retryWindow: FiniteDuration) =
this(maxRetryLimit, retryWindow, Nil)
private def copyWithNewRetries(retries: List[Long]) =
new Retry(maxRetryLimit, retryWindow, retries)
def newRetry: Retry = copyWithNewRetries(nowMillis :: cleanupOldRetries)
def isLimitReached = cleanupOldRetries.length >= maxRetryLimit
private def cleanupOldRetries: List[Long] = {
val now = nowMillis
retries.filterNot(_ < (now - retryWindow.toMillis))
}
}
|
MykolaB/gatling
|
gatling-commons/src/main/scala/io/gatling/commons/util/Retry.scala
|
Scala
|
apache-2.0
| 1,357
|
package rodinia
import ir.ast._
import ir.{ArrayTypeWSWC, TupleType}
import lift.arithmetic.SizeVar
import opencl.executor._
import opencl.ir._
import opencl.ir.pattern._
import org.junit.Assert._
import org.junit.Test
object Kmeans extends TestWithExecutor {
val P = SizeVar("P") // number of points
val C = SizeVar("C") // number of clusters
val F = SizeVar("F") // number of features
val featuresType = ArrayTypeWSWC(ArrayTypeWSWC(Float, P), F)
val clustersType = ArrayTypeWSWC(ArrayTypeWSWC(Float, F), C)
val update = UserFun("update", Array("dist", "pair"),
"{ return dist + (pair._0 - pair._1) * (pair._0 - pair._1); }",
Seq(Float, TupleType(Float, Float)), Float)
val update2 = UserFun("update", Array("dist", "pair0", "pair1"),
"{ return dist + (pair0 - pair1) * (pair0 - pair1); }",
Seq(Float, Float, Float), Float)
val test = UserFun("test", Array("dist", "tuple"),
"{" +
"float min_dist = tuple._0;" +
"int i = tuple._1;" +
"int index = tuple._2;" +
"if (dist < min_dist) {" +
" Tuple t = {dist, i + 1, i};" +
" return t;" +
"} else {" +
" Tuple t = {min_dist, i + 1, index};" +
" return t;" +
"}" +
"}",
Seq(Float, TupleType(Float, Int, Int)), TupleType(Float, Int, Int))
val select = UserFun("select_", Array("tuple"),
"{ return tuple._2; }",
Seq(TupleType(Float, Int, Int)), Int)
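  // Host-side reference implementations used to compute the expected ("gold") membership:
  // each point is assigned the index of its nearest centre by squared Euclidean distance.
  // The first overload handles 2D points, the second arbitrary-dimensional feature vectors.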
def calculateMembership(points: Array[(Float, Float)], centres: Array[(Float, Float, Int)]): Array[Int] = {
points.map(x => {
centres
.map(c => ((x._1 - c._1) * (x._1 - c._1) + (x._2 - c._2) * (x._2 - c._2), c._3))
.reduce((p1, p2) => if (p1._1 < p2._1) p1 else p2)
._2
})
}
def calculateMembership(points: Array[Array[Float]], centres: Array[Array[Float]]): Array[Int] = {
points.map(x => {
centres.zipWithIndex
.map(c => ((c._1,x).zipped.map((p1, p2) => (p1-p2)*(p1-p2)).sum, c._2))
.reduce((p1, p2) => if (p1._1 < p2._1) p1 else p2)
._2
})
}
}
class Kmeans {
import rodinia.Kmeans._
@Test def kMeansMembership2Dim(): Unit = {
val inputSize = 512
val k = 16
val pointsX = Array.fill(inputSize)(util.Random.nextFloat())
val pointsY = Array.fill(inputSize)(util.Random.nextFloat())
val centresX = Array.fill(k)(util.Random.nextFloat())
val centresY = Array.fill(k)(util.Random.nextFloat())
val indices = Array.range(0, k)
val distance = UserFun("dist", Array("x", "y", "a", "b", "id"), "{ Tuple t = {(x - a) * (x - a) + (y - b) * (y - b), id}; return t; }", Seq(Float, Float, Float, Float, Int), TupleType(Float, Int))
val minimum = UserFun("minimum", Array("x", "y"), "{ return x._0 < y._0 ? x : y; }", Seq(TupleType(Float, Int), TupleType(Float, Int)), TupleType(Float, Int))
val getSecond = UserFun("getSecond", "x", "{ return x._1; }", TupleType(Float, Int), Int)
val points = pointsX zip pointsY
val centres = (centresX, centresY, indices).zipped.toArray
val gold = calculateMembership(points, centres)
val N = SizeVar("N")
val K = SizeVar("K")
val function = fun(
ArrayTypeWSWC(Float, N),
ArrayTypeWSWC(Float, N),
ArrayTypeWSWC(Float, K),
ArrayTypeWSWC(Float, K),
ArrayTypeWSWC(Int, K),
(x, y, a, b, i) => {
MapGlb(fun(xy => {
toGlobal(MapSeq(idI)) o
MapSeq(getSecond) o
ReduceSeq(minimum, (scala.Float.MaxValue, -1)) o
MapSeq(fun(ab => {
distance(Get(xy, 0), Get(xy, 1), Get(ab, 0), Get(ab, 1), Get(ab, 2))
})) $ Zip(a, b, i)
})) $ Zip(x, y)
}
)
val (output, _) = Execute(inputSize)[Array[Int]](function, pointsX, pointsY, centresX, centresY, indices)
assertArrayEquals(gold, output)
}
@Test
def kMeans(): Unit = {
val numPoints = 1024
val numClusters = 5
val numFeatures = 34
val points = Array.fill(numPoints, numFeatures)(util.Random.nextFloat())
val clusters = Array.fill(numClusters, numFeatures)(util.Random.nextFloat())
val gold = calculateMembership(points, clusters)
val kMeans = fun(
featuresType, clustersType,
(features, clusters) => {
        features :>> Transpose() :>> MapGlb( \( feature => {
          clusters :>> ReduceSeq( \( (tuple, cluster) => {
val dist = Zip(feature, cluster) :>> ReduceSeq(update, 0.0f )
Zip(dist, tuple) :>> MapSeq(test)
}), Value("{3.40282347e+38, 0, 0}", ArrayTypeWSWC(TupleType(Float, Int, Int), 1)) ) :>>
toGlobal(MapSeq(MapSeq(select)))
}) )
})
val (output, _) = Execute(numPoints)[Array[Int]](kMeans, points.transpose, clusters)
assertArrayEquals(gold, output)
}
@Test
def kMeansLocalMemory(): Unit = {
val numPoints = 1024
val numClusters = 5
val numFeatures = 8
val points = Array.fill(numPoints, numFeatures)(util.Random.nextFloat())
val clusters = Array.fill(numClusters, numFeatures)(util.Random.nextFloat())
val gold = calculateMembership(points, clusters)
val splitFactor = 128
val kMeans = fun(
featuresType, clustersType,
(features, clusters) => {
        features :>> Transpose() :>> Split(splitFactor) :>> MapWrg(\( featuresChunk =>
          clusters :>> toLocal(MapLcl(MapSeq(id))) :>> Let(localClusters =>
            MapLcl( \( feature => {
              localClusters :>> ReduceSeq( \( (tuple, cluster) => {
                val dist = Zip(feature, cluster) :>> ReduceSeq(\((acc, b) => update2(acc, Get(b, 0), Get(b,1))), 0.0f )
Zip(dist, tuple) :>> MapSeq(test)
}), Value("{3.40282347e+38, 0, 0}", ArrayTypeWSWC(TupleType(Float, Int, Int), 1)) ) :>>
toGlobal(MapSeq(MapSeq(select)))
})) $ featuresChunk
)
)) :>> Join()
})
val (output, _) = Execute(numPoints)[Array[Int]](kMeans, points.transpose, clusters)
assertArrayEquals(gold, output)
}
@Test
def kMeans_swap(): Unit = {
val kMeans_swap = fun(
featuresType,
features => {
features :>> MapGlb(MapSeq(id)) :>> TransposeW()
})
val code = Compile(kMeans_swap)
println(code)
}
}
|
lift-project/lift
|
src/test/rodinia/Kmeans.scala
|
Scala
|
mit
| 6,254
|
package chana.avro
import java.io.ByteArrayOutputStream
import java.io.IOException
import java.io.StringWriter
import org.apache.avro.Schema
import org.apache.avro.Schema.Type
import org.apache.avro.generic.GenericDatumWriter
import org.apache.avro.generic.GenericEnumSymbol
import org.apache.avro.generic.IndexedRecord
import org.apache.avro.io.EncoderFactory
import org.apache.avro.specific.SpecificDatumWriter
import org.codehaus.jackson.JsonNode
/**
*
* Encode an Avro value into JSON.
*/
object ToJson {
/**
* Serializes a Java Avro value into JSON.
*
   * When serializing records, fields whose value matches the field's default value are omitted.
*
* @param value the Java value to serialize.
* @param schema Avro schema of the value to serialize.
* @return the value encoded as a JSON tree.
* @throws IOException on error.
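   *
   * Example (illustrative sketch; `userRecord` and `userSchema` are hypothetical placeholders
   * for an Avro [[IndexedRecord]] and its [[Schema]]):
   * {{{
   *   val node: JsonNode = ToJson.toJsonNode(userRecord, userSchema)
   * }}}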
*/
@throws(classOf[IOException])
def toJsonNode(value: Any, schema: Schema): JsonNode = {
if (value != null) {
schema.getType match {
case Type.NULL => JSON_NODE_FACTORY.nullNode
case Type.BOOLEAN => JSON_NODE_FACTORY.booleanNode(value.asInstanceOf[Boolean])
case Type.DOUBLE => JSON_NODE_FACTORY.numberNode(value.asInstanceOf[Double])
case Type.FLOAT => JSON_NODE_FACTORY.numberNode(value.asInstanceOf[Float])
case Type.INT => JSON_NODE_FACTORY.numberNode(value.asInstanceOf[Int])
case Type.LONG => JSON_NODE_FACTORY.numberNode(value.asInstanceOf[Long])
case Type.STRING => JSON_NODE_FACTORY.textNode(value.asInstanceOf[CharSequence].toString)
case Type.ENUM =>
val strVal = value match {
case x: GenericEnumSymbol => x.toString
case x: Enum[_] => x.toString
case x: String => x
}
JSON_NODE_FACTORY.textNode(strVal) // Enums are represented as strings
case Type.BYTES | Type.FIXED =>
// TODO Bytes are represented as strings (BASE64?)...
throw new RuntimeException("toJsonNode(byte array) not implemented")
case Type.ARRAY =>
val jsonArray = JSON_NODE_FACTORY.arrayNode()
val javaArray = value.asInstanceOf[java.lang.Iterable[_]].iterator
while (javaArray.hasNext) {
val element = javaArray.next
jsonArray.add(toJsonNode(element, schema.getElementType))
}
jsonArray
case Type.MAP =>
val jsonObject = JSON_NODE_FACTORY.objectNode()
val javaMap = value.asInstanceOf[java.util.Map[String, Any]].entrySet.iterator
while (javaMap.hasNext) {
val entry = javaMap.next
jsonObject.put(entry.getKey, toJsonNode(entry.getValue, schema.getValueType))
}
jsonObject
case Type.RECORD =>
val jsonObject = JSON_NODE_FACTORY.objectNode()
val record = value.asInstanceOf[IndexedRecord]
//if (record.getSchema != schema) { // TODO not allow multiple version schema?
// throw new IOException("Avro schema specifies record type '%s' but got '%s'.".format(schema.getFullName, record.getSchema.getFullName))
//}
val fields = schema.getFields.iterator
while (fields.hasNext) {
val field = fields.next
val fieldValue = record.get(field.pos)
val fieldNode = toJsonNode(fieldValue, field.schema)
// Outputs the field only if its value differs from the field's default:
if (field.defaultValue == null || fieldNode != field.defaultValue) {
jsonObject.put(field.name, fieldNode)
}
}
jsonObject
case Type.UNION =>
toUnionJsonNode(value, schema)
case _ =>
throw new RuntimeException("Unexpected schema type '%s'.".format(schema))
}
} else {
JSON_NODE_FACTORY.nullNode
}
}
/**
* Encodes an Avro union into a JSON node.
*
* @param value an Avro union to encode.
* @param schema schema of the union to encode.
* @return the encoded value as a JSON node.
* @throws IOException on error.
*/
@throws(classOf[IOException])
private def toUnionJsonNode(value: Any, schema: Schema): JsonNode = {
if (schema.getType != Type.UNION) {
throw new IOException("Avro schema specifies '%s' but got value: '%s'.".format(schema, value))
}
val optionalType = chana.avro.getFirstNoNullTypeOfUnion(schema)
if (null != optionalType) {
return if (null == value) JSON_NODE_FACTORY.nullNode else toJsonNode(value, optionalType)
}
val typeMap = new java.util.HashMap[Schema.Type, java.util.List[Schema]]()
val tpes = schema.getTypes.iterator
while (tpes.hasNext) {
val tpe = tpes.next
val typeList = typeMap.get(tpe.getType) match {
case null =>
val xs = new java.util.ArrayList[Schema]()
typeMap.put(tpe.getType, xs)
xs
case xs => xs
}
typeList.add(tpe)
}
    // null is encoded directly as an immediate JSON null:
if (null == value) {
if (!typeMap.containsKey(Type.NULL)) {
throw new IOException("Avro schema specifies '%s' but got 'null'.".format(schema))
}
return JSON_NODE_FACTORY.nullNode
}
val union = JSON_NODE_FACTORY.objectNode
val tpes2 = schema.getTypes.iterator
while (tpes2.hasNext) {
val tpe = tpes2.next
try {
val actualNode = toJsonNode(value, tpe)
union.put(tpe.getFullName, actualNode)
return union
} catch {
case ex: IOException => // This type was not the correct union case, ignore...
}
}
throw new IOException("Unable to encode '%s' as union '%s'.".format(value, schema))
}
/**
* Encodes an Avro value into a JSON string.
*
* Fields with default values are omitted.
*
* @param value Avro value to encode.
* @param schema Avro schema of the value.
* @return Pretty string representation of the JSON-encoded value.
* @throws IOException on error.
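   *
   * Example (illustrative sketch; `userRecord` and `userSchema` are hypothetical placeholders):
   * {{{
   *   val prettyJson: String = ToJson.toJsonString(userRecord, userSchema)
   * }}}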
*/
@throws(classOf[IOException])
def toJsonString(value: Any, schema: Schema): String = {
val node = toJsonNode(value, schema)
val stringWriter = new StringWriter()
val generator = JSON_FACTORY.createJsonGenerator(stringWriter)
// We have disabled this because we used unions to represent row key formats
// in the table layout. This is a HACK and needs a better solution.
// TODO: Find better solution.
//generator.disable(Feature.QUOTE_FIELD_NAMES);
JSON_MAPPER.writeValue(generator, node)
stringWriter.toString
}
/**
* Encodes an Avro record into JSON.
*
* @param record Avro record to encode.
* @return Pretty JSON representation of the record.
* @throws IOException on error.
*/
@throws(classOf[IOException])
def toJsonString(record: IndexedRecord): String = {
toJsonString(record, record.getSchema)
}
/**
* Standard Avro/JSON encoder.
*
* @param value Avro value to encode.
* @param schema Avro schema of the value.
* @return JSON-encoded value.
* @throws IOException on error.
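   *
   * Example (illustrative sketch; `userRecord` and `userSchema` are hypothetical placeholders).
   * Unlike [[toJsonString]], this uses Avro's standard JSON encoding and does not omit default-valued fields:
   * {{{
   *   val avroJson: String = ToJson.toAvroJsonString(userRecord, userSchema)
   * }}}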
*/
@throws(classOf[IOException])
def toAvroJsonString(value: Any, schema: Schema): String = {
try {
val jsonOutputStream = new ByteArrayOutputStream()
val jsonEncoder = EncoderFactory.get.jsonEncoder(schema, jsonOutputStream)
val writer = new GenericDatumWriter[Any](schema)
writer.write(value, jsonEncoder)
jsonEncoder.flush()
new String(jsonOutputStream.toByteArray)
} catch {
case ex: IOException => throw new RuntimeException("Internal error: " + ex)
}
}
/**
* Standard Avro/JSON encoder.
*
* @param record Avro record to encode.
* @return JSON-encoded value.
* @throws IOException on error.
*/
@throws(classOf[IOException])
def toAvroJsonString(record: IndexedRecord): String = {
val schema = record.getSchema
try {
val jsonOutputStream = new ByteArrayOutputStream()
val jsonEncoder = EncoderFactory.get().jsonEncoder(schema, jsonOutputStream)
val writer = new SpecificDatumWriter(record.getClass.asInstanceOf[Class[Any]])
writer.write(record, jsonEncoder)
jsonEncoder.flush()
new String(jsonOutputStream.toByteArray)
} catch {
case ex: IOException => throw new RuntimeException("Internal error: " + ex)
}
}
}
|
matthewtt/chana
|
avpath/src/main/scala/chana/avro/ToJson.scala
|
Scala
|
apache-2.0
| 8,419
|
package probability
private[probability] object Utils {
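  // block1 is a "tap" combinator: it runs the side-effecting block on x and returns x unchanged,
  //   e.g. block1(new StringBuilder)(_.append("x")) yields the builder after appending.
  // K is the constant combinator: it returns `value` and ignores the second argument `tmp`.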
def block1[A](x:A)(blk: A => Unit) = {blk(x); x}
def K[A](value:A)(tmp:A) = value
}
|
urso/scala_mprob
|
src/main/scala/probability/Utils.scala
|
Scala
|
bsd-3-clause
| 152
|
package mesosphere.util.state
import scala.language.implicitConversions
import org.apache.curator.framework.CuratorFramework
package object zk {
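  // Implicitly wraps any CuratorFramework in RichCuratorFramework, so importing
  // mesosphere.util.state.zk._ makes the enriched operations available on plain Curator clients.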
implicit def toRichCurator(client: CuratorFramework): RichCuratorFramework = new RichCuratorFramework(client)
}
|
yp-engineering/marathon
|
src/main/scala/mesosphere/util/state/zk/package.scala
|
Scala
|
apache-2.0
| 261
|
package com.lunatic.mlx.cuisines.mllib.trainers
import com.lunatic.mlx.cuisines.mllib.FlowData
import com.lunatic.mlx.cuisines.{Configuration, printEvaluationMetrix}
import com.lunatic.mlx.removeHdfsFile
import org.apache.spark.mllib.classification.{NaiveBayes, NaiveBayesModel}
import org.apache.spark.{SparkConf, SparkContext}
/**
 *
 * @param lambda additive (Laplace) smoothing parameter
 * @param modelType acceptable values: "multinomial" or "bernoulli"
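 *
 * Example (illustrative sketch; assumes a `flowData: FlowData` and an implicit SparkContext in scope):
 * {{{
 *   val model = new NaiveBayesTrainer(lambda = 1.0, modelType = "bernoulli").train(flowData)
 * }}}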
*/
class NaiveBayesTrainer(lambda: Double = 1.0, modelType: String = "multinomial") extends Trainer[NaiveBayesModel] {
def train(flowData: FlowData)(implicit sc: SparkContext) = {
val trainingData = flowData.data
NaiveBayes.train(trainingData, lambda, modelType)
}
}
object NaiveBayesTrainer {
def apply() = new NaiveBayesTrainer()
def main(args: Array[String]) = {
val conf = new SparkConf(true).setAppName(this.getClass.getSimpleName).
setMaster("local[*]")
implicit val sc = new SparkContext(conf)
implicit val configuration = Configuration(args)
val flowData = FlowData.load(configuration.dataPath)
val (model, metrics) = NaiveBayesTrainer().trainEvaluate(flowData)
removeHdfsFile(configuration.naiveBayesPath)
model.save(configuration.naiveBayesPath)
println(s"### ${model.self.getClass.getSimpleName} model evaluation")
printEvaluationMetrix(metrics)
}
}
|
tupol/sparx-mllib
|
src/main/scala/com/lunatic/mlx/cuisines/mllib/trainers/NaiveBayesTrainer.scala
|
Scala
|
apache-2.0
| 1,345
|
package models
import akka.actor._
import play.api.Play.current
import play.api.libs.concurrent.Akka
import play.api.libs.mailer._
object EMailActor {
val actor = Akka.system.actorOf(Props[EmailActor])
}
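// Typical usage (illustrative sketch; the item name, URL and address are placeholder values):
//   EMailActor.actor ! BidReceivedMessage("some item", "http://example.com/item", "bidder@example.com")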
class EmailActor extends Actor {
def receive = {
case m: BidToppedMessage => sendEmail(m, views.html.email.bidTopped.render(m.itemName, m.itemUrl).body)
case m: BidReceivedMessage => sendEmail(m, views.html.email.bidReceived.render(m.itemName, m.itemUrl).body)
}
def sendEmail(m: EmailMessage, body: String) {
val email = Email(
m.subject,
"Lojinha JCranky <noreply@jcranky.com>",
Seq(m.to),
bodyText = Some(body)
)
MailerPlugin.send(email)
}
}
sealed trait EmailMessage {
val itemName: String
val itemUrl: String
val to: String
val subject: String
}
case class BidToppedMessage(itemName: String, itemUrl: String, to: String) extends EmailMessage {
val subject = "better bid received"
}
case class BidReceivedMessage(itemName: String, itemUrl: String, to: String) extends EmailMessage {
val subject = "your bid has been received"
}
|
aspectcg15/play-app
|
app/models/EmailActor.scala
|
Scala
|
gpl-3.0
| 1,111
|
package com.twitter.querulous.async
import java.sql.ResultSet
import com.twitter.util.Future
import com.twitter.querulous.query.{QueryClass, QueryFactory}
import com.twitter.querulous.evaluator.{Transaction, ParamsApplier}
class StandardAsyncQueryEvaluatorFactory(
databaseFactory: AsyncDatabaseFactory,
queryFactory: QueryFactory)
extends AsyncQueryEvaluatorFactory {
def apply(
hosts: List[String],
name: String,
username: String,
password: String,
urlOptions: Map[String, String],
driverName: String
): AsyncQueryEvaluator = {
new StandardAsyncQueryEvaluator(
databaseFactory(hosts, name, username, password, urlOptions, driverName),
queryFactory
)
}
}
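// Example usage (illustrative sketch; `database` and `queryFactory` are assumed to be built elsewhere,
// and QueryClass.Select should be checked against the querulous version in use):
//   val evaluator = new StandardAsyncQueryEvaluator(database, queryFactory)
//   val names = evaluator.select(QueryClass.Select, "SELECT name FROM users") { rs => rs.getString("name") }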
class StandardAsyncQueryEvaluator(val database: AsyncDatabase, queryFactory: QueryFactory)
extends AsyncQueryEvaluator {
def select[A](queryClass: QueryClass, query: String, params: Any*)(f: ResultSet => A) = {
withTransaction(_.select(queryClass, query, params: _*)(f))
}
def selectOne[A](queryClass: QueryClass, query: String, params: Any*)(f: ResultSet => A) = {
withTransaction(_.selectOne(queryClass, query, params: _*)(f))
}
def count(queryClass: QueryClass, query: String, params: Any*) = {
withTransaction(_.count(queryClass, query, params: _*))
}
def execute(queryClass: QueryClass, query: String, params: Any*) = {
withTransaction(_.execute(queryClass, query, params: _*))
}
def executeBatch(queryClass: QueryClass, query: String)(f: ParamsApplier => Unit) = {
withTransaction(_.executeBatch(queryClass, query)(f))
}
def nextId(tableName: String) = {
withTransaction(_.nextId(tableName))
}
def insert(queryClass: QueryClass, query: String, params: Any*) = {
withTransaction(_.insert(queryClass, query, params: _*))
}
def transaction[T](f: Transaction => T) = {
withTransaction { transaction =>
transaction.begin()
try {
val rv = f(transaction)
transaction.commit()
rv
} catch {
case e: Throwable => {
try {
transaction.rollback()
          } catch { case _: Throwable => () }
throw e
}
}
}
}
private def withTransaction[R](f: Transaction => R): Future[R] = {
database.withConnection { c => f(new Transaction(queryFactory, c)) }
}
def shutdown() {
// TODO: Having to call shutdown on a factory object seems bizarre. We need to rewrite this
// (seems to be meant for tracing support) in a saner way.
// queryFactory.shutdown()
database.shutdown()
}
// equality overrides
override def equals(other: Any) = other match {
case other: StandardAsyncQueryEvaluator => database eq other.database
case _ => false
}
override def hashCode = database.hashCode
}
|
twitter/querulous
|
querulous-core/src/main/scala/com/twitter/querulous/async/StandardAsyncQueryEvaluator.scala
|
Scala
|
apache-2.0
| 2,784
|
package scala.pickling.pickler
import org.scalatest.FunSuite
/**
* Tests Either picklers
*/
class EitherPicklerTest extends FunSuite {
import scala.pickling._, Defaults._, static._, json._
test("pickle Left") {
val l: Left[Int, String] = Left(1)
val up = l.pickle.unpickle[Left[Int,String]]
assert(l == up)
val l2: Either[Int, String] = Left(2)
val up2 = l2.pickle.unpickle[Left[Int,String]]
assert(l2 == up2)
val up22 = l2.pickle.unpickle[Either[Int,String]]
assert(l2 == up22)
}
test("pickle Right") {
val r: Right[Int, String] = Right("hi")
val up = r.pickle.unpickle[Right[Int,String]]
assert(r == up)
val r2: Either[Int, String] = Right("hello")
val up2 = r2.pickle.unpickle[Right[Int,String]]
assert(r2 == up2)
val up22 = r2.pickle.unpickle[Either[Int,String]]
assert(r2 == up22)
}
}
|
beni55/pickling
|
core/src/test/scala/scala/pickling/pickler/EitherPicklerTest.scala
|
Scala
|
bsd-3-clause
| 871
|
import scala.collection.JavaConversions._
import tw.com.ehanlin.mde.MongoEmbedder
class DbActionIndexTest extends ReadmeExampleTest {def is = s2"""
test $test
"""
def test = {
val dsl =
"""
|@find <db=game coll=game query={ team : "zebra" } projection={ _id : 0 } index=[ { team : 1 } , { _id : 0 , box_score : 1 } ]>
|[
|]
""".stripMargin
val result = MongoEmbedder.instance.embed(null, dsl)
val keyList = mongo.getDB("game").getCollection("game").getIndexInfo().map(_.get("key").toString).toList
val teamIndex = MObj("team" -> NumInt(1)).toString
val box_scoreIndex = MObj("_id" -> NumInt(0), "box_score" -> NumInt(1)).toString
(keyList.contains(teamIndex) must_== true) and (keyList.contains(box_scoreIndex) must_== true)
}
}
|
eHanlin/mongodb-dbobject-embedder
|
src/test/scala/DbActionIndexTest.scala
|
Scala
|
mit
| 802
|
// Copyright: 2010 - 2017 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.lsp.core
import java.io.{ InputStream, OutputStream }
import akka.event.slf4j.SLF4JLogging
import org.ensime.lsp.api.commands._
import org.ensime.lsp.api.methods.Notifications._
import org.ensime.lsp.api.methods._
import org.ensime.lsp.api.types._
import org.ensime.lsp.rpc.companions._
import org.ensime.lsp.rpc.messages.{
JsonRpcResponseErrorMessage => RpcError,
JsonRpcResponseErrorMessages => RpcErrors,
_
}
import spray.json._
import scala.util.{ Failure, Success, Try }
/**
* A connection that reads and writes Language Server Protocol messages.
*
* @note Commands are executed asynchronously via a thread pool
* @note Notifications are executed synchronously on the calling thread
* @note The command handler returns Any because sometimes response objects can't be part
* of a sealed hierarchy. For instance, goto definition returns a {{{Seq[Location]}}}
* and that can't subclass anything other than Any
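 *
 * Example (illustrative sketch; the handlers below are placeholders):
 * {{{
 *   val connection = new Connection(
 *     System.in, System.out,
 *     notificationHandlers = Seq(n => ()),
 *     commandHandler = (method, command, id) => ???
 *   )
 *   connection.start()
 * }}}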
*/
class Connection(inStream: InputStream,
outStream: OutputStream,
notificationHandlers: Seq[Notification => Unit],
commandHandler: (
String,
ServerCommand,
CorrelationId
) => JsonRpcResponseSuccessMessage)
extends SLF4JLogging {
private val msgReader = new MessageReader(inStream)
private val msgWriter = new MessageWriter(outStream)
def notifySubscribers(n: Notification): Unit =
notificationHandlers.foreach(
f =>
Try(f(n)).recover {
case e => log.error("failed notification handler", e)
}
)
def sendNotification[N <: Notification: RpcNotification](
notification: N
): Unit = {
val json = Notification.write(notification)
msgWriter.write(json)
}
/**
* A notification sent to the client to show a message.
*
* @param tpe One of MessageType values
* @param message The message to display in the client
*/
def showMessage(tpe: Int, message: String): Unit =
sendNotification(ShowMessageParams(tpe, message))
/**
* The log message notification is sent from the server to the client to ask
* the client to log a particular message.
*
* @param tpe One of MessageType values
* @param message The message to display in the client
*/
def logMessage(tpe: Int, message: String): Unit =
sendNotification(LogMessageParams(tpe, message))
/**
* Publish compilation errors for the given file.
*/
def publishDiagnostics(uri: String, diagnostics: Seq[Diagnostic]): Unit =
sendNotification(PublishDiagnostics(uri, diagnostics))
def handleMessage(): Boolean =
msgReader.nextPayload() match {
case None => false
case Some(jsonString) =>
readJsonRpcMessage(jsonString) match {
case Left(e) =>
msgWriter.write(e)
case Right(message) =>
message match {
case notification: JsonRpcNotificationMessage =>
Notification.read(notification) match {
case Left(UnknownMethod) =>
log.error(
s"No notification type exists with method=${notification.method}"
)
case Left(e) =>
log.error(
s"Invalid Notification: $e - Message: $message"
)
case Right(n) => notifySubscribers(n)
}
case request: JsonRpcRequestMessage =>
unpackRequest(request) match {
case Left(e) =>
msgWriter.write(e)
case Right(command) =>
msgWriter.write(
commandHandler(request.method, command, request.id)
)
}
case response: JsonRpcResponseMessage =>
log.info(s"Received response: $response")
case m =>
log.error(s"Received unknown message: $m")
}
case m => log.error(s"Received unknown message: $m")
}
true
}
def start(): Unit =
while (handleMessage()) {}
private def readJsonRpcMessage(
jsonString: String
): Either[RpcError, JsonRpcMessage] = {
log.debug(s"Received $jsonString")
Try(JsParser(jsonString)) match {
case Failure(e) =>
Left(RpcErrors.parseError(e, CorrelationId()))
case Success(json) =>
Try(JsReader[JsonRpcMessage].read(json)) match {
case Failure(e) => Left(RpcErrors.invalidRequest(e, CorrelationId()))
case Success(x) => Right(x)
}
}
}
private def unpackRequest(
request: JsonRpcRequestMessage
): Either[RpcError, ServerCommand] =
ServerCommand.read(request) match {
case Left(UnknownMethod) =>
Left(RpcErrors.methodNotFound(request.method, request.id))
case Left(e) =>
Left(RpcErrors.invalidParams(e.describe, request.id))
case Right(command) =>
Right(command)
}
}
|
fommil/ensime-server
|
lsp/src/main/scala/org/ensime/lsp/core/Connection.scala
|
Scala
|
gpl-3.0
| 5,181
|
package theia
import java.io.File
import st.sparse.sundry._
import com.sksamuel.scrimage._
import java.nio.file.Files
import st.sparse.sundry._
import scala.pickling._
import scala.pickling.binary._
import scala.util.Random
import theia.mitsuba.Render
trait TestingUtil extends Logging {
lazy val configureLogger = {
// Must be one of: "trace", "debug", "info", "warn", or "error".
System.setProperty("org.slf4j.simpleLogger.defaultLogLevel", "info")
}
configureLogger
val random = new Random(0)
val resourceRoot = ExistingDirectory(
new File(getClass.getResource("/").getPath))
val zippedCBox = Render.zip(ExistingDirectory(new File(resourceRoot, "cbox")))
// val goldfishGirl = Image(ExistingFile(new File(
// resourceRoot,
// "/goldfish_girl.png")))
val outputRoot =
ExistingDirectory(Files.createTempDirectory("TestTheiaOutputRoot").toFile)
implicit val logRoot = {
val directory = new File(outputRoot, "log")
if (!directory.isDirectory) directory.mkdir()
LogRoot(ExistingDirectory(directory))
}
}
|
emchristiansen/Theia
|
src/test/scala/theia/TestingUtil.scala
|
Scala
|
unlicense
| 1,071
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.pointr.tcp.rpc
import java.util
import java.util.Random
import com.pointr.tcp.util.Logger._
//import breeze.linalg.{DenseVector => BDV}
object SolverServerIf {
val WeightsMergePolicies = Seq("average", "best")
}
class SolverServerIf(conf: ServerIfConf) extends ServerIf("SolverServer", Option(conf)) {
val weightsMergePolicy = conf.props("weightsMergePolicy")
import com.pointr.tcp.rpc.SolverIf._
import collection.mutable
var loops = 0
val MaxLoops = conf.service.get.props("MaxLoops").asInstanceOf[Int]
var curWeightsAndAccuracy: (DArray, Double) = (null,-1.0)
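  // Summary of the request protocol handled below (inferred from the match arms):
  //  - KeepGoingReq:       clients keep iterating while fewer than MaxLoops epochs have run
  //  - GetModelParamsReq:  returns default model/hyperparameters with randomly initialised 4x4 weights
  //  - SendEpochResultReq: merges the reported weights ("best" keeps the highest-accuracy epoch;
  //                        otherwise results are averaged, currently via a stubbed placeholder)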
override def service(req: P2pReq[_]): P2pResp[_] = {
val allResults = new mutable.ArrayBuffer[EpochResult]()
req match {
case o: KeepGoingReq => {
KeepGoingResp(o.value < MaxLoops)
}
case o: GetModelParamsReq => {
GetModelParamsResp(ModelParams(DefaultModel(), DefaultHyperParams(),
Some(Weights(Array(4, 4), Array.tabulate(16) {
_ * new Random().nextDouble
}))))
}
case o: SendEpochResultReq => {
val epochResult = o.value
allResults += epochResult
curWeightsAndAccuracy = {
if (weightsMergePolicy == "best") {
if (epochResult.accuracy > curWeightsAndAccuracy._2) {
info(s"Found best: accuracy = ${epochResult.accuracy}")
(epochResult.W.d, epochResult.accuracy)
} else {
debug("Sorry we're worse .. skipping..")
curWeightsAndAccuracy
}
} else {
// val sum = allResults.map(x => new BDV[Double](x.W.d))
// .foldLeft(new BDV[Double](Array.fill(allResults.head.W.d.length)(0.0))) { case (sum, bdv) => sum + bdv }
// val avg = sum :/ allResults.length.toDouble
// (avg.toArray, allResults.map(_.accuracy).sum / allResults.length)
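// Placeholder weights (a constant array) while the breeze-based averaging above is commented out; presumably the intended merge averages the epoch weight vectors.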
val sum = Array.fill(20)(20.0)
(sum, allResults.map(_.accuracy).sum / allResults.length)
}
}
SendEpochResultResp(ModelParams(DefaultModel(), DefaultHyperParams(),
  Some(Weights(epochResult.W.dims, curWeightsAndAccuracy._1))))
}
case _ => throw new IllegalArgumentException(s"Unknown service type ${req.getClass.getName}")
}
}
}
|
OpenChaiSpark/OCspark
|
tcpclient/src/main/scala/com/pointr/tcp/rpc/SolverServerIf.scala
|
Scala
|
apache-2.0
| 3,100
|
/*
* Copyright 2015 the original author or authors.
* @https://github.com/scouter-project/scouter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package scouter.server.core.cache;
import scouter.lang.pack.AlertPack;
/**
* singleton object that store realtime AlertPack.
*/
object AlertCache {
//Circular queue
val cache = new LoopCache[AlertPack](1024);
def put(alert: AlertPack) {
cache.put(CacheHelper.objType.unipoint(alert.objType), alert.objHash, alert);
}
def get(objType: String, last_loop: Long, last_index: Int): CacheOut[AlertPack] = {
return cache.get(objType, last_loop, last_index);
}
}
|
yuyupapa/OpenSource
|
scouter.server/src/scouter/server/core/cache/AlertCache.scala
|
Scala
|
apache-2.0
| 1,182
|
package com.tardis.common
import java.util.UUID
import com.temportalist.origin.api.common.lib.LogHelper
import com.temportalist.origin.api.common.utility.WorldHelper
import com.temportalist.origin.foundation.common.extended.ExtendedEntity
import com.temportalist.origin.foundation.common.network.PacketExtendedSync
import com.temportalist.origin.internal.common.Origin
import com.temportalist.origin.internal.common.extended.ExtendedEntityHandler
import cpw.mods.fml.relauncher.Side
import net.minecraft.entity.player.EntityPlayer
import net.minecraft.nbt.NBTTagCompound
import net.minecraftforge.common.DimensionManager
/**
*
*
* @author TheTemportalist
*/
class PlayerTardis(p: EntityPlayer) extends ExtendedEntity(p) {
private var tardisDim: Int = 0
private var tardisUUID: UUID = null
private var originalPOV: Int = -1
override def saveNBTData(tagCom: NBTTagCompound): Unit = {
//tagCom.setInteger("tardisDim", this.tardisDim)
//tagCom.setInteger("tardisID", this.tardisID)
}
override def loadNBTData(tagCom: NBTTagCompound): Unit = {
//this.tardisDim = tagCom.getInteger("tardisDim")
//this.tardisID = tagCom.getInteger("tardisID")
}
def setTardis(tDim: Int, tUUID: UUID): Unit = {
this.tardisDim = tDim
this.tardisUUID = tUUID
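// A null UUID is encoded as the pair (0L, 0L) so it can travel through the sync packet; handleSyncPacketData decodes it back to null.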
val least = if (this.tardisUUID == null) 0L else this.tardisUUID.getLeastSignificantBits
val most = if (this.tardisUUID == null) 0L else this.tardisUUID.getMostSignificantBits
this.syncEntity("tardis", this.tardisDim, least, most)
}
def setTardis(tardis: EntityTardis): Unit = {
if (tardis == null) this.setTardis(0, null)
else this.setTardis(tardis.worldObj.provider.dimensionId, tardis.getUniqueID)
}
def hasTardisToControl: Boolean = this.tardisUUID != null
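// FIXME: the third argument below calls getTardis recursively, so this method never terminates; presumably a cached entity id was intended.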
def getTardis: EntityTardis = {
if (this.hasTardisToControl)
Tardis.getTardisInWorld(
DimensionManager.getWorld(this.tardisDim),
this.tardisUUID, this.getTardis.getEntityId
)
else null
}
override def handleSyncPacketData(uniqueIdentifier: String, packet: PacketExtendedSync,
side: Side): Unit = {
uniqueIdentifier match {
case "tardis" =>
this.tardisDim = packet.get[Int]
val least = packet.get[Long]
val most = packet.get[Long]
if (least == 0L && most == 0L) this.tardisUUID = null
else this.tardisUUID = new UUID(most, least)
case _ =>
}
}
}
object PlayerTardis {
def get(player: EntityPlayer): PlayerTardis = {
ExtendedEntityHandler.getExtended(player, classOf[PlayerTardis])
}
}
|
TheTemportalist/Tardis
|
src/main/scala/com/tardis/common/PlayerTardis.scala
|
Scala
|
apache-2.0
| 2,489
|
package info.glennengstrand.perf3
import org.apache.kafka.clients.consumer.{KafkaConsumer, ConsumerRecord, ConsumerRecords}
import java.util.Properties
import scala.collection.JavaConversions._
import java.util.logging.{Logger, Level}
/** consumes the stream of messages from the feed topic in kafka and processes them as performance measurements */
class PerformanceMetricConsumer(host: String, topic: String) {
val log = Logger.getLogger("info.glennengstrand.perf3.PerformanceMetricConsumer")
def connect: KafkaConsumer[String, String] = {
val props = new Properties
props.setProperty("bootstrap.servers", s"${host}:9092")
props.setProperty("group.id", "news-feed-performance")
props.setProperty("enable.auto.commit", "true")
props.setProperty("partition.assignment.strategy", "org.apache.kafka.clients.consumer.RangeAssignor")
props.setProperty("auto.commit.interval.ms", "1000")
props.setProperty("session.timeout.ms", "30000")
props.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
props.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
new KafkaConsumer[String, String](props)
}
lazy val consumer = connect
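// Usage sketch (host and topic are hypothetical): new PerformanceMetricConsumer("localhost", "feed").consume(println)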
def consume(process: PerformanceMeasurement => Unit): Unit = {
val topics = new java.util.ArrayList[String]
topics.add(topic)
consumer.subscribe(topics)
while (true) {
val cr = consumer.poll(5000L)
if (cr != null) {
if (!cr.isEmpty()) {
cr.iterator.foreach {
case record: ConsumerRecord[String, String] => {
if (record != null) {
process(PerformanceMeasurement(record.value))
}
}
}
}
}
}
}
}
|
gengstrand/clojure-news-feed
|
client/perf3/src/main/scala/info/glennengstrand/perf3/PerformanceMetricConsumer.scala
|
Scala
|
epl-1.0
| 1,788
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.api.batch.sql
import org.apache.flink.api.scala._
import org.apache.flink.table.api.scala._
import org.apache.flink.table.utils.TableTestUtil._
import org.apache.flink.table.utils.TableTestBase
import org.junit.Test
class SingleRowJoinTest extends TableTestBase {
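// Each test builds the expected optimized plan as a string and checks that joins against single-row (global aggregate) sub-queries are translated to DataSetSingleRowJoin.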
@Test
def testSingleRowCrossJoin(): Unit = {
val util = batchTestUtil()
util.addTable[(Int, Int)]("A", 'a1, 'a2)
val query =
"SELECT a1, asum " +
"FROM A, (SELECT sum(a1) + sum(a2) AS asum FROM A)"
val expected =
binaryNode(
"DataSetSingleRowJoin",
unaryNode(
"DataSetCalc",
batchTableNode(0),
term("select", "a1")
),
unaryNode(
"DataSetCalc",
unaryNode(
"DataSetAggregate",
unaryNode(
"DataSetUnion",
unaryNode(
"DataSetValues",
batchTableNode(0),
tuples(List(null, null)),
term("values", "a1", "a2")
),
term("union","a1","a2")
),
term("select", "SUM(a1) AS $f0", "SUM(a2) AS $f1")
),
term("select", "+($f0, $f1) AS asum")
),
term("where", "true"),
term("join", "a1", "asum"),
term("joinType", "NestedLoopInnerJoin")
)
util.verifySql(query, expected)
}
@Test
def testSingleRowEquiJoin(): Unit = {
val util = batchTestUtil()
util.addTable[(Int, String)]("A", 'a1, 'a2)
val query =
"SELECT a1, a2 " +
"FROM A, (SELECT count(a1) AS cnt FROM A) " +
"WHERE a1 = cnt"
val expected =
unaryNode(
"DataSetCalc",
binaryNode(
"DataSetSingleRowJoin",
batchTableNode(0),
unaryNode(
"DataSetAggregate",
unaryNode(
"DataSetUnion",
unaryNode(
"DataSetValues",
unaryNode(
"DataSetCalc",
batchTableNode(0),
term("select", "a1")
),
tuples(List(null)),
term("values", "a1")
),
term("union","a1")
),
term("select", "COUNT(a1) AS cnt")
),
term("where", "=(CAST(a1), cnt)"),
term("join", "a1", "a2", "cnt"),
term("joinType", "NestedLoopInnerJoin")
),
term("select", "a1", "a2")
)
util.verifySql(query, expected)
}
@Test
def testSingleRowNotEquiJoin(): Unit = {
val util = batchTestUtil()
util.addTable[(Int, String)]("A", 'a1, 'a2)
val query =
"SELECT a1, a2 " +
"FROM A, (SELECT count(a1) AS cnt FROM A) " +
"WHERE a1 < cnt"
val expected =
unaryNode(
"DataSetCalc",
binaryNode(
"DataSetSingleRowJoin",
batchTableNode(0),
unaryNode(
"DataSetAggregate",
unaryNode(
"DataSetUnion",
unaryNode(
"DataSetValues",
unaryNode(
"DataSetCalc",
batchTableNode(0),
term("select", "a1")
),
tuples(List(null)),
term("values", "a1")
),
term("union", "a1")
),
term("select", "COUNT(a1) AS cnt")
),
term("where", "<(a1, cnt)"),
term("join", "a1", "a2", "cnt"),
term("joinType", "NestedLoopInnerJoin")
),
term("select", "a1", "a2")
)
util.verifySql(query, expected)
}
@Test
def testSingleRowJoinWithComplexPredicate(): Unit = {
val util = batchTestUtil()
util.addTable[(Int, Long)]("A", 'a1, 'a2)
util.addTable[(Int, Long)]("B", 'b1, 'b2)
val query =
"SELECT a1, a2, b1, b2 " +
"FROM A, (SELECT min(b1) AS b1, max(b2) AS b2 FROM B) " +
"WHERE a1 < b1 AND a2 = b2"
val expected = binaryNode(
"DataSetSingleRowJoin",
batchTableNode(0),
unaryNode(
"DataSetAggregate",
unaryNode(
"DataSetUnion",
unaryNode(
"DataSetValues",
batchTableNode(1),
tuples(List(null, null)),
term("values", "b1", "b2")
),
term("union","b1","b2")
),
term("select", "MIN(b1) AS b1", "MAX(b2) AS b2")
),
term("where", "AND(<(a1, b1)", "=(a2, b2))"),
term("join", "a1", "a2", "b1", "b2"),
term("joinType", "NestedLoopInnerJoin")
)
util.verifySql(query, expected)
}
@Test
def testRightSingleLeftJoinEqualPredicate(): Unit = {
val util = batchTestUtil()
util.addTable[(Long, Int)]("A", 'a1, 'a2)
util.addTable[(Int, Int)]("B", 'b1, 'b2)
val queryLeftJoin =
"SELECT a2 " +
"FROM A " +
" LEFT JOIN " +
"(SELECT COUNT(*) AS cnt FROM B) AS x " +
" ON a1 = cnt"
val expected =
unaryNode(
"DataSetCalc",
unaryNode(
"DataSetSingleRowJoin",
batchTableNode(0),
term("where", "=(a1, cnt)"),
term("join", "a1", "a2", "cnt"),
term("joinType", "NestedLoopLeftJoin")
),
term("select", "a2")
) + "\\n" +
unaryNode(
"DataSetAggregate",
unaryNode(
"DataSetUnion",
unaryNode(
"DataSetValues",
unaryNode(
"DataSetCalc",
batchTableNode(1),
term("select", "0 AS $f0")),
tuples(List(null)), term("values", "$f0")
),
term("union", "$f0")
),
term("select", "COUNT(*) AS cnt")
)
util.verifySql(queryLeftJoin, expected)
}
@Test
def testRightSingleLeftJoinNotEqualPredicate(): Unit = {
val util = batchTestUtil()
util.addTable[(Long, Int)]("A", 'a1, 'a2)
util.addTable[(Int, Int)]("B", 'b1, 'b2)
val queryLeftJoin =
"SELECT a2 " +
"FROM A " +
" LEFT JOIN " +
"(SELECT COUNT(*) AS cnt FROM B) AS x " +
" ON a1 > cnt"
val expected =
unaryNode(
"DataSetCalc",
unaryNode(
"DataSetSingleRowJoin",
batchTableNode(0),
term("where", ">(a1, cnt)"),
term("join", "a1", "a2", "cnt"),
term("joinType", "NestedLoopLeftJoin")
),
term("select", "a2")
) + "\\n" +
unaryNode(
"DataSetAggregate",
unaryNode(
"DataSetUnion",
unaryNode(
"DataSetValues",
unaryNode(
"DataSetCalc",
batchTableNode(1),
term("select", "0 AS $f0")),
tuples(List(null)), term("values", "$f0")
),
term("union", "$f0")
),
term("select", "COUNT(*) AS cnt")
)
util.verifySql(queryLeftJoin, expected)
}
@Test
def testLeftSingleRightJoinEqualPredicate(): Unit = {
val util = batchTestUtil()
util.addTable[(Long, Long)]("A", 'a1, 'a2)
util.addTable[(Long, Long)]("B", 'b1, 'b2)
val queryRightJoin =
"SELECT a1 " +
"FROM (SELECT COUNT(*) AS cnt FROM B) " +
" RIGHT JOIN " +
"A " +
" ON cnt = a2"
val expected =
unaryNode(
"DataSetCalc",
unaryNode(
"DataSetSingleRowJoin",
"",
term("where", "=(cnt, a2)"),
term("join", "cnt", "a1", "a2"),
term("joinType", "NestedLoopRightJoin")
),
term("select", "a1")
) + unaryNode(
"DataSetAggregate",
unaryNode(
"DataSetUnion",
unaryNode(
"DataSetValues",
unaryNode(
"DataSetCalc",
batchTableNode(1),
term("select", "0 AS $f0")),
tuples(List(null)), term("values", "$f0")
),
term("union", "$f0")
),
term("select", "COUNT(*) AS cnt")
) + "\\n" +
batchTableNode(0)
util.verifySql(queryRightJoin, expected)
}
@Test
def testLeftSingleRightJoinNotEqualPredicate(): Unit = {
val util = batchTestUtil()
util.addTable[(Long, Long)]("A", 'a1, 'a2)
util.addTable[(Long, Long)]("B", 'b1, 'b2)
val queryRightJoin =
"SELECT a1 " +
"FROM (SELECT COUNT(*) AS cnt FROM B) " +
" RIGHT JOIN " +
"A " +
" ON cnt < a2"
val expected =
unaryNode(
"DataSetCalc",
unaryNode(
"DataSetSingleRowJoin",
"",
term("where", "<(cnt, a2)"),
term("join", "cnt", "a1", "a2"),
term("joinType", "NestedLoopRightJoin")
),
term("select", "a1")
) +
unaryNode(
"DataSetAggregate",
unaryNode(
"DataSetUnion",
unaryNode(
"DataSetValues",
unaryNode(
"DataSetCalc",
batchTableNode(1),
term("select", "0 AS $f0")),
tuples(List(null)), term("values", "$f0")
),
term("union", "$f0")
),
term("select", "COUNT(*) AS cnt")
) + "\\n" +
batchTableNode(0)
util.verifySql(queryRightJoin, expected)
}
@Test
def testSingleRowJoinInnerJoin(): Unit = {
val util = batchTestUtil()
util.addTable[(Int, Int)]("A", 'a1, 'a2)
val query =
"SELECT a2, sum(a1) " +
"FROM A " +
"GROUP BY a2 " +
"HAVING sum(a1) > (SELECT sum(a1) * 0.1 FROM A)"
val expected =
unaryNode(
"DataSetCalc",
unaryNode(
"DataSetSingleRowJoin",
unaryNode(
"DataSetAggregate",
batchTableNode(0),
term("groupBy", "a2"),
term("select", "a2", "SUM(a1) AS EXPR$1")
),
term("where", ">(EXPR$1, EXPR$0)"),
term("join", "a2", "EXPR$1", "EXPR$0"),
term("joinType", "NestedLoopInnerJoin")
),
term("select", "a2", "EXPR$1")
) + "\\n" +
unaryNode(
"DataSetCalc",
unaryNode(
"DataSetAggregate",
unaryNode(
"DataSetUnion",
unaryNode(
"DataSetValues",
unaryNode(
"DataSetCalc",
batchTableNode(0),
term("select", "a1")
),
tuples(List(null)), term("values", "a1")
),
term("union", "a1")
),
term("select", "SUM(a1) AS $f0")
),
term("select", "*($f0, 0.1) AS EXPR$0")
)
util.verifySql(query, expected)
}
}
|
haohui/flink
|
flink-libraries/flink-table/src/test/scala/org/apache/flink/table/api/batch/sql/SingleRowJoinTest.scala
|
Scala
|
apache-2.0
| 11,701
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.client
import java.io.{File, PrintStream}
import java.util.Locale
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.hadoop.hive.common.StatsSetupConst
import org.apache.hadoop.hive.conf.HiveConf
import org.apache.hadoop.hive.conf.HiveConf.ConfVars
import org.apache.hadoop.hive.metastore.{TableType => HiveTableType}
import org.apache.hadoop.hive.metastore.api.{Database => HiveDatabase, FieldSchema, Order}
import org.apache.hadoop.hive.metastore.api.{SerDeInfo, StorageDescriptor}
import org.apache.hadoop.hive.ql.Driver
import org.apache.hadoop.hive.ql.metadata.{Hive, Partition => HivePartition, Table => HiveTable}
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.HIVE_COLUMN_ORDER_ASC
import org.apache.hadoop.hive.ql.processors._
import org.apache.hadoop.hive.ql.session.SessionState
import org.apache.spark.{SparkConf, SparkException}
import org.apache.spark.internal.Logging
import org.apache.spark.metrics.source.HiveCatalogMetrics
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.analysis.{NoSuchDatabaseException, NoSuchPartitionException}
import org.apache.spark.sql.catalyst.catalog._
import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.catalyst.parser.{CatalystSqlParser, ParseException}
import org.apache.spark.sql.execution.QueryExecutionException
import org.apache.spark.sql.execution.command.DDLUtils
import org.apache.spark.sql.hive.HiveExternalCatalog.{DATASOURCE_SCHEMA, DATASOURCE_SCHEMA_NUMPARTS, DATASOURCE_SCHEMA_PART_PREFIX}
import org.apache.spark.sql.hive.client.HiveClientImpl._
import org.apache.spark.sql.types._
import org.apache.spark.util.{CircularBuffer, Utils}
/**
* A class that wraps the HiveClient and converts its responses to externally visible classes.
* Note that this class is typically loaded with an internal classloader for each instantiation,
* allowing it to interact directly with a specific isolated version of Hive. Loading this class
* with the isolated classloader however will result in it only being visible as a [[HiveClient]],
* not a [[HiveClientImpl]].
*
* This class needs to interact with multiple versions of Hive, but will always be compiled with
* the 'native', execution version of Hive. Therefore, any places where hive breaks compatibility
* must use reflection after matching on `version`.
*
 * Every HiveClientImpl creates an internal HiveConf object. This object uses the given
 * `hadoopConf` as the base. All options set in `sparkConf` are then applied to the HiveConf
 * object, overriding any existing options. Finally, the options in `extraConfig` are applied
 * to the HiveConf object, again overriding any existing options.
*
 * @param version the version of hive used when picking function calls that are not compatible.
* @param sparkConf all configuration options set in SparkConf.
* @param hadoopConf the base Configuration object used by the HiveConf created inside
* this HiveClientImpl.
* @param extraConfig a collection of configuration options that will be added to the
* hive conf before opening the hive client.
* @param initClassLoader the classloader used when creating the `state` field of
* this [[HiveClientImpl]].
*/
private[hive] class HiveClientImpl(
override val version: HiveVersion,
sparkConf: SparkConf,
hadoopConf: Configuration,
extraConfig: Map[String, String],
initClassLoader: ClassLoader,
val clientLoader: IsolatedClientLoader)
extends HiveClient
with Logging {
// Circular buffer to hold what hive prints to STDOUT and ERR. Only printed when failures occur.
private val outputBuffer = new CircularBuffer()
private val shim = version match {
case hive.v12 => new Shim_v0_12()
case hive.v13 => new Shim_v0_13()
case hive.v14 => new Shim_v0_14()
case hive.v1_0 => new Shim_v1_0()
case hive.v1_1 => new Shim_v1_1()
case hive.v1_2 => new Shim_v1_2()
case hive.v2_0 => new Shim_v2_0()
case hive.v2_1 => new Shim_v2_1()
}
// Create an internal session state for this HiveClientImpl.
val state: SessionState = {
val original = Thread.currentThread().getContextClassLoader
if (clientLoader.isolationOn) {
// Switch to the initClassLoader.
Thread.currentThread().setContextClassLoader(initClassLoader)
try {
newState()
} finally {
Thread.currentThread().setContextClassLoader(original)
}
} else {
// Isolation off means we detect a CliSessionState instance in current thread.
// 1: Inside the spark project, we have already started a CliSessionState in
// `SparkSQLCLIDriver`, which contains configurations from command lines. Later, we call
// `SparkSQLEnv.init()` there, which would create a new hive client again, so we should keep those
// configurations and reuse the existing instance of `CliSessionState`. In this case,
// SessionState.get will always return a CliSessionState.
// 2: In another case, a user app may start a CliSessionState outside the spark project with
// built-in hive jars, which turns off isolation. If SessionState.detachSession is then
// called to remove the current state, any hive client created afterwards will initialize
// its own state via newState().
val ret = SessionState.get
if (ret != null) {
// hive.metastore.warehouse.dir is determined in SharedState after the CliSessionState
// instance is constructed, so we need to follow that change here.
Option(hadoopConf.get(ConfVars.METASTOREWAREHOUSE.varname)).foreach { dir =>
ret.getConf.setVar(ConfVars.METASTOREWAREHOUSE, dir)
}
ret
} else {
newState()
}
}
}
// Log the default warehouse location.
logInfo(
s"Warehouse location for Hive client " +
s"(version ${version.fullVersion}) is ${conf.getVar(ConfVars.METASTOREWAREHOUSE)}")
private def newState(): SessionState = {
val hiveConf = new HiveConf(classOf[SessionState])
// HiveConf is a Hadoop Configuration; it has a classLoader field whose
// initial value is the current thread's context class loader
// (i.e. initClassLoader here).
// We call hiveConf.setClassLoader(initClassLoader) here to make
// this explicit.
hiveConf.setClassLoader(initClassLoader)
// 1: Take all from the hadoopConf to this hiveConf.
// This hadoopConf contains user settings in Hadoop's core-site.xml file
// and Hive's hive-site.xml file. Note, we load hive-site.xml file manually in
// SharedState and put settings in this hadoopConf instead of relying on HiveConf
// to load user settings. Otherwise, HiveConf's initialize method will override
// settings in the hadoopConf. This issue only shows up when spark.sql.hive.metastore.jars
// is not set to builtin. When spark.sql.hive.metastore.jars is builtin, the classpath
// has hive-site.xml. So, HiveConf will use that to override its default values.
// 2: we set all spark confs to this hiveConf.
// 3: we set all entries in config to this hiveConf.
(hadoopConf.iterator().asScala.map(kv => kv.getKey -> kv.getValue)
++ sparkConf.getAll.toMap ++ extraConfig).foreach { case (k, v) =>
logDebug(
s"""
|Applying Hadoop/Hive/Spark and extra properties to Hive Conf:
|$k=${if (k.toLowerCase(Locale.ROOT).contains("password")) "xxx" else v}
""".stripMargin)
hiveConf.set(k, v)
}
val state = new SessionState(hiveConf)
if (clientLoader.cachedHive != null) {
Hive.set(clientLoader.cachedHive.asInstanceOf[Hive])
}
SessionState.start(state)
state.out = new PrintStream(outputBuffer, true, "UTF-8")
state.err = new PrintStream(outputBuffer, true, "UTF-8")
state
}
/** Returns the configuration for the current session. */
def conf: HiveConf = state.getConf
private val userName = conf.getUser
override def getConf(key: String, defaultValue: String): String = {
conf.get(key, defaultValue)
}
// We use hive's conf for compatibility.
private val retryLimit = conf.getIntVar(HiveConf.ConfVars.METASTORETHRIFTFAILURERETRIES)
private val retryDelayMillis = shim.getMetastoreClientConnectRetryDelayMillis(conf)
/**
* Runs `f` with multiple retries in case the hive metastore is temporarily unreachable.
*/
private def retryLocked[A](f: => A): A = clientLoader.synchronized {
// Hive sometimes retries internally, so set a deadline to avoid compounding delays.
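// retryDelayMillis is in milliseconds; multiply by 1e6 to express the deadline in nanoseconds for comparison with System.nanoTime.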
val deadline = System.nanoTime + (retryLimit * retryDelayMillis * 1e6).toLong
var numTries = 0
var caughtException: Exception = null
do {
numTries += 1
try {
return f
} catch {
case e: Exception if causedByThrift(e) =>
caughtException = e
logWarning(
"HiveClient got thrift exception, destroying client and retrying " +
s"(${retryLimit - numTries} tries remaining)", e)
clientLoader.cachedHive = null
Thread.sleep(retryDelayMillis)
}
} while (numTries <= retryLimit && System.nanoTime < deadline)
if (System.nanoTime > deadline) {
logWarning("Deadline exceeded")
}
throw caughtException
}
private def causedByThrift(e: Throwable): Boolean = {
var target = e
while (target != null) {
val msg = target.getMessage()
if (msg != null && msg.matches("(?s).*(TApplication|TProtocol|TTransport)Exception.*")) {
return true
}
target = target.getCause()
}
false
}
private def client: Hive = {
if (clientLoader.cachedHive != null) {
clientLoader.cachedHive.asInstanceOf[Hive]
} else {
val c = Hive.get(conf)
clientLoader.cachedHive = c
c
}
}
/** Return the associated Hive [[SessionState]] of this [[HiveClientImpl]] */
override def getState: SessionState = withHiveState(state)
/**
* Runs `f` with ThreadLocal session state and classloaders configured for this version of hive.
*/
def withHiveState[A](f: => A): A = retryLocked {
val original = Thread.currentThread().getContextClassLoader
val originalConfLoader = state.getConf.getClassLoader
// The classloader in clientLoader could be changed after addJar, always use the latest
// classloader. We explicitly set the context class loader since "conf.setClassLoader" does
// not do that, and the Hive client libraries may need to load classes defined by the client's
// class loader.
Thread.currentThread().setContextClassLoader(clientLoader.classLoader)
state.getConf.setClassLoader(clientLoader.classLoader)
// Set the thread local metastore client to the client associated with this HiveClientImpl.
Hive.set(client)
// Replace conf in the thread local Hive with current conf
Hive.get(conf)
// setCurrentSessionState will use the classLoader associated
// with the HiveConf in `state` to override the context class loader of the current
// thread.
shim.setCurrentSessionState(state)
val ret = try f finally {
state.getConf.setClassLoader(originalConfLoader)
Thread.currentThread().setContextClassLoader(original)
HiveCatalogMetrics.incrementHiveClientCalls(1)
}
ret
}
def setOut(stream: PrintStream): Unit = withHiveState {
state.out = stream
}
def setInfo(stream: PrintStream): Unit = withHiveState {
state.info = stream
}
def setError(stream: PrintStream): Unit = withHiveState {
state.err = stream
}
override def setCurrentDatabase(databaseName: String): Unit = withHiveState {
if (databaseExists(databaseName)) {
state.setCurrentDatabase(databaseName)
} else {
throw new NoSuchDatabaseException(databaseName)
}
}
override def createDatabase(
database: CatalogDatabase,
ignoreIfExists: Boolean): Unit = withHiveState {
client.createDatabase(
new HiveDatabase(
database.name,
database.description,
CatalogUtils.URIToString(database.locationUri),
Option(database.properties).map(_.asJava).orNull),
ignoreIfExists)
}
override def dropDatabase(
name: String,
ignoreIfNotExists: Boolean,
cascade: Boolean): Unit = withHiveState {
client.dropDatabase(name, true, ignoreIfNotExists, cascade)
}
override def alterDatabase(database: CatalogDatabase): Unit = withHiveState {
client.alterDatabase(
database.name,
new HiveDatabase(
database.name,
database.description,
CatalogUtils.URIToString(database.locationUri),
Option(database.properties).map(_.asJava).orNull))
}
override def getDatabase(dbName: String): CatalogDatabase = withHiveState {
Option(client.getDatabase(dbName)).map { d =>
CatalogDatabase(
name = d.getName,
description = d.getDescription,
locationUri = CatalogUtils.stringToURI(d.getLocationUri),
properties = Option(d.getParameters).map(_.asScala.toMap).orNull)
}.getOrElse(throw new NoSuchDatabaseException(dbName))
}
override def databaseExists(dbName: String): Boolean = withHiveState {
client.databaseExists(dbName)
}
override def listDatabases(pattern: String): Seq[String] = withHiveState {
client.getDatabasesByPattern(pattern).asScala
}
override def tableExists(dbName: String, tableName: String): Boolean = withHiveState {
Option(client.getTable(dbName, tableName, false /* do not throw exception */)).nonEmpty
}
override def getTableOption(
dbName: String,
tableName: String): Option[CatalogTable] = withHiveState {
logDebug(s"Looking up $dbName.$tableName")
Option(client.getTable(dbName, tableName, false)).map { h =>
// Note: Hive separates partition columns and the schema, but for us the
// partition columns are part of the schema
val cols = h.getCols.asScala.map(fromHiveColumn)
val partCols = h.getPartCols.asScala.map(fromHiveColumn)
val schema = StructType(cols ++ partCols)
val bucketSpec = if (h.getNumBuckets > 0) {
val sortColumnOrders = h.getSortCols.asScala
// Currently Spark only supports columns to be sorted in ascending order
// but Hive can support both ascending and descending order. If all the columns
// are sorted in ascending order, only then propagate the sortedness information
// to downstream processing / optimizations in Spark
// TODO: In future we can have Spark support columns sorted in descending order
val allAscendingSorted = sortColumnOrders.forall(_.getOrder == HIVE_COLUMN_ORDER_ASC)
val sortColumnNames = if (allAscendingSorted) {
sortColumnOrders.map(_.getCol)
} else {
Seq.empty
}
Option(BucketSpec(h.getNumBuckets, h.getBucketCols.asScala, sortColumnNames))
} else {
None
}
// Skew spec and storage handler can't be mapped to CatalogTable (yet)
val unsupportedFeatures = ArrayBuffer.empty[String]
if (!h.getSkewedColNames.isEmpty) {
unsupportedFeatures += "skewed columns"
}
if (h.getStorageHandler != null) {
unsupportedFeatures += "storage handler"
}
if (h.getTableType == HiveTableType.VIRTUAL_VIEW && partCols.nonEmpty) {
unsupportedFeatures += "partitioned view"
}
val properties = Option(h.getParameters).map(_.asScala.toMap).orNull
// Hive-generated Statistics are also recorded in ignoredProperties
val ignoredProperties = scala.collection.mutable.Map.empty[String, String]
for (key <- HiveStatisticsProperties; value <- properties.get(key)) {
ignoredProperties += key -> value
}
val excludedTableProperties = HiveStatisticsProperties ++ Set(
// The property value of "comment" is moved to the dedicated field "comment"
"comment",
// For EXTERNAL_TABLE, the table properties have a particular field "EXTERNAL". This is added
// in the function toHiveTable.
"EXTERNAL"
)
val filteredProperties = properties.filterNot {
case (key, _) => excludedTableProperties.contains(key)
}
val comment = properties.get("comment")
CatalogTable(
identifier = TableIdentifier(h.getTableName, Option(h.getDbName)),
tableType = h.getTableType match {
case HiveTableType.EXTERNAL_TABLE => CatalogTableType.EXTERNAL
case HiveTableType.MANAGED_TABLE => CatalogTableType.MANAGED
case HiveTableType.VIRTUAL_VIEW => CatalogTableType.VIEW
case HiveTableType.INDEX_TABLE =>
throw new AnalysisException("Hive index table is not supported.")
},
schema = schema,
partitionColumnNames = partCols.map(_.name),
// If the table is written by Spark, we will put bucketing information in table properties,
// and will always overwrite the bucket spec in hive metastore by the bucketing information
// in table properties. This means, if we have bucket spec in both hive metastore and
// table properties, we will trust the one in table properties.
bucketSpec = bucketSpec,
owner = Option(h.getOwner).getOrElse(""),
createTime = h.getTTable.getCreateTime.toLong * 1000,
lastAccessTime = h.getLastAccessTime.toLong * 1000,
storage = CatalogStorageFormat(
locationUri = shim.getDataLocation(h).map(CatalogUtils.stringToURI),
// To avoid ClassNotFound exception, we try our best to not get the format class, but get
// the class name directly. However, for non-native tables, there is no interface to get
// the format class name, so we may still throw ClassNotFound in this case.
inputFormat = Option(h.getTTable.getSd.getInputFormat).orElse {
Option(h.getStorageHandler).map(_.getInputFormatClass.getName)
},
outputFormat = Option(h.getTTable.getSd.getOutputFormat).orElse {
Option(h.getStorageHandler).map(_.getOutputFormatClass.getName)
},
serde = Option(h.getSerializationLib),
compressed = h.getTTable.getSd.isCompressed,
properties = Option(h.getTTable.getSd.getSerdeInfo.getParameters)
.map(_.asScala.toMap).orNull
),
// For EXTERNAL_TABLE, the table properties have a particular field "EXTERNAL". This is added
// in the function toHiveTable.
properties = filteredProperties,
stats = readHiveStats(properties),
comment = comment,
// In older versions of Spark (before 2.2.0), we expanded the view original text and stored
// it into `viewExpandedText`, and that should be used in view resolution. So we get
// `viewExpandedText` instead of `viewOriginalText` for viewText here.
viewText = Option(h.getViewExpandedText),
unsupportedFeatures = unsupportedFeatures,
ignoredProperties = ignoredProperties.toMap)
}
}
override def createTable(table: CatalogTable, ignoreIfExists: Boolean): Unit = withHiveState {
verifyColumnDataType(table.dataSchema)
client.createTable(toHiveTable(table, Some(userName)), ignoreIfExists)
}
override def dropTable(
dbName: String,
tableName: String,
ignoreIfNotExists: Boolean,
purge: Boolean): Unit = withHiveState {
shim.dropTable(client, dbName, tableName, true, ignoreIfNotExists, purge)
}
override def alterTable(
dbName: String,
tableName: String,
table: CatalogTable): Unit = withHiveState {
// getTableOption removes all the Hive-specific properties. Here, we fill them back to ensure
// these properties are still available to the others that share the same Hive metastore.
// If users explicitly alter these Hive-specific properties through ALTER TABLE DDL, we respect
// these user-specified values.
verifyColumnDataType(table.dataSchema)
val hiveTable = toHiveTable(
table.copy(properties = table.ignoredProperties ++ table.properties), Some(userName))
// Do not use `table.qualifiedName` here because this may be a rename
val qualifiedTableName = s"$dbName.$tableName"
shim.alterTable(client, qualifiedTableName, hiveTable)
}
override def alterTableDataSchema(
dbName: String,
tableName: String,
newDataSchema: StructType,
schemaProps: Map[String, String]): Unit = withHiveState {
val oldTable = client.getTable(dbName, tableName)
verifyColumnDataType(newDataSchema)
val hiveCols = newDataSchema.map(toHiveColumn)
oldTable.setFields(hiveCols.asJava)
// remove old schema table properties
val it = oldTable.getParameters.entrySet.iterator
while (it.hasNext) {
val entry = it.next()
val isSchemaProp = entry.getKey.startsWith(DATASOURCE_SCHEMA_PART_PREFIX) ||
entry.getKey == DATASOURCE_SCHEMA || entry.getKey == DATASOURCE_SCHEMA_NUMPARTS
if (isSchemaProp) {
it.remove()
}
}
// set new schema table properties
schemaProps.foreach { case (k, v) => oldTable.setProperty(k, v) }
val qualifiedTableName = s"$dbName.$tableName"
shim.alterTable(client, qualifiedTableName, oldTable)
}
override def createPartitions(
db: String,
table: String,
parts: Seq[CatalogTablePartition],
ignoreIfExists: Boolean): Unit = withHiveState {
shim.createPartitions(client, db, table, parts, ignoreIfExists)
}
override def dropPartitions(
db: String,
table: String,
specs: Seq[TablePartitionSpec],
ignoreIfNotExists: Boolean,
purge: Boolean,
retainData: Boolean): Unit = withHiveState {
// TODO: figure out how to drop multiple partitions in one call
val hiveTable = client.getTable(db, table, true /* throw exception */)
// do the check at first and collect all the matching partitions
val matchingParts =
specs.flatMap { s =>
assert(s.values.forall(_.nonEmpty), s"partition spec '$s' is invalid")
// The provided spec here can be a partial spec, i.e. it will match all partitions
// whose specs are supersets of this partial spec. E.g. If a table has partitions
// (b='1', c='1') and (b='1', c='2'), a partial spec of (b='1') will match both.
val parts = client.getPartitions(hiveTable, s.asJava).asScala
if (parts.isEmpty && !ignoreIfNotExists) {
throw new AnalysisException(
s"No partition is dropped. One partition spec '$s' does not exist in table '$table' " +
s"database '$db'")
}
parts.map(_.getValues)
}.distinct
var droppedParts = ArrayBuffer.empty[java.util.List[String]]
matchingParts.foreach { partition =>
try {
shim.dropPartition(client, db, table, partition, !retainData, purge)
} catch {
case e: Exception =>
val remainingParts = matchingParts.toBuffer -- droppedParts
logError(
s"""
|======================
|Attempt to drop the partition specs in table '$table' database '$db':
|${specs.mkString("\\n")}
|In this attempt, the following partitions have been dropped successfully:
|${droppedParts.mkString("\\n")}
|The remaining partitions have not been dropped:
|${remainingParts.mkString("\\n")}
|======================
""".stripMargin)
throw e
}
droppedParts += partition
}
}
override def renamePartitions(
db: String,
table: String,
specs: Seq[TablePartitionSpec],
newSpecs: Seq[TablePartitionSpec]): Unit = withHiveState {
require(specs.size == newSpecs.size, "number of old and new partition specs differ")
val catalogTable = getTable(db, table)
val hiveTable = toHiveTable(catalogTable, Some(userName))
specs.zip(newSpecs).foreach { case (oldSpec, newSpec) =>
val hivePart = getPartitionOption(catalogTable, oldSpec)
.map { p => toHivePartition(p.copy(spec = newSpec), hiveTable) }
.getOrElse { throw new NoSuchPartitionException(db, table, oldSpec) }
client.renamePartition(hiveTable, oldSpec.asJava, hivePart)
}
}
override def alterPartitions(
db: String,
table: String,
newParts: Seq[CatalogTablePartition]): Unit = withHiveState {
val hiveTable = toHiveTable(getTable(db, table), Some(userName))
shim.alterPartitions(client, table, newParts.map { p => toHivePartition(p, hiveTable) }.asJava)
}
/**
* Returns the partition names for the given table that match the supplied partition spec.
* If no partition spec is specified, all partitions are returned.
*
* The returned sequence is sorted as strings.
*/
override def getPartitionNames(
table: CatalogTable,
partialSpec: Option[TablePartitionSpec] = None): Seq[String] = withHiveState {
val hivePartitionNames =
partialSpec match {
case None =>
// -1 for result limit means "no limit/return all"
client.getPartitionNames(table.database, table.identifier.table, -1)
case Some(s) =>
assert(s.values.forall(_.nonEmpty), s"partition spec '$s' is invalid")
client.getPartitionNames(table.database, table.identifier.table, s.asJava, -1)
}
hivePartitionNames.asScala.sorted
}
override def getPartitionOption(
table: CatalogTable,
spec: TablePartitionSpec): Option[CatalogTablePartition] = withHiveState {
val hiveTable = toHiveTable(table, Some(userName))
val hivePartition = client.getPartition(hiveTable, spec.asJava, false)
Option(hivePartition).map(fromHivePartition)
}
/**
* Returns the partitions for the given table that match the supplied partition spec.
* If no partition spec is specified, all partitions are returned.
*/
override def getPartitions(
table: CatalogTable,
spec: Option[TablePartitionSpec]): Seq[CatalogTablePartition] = withHiveState {
val hiveTable = toHiveTable(table, Some(userName))
val partSpec = spec match {
case None => CatalogTypes.emptyTablePartitionSpec
case Some(s) =>
assert(s.values.forall(_.nonEmpty), s"partition spec '$s' is invalid")
s
}
val parts = client.getPartitions(hiveTable, partSpec.asJava).asScala.map(fromHivePartition)
HiveCatalogMetrics.incrementFetchedPartitions(parts.length)
parts
}
override def getPartitionsByFilter(
table: CatalogTable,
predicates: Seq[Expression]): Seq[CatalogTablePartition] = withHiveState {
val hiveTable = toHiveTable(table, Some(userName))
val parts = shim.getPartitionsByFilter(client, hiveTable, predicates).map(fromHivePartition)
HiveCatalogMetrics.incrementFetchedPartitions(parts.length)
parts
}
override def listTables(dbName: String): Seq[String] = withHiveState {
client.getAllTables(dbName).asScala
}
override def listTables(dbName: String, pattern: String): Seq[String] = withHiveState {
client.getTablesByPattern(dbName, pattern).asScala
}
/**
* Runs the specified SQL query using Hive.
*/
override def runSqlHive(sql: String): Seq[String] = {
val maxResults = 100000
val results = runHive(sql, maxResults)
// It is very confusing when you only get back some of the results...
if (results.size == maxResults) sys.error("RESULTS POSSIBLY TRUNCATED")
results
}
/**
* Execute the command using Hive and return the results as a sequence. Each element
* in the sequence is one row.
*/
protected def runHive(cmd: String, maxRows: Int = 1000): Seq[String] = withHiveState {
logDebug(s"Running hiveql '$cmd'")
if (cmd.toLowerCase(Locale.ROOT).startsWith("set")) { logDebug(s"Changing config: $cmd") }
try {
val cmd_trimmed: String = cmd.trim()
val tokens: Array[String] = cmd_trimmed.split("\\\\s+")
// The remainder of the command.
val cmd_1: String = cmd_trimmed.substring(tokens(0).length()).trim()
val proc = shim.getCommandProcessor(tokens(0), conf)
proc match {
case driver: Driver =>
val response: CommandProcessorResponse = driver.run(cmd)
// Throw an exception if there is an error in query processing.
if (response.getResponseCode != 0) {
driver.close()
CommandProcessorFactory.clean(conf)
throw new QueryExecutionException(response.getErrorMessage)
}
driver.setMaxRows(maxRows)
val results = shim.getDriverResults(driver)
driver.close()
CommandProcessorFactory.clean(conf)
results
case _ =>
if (state.out != null) {
// scalastyle:off println
state.out.println(tokens(0) + " " + cmd_1)
// scalastyle:on println
}
Seq(proc.run(cmd_1).getResponseCode.toString)
}
} catch {
case e: Exception =>
logError(
s"""
|======================
|HIVE FAILURE OUTPUT
|======================
|${outputBuffer.toString}
|======================
|END HIVE FAILURE OUTPUT
|======================
""".stripMargin)
throw e
}
}
def loadPartition(
loadPath: String,
dbName: String,
tableName: String,
partSpec: java.util.LinkedHashMap[String, String],
replace: Boolean,
inheritTableSpecs: Boolean,
isSrcLocal: Boolean): Unit = withHiveState {
val hiveTable = client.getTable(dbName, tableName, true /* throw exception */)
shim.loadPartition(
client,
new Path(loadPath), // TODO: Use URI
s"$dbName.$tableName",
partSpec,
replace,
inheritTableSpecs,
isSkewedStoreAsSubdir = hiveTable.isStoredAsSubDirectories,
isSrcLocal = isSrcLocal)
}
def loadTable(
loadPath: String, // TODO URI
tableName: String,
replace: Boolean,
isSrcLocal: Boolean): Unit = withHiveState {
shim.loadTable(
client,
new Path(loadPath),
tableName,
replace,
isSrcLocal)
}
def loadDynamicPartitions(
loadPath: String,
dbName: String,
tableName: String,
partSpec: java.util.LinkedHashMap[String, String],
replace: Boolean,
numDP: Int): Unit = withHiveState {
val hiveTable = client.getTable(dbName, tableName, true /* throw exception */)
shim.loadDynamicPartitions(
client,
new Path(loadPath),
s"$dbName.$tableName",
partSpec,
replace,
numDP,
listBucketingEnabled = hiveTable.isStoredAsSubDirectories)
}
override def createFunction(db: String, func: CatalogFunction): Unit = withHiveState {
shim.createFunction(client, db, func)
}
override def dropFunction(db: String, name: String): Unit = withHiveState {
shim.dropFunction(client, db, name)
}
override def renameFunction(db: String, oldName: String, newName: String): Unit = withHiveState {
shim.renameFunction(client, db, oldName, newName)
}
override def alterFunction(db: String, func: CatalogFunction): Unit = withHiveState {
shim.alterFunction(client, db, func)
}
override def getFunctionOption(
db: String, name: String): Option[CatalogFunction] = withHiveState {
shim.getFunctionOption(client, db, name)
}
override def listFunctions(db: String, pattern: String): Seq[String] = withHiveState {
shim.listFunctions(client, db, pattern)
}
def addJar(path: String): Unit = {
val uri = new Path(path).toUri
val jarURL = if (uri.getScheme == null) {
// `path` is a local file path without a URL scheme
new File(path).toURI.toURL
} else {
// `path` is a URL with a scheme
uri.toURL
}
clientLoader.addJar(jarURL)
runSqlHive(s"ADD JAR $path")
}
def newSession(): HiveClientImpl = {
clientLoader.createClient().asInstanceOf[HiveClientImpl]
}
def reset(): Unit = withHiveState {
client.getAllTables("default").asScala.foreach { t =>
logDebug(s"Deleting table $t")
val table = client.getTable("default", t)
client.getIndexes("default", t, 255).asScala.foreach { index =>
shim.dropIndex(client, "default", t, index.getIndexName)
}
if (!table.isIndexTable) {
client.dropTable("default", t)
}
}
client.getAllDatabases.asScala.filterNot(_ == "default").foreach { db =>
logDebug(s"Dropping Database: $db")
client.dropDatabase(db, true, false, true)
}
}
}
private[hive] object HiveClientImpl {
/** Converts the native StructField to Hive's FieldSchema. */
def toHiveColumn(c: StructField): FieldSchema = {
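// Prefer the original Hive type string kept in the column metadata (set in fromHiveColumn when the catalog string differs), so the exact Hive type is preserved on the round trip.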
val typeString = if (c.metadata.contains(HIVE_TYPE_STRING)) {
c.metadata.getString(HIVE_TYPE_STRING)
} else {
c.dataType.catalogString
}
new FieldSchema(c.name, typeString, c.getComment().orNull)
}
/** Get the Spark SQL native DataType from Hive's FieldSchema. */
private def getSparkSQLDataType(hc: FieldSchema): DataType = {
try {
CatalystSqlParser.parseDataType(hc.getType)
} catch {
case e: ParseException =>
throw new SparkException("Cannot recognize hive type string: " + hc.getType, e)
}
}
/** Builds the native StructField from Hive's FieldSchema. */
def fromHiveColumn(hc: FieldSchema): StructField = {
val columnType = getSparkSQLDataType(hc)
val metadata = if (hc.getType != columnType.catalogString) {
new MetadataBuilder().putString(HIVE_TYPE_STRING, hc.getType).build()
} else {
Metadata.empty
}
val field = StructField(
name = hc.getName,
dataType = columnType,
nullable = true,
metadata = metadata)
Option(hc.getComment).map(field.withComment).getOrElse(field)
}
private def verifyColumnDataType(schema: StructType): Unit = {
schema.foreach(col => getSparkSQLDataType(toHiveColumn(col)))
}
private def toInputFormat(name: String) =
Utils.classForName(name).asInstanceOf[Class[_ <: org.apache.hadoop.mapred.InputFormat[_, _]]]
private def toOutputFormat(name: String) =
Utils.classForName(name)
.asInstanceOf[Class[_ <: org.apache.hadoop.hive.ql.io.HiveOutputFormat[_, _]]]
/**
* Converts the native table metadata representation format CatalogTable to Hive's Table.
*/
def toHiveTable(table: CatalogTable, userName: Option[String] = None): HiveTable = {
val hiveTable = new HiveTable(table.database, table.identifier.table)
// For EXTERNAL_TABLE, we also need to set EXTERNAL field in the table properties.
// Otherwise, Hive metastore will change the table to a MANAGED_TABLE.
// (metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java#L1095-L1105)
hiveTable.setTableType(table.tableType match {
case CatalogTableType.EXTERNAL =>
hiveTable.setProperty("EXTERNAL", "TRUE")
HiveTableType.EXTERNAL_TABLE
case CatalogTableType.MANAGED =>
HiveTableType.MANAGED_TABLE
case CatalogTableType.VIEW => HiveTableType.VIRTUAL_VIEW
})
// Note: In Hive the schema and partition columns must be disjoint sets
val (partCols, schema) = table.schema.map(toHiveColumn).partition { c =>
table.partitionColumnNames.contains(c.getName)
}
hiveTable.setFields(schema.asJava)
hiveTable.setPartCols(partCols.asJava)
userName.foreach(hiveTable.setOwner)
hiveTable.setCreateTime((table.createTime / 1000).toInt)
hiveTable.setLastAccessTime((table.lastAccessTime / 1000).toInt)
table.storage.locationUri.map(CatalogUtils.URIToString).foreach { loc =>
hiveTable.getTTable.getSd.setLocation(loc)}
table.storage.inputFormat.map(toInputFormat).foreach(hiveTable.setInputFormatClass)
table.storage.outputFormat.map(toOutputFormat).foreach(hiveTable.setOutputFormatClass)
hiveTable.setSerializationLib(
table.storage.serde.getOrElse("org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"))
table.storage.properties.foreach { case (k, v) => hiveTable.setSerdeParam(k, v) }
table.properties.foreach { case (k, v) => hiveTable.setProperty(k, v) }
table.comment.foreach { c => hiveTable.setProperty("comment", c) }
// Hive will expand the view text, so it needs 2 fields: viewOriginalText and viewExpandedText.
// Since we don't expand the view text, but only add table properties, we map `viewText` to
// both fields in the hive table.
table.viewText.foreach { t =>
hiveTable.setViewOriginalText(t)
hiveTable.setViewExpandedText(t)
}
table.bucketSpec match {
case Some(bucketSpec) if DDLUtils.isHiveTable(table) =>
hiveTable.setNumBuckets(bucketSpec.numBuckets)
hiveTable.setBucketCols(bucketSpec.bucketColumnNames.toList.asJava)
if (bucketSpec.sortColumnNames.nonEmpty) {
hiveTable.setSortCols(
bucketSpec.sortColumnNames
.map(col => new Order(col, HIVE_COLUMN_ORDER_ASC))
.toList
.asJava
)
}
case _ =>
}
hiveTable
}
/**
* Converts the native partition metadata representation format CatalogTablePartition to
* Hive's Partition.
*/
def toHivePartition(
p: CatalogTablePartition,
ht: HiveTable): HivePartition = {
val tpart = new org.apache.hadoop.hive.metastore.api.Partition
val partValues = ht.getPartCols.asScala.map { hc =>
p.spec.get(hc.getName).getOrElse {
throw new IllegalArgumentException(
s"Partition spec is missing a value for column '${hc.getName}': ${p.spec}")
}
}
val storageDesc = new StorageDescriptor
val serdeInfo = new SerDeInfo
p.storage.locationUri.map(CatalogUtils.URIToString(_)).foreach(storageDesc.setLocation)
p.storage.inputFormat.foreach(storageDesc.setInputFormat)
p.storage.outputFormat.foreach(storageDesc.setOutputFormat)
p.storage.serde.foreach(serdeInfo.setSerializationLib)
serdeInfo.setParameters(p.storage.properties.asJava)
storageDesc.setSerdeInfo(serdeInfo)
tpart.setDbName(ht.getDbName)
tpart.setTableName(ht.getTableName)
tpart.setValues(partValues.asJava)
tpart.setSd(storageDesc)
tpart.setParameters(mutable.Map(p.parameters.toSeq: _*).asJava)
new HivePartition(ht, tpart)
}
/**
* Build the native partition metadata from Hive's Partition.
*/
def fromHivePartition(hp: HivePartition): CatalogTablePartition = {
val apiPartition = hp.getTPartition
val properties: Map[String, String] = if (hp.getParameters != null) {
hp.getParameters.asScala.toMap
} else {
Map.empty
}
CatalogTablePartition(
spec = Option(hp.getSpec).map(_.asScala.toMap).getOrElse(Map.empty),
storage = CatalogStorageFormat(
locationUri = Option(CatalogUtils.stringToURI(apiPartition.getSd.getLocation)),
inputFormat = Option(apiPartition.getSd.getInputFormat),
outputFormat = Option(apiPartition.getSd.getOutputFormat),
serde = Option(apiPartition.getSd.getSerdeInfo.getSerializationLib),
compressed = apiPartition.getSd.isCompressed,
properties = Option(apiPartition.getSd.getSerdeInfo.getParameters)
.map(_.asScala.toMap).orNull),
parameters = properties,
stats = readHiveStats(properties))
}
/**
* Reads statistics from Hive.
 * Note that these statistics could be overridden by Spark's statistics if those are available.
*/
private def readHiveStats(properties: Map[String, String]): Option[CatalogStatistics] = {
val totalSize = properties.get(StatsSetupConst.TOTAL_SIZE).map(BigInt(_))
val rawDataSize = properties.get(StatsSetupConst.RAW_DATA_SIZE).map(BigInt(_))
val rowCount = properties.get(StatsSetupConst.ROW_COUNT).map(BigInt(_))
// NOTE: getting `totalSize` directly from params is kind of hacky, but this should be
// relatively cheap if parameters for the table are populated into the metastore.
// Currently, only totalSize, rawDataSize, and rowCount are used to build the field `stats`
// TODO: stats should also include the other two fields (`numFiles` and `numPartitions`).
// (see StatsSetupConst in Hive)
// When the table is external, `totalSize` is always zero, which will influence the join strategy.
// So when `totalSize` is zero, use `rawDataSize` instead. When `rawDataSize` is also zero,
// return None.
// In Hive, when statistics gathering is disabled, `rawDataSize` and `numRows` are always
// zero after an INSERT command. So they are used here only if they are larger than zero.
if (totalSize.isDefined && totalSize.get > 0L) {
Some(CatalogStatistics(sizeInBytes = totalSize.get, rowCount = rowCount.filter(_ > 0)))
} else if (rawDataSize.isDefined && rawDataSize.get > 0) {
Some(CatalogStatistics(sizeInBytes = rawDataSize.get, rowCount = rowCount.filter(_ > 0)))
} else {
// TODO: still fill the rowCount even if sizeInBytes is empty. Might break anything?
None
}
}
// Below is the key of table properties for storing Hive-generated statistics
private val HiveStatisticsProperties = Set(
StatsSetupConst.COLUMN_STATS_ACCURATE,
StatsSetupConst.NUM_FILES,
StatsSetupConst.NUM_PARTITIONS,
StatsSetupConst.ROW_COUNT,
StatsSetupConst.RAW_DATA_SIZE,
StatsSetupConst.TOTAL_SIZE
)
}
|
saltstar/spark
|
sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
|
Scala
|
apache-2.0
| 42,782
|
package glasskey.spray.resource
import org.specs2.mutable.Specification
import spray.http.StatusCodes._
import spray.routing.HttpService
import spray.testkit.Specs2RouteTest
class HelloWorldResourceServiceSpec extends Specification with Specs2RouteTest with HttpService {
def actorRefFactory = system
val service = new HelloWorldResourceService(actorRefFactory)
"MyService" should {
"return a greeting for GET requests to the root path" in {
Get() ~> service.resourceRoute ~> check {
responseAs[String] must contain("Say hello")
}
}
"leave GET requests to other paths unhandled" in {
Get("/kermit") ~> service.resourceRoute ~> check {
handled must beFalse
}
}
"return a MethodNotAllowed error for PUT requests to the root path" in {
Put() ~> sealRoute(service.resourceRoute) ~> check {
status === MethodNotAllowed
responseAs[String] === "HTTP method not allowed, supported methods: GET"
}
}
}
}
|
MonsantoCo/glass-key
|
samples/glass-key-spray-resource/src/test/scala/glasskey/spray/resource/HelloWorldResourceServiceSpec.scala
|
Scala
|
bsd-3-clause
| 1,000
|
trait I[F[_], A]
def magic[F[_], A](in: I[F, A]): F[A] =
val deps: Vector[I[F, _]] = ???
val xx = deps.map(i => magic(i))
val y: Vector[F[Any]] = xx // error
???
|
lampepfl/dotty
|
tests/neg/i12284.scala
|
Scala
|
apache-2.0
| 171
|
/**
* Copyright 2009 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package noop.model;
import collection.mutable.{ArrayBuffer, Buffer};
/**
* @author alexeagle@google.com (Alex Eagle)
* @author tocman@gmail.com (Jeremie Lenfant-Engelmann)
*/
class Block extends Expression {
val statements: Buffer[Expression] = new ArrayBuffer[Expression]();
val anonymousBindings: Buffer[BindingDeclaration] = new ArrayBuffer[BindingDeclaration];
var namedBinding: Option[String] = None;
override def accept(visitor: Visitor): Unit = {
for (anonBinding <- anonymousBindings) {
anonBinding.accept(visitor);
}
for (statement <- statements) {
statement.accept(visitor);
if (statement.isInstanceOf[ReturnExpression]) {
return; // break is not in scala 2.7 !
}
visitor.visit(this);
}
}
}
|
masterx2/noop
|
core/src/main/scala/noop/model/ABlock.scala
|
Scala
|
apache-2.0
| 1,368
|
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package org.scalajs.testsuite.typedarray
import org.junit.Assert._
import org.junit.Test
import org.scalajs.testsuite.utils.Requires
import scala.scalajs.js
import js.typedarray._
object TypedArrayConversionTest extends Requires.TypedArray
class TypedArrayConversionTest {
val data = js.Array[Int](-1, 1, 2, 3, 4, 5, 6, 7, 8)
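// The base data sums to 35 (= 8*9/2 - 1), so an array scaled by `factor` is expected to sum to 35 * factor.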
def sum(factor: Double): Double = (8 * 9 / 2 - 1) * factor
@Test def convertInt8ArrayToScalaArrayByte(): Unit = {
val x = new Int8Array(data.map(_.toByte))
val y = x.toArray
assertTrue(y.getClass == classOf[scala.Array[Byte]])
assertEquals(sum(1), y.sum, 0.0)
// Ensure it's a copy
x(0) = 0
assertEquals(sum(1), y.sum, 0.0)
}
@Test def convertInt16ArrayToScalaArrayShort(): Unit = {
val x = new Int16Array(data.map(x => (100 * x).toShort))
val y = x.toArray
assertTrue(y.getClass == classOf[scala.Array[Short]])
assertEquals(sum(100), y.sum, 0.0)
// Ensure it's a copy
x(0) = 0
assertEquals(sum(100), y.sum, 0.0)
}
@Test def convertUint16ArrayToScalaArrayChar(): Unit = {
val data = js.Array(1, 2, 3, 4, 5, 6).map(x => 10000 * x)
val sum = (6*7/2*10000).toChar
val x = new Uint16Array(data)
val y = x.toArray
assertTrue(y.getClass == classOf[scala.Array[Char]])
assertEquals(sum, y.sum)
// Ensure it's a copy
x(0) = 0
assertEquals(sum, y.sum)
}
@Test def convertInt32ArrayToScalaArrayInt(): Unit = {
val x = new Int32Array(data.map(x => 10000 * x))
val y = x.toArray
assertTrue(y.getClass == classOf[scala.Array[Int]])
assertEquals(sum(10000), y.sum, 0.0)
// Ensure it's a copy
x(0) = 0
assertEquals(sum(10000), y.sum, 0.0)
}
@Test def convertFloat32ArrayToScalaArrayFloat(): Unit = {
val x = new Float32Array(data.map(x => 0.2f * x.toFloat))
val y = x.toArray
assertTrue(y.getClass == classOf[scala.Array[Float]])
assertEquals(sum(0.2), y.sum, 1E-6)
// Ensure it's a copy
x(0) = 0
assertEquals(sum(0.2), y.sum, 1E-6)
}
@Test def convertFloat64ArrayToScalaArrayDouble(): Unit = {
val x = new Float64Array(data.map(x => 0.2 * x.toDouble))
val y = x.toArray
assertTrue(y.getClass == classOf[scala.Array[Double]])
assertEquals(sum(0.2), y.sum, 0.0)
// Ensure it's a copy
x(0) = 0
assertEquals(sum(0.2), y.sum, 0.0)
}
@Test def convertScalaArrayByteToInt8Array(): Unit = {
val x = (Byte.MinValue to Byte.MaxValue).map(_.toByte).toArray
val y = x.toTypedArray
assertTrue(y.isInstanceOf[Int8Array])
assertEquals(x.length, y.length)
for (i <- 0 until y.length)
assertEquals(x(i), y(i))
// Ensure it's a copy
x(0) = 0
assertEquals(Byte.MinValue, y(0))
}
@Test def convertScalaArrayShortToInt16Array(): Unit = {
val x = ((Short.MinValue to (Short.MinValue + 1000)) ++
((Short.MaxValue - 1000) to Short.MaxValue)).map(_.toShort).toArray
val y = x.toTypedArray
assertTrue(y.isInstanceOf[Int16Array])
assertEquals(x.length, y.length)
for (i <- 0 until y.length)
assertEquals(x(i), y(i))
// Ensure it's a copy
x(0) = 0
assertEquals(Short.MinValue, y(0))
}
@Test def convertScalaArrayCharToUint16Array(): Unit = {
val x = ((Char.MaxValue - 1000) to Char.MaxValue).map(_.toChar).toArray
val y = x.toTypedArray
assertTrue(y.isInstanceOf[Uint16Array])
assertEquals(x.length, y.length)
for (i <- 0 until y.length)
assertEquals(x(i).toInt, y(i))
// Ensure it's a copy
x(0) = 0
assertEquals(Char.MaxValue - 1000, y(0))
}
@Test def convertScalaArrayIntToInt32Array(): Unit = {
val x = ((Int.MinValue to (Int.MinValue + 1000)) ++
((Int.MaxValue - 1000) to Int.MaxValue)).toArray
val y = x.toTypedArray
assertTrue(y.isInstanceOf[Int32Array])
assertEquals(x.length, y.length)
for (i <- 0 until y.length)
assertEquals(x(i), y(i))
// Ensure it's a copy
x(0) = 0
assertEquals(Int.MinValue, y(0))
}
@Test def convertScalaArrayFloatToFloat32Array(): Unit = {
val x = Array[Float](1.0f, 2.0f, -2.3f, 5.3f)
val y = x.toTypedArray
assertTrue(y.isInstanceOf[Float32Array])
assertEquals(x.length, y.length)
for (i <- 0 until y.length)
assertEquals(x(i), y(i), 0.0)
// Ensure it's a copy
x(0) = 0
assertEquals(1.0f, y(0), 0.0)
}
@Test def convertScalaArrayDoubleToFloat64Array(): Unit = {
val x = Array[Double](1.0, 2.0, -2.3, 5.3)
val y = x.toTypedArray
assertTrue(y.isInstanceOf[Float64Array])
assertEquals(x.length, y.length, 0.0)
for (i <- 0 until y.length)
assertEquals(x(i), y(i), 0.0)
// Ensure it's a copy
x(0) = 0
assertEquals(1.0, y(0), 0.0)
}
}
|
scala-js/scala-js
|
test-suite/js/src/test/scala/org/scalajs/testsuite/typedarray/TypedArrayConversionTest.scala
|
Scala
|
apache-2.0
| 5,038
|
/**
* Determine if a string is a palindrome.
*/
object isPalindrome {
def apply(s: String): Boolean = {
// Compare characters from both ends; checking the first half of the string is sufficient.
for (i <- 0 until s.length / 2) {
if (s.charAt(i) != s.charAt(s.length - i - 1)) {
return false
}
}
true
}
def main(args: Array[String]) {
print("isPalindrome")
assert(isPalindrome(""))
assert(!isPalindrome("abc"))
assert(isPalindrome("racecar"))
assert(isPalindrome("baccab"))
println(" ✓")
}
}
|
grant/interview-questions
|
scala/src/isPalindrome.scala
|
Scala
|
mit
| 491
|
class A {
var s = "
}
object Main { def main(args: Array[String]) { } }
|
tobast/compil-petitscala
|
tests/syntax/bad/testfile-unclosed_string-1.scala
|
Scala
|
gpl-3.0
| 81
|
package com.sksamuel.elastic4s.searches
import com.sksamuel.elastic4s.IndexesAndTypes
import com.sksamuel.elastic4s.script.ScriptFieldDefinition
import com.sksamuel.elastic4s.searches.aggs.AbstractAggregation
import com.sksamuel.elastic4s.searches.collapse.CollapseDefinition
import com.sksamuel.elastic4s.searches.queries._
import com.sksamuel.elastic4s.searches.queries.matches.{MatchAllQueryDefinition, MatchQueryDefinition}
import com.sksamuel.elastic4s.searches.queries.term.TermQueryDefinition
import com.sksamuel.elastic4s.searches.sort.{FieldSortDefinition, SortDefinition}
import com.sksamuel.elastic4s.searches.suggestion.SuggestionDefinition
import com.sksamuel.exts.OptionImplicits._
import org.elasticsearch.action.search.SearchType
import org.elasticsearch.action.support.IndicesOptions
import org.elasticsearch.cluster.routing.Preference
import org.elasticsearch.search.fetch.subphase.FetchSourceContext
import scala.concurrent.duration.{Duration, FiniteDuration}
case class SearchDefinition(indexesTypes: IndexesAndTypes,
aggs: Seq[AbstractAggregation] = Nil,
collapse: Option[CollapseDefinition] = None,
explain: Option[Boolean] = None,
fetchContext: Option[FetchSourceContext] = None,
from: Option[Int] = None,
indicesOptions: Option[IndicesOptions] = None,
inners: Seq[InnerHitDefinition] = Nil,
indexBoosts: Seq[(String, Double)] = Nil,
keepAlive: Option[String] = None,
highlight: Option[Highlight] = None,
minScore: Option[Double] = None,
pref: Option[String] = None,
query: Option[QueryDefinition] = None,
postFilter: Option[QueryDefinition] = None,
requestCache: Option[Boolean] = None,
rescorers: Seq[RescoreDefinition] = Nil,
scriptFields: Seq[ScriptFieldDefinition] = Nil,
sorts: Seq[SortDefinition] = Nil,
storedFields: Seq[String] = Nil,
suggs: Seq[SuggestionDefinition] = Nil,
globalSuggestionText: Option[String] = None,
size: Option[Int] = None,
routing: Option[String] = None,
stats: Seq[String] = Nil,
searchType: Option[SearchType] = None,
searchAfter: Seq[Any] = Nil,
trackScores: Option[Boolean] = None,
terminateAfter: Option[Int] = None,
timeout: Option[Duration] = None,
version: Option[Boolean] = None
) {
/** Adds a single string query to this search
*
* @param string the query string
*/
def query(string: String): SearchDefinition = query(QueryStringQueryDefinition(string))
// adds a query to this search
def query(q: QueryDefinition): SearchDefinition = copy(query = q.some)
def minScore(min: Double): SearchDefinition = copy(minScore = min.some)
def types(first: String, rest: String*): SearchDefinition = types(first +: rest)
def types(types: Iterable[String]): SearchDefinition =
copy(indexesTypes = IndexesAndTypes(indexesTypes.indexes, types.toSeq))
def bool(block: => BoolQueryDefinition): SearchDefinition = query(block)
@deprecated("Use matchAllQuery()", "5.2.0")
def matchAll(): SearchDefinition = query(new MatchAllQueryDefinition)
def inner(first: InnerHitDefinition, rest: InnerHitDefinition*): SearchDefinition = inner(first +: rest)
def inner(inners: Iterable[InnerHitDefinition]): SearchDefinition = copy(inners = inners.toSeq)
def searchAfter(values: Seq[Any]): SearchDefinition = copy(searchAfter = values)
def postFilter(block: => QueryDefinition): SearchDefinition = copy(postFilter = block.some)
def requestCache(requestCache: Boolean): SearchDefinition = copy(requestCache = requestCache.some)
def aggs(first: AbstractAggregation, rest: AbstractAggregation*): SearchDefinition = aggs(first +: rest)
def aggs(iterable: Iterable[AbstractAggregation]): SearchDefinition = aggregations(iterable)
def aggregations(aggs: Iterable[AbstractAggregation]): SearchDefinition = copy(aggs = aggs.toSeq)
def aggregations(first: AbstractAggregation, rest: AbstractAggregation*): SearchDefinition = aggregations(first +: rest)
@deprecated("use sortBy", "5.0.0")
def sort(sorts: SortDefinition*): SearchDefinition = sortBy(sorts)
def sortBy(sorts: SortDefinition*): SearchDefinition = sortBy(sorts)
def sortBy(sorts: Iterable[SortDefinition]): SearchDefinition = copy(sorts = sorts.toSeq)
def sortByFieldAsc(name: String): SearchDefinition = sortBy(FieldSortDefinition(name))
def sortByFieldDesc(name: String): SearchDefinition = sortBy(FieldSortDefinition(name).desc())
/** This method introduces zero or more script field definitions into the search construction
*
* @param fields zero or more [[ScriptFieldDefinition]] instances
* @return this, an instance of [[SearchDefinition]]
*/
def scriptfields(fields: ScriptFieldDefinition*): SearchDefinition = scriptfields(fields)
def scriptfields(fields: Iterable[ScriptFieldDefinition]): SearchDefinition = {
// defs.foreach {
// case ScriptFieldDefinition(name, script, None, None, _, ScriptType.INLINE) =>
// _builder.addScriptField(name, new Script(script))
// case ScriptFieldDefinition(name, script, lang, params, options, scriptType) =>
// _builder.addScriptField(name, new Script(scriptType, lang.getOrElse(Script.DEFAULT_SCRIPT_LANG), script,
// options.map(_.asJava).getOrElse(new util.HashMap()),
// params.map(_.asJava).getOrElse(new util.HashMap())))
// }
// this
copy(scriptFields = fields.toSeq)
}
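// Usage sketch (hypothetical values): given fieldA and fieldB of type ScriptFieldDefinition,
// search.scriptfields(fieldA, fieldB) and search.scriptfields(Seq(fieldA, fieldB)) yield the
// same copy of this definition with both script fields attached.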
/**
* Adds a new suggestion to the search request, which can be looked up in the response
* using the name provided.
*/
def suggestions(first: SuggestionDefinition,
rest: SuggestionDefinition*): SearchDefinition = suggestions(first +: rest)
def suggestions(suggs: Iterable[SuggestionDefinition]): SearchDefinition = copy(suggs = suggs.toSeq)
def suggestion(sugg: SuggestionDefinition): SearchDefinition = suggestions(Seq(sugg))
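// Usage sketch (hypothetical suggestion): a SuggestionDefinition registered via suggestions(...)
// or suggestion(...) is keyed by its own name, which is how it is looked up in the response,
// as described above.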
def globalSuggestionText(text: String): SearchDefinition = copy(globalSuggestionText = text.some)
// Adds a single prefix query to this search
def prefix(name: String, value: Any): SearchDefinition = query(PrefixQueryDefinition(name, value))
@deprecated("use regexQuery(...)", "5.0.0")
def regex(tuple: (String, String)): SearchDefinition = regexQuery(tuple)
def regexQuery(tuple: (String, String)): SearchDefinition = regexQuery(tuple._1, tuple._2)
// Adds a single regex query to this search
def regexQuery(field: String, value: String): SearchDefinition = query(RegexQueryDefinition(field, value))
@deprecated("use termQuery()", "5.0.0")
def term(tuple: (String, Any)): SearchDefinition = termQuery(tuple)
@deprecated("use termQuery()", "5.0.0")
def term(field: String, value: Any): SearchDefinition = termQuery(field, value)
def termQuery(tuple: (String, Any)): SearchDefinition = termQuery(tuple._1, tuple._2)
def termQuery(field: String, value: Any): SearchDefinition = {
val q = TermQueryDefinition(field, value)
query(q)
}
def matchQuery(field: String, value: Any): SearchDefinition = {
val q = MatchQueryDefinition(field, value)
query(q)
}
def matchAllQuery(): SearchDefinition = query(MatchAllQueryDefinition())
/** Expects a query in json format and sets the query of the search request.
* i.e. underneath a "query" field if referencing HTTP API
* Query must be valid json beginning with '{' and ending with '}'.
* Field names must be double quoted.
*
* Example:
* {{{
* search in "*" types("users", "tweets") limit 5 rawQuery {
* """{ "prefix": { "bands": { "prefix": "coldplay", "boost": 5.0, "rewrite": "yes" } } }"""
* } searchType SearchType.Scan
* }}}
*/
def rawQuery(json: String): SearchDefinition = query(RawQueryDefinition(json))
/** Sets the source of the request as a json string. Allows setting other parameters.
* Unlike rawQuery, setExtraSource is parsed at the "root" level
* Query must be valid json beginning with '{' and ending with '}'.
* Field names must be double quoted.
*
* Example:
* {{{
* search in "*" types("users", "tweets") limit 5 extraSource {
* """{ "query": { "prefix": { "bands": { "prefix": "coldplay", "boost": 5.0, "rewrite": "yes" } } } }"""
* } searchType SearchType.Scan
* }}}
*/
def extraSource(json: String): SearchDefinition = ??? // todo
/**
* Sets the source of the request as a json string. Note, setting anything other
* than the search type will cause this source to be overridden, consider using
* {@link #setExtraSource(String)}.
*
* Unlike rawQuery, setExtraSource is parsed at the "root" level
* Query must be valid json beginning with '{' and ending with '}'.
* Field names must be double quoted.
*
* Example:
* {{{
* search in "*" types("users", "tweets") limit 5 extraSource {
* """{ "query": { "prefix": { "bands": { "prefix": "coldplay", "boost": 5.0, "rewrite": "yes" } } } }"""
* } searchType SearchType.Scan
* }}}
*/
def source(json: String): SearchDefinition = ??? // todo
def explain(enabled: Boolean): SearchDefinition = copy(explain = enabled.some)
def highlighting(first: HighlightFieldDefinition,
rest: HighlightFieldDefinition*): SearchDefinition =
highlighting(HighlightOptionsDefinition(), first +: rest)
def highlighting(fields: Iterable[HighlightFieldDefinition]): SearchDefinition =
highlighting(HighlightOptionsDefinition(), fields)
def highlighting(options: HighlightOptionsDefinition,
first: HighlightFieldDefinition,
rest: HighlightFieldDefinition*): SearchDefinition = highlighting(options, first +: rest)
def highlighting(options: HighlightOptionsDefinition,
fields: Iterable[HighlightFieldDefinition]): SearchDefinition =
copy(highlight = Highlight(options, fields).some)
def routing(r: String): SearchDefinition = copy(routing = r.some)
def start(i: Int): SearchDefinition = from(i)
def from(i: Int): SearchDefinition = copy(from = i.some)
def limit(i: Int): SearchDefinition = size(i)
def size(i: Int): SearchDefinition = copy(size = i.some)
@deprecated("Use the elasticsearch enum rather than the elastic4s one", "5.2.0")
def preference(pref: com.sksamuel.elastic4s.Preference): SearchDefinition = preference(pref.value)
def preference(pref: Preference): SearchDefinition = preference(pref.`type`)
def preference(pref: String): SearchDefinition = copy(pref = pref.some)
def indicesOptions(options: IndicesOptions): SearchDefinition = copy(indicesOptions = options.some)
def rescore(first: RescoreDefinition, rest: RescoreDefinition*): SearchDefinition = rescore(first +: rest)
def rescore(rescorers: Iterable[RescoreDefinition]): SearchDefinition = copy(rescorers = rescorers.toSeq)
// alias for scroll
def keepAlive(keepAlive: String): SearchDefinition = scroll(keepAlive)
def scroll(keepAlive: String): SearchDefinition = copy(keepAlive = keepAlive.some)
def searchType(searchType: SearchType): SearchDefinition = copy(searchType = searchType.some)
def version(version: Boolean): SearchDefinition = copy(version = version.some)
/**
* The maximum number of documents to collect for each shard,
* upon reaching which the query execution will terminate early.
* If set, the response will have a boolean field terminated_early
* to indicate whether the query execution has actually terminated
* early. Defaults to no.
*/
def terminateAfter(terminateAfter: Int): SearchDefinition = copy(terminateAfter = terminateAfter.some)
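// Usage sketch: terminateAfter(1000) makes each shard stop collecting after 1000 documents;
// per the note above, the response then carries a terminated_early flag.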
def indexBoost(map: Map[String, Double]): SearchDefinition = indexBoost(map.toList: _*)
def indexBoost(tuples: (String, Double)*): SearchDefinition = copy(indexBoosts = tuples)
def timeout(duration: FiniteDuration): SearchDefinition = copy(timeout = duration.some)
def stats(groups: String*): SearchDefinition = copy(stats = groups.toSeq)
def trackScores(enabled: Boolean): SearchDefinition = copy(trackScores = enabled.some)
@deprecated("Renamed to storedFields", "5.0.0")
def fields(fields: String*): SearchDefinition = storedFields(fields)
def storedFields(first: String, rest: String*): SearchDefinition = storedFields(first +: rest)
def storedFields(fields: Iterable[String]): SearchDefinition = copy(storedFields = fields.toSeq)
def fetchContext(context: FetchSourceContext): SearchDefinition = copy(fetchContext = context.some)
def fetchSource(fetch: Boolean): SearchDefinition = copy(fetchContext = new FetchSourceContext(fetch).some)
def sourceInclude(first: String, rest: String*): SearchDefinition = sourceFiltering(first +: rest, Nil)
def sourceInclude(includes: Iterable[String]) : SearchDefinition = sourceFiltering(includes, Nil)
def sourceExclude(first: String, rest: String*): SearchDefinition = sourceFiltering(Nil, first +: rest)
def sourceExclude(excludes: Iterable[String]) : SearchDefinition = sourceFiltering(Nil, excludes)
def sourceFiltering(includes: Iterable[String], excludes: Iterable[String]): SearchDefinition =
copy(fetchContext = new FetchSourceContext(true, includes.toArray, excludes.toArray).some)
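// Note: sourceInclude, sourceExclude and sourceFiltering each build a fresh FetchSourceContext,
// so chaining sourceInclude and then sourceExclude keeps only the excludes; pass both lists to
// sourceFiltering to combine them.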
def collapse(collapse: CollapseDefinition): SearchDefinition = copy(collapse = collapse.some)
}
|
aroundus-inc/elastic4s
|
elastic4s-core/src/main/scala/com/sksamuel/elastic4s/searches/SearchDefinition.scala
|
Scala
|
apache-2.0
| 14,114
|
package org.jetbrains.plugins.scala
package lang
package structureView
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef._
import org.jetbrains.plugins.scala.lang.psi.api.statements._
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.packaging._
import com.intellij.psi._
import org.jetbrains.plugins.scala.lang.psi.api.base._
import psi.api.ScalaFile
import com.intellij.openapi.project.IndexNotReadyException
import psi.types.{ScSubstitutor, ScType}
import extensions.toPsiNamedElementExt
import org.jetbrains.plugins.scala.lang.psi.types.result.TypingContextOwner
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScNamedElement
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import org.jetbrains.plugins.scala.lang.completion.ScalaKeyword
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.ScClassParameter
/**
* @author Alexander Podkhalyuzin
* Date: 04.05.2008
*/
object ScalaElementPresentation {
//TODO refactor with name getters
def getFilePresentableText(file: ScalaFile): String = file.name
def getPackagingPresentableText(packaging: ScPackaging): String = packaging.getPackageName
def getTypeDefinitionPresentableText(typeDefinition: ScTypeDefinition): String =
if (typeDefinition.nameId != null) typeDefinition.nameId.getText else "unnamed"
def getPrimaryConstructorPresentableText(constructor: ScPrimaryConstructor): String = {
val presentableText: StringBuffer = new StringBuffer
presentableText.append("this")
if (constructor.parameters != null)
presentableText.append(StructureViewUtil.getParametersAsString(constructor.parameterList))
presentableText.toString
}
def getMethodPresentableText(function: ScFunction, fast: Boolean = true,
subst: ScSubstitutor = ScSubstitutor.empty): String = {
val presentableText: StringBuffer = new StringBuffer
presentableText.append(if (!function.isConstructor) function.name else "this")
function.typeParametersClause match {
case Some(clause) => presentableText.append(clause.getText.replace("<", "&lt;"))
case _ => ()
}
if (function.paramClauses != null)
presentableText.append(StructureViewUtil.getParametersAsString(function.paramClauses, fast, subst))
if (fast) {
function.returnTypeElement match {
case Some(rt) => presentableText.append(": ").append(rt.getText)
case _ => //do nothing
}
} else {
presentableText.append(": ")
try {
val typez = subst.subst(function.returnType.getOrAny)
presentableText.append(ScType.presentableText(typez))
}
catch {
case e: IndexNotReadyException => presentableText.append("NoTypeInfo")
}
}
presentableText.toString
}
def getTypeAliasPresentableText(typeAlias: ScTypeAlias): String =
if (typeAlias.nameId != null) typeAlias.nameId.getText else "type unnamed"
def getPresentableText(elem: PsiElement): String = elem.getText
def getValOrVarPresentableText(elem: ScNamedElement): String = {
val typeText = elem match {
case typed: TypingContextOwner => ": " + typed.getType().getOrAny.presentableText
case _ => ""
}
val keyword = ScalaPsiUtil.nameContext(elem) match {
case _: ScVariable => ScalaKeyword.VAR
case _: ScValue => ScalaKeyword.VAL
case param: ScClassParameter if param.isVar => ScalaKeyword.VAR
case param: ScClassParameter if param.isVal => ScalaKeyword.VAL
case _ => ""
}
s"$keyword ${elem.name}$typeText"
}
}
|
consulo/consulo-scala
|
src/org/jetbrains/plugins/scala/lang/structureView/ScalaElementPresentation.scala
|
Scala
|
apache-2.0
| 3,567
|
object Test extends App {
42 match {
case Extractor(x) => println(x)
case x => throw new MatchError(x)
}
}
|
lrytz/scala
|
test/files/run/macro-whitebox-extractor/Test_2.scala
|
Scala
|
apache-2.0
| 119
|
/*
* Copyright (c) 2014-2021 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.execution.misc.test
import monix.execution.misc._
import scala.reflect.macros.whitebox
private[execution] object TestInlineMacros {
def testInlineSingleArg(): Either[String, Unit] =
macro Macros.testInlineSingleArg
def testInlineMultipleArgs(): Either[String, Unit] =
macro Macros.testInlineMultipleArgs
def testInlineSingleArgUnderscore(): Either[String, Unit] =
macro Macros.testInlineSingleArgUnderscore
def testInlineMultipleArgsUnderscore(): Either[String, Unit] =
macro Macros.testInlineMultipleArgsUnderscore
def testInlinePatternMatch(): Either[String, Unit] =
macro Macros.testInlinePatternMatch
class Macros(override val c: whitebox.Context) extends InlineMacros {
import c.universe._
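// Each test below inlines a literal lambda application via inlineAndReset, compares the
// resulting tree structurally against the expected expansion, and collects mismatches as Left
// messages; the macro then reifies either Right(()) or the joined error messages.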
def testInlineSingleArg(): c.Expr[Either[String, Unit]] = {
val tests = List(
{
val actual = inlineAndReset(q"((x:Int) => x + 1)(10)").tree
val expected = q"10 + 1"
if (actual.equalsStructure(expected))
Right(())
else
Left(s"Expected $expected but got $actual".replaceAll("[\\\\n\\\\s]+", " "))
}, {
val actual = inlineAndReset(q"((x:Int) => x + 1).apply(10)").tree
val expected = q"10 + 1"
if (actual.equalsStructure(expected))
Right(())
else
Left(s"Expected $expected but got $actual".replaceAll("[\\\\n\\\\s]+", " "))
}
)
val result = tests.collect { case Left(msg) => msg }
if (result.nonEmpty) {
val expr = c.Expr[String](Literal(Constant(result.mkString("; "))))
reify(Left(expr.splice): Either[String, Unit])
} else {
reify(Right(()): Either[String, Unit])
}
}
def testInlineMultipleArgs(): c.Expr[Either[String, Unit]] = {
val tests = List(
{
val actual = inlineAndReset(q"((x:Int, y:Int) => {val z = x + 1; y + z})(10, 20)").tree
val expected = q"{val z = 10 + 1; 20 + z}"
if (actual.equalsStructure(expected))
Right(())
else
Left(s"Expected $expected but got $actual".replaceAll("[\\\\n\\\\s]+", " "))
}, {
val actual = inlineAndReset(q"((x:Int, y:Int) => {val z = x + 1; y + z}).apply(10, 20)").tree
val expected = q"{val z = 10 + 1; 20 + z}"
if (actual.equalsStructure(expected))
Right(())
else
Left(s"Expected $expected but got $actual".replaceAll("[\\\\n\\\\s]+", " "))
}
)
val result = tests.collect { case Left(msg) => msg }
if (result.nonEmpty) {
val expr = c.Expr[String](Literal(Constant(result.mkString("; "))))
reify(Left(expr.splice): Either[String, Unit])
} else {
reify(Right(()): Either[String, Unit])
}
}
def testInlineSingleArgUnderscore(): c.Expr[Either[String, Unit]] = {
val tests = List(
{
val actual = inlineAndReset(q"(_ + 1)(10)").tree
val expected = q"10 + 1"
if (actual.equalsStructure(expected))
Right(())
else
Left(s"Expected $expected but got $actual".replaceAll("[\\\\n\\\\s]+", " "))
}, {
val actual = inlineAndReset(q"(_ + 1).apply(10)").tree
val expected = q"10 + 1"
if (actual.equalsStructure(expected))
Right(())
else
Left(s"Expected $expected but got $actual".replaceAll("[\\\\n\\\\s]+", " "))
}
)
val result = tests.collect { case Left(msg) => msg }
if (result.nonEmpty) {
val expr = c.Expr[String](Literal(Constant(result.mkString("; "))))
reify(Left(expr.splice): Either[String, Unit])
} else {
reify(Right(()): Either[String, Unit])
}
}
def testInlineMultipleArgsUnderscore(): c.Expr[Either[String, Unit]] = {
val tests = List(
{
val actual = inlineAndReset(q"(_ + _)(10, 20)").tree
val expected = q"10 + 20"
if (actual.equalsStructure(expected))
Right(())
else
Left(s"Expected $expected but got $actual".replaceAll("[\\\\n\\\\s]+", " "))
}, {
val actual = inlineAndReset(q"(_ + _).apply(10, 20)").tree
val expected = q"10 + 20"
if (actual.equalsStructure(expected))
Right(())
else
Left(s"Expected $expected but got $actual".replaceAll("[\\\\n\\\\s]+", " "))
}
)
val result = tests.collect { case Left(msg) => msg }
if (result.nonEmpty) {
val expr = c.Expr[String](Literal(Constant(result.mkString("; "))))
reify(Left(expr.splice): Either[String, Unit])
} else {
reify(Right(()): Either[String, Unit])
}
}
def testInlinePatternMatch(): c.Expr[Either[String, Unit]] = {
val tests = List({
val actual = inlineAndReset(q"((x:Int) => x match { case x => x + 1})(10)").tree
val expected = q"10 match { case x => 10 + 1}"
if (actual.equalsStructure(expected))
Right(())
else
Left(s"Expected $expected but got $actual".replaceAll("[\\\\n\\\\s]+", " "))
})
val result = tests.collect { case Left(msg) => msg }
if (result.nonEmpty) {
val expr = c.Expr[String](Literal(Constant(result.mkString("; "))))
reify(Left(expr.splice): Either[String, Unit])
} else {
reify(Right(()): Either[String, Unit])
}
}
}
}
|
monixio/monix
|
monix-execution/shared/src/main/scala_3.0-/monix/execution/misc/test/TestInlineMacros.scala
|
Scala
|
apache-2.0
| 6,150
|
package com.sageserpent.plutonium.javaApi
import java.time.Instant
import java.util.function.{BiConsumer, Consumer}
import com.sageserpent.americium.Unbounded
import com.sageserpent.plutonium.{typeTagForClass, Change => ScalaChange}
object Change {
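// Java-friendly facade over the Scala Change factories: each overload adapts a
// java.util.function callback and a Class token, recovering the TypeTag required by the
// Scala API via typeTagForClass.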
def forOneItem[Item](when: Unbounded[Instant],
id: Any,
clazz: Class[Item],
update: Consumer[Item]): ScalaChange =
ScalaChange.forOneItem(when)(id, update.accept(_: Item))(
typeTagForClass(clazz))
def forOneItem[Item](when: Instant,
id: Any,
clazz: Class[Item],
update: Consumer[Item]): ScalaChange =
ScalaChange.forOneItem(when)(id, update.accept(_: Item))(
typeTagForClass(clazz))
def forOneItem[Item](id: Any,
clazz: Class[Item],
update: Consumer[Item]): ScalaChange =
ScalaChange.forOneItem(id, update.accept(_: Item))(typeTagForClass(clazz))
def forTwoItems[Item1, Item2](when: Unbounded[Instant],
id1: Any,
clazz1: Class[Item1],
id2: Any,
clazz2: Class[Item2],
update: BiConsumer[Item1, Item2]): ScalaChange =
ScalaChange.forTwoItems(when)(id1, id2, update.accept(_: Item1, _: Item2))(
typeTagForClass(clazz1),
typeTagForClass(clazz2))
def forTwoItems[Item1, Item2](when: Instant,
id1: Any,
clazz1: Class[Item1],
id2: Any,
clazz2: Class[Item2],
update: BiConsumer[Item1, Item2]): ScalaChange =
ScalaChange.forTwoItems(when)(id1, id2, update.accept(_: Item1, _: Item2))(
typeTagForClass(clazz1),
typeTagForClass(clazz2))
def forTwoItems[Item1, Item2](id1: Any,
clazz1: Class[Item1],
id2: Any,
clazz2: Class[Item2],
update: BiConsumer[Item1, Item2]): ScalaChange =
ScalaChange.forTwoItems(id1, id2, update.accept(_: Item1, _: Item2))(
typeTagForClass(clazz1),
typeTagForClass(clazz2))
}
|
sageserpent-open/open-plutonium
|
src/main/scala/com/sageserpent/plutonium/javaApi/Change.scala
|
Scala
|
mit
| 2,364
|
package mesosphere.marathon
package core.task.termination.impl
import java.util.UUID
import akka.Done
import akka.actor.{PoisonPill, Terminated}
import akka.stream.scaladsl.Source
import akka.testkit.{TestActorRef, TestProbe}
import com.typesafe.scalalogging.StrictLogging
import mesosphere.AkkaUnitTest
import mesosphere.marathon.core.condition.Condition
import mesosphere.marathon.core.event.{InstanceChanged, UnknownInstanceTerminated}
import mesosphere.marathon.core.instance.update.InstanceChange
import mesosphere.marathon.core.instance.{Instance, TestInstanceBuilder}
import mesosphere.marathon.core.pod.MesosContainer
import mesosphere.marathon.core.task.Task
import mesosphere.marathon.core.task.bus.TaskStatusUpdateTestHelper
import mesosphere.marathon.core.task.termination.KillConfig
import mesosphere.marathon.core.task.tracker.InstanceTracker
import mesosphere.marathon.metrics.Metrics
import mesosphere.marathon.metrics.dummy.DummyMetrics
import mesosphere.marathon.raml.Resources
import mesosphere.marathon.state.{AbsolutePathId, AppDefinition, Timestamp}
import scala.jdk.CollectionConverters._
import mesosphere.marathon.test.SettableClock
import org.apache.mesos
import org.apache.mesos.SchedulerDriver
import org.mockito.ArgumentCaptor
import org.scalatest.concurrent.Eventually
import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future, Promise}
class KillServiceActorTest extends AkkaUnitTest with StrictLogging with Eventually {
val defaultConfig: KillConfig = new KillConfig {
override lazy val killChunkSize: Int = 5
override lazy val killRetryTimeout: FiniteDuration = 10.minutes
}
val retryConfig: KillConfig = new KillConfig {
override lazy val killChunkSize: Int = 5
override lazy val killRetryTimeout: FiniteDuration = 500.millis
}
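// killChunkSize = 5 caps how many kill requests may be in flight at once (the throttling tests
// below expect exactly 5 kills before acknowledgements free up more slots); retryConfig shortens
// killRetryTimeout so the retry test can observe a second kill quickly.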
"The KillServiceActor" when {
"asked to kill a single known instance" should {
"issue a kill to the driver" in withActor(defaultConfig) { (f, actor) =>
val instance = f.mockInstance(f.runSpecId, f.now(), mesos.Protos.TaskState.TASK_RUNNING)
val promise = Promise[Done]()
actor ! KillServiceActor.KillInstances(Seq(instance), promise)
val (taskId, _) = instance.tasksMap.head
verify(f.driver, timeout(f.killConfig.killRetryTimeout.toMillis.toInt * 2)).killTask(taskId.mesosTaskId)
f.publishInstanceChanged(TaskStatusUpdateTestHelper.killed(instance).wrapped)
promise.future.futureValue should be(Done)
}
}
"asked to kill an unknown instance" should {
"issue a kill to the driver" in withActor(defaultConfig) { (f, actor) =>
val instanceId = Instance.Id.forRunSpec(AbsolutePathId("/unknown"))
val taskId = Task.Id(instanceId)
actor ! KillServiceActor.KillUnknownTaskById(taskId)
f.publishUnknownInstanceTerminated(instanceId)
verify(f.driver, timeout(f.killConfig.killRetryTimeout.toMillis.toInt * 2)).killTask(taskId.mesosTaskId)
noMoreInteractions(f.driver)
}
}
"asked to kill single known unreachable instance" should {
"issue no kill to the driver because the task is unreachable" in withActor(defaultConfig) { (f, actor) =>
val instance = f.mockInstance(f.runSpecId, f.now(), mesos.Protos.TaskState.TASK_UNREACHABLE)
val promise = Promise[Done]()
actor ! KillServiceActor.KillInstances(Seq(instance), promise)
noMoreInteractions(f.driver)
f.publishInstanceChanged(TaskStatusUpdateTestHelper.killed(instance).wrapped)
promise.future.futureValue should be(Done)
}
}
"asked to kill multiple instances at once" should {
"issue three kill requests to the driver" in withActor(defaultConfig) { (f, actor) =>
val runningInstance = f.mockInstance(f.runSpecId, f.clock.now(), mesos.Protos.TaskState.TASK_RUNNING)
val unreachableInstance = f.mockInstance(f.runSpecId, f.clock.now(), mesos.Protos.TaskState.TASK_UNREACHABLE)
val stagingInstance = f.mockInstance(f.runSpecId, f.clock.now(), mesos.Protos.TaskState.TASK_STAGING)
val promise = Promise[Done]()
actor ! KillServiceActor.KillInstances(Seq(runningInstance, unreachableInstance, stagingInstance), promise)
val (runningTaskId, _) = runningInstance.tasksMap.head
verify(f.driver, timeout(f.killConfig.killRetryTimeout.toMillis.toInt * 2)).killTask(runningTaskId.mesosTaskId)
val (stagingTaskId, _) = stagingInstance.tasksMap.head
verify(f.driver, timeout(f.killConfig.killRetryTimeout.toMillis.toInt * 2)).killTask(stagingTaskId.mesosTaskId)
noMoreInteractions(f.driver)
f.publishInstanceChanged(TaskStatusUpdateTestHelper.killed(runningInstance).wrapped)
f.publishInstanceChanged(TaskStatusUpdateTestHelper.gone(unreachableInstance).wrapped)
f.publishInstanceChanged(TaskStatusUpdateTestHelper.unreachable(stagingInstance).wrapped)
promise.future.futureValue should be(Done)
}
}
"asked to kill multiple tasks at once with an empty list" should {
"issue no kill" in withActor(defaultConfig) { (f, actor) =>
val emptyList = Seq.empty[Instance]
val promise = Promise[Done]()
actor ! KillServiceActor.KillInstances(emptyList, promise)
promise.future.futureValue should be(Done)
noMoreInteractions(f.driver)
}
}
"asked to kill multiple instances subsequently" should {
"issue exactly 3 kills to the driver and complete the future successfully" in withActor(defaultConfig) { (f, actor) =>
val instance1 = f.mockInstance(f.runSpecId, f.clock.now(), mesos.Protos.TaskState.TASK_RUNNING)
val instance2 = f.mockInstance(f.runSpecId, f.clock.now(), mesos.Protos.TaskState.TASK_RUNNING)
val instance3 = f.mockInstance(f.runSpecId, f.clock.now(), mesos.Protos.TaskState.TASK_RUNNING)
val promise1 = Promise[Done]()
val promise2 = Promise[Done]()
val promise3 = Promise[Done]()
actor ! KillServiceActor.KillInstances(Seq(instance1), promise1)
actor ! KillServiceActor.KillInstances(Seq(instance2), promise2)
actor ! KillServiceActor.KillInstances(Seq(instance3), promise3)
val (taskId1, _) = instance1.tasksMap.head
val (taskId2, _) = instance2.tasksMap.head
val (taskId3, _) = instance3.tasksMap.head
verify(f.driver, timeout(f.killConfig.killRetryTimeout.toMillis.toInt * 2)).killTask(taskId1.mesosTaskId)
verify(f.driver, timeout(f.killConfig.killRetryTimeout.toMillis.toInt * 2)).killTask(taskId2.mesosTaskId)
verify(f.driver, timeout(f.killConfig.killRetryTimeout.toMillis.toInt * 2)).killTask(taskId3.mesosTaskId)
noMoreInteractions(f.driver)
f.publishInstanceChanged(TaskStatusUpdateTestHelper.killed(instance1).wrapped)
f.publishInstanceChanged(TaskStatusUpdateTestHelper.killed(instance2).wrapped)
f.publishInstanceChanged(TaskStatusUpdateTestHelper.killed(instance3).wrapped)
promise1.future.futureValue should be(Done)
promise2.future.futureValue should be(Done)
promise3.future.futureValue should be(Done)
}
}
"killing instances is throttled (single requests)" should {
"issue 5 kills immediately to the driver" in withActor(defaultConfig) { (f, actor) =>
val instances: Map[Instance.Id, Instance] = (1 to 10).iterator.map { index =>
val instance = f.mockInstance(f.runSpecId, f.clock.now(), mesos.Protos.TaskState.TASK_RUNNING)
instance.instanceId -> instance
}.toMap
instances.valuesIterator.foreach { instance =>
actor ! KillServiceActor.KillInstances(Seq(instance), Promise[Done]())
}
val captor: ArgumentCaptor[mesos.Protos.TaskID] = ArgumentCaptor.forClass(classOf[mesos.Protos.TaskID])
verify(f.driver, timeout(f.killConfig.killRetryTimeout.toMillis.toInt * 2).times(5)).killTask(captor.capture())
reset(f.driver)
captor.getAllValues().asScala.foreach { id =>
val instanceId = Task.Id.parse(id).instanceId
instances.get(instanceId).foreach { instance =>
f.publishInstanceChanged(TaskStatusUpdateTestHelper.killed(instance).wrapped)
}
}
verify(f.driver, timeout(f.killConfig.killRetryTimeout.toMillis.toInt * 2).times(5)).killTask(any)
noMoreInteractions(f.driver)
}
}
"killing instances is throttled (batch request)" should {
"issue 5 kills immediately to the driver" in withActor(defaultConfig) { (f, actor) =>
val instances: Map[Instance.Id, Instance] = (1 to 10).iterator.map { index =>
val instance = f.mockInstance(f.runSpecId, f.clock.now(), mesos.Protos.TaskState.TASK_RUNNING)
instance.instanceId -> instance
}.toMap
val promise = Promise[Done]()
actor ! KillServiceActor.KillInstances(instances.values.to(Seq), promise)
val captor: ArgumentCaptor[mesos.Protos.TaskID] = ArgumentCaptor.forClass(classOf[mesos.Protos.TaskID])
verify(f.driver, timeout(f.killConfig.killRetryTimeout.toMillis.toInt * 2).times(5)).killTask(captor.capture())
reset(f.driver)
captor.getAllValues.asScala.foreach { id =>
val instanceId = Task.Id.parse(id).instanceId
instances.get(instanceId).foreach { instance =>
f.publishInstanceChanged(TaskStatusUpdateTestHelper.killed(instance).wrapped)
}
}
verify(f.driver, timeout(f.killConfig.killRetryTimeout.toMillis.toInt * 2).times(5)).killTask(any)
noMoreInteractions(f.driver)
}
}
"killing with retry will be retried" should {
"issue a kill to the driver an eventually retry" in withActor(retryConfig) { (f, actor) =>
val instance = f.mockInstance(f.runSpecId, f.clock.now(), mesos.Protos.TaskState.TASK_RUNNING)
val promise = Promise[Done]()
actor ! KillServiceActor.KillInstances(Seq(instance), promise)
val (taskId, _) = instance.tasksMap.head
verify(f.driver, timeout(f.killConfig.killRetryTimeout.toMillis.toInt * 2)).killTask(taskId.mesosTaskId)
f.clock.advanceBy(10.seconds)
verify(f.driver, timeout(f.killConfig.killRetryTimeout.toMillis.toInt * 2)).killTask(taskId.mesosTaskId)
}
}
"when asked to kill all non-terminal tasks of a pod instance" should {
"issue 2 kills to the driver and retry eventually" in withActor(defaultConfig) { (f, actor) =>
val stagingContainer = f.container("stagingContainer")
val runningContainer = f.container("runningContainer")
val finishedContainer = f.container("finishedContainer")
var instance = TestInstanceBuilder
.newBuilder(f.runSpecId)
.addTaskStaged(containerName = Some(stagingContainer.name))
.addTaskStaged(containerName = Some(runningContainer.name))
.addTaskStaged(containerName = Some(finishedContainer.name))
.getInstance()
instance = TaskStatusUpdateTestHelper.running(instance, Some(runningContainer)).updatedInstance
instance = TaskStatusUpdateTestHelper.finished(instance, Some(finishedContainer)).updatedInstance
val promise = Promise[Done]()
actor ! KillServiceActor.KillInstances(Seq(instance), promise)
val captor = ArgumentCaptor.forClass(classOf[mesos.Protos.TaskID])
verify(f.driver, timeout(f.killConfig.killRetryTimeout.toMillis.toInt * 2).times(2)).killTask(captor.capture())
captor.getAllValues should have size 2
captor.getAllValues should contain(f.taskIdFor(instance, stagingContainer))
captor.getAllValues should contain(f.taskIdFor(instance, runningContainer))
f.clock.advanceBy(10.seconds)
val (taskId, _) = instance.tasksMap.head
verify(f.driver, timeout(f.killConfig.killRetryTimeout.toMillis.toInt * 2)).killTask(taskId.mesosTaskId)
}
}
"when killing all non-terminal tasks of a pod instance" should {
"issue 2 kills to the driver" in withActor(defaultConfig) { (f, actor) =>
val stagingContainer = f.container("stagingContainer")
val runningContainer = f.container("runningContainer")
val finishedContainer = f.container("finishedContainer")
var instance = TestInstanceBuilder
.newBuilder(f.runSpecId)
.addTaskStaged(containerName = Some(stagingContainer.name))
.addTaskStaged(containerName = Some(runningContainer.name))
.addTaskStaged(containerName = Some(finishedContainer.name))
.getInstance()
instance = TaskStatusUpdateTestHelper.running(instance, Some(runningContainer)).updatedInstance
instance = TaskStatusUpdateTestHelper.finished(instance, Some(finishedContainer)).updatedInstance
val promise = Promise[Done]()
actor ! KillServiceActor.KillInstances(Seq(instance), promise)
val captor = ArgumentCaptor.forClass(classOf[mesos.Protos.TaskID])
verify(f.driver, timeout(f.killConfig.killRetryTimeout.toMillis.toInt * 2).times(2)).killTask(captor.capture())
captor.getAllValues should have size 2
captor.getAllValues should contain(f.taskIdFor(instance, stagingContainer))
captor.getAllValues should contain(f.taskIdFor(instance, runningContainer))
f.clock.advanceBy(10.seconds)
val (taskId, _) = instance.tasksMap.head
verify(f.driver, timeout(f.killConfig.killRetryTimeout.toMillis.toInt * 2)).killTask(taskId.mesosTaskId)
}
}
"a pod instance with only terminal tasks - no kills are issued" should {
"issue no kills" in withActor(defaultConfig) { (f, actor) =>
val finishedContainer1 = f.container("finishedContainer1")
val finishedContainer2 = f.container("finishedContainer2")
var instance = TestInstanceBuilder
.newBuilder(f.runSpecId)
.addTaskRunning(containerName = Some(finishedContainer1.name))
.addTaskRunning(containerName = Some(finishedContainer2.name))
.getInstance()
instance = TaskStatusUpdateTestHelper.finished(instance, Some(finishedContainer1)).updatedInstance
instance = TaskStatusUpdateTestHelper.finished(instance, Some(finishedContainer2)).updatedInstance
val promise = Promise[Done]()
actor ! KillServiceActor.KillInstances(Seq(instance), promise)
promise.future.futureValue should be(Done)
actor.underlyingActor.instancesToKill should be('empty)
actor.underlyingActor.inFlight should be('empty)
noMoreInteractions(f.driver)
}
}
"KillServiceActor" should {
"not put scheduled instance into the actor queue" in withActor(defaultConfig) { (f, actor) =>
val scheduledInstance = Instance.scheduled(AppDefinition(AbsolutePathId("/scheduled-instance"), role = "*"))
val promise = Promise[Done]()
actor ! KillServiceActor.KillInstances(Seq(scheduledInstance), promise)
eventually {
promise.isCompleted
}
actor.underlyingActor.inFlight.isEmpty shouldBe (true) withClue (s"Expecting nothing in flight, actually '${actor.underlyingActor.inFlight}'")
actor.underlyingActor.instancesToKill.isEmpty shouldBe (true) withClue (s"Expecting nothing in instances to kill, actually '${actor.underlyingActor.instancesToKill}'")
}
}
"initialize actor based on not running instances from tracker state" in {
Given("KillServiceActor and instance tracker with active and decommissioned instance")
val f = new Fixture(defaultConfig)
val decommissionedInstance = TestInstanceBuilder
.newBuilder(AbsolutePathId("/decommissioned-app"))
.decommissioned()
.addTaskRunning()
.getInstance()
f.instanceTracker
.instancesBySpec()(any[ExecutionContext])
.returns(Future.successful(InstanceTracker.InstancesBySpec.forInstances(Seq(decommissionedInstance))))
When("Actor is started")
val actor = system.actorOf(
KillServiceActor.props(f.driverHolder, f.instanceTracker, f.killConfig, f.metrics, f.clock),
s"KillService-${UUID.randomUUID()}"
)
Then("Kill is issued for the decommissioned task")
val captor = ArgumentCaptor.forClass(classOf[mesos.Protos.TaskID])
verify(f.driver, timeout(f.killConfig.killRetryTimeout.toMillis.toInt * 2)).killTask(captor.capture())
captor.getAllValues should have size 1
captor.getAllValues should contain(Task.Id(decommissionedInstance.instanceId, None).mesosTaskId)
And("Wait for actor being killed")
// teardown actor after test is done
actor ! PoisonPill
val probe = TestProbe()
probe.watch(actor)
val terminated = probe.expectMsgAnyClassOf(classOf[Terminated])
assert(terminated.actor == actor)
}
}
def withActor(killConfig: KillConfig)(testCode: (Fixture, TestActorRef[KillServiceActor]) => Any): Unit = {
val f = new Fixture(killConfig)
val actor: TestActorRef[KillServiceActor] = TestActorRef(
KillServiceActor.props(f.driverHolder, f.instanceTracker, killConfig, f.metrics, f.clock),
s"KillService-${UUID.randomUUID()}"
)
try {
testCode(f, actor)
} finally {
actor ! PoisonPill
val probe = TestProbe()
probe.watch(actor)
val terminated = probe.expectMsgAnyClassOf(classOf[Terminated])
assert(terminated.actor == actor)
}
}
class Fixture(val killConfig: KillConfig) {
val runSpecId = AbsolutePathId("/test")
val driver = mock[SchedulerDriver]
val driverHolder: MarathonSchedulerDriverHolder = {
val holder = new MarathonSchedulerDriverHolder
holder.driver = Some(driver)
holder
}
val instanceTracker: InstanceTracker = mock[InstanceTracker]
instanceTracker.instanceUpdates returns Source.empty
val clock = new SettableClock()
val metrics: Metrics = DummyMetrics
def mockInstance(appId: AbsolutePathId, stagedAt: Timestamp, mesosState: mesos.Protos.TaskState): Instance = {
TestInstanceBuilder.newBuilder(appId).addTaskWithBuilder().taskForStatus(mesosState, stagedAt).build().getInstance()
}
def publishInstanceChanged(instanceChange: InstanceChange): Unit = {
val instanceChangedEvent = InstanceChanged(instanceChange)
logger.info("publish {} on the event stream", instanceChangedEvent)
system.eventStream.publish(instanceChangedEvent)
}
def publishUnknownInstanceTerminated(instanceId: Instance.Id): Unit = {
val event = UnknownInstanceTerminated(instanceId, instanceId.runSpecId, Condition.Killed)
logger.info("publish {} on the event stream", event)
system.eventStream.publish(event)
}
def now(): Timestamp = Timestamp.zero
def container(name: String) = MesosContainer(name = name, resources = Resources())
def taskIdFor(instance: Instance, container: MesosContainer): mesos.Protos.TaskID = {
val taskId = Task.Id(instance.instanceId, Some(container))
taskId.mesosTaskId
}
}
}
|
mesosphere/marathon
|
src/test/scala/mesosphere/marathon/core/task/termination/impl/KillServiceActorTest.scala
|
Scala
|
apache-2.0
| 19,147
|
package com.teambytes.inflatable.raft
import com.teambytes.inflatable.raft.protocol._
import akka.testkit.{ImplicitSender, TestProbe, TestFSMRef}
import org.scalatest.{OneInstancePerTest, BeforeAndAfterEach}
import concurrent.duration._
import scala.collection.immutable
import com.teambytes.inflatable.raft.example.WordConcatRaftActor
import com.teambytes.inflatable.raft.model.{Entry, Term}
class FollowerTest extends RaftSpec with BeforeAndAfterEach
with ImplicitSender {
behavior of "Follower"
val follower = TestFSMRef(new SnapshottingWordConcatRaftActor)
var data: Meta = _
val initialMembers = 0
override def beforeEach() {
super.beforeEach()
data = Meta.initial(follower)
.copy(
currentTerm = Term(2),
config = ClusterConfiguration(isLocal = false, self)
)
follower.underlyingActor.resetElectionDeadline()
}
it should "reply with Vote if Candidate has later Term than it" in {
// given
follower.setState(Follower, data)
// when
follower ! RequestVote(Term(2), self, Term(2), 2)
// then
expectMsg(VoteCandidate(Term(2)))
}
it should "Reject if Candidate has lower Term than it" in {
// given
follower.setState(Follower, data)
// when
follower ! RequestVote(Term(1), self, Term(1), 1)
// then
expectMsg(DeclineCandidate(Term(2)))
}
it should "only vote once during a Term" in {
// given
follower.setState(Follower, data)
// when / then
follower ! RequestVote(Term(2), self, Term(2), 2)
expectMsg(VoteCandidate(Term(2)))
follower ! RequestVote(Term(2), self, Term(2), 2)
expectMsg(DeclineCandidate(Term(2)))
}
it should "become a Candidate if the electionTimeout has elapsed" in {
// given
follower.setState(Follower, data)
// when
info("After awaiting for election timeout...")
// then
eventually {
follower.stateName should equal (Candidate)
}
}
it should "amortize taking the same write twice, the log should not contain duplicates then" in {
// given
data = Meta.initial(follower)
.copy(
currentTerm = Term(0),
config = ClusterConfiguration(false, self)
)
follower.setState(Follower, data)
val msg = AppendEntries(Term(1), Term(0), 0, immutable.Seq(Entry("a", Term(1), 1)), -1)
// when
info("Sending Append(a)")
follower.tell(msg, probe.ref)
info("Sending Append(a)")
follower.tell(msg, probe.ref)
// then
probe.expectMsg(AppendSuccessful(Term(1), 1))
probe.expectMsg(AppendSuccessful(Term(1), 1))
}
}
|
grahamar/inflatable
|
src/test/scala/com/teambytes/inflatable/raft/FollowerTest.scala
|
Scala
|
apache-2.0
| 2,597
|
package com.twitter.finagle.mux
import com.twitter.concurrent.AsyncQueue
import com.twitter.conversions.time._
import com.twitter.finagle.context.{Contexts, RemoteInfo}
import com.twitter.finagle.mux.lease.exp.{Lessor, nackOnExpiredLease}
import com.twitter.finagle.mux.transport.Message
import com.twitter.finagle.stats.{InMemoryStatsReceiver, NullStatsReceiver}
import com.twitter.finagle.tracing.NullTracer
import com.twitter.finagle.transport.{QueueTransport, Transport}
import com.twitter.finagle.{Dtab, Failure, Path, Service}
import com.twitter.io.Buf.Utf8
import com.twitter.io.{Buf, Charsets}
import com.twitter.util.{Await, Duration, Future, Promise, Return, Throw, Time}
import java.security.cert.X509Certificate
import java.net.SocketAddress
import org.junit.runner.RunWith
import org.mockito.Matchers.any
import org.mockito.Mockito.{never, verify, when}
import org.scalatest.FunSuite
import org.scalatest.junit.{AssertionsForJUnit, JUnitRunner}
import org.scalatest.mock.MockitoSugar
@RunWith(classOf[JUnitRunner])
class ServerTest extends FunSuite with MockitoSugar with AssertionsForJUnit {
private class LeaseCtx {
val clientToServer = new AsyncQueue[Message]
val serverToClient = new AsyncQueue[Message]
val transport = new QueueTransport(writeq=serverToClient, readq=clientToServer)
val service = mock[Service[Request, Response]]
val lessor = mock[Lessor]
val server = ServerDispatcher.newRequestResponse(
transport, service, lessor, NullTracer, NullStatsReceiver)
def issue(lease: Duration) {
val m = serverToClient.poll()
assert(!m.isDefined)
server.issue(lease)
assert(m.isDefined)
checkFuture(m, Message.Tlease(lease))
}
def demonstrateNack() {
val m = serverToClient.poll()
assert(!m.isDefined)
clientToServer.offer(
Message.Tdispatch(0, Seq.empty, Path.empty, Dtab.empty, Buf.Empty))
assert(m.isDefined)
checkFuture(m, Message.RdispatchNack(0, Seq.empty))
}
def demonstrateNoNack() {
val m = serverToClient.poll()
assert(!m.isDefined)
clientToServer.offer(
Message.Tdispatch(0, Seq.empty, Path.empty, Dtab.empty, Buf.Empty))
assert(!m.isDefined)
}
}
private[this] def checkFuture(actual: Future[Message], expected: Message) {
actual.poll match {
case Some(Return(msg)) => assert(msg == expected)
case _ => fail()
}
}
test("register/unregister with lessor") {
val ctx = new LeaseCtx
import ctx._
verify(lessor).register(server)
verify(lessor, never()).unregister(server)
clientToServer.fail(new Exception)
verify(lessor).unregister(server)
}
test("propagate leases") {
val ctx = new LeaseCtx
import ctx._
val m = serverToClient.poll()
assert(!m.isDefined)
server.issue(123.milliseconds)
assert(m.isDefined)
assert(Await.result(m) == Message.Tlease(123.milliseconds))
}
test("nack on 0 leases") {
val ctx = new LeaseCtx
import ctx._
nackOnExpiredLease.parse("true")
issue(Duration.Zero)
demonstrateNack()
}
test("don't nack on > 0 leases") {
val ctx = new LeaseCtx
import ctx._
nackOnExpiredLease.parse("true")
issue(1.millisecond)
demonstrateNoNack()
}
test("unnack again after a > 0 lease") {
Time.withCurrentTimeFrozen { ctl =>
val ctx = new LeaseCtx
import ctx._
nackOnExpiredLease.parse("true")
issue(Duration.Zero)
demonstrateNack()
ctl.advance(2.seconds)
issue(1.second)
demonstrateNoNack()
}
}
test("does not leak pending on failures") {
val p = new Promise[Response]
val svc = Service.mk[Request, Response](_ => p)
val msg = Message.Treq(tag = 9, traceId = None, Buf.Empty)
val trans = mock[Transport[Message, Message]]
when(trans.onClose)
.thenReturn(new Promise[Throwable])
when(trans.read())
.thenReturn(Future.value(msg))
.thenReturn(Future.never)
when(trans.write(any[Message]))
.thenReturn(Future.Done)
when(trans.peerCertificate)
.thenReturn(None)
val dispatcher = ServerDispatcher.newRequestResponse(
trans, svc, Lessor.nil, NullTracer, NullStatsReceiver)
assert(dispatcher.npending() == 1)
p.updateIfEmpty(Throw(new RuntimeException("welp")))
assert(dispatcher.npending() == 0)
}
test("nack on restartable failures") {
val svc = new Service[Request, Response] {
def apply(req: Request) = Future.exception(Failure.rejected("overloaded!"))
}
val clientToServer = new AsyncQueue[Message]
val serverToClient = new AsyncQueue[Message]
val transport = new QueueTransport(writeq=serverToClient, readq=clientToServer)
val server = ServerDispatcher.newRequestResponse(
transport, svc, Lessor.nil, NullTracer, NullStatsReceiver)
clientToServer.offer(
Message.Tdispatch(0, Seq.empty, Path.empty, Dtab.empty, Buf.Empty))
val reply = serverToClient.poll()
assert(reply.isDefined)
assert(Await.result(reply).isInstanceOf[Message.RdispatchNack])
}
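// The draining tests below exercise the mux shutdown handshake: close() sends Tdrain to the
// client, the client acknowledges with Rdrain, and the returned close future completes only
// once every outstanding dispatch has been answered.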
test("drains properly before closing the socket") {
Time.withCurrentTimeFrozen { ctl =>
val buf = Buf.Utf8("OK")
val serverToClient = new AsyncQueue[Message]
val clientToServer = new AsyncQueue[Message]
val transport = new QueueTransport(writeq=serverToClient, readq=clientToServer)
val p = Promise[Response]
var req: Request = null
val server = ServerDispatcher.newRequestResponse(
transport,
Service.mk { _req: Request =>
req = _req
p
}
)
clientToServer.offer(Message.Tdispatch(0, Seq.empty, Path.empty, Dtab.empty, buf))
// one outstanding request
val drain = server.close(Time.Top) // synchronously sends drain request to client
clientToServer.offer(Message.Rdrain(1)) // client draining
assert(!drain.isDefined) // one outstanding request
p.setValue(Response(Buf.Utf8("KO")))
assert(drain.isDefined) // zero outstanding requests
}
}
test("drains properly before closing the socket with two outstanding") {
Time.withCurrentTimeFrozen { ctl =>
val serverToClient = new AsyncQueue[Message]
val clientToServer = new AsyncQueue[Message]
val transport = new QueueTransport(writeq=serverToClient, readq=clientToServer)
var promises: List[Promise[Response]] = Nil
val server = ServerDispatcher.newRequestResponse(
transport, Service.mk { _: Request =>
val p = Promise[Response]()
promises ::= p
p
})
clientToServer.offer(
Message.Tdispatch(0, Seq.empty, Path.empty, Dtab.empty, Buf.Empty))
// one outstanding request
clientToServer.offer(
Message.Tdispatch(1, Seq.empty, Path.empty, Dtab.empty, Buf.Empty))
// two outstanding requests
val drain = server.close(Time.Top) // synchronously sends drain request to client
clientToServer.offer(Message.Rdrain(1)) // client draining
assert(!drain.isDefined) // two outstanding requests
assert(server.npending() == 2) // two outstanding requests
promises(0).setValue(Response.empty)
assert(server.npending() == 1) // one outstanding request
assert(!drain.isDefined) // one outstanding request
promises(1).setValue(Response.empty)
assert(server.npending() == 0) // zero outstanding requests
assert(drain.isDefined) // zero outstanding requests
}
}
test("closes properly without outstanding requests") {
Time.withCurrentTimeFrozen { ctl =>
val serverToClient = new AsyncQueue[Message]
val clientToServer = new AsyncQueue[Message]
val transport = new QueueTransport(writeq=serverToClient, readq=clientToServer)
val server = ServerDispatcher.newRequestResponse(
transport, Service.mk(req => Future.???))
val drain = server.close(Time.Top) // synchronously sends drain request to client
val Some(Return(tdrain)) = serverToClient.poll.poll
val Message.Tdrain(tag) = tdrain
assert(!drain.isDefined) // client hasn't acked
clientToServer.offer(Message.Rdrain(tag)) // client draining
assert(drain.isDefined) // safe to shut down
}
}
private[this] class Server(
svc: Service[Request, Response],
peerCert: Option[X509Certificate] = None,
remoteAddr: SocketAddress = null)
{
val serverToClient = new AsyncQueue[Message]
val clientToServer = new AsyncQueue[Message]
val transport = new QueueTransport(writeq=serverToClient, readq=clientToServer) {
override def peerCertificate = peerCert
override val remoteAddress = remoteAddr
}
def ping() = Future.Done
val server = ServerDispatcher.newRequestResponse(transport, svc)
def request(msg: Message): Unit = clientToServer.offer(msg)
def read(): Future[Message] = serverToClient.poll
}
test("starts nacking only after receiving an rdrain") {
Time.withCurrentTimeFrozen { ctl =>
import Message._
val server = new Server(Service.mk { req: Request =>
Future.value(Response.empty)
})
server.request( // request before closing
Message.Tdispatch(0, Seq.empty, Path.empty, Dtab.empty, Buf.Empty))
assert(server.read().isDefined)
val drain = server.server.close(Time.Top) // synchronously sends drain request to client
val Some(Return(tdrain)) = server.read().poll
val Tdrain(tag) = tdrain
server.request( // request after sending tdrain, before getting rdrain
Message.Tdispatch(0, Seq.empty, Path.empty, Dtab.empty, Buf.Empty))
assert(server.read().isDefined)
assert(!drain.isDefined) // client hasn't acked
server.request(Rdrain(tag)) // client draining
assert(drain.isDefined) // safe to shut down
server.request( // request after closing down
Message.Tdispatch(0, Seq.empty, Path.empty, Dtab.empty, Buf.Empty))
val Some(Return(rdrain)) = server.read().poll
assert(rdrain.isInstanceOf[RdispatchNack])
}
}
test("propagates peer certificates") {
val mockCert = mock[X509Certificate]
val okResponse = Response(Utf8("ok"))
val failResponse = Response(Utf8("fail"))
val testService = new Service[Request, Response] {
override def apply(request: Request): Future[Response] = Future.value {
if (Contexts.local.get(Transport.peerCertCtx) == Some(mockCert)) okResponse else failResponse
}
}
val tag = 3
val server = new Server(testService, Some(mockCert))
val req = Message.Treq(tag, None, Request.empty.body)
server.request(req)
val Some(Return(res)) = server.read().poll
assert(res == Message.RreqOk(tag, okResponse.body))
}
test("propagates remote address to service dispatch") {
val mockAddr = mock[SocketAddress]
val okResponse = Response(Utf8("ok"))
val failResponse = Response(Utf8("fail"))
val testService = new Service[Request, Response] {
override def apply(request: Request): Future[Response] = {
val remoteInfo = Contexts.local.get(RemoteInfo.Upstream.AddressCtx)
val res = if (remoteInfo == Some(mockAddr)) okResponse else failResponse
Future.value(res)
}
}
val tag = 3
val server = new Server(testService, None, mockAddr)
val req = Message.Treq(tag, None, Request.empty.body)
server.request(req)
val Some(Return(res)) = server.read().poll
assert(res == Message.RreqOk(tag, okResponse.body))
}
test("interrupts writes on Tdiscarded") {
val writep = new Promise[Unit]
writep.setInterruptHandler { case exc => writep.setException(exc) }
val clientToServer = new AsyncQueue[Message]
val transport = new QueueTransport(new AsyncQueue[Message], clientToServer) {
override def write(in: Message) = writep
}
val svc = Service.mk { req: Request => Future.value(Response.empty) }
val server = ServerDispatcher.newRequestResponse(transport, svc)
clientToServer.offer(Message.Tdispatch(
20, Seq.empty, Path.empty, Dtab.empty, Buf.Empty))
clientToServer.offer(Message.Tdiscarded(20, "timeout"))
intercept[ClientDiscardedRequestException] { Await.result(writep, 1.second) }
}
test("duplicate tags are serviced") {
val clientToServer = new AsyncQueue[Message]
val serverToClient = new AsyncQueue[Message]
val writep = new Promise[Unit]
val transport = new QueueTransport(serverToClient, clientToServer) {
override def write(in: Message) = writep.before {
super.write(in)
}
}
val sr = new InMemoryStatsReceiver
val svc = Service.mk { req: Request => Future.value(Response.empty) }
val server = ServerDispatcher.newRequestResponse(
transport, svc, Lessor.nil, NullTracer, sr)
val msg = Message.Tdispatch(tag = 10,
Seq.empty, Path.empty, Dtab.empty, Buf.Empty)
clientToServer.offer(msg)
clientToServer.offer(msg)
assert(sr.counters(Seq("duplicate_tag")) == 1)
writep.setDone()
assert(Await.result(serverToClient.poll().liftToTry, 30.seconds).isReturn)
assert(Await.result(serverToClient.poll().liftToTry, 30.seconds).isReturn)
}
}
|
adriancole/finagle
|
finagle-mux/src/test/scala/com/twitter/finagle/mux/ServerTest.scala
|
Scala
|
apache-2.0
| 13,276
|
package filodb.core.memstore.ratelimit
import org.scalatest.funspec.AnyFunSpec
import org.scalatest.matchers.should.Matchers
import filodb.core.MachineMetricsData
import filodb.core.memstore.ratelimit.RocksDbCardinalityStore._
class RocksDbCardinalityStoreMemoryCapSpec extends AnyFunSpec with Matchers {
val ref = MachineMetricsData.dataset2.ref
val db = new RocksDbCardinalityStore(ref, 0)
val tracker = new CardinalityTracker(ref, 0, 3, Seq(100, 100, 1000, 1000), db)
it("should be able to write keys quickly and cap memory usage") {
def dumpStats() = {
println(db.statsAsString)
println(s"memTablesSize=${db.memTablesSize}")
println(s"blockCacheSize=${db.blockCacheSize}")
println(s"diskSpaceUsed=${db.diskSpaceUsed}")
println(s"estimatedNumKeys=${db.estimatedNumKeys}")
println()
}
def assertStats() = {
db.blockCacheSize should be < LRU_CACHE_SIZE
(db.memTablesSize + db.blockCacheSize) should be < TOTAL_OFF_HEAP_SIZE
db.diskSpaceUsed should be < (100L << 20)
}
val start = System.nanoTime()
for { ws <- 0 until 5
ns <- 0 until 20
name <- 0 until 50
ts <- 0 until 100 } {
val mName = s"name_really_really_really_really_very_really_long_metric_name_$name"
tracker.modifyCount(Seq(s"ws_prefix_$ws", s"ns_prefix_$ns", mName), 1, 0)
if (name == 0 && ts == 0) assertStats()
}
val end = System.nanoTime()
assertStats()
dumpStats()
val numTimeSeries = 5 * 20 * 100 * 50
val totalTimeSecs = (end-start) / 1000000000L
val timePerIncrementMicroSecs = (end-start) / numTimeSeries / 1000
println(s"Was able to increment $numTimeSeries time series, $timePerIncrementMicroSecs" +
s"us each increment total of ${totalTimeSecs}s")
timePerIncrementMicroSecs should be < 200L
}
}
|
filodb/FiloDB
|
core/src/test/scala/filodb.core/memstore/ratelimit/RocksDbCardinalityStoreMemoryCapSpec.scala
|
Scala
|
apache-2.0
| 1,861
|
package de.tu_berlin.formic.datastructure.linear.client
import akka.actor.ActorSystem
import akka.testkit.TestKit
import de.tu_berlin.formic.common.json.FormicJsonProtocol
import de.tu_berlin.formic.datastructure.linear.LinearFormicJsonDataStructureProtocol
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
/**
* @author Ronny Bräunlich
*/
class LinearClientDataStructureProviderSpec extends TestKit(ActorSystem("LinearClientDataStructureProviderSpec"))
with WordSpecLike
with Matchers
with BeforeAndAfterAll {
override def afterAll(): Unit = {
system.terminate()
}
"The LinearClientDataStructureProvider" must {
"create a factory actor for every list type" in {
val provider = LinearClientDataStructureProvider()
val factoryMap = provider.initFactories(system)
factoryMap.keySet should contain allOf(
FormicBooleanListDataStructureFactory.name,
FormicIntegerListDataStructureFactory.name,
FormicDoubleListDataStructureFactory.name,
FormicStringDataStructureFactory.name)
val actorPaths = factoryMap.values.map(ref => ref.path.name.toString)
actorPaths should contain allOf(
FormicBooleanListDataStructureFactory.name.name,
FormicIntegerListDataStructureFactory.name.name,
FormicDoubleListDataStructureFactory.name.name,
FormicStringDataStructureFactory.name.name
)
}
"register a FormicJsonDataTypeProtocols for each list type" in {
val protocol = new FormicJsonProtocol
val provider = LinearClientDataStructureProvider()
provider.registerFormicJsonDataStructureProtocols(protocol)
val registered = protocol.dataStructureOperationJsonProtocols
registered should contain allOf(
FormicBooleanListDataStructureFactory.name -> new LinearFormicJsonDataStructureProtocol[Boolean](FormicBooleanListDataStructureFactory.name),
FormicIntegerListDataStructureFactory.name -> new LinearFormicJsonDataStructureProtocol[Int](FormicIntegerListDataStructureFactory.name),
FormicDoubleListDataStructureFactory.name -> new LinearFormicJsonDataStructureProtocol[Double](FormicDoubleListDataStructureFactory.name),
FormicStringDataStructureFactory.name -> new LinearFormicJsonDataStructureProtocol[Char](FormicStringDataStructureFactory.name)
)
}
}
}
|
rbraeunlich/formic
|
linear/shared/src/test/scala/de/tu_berlin/formic/datastructure/linear/client/LinearClientDataStructureProviderSpec.scala
|
Scala
|
apache-2.0
| 2,370
|
package ohnosequences.loquat
import utils._, files._
import ohnosequences.statika._
import ohnosequences.awstools._, s3._, ec2._, sns._, regions._
import com.typesafe.scalalogging.LazyLogging
import scala.concurrent._, duration._
import scala.util.Try
case class LogUploaderBundle(
val config: AnyLoquatConfig,
val scheduler: Scheduler
) extends Bundle() with LazyLogging {
lazy val aws = AWSClients.withRegion(config.region)
lazy val logFile = file("/log.txt")
lazy val instanceID = getLocalMetadata("instance-id").getOrElse {
sys.error("Failed to get instance ID")
}
lazy val logS3: S3Object = config.resourceNames.logs / s"${instanceID}.log"
def uploadLog(): Try[Unit] = Try {
aws.s3.putObject(logS3.bucket, logS3.key, logFile)
// logger.info(s"Uploaded log to S3: [${logS3}]")
()
}.recover { case e =>
logger.error(s"Couldn't upload log to S3: ${e}")
}
def instructions: AnyInstructions = LazyTry[Unit] {
scheduler.repeat(
after = 30.seconds,
every = 30.seconds
)(uploadLog)
}
def failureNotification(subject: String): Try[String] = {
val logTail = logFile.lines.toSeq.takeRight(20).mkString("\n") // last 20 lines
val tempLinkText: String = aws.s3.generateTemporaryLink(logS3, 1.day).map { url =>
s"Temporary download link: <${url}>"
}.getOrElse("")
val message = s"""${subject}. If it's a fatal failure, you should manually undeploy the loquat.
|Full log is at <${logS3}>. ${tempLinkText}
|Here is its tail:
|
|[...]
|${logTail}
|""".stripMargin
aws.sns
.getOrCreateTopic(config.resourceNames.notificationTopic)
.flatMap { _.publish(message, subject) }
}
}
|
ohnosequences/loquat
|
src/main/scala/ohnosequences/loquat/logger.scala
|
Scala
|
agpl-3.0
| 1,718
|
/*
* Copyright 2009-2010 LinkedIn, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.linkedin.norbert
package cluster
package common
import com.linkedin.norbert.util.WaitFor
import org.specs2.mock.Mockito
import org.specs2.mutable.SpecificationWithJUnit
import org.specs2.specification.{After, Before, Scope}
import scala.actors.Actor._
class ClusterNotificationManagerComponentSpec extends SpecificationWithJUnit with Mockito with WaitFor {
trait ClusterNotificationManagerSetup extends Scope with After with Before with ClusterNotificationManagerComponent {
import ClusterNotificationMessages._
val clusterNotificationManager = new ClusterNotificationManager
val shortNodes = Set(Node(1, "localhost:31313", false, Set(1, 2)))
val nodes = shortNodes ++ List(Node(2, "localhost:31314", true, Set(3, 4)),
Node(3, "localhost:31315", false, Set(5, 6)))
def before = {
clusterNotificationManager.start
}
def after = {
clusterNotificationManager ! Shutdown
}
}
trait ActorSetup extends ClusterNotificationManagerSetup {
import ClusterNotificationMessages._
override def after = {
clusterNotificationManager ! Shutdown
actors.Scheduler.shutdown
}
}
"ClusterNotificationManager" should {
"when handling an AddListener message" in new ClusterNotificationManagerSetup {
"send a Connected event to the listener if the cluster is connected" in {
import ClusterNotificationMessages._
var callCount = 0
var currentNodes: Set[Node] = Set()
val listener = actor {
react {
case ClusterEvents.Connected(n) => callCount += 1; currentNodes = n
}
}
clusterNotificationManager ! Connected(nodes)
clusterNotificationManager ! AddListener(listener)
callCount must eventually(be_==(1))
currentNodes.foreach { node =>
node.id must be_==(2)
}
currentNodes.size must be_==(1)
}
"not send a Connected event to the listener if the cluster is not connected" in {
import ClusterNotificationMessages._
var callCount = 0
val listener = actor {
react {
case ClusterEvents.Connected(_) => callCount += 1
}
}
clusterNotificationManager ! AddListener(listener)
waitFor(20.ms)
callCount must be_==(0)
}
"when handling a RemoveListener message remove the listener" in {
import ClusterNotificationMessages._
var callCount = 0
val listener = actor {
loop {
react {
case ClusterEvents.Connected(_) => callCount += 1
case ClusterEvents.NodesChanged(_) => callCount += 1
}
}
}
val key = clusterNotificationManager !? AddListener(listener) match {
case AddedListener(key) => key
}
clusterNotificationManager ! Connected(nodes)
clusterNotificationManager ! RemoveListener(key)
clusterNotificationManager ! NodesChanged(nodes)
callCount must eventually(be_==(1))
}
}
"when handling a Connected message" in new ActorSetup {
import ClusterNotificationMessages._
"notify listeners" in {
var callCount = 0
var currentNodes: Set[Node] = Set()
val listener = actor {
loop {
react {
case ClusterEvents.Connected(n) => callCount += 1; currentNodes = n
}
}
}
clusterNotificationManager ! AddListener(listener)
clusterNotificationManager ! Connected(nodes)
callCount must eventually(be_==(1))
currentNodes.foreach { node =>
node.id must be_==(2)
}
currentNodes.size must be_==(1)
}
"do nothing if already connected" in {
var callCount = 0
val listener = actor {
loop {
react {
case ClusterEvents.Connected(_) => callCount += 1
case _ =>
}
}
}
clusterNotificationManager ! AddListener(listener)
clusterNotificationManager ! Connected(nodes)
clusterNotificationManager ! Connected(nodes)
callCount must eventually(be_==(1))
}
}
"when handling a NodesChanged message" in new ActorSetup {
import ClusterNotificationMessages._
"notify listeners" in {
var callCount = 0
var currentNodes: Set[Node] = Set()
val listener = actor {
loop {
react {
case ClusterEvents.NodesChanged(n) =>
callCount += 1
currentNodes = n
}
}
}
clusterNotificationManager ! Connected(shortNodes)
clusterNotificationManager ! AddListener(listener)
clusterNotificationManager ! NodesChanged(nodes)
callCount must eventually(be_==(1))
currentNodes.foreach { node =>
node.id must be_==(2)
}
currentNodes.size must be_==(1)
}
"do nothing is not connected" in {
import ClusterNotificationMessages._
var callCount = 0
val listener = actor {
loop {
react {
case ClusterEvents.NodesChanged(n) => callCount += 1
case _ =>
}
}
}
clusterNotificationManager ! Connected(shortNodes)
clusterNotificationManager ! AddListener(listener)
clusterNotificationManager ! NodesChanged(nodes)
callCount must eventually(be_==(1))
}
"when handling a Disconnected message" in new ActorSetup {
import ClusterNotificationMessages._
"disconnects the cluster" in {
clusterNotificationManager ! Connected(nodes)
clusterNotificationManager ! Disconnected
clusterNotificationManager !? GetCurrentNodes match {
case CurrentNodes(nodes) => nodes.size must be_==(0)
}
}
"notify listeners" in {
var callCount = 0
val listener = actor {
loop {
react {
case ClusterEvents.Disconnected => callCount += 1
case _ =>
}
}
}
clusterNotificationManager ! AddListener(listener)
clusterNotificationManager ! Connected(nodes)
clusterNotificationManager ! Disconnected
callCount must eventually(be_==(1))
}
"do nothing if not connected" in {
var callCount = 0
val listener = actor {
loop {
react {
case ClusterEvents.Disconnected => callCount += 1
case _ =>
}
}
}
clusterNotificationManager ! AddListener(listener)
clusterNotificationManager ! Disconnected
callCount must eventually(be_==(0))
}
}
"when handling a Shutdown message stop handling events after shutdown" in new ActorSetup {
import ClusterNotificationMessages._
var connectedCallCount = 0
var shutdownCallCount = 0
val listener = actor {
loop {
react {
case ClusterEvents.Connected(_) => connectedCallCount += 1
case ClusterEvents.Shutdown => shutdownCallCount += 1
case _ =>
}
}
}
clusterNotificationManager ! AddListener(listener)
clusterNotificationManager ! Connected(nodes)
clusterNotificationManager ! Shutdown
clusterNotificationManager ! Connected(nodes)
connectedCallCount must eventually(be_==(1))
shutdownCallCount must eventually(be_==(1))
}
}
}
}
|
linkedin/norbert
|
cluster/src/test/scala/com/linkedin/norbert/cluster/common/ClusterNotificationManagerComponentSpec.scala
|
Scala
|
apache-2.0
| 8,344
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import scala.collection.JavaConverters._
import org.apache.spark.annotation.Stable
import org.apache.spark.internal.Logging
import org.apache.spark.sql.catalyst.analysis._
import org.apache.spark.sql.catalyst.encoders.{encoderFor, ExpressionEncoder}
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.aggregate.AggregateExpression
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
import org.apache.spark.sql.catalyst.util.{toPrettySQL, CharVarcharUtils}
import org.apache.spark.sql.execution.aggregate.TypedAggregateExpression
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.functions.lit
import org.apache.spark.sql.types._
private[sql] object Column {
def apply(colName: String): Column = new Column(colName)
def apply(expr: Expression): Column = new Column(expr)
def unapply(col: Column): Option[Expression] = Some(col.expr)
private[sql] def generateAlias(e: Expression): String = {
e match {
case a: AggregateExpression if a.aggregateFunction.isInstanceOf[TypedAggregateExpression] =>
a.aggregateFunction.toString
case expr => toPrettySQL(expr)
}
}
private[sql] def stripColumnReferenceMetadata(a: AttributeReference): AttributeReference = {
val metadataWithoutId = new MetadataBuilder()
.withMetadata(a.metadata)
.remove(Dataset.DATASET_ID_KEY)
.remove(Dataset.COL_POS_KEY)
.build()
a.withMetadata(metadataWithoutId)
}
}
/**
* A [[Column]] where an [[Encoder]] has been given for the expected input and return type.
* To create a [[TypedColumn]], use the `as` function on a [[Column]].
*
* @tparam T The input type expected for this expression. Can be `Any` if the expression is type
* checked by the analyzer instead of the compiler (i.e. `expr("sum(...)")`).
* @tparam U The output type of this column.
*
* @since 1.6.0
*/
@Stable
class TypedColumn[-T, U](
expr: Expression,
private[sql] val encoder: ExpressionEncoder[U])
extends Column(expr) {
/**
* Inserts the specific input type and schema into any expressions that are expected to operate
* on a decoded object.
*/
private[sql] def withInputType(
inputEncoder: ExpressionEncoder[_],
inputAttributes: Seq[Attribute]): TypedColumn[T, U] = {
val unresolvedDeserializer = UnresolvedDeserializer(inputEncoder.deserializer, inputAttributes)
// This only inserts inputs into typed aggregate expressions. For untyped aggregate expressions,
// the resolving is handled in the analyzer directly.
val newExpr = expr transform {
case ta: TypedAggregateExpression if ta.inputDeserializer.isEmpty =>
ta.withInputInfo(
deser = unresolvedDeserializer,
cls = inputEncoder.clsTag.runtimeClass,
schema = inputEncoder.schema)
}
new TypedColumn[T, U](newExpr, encoder)
}
/**
* Gives the [[TypedColumn]] a name (alias).
* If the current `TypedColumn` has metadata associated with it, this metadata will be propagated
* to the new column.
*
* @group expr_ops
* @since 2.0.0
*/
override def name(alias: String): TypedColumn[T, U] =
new TypedColumn[T, U](super.name(alias).expr, encoder)
}
/**
* A column that will be computed based on the data in a `DataFrame`.
*
* A new column can be constructed based on the input columns present in a DataFrame:
*
* {{{
* df("columnName") // On a specific `df` DataFrame.
* col("columnName") // A generic column not yet associated with a DataFrame.
* col("columnName.field") // Extracting a struct field
* col("`a.column.with.dots`") // Escape `.` in column names.
* $"columnName" // Scala short hand for a named column.
* }}}
*
* [[Column]] objects can be composed to form complex expressions:
*
* {{{
* $"a" + 1
* $"a" === $"b"
* }}}
*
* @note The internal Catalyst expression can be accessed via [[expr]], but this method is for
* debugging purposes only and can change in any future Spark releases.
*
* @groupname java_expr_ops Java-specific expression operators
* @groupname expr_ops Expression operators
* @groupname df_ops DataFrame functions
* @groupname Ungrouped Support functions for DataFrames
*
* @since 1.3.0
*/
@Stable
class Column(val expr: Expression) extends Logging {
def this(name: String) = this(name match {
case "*" => UnresolvedStar(None)
case _ if name.endsWith(".*") =>
val parts = UnresolvedAttribute.parseAttributeName(name.substring(0, name.length - 2))
UnresolvedStar(Some(parts))
case _ => UnresolvedAttribute.quotedString(name)
})
override def toString: String = toPrettySQL(expr)
override def equals(that: Any): Boolean = that match {
case that: Column => that.normalizedExpr() == this.normalizedExpr()
case _ => false
}
override def hashCode: Int = this.normalizedExpr().hashCode()
private def normalizedExpr(): Expression = expr transform {
case a: AttributeReference => Column.stripColumnReferenceMetadata(a)
}
/** Creates a column based on the given expression. */
private def withExpr(newExpr: Expression): Column = new Column(newExpr)
/**
* Returns the expression for this column either with an existing or auto assigned name.
*/
private[sql] def named: NamedExpression = expr match {
case expr: NamedExpression => expr
// Leave an unaliased generator with an empty list of names since the analyzer will generate
// the correct defaults after the nested expression's type has been resolved.
case g: Generator => MultiAlias(g, Nil)
// If we have a top level Cast, there is a chance to give it a better alias, if there is a
// NamedExpression under this Cast.
case c: Cast =>
c.transformUp {
case c @ Cast(_: NamedExpression, _, _) => UnresolvedAlias(c)
} match {
case ne: NamedExpression => ne
case _ => UnresolvedAlias(expr, Some(Column.generateAlias))
}
case expr: Expression => UnresolvedAlias(expr, Some(Column.generateAlias))
}
/**
* Provides a type hint about the expected return value of this column. This information can
* be used by operations such as `select` on a [[Dataset]] to automatically convert the
* results into the correct JVM types.
* @since 1.6.0
*/
def as[U : Encoder]: TypedColumn[Any, U] = new TypedColumn[Any, U](expr, encoderFor[U])
/**
* Extracts a value or values from a complex type.
* The following types of extraction are supported:
* <ul>
* <li>Given an Array, an integer ordinal can be used to retrieve a single value.</li>
* <li>Given a Map, a key of the correct type can be used to retrieve an individual value.</li>
* <li>Given a Struct, a string fieldName can be used to extract that field.</li>
* <li>Given an Array of Structs, a string fieldName can be used to extract that field
* from every struct in that array, and return an Array of fields.</li>
* </ul>
* @group expr_ops
* @since 1.4.0
*/
def apply(extraction: Any): Column = withExpr {
UnresolvedExtractValue(expr, lit(extraction).expr)
}
/**
* Unary minus, i.e. negate the expression.
* {{{
* // Scala: select the amount column and negates all values.
* df.select( -df("amount") )
*
* // Java:
* import static org.apache.spark.sql.functions.*;
* df.select( negate(col("amount") );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def unary_- : Column = withExpr { UnaryMinus(expr) }
/**
* Inversion of boolean expression, i.e. NOT.
* {{{
* // Scala: select rows that are not active (isActive === false)
* df.filter( !df("isActive") )
*
* // Java:
* import static org.apache.spark.sql.functions.*;
* df.filter( not(df.col("isActive")) );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def unary_! : Column = withExpr { Not(expr) }
/**
* Equality test.
* {{{
* // Scala:
* df.filter( df("colA") === df("colB") )
*
* // Java
* import static org.apache.spark.sql.functions.*;
* df.filter( col("colA").equalTo(col("colB")) );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def === (other: Any): Column = withExpr {
val right = lit(other).expr
if (this.expr == right) {
logWarning(
s"Constructing trivially true equals predicate, '${this.expr} = $right'. " +
"Perhaps you need to use aliases.")
}
EqualTo(expr, right)
}
/**
* Equality test.
* {{{
* // Scala:
* df.filter( df("colA") === df("colB") )
*
* // Java
* import static org.apache.spark.sql.functions.*;
* df.filter( col("colA").equalTo(col("colB")) );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def equalTo(other: Any): Column = this === other
/**
* Inequality test.
* {{{
* // Scala:
* df.select( df("colA") =!= df("colB") )
* df.select( !(df("colA") === df("colB")) )
*
* // Java:
* import static org.apache.spark.sql.functions.*;
* df.filter( col("colA").notEqual(col("colB")) );
* }}}
*
* @group expr_ops
* @since 2.0.0
*/
def =!= (other: Any): Column = withExpr { Not(EqualTo(expr, lit(other).expr)) }
/**
* Inequality test.
* {{{
* // Scala:
* df.select( df("colA") !== df("colB") )
* df.select( !(df("colA") === df("colB")) )
*
* // Java:
* import static org.apache.spark.sql.functions.*;
* df.filter( col("colA").notEqual(col("colB")) );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
@deprecated("!== does not have the same precedence as ===, use =!= instead", "2.0.0")
def !== (other: Any): Column = this =!= other
/**
* Inequality test.
* {{{
* // Scala:
* df.select( df("colA") !== df("colB") )
* df.select( !(df("colA") === df("colB")) )
*
* // Java:
* import static org.apache.spark.sql.functions.*;
* df.filter( col("colA").notEqual(col("colB")) );
* }}}
*
* @group java_expr_ops
* @since 1.3.0
*/
def notEqual(other: Any): Column = withExpr { Not(EqualTo(expr, lit(other).expr)) }
/**
* Greater than.
* {{{
* // Scala: The following selects people older than 21.
* people.select( people("age") > 21 )
*
* // Java:
* import static org.apache.spark.sql.functions.*;
* people.select( people.col("age").gt(21) );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def > (other: Any): Column = withExpr { GreaterThan(expr, lit(other).expr) }
/**
* Greater than.
* {{{
* // Scala: The following selects people older than 21.
* people.select( people("age") > lit(21) )
*
* // Java:
* import static org.apache.spark.sql.functions.*;
* people.select( people.col("age").gt(21) );
* }}}
*
* @group java_expr_ops
* @since 1.3.0
*/
def gt(other: Any): Column = this > other
/**
* Less than.
* {{{
* // Scala: The following selects people younger than 21.
* people.select( people("age") < 21 )
*
* // Java:
* people.select( people.col("age").lt(21) );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def < (other: Any): Column = withExpr { LessThan(expr, lit(other).expr) }
/**
* Less than.
* {{{
* // Scala: The following selects people younger than 21.
* people.select( people("age") < 21 )
*
* // Java:
* people.select( people.col("age").lt(21) );
* }}}
*
* @group java_expr_ops
* @since 1.3.0
*/
def lt(other: Any): Column = this < other
/**
* Less than or equal to.
* {{{
* // Scala: The following selects people age 21 or younger.
* people.select( people("age") <= 21 )
*
* // Java:
* people.select( people.col("age").leq(21) );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def <= (other: Any): Column = withExpr { LessThanOrEqual(expr, lit(other).expr) }
/**
* Less than or equal to.
* {{{
* // Scala: The following selects people age 21 or younger.
* people.select( people("age") <= 21 )
*
* // Java:
* people.select( people.col("age").leq(21) );
* }}}
*
* @group java_expr_ops
* @since 1.3.0
*/
def leq(other: Any): Column = this <= other
/**
* Greater than or equal to an expression.
* {{{
* // Scala: The following selects people age 21 or older.
* people.select( people("age") >= 21 )
*
* // Java:
* people.select( people.col("age").geq(21) )
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def >= (other: Any): Column = withExpr { GreaterThanOrEqual(expr, lit(other).expr) }
/**
* Greater than or equal to an expression.
* {{{
* // Scala: The following selects people age 21 or older.
* people.select( people("age") >= 21 )
*
* // Java:
* people.select( people.col("age").geq(21) )
* }}}
*
* @group java_expr_ops
* @since 1.3.0
*/
def geq(other: Any): Column = this >= other
/**
* Equality test that is safe for null values.
*
* @group expr_ops
* @since 1.3.0
*/
def <=> (other: Any): Column = withExpr {
val right = lit(other).expr
if (this.expr == right) {
logWarning(
s"Constructing trivially true equals predicate, '${this.expr} <=> $right'. " +
"Perhaps you need to use aliases.")
}
EqualNullSafe(expr, right)
}
/**
* Equality test that is safe for null values.
*
* @group java_expr_ops
* @since 1.3.0
*/
def eqNullSafe(other: Any): Column = this <=> other
/**
* Evaluates a list of conditions and returns one of multiple possible result expressions.
* If otherwise is not defined at the end, null is returned for unmatched conditions.
*
* {{{
* // Example: encoding gender string column into integer.
*
* // Scala:
* people.select(when(people("gender") === "male", 0)
* .when(people("gender") === "female", 1)
* .otherwise(2))
*
* // Java:
* people.select(when(col("gender").equalTo("male"), 0)
* .when(col("gender").equalTo("female"), 1)
* .otherwise(2))
* }}}
*
* @group expr_ops
* @since 1.4.0
*/
def when(condition: Column, value: Any): Column = this.expr match {
case CaseWhen(branches, None) =>
withExpr { CaseWhen(branches :+ ((condition.expr, lit(value).expr))) }
case CaseWhen(branches, Some(_)) =>
throw new IllegalArgumentException(
"when() cannot be applied once otherwise() is applied")
case _ =>
throw new IllegalArgumentException(
"when() can only be applied on a Column previously generated by when() function")
}
/**
* Evaluates a list of conditions and returns one of multiple possible result expressions.
* If otherwise is not defined at the end, null is returned for unmatched conditions.
*
* {{{
* // Example: encoding gender string column into integer.
*
* // Scala:
* people.select(when(people("gender") === "male", 0)
* .when(people("gender") === "female", 1)
* .otherwise(2))
*
* // Java:
* people.select(when(col("gender").equalTo("male"), 0)
* .when(col("gender").equalTo("female"), 1)
* .otherwise(2))
* }}}
*
* @group expr_ops
* @since 1.4.0
*/
def otherwise(value: Any): Column = this.expr match {
case CaseWhen(branches, None) =>
withExpr { CaseWhen(branches, Option(lit(value).expr)) }
case CaseWhen(branches, Some(_)) =>
throw new IllegalArgumentException(
"otherwise() can only be applied once on a Column previously generated by when()")
case _ =>
throw new IllegalArgumentException(
"otherwise() can only be applied on a Column previously generated by when()")
}
/**
* True if the current column is between the lower bound and upper bound, inclusive.
*
* @group java_expr_ops
* @since 1.4.0
*/
def between(lowerBound: Any, upperBound: Any): Column = {
(this >= lowerBound) && (this <= upperBound)
}
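// Illustrative usage (not part of the original source): `between` simply expands to the
// conjunction of `>=` and `<=` shown above. Assuming a DataFrame `people` and
// `spark.implicits._` in scope, the two filters below are equivalent:
//   people.filter($"age".between(18, 30))
//   people.filter(($"age" >= 18) && ($"age" <= 30))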
/**
* True if the current expression is NaN.
*
* @group expr_ops
* @since 1.5.0
*/
def isNaN: Column = withExpr { IsNaN(expr) }
/**
* True if the current expression is null.
*
* @group expr_ops
* @since 1.3.0
*/
def isNull: Column = withExpr { IsNull(expr) }
/**
* True if the current expression is NOT null.
*
* @group expr_ops
* @since 1.3.0
*/
def isNotNull: Column = withExpr { IsNotNull(expr) }
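// Illustrative usage (not part of the original source): combining the null/NaN predicates in a
// filter. Assumes a DataFrame `df` with a nullable numeric column "value" and `spark.implicits._`
// in scope:
//   df.filter($"value".isNotNull && !$"value".isNaN)  // keep only real, non-null numbers
//   df.filter($"value".isNull)                        // keep only the rows where "value" is null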
/**
* Boolean OR.
* {{{
* // Scala: The following selects people that are in school or employed.
* people.filter( people("inSchool") || people("isEmployed") )
*
* // Java:
* people.filter( people.col("inSchool").or(people.col("isEmployed")) );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def || (other: Any): Column = withExpr { Or(expr, lit(other).expr) }
/**
* Boolean OR.
* {{{
* // Scala: The following selects people that are in school or employed.
* people.filter( people("inSchool") || people("isEmployed") )
*
* // Java:
* people.filter( people.col("inSchool").or(people.col("isEmployed")) );
* }}}
*
* @group java_expr_ops
* @since 1.3.0
*/
def or(other: Column): Column = this || other
/**
* Boolean AND.
* {{{
* // Scala: The following selects people that are in school and employed at the same time.
* people.select( people("inSchool") && people("isEmployed") )
*
* // Java:
* people.select( people.col("inSchool").and(people.col("isEmployed")) );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def && (other: Any): Column = withExpr { And(expr, lit(other).expr) }
/**
* Boolean AND.
* {{{
* // Scala: The following selects people that are in school and employed at the same time.
* people.select( people("inSchool") && people("isEmployed") )
*
* // Java:
* people.select( people.col("inSchool").and(people.col("isEmployed")) );
* }}}
*
* @group java_expr_ops
* @since 1.3.0
*/
def and(other: Column): Column = this && other
/**
* Sum of this expression and another expression.
* {{{
* // Scala: The following selects the sum of a person's height and weight.
* people.select( people("height") + people("weight") )
*
* // Java:
* people.select( people.col("height").plus(people.col("weight")) );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def + (other: Any): Column = withExpr { Add(expr, lit(other).expr) }
/**
* Sum of this expression and another expression.
* {{{
* // Scala: The following selects the sum of a person's height and weight.
* people.select( people("height") + people("weight") )
*
* // Java:
* people.select( people.col("height").plus(people.col("weight")) );
* }}}
*
* @group java_expr_ops
* @since 1.3.0
*/
def plus(other: Any): Column = this + other
/**
* Subtraction. Subtract the other expression from this expression.
* {{{
* // Scala: The following selects the difference between people's height and their weight.
* people.select( people("height") - people("weight") )
*
* // Java:
* people.select( people.col("height").minus(people.col("weight")) );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def - (other: Any): Column = withExpr { Subtract(expr, lit(other).expr) }
/**
* Subtraction. Subtract the other expression from this expression.
* {{{
* // Scala: The following selects the difference between people's height and their weight.
* people.select( people("height") - people("weight") )
*
* // Java:
* people.select( people.col("height").minus(people.col("weight")) );
* }}}
*
* @group java_expr_ops
* @since 1.3.0
*/
def minus(other: Any): Column = this - other
/**
* Multiplication of this expression and another expression.
* {{{
* // Scala: The following multiplies a person's height by their weight.
* people.select( people("height") * people("weight") )
*
* // Java:
* people.select( people.col("height").multiply(people.col("weight")) );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def * (other: Any): Column = withExpr { Multiply(expr, lit(other).expr) }
/**
* Multiplication of this expression and another expression.
* {{{
* // Scala: The following multiplies a person's height by their weight.
* people.select( people("height") * people("weight") )
*
* // Java:
* people.select( people.col("height").multiply(people.col("weight")) );
* }}}
*
* @group java_expr_ops
* @since 1.3.0
*/
def multiply(other: Any): Column = this * other
/**
* Division of this expression by another expression.
* {{{
* // Scala: The following divides a person's height by their weight.
* people.select( people("height") / people("weight") )
*
* // Java:
* people.select( people.col("height").divide(people.col("weight")) );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def / (other: Any): Column = withExpr { Divide(expr, lit(other).expr) }
/**
* Division of this expression by another expression.
* {{{
* // Scala: The following divides a person's height by their weight.
* people.select( people("height") / people("weight") )
*
* // Java:
* people.select( people.col("height").divide(people.col("weight")) );
* }}}
*
* @group java_expr_ops
* @since 1.3.0
*/
def divide(other: Any): Column = this / other
/**
* Modulo (a.k.a. remainder) expression.
*
* @group expr_ops
* @since 1.3.0
*/
def % (other: Any): Column = withExpr { Remainder(expr, lit(other).expr) }
/**
* Modulo (a.k.a. remainder) expression.
*
* @group java_expr_ops
* @since 1.3.0
*/
def mod(other: Any): Column = this % other
/**
* A boolean expression that is evaluated to true if the value of this expression is contained
* by the evaluated values of the arguments.
*
* Note: Since the type of the elements in the list is inferred only at run time,
* the elements will be "up-casted" to the most common type for comparison.
* For example:
* 1) In the case of "Int vs String", the "Int" will be up-casted to "String" and the
* comparison will look like "String vs String".
* 2) In the case of "Float vs Double", the "Float" will be up-casted to "Double" and the
* comparison will look like "Double vs Double"
*
* @group expr_ops
* @since 1.5.0
*/
@scala.annotation.varargs
def isin(list: Any*): Column = withExpr { In(expr, list.map(lit(_).expr)) }
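// Illustrative usage (not part of the original source), assuming a DataFrame `df` with an
// integer column "id" and `spark.implicits._` in scope:
//   df.filter($"id".isin(1, 2, 3))    // Int vs Int, no casting needed
//   df.filter($"id".isin("1", "2"))   // Int column is up-casted and compared as String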
/**
* A boolean expression that is evaluated to true if the value of this expression is contained
* by the provided collection.
*
* Note: Since the type of the elements in the collection is inferred only at run time,
* the elements will be "up-casted" to the most common type for comparison.
* For example:
* 1) In the case of "Int vs String", the "Int" will be up-casted to "String" and the
* comparison will look like "String vs String".
* 2) In the case of "Float vs Double", the "Float" will be up-casted to "Double" and the
* comparison will look like "Double vs Double"
*
* @group expr_ops
* @since 2.4.0
*/
def isInCollection(values: scala.collection.Iterable[_]): Column = isin(values.toSeq: _*)
/**
* A boolean expression that is evaluated to true if the value of this expression is contained
* by the provided collection.
*
* Note: Since the type of the elements in the collection is inferred only at run time,
* the elements will be "up-casted" to the most common type for comparison.
* For example:
* 1) In the case of "Int vs String", the "Int" will be up-casted to "String" and the
* comparison will look like "String vs String".
* 2) In the case of "Float vs Double", the "Float" will be up-casted to "Double" and the
* comparison will look like "Double vs Double"
*
* @group java_expr_ops
* @since 2.4.0
*/
def isInCollection(values: java.lang.Iterable[_]): Column = isInCollection(values.asScala)
/**
* SQL like expression. Returns a boolean column based on a SQL LIKE match.
*
* @group expr_ops
* @since 1.3.0
*/
def like(literal: String): Column = withExpr { new Like(expr, lit(literal).expr) }
/**
* SQL RLIKE expression (LIKE with Regex). Returns a boolean column based on a regex
* match.
*
* @group expr_ops
* @since 1.3.0
*/
def rlike(literal: String): Column = withExpr { RLike(expr, lit(literal).expr) }
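// Illustrative usage (not part of the original source): SQL LIKE vs. regex matching, assuming a
// DataFrame `df` with a string column "name" and `spark.implicits._` in scope:
//   df.filter($"name".like("Al%"))        // SQL pattern: '%' matches any sequence of characters
//   df.filter($"name".rlike("^Al.*n$"))   // regular-expression match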
/**
* An expression that gets an item at position `ordinal` out of an array,
* or gets a value by key `key` in a `MapType`.
*
* @group expr_ops
* @since 1.3.0
*/
def getItem(key: Any): Column = withExpr { UnresolvedExtractValue(expr, Literal(key)) }
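// Illustrative usage (not part of the original source): extracting from array and map columns,
// assuming a DataFrame `df` with an array column "tags" and a map column "attrs", and
// `spark.implicits._` in scope:
//   df.select($"tags".getItem(0))         // first element of the array
//   df.select($"attrs".getItem("color"))  // value stored under key "color" in the map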
// scalastyle:off line.size.limit
/**
* An expression that adds/replaces field in `StructType` by name.
*
* {{{
* val df = sql("SELECT named_struct('a', 1, 'b', 2) struct_col")
* df.select($"struct_col".withField("c", lit(3)))
* // result: {"a":1,"b":2,"c":3}
*
* val df = sql("SELECT named_struct('a', 1, 'b', 2) struct_col")
* df.select($"struct_col".withField("b", lit(3)))
* // result: {"a":1,"b":3}
*
* val df = sql("SELECT CAST(NULL AS struct<a:int,b:int>) struct_col")
* df.select($"struct_col".withField("c", lit(3)))
* // result: null of type struct<a:int,b:int,c:int>
*
* val df = sql("SELECT named_struct('a', 1, 'b', 2, 'b', 3) struct_col")
* df.select($"struct_col".withField("b", lit(100)))
* // result: {"a":1,"b":100,"b":100}
*
* val df = sql("SELECT named_struct('a', named_struct('a', 1, 'b', 2)) struct_col")
* df.select($"struct_col".withField("a.c", lit(3)))
* // result: {"a":{"a":1,"b":2,"c":3}}
*
* val df = sql("SELECT named_struct('a', named_struct('b', 1), 'a', named_struct('c', 2)) struct_col")
* df.select($"struct_col".withField("a.c", lit(3)))
* // result: org.apache.spark.sql.AnalysisException: Ambiguous reference to fields
* }}}
*
* This method supports adding/replacing nested fields directly e.g.
*
* {{{
* val df = sql("SELECT named_struct('a', named_struct('a', 1, 'b', 2)) struct_col")
* df.select($"struct_col".withField("a.c", lit(3)).withField("a.d", lit(4)))
* // result: {"a":{"a":1,"b":2,"c":3,"d":4}}
* }}}
*
* However, if you are going to add/replace multiple nested fields, it is more efficient to extract
* the nested struct before adding/replacing multiple fields, e.g.
*
* {{{
* val df = sql("SELECT named_struct('a', named_struct('a', 1, 'b', 2)) struct_col")
* df.select($"struct_col".withField("a", $"struct_col.a".withField("c", lit(3)).withField("d", lit(4))))
* // result: {"a":{"a":1,"b":2,"c":3,"d":4}}
* }}}
*
* @group expr_ops
* @since 3.1.0
*/
// scalastyle:on line.size.limit
def withField(fieldName: String, col: Column): Column = withExpr {
require(fieldName != null, "fieldName cannot be null")
require(col != null, "col cannot be null")
UpdateFields(expr, fieldName, col.expr)
}
// scalastyle:off line.size.limit
/**
* An expression that drops fields in `StructType` by name.
* This is a no-op if the schema doesn't contain the field name(s).
*
* {{{
* val df = sql("SELECT named_struct('a', 1, 'b', 2) struct_col")
* df.select($"struct_col".dropFields("b"))
* // result: {"a":1}
*
* val df = sql("SELECT named_struct('a', 1, 'b', 2) struct_col")
* df.select($"struct_col".dropFields("c"))
* // result: {"a":1,"b":2}
*
* val df = sql("SELECT named_struct('a', 1, 'b', 2, 'c', 3) struct_col")
* df.select($"struct_col".dropFields("b", "c"))
* // result: {"a":1}
*
* val df = sql("SELECT named_struct('a', 1, 'b', 2) struct_col")
* df.select($"struct_col".dropFields("a", "b"))
* // result: org.apache.spark.sql.AnalysisException: cannot resolve 'update_fields(update_fields(`struct_col`))' due to data type mismatch: cannot drop all fields in struct
*
* val df = sql("SELECT CAST(NULL AS struct<a:int,b:int>) struct_col")
* df.select($"struct_col".dropFields("b"))
* // result: null of type struct<a:int>
*
* val df = sql("SELECT named_struct('a', 1, 'b', 2, 'b', 3) struct_col")
* df.select($"struct_col".dropFields("b"))
* // result: {"a":1}
*
* val df = sql("SELECT named_struct('a', named_struct('a', 1, 'b', 2)) struct_col")
* df.select($"struct_col".dropFields("a.b"))
* // result: {"a":{"a":1}}
*
* val df = sql("SELECT named_struct('a', named_struct('b', 1), 'a', named_struct('c', 2)) struct_col")
* df.select($"struct_col".dropFields("a.c"))
* // result: org.apache.spark.sql.AnalysisException: Ambiguous reference to fields
* }}}
*
* This method supports dropping multiple nested fields directly e.g.
*
* {{{
* val df = sql("SELECT named_struct('a', named_struct('a', 1, 'b', 2)) struct_col")
* df.select($"struct_col".dropFields("a.b", "a.c"))
* // result: {"a":{"a":1}}
* }}}
*
* However, if you are going to drop multiple nested fields, it is more efficient to extract
* the nested struct before dropping multiple fields from it, e.g.
*
* {{{
* val df = sql("SELECT named_struct('a', named_struct('a', 1, 'b', 2)) struct_col")
* df.select($"struct_col".withField("a", $"struct_col.a".dropFields("b", "c")))
* // result: {"a":{"a":1}}
* }}}
*
* @group expr_ops
* @since 3.1.0
*/
// scalastyle:on line.size.limit
def dropFields(fieldNames: String*): Column = withExpr {
fieldNames.tail.foldLeft(UpdateFields(expr, fieldNames.head)) {
(resExpr, fieldName) => UpdateFields(resExpr, fieldName)
}
}
/**
* An expression that gets a field by name in a `StructType`.
*
* @group expr_ops
* @since 1.3.0
*/
def getField(fieldName: String): Column = withExpr {
UnresolvedExtractValue(expr, Literal(fieldName))
}
/**
* An expression that returns a substring.
* @param startPos expression for the starting position.
* @param len expression for the length of the substring.
*
* @group expr_ops
* @since 1.3.0
*/
def substr(startPos: Column, len: Column): Column = withExpr {
Substring(expr, startPos.expr, len.expr)
}
/**
* An expression that returns a substring.
* @param startPos starting position.
* @param len length of the substring.
*
* @group expr_ops
* @since 1.3.0
*/
def substr(startPos: Int, len: Int): Column = withExpr {
Substring(expr, lit(startPos).expr, lit(len).expr)
}
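// Illustrative usage (not part of the original source): both `substr` overloads, assuming a
// DataFrame `df` with a string column "name" and `spark.implicits._` in scope. Note that the
// starting position is 1-based, as in SQL:
//   df.select($"name".substr(1, 3))            // literal start and length
//   df.select($"name".substr(lit(1), lit(3)))  // the Column-typed overload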
/**
* Contains the other element. Returns a boolean column based on a string match.
*
* @group expr_ops
* @since 1.3.0
*/
def contains(other: Any): Column = withExpr { Contains(expr, lit(other).expr) }
/**
* String starts with. Returns a boolean column based on a string match.
*
* @group expr_ops
* @since 1.3.0
*/
def startsWith(other: Column): Column = withExpr { StartsWith(expr, lit(other).expr) }
/**
* String starts with another string literal. Returns a boolean column based on a string match.
*
* @group expr_ops
* @since 1.3.0
*/
def startsWith(literal: String): Column = this.startsWith(lit(literal))
/**
* String ends with. Returns a boolean column based on a string match.
*
* @group expr_ops
* @since 1.3.0
*/
def endsWith(other: Column): Column = withExpr { EndsWith(expr, lit(other).expr) }
/**
* String ends with another string literal. Returns a boolean column based on a string match.
*
* @group expr_ops
* @since 1.3.0
*/
def endsWith(literal: String): Column = this.endsWith(lit(literal))
/**
* Gives the column an alias. Same as `as`.
* {{{
* // Renames colA to colB in select output.
* df.select($"colA".alias("colB"))
* }}}
*
* @group expr_ops
* @since 1.4.0
*/
def alias(alias: String): Column = name(alias)
/**
* Gives the column an alias.
* {{{
* // Renames colA to colB in select output.
* df.select($"colA".as("colB"))
* }}}
*
* If the current column has metadata associated with it, this metadata will be propagated
* to the new column. If this is not desired, use the API `as(alias: String, metadata: Metadata)`
* with explicit metadata.
*
* @group expr_ops
* @since 1.3.0
*/
def as(alias: String): Column = name(alias)
/**
* (Scala-specific) Assigns the given aliases to the results of a table generating function.
* {{{
* // Renames colA to colB in select output.
* df.select(explode($"myMap").as("key" :: "value" :: Nil))
* }}}
*
* @group expr_ops
* @since 1.4.0
*/
def as(aliases: Seq[String]): Column = withExpr { MultiAlias(expr, aliases) }
/**
* Assigns the given aliases to the results of a table generating function.
* {{{
* // Renames colA to colB in select output.
* df.select(explode($"myMap").as("key" :: "value" :: Nil))
* }}}
*
* @group expr_ops
* @since 1.4.0
*/
def as(aliases: Array[String]): Column = withExpr { MultiAlias(expr, aliases) }
/**
* Gives the column an alias.
* {{{
* // Renames colA to colB in select output.
* df.select($"colA".as("colB"))
* }}}
*
* If the current column has metadata associated with it, this metadata will be propagated
* to the new column. If this is not desired, use the API `as(alias: String, metadata: Metadata)`
* with explicit metadata.
*
* @group expr_ops
* @since 1.3.0
*/
def as(alias: Symbol): Column = name(alias.name)
/**
* Gives the column an alias with metadata.
* {{{
* val metadata: Metadata = ...
* df.select($"colA".as("colB", metadata))
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def as(alias: String, metadata: Metadata): Column = withExpr {
Alias(expr, alias)(explicitMetadata = Some(metadata))
}
/**
* Gives the column a name (alias).
* {{{
* // Renames colA to colB in select output.
* df.select($"colA".name("colB"))
* }}}
*
* If the current column has metadata associated with it, this metadata will be propagated
* to the new column. If this is not desired, use the API `as(alias: String, metadata: Metadata)`
* with explicit metadata.
*
* @group expr_ops
* @since 2.0.0
*/
def name(alias: String): Column = withExpr {
// SPARK-33536: an alias is no longer a column reference. Therefore,
// we should not inherit the column reference related metadata in an alias
// so that it is not caught as a column reference in DetectAmbiguousSelfJoin.
Alias(expr, alias)(
nonInheritableMetadataKeys = Seq(Dataset.DATASET_ID_KEY, Dataset.COL_POS_KEY))
}
/**
* Casts the column to a different data type.
* {{{
* // Casts colA to IntegerType.
* import org.apache.spark.sql.types.IntegerType
* df.select(df("colA").cast(IntegerType))
*
* // equivalent to
* df.select(df("colA").cast("int"))
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def cast(to: DataType): Column = withExpr {
val cast = Cast(expr, CharVarcharUtils.replaceCharVarcharWithStringForCast(to))
cast.setTagValue(Cast.USER_SPECIFIED_CAST, true)
cast
}
/**
* Casts the column to a different data type, using the canonical string representation
* of the type. The supported types are: `string`, `boolean`, `byte`, `short`, `int`, `long`,
* `float`, `double`, `decimal`, `date`, `timestamp`.
* {{{
* // Casts colA to integer.
* df.select(df("colA").cast("int"))
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def cast(to: String): Column = cast(CatalystSqlParser.parseDataType(to))
/**
* Returns a sort expression based on the descending order of the column.
* {{{
* // Scala
* df.sort(df("age").desc)
*
* // Java
* df.sort(df.col("age").desc());
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def desc: Column = withExpr { SortOrder(expr, Descending) }
/**
* Returns a sort expression based on the descending order of the column,
* and null values appear before non-null values.
* {{{
* // Scala: sort a DataFrame by age column in descending order and null values appearing first.
* df.sort(df("age").desc_nulls_first)
*
* // Java
* df.sort(df.col("age").desc_nulls_first());
* }}}
*
* @group expr_ops
* @since 2.1.0
*/
def desc_nulls_first: Column = withExpr { SortOrder(expr, Descending, NullsFirst, Seq.empty) }
/**
* Returns a sort expression based on the descending order of the column,
* and null values appear after non-null values.
* {{{
* // Scala: sort a DataFrame by age column in descending order and null values appearing last.
* df.sort(df("age").desc_nulls_last)
*
* // Java
* df.sort(df.col("age").desc_nulls_last());
* }}}
*
* @group expr_ops
* @since 2.1.0
*/
def desc_nulls_last: Column = withExpr { SortOrder(expr, Descending, NullsLast, Seq.empty) }
/**
* Returns a sort expression based on ascending order of the column.
* {{{
* // Scala: sort a DataFrame by age column in ascending order.
* df.sort(df("age").asc)
*
* // Java
* df.sort(df.col("age").asc());
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def asc: Column = withExpr { SortOrder(expr, Ascending) }
/**
* Returns a sort expression based on ascending order of the column,
* and null values appear before non-null values.
* {{{
* // Scala: sort a DataFrame by age column in ascending order and null values appearing first.
* df.sort(df("age").asc_nulls_first)
*
* // Java
* df.sort(df.col("age").asc_nulls_first());
* }}}
*
* @group expr_ops
* @since 2.1.0
*/
def asc_nulls_first: Column = withExpr { SortOrder(expr, Ascending, NullsFirst, Seq.empty) }
/**
* Returns a sort expression based on ascending order of the column,
* and null values appear after non-null values.
* {{{
* // Scala: sort a DataFrame by age column in ascending order and null values appearing last.
* df.sort(df("age").asc_nulls_last)
*
* // Java
* df.sort(df.col("age").asc_nulls_last());
* }}}
*
* @group expr_ops
* @since 2.1.0
*/
def asc_nulls_last: Column = withExpr { SortOrder(expr, Ascending, NullsLast, Seq.empty) }
/**
* Prints the expression to the console for debugging purposes.
*
* @group df_ops
* @since 1.3.0
*/
def explain(extended: Boolean): Unit = {
// scalastyle:off println
if (extended) {
println(expr)
} else {
println(expr.sql)
}
// scalastyle:on println
}
/**
* Compute bitwise OR of this expression with another expression.
* {{{
* df.select($"colA".bitwiseOR($"colB"))
* }}}
*
* @group expr_ops
* @since 1.4.0
*/
def bitwiseOR(other: Any): Column = withExpr { BitwiseOr(expr, lit(other).expr) }
/**
* Compute bitwise AND of this expression with another expression.
* {{{
* df.select($"colA".bitwiseAND($"colB"))
* }}}
*
* @group expr_ops
* @since 1.4.0
*/
def bitwiseAND(other: Any): Column = withExpr { BitwiseAnd(expr, lit(other).expr) }
/**
* Compute bitwise XOR of this expression with another expression.
* {{{
* df.select($"colA".bitwiseXOR($"colB"))
* }}}
*
* @group expr_ops
* @since 1.4.0
*/
def bitwiseXOR(other: Any): Column = withExpr { BitwiseXor(expr, lit(other).expr) }
/**
* Defines a windowing column.
*
* {{{
* val w = Window.partitionBy("name").orderBy("id")
* df.select(
* sum("price").over(w.rangeBetween(Window.unboundedPreceding, 2)),
* avg("price").over(w.rowsBetween(Window.currentRow, 4))
* )
* }}}
*
* @group expr_ops
* @since 1.4.0
*/
def over(window: expressions.WindowSpec): Column = window.withAggregate(this)
/**
* Defines an empty analytic clause. In this case the analytic function is applied
* and presented for all rows in the result set.
*
* {{{
* df.select(
* sum("price").over(),
* avg("price").over()
* )
* }}}
*
* @group expr_ops
* @since 2.0.0
*/
def over(): Column = over(Window.spec)
}
/**
* A convenient class used for constructing schema.
*
* @since 1.3.0
*/
@Stable
class ColumnName(name: String) extends Column(name) {
/**
* Creates a new `StructField` of type boolean.
* @since 1.3.0
*/
def boolean: StructField = StructField(name, BooleanType)
/**
* Creates a new `StructField` of type byte.
* @since 1.3.0
*/
def byte: StructField = StructField(name, ByteType)
/**
* Creates a new `StructField` of type short.
* @since 1.3.0
*/
def short: StructField = StructField(name, ShortType)
/**
* Creates a new `StructField` of type int.
* @since 1.3.0
*/
def int: StructField = StructField(name, IntegerType)
/**
* Creates a new `StructField` of type long.
* @since 1.3.0
*/
def long: StructField = StructField(name, LongType)
/**
* Creates a new `StructField` of type float.
* @since 1.3.0
*/
def float: StructField = StructField(name, FloatType)
/**
* Creates a new `StructField` of type double.
* @since 1.3.0
*/
def double: StructField = StructField(name, DoubleType)
/**
* Creates a new `StructField` of type string.
* @since 1.3.0
*/
def string: StructField = StructField(name, StringType)
/**
* Creates a new `StructField` of type date.
* @since 1.3.0
*/
def date: StructField = StructField(name, DateType)
/**
* Creates a new `StructField` of type decimal.
* @since 1.3.0
*/
def decimal: StructField = StructField(name, DecimalType.USER_DEFAULT)
/**
* Creates a new `StructField` of type decimal.
* @since 1.3.0
*/
def decimal(precision: Int, scale: Int): StructField =
StructField(name, DecimalType(precision, scale))
/**
* Creates a new `StructField` of type timestamp.
* @since 1.3.0
*/
def timestamp: StructField = StructField(name, TimestampType)
/**
* Creates a new `StructField` of type binary.
* @since 1.3.0
*/
def binary: StructField = StructField(name, BinaryType)
/**
* Creates a new `StructField` of type array.
* @since 1.3.0
*/
def array(dataType: DataType): StructField = StructField(name, ArrayType(dataType))
/**
* Creates a new `StructField` of type map.
* @since 1.3.0
*/
def map(keyType: DataType, valueType: DataType): StructField =
map(MapType(keyType, valueType))
def map(mapType: MapType): StructField = StructField(name, mapType)
/**
* Creates a new `StructField` of type struct.
* @since 1.3.0
*/
def struct(fields: StructField*): StructField = struct(StructType(fields))
/**
* Creates a new `StructField` of type struct.
* @since 1.3.0
*/
def struct(structType: StructType): StructField = StructField(name, structType)
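// Illustrative usage (not part of the original source): the StructField builders above make it
// easy to assemble a schema, assuming `spark.implicits._` is in scope so that $"..." produces a
// ColumnName:
//   val schema = StructType(Seq(
//     $"id".long,
//     $"name".string,
//     $"score".decimal(10, 2),
//     $"tags".array(StringType)))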
}
|
BryanCutler/spark
|
sql/core/src/main/scala/org/apache/spark/sql/Column.scala
|
Scala
|
apache-2.0
| 44,332
|
package com.twitter.finagle.service
import com.twitter.conversions.time._
import com.twitter.finagle.stats.{NullStatsReceiver, InMemoryStatsReceiver}
import com.twitter.finagle.{Status, MockTimer, ServiceFactory, Service}
import com.twitter.util._
import java.util.concurrent.TimeUnit
import org.junit.runner.RunWith
import org.mockito.Mockito.{times, verify, when}
import org.mockito.Matchers
import org.mockito.Matchers._
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
import org.scalatest.mock.MockitoSugar
import scala.util.Random
@RunWith(classOf[JUnitRunner])
class FailureAccrualFactoryTest extends FunSuite with MockitoSugar {
class Helper {
val statsReceiver = new InMemoryStatsReceiver()
val underlyingService = mock[Service[Int, Int]]
when(underlyingService.close(any[Time])) thenReturn Future.Done
when(underlyingService.status) thenReturn Status.Open
when(underlyingService(Matchers.anyInt)) thenReturn Future.exception(new Exception)
val underlying = mock[ServiceFactory[Int, Int]]
when(underlying.close(any[Time])) thenReturn Future.Done
when(underlying.status) thenReturn Status.Open
when(underlying()) thenReturn Future.value(underlyingService)
val timer = new MockTimer
val factory = new FailureAccrualFactory[Int, Int](
underlying, 3, 10.seconds, timer, statsReceiver)
val service = Await.result(factory())
verify(underlying)()
}
test("a failing service should become unavailable") {
val h = new Helper
import h._
Time.withCurrentTimeFrozen { timeControl =>
intercept[Exception] {
Await.result(service(123))
}
intercept[Exception] {
Await.result(service(123))
}
assert(factory.isAvailable)
assert(service.isAvailable)
// Now fail:
intercept[Exception] {
Await.result(service(123))
}
assert(statsReceiver.counters.get(List("removals")) === Some(1))
assert(!factory.isAvailable)
assert(!service.isAvailable)
verify(underlyingService, times(3))(123)
}
}
test("a failing service should be revived (for one request) after the markDeadFor duration") {
val h = new Helper
import h._
Time.withCurrentTimeFrozen { timeControl =>
intercept[Exception] {
Await.result(service(123))
}
intercept[Exception] {
Await.result(service(123))
}
intercept[Exception] {
Await.result(service(123))
}
assert(statsReceiver.counters.get(List("removals")) === Some(1))
assert(!factory.isAvailable)
assert(!service.isAvailable)
timeControl.advance(10.seconds)
timer.tick()
// Healthy again!
assert(statsReceiver.counters.get(List("removals")) === Some(1))
assert(statsReceiver.counters.get(List("revivals")) === Some(1))
assert(factory.isAvailable)
assert(service.isAvailable)
// But after one bad dispatch, mark it again unhealthy.
intercept[Exception] {
Await.result(service(123))
}
assert(statsReceiver.counters.get(List("removals")) === Some(2))
assert(!factory.isAvailable)
assert(!service.isAvailable)
}
}
test("a failing factory should be busy; done when revived") {
Time.withCurrentTimeFrozen { tc =>
val h = new Helper
import h._
assert(factory.status === Status.Open)
intercept[Exception] {
Await.result(service(123))
}
intercept[Exception] {
Await.result(service(123))
}
assert(factory.status === Status.Open)
intercept[Exception] {
Await.result(service(123))
}
assert(factory.status == Status.Busy)
tc.advance(10.seconds)
timer.tick()
assert(factory.status === Status.Open)
}
}
test("a failing service should reset failure counters after an individual success") {
val h = new Helper
import h._
Time.withCurrentTimeFrozen { timeControl =>
intercept[Exception] {
Await.result(service(123))
}
intercept[Exception] {
Await.result(service(123))
}
intercept[Exception] {
Await.result(service(123))
}
assert(statsReceiver.counters.get(List("removals")) === Some(1))
assert(!factory.isAvailable)
assert(!service.isAvailable)
timeControl.advance(10.seconds)
timer.tick()
// Healthy again!
assert(statsReceiver.counters.get(List("revivals")) === Some(1))
assert(statsReceiver.counters.get(List("removals")) === Some(1))
assert(factory.isAvailable)
assert(service.isAvailable)
when(underlyingService(123)) thenReturn Future.value(321)
// A good dispatch!
assert(statsReceiver.counters.get(List("revivals")) === Some(1))
assert(statsReceiver.counters.get(List("removals")) === Some(1))
assert(Await.result(service(123)) === 321)
assert(factory.isAvailable)
assert(service.isAvailable)
// Counts are now reset.
when(underlyingService(123)) thenReturn Future.exception(new Exception)
intercept[Exception] {
Await.result(service(123))
}
assert(statsReceiver.counters.get(List("revivals")) === Some(1))
assert(statsReceiver.counters.get(List("removals")) === Some(1))
assert(factory.isAvailable)
assert(service.isAvailable)
intercept[Exception] {
Await.result(service(123))
}
assert(factory.isAvailable)
assert(service.isAvailable)
intercept[Exception] {
Await.result(service(123))
}
assert(statsReceiver.counters.get(List("revivals")) === Some(1))
assert(statsReceiver.counters.get(List("removals")) === Some(2))
assert(!factory.isAvailable)
assert(!service.isAvailable)
}
}
class HealthyServiceHelper {
val statsReceiver = new InMemoryStatsReceiver()
val underlyingService = mock[Service[Int, Int]]
when(underlyingService.close(any[Time])) thenReturn Future.Done
when(underlyingService.status) thenReturn Status.Open
when(underlyingService(Matchers.anyInt)) thenReturn Future.value(321)
val underlying = mock[ServiceFactory[Int, Int]]
when(underlying.close(any[Time])) thenReturn Future.Done
when(underlying.status) thenReturn Status.Open
when(underlying()) thenReturn Future.value(underlyingService)
val factory = new FailureAccrualFactory[Int, Int](
underlying, 3, 10.seconds, new MockTimer, statsReceiver)
val service = Await.result(factory())
verify(underlying)()
}
test("a healthy service should [service] pass through underlying availability") {
val h = new HealthyServiceHelper
import h._
assert(service.isAvailable)
when(underlyingService.status) thenReturn Status.Closed
assert(!service.isAvailable)
}
test("a healthy service should [factory] pass through underlying availability") {
val h = new HealthyServiceHelper
import h._
assert(factory.isAvailable)
assert(service.isAvailable)
when(underlying.status) thenReturn Status.Closed
assert(!factory.isAvailable)
// This propagates to the service as well.
assert(!service.isAvailable)
when(underlying.status) thenReturn Status.Busy
assert(service.status === Status.Busy)
}
class BrokenFactoryHelper {
val statsReceiver = new InMemoryStatsReceiver()
val underlying = mock[ServiceFactory[Int, Int]]
when(underlying.close(any[Time])) thenReturn Future.Done
when(underlying.status) thenReturn Status.Open
val exc = new Exception("i broked :-(")
when(underlying()) thenReturn Future.exception(exc)
val factory = new FailureAccrualFactory[Int, Int](
underlying, 3, 10.seconds, new MockTimer, statsReceiver)
}
test("a broken factory should fail after the given number of tries") {
val h = new BrokenFactoryHelper
import h._
Time.withCurrentTimeFrozen { timeControl =>
assert(factory.isAvailable)
intercept[Exception] {
Await.result(factory())
}
assert(factory.isAvailable)
intercept[Exception] {
Await.result(factory())
}
assert(factory.isAvailable)
intercept[Exception] {
Await.result(factory())
}
assert(!factory.isAvailable)
}
}
class CustomizedFactory {
class CustomizedFailureAccrualFactory(
underlying: ServiceFactory[Int, Int],
numFailures: Int,
markDeadFor: Duration,
timer: Timer
) extends FailureAccrualFactory[Int, Int](underlying, numFailures, markDeadFor, timer, NullStatsReceiver) {
override def isSuccess(response: Try[Int]): Boolean = {
response match {
case Throw(_) => false
case Return(x) => x != 321
}
}
}
val underlyingService = mock[Service[Int, Int]]
when(underlyingService.close(any[Time])) thenReturn Future.Done
when(underlyingService.status) thenReturn Status.Open
when(underlyingService(Matchers.anyInt)) thenReturn Future.value(321)
val underlying = mock[ServiceFactory[Int, Int]]
when(underlying.close(any[Time])) thenReturn Future.Done
when(underlying.status) thenReturn Status.Open
when(underlying()) thenReturn Future.value(underlyingService)
val timer = new MockTimer
val factory = new CustomizedFailureAccrualFactory(
underlying, 3, 10.seconds, timer)
val service = Await.result(factory())
verify(underlying)()
}
test("a customized factory should become unavailable") {
val h = new CustomizedFactory
import h._
Time.withCurrentTimeFrozen { timeControl =>
assert(Await.result(service(123)) === 321)
assert(Await.result(service(123)) === 321)
assert(factory.isAvailable)
assert(service.isAvailable)
// Now fail:
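// (With the customized isSuccess above, a response of 321 counts as a failure, so
// this third dispatch trips the threshold of 3 consecutive failures.)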
assert(Await.result(service(123)) === 321)
assert(!service.isAvailable)
verify(underlyingService, times(3))(123)
}
}
test("perturbs") {
val perturbation = 0.2f
val duration = 1.seconds
val rand = new Random(1)
for (_ <- 1 to 50) {
val d = FailureAccrualFactory.perturb(duration, perturbation, rand)()
val diff = d.diff(duration).inUnit(TimeUnit.MILLISECONDS)
assert(diff >= 0)
assert(diff < 200)
}
}
}
|
cogitate/twitter-finagle-uuid
|
finagle-core/src/test/scala/com/twitter/finagle/service/FailureAccrualFactoryTest.scala
|
Scala
|
apache-2.0
| 10,312
|
package chrome.contextMenus.bindings
import chrome.events.bindings.Event
import chrome.tabs.bindings.Tab
import scala.scalajs.js
import scala.scalajs.js.annotation.{JSName, ScalaJSDefined}
import scala.scalajs.js.|
object MenuContexts {
val ALL = "all"
val PAGE = "page"
val FRAME = "frame"
val SELECTION = "selection"
val LINK = "link"
val EDITABLE = "editable"
val IMAGE = "image"
val VIDEO = "video"
val AUDIO = "audio"
val LAUNCHER = "launcher"
val BROWSER_ACTION = "browser_action"
val PAGE_ACTION = "page_action"
}
object MenuType {
val NORMAL = "normal"
val CHECKBOX = "checkbox"
val RADIO = "radio"
val SEPARATOR = "separator"
}
@js.native
@JSName("chrome.contextMenus")
object ContextMenus extends js.Object{
def create(createProperties: CreateProperties): String | Int = js.native
def update(id: String | Int, properties: UpdateProperties): Unit = js.native
def remove(menuItemId: String | Int, callback: js.Function0[Unit]): String | Int = js.native
def removeAll(callback: js.Function0[Unit]): Unit = js.native
val onClicked: Event[js.Function2[MenuInfo, Tab, _]] = js.native
}
@ScalaJSDefined
class UpdateProperties(
val `type`: String = "normal", //"normal", "checkbox", "radio", or "separator"
val title: String,
val checked: js.UndefOr[Boolean] = js.undefined,
val contexts: js.UndefOr[js.Array[String]] = js.undefined,
val onclick: js.UndefOr[js.Function2[MenuInfo, Tab, Unit]] = js.undefined,
val parentId: js.UndefOr[String | Int] = js.undefined,
val documentUrlPatterns: js.UndefOr[js.Array[String]] = js.undefined,
val targetUrlPatterns: js.UndefOr[js.Array[String]] = js.undefined,
val enabled: Boolean = true
) extends js.Object
object CreateProperties {
def apply(id: String, title: String, contexts: js.Array[String] = js.Array(MenuContexts.ALL)): CreateProperties =
new CreateProperties(id = id, title = title, contexts = contexts)
}
@ScalaJSDefined
class CreateProperties(
val `type`: String = "normal",
val id: String | Int,
val title: String,
val checked: js.UndefOr[Boolean] = js.undefined,
val contexts: js.UndefOr[js.Array[String]] = js.undefined,
val onclick: js.UndefOr[js.Function2[MenuInfo, Tab, Unit]] = js.undefined,
val parentId: js.UndefOr[String | Int] = js.undefined,
val documentUrlPatterns: js.UndefOr[js.Array[String]] = js.undefined,
val targetUrlPatterns: js.UndefOr[js.Array[String]] = js.undefined,
val enabled: Boolean = true
) extends js.Object
@js.native
trait MenuInfo extends js.Object{
val menuItemId: String | Int = js.native
val parentMenuItemId: js.UndefOr[String | Int]
val mediaType: js.UndefOr[String]
val linkUrl: js.UndefOr[String]
val srcUrl: js.UndefOr[String]
val pageUrl: js.UndefOr[String]
val frameUrl: js.UndefOr[String]
val selectionText: js.UndefOr[String]
val editable: Boolean
val wasChecked: js.UndefOr[Boolean]
val checked: js.UndefOr[Boolean]
}
|
antonkulaga/bio-nlp
|
chrome-bio/src/main/scala/chrome/contextMenus/bindings/ContextMenus.scala
|
Scala
|
mpl-2.0
| 3,157
|
package com.sksamuel.avro4s.record.decoder
import com.sksamuel.avro4s._
import org.apache.avro.generic.GenericData
import org.apache.avro.{Conversions, LogicalTypes, SchemaBuilder}
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
case class WithBigDecimal(decimal: BigDecimal)
case class OptionalBigDecimal(big: Option[BigDecimal])
class BigDecimalDecoderTest extends AnyFlatSpec with Matchers {
"Decoder" should "convert byte array to decimal" in {
val schema = AvroSchema[WithBigDecimal]
val record = new GenericData.Record(schema)
val bytes = new Conversions.DecimalConversion().toBytes(BigDecimal(123.45).underlying(), null, LogicalTypes.decimal(8, 2))
record.put("decimal", bytes)
Decoder[WithBigDecimal].decode(schema).apply(record) shouldBe WithBigDecimal(BigDecimal(123.45))
}
it should "scale big decimals before decoding" in {
given ScalePrecision = ScalePrecision(3, 8)
val schema = AvroSchema[WithBigDecimal]
val record = new GenericData.Record(schema)
val bytes = new Conversions.DecimalConversion().toBytes(BigDecimal(12345.678).underlying(), null, LogicalTypes.decimal(8, 3))
record.put("decimal", bytes)
Decoder[WithBigDecimal].decode(schema).apply(record) shouldBe WithBigDecimal(BigDecimal(12345.678))
}
it should "support optional big decimals" in {
val schema = AvroSchema[OptionalBigDecimal]
val bytes =
new Conversions.DecimalConversion().toBytes(BigDecimal(123.45).bigDecimal, null, LogicalTypes.decimal(8, 2))
val record = new GenericData.Record(schema)
record.put("big", bytes)
Decoder[OptionalBigDecimal].decode(schema).apply(record) shouldBe OptionalBigDecimal(Option(BigDecimal(123.45)))
val emptyRecord = new GenericData.Record(schema)
emptyRecord.put("big", null)
Decoder[OptionalBigDecimal].decode(schema).apply(emptyRecord) shouldBe OptionalBigDecimal(None)
}
it should "be able to decode strings as bigdecimals based on the schema" in {
given SchemaFor[BigDecimal] = BigDecimals.AsString
val schema = AvroSchema[BigDecimal]
Decoder[BigDecimal].decode(schema).apply("123.45") shouldBe BigDecimal(123.45)
}
it should "be able to decode generic fixed as bigdecimals" in {
given SchemaFor[BigDecimal] = SchemaFor[BigDecimal](LogicalTypes.decimal(10, 8).addToSchema(SchemaBuilder.fixed("BigDecimal").size(8)))
val schema = AvroSchema[BigDecimal]
val fixed = GenericData.get().createFixed(null, Array[Byte](0, 4, 98, -43, 55, 43, -114, 0), schema)
Decoder[BigDecimal].decode(schema).apply(fixed) shouldBe BigDecimal(12345678)
}
// it should "be able to decode longs as bigdecimals" in {
// val schema = LogicalTypes.decimal(5, 2).addToSchema(SchemaBuilder.builder().longType())
// BigDecimalDecoder.decode(12345, schema) shouldBe ""
// BigDecimalDecoder.decode(9999, schema) shouldBe ""
// BigDecimalDecoder.decode(java.lang.Long.valueOf(99887766), schema) shouldBe ""
// BigDecimalDecoder.decode(java.lang.Integer.valueOf(654), schema) shouldBe ""
// }
}
|
sksamuel/avro4s
|
avro4s-core/src/test/scala/com/sksamuel/avro4s/record/decoder/BigDecimalDecoderTest.scala
|
Scala
|
apache-2.0
| 3,093
|
package pairs
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
//
// Various approaches to choosing a distinguished record within groups,
// based on various approaches from PairRDDFunctions
//
object ChooseWithinGroups {
case class Cust(id: Integer, name: String, sales: Double, discount: Double, state: String)
def main(args: Array[String]) {
val conf = new SparkConf().setAppName("Pairs-ChooseWithinGroups").setMaster("local[4]")
val sc = new SparkContext(conf)
val people = Seq(
(5,"Bob","Jones","Canada",23),
(7,"Fred","Smith","Canada",18),
(5,"Robert","Andrews","USA",32)
)
val peopleRows = sc.parallelize(people, 4)
type Payload = (String, String, String, Int)
//
// For all the approaches we need to get the data into an RDD[Pair[U,V]].
// Since the problem requires the groups to be defined by the first element,
// that needs to be the first of the pair. Everything else ends up in the
// second.
//
val pairs: RDD[(Int, Payload)] = peopleRows.map({
case (id: Int, first: String, last: String, country: String, age: Int) =>
(id, (first, last, country, age))
}
)
// reduceByKey solution
{
def combine(p1: Payload, p2: Payload): Payload = {
if (p1._4 > p2._4) p1 else p2
}
val withMax: RDD[(Int, Payload)] =
pairs.reduceByKey(combine)
withMax.collect().foreach(println)
}
// aggregateByKey solution
{
def add(acc: Option[Payload], rec: Payload): Option[Payload] = {
acc match {
case None => Some(rec)
case Some(previous) => if (rec._4 > previous._4) Some(rec) else acc
}
}
def combine(acc1: Option[Payload], acc2: Option[Payload]): Option[Payload] = {
(acc1, acc2) match {
case (None, None) => None
case (None, _) => acc2
case (_, None) => acc1
case (Some(p1), Some(p2)) => if (p1._4 > p2._4) acc1 else acc2
}
}
val start: Option[Payload] = None
val withMax: RDD[(Int, Option[Payload])] =
pairs.aggregateByKey(start)(add, combine)
withMax.collect().foreach(println)
}
// combineByKey solution
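// (Illustrative sketch, not part of the original file: combineByKey with the
// record itself as the combiner, following the same pattern as the solutions above.)
{
def createCombiner(p: Payload): Payload = p
def mergeValue(acc: Payload, rec: Payload): Payload =
if (rec._4 > acc._4) rec else acc
def mergeCombiners(acc1: Payload, acc2: Payload): Payload =
if (acc1._4 > acc2._4) acc1 else acc2
val withMax: RDD[(Int, Payload)] =
pairs.combineByKey(createCombiner, mergeValue, mergeCombiners)
withMax.collect().foreach(println)
}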
// foldByKey solution
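// (Illustrative sketch, not part of the original file: foldByKey with a sentinel
// zero element carrying the smallest possible age.)
{
val zero: Payload = ("", "", "", Int.MinValue)
val withMax: RDD[(Int, Payload)] =
pairs.foldByKey(zero)((p1, p2) => if (p1._4 > p2._4) p1 else p2)
withMax.collect().foreach(println)
}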
}}
|
chocolateBlack/LearningSpark
|
src/main/scala/pairs/ChooseWithinGroups.scala
|
Scala
|
mit
| 2,292
|
/**
* Created on: Dec 7, 2013
*/
package com.iteamsolutions.angular.services
package atom
import scala.concurrent.{
ExecutionContext,
Future
}
import scalaz.{
Failure => _,
Success => _,
_
}
import scalaz.contrib.std._
import akka.actor.ActorSystem
import akka.pattern._
import akka.util._
import com.iteamsolutions.angular.models.atom._
import actor._
/**
* The '''FeedOperations''' type defines system operations related to
* [[models.atom]] Domain types.
*
* @author svickers
*
*/
trait FeedOperations
{
/// Class Imports
import Scalaz._
/**
* The availableFeeds method resolves which [[models.atom.Feed]]s can
* _currently_ be manipulated.
*/
def availableFeeds ()
(implicit system : ActorSystem, EC : ExecutionContext, T : Timeout)
: FutureEither[List[Feed]] =
(AvailableFeedsExtension (system) ? CurrentlyAvailableFeedsRequest) >>= {
_.result.toFutureEither;
}
}
|
osxhacker/angular-codegen
|
src/main/scala/com/iteamsolutions/angular/services/atom/FeedOperations.scala
|
Scala
|
bsd-2-clause
| 926
|
package plugin
import app.Context
import scala.collection.mutable.ListBuffer
import plugin.PluginSystem.{Action, GlobalMenu, RepositoryMenu}
import javax.servlet.http.{HttpServletResponse, HttpServletRequest}
// TODO This is a sample implementation for Scala based plug-ins.
class ScalaPlugin(val id: String, val version: String,
val author: String, val url: String, val description: String) extends Plugin {
private val repositoryMenuList = ListBuffer[RepositoryMenu]()
private val globalMenuList = ListBuffer[GlobalMenu]()
private val repositoryActionList = ListBuffer[Action]()
private val globalActionList = ListBuffer[Action]()
def repositoryMenus : List[RepositoryMenu] = repositoryMenuList.toList
def globalMenus : List[GlobalMenu] = globalMenuList.toList
def repositoryActions : List[Action] = repositoryActionList.toList
def globalActions : List[Action] = globalActionList.toList
def addRepositoryMenu(label: String, name: String, url: String, icon: String)(condition: (Context) => Boolean): Unit = {
repositoryMenuList += RepositoryMenu(label, name, url, icon, condition)
}
def addGlobalMenu(label: String, url: String, icon: String)(condition: (Context) => Boolean): Unit = {
globalMenuList += GlobalMenu(label, url, icon, condition)
}
def addGlobalAction(path: String)(function: (HttpServletRequest, HttpServletResponse) => Any): Unit = {
globalActionList += Action(path, function)
}
def addRepositoryAction(path: String)(function: (HttpServletRequest, HttpServletResponse) => Any): Unit = {
repositoryActionList += Action(path, function)
}
}
|
campolake/gitbucketV2.1
|
src/main/scala/plugin/ScalaPlugin.scala
|
Scala
|
apache-2.0
| 1,674
|
package com.imaifactory.sparkplayground
/**
* Created by yimai on 2016/05/11.
*/
object Iterate {
def main(args: Array[String]): Unit = {
println("test")
val it2 = Iterator(1,2,3,4)
}
}
|
imaifactory/Spark-Playground
|
src/main/scala/com/imaifactory/sparkplayground/Iterate.scala
|
Scala
|
mit
| 203
|
package org.kduda.greedy.spark.reader.mongo
import java.io.{File, IOException}
import java.util
import com.mongodb.gridfs.GridFSDBFile
import org.apache.spark.sql.{Dataset, Row, DataFrame}
import org.kduda.greedy.service.storage.FileStorageService
import org.kduda.greedy.spark.reader.csv.SparkCsvReader
import org.slf4j.LoggerFactory
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
@Service
class SparkMongo extends SparkMongoService {
private val log = LoggerFactory.getLogger(classOf[SparkMongo])
@Autowired private val csvReader: SparkCsvReader = null
@Autowired private val storage: FileStorageService = null
@Override
def readCsvByName(name: String): DataFrame = {
val gridFSDBFile = storage.findFileByName(name).get
readAsDataset(gridFSDBFile)
}
@Override
def readCsvById(id: String): DataFrame = {
val gridFSDBFile = storage.findFileById(id).get
readAsDataset(gridFSDBFile)
}
private def readAsDataset(gridFSDBFile: GridFSDBFile): DataFrame = {
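// Spark cannot read from GridFS directly, so the file is first copied to a temporary
// local path, read into a DataFrame, and the temporary copy is deleted afterwards.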
val storageFile = new File("tmp-storage/" + gridFSDBFile.getFilename)
saveToTempStorage(gridFSDBFile, storageFile)
val data = sparkRead(storageFile)
data.cache().show()
deleteFromTempStorage(storageFile)
data
}
private def saveToTempStorage(gridFSDBFile: GridFSDBFile, storageFile: File): Unit = {
var writtenBytes = 0L
try
writtenBytes = gridFSDBFile.writeTo(storageFile)
catch {
case e: IOException =>
log.error("Could not write to file:" + storageFile.getAbsolutePath, e)
}
log.info("Written " + writtenBytes + " bytes into " + storageFile.getAbsolutePath)
}
private def sparkRead(file: File): DataFrame = {
val options = new util.HashMap[String, String]()
options.put("header", "true")
csvReader.read(file, options)
}
private def deleteFromTempStorage(file: File): Boolean = {
val isDeleted = file.delete
log.info(file.getAbsolutePath + " deleted: " + isDeleted)
isDeleted
}
}
|
DudaKamil/greedy
|
src/main/scala/org/kduda/greedy/spark/reader/mongo/SparkMongo.scala
|
Scala
|
agpl-3.0
| 2,048
|
package Client
import scala.collection.mutable
object ProfileMap {
val obj = mutable.HashMap[Int, Boolean]()
}
|
Nirespire/SecureFacebookAPI
|
src/main/scala/Client/ProfileMap.scala
|
Scala
|
mit
| 115
|
package controllers
import play.api._
import play.api.libs.json.Json
import play.api.mvc._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import services.TopicService
class Api extends Controller {
def check(url: String) = Action.async { req =>
req.getQueryString("result") match {
case Some(res) => Future.successful(Ok(Json.obj("confidence" -> res)))
case None =>
TopicService.getByUrl(url).map {
case Some(topic) => Ok(Json.obj("confidence" -> Option(topic.confidence)))
case None => NotFound(Json.obj("confidence" -> Option.empty[Double]))
}
}
}
}
|
dohzya/desinthoax
|
app/controllers/Api.scala
|
Scala
|
agpl-3.0
| 660
|
object Test {
import Macro.*
def main(args: Array[String]): Unit = {
println(ff"Hello World ${1}!")
}
}
|
dotty-staging/dotty
|
tests/run-macros/i5119/Main_2.scala
|
Scala
|
apache-2.0
| 115
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import java.io.{File, PrintWriter}
import java.net.URI
import java.util.TimeZone
import java.util.concurrent.TimeUnit
import scala.collection.mutable
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.catalog.CatalogColumnStat
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.util.DateTimeTestUtils
import org.apache.spark.sql.catalyst.util.DateTimeUtils.TimeZoneUTC
import org.apache.spark.sql.functions.timestamp_seconds
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.test.SQLTestData.ArrayData
import org.apache.spark.sql.types._
import org.apache.spark.util.Utils
/**
* End-to-end suite testing statistics collection and use on both entire table and columns.
*/
class StatisticsCollectionSuite extends StatisticsCollectionTestBase with SharedSparkSession {
import testImplicits._
test("estimates the size of a limit 0 on outer join") {
withTempView("test") {
Seq(("one", 1), ("two", 2), ("three", 3), ("four", 4)).toDF("k", "v")
.createOrReplaceTempView("test")
val df1 = spark.table("test")
val df2 = spark.table("test").limit(0)
val df = df1.join(df2, Seq("k"), "left")
val sizes = df.queryExecution.analyzed.collect { case g: Join =>
g.stats.sizeInBytes
}
assert(sizes.size === 1, s"number of Join nodes is wrong:\n ${df.queryExecution}")
assert(sizes.head === BigInt(128),
s"expected exact size 128 for table 'test', got: ${sizes.head}")
}
}
test("analyzing views is not supported") {
def assertAnalyzeUnsupported(analyzeCommand: String): Unit = {
val err = intercept[AnalysisException] {
sql(analyzeCommand)
}
assert(err.message.contains("ANALYZE TABLE is not supported"))
}
val tableName = "tbl"
withTable(tableName) {
spark.range(10).write.saveAsTable(tableName)
val viewName = "view"
withView(viewName) {
sql(s"CREATE VIEW $viewName AS SELECT * FROM $tableName")
assertAnalyzeUnsupported(s"ANALYZE TABLE $viewName COMPUTE STATISTICS")
assertAnalyzeUnsupported(s"ANALYZE TABLE $viewName COMPUTE STATISTICS FOR COLUMNS id")
}
}
}
test("statistics collection of a table with zero column") {
val table_no_cols = "table_no_cols"
withTable(table_no_cols) {
val rddNoCols = sparkContext.parallelize(1 to 10).map(_ => Row.empty)
val dfNoCols = spark.createDataFrame(rddNoCols, StructType(Seq.empty))
dfNoCols.write.format("json").saveAsTable(table_no_cols)
sql(s"ANALYZE TABLE $table_no_cols COMPUTE STATISTICS")
checkTableStats(table_no_cols, hasSizeInBytes = true, expectedRowCounts = Some(10))
}
}
test("analyze empty table") {
val table = "emptyTable"
withTable(table) {
val df = Seq.empty[Int].toDF("key")
df.write.format("json").saveAsTable(table)
sql(s"ANALYZE TABLE $table COMPUTE STATISTICS noscan")
val fetchedStats1 = checkTableStats(table, hasSizeInBytes = true, expectedRowCounts = None)
assert(fetchedStats1.get.sizeInBytes == 0)
sql(s"ANALYZE TABLE $table COMPUTE STATISTICS")
val fetchedStats2 = checkTableStats(table, hasSizeInBytes = true, expectedRowCounts = Some(0))
assert(fetchedStats2.get.sizeInBytes == 0)
val expectedColStat =
"key" -> CatalogColumnStat(Some(0), None, None, Some(0),
Some(IntegerType.defaultSize), Some(IntegerType.defaultSize))
// There won't be a histogram for an empty column.
Seq("true", "false").foreach { histogramEnabled =>
withSQLConf(SQLConf.HISTOGRAM_ENABLED.key -> histogramEnabled) {
checkColStats(df, mutable.LinkedHashMap(expectedColStat))
}
}
}
}
test("analyze column command - unsupported types and invalid columns") {
val tableName = "column_stats_test1"
withTable(tableName) {
Seq(ArrayData(Seq(1, 2, 3), Seq(Seq(1, 2, 3)))).toDF().write.saveAsTable(tableName)
// Test unsupported data types
val err1 = intercept[AnalysisException] {
sql(s"ANALYZE TABLE $tableName COMPUTE STATISTICS FOR COLUMNS data")
}
assert(err1.message.contains("does not support statistics collection"))
// Test invalid columns
val err2 = intercept[AnalysisException] {
sql(s"ANALYZE TABLE $tableName COMPUTE STATISTICS FOR COLUMNS some_random_column")
}
assert(err2.message.contains("does not exist"))
}
}
test("test table-level statistics for data source table") {
val tableName = "tbl"
withTable(tableName) {
sql(s"CREATE TABLE $tableName(i INT, j STRING) USING parquet")
Seq(1 -> "a", 2 -> "b").toDF("i", "j").write.mode("overwrite").insertInto(tableName)
// noscan won't count the number of rows
sql(s"ANALYZE TABLE $tableName COMPUTE STATISTICS noscan")
checkTableStats(tableName, hasSizeInBytes = true, expectedRowCounts = None)
// without noscan, we count the number of rows
sql(s"ANALYZE TABLE $tableName COMPUTE STATISTICS")
checkTableStats(tableName, hasSizeInBytes = true, expectedRowCounts = Some(2))
}
}
test("SPARK-15392: DataFrame created from RDD should not be broadcasted") {
val rdd = sparkContext.range(1, 100).map(i => Row(i, i))
val df = spark.createDataFrame(rdd, new StructType().add("a", LongType).add("b", LongType))
assert(df.queryExecution.analyzed.stats.sizeInBytes >
spark.sessionState.conf.autoBroadcastJoinThreshold)
assert(df.selectExpr("a").queryExecution.analyzed.stats.sizeInBytes >
spark.sessionState.conf.autoBroadcastJoinThreshold)
}
test("column stats round trip serialization") {
// Make sure that serializing and then deserializing gives back the original stats.
val df = data.toDF(stats.keys.toSeq :+ "carray" : _*)
Seq(stats, statsWithHgms).foreach { s =>
s.zip(df.schema).foreach { case ((k, v), field) =>
withClue(s"column $k with type ${field.dataType}") {
val roundtrip = CatalogColumnStat.fromMap("table_is_foo", field.name, v.toMap(k))
assert(roundtrip == Some(v))
}
}
}
}
test("SPARK-33812: column stats round trip serialization with splitting histogram property") {
withSQLConf(SQLConf.HIVE_TABLE_PROPERTY_LENGTH_THRESHOLD.key -> "10") {
statsWithHgms.foreach { case (k, v) =>
val roundtrip = CatalogColumnStat.fromMap("t", k, v.toMap(k))
assert(roundtrip == Some(v))
}
}
}
test("analyze column command - result verification") {
// (data.head.productArity - 1) because the last column does not support stats collection.
assert(stats.size == data.head.productArity - 1)
val df = data.toDF(stats.keys.toSeq :+ "carray" : _*)
checkColStats(df, stats)
// test column stats with histograms
withSQLConf(SQLConf.HISTOGRAM_ENABLED.key -> "true", SQLConf.HISTOGRAM_NUM_BINS.key -> "2") {
checkColStats(df, statsWithHgms)
}
}
test("column stats collection for null columns") {
val dataTypes: Seq[(DataType, Int)] = Seq(
BooleanType, ByteType, ShortType, IntegerType, LongType,
DoubleType, FloatType, DecimalType.SYSTEM_DEFAULT,
StringType, BinaryType, DateType, TimestampType
).zipWithIndex
val df = sql("select " + dataTypes.map { case (tpe, idx) =>
s"cast(null as ${tpe.sql}) as col$idx"
}.mkString(", "))
val expectedColStats = dataTypes.map { case (tpe, idx) =>
(s"col$idx", CatalogColumnStat(Some(0), None, None, Some(1),
Some(tpe.defaultSize.toLong), Some(tpe.defaultSize.toLong)))
}
// There won't be histograms for null columns.
Seq("true", "false").foreach { histogramEnabled =>
withSQLConf(SQLConf.HISTOGRAM_ENABLED.key -> histogramEnabled) {
checkColStats(df, mutable.LinkedHashMap(expectedColStats: _*))
}
}
}
test("SPARK-25028: column stats collection for null partitioning columns") {
val table = "analyze_partition_with_null"
withTempDir { dir =>
withTable(table) {
sql(s"""
|CREATE TABLE $table (value string, name string)
|USING PARQUET
|PARTITIONED BY (name)
|LOCATION '${dir.toURI}'""".stripMargin)
val df = Seq(("a", null), ("b", null)).toDF("value", "name")
df.write.mode("overwrite").insertInto(table)
sql(s"ANALYZE TABLE $table PARTITION (name) COMPUTE STATISTICS")
val partitions = spark.sessionState.catalog.listPartitions(TableIdentifier(table))
assert(partitions.head.stats.get.rowCount.get == 2)
}
}
}
test("number format in statistics") {
val numbers = Seq(
BigInt(0) -> (("0.0 B", "0")),
BigInt(100) -> (("100.0 B", "100")),
BigInt(2047) -> (("2047.0 B", "2.05E+3")),
BigInt(2048) -> (("2.0 KiB", "2.05E+3")),
BigInt(3333333) -> (("3.2 MiB", "3.33E+6")),
BigInt(4444444444L) -> (("4.1 GiB", "4.44E+9")),
BigInt(5555555555555L) -> (("5.1 TiB", "5.56E+12")),
BigInt(6666666666666666L) -> (("5.9 PiB", "6.67E+15")),
BigInt(1L << 10 ) * (1L << 60) -> (("1024.0 EiB", "1.18E+21")),
BigInt(1L << 11) * (1L << 60) -> (("2.36E+21 B", "2.36E+21"))
)
numbers.foreach { case (input, (expectedSize, expectedRows)) =>
val stats = Statistics(sizeInBytes = input, rowCount = Some(input))
val expectedString = s"sizeInBytes=$expectedSize, rowCount=$expectedRows"
assert(stats.simpleString == expectedString)
}
}
test("change stats after set location command") {
val table = "change_stats_set_location_table"
val tableLoc = new File(spark.sessionState.catalog.defaultTablePath(TableIdentifier(table)))
Seq(false, true).foreach { autoUpdate =>
withSQLConf(SQLConf.AUTO_SIZE_UPDATE_ENABLED.key -> autoUpdate.toString) {
withTable(table) {
spark.range(100).select($"id", $"id" % 5 as "value").write.saveAsTable(table)
// analyze to get initial stats
sql(s"ANALYZE TABLE $table COMPUTE STATISTICS FOR COLUMNS id, value")
val fetched1 = checkTableStats(
table, hasSizeInBytes = true, expectedRowCounts = Some(100))
assert(fetched1.get.sizeInBytes > 0)
assert(fetched1.get.colStats.size == 2)
// set location command
val initLocation = spark.sessionState.catalog.getTableMetadata(TableIdentifier(table))
.storage.locationUri.get.toString
withTempDir { newLocation =>
sql(s"ALTER TABLE $table SET LOCATION '${newLocation.toURI.toString}'")
if (autoUpdate) {
val fetched2 = checkTableStats(table, hasSizeInBytes = true, expectedRowCounts = None)
assert(fetched2.get.sizeInBytes == 0)
assert(fetched2.get.colStats.isEmpty)
// set back to the initial location
sql(s"ALTER TABLE $table SET LOCATION '$initLocation'")
val fetched3 = checkTableStats(table, hasSizeInBytes = true, expectedRowCounts = None)
assert(fetched3.get.sizeInBytes == fetched1.get.sizeInBytes)
} else {
checkTableStats(table, hasSizeInBytes = false, expectedRowCounts = None)
// SPARK-19724: clean up the previous table location.
waitForTasksToFinish()
Utils.deleteRecursively(tableLoc)
}
}
}
}
}
}
test("change stats after insert command for datasource table") {
val table = "change_stats_insert_datasource_table"
Seq(false, true).foreach { autoUpdate =>
withSQLConf(SQLConf.AUTO_SIZE_UPDATE_ENABLED.key -> autoUpdate.toString) {
withTable(table) {
sql(s"CREATE TABLE $table (i int, j string) USING PARQUET")
// analyze to get initial stats
sql(s"ANALYZE TABLE $table COMPUTE STATISTICS FOR COLUMNS i, j")
val fetched1 = checkTableStats(table, hasSizeInBytes = true, expectedRowCounts = Some(0))
assert(fetched1.get.sizeInBytes == 0)
assert(fetched1.get.colStats.size == 2)
// table lookup will make the table cached
spark.table(table)
assert(isTableInCatalogCache(table))
// insert into command
sql(s"INSERT INTO TABLE $table SELECT 1, 'abc'")
if (autoUpdate) {
val fetched2 = checkTableStats(table, hasSizeInBytes = true, expectedRowCounts = None)
assert(fetched2.get.sizeInBytes > 0)
assert(fetched2.get.colStats.isEmpty)
} else {
checkTableStats(table, hasSizeInBytes = false, expectedRowCounts = None)
}
// check that tableRelationCache inside the catalog was invalidated after insert
assert(!isTableInCatalogCache(table))
}
}
}
}
test("auto gather stats after insert command") {
val table = "change_stats_insert_datasource_table"
Seq(false, true).foreach { autoUpdate =>
withSQLConf(SQLConf.AUTO_SIZE_UPDATE_ENABLED.key -> autoUpdate.toString) {
withTable(table) {
sql(s"CREATE TABLE $table (i int, j string) USING PARQUET")
// insert into command
sql(s"INSERT INTO TABLE $table SELECT 1, 'abc'")
val stats = getCatalogTable(table).stats
if (autoUpdate) {
assert(stats.isDefined)
assert(stats.get.sizeInBytes >= 0)
} else {
assert(stats.isEmpty)
}
}
}
}
}
test("invalidation of tableRelationCache after inserts") {
val table = "invalidate_catalog_cache_table"
Seq(false, true).foreach { autoUpdate =>
withSQLConf(SQLConf.AUTO_SIZE_UPDATE_ENABLED.key -> autoUpdate.toString) {
withTable(table) {
spark.range(100).write.saveAsTable(table)
sql(s"ANALYZE TABLE $table COMPUTE STATISTICS")
spark.table(table)
val initialSizeInBytes = getTableFromCatalogCache(table).stats.sizeInBytes
spark.range(100).write.mode(SaveMode.Append).saveAsTable(table)
spark.table(table)
assert(getTableFromCatalogCache(table).stats.sizeInBytes == 2 * initialSizeInBytes)
}
}
}
}
test("invalidation of tableRelationCache after alter table add partition") {
val table = "invalidate_catalog_cache_table"
Seq(false, true).foreach { autoUpdate =>
withSQLConf(SQLConf.AUTO_SIZE_UPDATE_ENABLED.key -> autoUpdate.toString) {
withTempDir { dir =>
withTable(table) {
val path = dir.getCanonicalPath
sql(s"""
|CREATE TABLE $table (col1 int, col2 int)
|USING PARQUET
|PARTITIONED BY (col2)
|LOCATION '${dir.toURI}'""".stripMargin)
sql(s"ANALYZE TABLE $table COMPUTE STATISTICS")
spark.table(table)
assert(getTableFromCatalogCache(table).stats.sizeInBytes == 0)
spark.catalog.recoverPartitions(table)
val df = Seq((1, 2), (1, 2)).toDF("col2", "col1")
df.write.parquet(s"$path/col2=1")
sql(s"ALTER TABLE $table ADD PARTITION (col2=1) LOCATION '${dir.toURI}'")
spark.table(table)
val cachedTable = getTableFromCatalogCache(table)
val cachedTableSizeInBytes = cachedTable.stats.sizeInBytes
val defaultSizeInBytes = conf.defaultSizeInBytes
if (autoUpdate) {
assert(cachedTableSizeInBytes != defaultSizeInBytes && cachedTableSizeInBytes > 0)
} else {
assert(cachedTableSizeInBytes == defaultSizeInBytes)
}
}
}
}
}
}
test("Simple queries must be working, if CBO is turned on") {
withSQLConf(SQLConf.CBO_ENABLED.key -> "true") {
withTable("TBL1", "TBL") {
import org.apache.spark.sql.functions._
val df = spark.range(1000L).select('id,
'id * 2 as "FLD1",
'id * 12 as "FLD2",
lit("aaa") + 'id as "fld3")
df.write
.mode(SaveMode.Overwrite)
.bucketBy(10, "id", "FLD1", "FLD2")
.sortBy("id", "FLD1", "FLD2")
.saveAsTable("TBL")
sql("ANALYZE TABLE TBL COMPUTE STATISTICS ")
sql("ANALYZE TABLE TBL COMPUTE STATISTICS FOR COLUMNS ID, FLD1, FLD2, FLD3")
val df2 = spark.sql(
"""
|SELECT t1.id, t1.fld1, t1.fld2, t1.fld3
|FROM tbl t1
|JOIN tbl t2 on t1.id=t2.id
|WHERE t1.fld3 IN (-123.23,321.23)
""".stripMargin)
df2.createTempView("TBL2")
sql("SELECT * FROM tbl2 WHERE fld3 IN ('qqq', 'qwe') ").queryExecution.executedPlan
}
}
}
test("store and retrieve column stats in different time zones") {
val (start, end) = (0, TimeUnit.DAYS.toSeconds(2))
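// Collects column stats with srcTimeZone as the JVM default time zone, then reads
// them back under dstTimeZone to check that the stored values are zone-independent.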
def checkTimestampStats(
t: DataType,
srcTimeZone: TimeZone,
dstTimeZone: TimeZone)(checker: ColumnStat => Unit): Unit = {
val table = "time_table"
val column = "T"
val original = TimeZone.getDefault
try {
withTable(table) {
TimeZone.setDefault(srcTimeZone)
spark.range(start, end)
.select(timestamp_seconds($"id").cast(t).as(column))
.write.saveAsTable(table)
sql(s"ANALYZE TABLE $table COMPUTE STATISTICS FOR COLUMNS $column")
TimeZone.setDefault(dstTimeZone)
val stats = getCatalogTable(table)
.stats.get.colStats(column).toPlanStat(column, t)
checker(stats)
}
} finally {
TimeZone.setDefault(original)
}
}
DateTimeTestUtils.outstandingZoneIds.foreach { zid =>
val timeZone = TimeZone.getTimeZone(zid)
checkTimestampStats(DateType, TimeZoneUTC, timeZone) { stats =>
assert(stats.min.get.asInstanceOf[Int] == TimeUnit.SECONDS.toDays(start))
assert(stats.max.get.asInstanceOf[Int] == TimeUnit.SECONDS.toDays(end - 1))
}
checkTimestampStats(TimestampType, TimeZoneUTC, timeZone) { stats =>
assert(stats.min.get.asInstanceOf[Long] == TimeUnit.SECONDS.toMicros(start))
assert(stats.max.get.asInstanceOf[Long] == TimeUnit.SECONDS.toMicros(end - 1))
}
}
}
def getStatAttrNames(tableName: String): Set[String] = {
val queryStats = spark.table(tableName).queryExecution.optimizedPlan.stats.attributeStats
queryStats.map(_._1.name).toSet
}
test("analyzes column statistics in cached query") {
withTempView("cachedQuery") {
sql(
"""CACHE TABLE cachedQuery AS
| SELECT c0, avg(c1) AS v1, avg(c2) AS v2
| FROM (SELECT id % 3 AS c0, id % 5 AS c1, 2 AS c2 FROM range(1, 30))
| GROUP BY c0
""".stripMargin)
// Analyzes one column in the cached logical plan
sql("ANALYZE TABLE cachedQuery COMPUTE STATISTICS FOR COLUMNS v1")
assert(getStatAttrNames("cachedQuery") === Set("v1"))
// Analyzes two more columns
sql("ANALYZE TABLE cachedQuery COMPUTE STATISTICS FOR COLUMNS c0, v2")
assert(getStatAttrNames("cachedQuery") === Set("c0", "v1", "v2"))
}
}
test("analyzes column statistics in cached local temporary view") {
withTempView("tempView") {
// Analyzes in a temporary view
sql("CREATE TEMPORARY VIEW tempView AS SELECT 1 id")
val errMsg = intercept[AnalysisException] {
sql("ANALYZE TABLE tempView COMPUTE STATISTICS FOR COLUMNS id")
}.getMessage
assert(errMsg.contains("Temporary view `tempView` is not cached for analyzing columns"))
// Cache the view then analyze it
sql("CACHE TABLE tempView")
assert(getStatAttrNames("tempView") !== Set("id"))
sql("ANALYZE TABLE tempView COMPUTE STATISTICS FOR COLUMNS id")
assert(getStatAttrNames("tempView") === Set("id"))
}
}
test("analyzes column statistics in cached global temporary view") {
withGlobalTempView("gTempView") {
val globalTempDB = spark.sharedState.globalTempViewManager.database
val errMsg1 = intercept[AnalysisException] {
sql(s"ANALYZE TABLE $globalTempDB.gTempView COMPUTE STATISTICS FOR COLUMNS id")
}.getMessage
assert(errMsg1.contains("Table or view not found: " +
s"$globalTempDB.gTempView"))
// Analyzes in a global temporary view
sql("CREATE GLOBAL TEMP VIEW gTempView AS SELECT 1 id")
val errMsg2 = intercept[AnalysisException] {
sql(s"ANALYZE TABLE $globalTempDB.gTempView COMPUTE STATISTICS FOR COLUMNS id")
}.getMessage
assert(errMsg2.contains(
s"Temporary view `$globalTempDB`.`gTempView` is not cached for analyzing columns"))
// Cache the view then analyze it
sql(s"CACHE TABLE $globalTempDB.gTempView")
assert(getStatAttrNames(s"$globalTempDB.gTempView") !== Set("id"))
sql(s"ANALYZE TABLE $globalTempDB.gTempView COMPUTE STATISTICS FOR COLUMNS id")
assert(getStatAttrNames(s"$globalTempDB.gTempView") === Set("id"))
}
}
test("analyzes column statistics in cached catalog view") {
withTempDatabase { database =>
sql(s"CREATE VIEW $database.v AS SELECT 1 c")
sql(s"CACHE TABLE $database.v")
assert(getStatAttrNames(s"$database.v") !== Set("c"))
sql(s"ANALYZE TABLE $database.v COMPUTE STATISTICS FOR COLUMNS c")
assert(getStatAttrNames(s"$database.v") === Set("c"))
}
}
test("analyzes table statistics in cached catalog view") {
def getTableStats(tableName: String): Statistics = {
spark.table(tableName).queryExecution.optimizedPlan.stats
}
withTempDatabase { database =>
sql(s"CREATE VIEW $database.v AS SELECT 1 c")
// Cache data eagerly by default, so this operation collects table stats
sql(s"CACHE TABLE $database.v")
val stats1 = getTableStats(s"$database.v")
assert(stats1.sizeInBytes > 0)
assert(stats1.rowCount === Some(1))
sql(s"UNCACHE TABLE $database.v")
// Cache data lazily, then analyze table stats
sql(s"CACHE LAZY TABLE $database.v")
val stats2 = getTableStats(s"$database.v")
assert(stats2.sizeInBytes === OneRowRelation().computeStats().sizeInBytes)
assert(stats2.rowCount === None)
sql(s"ANALYZE TABLE $database.v COMPUTE STATISTICS NOSCAN")
val stats3 = getTableStats(s"$database.v")
assert(stats3.sizeInBytes === OneRowRelation().computeStats().sizeInBytes)
assert(stats3.rowCount === None)
sql(s"ANALYZE TABLE $database.v COMPUTE STATISTICS")
val stats4 = getTableStats(s"$database.v")
assert(stats4.sizeInBytes === stats1.sizeInBytes)
assert(stats4.rowCount === Some(1))
}
}
test(s"CTAS should update statistics if ${SQLConf.AUTO_SIZE_UPDATE_ENABLED.key} is enabled") {
val tableName = "spark_27694"
Seq(false, true).foreach { updateEnabled =>
withSQLConf(SQLConf.AUTO_SIZE_UPDATE_ENABLED.key -> updateEnabled.toString) {
withTable(tableName) {
// Create a data source table using the result of a query.
sql(s"CREATE TABLE $tableName USING parquet AS SELECT 'a', 'b'")
val catalogTable = getCatalogTable(tableName)
if (updateEnabled) {
assert(catalogTable.stats.nonEmpty)
} else {
assert(catalogTable.stats.isEmpty)
}
}
}
}
}
test("Metadata files and temporary files should not be counted as data files") {
withTempDir { tempDir =>
val tableName = "t1"
val stagingDirName = ".test-staging-dir"
val tableLocation = s"${tempDir.toURI}/$tableName"
withSQLConf(
SQLConf.AUTO_SIZE_UPDATE_ENABLED.key -> "true",
"hive.exec.stagingdir" -> stagingDirName) {
withTable("t1") {
sql(s"CREATE TABLE $tableName(c1 BIGINT) USING PARQUET LOCATION '$tableLocation'")
sql(s"INSERT INTO TABLE $tableName VALUES(1)")
val staging = new File(new URI(s"$tableLocation/$stagingDirName"))
Utils.tryWithResource(new PrintWriter(staging)) { stagingWriter =>
stagingWriter.write("12")
}
val metadata = new File(new URI(s"$tableLocation/_metadata"))
Utils.tryWithResource(new PrintWriter(metadata)) { metadataWriter =>
metadataWriter.write("1234")
}
sql(s"INSERT INTO TABLE $tableName VALUES(1)")
val stagingFileSize = staging.length()
val metadataFileSize = metadata.length()
val tableLocationSize = getDataSize(new File(new URI(tableLocation)))
val stats = checkTableStats(tableName, hasSizeInBytes = true, expectedRowCounts = None)
assert(stats.get.sizeInBytes === tableLocationSize - stagingFileSize - metadataFileSize)
}
}
}
}
Seq(true, false).foreach { caseSensitive =>
test(s"SPARK-30903: Fail fast on duplicate columns when analyze columns " +
s"- caseSensitive=$caseSensitive") {
withSQLConf(SQLConf.CASE_SENSITIVE.key -> caseSensitive.toString) {
val table = "test_table"
withTable(table) {
sql(s"CREATE TABLE $table (value string, name string) USING PARQUET")
val dupCol = if (caseSensitive) "value" else "VaLuE"
val errorMsg = intercept[AnalysisException] {
sql(s"ANALYZE TABLE $table COMPUTE STATISTICS FOR COLUMNS value, name, $dupCol")
}.getMessage
assert(errorMsg.contains("Found duplicate column(s)"))
}
}
}
}
test("SPARK-34119: Keep necessary stats after PruneFileSourcePartitions") {
withTable("SPARK_34119") {
withSQLConf(SQLConf.CBO_ENABLED.key -> "true") {
sql(s"CREATE TABLE SPARK_34119 using parquet PARTITIONED BY (p) AS " +
"(SELECT id, CAST(id % 5 AS STRING) AS p FROM range(10))")
sql(s"ANALYZE TABLE SPARK_34119 COMPUTE STATISTICS FOR ALL COLUMNS")
checkOptimizedPlanStats(sql(s"SELECT id FROM SPARK_34119"),
160L,
Some(10),
Seq(ColumnStat(
distinctCount = Some(10),
min = Some(0),
max = Some(9),
nullCount = Some(0),
avgLen = Some(LongType.defaultSize),
maxLen = Some(LongType.defaultSize))))
checkOptimizedPlanStats(sql("SELECT id FROM SPARK_34119 WHERE p = '2'"),
32L,
Some(2),
Seq(ColumnStat(
distinctCount = Some(2),
min = Some(0),
max = Some(9),
nullCount = Some(0),
avgLen = Some(LongType.defaultSize),
maxLen = Some(LongType.defaultSize))))
}
}
}
test("SPARK-33687: analyze all tables in a specific database") {
withTempDatabase { database =>
spark.catalog.setCurrentDatabase(database)
withTempDir { dir =>
withTable("t1", "t2") {
spark.range(10).write.saveAsTable("t1")
sql(s"CREATE EXTERNAL TABLE t2 USING parquet LOCATION '${dir.toURI}' " +
"AS SELECT * FROM range(20)")
withView("v1", "v2") {
sql("CREATE VIEW v1 AS SELECT 1 c1")
sql("CREATE VIEW v2 AS SELECT 2 c2")
sql("CACHE TABLE v1")
sql("CACHE LAZY TABLE v2")
sql(s"ANALYZE TABLES IN $database COMPUTE STATISTICS NOSCAN")
checkTableStats("t1", hasSizeInBytes = true, expectedRowCounts = None)
checkTableStats("t2", hasSizeInBytes = true, expectedRowCounts = None)
assert(getCatalogTable("v1").stats.isEmpty)
checkOptimizedPlanStats(spark.table("v1"), 4, Some(1), Seq.empty)
checkOptimizedPlanStats(spark.table("v2"), 1, None, Seq.empty)
sql("ANALYZE TABLES COMPUTE STATISTICS")
checkTableStats("t1", hasSizeInBytes = true, expectedRowCounts = Some(10))
checkTableStats("t2", hasSizeInBytes = true, expectedRowCounts = Some(20))
checkOptimizedPlanStats(spark.table("v1"), 4, Some(1), Seq.empty)
checkOptimizedPlanStats(spark.table("v2"), 4, Some(1), Seq.empty)
}
}
}
}
val errMsg = intercept[AnalysisException] {
sql(s"ANALYZE TABLES IN db_not_exists COMPUTE STATISTICS")
}.getMessage
assert(errMsg.contains("Database 'db_not_exists' not found"))
}
}
|
shaneknapp/spark
|
sql/core/src/test/scala/org/apache/spark/sql/StatisticsCollectionSuite.scala
|
Scala
|
apache-2.0
| 29,219
|
package mesosphere.marathon.state
import java.io.{ ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream }
import javax.inject.Inject
import mesosphere.marathon.Protos.StorageVersion
import mesosphere.marathon.state.PathId._
import mesosphere.marathon.state.StorageVersions._
import mesosphere.marathon.tasks.TaskTracker
import mesosphere.marathon.tasks.TaskTracker.{ InternalApp, App }
import mesosphere.marathon.{ BuildInfo, MarathonConf, StorageException }
import mesosphere.util.BackToTheFuture.futureToFutureOption
import mesosphere.util.ThreadPoolContext.context
import mesosphere.util.{ BackToTheFuture, Logging }
import org.apache.mesos.state.{ State, Variable }
import scala.collection.JavaConverters._
import scala.concurrent.duration.Duration
import scala.concurrent.{ Await, Future }
import scala.util.{ Failure, Success, Try }
class Migration @Inject() (
state: State,
appRepo: AppRepository,
groupRepo: GroupRepository,
config: MarathonConf,
implicit val timeout: BackToTheFuture.Timeout = BackToTheFuture.Implicits.defaultTimeout) extends Logging {
type MigrationAction = (StorageVersion, () => Future[Any])
/**
* All the migrations that have to be applied.
* They get applied after the master has been elected.
*/
def migrations: List[MigrationAction] = List(
StorageVersions(0, 5, 0) -> { () => changeApps(app => app.copy(id = app.id.toString.toLowerCase.replaceAll("_", "-").toRootPath)) },
StorageVersions(0, 7, 0) -> { () =>
{
changeTasks(app => new InternalApp(app.appName.canonicalPath(), app.tasks, app.shutdown))
changeApps(app => app.copy(id = app.id.canonicalPath()))
putAppsIntoGroup()
}
}
)
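// Applies, in ascending version order, every migration step whose target version is
// newer than the version currently stored.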
def applyMigrationSteps(from: StorageVersion): Future[List[StorageVersion]] = {
val result = migrations.filter(_._1 > from).sortBy(_._1).map {
case (migrateVersion, change) =>
log.info(s"Migration for storage: ${from.str} to current: ${current.str}: apply change for version: ${migrateVersion.str} ")
change.apply().map(_ => migrateVersion)
}
Future.sequence(result)
}
def migrate(): StorageVersion = {
val result = for {
changes <- currentStorageVersion.flatMap(applyMigrationSteps)
storedVersion <- storeCurrentVersion
} yield storedVersion
result.onComplete {
case Success(version) => log.info(s"Migration successfully applied for version ${version.str}")
case Failure(ex) => log.error(s"Migration failed! $ex")
}
Await.result(result, Duration.Inf)
}
private val storageVersionName = "internal:storage:version"
def currentStorageVersion: Future[StorageVersion] = {
state.fetch(storageVersionName).map {
case Some(variable) => Try(StorageVersion.parseFrom(variable.value())).getOrElse(StorageVersions.empty)
case None => throw new StorageException("Failed to read storage version")
}
}
def storeCurrentVersion: Future[StorageVersion] = {
state.fetch(storageVersionName) flatMap {
case Some(variable) =>
state.store(variable.mutate(StorageVersions.current.toByteArray)) map {
case Some(newVar) => StorageVersion.parseFrom(newVar.value)
case None => throw new StorageException(s"Failed to store storage version")
}
case None => throw new StorageException("Failed to read storage version")
}
}
// specific migration helper methods
private def changeApps(fn: AppDefinition => AppDefinition): Future[Any] = {
appRepo.apps().flatMap { apps =>
val mappedApps = apps.map { app => appRepo.store(fn(app)) }
Future.sequence(mappedApps)
}
}
private def changeTasks(fn: InternalApp => InternalApp): Future[Any] = {
val taskTracker = new TaskTracker(state, config)
def fetchApp(appId: PathId): Option[InternalApp] = {
val bytes = state.fetch("tasks:" + appId.safePath).get().value
if (bytes.length > 0) {
val source = new ObjectInputStream(new ByteArrayInputStream(bytes))
val fetchedTasks = taskTracker.legacyDeserialize(appId, source).map {
case (key, task) =>
val builder = task.toBuilder.clearOBSOLETEStatuses()
task.getOBSOLETEStatusesList.asScala.lastOption.foreach(builder.setStatus)
key -> builder.build()
}
Some(new InternalApp(appId, fetchedTasks, false))
}
else None
}
def store(app: InternalApp): Future[Seq[Variable]] = {
val oldVar = state.fetch("tasks:" + app.appName.safePath).get()
val bytes = new ByteArrayOutputStream()
val output = new ObjectOutputStream(bytes)
Future.sequence(app.tasks.values.toSeq.map(taskTracker.store(app.appName, _)))
}
appRepo.allPathIds().flatMap { apps =>
val res = apps.flatMap(fetchApp).map{ app => store(fn(app)) }
Future.sequence(res)
}
}
private def putAppsIntoGroup(): Future[Any] = {
groupRepo.group("root").map(_.getOrElse(Group.empty)).map { group =>
appRepo.apps().flatMap { apps =>
val updatedGroup = apps.foldLeft(group) { (group, app) =>
val updatedApp = app.copy(id = app.id.canonicalPath())
group.updateApp(updatedApp.id, _ => updatedApp, Timestamp.now())
}
groupRepo.store("root", updatedGroup)
}
}
}
}
object StorageVersions {
val VersionRegex = """^(\d+)\.(\d+)\.(\d+).*""".r
def apply(major: Int, minor: Int, patch: Int): StorageVersion = {
StorageVersion
.newBuilder()
.setMajor(major)
.setMinor(minor)
.setPatch(patch)
.build()
}
def current: StorageVersion = {
BuildInfo.version match {
case VersionRegex(major, minor, patch) =>
StorageVersions(
major.toInt,
minor.toInt,
patch.toInt
)
}
}
implicit class OrderedStorageVersion(val version: StorageVersion) extends AnyVal with Ordered[StorageVersion] {
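// Orders storage versions lexicographically: by major, then minor, then patch.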
override def compare(that: StorageVersion): Int = {
def by(left: Int, right: Int, fn: => Int): Int = if (left.compareTo(right) != 0) left.compareTo(right) else fn
by(version.getMajor, that.getMajor, by(version.getMinor, that.getMinor, by(version.getPatch, that.getPatch, 0)))
}
def str: String = s"Version(${version.getMajor}, ${version.getMinor}, ${version.getPatch})"
}
def empty: StorageVersion = StorageVersions(0, 0, 0)
}
|
tnachen/marathon
|
src/main/scala/mesosphere/marathon/state/Migration.scala
|
Scala
|
apache-2.0
| 6,438
|
package plantae.citrus.mqtt.actors.topic
import akka.actor._
import plantae.citrus.mqtt.actors.session.PublishMessage
import scodec.bits.ByteVector
import scala.collection.mutable.Map
import scala.util.Random
case class TopicSubscribe(session: ActorRef, qos: Short, reply: Boolean = true)
case class TopicSubscribed(topicName: String, result: Boolean, newbie: Boolean = false, session: ActorRef)
case class TopicUnsubscribe(session: ActorRef)
case class TopicUnsubscribed(topicName: String, result: Boolean)
case object TopicSubscriberClear
case object TopicGetSubscribers
case class TopicSubscribers(subscribers: List[(ActorRef, Short)])
//case class Publish(topic: String, payload: ByteVector, retain: Boolean, packetId: Option[Int]) extends TopicRequest
case class TopicStoreRetainMessage(payload: ByteVector)
case class TopicPublishRetainMessage(session: ActorRef)
object Topic {
def props(topicName: String) = {
Props(classOf[Topic], topicName)
}
}
class Topic(topicName: String) extends Actor with ActorLogging {
private val subscriberMap: collection.mutable.HashMap[ActorRef, Short] = collection.mutable.HashMap[ActorRef, Short]()
private val retainMessageSet: collection.mutable.Set[ByteVector] = collection.mutable.Set[ByteVector]()
def receive = {
case TopicSubscribe(session, qos, reply) =>
log.debug("[NEWTOPIC]TopicSubscribe topic({}) client({}) qos({})", topicName, session.path.name, qos)
if (!subscriberMap.contains(session)) {
subscriberMap.+=((session, qos))
if (reply)
sender ! TopicSubscribed(topicName, true, true, session)
}
else {
if (subscriberMap.get(session).get < qos) {
subscriberMap.-=(session)
subscriberMap.+=((session, qos))
}
if (reply)
sender ! TopicSubscribed(topicName, true, session = session)
}
case TopicUnsubscribe(session) =>
log.debug("[NEWTOPIC]TopicUnsubscribe topic({}) client({})", topicName, session.path.name)
subscriberMap.-=(session)
// sender ! TopicUnsubscribed(topicName, true)
case TopicSubscriberClear =>
log.debug("[NEWTOPIC]TopicSubscriberClear topic({})", topicName)
subscriberMap.clear
case TopicGetSubscribers =>
sender ! TopicSubscribers(subscriberMap.toList)
case message: TopicStoreRetainMessage => {
log.debug("RetainMessage Topic({}) Store retain message {}", topicName, message)
retainMessageSet.clear()
if (message.payload.size != 0)
retainMessageSet.add(message.payload)
}
case TopicPublishRetainMessage(session) => {
log.debug("PublishRetain retainMessage({}) topicName({}) session({})", retainMessageSet, topicName, session)
if (retainMessageSet.size > 0)
session ! PublishMessage(topicName, 2, retainMessageSet.head)
}
}
}
trait NTopic {
val children: Map[String, TopicNode2] = Map[String, TopicNode2]()
def pathToList(path: String): List[String] = {
path.split("/").toList
}
}
case class TopicNode2(name: String, elem: ActorRef, context: ActorRefFactory, root: Boolean = false) extends NTopic {
def getTopicNode(path: String): ActorRef = {
getTopicNode(pathToList(path))
}
def getTopicNode(paths: List[String]): ActorRef = {
paths match {
case Nil => elem
case x :: Nil => {
val node: TopicNode2 = children.get(x) match {
case Some(node) => node
case None => {
val newNodeName = if (root) x else name + "/" + x
val newTopicActor = context.actorOf(Topic.props(newNodeName), Random.alphanumeric.take(128).mkString)
val node = TopicNode2(name = newNodeName, elem = newTopicActor, context = context)
children.+=((x, node))
node
}
}
node.elem
}
case x :: others => {
val node: TopicNode2 = children.get(x) match {
case Some(node) => node
case None => {
val newNodeName = if (root) x else name + "/" + x
val newTopicActor = context.actorOf(Topic.props(newNodeName), Random.alphanumeric.take(128).mkString)
val node = TopicNode2(name = newNodeName, elem = newTopicActor, context = context)
children.+=((x, node))
node
}
}
node.getTopicNode(others)
}
}
}
def matchedTopicNodes(path: String): List[ActorRef] = {
matchedTopicNodes(pathToList(path))
}
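// MQTT-style wildcard matching: "+" matches exactly one path level, while "#"
// matches the current level and everything below it.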
def matchedTopicNodes(paths: List[String]): List[ActorRef] = {
paths match {
case Nil => List(elem)
case "+" :: Nil => {
children.map(x => x._2.elem).toList
}
case "#" :: Nil => {
getEveryElements()
}
case x :: Nil => {
children.get(x) match {
case Some(nodes) => nodes.elem :: Nil
case None => List()
}
}
case "+" :: others => {
children.map(x => x._2.matchedTopicNodes(others)).flatten.toList
}
case "#" :: others => {
getEveryElements()
}
case x :: others => {
children.get(x) match {
case Some(node) => node.matchedTopicNodes(others)
case None => List()
}
}
}
}
def getEveryElements() : List[ActorRef] = {
val childrenNodes = children.map( x => {
x._2.getEveryElements()
}).flatten.toList
if (!root) elem :: childrenNodes else childrenNodes
}
def matchedTopicNodesWithOutWildCard(path: String): List[ActorRef] = {
// wildcards must not be used here
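// Given a concrete publish path, collects every subscriber node that matches it,
// including subscriptions registered under the "+" and "#" wildcards.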
if (path.contains("+") || path.contains("#")) List()
else matchedTopicNodesWithOutWildCard(pathToList(path))
}
def matchedTopicNodesWithOutWildCard(paths: List[String]): List[ActorRef] = {
paths match {
case Nil => List(elem)
case x :: Nil => {
val l1 = children.get(x) match {
case Some(node) => node.elem :: Nil
case None => Nil
}
val l2 = children.get("+") match {
case Some(node) => node.elem :: Nil
case None => Nil
}
val l3 = children.get("#") match {
case Some(node) => node.elem :: Nil
case None => Nil
}
l1 ::: l2 ::: l3
}
case x :: others => {
val l1 = children.get(x) match {
case Some(node) => node.matchedTopicNodesWithOutWildCard(others)
case None => Nil
}
val l2 = children.get("+") match {
case Some(node) => node.matchedTopicNodesWithOutWildCard(others)
case None => Nil
}
val l3 = children.get("#") match {
case Some(node) => node.elem :: Nil
case None => Nil
}
l1 ::: l2 ::: l3
}
}
}
}
object Test extends App {
val root = TopicNode2("", null, ActorSystem(), true)
root.getTopicNode("a/1")
root.getTopicNode("a/2")
root.getTopicNode("a/3")
println(root.matchedTopicNodes("+/+"))
}
|
sureddy/mqttd
|
src/main/scala-2.11/plantae/citrus/mqtt/actors/topic/Ntopic.scala
|
Scala
|
mit
| 6,969
|
package im.actor.server.api.rpc.service.webhooks
import akka.actor.ActorSystem
import akka.http.scaladsl.util.FastFuture
import akka.util.Timeout
import im.actor.api.rpc.PeerHelpers._
import im.actor.api.rpc._
import im.actor.api.rpc.integrations.{ IntegrationsService, ResponseIntegrationToken }
import im.actor.api.rpc.peers.{ ApiOutPeer, ApiPeerType }
import im.actor.server.api.rpc.service.webhooks.IntegrationServiceHelpers._
import im.actor.server.db.DbExtension
import im.actor.server.group.GroupErrors.{ NoPermission, NotAMember, NotAdmin }
import im.actor.server.group.GroupExtension
import slick.driver.PostgresDriver.api._
import scala.concurrent.duration._
import scala.concurrent.{ ExecutionContext, Future }
class IntegrationsServiceImpl(baseUri: String)(implicit actorSystem: ActorSystem) extends IntegrationsService with PeersImplicits {
override implicit val ec: ExecutionContext = actorSystem.dispatcher
private implicit val timeout = Timeout(10.seconds)
private val db: Database = DbExtension(actorSystem).db
private val groupExt = GroupExtension(actorSystem)
private val PeerIsNotGroup = RpcError(403, "PEER_IS_NOT_GROUP", "", false, None)
override def doHandleGetIntegrationToken(peer: ApiOutPeer, clientData: ClientData): Future[HandlerResult[ResponseIntegrationToken]] =
authorized(clientData) { implicit client ⇒
if (peer.`type` != ApiPeerType.Group) {
FastFuture.successful(Error(PeerIsNotGroup))
} else {
withOutPeer(peer) {
for {
optToken ← groupExt.getIntegrationToken(peer.id, client.userId)
(token, url) = optToken map (t ⇒ t → makeUrl(baseUri, t)) getOrElse ("" → "")
} yield {
Ok(ResponseIntegrationToken(token, url))
}
}
}
}
override def doHandleRevokeIntegrationToken(peer: ApiOutPeer, clientData: ClientData): Future[HandlerResult[ResponseIntegrationToken]] =
authorized(clientData) { implicit client ⇒
if (peer.`type` != ApiPeerType.Group) {
FastFuture.successful(Error(PeerIsNotGroup))
} else {
withOutPeer(peer) {
for {
token ← groupExt.revokeIntegrationToken(peer.id, client.userId)
} yield Ok(ResponseIntegrationToken(token, makeUrl(baseUri, token)))
}
}
}
override def onFailure: PartialFunction[Throwable, RpcError] = {
case NotAdmin ⇒ CommonRpcErrors.forbidden("Only admin can perform this action.")
case NotAMember ⇒ CommonRpcErrors.forbidden("You are not a group member.")
case NoPermission ⇒ CommonRpcErrors.forbidden("You have no permission to execute this action")
}
}
|
EaglesoftZJ/actor-platform
|
actor-server/actor-rpc-api/src/main/scala/im/actor/server/api/rpc/service/webhooks/IntegrationsServiceImpl.scala
|
Scala
|
agpl-3.0
| 2,680
|
// scalac: -opt:l:inline -opt-inline-from:**
class C {
def t(a: A): AnyRef = {
// a.a is inlined, resulting in a.b, which has return type B
var foo: AnyRef = if (hashCode == 0) a.a else this
if (foo == null)
foo = this
// at the merge point, the stack map frame calculation needs the LUB of (B, C),
// so the ClassBType for C needs to be cached
foo
}
}
object Test {
def main(args: Array[String]): Unit = {
new C
}
}
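// Note (sketch, not part of the test): the classes A and B referenced in C.t live in a
// sibling source file, presumably compiled in an earlier round per the _1/_2 suffix
// convention. A hypothetical shape that would match the comments above — `a` being a small
// forwarder that the inliner replaces with its body `this.b`, whose declared type is B — is:
//
//   class B
//   class A {
//     @inline final def a: B = b
//     def b: B = new B
//   }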
|
lrytz/scala
|
test/files/run/inline-stack-map-frames/Test_2.scala
|
Scala
|
apache-2.0
| 459
|
package devnull.storage
import java.util.UUID
import doobie.postgres.pgtypes._
import doobie.hi
import doobie.imports._
class PaperFeedbackRepository {
val uuidType = UuidType
object Queries {
def insert(fb: PaperFeedback): Update0 = {
sql"""
INSERT INTO paper_feedback (
created,
event_id,
session_id,
green,
yellow,
red,
participants
) VALUES (
current_timestamp,
${fb.eventId},
${fb.sessionId},
${fb.ratings.green},
${fb.ratings.yellow},
${fb.ratings.red},
${fb.participants}
)
""".update
}
def selectFeedback(sessionId: UUID): Query0[PaperFeedback] = {
sql"""
SELECT
id,
created,
event_id,
session_id,
green,
yellow,
red,
participants
FROM paper_feedback
WHERE session_id = $sessionId
""".query[PaperFeedback]
}
def selectAvgFeedbackForEvent(
eventId: UUID
): Query0[(PaperRatingResult, Option[Double])] = {
sql"""
SELECT
avg(green) :: FLOAT,
avg(yellow) :: FLOAT,
avg(red) :: FLOAT,
avg(participants) :: FLOAT
FROM paper_feedback
WHERE event_id = $eventId
""".query[(PaperRatingResult, Option[Double])]
}
}
def insertPaperFeedback(fb: PaperFeedback): hi.ConnectionIO[FeedbackId] = {
Queries.insert(fb).withUniqueGeneratedKeys[FeedbackId]("id")
}
def selectFeedbackForSession(
sessionId: UUID
): hi.ConnectionIO[Option[PaperFeedback]] = {
Queries.selectFeedback(sessionId).option
}
def selectAvgFeedbackForEvent(
eventId: UUID
): hi.ConnectionIO[Option[(PaperRatingResult, Option[Double])]] = {
Queries.selectAvgFeedbackForEvent(eventId).option
}
}
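// Usage sketch (not part of the original file): the repository returns ConnectionIO values,
// so callers run them through a doobie transactor. The transactor `xa` and the input values
// below are placeholders configured elsewhere in the application.
//
//   val repo = new PaperFeedbackRepository
//   val stored  = repo.insertPaperFeedback(feedback).transact(xa)        // generated FeedbackId
//   val forSess = repo.selectFeedbackForSession(sessionId).transact(xa)  // Option[PaperFeedback]
//   val avg     = repo.selectAvgFeedbackForEvent(eventId).transact(xa)   // Option[(PaperRatingResult, Option[Double])]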
|
javaBin/devnull
|
src/main/scala/devnull/storage/PaperFeedbackRepository.scala
|
Scala
|
apache-2.0
| 1,987
|
/**
* Copyright 2015 Thomson Reuters
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cmwell.stortill
import akka.NotUsed
import akka.stream.impl.fusing.MapAsyncUnordered
import akka.stream.{ClosedShape, Materializer, SourceShape}
import akka.stream.scaladsl.{Flow, GraphDSL, Keep, RunnableGraph, Sink, Source}
import cmwell.common.formats.{JsonSerializer, JsonSerializerForES}
import cmwell.domain.{FileContent, FileInfoton, Infoton, autoFixDcAndIndexTime, _}
import cmwell.driver.Dao
import cmwell.formats.JsonFormatter
import cmwell.fts._
import cmwell.irw.{IRWService, IRWServiceNativeImpl2}
import cmwell.stortill.Strotill._
import cmwell.common.formats.JsonSerializer
import com.typesafe.scalalogging.{LazyLogging, Logger}
import org.elasticsearch.action.bulk.BulkResponse
import org.elasticsearch.client.Requests
import org.elasticsearch.index.VersionType
import org.slf4j.LoggerFactory
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.{Future, Promise}
import cmwell.syntaxutils._
import cmwell.util.BoxedFailure
import cmwell.util.stream.{MapInitAndLast, SortedStreamsMergeBy}
import org.elasticsearch.action.ActionRequest
import org.joda.time.DateTime
import scala.concurrent.duration._
import scala.util.Try
/**
* Created by markz on 3/4/15.
*/
abstract class Operations(irw: IRWService, ftsService: FTSServiceOps) {
def verify(path: String, limit: Int): Future[Boolean]
def fix(path: String, retries: Int, limit: Int): Future[(Boolean, String)]
def rFix(path: String, retries: Int, parallelism: Int = 1): Future[Source[(Boolean, String), NotUsed]]
def info(path: String, limit: Int): Future[(CasInfo, EsExtendedInfo, ZStoreInfo)]
def fixDc(path: String, dc: String, retries: Int = 1, indexTimeOpt: Option[Long] = None): Future[Boolean]
def shutdown: Unit
}
object ProxyOperations {
lazy val specialInconsistenciesLogger: Logger = Logger(LoggerFactory.getLogger("cmwell.xfix"))
lazy val jsonFormatter = new JsonFormatter(identity)
def apply(irw: IRWService, ftsService: FTSServiceOps): ProxyOperations = {
new ProxyOperations(irw, ftsService)
}
//used when working with the REPL
def apply(clusterName: String, hostname: String): ProxyOperations = {
System.setProperty("ftsService.transportAddress", hostname)
System.setProperty("ftsService.clusterName", clusterName)
val dao = Dao("operation", "data", hostname, 10)
val irw = IRWService(dao)
val fts = FTSServiceES.getOne("ftsService.yml")
new ProxyOperations(irw, fts)
}
}
class ProxyOperations private (irw: IRWService, ftsService: FTSServiceOps)
extends Operations(irw, ftsService)
with LazyLogging {
val isNewBg = {
val nFts = ftsService.isInstanceOf[FTSServiceNew]
val nIrw = irw.isInstanceOf[IRWServiceNativeImpl2]
if ((nFts && !nIrw) || (!nFts && nIrw))
      throw new IllegalStateException(
        s"ProxyOperations must have IRW & FTS conforming in terms of `nbg`, got: ($nFts,$nIrw)"
      )
else nFts && nIrw
}
val s = Strotill(irw, ftsService)
import ProxyOperations.{specialInconsistenciesLogger => log}
val maxRetries: Int = 3 //TODO Take from some config
val retryWait: FiniteDuration = 613.millis //TODO Take from some config
def retry[T](task: => Future[T]) = cmwell.util.concurrent.retry[T](maxRetries, retryWait)(task)(global)
override def verify(path: String, limit: Int) = {
val casInfoFut = s.extractHistoryCas(path, limit)
s.extractHistoryEs(path, limit).flatMap { v =>
if (v.groupBy(_._1).exists(_._2.size != 1)) Future.successful(false)
else {
val esMapFut = cmwell.util.concurrent.travemp(v) {
case (uuid, index) => {
import cmwell.fts.EsSourceExtractor.Implicits.esMapSourceExtractor
ftsService.extractSource(uuid, index).map {
case (source, version) => {
val system = source.get("system").asInstanceOf[java.util.HashMap[String, Object]]
val current = system.get("current").asInstanceOf[Boolean]
uuid -> (index, current)
}
}
}
}
for {
casInfo <- casInfoFut
esInfo <- esMapFut
} yield
esInfo.size == casInfo.size &&
esInfo.count(_._2._2) < 2 && //must be either 0 (latest is deleted) or 1. cannot have more than 1 current
casInfo.forall { case (uuid, _) => esInfo.contains(uuid) }
}
}
}
type Timestamp = Long
type Uuid = String
type EsIndex = String
  // NOTE: in the old path, we must pull everything (without data!), sort in memory,
  // and only then fix it in small chunks.
  // In the new data path, the stream from cassandra is truly reactive and naturally sorted,
  // so once the new data path is in place, if we decide we still need "x-fix",
  // we should also pull ES data reactively in a sorted fashion.
override def rFix(path: String, retries: Int, parallelism: Int): Future[Source[(Boolean, String), NotUsed]] = {
type MergedByTimestamp = (Timestamp, Vector[(Timestamp, Uuid)], Vector[(Timestamp, Uuid, EsIndex)])
val cassandraPathsSource: Source[(Timestamp, Uuid), NotUsed] = irw.historyReactive(path)
ftsService.rInfo(path, paginationParams = DefaultPaginationParams, withHistory = true).map { rEsInfo =>
//TODO: we can have a sorted stream from ES, a la consume style.
val elasticsearchPathInfo: Source[(Timestamp, Uuid, EsIndex), NotUsed] = {
rEsInfo
.fold(Vector.empty[(Timestamp, Uuid, EsIndex)])(_ ++ _)
.mapConcat(_.sortBy(_._1))
}
Source.fromGraph(GraphDSL.create() { implicit b =>
{
import GraphDSL.Implicits._
val caS = b.add(cassandraPathsSource)
val esS = b.add(elasticsearchPathInfo)
val smb =
b.add(new SortedStreamsMergeBy[(Timestamp, Uuid), (Timestamp, Uuid, EsIndex), Timestamp](_._1, _._1))
val ial = b.add(new MapInitAndLast[MergedByTimestamp, (MergedByTimestamp, Boolean)](_ -> false, _ -> true))
val fix = b.add(Flow[(MergedByTimestamp, Boolean)].mapAsyncUnordered(parallelism) {
case ((t, v1, v2), isLast) =>
fixWith(path,
retries,
Future.successful(v1),
Future.successful(v2.map(tt => tt._2 -> tt._3)),
isContainsCurrent = isLast).map {
case (bool, "") => bool -> s"${cmwell.util.string.dateStringify(new DateTime(t))} [$t]"
case (bool, m) => bool -> s"$m, ${cmwell.util.string.dateStringify(new DateTime(t))} [$t]"
}
})
caS ~> smb.in0
esS ~> smb.in1
smb.out ~> ial ~> fix
SourceShape(fix.out)
}
})
}
}
override def fix(path: String, retries: Int, limit: Int): Future[(Boolean, String)] = {
val cUuids: Future[Vector[(Timestamp, Uuid)]] = retry(irw.historyAsync(path, limit))
val rawEsUuids: Future[Vector[(Uuid, EsIndex)]] = retry(
ftsService.info(path, paginationParams = DefaultPaginationParams, withHistory = true)
)
fixWith(path, retries, cUuids, rawEsUuids, isContainsCurrent = true)
}
private def fixWith(path: String,
retries: Int,
cUuids: Future[Vector[(Timestamp, Uuid)]],
rawEsUuids: Future[Vector[(Uuid, EsIndex)]],
isContainsCurrent: Boolean = false) = {
val esUuids: Future[Vector[(Uuid, EsIndex)]] = rawEsUuids.flatMap {
case v if v.isEmpty => Future.successful(Vector.empty)
case esus => {
val esusmap = esus.groupBy(_._1)
cmwell.util.concurrent.travector(esusmap.toSeq) {
case (uuid, Vector(uuidInSingleIndex)) => Future.successful(uuidInSingleIndex)
case (uuid, vec) => {
val oldestIngest = vec.minBy(_._2)
ftsService.purgeByUuidsAndIndexes(vec.filterNot(oldestIngest.eq)).map { bulkRes =>
if (bulkRes.hasFailures) {
log.error(bulkRes.toString)
}
oldestIngest
}
}
}
}
}
cUuids.flatMap { usFromC =>
esUuids.flatMap { usFromES =>
val (onlyC, onlyES, both, all) = {
val (pb, oc) = usFromC.partition(u => usFromES.exists(_._1 == u))
val (b, onlyES) = usFromES.partition(u => pb.exists(_._2 == u))
val onlyC = oc.map { case (ts, u) => u -> ts }.toMap
val both = {
val gByUuid = b.groupBy(_._1)
gByUuid.map {
case (u, esIdxs) =>
u -> (pb.find(_._2 == u).get._1 -> esIdxs.map(_._2))
}
}
val all = usFromC.map(_._2).toSet ++ usFromES.map(_._1)
(onlyC, onlyES.groupBy(_._1).mapValues(_.map(_._2)), both, all)
}
val foundFut: Future[Set[Either[Uuid, Infoton]]] = Future.traverse(all) { uuid =>
// if o.isEmpty, o.get will throw,
        // and the retry will kick off again
retry(irw.readUUIDAsync(uuid, cmwell.irw.QUORUM).map(o => Option(o.get)))
.recoverWith {
case t: Throwable => {
              log.error(s"could not retrieve uuid [$uuid] with QUORUM", t)
irw.readUUIDAsync(uuid, cmwell.irw.ONE).map {
case BoxedFailure(e) =>
                  log.error(s"could not retrieve uuid [$uuid] with ONE", e)
None
case box => box.toOption
}
}
}
.map(
infopt =>
infopt
.map {
case i if i.uuid != uuid => i.overrideUuid(uuid)
case i => i
}
.toEither(uuid)
)
}
foundFut.flatMap { findings =>
val found = findings.collect { case Right(i) => i }
val nFound = findings.collect { case Left(u) => u }
val purgeNotFound = {
val uuidFromEsButNotFound = usFromES.filter { case (u, _) => nFound(u) }
val uuidFromCasButNotFound = usFromC.filter { case (_, u) => nFound(u) }
val filteredOnlyES = onlyES.filter { case (u, _) => nFound(u) }
val filteredOnlyC = onlyC.filter { case (u, _) => nFound(u) }
val filteredBoth = both.filter { case (u, _) => nFound(u) }
purgeAndLog(path,
uuidFromEsButNotFound,
uuidFromCasButNotFound,
filteredBoth,
filteredOnlyES,
filteredOnlyC)
}
val fixFoundFut: Future[(Boolean, String)] = {
if (found.isEmpty) irw.purgePathOnly(path).map(_ => true -> "no UUIDs were found")
else {
//TODO 1: optimization: first find lastModified duplicates, and delete those, and only fix what is left
//TODO 2: if there is no indexTime (infoton was only written in cassandra, but not in ES),
//TODO: then if it's the latest (current) version, set indexTime = System.currentTimeMillis,
//TODO: and if it's history, set as lastModified, unless lastModified is 0, in this case, we should discuss what to do...
val foundAndFixedFut = Future.traverse(found)(fixAndUpdateInfotonInCas)
foundAndFixedFut.flatMap { foundAndFixed =>
//all infotons have valid indexTime since we already fixed it in `fixAndUpdateInfotonInCas`
lazy val cur = foundAndFixed.maxBy(_.indexTime.get)
Future
.traverse(foundAndFixed.groupBy(_.lastModified.getMillis)) {
case (_, is) if is.size == 1 => Future.successful(is.head)
case (_, is) => {
val maxiton = is.maxBy(_.indexTime.getOrElse(0L))
Future
.traverse(is.filterNot(_ == maxiton)) { i =>
log.debug(
s"purging an infoton only because lastModified collision: ${ProxyOperations.jsonFormatter.render(i)}"
)
// we are purging an infoton only because lastModified collision,
// but we should log the lost data (JsonFormatter which preserves the "last name" hash + quads data?)
val f1 = retry(ftsService.purgeByUuidsAndIndexes(onlyES(i.uuid).map(i.uuid -> _)))
.map[Infoton] { br =>
if (br.hasFailures) logEsBulkResponseFailures(br); i
}
.recover {
case e: Throwable =>
log.info(s"purge from es failed for uuid=${i.uuid} of path=${i.path}", e); i
}
val f2 = retry(
purgeFromCas(i.path, i.uuid, onlyC.getOrElse(i.uuid, i.lastModified.getMillis))
).map(_ => i).recover {
case e: Throwable =>
log.info(s"purge from cas failed for uuid=${i.uuid} of path=${i.path}", e); i
}
f1.flatMap(_ => f2)
}
.map(_ => maxiton)
}
}
.flatMap { noRepetitionsInLastModified =>
val writeToCasPathsFut =
Future.traverse(noRepetitionsInLastModified.filter(i => onlyES.contains(i.uuid))) {
onlyEsInfoton =>
val f = retry(irw.setPathHistory(onlyEsInfoton, cmwell.irw.QUORUM)).recover {
case e: Throwable =>
log.info(
s"setPathHistory failed for uuid=${onlyEsInfoton.uuid} of path=${onlyEsInfoton.path}",
e
)
onlyEsInfoton
}
if (isContainsCurrent && onlyEsInfoton == cur) {
f.flatMap(
i =>
retry(irw.setPathLast(i, cmwell.irw.QUORUM)).recover {
case e: Throwable =>
log.info(
s"setPathLast failed for uuid=${onlyEsInfoton.uuid} of path=${onlyEsInfoton.path}",
e
)
onlyEsInfoton
}
)
} else f
}
writeToCasPathsFut.flatMap { writeToCasPaths =>
// purge from ES all the found uuids in order to re-write 'em correctly
// this must be only after succeeding to write the uuid to cas.path,
// or else we might lose the uuid "handle", if something is wrong during the fix
val uFromEsThatAreFound = usFromES.filterNot { case (u, _) => nFound(u) }
val purgeFromEsFut = Future.traverse(uFromEsThatAreFound) {
// todo .recover + check .hasFailures
case (uuid, index) =>
retry(ftsService.purgeByUuidsAndIndexes(Vector(uuid -> index)))
}
purgeFromEsFut.flatMap { _ =>
//lastly, write infocolones for the good infotons
if (isNewBg) {
//TODO: if indexed in cm_well_latest, should we also take care of the update in cassandra for indexName?
val actions = foundAndFixed.map(
i =>
ESIndexRequest(createEsIndexActionForNewBG(i,
if (i.indexName.isEmpty) "cm_well_latest"
else i.indexName,
i eq cur),
None)
)
retry(ftsService.executeBulkIndexRequests(actions)).map(_ => (true, ""))
} else {
val actions = foundAndFixed.map {
case i if isContainsCurrent && (i eq cur) => createEsIndexAction(i, "cmwell_current_latest")
case i => createEsIndexAction(i, "cmwell_history_latest")
}
// todo .recover + check .hasFailures
retry(ftsService.executeBulkActionRequests(actions)).map(_ => (true, ""))
}
}
}
}
}
}
}
purgeNotFound.flatMap(_ => fixFoundFut).map(_ => true -> "")
}
}
}
}
private def logEsBulkResponseFailures(br: BulkResponse) = {
log.info(s"ES BulkResponse has failures: ${br.getItems.filter(_.isFailed).map(_.getFailureMessage).mkString(", ")}")
}
private def purgeAndLog(path: String,
uuidsFromEs: Vector[(Uuid, EsIndex)],
uuidsFromCas: Vector[(Timestamp, Uuid)],
both: Map[Uuid, (Timestamp, Vector[EsIndex])],
onlyES: Map[Uuid, Vector[EsIndex]],
onlyC: Map[Uuid, Timestamp]) = {
val logEsSourcesForMissingUuids = Future
.traverse(uuidsFromEs) {
case (uuidInEs, esIndex) =>
val f = retry(ftsService.extractSource(uuidInEs, esIndex)).recover {
case e: Throwable =>
log.error(s"could not retrieve ES sources for uuid=[$uuidInEs] from index=[$esIndex]", e)
"NO SOURCES AVAILABLE" -> -1L
}
f.map(uuidInEs -> _)
}
.map { uuidsToSourceTuplesVector =>
val uuidsToSourceMap = uuidsToSourceTuplesVector.toMap
both.foreach {
case (uuid, (timestamp, esIndex)) =>
log.info(
s"$uuid for $path was not found in cas.infoton, although it was found in cas.path[$timestamp] and also in ES${esIndex
.mkString("[", ",", "]")} with (source,version): ${uuidsToSourceMap(uuid)}"
)
}
onlyES.foreach {
case (uuid, index) =>
log.info(
s"$uuid for $path was not found in cas, although it was found in ES[$index] with (source,version): ${uuidsToSourceMap(uuid)}"
)
}
}
onlyC.foreach {
case (timestamp, uuid) =>
log.info(s"$uuid for $path was not found in cas.infoton, although it was found in cas.path[$timestamp]")
}
val p = Promise[Unit]()
//we want to purge only after we are done with the logging of the bad data
logEsSourcesForMissingUuids.onComplete { t =>
t.failed.foreach(err => log.error("logEsSourcesForMissingUuids future failed", err))
p.completeWith {
val f1 = {
if (uuidsFromEs.isEmpty) Future.successful(())
else
retry(ftsService.purgeByUuidsAndIndexes(uuidsFromEs)).recover {
case e: Throwable =>
              log.error(
                s"error occurred while purging=${uuidsFromEs.mkString("[", ",", "]")} for path=[$path] from ES",
e
)
}
}
val f2 = Future.traverse(uuidsFromCas) {
case (timestamp, uuid) =>
retry(irw.purgeFromPathOnly(path, timestamp, cmwell.irw.QUORUM)).recover {
case e: Throwable =>
                log.error(s"could not purge uuid=[$uuid] for path=[$path] from CAS.path with timestamp=[$timestamp]",
e)
}
}
f1.flatMap(_ => f2).map(_ => ())
}
}
p.future
}
private def createEsIndexActionForNewBG(infoton: Infoton,
index: String,
isCurrent: Boolean): ActionRequest[_ <: ActionRequest[_ <: AnyRef]] = {
val infotonWithUpdatedIndexTime = infoton.indexTime.fold {
infoton.replaceIndexTime(infoton.lastModified.getMillis)
}(_ => infoton)
val serializedInfoton = JsonSerializerForES.encodeInfoton(infotonWithUpdatedIndexTime, isCurrent)
Requests.indexRequest(index).`type`("infoclone").id(infoton.uuid).create(true).source(serializedInfoton)
}
private def createEsIndexAction(infoton: Infoton, index: String) =
Requests
.indexRequest(index)
.`type`("infoclone")
.id(infoton.uuid)
.create(true)
.versionType(VersionType.FORCE)
.version(1)
.source(JsonSerializer.encodeInfoton(infoton, omitBinaryData = true, toEs = true))
private def purgeFromCas(path: String, uuid: String, timestamp: Long) =
irw.purgeUuid(path, uuid, timestamp, isOnlyVersion = false, cmwell.irw.QUORUM)
// filling up `dc` and `indexTime`, since some old data do not have these fields
private def fixAndUpdateInfotonInCas(i: Infoton): Future[Infoton] =
autoFixDcAndIndexTime(i, Settings.dataCenter)
.fold(Future.successful(i))(
j =>
irw.writeAsyncDataOnly(j, cmwell.irw.QUORUM).recover {
case e: Throwable =>
          log.error(s"could not write to cassandra the infoton with uuid=[${j.uuid}] for path=[${j.path}]", e)
j
}
)
override def info(path: String, limit: Int): Future[(CasInfo, EsExtendedInfo, ZStoreInfo)] = {
val esinfo = s.extractHistoryEs(path, limit).flatMap { uuidIndexVec =>
cmwell.util.concurrent.travector(uuidIndexVec) {
case (uuid, index) =>
ftsService.extractSource(uuid, index).map {
case (source, version) => (uuid, index, version, source)
}
}
}
s.extractHistoryCas(path, limit).flatMap { v =>
val zsKeys = v.collect {
case (_, Some(FileInfoton(_, _, _, _, _, Some(FileContent(_, _, _, Some(dp))), _))) => dp
}.distinct
esinfo.map((v, _, zsKeys))
}
}
override def fixDc(path: String, dc: String, retries: Int, indexTimeOpt: Option[Long] = None): Future[Boolean] = {
import cmwell.domain.{addDc, addIndexInfo}
import com.datastax.driver.core.ConsistencyLevel._
import scala.concurrent.duration._
require(dc != "na", "fix-dc with \\"na\\"?")
require(!isNewBg, "fixDc not implemented for nbg path")
val task = cmwell.util.concurrent.retry(retries, 1.seconds) {
s.extractLastCas(path).flatMap { infoton =>
val dummyFut = Future.successful(())
val (infotonWithFixedIndexTime, addIdxTInCasFut) = {
val iIdxTOpt = infoton.indexTime
lazy val lmMillis = infoton.lastModified.getMillis
indexTimeOpt match {
case Some(t) if iIdxTOpt.isDefined && t == iIdxTOpt.get => infoton -> dummyFut
case None if iIdxTOpt.isDefined => infoton -> dummyFut
case Some(t) =>
addIndexTime(infoton, indexTimeOpt, force = true) ->
s.irwProxy.addIndexTimeToUuid(infoton.uuid, t, level = QUORUM)
case None =>
addIndexTime(infoton, Some(lmMillis), force = true) ->
s.irwProxy.addIndexTimeToUuid(infoton.uuid, lmMillis, level = QUORUM)
}
}
val (infotonWithFixedIndexTimeAndDc, addDcInCasFut) = infoton.dc match {
case dc2 if dc2 == dc => infotonWithFixedIndexTime -> dummyFut
case _ =>
addDc(infotonWithFixedIndexTime, dc, force = true) ->
s.irwProxy.addDcToUuid(infoton.uuid, dc, level = QUORUM)
}
Future.sequence(Seq(addIdxTInCasFut, addDcInCasFut)).map { _ => () =>
{
val esIndexAction = {
Requests
.indexRequest("cmwell_current_latest")
.`type`("infoclone")
.id(infotonWithFixedIndexTimeAndDc.uuid)
.create(true)
.versionType(VersionType.FORCE)
.version(1)
.source(JsonSerializer.encodeInfoton(infotonWithFixedIndexTimeAndDc, true, true))
}
s.ftsProxy.purgeByUuids(Seq(), Some(infotonWithFixedIndexTimeAndDc.uuid)).flatMap {
case br if br.hasFailures => Future.failed(new Exception("purging indexes failed"))
case _ =>
s.ftsProxy.executeBulkActionRequests(Seq(esIndexAction)).flatMap {
case br if br.hasFailures => Future.failed(new Exception("re-indexing failed"))
case _ => Future.successful(true)
}
}
}
}
}
}
task.flatMap { t =>
cmwell.util.concurrent.retry(retries, 1.seconds)(t())
}
}
override def shutdown: Unit = {
this.s.irwProxy.daoProxy.shutdown()
this.s.ftsProxy.close()
}
}
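// REPL usage sketch (illustrative only; cluster, host and path are placeholders): the
// companion's second apply is intended for ad-hoc maintenance from a console, wiring up
// IRW and FTS against a single host.
//
//   val ops = ProxyOperations("my-cluster", "localhost")
//   ops.verify("/example.org/some/path", limit = 100)             // Future[Boolean]
//   ops.fix("/example.org/some/path", retries = 3, limit = 100)   // Future[(Boolean, String)]
//   ops.info("/example.org/some/path", limit = 100)               // cas + es + zstore views
//   ops.shutdown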
|
TRnonodename/CM-Well
|
server/cmwell-stortill/src/main/scala/cmwell/stortill/Operations.scala
|
Scala
|
apache-2.0
| 26,033
|
package sharry.store.doobie
import java.time.format.DateTimeFormatter
import java.time.{Instant, LocalDate}
import sharry.common._
import sharry.common.syntax.all._
import doobie._
import doobie.implicits.legacy.instant._
import doobie.util.log.Success
import io.circe.{Decoder, Encoder}
import scodec.bits.ByteVector
trait DoobieMeta {
implicit val sqlLogging = DoobieMeta.DefaultLogging.handler
def jsonMeta[A](implicit d: Decoder[A], e: Encoder[A]): Meta[A] =
Meta[String].imap(str => str.parseJsonAs[A].fold(ex => throw ex, identity))(a =>
e.apply(a).noSpaces
)
implicit val metaUserState: Meta[AccountState] =
Meta[String].timap(AccountState.unsafe)(AccountState.asString)
implicit val metaAccountSource: Meta[AccountSource] =
Meta[String].timap(AccountSource.unsafe)(_.name)
implicit val metaPassword: Meta[Password] =
Meta[String].timap(Password(_))(_.pass)
implicit val metaIdent: Meta[Ident] =
Meta[String].timap(Ident.unsafe)(_.id)
implicit val ciIdentMeta: Meta[CIIdent] =
metaIdent.timap(CIIdent.apply)(_.value)
implicit val metaTimestamp: Meta[Timestamp] =
Meta[Instant].imap(Timestamp(_))(_.value)
implicit val metaLocalDate: Meta[LocalDate] =
Meta[String].timap(str => LocalDate.parse(str))(_.format(DateTimeFormatter.ISO_DATE))
implicit val metaDuration: Meta[Duration] =
Meta[Long].timap(n => Duration.seconds(n))(_.seconds)
implicit val metaByteSize: Meta[ByteSize] =
Meta[Long].timap(n => ByteSize(n))(_.bytes)
implicit val byteVectorMeta: Meta[ByteVector] =
Meta[String].timap(s => ByteVector.fromValidHex(s))(_.toHex)
}
object DoobieMeta extends DoobieMeta {
private val logger = org.log4s.getLogger
object TraceLogging {
implicit val handler =
LogHandler {
case e @ Success(_, _, _, _) =>
DoobieMeta.logger.trace("SQL success: " + e)
case e =>
DoobieMeta.logger.trace(s"SQL failure: $e")
}
}
object DefaultLogging {
implicit val handler =
LogHandler {
case e @ Success(_, _, _, _) =>
DoobieMeta.logger.trace("SQL success: " + e)
case e =>
DoobieMeta.logger.warn(s"SQL failure: $e")
}
}
}
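// Usage sketch (not part of the original file): jsonMeta derives a Meta instance for any
// type with circe codecs in scope, storing the column as its noSpaces JSON string and
// failing loudly on a malformed value at read time. `Alias` below is a hypothetical type.
//
//   case class Alias(id: Ident, name: String)
//   object Alias {
//     implicit val jsonDecoder: Decoder[Alias] = io.circe.generic.semiauto.deriveDecoder[Alias]
//     implicit val jsonEncoder: Encoder[Alias] = io.circe.generic.semiauto.deriveEncoder[Alias]
//   }
//   implicit val metaAlias: Meta[Alias] = jsonMeta[Alias]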
|
eikek/sharry
|
modules/store/src/main/scala/sharry/store/doobie/DoobieMeta.scala
|
Scala
|
gpl-3.0
| 2,221
|
/*
* Copyright 2013-2015 Websudos, Limited.
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* - Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* - Explicit consent must be obtained from the copyright owner, Outworkers Limited before any redistribution is made.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package com.websudos.phantom.builder.query.db.specialized
import com.datastax.driver.core.utils.UUIDs
import com.websudos.phantom.PhantomSuite
import com.websudos.phantom.dsl._
import com.websudos.phantom.tables._
import scala.concurrent.Await
import scala.concurrent.duration._
class EnumColumnTest extends PhantomSuite {
override def beforeAll(): Unit = {
super.beforeAll()
Await.result(TestDatabase.enumTable.create.ifNotExists().future(), 5.seconds)
Await.result(TestDatabase.namedEnumTable.create.ifNotExists().future(), 5.seconds)
}
it should "store a simple record and parse an Enumeration value back from the stored value" in {
val sample = EnumRecord(UUIDs.timeBased().toString, Records.TypeOne, None)
val chain = for {
insert <- TestDatabase.enumTable.store(sample).future()
get <- TestDatabase.enumTable.select.where(_.id eqs sample.name).one()
} yield get
whenReady(chain) {
res => {
res.value.enum shouldEqual sample.enum
res.value.optEnum shouldBe empty
}
}
}
it should "store a simple record and parse an Enumeration value and an Optional value back from the stored value" in {
val sample = EnumRecord(UUIDs.timeBased().toString, Records.TypeOne, Some(Records.TypeTwo))
val chain = for {
insert <- TestDatabase.enumTable.store(sample).future()
get <- TestDatabase.enumTable.select.where(_.id eqs sample.name).one()
} yield get
whenReady(chain) {
res => {
res.value.enum shouldEqual sample.enum
res.value.optEnum shouldBe defined
res.value.optEnum.value shouldBe Records.TypeTwo
}
}
}
it should "store a named record and parse an Enumeration value back from the stored value" in {
val sample = NamedEnumRecord(UUIDs.timeBased().toString, NamedRecords.One, None)
val chain = for {
insert <- TestDatabase.namedEnumTable.store(sample).future()
get <- TestDatabase.namedEnumTable.select.where(_.id eqs sample.name).one()
} yield get
whenReady(chain) {
res => {
res.value.enum shouldEqual sample.enum
res.value.optEnum shouldBe empty
}
}
}
it should "store a named record and parse an Enumeration value and an Optional value back from the stored value" in {
val sample = NamedEnumRecord(UUIDs.timeBased().toString, NamedRecords.One, Some(NamedRecords.Two))
val chain = for {
insert <- TestDatabase.namedEnumTable.store(sample).future()
get <- TestDatabase.namedEnumTable.select.where(_.id eqs sample.name).one()
} yield get
whenReady(chain) {
res => {
res.value.enum shouldEqual sample.enum
res.value.optEnum shouldBe defined
res.value.optEnum shouldEqual sample.optEnum
}
}
}
}
|
levinson/phantom
|
phantom-dsl/src/test/scala/com/websudos/phantom/builder/query/db/specialized/EnumColumnTest.scala
|
Scala
|
bsd-2-clause
| 4,290
|
/*
Copyright 2017-2020 Erik Erlandson
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package coulomb.infra
import shapeless._
import shapeless.syntax.singleton._
import singleton.ops._
import coulomb._
import coulomb.define._
trait UnitStringAST
object UnitStringAST {
case object Uni extends UnitStringAST
case class Def(d: UnitDefinition) extends UnitStringAST
case class Pre(p: UnitDefinition) extends UnitStringAST
case class Mul(l: UnitStringAST, r: UnitStringAST) extends UnitStringAST
case class Div(n: UnitStringAST, d: UnitStringAST) extends UnitStringAST
case class Pow(b: UnitStringAST, e: Int) extends UnitStringAST
def render(ast: UnitStringAST, f: UnitDefinition => String): String = ast match {
case FlatMul(t) => termStrings(t, f).mkString(" ")
case Div(Uni, d) => s"1/${paren(d, f)}"
case Div(n, Uni) => render(n, f)
case Div(n, d) => s"${paren(n, f)}/${paren(d, f)}"
case Pow(b, e) => {
val es = if (e < 0) s"($e)" else s"$e"
s"${paren(b, f)}^$es"
}
case Uni => "unitless"
case Def(d) => f(d)
case Pre(d) => f(d)
case _ => "!!!"
}
def paren(ast: UnitStringAST, f: UnitDefinition => String): String = {
val str = render(ast, f)
if (isAtomic(ast)) str else s"($str)"
}
object FlatMul {
def unapply(ast: UnitStringAST): Option[List[UnitStringAST]] = ast match {
case Mul(l, r) => {
val lflat = l match {
case FlatMul(lf) => lf
case _ => List(l)
}
val rflat = r match {
case FlatMul(rf) => rf
case _ => List(r)
}
Option(lflat ++ rflat)
}
case _ => None
}
}
def termStrings(terms: List[UnitStringAST], f: UnitDefinition => String): List[String] = terms match {
case Nil => Nil
case Pre(p) +: Def(d) +: tail => s"${f(p)}${f(d)}" :: termStrings(tail, f)
case term +: tail => s"${paren(term, f)}" :: termStrings(tail, f)
case _ => List("!!!")
}
def isAtomic(ast: UnitStringAST): Boolean = ast match {
case Uni => true
case Pre(_) => true
case Def(_) => true
case Pow(Def(_), _) => true
case Pow(FlatMul(Pre(_) +: Def(_) +: Nil), _) => true
case FlatMul(Pre(_) +: Def(_) +: Nil) => true
case _ => false
}
}
trait HasUnitStringAST[U] {
def ast: UnitStringAST
override def toString = ast.toString
}
object HasUnitStringAST {
import UnitStringAST._
implicit def evidence0: HasUnitStringAST[Unitless] =
new HasUnitStringAST[Unitless] { val ast = Uni }
implicit def evidence1[P](implicit d: DerivedUnit[P, Unitless]): HasUnitStringAST[P] =
new HasUnitStringAST[P] { val ast = Pre(d) }
implicit def evidence2[U, D](implicit d: DerivedUnit[U, D], nu: D =:!= Unitless): HasUnitStringAST[U] =
new HasUnitStringAST[U] { val ast = Def(d) }
implicit def evidence3[U](implicit bu: GetBaseUnit[U]): HasUnitStringAST[U] =
new HasUnitStringAST[U] { val ast = Def(bu.bu) }
implicit def evidence4[L, R](implicit l: HasUnitStringAST[L], r: HasUnitStringAST[R]): HasUnitStringAST[%*[L, R]] =
new HasUnitStringAST[%*[L, R]] { val ast = Mul(l.ast, r.ast) }
implicit def evidence5[N, D](implicit n: HasUnitStringAST[N], d: HasUnitStringAST[D]): HasUnitStringAST[%/[N, D]] =
new HasUnitStringAST[%/[N, D]] { val ast = Div(n.ast, d.ast) }
implicit def evidence6[B, E](implicit b: HasUnitStringAST[B], e: XIntValue[E]): HasUnitStringAST[%^[B, E]] =
new HasUnitStringAST[%^[B, E]] { val ast = Pow(b.ast, e.value) }
}
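// Rendering sketch (illustrative, not in the original file): render can be exercised on a
// hand-built AST. The name function is only consulted for Def/Pre nodes, so a placeholder
// suffices here; in normal use the AST comes from an implicit HasUnitStringAST[U].
//
//   import UnitStringAST._
//   render(Div(Uni, Uni), _ => "?")   // "1/unitless"
//   render(Pow(Uni, -2), _ => "?")    // "unitless^(-2)"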
|
erikerlandson/coulomb
|
coulomb/shared/src/main/scala/coulomb/infra/unitstring.scala
|
Scala
|
apache-2.0
| 3,984
|
package sexamples.helloworld;
import se.sics.kompics.sl._
object Main {
def main(args: Array[String]): Unit = {
Kompics.createAndStart(classOf[HelloComponent]);
Kompics.waitForTermination();
}
}
|
kompics/kompics-scala
|
docs/src/main/scala/sexamples/helloworld/Main.scala
|
Scala
|
gpl-2.0
| 209
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package whisk.core.limits
import scala.concurrent.duration.DurationInt
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import common.TestHelpers
import common.TestUtils
import common.Wsk
import common.WskProps
import common.WskTestHelpers
import whisk.core.entity._
import spray.json.DefaultJsonProtocol._
import spray.json._
import whisk.http.Messages
import whisk.core.entity.TimeLimit
/**
* Tests for action duration limits. These tests require a deployed backend.
*/
@RunWith(classOf[JUnitRunner])
class MaxActionDurationTests extends TestHelpers with WskTestHelpers {
implicit val wskprops = WskProps()
val wsk = new Wsk
  // swift is not tested, because it uses the same proxy as python
"node-, python, and java-action" should "run up to the max allowed duration" in withAssetCleaner(wskprops) {
(wp, assetHelper) =>
      // When you add more runtimes, keep in mind how many actions can be processed in parallel by the Invokers!
Map("node" -> "helloDeadline.js", "python" -> "timedout.py", "java" -> "timedout.jar").par.map {
case (k, name) =>
assetHelper.withCleaner(wsk.action, name) {
if (k == "java") { (action, _) =>
action.create(
name,
Some(TestUtils.getTestActionFilename(name)),
timeout = Some(TimeLimit.MAX_DURATION),
main = Some("TimedOut"))
} else { (action, _) =>
action.create(name, Some(TestUtils.getTestActionFilename(name)), timeout = Some(TimeLimit.MAX_DURATION))
}
}
val run = wsk.action.invoke(name, Map("forceHang" -> true.toJson))
withActivation(
wsk.activation,
run,
initialWait = 1.minute,
pollPeriod = 1.minute,
totalWait = TimeLimit.MAX_DURATION + 1.minute) { activation =>
activation.response.status shouldBe ActivationResponse.messageForCode(ActivationResponse.ApplicationError)
activation.response.result shouldBe Some(
JsObject("error" -> Messages.timedoutActivation(TimeLimit.MAX_DURATION, false).toJson))
activation.duration.toInt should be >= TimeLimit.MAX_DURATION.toMillis.toInt
}
}
}
}
|
tysonnorris/openwhisk
|
tests/src/test/scala/whisk/core/limits/MaxActionDurationTests.scala
|
Scala
|
apache-2.0
| 3,089
|
package com.twitter.zipkin.storage.mongodb
import java.nio.ByteBuffer
import java.util.Date
import java.util.concurrent.TimeUnit
import java.util.concurrent.atomic.AtomicBoolean
import com.mongodb.casbah.Imports._
import com.mongodb.casbah.commons.conversions.scala._
import com.twitter.finagle.jsr166y.ForkJoinPool
import com.twitter.util._
import com.twitter.zipkin.common._
import com.twitter.zipkin.Constants
import com.twitter.zipkin.storage.mongodb.utils.{EnsureIndexes, Index}
import com.twitter.zipkin.storage.{IndexedTraceId, SpanStore, TraceIdDuration}
private object RegisterMongoDBSerializers {
private[this] lazy val registerSerializers = new Serializers {}.register()
def apply(): Unit = registerSerializers
}
private class Asyncifier extends Closable{
private[this] val pool = new ForkJoinPool()
private[this] val closed = new AtomicBoolean(false)
type JavaFuture[T] = java.util.concurrent.Future[T]
def apply[U](func: => U): Future[U] = {
if (!closed.get()) {
val promise = Promise[U]()
pool.execute(
new Runnable {
override def run(): Unit = promise.update(Try(func))
})
promise
} else {
Future.exception(new IllegalStateException("This Asyncifier is already closed"))
}
}
def close(deadline: Time): Future[Unit] = {
val promise = Promise[Unit]()
closed.set(true)
new Thread {
override def run(): Unit = promise.update(
Try(
pool.awaitTermination(deadline.sinceNow.inNanoseconds, TimeUnit.NANOSECONDS)
))
}.start()
promise
}
}
private[mongodb] trait MongoDBSpanStoreUtils {
private[mongodb] def timestampsFromMongoObject(obj: MongoDBObject): Seq[Long] =
obj.as[MongoDBList]("annotations").map((x) => new MongoDBObject(x.asInstanceOf[DBObject])).map(
_.as[Long]("timestamp")
)
private[mongodb] def startTimeStampFromMongoObject(obj: MongoDBObject): Long = timestampsFromMongoObject(obj).min
private[mongodb] def endTimeStampFromMongoObject(obj: MongoDBObject): Long = timestampsFromMongoObject(obj).max
private[mongodb] def dbObjectToIndexedTraceId(dbobj: DBObject): IndexedTraceId = {
val obj = new MongoDBObject(dbobj)
IndexedTraceId(
obj.as[Long]("traceId"),
startTimeStampFromMongoObject(obj)
)
}
private[mongodb] def toByteArray(buffer: ByteBuffer): Array[Byte] = {
val arr = new Array[Byte](buffer.remaining())
val copy = buffer
      .duplicate() // get() advances the ByteBuffer position; duplicate() makes a shallow copy, so the original buffer is unaffected
copy.get(arr)
arr
}
}
class MongoDBSpanStore(url: String, database: String, spanTTL: Duration) extends SpanStore with MongoDBSpanStoreUtils {
RegisterMongoDBSerializers()
private[this] val makeAsync = new Asyncifier
private[this] val client = MongoClient(MongoClientURI(url))
private[this] val db = client(database)
private[this] val traces = db("traces")
private[this] val servicesIndex = db("servicesIndex")
EnsureIndexes(traces)(
Index.ExpiresAt("expiresAt"),
Index.Unique("traceId"),
Index.Generic("spans.name"),
Index.Generic("annotations.timestamp"),
Index.Generic("annotations.host.service"),
Index.Generic("binaryAnnotations.host.service"),
Index.Generic("binaryAnnotations.key"),
Index.Generic("binaryAnnotations.value")
)
EnsureIndexes(servicesIndex)(Index.ExpiresAt("expiresAt"), Index.Unique("serviceName"))
override def getTimeToLive(traceId: Long): Future[Duration] = makeAsync {
Time(traces.findOne(MongoDBObject("traceId" -> traceId)).get.apply("expiresAt").asInstanceOf[Date]).sinceNow
}
// Used for pinning
override def setTimeToLive(traceId: Long, ttl: Duration): Future[Unit] = makeAsync {
traces.update(
MongoDBObject("traceId" -> traceId), MongoDBObject(
"$set" -> MongoDBObject(
"expiresAt" -> ttl.fromNow.toDate
)
))
}
/**
* Get the trace ids for this particular service and if provided, span name.
* Only return maximum of limit trace ids from before the endTs.
*/
override def getTraceIdsByName(
serviceName: String,
spanName: Option[String],
endTs: Long,
limit: Int): Future[Seq[IndexedTraceId]] =
makeAsync {
traces.find(
MongoDBObject(
List(
spanName.map(
name => List(
"spans.name" -> name
)).getOrElse(List()),
List(
"$or" -> MongoDBList(
MongoDBObject("annotations.host.service" -> serviceName),
MongoDBObject("binaryAnnotations.host.service" -> serviceName)
),
"annotations.timestamp" -> MongoDBObject(
"$lte" -> endTs
)
)
).flatten)).limit(limit).map(dbObjectToIndexedTraceId(_)).toSeq
}
private[this] def optionalGetSpansByTraceId(traceId: Long): Future[Option[Seq[Span]]] = makeAsync {
traces.findOne(MongoDBObject("traceId" -> traceId)).map {
(rawTrace) =>
val trace = new MongoDBObject(rawTrace)
val annotations = new MongoDBList(trace("annotations").asInstanceOf[BasicDBList])
.map((x) => new MongoDBObject(x.asInstanceOf[DBObject]))
.toSeq
val binaryAnnotations = new MongoDBList(trace("binaryAnnotations").asInstanceOf[BasicDBList])
.map((x) => new MongoDBObject(x.asInstanceOf[DBObject]))
.toSeq
def getHostOption(obj: MongoDBObject): Option[Endpoint] =
obj.getAs[DBObject]("host").map(new MongoDBObject(_)).map(
(host) =>
Endpoint(
ipv4 = host.as[Int]("ipv4"),
port = host.as[Int]("port").toShort,
serviceName = host.as[String]("service")
)
)
val rawSpans = new MongoDBList(trace("spans").asInstanceOf[BasicDBList])
.map((x) => new MongoDBObject(x.asInstanceOf[DBObject]))
val spanMap = rawSpans.groupBy(_.as[Long]("id"))
spanMap.map {
case (spanId, spans) =>
Span(
traceId,
spans.map(_.getAs[String]("name")).filter(_.nonEmpty).map(_.get).filter(_.nonEmpty).head,
spanId,
spans.map(_.getAs[Long]("parentId")).filter(_.nonEmpty).map(_.get).headOption,
annotations.filter(_.as[Long]("span") == spanId).map(
(obj) => Annotation(
timestamp = obj.as[Long]("timestamp"),
value = obj.as[String]("value"),
duration = obj.getAs[Long]("durationInNanoseconds").map(Duration.fromNanoseconds(_)),
host = getHostOption(obj)
)).toList,
binaryAnnotations.filter(_.as[Long]("span") == spanId).map(
(obj) => BinaryAnnotation(
key = obj.as[String]("key"),
value = ByteBuffer.wrap(obj.as[Array[Byte]]("value")),
annotationType = AnnotationType.fromInt(obj.as[Int]("kind")),
host = getHostOption(obj)
))
)
}.toSeq
}
}
override def getSpansByTraceId(traceId: Long): Future[Seq[Span]] = optionalGetSpansByTraceId(traceId).map(_.get)
/**
* Get the trace ids for this annotation between the two timestamps. If value is also passed we expect
* both the annotation key and value to be present in index for a match to be returned.
* Only return maximum of limit trace ids from before the endTs.
*/
override def getTraceIdsByAnnotation(
serviceName: String, annotation: String, value: Option[ByteBuffer],
endTs: Long, limit: Int): Future[Seq[IndexedTraceId]] =
if (Constants.CoreAnnotations.contains(annotation)) Future(Seq())
else makeAsync {
val serviceClauses = MongoDBList(
MongoDBObject("annotations.host.service" -> serviceName.toLowerCase),
MongoDBObject("binaryAnnotations.host.service" -> serviceName.toLowerCase)
)
val queryParts = List(
value.map(
(buffer) => List(
"binaryAnnotations" -> MongoDBObject(
"$elemMatch" -> MongoDBObject(
"key" -> annotation,
"value" -> toByteArray(buffer)
)
)
)
).getOrElse(List()),
List(
value match {
case None => "$and" -> MongoDBList(
MongoDBObject("$or" -> serviceClauses),
MongoDBObject(
"$or" -> MongoDBList(
MongoDBObject("annotations.value" -> annotation),
MongoDBObject("binaryAnnotations.key" -> annotation)
))
)
case Some(_) => "$or" -> serviceClauses
},
"annotations.timestamp" -> MongoDBObject(
"$lte" -> endTs
)
)
)
traces.find(MongoDBObject(queryParts.flatten)).limit(limit).map(dbObjectToIndexedTraceId(_)).toSeq
}
override def tracesExist(traceIds: Seq[Long]): Future[Set[Long]] =
Future.collect(
traceIds.map(
(traceId) =>
makeAsync(if (traces.count(MongoDBObject("traceId" -> traceId), limit = 1) == 0) None else Some(traceId))
)).map {
maybeTraceIds =>
val foundTraceIds = maybeTraceIds.collect { case Some(id) => id }
foundTraceIds.toSet
}
/**
* Get the available trace information from the storage system.
* Spans in trace should be sorted by the first annotation timestamp
* in that span. First event should be first in the spans list.
*
* The return list will contain only spans that have been found, thus
* the return list may not match the provided list of ids.
*/
override def getSpansByTraceIds(traceIds: Seq[Long]): Future[Seq[Seq[Span]]] =
Future
.collect(traceIds.map(optionalGetSpansByTraceId(_)))
.map(_ /*Option[Seq[Span]]*/ .filter(_.nonEmpty).map(_.get))
/**
* Get all the span names for a particular service, as far back as the ttl allows.
*/
override def getSpanNames(service: String): Future[Set[String]] = makeAsync {
servicesIndex.findOne(MongoDBObject("serviceName" -> service.toLowerCase)).map(new MongoDBObject(_))
.map(_.as[MongoDBList]("methods").map(_.asInstanceOf[String]).toSet).getOrElse(Set())
}
/**
* Fetch the duration or an estimate thereof from the traces.
* Duration returned in micro seconds.
*/
override def getTracesDuration(traceIds: Seq[Long]): Future[Seq[TraceIdDuration]] =
Future.collect(
traceIds.map(
(traceId) =>
makeAsync {
val trace = new MongoDBObject(traces.findOne(MongoDBObject("traceId" -> traceId)).get)
val timestamp = startTimeStampFromMongoObject(trace)
TraceIdDuration(
traceId = traceId,
duration = endTimeStampFromMongoObject(trace) - timestamp,
startTimestamp = timestamp
)
}
).toSeq)
/**
* Get all the service names for as far back as the ttl allows.
*/
override def getAllServiceNames: Future[Set[String]] = makeAsync {
servicesIndex.find().map(new MongoDBObject(_)).map(_.as[String]("serviceName")).toSet
}
// store a list of spans
override def apply(spans: Seq[Span]): Future[Unit] = Future.join(
(
spans map {
(span) =>
makeAsync {
val hostToMongoObject: PartialFunction[Endpoint, MongoDBObject] = {
case Endpoint(ipv4, port, service) => MongoDBObject(
"ipv4" -> ipv4,
"port" -> port,
"service" -> service.toLowerCase
)
}
traces.update(
MongoDBObject("traceId" -> span.traceId),
MongoDBObject(
"$set" -> MongoDBObject(
"expiresAt" -> spanTTL.fromNow.toDate //always update TTL on update
),
"$addToSet" -> MongoDBObject(
"spans" -> MongoDBObject(
"id" -> span.id,
"parentId" -> span.parentId,
"name" -> span.name
)
),
"$pushAll" -> MongoDBObject(
"annotations" -> span.annotations.map {
case Annotation(timestamp, value, host, duration) => MongoDBObject(
"span" -> span.id,
"timestamp" -> timestamp,
"value" -> value,
"durationInNanoseconds" -> duration.map(_.inNanoseconds),
"host" -> host.map(hostToMongoObject)
)
},
"binaryAnnotations" -> span.binaryAnnotations.map {
case BinaryAnnotation(key, value, kind, host) => MongoDBObject(
"span" -> span.id,
"key" -> key,
"value" -> toByteArray(value),
"kind" -> kind.value,
"host" -> host.map(hostToMongoObject)
)
}
)
),
upsert = true
)
//Note: we don't really care that this isn't transactional
span.name match {
case "" => ()
case name => span.serviceNames filter (_.nonEmpty) foreach {
(serviceName) =>
servicesIndex.update(
MongoDBObject(
"serviceName" -> serviceName.toLowerCase
),
MongoDBObject(
"$addToSet" -> MongoDBObject(
"methods" -> name
),
"$set" -> MongoDBObject(
"expiresAt" -> spanTTL.fromNow.toDate //always update TTL on update
)
),
upsert = true
)
}
}
}
})
.toList) //Eagerly evaluate the map, so that all futures are immediately added to the thread pool queue, despite the
// implementation of Future.join
override def close(deadline: Time): Future[Unit] = makeAsync.close(deadline).onSuccess(
(_: Unit) => {
client.close()
})
}
|
travisbrown/zipkin
|
zipkin-mongodb/src/main/scala/com/twitter/zipkin/storage/mongodb/MongoDBSpanStore.scala
|
Scala
|
apache-2.0
| 14,389
|
package controllers
import javax.inject.Inject
import akka.actor._
import akka.stream.Materializer
import play.api.mvc._
import play.api.libs.streams._
import autowire._
import upickle.default._
import shared.api.{Api, WSRequest, WSResponse}
import scala.concurrent.ExecutionContext.Implicits.global
class WebSocketCtrl @Inject() (implicit system: ActorSystem, materializer: Materializer) {
def socket = WebSocket.accept[String, String] { request =>
ActorFlow.actorRef(out => MyWebSocketActor.props(out))
}
}
object MyApiImpl extends Api{
def doThing(i: Int, s: String) = Seq.fill(i)(s)
}
object MyServerApi extends Server[String, Reader,Writer]{
def write[Result: Writer](r: Result) = upickle.default.write(r)
def read[Result: Reader](p: String) = upickle.default.read[Result](p)
val routes = MyServerApi.route[Api](MyApiImpl)
}
object MyWebSocketActor {
def props(out: ActorRef) = Props(new MyWebSocketActor(out))
}
class MyWebSocketActor(out: ActorRef) extends Actor {
def receive = {
case msg: String =>
val request = upickle.default.read[WSRequest[MyServerApi.Request]](msg)
MyServerApi.routes.apply(request.data).foreach((value) => out ! upickle.default.write(WSResponse(request.requestId,value)))
}
}
|
OmarCastro/ShellHive-scala
|
server/app/controllers/WebsocketCtrl.scala
|
Scala
|
mit
| 1,256
|
/* __ *\\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\\___/_/ |_/____/_/ | | **
** |/ **
\\* */
package scala.swing
package event
/**
* An event that indicates a change in a selection such as in a list view or a table.
*/
trait SelectionEvent
/**
* An event that indicates a selection of a range of indices.
*/
trait ListSelectionEvent extends SelectionEvent {
def range: Range
}
case class SelectionChanged(override val source: Component) extends ComponentEvent with SelectionEvent
object ListSelectionChanged {
def unapply[A](e: ListSelectionChanged[A]): Option[(ListView[A], Range, Boolean)] =
Some((e.source, e.range, e.live))
}
class ListSelectionChanged[A](override val source: ListView[A], val range: Range, val live: Boolean)
extends SelectionChanged(source) with ListEvent[A]
|
SethTisue/scala-swing
|
src/main/scala/scala/swing/event/SelectionEvent.scala
|
Scala
|
bsd-3-clause
| 1,236
|
// scalac -deprecation Misc.scala
import scala.annotation.unchecked._
import java.awt.Font
import java.awt.Font._
object Misc {
@deprecated(message = "Use factorial(n: BigInt) instead")
def factorial(n: Int): Int = if (n <= 0) 1 else n * factorial(n - 1)
// Recursive call causes a deprecation warning
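  // A sketch of the suggested replacement (not in the original file): a BigInt version
  // avoids the Int overflow that the deprecated variant hits for n > 12.
  //   def factorial(n: BigInt): BigInt = if (n <= 0) 1 else n * factorial(n - 1)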
def draw(@deprecatedName('sz) size: Int, style: Int = PLAIN) {
val font = new Font("Dialog", style, size)
// ...
}
draw(sz = 12)
def process(lst: List[Int]) =
(lst: @unchecked) match {
case head :: tail => head + tail.length
}
}
trait Comparable[-T] extends
java.lang.Comparable[T @uncheckedVariance]
|
yeahnoob/scala-impatient-2e-code
|
src/ch15/sec07/Misc.scala
|
Scala
|
gpl-3.0
| 652
|
package hclu.hreg.api
import java.util.UUID
import javax.servlet.http.HttpServletRequest
import hclu.hreg.BaseServletSpec
import hclu.hreg.dao.{ContactDao, UserDao}
import hclu.hreg.domain.Contact
import hclu.hreg.service.contact.ContactService
import hclu.hreg.service.email.DummyEmailService
import hclu.hreg.service.templates.EmailTemplatingEngine
import hclu.hreg.service.user.{RegistrationDataValidator, UserService}
import hclu.hreg.test.{FlatSpecWithSql}
import org.json4s.JsonDSL._
import org.mockito.Matchers
import org.mockito.Matchers._
import org.mockito.Mockito._
import scala.concurrent.ExecutionContext.Implicits.global
class ContactsServletSpec extends BaseServletSpec with FlatSpecWithSql {
var servlet: ContactsServlet = _
def onServletWithMocks(testToExecute: (ContactService) => Unit) = {
val dao = new ContactDao(sqlDatabase)
val userDao = new UserDao(sqlDatabase)
dao.add(Contact(UUID.randomUUID(), Some("first1"), Some("last1"), "contact1@sml.com"))
dao.add(Contact(UUID.randomUUID(), Some("first2"), Some("last2"), "contact2@sml.com"))
val userService = spy(new UserService(userDao, new RegistrationDataValidator(), new DummyEmailService(), new EmailTemplatingEngine))
val contactService = new ContactService(dao)
servlet = new ContactsServlet(contactService, userService) {
override protected def isAuthenticated(implicit request: HttpServletRequest): Boolean = true
}
addServlet(servlet, "/*")
testToExecute(contactService)
}
"POST /" should "register new contact" in {
onServletWithMocks {
(contactService) =>
post("/", mapToJson(Map("firstname" -> "firstname", "lastname" -> "lastname", "email" -> "newUser@sml.com")), defaultJsonHeaders) {
status should be (201)
}
}
}
"POST /" should "reject with invalid email" in {
onServletWithMocks {
(contactService) =>
post("/", mapToJson(Map("firstname" -> "firstname", "lastname" -> "lastname", "email" -> "newUser")), defaultJsonHeaders) {
val field: Option[String] = (stringToJson(body) \ "validationErrors" \ "field").extractOpt[String]
val msg: Option[String] = (stringToJson(body) \ "validationErrors" \ "msg").extractOpt[String]
status should be (400)
field should be (Some("email"))
msg should be (Some("invalid email format"))
}
}
}
"POST /" should "reject with no email" in {
onServletWithMocks {
(contactService) =>
post("/", mapToJson(Map("firstname" -> "firstname", "lastname" -> "lastname")), defaultJsonHeaders) {
val field: Option[String] = (stringToJson(body) \ "validationErrors" \ "field").extractOpt[String]
val msg: Option[String] = (stringToJson(body) \ "validationErrors" \ "msg").extractOpt[String]
status should be (400)
field should be (Some("email"))
msg should be (Some("no email provided"))
}
}
}
"GET /" should "fetch all the contacts" in {
onServletWithMocks {
(contactService) =>
get("/") {
status should be(200)
body should include(""""firstname":"first1","lastname":"last1","email":"contact1@sml.com"}""")
body should include(""""firstname":"first2","lastname":"last2","email":"contact2@sml.com"}""")
}
}
}
"GET /:id" should "fetch all contact by id" in {
onServletWithMocks {
(contactService) =>
val expected = contactService.findAll.futureValue.head
get(s"/${expected.id}") {
status should be(200)
body should include(""""firstname":"first1","lastname":"last1","email":"contact1@sml.com"}""")
}
}
}
}
|
tsechov/hclu-registry
|
backend/src/test/scala/hclu/hreg/api/ContactsServletSpec.scala
|
Scala
|
apache-2.0
| 3,715
|
/*
* Copyright 2011 Hui Wen Han, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package me.huiwen.prefz
case class Page(count: Int, cursor: Cursor)
|
huiwenhan/PrefStore
|
src/main/scala/me/huiwen/prefz/Page.scala
|
Scala
|
apache-2.0
| 671
|
package model.dtos
case class OrganizationStatsGrouped(orderId: Int, category: String, stats: List[OrganizationStats])
|
scify/DemocracIT-Web
|
app/model/dtos/OrganizationStatsGrouped.scala
|
Scala
|
apache-2.0
| 126
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.util
import java.time.ZoneId
import org.apache.arrow.vector.types.pojo.ArrowType
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.util.DateTimeTestUtils.LA
import org.apache.spark.sql.types._
class ArrowUtilsSuite extends SparkFunSuite {
def roundtrip(dt: DataType): Unit = {
dt match {
case schema: StructType =>
assert(ArrowUtils.fromArrowSchema(ArrowUtils.toArrowSchema(schema, null)) === schema)
case _ =>
roundtrip(new StructType().add("value", dt))
}
}
test("simple") {
roundtrip(BooleanType)
roundtrip(ByteType)
roundtrip(ShortType)
roundtrip(IntegerType)
roundtrip(LongType)
roundtrip(FloatType)
roundtrip(DoubleType)
roundtrip(StringType)
roundtrip(BinaryType)
roundtrip(DecimalType.SYSTEM_DEFAULT)
roundtrip(DateType)
roundtrip(YearMonthIntervalType())
roundtrip(DayTimeIntervalType())
val tsExMsg = intercept[UnsupportedOperationException] {
roundtrip(TimestampType)
}
assert(tsExMsg.getMessage.contains("timeZoneId"))
}
test("timestamp") {
def roundtripWithTz(timeZoneId: String): Unit = {
val schema = new StructType().add("value", TimestampType)
val arrowSchema = ArrowUtils.toArrowSchema(schema, timeZoneId)
val fieldType = arrowSchema.findField("value").getType.asInstanceOf[ArrowType.Timestamp]
assert(fieldType.getTimezone() === timeZoneId)
assert(ArrowUtils.fromArrowSchema(arrowSchema) === schema)
}
roundtripWithTz(ZoneId.systemDefault().getId)
roundtripWithTz("Asia/Tokyo")
roundtripWithTz("UTC")
roundtripWithTz(LA.getId)
}
test("array") {
roundtrip(ArrayType(IntegerType, containsNull = true))
roundtrip(ArrayType(IntegerType, containsNull = false))
roundtrip(ArrayType(ArrayType(IntegerType, containsNull = true), containsNull = true))
roundtrip(ArrayType(ArrayType(IntegerType, containsNull = false), containsNull = true))
roundtrip(ArrayType(ArrayType(IntegerType, containsNull = true), containsNull = false))
roundtrip(ArrayType(ArrayType(IntegerType, containsNull = false), containsNull = false))
}
test("struct") {
roundtrip(new StructType())
roundtrip(new StructType().add("i", IntegerType))
roundtrip(new StructType().add("arr", ArrayType(IntegerType)))
roundtrip(new StructType().add("i", IntegerType).add("arr", ArrayType(IntegerType)))
roundtrip(new StructType().add(
"struct",
new StructType().add("i", IntegerType).add("arr", ArrayType(IntegerType))))
}
}
|
shaneknapp/spark
|
sql/catalyst/src/test/scala/org/apache/spark/sql/util/ArrowUtilsSuite.scala
|
Scala
|
apache-2.0
| 3,397
|
//: ----------------------------------------------------------------------------
//: Copyright (C) 2015 Verizon. All Rights Reserved.
//:
//: Licensed under the Apache License, Version 2.0 (the "License");
//: you may not use this file except in compliance with the License.
//: You may obtain a copy of the License at
//:
//: http://www.apache.org/licenses/LICENSE-2.0
//:
//: Unless required by applicable law or agreed to in writing, software
//: distributed under the License is distributed on an "AS IS" BASIS,
//: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//: See the License for the specific language governing permissions and
//: limitations under the License.
//:
//: ----------------------------------------------------------------------------
package funnel
package chemist
package aws
import scalaz.concurrent.Task
/**
* This default implementation does not properly handle the various upgrade
* cases that you might encounter when migrating from one set of clusters to
* another; it is instead provided as a default where all available flasks
* and chemists are "active". There is a set of upgrade scenarios in which
* you do not want to mirror from an existing flask cluster, so those flasks are
* neither mirroring targets nor active flasks.
*
* This function has a Task return type so that extensions can do I/O
* if they need to (clearly a local cache would be needed in that case).
*
* It is highly recommended that you override this with your own classification logic.
*/
object DefaultClassifier extends Classifier[AwsInstance]{
import Classification._
private[funnel] val Flask = "flask"
private[funnel] val Chemist = "chemist"
def isApplication(prefix: String)(i: AwsInstance): Boolean =
i.application.exists(_.name.trim.toLowerCase.startsWith(prefix))
def isFlask(i: AwsInstance): Boolean =
isApplication(Flask)(i)
def isChemist(i: AwsInstance): Boolean =
isApplication(Chemist)(i)
val task: Task[AwsInstance => Classification] = {
Task.delay {
instance =>
if (isFlask(instance)) ActiveFlask
else if (isChemist(instance)) ActiveChemist
else ActiveTarget
}
}
}
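// Usage sketch: the classifier yields a Task producing a pure classification
// function, so callers typically run the Task once and apply the resulting
// function per instance. `DefaultClassifierExample` and `classify` are
// hypothetical names added for illustration, not part of the original file.
object DefaultClassifierExample {
  def classify(instance: AwsInstance): Task[Classification] =
    DefaultClassifier.task.map(f => f(instance))
}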
|
neigor/funnel
|
chemist-aws/src/main/scala/DefaultClassifier.scala
|
Scala
|
apache-2.0
| 2,221
|
package no.nr.edvard.osiris.model
import no.nr.edvard.osiris.util.multireduce.Reducer
import util.Random
import collection.{immutable, mutable}
class MethodCorpus(
private val store: Set[JavaMethod] = Set[JavaMethod]()
) extends Traversable[JavaMethod] {
def +(jm: JavaMethod) = new MethodCorpus(store + jm)
def -(jm: JavaMethod) = new MethodCorpus(store - jm)
def ++(other: MethodCorpus) = new MethodCorpus(store ++ other.store)
def --(other: MethodCorpus) = new MethodCorpus(store -- other.store)
def &(other: MethodCorpus) = new MethodCorpus(store & other.store)
def |(other: MethodCorpus) = new MethodCorpus(store | other.store)
override def foreach[U](f: (JavaMethod) => U) = store.foreach(f)
def contains(jm: JavaMethod) = store.contains(jm)
override def filter(pred: JavaMethod => Boolean): MethodCorpus =
new MethodCorpus(store.filter(pred))
def concreteSubset: MethodCorpus =
new MethodCorpus(Set(filter(_.isConcrete).toList: _*))
def inclusionFactor(other: MethodCorpus): Double = {
require(other != null)
val bothAreEmptySet = store.isEmpty && other.store.isEmpty
require(!store.isEmpty || bothAreEmptySet)
if (bothAreEmptySet)
1.0
else
(store & other.store).size / store.size.toDouble
}
def randomSubset(num: Int, random: Random) = {
require(random != null)
val sample = random.shuffle(store.toList).take(num)
new MethodCorpus(Set[JavaMethod](sample: _*))
}
override def equals(that: Any) =
that != null && that.isInstanceOf[MethodCorpus] &&
that.asInstanceOf[MethodCorpus].store == store
override def hashCode() = store.hashCode
}
object MethodCorpus {
def apply(elems: JavaMethod*) =
new MethodCorpus(Set[JavaMethod](elems: _*))
def createStatefulReducer(predicate: JavaMethod => Boolean):
Reducer[JavaMethod, MethodCorpus] = {
val accum = mutable.ArrayBuffer[JavaMethod]()
new Reducer[JavaMethod, MethodCorpus] {
override def apply(elem: JavaMethod) = {
if (predicate(elem))
accum += elem
this
}
override def res = new MethodCorpus(new immutable.HashSet() ++ accum)
}
}
}
|
edwkar/edwbsc
|
projects/Osiris/src/main/scala/no/nr/edvard/osiris/model/MethodCorpus.scala
|
Scala
|
gpl-2.0
| 2,184
|
/*
* Copyright (C) 2011 Romain Reuillon
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openmole.tool.lock
import java.util.concurrent.atomic.AtomicInteger
import java.util.concurrent.locks.{ Lock, ReentrantLock }
import scala.collection.mutable
object LockRepository {
def apply[T]() = new LockRepository[T]()
}
// FIXME potential race condition: map update and lock.lock() can be split in two transactions
class LockRepository[T] {
val locks = new mutable.HashMap[T, (ReentrantLock, AtomicInteger)]
def nbLocked(k: T) = locks.synchronized(locks.get(k).map { case (_, users) ⇒ users.get }.getOrElse(0))
def lock(obj: T) = locks.synchronized {
val (lock, users) = locks.getOrElseUpdate(obj, (new ReentrantLock, new AtomicInteger(0)))
users.incrementAndGet
lock
}.lock()
def unlock(obj: T) = locks.synchronized {
locks.get(obj) match {
case Some((lock, users)) ⇒
val value = users.decrementAndGet
if (value <= 0) locks.remove(obj)
lock
case None ⇒ throw new IllegalArgumentException("Unlocking an object that has not been locked.")
}
}.unlock()
def withLock[A](obj: T)(op: ⇒ A) = {
lock(obj)
try op
finally unlock(obj)
}
}
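// Usage sketch: withLock serialises the critical section per key and disposes
// of the per-key lock once no thread still uses it. `LockRepositoryExample` is
// a hypothetical illustration assuming plain String keys.
object LockRepositoryExample {
  private val repo = LockRepository[String]()
  def update(key: String)(body: => Unit): Unit =
    repo.withLock(key)(body) // concurrent calls with the same key run one at a time
}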
|
openmole/openmole
|
openmole/third-parties/org.openmole.tool.lock/src/main/scala/org/openmole/tool/lock/LockRepository.scala
|
Scala
|
agpl-3.0
| 1,866
|
package com.atomist.rug.test.gherkin.handler.event
import com.atomist.source.StringFileArtifact
object EventHandlerTestTargets {
val Feature1 =
"""
|Feature: Australian political history
| This is a test
| to demonstrate that the Gherkin DSL
| is a good fit for Rug BDD testing
|
|Scenario: Australian politics, 1972-1991
| Given a sleepy country
| When a visionary leader enters
| Then excitement ensues
""".stripMargin
val Feature1File = StringFileArtifact(
".atomist/tests/handlers/event/Feature1.feature",
Feature1
)
val Feature2 =
"""
|Feature: World should return messages not just plans
|
|Scenario: Let me return a message for you
| When i call you
| Then you call me
| Then to greet me
""".stripMargin
val Feature2File = StringFileArtifact(
".atomist/tests/handlers/event/Feature2.feature",
Feature2
)
}
|
atomist/rug
|
src/test/scala/com/atomist/rug/test/gherkin/handler/event/EventHandlerTestTargets.scala
|
Scala
|
gpl-3.0
| 954
|
/*
* Code Pulse: A real-time code coverage testing tool. For more information
* see http://code-pulse.com
*
* Copyright (C) 2014 Applied Visions - http://securedecisions.avi.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.secdec.codepulse.data
import scala.util.parsing.combinator.RegexParsers
import MethodTypeParam._
/** A parser that understands MethodSignatures when they are in a form like:
*
* {{{
* org/eclipse/equinox/launcher/Main.main;9;([Ljava/lang/String;)V
* }}}
*
* This signature represents a MethodSignature where:
*
* {{{
* name = "main"
* className = "org.eclipse.equinox.launcher.Main"
* modifiers = 9
* params = List(ArrayType(ReferenceType("java.lang.String")))
* returnType = PrimitiveType("Void")
* }}}
*/
object MethodSignatureParser extends RegexParsers {
/** Parser that accepts java identifier strings */
def identifierPart = rep1 { acceptIf(Character.isJavaIdentifierPart _)(c => "Unexpected: " + c) } ^^ (_.mkString)
/** A series of java identifiers, separated by "/" */
def classIdentifier: Parser[String] = identifierPart ~ rep("/" ~ identifierPart) ^^ {
case head ~ tail => tail.foldLeft(head) { case (accum, a ~ b) => accum + "." + b }
}
/** Either a single identifierPart, or "<init>" or "<clinit>" */
def methodName: Parser[String] = identifierPart | "<init>" | "<clinit>"
/** Parses a series of digits as an integer */
def number: Parser[Int] = rep1(elem("digit", _.isDigit)) ^^ (_.mkString.toInt)
/** Parses a type parameter signature, as specified by the JVM. For details,
* see http://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.3
*/
def typeParam: Parser[MethodTypeParam] =
"B" ^^^ Primitive("Byte") |
"C" ^^^ Primitive("Char") |
"D" ^^^ Primitive("Double") |
"F" ^^^ Primitive("Float") |
"I" ^^^ Primitive("Int") |
"J" ^^^ Primitive("Long") |
"S" ^^^ Primitive("Short") |
"Z" ^^^ Primitive("Boolean") |
"[" ~> typeParam ^^ { ArrayType(_) } |
"L" ~> classIdentifier <~ ";" ^^ { ReferenceType(_) }
/** Parses a series of type parameters, surrounded by parentheses.
*/
def typeParamList: Parser[List[MethodTypeParam]] = "(" ~> typeParam.* <~ ")"
/** Parses a return type signature, which is any of the signatures
* from `typeParam`, with the addition of Void as a valid type.
*/
def returnType = typeParam | "V" ^^^ Primitive("Void")
/** Accepts a full method signature in the form of
* classIdentifier.methodName;modifiers;(paramsList)returnType
*/
def methodSignature(file: Option[String]) = {
classIdentifier ~
("." ~> methodName <~ ";") ~
(number <~ ";") ~
typeParamList ~
returnType ^^
{
case clazz ~ name ~ flags ~ params ~ returnType =>
MethodSignature(name, clazz, file, flags, params, returnType)
}
}
/** Parses an entire MethodSignature, returning the result as an Option
* (instead of the `ParseResult` class).
*/
def parseSignature(signatureString: String, file: Option[String]): Option[MethodSignature] = {
parse(methodSignature(file), signatureString) match {
case Success(sig, _) => Some(sig)
case _ => None
}
}
}
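// Usage sketch: parsing the example signature quoted in the scaladoc above.
// If parsing succeeds, the result describes Main.main with modifiers 9 and a
// single String[] parameter. `MethodSignatureParserExample` is a hypothetical
// name, and MethodSignature is assumed to live in this package, as the
// parser's own code implies.
object MethodSignatureParserExample {
  val sig: Option[MethodSignature] =
    MethodSignatureParser.parseSignature(
      "org/eclipse/equinox/launcher/Main.main;9;([Ljava/lang/String;)V",
      file = None)
}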
|
secdec/codepulse
|
codepulse/src/main/scala/com/secdec/codepulse/data/MethodSignatureParser.scala
|
Scala
|
apache-2.0
| 3,679
|
package uk.gov.gds.ier.transaction.overseas.dateLeftSpecial
import play.api.data.Forms._
import uk.gov.gds.ier.validation.{ErrorTransformForm, ErrorMessages, FormKeys}
import uk.gov.gds.ier.model.{DateLeft}
import uk.gov.gds.ier.validation.constraints.overseas.DateLeftSpecialConstraints
import uk.gov.gds.ier.model.LastRegisteredType
import uk.gov.gds.ier.model.DateLeftSpecial
import uk.gov.gds.ier.transaction.overseas.InprogressOverseas
trait DateLeftSpecialForms extends DateLeftSpecialConstraints {
self: FormKeys
with ErrorMessages =>
def dateLeftSpecialMapping = mapping(
keys.month.key -> text
.verifying("Please enter the month when you left", _.nonEmpty)
.verifying("The month you provided is invalid", month => month.isEmpty || month.matches("^(0?[1-9]|1[012])$")),
keys.year.key -> text
.verifying("Please enter the year when you left", _.nonEmpty)
.verifying("The year you provided is invalid", day => day.isEmpty || day.matches("\\\\d+"))
) {
(month, year) => DateLeft(year.toInt, month.toInt)
} {
dateLeftSpecial =>
Some(
dateLeftSpecial.month.toString,
dateLeftSpecial.year.toString
)
}
def dateLeftSpecialTypeMapping =
dateLeftSpecialMapping.transform[DateLeftSpecial](
date => DateLeftSpecial(date),
dateLeftSpecial => dateLeftSpecial.date
)
def dateLeftSpecialForm = ErrorTransformForm(
mapping (
keys.dateLeftSpecial.key -> optional(dateLeftSpecialTypeMapping)
) (
dateLeftSpecial => InprogressOverseas(dateLeftSpecial = dateLeftSpecial)
) (
inprogress => Some(inprogress.dateLeftSpecial)
).verifying (validateDateLeftSpecial)
)
}
|
alphagov/ier-frontend
|
app/uk/gov/gds/ier/transaction/overseas/dateLeftSpecial/DateLeftSpecialForms.scala
|
Scala
|
mit
| 1,696
|
/*
* Copyright (c) 2014-2018 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.tail
package batches
import scala.collection.mutable.ArrayBuilder
import scala.reflect.ClassTag
/** [[Batch]] implementation that wraps
* an array, based on [[ArrayCursor]].
*/
final class ArrayBatch[@specialized(Boolean, Byte, Char, Int, Long, Double) A]
(ref: Array[A], offset: Int, length: Int, newBuilder: () => ArrayBuilder[A])
extends Batch[A] {
def this(ref: Array[A], offset: Int, length: Int)(implicit tag: ClassTag[A]) =
this(ref, offset, length, () => ArrayBuilder.make[A]())
override def cursor(): ArrayCursor[A] =
new ArrayCursor[A](ref, offset, length, newBuilder)
override def take(n: Int): ArrayBatch[A] = {
val ref = cursor().take(n)
new ArrayBatch(ref.array, ref.offset, ref.length, newBuilder)
}
override def drop(n: Int): ArrayBatch[A] = {
val ref = cursor().drop(n)
new ArrayBatch(ref.array, ref.offset, ref.length, newBuilder)
}
override def slice(from: Int, until: Int): ArrayBatch[A] = {
val ref = cursor().slice(from, until)
new ArrayBatch(ref.array, ref.offset, ref.length, newBuilder)
}
override def map[B](f: (A) => B): ArrayBatch[B] = {
val ref = cursor().map(f)
Batch.fromArray(ref.array, 0, ref.length)
}
override def filter(p: (A) => Boolean): ArrayBatch[A] = {
val ref = cursor().filter(p)
new ArrayBatch(ref.array, ref.offset, ref.length, newBuilder)
}
override def collect[B](pf: PartialFunction[A, B]): ArrayBatch[B] = {
val ref = cursor().collect(pf)
Batch.fromArray(ref.array, 0, ref.length)
}
override def foldLeft[R](initial: R)(op: (R, A) => R): R =
cursor().foldLeft(initial)(op)
}
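// Usage sketch: an ArrayBatch over an Int array; map and foldLeft each go
// through a fresh ArrayCursor. `ArrayBatchExample` is a hypothetical name.
object ArrayBatchExample {
  val batch = new ArrayBatch(Array(1, 2, 3, 4), 0, 4) // uses the ClassTag constructor
  val sumOfDoubled: Int = batch.map(_ * 2).foldLeft(0)(_ + _) // 2 + 4 + 6 + 8 = 20
}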
|
Wogan/monix
|
monix-tail/shared/src/main/scala/monix/tail/batches/ArrayBatch.scala
|
Scala
|
apache-2.0
| 2,321
|
/*
* Derived from https://github.com/spray/spray/blob/v1.1-M7/spray-http/src/main/scala/spray/http/parser/AuthorizationHeader.scala
*
* Copyright (C) 2011-2012 spray.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.http4s
package parser
import cats.data.NonEmptyList
import org.http4s.headers.Authorization
import org.http4s.internal.parboiled2.{ParserInput, Rule0, Rule1}
import org.http4s.syntax.string._
private[parser] trait AuthorizationHeader {
def AUTHORIZATION(value: String): ParseResult[`Authorization`] =
new AuthorizationParser(value).parse
// scalastyle:off public.methods.have.type
private class AuthorizationParser(input: ParserInput)
extends Http4sHeaderParser[Authorization](input) {
def entry: Rule1[Authorization] = rule {
CredentialDef ~ EOI ~> { creds: Credentials =>
Authorization(creds)
}
}
def CredentialDef = rule {
AuthParamsCredentialsDef |
TokenCredentialsDef
}
def TokenCredentialsDef = rule {
Token ~ LWS ~ token68 ~> { (scheme: String, value: String) =>
Credentials.Token(scheme.ci, value)
}
}
def AuthParamsCredentialsDef = rule {
Token ~ OptWS ~ CredentialParams ~> {
(scheme: String, params: NonEmptyList[(String, String)]) =>
Credentials.AuthParams(scheme.ci, params)
}
}
def CredentialParams: Rule1[NonEmptyList[(String, String)]] = rule {
oneOrMore(AuthParam).separatedBy(ListSep) ~> { params: collection.Seq[(String, String)] =>
NonEmptyList(params.head, params.tail.toList)
}
}
def AuthParam: Rule1[(String, String)] = rule {
Token ~ "=" ~ (Token | QuotedString) ~> { (s1: String, s2: String) =>
(s1, s2)
}
}
def Base64Char: Rule0 = rule { Alpha | Digit | '+' | '/' | '=' }
// https://tools.ietf.org/html/rfc6750#page-5
def b64token: Rule1[String] = rule {
capture(oneOrMore(Alpha | Digit | anyOf("-._~+/")) ~ zeroOrMore('='))
}
def token68: Rule1[String] = b64token
}
// scalastyle:on public.methods.have.type
}
|
ChristopherDavenport/http4s
|
core/src/main/scala/org/http4s/parser/AuthorizationHeader.scala
|
Scala
|
apache-2.0
| 2,608
|
package zzb.srvdemo.entites
import org.squeryl.KeyedEntity
import java.sql.Timestamp
import spray.json._
/**
* Created with IntelliJ IDEA.
* User: Simon Xiao
* Date: 13-8-20
* Time: 4:58 PM
* Copyright baoxian.com 2012~2020
*/
class BaseEntity extends KeyedEntity[Long] {
val id:Long = 0
var lastModified = new Timestamp(System.currentTimeMillis)
}
case class User(var email: String, var password: String) extends BaseEntity {
def this() = this("", "")
}
case class ChangeCount(count:Int)
class Company (var name: String, var address: String) extends BaseEntity {
def this() = this("", "")
}
object ChangeCount extends DefaultJsonProtocol {
implicit val format = jsonFormat1(ChangeCount.apply)
}
/*{"email":"user1@demo.com","password":"p1","id":1,"lastModified":"2013-09-17 17:45:41.644"}*/
object User extends DefaultJsonProtocol {
implicit val userJsonFormat = new RootJsonFormat[User] {
def write(c: User) =
JsObject(Map("email"->JsString(c.email),"password"->JsString(c.password), "id"->JsNumber(c.id), "lastModified"->JsString(c.lastModified.toString)))
def read(value: JsValue) = value match {
case JsObject(fields) =>
new User(
fields("email") match {
case JsString(email)=>email
case _ => deserializationError("String expected")
},
fields("password") match {
case JsString(password)=>password
case _ => deserializationError("String expected")
}
)
case _ => deserializationError("User expected")
}
}
}
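// Usage sketch: round-tripping a User through the hand-written format above;
// the serialized shape matches the sample JSON in the comment (id defaults to
// 0 and lastModified is regenerated on read). `UserJsonExample` is a
// hypothetical name.
object UserJsonExample {
  val json: JsValue = User("user1@demo.com", "p1").toJson
  val back: User = json.convertTo[User]
}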
|
xiefeifeihu/zzb
|
examples/srvbox-demoService/src/main/scala/zzb/srvdemo/entites/Entites.scala
|
Scala
|
mit
| 1,581
|
/**
* Copyright 2009 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package noop.model
/**
* @author tocman@gmail.com (Jeremie Lenfant-Engelmann)
*/
class ConditionalOrExpression(val lhs: Expression, val rhs: Expression) extends Expression {
override def accept(visitor: Visitor) = {
lhs.accept(visitor);
visitor.visit(this);
rhs.accept(visitor);
visitor.visit(this);
}
}
|
SeaUrchinBot/noop
|
core/src/main/scala/noop/model/ConditionalOrExpression.scala
|
Scala
|
apache-2.0
| 924
|
// Copyright 2017 EPFL DATA Lab (data.epfl.ch)
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package squid
package feature
class Antiquotation extends MyFunSuite {
import TestDSL.Predef._
import TestDSL.Quasicodes._
val n = code"42"
test("Term Unquote") {
eqt(code"$n.toDouble + $n : Double", code"42.toDouble + 42")
}
test("Type Unquote") {
val t = TestDSL.`internal CodeType`[Int](typeRepOf[Int])
code"(42,43)".erase match {
case code"($x: $$t, $y:$s)" =>
subt(x.trep, typeRepOf[Int])
subt(x.trep, s.rep)
}
}
test("Escaped Term Unquote in Expressions") {
val n = code"readInt"
val c0 = code"$$n + 1"
c0 eqt code"readInt+1"
code"$$(identity(n)) + $$(Const(1))" eqt c0
code"""$$(identity(code"readInt")) + 1""" eqt c0
}
test("Escaped Term Unquote in Patterns") {
code"(42,42)".erase match {
case code"($$n, $m: Int)" => eqt(m, n)
}
code"println(42)" match {
case code"println($$n)" =>
}
assertDoesNotCompile(""" code"$$" """) // scp.quasi.EmbeddingException: Empty escaped unquote name: '$$'
}
test("Escaped Type Unquote in Expressions") {
val t = codeTypeOf[Int]
code" Option.empty[ $t]" eqt
code"Option.empty[$$t]"
}
test("Escaped Type Unquote in Patterns") {
eqt(code"Option.empty[String]", code"scala.Option.empty[String]")
code"scala.Option.empty[Int]".erase match {
case code"scala.Option.empty[$t]" =>
eqt(t.rep, typeRepOf[Int])
code"Option.empty[String]" matches {
case code"scala.Option.empty[$$t]" => fail
case code"scala.Option.empty[$t]" => eqt(t.rep, typeRepOf[String])
}
code"Option.empty[Int]" matches {
case code"scala.Option.empty[$$t]" =>
}
}
}
test("Alternative Unquote Syntax") { // Note: probably never useful; rm syntax?
val (x,y) = (code"1", code"2")
assertDoesNotCompile(""" code"println($$(x,y))" """) // Error:(68, 5) Quasiquote Error: Vararg splice unexpected in that position: $(x, y)
assertDoesNotCompile("""
code"println(1,2)" match {
case code"println($$(x,y))" =>
}
""") // Error:(64, 12) Quasiquote Error: Vararg splice unexpected in that position: $(x, y)
eqt(code{List(${Seq(x,y):_*})}, code"List(1,2)")
eqt(code"List($$(x,y))", code"List(1,2)")
val seq = Seq(x,y)
assertDoesNotCompile(""" code{println($(seq:_*))} """) // Error:(87, 7) Quasiquote Error: Vararg splice unexpected in that position: ((seq): _*)
assertDoesNotCompile(""" code"println(${seq:_*})" """) // Error:(87, 7) Quasiquote Error: Vararg splice unexpected in that position: ((seq): _*)
eqt(code"(?x:Int)+$x", code"(?x:Int)+1")
assert(code"(?x:Int, ?y:Int)".rep.extractRep(code"(1,2)".rep).get._1 === Map("x" -> code"1".rep, "y" -> code"2".rep))
assert(code"( $x, $y )".rep.extractRep(code"(1,2)".rep).get._1 === Map())
code"List(1,2)" match { case code"List($$(x,y))" => }
val p = code"println(1,2)"
assertDoesNotCompile(""" p match { case code"println($$(x,y))" => } """) // Error:(147, 12) Quasicode Error: Vararg splice unexpected in that position: $(x, y)
assertDoesNotCompile(""" p match { case code"println($$(seq:_*))" => } """)
p match { case code"println($$x,$$y)" => }
var count = 0
code"$${count += 1; code{42}}" matches {
case code"$${count += 1; x}" => fail
case code"$${count += 1; n}" =>
} and {
case code"$${count += 1; code{42}}" =>
}
assertDoesNotCompile(""" (??? : Code[Int,_]) match { case code"$${count += 1; $m}" => } """) // Error:(159, 36) Quasiquote Error: Illegal hole position for: $m
assert(count == 4)
}
}
|
epfldata/squid
|
src/test/scala/squid/feature/Antiquotation.scala
|
Scala
|
apache-2.0
| 4,418
|
package com.atomist.project.common.template
object MustacheSamples {
val First =
"""Hello {{name}}
|You have just won {{value}} dollars!
|{{#in_ca}}
|Well, {{taxed_value}} dollars, after taxes.{{/in_ca}}""".stripMargin
val FirstContext = MergeContext(Map(
"name" -> "Chris",
"value" -> "10000",
"taxed_value" -> (10000 - (10000 * 0.4)),
"in_ca" -> "true"
))
// TODO should be 6000.0 in samples
val FirstExpected =
"""Hello Chris
|You have just won 10000 dollars!
|Well, 6000.0 dollars, after taxes.""".stripMargin
}
|
atomist/rug
|
src/test/scala/com/atomist/project/common/template/MustacheSamples.scala
|
Scala
|
gpl-3.0
| 585
|
/*
* Copyright 2015 eleflow.com.br.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cloudera.sparkts.models
import org.apache.spark.mllib.linalg.Vector
/**
* Created by dirceu on 08/06/16.
*/
class UberArimaModel(override val p: scala.Int,
override val d: scala.Int,
override val q: scala.Int,
override val coefficients: scala.Array[scala.Double],
override val hasIntercept: scala.Boolean = true)
extends ARIMAModel(p, q, d, coefficients, hasIntercept) {
lazy val params =
Map("ArimaP" -> p.toString, "ArimaD" -> d.toString, "ArimaQ" -> q.toString)
}
object UberArimaModel {
def fitModel(p: Int,
d: Int,
q: Int,
ts: Vector,
includeIntercept: Boolean = true,
method: String = "css-cgd",
userInitParams: Array[Double] = null): UberArimaModel = {
val model =
ARIMA.fitModel(p, d, q, ts, includeIntercept, method, userInitParams)
new UberArimaModel(p, d, q, model.coefficients, model.hasIntercept)
}
}
|
eleflow/uberdata
|
iuberdata_core/src/main/scala/com/cloudera/sparkts/models/UberArimaModel.scala
|
Scala
|
apache-2.0
| 1,632
|
package com.omega.repository
import java.util.{ List => JList }
import com.omega.domain.Book
import com.omega.util.BeanLifeCycle
import javax.persistence.EntityManager
import javax.persistence.PersistenceContext
class BookDaoJpaImpl extends BookDao with BeanLifeCycle {
@PersistenceContext(unitName = "OmegaUnit1")
private var entityManager: EntityManager = _
override def save(book: Book): Book = {
entityManager.persist(book)
book
}
override def getBooks: JList[Book] = {
// entityManager.createNativeQuery("INSERT INTO BOOK(NAME) VALUES('Book N')").executeUpdate()
val books = entityManager.createQuery("SELECT b FROM Book b").getResultList.asInstanceOf[JList[Book]]
books
}
}
|
milind-chawla/Omega
|
src/main/scala/com/omega/repository/BookDaoJpaImpl.scala
|
Scala
|
mit
| 763
|
package scala.quoted
package runtime.impl.printers
import scala.annotation.switch
/** Printer for fully elaborated representation of the source code */
object SourceCode {
def showTree(using Quotes)(tree: quotes.reflect.Tree)(syntaxHighlight: SyntaxHighlight, fullNames: Boolean): String =
new SourceCodePrinter[quotes.type](syntaxHighlight, fullNames).printTree(tree).result()
def showType(using Quotes)(tpe: quotes.reflect.TypeRepr)(syntaxHighlight: SyntaxHighlight, fullNames: Boolean): String =
new SourceCodePrinter[quotes.type](syntaxHighlight, fullNames).printType(tpe)(using None).result()
def showConstant(using Quotes)(const: quotes.reflect.Constant)(syntaxHighlight: SyntaxHighlight, fullNames: Boolean): String =
new SourceCodePrinter[quotes.type](syntaxHighlight, fullNames).printConstant(const).result()
def showSymbol(using Quotes)(symbol: quotes.reflect.Symbol)(syntaxHighlight: SyntaxHighlight): String =
symbol.fullName
def showFlags(using Quotes)(flags: quotes.reflect.Flags)(syntaxHighlight: SyntaxHighlight): String = {
import quotes.reflect._
val flagList = List.newBuilder[String]
if (flags.is(Flags.Abstract)) flagList += "abstract"
if (flags.is(Flags.Artifact)) flagList += "artifact"
if (flags.is(Flags.Case)) flagList += "case"
if (flags.is(Flags.CaseAccessor)) flagList += "caseAccessor"
if (flags.is(Flags.Contravariant)) flagList += "contravariant"
if (flags.is(Flags.Covariant)) flagList += "covariant"
if (flags.is(Flags.Deferred)) flagList += "deferred"
if (flags.is(Flags.Enum)) flagList += "enum"
if (flags.is(Flags.Erased)) flagList += "erased"
if (flags.is(Flags.Exported)) flagList += "exported"
if (flags.is(Flags.ExtensionMethod)) flagList += "extension"
if (flags.is(Flags.FieldAccessor)) flagList += "accessor"
if (flags.is(Flags.Final)) flagList += "final"
if (flags.is(Flags.HasDefault)) flagList += "hasDefault"
if (flags.is(Flags.Implicit)) flagList += "implicit"
if (flags.is(Flags.Infix)) flagList += "infix"
if (flags.is(Flags.Inline)) flagList += "inline"
if (flags.is(Flags.JavaDefined)) flagList += "javaDefined"
if (flags.is(Flags.JavaStatic)) flagList += "static"
if (flags.is(Flags.Lazy)) flagList += "lazy"
if (flags.is(Flags.Local)) flagList += "local"
if (flags.is(Flags.Macro)) flagList += "macro"
if (flags.is(Flags.Method)) flagList += "method"
if (flags.is(Flags.Module)) flagList += "object"
if (flags.is(Flags.Mutable)) flagList += "mutable"
if (flags.is(Flags.NoInits)) flagList += "noInits"
if (flags.is(Flags.Override)) flagList += "override"
if (flags.is(Flags.Package)) flagList += "package"
if (flags.is(Flags.Param)) flagList += "param"
if (flags.is(Flags.ParamAccessor)) flagList += "paramAccessor"
if (flags.is(Flags.Private)) flagList += "private"
if (flags.is(Flags.PrivateLocal)) flagList += "private[this]"
if (flags.is(Flags.Protected)) flagList += "protected"
if (flags.is(Flags.Scala2x)) flagList += "scala2x"
if (flags.is(Flags.Sealed)) flagList += "sealed"
if (flags.is(Flags.StableRealizable)) flagList += "stableRealizable"
if (flags.is(Flags.Static)) flagList += "javaStatic"
if (flags.is(Flags.Synthetic)) flagList += "synthetic"
if (flags.is(Flags.Trait)) flagList += "trait"
if (flags.is(Flags.Transparent)) flagList += "transparent"
flagList.result().mkString("/*", " ", "*/")
}
private class SourceCodePrinter[Q <: Quotes & Singleton](syntaxHighlight: SyntaxHighlight, fullNames: Boolean)(using val quotes: Q) {
import syntaxHighlight._
import quotes.reflect._
private[this] val sb: StringBuilder = new StringBuilder
private[this] var indent: Int = 0
private def indented(printIndented: => Unit): Unit = {
indent += 1
printIndented
indent -= 1
}
private def inParens(body: => Unit): this.type = {
this += "("
body
this += ")"
}
private def inSquare(body: => Unit): this.type = {
this += "["
body
this += "]"
}
private def inBlock(body: => Unit): this.type = {
this += " {"
indented {
this += lineBreak()
body
}
this += lineBreak() += "}"
}
def result(): String = sb.result()
private def lineBreak(): String = "\n" + (" " * indent)
private def doubleLineBreak(): String = "\n\n" + (" " * indent)
def printTree(tree: Tree)(using elideThis: Option[Symbol] = None): this.type = tree match {
case PackageObject(body)=>
printTree(body) // Print package object
case PackageClause(Ident(name), (inner @ PackageClause(_, _)) :: Nil) if name != "<empty>" && PackageObject.unapply(inner).isEmpty =>
// print inner package as `package outer.inner { ... }`
printTree(inner)
case tree @ PackageClause(name, stats) =>
val stats1 = stats.collect {
case stat: PackageClause => stat
case stat: Definition if !(stat.symbol.flags.is(Flags.Module) && stat.symbol.flags.is(Flags.Lazy)) => stat
case stat @ (_:Import | _:Export) => stat
}
name match {
case Ident("<empty>") =>
printTrees(stats1, lineBreak())
case _ =>
this += "package "
printType(name.tpe)
inBlock(printTrees(stats1, lineBreak()))
}
case Import(expr, selectors) =>
this += "import "
printTree(expr)
this += "."
printSelectors(selectors)
case Export(expr, selectors) =>
this += "export "
printTree(expr)
this += "."
printSelectors(selectors)
case cdef @ ClassDef(name, DefDef(_, paramss, _, _), parents, self, stats) =>
printDefAnnotations(cdef)
val flags = cdef.symbol.flags
if (flags.is(Flags.Implicit)) this += highlightKeyword("implicit ")
if (flags.is(Flags.Sealed)) this += highlightKeyword("sealed ")
if (flags.is(Flags.Final) && !flags.is(Flags.Module)) this += highlightKeyword("final ")
if (flags.is(Flags.Case)) this += highlightKeyword("case ")
if (name == "package$") {
this += highlightKeyword("package object ") += highlightTypeDef(cdef.symbol.owner.name.stripSuffix("$"))
}
else if (flags.is(Flags.Module)) this += highlightKeyword("object ") += highlightTypeDef(name.stripSuffix("$"))
else if (flags.is(Flags.Trait)) this += highlightKeyword("trait ") += highlightTypeDef(name)
else if (flags.is(Flags.Abstract)) this += highlightKeyword("abstract class ") += highlightTypeDef(name)
else this += highlightKeyword("class ") += highlightTypeDef(name)
if (!flags.is(Flags.Module)) {
for paramClause <- paramss do
paramClause match
case TermParamClause(params) =>
printArgsDefs(params)
case TypeParamClause(params) =>
printTargsDefs(stats.collect { case targ: TypeDef => targ }.filter(_.symbol.isTypeParam).zip(params))
}
val parents1 = parents.filter {
case Apply(Select(New(tpt), _), _) => tpt.tpe.typeSymbol != Symbol.requiredClass("java.lang.Object")
case TypeSelect(Select(Ident("_root_"), "scala"), "Product") => false
case TypeSelect(Select(Ident("_root_"), "scala"), "Serializable") => false
case _ => true
}
if (parents1.nonEmpty)
this += highlightKeyword(" extends ")
def printParent(parent: Tree /* Term | TypeTree */, needEmptyParens: Boolean = false): Unit = parent match {
case parent: TypeTree =>
printTypeTree(parent)(using Some(cdef.symbol))
case TypeApply(fun, targs) =>
printParent(fun)
case Apply(fun@Apply(_,_), args) =>
printParent(fun, true)
if (!args.isEmpty || needEmptyParens)
inParens(printTrees(args, ", ")(using Some(cdef.symbol)))
case Apply(fun, args) =>
printParent(fun)
if (!args.isEmpty || needEmptyParens)
inParens(printTrees(args, ", ")(using Some(cdef.symbol)))
case Select(newTree: New, _) =>
printType(newTree.tpe)(using Some(cdef.symbol))
case parent: Term =>
throw new MatchError(parent.show(using Printer.TreeStructure))
}
def printSeparated(list: List[Tree /* Term | TypeTree */]): Unit = list match {
case Nil =>
case x :: Nil => printParent(x)
case x :: xs =>
printParent(x)
this += highlightKeyword(" with ")
printSeparated(xs)
}
printSeparated(parents1)
def keepDefinition(d: Definition): Boolean = {
val flags = d.symbol.flags
def isUndecompilableCaseClassMethod: Boolean = {
// Currently the compiler does not allow overriding some of the methods generated for case classes
d.symbol.flags.is(Flags.Synthetic) &&
(d match {
case DefDef("apply" | "unapply" | "writeReplace", _, _, _) if d.symbol.owner.flags.is(Flags.Module) => true
case DefDef(n, _, _, _) if d.symbol.owner.flags.is(Flags.Case) =>
n == "copy" ||
n.matches("copy\\\\$default\\\\$[1-9][0-9]*") || // default parameters for the copy method
n.matches("_[1-9][0-9]*") || // Getters from Product
n == "productElementName"
case _ => false
})
}
def isInnerModuleObject = d.symbol.flags.is(Flags.Lazy) && d.symbol.flags.is(Flags.Module)
!flags.is(Flags.Param) && !flags.is(Flags.ParamAccessor) && !flags.is(Flags.FieldAccessor) && !isUndecompilableCaseClassMethod && !isInnerModuleObject
}
val stats1 = stats.collect {
case stat: Definition if keepDefinition(stat) => stat
case stat @ (_:Import | _:Export) => stat
case stat: Term => stat
}
def printBody(printSelf: Boolean) = {
this += " {"
indented {
if (printSelf) {
val Some(ValDef(name, tpt, _)) = self
indented {
val name1 = if (name == "_") "this" else name
this += " " += highlightValDef(name1) += ": "
printTypeTree(tpt)(using Some(cdef.symbol))
this += " =>"
}
}
this += lineBreak()
printTrees(stats1, lineBreak())
}
this += lineBreak() += "}"
}
self match {
case Some(ValDef(_, Singleton(_), _)) =>
if (stats1.nonEmpty)
printBody(printSelf = false)
case Some(ValDef(_, _, _)) =>
printBody(printSelf = true)
case _ =>
if (stats1.nonEmpty)
printBody(printSelf = false)
}
this
case tdef @ TypeDef(name, rhs) =>
printDefAnnotations(tdef)
this += highlightKeyword("type ")
printTargDef((tdef, tdef), isMember = true)
case vdef @ ValDef(name, tpt, rhs) =>
printDefAnnotations(vdef)
val flags = vdef.symbol.flags
if (flags.is(Flags.Implicit)) this += highlightKeyword("implicit ")
if (flags.is(Flags.Override)) this += highlightKeyword("override ")
if (flags.is(Flags.Final) && !flags.is(Flags.Module)) this += highlightKeyword("final ")
printProtectedOrPrivate(vdef)
if (flags.is(Flags.Lazy)) this += highlightKeyword("lazy ")
if (vdef.symbol.flags.is(Flags.Mutable)) this += highlightKeyword("var ")
else this += highlightKeyword("val ")
val name1 = splicedName(vdef.symbol).getOrElse(name)
this += highlightValDef(name1) += ": "
printTypeTree(tpt)
rhs match {
case Some(tree) =>
this += " = "
printTree(tree)
case None =>
this
}
case While(cond, body) =>
(cond, body) match {
case (Block(Block(Nil, body1) :: Nil, Block(Nil, cond1)), Literal(UnitConstant())) =>
this += highlightKeyword("do ")
printTree(body1) += highlightKeyword(" while ")
inParens(printTree(cond1))
case _ =>
this += highlightKeyword("while ")
inParens(printTree(cond)) += " "
printTree(body)
}
case ddef @ DefDef(name, paramss, tpt, rhs) =>
printDefAnnotations(ddef)
val isConstructor = name == "<init>"
val flags = ddef.symbol.flags
if (flags.is(Flags.Implicit)) this += highlightKeyword("implicit ")
if (flags.is(Flags.Inline)) this += highlightKeyword("inline ")
if (flags.is(Flags.Override)) this += highlightKeyword("override ")
if (flags.is(Flags.Final) && !flags.is(Flags.Module)) this += highlightKeyword("final ")
printProtectedOrPrivate(ddef)
val name1: String = if (isConstructor) "this" else splicedName(ddef.symbol).getOrElse(name)
this += highlightKeyword("def ") += highlightValDef(name1)
for clause <- paramss do
clause match
case TermParamClause(params) => printArgsDefs(params)
case TypeParamClause(params) => printTargsDefs(params.zip(params))
if (!isConstructor) {
this += ": "
printTypeTree(tpt)
}
rhs match {
case Some(tree) =>
this += " = "
printTree(tree)
case None =>
}
this
case Wildcard() =>
this += "_"
case tree: Ident =>
splicedName(tree.symbol) match {
case Some(name) => this += highlightTypeDef(name)
case _ => printType(tree.tpe)
}
case Select(qual, name) =>
printQualTree(qual)
if (name != "<init>" && name != "package")
this += "." += name
this
case Literal(const) =>
printConstant(const)
case This(id) =>
id match {
case Some(name) =>
this += name.stripSuffix("$") += "."
case None =>
}
this += "this"
case tree: New =>
this += "new "
printType(tree.tpe)
case NamedArg(name, arg) =>
this += name += " = "
printTree(arg)
case SpecialOp("throw", expr :: Nil) =>
this += "throw "
printTree(expr)
case Apply(fn, args) if fn.symbol == Symbol.requiredMethod("scala.quoted.runtime.quote") =>
args.head match {
case Block(stats, expr) =>
this += "'{"
indented {
this += lineBreak()
printFlatBlock(stats, expr)
}
this += lineBreak() += "}"
case _ =>
this += "'{"
printTree(args.head)
this += "}"
}
case Apply(fn, arg :: Nil) if fn.symbol == Symbol.requiredMethod("scala.quoted.runtime.splice") =>
this += "${"
printTree(arg)
this += "}"
case Apply(fn, args) =>
var argsPrefix = ""
fn match {
case Select(This(_), "<init>") => this += "this" // call to constructor inside a constructor
case Select(qual, "apply") =>
if qual.tpe.isContextFunctionType then
argsPrefix += "using "
if qual.tpe.isErasedFunctionType then
argsPrefix += "erased "
printQualTree(fn)
case _ => printQualTree(fn)
}
val args1 = args match {
case init :+ Typed(Repeated(Nil, _), _) => init // drop empty var args at the end
case _ => args
}
inParens {
this += argsPrefix
printTrees(args1, ", ")
}
case TypeApply(fn, args) =>
printQualTree(fn)
fn match {
case Select(New(Applied(_, _)), "<init>") =>
// type bounds already printed in `fn`
this
case _ =>
inSquare(printTrees(args, ", "))
}
case Super(qual, idOpt) =>
qual match {
case This(Some(name)) => this += name += "."
case This(None) =>
}
this += "super"
for (id <- idOpt)
inSquare(this += id)
this
case Typed(term, tpt) =>
tpt.tpe match {
case Types.Repeated(_) =>
printTree(term)
term match {
case Repeated(_, _) | Inlined(None, Nil, Repeated(_, _)) => this
case _ => this += ": " += highlightTypeDef("_*")
}
case _ =>
inParens {
printTree(term)
this += (if (dotty.tools.dotc.util.Chars.isOperatorPart(sb.last)) " : " else ": ")
def printTypeOrAnnots(tpe: TypeRepr): Unit = tpe match {
case AnnotatedType(tp, annot) if tp == term.tpe =>
printAnnotation(annot)
case AnnotatedType(tp, annot) =>
printTypeOrAnnots(tp)
this += " "
printAnnotation(annot)
case tpe =>
printType(tpe)
}
printTypeOrAnnots(tpt.tpe)
}
}
case Assign(lhs, rhs) =>
printTree(lhs)
this += " = "
printTree(rhs)
case tree @ Lambda(params, body) => // must come before `Block`
inParens {
printArgsDefs(params)
this += (if tree.tpe.isContextFunctionType then " ?=> " else " => ")
printTree(body)
}
case Block(stats0, expr) =>
val stats = stats0.filter {
case tree: ValDef => !tree.symbol.flags.is(Flags.Module)
case _ => true
}
printFlatBlock(stats, expr)
case Inlined(_, bindings, expansion) =>
printFlatBlock(bindings, expansion)
case If(cond, thenp, elsep) =>
this += highlightKeyword("if ")
inParens(printTree(cond))
this += " "
printTree(thenp)
this+= highlightKeyword(" else ")
printTree(elsep)
case Match(selector, cases) =>
printQualTree(selector)
this += highlightKeyword(" match")
inBlock(printCases(cases, lineBreak()))
case SummonFrom(cases) =>
this += highlightKeyword("summonFrom ")
inBlock(printCases(cases, lineBreak()))
case Try(body, cases, finallyOpt) =>
this += highlightKeyword("try ")
printTree(body)
if (cases.nonEmpty) {
this += highlightKeyword(" catch")
inBlock(printCases(cases, lineBreak()))
}
finallyOpt match {
case Some(t) =>
this += highlightKeyword(" finally ")
printTree(t)
case None =>
this
}
case Return(expr, from) =>
this += "return "
printTree(expr)
case Repeated(elems, _) =>
printTrees(elems, ", ")
case TypeBoundsTree(lo, hi) =>
this += "_ >: "
printTypeTree(lo)
this += " <: "
printTypeTree(hi)
case tpt: WildcardTypeTree =>
printType(tpt.tpe)
case tpt: TypeTree =>
printTypeTree(tpt)
case Closure(meth, _) =>
printTree(meth)
case _:Unapply | _:Alternatives | _:Bind =>
printPattern(tree)
case _ =>
throw new MatchError(tree.show(using Printer.TreeStructure))
}
private def printQualTree(tree: Tree): this.type = tree match {
case _: If | _: Match | _: While | _: Try | _: Return =>
this += "("
printTree(tree)
this += ")"
case _ => printTree(tree)
}
private def flatBlock(stats: List[Statement], expr: Term): (List[Statement], Term) = {
val flatStats = List.newBuilder[Statement]
def extractFlatStats(stat: Statement): Unit = stat match {
case Lambda(_, _) => // must come before `Block`
flatStats += stat
case Block(stats1, expr1) =>
val it = stats1.iterator
while (it.hasNext)
extractFlatStats(it.next())
extractFlatStats(expr1)
case Inlined(_, bindings, expansion) =>
val it = bindings.iterator
while (it.hasNext)
extractFlatStats(it.next())
extractFlatStats(expansion)
case Literal(UnitConstant()) => // ignore
case stat => flatStats += stat
}
def extractFlatExpr(term: Term): Term = term match {
case Lambda(_, _) => // must come before `Block`
term
case Block(stats1, expr1) =>
val it = stats1.iterator
while (it.hasNext)
extractFlatStats(it.next())
extractFlatExpr(expr1)
case Inlined(_, bindings, expansion) =>
val it = bindings.iterator
while (it.hasNext)
extractFlatStats(it.next())
extractFlatExpr(expansion)
case term => term
}
val it = stats.iterator
while (it.hasNext)
extractFlatStats(it.next())
val flatExpr = extractFlatExpr(expr)
(flatStats.result(), flatExpr)
}
private def printFlatBlock(stats: List[Statement], expr: Term)(using elideThis: Option[Symbol]): this.type = {
val (stats1, expr1) = flatBlock(stats, expr)
val splicedTypeAnnot = Symbol.requiredClass("scala.quoted.runtime.SplicedType").primaryConstructor
val stats2 = stats1.filter {
case tree: TypeDef => !tree.symbol.hasAnnotation(splicedTypeAnnot)
case _ => true
}
if (stats2.isEmpty) {
printTree(expr1)
} else {
this += "{"
indented {
printStats(stats2, expr1)
}
this += lineBreak() += "}"
}
}
private def printStats(stats: List[Tree], expr: Tree)(using elideThis: Option[Symbol]): Unit = {
def printSeparator(next: Tree): Unit = {
// Avoid accidental application of opening `{` on next line with a double break
def rec(next: Tree): Unit = next match {
case Lambda(_, _) => this += lineBreak()
case Block(stats, _) if stats.nonEmpty => this += doubleLineBreak()
case Inlined(_, bindings, _) if bindings.nonEmpty => this += doubleLineBreak()
case Select(qual, _) => rec(qual)
case Apply(fn, _) => rec(fn)
case TypeApply(fn, _) => rec(fn)
case Typed(_, _) => this += doubleLineBreak()
case _ => this += lineBreak()
}
next match {
case term: Term =>
flatBlock(Nil, term) match {
case (next :: _, _) => rec(next)
case (Nil, next) => rec(next)
}
case _ => this += lineBreak()
}
}
def printSeparated(list: List[Tree]): Unit = list match {
case Nil =>
printTree(expr)
case x :: xs =>
printTree(x)
printSeparator(if (xs.isEmpty) expr else xs.head)
printSeparated(xs)
}
this += lineBreak()
printSeparated(stats)
}
private def printList[T](xs: List[T], sep: String, print: T => this.type): this.type = {
def printSeparated(list: List[T]): Unit = list match {
case Nil =>
case x :: Nil => print(x)
case x :: xs =>
print(x)
this += sep
printSeparated(xs)
}
printSeparated(xs)
this
}
private def printTrees(trees: List[Tree], sep: String)(using elideThis: Option[Symbol]): this.type =
printList(trees, sep, (t: Tree) => printTree(t))
private def printTypeTrees(trees: List[TypeTree], sep: String)(using elideThis: Option[Symbol] = None): this.type =
printList(trees, sep, (t: TypeTree) => printTypeTree(t))
private def printTypes(trees: List[TypeRepr], sep: String)(using elideThis: Option[Symbol]): this.type = {
def printSeparated(list: List[TypeRepr]): Unit = list match {
case Nil =>
case x :: Nil => printType(x)
case x :: xs =>
printType(x)
this += sep
printSeparated(xs)
}
printSeparated(trees)
this
}
private def printSelectors(selectors: List[Selector]): this.type = {
def printSeparated(list: List[Selector]): Unit = list match {
case Nil =>
case x :: Nil => printSelector(x)
case x :: xs =>
printSelector(x)
this += ", "
printSeparated(xs)
}
this += "{"
printSeparated(selectors)
this += "}"
}
private def printCases(cases: List[CaseDef], sep: String): this.type = {
def printSeparated(list: List[CaseDef]): Unit = list match {
case Nil =>
case x :: Nil => printCaseDef(x)
case x :: xs =>
printCaseDef(x)
this += sep
printSeparated(xs)
}
printSeparated(cases)
this
}
private def printTypeCases(cases: List[TypeCaseDef], sep: String): this.type = {
def printSeparated(list: List[TypeCaseDef]): Unit = list match {
case Nil =>
case x :: Nil => printTypeCaseDef(x)
case x :: xs =>
printTypeCaseDef(x)
this += sep
printSeparated(xs)
}
printSeparated(cases)
this
}
private def printPatterns(cases: List[Tree], sep: String): this.type = {
def printSeparated(list: List[Tree]): Unit = list match {
case Nil =>
case x :: Nil => printPattern(x)
case x :: xs =>
printPattern(x)
this += sep
printSeparated(xs)
}
printSeparated(cases)
this
}
private def printTypesOrBounds(types: List[TypeRepr], sep: String)(using elideThis: Option[Symbol]): this.type = {
def printSeparated(list: List[TypeRepr]): Unit = list match {
case Nil =>
case x :: Nil => printType(x)
case x :: xs =>
printType(x)
this += sep
printSeparated(xs)
}
printSeparated(types)
this
}
private def printTargsDefs(targs: List[(TypeDef, TypeDef)], isDef:Boolean = true)(using elideThis: Option[Symbol]): Unit = {
if (!targs.isEmpty) {
def printSeparated(list: List[(TypeDef, TypeDef)]): Unit = list match {
case Nil =>
case x :: Nil => printTargDef(x, isDef = isDef)
case x :: xs =>
printTargDef(x, isDef = isDef)
this += ", "
printSeparated(xs)
}
inSquare(printSeparated(targs))
}
}
private def printTargDef(arg: (TypeDef, TypeDef), isMember: Boolean = false, isDef:Boolean = true)(using elideThis: Option[Symbol]): this.type = {
val (argDef, argCons) = arg
if (isDef) {
if (argDef.symbol.flags.is(Flags.Covariant)) {
this += highlightValDef("+")
} else if (argDef.symbol.flags.is(Flags.Contravariant)) {
this += highlightValDef("-")
}
}
this += argCons.name
argCons.rhs match {
case rhs: TypeBoundsTree => printBoundsTree(rhs)
case rhs: WildcardTypeTree =>
printType(rhs.tpe)
case rhs @ LambdaTypeTree(tparams, body) =>
def printParam(t: Tree /*TypeTree | TypeBoundsTree*/): Unit = t match {
case t: TypeBoundsTree => printBoundsTree(t)
case t: TypeTree => printTypeTree(t)
}
def printSeparated(list: List[TypeDef]): Unit = list match {
case Nil =>
case x :: Nil =>
this += x.name
printParam(x.rhs)
case x :: xs =>
this += x.name
printParam(x.rhs)
this += ", "
printSeparated(xs)
}
inSquare(printSeparated(tparams))
if (isMember) {
body match {
case MatchTypeTree(Some(bound), _, _) =>
this += " <: "
printTypeTree(bound)
case _ =>
}
this += " = "
printTypeOrBoundsTree(body)
}
else this
case rhs: TypeTree =>
this += " = "
printTypeTree(rhs)
}
}
private def printArgsDefs(args: List[ValDef])(using elideThis: Option[Symbol]): Unit = {
val argFlags = args match {
case Nil => Flags.EmptyFlags
case arg :: _ => arg.symbol.flags
}
if (argFlags.is(Flags.Erased | Flags.Given)) {
if (argFlags.is(Flags.Given)) this += " given"
if (argFlags.is(Flags.Erased)) this += " erased"
this += " "
}
inParens {
if (argFlags.is(Flags.Implicit) && !argFlags.is(Flags.Given)) this += "implicit "
def printSeparated(list: List[ValDef]): Unit = list match {
case Nil =>
case x :: Nil => printParamDef(x)
case x :: xs =>
printParamDef(x)
this += ", "
printSeparated(xs)
}
printSeparated(args)
}
}
private def printAnnotations(trees: List[Term])(using elideThis: Option[Symbol]): this.type = {
def printSeparated(list: List[Term]): Unit = list match {
case Nil =>
case x :: Nil => printAnnotation(x)
case x :: xs =>
printAnnotation(x)
this += " "
printSeparated(xs)
}
printSeparated(trees)
this
}
private def printParamDef(arg: ValDef)(using elideThis: Option[Symbol]): Unit = {
val name = splicedName(arg.symbol).getOrElse(arg.symbol.name)
val sym = arg.symbol.owner
if sym.isDefDef && sym.name == "<init>" then
val ClassDef(_, _, _, _, body) = sym.owner.tree
body.collectFirst {
case vdef @ ValDef(`name`, _, _) if vdef.symbol.flags.is(Flags.ParamAccessor) =>
if (!vdef.symbol.flags.is(Flags.Local)) {
var printedPrefix = false
if (vdef.symbol.flags.is(Flags.Override)) {
this += "override "
printedPrefix = true
}
printedPrefix |= printProtectedOrPrivate(vdef)
if (vdef.symbol.flags.is(Flags.Mutable)) this += highlightValDef("var ")
else if (printedPrefix || !vdef.symbol.flags.is(Flags.CaseAccessor)) this += highlightValDef("val ")
}
}
end if
this += highlightValDef(name) += ": "
printTypeTree(arg.tpt)
}
private def printCaseDef(caseDef: CaseDef): this.type = {
this += highlightValDef("case ")
printPattern(caseDef.pattern)
caseDef.guard match {
case Some(t) =>
this += " if "
printTree(t)
case None =>
}
this += highlightValDef(" =>")
indented {
caseDef.rhs match {
case Block(stats, expr) =>
printStats(stats, expr)(using None)
case body =>
this += lineBreak()
printTree(body)
}
}
this
}
private def printTypeCaseDef(caseDef: TypeCaseDef): this.type = {
this += highlightValDef("case ")
printTypeTree(caseDef.pattern)
this += highlightValDef(" => ")
printTypeTree(caseDef.rhs)
this
}
private def printPattern(pattern: Tree): this.type = pattern match {
case Wildcard() =>
this += "_"
case Bind(name, Wildcard()) =>
this += name
case Bind(name, Typed(Wildcard(), tpt)) =>
this += highlightValDef(name) += ": "
printTypeTree(tpt)
case Bind(name, pattern) =>
this += name += " @ "
printPattern(pattern)
case Unapply(fun, implicits, patterns) =>
val fun2 = fun match {
case TypeApply(fun2, _) => fun2
case _ => fun
}
fun2 match {
case Select(extractor, "unapply" | "unapplySeq") =>
printTree(extractor)
case Ident("unapply" | "unapplySeq") =>
this += fun.symbol.owner.fullName.stripSuffix("$")
case _ =>
throw new MatchError(fun.show(using Printer.TreeStructure))
}
inParens(printPatterns(patterns, ", "))
case Alternatives(trees) =>
inParens(printPatterns(trees, " | "))
case TypedOrTest(tree1, tpt) =>
tree1 match
case Wildcard() =>
this += "_: "
printTypeTree(tpt)
case _ =>
printPattern(tree1)
case v: Term =>
printTree(v)
case _ =>
throw new MatchError(pattern.show(using Printer.TreeStructure))
}
inline private val qc = '\''
inline private val qSc = '"'
def printConstant(const: Constant): this.type = const match {
case UnitConstant() => this += highlightLiteral("()")
case NullConstant() => this += highlightLiteral("null")
case BooleanConstant(v) => this += highlightLiteral(v.toString)
case ByteConstant(v) => this += highlightLiteral(v.toString)
case ShortConstant(v) => this += highlightLiteral(v.toString)
case IntConstant(v) => this += highlightLiteral(v.toString)
case LongConstant(v) => this += highlightLiteral(v.toString + "L")
case FloatConstant(v) => this += highlightLiteral(v.toString + "f")
case DoubleConstant(v) => this += highlightLiteral(v.toString)
case CharConstant(v) => this += highlightString(s"${qc}${escapedChar(v)}${qc}")
case StringConstant(v) => this += highlightString(s"${qSc}${escapedString(v)}${qSc}")
case ClassOfConstant(v) =>
this += "classOf"
inSquare(printType(v))
}
private def printTypeOrBoundsTree(tpt: Tree)(using elideThis: Option[Symbol] = None): this.type = tpt match {
case TypeBoundsTree(lo, hi) =>
this += "_ >: "
printTypeTree(lo)
this += " <: "
printTypeTree(hi)
case tpt: WildcardTypeTree =>
printType(tpt.tpe)
case tpt: TypeTree =>
printTypeTree(tpt)
}
/** Print type tree
*
* @param elideThis Should printing elide `C.this` for the given class `C`?
* None means no eliding.
*
* Self type annotation and types in parent list should elide current class
* prefix `C.this` to avoid type checking errors.
*/
private def printTypeTree(tree: TypeTree)(using elideThis: Option[Symbol] = None): this.type = tree match {
case Inferred() =>
// TODO try to move this logic into `printType`
def printTypeAndAnnots(tpe: TypeRepr): this.type = tpe match {
case AnnotatedType(tp, annot) =>
printTypeAndAnnots(tp)
this += " "
printAnnotation(annot)
case tpe: TypeRef if tpe.typeSymbol == Symbol.requiredClass("scala.runtime.Null$") || tpe.typeSymbol == Symbol.requiredClass("scala.runtime.Nothing$") =>
// scala.runtime.Null$ and scala.runtime.Nothing$ are not modules, those are their actual names
printType(tpe)
case tpe: TermRef if tpe.termSymbol.isClassDef && tpe.termSymbol.name.endsWith("$") =>
printType(tpe)
this += ".type"
case tpe: TypeRef if tpe.typeSymbol.isClassDef && tpe.typeSymbol.name.endsWith("$") =>
printType(tpe)
this += ".type"
case tpe @ TermRef(sym, _) =>
printType(tpe)
this += ".type"
case tpe => printType(tpe)
}
printTypeAndAnnots(tree.tpe)
case TypeIdent(name) =>
printType(tree.tpe)
case TypeSelect(qual, name) =>
printTree(qual) += "." += highlightTypeDef(name)
case TypeProjection(qual, name) =>
printTypeTree(qual) += "#" += highlightTypeDef(name)
case Singleton(ref) =>
printTree(ref)
ref match {
case Literal(_) => this
case _ => this += ".type"
}
case Refined(tpt, refinements) =>
printTypeTree(tpt)
inBlock(printTrees(refinements, "; "))
case Applied(tpt, args) =>
printTypeTree(tpt)
inSquare(printTrees(args, ", "))
case Annotated(tpt, annot) =>
val Annotation(ref, args) = annot
ref.tpe match {
case tpe: TypeRef if tpe.typeSymbol == Symbol.requiredClass("scala.annotation.internal.Repeated") =>
val Types.Sequence(tp) = tpt.tpe
printType(tp)
this += highlightTypeDef("*")
case _ =>
printTypeTree(tpt)
this += " "
printAnnotation(annot)
}
case MatchTypeTree(bound, selector, cases) =>
printTypeTree(selector)
this += highlightKeyword(" match ")
inBlock(printTypeCases(cases, lineBreak()))
case ByName(result) =>
this += highlightTypeDef("=> ")
printTypeTree(result)
case LambdaTypeTree(tparams, body) =>
printTargsDefs(tparams.zip(tparams), isDef = false)
this += highlightTypeDef(" =>> ")
printTypeOrBoundsTree(body)
case TypeBind(name, _) =>
this += highlightTypeDef(name)
case TypeBlock(_, tpt) =>
printTypeTree(tpt)
case _ =>
throw new MatchError(tree.show(using Printer.TreeStructure))
}
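// For example, when a caller passes elideThis = Some(cSym) while printing the parents or
// self type of a class C, a prefix such as C.this.Inner is rendered as just Inner
// (the ThisType qualifier is skipped in printType below); with None the full prefix is kept.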
/** Print type
*
* @param elideThis Should printing elide `C.this` for the given class `C`?
* None means no eliding.
*
* Self type annotation and types in parent list should elide current class
* prefix `C.this` to avoid type checking errors.
*/
def printType(tpe: TypeRepr)(using elideThis: Option[Symbol] = None): this.type = tpe match {
case ConstantType(const) =>
printConstant(const)
case tpe: TypeRef =>
val sym = tpe.typeSymbol
if fullNames then
tpe.qualifier match {
case ThisType(tp) if tp.typeSymbol == defn.RootClass || tp.typeSymbol == defn.EmptyPackageClass =>
case NoPrefix() =>
if (sym.owner.flags.is(Flags.Package)) {
// TODO should these be in the prefix? These are at least `scala`, `java` and `scala.collection`.
val packagePath = sym.owner.fullName.stripPrefix("<root>").stripPrefix("<empty>").stripPrefix(".")
if (packagePath != "")
this += packagePath += "."
}
case prefix: TermRef if prefix.termSymbol.isClassDef =>
printType(prefix)
this += "#"
case prefix: TypeRef if prefix.typeSymbol.isClassDef =>
printType(prefix)
this += "#"
case ThisType(TermRef(cdef, _)) if elideThis.nonEmpty && cdef == elideThis.get =>
case ThisType(TypeRef(cdef, _)) if elideThis.nonEmpty && cdef == elideThis.get =>
case prefix: TypeRepr =>
printType(prefix)
this += "."
}
this += highlightTypeDef(sym.name.stripSuffix("$"))
case TermRef(prefix, name) =>
if fullNames then
prefix match {
case NoPrefix() =>
this += highlightTypeDef(name)
case ThisType(tp) if tp.typeSymbol == defn.RootClass || tp.typeSymbol == defn.EmptyPackageClass =>
this += highlightTypeDef(name)
case _ =>
printType(prefix)
if (name != "package")
this += "." += highlightTypeDef(name)
this
}
else
this += highlightTypeDef(name)
case tpe @ Refinement(_, _, _) =>
printRefinement(tpe)
case AppliedType(tp, args) =>
tp match {
case tp: TypeLambda =>
this += "("
printType(tp)
this += ")"
inSquare(printTypesOrBounds(args, ", "))
case tp: TypeRef if tp.typeSymbol == Symbol.requiredClass("scala.<repeated>") =>
this += "_*"
case _ =>
printType(tp)
inSquare(printTypesOrBounds(args, ", "))
}
case AnnotatedType(tp, annot) =>
val Annotation(ref, args) = annot
printType(tp)
this += " "
printAnnotation(annot)
case AndType(left, right) =>
printType(left)
this += highlightTypeDef(" & ")
printType(right)
case OrType(left, right) =>
printType(left)
this += highlightTypeDef(" | ")
printType(right)
case MatchType(bound, scrutinee, cases) =>
printType(scrutinee)
this += highlightKeyword(" match ")
inBlock(printTypes(cases, lineBreak()))
case ByNameType(tp) =>
this += highlightTypeDef(" => ")
printType(tp)
case ThisType(tp) =>
tp match {
case tp: TypeRef if !tp.typeSymbol.flags.is(Flags.Module) =>
printFullClassName(tp)
this += highlightTypeDef(".this")
case TypeRef(prefix, name) if name.endsWith("$") =>
if (fullNames){
prefix match {
case NoPrefix() =>
case ThisType(tp) if tp.typeSymbol == defn.RootClass || tp.typeSymbol == defn.EmptyPackageClass =>
case _ =>
printType(prefix)
this += "."
}
}
this += highlightTypeDef(name.stripSuffix("$"))
case _ =>
printType(tp)
}
case SuperType(thistpe, supertpe) =>
printType(supertpe)
this += highlightTypeDef(".super")
case TypeLambda(paramNames, tparams, body) =>
inSquare(printMethodicTypeParams(paramNames, tparams))
this += highlightTypeDef(" => ")
printType(body)
case ParamRef(lambda, idx) =>
lambda match {
case MethodType(params, _, _) => this += params(idx)
case PolyType(params, _, _) => this += params(idx)
case TypeLambda(params, _, _) => this += params(idx)
}
case RecursiveType(tpe) =>
printType(tpe)
case RecursiveThis(_) =>
this += highlightTypeDef("this")
case tpe: MethodType =>
this += "("
printList(tpe.paramNames.zip(tpe.paramTypes), ", ",
(x: (String, TypeRepr)) => (this += x._1 += ": ").printType(x._2))
this += ")"
printType(tpe.resType)
case tpe: PolyType =>
this += "["
printList(tpe.paramNames.zip(tpe.paramBounds), ", ",
(x: (String, TypeBounds)) => (this += x._1 += " ").printType(x._2))
this += "]"
printType(tpe.resType)
case tpe@TypeBounds(lo, hi) =>
this += "_ >: "
printType(lo)
this += " <: "
printType(hi)
case MatchCase(pat, rhs) =>
this += "case "
printType(pat)
this += " => "
printType(rhs)
case _ =>
throw new MatchError(tpe.show(using Printer.TypeReprStructure))
}
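// Illustrative renderings: AndType(A, B) prints A & B, OrType(A, B) prints A | B,
// ByNameType(T) prints " => T", and an AppliedType such as List applied to Int prints
// List[Int] (possibly preceded by its package prefix when fullNames is enabled).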
private def printSelector(sel: Selector): this.type = sel match {
case SimpleSelector(name) => this += name
case OmitSelector(name) => this += name += " => _"
case RenameSelector(name, newName) => this += name += " => " += newName
case GivenSelector(bound) =>
bound match
case Some(tpt) =>
this += "given "
printTree(tpt)
case _ =>
this += "given"
}
private def printDefinitionName(tree: Definition): this.type = tree match {
case ValDef(name, _, _) => this += highlightValDef(name)
case DefDef(name, _, _, _) => this += highlightValDef(name)
case ClassDef(name, _, _, _, _) => this += highlightTypeDef(name.stripSuffix("$"))
case TypeDef(name, _) => this += highlightTypeDef(name)
}
private def printAnnotation(annot: Term)(using elideThis: Option[Symbol]): this.type = {
val Annotation(ref, args) = annot
this += "@"
printTypeTree(ref)
if (args.isEmpty)
this
else
inParens(printTrees(args, ", "))
}
private def printDefAnnotations(definition: Definition)(using elideThis: Option[Symbol]): this.type = {
val annots = definition.symbol.annotations.filter {
case Annotation(annot, _) =>
val sym = annot.tpe.typeSymbol
sym != Symbol.requiredClass("scala.forceInline") &&
sym.maybeOwner != Symbol.requiredPackage("scala.annotation.internal")
case x => throw new MatchError(x.show(using Printer.TreeStructure))
}
printAnnotations(annots)
if (annots.nonEmpty) this += " "
else this
}
private def printRefinement(tpe: TypeRepr)(using elideThis: Option[Symbol]): this.type = {
def printMethodicType(tp: TypeRepr): Unit = tp match {
case tp @ MethodType(paramNames, params, res) =>
inParens(printMethodicTypeParams(paramNames, params))
printMethodicType(res)
case tp @ TypeLambda(paramNames, params, res) =>
inSquare(printMethodicTypeParams(paramNames, params))
printMethodicType(res)
case ByNameType(t) =>
this += ": "
printType(t)
case tp: TypeRepr =>
this += ": "
printType(tp)
}
def rec(tp: TypeRepr): Unit = tp match {
case Refinement(parent, name, info) =>
rec(parent)
indented {
this += lineBreak()
info match {
case info: TypeBounds =>
this += highlightKeyword("type ") += highlightTypeDef(name)
printBounds(info)
case ByNameType(_) | MethodType(_, _, _) | TypeLambda(_, _, _) =>
this += highlightKeyword("def ") += highlightTypeDef(name)
printMethodicType(info)
case info: TypeRepr =>
this += highlightKeyword("val ") += highlightValDef(name)
printMethodicType(info)
}
}
case tp =>
printType(tp)
this += " {"
}
rec(tpe)
this += lineBreak() += "}"
}
private def printMethodicTypeParams(paramNames: List[String], params: List[TypeRepr])(using elideThis: Option[Symbol]): Unit = {
def printInfo(info: TypeRepr) = info match {
case info: TypeBounds => printBounds(info)
case info: TypeRepr =>
this += ": "
printType(info)
}
def printSeparated(list: List[(String, TypeRepr)]): Unit = list match {
case Nil =>
case (name, info) :: Nil =>
this += name
printInfo(info)
case (name, info) :: xs =>
this += name
printInfo(info)
this += ", "
printSeparated(xs)
}
printSeparated(paramNames.zip(params))
}
private def printBoundsTree(bounds: TypeBoundsTree)(using elideThis: Option[Symbol]): this.type = {
bounds.low match {
case Inferred() =>
case low =>
this += " >: "
printTypeTree(low)
}
bounds.hi match {
case Inferred() => this
case hi =>
this += " <: "
printTypeTree(hi)
}
}
private def printBounds(bounds: TypeBounds)(using elideThis: Option[Symbol]): this.type = {
this += " >: "
printType(bounds.low)
this += " <: "
printType(bounds.hi)
}
private def printProtectedOrPrivate(definition: Definition): Boolean = {
var prefixWasPrinted = false
def printWithin(within: TypeRepr) = within match {
case TypeRef(_, name) => this += name
case _ => printFullClassName(within)
}
if (definition.symbol.flags.is(Flags.Protected)) {
this += highlightKeyword("protected")
definition.symbol.protectedWithin match {
case Some(within) =>
inSquare(printWithin(within))
case _ =>
}
prefixWasPrinted = true
} else {
definition.symbol.privateWithin match {
case Some(within) =>
this += highlightKeyword("private")
inSquare(printWithin(within))
prefixWasPrinted = true
case _ =>
}
}
if (prefixWasPrinted)
this += " "
prefixWasPrinted
}
private def printFullClassName(tp: TypeRepr): Unit = {
def printClassPrefix(prefix: TypeRepr): Unit = prefix match {
case TypeRef(prefix2, name) if fullNames =>
printClassPrefix(prefix2)
this += name += "."
case _ =>
}
val TypeRef(prefix, name) = tp
printClassPrefix(prefix)
this += name
}
private def +=(x: Boolean): this.type = { sb.append(x); this }
private def +=(x: Byte): this.type = { sb.append(x); this }
private def +=(x: Short): this.type = { sb.append(x); this }
private def +=(x: Int): this.type = { sb.append(x); this }
private def +=(x: Long): this.type = { sb.append(x); this }
private def +=(x: Float): this.type = { sb.append(x); this }
private def +=(x: Double): this.type = { sb.append(x); this }
private def +=(x: Char): this.type = { sb.append(x); this }
private def +=(x: String): this.type = { sb.append(x); this }
private def escapedChar(ch: Char): String = (ch: @switch) match {
case '\b' => "\\b"
case '\t' => "\\t"
case '\n' => "\\n"
case '\f' => "\\f"
case '\r' => "\\r"
case '"' => "\\\""
case '\'' => "\\\'"
case '\\' => "\\\\"
case _ => if ch.isControl then f"${"\\"}u${ch.toInt}%04x" else String.valueOf(ch).nn
}
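// Illustrative behaviour: escapedChar('\n') returns the two characters \n, escapedChar('"')
// returns \", escapedChar('\\') returns \\, and other control characters become a unicode
// escape of the form \u0001.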
private def escapedString(str: String): String = str flatMap escapedChar
private[this] val names = collection.mutable.Map.empty[Symbol, String]
private[this] val namesIndex = collection.mutable.Map.empty[String, Int]
private def splicedName(sym: Symbol): Option[String] = {
if sym.owner.isClassDef then None
else names.get(sym).orElse {
val name0 = sym.name
val index = namesIndex.getOrElse(name0, 1)
namesIndex(name0) = index + 1
val name =
if index == 1 then name0
else s"`$name0${index.toString.toCharArray.nn.map {x => (x - '0' + '₀').toChar}.mkString}`"
names(sym) = name
Some(name)
}
}
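// Spliced symbols that share a source name are disambiguated by index: the first symbol named
// x keeps the plain name, while later ones are printed backquoted with subscript digits,
// e.g. `x₂`, `x₃`. Symbols owned by a class definition are never renamed (None is returned).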
private object SpecialOp {
def unapply(arg: Tree): Option[(String, List[Term])] = arg match {
case arg @ Apply(fn, args) =>
fn.tpe match {
case tpe @ TermRef(ThisType(TypeRef(_, name)), name2) if name == "<special-ops>" =>
Some((name2, args))
case _ => None
}
case _ => None
}
}
private object Annotation {
def unapply(arg: Tree): Option[(TypeTree, List[Term])] = arg match {
case New(annot) => Some((annot, Nil))
case Apply(Select(New(annot), "<init>"), args) => Some((annot, args))
case Apply(TypeApply(Select(New(annot), "<init>"), targs), args) => Some((annot, args))
case _ => None
}
}
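// Matches annotation construction trees of the shapes `new Annot`, `new Annot(args)` and
// `new Annot[T](args)`, extracting the annotation's type tree together with its arguments.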
// TODO Provide some of these in scala.tasty.Reflection.scala and implement them using checks on symbols for performance
private object Types {
object Sequence {
def unapply(tpe: TypeRepr): Option[TypeRepr] = tpe match {
case AppliedType(seq, (tp: TypeRepr) :: Nil)
if seq.typeSymbol == Symbol.requiredClass("scala.collection.Seq") || seq.typeSymbol == Symbol.requiredClass("scala.collection.immutable.Seq") =>
Some(tp)
case _ => None
}
}
object Repeated {
def unapply(tpe: TypeRepr): Option[TypeRepr] = tpe match {
case AppliedType(rep, (tp: TypeRepr) :: Nil) if rep.typeSymbol == Symbol.requiredClass("scala.<repeated>") => Some(tp)
case _ => None
}
}
}
private object PackageObject {
def unapply(tree: Tree): Option[Tree] = tree match {
case PackageClause(_, ValDef("package", _, _) :: body :: Nil) => Some(body)
case _ => None
}
}
}
}
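// Usage sketch (illustrative, not part of this file): within a macro, this printer is normally
// reached through the reflection Printer instances rather than instantiated directly, e.g.:
//
//   import scala.quoted.*
//   def render[T: Type](expr: Expr[T])(using Quotes): String =
//     import quotes.reflect.*
//     expr.asTerm.show(using Printer.TreeShortCode)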
| dotty-staging/dotty | compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala | Scala | apache-2.0 | 52,087 |