Dataset columns:
  code        string (length 5 to 1M)
  repo_name   string (length 5 to 109)
  path        string (length 6 to 208)
  language    string (1 class)
  license     string (15 classes)
  size        int64 (5 to 1M)
/**
 * Copyright 2015 Thomson Reuters
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cmwell.bg.test

import java.util.Properties

import akka.actor.{ActorRef, ActorSystem}
import cmwell.bg.{CMWellBGActor, ShutDown}
import cmwell.common.{CommandSerializer, OffsetsService, WriteCommand, ZStoreOffsetsService}
import cmwell.domain.{FieldValue, ObjectInfoton}
import cmwell.driver.Dao
import cmwell.fts._
import cmwell.irw.IRWService
import cmwell.zstore.ZStore
import com.typesafe.config.{Config, ConfigFactory, ConfigValueFactory}
import com.typesafe.scalalogging.LazyLogging
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest
import org.elasticsearch.common.unit.TimeValue
import org.scalatest.{BeforeAndAfterAll, DoNotDiscover, FlatSpec, Matchers}

import scala.concurrent.Await
import scala.concurrent.duration._
import scala.io.Source

/**
 * Created by israel on 13/09/2016.
 */
@DoNotDiscover
class BGResilienceSpec extends FlatSpec with BeforeAndAfterAll with Matchers with LazyLogging {

  var kafkaProducer: KafkaProducer[Array[Byte], Array[Byte]] = _
  var cmwellBGActor: ActorRef = _
  var dao: Dao = _
  var testIRWMockupService: IRWService = _
  var irwService: IRWService = _
  var zStore: ZStore = _
  var offsetsService: OffsetsService = _
  var ftsServiceES: FTSServiceNew = _
  var bgConfig: Config = _
  var actorSystem: ActorSystem = _

  import concurrent.ExecutionContext.Implicits.global

  override def beforeAll = {
    val producerProperties = new Properties
    producerProperties.put("bootstrap.servers", "localhost:9092")
    producerProperties.put("key.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer")
    producerProperties.put("value.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer")
    kafkaProducer = new KafkaProducer[Array[Byte], Array[Byte]](producerProperties)

    dao = Dao("Test", "data2")
    testIRWMockupService = FailingIRWServiceMockup(dao, 13)
    zStore = ZStore(dao)
    irwService = IRWService.newIRW(dao, 25, true, 0.seconds)
    offsetsService = new ZStoreOffsetsService(zStore)
    ftsServiceES = FailingFTSServiceMockup("es.test.yml", 5)

    // wait for green status
    ftsServiceES.client.admin().cluster()
      .prepareHealth()
      .setWaitForGreenStatus()
      .setTimeout(TimeValue.timeValueMinutes(5))
      .execute()
      .actionGet()

    // delete all existing indices
    ftsServiceES.client.admin().indices().delete(new DeleteIndexRequest("_all"))

    // load indices template
    val indicesTemplate = Source.fromURL(this.getClass.getResource("/indices_template_new.json")).getLines.reduceLeft(_ + _)
    ftsServiceES.client.admin().indices().preparePutTemplate("indices_template").setSource(indicesTemplate).execute().actionGet()

    // create current index
    ftsServiceES.client.admin().indices().prepareCreate("cm_well_0").execute().actionGet()
    ftsServiceES.client.admin().indices().prepareAliases()
      .addAlias("cm_well_0", "cm_well_all")
      .addAlias("cm_well_0", "cm_well_latest")
      .execute().actionGet()

    // Config is immutable, so keep the result of withValue rather than discarding it
    bgConfig = ConfigFactory.load.withValue("cmwell.bg.esActionsBulkSize", ConfigValueFactory.fromAnyRef(100))

    actorSystem = ActorSystem("cmwell-bg-test-system")
    cmwellBGActor = actorSystem.actorOf(CMWellBGActor.props(0, bgConfig, testIRWMockupService, ftsServiceES, zStore, offsetsService))

    // scalastyle:off
    println("waiting 10 seconds for all components to load")
    Thread.sleep(10000)
    // scalastyle:on
  }

  "Resilient BG" should "process commands as usual on circumvented BGActor (periodically failing IRWService) after suspending and resuming" in {
    logger info "waiting 10 seconds for circumvented BGActor to start"
    Thread.sleep(10000)
    val numOfCommands = 1500

    // prepare sequence of writeCommands
    val writeCommands = Seq.tabulate(numOfCommands) { n =>
      val infoton = ObjectInfoton(
        path = s"/cmt/cm/bg-test/circumvented_bg/info$n",
        dc = "dc",
        indexTime = None,
        fields = Some(Map("games" -> Set(FieldValue("Taki"), FieldValue("Race")))),
        protocol = None)
      WriteCommand(infoton)
    }

    // make kafka records out of the commands
    val pRecords = writeCommands.map { writeCommand =>
      val commandBytes = CommandSerializer.encode(writeCommand)
      new ProducerRecord[Array[Byte], Array[Byte]]("persist_topic", commandBytes)
    }

    // send them all
    pRecords.foreach { kafkaProducer.send(_) }

    // scalastyle:off
    println("waiting for 10 seconds")
    Thread.sleep(10000)
    // scalastyle:on

    for (i <- 0 until numOfCommands) {
      val nextResult = Await.result(irwService.readPathAsync(s"/cmt/cm/bg-test/circumvented_bg/info$i"), 5.seconds)
      withClue(nextResult, s"/cmt/cm/bg-test/circumvented_bg/info$i") {
        nextResult should not be empty
      }
    }

    for (i <- 0 until numOfCommands) {
      val searchResponse = Await.result(
        ftsServiceES.search(
          pathFilter = None,
          fieldsFilter = Some(SingleFieldFilter(Must, Equals, "system.path", Some(s"/cmt/cm/bg-test/circumvented_bg/info$i"))),
          datesFilter = None,
          paginationParams = PaginationParams(0, 200)
        ),
        10.seconds
      )
      withClue(s"/cmt/cm/bg-test/circumvented_bg/info$i") {
        searchResponse.infotons.size should equal(1)
      }
    }
  }

  override def afterAll() = {
    logger debug "afterAll: sending Shutdown"
    cmwellBGActor ! ShutDown
    Thread.sleep(10000)
    ftsServiceES.shutdown()
    testIRWMockupService = null
    irwService = null
  }
}
bryaakov/CM-Well
server/cmwell-bg/src/test/scala/cmwell/bg/test/BGResilienceSpec.scala
Scala
apache-2.0
6,231
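The spec above exercises CM-Well's ingest path: a WriteCommand is serialized with CommandSerializer and published to the persist_topic, from which the background actor indexes it. The sketch below distills that producer side only; it is not part of the repository, and the infoton path, field values, and broker address are illustrative stand-ins mirroring the test's own setup.

import java.util.Properties
import cmwell.common.{CommandSerializer, WriteCommand}
import cmwell.domain.{FieldValue, ObjectInfoton}
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}

object PublishOneCommand extends App {
  // same serializer configuration as the spec's beforeAll
  val props = new Properties
  props.put("bootstrap.servers", "localhost:9092")
  props.put("key.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer")
  props.put("value.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer")
  val producer = new KafkaProducer[Array[Byte], Array[Byte]](props)

  // build one infoton, wrap it in a WriteCommand, and encode it to bytes
  val infoton = ObjectInfoton(
    path = "/example/info0",                                  // illustrative path
    dc = "dc",
    indexTime = None,
    fields = Some(Map("games" -> Set(FieldValue("Taki")))),
    protocol = None)
  val bytes = CommandSerializer.encode(WriteCommand(infoton))

  // publish to the topic the BG actor consumes from
  producer.send(new ProducerRecord[Array[Byte], Array[Byte]]("persist_topic", bytes))
  producer.close()
}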
package bot.line.model.event

case class Events(events: List[Event])
xoyo24/akka-http-line-bot
src/main/scala/bot/line/model/event/Events.scala
Scala
mit
69
package org.scalarules.dsl.nl.grammar

import org.scalarules.derivations.{DefaultDerivation, Derivation, SubRunData, SubRunDerivation}
import org.scalarules.dsl.nl.grammar.DslCondition.{andCombineConditions, factFilledCondition}
import org.scalarules.dsl.nl.grammar.meta.DslMacros
import org.scalarules.engine
import org.scalarules.facts.{Fact, ListFact, SingularFact}
import org.scalarules.utils.{FileSourcePosition, SourcePosition, SourceUnknown}

import scala.language.experimental.macros

//scalastyle:off method.name

object Specificatie {
  def apply[T](dslCondition: DslCondition, output: Fact[T], dslEvaluation: DslEvaluation[T], sourcePosition: SourcePosition): Derivation = {
    val condition = andCombineConditions(dslCondition, dslEvaluation.condition).condition
    val input = dslCondition.facts.toList ++ dslEvaluation.condition.facts
    DefaultDerivation(input, output, condition, dslEvaluation.evaluation, sourcePosition, dslCondition.sourcePosition)
  }
}

/* ***********************************************************************************************************************************************************
 *
 * The DSL syntax follows these rules:
 *
 * Derivation ::= `Gegeven` `(` Condition `)` `Bereken` (SingularBerekenStart | ListBerekenStart)
 *
 * Condition ::= Fact `is` (Value | Fact | Aanwezig) [en | of]
 *
 * SingularBerekenStart ::= Fact `is` DslEvaluation
 * ListBerekenStart ::= Fact `is` (DslListEvaluation | SubRunData)
 *
 * DslListAggregationOperation ::= (DslNumericalListAggregator) DslListEvaluation
 * DslNumericalListAggregator ::= (`totaal van` | `gemiddelde van`)
 *
 * DslEvaluation
 * LijstOpbouwConstruct ::= [Fact] `bevat` `resultaten` `van` [ElementBerekening] `over` [Fact]
 */
class GegevenWord(val initialCondition: DslCondition, val position: SourcePosition = SourceUnknown()) {
  val condition: DslCondition = position match {
    case SourceUnknown() => initialCondition
    case fsp @ FileSourcePosition(_, _, _, _, _) => {
      val DslCondition(facts, condition, _) = initialCondition
      DslCondition(facts, condition, position)
    }
  }

  def Bereken[A](fact: SingularFact[A]): SingularBerekenStart[A] = macro DslMacros.captureSingularBerekenSourcePositionMacroImpl[A]
  def Bereken[A](fact: ListFact[A]): ListBerekenStart[A] = macro DslMacros.captureListBerekenSourcePositionMacroImpl[A]
}

class SingularBerekenStart[T](condition: DslCondition, output: Fact[T], berekeningenAcc: List[Derivation], sourcePosition: SourcePosition) {
  def is[T1 >: T](operation: DslEvaluation[T1]): BerekeningAccumulator =
    new BerekeningAccumulator(condition, Specificatie(condition, output, operation, sourcePosition) :: berekeningenAcc)
}

class ListBerekenStart[T](condition: DslCondition, output: Fact[List[T]], berekeningenAcc: List[Derivation], sourcePosition: SourcePosition) {
  def is[T1 <: T](operation: DslEvaluation[List[T1]]): BerekeningAccumulator =
    new BerekeningAccumulator(condition, Specificatie(condition, output, operation, sourcePosition) :: berekeningenAcc)

  def is[B](subRunData: SubRunData[T, B]): BerekeningAccumulator = {
    val c = andCombineConditions(condition, factFilledCondition(subRunData.inputList)).condition
    val input = subRunData.inputList :: condition.facts.toList
    val subRunDerivation: SubRunDerivation = SubRunDerivation(input, output, c, subRunData)
    new BerekeningAccumulator(condition, subRunDerivation :: berekeningenAcc)
  }

  /**
   * Specifies how a list is built up from the results of an element calculation that is executed for each element of an input list.
   *
   * Syntax: <uitvoer> bevat resultaten van <ElementBerekening> over <invoer>
   */
  def bevat(r: ResultatenWord): LijstOpbouwConstruct[T] = new LijstOpbouwConstruct[T](condition, output, berekeningenAcc)
}

class ResultatenWord

class LijstOpbouwConstruct[Uit](condition: DslCondition, output: Fact[List[Uit]], berekeningAcc: List[Derivation]) {
  def van[I](uitTeVoerenElementBerekening: ElementBerekeningReference[I, Uit]): LijstOpbouwConstructMetBerekening[I] =
    new LijstOpbouwConstructMetBerekening[I](uitTeVoerenElementBerekening.berekening)

  class LijstOpbouwConstructMetBerekening[In](elementBerekening: ElementBerekening[In, Uit]) {
    def over(iterator: ListFact[In]): BerekeningAccumulator = {
      val contextAddition: In => engine.Context = (x: In) => Map(elementBerekening.invoerFact -> x)
      val subRunData = new SubRunData[Uit, In](elementBerekening.berekeningen, contextAddition, iterator, elementBerekening.uitvoerFact)
      val topLevelCondition = andCombineConditions(condition, factFilledCondition(subRunData.inputList)).condition
      new BerekeningAccumulator(condition, SubRunDerivation(subRunData.inputList :: condition.facts.toList, output, topLevelCondition, subRunData) :: berekeningAcc)
    }
  }
}

// --- supports naming the invoer and uitvoer inside an ElementBerekening

class InvoerWord {
  def is[In](iteratee: Fact[In]): InvoerSpecification[In] = new InvoerSpecification[In](iteratee)
}

class UitvoerWord {
  def is[Uit](iteratee: Fact[Uit]): UitvoerSpecification[Uit] = new UitvoerSpecification[Uit](iteratee)
}

class BerekeningAccumulator private[grammar](val condition: DslCondition, val derivations: List[Derivation]) {
  def en[A](fact: SingularFact[A]): SingularBerekenStart[A] = macro DslMacros.captureSingularBerekenSourcePositionWithAccumulatorMacroImpl[A]
  def en[A](fact: ListFact[A]): ListBerekenStart[A] = macro DslMacros.captureListBerekenSourcePositionWithAccumulatorMacroImpl[A]
}
scala-rules/rule-engine
engine/src/main/scala/org/scalarules/dsl/nl/grammar/BerekeningFlow.scala
Scala
mit
5,564
package de.unihamburg.vsis.sddf.pipe.optimize

import org.apache.spark.rdd.RDD
import org.apache.spark.storage.StorageLevel

import de.unihamburg.vsis.sddf.pipe.PipeElementPassthrough
import de.unihamburg.vsis.sddf.pipe.context.AbstractPipeContext
import de.unihamburg.vsis.sddf.pipe.context.SddfPipeContext

class PipeOptimizePersistAndName[A](rddname: String = null, newLevel: StorageLevel = StorageLevel.MEMORY_ONLY)
  extends PipeElementPassthrough[RDD[A]] {

  def substep(input: RDD[A])(implicit pipeContext: AbstractPipeContext): Unit = {
    pipeContext match {
      case pc: SddfPipeContext => {
        input.persist(newLevel)
        if (rddname != null) {
          input.name = rddname
          pc.persistedRDDs += (rddname -> input)
          analysable.values += ("name" -> rddname)
        }
      }
      case _ => {
        throw new Exception("Wrong AbstractPipeContext type.")
      }
    }
  }
}

object PipeOptimizePersistAndName {
  def apply[A](rddname: String = null, newLevel: StorageLevel = StorageLevel.MEMORY_ONLY) = {
    new PipeOptimizePersistAndName[A](rddname, newLevel)
  }
}
numbnut/sddf
src/main/scala/de/unihamburg/vsis/sddf/pipe/optimize/PipeOptimizePersistAndName.scala
Scala
gpl-3.0
1,114
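The companion apply above makes it cheap to drop a persist-and-name step into an sddf pipeline. A minimal construction sketch follows; the element type, RDD name, and storage level are illustrative assumptions, and the surrounding pipeline wiring (which ultimately invokes substep with the flowing RDD) is not shown.

import org.apache.spark.storage.StorageLevel
import de.unihamburg.vsis.sddf.pipe.optimize.PipeOptimizePersistAndName

// Build the pipe element via the companion apply defined above. When the
// pipeline runs, substep persists the incoming RDD at the given level,
// names it, and registers it in the SddfPipeContext under that name.
val cacheParsedTuples = PipeOptimizePersistAndName[(Long, String)](
  rddname = "parsedTuples",                 // assumed name, used as the registry key
  newLevel = StorageLevel.MEMORY_AND_DISK)  // assumed level; default is MEMORY_ONLY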
import sbt._

object Dependencies {

  object V {
    val macroParadise = "2.0.1"
    val datomic = "0.9.5130"
    val specs2 = "2.3.12"
    val scalaTest = "2.2.4"
  }

  object Compile {
    val datomic = "com.datomic" % "datomic-free" % V.datomic % "provided" exclude("org.slf4j", "slf4j-nop")
  }

  object Test {
    val specs2 = "org.specs2" %% "specs2" % V.specs2 % "test"
  }

  object IntegrationTest {
    val scalaTest = "org.scalatest" %% "scalatest" % V.scalaTest % "it"
  }
}
Enalmada/datomisca
project/Dependencies.scala
Scala
apache-2.0
546
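A Dependencies object like this lives in project/ and is referenced from the build definition. The sketch below shows one typical way to wire these values into a module in build.sbt; the project name and directory are assumptions, not part of this repository, while the Dependencies.* members come from the file above.

// build.sbt (sketch) -- consumes project/Dependencies.scala; "core" is an assumed module name
lazy val core = (project in file("core"))
  .configs(IntegrationTest)          // enables the "it" scope used by Dependencies.IntegrationTest
  .settings(
    Defaults.itSettings,
    libraryDependencies ++= Seq(
      Dependencies.Compile.datomic,
      Dependencies.Test.specs2,
      Dependencies.IntegrationTest.scalaTest
    )
  )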
package metronome import metronome.temporal.{TemporalAdjuster, TemporalAccessor} /** * A day-of-week, such as 'Tuesday'. * <p> * {@code DayOfWeek} is an enum representing the 7 days of the week - * Monday, Tuesday, Wednesday, Thursday, Friday, Saturday and Sunday. * <p> * In addition to the textual enum name, each day-of-week has an {@code int} value. * The {@code int} value follows the ISO-8601 standard, from 1 (Monday) to 7 (Sunday). * It is recommended that applications use the enum rather than the {@code int} value * to ensure code clarity. * <p> * This enum provides access to the localized textual form of the day-of-week. * Some locales also assign different numeric values to the days, declaring * Sunday to have the value 1, however this class provides no support for this. * See {@link WeekFields} for localized week-numbering. * <p> * <b>Do not use {@code ordinal()} to obtain the numeric representation of {@code DayOfWeek}. * Use {@code getValue()} instead.</b> * <p> * This enum represents a common concept that is found in many calendar systems. * As such, this enum may be used by any calendar system that has the day-of-week * concept defined exactly equivalent to the ISO calendar system. * * @implSpec * This is an immutable and thread-safe enum. * * @since 1.8 */ object DayOfWeek { /** * Obtains an instance of {@code DayOfWeek} from an {@code int} value. * <p> * {@code DayOfWeek} is an enum representing the 7 days of the week. * This factory allows the enum to be obtained from the {@code int} value. * The {@code int} value follows the ISO-8601 standard, from 1 (Monday) to 7 (Sunday). * * @param dayOfWeek the day-of-week to represent, from 1 (Monday) to 7 (Sunday) * @return the day-of-week singleton, not null * @throws DateTimeException if the day-of-week is invalid */ def of(dayOfWeek: Int): DayOfWeek = { if (dayOfWeek < 1 || dayOfWeek > 7) { throw new DateTimeException("Invalid value for DayOfWeek: " + dayOfWeek) } ENUMS(dayOfWeek - 1) } /** * Obtains an instance of {@code DayOfWeek} from a temporal object. * <p> * This obtains a day-of-week based on the specified temporal. * A {@code TemporalAccessor} represents an arbitrary set of date and time information, * which this factory converts to an instance of {@code DayOfWeek}. * <p> * The conversion extracts the {@link ChronoField#DAY_OF_WEEK DAY_OF_WEEK} field. * <p> * This method matches the signature of the functional interface {@link TemporalQuery} * allowing it to be used as a query via method reference, {@code DayOfWeek::from}. * * @param temporal the temporal object to convert, not null * @return the day-of-week, not null * @throws DateTimeException if unable to convert to a { @code DayOfWeek} */ def from(temporal: TemporalAccessor): DayOfWeek = { if (temporal.isInstanceOf[DayOfWeek]) { temporal.asInstanceOf[DayOfWeek] } of(temporal.get(DAY_OF_WEEK)) } /** * The singleton instance for the day-of-week of Monday. * This has the numeric value of {@code 1}. */ final val MONDAY: = null /** * The singleton instance for the day-of-week of Tuesday. * This has the numeric value of {@code 2}. */ final val TUESDAY: = null /** * The singleton instance for the day-of-week of Wednesday. * This has the numeric value of {@code 3}. */ final val WEDNESDAY: = null /** * The singleton instance for the day-of-week of Thursday. * This has the numeric value of {@code 4}. */ final val THURSDAY: = null /** * The singleton instance for the day-of-week of Friday. * This has the numeric value of {@code 5}. 
*/ final val FRIDAY: = null /** * The singleton instance for the day-of-week of Saturday. * This has the numeric value of {@code 6}. */ final val SATURDAY: = null /** * The singleton instance for the day-of-week of Sunday. * This has the numeric value of {@code 7}. */ final val SUNDAY: = null /** * Private cache of all the constants. */ private final val ENUMS: Array[DayOfWeek] = DayOfWeek.values } final class DayOfWeek extends TemporalAccessor with TemporalAdjuster { /** * Gets the day-of-week {@code int} value. * <p> * The values are numbered following the ISO-8601 standard, from 1 (Monday) to 7 (Sunday). * See {@link WeekFields#dayOfWeek} for localized week-numbering. * * @return the day-of-week, from 1 (Monday) to 7 (Sunday) */ def getValue: Int = { ordinal + 1 } /** * Gets the textual representation, such as 'Mon' or 'Friday'. * <p> * This returns the textual name used to identify the day-of-week, * suitable for presentation to the user. * The parameters control the style of the returned text and the locale. * <p> * If no textual mapping is found then the {@link #getValue() numeric value} is returned. * * @param style the length of the text required, not null * @param locale the locale to use, not null * @return the text value of the day-of-week, not null */ def getDisplayName(style: TextStyle, locale: Locale): String = { new DateTimeFormatterBuilder().appendText(DAY_OF_WEEK, style).toFormatter(locale).format(this) } /** * Checks if the specified field is supported. * <p> * This checks if this day-of-week can be queried for the specified field. * If false, then calling the {@link #range(TemporalField) range} and * {@link #get(TemporalField) get} methods will throw an exception. * <p> * If the field is {@link ChronoField#DAY_OF_WEEK DAY_OF_WEEK} then * this method returns true. * All other {@code ChronoField} instances will false. * <p> * If the field is not a {@code ChronoField}, then the result of this method * is obtained by invoking {@code TemporalField.isSupportedBy(TemporalAccessor)} * passing {@code this} as the argument. * Whether the field is supported is determined by the field. * * @param field the field to check, null returns false * @return true if the field is supported on this day-of-week, false if not */ def isSupported(field: TemporalField): Boolean = { if (field.isInstanceOf[ChronoField]) { field eq DAY_OF_WEEK } field != null && field.isSupportedBy(this) } /** * Gets the range of valid values for the specified field. * <p> * The range object expresses the minimum and maximum valid values for a field. * This day-of-week is used to enhance the accuracy of the returned range. * If it is not possible to the range, because the field is not supported * or for some other reason, an exception is thrown. * <p> * If the field is {@link ChronoField#DAY_OF_WEEK DAY_OF_WEEK} then the * range of the day-of-week, from 1 to 7, will be returned. * All other {@code ChronoField} instances will throw an {@code UnsupportedTemporalTypeException}. * <p> * If the field is not a {@code ChronoField}, then the result of this method * is obtained by invoking {@code TemporalField.rangeRefinedBy(TemporalAccessor)} * passing {@code this} as the argument. * Whether the range can be obtained is determined by the field. 
* * @param field the field to query the range for, not null * @return the range of valid values for the field, not null * @throws DateTimeException if the range for the field cannot be obtained * @throws UnsupportedTemporalTypeException if the field is not supported */ override def range(field: TemporalField): ValueRange = { if (field eq DAY_OF_WEEK) { field.range } TemporalAccessor.super.range(field) } /** * Gets the value of the specified field from this day-of-week as an {@code int}. * <p> * This queries this day-of-week for the value for the specified field. * The returned value will always be within the valid range of values for the field. * If it is not possible to the value, because the field is not supported * or for some other reason, an exception is thrown. * <p> * If the field is {@link ChronoField#DAY_OF_WEEK DAY_OF_WEEK} then the * value of the day-of-week, from 1 to 7, will be returned. * All other {@code ChronoField} instances will throw an {@code UnsupportedTemporalTypeException}. * <p> * If the field is not a {@code ChronoField}, then the result of this method * is obtained by invoking {@code TemporalField.getFrom(TemporalAccessor)} * passing {@code this} as the argument. Whether the value can be obtained, * and what the value represents, is determined by the field. * * @param field the field to get, not null * @return the value for the field, within the valid range of values * @throws DateTimeException if a value for the field cannot be obtained or * the value is outside the range of valid values for the field * @throws UnsupportedTemporalTypeException if the field is not supported or * the range of values exceeds an { @code int} * @throws ArithmeticException if numeric overflow occurs */ override def get(field: TemporalField): Int = { if (field eq DAY_OF_WEEK) { getValue } TemporalAccessor.super.get(field) } /** * Gets the value of the specified field from this day-of-week as a {@code long}. * <p> * This queries this day-of-week for the value for the specified field. * If it is not possible to the value, because the field is not supported * or for some other reason, an exception is thrown. * <p> * If the field is {@link ChronoField#DAY_OF_WEEK DAY_OF_WEEK} then the * value of the day-of-week, from 1 to 7, will be returned. * All other {@code ChronoField} instances will throw an {@code UnsupportedTemporalTypeException}. * <p> * If the field is not a {@code ChronoField}, then the result of this method * is obtained by invoking {@code TemporalField.getFrom(TemporalAccessor)} * passing {@code this} as the argument. Whether the value can be obtained, * and what the value represents, is determined by the field. * * @param field the field to get, not null * @return the value for the field * @throws DateTimeException if a value for the field cannot be obtained * @throws UnsupportedTemporalTypeException if the field is not supported * @throws ArithmeticException if numeric overflow occurs */ def getLong(field: TemporalField): Long = { if (field eq DAY_OF_WEEK) { getValue } else if (field.isInstanceOf[ChronoField]) { throw new UnsupportedTemporalTypeException("Unsupported field: " + field) } field.getFrom(this) } /** * Returns the day-of-week that is the specified number of days after this one. * <p> * The calculation rolls around the end of the week from Sunday to Monday. * The specified period may be negative. * <p> * This instance is immutable and unaffected by this method call. 
* * @param days the days to add, positive or negative * @return the resulting day-of-week, not null */ def plus(days: Long): DayOfWeek = { val amount: Int = (days % 7).asInstanceOf[Int] ENUMS((ordinal + (amount + 7)) % 7) } /** * Returns the day-of-week that is the specified number of days before this one. * <p> * The calculation rolls around the start of the year from Monday to Sunday. * The specified period may be negative. * <p> * This instance is immutable and unaffected by this method call. * * @param days the days to subtract, positive or negative * @return the resulting day-of-week, not null */ def minus(days: Long): DayOfWeek = { plus(-(days % 7)) } /** * Queries this day-of-week using the specified query. * <p> * This queries this day-of-week using the specified query strategy object. * The {@code TemporalQuery} object defines the logic to be used to * obtain the result. Read the documentation of the query to understand * what the result of this method will be. * <p> * The result of this method is obtained by invoking the * {@link TemporalQuery#queryFrom(TemporalAccessor)} method on the * specified query passing {@code this} as the argument. * * @param <R> the type of the result * @param query the query to invoke, not null * @return the query result, null may be returned (defined by the query) * @throws DateTimeException if unable to query (defined by the query) * @throws ArithmeticException if numeric overflow occurs (defined by the query) */ override def query(query: TemporalQuery[R]): R = { if (query eq TemporalQuery.precision) { DAYS.asInstanceOf[R] } TemporalAccessor.super.query(query) } /** * Adjusts the specified temporal object to have this day-of-week. * <p> * This returns a temporal object of the same observable type as the input * with the day-of-week changed to be the same as this. * <p> * The adjustment is equivalent to using {@link Temporal#with(TemporalField, long)} * passing {@link ChronoField#DAY_OF_WEEK} as the field. * Note that this adjusts forwards or backwards within a Monday to Sunday week. * See {@link WeekFields#dayOfWeek} for localized week start days. * See {@code TemporalAdjuster} for other adjusters with more control, * such as {@code next(MONDAY)}. * <p> * In most cases, it is clearer to reverse the calling pattern by using * {@link Temporal#with(TemporalAdjuster)}: * {{{ * // these two lines are equivalent, but the second approach is recommended * temporal = thisDayOfWeek.adjustInto(temporal); * temporal = temporal.with(thisDayOfWeek); * }}} * <p> * For example, given a date that is a Wednesday, the following are output: * {{{ * dateOnWed.with(MONDAY); // two days earlier * dateOnWed.with(TUESDAY); // one day earlier * dateOnWed.with(WEDNESDAY); // same date * dateOnWed.with(THURSDAY); // one day later * dateOnWed.with(FRIDAY); // two days later * dateOnWed.with(SATURDAY); // three days later * dateOnWed.with(SUNDAY); // four days later * }}} * <p> * This instance is immutable and unaffected by this method call. * * @param temporal the target object to be adjusted, not null * @return the adjusted object, not null * @throws DateTimeException if unable to make the adjustment * @throws ArithmeticException if numeric overflow occurs */ def adjustInto(temporal: Temporal): Temporal = { temporal.`with`(DAY_OF_WEEK, getValue) } } /** * A month-of-year, such as 'July'. * <p> * {@code Month} is an enum representing the 12 months of the year - * January, February, March, April, May, June, July, August, September, October, * November and December. 
* <p> * In addition to the textual enum name, each month-of-year has an {@code int} value. * The {@code int} value follows normal usage and the ISO-8601 standard, * from 1 (January) to 12 (December). It is recommended that applications use the enum * rather than the {@code int} value to ensure code clarity. * <p> * <b>Do not use {@code ordinal()} to obtain the numeric representation of {@code Month}. * Use {@code getValue()} instead.</b> * <p> * This enum represents a common concept that is found in many calendar systems. * As such, this enum may be used by any calendar system that has the month-of-year * concept defined exactly equivalent to the ISO-8601 calendar system. * * @implSpec * This is an immutable and thread-safe enum. * * @since 1.8 */ object Month { /** * Obtains an instance of {@code Month} from an {@code int} value. * <p> * {@code Month} is an enum representing the 12 months of the year. * This factory allows the enum to be obtained from the {@code int} value. * The {@code int} value follows the ISO-8601 standard, from 1 (January) to 12 (December). * * @param month the month-of-year to represent, from 1 (January) to 12 (December) * @return the month-of-year, not null * @throws DateTimeException if the month-of-year is invalid */ def of(month: Int): Month = { if (month < 1 || month > 12) { throw new DateTimeException("Invalid value for MonthOfYear: " + month) } ENUMS(month - 1) } /** * Obtains an instance of {@code Month} from a temporal object. * <p> * This obtains a month based on the specified temporal. * A {@code TemporalAccessor} represents an arbitrary set of date and time information, * which this factory converts to an instance of {@code Month}. * <p> * The conversion extracts the {@link ChronoField#MONTH_OF_YEAR MONTH_OF_YEAR} field. * The extraction is only permitted if the temporal object has an ISO * chronology, or can be converted to a {@code Date}. * <p> * This method matches the signature of the functional interface {@link TemporalQuery} * allowing it to be used in queries via method reference, {@code Month::from}. * * @param temporal the temporal object to convert, not null * @return the month-of-year, not null * @throws DateTimeException if unable to convert to a { @code Month} */ def from(temporal: TemporalAccessor): Month = { if (temporal.isInstanceOf[Month]) { temporal.asInstanceOf[Month] } try { if ((IsoChronology.INSTANCE == Chronology.from(temporal)) == false) { temporal = Date.from(temporal) } of(temporal.get(MONTH_OF_YEAR)) } catch { case ex: DateTimeException => { throw new DateTimeException("Unable to obtain Month from TemporalAccessor: " + temporal.getClass, ex) } } } /** * The singleton instance for the month of January with 31 days. * This has the numeric value of {@code 1}. */ final val JANUARY: = null /** * The singleton instance for the month of February with 28 days, or 29 in a leap year. * This has the numeric value of {@code 2}. */ final val FEBRUARY: = null /** * The singleton instance for the month of March with 31 days. * This has the numeric value of {@code 3}. */ final val MARCH: = null /** * The singleton instance for the month of April with 30 days. * This has the numeric value of {@code 4}. */ final val APRIL: = null /** * The singleton instance for the month of May with 31 days. * This has the numeric value of {@code 5}. */ final val MAY: = null /** * The singleton instance for the month of June with 30 days. * This has the numeric value of {@code 6}. 
*/ final val JUNE: = null /** * The singleton instance for the month of July with 31 days. * This has the numeric value of {@code 7}. */ final val JULY: = null /** * The singleton instance for the month of August with 31 days. * This has the numeric value of {@code 8}. */ final val AUGUST: = null /** * The singleton instance for the month of September with 30 days. * This has the numeric value of {@code 9}. */ final val SEPTEMBER: = null /** * The singleton instance for the month of October with 31 days. * This has the numeric value of {@code 10}. */ final val OCTOBER: = null /** * The singleton instance for the month of November with 30 days. * This has the numeric value of {@code 11}. */ final val NOVEMBER: = null /** * The singleton instance for the month of December with 31 days. * This has the numeric value of {@code 12}. */ final val DECEMBER: = null /** * Private cache of all the constants. */ private final val ENUMS: Array[Month] = Month.values } final class Month extends TemporalAccessor with TemporalAdjuster { /** * Gets the month-of-year {@code int} value. * <p> * The values are numbered following the ISO-8601 standard, * from 1 (January) to 12 (December). * * @return the month-of-year, from 1 (January) to 12 (December) */ def getValue: Int = { ordinal + 1 } /** * Gets the textual representation, such as 'Jan' or 'December'. * <p> * This returns the textual name used to identify the month-of-year, * suitable for presentation to the user. * The parameters control the style of the returned text and the locale. * <p> * If no textual mapping is found then the {@link #getValue() numeric value} is returned. * * @param style the length of the text required, not null * @param locale the locale to use, not null * @return the text value of the month-of-year, not null */ def getDisplayName(style: TextStyle, locale: Locale): String = { new DateTimeFormatterBuilder().appendText(MONTH_OF_YEAR, style).toFormatter(locale).format(this) } /** * Checks if the specified field is supported. * <p> * This checks if this month-of-year can be queried for the specified field. * If false, then calling the {@link #range(TemporalField) range} and * {@link #get(TemporalField) get} methods will throw an exception. * <p> * If the field is {@link ChronoField#MONTH_OF_YEAR MONTH_OF_YEAR} then * this method returns true. * All other {@code ChronoField} instances will false. * <p> * If the field is not a {@code ChronoField}, then the result of this method * is obtained by invoking {@code TemporalField.isSupportedBy(TemporalAccessor)} * passing {@code this} as the argument. * Whether the field is supported is determined by the field. * * @param field the field to check, null returns false * @return true if the field is supported on this month-of-year, false if not */ def isSupported(field: TemporalField): Boolean = { if (field.isInstanceOf[ChronoField]) { field eq MONTH_OF_YEAR } field != null && field.isSupportedBy(this) } /** * Gets the range of valid values for the specified field. * <p> * The range object expresses the minimum and maximum valid values for a field. * This month is used to enhance the accuracy of the returned range. * If it is not possible to the range, because the field is not supported * or for some other reason, an exception is thrown. * <p> * If the field is {@link ChronoField#MONTH_OF_YEAR MONTH_OF_YEAR} then the * range of the month-of-year, from 1 to 12, will be returned. * All other {@code ChronoField} instances will throw an {@code UnsupportedTemporalTypeException}. 
* <p> * If the field is not a {@code ChronoField}, then the result of this method * is obtained by invoking {@code TemporalField.rangeRefinedBy(TemporalAccessor)} * passing {@code this} as the argument. * Whether the range can be obtained is determined by the field. * * @param field the field to query the range for, not null * @return the range of valid values for the field, not null * @throws DateTimeException if the range for the field cannot be obtained * @throws UnsupportedTemporalTypeException if the field is not supported */ override def range(field: TemporalField): ValueRange = { if (field eq MONTH_OF_YEAR) { field.range } TemporalAccessor.super.range(field) } /** * Gets the value of the specified field from this month-of-year as an {@code int}. * <p> * This queries this month for the value for the specified field. * The returned value will always be within the valid range of values for the field. * If it is not possible to the value, because the field is not supported * or for some other reason, an exception is thrown. * <p> * If the field is {@link ChronoField#MONTH_OF_YEAR MONTH_OF_YEAR} then the * value of the month-of-year, from 1 to 12, will be returned. * All other {@code ChronoField} instances will throw an {@code UnsupportedTemporalTypeException}. * <p> * If the field is not a {@code ChronoField}, then the result of this method * is obtained by invoking {@code TemporalField.getFrom(TemporalAccessor)} * passing {@code this} as the argument. Whether the value can be obtained, * and what the value represents, is determined by the field. * * @param field the field to get, not null * @return the value for the field, within the valid range of values * @throws DateTimeException if a value for the field cannot be obtained or * the value is outside the range of valid values for the field * @throws UnsupportedTemporalTypeException if the field is not supported or * the range of values exceeds an { @code int} * @throws ArithmeticException if numeric overflow occurs */ override def get(field: TemporalField): Int = { if (field eq MONTH_OF_YEAR) { getValue } TemporalAccessor.super.get(field) } /** * Gets the value of the specified field from this month-of-year as a {@code long}. * <p> * This queries this month for the value for the specified field. * If it is not possible to the value, because the field is not supported * or for some other reason, an exception is thrown. * <p> * If the field is {@link ChronoField#MONTH_OF_YEAR MONTH_OF_YEAR} then the * value of the month-of-year, from 1 to 12, will be returned. * All other {@code ChronoField} instances will throw an {@code UnsupportedTemporalTypeException}. * <p> * If the field is not a {@code ChronoField}, then the result of this method * is obtained by invoking {@code TemporalField.getFrom(TemporalAccessor)} * passing {@code this} as the argument. Whether the value can be obtained, * and what the value represents, is determined by the field. * * @param field the field to get, not null * @return the value for the field * @throws DateTimeException if a value for the field cannot be obtained * @throws UnsupportedTemporalTypeException if the field is not supported * @throws ArithmeticException if numeric overflow occurs */ def getLong(field: TemporalField): Long = { if (field eq MONTH_OF_YEAR) { getValue } else if (field.isInstanceOf[ChronoField]) { throw new UnsupportedTemporalTypeException("Unsupported field: " + field) } field.getFrom(this) } /** * Returns the month-of-year that is the specified number of quarters after this one. 
* <p> * The calculation rolls around the end of the year from December to January. * The specified period may be negative. * <p> * This instance is immutable and unaffected by this method call. * * @param months the months to add, positive or negative * @return the resulting month, not null */ def plus(months: Long): Month = { val amount: Int = (months % 12).asInstanceOf[Int] ENUMS((ordinal + (amount + 12)) % 12) } /** * Returns the month-of-year that is the specified number of months before this one. * <p> * The calculation rolls around the start of the year from January to December. * The specified period may be negative. * <p> * This instance is immutable and unaffected by this method call. * * @param months the months to subtract, positive or negative * @return the resulting month, not null */ def minus(months: Long): Month = { plus(-(months % 12)) } /** * Gets the length of this month in days. * <p> * This takes a flag to determine whether to the length for a leap year or not. * <p> * February has 28 days in a standard year and 29 days in a leap year. * April, June, September and November have 30 days. * All other months have 31 days. * * @param leapYear true if the length is required for a leap year * @return the length of this month in days, from 28 to 31 */ def length(leapYear: Boolean): Int = { this match { case FEBRUARY => (if (leapYear) 29 else 28) case APRIL => case JUNE => case SEPTEMBER => case NOVEMBER => 30 case _ => 31 } } /** * Gets the minimum length of this month in days. * <p> * February has a minimum length of 28 days. * April, June, September and November have 30 days. * All other months have 31 days. * * @return the minimum length of this month in days, from 28 to 31 */ def minLength: Int = { this match { case FEBRUARY => 28 case APRIL => case JUNE => case SEPTEMBER => case NOVEMBER => 30 case _ => 31 } } /** * Gets the maximum length of this month in days. * <p> * February has a maximum length of 29 days. * April, June, September and November have 30 days. * All other months have 31 days. * * @return the maximum length of this month in days, from 29 to 31 */ def maxLength: Int = { this match { case FEBRUARY => 29 case APRIL => case JUNE => case SEPTEMBER => case NOVEMBER => 30 case _ => 31 } } /** * Gets the day-of-year corresponding to the first day of this month. * <p> * This returns the day-of-year that this month begins on, using the leap * year flag to determine the length of February. * * @param leapYear true if the length is required for a leap year * @return the day of year corresponding to the first day of this month, from 1 to 336 */ def firstDayOfYear(leapYear: Boolean): Int = { val leap: Int = if (leapYear) 1 else 0 this match { case JANUARY => 1 case FEBRUARY => 32 case MARCH => 60 + leap case APRIL => 91 + leap case MAY => 121 + leap case JUNE => 152 + leap case JULY => 182 + leap case AUGUST => 213 + leap case SEPTEMBER => 244 + leap case OCTOBER => 274 + leap case NOVEMBER => 305 + leap case DECEMBER => case _ => 335 + leap } } /** * Gets the month corresponding to the first month of this quarter. * <p> * The year can be divided into four quarters. * This method returns the first month of the quarter for the base month. * January, February and March January. * April, May and June April. * July, August and September July. * October, November and December October. 
* * @return the first month of the quarter corresponding to this month, not null */ def firstMonthOfQuarter: Month = { ENUMS((ordinal / 3) * 3) } /** * Queries this month-of-year using the specified query. * <p> * This queries this month-of-year using the specified query strategy object. * The {@code TemporalQuery} object defines the logic to be used to * obtain the result. Read the documentation of the query to understand * what the result of this method will be. * <p> * The result of this method is obtained by invoking the * {@link TemporalQuery#queryFrom(TemporalAccessor)} method on the * specified query passing {@code this} as the argument. * * @param <R> the type of the result * @param query the query to invoke, not null * @return the query result, null may be returned (defined by the query) * @throws DateTimeException if unable to query (defined by the query) * @throws ArithmeticException if numeric overflow occurs (defined by the query) */ override def query(query: TemporalQuery[R]): R = { if (query eq TemporalQuery.chronology) { IsoChronology.INSTANCE.asInstanceOf[R] } else if (query eq TemporalQuery.precision) { MONTHS.asInstanceOf[R] } TemporalAccessor.super.query(query) } /** * Adjusts the specified temporal object to have this month-of-year. * <p> * This returns a temporal object of the same observable type as the input * with the month-of-year changed to be the same as this. * <p> * The adjustment is equivalent to using {@link Temporal#with(TemporalField, long)} * passing {@link ChronoField#MONTH_OF_YEAR} as the field. * If the specified temporal object does not use the ISO calendar system then * a {@code DateTimeException} is thrown. * <p> * In most cases, it is clearer to reverse the calling pattern by using * {@link Temporal#with(TemporalAdjuster)}: * {{{ * // these two lines are equivalent, but the second approach is recommended * temporal = thisMonth.adjustInto(temporal); * temporal = temporal.with(thisMonth); * }}} * <p> * For example, given a date in May, the following are output: * {{{ * dateInMay.with(JANUARY); // four months earlier * dateInMay.with(APRIL); // one months earlier * dateInMay.with(MAY); // same date * dateInMay.with(JUNE); // one month later * dateInMay.with(DECEMBER); // seven months later * }}} * <p> * This instance is immutable and unaffected by this method call. * * @param temporal the target object to be adjusted, not null * @return the adjusted object, not null * @throws DateTimeException if unable to make the adjustment * @throws ArithmeticException if numeric overflow occurs */ def adjustInto(temporal: Temporal): Temporal = { if ((Chronology.from(temporal) == IsoChronology.INSTANCE) == false) { throw new DateTimeException("Adjustment only supported on ISO date-time") } temporal.`with`(MONTH_OF_YEAR, getValue) } } /** * A month-day in the ISO-8601 calendar system, such as {@code --12-03}. * <p> * {@code MonthDay} is an immutable date-time object that represents the combination * of a year and month. Any field that can be derived from a month and day, such as * quarter-of-year, can be obtained. * <p> * This class does not store or represent a year, time or time-zone. * For example, the value "December 3rd" can be stored in a {@code MonthDay}. * <p> * Since a {@code MonthDay} does not possess a year, the leap day of * February 29th is considered valid. * <p> * This class implements {@link TemporalAccessor} rather than {@link Temporal}. 
* This is because it is not possible to define whether February 29th is valid or not * without external information, preventing the implementation of plus/minus. * Related to this, {@code MonthDay} only provides access to query and set the fields * {@code MONTH_OF_YEAR} and {@code DAY_OF_MONTH}. * <p> * The ISO-8601 calendar system is the modern civil calendar system used today * in most of the world. It is equivalent to the proleptic Gregorian calendar * system, in which today's rules for leap years are applied for all time. * For most applications written today, the ISO-8601 rules are entirely suitable. * However, any application that makes use of historical dates, and requires them * to be accurate will find the ISO-8601 approach unsuitable. * * @implSpec * This class is immutable and thread-safe. * * @since 1.8 */ object MonthDay { /** * Obtains the current month-day from the system clock in the default time-zone. * <p> * This will query the {@link java.time.Clock#systemDefaultZone() system clock} in the default * time-zone to obtain the current month-day. * <p> * Using this method will prevent the ability to use an alternate clock for testing * because the clock is hard-coded. * * @return the current month-day using the system clock and default time-zone, not null */ def now: MonthDay = { now(Clock.systemDefaultZone) } /** * Obtains the current month-day from the system clock in the specified time-zone. * <p> * This will query the {@link Clock#system(java.time.ZoneId) system clock} to obtain the current month-day. * Specifying the time-zone avoids dependence on the default time-zone. * <p> * Using this method will prevent the ability to use an alternate clock for testing * because the clock is hard-coded. * * @param zone the zone ID to use, not null * @return the current month-day using the system clock, not null */ def now(zone: ZoneId): MonthDay = { now(Clock.system(zone)) } /** * Obtains the current month-day from the specified clock. * <p> * This will query the specified clock to obtain the current month-day. * Using this method allows the use of an alternate clock for testing. * The alternate clock may be introduced using {@link Clock dependency injection}. * * @param clock the clock to use, not null * @return the current month-day, not null */ def now(clock: Clock): MonthDay = { val now: Date = Date.now(clock) MonthDay.of(now.getMonth, now.getDayOfMonth) } /** * Obtains an instance of {@code MonthDay}. * <p> * The day-of-month must be valid for the month within a leap year. * Hence, for February, day 29 is valid. * <p> * For example, passing in April and day 31 will throw an exception, as * there can never be April 31st in any year. By contrast, passing in * February 29th is permitted, as that month-day can sometimes be valid. * * @param month the month-of-year to represent, not null * @param dayOfMonth the day-of-month to represent, from 1 to 31 * @return the month-day, not null * @throws DateTimeException if the value of any field is out of range, * or if the day-of-month is invalid for the month */ def of(month: Month, dayOfMonth: Int): MonthDay = { object DAY_OF_MONTH.checkValidValue(dayOfMonth) if (dayOfMonth > month.maxLength) { throw new DateTimeException("Illegal value for DayOfMonth field, value " + dayOfMonth + " is not valid for month " + month.name) } new MonthDay(month.getValue, dayOfMonth) } /** * Obtains an instance of {@code MonthDay}. * <p> * The day-of-month must be valid for the month within a leap year. * Hence, for month 2 (February), day 29 is valid. 
* <p> * For example, passing in month 4 (April) and day 31 will throw an exception, as * there can never be April 31st in any year. By contrast, passing in * February 29th is permitted, as that month-day can sometimes be valid. * * @param month the month-of-year to represent, from 1 (January) to 12 (December) * @param dayOfMonth the day-of-month to represent, from 1 to 31 * @return the month-day, not null * @throws DateTimeException if the value of any field is out of range, * or if the day-of-month is invalid for the month */ def of(month: Int, dayOfMonth: Int): MonthDay = { of(Month.of(month), dayOfMonth) } /** * Obtains an instance of {@code MonthDay} from a temporal object. * <p> * This obtains a month-day based on the specified temporal. * A {@code TemporalAccessor} represents an arbitrary set of date and time information, * which this factory converts to an instance of {@code MonthDay}. * <p> * The conversion extracts the {@link ChronoField#MONTH_OF_YEAR MONTH_OF_YEAR} and * {@link ChronoField#DAY_OF_MONTH DAY_OF_MONTH} fields. * The extraction is only permitted if the temporal object has an ISO * chronology, or can be converted to a {@code Date}. * <p> * This method matches the signature of the functional interface {@link TemporalQuery} * allowing it to be used in queries via method reference, {@code MonthDay::from}. * * @param temporal the temporal object to convert, not null * @return the month-day, not null * @throws DateTimeException if unable to convert to a { @code MonthDay} */ def from(temporal: TemporalAccessor): MonthDay = { if (temporal.isInstanceOf[MonthDay]) { temporal.asInstanceOf[MonthDay] } try { if ((IsoChronology.INSTANCE == Chronology.from(temporal)) == false) { temporal = Date.from(temporal) } of(temporal.get(MONTH_OF_YEAR), temporal.get(DAY_OF_MONTH)) } catch { case ex: DateTimeException => { throw new DateTimeException("Unable to obtain MonthDay from TemporalAccessor: " + temporal.getClass, ex) } } } /** * Obtains an instance of {@code MonthDay} from a text string such as {@code --12-03}. * <p> * The string must represent a valid month-day. * The format is {@code --MM-dd}. * * @param text the text to parse such as "--12-03", not null * @return the parsed month-day, not null * @throws DateTimeParseException if the text cannot be parsed */ def parse(text: CharSequence): MonthDay = { parse(text, PARSER) } /** * Obtains an instance of {@code MonthDay} from a text string using a specific formatter. * <p> * The text is parsed using the formatter, returning a month-day. * * @param text the text to parse, not null * @param formatter the formatter to use, not null * @return the parsed month-day, not null * @throws DateTimeParseException if the text cannot be parsed */ def parse(text: CharSequence, formatter: DateTimeFormatter): MonthDay = { object formatter.parse(text, MonthDay.from) } private[time] def readExternal(in: DataInput): MonthDay = { val month: Byte = in.readByte val day: Byte = in.readByte MonthDay.of(month, day) } /** * Parser. */ private final val PARSER: DateTimeFormatter = new DateTimeFormatterBuilder().appendLiteral("--").appendValue(MONTH_OF_YEAR, 2).appendLiteral('-').appendValue(DAY_OF_MONTH, 2).toFormatter } final class MonthDay extends TemporalAccessor with TemporalAdjuster with Comparable[MonthDay] { /** * Constructor, previously validated. 
* * @param month the month-of-year to represent, validated from 1 to 12 * @param dayOfMonth the day-of-month to represent, validated from 1 to 29-31 */ private def this(month: Int, dayOfMonth: Int) { this.month = month this.day = dayOfMonth } /** * Checks if the specified field is supported. * <p> * This checks if this month-day can be queried for the specified field. * If false, then calling the {@link #range(TemporalField) range} and * {@link #get(TemporalField) get} methods will throw an exception. * <p> * If the field is a {@link ChronoField} then the query is implemented here. * The supported fields are: * <ul> * <li>{@code MONTH_OF_YEAR} * <li>{@code YEAR} * </ul> * All other {@code ChronoField} instances will false. * <p> * If the field is not a {@code ChronoField}, then the result of this method * is obtained by invoking {@code TemporalField.isSupportedBy(TemporalAccessor)} * passing {@code this} as the argument. * Whether the field is supported is determined by the field. * * @param field the field to check, null returns false * @return true if the field is supported on this month-day, false if not */ def isSupported(field: TemporalField): Boolean = { if (field.isInstanceOf[ChronoField]) { field eq MONTH_OF_YEAR || field eq DAY_OF_MONTH } field != null && field.isSupportedBy(this) } /** * Gets the range of valid values for the specified field. * <p> * The range object expresses the minimum and maximum valid values for a field. * This month-day is used to enhance the accuracy of the returned range. * If it is not possible to the range, because the field is not supported * or for some other reason, an exception is thrown. * <p> * If the field is a {@link ChronoField} then the query is implemented here. * The {@link #isSupported(TemporalField) supported fields} will * appropriate range instances. * All other {@code ChronoField} instances will throw an {@code UnsupportedTemporalTypeException}. * <p> * If the field is not a {@code ChronoField}, then the result of this method * is obtained by invoking {@code TemporalField.rangeRefinedBy(TemporalAccessor)} * passing {@code this} as the argument. * Whether the range can be obtained is determined by the field. * * @param field the field to query the range for, not null * @return the range of valid values for the field, not null * @throws DateTimeException if the range for the field cannot be obtained * @throws UnsupportedTemporalTypeException if the field is not supported */ override def range(field: TemporalField): ValueRange = { if (field eq MONTH_OF_YEAR) { field.range } else if (field eq DAY_OF_MONTH) { ValueRange.of(1, getMonth.minLength, getMonth.maxLength) } TemporalAccessor.super.range(field) } /** * Gets the value of the specified field from this month-day as an {@code int}. * <p> * This queries this month-day for the value for the specified field. * The returned value will always be within the valid range of values for the field. * If it is not possible to the value, because the field is not supported * or for some other reason, an exception is thrown. * <p> * If the field is a {@link ChronoField} then the query is implemented here. * The {@link #isSupported(TemporalField) supported fields} will valid * values based on this month-day. * All other {@code ChronoField} instances will throw an {@code UnsupportedTemporalTypeException}. * <p> * If the field is not a {@code ChronoField}, then the result of this method * is obtained by invoking {@code TemporalField.getFrom(TemporalAccessor)} * passing {@code this} as the argument. 
Whether the value can be obtained, * and what the value represents, is determined by the field. * * @param field the field to get, not null * @return the value for the field * @throws DateTimeException if a value for the field cannot be obtained or * the value is outside the range of valid values for the field * @throws UnsupportedTemporalTypeException if the field is not supported or * the range of values exceeds an { @code int} * @throws ArithmeticException if numeric overflow occurs */ override def get(field: TemporalField): Int = { range(field).checkValidIntValue(getLong(field), field) } /** * Gets the value of the specified field from this month-day as a {@code long}. * <p> * This queries this month-day for the value for the specified field. * If it is not possible to the value, because the field is not supported * or for some other reason, an exception is thrown. * <p> * If the field is a {@link ChronoField} then the query is implemented here. * The {@link #isSupported(TemporalField) supported fields} will valid * values based on this month-day. * All other {@code ChronoField} instances will throw an {@code UnsupportedTemporalTypeException}. * <p> * If the field is not a {@code ChronoField}, then the result of this method * is obtained by invoking {@code TemporalField.getFrom(TemporalAccessor)} * passing {@code this} as the argument. Whether the value can be obtained, * and what the value represents, is determined by the field. * * @param field the field to get, not null * @return the value for the field * @throws DateTimeException if a value for the field cannot be obtained * @throws UnsupportedTemporalTypeException if the field is not supported * @throws ArithmeticException if numeric overflow occurs */ def getLong(field: TemporalField): Long = { if (field.isInstanceOf[ChronoField]) { field.asInstanceOf[ChronoField] match { case DAY_OF_MONTH => day case MONTH_OF_YEAR => month } throw new UnsupportedTemporalTypeException("Unsupported field: " + field) } field.getFrom(this) } /** * Gets the month-of-year field from 1 to 12. * <p> * This method returns the month as an {@code int} from 1 to 12. * Application code is frequently clearer if the enum {@link Month} * is used by calling {@link #getMonth()}. * * @return the month-of-year, from 1 to 12 * @see #getMonth() */ def getMonthValue: Int = { month } /** * Gets the month-of-year field using the {@code Month} enum. * <p> * This method returns the enum {@link Month} for the month. * This avoids confusion as to what {@code int} values mean. * If you need access to the primitive {@code int} value then the enum * provides the {@link Month#getValue() int value}. * * @return the month-of-year, not null * @see #getMonthValue() */ def getMonth: Month = { Month.of(month) } /** * Gets the day-of-month field. * <p> * This method returns the primitive {@code int} value for the day-of-month. * * @return the day-of-month, from 1 to 31 */ def getDayOfMonth: Int = { day } /** * Checks if the year is valid for this month-day. * <p> * This method checks whether this month and day and the input year form * a valid date. This can only false for February 29th. * * @param year the year to validate, an out of range value returns false * @return true if the year is valid for this month-day * @see Year#isValidMonthDay(MonthDay) */ def isValidYear(year: Int): Boolean = { (day == 29 && month == 2 && Year.isLeap(year) == false) == false } /** * Returns a copy of this {@code MonthDay} with the month-of-year altered. 
* <p> * This returns a month-day with the specified month. * If the day-of-month is invalid for the specified month, the day will * be adjusted to the last valid day-of-month. * <p> * This instance is immutable and unaffected by this method call. * * @param month the month-of-year to set in the returned month-day, from 1 (January) to 12 (December) * @return a { @code MonthDay} based on this month-day with the requested month, not null * @throws DateTimeException if the month-of-year value is invalid */ def withMonth(month: Int): MonthDay = { `with`(Month.of(month)) } /** * Returns a copy of this {@code MonthDay} with the month-of-year altered. * <p> * This returns a month-day with the specified month. * If the day-of-month is invalid for the specified month, the day will * be adjusted to the last valid day-of-month. * <p> * This instance is immutable and unaffected by this method call. * * @param month the month-of-year to set in the returned month-day, not null * @return a { @code MonthDay} based on this month-day with the requested month, not null */ def `with`(month: Month): MonthDay = { object if (month.getValue == this.month) { this } val day: Int = Math.min(this.day, month.maxLength) new MonthDay(month.getValue, day) } /** * Returns a copy of this {@code MonthDay} with the day-of-month altered. * <p> * This returns a month-day with the specified day-of-month. * If the day-of-month is invalid for the month, an exception is thrown. * <p> * This instance is immutable and unaffected by this method call. * * @param dayOfMonth the day-of-month to set in the month-day, from 1 to 31 * @return a { @code MonthDay} based on this month-day with the requested day, not null * @throws DateTimeException if the day-of-month value is invalid, * or if the day-of-month is invalid for the month */ def withDayOfMonth(dayOfMonth: Int): MonthDay = { if (dayOfMonth == this.day) { this } of(month, dayOfMonth) } /** * Queries this month-day using the specified query. * <p> * This queries this month-day using the specified query strategy object. * The {@code TemporalQuery} object defines the logic to be used to * obtain the result. Read the documentation of the query to understand * what the result of this method will be. * <p> * The result of this method is obtained by invoking the * {@link TemporalQuery#queryFrom(TemporalAccessor)} method on the * specified query passing {@code this} as the argument. * * @param <R> the type of the result * @param query the query to invoke, not null * @return the query result, null may be returned (defined by the query) * @throws DateTimeException if unable to query (defined by the query) * @throws ArithmeticException if numeric overflow occurs (defined by the query) */ override def query(query: TemporalQuery[R]): R = { if (query eq TemporalQuery.chronology) { IsoChronology.INSTANCE.asInstanceOf[R] } TemporalAccessor.super.query(query) } /** * Adjusts the specified temporal object to have this month-day. * <p> * This returns a temporal object of the same observable type as the input * with the month and day-of-month changed to be the same as this. * <p> * The adjustment is equivalent to using {@link Temporal#with(TemporalField, long)} * twice, passing {@link ChronoField#MONTH_OF_YEAR} and * {@link ChronoField#DAY_OF_MONTH} as the fields. * If the specified temporal object does not use the ISO calendar system then * a {@code DateTimeException} is thrown. 
* <p> * In most cases, it is clearer to reverse the calling pattern by using * {@link Temporal#with(TemporalAdjuster)}: * {{{ * // these two lines are equivalent, but the second approach is recommended * temporal = thisMonthDay.adjustInto(temporal); * temporal = temporal.with(thisMonthDay); * }}} * <p> * This instance is immutable and unaffected by this method call. * * @param temporal the target object to be adjusted, not null * @return the adjusted object, not null * @throws DateTimeException if unable to make the adjustment * @throws ArithmeticException if numeric overflow occurs */ def adjustInto(temporal: Temporal): Temporal = { if ((Chronology.from(temporal) == IsoChronology.INSTANCE) == false) { throw new DateTimeException("Adjustment only supported on ISO date-time") } temporal = temporal.`with`(MONTH_OF_YEAR, month) temporal.`with`(DAY_OF_MONTH, Math.min(temporal.range(DAY_OF_MONTH).getMaximum, day)) } /** * Formats this month-day using the specified formatter. * <p> * This month-day will be passed to the formatter to produce a string. * * @param formatter the formatter to use, not null * @return the formatted month-day string, not null * @throws DateTimeException if an error occurs during printing */ def format(formatter: DateTimeFormatter): String = { object formatter.format(this) } /** * Combines this month-day with a year to create a {@code Date}. * <p> * This returns a {@code Date} formed from this month-day and the specified year. * <p> * A month-day of February 29th will be adjusted to February 28th in the resulting * date if the year is not a leap year. * <p> * This instance is immutable and unaffected by this method call. * * @param year the year to use, from MIN_YEAR to MAX_YEAR * @return the local date formed from this month-day and the specified year, not null * @throws DateTimeException if the year is outside the valid range of years */ def atYear(year: Int): Date = { Date.of(year, month, if (isValidYear(year)) day else 28) } /** * Compares this month-day to another month-day. * <p> * The comparison is based first on value of the month, then on the value of the day. * It is "consistent with equals", as defined by {@link Comparable}. * * @param other the other month-day to compare to, not null * @return the comparator value, negative if less, positive if greater */ def compareTo(other: MonthDay): Int = { var cmp: Int = (month - other.month) if (cmp == 0) { cmp = (day - other.day) } cmp } /** * Is this month-day after the specified month-day. * * @param other the other month-day to compare to, not null * @return true if this is after the specified month-day */ def isAfter(other: MonthDay): Boolean = { compareTo(other) > 0 } /** * Is this month-day before the specified month-day. * * @param other the other month-day to compare to, not null * @return true if this point is before the specified month-day */ def isBefore(other: MonthDay): Boolean = { compareTo(other) < 0 } /** * Checks if this month-day is equal to another month-day. * <p> * The comparison is based on the time-line position of the month-day within a year. * * @param obj the object to check, null returns false * @return true if this is equal to the other month-day */ override def equals(obj: AnyRef): Boolean = { if (this eq obj) { true } if (obj.isInstanceOf[MonthDay]) { val other: MonthDay = obj.asInstanceOf[MonthDay] month == other.month && day == other.day } false } /** * A hash code for this month-day. 
* * @return a suitable hash code */ override def hashCode: Int = { (month << 6) + day } /** * Outputs this month-day as a {@code String}, such as {@code --12-03}. * <p> * The output will be in the format {@code --MM-dd}: * * @return a string representation of this month-day, not null */ override def toString: String = { new StringBuilder(10).append("--").append(if (month < 10) "0" else "").append(month).append(if (day < 10) "-0" else "-").append(day).toString } /** * Writes the object using a * <a href="../../../serialized-form.html#java.time.temporal.Ser">dedicated serialized form</a>. * {{{ * out.writeByte(13); // identifies this as a MonthDay * out.writeByte(month); * out.writeByte(day); * }}} * * @return the instance of { @code Ser}, not null */ private def writeReplace: AnyRef = { new Ser(Ser.MONTH_DAY_TYPE, this) } /** * Defend against malicious streams. * @return never * @throws InvalidObjectException always */ private def readResolve: AnyRef = { throw new InvalidObjectException("Deserialization via serialization delegate") } private[time] def writeExternal(out: DataOutput) { out.writeByte(month) out.writeByte(day) } /** * The month-of-year, not null. */ private final val month: Int = 0 /** * The day-of-month. */ private final val day: Int = 0 } /** * A year in the ISO-8601 calendar system, such as {@code 2007}. * <p> * {@code Year} is an immutable date-time object that represents a year. * Any field that can be derived from a year can be obtained. * <p> * <b>Note that years in the ISO chronology only align with years in the * Gregorian-Julian system for modern years. Parts of Russia did not switch to the * modern Gregorian/ISO rules until 1920. * As such, historical years must be treated with caution.</b> * <p> * This class does not store or represent a month, day, time or time-zone. * For example, the value "2007" can be stored in a {@code Year}. * <p> * Years represented by this class follow the ISO-8601 standard and use * the proleptic numbering system. Year 1 is preceded by year 0, then by year -1. * <p> * The ISO-8601 calendar system is the modern civil calendar system used today * in most of the world. It is equivalent to the proleptic Gregorian calendar * system, in which today's rules for leap years are applied for all time. * For most applications written today, the ISO-8601 rules are entirely suitable. * However, any application that makes use of historical dates, and requires them * to be accurate will find the ISO-8601 approach unsuitable. * * @implSpec * This class is immutable and thread-safe. * * @since 1.8 */ object Year { /** * Obtains the current year from the system clock in the default time-zone. * <p> * This will query the {@link java.time.Clock#systemDefaultZone() system clock} in the default * time-zone to obtain the current year. * <p> * Using this method will prevent the ability to use an alternate clock for testing * because the clock is hard-coded. * * @return the current year using the system clock and default time-zone, not null */ def now: Year = { now(Clock.systemDefaultZone) } /** * Obtains the current year from the system clock in the specified time-zone. * <p> * This will query the {@link Clock#system(java.time.ZoneId) system clock} to obtain the current year. * Specifying the time-zone avoids dependence on the default time-zone. * <p> * Using this method will prevent the ability to use an alternate clock for testing * because the clock is hard-coded. 
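 * <p>
 * A hypothetical usage sketch (the {@code ZoneId} value is assumed to be obtained elsewhere):
 * {{{
 *   val zone: ZoneId = ...                   // e.g. the application's configured zone
 *   val currentYear: Year = Year.now(zone)
 * }}}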
* * @param zone the zone ID to use, not null * @return the current year using the system clock, not null */ def now(zone: ZoneId): Year = { now(Clock.system(zone)) } /** * Obtains the current year from the specified clock. * <p> * This will query the specified clock to obtain the current year. * Using this method allows the use of an alternate clock for testing. * The alternate clock may be introduced using {@link Clock dependency injection}. * * @param clock the clock to use, not null * @return the current year, not null */ def now(clock: Clock): Year = { val now: Date = Date.now(clock) Year.of(now.getYear) } /** * Obtains an instance of {@code Year}. * <p> * This method accepts a year value from the proleptic ISO calendar system. * <p> * The year 2AD/CE is represented by 2.<br> * The year 1AD/CE is represented by 1.<br> * The year 1BC/BCE is represented by 0.<br> * The year 2BC/BCE is represented by -1.<br> * * @param isoYear the ISO proleptic year to represent, from { @code MIN_VALUE} to { @code MAX_VALUE} * @return the year, not null * @throws DateTimeException if the field is invalid */ def of(isoYear: Int): Year = { YEAR.checkValidValue(isoYear) new Year(isoYear) } /** * Obtains an instance of {@code Year} from a temporal object. * <p> * This obtains a year based on the specified temporal. * A {@code TemporalAccessor} represents an arbitrary set of date and time information, * which this factory converts to an instance of {@code Year}. * <p> * The conversion extracts the {@link ChronoField#YEAR year} field. * The extraction is only permitted if the temporal object has an ISO * chronology, or can be converted to a {@code Date}. * <p> * This method matches the signature of the functional interface {@link TemporalQuery} * allowing it to be used in queries via method reference, {@code Year::from}. * * @param temporal the temporal object to convert, not null * @return the year, not null * @throws DateTimeException if unable to convert to a { @code Year} */ def from(temporal: TemporalAccessor): Year = { if (temporal.isInstanceOf[Year]) { temporal.asInstanceOf[Year] } object try { if ((IsoChronology.INSTANCE == Chronology.from(temporal)) == false) { temporal = Date.from(temporal) } of(temporal.get(YEAR)) } catch { case ex: DateTimeException => { throw new DateTimeException("Unable to obtain Year from TemporalAccessor: " + temporal.getClass, ex) } } } /** * Obtains an instance of {@code Year} from a text string such as {@code 2007}. * <p> * The string must represent a valid year. * Years outside the range 0000 to 9999 must be prefixed by the plus or minus symbol. * * @param text the text to parse such as "2007", not null * @return the parsed year, not null * @throws DateTimeParseException if the text cannot be parsed */ def parse(text: CharSequence): Year = { parse(text, PARSER) } /** * Obtains an instance of {@code Year} from a text string using a specific formatter. * <p> * The text is parsed using the formatter, returning a year. * * @param text the text to parse, not null * @param formatter the formatter to use, not null * @return the parsed year, not null * @throws DateTimeParseException if the text cannot be parsed */ def parse(text: CharSequence, formatter: DateTimeFormatter): Year = { object formatter.parse(text, Year.from) } /** * Checks if the year is a leap year, according to the ISO proleptic * calendar system rules. * <p> * This method applies the current rules for leap years across the whole time-line. 
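 * <p>
 * An illustrative sketch of the expected results, consistent with the rules described below:
 * {{{
 *   Year.isLeap(2004)   // true  (divisible by 4)
 *   Year.isLeap(1900)   // false (divisible by 100 but not by 400)
 *   Year.isLeap(2000)   // true  (divisible by 400)
 * }}}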
* In general, a year is a leap year if it is divisible by four without * remainder. However, years divisible by 100, are not leap years, with * the exception of years divisible by 400 which are. * <p> * For example, 1904 is a leap year it is divisible by 4. * 1900 was not a leap year as it is divisible by 100, however 2000 was a * leap year as it is divisible by 400. * <p> * The calculation is proleptic - applying the same rules into the far future and far past. * This is historically inaccurate, but is correct for the ISO-8601 standard. * * @param year the year to check * @return true if the year is leap, false otherwise */ def isLeap(year: Long): Boolean = { ((year & 3) == 0) && ((year % 100) != 0 || (year % 400) == 0) } private[time] def readExternal(in: DataInput): Year = { Year.of(in.readInt) } /** * The minimum supported year, '-999,999,999'. */ final val MIN_VALUE: Int = -999 _999_999 /** * The maximum supported year, '+999,999,999'. */ final val MAX_VALUE: Int = 999 _999_999 /** * Parser. */ private final val PARSER: DateTimeFormatter = new DateTimeFormatterBuilder().appendValue(YEAR, 4, 10, SignStyle.EXCEEDS_PAD).toFormatter } final class Year extends Temporal with TemporalAdjuster with Comparable[Year] { /** * Constructor. * * @param year the year to represent */ private def this(year: Int) { this.year = year } /** * Gets the year value. * <p> * The year returned by this method is proleptic as per {@code get(YEAR)}. * * @return the year, { @code MIN_VALUE} to { @code MAX_VALUE} */ def getValue: Int = { year } /** * Checks if the specified field is supported. * <p> * This checks if this year can be queried for the specified field. * If false, then calling the {@link #range(TemporalField) range}, * {@link #get(TemporalField) get} and {@link #with(TemporalField, long)} * methods will throw an exception. * <p> * If the field is a {@link ChronoField} then the query is implemented here. * The supported fields are: * <ul> * <li>{@code YEAR_OF_ERA} * <li>{@code YEAR} * <li>{@code ERA} * </ul> * All other {@code ChronoField} instances will false. * <p> * If the field is not a {@code ChronoField}, then the result of this method * is obtained by invoking {@code TemporalField.isSupportedBy(TemporalAccessor)} * passing {@code this} as the argument. * Whether the field is supported is determined by the field. * * @param field the field to check, null returns false * @return true if the field is supported on this year, false if not */ def isSupported(field: TemporalField): Boolean = { if (field.isInstanceOf[ChronoField]) { field eq YEAR || field eq YEAR_OF_ERA || field eq ERA } field != null && field.isSupportedBy(this) } /** * Checks if the specified unit is supported. * <p> * This checks if the specified unit can be added to, or subtracted from, this date-time. * If false, then calling the {@link #plus(long, TemporalUnit)} and * {@link #minus(long, TemporalUnit) minus} methods will throw an exception. * <p> * If the unit is a {@link ChronoUnit} then the query is implemented here. * The supported units are: * <ul> * <li>{@code YEARS} * <li>{@code DECADES} * <li>{@code CENTURIES} * <li>{@code MILLENNIA} * <li>{@code ERAS} * </ul> * All other {@code ChronoUnit} instances will false. * <p> * If the unit is not a {@code ChronoUnit}, then the result of this method * is obtained by invoking {@code TemporalUnit.isSupportedBy(Temporal)} * passing {@code this} as the argument. * Whether the unit is supported is determined by the unit. 
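 * <p>
 * An illustrative sketch of the expected behaviour (it assumes the {@code ChronoUnit}
 * constants imported by this file):
 * {{{
 *   Year.of(2012).isSupported(YEARS)    // true
 *   Year.of(2012).isSupported(MONTHS)   // false, not supported by this class
 * }}}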
* * @param unit the unit to check, null returns false * @return true if the unit can be added/subtracted, false if not */ def isSupported(unit: TemporalUnit): Boolean = { if (unit.isInstanceOf[ChronoUnit]) { unit eq YEARS || unit eq DECADES || unit eq CENTURIES || unit eq MILLENNIA || unit eq ERAS } unit != null && unit.isSupportedBy(this) } /** * Gets the range of valid values for the specified field. * <p> * The range object expresses the minimum and maximum valid values for a field. * This year is used to enhance the accuracy of the returned range. * If it is not possible to the range, because the field is not supported * or for some other reason, an exception is thrown. * <p> * If the field is a {@link ChronoField} then the query is implemented here. * The {@link #isSupported(TemporalField) supported fields} will * appropriate range instances. * All other {@code ChronoField} instances will throw an {@code UnsupportedTemporalTypeException}. * <p> * If the field is not a {@code ChronoField}, then the result of this method * is obtained by invoking {@code TemporalField.rangeRefinedBy(TemporalAccessor)} * passing {@code this} as the argument. * Whether the range can be obtained is determined by the field. * * @param field the field to query the range for, not null * @return the range of valid values for the field, not null * @throws DateTimeException if the range for the field cannot be obtained * @throws UnsupportedTemporalTypeException if the field is not supported */ override def range(field: TemporalField): ValueRange = { if (field eq YEAR_OF_ERA) { (if (year <= 0) ValueRange.of(1, MAX_VALUE + 1) else ValueRange.of(1, MAX_VALUE)) } Temporal.super.range(field) } /** * Gets the value of the specified field from this year as an {@code int}. * <p> * This queries this year for the value for the specified field. * The returned value will always be within the valid range of values for the field. * If it is not possible to the value, because the field is not supported * or for some other reason, an exception is thrown. * <p> * If the field is a {@link ChronoField} then the query is implemented here. * The {@link #isSupported(TemporalField) supported fields} will valid * values based on this year. * All other {@code ChronoField} instances will throw an {@code UnsupportedTemporalTypeException}. * <p> * If the field is not a {@code ChronoField}, then the result of this method * is obtained by invoking {@code TemporalField.getFrom(TemporalAccessor)} * passing {@code this} as the argument. Whether the value can be obtained, * and what the value represents, is determined by the field. * * @param field the field to get, not null * @return the value for the field * @throws DateTimeException if a value for the field cannot be obtained or * the value is outside the range of valid values for the field * @throws UnsupportedTemporalTypeException if the field is not supported or * the range of values exceeds an { @code int} * @throws ArithmeticException if numeric overflow occurs */ override def get(field: TemporalField): Int = { range(field).checkValidIntValue(getLong(field), field) } /** * Gets the value of the specified field from this year as a {@code long}. * <p> * This queries this year for the value for the specified field. * If it is not possible to the value, because the field is not supported * or for some other reason, an exception is thrown. * <p> * If the field is a {@link ChronoField} then the query is implemented here. 
* The {@link #isSupported(TemporalField) supported fields} will valid * values based on this year. * All other {@code ChronoField} instances will throw an {@code UnsupportedTemporalTypeException}. * <p> * If the field is not a {@code ChronoField}, then the result of this method * is obtained by invoking {@code TemporalField.getFrom(TemporalAccessor)} * passing {@code this} as the argument. Whether the value can be obtained, * and what the value represents, is determined by the field. * * @param field the field to get, not null * @return the value for the field * @throws DateTimeException if a value for the field cannot be obtained * @throws UnsupportedTemporalTypeException if the field is not supported * @throws ArithmeticException if numeric overflow occurs */ def getLong(field: TemporalField): Long = { if (field.isInstanceOf[ChronoField]) { field.asInstanceOf[ChronoField] match { case YEAR_OF_ERA => (if (year < 1) 1 - year else year) case YEAR => year case ERA => (if (year < 1) 0 else 1) } throw new UnsupportedTemporalTypeException("Unsupported field: " + field) } field.getFrom(this) } /** * Checks if the year is a leap year, according to the ISO proleptic * calendar system rules. * <p> * This method applies the current rules for leap years across the whole time-line. * In general, a year is a leap year if it is divisible by four without * remainder. However, years divisible by 100, are not leap years, with * the exception of years divisible by 400 which are. * <p> * For example, 1904 is a leap year it is divisible by 4. * 1900 was not a leap year as it is divisible by 100, however 2000 was a * leap year as it is divisible by 400. * <p> * The calculation is proleptic - applying the same rules into the far future and far past. * This is historically inaccurate, but is correct for the ISO-8601 standard. * * @return true if the year is leap, false otherwise */ def isLeap: Boolean = { Year.isLeap(year) } /** * Checks if the month-day is valid for this year. * <p> * This method checks whether this year and the input month and day form * a valid date. * * @param monthDay the month-day to validate, null returns false * @return true if the month and day are valid for this year */ def isValidMonthDay(monthDay: MonthDay): Boolean = { monthDay != null && monthDay.isValidYear(year) } /** * Gets the length of this year in days. * * @return the length of this year in days, 365 or 366 */ def length: Int = { if (isLeap) 366 else 365 } /** * Returns an adjusted copy of this year. * <p> * This returns a {@code Year}, based on this one, with the year adjusted. * The adjustment takes place using the specified adjuster strategy object. * Read the documentation of the adjuster to understand what adjustment will be made. * <p> * The result of this method is obtained by invoking the * {@link TemporalAdjuster#adjustInto(Temporal)} method on the * specified adjuster passing {@code this} as the argument. * <p> * This instance is immutable and unaffected by this method call. * * @param adjuster the adjuster to use, not null * @return a { @code Year} based on { @code this} with the adjustment made, not null * @throws DateTimeException if the adjustment cannot be made * @throws ArithmeticException if numeric overflow occurs */ override def `with`(adjuster: TemporalAdjuster): Year = { adjuster.adjustInto(this).asInstanceOf[Year] } /** * Returns a copy of this year with the specified field set to a new value. * <p> * This returns a {@code Year}, based on this one, with the value * for the specified field changed. 
* If it is not possible to set the value, because the field is not supported or for * some other reason, an exception is thrown. * <p> * If the field is a {@link ChronoField} then the adjustment is implemented here. * The supported fields behave as follows: * <ul> * <li>{@code YEAR_OF_ERA} - * Returns a {@code Year} with the specified year-of-era * The era will be unchanged. * <li>{@code YEAR} - * Returns a {@code Year} with the specified year. * This completely replaces the date and is equivalent to {@link #of(int)}. * <li>{@code ERA} - * Returns a {@code Year} with the specified era. * The year-of-era will be unchanged. * </ul> * <p> * In all cases, if the new value is outside the valid range of values for the field * then a {@code DateTimeException} will be thrown. * <p> * All other {@code ChronoField} instances will throw an {@code UnsupportedTemporalTypeException}. * <p> * If the field is not a {@code ChronoField}, then the result of this method * is obtained by invoking {@code TemporalField.adjustInto(Temporal, long)} * passing {@code this} as the argument. In this case, the field determines * whether and how to adjust the instant. * <p> * This instance is immutable and unaffected by this method call. * * @param field the field to set in the result, not null * @param newValue the new value of the field in the result * @return a { @code Year} based on { @code this} with the specified field set, not null * @throws DateTimeException if the field cannot be set * @throws UnsupportedTemporalTypeException if the field is not supported * @throws ArithmeticException if numeric overflow occurs */ def `with`(field: TemporalField, newValue: Long): Year = { if (field.isInstanceOf[ChronoField]) { val f: ChronoField = field.asInstanceOf[ChronoField] f.checkValidValue(newValue) f match { case YEAR_OF_ERA => Year.of((if (year < 1) 1 - newValue else newValue).asInstanceOf[Int]) case YEAR => Year.of(newValue.asInstanceOf[Int]) case ERA => (if (getLong(ERA) == newValue) this else Year.of(1 - year)) } throw new UnsupportedTemporalTypeException("Unsupported field: " + field) } field.adjustInto(this, newValue) } /** * Returns a copy of this year with the specified amount added. * <p> * This returns a {@code Year}, based on this one, with the specified amount added. * The amount is typically {@link Period} but may be any other type implementing * the {@link TemporalAmount} interface. * <p> * The calculation is delegated to the amount object by calling * {@link TemporalAmount#addTo(Temporal)}. The amount implementation is free * to implement the addition in any way it wishes, however it typically * calls back to {@link #plus(long, TemporalUnit)}. Consult the documentation * of the amount implementation to determine if it can be successfully added. * <p> * This instance is immutable and unaffected by this method call. * * @param amountToAdd the amount to add, not null * @return a { @code Year} based on this year with the addition made, not null * @throws DateTimeException if the addition cannot be made * @throws ArithmeticException if numeric overflow occurs */ override def plus(amountToAdd: TemporalAmount): Year = { amountToAdd.addTo(this).asInstanceOf[Year] } /** * Returns a copy of this year with the specified amount added. * <p> * This returns a {@code Year}, based on this one, with the amount * in terms of the unit added. If it is not possible to add the amount, because the * unit is not supported or for some other reason, an exception is thrown. 
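 * <p>
 * An illustrative sketch (it assumes the {@code ChronoUnit} constants imported by this file):
 * {{{
 *   Year.of(2012).plus(5, YEARS)     // Year 2017
 *   Year.of(2012).plus(3, DECADES)   // Year 2042
 * }}}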
* <p> * If the field is a {@link ChronoUnit} then the addition is implemented here. * The supported fields behave as follows: * <ul> * <li>{@code YEARS} - * Returns a {@code Year} with the specified number of years added. * This is equivalent to {@link #plusYears(long)}. * <li>{@code DECADES} - * Returns a {@code Year} with the specified number of decades added. * This is equivalent to calling {@link #plusYears(long)} with the amount * multiplied by 10. * <li>{@code CENTURIES} - * Returns a {@code Year} with the specified number of centuries added. * This is equivalent to calling {@link #plusYears(long)} with the amount * multiplied by 100. * <li>{@code MILLENNIA} - * Returns a {@code Year} with the specified number of millennia added. * This is equivalent to calling {@link #plusYears(long)} with the amount * multiplied by 1,000. * <li>{@code ERAS} - * Returns a {@code Year} with the specified number of eras added. * Only two eras are supported so the amount must be one, zero or minus one. * If the amount is non-zero then the year is changed such that the year-of-era * is unchanged. * </ul> * <p> * All other {@code ChronoUnit} instances will throw an {@code UnsupportedTemporalTypeException}. * <p> * If the field is not a {@code ChronoUnit}, then the result of this method * is obtained by invoking {@code TemporalUnit.addTo(Temporal, long)} * passing {@code this} as the argument. In this case, the unit determines * whether and how to perform the addition. * <p> * This instance is immutable and unaffected by this method call. * * @param amountToAdd the amount of the unit to add to the result, may be negative * @param unit the unit of the amount to add, not null * @return a { @code Year} based on this year with the specified amount added, not null * @throws DateTimeException if the addition cannot be made * @throws UnsupportedTemporalTypeException if the unit is not supported * @throws ArithmeticException if numeric overflow occurs */ def plus(amountToAdd: Long, unit: TemporalUnit): Year = { if (unit.isInstanceOf[ChronoUnit]) { unit.asInstanceOf[ChronoUnit] match { case YEARS => plusYears(amountToAdd) case DECADES => plusYears(Math.multiplyExact(amountToAdd, 10)) case CENTURIES => plusYears(Math.multiplyExact(amountToAdd, 100)) case MILLENNIA => plusYears(Math.multiplyExact(amountToAdd, 1000)) case ERAS => `with`(ERA, Math.addExact(getLong(ERA), amountToAdd)) } throw new UnsupportedTemporalTypeException("Unsupported unit: " + unit) } unit.addTo(this, amountToAdd) } /** * Returns a copy of this year with the specified number of years added. * <p> * This instance is immutable and unaffected by this method call. * * @param yearsToAdd the years to add, may be negative * @return a { @code Year} based on this year with the period added, not null * @throws DateTimeException if the result exceeds the supported year range */ def plusYears(yearsToAdd: Long): Year = { if (yearsToAdd == 0) { this } of(YEAR.checkValidIntValue(year + yearsToAdd)) } /** * Returns a copy of this year with the specified amount subtracted. * <p> * This returns a {@code Year}, based on this one, with the specified amount subtracted. * The amount is typically {@link Period} but may be any other type implementing * the {@link TemporalAmount} interface. * <p> * The calculation is delegated to the amount object by calling * {@link TemporalAmount#subtractFrom(Temporal)}. The amount implementation is free * to implement the subtraction in any way it wishes, however it typically * calls back to {@link #minus(long, TemporalUnit)}. 
Consult the documentation * of the amount implementation to determine if it can be successfully subtracted. * <p> * This instance is immutable and unaffected by this method call. * * @param amountToSubtract the amount to subtract, not null * @return a { @code Year} based on this year with the subtraction made, not null * @throws DateTimeException if the subtraction cannot be made * @throws ArithmeticException if numeric overflow occurs */ override def minus(amountToSubtract: TemporalAmount): Year = { amountToSubtract.subtractFrom(this).asInstanceOf[Year] } /** * Returns a copy of this year with the specified amount subtracted. * <p> * This returns a {@code Year}, based on this one, with the amount * in terms of the unit subtracted. If it is not possible to subtract the amount, * because the unit is not supported or for some other reason, an exception is thrown. * <p> * This method is equivalent to {@link #plus(long, TemporalUnit)} with the amount negated. * See that method for a full description of how addition, and thus subtraction, works. * <p> * This instance is immutable and unaffected by this method call. * * @param amountToSubtract the amount of the unit to subtract from the result, may be negative * @param unit the unit of the amount to subtract, not null * @return a { @code Year} based on this year with the specified amount subtracted, not null * @throws DateTimeException if the subtraction cannot be made * @throws UnsupportedTemporalTypeException if the unit is not supported * @throws ArithmeticException if numeric overflow occurs */ override def minus(amountToSubtract: Long, unit: TemporalUnit): Year = { (if (amountToSubtract == Long.MIN_VALUE) plus(Long.MAX_VALUE, unit).plus(1, unit) else plus(-amountToSubtract, unit)) } /** * Returns a copy of this year with the specified number of years subtracted. * <p> * This instance is immutable and unaffected by this method call. * * @param yearsToSubtract the years to subtract, may be negative * @return a { @code Year} based on this year with the period subtracted, not null * @throws DateTimeException if the result exceeds the supported year range */ def minusYears(yearsToSubtract: Long): Year = { (if (yearsToSubtract == Long.MIN_VALUE) plusYears(Long.MAX_VALUE).plusYears(1) else plusYears(-yearsToSubtract)) } /** * Queries this year using the specified query. * <p> * This queries this year using the specified query strategy object. * The {@code TemporalQuery} object defines the logic to be used to * obtain the result. Read the documentation of the query to understand * what the result of this method will be. * <p> * The result of this method is obtained by invoking the * {@link TemporalQuery#queryFrom(TemporalAccessor)} method on the * specified query passing {@code this} as the argument. * * @param <R> the type of the result * @param query the query to invoke, not null * @return the query result, null may be returned (defined by the query) * @throws DateTimeException if unable to query (defined by the query) * @throws ArithmeticException if numeric overflow occurs (defined by the query) */ override def query(query: TemporalQuery[R]): R = { if (query eq TemporalQuery.chronology) { IsoChronology.INSTANCE.asInstanceOf[R] } else if (query eq TemporalQuery.precision) { YEARS.asInstanceOf[R] } Temporal.super.query(query) } /** * Adjusts the specified temporal object to have this year. * <p> * This returns a temporal object of the same observable type as the input * with the year changed to be the same as this. 
* <p> * The adjustment is equivalent to using {@link Temporal#with(TemporalField, long)} * passing {@link ChronoField#YEAR} as the field. * If the specified temporal object does not use the ISO calendar system then * a {@code DateTimeException} is thrown. * <p> * In most cases, it is clearer to reverse the calling pattern by using * {@link Temporal#with(TemporalAdjuster)}: * {{{ * // these two lines are equivalent, but the second approach is recommended * temporal = thisYear.adjustInto(temporal); * temporal = temporal.with(thisYear); * }}} * <p> * This instance is immutable and unaffected by this method call. * * @param temporal the target object to be adjusted, not null * @return the adjusted object, not null * @throws DateTimeException if unable to make the adjustment * @throws ArithmeticException if numeric overflow occurs */ def adjustInto(temporal: Temporal): Temporal = { if ((Chronology.from(temporal) == IsoChronology.INSTANCE) == false) { throw new DateTimeException("Adjustment only supported on ISO date-time") } temporal.`with`(YEAR, year) } /** * Calculates the amount of time until another year in terms of the specified unit. * <p> * This calculates the amount of time between two {@code Year} * objects in terms of a single {@code TemporalUnit}. * The start and end points are {@code this} and the specified year. * The result will be negative if the end is before the start. * The {@code Temporal} passed to this method is converted to a * {@code Year} using {@link #from(TemporalAccessor)}. * For example, the period in decades between two year can be calculated * using {@code startYear.until(endYear, DECADES)}. * <p> * The calculation returns a whole number, representing the number of * complete units between the two years. * For example, the period in decades between 2012 and 2031 * will only be one decade as it is one year short of two decades. * <p> * There are two equivalent ways of using this method. * The first is to invoke this method. * The second is to use {@link TemporalUnit#between(Temporal, Temporal)}: * {{{ * // these two lines are equivalent * amount = start.until(end, YEARS); * amount = YEARS.between(start, end); * }}} * The choice should be made based on which makes the code more readable. * <p> * The calculation is implemented in this method for {@link ChronoUnit}. * The units {@code YEARS}, {@code DECADES}, {@code CENTURIES}, * {@code MILLENNIA} and {@code ERAS} are supported. * Other {@code ChronoUnit} values will throw an exception. * <p> * If the unit is not a {@code ChronoUnit}, then the result of this method * is obtained by invoking {@code TemporalUnit.between(Temporal, Temporal)} * passing {@code this} as the first argument and the converted input temporal * as the second argument. * <p> * This instance is immutable and unaffected by this method call. 
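 * <p>
 * The decades example above, expressed as an illustrative sketch:
 * {{{
 *   Year.of(2012).until(Year.of(2031), DECADES)   // 1, one year short of two decades
 *   Year.of(2012).until(Year.of(2032), DECADES)   // 2
 * }}}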
* * @param endExclusive the end date, which is converted to a { @code Year}, not null * @param unit the unit to measure the amount in, not null * @return the amount of time between this year and the end year * @throws DateTimeException if the amount cannot be calculated, or the end * temporal cannot be converted to a { @code Year} * @throws UnsupportedTemporalTypeException if the unit is not supported * @throws ArithmeticException if numeric overflow occurs */ def until(endExclusive: Temporal, unit: TemporalUnit): Long = { val end: Year = Year.from(endExclusive) if (unit.isInstanceOf[ChronoUnit]) { val yearsUntil: Long = (end.year.asInstanceOf[Long]) - year unit.asInstanceOf[ChronoUnit] match { case YEARS => yearsUntil case DECADES => yearsUntil / 10 case CENTURIES => yearsUntil / 100 case MILLENNIA => yearsUntil / 1000 case ERAS => end.getLong(ERA) - getLong(ERA) } throw new UnsupportedTemporalTypeException("Unsupported unit: " + unit) } unit.between(this, end) } /** * Formats this year using the specified formatter. * <p> * This year will be passed to the formatter to produce a string. * * @param formatter the formatter to use, not null * @return the formatted year string, not null * @throws DateTimeException if an error occurs during printing */ def format(formatter: DateTimeFormatter): String = { object formatter.format(this) } /** * Combines this year with a day-of-year to create a {@code Date}. * <p> * This returns a {@code Date} formed from this year and the specified day-of-year. * <p> * The day-of-year value 366 is only valid in a leap year. * * @param dayOfYear the day-of-year to use, not null * @return the local date formed from this year and the specified date of year, not null * @throws DateTimeException if the day of year is zero or less, 366 or greater or equal * to 366 and this is not a leap year */ def atDay(dayOfYear: Int): Date = { Date.ofYearDay(year, dayOfYear) } /** * Combines this year with a month to create a {@code YearMonth}. * <p> * This returns a {@code YearMonth} formed from this year and the specified month. * All possible combinations of year and month are valid. * <p> * This method can be used as part of a chain to produce a date: * {{{ * Date date = year.atMonth(month).atDay(day); * }}} * * @param month the month-of-year to use, not null * @return the year-month formed from this year and the specified month, not null */ def atMonth(month: Month): YearMonth = { YearMonth.of(year, month) } /** * Combines this year with a month to create a {@code YearMonth}. * <p> * This returns a {@code YearMonth} formed from this year and the specified month. * All possible combinations of year and month are valid. * <p> * This method can be used as part of a chain to produce a date: * {{{ * Date date = year.atMonth(month).atDay(day); * }}} * * @param month the month-of-year to use, from 1 (January) to 12 (December) * @return the year-month formed from this year and the specified month, not null * @throws DateTimeException if the month is invalid */ def atMonth(month: Int): YearMonth = { YearMonth.of(year, month) } /** * Combines this year with a month-day to create a {@code Date}. * <p> * This returns a {@code Date} formed from this year and the specified month-day. * <p> * A month-day of February 29th will be adjusted to February 28th in the resulting * date if the year is not a leap year. 
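 * <p>
 * An illustrative sketch of the February 29th adjustment described above:
 * {{{
 *   Year.of(2011).atMonthDay(MonthDay.of(2, 29))   // 2011-02-28, 2011 is not a leap year
 *   Year.of(2012).atMonthDay(MonthDay.of(2, 29))   // 2012-02-29
 * }}}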
* * @param monthDay the month-day to use, not null * @return the local date formed from this year and the specified month-day, not null */ def atMonthDay(monthDay: MonthDay): Date = { monthDay.atYear(year) } /** * Compares this year to another year. * <p> * The comparison is based on the value of the year. * It is "consistent with equals", as defined by {@link Comparable}. * * @param other the other year to compare to, not null * @return the comparator value, negative if less, positive if greater */ def compareTo(other: Year): Int = { year - other.year } /** * Is this year after the specified year. * * @param other the other year to compare to, not null * @return true if this is after the specified year */ def isAfter(other: Year): Boolean = { year > other.year } /** * Is this year before the specified year. * * @param other the other year to compare to, not null * @return true if this point is before the specified year */ def isBefore(other: Year): Boolean = { year < other.year } /** * Checks if this year is equal to another year. * <p> * The comparison is based on the time-line position of the years. * * @param obj the object to check, null returns false * @return true if this is equal to the other year */ override def equals(obj: AnyRef): Boolean = { if (this eq obj) { true } if (obj.isInstanceOf[Year]) { year == (obj.asInstanceOf[Year]).year } false } /** * A hash code for this year. * * @return a suitable hash code */ override def hashCode: Int = { year } /** * Outputs this year as a {@code String}. * * @return a string representation of this year, not null */ override def toString: String = { Integer.toString(year) } /** * Writes the object using a * <a href="../../../serialized-form.html#java.time.temporal.Ser">dedicated serialized form</a>. * {{{ * out.writeByte(11); // identifies this as a Year * out.writeInt(year); * }}} * * @return the instance of { @code Ser}, not null */ private def writeReplace: AnyRef = { new Ser(Ser.YEAR_TYPE, this) } /** * Defend against malicious streams. * @return never * @throws InvalidObjectException always */ private def readResolve: AnyRef = { throw new InvalidObjectException("Deserialization via serialization delegate") } private[time] def writeExternal(out: DataOutput) { out.writeInt(year) } /** * The year being represented. */ private final val year: Int = 0 } /** * A year-month in the ISO-8601 calendar system, such as {@code 2007-12}. * <p> * {@code YearMonth} is an immutable date-time object that represents the combination * of a year and month. Any field that can be derived from a year and month, such as * quarter-of-year, can be obtained. * <p> * This class does not store or represent a day, time or time-zone. * For example, the value "October 2007" can be stored in a {@code YearMonth}. * <p> * The ISO-8601 calendar system is the modern civil calendar system used today * in most of the world. It is equivalent to the proleptic Gregorian calendar * system, in which today's rules for leap years are applied for all time. * For most applications written today, the ISO-8601 rules are entirely suitable. * However, any application that makes use of historical dates, and requires them * to be accurate will find the ISO-8601 approach unsuitable. * * @implSpec * This class is immutable and thread-safe. * * @since 1.8 */ object YearMonth { /** * Obtains the current year-month from the system clock in the default time-zone. 
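 * <p>
 * An illustrative usage sketch (not part of the original documentation):
 * {{{
 *   val thisMonth: YearMonth = YearMonth.now
 * }}}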
* <p> * This will query the {@link java.time.Clock#systemDefaultZone() system clock} in the default * time-zone to obtain the current year-month. * The zone and offset will be set based on the time-zone in the clock. * <p> * Using this method will prevent the ability to use an alternate clock for testing * because the clock is hard-coded. * * @return the current year-month using the system clock and default time-zone, not null */ def now: YearMonth = { now(Clock.systemDefaultZone) } /** * Obtains the current year-month from the system clock in the specified time-zone. * <p> * This will query the {@link Clock#system(java.time.ZoneId) system clock} to obtain the current year-month. * Specifying the time-zone avoids dependence on the default time-zone. * <p> * Using this method will prevent the ability to use an alternate clock for testing * because the clock is hard-coded. * * @param zone the zone ID to use, not null * @return the current year-month using the system clock, not null */ def now(zone: ZoneId): YearMonth = { now(Clock.system(zone)) } /** * Obtains the current year-month from the specified clock. * <p> * This will query the specified clock to obtain the current year-month. * Using this method allows the use of an alternate clock for testing. * The alternate clock may be introduced using {@link Clock dependency injection}. * * @param clock the clock to use, not null * @return the current year-month, not null */ def now(clock: Clock): YearMonth = { val now: Date = Date.now(clock) YearMonth.of(now.getYear, now.getMonth) } /** * Obtains an instance of {@code YearMonth} from a year and month. * * @param year the year to represent, from MIN_YEAR to MAX_YEAR * @param month the month-of-year to represent, not null * @return the year-month, not null * @throws DateTimeException if the year value is invalid */ def of(year: Int, month: Month): YearMonth = { object of(year, month.getValue) } /** * Obtains an instance of {@code YearMonth} from a year and month. * * @param year the year to represent, from MIN_YEAR to MAX_YEAR * @param month the month-of-year to represent, from 1 (January) to 12 (December) * @return the year-month, not null * @throws DateTimeException if either field value is invalid */ def of(year: Int, month: Int): YearMonth = { YEAR.checkValidValue(year) MONTH_OF_YEAR.checkValidValue(month) new YearMonth(year, month) } /** * Obtains an instance of {@code YearMonth} from a temporal object. * <p> * This obtains a year-month based on the specified temporal. * A {@code TemporalAccessor} represents an arbitrary set of date and time information, * which this factory converts to an instance of {@code YearMonth}. * <p> * The conversion extracts the {@link ChronoField#YEAR YEAR} and * {@link ChronoField#MONTH_OF_YEAR MONTH_OF_YEAR} fields. * The extraction is only permitted if the temporal object has an ISO * chronology, or can be converted to a {@code Date}. * <p> * This method matches the signature of the functional interface {@link TemporalQuery} * allowing it to be used in queries via method reference, {@code YearMonth::from}. 
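 * <p>
 * An illustrative sketch (it assumes the {@code Date.of} factory referenced elsewhere in this file):
 * {{{
 *   YearMonth.from(Date.of(2007, 12, 3))   // 2007-12
 * }}}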
* * @param temporal the temporal object to convert, not null * @return the year-month, not null * @throws DateTimeException if unable to convert to a { @code YearMonth} */ def from(temporal: TemporalAccessor): YearMonth = { if (temporal.isInstanceOf[YearMonth]) { temporal.asInstanceOf[YearMonth] } object try { if ((IsoChronology.INSTANCE == Chronology.from(temporal)) == false) { temporal = Date.from(temporal) } of(temporal.get(YEAR), temporal.get(MONTH_OF_YEAR)) } catch { case ex: DateTimeException => { throw new DateTimeException("Unable to obtain YearMonth from TemporalAccessor: " + temporal.getClass, ex) } } } /** * Obtains an instance of {@code YearMonth} from a text string such as {@code 2007-12}. * <p> * The string must represent a valid year-month. * The format must be {@code uuuu-MM}. * Years outside the range 0000 to 9999 must be prefixed by the plus or minus symbol. * * @param text the text to parse such as "2007-12", not null * @return the parsed year-month, not null * @throws DateTimeParseException if the text cannot be parsed */ def parse(text: CharSequence): YearMonth = { parse(text, PARSER) } /** * Obtains an instance of {@code YearMonth} from a text string using a specific formatter. * <p> * The text is parsed using the formatter, returning a year-month. * * @param text the text to parse, not null * @param formatter the formatter to use, not null * @return the parsed year-month, not null * @throws DateTimeParseException if the text cannot be parsed */ def parse(text: CharSequence, formatter: DateTimeFormatter): YearMonth = { object formatter.parse(text, YearMonth.from) } private[time] def readExternal(in: DataInput): YearMonth = { val year: Int = in.readInt val month: Byte = in.readByte YearMonth.of(year, month) } /** * Serialization version. */ private final val serialVersionUID: Long = 4183400860270640070L /** * Parser. */ private final val PARSER: DateTimeFormatter = new DateTimeFormatterBuilder().appendValue(YEAR, 4, 10, SignStyle.EXCEEDS_PAD).appendLiteral('-').appendValue(MONTH_OF_YEAR, 2).toFormatter } final class YearMonth extends Temporal with TemporalAdjuster with Comparable[YearMonth] { /** * Constructor. * * @param year the year to represent, validated from MIN_YEAR to MAX_YEAR * @param month the month-of-year to represent, validated from 1 (January) to 12 (December) */ private def this(year: Int, month: Int) { this.year = year this.month = month } /** * Returns a copy of this year-month with the new year and month, checking * to see if a new object is in fact required. * * @param newYear the year to represent, validated from MIN_YEAR to MAX_YEAR * @param newMonth the month-of-year to represent, validated not null * @return the year-month, not null */ private def `with`(newYear: Int, newMonth: Int): YearMonth = { if (year == newYear && month == newMonth) { this } new YearMonth(newYear, newMonth) } /** * Checks if the specified field is supported. * <p> * This checks if this year-month can be queried for the specified field. * If false, then calling the {@link #range(TemporalField) range}, * {@link #get(TemporalField) get} and {@link #with(TemporalField, long)} * methods will throw an exception. * <p> * If the field is a {@link ChronoField} then the query is implemented here. * The supported fields are: * <ul> * <li>{@code MONTH_OF_YEAR} * <li>{@code PROLEPTIC_MONTH} * <li>{@code YEAR_OF_ERA} * <li>{@code YEAR} * <li>{@code ERA} * </ul> * All other {@code ChronoField} instances will false. 
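 * <p>
 * An illustrative sketch of the expected behaviour:
 * {{{
 *   YearMonth.of(2007, 12).isSupported(MONTH_OF_YEAR)   // true
 *   YearMonth.of(2007, 12).isSupported(DAY_OF_MONTH)    // false, a year-month has no day
 * }}}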
* <p> * If the field is not a {@code ChronoField}, then the result of this method * is obtained by invoking {@code TemporalField.isSupportedBy(TemporalAccessor)} * passing {@code this} as the argument. * Whether the field is supported is determined by the field. * * @param field the field to check, null returns false * @return true if the field is supported on this year-month, false if not */ def isSupported(field: TemporalField): Boolean = { if (field.isInstanceOf[ChronoField]) { field eq YEAR || field eq MONTH_OF_YEAR || field eq PROLEPTIC_MONTH || field eq YEAR_OF_ERA || field eq ERA } field != null && field.isSupportedBy(this) } /** * Checks if the specified unit is supported. * <p> * This checks if the specified unit can be added to, or subtracted from, this date-time. * If false, then calling the {@link #plus(long, TemporalUnit)} and * {@link #minus(long, TemporalUnit) minus} methods will throw an exception. * <p> * If the unit is a {@link ChronoUnit} then the query is implemented here. * The supported units are: * <ul> * <li>{@code MONTHS} * <li>{@code YEARS} * <li>{@code DECADES} * <li>{@code CENTURIES} * <li>{@code MILLENNIA} * <li>{@code ERAS} * </ul> * All other {@code ChronoUnit} instances will false. * <p> * If the unit is not a {@code ChronoUnit}, then the result of this method * is obtained by invoking {@code TemporalUnit.isSupportedBy(Temporal)} * passing {@code this} as the argument. * Whether the unit is supported is determined by the unit. * * @param unit the unit to check, null returns false * @return true if the unit can be added/subtracted, false if not */ def isSupported(unit: TemporalUnit): Boolean = { if (unit.isInstanceOf[ChronoUnit]) { unit eq MONTHS || unit eq YEARS || unit eq DECADES || unit eq CENTURIES || unit eq MILLENNIA || unit eq ERAS } unit != null && unit.isSupportedBy(this) } /** * Gets the range of valid values for the specified field. * <p> * The range object expresses the minimum and maximum valid values for a field. * This year-month is used to enhance the accuracy of the returned range. * If it is not possible to the range, because the field is not supported * or for some other reason, an exception is thrown. * <p> * If the field is a {@link ChronoField} then the query is implemented here. * The {@link #isSupported(TemporalField) supported fields} will * appropriate range instances. * All other {@code ChronoField} instances will throw an {@code UnsupportedTemporalTypeException}. * <p> * If the field is not a {@code ChronoField}, then the result of this method * is obtained by invoking {@code TemporalField.rangeRefinedBy(TemporalAccessor)} * passing {@code this} as the argument. * Whether the range can be obtained is determined by the field. * * @param field the field to query the range for, not null * @return the range of valid values for the field, not null * @throws DateTimeException if the range for the field cannot be obtained * @throws UnsupportedTemporalTypeException if the field is not supported */ override def range(field: TemporalField): ValueRange = { if (field eq YEAR_OF_ERA) { (if (getYear <= 0) ValueRange.of(1, Year.MAX_VALUE + 1) else ValueRange.of(1, Year.MAX_VALUE)) } Temporal.super.range(field) } /** * Gets the value of the specified field from this year-month as an {@code int}. * <p> * This queries this year-month for the value for the specified field. * The returned value will always be within the valid range of values for the field. 
* If it is not possible to the value, because the field is not supported * or for some other reason, an exception is thrown. * <p> * If the field is a {@link ChronoField} then the query is implemented here. * The {@link #isSupported(TemporalField) supported fields} will valid * values based on this year-month, except {@code PROLEPTIC_MONTH} which is too * large to fit in an {@code int} and throw a {@code DateTimeException}. * All other {@code ChronoField} instances will throw an {@code UnsupportedTemporalTypeException}. * <p> * If the field is not a {@code ChronoField}, then the result of this method * is obtained by invoking {@code TemporalField.getFrom(TemporalAccessor)} * passing {@code this} as the argument. Whether the value can be obtained, * and what the value represents, is determined by the field. * * @param field the field to get, not null * @return the value for the field * @throws DateTimeException if a value for the field cannot be obtained or * the value is outside the range of valid values for the field * @throws UnsupportedTemporalTypeException if the field is not supported or * the range of values exceeds an { @code int} * @throws ArithmeticException if numeric overflow occurs */ override def get(field: TemporalField): Int = { range(field).checkValidIntValue(getLong(field), field) } /** * Gets the value of the specified field from this year-month as a {@code long}. * <p> * This queries this year-month for the value for the specified field. * If it is not possible to the value, because the field is not supported * or for some other reason, an exception is thrown. * <p> * If the field is a {@link ChronoField} then the query is implemented here. * The {@link #isSupported(TemporalField) supported fields} will valid * values based on this year-month. * All other {@code ChronoField} instances will throw an {@code UnsupportedTemporalTypeException}. * <p> * If the field is not a {@code ChronoField}, then the result of this method * is obtained by invoking {@code TemporalField.getFrom(TemporalAccessor)} * passing {@code this} as the argument. Whether the value can be obtained, * and what the value represents, is determined by the field. * * @param field the field to get, not null * @return the value for the field * @throws DateTimeException if a value for the field cannot be obtained * @throws UnsupportedTemporalTypeException if the field is not supported * @throws ArithmeticException if numeric overflow occurs */ def getLong(field: TemporalField): Long = { if (field.isInstanceOf[ChronoField]) { field.asInstanceOf[ChronoField] match { case MONTH_OF_YEAR => month case PROLEPTIC_MONTH => getProlepticMonth case YEAR_OF_ERA => (if (year < 1) 1 - year else year) case YEAR => year case ERA => (if (year < 1) 0 else 1) } throw new UnsupportedTemporalTypeException("Unsupported field: " + field) } field.getFrom(this) } private def getProlepticMonth: Long = { (year * 12L + month - 1) } /** * Gets the year field. * <p> * This method returns the primitive {@code int} value for the year. * <p> * The year returned by this method is proleptic as per {@code get(YEAR)}. * * @return the year, from MIN_YEAR to MAX_YEAR */ def getYear: Int = { year } /** * Gets the month-of-year field from 1 to 12. * <p> * This method returns the month as an {@code int} from 1 to 12. * Application code is frequently clearer if the enum {@link Month} * is used by calling {@link #getMonth()}. 
* * @return the month-of-year, from 1 to 12 * @see #getMonth() */ def getMonthValue: Int = { month } /** * Gets the month-of-year field using the {@code Month} enum. * <p> * This method returns the enum {@link Month} for the month. * This avoids confusion as to what {@code int} values mean. * If you need access to the primitive {@code int} value then the enum * provides the {@link Month#getValue() int value}. * * @return the month-of-year, not null * @see #getMonthValue() */ def getMonth: Month = { Month.of(month) } /** * Checks if the year is a leap year, according to the ISO proleptic * calendar system rules. * <p> * This method applies the current rules for leap years across the whole time-line. * In general, a year is a leap year if it is divisible by four without * remainder. However, years divisible by 100, are not leap years, with * the exception of years divisible by 400 which are. * <p> * For example, 1904 is a leap year it is divisible by 4. * 1900 was not a leap year as it is divisible by 100, however 2000 was a * leap year as it is divisible by 400. * <p> * The calculation is proleptic - applying the same rules into the far future and far past. * This is historically inaccurate, but is correct for the ISO-8601 standard. * * @return true if the year is leap, false otherwise */ def isLeapYear: Boolean = { IsoChronology.INSTANCE.isLeapYear(year) } /** * Checks if the day-of-month is valid for this year-month. * <p> * This method checks whether this year and month and the input day form * a valid date. * * @param dayOfMonth the day-of-month to validate, from 1 to 31, invalid value returns false * @return true if the day is valid for this year-month */ def isValidDay(dayOfMonth: Int): Boolean = { dayOfMonth >= 1 && dayOfMonth <= lengthOfMonth } /** * Returns the length of the month, taking account of the year. * <p> * This returns the length of the month in days. * For example, a date in January would 31. * * @return the length of the month in days, from 28 to 31 */ def lengthOfMonth: Int = { getMonth.length(isLeapYear) } /** * Returns the length of the year. * <p> * This returns the length of the year in days, either 365 or 366. * * @return 366 if the year is leap, 365 otherwise */ def lengthOfYear: Int = { (if (isLeapYear) 366 else 365) } /** * Returns an adjusted copy of this year-month. * <p> * This returns a {@code YearMonth}, based on this one, with the year-month adjusted. * The adjustment takes place using the specified adjuster strategy object. * Read the documentation of the adjuster to understand what adjustment will be made. * <p> * A simple adjuster might simply set the one of the fields, such as the year field. * A more complex adjuster might set the year-month to the next month that * Halley's comet will pass the Earth. * <p> * The result of this method is obtained by invoking the * {@link TemporalAdjuster#adjustInto(Temporal)} method on the * specified adjuster passing {@code this} as the argument. * <p> * This instance is immutable and unaffected by this method call. * * @param adjuster the adjuster to use, not null * @return a { @code YearMonth} based on { @code this} with the adjustment made, not null * @throws DateTimeException if the adjustment cannot be made * @throws ArithmeticException if numeric overflow occurs */ override def `with`(adjuster: TemporalAdjuster): YearMonth = { adjuster.adjustInto(this).asInstanceOf[YearMonth] } /** * Returns a copy of this year-month with the specified field set to a new value. 
* <p> * This returns a {@code YearMonth}, based on this one, with the value * for the specified field changed. * This can be used to change any supported field, such as the year or month. * If it is not possible to set the value, because the field is not supported or for * some other reason, an exception is thrown. * <p> * If the field is a {@link ChronoField} then the adjustment is implemented here. * The supported fields behave as follows: * <ul> * <li>{@code MONTH_OF_YEAR} - * Returns a {@code YearMonth} with the specified month-of-year. * The year will be unchanged. * <li>{@code PROLEPTIC_MONTH} - * Returns a {@code YearMonth} with the specified proleptic-month. * This completely replaces the year and month of this object. * <li>{@code YEAR_OF_ERA} - * Returns a {@code YearMonth} with the specified year-of-era * The month and era will be unchanged. * <li>{@code YEAR} - * Returns a {@code YearMonth} with the specified year. * The month will be unchanged. * <li>{@code ERA} - * Returns a {@code YearMonth} with the specified era. * The month and year-of-era will be unchanged. * </ul> * <p> * In all cases, if the new value is outside the valid range of values for the field * then a {@code DateTimeException} will be thrown. * <p> * All other {@code ChronoField} instances will throw an {@code UnsupportedTemporalTypeException}. * <p> * If the field is not a {@code ChronoField}, then the result of this method * is obtained by invoking {@code TemporalField.adjustInto(Temporal, long)} * passing {@code this} as the argument. In this case, the field determines * whether and how to adjust the instant. * <p> * This instance is immutable and unaffected by this method call. * * @param field the field to set in the result, not null * @param newValue the new value of the field in the result * @return a { @code YearMonth} based on { @code this} with the specified field set, not null * @throws DateTimeException if the field cannot be set * @throws UnsupportedTemporalTypeException if the field is not supported * @throws ArithmeticException if numeric overflow occurs */ def `with`(field: TemporalField, newValue: Long): YearMonth = { if (field.isInstanceOf[ChronoField]) { val f: ChronoField = field.asInstanceOf[ChronoField] f.checkValidValue(newValue) f match { case MONTH_OF_YEAR => withMonth(newValue.asInstanceOf[Int]) case PROLEPTIC_MONTH => plusMonths(newValue - getProlepticMonth) case YEAR_OF_ERA => withYear((if (year < 1) 1 - newValue else newValue).asInstanceOf[Int]) case YEAR => withYear(newValue.asInstanceOf[Int]) case ERA => (if (getLong(ERA) == newValue) this else withYear(1 - year)) } throw new UnsupportedTemporalTypeException("Unsupported field: " + field) } field.adjustInto(this, newValue) } /** * Returns a copy of this {@code YearMonth} with the year altered. * <p> * This instance is immutable and unaffected by this method call. * * @param year the year to set in the returned year-month, from MIN_YEAR to MAX_YEAR * @return a { @code YearMonth} based on this year-month with the requested year, not null * @throws DateTimeException if the year value is invalid */ def withYear(year: Int): YearMonth = { YEAR.checkValidValue(year) `with`(year, month) } /** * Returns a copy of this {@code YearMonth} with the month-of-year altered. * <p> * This instance is immutable and unaffected by this method call. 
* * @param month the month-of-year to set in the returned year-month, from 1 (January) to 12 (December) * @return a { @code YearMonth} based on this year-month with the requested month, not null * @throws DateTimeException if the month-of-year value is invalid */ def withMonth(month: Int): YearMonth = { MONTH_OF_YEAR.checkValidValue(month) `with`(year, month) } /** * Returns a copy of this year-month with the specified amount added. * <p> * This returns a {@code YearMonth}, based on this one, with the specified amount added. * The amount is typically {@link Period} but may be any other type implementing * the {@link TemporalAmount} interface. * <p> * The calculation is delegated to the amount object by calling * {@link TemporalAmount#addTo(Temporal)}. The amount implementation is free * to implement the addition in any way it wishes, however it typically * calls back to {@link #plus(long, TemporalUnit)}. Consult the documentation * of the amount implementation to determine if it can be successfully added. * <p> * This instance is immutable and unaffected by this method call. * * @param amountToAdd the amount to add, not null * @return a { @code YearMonth} based on this year-month with the addition made, not null * @throws DateTimeException if the addition cannot be made * @throws ArithmeticException if numeric overflow occurs */ override def plus(amountToAdd: TemporalAmount): YearMonth = { amountToAdd.addTo(this).asInstanceOf[YearMonth] } /** * Returns a copy of this year-month with the specified amount added. * <p> * This returns a {@code YearMonth}, based on this one, with the amount * in terms of the unit added. If it is not possible to add the amount, because the * unit is not supported or for some other reason, an exception is thrown. * <p> * If the field is a {@link ChronoUnit} then the addition is implemented here. * The supported fields behave as follows: * <ul> * <li>{@code MONTHS} - * Returns a {@code YearMonth} with the specified number of months added. * This is equivalent to {@link #plusMonths(long)}. * <li>{@code YEARS} - * Returns a {@code YearMonth} with the specified number of years added. * This is equivalent to {@link #plusYears(long)}. * <li>{@code DECADES} - * Returns a {@code YearMonth} with the specified number of decades added. * This is equivalent to calling {@link #plusYears(long)} with the amount * multiplied by 10. * <li>{@code CENTURIES} - * Returns a {@code YearMonth} with the specified number of centuries added. * This is equivalent to calling {@link #plusYears(long)} with the amount * multiplied by 100. * <li>{@code MILLENNIA} - * Returns a {@code YearMonth} with the specified number of millennia added. * This is equivalent to calling {@link #plusYears(long)} with the amount * multiplied by 1,000. * <li>{@code ERAS} - * Returns a {@code YearMonth} with the specified number of eras added. * Only two eras are supported so the amount must be one, zero or minus one. * If the amount is non-zero then the year is changed such that the year-of-era * is unchanged. * </ul> * <p> * All other {@code ChronoUnit} instances will throw an {@code UnsupportedTemporalTypeException}. * <p> * If the field is not a {@code ChronoUnit}, then the result of this method * is obtained by invoking {@code TemporalUnit.addTo(Temporal, long)} * passing {@code this} as the argument. In this case, the unit determines * whether and how to perform the addition. * <p> * This instance is immutable and unaffected by this method call. 
* * @param amountToAdd the amount of the unit to add to the result, may be negative * @param unit the unit of the amount to add, not null * @return a { @code YearMonth} based on this year-month with the specified amount added, not null * @throws DateTimeException if the addition cannot be made * @throws UnsupportedTemporalTypeException if the unit is not supported * @throws ArithmeticException if numeric overflow occurs */ def plus(amountToAdd: Long, unit: TemporalUnit): YearMonth = { if (unit.isInstanceOf[ChronoUnit]) { unit.asInstanceOf[ChronoUnit] match { case MONTHS => plusMonths(amountToAdd) case YEARS => plusYears(amountToAdd) case DECADES => plusYears(Math.multiplyExact(amountToAdd, 10)) case CENTURIES => plusYears(Math.multiplyExact(amountToAdd, 100)) case MILLENNIA => plusYears(Math.multiplyExact(amountToAdd, 1000)) case ERAS => `with`(ERA, Math.addExact(getLong(ERA), amountToAdd)) } throw new UnsupportedTemporalTypeException("Unsupported unit: " + unit) } unit.addTo(this, amountToAdd) } /** * Returns a copy of this year-month with the specified period in years added. * <p> * This instance is immutable and unaffected by this method call. * * @param yearsToAdd the years to add, may be negative * @return a { @code YearMonth} based on this year-month with the years added, not null * @throws DateTimeException if the result exceeds the supported range */ def plusYears(yearsToAdd: Long): YearMonth = { if (yearsToAdd == 0) { this } val newYear: Int = YEAR.checkValidIntValue(year + yearsToAdd) `with`(newYear, month) } /** * Returns a copy of this year-month with the specified period in months added. * <p> * This instance is immutable and unaffected by this method call. * * @param monthsToAdd the months to add, may be negative * @return a { @code YearMonth} based on this year-month with the months added, not null * @throws DateTimeException if the result exceeds the supported range */ def plusMonths(monthsToAdd: Long): YearMonth = { if (monthsToAdd == 0) { this } val monthCount: Long = year * 12L + (month - 1) val calcMonths: Long = monthCount + monthsToAdd val newYear: Int = YEAR.checkValidIntValue(Math.floorDiv(calcMonths, 12)) val newMonth: Int = Math.floorMod(calcMonths, 12).asInstanceOf[Int] + 1 `with`(newYear, newMonth) } /** * Returns a copy of this year-month with the specified amount subtracted. * <p> * This returns a {@code YearMonth}, based on this one, with the specified amount subtracted. * The amount is typically {@link Period} but may be any other type implementing * the {@link TemporalAmount} interface. * <p> * The calculation is delegated to the amount object by calling * {@link TemporalAmount#subtractFrom(Temporal)}. The amount implementation is free * to implement the subtraction in any way it wishes, however it typically * calls back to {@link #minus(long, TemporalUnit)}. Consult the documentation * of the amount implementation to determine if it can be successfully subtracted. * <p> * This instance is immutable and unaffected by this method call. * * @param amountToSubtract the amount to subtract, not null * @return a { @code YearMonth} based on this year-month with the subtraction made, not null * @throws DateTimeException if the subtraction cannot be made * @throws ArithmeticException if numeric overflow occurs */ override def minus(amountToSubtract: TemporalAmount): YearMonth = { amountToSubtract.subtractFrom(this).asInstanceOf[YearMonth] } /** * Returns a copy of this year-month with the specified amount subtracted. 
* <p> * This returns a {@code YearMonth}, based on this one, with the amount * in terms of the unit subtracted. If it is not possible to subtract the amount, * because the unit is not supported or for some other reason, an exception is thrown. * <p> * This method is equivalent to {@link #plus(long, TemporalUnit)} with the amount negated. * See that method for a full description of how addition, and thus subtraction, works. * <p> * This instance is immutable and unaffected by this method call. * * @param amountToSubtract the amount of the unit to subtract from the result, may be negative * @param unit the unit of the amount to subtract, not null * @return a { @code YearMonth} based on this year-month with the specified amount subtracted, not null * @throws DateTimeException if the subtraction cannot be made * @throws UnsupportedTemporalTypeException if the unit is not supported * @throws ArithmeticException if numeric overflow occurs */ override def minus(amountToSubtract: Long, unit: TemporalUnit): YearMonth = { (if (amountToSubtract == Long.MIN_VALUE) plus(Long.MAX_VALUE, unit).plus(1, unit) else plus(-amountToSubtract, unit)) } /** * Returns a copy of this year-month with the specified period in years subtracted. * <p> * This instance is immutable and unaffected by this method call. * * @param yearsToSubtract the years to subtract, may be negative * @return a { @code YearMonth} based on this year-month with the years subtracted, not null * @throws DateTimeException if the result exceeds the supported range */ def minusYears(yearsToSubtract: Long): YearMonth = { (if (yearsToSubtract == Long.MIN_VALUE) plusYears(Long.MAX_VALUE).plusYears(1) else plusYears(-yearsToSubtract)) } /** * Returns a copy of this year-month with the specified period in months subtracted. * <p> * This instance is immutable and unaffected by this method call. * * @param monthsToSubtract the months to subtract, may be negative * @return a { @code YearMonth} based on this year-month with the months subtracted, not null * @throws DateTimeException if the result exceeds the supported range */ def minusMonths(monthsToSubtract: Long): YearMonth = { (if (monthsToSubtract == Long.MIN_VALUE) plusMonths(Long.MAX_VALUE).plusMonths(1) else plusMonths(-monthsToSubtract)) } /** * Queries this year-month using the specified query. * <p> * This queries this year-month using the specified query strategy object. * The {@code TemporalQuery} object defines the logic to be used to * obtain the result. Read the documentation of the query to understand * what the result of this method will be. * <p> * The result of this method is obtained by invoking the * {@link TemporalQuery#queryFrom(TemporalAccessor)} method on the * specified query passing {@code this} as the argument. * * @param <R> the type of the result * @param query the query to invoke, not null * @return the query result, null may be returned (defined by the query) * @throws DateTimeException if unable to query (defined by the query) * @throws ArithmeticException if numeric overflow occurs (defined by the query) */ override def query(query: TemporalQuery[R]): R = { if (query eq TemporalQuery.chronology) { IsoChronology.INSTANCE.asInstanceOf[R] } else if (query eq TemporalQuery.precision) { MONTHS.asInstanceOf[R] } Temporal.super.query(query) } /** * Adjusts the specified temporal object to have this year-month. * <p> * This returns a temporal object of the same observable type as the input * with the year and month changed to be the same as this. 
* <p> * The adjustment is equivalent to using {@link Temporal#with(TemporalField, long)} * passing {@link ChronoField#PROLEPTIC_MONTH} as the field. * If the specified temporal object does not use the ISO calendar system then * a {@code DateTimeException} is thrown. * <p> * In most cases, it is clearer to reverse the calling pattern by using * {@link Temporal#with(TemporalAdjuster)}: * {{{ * // these two lines are equivalent, but the second approach is recommended * temporal = thisYearMonth.adjustInto(temporal); * temporal = temporal.with(thisYearMonth); * }}} * <p> * This instance is immutable and unaffected by this method call. * * @param temporal the target object to be adjusted, not null * @return the adjusted object, not null * @throws DateTimeException if unable to make the adjustment * @throws ArithmeticException if numeric overflow occurs */ def adjustInto(temporal: Temporal): Temporal = { if ((Chronology.from(temporal) == IsoChronology.INSTANCE) == false) { throw new DateTimeException("Adjustment only supported on ISO date-time") } temporal.`with`(PROLEPTIC_MONTH, getProlepticMonth) } /** * Calculates the amount of time until another year-month in terms of the specified unit. * <p> * This calculates the amount of time between two {@code YearMonth} * objects in terms of a single {@code TemporalUnit}. * The start and end points are {@code this} and the specified year-month. * The result will be negative if the end is before the start. * The {@code Temporal} passed to this method is converted to a * {@code YearMonth} using {@link #from(TemporalAccessor)}. * For example, the period in years between two year-months can be calculated * using {@code startYearMonth.until(endYearMonth, YEARS)}. * <p> * The calculation returns a whole number, representing the number of * complete units between the two year-months. * For example, the period in decades between 2012-06 and 2032-05 * will only be one decade as it is one month short of two decades. * <p> * There are two equivalent ways of using this method. * The first is to invoke this method. * The second is to use {@link TemporalUnit#between(Temporal, Temporal)}: * {{{ * // these two lines are equivalent * amount = start.until(end, MONTHS); * amount = MONTHS.between(start, end); * }}} * The choice should be made based on which makes the code more readable. * <p> * The calculation is implemented in this method for {@link ChronoUnit}. * The units {@code MONTHS}, {@code YEARS}, {@code DECADES}, * {@code CENTURIES}, {@code MILLENNIA} and {@code ERAS} are supported. * Other {@code ChronoUnit} values will throw an exception. * <p> * If the unit is not a {@code ChronoUnit}, then the result of this method * is obtained by invoking {@code TemporalUnit.between(Temporal, Temporal)} * passing {@code this} as the first argument and the converted input temporal * as the second argument. * <p> * This instance is immutable and unaffected by this method call. 
* * @param endExclusive the end date, which is converted to a { @code YearMonth}, not null * @param unit the unit to measure the amount in, not null * @return the amount of time between this year-month and the end year-month * @throws DateTimeException if the amount cannot be calculated, or the end * temporal cannot be converted to a { @code YearMonth} * @throws UnsupportedTemporalTypeException if the unit is not supported * @throws ArithmeticException if numeric overflow occurs */ def until(endExclusive: Temporal, unit: TemporalUnit): Long = { val end: YearMonth = YearMonth.from(endExclusive) if (unit.isInstanceOf[ChronoUnit]) { val monthsUntil: Long = end.getProlepticMonth - getProlepticMonth unit.asInstanceOf[ChronoUnit] match { case MONTHS => monthsUntil case YEARS => monthsUntil / 12 case DECADES => monthsUntil / 120 case CENTURIES => monthsUntil / 1200 case MILLENNIA => monthsUntil / 12000 case ERAS => end.getLong(ERA) - getLong(ERA) } throw new UnsupportedTemporalTypeException("Unsupported unit: " + unit) } unit.between(this, end) } /** * Formats this year-month using the specified formatter. * <p> * This year-month will be passed to the formatter to produce a string. * * @param formatter the formatter to use, not null * @return the formatted year-month string, not null * @throws DateTimeException if an error occurs during printing */ def format(formatter: DateTimeFormatter): String = { object formatter.format(this) } /** * Combines this year-month with a day-of-month to create a {@code Date}. * <p> * This returns a {@code Date} formed from this year-month and the specified day-of-month. * <p> * The day-of-month value must be valid for the year-month. * <p> * This method can be used as part of a chain to produce a date: * {{{ * Date date = year.atMonth(month).atDay(day); * }}} * * @param dayOfMonth the day-of-month to use, from 1 to 31 * @return the date formed from this year-month and the specified day, not null * @throws DateTimeException if the day is invalid for the year-month * @see #isValidDay(int) */ def atDay(dayOfMonth: Int): Date = { Date.of(year, month, dayOfMonth) } /** * Returns a {@code Date} at the end of the month. * <p> * This returns a {@code Date} based on this year-month. * The day-of-month is set to the last valid day of the month, taking * into account leap years. * <p> * This method can be used as part of a chain to produce a date: * {{{ * Date date = year.atMonth(month).atEndOfMonth(); * }}} * * @return the last valid date of this year-month, not null */ def atEndOfMonth: Date = { Date.of(year, month, lengthOfMonth) } /** * Compares this year-month to another year-month. * <p> * The comparison is based first on the value of the year, then on the value of the month. * It is "consistent with equals", as defined by {@link Comparable}. * * @param other the other year-month to compare to, not null * @return the comparator value, negative if less, positive if greater */ def compareTo(other: YearMonth): Int = { var cmp: Int = (year - other.year) if (cmp == 0) { cmp = (month - other.month) } cmp } /** * Is this year-month after the specified year-month. * * @param other the other year-month to compare to, not null * @return true if this is after the specified year-month */ def isAfter(other: YearMonth): Boolean = { compareTo(other) > 0 } /** * Is this year-month before the specified year-month. 
* * @param other the other year-month to compare to, not null * @return true if this point is before the specified year-month */ def isBefore(other: YearMonth): Boolean = { compareTo(other) < 0 } /** * Checks if this year-month is equal to another year-month. * <p> * The comparison is based on the time-line position of the year-months. * * @param obj the object to check, null returns false * @return true if this is equal to the other year-month */ override def equals(obj: AnyRef): Boolean = { if (this eq obj) { true } if (obj.isInstanceOf[YearMonth]) { val other: YearMonth = obj.asInstanceOf[YearMonth] year == other.year && month == other.month } false } /** * A hash code for this year-month. * * @return a suitable hash code */ override def hashCode: Int = { year ^ (month << 27) } /** * Outputs this year-month as a {@code String}, such as {@code 2007-12}. * <p> * The output will be in the format {@code uuuu-MM}: * * @return a string representation of this year-month, not null */ override def toString: String = { val absYear: Int = Math.abs(year) val buf: StringBuilder = new StringBuilder(9) if (absYear < 1000) { if (year < 0) { buf.append(year - 10000).deleteCharAt(1) } else { buf.append(year + 10000).deleteCharAt(0) } } else { buf.append(year) } buf.append(if (month < 10) "-0" else "-").append(month).toString } /** * Writes the object using a * <a href="../../../serialized-form.html#java.time.temporal.Ser">dedicated serialized form</a>. * {{{ * out.writeByte(12); // identifies this as a YearMonth * out.writeInt(year); * out.writeByte(month); * }}} * * @return the instance of { @code Ser}, not null */ private def writeReplace: AnyRef = { new Ser(Ser.YEAR_MONTH_TYPE, this) } /** * Defend against malicious streams. * @return never * @throws InvalidObjectException always */ private def readResolve: AnyRef = { throw new InvalidObjectException("Deserialization via serialization delegate") } private[time] def writeExternal(out: DataOutput) { out.writeInt(year) out.writeByte(month) } /** * The year. */ private final val year: Int = 0 /** * The month-of-year, not null. */ private final val month: Int = 0 }
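// --- Illustrative usage sketch (not part of the original file) ---
// A minimal example of the chaining described in the scaladoc above. YearMonth.of is
// assumed to exist on the companion (as in java.time); only from(TemporalAccessor) is
// visible in this excerpt, so treat the factory call as a hypothetical.
object YearMonthUsageSketch {
  def demo(): Unit = {
    val ym = YearMonth.of(2015, 2)      // hypothetical factory, mirroring java.time
    val next = ym.plusMonths(1)         // 2015-03
    val endOfFeb = ym.atEndOfMonth      // last valid day of February 2015
    val canBe30th = ym.isValidDay(30)   // false: February 2015 has 28 days
    println(s"$next $endOfFeb $canBe30th")
  }
}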
javierg1975/metronome
src/main/scala/metronome/CalendarElements.scala
Scala
gpl-2.0
141,021
package controllers import _root_.play.api.libs.json.Json import _root_.play.api.mvc._ class Application extends Controller { def index = Action { Ok(Json.obj()) } }
Xanho/xanho-api
app/controllers/Application.scala
Scala
apache-2.0
186
package hercules.config.notification

import com.typesafe.config.{ ConfigFactory, Config }
import hercules.protocols.NotificationChannelProtocol._

import scala.collection.JavaConversions._

/**
 * Utility functions associated with creating a SlackNotificationConfig.
 * Created by johda411 on 2015-03-18.
 */
object SlackNotificationConfig {

  /**
   * Get a default config as specified in the application.conf file.
   * @return a slack notifications config
   */
  def apply(): SlackNotificationConfig = {
    getSlackNotificationConfig(ConfigFactory.load().getConfig("notifications.slack"))
  }

  /**
   * Load a SlackNotificationConfig from the specified config instance
   * @param conf the Config instance to load from
   * @return A SlackNotificationConfig
   */
  def getSlackNotificationConfig(conf: Config): SlackNotificationConfig = {
    new SlackNotificationConfig(
      channels = asScalaBuffer(conf.getStringList("channels")).toSeq.map(stringToChannel),
      retryInterval = conf.getInt("retry_interval"),
      numRetries = conf.getInt("num_retries"),
      slackEndPoint = conf.getString("slack_endpoint"),
      slackChannel = conf.getString("slack_channel"),
      slackUserName = conf.getString("slack_user"),
      iconEmoji = conf.getString("icon_emoji")
    )
  }
}

/**
 * Configure which (Hercules) channels should be passed on to the Slack notifications
 * and how to deal with retries.
 * @param channels the notification channels to forward to Slack
 * @param retryInterval the time to wait between retry attempts
 * @param numRetries the maximum number of retries
 */
class SlackNotificationConfig(
  override val channels: Seq[NotificationChannel],
  override val retryInterval: Int,
  override val numRetries: Int,
  val slackEndPoint: String,
  val slackChannel: String,
  val slackUserName: String,
  val iconEmoji: String) extends NotificationConfig {}
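// --- Illustrative configuration sketch (not part of the original file) ---
// A minimal application.conf fragment matching the keys read above; the concrete values
// (channel names, endpoint, user) are made up for illustration.
//
//   notifications.slack {
//     channels       = ["Info", "Critical"]   // must map onto NotificationChannel via stringToChannel
//     num_retries    = 3
//     retry_interval = 60
//     slack_endpoint = "https://hooks.slack.com/services/..."
//     slack_channel  = "#hercules"
//     slack_user     = "hercules-bot"
//     icon_emoji     = ":robot_face:"
//   }
//
// With that in place, SlackNotificationConfig() loads the section from application.conf.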
johandahlberg/hercules
src/main/scala/hercules/config/notification/SlackNotificationConfig.scala
Scala
mit
1,810
package com.faacets.consolidate import cats.{Eq, ApplicativeError} import cats.data.{Validated, ValidatedNel, NonEmptyList => NEL} import cats.syntax.eq._ import cats.syntax.semigroup._ sealed trait Result[+A] { self => import Result.{Same, Updated, Failed} def in(element: String): Result[A] def in(elements: List[String]): Result[A] def validate[B](f: A => ValidatedNel[String, B]): Result[B] = self match { case Same(a) => f(a) match { case Validated.Valid(b) => Same(b) case Validated.Invalid(errors) => Failed(errors.map(err => (Path.empty, err))) } case Updated(a, updates) => f(a) match { case Validated.Valid(b) => Updated(b, updates) case Validated.Invalid(errors) => Failed(errors.map(err => (Path.empty, err))) } case failed: Failed => failed } def isSame: Boolean = self match { case _: Same[_] => true case _: Updated[_] => false case _: Failed => false } def isUpdated: Boolean = self match { case _: Updated[_] => true case _: Same[_] => false case _: Failed => false } def hasFailed: Boolean = self match { case _: Failed => true case _: Updated[_] => false case _: Same[_] => false } def value: Option[A] = self match { case Same(a) => Some(a) case Updated(a, _) => Some(a) case _: Failed => None } def fold[X]( same: A => X, updated: (A, NEL[(Path, String)]) => X, failed: NEL[(Path, String)] => X ): X = self match { case Same(value) => same(value) case Updated(newValue, updates) => updated(newValue, updates) case Failed(errors) => failed(errors) } def map[B](f: A => B): Result[B] = self match { case Same(value) => Same(f(value)) case Updated(newValue, updates) => Updated(f(newValue), updates) case failed: Failed => failed } def check(path: Path, f: A => List[String]): Result[A] = self match { case Same(value) => NEL.fromList(f(value)) match { case None => self case Some(errors) => throw new Exception("Should not happen: base element is inconsistent, with errors: " + errors.toString) } case Updated(newValue, updates) => NEL.fromList(f(newValue)) match { case None => self case Some(errors) => Failed(errors.map(error => (path, error))) } case failed: Failed => failed } } object Result { def same[A](baseValue: A): Result[A] = Same(baseValue) def updated[A](newValue: A, updates: NEL[(Path, String)]): Result[A] = Updated(newValue, updates) def failed[A](errors: NEL[(Path, String)]): Result[A] = Failed(errors) protected def appendPath(nel: NEL[(Path, String)], element: String): NEL[(Path, String)] = nel.map { case (path, string) => (element :: path, string) } protected def appendPath(nel: NEL[(Path, String)], elements: List[String]): NEL[(Path, String)] = nel.map { case (path, string) => (elements ::: path, string) } case class Same[+A](baseValue: A) extends Result[A] { def in(element: String) = this def in(elements: List[String]) = this } case class Updated[+A](newValue: A, updates: NEL[(Path, String)]) extends Result[A] { def in(element: String) = Updated(newValue, appendPath(updates, element)) def in(elements: List[String]) = Updated(newValue, appendPath(updates, elements)) } case class Failed(errors: NEL[(Path, String)]) extends Result[Nothing] { def in(element: String) = Failed(appendPath(errors, element)) def in(elements: List[String]) = Failed(appendPath(errors, elements)) } type Errors = NEL[(Path, String)] implicit val instance: ApplicativeError[Result, NEL[(Path, String)]] = new ApplicativeError[Result, NEL[(Path, String)]] { def raiseError[A](e: NEL[(Path, String)]): Result[A] = Failed(e) def handleErrorWith[A](fa: Result[A])(f: NEL[(Path, String)] => Result[A]): Result[A] = fa match { 
case r: Same[A] => r case r: Updated[A] => r case Failed(errors) => f(errors) } def pure[A](a: A): Result[A] = Same(a) override def map[A, B](fa: Result[A])(f: A => B): Result[B] = fa match { case Same(a) => Same(f(a)) case Updated(a, paths) => Updated(f(a), paths) case Failed(errors) => Failed(errors) } override def ap[A, B](ff: Result[A => B])(fa: Result[A]): Result[B] = (ff, fa) match { case (Same(ff1), Same(fa1)) => Same(ff1(fa1)) case (Same(ff1), Updated(fa1, paths)) => Updated(ff1(fa1), paths) case (Updated(ff1, paths), Same(fa1)) => Updated(ff1(fa1), paths) case (Updated(ff1, paths1), Updated(fa1, paths2)) => Updated(ff1(fa1), paths1 |+| paths2) case (Failed(errors1), Failed(errors2)) => Failed(errors1 |+| errors2) case (Failed(errors1), _) => Failed(errors1) case (_, Failed(errors2)) => Failed(errors2) } } implicit def consolidateEqForResult[A:Eq]: Eq[Result[A]] = new Eq[Result[A]] { import cats.instances.all._ def eqv(lhs: Result[A], rhs: Result[A]) = (lhs, rhs) match { case (Same(a1), Same(a2)) => a1 === a2 case (Updated(a1, u1), Updated(a2, u2)) => a1 === a2 && u1 === u2 case (Failed(e1), Failed(e2)) => e1 === e2 case _ => false } } }
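// --- Illustrative usage sketch (not part of the original file) ---
// A small example of combining results through the ApplicativeError instance defined above;
// the values and update messages are made up for illustration.
object ResultUsageSketch {
  import cats.data.{NonEmptyList => NEL}

  def demo(): Unit = {
    val a: Result[Int] = Result.same(1)
    val b: Result[Int] = Result.updated(2, NEL.of((Path.empty, "value updated from 1 to 2")))
    // map2 comes from cats.Apply via the ApplicativeError instance: Same + Updated => Updated.
    val sum: Result[Int] = Result.instance.map2(a, b)(_ + _)
    println(sum.fold(
      v => s"same: $v",
      (v, updates) => s"updated: $v with ${updates.size} update(s)",
      errors => s"failed: $errors"))
  }
}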
denisrosset/consolidate
src/main/scala/com.faacets/consolidate/Result.scala
Scala
mit
5,193
package services import play.api.libs.json.JsValue import scala.concurrent.Future trait ProxyService { def get(cmd: String, params: Option[JsValue]): Future[JsValue] }
joakim-ribier/proxy-foscam
app/services/ProxyService.scala
Scala
mit
174
/**
 *
 * @author Richard Li
 */
import collection.mutable.Buffer

object puzzle6 extends App {
  val buf1 = Buffer.empty[() => Int]
  val buf2 = Buffer.empty[() => Int]
  val buf3 = Buffer.empty[() => Int]

  val data = Seq(10, 20, 30)

  var j = 0
  for (i <- 0 until data.length) {
    buf1 += (() => data(i))
    buf2 += (() => data(j))
    val valJ = j
    buf3 += (() => data(valJ))
    j += 1
  }

  buf1.foreach(f => println(f()))
  println("================")
  buf3.foreach(f => println(f()))
  println("================")
  // Throws IndexOutOfBoundsException: each closure in buf2 captured the var j by reference,
  // and j is already 3 (== data.length) by the time the closures run.
  // In a closure, a captured val is copied as a plain Int, but a captured var is held as a
  // reference to a mutable cell, so all buf2 closures see the final value of j.
  buf2.foreach(f => println(f()))
}
precompiler/scala-101
puzzles/src/main/scala/puzzle6.scala
Scala
apache-2.0
685
package fpinscala.datastructures sealed trait List[+A] // `List` data type case object Nil extends List[Nothing] // data constructor for `List` case class Cons[+A](head: A, tail: List[A]) extends List[A] object List { // `List` companion object def sum(ints: List[Int]): Int = ints match { // Pattern matching example case Nil => 0 case Cons(x,xs) => x + sum(xs) } def product(ds: List[Double]): Double = ds match { case Nil => 1.0 case Cons(0.0, _) => 0.0 case Cons(x,xs) => x * product(xs) } def apply[A](as: A*): List[A] = // Variadic function syntax if (as.isEmpty) Nil else Cons(as.head, apply(as.tail: _*)) val example = Cons(1, Cons(2, Cons(3, Nil))) // Creating lists val example2 = List(1,2,3) val total = sum(example) val x = List(1,2,3,4,5) match { case Cons(x, Cons(2, Cons(4, _))) => x case Nil => 42 case Cons(x, Cons(y, Cons(3, Cons(4, _)))) => x + y case Cons(h, t) => h + sum(t) case _ => 101 } def append[A](a1: List[A], a2: List[A]): List[A] = a1 match { case Nil => a2 case Cons(h,t) => Cons(h, append(t, a2)) } def foldRight[A,B](l: List[A], z: B)(f: (A, B) => B): B = // Utility functions l match { case Nil => z case Cons(x, xs) => f(x, foldRight(xs, z)(f)) } def sum2(l: List[Int]) = foldRight(l, 0.0)(_ + _) def product2(l: List[Double]) = foldRight(l, 1.0)(_ * _) }
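// --- Illustrative usage sketch (not part of the original file) ---
// A few expressions exercising the constructors and folds defined above; the results in
// the comments follow directly from the definitions.
object ListUsageSketch {
  import fpinscala.datastructures.{List => FpList}

  val xs: FpList[Int] = FpList(1, 2, 3)
  val total: Int = FpList.sum(xs)                              // 6
  val joined: FpList[Int] = FpList.append(xs, FpList(4, 5))    // 1, 2, 3, 4, 5
  val product: Double = FpList.product(FpList(2.0, 3.0, 4.0))  // 24.0
}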
galarragas/FpInScala
chaptercode/src/main/scala/fpinscala/datastructures/List.scala
Scala
mit
1,440
package com.blogspot.yetanothercoders.hfile import org.apache.hadoop.conf.Configuration import org.apache.hadoop.hbase.client.{HTable, Put} import org.apache.hadoop.hbase.io.ImmutableBytesWritable import org.apache.hadoop.hbase.mapreduce.{HFileOutputFormat2, TableMapReduceUtil} import org.apache.hadoop.hbase.{HBaseConfiguration, KeyValue} import org.apache.hadoop.mapreduce.Job import org.apache.spark.rdd.RDD import scala.reflect.ClassTag class BulkPhoenixLoader(rdd: RDD[(ImmutableBytesWritable, KeyValue)]) { private def createConf(tableName: String, inConf: Option[Configuration] = None): Configuration = { val conf = inConf.map(HBaseConfiguration.create).getOrElse(HBaseConfiguration.create()) val job: Job = Job.getInstance(conf, "Phoenix bulk load") job.setMapOutputKeyClass(classOf[ImmutableBytesWritable]) job.setMapOutputValueClass(classOf[KeyValue]) TableMapReduceUtil.initCredentials(job) val htable: HTable = new HTable(conf, tableName) HFileOutputFormat2.configureIncrementalLoad(job, htable) conf } /** * Saves the PairRDD into HFile's. * * @param tableName name of HBase's table to store to * @param outputPath path where to store the generated Hfiles * @param conf configuration of HBase */ def bulkSave(tableName: String, outputPath: String, conf: Option[Configuration] = None) = { val configuration: Configuration = createConf(tableName, conf) rdd.mapPartitions(PartitionSorter.sortPartition) .saveAsNewAPIHadoopFile( outputPath, classOf[ImmutableBytesWritable], classOf[Put], classOf[HFileOutputFormat2], configuration) } }
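// --- Illustrative usage sketch (not part of the original file) ---
// A minimal sketch of driving the loader; the table name, output path and the way the
// (row key, KeyValue) pairs are produced are assumptions made for illustration.
object BulkPhoenixLoaderSketch {
  import org.apache.hadoop.hbase.KeyValue
  import org.apache.hadoop.hbase.io.ImmutableBytesWritable
  import org.apache.spark.rdd.RDD

  def writeHFiles(kvs: RDD[(ImmutableBytesWritable, KeyValue)]): Unit = {
    val loader = new BulkPhoenixLoader(kvs)
    // Produces HFiles under the output path, ready to be handed to HBase's bulk-load tooling.
    loader.bulkSave("EXAMPLE_PHOENIX_TABLE", "/tmp/hfiles-out", conf = None)
  }
}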
dawidwys/phoenix-on-spark
src/main/scala/com/blogspot/yetanothercoders/hfile/BulkPhoenixLoader.scala
Scala
apache-2.0
1,672
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package kafka.coordinator.group import java.util import kafka.utils.nonthreadsafe import org.apache.kafka.common.protocol.Errors case class MemberSummary(memberId: String, clientId: String, clientHost: String, metadata: Array[Byte], assignment: Array[Byte]) /** * Member metadata contains the following metadata: * * Heartbeat metadata: * 1. negotiated heartbeat session timeout * 2. timestamp of the latest heartbeat * * Protocol metadata: * 1. the list of supported protocols (ordered by preference) * 2. the metadata associated with each protocol * * In addition, it also contains the following state information: * * 1. Awaiting rebalance callback: when the group is in the prepare-rebalance state, * its rebalance callback will be kept in the metadata if the * member has sent the join group request * 2. Awaiting sync callback: when the group is in the awaiting-sync state, its sync callback * is kept in metadata until the leader provides the group assignment * and the group transitions to stable */ @nonthreadsafe private[group] class MemberMetadata(val memberId: String, // 消费者id,由GrouoCoordinator分配 val groupId: String, // 记录消费者所在的Consumer Groupid val clientId: String, val clientHost: String, val rebalanceTimeoutMs: Int, val sessionTimeoutMs: Int, // 心跳超时时间 val protocolType: String, var supportedProtocols: List[(String, Array[Byte])]) { //对应消费者支持的ParitionAssignor // 记录了分配给当前Member的分区信息 var assignment: Array[Byte] = Array.empty[Byte] // 与JoinGroupRequest相关的回调函数 var awaitingJoinCallback: JoinGroupResult => Unit = null // 与SyncGroupRequest相关的回调函数 var awaitingSyncCallback: (Array[Byte], Errors) => Unit = null // 最后一次收到心跳信息的时间戳 var latestHeartbeat: Long = -1 // 标示对应消费者是否已经离开了Consumer group var isLeaving: Boolean = false def protocols = supportedProtocols.map(_._1).toSet /** * Get metadata corresponding to the provided protocol. */ def metadata(protocol: String): Array[Byte] = { supportedProtocols.find(_._1 == protocol) match { case Some((_, metadata)) => metadata case None => throw new IllegalArgumentException("Member does not support protocol") } } /** * Check if the provided protocol metadata matches the currently stored metadata. */ def matches(protocols: List[(String, Array[Byte])]): Boolean = { if (protocols.size != this.supportedProtocols.size) return false for (i <- protocols.indices) { val p1 = protocols(i) val p2 = supportedProtocols(i) if (p1._1 != p2._1 || !util.Arrays.equals(p1._2, p2._2)) return false } true } def summary(protocol: String): MemberSummary = { MemberSummary(memberId, clientId, clientHost, metadata(protocol), assignment) } def summaryNoMetadata(): MemberSummary = { MemberSummary(memberId, clientId, clientHost, Array.empty[Byte], Array.empty[Byte]) } /** * Vote for one of the potential group protocols. 
This takes into account the protocol preference as
   * indicated by the order of supported protocols and returns the first one also contained in the set
   */
  // Select, from the given candidates, the first protocol (PartitionAssignor) that this member supports
  def vote(candidates: Set[String]): String = {
    supportedProtocols.find { case (protocol, _) => candidates.contains(protocol) } match {
      case Some((protocol, _)) => protocol
      case None =>
        throw new IllegalArgumentException("Member does not support any of the candidate protocols")
    }
  }

  override def toString: String = {
    "MemberMetadata(" +
      s"memberId=$memberId, " +
      s"clientId=$clientId, " +
      s"clientHost=$clientHost, " +
      s"sessionTimeoutMs=$sessionTimeoutMs, " +
      s"rebalanceTimeoutMs=$rebalanceTimeoutMs, " +
      s"supportedProtocols=${supportedProtocols.map(_._1)}, " +
      ")"
  }
}
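// --- Illustrative usage sketch (not part of the original file) ---
// Shows how vote() picks the first member-preferred protocol that is also a group-wide
// candidate. All identifiers and timeouts are made-up example values, and the sketch is
// assumed to live in kafka.coordinator.group so it can see the private[group] class.
object MemberMetadataVoteSketch {
  def demo(): String = {
    val member = new MemberMetadata(
      memberId = "consumer-1-f2f3",
      groupId = "example-group",
      clientId = "consumer-1",
      clientHost = "/127.0.0.1",
      rebalanceTimeoutMs = 60000,
      sessionTimeoutMs = 10000,
      protocolType = "consumer",
      supportedProtocols = List(("range", Array.empty[Byte]), ("roundrobin", Array.empty[Byte])))
    // "range" is preferred by the member but is not a candidate, so "roundrobin" is chosen.
    member.vote(Set("roundrobin", "sticky"))
  }
}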
YMCoding/kafka-0.11.0.0-src-with-comment
core/src/main/scala/kafka/coordinator/group/MemberMetadata.scala
Scala
apache-2.0
5,289
package actors.serializers import akka.serialization.SerializerWithStringManifest import org.slf4j.{Logger, LoggerFactory} import scalapb.GeneratedMessage import server.protobuf.messages.Alert.{Alert, AlertSnapshotMessage} import server.protobuf.messages.CrunchState.{CrunchRequestMessage, _} import server.protobuf.messages.FixedPointMessage.{FixedPointMessage, FixedPointsMessage, FixedPointsStateSnapshotMessage} import server.protobuf.messages.FlightsMessage._ import server.protobuf.messages.FlightsSummary.FlightsSummaryMessage import server.protobuf.messages.PaxMessage.{OriginTerminalPaxCountsMessage, OriginTerminalPaxCountsMessages, PaxCountMessage} import server.protobuf.messages.RegisteredArrivalMessage.{RegisteredArrivalMessage, RegisteredArrivalsMessage} import server.protobuf.messages.ShiftMessage.{ShiftMessage, ShiftStateSnapshotMessage, ShiftsMessage} import server.protobuf.messages.StaffMovementMessages.{RemoveStaffMovementMessage, StaffMovementMessage, StaffMovementsMessage, StaffMovementsStateSnapshotMessage} import server.protobuf.messages.TerminalQueuesSummary.TerminalQueuesSummaryMessage import server.protobuf.messages.VoyageManifest.{VoyageManifestLatestFileNameMessage, VoyageManifestMessage, VoyageManifestStateSnapshotMessage, VoyageManifestsMessage} class ProtoBufSerializer extends SerializerWithStringManifest { override def identifier: Int = 9001 override def manifest(targetObject: AnyRef): String = targetObject.getClass.getName final val CrunchDiff: String = classOf[CrunchDiffMessage].getName final val CrunchStateSnapshot: String = classOf[CrunchStateSnapshotMessage].getName final val CrunchMinutes: String = classOf[CrunchMinutesMessage].getName final val FlightsWithSplits: String = classOf[FlightsWithSplitsMessage].getName final val FlightsWithSplitsDiff: String = classOf[FlightsWithSplitsDiffMessage].getName final val Shifts: String = classOf[ShiftsMessage].getName final val ShiftStateSnapshot: String = classOf[ShiftStateSnapshotMessage].getName final val Shift: String = classOf[ShiftMessage].getName final val FixedPoints: String = classOf[FixedPointsMessage].getName final val FixedPointsStateSnapshot: String = classOf[FixedPointsStateSnapshotMessage].getName final val FixedPoint: String = classOf[FixedPointMessage].getName final val StaffMovements: String = classOf[StaffMovementsMessage].getName final val StaffMovementsStateSnapshot: String = classOf[StaffMovementsStateSnapshotMessage].getName final val StaffMovement: String = classOf[StaffMovementMessage].getName final val RemoveStaffMovement: String = classOf[RemoveStaffMovementMessage].getName final val FlightsDiff: String = classOf[FlightsDiffMessage].getName final val FlightStateSnapshot: String = classOf[FlightStateSnapshotMessage].getName final val Flight: String = classOf[FlightMessage].getName final val FeedStatus: String = classOf[FeedStatusMessage].getName final val FeedStatuses: String = classOf[FeedStatusesMessage].getName final val UniqueArrival: String = classOf[UniqueArrivalMessage].getName final val VoyageManifestStateSnapshot: String = classOf[VoyageManifestStateSnapshotMessage].getName final val VoyageManifestLatestFileName: String = classOf[VoyageManifestLatestFileNameMessage].getName final val VoyageManifests: String = classOf[VoyageManifestsMessage].getName final val VoyageManifest: String = classOf[VoyageManifestMessage].getName final val Alerts: String = classOf[Alert].getName final val AlertSnapshot: String = classOf[AlertSnapshotMessage].getName final val RegisteredArrival: String = 
classOf[RegisteredArrivalMessage].getName final val RegisteredArrivals: String = classOf[RegisteredArrivalsMessage].getName final val TerminalQueuesSummary: String = classOf[TerminalQueuesSummaryMessage].getName final val FlightsSummary: String = classOf[FlightsSummaryMessage].getName final val StaffMinutes: String = classOf[StaffMinutesMessage].getName final val PaxCount: String = classOf[PaxCountMessage].getName final val OriginTerminalPaxCounts: String = classOf[OriginTerminalPaxCountsMessage].getName final val OriginTerminalPaxCountsMgs: String = classOf[OriginTerminalPaxCountsMessages].getName final val Days: String = classOf[DaysMessage].getName final val RemoveDay: String = classOf[RemoveDayMessage].getName final val CrunchRequest: String = classOf[CrunchRequestMessage].getName final val CrunchRequests: String = classOf[CrunchRequestsMessage].getName final val RemoveCrunchRequest: String = classOf[RemoveCrunchRequestMessage].getName override def toBinary(objectToSerialize: AnyRef): Array[Byte] = { objectToSerialize match { case m: CrunchStateSnapshotMessage => m.toByteArray case m: CrunchMinutesMessage => m.toByteArray case m: FlightsWithSplitsMessage => m.toByteArray case m: FlightsWithSplitsDiffMessage => m.toByteArray case m: ShiftsMessage => m.toByteArray case m: ShiftStateSnapshotMessage => m.toByteArray case m: ShiftMessage => m.toByteArray case m: FixedPointsMessage => m.toByteArray case m: FixedPointsStateSnapshotMessage => m.toByteArray case m: FixedPointMessage => m.toByteArray case m: StaffMovementsMessage => m.toByteArray case m: StaffMovementsStateSnapshotMessage => m.toByteArray case m: StaffMovementMessage => m.toByteArray case m: RemoveStaffMovementMessage => m.toByteArray case m: FlightsDiffMessage => m.toByteArray case m: FlightStateSnapshotMessage => m.toByteArray case m: FlightMessage => m.toByteArray case m: FeedStatusMessage => m.toByteArray case m: FeedStatusesMessage => m.toByteArray case m: UniqueArrivalMessage => m.toByteArray case m: VoyageManifestStateSnapshotMessage => m.toByteArray case m: VoyageManifestLatestFileNameMessage => m.toByteArray case m: VoyageManifestsMessage => m.toByteArray case m: VoyageManifestMessage => m.toByteArray case m: Alert => m.toByteArray case m: AlertSnapshotMessage => m.toByteArray case m: RegisteredArrivalMessage => m.toByteArray case m: RegisteredArrivalsMessage => m.toByteArray case m: TerminalQueuesSummaryMessage => m.toByteArray case m: FlightsSummaryMessage => m.toByteArray case m: StaffMinutesMessage => m.toByteArray case m: PaxCountMessage => m.toByteArray case m: OriginTerminalPaxCountsMessage => m.toByteArray case m: OriginTerminalPaxCountsMessages => m.toByteArray case m: DaysMessage => m.toByteArray case m: RemoveDayMessage => m.toByteArray case m: CrunchRequestMessage => m.toByteArray case m: CrunchRequestsMessage => m.toByteArray case m: RemoveCrunchRequestMessage => m.toByteArray } } val log: Logger = LoggerFactory.getLogger(getClass) override def fromBinary(bytes: Array[Byte], manifest: String): AnyRef = { manifest match { case CrunchDiff => CrunchDiffMessage.parseFrom(bytes) case CrunchStateSnapshot => CrunchStateSnapshotMessage.parseFrom(bytes) case Shifts => ShiftsMessage.parseFrom(bytes) case ShiftStateSnapshot => ShiftStateSnapshotMessage.parseFrom(bytes) case Shift => ShiftMessage.parseFrom(bytes) case FixedPoints => FixedPointsMessage.parseFrom(bytes) case FixedPointsStateSnapshot => FixedPointsStateSnapshotMessage.parseFrom(bytes) case FixedPoint => FixedPointMessage.parseFrom(bytes) case 
StaffMovements => StaffMovementsMessage.parseFrom(bytes) case StaffMovementsStateSnapshot => StaffMovementsStateSnapshotMessage.parseFrom(bytes) case StaffMovement => StaffMovementMessage.parseFrom(bytes) case RemoveStaffMovement => RemoveStaffMovementMessage.parseFrom(bytes) case FlightsDiff => FlightsDiffMessage.parseFrom(bytes) case FlightStateSnapshot => FlightStateSnapshotMessage.parseFrom(bytes) case Flight => FlightMessage.parseFrom(bytes) case UniqueArrival => UniqueArrivalMessage.parseFrom(bytes) case FeedStatus => FeedStatusMessage.parseFrom(bytes) case FeedStatuses => FeedStatusesMessage.parseFrom(bytes) case VoyageManifestStateSnapshot => VoyageManifestStateSnapshotMessage.parseFrom(bytes) case VoyageManifestLatestFileName => VoyageManifestLatestFileNameMessage.parseFrom(bytes) case VoyageManifests => VoyageManifestsMessage.parseFrom(bytes) case VoyageManifest => VoyageManifestMessage.parseFrom(bytes) case AlertSnapshot => AlertSnapshotMessage.parseFrom(bytes) case Alerts => Alert.parseFrom(bytes) case RegisteredArrival => RegisteredArrivalMessage.parseFrom(bytes) case RegisteredArrivals => RegisteredArrivalsMessage.parseFrom(bytes) case TerminalQueuesSummary => TerminalQueuesSummaryMessage.parseFrom(bytes) case FlightsSummary => FlightsSummaryMessage.parseFrom(bytes) case CrunchMinutes => CrunchMinutesMessage.parseFrom(bytes) case StaffMinutes => StaffMinutesMessage.parseFrom(bytes) case PaxCount => PaxCountMessage.parseFrom(bytes) case OriginTerminalPaxCounts => OriginTerminalPaxCountsMessage.parseFrom(bytes) case OriginTerminalPaxCountsMgs => OriginTerminalPaxCountsMessages.parseFrom(bytes) case Days => DaysMessage.parseFrom(bytes) case RemoveDay => RemoveDayMessage.parseFrom(bytes) case FlightsWithSplits => FlightsWithSplitsMessage.parseFrom(bytes) case FlightsWithSplitsDiff => FlightsWithSplitsDiffMessage.parseFrom(bytes) case CrunchRequest => CrunchRequestMessage.parseFrom(bytes) case CrunchRequests => CrunchRequestsMessage.parseFrom(bytes) case RemoveCrunchRequest => RemoveCrunchRequestMessage.parseFrom(bytes) } } }
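// --- Illustrative configuration sketch (not part of the original file) ---
// How a SerializerWithStringManifest like this is typically registered in application.conf;
// the serializer key ("protobuf") and the binding on scalapb.GeneratedMessage are example
// choices, not taken from this repository's configuration.
//
//   akka.actor {
//     serializers {
//       protobuf = "actors.serializers.ProtoBufSerializer"
//     }
//     serialization-bindings {
//       "scalapb.GeneratedMessage" = protobuf
//     }
//   }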
UKHomeOffice/drt-scalajs-spa-exploration
server/src/main/scala/actors/serializers/ProtoBufSerializer.scala
Scala
apache-2.0
10,912
package com.twitter.finagle.service import com.twitter.finagle.Filter.TypeAgnostic import com.twitter.finagle._ import com.twitter.finagle.context.Contexts import com.twitter.finagle.client.LatencyCompensation import com.twitter.finagle.stats.{NullStatsReceiver, StatsReceiver} import com.twitter.finagle.tracing.Trace import com.twitter.util.{Future, Duration, Timer} object TimeoutFilter { val TimeoutAnnotation: String = "finagle.timeout" val role: Stack.Role = new Stack.Role("RequestTimeout") /** * A class eligible for configuring a [[com.twitter.finagle.Stackable]] * [[com.twitter.finagle.service.TimeoutFilter]] module. */ case class Param(timeout: Duration) { def mk(): (Param, Stack.Param[Param]) = (this, Param.param) } object Param { implicit val param = Stack.Param(Param(Duration.Top)) } /** * Creates a [[com.twitter.finagle.Stackable]] [[com.twitter.finagle.service.TimeoutFilter]] * for use in clients. */ def clientModule[Req, Rep]: Stackable[ServiceFactory[Req, Rep]] = new Stack.Module4[ TimeoutFilter.Param, param.Timer, LatencyCompensation.Compensation, param.Stats, ServiceFactory[Req, Rep]] { val role = TimeoutFilter.role val description = "Apply a timeout-derived deadline to requests; adjust existing deadlines." def make( _param: Param, _timer: param.Timer, _compensation: LatencyCompensation.Compensation, _stats: param.Stats, next: ServiceFactory[Req, Rep] ): ServiceFactory[Req, Rep] = { val timeout = _param.timeout + _compensation.howlong if (!timeout.isFinite || timeout <= Duration.Zero) { next } else { val param.Timer(timer) = _timer val exc = new IndividualRequestTimeoutException(timeout) val param.Stats(stats) = _stats val filter = new TimeoutFilter[Req, Rep]( timeout, exc, timer, stats.scope("timeout")) filter.andThen(next) } } } /** * Creates a [[com.twitter.finagle.Stackable]] [[com.twitter.finagle.service.TimeoutFilter]] * for use in servers. */ def serverModule[Req, Rep]: Stackable[ServiceFactory[Req, Rep]] = new Stack.Module3[ TimeoutFilter.Param, param.Timer, param.Stats, ServiceFactory[Req, Rep]] { val role = TimeoutFilter.role val description = "Apply a timeout-derived deadline to requests; adjust existing deadlines." def make( _param: Param, _timer: param.Timer, _stats: param.Stats, next: ServiceFactory[Req, Rep] ): ServiceFactory[Req, Rep] = { val Param(timeout) = _param val param.Timer(timer) = _timer val param.Stats(stats) = _stats if (!timeout.isFinite || timeout <= Duration.Zero) next else { val exc = new IndividualRequestTimeoutException(timeout) val filter = new TimeoutFilter[Req, Rep]( timeout, exc, timer, stats.scope("timeout")) filter.andThen(next) } } } def typeAgnostic( timeout: Duration, exception: RequestTimeoutException, timer: Timer ): TypeAgnostic = new TypeAgnostic { override def toFilter[Req, Rep]: Filter[Req, Rep, Req, Rep] = new TimeoutFilter[Req, Rep](timeout, exception, timer) } } /** * A [[com.twitter.finagle.Filter]] that applies a global timeout to requests. * * @param timeout the timeout to apply to requests * @param exception an exception object to return in cases of timeout exceedance * @param timer a `Timer` object used to track elapsed time * * @see The sections on * [[https://twitter.github.io/finagle/guide/Clients.html#timeouts-expiration clients]] * and [[https://twitter.github.io/finagle/guide/Servers.html#request-timeout servers]] * in the user guide for more details. 
*/ class TimeoutFilter[Req, Rep]( timeout: Duration, exception: RequestTimeoutException, timer: Timer, statsReceiver: StatsReceiver) extends SimpleFilter[Req, Rep] { def this(timeout: Duration, exception: RequestTimeoutException, timer: Timer) = this(timeout, exception, timer, NullStatsReceiver) def this(timeout: Duration, timer: Timer) = this(timeout, new IndividualRequestTimeoutException(timeout), timer) private[this] val expiredDeadlineStat = statsReceiver.stat("expired_deadline_ms") def apply(request: Req, service: Service[Req, Rep]): Future[Rep] = { val timeoutDeadline = Deadline.ofTimeout(timeout) // If there's a current deadline, we combine it with the one derived // from our timeout. val deadline = Deadline.current match { case Some(current) => Deadline.combined(timeoutDeadline, current) case None => timeoutDeadline } if (deadline.expired) { expiredDeadlineStat.add(-deadline.remaining.inMillis) } Contexts.broadcast.let(Deadline, deadline) { val res = service(request) res.within(timer, timeout).rescue { case exc: java.util.concurrent.TimeoutException => res.raise(exc) Trace.record(TimeoutFilter.TimeoutAnnotation) Future.exception(exception) } } } }
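// --- Illustrative usage sketch (not part of the original file) ---
// A minimal sketch of composing the filter with a service; the echo service, the timer and
// the one-second timeout are example choices.
object TimeoutFilterSketch {
  import com.twitter.finagle.Service
  import com.twitter.util.{Duration, Future, JavaTimer}

  val echo: Service[String, String] = Service.mk[String, String](s => Future.value(s))
  val timer = new JavaTimer()
  // Requests taking longer than one second fail with an IndividualRequestTimeoutException.
  val timedEcho: Service[String, String] =
    new TimeoutFilter[String, String](Duration.fromSeconds(1), timer).andThen(echo)
}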
sveinnfannar/finagle
finagle-core/src/main/scala/com/twitter/finagle/service/TimeoutFilter.scala
Scala
apache-2.0
5,248
package org.juanitodread.pitayafinch import org.scalatest._ import flatspec._ import matchers._ abstract class UnitSpec extends AnyFlatSpec with should.Matchers with OptionValues with Inside with Inspectors
juanitodread/pitaya-finch
src/test/scala/org/juanitodread/pitayafinch/UnitSpec.scala
Scala
apache-2.0
217
package com.twitter.gizzard.shards case class LinkInfo(upId: ShardId, downId: ShardId, weight: Int)
kangkot/gizzard
src/main/scala/com/twitter/gizzard/shards/LinkInfo.scala
Scala
apache-2.0
102
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution.command import scala.util.control.NonFatal import org.apache.spark.sql.{Row, SparkSession} /** * Analyzes all tables in the given database to generate statistics. */ case class AnalyzeTablesCommand( databaseName: Option[String], noScan: Boolean) extends LeafRunnableCommand { override def run(sparkSession: SparkSession): Seq[Row] = { val catalog = sparkSession.sessionState.catalog val db = databaseName.getOrElse(catalog.getCurrentDatabase) catalog.listTables(db).foreach { tbl => try { CommandUtils.analyzeTable(sparkSession, tbl, noScan) } catch { case NonFatal(e) => logWarning(s"Failed to analyze table ${tbl.table} in the " + s"database $db because of ${e.toString}", e) } } Seq.empty[Row] } }
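// --- Illustrative usage sketch (not part of the original file) ---
// The SQL form that resolves to this command; the database name is an example, and NOSCAN
// corresponds to noScan = true (size-only statistics, no row scan).
object AnalyzeTablesSketch {
  import org.apache.spark.sql.SparkSession

  def run(spark: SparkSession): Unit = {
    spark.sql("ANALYZE TABLES IN example_db COMPUTE STATISTICS NOSCAN")
    spark.sql("ANALYZE TABLES COMPUTE STATISTICS") // analyzes all tables in the current database
  }
}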
maropu/spark
sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzeTablesCommand.scala
Scala
apache-2.0
1,637
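A hypothetical invocation sketch, not from the Spark sources: it runs the command above directly against a SparkSession. The "sales" database name and the session settings are illustrative assumptions.

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.execution.command.AnalyzeTablesCommand

object AnalyzeTablesExample {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("analyze-tables-example").getOrCreate()
    // Collect size-only statistics (noScan = true) for every table in the
    // hypothetical "sales" database; falls back to the current database when None.
    AnalyzeTablesCommand(databaseName = Some("sales"), noScan = true).run(spark)
    spark.stop()
  }
}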
/*********************************************************************** * Copyright (c) 2013-2018 Commonwealth Computer Research, Inc. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Apache License, Version 2.0 * which accompanies this distribution and is available at * http://www.opensource.org/licenses/apache2.0.php. ***********************************************************************/ package org.locationtech.geomesa.fs.storage.orc.utils import java.util.UUID import com.vividsolutions.jts.geom._ import org.apache.hadoop.hive.ql.exec.vector._ import org.geotools.filter.identity.FeatureIdImpl import org.geotools.geometry.jts.JTSFactoryFinder import org.locationtech.geomesa.features.serialization.ObjectType import org.locationtech.geomesa.features.serialization.ObjectType.ObjectType import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType} /** * Populates a simple feature from a given Orc row */ trait OrcAttributeReader { def apply(sf: SimpleFeature, row: Int): Unit } object OrcAttributeReader { private val gf = JTSFactoryFinder.getGeometryFactory /** * Create a reader for an ORC batch * * @param sft simple feature type * @param batch row batch * @param columns columns to read, corresponding to simple feature attributes * @param fid read feature id or not * @return */ def apply(sft: SimpleFeatureType, batch: VectorizedRowBatch, columns: Option[Set[Int]] = None, fid: Boolean = true): OrcAttributeReader = { val builder = Seq.newBuilder[OrcAttributeReader] builder.sizeHint(columns.map(_.size).getOrElse(sft.getAttributeCount) + (if (fid) { 1 } else { 0 })) var i = 0 var col = 0 while (i < sft.getAttributeCount) { val bindings = ObjectType.selectType(sft.getDescriptor(i)) if (columns.forall(_.contains(i))) { val reader = bindings.head match { case ObjectType.GEOMETRY => createGeometryReader(bindings(1), batch.cols(col), batch.cols(col + 1), i) case ObjectType.DATE => new DateReader(batch.cols(col).asInstanceOf[TimestampColumnVector], i) case ObjectType.STRING => new StringReader(batch.cols(col).asInstanceOf[BytesColumnVector], i) case ObjectType.INT => new IntReader(batch.cols(col).asInstanceOf[LongColumnVector], i) case ObjectType.LONG => new LongReader(batch.cols(col).asInstanceOf[LongColumnVector], i) case ObjectType.FLOAT => new FloatReader(batch.cols(col).asInstanceOf[DoubleColumnVector], i) case ObjectType.DOUBLE => new DoubleReader(batch.cols(col).asInstanceOf[DoubleColumnVector], i) case ObjectType.BOOLEAN => new BooleanReader(batch.cols(col).asInstanceOf[LongColumnVector], i) case ObjectType.BYTES => new BytesReader(batch.cols(col).asInstanceOf[BytesColumnVector], i) case ObjectType.JSON => new StringReader(batch.cols(col).asInstanceOf[BytesColumnVector], i) case ObjectType.UUID => new UuidReader(batch.cols(col).asInstanceOf[BytesColumnVector], i) case ObjectType.LIST => new ListReader(batch.cols(col).asInstanceOf[ListColumnVector], i, bindings(1)) case ObjectType.MAP => new MapReader(batch.cols(col).asInstanceOf[MapColumnVector], i, bindings(1), bindings(2)) case _ => throw new IllegalArgumentException(s"Unexpected object type ${bindings.head}") } builder += reader } i += 1 if (bindings.head == ObjectType.GEOMETRY) { col += 2 } else { col += 1 } } if (fid) { builder += new FidReader(batch.cols(col).asInstanceOf[BytesColumnVector]) } new SequenceReader(builder.result) } // noinspection LanguageFeature private def createGeometryReader(binding: ObjectType, x: ColumnVector, y: ColumnVector, i: Int): 
OrcAttributeReader = { implicit def toDoubleColumnVector(vec: ColumnVector): DoubleColumnVector = vec.asInstanceOf[DoubleColumnVector] implicit def toListColumnVector(vec: ColumnVector): ListColumnVector = vec.asInstanceOf[ListColumnVector] binding match { case ObjectType.POINT => new PointReader(x, y, i) case ObjectType.LINESTRING => new LineStringReader(x, y, i) case ObjectType.MULTIPOINT => new MultiPointReader(x, y, i) case ObjectType.POLYGON => new PolygonReader(x, y, i) case ObjectType.MULTILINESTRING => new MultiLineStringReader(x, y, i) case ObjectType.MULTIPOLYGON => new MultiPolygonReader(x, y, i) case _ => throw new IllegalArgumentException(s"Unexpected object type $binding") } } // invokes a sequence of readers in a single call class SequenceReader(readers: Seq[OrcAttributeReader]) extends OrcAttributeReader { override def apply(sf: SimpleFeature, row: Int): Unit = readers.foreach(_.apply(sf, row)) } // reads a feature ID from a vector and sets it in a simple feature class FidReader(vector: BytesColumnVector) extends OrcAttributeReader { override def apply(sf: SimpleFeature, row: Int): Unit = sf.getIdentifier.asInstanceOf[FeatureIdImpl].setID(vector.toString(row)) } // reads a date from a vector and sets it in a simple feature class DateReader(val vector: TimestampColumnVector, val attribute: Int) extends VectorReaderAdapter[TimestampColumnVector] with GetVectorDate // reads a string from a vector and sets it in a simple feature class StringReader(val vector: BytesColumnVector, val attribute: Int) extends VectorReaderAdapter[BytesColumnVector] with GetVectorString // reads an int from a vector and sets it in a simple feature class IntReader(val vector: LongColumnVector, val attribute: Int) extends VectorReaderAdapter[LongColumnVector] with GetVectorInt // reads a long from a vector and sets it in a simple feature class LongReader(val vector: LongColumnVector, val attribute: Int) extends VectorReaderAdapter[LongColumnVector] with GetVectorLong // reads a float from a vector and sets it in a simple feature class FloatReader(val vector: DoubleColumnVector, val attribute: Int) extends VectorReaderAdapter[DoubleColumnVector] with GetVectorFloat // reads a double from a vector and sets it in a simple feature class DoubleReader(val vector: DoubleColumnVector, val attribute: Int) extends VectorReaderAdapter[DoubleColumnVector] with GetVectorDouble // reads a boolean from a vector and sets it in a simple feature class BooleanReader(val vector: LongColumnVector, val attribute: Int) extends VectorReaderAdapter[LongColumnVector] with GetVectorBoolean // reads a byte array from a vector and sets it in a simple feature class BytesReader(val vector: BytesColumnVector, val attribute: Int) extends VectorReaderAdapter[BytesColumnVector] with GetVectorBytes // reads a UUID from a vector and sets it in a simple feature class UuidReader(val vector: BytesColumnVector, val attribute: Int) extends VectorReaderAdapter[BytesColumnVector] with GetVectorUuid /** * Reads a point attribute from a vector and sets it in a simple feature * * @param x x coordinates * @param y y coordinates * @param attribute simple feature attribute index */ class PointReader(x: DoubleColumnVector, y: DoubleColumnVector, attribute: Int) extends OrcAttributeReader { override def apply(sf: SimpleFeature, row: Int): Unit = { if (x.noNulls || !x.isNull(row)) { sf.setAttribute(attribute, gf.createPoint(new Coordinate(x.vector(row), y.vector(row)))) } else { sf.setAttribute(attribute, null) } } } /** * Reads a linestring attribute 
from a vector and sets it in a simple feature. * A linestring is modeled as a list of points. * * @see PointReader * * @param xx outer list vector for x coordinates, containing a double vector for points * @param yy outer list vector for y coordinates, containing a double vector for points * @param attribute simple feature attribute index */ class LineStringReader(xx: ListColumnVector, yy: ListColumnVector, attribute: Int) extends OrcAttributeReader { private val x = xx.child.asInstanceOf[DoubleColumnVector] private val y = yy.child.asInstanceOf[DoubleColumnVector] override def apply(sf: SimpleFeature, row: Int): Unit = { if (xx.noNulls || !xx.isNull(row)) { val offset = xx.offsets(row).toInt val length = xx.lengths(row).toInt val coordinates = Array.ofDim[Coordinate](length) var i = 0 while (i < length) { coordinates(i) = new Coordinate(x.vector(offset + i), y.vector(offset + i)) i += 1 } sf.setAttribute(attribute, gf.createLineString(coordinates)) } else { sf.setAttribute(attribute, null) } } } /** * Reads a multi-point attribute from a vector and sets it in a simple feature. * A multi-point is modeled as a list of points. * * @see PointReader * * @param xx outer list vector for x coordinates, containing a double vector for points * @param yy outer list vector for y coordinates, containing a double vector for points * @param attribute simple feature attribute index */ class MultiPointReader(xx: ListColumnVector, yy: ListColumnVector, attribute: Int) extends OrcAttributeReader { private val x = xx.child.asInstanceOf[DoubleColumnVector] private val y = yy.child.asInstanceOf[DoubleColumnVector] override def apply(sf: SimpleFeature, row: Int): Unit = { if (xx.noNulls || !xx.isNull(row)) { val offset = xx.offsets(row).toInt val length = xx.lengths(row).toInt val coordinates = Array.ofDim[Coordinate](length) var i = 0 while (i < length) { coordinates(i) = new Coordinate(x.vector(offset + i), y.vector(offset + i)) i += 1 } sf.setAttribute(attribute, gf.createMultiPoint(coordinates)) } else { sf.setAttribute(attribute, null) } } } /** * Reads a polygon attribute from a vector and sets it in a simple feature. * A polygon is modeled as a list of lines, with the first value being the shell, * and any subsequent values being interior holes. 
* * @see LineStringReader * * @param xxx outer list vector for x coordinates, containing a list vector for individual lines * @param yyy outer list vector for y coordinates, containing a list vector for individual lines * @param attribute simple feature attribute index */ class PolygonReader(xxx: ListColumnVector, yyy: ListColumnVector, attribute: Int) extends OrcAttributeReader { private val xx = xxx.child.asInstanceOf[ListColumnVector] private val yy = yyy.child.asInstanceOf[ListColumnVector] private val x = xx.child.asInstanceOf[DoubleColumnVector] private val y = yy.child.asInstanceOf[DoubleColumnVector] override def apply(sf: SimpleFeature, row: Int): Unit = { if (xxx.noNulls || !xxx.isNull(row)) { val lineOffset = xxx.offsets(row).toInt val lineLength = xxx.lengths(row).toInt val lines = Array.ofDim[LinearRing](lineLength) var j = 0 while (j < lineLength) { val offset = xx.offsets(lineOffset + j).toInt val length = xx.lengths(lineOffset + j).toInt val coordinates = Array.ofDim[Coordinate](length) var i = 0 while (i < length) { coordinates(i) = new Coordinate(x.vector(offset + i), y.vector(offset + i)) i += 1 } lines(j) = gf.createLinearRing(coordinates) j += 1 } val polygon = if (lineLength == 1) { gf.createPolygon(lines.head) } else { gf.createPolygon(lines.head, lines.tail) } sf.setAttribute(attribute, polygon) } else { sf.setAttribute(attribute, null) } } } /** * Reads a multi-linestring attribute from a vector and sets it in a simple feature. * A multi-linestring is modeled as a list of lines. * * @see LineStringReader * * @param xxx outer list vector for x coordinates, containing a list vector for individual lines * @param yyy outer list vector for y coordinates, containing a list vector for individual lines * @param attribute simple feature attribute index */ class MultiLineStringReader(xxx: ListColumnVector, yyy: ListColumnVector, attribute: Int) extends OrcAttributeReader { private val xx = xxx.child.asInstanceOf[ListColumnVector] private val yy = yyy.child.asInstanceOf[ListColumnVector] private val x = xx.child.asInstanceOf[DoubleColumnVector] private val y = yy.child.asInstanceOf[DoubleColumnVector] override def apply(sf: SimpleFeature, row: Int): Unit = { if (xxx.noNulls || !xxx.isNull(row)) { val lineOffset = xxx.offsets(row).toInt val lineLength = xxx.lengths(row).toInt val lines = Array.ofDim[LineString](lineLength) var j = 0 while (j < lineLength) { val offset = xx.offsets(lineOffset + j).toInt val length = xx.lengths(lineOffset + j).toInt val coordinates = Array.ofDim[Coordinate](length) var i = 0 while (i < length) { coordinates(i) = new Coordinate(x.vector(offset + i), y.vector(offset + i)) i += 1 } lines(j) = gf.createLineString(coordinates) j += 1 } sf.setAttribute(attribute, gf.createMultiLineString(lines)) } else { sf.setAttribute(attribute, null) } } } /** * Reads a multi-polygon attribute from a vector and sets it in a simple feature. * A multi-polygon is modeled as a list of polygons. 
* * @see PolygonReader * * @param xxxx outer list vector for x coordinates, containing a list vector for individual polygons * @param yyyy outer list vector for y coordinates, containing a list vector for individual polygons * @param attribute simple feature attribute index */ class MultiPolygonReader(xxxx: ListColumnVector, yyyy: ListColumnVector, attribute: Int) extends OrcAttributeReader { private val xxx = xxxx.child.asInstanceOf[ListColumnVector] private val yyy = yyyy.child.asInstanceOf[ListColumnVector] private val xx = xxx.child.asInstanceOf[ListColumnVector] private val yy = yyy.child.asInstanceOf[ListColumnVector] private val x = xx.child.asInstanceOf[DoubleColumnVector] private val y = yy.child.asInstanceOf[DoubleColumnVector] override def apply(sf: SimpleFeature, row: Int): Unit = { if (xxxx.noNulls || !xxxx.isNull(row)) { val polygonOffset = xxxx.offsets(row).toInt val polygonLength = xxxx.lengths(row).toInt val polygons = Array.ofDim[Polygon](polygonLength) var k = 0 while (k < polygonLength) { val lineOffset = xxx.offsets(polygonOffset + k).toInt val lineLength = xxx.lengths(polygonOffset + k).toInt val lines = Array.ofDim[LinearRing](lineLength) var j = 0 while (j < lineLength) { val offset = xx.offsets(lineOffset + j).toInt val length = xx.lengths(lineOffset + j).toInt val coordinates = Array.ofDim[Coordinate](length) var i = 0 while (i < length) { coordinates(i) = new Coordinate(x.vector(offset + i), y.vector(offset + i)) i += 1 } lines(j) = gf.createLinearRing(coordinates) j += 1 } polygons(k) = if (lineLength == 1) { gf.createPolygon(lines.head) } else { gf.createPolygon(lines.head, lines.tail)} k += 1 } sf.setAttribute(attribute, gf.createMultiPolygon(polygons)) } else { sf.setAttribute(attribute, null) } } } /** * Reads a java.util.List attribute from a vector * * @param vector vector * @param attribute simple feature attribute index * @param binding list value type */ class ListReader(vector: ListColumnVector, attribute: Int, binding: ObjectType) extends OrcAttributeReader { private val reader = getInnerReader(binding, vector.child) override def apply(sf: SimpleFeature, row: Int): Unit = { if (vector.noNulls || !vector.isNull(row)) { val offset = vector.offsets(row).toInt val length = vector.lengths(row).toInt val value = new java.util.ArrayList[AnyRef](length) var i = offset while (i < offset + length) { value.add(reader.getValue(i)) i += 1 } sf.setAttribute(attribute, value) } else { sf.setAttribute(attribute, null) } } } /** * Reads a java.util.Map attribute from a vector * * @param vector vector * @param attribute simple feature attribute index * @param keyBinding map key type * @param valueBinding map value type */ class MapReader(vector: MapColumnVector, attribute: Int, keyBinding: ObjectType, valueBinding: ObjectType) extends OrcAttributeReader { private val keyReader = getInnerReader(keyBinding, vector.keys) private val valueReader = getInnerReader(valueBinding, vector.values) override def apply(sf: SimpleFeature, row: Int): Unit = { if (vector.noNulls || !vector.isNull(row)) { val offset = vector.offsets(row).toInt val length = vector.lengths(row).toInt val value = new java.util.HashMap[AnyRef, AnyRef](length) var i = 0 while (i < length) { value.put(keyReader.getValue(i + offset), valueReader.getValue(i + offset)) i += 1 } sf.setAttribute(attribute, value) } else { sf.setAttribute(attribute, null) } } } // reads a value out of a typed vector trait GetVectorValue[T <: ColumnVector] { protected val vector: T def getValue(row: Int): AnyRef } // reads a value 
out of a vector and sets it into a simple feature trait VectorReaderAdapter[T <: ColumnVector] extends OrcAttributeReader with GetVectorValue[T]{ def attribute: Int override def apply(sf: SimpleFeature, row: Int): Unit = sf.setAttribute(attribute, getValue(row)) } // reads a date from a vector trait GetVectorDate extends GetVectorValue[TimestampColumnVector] { override def getValue(row: Int): AnyRef = { if (vector.noNulls || !vector.isNull(row)) { new java.util.Date(vector.time(row)) } else { null } } } // reads a string from a vector trait GetVectorString extends GetVectorValue[BytesColumnVector] { override def getValue(row: Int): AnyRef = { if (vector.noNulls || !vector.isNull(row)) { vector.toString(row) } else { null } } } // reads an int from a vector trait GetVectorInt extends GetVectorValue[LongColumnVector] { override def getValue(row: Int): AnyRef = { if (vector.noNulls || !vector.isNull(row)) { Int.box(vector.vector(row).toInt) } else { null } } } // reads a long from a vector trait GetVectorLong extends GetVectorValue[LongColumnVector] { override def getValue(row: Int): AnyRef = { if (vector.noNulls || !vector.isNull(row)) { Long.box(vector.vector(row)) } else { null } } } // reads a float from a vector trait GetVectorFloat extends GetVectorValue[DoubleColumnVector] { override def getValue(row: Int): AnyRef = { if (vector.noNulls || !vector.isNull(row)) { Float.box(vector.vector(row).toFloat) } else { null } } } // reads a double from a vector trait GetVectorDouble extends GetVectorValue[DoubleColumnVector] { override def getValue(row: Int): AnyRef = { if (vector.noNulls || !vector.isNull(row)) { Double.box(vector.vector(row)) } else { null } } } // reads a boolean from a vector trait GetVectorBoolean extends GetVectorValue[LongColumnVector] { override def getValue(row: Int): AnyRef = { if (vector.noNulls || !vector.isNull(row)) { Boolean.box(vector.vector(row) > 0L) } else { null } } } // reads a byte array from a vector trait GetVectorBytes extends GetVectorValue[BytesColumnVector] { override def getValue(row: Int): AnyRef = { if (vector.noNulls || !vector.isNull(row)) { var bytes = vector.vector(row) if (vector.start(row) != 0 || vector.length(row) != bytes.length) { val tmp = Array.ofDim[Byte](vector.length(row)) System.arraycopy(bytes, vector.start(row), tmp, 0, tmp.length) bytes = tmp } bytes } else { null } } } // reads a UUID from a vector trait GetVectorUuid extends GetVectorValue[BytesColumnVector] { override def getValue(row: Int): AnyRef = { if (vector.noNulls || !vector.isNull(row)) { UUID.fromString(vector.toString(row)) } else { null } } } /** * Gets a reader for getting a value directly out of a vector * * @param binding binding * @param vec vector * @return */ private def getInnerReader(binding: ObjectType, vec: ColumnVector): GetVectorValue[ColumnVector] = { val reader = binding match { case ObjectType.DATE => new GetVectorDate { override val vector: TimestampColumnVector = vec.asInstanceOf[TimestampColumnVector] } case ObjectType.STRING => new GetVectorString { override val vector: BytesColumnVector = vec.asInstanceOf[BytesColumnVector] } case ObjectType.INT => new GetVectorInt { override val vector: LongColumnVector = vec.asInstanceOf[LongColumnVector] } case ObjectType.LONG => new GetVectorLong { override val vector: LongColumnVector = vec.asInstanceOf[LongColumnVector] } case ObjectType.FLOAT => new GetVectorFloat { override val vector: DoubleColumnVector = vec.asInstanceOf[DoubleColumnVector] } case ObjectType.DOUBLE => new GetVectorDouble { override val 
vector: DoubleColumnVector = vec.asInstanceOf[DoubleColumnVector] } case ObjectType.BOOLEAN => new GetVectorBoolean { override val vector: LongColumnVector = vec.asInstanceOf[LongColumnVector] } case ObjectType.BYTES => new GetVectorBytes { override val vector: BytesColumnVector = vec.asInstanceOf[BytesColumnVector] } case ObjectType.JSON => new GetVectorString { override val vector: BytesColumnVector = vec.asInstanceOf[BytesColumnVector] } case ObjectType.UUID => new GetVectorUuid { override val vector: BytesColumnVector = vec.asInstanceOf[BytesColumnVector] } case _ => throw new IllegalArgumentException(s"Unexpected object type $binding") } reader.asInstanceOf[GetVectorValue[ColumnVector]] } }
jahhulbert-ccri/geomesa
geomesa-fs/geomesa-fs-storage/geomesa-fs-storage-orc/src/main/scala/org/locationtech/geomesa/fs/storage/orc/utils/OrcAttributeReader.scala
Scala
apache-2.0
23,025
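A minimal call-pattern sketch, an assumption rather than GeoMesa source: it shows how a reader built by the factory above would be applied to each row of a vectorized batch. Obtaining the feature type, the row batch, and the pre-allocated features is left outside the sketch.

import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch
import org.locationtech.geomesa.fs.storage.orc.utils.OrcAttributeReader
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}

object OrcBatchReadSketch {
  // Populate one pre-allocated SimpleFeature per row of the batch.
  def readBatch(sft: SimpleFeatureType, batch: VectorizedRowBatch, features: IndexedSeq[SimpleFeature]): Unit = {
    val reader = OrcAttributeReader(sft, batch) // defaults: all columns, feature ids included
    var row = 0
    while (row < batch.size) {
      reader(features(row), row)
      row += 1
    }
  }
}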
package hr.element.beepo
package processor

trait IEmailSender {
  def send(email: Model.EmailSmtpRequest): EmailStatus
  def send(email: IndexedSeq[Model.EmailSmtpRequest]): IndexedSeq[EmailStatus]
}
element-doo/beepo
code/scala/model-interfaces-plus/src/main/scala/hr/element/beepo/processor/IEmailSender.scala
Scala
bsd-3-clause
204
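A small hypothetical sketch, assuming the same package so that Model.EmailSmtpRequest and EmailStatus resolve as in the trait above: one way the batch overload could delegate to the single-message overload, leaving the actual SMTP logic abstract.

package hr.element.beepo
package processor

// Hypothetical helper trait: implementers only provide the single-message send.
trait SingleMessageEmailSender extends IEmailSender {
  override def send(emails: IndexedSeq[Model.EmailSmtpRequest]): IndexedSeq[EmailStatus] =
    emails.map(e => send(e))
}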
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.hive.execution import java.io.File import java.nio.charset.StandardCharsets import java.sql.{Date, Timestamp} import java.util.Locale import com.google.common.io.Files import org.apache.hadoop.fs.Path import org.apache.spark.TestUtils import org.apache.spark.sql._ import org.apache.spark.sql.catalyst.TableIdentifier import org.apache.spark.sql.catalyst.analysis.{EliminateSubqueryAliases, FunctionRegistry, NoSuchPartitionException} import org.apache.spark.sql.catalyst.catalog.{CatalogRelation, CatalogTableType, CatalogUtils} import org.apache.spark.sql.catalyst.parser.ParseException import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, SubqueryAlias} import org.apache.spark.sql.execution.datasources.{HadoopFsRelation, LogicalRelation} import org.apache.spark.sql.functions._ import org.apache.spark.sql.hive.HiveUtils import org.apache.spark.sql.hive.test.TestHiveSingleton import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.test.SQLTestUtils import org.apache.spark.sql.types._ import org.apache.spark.unsafe.types.CalendarInterval case class Nested1(f1: Nested2) case class Nested2(f2: Nested3) case class Nested3(f3: Int) case class NestedArray2(b: Seq[Int]) case class NestedArray1(a: NestedArray2) case class Order( id: Int, make: String, `type`: String, price: Int, pdate: String, customer: String, city: String, state: String, month: Int) /** * A collection of hive query tests where we generate the answers ourselves instead of depending on * Hive to generate them (in contrast to HiveQuerySuite). Often this is because the query is * valid, but Hive currently cannot execute it. 
*/ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton { import hiveContext._ import spark.implicits._ test("query global temp view") { val df = Seq(1).toDF("i1") df.createGlobalTempView("tbl1") val global_temp_db = spark.conf.get("spark.sql.globalTempDatabase") checkAnswer(spark.sql(s"select * from ${global_temp_db}.tbl1"), Row(1)) spark.sql(s"drop view ${global_temp_db}.tbl1") } test("non-existent global temp view") { val global_temp_db = spark.conf.get("spark.sql.globalTempDatabase") val message = intercept[AnalysisException] { spark.sql(s"select * from ${global_temp_db}.nonexistentview") }.getMessage assert(message.contains("Table or view not found")) } test("script") { assume(TestUtils.testCommandAvailable("/bin/bash")) assume(TestUtils.testCommandAvailable("echo | sed")) val scriptFilePath = getTestResourcePath("test_script.sh") val df = Seq(("x1", "y1", "z1"), ("x2", "y2", "z2")).toDF("c1", "c2", "c3") df.createOrReplaceTempView("script_table") val query1 = sql( s""" |SELECT col1 FROM (from(SELECT c1, c2, c3 FROM script_table) tempt_table |REDUCE c1, c2, c3 USING 'bash $scriptFilePath' AS |(col1 STRING, col2 STRING)) script_test_table""".stripMargin) checkAnswer(query1, Row("x1_y1") :: Row("x2_y2") :: Nil) } test("UDTF") { withUserDefinedFunction("udtf_count2" -> true) { sql(s"ADD JAR ${hiveContext.getHiveFile("TestUDTF.jar").getCanonicalPath()}") // The function source code can be found at: // https://cwiki.apache.org/confluence/display/Hive/DeveloperGuide+UDTF sql( """ |CREATE TEMPORARY FUNCTION udtf_count2 |AS 'org.apache.spark.sql.hive.execution.GenericUDTFCount2' """.stripMargin) checkAnswer( sql("SELECT key, cc FROM src LATERAL VIEW udtf_count2(value) dd AS cc"), Row(97, 500) :: Row(97, 500) :: Nil) checkAnswer( sql("SELECT udtf_count2(a) FROM (SELECT 1 AS a FROM src LIMIT 3) t"), Row(3) :: Row(3) :: Nil) } } test("permanent UDTF") { withUserDefinedFunction("udtf_count_temp" -> false) { sql( s""" |CREATE FUNCTION udtf_count_temp |AS 'org.apache.spark.sql.hive.execution.GenericUDTFCount2' |USING JAR '${hiveContext.getHiveFile("TestUDTF.jar").toURI}' """.stripMargin) checkAnswer( sql("SELECT key, cc FROM src LATERAL VIEW udtf_count_temp(value) dd AS cc"), Row(97, 500) :: Row(97, 500) :: Nil) checkAnswer( sql("SELECT udtf_count_temp(a) FROM (SELECT 1 AS a FROM src LIMIT 3) t"), Row(3) :: Row(3) :: Nil) } } test("SPARK-6835: udtf in lateral view") { val df = Seq((1, 1)).toDF("c1", "c2") df.createOrReplaceTempView("table1") val query = sql("SELECT c1, v FROM table1 LATERAL VIEW stack(3, 1, c1 + 1, c1 + 2) d AS v") checkAnswer(query, Row(1, 1) :: Row(1, 2) :: Row(1, 3) :: Nil) } test("SPARK-13651: generator outputs shouldn't be resolved from its child's output") { withTempView("src") { Seq(("id1", "value1")).toDF("key", "value").createOrReplaceTempView("src") val query = sql("SELECT genoutput.* FROM src " + "LATERAL VIEW explode(map('key1', 100, 'key2', 200)) genoutput AS key, value") checkAnswer(query, Row("key1", 100) :: Row("key2", 200) :: Nil) } } test("SPARK-6851: Self-joined converted parquet tables") { val orders = Seq( Order(1, "Atlas", "MTB", 234, "2015-01-07", "John D", "Pacifica", "CA", 20151), Order(3, "Swift", "MTB", 285, "2015-01-17", "John S", "Redwood City", "CA", 20151), Order(4, "Atlas", "Hybrid", 303, "2015-01-23", "Jones S", "San Mateo", "CA", 20151), Order(7, "Next", "MTB", 356, "2015-01-04", "Jane D", "Daly City", "CA", 20151), Order(10, "Next", "YFlikr", 187, "2015-01-09", "John D", "Fremont", "CA", 20151), Order(11, "Swift", 
"YFlikr", 187, "2015-01-23", "John D", "Hayward", "CA", 20151), Order(2, "Next", "Hybrid", 324, "2015-02-03", "Jane D", "Daly City", "CA", 20152), Order(5, "Next", "Street", 187, "2015-02-08", "John D", "Fremont", "CA", 20152), Order(6, "Atlas", "Street", 154, "2015-02-09", "John D", "Pacifica", "CA", 20152), Order(8, "Swift", "Hybrid", 485, "2015-02-19", "John S", "Redwood City", "CA", 20152), Order(9, "Atlas", "Split", 303, "2015-02-28", "Jones S", "San Mateo", "CA", 20152)) val orderUpdates = Seq( Order(1, "Atlas", "MTB", 434, "2015-01-07", "John D", "Pacifica", "CA", 20151), Order(11, "Swift", "YFlikr", 137, "2015-01-23", "John D", "Hayward", "CA", 20151)) orders.toDF.createOrReplaceTempView("orders1") orderUpdates.toDF.createOrReplaceTempView("orderupdates1") sql( """CREATE TABLE orders( | id INT, | make String, | type String, | price INT, | pdate String, | customer String, | city String) |PARTITIONED BY (state STRING, month INT) |STORED AS PARQUET """.stripMargin) sql( """CREATE TABLE orderupdates( | id INT, | make String, | type String, | price INT, | pdate String, | customer String, | city String) |PARTITIONED BY (state STRING, month INT) |STORED AS PARQUET """.stripMargin) sql("set hive.exec.dynamic.partition.mode=nonstrict") sql("INSERT INTO TABLE orders PARTITION(state, month) SELECT * FROM orders1") sql("INSERT INTO TABLE orderupdates PARTITION(state, month) SELECT * FROM orderupdates1") checkAnswer( sql( """ |select orders.state, orders.month |from orders |join ( | select distinct orders.state,orders.month | from orders | join orderupdates | on orderupdates.id = orders.id) ao | on ao.state = orders.state and ao.month = orders.month """.stripMargin), (1 to 6).map(_ => Row("CA", 20151))) } test("show functions") { val allBuiltinFunctions = FunctionRegistry.builtin.listFunction().map(_.unquotedString) val allFunctions = sql("SHOW functions").collect().map(r => r(0)) allBuiltinFunctions.foreach { f => assert(allFunctions.contains(f)) } withTempDatabase { db => def createFunction(names: Seq[String]): Unit = { names.foreach { name => sql( s""" |CREATE TEMPORARY FUNCTION $name |AS '${classOf[PairUDF].getName}' """.stripMargin) } } def dropFunction(names: Seq[String]): Unit = { names.foreach { name => sql(s"DROP TEMPORARY FUNCTION $name") } } createFunction(Seq("temp_abs", "temp_weekofyear", "temp_sha", "temp_sha1", "temp_sha2")) checkAnswer(sql("SHOW functions temp_abs"), Row("temp_abs")) checkAnswer(sql("SHOW functions 'temp_abs'"), Row("temp_abs")) checkAnswer(sql(s"SHOW functions $db.temp_abs"), Row("temp_abs")) checkAnswer(sql(s"SHOW functions `$db`.`temp_abs`"), Row("temp_abs")) checkAnswer(sql(s"SHOW functions `$db`.`temp_abs`"), Row("temp_abs")) checkAnswer(sql("SHOW functions `a function doens't exist`"), Nil) checkAnswer(sql("SHOW functions `temp_weekofyea*`"), Row("temp_weekofyear")) // this probably will failed if we add more function with `sha` prefixing. checkAnswer( sql("SHOW functions `temp_sha*`"), List(Row("temp_sha"), Row("temp_sha1"), Row("temp_sha2"))) // Test '|' for alternation. 
checkAnswer( sql("SHOW functions 'temp_sha*|temp_weekofyea*'"), List(Row("temp_sha"), Row("temp_sha1"), Row("temp_sha2"), Row("temp_weekofyear"))) dropFunction(Seq("temp_abs", "temp_weekofyear", "temp_sha", "temp_sha1", "temp_sha2")) } } test("describe functions - built-in functions") { checkKeywordsExist(sql("describe function extended upper"), "Function: upper", "Class: org.apache.spark.sql.catalyst.expressions.Upper", "Usage: upper(str) - Returns `str` with all characters changed to uppercase", "Extended Usage:", "Examples:", "> SELECT upper('SparkSql');", "SPARKSQL") checkKeywordsExist(sql("describe functioN Upper"), "Function: upper", "Class: org.apache.spark.sql.catalyst.expressions.Upper", "Usage: upper(str) - Returns `str` with all characters changed to uppercase") checkKeywordsNotExist(sql("describe functioN Upper"), "Extended Usage") checkKeywordsExist(sql("describe functioN abcadf"), "Function: abcadf not found.") checkKeywordsExist(sql("describe functioN `~`"), "Function: ~", "Class: org.apache.spark.sql.catalyst.expressions.BitwiseNot", "Usage: ~ expr - Returns the result of bitwise NOT of `expr`.") // Hard coded describe functions checkKeywordsExist(sql("describe function `<>`"), "Function: <>", "Usage: expr1 <> expr2 - Returns true if `expr1` is not equal to `expr2`") checkKeywordsExist(sql("describe function `!=`"), "Function: !=", "Usage: expr1 != expr2 - Returns true if `expr1` is not equal to `expr2`") checkKeywordsExist(sql("describe function `between`"), "Function: between", "Usage: expr1 [NOT] BETWEEN expr2 AND expr3 - " + "evaluate if `expr1` is [not] in between `expr2` and `expr3`") checkKeywordsExist(sql("describe function `case`"), "Function: case", "Usage: CASE expr1 WHEN expr2 THEN expr3 " + "[WHEN expr4 THEN expr5]* [ELSE expr6] END - " + "When `expr1` = `expr2`, returns `expr3`; " + "when `expr1` = `expr4`, return `expr5`; else return `expr6`") } test("describe functions - user defined functions") { withUserDefinedFunction("udtf_count" -> false) { sql( s""" |CREATE FUNCTION udtf_count |AS 'org.apache.spark.sql.hive.execution.GenericUDTFCount2' |USING JAR '${hiveContext.getHiveFile("TestUDTF.jar").toURI}' """.stripMargin) checkKeywordsExist(sql("describe function udtf_count"), "Function: default.udtf_count", "Class: org.apache.spark.sql.hive.execution.GenericUDTFCount2", "Usage: N/A") checkAnswer( sql("SELECT udtf_count(a) FROM (SELECT 1 AS a FROM src LIMIT 3) t"), Row(3) :: Row(3) :: Nil) checkKeywordsExist(sql("describe function udtf_count"), "Function: default.udtf_count", "Class: org.apache.spark.sql.hive.execution.GenericUDTFCount2", "Usage: N/A") } } test("describe functions - temporary user defined functions") { withUserDefinedFunction("udtf_count_temp" -> true) { sql( s""" |CREATE TEMPORARY FUNCTION udtf_count_temp |AS 'org.apache.spark.sql.hive.execution.GenericUDTFCount2' |USING JAR '${hiveContext.getHiveFile("TestUDTF.jar").toURI}' """.stripMargin) checkKeywordsExist(sql("describe function udtf_count_temp"), "Function: udtf_count_temp", "Class: org.apache.spark.sql.hive.execution.GenericUDTFCount2", "Usage: N/A") checkAnswer( sql("SELECT udtf_count_temp(a) FROM (SELECT 1 AS a FROM src LIMIT 3) t"), Row(3) :: Row(3) :: Nil) checkKeywordsExist(sql("describe function udtf_count_temp"), "Function: udtf_count_temp", "Class: org.apache.spark.sql.hive.execution.GenericUDTFCount2", "Usage: N/A") } } test("SPARK-5371: union with null and sum") { val df = Seq((1, 1)).toDF("c1", "c2") df.createOrReplaceTempView("table1") val query = sql( """ |SELECT | MIN(c1), 
| MIN(c2) |FROM ( | SELECT | SUM(c1) c1, | NULL c2 | FROM table1 | UNION ALL | SELECT | NULL c1, | SUM(c2) c2 | FROM table1 |) a """.stripMargin) checkAnswer(query, Row(1, 1) :: Nil) } test("CTAS with WITH clause") { val df = Seq((1, 1)).toDF("c1", "c2") df.createOrReplaceTempView("table1") sql( """ |CREATE TABLE with_table1 AS |WITH T AS ( | SELECT * | FROM table1 |) |SELECT * |FROM T """.stripMargin) val query = sql("SELECT * FROM with_table1") checkAnswer(query, Row(1, 1) :: Nil) } test("explode nested Field") { Seq(NestedArray1(NestedArray2(Seq(1, 2, 3)))).toDF.createOrReplaceTempView("nestedArray") checkAnswer( sql("SELECT ints FROM nestedArray LATERAL VIEW explode(a.b) a AS ints"), Row(1) :: Row(2) :: Row(3) :: Nil) checkAnswer( sql("SELECT `ints` FROM nestedArray LATERAL VIEW explode(a.b) `a` AS `ints`"), Row(1) :: Row(2) :: Row(3) :: Nil) checkAnswer( sql("SELECT `a`.`ints` FROM nestedArray LATERAL VIEW explode(a.b) `a` AS `ints`"), Row(1) :: Row(2) :: Row(3) :: Nil) checkAnswer( sql( """ |SELECT `weird``tab`.`weird``col` |FROM nestedArray |LATERAL VIEW explode(a.b) `weird``tab` AS `weird``col` """.stripMargin), Row(1) :: Row(2) :: Row(3) :: Nil) } test("SPARK-4512 Fix attribute reference resolution error when using SORT BY") { checkAnswer( sql("SELECT * FROM (SELECT key + key AS a FROM src SORT BY value) t ORDER BY t.a"), sql("SELECT key + key as a FROM src ORDER BY a").collect().toSeq ) } def checkRelation( tableName: String, isDataSourceTable: Boolean, format: String, userSpecifiedLocation: Option[String] = None): Unit = { var relation: LogicalPlan = null withSQLConf( HiveUtils.CONVERT_METASTORE_PARQUET.key -> "false", HiveUtils.CONVERT_METASTORE_ORC.key -> "false") { relation = EliminateSubqueryAliases(spark.table(tableName).queryExecution.analyzed) } val catalogTable = sessionState.catalog.getTableMetadata(TableIdentifier(tableName)) relation match { case LogicalRelation(r: HadoopFsRelation, _, _) => if (!isDataSourceTable) { fail( s"${classOf[CatalogRelation].getCanonicalName} is expected, but found " + s"${HadoopFsRelation.getClass.getCanonicalName}.") } userSpecifiedLocation match { case Some(location) => assert(r.options("path") === location) case None => // OK. } assert(catalogTable.provider.get === format) case r: CatalogRelation => if (isDataSourceTable) { fail( s"${HadoopFsRelation.getClass.getCanonicalName} is expected, but found " + s"${classOf[CatalogRelation].getCanonicalName}.") } userSpecifiedLocation match { case Some(location) => assert(r.tableMeta.location === CatalogUtils.stringToURI(location)) case None => // OK. } // Also make sure that the format and serde are as desired. assert(catalogTable.storage.inputFormat.get.toLowerCase(Locale.ROOT).contains(format)) assert(catalogTable.storage.outputFormat.get.toLowerCase(Locale.ROOT).contains(format)) val serde = catalogTable.storage.serde.get format match { case "sequence" | "text" => assert(serde.contains("LazySimpleSerDe")) case "rcfile" => assert(serde.contains("LazyBinaryColumnarSerDe")) case _ => assert(serde.toLowerCase(Locale.ROOT).contains(format)) } } // When a user-specified location is defined, the table type needs to be EXTERNAL. 
val actualTableType = catalogTable.tableType userSpecifiedLocation match { case Some(location) => assert(actualTableType === CatalogTableType.EXTERNAL) case None => assert(actualTableType === CatalogTableType.MANAGED) } } test("CTAS without serde without location") { val originalConf = sessionState.conf.convertCTAS setConf(SQLConf.CONVERT_CTAS, true) val defaultDataSource = sessionState.conf.defaultDataSourceName try { sql("CREATE TABLE ctas1 AS SELECT key k, value FROM src ORDER BY k, value") sql("CREATE TABLE IF NOT EXISTS ctas1 AS SELECT key k, value FROM src ORDER BY k, value") val message = intercept[AnalysisException] { sql("CREATE TABLE ctas1 AS SELECT key k, value FROM src ORDER BY k, value") }.getMessage assert(message.contains("already exists")) checkRelation("ctas1", true, defaultDataSource) sql("DROP TABLE ctas1") // Specifying database name for query can be converted to data source write path // is not allowed right now. sql("CREATE TABLE default.ctas1 AS SELECT key k, value FROM src ORDER BY k, value") checkRelation("ctas1", true, defaultDataSource) sql("DROP TABLE ctas1") sql("CREATE TABLE ctas1 stored as textfile" + " AS SELECT key k, value FROM src ORDER BY k, value") checkRelation("ctas1", false, "text") sql("DROP TABLE ctas1") sql("CREATE TABLE ctas1 stored as sequencefile" + " AS SELECT key k, value FROM src ORDER BY k, value") checkRelation("ctas1", false, "sequence") sql("DROP TABLE ctas1") sql("CREATE TABLE ctas1 stored as rcfile AS SELECT key k, value FROM src ORDER BY k, value") checkRelation("ctas1", false, "rcfile") sql("DROP TABLE ctas1") sql("CREATE TABLE ctas1 stored as orc AS SELECT key k, value FROM src ORDER BY k, value") checkRelation("ctas1", false, "orc") sql("DROP TABLE ctas1") sql("CREATE TABLE ctas1 stored as parquet AS SELECT key k, value FROM src ORDER BY k, value") checkRelation("ctas1", false, "parquet") sql("DROP TABLE ctas1") } finally { setConf(SQLConf.CONVERT_CTAS, originalConf) sql("DROP TABLE IF EXISTS ctas1") } } test("CTAS with default fileformat") { val table = "ctas1" val ctas = s"CREATE TABLE IF NOT EXISTS $table SELECT key k, value FROM src" withSQLConf(SQLConf.CONVERT_CTAS.key -> "true") { withSQLConf("hive.default.fileformat" -> "textfile") { withTable(table) { sql(ctas) // We should use parquet here as that is the default datasource fileformat. The default // datasource file format is controlled by `spark.sql.sources.default` configuration. // This testcase verifies that setting `hive.default.fileformat` has no impact on // the target table's fileformat in case of CTAS. 
assert(sessionState.conf.defaultDataSourceName === "parquet") checkRelation(tableName = table, isDataSourceTable = true, format = "parquet") } } withSQLConf("spark.sql.sources.default" -> "orc") { withTable(table) { sql(ctas) checkRelation(tableName = table, isDataSourceTable = true, format = "orc") } } } } test("CTAS without serde with location") { withSQLConf(SQLConf.CONVERT_CTAS.key -> "true") { withTempDir { dir => val defaultDataSource = sessionState.conf.defaultDataSourceName val tempLocation = dir.toURI.getPath.stripSuffix("/") sql(s"CREATE TABLE ctas1 LOCATION 'file:$tempLocation/c1'" + " AS SELECT key k, value FROM src ORDER BY k, value") checkRelation("ctas1", true, defaultDataSource, Some(s"file:$tempLocation/c1")) sql("DROP TABLE ctas1") sql(s"CREATE TABLE ctas1 LOCATION 'file:$tempLocation/c2'" + " AS SELECT key k, value FROM src ORDER BY k, value") checkRelation("ctas1", true, defaultDataSource, Some(s"file:$tempLocation/c2")) sql("DROP TABLE ctas1") sql(s"CREATE TABLE ctas1 stored as textfile LOCATION 'file:$tempLocation/c3'" + " AS SELECT key k, value FROM src ORDER BY k, value") checkRelation("ctas1", false, "text", Some(s"file:$tempLocation/c3")) sql("DROP TABLE ctas1") sql(s"CREATE TABLE ctas1 stored as sequenceFile LOCATION 'file:$tempLocation/c4'" + " AS SELECT key k, value FROM src ORDER BY k, value") checkRelation("ctas1", false, "sequence", Some(s"file:$tempLocation/c4")) sql("DROP TABLE ctas1") sql(s"CREATE TABLE ctas1 stored as rcfile LOCATION 'file:$tempLocation/c5'" + " AS SELECT key k, value FROM src ORDER BY k, value") checkRelation("ctas1", false, "rcfile", Some(s"file:$tempLocation/c5")) sql("DROP TABLE ctas1") } } } test("CTAS with serde") { sql("CREATE TABLE ctas1 AS SELECT key k, value FROM src ORDER BY k, value") sql( """CREATE TABLE ctas2 | ROW FORMAT SERDE "org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe" | WITH SERDEPROPERTIES("serde_p1"="p1","serde_p2"="p2") | STORED AS RCFile | TBLPROPERTIES("tbl_p1"="p11", "tbl_p2"="p22") | AS | SELECT key, value | FROM src | ORDER BY key, value""".stripMargin) val storageCtas2 = spark.sessionState.catalog.getTableMetadata(TableIdentifier("ctas2")).storage assert(storageCtas2.inputFormat == Some("org.apache.hadoop.hive.ql.io.RCFileInputFormat")) assert(storageCtas2.outputFormat == Some("org.apache.hadoop.hive.ql.io.RCFileOutputFormat")) assert(storageCtas2.serde == Some("org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe")) sql( """CREATE TABLE ctas3 | ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' LINES TERMINATED BY '\\012' | STORED AS textfile AS | SELECT key, value | FROM src | ORDER BY key, value""".stripMargin) // the table schema may like (key: integer, value: string) sql( """CREATE TABLE IF NOT EXISTS ctas4 AS | SELECT 1 AS key, value FROM src LIMIT 1""".stripMargin) // do nothing cause the table ctas4 already existed. 
sql( """CREATE TABLE IF NOT EXISTS ctas4 AS | SELECT key, value FROM src ORDER BY key, value""".stripMargin) checkAnswer( sql("SELECT k, value FROM ctas1 ORDER BY k, value"), sql("SELECT key, value FROM src ORDER BY key, value")) checkAnswer( sql("SELECT key, value FROM ctas2 ORDER BY key, value"), sql( """ SELECT key, value FROM src ORDER BY key, value""")) checkAnswer( sql("SELECT key, value FROM ctas3 ORDER BY key, value"), sql( """ SELECT key, value FROM src ORDER BY key, value""")) intercept[AnalysisException] { sql( """CREATE TABLE ctas4 AS | SELECT key, value FROM src ORDER BY key, value""".stripMargin) } checkAnswer( sql("SELECT key, value FROM ctas4 ORDER BY key, value"), sql("SELECT key, value FROM ctas4 LIMIT 1").collect().toSeq) sql( """CREATE TABLE ctas5 | STORED AS parquet AS | SELECT key, value | FROM src | ORDER BY key, value""".stripMargin) val storageCtas5 = spark.sessionState.catalog.getTableMetadata(TableIdentifier("ctas5")).storage assert(storageCtas5.inputFormat == Some("org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat")) assert(storageCtas5.outputFormat == Some("org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat")) assert(storageCtas5.serde == Some("org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe")) // use the Hive SerDe for parquet tables withSQLConf(HiveUtils.CONVERT_METASTORE_PARQUET.key -> "false") { checkAnswer( sql("SELECT key, value FROM ctas5 ORDER BY key, value"), sql("SELECT key, value FROM src ORDER BY key, value")) } } test("specifying the column list for CTAS") { withTempView("mytable1") { Seq((1, "111111"), (2, "222222")).toDF("key", "value").createOrReplaceTempView("mytable1") withTable("gen__tmp") { sql("create table gen__tmp as select key as a, value as b from mytable1") checkAnswer( sql("SELECT a, b from gen__tmp"), sql("select key, value from mytable1").collect()) } withTable("gen__tmp") { val e = intercept[AnalysisException] { sql("create table gen__tmp(a int, b string) as select key, value from mytable1") }.getMessage assert(e.contains("Schema may not be specified in a Create Table As Select (CTAS)")) } withTable("gen__tmp") { val e = intercept[AnalysisException] { sql( """ |CREATE TABLE gen__tmp |PARTITIONED BY (key string) |AS SELECT key, value FROM mytable1 """.stripMargin) }.getMessage assert(e.contains("A Create Table As Select (CTAS) statement is not allowed to " + "create a partitioned table using Hive's file formats")) } } } test("command substitution") { sql("set tbl=src") checkAnswer( sql("SELECT key FROM ${hiveconf:tbl} ORDER BY key, value limit 1"), sql("SELECT key FROM src ORDER BY key, value limit 1").collect().toSeq) sql("set spark.sql.variable.substitute=false") // disable the substitution sql("set tbl2=src") intercept[Exception] { sql("SELECT key FROM ${hiveconf:tbl2} ORDER BY key, value limit 1").collect() } sql("set spark.sql.variable.substitute=true") // enable the substitution checkAnswer( sql("SELECT key FROM ${hiveconf:tbl2} ORDER BY key, value limit 1"), sql("SELECT key FROM src ORDER BY key, value limit 1").collect().toSeq) } test("ordering not in select") { checkAnswer( sql("SELECT key FROM src ORDER BY value"), sql("SELECT key FROM (SELECT key, value FROM src ORDER BY value) a").collect().toSeq) } test("ordering not in agg") { checkAnswer( sql("SELECT key FROM src GROUP BY key, value ORDER BY value"), sql(""" SELECT key FROM ( SELECT key, value FROM src GROUP BY key, value ORDER BY value) a""").collect().toSeq) } test("double nested data") { 
sparkContext.parallelize(Nested1(Nested2(Nested3(1))) :: Nil) .toDF().createOrReplaceTempView("nested") checkAnswer( sql("SELECT f1.f2.f3 FROM nested"), Row(1)) sql("CREATE TABLE test_ctas_1234 AS SELECT * from nested") checkAnswer( sql("SELECT * FROM test_ctas_1234"), sql("SELECT * FROM nested").collect().toSeq) intercept[AnalysisException] { sql("CREATE TABLE test_ctas_1234 AS SELECT * from notexists").collect() } } test("test CTAS") { sql("CREATE TABLE test_ctas_123 AS SELECT key, value FROM src") checkAnswer( sql("SELECT key, value FROM test_ctas_123 ORDER BY key"), sql("SELECT key, value FROM src ORDER BY key").collect().toSeq) } test("SPARK-4825 save join to table") { val testData = sparkContext.parallelize(1 to 10).map(i => TestData(i, i.toString)).toDF() sql("CREATE TABLE test1 (key INT, value STRING)") testData.write.mode(SaveMode.Append).insertInto("test1") sql("CREATE TABLE test2 (key INT, value STRING)") testData.write.mode(SaveMode.Append).insertInto("test2") testData.write.mode(SaveMode.Append).insertInto("test2") sql("CREATE TABLE test AS SELECT COUNT(a.value) FROM test1 a JOIN test2 b ON a.key = b.key") checkAnswer( table("test"), sql("SELECT COUNT(a.value) FROM test1 a JOIN test2 b ON a.key = b.key").collect().toSeq) } test("SPARK-3708 Backticks aren't handled correctly is aliases") { checkAnswer( sql("SELECT k FROM (SELECT `key` AS `k` FROM src) a"), sql("SELECT `key` FROM src").collect().toSeq) } test("SPARK-3834 Backticks not correctly handled in subquery aliases") { checkAnswer( sql("SELECT a.key FROM (SELECT key FROM src) `a`"), sql("SELECT `key` FROM src").collect().toSeq) } test("SPARK-3814 Support Bitwise & operator") { checkAnswer( sql("SELECT case when 1&1=1 then 1 else 0 end FROM src"), sql("SELECT 1 FROM src").collect().toSeq) } test("SPARK-3814 Support Bitwise | operator") { checkAnswer( sql("SELECT case when 1|0=1 then 1 else 0 end FROM src"), sql("SELECT 1 FROM src").collect().toSeq) } test("SPARK-3814 Support Bitwise ^ operator") { checkAnswer( sql("SELECT case when 1^0=1 then 1 else 0 end FROM src"), sql("SELECT 1 FROM src").collect().toSeq) } test("SPARK-3814 Support Bitwise ~ operator") { checkAnswer( sql("SELECT case when ~1=-2 then 1 else 0 end FROM src"), sql("SELECT 1 FROM src").collect().toSeq) } test("SPARK-4154 Query does not work if it has 'not between' in Spark SQL and HQL") { checkAnswer(sql("SELECT key FROM src WHERE key not between 0 and 10 order by key"), sql("SELECT key FROM src WHERE key between 11 and 500 order by key").collect().toSeq) } test("SPARK-2554 SumDistinct partial aggregation") { checkAnswer(sql("SELECT sum( distinct key) FROM src group by key order by key"), sql("SELECT distinct key FROM src order by key").collect().toSeq) } test("SPARK-4963 DataFrame sample on mutable row return wrong result") { sql("SELECT * FROM src WHERE key % 2 = 0") .sample(withReplacement = false, fraction = 0.3) .createOrReplaceTempView("sampled") (1 to 10).foreach { i => checkAnswer( sql("SELECT * FROM sampled WHERE key % 2 = 1"), Seq.empty[Row]) } } test("SPARK-4699 SparkSession with Hive Support should be case insensitive by default") { checkAnswer( sql("SELECT KEY FROM Src ORDER BY value"), sql("SELECT key FROM src ORDER BY value").collect().toSeq) } test("SPARK-5284 Insert into Hive throws NPE when a inner complex type field has a null value") { val schema = StructType( StructField("s", StructType( StructField("innerStruct", StructType(StructField("s1", StringType, true) :: Nil)) :: StructField("innerArray", ArrayType(IntegerType), true) :: 
StructField("innerMap", MapType(StringType, IntegerType)) :: Nil), true) :: Nil) val row = Row(Row(null, null, null)) val rowRdd = sparkContext.parallelize(row :: Nil) spark.createDataFrame(rowRdd, schema).createOrReplaceTempView("testTable") sql( """CREATE TABLE nullValuesInInnerComplexTypes | (s struct<innerStruct: struct<s1:string>, | innerArray:array<int>, | innerMap: map<string, int>>) """.stripMargin).collect() sql( """ |INSERT OVERWRITE TABLE nullValuesInInnerComplexTypes |SELECT * FROM testTable """.stripMargin) checkAnswer( sql("SELECT * FROM nullValuesInInnerComplexTypes"), Row(Row(null, null, null)) ) sql("DROP TABLE nullValuesInInnerComplexTypes") dropTempTable("testTable") } test("SPARK-4296 Grouping field with Hive UDF as sub expression") { val ds = Seq("""{"a": "str", "b":"1", "c":"1970-01-01 00:00:00"}""").toDS() read.json(ds).createOrReplaceTempView("data") checkAnswer( sql("SELECT concat(a, '-', b), year(c) FROM data GROUP BY concat(a, '-', b), year(c)"), Row("str-1", 1970)) dropTempTable("data") read.json(ds).createOrReplaceTempView("data") checkAnswer(sql("SELECT year(c) + 1 FROM data GROUP BY year(c) + 1"), Row(1971)) dropTempTable("data") } test("resolve udtf in projection #1") { val ds = (1 to 5).map(i => s"""{"a":[$i, ${i + 1}]}""").toDS() read.json(ds).createOrReplaceTempView("data") val df = sql("SELECT explode(a) AS val FROM data") val col = df("val") } test("resolve udtf in projection #2") { val ds = (1 to 2).map(i => s"""{"a":[$i, ${i + 1}]}""").toDS() read.json(ds).createOrReplaceTempView("data") checkAnswer(sql("SELECT explode(map(1, 1)) FROM data LIMIT 1"), Row(1, 1) :: Nil) checkAnswer(sql("SELECT explode(map(1, 1)) as (k1, k2) FROM data LIMIT 1"), Row(1, 1) :: Nil) intercept[AnalysisException] { sql("SELECT explode(map(1, 1)) as k1 FROM data LIMIT 1") } intercept[AnalysisException] { sql("SELECT explode(map(1, 1)) as (k1, k2, k3) FROM data LIMIT 1") } } // TGF with non-TGF in project is allowed in Spark SQL, but not in Hive test("TGF with non-TGF in projection") { val ds = Seq("""{"a": "1", "b":"1"}""").toDS() read.json(ds).createOrReplaceTempView("data") checkAnswer( sql("SELECT explode(map(a, b)) as (k1, k2), a, b FROM data"), Row("1", "1", "1", "1") :: Nil) } test("logical.Project should not be resolved if it contains aggregates or generators") { // This test is used to test the fix of SPARK-5875. // The original issue was that Project's resolved will be true when it contains // AggregateExpressions or Generators. However, in this case, the Project // is not in a valid state (cannot be executed). Because of this bug, the analysis rule of // PreInsertionCasts will actually start to work before ImplicitGenerate and then // generates an invalid query plan. 
val ds = (1 to 5).map(i => s"""{"a":[$i, ${i + 1}]}""").toDS() read.json(ds).createOrReplaceTempView("data") withSQLConf(SQLConf.CONVERT_CTAS.key -> "false") { sql("CREATE TABLE explodeTest (key bigInt)") table("explodeTest").queryExecution.analyzed match { case SubqueryAlias(_, r: CatalogRelation) => // OK case _ => fail("To correctly test the fix of SPARK-5875, explodeTest should be a MetastoreRelation") } sql(s"INSERT OVERWRITE TABLE explodeTest SELECT explode(a) AS val FROM data") checkAnswer( sql("SELECT key from explodeTest"), (1 to 5).flatMap(i => Row(i) :: Row(i + 1) :: Nil) ) sql("DROP TABLE explodeTest") dropTempTable("data") } } test("sanity test for SPARK-6618") { val threads: Seq[Thread] = (1 to 10).map { i => new Thread("test-thread-" + i) { override def run(): Unit = { val tableName = s"SPARK_6618_table_$i" sql(s"CREATE TABLE $tableName (col1 string)") sessionState.catalog.lookupRelation(TableIdentifier(tableName)) table(tableName) tables() sql(s"DROP TABLE $tableName") } } } threads.foreach(_.start()) threads.foreach(_.join(10000)) } test("SPARK-5203 union with different decimal precision") { Seq.empty[(java.math.BigDecimal, java.math.BigDecimal)] .toDF("d1", "d2") .select($"d1".cast(DecimalType(10, 5)).as("d")) .createOrReplaceTempView("dn") sql("select d from dn union all select d * 2 from dn") .queryExecution.analyzed } test("Star Expansion - script transform") { assume(TestUtils.testCommandAvailable("/bin/bash")) val data = (1 to 100000).map { i => (i, i, i) } data.toDF("d1", "d2", "d3").createOrReplaceTempView("script_trans") assert(100000 === sql("SELECT TRANSFORM (*) USING 'cat' FROM script_trans").count()) } test("test script transform for stdout") { assume(TestUtils.testCommandAvailable("/bin/bash")) val data = (1 to 100000).map { i => (i, i, i) } data.toDF("d1", "d2", "d3").createOrReplaceTempView("script_trans") assert(100000 === sql("SELECT TRANSFORM (d1, d2, d3) USING 'cat' AS (a,b,c) FROM script_trans").count()) } test("test script transform for stderr") { assume(TestUtils.testCommandAvailable("/bin/bash")) val data = (1 to 100000).map { i => (i, i, i) } data.toDF("d1", "d2", "d3").createOrReplaceTempView("script_trans") assert(0 === sql("SELECT TRANSFORM (d1, d2, d3) USING 'cat 1>&2' AS (a,b,c) FROM script_trans").count()) } test("test script transform data type") { assume(TestUtils.testCommandAvailable("/bin/bash")) val data = (1 to 5).map { i => (i, i) } data.toDF("key", "value").createOrReplaceTempView("test") checkAnswer( sql("""FROM |(FROM test SELECT TRANSFORM(key, value) USING 'cat' AS (`thing1` int, thing2 string)) t |SELECT thing1 + 1 """.stripMargin), (2 to 6).map(i => Row(i))) } test("Sorting columns are not in Generate") { withTempView("data") { spark.range(1, 5) .select(array($"id", $"id" + 1).as("a"), $"id".as("b"), (lit(10) - $"id").as("c")) .createOrReplaceTempView("data") // case 1: missing sort columns are resolvable if join is true checkAnswer( sql("SELECT explode(a) AS val, b FROM data WHERE b < 2 order by val, c"), Row(1, 1) :: Row(2, 1) :: Nil) // case 2: missing sort columns are resolvable if join is false checkAnswer( sql("SELECT explode(a) AS val FROM data order by val, c"), Seq(1, 2, 2, 3, 3, 4, 4, 5).map(i => Row(i))) // case 3: missing sort columns are resolvable if join is true and outer is true checkAnswer( sql( """ |SELECT C.val, b FROM data LATERAL VIEW OUTER explode(a) C as val |where b < 2 order by c, val, b """.stripMargin), Row(1, 1) :: Row(2, 1) :: Nil) } } test("test case key when") { (1 to 5).map(i => (i, 
i.toString)).toDF("k", "v").createOrReplaceTempView("t") checkAnswer( sql("SELECT CASE k WHEN 2 THEN 22 WHEN 4 THEN 44 ELSE 0 END, v FROM t"), Row(0, "1") :: Row(22, "2") :: Row(0, "3") :: Row(44, "4") :: Row(0, "5") :: Nil) } test("SPARK-7269 Check analysis failed in case in-sensitive") { Seq(1, 2, 3).map { i => (i.toString, i.toString) }.toDF("key", "value").createOrReplaceTempView("df_analysis") sql("SELECT kEy from df_analysis group by key").collect() sql("SELECT kEy+3 from df_analysis group by key+3").collect() sql("SELECT kEy+3, a.kEy, A.kEy from df_analysis A group by key").collect() sql("SELECT cast(kEy+1 as Int) from df_analysis A group by cast(key+1 as int)").collect() sql("SELECT cast(kEy+1 as Int) from df_analysis A group by key+1").collect() sql("SELECT 2 from df_analysis A group by key+1").collect() intercept[AnalysisException] { sql("SELECT kEy+1 from df_analysis group by key+3") } intercept[AnalysisException] { sql("SELECT cast(key+2 as Int) from df_analysis A group by cast(key+1 as int)") } } test("Cast STRING to BIGINT") { checkAnswer(sql("SELECT CAST('775983671874188101' as BIGINT)"), Row(775983671874188101L)) } test("dynamic partition value test") { try { sql("set hive.exec.dynamic.partition.mode=nonstrict") // date sql("drop table if exists dynparttest1") sql("create table dynparttest1 (value int) partitioned by (pdate date)") sql( """ |insert into table dynparttest1 partition(pdate) | select count(*), cast('2015-05-21' as date) as pdate from src """.stripMargin) checkAnswer( sql("select * from dynparttest1"), Seq(Row(500, java.sql.Date.valueOf("2015-05-21")))) // decimal sql("drop table if exists dynparttest2") sql("create table dynparttest2 (value int) partitioned by (pdec decimal(5, 1))") sql( """ |insert into table dynparttest2 partition(pdec) | select count(*), cast('100.12' as decimal(5, 1)) as pdec from src """.stripMargin) checkAnswer( sql("select * from dynparttest2"), Seq(Row(500, new java.math.BigDecimal("100.1")))) } finally { sql("drop table if exists dynparttest1") sql("drop table if exists dynparttest2") sql("set hive.exec.dynamic.partition.mode=strict") } } test("Call add jar in a different thread (SPARK-8306)") { @volatile var error: Option[Throwable] = None val thread = new Thread { override def run() { // To make sure this test works, this jar should not be loaded in another place. 
sql( s"ADD JAR ${hiveContext.getHiveFile("hive-contrib-0.13.1.jar").getCanonicalPath()}") try { sql( """ |CREATE TEMPORARY FUNCTION example_max |AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMax' """.stripMargin) } catch { case throwable: Throwable => error = Some(throwable) } } } thread.start() thread.join() error match { case Some(throwable) => fail("CREATE TEMPORARY FUNCTION should not fail.", throwable) case None => // OK } } test("SPARK-6785: HiveQuerySuite - Date comparison test 2") { checkAnswer( sql("SELECT CAST(CAST(0 AS timestamp) AS date) > CAST(0 AS timestamp) FROM src LIMIT 1"), Row(false)) } test("SPARK-6785: HiveQuerySuite - Date cast") { // new Date(0) == 1970-01-01 00:00:00.0 GMT == 1969-12-31 16:00:00.0 PST checkAnswer( sql( """ | SELECT | CAST(CAST(0 AS timestamp) AS date), | CAST(CAST(CAST(0 AS timestamp) AS date) AS string), | CAST(0 AS timestamp), | CAST(CAST(0 AS timestamp) AS string), | CAST(CAST(CAST('1970-01-01 23:00:00' AS timestamp) AS date) AS timestamp) | FROM src LIMIT 1 """.stripMargin), Row( Date.valueOf("1969-12-31"), String.valueOf("1969-12-31"), Timestamp.valueOf("1969-12-31 16:00:00"), String.valueOf("1969-12-31 16:00:00"), Timestamp.valueOf("1970-01-01 00:00:00"))) } test("SPARK-8588 HiveTypeCoercion.inConversion fires too early") { val df = createDataFrame(Seq((1, "2014-01-01"), (2, "2015-01-01"), (3, "2016-01-01"))) df.toDF("id", "datef").createOrReplaceTempView("test_SPARK8588") checkAnswer( sql( """ |select id, concat(year(datef)) |from test_SPARK8588 where concat(year(datef), ' year') in ('2015 year', '2014 year') """.stripMargin), Row(1, "2014") :: Row(2, "2015") :: Nil ) dropTempTable("test_SPARK8588") } test("SPARK-9371: fix the support for special chars in column names for hive context") { val ds = Seq("""{"a": {"c.b": 1}, "b.$q": [{"a@!.q": 1}], "q.w": {"w.i&": [1]}}""").toDS() read.json(ds).createOrReplaceTempView("t") checkAnswer(sql("SELECT a.`c.b`, `b.$q`[0].`a@!.q`, `q.w`.`w.i&`[0] FROM t"), Row(1, 1, 1)) } test("Convert hive interval term into Literal of CalendarIntervalType") { checkAnswer(sql("select interval '10-9' year to month"), Row(CalendarInterval.fromString("interval 10 years 9 months"))) checkAnswer(sql("select interval '20 15:40:32.99899999' day to second"), Row(CalendarInterval.fromString("interval 2 weeks 6 days 15 hours 40 minutes " + "32 seconds 99 milliseconds 899 microseconds"))) checkAnswer(sql("select interval '30' year"), Row(CalendarInterval.fromString("interval 30 years"))) checkAnswer(sql("select interval '25' month"), Row(CalendarInterval.fromString("interval 25 months"))) checkAnswer(sql("select interval '-100' day"), Row(CalendarInterval.fromString("interval -14 weeks -2 days"))) checkAnswer(sql("select interval '40' hour"), Row(CalendarInterval.fromString("interval 1 days 16 hours"))) checkAnswer(sql("select interval '80' minute"), Row(CalendarInterval.fromString("interval 1 hour 20 minutes"))) checkAnswer(sql("select interval '299.889987299' second"), Row(CalendarInterval.fromString( "interval 4 minutes 59 seconds 889 milliseconds 987 microseconds"))) } test("specifying database name for a temporary view is not allowed") { withTempPath { dir => val path = dir.toURI.toString val df = sparkContext.parallelize(1 to 10).map(i => (i, i.toString)).toDF("num", "str") df .write .format("parquet") .save(path) // We don't support creating a temporary table while specifying a database intercept[AnalysisException] { spark.sql( s""" |CREATE TEMPORARY VIEW db.t |USING parquet |OPTIONS ( | path '$path' |) 
""".stripMargin) } // If you use backticks to quote the name then it's OK. spark.sql( s""" |CREATE TEMPORARY VIEW `db.t` |USING parquet |OPTIONS ( | path '$path' |) """.stripMargin) checkAnswer(spark.table("`db.t`"), df) } } test("SPARK-10593 same column names in lateral view") { val df = spark.sql( """ |select |insideLayer2.json as a2 |from (select '{"layer1": {"layer2": "text inside layer 2"}}' json) test |lateral view json_tuple(json, 'layer1') insideLayer1 as json |lateral view json_tuple(insideLayer1.json, 'layer2') insideLayer2 as json """.stripMargin ) checkAnswer(df, Row("text inside layer 2") :: Nil) } ignore("SPARK-10310: " + "script transformation using default input/output SerDe and record reader/writer") { spark .range(5) .selectExpr("id AS a", "id AS b") .createOrReplaceTempView("test") val scriptFilePath = getTestResourcePath("data") checkAnswer( sql( s"""FROM( | FROM test SELECT TRANSFORM(a, b) | USING 'python $scriptFilePath/scripts/test_transform.py "\\t"' | AS (c STRING, d STRING) |) t |SELECT c """.stripMargin), (0 until 5).map(i => Row(i + "#"))) } ignore("SPARK-10310: script transformation using LazySimpleSerDe") { spark .range(5) .selectExpr("id AS a", "id AS b") .createOrReplaceTempView("test") val scriptFilePath = getTestResourcePath("data") val df = sql( s"""FROM test |SELECT TRANSFORM(a, b) |ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' |WITH SERDEPROPERTIES('field.delim' = '|') |USING 'python $scriptFilePath/scripts/test_transform.py "|"' |AS (c STRING, d STRING) |ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' |WITH SERDEPROPERTIES('field.delim' = '|') """.stripMargin) checkAnswer(df, (0 until 5).map(i => Row(i + "#", i + "#"))) } test("SPARK-10741: Sort on Aggregate using parquet") { withTable("test10741") { withTempView("src") { Seq("a" -> 5, "a" -> 9, "b" -> 6).toDF("c1", "c2").createOrReplaceTempView("src") sql("CREATE TABLE test10741 STORED AS PARQUET AS SELECT * FROM src") } checkAnswer(sql( """ |SELECT c1, AVG(c2) AS c_avg |FROM test10741 |GROUP BY c1 |HAVING (AVG(c2) > 5) ORDER BY c1 """.stripMargin), Row("a", 7.0) :: Row("b", 6.0) :: Nil) checkAnswer(sql( """ |SELECT c1, AVG(c2) AS c_avg |FROM test10741 |GROUP BY c1 |ORDER BY AVG(c2) """.stripMargin), Row("b", 6.0) :: Row("a", 7.0) :: Nil) } } test("run sql directly on files - parquet") { val df = spark.range(100).toDF() withTempPath(f => { df.write.parquet(f.getCanonicalPath) // data source type is case insensitive checkAnswer(sql(s"select id from Parquet.`${f.getCanonicalPath}`"), df) checkAnswer(sql(s"select id from `org.apache.spark.sql.parquet`.`${f.getCanonicalPath}`"), df) checkAnswer(sql(s"select a.id from parquet.`${f.getCanonicalPath}` as a"), df) }) } test("run sql directly on files - orc") { val df = spark.range(100).toDF() withTempPath(f => { df.write.orc(f.getCanonicalPath) // data source type is case insensitive checkAnswer(sql(s"select id from ORC.`${f.getCanonicalPath}`"), df) checkAnswer(sql(s"select id from `org.apache.spark.sql.hive.orc`.`${f.getCanonicalPath}`"), df) checkAnswer(sql(s"select a.id from orc.`${f.getCanonicalPath}` as a"), df) }) } test("run sql directly on files - csv") { val df = spark.range(100).toDF() withTempPath(f => { df.write.csv(f.getCanonicalPath) // data source type is case insensitive checkAnswer(sql(s"select cast(_c0 as int) id from CSV.`${f.getCanonicalPath}`"), df) checkAnswer( sql(s"select cast(_c0 as int) id from `com.databricks.spark.csv`.`${f.getCanonicalPath}`"), df) checkAnswer(sql(s"select cast(a._c0 
as int) id from csv.`${f.getCanonicalPath}` as a"), df) }) } test("run sql directly on files - json") { val df = spark.range(100).toDF() withTempPath(f => { df.write.json(f.getCanonicalPath) // data source type is case insensitive checkAnswer(sql(s"select id from jsoN.`${f.getCanonicalPath}`"), df) checkAnswer(sql(s"select id from `org.apache.spark.sql.json`.`${f.getCanonicalPath}`"), df) checkAnswer(sql(s"select a.id from json.`${f.getCanonicalPath}` as a"), df) }) } test("run sql directly on files - hive") { withTempPath(f => { spark.range(100).toDF.write.parquet(f.getCanonicalPath) var e = intercept[AnalysisException] { sql(s"select id from hive.`${f.getCanonicalPath}`") } assert(e.message.contains("Unsupported data source type for direct query on files: hive")) // data source type is case insensitive e = intercept[AnalysisException] { sql(s"select id from HIVE.`${f.getCanonicalPath}`") } assert(e.message.contains("Unsupported data source type for direct query on files: HIVE")) }) } test("SPARK-8976 Wrong Result for Rollup #1") { checkAnswer(sql( "SELECT count(*) AS cnt, key % 5, grouping_id() FROM src GROUP BY key%5 WITH ROLLUP"), Seq( (113, 3, 0), (91, 0, 0), (500, null, 1), (84, 1, 0), (105, 2, 0), (107, 4, 0) ).map(i => Row(i._1, i._2, i._3))) } test("SPARK-8976 Wrong Result for Rollup #2") { checkAnswer(sql( """ |SELECT count(*) AS cnt, key % 5 AS k1, key-5 AS k2, grouping_id() AS k3 |FROM src GROUP BY key%5, key-5 |WITH ROLLUP ORDER BY cnt, k1, k2, k3 LIMIT 10 """.stripMargin), Seq( (1, 0, 5, 0), (1, 0, 15, 0), (1, 0, 25, 0), (1, 0, 60, 0), (1, 0, 75, 0), (1, 0, 80, 0), (1, 0, 100, 0), (1, 0, 140, 0), (1, 0, 145, 0), (1, 0, 150, 0) ).map(i => Row(i._1, i._2, i._3, i._4))) } test("SPARK-8976 Wrong Result for Rollup #3") { checkAnswer(sql( """ |SELECT count(*) AS cnt, key % 5 AS k1, key-5 AS k2, grouping_id() AS k3 |FROM (SELECT key, key%2, key - 5 FROM src) t GROUP BY key%5, key-5 |WITH ROLLUP ORDER BY cnt, k1, k2, k3 LIMIT 10 """.stripMargin), Seq( (1, 0, 5, 0), (1, 0, 15, 0), (1, 0, 25, 0), (1, 0, 60, 0), (1, 0, 75, 0), (1, 0, 80, 0), (1, 0, 100, 0), (1, 0, 140, 0), (1, 0, 145, 0), (1, 0, 150, 0) ).map(i => Row(i._1, i._2, i._3, i._4))) } test("SPARK-8976 Wrong Result for CUBE #1") { checkAnswer(sql( "SELECT count(*) AS cnt, key % 5, grouping_id() FROM src GROUP BY key%5 WITH CUBE"), Seq( (113, 3, 0), (91, 0, 0), (500, null, 1), (84, 1, 0), (105, 2, 0), (107, 4, 0) ).map(i => Row(i._1, i._2, i._3))) } test("SPARK-8976 Wrong Result for CUBE #2") { checkAnswer(sql( """ |SELECT count(*) AS cnt, key % 5 AS k1, key-5 AS k2, grouping_id() AS k3 |FROM (SELECT key, key%2, key - 5 FROM src) t GROUP BY key%5, key-5 |WITH CUBE ORDER BY cnt, k1, k2, k3 LIMIT 10 """.stripMargin), Seq( (1, null, -3, 2), (1, null, -1, 2), (1, null, 3, 2), (1, null, 4, 2), (1, null, 5, 2), (1, null, 6, 2), (1, null, 12, 2), (1, null, 14, 2), (1, null, 15, 2), (1, null, 22, 2) ).map(i => Row(i._1, i._2, i._3, i._4))) } test("SPARK-8976 Wrong Result for GroupingSet") { checkAnswer(sql( """ |SELECT count(*) AS cnt, key % 5 AS k1, key-5 AS k2, grouping_id() AS k3 |FROM (SELECT key, key%2, key - 5 FROM src) t GROUP BY key%5, key-5 |GROUPING SETS (key%5, key-5) ORDER BY cnt, k1, k2, k3 LIMIT 10 """.stripMargin), Seq( (1, null, -3, 2), (1, null, -1, 2), (1, null, 3, 2), (1, null, 4, 2), (1, null, 5, 2), (1, null, 6, 2), (1, null, 12, 2), (1, null, 14, 2), (1, null, 15, 2), (1, null, 22, 2) ).map(i => Row(i._1, i._2, i._3, i._4))) } ignore("SPARK-10562: partition by column with mixed case name") { withTable("tbl10562") 
{ val df = Seq(2012 -> "a").toDF("Year", "val") df.write.partitionBy("Year").saveAsTable("tbl10562") checkAnswer(sql("SELECT year FROM tbl10562"), Row(2012)) checkAnswer(sql("SELECT Year FROM tbl10562"), Row(2012)) checkAnswer(sql("SELECT yEAr FROM tbl10562"), Row(2012)) // TODO(ekl) this is causing test flakes [SPARK-18167], but we think the issue is derby specific // checkAnswer(sql("SELECT val FROM tbl10562 WHERE Year > 2015"), Nil) checkAnswer(sql("SELECT val FROM tbl10562 WHERE Year == 2012"), Row("a")) } } test("SPARK-11453: append data to partitioned table") { withTable("tbl11453") { Seq("1" -> "10", "2" -> "20").toDF("i", "j") .write.partitionBy("i").saveAsTable("tbl11453") Seq("3" -> "30").toDF("i", "j") .write.mode(SaveMode.Append).partitionBy("i").saveAsTable("tbl11453") checkAnswer( spark.read.table("tbl11453").select("i", "j").orderBy("i"), Row("1", "10") :: Row("2", "20") :: Row("3", "30") :: Nil) // make sure case sensitivity is correct. Seq("4" -> "40").toDF("i", "j") .write.mode(SaveMode.Append).partitionBy("I").saveAsTable("tbl11453") checkAnswer( spark.read.table("tbl11453").select("i", "j").orderBy("i"), Row("1", "10") :: Row("2", "20") :: Row("3", "30") :: Row("4", "40") :: Nil) } } test("SPARK-11590: use native json_tuple in lateral view") { checkAnswer(sql( """ |SELECT a, b |FROM (SELECT '{"f1": "value1", "f2": 12}' json) test |LATERAL VIEW json_tuple(json, 'f1', 'f2') jt AS a, b """.stripMargin), Row("value1", "12")) // we should use `c0`, `c1`... as the name of fields if no alias is provided, to follow hive. checkAnswer(sql( """ |SELECT c0, c1 |FROM (SELECT '{"f1": "value1", "f2": 12}' json) test |LATERAL VIEW json_tuple(json, 'f1', 'f2') jt """.stripMargin), Row("value1", "12")) // we can also use `json_tuple` in project list. checkAnswer(sql( """ |SELECT json_tuple(json, 'f1', 'f2') |FROM (SELECT '{"f1": "value1", "f2": 12}' json) test """.stripMargin), Row("value1", "12")) // we can also mix `json_tuple` with other project expressions. checkAnswer(sql( """ |SELECT json_tuple(json, 'f1', 'f2'), 3.14, str |FROM (SELECT '{"f1": "value1", "f2": 12}' json, 'hello' as str) test """.stripMargin), Row("value1", "12", BigDecimal("3.14"), "hello")) } test("multi-insert with lateral view") { withTempView("t1") { spark.range(10) .select(array($"id", $"id" + 1).as("arr"), $"id") .createOrReplaceTempView("source") withTable("dest1", "dest2") { sql("CREATE TABLE dest1 (i INT)") sql("CREATE TABLE dest2 (i INT)") sql( """ |FROM source |INSERT OVERWRITE TABLE dest1 |SELECT id |WHERE id > 3 |INSERT OVERWRITE TABLE dest2 |select col LATERAL VIEW EXPLODE(arr) exp AS col |WHERE col > 3 """.stripMargin) checkAnswer( spark.table("dest1"), sql("SELECT id FROM source WHERE id > 3")) checkAnswer( spark.table("dest2"), sql("SELECT col FROM source LATERAL VIEW EXPLODE(arr) exp AS col WHERE col > 3")) } } } test("derived from Hive query file: drop_database_removes_partition_dirs.q") { // This test verifies that if a partition exists outside a table's current location when the // database is dropped the partition's location is dropped as well. 
sql("DROP database if exists test_database CASCADE") sql("CREATE DATABASE test_database") val previousCurrentDB = sessionState.catalog.getCurrentDatabase sql("USE test_database") sql("drop table if exists test_table") val tempDir = System.getProperty("test.tmp.dir") assert(tempDir != null, "TestHive should set test.tmp.dir.") sql( """ |CREATE TABLE test_table (key int, value STRING) |PARTITIONED BY (part STRING) |STORED AS RCFILE |LOCATION 'file:${system:test.tmp.dir}/drop_database_removes_partition_dirs_table' """.stripMargin) sql( """ |ALTER TABLE test_table ADD PARTITION (part = '1') |LOCATION 'file:${system:test.tmp.dir}/drop_database_removes_partition_dirs_table2/part=1' """.stripMargin) sql( """ |INSERT OVERWRITE TABLE test_table PARTITION (part = '1') |SELECT * FROM default.src """.stripMargin) checkAnswer( sql("select part, key, value from test_table"), sql("select '1' as part, key, value from default.src") ) val path = new Path( new Path(s"file:$tempDir"), "drop_database_removes_partition_dirs_table2") val fs = path.getFileSystem(sparkContext.hadoopConfiguration) // The partition dir is not empty. assert(fs.listStatus(new Path(path, "part=1")).nonEmpty) sql(s"USE $previousCurrentDB") sql("DROP DATABASE test_database CASCADE") // This table dir should not exist after we drop the entire database with the mode // of CASCADE. This probably indicates a Hive bug, which returns the wrong table // root location. So, the table's directory still there. We should change the condition // to fs.exists(path) after we handle fs operations. assert( fs.exists(path), "Thank you for making the changes of letting Spark SQL handle filesystem operations " + "for DDL commands. Originally, Hive metastore does not delete the table root directory " + "for this case. Now, please change this condition to !fs.exists(path).") } test("derived from Hive query file: drop_table_removes_partition_dirs.q") { // This test verifies that if a partition exists outside the table's current location when the // table is dropped the partition's location is dropped as well. sql("drop table if exists test_table") val tempDir = System.getProperty("test.tmp.dir") assert(tempDir != null, "TestHive should set test.tmp.dir.") sql( """ |CREATE TABLE test_table (key int, value STRING) |PARTITIONED BY (part STRING) |STORED AS RCFILE |LOCATION 'file:${system:test.tmp.dir}/drop_table_removes_partition_dirs_table2' """.stripMargin) sql( """ |ALTER TABLE test_table ADD PARTITION (part = '1') |LOCATION 'file:${system:test.tmp.dir}/drop_table_removes_partition_dirs_table2/part=1' """.stripMargin) sql( """ |INSERT OVERWRITE TABLE test_table PARTITION (part = '1') |SELECT * FROM default.src """.stripMargin) checkAnswer( sql("select part, key, value from test_table"), sql("select '1' as part, key, value from src") ) val path = new Path(new Path(s"file:$tempDir"), "drop_table_removes_partition_dirs_table2") val fs = path.getFileSystem(sparkContext.hadoopConfiguration) // The partition dir is not empty. 
assert(fs.listStatus(new Path(path, "part=1")).nonEmpty) sql("drop table test_table") assert(fs.exists(path), "This is an external table, so the data should not have been dropped") } test("select partitioned table") { val table = "table_with_partition" withTable(table) { sql( s""" |CREATE TABLE $table(c1 string) |PARTITIONED BY (p1 string,p2 string,p3 string,p4 string,p5 string) """.stripMargin) sql( s""" |INSERT OVERWRITE TABLE $table |PARTITION (p1='a',p2='b',p3='c',p4='d',p5='e') |SELECT 'blarr' """.stripMargin) // project list is the same order of paritioning columns in table definition checkAnswer( sql(s"SELECT p1, p2, p3, p4, p5, c1 FROM $table"), Row("a", "b", "c", "d", "e", "blarr") :: Nil) // project list does not have the same order of paritioning columns in table definition checkAnswer( sql(s"SELECT p2, p3, p4, p1, p5, c1 FROM $table"), Row("b", "c", "d", "a", "e", "blarr") :: Nil) // project list contains partial partition columns in table definition checkAnswer( sql(s"SELECT p2, p1, p5, c1 FROM $table"), Row("b", "a", "e", "blarr") :: Nil) } } test("SPARK-14981: DESC not supported for sorting columns") { withTable("t") { val cause = intercept[ParseException] { sql( """CREATE TABLE t USING PARQUET |OPTIONS (PATH '/path/to/file') |CLUSTERED BY (a) SORTED BY (b DESC) INTO 2 BUCKETS |AS SELECT 1 AS a, 2 AS b """.stripMargin ) } assert(cause.getMessage.contains("Column ordering must be ASC, was 'DESC'")) } } test("insert into datasource table") { withTable("tbl") { sql("CREATE TABLE tbl(i INT, j STRING) USING parquet") Seq(1 -> "a").toDF("i", "j").write.mode("overwrite").insertInto("tbl") checkAnswer(sql("SELECT * FROM tbl"), Row(1, "a")) } } test("spark-15557 promote string test") { withTable("tbl") { sql("CREATE TABLE tbl(c1 string, c2 string)") sql("insert into tbl values ('3', '2.3')") checkAnswer( sql("select (cast (99 as decimal(19,6)) + cast('3' as decimal)) * cast('2.3' as decimal)"), Row(204.0) ) checkAnswer( sql("select (cast(99 as decimal(19,6)) + '3') *'2.3' from tbl"), Row(234.6) ) checkAnswer( sql("select (cast(99 as decimal(19,6)) + c1) * c2 from tbl"), Row(234.6) ) } } test("SPARK-15752 optimize metadata only query for hive table") { withSQLConf(SQLConf.OPTIMIZER_METADATA_ONLY.key -> "true") { withTable("data_15752", "srcpart_15752", "srctext_15752") { val df = Seq((1, "2"), (3, "4")).toDF("key", "value") df.createOrReplaceTempView("data_15752") sql( """ |CREATE TABLE srcpart_15752 (col1 INT, col2 STRING) |PARTITIONED BY (partcol1 INT, partcol2 STRING) STORED AS parquet """.stripMargin) for (partcol1 <- Seq(0, 1); partcol2 <- Seq("a", "b")) { sql( s""" |INSERT OVERWRITE TABLE srcpart_15752 |PARTITION (partcol1='$partcol1', partcol2='$partcol2') |select key, value from data_15752 """.stripMargin) } checkAnswer( sql("select partcol1 from srcpart_15752 group by partcol1"), Row(0) :: Row(1) :: Nil) checkAnswer( sql("select partcol1 from srcpart_15752 where partcol1 = 1 group by partcol1"), Row(1)) checkAnswer( sql("select partcol1, count(distinct partcol2) from srcpart_15752 group by partcol1"), Row(0, 2) :: Row(1, 2) :: Nil) checkAnswer( sql("select partcol1, count(distinct partcol2) from srcpart_15752 where partcol1 = 1 " + "group by partcol1"), Row(1, 2) :: Nil) checkAnswer(sql("select distinct partcol1 from srcpart_15752"), Row(0) :: Row(1) :: Nil) checkAnswer(sql("select distinct partcol1 from srcpart_15752 where partcol1 = 1"), Row(1)) checkAnswer( sql("select distinct col from (select partcol1 + 1 as col from srcpart_15752 " + "where partcol1 = 1) t"), Row(2)) 
checkAnswer(sql("select distinct partcol1 from srcpart_15752 where partcol1 = 1"), Row(1)) checkAnswer(sql("select max(partcol1) from srcpart_15752"), Row(1)) checkAnswer(sql("select max(partcol1) from srcpart_15752 where partcol1 = 1"), Row(1)) checkAnswer(sql("select max(partcol1) from (select partcol1 from srcpart_15752) t"), Row(1)) checkAnswer( sql("select max(col) from (select partcol1 + 1 as col from srcpart_15752 " + "where partcol1 = 1) t"), Row(2)) sql( """ |CREATE TABLE srctext_15752 (col1 INT, col2 STRING) |PARTITIONED BY (partcol1 INT, partcol2 STRING) STORED AS textfile """.stripMargin) for (partcol1 <- Seq(0, 1); partcol2 <- Seq("a", "b")) { sql( s""" |INSERT OVERWRITE TABLE srctext_15752 |PARTITION (partcol1='$partcol1', partcol2='$partcol2') |select key, value from data_15752 """.stripMargin) } checkAnswer( sql("select partcol1 from srctext_15752 group by partcol1"), Row(0) :: Row(1) :: Nil) checkAnswer( sql("select partcol1 from srctext_15752 where partcol1 = 1 group by partcol1"), Row(1)) checkAnswer( sql("select partcol1, count(distinct partcol2) from srctext_15752 group by partcol1"), Row(0, 2) :: Row(1, 2) :: Nil) checkAnswer( sql("select partcol1, count(distinct partcol2) from srctext_15752 where partcol1 = 1 " + "group by partcol1"), Row(1, 2) :: Nil) checkAnswer(sql("select distinct partcol1 from srctext_15752"), Row(0) :: Row(1) :: Nil) checkAnswer(sql("select distinct partcol1 from srctext_15752 where partcol1 = 1"), Row(1)) checkAnswer( sql("select distinct col from (select partcol1 + 1 as col from srctext_15752 " + "where partcol1 = 1) t"), Row(2)) checkAnswer(sql("select max(partcol1) from srctext_15752"), Row(1)) checkAnswer(sql("select max(partcol1) from srctext_15752 where partcol1 = 1"), Row(1)) checkAnswer(sql("select max(partcol1) from (select partcol1 from srctext_15752) t"), Row(1)) checkAnswer( sql("select max(col) from (select partcol1 + 1 as col from srctext_15752 " + "where partcol1 = 1) t"), Row(2)) } } } test("SPARK-17354: Partitioning by dates/timestamps works with Parquet vectorized reader") { withSQLConf(SQLConf.PARQUET_VECTORIZED_READER_ENABLED.key -> "true") { sql( """CREATE TABLE order(id INT) |PARTITIONED BY (pd DATE, pt TIMESTAMP) |STORED AS PARQUET """.stripMargin) sql("set hive.exec.dynamic.partition.mode=nonstrict") sql( """INSERT INTO TABLE order PARTITION(pd, pt) |SELECT 1 AS id, CAST('1990-02-24' AS DATE) AS pd, CAST('1990-02-24' AS TIMESTAMP) AS pt """.stripMargin) val actual = sql("SELECT * FROM order") val expected = sql( "SELECT 1 AS id, CAST('1990-02-24' AS DATE) AS pd, CAST('1990-02-24' AS TIMESTAMP) AS pt") checkAnswer(actual, expected) sql("DROP TABLE order") } } test("SPARK-17108: Fix BIGINT and INT comparison failure in spark sql") { sql("create table t1(a map<bigint, array<string>>)") sql("select * from t1 where a[1] is not null") sql("create table t2(a map<int, array<string>>)") sql("select * from t2 where a[1] is not null") sql("create table t3(a map<bigint, array<string>>)") sql("select * from t3 where a[1L] is not null") } test("SPARK-17796 Support wildcard character in filename for LOAD DATA LOCAL INPATH") { withTempDir { dir => val path = dir.toURI.toString.stripSuffix("/") val dirPath = dir.getAbsoluteFile for (i <- 1 to 3) { Files.write(s"$i", new File(dirPath, s"part-r-0000$i"), StandardCharsets.UTF_8) } for (i <- 5 to 7) { Files.write(s"$i", new File(dirPath, s"part-s-0000$i"), StandardCharsets.UTF_8) } withTable("load_t") { sql("CREATE TABLE load_t (a STRING)") sql(s"LOAD DATA LOCAL INPATH '$path/*part-r*' INTO 
TABLE load_t") checkAnswer(sql("SELECT * FROM load_t"), Seq(Row("1"), Row("2"), Row("3"))) val m = intercept[AnalysisException] { sql("LOAD DATA LOCAL INPATH '/non-exist-folder/*part*' INTO TABLE load_t") }.getMessage assert(m.contains("LOAD DATA input path does not exist")) val m2 = intercept[AnalysisException] { sql(s"LOAD DATA LOCAL INPATH '$path*/*part*' INTO TABLE load_t") }.getMessage assert(m2.contains("LOAD DATA input path allows only filename wildcard")) } } } test("Insert overwrite with partition") { withTable("tableWithPartition") { sql( """ |CREATE TABLE tableWithPartition (key int, value STRING) |PARTITIONED BY (part STRING) """.stripMargin) sql( """ |INSERT OVERWRITE TABLE tableWithPartition PARTITION (part = '1') |SELECT * FROM default.src """.stripMargin) checkAnswer( sql("SELECT part, key, value FROM tableWithPartition"), sql("SELECT '1' AS part, key, value FROM default.src") ) sql( """ |INSERT OVERWRITE TABLE tableWithPartition PARTITION (part = '1') |SELECT * FROM VALUES (1, "one"), (2, "two"), (3, null) AS data(key, value) """.stripMargin) checkAnswer( sql("SELECT part, key, value FROM tableWithPartition"), sql( """ |SELECT '1' AS part, key, value FROM VALUES |(1, "one"), (2, "two"), (3, null) AS data(key, value) """.stripMargin) ) } } test("SPARK-19292: filter with partition columns should be case-insensitive on Hive tables") { withTable("tbl") { withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") { sql("CREATE TABLE tbl(i int, j int) USING hive PARTITIONED BY (j)") sql("INSERT INTO tbl PARTITION(j=10) SELECT 1") checkAnswer(spark.table("tbl"), Row(1, 10)) checkAnswer(sql("SELECT i, j FROM tbl WHERE J=10"), Row(1, 10)) checkAnswer(spark.table("tbl").filter($"J" === 10), Row(1, 10)) } } } test("SPARK-17409: Do Not Optimize Query in CTAS (Hive Serde Table) More Than Once") { withTable("bar") { withTempView("foo") { sql("select 0 as id").createOrReplaceTempView("foo") // If we optimize the query in CTAS more than once, the following saveAsTable will fail // with the error: `GROUP BY position 0 is not in select list (valid range is [1, 1])` sql("SELECT * FROM foo group by id").toDF().write.format("hive").saveAsTable("bar") checkAnswer(spark.table("bar"), Row(0) :: Nil) val tableMetadata = spark.sessionState.catalog.getTableMetadata(TableIdentifier("bar")) assert(tableMetadata.provider == Some("hive"), "the expected table is a Hive serde table") } } } test("Auto alias construction of get_json_object") { val df = Seq(("1", """{"f1": "value1", "f5": 5.23}""")).toDF("key", "jstring") val expectedMsg = "Cannot create a table having a column whose name contains commas " + "in Hive metastore. 
Table: `default`.`t`; Column: get_json_object(jstring, $.f1)" withTable("t") { val e = intercept[AnalysisException] { df.select($"key", functions.get_json_object($"jstring", "$.f1")) .write.format("hive").saveAsTable("t") }.getMessage assert(e.contains(expectedMsg)) } withTempView("tempView") { withTable("t") { df.createTempView("tempView") val e = intercept[AnalysisException] { sql("CREATE TABLE t AS SELECT key, get_json_object(jstring, '$.f1') FROM tempView") }.getMessage assert(e.contains(expectedMsg)) } } } test("SPARK-19912 String literals should be escaped for Hive metastore partition pruning") { withTable("spark_19912") { Seq( (1, "p1", "q1"), (2, "'", "q2"), (3, "\"", "q3"), (4, "p1\" and q=\"q1", "q4") ).toDF("a", "p", "q").write.partitionBy("p", "q").saveAsTable("spark_19912") val table = spark.table("spark_19912") checkAnswer(table.filter($"p" === "'").select($"a"), Row(2)) checkAnswer(table.filter($"p" === "\"").select($"a"), Row(3)) checkAnswer(table.filter($"p" === "p1\" and q=\"q1").select($"a"), Row(4)) } } }
aokolnychyi/spark
sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
Scala
apache-2.0
75,663
package spark import scala.math.random import java.net.InetAddress import scala.util.Random import scala.util.Random._ import scala.collection.mutable._ import scala.io.Source import scala.io.Source._ /* Todo: This needs to be made threadsafe to work for multiple tasks running on one machine */ object LocalRandom { val seed = InetAddress.getLocalHost().hashCode() val rand = new Random(seed) var tempTour = randomCycle(7663, rand) var data = ArrayBuffer.empty[Array[Double]] for(line <- Source.fromFile("/home/princeton_ram/weakshared/ym7663.tsp").getLines()) { var city = new Array[Double](2) var node = line.split(' ') city(0) = node(1).toDouble city(1) = node(2).toDouble data += city } def getData() : ArrayBuffer[Array[Double]] = { return data } def getRandom () : Random = { return rand } def randomCycle (size: Int, rand: Random): ArrayBuffer[Int] = { var randNodes = ArrayBuffer.empty[Int] for(i <- 0 until size) { randNodes += i } return rand.shuffle(randNodes) } }
jperla/spark-advancers
core/src/main/scala/spark/LocalStorage.scala
Scala
bsd-3-clause
1,153
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.catalyst.optimizer import org.apache.spark.sql.catalyst.dsl.expressions._ import org.apache.spark.sql.catalyst.dsl.plans._ import org.apache.spark.sql.catalyst.expressions._ import org.apache.spark.sql.catalyst.expressions.aggregate.First import org.apache.spark.sql.catalyst.plans.PlanTest import org.apache.spark.sql.catalyst.plans.logical.{LocalRelation, LogicalPlan} import org.apache.spark.sql.catalyst.rules.RuleExecutor class OptimizeWindowFunctionsSuite extends PlanTest { object Optimize extends RuleExecutor[LogicalPlan] { val batches = Batch("OptimizeWindowFunctions", FixedPoint(10), OptimizeWindowFunctions) :: Nil } val testRelation = LocalRelation('a.double, 'b.double, 'c.string) val a = testRelation.output(0) val b = testRelation.output(1) val c = testRelation.output(2) test("replace first(col) by nth_value(col, 1)") { val inputPlan = testRelation.select( WindowExpression( First(a, false).toAggregateExpression(), WindowSpecDefinition(b :: Nil, c.asc :: Nil, SpecifiedWindowFrame(RowFrame, UnboundedPreceding, CurrentRow)))) val correctAnswer = testRelation.select( WindowExpression( NthValue(a, Literal(1), false), WindowSpecDefinition(b :: Nil, c.asc :: Nil, SpecifiedWindowFrame(RowFrame, UnboundedPreceding, CurrentRow)))) val optimized = Optimize.execute(inputPlan) assert(optimized == correctAnswer) } test("can't replace first(col) by nth_value(col, 1) if the window frame type is range") { val inputPlan = testRelation.select( WindowExpression( First(a, false).toAggregateExpression(), WindowSpecDefinition(b :: Nil, c.asc :: Nil, SpecifiedWindowFrame(RangeFrame, UnboundedPreceding, CurrentRow)))) val optimized = Optimize.execute(inputPlan) assert(optimized == inputPlan) } test("can't replace first(col) by nth_value(col, 1) if the window frame isn't ordered") { val inputPlan = testRelation.select( WindowExpression( First(a, false).toAggregateExpression(), WindowSpecDefinition(b :: Nil, Nil, SpecifiedWindowFrame(RowFrame, UnboundedPreceding, CurrentRow)))) val optimized = Optimize.execute(inputPlan) assert(optimized == inputPlan) } }
shuangshuangwang/spark
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizeWindowFunctionsSuite.scala
Scala
apache-2.0
3,116
/* * GraphemeFrameImpl.scala * (Mellite) * * Copyright (c) 2012-2022 Hanns Holger Rutz. All rights reserved. * * This software is published under the GNU Affero General Public License v3+ * * * For further information, please contact Hanns Holger Rutz at * contact@sciss.de */ package de.sciss.mellite.impl.grapheme import de.sciss.desktop.{KeyStrokes, Menu, Window} import de.sciss.lucre.edit.UndoManager import de.sciss.lucre.expr.CellView import de.sciss.lucre.synth.Txn import de.sciss.mellite.impl.WorkspaceWindowImpl import de.sciss.mellite.{Application, GraphemeFrame, GraphemeView, UniverseHandler} import de.sciss.proc.Grapheme import scala.swing.event.Key object GraphemeFrameImpl { def apply[T <: Txn[T]](obj: Grapheme[T]) (implicit tx: T, handler: UniverseHandler[T]): GraphemeFrame[T] = handler(obj, GraphemeFrame)(newInstance(obj)) private def newInstance[T <: Txn[T]](obj: Grapheme[T]) (implicit tx: T, handler: UniverseHandler[T]): GraphemeFrame[T] = { implicit val undoMgr: UndoManager[T] = UndoManager() val tlv = GraphemeView[T](obj) val name = CellView.name(obj) val res = new Impl(tlv) res.init().setTitle(name.map(n => s"$n : Grapheme")) res } private final class Impl[T <: Txn[T]](val view: GraphemeView[T])(implicit val handler: UniverseHandler[T]) extends GraphemeFrame[T] with WorkspaceWindowImpl[T] { override def newWindow()(implicit tx: T): GraphemeFrame[T] = newInstance(view.obj) override protected def initGUI(): Unit = { super.initGUI() val mf = Application.windowHandler.menuFactory val me = Some(window) bindMenus( "edit.select-all" -> view.actionSelectAll, "edit.delete" -> view.actionDelete ) // --- grapheme menu --- import KeyStrokes._ import Menu.{Group, Item, proxy} val mGrapheme = Group("grapheme", "Grapheme") .add(Item("insert-span" , proxy(("Insert Span...", menu1 + shift + Key.E)))) // .add(Item("clear-span" , view.actionClearSpan )) // .add(Item("remove-span" , view.actionRemoveSpan)) // .add(Item("dup-span-to-pos" , "Duplicate Span to Cursor")) .addLine() // .add(Item("nudge-amount" , "Nudge Amount...")) // .add(Item("nudge-left" , proxy(("Nudge Objects Backward", plain + Key.Minus)))) // .add(Item("nudge-right" , proxy(("Nudge Objects Forward", plain + Key.Plus)))) // .addLine() .add(Item("select-following" , view.actionSelectFollowing)) // .add(Item("align-obj-start-to-pos", view.actionAlignObjectsToCursor)) // .add(Item("split-objects" , view.actionSplitObjects)) // .add(Item("clean-up-objects" , view.actionCleanUpObjects)) // .addLine() // .add(Item("drop-marker" , view.actionDropMarker)) // .add(Item("drop-named-marker" , view.actionDropNamedMarker)) window.reactions += { case Window.Activated(_) => view.canvas.canvasComponent.requestFocusInWindow() } mf.add(me, mGrapheme) } } }
Sciss/Mellite
app/src/main/scala/de/sciss/mellite/impl/grapheme/GraphemeFrameImpl.scala
Scala
agpl-3.0
3,242
package shapeless3.data import scala.compiletime._ trait Monoidal { type to[_] <: Tuple type length[m] = Monoidal.length[to[m]] } object Monoidal { import Tuple._ type length[m <: Tuple] = Size[m] } trait UnboundedMonoidal[T0[_, _], U0] extends Monoidal { type to[t] <: Tuple = t match { case T0[hd, tl] => hd *: to[tl] case U0 => EmptyTuple } } object pairs extends UnboundedMonoidal[Tuple2, Unit] object MonoidalTest { // Compiles fine here type p = (Int, (String, (Boolean, Unit))) summon[pairs.length[p] =:= 3] }
dotty-staging/dotty
tests/pos/i11250/1.scala
Scala
apache-2.0
549
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.connector import java.util import scala.collection.JavaConverters._ import org.apache.spark.sql.{AnalysisException, DataFrame, SQLContext} import org.apache.spark.sql.catalyst.analysis.{AnalysisSuite, NamedRelation} import org.apache.spark.sql.catalyst.expressions.{AttributeReference, EqualTo, Literal} import org.apache.spark.sql.catalyst.plans.logical._ import org.apache.spark.sql.connector.catalog.{CatalogPlugin, Identifier, Table, TableCapability, TableProvider} import org.apache.spark.sql.connector.catalog.TableCapability._ import org.apache.spark.sql.execution.datasources.DataSource import org.apache.spark.sql.execution.datasources.v2.{DataSourceV2Relation, TableCapabilityCheck} import org.apache.spark.sql.execution.streaming.{Offset, Source, StreamingRelation, StreamingRelationV2} import org.apache.spark.sql.sources.StreamSourceProvider import org.apache.spark.sql.test.SharedSparkSession import org.apache.spark.sql.types.{LongType, StringType, StructType} import org.apache.spark.sql.util.CaseInsensitiveStringMap class TableCapabilityCheckSuite extends AnalysisSuite with SharedSparkSession { private val emptyMap = CaseInsensitiveStringMap.empty private def createStreamingRelation(table: Table, v1Relation: Option[StreamingRelation]) = { StreamingRelationV2( new FakeV2Provider, "fake", table, CaseInsensitiveStringMap.empty(), TableCapabilityCheckSuite.schema.toAttributes, v1Relation)(spark) } private def createStreamingRelationV1() = { StreamingRelation(DataSource(spark, classOf[TestStreamSourceProvider].getName)) } test("batch scan: check missing capabilities") { val e = intercept[AnalysisException] { TableCapabilityCheck.apply( DataSourceV2Relation.create(CapabilityTable(), None, None, emptyMap) ) } assert(e.message.contains("does not support batch scan")) } test("streaming scan: check missing capabilities") { val e = intercept[AnalysisException] { TableCapabilityCheck.apply(createStreamingRelation(CapabilityTable(), None)) } assert(e.message.contains("does not support either micro-batch or continuous scan")) } test("streaming scan: mix micro-batch sources and continuous sources") { val microBatchOnly = createStreamingRelation(CapabilityTable(MICRO_BATCH_READ), None) val continuousOnly = createStreamingRelation(CapabilityTable(CONTINUOUS_READ), None) val both = createStreamingRelation(CapabilityTable(MICRO_BATCH_READ, CONTINUOUS_READ), None) val v1Source = createStreamingRelationV1() TableCapabilityCheck.apply(Union(microBatchOnly, microBatchOnly)) TableCapabilityCheck.apply(Union(continuousOnly, continuousOnly)) TableCapabilityCheck.apply(Union(both, microBatchOnly)) TableCapabilityCheck.apply(Union(both, continuousOnly)) TableCapabilityCheck.apply(Union(both, v1Source)) val e = 
intercept[AnalysisException] { TableCapabilityCheck.apply(Union(microBatchOnly, continuousOnly)) } assert(e.getMessage.contains( "The streaming sources in a query do not have a common supported execution mode")) } test("AppendData: check missing capabilities") { val plan = AppendData.byName( DataSourceV2Relation.create(CapabilityTable(), None, None, emptyMap), TestRelation) val exc = intercept[AnalysisException]{ TableCapabilityCheck.apply(plan) } assert(exc.getMessage.contains("does not support append in batch mode")) } test("AppendData: check correct capabilities") { Seq(BATCH_WRITE, V1_BATCH_WRITE).foreach { write => val plan = AppendData.byName( DataSourceV2Relation.create(CapabilityTable(write), None, None, emptyMap), TestRelation) TableCapabilityCheck.apply(plan) } } test("Truncate: check missing capabilities") { Seq(CapabilityTable(), CapabilityTable(BATCH_WRITE), CapabilityTable(V1_BATCH_WRITE), CapabilityTable(TRUNCATE), CapabilityTable(OVERWRITE_BY_FILTER)).foreach { table => val plan = OverwriteByExpression.byName( DataSourceV2Relation.create(table, None, None, emptyMap), TestRelation, Literal(true)) val exc = intercept[AnalysisException]{ TableCapabilityCheck.apply(plan) } assert(exc.getMessage.contains("does not support truncate in batch mode")) } } test("Truncate: check correct capabilities") { Seq(CapabilityTable(BATCH_WRITE, TRUNCATE), CapabilityTable(V1_BATCH_WRITE, TRUNCATE), CapabilityTable(BATCH_WRITE, OVERWRITE_BY_FILTER), CapabilityTable(V1_BATCH_WRITE, OVERWRITE_BY_FILTER)).foreach { table => val plan = OverwriteByExpression.byName( DataSourceV2Relation.create(table, None, None, emptyMap), TestRelation, Literal(true)) TableCapabilityCheck.apply(plan) } } test("OverwriteByExpression: check missing capabilities") { Seq(CapabilityTable(), CapabilityTable(V1_BATCH_WRITE), CapabilityTable(BATCH_WRITE), CapabilityTable(OVERWRITE_BY_FILTER)).foreach { table => val plan = OverwriteByExpression.byName( DataSourceV2Relation.create(table, None, None, emptyMap), TestRelation, EqualTo(AttributeReference("x", LongType)(), Literal(5))) val exc = intercept[AnalysisException]{ TableCapabilityCheck.apply(plan) } assert(exc.getMessage.contains("does not support overwrite by filter in batch mode")) } } test("OverwriteByExpression: check correct capabilities") { Seq(BATCH_WRITE, V1_BATCH_WRITE).foreach { write => val table = CapabilityTable(write, OVERWRITE_BY_FILTER) val plan = OverwriteByExpression.byName( DataSourceV2Relation.create(table, None, None, emptyMap), TestRelation, EqualTo(AttributeReference("x", LongType)(), Literal(5))) TableCapabilityCheck.apply(plan) } } test("OverwritePartitionsDynamic: check missing capabilities") { Seq(CapabilityTable(), CapabilityTable(BATCH_WRITE), CapabilityTable(OVERWRITE_DYNAMIC)).foreach { table => val plan = OverwritePartitionsDynamic.byName( DataSourceV2Relation.create(table, None, None, emptyMap), TestRelation) val exc = intercept[AnalysisException] { TableCapabilityCheck.apply(plan) } assert(exc.getMessage.contains("does not support dynamic overwrite in batch mode")) } } test("OverwritePartitionsDynamic: check correct capabilities") { val table = CapabilityTable(BATCH_WRITE, OVERWRITE_DYNAMIC) val plan = OverwritePartitionsDynamic.byName( DataSourceV2Relation.create(table, None, None, emptyMap), TestRelation) TableCapabilityCheck.apply(plan) } } private object TableCapabilityCheckSuite { val schema: StructType = new StructType().add("id", LongType).add("data", StringType) } private case object TestRelation extends LeafNode with 
NamedRelation { override def name: String = "source_relation" override def output: Seq[AttributeReference] = TableCapabilityCheckSuite.schema.toAttributes } private case class CapabilityTable(_capabilities: TableCapability*) extends Table { override def name(): String = "capability_test_table" override def schema(): StructType = TableCapabilityCheckSuite.schema override def capabilities(): util.Set[TableCapability] = _capabilities.toSet.asJava } private class TestStreamSourceProvider extends StreamSourceProvider { override def sourceSchema( sqlContext: SQLContext, schema: Option[StructType], providerName: String, parameters: Map[String, String]): (String, StructType) = { "test" -> TableCapabilityCheckSuite.schema } override def createSource( sqlContext: SQLContext, metadataPath: String, schema: Option[StructType], providerName: String, parameters: Map[String, String]): Source = { new Source { override def schema: StructType = TableCapabilityCheckSuite.schema override def getOffset: Option[Offset] = { throw new UnsupportedOperationException } override def getBatch(start: Option[Offset], end: Offset): DataFrame = { throw new UnsupportedOperationException } override def stop(): Unit = {} } } }
goldmedal/spark
sql/core/src/test/scala/org/apache/spark/sql/connector/TableCapabilityCheckSuite.scala
Scala
apache-2.0
9,147
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package vta.core import vta.util.config._ /** CoreConfig. * * This is one supported configuration for VTA. This file will * be eventually filled out with class configurations that can be * mixed/matched with Shell configurations for different backends. */ class CoreConfig extends Config((site, here, up) => { case CoreKey => CoreParams( batch = 1, blockOut = 16, blockIn = 16, inpBits = 8, wgtBits = 8, uopBits = 32, accBits = 32, outBits = 8, uopMemDepth = 2048, inpMemDepth = 2048, wgtMemDepth = 1024, accMemDepth = 2048, outMemDepth = 2048, instQueueEntries = 512 ) })
Huyuwei/tvm
vta/hardware/chisel/src/main/scala/core/Configs.scala
Scala
apache-2.0
1,564
package org.scalaide.refactoring.internal import org.scalaide.core.internal.jdt.model.ScalaSourceFile import scala.tools.refactoring.analysis.GlobalIndexes import scala.tools.refactoring.implementations import org.scalaide.core.internal.statistics.Features.NotSpecified /** * The Inline Local -- also known as Inline Temp -- refactoring is the dual to Extract Local. * It can be used to eliminate a local value by replacing all references to the local value * by its right-hand side. * * The implementation does not show a wizard but directly applies the changes (ActionWithNoWizard trait). */ class InlineLocal extends RefactoringExecutor with RefactoringExecutorWithoutWizard { def createRefactoring(selectionStart: Int, selectionEnd: Int, file: ScalaSourceFile) = new InlineLocalScalaIdeRefactoring(selectionStart, selectionEnd, file) class InlineLocalScalaIdeRefactoring(start: Int, end: Int, file: ScalaSourceFile) // feature marked as [[NotSpecified]] because it is already categorized as quick assist extends ScalaIdeRefactoring(NotSpecified, "Inline Local", file, start, end) { val refactoring = file.withSourceFile((sourceFile, compiler) => new implementations.InlineLocal with GlobalIndexes { val global = compiler val index = { val tree = askLoadedAndTypedTreeForFile(sourceFile).left.get global.ask(() => GlobalIndex(tree)) } }) getOrElse fail() /** * The refactoring does not take any parameters. */ def refactoringParameters = new refactoring.RefactoringParameters } }
stephenh/scala-ide
org.scala-ide.sdt.core/src/org/scalaide/refactoring/internal/InlineLocal.scala
Scala
bsd-3-clause
1,578
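A minimal source-level sketch of what the Inline Local refactoring described above does; the method and value names are hypothetical and are not part of the scala-ide code.

object InlineLocalSketch {
  // Before: 'greeting' is a local value that the refactoring can eliminate.
  def describe(name: String): String = {
    val greeting = s"Hello, $name"
    greeting + "!"
  }

  // After Inline Local: every reference to 'greeting' is replaced by its right-hand side.
  def describeInlined(name: String): String =
    s"Hello, $name" + "!"
}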
package jinesra.vkMessageHistory import com.typesafe.scalalogging._ import com.typesafe.scalalogging.slf4j._ import scala.xml._ import scala.util.{ Success, Failure } import java.io.InputStreamReader import net.liftweb.json._ import scalaj.http._ import scalax.file.Path import scalaj.http.Http.Request class VkRestClient(accessToken: String) extends LazyLogging { val apiHost = "https://api.vk.com/" val chatHistoryMethod = "method/messages.getHistory" val getUserMethod = "method/users.get" def getMessageHistory(offset: Int, chat_id: Int, count: Int):JValue= { val req = Http(apiHost + chatHistoryMethod). option(HttpOptions.connTimeout(10000)). option(HttpOptions.readTimeout(50000)). params( ("offset", offset.toString), ("count", count.toString), ("chat_id", chat_id.toString), ("rev", "1"), ("access_token", accessToken)) requestToJson(req) } def getUser(userId: Int):JValue = { val req = Http(apiHost + getUserMethod). option(HttpOptions.connTimeout(10000)). option(HttpOptions.readTimeout(50000)). params( ("user_ids", userId.toString)) requestToJson(req) } def requestToJson(req: Request) = req { is => JsonParser.parse(new InputStreamReader(is)) } def getFromIds(json: JObject) = (json \\ "from_id").children.map(x => x.extract[String]) def writeToFile(offset: Int, content: String) = { val p = Path.fromString("/home/arsenij/chatik/chatik_" + offset.toString).createFile(failIfExists = false) p.write(content) } }
arsenij-solovjev/vkMessageHistory
src/main/scala/jinesra/vkMessageHistory/VkRestClient.scala
Scala
apache-2.0
1,631
package cs.ucla.edu.bwaspark.datatype import scala.collection.mutable.MutableList class MemSeedType(rbeg_i: Long, qbeg_i: Int, len_i: Int) { var rBeg: Long = rbeg_i var qBeg: Int = qbeg_i var len: Int = len_i } class MemChainType(pos_i: Long, seeds_i: MutableList[MemSeedType]) { var pos: Long = pos_i var seeds: MutableList[MemSeedType] = seeds_i var seedsRefArray: Array[MemSeedType] = _ def print() { println("The reference position of the chain: " + pos) seeds.map (ele => println("Ref Begin: " + ele.rBeg + ", Query Begin: " + ele.qBeg + ", Length: " + ele.len)) } }
peterpengwei/bwa-spark-fpga
src/main/scala/cs/ucla/edu/bwaspark/datatype/MemChainType.scala
Scala
gpl-2.0
601
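A brief construction sketch for the MemSeedType/MemChainType data classes above; the reference positions, query offsets, and lengths are made-up values used purely for illustration.

import scala.collection.mutable.MutableList
import cs.ucla.edu.bwaspark.datatype.{MemChainType, MemSeedType}

object MemChainSketch {
  def main(args: Array[String]): Unit = {
    // Two hypothetical seeds: (reference begin, query begin, length)
    val seeds = MutableList(new MemSeedType(1000L, 0, 19), new MemSeedType(1100L, 25, 40))
    val chain = new MemChainType(1000L, seeds)
    chain.print() // prints the chain position and each seed's coordinates
  }
}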
package uscala.result object MapViewShim { implicit class MapViewImplicitShim[K, V](map: Map[K, V]) { def mapValuesShim[W](f: (V) => W): Map[K, W] = map.view.mapValues(f).toMap } }
albertpastrana/uscala
result/src/test/scala-2.13+/uscala/result/MapViewShim.scala
Scala
mit
190
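A short usage sketch of the mapValuesShim helper above, which wraps the Scala 2.13 view.mapValues to give a strict Map back; the sample map and values are hypothetical.

import uscala.result.MapViewShim._

object MapViewShimSketch {
  def main(args: Array[String]): Unit = {
    val doubled: Map[String, Int] = Map("a" -> 1, "b" -> 2).mapValuesShim(_ * 2)
    println(doubled) // Map(a -> 2, b -> 4)
  }
}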
/* * _ _ * _ __ ___ | |__ | | ___ * | '_ \ / _ \| '_ \| |/ _ \ noble :: norcane blog engine * | | | | (_) | |_) | | __/ Copyright (c) 2016-2018 norcane * |_| |_|\___/|_.__/|_|\___| * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package com.norcane.noble.atom import java.time.ZonedDateTime import java.time.format.DateTimeFormatter /** * Helper functions for formatting the Atom feed. * * @author Vaclav Svejcar (v.svejcar@norcane.cz) */ object AtomUtils { /** * Formats the given datetime into the ISO date string. * * @param date input date * @return output ISO date string */ def isoDate(date: ZonedDateTime): String = date.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME) }
norcane/noble
app/com/norcane/noble/atom/AtomUtils.scala
Scala
apache-2.0
1,259
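A quick usage sketch of AtomUtils.isoDate from the file above, assuming a fixed UTC timestamp chosen only for illustration.

import java.time.{ZoneOffset, ZonedDateTime}
import com.norcane.noble.atom.AtomUtils

object AtomUtilsSketch {
  def main(args: Array[String]): Unit = {
    val published = ZonedDateTime.of(2018, 1, 15, 9, 30, 0, 0, ZoneOffset.UTC)
    // ISO_OFFSET_DATE_TIME renders the offset, so a UTC timestamp should print as 2018-01-15T09:30:00Z
    println(AtomUtils.isoDate(published))
  }
}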
def makeFunction[T](param: => T) = param _
grzegorzbalcerek/scala-book-examples
examples/MakeFunction.scala
Scala
mit
43
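A hedged usage sketch of makeFunction above: eta-expanding the by-name parameter yields a () => T, so the argument is re-evaluated on every application. The def is re-declared inside an object here so the snippet is self-contained; the trailing-underscore form is Scala 2 syntax.

object MakeFunctionSketch {
  def makeFunction[T](param: => T) = param _

  def main(args: Array[String]): Unit = {
    val clock: () => Long = makeFunction(System.nanoTime())
    // Each call re-evaluates System.nanoTime(), so the two printed values normally differ.
    println(clock())
    println(clock())
  }
}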
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.deploy.history import java.io.File import org.mockito.AdditionalAnswers import org.mockito.ArgumentMatchers.{anyBoolean, anyLong, eq => meq} import org.mockito.Mockito.{doAnswer, spy} import org.scalatest.BeforeAndAfter import org.apache.spark.{SparkConf, SparkFunSuite} import org.apache.spark.internal.config.History._ import org.apache.spark.status.KVUtils import org.apache.spark.util.{ManualClock, Utils} import org.apache.spark.util.kvstore.KVStore class HistoryServerDiskManagerSuite extends SparkFunSuite with BeforeAndAfter { private def doReturn(value: Any) = org.mockito.Mockito.doReturn(value, Seq.empty: _*) private val MAX_USAGE = 3L private var testDir: File = _ private var store: KVStore = _ before { testDir = Utils.createTempDir() store = KVUtils.open(new File(testDir, "listing"), "test") } after { store.close() if (testDir != null) { Utils.deleteRecursively(testDir) } } private def mockManager(): HistoryServerDiskManager = { val conf = new SparkConf().set(MAX_LOCAL_DISK_USAGE, MAX_USAGE) val manager = spy(new HistoryServerDiskManager(conf, testDir, store, new ManualClock())) doAnswer(AdditionalAnswers.returnsFirstArg[Long]()).when(manager) .approximateSize(anyLong(), anyBoolean()) manager } test("leasing space") { val manager = mockManager() // Lease all available space. val leaseA = manager.lease(1) val leaseB = manager.lease(1) val leaseC = manager.lease(1) assert(manager.free() === 0) // Revert one lease, get another one. leaseA.rollback() assert(manager.free() > 0) assert(!leaseA.tmpPath.exists()) val leaseD = manager.lease(1) assert(manager.free() === 0) // Committing B should bring the "used" space up to 4, so there shouldn't be space left yet. doReturn(2L).when(manager).sizeOf(meq(leaseB.tmpPath)) val dstB = leaseB.commit("app2", None) assert(manager.free() === 0) assert(manager.committed() === 2) // Rollback C and D, now there should be 1 left. leaseC.rollback() leaseD.rollback() assert(manager.free() === 1) // Release app 2 to make it available for eviction. doReturn(2L).when(manager).sizeOf(meq(dstB)) manager.release("app2", None) assert(manager.committed() === 2) // Emulate an updated event log by replacing the store for lease B. Lease 1, and commit with // size 3. val leaseE = manager.lease(1) doReturn(3L).when(manager).sizeOf(meq(leaseE.tmpPath)) val dstE = leaseE.commit("app2", None) assert(dstE === dstB) assert(dstE.exists()) doReturn(3L).when(manager).sizeOf(meq(dstE)) assert(!leaseE.tmpPath.exists()) assert(manager.free() === 0) manager.release("app2", None) assert(manager.committed() === 3) // Try a big lease that should cause the released app to be evicted. 
val leaseF = manager.lease(6) assert(!dstB.exists()) assert(manager.free() === 0) assert(manager.committed() === 0) // Leasing when no free space is available should still be allowed. manager.lease(1) assert(manager.free() === 0) } test("tracking active stores") { val manager = mockManager() // Lease and commit space for app 1, making it active. val leaseA = manager.lease(2) assert(manager.free() === 1) doReturn(2L).when(manager).sizeOf(leaseA.tmpPath) assert(manager.openStore("appA", None).isEmpty) val dstA = leaseA.commit("appA", None) // Create a new lease. Leases are always granted, but this shouldn't cause app1's store // to be deleted. val leaseB = manager.lease(2) assert(dstA.exists()) // Trying to commit on top of an active application should fail. intercept[IllegalArgumentException] { leaseB.commit("appA", None) } leaseB.rollback() // Close appA with an updated size, then create a new lease. Now the app's directory should be // deleted. doReturn(3L).when(manager).sizeOf(dstA) manager.release("appA", None) assert(manager.free() === 0) val leaseC = manager.lease(1) assert(!dstA.exists()) leaseC.rollback() assert(manager.openStore("appA", None).isEmpty) } test("approximate size heuristic") { val manager = new HistoryServerDiskManager(new SparkConf(false), testDir, store, new ManualClock()) assert(manager.approximateSize(50L, false) < 50L) assert(manager.approximateSize(50L, true) > 50L) } }
goldmedal/spark
core/src/test/scala/org/apache/spark/deploy/history/HistoryServerDiskManagerSuite.scala
Scala
apache-2.0
5,290
package com.gravity.gdk.placement import com.fasterxml.jackson.core.JsonParseException /* ___...---'' ___...---'\'___ '' _.-'' _`'.______\\. /_.) )..- __..--'\\ ( __..--'' '-''\@ Ⓐ Ⓐ Ⓐ Ⓐ Ⓐ Ⓐ Ⓐ Ⓐ Ⓐ Ⓐ Ⓐ */ /** An exception thrown during [[com.gravity.gdk.placement.Placement.getRecos]]. */ abstract class PlacementGetRecosException extends Exception /** Thrown when a non-2xx HTTP status occurs during [[com.gravity.gdk.placement.Placement.getRecos]]. */ case class PlacementGetRecosBadHttpStatusException(statusCode: Int) extends PlacementGetRecosException { override def getMessage: String = s"Received non-2xx HTTP status $statusCode" } /** Thrown when [[com.gravity.gdk.placement.Placement.getRecos]] results in a JSON parse error. */ case class PlacementGetRecosResponseJsonParseException(ex: JsonParseException) extends PlacementGetRecosException { override def getMessage: String = ex.getMessage } /** * Thrown when the response received during [[com.gravity.gdk.placement.Placement.getRecos]] is parsed but does not match the * expected format. */ case class PlacementGetRecosResponseFormatException(errors: Seq[Error]) extends PlacementGetRecosException { override def getMessage: String = "Response format errors: " + errors.mkString("\n") }
GravityLabs/gdk-scala
src/main/scala/com/gravity/gdk/placement/PlacementGetRecosException.scala
Scala
apache-2.0
1,335
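As a hedged illustration of how a caller might branch on the exception hierarchy above (recosCall is a placeholder thunk, not the real Placement.getRecos signature):

import com.gravity.gdk.placement._
import scala.util.{Failure, Success, Try}

def describeFailure(recosCall: => Unit): String = Try(recosCall) match {
  case Success(_)                                              => "ok"
  case Failure(PlacementGetRecosBadHttpStatusException(code))  => s"bad HTTP status: $code"
  case Failure(e: PlacementGetRecosResponseJsonParseException) => s"unparseable JSON: ${e.getMessage}"
  case Failure(e: PlacementGetRecosResponseFormatException)    => s"unexpected format: ${e.getMessage}"
  case Failure(other)                                          => s"unrelated failure: $other"
}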
package shopScala.async_reactive import com.mongodb.casbah.Imports._ import com.mongodb.casbah.MongoClient import shopScala.util.Constants._ import shopScala.util.User object SyncCasbah extends App { val client: MongoClient = MongoClient(new MongoClientURI(MONGODB_URI)) val db: MongoDB = client(SHOP_DB_NAME) val usersCollection: MongoCollection = db(USERS_COLLECTION_NAME) def blockingIO_GetDataFromDB: Seq[String] = { usersCollection .find() .toSeq .map(User(_)) .map(_.name) } val simpsons: Seq[String] = blockingIO_GetDataFromDB simpsons.foreach(println) }
hermannhueck/reactive-mongo-access
src/main/scala/shopScala/async_reactive/SyncCasbah.scala
Scala
apache-2.0
609
/******************************************************************************* * Copyright (c) 2019. Carl Minden * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. ******************************************************************************/ package com.anathema_roguelike package entities.characters.foes.roles class Tracker extends Role {}
carlminden/anathema-roguelike
src/com/anathema_roguelike/entities/characters/foes/roles/Tracker.scala
Scala
gpl-3.0
947
// Copyright 2015,2016,2017,2018,2019,2020 Commonwealth Bank of Australia // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package commbank.grimlock.test import commbank.grimlock.framework._ import commbank.grimlock.framework.content._ import commbank.grimlock.framework.encoding._ import commbank.grimlock.framework.environment.implicits._ import commbank.grimlock.framework.metadata._ import commbank.grimlock.framework.position._ import commbank.grimlock.framework.sample._ import com.twitter.scalding.typed.ValuePipe import shapeless.HList trait TestMatrixExtract extends TestMatrix { val ext = "foo" val result1 = List(Cell(Position("foo"), Content(OrdinalSchema[String](), "3.14"))) val result2 = List( Cell(Position("bar", 2), Content(ContinuousSchema[Double](), 12.56)), Cell(Position("baz", 2), Content(DiscreteSchema[Long](), 19L)), Cell(Position("foo", 1), Content(OrdinalSchema[String](), "3.14")), Cell(Position("foo", 2), Content(ContinuousSchema[Double](), 6.28)), Cell(Position("foo", 3), Content(NominalSchema[String](), "9.42")), Cell( Position("foo", 4), Content( DateSchema[java.util.Date](), DateValue((new java.text.SimpleDateFormat("yyyy-MM-dd hh:mm:ss")).parse("2000-01-01 12:56:00")) ) ) ) val result3 = List( Cell(Position("bar", 2, "xyz"), Content(ContinuousSchema[Double](), 12.56)), Cell(Position("baz", 2, "xyz"), Content(DiscreteSchema[Long](), 19L)), Cell(Position("foo", 1, "xyz"), Content(OrdinalSchema[String](), "3.14")), Cell(Position("foo", 2, "xyz"), Content(ContinuousSchema[Double](), 6.28)), Cell(Position("foo", 3, "xyz"), Content(NominalSchema[String](), "9.42")), Cell( Position("foo", 4, "xyz"), Content( DateSchema[java.util.Date](), DateValue((new java.text.SimpleDateFormat("yyyy-MM-dd hh:mm:ss")).parse("2000-01-01 12:56:00")) ) ) ) val result4 = List(Cell(Position("foo"), Content(OrdinalSchema[String](), "3.14"))) val result5 = List( Cell(Position("foo", 1), Content(OrdinalSchema[String](), "3.14")), Cell(Position("foo", 2), Content(ContinuousSchema[Double](), 6.28)), Cell(Position("foo", 3), Content(NominalSchema[String](), "9.42")), Cell( Position("foo", 4), Content( DateSchema[java.util.Date](), DateValue((new java.text.SimpleDateFormat("yyyy-MM-dd hh:mm:ss")).parse("2000-01-01 12:56:00")) ) ) ) val result6 = List( Cell(Position("foo", 1, "xyz"), Content(OrdinalSchema[String](), "3.14")), Cell(Position("foo", 2, "xyz"), Content(ContinuousSchema[Double](), 6.28)), Cell(Position("foo", 3, "xyz"), Content(NominalSchema[String](), "9.42")), Cell( Position("foo", 4, "xyz"), Content( DateSchema[java.util.Date](), DateValue((new java.text.SimpleDateFormat("yyyy-MM-dd hh:mm:ss")).parse("2000-01-01 12:56:00")) ) ) ) } object TestMatrixExtract { case class TestSampler[P <: HList]() extends Sampler[P] { def select(cell: Cell[P]): Boolean = cell.position.asList.contains(StringValue("foo")) || cell.position.asList.contains(IntValue(2)) } case class TestSamplerWithValue[P <: HList]() extends SamplerWithValue[P] { type V = String def selectWithValue(cell: Cell[P], ext: V): Boolean = 
cell.position.asList.contains(StringValue(ext)) } } class TestScalaMatrixExtract extends TestMatrixExtract with TestScala { import commbank.grimlock.scala.environment.implicits._ "A Matrix.subset" should "return its sampled data in 1D" in { toU(data1) .extract(TestMatrixExtract.TestSampler()) .toList.sortBy(_.position) shouldBe result1 } it should "return its sampled data in 2D" in { toU(data2) .extract(TestMatrixExtract.TestSampler()) .toList.sortBy(_.position) shouldBe result2 } it should "return its sampled data in 3D" in { toU(data3) .extract(TestMatrixExtract.TestSampler()) .toList.sortBy(_.position) shouldBe result3 } "A Matrix.subsetWithValue" should "return its sampled data in 1D" in { toU(data1) .extractWithValue(ext, TestMatrixExtract.TestSamplerWithValue()) .toList.sortBy(_.position) shouldBe result4 } it should "return its sampled data in 2D" in { toU(data2) .extractWithValue(ext, TestMatrixExtract.TestSamplerWithValue()) .toList.sortBy(_.position) shouldBe result5 } it should "return its sampled data in 3D" in { toU(data3) .extractWithValue(ext, TestMatrixExtract.TestSamplerWithValue()) .toList.sortBy(_.position) shouldBe result6 } } class TestScaldingMatrixExtract extends TestMatrixExtract with TestScalding { import commbank.grimlock.scalding.environment.implicits._ "A Matrix.subset" should "return its sampled data in 1D" in { toU(data1) .extract(TestMatrixExtract.TestSampler()) .toList.sortBy(_.position) shouldBe result1 } it should "return its sampled data in 2D" in { toU(data2) .extract(TestMatrixExtract.TestSampler()) .toList.sortBy(_.position) shouldBe result2 } it should "return its sampled data in 3D" in { toU(data3) .extract(TestMatrixExtract.TestSampler()) .toList.sortBy(_.position) shouldBe result3 } "A Matrix.subsetWithValue" should "return its sampled data in 1D" in { toU(data1) .extractWithValue(ValuePipe(ext), TestMatrixExtract.TestSamplerWithValue()) .toList.sortBy(_.position) shouldBe result4 } it should "return its sampled data in 2D" in { toU(data2) .extractWithValue(ValuePipe(ext), TestMatrixExtract.TestSamplerWithValue()) .toList.sortBy(_.position) shouldBe result5 } it should "return its sampled data in 3D" in { toU(data3) .extractWithValue(ValuePipe(ext), TestMatrixExtract.TestSamplerWithValue()) .toList.sortBy(_.position) shouldBe result6 } } class TestSparkMatrixExtract extends TestMatrixExtract with TestSpark { import commbank.grimlock.spark.environment.implicits._ "A Matrix.subset" should "return its sampled data in 1D" in { toU(data1) .extract(TestMatrixExtract.TestSampler()) .toList.sortBy(_.position) shouldBe result1 } it should "return its sampled data in 2D" in { toU(data2) .extract(TestMatrixExtract.TestSampler()) .toList.sortBy(_.position) shouldBe result2 } it should "return its sampled data in 3D" in { toU(data3) .extract(TestMatrixExtract.TestSampler()) .toList.sortBy(_.position) shouldBe result3 } "A Matrix.subsetWithValue" should "return its sampled data in 1D" in { toU(data1) .extractWithValue(ext, TestMatrixExtract.TestSamplerWithValue()) .toList.sortBy(_.position) shouldBe result4 } it should "return its sampled data in 2D" in { toU(data2) .extractWithValue(ext, TestMatrixExtract.TestSamplerWithValue()) .toList.sortBy(_.position) shouldBe result5 } it should "return its sampled data in 3D" in { toU(data3) .extractWithValue(ext, TestMatrixExtract.TestSamplerWithValue()) .toList.sortBy(_.position) shouldBe result6 } }
CommBank/grimlock
grimlock-core/src/test/scala/commbank/grimlock/matrix/TestMatrixExtract.scala
Scala
apache-2.0
7,666
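A hedged sketch of what a bespoke Sampler looks like outside the test above: keep only cells whose first coordinate equals a given string. The types come from the grimlock imports used in the file; KeepRow is an illustrative name, not part of the library.

import commbank.grimlock.framework._
import commbank.grimlock.framework.encoding._
import commbank.grimlock.framework.sample._
import shapeless.HList

// Mirrors TestSampler's use of position.asList and StringValue above,
// but only inspects the first coordinate.
case class KeepRow[P <: HList](row: String) extends Sampler[P] {
  def select(cell: Cell[P]): Boolean =
    cell.position.asList.headOption.contains(StringValue(row))
}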
/* Copyright 2011 the original author or authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package net.gumbix.bioinf.hmm import net.gumbix.dynpro.CellPosition._ import net.gumbix.dynpro.{Backpropagation, DynPro, Idx, DynProMatrixPrinter} /** * The Viterbi algorithm to determine patterns in a string. * @param s The string to analyse. * @param alphabet Alphabet of the emissions. * @param states States (excl. q0). * @param transP Transition-Probabilities, e.g. probability * going from state p to q. * @param emmP Emission-Probabilities, e.g. probability to emit * the character c when being in state q. * @author Markus Gumbel (m.gumbel@hs-mannheim.de) */ class Viterbi(val s: Array[Char], val alphabet: Array[Char], val states: Array[Char], val transP: Array[Array[Double]], val emmP: Array[Array[Double]]) extends DynPro[Int] with Backpropagation[Int] with DynProMatrixPrinter[Int] { /** * Values can become very small, so a scientific notation is required. */ formatter = ENGINEER override val backpropagationStart = MAXIMUM_VALUE_LAST_ROW /** * Length of the string to analyse. */ def n = s.length /** * Number of states. */ def m = states.length /** * Decisions are the states (incl. q0) */ def decisions(idx: Idx) = { if (idx.i == 0) (0 to 0).toArray else (1 to states.length).toArray } def prevStates(idx: Idx, d: Int) = if (idx.i > 0) Array(Idx(idx.i - 1, d - 1)) else Array() def value(idx: Idx, dState: Int) = (idx.i, idx.j) match { case (iChar, jState) => { // The index of the current char (for this row): //val idxS = alphabet.indexOf(s(iChar - 1)) val idxS = alphabet.indexOf(s(iChar)) val e = emmP(jState + 1 - 1)(idxS) // state, char val t = transP(dState)(jState + 1) java.lang.Math.log10(e * t) } } override def rowLabels = Some(s.map(c => c.toString)) override def columnLabels = Some(states.map(c => c.toString)) }
markusgumbel/scalabioalg
core/src/main/scala/net/gumbix/bioinf/hmm/Viterbi.scala
Scala
apache-2.0
2,547
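A hedged sketch of instantiating the Viterbi class above for a toy fair/loaded-coin HMM. The table layout follows what the value method suggests (transP indexed from the silent start state q0 plus the real states, emmP indexed by state and emitted character); decoding itself goes through the inherited DynPro/Backpropagation machinery, which is not shown in this file.

import net.gumbix.bioinf.hmm.Viterbi

object ViterbiToyExample extends App {
  val observed = "HHTHH".toCharArray          // emissions to decode
  val alphabet = Array('H', 'T')
  val states   = Array('F', 'L')              // Fair coin, Loaded coin

  // Rows/columns 1..m are the real states, index 0 is the silent start state q0
  // (column 0 is never read by value(), hence the zeros).
  val transP = Array(
    Array(0.0, 0.5, 0.5),                     // q0 -> F, q0 -> L
    Array(0.0, 0.9, 0.1),                     // F  -> F, F  -> L
    Array(0.0, 0.1, 0.9)                      // L  -> F, L  -> L
  )
  // emmP(state)(character): probability of emitting alphabet(j) while in state i.
  val emmP = Array(
    Array(0.5, 0.5),                          // Fair:   H, T
    Array(0.8, 0.2)                           // Loaded: H, T
  )

  val viterbi = new Viterbi(observed, alphabet, states, transP, emmP)
  // The decoded state path is recovered via the DynPro API inherited by Viterbi (not shown here).
}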
package com.aristocrat.mandrill.requests.Whitelists import com.aristocrat.mandrill.requests.MandrillRequest case class Add(key: String, email: String, comment: String) extends MandrillRequest
aristocratic/mandrill
src/main/scala/com/aristocrat/mandrill/requests/Whitelists/Add.scala
Scala
mit
194
/* * Copyright 2006-2008 Workingmouse * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.googlecode.furnace.parse import Bytes._ import com.googlecode.instinct.expect.Expect._ import com.googlecode.instinct.marker.annotate.Specification import java.io.ByteArrayInputStream import parse.FastaParser._ import scalaz.OptionW._ import scalaz.list.NonEmptyList, NonEmptyList._ import sequence.GeneSequence._ final class AFastaParserWithNoSequenceToParse { private val noSequence = byteIterator("") @Specification def returnsNone { val sequences = parse(noSequence, 10) expect that sequences.isEmpty isEqualTo true } } final class AFastaParserWithASequenceWithNoHeader { private val noHeader = byteIterator(""" ATGACAAAGCTAATTATTCACTTAGTTTCAGACTCTTCCGTGCAAACTGCAAAATATACAGCAAATTCTG""") @Specification { val expectedException = classOf[RuntimeException], val withMessage = "Input sequence contains no header: ATGACAAAGCTAATTATTCACTTAGTTTCAGACTCTTCCGTGCAAACTGCAAAATATACAGCAAATTCTG"} def throwsAnError { parse(noHeader, 10) } } final class AFastaParserWithASequenceContainingASingleLineOfBases { private val sequence = byteIterator(""" >gi|15891923|ref|NC_003103.1| Rickettsia conorii str. Malish 7, complete genome ATGACAAAGCTAATTATTCACTTAGTTTCAGACTCTTCCGTGCAAACTGCAAAATATACAGCAAATTCTG""") @Specification def turnsAnIteratorOfBytesIntoAnIteratorOfGeneSequences { val result = parse(sequence, 10) expect.that(result.isEmpty).isEqualTo(false) val sequences = result.get.toList // Note. Scala bug https://lampsvn.epfl.ch/trac/scala/ticket/1246, this won't work. expect.that(sequences(0)).isEqualTo(geneSequence(baseSeq("ATGACAAAGC"))) // expect.that(sequences(1)).isEqualTo(geneSequence(baseSeq("TAATTATTCA"))) // expect.that(sequences(2)).isEqualTo(geneSequence(baseSeq("CTTAGTTTCA"))) // expect.that(sequences(3)).isEqualTo(geneSequence(baseSeq("GACTCTTCCG"))) // expect.that(sequences(4)).isEqualTo(geneSequence(baseSeq("TGCAAACTGC"))) // expect.that(sequences(5)).isEqualTo(geneSequence(baseSeq("AAAATATACA"))) // expect.that(sequences(6)).isEqualTo(geneSequence(baseSeq("GCAAATTCTG"))) } } final class AFastaParserWithALotOfData { import util.io.FilePath import util.io.FilePath._ import java.io.{File, FileInputStream} import scalaz.javas.InputStream._ import spec.SpecificationHelper._ @Specification def isFastAndDoesNotBlowMemory { val file = dataFile("sequences/NC_003103_r.conorii.fasta") val in = new FileInputStream(file) try { parse(in, 40).fold(error("No sequences found"), (s => s.foreach(_))) } finally { in.close } } } object Bytes { import sequence.Base, Base._ def byteIterator(bases: String): Iterator[Byte] = bases.map(_.toByte).elements def baseSeq(bases: String): NonEmptyList[Base] = list(bases.map(_.toByte: Base).toList) }
tomjadams/furnace
src/spec/scala/com/googlecode/furnace/parse/AFastaParser.scala
Scala
apache-2.0
3,432
package ohnosequences.awstools.ec2 /* ## Instance Types & AMI compatibility implicits Here we provide implicits for (hopefully) all valid combinations of - instance type & storage type - instance type & virtualization type - instance type & AMI (as a combination of storage and virtualization types) */ /* An instance type supports an AMI if it supports both its storage type and virtualization */ @annotation.implicitNotFound( msg = """ Instance type ${T} doesn't support the AMI type ${A} Try to choose different virtualization or storage type. """) sealed trait SupportsAMI[T <: AnyInstanceType, A <: AnyLinuxAMI] { implicit val supportsStorage: T SupportsStorageType A#Storage implicit val supportsVirtualization: T SupportsVirtualization A#Virt } case object SupportsAMI { implicit def supports[ T <: AnyInstanceType, A <: AnyLinuxAMI ](implicit ss: T SupportsStorageType A#Storage, sv: T SupportsVirtualization A#Virt ): (T SupportsAMI A) = new (T SupportsAMI A) { val supportsStorage = ss val supportsVirtualization = sv } } // TODO: check it with this table: http://aws.amazon.com/amazon-linux-ami/instance-type-matrix/ // The list can be retrieved from http://www.ec2instances.info/?min_storage=1 @annotation.implicitNotFound( msg = """ Instance type ${T} doesn't support storage type of the chosen AMI ${S} """) sealed trait SupportsStorageType[T <: AnyInstanceType, S <: AnyStorageType] case object SupportsStorageType { implicit def ebs[T <: AnyInstanceType]: (T SupportsStorageType EBS.type) = new (T SupportsStorageType EBS.type) {} implicit def is_m3[T <: AnyInstanceType.ofFamily[m3.type]]: (T SupportsStorageType InstanceStore.type) = new (T SupportsStorageType InstanceStore.type) {} implicit def is_r3[T <: AnyInstanceType.ofFamily[r3.type]]: (T SupportsStorageType InstanceStore.type) = new (T SupportsStorageType InstanceStore.type) {} implicit def is_m1[T <: AnyInstanceType.ofFamily[m1.type]]: (T SupportsStorageType InstanceStore.type) = new (T SupportsStorageType InstanceStore.type) {} implicit def is_m2[T <: AnyInstanceType.ofFamily[m2.type]]: (T SupportsStorageType InstanceStore.type) = new (T SupportsStorageType InstanceStore.type) {} implicit def is_i2[T <: AnyInstanceType.ofFamily[i2.type]]: (T SupportsStorageType InstanceStore.type) = new (T SupportsStorageType InstanceStore.type) {} implicit def is_hs1[T <: AnyInstanceType.ofFamily[hs1.type]]: (T SupportsStorageType InstanceStore.type) = new (T SupportsStorageType InstanceStore.type) {} implicit def is_hi1[T <: AnyInstanceType.ofFamily[hi1.type]]: (T SupportsStorageType InstanceStore.type) = new (T SupportsStorageType InstanceStore.type) {} implicit def is_d2[T <: AnyInstanceType.ofFamily[d2.type]]: (T SupportsStorageType InstanceStore.type) = new (T SupportsStorageType InstanceStore.type) {} implicit def is_cr1[T <: AnyInstanceType.ofFamily[cr1.type]]: (T SupportsStorageType InstanceStore.type) = new (T SupportsStorageType InstanceStore.type) {} implicit def is_cg1[T <: AnyInstanceType.ofFamily[cg1.type]]: (T SupportsStorageType InstanceStore.type) = new (T SupportsStorageType InstanceStore.type) {} implicit def is_cc2[T <: AnyInstanceType.ofFamily[cc2.type]]: (T SupportsStorageType InstanceStore.type) = new (T SupportsStorageType InstanceStore.type) {} implicit def is_c3[T <: AnyInstanceType.ofFamily[c3.type]]: (T SupportsStorageType InstanceStore.type) = new (T SupportsStorageType InstanceStore.type) {} implicit def is_c1[T <: AnyInstanceType.ofFamily[c1.type]]: (T SupportsStorageType InstanceStore.type) = new (T SupportsStorageType 
InstanceStore.type) {} // TODO: what's g2 instances? } @annotation.implicitNotFound( msg = """ Instance type ${T} doesn't support virtualization of the chosen AMI ${V} """) sealed trait SupportsVirtualization[T <: AnyInstanceType, V <: AnyVirtualization] case object SupportsVirtualization { /* All current generation instance types support HVM AMIs. The CC2, CR1, HI1, and HS1 previous generation instance types support HVM AMIs. */ implicit def hvm_CurrentGeneration[T <: AnyInstanceType.ofGeneration[CurrentGeneration]]: (T SupportsVirtualization HVM.type) = new (T SupportsVirtualization HVM.type) {} implicit def hvm_cc2[T <: AnyInstanceType.ofFamily[cc2.type]]: (T SupportsVirtualization HVM.type) = new (T SupportsVirtualization HVM.type) {} implicit def hvm_cr1[T <: AnyInstanceType.ofFamily[cr1.type]]: (T SupportsVirtualization HVM.type) = new (T SupportsVirtualization HVM.type) {} implicit def hvm_hi1[T <: AnyInstanceType.ofFamily[hi1.type]]: (T SupportsVirtualization HVM.type) = new (T SupportsVirtualization HVM.type) {} implicit def hvm_hs1[T <: AnyInstanceType.ofFamily[hs1.type]]: (T SupportsVirtualization HVM.type) = new (T SupportsVirtualization HVM.type) {} /* The C3 and M3 current generation instance types support PV AMIs. The C1, HI1, HS1, M1, M2, and T1 previous generation instance types support PV AMIs. */ implicit def pv_c3[T <: AnyInstanceType.ofFamily[c3.type]]: (T SupportsVirtualization PV.type) = new (T SupportsVirtualization PV.type) {} implicit def pv_m3[T <: AnyInstanceType.ofFamily[m3.type]]: (T SupportsVirtualization PV.type) = new (T SupportsVirtualization PV.type) {} implicit def pv_c1[T <: AnyInstanceType.ofFamily[c1.type]]: (T SupportsVirtualization PV.type) = new (T SupportsVirtualization PV.type) {} implicit def pv_hi1[T <: AnyInstanceType.ofFamily[hi1.type]]: (T SupportsVirtualization PV.type) = new (T SupportsVirtualization PV.type) {} implicit def pv_hs1[T <: AnyInstanceType.ofFamily[hs1.type]]: (T SupportsVirtualization PV.type) = new (T SupportsVirtualization PV.type) {} implicit def pv_m1[T <: AnyInstanceType.ofFamily[m1.type]]: (T SupportsVirtualization PV.type) = new (T SupportsVirtualization PV.type) {} implicit def pv_m2[T <: AnyInstanceType.ofFamily[m2.type]]: (T SupportsVirtualization PV.type) = new (T SupportsVirtualization PV.type) {} implicit def pv_t1[T <: AnyInstanceType.ofFamily[t1.type]]: (T SupportsVirtualization PV.type) = new (T SupportsVirtualization PV.type) {} }
ohnosequences/aws-scala-tools
src/main/scala/ohnosequences/awstools/ec2/InstanceType-AMI.scala
Scala
agpl-3.0
6,399
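A hedged sketch of how the evidence above is meant to be consumed: a method that compiles only when the compiler can assemble SupportsAMI for the given pairing. It assumes it sits in the same ohnosequences.awstools.ec2 package, where AnyInstanceType and AnyLinuxAMI are defined; CompatibilitySketch and launchCheck are illustrative names only.

package ohnosequences.awstools.ec2

object CompatibilitySketch {

  /* Usable only for pairings for which both the storage-type and virtualization
     implicits above exist; anything else is rejected at compile time with the
     implicitNotFound message declared on SupportsAMI. */
  def launchCheck[T <: AnyInstanceType, A <: AnyLinuxAMI](instanceType: T, ami: A)(
    implicit ev: T SupportsAMI A
  ): A = ami
}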
/* * Copyright (C) 2013 Alcatel-Lucent. * * See the NOTICE file distributed with this work for additional * information regarding copyright ownership. * Licensed to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package molecule.examples.core import molecule._ import molecule.stream._ import platform.Platform, channel.{ Console, Timer } import java.util.{ Date, Calendar, GregorianCalendar } import java.util.concurrent.TimeUnit object Clock extends App { val initTime: Calendar = new GregorianCalendar() initTime.setTime(new Date(System.currentTimeMillis)) def increment(cal: Calendar, tickCount: Int): Calendar = { val newCal = cal.clone.asInstanceOf[Calendar] newCal.add(Calendar.SECOND, 1) newCal } def show(cal: Calendar): String = cal.getTime.toString // Scan prepends initTime, which is read immediately. // Therefore we must introduce an initial delay before the next tick def timeFeed: IChan[String] = Timer.afterAndEvery(1, TimeUnit.SECONDS).scan(initTime)(increment).map(show).take(10) /** * The 'connect' method below takes a platform as implicit argument because it must * create a lightweight Connector process to send all data from an input to an output. */ val p = Platform("clock") val stream = timeFeed connect Console.logOut[String]("Time:") p.launch(stream).get_! }
molecule-labs/molecule
molecule-core-examples/src/main/scala/molecule/examples/core/Clock.scala
Scala
apache-2.0
1,856
package chat.tox.antox.activities import java.util.Locale import android.app.{AlertDialog, NotificationManager} import android.content.res.Configuration import android.content.{Context, DialogInterface, Intent, SharedPreferences} import android.media.AudioManager import android.net.ConnectivityManager import android.os.{Build, Bundle} import android.preference.PreferenceManager import android.support.v7.app.AppCompatActivity import android.support.v7.widget.Toolbar import android.view.{MenuItem, View, WindowManager} import chat.tox.antox.R import chat.tox.antox.data.State import chat.tox.antox.fragments.MainDrawerFragment import chat.tox.antox.theme.ThemeManager import chat.tox.antox.tox.ToxSingleton import chat.tox.antox.utils._ class MainActivity extends AppCompatActivity { var request: View = _ var preferences: SharedPreferences = _ protected override def onCreate(savedInstanceState: Bundle) { super.onCreate(savedInstanceState) preferences = PreferenceManager.getDefaultSharedPreferences(this) ThemeManager.init(getApplicationContext) // Set the right language selectLanguage() setContentView(R.layout.activity_main) // Use a toolbar so that the drawer goes above the action bar val toolbar = findViewById(R.id.toolbar).asInstanceOf[Toolbar] setSupportActionBar(toolbar) getSupportActionBar.setHomeAsUpIndicator(R.drawable.ic_menu) getSupportActionBar.setDisplayHomeAsUpEnabled(true) ThemeManager.applyTheme(this, getSupportActionBar) // The app will control the voice call audio level setVolumeControlStream(AudioManager.STREAM_VOICE_CALL) // Fix for Android 4.1.x if (Build.VERSION.SDK_INT != Build.VERSION_CODES.JELLY_BEAN && Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) { getWindow.setFlags(WindowManager.LayoutParams.FLAG_HARDWARE_ACCELERATED, WindowManager.LayoutParams.FLAG_HARDWARE_ACCELERATED) } // Check to see if Internet is potentially available and show a warning if it isn't if (!isNetworkConnected) showAlertDialog(MainActivity.this, getString(R.string.main_no_internet), getString(R.string.main_not_connected)) // Give ToxSingleton an instance of notification manager for use in displaying notifications from callbacks ToxSingleton.mNotificationManager = getSystemService(Context.NOTIFICATION_SERVICE).asInstanceOf[NotificationManager] // Initialise the bitmap manager for storing bitmaps in a cache new BitmapManager() val db = State.db db.clearFileNumbers() // Removes the drop shadow from the actionbar as it overlaps the tabs getSupportActionBar.setElevation(0) } def onClickAdd(v: View) { val intent = new Intent(this, classOf[AddActivity]) startActivityForResult(intent, Constants.ADD_FRIEND_REQUEST_CODE) } override def onBackPressed(): Unit = { val drawerFragment = getSupportFragmentManager.findFragmentById(R.id.drawer).asInstanceOf[MainDrawerFragment] if (drawerFragment.isDrawerOpen) { drawerFragment.closeDrawer() } else { super.onBackPressed() } } override def onPause() { super.onPause() ToxSingleton.chatActive = false } override def onDestroy() { super.onDestroy() State.calls.removeAll() } /** * Displays a generic dialog using the strings passed in. * TODO: Should maybe be refactored into separate class and used for other dialogs? 
*/ def showAlertDialog(context: Context, title: String, message: String) { val alertDialog = new AlertDialog.Builder(context).create() alertDialog.setTitle(title) alertDialog.setMessage(message) alertDialog.setIcon(R.drawable.ic_launcher) alertDialog.setButton("OK", new DialogInterface.OnClickListener() { def onClick(dialog: DialogInterface, which: Int) { } }) alertDialog.show() } /** * Checks to see if Wifi or Mobile have a network connection */ private def isNetworkConnected: Boolean = { val connectivityManager = getSystemService(Context.CONNECTIVITY_SERVICE).asInstanceOf[ConnectivityManager] val networkInfo = connectivityManager.getAllNetworkInfo for (info <- networkInfo) { if ("WIFI".equalsIgnoreCase(info.getTypeName) && info.isConnected) return true else if ("MOBILE".equalsIgnoreCase(info.getTypeName) && info.isConnected) return true } false } override def onOptionsItemSelected(item: MenuItem): Boolean = { val id = item.getItemId if (id == android.R.id.home) { val drawer = getSupportFragmentManager.findFragmentById(R.id.drawer).asInstanceOf[MainDrawerFragment] drawer.openDrawer() return true } super.onOptionsItemSelected(item) } private def selectLanguage() { val localeString = preferences.getString("locale", "-1") val locale = getResources.getConfiguration.locale if (localeString == "-1") { val editor = preferences.edit() val currentLanguage = locale.getLanguage.toLowerCase val currentCountry = locale.getCountry editor.putString("locale", currentLanguage + "_" + currentCountry) editor.apply() } else { val locale = if (localeString.contains("_")) { val (language, country) = localeString.splitAt(localeString.indexOf("_")) new Locale(language, country) } else { new Locale(localeString) } Locale.setDefault(locale) val config = new Configuration() config.locale = locale getApplicationContext.getResources.updateConfiguration(config, getApplicationContext.getResources.getDisplayMetrics) } } }
biaji/Antox
app/src/main/scala/chat/tox/antox/activities/MainActivity.scala
Scala
gpl-3.0
5,629
/*********************************************************************** * Copyright (c) 2013-2017 Commonwealth Computer Research, Inc. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Apache License, Version 2.0 * which accompanies this distribution and is available at * http://www.opensource.org/licenses/apache2.0.php. ***********************************************************************/ package org.locationtech.geomesa.tools.ingest import java.io.{File, InputStream} import java.util.concurrent.atomic.AtomicLong import com.typesafe.config.{Config, ConfigRenderOptions} import org.apache.commons.pool2.{BasePooledObjectFactory, ObjectPool} import org.apache.commons.pool2.impl.{DefaultPooledObject, GenericObjectPool} import org.apache.hadoop.mapreduce.Job import org.apache.hadoop.mapreduce.lib.input.FileInputFormat import org.locationtech.geomesa.convert.{DefaultCounter, EvaluationContext, SimpleFeatureConverter, SimpleFeatureConverters} import org.locationtech.geomesa.jobs.mapreduce.{ConverterInputFormat, GeoMesaOutputFormat} import org.locationtech.geomesa.tools.Command import org.locationtech.geomesa.tools.DistributedRunParam.RunModes.RunMode import org.locationtech.geomesa.tools.ingest.AbstractIngest.StatusCallback import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType} /** * Ingestion that uses geomesa converters to process input files * * @param sft simple feature type * @param dsParams data store parameters * @param converterConfig converter definition * @param inputs files to ingest * @param libjarsFile file with list of jars needed for ingest * @param libjarsPaths paths to search for libjars * @param numLocalThreads for local ingest, how many threads to use */ class ConverterIngest(sft: SimpleFeatureType, dsParams: Map[String, String], converterConfig: Config, inputs: Seq[String], mode: Option[RunMode], libjarsFile: String, libjarsPaths: Iterator[() => Seq[File]], numLocalThreads: Int) extends AbstractIngest(dsParams, sft.getTypeName, inputs, mode, libjarsFile, libjarsPaths, numLocalThreads) { override def beforeRunTasks(): Unit = { // create schema for the feature prior to Ingest job Command.user.info(s"Creating schema ${sft.getTypeName}") ds.createSchema(sft) } protected val factory = new BasePooledObjectFactory[SimpleFeatureConverter[_]] { override def wrap(obj: SimpleFeatureConverter[_]) = new DefaultPooledObject[SimpleFeatureConverter[_]](obj) override def create(): SimpleFeatureConverter[_] = SimpleFeatureConverters.build(sft, converterConfig) } protected val converters = new GenericObjectPool[SimpleFeatureConverter[_]](factory) override def createLocalConverter(path: String, failures: AtomicLong): LocalIngestConverter = new LocalIngestConverterImpl(sft, path, converters, failures) override def runDistributedJob(statusCallback: StatusCallback): (Long, Long) = { val job = new ConverterIngestJob(sft, converterConfig) job.run(dsParams, sft.getTypeName, inputs, libjarsFile, libjarsPaths, statusCallback) } } class LocalIngestConverterImpl(sft: SimpleFeatureType, path: String, converters: ObjectPool[SimpleFeatureConverter[_]], failures: AtomicLong) extends LocalIngestConverter { class LocalIngestCounter extends DefaultCounter { // keep track of failure at a global level, keep line counts and success local override def incFailure(i: Long): Unit = failures.getAndAdd(i) override def getFailure: Long = failures.get() } protected val converter: SimpleFeatureConverter[_] = converters.borrowObject() protected val 
ec: EvaluationContext = converter.createEvaluationContext(Map("inputFilePath" -> path), new LocalIngestCounter) override def convert(is: InputStream): (SimpleFeatureType, Iterator[SimpleFeature]) = (sft, converter.process(is, ec)) override def close(): Unit = converters.returnObject(converter) } /** * Distributed job that uses converters to process input files * * @param sft simple feature type * @param converterConfig converter definition */ class ConverterIngestJob(sft: SimpleFeatureType, converterConfig: Config) extends AbstractIngestJob { import ConverterInputFormat.{Counters => ConvertCounters} import GeoMesaOutputFormat.{Counters => OutCounters} val failCounters = Seq((ConvertCounters.Group, ConvertCounters.Failed), (OutCounters.Group, OutCounters.Failed)) override val inputFormatClass: Class[_ <: FileInputFormat[_, SimpleFeature]] = classOf[ConverterInputFormat] override def configureJob(job: Job): Unit = { ConverterInputFormat.setConverterConfig(job, converterConfig.root().render(ConfigRenderOptions.concise())) ConverterInputFormat.setSft(job, sft) } override def written(job: Job): Long = job.getCounters.findCounter(OutCounters.Group, OutCounters.Written).getValue override def failed(job: Job): Long = failCounters.map(c => job.getCounters.findCounter(c._1, c._2).getValue).sum }
ronq/geomesa
geomesa-tools/src/main/scala/org/locationtech/geomesa/tools/ingest/ConverterIngest.scala
Scala
apache-2.0
5,192
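The converter pooling above is plain commons-pool2; here is a minimal, self-contained sketch of that pattern with a StringBuilder standing in for the SimpleFeatureConverter, so none of the GeoMesa types are needed.

import org.apache.commons.pool2.BasePooledObjectFactory
import org.apache.commons.pool2.impl.{DefaultPooledObject, GenericObjectPool}

object PoolSketch extends App {
  // The factory mirrors ConverterIngest.factory: create() builds the expensive object,
  // wrap() boxes it for the pool's bookkeeping.
  val factory = new BasePooledObjectFactory[StringBuilder] {
    override def create(): StringBuilder = new StringBuilder()
    override def wrap(obj: StringBuilder) = new DefaultPooledObject[StringBuilder](obj)
  }
  val pool = new GenericObjectPool[StringBuilder](factory)

  val sb = pool.borrowObject()       // like converters.borrowObject() above
  try sb.append("work")
  finally pool.returnObject(sb)      // like converters.returnObject(converter) in close()
  pool.close()
}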
package org.jetbrains.plugins.scala package worksheet.actions import com.intellij.execution._ import com.intellij.execution.configurations.JavaParameters import com.intellij.execution.process.{OSProcessHandler, ProcessAdapter, ProcessEvent} import com.intellij.execution.ui.ConsoleViewContentType import com.intellij.icons.AllIcons import com.intellij.ide.scratch.{ScratchRootType, ScratchFileService} import com.intellij.ide.util.EditorHelper import com.intellij.internal.statistic.UsageTrigger import com.intellij.openapi.actionSystem.{AnAction, AnActionEvent} import com.intellij.openapi.application.{ApplicationManager, ModalityState} import com.intellij.openapi.compiler.{CompileContext, CompileStatusNotification, CompilerManager} import com.intellij.openapi.fileEditor.FileEditorManager import com.intellij.openapi.keymap.{KeymapManager, KeymapUtil} import com.intellij.openapi.module.{ModuleManager, Module} import com.intellij.openapi.project.Project import com.intellij.openapi.projectRoots.{JavaSdkType, JdkUtil} import com.intellij.openapi.roots.{ModuleRootManager, ProjectFileIndex} import com.intellij.openapi.util.Key import com.intellij.openapi.vfs.{VirtualFile, VirtualFileWithId} import com.intellij.psi.{PsiManager, PsiDocumentManager, PsiFile} import org.jetbrains.plugins.scala import org.jetbrains.plugins.scala.lang.psi.api.ScalaFile import org.jetbrains.plugins.scala.project._ import org.jetbrains.plugins.scala.settings.ScalaProjectSettings import org.jetbrains.plugins.scala.util.ScalaUtil import org.jetbrains.plugins.scala.worksheet.processor.WorksheetCompiler import org.jetbrains.plugins.scala.worksheet.runconfiguration.WorksheetViewerInfo import org.jetbrains.plugins.scala.worksheet.server.WorksheetProcessManager import org.jetbrains.plugins.scala.worksheet.ui.WorksheetEditorPrinter /** * @author Ksenia.Sautina * @author Dmitry Naydanov * @since 10/17/12 */ class RunWorksheetAction extends AnAction with TopComponentAction { def actionPerformed(e: AnActionEvent) { RunWorksheetAction.runCompiler(e.getProject, auto = false) } override def update(e: AnActionEvent) { val presentation = e.getPresentation presentation.setIcon(AllIcons.Actions.Execute) val shortcuts = KeymapManager.getInstance.getActiveKeymap.getShortcuts("Scala.RunWorksheet") if (shortcuts.nonEmpty) { val shortcutText = " (" + KeymapUtil.getShortcutText(shortcuts(0)) + ")" presentation.setText(ScalaBundle.message("worksheet.execute.button") + shortcutText) } updateInner(presentation, e.getProject) } override def actionIcon = AllIcons.Actions.Execute override def bundleKey = "worksheet.execute.button" override def shortcutId: Option[String] = Some("Scala.RunWorksheet") } object RunWorksheetAction { private val runnerClassName = "org.jetbrains.plugins.scala.worksheet.MyWorksheetRunner" def runCompiler(project: Project, auto: Boolean) { UsageTrigger.trigger("scala.worksheet") if (project == null) return val editor = FileEditorManager.getInstance(project).getSelectedTextEditor if (editor == null) return val psiFile = PsiDocumentManager.getInstance(project).getPsiFile(editor.getDocument) WorksheetProcessManager.stop(psiFile.getVirtualFile) psiFile match { case file: ScalaFile if file.isWorksheetFile => val viewer = WorksheetViewerInfo getViewer editor if (viewer != null) { ApplicationManager.getApplication.invokeAndWait(new Runnable { override def run() { scala.extensions.inWriteAction { CleanWorksheetAction.resetScrollModel(viewer) if (!auto) CleanWorksheetAction.cleanWorksheet(file.getNode, editor, viewer, project) } } }, 
ModalityState.any()) } def runnable() = { new WorksheetCompiler().compileAndRun(editor, file, (className: String, addToCp: String) => { ApplicationManager.getApplication invokeLater new Runnable { override def run() { executeWorksheet(file.getName, project, file.getContainingFile, className, addToCp) } } }, Option(editor), auto) } if (WorksheetCompiler isMakeBeforeRun psiFile) { CompilerManager.getInstance(project).make( getModuleFor(file), new CompileStatusNotification { override def finished(aborted: Boolean, errors: Int, warnings: Int, compileContext: CompileContext) { if (!aborted && errors == 0) runnable() } }) } else runnable() case _ => } } def executeWorksheet(name: String, project: Project, file: PsiFile, mainClassName: String, addToCp: String) { val virtualFile = file.getVirtualFile val params = createParameters(getModuleFor(file), mainClassName, Option(project.getBaseDir) map (_.getPath) getOrElse "", addToCp, "", virtualFile.getCanonicalPath) //todo extract default java options?? setUpUiAndRun(params.createOSProcessHandler(), file) } private def createParameters(module: Module, mainClassName: String, workingDirectory: String, additionalCp: String, consoleArgs: String, worksheetField: String) = { import _root_.scala.collection.JavaConverters._ if (module == null) throw new ExecutionException("Module is not specified") val project = module.getProject val scalaSdk = module.scalaSdk.getOrElse { throw new ExecutionException("No Scala facet configured for module " + module.getName) } val rootManager = ModuleRootManager.getInstance(module) val sdk = rootManager.getSdk if (sdk == null || !sdk.getSdkType.isInstanceOf[JavaSdkType]) { throw CantRunException.noJdkForModule(module) } val params = new JavaParameters() val files = scalaSdk.compilerClasspath params.getClassPath.addAllFiles(files.asJava) params.setUseDynamicClasspath(JdkUtil.useDynamicClasspath(project)) params.setUseDynamicVMOptions(JdkUtil.useDynamicVMOptions()) params.getClassPath.add(ScalaUtil.runnersPath()) params.setWorkingDirectory(workingDirectory) params.setMainClass(runnerClassName) params.configureByModule(module, JavaParameters.JDK_AND_CLASSES_AND_TESTS) params.getClassPath.add(ScalaUtil.runnersPath()) params.getClassPath.add(additionalCp) params.getProgramParametersList addParametersString worksheetField if (!consoleArgs.isEmpty) params.getProgramParametersList addParametersString consoleArgs params.getProgramParametersList prepend mainClassName //IMPORTANT! 
this must be first program argument params } private def setUpUiAndRun(handler: OSProcessHandler, file: PsiFile) { val virtualFile = file.getVirtualFile val editor = EditorHelper openInEditor file val worksheetPrinter = WorksheetEditorPrinter.newWorksheetUiFor(editor, virtualFile) val myProcessListener: ProcessAdapter = new ProcessAdapter { override def onTextAvailable(event: ProcessEvent, outputType: Key[_]) { val text = event.getText if (ConsoleViewContentType.NORMAL_OUTPUT == ConsoleViewContentType.getConsoleViewType(outputType)) { worksheetPrinter processLine text } } override def processTerminated(event: ProcessEvent): Unit = { worksheetPrinter.flushBuffer() } } worksheetPrinter.scheduleWorksheetUpdate() handler.addProcessListener(myProcessListener) handler.startNotify() } def isScratchWorksheet(vFileOpt: Option[VirtualFile], project: Project): Boolean = vFileOpt.exists { case vFile => ScratchFileService.getInstance().getRootType(vFile).isInstanceOf[ScratchRootType] && ScalaProjectSettings.getInstance(project).isTreatScratchFilesAsWorksheet } def isScratchWorksheet(file: PsiFile): Boolean = isScratchWorksheet(Option(file.getVirtualFile), file.getProject) def getModuleFor(vFile: VirtualFile, project: Project): Module = { vFile match { case _: VirtualFileWithId => Option(ProjectFileIndex.SERVICE getInstance project getModuleForFile vFile) getOrElse project.anyScalaModule.map(_.module).orNull case _ => project.anyScalaModule.map(_.module).orNull } } def getModuleFor(file: PsiFile): Module = WorksheetCompiler.getModuleForCpName(file) flatMap { case name => scala.extensions.inReadAction { Option(ModuleManager getInstance file.getProject findModuleByName name) } } getOrElse getModuleFor(file.getVirtualFile, file.getProject) }
double-y/translation-idea-plugin
src/org/jetbrains/plugins/scala/worksheet/actions/RunWorksheetAction.scala
Scala
apache-2.0
8,638
package com.sksamuel.elastic4s.searches.queries.funcscorer trait ScoreFunctionDefinition
FabienPennequin/elastic4s
elastic4s-core/src/main/scala/com/sksamuel/elastic4s/searches/queries/funcscorer/ScoreFunctionDefinition.scala
Scala
apache-2.0
90
/* * Copyright 2017 helloscala.com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package helloscala.algorithm import scala.collection.mutable object Sorts { def genericQuicksort[T](list: IndexedSeq[T])(implicit ev1: T => Ordered[T]): Vector[T] = { if (list.size < 2) { list.toVector } else { val len = list.size val pivot = list(len / 2) val less = mutable.ArrayBuffer.empty[T] val greater = mutable.ArrayBuffer.empty[T] val pivots = mutable.ArrayBuffer.empty[T] for (item <- list) { if (item < pivot) less.append(item) else if (item > pivot) greater.append(item) else pivots.append(item) } genericQuicksort(less) ++ pivots ++ genericQuicksort(greater) } } }
helloscala/helloscala
hs-core/src/main/scala/helloscala/algorithm/Sorts.scala
Scala
apache-2.0
1,275
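A quick usage sketch for genericQuicksort above; any element type with an Ordered view (Int, String, ...) works.

import helloscala.algorithm.Sorts

object SortsExample extends App {
  println(Sorts.genericQuicksort(Vector(3, 1, 4, 1, 5, 9, 2, 6)))  // Vector(1, 1, 2, 3, 4, 5, 6, 9)
  println(Sorts.genericQuicksort(Vector("pear", "apple", "plum"))) // Vector(apple, pear, plum)
}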
package pimpathon import pimpathon.filterMonadic._ class FilterMonadicSpec extends PSpec { "toMultiMap" in { Set.empty[(Int, Int)].toMultiMap[List] ≡ Map() List.empty[(Int, Int)].toMultiMap[List] ≡ Map() Set((1, 10), (1, 11), (2, 20), (2, 21)).toMultiMap[List] ≡ Map(1 → List(10, 11), 2 → List(20, 21)) List((1, 10), (1, 11), (2, 20), (2, 21)).toMultiMap[List] ≡ Map(1 → List(10, 11), 2 → List(20, 21)) Set((1, 10), (1, 11), (2, 20), (2, 21)).toMultiMap[Set] ≡ Map(1 → Set(10, 11), 2 → Set(20, 21)) List((1, 10), (1, 11), (2, 20), (2, 21)).toMultiMap[Set] ≡ Map(1 → Set(10, 11), 2 → Set(20, 21)) } }
stacycurl/pimpathon
src/test/scala/pimpathon/FilterMonadic.scala
Scala
apache-2.0
663
/* sbt -- Simple Build Tool * Copyright 2008, 2009, 2010, 2011 Mark Harrah */ package sbt import complete.{ DefaultParsers, Parser } import compiler.{ CompilerCache, EvalImports } import Types.{ const, idFun } import Aggregation.AnyKeys import Project.LoadAction import scala.annotation.tailrec import Path._ import StandardMain._ import java.io.File import java.net.URI import java.util.Locale /** This class is the entry point for sbt.*/ final class xMain extends xsbti.AppMain { def run(configuration: xsbti.AppConfiguration): xsbti.MainResult = { import BasicCommands.early import BasicCommandStrings.runEarly import BuiltinCommands.{ initialize, defaults } import CommandStrings.{ BootCommand, DefaultsCommand, InitCommand } runManaged(initialState(configuration, Seq(defaults, early), runEarly(DefaultsCommand) :: runEarly(InitCommand) :: BootCommand :: Nil) ) } } final class ScriptMain extends xsbti.AppMain { def run(configuration: xsbti.AppConfiguration): xsbti.MainResult = runManaged(initialState(configuration, BuiltinCommands.ScriptCommands, Script.Name :: Nil) ) } final class ConsoleMain extends xsbti.AppMain { def run(configuration: xsbti.AppConfiguration): xsbti.MainResult = runManaged(initialState(configuration, BuiltinCommands.ConsoleCommands, IvyConsole.Name :: Nil) ) } object StandardMain { def runManaged(s: State): xsbti.MainResult = { val previous = TrapExit.installManager() try MainLoop.runLogged(s) finally TrapExit.uninstallManager(previous) } /** The common interface to standard output, used for all built-in ConsoleLoggers. */ val console = ConsoleOut.systemOutOverwrite(ConsoleOut.overwriteContaining("Resolving ")) def initialGlobalLogging: GlobalLogging = GlobalLogging.initial(MainLogging.globalDefault(console), File.createTempFile("sbt", ".log"), console) def initialState(configuration: xsbti.AppConfiguration, initialDefinitions: Seq[Command], preCommands: Seq[String]): State = { import BasicCommandStrings.isEarlyCommand val userCommands = configuration.arguments.map(_.trim) val (earlyCommands, normalCommands) = (preCommands ++ userCommands).partition(isEarlyCommand) val commands = earlyCommands ++ normalCommands val initAttrs = BuiltinCommands.initialAttributes val s = State(configuration, initialDefinitions, Set.empty, None, commands, State.newHistory, initAttrs, initialGlobalLogging, State.Continue) s.initializeClassLoaderCache } } import DefaultParsers._ import CommandStrings._ import BasicCommandStrings._ import BasicCommands._ import CommandUtil._ object BuiltinCommands { def initialAttributes = AttributeMap.empty def ConsoleCommands: Seq[Command] = Seq(ignore, exit, IvyConsole.command, setLogLevel, early, act, nop) def ScriptCommands: Seq[Command] = Seq(ignore, exit, Script.command, setLogLevel, early, act, nop) def DefaultCommands: Seq[Command] = Seq(ignore, help, completionsCommand, about, tasks, settingsCommand, loadProject, projects, project, reboot, read, history, set, sessionCommand, inspect, loadProjectImpl, loadFailed, Cross.crossBuild, Cross.switchVersion, setOnFailure, clearOnFailure, stashOnFailure, popOnFailure, setLogLevel, plugin, plugins, ifLast, multi, shell, continuous, eval, alias, append, last, lastGrep, export, boot, nop, call, exit, early, initialize, act) ++ compatCommands def DefaultBootCommands: Seq[String] = LoadProject :: (IfLast + " " + Shell) :: Nil def boot = Command.make(BootCommand)(bootParser) def about = Command.command(AboutCommand, aboutBrief, aboutDetailed) { s => s.log.info(aboutString(s)); s } def setLogLevel = 
Command.arb(const(logLevelParser), logLevelHelp)(LogManager.setGlobalLogLevel) private[this] def logLevelParser: Parser[Level.Value] = oneOf(Level.values.toSeq.map(v => v.toString ^^^ v)) // This parser schedules the default boot commands unless overridden by an alias def bootParser(s: State) = { val orElse = () => DefaultBootCommands ::: s delegateToAlias(BootCommand, success(orElse))(s) } def sbtName(s: State): String = s.configuration.provider.id.name def sbtVersion(s: State): String = s.configuration.provider.id.version def scalaVersion(s: State): String = s.configuration.provider.scalaProvider.version def aboutProject(s: State): String = if (Project.isProjectLoaded(s)) { val e = Project.extract(s) val version = e.getOpt(Keys.version) match { case None => ""; case Some(v) => " " + v } val current = "The current project is " + Reference.display(e.currentRef) + version + "\\n" val sc = aboutScala(s, e) val built = if (sc.isEmpty) "" else "The current project is built against " + sc + "\\n" current + built + aboutPlugins(e) } else "No project is currently loaded" def aboutPlugins(e: Extracted): String = { def list(b: BuildUnit) = b.plugins.detected.autoPlugins.map(_.value.label) ++ b.plugins.detected.plugins.names val allPluginNames = e.structure.units.values.flatMap(u => list(u.unit)).toSeq.distinct if (allPluginNames.isEmpty) "" else allPluginNames.mkString("Available Plugins: ", ", ", "") } def aboutScala(s: State, e: Extracted): String = { val scalaVersion = e.getOpt(Keys.scalaVersion) val scalaHome = e.getOpt(Keys.scalaHome).flatMap(idFun) val instance = e.getOpt(Keys.scalaInstance.task).flatMap(_ => quiet(e.runTask(Keys.scalaInstance, s)._2)) (scalaVersion, scalaHome, instance) match { case (sv, Some(home), Some(si)) => "local Scala version " + selectScalaVersion(sv, si) + " at " + home.getAbsolutePath case (_, Some(home), None) => "a local Scala build at " + home.getAbsolutePath case (sv, None, Some(si)) => "Scala " + selectScalaVersion(sv, si) case (Some(sv), None, None) => "Scala " + sv case (None, None, None) => "" } } def aboutString(s: State): String = { val (name, ver, scalaVer, about) = (sbtName(s), sbtVersion(s), scalaVersion(s), aboutProject(s)) """This is %s %s |%s |%s, %s plugins, and build definitions are using Scala %s |""".stripMargin.format(name, ver, about, name, name, scalaVer) } private[this] def selectScalaVersion(sv: Option[String], si: ScalaInstance): String = sv match { case Some(si.version) => si.version; case _ => si.actualVersion } private[this] def quiet[T](t: => T): Option[T] = try { Some(t) } catch { case e: Exception => None } def settingsCommand = showSettingLike(SettingsCommand, settingsPreamble, KeyRanks.MainSettingCutoff, key => !isTask(key.manifest)) def tasks = showSettingLike(TasksCommand, tasksPreamble, KeyRanks.MainTaskCutoff, key => isTask(key.manifest)) def showSettingLike(command: String, preamble: String, cutoff: Int, keep: AttributeKey[_] => Boolean) = Command(command, settingsBrief(command), settingsDetailed(command))(showSettingParser(keep)) { case (s: State, (verbosity: Int, selected: Option[String])) => if (selected.isEmpty) System.out.println(preamble) val prominentOnly = verbosity <= 1 val verboseFilter = if (prominentOnly) highPass(cutoff) else topNRanked(25 * verbosity) System.out.println(tasksHelp(s, keys => verboseFilter(keys filter keep), selected)) System.out.println() if (prominentOnly) System.out.println(moreAvailableMessage(command, selected.isDefined)) s } def showSettingParser(keepKeys: AttributeKey[_] => Boolean)(s: 
State): Parser[(Int, Option[String])] = verbosityParser ~ selectedParser(s, keepKeys).? def selectedParser(s: State, keepKeys: AttributeKey[_] => Boolean): Parser[String] = singleArgument(allTaskAndSettingKeys(s).filter(keepKeys).map(_.label).toSet) def verbosityParser: Parser[Int] = success(1) | ((Space ~ "-") ~> ( 'v'.id.+.map(_.size + 1) | ("V" ^^^ Int.MaxValue) )) def taskDetail(keys: Seq[AttributeKey[_]]): Seq[(String, String)] = sortByLabel(withDescription(keys)) flatMap taskStrings def allTaskAndSettingKeys(s: State): Seq[AttributeKey[_]] = { val extracted = Project.extract(s) import extracted._ val index = structure.index index.keyIndex.keys(Some(currentRef)).toSeq.map(index.keyMap).distinct } def sortByLabel(keys: Seq[AttributeKey[_]]): Seq[AttributeKey[_]] = keys.sortBy(_.label) def sortByRank(keys: Seq[AttributeKey[_]]): Seq[AttributeKey[_]] = keys.sortBy(_.rank) def withDescription(keys: Seq[AttributeKey[_]]): Seq[AttributeKey[_]] = keys.filter(_.description.isDefined) def isTask(mf: Manifest[_])(implicit taskMF: Manifest[Task[_]], inputMF: Manifest[InputTask[_]]): Boolean = mf.runtimeClass == taskMF.runtimeClass || mf.runtimeClass == inputMF.runtimeClass def topNRanked(n: Int) = (keys: Seq[AttributeKey[_]]) => sortByRank(keys).take(n) def highPass(rankCutoff: Int) = (keys: Seq[AttributeKey[_]]) => sortByRank(keys).takeWhile(_.rank <= rankCutoff) def tasksHelp(s: State, filter: Seq[AttributeKey[_]] => Seq[AttributeKey[_]], arg: Option[String]): String = { val commandAndDescription = taskDetail(filter(allTaskAndSettingKeys(s))) arg match { case Some(selected) => detail(selected, commandAndDescription.toMap) case None => aligned(" ", " ", commandAndDescription) mkString ("\\n", "\\n", "") } } def taskStrings(key: AttributeKey[_]): Option[(String, String)] = key.description map { d => (key.label, d) } def defaults = Command.command(DefaultsCommand) { s => s.copy(definedCommands = DefaultCommands) } def initialize = Command.command(InitCommand) { s => /*"load-commands -base ~/.sbt/commands" :: */ readLines(readable(sbtRCs(s))) ::: s } def eval = Command.single(EvalCommand, Help.more(EvalCommand, evalDetailed)) { (s, arg) => if (Project.isProjectLoaded(s)) loadedEval(s, arg) else rawEval(s, arg) s } private[this] def loadedEval(s: State, arg: String) { val extracted = Project extract s import extracted._ val result = session.currentEval().eval(arg, srcName = "<eval>", imports = autoImports(extracted)) s.log.info(s"ans: ${result.tpe} = ${result.getValue(currentLoader)}") } private[this] def rawEval(s: State, arg: String) { val app = s.configuration.provider val classpath = app.mainClasspath ++ app.scalaProvider.jars val result = Load.mkEval(classpath, s.baseDir, Nil).eval(arg, srcName = "<eval>", imports = new EvalImports(Nil, "")) s.log.info(s"ans: ${result.tpe} = ${result.getValue(app.loader)}") } def sessionCommand = Command.make(SessionCommand, sessionBrief, SessionSettings.Help)(SessionSettings.command) def reapply(newSession: SessionSettings, structure: BuildStructure, s: State): State = { s.log.info("Reapplying settings...") // Here, for correct behavior, we also need to re-inject a settings logger, as we'll be re-evaluating settings. 
val loggerInject = LogManager.settingsLogger(s) val withLogger = newSession.appendRaw(loggerInject :: Nil) val newStructure = Load.reapply(withLogger.mergeSettings, structure)(Project.showContextKey(newSession, structure)) Project.setProject(newSession, newStructure, s) } def set = Command(SetCommand, setBrief, setDetailed)(setParser) { case (s, (all, arg)) => val extracted = Project extract s import extracted._ val dslVals = extracted.currentUnit.unit.definitions.dslDefinitions // TODO - This is possibly inefficient (or stupid). We should try to only attach the // classloader + imports NEEDED to compile the set command, rather than // just ALL of them. val ims = (imports(extracted) ++ dslVals.imports.map(i => (i, -1))) val cl = dslVals.classloader(currentLoader) val settings = EvaluateConfigurations.evaluateSetting( session.currentEval(), "<set>", ims, arg, LineRange(0, 0) )(cl) val setResult = if (all) SettingCompletions.setAll(extracted, settings) else SettingCompletions.setThis(s, extracted, settings, arg) s.log.info(setResult.quietSummary) s.log.debug(setResult.verboseSummary) reapply(setResult.session, structure, s) } // @deprecated("Use SettingCompletions.setThis", "0.13.0") def setThis(s: State, extracted: Extracted, settings: Seq[Def.Setting[_]], arg: String) = SettingCompletions.setThis(s, extracted, settings, arg) def inspect = Command(InspectCommand, inspectBrief, inspectDetailed)(Inspect.parser) { case (s, (option, sk)) => s.log.info(Inspect.output(s, option, sk)) s } @deprecated("Use Inspect.output", "0.13.0") def inspectOutput(s: State, option: Inspect.Mode, sk: Def.ScopedKey[_]): String = Inspect.output(s, option, sk) def lastGrep = Command(LastGrepCommand, lastGrepBrief, lastGrepDetailed)(lastGrepParser) { case (s, (pattern, Some(sks))) => val (str, ref, display) = extractLast(s) Output.lastGrep(sks, str.streams(s), pattern, printLast(s))(display) keepLastLog(s) case (s, (pattern, None)) => for (logFile <- lastLogFile(s)) yield Output.lastGrep(logFile, pattern, printLast(s)) keepLastLog(s) } def extractLast(s: State) = { val ext = Project.extract(s) (ext.structure, Select(ext.currentRef), ext.showKey) } def setParser = (s: State) => { val extracted = Project.extract(s) import extracted._ token(Space ~> flag("every" ~ Space)) ~ SettingCompletions.settingParser(structure.data, structure.index.keyMap, currentProject) } @deprecated("Use Inspect.parser", "0.13.0") def inspectParser: State => Parser[(Inspect.Mode, Def.ScopedKey[_])] = Inspect.parser @deprecated("Use Inspect.spacedModeParser", "0.13.0") val spacedModeParser: State => Parser[Inspect.Mode] = Inspect.spacedModeParser @deprecated("Use Inspect.allKeyParser", "0.13.0") def allKeyParser(s: State): Parser[AttributeKey[_]] = Inspect.allKeyParser(s) @deprecated("Use Inspect.spacedKeyParser", "0.13.0") val spacedKeyParser: State => Parser[Def.ScopedKey[_]] = Inspect.spacedKeyParser val spacedAggregatedParser = (s: State) => Act.requireSession(s, token(Space) ~> Act.aggregatedKeyParser(s)) val aggregatedKeyValueParser: State => Parser[Option[AnyKeys]] = (s: State) => spacedAggregatedParser(s).map(x => Act.keyValues(s)(x)).? 
val exportParser: State => Parser[() => State] = (s: State) => Act.requireSession(s, token(Space) ~> exportParser0(s)) private[sbt] def exportParser0(s: State): Parser[() => State] = { val extracted = Project extract s import extracted.{ showKey, structure } val keysParser = token(flag("--last" <~ Space)) ~ Act.aggregatedKeyParser(extracted) val show = Aggregation.ShowConfig(settingValues = true, taskValues = false, print = println _, success = false) for { lastOnly_keys <- keysParser kvs = Act.keyValues(structure)(lastOnly_keys._2) f <- if (lastOnly_keys._1) success(() => s) else Aggregation.evaluatingParser(s, structure, show)(kvs) } yield () => { def export0(s: State): State = lastImpl(s, kvs, Some(ExportStream)) val newS = try f() catch { case e: Exception => try export0(s) finally { throw e } } export0(newS) } } def lastGrepParser(s: State) = Act.requireSession(s, (token(Space) ~> token(NotSpace, "<pattern>")) ~ aggregatedKeyValueParser(s)) def last = Command(LastCommand, lastBrief, lastDetailed)(aggregatedKeyValueParser) { case (s, Some(sks)) => lastImpl(s, sks, None) case (s, None) => for (logFile <- lastLogFile(s)) yield Output.last(logFile, printLast(s)) keepLastLog(s) } def export = Command(ExportCommand, exportBrief, exportDetailed)(exportParser)((s, f) => f()) private[this] def lastImpl(s: State, sks: AnyKeys, sid: Option[String]): State = { val (str, ref, display) = extractLast(s) Output.last(sks, str.streams(s), printLast(s), sid)(display) keepLastLog(s) } /** Determines the log file that last* commands should operate on. See also isLastOnly. */ def lastLogFile(s: State) = { val backing = s.globalLogging.backing if (isLastOnly(s)) backing.last else Some(backing.file) } /** * If false, shift the current log file to be the log file that 'last' will operate on. * If true, keep the previous log file as the one 'last' operates on because there is nothing useful in the current one. */ def keepLastLog(s: State): State = if (isLastOnly(s)) s.keepLastLog else s /** * The last* commands need to determine whether to read from the current log file or the previous log file * and whether to keep the previous log file or not. This is selected based on whether the previous command * was 'shell', which meant that the user directly entered the 'last' command. If it wasn't directly entered, * the last* commands operate on any output since the last 'shell' command and do shift the log file. * Otherwise, the output since the previous 'shell' command is used and the log file is not shifted. 
*/ def isLastOnly(s: State): Boolean = s.history.previous.forall(_ == Shell) def printLast(s: State): Seq[String] => Unit = _ foreach println def autoImports(extracted: Extracted): EvalImports = new EvalImports(imports(extracted), "<auto-imports>") def imports(extracted: Extracted): Seq[(String, Int)] = { val curi = extracted.currentRef.build extracted.structure.units(curi).imports.map(s => (s, -1)) } def listBuild(uri: URI, build: LoadedBuildUnit, current: Boolean, currentID: String, log: Logger) = { log.info("In " + uri) def prefix(id: String) = if (currentID != id) " " else if (current) " * " else "(*)" for (id <- build.defined.keys.toSeq.sorted) log.info("\\t" + prefix(id) + id) } def act = Command.customHelp(Act.actParser, actHelp) def actHelp = (s: State) => CommandStrings.showHelp ++ CommandStrings.multiTaskHelp ++ keysHelp(s) def keysHelp(s: State): Help = if (Project.isProjectLoaded(s)) Help.detailOnly(taskDetail(allTaskAndSettingKeys(s))) else Help.empty def plugins = Command.command(PluginsCommand, pluginsBrief, pluginsDetailed) { s => val helpString = PluginsDebug.helpAll(s) System.out.println(helpString) s } val pluginParser: State => Parser[AutoPlugin] = s => { val autoPlugins: Map[String, AutoPlugin] = PluginsDebug.autoPluginMap(s) token(Space) ~> Act.knownPluginParser(autoPlugins, "plugin") } def plugin = Command(PluginCommand)(pluginParser) { (s, plugin) => val helpString = PluginsDebug.help(plugin, s) System.out.println(helpString) s } def projects = Command(ProjectsCommand, (ProjectsCommand, projectsBrief), projectsDetailed)(s => projectsParser(s).?) { case (s, Some(modifyBuilds)) => transformExtraBuilds(s, modifyBuilds) case (s, None) => showProjects(s); s } def showProjects(s: State) { val extracted = Project extract s import extracted._ import currentRef.{ build => curi, project => cid } listBuild(curi, structure.units(curi), true, cid, s.log) for ((uri, build) <- structure.units if curi != uri) listBuild(uri, build, false, cid, s.log) } def transformExtraBuilds(s: State, f: List[URI] => List[URI]): State = { val original = Project.extraBuilds(s) val extraUpdated = Project.updateExtraBuilds(s, f) try doLoadProject(extraUpdated, LoadAction.Current) catch { case e: Exception => s.log.error("Project loading failed: reverting to previous state.") Project.setExtraBuilds(s, original) } } def projectsParser(s: State): Parser[List[URI] => List[URI]] = { val addBase = token(Space ~> "add") ~> token(Space ~> basicUri, "<build URI>").+ val removeBase = token(Space ~> "remove") ~> token(Space ~> Uri(Project.extraBuilds(s).toSet)).+ addBase.map(toAdd => (xs: List[URI]) => (toAdd.toList ::: xs).distinct) | removeBase.map(toRemove => (xs: List[URI]) => xs.filterNot(toRemove.toSet)) } def project = Command.make(ProjectCommand, projectBrief, projectDetailed)(ProjectNavigation.command) def loadFailed = Command(LoadFailed)(loadProjectParser)(doLoadFailed) @deprecated("No longer used.", "0.13.2") def handleLoadFailed(s: State): State = doLoadFailed(s, "") @tailrec private[this] def doLoadFailed(s: State, loadArg: String): State = { val result = (SimpleReader.readLine("Project loading failed: (r)etry, (q)uit, (l)ast, or (i)gnore? 
") getOrElse Quit).toLowerCase(Locale.ENGLISH) def matches(s: String) = !result.isEmpty && (s startsWith result) if (result.isEmpty || matches("retry")) loadProjectCommand(LoadProject, loadArg) :: s.clearGlobalLog else if (matches(Quit)) s.exit(ok = false) else if (matches("ignore")) { val hadPrevious = Project.isProjectLoaded(s) s.log.warn("Ignoring load failure: " + (if (hadPrevious) "using previously loaded project." else "no project loaded.")) s } else if (matches("last")) LastCommand :: loadProjectCommand(LoadFailed, loadArg) :: s else { println("Invalid response.") doLoadFailed(s, loadArg) } } def loadProjectCommands(arg: String) = StashOnFailure :: (OnFailure + " " + loadProjectCommand(LoadFailed, arg)) :: loadProjectCommand(LoadProjectImpl, arg) :: PopOnFailure :: State.FailureWall :: Nil def loadProject = Command(LoadProject, LoadProjectBrief, LoadProjectDetailed)(loadProjectParser) { (s, arg) => loadProjectCommands(arg) ::: s } private[this] def loadProjectParser = (s: State) => matched(Project.loadActionParser) private[this] def loadProjectCommand(command: String, arg: String): String = s"$command $arg".trim def loadProjectImpl = Command(LoadProjectImpl)(_ => Project.loadActionParser)(doLoadProject) def doLoadProject(s0: State, action: LoadAction.Value): State = { val (s1, base) = Project.loadAction(SessionVar.clear(s0), action) IO.createDirectory(base) val s = if (s1 has Keys.stateCompilerCache) s1 else registerCompilerCache(s1) val (eval, structure) = Load.defaultLoad(s, base, s.log, Project.inPluginProject(s), Project.extraBuilds(s)) val session = Load.initialSession(structure, eval, s0) SessionSettings.checkSession(session, s) Project.setProject(session, structure, s) } def registerCompilerCache(s: State): State = { val maxCompilers = System.getProperty("sbt.resident.limit") val cache = if (maxCompilers == null) CompilerCache.fresh else { val num = try maxCompilers.toInt catch { case e: NumberFormatException => throw new RuntimeException("Resident compiler limit must be an integer.", e) } if (num <= 0) CompilerCache.fresh else CompilerCache(num) } s.put(Keys.stateCompilerCache, cache) } }
dyx/sbt
main/src/main/scala/sbt/Main.scala
Scala
bsd-3-clause
23,277
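The command definitions in the Main.scala excerpt above surface as interactive sbt shell commands. The lines below are an illustrative sketch of how they are typically invoked in an sbt 0.13 shell; the concrete keys (scalacOptions, logLevel, fullClasspath) are ordinary sbt keys used only as examples and are not taken from the file itself.

// > set scalacOptions += "-deprecation"     // parsed by setParser and evaluated via EvaluateConfigurations
// > set every logLevel := Level.Warn        // the optional `every` flag applies the setting across scopes
// > lastGrep warn compile                   // greps the last log of the `compile` task for "warn"
// > last compile                            // replays the last log output of `compile`
// > export compile:fullClasspath            // prints the task value in a shell-consumable form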
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.sparklinedata.druid.metadata

import org.sparklinedata.druid.{DruidDataSourceException, Utils}

case class SpatialDruidDimensionInfo(
    druidColumn : String,
    spatialPosition : Int,
    minValue : Option[Double],
    maxValue : Option[Double]
)

case class SpatialDruidDimension(
    druidColumn : DruidDimension,
    spatialPosition : Int,
    minValue : Option[Double],
    maxValue : Option[Double]
)

case class DruidRelationColumnInfo(
    column : String,
    druidColumn : Option[String],
    spatialIndex : Option[SpatialDruidDimensionInfo] = None,
    hllMetric : Option[String] = None,
    sketchMetric : Option[String] = None,
    cardinalityEstimate : Option[Long] = None
)

/**
 * Captures the link(s) of a source column to the Druid Index.
 *
 * A column can have several kinds of links to the Druid Index:
 * - it can be a Druid Dimension, possibly the Time Dimension of the Druid Index.
 * - it can be a component (axis) of a Spatial Index in Druid.
 * - its value can be stored as an HLL Aggregation in Druid.
 * - its value can be stored in a Sketch Aggregation in Druid.
 *
 * A column can have multiple links, for example:
 * - a latitude column can be both a Druid Dimension and an axis in a Spatial Index.
 * - a column can be both a Dimension and have an HLL and/or Sketch.
 *
 * @param column the source column
 * @param druidColumn the direct link of this source column to a Druid Dimension or Metric.
 * @param spatialIndex the spatial index for this column.
 * @param hllMetric the hll Metric for this column
 * @param sketchMetric the sketch for this column
 * @param cardinalityEstimate user provided cardinality estimate.
 */
case class DruidRelationColumn(
    column : String,
    druidColumn : Option[DruidColumn],
    spatialIndex : Option[SpatialDruidDimension] = None,
    hllMetric : Option[DruidMetric] = None,
    sketchMetric : Option[DruidMetric] = None,
    cardinalityEstimate : Option[Long] = None
) {

  private lazy val druidColumnToUse : DruidColumn = {
    Utils.filterSomes(
      Seq(druidColumn, hllMetric, sketchMetric, spatialIndex.map(_.druidColumn)).toList
    ).head.get
  }

  def hasDirectDruidColumn = druidColumn.isDefined

  def hasSpatialIndex = spatialIndex.isDefined

  def hasHLLMetric = hllMetric.isDefined

  def hasSketchMetric = sketchMetric.isDefined

  def name = druidColumnToUse.name

  def dataType = if (hasSpatialIndex) DruidDataType.Float else druidColumnToUse.dataType

  def size = druidColumnToUse.size

  // a user-provided estimate wins; otherwise fall back to the underlying Druid column
  val cardinality : Long = cardinalityEstimate.getOrElse(druidColumnToUse.cardinality)

  def isDimension(excludeTime : Boolean = false) : Boolean = {
    hasDirectDruidColumn && druidColumnToUse.isDimension(excludeTime)
  }

  def isTimeDimension : Boolean = {
    hasDirectDruidColumn && druidColumnToUse.isInstanceOf[DruidTimeDimension]
  }

  def isMetric : Boolean = hasDirectDruidColumn && !isDimension(false)

  def metric = druidColumnToUse.asInstanceOf[DruidMetric]
}

object DruidRelationColumn {

  def apply(druidDS: DruidDataSource,
            timeDimensionCol : String,
            colInfo: DruidRelationColumnInfo
           ) : Option[DruidRelationColumn] = {

    (colInfo.druidColumn, colInfo.spatialIndex, colInfo.hllMetric, colInfo.sketchMetric) match {
      case (Some(dC), None, None, None) if (dC == timeDimensionCol) => {
        val dColumn = druidDS.timeDimension.get
        Some(
          new DruidRelationColumn(colInfo.column,
            Some(dColumn),
            None,
            None,
            None,
            colInfo.cardinalityEstimate
          )
        )
      }
      case (Some(dC), None, None, None) if druidDS.columns.contains(dC) => {
        val dColumn = druidDS.columns(dC)
        Some(
          new DruidRelationColumn(colInfo.column,
            Some(dColumn),
            None,
            None,
            None,
            colInfo.cardinalityEstimate
          )
        )
      }
      case (odC, Some(sI), None, None) if druidDS.columns.contains(sI.druidColumn) &&
        druidDS.columns(sI.druidColumn).isDimension() => {

        val drC = if (odC.isDefined) {
          apply(druidDS, timeDimensionCol, colInfo.copy(spatialIndex = None))
        } else None

        if (odC.isDefined && !drC.isDefined) {
          return None
        }

        Some(
          new DruidRelationColumn(colInfo.column,
            drC.flatMap(_.druidColumn),
            Some(
              SpatialDruidDimension(druidDS.columns(sI.druidColumn).asInstanceOf[DruidDimension],
                sI.spatialPosition, sI.minValue, sI.maxValue)
            ),
            None,
            None,
            colInfo.cardinalityEstimate
          )
        )
      }
      case (odC, None, hllMetric, sketchMetric) if hllMetric.isDefined || sketchMetric.isDefined => {

        val drC = if (odC.isDefined) {
          apply(druidDS, timeDimensionCol, colInfo.copy(hllMetric = None, sketchMetric = None))
        } else None

        if (odC.isDefined && !drC.isDefined) {
          return None
        }

        var hllM : Option[DruidMetric] = None
        var sketchM : Option[DruidMetric] = None

        // resolve the requested HLL metric, if any
        if (hllMetric.isDefined) {
          if (!druidDS.columns.contains(hllMetric.get) ||
            druidDS.columns(hllMetric.get).isDimension()) {
            return None
          }
          hllM = Some(druidDS.columns(hllMetric.get).asInstanceOf[DruidMetric])
        }

        // resolve the requested sketch metric, if any
        if (sketchMetric.isDefined) {
          if (!druidDS.columns.contains(sketchMetric.get) ||
            druidDS.columns(sketchMetric.get).isDimension()) {
            return None
          }
          sketchM = Some(druidDS.columns(sketchMetric.get).asInstanceOf[DruidMetric])
        }

        if (hllM.isDefined || sketchM.isDefined) {
          Some(new DruidRelationColumn(colInfo.column,
            drC.flatMap(_.druidColumn),
            None,
            hllM,
            sketchM,
            colInfo.cardinalityEstimate
          ))
        } else {
          None
        }
      }
      case _ => None
    }
  }

  def apply(dC : DruidColumn) : DruidRelationColumn = {
    new DruidRelationColumn(dC.name,
      Some(dC),
      None,
      None,
      None,
      None
    )
  }
}
SparklineData/spark-druid-olap
src/main/scala/org/sparklinedata/druid/metadata/DruidRelationColumn.scala
Scala
apache-2.0
7,813
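As a rough illustration of the metadata types defined above, the sketch below declares a hypothetical latitude column that is both a Druid dimension and the first axis of a spatial index; the names, bounds, and cardinality are invented for the example.

// Hypothetical column mapping: "lat" is a Druid dimension and axis 0 of a spatial index.
val latInfo = DruidRelationColumnInfo(
  column = "lat",
  druidColumn = Some("lat"),
  spatialIndex = Some(SpatialDruidDimensionInfo("lat", 0, Some(-90.0), Some(90.0))),
  cardinalityEstimate = Some(180000L)
)
// DruidRelationColumn(druidDS, timeDimensionCol, latInfo) would then resolve this info
// against the data source's columns, returning None if the referenced column is unknown.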
package features import org.apache.spark.mllib.feature.{IDF, HashingTF} import org.apache.spark.mllib.linalg.Vector import org.apache.spark.rdd.RDD import twitter.Tweet import features.Transformers.default._ case class TfIdf(corpus: RDD[Tweet]) extends Serializable { import Features._ val tf = new HashingTF(coefficients) val idf = new IDF().fit(tf.transform(corpus.map(_.tokens))) def tf(text: Seq[String]): Vector = tf.transform(text) def tfIdf(text: Seq[String]): Vector = idf.transform(tf.transform(text)) }
openforce/spark-mllib-scala-play
app/features/TfIdf.scala
Scala
apache-2.0
530
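TfIdf above is a thin wrapper over MLlib's hashing TF and IDF. The snippet below is a self-contained sketch of the same pattern against a local SparkContext; the project's Tweet type and its `coefficients` constant are not reproduced here, so a fixed feature count stands in for them.

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.mllib.feature.{HashingTF, IDF}

val sc = new SparkContext(new SparkConf().setMaster("local[*]").setAppName("tfidf-sketch"))
val docs = sc.parallelize(Seq(Seq("spark", "mllib"), Seq("spark", "scala", "play")))

val hashingTF = new HashingTF(1 << 18)        // plays the role of `coefficients` above
val tfVectors = hashingTF.transform(docs)     // term-frequency vectors per document
val idfModel  = new IDF().fit(tfVectors)      // idf weights learned from the corpus
val tfIdfVecs = idfModel.transform(tfVectors) // tf-idf, as TfIdf.tfIdf does per text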
package org.scalatra import scala.collection.Map trait HttpMessage { /** * A map of headers. Multiple header values are separated by a ',' * character. The keys of this map are case-insensitive. */ def headers: Map[String, String] /** * The content of the Content-Type header, or None if absent. */ def contentType: Option[String] /** * Returns the name of the character encoding of the body, or None if no * character encoding is specified. */ def characterEncoding: Option[String] }
louk/scalatra
core/src/main/scala/org/scalatra/HttpMessage.scala
Scala
bsd-2-clause
527
package org.scalafmt.util import scala.meta.parsers.ParseException import org.scalafmt.internal.State sealed abstract class ExperimentResult(scalaFile: ScalaFile) { def key: String override def toString: String = s"""${this.getClass.getSimpleName}($scalaFile)""".stripMargin } object ExperimentResult { case class Success(scalaFile: ScalaFile, nanos: Long) extends ExperimentResult(scalaFile) { override def key = "Success" } case class Timeout(scalaFile: ScalaFile) extends ExperimentResult(scalaFile) { override def key = "Formatter timed out" } case class Skipped(scalaFile: ScalaFile) extends ExperimentResult(scalaFile) { override def key = "Ignored, scalac won't parse" } case class SearchStateExploded(scalaFile: ScalaFile, state: State) extends ExperimentResult(scalaFile) { override def key = s"Search state exploded" } case class UnknownFailure(scalaFile: ScalaFile, e: Throwable) extends ExperimentResult(scalaFile) { override def key: String = e.getClass.getName override def toString: String = s"$scalaFile $e" } case class ParseErr(scalaFile: ScalaFile, e: ParseException) extends ExperimentResult(scalaFile) { override def key: String = e.getClass.getName + ": " + e.getMessage.replaceAll(" at .*", "") def lineNumber = e.pos.start.line def content = s"cols:${e.pos.start.column}-${e.pos.end.column}" def urlWithLineHighlighted: String = s"${scalaFile.githubUrl}#L${e.pos.start.line + 1} $cols" def cols = s"cols:${e.pos.start.column}-${e.pos.end.column}" } }
Daxten/scalafmt
core/src/test/scala/org/scalafmt/util/ExperimentResult.scala
Scala
apache-2.0
1,621
package com.twitter.algebird import com.twitter.algebird.BaseProperties._ import com.twitter.algebird.scalacheck.arbitrary._ import com.twitter.algebird.scalacheck.NonEmptyVector import org.scalacheck.Arbitrary import org.scalacheck.Prop.forAll import org.scalacheck.Prop class MaxLaws extends CheckProperties { def maxTest[T: Arbitrary: Ordering]: Prop = forAll { (l: Max[T], r: Max[T]) => val realMax = Max(Ordering[T].max(l.get, r.get)) l + r == realMax && (l.max(r)) == realMax } def maxSemiGroupTest[T: Arbitrary: Ordering]: Prop = forAll { v: NonEmptyVector[T] => val maxItems = v.items.map(Max(_)) v.items.max == Max.semigroup[T].combineAllOption(maxItems).get.get } // Test equiv import. val equiv: Equiv[Max[Int]] = implicitly[Equiv[Max[Int]]] // Testing that these ones can be found val sgInt: Semigroup[Max[Int]] = implicitly[Semigroup[Max[Int]]] val sgString: Semigroup[Max[String]] = implicitly[Semigroup[Max[String]]] val monoidString: Monoid[Max[String]] = implicitly[Monoid[Max[String]]] property("Max.{ +, max } works on ints")(maxTest[Int]) property("Max.aggregator returns the maximum item") { forAll { v: NonEmptyVector[Int] => v.items.max == Max.aggregator[Int].apply(v.items) } } property("Max.semigroup[Int] returns the maximum item") { maxSemiGroupTest[Int] } property("Max.semigroup[Char] returns the maximum item") { maxSemiGroupTest[Char] } property("Max[Long] is a commutative monoid") { commutativeMonoidLaws[Max[Long]] } property("Max[Double] is a commutative monoid") { commutativeMonoidLaws[Max[Double]] } property("Max[String] is a commutative monoid") { commutativeMonoidLaws[Max[String]] } property("Max[List[Int]] is a commutative monoid") { commutativeMonoidLaws[Max[List[Int]]] } property("Max[Vector[Int]] is a commutative monoid") { commutativeMonoidLaws[Max[Vector[Int]]] } property("Max[Stream[Int]] is a commutative monoid") { commutativeMonoidLaws[Max[Stream[Int]]] } }
twitter/algebird
algebird-test/src/test/scala/com/twitter/algebird/MaxLaws.scala
Scala
apache-2.0
2,059
/* * Copyright 2016 HM Revenue & Customs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package uk.gov.hmrc.ct.computations import uk.gov.hmrc.ct.box.{Calculated, CtBoxIdentifier, CtOptionalInteger} import uk.gov.hmrc.ct.computations.calculations.MachineryAndPlantCalculator import uk.gov.hmrc.ct.computations.retriever.ComputationsBoxRetriever case class CP90(value: Option[Int]) extends CtBoxIdentifier(name = "Balance Allowance") with CtOptionalInteger object CP90 extends Calculated[CP90, ComputationsBoxRetriever] with MachineryAndPlantCalculator { override def calculate(retriever: ComputationsBoxRetriever): CP90 = computeBalanceAllowance( retriever.retrieveCPQ8(), retriever.retrieveCP78(), retriever.retrieveCP84(), retriever.retrieveCP666(), retriever.retrieveCP673(), retriever.retrieveCP674(), retriever.retrieveCPAux1(), retriever.retrieveCPAux2(), retriever.retrieveCPAux3() ) }
ahudspith-equalexperts/ct-calculations
src/main/scala/uk/gov/hmrc/ct/computations/CP90.scala
Scala
apache-2.0
1,478
package dk.gp.cogp.svi import breeze.linalg.DenseMatrix import dk.gp.cov.CovFunc import breeze.linalg.DenseVector import dk.gp.cov.utils.covDiag import dk.gp.cogp.lb.LowerBound import dk.gp.cogp.model.CogpModel import dk.gp.cogp.model.Task import dk.bayes.math.gaussian.MultivariateGaussian object stochasticUpdateCogpModel { def apply(lb: LowerBound, tasks: Array[Task], trainCovParams: Boolean = true): LowerBound = { //@TODO when learning just the covParameters, at some iteration, loglik accuracy suddenly goes down, numerical stability issues? //@TODO Given just gU and hypG are learned, learning first hypG then gU doesn't not converge (loglik is decreasing), why is that? val newU = (0 until lb.model.g.size).map { j => stochasticUpdateU(j, lb) }.toArray lb.model = withNewGu(newU, lb.model) val (newW, newWDelta) = stochasticUpdateW(lb) val (newBeta, newBetaDelta) = stochasticUpdateBeta(lb) if (trainCovParams) { val newHypCovG: Array[(DenseVector[Double], DenseVector[Double])] = (0 until lb.model.g.size).map { j => stochasticUpdateHypCovG(j, lb) }.toArray lb.model = withNewCovParamsG(newHypCovG, lb.model).copy(w = newW, wDelta = newWDelta, beta = newBeta, betaDelta = newBetaDelta) lb.clearCache() } else lb.model = lb.model.copy(w = newW, wDelta = newWDelta, beta = newBeta, betaDelta = newBetaDelta) val newV = (0 until lb.model.h.size).map { i => stochasticUpdateV(i, lb) }.toArray lb.model = withNewHu(newV, lb.model) if (trainCovParams) { val newHypCovH: Array[(DenseVector[Double], DenseVector[Double])] = (0 until lb.model.h.size).map { i => stochasticUpdateHypCovH(i, lb) }.toArray lb.model = withNewCovParamsH(newHypCovH, lb.model) lb.clearCache() } lb } private def withNewGu(newGu: Array[MultivariateGaussian], model: CogpModel): CogpModel = { val newG = (0 until model.g.size).map { j => model.g(j).copy(u = newGu(j)) }.toArray val newModel = model.copy(g = newG) newModel } private def withNewCovParamsG(newHypCovG: Array[(DenseVector[Double], DenseVector[Double])], model: CogpModel): CogpModel = { val newG = (0 until model.g.size).map { j => model.g(j).copy(covFuncParams = newHypCovG(j)._1, covFuncParamsDelta = newHypCovG(j)._2) }.toArray val newModel = model.copy(g = newG) newModel } private def withNewHu(newHu: Array[MultivariateGaussian], model: CogpModel): CogpModel = { val newH = (0 until model.h.size).map { i => model.h(i).copy(u = newHu(i)) }.toArray val newModel = model.copy(h = newH) newModel } private def withNewCovParamsH(newHypCovH: Array[(DenseVector[Double], DenseVector[Double])], model: CogpModel): CogpModel = { val newH = (0 until model.h.size).map { i => model.h(i).copy(covFuncParams = newHypCovH(i)._1, covFuncParamsDelta = newHypCovH(i)._2) }.toArray val newModel = model.copy(h = newH) newModel } }
danielkorzekwa/bayes-scala-gp
src/main/scala/dk/gp/cogp/svi/stochasticUpdateCogpModel.scala
Scala
bsd-2-clause
2,973
package com.criteo.dev.cluster import java.io.{File, FileNotFoundException} import java.net.URL import com.criteo.dev.cluster.config.{ConfigLoader, GlobalConfig} /** * Main entry point to various actions on the AWS-hosted development clusters for the current user. * * See application.properties for example of how to configure source, target, and sample tables. */ object DevClusterLauncher { def main(args: Array[String]) { if (args.length == 0) { printHelp System.exit(1) } if (System.getenv("USER") == null) { println("Required variable USER is not set.") System.exit(1) } val commandMap = CommandRegistry.getCommandMap val commandString = args(0).trim() val command = commandMap.get(commandString) if (command isEmpty) { println(s"Invalid command: [$commandString]. Following commands are valid.") printHelp System.exit(1) } else { try { val argList = args.toList.drop(1) val realArgs = argList.filterNot(_.startsWith("--")) val conf = ConfigLoader( getOption(args, "source").map(getFileURL(_)).getOrElse(getFileURL("source.conf")), getOption(args, "target").map(getFileURL(_)).getOrElse(getFileURL("target.conf")), getOption(args, "checkpoint").map(getFileURL(_)) ).value command.get.apply(realArgs, conf) } catch { case e: Exception => { e.printStackTrace() System.exit(1) } } } System.exit(0) } def getFileURL(path: String): URL = { val file = new File(path) if (file.exists) file.toURI.toURL else throw new FileNotFoundException(s"$path does not exist") } def getOption(args: Array[String], argName: String): Option[String] = args .find(_.startsWith(s"--$argName")) .flatMap(_.split("=").drop(1).headOption) def printHelp(): Unit = { println("This tool provides utilities for creating and managing AWS dev instances, " + "and utilities such as copying data from gateway, and configuring gateway on local " + "machine to the cluster. Use the following commands.\n") CommandRegistry.getCommands.foreach( cc => { println(s"\033[1m${cc.name} commands\033[0m") println() cc.actions.filter(_.isHidden == false).foreach(c => { println(s"* ${c.command}") c.printHelp println() }) }) } } object HelpAction extends CliAction[Unit] { override def command: String = "help" override def usageArgs: List[Any] = List() override def help: String = "Gets help" override def applyInternal(args: List[String], config: GlobalConfig): Unit = { DevClusterLauncher.printHelp } }
criteo/berilia
src/main/scala/com/criteo/dev/cluster/DevClusterLauncher.scala
Scala
apache-2.0
2,757
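For illustration, the flag helpers defined in DevClusterLauncher above behave as follows; the paths are made-up examples.

// getOption scans for "--name=..." style flags and returns the value part, if any.
val cliArgs = Array("create-cluster", "--source=conf/my-source.conf", "--target=conf/my-target.conf")
DevClusterLauncher.getOption(cliArgs, "source")     // Some("conf/my-source.conf")
DevClusterLauncher.getOption(cliArgs, "checkpoint") // None
// getFileURL then turns an existing path into a java.net.URL, or throws FileNotFoundException.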
/* * Copyright 2011-2022 GatlingCorp (https://gatling.io) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.gatling.core.check.css import com.typesafe.scalalogging.StrictLogging import jodd.lagarto.{ LagartoParser, LagartoParserConfig } import jodd.lagarto.dom.{ LagartoDOMBuilder, LagartoDomBuilderConfig } private[gatling] object Lagarto extends StrictLogging { private val ParserConfig = new LagartoParserConfig() .setEnableConditionalComments(false) .setEnableRawTextModes(false) private val DomBuilderConfig = { val config = new LagartoDomBuilderConfig() config.setParserConfig(ParserConfig) if (logger.underlying.isDebugEnabled) { config.setErrorLogConsumer((logger, message) => logger.debug(message)) } else { config.setErrorLogEnabled(false) } config } def newLagartoDomBuilder: LagartoDOMBuilder = new LagartoDOMBuilder(DomBuilderConfig) def newLagartoParser(chars: Array[Char]): LagartoParser = new LagartoParser(ParserConfig, chars) }
gatling/gatling
gatling-core/src/main/scala/io/gatling/core/check/css/Lagarto.scala
Scala
apache-2.0
1,544
package nl.soqua.lcpi.interpreter.show import org.scalatest.{Matchers, WordSpec} class ShowSpec extends WordSpec with Matchers { import Show._ "Show" should { "render an empty string" in { val str: String = Show.empty str shouldBe "" } "composing values should be ok" in { val str: String = string("abc") compose string("def") str shouldBe "abcdef" } "composing with empty is fine as well" in { val str: String = Show.empty compose string("abc") compose Show.empty compose string("def") compose Show.empty str shouldBe "abcdef" } "wrap a composition with parentheses" in { val f = string("a") compose string("z") val g = parenthesize apply f val parenized: String = string("_") compose g parenized shouldBe "_(az)" } "implicitly convert a string to a show" in { val f: String = string("a") compose "b" f shouldBe "ab" } } }
kevinvandervlist/lcpi
interpreter/src/test/scala/nl/soqua/lcpi/interpreter/show/ShowSpec.scala
Scala
mit
945
package justin.db import akka.remote.testkit.{MultiNodeConfig, MultiNodeSpec} import com.typesafe.config.ConfigFactory final class ConvergeJustinDBClusterConfig extends MultiNodeConfig { val first = role("first") val second = role("second") val third = role("third") private[this] val allRoles = List(first, second, third) private[this] val clusterName = "ConvergeJustinDBClusterSpec" private[this] def commonNodeConfig(id: Int) = ConfigFactory.parseString( s""" |justin.system = $clusterName |justin.kubernetes-hostname = s"justindb-$id" |justin.http.port = ${9000 + id} |akka.cluster.role.storagenode.min-nr-of-members = ${allRoles.size} |akka.cluster.http.management.port = ${19999 + id} |akka.cluster.seed-nodes.0 = "akka.tcp://$clusterName@localhost:25551" |akka.remote.netty.tcp.port = ${25551 + id} |akka.remote.netty.tcp.hostname = "localhost" """.stripMargin ) commonConfig(MultiNodeClusterSpec.commonBaseConfig.withFallback(JustinDBConfig.init.config)) allRoles.zipWithIndex.foreach { case (roleName, id) => nodeConfig(roleName)(commonNodeConfig(id)) } } final class ConvergeJustinDBClusterSpecMultiJvmNode1 extends ConvergeJustinDBClusterSpec final class ConvergeJustinDBClusterSpecMultiJvmNode2 extends ConvergeJustinDBClusterSpec final class ConvergeJustinDBClusterSpecMultiJvmNode3 extends ConvergeJustinDBClusterSpec abstract class ConvergeJustinDBClusterSpec(config: ConvergeJustinDBClusterConfig) extends MultiNodeSpec(config) with MultiNodeClusterSpec { def this() = this(new ConvergeJustinDBClusterConfig()) "A cluster" must { "be able to form" in { val config = new JustinDBConfig(system.settings.config) val justinDB = JustinDB.init(config)(system) enterBarrier("justindb-cluster-up") } } }
speedcom/JustinDB
src/multi-jvm/scala/justin/db/ConvergeJustinDBClusterSpec.scala
Scala
apache-2.0
2,010
package io.tabmo.aerospike import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.Future import com.aerospike.client.query.IndexType import com.aerospike.client.{Value, Bin => AEBin} import io.tabmo.aerospike.data.{AerospikeKey, AerospikeKeyConverter, AerospikeRecord, Bin} import io.tabmo.aerospike.converter.key._ class QueryUsage extends CustomSpec with AerospikeClientTest { val ns = "test" val set = "unittest2" def clean(key: AerospikeKey[_], keys: AerospikeKey[_]*) = (key +: keys).foreach(k => ready(client.delete(k))) def clean[K](data: Map[AerospikeKey[K], Seq[AEBin]]) = data.keys.foreach(k => ready(client.delete(k))) def insert[K](data: Map[AerospikeKey[K], Seq[AEBin]])(implicit keyConv: AerospikeKeyConverter[K]) = data.map { case (key, bins) => ready(client.put(key, bins)) } def init() = { val data = Map( AerospikeKey(ns, set, 1) -> Seq(Bin("id", 1000), Bin("name", "julien"), Bin("age", 20)), AerospikeKey(ns, set, 2) -> Seq(Bin("id", 1000), Bin("name", "thomas"), Bin("age", 18)), AerospikeKey(ns, set, 3) -> Seq(Bin("id", 2000), Bin("name", "thomas"), Bin("age", 22)), AerospikeKey(ns, set, 4) -> Seq(Bin("id", 2001), Bin("name", "pierre"), Bin("age", 23)), AerospikeKey(ns, set, 5) -> Seq(Bin("id", 2010), Bin("name", "henri"), Bin("age", 21)) ) insert(data) val index1 = result(client.createIndex(ns, set, "id", IndexType.NUMERIC)) val index2 = result(client.createIndex(ns, set, "name", IndexType.STRING)) val index3 = result(client.createIndex(ns, set, "age", IndexType.NUMERIC)) Thread.sleep(4000) (data, Seq(index1, index2, index3)) } def reset[K](data: Map[AerospikeKey[K], Seq[AEBin]], indices: Seq[String]) = { indices.foreach(i => ready(client.dropIndex(ns, set, i))) clean(data) clean(data) } "EQUALs query" should { "list records by querying a LONG field" in { val (data, indices) = init() val result: Future[Map[AerospikeKey[Long], AerospikeRecord]] = client.queryEqual[Long, Long](ns, set, Seq("id", "name"), "id", 1000) whenReady(result) { r => assert { r.size === 2 } assert { r.head._2.getLong("id") === 1000 } assert { r.values.forall { r => val name = r.getString("name") name == "julien" || name == "thomas" } } } reset(data, indices) } "list records by querying a STRING field" in { val (data, indices) = init() val result = client.queryEqual[Long, String](ns, set, Seq("id", "name"), "name", "thomas") whenReady(result) { r => assert { r.size === 2 } assert { r.head._2.getString("name") === "thomas" } assert { r.values.forall { r => val id = r.getLong("id") id == 1000 || id == 2000 } } } reset(data, indices) } "allow to select all bins in a query" in { val (data, indices) = init() val result = client.queryEqual[Long, Long](ns, set, Seq.empty, "id", 1000) whenReady(result) { r => assert { r.head._2.bins.keys.size === 3 } } reset(data, indices) } "allow to filter bins in a query" in { val (data, indices) = init() val result = client.queryEqual[Long, Long](ns, set, Seq("id"), "id", 1000) whenReady(result) { r => assert { r.head._2.bins.keys.size === 1 } } reset(data, indices) } } "RANGE queries" should { "list records by querying on LONG field" in { val (data, indices) = init() val result = client.queryRange[Long](ns, set, Seq("id", "name"), "id", 2000, 2100) whenReady(result) { r => val records = r.values.toList assert { records.size === 3 } assert { records.map(_.getLong("id")).sum === 6011 } assert { records.exists(_.getString("name") == "henri") } } reset(data, indices) } } "queryEqualAggregate operation" should { "return the map result as AerospikeRecord" in { 
val (data, indices) = init() ready(client.registerUDF(this.getClass.getClassLoader, "persons.lua", "persons.lua")) val result = client.queryEqualAggregate(ns, set, "name", "thomas", this.getClass.getClassLoader, "persons.lua", "persons", "filterByAge", Seq(Value.get(19))) whenReady(result) { r => assert { r.size === 1 } assert { r.head.getLong("age") === 22 } assert { r.head.getOptString("age") === None } } ready(client.removeUDF("persons.lua")) reset(data, indices) } } "parallel queryEqualAggregate operation" should { "test" in { val (data, indices) = init() ready(client.registerUDF(this.getClass.getClassLoader, "persons.lua", "persons.lua")) val results = 0.to(10).map { i => client.queryEqualAggregate(ns, set, "name", "thomas", this.getClass.getClassLoader, "persons.lua", "persons", "filterByAge", Seq(Value.get(18 + i))) } whenReady(Future.sequence(results)) { r => r.size === 10 r.map(_.map(_.getLong("age")).sum).sum === data.map(_._2.find(_.name == "age").get.value.toLong).sum } ready(client.removeUDF("persons.lua")) reset(data, indices) } } "queryRangeAggregate operation" should { "return the map result as AerospikeRecord" in { val (data, indices) = init() ready(client.registerUDF(this.getClass.getClassLoader, "persons.lua", "persons.lua")) val result = client.queryRangeAggregate(ns, set, "age", 17, 19, this.getClass.getClassLoader, "persons.lua", "persons", "filterByAge", Seq(Value.get(18))) whenReady(result) { r => assert { r.size === 1 } assert { r.head.getLong("age") === 18 } assert { r.head.getOptString("age") === None } } ready(client.removeUDF("persons.lua")) reset(data, indices) } } }
tabmo/ReactiveAerospike
src/test/scala/QueryUsage.scala
Scala
apache-2.0
6,315
package com.twitter.finatra.http.tests.integration.messagebody.test import com.twitter.finagle.http.MediaType import com.twitter.finagle.http.Status import com.twitter.finatra.http.EmbeddedHttpServer import com.twitter.finatra.http.tests.integration.messagebody.main.GreetingServer import com.twitter.inject.server.FeatureTest class GreetingControllerIntegrationTest extends FeatureTest { override val server = new EmbeddedHttpServer(new GreetingServer) val requestPath = "/greet?name=Bob" test("get English greeting") { server.httpGet(path = requestPath, andExpect = Status.Ok, withBody = "Hello Bob") } test("get Spanish greeting") { server.httpGet( path = requestPath, headers = Map("Accept-Language" -> "es"), andExpect = Status.Ok, withBody = "Hola Bob" ) } test("get English json greeting") { server.httpGet( path = requestPath, accept = MediaType.JsonUtf8, andExpect = Status.Ok, withJsonBody = """{ "greeting" : "Hello Bob" }""" ) } test("get Spanish json greeting") { server.httpGet( path = requestPath, accept = MediaType.JsonUtf8, headers = Map("Accept-Language" -> "es"), andExpect = Status.Ok, withJsonBody = """{ "greeting" : "Hola Bob" }""" ) } }
twitter/finatra
http-server/src/test/scala/com/twitter/finatra/http/tests/integration/messagebody/test/GreetingControllerIntegrationTest.scala
Scala
apache-2.0
1,296
/* * Copyright 2015 HM Revenue & Customs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package uk.gov.hmrc.ct.ct600.v3 import uk.gov.hmrc.ct.box._ case class B80A(value: Option[Boolean]) extends CtBoxIdentifier("Is a repayment due for this period") with CtOptionalBoolean with Input { }
keithhall/ct-calculations
src/main/scala/uk/gov/hmrc/ct/ct600/v3/B80A.scala
Scala
apache-2.0
807
package scalaz.stream.mongodb.index import com.mongodb.{BasicDBObjectBuilder, DBObject} import scalaz.stream.mongodb.collectionSyntax._ import collection._ import scala.collection.JavaConverters._ /** * Configuration of the index. * Please see [[http://docs.mongodb.org/manual/reference/method/db.collection.ensureIndex/#db.collection.ensureIndex]] * for explanation of the fields * */ case class CollectionIndex(keys: Map[String, Order.Value], background: Option[Boolean] = None, unique: Option[Boolean] = None, name: Option[String] = None, dropDups: Option[Boolean] = None, sparse: Option[Boolean] = None, expireAfterSeconds: Option[Int] = None, v: Option[Int] = None, weights: Option[Map[String, Int]] = None, defaultLanguage: Option[String] = None, languageOverride: Option[String] = None) { def background(b: Boolean): CollectionIndex = copy(background = Some(b)) def unique(b: Boolean): CollectionIndex = copy(unique = Some(b)) def name(s: String): CollectionIndex = copy(name = Some(s)) def dropDups(b: Boolean): CollectionIndex = copy(dropDups = Some(b)) def sparse(b: Boolean): CollectionIndex = copy(sparse = Some(b)) def expireAfterSeconds(i: Int): CollectionIndex = copy(expireAfterSeconds = Some(i)) def version(i: Int): CollectionIndex = copy(v = Some(i)) def weights(h: (String, Int), t: (String, Int)*): CollectionIndex = copy(weights = Some((h +: t).toMap)) def defaultLanguage(s: String): CollectionIndex = copy(defaultLanguage = Some(s)) def languageOverride(s: String): CollectionIndex = copy(languageOverride = Some(s)) lazy val optionsAsBson: DBObject = { BasicDBObjectBuilder.start(( background.map(b => "background" -> b).toSeq ++ unique.map(b => "unique" -> b) ++ name.map(s => "name" -> s) ++ dropDups.map(b => "dropDups" -> b) ++ sparse.map(b => "sparse" -> b) ++ expireAfterSeconds.map(i => "expireAfterSeconds" -> i) ++ v.map(i => "v" -> i) ++ weights.map(m => "weights" -> BasicDBObjectBuilder.start(m.asJava).get) ++ defaultLanguage.map(s => "defaultLanguage" -> s) ++ languageOverride.map(s => "languageOverride" -> s) ).toMap.asJava).get } lazy val keysAsBson: DBObject = { BasicDBObjectBuilder.start( keys.collect { case (k, Order.Ascending) => (k, 1) case (k, Order.Descending) => (k, -1) }.asJava).get } }
Spinoco/scalaz-stream-mongodb
core/src/main/scala/scalaz/stream/mongodb/index/CollectionIndex.scala
Scala
mit
2,984
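A small sketch of the builder-style API above; the field names are invented, Order is the ascending/descending enumeration referenced by keysAsBson, and the BSON shown in the comments is approximate (key order may differ).

// A unique, sparse index on "email" (ascending) plus "createdAt" (descending).
val idx = CollectionIndex(Map("email" -> Order.Ascending, "createdAt" -> Order.Descending))
  .unique(true)
  .sparse(true)
  .name("email_createdAt_idx")

idx.keysAsBson    // roughly { "email" : 1, "createdAt" : -1 }
idx.optionsAsBson // roughly { "unique" : true, "sparse" : true, "name" : "email_createdAt_idx" }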
package org.openurp.edu.eams.teach.lesson.task.dao.hibernate import org.beangle.data.jpa.hibernate.HibernateEntityDao import org.hibernate.SQLQuery import org.hibernate.`type`.StandardBasicTypes import org.openurp.edu.eams.teach.lesson.task.dao.LessonStatDao class LessonStatDaoHibernate extends HibernateEntityDao with LessonStatDao { def statTeacherTitle(semesters: List[_]): List[_] = { val queryString = "select XNXQID,jszcid,count(*) as num from (" + " select distinct teachtask0_.XNXQID as XNXQID, teacher2_.id as jzgid,teacher2_.JSZCID as JSZCID" + " from JXRW_T teachtask0_ inner join JXRW_LS_T teachers1_ on teachtask0_.id=teachers1_.JXRWID" + " inner join JCXX_JZG_T teacher2_ on teachers1_.LSID=teacher2_.id where" + " (teachtask0_.XNXQID in (:semesterIds))" + " )group by XNXQID,jszcid" val query = currentSession.createSQLQuery(queryString) query.setParameterList("semesterIds", EntityUtils.extractIds(semesters)) query.addScalar("XNXQID", StandardBasicTypes.LONG) query.addScalar("jszcid", StandardBasicTypes.LONG) query.addScalar("num", StandardBasicTypes.INTEGER) query.list() } }
openurp/edu-eams-webapp
schedule/src/main/scala/org/openurp/edu/eams/teach/lesson/task/dao/hibernate/LessonStatDaoHibernate.scala
Scala
gpl-3.0
1,185
package nl.codecentric.assumption.dsl import nl.codecentric.assumption.dsl.core.definition.BaseDefinition import org.json4s.native.JsonMethods._ import scala.concurrent.duration._ import scalaj.http.{Http, HttpResponse} /** * Created by hylke on 20/07/15. */ class RegistrationDefinition extends BaseDefinition { implicit lazy val formats = org.json4s.DefaultFormats "Get the current count of registrations" baseline (() => { val baselineCount = getRegistrationCount() println("Baseline count is", baselineCount) }) "The amount of registrations is increased with 50" assumes (() => { val assumeCount = getRegistrationCount() println("Assume count is", assumeCount) }) "In one week" time (1 minutes) "Send a cake to the development team" success (() => { println("Sending cake") }) "Send a message to the product owner" failure (() => { println("Having a firm talk with the product owner") }) def getRegistrationCount(): Integer = { val response: HttpResponse[String] = Http("http://33.33.33.60:8888/count") .timeout(connTimeoutMs = 1000, readTimeoutMs = 5000) .asString val json = parse(response.body); (json \\ "count").extract[Integer] } }
craftsmenlabs/gareth-poc
dsl/examples/src/main/scala/nl/codecentric/assumption/dsl/RegistrationDefinition.scala
Scala
gpl-2.0
1,226
package net.ceedubs.ficus package readers import java.time.temporal.ChronoUnit import com.typesafe.config.ConfigFactory import Ficus.{chronoUnitReader, toFicusConfig} class ChronoUnitReaderSpec extends Spec { def is = s2""" The ChronoUnitReader should read a ChronoUnit $readChronoUnit read a lower case ChronoUnit $readChronoUnitLowerCase """ def readChronoUnit = { val cfg = ConfigFactory.parseString(s""" | foo { | chrono-unit = "MILLIS" | } """.stripMargin) val chronoUnit = cfg.as[ChronoUnit]("foo.chrono-unit") val expected = ChronoUnit.MILLIS chronoUnit should_== expected } def readChronoUnitLowerCase = { val cfg = ConfigFactory.parseString(s""" | foo { | chrono-unit = "millis" | } """.stripMargin) val chronoUnit = cfg.as[ChronoUnit]("foo.chrono-unit") val expected = ChronoUnit.MILLIS chronoUnit should_== expected } }
mdedetrich/ficus
src/test/scala/net/ceedubs/ficus/readers/ChronoUnitReaderSpec.scala
Scala
mit
1,217
(A => A) => F[A]
hmemcpy/milewski-ctfp-pdf
src/content/1.10/code/scala/snippet27.scala
Scala
gpl-3.0
16
package 练习30 case class Item(name: String) { override def toString: String = name } trait Result case class ResultP(tail: Result, head: Item) extends Result { println("result: " + head) override def toString: String = s"($tail, $head)" } case object ResultO extends Result { println("result: " + this) override def toString: String = s"zero" } trait NumL { def methodR(num: NumR): Result } case class NumLP(tail: NumL, head: Item) extends NumL { override def methodR(num: NumR): Result = num.methodL(tail, head) } case object NumLO extends NumL { override def methodR(num: NumR): Result = ResultO } trait NumR { def tail: NumR def methodL(num: NumL, item: Item): Result } case class NumRP(override val tail: NumR, head: Item) extends NumR { override def methodL(num: NumL, item: Item): Result = { println("method: " + head) ResultP(tail.methodL(num, item), head) } } trait NumRO extends NumR { override def methodL(num: NumL, item: Item): Result = ResultP(num.methodR(tail), item) } object NumRO { val value: NumRO = new NumRO { override def tail: NumR = value } }
djx314/ubw
a28-练习/src/main/scala/练习30/加法.scala
Scala
bsd-3-clause
1,115
package chat.tox.antox.utils import java.lang.Iterable import android.content.ContentValues import chat.tox.antox.data.ClosedCursor import com.squareup.sqlbrite.BriteDatabase import rx.lang.scala.JavaConversions._ import rx.lang.scala.Observable //wrapper to make sqlbrite expose scala observables class BriteScalaDatabase(db: BriteDatabase) { def newTransaction(): BriteDatabase.Transaction = db.newTransaction() def close(): Unit = db.close() def createQuery(table: String, sql: String, args: String*): Observable[ClosedCursor] = { val observable = db.createQuery(table, sql, args: _*).asObservable() toScalaObservable(observable).map(query => ClosedCursor(query.run())) } def createQuery(tables: Iterable[String], sql: String, args: String*): Observable[ClosedCursor] = { val observable = db.createQuery(tables, sql, args: _*).asObservable() toScalaObservable(observable).map(query => ClosedCursor(query.run())) } def delete(table: String, whereClause: String, whereArgs: String*): Int = db.delete(table, whereClause, whereArgs: _*) def insert(table: String, values: ContentValues): Long = db.insert(table, values) def insert(table: String, values: ContentValues, conflictAlgorithm: Int): Long = db.insert(table, values, conflictAlgorithm) def query(sql: String, args: String*): ClosedCursor = { ClosedCursor(db.query(sql, args: _*)) } def setLoggingEnabled(enabled: Boolean): Unit = db.setLoggingEnabled(enabled) def update(table: String, values: ContentValues, conflictAlgorithm: Int, whereClause: String, whereArgs: String*): Int = db.update(table, values, conflictAlgorithm, whereClause, whereArgs: _*) def update(table: String, values: ContentValues, whereClause: String, whereArgs: String*): Int = db.update(table, values, whereClause, whereArgs: _*) }
wiiam/Antox
app/src/main/scala/chat/tox/antox/utils/BriteScalaDatabase.scala
Scala
gpl-3.0
1,843
package debop4s.core.collections

import scala.reflect.ClassTag

/**
 * Companion object for [[BoundedStack]].
 */
object BoundedStack {

  def apply[A: ClassTag](maxSize: Option[Int] = None): BoundedStack[A] =
    new BoundedStack[A](maxSize.getOrElse(16))
}

/**
 * A stack with a bounded size. When more elements than the bound are pushed,
 * the oldest (bottom-most) element is dropped.
 */
class BoundedStack[@miniboxed A: ClassTag](val maxSize: Int) extends Seq[A] {

  require(maxSize > 0, s"maxSize should be greater than 0")

  private val array = new Array[A](maxSize)
  private var top: Int = 0
  private var _count: Int = 0

  def length: Int = _count
  override def size: Int = _count

  def clear(): Unit = {
    top = 0
    _count = 0
  }

  /** Returns the element at the given index. */
  def apply(index: Int): A = {
    if (index >= _count) throw new IndexOutOfBoundsException(index.toString)
    else array((top + index) % maxSize)
  }

  def +=(elem: A): Unit = {
    top = if (top == 0) maxSize - 1 else top - 1
    array(top) = elem
    if (_count < maxSize) _count += 1
  }

  def insert(index: Int, elem: A): Unit = {
    if (index == 0) this += elem
    else if (index > _count) throw new IndexOutOfBoundsException(index.toString)
    else if (index == _count) {
      array((top + index) % maxSize) = elem
      _count += 1
    } else {
      val swapped = this(index)
      this(index) = elem
      insert(index - 1, swapped)
    }
  }

  /**
   * Replaces the element at the given index with `elem`.
   */
  def update(index: Int, elem: A): Unit = {
    array((top + index) % maxSize) = elem
  }

  def ++=(iter: Iterable[A]): Unit = {
    val it = iter.iterator
    while (it.hasNext) {
      this += it.next()
    }
  }

  /** Pops the top element. */
  def pop: A = {
    if (_count == 0) throw new NoSuchElementException
    else {
      val res = array(top)
      top = (top + 1) % maxSize
      _count -= 1
      res
    }
  }

  /** Pushes an element onto the stack. */
  def push(elem: A): Unit = {
    this += elem
  }

  override def iterator = new Iterator[A] {
    var idx = 0
    def hasNext = idx != _count
    def next() = {
      val res = apply(idx)
      idx += 1
      res
    }
  }
}
debop/debop4s
debop4s-core/src/main/scala/debop4s/core/collections/BoundedStack.scala
Scala
apache-2.0
2,273
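A brief usage sketch of the bounded stack above, showing how the oldest element is overwritten once the capacity is exceeded.

val stack = BoundedStack[Int](Some(3))
stack.push(1); stack.push(2); stack.push(3)
stack.push(4)  // capacity is 3, so the oldest element (1) is overwritten
stack.toList   // List(4, 3, 2) - most recently pushed first
stack.pop      // 4
stack.size     // 2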
package io.youi.server.dsl import io.youi.http.HttpConnection import scribe.Execution.global import scala.concurrent.Future case class ListConnectionFilter(filters: List[ConnectionFilter]) extends ConnectionFilter { override def filter(connection: HttpConnection): Future[FilterResponse] = firstPath(connection, filters) private def firstPath(connection: HttpConnection, filters: List[ConnectionFilter]): Future[FilterResponse] = if (filters.isEmpty) { Future.successful(FilterResponse.Stop(connection)) } else { val filter = filters.head filter.filter(connection).flatMap { case r: FilterResponse.Continue => Future.successful(r) case r: FilterResponse.Stop => firstPath(r.connection, filters.tail) } } }
outr/youi
server/src/main/scala/io/youi/server/dsl/ListConnectionFilter.scala
Scala
mit
769
package com.ubeeko.htalk.tests import org.scalatest._ import com.ubeeko.htalk.criteria._ import org.apache.hadoop.hbase.filter.FilterList import com.ubeeko.htalk.hbase.HTalkContext import com.ubeeko.htalk.hbase.TestHBaseManager class ScanSpec extends FlatSpec with Matchers { val userTable: Table = "user" case class User(val firstname: String, val name: Option[String]) implicit val htalkcontext = TestCommons.newContext() htalkcontext.createTable("user", Seq("p", "u")) "user" put("eleblouch", "p", "firstname", "Éric") put("eleblouch", "p", "name", "Le Blouc'h") put( "dcollard", "p", "firstname", "Damien") put("dcollard", "u", "uid", 1001) execute "Scan" should "be able to scan all rows" in { val select = "user" get rows count select.rows should be(2) select.cells should be(4) } it should "scan with families" in { val selectP = userTable get rows family("p") count selectP.rows should be(2) selectP.cells should be(3) val selectU = userTable get rows family("u") count selectU.rows should be(1) selectU.cells should be(1) } it should "scan with qualifiers" in { val selectF = userTable get rows family("p") qualifiers("firstname") count selectF.rows should be(2) selectF.cells should be(2) val selectA = userTable get rows family("p") qualifiers("firstname", "name") count selectA.rows should be(2) selectA.cells should be(3) val selectN = userTable get rows family("p") qualifiers("name") count selectN.rows should be(1) selectN.cells should be(1) } it should "scan using filters" in { val scanner = userTable get rows filter (new FilterList) } it should "be composable to entity" in { val composer = (rs: Result) => { User(rs.getValue("p", "firstname").as[String], rs.getValue("p", "name").asOpt[String]) } val select = (userTable get rows) ~ composer select should have size 2 select should contain(User("Éric", Some("Le Blouc'h"))) select should contain(User("Damien", None)) } // XXX Scan.getStartRow does not work with the fake implementation ignore should "scan all with a range start at first rowkey" in { val select = "user" get rows rangeStart("dcollard") count select.rows should be(2) select.cells should be(4) } // XXX Scan.getStartRow does not work with the fake implementation ignore should "exclude first row with a exclusive range start at first rowkey" in { val select = "user" get rows rangeStart("dcollard") excludeStart(true) count select.rows should be(1) select.cells should be(2) } }
eric-leblouch/htalk
src/test/scala/com/ubeeko/htalk/tests/ScanSpec.scala
Scala
apache-2.0
2,731
package fi.onesto.sbt.mobilizer import sbt.File object SbtCompat { val isArchive = sbt.classpath.ClasspathUtilities.isArchive(_: File) }
onesto/sbt-mobilizer
src/main/scala-sbt-0.13/fi/onesto/sbt/mobilizer/SbtCompat.scala
Scala
mit
142
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.deploy

private[spark] object ExecutorState extends Enumeration {

  val LAUNCHING, LOADING, RUNNING, KILLED, FAILED, LOST, EXITED = Value

  type ExecutorState = Value

  /**
   * Returns true when the executor has reached a terminal state.
   * The finished states are KILLED, FAILED, LOST and EXITED.
   * Note: "finished" here means terminated, not necessarily successful.
   *
   * @param state the executor state to check
   * @return true if the state is terminal
   */
  def isFinished(state: ExecutorState): Boolean = Seq(KILLED, FAILED, LOST, EXITED).contains(state)
}
Dax1n/spark-core
core/src/main/scala/org/apache/spark/deploy/ExecutorState.scala
Scala
apache-2.0
1,365
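For illustration, isFinished distinguishes terminal from live states as sketched below (callable only from code inside the org.apache.spark package, since the object is private[spark]).

import org.apache.spark.deploy.ExecutorState

ExecutorState.isFinished(ExecutorState.RUNNING) // false - the executor is still alive
ExecutorState.isFinished(ExecutorState.KILLED)  // true
ExecutorState.isFinished(ExecutorState.EXITED)  // true - finished, though not necessarily successfully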
package Scala.localSim trait Client { }
JulienBe/CodinGame
src/Scala/localSim/Client.scala
Scala
gpl-2.0
43
package enumeratum import scala.collection.immutable._ import scala.language.experimental.macros /** All the cool kids have their own Enumeration implementation, most of which try to do so in the * name of implementing exhaustive pattern matching. * * This is yet another one. * * Example: * * {{{ * scala> import enumeratum._ * * scala> sealed trait DummyEnum extends EnumEntry * * scala> object DummyEnum extends Enum[DummyEnum] { * | val values = findValues * | case object Hello extends DummyEnum * | case object GoodBye extends DummyEnum * | case object Hi extends DummyEnum * | } * * scala> DummyEnum.withNameOption("Hello") * res0: Option[DummyEnum] = Some(Hello) * * scala> DummyEnum.withNameOption("Nope") * res1: Option[DummyEnum] = None * }}} * * @tparam A * The sealed trait */ trait Enum[A <: EnumEntry] { /** Map of [[A]] object names to [[A]] s */ lazy val namesToValuesMap: Map[String, A] = values.map(v => v.entryName -> v).toMap ++ extraNamesToValuesMap /** Additional list of names which can be mapped to values, for example to allow mapping of legacy * values. * @return * a Map of names to Values */ def extraNamesToValuesMap: Map[String, A] = Map.empty[String, A] /** Map of [[A]] object names in lower case to [[A]] s for case-insensitive comparison */ lazy final val lowerCaseNamesToValuesMap: Map[String, A] = namesToValuesMap.map { case (k, v) => k.toLowerCase -> v } /** Map of [[A]] object names in upper case to [[A]] s for case-insensitive comparison */ lazy final val upperCaseNameValuesToMap: Map[String, A] = namesToValuesMap.map { case (k, v) => k.toUpperCase() -> v } /** Map of [[A]] to their index in the values sequence. * * A performance optimisation so that indexOf can be found in constant time. */ lazy final val valuesToIndex: Map[A, Int] = values.zipWithIndex.toMap /** The sequence of values for your [[Enum]]. You will typically want to implement this in your * extending class as a `val` so that `withName` and friends are as efficient as possible. * * Feel free to implement this however you'd like (including messing around with ordering, etc) * if that fits your needs better. */ def values: IndexedSeq[A] /** Tries to get an [[A]] by the supplied name. The name corresponds to the .name of the case * objects implementing [[A]] * * Like [[Enumeration]] 's `withName`, this method will throw if the name does not match any of * the values' .entryName values. */ @SuppressWarnings(Array("org.wartremover.warts.Throw")) def withName(name: String): A = withNameOption(name).getOrElse(throw new NoSuchElementException(buildNotFoundMessage(name))) /** Optionally returns an [[A]] for a given name. */ def withNameOption(name: String): Option[A] = namesToValuesMap.get(name) /** Returns an [[Right[A]] ] for a given name, or a [[Left[NoSuchMember]] ] if the name does not * match any of the values' .entryName values. */ def withNameEither(name: String): Either[NoSuchMember[A], A] = namesToValuesMap.get(name).toRight(NoSuchMember(name, values)) /** Tries to get an [[A]] by the supplied name. The name corresponds to the .name of the case * objects implementing [[A]], disregarding case * * Like [[Enumeration]] 's `withName`, this method will throw if the name does not match any of * the values' .entryName values. */ @SuppressWarnings(Array("org.wartremover.warts.Throw")) def withNameInsensitive(name: String): A = withNameInsensitiveOption(name).getOrElse( throw new NoSuchElementException(buildNotFoundMessage(name)) ) /** Tries to get an [[A]] by the supplied name. 
The name corresponds to the .name of the case * objects implementing [[A]] transformed to upper case * * Like [[Enumeration]] 's `withName`, this method will throw if the name does not match any of * the values' .entryName values. */ @SuppressWarnings(Array("org.wartremover.warts.Throw")) def withNameUppercaseOnly(name: String): A = withNameUppercaseOnlyOption(name).getOrElse( throw new NoSuchElementException(buildNotFoundMessage(name)) ) /** Tries to get an [[A]] by the supplied name. The name corresponds to the .name of the case * objects implementing [[A]] transformed to lower case * * Like [[Enumeration]] 's `withName`, this method will throw if the name does not match any of * the values' .entryName values. */ @SuppressWarnings(Array("org.wartremover.warts.Throw")) def withNameLowercaseOnly(name: String): A = withNameLowercaseOnlyOption(name).getOrElse( throw new NoSuchElementException(buildNotFoundMessage(name)) ) /** Optionally returns an [[A]] for a given name, disregarding case */ def withNameInsensitiveOption(name: String): Option[A] = lowerCaseNamesToValuesMap.get(name.toLowerCase) /** Optionally returns an [[A]] for a given name assuming the value is upper case */ def withNameUppercaseOnlyOption(name: String): Option[A] = upperCaseNameValuesToMap.get(name) /** Optionally returns an [[A]] for a given name assuming the value is lower case */ def withNameLowercaseOnlyOption(name: String): Option[A] = lowerCaseNamesToValuesMap.get(name) /** Returns an [[Right[A]] ] for a given name, or a [[Left[NoSuchMember]] ] if the name does not * match any of the values' .entryName values, disregarding case. */ def withNameInsensitiveEither(name: String): Either[NoSuchMember[A], A] = lowerCaseNamesToValuesMap.get(name.toLowerCase).toRight(NoSuchMember(name, values)) /** Returns an [[Right[A]] ] for a given name, or a [[Left[NoSuchMember]] ] if the name does not * match any of the values' .entryName values, disregarding case. */ def withNameUppercaseOnlyEither(name: String): Either[NoSuchMember[A], A] = upperCaseNameValuesToMap.get(name).toRight(NoSuchMember(name, values)) /** Returns an [[Right[A]] ] for a given name, or a [[Left[NoSuchMember]] ] if the name does not * match any of the values' .entryName values, disregarding case. */ def withNameLowercaseOnlyEither(name: String): Either[NoSuchMember[A], A] = lowerCaseNamesToValuesMap.get(name).toRight(NoSuchMember(name, values)) /** Returns the index number of the member passed in the values picked up by this enum * * @param member * the member you want to check the index of * @return * the index of the first element of values that is equal (as determined by ==) to member, or * -1, if none exists. */ def indexOf(member: A): Int = valuesToIndex.getOrElse(member, -1) /** Method that returns a Seq of [[A]] objects that the macro was able to find. * * You will want to use this in some way to implement your [[values]] method. In fact, if you * aren't using this method...why are you even bothering with this lib? */ protected def findValues: IndexedSeq[A] = macro EnumMacros.findValuesImpl[A] private def buildNotFoundMessage(notFoundName: String): String = { s"$notFoundName is not a member of Enum ($existingEntriesString)" } private lazy val existingEntriesString = values.map(_.entryName).mkString(", ") } object Enum { /** Finds the Enum companion object for a particular EnumEntry */ implicit def materializeEnum[A <: EnumEntry]: Enum[A] = macro EnumMacros.materializeEnumImpl[A] }
lloydmeta/enumeratum
enumeratum-core/src/main/scala/enumeratum/Enum.scala
Scala
mit
7,555
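A minimal usage sketch for the Enum trait above (enumeratum-core), assuming the enumeratum library is on the classpath; Weekday and EnumDemo are illustrative names only.

import enumeratum._

sealed trait Weekday extends EnumEntry

object Weekday extends Enum[Weekday] {
  // findValues is the macro member of Enum; it collects the case objects declared below
  val values = findValues

  case object Monday  extends Weekday
  case object Tuesday extends Weekday
}

object EnumDemo extends App {
  // Exact-name lookups backed by namesToValuesMap
  println(Weekday.withNameOption("Monday"))             // Some(Monday)
  println(Weekday.withNameOption("Funday"))             // None

  // Case-insensitive and Either-returning variants
  println(Weekday.withNameInsensitiveOption("tueSDAY")) // Some(Tuesday)
  println(Weekday.withNameEither("Funday"))             // Left(NoSuchMember(...))

  // Constant-time index lookup backed by valuesToIndex
  println(Weekday.indexOf(Weekday.Tuesday))             // 1
}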
package controllers import model.json.LoginJson import model.{Computer, Role, User} import org.mockito.Matchers._ import org.mockito.Mockito._ import play.api.Environment import play.api.i18n.MessagesApi import services.state.ActionState import services.{UserService, state} import test.ControllerTest import scala.concurrent.{ExecutionContext, Future} /** * @author Camilo Sampedro <camilo.sampedro@udea.edu.co> */ trait LoginControllerSpec extends ControllerTest { // Mocked LoginController dependencies lazy val messagesApi = mock[MessagesApi] implicit lazy val environment = mock[Environment] val userToBeUsed = User("user","password",Some("User Name"),Role.NormalUser) val userToBeUsed2 = User("user2","password2",Some("User Name Two"),Role.Administrator) val userList = List(userToBeUsed,userToBeUsed2) /** * Execution context is a particular exception to the mocked dependencies */ implicit lazy val executionContext: ExecutionContext = ExecutionContext.global /** * Mocked user service methods for testing only the controller * @param actionState Action state to be returned when the mocked methods are executed * @return Mocked user service */ def mockUserService(actionState: ActionState): UserService = { // Mock the user service lazy val userService = mock[UserService] val user = if(actionState == state.ActionCompleted){ Some(userToBeUsed) } else { None } // This state will be used for methods that don't have other states than ActionCompleted and Failed val alternativeState = if(actionState!=state.ActionCompleted){ state.Failed } else { actionState } when(userService.checkAndGet(any[LoginJson])) thenReturn(Future.successful(user)) when(userService.checkAndGet(any[String],any[String])) thenReturn(Future.successful(user)) when(userService.add(any[User])) thenReturn(Future.successful(actionState)) when(userService.get(any[String])) thenReturn(Future.successful(user)) when(userService.listAll) thenReturn(Future.successful(userList)) userService } }
ProjectAton/AtonLab
test/controllers/LoginControllerSpec.scala
Scala
gpl-3.0
2,120
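A hedged sketch of exercising the mocked service built by mockUserService above; the Await-based assertions are illustrative only and assume they run inside a spec that mixes in LoginControllerSpec.

import scala.concurrent.Await
import scala.concurrent.duration._
import services.state

// Inside a spec that mixes in LoginControllerSpec:
val userService = mockUserService(state.ActionCompleted)

// With ActionCompleted, every stubbed lookup resolves to the predefined test user
assert(Await.result(userService.get("user"), 1.second).contains(userToBeUsed))
assert(Await.result(userService.listAll, 1.second) == userList)

// With any other state the lookups resolve to None instead
assert(Await.result(mockUserService(state.Failed).get("user"), 1.second).isEmpty)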
package scoverage import sbt.Keys._ import sbt._ import scoverage.ScoverageKeys._ import scoverage.report.{CoverageAggregator, CoberturaXmlWriter, ScoverageHtmlWriter, ScoverageXmlWriter} object ScoverageSbtPlugin extends AutoPlugin { val OrgScoverage = "org.scoverage" val ScalacRuntimeArtifact = "scalac-scoverage-runtime" val ScalacPluginArtifact = "scalac-scoverage-plugin" val ScoverageVersion = "1.2.0" val autoImport = ScoverageKeys import autoImport._ val aggregateFilter = ScopeFilter( inAggregates(ThisProject), inConfigurations(Compile) ) // must be outside of the 'coverageAggregate' task (see: https://github.com/sbt/sbt/issues/1095 or https://github.com/sbt/sbt/issues/780) override def requires = plugins.JvmPlugin override def trigger = allRequirements override lazy val projectSettings = Seq( coverageEnabled := false, commands += Command.command("coverage", "enable compiled code with instrumentation", "")(toggleCoverage(true)), commands += Command.command("coverageOff", "disable compiled code with instrumentation", "")(toggleCoverage(false)), coverageReport <<= coverageReport0, coverageAggregate <<= coverageAggregate0, libraryDependencies ++= Seq( OrgScoverage % (ScalacRuntimeArtifact + "_" + scalaBinaryVersion.value) % ScoverageVersion % "provided" intransitive(), OrgScoverage % (ScalacPluginArtifact + "_" + scalaBinaryVersion.value) % ScoverageVersion % "provided" intransitive() ), scalacOptions in(Compile, compile) ++= scoverageScalacOptions.value, aggregate in coverageAggregate := false, coverageExcludedPackages := "", coverageExcludedFiles := "", coverageMinimum := 0, // default is no minimum coverageFailOnMinimum := false, coverageHighlighting := true, coverageOutputXML := true, coverageOutputHTML := true, coverageOutputCobertura := true, coverageOutputDebug := false, coverageCleanSubprojectFiles := true, coverageOutputTeamCity := false ) /** * The "coverage" command enables or disables instrumentation for all projects * in the build. 
*/ private def toggleCoverage(status:Boolean): State => State = { state => val extracted = Project.extract(state) val newSettings = extracted.structure.allProjectRefs map { proj => coverageEnabled in proj := status } extracted.append(newSettings, state) } private lazy val coverageReport0 = Def.task { val target = crossTarget.value val log = streams.value.log log.info(s"Waiting for measurement data to sync...") Thread.sleep(1000) // have noticed some delay in writing on windows, hacky but works loadCoverage(target, log) match { case Some(cov) => writeReports( target, (sourceDirectories in Compile).value, cov, coverageOutputCobertura.value, coverageOutputXML.value, coverageOutputHTML.value, coverageOutputDebug.value, coverageOutputTeamCity.value, log) checkCoverage(cov, log, coverageMinimum.value, coverageFailOnMinimum.value) case None => log.warn("No coverage data, skipping reports") } } private lazy val coverageAggregate0 = Def.task { val log = streams.value.log log.info(s"Aggregating coverage from subprojects...") val xmlReportFiles = crossTarget.all(aggregateFilter).value map (_ / "scoverage-report" / Constants.XMLReportFilename) filter (_.isFile()) CoverageAggregator.aggregate(xmlReportFiles, coverageCleanSubprojectFiles.value) match { case Some(cov) => writeReports( crossTarget.value, sourceDirectories.all(aggregateFilter).value.flatten, cov, coverageOutputCobertura.value, coverageOutputXML.value, coverageOutputHTML.value, coverageOutputDebug.value, coverageOutputTeamCity.value, log) val cfmt = cov.statementCoverageFormatted log.info(s"Aggregation complete. Coverage was [$cfmt]") checkCoverage(cov, log, coverageMinimum.value, coverageFailOnMinimum.value) case None => log.info("No subproject data to aggregate, skipping reports") } } private lazy val scoverageScalacOptions = Def.task { val scoverageDeps: Seq[File] = update.value matching configurationFilter("provided") scoverageDeps.find(_.getAbsolutePath.contains(ScalacPluginArtifact)) match { case None => throw new Exception(s"Fatal: $ScalacPluginArtifact not in libraryDependencies") case Some(pluginPath) => scalaArgs(coverageEnabled.value, pluginPath, crossTarget.value, coverageExcludedPackages.value, coverageExcludedFiles.value, coverageHighlighting.value) } } private def scalaArgs(coverageEnabled: Boolean, pluginPath: File, target: File, excludedPackages: String, excludedFiles: String, coverageHighlighting: Boolean) = { if (coverageEnabled) { Seq( Some(s"-Xplugin:${pluginPath.getAbsolutePath}"), Some(s"-P:scoverage:dataDir:${target.getAbsolutePath}/scoverage-data"), Option(excludedPackages.trim).filter(_.nonEmpty).map(v => s"-P:scoverage:excludedPackages:$v"), Option(excludedFiles.trim).filter(_.nonEmpty).map(v => s"-P:scoverage:excludedFiles:$v"), // rangepos is broken in some releases of scala so option to turn it off if (coverageHighlighting) Some("-Yrangepos") else None ).flatten } else { Nil } } private def writeReports(crossTarget: File, compileSourceDirectories: Seq[File], coverage: Coverage, coverageOutputCobertura: Boolean, coverageOutputXML: Boolean, coverageOutputHTML: Boolean, coverageDebug: Boolean, coverageOutputTeamCity: Boolean, log: Logger): Unit = { log.info(s"Generating scoverage reports...") val coberturaDir = crossTarget / "coverage-report" val reportDir = crossTarget / "scoverage-report" coberturaDir.mkdirs() reportDir.mkdirs() if (coverageOutputCobertura) { log.info(s"Written Cobertura report [${coberturaDir.getAbsolutePath}/cobertura.xml]") new CoberturaXmlWriter(compileSourceDirectories, 
coberturaDir).write(coverage) } if (coverageOutputXML) { log.info(s"Written XML coverage report [${reportDir.getAbsolutePath}/scoverage.xml]") new ScoverageXmlWriter(compileSourceDirectories, reportDir, false).write(coverage) if (coverageDebug) { new ScoverageXmlWriter(compileSourceDirectories, reportDir, true).write(coverage) } } if (coverageOutputHTML) { log.info(s"Written HTML coverage report [${reportDir.getAbsolutePath}/index.html]") new ScoverageHtmlWriter(compileSourceDirectories, reportDir).write(coverage) } if (coverageOutputTeamCity) { log.info("Writing coverage report to teamcity") reportToTeamcity(coverage, coverageOutputHTML, reportDir, crossTarget, log) } log.info(s"Statement coverage.: ${coverage.statementCoverageFormatted}%") log.info(s"Branch coverage....: ${coverage.branchCoverageFormatted}%") log.info("Coverage reports completed") } private def reportToTeamcity(coverage: Coverage, createCoverageZip: Boolean, reportDir: File, crossTarget: File, log: Logger) { def statsKeyValue(key: String, value: Int): String = s"##teamcity[buildStatisticValue key='${key}' value='${value}']" // Log statement coverage as per: https://devnet.jetbrains.com/message/5467985 log.info(statsKeyValue("CodeCoverageAbsSCovered", coverage.invokedStatementCount)) log.info(statsKeyValue("CodeCoverageAbsSTotal", coverage.statementCount)) // Log branch coverage as a custom metrics (in percent) log.info(statsKeyValue("CodeCoverageBranch", "%.0f".format(coverage.branchCoveragePercent).toInt)) // Create the coverage report for teamcity (HTML files) if (createCoverageZip) IO.zip(Path.allSubpaths(reportDir), crossTarget / "coverage.zip") } private def loadCoverage(crossTarget: File, log: Logger): Option[Coverage] = { val dataDir = crossTarget / "/scoverage-data" val coverageFile = Serializer.coverageFile(dataDir) log.info(s"Reading scoverage instrumentation [$coverageFile]") if (coverageFile.exists) { val coverage = Serializer.deserialize(coverageFile) log.info(s"Reading scoverage measurements...") val measurementFiles = IOUtils.findMeasurementFiles(dataDir) val measurements = IOUtils.invoked(measurementFiles) coverage.apply(measurements) Some(coverage) } else { None } } private def checkCoverage(coverage: Coverage, log: Logger, min: Double, failOnMin: Boolean): Unit = { val cper = coverage.statementCoveragePercent val cfmt = coverage.statementCoverageFormatted // check for default minimum if (min > 0) { def is100(d: Double) = Math.abs(100 - d) <= 0.00001 if (is100(min) && is100(cper)) { log.info(s"100% Coverage !") } else if (min > cper) { log.error(s"Coverage is below minimum [$cfmt% < $min%]") if (failOnMin) throw new RuntimeException("Coverage minimum was not reached") } else { log.info(s"Coverage is above minimum [$cfmt% > $min%]") } } log.info(s"All done. Coverage was [$cfmt%]") } }
jasonchaffee/sbt-scoverage
src/main/scala/scoverage/ScoverageSbtPlugin.scala
Scala
apache-2.0
9,905
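The keys wired up in projectSettings above are ordinary sbt settings, so builds can tune them directly; a hedged build.sbt fragment, assuming sbt-scoverage is registered in project/plugins.sbt.

// build.sbt — assumes addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.2.0") in project/plugins.sbt
coverageMinimum := 80               // checkCoverage fails the build below 80% statement coverage...
coverageFailOnMinimum := true       // ...but only when this flag is set
coverageExcludedPackages := ".*\\.generated\\..*"
coverageHighlighting := true        // adds -Yrangepos while instrumentation is enabled

// Typical invocation: the "coverage" command toggles coverageEnabled (see toggleCoverage above)
//   sbt clean coverage test coverageReport
//   sbt coverageAggregate          // roll up subproject XML reports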
package test import org.scalatest._ import org.junit.runner.RunWith import org.scalatest.junit.JUnitRunner import keemun.models.{GithubUser, AccountSettings} /** * Copyright (c) Nikita Kovaliov, maizy.ru, 2014 * See LICENSE.txt for details. */ @RunWith(classOf[JUnitRunner]) class AccountSettingsSuite extends FunSuite with Matchers { test("hash equals for the same content") { val user1 = GithubUser("user") val user2 = GithubUser("user") assert(user1.hashCode === user2.hashCode) val accountSettings1 = new AccountSettings(user1, true) val accountSettings2 = new AccountSettings(user2, true) assert(accountSettings1.hashCode === accountSettings2.hashCode) val accountSettings3 = new AccountSettings(user1, false) assert(accountSettings1.hashCode !== accountSettings3.hashCode) } }
maizy/keemun
test/models/AccountSettingsSuite.scala
Scala
mit
827
package chana import akka.actor.Actor import chana.timeseries.TFreq import chana.timeseries.TSer import chana.timeseries.descriptor.Content import chana.reactor.Publisher /** * * @author Caoyuan Deng */ trait Thing extends Actor with Publisher { def identifier: String def name: String def description: String def description_=(description: String) def serOf(freq: TFreq): Option[TSer] /** * The content of each symbol should be fetched automatically from PersistenceManager.restoreContent * and kept there without being reassigned to another instance, so we only provide a getter without a setter. */ def content: Content /** * A helper method which can be overridden to get another ser provider from an identifier */ def thingOf(identifier: String): Option[Thing] = None }
matthewtt/chana
src/main/scala/chana/Thing.scala
Scala
apache-2.0
801
package cc.mewa.api import play.api.mvc.WebSocket.FrameFormatter import play.api.libs.json.{__, Format, Writes, Reads, Json, JsError} import play.api.libs.json._ import cc.mewa.api.Protocol._ object ProtocolJson { /** * Formats WebSocket frames to be MewaMessages */ implicit def jsonFrameFormatter: FrameFormatter[MewaMessage] = FrameFormatter.jsonFrame.transform( msg => Json.toJson(msg), json => Json.fromJson[MewaMessage](json).fold( invalid => throw new RuntimeException("Bad client message on WebSocket: " + invalid), valid => valid ) ) /** * Convert message to JSON */ implicit val msgToJson = Writes[MewaMessage]{ case msg: ConnectToChannel => Json.obj( "type" -> "connect" , "channel" -> msg.channel , "device" -> msg.device , "password" -> msg.password , "subscribe" -> msg.subscribe) case DisconnectFromChannel => Json.obj("type" -> "disconnect") case AlreadyConnectedError => Json.obj("type" -> "already-connected-error") case AuthorizationError => Json.obj("type" -> "authorization-error") case NotConnectedError => Json.obj("type" -> "not-connected-error") case ConnectedEvent => Json.obj("type" -> "connected") case DisconnectedEvent => Json.obj("type" -> "disconnected") case msg:DeviceJoinedChannel => Json.obj("type" -> "joined-channel", "time" -> msg.timeStamp, "device" -> msg.device) case msg:DeviceLeftChannel => Json.obj("type" -> "left-channel", "time" -> msg.timeStamp, "device" -> msg.device) case GetDevices => Json.obj("type" -> "get-devices") case msg:DevicesEvent => Json.obj("type" -> "devices-event", "time" -> msg.timeStamp, "devices" -> msg.names) case msg: SendEvent => Json.obj( "type" -> "send-event", "id" -> msg.eventId, "params" -> msg.params, "ack" -> msg.ack ) case msg: Event => Json.obj( "type" -> "event", "time" -> msg.timeStamp, "device" -> msg.fromDevice, "id" -> msg.eventId, "params" ->msg.params ) case msg: SendMessage => Json.obj( "type" -> "send-message", "device" -> msg.targetDevice, "id" -> msg.messageId, "params" ->msg.params ) case msg: Message => Json.obj( "type" -> "message", "time" -> msg.timeStamp, "device" -> msg.fromDevice, "id" -> msg.messageId, "params" ->msg.params ) case Ack => Json.obj("type" -> "ack") } /** * Create message from JSON */ implicit val msgFromJson = Reads[MewaMessage]{jsval => (jsval \\ "type").as[String] match { case "connect" => connectFromJson(jsval) case "disconnect" => JsSuccess(DisconnectFromChannel) case "connected" => JsSuccess(ConnectedEvent) case "disconnected" => JsSuccess(DisconnectedEvent) case "already-connected-error" => JsSuccess(AlreadyConnectedError) case "authorization-error" => JsSuccess(AuthorizationError) case "not-connected-error" => JsSuccess(NotConnectedError) case "joined-channel" => joinedChannelFromJson(jsval) case "left-channel" => leftChannelFromJson(jsval) case "send-event" => sendEventFromJson(jsval) case "event" => eventFromJson(jsval) case "send-message" => sendMessageFromJson(jsval) case "message" => messageFromJson(jsval) case "ack" => JsSuccess(Ack) case "get-devices" => JsSuccess(GetDevices) case "devices-event" => devicesEventFromJson(jsval) case other => JsError("Unknown client message: <" + other + ">") } } def connectFromJson(jsval:JsValue): JsResult[ConnectToChannel] = { val channel = (jsval \\ "channel").as[String] val device : String= (jsval \\ "device").as[String] val password : String= (jsval \\ "password").as[String] val subscribe = (jsval \\ "subscribe").asOpt[List[String]].getOrElse(List()) JsSuccess(ConnectToChannel(channel, device, password, subscribe)) } def 
joinedChannelFromJson(jsval:JsValue): JsResult[DeviceJoinedChannel] = { val deviceName = (jsval \\ "device").as[String] JsSuccess(DeviceJoinedChannel("", deviceName)) } def leftChannelFromJson(jsval:JsValue): JsResult[DeviceLeftChannel] = { val deviceName = (jsval \\ "device").as[String] JsSuccess(DeviceLeftChannel("", deviceName)) } def sendEventFromJson(jsval:JsValue): JsResult[SendEvent] = { val eventId = (jsval \\ "id").as[String] val params : String= (jsval \\ "params").as[String] val ack = (jsval \\ "ack").asOpt[Boolean].getOrElse(false) JsSuccess(SendEvent(eventId, params, ack)) } def eventFromJson(jsval:JsValue): JsResult[Event] = { val device = (jsval \\ "device").as[String] val eventId = (jsval \\ "id").as[String] val params : String= (jsval \\ "params").as[String] JsSuccess(Event("", device, eventId, params)) } def sendMessageFromJson(jsval:JsValue): JsResult[SendMessage] = { val device = (jsval \\ "device").as[String] val msgId = (jsval \\ "id").as[String] val params : String= (jsval \\ "params").as[String] JsSuccess(SendMessage(device, msgId, params)) } def messageFromJson(jsval:JsValue): JsResult[Message] = { val device = (jsval \\ "device").as[String] val msgId = (jsval \\ "id").as[String] val params : String= (jsval \\ "params").as[String] JsSuccess(Message("", device, msgId, params)) } def devicesEventFromJson(jsval:JsValue): JsResult[DevicesEvent] = { val deviceNames = (jsval \\ "devices").as[List[String]] JsSuccess(DevicesEvent("", deviceNames)) } }
AnthillTech/mewa
app/cc/mewa/api/ProtocolJson.scala
Scala
bsd-2-clause
5,991
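A round-trip sketch for the Writes/Reads defined above; the MewaMessage constructors (e.g. SendEvent(id, params, ack)) are inferred from the fromJson helpers and should be read as assumptions about cc.mewa.api.Protocol.

import play.api.libs.json.Json
import cc.mewa.api.Protocol._
import cc.mewa.api.ProtocolJson._

// Serialise an outgoing event; fields mirror sendEventFromJson (id, params, ack)
val msg: MewaMessage = SendEvent("temperature", """{"value":21.5}""", true)
val json = Json.toJson(msg)
// yields roughly {"type":"send-event","id":"temperature","params":"...","ack":true}

// Parse an incoming frame back into a MewaMessage via msgFromJson
val parsed = Json.fromJson[MewaMessage](json)
// JsSuccess(SendEvent(temperature,{"value":21.5},true)) if the payload is well-formed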
/* * Copyright 2013 - 2015, Daniel Krzywicki <daniel.krzywicki@agh.edu.pl> * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package pl.edu.agh.scalamas.app import akka.actor.ActorSystem import com.typesafe.config.{Config, ConfigFactory} import pl.edu.agh.scalamas.random.ConcurrentRandomGeneratorComponent import pl.edu.agh.scalamas.solutions.{ConcurrentSolutionsFactory, SolutionsComponent} import pl.edu.agh.scalamas.stats.{ConcurrentStatsFactory, StatsComponent} /** * Application stack for running concurrent applications. * * Provides a concurrent agent runtime, stats factory and random generators. * * This stack still needs to be mixed in with an Environment strategy to use fine- or coarse-grained agent concurrency. */ class ConcurrentStack(name: String) extends ConcurrentAgentRuntimeComponent with ConcurrentStatsFactory with ConcurrentSolutionsFactory with ConcurrentRandomGeneratorComponent with ConcurrentRunner { this: EnvironmentStrategy with StatsComponent with SolutionsComponent => val agentRuntime = new ConcurrentAgentRuntime { val config: Config = ConfigFactory.load() val system: ActorSystem = ActorSystem(name) } }
ros3n/IntOb
core/src/main/scala/pl/edu/agh/scalamas/app/ConcurrentStack.scala
Scala
mit
2,207
/* * * ____ __ ____________ ______ * / __/______ _/ /__ /_ __/ _/ //_/_ / * _\ \/ __/ _ `/ / _ `// / _/ // ,< / /_ * /___/\__/\_,_/_/\_,_//_/ /___/_/|_| /___/ * * A PGF/TIKZ plot library for Scala. * */ package scalatikz.pgf.charts import scalatikz.pgf.charts.enums.TextLocation import scalatikz.pgf.enums.Color /** * Chart configuration. * * @param magnify magnify chart pieces * @param radius scale radius * @param rotationDegrees rotation degrees * @param explodeFactor piece explode factor * @param textLocation text location * @param textBeforeNumbers text appearing before numbers * @param textAfterNumbers text appearing after numbers * @param colors piece colors */ case class ChartConf private[charts] ( magnify: Boolean = false, radius: Int = 3, rotationDegrees: Int = 0, explodeFactor: Double = 0, textLocation: TextLocation = TextLocation.LABEL, textBeforeNumbers: Option[String] = None, textAfterNumbers: Option[String] = None, colors: Option[Seq[Color]] = None) { override def toString: String = { val builder = new StringBuilder if (magnify) builder ++= "\tscale font,\n" if (colors.nonEmpty) builder ++= s"\tcolor={${colors.get.mkString(",")}},\n" builder ++= s"\tradius=$radius,\n\trotate=$rotationDegrees,\n\texplode=$explodeFactor,\n\ttext=$textLocation," builder ++= s"\n\tbefore number={${textBeforeNumbers.getOrElse("")}},\n\tafter number={${textAfterNumbers.getOrElse("")}}" builder.result } }
vagmcs/ScalaTIKZ
src/main/scala/scalatikz/pgf/charts/ChartConf.scala
Scala
lgpl-3.0
1,536
package com.twitter.parrot.launcher import com.twitter.parrot.config.ParrotLauncherConfig import org.junit.runner.RunWith import org.scalatest.OneInstancePerTest import org.scalatest.WordSpec import org.scalatest.junit.JUnitRunner import org.scalatest.matchers.MustMatchers @RunWith(classOf[JUnitRunner]) class ParrotLauncherSpec extends WordSpec with MustMatchers with OneInstancePerTest { val config = new ParrotLauncherConfig { localMode = true jobName = "test" log = "test" victims = "localhost:80" } "ParrotLauncher" should { for (xport <- Set("FinagleTransport", "ThriftTransport", "KestrelTransport", "MemcacheTransport")) { "rewrite old '%s' transport configs as '%sFactory(this)'".format(xport, xport) in { config.transport = xport val launcher = new ParrotLauncher(config) launcher.readSymbols("transport") must be(xport + "Factory(this)") } } "not rewrite other transport configs" in { for (xport <- Set("FinagleTransportFactory(this)", "KestrelTransportFactory", "CustomTransport")) { config.transport = xport val launcher = new ParrotLauncher(config) launcher.readSymbols("transport") must be(xport) } } } }
twitter/iago
src/test/scala/com/twitter/parrot/launcher/ParrotLauncherSpec.scala
Scala
apache-2.0
1,254
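For context, a hedged sketch of a minimal launcher configuration exercising the same fields the spec sets; values and any behaviour beyond the fields shown are assumptions about Iago.

import com.twitter.parrot.config.ParrotLauncherConfig

// Mirrors the fields used in ParrotLauncherSpec above; values are illustrative only
val launcherConfig = new ParrotLauncherConfig {
  localMode = true                 // run in-process rather than on a cluster
  jobName   = "smoke-test"
  log       = "requests.log"       // replay source
  victims   = "localhost:8080"     // target host:port
  transport = "FinagleTransport"   // legacy name; rewritten to FinagleTransportFactory(this)
}

// new ParrotLauncher(launcherConfig) would then perform the transport rewrite the spec verifies.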
package com.sksamuel.elastic4s.searches.suggestion import com.sksamuel.exts.OptionImplicits._ import org.elasticsearch.common.unit.Fuzziness import org.elasticsearch.search.suggest.SuggestBuilders import org.elasticsearch.search.suggest.completion.{CompletionSuggestionBuilder, FuzzyOptions, RegexOptions} case class CompletionSuggestionDefinition(name: String, fieldname: String, analyzer: Option[String] = None, fuzziness: Option[Fuzziness] = None, fuzzyMinLength: Option[Int] = None, prefix: Option[String] = None, fuzzyPrefixLength: Option[Int] = None, maxDeterminizedStates: Option[Int] = None, regex: Option[String] = None, regexOptions: Option[RegexOptions] = None, shardSize: Option[Int] = None, size: Option[Int] = None, transpositions: Option[Boolean] = None, unicodeAware: Option[Boolean] = None, text: Option[String] = None) extends SuggestionDefinition { override type B = CompletionSuggestionBuilder override def builder: CompletionSuggestionBuilder = { val builder = SuggestBuilders.completionSuggestion(fieldname) super.populate(builder) prefix.foreach { prefix => fuzziness.fold(builder.prefix(prefix)) { fuzz => val options = new FuzzyOptions.Builder() options.setFuzziness(fuzz) unicodeAware.foreach(options.setUnicodeAware) fuzzyMinLength.foreach(options.setFuzzyMinLength) fuzzyPrefixLength.foreach(options.setFuzzyPrefixLength) maxDeterminizedStates.foreach(options.setMaxDeterminizedStates) transpositions.foreach(options.setTranspositions) builder.prefix(prefix, options.build) } } regex.foreach { regex => builder.regex(regex, regexOptions.orNull) } builder } def regex(regex: String): CompletionSuggestionDefinition = copy(regex = regex.some) def regexOptions(regexOptions: RegexOptions): CompletionSuggestionDefinition = copy(regexOptions = regexOptions.some) def fuzzyMinLength(min: Int): CompletionSuggestionDefinition = copy(fuzzyMinLength = min.some) def maxDeterminizedStates(states: Int): CompletionSuggestionDefinition = copy(maxDeterminizedStates = states.some) def fuzziness(edits: Int): CompletionSuggestionDefinition = copy(fuzziness = Fuzziness.fromEdits(edits).some) def fuzziness(fuzziness: Fuzziness): CompletionSuggestionDefinition = copy(fuzziness = fuzziness.some) def transpositions(transpositions: Boolean): CompletionSuggestionDefinition = copy(transpositions = transpositions.some) def unicodeAware(unicodeAware: Boolean): CompletionSuggestionDefinition = copy(unicodeAware = unicodeAware.some) def prefix(prefix: String): CompletionSuggestionDefinition = copy(prefix = prefix.some) def prefix(prefix: String, fuzziness: Fuzziness): CompletionSuggestionDefinition = copy(prefix = prefix.some, fuzziness = fuzziness.some) def fuzzyPrefixLength(length: Int): CompletionSuggestionDefinition = copy(fuzzyPrefixLength = length.some) override def analyzer(analyzer: String): CompletionSuggestionDefinition = copy(analyzer = analyzer.some) override def text(text: String): CompletionSuggestionDefinition = copy(text = text.some) override def shardSize(shardSize: Int): CompletionSuggestionDefinition = copy(shardSize = shardSize.some) override def size(size: Int): CompletionSuggestionDefinition = copy(size = size.some) }
FabienPennequin/elastic4s
elastic4s-core/src/main/scala/com/sksamuel/elastic4s/searches/suggestion/CompletionSuggestionDefinition.scala
Scala
apache-2.0
3,913
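A small construction sketch for the definition above, assuming elastic4s and the Elasticsearch client are on the classpath; only the copy-style setters present in the file are used.

import com.sksamuel.elastic4s.searches.suggestion.CompletionSuggestionDefinition
import org.elasticsearch.common.unit.Fuzziness

// Suggest values of the "city" completion field for the prefix "lon", tolerating one edit
val suggestion = CompletionSuggestionDefinition("city-suggest", "city")
  .prefix("lon", Fuzziness.ONE)   // sets prefix and fuzziness in one call
  .fuzzyMinLength(4)              // only apply fuzziness to terms of length >= 4
  .size(5)

// builder produces the underlying Elasticsearch CompletionSuggestionBuilder
val esBuilder = suggestion.builder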
package akka.rtcweb.protocol.dtls.record import akka.rtcweb.protocol.dtls.ProtocolVersion import scodec._ import scodec.bits._ import scodec.codecs._ /** * * Implementations MUST NOT send zero-length fragments of Handshake, * Alert, or ChangeCipherSpec content types. Zero-length fragments of * Application data MAY be sent as they are potentially useful as a * traffic analysis countermeasure. * * @param `type` The higher-level protocol used to process the enclosed fragment. * @param version The version of the protocol being employed. This document * describes DTLS version 1.2, which uses the version { 254, 253 }. * The version value of 254.253 is the 1's complement of DTLS version * 1.2. This maximal spacing between TLS and DTLS version numbers * ensures that records from the two protocols can be easily * distinguished. It should be noted that future on-the-wire version * numbers of DTLS are decreasing in value (while the true version * number is increasing in value.) * @param opaqueFragment The application data. This data is transparent and treated as an * independent block to be dealt with by the higher-level protocol * specified by the type field. * @param epoch A counter value that is incremented on every cipher state change. * @param sequenceNumber The sequence number for this record. * */ case class DtlsPlaintext( `type`: ContentType, version: ProtocolVersion, epoch: Int, sequenceNumber: Long, opaqueFragment: ByteVector) object DtlsPlaintext { implicit val codec = "DTLSPlaintext" | { ("type" | ContentType.codec) :: ("version" | ProtocolVersion.codec) :: ("epoch" | uint16) :: ("sequence_number" | ulong(48)) :: variableSizeBytes("length" | uint16, "opaque fragment" | bytes) }.as[DtlsPlaintext] } case class DtlsCompressed( `type`: ContentType, version: ProtocolVersion, epoch: Int, sequenceNumber: Long, opaqueFragment: ByteVector) object DtlsCompressed { implicit val codec = { ("type" | ContentType.codec) :: ("version" | ProtocolVersion.codec) :: ("epoch" | uint16) :: ("sequence_number" | ulong(48)) :: variableSizeBytes("length" | uint16, "opaque fragment" | bytes) }.as[DtlsCompressed] } /** * The encryption and MAC functions translate a TLSCompressed * structure into a TLSCiphertext. The decryption functions reverse * the process. The MAC of the record also includes a sequence * number so that missing, extra, or repeated messages are detectable. * * @param `type` The type field is identical to TLSCompressed.type. * @param version The version field is identical to TLSCompressed.version. * @param epoch uint16 * @param sequenceNumber uint48 * @param fragment The length (in bytes) of the following TLSCiphertext.fragment. * The length MUST NOT exceed 2&#94;14 + 2048. */ case class DtlsCiphertext( `type`: ContentType, version: ProtocolVersion, epoch: Int, sequenceNumber: Long, fragment: ByteVector) object DtlsCiphertext { implicit val codec = "DtlsCiphertext" | { ("type" | ContentType.codec) :: ("version" | ProtocolVersion.codec) :: ("epoch" | uint16) :: ("sequence_number" | ulong(48)) :: variableSizeBytes("length" | uint16, "fragment" | bytes) }.as[DtlsCiphertext] }
danielwegener/akka-rtcweb
src/main/scala/akka/rtcweb/protocol/dtls/record/DtlsRecord.scala
Scala
apache-2.0
3,332
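The three record codecs above share one scodec pattern: fixed-width header fields followed by a length-prefixed body. A self-contained sketch of that pattern, independent of the rtcweb-specific ContentType and ProtocolVersion codecs.

import scodec._
import scodec.bits._
import scodec.codecs._

// A toy record: 16-bit epoch, 48-bit sequence number, length-prefixed opaque payload
case class ToyRecord(epoch: Int, sequenceNumber: Long, payload: ByteVector)

object ToyRecord {
  implicit val codec: Codec[ToyRecord] = {
    ("epoch"             | uint16) ::
      ("sequence_number" | ulong(48)) ::
      variableSizeBytes("length" | uint16, "payload" | bytes)
  }.as[ToyRecord]
}

// Round trip: encode returns an Attempt[BitVector], decode an Attempt[DecodeResult[ToyRecord]]
val encoded = ToyRecord.codec.encode(ToyRecord(1, 42L, ByteVector(0xde, 0xad)))
val decoded = encoded.flatMap(ToyRecord.codec.decode)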
package com.twitter.scalding.estimation.memory import cascading.flow.{Flow, FlowStep, FlowStepStrategy} import com.twitter.algebird.Monoid import com.twitter.scalding.estimation.{Estimator, FallbackEstimatorMonoid, FlowStrategyInfo} import com.twitter.scalding.{Config, StringUtility} import java.util.{List => JList} import org.apache.hadoop.mapred.JobConf import org.slf4j.LoggerFactory import scala.collection.JavaConverters._ object MemoryEstimatorStepStrategy extends FlowStepStrategy[JobConf] { private val LOG = LoggerFactory.getLogger(this.getClass) implicit val estimatorMonoid: Monoid[Estimator[MemoryEstimate]] = new FallbackEstimatorMonoid[MemoryEstimate] /** * Make memory estimate, possibly overriding explicitly-set memory settings, and save useful info (such as * the original & estimate value of memory settings) in JobConf for later consumption. * * Called by Cascading at the start of each job step. */ final override def apply( flow: Flow[JobConf], preds: JList[FlowStep[JobConf]], step: FlowStep[JobConf] ): Unit = if (skipMemoryEstimation(step)) { LOG.info(s"Skipping memory estimation as ${Config.MemoryEstimators} is not set ") } else { estimate(flow, preds.asScala, step) } private[estimation] def skipMemoryEstimation(step: FlowStep[JobConf]): Boolean = step.getConfig.get(Config.MemoryEstimators, "").isEmpty private[estimation] def estimate( flow: Flow[JobConf], preds: Seq[FlowStep[JobConf]], step: FlowStep[JobConf] ): Unit = { val conf = step.getConfig Option(conf.get(Config.MemoryEstimators)).foreach { clsNames => val clsLoader = Thread.currentThread.getContextClassLoader val estimators = StringUtility .fastSplit(clsNames, ",") .map(clsLoader.loadClass(_).newInstance.asInstanceOf[Estimator[MemoryEstimate]]) val combinedEstimator = Monoid.sum(estimators) val info = FlowStrategyInfo(flow, preds, step) // get memory estimate val memoryEstimate: Option[MemoryEstimate] = combinedEstimator.estimate(info) memoryEstimate match { case Some(MemoryEstimate(Some(mapMem), Some(reduceMem))) => LOG.info(s"Overriding map memory to: $mapMem in Mb and reduce memory to: $reduceMem in Mb") setMemory(mapMem, (Config.MapJavaOpts, Config.MapMemory), conf) setMemory(reduceMem, (Config.ReduceJavaOpts, Config.ReduceMemory), conf) case Some(MemoryEstimate(Some(mapMem), _)) => LOG.info(s"Overriding only map memory to: $mapMem in Mb") setMemory(mapMem, (Config.MapJavaOpts, Config.MapMemory), conf) case Some(MemoryEstimate(_, Some(reduceMem))) => LOG.info(s"Overriding only reduce memory to: $reduceMem in Mb") setMemory(reduceMem, (Config.ReduceJavaOpts, Config.ReduceMemory), conf) case _ => LOG.info("Memory estimators didn't calculate any value. Skipping setting memory overrides") } } } private[estimation] def setMemory( memorySettings: (Long, Long), keys: (String, String), conf: JobConf ): Unit = { val (xmxMemory, containerMemory) = memorySettings val (xmxKey, containerKey) = keys conf.setLong(containerKey, containerMemory) setXmxMemory(xmxKey, xmxMemory, conf) } private[estimation] def setXmxMemory(xmxKey: String, xmxMemory: Long, conf: JobConf): Unit = { val xmxOpts = conf.get(xmxKey, "") //remove existing xmx / xms val xmxOptsWithoutXm = xmxOpts.split(" ").filterNot(s => s.startsWith("-Xmx") || s.startsWith("-Xms")).mkString(" ") conf.set(xmxKey, xmxOptsWithoutXm + s" -Xmx${xmxMemory}m") } }
twitter/scalding
scalding-core/src/main/scala/com/twitter/scalding/estimation/memory/MemoryEstimatorStepStrategy.scala
Scala
apache-2.0
3,697
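A short sketch of the -Xmx override behaviour implemented by setXmxMemory above: any existing -Xms/-Xmx flags are stripped before the estimated value is appended. The method itself is private[estimation], so this replays the same logic directly on a JobConf; the property name is an assumption for illustration.

import org.apache.hadoop.mapred.JobConf

val conf = new JobConf()
val xmxKey = "mapreduce.map.java.opts"   // illustrative key; scalding resolves it via Config.MapJavaOpts
conf.set(xmxKey, "-Xms512m -Xmx1024m -XX:+UseG1GC")

// Same transformation as setXmxMemory: drop old -Xms/-Xmx, append the estimate in MB
val xmxMemory = 2048L
val withoutXm = conf.get(xmxKey, "").split(" ")
  .filterNot(s => s.startsWith("-Xmx") || s.startsWith("-Xms"))
  .mkString(" ")
conf.set(xmxKey, withoutXm + s" -Xmx${xmxMemory}m")

// conf.get(xmxKey) is now "-XX:+UseG1GC -Xmx2048m"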
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.ml.clustering import scala.util.Random import org.dmg.pmml.PMML import org.dmg.pmml.clustering.ClusteringModel import org.apache.spark.SparkException import org.apache.spark.ml.linalg.{Vector, Vectors} import org.apache.spark.ml.param.ParamMap import org.apache.spark.ml.util.{DefaultReadWriteTest, MLTest, MLTestingUtils, PMMLReadWriteTest} import org.apache.spark.ml.util.TestingUtils._ import org.apache.spark.mllib.clustering.{DistanceMeasure, KMeans => MLlibKMeans, KMeansModel => MLlibKMeansModel} import org.apache.spark.mllib.linalg.{Vectors => MLlibVectors} import org.apache.spark.sql.{DataFrame, Dataset, SparkSession} private[clustering] case class TestRow(features: Vector) class IntelKMeansSuite extends MLTest with DefaultReadWriteTest with PMMLReadWriteTest { import testImplicits._ final val k = 5 @transient var dataset: DataFrame = _ override def beforeAll(): Unit = { super.beforeAll() dataset = KMeansSuite.generateKMeansData(spark, 50, 3, k) } test("default parameters") { val kmeans = new KMeans() assert(kmeans.getK === 2) assert(kmeans.getFeaturesCol === "features") assert(kmeans.getPredictionCol === "prediction") assert(kmeans.getMaxIter === 20) assert(kmeans.getInitMode === MLlibKMeans.K_MEANS_PARALLEL) assert(kmeans.getInitSteps === 2) assert(kmeans.getTol === 1e-4) assert(kmeans.getDistanceMeasure === DistanceMeasure.EUCLIDEAN) val model = kmeans.setMaxIter(1).fit(dataset) val transformed = model.transform(dataset) checkNominalOnDF(transformed, "prediction", model.clusterCenters.length) MLTestingUtils.checkCopyAndUids(kmeans, model) assert(model.hasSummary) val copiedModel = model.copy(ParamMap.empty) assert(copiedModel.hasSummary) } test("set parameters") { val kmeans = new KMeans() .setK(9) .setFeaturesCol("test_feature") .setPredictionCol("test_prediction") .setMaxIter(33) .setInitMode(MLlibKMeans.RANDOM) .setInitSteps(3) .setSeed(123) .setTol(1e-3) .setDistanceMeasure(DistanceMeasure.COSINE) assert(kmeans.getK === 9) assert(kmeans.getFeaturesCol === "test_feature") assert(kmeans.getPredictionCol === "test_prediction") assert(kmeans.getMaxIter === 33) assert(kmeans.getInitMode === MLlibKMeans.RANDOM) assert(kmeans.getInitSteps === 3) assert(kmeans.getSeed === 123) assert(kmeans.getTol === 1e-3) assert(kmeans.getDistanceMeasure === DistanceMeasure.COSINE) } test("parameters validation") { intercept[IllegalArgumentException] { new KMeans().setK(1) } intercept[IllegalArgumentException] { new KMeans().setInitMode("no_such_a_mode") } intercept[IllegalArgumentException] { new KMeans().setInitSteps(0) } intercept[IllegalArgumentException] { new KMeans().setDistanceMeasure("no_such_a_measure") } } test("fit, transform and summary") { val predictionColName = "kmeans_prediction" val kmeans = 
new KMeans().setK(k).setPredictionCol(predictionColName).setSeed(1) val model = kmeans.fit(dataset) assert(model.clusterCenters.length === k) testTransformerByGlobalCheckFunc[Tuple1[Vector]](dataset.toDF(), model, "features", predictionColName) { rows => val clusters = rows.map(_.getAs[Int](predictionColName)).toSet assert(clusters.size === k) assert(clusters === Set(0, 1, 2, 3, 4)) } assert(model.hasParent) // Check validity of model summary val numRows = dataset.count() assert(model.hasSummary) val summary: KMeansSummary = model.summary assert(summary.predictionCol === predictionColName) assert(summary.featuresCol === "features") assert(summary.predictions.count() === numRows) for (c <- Array(predictionColName, "features")) { assert(summary.predictions.columns.contains(c)) } assert(summary.cluster.columns === Array(predictionColName)) assert(summary.trainingCost < 0.1) val clusterSizes = summary.clusterSizes assert(clusterSizes.length === k) assert(clusterSizes.sum === numRows) assert(clusterSizes.forall(_ >= 0)) assert(summary.numIter == 1) model.setSummary(None) assert(!model.hasSummary) } test("KMeansModel transform with non-default feature and prediction cols") { val featuresColName = "kmeans_model_features" val predictionColName = "kmeans_model_prediction" val model = new KMeans().setK(k).setSeed(1).fit(dataset) model.setFeaturesCol(featuresColName).setPredictionCol(predictionColName) val transformed = model.transform(dataset.withColumnRenamed("features", featuresColName)) assert(transformed.schema.fieldNames.toSet === Set(featuresColName, predictionColName)) assert(model.getFeaturesCol == featuresColName) assert(model.getPredictionCol == predictionColName) } test("KMeans using cosine distance") { val df = spark.createDataFrame(spark.sparkContext.parallelize(Seq( Vectors.dense(1.0, 1.0), Vectors.dense(10.0, 10.0), Vectors.dense(1.0, 0.5), Vectors.dense(10.0, 4.4), Vectors.dense(-1.0, 1.0), Vectors.dense(-100.0, 90.0) )).map(v => TestRow(v))) val model = new KMeans() .setK(3) .setSeed(42) .setInitMode(MLlibKMeans.RANDOM) .setTol(1e-6) .setDistanceMeasure(DistanceMeasure.COSINE) .fit(df) val predictionDf = model.transform(df) assert(predictionDf.select("prediction").distinct().count() == 3) val predictionsMap = predictionDf.collect().map(row => row.getAs[Vector]("features") -> row.getAs[Int]("prediction")).toMap assert(predictionsMap(Vectors.dense(1.0, 1.0)) == predictionsMap(Vectors.dense(10.0, 10.0))) assert(predictionsMap(Vectors.dense(1.0, 0.5)) == predictionsMap(Vectors.dense(10.0, 4.4))) assert(predictionsMap(Vectors.dense(-1.0, 1.0)) == predictionsMap(Vectors.dense(-100.0, 90.0))) model.clusterCenters.forall(Vectors.norm(_, 2) == 1.0) } test("KMeans with cosine distance is not supported for 0-length vectors") { val model = new KMeans().setDistanceMeasure(DistanceMeasure.COSINE).setK(2) val df = spark.createDataFrame(spark.sparkContext.parallelize(Seq( Vectors.dense(0.0, 0.0), Vectors.dense(10.0, 10.0), Vectors.dense(1.0, 0.5) )).map(v => TestRow(v))) val e = intercept[SparkException](model.fit(df)) assert(e.getCause.isInstanceOf[AssertionError]) assert(e.getCause.getMessage.contains("Cosine distance is not defined")) } test("KMean with Array input") { def trainAndGetCost(dataset: Dataset[_]): Double = { val model = new KMeans().setK(k).setMaxIter(1).setSeed(1).fit(dataset) model.summary.trainingCost } val (newDataset, newDatasetD, newDatasetF) = MLTestingUtils.generateArrayFeatureDataset(dataset) val trueCost = trainAndGetCost(newDataset) val doubleArrayCost = 
trainAndGetCost(newDatasetD) val floatArrayCost = trainAndGetCost(newDatasetF) // checking the cost is fine enough as a sanity check assert(trueCost ~== doubleArrayCost absTol 1e-6) assert(trueCost ~== floatArrayCost absTol 1e-6) } test("read/write") { def checkModelData(model: KMeansModel, model2: KMeansModel): Unit = { assert(model.clusterCenters === model2.clusterCenters) } val kmeans = new KMeans() testEstimatorAndModelReadWrite(kmeans, dataset, KMeansSuite.allParamSettings, KMeansSuite.allParamSettings, checkModelData) } test("pmml export") { val clusterCenters = Array( MLlibVectors.dense(1.0, 2.0, 6.0), MLlibVectors.dense(1.0, 3.0, 0.0), MLlibVectors.dense(1.0, 4.0, 6.0)) val oldKmeansModel = new MLlibKMeansModel(clusterCenters) val kmeansModel = new KMeansModel("", oldKmeansModel) def checkModel(pmml: PMML): Unit = { // Check the header description is what we expect assert(pmml.getHeader.getDescription === "k-means clustering") // check that the number of fields match the single vector size assert(pmml.getDataDictionary.getNumberOfFields === clusterCenters(0).size) // This verify that there is a model attached to the pmml object and the model is a clustering // one. It also verifies that the pmml model has the same number of clusters of the spark // model. val pmmlClusteringModel = pmml.getModels.get(0).asInstanceOf[ClusteringModel] assert(pmmlClusteringModel.getNumberOfClusters === clusterCenters.length) } testPMMLWrite(sc, kmeansModel, checkModel) } test("prediction on single instance") { val kmeans = new KMeans().setSeed(123L) val model = kmeans.fit(dataset) testClusteringModelSinglePrediction(model, model.predict, dataset, model.getFeaturesCol, model.getPredictionCol) } test("compare with weightCol and without weightCol") { val df1 = spark.createDataFrame(spark.sparkContext.parallelize(Array( Vectors.dense(1.0, 1.0), Vectors.dense(10.0, 10.0), Vectors.dense(10.0, 10.0), Vectors.dense(1.0, 0.5), Vectors.dense(10.0, 4.4), Vectors.dense(10.0, 4.4), Vectors.dense(-1.0, 1.0), Vectors.dense(-100.0, 90.0), Vectors.dense(-100.0, 90.0) )).map(v => TestRow(v))) val model1 = new KMeans() .setK(3) .setSeed(42) .setInitMode(MLlibKMeans.RANDOM) .setTol(1e-6) .setDistanceMeasure(DistanceMeasure.COSINE) .fit(df1) val predictionDf1 = model1.transform(df1) assert(predictionDf1.select("prediction").distinct().count() == 3) val predictionsMap1 = predictionDf1.collect().map(row => row.getAs[Vector]("features") -> row.getAs[Int]("prediction")).toMap assert(predictionsMap1(Vectors.dense(1.0, 1.0)) == predictionsMap1(Vectors.dense(10.0, 10.0))) assert(predictionsMap1(Vectors.dense(1.0, 0.5)) == predictionsMap1(Vectors.dense(10.0, 4.4))) assert(predictionsMap1(Vectors.dense(-1.0, 1.0)) == predictionsMap1(Vectors.dense(-100.0, 90.0))) model1.clusterCenters.forall(Vectors.norm(_, 2) == 1.0) val df2 = spark.createDataFrame(spark.sparkContext.parallelize(Array( (Vectors.dense(1.0, 1.0), 1.0), (Vectors.dense(10.0, 10.0), 2.0), (Vectors.dense(1.0, 0.5), 1.0), (Vectors.dense(10.0, 4.4), 2.0), (Vectors.dense(-1.0, 1.0), 1.0), (Vectors.dense(-100.0, 90.0), 2.0)))).toDF("features", "weightCol") val model2 = new KMeans() .setK(3) .setSeed(42) .setInitMode(MLlibKMeans.RANDOM) .setTol(1e-6) .setDistanceMeasure(DistanceMeasure.COSINE) .setWeightCol("weightCol") .fit(df2) val predictionDf2 = model2.transform(df2) assert(predictionDf2.select("prediction").distinct().count() == 3) val predictionsMap2 = predictionDf2.collect().map(row => row.getAs[Vector]("features") -> row.getAs[Int]("prediction")).toMap 
assert(predictionsMap2(Vectors.dense(1.0, 1.0)) == predictionsMap2(Vectors.dense(10.0, 10.0))) assert(predictionsMap2(Vectors.dense(1.0, 0.5)) == predictionsMap2(Vectors.dense(10.0, 4.4))) assert(predictionsMap2(Vectors.dense(-1.0, 1.0)) == predictionsMap2(Vectors.dense(-100.0, 90.0))) model2.clusterCenters.forall(Vectors.norm(_, 2) == 1.0) // compare if model1 and model2 have the same cluster centers assert(model1.clusterCenters.length === model2.clusterCenters.length) assert(model1.clusterCenters.toSet.subsetOf((model2.clusterCenters.toSet))) } test("Two centers with weightCol") { // use the same weight for all samples. val df1 = spark.createDataFrame(spark.sparkContext.parallelize(Array( (Vectors.dense(0.0, 0.0), 2.0), (Vectors.dense(0.0, 0.1), 2.0), (Vectors.dense(0.1, 0.0), 2.0), (Vectors.dense(9.0, 0.0), 2.0), (Vectors.dense(9.0, 0.2), 2.0), (Vectors.dense(9.2, 0.0), 2.0)))).toDF("features", "weightCol") val model1 = new KMeans() .setK(2) .setInitMode(MLlibKMeans.RANDOM) .setWeightCol("weightCol") .setMaxIter(10) .fit(df1) val predictionDf1 = model1.transform(df1) assert(predictionDf1.select("prediction").distinct().count() == 2) val predictionsMap1 = predictionDf1.collect().map(row => row.getAs[Vector]("features") -> row.getAs[Int]("prediction")).toMap assert(predictionsMap1(Vectors.dense(0.0, 0.0)) == predictionsMap1(Vectors.dense(0.0, 0.1))) assert(predictionsMap1(Vectors.dense(0.0, 0.0)) == predictionsMap1(Vectors.dense(0.1, 0.0))) assert(predictionsMap1(Vectors.dense(9.0, 0.0)) == predictionsMap1(Vectors.dense(9.0, 0.2))) assert(predictionsMap1(Vectors.dense(9.0, 0.2)) == predictionsMap1(Vectors.dense(9.2, 0.0))) model1.clusterCenters.forall(Vectors.norm(_, 2) == 1.0) // center 1: // total weights in cluster 1: 2.0 + 2.0 + 2.0 = 6.0 // x: 9.0 * (2.0/6.0) + 9.0 * (2.0/6.0) + 9.2 * (2.0/6.0) = 9.066666666666666 // y: 0.0 * (2.0/6.0) + 0.2 * (2.0/6.0) + 0.0 * (2.0/6.0) = 0.06666666666666667 // center 2: // total weights in cluster 2: 2.0 + 2.0 + 2.0 = 6.0 // x: 0.0 * (2.0/6.0) + 0.0 * (2.0/6.0) + 0.1 * (2.0/6.0) = 0.03333333333333333 // y: 0.0 * (2.0/6.0) + 0.1 * (2.0/6.0) + 0.0 * (2.0/6.0) = 0.03333333333333333 val model1_center1 = Vectors.dense(9.066666666666666, 0.06666666666666667) val model1_center2 = Vectors.dense(0.03333333333333333, 0.03333333333333333) assert(model1.clusterCenters(0) === model1_center1) assert(model1.clusterCenters(1) === model1_center2) // use different weight val df2 = spark.createDataFrame(spark.sparkContext.parallelize(Array( (Vectors.dense(0.0, 0.0), 1.0), (Vectors.dense(0.0, 0.1), 2.0), (Vectors.dense(0.1, 0.0), 3.0), (Vectors.dense(9.0, 0.0), 2.5), (Vectors.dense(9.0, 0.2), 1.0), (Vectors.dense(9.2, 0.0), 2.0)))).toDF("features", "weightCol") val model2 = new KMeans() .setK(2) .setInitMode(MLlibKMeans.RANDOM) .setWeightCol("weightCol") .setMaxIter(10) .fit(df2) val predictionDf2 = model2.transform(df2) assert(predictionDf2.select("prediction").distinct().count() == 2) val predictionsMap2 = predictionDf2.collect().map(row => row.getAs[Vector]("features") -> row.getAs[Int]("prediction")).toMap assert(predictionsMap2(Vectors.dense(0.0, 0.0)) == predictionsMap2(Vectors.dense(0.0, 0.1))) assert(predictionsMap2(Vectors.dense(0.0, 0.0)) == predictionsMap2(Vectors.dense(0.1, 0.0))) assert(predictionsMap2(Vectors.dense(9.0, 0.0)) == predictionsMap2(Vectors.dense(9.0, 0.2))) assert(predictionsMap2(Vectors.dense(9.0, 0.2)) == predictionsMap2(Vectors.dense(9.2, 0.0))) model2.clusterCenters.forall(Vectors.norm(_, 2) == 1.0) // center 1: // total weights in 
cluster 1: 2.5 + 1.0 + 2.0 = 5.5 // x: 9.0 * (2.5/5.5) + 9.0 * (1.0/5.5) + 9.2 * (2.0/5.5) = 9.072727272727272 // y: 0.0 * (2.5/5.5) + 0.2 * (1.0/5.5) + 0.0 * (2.0/5.5) = 0.03636363636363637 // center 2: // total weights in cluster 2: 1.0 + 2.0 + 3.0 = 6.0 // x: 0.0 * (1.0/6.0) + 0.0 * (2.0/6.0) + 0.1 * (3.0/6.0) = 0.05 // y: 0.0 * (1.0/6.0) + 0.1 * (2.0/6.0) + 0.0 * (3.0/6.0) = 0.03333333333333333 val model2_center1 = Vectors.dense(9.072727272727272, 0.03636363636363637) val model2_center2 = Vectors.dense(0.05, 0.03333333333333333) assert(model2.clusterCenters(0) === model2_center1) assert(model2.clusterCenters(1) === model2_center2) } test("Four centers with weightCol") { // no weight val df1 = spark.createDataFrame(spark.sparkContext.parallelize(Array( Vectors.dense(0.1, 0.1), Vectors.dense(5.0, 0.2), Vectors.dense(10.0, 0.0), Vectors.dense(15.0, 0.5), Vectors.dense(32.0, 18.0), Vectors.dense(30.1, 20.0), Vectors.dense(-6.0, -6.0), Vectors.dense(-10.0, -10.0))).map(v => TestRow(v))) val model1 = new KMeans() .setK(4) .setInitMode(MLlibKMeans.K_MEANS_PARALLEL) .setMaxIter(10) .fit(df1) val predictionDf1 = model1.transform(df1) assert(predictionDf1.select("prediction").distinct().count() == 4) val predictionsMap1 = predictionDf1.collect().map(row => row.getAs[Vector]("features") -> row.getAs[Int]("prediction")).toMap assert(predictionsMap1(Vectors.dense(0.1, 0.1)) == predictionsMap1(Vectors.dense(5.0, 0.2)) ) assert(predictionsMap1(Vectors.dense(10.0, 0.0)) == predictionsMap1(Vectors.dense(15.0, 0.5)) ) assert(predictionsMap1(Vectors.dense(32.0, 18.0)) == predictionsMap1(Vectors.dense(30.1, 20.0))) assert(predictionsMap1(Vectors.dense(-6.0, -6.0)) == predictionsMap1(Vectors.dense(-10.0, -10.0))) model1.clusterCenters.forall(Vectors.norm(_, 2) == 1.0) // use same weight, should have the same result as no weight val df2 = spark.createDataFrame(spark.sparkContext.parallelize(Array( (Vectors.dense(0.1, 0.1), 2.0), (Vectors.dense(5.0, 0.2), 2.0), (Vectors.dense(10.0, 0.0), 2.0), (Vectors.dense(15.0, 0.5), 2.0), (Vectors.dense(32.0, 18.0), 2.0), (Vectors.dense(30.1, 20.0), 2.0), (Vectors.dense(-6.0, -6.0), 2.0), (Vectors.dense(-10.0, -10.0), 2.0)))).toDF("features", "weightCol") val model2 = new KMeans() .setK(4) .setInitMode(MLlibKMeans.K_MEANS_PARALLEL) .setWeightCol("weightCol") .setMaxIter(10) .fit(df2) val predictionDf2 = model2.transform(df2) assert(predictionDf2.select("prediction").distinct().count() == 4) val predictionsMap2 = predictionDf2.collect().map(row => row.getAs[Vector]("features") -> row.getAs[Int]("prediction")).toMap assert(predictionsMap2(Vectors.dense(0.1, 0.1)) == predictionsMap2(Vectors.dense(5.0, 0.2))) assert(predictionsMap2(Vectors.dense(10.0, 0.0)) == predictionsMap2(Vectors.dense(15.0, 0.5))) assert(predictionsMap2(Vectors.dense(32.0, 18.0)) == predictionsMap2(Vectors.dense(30.1, 20.0))) assert(predictionsMap2(Vectors.dense(-6.0, -6.0)) == predictionsMap2(Vectors.dense(-10.0, -10.0))) model2.clusterCenters.forall(Vectors.norm(_, 2) == 1.0) assert(model1.clusterCenters === model2.clusterCenters) } }
Intel-bigdata/OAP
oap-mllib/mllib-dal/src/test/scala/org/apache/spark/ml/clustering/IntelKMeansSuite.scala
Scala
apache-2.0
19,125
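Outside the test harness, the estimator under test is driven like any other Spark ML stage; a hedged, self-contained sketch of fitting k-means on a tiny DataFrame with a local session.

import org.apache.spark.ml.clustering.KMeans
import org.apache.spark.ml.linalg.Vectors
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local[*]").appName("kmeans-sketch").getOrCreate()
import spark.implicits._

// Two obvious clusters around (0,0) and (9,9)
val df = Seq(
  Vectors.dense(0.0, 0.0), Vectors.dense(0.1, 0.1),
  Vectors.dense(9.0, 9.0), Vectors.dense(9.1, 8.9)
).map(Tuple1.apply).toDF("features")

val model = new KMeans()
  .setK(2)
  .setSeed(1L)
  .setMaxIter(10)
  .fit(df)

model.transform(df).show()                       // adds the "prediction" column
println(model.clusterCenters.mkString(", "))     // two centres, one per cluster
spark.stop()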
package nest.sparkle.store import scala.concurrent.{Future, ExecutionContext} import scala.reflect.runtime.universe._ import rx.lang.scala.Observable import nest.sparkle.core.OngoingData import nest.sparkle.measure.{Span, DummySpan} case class OngoingEvents[T, U](initial: Observable[Event[T, U]], ongoing: Observable[Event[T, U]]) /** a readable column of data that supports simple range queries. */ trait Column[T, U] { /** name of this column */ def name: String def keyType: TypeTag[_] def valueType: TypeTag[_] /** Obsolete, use ReadRange. */ def readRangeOld // format: OFF ( start: Option[T] = None, end: Option[T] = None, limit: Option[Long] = None, parentSpan: Option[Span] = None ) ( implicit execution: ExecutionContext): OngoingEvents[T, U] // format: ON /** read a slice of events from the column, inclusive of the start and ends. * If start is missing, read from the first element in the column. If end is missing * read from the last element in the column. */ def readRange // format: OFF ( start: Option[T] = None, end: Option[T] = None, limit: Option[Long] = None, parentSpan: Option[Span] = None ) ( implicit execution: ExecutionContext): OngoingData[T, U] // format: ON /** optionally return the last key in the column */ def lastKey()(implicit execution: ExecutionContext, parentSpan: Span): Future[Option[T]] = ??? /** optionally return the first key in the column */ def firstKey()(implicit execution: ExecutionContext, parentSpan: Span): Future[Option[T]] = ??? /** return a count of the items in the column, or zero if it is empty */ def countItems(start: Option[T] = None, end: Option[T] = None) ( implicit execution: ExecutionContext, parentSpan: Span = DummySpan): Future[Long] = ??? // LATER add authorization hook, to validate permission to read a range } // LATER consider making value more flexible: // . a sequence? values:Seq[V]. e.g. if we want to store: [1:[123, 134], 2:[100]] // . an hlist? e.g. if we want to store a csv file with multiple separately typed columns per row // . possibly a typeclass that covers all single, sequence, and typed list cases? /** an single item in the datastore, e.g. a timestamp and value */ // Obsolete, DataArrays are the new black. case class Event[T, V](key: T, value: V) // TODO get rid of Event in favor of DataArray object Event { type Events[T, U] = Seq[Event[T, U]] }
mighdoll/sparkle
store/src/main/scala/nest/sparkle/store/Column.scala
Scala
apache-2.0
2,489
package com.lambtors.poker_api.module.shared.infrastructure.provider import java.util.UUID import scala.concurrent.Future import com.lambtors.poker_api.module.shared.domain.UUIDProvider class RandomUUIDProvider extends UUIDProvider[Future] { override def provide(): Future[UUID] = Future.successful(UUID.randomUUID()) }
lambtors/poker-api
src/main/scala/com/lambtors/poker_api/module/shared/infrastructure/provider/RandomUUIDProvider.scala
Scala
mit
326
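The provider above is the effect-polymorphic seam for UUID generation; a hedged sketch of a deterministic counterpart for tests, assuming UUIDProvider[F[_]] exposes only the provide() method seen in the override.

import java.util.UUID
import scala.concurrent.Future
import com.lambtors.poker_api.module.shared.domain.UUIDProvider

// Fixed-value provider so specs can assert on the generated id
final class FixedUUIDProvider(fixed: UUID) extends UUIDProvider[Future] {
  override def provide(): Future[UUID] = Future.successful(fixed)
}

val knownId = UUID.fromString("00000000-0000-0000-0000-000000000001")
val provider: UUIDProvider[Future] = new FixedUUIDProvider(knownId)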
package slick.basic import slick.util.AsyncExecutor.{Priority, Continuation, Fresh, WithConnection} import java.io.Closeable import java.util.concurrent.atomic.{AtomicReferenceArray, AtomicLong} import com.typesafe.config.Config import scala.concurrent.{Promise, ExecutionContext, Future} import scala.util.{Success, Failure} import scala.util.control.NonFatal import org.slf4j.LoggerFactory import org.reactivestreams._ import slick.SlickException import slick.dbio._ import slick.util._ /** Backend for the basic database and session handling features. * Concrete backends like `JdbcBackend` extend this type and provide concrete * types for `Database`, `DatabaseFactory` and `Session`. */ trait BasicBackend { self => protected lazy val actionLogger = new SlickLogger(LoggerFactory.getLogger(classOf[BasicBackend].getName+".action")) protected lazy val streamLogger = new SlickLogger(LoggerFactory.getLogger(classOf[BasicBackend].getName+".stream")) type This >: this.type <: BasicBackend /** The type of database objects used by this backend. */ type Database <: DatabaseDef /** The type of the database factory used by this backend. */ type DatabaseFactory /** The type of session objects used by this backend. */ type Session >: Null <: SessionDef /** The type of the context used for running SynchronousDatabaseActions */ type Context >: Null <: BasicActionContext /** The type of the context used for streaming SynchronousDatabaseActions */ type StreamingContext >: Null <: Context with BasicStreamingActionContext /** The database factory */ val Database: DatabaseFactory /** Create a Database instance through [[https://github.com/typesafehub/config Typesafe Config]]. * The supported config keys are backend-specific. This method is used by `DatabaseConfig`. * * @param path The path in the configuration file for the database configuration, or an empty * string for the top level of the `Config` object. * @param config The `Config` object to read from. */ def createDatabase(config: Config, path: String): Database /** A database instance to which connections can be created. */ trait DatabaseDef extends Closeable { this: Database => /** Create a new session. The session needs to be closed explicitly by calling its close() method. */ def createSession(): Session /** Free all resources allocated by Slick for this Database. This is done asynchronously, so * you need to wait for the returned `Future` to complete in order to ensure that everything * has been shut down. */ def shutdown: Future[Unit] = Future(close)(ExecutionContext.fromExecutor(AsyncExecutor.shutdownExecutor)) /** Free all resources allocated by Slick for this Database, blocking the current thread until * everything has been shut down. * * Backend implementations which are based on a naturally blocking shutdown procedure can * simply implement this method and get `shutdown` as an asynchronous wrapper for free. If * the underlying shutdown procedure is asynchronous, you should implement `shutdown` instead * and wrap it with `Await.result` in this method. */ def close: Unit /** Run an Action asynchronously and return the result as a Future. 
*/ final def run[R](a: DBIOAction[R, NoStream, Nothing]): Future[R] = runInternal(a, false) private[slick] final def runInternal[R](a: DBIOAction[R, NoStream, Nothing], useSameThread: Boolean): Future[R] = try runInContext(a, createDatabaseActionContext(useSameThread), false, true) catch { case NonFatal(ex) => Future.failed(ex) } /** Create a `Publisher` for Reactive Streams which, when subscribed to, will run the specified * `DBIOAction` and return the result directly as a stream without buffering everything first. * This method is only supported for streaming actions. * * The Publisher itself is just a stub that holds a reference to the action and this Database. * The action does not actually start to run until the call to `onSubscribe` returns, after * which the Subscriber is responsible for reading the full response or cancelling the * Subscription. The created Publisher can be reused to serve a multiple Subscribers, * each time triggering a new execution of the action. * * For the purpose of combinators such as `cleanup` which can run after a stream has been * produced, cancellation of a stream by the Subscriber is not considered an error. For * example, there is no way for the Subscriber to cause a rollback when streaming the * results of `someQuery.result.transactionally`. * * When using a JDBC back-end, all `onNext` calls are done synchronously and the ResultSet row * is not advanced before `onNext` returns. This allows the Subscriber to access LOB pointers * from within `onNext`. If streaming is interrupted due to back-pressure signaling, the next * row will be prefetched (in order to buffer the next result page from the server when a page * boundary has been reached). */ final def stream[T](a: DBIOAction[_, Streaming[T], Nothing]): DatabasePublisher[T] = streamInternal(a, false) private[slick] final def streamInternal[T](a: DBIOAction[_, Streaming[T], Nothing], useSameThread: Boolean): DatabasePublisher[T] = createPublisher(a, s => createStreamingDatabaseActionContext(s, useSameThread)) /** Create a Reactive Streams `Publisher` using the given context factory. */ protected[this] def createPublisher[T](a: DBIOAction[_, Streaming[T], Nothing], createCtx: Subscriber[_ >: T] => StreamingContext): DatabasePublisher[T] = new DatabasePublisher[T] { def subscribe(s: Subscriber[_ >: T]) = { if(s eq null) throw new NullPointerException("Subscriber is null") val ctx = createCtx(s) if(streamLogger.isDebugEnabled) streamLogger.debug(s"Signaling onSubscribe($ctx)") val subscribed = try { s.onSubscribe(ctx.subscription); true } catch { case NonFatal(ex) => streamLogger.warn("Subscriber.onSubscribe failed unexpectedly", ex) false } if(subscribed) { try { runInContext(a, ctx, true, true).onComplete { case Success(_) => ctx.tryOnComplete case Failure(t) => ctx.tryOnError(t) }(DBIO.sameThreadExecutionContext) } catch { case NonFatal(ex) => ctx.tryOnError(ex) } } } } /** Create the default DatabaseActionContext for this backend. */ protected[this] def createDatabaseActionContext[T](_useSameThread: Boolean): Context /** Create the default StreamingDatabaseActionContext for this backend. */ protected[this] def createStreamingDatabaseActionContext[T](s: Subscriber[_ >: T], useSameThread: Boolean): StreamingContext /** Run an Action in an existing DatabaseActionContext. This method can be overridden in * subclasses to support new DatabaseActions which cannot be expressed through * SynchronousDatabaseAction. * * @param streaming Whether to return the result as a stream. 
In this case, the context must * be a `StreamingDatabaseActionContext` and the Future result should be * completed with `null` or failed after streaming has finished. This * method should not call any `Subscriber` method other than `onNext`. */ protected[this] def runInContext[R](a: DBIOAction[R, NoStream, Nothing], ctx: Context, streaming: Boolean, topLevel: Boolean): Future[R] = { logAction(a, ctx) a match { case SuccessAction(v) => Future.successful(v) case FailureAction(t) => Future.failed(t) case FutureAction(f) => f case FlatMapAction(base, f, ec) => runInContext(base, ctx, false, topLevel).flatMap(v => runInContext(f(v), ctx, streaming, false))(ctx.getEC(ec)) case AndThenAction(actions) => val last = actions.length - 1 def run(pos: Int, v: Any): Future[Any] = { val f1 = runInContext(actions(pos), ctx, streaming && pos == last, pos == 0) if(pos == last) f1 else f1.flatMap(run(pos + 1, _))(DBIO.sameThreadExecutionContext) } run(0, null).asInstanceOf[Future[R]] case sa @ SequenceAction(actions) => val len = actions.length val results = new AtomicReferenceArray[Any](len) def run(pos: Int): Future[Any] = { if(pos == len) Future.successful { val b = sa.cbf() var i = 0 while(i < len) { b += results.get(i) i += 1 } b.result() } else runInContext(actions(pos), ctx, false, pos == 0).flatMap { (v: Any) => results.set(pos, v) run(pos + 1) } (DBIO.sameThreadExecutionContext) } run(0).asInstanceOf[Future[R]] case CleanUpAction(base, f, keepFailure, ec) => val p = Promise[R]() runInContext(base, ctx, streaming, topLevel).onComplete { t1 => try { val a2 = f(t1 match { case Success(_) => None case Failure(t) => Some(t) }) runInContext(a2, ctx, false, false).onComplete { t2 => if(t2.isFailure && (t1.isSuccess || !keepFailure)) p.complete(t2.asInstanceOf[Failure[R]]) else p.complete(t1) } (DBIO.sameThreadExecutionContext) } catch { case NonFatal(ex) => throw (t1 match { case Failure(t) if keepFailure => t case _ => ex }) } } (ctx.getEC(ec)) p.future case FailedAction(a) => runInContext(a, ctx, false, topLevel).failed.asInstanceOf[Future[R]] case AsTryAction(a) => val p = Promise[R]() runInContext(a, ctx, false, topLevel).onComplete(v => p.success(v.asInstanceOf[R]))(DBIO.sameThreadExecutionContext) p.future case NamedAction(a, _) => runInContext(a, ctx, streaming, topLevel) case a: SynchronousDatabaseAction[_, _, _, _] => if(streaming) { if(a.supportsStreaming) streamSynchronousDatabaseAction(a.asInstanceOf[SynchronousDatabaseAction[_, _ <: NoStream, This, _ <: Effect]], ctx.asInstanceOf[StreamingContext], !topLevel).asInstanceOf[Future[R]] else runInContext(CleanUpAction(AndThenAction(Vector(DBIO.Pin, a.nonFusedEquivalentAction)), _ => DBIO.Unpin, true, DBIO.sameThreadExecutionContext), ctx, streaming, topLevel) } else runSynchronousDatabaseAction(a.asInstanceOf[SynchronousDatabaseAction[R, NoStream, This, _]], ctx, !topLevel) case a: DatabaseAction[_, _, _] => throw new SlickException(s"Unsupported database action $a for $this") } } /** Within a synchronous execution, ensure that a Session is available. */ protected[this] final def acquireSession(ctx: Context): Unit = if(!ctx.isPinned) ctx.currentSession = createSession() /** Within a synchronous execution, close the current Session unless it is pinned. * * @param discardErrors If set to true, swallow all non-fatal errors that arise while * closing the Session. 
*/ protected[this] final def releaseSession(ctx: Context, discardErrors: Boolean): Unit = if(!ctx.isPinned) { try ctx.currentSession.close() catch { case NonFatal(ex) if(discardErrors) => } ctx.currentSession = null } /** Run a `SynchronousDatabaseAction` on this database. */ protected[this] def runSynchronousDatabaseAction[R](a: SynchronousDatabaseAction[R, NoStream, This, _], ctx: Context, continuation: Boolean): Future[R] = { val promise = Promise[R]() ctx.getEC(synchronousExecutionContext).prepare.execute(new AsyncExecutor.PrioritizedRunnable { def priority = ctx.priority(continuation) def run: Unit = try { ctx.readSync val res = try { acquireSession(ctx) val res = try a.run(ctx) catch { case NonFatal(ex) => releaseSession(ctx, true) throw ex } releaseSession(ctx, false) res } finally { if (!ctx.isPinned) connectionReleased = true ctx.sync = 0 } promise.success(res) } catch { case NonFatal(ex) => promise.tryFailure(ex) } }) promise.future } /** Stream a `SynchronousDatabaseAction` on this database. */ protected[this] def streamSynchronousDatabaseAction(a: SynchronousDatabaseAction[_, _ <: NoStream, This, _ <: Effect], ctx: StreamingContext, continuation: Boolean): Future[Null] = { ctx.streamingAction = a scheduleSynchronousStreaming(a, ctx, continuation)(null) ctx.streamingResultPromise.future } /** Stream a part of the results of a `SynchronousDatabaseAction` on this database. */ protected[BasicBackend] def scheduleSynchronousStreaming(a: SynchronousDatabaseAction[_, _ <: NoStream, This, _ <: Effect], ctx: StreamingContext, continuation: Boolean)(initialState: a.StreamState): Unit = try { ctx.getEC(synchronousExecutionContext).prepare.execute(new AsyncExecutor.PrioritizedRunnable { private[this] def str(l: Long) = if(l != Long.MaxValue) l else if(GlobalConfig.unicodeDump) "\\u221E" else "oo" def priority = ctx.priority(continuation) def run: Unit = try { val debug = streamLogger.isDebugEnabled var state = initialState ctx.readSync if(state eq null) acquireSession(ctx) var demand = ctx.demandBatch var realDemand = if(demand < 0) demand - Long.MinValue else demand do { try { if(debug) streamLogger.debug((if(state eq null) "Starting initial" else "Restarting ") + " streaming action, realDemand = " + str(realDemand)) if(ctx.cancelled) { if(ctx.deferredError ne null) throw ctx.deferredError if(state ne null) { // streaming cancelled before finishing val oldState = state state = null a.cancelStream(ctx, oldState) } } else if((realDemand > 0 || (state eq null))) { val oldState = state state = null state = a.emitStream(ctx, realDemand, oldState) } if(state eq null) { // streaming finished and cleaned up releaseSession(ctx, true) ctx.streamingResultPromise.trySuccess(null) } } catch { case NonFatal(ex) => if(state ne null) try a.cancelStream(ctx, state) catch ignoreFollowOnError releaseSession(ctx, true) throw ex } finally { ctx.streamState = state if (!ctx.isPinned) connectionReleased = true ctx.sync = 0 } if(debug) { if(state eq null) streamLogger.debug(s"Sent up to ${str(realDemand)} elements - Stream " + (if(ctx.cancelled) "cancelled" else "completely delivered")) else streamLogger.debug(s"Sent ${str(realDemand)} elements, more available - Performing atomic state transition") } demand = ctx.delivered(demand) realDemand = if(demand < 0) demand - Long.MinValue else demand } while ((state ne null) && realDemand > 0) if(debug) { if(state ne null) streamLogger.debug("Suspending streaming action with continuation (more data available)") else streamLogger.debug("Finished streaming action") } } 
catch { case NonFatal(ex) => ctx.streamingResultPromise.tryFailure(ex) } }) } catch { case NonFatal(ex) => streamLogger.warn("Error scheduling synchronous streaming", ex) throw ex } /** Return the default ExecutionContet for this Database which should be used for running * SynchronousDatabaseActions for asynchronous execution. */ protected[this] def synchronousExecutionContext: ExecutionContext protected[this] def logAction(a: DBIOAction[_, NoStream, Nothing], ctx: Context): Unit = { if(actionLogger.isDebugEnabled && a.isLogged) { ctx.sequenceCounter += 1 val logA = a.nonFusedEquivalentAction val aPrefix = if(a eq logA) "" else "[fused] " val dump = new TreePrinter(prefix = " ", firstPrefix = aPrefix, narrow = { case a: DBIOAction[_, _, _] => a.nonFusedEquivalentAction case o => o }).get(logA) val msg = DumpInfo.highlight("#" + ctx.sequenceCounter) + ": " + dump.substring(0, dump.length-1) actionLogger.debug(msg) } } } /** A logical session of a `Database`. The underlying database connection is created lazily on demand. */ trait SessionDef extends Closeable { /** Close this Session. */ def close(): Unit /** Force an actual database session to be opened. Slick sessions are lazy, so you do not * get a real database connection until you need it or you call force() on the session. */ def force(): Unit } /** The context object passed to database actions by the execution engine. */ trait BasicActionContext extends ActionContext { /** Whether to run all operations on the current thread or schedule them normally on the * appropriate ExecutionContext. This is used by the blocking API. */ protected[BasicBackend] val useSameThread: Boolean /** Return the specified ExecutionContext unless running in same-thread mode, in which case * `Action.sameThreadExecutionContext` is returned instead. */ private[BasicBackend] def getEC(ec: ExecutionContext): ExecutionContext = if(useSameThread) DBIO.sameThreadExecutionContext else ec /** A volatile variable to enforce the happens-before relationship (see * [[https://docs.oracle.com/javase/specs/jls/se7/html/jls-17.html]] and * [[http://gee.cs.oswego.edu/dl/jmm/cookbook.html]]) when executing something in * a synchronous action context. It is read when entering the context and written when leaving * so that all writes to non-volatile variables within the context are visible to the next * synchronous execution. */ @volatile private[BasicBackend] var sync = 0 private[BasicBackend] def readSync = sync // workaround for SI-9053 to avoid warnings private[BasicBackend] var currentSession: Session = null private[BasicBackend] var releasedConnection = false private[BasicBackend] def priority(continuation: Boolean): Priority = { if (currentSession != null) WithConnection else if (continuation) Continuation else Fresh } /** Used for the sequence counter in Action debug output. This variable is volatile because it * is only updated sequentially but not protected by a synchronous action context. */ @volatile private[BasicBackend] var sequenceCounter = 0 def session: Session = currentSession } /** A special DatabaseActionContext for streaming execution. */ protected[this] class BasicStreamingActionContext(subscriber: Subscriber[_], protected[BasicBackend] val useSameThread: Boolean, database: Database) extends BasicActionContext with StreamingActionContext with Subscription { /** Whether the Subscriber has been signaled with `onComplete` or `onError`. 
*/ private[this] var finished = false /** The total number of elements requested and not yet marked as delivered by the synchronous * streaming action. Whenever this value drops to 0, streaming is suspended. When it is raised * up from 0 in `request`, streaming is scheduled to be restarted. It is initially set to * `Long.MinValue` when streaming starts. Any negative value above `Long.MinValue` indicates * the actual demand at that point. It is reset to 0 when the initial streaming ends. */ private[this] val remaining = new AtomicLong(Long.MinValue) /** An error that will be signaled to the Subscriber when the stream is cancelled or * terminated. This is used for signaling demand overflow in `request()` while guaranteeing * that the `onError` message does not overlap with an active `onNext` call. */ private[BasicBackend] var deferredError: Throwable = null /** The state for a suspended streaming action. Must only be set from a synchronous action * context. */ private[BasicBackend] var streamState: AnyRef = null /** The streaming action which may need to be continued with the suspended state */ private[BasicBackend] var streamingAction: SynchronousDatabaseAction[_, _ <: NoStream, This, _ <: Effect] = null @volatile private[this] var cancelRequested = false /** The Promise to complete when streaming has finished. */ val streamingResultPromise = Promise[Null]() /** Indicate that the specified number of elements has been delivered. Returns the remaining * demand. This is an atomic operation. It must only be called from the synchronous action * context which performs the streaming. */ def delivered(num: Long): Long = remaining.addAndGet(-num) /** Get the current demand that has not yet been marked as delivered and mark it as being in * the current batch. When this value is negative, the initial streaming action is still * running and the real demand can be computed by subtracting `Long.MinValue` from the * returned value. */ def demandBatch: Long = remaining.get() /** Whether the stream has been cancelled by the Subscriber */ def cancelled: Boolean = cancelRequested def emit(v: Any): Unit = subscriber.asInstanceOf[Subscriber[Any]].onNext(v) /** Finish the stream with `onComplete` if it is not finished yet. May only be called from a * synchronous action context. */ def tryOnComplete: Unit = if(!finished && !cancelRequested) { if(streamLogger.isDebugEnabled) streamLogger.debug("Signaling onComplete()") finished = true try subscriber.onComplete() catch { case NonFatal(ex) => streamLogger.warn("Subscriber.onComplete failed unexpectedly", ex) } } /** Finish the stream with `onError` if it is not finished yet. May only be called from a * synchronous action context. */ def tryOnError(t: Throwable): Unit = if(!finished) { if(streamLogger.isDebugEnabled) streamLogger.debug(s"Signaling onError($t)") finished = true try subscriber.onError(t) catch { case NonFatal(ex) => streamLogger.warn("Subscriber.onError failed unexpectedly", ex) } } /** Restart a suspended streaming action. Must only be called from the Subscriber context. 
*/ def restartStreaming: Unit = { readSync val s = streamState if(s ne null) { streamState = null if(streamLogger.isDebugEnabled) streamLogger.debug("Scheduling stream continuation after transition from demand = 0") val a = streamingAction database.scheduleSynchronousStreaming(a, this.asInstanceOf[StreamingContext], continuation = true)(s.asInstanceOf[a.StreamState]) } else { if(streamLogger.isDebugEnabled) streamLogger.debug("Saw transition from demand = 0, but no stream continuation available") } } def subscription = this ////////////////////////////////////////////////////////////////////////// Subscription methods def request(l: Long): Unit = if(!cancelRequested) { if(l <= 0) { deferredError = new IllegalArgumentException("Requested count must not be <= 0 (see Reactive Streams spec, 3.9)") cancel } else { if(!cancelRequested && remaining.getAndAdd(l) == 0L) restartStreaming } } def cancel: Unit = if(!cancelRequested) { cancelRequested = true // Restart streaming because cancelling requires closing the result set and the session from // within a synchronous action context. This will also complete the result Promise and thus // allow the rest of the scheduled Action to run. if(remaining.getAndSet(Long.MaxValue) == 0L) restartStreaming } } }
Radsaggi/slick
slick/src/main/scala/slick/basic/BasicBackend.scala
Scala
bsd-2-clause
24,573
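The doc comments in BasicBackend above describe the two entry points of a Database: run, which executes a DBIOAction asynchronously and completes a Future with the result, and stream, which wraps the action in a Reactive Streams Publisher that only starts executing once a Subscriber subscribes and can be reused for multiple subscribers. A minimal usage sketch against the higher-level JDBC profile API; the H2 in-memory database and the COFFEES table are assumptions for illustration and do not appear in the file above:

import slick.jdbc.H2Profile.api._
import scala.concurrent.Await
import scala.concurrent.duration._

object RunAndStreamSketch extends App {
  // In-memory H2 database; any JdbcBackend database would behave the same way.
  val db = Database.forURL("jdbc:h2:mem:test;DB_CLOSE_DELAY=-1", driver = "org.h2.Driver")

  class Coffees(tag: Tag) extends Table[(String, Double)](tag, "COFFEES") {
    def name  = column[String]("NAME", O.PrimaryKey)
    def price = column[Double]("PRICE")
    def *     = (name, price)
  }
  val coffees = TableQuery[Coffees]

  val setup = DBIO.seq(
    coffees.schema.create,
    coffees ++= Seq(("Colombian", 7.99), ("Espresso", 9.99))
  )

  // run: executes the action asynchronously and returns a Future with the result.
  val names = Await.result(db.run(setup andThen coffees.map(_.name).result), 10.seconds)
  println(names)

  // stream: returns a DatabasePublisher; foreach subscribes and consumes rows one
  // at a time instead of materialising the whole result set first.
  Await.result(db.stream(coffees.result).foreach(row => println(s"row: $row")), 10.seconds)

  db.close()
}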
package dotty.tools.dotc package transform import TreeTransforms._ import core.Denotations._ import core.SymDenotations._ import core.Contexts._ import core.Symbols._ import core.Types._ import core.Constants._ import core.StdNames._ import core.TypeErasure.isUnboundedGeneric import typer.ErrorReporting._ import ast.Trees._ import Erasure.Boxing._ import core.TypeErasure._ /** This transform normalizes type tests and type casts, * also replacing type tests with singleton argument type with reference equality check * Any remaining type tests * - use the object methods $isInstanceOf and $asInstanceOf * - have a reference type as receiver * - can be translated directly to machine instructions * * * Unfortunately this phase ended up being not Y-checkable unless types are erased. A cast to an ConstantType(3) or x.type * cannot be rewritten before erasure. */ trait TypeTestsCasts { import ast.tpd._ // override def phaseName: String = "typeTestsCasts" def interceptTypeApply(tree: TypeApply)(implicit ctx: Context): Tree = ctx.traceIndented(s"transforming ${tree.show}", show = true) { tree.fun match { case fun @ Select(qual, selector) => val sym = tree.symbol def isPrimitive(tp: Type) = tp.classSymbol.isPrimitiveValueClass def derivedTree(qual1: Tree, sym: Symbol, tp: Type) = cpy.TypeApply(tree)(qual1.select(sym).withPos(qual.pos), List(TypeTree(tp))) def qualCls = qual.tpe.widen.classSymbol def transformIsInstanceOf(expr:Tree, argType: Type): Tree = { def argCls = argType.classSymbol if (expr.tpe <:< argType) Literal(Constant(true)) withPos tree.pos else if (argCls.isPrimitiveValueClass) if (qualCls.isPrimitiveValueClass) Literal(Constant(qualCls == argCls)) else transformIsInstanceOf(expr, defn.boxedClass(argCls).typeRef) else argType.dealias match { case _: SingletonType => val cmpOp = if (argType derivesFrom defn.AnyValClass) defn.Any_equals else defn.Object_eq expr.select(cmpOp).appliedTo(singleton(argType)) case AndType(tp1, tp2) => evalOnce(expr) { fun => val erased1 = transformIsInstanceOf(fun, tp1) val erased2 = transformIsInstanceOf(fun, tp2) erased1 match { case Literal(Constant(true)) => erased2 case _ => erased2 match { case Literal(Constant(true)) => erased1 case _ => erased1 and erased2 } } } case defn.MultiArrayType(elem, ndims) if isUnboundedGeneric(elem) => def isArrayTest(arg: Tree) = ref(defn.runtimeMethod(nme.isArray)).appliedTo(arg, Literal(Constant(ndims))) if (ndims == 1) isArrayTest(qual) else evalOnce(qual) { qual1 => derivedTree(qual1, defn.Any_isInstanceOf, qual1.tpe) and isArrayTest(qual1) } case _ => derivedTree(expr, defn.Any_isInstanceOf, argType) } } def transformAsInstanceOf(argType: Type): Tree = { def argCls = argType.widen.classSymbol if (qual.tpe <:< argType) Typed(qual, tree.args.head) else if (qualCls.isPrimitiveValueClass) { if (argCls.isPrimitiveValueClass) primitiveConversion(qual, argCls) else derivedTree(box(qual), defn.Any_asInstanceOf, argType) } else if (argCls.isPrimitiveValueClass) unbox(qual.ensureConforms(defn.ObjectType), argType) else derivedTree(qual, defn.Any_asInstanceOf, argType) } def erasedArg = erasure(tree.args.head.tpe) if (sym eq defn.Any_isInstanceOf) transformIsInstanceOf(qual, erasedArg) else if (sym eq defn.Any_asInstanceOf) transformAsInstanceOf(erasedArg) else tree case _ => tree } } }
AlexSikia/dotty
src/dotty/tools/dotc/transform/TypeTestsCasts.scala
Scala
bsd-3-clause
4,092
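The header comment of TypeTestsCasts above says that a type test whose argument is a singleton type is replaced by a reference-equality check (or an equals call for AnyVal-derived types). A rough illustration of that rewrite, assuming Scala 3 semantics; this is ordinary user-level code with made-up names, not compiler API:

object SingletonTypeTestSketch {
  class Session
  val current: Session = new Session

  // What the user writes: a type test against the singleton type current.type.
  def isCurrent(s: Session): Boolean = s.isInstanceOf[current.type]

  // Roughly what transformIsInstanceOf emits for a non-AnyVal singleton:
  // a plain reference-equality check against the singleton value.
  def isCurrentLowered(s: Session): Boolean = s eq current
}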
/*********************************************************************** * Copyright (c) 2017-2018 IBM * Copyright (c) 2013-2018 Commonwealth Computer Research, Inc. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Apache License, Version 2.0 * which accompanies this distribution and is available at * http://www.opensource.org/licenses/apache2.0.php. ***********************************************************************/ package org.locationtech.geomesa.cassandra.index import java.nio.charset.StandardCharsets import com.google.common.primitives.{Longs, Shorts} import org.locationtech.geomesa.cassandra.{NamedColumn, RowValue} import org.opengis.feature.simple.SimpleFeatureType trait CassandraZ3Layout extends CassandraFeatureIndex { private val Shard = NamedColumn("shard", 0, "tinyint", classOf[Byte], partition = true) private val Period = NamedColumn("period", 1, "smallint", classOf[Short], partition = true) private val ZValue = NamedColumn("z", 2, "bigint", classOf[Long]) private val FeatureId = NamedColumn("fid", 3, "text", classOf[String]) override protected val columns: Seq[NamedColumn] = Seq(Shard, Period, ZValue, FeatureId) // * - 1 byte identifying the sft (OPTIONAL - only if table is shared) // * - 1 byte shard // * - 2 byte period // * - 8 bytes z value // * - n bytes feature ID override protected def rowToColumns(sft: SimpleFeatureType, row: Array[Byte]): Seq[RowValue] = { import CassandraFeatureIndex.RichByteArray var shard: java.lang.Byte = null var period: java.lang.Short = null var z: java.lang.Long = null var fid: String = null if (row.length > 0) { shard = row(0) if (row.length > 1) { period = Shorts.fromBytes(row(1), row(2)) if (row.length > 3) { z = Longs.fromBytes(row(3), row.getOrElse(4, 0), row.getOrElse(5, 0), row.getOrElse(6, 0), row.getOrElse(7, 0), row.getOrElse(8, 0), row.getOrElse(9, 0), row.getOrElse(10, 0)) if (row.length > 11) { fid = new String(row, 11, row.length - 11, StandardCharsets.UTF_8) } } } } Seq(RowValue(Shard, shard), RowValue(Period, period), RowValue(ZValue, z), RowValue(FeatureId, fid)) } override protected def columnsToRow(columns: Seq[RowValue]): Array[Byte] = { val shard = columns.head.value.asInstanceOf[Byte] val period = Shorts.toByteArray(columns(1).value.asInstanceOf[Short]) val z = Longs.toByteArray(columns(2).value.asInstanceOf[Long]) val fid = columns(3).value.asInstanceOf[String].getBytes(StandardCharsets.UTF_8) val row = Array.ofDim[Byte](11 + fid.length) row(0) = shard System.arraycopy(period, 0, row, 1, 2) System.arraycopy(z, 0, row, 3, 8) System.arraycopy(fid, 0, row, 11, fid.length) row } }
boundlessgeo/geomesa
geomesa-cassandra/geomesa-cassandra-datastore/src/main/scala/org/locationtech/geomesa/cassandra/index/CassandraZ3Layout.scala
Scala
apache-2.0
2,910
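The comment block in CassandraZ3Layout above spells out the row-key layout: one shard byte, two period bytes, eight z-value bytes, then the feature id. A self-contained round-trip sketch of that layout using the same Guava helpers the file already depends on; the concrete values are made up:

import java.nio.charset.StandardCharsets
import com.google.common.primitives.{Longs, Shorts}

object Z3RowLayoutSketch extends App {
  val shard: Byte   = 3
  val period: Short = 2451          // e.g. the time period bin
  val z: Long       = 123456789L    // interleaved space-time Z value
  val fid           = "feature-1".getBytes(StandardCharsets.UTF_8)

  // Encode, mirroring columnsToRow above.
  val row = Array.ofDim[Byte](11 + fid.length)
  row(0) = shard
  System.arraycopy(Shorts.toByteArray(period), 0, row, 1, 2)
  System.arraycopy(Longs.toByteArray(z), 0, row, 3, 8)
  System.arraycopy(fid, 0, row, 11, fid.length)

  // Decode, mirroring rowToColumns above for a fully populated row.
  val decodedShard  = row(0)
  val decodedPeriod = Shorts.fromBytes(row(1), row(2))
  val decodedZ      = Longs.fromByteArray(row.slice(3, 11))
  val decodedFid    = new String(row, 11, row.length - 11, StandardCharsets.UTF_8)

  assert(decodedShard == shard && decodedPeriod == period && decodedZ == z && decodedFid == "feature-1")
}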
class test { case class C(a: Int) { def a(i: Int): Int = 1 } def f(c: C) = /*start*/c.a/*end*/ //missing arguments for method a(Int) } //Int
ilinum/intellij-scala
testdata/typeInference/bugs5/SCL5023.scala
Scala
apache-2.0
152
package pureconfig.module import scala.util.Try import akka.http.scaladsl.model.Uri.ParsingMode import akka.http.scaladsl.model.{IllegalUriException, Uri} import pureconfig.error.{CannotConvert, ExceptionThrown} import pureconfig.{ConfigReader, ConfigWriter} package object akkahttp { implicit val uriReader: ConfigReader[Uri] = ConfigReader.fromString(str => Try(Uri(str, ParsingMode.Strict)).toEither.left .map { case err: IllegalUriException => CannotConvert(str, "Uri", err.info.summary) case err => ExceptionThrown(err) } ) implicit val uriWriter: ConfigWriter[Uri] = ConfigWriter[String].contramap(_.toString) }
melrief/pureconfig
modules/akka-http/src/main/scala/pureconfig/module/akkahttp/package.scala
Scala
mpl-2.0
678
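The package object above only supplies the implicit ConfigReader and ConfigWriter for akka-http's Uri. A small usage sketch, assuming a reasonably recent pureconfig that provides ConfigSource; the endpoint key and the URI are made up for illustration:

import akka.http.scaladsl.model.Uri
import pureconfig.ConfigSource
import pureconfig.module.akkahttp._

object UriConfigSketch extends App {
  val source = ConfigSource.string("""endpoint = "https://example.com/api?page=1"""")

  // loadOrThrow picks up the implicit ConfigReader[Uri] defined in the module above;
  // an unparsable URI would surface as a CannotConvert failure instead.
  val endpoint: Uri = source.at("endpoint").loadOrThrow[Uri]
  println(endpoint)
}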
import sbt._ import Keys._ object RayBuild extends Build { val playPath = Option(System.getProperty("play.path")).getOrElse("../play") val playVersion = Option(System.getProperty("play.version")).getOrElse("2.0") val playRepository = Resolver.file("Local Play Repository", file(new File(playPath, "repository/local").getPath))(Resolver.ivyStylePatterns) val typesafeRepository = Resolver.url("Typesafe repository", url("http://repo.typesafe.com/typesafe/releases/"))(Resolver.ivyStylePatterns) lazy val root = Project(id = "ray", base = file(".")) }
puffnfresh/ray
project/Build.scala
Scala
mit
574
package ohnosequences.compota.cli import java.io.File import com.amazonaws.auth._ import ohnosequences.awstools.ec2.EC2 import com.amazonaws.AmazonClientException import ohnosequences.logging.ConsoleLogger object CredentialsUtils { val logger = new ConsoleLogger("credentials utils") def serializeProvider(provider: AWSCredentialsProvider): String = provider match { case pfc: PropertiesFileCredentialsProvider => { "PropertiesFileCredentialsProvider" } case ip: InstanceProfileCredentialsProvider => { "InstanceProfileCredentialsProvider" } case ep: EnvironmentVariableCredentialsProvider => { "EnvironmentVariableCredentialsProvider" } case p => p.toString } def serializeProviderConstructor(provider: AWSCredentialsProvider): String = provider match { case ip: InstanceProfileCredentialsProvider => { "new com.amazonaws.auth.InstanceProfileCredentialsProvider()" } case ep: EnvironmentVariableCredentialsProvider => { "new com.amazonaws.auth.EnvironmentVariableCredentialsProvider()" } case pfc: PropertiesFileCredentialsProvider => { val fieldField = pfc.getClass().getDeclaredField("credentialsFilePath") fieldField.setAccessible(true) val path= fieldField.get(pfc).asInstanceOf[String] "new com.amazonaws.auth.PropertiesFileCredentialsProvider(\\"\\"\\"$path$\\"\\"\\")".replace("$path$", path) } //todo fix! case p => "" } def checkProvider(provider: AWSCredentialsProvider): Boolean = { try { val ec2 = EC2.create(provider) val size = ec2.ec2.describeSpotPriceHistory().getSpotPriceHistory.size() true } catch { case t: AmazonClientException => { logger.warn("couldn't receive credentials from (or describeSpotPriceHistory() not allowed)" + serializeProvider(provider)) false } } } def retrieveCredentialsProvider(file: Option[String]): Option[AWSCredentialsProvider] = { val credentialsFile = file match { case None => { val defaultLocation = System.getProperty("user.home") new File(defaultLocation, "compota.credentials") } case Some(f) => { new File(f) } } if (credentialsFile.exists()) { val property = new PropertiesFileCredentialsProvider(credentialsFile.getAbsolutePath) if(checkProvider(property)) { return Some(property) } } else { logger.warn("couldn't find file with credentials: " + credentialsFile.getAbsolutePath) } val environmentProvider = new EnvironmentVariableCredentialsProvider() if (checkProvider(environmentProvider)) { return Some(environmentProvider) } val instanceProfile = new InstanceProfileCredentialsProvider() if (checkProvider(instanceProfile)) { return Some(instanceProfile) } None } }
ohnosequences/compotaCLI
src/main/scala/ohnosequences/compota/cli/CredentialsUtils.scala
Scala
agpl-3.0
2,893
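CredentialsUtils above probes a properties file, then the environment variables, then the EC2 instance profile, and returns the first provider that can actually make an AWS call. A minimal usage sketch relying only on the methods defined in that file; note that checkProvider performs a real describeSpotPriceHistory call, so this needs network access and valid credentials:

import com.amazonaws.auth.AWSCredentialsProvider
import ohnosequences.compota.cli.CredentialsUtils

object CredentialsSketch extends App {
  // None means "look for compota.credentials in the user's home directory",
  // mirroring the default location used in the file above.
  val provider: Option[AWSCredentialsProvider] =
    CredentialsUtils.retrieveCredentialsProvider(None)

  provider match {
    case Some(p) => println("using " + CredentialsUtils.serializeProvider(p))
    case None    => sys.error("no working AWS credentials provider found")
  }
}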