code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.job.local
import org.apache.samza.config.{Config, TaskConfigJava}
import org.apache.samza.config.JobConfig._
import org.apache.samza.config.ShellCommandConfig._
import org.apache.samza.container.{SamzaContainer, SamzaContainerListener, TaskName}
import org.apache.samza.coordinator.JobModelManager
import org.apache.samza.coordinator.stream.CoordinatorStreamManager
import org.apache.samza.job.{StreamJob, StreamJobFactory}
import org.apache.samza.metrics.{JmxServer, MetricsRegistryMap, MetricsReporter}
import org.apache.samza.runtime.LocalContainerRunner
import org.apache.samza.storage.ChangelogStreamManager
import org.apache.samza.task.TaskFactoryUtil
import org.apache.samza.util.Logging
import scala.collection.JavaConversions._
import scala.collection.mutable
/**
* Creates a new Thread job with the given config
*/
/**
 * Creates a new Thread job with the given config.
 *
 * Runs the single Samza container on a thread inside the current JVM rather
 * than in a separate process — intended for local debugging only (see the
 * warning emitted about task.opts below).
 */
class ThreadJobFactory extends StreamJobFactory with Logging {
  def getJob(config: Config): StreamJob = {
    info("Creating a ThreadJob, which is only meant for debugging.")
    val metricsRegistry = new MetricsRegistryMap()
    // Bootstrap job metadata from the coordinator stream before building the job model.
    val coordinatorStreamManager = new CoordinatorStreamManager(config, metricsRegistry)
    coordinatorStreamManager.register(getClass.getSimpleName)
    coordinatorStreamManager.start
    coordinatorStreamManager.bootstrap
    val changelogStreamManager = new ChangelogStreamManager(coordinatorStreamManager)
    val coordinator = JobModelManager(coordinatorStreamManager, changelogStreamManager.readPartitionMapping())
    val jobModel = coordinator.jobModel
    // Collect taskName -> changelog partition id for every task in every container
    // and persist the mapping so partition assignment is stable across restarts.
    val taskPartitionMappings: mutable.Map[TaskName, Integer] = mutable.Map[TaskName, Integer]()
    for (containerModel <- jobModel.getContainers.values) {
      for (taskModel <- containerModel.getTasks.values) {
        taskPartitionMappings.put(taskModel.getTaskName, taskModel.getChangelogPartition.getPartitionId)
      }
    }
    changelogStreamManager.writePartitionMapping(taskPartitionMappings)
    //create necessary checkpoint and changelog streams
    val checkpointManager = new TaskConfigJava(jobModel.getConfig).getCheckpointManager(metricsRegistry)
    if (checkpointManager != null) {
      checkpointManager.createResources()
    }
    ChangelogStreamManager.createChangelogStreams(jobModel.getConfig, jobModel.maxChangeLogStreamPartitions)
    // A ThreadJob always hosts exactly one container, hard-coded to id "0".
    val containerId = "0"
    val jmxServer = new JmxServer
    val streamApp = TaskFactoryUtil.createStreamApplication(config)
    val appRunner = new LocalContainerRunner(jobModel, "0")
    val taskFactory = TaskFactoryUtil.createTaskFactory(config, streamApp, appRunner)
    // Give developers a nice friendly warning if they've specified task.opts and are using a threaded job.
    config.getTaskOpts match {
      case Some(taskOpts) => warn("%s was specified in config, but is not being used because job is being executed with ThreadJob. You probably want to run %s=%s." format (TASK_JVM_OPTS, STREAM_JOB_FACTORY_CLASS, classOf[ProcessJobFactory].getName))
      case _ => None
    }
    val containerListener = new SamzaContainerListener {
      // Rethrow so a container failure propagates to the thread running the job.
      override def onContainerFailed(t: Throwable): Unit = {
        error("Container failed.", t)
        throw t
      }
      override def onContainerStop(pausedOrNot: Boolean): Unit = {
      }
      override def onContainerStart(): Unit = {
      }
    }
    try {
      coordinator.start
      val container = SamzaContainer(
        containerId,
        jobModel,
        config,
        Map[String, MetricsReporter](),
        taskFactory)
      container.setContainerListener(containerListener)
      val threadJob = new ThreadJob(container)
      threadJob
    } finally {
      // NOTE(review): the coordinator and JMX server are stopped here, BEFORE the
      // returned ThreadJob is started by the caller — presumably the container does
      // not need either at runtime, but confirm before relying on JMX metrics.
      coordinator.stop
      jmxServer.stop
    }
  }
}
| fredji97/samza | samza-core/src/main/scala/org/apache/samza/job/local/ThreadJobFactory.scala | Scala | apache-2.0 | 4,512 |
/*
* Copyright (C) 2011, Mysema Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mysema.scalagen
import japa.parser.ast.body.ModifierSet
import java.util.ArrayList
import com.mysema.scala.BeanUtils
import UnitTransformer._
/** Stateless singleton instance of the [[Properties]] transformer. */
object Properties extends Properties
/**
* Properties turns field + accessor combinations into annotated
* Scala properties
*/
/**
 * Properties turns field + accessor combinations into annotated
 * Scala properties.
 *
 * For every private field whose name matches a no-arg, non-void getter,
 * the getter is removed and its visibility/laziness is folded onto the
 * field itself.
 */
class Properties extends UnitTransformerBase {

  def transform(cu: CompilationUnit): CompilationUnit = {
    cu.accept(this, cu).asInstanceOf[CompilationUnit]
  }

  override def visit(n: ClassOrInterfaceDecl, cu: CompilationUnit): ClassOrInterfaceDecl = {
    val t = super.visit(n, cu).asInstanceOf[ClassOrInterfaceDecl]

    // accessors: getter-shaped methods indexed by name
    val getters = t.getMembers.collect { case m: Method => m }
      .filter(m => isGetter(m))
      .map(m => (m.getName, m)).toMap

    // private fields that have a matching accessor; each entry carries the
    // field name, the variable declarator, and the enclosing field node
    val fields = t.getMembers.collect { case f: Field => f }
      .filter(_.getModifiers.isPrivate)
      .flatMap(f => f.getVariables.map(v => (v.getId.getName, v, f)))
      .filter { case (name, _, _) => getters.contains(name) }

    // remove accessors and move their modifiers onto the fields
    for ((name, variable, field) <- fields) {
      // was `var`: the getter binding is never reassigned, so use `val`
      val getter = getters(name)
      val body = getter.getBody
      if (getter.getModifiers.isAbstract) {
        // abstract getter: drop it and expose the field directly
        t.setMembers(t.getMembers.filterNot(_ == getter))
        field.removeModifier(PRIVATE)
      } else if (isReturnFieldStmt(body(0))) {
        // trivial `return field` accessor: drop it, field takes its visibility
        t.setMembers(t.getMembers.filterNot(_ == getter))
        field.setModifiers(getter.getModifiers)
      } else if (isLazyCreation(body, name)) {
        // lazy-init accessor: drop it, turn the field into a `lazy val`
        t.setMembers(t.getMembers.filterNot(_ == getter))
        variable.setInit(getLazyInit(body))
        field.setModifiers(getter.getModifiers
          .addModifier(LAZY).addModifier(ModifierSet.FINAL))
      }
    }
    t
  }

  // A getter is a parameterless, non-void method with a non-empty body.
  // NOTE(review): the method name is not checked (no "get"/"is" prefix required).
  private def isGetter(method: Method): Boolean = method match {
    case Method(_, t, Nil, Block(_ :: _)) if !t.isInstanceOf[VoidType] => true
    case _ => false
  }
}
| cessationoftime/scalagen | scalagen/src/main/scala/com/mysema/scalagen/Properties.scala | Scala | apache-2.0 | 2,643 |
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2015-2021 Andre White.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.truthencode.ddo.model.spells
import io.truthencode.ddo.model.effect.EffectList
import io.truthencode.ddo.model.spells.SpellElement._
import io.truthencode.ddo.model.spells.component.ComponentList
import scala.collection.immutable
/**
 * Factory for [[SpellBuilder]] instances.
 */
object SpellBuilder {

  /** Starts a builder with no spell elements at all. */
  def apply(): SpellBuilder[EmptySpell] =
    apply[EmptySpell](Set.empty)

  /** Starts a builder that already carries the spell's name. */
  def apply(name: String): SpellBuilder[EmptySpell with WithName] = {
    val seed: Set[SpellElement] = Set(UseSpellName(name))
    apply[EmptySpell with WithName](seed)
  }

  /** Wraps an existing element set in a typed builder. */
  def apply[T <: Spell](ingredients: Set[SpellElement]): SpellBuilder[T] =
    new SpellBuilder[T](ingredients)
}
/**
 * Contract for a type-safe spell builder.
 *
 * The phantom type parameter `T` records which pieces of information have
 * already been supplied; `build` demands evidence that `T` equals
 * [[CompleteSpell]], so an under-specified spell fails to compile rather
 * than at runtime.
 */
abstract protected class BaseSpellBuilder[T <: Spell] protected (
  elements: Set[SpellElement]
) {
  // Intersection of every marker trait a finished spell must carry.
  type CompleteSpell = EmptySpell
    with WithName with WithSpellInfo with WithSpellEffects with WithCasterClass with WithTarget
    with WithSpellSavingThrow with WithSpellPoints with WithComponents with WithLevelCap

  /** Records the spell's display name. */
  def addName(name: String): BaseSpellBuilder[T with WithName]

  /** Records general spell metadata (cooldown, range, components, ...). */
  def addSpellInfo(si: SpellInfo): BaseSpellBuilder[T with WithSpellInfo]

  /** Records which caster classes can cast this spell, and at what level. */
  def addCasterClass(
    cl: Seq[CasterWithLevel]
  ): BaseSpellBuilder[T with WithCasterClass]

  /** Records valid targets for the spell. */
  def addSpellTarget(
    target: List[SpellTarget]
  ): BaseSpellBuilder[T with WithTarget]

  /** Records applicable saving throws. */
  def addSavingThrow(
    saves: List[SavingThrow]
  ): BaseSpellBuilder[T with WithSpellSavingThrow]

  /** Records the spell-point cost. */
  def addSpellPoints(sp: Int): BaseSpellBuilder[T with WithSpellPoints]

  /** Records required spell components. */
  def addComponents(
    component: List[ComponentList]
  ): BaseSpellBuilder[T with WithComponents]

  /** Records the maximum caster level for scaling. */
  def addLevelCap(cl: CasterLevelCap): BaseSpellBuilder[T with WithLevelCap]

  /** Records the effects the spell produces. */
  def addEffect(eff: EffectList): BaseSpellBuilder[T with WithSpellEffects]

  /**
   * Adds Spell Resistance Information
   *
   * @param sr
   * Spell Resistance
   * @return
   * Builder with Spell Resistance information
   */
  def addSpellResistance(sr: SpellResistance): BaseSpellBuilder[T]

  /** Materializes the spell; only compiles once `T` is a [[CompleteSpell]]. */
  def build(implicit ev: T =:= CompleteSpell): Spell
}
/**
 * Default [[BaseSpellBuilder]] implementation.
 *
 * Purely functional: every add* method returns a NEW SpellBuilder over
 * `elements` plus one more [[SpellElement]], widening the phantom type so
 * that `build` (which requires `T =:= CompleteSpell`) only compiles once
 * every required piece of information has been supplied.
 */
class SpellBuilder[T <: Spell](elements: Set[SpellElement] = Set.empty)
  extends BaseSpellBuilder[T](elements) {

  /** Materializes the spell once the phantom type proves it is complete. */
  override def build(implicit ev: T =:= CompleteSpell): Spell =
    SpellDescriptor(elements)

  override def addCasterClass(
    cl: Seq[CasterWithLevel]
  ): BaseSpellBuilder[T with WithCasterClass] =
    SpellBuilder[T with WithCasterClass](elements + UseCasterClass(cl.toSet))

  override def addSpellTarget(
    targets: List[SpellTarget]
  ): BaseSpellBuilder[T with WithTarget] =
    SpellBuilder[T with WithTarget](elements + UseSpellTarget(targets))

  override def addSavingThrow(
    saves: List[SavingThrow]
  ): BaseSpellBuilder[T with WithSpellSavingThrow] =
    SpellBuilder[T with WithSpellSavingThrow](elements + UseSpellSavingThrow(saves))

  override def addSpellPoints(
    sp: Int
  ): BaseSpellBuilder[T with WithSpellPoints] =
    SpellBuilder[T with WithSpellPoints](elements + UseSpellPoints(sp))

  override def addComponents(
    component: List[ComponentList]
  ): BaseSpellBuilder[T with WithComponents] =
    SpellBuilder[T with WithComponents](elements + UseComponents(component))

  override def addLevelCap(
    cl: CasterLevelCap
  ): BaseSpellBuilder[T with WithLevelCap] =
    SpellBuilder[T with WithLevelCap](elements + UseLevelCap(cl.baseLevelCap))

  override def addEffect(
    eff: EffectList
  ): BaseSpellBuilder[T with WithSpellEffects] =
    SpellBuilder[T with WithSpellEffects](elements + UseSpellEffects(eff.effects))

  override def addSpellInfo(
    si: SpellInfo
  ): BaseSpellBuilder[T with WithSpellInfo] =
    SpellBuilder[T with WithSpellInfo](
      elements + UseSpellInfo(
        coolDown = si.coolDown,
        savingThrow = si.savingThrow,
        sr = si.sr,
        target = si.target,
        components = si.components,
        spellPoints = si.spellPoints,
        range = si.range
      )
    )

  override def addName(name: String): BaseSpellBuilder[T with WithName] =
    SpellBuilder[T with WithName](elements + UseSpellName(name))

  /**
   * Adds Spell Resistance Information.
   *
   * Fix: the previous version extracted the WithSpellInfo / WithSpellResistance
   * views of `elements` into two unused vals and evaluated an `if` whose result
   * was discarded and whose branches both built the identical value. All of
   * that was dead code; both paths reduce to the single expression below.
   *
   * @param sr
   * Spell Resistance
   * @return
   * Builder with Spell Resistance information
   */
  override def addSpellResistance(sr: SpellResistance): BaseSpellBuilder[T] =
    SpellBuilder[T](elements + UseSpellResistance(sr.sr))
}
| adarro/ddo-calc | subprojects/common/ddo-core/src/main/scala/io/truthencode/ddo/model/spells/SpellLike.scala | Scala | apache-2.0 | 7,570 |
package com.bloomberg.sparkflow
/**
* Created by ngoehausen on 5/11/16.
*/
/** Holder for small case classes used by serialization-oriented tests. */
object CaseClasses extends Serializable {
  // Immutable value type: a trash fire described by its temperature and
  // the mass of burning material.
  case class TrashFire(temp: Double, mass: Double)
}
| nimbusgo/spark-flow | src/test/scala/com/bloomberg/sparkflow/CaseClasses.scala | Scala | apache-2.0 | 177 |
// NOTE(review): this file is a NEGATIVE compiler regression test (neg/t10474).
// It is EXPECTED to fail compilation — do not "fix" it. The compiler must
// report a proper error (not crash) for the illegal patterns below.
object Test {
  def Foo(a: Int): Char = ???
  object Bar
  // `Foo` here resolves to the method above, which is not a stable identifier,
  // so `Foo.Bar` is illegal in a pattern position.
  def crash[A](): Boolean = Bar match {
    case Foo.Bar => true
    case _ => false
  }
}
// Second crasher from the same ticket: `Foo.Crash` in a val-definition pattern,
// where `Foo` is again a method rather than a stable identifier. Must produce
// a compile error, not a compiler crash.
trait hrhino {
  def Foo(i: Int) = i
  val Foo.Crash = ???
}
| lrytz/scala | test/files/neg/t10474.scala | Scala | apache-2.0 | 213 |
package es.weso.manifest
import es.weso.rdfgraph.nodes.IRI
/**
 * Well-known namespace prefixes and IRIs used when reading W3C test-suite
 * manifests (mf/qt/sht/dawgt vocabularies).
 *
 * Fix: a stray dataset-metadata row was fused onto the closing brace,
 * making the file invalid Scala; the brace is restored. Code is otherwise
 * unchanged.
 */
object ManifestPrefixes {
  // Namespace roots
  lazy val mf = IRI("http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#")
  lazy val qt = IRI("http://www.w3.org/2001/sw/DataAccess/tests/test-query#")
  lazy val sht = IRI("http://www.w3.org/ns/shacl/test-suite#")
  lazy val dc = IRI("http://purl.org/dc/elements/1.1/")
  lazy val dawgt = IRI("http://www.w3.org/2001/sw/DataAccess/tests/test-dawg#")
  lazy val rdfs = IRI("http://www.w3.org/2000/01/rdf-schema#")

  // Manifest vocabulary terms
  lazy val mf_Manifest = mf.add("Manifest")
  lazy val mf_entries = mf.add("entries")
  lazy val mf_include = mf.add("include")
  lazy val mf_name = mf.add("name")
  lazy val mf_action = mf.add("action")
  lazy val mf_result = mf.add("result")
  lazy val mf_status = mf.add("status")

  // RDFS terms
  lazy val rdfs_label = rdfs.add("label")
  lazy val rdfs_comment = rdfs.add("comment")

  // SHACL test-suite terms
  lazy val sht_proposed = sht.add("proposed")
  lazy val sht_approved = sht.add("approved")
  lazy val sht_rejected = sht.add("rejected")
  lazy val sht_specRef = sht.add("specRef")
  lazy val sht_schema = sht.add("schema")
  lazy val sht_schema_format = sht.add("schema-format")
  lazy val sht_data = sht.add("data")
  lazy val sht_data_format = sht.add("data-format")
  lazy val sht_schema_output_format = sht.add("schema-output-format")
  lazy val sht_node = sht.add("node")
  lazy val sht_shape = sht.add("shape")
  lazy val sht_Validate = sht.add("Validate")
  lazy val sht_WellFormedSchema = sht.add("WellFormedSchema")
  lazy val sht_NonWellFormedSchema = sht.add("NonWellFormedSchema")
  lazy val sht_MatchNodeShape = sht.add("MatchNodeShape")
  lazy val sht_ConvertSchemaSyntax = sht.add("ConvertSchemaSyntax")
  lazy val sht_SHACLC = sht.add("SHACLC")
  lazy val sht_TURTLE = sht.add("TURTLE")
}
package com.twitter.inject.server
import com.google.common.net.{HttpHeaders, MediaType}
import com.google.inject.Stage
import com.twitter.conversions.time._
import com.twitter.finagle.builder.ClientBuilder
import com.twitter.finagle.http._
import com.twitter.finagle.service.Backoff._
import com.twitter.finagle.service.RetryPolicy
import com.twitter.finagle.service.RetryPolicy._
import com.twitter.finagle.stats.{NullStatsReceiver, InMemoryStatsReceiver, StatsReceiver}
import com.twitter.finagle.{ChannelClosedException, Service}
import com.twitter.inject.app.Banner._
import com.twitter.inject.app.{Banner, EmbeddedApp, App}
import com.twitter.inject.modules.InMemoryStatsReceiverModule
import com.twitter.inject.server.EmbeddedTwitterServer._
import com.twitter.util._
import java.net.{InetSocketAddress, URI}
import java.util.concurrent.TimeUnit._
import org.jboss.netty.handler.codec.http.{HttpMethod, HttpResponseStatus}
object EmbeddedTwitterServer {
  /**
   * When a tunneled socks proxy is requested, layers the zk-resolver and
   * socks-proxy flags on top of the caller's client flags; otherwise the
   * flags pass through untouched.
   */
  private def resolveClientFlags(useSocksProxy: Boolean, clientFlags: Map[String, String]) =
    if (!useSocksProxy) clientFlags
    else clientFlags ++ Map(
      "com.twitter.server.resolverZkHosts" -> "localhost:2181",
      "com.twitter.finagle.socks.socksProxyHost" -> "localhost",
      "com.twitter.finagle.socks.socksProxyPort" -> "50001")
}
/**
* EmbeddedTwitterServer allows a twitter-server serving http or thrift endpoints to be started
* locally (on ephemeral ports), and tested through it's http/thrift interfaces.
*
* Note: All initialization fields are lazy to aid running multiple tests inside Intellij at the same time
* since Intellij "pre-constructs" ALL the tests before running each one.
*
* @param twitterServer The twitter server to be started locally for integration testing
* @param clientFlags Command line flags (e.g. "foo"->"bar" is translated into -foo=bar)
* @param extraArgs Extra command line arguments
* @param waitForWarmup Once the app is started, wait for App warmup to be completed
* @param stage Guice Stage used to create the server's injector. Since EmbeddedTwitterServer is used for testing, we default to Stage.DEVELOPMENT.
* This makes it possible to only mock objects that are used in a given test, at the expense of not checking that the entire
* object graph is valid. As such, you should always have at lease one Stage.PRODUCTION test for your service (which eagerly
* creates all Guice classes at startup)
* @param useSocksProxy Use a tunneled socks proxy for external service discovery/calls (useful for manually run external integration tests that connect to external services)
* @param skipAppMain Skip the running of appMain when the app starts. You will need to manually call app.appMain() later in your test.
*/
class EmbeddedTwitterServer(
  val twitterServer: Ports,
  clientFlags: Map[String, String] = Map(),
  extraArgs: Seq[String] = Seq(),
  waitForWarmup: Boolean = true,
  stage: Stage = Stage.DEVELOPMENT,
  useSocksProxy: Boolean = false,
  skipAppMain: Boolean = false,
  defaultRequestHeaders: Map[String, String] = Map(),
  streamResponse: Boolean = false)
  extends EmbeddedApp(
    app = twitterServer,
    clientFlags = resolveClientFlags(useSocksProxy, clientFlags),
    resolverMap = Map(),
    extraArgs = extraArgs,
    waitForWarmup = waitForWarmup,
    skipAppMain = skipAppMain,
    stage = stage) {

  /* Constructor */

  // Add framework override modules: swap in an in-memory stats receiver so
  // tests can inspect counters/stats/gauges directly.
  if (isGuiceApp) {
    guiceApp.addFrameworkOverrideModules(InMemoryStatsReceiverModule)
  }

  /* Lazy Fields */

  // Client bound to the server's admin HTTP port; first use starts the server.
  lazy val httpAdminClient = {
    start()
    createHttpClient(
      "httpAdminClient",
      twitterServer.httpAdminPort)
  }

  lazy val statsReceiver = if (isGuiceApp) injector.instance[StatsReceiver] else new InMemoryStatsReceiver
  // NOTE(review): assumes the bound StatsReceiver is in-memory (guaranteed by the
  // override module above for Guice apps); a different binding would fail this cast.
  lazy val inMemoryStatsReceiver = statsReceiver.asInstanceOf[InMemoryStatsReceiver]
  lazy val adminHostAndPort = PortUtils.loopbackAddressForPort(twitterServer.httpAdminPort)

  // Blocks until the server is started, then returns the bound thrift port.
  def thriftPort: Int = {
    start()
    twitterServer.thriftPort.get
  }

  def thriftHostAndPort: String = {
    PortUtils.loopbackAddressForPort(thriftPort)
  }

  /* Protected */

  // A non-Guice server counts as started once an admin port has been bound.
  override protected def nonGuiceAppStarted(): Boolean = {
    twitterServer.httpAdminPort != 0
  }

  override protected def logAppStartup() {
    Banner.banner("Server Started: " + appName)
    println(s"AdminHttp -> http://$adminHostAndPort/admin")
  }

  /* Public */

  lazy val isGuiceTwitterServer = twitterServer.isInstanceOf[App]

  // Idempotent shutdown: `closed` (inherited) guards against double-close.
  override def close() {
    if (!closed) {
      super.close()
      closed = true
    }
  }

  // Resets all in-memory metrics between test cases.
  def clearStats() = {
    inMemoryStatsReceiver.counters.clear()
    inMemoryStatsReceiver.stats.clear()
    inMemoryStatsReceiver.gauges.clear()
  }

  // Dumps all stats, counters and gauges sorted by their first key segment.
  def printStats() {
    def prettyKeys(keys: Seq[String]): String = {
      keys.mkString("/")
    }
    banner(appName + " Stats")
    for ((keys, values) <- inMemoryStatsReceiver.stats.iterator.toSeq.sortBy {_._1.head}) {
      val avg = values.sum / values.size
      println(prettyKeys(keys) + "\\t = Avg " + avg + " with values " + values.mkString(", "))
    }
    for ((keys, value) <- inMemoryStatsReceiver.counters.iterator.toSeq.sortBy {_._1.head}) {
      println(prettyKeys(keys) + "\\t = " + value)
    }
    for ((keys, value) <- inMemoryStatsReceiver.gauges.iterator.toSeq.sortBy {_._1.head}) {
      println(prettyKeys(keys) + "\\t = " + value)
    }
  }

  // Asserts the /health admin endpoint; a healthy server answers 200 "OK".
  def assertHealthy(healthy: Boolean = true) {
    val expectedBody = if (healthy) "OK\\n" else ""
    httpGetAdmin(
      "/health",
      andExpect = Status.Ok,
      withBody = expectedBody)
  }

  /**
   * GETs `path` from the admin interface, optionally asserting on status,
   * Location header and body. `suppress` silences request/response logging.
   */
  def httpGetAdmin(
    path: String,
    accept: MediaType = null,
    headers: Map[String, String] = Map(),
    suppress: Boolean = false,
    andExpect: HttpResponseStatus = Status.Ok,
    withLocation: String = null,
    withBody: String = null): Response = {
    start()
    val request = createApiRequest(path, HttpMethod.GET)
    httpExecute(httpAdminClient, request, addAcceptHeader(accept, headers), suppress, andExpect, withLocation, withBody)
  }

  // Prepends admin-port and log-level args to whatever the parent supplies.
  override protected def combineArgs() = {
    adminAndLogArgs ++ super.combineArgs
  }

  // Sends the request, logs request/response, then applies the optional
  // status/body/location assertions. Null assertion args mean "don't check".
  protected def httpExecute(
    client: Service[Request, Response],
    request: Request,
    headers: Map[String, String] = Map(),
    suppress: Boolean = false,
    andExpect: HttpResponseStatus = Status.Ok,
    withLocation: String = null,
    withBody: String = null): Response = {
    /* Pre - Execute */
    printRequest(request, suppress)

    /* Execute */
    val response = handleRequest(request, client = client, additionalHeaders = headers)

    /* Post - Execute */
    printResponseMetadata(response, suppress)
    printResponseBody(response, suppress)

    if (andExpect != null && response.status != andExpect) {
      response.status should equal(andExpect)
    }
    if (withBody != null) {
      response.contentString should equal(withBody)
    }
    if (withLocation != null) {
      response.location.get should endWith(withLocation)
    }

    response
  }

  // Builds a loopback Finagle HTTP client with generous test timeouts and
  // a retry policy tuned for flaky local startup.
  protected def createHttpClient(
    name: String,
    port: Int,
    tcpConnectTimeout: Duration = 60.seconds,
    connectTimeout: Duration = 60.seconds,
    requestTimeout: Duration = 300.seconds,
    retryPolicy: RetryPolicy[Try[Any]] = httpRetryPolicy,
    secure: Boolean = false): Service[Request, Response] = {
    val host = new InetSocketAddress(PortUtils.loopbackAddress, port)
    val builder = ClientBuilder()
      .name(name)
      .codec(RichHttp[Request](Http(), aggregateChunks = !streamResponse))
      .tcpConnectTimeout(tcpConnectTimeout)
      .connectTimeout(connectTimeout)
      .requestTimeout(requestTimeout)
      .hosts(host)
      .hostConnectionLimit(75)
      .retryPolicy(retryPolicy)
      .reportTo(NullStatsReceiver)
      .failFast(false)

    if (secure)
      builder.tlsWithoutValidation().build()
    else
      builder.build()
  }

  // Applies default then per-call headers (per-call wins) and synchronously
  // awaits the response; blocking is acceptable here because this is test-only.
  private def handleRequest(request: Request, client: Service[Request, Response], additionalHeaders: Map[String, String] = Map()): Response = {
    // Don't overwrite request.headers set by RequestBuilder in httpFormPost.
    val defaultNewHeaders = defaultRequestHeaders filterKeys {!request.headerMap.contains(_)}
    addOrRemoveHeaders(request, defaultNewHeaders)
    addOrRemoveHeaders(request, additionalHeaders) //additional headers get added second so they can overwrite defaults

    val futureResponse = client(request)
    val elapsed = Stopwatch.start()
    try {
      Await.result(futureResponse)
    } catch {
      case e: Throwable =>
        println("ERROR in request: " + request + " " + e + " in " + elapsed().inUnit(MILLISECONDS) + " ms")
        throw e
    }
  }

  /* Private */

  // Retries connection-drop failures (up to 15 times, 1s apart) that can
  // occur while the embedded server is still coming up.
  protected def httpRetryPolicy: RetryPolicy[Try[Any]] = {
    backoff(
      constant(1.second) take 15) {
      case Throw(e: ChannelClosedException) =>
        println("Retrying ChannelClosedException")
        true
    }
  }

  private def printRequest(request: Request, suppress: Boolean) {
    if (!suppress) {
      val headers = request.headerMap.mkString(
        "[Header]\\t",
        "\\n[Header]\\t",
        "")
      val msg = "HTTP " + request.method + " " + request.uri + "\\n" + headers
      if (request.contentString.isEmpty)
        banner(msg)
      else
        banner(msg + "\\n" + prettyRequestBody(request))
    }
  }

  // Hook for subclasses to pretty-print request bodies (e.g. JSON).
  protected def prettyRequestBody(request: Request): String = {
    request.contentString
  }

  private def printResponseMetadata(response: Response, suppress: Boolean) {
    if (!suppress) {
      println("-" * 75)
      println("[Status]\\t" + response.status)
      println(response.headerMap.mkString(
        "[Header]\\t",
        "\\n[Header]\\t",
        ""))
    }
  }

  private def printResponseBody(response: Response, suppress: Boolean) {
    if (!suppress) {
      if (response.contentString.isEmpty) {
        println("*EmptyBody*")
      }
      else {
        printNonEmptyResponseBody(response)
      }
    }
  }

  // Hook for subclasses to pretty-print response bodies.
  protected def printNonEmptyResponseBody(response: Response): Unit = {
    println(response.contentString)
    println()
  }

  private def adminAndLogArgs = Array(
    "-admin.port=" + PortUtils.ephemeralLoopback,
    "-log.level=INFO")

  // Deletes request headers with null-values in map.
  private def addOrRemoveHeaders(request: Request, headers: Map[String, String]): Unit = {
    for ((key, value) <- headers) {
      if (value == null) {
        request.headers.remove(key)
      } else {
        request.headers.set(key, value)
      }
    }
  }

  // Full URLs are reduced to their path component before building the request.
  protected def createApiRequest(path: String, method: HttpMethod = Method.Get) = {
    val pathToUse = if (path.startsWith("http"))
      URI.create(path).getPath
    else
      path

    Request(method, pathToUse)
  }

  private def addAcceptHeader(accept: MediaType, headers: Map[String, String]): Map[String, String] = {
    if (accept != null)
      headers + (HttpHeaders.ACCEPT -> accept.toString)
    else
      headers
  }
}
| tempbottle/finatra | inject/inject-server/src/test/scala/com/twitter/inject/server/EmbeddedTwitterServer.scala | Scala | apache-2.0 | 11,046 |
package org.elasticsearch.spark.rdd
import java.util.Collections
import java.util.Map
import org.apache.commons.logging.LogFactory
import org.apache.commons.logging.Log
import org.apache.spark.SparkContext
import org.apache.spark.Partition
import org.apache.spark.TaskContext
import org.elasticsearch.hadoop.cfg.Settings
import org.elasticsearch.hadoop.rest.InitializationUtils
import org.elasticsearch.hadoop.rest.RestService.PartitionDefinition
import org.elasticsearch.hadoop.serialization.builder.JdkValueReader
/**
 * Java-friendly Elasticsearch RDD: each document is surfaced as a
 * java.util.Map rather than a Scala collection.
 */
private[spark] class JavaEsRDD(
  @transient sc: SparkContext,
  config: scala.collection.Map[String, String] = scala.collection.Map.empty)
  extends AbstractEsRDD[java.util.Map[String, Object]](sc, config) {

  // One iterator per Elasticsearch partition for this Spark partition.
  override def compute(split: Partition, context: TaskContext): JavaEsRDDIterator = {
    new JavaEsRDDIterator(context, split.asInstanceOf[EsPartition].esPartition)
  }
}
/**
 * Iterator over one Elasticsearch partition, producing JDK-typed values.
 *
 * Fix: a stray dataset-metadata row was fused onto the closing brace,
 * making the file invalid Scala; the brace is restored. Code is otherwise
 * unchanged.
 */
private[rdd] class JavaEsRDDIterator(
  context: TaskContext,
  partition: PartitionDefinition)
  extends AbstractEsRDDIterator[Map[String, Object]](context, partition) {

  override def getLogger() = LogFactory.getLog(classOf[JavaEsRDD])

  // Reads hits with the plain-JDK value reader (java.util collections, boxed types).
  override def initReader(settings: Settings, log: Log) = {
    InitializationUtils.setValueReaderIfNotSet(settings, classOf[JdkValueReader], log)
  }

  // NOTE(review): assumes `value` is a (docId, document) pair — index 0 is the
  // id, index 1 the document; confirm against AbstractEsRDDIterator's contract.
  override def createValue(value: Array[Object]): java.util.Map[String, Object] = {
    Collections.singletonMap(value(0).toString(), value(1))
  }
}
package metronome.temporal
import metronome.Duration
import metronome.chrono.{IsoChronology, Chronology}
/*
* Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* Copyright (c) 2012, Stephen Colebourne & Michael Nascimento Santos
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of JSR-310 nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
* A standard set of fields.
* <p>
* This set of fields provide field-based access to manipulate a date, time or date-time.
* The standard set of fields can be extended by implementing {@link TemporalField}.
* <p>
* These fields are intended to be applicable in multiple calendar systems.
* For example, most non-ISO calendar systems define dates as a year, month and day,
* just with slightly different rules.
* The documentation of each field explains how it operates.
*
* @implSpec
* This is a final, immutable and thread-safe enum.
*
* @since 1.8
*/
object ChronoField {
  // NOTE(review): every constant below is a null placeholder -- the enum-constant
  // initialization of the Java original has not been ported yet, so the fields
  // are unusable at runtime until instances are constructed here.
  // Fix: the original declared each constant as `final val X: = null`, which is
  // syntactically invalid Scala (missing type ascription); the `ChronoField`
  // type is restored on all constants.

  /** The nano-of-second, from 0 to 999,999,999; same meaning in all calendar systems. */
  final val NANO_OF_SECOND: ChronoField = null

  /** The nano-of-day, from 0 to (24 * 60 * 60 * 1,000,000,000) - 1. */
  final val NANO_OF_DAY: ChronoField = null

  /** The micro-of-second, from 0 to 999,999. */
  final val MICRO_OF_SECOND: ChronoField = null

  /** The micro-of-day, from 0 to (24 * 60 * 60 * 1,000,000) - 1. */
  final val MICRO_OF_DAY: ChronoField = null

  /** The milli-of-second, from 0 to 999. */
  final val MILLI_OF_SECOND: ChronoField = null

  /** The milli-of-day, from 0 to (24 * 60 * 60 * 1,000) - 1. */
  final val MILLI_OF_DAY: ChronoField = null

  /** The second-of-minute, from 0 to 59. */
  final val SECOND_OF_MINUTE: ChronoField = null

  /** The second-of-day, from 0 to (24 * 60 * 60) - 1. */
  final val SECOND_OF_DAY: ChronoField = null

  /** The minute-of-hour, from 0 to 59. */
  final val MINUTE_OF_HOUR: ChronoField = null

  /** The minute-of-day, from 0 to (24 * 60) - 1. */
  final val MINUTE_OF_DAY: ChronoField = null

  /** The hour-of-am-pm, from 0 to 11, as on a standard 12-hour digital clock. */
  final val HOUR_OF_AMPM: ChronoField = null

  /** The clock-hour-of-am-pm, from 1 to 12, as on a 12-hour analog wall clock. */
  final val CLOCK_HOUR_OF_AMPM: ChronoField = null

  /** The hour-of-day, from 0 to 23, as on a standard 24-hour digital clock. */
  final val HOUR_OF_DAY: ChronoField = null

  /** The clock-hour-of-day, from 1 to 24, as on a 24-hour analog wall clock. */
  final val CLOCK_HOUR_OF_DAY: ChronoField = null

  /** The am-pm-of-day, from 0 (AM) to 1 (PM). */
  final val AMPM_OF_DAY: ChronoField = null

  /** The day-of-week; in the ISO calendar from Monday (1) to Sunday (7). */
  final val DAY_OF_WEEK: ChronoField = null

  /** The day-of-week (1 to 7) within weeks aligned to the start of the month. */
  final val ALIGNED_DAY_OF_WEEK_IN_MONTH: ChronoField = null

  /** The day-of-week (1 to 7) within weeks aligned to the start of the year. */
  final val ALIGNED_DAY_OF_WEEK_IN_YEAR: ChronoField = null

  /** The day-of-month; in ISO from 1 to between 28 and 31 depending on the month. */
  final val DAY_OF_MONTH: ChronoField = null

  /** The day-of-year; in ISO from 1 to 365 (366 in leap years). */
  final val DAY_OF_YEAR: ChronoField = null

  /** The epoch-day: sequential count of days where 1970-01-01 (ISO) is zero. */
  final val EPOCH_DAY: ChronoField = null

  /** The week within a month, with weeks aligned to the start of the month. */
  final val ALIGNED_WEEK_OF_MONTH: ChronoField = null

  /** The week within a year, with weeks aligned to the start of the year. */
  final val ALIGNED_WEEK_OF_YEAR: ChronoField = null

  /** The month-of-year; in ISO from January (1) to December (12). */
  final val MONTH_OF_YEAR: ChronoField = null

  /** The proleptic-month: zero-based count of months from the start of proleptic-year 0. */
  final val PROLEPTIC_MONTH: ChronoField = null

  /** The year within the era; typically used together with [[ChronoField#ERA]]. */
  final val YEAR_OF_ERA: ChronoField = null

  /** The proleptic year, counting sequentially and using negative numbers before year 0. */
  final val YEAR: ChronoField = null

  /** The era, the largest division of the time-line; in ISO, 'BCE' and 'CE'. */
  final val ERA: ChronoField = null

  /** The instant epoch-seconds: count of seconds where 1970-01-01T00:00Z (ISO) is zero. */
  final val INSTANT_SECONDS: ChronoField = null

  /** The offset of local time from UTC/Greenwich, in seconds. */
  final val OFFSET_SECONDS: ChronoField = null
}
/**
 * A date-time field, such as month-of-year or hour-of-day (port of a Java enum;
 * instances are intended to live on the companion object).
 *
 * Fixes over the original mechanical port:
 *  - the constructors assigned to `final val` fields, which is invalid Scala;
 *    the fields are now constructor parameters,
 *  - `def range` returned itself, recursing forever (and taking
 *    `checkValidValue`/`checkValidIntValue` down with it); the backing value is
 *    stored as `_range` to avoid the name clash,
 *  - `isDateBased`/`isTimeBased` referenced the Java enum's implicit `ordinal`,
 *    which does not exist on a plain class; it is now an explicit private field,
 *  - `adjustInto` used an undeclared type parameter `R`; it is declared here.
 */
final class ChronoField private (
    private val ordinal: Int,
    private val name: String,
    private val baseUnit: TemporalUnit,
    private val rangeUnit: TemporalUnit,
    private val _range: ValueRange,
    private val displayNameKey: String) extends TemporalField {

  /** Convenience constructor for fields that have no localized display-name key. */
  private def this(ordinal: Int, name: String, baseUnit: TemporalUnit, rangeUnit: TemporalUnit, range: ValueRange) =
    this(ordinal, name, baseUnit, rangeUnit, range, null)

  /**
   * Gets the locale-specific display name, falling back to the field name when
   * there is no display-name key or no matching resource-bundle entry.
   * Fix: the original evaluated `name` and then fell through to the bundle
   * lookup with a null key; the null-key case now returns immediately.
   */
  override def getDisplayName(locale: Locale): String = {
    if (displayNameKey == null) {
      name
    } else {
      val lr: LocaleResources = LocaleProviderAdapter.getResourceBundleBased.getLocaleResources(locale)
      val rb: ResourceBundle = lr.getJavaTimeFormatData
      val key: String = "field." + displayNameKey
      if (rb.containsKey(key)) rb.getString(key) else name
    }
  }

  /** Gets the unit the field is measured in. */
  def getBaseUnit: TemporalUnit = baseUnit

  /** Gets the unit the field is bound by. */
  def getRangeUnit: TemporalUnit = rangeUnit

  /**
   * Gets the range of valid values for the field in the ISO-8601 calendar
   * system; use `Chronology.range(ChronoField)` for other calendar systems.
   * Note that the range only describes outer minimum and maximum -- values
   * inside it may still be invalid for a particular date.
   *
   * @return the range of valid values for the field, not null
   */
  def range: ValueRange = _range

  /**
   * Checks if this field represents a component of a date;
   * fields from day-of-week to era are date-based.
   * NOTE(review): relies on the companion constants, which are currently null
   * placeholders -- this will NPE until they are instantiated.
   *
   * @return true if it is a component of a date
   */
  def isDateBased: Boolean =
    ordinal >= ChronoField.DAY_OF_WEEK.ordinal && ordinal <= ChronoField.ERA.ordinal

  /**
   * Checks if this field represents a component of a time;
   * fields from nano-of-second to am-pm-of-day are time-based.
   *
   * @return true if it is a component of a time
   */
  def isTimeBased: Boolean = ordinal < ChronoField.DAY_OF_WEEK.ordinal

  /**
   * Checks that the value is within the ISO-8601 outer range of this field.
   *
   * @param value the value to check
   * @return the value that was passed in
   */
  def checkValidValue(value: Long): Long = _range.checkValidValue(value, this)

  /**
   * Checks that the value is valid for this field and fits in an `Int`.
   *
   * @param value the value to check
   * @return the value that was passed in
   */
  def checkValidIntValue(value: Long): Int = _range.checkValidIntValue(value, this)

  /** Delegates the support check to the temporal object itself. */
  def isSupportedBy(temporal: TemporalAccessor): Boolean = temporal.isSupported(this)

  /** Gets the range of this field as refined by the given temporal object. */
  def rangeRefinedBy(temporal: TemporalAccessor): ValueRange = temporal.range(this)

  /** Gets the value of this field from the given temporal object. */
  def getFrom(temporal: TemporalAccessor): Long = temporal.getLong(this)

  /**
   * Returns a copy of the temporal with this field set to `newValue`.
   * The bound mirrors the `java.time` signature
   * `<R extends Temporal> R adjustInto(R temporal, long newValue)`.
   */
  def adjustInto[R <: Temporal](temporal: R, newValue: Long): R =
    temporal.`with`(this, newValue).asInstanceOf[R]

  override def toString: String = name
}
/*
* Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* Copyright (c) 2012, Stephen Colebourne & Michael Nascimento Santos
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of JSR-310 nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
* A standard set of date periods units.
* <p>
* This set of units provide unit-based access to manipulate a date, time or date-time.
* The standard set of units can be extended by implementing {@link TemporalUnit}.
* <p>
* These units are intended to be applicable in multiple calendar systems.
* For example, most non-ISO calendar systems define units of years, months and days,
* just with slightly different rules.
* The documentation of each unit explains how it operates.
*
* @implSpec
* This is a final, immutable and thread-safe enum.
*
* @since 1.8
*/
object ChronoUnit {
  /** Nanosecond, the smallest supported unit of time: the 1,000,000,000th part of a second. */
  val NANOS = ChronoUnit("Nanos", Duration.ofNanos(1))
  /** Microsecond: the 1,000,000th part of the second unit. */
  val MICROS = ChronoUnit("Micros", Duration.ofNanos(1000))
  /** Millisecond: the 1000th part of the second unit. */
  val MILLIS = ChronoUnit("Millis", Duration.ofNanos(1000000))
  /** Second: equal to the SI second, except around a leap-second. */
  val SECONDS = ChronoUnit("Seconds", Duration.ofSeconds(1))
  /** Minute: 60 seconds. */
  val MINUTES = ChronoUnit("Minutes", Duration.ofSeconds(60))
  /** Hour: 60 minutes. */
  val HOURS = ChronoUnit("Hours", Duration.ofSeconds(3600))
  /** Half a day, as used in AM/PM: 12 hours. */
  val HALF_DAYS = ChronoUnit("HalfDays", Duration.ofSeconds(43200))
  /**
   * Day: the standard ISO day from midnight to midnight, estimated at 24 hours.
   * In other calendar systems it must correspond to the day defined by the
   * rising and setting of the Sun; days need not begin at midnight there.
   */
  val DAYS = ChronoUnit("Days", Duration.ofSeconds(86400))
  /** Week: 7 days. Must map to an integral number of days in other calendars. */
  val WEEKS = ChronoUnit("Weeks", Duration.ofSeconds(7 * 86400L))
  /** Month: estimated as one twelfth of {@code 365.2425 Days}. */
  val MONTHS = ChronoUnit("Months", Duration.ofSeconds(31556952L / 12))
  /** Year: 12 months, estimated as {@code 365.2425 Days}. */
  val YEARS = ChronoUnit("Years", Duration.ofSeconds(31556952L))
  /** Decade: 10 years. */
  val DECADES = ChronoUnit("Decades", Duration.ofSeconds(31556952L * 10L))
  /** Century: 100 years. */
  val CENTURIES = ChronoUnit("Centuries", Duration.ofSeconds(31556952L * 100L))
  /** Millennium: 1000 years. */
  val MILLENNIA = ChronoUnit("Millennia", Duration.ofSeconds(31556952L * 1000L))
  /**
   * Era. The ISO calendar system has no eras, so one cannot be added to a
   * date or date-time; the estimated duration is artificially defined as
   * 1,000,000,000 years.
   */
  val ERAS = ChronoUnit("Eras", Duration.ofSeconds(31556952L * 1000000000L))
  /**
   * Artificial unit representing the concept of forever, used with
   * {@link TemporalField} for unbounded fields such as the year or era.
   * The estimated duration is the largest supported by {@code Duration}.
   * Fix: Scala spells the constant {@code Long.MaxValue}; the original
   * {@code Long.MAX_VALUE} is Java syntax and does not compile.
   */
  val FOREVER = ChronoUnit("Forever", Duration.ofSeconds(Long.MaxValue, 999999999))
}
case class ChronoUnit(name: String, estimatedDuration: Duration) extends TemporalUnit {
  // Companion constants (DAYS, FOREVER) are not automatically in scope in Scala.
  import ChronoUnit.{DAYS, FOREVER}

  /**
   * Gets the estimated duration of this unit in the ISO calendar system.
   * All units have an estimated duration; days vary due to daylight saving
   * time and months have different lengths.
   *
   * @return the estimated duration of this unit, not null
   */
  def duration: Duration = estimatedDuration

  /**
   * Checks if the duration of the unit is an estimate: time units are
   * accurate, date units (days and larger) are estimated.
   *
   * Fix: the original called `this.compareTo(DAYS)`, but a case class has no
   * `compareTo`. The constants' estimated durations are strictly increasing
   * in declaration order, so comparing durations is equivalent to the Java
   * enum's ordinal comparison. Assumes Duration is Comparable, as in
   * java.time — TODO confirm against this file's Duration.
   *
   * @return true if the duration is estimated, false if accurate
   */
  def isDurationEstimated: Boolean =
    estimatedDuration.compareTo(DAYS.estimatedDuration) >= 0

  /**
   * Checks if this unit is a date unit: days through eras inclusive.
   * Time-based units and {@code FOREVER} return false.
   *
   * Fix: the original `a >= 0 && this ne FOREVER` mis-parses, because the
   * alphanumeric operator `ne` binds more loosely than `&&`; explicit
   * parentheses restore the intended grouping.
   *
   * @return true if a date unit, false if a time unit
   */
  def isDateBased: Boolean =
    (estimatedDuration.compareTo(DAYS.estimatedDuration) >= 0) && (this ne FOREVER)

  /**
   * Checks if this unit is a time unit: nanos through half-days inclusive.
   *
   * @return true if a time unit, false if a date unit
   */
  def isTimeBased: Boolean =
    estimatedDuration.compareTo(DAYS.estimatedDuration) < 0

  /** Returns whether the temporal supports this unit (delegates to the temporal). */
  override def isSupportedBy(temporal: Temporal): Boolean =
    temporal.isSupported(this)

  /**
   * Returns a copy of the temporal with this unit added.
   * Fix: restores the type parameter ({@code <R extends Temporal>}) dropped
   * by the Java-to-Scala conversion, which had left `R` unresolved.
   */
  def addTo[R <: Temporal](temporal: R, amount: Long): R =
    temporal.plus(amount, this).asInstanceOf[R]

  /** Computes the amount of time between two temporals in terms of this unit. */
  def between(temporal1Inclusive: Temporal, temporal2Exclusive: Temporal): Long =
    temporal1Inclusive.until(temporal2Exclusive, this)

  /** Returns this unit's name. */
  override def toString: String = name
}
/*
* Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* Copyright (c) 2011-2012, Stephen Colebourne & Michael Nascimento Santos
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of JSR-310 nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
* Fields and units specific to the ISO-8601 calendar system,
* including quarter-of-year and week-based-year.
* <p>
* This class defines fields and units that are specific to the ISO calendar system.
*
* <h3>Quarter of year</h3>
* The ISO-8601 standard is based on the standard civic 12 month year.
* This is commonly divided into four quarters, often abbreviated as Q1, Q2, Q3 and Q4.
* <p>
* January, February and March are in Q1.
* April, May and June are in Q2.
* July, August and September are in Q3.
* October, November and December are in Q4.
* <p>
* The complete date is expressed using three fields:
* <p><ul>
* <li>{@link #DAY_OF_QUARTER DAY_OF_QUARTER} - the day within the quarter, from 1 to 90, 91 or 92
* <li>{@link #QUARTER_OF_YEAR QUARTER_OF_YEAR} - the quarter within the year, from 1 (Q1) to 4 (Q4)
* <li>{@link ChronoField#YEAR YEAR} - the standard ISO year
* </ul><p>
*
* <h3>Week based years</h3>
* The ISO-8601 standard was originally intended as a data interchange format,
* defining a string format for dates and times. However, it also defines an
* alternate way of expressing the date, based on the concept of week-based-year.
* <p>
* The date is expressed using three fields:
* <p><ul>
* <li>{@link ChronoField#DAY_OF_WEEK DAY_OF_WEEK} - the standard field defining the
* day-of-week from Monday (1) to Sunday (7)
* <li>{@link #WEEK_OF_WEEK_BASED_YEAR} - the week within the week-based-year
* <li>{@link #WEEK_BASED_YEAR WEEK_BASED_YEAR} - the week-based-year
* </ul><p>
* The week-based-year itself is defined relative to the standard ISO proleptic year.
* It differs from the standard year in that it always starts on a Monday.
* <p>
* The first week of a week-based-year is the first Monday-based week of the standard
* ISO year that has at least 4 days in the new year.
* <p><ul>
* <li>If January 1st is Monday then week 1 starts on January 1st
* <li>If January 1st is Tuesday then week 1 starts on December 31st of the previous standard year
* <li>If January 1st is Wednesday then week 1 starts on December 30th of the previous standard year
* <li>If January 1st is Thursday then week 1 starts on December 29th of the previous standard year
* <li>If January 1st is Friday then week 1 starts on January 4th
* <li>If January 1st is Saturday then week 1 starts on January 3rd
* <li>If January 1st is Sunday then week 1 starts on January 2nd
* </ul><p>
* There are 52 weeks in most week-based years, however on occasion there are 53 weeks.
* <p>
* For example:
* <p>
* <table cellpadding="0" cellspacing="3" border="0" style="text-align: left; width: 50%;">
* <caption>Examples of Week based Years</caption>
* <tr><th>Date</th><th>Day-of-week</th><th>Field values</th></tr>
* <tr><th>2008-12-28</th><td>Sunday</td><td>Week 52 of week-based-year 2008</td></tr>
* <tr><th>2008-12-29</th><td>Monday</td><td>Week 1 of week-based-year 2009</td></tr>
* <tr><th>2008-12-31</th><td>Wednesday</td><td>Week 1 of week-based-year 2009</td></tr>
* <tr><th>2009-01-01</th><td>Thursday</td><td>Week 1 of week-based-year 2009</td></tr>
* <tr><th>2009-01-04</th><td>Sunday</td><td>Week 1 of week-based-year 2009</td></tr>
* <tr><th>2009-01-05</th><td>Monday</td><td>Week 2 of week-based-year 2009</td></tr>
* </table>
*
* @implSpec
* <p>
* This class is immutable and thread-safe.
*
* @since 1.8
*/
object IsoFields {
// NOTE(review): this object is an automated Java-to-Scala conversion and several
// members did not survive intact; individual defects are flagged inline below.
/**
* The field that represents the day-of-quarter.
* <p>
* This field allows the day-of-quarter value to be queried and set.
* The day-of-quarter has values from 1 to 90 in Q1 of a standard year, from 1 to 91
* in Q1 of a leap year, from 1 to 91 in Q2 and from 1 to 92 in Q3 and Q4.
* <p>
* The day-of-quarter can only be calculated if the day-of-year, month-of-year and year
* are available.
* <p>
* When setting this field, the value is allowed to be partially lenient, taking any
* value from 1 to 92. If the quarter has less than 92 days, then day 92, and
* potentially day 91, is in the following quarter.
* <p>
* In the resolving phase of parsing, a date can be created from a year,
* quarter-of-year and day-of-quarter.
* <p>
* In {@linkplain ResolverStyle#STRICT strict mode}, all three fields are
* validated against their range of valid values. The day-of-quarter field
* is validated from 1 to 90, 91 or 92 depending on the year and quarter.
* <p>
* In {@linkplain ResolverStyle#SMART smart mode}, all three fields are
* validated against their range of valid values. The day-of-quarter field is
* validated between 1 and 92, ignoring the actual range based on the year and quarter.
* If the day-of-quarter exceeds the actual range by one day, then the resulting date
* is one day later. If the day-of-quarter exceeds the actual range by two days,
* then the resulting date is two days later.
* <p>
* In {@linkplain ResolverStyle#LENIENT lenient mode}, only the year is validated
* against the range of valid values. The resulting date is calculated equivalent to
* the following three stage approach. First, create a date on the first of January
* in the requested year. Then take the quarter-of-year, subtract one, and add the
* amount in quarters to the date. Finally, take the day-of-quarter, subtract one,
* and add the amount in days to the date.
* <p>
* This unit is an immutable and thread-safe singleton.
*/
final val DAY_OF_QUARTER: TemporalField = Field.DAY_OF_QUARTER
/**
* The field that represents the quarter-of-year.
* <p>
* This field allows the quarter-of-year value to be queried and set.
* The quarter-of-year has values from 1 to 4.
* <p>
* The quarter-of-year can only be calculated if the month-of-year is available.
* <p>
* In the resolving phase of parsing, a date can be created from a year,
* quarter-of-year and day-of-quarter.
* See {@link #DAY_OF_QUARTER} for details.
* <p>
* This unit is an immutable and thread-safe singleton.
*/
final val QUARTER_OF_YEAR: TemporalField = Field.QUARTER_OF_YEAR
/**
* The field that represents the week-of-week-based-year.
* <p>
* This field allows the week of the week-based-year value to be queried and set.
* The week-of-week-based-year has values from 1 to 52, or 53 if the
* week-based-year has 53 weeks.
* <p>
* In the resolving phase of parsing, a date can be created from a
* week-based-year, week-of-week-based-year and day-of-week.
* <p>
* In {@linkplain ResolverStyle#STRICT strict mode}, all three fields are
* validated against their range of valid values. The week-of-week-based-year
* field is validated from 1 to 52 or 53 depending on the week-based-year.
* <p>
* In {@linkplain ResolverStyle#SMART smart mode}, all three fields are
* validated against their range of valid values. The week-of-week-based-year
* field is validated between 1 and 53, ignoring the week-based-year.
* If the week-of-week-based-year is 53, but the week-based-year only has
* 52 weeks, then the resulting date is in week 1 of the following week-based-year.
* <p>
* In {@linkplain ResolverStyle#LENIENT lenient mode}, only the week-based-year
* is validated against the range of valid values. If the day-of-week is outside
* the range 1 to 7, then the resulting date is adjusted by a suitable number of
* weeks to reduce the day-of-week to the range 1 to 7. If the week-of-week-based-year
* value is outside the range 1 to 52, then any excess weeks are added or subtracted
* from the resulting date.
* <p>
* This unit is an immutable and thread-safe singleton.
*/
final val WEEK_OF_WEEK_BASED_YEAR: TemporalField = Field.WEEK_OF_WEEK_BASED_YEAR
/**
* The field that represents the week-based-year.
* <p>
* This field allows the week-based-year value to be queried and set.
* <p>
* The field has a range that matches {@link Date#MAX} and {@link Date#MIN}.
* <p>
* In the resolving phase of parsing, a date can be created from a
* week-based-year, week-of-week-based-year and day-of-week.
* See {@link #WEEK_OF_WEEK_BASED_YEAR} for details.
* <p>
* This unit is an immutable and thread-safe singleton.
*/
final val WEEK_BASED_YEAR: TemporalField = Field.WEEK_BASED_YEAR
/**
* The unit that represents week-based-years for the purpose of addition and subtraction.
* <p>
* This allows a number of week-based-years to be added to, or subtracted from, a date.
* The unit is equal to either 52 or 53 weeks.
* The estimated duration of a week-based-year is the same as that of a standard ISO
* year at {@code 365.2425 Days}.
* <p>
* The rules for addition add the number of week-based-years to the existing value
* for the week-based-year field. If the resulting week-based-year only has 52 weeks,
* then the date will be in week 1 of the following week-based-year.
* <p>
* This unit is an immutable and thread-safe singleton.
*/
final val WEEK_BASED_YEARS: TemporalUnit = Unit.WEEK_BASED_YEARS
/**
* Unit that represents the concept of a quarter-year.
* For the ISO calendar system, it is equal to 3 months.
* The estimated duration of a quarter-year is one quarter of {@code 365.2425 Days}.
* <p>
* This unit is an immutable and thread-safe singleton.
*/
final val QUARTER_YEARS: TemporalUnit = Unit.QUARTER_YEARS
/**
* Implementation of the field.
*/
private object Field {
// Returns true when the temporal's chronology is the ISO-8601 calendar system.
private def isIso(temporal: TemporalAccessor): Boolean = {
Chronology.from(temporal) == IsoChronology.INSTANCE
}
// NOTE(review): `date` is a parameter (a val), so the reassignment below does not
// compile in Scala, and the Java `return` inside the `if` was lost — as written
// the ValueRange.of(1, 53) result is discarded and 1..52 is always produced.
// The parameter type `Nothing` (here and in the helpers below) appears to stand
// in for LocalDate — TODO confirm against the original Java source.
private def getWeekRange(date: Nothing): ValueRange = {
val wby: Int = getWeekBasedYear(date)
date = date.withDayOfYear(1).withYear(wby)
if (date.getDayOfWeek eq THURSDAY || (date.getDayOfWeek eq WEDNESDAY && date.isLeapYear)) {
ValueRange.of(1, 53)
}
ValueRange.of(1, 52)
}
// Computes the ISO week-of-week-based-year for the given date.
private def getWeek(date: Nothing): Int = {
val dow0: Int = date.getDayOfWeek.ordinal
val doy0: Int = date.getDayOfYear - 1
val doyThu0: Int = doy0 + (3 - dow0)
val alignedWeek: Int = doyThu0 / 7
val firstThuDoy0: Int = doyThu0 - (alignedWeek * 7)
var firstMonDoy0: Int = firstThuDoy0 - 3
if (firstMonDoy0 < -3) {
firstMonDoy0 += 7
}
// NOTE(review): lost `return` — the previous week-based-year's maximum week is
// computed here but the result is discarded; execution falls through to the
// general calculation below.
if (doy0 < firstMonDoy0) {
getWeekRange(date.withDayOfYear(180).minusYears(1)).getMaximum.asInstanceOf[Int]
}
var week: Int = ((doy0 - firstMonDoy0) / 7) + 1
if (week == 53) {
if ((firstMonDoy0 == -3 || (firstMonDoy0 == -2 && date.isLeapYear)) == false) {
week = 1
}
}
week
}
// Computes the week-based-year, which may differ from the calendar year for
// dates within the first/last few days of the year.
private def getWeekBasedYear(date: Nothing): Int = {
var year: Int = date.getYear
var doy: Int = date.getDayOfYear
if (doy <= 3) {
val dow: Int = date.getDayOfWeek.ordinal
if (doy - dow < -2) {
year -= 1
}
}
else if (doy >= 363) {
val dow: Int = date.getDayOfWeek.ordinal
doy = doy - 363 - (if (date.isLeapYear) 1 else 0)
if (doy - dow >= 0) {
year += 1
}
}
year
}
// NOTE(review): the conversion dropped both the type ascriptions and the
// anonymous TemporalField implementations that initialised these constants;
// `final val X: = null` is a syntax error as written.
final val DAY_OF_QUARTER: = null
final val QUARTER_OF_YEAR: = null
final val WEEK_OF_WEEK_BASED_YEAR: = null
final val WEEK_BASED_YEAR: = null
// Cumulative day-of-year at the start of each quarter: first four entries for
// standard years, last four for leap years.
private final val QUARTER_DAYS: Array[Int] = Array(0, 90, 181, 273, 0, 91, 182, 274)
}
// NOTE(review): only a fragment of the per-constant field implementation
// survived the conversion; `range` referenced below is unresolved in this scope.
private class Field extends TemporalField {
def isDateBased: Boolean = {
true
}
def isTimeBased: Boolean = {
false
}
def rangeRefinedBy(temporal: TemporalAccessor): ValueRange = {
range
}
}
/**
* Implementation of the period unit.
*/
private object Unit {
// NOTE(review): as with Field above, the type ascriptions and the initialisers
// for these constants were lost; `final val X: = null` does not compile.
/**
* Unit that represents the concept of a week-based-year.
*/
final val WEEK_BASED_YEARS: = null
/**
* Unit that represents the concept of a quarter-year.
*/
final val QUARTER_YEARS: = null
}
private final class Unit extends TemporalUnit {
// NOTE(review): assigning to `name`/`duration` here conflicts with the
// null-initialised final vals at the bottom of the class; the original Java
// constructor wiring was lost. `Nothing` stands in for Duration.
private def this(name: String, estimatedDuration: Nothing) {
this.name = name
this.duration = estimatedDuration
}
// Accessor for the unit's estimated duration.
def getDuration: Nothing = {
duration
}
// Both IsoFields units (week-based-years, quarter-years) are date-based estimates.
def isDurationEstimated: Boolean = {
true
}
def isDateBased: Boolean = {
true
}
def isTimeBased: Boolean = {
false
}
// Supported whenever the temporal can express an epoch-day.
override def isSupportedBy(temporal: Temporal): Boolean = {
temporal.isSupported(EPOCH_DAY)
}
def addTo(temporal: R, amount: Long): R = {
this match {
case WEEK_BASED_YEARS =>
temporal.`with`(WEEK_BASED_YEAR, Math.addExact(temporal.get(WEEK_BASED_YEAR), amount)).asInstanceOf[R]
case QUARTER_YEARS =>
// NOTE(review): total added = 12*(amount/256) + 3*(amount%256) months, which
// equals `amount` quarters (3*amount months) only if the divisor were 4 —
// verify the `/ 256` / `% 256` constants against the upstream implementation.
// Also `R` is an unresolved type (dropped generic parameter).
temporal.plus(amount / 256, YEARS).plus((amount % 256) * 3, MONTHS).asInstanceOf[R]
case _ =>
throw new IllegalStateException("Unreachable")
}
}
def between(temporal1Inclusive: Temporal, temporal2Exclusive: Temporal): Long = {
// NOTE(review): lost `return` — when the two classes differ, the delegated
// `until` result is discarded and the match below runs anyway.
if (temporal1Inclusive.getClass ne temporal2Exclusive.getClass) {
temporal1Inclusive.until(temporal2Exclusive, this)
}
this match {
case WEEK_BASED_YEARS =>
Math.subtractExact(temporal2Exclusive.getLong(WEEK_BASED_YEAR), temporal1Inclusive.getLong(WEEK_BASED_YEAR))
case QUARTER_YEARS =>
temporal1Inclusive.until(temporal2Exclusive, MONTHS) / 3
case _ =>
throw new IllegalStateException("Unreachable")
}
}
override def toString: String = {
name
}
// NOTE(review): null-initialised stand-ins for fields the lost constructor
// was meant to populate.
private final val name: String = null
private final val duration: Nothing = null
}
}
/**
 * Class counterpart of the IsoFields utility object; it exists only to
 * prevent instantiation, mirroring the Java original.
 *
 * Fix: the converted `private def {` was invalid Scala. A private primary
 * constructor that throws reproduces the Java behaviour (a private
 * constructor whose body threw AssertionError).
 */
final class IsoFields private () {
  // Restricted constructor: always fails, matching the Java original.
  throw new AssertionError("Not instantiable")
}
/*
* Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* This file is available under and governed by the GNU General Public
* License version 2 only, as published by the Free Software Foundation.
* However, the following notice accompanied the original version of this
* file:
*
* Copyright (c) 2012, Stephen Colebourne & Michael Nascimento Santos
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of JSR-310 nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
* A set of date fields that provide access to Julian Days.
* <p>
* The Julian Day is a standard way of expressing date and time commonly used in the scientific community.
* It is expressed as a decimal number of whole days where days start at midday.
* This class represents variations on Julian Days that count whole days from midnight.
* <p>
* The fields are implemented relative to {@link ChronoField#EPOCH_DAY EPOCH_DAY}.
* The fields are supported, and can be queried and set if {@code EPOCH_DAY} is available.
* The fields work with all chronologies.
*
* @implSpec
* This is an immutable and thread-safe class.
*
* @since 1.8
*/
object JulianFields {
/**
* The offset from Julian to EPOCH DAY.
*/
private final val JULIAN_DAY_OFFSET: Long = 2440588L
/**
* Julian Day field.
* <p>
* This is an integer-based version of the Julian Day Number.
* Julian Day is a well-known system that represents the count of whole days since day 0,
* which is defined to be January 1, 4713 BCE in the Julian calendar, and -4713-11-24 Gregorian.
* The field has "JulianDay" as 'name', and 'DAYS' as 'baseUnit'.
* The field always refers to the local date-time, ignoring the offset or zone.
* <p>
* For date-times, 'JULIAN_DAY.getFrom()' assumes the same value from
* midnight until just before the next midnight.
* When 'JULIAN_DAY.adjustInto()' is applied to a date-time, the time of day portion remains unaltered.
* 'JULIAN_DAY.adjustInto()' and 'JULIAN_DAY.getFrom()' only apply to {@code Temporal} objects that
* can be converted into {@link ChronoField#EPOCH_DAY}.
* An {@link UnsupportedTemporalTypeException} is thrown for any other type of object.
* <p>
* In the resolving phase of parsing, a date can be created from a Julian Day field.
* In {@linkplain ResolverStyle#STRICT strict mode} and {@linkplain ResolverStyle#SMART smart mode}
* the Julian Day value is validated against the range of valid values.
* In {@linkplain ResolverStyle#LENIENT lenient mode} no validation occurs.
* <p>
* <h3>Astronomical and Scientific Notes</h3>
* The standard astronomical definition uses a fraction to indicate the time-of-day,
* thus 3.25 would represent the time 18:00, since days start at midday.
* This implementation uses an integer and days starting at midnight.
* The integer value for the Julian Day Number is the astronomical Julian Day value at midday
* of the date in question.
* This amounts to the astronomical Julian Day, rounded to an integer {@code JDN = floor(JD + 0.5)}.
* <p>
* {{{
* | ISO date | Julian Day Number | Astronomical Julian Day |
* | 1970-01-01T00:00 | 2,440,588 | 2,440,587.5 |
* | 1970-01-01T06:00 | 2,440,588 | 2,440,587.75 |
* | 1970-01-01T12:00 | 2,440,588 | 2,440,588.0 |
* | 1970-01-01T18:00 | 2,440,588 | 2,440,588.25 |
* | 1970-01-02T00:00 | 2,440,589 | 2,440,588.5 |
* | 1970-01-02T06:00 | 2,440,589 | 2,440,588.75 |
* | 1970-01-02T12:00 | 2,440,589 | 2,440,589.0 |
* }}}
* <p>
* Julian Days are sometimes taken to imply Universal Time or UTC, but this
* implementation always uses the Julian Day number for the local date,
* regardless of the offset or time-zone.
*/
final val JULIAN_DAY: TemporalField = Field.JULIAN_DAY
/**
* Modified Julian Day field.
* <p>
* This is an integer-based version of the Modified Julian Day Number.
* Modified Julian Day (MJD) is a well-known system that counts days continuously.
* It is defined relative to astronomical Julian Day as {@code MJD = JD - 2400000.5}.
* Each Modified Julian Day runs from midnight to midnight.
* The field always refers to the local date-time, ignoring the offset or zone.
* <p>
* For date-times, 'MODIFIED_JULIAN_DAY.getFrom()' assumes the same value from
* midnight until just before the next midnight.
* When 'MODIFIED_JULIAN_DAY.adjustInto()' is applied to a date-time, the time of day portion remains unaltered.
* 'MODIFIED_JULIAN_DAY.adjustInto()' and 'MODIFIED_JULIAN_DAY.getFrom()' only apply to {@code Temporal} objects
* that can be converted into {@link ChronoField#EPOCH_DAY}.
* An {@link UnsupportedTemporalTypeException} is thrown for any other type of object.
* <p>
* This implementation is an integer version of MJD with the decimal part rounded to floor.
* <p>
* In the resolving phase of parsing, a date can be created from a Modified Julian Day field.
* In {@linkplain ResolverStyle#STRICT strict mode} and {@linkplain ResolverStyle#SMART smart mode}
* the Modified Julian Day value is validated against the range of valid values.
* In {@linkplain ResolverStyle#LENIENT lenient mode} no validation occurs.
* <p>
* <h3>Astronomical and Scientific Notes</h3>
* {{{
* | ISO date | Modified Julian Day | Decimal MJD |
* | 1970-01-01T00:00 | 40,587 | 40,587.0 |
* | 1970-01-01T06:00 | 40,587 | 40,587.25 |
* | 1970-01-01T12:00 | 40,587 | 40,587.5 |
* | 1970-01-01T18:00 | 40,587 | 40,587.75 |
* | 1970-01-02T00:00 | 40,588 | 40,588.0 |
* | 1970-01-02T06:00 | 40,588 | 40,588.25 |
* | 1970-01-02T12:00 | 40,588 | 40,588.5 |
* }}}
* <p>
* Modified Julian Days are sometimes taken to imply Universal Time or UTC, but this
* implementation always uses the Modified Julian Day for the local date,
* regardless of the offset or time-zone.
*/
final val MODIFIED_JULIAN_DAY: TemporalField = Field.MODIFIED_JULIAN_DAY
/**
* Rata Die field.
* <p>
* Rata Die counts whole days continuously starting day 1 at midnight at the beginning of 0001-01-01 (ISO).
* The field always refers to the local date-time, ignoring the offset or zone.
* <p>
* For date-times, 'RATA_DIE.getFrom()' assumes the same value from
* midnight until just before the next midnight.
* When 'RATA_DIE.adjustInto()' is applied to a date-time, the time of day portion remains unaltered.
* 'RATA_DIE.adjustInto()' and 'RATA_DIE.getFrom()' only apply to {@code Temporal} objects
* that can be converted into {@link ChronoField#EPOCH_DAY}.
* An {@link UnsupportedTemporalTypeException} is thrown for any other type of object.
* <p>
* In the resolving phase of parsing, a date can be created from a Rata Die field.
* In {@linkplain ResolverStyle#STRICT strict mode} and {@linkplain ResolverStyle#SMART smart mode}
* the Rata Die value is validated against the range of valid values.
* In {@linkplain ResolverStyle#LENIENT lenient mode} no validation occurs.
*/
final val RATA_DIE: TemporalField = Field.RATA_DIE
/**
 * Implementation of JulianFields. Each instance is a singleton.
 */
private object Field {
  // Each offset translates EPOCH_DAY (0 = 1970-01-01 ISO) into the given
  // day count: the Julian Day of 1970-01-01 is 2,440,588, the Modified
  // Julian Day is 40,587, and the Rata Die is 719,163.
  // Fix: the original lines read `final val JULIAN_DAY: = null` — a missing
  // type ascription (syntax error) and a null value; the singletons are
  // reconstructed here via the companion class's private constructor.
  final val JULIAN_DAY: Field = new Field("JulianDay", ChronoUnit.DAYS, ChronoUnit.FOREVER, 2440588L)
  final val MODIFIED_JULIAN_DAY: Field = new Field("ModifiedJulianDay", ChronoUnit.DAYS, ChronoUnit.FOREVER, 40587L)
  final val RATA_DIE: Field = new Field("RataDie", ChronoUnit.DAYS, ChronoUnit.FOREVER, 719163L)
  private final val serialVersionUID: Long = -7501623920830201812L
}
/**
 * Implementation of a Julian-style day-count field. Each field is entirely
 * defined by its display name and a fixed offset relative to
 * {@link ChronoField#EPOCH_DAY}.
 *
 * Fixes over the converted original: a proper primary constructor (the
 * original assigned to {@code val}s from a secondary constructor, which does
 * not compile); the range value no longer clashes with the {@code range}
 * method (the original {@code def range = range} was infinitely recursive);
 * {@code throw new Nothing} is replaced with the documented exception types;
 * the missing type parameter on {@code adjustInto} is restored; and
 * {@code resolve} now returns the lenient result instead of discarding it
 * and falling through to strict validation.
 *
 * @param name      the field name, also returned by toString
 * @param baseUnit  the unit the field is measured in
 * @param rangeUnit the unit the field is bound by
 * @param offset    amount added to EPOCH_DAY to obtain this field's value
 */
private final class Field private (
    private val name: String,
    private val baseUnit: TemporalUnit,
    private val rangeUnit: TemporalUnit,
    private val offset: Long) extends TemporalField {

  // EPOCH_DAY's valid range, shifted by this field's offset.
  private val valueRange: ValueRange =
    ValueRange.of(-365243219162L + offset, 365241780471L + offset)

  def getBaseUnit: TemporalUnit = baseUnit

  def getRangeUnit: TemporalUnit = rangeUnit

  /** Day counts are date-based, never time-based. */
  def isDateBased: Boolean = true

  def isTimeBased: Boolean = false

  def range: ValueRange = valueRange

  /** Supported by any temporal that can supply EPOCH_DAY. */
  def isSupportedBy(temporal: TemporalAccessor): Boolean =
    temporal.isSupported(EPOCH_DAY)

  def rangeRefinedBy(temporal: TemporalAccessor): ValueRange = {
    if (!isSupportedBy(temporal)) {
      throw new UnsupportedTemporalTypeException("Unsupported field: " + this)
    }
    range
  }

  /** The field value is simply the epoch-day shifted by this field's offset. */
  def getFrom(temporal: TemporalAccessor): Long =
    temporal.getLong(EPOCH_DAY) + offset

  def adjustInto[R <: Temporal](temporal: R, newValue: Long): R = {
    if (!range.isValidValue(newValue)) {
      throw new DateTimeException("Invalid value: " + name + " " + newValue)
    }
    temporal.`with`(EPOCH_DAY, Math.subtractExact(newValue, offset)).asInstanceOf[R]
  }

  // NOTE(review): the original declared these parameters as `Nothing`; the
  // types below are reconstructed from the TemporalField contract — confirm
  // against the trait's resolve signature elsewhere in this file.
  override def resolve(fieldValues: java.util.Map[TemporalField, java.lang.Long], chronology: Chronology, zone: ZoneId, resolverStyle: ResolverStyle): ChronoLocalDate = {
    val value: Long = fieldValues.remove(this)
    if (resolverStyle eq ResolverStyle.LENIENT) {
      // Lenient: no range validation, only overflow-checked subtraction.
      chronology.dateEpochDay(Math.subtractExact(value, offset))
    } else {
      range.checkValidValue(value, this)
      chronology.dateEpochDay(value - offset)
    }
  }

  override def toString: String = name
}
}
sealed trait JulianFields
/*
* Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* This file is available under and governed by the GNU General Public
* License version 2 only, as published by the Free Software Foundation.
* However, the following notice accompanied the original version of this
* file:
*
* Copyright (c) 2012, Stephen Colebourne & Michael Nascimento Santos
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of JSR-310 nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
* Framework-level interface defining read-write access to a temporal object,
* such as a date, time, offset or some combination of these.
* <p>
* This is the base interface type for date, time and offset objects that
* are complete enough to be manipulated using plus and minus.
* It is implemented by those classes that can provide and manipulate information
* as {@linkplain TemporalField fields} or {@linkplain TemporalQuery queries}.
* See {@link TemporalAccessor} for the read-only version of this interface.
* <p>
* Most date and time information can be represented as a number.
* These are modeled using {@code TemporalField} with the number held using
* a {@code long} to handle large values. Year, month and day-of-month are
* simple examples of fields, but they also include instant and offsets.
* See {@link ChronoField} for the standard set of fields.
* <p>
* Two pieces of date/time information cannot be represented by numbers,
* the {@linkplain java.time.chrono.Chronology chronology} and the
* {@linkplain java.time.ZoneId time-zone}.
* These can be accessed via {@link #query(TemporalQuery) queries} using
* the static methods defined on {@link TemporalQuery}.
* <p>
* This interface is a framework-level interface that should not be widely
* used in application code. Instead, applications should create and pass
* around instances of concrete types, such as {@code Date}.
* There are many reasons for this, part of which is that implementations
* of this interface may be in calendar systems other than ISO.
* See {@link java.time.chrono.ChronoLocalDate} for a fuller discussion of the issues.
*
* <h3>When to implement</h3>
* <p>
* A class should implement this interface if it meets three criteria:
* <p><ul>
* <li>it provides access to date/time/offset information, as per {@code TemporalAccessor}
* <li>the set of fields are contiguous from the largest to the smallest
* <li>the set of fields are complete, such that no other field is needed to define the
* valid range of values for the fields that are represented
* </ul><p>
* <p>
* Four examples make this clear:
* <p><ul>
* <li>{@code Date} implements this interface as it represents a set of fields
* that are contiguous from days to forever and require no external information to determine
* the validity of each date. It is therefore able to implement plus/minus correctly.
* <li>{@code Time} implements this interface as it represents a set of fields
* that are contiguous from nanos to within days and require no external information to determine
* validity. It is able to implement plus/minus correctly, by wrapping around the day.
* <li>{@code MonthDay}, the combination of month-of-year and day-of-month, does not implement
* this interface. While the combination is contiguous, from days to months within years,
* the combination does not have sufficient information to define the valid range of values
* for day-of-month. As such, it is unable to implement plus/minus correctly.
* <li>The combination day-of-week and day-of-month ("Friday the 13th") should not implement
* this interface. It does not represent a contiguous set of fields, as days to weeks overlaps
* days to months.
* </ul><p>
*
* @implSpec
* This interface places no restrictions on the mutability of implementations,
* however immutability is strongly recommended.
* All implementations must be { @link Comparable}.
*
* @since 1.8
*/
/**
 * Framework-level interface defining read-write access to a temporal object,
 * such as a date, time, offset or some combination of these.
 * <p>
 * This is the base interface type for date, time and offset objects that
 * are complete enough to be manipulated using plus and minus.
 * It is implemented by those classes that can provide and manipulate information
 * as {@linkplain TemporalField fields} or {@linkplain TemporalQuery queries}.
 * See {@link TemporalAccessor} for the read-only version of this interface.
 * <p>
 * Most date and time information can be represented as a number, modeled using
 * {@code TemporalField} with the value held in a {@code long} to handle large
 * values. The chronology and time-zone cannot be represented as numbers; they
 * are accessed via {@link #query(TemporalQuery) queries} using the static
 * methods defined on {@link TemporalQuery}.
 * <p>
 * This is a framework-level interface that should not be widely used in
 * application code; applications should create and pass around instances of
 * concrete types, such as {@code Date}, partly because implementations of this
 * interface may be in calendar systems other than ISO.
 * See {@link java.time.chrono.ChronoLocalDate} for a fuller discussion of the issues.
 *
 * <h3>When to implement</h3>
 * A class should implement this interface if it provides access to
 * date/time/offset information (as per {@code TemporalAccessor}) and its set
 * of fields is contiguous from largest to smallest and complete, such that no
 * other field is needed to define the valid range of values of the fields
 * represented. For example, {@code Date} and {@code Time} qualify, while
 * {@code MonthDay} (day-of-month range undefined without a year) and the
 * combination day-of-week/day-of-month (not contiguous) do not.
 *
 * @implSpec
 * This interface places no restrictions on the mutability of implementations,
 * however immutability is strongly recommended.
 * All implementations must be {@link Comparable}.
 *
 * @since 1.8
 */
trait Temporal extends TemporalAccessor {
  /**
   * Checks if the specified unit is supported, i.e. whether it can be added
   * to or subtracted from this date-time. If false, {@link #plus(Long, TemporalUnit)}
   * and {@link #minus(Long, TemporalUnit)} will throw an exception.
   *
   * @param unit the unit to check, null returns false
   * @return true if the unit can be added/subtracted, false if not
   */
  def isSupported(unit: TemporalUnit): Boolean

  /**
   * Returns an adjusted object of the same type as this object, with the
   * adjustment made according to the rules of the specified adjuster.
   * Equivalent to {@code adjuster.adjustInto(this)}; neither this object nor
   * the adjuster is altered.
   *
   * @param adjuster the adjuster to use, not null
   * @return an object of the same type with the specified adjustment made, not null
   * @throws DateTimeException if unable to make the adjustment
   * @throws ArithmeticException if numeric overflow occurs
   */
  def `with`(adjuster: TemporalAdjuster): Temporal = adjuster.adjustInto(this)

  /**
   * Returns an object of the same type as this object with the specified
   * field set to a new value. Where changing the field is not fully defined
   * (e.g. setting February on the 31st), the field is responsible for
   * resolving the result.
   *
   * @param field the field to set in the result, not null
   * @param newValue the new value of the field in the result
   * @return an object of the same type with the specified field set, not null
   * @throws DateTimeException if the field cannot be set
   * @throws UnsupportedTemporalTypeException if the field is not supported
   * @throws ArithmeticException if numeric overflow occurs
   */
  def `with`(field: TemporalField, newValue: Long): Temporal

  /**
   * Returns an object of the same type as this object with an amount added;
   * the amount is typically a {@code Period} or {@code Duration}.
   * Equivalent to {@code amount.addTo(this)}; this object is not altered.
   *
   * @param amount the amount to add, not null
   * @return an object of the same type with the specified adjustment made, not null
   * @throws DateTimeException if the addition cannot be made
   * @throws ArithmeticException if numeric overflow occurs
   */
  def plus(amount: TemporalAmount): Temporal = amount.addTo(this)

  /**
   * Returns an object of the same type as this object with the specified
   * period added.
   *
   * @param amountToAdd the amount of the specified unit to add, may be negative
   * @param unit the unit of the period to add, not null
   * @return an object of the same type with the specified period added, not null
   * @throws DateTimeException if the unit cannot be added
   * @throws UnsupportedTemporalTypeException if the unit is not supported
   * @throws ArithmeticException if numeric overflow occurs
   */
  def plus(amountToAdd: Long, unit: TemporalUnit): Temporal

  /**
   * Returns an object of the same type as this object with an amount
   * subtracted; the amount is typically a {@code Period} or {@code Duration}.
   * Equivalent to {@code amount.subtractFrom(this)}; this object is not altered.
   *
   * @param amount the amount to subtract, not null
   * @return an object of the same type with the specified adjustment made, not null
   * @throws DateTimeException if the subtraction cannot be made
   * @throws ArithmeticException if numeric overflow occurs
   */
  def minus(amount: TemporalAmount): Temporal = amount.subtractFrom(this)

  /**
   * Returns an object of the same type as this object with the specified
   * period subtracted, by negating the amount and delegating to
   * {@code plus(amountToAdd, unit)}.
   *
   * @param amountToSubtract the amount of the specified unit to subtract, may be negative
   * @param unit the unit of the period to subtract, not null
   * @return an object of the same type with the specified period subtracted, not null
   * @throws DateTimeException if the unit cannot be subtracted
   * @throws UnsupportedTemporalTypeException if the unit is not supported
   * @throws ArithmeticException if numeric overflow occurs
   */
  def minus(amountToSubtract: Long, unit: TemporalUnit): Temporal =
    // Long.MinValue cannot be negated, so add MaxValue and then one more.
    // Fix: the original used Java-style Long.MIN_VALUE/MAX_VALUE, which do
    // not exist on scala.Long and would not compile.
    if (amountToSubtract == Long.MinValue) plus(Long.MaxValue, unit).plus(1, unit)
    else plus(-amountToSubtract, unit)

  /**
   * Calculates the amount of time until another temporal in terms of the
   * specified unit. The start point is {@code this} and the end point is the
   * specified temporal, converted to the same type if different. The result
   * is the whole number of complete units between the two, negative if the
   * end is before the start; equivalent to {@code unit.between(start, end)}.
   *
   * @param endExclusive the end temporal, converted to be of the
   *                     same type as this object, not null
   * @param unit the unit to measure the amount in, not null
   * @return the amount of time between this temporal object and the specified one
   *         in terms of the unit; positive if the specified object is later than
   *         this one, negative if it is earlier than this one
   * @throws DateTimeException if the amount cannot be calculated, or the end
   *                           temporal cannot be converted to the same type as this temporal
   * @throws UnsupportedTemporalTypeException if the unit is not supported
   * @throws ArithmeticException if numeric overflow occurs
   */
  def until(endExclusive: Temporal, unit: TemporalUnit): Long
}
/*
* Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* This file is available under and governed by the GNU General Public
* License version 2 only, as published by the Free Software Foundation.
* However, the following notice accompanied the original version of this
* file:
*
* Copyright (c) 2012, Stephen Colebourne & Michael Nascimento Santos
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of JSR-310 nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
* Framework-level interface defining read-only access to a temporal object,
* such as a date, time, offset or some combination of these.
* <p>
* This is the base interface type for date, time and offset objects.
* It is implemented by those classes that can provide information
* as {@linkplain TemporalField fields} or {@linkplain TemporalQuery queries}.
* <p>
* Most date and time information can be represented as a number.
* These are modeled using {@code TemporalField} with the number held using
* a {@code long} to handle large values. Year, month and day-of-month are
* simple examples of fields, but they also include instant and offsets.
* See {@link ChronoField} for the standard set of fields.
* <p>
* Two pieces of date/time information cannot be represented by numbers,
* the {@linkplain java.time.chrono.Chronology chronology} and the
* {@linkplain java.time.ZoneId time-zone}.
* These can be accessed via {@linkplain #query(TemporalQuery) queries} using
* the static methods defined on {@link TemporalQuery}.
* <p>
* A sub-interface, {@link Temporal}, extends this definition to one that also
* supports adjustment and manipulation on more complete temporal objects.
* <p>
* This interface is a framework-level interface that should not be widely
* used in application code. Instead, applications should create and pass
* around instances of concrete types, such as {@code Date}.
* There are many reasons for this, part of which is that implementations
* of this interface may be in calendar systems other than ISO.
* See {@link java.time.chrono.ChronoLocalDate} for a fuller discussion of the issues.
*
* @implSpec
* This interface places no restrictions on the mutability of implementations,
* however immutability is strongly recommended.
*
* @since 1.8
*/
trait TemporalAccessor {
/**
 * Checks if the specified field is supported.
 * <p>
 * This checks if the date-time can be queried for the specified field.
 * If false, then calling the {@link #range(TemporalField) range} and {@link #get(TemporalField) get}
 * methods will throw an exception.
 *
 * @implSpec
 * Implementations must check and handle all fields defined in {@link ChronoField}.
 * If the field is supported, then true must be returned, otherwise false must be returned.
 * <p>
 * If the field is not a {@code ChronoField}, then the result of this method
 * is obtained by invoking {@code TemporalField.isSupportedBy(TemporalAccessor)}
 * passing {@code this} as the argument.
 * <p>
 * Implementations must ensure that no observable state is altered when this
 * read-only method is invoked.
 *
 * @param field the field to check, null returns false
 * @return true if this date-time can be queried for the field, false if not
 */
def isSupported(field: TemporalField): Boolean
/**
 * Gets the range of valid values for the specified field.
 * <p>
 * All fields can be expressed as a {@code long} integer.
 * This method returns an object that describes the valid range for that value.
 * The value of this temporal object is used to enhance the accuracy of the
 * returned range. If the date-time cannot return the range, because the field
 * is unsupported or for some other reason, an exception will be thrown.
 * <p>
 * Note that the result only describes the minimum and maximum valid values;
 * there could be values within the range that are invalid for the field.
 *
 * @implSpec
 * Implementations must check and handle all fields defined in {@link ChronoField}.
 * If the field is supported, then the range of the field must be returned.
 * If unsupported, then an {@code UnsupportedTemporalTypeException} must be thrown.
 * <p>
 * If the field is not a {@code ChronoField}, then the result of this method
 * is obtained by invoking {@code TemporalField.rangeRefinedBy(TemporalAccessor)}
 * passing {@code this} as the argument.
 * <p>
 * Implementations must ensure that no observable state is altered when this
 * read-only method is invoked.
 *
 * @param field the field to query the range for, not null
 * @return the range of valid values for the field, not null
 * @throws DateTimeException if the range for the field cannot be obtained
 * @throws UnsupportedTemporalTypeException if the field is not supported
 */
def range(field: TemporalField): ValueRange = {
  field match {
    // Fix: the original computed field.range for a supported ChronoField,
    // discarded the result, and then always threw; it also lacked an else,
    // so control fell through after the ChronoField branch.
    case _: ChronoField =>
      if (isSupported(field)) field.range
      else throw new UnsupportedTemporalTypeException("Unsupported field: " + field)
    case _ =>
      field.rangeRefinedBy(this)
  }
}
/**
* Gets the value of the specified field as an {@code int}.
* <p>
* This queries the date-time for the value for the specified field.
* The returned value will always be within the valid range of values for the field.
* If the date-time cannot return the value, because the field is unsupported or for
* some other reason, an exception will be thrown.
*
* @implSpec
* Implementations must check and handle all fields defined in { @link ChronoField}.
* If the field is supported and has an { @code int} range, then the value of
* the field must be returned.
* If unsupported, then an { @code UnsupportedTemporalTypeException} must be thrown.
* <p>
* If the field is not a { @code ChronoField}, then the result of this method
* is obtained by invoking { @code TemporalField.getFrom(TemporalAccessor)}
* passing { @code this} as the argument.
* <p>
* Implementations must ensure that no observable state is altered when this
* read-only method is invoked.
* <p>
* The default implementation must behave equivalent to this code:
* {{{
* if (range(field).isIntValue()) {
* return range(field).checkValidIntValue(getLong(field), field);
* }
* throw new UnsupportedTemporalTypeException("Invalid field " + field + " + for get() method, use getLong() instead");
* }}}
*
* @param field the field to get, not null
* @return the value for the field, within the valid range of values
* @throws DateTimeException if a value for the field cannot be obtained or
* the value is outside the range of valid values for the field
* @throws UnsupportedTemporalTypeException if the field is not supported or
* the range of values exceeds an { @code int}
* @throws ArithmeticException if numeric overflow occurs
*/
def get(field: TemporalField): Int = {
  // Named `fieldRange` to avoid shadowing the `range` method — the original
  // `val range = range(field)` was an illegal self-referential definition.
  val fieldRange: ValueRange = range(field)
  if (!fieldRange.isIntValue) {
    throw new UnsupportedTemporalTypeException("Invalid field " + field + " for get() method, use getLong() instead")
  }
  val value: Long = getLong(field)
  if (!fieldRange.isValidValue(value)) {
    // Out-of-range values are a date-time error, per the documented contract.
    throw new DateTimeException("Invalid value for " + field + " (valid values " + fieldRange + "): " + value)
  }
  value.toInt // safe: isIntValue and isValidValue guarantee an int-sized value
}
/**
* Gets the value of the specified field as a {@code long}.
* <p>
* This queries the date-time for the value for the specified field.
* The returned value may be outside the valid range of values for the field.
* If the date-time cannot return the value, because the field is unsupported or for
* some other reason, an exception will be thrown.
*
* @implSpec
* Implementations must check and handle all fields defined in { @link ChronoField}.
* If the field is supported, then the value of the field must be returned.
* If unsupported, then an { @code UnsupportedTemporalTypeException} must be thrown.
* <p>
* If the field is not a { @code ChronoField}, then the result of this method
* is obtained by invoking { @code TemporalField.getFrom(TemporalAccessor)}
* passing { @code this} as the argument.
* <p>
* Implementations must ensure that no observable state is altered when this
* read-only method is invoked.
*
* @param field the field to get, not null
* @return the value for the field
* @throws DateTimeException if a value for the field cannot be obtained
* @throws UnsupportedTemporalTypeException if the field is not supported
* @throws ArithmeticException if numeric overflow occurs
*/
def getLong(field: TemporalField): Long // abstract: concrete temporals resolve the field to its raw long value
/**
* Queries this date-time.
* <p>
* This queries this date-time using the specified query strategy object.
* <p>
* Queries are a key tool for extracting information from date-times.
* They exists to externalize the process of querying, permitting different
* approaches, as per the strategy design pattern.
* Examples might be a query that checks if the date is the day before February 29th
* in a leap year, or calculates the number of days to your next birthday.
* <p>
* The most common query implementations are method references, such as
* {@code Date::from} and {@code ZoneId::from}.
* Additional implementations are provided as static methods on {@link TemporalQuery}.
*
* @implSpec
* The default implementation must behave equivalent to this code:
* {{{
* if (query == TemporalQuery.zoneId() ||
* query == TemporalQuery.chronology() || query == TemporalQuery.precision()) {
* return null;
* }
* return query.queryFrom(this);
* }}}
* Future versions are permitted to add further queries to the if statement.
* <p>
* All classes implementing this interface and overriding this method must call
* { @code TemporalAccessor.super.query(query)}. JDK classes may avoid calling
* super if they provide behavior equivalent to the default behaviour, however
* non-JDK classes may not utilize this optimization and must call { @code super}.
* <p>
* If the implementation can supply a value for one of the queries listed in the
* if statement of the default implementation, then it must do so.
* For example, an application-defined { @code HourMin} class storing the hour
* and minute must override this method as follows:
* {{{
* if (query == TemporalQuery.precision()) {
* return MINUTES;
* }
* return TemporalAccessor.super.query(query);
* }}}
* <p>
* Implementations must ensure that no observable state is altered when this
* read-only method is invoked.
*
* @param <R> the type of the result
* @param query the query to invoke, not null
* @return the query result, null may be returned (defined by the query)
* @throws DateTimeException if unable to query
* @throws ArithmeticException if numeric overflow occurs
*/
def query[R](query: TemporalQuery[R]): R = {
  // The three structural queries are answered (with null) at this level so that
  // subclasses overriding query() and calling super get consistent behaviour.
  // Parentheses are required: alphanumeric `eq` binds looser than `||`, so the
  // unparenthesised original parsed incorrectly.
  if ((query eq TemporalQuery.zoneId) ||
      (query eq TemporalQuery.chronology) ||
      (query eq TemporalQuery.precision)) {
    null.asInstanceOf[R] // documented contract: these queries yield null by default
  } else {
    // strategy-pattern dispatch: let the query interrogate this accessor
    query.queryFrom(this)
  }
}
}
/*
* Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* This file is available under and governed by the GNU General Public
* License version 2 only, as published by the Free Software Foundation.
* However, the following notice accompanied the original version of this
* file:
*
* Copyright (c) 2012, Stephen Colebourne & Michael Nascimento Santos
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of JSR-310 nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
* Strategy for adjusting a temporal object.
* <p>
* Adjusters are a key tool for modifying temporal objects.
* They exist to externalize the process of adjustment, permitting different
* approaches, as per the strategy design pattern.
* Examples might be an adjuster that sets the date avoiding weekends, or one that
* sets the date to the last day of the month.
* <p>
* There are two equivalent ways of using a {@code TemporalAdjuster}.
* The first is to invoke the method on this interface directly.
* The second is to use {@link Temporal#with(TemporalAdjuster)}:
* {{{
* // these two lines are equivalent, but the second approach is recommended
* temporal = thisAdjuster.adjustInto(temporal);
* temporal = temporal.with(thisAdjuster);
* }}}
* It is recommended to use the second approach, {@code with(TemporalAdjuster)},
* as it is a lot clearer to read in code.
* <p>
* This class also contains a standard set of adjusters, available as static methods.
* These include:
* <ul>
* <li>finding the first or last day of the month
* <li>finding the first day of next month
* <li>finding the first or last day of the year
* <li>finding the first day of next year
* <li>finding the first or last day-of-week within a month, such as "first Wednesday in June"
* <li>finding the next or previous day-of-week, such as "next Thursday"
* </ul>
*
* @implSpec
* This interface places no restrictions on the mutability of implementations,
* however immutability is strongly recommended.
* <p>
* All the implementations supplied by the static methods on this interface are immutable.
*
* @since 1.8
*/
object TemporalAdjuster {
  /**
   * Obtains a {@code TemporalAdjuster} that wraps a date adjuster.
   * <p>
   * This allows an adjustment expressed as {@code LocalDate => LocalDate} to be
   * used through the temporal-based interface, which keeps user-written
   * adjusters simple:
   * {{{
   * val TWO_DAYS_LATER = TemporalAdjuster.ofDateAdjuster(date => date.plusDays(2))
   * }}}
   *
   * @param dateBasedAdjuster the date-based adjuster, not null
   * @return the temporal adjuster wrapping on the date adjuster, not null
   */
  def ofDateAdjuster(dateBasedAdjuster: LocalDate => LocalDate): TemporalAdjuster = {
    // Implemented inline (not delegated) so the adjustment is self-contained:
    // convert to LocalDate, apply the user function, write the result back.
    new TemporalAdjuster {
      def adjustInto(temporal: Temporal): Temporal =
        temporal.`with`(dateBasedAdjuster(LocalDate.from(temporal)))
    }
  }

  /**
   * Returns the "first day of month" adjuster, which returns a new date set to
   * the first day of the current month.
   * For example, 2011-01-15 becomes 2011-01-01.
   * Equivalent to {@code temporal.with(DAY_OF_MONTH, 1)}.
   *
   * @return the first day-of-month adjuster, not null
   */
  def firstDayOfMonth: TemporalAdjuster = TemporalAdjusters.firstDayOfMonth

  /**
   * Returns the "last day of month" adjuster, which returns a new date set to
   * the last day of the current month (leap years handled via the field range).
   * For example, 2012-02-15 becomes 2012-02-29.
   * Equivalent to {@code temporal.with(DAY_OF_MONTH, temporal.range(DAY_OF_MONTH).getMaximum)}.
   *
   * @return the last day-of-month adjuster, not null
   */
  def lastDayOfMonth: TemporalAdjuster = TemporalAdjusters.lastDayOfMonth

  /**
   * Returns the "first day of next month" adjuster, which returns a new date
   * set to the first day of the next month.
   * For example, 2011-01-15 becomes 2011-02-01.
   * Equivalent to {@code temporal.with(DAY_OF_MONTH, 1).plus(1, MONTHS)}.
   *
   * @return the first day of next month adjuster, not null
   */
  def firstDayOfNextMonth: TemporalAdjuster = TemporalAdjusters.firstDayOfNextMonth

  /**
   * Returns the "first day of year" adjuster, which returns a new date set to
   * the first day of the current year.
   * For example, 2011-02-15 becomes 2011-01-01.
   * Equivalent to {@code temporal.with(DAY_OF_YEAR, 1)}.
   *
   * @return the first day-of-year adjuster, not null
   */
  def firstDayOfYear: TemporalAdjuster = TemporalAdjusters.firstDayOfYear

  /**
   * Returns the "last day of year" adjuster, which returns a new date set to
   * the last day of the current year.
   * For example, 2011-02-15 becomes 2011-12-31.
   * Equivalent to {@code temporal.with(DAY_OF_YEAR, temporal.range(DAY_OF_YEAR).getMaximum)}.
   *
   * @return the last day-of-year adjuster, not null
   */
  def lastDayOfYear: TemporalAdjuster = TemporalAdjusters.lastDayOfYear

  /**
   * Returns the "first day of next year" adjuster, which returns a new date
   * set to the first day of the next year.
   * For example, 2011-01-15 becomes 2012-01-01.
   * Equivalent to {@code temporal.with(DAY_OF_YEAR, 1).plus(1, YEARS)}.
   *
   * @return the first day of next year adjuster, not null
   */
  def firstDayOfNextYear: TemporalAdjuster = TemporalAdjusters.firstDayOfNextYear

  /**
   * Returns the first-in-month adjuster, which returns a new date in the same
   * month with the first matching day-of-week, e.g. 'first Tuesday in March'.
   * Uses the {@code DAY_OF_WEEK} and {@code DAY_OF_MONTH} fields and the
   * {@code DAYS} unit, assuming a seven day week.
   *
   * @param dayOfWeek the day-of-week, not null
   * @return the first in month adjuster, not null
   */
  def firstInMonth(dayOfWeek: DayOfWeek): TemporalAdjuster =
    dayOfWeekInMonth(1, dayOfWeek)

  /**
   * Returns the last-in-month adjuster, which returns a new date in the same
   * month with the last matching day-of-week, e.g. 'last Tuesday in March'.
   * Uses the {@code DAY_OF_WEEK} and {@code DAY_OF_MONTH} fields and the
   * {@code DAYS} unit, assuming a seven day week.
   *
   * @param dayOfWeek the day-of-week, not null
   * @return the last in month adjuster, not null
   */
  def lastInMonth(dayOfWeek: DayOfWeek): TemporalAdjuster =
    dayOfWeekInMonth(-1, dayOfWeek)

  /**
   * Returns the day-of-week-in-month adjuster, which returns a new date in the
   * same month with the ordinal day-of-week, e.g. 'second Tuesday in March'.
   * <p>
   * For a positive ordinal the algorithm finds the first matching day-of-week
   * in the month and adds whole weeks; for a negative ordinal it finds the last
   * and subtracts whole weeks. The ordinal is interpreted leniently — it is not
   * validated, and an ordinal of zero finds the last match in the previous month.
   * Uses the {@code DAY_OF_WEEK} and {@code DAY_OF_MONTH} fields and the
   * {@code DAYS} unit, assuming a seven day week.
   *
   * @param ordinal the week within the month, unbounded but typically from -5 to 5
   * @param dayOfWeek the day-of-week, not null
   * @return the day-of-week in month adjuster, not null
   */
  def dayOfWeekInMonth(ordinal: Int, dayOfWeek: DayOfWeek): TemporalAdjuster =
    TemporalAdjusters.dayOfWeekInMonth(ordinal, dayOfWeek)

  /**
   * Returns the next day-of-week adjuster, which moves the date forward to the
   * first occurrence of the specified day-of-week strictly after the input
   * (an input already on that day moves seven days later).
   * Uses the {@code DAY_OF_WEEK} field and the {@code DAYS} unit,
   * assuming a seven day week.
   *
   * @param dayOfWeek the day-of-week to move the date to, not null
   * @return the next day-of-week adjuster, not null
   */
  def next(dayOfWeek: DayOfWeek): TemporalAdjuster =
    TemporalAdjusters.next(dayOfWeek)

  /**
   * Returns the next-or-same day-of-week adjuster, which moves the date forward
   * to the specified day-of-week, returning the same object if it is already
   * on that day.
   * Uses the {@code DAY_OF_WEEK} field and the {@code DAYS} unit,
   * assuming a seven day week.
   *
   * @param dayOfWeek the day-of-week to check for or move the date to, not null
   * @return the next-or-same day-of-week adjuster, not null
   */
  def nextOrSame(dayOfWeek: DayOfWeek): TemporalAdjuster =
    TemporalAdjusters.nextOrSame(dayOfWeek)

  /**
   * Returns the previous day-of-week adjuster, which moves the date back to the
   * first occurrence of the specified day-of-week strictly before the input
   * (an input already on that day moves seven days earlier).
   * Uses the {@code DAY_OF_WEEK} field and the {@code DAYS} unit,
   * assuming a seven day week.
   *
   * @param dayOfWeek the day-of-week to move the date to, not null
   * @return the previous day-of-week adjuster, not null
   */
  def previous(dayOfWeek: DayOfWeek): TemporalAdjuster =
    TemporalAdjusters.previous(dayOfWeek)

  /**
   * Returns the previous-or-same day-of-week adjuster, which moves the date
   * back to the specified day-of-week, returning the same object if it is
   * already on that day.
   * Uses the {@code DAY_OF_WEEK} field and the {@code DAYS} unit,
   * assuming a seven day week.
   *
   * @param dayOfWeek the day-of-week to check for or move the date to, not null
   * @return the previous-or-same day-of-week adjuster, not null
   */
  def previousOrSame(dayOfWeek: DayOfWeek): TemporalAdjuster =
    TemporalAdjusters.previousOrSame(dayOfWeek)
}
trait TemporalAdjuster {
  /**
   * Adjusts the specified temporal object, returning an adjusted copy.
   * <p>
   * The logic of the adjustment is encapsulated in the implementing class;
   * examples are an adjuster that sets the date avoiding weekends, or one
   * that sets the date to the last day of the month.
   * <p>
   * This method can be invoked directly, but the recommended, more readable
   * form is [[Temporal#with(TemporalAdjuster)]]:
   * {{{
   * // these two lines are equivalent; the second is preferred
   * temporal = thisAdjuster.adjustInto(temporal)
   * temporal = temporal.with(thisAdjuster)
   * }}}
   *
   * @implSpec
   * The implementation takes the input object and adjusts it, and is
   * responsible for documenting its own adjustment logic. It may use any
   * method on { @code Temporal} to query the object and perform the adjustment.
   * The returned object must have the same observable type as the input.
   * <p>
   * The input must not be altered — an adjusted copy of the original is
   * returned instead, giving equivalent, safe behavior for immutable and
   * mutable temporal objects alike.
   * <p>
   * The input may be in a calendar system other than ISO; implementations may
   * document compatibility with other calendar systems, or reject non-ISO
   * input by { @link TemporalQuery#chronology() querying the chronology}.
   * <p>
   * This method may be called from multiple threads in parallel and must be
   * thread-safe.
   *
   * @param temporal the temporal object to adjust, not null
   * @return an object of the same observable type with the adjustment made, not null
   * @throws DateTimeException if unable to make the adjustment
   * @throws ArithmeticException if numeric overflow occurs
   */
  def adjustInto(temporal: Temporal): Temporal // sole abstract member of this strategy trait
}
/*
* Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* This file is available under and governed by the GNU General Public
* License version 2 only, as published by the Free Software Foundation.
* However, the following notice accompanied the original version of this
* file:
*
* Copyright (c) 2012-2013, Stephen Colebourne & Michael Nascimento Santos
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of JSR-310 nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
* Implementations of the static methods in {@code TemporalAdjuster}
*
* @since 1.8
*/
object TemporalAdjusters {
/**
* Obtains a {@code TemporalAdjuster} that wraps a date adjuster.
* <p>
* The {@code TemporalAdjuster} is based on the low level {@code Temporal} interface.
* This method allows an adjustment from {@code Date} to {@code Date}
* to be wrapped to match the temporal-based interface.
* This is provided for convenience to make user-written adjusters simpler.
* <p>
* In general, user-written adjusters should be static constants:
* {{{
* public static TemporalAdjuster TWO_DAYS_LATER = TemporalAdjuster.ofDateAdjuster(
* date -> date.plusDays(2));
* }}}
*
* @param dateBasedAdjuster the date-based adjuster, not null
* @return the temporal adjuster wrapping on the date adjuster, not null
*/
private[temporal] def ofDateAdjuster(dateBasedAdjuster: LocalDate => LocalDate): TemporalAdjuster = {
  // Anonymous implementation (the original Java `->` lambda is not valid Scala):
  // go via LocalDate, apply the user function, write the adjusted date back.
  // Note: `with` must be backtick-quoted — it is a Scala keyword.
  new TemporalAdjuster {
    def adjustInto(temporal: Temporal): Temporal = {
      val input: LocalDate = LocalDate.from(temporal)
      val output: LocalDate = dateBasedAdjuster(input)
      temporal.`with`(output)
    }
  }
}
/**
* Returns the "first day of month" adjuster, which returns a new date set to
* the first day of the current month.
* <p>
* The ISO calendar system behaves as follows:<br>
* The input 2011-01-15 will return 2011-01-01.<br>
* The input 2011-02-15 will return 2011-02-01.
* <p>
* The behavior is suitable for use with most calendar systems.
* It is equivalent to:
* {{{
* temporal.with(DAY_OF_MONTH, 1);
* }}}
*
* @return the first day-of-month adjuster, not null
*/
private[temporal] def firstDayOfMonth: TemporalAdjuster = {
  // Valid Scala replacement for the Java `->` lambda; `with` is backtick-quoted.
  new TemporalAdjuster {
    def adjustInto(temporal: Temporal): Temporal = temporal.`with`(DAY_OF_MONTH, 1)
  }
}
/**
* Returns the "last day of month" adjuster, which returns a new date set to
* the last day of the current month.
* <p>
* The ISO calendar system behaves as follows:<br>
* The input 2011-01-15 will return 2011-01-31.<br>
* The input 2011-02-15 will return 2011-02-28.<br>
* The input 2012-02-15 will return 2012-02-29 (leap year).<br>
* The input 2011-04-15 will return 2011-04-30.
* <p>
* The behavior is suitable for use with most calendar systems.
* It is equivalent to:
* {{{
* long lastDay = temporal.range(DAY_OF_MONTH).getMaximum();
* temporal.with(DAY_OF_MONTH, lastDay);
* }}}
*
* @return the last day-of-month adjuster, not null
*/
private[temporal] def lastDayOfMonth: TemporalAdjuster = {
  // The field range supplies the month length, so leap years are handled.
  new TemporalAdjuster {
    def adjustInto(temporal: Temporal): Temporal =
      temporal.`with`(DAY_OF_MONTH, temporal.range(DAY_OF_MONTH).getMaximum)
  }
}
/**
* Returns the "first day of next month" adjuster, which returns a new date set to
* the first day of the next month.
* <p>
* The ISO calendar system behaves as follows:<br>
* The input 2011-01-15 will return 2011-02-01.<br>
* The input 2011-02-15 will return 2011-03-01.
* <p>
* The behavior is suitable for use with most calendar systems.
* It is equivalent to:
* {{{
* temporal.with(DAY_OF_MONTH, 1).plus(1, MONTHS);
* }}}
*
* @return the first day of next month adjuster, not null
*/
private[temporal] def firstDayOfNextMonth: TemporalAdjuster = {
  // Set to the 1st, then add one month.
  new TemporalAdjuster {
    def adjustInto(temporal: Temporal): Temporal =
      temporal.`with`(DAY_OF_MONTH, 1).plus(1, MONTHS)
  }
}
/**
* Returns the "first day of year" adjuster, which returns a new date set to
* the first day of the current year.
* <p>
* The ISO calendar system behaves as follows:<br>
* The input 2011-01-15 will return 2011-01-01.<br>
* The input 2011-02-15 will return 2011-01-01.<br>
* <p>
* The behavior is suitable for use with most calendar systems.
* It is equivalent to:
* {{{
* temporal.with(DAY_OF_YEAR, 1);
* }}}
*
* @return the first day-of-year adjuster, not null
*/
private[temporal] def firstDayOfYear: TemporalAdjuster = {
  // Valid Scala replacement for the Java `->` lambda; `with` is backtick-quoted.
  new TemporalAdjuster {
    def adjustInto(temporal: Temporal): Temporal = temporal.`with`(DAY_OF_YEAR, 1)
  }
}
/**
* Returns the "last day of year" adjuster, which returns a new date set to
* the last day of the current year.
* <p>
* The ISO calendar system behaves as follows:<br>
* The input 2011-01-15 will return 2011-12-31.<br>
* The input 2011-02-15 will return 2011-12-31.<br>
* <p>
* The behavior is suitable for use with most calendar systems.
* It is equivalent to:
* {{{
* long lastDay = temporal.range(DAY_OF_YEAR).getMaximum();
* temporal.with(DAY_OF_YEAR, lastDay);
* }}}
*
* @return the last day-of-year adjuster, not null
*/
private[temporal] def lastDayOfYear: TemporalAdjuster = {
  // The field range supplies the year length, so leap years are handled.
  new TemporalAdjuster {
    def adjustInto(temporal: Temporal): Temporal =
      temporal.`with`(DAY_OF_YEAR, temporal.range(DAY_OF_YEAR).getMaximum)
  }
}
/**
* Returns the "first day of next year" adjuster, which returns a new date set to
* the first day of the next year.
* <p>
* The ISO calendar system behaves as follows:<br>
* The input 2011-01-15 will return 2012-01-01.
* <p>
* The behavior is suitable for use with most calendar systems.
* It is equivalent to:
* {{{
* temporal.with(DAY_OF_YEAR, 1).plus(1, YEARS);
* }}}
*
 * @return the first day of next year adjuster, not null
*/
private[temporal] def firstDayOfNextYear: TemporalAdjuster = {
  // Set to January 1st equivalent, then add one year.
  new TemporalAdjuster {
    def adjustInto(temporal: Temporal): Temporal =
      temporal.`with`(DAY_OF_YEAR, 1).plus(1, YEARS)
  }
}
/**
* Returns the first in month adjuster, which returns a new date
* in the same month with the first matching day-of-week.
* This is used for expressions like 'first Tuesday in March'.
* <p>
* The ISO calendar system behaves as follows:<br>
* The input 2011-12-15 for (MONDAY) will return 2011-12-05.<br>
* The input 2011-12-15 for (FRIDAY) will return 2011-12-02.<br>
* <p>
* The behavior is suitable for use with most calendar systems.
* It uses the {@code DAY_OF_WEEK} and {@code DAY_OF_MONTH} fields
* and the {@code DAYS} unit, and assumes a seven day week.
*
* @param dayOfWeek the day-of-week, not null
* @return the first in month adjuster, not null
*/
private[temporal] def firstInMonth(dayOfWeek: DayOfWeek): TemporalAdjuster =
  dayOfWeekInMonth(1, dayOfWeek) // ordinal 1 == first occurrence in the month
/**
* Returns the last in month adjuster, which returns a new date
* in the same month with the last matching day-of-week.
* This is used for expressions like 'last Tuesday in March'.
* <p>
* The ISO calendar system behaves as follows:<br>
* The input 2011-12-15 for (MONDAY) will return 2011-12-26.<br>
* The input 2011-12-15 for (FRIDAY) will return 2011-12-30.<br>
* <p>
* The behavior is suitable for use with most calendar systems.
* It uses the {@code DAY_OF_WEEK} and {@code DAY_OF_MONTH} fields
* and the {@code DAYS} unit, and assumes a seven day week.
*
* @param dayOfWeek the day-of-week, not null
 * @return the last in month adjuster, not null
*/
private[temporal] def lastInMonth(dayOfWeek: DayOfWeek): TemporalAdjuster =
  dayOfWeekInMonth(-1, dayOfWeek) // ordinal -1 == last occurrence in the month
/**
* Returns the day-of-week in month adjuster, which returns a new date
* in the same month with the ordinal day-of-week.
* This is used for expressions like the 'second Tuesday in March'.
* <p>
* The ISO calendar system behaves as follows:<br>
* The input 2011-12-15 for (1,TUESDAY) will return 2011-12-06.<br>
* The input 2011-12-15 for (2,TUESDAY) will return 2011-12-13.<br>
* The input 2011-12-15 for (3,TUESDAY) will return 2011-12-20.<br>
* The input 2011-12-15 for (4,TUESDAY) will return 2011-12-27.<br>
* The input 2011-12-15 for (5,TUESDAY) will return 2012-01-03.<br>
* The input 2011-12-15 for (-1,TUESDAY) will return 2011-12-27 (last in month).<br>
* The input 2011-12-15 for (-4,TUESDAY) will return 2011-12-06 (3 weeks before last in month).<br>
* The input 2011-12-15 for (-5,TUESDAY) will return 2011-11-29 (4 weeks before last in month).<br>
* The input 2011-12-15 for (0,TUESDAY) will return 2011-11-29 (last in previous month).<br>
* <p>
* For a positive or zero ordinal, the algorithm is equivalent to finding the first
* day-of-week that matches within the month and then adding a number of weeks to it.
* For a negative ordinal, the algorithm is equivalent to finding the last
* day-of-week that matches within the month and then subtracting a number of weeks to it.
* The ordinal number of weeks is not validated and is interpreted leniently
* according to this algorithm. This definition means that an ordinal of zero finds
* the last matching day-of-week in the previous month.
* <p>
* The behavior is suitable for use with most calendar systems.
* It uses the {@code DAY_OF_WEEK} and {@code DAY_OF_MONTH} fields
* and the {@code DAYS} unit, and assumes a seven day week.
*
* @param ordinal the week within the month, unbounded but typically from -5 to 5
* @param dayOfWeek the day-of-week, not null
* @return the day-of-week in month adjuster, not null
*/
private[temporal] def dayOfWeekInMonth(ordinal: Int, dayOfWeek: DayOfWeek): TemporalAdjuster = {
  val dowValue: Int = dayOfWeek.getValue
  if (ordinal >= 0) {
    // Positive/zero ordinal: anchor at the first of the month, step forward to
    // the first matching day-of-week, then add (ordinal - 1) whole weeks.
    new TemporalAdjuster {
      def adjustInto(temporal: Temporal): Temporal = {
        val temp: Temporal = temporal.`with`(DAY_OF_MONTH, 1)
        val curDow: Int = temp.get(DAY_OF_WEEK)
        val dowDiff: Int = (dowValue - curDow + 7) % 7
        // long arithmetic keeps the week offset safe from overflow
        temp.plus(dowDiff + (ordinal - 1L) * 7L, DAYS)
      }
    }
  } else {
    // Negative ordinal: anchor at the last day of the month, step back to the
    // last matching day-of-week, then subtract (-ordinal - 1) whole weeks.
    new TemporalAdjuster {
      def adjustInto(temporal: Temporal): Temporal = {
        val temp: Temporal = temporal.`with`(DAY_OF_MONTH, temporal.range(DAY_OF_MONTH).getMaximum)
        val curDow: Int = temp.get(DAY_OF_WEEK)
        val diff: Int = dowValue - curDow
        val daysDiff: Int = if (diff == 0) 0 else if (diff > 0) diff - 7 else diff
        // long arithmetic keeps the week offset safe from overflow
        temp.plus(daysDiff - (-ordinal - 1L) * 7L, DAYS)
      }
    }
  }
}
/**
* Returns the next day-of-week adjuster, which adjusts the date to the
* first occurrence of the specified day-of-week after the date being adjusted.
* <p>
* The ISO calendar system behaves as follows:<br>
* The input 2011-01-15 (a Saturday) for parameter (MONDAY) will return 2011-01-17 (two days later).<br>
* The input 2011-01-15 (a Saturday) for parameter (WEDNESDAY) will return 2011-01-19 (four days later).<br>
* The input 2011-01-15 (a Saturday) for parameter (SATURDAY) will return 2011-01-22 (seven days later).
* <p>
* The behavior is suitable for use with most calendar systems.
* It uses the {@code DAY_OF_WEEK} field and the {@code DAYS} unit,
* and assumes a seven day week.
*
* @param dayOfWeek the day-of-week to move the date to, not null
* @return the next day-of-week adjuster, not null
*/
private[temporal] def next(dayOfWeek: DayOfWeek): TemporalAdjuster = {
  val dowValue: Int = dayOfWeek.getValue
  (temporal: Temporal) => {
    val calDow = temporal.get(DAY_OF_WEEK)
    val daysDiff = calDow - dowValue
    // Always move strictly forward: a non-negative diff means the target day
    // is today or earlier this week, so jump to next week's occurrence.
    temporal.plus(if (daysDiff >= 0) 7 - daysDiff else -daysDiff, DAYS)
  }
}
/**
* Returns the next-or-same day-of-week adjuster, which adjusts the date to the
* first occurrence of the specified day-of-week after the date being adjusted
* unless it is already on that day in which case the same object is returned.
* <p>
* The ISO calendar system behaves as follows:<br>
* The input 2011-01-15 (a Saturday) for parameter (MONDAY) will return 2011-01-17 (two days later).<br>
* The input 2011-01-15 (a Saturday) for parameter (WEDNESDAY) will return 2011-01-19 (four days later).<br>
* The input 2011-01-15 (a Saturday) for parameter (SATURDAY) will return 2011-01-15 (same as input).
* <p>
* The behavior is suitable for use with most calendar systems.
* It uses the {@code DAY_OF_WEEK} field and the {@code DAYS} unit,
* and assumes a seven day week.
*
* @param dayOfWeek the day-of-week to check for or move the date to, not null
* @return the next-or-same day-of-week adjuster, not null
*/
private[temporal] def nextOrSame(dayOfWeek: DayOfWeek): TemporalAdjuster = {
  val dowValue: Int = dayOfWeek.getValue
  (temporal: Temporal) => {
    val calDow = temporal.get(DAY_OF_WEEK)
    if (calDow == dowValue) {
      // Already on the requested day-of-week: return the input unchanged.
      // (The converted code discarded this expression and fell through,
      // which incorrectly added seven days in the "same" case.)
      temporal
    } else {
      val daysDiff = calDow - dowValue
      temporal.plus(if (daysDiff >= 0) 7 - daysDiff else -daysDiff, DAYS)
    }
  }
}
/**
* Returns the previous day-of-week adjuster, which adjusts the date to the
* first occurrence of the specified day-of-week before the date being adjusted.
* <p>
* The ISO calendar system behaves as follows:<br>
* The input 2011-01-15 (a Saturday) for parameter (MONDAY) will return 2011-01-10 (five days earlier).<br>
* The input 2011-01-15 (a Saturday) for parameter (WEDNESDAY) will return 2011-01-12 (three days earlier).<br>
* The input 2011-01-15 (a Saturday) for parameter (SATURDAY) will return 2011-01-08 (seven days earlier).
* <p>
* The behavior is suitable for use with most calendar systems.
* It uses the {@code DAY_OF_WEEK} field and the {@code DAYS} unit,
* and assumes a seven day week.
*
* @param dayOfWeek the day-of-week to move the date to, not null
* @return the previous day-of-week adjuster, not null
*/
private[temporal] def previous(dayOfWeek: DayOfWeek): TemporalAdjuster = {
  val dowValue: Int = dayOfWeek.getValue
  (temporal: Temporal) => {
    val calDow = temporal.get(DAY_OF_WEEK)
    val daysDiff = dowValue - calDow
    // Always move strictly backward: a non-negative diff means the target day
    // is today or later this week, so jump to last week's occurrence.
    temporal.minus(if (daysDiff >= 0) 7 - daysDiff else -daysDiff, DAYS)
  }
}
/**
* Returns the previous-or-same day-of-week adjuster, which adjusts the date to the
* first occurrence of the specified day-of-week before the date being adjusted
* unless it is already on that day in which case the same object is returned.
* <p>
* The ISO calendar system behaves as follows:<br>
* The input 2011-01-15 (a Saturday) for parameter (MONDAY) will return 2011-01-10 (five days earlier).<br>
* The input 2011-01-15 (a Saturday) for parameter (WEDNESDAY) will return 2011-01-12 (three days earlier).<br>
* The input 2011-01-15 (a Saturday) for parameter (SATURDAY) will return 2011-01-15 (same as input).
* <p>
* The behavior is suitable for use with most calendar systems.
* It uses the {@code DAY_OF_WEEK} field and the {@code DAYS} unit,
* and assumes a seven day week.
*
* @param dayOfWeek the day-of-week to check for or move the date to, not null
* @return the previous-or-same day-of-week adjuster, not null
*/
private[temporal] def previousOrSame(dayOfWeek: DayOfWeek): TemporalAdjuster = {
  val dowValue: Int = dayOfWeek.getValue
  (temporal: Temporal) => {
    val calDow = temporal.get(DAY_OF_WEEK)
    if (calDow == dowValue) {
      // Already on the requested day-of-week: return the input unchanged.
      // (The converted code discarded this expression and fell through,
      // which incorrectly subtracted seven days in the "same" case.)
      temporal
    } else {
      val daysDiff = dowValue - calDow
      temporal.minus(if (daysDiff >= 0) 7 - daysDiff else -daysDiff, DAYS)
    }
  }
}
}
/**
 * Class kept only to mirror the shape of the original Java API.
 * Instantiation is forbidden via the private primary constructor:
 * all adjusters are exposed as factories on the companion object.
 * (The converted code contained a nameless `private def { }`, the residue
 * of Java's private no-arg constructor, which is a syntax error in Scala.)
 */
final class TemporalAdjusters private ()
/*
* Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* This file is available under and governed by the GNU General Public
* License version 2 only, as published by the Free Software Foundation.
* However, the following notice accompanied the original version of this
* file:
*
* Copyright (c) 2012, 2013 Stephen Colebourne & Michael Nascimento Santos
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of JSR-310 nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
* Framework-level interface defining an amount of time, such as
* "6 hours", "8 days" or "2 years and 3 months".
* <p>
* This is the base interface type for amounts of time.
* An amount is distinct from a date or time-of-day in that it is not tied
* to any specific point on the time-line.
* <p>
* The amount can be thought of as a {@code Map} of {@link TemporalUnit} to
* {@code long}, exposed via {@link #getUnits()} and {@link #get(TemporalUnit)}.
* A simple case might have a single unit-value pair, such as "6 hours".
* A more complex case may have multiple unit-value pairs, such as
* "7 years, 3 months and 5 days".
* <p>
* There are two common implementations.
* {@link Period} is a date-based implementation, storing years, months and days.
* {@link Duration} is a time-based implementation, storing seconds and nanoseconds,
* but providing some access using other duration based units such as minutes,
* hours and fixed 24-hour days.
* <p>
* This interface is a framework-level interface that should not be widely
* used in application code. Instead, applications should create and pass
* around instances of concrete types, such as {@code Period} and {@code Duration}.
*
* @implSpec
* This interface places no restrictions on the mutability of implementations,
* however immutability is strongly recommended.
*
* @since 1.8
*/
trait TemporalAmount {
  /**
   * Returns the value of the requested unit.
   * The units returned from {@link #getUnits()} uniquely define the
   * value of the {@code TemporalAmount}. A value must be returned
   * for each unit listed in {@code getUnits}.
   *
   * @implSpec
   * Implementations may declare support for units not listed by {@link #getUnits()}.
   * Typically, the implementation would define additional units
   * as conversions for the convenience of developers.
   *
   * @param unit the {@code TemporalUnit} for which to return the value
   * @return the long value of the unit
   * @throws DateTimeException if a value for the unit cannot be obtained
   * @throws UnsupportedTemporalTypeException if the {@code unit} is not supported
   */
  def get(unit: TemporalUnit): Long

  /**
   * Returns the list of units uniquely defining the value of this TemporalAmount.
   * The list of {@code TemporalUnits} is defined by the implementation class.
   * The list is a snapshot of the units at the time {@code getUnits}
   * is called and is not mutable.
   * The units are ordered from longest duration to the shortest duration
   * of the unit.
   *
   * @implSpec
   * The list of units completely and uniquely represents the
   * state of the object without omissions, overlaps or duplication.
   * The units are in order from longest duration to shortest.
   *
   * @return the List of {@code TemporalUnits}; not null
   */
  // NOTE(review): the return type was `Nothing` (an artifact of automated
  // Java->Scala conversion), which no implementation could satisfy. Restored
  // to the JSR-310 signature, java.util.List<TemporalUnit>.
  def getUnits: java.util.List[TemporalUnit]

  /**
   * Adds to the specified temporal object.
   * <p>
   * Adds the amount to the specified temporal object using the logic
   * encapsulated in the implementing class.
   * <p>
   * There are two equivalent ways of using this method.
   * The first is to invoke this method directly.
   * The second is to use {@link Temporal#plus(TemporalAmount)}:
   * {{{
   * // These two lines are equivalent, but the second approach is recommended
   * dateTime = amount.addTo(dateTime);
   * dateTime = dateTime.plus(adder);
   * }}}
   * It is recommended to use the second approach, {@code plus(TemporalAmount)},
   * as it is a lot clearer to read in code.
   *
   * @implSpec
   * The implementation must take the input object and add to it.
   * The implementation defines the logic of the addition and is responsible for
   * documenting that logic. It may use any method on {@code Temporal} to
   * query the temporal object and perform the addition.
   * The returned object must have the same observable type as the input object
   * <p>
   * The input object must not be altered.
   * Instead, an adjusted copy of the original must be returned.
   * This provides equivalent, safe behavior for immutable and mutable temporal objects.
   * <p>
   * The input temporal object may be in a calendar system other than ISO.
   * Implementations may choose to document compatibility with other calendar systems,
   * or reject non-ISO temporal objects by {@link TemporalQuery#chronology() querying the chronology}.
   * <p>
   * This method may be called from multiple threads in parallel.
   * It must be thread-safe when invoked.
   *
   * @param temporal the temporal object to add the amount to, not null
   * @return an object of the same observable type with the addition made, not null
   * @throws DateTimeException if unable to add
   * @throws ArithmeticException if numeric overflow occurs
   */
  def addTo(temporal: Temporal): Temporal

  /**
   * Subtracts this object from the specified temporal object.
   * <p>
   * Subtracts the amount from the specified temporal object using the logic
   * encapsulated in the implementing class.
   * <p>
   * There are two equivalent ways of using this method.
   * The first is to invoke this method directly.
   * The second is to use {@link Temporal#minus(TemporalAmount)}:
   * {{{
   * // these two lines are equivalent, but the second approach is recommended
   * dateTime = amount.subtractFrom(dateTime);
   * dateTime = dateTime.minus(amount);
   * }}}
   * It is recommended to use the second approach, {@code minus(TemporalAmount)},
   * as it is a lot clearer to read in code.
   *
   * @implSpec
   * The implementation must take the input object and subtract from it.
   * The implementation defines the logic of the subtraction and is responsible for
   * documenting that logic. It may use any method on {@code Temporal} to
   * query the temporal object and perform the subtraction.
   * The returned object must have the same observable type as the input object
   * <p>
   * The input object must not be altered.
   * Instead, an adjusted copy of the original must be returned.
   * This provides equivalent, safe behavior for immutable and mutable temporal objects.
   * <p>
   * The input temporal object may be in a calendar system other than ISO.
   * Implementations may choose to document compatibility with other calendar systems,
   * or reject non-ISO temporal objects by {@link TemporalQuery#chronology() querying the chronology}.
   * <p>
   * This method may be called from multiple threads in parallel.
   * It must be thread-safe when invoked.
   *
   * @param temporal the temporal object to subtract the amount from, not null
   * @return an object of the same observable type with the subtraction made, not null
   * @throws DateTimeException if unable to subtract
   * @throws ArithmeticException if numeric overflow occurs
   */
  def subtractFrom(temporal: Temporal): Temporal
}
/*
* Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* This file is available under and governed by the GNU General Public
* License version 2 only, as published by the Free Software Foundation.
* However, the following notice accompanied the original version of this
* file:
*
* Copyright (c) 2012, Stephen Colebourne & Michael Nascimento Santos
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of JSR-310 nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
* A field of date-time, such as month-of-year or hour-of-minute.
* <p>
* Date and time is expressed using fields which partition the time-line into something
* meaningful for humans. Implementations of this interface represent those fields.
* <p>
* The most commonly used units are defined in {@link ChronoField}.
* Further fields are supplied in {@link IsoFields}, {@link WeekFields} and {@link JulianFields}.
* Fields can also be written by application code by implementing this interface.
* <p>
* The field works using double dispatch. Client code calls methods on a date-time like
* {@code DateTime} which check if the field is a {@code ChronoField}.
* If it is, then the date-time must handle it.
* Otherwise, the method call is re-dispatched to the matching method in this interface.
*
* @implSpec
* This interface must be implemented with care to ensure other classes operate correctly.
* All implementations that can be instantiated must be final, immutable and thread-safe.
* Implementations should be { @code Serializable} where possible.
* An enum is an effective implementation choice.
*
* @since 1.8
*/
trait TemporalField {
/**
* Gets the display name for the field in the requested locale.
* <p>
* If there is no display name for the locale then a suitable default must be returned.
* <p>
* The default implementation must check the locale is not null
* and return {@code toString()}.
*
* @param locale the locale to use, not null
* @return the display name for the locale or a suitable default, not null
*/
def getDisplayName(locale: Locale): String = {
toString
}
/**
* Gets the unit that the field is measured in.
* <p>
* The unit of the field is the period that varies within the range.
* For example, in the field 'MonthOfYear', the unit is 'Months'.
* See also {@link #getRangeUnit()}.
*
* @return the period unit defining the base unit of the field, not null
*/
def getBaseUnit: TemporalUnit
/**
* Gets the range that the field is bound by.
* <p>
* The range of the field is the period that the field varies within.
* For example, in the field 'MonthOfYear', the range is 'Years'.
* See also {@link #getBaseUnit()}.
* <p>
* The range is never null. For example, the 'Year' field is shorthand for
* 'YearOfForever'. It therefore has a unit of 'Years' and a range of 'Forever'.
*
* @return the period unit defining the range of the field, not null
*/
def getRangeUnit: TemporalUnit
/**
* Gets the range of valid values for the field.
* <p>
* All fields can be expressed as a {@code long} integer.
* This method returns an object that describes the valid range for that value.
* This method is generally only applicable to the ISO-8601 calendar system.
* <p>
* Note that the result only describes the minimum and maximum valid values
* and it is important not to read too much into them. For example, there
* could be values within the range that are invalid for the field.
*
* @return the range of valid values for the field, not null
*/
def range: ValueRange
/**
* Checks if this field represents a component of a date.
* <p>
* A field is date-based if it can be derived from
* {@link ChronoField#EPOCH_DAY EPOCH_DAY}.
* Note that it is valid for both {@code isDateBased()} and {@code isTimeBased()}
* to return false, such as when representing a field like minute-of-week.
*
* @return true if this field is a component of a date
*/
def isDateBased: Boolean
/**
* Checks if this field represents a component of a time.
* <p>
* A field is time-based if it can be derived from
* {@link ChronoField#NANO_OF_DAY NANO_OF_DAY}.
* Note that it is valid for both {@code isDateBased()} and {@code isTimeBased()}
* to return false, such as when representing a field like minute-of-week.
*
* @return true if this field is a component of a time
*/
def isTimeBased: Boolean
/**
* Checks if this field is supported by the temporal object.
* <p>
* This determines whether the temporal accessor supports this field.
* If this returns false, the the temporal cannot be queried for this field.
* <p>
* There are two equivalent ways of using this method.
* The first is to invoke this method directly.
* The second is to use {@link TemporalAccessor#isSupported(TemporalField)}:
* {{{
* // these two lines are equivalent, but the second approach is recommended
* temporal = thisField.isSupportedBy(temporal);
* temporal = temporal.isSupported(thisField);
* }}}
* It is recommended to use the second approach, {@code isSupported(TemporalField)},
* as it is a lot clearer to read in code.
* <p>
* Implementations should determine whether they are supported using the fields
* available in {@link ChronoField}.
*
* @param temporal the temporal object to query, not null
* @return true if the date-time can be queried for this field, false if not
*/
def isSupportedBy(temporal: TemporalAccessor): Boolean
/**
* Get the range of valid values for this field using the temporal object to
* refine the result.
* <p>
* This uses the temporal object to find the range of valid values for the field.
* This is similar to {@link #range()}, however this method refines the result
* using the temporal. For example, if the field is {@code DAY_OF_MONTH} the
* {@code range} method is not accurate as there are four possible month lengths,
* 28, 29, 30 and 31 days. Using this method with a date allows the range to be
* accurate, returning just one of those four options.
* <p>
* There are two equivalent ways of using this method.
* The first is to invoke this method directly.
* The second is to use {@link TemporalAccessor#range(TemporalField)}:
* {{{
* // these two lines are equivalent, but the second approach is recommended
* temporal = thisField.rangeRefinedBy(temporal);
* temporal = temporal.range(thisField);
* }}}
* It is recommended to use the second approach, {@code range(TemporalField)},
* as it is a lot clearer to read in code.
* <p>
* Implementations should perform any queries or calculations using the fields
* available in {@link ChronoField}.
* If the field is not supported an {@code UnsupportedTemporalTypeException} must be thrown.
*
* @param temporal the temporal object used to refine the result, not null
* @return the range of valid values for this field, not null
* @throws DateTimeException if the range for the field cannot be obtained
* @throws UnsupportedTemporalTypeException if the field is not supported by the temporal
*/
def rangeRefinedBy(temporal: TemporalAccessor): ValueRange
/**
* Gets the value of this field from the specified temporal object.
* <p>
* This queries the temporal object for the value of this field.
* <p>
* There are two equivalent ways of using this method.
* The first is to invoke this method directly.
* The second is to use {@link TemporalAccessor#getLong(TemporalField)}
* (or {@link TemporalAccessor#get(TemporalField)}):
* {{{
* // these two lines are equivalent, but the second approach is recommended
* temporal = thisField.getFrom(temporal);
* temporal = temporal.getLong(thisField);
* }}}
* It is recommended to use the second approach, {@code getLong(TemporalField)},
* as it is a lot clearer to read in code.
* <p>
* Implementations should perform any queries or calculations using the fields
* available in {@link ChronoField}.
* If the field is not supported an {@code UnsupportedTemporalTypeException} must be thrown.
*
* @param temporal the temporal object to query, not null
* @return the value of this field, not null
* @throws DateTimeException if a value for the field cannot be obtained
* @throws UnsupportedTemporalTypeException if the field is not supported by the temporal
* @throws ArithmeticException if numeric overflow occurs
*/
def getFrom(temporal: TemporalAccessor): Long
/**
* Returns a copy of the specified temporal object with the value of this field set.
* <p>
* This returns a new temporal object based on the specified one with the value for
* this field changed. For example, on a {@code Date}, this could be used to
* set the year, month or day-of-month.
* The returned object has the same observable type as the specified object.
* <p>
* In some cases, changing a field is not fully defined. For example, if the target object is
* a date representing the 31st January, then changing the month to February would be unclear.
* In cases like this, the implementation is responsible for resolving the result.
* Typically it will choose the previous valid date, which would be the last valid
* day of February in this example.
* <p>
* There are two equivalent ways of using this method.
* The first is to invoke this method directly.
* The second is to use {@link Temporal#with(TemporalField, long)}:
* {{{
* // these two lines are equivalent, but the second approach is recommended
* temporal = thisField.adjustInto(temporal);
* temporal = temporal.with(thisField);
* }}}
* It is recommended to use the second approach, {@code with(TemporalField)},
* as it is a lot clearer to read in code.
* <p>
* Implementations should perform any queries or calculations using the fields
* available in {@link ChronoField}.
* If the field is not supported an {@code UnsupportedTemporalTypeException} must be thrown.
* <p>
* Implementations must not alter the specified temporal object.
* Instead, an adjusted copy of the original must be returned.
* This provides equivalent, safe behavior for immutable and mutable implementations.
*
* @param <R> the type of the Temporal object
* @param temporal the temporal object to adjust, not null
* @param newValue the new value of the field
* @return the adjusted temporal object, not null
* @throws DateTimeException if the field cannot be set
* @throws UnsupportedTemporalTypeException if the field is not supported by the temporal
* @throws ArithmeticException if numeric overflow occurs
*/
def adjustInto(temporal: R, newValue: Long): R
/**
* Resolves this field to provide a simpler alternative or a date.
* <p>
* This method is invoked during the resolve phase of parsing.
* It is designed to allow application defined fields to be simplified into
* more standard fields, such as those on {@code ChronoField}, or into a date.
* <p>
* Applications should not normally invoke this method directly.
*
* @implSpec
* If an implementation represents a field that can be simplified, or
* combined with others, then this method must be implemented.
* <p>
* The specified map contains the current state of the parse.
* The map is mutable and must be mutated to resolve the field and
* any related fields. This method will only be invoked during parsing
* if the map contains this field, and implementations should therefore
* assume this field is present.
* <p>
* Resolving a field will consist of looking at the value of this field,
* and potentially other fields, and either updating the map with a
* simpler value, such as a { @code ChronoField}, or returning a
* complete { @code ChronoLocalDate}. If a resolve is successful,
* the code must remove all the fields that were resolved from the map,
* including this field.
* <p>
* For example, the { @code IsoFields} class contains the quarter-of-year
* and day-of-quarter fields. The implementation of this method in that class
* resolves the two fields plus the { @link ChronoField#YEAR YEAR} into a
* complete { @code Date}. The resolve method will remove all three
* fields from the map before returning the { @code Date}.
* <p>
* If resolution should be possible, but the data is invalid, the resolver
* style should be used to determine an appropriate level of leniency, which
* may require throwing a { @code DateTimeException} or { @code ArithmeticException}.
* If no resolution is possible, the resolve method must return null.
* <p>
* When resolving time fields, the map will be altered and null returned.
* When resolving date fields, the date is normally returned from the method,
* with the map altered to remove the resolved fields. However, it would also
* be acceptable for the date fields to be resolved into other { @code ChronoField}
* instances that can produce a date, such as { @code EPOCH_DAY}.
* <p>
* Not all { @code TemporalAccessor} implementations are accepted as return values.
* Implementations must accept { @code ChronoLocalDate}, { @code ChronoLocalDateTime},
* { @code ChronoZonedDateTime} and { @code Time}.
* <p>
* The zone is not normally required for resolution, but is provided for completeness.
* <p>
* The default implementation must return null.
*
* @param fieldValues the map of fields to values, which can be updated, not null
* @param chronology the effective chronology, not null
* @param zone the effective zone, not null
* @param resolverStyle the requested type of resolve, not null
* @return the resolved temporal object; null if resolving only
* changed the map, or no resolve occurred
* @throws ArithmeticException if numeric overflow occurs
* @throws DateTimeException if resolving results in an error. This must not be thrown
* by querying a field on the temporal without first checking if it is supported
*/
def resolve(fieldValues: java.util.Map[TemporalField, java.lang.Long], chronology: Chronology, zone: ZoneId, resolverStyle: ResolverStyle): TemporalAccessor = {
  // Default implementation: no cross-field resolution is performed, per the
  // contract documented above ("The default implementation must return null").
  // Fields that resolve in combination (e.g. the IsoFields quarter fields)
  // override this to combine entries from fieldValues.
  // NOTE(review): parameter types reconstructed from the scaladoc above and the
  // ThreeTen reference signature -- the converter had erased them to Nothing.
  null
}
/**
 * Gets a descriptive name for the field.
 * <p>
 * The name should be of the format 'BaseOfRange', such as 'MonthOfYear',
 * unless the field has a range of {@code FOREVER}, when only
 * the base unit is mentioned, such as 'Year' or 'Era'.
 *
 * @return the name of the field, not null
 */
override def toString: String
}
/*
* Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* This file is available under and governed by the GNU General Public
* License version 2 only, as published by the Free Software Foundation.
* However, the following notice accompanied the original version of this
* file:
*
* Copyright (c) 2007-2012, Stephen Colebourne & Michael Nascimento Santos
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of JSR-310 nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
* Common implementations of {@code TemporalQuery}.
* <p>
* This class provides common implementations of {@link TemporalQuery}.
* These are defined here as they must be constants, and the definition
* of lambdas does not guarantee that. By assigning them once here,
* they become 'normal' Java constants.
*
* @since 1.8
*/
object TemporalQueries {
  /**
   * A strict query for the {@code ZoneId}.
   * <p>
   * Dispatches back through {@code TemporalAccessor.query}, which recognises
   * this constant and answers it directly, so the self-reference terminates there.
   */
  private[temporal] final val ZONE_ID: TemporalQuery[ZoneId] = new TemporalQuery[ZoneId] {
    def queryFrom(temporal: TemporalAccessor): ZoneId = temporal.query(ZONE_ID)
  }
  /**
   * A query for the {@code Chronology}.
   */
  private[temporal] final val CHRONO: TemporalQuery[Chronology] = new TemporalQuery[Chronology] {
    def queryFrom(temporal: TemporalAccessor): Chronology = temporal.query(CHRONO)
  }
  /**
   * A query for the smallest supported unit.
   */
  private[temporal] final val PRECISION: TemporalQuery[TemporalUnit] = new TemporalQuery[TemporalUnit] {
    def queryFrom(temporal: TemporalAccessor): TemporalUnit = temporal.query(PRECISION)
  }
  /**
   * A lenient query for the {@code ZoneId}, falling back to the {@code ZoneOffset}.
   * <p>
   * NOTE(review): typed as ZoneId on the assumption that ZoneOffset extends
   * ZoneId, as in java.time -- confirm against this port's class hierarchy.
   */
  private[temporal] final val ZONE: TemporalQuery[ZoneId] = new TemporalQuery[ZoneId] {
    def queryFrom(temporal: TemporalAccessor): ZoneId = {
      val zone = temporal.query(ZONE_ID)
      // Fall back to the offset only when no true zone ID is present.
      if (zone != null) zone else temporal.query(OFFSET)
    }
  }
  /**
   * A query for {@code ZoneOffset} returning null if not found.
   * <p>
   * Builds the offset from OFFSET_SECONDS; null when that field is unsupported.
   * (The original code discarded the if-result and always returned null.)
   */
  private[temporal] final val OFFSET: TemporalQuery[ZoneOffset] = new TemporalQuery[ZoneOffset] {
    def queryFrom(temporal: TemporalAccessor): ZoneOffset =
      if (temporal.isSupported(OFFSET_SECONDS)) ZoneOffset.ofTotalSeconds(temporal.get(OFFSET_SECONDS))
      else null
  }
  /**
   * A query for the local date returning null if not found.
   * <p>
   * Builds the date from EPOCH_DAY; null when that field is unsupported.
   */
  private[temporal] final val LOCAL_DATE: TemporalQuery[LocalDate] = new TemporalQuery[LocalDate] {
    def queryFrom(temporal: TemporalAccessor): LocalDate =
      if (temporal.isSupported(EPOCH_DAY)) LocalDate.ofEpochDay(temporal.getLong(EPOCH_DAY))
      else null
  }
  /**
   * A query for the local time returning null if not found.
   * <p>
   * Builds the time from NANO_OF_DAY; null when that field is unsupported.
   */
  private[temporal] final val LOCAL_TIME: TemporalQuery[LocalTime] = new TemporalQuery[LocalTime] {
    def queryFrom(temporal: TemporalAccessor): LocalTime =
      if (temporal.isSupported(NANO_OF_DAY)) LocalTime.ofNanoOfDay(temporal.getLong(NANO_OF_DAY))
      else null
  }
}
/**
 * Utility-class companion: never instantiated; all members live on the object.
 */
final class TemporalQueries private () {
  // Private primary constructor replaces the Java private constructor that the
  // converter rendered as the invalid `private def {`.
}
/*
* Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* This file is available under and governed by the GNU General Public
* License version 2 only, as published by the Free Software Foundation.
* However, the following notice accompanied the original version of this
* file:
*
* Copyright (c) 2012, Stephen Colebourne & Michael Nascimento Santos
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of JSR-310 nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
* Strategy for querying a temporal object.
* <p>
* Queries are a key tool for extracting information from temporal objects.
* They exist to externalize the process of querying, permitting different
* approaches, as per the strategy design pattern.
* Examples might be a query that checks if the date is the day before February 29th
* in a leap year, or calculates the number of days to your next birthday.
* <p>
* The {@link TemporalField} interface provides another mechanism for querying
* temporal objects. That interface is limited to returning a {@code long}.
* By contrast, queries can return any type.
* <p>
* There are two equivalent ways of using a {@code TemporalQuery}.
* The first is to invoke the method on this interface directly.
* The second is to use {@link TemporalAccessor#query(TemporalQuery)}:
* {{{
* // these two lines are equivalent, but the second approach is recommended
* temporal = thisQuery.queryFrom(temporal);
* temporal = temporal.query(thisQuery);
* }}}
* It is recommended to use the second approach, {@code query(TemporalQuery)},
* as it is a lot clearer to read in code.
* <p>
* The most common implementations are method references, such as
* {@code Date::from} and {@code ZoneId::from}.
* Additional common implementations are provided on this interface as static methods.
*
* @implSpec
* This interface places no restrictions on the mutability of implementations,
* however immutability is strongly recommended.
*
* @since 1.8
*/
object TemporalQuery {
  /**
   * A strict query for the {@code ZoneId}.
   * <p>
   * The zone is only returned if the date-time conceptually contains a
   * {@code ZoneId}; a temporal that only carries a {@code ZoneOffset}
   * (e.g. an offset date-time) yields null. In most cases, applications
   * should use {@link #zone} as this query is too strict.
   *
   * @return a query that can obtain the zone ID of a temporal, not null
   */
  def zoneId: TemporalQuery[ZoneId] = {
    TemporalQueries.ZONE_ID
  }
  /**
   * A query for the {@code Chronology}.
   * <p>
   * Temporals that represent a date, or part of a date, return the chronology
   * the date is expressed in; time-only temporals return null.
   * {@code Chronology::from} is equivalent, except that it throws when no
   * chronology can be obtained.
   *
   * @return a query that can obtain the chronology of a temporal, not null
   */
  def chronology: TemporalQuery[Chronology] = {
    TemporalQueries.CHRONO
  }
  /**
   * A query for the smallest supported unit.
   * <p>
   * Complete dates and times return their actual smallest supported precision
   * (e.g. {@code DAYS} for a date, {@code NANOS} for a time); temporals that
   * do not represent a complete date or time return null.
   *
   * @return a query that can obtain the precision of a temporal, not null
   */
  def precision: TemporalQuery[TemporalUnit] = {
    TemporalQueries.PRECISION
  }
  /**
   * A lenient query for the {@code ZoneId}, falling back to the {@code ZoneOffset}.
   * <p>
   * First tries {@link #zoneId}; if no zone is found it falls back to
   * {@link #offset}. Applications should normally prefer this query over
   * {@code zoneId}. {@code ZoneId::from} is equivalent, except that it throws
   * when no zone can be obtained.
   *
   * @return a query that can obtain the zone ID or offset of a temporal, not null
   */
  def zone: TemporalQuery[ZoneId] = {
    TemporalQueries.ZONE
  }
  /**
   * A query for {@code ZoneOffset} returning null if not found.
   * <p>
   * Examines the {@link ChronoField#OFFSET_SECONDS OFFSET_SECONDS} field and
   * uses it to create a {@code ZoneOffset}; returns null when the temporal
   * does not support that field (whereas {@code ZoneOffset::from} would throw).
   *
   * @return a query that can obtain the offset of a temporal, not null
   */
  def offset: TemporalQuery[ZoneOffset] = {
    TemporalQueries.OFFSET
  }
  /**
   * A query for the local date returning null if not found.
   * <p>
   * Examines the {@link ChronoField#EPOCH_DAY EPOCH_DAY} field and uses it to
   * create a local date; returns null when the temporal does not support that
   * field (whereas the strict {@code from} factory would throw).
   *
   * @return a query that can obtain the date of a temporal, not null
   */
  def localDate: TemporalQuery[LocalDate] = {
    TemporalQueries.LOCAL_DATE
  }
  /**
   * A query for the local time returning null if not found.
   * <p>
   * Examines the {@link ChronoField#NANO_OF_DAY NANO_OF_DAY} field and uses it
   * to create a local time; returns null when the temporal does not support
   * that field (whereas the strict {@code from} factory would throw).
   *
   * @return a query that can obtain the time of a temporal, not null
   */
  def localTime: TemporalQuery[LocalTime] = {
    TemporalQueries.LOCAL_TIME
  }
}
trait TemporalQuery[R] {
  /**
   * Queries the specified temporal object.
   * <p>
   * This queries the specified temporal object to return an object using the
   * logic encapsulated in the implementing class. Examples might be a query
   * that checks if the date is the day before February 29th in a leap year,
   * or calculates the number of days to your next birthday.
   * <p>
   * There are two equivalent ways of using this method: invoking it directly,
   * or via {@link TemporalAccessor#query(TemporalQuery)}:
   * {{{
   * // these two lines are equivalent, but the second approach is recommended
   * temporal = thisQuery.queryFrom(temporal);
   * temporal = temporal.query(thisQuery);
   * }}}
   * It is recommended to use the second approach, {@code query(TemporalQuery)},
   * as it is a lot clearer to read in code.
   *
   * @implSpec
   * The implementation must take the input object and query it. It may use any
   * method on {@code TemporalAccessor} to determine the result, but the input
   * object must not be altered. The input may be in a calendar system other
   * than ISO; implementations may document compatibility or reject non-ISO
   * temporals by querying the chronology. This method may be called from
   * multiple threads in parallel and must be thread-safe when invoked.
   *
   * @param temporal the temporal object to query, not null
   * @return the queried value, may return null to indicate not found
   * @throws DateTimeException if unable to query
   * @throws ArithmeticException if numeric overflow occurs
   */
  // The result type is a trait-level parameter (rather than a method-level one)
  // so that usages such as TemporalQuery[Chronology] elsewhere in this file
  // type-check; the converter had dropped the type parameter from the trait.
  def queryFrom(temporal: TemporalAccessor): R
}
/*
* Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* This file is available under and governed by the GNU General Public
* License version 2 only, as published by the Free Software Foundation.
* However, the following notice accompanied the original version of this
* file:
*
* Copyright (c) 2012, Stephen Colebourne & Michael Nascimento Santos
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of JSR-310 nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
* A unit of date-time, such as Days or Hours.
* <p>
* Measurement of time is built on units, such as years, months, days, hours, minutes and seconds.
* Implementations of this interface represent those units.
* <p>
* An instance of this interface represents the unit itself, rather than an amount of the unit.
* See {@link Period} for a class that represents an amount in terms of the common units.
* <p>
* The most commonly used units are defined in {@link ChronoUnit}.
* Further units are supplied in {@link IsoFields}.
* Units can also be written by application code by implementing this interface.
* <p>
* The unit works using double dispatch. Client code calls methods on a date-time like
* {@code DateTime} which check if the unit is a {@code ChronoUnit}.
* If it is, then the date-time must handle it.
* Otherwise, the method call is re-dispatched to the matching method in this interface.
*
* @implSpec
* This interface must be implemented with care to ensure other classes operate correctly.
* All implementations that can be instantiated must be final, immutable and thread-safe.
* It is recommended to use an enum where possible.
*
* @since 1.8
*/
trait TemporalUnit {
  /**
   * Gets the duration of this unit, which may be an estimate.
   * <p>
   * All units return a duration measured in standard nanoseconds; the duration
   * is positive and non-zero. Some units can only estimate their duration (for
   * example, days vary with daylight saving time); use
   * {@link #isDurationEstimated} to distinguish.
   *
   * @return the duration of this unit, which may be an estimate, not null
   */
  def getDuration: Duration
  /**
   * Checks if the duration of the unit is an estimate.
   * <p>
   * Returns true if the duration is an estimate and false if it is accurate.
   * Note that accurate/estimated ignores leap seconds.
   *
   * @return true if the duration is estimated, false if accurate
   */
  def isDurationEstimated: Boolean
  /**
   * Checks if this unit represents a component of a date.
   * <p>
   * A date-based unit has a {@linkplain #getDuration duration} that is an
   * integral multiple of the length of a standard day. Both this method and
   * {@code isTimeBased} may return false, e.g. for a unit like 36 hours.
   *
   * @return true if this unit is a component of a date
   */
  def isDateBased: Boolean
  /**
   * Checks if this unit represents a component of a time.
   * <p>
   * A time-based unit has a {@linkplain #getDuration duration} that divides
   * into the length of a standard day without remainder. Both this method and
   * {@code isDateBased} may return false, e.g. for a unit like 36 hours.
   *
   * @return true if this unit is a component of a time
   */
  def isTimeBased: Boolean
  /**
   * Checks if this unit is supported by the specified temporal object.
   * <p>
   * This checks that the implementing date-time can add/subtract this unit,
   * which can be used to avoid throwing an exception. Well-known temporal
   * types are answered directly; anything else is probed by attempting
   * {@code plus(1, this)} (and {@code plus(-1, this)} as a fallback).
   *
   * @param temporal the temporal object to check, not null
   * @return true if the unit is supported
   */
  def isSupportedBy(temporal: Temporal): Boolean = {
    // The original code discarded the results of these if-blocks and always
    // fell through to the probe; chain them with else so each answer is final.
    // NOTE(review): LocalTime / ChronoLocalDateTime / ChronoZonedDateTime were
    // erased to Nothing by the converter -- confirm these are the port's types.
    if (temporal.isInstanceOf[LocalTime]) {
      isTimeBased
    } else if (temporal.isInstanceOf[ChronoLocalDate]) {
      isDateBased
    } else if (temporal.isInstanceOf[ChronoLocalDateTime[_]] || temporal.isInstanceOf[ChronoZonedDateTime[_]]) {
      true
    } else {
      try {
        temporal.plus(1, this)
        true
      } catch {
        case _: UnsupportedTemporalTypeException =>
          false
        case _: RuntimeException =>
          // Adding +1 may overflow near the maximum supported value; retry
          // with -1 before concluding the unit is unsupported.
          try {
            temporal.plus(-1, this)
            true
          } catch {
            case _: RuntimeException => false
          }
      }
    }
  }
  /**
   * Returns a copy of the specified temporal object with the specified period added.
   * <p>
   * The period added is a multiple of this unit; it may be negative, which is
   * equivalent to subtraction. Callers normally use
   * {@link Temporal#plus(long, TemporalUnit)} instead of invoking this directly:
   * {{{
   * // these two lines are equivalent, but the second approach is recommended
   * temporal = thisUnit.addTo(temporal);
   * temporal = temporal.plus(thisUnit);
   * }}}
   * Implementations must not alter the specified temporal object; an adjusted
   * copy of the original must be returned.
   *
   * @tparam R the type of the Temporal object
   * @param temporal the temporal object to adjust, not null
   * @param amount the amount of this unit to add, positive or negative
   * @return the adjusted temporal object, not null
   * @throws DateTimeException if the period cannot be added
   * @throws UnsupportedTemporalTypeException if the unit is not supported by the temporal
   */
  // Type parameter declared here; the converter had left R undeclared.
  def addTo[R <: Temporal](temporal: R, amount: Long): R
  /**
   * Calculates the amount of time between two temporal objects.
   * <p>
   * The start and end points must be of compatible types; the implementation
   * converts the second to an instance of the first before calculating. The
   * result is a whole number of complete units between the two temporals and
   * is negative if the end is before the start. For example, the amount in
   * hours between 11:30 and 13:29 is one hour, being one minute short of two.
   * Callers normally use {@link Temporal#until(Temporal, TemporalUnit)}:
   * {{{
   * // these two lines are equivalent
   * between = thisUnit.between(start, end);
   * between = start.until(end, thisUnit);
   * }}}
   * Implementations must not alter the specified temporal objects.
   *
   * @implSpec
   * Implementations must begin by checking if the two temporals have the same
   * type using {@code getClass()}. If they do not, then the result must be
   * obtained by calling {@code temporal1Inclusive.until(temporal2Exclusive, this)}.
   *
   * @param temporal1Inclusive the base temporal object, not null
   * @param temporal2Exclusive the other temporal object, not null
   * @return the amount of time between temporal1Inclusive and temporal2Exclusive
   *         in terms of this unit; positive if temporal2Exclusive is later than
   *         temporal1Inclusive, negative if earlier
   * @throws DateTimeException if the amount cannot be calculated, or the end
   *         temporal cannot be converted to the same type as the start temporal
   * @throws UnsupportedTemporalTypeException if the unit is not supported by the temporal
   * @throws ArithmeticException if numeric overflow occurs
   */
  def between(temporal1Inclusive: Temporal, temporal2Exclusive: Temporal): Long
  /**
   * Gets a descriptive name for the unit.
   * <p>
   * This should be in the plural and upper-first camel case, such as 'Days' or 'Minutes'.
   *
   * @return the name of this unit, not null
   */
  override def toString: String
}
/*
* Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* This file is available under and governed by the GNU General Public
* License version 2 only, as published by the Free Software Foundation.
* However, the following notice accompanied the original version of this
* file:
*
* Copyright (c) 2013, Stephen Colebourne & Michael Nascimento Santos
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of JSR-310 nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
* UnsupportedTemporalTypeException indicates that a ChronoField or ChronoUnit is
* not supported for a Temporal class.
*
* @implSpec
* This class is intended for use in a single thread.
*
* @since 1.8
*/
/**
 * Companion object for the UnsupportedTemporalTypeException class.
 * Intentionally empty: the static side of the Java original presumably held
 * only a serialVersionUID, which has no direct equivalent needed here —
 * TODO confirm against the upstream JDK source.
 */
object UnsupportedTemporalTypeException {
}
/**
 * UnsupportedTemporalTypeException indicates that a ChronoField or ChronoUnit is
 * not supported for a Temporal class.
 *
 * Fix: the converted source used `` `super`(message) `` inside auxiliary
 * constructors, which is not valid Scala — auxiliary constructors may only
 * delegate to another constructor of the same class. The superclass call is
 * now made from the primary constructor, and the single-argument form
 * delegates to it with a null cause (mirroring the Java overloads).
 *
 * @param message the message to use for this exception, may be null
 * @param cause   the cause of the exception, may be null
 */
class UnsupportedTemporalTypeException(message: String, cause: Throwable) extends DateTimeException(message, cause) {
  /**
   * Constructs a new UnsupportedTemporalTypeException with the specified message
   * and no cause.
   *
   * @param message the message to use for this exception, may be null
   */
  def this(message: String) = this(message, null)
}
/*
* Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* This file is available under and governed by the GNU General Public
* License version 2 only, as published by the Free Software Foundation.
* However, the following notice accompanied the original version of this
* file:
*
* Copyright (c) 2011-2012, Stephen Colebourne & Michael Nascimento Santos
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of JSR-310 nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
 * The range of valid values for a date-time field.
 * <p>
 * All {@link TemporalField} instances have a valid range of values.
 * For example, the ISO day-of-month runs from 1 to somewhere between 28 and 31.
 * This companion provides the factory methods for building such ranges.
 * <p>
 * Note the limitation of this representation: only the minimum and maximum
 * bounds are captured, so individual values inside the outer range may still
 * be invalid for a particular field (e.g. a field whose valid values are
 * 1, 2, 4, 6, 7 has the range '1 - 7' despite 3 and 5 being invalid).
 * Instances are not tied to a specific field.
 *
 * @implSpec
 * This class is immutable and thread-safe.
 *
 * @since 1.8
 */
object ValueRange {
  /**
   * Throws {@code IllegalArgumentException} with the given message unless
   * {@code smaller <= larger}. Shared by all factory overloads; the messages
   * are kept byte-identical to the JDK originals.
   */
  private def requireOrdered(smaller: Long, larger: Long, message: String): Unit = {
    if (smaller > larger) {
      throw new IllegalArgumentException(message)
    }
  }

  /**
   * Obtains a fixed value range, where both bounds are fully known
   * (for example, ISO month-of-year always runs from 1 to 12).
   *
   * @param min the minimum value
   * @param max the maximum value
   * @return the ValueRange for min, max, not null
   * @throws IllegalArgumentException if the minimum is greater than the maximum
   */
  def of(min: Long, max: Long): ValueRange = {
    requireOrdered(min, max, "Minimum value must be less than maximum value")
    new ValueRange(min, min, max, max)
  }

  /**
   * Obtains a variable value range, where the minimum is fixed and the maximum
   * may vary (for example, ISO day-of-month starts at 1 but ends between 28 and 31).
   *
   * @param min the minimum value
   * @param maxSmallest the smallest maximum value
   * @param maxLargest the largest maximum value
   * @return the ValueRange for min, smallest max, largest max, not null
   * @throws IllegalArgumentException if
   *         the minimum is greater than the smallest maximum,
   *         or the smallest maximum is greater than the largest maximum
   */
  def of(min: Long, maxSmallest: Long, maxLargest: Long): ValueRange =
    of(min, min, maxSmallest, maxLargest)

  /**
   * Obtains a fully variable value range, where both the minimum and the
   * maximum value may vary.
   *
   * @param minSmallest the smallest minimum value
   * @param minLargest the largest minimum value
   * @param maxSmallest the smallest maximum value
   * @param maxLargest the largest maximum value
   * @return the ValueRange for smallest min, largest min, smallest max, largest max, not null
   * @throws IllegalArgumentException if
   *         the smallest minimum is greater than the smallest maximum,
   *         or the smallest maximum is greater than the largest maximum
   *         or the largest minimum is greater than the largest maximum
   */
  def of(minSmallest: Long, minLargest: Long, maxSmallest: Long, maxLargest: Long): ValueRange = {
    requireOrdered(minSmallest, minLargest, "Smallest minimum value must be less than largest minimum value")
    requireOrdered(maxSmallest, maxLargest, "Smallest maximum value must be less than largest maximum value")
    requireOrdered(minLargest, maxLargest, "Minimum value must be less than maximum value")
    new ValueRange(minSmallest, minLargest, maxSmallest, maxLargest)
  }
}
/**
 * The range of valid values for a date-time field.
 *
 * Fixes over the converted source:
 * - The class had no primary constructor and the auxiliary constructor
 *   assigned to {@code final val} fields (invalid Scala). The four bounds
 *   are now private primary-constructor parameters; the constructor stays
 *   private so instances are created only via the companion's factories.
 * - {@code checkValidValue}/{@code checkValidIntValue} threw
 *   {@code new Nothing(...)}, which cannot compile; they now throw
 *   {@code DateTimeException} as the JDK specifies.
 * - {@code equals} discarded the values of its {@code if} expressions and
 *   always fell through to {@code false}; it also took {@code AnyRef} and so
 *   did not override {@code Any.equals}. Both are corrected.
 *
 * @implSpec This class is immutable and thread-safe.
 *
 * @param minSmallest the smallest minimum value
 * @param minLargest  the largest minimum value
 * @param maxSmallest the smallest maximum value
 * @param maxLargest  the largest maximum value
 */
final class ValueRange private(private val minSmallest: Long, private val minLargest: Long, private val maxSmallest: Long, private val maxLargest: Long) extends Serializable {
  /**
   * Is the value range fixed and fully known.
   * For example, ISO day-of-month runs from 1 to between 28 and 31: not fixed.
   * For January specifically the range is always 1 to 31: fixed.
   *
   * @return true if the set of values is fixed
   */
  def isFixed: Boolean = minSmallest == minLargest && maxSmallest == maxLargest

  /**
   * Gets the minimum value that the field can take.
   *
   * @return the minimum value for this field
   */
  def getMinimum: Long = minSmallest

  /**
   * Gets the largest possible minimum value that the field can take.
   *
   * @return the largest possible minimum value for this field
   */
  def getLargestMinimum: Long = minLargest

  /**
   * Gets the smallest possible maximum value that the field can take.
   * For example, ISO day-of-month runs to between 28 and 31 days, so the
   * smallest maximum is 28.
   *
   * @return the smallest possible maximum value for this field
   */
  def getSmallestMaximum: Long = maxSmallest

  /**
   * Gets the maximum value that the field can take.
   *
   * @return the maximum value for this field
   */
  def getMaximum: Long = maxLargest

  /**
   * Checks if all values in the range fit in an {@code int}, using
   * {@link #getMinimum()} and {@link #getMaximum()}.
   *
   * @return true if a valid value always fits in an {@code int}
   */
  def isIntValue: Boolean = getMinimum >= Integer.MIN_VALUE && getMaximum <= Integer.MAX_VALUE

  /**
   * Checks if the value is within the valid range.
   *
   * @param value the value to check
   * @return true if the value is valid
   */
  def isValidValue(value: Long): Boolean = value >= getMinimum && value <= getMaximum

  /**
   * Checks if the value is within the valid range and that all values
   * in the range fit in an {@code int}. Combines {@link #isIntValue()}
   * and {@link #isValidValue(long)}.
   *
   * @param value the value to check
   * @return true if the value is valid and fits in an {@code int}
   */
  def isValidIntValue(value: Long): Boolean = isIntValue && isValidValue(value)

  /**
   * Checks that the specified value is valid; the field is only used to
   * improve the error message.
   *
   * @param value the value to check
   * @param field the field being checked, may be null
   * @return the value that was passed in
   * @throws DateTimeException if the value is outside the valid range
   * @see #isValidValue(long)
   */
  def checkValidValue(value: Long, field: TemporalField): Long = {
    if (!isValidValue(value)) {
      throw new DateTimeException(genInvalidFieldMessage(field, value))
    }
    value
  }

  /**
   * Checks that the specified value is valid and fits in an {@code int};
   * the field is only used to improve the error message.
   *
   * @param value the value to check
   * @param field the field being checked, may be null
   * @return the value that was passed in
   * @throws DateTimeException if the value is invalid or does not fit in an int
   * @see #isValidIntValue(long)
   */
  def checkValidIntValue(value: Long, field: TemporalField): Int = {
    if (!isValidIntValue(value)) {
      throw new DateTimeException(genInvalidFieldMessage(field, value))
    }
    value.toInt
  }

  /** Builds the error message for an out-of-range value. */
  private def genInvalidFieldMessage(field: TemporalField, value: Long): String = {
    if (field != null) {
      "Invalid value for " + field + " (valid values " + this + "): " + value
    }
    else {
      "Invalid value (valid values " + this + "): " + value
    }
  }

  /**
   * Checks if this range is equal to another range, comparing all four bounds.
   * Only objects of type {@code ValueRange} compare equal; other types return false.
   *
   * @param obj the object to check, null returns false
   * @return true if this is equal to the other range
   */
  override def equals(obj: Any): Boolean = obj match {
    case that: ValueRange =>
      (this eq that) ||
        (minSmallest == that.minSmallest && minLargest == that.minLargest &&
          maxSmallest == that.maxSmallest && maxLargest == that.maxLargest)
    case _ => false
  }

  /**
   * A hash code for this range.
   *
   * @return a suitable hash code
   */
  override def hashCode: Int = {
    // Expression kept verbatim from the JDK; in both Java and Scala the
    // additive operators bind tighter than the shifts, so the grouping
    // (and therefore the hash values) match the original.
    val hash: Long = minSmallest + minLargest << 16 + minLargest >> 48 + maxSmallest << 32 + maxSmallest >> 32 + maxLargest << 48 + maxLargest >> 16
    (hash ^ (hash >>> 32)).toInt
  }

  /**
   * Outputs this range as a {@code String} in the form
   * '{min}/{largestMin} - {smallestMax}/{max}', omitting the largestMin or
   * smallestMax part (and its slash) when it equals the adjacent bound.
   *
   * @return a string representation of this range, not null
   */
  override def toString: String = {
    val buf: StringBuilder = new StringBuilder
    buf.append(minSmallest)
    if (minSmallest != minLargest) {
      buf.append('/').append(minLargest)
    }
    buf.append(" - ").append(maxSmallest)
    if (maxSmallest != maxLargest) {
      buf.append('/').append(maxLargest)
    }
    buf.toString
  }
}
/*
* Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* This file is available under and governed by the GNU General Public
* License version 2 only, as published by the Free Software Foundation.
* However, the following notice accompanied the original version of this
* file:
*
* Copyright (c) 2011-2012, Stephen Colebourne & Michael Nascimento Santos
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of JSR-310 nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
* Localized definitions of the day-of-week, week-of-month and week-of-year fields.
* <p>
* A standard week is seven days long, but cultures have different definitions for some
* other aspects of a week. This class represents the definition of the week, for the
* purpose of providing {@link TemporalField} instances.
* <p>
* WeekFields provides five fields,
* {@link #dayOfWeek()}, {@link #weekOfMonth()}, {@link #weekOfYear()},
* {@link #weekOfWeekBasedYear()}, and {@link #weekBasedYear()}
* that provide access to the values from any {@linkplain Temporal temporal object}.
* <p>
* The computations for day-of-week, week-of-month, and week-of-year are based
* on the {@linkplain ChronoField#YEAR proleptic-year},
* {@linkplain ChronoField#MONTH_OF_YEAR month-of-year},
* {@linkplain ChronoField#DAY_OF_MONTH day-of-month}, and
* {@linkplain ChronoField#DAY_OF_WEEK ISO day-of-week} which are based on the
* {@linkplain ChronoField#EPOCH_DAY epoch-day} and the chronology.
* The values may not be aligned with the {@linkplain ChronoField#YEAR_OF_ERA year-of-Era}
* depending on the Chronology.
* <p>A week is defined by:
* <ul>
* <li>The first day-of-week.
* For example, the ISO-8601 standard considers Monday to be the first day-of-week.
* <li>The minimal number of days in the first week.
* For example, the ISO-8601 standard counts the first week as needing at least 4 days.
* </ul><p>
* Together these two values allow a year or month to be divided into weeks.
* <p>
* <h3>Week of Month</h3>
* One field is used: week-of-month.
* The calculation ensures that weeks never overlap a month boundary.
* The month is divided into periods where each period starts on the defined first day-of-week.
* The earliest period is referred to as week 0 if it has less than the minimal number of days
* and week 1 if it has at least the minimal number of days.
* <p>
* <table cellpadding="0" cellspacing="3" border="0" style="text-align: left; width: 50%;">
* <caption>Examples of WeekFields</caption>
* <tr><th>Date</th><td>Day-of-week</td>
* <td>First day: Monday<br>Minimal days: 4</td><td>First day: Monday<br>Minimal days: 5</td></tr>
* <tr><th>2008-12-31</th><td>Wednesday</td>
* <td>Week 5 of December 2008</td><td>Week 5 of December 2008</td></tr>
* <tr><th>2009-01-01</th><td>Thursday</td>
* <td>Week 1 of January 2009</td><td>Week 0 of January 2009</td></tr>
* <tr><th>2009-01-04</th><td>Sunday</td>
* <td>Week 1 of January 2009</td><td>Week 0 of January 2009</td></tr>
* <tr><th>2009-01-05</th><td>Monday</td>
* <td>Week 2 of January 2009</td><td>Week 1 of January 2009</td></tr>
* </table>
*
* <h3>Week of Year</h3>
* One field is used: week-of-year.
* The calculation ensures that weeks never overlap a year boundary.
* The year is divided into periods where each period starts on the defined first day-of-week.
* The earliest period is referred to as week 0 if it has less than the minimal number of days
* and week 1 if it has at least the minimal number of days.
*
* <h3>Week Based Year</h3>
* Two fields are used for week-based-year, one for the
* {@link #weekOfWeekBasedYear() week-of-week-based-year} and one for
* {@link #weekBasedYear() week-based-year}. In a week-based-year, each week
* belongs to only a single year. Week 1 of a year is the first week that
* starts on the first day-of-week and has at least the minimum number of days.
* The first and last weeks of a year may contain days from the
* previous calendar year or next calendar year respectively.
*
* <table cellpadding="0" cellspacing="3" border="0" style="text-align: left; width: 50%;">
* <caption>Examples of WeekFields for week-based-year</caption>
* <tr><th>Date</th><td>Day-of-week</td>
* <td>First day: Monday<br>Minimal days: 4</td><td>First day: Monday<br>Minimal days: 5</td></tr>
* <tr><th>2008-12-31</th><td>Wednesday</td>
* <td>Week 1 of 2009</td><td>Week 53 of 2008</td></tr>
* <tr><th>2009-01-01</th><td>Thursday</td>
* <td>Week 1 of 2009</td><td>Week 53 of 2008</td></tr>
* <tr><th>2009-01-04</th><td>Sunday</td>
* <td>Week 1 of 2009</td><td>Week 53 of 2008</td></tr>
* <tr><th>2009-01-05</th><td>Monday</td>
* <td>Week 2 of 2009</td><td>Week 1 of 2009</td></tr>
* </table>
*
* @implSpec
* This class is immutable and thread-safe.
*
* @since 1.8
*/
object WeekFields {
/**
 * Obtains an instance of {@code WeekFields} appropriate for a locale.
 * <p>
 * This will look up appropriate values from the provider of localization data.
 *
 * Fix: the converted source reassigned the {@code locale} parameter, which is
 * illegal in Scala (parameters are vals); the normalized locale is now a local.
 *
 * @param locale the locale to use, not null
 * @return the week-definition, not null
 */
def of(locale: Locale): WeekFields = {
  // Reduce to language+country only, matching the original's normalization
  // before querying the calendar data provider.
  val simpleLocale: Locale = new Locale(locale.getLanguage, locale.getCountry)
  val calDow: Int = CalendarDataUtility.retrieveFirstDayOfWeek(simpleLocale)
  // Calendar-style day numbering starts at Sunday=1; convert to DayOfWeek.
  val dow: DayOfWeek = DayOfWeek.SUNDAY.plus(calDow - 1)
  val minDays: Int = CalendarDataUtility.retrieveMinimalDaysInFirstWeek(simpleLocale)
  WeekFields.of(dow, minDays)
}
/**
 * Obtains an instance of {@code WeekFields} from the first day-of-week and minimal days.
 * <p>
 * The first day-of-week defines the ISO {@code DayOfWeek} that is day 1 of the week.
 * The minimal number of days in the first week defines how many days must be present
 * in a month or year, starting from the first day-of-week, before the week is counted
 * as the first week. A value of 1 will count the first day of the month or year as part
 * of the first week, whereas a value of 7 will require the whole seven days to be in
 * the new month or year.
 * <p>
 * WeekFields instances are singletons; for each unique combination of
 * {@code firstDayOfWeek} and {@code minimalDaysInFirstWeek} the same
 * instance will be returned.
 *
 * @param firstDayOfWeek the first day of the week, not null
 * @param minimalDaysInFirstWeek the minimal number of days in the first week, from 1 to 7
 * @return the week-definition, not null
 * @throws IllegalArgumentException if the minimal days value is less than one
 *         or greater than 7
 */
def of(firstDayOfWeek: DayOfWeek, minimalDaysInFirstWeek: Int): WeekFields = {
  val key: String = firstDayOfWeek.toString + minimalDaysInFirstWeek
  val cached: WeekFields = CACHE.get(key)
  if (cached != null) {
    cached
  }
  else {
    // Racy by design: putIfAbsent guarantees every caller observes the same
    // canonical instance even if two threads construct one concurrently.
    CACHE.putIfAbsent(key, new WeekFields(firstDayOfWeek, minimalDaysInFirstWeek))
    CACHE.get(key)
  }
}
/**
 * The cache of rules keyed by firstDayOfWeek.toString + minimalDays.
 * Initialized first (before ISO / SUNDAY_START below) so it is available
 * for their definitions.
 *
 * Fix: the converted source declared this as {@code Nothing} and called
 * {@code new Nothing(...)}, which cannot compile; the JDK original is a
 * {@code ConcurrentMap<String, WeekFields>} backed by a ConcurrentHashMap
 * with the same (initialCapacity, loadFactor, concurrencyLevel) arguments.
 * Fully qualified because the file's import block is outside this chunk.
 */
private final val CACHE: java.util.concurrent.ConcurrentMap[String, WeekFields] = new java.util.concurrent.ConcurrentHashMap[String, WeekFields](4, 0.75f, 2)
/**
 * The ISO-8601 definition, where a week starts on Monday and the first week
 * has a minimum of 4 days.
 * <p>
 * The ISO-8601 standard defines a calendar system based on weeks.
 * It uses the week-based-year and week-of-week-based-year concepts to split
 * up the passage of days instead of the standard year/month/day.
 * <p>
 * Note that the first week may start in the previous calendar year.
 * Note also that the first few days of a calendar year may be in the
 * week-based-year corresponding to the previous calendar year.
 */
final val ISO: WeekFields = new WeekFields(DayOfWeek.MONDAY, 4)
/**
 * The common definition of a week that starts on Sunday and the first week
 * has a minimum of 1 day.
 * <p>
 * Defined as starting on Sunday and with a minimum of 1 day in the month.
 * This week definition is in use in the US and several other countries.
 */
final val SUNDAY_START: WeekFields = WeekFields.of(DayOfWeek.SUNDAY, 1)
/**
 * The unit that represents week-based-years for the purpose of addition and subtraction.
 * <p>
 * This allows a number of week-based-years to be added to, or subtracted from, a date.
 * The unit is equal to either 52 or 53 weeks.
 * The estimated duration of a week-based-year is the same as that of a standard ISO
 * year at {@code 365.2425 Days}.
 * <p>
 * The rules for addition add the number of week-based-years to the existing value
 * for the week-based-year field retaining the week-of-week-based-year
 * and day-of-week, unless the week number is too large for the target year.
 * In that case, the week is set to the last week of the year
 * with the same day-of-week.
 * <p>
 * This unit is an immutable and thread-safe singleton.
 */
final val WEEK_BASED_YEARS: TemporalUnit = IsoFields.WEEK_BASED_YEARS
/**
 * Factory for the field implementation that computes DayOfWeek, WeekOfMonth,
 * and WeekOfYear based on a WeekFields definition.
 * A separate field instance is required for each distinct WeekFields, i.e.
 * each combination of start-of-week and minimum number of days.
 */
private[temporal] object ComputedDayOfField {
  /** Valid localized day-of-week values: 1 to 7. */
  private final val DAY_OF_WEEK_RANGE: ValueRange = ValueRange.of(1, 7)
  /** Week-of-month: week 0 possible, smallest max 4, largest max 6. */
  private final val WEEK_OF_MONTH_RANGE: ValueRange = ValueRange.of(0, 1, 4, 6)
  /** Week-of-year: week 0 possible, smallest max 52, largest max 54. */
  private final val WEEK_OF_YEAR_RANGE: ValueRange = ValueRange.of(0, 1, 52, 54)

  /**
   * Returns a field to access the day of week, computed based on a WeekFields.
   * <p>
   * The week definition's first day of the week is used with the ISO
   * DAY_OF_WEEK field to compute week boundaries.
   */
  private[temporal] def ofDayOfWeekField(weekDef: WeekFields): WeekFields.ComputedDayOfField =
    new ComputedDayOfField("DayOfWeek", weekDef, DAYS, WEEKS, DAY_OF_WEEK_RANGE)

  /**
   * Returns a field to access the week of month, computed based on a WeekFields.
   * @see WeekFields#weekOfMonth()
   */
  private[temporal] def ofWeekOfMonthField(weekDef: WeekFields): WeekFields.ComputedDayOfField =
    new ComputedDayOfField("WeekOfMonth", weekDef, WEEKS, MONTHS, WEEK_OF_MONTH_RANGE)

  /**
   * Returns a field to access the week of year, computed based on a WeekFields.
   * @see WeekFields#weekOfYear()
   */
  private[temporal] def ofWeekOfYearField(weekDef: WeekFields): WeekFields.ComputedDayOfField =
    new ComputedDayOfField("WeekOfYear", weekDef, WEEKS, YEARS, WEEK_OF_YEAR_RANGE)

  /**
   * Returns a field to access the week of week-based-year, computed based on a WeekFields.
   * @see WeekFields#weekOfWeekBasedYear()
   */
  private[temporal] def ofWeekOfWeekBasedYearField(weekDef: WeekFields): WeekFields.ComputedDayOfField =
    new ComputedDayOfField("WeekOfWeekBasedYear", weekDef, WEEKS, IsoFields.WEEK_BASED_YEARS, WEEK_OF_YEAR_RANGE)

  /**
   * Returns a field to access the week-based-year itself, computed based on a WeekFields.
   * @see WeekFields#weekBasedYear()
   */
  private[temporal] def ofWeekBasedYearField(weekDef: WeekFields): WeekFields.ComputedDayOfField =
    new ComputedDayOfField("WeekBasedYear", weekDef, IsoFields.WEEK_BASED_YEARS, FOREVER, ChronoField.YEAR.range)
}
private[temporal] class ComputedDayOfField extends TemporalField {
/**
 * Return a new week-based-year date of the Chronology, year, week-of-year,
 * and day of week.
 *
 * Fix: the converted source reassigned the {@code wowby} parameter
 * (illegal in Scala); the clamped value is now a local.
 *
 * @param chrono The chronology of the new date
 * @param yowby the year of the week-based-year
 * @param wowby the week of the week-based-year
 * @param dow the day of the week
 * @return a ChronoLocalDate for the requested year, week of year, and day of week
 */
private def ofWeekBasedYear(chrono: Chronology, yowby: Int, wowby: Int, dow: Int): ChronoLocalDate = {
  val date: ChronoLocalDate = chrono.date(yowby, 1, 1)
  val ldow: Int = localizedDayOfWeek(date)
  val offset: Int = startOfWeekOffset(1, ldow)
  val yearLen: Int = date.lengthOfYear
  val newYearWeek: Int = computeWeek(offset, yearLen + weekDef.getMinimalDaysInFirstWeek)
  // Clamp the requested week so the result stays within the target week-based-year.
  val clampedWowby: Int = Math.min(wowby, newYearWeek - 1)
  val days: Int = -offset + (dow - 1) + (clampedWowby - 1) * 7
  date.plus(days, DAYS)
}
// Sole constructor: captures the field's display name, its owning week
// definition, the unit the field is based on, the unit the field is bound
// by, and its valid value range.
// NOTE(review): this assigns to members declared elsewhere in the class
// (outside this chunk) and, as an auxiliary constructor, lacks the required
// delegation to another constructor — confirm against the field declarations
// when restructuring the class.
private def this(name: String, weekDef: WeekFields, baseUnit: TemporalUnit, rangeUnit: TemporalUnit, range: ValueRange) {
  this.name = name
  this.weekDef = weekDef
  this.baseUnit = baseUnit
  this.rangeUnit = rangeUnit
  this.range = range
}
/**
 * Extracts this field's value from the given temporal, dispatching on the
 * range unit this field was constructed with (WEEKS -> day-of-week,
 * MONTHS -> week-of-month, YEARS -> week-of-year, WEEK_BASED_YEARS ->
 * week-of-week-based-year, FOREVER -> week-based-year).
 */
def getFrom(temporal: TemporalAccessor): Long = {
  rangeUnit match {
    case u if u eq WEEKS            => localizedDayOfWeek(temporal)
    case u if u eq MONTHS           => localizedWeekOfMonth(temporal)
    case u if u eq YEARS            => localizedWeekOfYear(temporal)
    case u if u eq WEEK_BASED_YEARS => localizedWeekOfWeekBasedYear(temporal)
    case u if u eq FOREVER          => localizedWeekBasedYear(temporal)
    case _ =>
      throw new IllegalStateException("unreachable, rangeUnit: " + rangeUnit + ", this: " + this)
  }
}
/**
 * Computes the day-of-week renumbered so the configured first day of the
 * week maps to 1; reads the temporal's ISO DAY_OF_WEEK.
 */
private def localizedDayOfWeek(temporal: TemporalAccessor): Int = {
  val firstDay: Int = weekDef.getFirstDayOfWeek.getValue
  Math.floorMod(temporal.get(DAY_OF_WEEK) - firstDay, 7) + 1
}
private def localizedDayOfWeek(isoDow: Int): Int = {
val sow: Int = weekDef.getFirstDayOfWeek.getValue
Math.floorMod(isoDow - sow, 7) + 1
}
private def localizedWeekOfMonth(temporal: TemporalAccessor): Long = {
val dow: Int = localizedDayOfWeek(temporal)
val dom: Int = temporal.get(DAY_OF_MONTH)
val offset: Int = startOfWeekOffset(dom, dow)
computeWeek(offset, dom)
}
private def localizedWeekOfYear(temporal: TemporalAccessor): Long = {
val dow: Int = localizedDayOfWeek(temporal)
val doy: Int = temporal.get(DAY_OF_YEAR)
val offset: Int = startOfWeekOffset(doy, dow)
computeWeek(offset, doy)
}
/**
* Returns the year of week-based-year for the temporal.
* The year can be the previous year, the current year, or the next year.
* @param temporal a date of any chronology, not null
* @return the year of week-based-year for the date
*/
private def localizedWeekBasedYear(temporal: TemporalAccessor): Int = {
val dow: Int = localizedDayOfWeek(temporal)
val year: Int = temporal.get(YEAR)
val doy: Int = temporal.get(DAY_OF_YEAR)
val offset: Int = startOfWeekOffset(doy, dow)
val week: Int = computeWeek(offset, doy)
if (week == 0) {
year - 1
}
else {
val dayRange: ValueRange = temporal.range(DAY_OF_YEAR)
val yearLen: Int = dayRange.getMaximum.asInstanceOf[Int]
val newYearWeek: Int = computeWeek(offset, yearLen + weekDef.getMinimalDaysInFirstWeek)
if (week >= newYearWeek) {
year + 1
}
}
year
}
/**
* Returns the week of week-based-year for the temporal.
* The week can be part of the previous year, the current year,
* or the next year depending on the week start and minimum number
* of days.
* @param temporal a date of any chronology
* @return the week of the year
* @see #localizedWeekBasedYear(java.time.temporal.TemporalAccessor)
*/
private def localizedWeekOfWeekBasedYear(temporal: TemporalAccessor): Int = {
val dow: Int = localizedDayOfWeek(temporal)
val doy: Int = temporal.get(DAY_OF_YEAR)
val offset: Int = startOfWeekOffset(doy, dow)
var week: Int = computeWeek(offset, doy)
if (week == 0) {
var date: ChronoLocalDate = Chronology.from(temporal).date(temporal)
date = date.minus(doy, DAYS)
localizedWeekOfWeekBasedYear(date)
}
else if (week > 50) {
val dayRange: ValueRange = temporal.range(DAY_OF_YEAR)
val yearLen: Int = dayRange.getMaximum.asInstanceOf[Int]
val newYearWeek: Int = computeWeek(offset, yearLen + weekDef.getMinimalDaysInFirstWeek)
if (week >= newYearWeek) {
week = week - newYearWeek + 1
}
}
week
}
/**
* Returns an offset to align week start with a day of month or day of year.
*
* @param day the day; 1 through infinity
* @param dow the day of the week of that day; 1 through 7
* @return an offset in days to align a day with the start of the first 'full' week
*/
private def startOfWeekOffset(day: Int, dow: Int): Int = {
val weekStart: Int = Math.floorMod(day - dow, 7)
var offset: Int = -weekStart
if (weekStart + 1 > weekDef.getMinimalDaysInFirstWeek) {
offset = 7 - weekStart
}
offset
}
/**
* Returns the week number computed from the reference day and reference dayOfWeek.
*
* @param offset the offset to align a date with the start of week
* from { @link #startOfWeekOffset}.
* @param day the day for which to compute the week number
* @return the week number where zero is used for a partial week and 1 for the first full week
*/
private def computeWeek(offset: Int, day: Int): Int = {
((7 + offset + (day - 1)) / 7)
}
def adjustInto(temporal: R, newValue: Long): R = {
val newVal: Int = range.checkValidIntValue(newValue, this)
val currentVal: Int = temporal.get(this)
if (newVal == currentVal) {
temporal
}
if (rangeUnit eq FOREVER) {
val idow: Int = temporal.get(weekDef.dayOfWeek)
val wowby: Int = temporal.get(weekDef.weekOfWeekBasedYear)
ofWeekBasedYear(Chronology.from(temporal), newValue.asInstanceOf[Int], wowby, idow).asInstanceOf[R]
}
else {
temporal.plus(newVal - currentVal, baseUnit).asInstanceOf[R]
}
}
override def resolve(fieldValues: Nothing, chronology: Chronology, zone: Nothing, resolverStyle: Nothing): ChronoLocalDate = {
val value: Long = fieldValues.get(this)
val newValue: Int = Math.toIntExact(value)
if (rangeUnit eq WEEKS) {
val checkedValue: Int = range.checkValidIntValue(value, this)
val startDow: Int = weekDef.getFirstDayOfWeek.getValue
val isoDow: Long = Math.floorMod((startDow - 1) + (checkedValue - 1), 7) + 1
fieldValues.remove(this)
fieldValues.put(DAY_OF_WEEK, isoDow)
null
}
if (fieldValues.containsKey(DAY_OF_WEEK) eq false) {
null
}
val isoDow: Int = DAY_OF_WEEK.checkValidIntValue(fieldValues.get(DAY_OF_WEEK))
val dow: Int = localizedDayOfWeek(isoDow)
if (fieldValues.containsKey(YEAR)) {
val year: Int = YEAR.checkValidIntValue(fieldValues.get(YEAR))
if (rangeUnit eq MONTHS && fieldValues.containsKey(MONTH_OF_YEAR)) {
val month: Long = fieldValues.get(MONTH_OF_YEAR)
resolveWoM(fieldValues, chronology, year, month, newValue, dow, resolverStyle)
}
if (rangeUnit eq YEARS) {
resolveWoY(fieldValues, chronology, year, newValue, dow, resolverStyle)
}
}
else if ((rangeUnit eq WEEK_BASED_YEARS || rangeUnit eq FOREVER) && fieldValues.containsKey(weekDef.weekBasedYear) && fieldValues.containsKey(weekDef.weekOfWeekBasedYear)) {
resolveWBY(fieldValues, chronology, dow, resolverStyle)
}
null
}
private def resolveWoM(fieldValues: Nothing, chrono: Chronology, year: Int, month: Long, wom: Long, localDow: Int, resolverStyle: Nothing): ChronoLocalDate = {
var date: ChronoLocalDate = null
if (resolverStyle eq ResolverStyle.LENIENT) {
date = chrono.date(year, 1, 1).plus(Math.subtractExact(month, 1), MONTHS)
val weeks: Long = Math.subtractExact(wom, localizedWeekOfMonth(date))
val days: Int = localDow - localizedDayOfWeek(date)
date = date.plus(Math.addExact(Math.multiplyExact(weeks, 7), days), DAYS)
}
else {
val monthValid: Int = MONTH_OF_YEAR.checkValidIntValue(month)
date = chrono.date(year, monthValid, 1)
val womInt: Int = range.checkValidIntValue(wom, this)
val weeks: Int = (womInt - localizedWeekOfMonth(date)).asInstanceOf[Int]
val days: Int = localDow - localizedDayOfWeek(date)
date = date.plus(weeks * 7 + days, DAYS)
if (resolverStyle eq ResolverStyle.STRICT && date.getLong(MONTH_OF_YEAR) != month) {
throw new Nothing("Strict mode rejected resolved date as it is in a different month")
}
}
fieldValues.remove(this)
fieldValues.remove(YEAR)
fieldValues.remove(MONTH_OF_YEAR)
fieldValues.remove(DAY_OF_WEEK)
date
}
private def resolveWoY(fieldValues: Nothing, chrono: Chronology, year: Int, woy: Long, localDow: Int, resolverStyle: Nothing): ChronoLocalDate = {
var date: ChronoLocalDate = chrono.date(year, 1, 1)
if (resolverStyle eq ResolverStyle.LENIENT) {
val weeks: Long = Math.subtractExact(woy, localizedWeekOfYear(date))
val days: Int = localDow - localizedDayOfWeek(date)
date = date.plus(Math.addExact(Math.multiplyExact(weeks, 7), days), DAYS)
}
else {
val womInt: Int = range.checkValidIntValue(woy, this)
val weeks: Int = (womInt - localizedWeekOfYear(date)).asInstanceOf[Int]
val days: Int = localDow - localizedDayOfWeek(date)
date = date.plus(weeks * 7 + days, DAYS)
if (resolverStyle eq ResolverStyle.STRICT && date.getLong(YEAR) != year) {
throw new Nothing("Strict mode rejected resolved date as it is in a different year")
}
}
fieldValues.remove(this)
fieldValues.remove(YEAR)
fieldValues.remove(DAY_OF_WEEK)
date
}
private def resolveWBY(fieldValues: Nothing, chrono: Chronology, localDow: Int, resolverStyle: Nothing): ChronoLocalDate = {
val yowby: Int = weekDef.weekBasedYear.range.checkValidIntValue(fieldValues.get(weekDef.weekBasedYear), weekDef.weekBasedYear)
var date: ChronoLocalDate = null
if (resolverStyle eq ResolverStyle.LENIENT) {
date = ofWeekBasedYear(chrono, yowby, 1, localDow)
val wowby: Long = fieldValues.get(weekDef.weekOfWeekBasedYear)
val weeks: Long = Math.subtractExact(wowby, 1)
date = date.plus(weeks, WEEKS)
}
else {
val wowby: Int = weekDef.weekOfWeekBasedYear.range.checkValidIntValue(fieldValues.get(weekDef.weekOfWeekBasedYear), weekDef.weekOfWeekBasedYear)
date = ofWeekBasedYear(chrono, yowby, wowby, localDow)
if (resolverStyle eq ResolverStyle.STRICT && localizedWeekBasedYear(date) != yowby) {
throw new Nothing("Strict mode rejected resolved date as it is in a different week-based-year")
}
}
fieldValues.remove(this)
fieldValues.remove(weekDef.weekBasedYear)
fieldValues.remove(weekDef.weekOfWeekBasedYear)
fieldValues.remove(DAY_OF_WEEK)
date
}
override def getDisplayName(locale: Locale): String = {
if (rangeUnit eq YEARS) {
val lr: LocaleResources = LocaleProviderAdapter.getResourceBundleBased.getLocaleResources(locale)
val rb: ResourceBundle = lr.getJavaTimeFormatData
if (rb.containsKey("field.week")) rb.getString("field.week") else name
}
name
}
def getBaseUnit: TemporalUnit = {
baseUnit
}
def getRangeUnit: TemporalUnit = {
rangeUnit
}
def isDateBased: Boolean = {
true
}
def isTimeBased: Boolean = {
false
}
def range: ValueRange = {
range
}
def isSupportedBy(temporal: TemporalAccessor): Boolean = {
if (temporal.isSupported(DAY_OF_WEEK)) {
if (rangeUnit eq WEEKS) {
true
}
else if (rangeUnit eq MONTHS) {
temporal.isSupported(DAY_OF_MONTH)
}
else if (rangeUnit eq YEARS) {
temporal.isSupported(DAY_OF_YEAR)
}
else if (rangeUnit eq WEEK_BASED_YEARS) {
temporal.isSupported(DAY_OF_YEAR)
}
else if (rangeUnit eq FOREVER) {
temporal.isSupported(YEAR)
}
}
false
}
def rangeRefinedBy(temporal: TemporalAccessor): ValueRange = {
if (rangeUnit eq ChronoUnit.WEEKS) {
range
}
else if (rangeUnit eq MONTHS) {
rangeByWeek(temporal, DAY_OF_MONTH)
}
else if (rangeUnit eq YEARS) {
rangeByWeek(temporal, DAY_OF_YEAR)
}
else if (rangeUnit eq WEEK_BASED_YEARS) {
rangeWeekOfWeekBasedYear(temporal)
}
else if (rangeUnit eq FOREVER) {
YEAR.range
}
else {
throw new IllegalStateException("unreachable, rangeUnit: " + rangeUnit + ", this: " + this)
}
}
/**
* Map the field range to a week range
* @param temporal the temporal
* @param field the field to get the range of
* @return the ValueRange with the range adjusted to weeks.
*/
private def rangeByWeek(temporal: TemporalAccessor, field: TemporalField): ValueRange = {
val dow: Int = localizedDayOfWeek(temporal)
val offset: Int = startOfWeekOffset(temporal.get(field), dow)
val fieldRange: ValueRange = temporal.range(field)
ValueRange.of(computeWeek(offset, fieldRange.getMinimum.asInstanceOf[Int]), computeWeek(offset, fieldRange.getMaximum.asInstanceOf[Int]))
}
/**
* Map the field range to a week range of a week year.
* @param temporal the temporal
* @return the ValueRange with the range adjusted to weeks.
*/
private def rangeWeekOfWeekBasedYear(temporal: TemporalAccessor): ValueRange = {
if (!temporal.isSupported(DAY_OF_YEAR)) {
WEEK_OF_YEAR_RANGE
}
val dow: Int = localizedDayOfWeek(temporal)
val doy: Int = temporal.get(DAY_OF_YEAR)
val offset: Int = startOfWeekOffset(doy, dow)
val week: Int = computeWeek(offset, doy)
if (week == 0) {
var date: ChronoLocalDate = Chronology.from(temporal).date(temporal)
date = date.minus(doy + 7, DAYS)
rangeWeekOfWeekBasedYear(date)
}
val dayRange: ValueRange = temporal.range(DAY_OF_YEAR)
val yearLen: Int = dayRange.getMaximum.asInstanceOf[Int]
val newYearWeek: Int = computeWeek(offset, yearLen + weekDef.getMinimalDaysInFirstWeek)
if (week >= newYearWeek) {
var date: ChronoLocalDate = Chronology.from(temporal).date(temporal)
date = date.plus(yearLen - doy + 1 + 7, ChronoUnit.DAYS)
rangeWeekOfWeekBasedYear(date)
}
ValueRange.of(1, newYearWeek - 1)
}
override def toString: String = {
name + "[" + weekDef.toString + "]"
}
private final val name: String = null
private final val weekDef: WeekFields = null
private final val baseUnit: TemporalUnit = null
private final val rangeUnit: TemporalUnit = null
private final val range: ValueRange = null
}
}
final class WeekFields extends Serializable {
/**
* Creates an instance of the definition.
*
* @param firstDayOfWeek the first day of the week, not null
* @param minimalDaysInFirstWeek the minimal number of days in the first week, from 1 to 7
* @throws IllegalArgumentException if the minimal days value is invalid
*/
private def this(firstDayOfWeek: DayOfWeek, minimalDaysInFirstWeek: Int) {
if (minimalDaysInFirstWeek < 1 || minimalDaysInFirstWeek > 7) {
throw new IllegalArgumentException("Minimal number of days is invalid")
}
this.firstDayOfWeek = firstDayOfWeek
this.minimalDays = minimalDaysInFirstWeek
}
/**
* Return the singleton WeekFields associated with the
* {@code firstDayOfWeek} and {@code minimalDays}.
* @return the singleton WeekFields for the firstDayOfWeek and minimalDays.
* @throws InvalidObjectException if the serialized object has invalid
* values for firstDayOfWeek or minimalDays.
*/
private def readResolve: AnyRef = {
try {
WeekFields.of(firstDayOfWeek, minimalDays)
}
catch {
case iae: IllegalArgumentException => {
throw new Nothing("Invalid serialized WeekFields: " + iae.getMessage)
}
}
}
/**
* Gets the first day-of-week.
* <p>
* The first day-of-week varies by culture.
* For example, the US uses Sunday, while France and the ISO-8601 standard use Monday.
* This method returns the first day using the standard {@code DayOfWeek} enum.
*
* @return the first day-of-week, not null
*/
def getFirstDayOfWeek: DayOfWeek = {
firstDayOfWeek
}
/**
* Gets the minimal number of days in the first week.
* <p>
* The number of days considered to define the first week of a month or year
* varies by culture.
* For example, the ISO-8601 requires 4 days (more than half a week) to
* be present before counting the first week.
*
* @return the minimal number of days in the first week of a month or year, from 1 to 7
*/
def getMinimalDaysInFirstWeek: Int = {
minimalDays
}
/**
* Returns a field to access the day of week based on this {@code WeekFields}.
* <p>
* This is similar to {@link ChronoField#DAY_OF_WEEK} but uses values for
* the day-of-week based on this {@code WeekFields}.
* The days are numbered from 1 to 7 where the
* {@link #getFirstDayOfWeek() first day-of-week} is assigned the value 1.
* <p>
* For example, if the first day-of-week is Sunday, then that will have the
* value 1, with other days ranging from Monday as 2 to Saturday as 7.
* <p>
* In the resolving phase of parsing, a localized day-of-week will be converted
* to a standardized {@code ChronoField} day-of-week.
* The day-of-week must be in the valid range 1 to 7.
* Other fields in this class build dates using the standardized day-of-week.
*
* @return a field providing access to the day-of-week with localized numbering, not null
*/
def dayOfWeek: TemporalField = {
dayOfWeek
}
/**
* Returns a field to access the week of month based on this {@code WeekFields}.
* <p>
* This represents the concept of the count of weeks within the month where weeks
* start on a fixed day-of-week, such as Monday.
* This field is typically used with {@link WeekFields#dayOfWeek()}.
* <p>
* Week one (1) is the week starting on the {@link WeekFields#getFirstDayOfWeek}
* where there are at least {@link WeekFields#getMinimalDaysInFirstWeek()} days in the month.
* Thus, week one may start up to {@code minDays} days before the start of the month.
* If the first week starts after the start of the month then the period before is week zero (0).
* <p>
* For example:<br>
* - if the 1st day of the month is a Monday, week one starts on the 1st and there is no week zero<br>
* - if the 2nd day of the month is a Monday, week one starts on the 2nd and the 1st is in week zero<br>
* - if the 4th day of the month is a Monday, week one starts on the 4th and the 1st to 3rd is in week zero<br>
* - if the 5th day of the month is a Monday, week two starts on the 5th and the 1st to 4th is in week one<br>
* <p>
* This field can be used with any calendar system.
* <p>
* In the resolving phase of parsing, a date can be created from a year,
* week-of-month, month-of-year and day-of-week.
* <p>
* In {@linkplain ResolverStyle#STRICT strict mode}, all four fields are
* validated against their range of valid values. The week-of-month field
* is validated to ensure that the resulting month is the month requested.
* <p>
* In {@linkplain ResolverStyle#SMART smart mode}, all four fields are
* validated against their range of valid values. The week-of-month field
* is validated from 0 to 6, meaning that the resulting date can be in a
* different month to that specified.
* <p>
* In {@linkplain ResolverStyle#LENIENT lenient mode}, the year and day-of-week
* are validated against the range of valid values. The resulting date is calculated
* equivalent to the following four stage approach.
* First, create a date on the first day of the first week of January in the requested year.
* Then take the month-of-year, subtract one, and add the amount in months to the date.
* Then take the week-of-month, subtract one, and add the amount in weeks to the date.
* Finally, adjust to the correct day-of-week within the localized week.
*
* @return a field providing access to the week-of-month, not null
*/
def weekOfMonth: TemporalField = {
weekOfMonth
}
/**
* Returns a field to access the week of year based on this {@code WeekFields}.
* <p>
* This represents the concept of the count of weeks within the year where weeks
* start on a fixed day-of-week, such as Monday.
* This field is typically used with {@link WeekFields#dayOfWeek()}.
* <p>
* Week one(1) is the week starting on the {@link WeekFields#getFirstDayOfWeek}
* where there are at least {@link WeekFields#getMinimalDaysInFirstWeek()} days in the year.
* Thus, week one may start up to {@code minDays} days before the start of the year.
* If the first week starts after the start of the year then the period before is week zero (0).
* <p>
* For example:<br>
* - if the 1st day of the year is a Monday, week one starts on the 1st and there is no week zero<br>
* - if the 2nd day of the year is a Monday, week one starts on the 2nd and the 1st is in week zero<br>
* - if the 4th day of the year is a Monday, week one starts on the 4th and the 1st to 3rd is in week zero<br>
* - if the 5th day of the year is a Monday, week two starts on the 5th and the 1st to 4th is in week one<br>
* <p>
* This field can be used with any calendar system.
* <p>
* In the resolving phase of parsing, a date can be created from a year,
* week-of-year and day-of-week.
* <p>
* In {@linkplain ResolverStyle#STRICT strict mode}, all three fields are
* validated against their range of valid values. The week-of-year field
* is validated to ensure that the resulting year is the year requested.
* <p>
* In {@linkplain ResolverStyle#SMART smart mode}, all three fields are
* validated against their range of valid values. The week-of-year field
* is validated from 0 to 54, meaning that the resulting date can be in a
* different year to that specified.
* <p>
* In {@linkplain ResolverStyle#LENIENT lenient mode}, the year and day-of-week
* are validated against the range of valid values. The resulting date is calculated
* equivalent to the following three stage approach.
* First, create a date on the first day of the first week in the requested year.
* Then take the week-of-year, subtract one, and add the amount in weeks to the date.
* Finally, adjust to the correct day-of-week within the localized week.
*
* @return a field providing access to the week-of-year, not null
*/
def weekOfYear: TemporalField = {
weekOfYear
}
/**
* Returns a field to access the week of a week-based-year based on this {@code WeekFields}.
* <p>
* This represents the concept of the count of weeks within the year where weeks
* start on a fixed day-of-week, such as Monday and each week belongs to exactly one year.
* This field is typically used with {@link WeekFields#dayOfWeek()} and
* {@link WeekFields#weekBasedYear()}.
* <p>
* Week one(1) is the week starting on the {@link WeekFields#getFirstDayOfWeek}
* where there are at least {@link WeekFields#getMinimalDaysInFirstWeek()} days in the year.
* If the first week starts after the start of the year then the period before
* is in the last week of the previous year.
* <p>
* For example:<br>
* - if the 1st day of the year is a Monday, week one starts on the 1st<br>
* - if the 2nd day of the year is a Monday, week one starts on the 2nd and
* the 1st is in the last week of the previous year<br>
* - if the 4th day of the year is a Monday, week one starts on the 4th and
* the 1st to 3rd is in the last week of the previous year<br>
* - if the 5th day of the year is a Monday, week two starts on the 5th and
* the 1st to 4th is in week one<br>
* <p>
* This field can be used with any calendar system.
* <p>
* In the resolving phase of parsing, a date can be created from a week-based-year,
* week-of-year and day-of-week.
* <p>
* In {@linkplain ResolverStyle#STRICT strict mode}, all three fields are
* validated against their range of valid values. The week-of-year field
* is validated to ensure that the resulting week-based-year is the
* week-based-year requested.
* <p>
* In {@linkplain ResolverStyle#SMART smart mode}, all three fields are
* validated against their range of valid values. The week-of-week-based-year field
* is validated from 1 to 53, meaning that the resulting date can be in the
* following week-based-year to that specified.
* <p>
* In {@linkplain ResolverStyle#LENIENT lenient mode}, the year and day-of-week
* are validated against the range of valid values. The resulting date is calculated
* equivalent to the following three stage approach.
* First, create a date on the first day of the first week in the requested week-based-year.
* Then take the week-of-week-based-year, subtract one, and add the amount in weeks to the date.
* Finally, adjust to the correct day-of-week within the localized week.
*
* @return a field providing access to the week-of-week-based-year, not null
*/
def weekOfWeekBasedYear: TemporalField = {
weekOfWeekBasedYear
}
/**
* Returns a field to access the year of a week-based-year based on this {@code WeekFields}.
* <p>
* This represents the concept of the year where weeks start on a fixed day-of-week,
* such as Monday and each week belongs to exactly one year.
* This field is typically used with {@link WeekFields#dayOfWeek()} and
* {@link WeekFields#weekOfWeekBasedYear()}.
* <p>
* Week one(1) is the week starting on the {@link WeekFields#getFirstDayOfWeek}
* where there are at least {@link WeekFields#getMinimalDaysInFirstWeek()} days in the year.
* Thus, week one may start before the start of the year.
* If the first week starts after the start of the year then the period before
* is in the last week of the previous year.
* <p>
* This field can be used with any calendar system.
* <p>
* In the resolving phase of parsing, a date can be created from a week-based-year,
* week-of-year and day-of-week.
* <p>
* In {@linkplain ResolverStyle#STRICT strict mode}, all three fields are
* validated against their range of valid values. The week-of-year field
* is validated to ensure that the resulting week-based-year is the
* week-based-year requested.
* <p>
* In {@linkplain ResolverStyle#SMART smart mode}, all three fields are
* validated against their range of valid values. The week-of-week-based-year field
* is validated from 1 to 53, meaning that the resulting date can be in the
* following week-based-year to that specified.
* <p>
* In {@linkplain ResolverStyle#LENIENT lenient mode}, the year and day-of-week
* are validated against the range of valid values. The resulting date is calculated
* equivalent to the following three stage approach.
* First, create a date on the first day of the first week in the requested week-based-year.
* Then take the week-of-week-based-year, subtract one, and add the amount in weeks to the date.
* Finally, adjust to the correct day-of-week within the localized week.
*
* @return a field providing access to the week-based-year, not null
*/
def weekBasedYear: TemporalField = {
weekBasedYear
}
/**
* Checks if this {@code WeekFields} is equal to the specified object.
* <p>
* The comparison is based on the entire state of the rules, which is
* the first day-of-week and minimal days.
*
* @param object the other rules to compare to, null returns false
* @return true if this is equal to the specified rules
*/
override def equals(`object`: AnyRef): Boolean = {
if (this eq `object`) {
true
}
if (`object`.isInstanceOf[WeekFields]) {
hashCode == `object`.hashCode
}
false
}
/**
* A hash code for this {@code WeekFields}.
*
* @return a suitable hash code
*/
override def hashCode: Int = {
firstDayOfWeek.ordinal * 7 + minimalDays
}
/**
* A string representation of this {@code WeekFields} instance.
*
* @return the string representation, not null
*/
override def toString: String = {
"WeekFields[" + firstDayOfWeek + ',' + minimalDays + ']'
}
/**
* The first day-of-week.
*/
private final val firstDayOfWeek: DayOfWeek = null
/**
* The minimal number of days in the first week.
*/
private final val minimalDays: Int = 0
/**
* The field used to access the computed DayOfWeek.
*/
@transient
private final val dayOfWeek: TemporalField = ComputedDayOfField.ofDayOfWeekField(this)
/**
* The field used to access the computed WeekOfMonth.
*/
@transient
private final val weekOfMonth: TemporalField = ComputedDayOfField.ofWeekOfMonthField(this)
/**
* The field used to access the computed WeekOfYear.
*/
@transient
private final val weekOfYear: TemporalField = ComputedDayOfField.ofWeekOfYearField(this)
/**
* The field that represents the week-of-week-based-year.
* <p>
* This field allows the week of the week-based-year value to be queried and set.
* <p>
* This unit is an immutable and thread-safe singleton.
*/
@transient
private final val weekOfWeekBasedYear: TemporalField = ComputedDayOfField.ofWeekOfWeekBasedYearField(this)
/**
* The field that represents the week-based-year.
* <p>
* This field allows the week-based-year value to be queried and set.
* <p>
* This unit is an immutable and thread-safe singleton.
*/
@transient
private final val weekBasedYear: TemporalField = ComputedDayOfField.ofWeekBasedYearField(this)
}
/*
* Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* This file is available under and governed by the GNU General Public
* License version 2 only, as published by the Free Software Foundation.
* However, the following notice accompanied the original version of this
* file:
*
* Copyright (c) 2012, Stephen Colebourne & Michael Nascimento Santos
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of JSR-310 nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
* <p>
* Access to date and time using fields and units, and date time adjusters.
* </p>
* <p>
* This package expands on the base package to provide additional functionality for
* more powerful use cases. Support is included for:
* </p>
* <ul>
* <li>Units of date-time, such as years, months, days and hours</li>
* <li>Fields of date-time, such as month-of-year, day-of-week or hour-of-day</li>
* <li>Date-time adjustment functions</li>
* <li>Different definitions of weeks</li>
* </ul>
*
* <h3>Fields and Units</h3>
* <p>
* Dates and times are expressed in terms of fields and units.
* A unit is used to measure an amount of time, such as years, days or minutes.
* All units implement {@link java.time.temporal.TemporalUnit}.
* The set of well known units is defined in {@link java.time.temporal.ChronoUnit}, such as {@code DAYS}.
 * The unit interface is designed to allow application-defined units.
* </p>
* <p>
* A field is used to express part of a larger date-time, such as year, month-of-year or second-of-minute.
* All fields implement {@link java.time.temporal.TemporalField}.
* The set of well known fields are defined in {@link java.time.temporal.ChronoField}, such as {@code HOUR_OF_DAY}.
* Additional fields are defined by {@link java.time.temporal.JulianFields}, {@link java.time.temporal.WeekFields}
* and {@link java.time.temporal.IsoFields}.
 * The field interface is designed to allow application-defined fields.
* </p>
* <p>
* This package provides tools that allow the units and fields of date and time to be accessed
* in a general way most suited for frameworks.
* {@link java.time.temporal.Temporal} provides the abstraction for date time types that support fields.
* Its methods support getting the value of a field, creating a new date time with the value of
* a field modified, and querying for additional information, typically used to extract the offset or time-zone.
* </p>
* <p>
* One use of fields in application code is to retrieve fields for which there is no convenience method.
* For example, getting the day-of-month is common enough that there is a method on {@code Date}
* called {@code getDayOfMonth()}. However for more unusual fields it is necessary to use the field.
* For example, {@code date.get(ChronoField.ALIGNED_WEEK_OF_MONTH)}.
* The fields also provide access to the range of valid values.
* </p>
*
* <h3>Adjustment and Query</h3>
* <p>
* A key part of the date-time problem space is adjusting a date to a new, related value,
* such as the "last day of the month", or "next Wednesday".
* These are modeled as functions that adjust a base date-time.
* The functions implement {@link java.time.temporal.TemporalAdjuster} and operate on {@code Temporal}.
* A set of common functions are provided in {@code TemporalAdjuster}.
* For example, to find the first occurrence of a day-of-week after a given date, use
* {@link java.time.temporal.TemporalAdjuster#next(DayOfWeek)}, such as
* {@code date.with(next(MONDAY))}.
* Applications can also define adjusters by implementing {@code TemporalAdjuster}.
* </p>
* <p>
* The {@link java.time.temporal.TemporalAmount} interface models amounts of relative time.
* </p>
* <p>
* In addition to adjusting a date-time, an interface is provided to enable querying -
* {@link java.time.temporal.TemporalQuery}.
* The most common implementations of the query interface are method references.
* The {@code from(TemporalAccessor)} methods on major classes can all be used, such as
* {@code Date::from} or {@code Month::from}.
* Further implementations are provided in {@code TemporalQuery} as static methods.
* Applications can also define queries by implementing {@code TemporalQuery}.
* </p>
*
* <h3>Weeks</h3>
* <p>
* Different locales have different definitions of the week.
* For example, in Europe the week typically starts on a Monday, while in the US it starts on a Sunday.
* The {@link java.time.temporal.WeekFields} class models this distinction.
* </p>
* <p>
* The ISO calendar system defines an additional week-based division of years.
* This defines a year based on whole Monday to Monday weeks.
* This is modeled in {@link java.time.temporal.IsoFields}.
* </p>
*
* <h3>Package specification</h3>
* <p>
* Unless otherwise noted, passing a null argument to a constructor or method in any class or interface
* in this package will cause a {@link java.lang.NullPointerException NullPointerException} to be thrown.
* The Javadoc "@param" definition is used to summarise the null-behavior.
* The "@throws {@link java.lang.NullPointerException}" is not explicitly documented in each method.
* </p>
* <p>
* All calculations should check for numeric overflow and throw either an {@link java.lang.ArithmeticException}
* or a {@link java.time.DateTimeException}.
* </p>
* @since JDK1.8
*/
| javierg1975/metronome | src/main/scala/metronome/temporal/Temporal.scala | Scala | gpl-2.0 | 306,189 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.memory
import org.apache.spark.SparkConf
import org.apache.spark.storage.BlockId
/**
* A [[MemoryManager]] that enforces a soft boundary between execution and storage such that
* either side can borrow memory from the other.
*
* The region shared between execution and storage is a fraction of (the total heap space - 300MB)
* configurable through `spark.memory.fraction` (default 0.6). The position of the boundary
* within this space is further determined by `spark.memory.storageFraction` (default 0.5).
* This means the size of the storage region is 0.6 * 0.5 = 0.3 of the heap space by default.
*
* Storage can borrow as much execution memory as is free until execution reclaims its space.
* When this happens, cached blocks will be evicted from memory until sufficient borrowed
* memory is released to satisfy the execution memory request.
*
* Similarly, execution can borrow as much storage memory as is free. However, execution
* memory is *never* evicted by storage due to the complexities involved in implementing this.
* The implication is that attempts to cache blocks may fail if execution has already eaten
* up most of the storage space, in which case the new blocks will be evicted immediately
* according to their respective storage levels.
*
* @param onHeapStorageRegionSize Size of the storage region, in bytes.
* This region is not statically reserved; execution can borrow from
* it if necessary. Cached blocks can be evicted only if actual
* storage memory usage exceeds this region.
*/
private[spark] class UnifiedMemoryManager private[memory] (
    conf: SparkConf,
    val maxHeapMemory: Long,
    onHeapStorageRegionSize: Long,
    numCores: Int)
  extends MemoryManager(
    conf,
    numCores,
    onHeapStorageRegionSize,
    maxHeapMemory - onHeapStorageRegionSize) {

  // Invariant: the execution and storage pools always partition the whole
  // region exactly, on both the on-heap and off-heap sides. Borrowing moves
  // capacity between the two pools but never changes their sum.
  private def assertInvariants(): Unit = {
    assert(onHeapExecutionMemoryPool.poolSize + onHeapStorageMemoryPool.poolSize == maxHeapMemory)
    assert(
      offHeapExecutionMemoryPool.poolSize + offHeapStorageMemoryPool.poolSize == maxOffHeapMemory)
  }

  assertInvariants()

  // Storage may use everything execution is not currently using.
  override def maxOnHeapStorageMemory: Long = synchronized {
    maxHeapMemory - onHeapExecutionMemoryPool.memoryUsed
  }

  override def maxOffHeapStorageMemory: Long = synchronized {
    maxOffHeapMemory - offHeapExecutionMemoryPool.memoryUsed
  }

  /**
   * Try to acquire up to `numBytes` of execution memory for the current task and return the
   * number of bytes obtained, or 0 if none can be allocated.
   *
   * This call may block until there is enough free memory in some situations, to make sure each
   * task has a chance to ramp up to at least 1 / 2N of the total memory pool (where N is the # of
   * active tasks) before it is forced to spill. This can happen if the number of tasks increase
   * but an older task had a lot of memory already.
   */
  override private[memory] def acquireExecutionMemory(
      numBytes: Long,
      taskAttemptId: Long,
      memoryMode: MemoryMode): Long = synchronized {
    assertInvariants()
    assert(numBytes >= 0)
    // Select the pool pair and the sizing limits for the requested mode.
    // NOTE(review): `offHeapStorageMemory` is declared outside this file (in
    // MemoryManager); assumed to play the same role as
    // `onHeapStorageRegionSize` for the off-heap side — confirm.
    val (executionPool, storagePool, storageRegionSize, maxMemory) = memoryMode match {
      case MemoryMode.ON_HEAP => (
        onHeapExecutionMemoryPool,
        onHeapStorageMemoryPool,
        onHeapStorageRegionSize,
        maxHeapMemory)
      case MemoryMode.OFF_HEAP => (
        offHeapExecutionMemoryPool,
        offHeapStorageMemoryPool,
        offHeapStorageMemory,
        maxOffHeapMemory)
    }

    /**
     * Grow the execution pool by evicting cached blocks, thereby shrinking the storage pool.
     *
     * When acquiring memory for a task, the execution pool may need to make multiple
     * attempts. Each attempt must be able to evict storage in case another task jumps in
     * and caches a large block between the attempts. This is called once per attempt.
     */
    def maybeGrowExecutionPool(extraMemoryNeeded: Long): Unit = {
      if (extraMemoryNeeded > 0) {
        // There is not enough free memory in the execution pool, so try to reclaim memory from
        // storage. We can reclaim any free memory from the storage pool. If the storage pool
        // has grown to become larger than `storageRegionSize`, we can evict blocks and reclaim
        // the memory that storage has borrowed from execution.
        val memoryReclaimableFromStorage = math.max(
          storagePool.memoryFree,
          storagePool.poolSize - storageRegionSize)
        if (memoryReclaimableFromStorage > 0) {
          // Only reclaim as much space as is necessary and available:
          val spaceToReclaim = storagePool.freeSpaceToShrinkPool(
            math.min(extraMemoryNeeded, memoryReclaimableFromStorage))
          // Move the reclaimed capacity from the storage pool to execution.
          storagePool.decrementPoolSize(spaceToReclaim)
          executionPool.incrementPoolSize(spaceToReclaim)
        }
      }
    }

    /**
     * The size the execution pool would have after evicting storage memory.
     *
     * The execution memory pool divides this quantity among the active tasks evenly to cap
     * the execution memory allocation for each task. It is important to keep this greater
     * than the execution pool size, which doesn't take into account potential memory that
     * could be freed by evicting storage. Otherwise we may hit SPARK-12155.
     *
     * Additionally, this quantity should be kept below `maxMemory` to arbitrate fairness
     * in execution memory allocation across tasks. Otherwise, a task may occupy more than
     * its fair share of execution memory, mistakenly thinking that other tasks can acquire
     * the portion of storage memory that cannot be evicted.
     */
    def computeMaxExecutionPoolSize(): Long = {
      maxMemory - math.min(storagePool.memoryUsed, storageRegionSize)
    }

    // Delegate the actual (possibly blocking) arbitration to the execution
    // pool, handing it the two callbacks defined above.
    executionPool.acquireMemory(
      numBytes, taskAttemptId, maybeGrowExecutionPool, computeMaxExecutionPoolSize)
  }

  // Acquire `numBytes` of storage memory for `blockId`, borrowing free
  // execution memory when the storage pool alone cannot satisfy the request.
  // Returns false when the block cannot fit at all.
  override def acquireStorageMemory(
      blockId: BlockId,
      numBytes: Long,
      memoryMode: MemoryMode): Boolean = synchronized {
    assertInvariants()
    assert(numBytes >= 0)
    val (executionPool, storagePool, maxMemory) = memoryMode match {
      case MemoryMode.ON_HEAP => (
        onHeapExecutionMemoryPool,
        onHeapStorageMemoryPool,
        maxOnHeapStorageMemory)
      case MemoryMode.OFF_HEAP => (
        offHeapExecutionMemoryPool,
        offHeapStorageMemoryPool,
        maxOffHeapMemory)
    }
    if (numBytes > maxMemory) {
      // Fail fast if the block simply won't fit
      logInfo(s"Will not store $blockId as the required space ($numBytes bytes) exceeds our " +
        s"memory limit ($maxMemory bytes)")
      return false
    }
    if (numBytes > storagePool.memoryFree) {
      // There is not enough free memory in the storage pool, so try to borrow free memory from
      // the execution pool.
      val memoryBorrowedFromExecution = Math.min(executionPool.memoryFree, numBytes)
      executionPool.decrementPoolSize(memoryBorrowedFromExecution)
      storagePool.incrementPoolSize(memoryBorrowedFromExecution)
    }
    storagePool.acquireMemory(blockId, numBytes)
  }

  // Unroll memory is not tracked separately here: it is ordinary storage
  // memory, so the request is simply forwarded.
  override def acquireUnrollMemory(
      blockId: BlockId,
      numBytes: Long,
      memoryMode: MemoryMode): Boolean = synchronized {
    acquireStorageMemory(blockId, numBytes, memoryMode)
  }
}
object UnifiedMemoryManager {

  // Fixed carve-out for non-storage, non-execution purposes. This serves a
  // function similar to `spark.memory.fraction`, but guarantees that we
  // reserve sufficient memory for the system even for small heaps. E.g. with
  // a 1GB JVM, the memory used for execution and storage will be
  // (1024 - 300) * 0.6 = 434MB by default.
  private val RESERVED_SYSTEM_MEMORY_BYTES = 300 * 1024 * 1024

  /**
   * Build a manager whose on-heap storage region is
   * `spark.memory.storageFraction` (default 0.5) of the usable heap.
   */
  def apply(conf: SparkConf, numCores: Int): UnifiedMemoryManager = {
    val usableMemory = getMaxMemory(conf)
    val storageFraction = conf.getDouble("spark.memory.storageFraction", 0.5)
    new UnifiedMemoryManager(
      conf,
      maxHeapMemory = usableMemory,
      onHeapStorageRegionSize = (usableMemory * storageFraction).toLong,
      numCores = numCores)
  }

  /**
   * Return the total amount of memory shared between execution and storage, in bytes.
   *
   * @throws IllegalArgumentException when the driver or executor heap is too
   *         small to leave room for the reserved system memory.
   */
  private def getMaxMemory(conf: SparkConf): Long = {
    val jvmMemory = conf.getLong("spark.testing.memory", Runtime.getRuntime.maxMemory)
    val defaultReserved = if (conf.contains("spark.testing")) 0 else RESERVED_SYSTEM_MEMORY_BYTES
    val reservedMemory = conf.getLong("spark.testing.reservedMemory", defaultReserved)
    val minSystemMemory = (reservedMemory * 1.5).ceil.toLong
    if (jvmMemory < minSystemMemory) {
      throw new IllegalArgumentException(s"System memory $jvmMemory must " +
        s"be at least $minSystemMemory. Please increase heap size using the --driver-memory " +
        s"option or spark.driver.memory in Spark configuration.")
    }
    // SPARK-12759 Check executor memory to fail fast if memory is insufficient
    if (conf.contains("spark.executor.memory")) {
      val executorMemory = conf.getSizeAsBytes("spark.executor.memory")
      if (executorMemory < minSystemMemory) {
        throw new IllegalArgumentException(s"Executor memory $executorMemory must be at least " +
          s"$minSystemMemory. Please increase executor memory using the " +
          s"--executor-memory option or spark.executor.memory in Spark configuration.")
      }
    }
    val usableMemory = jvmMemory - reservedMemory
    val memoryFraction = conf.getDouble("spark.memory.fraction", 0.6)
    (usableMemory * memoryFraction).toLong
  }
}
| sh-cho/cshSpark | memory/UnifiedMemoryManager.scala | Scala | apache-2.0 | 10,450 |
/* This file is part of gnieh-coroutines.
*
* See the NOTICE file distributed with this work for copyright information.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gnieh.coroutines.plugin
import scala.tools.nsc
import scala.tools.nsc.typechecker._
import nsc.Global
import nsc.Phase
import nsc.plugins.Plugin
import nsc.plugins.PluginComponent
class CoroutinesPlugin(val global: Global) extends Plugin {
  import global._

  val name = "coroutines"
  val description = "applies coroutines conversion"

  // The transformation phase, scheduled right after the typer so that the
  // trees it rewrites are fully typed.
  val phase = new CoroutinesTransform() {
    val global = CoroutinesPlugin.this.global
    val runsAfter = List("typer")
    // it must run before continuations are transformed
    // override val runsBefore = List("selectiveanf")
  }

  val components = List[PluginComponent](phase)

  global.log("instantiated coroutines plugin: " + this)

  // TODO: require -enabled command-line flag?
  override def processOptions(options: List[String], error: String => Unit) = {
    // NOTE: `enabled` is recorded but not consulted anywhere else yet.
    var enabled = false
    options.foreach {
      case "enable"  => enabled = true
      case "verbose" => phase.verbose = true
      case unknown   => error("Option not understood: " + unknown)
    }
  }

  override val optionsHelp: Option[String] =
    Some(" -P:coroutines:verbose verbose mode")
  //Some(" -P:coroutines:enable Enable coroutines (continuations must be enabled too)")
}
| satabin/gnieh-coroutines | plugin/gnieh/coroutines/plugin/CoroutinesPlugin.scala | Scala | apache-2.0 | 1,952 |
package ml.sparkling.graph.loaders.csv.utils
import org.apache.spark.graphx.VertexId
import org.apache.spark.sql.Row
/**
* Created by Roman Bartusiak (roman.bartusiak@pwr.edu.pl http://riomus.github.io).
*/
object DefaultTransformers {

  /** Default edge attribute used when a CSV row carries no weight: always 1.0. */
  def defaultEdgeAttribute(row: Row): Double = 1d

  /** Derives a vertex identifier from any value by parsing its string form as a Long. */
  def numberToVertexId[VD](any: VD): VertexId = {
    val rendered = any.toString
    rendered.toLong
  }
}
| sparkling-graph/sparkling-graph | loaders/src/main/scala/ml/sparkling/graph/loaders/csv/utils/DefaultTransformers.scala | Scala | bsd-2-clause | 381 |
//===========================================================================
// Copyright 2015 Delving B.V.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//===========================================================================
package triplestore
import dataset.DsInfo
import dataset.DsInfo._
import mapping.VocabInfo.CATEGORIES_SPEC
import org.joda.time.DateTime
import play.api.libs.json.JsValue
import services.StringHandling._
import services.Temporal
import triplestore.GraphProperties._
import triplestore.TripleStore.QueryValue
object Sparql {

  // === actor store ===

  // Characters that would break out of a single-quoted SPARQL literal,
  // mapped to their escaped spellings.
  private val SPARQL_ESCAPE: Map[Char, String] = Map(
    '\t' -> "\\t",
    '\n' -> "\\n",
    '\r' -> "\\r",
    '\b' -> "\\b",
    '\f' -> "\\f",
    '"' -> "\\\"",
    '\'' -> "\\'",
    '\\' -> "\\\\"
  )

  // Escape a raw value so it can be embedded safely inside a SPARQL literal.
  private def escape(value: String): String = value.map(c => SPARQL_ESCAPE.getOrElse(c, c.toString)).mkString

  // todo: the following is a way to make the string interpolation type-aware
  implicit class QueryStringContext(stringContext: StringContext) {
    // NOTE(review): the partial function below only handles NXProp and
    // String arguments; any other argument type would fail at runtime.
    def Q(args: Any*) = stringContext.s(args.map {
      case nxProp: NXProp => "<" + nxProp.uri + ">"
      case string: String => "'" + escape(string) + "'"
      // todo: other cases
    })
  }
  val x = "4"
  val y = Q"gumby $x"
  // todo: ---------------------------------------

  // Render a value as a SPARQL literal, with an optional @language tag.
  private def literalExpression(value: String, languageOpt: Option[String]) = languageOpt.map { language =>
    s"'${escape(value)}'@$language"
  } getOrElse {
    s"'${escape(value)}'"
  }

  // ASK whether the named graph holds at least one triple.
  def graphExistsQ(graphName: String) =
    s"""
      |ASK {
      |  GRAPH <$graphName> { ?s ?p ?o }
      |}
    """.stripMargin

  // === dataset info ===

  // All dataset specs that are not marked as deleted.
  val selectDatasetSpecsQ =
    s"""
      |SELECT DISTINCT ?spec
      |WHERE {
      |  GRAPH ?g {
      |    ?s <$datasetSpec> ?spec .
      |    FILTER NOT EXISTS { ?s <$deleted> true }
      |  }
      |}
      |ORDER BY ?spec
    """.stripMargin

  def askIfDatasetExistsQ(uri: String) =
    s"""
      |ASK {
      |  GRAPH ?g { <$uri> <$datasetSpec> ?spec }
      |}
    """.stripMargin

  // Replace the (single) value of `prop` on `uri` inside `graphName`.
  def updatePropertyQ(graphName: String, uri: String, prop: NXProp, value: String): String =
    s"""
      |WITH <$graphName>
      |DELETE {
      |  <$uri> <$prop> ?o
      |}
      |INSERT {
      |  <$uri> <$prop> ${literalExpression(value, None)}
      |}
      |WHERE {
      |  OPTIONAL { <$uri> <$prop> ?o }
      |}
    """.stripMargin.trim

  // Mark the resource as out of sync (synced=false).
  def updateSyncedFalseQ(graphName: String, uri: String): String =
    s"""
      |WITH <$graphName>
      |DELETE {
      |  <$uri> <$synced> ?o
      |}
      |INSERT {
      |  <$uri> <$synced> false
      |}
      |WHERE {
      |  OPTIONAL { <$uri> <$synced> ?o }
      |}
    """.stripMargin.trim

  def removeLiteralPropertyQ(graphName: String, uri: String, prop: NXProp) =
    s"""
      |WITH <$graphName>
      |DELETE {
      |  <$uri> <$prop> ?o
      |}
      |WHERE {
      |  <$uri> <$prop> ?o
      |}
    """.stripMargin

  def addLiteralPropertyToListQ(graphName: String, uri: String, prop: NXProp, value: String) =
    s"""
      |INSERT DATA {
      |  GRAPH <$graphName> {
      |    <$uri> <$prop> ${literalExpression(value, None)} .
      |  }
      |}
    """.stripMargin.trim

  def deleteLiteralPropertyFromListQ(graphName: String, uri: String, prop: NXProp, value: String) =
    s"""
      |WITH <$graphName>
      |DELETE {
      |  <$uri> <$prop> ${literalExpression(value, None)} .
      |}
      |WHERE {
      |  <$uri> <$prop> ${literalExpression(value, None)} .
      |}
    """.stripMargin

  // Drop the dataset graph and its skos graph, then delete the graphs of all
  // records and all terminology mappings belonging to the dataset.
  def deleteDatasetQ(datasetGraphName: String, uri: String, skosGraphName: String) =
    s"""
      |DROP SILENT GRAPH <$datasetGraphName>;
      |DROP SILENT GRAPH <$skosGraphName>;
      |DELETE {
      |  GRAPH ?g {
      |    ?s ?p ?o .
      |  }
      |}
      |WHERE {
      |  GRAPH ?g {
      |    ?foafDoc <$foafPrimaryTopic> ?record .
      |    ?foafDoc <$belongsTo> <$uri> .
      |    ?record a <$recordEntity> .
      |  }
      |  GRAPH ?g {
      |    ?s ?p ?o .
      |  }
      |};
      |DELETE {
      |  GRAPH ?g {
      |    ?s ?p ?o .
      |  }
      |}
      |WHERE {
      |  GRAPH ?g {
      |    ?subject
      |      <$mappingVocabulary> <$uri> ;
      |      a <$terminologyMapping> .
      |  }
      |  GRAPH ?g {
      |    ?s ?p ?o .
      |  }
      |};
    """.stripMargin

  // Replace every record graph of the dataset with a tombstone:
  // deleted=true, synced=false, plus the triples identifying the record.
  def deleteDatasetRecordsQ(uri: String) =
    s"""
      |DELETE {
      |  GRAPH ?g {
      |    ?s ?p ?o .
      |  }
      |}
      |INSERT {
      |  GRAPH ?g {
      |    ?foafDoc <$deleted> true .
      |    ?foafDoc <$synced> false .
      |    ?foafDoc <$foafPrimaryTopic> ?record .
      |    ?foafDoc <$belongsTo> <$uri> .
      |    ?record a <$recordEntity> .
      |  }
      |}
      |WHERE {
      |  GRAPH ?g {
      |    ?foafDoc <$foafPrimaryTopic> ?record .
      |    ?foafDoc <$belongsTo> <$uri> .
      |    ?record a <$recordEntity> .
      |  }
      |  GRAPH ?g {
      |    ?s ?p ?o .
      |  }
      |};
    """.stripMargin

  // Same tombstoning as above, restricted to records saved strictly before
  // `fromSaveTime`.
  def markOlderRecordsDeletedQ(fromSaveTime: DateTime, uri: String) =
    s"""
      |DELETE {
      |  GRAPH ?g {
      |    ?s ?p ?o .
      |  }
      |}
      |INSERT {
      |  GRAPH ?g {
      |    ?foafDoc <$deleted> true .
      |    ?foafDoc <$foafPrimaryTopic> ?record .
      |    ?foafDoc <$belongsTo> <$uri> .
      |    ?record a <$recordEntity> .
      |  }
      |}
      |WHERE {
      |  GRAPH ?g {
      |    ?foafDoc <$saveTime> ?saveTime
      |    FILTER (?saveTime < "${Temporal.timeToUTCString(fromSaveTime)}")
      |    ?foafDoc <$foafPrimaryTopic> ?record .
      |    ?foafDoc <$belongsTo> <$uri> .
      |    ?record a <$recordEntity> .
      |  }
      |  GRAPH ?g {
      |    ?s ?p ?o .
      |  }
      |};
    """.stripMargin

  // === vocab info ===

  val listVocabInfoQ =
    s"""
      |SELECT ?spec
      |WHERE {
      |  GRAPH ?g {
      |    ?s <$skosSpec> ?spec .
      |  }
      |}
      |ORDER BY ?spec
    """.stripMargin

  def checkVocabQ(skosUri: String) =
    s"""
      |ASK {
      |  GRAPH ?g {
      |    <$skosUri> <$skosSpec> ?spec .
      |  }
      |}
    """.stripMargin

  // Count the skos:Concept entries in a vocabulary graph.
  def getVocabStatisticsQ(skosGraphName: String) =
    s"""
      |PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
      |PREFIX skos: <http://www.w3.org/2004/02/skos/core#>
      |SELECT (count(?s) as ?count)
      |WHERE {
      |  GRAPH <$skosGraphName> {
      |    ?s rdf:type skos:Concept
      |  }
      |}
    """.stripMargin

  // Clear both vocabulary graphs and delete any mapping graphs that
  // reference the vocabulary.
  def dropVocabularyQ(graphName: String, skosGraphName: String, uri: String) =
    s"""
      |CLEAR GRAPH <$graphName>;
      |CLEAR GRAPH <$skosGraphName>;
      |DELETE {
      |  GRAPH ?g {
      |    ?s ?p ?o .
      |  }
      |}
      |WHERE {
      |  GRAPH ?g {
      |    ?subject
      |      <$mappingVocabulary> <$uri> ;
      |      a <$terminologyMapping> .
      |  }
      |  GRAPH ?g {
      |    ?s ?p ?o .
      |  }
      |};
    """.stripMargin

  // === mapping store ===

  def doesMappingExistQ(uriA: String, uriB: String) =
    s"""
      |ASK {
      |  GRAPH ?g {
      |    ?mapping
      |      a <$terminologyMapping>;
      |      <$mappingConcept> <$uriA>;
      |      <$mappingConcept> <$uriB> .
      |  }
      |}
    """.stripMargin

  // Wipe the graph holding the mapping between the two concepts and flag the
  // mapping itself as deleted.
  def deleteMappingQ(uriA: String, uriB: String) =
    s"""
      |DELETE {
      |  GRAPH ?g {
      |    ?s ?p ?o
      |  }
      |}
      |INSERT {
      |  GRAPH ?g {
      |    ?mapping <$mappingDeleted> true
      |  }
      |}
      |WHERE {
      |  GRAPH ?g {
      |    ?mapping <$mappingConcept> <$uriA> .
      |    ?mapping <$mappingConcept> <$uriB> .
      |  }
      |  GRAPH ?g {
      |    ?s ?p ?o
      |  }
      |}
    """.stripMargin

  // Create a mapping between two concepts. A mapping into the categories
  // vocabulary is recorded with belongsToCategory, everything else with
  // skos exactMatch.
  def insertMappingQ(graphName: String, prefix: String, uri: String, uriA: String, uriB: String, skosA: SkosGraph, skosB: SkosGraph) = {
    val connection = if (skosB.spec == CATEGORIES_SPEC) belongsToCategory.uri else exactMatch
    s"""
      |INSERT DATA {
      |  GRAPH <$graphName> {
      |    <$uriA> <$connection> <$uriB> .
      |    <$uri>
      |      a <$terminologyMapping>;
      |      <$synced> false;
      |      <$mappingTime> '''${Temporal.timeToString(new DateTime)}''' ;
      |      <$mappingConcept> <$uriA> ;
      |      <$mappingConcept> <$uriB> ;
      |      <$mappingVocabulary> <${skosA.uri}> ;
      |      <$mappingVocabulary> <${skosB.uri}> .
      |  }
      |}
    """.stripMargin
  }

  def getVocabMappingsQ(skosA: SkosGraph, skosB: SkosGraph) = {
    s"""
      |SELECT ?a ?b
      |WHERE {
      |  GRAPH ?g {
      |    ?a <$exactMatch> ?b .
      |    ?s <$mappingVocabulary> <${skosA.uri}> .
      |    ?s <$mappingVocabulary> <${skosB.uri}> .
      |  }
      |}
    """.stripMargin
  }

  // List the term-to-vocabulary mappings of a terms graph together with the
  // spec of the vocabulary each term is mapped into.
  def getTermMappingsQ(terms: SkosGraph, categories: Boolean) = {
    val connection = if (categories) belongsToCategory.uri else exactMatch
    s"""
      |SELECT ?termUri ?vocabUri ?vocabSpec
      |WHERE {
      |  GRAPH ?vocabGraph {
      |    ?vocab <$skosSpec> ?vocabSpec
      |  }
      |  GRAPH ?mappingGraph {
      |    ?termUri <$connection> ?vocabUri .
      |    ?s <$mappingVocabulary> <${terms.uri}> .
      |    ?s <$mappingVocabulary> ?vocab .
      |    FILTER (?vocab != <${terms.uri}>)
      |  }
      |}
    """.stripMargin
  }

  def askTermMappingQ(termUri: String, vocabUri: String, categories: Boolean) = {
    val connection = if (categories) belongsToCategory.uri else exactMatch
    // NOTE(review): `termUri` and `vocabUri` are not interpolated below —
    // ?termUri/?vocabUri are SPARQL variables, so this ASK matches ANY
    // mapping with the chosen connection; confirm whether the parameters
    // were meant to be substituted.
    s"""
      |ASK {
      |  GRAPH ?g {
      |    ?termUri <$connection> ?vocabUri .
      |  }
      |}
    """.stripMargin
  }

  // === skosification ===

  val listSkosifiedFieldsQ =
    s"""
      |SELECT ?spec ?datasetUri ?fieldPropertyUri
      |WHERE {
      |  GRAPH ?g {
      |    ?datasetUri
      |      <$skosField> ?fieldPropertyUri ;
      |      a <$datasetEntity> ;
      |      <$datasetSpec> ?spec.
      |  }
      |}
    """.stripMargin

  // One dataset field selected for skosification. `fieldPropertyValue`
  // packs "tag=uri" into a single string; it is split apart here.
  case class SkosifiedField(spec: String, datasetUri: String, fieldPropertyValue: String) {
    // Assumes the packed value contains exactly one '=' separator.
    val parts = fieldPropertyValue.split("=")
    val fieldPropertyTag = parts(0)
    val fieldPropertyUri = parts(1)
    val skosGraph = getSkosGraphName(datasetUri)

    // Remove all concepts previously minted for this field from the
    // dataset's skos graph.
    val removeSkosEntriesQ =
      s"""
        |PREFIX skos: <http://www.w3.org/2004/02/skos/core#>
        |DELETE {
        |  GRAPH <$skosGraph> {
        |    ?mintedUri ?p ?o .
        |  }
        |}
        |WHERE {
        |  GRAPH <$skosGraph> {
        |    ?mintedUri
        |      a skos:Concept ;
        |      <$belongsTo> <$datasetUri> ;
        |      <$skosField> <$fieldPropertyUri> .
        |  }
        |  GRAPH <$skosGraph> {
        |    ?mintedUri ?p ?o .
        |  }
        |};
      """.stripMargin
  }

  // Build a SkosifiedField from one row of listSkosifiedFieldsQ.
  def skosifiedFieldFromResult(resultMap: Map[String, QueryValue]) = SkosifiedField(
    resultMap("spec").text,
    resultMap("datasetUri").text,
    resultMap("fieldPropertyUri").text
  )

  // Are there still literal values of this field left in the dataset's records?
  def skosificationCasesExistQ(sf: SkosifiedField) =
    s"""
      |ASK {
      |  GRAPH ?g {
      |    ?anything <${sf.fieldPropertyUri}> ?literalValue .
      |    FILTER isLiteral(?literalValue)
      |    ?foafDoc <$foafPrimaryTopic> ?record .
      |    ?foafDoc <$belongsTo> <${sf.datasetUri}> .
      |    ?record a <$recordEntity> .
      |  }
      |}
    """.stripMargin

  def countSkosificationCasesQ(sf: SkosifiedField) =
    s"""
      |SELECT (COUNT(DISTINCT ?literalValue) as ?count)
      |WHERE {
      |  GRAPH ?g {
      |    ?anything <${sf.fieldPropertyUri}> ?literalValue .
      |    FILTER isLiteral(?literalValue)
      |    ?foafDoc <$foafPrimaryTopic> ?record .
      |    ?foafDoc <$belongsTo> <${sf.datasetUri}> .
      |    ?record a <$recordEntity> .
      |  }
      |}
    """.stripMargin

  // NOTE(review): assumes the result list is non-empty and its first row has
  // a "count" binding (double unguarded access: .head and Option.get).
  def countFromResult(mapList: List[Map[String, QueryValue]]): Int = mapList.head.get("count").get.text.toInt

  // Fetch up to `chunkSize` distinct literal values still to be skosified.
  def listSkosificationCasesQ(sf: SkosifiedField, chunkSize: Int) =
    s"""
      |SELECT DISTINCT ?literalValue
      |WHERE {
      |  GRAPH ?g {
      |    ?anything <${sf.fieldPropertyUri}> ?literalValue .
      |    FILTER isLiteral(?literalValue)
      |    ?foafDoc <$foafPrimaryTopic> ?record .
      |    ?foafDoc <$belongsTo> <${sf.datasetUri}> .
      |    ?record a <$recordEntity> .
      |  }
      |}
      |LIMIT $chunkSize
    """.stripMargin

  def createCasesFromQueryValues(sf: SkosifiedField, resultList: List[Map[String, QueryValue]]): List[SkosificationCase] =
    resultList.map(_("literalValue")).map(v => SkosificationCase(sf, v.text, v.language))

  // Build cases from a histogram JSON document whose entries are presumably
  // [count, value] pairs (note the count.head/count(1) accesses below).
  def createCasesFromHistogram(dsInfo: DsInfo, json: JsValue): List[SkosificationCase] = {
    val fieldPropertyTag = (json \ "tag").as[String]
    val fieldPropertyUri = (json \ "uri").as[String]
    val fieldPropertyValue = s"$fieldPropertyTag=$fieldPropertyUri"
    val histogram = (json \ "histogram").as[List[List[String]]]
    val sf = SkosifiedField(dsInfo.spec, dsInfo.uri, fieldPropertyValue)
    // todo: no language
    histogram.map(count => SkosificationCase(sf, count(1), None, Some(count.head.toInt)))
  }

  // One literal value of a skosified field, to be replaced throughout the
  // dataset's records by a freshly minted concept URI.
  case class SkosificationCase(sf: SkosifiedField, literalValueText: String, languageOpt: Option[String], frequencyOpt: Option[Int] = None) {
    val mintedUri = s"${sf.datasetUri}/${sf.fieldPropertyTag}/${slugify(literalValueText)}"
    val datasetUri = sf.datasetUri
    val value = literalValueText

    // Create the minted skos:Concept unless it already exists.
    val ensureSkosEntryQ =
      s"""
        |PREFIX skos: <http://www.w3.org/2004/02/skos/core#>
        |INSERT {
        |  GRAPH <${sf.skosGraph}> {
        |    <$mintedUri>
        |      a skos:Concept ;
        |      a <$proxyResource> ;
        |      skos:altLabel ${literalExpression(value, languageOpt)} ;
        |      <$proxyLiteralValue> ${literalExpression(value, languageOpt)} ;
        |      <$belongsTo> <$datasetUri> ;
        |      <$skosField> <${sf.fieldPropertyUri}> ;
        |      <$skosFieldTag> ${literalExpression(sf.fieldPropertyTag, None)} ;
        |      <$proxyLiteralField> <${sf.fieldPropertyUri}> ;
        |      <$synced> false .
        |    ${frequencyOpt.map(freq => s"<$mintedUri> <$skosFrequency> '''$freq''' .").getOrElse("")}
        |  }
        |}
        |WHERE {
        |  FILTER NOT EXISTS {
        |    GRAPH <${sf.skosGraph}> {
        |      ?existing a skos:Concept
        |      FILTER( ?existing = <$mintedUri> )
        |    }
        |  }
        |};
      """.stripMargin

    // Rewrite every record triple holding this literal so that it points at
    // the minted concept URI instead.
    val literalToUriQ =
      s"""
        |DELETE {
        |  GRAPH ?g {
        |    ?s <${sf.fieldPropertyUri}> ${literalExpression(value, languageOpt)} .
        |  }
        |}
        |INSERT {
        |  GRAPH ?g {
        |    ?s <${sf.fieldPropertyUri}> <$mintedUri> .
        |  }
        |}
        |WHERE {
        |  GRAPH ?g {
        |    ?s <${sf.fieldPropertyUri}> ${literalExpression(value, languageOpt)} .
        |    ?record a <$recordEntity> .
        |    ?foafDoc <$foafPrimaryTopic> ?record .
        |    ?foafDoc <$belongsTo> <${sf.datasetUri}> .
        |  }
        |};
      """.stripMargin
  }
}
| delving/narthex | app/triplestore/Sparql.scala | Scala | apache-2.0 | 15,802 |
// Compiler test fixture: an outer class with a bounded abstract type member
// and an inner class whose own type member is bounded by it.
abstract class FooA {
  type A <: Ax;
  abstract class Ax;
  abstract class InnerA {
    type B <: A;
    def doB : B;
  }
}
// Negative compile test: refines both the type member and the inner class
// bounding it. The trailing `// error` comments are expected-diagnostic
// markers for the test runner and must not be removed.
trait FooB extends FooA {
  type A <: Ax;
  trait Ax extends super.Ax { def xxx : Int; } // error: cyclic inheritance: trait Ax extends itself) // error: class definitions cannot be overridden
  abstract class InnerB extends InnerA {
    // type B <: A;
    val a : A = doB;
    a.xxx;
    doB.xxx;
  }
}
| som-snytt/dotty | tests/neg-custom-args/overrideClass.scala | Scala | apache-2.0 | 467 |
package org.bitcoins.core.script.splice
import org.bitcoins.core.script.ScriptProgram
import org.bitcoins.core.script.constant._
import org.bitcoins.core.script.result.ScriptErrorInvalidStackOperation
import org.bitcoins.core.util.BitcoinSLogger
/**
* Created by chris on 2/4/16.
*/
sealed abstract class SpliceInterpreter {

  private def logger = BitcoinSLogger.logger

  /** Pushes the string length of the top element of the stack (without popping it). */
  def opSize(program: ScriptProgram): ScriptProgram = {
    require(program.script.headOption.contains(OP_SIZE), "Script top must be OP_SIZE")
    if (program.stack.nonEmpty) {
      if (program.stack.head == OP_0) {
        // OP_0 is given size zero, pushed here as OP_0 itself.
        ScriptProgram(program, OP_0 :: program.stack, program.script.tail)
      } else {
        val scriptNumber = program.stack.head match {
          // ScriptNumber.zero is kept as-is (its byte representation is empty).
          case ScriptNumber.zero => ScriptNumber.zero
          // Any other token: push the byte length of its serialized form.
          case x: ScriptToken => ScriptNumber(x.bytes.size)
        }
        ScriptProgram(program, scriptNumber :: program.stack, program.script.tail)
      }
    } else {
      // OP_SIZE requires an operand; mark the program as failed.
      logger.error("Must have at least 1 element on the stack for OP_SIZE")
      ScriptProgram(program, ScriptErrorInvalidStackOperation)
    }
  }
}

// Singleton instance used by callers of the interpreter.
object SpliceInterpreter extends SpliceInterpreter
package org.scalaideaextension.environment
import java.io.File
/**
* @author kostas.kougios
* Date: 01/08/14
*/
case class ScriptEnvironment(scriptRootDirectory: File) | kostaskougios/scala-idea-extensions | src/org/scalaideaextension/environment/ScriptEnvironment.scala | Scala | apache-2.0 | 183 |
package co.blocke.scalajack
package model
import scala.collection.Map
import scala.collection.mutable
import co.blocke.scala_reflection.info.{TupleInfo, FieldInfo}
// A value written in addition to an object's regular fields, paired with the
// adapter that serializes it.
case class ExtraFieldValue[T](value: T, valueTypeAdapter: TypeAdapter[T])

/**
 * Low-level emitter interface for one wire format; WIRE is the serialized
 * representation (e.g. a String for JSON). Each method appends the encoding
 * of one value to the supplied builder.
 */
trait Writer[WIRE] {

  def writeArray[Elem](t: Iterable[Elem], elemTypeAdapter: TypeAdapter[Elem], out: mutable.Builder[WIRE, WIRE]): Unit

  def writeBigInt(t: BigInt, out: mutable.Builder[WIRE, WIRE]): Unit

  def writeBoolean(t: Boolean, out: mutable.Builder[WIRE, WIRE]): Unit

  def writeDecimal(t: BigDecimal, out: mutable.Builder[WIRE, WIRE]): Unit

  def writeDouble(t: Double, out: mutable.Builder[WIRE, WIRE]): Unit

  def writeInt(t: Int, out: mutable.Builder[WIRE, WIRE]): Unit

  def writeLong(t: Long, out: mutable.Builder[WIRE, WIRE]): Unit

  def writeMap[Key, Value, To](t: Map[Key, Value], keyTypeAdapter: TypeAdapter[Key], valueTypeAdapter: TypeAdapter[Value], out: mutable.Builder[WIRE, WIRE]): Unit

  def writeNull(out: mutable.Builder[WIRE, WIRE]): Unit

  // `extras` are caller-supplied extra name/value pairs emitted alongside
  // the object's own fields.
  def writeObject[T](
      t: T,
      orderedFieldNames: List[String],
      fieldMembersByName: Map[String, ClassFieldMember[_,_]],
      out: mutable.Builder[WIRE, WIRE],
      extras: List[(String, ExtraFieldValue[_])] = List.empty[(String, ExtraFieldValue[_])]
  ): Unit

  def writeString(t: String, out: mutable.Builder[WIRE, WIRE]): Unit

  def writeRaw(t: WIRE, out: mutable.Builder[WIRE, WIRE]): Unit // i.e. no quotes for JSON

  def writeTuple[T](
      t: T,
      writeFn: (Product) => List[(TypeAdapter[_], Any)],
      out: mutable.Builder[WIRE, WIRE]
  ): Unit
}
/*
* Copyright 2014-2016 Panavista Technologies, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.deftrade
import scala.language.experimental.macros
private[deftrade] trait NonDefaultNamedValues {

  import scala.reflect.macros.whitebox
  // NOTE(review): `whitebox` appears unused — impl2 below takes a blackbox
  // Context.

  // Delimiters for the generated toString (bracket style selected; the
  // parenthesis alternative is kept for reference).
  // val start = '('
  // val sep = ", "
  // val end = ')'
  val start = '['
  val sep = "; "
  val end = ']'

  /**
   * Use to override toString method for case classes to print field names as well as field values,
   * for only those field values which are not equal to the default value (if any) specified in the
   * apply() method of the companion.
   *
   * e.g.
   * {{{
   * case class Foo(i: Int, s: String = "bar") {
   *   override def toString = NonDefaultNamedValues.nonDefaultNamedValues
   * }
   * }}}
   * will generate this:
   * {{{
   * override def toString = {
   *   val b = List.newBuilder[String]
   *   b += "i=" + this.i
   *   if (this.s != Foo.apply$default$2) b += "s=" + this.s
   *   b.result mkString (this.productPrefix + "[", ";", "]")"
   * }
   * }}}
   *
   * Implementation: identify the companion object for the case class.
   * For each parameter of the apply method of the companion,
   * identify the field with the same name.
   * If the field has a value which is not equal to the default value for the corresponding
   * parameter in the apply method, or there is no default value, emit a "name=value" string
   * for the toString method.
   */
  def impl2(c: reflect.macros.blackbox.Context): c.Expr[String] = {
    import c.universe._
    // The case class being expanded owns the enclosing call site; reach its
    // companion to get at apply() and the synthetic default-value methods.
    val companionSymbol = c.internal.enclosingOwner.owner.companion
    val companionTermName = companionSymbol.name.toTermName
    val companionType = companionSymbol.typeSignature
    val applySymbol = companionType.decl(TermName("apply")).asMethod
    val builderTree = q"val b = List.newBuilder[String]"
    // One "name=value" appender per apply() parameter, guarded by a
    // comparison against apply$default$N when a default exists.
    val nvTrees = applySymbol.paramLists.head.zipWithIndex map {
      case (param, index) =>
        val fieldNameString = param.name.decodedName.toString
        val fieldTermName = param.asTerm.name.toTermName
        val nvAppendTree = q"""b += $fieldNameString + "=" + this.$fieldTermName"""
        // Synthetic default-value methods are named apply$default$N (1-based).
        val defaultValTermName = TermName(s"apply$$default$$${index + 1}")
        if (companionType.member(defaultValTermName) != NoSymbol) {
          q"if (this.$fieldTermName != $companionTermName.$defaultValTermName) $nvAppendTree"
        } else {
          nvAppendTree
        }
    }
    val mkStringTree = q"""b.result mkString (this.productPrefix + "[", "; ", "]")"""
    c.Expr[String] {
      q"""
        $builderTree
        ..$nvTrees
        $mkStringTree
      """
    }
  }
}

object NonDefaultNamedValues extends NonDefaultNamedValues {
  // Macro entry point: expands into the toString body built by impl2.
  def nonDefaultNamedValues: String = macro impl2
}
package com.lge.metr
import scala.language.implicitConversions
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
import com.lge.metr.JavaModel._
@RunWith(classOf[JUnitRunner])
class ParserTest extends FunSuite {

  // Feeds a Java compilation unit exercising most statement forms (blocks,
  // try/catch/finally, synchronized, loops, labels, anonymous classes,
  // switch) through the processor and checks how many executable units it
  // extracts (foo.bar is abstract; f and run have bodies → 2).
  test("parse input") {
    val p = new ParboiledJavaProcessor
    val cu = p.process("""
    interface foo {
      int bar();
    }
    class Test {
      abstract int bar();
      void f() {
        ;
        final int a = 3;
        int b = 3;
        int c;
        {
          c = a + b;
        }
        try {
          c = a / 0;
        } catch (IOException e) {
        } catch (Exception e) {
        } finally {
          try {}
          catch (Exception e) {}
        }
        synchronized(this) {
          if (c > 0) {
            return;
          } else {
            c++;
          }
        }
        while (true);
        do c++; while (c<10);
        la:
        for (int i=0; i<10; i++) {
          continue la;
        }
        new Thread(new Runnable() {
          @Override
          public void run() {
            switch(a) {
            case 0:
            case 1:
              break;
            case 2:
              {
                System.out.println("");
              }
              break;
            default:
              break;
            } //switch
          } //run
        }).start(); //Runnable
      } //f
    } //Test
    """)
    expectResult(2)(cu.exes.size)
    // expectResult(CompUnit(List(
    //   Method("f", BlockStmt(List(
    //     OtherStmt(),
    //     OtherStmt(),
    //     OtherStmt(),
    //     OtherStmt(),
    //     IfStmt(BlockStmt(List(OtherStmt())), None),
    //     OtherStmt()))),
    //   Method("run", BlockStmt(List())))))(cu)
  }
}
package sds;
// Application entry point: constructs an SDS instance from the command-line
// arguments and runs it.
object Main extends App {
  val sds: SDS = new SDS(args)
  sds.run()
}
package uk.co.morleydev.ghosthunt.model.net
/** Network message acknowledging a join-game request.
  *
  * @param id         identifier carried by the acknowledgement — can't tell from here
  *                   whether it names the player or the game; confirm against the sender
  * @param clientTime client-side timestamp (presumably epoch millis — TODO confirm)
  */
case class AcceptJoinGameRequest(id : Int, clientTime : Long)
| MorleyDev/GhostHunt | src/main/scala/uk/co/morleydev/ghosthunt/model/net/AcceptJoinGameRequest.scala | Scala | mit | 107 |
/*
* Copyright 2010-2011 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb
package squerylrecord
import org.squeryl.Session
import org.squeryl.dsl.ast.FunctionNode
import org.squeryl.internals.OutMapper
import org.squeryl.dsl.StringExpression
import org.squeryl.dsl.DateExpression
import org.specs2.mutable.Specification
import org.specs2.specification.AroundExample
import org.specs2.execute.{ AsResult , Result }
import record.{ BaseField, Record }
import record.field._
import RecordTypeMode._
import MySchema.{ TestData => td, _ }
import java.util.Calendar
import java.sql.Timestamp
import java.text.SimpleDateFormat
import org.squeryl.adapters.PostgreSqlAdapter
import common.Empty
import http.{LiftSession, S}
import util.Helpers
/**
* Systems under specification for SquerylRecord.
*/
class SquerylRecordSpec extends Specification with AroundExample {
  "SquerylRecord Specification".title

  // Each example re-creates the in-memory DB (see `around`), so examples
  // must not run concurrently.
  sequential

  lazy val session = new LiftSession("", Helpers.randomString(20), Empty)

  // One of these is for specs2 2.x, the other for specs2 1.x.
  // Both wrap every example: initialize the Lift session, set up a fresh
  // in-memory database and schema, then run the example body.
  protected def around[T <% Result](t: =>T) = {
    S.initIfUninitted(session) {
      DBHelper.initSquerylRecordWithInMemoryDB()
      DBHelper.createSchema()
      t
    }
  }

  protected def around[T : AsResult](t: =>T) = {
    S.initIfUninitted(session) {
      DBHelper.initSquerylRecordWithInMemoryDB()
      DBHelper.createSchema()
      AsResult(t)
    }
  }

  "SquerylRecord" should {
    "load record by ID" in {
      transaction {
        S.initIfUninitted(session) {
          val company = companies.lookup(td.c2.id)
          checkCompaniesEqual(company.get, td.c2)
          val employee = employees.lookup(td.e1.id)
          checkEmployeesEqual(employee.get, td.e1)
        }
      }
    }

    "load record by string field value" in {
      transaction {
        S.initIfUninitted(session){
          val company = from(companies)(c =>
            where(c.name === td.c1.name.get) select (c))
          checkCompaniesEqual(company.single, td.c1)
        }
      }
    }

    "support order by" in {
      transaction {
        val orderedCompanies = from(companies)(c =>
          select(c) orderBy (c.name))
        val ids = orderedCompanies.map(_.id)
        // NOTE: This circumvents implicit conversion for the contents on List
        // ids must containInOrder(
        //   td.allCompanies.sortBy(_.name.get).map(_.id))
        // Compare via mkString so element types don't go through the
        // specs2 implicit conversion mentioned above.
        ids.mkString("(", ",", ")") must_== td.allCompanies.sortBy(_.name.get).map(_.id).mkString("(", ",", ")")
      }
    }

    "support normal joins" in {
      transaction {
        val companiesWithEmployees = from(companies, employees)((c, e) =>
          where(c.id === e.companyId.get)
            select ((c.id, e.id))).toList
        companiesWithEmployees must haveSize(td.allEmployees.size)
        companiesWithEmployees must containAllOf(td.allEmployees map { e => (e.companyId.get, e.id) })
      }
    }

    "support left outer joins" in {
      transaction {
        S.initIfUninitted(session){
          val companiesWithEmployees = join(companies, employees.leftOuter)((c, e) =>
            select(c, e)
              on (c.id === e.map(_.companyId))
          )
          companiesWithEmployees must haveSize(4)
          // One company doesn't have an employee, two have
          companiesWithEmployees.filter(ce => ce._2.isEmpty) must haveSize(1)

          val companiesAndEmployeesWithSameName = join(companies, employees.leftOuter)((c, e) =>
            groupBy(c.id)
              compute (countDistinct(e.map(_.id)))
              on (c.name === e.map(_.name))
          )
          // There are three companies
          companiesAndEmployeesWithSameName must haveSize(3)
          // One company has the same name as an employee, two don't
          companiesAndEmployeesWithSameName.filter(ce => ce.measures == 0) must haveSize(2)

          // The remaining joins exercise outer-join conditions on each
          // supported field type: boolean, date, int, and enum.
          val employeesWithSameAdminSetting = join(employees, employees.leftOuter)((e1, e2) =>
            select(e1, e2)
              on (e1.admin === e2.map(_.admin))
          )
          employeesWithSameAdminSetting.foreach { ee =>
            ee._2 must not(beEmpty)
          }

          val companiesWithSameCreationDate = join(companies, companies.leftOuter)((c1, c2) =>
            select(c1, c2)
              on (c1.created === c2.map(_.created))
          )
          companiesWithSameCreationDate must not(beEmpty)

          val employeesWithSameDepartmentNumber = join(employees, employees.leftOuter)((e1, e2) =>
            select(e1, e2)
              on (e1.departmentNumber === e2.map(_.departmentNumber))
          )
          employeesWithSameDepartmentNumber must not(beEmpty)

          val employeesWithSameRoles = join(employees, employees.leftOuter)((e1, e2) =>
            select(e1, e2)
              on (e1.role === e2.map(_.role))
          )
          employeesWithSameRoles must not(beEmpty)
        }
      }
    }

    "support one to many relations" in {
      transaction {
        val company = companies.lookup(td.c1.id)
        company must beSome[Company]
        val employees = company.get.employees
        employees must haveSize(1)
        checkEmployeesEqual(td.e1, employees.head)
        // associate() on the relation must also set the foreign key.
        employees.associate(td.e3)
        td.e3.companyId.get must_== company.get.id
      }
    }

    "support many to many relations" in {
      transactionWithRollback {
        td.e1.rooms must haveSize(2)
        td.e2.rooms must beEmpty
        td.r1.employees must haveSize(1)
        td.r3.employees must beEmpty
        td.r3.employees.associate(td.e2)
        td.e2.rooms must haveSize(1)
      }
    }

    "support updates" in {
      val id = td.c1.id
      transactionWithRollback {
        S.initIfUninitted(session) {
          val company = companies.lookup(id).get
          company.name("New Name")
          company.postCode("11111")
          companies.update(company)
          val loaded = companies.lookup(id).get
          checkCompaniesEqual(company, loaded)
          // Partial update: only the name column is touched.
          update(companies)(c => where(c.id === id)
            set (c.name := "Name2"))
          val afterPartialUpdate = companies.lookup(id).get
          afterPartialUpdate.name.get must_== "Name2"
        }
      }
      // After rollback, the company should still be the same:
      transaction {
        S.initIfUninitted(session) {
          val company = companies.lookup(id).get
          checkCompaniesEqual(td.c1, company)
        }
      }
    }

    "support delete" in {
      transactionWithRollback {
        employees.delete(td.e2.id)
        employees.lookup(td.e2.id) must beNone
      }
    }

    "support select with properties of formerly fetched objects" in {
      transaction {
        S.initIfUninitted(session) {
          val company = companies.lookup(td.c2.id).head
          val employee = from(employees)(e =>
            where(e.companyId === company.idField) select (e)).head
          employee.id must_== td.e2.id
          val loadedCompanies = from(companies)(c =>
            where(c.created === company.created) select (c))
          loadedCompanies.size must beGreaterThanOrEqualTo(1)
        }
      }
    }

    "support many to many relations" >> {
      transactionWithRollback {
        td.e1.rooms must haveSize(2)
      }
    }

    "support date/time queries" >> {
      transaction {
        // Both Calendar and java.util.Date comparisons must work.
        val c1 = from(companies)(c =>
          where(c.created <= Calendar.getInstance)
            select (c))
        c1.size must beGreaterThan(1)
        val c2 = from(companies)(c =>
          where(c.created <= Calendar.getInstance.getTime)
            select (c))
        c2.size must beGreaterThan(1)
      }
    }

    "support inner queries" >> {
      import record.field._
      transaction {
        // Should work with the ID function (returns a long):
        val companyId: Long = from(companies)(c => where(c.id in
          from(companies)(c2 => where(c2.id === td.c1.id) select (c2.id)))
          select (c.id)).single
        companyId must_== td.c1.id

        // It should also be possible to select the ID field directly:
        val companyIdField: LongField[Company] = from(companies)(c => where(c.idField in
          from(companies)(c2 => where(c2.id === td.c1.id) select (c2.idField)))
          select (c.idField)).single
        companyIdField.get must_== td.c1.id

        // Strings should also be selectable in inner queries
        val companyIdByName: Long = from(companies)(c => where(c.name in
          from(companies)(c2 => where(c2.name === td.c1.name) select (c2.name)))
          select (c.id)).single
        companyIdByName must_== td.c1.id

        // ...And DateTime-Fields:
        val companyIdByCreated: DateTimeField[Company] = from(companies)(c => where(c.created in
          from(companies)(c2 => where(c2.id === td.c1.id) select (c2.created)))
          select (c.created)).single
        companyIdByCreated.get must_== td.c1.created.get

        // Decimal Fields:
        val empSalary: DecimalField[Employee] = from(employees)(e => where(e.salary in
          from(employees)(e2 => where(e2.id === td.e1.id) select (e2.salary)))
          select (e.salary)).single
        empSalary.get must_== td.e1.salary.get

        // Email fields:
        val empEmail: EmailField[Employee] = from(employees)(e => where(e.email in
          from(employees)(e2 => where(e2.id === td.e1.id) select (e2.email)))
          select (e.email)).single
        empSalary.get must_== td.e1.salary.get

        // Boolean fields:
        val empAdmin: BooleanField[Employee] = from(employees)(e => where(e.admin in
          from(employees)(e2 => where(e2.id === td.e2.id) select (e2.admin)))
          select (e.admin)).single
        empAdmin.get must_== td.e2.admin.get

        // Enum fields:
        val empRoleQuery = from(employees)(e => where(e.role in
          from(employees)(e2 => where(e2.id === td.e2.id) select (e2.role)))
          select (e.role.get))
        val empRole = empRoleQuery.single
        empRole must_== td.e2.role.get
      }
    }

    "support the CRUDify trait" >> {
      transaction {
        // Full create / read / update / delete cycle through the bridge API.
        val company = Company.create.name("CRUDify Company").
          created(Calendar.getInstance()).
          country(Countries.USA).postCode("90210")
        val bridge = Company.buildBridge(company)
        bridge.save
        val id = company.id
        company.isPersisted must_== true
        id must be_>(0l)
        company.postCode("10001")
        bridge.save
        val company2 = Company.findForParam(id.toString)
        company2.isDefined must_== true
        company2.foreach(c2 => {
          c2.postCode.get must_== "10001"
        })
        val allCompanies = Company.findForList(0, 1000)
        allCompanies.size must be_>(0)
        bridge.delete_!
        val allCompanies2 = Company.findForList(0, 1000)
        allCompanies2.size must_== (allCompanies.size - 1)
      }
    }

    "Support Optimistic Locking" >> {
      val company = Company.create.name("Optimistic Company").
        created(Calendar.getInstance()).
        country(Countries.USA).
        postCode("90210")
      //First insert the company in one transaction
      transaction {
        companies.insert(company)
      }
      //Retrieve and modify in another transaction
      // (done on a separate thread so it gets its own Squeryl session)
      val innerUpdate = new Thread(new Runnable {
        override def run() {
          transaction {
            val company2 = companies.lookup(company.id).get
            company2.created(Calendar.getInstance())
            companies.update(company2)
          }
        }
      })
      innerUpdate.start
      innerUpdate.join
      //Then in a third transaction, try to update the original object
      transaction {
        import org.squeryl.StaleUpdateException
        company.created(Calendar.getInstance())
        companies.update(company) must throwAn[StaleUpdateException]
      }
    }

    "Allow custom functions" in {
      inTransaction {
        val created =
          from(companies)(c =>
            where(c.name === "First Company USA")
              select (&(toChar(c.created, "EEE, d MMM yyyy")))
          )
        created.head must_== new SimpleDateFormat("EEE, d MMM yyyy").format(Calendar.getInstance().getTime())
      }
    }

    "Support precision and scale taken from DecimalTypedField" >> {
      // The generated column DDL must carry the precision/scale declared on
      // the record field, not a default.
      val posoMetaData = companies.posoMetaData
      val fieldMetaData = posoMetaData.findFieldMetaDataForProperty("employeeSatisfaction").get
      val columnDefinition = new PostgreSqlAdapter().writeColumnDeclaration(fieldMetaData, false, MySchema)
      columnDefinition.endsWith("numeric(" + Company.employeeSatisfaction.context.getPrecision() +"," + Company.employeeSatisfaction.scale + ")") must_== true
    }

    "Properly reset the dirty_? flag after loading entities" >> inTransaction {
      val company = from(companies)(company =>
        select(company)).page(0, 1).single
      company.allFields map { f => f.dirty_? must_== false }
      success
    }
  }

  // FORMATDATETIME exposed as a Squeryl custom function (presumably the
  // in-memory DB's date-format function — confirm against DBHelper).
  class ToChar(d: DateExpression[Timestamp], e: StringExpression[String], m: OutMapper[String])
    extends FunctionNode[String]("FORMATDATETIME", Some(m), Seq(d, e)) with StringExpression[String]

  def toChar(d: DateExpression[Timestamp], e: StringExpression[String])(implicit m: OutMapper[String]) = new ToChar(d, e, m)

  // Marker exception used by transactionWithRollback to abort the transaction.
  class TransactionRollbackException extends RuntimeException

  /**
   * Runs the given code in a transaction and rolls
   * back the transaction afterwards.
   */
  private def transactionWithRollback[T](code: => T): T = {
    // Throwing out of the transaction block makes Squeryl roll it back;
    // the marker exception is then swallowed here.
    def rollback: Unit = throw new TransactionRollbackException()
    var result: T = null.asInstanceOf[T]
    try {
      transaction {
        result = code
        rollback
      }
    } catch {
      case e: TransactionRollbackException => // OK, was rolled back
    }
    result
  }

  // Field-by-field equality check for Company records, including XHTML form.
  private def checkCompaniesEqual(c1: Company, c2: Company): Result = {
    val cmp = new RecordComparer[Company](c1, c2)
    cmp.check(_.idField)
    cmp.check(_.description)
    cmp.check(_.country)
    cmp.check(_.postCode)
    cmp.check(_.created)
    cmp.checkXHtml()
  }

  // Field-by-field equality check for Employee records; the binary photo
  // field is compared byte-wise at the end.
  private def checkEmployeesEqual(e1: Employee, e2: Employee): Result = {
    val cmp = new RecordComparer[Employee](e1, e2)
    cmp.check(_.name)
    cmp.check(_.companyId)
    cmp.check(_.email)
    cmp.check(_.salary)
    cmp.check(_.locale)
    cmp.check(_.timeZone)
    //cmp.check(_.password)
    cmp.check(_.admin)
    cmp.check(_.departmentNumber)
    cmp.check(_.role)

    // Photo must be checked separately
    e1.photo.get match {
      case Some(p) => {
        val p2 = e2.photo.get
        p2 must beSome[Array[Byte]]
        p2.get.size must_== p.size

        (0 until p.size) map { i =>
          p2.get(i) must_== p(i)
        }
      }
      case None => e2.photo.get must beNone
    }
  }

  // Compares a single field of two records by value and by field name.
  class RecordComparer[T <: Record[T]](val r1: T, val r2: T) {
    def check(fieldExtractor: (T) => BaseField): Result = {
      val f1 = fieldExtractor(r1)
      val f2 = fieldExtractor(r2)
      f1.get must_== f2.get
      f1.name must_== f2.name
    }

    def checkXHtml(): Result =
      r1.toXHtml must_== r2.toXHtml
  }
}
| lzpfmh/framework-2 | persistence/squeryl-record/src/test/scala/net/liftweb/squerylrecord/SquerylRecordSpec.scala | Scala | apache-2.0 | 15,684 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.rules.physical.stream
import org.apache.flink.table.connector.source.ScanTableSource
import org.apache.flink.table.planner.plan.`trait`.FlinkRelDistribution
import org.apache.flink.table.planner.plan.nodes.FlinkConventions
import org.apache.flink.table.planner.plan.nodes.logical.FlinkLogicalTableSourceScan
import org.apache.flink.table.planner.plan.nodes.physical.stream.{StreamPhysicalChangelogNormalize, StreamPhysicalTableSourceScan}
import org.apache.flink.table.planner.plan.schema.TableSourceTable
import org.apache.flink.table.planner.plan.utils.ScanUtil
import org.apache.flink.table.planner.sources.DynamicSourceUtils.{isSourceChangeEventsDuplicate, isUpsertSource}
import org.apache.flink.table.planner.utils.ShortcutUtils
import org.apache.calcite.plan.{RelOptRule, RelOptRuleCall, RelTraitSet}
import org.apache.calcite.rel.RelNode
import org.apache.calcite.rel.convert.ConverterRule
import org.apache.calcite.rel.core.TableScan
/**
* Rule that converts [[FlinkLogicalTableSourceScan]] to [[StreamPhysicalTableSourceScan]].
*
* <p>Depends whether this is a scan source, this rule will also generate
* [[StreamPhysicalChangelogNormalize]] to materialize the upsert stream.
*/
class StreamPhysicalTableSourceScanRule
  extends ConverterRule(
    classOf[FlinkLogicalTableSourceScan],
    FlinkConventions.LOGICAL,
    FlinkConventions.STREAM_PHYSICAL,
    "StreamPhysicalTableSourceScanRule") {

  /** Rule must only match if TableScan targets a [[ScanTableSource]] */
  override def matches(call: RelOptRuleCall): Boolean = {
    val scan: TableScan = call.rel(0).asInstanceOf[TableScan]
    val tableSourceTable = scan.getTable.unwrap(classOf[TableSourceTable])
    tableSourceTable match {
      case tst: TableSourceTable =>
        tst.tableSource match {
          case _: ScanTableSource => true
          case _ => false
        }
      case _ => false
    }
  }

  // Converts the logical scan to a physical one; for upsert sources (or
  // sources whose change events may be duplicated) it additionally inserts
  // a ChangelogNormalize node hash-distributed on the primary key.
  def convert(rel: RelNode): RelNode = {
    val scan = rel.asInstanceOf[FlinkLogicalTableSourceScan]
    val traitSet: RelTraitSet = rel.getTraitSet.replace(FlinkConventions.STREAM_PHYSICAL)
    val config = ShortcutUtils.unwrapContext(rel.getCluster).getTableConfig
    val table = scan.getTable.asInstanceOf[TableSourceTable]
    val newScan = new StreamPhysicalTableSourceScan(
      rel.getCluster,
      traitSet,
      table)
    if (isUpsertSource(table.catalogTable, table.tableSource) ||
        isSourceChangeEventsDuplicate(table.catalogTable, table.tableSource, config)) {
      // generate changelog normalize node
      // primary key has been validated in CatalogSourceTable
      val primaryKey = table.catalogTable.getSchema.getPrimaryKey.get()
      val keyFields = primaryKey.getColumns
      val inputFieldNames = newScan.getRowType.getFieldNames
      val primaryKeyIndices = ScanUtil.getPrimaryKeyIndices(inputFieldNames, keyFields)
      // Require strict hash distribution on the key so all changes for one
      // key are normalized by the same task.
      val requiredDistribution = FlinkRelDistribution.hash(primaryKeyIndices, requireStrict = true)
      val requiredTraitSet = rel.getCluster.getPlanner.emptyTraitSet()
        .replace(requiredDistribution)
        .replace(FlinkConventions.STREAM_PHYSICAL)
      val newInput: RelNode = RelOptRule.convert(newScan, requiredTraitSet)
      new StreamPhysicalChangelogNormalize(
        scan.getCluster,
        traitSet,
        newInput,
        primaryKeyIndices)
    } else {
      newScan
    }
  }
}
/** Companion holding the shared rule instance. */
object StreamPhysicalTableSourceScanRule {
  val INSTANCE = new StreamPhysicalTableSourceScanRule
}
| aljoscha/flink | flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/rules/physical/stream/StreamPhysicalTableSourceScanRule.scala | Scala | apache-2.0 | 4,313 |
import scala.collection.mutable._
/** Stackable Buffer mixin that wraps `insertAll` and `update` in
  * `synchronized` before delegating to the underlying implementation.
  */
trait SB[A] extends Buffer[A] {
  import collection.Traversable

  abstract override def insertAll(n: Int, iter: Traversable[A]): Unit = synchronized {
    super.insertAll(n, iter)
  }

  abstract override def update(n: Int, newelem: A): Unit = synchronized {
    super.update(n, newelem)
  }
}
/** Positive compile/run test: mixing SB into ArrayBuffer must instantiate. */
object Test extends dotty.runtime.LegacyApp {
  new ArrayBuffer[Int] with SB[Int]
}
| densh/dotty | tests/pos/t2503.scala | Scala | bsd-3-clause | 418 |
package com.sksamuel.elastic4s.searches.queries
import com.sksamuel.elastic4s.analyzers.Analyzer
import com.sksamuel.exts.OptionImplicits._
import org.elasticsearch.index.query.SimpleQueryStringFlag
/** Immutable builder for an Elasticsearch `simple_query_string` query.
  * Every builder method returns a modified copy of this definition.
  *
  * @param query the query text in simple query string syntax
  */
case class SimpleStringQueryDefinition(query: String,
                                       analyzer: Option[String] = None,
                                       analyzeWildcard: Option[Boolean] = None,
                                       operator: Option[String] = None,
                                       queryName: Option[String] = None,
                                       quote_field_suffix: Option[String] = None,
                                       lenient: Option[Boolean] = None,
                                       fields: Seq[(String, Double)] = Nil,
                                       flags: Seq[SimpleQueryStringFlag] = Nil,
                                       minimumShouldMatch: Option[Int] = None
                                      ) extends QueryDefinition {

  // Suffix appended to field names for quoted (exact-phrase) parts.
  def quoteFieldSuffix(suffix: String): SimpleStringQueryDefinition =
    copy(quote_field_suffix = Some(suffix))

  // Replaces the enabled operator flags wholesale.
  def flags(flags: SimpleQueryStringFlag*): SimpleStringQueryDefinition =
    copy(flags = flags)

  def analyzer(analyzer: String): SimpleStringQueryDefinition =
    copy(analyzer = Some(analyzer))

  def analyzer(analyzer: Analyzer): SimpleStringQueryDefinition =
    copy(analyzer = Some(analyzer.name))

  def queryName(queryName: String): SimpleStringQueryDefinition =
    copy(queryName = Some(queryName))

  def defaultOperator(op: String): SimpleStringQueryDefinition =
    copy(operator = Some(op))

  def lenient(lenient: Boolean): SimpleStringQueryDefinition =
    copy(lenient = Some(lenient))

  def minimumShouldMatch(minimumShouldMatch: Int): SimpleStringQueryDefinition =
    copy(minimumShouldMatch = Some(minimumShouldMatch))

  def analyzeWildcard(analyzeWildcard: Boolean): SimpleStringQueryDefinition =
    copy(analyzeWildcard = Some(analyzeWildcard))

  // Adds several fields at once, each with the "unset" boost of -1.
  def asfields(fields: String*): SimpleStringQueryDefinition =
    copy(fields = this.fields ++ fields.map(name => name -> -1D))

  // Adds a single field with the "unset" boost of -1.
  def field(name: String): SimpleStringQueryDefinition =
    copy(fields = fields :+ (name -> -1D))

  // Adds a single field with an explicit boost.
  def field(name: String, boost: Double): SimpleStringQueryDefinition =
    copy(fields = fields :+ (name -> boost))
}
| tyth/elastic4s | elastic4s-core/src/main/scala/com/sksamuel/elastic4s/searches/queries/SimpleStringQueryDefinition.scala | Scala | apache-2.0 | 2,282 |
package jp.mwsoft.wikipedia.totext
import java.io.BufferedInputStream
import java.io.Closeable
import java.io.FileInputStream
import scala.collection.mutable.ArrayBuffer
import scala.util.matching.Regex
import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream
import org.joda.time.format.DateTimeFormat
import javax.xml.stream.XMLEventReader
import javax.xml.stream.XMLInputFactory
import javax.xml.stream.XMLStreamConstants
import javax.xml.stream.events.XMLEvent
/** Mutable holder for one parsed Wikipedia page.
  *
  * Fields are populated incrementally by PageArticleParser as the XML
  * elements are read; sentinel values (-1 / null) mean "not yet set".
  */
case class PageArticle(
  var id: Long = -1,
  var title: String = null,
  var lastUpdate: Long = -1,
  var text: String = null,
  var categories: List[String] = null) {

  // Matches [[Category:Name|sortkey]] links (case-insensitive), capturing
  // the category name.
  // NOTE(review): the doubled backslashes inside this triple-quoted string
  // look like an escaping artifact (triple quotes do not process escapes,
  // so "\\[" matches a literal backslash) — confirm against the original
  // source; a wiki-text pattern would use single backslashes.
  val reCategory = new Regex("""\\[\\[(?i)Category\\:([^\\|]+)\\|?.*?]\\]""", "category")

  /** Stores the raw wiki text and extracts the category names line by line. */
  def setText(text: String) {
    this.text = text
    val categories = ArrayBuffer[String]()
    for (line <- text.split("\\n"); m <- reCategory.findFirstMatchIn(line)) {
      categories += m.group("category")
    }
    this.categories = categories.toList
  }
}
/**
* parse jawiki-latest-pages-articles.xml.bz2
*/
/**
 * parse jawiki-latest-pages-articles.xml.bz2
 *
 * Lazy, thread-safe pull parser: the bzip2 stream and the StAX reader are
 * opened on the first call to `next`, and one PageArticle is read ahead
 * into `nextElem`.
 */
class PageArticleParser(path: String) extends Iterator[PageArticle] with Closeable {

  val factory = XMLInputFactory.newInstance()
  var reader: XMLEventReader = null
  // Look-ahead element; null once the stream is exhausted.
  var nextElem: PageArticle = null

  // Before the first next() the reader is null, so hasNext is trivially true.
  def hasNext: Boolean = this.synchronized { reader == null || nextElem != null }

  def next: PageArticle = this.synchronized {
    if (reader == null) {
      // Lazily open the bzip2-compressed dump on first access.
      reader = factory.createXMLEventReader(
        new BZip2CompressorInputStream(new BufferedInputStream(new FileInputStream(path))));
      readElem()
    }
    val elem = nextElem
    readElem()
    elem
  }

  val dtFormat = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'Z'")
  // Tracks the enclosing element so that e.g. the page <id> is not confused
  // with revision/contributor <id> elements.
  var parentElem = ""

  // Recursively consumes events until a complete <page> has been read into
  // nextElem, or sets nextElem to null at end of stream.
  private def readElem(): Unit = if (reader.hasNext) {
    val event = reader.nextEvent()
    if (isStartElem(event, "page")) {
      parentElem = "page"
      nextElem = PageArticle()
    } else if (isStartElem(event, "id") && parentElem == "page") {
      nextElem.id = getText("id").toLong
    } else if (isStartElem(event, "title")) {
      nextElem.title = getText("title")
    } else if (isStartElem(event, "timestamp")) {
      nextElem.lastUpdate = dtFormat.parseMillis(getText("timestamp"))
    } else if (isStartElem(event, "text")) {
      nextElem.setText(getText("text"))
    } else if (isStartElem(event, "revision")) {
      parentElem = "revision"
    } else if (isEndElem(event, "revision")) {
      parentElem = "page"
    } else if (isStartElem(event, "contributor")) {
      parentElem = "contributor"
    } else if (isEndElem(event, "contributor")) {
      parentElem = "revision"
    }
    if (!isEndElem(event, "page")) readElem
  } else {
    nextElem = null
  }

  // Collects the character data up to the matching end tag of `name`.
  private def getText(name: String): String = {
    val builder = new StringBuilder();
    def getTextLoop: Unit = if (reader.hasNext) {
      val event = reader.nextEvent()
      if (event.getEventType() == XMLStreamConstants.CHARACTERS)
        builder.append(event.asCharacters.getData.trim)
      if (!isEndElem(event, name))
        getTextLoop
    }
    getTextLoop
    builder.toString
  }

  private def isStartElem(event: XMLEvent, name: String): Boolean = {
    event.getEventType() == XMLStreamConstants.START_ELEMENT &&
      name.equals(event.asStartElement.getName.getLocalPart)
  }

  private def isEndElem(event: XMLEvent, name: String): Boolean = {
    event.getEventType() == XMLStreamConstants.END_ELEMENT &&
      name.equals(event.asEndElement.getName.getLocalPart)
  }

  def close() {
    if (reader != null) reader.close()
  }
} | mwsoft/wikipedia2text | src/main/scala/jp/mwsoft/wikipedia/totext/PageArticleParser.scala | Scala | mit | 3,576 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import java.util.Locale
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.language.implicitConversions
import org.apache.commons.lang.StringUtils
import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
import org.apache.spark.sql.catalyst.catalog.CatalogTable
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.execution.command.{TableModel, TableNewProcessor}
import org.apache.spark.sql.execution.strategy.CarbonLateDecodeStrategy
import org.apache.spark.sql.execution.streaming.Sink
import org.apache.spark.sql.hive.CarbonMetaStore
import org.apache.spark.sql.optimizer.CarbonLateDecodeRule
import org.apache.spark.sql.parser.CarbonSpark2SqlParser
import org.apache.spark.sql.sources._
import org.apache.spark.sql.streaming.OutputMode
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.util.CarbonException
import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException
import org.apache.carbondata.common.logging.LogServiceFactory
import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier
import org.apache.carbondata.core.metadata.schema.SchemaEvolutionEntry
import org.apache.carbondata.core.metadata.schema.table.TableInfo
import org.apache.carbondata.core.util.{CarbonProperties, CarbonUtil}
import org.apache.carbondata.hadoop.util.CarbonInputFormatUtil
import org.apache.carbondata.spark.CarbonOption
import org.apache.carbondata.spark.util.CarbonScalaUtil
import org.apache.carbondata.streaming.{CarbonStreamException, CarbonStreamingQueryListener, StreamSinkFactory}
/**
* Carbon relation provider compliant to data source api.
* Creates carbon relations
*/
class CarbonSource extends CreatableRelationProvider with RelationProvider
  with SchemaRelationProvider with StreamSinkProvider with DataSourceRegister {

  override def shortName(): String = "carbondata"

  private val LOGGER = LogServiceFactory.getLogService(CarbonSource.getClass.getName)

  // will be called if hive supported create table command is provided
  override def createRelation(sqlContext: SQLContext,
      parameters: Map[String, String]): BaseRelation = {
    CarbonEnv.getInstance(sqlContext.sparkSession)
    // if path is provided we can directly create Hadoop relation. \\
    // Otherwise create datasource relation
    val newParameters = CarbonScalaUtil.getDeserializedParameters(parameters)
    newParameters.get("tablePath") match {
      case Some(path) => CarbonDatasourceHadoopRelation(sqlContext.sparkSession,
        Array(path),
        newParameters,
        None)
      case _ =>
        // No explicit path: resolve it from the db/table name options.
        val options = new CarbonOption(newParameters)
        val tablePath =
          CarbonEnv.getTablePath(options.dbName, options.tableName)(sqlContext.sparkSession)
        CarbonDatasourceHadoopRelation(sqlContext.sparkSession,
          Array(tablePath),
          newParameters,
          None)
    }
  }

  // called by any write operation like INSERT INTO DDL or DataFrame.write API
  override def createRelation(
      sqlContext: SQLContext,
      mode: SaveMode,
      parameters: Map[String, String],
      data: DataFrame): BaseRelation = {
    CarbonEnv.getInstance(sqlContext.sparkSession)
    var newParameters = CarbonScalaUtil.getDeserializedParameters(parameters)
    val options = new CarbonOption(newParameters)
    val isExists = CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.tableExists(
      options.tableName, options.dbName)(sqlContext.sparkSession)
    // Map (SaveMode, table-exists) onto the two possible write actions.
    val (doSave, doAppend) = (mode, isExists) match {
      case (SaveMode.ErrorIfExists, true) =>
        CarbonException.analysisException(s"table path already exists.")
      case (SaveMode.Overwrite, true) =>
        newParameters += (("overwrite", "true"))
        (true, false)
      case (SaveMode.Overwrite, false) | (SaveMode.ErrorIfExists, false) =>
        newParameters += (("overwrite", "true"))
        (true, false)
      case (SaveMode.Append, _) =>
        (false, true)
      case (SaveMode.Ignore, exists) =>
        // Ignore: only write when the table does not exist yet.
        (!exists, false)
    }
    if (doSave) {
      // save data when the save mode is Overwrite.
      new CarbonDataFrameWriter(sqlContext, data).saveAsCarbonFile(
        CaseInsensitiveMap[String](newParameters))
    } else if (doAppend) {
      new CarbonDataFrameWriter(sqlContext, data).appendToCarbonFile(
        CaseInsensitiveMap[String](newParameters))
    }
    createRelation(sqlContext, newParameters, data.schema)
  }

  // called by DDL operation with a USING clause
  override def createRelation(
      sqlContext: SQLContext,
      parameters: Map[String, String],
      dataSchema: StructType): BaseRelation = {
    CarbonEnv.getInstance(sqlContext.sparkSession)
    addLateDecodeOptimization(sqlContext.sparkSession)
    val newParameters =
      CaseInsensitiveMap[String](CarbonScalaUtil.getDeserializedParameters(parameters))
    val dbName: String =
      CarbonEnv.getDatabaseName(newParameters.get("dbName"))(sqlContext.sparkSession)
    val tableOption: Option[String] = newParameters.get("tableName")
    if (tableOption.isEmpty) {
      CarbonException.analysisException("Table creation failed. Table name is not specified")
    }
    val tableName = tableOption.get.toLowerCase()
    if (tableName.contains(" ")) {
      CarbonException.analysisException(
        "Table creation failed. Table name cannot contain blank space")
    }
    // Reuse the existing table if the catalog already knows it; otherwise
    // create it first.
    val (path, updatedParams) = if (sqlContext.sparkSession.sessionState.catalog.listTables(dbName)
      .exists(_.table.equalsIgnoreCase(tableName))) {
      getPathForTable(sqlContext.sparkSession, dbName, tableName, newParameters)
    } else {
      createTableIfNotExists(sqlContext.sparkSession, dbName, tableName, newParameters, dataSchema)
    }

    CarbonDatasourceHadoopRelation(sqlContext.sparkSession, Array(path), updatedParams,
      Option(dataSchema))
  }

  // Registers Carbon's late-decode strategy/rule once per session.
  private def addLateDecodeOptimization(ss: SparkSession): Unit = {
    if (ss.sessionState.experimentalMethods.extraStrategies.isEmpty) {
      ss.sessionState.experimentalMethods.extraStrategies = Seq(new CarbonLateDecodeStrategy)
      ss.sessionState.experimentalMethods.extraOptimizations = Seq(new CarbonLateDecodeRule)
    }
  }

  // Looks up the table in the metastore; if it is missing, registers it with
  // the given schema and default properties. Returns (tablePath, params).
  private def createTableIfNotExists(
      sparkSession: SparkSession,
      dbName: String,
      tableName: String,
      parameters: Map[String, String],
      dataSchema: StructType): (String, Map[String, String]) = {

    val dbName: String = CarbonEnv.getDatabaseName(parameters.get("dbName"))(sparkSession)
    val tableName: String = parameters.getOrElse("tableName", "").toLowerCase

    try {
      if (!(parameters.contains("carbonSchemaPartsNo")
        || parameters.contains("carbonschemapartsno"))) {
        val carbonTable = CarbonEnv.getCarbonTable(Some(dbName), tableName)(sparkSession)
        (carbonTable.getTablePath, parameters)
      } else {
        (getPathForTable(sparkSession, dbName, tableName, parameters))
      }
    } catch {
      case _: NoSuchTableException =>
        LOGGER.warn("Carbon Table [" +dbName +"] [" +tableName +"] is not found, " +
                    "Now existing Schema will be overwritten with default properties")
        val metaStore = CarbonEnv.getInstance(sparkSession).carbonMetaStore
        val identifier = AbsoluteTableIdentifier.from(
          CarbonEnv.getTablePath(Some(dbName), tableName)(sparkSession),
          dbName,
          tableName)
        val updatedParams = CarbonSource.updateAndCreateTable(
          identifier, dataSchema, sparkSession, metaStore, parameters, None)
        (CarbonEnv.getTablePath(Some(dbName), tableName)(sparkSession), updatedParams)
      case ex: Exception =>
        throw new Exception("do not have dbname and tablename for carbon table", ex)
    }
  }

  /**
   * Returns the path of the table
   *
   * @param sparkSession
   * @param dbName
   * @param tableName
   * @return
   */
  private def getPathForTable(sparkSession: SparkSession, dbName: String,
      tableName : String, parameters: Map[String, String]): (String, Map[String, String]) = {

    if (StringUtils.isBlank(tableName)) {
      throw new MalformedCarbonCommandException("The Specified Table Name is Blank")
    }
    if (tableName.contains(" ")) {
      throw new MalformedCarbonCommandException("Table Name Should not have spaces ")
    }
    try {
      if (parameters.contains("tablePath")) {
        (parameters("tablePath"), parameters)
      } else if (!sparkSession.isInstanceOf[CarbonSession]) {
        // Plain SparkSession: derive the path from the configured store path.
        (CarbonProperties.getStorePath + "/" + dbName + "/" + tableName, parameters)
      } else {
        (CarbonEnv.getTablePath(Some(dbName), tableName)(sparkSession), parameters)
      }
    } catch {
      case ex: Exception =>
        throw new Exception(s"Do not have $dbName and $tableName", ex)
    }
  }

  /**
   * produce a streaming `Sink` for a specific format
   * now it will create a default sink(CarbonAppendableStreamSink) for row format
   */
  override def createSink(sqlContext: SQLContext,
      parameters: Map[String, String],
      partitionColumns: Seq[String],
      outputMode: OutputMode): Sink = {

    // check "tablePath" option
    val options = new CarbonOption(parameters)
    val dbName = CarbonEnv.getDatabaseName(options.dbName)(sqlContext.sparkSession)
    val tableName = options.tableName
    if (tableName.contains(" ")) {
      throw new CarbonStreamException("Table creation failed. Table name cannot contain blank " +
                                      "space")
    }
    val sparkSession = sqlContext.sparkSession
    val carbonTable = CarbonEnv.getCarbonTable(Some(dbName), tableName)(sparkSession)
    if (!carbonTable.isStreamingSink) {
      throw new CarbonStreamException(s"Table ${carbonTable.getDatabaseName}." +
                                      s"${carbonTable.getTableName} is not a streaming table")
    }

    // CarbonSession has added CarbonStreamingQueryListener during the initialization.
    // But other SparkSessions didn't, so here will add the listener once.
    // (double-checked against the per-session map under `synchronized`)
    if (!"CarbonSession".equals(sparkSession.getClass.getSimpleName)) {
      if (CarbonSource.listenerAdded.get(sparkSession.hashCode()).isEmpty) {
        synchronized {
          if (CarbonSource.listenerAdded.get(sparkSession.hashCode()).isEmpty) {
            sparkSession.streams.addListener(new CarbonStreamingQueryListener(sparkSession))
            CarbonSource.listenerAdded.put(sparkSession.hashCode(), true)
          }
        }
      }
    }

    // create sink
    StreamSinkFactory.createStreamTableSink(
      sqlContext.sparkSession,
      sqlContext.sparkSession.sessionState.newHadoopConf(),
      carbonTable,
      parameters)
  }
}
object CarbonSource {

  // Tracks, per SparkSession (keyed by hashCode), whether a
  // CarbonStreamingQueryListener was already registered by createSink.
  // NOTE(review): mutable.HashMap is not thread-safe; see the hedged
  // concurrency note at its usage site in createSink.
  lazy val listenerAdded = new mutable.HashMap[Int, Boolean]()

  /**
   * Builds a TableModel from datasource options and a schema.
   *
   * When `query` is present this is a CTAS flow: the schema must be empty and
   * the fields are derived from the query's resolved schema instead.
   *
   * @throws AnalysisException when a schema is supplied together with a CTAS query
   */
  def createTableInfoFromParams(
      parameters: Map[String, String],
      dataSchema: StructType,
      identifier: AbsoluteTableIdentifier,
      query: Option[LogicalPlan],
      sparkSession: SparkSession): TableModel = {
    val sqlParser = new CarbonSpark2SqlParser
    // Option values are normalized to lower case before parsing.
    val map = scala.collection.mutable.Map[String, String]()
    parameters.foreach { case (key, value) => map.put(key, value.toLowerCase()) }
    val options = new CarbonOption(parameters)
    val fields = query match {
      case Some(q) =>
        // if query is provided then it is a CTAS flow
        if (sqlParser.getFields(dataSchema).nonEmpty) {
          throw new AnalysisException(
            "Schema cannot be specified in a Create Table As Select (CTAS) statement")
        }
        sqlParser
          .getFields(CarbonEnv.getInstance(sparkSession).carbonMetaStore
            .getSchemaFromUnresolvedRelation(sparkSession, q))
      case None =>
        sqlParser.getFields(dataSchema)
    }
    val bucketFields = sqlParser.getBucketFields(map, fields, options)
    sqlParser.prepareTableModel(ifNotExistPresent = false, Option(identifier.getDatabaseName),
      identifier.getTableName, fields, Nil, map, bucketFields)
  }

  /**
   * Update spark catalog table with schema information in case of schema storage is hive metastore
   *
   * When the options do not yet carry a serialized carbon schema
   * ("carbonSchemaPartsNo"), the table is created/updated and the storage
   * format is rewritten with the resulting options; otherwise the already
   * embedded schema is persisted (or stripped) depending on the metastore mode.
   *
   * @param tableDesc    catalog table to update
   * @param sparkSession active session
   * @param query        optional CTAS query
   * @return the (possibly updated) catalog table
   */
  def updateCatalogTableWithCarbonSchema(
      tableDesc: CatalogTable,
      sparkSession: SparkSession,
      query: Option[LogicalPlan] = None): CatalogTable = {
    val metaStore = CarbonEnv.getInstance(sparkSession).carbonMetaStore
    val storageFormat = tableDesc.storage
    val properties = storageFormat.properties
    if (!properties.contains("carbonSchemaPartsNo")) {
      val tablePath = CarbonEnv.getTablePath(
        tableDesc.identifier.database, tableDesc.identifier.table)(sparkSession)
      val dbName = CarbonEnv.getDatabaseName(tableDesc.identifier.database)(sparkSession)
      val identifier = AbsoluteTableIdentifier.from(tablePath, dbName, tableDesc.identifier.table)
      val map = updateAndCreateTable(
        identifier,
        tableDesc.schema,
        sparkSession,
        metaStore,
        properties,
        query)
      // updating params
      val updatedFormat = CarbonToSparkAdapter
        .getUpdatedStorageFormat(storageFormat, map, tablePath)
      tableDesc.copy(storage = updatedFormat)
    } else {
      // Schema already embedded in the options: deserialize it back.
      val tableInfo = CarbonUtil.convertGsonToTableInfo(properties.asJava)
      val isTransactionalTable = properties.getOrElse("isTransactional", "true").contains("true")
      tableInfo.setTransactionalTable(isTransactionalTable)
      if (isTransactionalTable && !metaStore.isReadFromHiveMetaStore) {
        // save to disk
        metaStore.saveToDisk(tableInfo, properties("tablePath"))
        // remove schema string from map as we don't store carbon schema to hive metastore
        val map = CarbonUtil.removeSchemaFromMap(properties.asJava)
        val updatedFormat = storageFormat.copy(properties = map.asScala.toMap)
        tableDesc.copy(storage = updatedFormat)
      } else {
        tableDesc
      }
    }
  }

  /**
   * Creates the carbon TableInfo from the given parameters, persists it (to
   * disk or into the returned option map depending on the metastore mode),
   * and returns the updated datasource options including "tablepath",
   * "dbname" and "tableName".
   */
  def updateAndCreateTable(
      identifier: AbsoluteTableIdentifier,
      dataSchema: StructType,
      sparkSession: SparkSession,
      metaStore: CarbonMetaStore,
      properties: Map[String, String],
      query: Option[LogicalPlan]): Map[String, String] = {
    val model = createTableInfoFromParams(properties, dataSchema, identifier, query, sparkSession)
    val tableInfo: TableInfo = TableNewProcessor(model)
    val isTransactionalTable = properties.getOrElse("isTransactional", "true").contains("true")
    tableInfo.setTablePath(identifier.getTablePath)
    tableInfo.setTransactionalTable(isTransactionalTable)
    tableInfo.setDatabaseName(identifier.getDatabaseName)
    // Record a schema-evolution entry stamped with the table's last update time.
    val schemaEvolutionEntry = new SchemaEvolutionEntry
    schemaEvolutionEntry.setTimeStamp(tableInfo.getLastUpdatedTime)
    tableInfo.getFactTable.getSchemaEvolution.getSchemaEvolutionEntryList.add(schemaEvolutionEntry)
    val map = if (!metaStore.isReadFromHiveMetaStore && isTransactionalTable) {
      // Schema lives on disk; options need not embed it.
      metaStore.saveToDisk(tableInfo, identifier.getTablePath)
      new java.util.HashMap[String, String]()
    } else {
      CarbonUtil.convertToMultiStringMap(tableInfo)
    }
    properties.foreach(e => map.put(e._1, e._2))
    map.put("tablepath", identifier.getTablePath)
    map.put("dbname", identifier.getDatabaseName)
    if (map.containsKey("tableName")) {
      val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
      LOGGER.warn("tableName is not required in options, ignoring it")
    }
    map.put("tableName", identifier.getTableName)
    map.asScala.toMap
  }
}
/**
 * Code ported from Apache Spark.
 * Builds a map in which keys are case insensitive. The original map is kept
 * accessible (as `originalMap`) for cases where case-sensitive information is
 * required. Avoid wrapping a CaseInsensitiveMap inside another
 * CaseInsensitiveMap: nesting would make the keys of the original map
 * case-insensitive as well.
 */
case class CaseInsensitiveMap[T] (originalMap: Map[String, T]) extends Map[String, T]
  with Serializable {

  // Lookup table with keys normalized to lower case (root locale). If two
  // original keys differ only by case, the later entry wins here.
  val keyLowerCasedMap = originalMap.map { case (k, v) => (k.toLowerCase(Locale.ROOT), v) }

  /** Case-insensitive lookup. */
  override def get(k: String): Option[T] =
    keyLowerCasedMap.get(k.toLowerCase(Locale.ROOT))

  /** Case-insensitive membership test. */
  override def contains(k: String): Boolean =
    keyLowerCasedMap.contains(k.toLowerCase(Locale.ROOT))

  /** Adds an entry; the original (case-preserving) map carries the new pair. */
  override def +[B1 >: T](kv: (String, B1)): Map[String, B1] =
    CaseInsensitiveMap(originalMap + kv)

  /** Iterates over the lower-cased view of the entries. */
  override def iterator: Iterator[(String, T)] = keyLowerCasedMap.iterator

  /** Removes every original entry whose key matches ignoring case. */
  override def -(key: String): Map[String, T] =
    CaseInsensitiveMap(originalMap.filterKeys(k => !k.equalsIgnoreCase(key)))
}
| manishgupta88/carbondata | integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSource.scala | Scala | apache-2.0 | 17,488 |
package sel
import leon._
import mem._
import higherorder._
import lang._
import annotation._
import instrumentation._
import invariant._
import collection._
/**
 * A lazy selection sorting algorithm that allows accessing the kth minimum
 * in O(k.n) time, where `n` is the number of elements in the list.
 * See file BottomUpMergeSort for a more optimal algorithm for accessing
 * the kth element.
 *
 * NOTE(review): this is a Leon/Stainless verification benchmark, not plain
 * Scala. `ensuring`, the hole constant `?`, the `alloc` allocation counter
 * and the postfix `*` (cached-value access) come from the Leon imports above.
 **/
object LazySelectionSort {

  /** A lazily-tailed list: the tail is suspended inside a `Stream`. */
  sealed abstract class LList {
    // Number of elements; the postcondition guarantees non-negativity.
    def size: BigInt = {
      this match {
        case SCons(_, t) => 1 + t.size
        case _ => BigInt(0)
      }
    } ensuring (_ >= 0)
  }
  private case class SCons(x: BigInt, tailFun: Stream) extends LList
  private case class SNil() extends LList

  /** A memoized suspension of an LList (forced at most once via `lazy val`). */
  private case class Stream(lfun: () => LList) {
    lazy val list: LList = lfun()
    @inline
    def size = (list*).size  // `list*` reads the cached value (Leon notation)
  }

  /**
   * Moves the minimum of `l` to the head, keeping the remaining elements'
   * relative order per the recursion. Size-preserving; allocation is linear
   * in the list size (per the `alloc` bound).
   */
  def pullMin(l: List[BigInt]): List[BigInt] = {
    l match {
      case Nil() => l
      case Cons(x, xs) =>
        pullMin(xs) match {
          case Nil() => Cons(x, Nil())
          case nxs @ Cons(y, ys) =>
            if (x <= y) Cons(x, nxs)
            else Cons(y, Cons(x, ys))
        }
    }
  } ensuring (res => res.size == l.size && alloc <= ? * l.size + ?)

  /**
   * Lazy selection sort: extracts the minimum eagerly and suspends sorting
   * of the rest until the tail is actually demanded.
   */
  def sort(l: List[BigInt]): LList = {
    pullMin(l) match {
      case Cons(x, xs) =>
        // here, x is the minimum
        SCons(x, Stream(() => sort(xs))) // sorts lazily only if needed
      case _ =>
        SNil()
    }
  } ensuring (res => res.size == l.size && alloc <= ? * l.size + ?)

  // a lazy concat method
  // NOTE(review): per the visible code, when `l1` is Nil the result is SNil
  // and `l2` is dropped — the tail only ever chains back into concat.
  def concat(l1: List[BigInt], l2: LList) : LList = {
    l1 match {
      case Cons(x, xs) => SCons(x, Stream(() => concat(xs, l2)))
      case Nil() => SNil()
    }
  } ensuring(res => alloc <= ?)

  // k-th min accompanying the lazy selection sort
  // Forces at most k cells of the lazy list; returns 0 when k exceeds the size.
  def kthMin(l: Stream, k: BigInt): BigInt = {
    require(k >= 1)
    l.list match {
      case SCons(x, xs) =>
        if (k == 1) x
        else
          kthMin(xs, k - 1)
      case SNil() => BigInt(0)
    }
  } ensuring (_ => alloc <= ? * (k * l.size) + ? * k + ?)
}
| regb/leon | testcases/benchmarks/alloc/LazySelectionSort.scala | Scala | gpl-3.0 | 2,084 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package org.apache.toree.kernel.protocol.v5.content
import org.scalatest.{FunSpec, Matchers}
import play.api.data.validation.ValidationError
import play.api.libs.json._
import org.apache.toree.kernel.protocol.v5._
/**
 * Verifies the Play JSON (de)serialization round trip for ExecuteReplyOk.
 */
class ExecuteReplyOkSpec extends FunSpec with Matchers {
  // Canonical wire form of an "ok" execute_reply message.
  val executeReplyOkJson: JsValue = Json.parse("""
  {
    "status": "ok",
    "execution_count": 999,
    "payload": [],
    "user_expressions": {}
  }
  """)

  // The equivalent in-memory instance.
  val executeReplyOk: ExecuteReplyOk = ExecuteReplyOk(
    999, Some(Payloads()), Some(UserExpressions())
  )

  describe("ExecuteReplyOk") {
    describe("implicit conversions") {
      it("should implicitly convert from valid json to a executeReplyOk instance") {
        // `as` is the least safe conversion: it throws when parsing fails.
        executeReplyOkJson.as[ExecuteReplyOk] shouldBe executeReplyOk
      }

      it("should also work with asOpt") {
        // `asOpt` is safer but loses the error detail (None on failure).
        executeReplyOkJson.asOpt[ExecuteReplyOk].get shouldBe executeReplyOk
      }

      it("should also work with validate") {
        // `validate` is safest: it accumulates all errors, not just the first.
        val validationResults = executeReplyOkJson.validate[ExecuteReplyOk]
        validationResults.fold(
          (invalid: Seq[(JsPath, Seq[ValidationError])]) => println("Failed!"),
          (valid: ExecuteReplyOk) => valid
        ) shouldBe executeReplyOk
      }

      it("should implicitly convert from a executeReplyOk instance to valid json") {
        Json.toJson(executeReplyOk) shouldBe executeReplyOkJson
      }
    }
  }
}
| hmost1/incubator-toree | protocol/src/test/scala/org/apache/toree/kernel/protocol/v5/content/ExecuteReplyOkSpec.scala | Scala | apache-2.0 | 2,585 |
package scavlink.link
import akka.actor.ActorSystem
import akka.testkit.{TestKit, TestProbe}
import org.scalatest.{BeforeAndAfterAll, GivenWhenThen, Matchers, WordSpecLike}
import scavlink.EventMatcher
import scavlink.connection.frame.LostPackets
import scavlink.message.{VehicleId, Packet}
import scavlink.message.common._
import scala.language.reflectiveCalls
/**
 * Specification for LinkEventBus: verifies that each subscription matcher
 * (all events, specific events, message classes, per-component messages,
 * and their "except" variants) delivers exactly the expected events, and
 * that unsubscribing — per matcher or wholesale — stops delivery.
 *
 * NOTE(review): BeforeAndAfterAll is mixed in but no afterAll shuts down
 * the ActorSystem — confirm whether the system is intentionally leaked.
 */
class LinkEventBusSpec(_system: ActorSystem) extends TestKit(_system)
  with WordSpecLike with Matchers with BeforeAndAfterAll with GivenWhenThen {

  // Marker event matched by no message-based classifier; used to probe
  // catch-all vs. selective subscriptions.
  case object TestEvent extends LinkEvent

  def this() = this(ActorSystem("LinkEventBusSpec"))

  val id = VehicleId("spec")

  // Fresh probe + bus per test case so subscriptions don't leak between tests.
  // (Structural type: accessed reflectively, hence the reflectiveCalls import.)
  def fixture = new {
    val probe = new TestProbe(system)
    val bus = new LinkEventBus
  }

  "A LinkEventBus" when {
    "subscribed with SubscribeTo.all()" should {
      "pass all received events" in {
        val f = fixture
        val probe = f.probe
        val bus = f.bus

        Given("an actor subscribed to all bus events")
        bus.subscribeToAll(probe.ref)

        When("SentPacket is published")
        bus.publish(SentPacket(Packet(id, 1, 1, SystemTime())))
        Then("the SentPacket event is received")
        probe.expectMsg(SentPacket(Packet(id, 1, 1, SystemTime())))

        When("Packet is published")
        bus.publish(Packet(id, 147, 223, Heartbeat()))
        Then("the Packet event is received")
        probe.expectMsg(Packet(id, 147, 223, Heartbeat()))

        When("two events are published")
        bus.publish(Packet(id, 2, 2, AuthKey()))
        bus.publish(ReceiveError(LostPackets(4)))
        Then("both events are received")
        probe.expectMsg(Packet(id, 2, 2, AuthKey()))
        probe.expectMsg(ReceiveError(LostPackets(4)))
      }
    }

    "called with SubscribeTo.event()" should {
      "pass only the specified event" in {
        val f = fixture
        val probe = f.probe
        val bus = f.bus

        Given("an actor subscribed to one type of event")
        bus.subscribe(probe.ref, SubscribeTo.event(classOf[Packet]))

        When("that event type is published")
        bus.publish(Packet(id, 33, 55, Heartbeat()))
        Then("the event is received")
        probe.expectMsg(Packet(id, 33, 55, Heartbeat()))

        When("some other event type is published")
        bus.publish(TestEvent)
        Then("that event is not received")
        probe.expectNoMsg()
      }
    }

    "subscribed with SubscribeTo.events()" should {
      "pass any of the specified events" in {
        val f = fixture
        val probe = f.probe
        val bus = f.bus

        Given("an actor subscribed to several event types")
        bus.subscribe(probe.ref, SubscribeTo.events(classOf[Packet], classOf[SentPacket]))

        When("one of those events type is published")
        bus.publish(Packet(id, 33, 55, Heartbeat()))
        bus.publish(SentPacket(Packet(id, 88, 222, AuthKey())))
        Then("the event is received")
        probe.expectMsg(Packet(id, 33, 55, Heartbeat()))
        probe.expectMsg(SentPacket(Packet(id, 88, 222, AuthKey())))

        When("some other event type is published")
        bus.publish(ReceiveError(LostPackets(200)))
        Then("that event is not received")
        probe.expectNoMsg()
      }
    }

    "subscribed with SubscribeTo.eventsExcept()" should {
      "pass any event except the specified ones" in {
        val f = fixture
        val probe = f.probe
        val bus = f.bus

        Given("an actor subscribed to all events except certain types")
        bus.subscribe(probe.ref, SubscribeTo.eventsExcept(classOf[SentPacket]))

        When("one of those event types is published")
        bus.publish(SentPacket(Packet(id, 88, 222, AuthKey())))
        Then("that event is not received")
        probe.expectNoMsg()

        When("other events are published")
        bus.publish(Packet(id, 33, 55, Heartbeat()))
        bus.publish(ReceiveError(LostPackets(200)))
        bus.publish(TestEvent)
        Then("those events are received")
        probe.expectMsg(Packet(id, 33, 55, Heartbeat()))
        probe.expectMsg(ReceiveError(LostPackets(200)))
        probe.expectMsg(TestEvent)
      }
    }

    "subscribed with SubscribeTo.allMessages()" should {
      "pass through all received messages" in {
        val f = fixture
        val probe = f.probe
        val bus = f.bus

        Given("an actor subscribed to all messages")
        bus.subscribe(probe.ref, SubscribeTo.allMessages)

        When("any message is published")
        bus.publish(Packet(id, 63, 11, ParamRequestList()))
        bus.publish(Packet(id, 22, 245, RawImu()))
        Then("that event is received")
        probe.expectMsg(Packet(id, 63, 11, ParamRequestList()))
        probe.expectMsg(Packet(id, 22, 245, RawImu()))

        When("any other event is published")
        bus.publish(SentPacket(Packet(id, 33, 55, Heartbeat())))
        bus.publish(ReceiveError(LostPackets(200)))
        bus.publish(TestEvent)
        Then("those events are not received")
        probe.expectNoMsg()
      }
    }

    "subscribed with SubscribeTo.message() with no component" should {
      "pass the specified message for all components" in {
        val f = fixture
        val probe = f.probe
        val bus = f.bus

        Given("an actor subscribed to one message")
        bus.subscribe(probe.ref, SubscribeTo.message(classOf[Heartbeat]))

        When("that message is published")
        bus.publish(Packet(id, 63, 11, Heartbeat()))
        bus.publish(Packet(id, 22, 245, Heartbeat()))
        Then("that event is received")
        probe.expectMsg(Packet(id, 63, 11, Heartbeat()))
        probe.expectMsg(Packet(id, 22, 245, Heartbeat()))

        When("any other message is published")
        bus.publish(Packet(id, 63, 11, ParamRequestList()))
        bus.publish(Packet(id, 22, 245, RawImu()))
        Then("those events are not received")
        probe.expectNoMsg()
      }
    }

    "subscribed with SubscribeTo.messages() with no component" should {
      "pass any of the messages for all components" in {
        val f = fixture
        val probe = f.probe
        val bus = f.bus

        Given("an actor subscribed to multiple messages")
        bus.subscribe(probe.ref, SubscribeTo.messages(classOf[Heartbeat], classOf[SysStatus]))

        When("any of those messages is published")
        bus.publish(Packet(id, 44, 55, Heartbeat()))
        bus.publish(Packet(id, 99, 111, SysStatus()))
        Then("those events are received")
        probe.expectMsg(Packet(id, 44, 55, Heartbeat()))
        probe.expectMsg(Packet(id, 99, 111, SysStatus()))

        When("any other message is published")
        bus.publish(Packet(id, 63, 11, ParamRequestList()))
        bus.publish(Packet(id, 22, 245, RawImu()))
        Then("those events are not received")
        probe.expectNoMsg()
      }
    }

    "subscribed with SubscribeTo.messagesExcept() with no component" should {
      "pass any message except those specified" in {
        val f = fixture
        val probe = f.probe
        val bus = f.bus

        Given("an actor subscribed to messages except those specified")
        bus.subscribe(probe.ref, SubscribeTo.messagesExcept(classOf[Heartbeat], classOf[SysStatus]))

        When("other messages are published")
        bus.publish(Packet(id, 63, 11, ParamRequestList()))
        bus.publish(Packet(id, 22, 245, RawImu()))
        Then("those events are received")
        probe.expectMsg(Packet(id, 63, 11, ParamRequestList()))
        probe.expectMsg(Packet(id, 22, 245, RawImu()))

        When("the specified messages are published")
        bus.publish(Packet(id, 44, 55, Heartbeat()))
        bus.publish(Packet(id, 99, 111, SysStatus()))
        Then("those events are not received")
        probe.expectNoMsg()
      }
    }

    "subscribed with SubscribeTo.message(component,...)" should {
      "pass the message for the specified component only" in {
        val f = fixture
        val probe = f.probe
        val bus = f.bus

        Given("an actor subscribed to one message for a specific component")
        bus.subscribe(probe.ref, SubscribeTo.messageFrom(44, 55, classOf[Heartbeat]))

        When("that message + the component is published")
        bus.publish(Packet(id, 44, 55, Heartbeat()))
        Then("that event is received")
        probe.expectMsg(Packet(id, 44, 55, Heartbeat()))

        When("any other component is published with the same message")
        bus.publish(Packet(id, 63, 11, Heartbeat()))
        bus.publish(Packet(id, 22, 245, Heartbeat()))
        Then("those events are not received")
        probe.expectNoMsg()
      }
    }

    "subscribed with SubscribeTo.messages(component,...)" should {
      "pass any of the messages for the specified component only" in {
        val f = fixture
        val probe = f.probe
        val bus = f.bus

        Given("an actor subscribed to multiple messages for a specific component")
        bus.subscribe(probe.ref, SubscribeTo.messagesFrom(44, 55, classOf[Heartbeat], classOf[SysStatus]))

        When("any of those messages + the component is published")
        bus.publish(Packet(id, 44, 55, Heartbeat()))
        bus.publish(Packet(id, 44, 55, SysStatus()))
        Then("those events are received")
        probe.expectMsg(Packet(id, 44, 55, Heartbeat()))
        probe.expectMsg(Packet(id, 44, 55, SysStatus()))

        When("any other component is published with the same messages")
        bus.publish(Packet(id, 63, 11, Heartbeat()))
        bus.publish(Packet(id, 22, 245, SysStatus()))
        Then("those events are not received")
        probe.expectNoMsg()
      }
    }

    "subscribed with SubscribeTo.messagesExcept(component,...)" should {
      "pass any message except specified, only for the specified component" in {
        val f = fixture
        val probe = f.probe
        val bus = f.bus

        Given("an actor subscribed to messages for a component except those specified")
        bus.subscribe(probe.ref, SubscribeTo.messagesExcept(44, 55, classOf[Heartbeat], classOf[SysStatus]))

        When("other messages are published")
        bus.publish(Packet(id, 44, 55, ParamRequestList()))
        bus.publish(Packet(id, 44, 55, RawImu()))
        Then("those events are received")
        probe.expectMsg(Packet(id, 44, 55, ParamRequestList()))
        probe.expectMsg(Packet(id, 44, 55, RawImu()))

        When("the specified messages are published")
        bus.publish(Packet(id, 44, 55, Heartbeat()))
        bus.publish(Packet(id, 44, 55, SysStatus()))
        Then("those events are not received")
        probe.expectNoMsg()

        When("other messages are published, but from a different component")
        bus.publish(Packet(id, 63, 11, RawImu()))
        Then("those events are not received")
        probe.expectNoMsg()
      }
    }

    "subscribed with multiple subscription functions" should {
      "pass all matching events" in {
        val f = fixture
        val probe = f.probe
        val bus = f.bus

        Given("an actor with multiple subscription functions")
        bus.subscribe(probe.ref, SubscribeTo.messages(classOf[Heartbeat], classOf[SysStatus]))
        bus.subscribe(probe.ref, SubscribeTo.events(classOf[ReceiveError]))

        When("any of those messages is published")
        bus.publish(Packet(id, 44, 55, Heartbeat()))
        bus.publish(Packet(id, 99, 111, SysStatus()))
        bus.publish(ReceiveError(LostPackets(41)))
        Then("those events are received")
        probe.expectMsg(Packet(id, 44, 55, Heartbeat()))
        probe.expectMsg(Packet(id, 99, 111, SysStatus()))
        probe.expectMsg(ReceiveError(LostPackets(41)))

        When("any other message is published")
        bus.publish(Packet(id, 63, 11, ParamRequestList()))
        bus.publish(SentPacket(Packet(id, 22, 245, RawImu())))
        Then("those events are not received")
        probe.expectNoMsg()
      }
    }

    "unsubscribed with a specific matcher" should {
      "pass no more events for that matcher" in {
        val f = fixture
        val probe = f.probe
        val bus = f.bus

        Given("an actor subscribed to one message")
        val matcher = SubscribeTo.message(classOf[Heartbeat])
        bus.subscribe(probe.ref, matcher)

        When("that message is published")
        bus.publish(Packet(id, 63, 11, Heartbeat()))
        Then("that event is received")
        probe.expectMsg(Packet(id, 63, 11, Heartbeat()))

        When("the actor unsubscribes")
        bus.unsubscribe(probe.ref, matcher)
        bus.publish(Packet(id, 63, 11, Heartbeat()))
        Then("that message is no longer received")
        probe.expectNoMsg()
      }

      "still pass events for other matchers" in {
        val f = fixture
        val probe = f.probe
        val bus = f.bus

        Given("an actor with two subscriptions")
        val matcher1: EventMatcher[LinkEvent] = SubscribeTo.messages(classOf[Heartbeat], classOf[SysStatus])
        val matcher2: EventMatcher[LinkEvent] = SubscribeTo.events(classOf[ReceiveError])
        bus.subscribe(probe.ref, matcher1)
        bus.subscribe(probe.ref, matcher2)

        When("any of those messages is published")
        bus.publish(Packet(id, 44, 55, Heartbeat()))
        bus.publish(Packet(id, 99, 111, SysStatus()))
        bus.publish(ReceiveError(LostPackets(41)))
        Then("those events are received")
        probe.expectMsg(Packet(id, 44, 55, Heartbeat()))
        probe.expectMsg(Packet(id, 99, 111, SysStatus()))
        probe.expectMsg(ReceiveError(LostPackets(41)))

        When("the second matcher is unsubscribed")
        bus.unsubscribe(probe.ref, matcher2)
        Then("the first type of event is still received")
        bus.publish(Packet(id, 44, 55, Heartbeat()))
        probe.expectMsg(Packet(id, 44, 55, Heartbeat()))
        And("the second type is not received")
        bus.publish(ReceiveError(LostPackets(41)))
        probe.expectNoMsg()
      }
    }

    "unsubscribed without a matcher" should {
      "pass no more events of any kind" in {
        val f = fixture
        val probe = f.probe
        val bus = f.bus

        Given("an actor with two subscriptions")
        val matcher1 = SubscribeTo.messages(classOf[Heartbeat], classOf[SysStatus])
        val matcher2 = SubscribeTo.events(classOf[ReceiveError])
        bus.subscribe(probe.ref, matcher1)
        bus.subscribe(probe.ref, matcher2)

        When("any of those events is published")
        bus.publish(Packet(id, 44, 55, Heartbeat()))
        bus.publish(Packet(id, 99, 111, SysStatus()))
        bus.publish(ReceiveError(LostPackets(41)))
        Then("those events are received")
        probe.expectMsg(Packet(id, 44, 55, Heartbeat()))
        probe.expectMsg(Packet(id, 99, 111, SysStatus()))
        probe.expectMsg(ReceiveError(LostPackets(41)))

        When("the actor is unsubscribed")
        bus.unsubscribe(probe.ref)
        And("those events are published")
        bus.publish(Packet(id, 44, 55, Heartbeat()))
        bus.publish(Packet(id, 99, 111, SysStatus()))
        bus.publish(ReceiveError(LostPackets(41)))
        Then("the events are no longer received")
        probe.expectNoMsg()
      }
    }
  }

  // Classifier identity: the bus compares matcher *functions*, so the same
  // function instance compares equal to itself and distinct instances do not.
  "compareClassifiers" should {
    "return 0 for the same matcher function" in {
      val matcher: EventMatcher[LinkEvent] = event => event.isInstanceOf[Packet]
      val bus = new LinkEventBus {
        def compare(a: Classifier, b: Classifier): Int = super.compareClassifiers(a, b)
      }
      bus.compare(matcher, matcher) shouldBe 0
    }

    "return not 0 for different matcher functions" in {
      val matcher1: EventMatcher[LinkEvent] = event => event.isInstanceOf[Packet]
      val matcher2: EventMatcher[LinkEvent] = event => event.isInstanceOf[SentPacket]
      val bus = new LinkEventBus {
        def compare(a: Classifier, b: Classifier): Int = super.compareClassifiers(a, b)
      }
      bus.compare(matcher1, matcher2) shouldNot be(0)
    }
  }
}
| nickolasrossi/scavlink | src/test/scala/scavlink/link/LinkEventBusSpec.scala | Scala | mit | 16,013 |
/**
 * Created by Torri on 4/16/2016.
 *
 * Ad-hoc entry point exercising the `scale` helper brought in by
 * `math.Implicits`.
 */
object Tester {
  /**
   * Prints the result of `scale(4.4, 2.0 / 3.0)`.
   *
   * Uses explicit `: Unit =` instead of the deprecated procedure syntax
   * (removed in Scala 3).
   */
  def main(args: Array[String]): Unit = {
    import math.Implicits._
    println(scale(4.4, 2.0 / 3.0))
  }
}
| Creatorri/Scala-Libs | src/test/scala/Tester.scala | Scala | mit | 163 |
package scala.concurrent.impl
import java.util.concurrent.ConcurrentLinkedQueue
import java.util.concurrent.CountDownLatch
import org.junit.Assert._
import org.junit.{ After, Before, Test }
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
import scala.annotation.tailrec
import scala.concurrent.ExecutionContext
import scala.concurrent.impl.Promise.DefaultPromise
import scala.util.{ Failure, Success, Try }
import scala.util.control.NonFatal
/** Tests for the private class DefaultPromise */
@RunWith(classOf[JUnit4])
class DefaultPromiseTest {
  // Many tests in this class use a helper class, Tester, to track the state of
  // promises and to ensure they behave correctly, particularly the complex behaviour
  // of linking.

  // Opaque integer handles minted by Tester.freshId(); the aliases only
  // document intent, they are not distinct types.
  type Result = Int
  type PromiseId = Int
  type HandlerId = Int
  type ChainId = Int

  /** The state of a set of set of linked promises. */
  // `state` is either the pending handlers (Left) or the completed value (Right).
  case class Chain(
    promises: Set[PromiseId],
    state: Either[Set[HandlerId],Try[Result]]
  )
  /** A helper class that provides methods for creating, linking, completing and
   *  adding handlers to promises. With each operation it verifies that handlers
   *  are called, any expected exceptions are thrown, and that all promises have
   *  the expected value.
   *
   *  The links between promises are not tracked precisely. Instead, linked promises
   *  are placed in the same Chain object. Each link in the same chain will share
   *  the same value.
   */
  class Tester {
    // Handle -> live promise, and handle -> chain bookkeeping.
    var promises = Map.empty[PromiseId, DefaultPromise[Result]]
    var chains = Map.empty[ChainId, Chain]

    // Monotonic counter backing freshId(); doubles as the completion value.
    private var counter = 0
    private def freshId(): Int = {
      val id = counter
      counter += 1
      id
    }

    /** Handlers report their activity on this queue */
    private val handlerQueue = new ConcurrentLinkedQueue[(Try[Result], HandlerId)]()

    /** Get the chain for a given promise */
    // Also asserts the invariant that a promise lives in at most one chain.
    private def promiseChain(p: PromiseId): Option[(ChainId, Chain)] = {
      val found: Iterable[(ChainId, Chain)] = for ((cid, c) <- chains; p0 <- c.promises; if (p0 == p)) yield ((cid, c))
      found.toList match {
        case Nil => None
        case x::Nil => Some(x)
        case _ => throw new IllegalStateException(s"Promise $p found in more than one chain")
      }
    }

    /** Passed to `checkEffect` to indicate the expected effect of an operation */
    sealed trait Effect
    case object NoEffect extends Effect
    case class HandlersFired(result: Try[Result], handlers: Set[HandlerId]) extends Effect
    case object MaybeIllegalThrown extends Effect
    case object IllegalThrown extends Effect

    /** Runs an operation while verifying that the operation has the expected effect */
    // Drains handlerQueue into per-(result, handler) fire counts and checks
    // them — plus any thrown IllegalStateException — against `expected`.
    private def checkEffect(expected: Effect)(f: => Any) {
      assert(handlerQueue.isEmpty()) // Should have been cleared by last usage
      val result = Try(f)
      var fireCounts = Map.empty[(Try[Result], HandlerId), Int]
      while (!handlerQueue.isEmpty()) {
        val key = handlerQueue.poll()
        val newCount = fireCounts.getOrElse(key, 0) + 1
        fireCounts = fireCounts.updated(key, newCount)
      }

      def assertIllegalResult = result match {
        case Failure(e: IllegalStateException) => ()
        case _ => fail(s"Expected IllegalStateException: $result")
      }

      expected match {
        case NoEffect =>
          assertTrue(s"Shouldn't throw exception: $result", result.isSuccess)
          assertEquals(Map.empty[(Try[Result], HandlerId), Int], fireCounts)
        case HandlersFired(firingResult, handlers) =>
          assert(result.isSuccess)
          // Every expected handler must fire exactly once with firingResult.
          val expectedCounts = handlers.foldLeft(Map.empty[(Try[Result], HandlerId), Int]) {
            case (map, hid) => map.updated((firingResult, hid), 1)
          }
          assertEquals(expectedCounts, fireCounts)
        case MaybeIllegalThrown =>
          if (result.isFailure) assertIllegalResult
          assertEquals(Map.empty, fireCounts)
        case IllegalThrown =>
          assertIllegalResult
          assertEquals(Map.empty, fireCounts)
      }
    }

    /** Check each promise has the expected value. */
    private def assertPromiseValues() {
      for ((cid, chain) <- chains; p <- chain.promises) {
        chain.state match {
          case Right(result) => assertEquals(Some(result), promises(p).value)
          case Left(_) => ()
        }
      }
    }

    /** Create a promise, returning a handle. */
    // Each new promise starts alone in a fresh, uncompleted chain.
    def newPromise(): PromiseId = {
      val pid = freshId()
      val cid = freshId()
      promises = promises.updated(pid, new DefaultPromise[Result]())
      chains = chains.updated(cid, Chain(Set(pid), Left(Set.empty)))
      assertPromiseValues()
      pid
    }

    /** Complete a promise */
    // Completing an uncompleted chain fires its handlers; completing an
    // already-completed chain must throw IllegalStateException.
    def complete(p: PromiseId) {
      val r = Success(freshId())
      val (cid, chain) = promiseChain(p).get
      val (completionEffect, newState) = chain.state match {
        case Left(handlers) => (HandlersFired(r, handlers), Right(r))
        case Right(completion) => (IllegalThrown, chain.state)
      }
      checkEffect(completionEffect) { promises(p).complete(r) }
      chains = chains.updated(cid, chain.copy(state = newState))
      assertPromiseValues()
    }

    /** Attempt to link two promises together */
    def link(a: PromiseId, b: PromiseId): (ChainId, ChainId) = {
      val promiseA = promises(a)
      val promiseB = promises(b)
      val (cidA, chainA) = promiseChain(a).get
      val (cidB, chainB) = promiseChain(b).get

      // Examine the state of each promise's chain to work out
      // the effect of linking the promises, and to work out
      // if the two chains should be merged.

      sealed trait MergeOp
      case object NoMerge extends MergeOp
      case class Merge(state: Either[Set[HandlerId],Try[Result]]) extends MergeOp

      val (linkEffect, mergeOp) = (chainA.state, chainB.state) match {
        case (Left(handlers1), Left(handlers2)) =>
          (NoEffect, Merge(Left(handlers1 ++ handlers2)))
        case (Left(handlers), Right(result)) =>
          (HandlersFired(result, handlers), Merge(Right(result)))
        case (Right(result), Left(handlers)) =>
          (HandlersFired(result, handlers), Merge(Right(result)))
        case (Right(_), Right(_)) if (cidA == cidB) =>
          (MaybeIllegalThrown, NoMerge) // Won't be thrown if happen to link a promise to itself
        case (Right(_), Right(_)) =>
          (IllegalThrown, NoMerge)
      }

      // Perform the linking and merge the chains, if appropriate
      checkEffect(linkEffect) { promiseA.linkRootOf(promiseB) }
      val (newCidA, newCidB) = mergeOp match {
        case NoMerge => (cidA, cidB)
        case Merge(newState) => {
          chains = chains - cidA
          chains = chains - cidB
          val newCid = freshId()
          chains = chains.updated(newCid, Chain(chainA.promises ++ chainB.promises, newState))
          (newCid, newCid)
        }
      }
      assertPromiseValues()
      (newCidA, newCidB)
    }

    /** Attach an onComplete handler. When called, the handler will
     *  place an entry into `handlerQueue` with the handler's identity.
     *  This allows verification of handler calling semantics.
     */
    def attachHandler(p: PromiseId): HandlerId = {
      val hid = freshId()
      val promise = promises(p)
      val (cid, chain) = promiseChain(p).get

      val (attachEffect, newState) = chain.state match {
        case Left(handlers) =>
          (NoEffect, Left(handlers + hid))
        case Right(result) =>
          // Attaching to a completed promise fires the handler immediately.
          (HandlersFired(result, Set(hid)), Right(result))
      }

      // Same-thread executor so handler effects are observable synchronously.
      implicit val ec = new ExecutionContext {
        def execute(r: Runnable) { r.run() }
        def reportFailure(t: Throwable) { t.printStackTrace() }
      }

      checkEffect(attachEffect) { promise.onComplete(result => handlerQueue.add((result, hid))) }
      chains = chains.updated(cid, chain.copy(state = newState))
      assertPromiseValues()
      hid
    }
  }
  // Some methods and objects that build a list of promise
  // actions to test and then execute them

  // Keys are arbitrary small integers; testActions maps them to PromiseIds on demand.
  type PromiseKey = Int

  /** One scripted operation against a promise identified by its key. */
  sealed trait Action
  case class Complete(p: PromiseKey) extends Action
  case class Link(a: PromiseKey, b: PromiseKey) extends Action
  case class AttachHandler(p: PromiseKey) extends Action
/** Tests a sequence of actions on a Tester. Creates promises as needed. */
private def testActions(actions: Seq[Action]) {
val t = new Tester()
var pMap = Map.empty[PromiseKey, PromiseId]
def byKey(key: PromiseKey): PromiseId = {
if (!pMap.contains(key)) {
pMap = pMap.updated(key, t.newPromise())
}
pMap(key)
}
actions foreach { action =>
action match {
case Complete(p) => t.complete(byKey(p))
case Link(a, b) => t.link(byKey(a), byKey(b))
case AttachHandler(p) => t.attachHandler(byKey(p))
}
}
}
/** Tests all permutations of actions for `count` promises */
private def testPermutations(count: Int) {
val ps = (0 until count).toList
val pPairs = for (a <- ps; b <- ps) yield (a, b)
var allActions = ps.map(Complete(_)) ++ pPairs.map { case (a, b) => Link(a, b) } ++ ps.map(AttachHandler(_))
for ((permutation, i) <- allActions.permutations.zipWithIndex) {
testActions(permutation)
}
}
  /** Tests all permutations of actions with a single promise. */
  @Test
  def testPermutations1 {
    testPermutations(1)
  }
  /** Tests all permutations of actions with two promises - about 40 thousand
   *  permutations in total.
   */
  @Test
  def testPermutations2 {
    testPermutations(2)
  }
  /** Link promises in different orders, using the same link structure as is
   *  used in Future.flatMap: a chain p_n -> p_(n-1) -> ... -> p_1 where only
   *  the final promise is completed. The link/complete events are shuffled
   *  to verify the merged result is independent of ordering.
   */
  @Test
  def simulateFlatMapLinking {
    val random = new scala.util.Random(1) // fixed seed for reproducibility
    for (_ <- 0 until 10) {
      val t = new Tester()
      val flatMapCount = 100
      // Local event ADT. Note these deliberately shadow the outer
      // Complete/Link actions and carry PromiseIds, not PromiseKeys.
      sealed trait FlatMapEvent
      case class Link(a: PromiseId, b: PromiseId) extends FlatMapEvent
      case class Complete(p: PromiseId) extends FlatMapEvent
      // Builds the event list for a flatMap-style chain ending at p1.
      @tailrec
      def flatMapEvents(count: Int, p1: PromiseId, acc: List[FlatMapEvent]): List[FlatMapEvent] = {
        if (count == 0) {
          Complete(p1)::acc
        } else {
          val p2 = t.newPromise()
          flatMapEvents(count - 1, p2, Link(p2, p1)::acc)
        }
      }
      val events = flatMapEvents(flatMapCount, t.newPromise(), Nil)
      assertEquals(flatMapCount + 1, t.chains.size) // All promises are unlinked
      val shuffled = random.shuffle(events)
      shuffled foreach {
        case Link(a, b) => t.link(a, b)
        case Complete(p) => t.complete(p)
      }
      // All promises should be linked together, no matter the order of their linking
      assertEquals(1, t.chains.size)
    }
  }
  /** Link promises together on more than one thread, using the same link
   *  structure as is used in Future.flatMap. All tasks are released at once
   *  via `startLatch` to maximise interleaving, and `doneLatch` waits for
   *  every link/complete task before the final assertion.
   */
  @Test
  def testFlatMapLinking {
    for (_ <- 0 until 100) {
      val flatMapCount = 100
      val startLatch = new CountDownLatch(1)
      val doneLatch = new CountDownLatch(flatMapCount + 1)
      // Schedules f on the global pool; it runs once startLatch is released.
      def execute(f: => Unit) {
        val ec = ExecutionContext.global
        ec.execute(new Runnable {
          def run() {
            try {
              startLatch.await()
              f
              doneLatch.countDown()
            } catch {
              case NonFatal(e) => ec.reportFailure(e)
            }
          }
        })
      }
      // Builds the chain: each new promise is linked to the previous one and
      // only the last promise created is completed.
      @tailrec
      def flatMapTimes(count: Int, p1: DefaultPromise[Int]) {
        if (count == 0) {
          execute { p1.success(1) }
        } else {
          val p2 = new DefaultPromise[Int]()
          execute { p2.linkRootOf(p1) }
          flatMapTimes(count - 1, p2)
        }
      }
      val p = new DefaultPromise[Int]()
      flatMapTimes(flatMapCount, p)
      startLatch.countDown()
      doneLatch.await()
      // The value must have propagated through the whole chain to the root.
      assertEquals(Some(Success(1)), p.value)
    }
  }
}
| felixmulder/scala | test/junit/scala/concurrent/impl/DefaultPromiseTest.scala | Scala | bsd-3-clause | 11,916 |
/*
* Copyright 2014 Adam Rosenberger
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.nalloc.bitb.kcits.optional
/** Property-based tests for `OptionalLong`, an unboxed optional that uses
 *  `Long.MinValue` as its "empty" sentinel. Generator-driven properties
 *  therefore guard with `whenever(x != Long.MinValue)`.
 */
class OptionalLongSpec extends OptionalTypeSuite {

  property("The empty value does not unapply") {
    Long.MinValue match {
      case OptionalLong(x) => fail(s"Empty value unapplied to $x")
      case _ =>
    }
  }

  property("The empty value maps to the empty value of its target type") {
    OptionalLong.empty.map(_.toByte) shouldBe OptionalByte.empty
    OptionalLong.empty.map(_.toShort) shouldBe OptionalShort.empty
    OptionalLong.empty.map(_.toInt) shouldBe OptionalInt.empty
    OptionalLong.empty.map(_ + 1.toByte) shouldBe OptionalLong.empty
    OptionalLong.empty.map(_ + 1.toShort) shouldBe OptionalLong.empty
    OptionalLong.empty.map(_ + 1) shouldBe OptionalLong.empty
    OptionalLong.empty.map(_ + 1L) shouldBe OptionalLong.empty
    // Float/Double empties are checked via isEmpty rather than ==.
    OptionalLong.empty.map(_ + 1f).isEmpty shouldBe true
    OptionalLong.empty.map(_ + 1d).isEmpty shouldBe true
  }

  property("Non empty values unapply to themselves") {
    forAll { x: Long =>
      whenever(x != Long.MinValue) {
        x match {
          case OptionalLong(unapplied) => x shouldBe unapplied
          case _ => fail(s"$x failed to unapply")
        }
      }
    }
  }

  property("Non empty values map using the passed in function") {
    forAll(longs, mapFunctionsFrom[Long]) { (value, functions) =>
      whenever(value != Long.MinValue) {
        import functions._
        OptionalLong(value).map(mapToByte) shouldBe OptionalByte(mapToByte(value))
        OptionalLong(value).map(mapToShort) shouldBe OptionalShort(mapToShort(value))
        OptionalLong(value).map(mapToInt) shouldBe OptionalInt(mapToInt(value))
        OptionalLong(value).map(mapToLong) shouldBe OptionalLong(mapToLong(value))
        OptionalLong(value).map(mapToFloat) shouldBe OptionalFloat(mapToFloat(value))
        OptionalLong(value).map(mapToDouble) shouldBe OptionalDouble(mapToDouble(value))
        OptionalLong(value).map(mapToString) shouldBe Optional(mapToString(value))
      }
    }
  }

  property("foreach on the empty value is a no-op") {
    OptionalLong.empty.foreach(_ => fail())
  }

  property("foreach acts on non empty values") {
    forAll { x: Long =>
      whenever(x != Long.MinValue) {
        var executed = false
        OptionalLong(x).foreach(_ => executed = true)
        executed shouldBe true
      }
    }
  }

  property("exists on the empty value always returns false") {
    OptionalLong.empty.exists(_ => true) shouldBe false
  }

  property("exists on non empty values evaluates the passed in function") {
    forAll { x: Long =>
      whenever(x != Long.MinValue) {
        OptionalLong(x).exists(x => x == x) shouldBe true
        OptionalLong(x).exists(x => x == x + 1) shouldBe false
      }
    }
  }

  property("filter on the empty value always returns the empty value") {
    OptionalLong.empty.filter(_ => false) shouldBe OptionalLong.empty
    OptionalLong.empty.filter(_ => true) shouldBe OptionalLong.empty
  }

  property("filter on non empty values evaluates the passed in function") {
    forAll { x: Long =>
      whenever(x != Long.MinValue) {
        OptionalLong(x).filter(x => x == x) shouldBe OptionalLong(x)
        OptionalLong(x).filter(x => x == x + 1) shouldBe OptionalLong.empty
      }
    }
  }

  property("getOrElse on the empty value returns the passed in alternative") {
    OptionalLong.empty.getOrElse(1.toByte) shouldBe 1
  }

  property("getOrElse on non empty values does not evaluate the passed in function") {
    forAll { x: Long =>
      whenever(x != Long.MinValue) {
        OptionalLong(x).getOrElse(throw new IllegalArgumentException) shouldBe x
      }
    }
  }

  property("orElse on the empty value returns the passed in alternative") {
    OptionalLong.empty.orElse(OptionalLong(1)) shouldBe OptionalLong(1)
  }

  property("orElse on non empty values does not evaluate the passed in function") {
    forAll { x: Long =>
      whenever(x != Long.MinValue) {
        OptionalLong(x).orElse(throw new IllegalArgumentException) shouldBe OptionalLong(x)
      }
    }
  }

  property("The empty value flatMaps to the empty value of its target type") {
    forAll(flatMapFunctionsFrom[Long]) { functions =>
      import functions._
      OptionalLong.empty.flatMap(mapToOptionalByte) shouldBe OptionalByte.empty
      OptionalLong.empty.flatMap(mapToOptionalShort) shouldBe OptionalShort.empty
      OptionalLong.empty.flatMap(mapToOptionalInt) shouldBe OptionalInt.empty
      OptionalLong.empty.flatMap(mapToOptionalLong) shouldBe OptionalLong.empty
      OptionalLong.empty.flatMap(mapToOptionalFloat).isEmpty shouldBe true
      OptionalLong.empty.flatMap(mapToOptionalDouble).isEmpty shouldBe true
      OptionalLong.empty.flatMap(mapToOptionalString) shouldBe Optional.empty[String]
    }
  }

  property("Non empty values flatMap using the passed in function") {
    forAll(longs, flatMapFunctionsFrom[Long]) { (value, functions) =>
      whenever(value != Long.MinValue) {
        import functions._
        OptionalLong(value).flatMap(mapToOptionalByte) shouldBe mapToOptionalByte(value)
        OptionalLong(value).flatMap(mapToOptionalShort) shouldBe mapToOptionalShort(value)
        OptionalLong(value).flatMap(mapToOptionalInt) shouldBe mapToOptionalInt(value)
        OptionalLong(value).flatMap(mapToOptionalLong) shouldBe mapToOptionalLong(value)
        OptionalLong(value).flatMap(mapToOptionalFloat) shouldBe mapToOptionalFloat(value)
        OptionalLong(value).flatMap(mapToOptionalDouble) shouldBe mapToOptionalDouble(value)
        OptionalLong(value).flatMap(mapToOptionalString) shouldBe mapToOptionalString(value)
      }
    }
  }

  property("The empty value always applies the ifEmpty portion of a fold") {
    forAll { (ifEmpty: Long, ifNotEmpty: Long) =>
      whenever(ifEmpty != ifNotEmpty) {
        OptionalLong.empty.fold(ifEmpty)(_ => ifNotEmpty) shouldBe ifEmpty
      }
    }
  }

  property("Non empty values always apply the map portion of a fold") {
    forAll(longs, longs, mapFunctionsFrom[Long]) { (ifEmpty, value, functions) =>
      whenever(value != Long.MinValue) {
        import functions._
        OptionalLong(value).fold(ifEmpty)(mapToLong) shouldBe mapToLong(value)
      }
    }
  }

  // Fixed description: the original said "always returns false" while the
  // assertion (correctly) expects vacuous truth on the empty value.
  property("forAll on the empty value always returns true") {
    OptionalLong.empty.forAll(_ => false) shouldBe true
  }

  property("forAll on non empty values evaluates the passed in function") {
    forAll { x: Long =>
      whenever(x != Long.MinValue) {
        OptionalLong(x).forAll(x => x == x) shouldBe true
        OptionalLong(x).forAll(x => x == x + 1) shouldBe false
      }
    }
  }

  property("collect on the empty value always returns the empty value of its target type") {
    OptionalLong.empty.collect {
      case _ => 100
    } shouldBe OptionalInt.empty
  }

  property("collect on non empty values evaluates the passed in partial function") {
    forAll { x: Long =>
      whenever(x != Long.MinValue && x > 0) {
        x % 2 match {
          case 0 =>
            val projection = OptionalLong(x).collect {
              case that if that % 2 == 1 => 100
            }
            projection shouldBe OptionalInt.empty
            val projectionWithDefault = OptionalLong(x).collect {
              case that if that % 2 == 1 => 100
              case _ => 200
            }
            projectionWithDefault shouldBe OptionalInt(200)
          case 1 =>
            val projection = OptionalLong(x).collect {
              case that if that % 2 == 1 => 100
            }
            projection shouldBe OptionalInt(100)
            val projectionWithDefault = OptionalLong(x).collect {
              case that if that % 2 == 1 => 100
              case _ => 200
            }
            projectionWithDefault shouldBe OptionalInt(100)
        }
      }
    }
  }
}
| arosenberger/nalloc | optional/src/test/scala/org/nalloc/bitb/kcits/optional/OptionalLongSpec.scala | Scala | apache-2.0 | 7,918 |
package org.jetbrains.plugins.scala
package lang
package psi
package api
package expr
import com.intellij.psi.{PsiElement, PsiField}
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScVariable
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.ScClassParameter
import org.jetbrains.plugins.scala.lang.resolve.ScalaResolveResult
import org.jetbrains.plugins.scala.lang.resolve.processor.DynamicResolveProcessor
/**
* @author Alexander Podkhalyuzin
*/
trait ScAssignStmt extends ScExpression {

  /** The left-hand side of the assignment (always present). */
  def getLExpression: ScExpression = findChildByClassScala(classOf[ScExpression])

  /** The right-hand side, if present; `None` for an incomplete assignment
   *  such as `x =`. The last child expression is taken, guarding against it
   *  being the same node as the left-hand side.
   */
  def getRExpression: Option[ScExpression] = findLastChild(classOf[ScExpression]) match {
    case Some(expr: ScExpression) if expr != getLExpression => Some(expr)
    case _ => None
  }

  /** Name of the assignment target when the LHS is a simple, unqualified
   *  reference (e.g. `x` in `x = 1`); `None` otherwise.
   */
  def assignName: Option[String] = {
    getLExpression match {
      case ref: ScReferenceExpression if ref.qualifier.isEmpty => Some(ref.getText)
      case _ => None
    }
  }

  /** True when this assignment is actually a named argument in a call,
   *  i.e. the LHS reference resolves to a parameter.
   */
  def isNamedParameter: Boolean = {
    getLExpression match {
      case expr: ScReferenceExpression =>
        expr.bind() match {
          case Some(r) => r.isNamedParameter
          case _ => false
        }
      case _ => false
    }
  }

  def mirrorMethodCall: Option[ScMethodCall]

  /**
   * Has sense only in case if left token resolves to parameterless function
   *
   * @return parameterless function setter, or None otherwise
   */
  def resolveAssignment: Option[ScalaResolveResult]

  def shapeResolveAssignment: Option[ScalaResolveResult]

  /**
   * @return element to which equals sign should navigate
   */
  def assignNavigationElement: PsiElement = {
    getLExpression match {
      // `coll(i) = x` desugars to an `update` call: navigate to that method.
      case methodCall: ScMethodCall =>
        methodCall.applyOrUpdateElement match {
          case Some(r) => r.getActualElement
          case None => null
        }
      // Otherwise prefer a resolved setter; fall back to the var/field itself.
      case left => resolveAssignment match {
        case Some(ScalaResolveResult(elem, _)) => elem
        case _ => left match {
          case ref: ScReferenceExpression => ref.resolve() match {
            case v: ScVariable => v
            case p: ScClassParameter if p.isVar => p
            case f: PsiField => f
            case _ => null
          }
          case _ => null
        }
      }
    }
  }

  /** True when this assignment is an argument of a call that resolves to
   *  Scala's `applyDynamicNamed` (Dynamic trait), making it a named argument
   *  of a dynamic invocation rather than a real assignment.
   */
  def isDynamicNamedAssignment: Boolean = {
    getContext match {
      // The assignment may sit directly in an argument list, or inside a
      // tuple/parenthesised expression that forms the argument.
      case context@(_: ScTuple | _: ScParenthesisedExpr | _: ScArgumentExprList) =>
        context.getContext match {
          case m: MethodInvocation if m.argumentExpressions.contains(this) =>
            m.getEffectiveInvokedExpr match {
              case r: ScReferenceExpression =>
                r.bind() match {
                  case Some(resolveResult) if DynamicResolveProcessor.isApplyDynamicNamed(resolveResult) => return true
                  case _ =>
                    m.applyOrUpdateElement match {
                      case Some(innerResult) if DynamicResolveProcessor.isApplyDynamicNamed(innerResult) => return true
                      case _ =>
                    }
                }
              case _ =>
            }
          case _ =>
        }
      case _ =>
    }
    false
  }

  override def accept(visitor: ScalaElementVisitor) {
    visitor.visitAssignmentStatement(this)
  }
}
/** Extractor matching assignments whose LHS is a simple name, yielding that name. */
object NamedAssignStmt {
  def unapply(st: ScAssignStmt): Option[String] = st.assignName
}
/** Extractor yielding the LHS and optional RHS of an assignment statement. */
object ScAssignStmt {
  def unapply(st: ScAssignStmt): Option[(ScExpression, Option[ScExpression])] =
    // Explicit tuple instead of relying on deprecated argument auto-tupling.
    Some((st.getLExpression, st.getRExpression))
}
package org.scaladebugger.docs.layouts
import org.scaladebugger.docs.layouts.partials.common._
import org.scaladebugger.docs.styles.{FrontPageStyle, PageStyle}
import scalatags.Text.all._
import org.scaladebugger.docs.styles.Implicits._
import org.senkbeil.grus.layouts.Context
/**
* Represents the layout for the front page of the site.
*/
class FrontPage extends SitePage(syntaxHighlightTheme = "default") {

  // Code sample shown in the "api" demo tab. `$$` keeps the dollar signs
  // literal inside this s-interpolated string.
  private val ApiExampleCode =
    s"""
      |val fileName = "file.scala"
      |val lineNumber = 37
      |
      |scalaVirtualMachine.getOrCreateBreakpointRequest(
      | fileName,
      | lineNumber
      |).foreach(breakpointEvent => {
      | val f = breakpointEvent.fileName
      | val l = breakpointEvent.lineNumber
      |
      | println(s"Reached breakpoint $$f:$$l")
      |})
    """.stripMargin

  // Code sample shown in the "language" demo tab.
  private val LanguageExampleCode =
    """
      |myFunc := func(a, b) {
      | a + b
      |}
      |
      |result := myFunc 3 9
      |
      |print("Result is " ++ result)
    """.stripMargin

  // Adds the front-page-specific stylesheets on top of the base page's head.
  override protected def preHeadContent(context: Context): Seq[Modifier] = {
    super.preHeadContent(context) ++ Seq(
      FrontPageStyle.global.toStyleTag,
      FrontPageStyle.toStyleTag
    )
  }

  /**
   * Renders the front page.
   *
   * @param content Unused
   * @return The rendered content
   */
  override def render(content: Seq[Modifier] = Nil): Modifier = {
    super.render(Seq(div(
      // --- Hero section: logo, tagline and primary navigation buttons ---
      tag("section")(PageStyle.section, PageStyle.sectionLight)(
        div(PageStyle.sectionContent)(
          h1(PageStyle.heroTitle)(
            EnsimeLogo(),
            span("Scala Debugger")
          ),
          span(PageStyle.heroSubtitle)(
            "Scala abstractions and tooling around the Java Debugger Interface."
          ),
          span(FrontPageStyle.inlineButtonContainer)(
            Button(
              "Learn More",
              // The "About" link comes from the main menu; its absence is a
              // build-time configuration error, hence the hard failure.
              context.mainMenuItems
                .find(_.name.toLowerCase == "about")
                .flatMap(_.link)
                .getOrElse(throw new RuntimeException("Missing about section!")),
              PageStyle.buttonMargin
            ),
            Button(
              "Source Code",
              "https://www.github.com/ensime/scala-debugger",
              PageStyle.buttonMargin
            ),
            Button(
              "Community",
              "https://www.gitter.im/ensime/scala-debugger",
              PageStyle.buttonMargin
            )
          )
        )
      ),
      // --- Installation section: one tab per distribution channel ---
      tag("section")(
        PageStyle.section,
        PageStyle.sectionDark
      )(
        div(PageStyle.sectionContent, height := "550px")(
          h1(
            i(
              `class` := "fa fa-laptop",
              attr("aria-hidden") := "true"
            )(),
            span("Installation")
          ),
          Tabs.Light(
            identifier = "installation",
            // API
            Tabs.Tab(
              name = "api",
              LinedContent("sbt", ScalaCodeBlock(
                """
                  |libraryDependencies += "org.scala-debugger" %% "scala-debugger-api" % "1.1.0-M3"
                """.stripMargin, fitContainer = true, trim = true)
              ),
              LinedContent("sbt plugin", ScalaCodeBlock(
                """
                  |addSbtPlugin("org.scala-debugger" % "sbt-jdi-tools" % "1.0.0")
                """.stripMargin, fitContainer = true, trim = true)
              )
            ),
            // Language
            Tabs.Tab(
              name = "language",
              LinedContent("sbt", ScalaCodeBlock(
                """
                  |libraryDependencies += "org.scala-debugger" %% "scala-debugger-language" % "1.1.0-M3"
                """.stripMargin, fitContainer = true, trim = true)
              )
            ),
            // SDB
            Tabs.Tab(
              name = "sdb",
              LinedContent("download jar", Button(
                name = "sdb 1.1.0-M3 built with Scala 2.10",
                link = "/downloads/sdb/1.1.0-M3/sdb-2.10.jar"
              )),
              LinedContent("download jar", Button(
                name = "sdb 1.1.0-M3 built with Scala 2.11",
                link = "/downloads/sdb/1.1.0-M3/sdb-2.11.jar"
              )),
              LinedContent("download jar", Button(
                name = "sdb 1.1.0-M3 built with Scala 2.12",
                link = "/downloads/sdb/1.1.0-M3/sdb-2.12.jar"
              ))
            ),
            // Visual Debugger
            /*Tabs.Tab(
              name = "visual debugger",
              LinedContent("download jar", Button(
                name = "vsdb 1.1.0-M3 built with Scala 2.10",
                link = "/downloads/vsdb/1.1.0-M3/vsdb-2.10.jar"
              )),
              LinedContent("download jar", Button(
                name = "vsdb 1.1.0-M3 built with Scala 2.11",
                link = "/downloads/vsdb/1.1.0-M3/vsdb-2.11.jar"
              )),
              LinedContent("download jar", Button(
                name = "vsdb 1.1.0-M3 built with Scala 2.12",
                link = "/downloads/vsdb/1.1.0-M3/vsdb-2.12.jar"
              ))
            ),*/
            // sbt plugin
            Tabs.Tab(
              name = "sbt",
              LinedContent("sbt plugin", ScalaCodeBlock(
                """
                  |addSbtPlugin("org.scala-debugger" % "sbt-scala-debugger" % "1.1.0-M3")
                """.stripMargin, fitContainer = true, trim = true)
              ),
              LinedContent("run", ScalaCodeBlock("sbt sdb:run",
                fitContainer = true, trim = true))
            )
          )
        )
      ),
      // --- Demos section: code samples and screencasts per channel ---
      tag("section")(
        PageStyle.section,
        PageStyle.sectionLight
      )(
        div(PageStyle.sectionContent, height := "550px")(
          h1(
            i(
              `class` := "fa fa-gears",
              attr("aria-hidden") := "true"
            )(),
            span("Demos")
          ),
          Tabs.Dark(
            identifier = "demos",
            // API
            Tabs.Tab(
              name = "api",
              LinedContent.Raw(
                ScalaCodeBlock(ApiExampleCode,
                  fitContainer = true, trim = true)
              )
            ),
            // Language
            Tabs.Tab(
              name = "language",
              LinedContent.Raw(
                ScalaCodeBlock(LanguageExampleCode,
                  fitContainer = true, trim = true)
              )
            ),
            // SDB
            Tabs.Tab.NoInner(
              name = "sdb",
              Video("/videos/examples/", "sdb")
            ),
            // Visual Debugger
            /*Tabs.Tab.NoInner(
              name = "visual debugger",
              Video("/videos/examples/", "visual-debugger")
            ),*/
            // sbt plugin
            Tabs.Tab.NoInner(
              name = "sbt",
              Video("/videos/examples/", "sbt-plugin")
            )
          )
        )
      )
    )))
  }
}
| ensime/scala-debugger | scala-debugger-docs/src/main/scala/org/scaladebugger/docs/layouts/FrontPage.scala | Scala | apache-2.0 | 7,028 |
package org.cakesolutions.akkapatterns.core
import akka.actor.{Props, ActorSystem}
import akka.pattern.ask
import akka.util.Timeout
import concurrent.Await
/** Message asking the application actor to start up. */
case class Start()
case object InmatesAreRunningTheAsylum
/** Reply confirming that startup completed. */
case class Started()
/** Message asking the application actor to shut down. */
case class Stop()
/** Core of the server: creates the top-level "application" actor in the
 *  supplied actor system and blocks until it acknowledges a `Start` message.
 */
trait ServerCore {
  // Provided by the concrete implementation mixing in this trait.
  implicit def actorSystem: ActorSystem
  // NOTE(review): Timeout(30000) relies on the Long-millis overload, i.e.
  // 30 seconds — confirm against the akka.util.Timeout API version in use.
  implicit val timeout = Timeout(30000)

  val application = actorSystem.actorOf(
    props = Props[ApplicationActor],
    name = "application"
  )

  // Block trait initialisation until the application actor has started.
  Await.ready(application ? Start(), timeout.duration)
}
| anand-singh/akka-patterns | sbt/src/main/scala/org/cakesolutions/akkapatterns/core/boot.scala | Scala | apache-2.0 | 524 |
// Type class producing a textual label for values of type A.
trait Label[A]:
  def apply(v: A): String
// Default instance for any A: label via toString (SAM-converted lambda).
given [A]: Label[A] = _.toString
// Extension syntax; both the method and its given parameter are inline.
extension [A](x: A) inline def label(using inline l: Label[A]): String = l(x)
// Compile-only checks; note label2's parameter shares the name `l` with the
// inline given parameter.
def label1[A](v: A) = v.label
def label2[A](l: A) = l.label
| dotty-staging/dotty | tests/pos/i9342b.scala | Scala | apache-2.0 | 217 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.mllib.regression
import scala.util.Random
import org.jblas.DoubleMatrix
import org.scalatest.FunSuite
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.util.{LocalClusterSparkContext, LinearDataGenerator,
MLlibTestSparkContext}
import org.apache.spark.util.Utils
private object RidgeRegressionSuite {

  /** 3 features */
  // Shared fixture for the save/load round-trip test below.
  val model = new RidgeRegressionModel(weights = Vectors.dense(0.1, 0.2, 0.3), intercept = 0.5)
}
class RidgeRegressionSuite extends FunSuite with MLlibTestSparkContext {

  /** Mean squared error between `predictions` and the labels of `input`.
   *  Rewritten from `reduceLeft(_ + _)` to `.sum`, computing each squared
   *  difference once instead of twice.
   */
  def predictionError(predictions: Seq[Double], input: Seq[LabeledPoint]) = {
    predictions.zip(input).map { case (prediction, expected) =>
      val diff = prediction - expected.label
      diff * diff
    }.sum / predictions.size
  }

  test("ridge regression can help avoid overfitting") {
    // For small number of examples and large variance of error distribution,
    // ridge regression should give smaller generalization error that linear regression.
    val numExamples = 50
    val numFeatures = 20
    org.jblas.util.Random.seed(42)
    // Pick weights as random values distributed uniformly in [-0.5, 0.5]
    val w = DoubleMatrix.rand(numFeatures, 1).subi(0.5)
    // Use half of data for training and other half for validation
    val data = LinearDataGenerator.generateLinearInput(3.0, w.toArray, 2 * numExamples, 42, 10.0)
    val testData = data.take(numExamples)
    val validationData = data.takeRight(numExamples)
    val testRDD = sc.parallelize(testData, 2).cache()
    val validationRDD = sc.parallelize(validationData, 2).cache()
    // First run without regularization.
    val linearReg = new LinearRegressionWithSGD()
    linearReg.optimizer.setNumIterations(200)
      .setStepSize(1.0)
    val linearModel = linearReg.run(testRDD)
    val linearErr = predictionError(
      linearModel.predict(validationRDD.map(_.features)).collect(), validationData)
    // Then with an L2 penalty.
    val ridgeReg = new RidgeRegressionWithSGD()
    ridgeReg.optimizer.setNumIterations(200)
      .setRegParam(0.1)
      .setStepSize(1.0)
    val ridgeModel = ridgeReg.run(testRDD)
    val ridgeErr = predictionError(
      ridgeModel.predict(validationRDD.map(_.features)).collect(), validationData)
    // Ridge validation error should be lower than linear regression.
    assert(ridgeErr < linearErr,
      "ridgeError (" + ridgeErr + ") was not less than linearError(" + linearErr + ")")
  }

  test("model save/load") {
    val model = RidgeRegressionSuite.model
    val tempDir = Utils.createTempDir()
    val path = tempDir.toURI.toString
    // Save model, load it back, and compare.
    try {
      model.save(sc, path)
      val sameModel = RidgeRegressionModel.load(sc, path)
      assert(model.weights == sameModel.weights)
      assert(model.intercept == sameModel.intercept)
    } finally {
      // Always clean up the temp directory, even when assertions fail.
      Utils.deleteRecursively(tempDir)
    }
  }
}
class RidgeRegressionClusterSuite extends FunSuite with LocalClusterSparkContext {

  test("task size should be small in both training and prediction") {
    val m = 4
    val n = 200000
    // Generate the rows inside each partition so the driver never ships the
    // large feature arrays; each partition seeds its own RNG.
    val points = sc.parallelize(0 until m, 2).mapPartitionsWithIndex { (idx, iter) =>
      val random = new Random(idx)
      iter.map(i => LabeledPoint(1.0, Vectors.dense(Array.fill(n)(random.nextDouble()))))
    }.cache()
    // If we serialize data directly in the task closure, the size of the serialized task would be
    // greater than 1MB and hence Spark would throw an error.
    val model = RidgeRegressionWithSGD.train(points, 2)
    // NOTE(review): no action is run on the resulting RDD, so `predictions`
    // is never materialized — confirm whether a collect()/count() was intended.
    val predictions = model.predict(points.map(_.features))
  }
}
| trueyao/spark-lever | mllib/src/test/scala/org/apache/spark/mllib/regression/RidgeRegressionSuite.scala | Scala | apache-2.0 | 4,474 |
package chap5
object Exe7 extends App {
  import Stream._

  // Self-referential infinite stream 1, 2, 3, ... — legal only because
  // `cons` evaluates its tail lazily, so `stream` is dereferenced after the
  // object's field has been initialized.
  val stream: Stream[Int] = cons(1, stream.map(_ + 1))
  assert(stream.filter(_ % 2 == 0).take(3).toList == List(2, 4, 6))
  assert(Stream(1, 2, 3).append(4).toList == List(1, 2, 3, 4))
  // Laziness check: the appended (throwing) thunk must never be forced when
  // only the first three elements are taken.
  assert(Stream(1, 2, 3, 4).append({throw new RuntimeException(""); 5}).take(3).toList == List(1, 2, 3))
  assert(Stream(1, 2, 3).flatMap { n => Stream(n, n + 1) }.toList == List(1, 2, 2, 3, 3, 4))
}
| ponkotuy/FPScala | src/main/scala/chap5/Exe7.scala | Scala | unlicense | 445 |
/**
* This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY
package sbt
/**
* Type for AutoPlugin's trigger method.
* Determines whether an AutoPlugin will be activated for a project when the
* `requires` clause is satisfied.
*/
sealed abstract class PluginTrigger extends Serializable
object PluginTrigger {
  /** Activate the plugin automatically when its `requires` clause is satisfied. */
  case object AllRequirements extends PluginTrigger
  /** Never activate automatically; the plugin must be enabled explicitly. */
  case object NoTrigger extends PluginTrigger
}
| xuwei-k/xsbt | main/src/main/contraband-scala/sbt/PluginTrigger.scala | Scala | apache-2.0 | 482 |
object Test extends Application {
  class A {
    def foo[T](x: T) = print(1) // generic overload (target of "line: 3")
    def foo(x: C) = print(2) // specific overload (target of "line: 4")
  }
  class C
  class D extends C
  val a = new A
  a./* line: 3 */foo[C](new C)
  a./* line: 4 */foo(new D)
}
package ca.uqam.euler.nicolas
// Answer : 76576500
/** Project Euler problem 12: first triangle number with more than 500
 *  divisors. Expected answer: 76576500.
 */
object Problem012 {

  /** Lazily evaluated stream of triangle numbers: 1, 3, 6, 10, ...
   *  Built with scanLeft so each term costs O(1) instead of re-summing
   *  1..n for every term as the original recursive helper did.
   */
  def triangleNumbers: Stream[Int] =
    Stream.from(1).scanLeft(0)(_ + _).tail

  /** All divisors of n, each listed exactly once (including 1 and n).
   *  Fixes two duplicate entries in the original: sqrt(n) was pushed twice
   *  for perfect squares, and 1 twice for n == 1, inflating divisor counts.
   */
  def factors(n: Int) = {
    val d = n.toDouble
    var i = 2
    var fs = if (n == 1) Seq(1) else Seq(n, 1)
    while (i <= scala.math.sqrt(d)) {
      val xd = d / i
      val xi = xd.toInt
      if (xd == xi)
        // For a perfect square, i == n/i: record the root only once.
        fs = if (xi == i) xi +: fs else xi +: i +: fs
      i += 1
    }
    fs
  }

  /** First triangle number whose divisor count exceeds n. */
  def firstTriangleNumberWithMoreFactorsThan(n: Int) =
    triangleNumbers.find(factors(_).length > n).get

  def main(args: Array[String]) = Answer {
    firstTriangleNumberWithMoreFactorsThan(500)
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.k8s.integrationtest
import io.fabric8.kubernetes.api.model.Pod
import org.apache.spark.launcher.SparkLauncher
private[spark] trait BasicTestsSuite { k8sSuite: KubernetesSuite =>

  import BasicTestsSuite._
  import KubernetesSuite.k8sTestTag

  test("Run SparkPi with no resources", k8sTestTag) {
    runSparkPiAndVerifyCompletion()
  }

  // "long" * 40 yields a 160-character application name.
  test("Run SparkPi with a very long application name.", k8sTestTag) {
    sparkAppConf.set("spark.app.name", "long" * 40)
    runSparkPiAndVerifyCompletion()
  }

  test("Use SparkLauncher.NO_RESOURCE", k8sTestTag) {
    sparkAppConf.setJars(Seq(containerLocalSparkDistroExamplesJar))
    runSparkPiAndVerifyCompletion(
      appResource = SparkLauncher.NO_RESOURCE)
  }

  test("Run SparkPi with a master URL without a scheme.", k8sTestTag) {
    val url = kubernetesTestComponents.kubernetesClient.getMasterUrl
    // getPort is negative when the API-server URL carries no explicit port.
    val k8sMasterUrl = if (url.getPort < 0) {
      s"k8s://${url.getHost}"
    } else {
      s"k8s://${url.getHost}:${url.getPort}"
    }
    sparkAppConf.set("spark.master", k8sMasterUrl)
    runSparkPiAndVerifyCompletion()
  }

  test("Run SparkPi with an argument.", k8sTestTag) {
    // This additional configuration with snappy is for SPARK-26995
    sparkAppConf
      .set("spark.io.compression.codec", "snappy")
    runSparkPiAndVerifyCompletion(appArgs = Array("5"))
  }

  // Sets matching custom metadata on both driver and executor pods, then
  // verifies it via the pod checkers below.
  test("Run SparkPi with custom labels, annotations, and environment variables.", k8sTestTag) {
    sparkAppConf
      .set("spark.kubernetes.driver.label.label1", "label1-value")
      .set("spark.kubernetes.driver.label.label2", "label2-value")
      .set("spark.kubernetes.driver.annotation.annotation1", "annotation1-value")
      .set("spark.kubernetes.driver.annotation.annotation2", "annotation2-value")
      .set("spark.kubernetes.driverEnv.ENV1", "VALUE1")
      .set("spark.kubernetes.driverEnv.ENV2", "VALUE2")
      .set("spark.kubernetes.executor.label.label1", "label1-value")
      .set("spark.kubernetes.executor.label.label2", "label2-value")
      .set("spark.kubernetes.executor.annotation.annotation1", "annotation1-value")
      .set("spark.kubernetes.executor.annotation.annotation2", "annotation2-value")
      .set("spark.executorEnv.ENV1", "VALUE1")
      .set("spark.executorEnv.ENV2", "VALUE2")
    runSparkPiAndVerifyCompletion(
      driverPodChecker = (driverPod: Pod) => {
        doBasicDriverPodCheck(driverPod)
        checkCustomSettings(driverPod)
      },
      executorPodChecker = (executorPod: Pod) => {
        doBasicExecutorPodCheck(executorPod)
        checkCustomSettings(executorPod)
      })
  }

  test("All pods have the same service account by default", k8sTestTag) {
    runSparkPiAndVerifyCompletion(
      executorPodChecker = (executorPod: Pod) => {
        doExecutorServiceAccountCheck(executorPod, kubernetesTestComponents.serviceAccountName)
      })
  }

  test("Run extraJVMOptions check on driver", k8sTestTag) {
    sparkAppConf
      .set("spark.driver.extraJavaOptions", "-Dspark.test.foo=spark.test.bar")
    runSparkJVMCheckAndVerifyCompletion(
      expectedJVMValue = Seq("(spark.test.foo,spark.test.bar)"))
  }

  test("Run SparkRemoteFileTest using a remote data file", k8sTestTag) {
    sparkAppConf
      .set("spark.files", REMOTE_PAGE_RANK_DATA_FILE)
    runSparkRemoteCheckAndVerifyCompletion(appArgs = Array(REMOTE_PAGE_RANK_FILE_NAME))
  }
}
private[spark] object BasicTestsSuite {
  val SPARK_PAGE_RANK_MAIN_CLASS: String = "org.apache.spark.examples.SparkPageRank"
  // Path where files distributed via spark.files appear in the containers.
  val CONTAINER_LOCAL_FILE_DOWNLOAD_PATH = "/var/spark-data/spark-files"
  val CONTAINER_LOCAL_DOWNLOADED_PAGE_RANK_DATA_FILE =
    s"$CONTAINER_LOCAL_FILE_DOWNLOAD_PATH/pagerank_data.txt"
  // Remote fixture fetched through spark.files in the remote-file test.
  val REMOTE_PAGE_RANK_DATA_FILE =
    "https://storage.googleapis.com/spark-k8s-integration-tests/files/pagerank_data.txt"
  val REMOTE_PAGE_RANK_FILE_NAME = "pagerank_data.txt"
}
| aosagie/spark | resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/BasicTestsSuite.scala | Scala | apache-2.0 | 4,689 |
package looty
package views.loot
import looty.model.InventoryIds.InventoryId
import looty.model.parsers.ItemParser
import looty.model.{Attributes, CharClasses, CharInvId, ComputedItem, PaperDoll, PassiveSkillTreeHelp}
import looty.poeapi.PoeTypes.Leagues.League
import looty.poeapi.{PoeCacher, PoeRpcs}
import looty.views.ItemDetailHover
import org.scalajs.dom
import org.scalajs.jquery.JQuery
import scala.scalajs.js
//////////////////////////////////////////////////////////////
// Copyright (c) 2014 Ben Jackman, Jeff Gomberg
// All Rights Reserved
// please contact ben@jackman.biz or jeff@cgtanalytics.com
// for licensing inquiries
// Created by bjackman @ 8/25/14 10:19 PM
//////////////////////////////////////////////////////////////
class UpgradesPane(
league: League,
itemDetailHover: ItemDetailHover,
setUpgradeItemFn: (Option[ComputedItem]) => Unit,
setRequiredLvlFn: Int => Unit
)(implicit val pc: PoeCacher) {
  // Root element of the pane, plus the two sub-containers (level button and
  // equipped-item "paper doll") that are re-rendered on character load.
  val el = jq("<div class='upgrades-pane'></div>")
  val dollEl = jq("<div></div>")
  val lvlEl = jq("<div></div>")
  /** Builds the pane's DOM (intro text plus a select2 character picker) and
   *  returns the root element for the caller to attach.
   */
  def start(): JQuery = {
    val playerDiv = jq("<div></div>")
    el.append("<div>Use this pane to find upgrades for a character's items. Simply select a character then click on the item you wish to upgrade. The grid will show deltas of the values of other items.</div>")
    el.append(playerDiv)
    // NOTE(review): playerSel is kept only for its side effects; the
    // `el.append(playerSel)` below appears intentionally disabled.
    val playerSel = {
      val O = js.Dynamic.literal
      playerDiv.asJsDyn.select2(O(
        width = 180,
        placeholder = "Character",
        // select2 query callback: filter cached characters by typed prefix
        // and current league, sorted case-insensitively by name.
        query = { (q: js.Dynamic) =>
          val term = q.term.asInstanceOf[String]
          for {
            chars <- pc.getChars()
          } {
            val cs = chars.toList
              .filter(c => c.name.toLowerCase.startsWith(term.toLowerCase) && c.league =?= league.rpcName)
              .sortBy(_.name.toLowerCase)
              .map(c => O(id = c.name, text = c.name))
              .toJsArr
            q.callback(O(results = cs))
          }
        }: js.Function
      )).on("change", { (e: js.Dynamic) =>
        loadChar(e.`val`.asInstanceOf[String])
      }: js.Function)
    }
    // el.append(playerSel)
    el.append(lvlEl)
    el.append(dollEl)
    el
  }
def loadChar(name: String) {
import scala.async.Async.{async, await}
async {
val pFut = pc.getAccountNameAndRealm.flatMap(accountName => PoeRpcs.getPassiveSkills(accountName, character = name))
val cFut = pc.getChars()
val iFut = pc.getInv(name)
val passives = await(pFut)
val chars = await(cFut)
val inventory = await(iFut)
val bagId = CharInvId(name)
val items = inventory.allItems(Some(name)).toList
chars.find(_.name =?= name).foreach { charInfo =>
val cls = charInfo.getCharClass
val pAttrs = PassiveSkillTreeHelp.hashesToAttributes(passives.hashes)
val attrs = cls.startingAttributes.reduceWith(pAttrs)(_ + _)
displayLevelAndAttributes(charInfo.level.toInt, attrs)
}
val doll = PaperDoll.fromItems(items).map(i => i.map(i => ItemParser.parseItem(i, bagId, bagId.character)))
displayPaperDoll(doll)
}
}
def displayLevelAndAttributes(level: Int, attrs: Attributes[Int]) {
lvlEl.empty()
val lvlBtn = jq(s"""<a href="javascript:void(0)" class="upgrade-btn lvl-btn">Level: $level</a>""")
lvlEl.append(lvlBtn)
lvlBtn.on("click", () => {
setRequiredLvlFn(level)
false
})
}
def displayPaperDoll(doll: PaperDoll[Option[ComputedItem]]) {
dollEl.empty()
val clrBtn = jq(s"""<a href="javascript:void(0)" class="upgrade-btn clear-btn">Clear</a>""")
dollEl.append(clrBtn)
clrBtn.on("click", () => {
itemDetailHover.setSecondItem(None)
setUpgradeItemFn(None)
false
})
doll.toList.sortBy(_._1.toString).foreach { case (iid, item) =>
val e = renderIid(iid, item)
dollEl.append(e)
}
}
def renderIid(iid: InventoryId, item: Option[ComputedItem]): JQuery = {
val el = jq(s"""<a href="javascript:void(0)" class="upgrade-btn">$iid</a>""")
el.on("click", () => {
itemDetailHover.setSecondItem(item)
setUpgradeItemFn(item)
false
})
el.on("mouseenter", (e: dom.MouseEvent) => {
item.foreach { item =>
itemDetailHover.setFirstItem(Some(item))
itemDetailHover.show(e.clientX, e.clientY, compare = false)
}
})
el.on("mouseleave", () => {
itemDetailHover.hide()
})
}
}
| mihailim/looty | looty/src/main/scala/looty/views/loot/UpgradesPane.scala | Scala | gpl-2.0 | 4,456 |
// main
// Entry point: prints the result of the macro under test.
// NOTE(review): `Foo.genOwner` is defined in the companion test sources (not
// visible here) — presumably a macro exercising owner resolution; confirm there.
@main def Test = {
  println(Foo.genOwner)
}
} | dotty-staging/dotty | tests/run-macros/i8877/Test_2.scala | Scala | apache-2.0 | 53 |
package models
import play.api.Play
/**
* Created by artem on 23.11.14.
*/
object Configuration {
  import Play.current
  /** Mongo-related configuration values. */
  object mongo {
    /** Host of the Mongo server; fails fast when the "mongo.host" key is absent. */
    def host = Play.configuration.getString("mongo.host") match {
      case Some(configured) => configured
      case None             => sys.error("Mongo host not set")
    }
  }
}
| arakcheev/wbox | app/models/Configuration.scala | Scala | apache-2.0 | 252 |
package org.bitcoins.core.p2p
import org.bitcoins.testkitcore.gen.p2p.ControlMessageGenerator
import org.bitcoins.testkitcore.util.BitcoinSUnitTest
class RejectMessageTest extends BitcoinSUnitTest {
  it must "have serialization symmetry" in {
    // Round-trip every generated reject message through its hex encoding.
    forAll(ControlMessageGenerator.rejectMessage) { original =>
      assert(RejectMessage(original.hex) == original)
    }
  }
}
| bitcoin-s/bitcoin-s | core-test/src/test/scala/org/bitcoins/core/p2p/RejectMessageTest.scala | Scala | mit | 380 |
/*
* Copyright 2016 Dennis Vriend
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.dnvriend.component.highlevelserver.repository
import akka.actor.Actor
import com.github.dnvriend.component.highlevelserver.dto.{ Person, PersonWithId }
/**
 * In-memory person store kept in actor state via `context.become`:
 * `id` is the last assigned id, `people` maps ids to stored records.
 */
class PersonRepository extends Actor {
  override def receive: Receive = database(0, Map.empty)
  def database(id: Int, people: Map[Int, PersonWithId]): Receive = {
    case "findAll" =>
      // Return a stable, id-ordered snapshot of all stored people.
      sender() ! people.values.toList.sortBy(_.id)
    case p: Person =>
      // Assign the next id, store the record, and echo it back to the caller.
      val nextId = id + 1
      val stored = PersonWithId(nextId, p.name, p.age, p.married)
      context.become(database(nextId, people + (nextId -> stored)))
      sender() ! stored
  }
}
| dnvriend/akka-http-test | app/com/github/dnvriend/component/highlevelserver/repository/PersonRepository.scala | Scala | apache-2.0 | 1,234 |
/**
* Copyright (c) 2016, Cloudera, Inc. All Rights Reserved.
*
* Cloudera, Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"). You may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for
* the specific language governing permissions and limitations under the
* License.
*/
package com.cloudera.sparkts.models
import breeze.linalg.{DenseMatrix, DenseVector => BreezeDenseVector}
import org.apache.spark.mllib.linalg.DenseVector
import org.scalatest.FunSuite
import org.scalatest.Matchers._
class ARIMAXSuite extends FunSuite {
  // Data from http://www.robjhyndman.com/data/ - command to use this data available on website
  // robjhyndman.com/talks/RevolutionR/exercises1.pdf

  /**
   * Reads columns [col1, col2) of every data row (header skipped) from a CSV
   * resource on the test classpath, flattened into a single column vector.
   */
  private def readColumns(resource: String, col1: Int, col2: Int): Array[Double] = {
    val rows = scala.io.Source.fromInputStream(getClass.getClassLoader.getResourceAsStream(resource)).getLines()
    rows.drop(1).map(a => a.split(",", 4).map(_.trim).slice(col1, col2).map(va => va.toDouble)).toArray.flatten
  }

  /** Training data columns [col1, col2) from data_train.csv. */
  def getTrainData(col1: Int, col2: Int) = readColumns("data_train.csv", col1, col2)

  /** Test data columns [col1, col2) from data_test.csv. */
  def getTestData(col1: Int, col2: Int) = readColumns("data_test.csv", col1, col2)

  val gdp_train = getTrainData(3,4)
  val sales_train = getTrainData(1,2)
  val adBudget_train = getTrainData(2,3)
  val gdp_test = getTestData(3,4)
  val sales_test = getTestData(1,2)
  val adBudget_test = getTestData(2,3)
  val tsTrain = new DenseVector(gdp_train)
  val xregTrain = new DenseMatrix(rows = sales_train.length, cols = 2, data = sales_train ++ adBudget_train)
  val tsTest = new BreezeDenseVector(gdp_test)
  val xregTest = new DenseMatrix(rows = sales_test.length, cols = 2, data = sales_test ++ adBudget_test)
  val tsTrain_2 = new DenseVector(Array(93.0,82,109,110,109,84,100,91,119,78,99,92,76,99,84,103,107,106,106,89,121,103,92,94,99,94,90,99,100,125,78,95,92,84,99,88,85,121,119,94,89,121,110,110,78,88,86,77,106,127,91,98,108,110,88,118,112,104,97,100,97,96,95,111,84,102,98,110,108,92,121,104,109,105,93,74,106,118,97,109,90,91,95,95,111,112,96,122,108,96,78,124,79,89,98,127,110,92,120,109,106,124,135,110,98,108,109,103,106,92,89,82,118,94,112,86))
  val xregTrain_2 = new DenseMatrix(rows = 116, cols = 4, data = Array(416,393,444,445,426,435,471,397,454,416,424,395,401,471,400,418,476,436,442,472,492,443,418,417,423,382,433,409,436,437,372,419,423,415,432,413,361,415,437,391,395,468,415,386,410,437,401,446,492,443,438,417,384,418,403,408,380,422,432,405,437,444,485,426,411,440,400,440,432,439,431,384,404,439,401,401,427,375,411,428,376,407,403,454,478,418,428,401,467,456,446,509,406,431,458,469,450,462,538,435,485,439,451,457,495,479,418,423,430,477,423,462,481,406,450,405,
    0,0,0,0,1,1,0,0,0,0,0,1.0,1,0,0,0,0,0,1,1,0,0,0,0,0,1,1,0,0,0,0,0,1,1,0,0,0,0,0,1,1,0,0,0,0,0,1,1,0,0,0,0,0,1,1,0,0,0,0,0,1,1,0,0,0,0,0,1,1,0,0,0,0,0,1,1,0,0,0,0,0,1,1,0,0,0,0,0,0,1,1,0,0,0,0,0,1,1,0,0,0,0,0,1,1,0,0,0,0,0,1,1,0,0,0,0,
    28,28,28,28,28,28,29,29,29,29,29,29,29,21,21,21,21,21,21,21,28,28,28,28,28,28,28,21,21,21,21,21,21,21,30,30,30,30,30,30,30,42,42,42,15,15,15,15,19,19,19,19,19,19,19,23,23,23,23,23,23,23,25,25,25,25,25,25,25,16,16,16,16,16,16,16,17,17,17,17,17,17,17,21,21,21,21,21,26,26,26,35,35,35,35,35,35,35,34,34,34,34,34,34,34,25,25,25,25,25,25,25,24,24,24,24,
    55,57,53,55,57,50,50,53,51,55,48,46,42,41,48,48,55,59,57,55,59,53,46,44,41,33,32,42,41,37,44,41,44,42,41,37,46,46,37,44,42,39,41,35,57,62,55,53,53,55,55,42,46,42,42,48,50,44,50,48,50,57,55,59,59,53,57,60,55,51,44,42,41,48,50,46,41,39,50,53,48,42,39,33,44,37,35,41,54,53,50,47,52,52,57,53,53,50,55,46,51,56,57,57,57,53,50,42,49,52,53,50,46,48,49,52))
  val tsTest_2 = new BreezeDenseVector(Array(100.0 ,98 ,102 ,98 ,112 ,99 ,99 ,87 ,103 ,115 ,101 ,125 ,117 ,109 ,111 ,105))
  val xregTest_2 = new DenseMatrix(rows = 16, cols = 4, data = Array( 465,453,472,454,432,431,475,393,437,537,462,539,471,455,466,490,
    1,1,0,0,0,0,0,1,1,0,0,0,0,0,1.0,1,
    24,24,25,25,25,25,25,25,25,23,23,23,23,23,23,23,
    51,54,49,46,42,41,45,46,48,41,42,48,43,47,48,46 ))

  /**
   * Arithmetic mean of the vector's values.
   * Matches the original loop semantics: an empty vector yields 0.0 / 0 = NaN.
   */
  def mean(t: BreezeDenseVector[Double]): Double = t.toArray.sum / t.length

  /**
   * Largest absolute deviation of any element from `mean`.
   * Folding from 0.0 preserves the original behavior (0.0 for an empty vector;
   * absolute deviations are non-negative so the floor never masks a maximum).
   */
  def maxDeviation(mean: Double, ts: BreezeDenseVector[Double]): Double =
    ts.toArray.foldLeft(0.0)((acc, v) => math.max(acc, math.abs(v - mean)))

  /* First data set */
  test("MAX(0,0,1) 1 true - first data set"){
    val model = ARIMAX.fitModel(0, 0, 1, tsTrain, xregTrain, 1)
    assert(model.coefficients.length == 6)
    val results = model.forecast(tsTest, xregTest)
    results.length should be (tsTest.length)
    val avg = mean(tsTest)
    for (i <- results ) {
      i should be (avg +- 10)
    }
  }
  test("MAX(0,0,2) 1 true"){
    val model = ARIMAX.fitModel(0, 0, 2, tsTrain_2, xregTrain_2, 1)
    assert(model.coefficients.length == 11)
    val results = model.forecast(tsTest_2, xregTest_2)
    results.length should be (tsTest_2.length)
    val avg = mean(tsTest_2)
    for (i <- results ) {
      i should be (avg +- 2)
    }
  }
  test("IMAX(0,1,1) 1 true"){
    val model = ARIMAX.fitModel(0, 1, 1, tsTrain_2, xregTrain_2, 1)
    assert(model.coefficients.length == 10)
    val results = model.forecast(tsTest_2, xregTest_2)
    results.length should be (tsTest_2.length)
    val avg = mean(tsTest_2)
    for (i <- results ) {
      i should be (avg +- 2)
    }
  }
  test("ARIMAX(2,1,1) 1 true false - first data set"){
    val model = ARIMAX.fitModel(2, 1, 1, tsTrain, xregTrain, 1, includeIntercept = false)
    assert(model.coefficients.length == 8)
    val results = model.forecast(tsTest, xregTest)
    results.length should be (tsTest.length)
    val avg = mean(tsTest)
    // The timeseries data are quite scattered that's why we multiply max deviation by 2
    for (i <- results ) {
      i should be (avg +- maxDeviation(mean(tsTest),tsTest )*2)
    }
  }
  test("ARIMAX(1,1,1) 1 true true"){
    val model = ARIMAX.fitModel(1, 1, 1, tsTrain_2, xregTrain_2, 1)
    assert(model.coefficients.length == 11)
    val results = model.forecast(tsTest_2, xregTest_2)
    results.length should be (tsTest_2.length)
    val avg = mean(tsTest_2)
    for (i <- results ) {
      i should be (avg +- 5)
    }
  }
  test("ARIMAX(2,1,1) 1 true false"){
    val model = ARIMAX.fitModel(2, 1, 1, tsTrain_2, xregTrain_2, 1, includeIntercept = false)
    assert(model.coefficients.length == 12)
    val results = model.forecast(tsTest_2, xregTest_2)
    results.length should be (tsTest_2.length)
    val avg = mean(tsTest_2)
    for (i <- results ) {
      i should be (avg +- 10)
    }
  }
}
| samklr/spark-timeseries | src/test/scala/com/cloudera/sparkts/models/ARIMAXSuite.scala | Scala | apache-2.0 | 7,051 |
/*
* Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.internal.scaladsl.registry
import java.net.URI
import com.lightbend.lagom.internal.registry.ServiceRegistryClient
import com.lightbend.lagom.scaladsl.api.Descriptor.Call
import com.lightbend.lagom.scaladsl.client.CircuitBreakersPanel
import com.lightbend.lagom.scaladsl.client.CircuitBreakingServiceLocator
import scala.concurrent.ExecutionContext
import scala.concurrent.Future
private[lagom] class ServiceRegistryServiceLocator(
    circuitBreakers: CircuitBreakersPanel,
    client: ServiceRegistryClient,
    implicit val ec: ExecutionContext
) extends CircuitBreakingServiceLocator(circuitBreakers) {

  // The registry client takes an optional `portName`; a ServiceLocator has no
  // such concept, so both lookups pass `None`. Consequently only registrations
  // made without a port name will match — services registered under `http`
  // must also be registered without a port name to be found here.
  override def locate(name: String, serviceCall: Call[_, _]): Future[Option[URI]] =
    client.locateAll(name, None).map(_.headOption)

  override def locateAll(name: String, serviceCall: Call[_, _]): Future[List[URI]] =
    client.locateAll(name, None).map(_.toList)
}
| rcavalcanti/lagom | dev/service-registry/devmode-scaladsl/src/main/scala/com/lightbend/lagom/internal/scaladsl/registry/ServiceRegistryServiceLocator.scala | Scala | apache-2.0 | 1,304 |
package at.forsyte.apalache.tla.imp
import at.forsyte.apalache.tla.lir.oper.FixedArity
import at.forsyte.apalache.tla.lir.{FormalParam, OperFormalParam, SimpleFormalParam}
import tla2sany.semantic.FormalParamNode
/**
* A translator of FormalParamNode.
*
* @author konnov
*/
class FormalParamTranslator {
  /** Maps a SANY formal parameter to ours: arity 0 becomes a simple parameter, anything else an operator parameter. */
  def translate(param: FormalParamNode): FormalParam = {
    val name = param.getName.toString.intern()
    param.getArity match {
      case 0     => SimpleFormalParam(name)
      case arity => OperFormalParam(name, arity)
    }
  }
}
object FormalParamTranslator {
  // The translator carries no state, so one shared instance suffices.
  private val instance = new FormalParamTranslator()
  def apply(): FormalParamTranslator = instance
}
| konnov/dach | tla-import/src/main/scala/at/forsyte/apalache/tla/imp/FormalParamTranslator.scala | Scala | apache-2.0 | 806 |
package io.iohk.ethereum.db.storage
import java.nio.ByteBuffer
import akka.util.ByteString
import boopickle.CompositePickler
import boopickle.Default._
import io.iohk.ethereum.blockchain.sync.fast.FastSync._
import io.iohk.ethereum.db.dataSource.DataSource
import io.iohk.ethereum.utils.ByteUtils.compactPickledBytes
object FastSyncStateStorage {
  // Single fixed key: at most one fast-sync state snapshot is persisted at a time.
  val syncStateKey: String = "fast-sync-state"
}
/**
 * Key-value storage for the fast-sync progress snapshot, serialized with
 * boopickle under a dedicated namespace. Stores at most one [[SyncState]]
 * under [[FastSyncStateStorage.syncStateKey]].
 */
class FastSyncStateStorage(val dataSource: DataSource)
    extends KeyValueStorage[String, SyncState, FastSyncStateStorage] {
  type T = FastSyncStateStorage
  import FastSyncStateStorage._
  override val namespace: IndexedSeq[Byte] = Namespaces.FastSyncStateNamespace
  // Pickle ByteString by round-tripping through Array[Byte].
  implicit val byteStringPickler: Pickler[ByteString] =
    transformPickler[ByteString, Array[Byte]](ByteString(_))(_.toArray[Byte])
  // HashType is a sealed-style hierarchy; each concrete subtype must be
  // registered so boopickle can tag and dispatch on it.
  implicit val hashTypePickler: CompositePickler[HashType] =
    compositePickler[HashType]
      .addConcreteType[StateMptNodeHash]
      .addConcreteType[ContractStorageMptNodeHash]
      .addConcreteType[EvmCodeHash]
      .addConcreteType[StorageRootHash]
  // Keys are stored as UTF-8 bytes.
  override def keySerializer: String => IndexedSeq[Byte] = _.getBytes(StorageStringCharset.UTF8Charset)
  override def keyDeserializer: IndexedSeq[Byte] => String = b =>
    new String(b.toArray, StorageStringCharset.UTF8Charset)
  // Values are boopickle-encoded SyncState, compacted to drop unused buffer capacity.
  override def valueSerializer: SyncState => IndexedSeq[Byte] = ss => compactPickledBytes(Pickle.intoBytes(ss))
  override def valueDeserializer: IndexedSeq[Byte] => SyncState =
    (bytes: IndexedSeq[Byte]) => Unpickle[SyncState].fromBytes(ByteBuffer.wrap(bytes.toArray[Byte]))
  protected def apply(dataSource: DataSource): FastSyncStateStorage = new FastSyncStateStorage(dataSource)
  /** Persists (overwrites) the current sync state snapshot. */
  def putSyncState(syncState: SyncState): FastSyncStateStorage = put(syncStateKey, syncState)
  /** Returns the persisted snapshot, if any. */
  def getSyncState(): Option[SyncState] = get(syncStateKey)
  /** Deletes the persisted snapshot. */
  def purge(): FastSyncStateStorage = remove(syncStateKey)
}
| input-output-hk/etc-client | src/main/scala/io/iohk/ethereum/db/storage/FastSyncStateStorage.scala | Scala | mit | 1,909 |
package org.moe.parser
import org.scalatest.FunSuite
import org.scalatest.BeforeAndAfter
import org.moe.runtime._
import org.moe.interpreter._
import org.moe.ast._
import org.moe.parser._
class BooleanLiteralTestSuite extends FunSuite with BeforeAndAfter with ParserTestUtils {

  /** Interprets `code` and checks the unboxed boolean result against `expected`. */
  private def assertBoolResult(code: String, expected: Boolean): Unit = {
    val interpreted = interpretCode(code)
    assert(interpreted.unboxToBoolean.get === expected)
  }

  test("... basic test with a true") { assertBoolResult("true", true) }
  test("... basic test with a false") { assertBoolResult("false", false) }
  test("... basic test with equality (true == true)") { assertBoolResult("true == true", true) }
  test("... basic test with equality (true == false") { assertBoolResult("true == false", false) }
  test("... basic test with equality (false == false") { assertBoolResult("false == false", true) }
  test("... basic test with equality (false == true") { assertBoolResult("false == true", false) }
  test("... basic test with && (true && true)") { assertBoolResult("true && true", true) }
  test("... basic test with && (false || true)") { assertBoolResult("false || true", true) }
}
| MoeOrganization/moe | src/test/scala/org/moe/parser/BooleanLiteralTestSuite.scala | Scala | mit | 1,475 |
// Copyright (C) 2019 MapRoulette contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
package org.maproulette.models.dal
import java.sql.Connection
import anorm.SqlParser._
import anorm._
import anorm.JodaParameterMetaData._
import javax.inject.{Inject, Provider, Singleton}
import org.apache.commons.lang3.StringUtils
import org.joda.time.{DateTime, DateTimeZone}
import org.maproulette.exception.InvalidException
import org.maproulette.Config
import org.maproulette.session.dal.{UserDAL}
import org.maproulette.models.{UserNotification, UserNotificationEmail, NotificationSubscriptions, Task, Comment}
import org.maproulette.models.utils.DALHelper
import org.maproulette.session.User
import org.maproulette.data.UserType
import org.maproulette.permissions.Permission
import org.maproulette.provider.websockets.WebSocketProvider
import org.maproulette.provider.websockets.WebSocketMessages
import org.slf4j.LoggerFactory
import play.api.db.Database
/**
* @author nrotstan
*/
@Singleton
class NotificationDAL @Inject()(db: Database,
                                userDAL: Provider[UserDAL],
                                webSocketProvider: WebSocketProvider,
                                config: Config,
                                permission: Permission)
  extends DALHelper {

  import org.maproulette.utils.AnormExtension._

  // The anorm row parser for user notifications
  val userNotificationParser: RowParser[UserNotification] = {
    get[Long]("user_notifications.id") ~
      get[Long]("user_notifications.user_id") ~
      get[Int]("user_notifications.notification_type") ~
      get[DateTime]("user_notifications.created") ~
      get[DateTime]("user_notifications.modified") ~
      get[Option[String]]("user_notifications.description") ~
      get[Option[String]]("user_notifications.from_username") ~
      get[Option[String]]("challenges.name") ~
      get[Boolean]("user_notifications.is_read") ~
      get[Int]("user_notifications.email_status") ~
      get[Option[Long]]("user_notifications.task_id") ~
      get[Option[Long]]("user_notifications.challenge_id") ~
      get[Option[Long]]("user_notifications.project_id") ~
      get[Option[Long]]("user_notifications.target_id") ~
      get[Option[String]]("user_notifications.extra") map {
      case id ~ userId ~ notificationType ~ created ~ modified ~ description ~ fromUsername ~ challengeName ~ isRead ~ emailStatus ~ taskId ~ challengeId ~ projectId ~ targetId ~ extra =>
        new UserNotification(id, userId, notificationType, created, modified, description, fromUsername, challengeName, isRead, emailStatus, taskId, challengeId, projectId, targetId, extra)
    }
  }

  // The anorm row parser for the email-digest projection of a notification
  val userNotificationEmailParser: RowParser[UserNotificationEmail] = {
    get[Long]("user_notifications.id") ~
      get[Long]("user_notifications.user_id") ~
      get[Int]("user_notifications.notification_type") ~
      get[DateTime]("user_notifications.created") ~
      get[Int]("user_notifications.email_status") map {
      case id ~ userId ~ notificationType ~ created ~ emailStatus =>
        new UserNotificationEmail(id, userId, notificationType, created, emailStatus)
    }
  }

  // The anorm row parser for user's subscriptions to notifications
  val notificationSubscriptionParser: RowParser[NotificationSubscriptions] = {
    get[Long]("id") ~
      get[Long]("user_id") ~
      get[Int]("system") ~
      get[Int]("mention") ~
      get[Int]("review_approved") ~
      get[Int]("review_rejected") ~
      get[Int]("review_again") map {
      case id ~ userId ~ system ~ mention ~ reviewApproved ~ reviewRejected ~ reviewAgain =>
        // Fixed: the original had a stray trailing comma after this expression,
        // which is a syntax error (not a SIP-27 argument-list position).
        NotificationSubscriptions(id, userId, system, mention, reviewApproved, reviewRejected, reviewAgain)
    }
  }

  /**
   * Scans a comment for @username mentions (both "[@name with spaces]" and
   * plain "@name" forms) and creates a mention notification for each
   * mentioned user that actually exists.
   *
   * @param fromUser The user who wrote the comment
   * @param comment  The comment to scan
   * @param task     The task the comment was made on
   */
  def createMentionNotifications(fromUser: User, comment: Comment, task: Task): Unit = {
    // match [@username] (username may contain spaces) or @username (no spaces allowed)
    val mentionRegex = """\[@([^\]]+)\]|@([\w\d_-]+)""".r.unanchored
    for (m <- mentionRegex.findAllMatchIn(comment.comment)) {
      // use first non-null group (only one of the two alternatives matched)
      val username = m.subgroups.filter(_ != null).head
      // Retrieve and notify mentioned user; unknown usernames are silently skipped
      userDAL.get().retrieveByOSMUsername(username, User.superUser) match {
        case Some(mentionedUser) =>
          this.addNotification(UserNotification(
            -1,
            userId = mentionedUser.id,
            notificationType = UserNotification.NOTIFICATION_TYPE_MENTION,
            fromUsername = Some(fromUser.osmProfile.displayName),
            taskId = Some(task.id),
            challengeId = Some(task.parent),
            targetId = Some(comment.id),
            extra = Some(comment.comment)
          ), User.superUser)
        case None => None
      }
    }
  }

  /**
   * Creates a review-outcome notification for the mapper whose work was
   * reviewed, mapping the review status onto a notification type.
   *
   * @param user         The reviewing user
   * @param forUserId    The user to notify (the mapper)
   * @param reviewStatus The outcome of the review (Task.REVIEW_STATUS_*)
   * @param task         The reviewed task
   * @param comment      Optional review comment, carried in the notification's extra field
   */
  def createReviewNotification(user: User, forUserId: Long, reviewStatus: Int, task: Task, comment: Option[Comment]): Unit = {
    val notificationType = reviewStatus match {
      case Task.REVIEW_STATUS_REQUESTED => UserNotification.NOTIFICATION_TYPE_REVIEW_AGAIN
      case Task.REVIEW_STATUS_APPROVED  => UserNotification.NOTIFICATION_TYPE_REVIEW_APPROVED
      case Task.REVIEW_STATUS_ASSISTED  => UserNotification.NOTIFICATION_TYPE_REVIEW_APPROVED
      case Task.REVIEW_STATUS_REJECTED  => UserNotification.NOTIFICATION_TYPE_REVIEW_REJECTED
      case Task.REVIEW_STATUS_DISPUTED  => UserNotification.NOTIFICATION_TYPE_REVIEW_AGAIN
    }
    this.addNotification(UserNotification(
      -1,
      userId = forUserId,
      notificationType = notificationType,
      fromUsername = Some(user.osmProfile.displayName),
      description = Some(reviewStatus.toString()),
      taskId = Some(task.id),
      challengeId = Some(task.parent),
      extra = comment.map(_.comment)  // simplified from an explicit Some/None match
    ), User.superUser)
  }

  /**
   * Add/insert a notification. The email setting of the notification will be automatically
   * set based on the recipient's email settings. If the recipient is not subscribed to
   * the type of notification given then it is simply ignored
   *
   * @param notification The notification to add
   * @param user The user making the request
   */
  def addNotification(notification: UserNotification, user: User): Unit = {
    permission.hasWriteAccess(UserType(), user)(notification.userId)
    val subscriptions = this.getNotificationSubscriptions(notification.userId, user)
    // Pick the subscription setting that governs this notification type
    val subscriptionType = notification.notificationType match {
      case UserNotification.NOTIFICATION_TYPE_SYSTEM          => subscriptions.system
      case UserNotification.NOTIFICATION_TYPE_MENTION         => subscriptions.mention
      case UserNotification.NOTIFICATION_TYPE_REVIEW_APPROVED => subscriptions.reviewApproved
      case UserNotification.NOTIFICATION_TYPE_REVIEW_REJECTED => subscriptions.reviewRejected
      case UserNotification.NOTIFICATION_TYPE_REVIEW_AGAIN    => subscriptions.reviewAgain
      case _                                                  => throw new InvalidException("Invalid notification type")
    }

    // Guard against ignored notification type
    subscriptionType match {
      case UserNotification.NOTIFICATION_IGNORE => // nothing to do
      case _ =>
        notification.emailStatus = subscriptionType
        notification.isRead = false
        db.withConnection { implicit c =>
          val query =
            """INSERT INTO user_notifications (user_id, notification_type, description, from_username, is_read,
                                               email_status, task_id, challenge_id, project_id, target_id, extra)
               VALUES ({user_id}, {notification_type}, {description}, {from_username}, {is_read},
                       {email_status}, {task_id}, {challenge_id}, {project_id}, {target_id}, {extra})
               RETURNING *"""
          // execute() returns a Boolean that carries no useful information for
          // an INSERT; the original bound it to an unused val.
          SQL(query).on(
            'user_id -> notification.userId,
            'notification_type -> notification.notificationType,
            'description -> notification.description,
            'from_username -> notification.fromUsername,
            'is_read -> false,
            'email_status -> notification.emailStatus,
            'task_id -> notification.taskId,
            'challenge_id -> notification.challengeId,
            'project_id -> notification.projectId,
            'target_id -> notification.targetId,
            'extra -> notification.extra
          ).execute()
        }
        // Announce the new notification to any connected websocket clients
        webSocketProvider.sendMessage(WebSocketMessages.notificationNew(
          WebSocketMessages.NotificationData(notification.userId, notification.notificationType)
        ))
    }
  }

  /**
   * Retrieves notification subscriptions for a user
   *
   * @param userId The id of the subscribing user
   * @param user The user making the request
   * @return the stored subscriptions, or an all-on/no-email default when none exist
   */
  def getNotificationSubscriptions(userId: Long, user: User): NotificationSubscriptions = {
    permission.hasReadAccess(UserType(), user)(userId)
    db.withConnection { implicit c =>
      // userId is a Long, so the interpolation below is injection-safe
      SQL(
        s"""SELECT * FROM user_notification_subscriptions
            WHERE user_id=${userId} LIMIT 1"""
      ).as(notificationSubscriptionParser.*).headOption match {
        case Some(subscription) => subscription
        case None =>
          // Default to subscribing to all notifications, but with no emails
          NotificationSubscriptions(-1, userId, UserNotification.NOTIFICATION_EMAIL_NONE,
            UserNotification.NOTIFICATION_EMAIL_NONE,
            UserNotification.NOTIFICATION_EMAIL_NONE,
            UserNotification.NOTIFICATION_EMAIL_NONE,
            UserNotification.NOTIFICATION_EMAIL_NONE)
      }
    }
  }

  /**
   * Updates notification subscriptions for a user
   *
   * @param userId The id of the subscribing user
   * @param user The user making the request
   * @param subscriptions The updated subscriptions
   */
  def updateNotificationSubscriptions(userId: Long, user: User, subscriptions: NotificationSubscriptions): Unit = {
    permission.hasWriteAccess(UserType(), user)(userId)
    db.withConnection { implicit c =>
      // Upsert new subscription settings
      SQL(
        s"""INSERT INTO user_notification_subscriptions (user_id, system, mention, review_approved, review_rejected, review_again)
            VALUES({userId}, {system}, {mention}, {reviewApproved}, {reviewRejected}, {reviewAgain})
            ON CONFLICT (user_id) DO
            UPDATE SET system=EXCLUDED.system, mention=EXCLUDED.mention, review_approved=EXCLUDED.review_approved, review_rejected=EXCLUDED.review_rejected, review_again=EXCLUDED.review_again"""
      ).on(
        'userId -> userId,
        'system -> subscriptions.system,
        'mention -> subscriptions.mention,
        'reviewApproved -> subscriptions.reviewApproved,
        'reviewRejected -> subscriptions.reviewRejected,
        'reviewAgain -> subscriptions.reviewAgain
      ).executeUpdate()
    }
  }

  /**
   * Marks as read the given notifications owned by the given userId
   *
   * @param userId The id of the user that owns the notifications
   * @param user The user making the request
   * @param notificationIds The ids of the notifications to be marked read
   */
  def markNotificationsRead(userId: Long, user: User, notificationIds: List[Long]): Unit = {
    permission.hasWriteAccess(UserType(), user)(userId)
    db.withConnection { implicit c =>
      // Scoped to the owning user so ids belonging to others are ignored
      val query =
        s"""UPDATE user_notifications SET is_read=true
            WHERE user_notifications.user_id={user_id}
            ${this.getLongListFilter(Some(notificationIds), "user_notifications.id")}"""
      SQL(query).on('user_id -> userId).execute()
    }
  }

  /**
   * Deletes the given notifications owned by the given userId
   *
   * @param userId The id of the user that owns the notifications
   * @param user The user making the request
   * @param notificationIds The ids of the notifications to delete
   */
  def deleteNotifications(userId: Long, user: User, notificationIds: List[Long]): Unit = {
    permission.hasWriteAccess(UserType(), user)(userId)
    db.withConnection { implicit c =>
      // Scoped to the owning user so ids belonging to others are ignored
      val query =
        s"""DELETE from user_notifications
            WHERE user_notifications.user_id={user_id}
            ${this.getLongListFilter(Some(notificationIds), "user_notifications.id")}"""
      SQL(query).on('user_id -> userId).execute()
    }
  }

  /**
   * Retrieves the user notifications sent to the given userId, optionally
   * filtered by type, read state, sender and challenge, with paging and a
   * caller-chosen sort (always tie-broken by newest first).
   */
  def getUserNotifications(userId: Long, user: User, limit: Int = Config.DEFAULT_LIST_SIZE, offset: Int = 0,
                           orderColumn: String = "is_read", orderDirection: String = "ASC",
                           notificationType: Option[Int] = None, isRead: Option[Boolean] = None,
                           fromUsername: Option[String] = None, challengeId: Option[Long] = None): List[UserNotification] = {
    permission.hasReadAccess(UserType(), user)(userId)
    db.withConnection { implicit c =>
      val whereClause = new StringBuilder("WHERE user_id = {userId}")
      appendInWhereClause(whereClause, getOptionalFilter(notificationType, "notification_type", "notificationType"))
      appendInWhereClause(whereClause, getOptionalFilter(isRead, "is_read", "isRead"))
      appendInWhereClause(whereClause, getOptionalFilter(challengeId, "challenge_id", "challengeId"))
      appendInWhereClause(whereClause, getOptionalMatchFilter(fromUsername, "from_username", "fromUsername"))

      // In addition to the requested sort, we always add a sort by created desc
      // (unless created was the requested sort column)
      var orderClause = this.order(Some(orderColumn), orderDirection)
      if (orderColumn != "created") {
        orderClause ++= ", created desc"
      }

      val query = s"""
        |SELECT user_notifications.*, challenges.name
        |FROM user_notifications
        |LEFT OUTER JOIN challenges on user_notifications.challenge_id = challenges.id
        |${whereClause}
        |${orderClause}
        |LIMIT ${sqlLimit(limit)} OFFSET $offset
      """.stripMargin
      SQL(query).on(
        'userId -> userId,
        'notificationType -> notificationType,
        'isRead -> isRead,
        'challengeId -> challengeId,
        'fromUsername -> fromUsername
      ).as(userNotificationParser.*)
    }
  }
}
| mvexel/maproulette2 | app/org/maproulette/models/NotificationDAL.scala | Scala | apache-2.0 | 14,373 |
package sbt
package appmacro
import Classes.Applicative
import Types.Id
/** The separate hierarchy from Applicative/Monad is for two reasons.
*
* 1. The type constructor is represented as an abstract type because a TypeTag cannot represent a type constructor directly.
* 2. The applicative interface is uncurried.
*/
trait Instance
{
	/** The abstract type constructor; abstract because a TypeTag cannot represent a type constructor directly (see note above). */
	type M[x]
	/** Uncurried applicative application: combines the container `in` of M-values and maps the combined result with `f`. */
	def app[K[L[x]], Z](in: K[M], f: K[Id] => Z)(implicit a: AList[K]): M[Z]
	/** Maps a pure function over a value in M. */
	def map[S,T](in: M[S], f: S => T): M[T]
	/** Lifts a deferred value into M. */
	def pure[T](t: () => T): M[T]
}
trait Convert
{
	/** Converts the input Tree `in` of type T into a Tree for the macro context `c`; the result wraps `in` in the target M type. */
	def apply[T: c.WeakTypeTag](c: scala.reflect.macros.Context)(in: c.Tree): c.Tree
}
trait MonadInstance extends Instance
{
	/** Collapses a nested M value, giving Instance monadic (not just applicative) power. */
	def flatten[T](in: M[M[T]]): M[T]
}
import scala.reflect._
import macros._
object InputWrapper
{
	/** The name of the wrapper method should be obscure.
	* Wrapper checking is based solely on this name, so it must not conflict with a user method name.
	* The user should never see this method because it is compile-time only and only used internally by the task macro system.*/
	final val WrapName = "wrap_\\u2603\\u2603"

	// This method should be annotated as compile-time only when that feature is implemented
	// It exists only so references to it type-check; invoking it at runtime is an error.
	def wrap_\\u2603\\u2603[T](in: Any): T = error("This method is an implementation detail and should not be referenced.")

	/** Wraps an arbitrary Tree in a call to the `wrap` method of this module for later processing by an enclosing macro.
	* The resulting Tree is the manually constructed version of:
	*
	* `c.universe.reify { InputWrapper.<WrapName>[T](ts.splice) }`
	*/
	def wrapKey[T: c.WeakTypeTag](c: Context)(ts: c.Expr[Any]): c.Expr[T] =
	{
		import c.universe.{Apply=>ApplyTree,_}
		val util = new ContextUtil[c.type](c)
		// Reference to this module so the generated tree selects the wrap method on it.
		val iw = util.singleton(InputWrapper)
		val tpe = c.weakTypeOf[T]
		val nme = newTermName(WrapName).encoded
		// Manually built: InputWrapper.<WrapName>[tpe](ts)
		val tree = ApplyTree(TypeApply(Select(Ident(iw), nme), TypeTree(tpe) :: Nil), ts.tree :: Nil)
		// Preserve the original position for sensible error reporting.
		tree.setPos(ts.tree.pos)
		c.Expr[T](tree)
	}
}
/** Macro machinery that rewrites a direct-style expression into calls on a statically
 * accessible [[Instance]] (pure/map/app, plus flatten for the monadic form). */
object Instance
{
	// Names of the Instance members referenced when constructing call trees.
	final val ApplyName = "app"
	final val FlattenName = "flatten"
	final val PureName = "pure"
	final val MapName = "map"
	// Name of the abstract type-constructor member `M` of Instance.
	final val InstanceTCName = "M"

	/** One discovered input: its payload type `tpe`, the converted expression `expr`
	 * (of type `M[tpe]`), and the synthetic local `local` that will hold the extracted value. */
	final class Input[U <: Universe with Singleton](val tpe: U#Type, val expr: U#Tree, val local: U#ValDef)

	/** Implementation of a macro that provides a direct syntax for applicative functors and monads.
	* It is intended to be used in conjunction with another macro that conditions the inputs.
	*
	* This method processes the Tree `t` to find inputs of the form `InputWrapper.wrap[T]( input )`
	* This form is typically constructed by another macro that pretends to be able to get a value of type `T`
	* from a value convertible to `M[T]`. This `wrap(input)` form has two main purposes.
	* First, it identifies the inputs that should be transformed.
	* Second, it allows the input trees to be wrapped for later conversion into the appropriate `M[T]` type by `convert`.
	* This wrapping is necessary because applying the first macro must preserve the original type,
	* but it is useful to delay conversion until the outer, second macro is called. The `wrap` method accomplishes this by
	* allowing the original `Tree` and `Type` to be hidden behind the raw `T` type. This method will remove the call to `wrap`
	* so that it is not actually called at runtime.
	*
	* Each `input` in each expression of the form `InputWrapper.wrap[T]( input )` is transformed by `convert`.
	* This transformation converts the input Tree to a Tree of type `M[T]`.
	* The original wrapped expression `wrap(input)` is replaced by a reference to a new local `val $x: T`, where `$x` is a fresh name.
	* These converted inputs are passed to `builder` as well as the list of these synthetic `ValDef`s.
	* The `TupleBuilder` instance constructs a tuple (Tree) from the inputs and defines the right hand side of the vals
	* that unpacks the tuple containing the results of the inputs.
	*
	* The constructed tuple of inputs and the code that unpacks the results of the inputs are then passed to the `i`,
	* which is an implementation of `Instance` that is statically accessible.
	* An Instance defines a applicative functor associated with a specific type constructor and, if it implements MonadInstance as well, a monad.
	* Typically, it will be either a top-level module or a stable member of a top-level module (such as a val or a nested module).
	* The `with Singleton` part of the type verifies some cases at macro compilation time,
	* while the full check for static accessibility is done at macro expansion time.
	* Note: Ideally, the types would verify that `i: MonadInstance` when `t.isRight`.
	* With the various dependent types involved, this is not worth it.
	*
	* The `t` argument is the argument of the macro that will be transformed as described above.
	* If the macro that calls this method is for a multi-input map (app followed by map),
	* `t` should be the argument wrapped in Left.
	* If this is for multi-input flatMap (app followed by flatMap),
	* this should be the argument wrapped in Right.
	*/
	def contImpl[T](c: Context, i: Instance with Singleton, convert: Convert, builder: TupleBuilder)(t: Either[c.Expr[T], c.Expr[i.M[T]]])(
		implicit tt: c.WeakTypeTag[T], it: c.TypeTag[i.type]): c.Expr[i.M[T]] =
	{
		import c.universe.{Apply=>ApplyTree,_}

		val util = ContextUtil[c.type](c)
		// the type constructor M of the Instance, extracted from the abstract type member
		val mTC: Type = util.extractTC(i, InstanceTCName)
		// M[T], normalized
		val mttpe: Type = appliedType(mTC, tt.tpe :: Nil).normalize

		// the tree for the macro argument
		val (tree, treeType) = t match {
			case Left(l) => (l.tree, tt.tpe.normalize)
			case Right(r) => (r.tree, mttpe)
		}

		val instanceSym = util.singleton(i)
		// A Tree that references the statically accessible Instance that provides the actual implementations of map, flatMap, ...
		val instance = Ident(instanceSym)
		val isWrapper: Tree => Boolean = util.isWrapper(InputWrapper.WrapName)

		type In = Input[c.universe.type]
		// accumulated in reverse order of discovery during the tree transformation below
		var inputs = List[In]()

		// Local definitions in the macro. This is used to ensure references are to M instances defined outside of the macro call.
		val defs = util.collectDefs(tree, isWrapper)
		val checkQual: Tree => Unit = util.checkReferences(defs, isWrapper)

		// transforms the original tree into calls to the Instance functions pure, map, ...,
		// resulting in a value of type M[T]
		def makeApp(body: Tree): Tree =
			inputs match {
				case Nil => pure(body)
				case x :: Nil => single(body, x)
				case xs => arbArity(body, xs)
			}

		// no inputs, so construct M[T] via Instance.pure or pure+flatten
		def pure(body: Tree): Tree =
		{
			val typeApplied = TypeApply(Select(instance, PureName), TypeTree(treeType) :: Nil)
			val p = ApplyTree(typeApplied, Function(Nil, body) :: Nil)
			if(t.isLeft) p else flatten(p)
		}
		// m should have type M[M[T]]
		// the returned Tree will have type M[T]
		def flatten(m: Tree): Tree =
		{
			val typedFlatten = TypeApply(Select(instance, FlattenName), TypeTree(tt.tpe) :: Nil)
			ApplyTree(typedFlatten, m :: Nil)
		}
		// calls Instance.map or flatmap directly, skipping the intermediate Instance.app that is unnecessary for a single input
		def single(body: Tree, input: In): Tree =
		{
			val variable = input.local
			val param = ValDef(util.parameterModifiers, variable.name, variable.tpt, EmptyTree)
			val typeApplied = TypeApply(Select(instance, MapName), variable.tpt :: TypeTree(treeType) :: Nil)
			val mapped = ApplyTree(typeApplied, input.expr :: Function(param :: Nil, body) :: Nil)
			if(t.isLeft) mapped else flatten(mapped)
		}
		// calls Instance.app to get the values for all inputs and then calls Instance.map or flatMap to evaluate the body
		def arbArity(body: Tree, inputs: List[In]): Tree =
		{
			val result = builder.make(c)(mTC, inputs)
			val param = util.freshMethodParameter( appliedType(result.representationC, util.idTC :: Nil) )
			// bindings that unpack the tuple of input results into the synthetic locals
			val bindings = result.extract(param)
			val f = Function(param :: Nil, Block(bindings, body))
			val ttt = TypeTree(treeType)
			val typedApp = TypeApply(Select(instance, ApplyName), TypeTree(result.representationC) :: ttt :: Nil)
			val app = ApplyTree(ApplyTree(typedApp, result.input :: f :: Nil), result.alistInstance :: Nil)
			if(t.isLeft) app else flatten(app)
		}

		// called when transforming the tree to add an input
		//  for `qual` of type M[A], and a selection qual.value,
		//  the call is addType(Type A, Tree qual)
		//  the result is a Tree representing a reference to
		//  the bound value of the input
		def addType(tpe: Type, qual: Tree): Tree =
		{
			qual.foreach(checkQual)
			val vd = util.freshValDef(tpe, qual.symbol)
			inputs ::= new Input(tpe, qual, vd)
			util.refVal(vd)
		}
		// converts the wrapped input via `convert` before recording it
		def sub(tpe: Type, qual: Tree): Tree =
		{
			val tag = c.WeakTypeTag(tpe)
			addType(tpe, convert(c)(qual)(tag) )
		}

		// applies the transformation
		// resetting attributes must be: a) local b) done here and not wider or else there are obscure errors
		val tr = makeApp( c.resetLocalAttrs( util.transformWrappers(tree, isWrapper, (tpe, tr) => sub(tpe, tr)) ) )
		c.Expr[i.M[T]](tr)
	}

	import Types._

	/** Derives an [[Instance]] from an `Applicative` type class for constructor `A`. */
	implicit def applicativeInstance[A[_]](implicit ap: Applicative[A]): Instance { type M[x] = A[x] } = new Instance
	{
		type M[x] = A[x]
		def app[ K[L[x]], Z ](in: K[A], f: K[Id] => Z)(implicit a: AList[K]) = a.apply[A,Z](in, f)
		def map[S,T](in: A[S], f: S => T) = ap.map(f, in)
		def pure[S](s: () => S): M[S] = ap.pure(s())
	}

	// An Instance whose type constructor is exactly A
	type AI[A[_]] = Instance { type M[x] = A[x] }

	/** Composes two applicative instances into one over the nested constructor `A[B[x]]`. */
	def compose[A[_], B[_]](implicit a: AI[A], b: AI[B]): Instance { type M[x] = A[B[x]] } = new Composed[A,B](a,b)

	// made a public, named, unsealed class because of trouble with macros and inference when the Instance is not an object
	class Composed[A[_], B[_]](a: AI[A], b: AI[B]) extends Instance
	{
		type M[x] = A[B[x]]
		def pure[S](s: () => S): A[B[S]] = a.pure(() => b.pure(s))
		def map[S,T](in: M[S], f: S => T): M[T] = a.map(in, (bv: B[S]) => b.map(bv, f))
		def app[ K[L[x]], Z ](in: K[M], f: K[Id] => Z)(implicit alist: AList[K]): A[B[Z]] =
		{
			// run the inner applicative inside the outer one; AList.asplit adapts the arity witness
			val g: K[B] => B[Z] = in => b.app[K, Z](in, f)
			type Split[ L[x] ] = K[ (L ∙ B)#l ]
			a.app[Split, B[Z]](in, g)(AList.asplit(alist))
		}
	}
}
| harrah/xsbt | util/appmacro/src/main/scala/sbt/appmacro/Instance.scala | Scala | bsd-3-clause | 10,122 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import java.io.File
import org.apache.spark.{SparkConf, SparkException}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
/**
* Test suite to handle metadata cache related.
*/
/**
 * Test suite to handle metadata cache related.
 *
 * Shared behavior for the v1/v2 suites below: after a data file backing a
 * DataFrame disappears, re-reading must raise a SparkException whose message
 * both names FileNotFoundException and suggests a remediation.
 */
abstract class MetadataCacheSuite extends QueryTest with SharedSparkSession {

  /** Removes one data file in the given directory.
   * Skips files starting with "_" or "." so Spark metadata files are untouched. */
  protected def deleteOneFileInDirectory(dir: File): Unit = {
    assert(dir.isDirectory)
    val oneFile = dir.listFiles().find { file =>
      !file.getName.startsWith("_") && !file.getName.startsWith(".")
    }
    assert(oneFile.isDefined)
    oneFile.foreach(_.delete())
  }

  test("SPARK-16336,SPARK-27961 Suggest fixing FileNotFoundException") {
    withTempPath { (location: File) =>
      // Create an ORC directory
      spark.range(start = 0, end = 100, step = 1, numPartitions = 3)
        .write.orc(location.getAbsolutePath)

      // Read the directory in
      val df = spark.read.orc(location.getAbsolutePath)
      assert(df.count() == 100)

      // Delete a file
      deleteOneFileInDirectory(location)

      // Read it again and now we should see a FileNotFoundException
      val e = intercept[SparkException] {
        df.count()
      }
      // error message must point users at recreating the Dataset/DataFrame
      assert(e.getMessage.contains("FileNotFoundException"))
      assert(e.getMessage.contains("recreating the Dataset/DataFrame involved"))
    }
  }
}
/** Runs the shared suite with the DataSource V1 ORC reader, plus temp-view refresh
 * scenarios that only apply to V1 (`spark.catalog.refreshTable`). */
class MetadataCacheV1Suite extends MetadataCacheSuite {
  // Force the V1 code path for ORC.
  override protected def sparkConf: SparkConf =
    super
      .sparkConf
      .set(SQLConf.USE_V1_SOURCE_LIST, "orc")

  test("SPARK-16337 temporary view refresh") {
    withTempView("view_refresh") { withTempPath { (location: File) =>
      // Create an ORC directory
      spark.range(start = 0, end = 100, step = 1, numPartitions = 3)
        .write.orc(location.getAbsolutePath)

      // Read the directory in
      spark.read.orc(location.getAbsolutePath).createOrReplaceTempView("view_refresh")
      assert(sql("select count(*) from view_refresh").first().getLong(0) == 100)

      // Delete a file
      deleteOneFileInDirectory(location)

      // Read it again and now we should see a FileNotFoundException
      val e = intercept[SparkException] {
        sql("select count(*) from view_refresh").first()
      }
      assert(e.getMessage.contains("FileNotFoundException"))
      assert(e.getMessage.contains("REFRESH"))

      // Refresh and we should be able to read it again.
      spark.catalog.refreshTable("view_refresh")
      // one of three partition files was deleted, so the count drops but stays positive
      val newCount = sql("select count(*) from view_refresh").first().getLong(0)
      assert(newCount > 0 && newCount < 100)
    }}
  }

  test("case sensitivity support in temporary view refresh") {
    withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") {
      withTempView("view_refresh") {
        withTempPath { (location: File) =>
          // Create a Parquet directory
          spark.range(start = 0, end = 100, step = 1, numPartitions = 3)
            .write.orc(location.getAbsolutePath)

          // Read the directory in
          spark.read.orc(location.getAbsolutePath).createOrReplaceTempView("view_refresh")

          // Delete a file
          deleteOneFileInDirectory(location)
          intercept[SparkException](sql("select count(*) from view_refresh").first())

          // Refresh and we should be able to read it again.
          // Mixed-case table name exercises case-insensitive resolution.
          spark.catalog.refreshTable("vIeW_reFrEsH")
          val newCount = sql("select count(*) from view_refresh").first().getLong(0)
          assert(newCount > 0 && newCount < 100)
        }
      }
    }
  }
}
/** Runs the shared suite with the DataSource V2 path (empty V1 source list). */
class MetadataCacheV2Suite extends MetadataCacheSuite {
  override protected def sparkConf: SparkConf =
    super
      .sparkConf
      .set(SQLConf.USE_V1_SOURCE_LIST, "")
}
| maropu/spark | sql/core/src/test/scala/org/apache/spark/sql/MetadataCacheSuite.scala | Scala | apache-2.0 | 4,553 |
import scala.collection._
// Compiler "pos" test data (dotty issue i1045, per the file path tests/pos/i1045.scala):
// the only requirement is that this compiles as written. Do not simplify (e.g. to
// getOrElseUpdate) — the getOrElse call with a HashMap.empty default is the exact
// expression under test.
object T {
  val newSymbolMap: mutable.HashMap[String, mutable.HashMap[Int, Double]] = mutable.HashMap.empty
  // inference must pick mutable.HashMap[Int, Double] for the empty default
  val map = newSymbolMap.getOrElse("a", mutable.HashMap.empty)
  map.put(1, 0.0)
  newSymbolMap.put("a", map)
}
| densh/dotty | tests/pos/i1045.scala | Scala | bsd-3-clause | 247 |
package eventscale.service.twitter
import java.io.File
import eventscale.service.EventProcessor
import akka.actor.ActorRef
/** Twitter ingestion service shell.
 *
 * NOTE(review): `apply` is an empty stub — it accepts a stream actor and optional
 * search parameters but performs no work yet. Implementation pending.
 */
class TwitterService {
  // @param stream       actor intended to receive stream events (currently unused)
  // @param searchParams optional search terms (currently unused)
  def apply(stream: ActorRef, searchParams: Option[Array[String]]) = {
  }
}
| jmarin/eventscale | backend/src/main/scala/eventscale/service/twitter/TwitterService.scala | Scala | apache-2.0 | 227 |
package com.github.xubo245.gcdss.load
import htsjdk.samtools.util.Log
import org.apache.spark.{SparkConf, SparkContext}
import org.bdgenomics.adam.rdd.ADAMContext
import org.bdgenomics.adam.rdd.ADAMContext._
/**
* Created by xubo on 2017/4/6.
*/
/** Spark entry point that loads an alignment file with ADAM and prints its
 * record-group dictionary (size first, then each record group).
 */
object LoadRecordGroupDictionary {

  /** Runs locally with 16 threads; expects args(0) = path to an alignment file
   * readable by ADAM's `loadAlignments`.
   */
  def main(args: Array[String]) {
    val conf = new SparkConf().setAppName("LoadRecordGroupDictionary").setMaster("local[16]")
    Log.setGlobalLogLevel(Log.LogLevel.ERROR)
    val sc = new SparkContext(conf)
    // NOTE: loadAlignments comes from the implicit enrichment in ADAMContext._;
    // no explicit ADAMContext instance is needed (the previous unused one was removed).
    compute(sc, args(0))
    sc.stop
  }

  /** Loads alignments from `fqFile` and prints the record-group dictionary contents.
   *
   * @param sc     active Spark context
   * @param fqFile path to the alignment file to load
   */
  def compute(sc: SparkContext, fqFile: String): Unit = {
    val alignment = sc.loadAlignments(fqFile)
    // was a `var` that is never reassigned — val expresses the intent
    val recordGroups = alignment.recordGroups.recordGroups
    println(recordGroups.size)
    recordGroups.foreach(println)
  }
}
| xubo245/GCDSS | src/main/scala/com/github/xubo245/gcdss/load/LoadRecordGroupDictionary.scala | Scala | gpl-2.0 | 918 |
package com.tajpure.scheme
import org.jllvm.InstructionBuilder
import org.jllvm.value.user.instruction.GetElementPointerInstruction
import org.jllvm._type.FunctionType
import org.jllvm._type.IntegerType
import org.jllvm.value.user.instruction.AddInstruction
import org.jllvm.value.user.instruction.StackAllocation
import org.jllvm._type.IdentifiedStructType
import org.jllvm.value.BasicBlock
import org.jllvm.NativeLibrary
import org.jllvm.value.user.instruction.ReturnInstruction
import org.jllvm.ExecutionEngine
import org.jllvm.value.user.instruction.LoadInstruction
import org.jllvm.value.user.instruction.StoreInstruction
import org.jllvm.value.user.constant.ConstantInteger
import org.jllvm.Module
import org.jllvm._type.Type
import org.jllvm.generic.GenericValue
import org.jllvm.value.user.constant.Function
import org.jllvm.bindings.LLVMLinkage
/** Manual smoke test for the JLLVM switch-instruction bindings: builds a function
 * containing an LLVM `switch` over the constant 2, dumps the module, JIT-executes
 * the function, and prints the integer result.
 */
object SwitchTest extends App {
  NativeLibrary.load()

  val module: Module = new Module("test")
  val builder: InstructionBuilder = new InstructionBuilder()

  // fac: (i32) -> i32
  val FT: FunctionType = new FunctionType(IntegerType.i32, Array(IntegerType.i32), false)
  val F: Function = new Function(module, "fac", FT)
  F.setLinkage(LLVMLinkage.LLVMExternalLinkage)

  val BB: BasicBlock = F.appendBasicBlock("b")
  val BB1: BasicBlock = F.appendBasicBlock("b")
  val end: BasicBlock = F.appendBasicBlock("b")

  // shared exit block: return 1
  builder.positionBuilderAtEnd(end)
  new ReturnInstruction(builder, ConstantInteger.constI32(1))

  // entry block: switch on the constant 2, defaulting to BB1
  builder.positionBuilderAtEnd(BB)
  // renamed from the typo `seitch`
  val switchInstr = builder.buildSwitch(ConstantInteger.constI32(2), BB1, 2)
  builder.positionBuilderAtEnd(BB1)
  builder.buildBr(end)

  // case 1: branch to the shared exit block
  val BB2: BasicBlock = F.appendBasicBlock("b")
  builder.positionBuilderAtEnd(BB2)
  builder.buildBr(end)
  switchInstr.addCase(ConstantInteger.constI32(1), BB2)

  // case 2: return 2 — this is the branch taken, since the scrutinee is the constant 2
  val BB3: BasicBlock = F.appendBasicBlock("b")
  builder.positionBuilderAtEnd(BB3)
  new ReturnInstruction(builder, ConstantInteger.constI32(2))
  switchInstr.addCase(ConstantInteger.constI32(2), BB3)

  module.dump()

  val engine = new ExecutionEngine(module)
  val runFunction = engine.runFunction(F, new Array[GenericValue](0))
  println(org.jllvm.bindings.ExecutionEngine.LLVMGenericValueToInt(runFunction.getInstance(), 1))
}
package me.bowdon.ddldiff
import org.scalatest._
import org.scalatest.Matchers._
import me.bowdon.ddldiff.ast._
/** Specs for DiffCalculator.diff: given (old, new) optional table definitions,
 * it should emit the minimal migration steps — create/drop tables, add/drop
 * columns, and add/drop column constraints. */
class DiffCalculatorSpec extends FlatSpec with Matchers {

  "DiffCalculator" should "create missing tables" in {
    val idCol = ColumnDef(Identifier("id"), Numeric, Set.empty)
    val newTable = TableDef(Identifier("foo"), Map(Identifier("id") -> idCol), Set())
    // None -> Some means the table is new
    DiffCalculator.diff(None, Some(newTable)) shouldEqual
      Seq(
        CreateTable(newTable))
  }

  it should "drop removed tables" in {
    val idCol = ColumnDef(Identifier("id"), Numeric, Set.empty)
    val oldTable = TableDef(Identifier("foo"), Map(Identifier("id") -> idCol), Set())
    // Some -> None means the table was deleted
    DiffCalculator.diff(Some(oldTable), None) shouldEqual
      Seq(
        DropTable(Identifier("foo")))
  }

  it should "do nothing for nothing" in {
    DiffCalculator.diff(None, None) shouldEqual Seq()
  }

  it should "add missing columns" in {
    val oldTable = TableDef(Identifier("foo"), Map(), Set())
    val idCol = ColumnDef(Identifier("id"), Numeric, Set.empty)
    val newTable = oldTable.copy(columns = Map(Identifier("id") -> idCol))
    DiffCalculator.diff(Some(oldTable), Some(newTable)) shouldEqual
      Seq(
        AddColumn(Identifier("foo"), idCol))
  }

  it should "drop removed columns" in {
    val idCol = ColumnDef(Identifier("id"), Numeric, Set.empty)
    val oldTable = TableDef(Identifier("foo"), Map(Identifier("id") -> idCol), Set())
    val newTable = oldTable.copy(columns = Map())
    DiffCalculator.diff(Some(oldTable), Some(newTable)) shouldEqual
      Seq(
        DropColumn(Identifier("foo"), idCol))
  }

  it should "add new column constraints" in {
    val oldIdCol = ColumnDef(Identifier("id"), Numeric, Set.empty)
    val constraint = ColumnConstraint(None, PrimaryKey(Some(Asc), true))
    val newIdCol = oldIdCol.copy(constraints = Set(constraint))
    val oldTable = TableDef(Identifier("foo"), Map(Identifier("id") -> oldIdCol), Set())
    val newTable = oldTable.copy(columns = Map(Identifier("id") -> newIdCol))
    DiffCalculator.diff(Some(oldTable), Some(newTable)) shouldEqual
      Seq(
        AddColumnConstraint(Identifier("foo"), Identifier("id"), constraint))
  }

  it should "drop removed column constraints" in {
    val constraint = ColumnConstraint(None, PrimaryKey(Some(Asc), true))
    val oldIdCol = ColumnDef(Identifier("id"), Numeric, Set(constraint))
    val newIdCol = oldIdCol.copy(constraints = Set())
    val oldTable = TableDef(Identifier("foo"), Map(Identifier("id") -> oldIdCol), Set())
    val newTable = oldTable.copy(columns = Map(Identifier("id") -> newIdCol))
    DiffCalculator.diff(Some(oldTable), Some(newTable)) shouldEqual
      Seq(
        DropColumnConstraint(Identifier("foo"), Identifier("id"), constraint))
  }

  it should "assume a constraint rename is a drop and create" in {
    // constraints differ only by name; the diff treats this as drop + add rather than a rename
    val oldConstraint = ColumnConstraint(Some(Identifier("foo_pk")), PrimaryKey(Some(Asc), true))
    val newConstraint = ColumnConstraint(Some(Identifier("foo_pkx")), PrimaryKey(Some(Asc), true))
    val idCol = ColumnDef(Identifier("id"), Numeric, Set(oldConstraint))
    val oldTable = TableDef(Identifier("foo"), Map(Identifier("id") -> idCol), Set())
    val newTable = oldTable.copy(
      columns = Map(Identifier("id") -> idCol.copy(
        constraints = Set(newConstraint))))
    DiffCalculator.diff(Some(oldTable), Some(newTable)) shouldEqual
      Seq(
        DropColumnConstraint(Identifier("foo"), Identifier("id"), oldConstraint),
        AddColumnConstraint(Identifier("foo"), Identifier("id"), newConstraint))
  }
}
| cbowdon/ddl-diff | src/test/scala/me/bowdon/ddldiff/DiffCalculatorSpec.scala | Scala | gpl-3.0 | 3,605 |
// IDE resolve-test data (SCL-9789): type-level booleans and Peano naturals with a
// type-level comparison. The inline marker comment /*resolved: true*/ near the end
// is consumed by the test harness and must remain exactly where it is.
object SCL9789 {

  // Type-level true: If selects its first branch.
  sealed trait True extends Bool {
    type If[T <: Up, F <: Up, Up] = T
  }

  // Type-level false: If selects its second branch.
  sealed trait False extends Bool {
    type If[T <: Up, F <: Up, Up] = F
  }

  sealed trait Bool {
    type If[T <: Up, F <: Up, Up] <: Up
  }

  // Runtime witness carrying the value of a type-level Bool.
  case class BoolRep[B <: Bool](val value: Boolean)

  def toBoolean[B <: Bool](implicit b: BoolRep[B]) = b.value

  implicit val falseRep: BoolRep[False] = BoolRep[False](false)
  implicit val trueRep: BoolRep[True] = BoolRep[True](true)

  // Three-way comparison result with derived boolean projections.
  sealed trait Comparison {
    type Match[IfLT <: Up, IfEQ <: Up, IfGT <: Up, Up] <: Up

    type gt = Match[False, False, True, Bool]
    type lt = Match[True, False, False, Bool]
    type eq = Match[False, True, False, Bool]
    type le = Match[True, True, False, Bool]
    type ge = Match[False, True, True, Bool]
  }

  sealed trait EQ extends Comparison {
    type Match[IfLT <: Up, IfEQ <: Up, IfGT <: Up, Up] = IfEQ
  }

  // Peano naturals with structural matching and type-level comparison.
  sealed trait Nat {
    type Match[NonZero[N <: Nat] <: Up, IfZero <: Up, Up] <: Up
    type Compare[N <: Nat] <: Comparison
  }

  sealed trait _0 extends Nat {
    type Match[NonZero[N <: Nat] <: Up, IfZero <: Up, Up] = IfZero

    type Compare[N <: Nat] =
    N#Match[ConstLT, EQ, Comparison]

    type ConstLT[A] = EQ
  }

  sealed trait Succ[N <: Nat] extends Nat {
    type Match[NonZero[N <: Nat] <: Up, IfZero <: Up, Up] = NonZero[N]
    type Compare[O <: Nat] = O#Match[N#Compare, EQ, Comparison]
  }

  type _1 = Succ[_0]
  type _2 = Succ[_1]

  def main(args: Array[String]) {
    toBoolean[_1#Compare[_2]#/*resolved: true*/lt] toString
  }
}
| ilinum/intellij-scala | testdata/resolve2/bug3/SCL9789.scala | Scala | apache-2.0 | 1,568 |
package com.avast.jarloader
import java.io.File
import java.util.Comparator
/**
* Created <b>4.11.13</b><br>
*
* @author Jenda Kolena, kolena@avast.com
* @version 0.1
*/
/** Orders files by their last-modified timestamp, newest first (descending). */
class TimeFileComparator extends Comparator[File] {

  /** Descending comparison on `lastModified`: returns a positive value when `o1`
   * is older than `o2`, a negative value when newer, and zero on equal timestamps.
   */
  def compare(o1: File, o2: File): Int = {
    val left = o1.lastModified
    val right = o2.lastModified
    if (left == right) 0
    else if (left < right) 1
    else -1
  }
}
| avast/jarloader | src/main/scala/com/avast/jarloader/TimeFileComparator.scala | Scala | apache-2.0 | 396 |
/** Test fixture: a steps class whose methods deliberately carry no JBehave
 * annotations — presumably used to verify that unannotated methods are ignored
 * by the step scanner (TODO confirm against the consuming test). Bodies are
 * intentionally empty. */
class NonAnnotatedSteps {
  def given() {
  }
  def when() {
  }
  // `then` is a reserved/deprecated identifier in newer Scala versions; kept because
  // the name mirrors the JBehave step vocabulary.
  def then() {
  }
}
package com.rasterfoundry.database
import com.rasterfoundry.datamodel._
import com.rasterfoundry.database.Implicits._
import doobie._
import doobie.implicits._
import doobie.postgres.implicits._
import com.rasterfoundry.datamodel.PageRequest
import cats.implicits._
import cats.effect.LiftIO
import java.sql.Timestamp
import java.time.temporal.IsoFields
import java.time.temporal.TemporalAdjusters
import java.time.DayOfWeek
import java.time.LocalDate
import java.time.ZoneOffset
import java.util.UUID
/** Doobie data-access object for rows of the `project_layers` table ([[ProjectLayer]]). */
object ProjectLayerDao extends Dao[ProjectLayer] {
  val tableName = "project_layers"

  // Shared column list; ordering must stay in sync with the ProjectLayer row mapper.
  val selectAllColsF: Fragment = fr"""
    SELECT
    id, created_at, modified_at, name, project_id, color_group_hex,
    smart_layer_id, range_start, range_end, geometry, is_single_band,
    single_band_options, overviews_location, min_zoom_level
  """

  val selectF: Fragment =
    selectAllColsF ++ fr"from" ++ tableF

  /** Fetches a layer by id; None when no row matches. */
  def getProjectLayerById(
      projectLayerId: UUID): ConnectionIO[Option[ProjectLayer]] =
    query.filter(projectLayerId).selectOption

  /** Fetches a layer by id; the ConnectionIO fails when the row is missing. */
  def unsafeGetProjectLayerById(
      projectLayerId: UUID): ConnectionIO[ProjectLayer] = {
    query.filter(projectLayerId).select
  }

  // Query builder for all layers belonging to the given project.
  def listProjectLayersForProjectQ(projectId: UUID) =
    query.filter(fr"project_id = ${projectId}")

  /** Pages through the layers of a project. */
  def listProjectLayersForProject(
      page: PageRequest,
      projectId: UUID): ConnectionIO[PaginatedResponse[ProjectLayer]] =
    listProjectLayersForProjectQ(projectId).page(page)

  /** Lists the project's layers that have at least one scene attached:
   * left join against scenes_to_layers, keeping only non-null scene ids. */
  def listProjectLayersWithImagery(
      projectId: UUID): ConnectionIO[List[ProjectLayer]] = {
    val tableF =
      fr"project_layers left join scenes_to_layers on project_layers.id = scenes_to_layers.project_layer_id"
    val queryBuilder = Dao.QueryBuilder[ProjectLayer](
      selectAllColsF ++ fr"from" ++ tableF,
      tableF,
      Nil)
    queryBuilder
      .filter(fr"scenes_to_layers.scene_id IS NOT NULL")
      .filter(fr"project_id = ${projectId}")
      .list
  }

  /** Inserts the layer and returns it as persisted (all columns read back). */
  def insertProjectLayer(
      pl: ProjectLayer
  ): ConnectionIO[ProjectLayer] = {
    (fr"INSERT INTO" ++ tableF ++ fr"""
      (id, created_at, modified_at, name, project_id, color_group_hex,
      smart_layer_id, range_start, range_end, geometry, is_single_band, single_band_options,
      overviews_location, min_zoom_level
      )
    VALUES
      (${pl.id}, ${pl.createdAt}, ${pl.modifiedAt}, ${pl.name}, ${pl.projectId},
      ${pl.colorGroupHex}, ${pl.smartLayerId}, ${pl.rangeStart}, ${pl.rangeEnd},
      ${pl.geometry}, ${pl.isSingleBand}, ${pl.singleBandOptions}, ${pl.overviewsLocation},
      ${pl.minZoomLevel})
    """).update.withUniqueGeneratedKeys[ProjectLayer](
      "id",
      "created_at",
      "modified_at",
      "name",
      "project_id",
      "color_group_hex",
      "smart_layer_id",
      "range_start",
      "range_end",
      "geometry",
      "is_single_band",
      "single_band_options",
      "overviews_location",
      "min_zoom_level"
    )
  }

  /** Builds the UPDATE statement for a layer.
   * NOTE: smart_layer_id, range_start and range_end are deliberately absent from
   * the SET list — they are not modifiable through this query. modified_at is
   * stamped with the current time. */
  def updateProjectLayerQ(projectLayer: ProjectLayer, id: UUID): Update0 = {
    val updateTime = new Timestamp((new java.util.Date()).getTime)
    val idFilter = fr"id = ${id}"

    val query = (fr"UPDATE" ++ tableF ++ fr"""SET
      modified_at = ${updateTime},
      name = ${projectLayer.name},
      color_group_hex = ${projectLayer.colorGroupHex},
      geometry = ${projectLayer.geometry},
      project_id = ${projectLayer.projectId},
      is_single_band = ${projectLayer.isSingleBand},
      single_band_options = ${projectLayer.singleBandOptions},
      overviews_location=${projectLayer.overviewsLocation},
      min_zoom_level=${projectLayer.minZoomLevel}
    """ ++ Fragments.whereAndOpt(Some(idFilter))).update
    query
  }

  /** Alias for [[insertProjectLayer]]. */
  def createProjectLayer(
      projectLayer: ProjectLayer
  ): ConnectionIO[ProjectLayer] =
    insertProjectLayer(projectLayer)

  /** Fetches a layer only when it belongs to the given project. */
  def getProjectLayer(
      projectId: UUID,
      layerId: UUID
  ): ConnectionIO[Option[ProjectLayer]] =
    query.filter(fr"project_id = ${projectId}").filter(layerId).selectOption

  /** Deletes a layer; first removes its stored overviews (lifted IO side effect)
   * when an overviews location is set. Fails if the layer does not exist
   * (unsafeGetProjectLayerById). Returns the number of rows deleted. */
  def deleteProjectLayer(layerId: UUID)(
      implicit L: LiftIO[ConnectionIO]): ConnectionIO[Int] =
    for {
      pl <- unsafeGetProjectLayerById(layerId)
      _ <- pl.overviewsLocation match {
        case Some(locUrl) =>
          L.liftIO(ProjectDao.removeLayerOverview(layerId, locUrl))
        case _ => ().pure[ConnectionIO]
      }
      rowsDeleted <- query.filter(layerId).delete
    } yield rowsDeleted

  /** Runs the update built by [[updateProjectLayerQ]]; returns rows affected. */
  def updateProjectLayer(pl: ProjectLayer, plId: UUID): ConnectionIO[Int] = {
    updateProjectLayerQ(pl, plId).run
  }

  /** Creates one new layer per scene group and moves the group's scenes into it.
   *
   * Each key is (optional acquisition-time range, optional datasource name); the
   * new layer's name is the split name, suffixed with " | <datasource>" when the
   * group was split on datasource. The new layers copy the source layer's
   * settings and record it as their smart_layer_id parent.
   *
   * @throws IllegalArgumentException when the source layer has no project id or a
   *                                  group is unexpectedly empty
   */
  def batchCreateLayers(
      groupedScenes: Map[(Option[(Timestamp, Timestamp)], Option[String]),
                         List[Scene.ProjectScene]],
      layer: ProjectLayer,
      splitOptions: SplitOptions): ConnectionIO[List[ProjectLayer]] = {
    val projectLayersAndScenes
      : Map[ProjectLayer.Create, List[Scene.ProjectScene]] = groupedScenes.map {
      // group with a time range: carry it into range_start/range_end
      case ((Some((start, end)), datasourceO), scenes) =>
        (ProjectLayer.Create(
           datasourceO match {
             case Some(datasource) =>
               s"${splitOptions.name} | " +
                 s"${datasource}"
             case _ =>
               s"${splitOptions.name}"
           },
           layer.projectId,
           splitOptions.colorGroupHex.getOrElse(layer.colorGroupHex),
           Some(layer.id),
           Some(start),
           Some(end),
           layer.geometry,
           layer.isSingleBand,
           layer.singleBandOptions,
           layer.overviewsLocation,
           layer.minZoomLevel
         ),
         scenes)
      // group without a time range (scenes lacking an acquisition date)
      case ((_, datasourceO), scenes) =>
        (ProjectLayer.Create(
           datasourceO match {
             case Some(datasource) =>
               s"${splitOptions.name} | " +
                 s"${datasource}"
             case _ =>
               s"${splitOptions.name}"
           },
           layer.projectId,
           splitOptions.colorGroupHex.getOrElse(layer.colorGroupHex),
           Some(layer.id),
           None,
           None,
           layer.geometry,
           layer.isSingleBand,
           layer.singleBandOptions,
           layer.overviewsLocation,
           layer.minZoomLevel
         ),
         scenes)
    }

    projectLayersAndScenes.toList traverse {
      case (projectLayerC: ProjectLayer.Create,
            scenes: List[Scene.ProjectScene]) =>
        (scenes.toNel, layer.projectId) match {
          case (Some(s), Some(pId)) =>
            for {
              insertedLayer <- insertProjectLayer(projectLayerC.toProjectLayer)
              _ <- ProjectDao.addScenesToProject(
                s.map(_.id),
                pId,
                insertedLayer.id,
                true
              )
            } yield insertedLayer
          case _ =>
            throw new java.lang.IllegalArgumentException(
              s"Cannot add scenes to a layer which is not in a project: ${layer.id}"
            )
        }
    }
  }

  /** Returns the UTC [start-of-day, start-of-next-day) range containing `date`. */
  def getDayRangeFromTimestamp(date: Timestamp): (Timestamp, Timestamp) = {
    val startOfDay =
      date.toLocalDateTime.toLocalDate.atStartOfDay
    (new Timestamp(startOfDay.toEpochSecond(ZoneOffset.UTC) * 1000),
     new Timestamp(
       startOfDay.plusHours(24).toEpochSecond(ZoneOffset.UTC) * 1000))
  }

  /** Returns the UTC [Monday, next Monday) range of the ISO week containing `date`.
   * Anchors on LocalDate.now(), overwrites the ISO week-based year/week with
   * `date`'s values, then snaps back to the previous-or-same Monday — yielding
   * the Monday of `date`'s week regardless of the current day. */
  def getWeekRangeFromTimestamp(date: Timestamp): (Timestamp, Timestamp) = {
    val week = date.toLocalDateTime.get(IsoFields.WEEK_OF_WEEK_BASED_YEAR)
    val year = date.toLocalDateTime.get(IsoFields.WEEK_BASED_YEAR)
    val datetimeWeek = LocalDate
      .now()
      .`with`(IsoFields.WEEK_OF_WEEK_BASED_YEAR, week)
      .`with`(IsoFields.WEEK_BASED_YEAR, year)
      .`with`(TemporalAdjusters.previousOrSame(DayOfWeek.MONDAY))

    val startOfDay = datetimeWeek.atStartOfDay
    (new Timestamp(startOfDay.toEpochSecond(ZoneOffset.UTC) * 1000),
     new Timestamp(startOfDay.plusDays(7).toEpochSecond(ZoneOffset.UTC) * 1000))
  }

  /** Builds the grouping-key function used by [[splitProjectLayer]]: each scene maps
   * to (optional day/week range of its acquisition date, optional datasource name),
   * per the split period and splitOnDatasource flag. */
  def groupScenesBySplitOptions(splitOptions: SplitOptions)
    : Scene.ProjectScene => (Option[(Timestamp, Timestamp)], Option[String]) = {
    scene: Scene.ProjectScene =>
      (splitOptions.period, splitOptions.splitOnDatasource) match {
        case (SplitPeriod.Day, Some(true)) =>
          (scene.filterFields.acquisitionDate.map(getDayRangeFromTimestamp),
           Some(scene.datasource.name))
        case (SplitPeriod.Week, Some(true)) =>
          (scene.filterFields.acquisitionDate.map(getWeekRangeFromTimestamp),
           Some(scene.datasource.name))
        case (SplitPeriod.Day, _) =>
          (scene.filterFields.acquisitionDate.map(getDayRangeFromTimestamp),
           None)
        case (SplitPeriod.Week, _) =>
          (scene.filterFields.acquisitionDate.map(getWeekRangeFromTimestamp),
           None)
      }
  }

  /** Splits a layer's scenes into new layers by time period (and optionally
   * datasource); when removeFromLayer is set, the scenes are also detached from
   * the source layer. Returns the newly created layers. */
  def splitProjectLayer(
      projectId: UUID,
      layerId: UUID,
      splitOptions: SplitOptions): ConnectionIO[List[ProjectLayer]] = {
    for {
      layer <- unsafeGetProjectLayerById(layerId)
      scenes <- ProjectLayerScenesDao
        .listLayerScenesRaw(layerId, Some(splitOptions))
        .flatMap(ProjectLayerScenesDao.scenesToProjectScenes(_, layerId))
      groupedScenes = scenes.groupBy(groupScenesBySplitOptions(splitOptions))
      newLayers <- batchCreateLayers(groupedScenes, layer, splitOptions)
      _ <- splitOptions.removeFromLayer match {
        case Some(true) =>
          ProjectDao.deleteScenesFromProject(scenes.map(_.id),
                                             projectId,
                                             layerId)
        case _ => 0.pure[ConnectionIO]
      }
    } yield newLayers
  }

  /** True iff the layer exists and belongs to the given project; false when the
   * layer is missing or belongs to another project (or none). */
  def layerIsInProject(layerId: UUID,
                       projectID: UUID): ConnectionIO[Boolean] = {
    query.filter(layerId).selectOption map {
      case Some(projectLayer) => projectLayer.projectId == Option(projectID)
      case _                  => false
    }
  }
}
package org.eigengo.activator.nashorn
import akka.actor.ActorRef
import spray.routing.{Directives, Route}
import akka.util.Timeout
import java.util.concurrent.TimeUnit
import scala.concurrent.ExecutionContext
import spray.http.{ContentTypes, StatusCodes, HttpEntity, HttpResponse}
import org.json4s.{NoTypeHints, Formats}
import org.json4s.jackson.Serialization
/** Spray routes and JSON serialization for driving workflows: start a workflow by
 * name and feed an existing instance its next request payload. */
trait WorkflowService extends Directives {
  import WorkflowActor._
  import WorkflowInstanceActor._
  import akka.pattern.ask
  import Serialization._

  // json4s formats used by `write` below; bound to `_` to keep the implicit anonymous
  private implicit val _: Formats = Serialization.formats(NoTypeHints)
  // ask timeout for all workflowActor interactions
  private implicit val timeout = Timeout(5000, TimeUnit.MILLISECONDS)

  import WorkflowObjectMapper._

  /** Maps actor replies to HTTP responses:
   *  - Started / End / Next -> 200 with a JSON body of id, instruction and (where present) data
   *  - Error                -> 400; NOTE(review): the error, instruction and data fields are
   *                            discarded — only the id is serialized
   *  - StartFailed          -> 404 with an empty JSON object
   */
  private def toJson(value: Any): HttpResponse = value match {
    case Started(id, instruction) =>
      HttpResponse(entity =
        HttpEntity(ContentTypes.`application/json`, write(Map("id" -> id, "instruction" -> map(instruction)))))
    case End(id, instruction, data) =>
      HttpResponse(entity =
        HttpEntity(ContentTypes.`application/json`, write(Map("id" -> id, "instruction" -> map(instruction), "data" -> map(data)))))
    case Next(id, instruction, data) =>
      HttpResponse(entity =
        HttpEntity(ContentTypes.`application/json`, write(Map("id" -> id, "instruction" -> map(instruction), "data" -> map(data)))))
    case Error(id, error, instruction, data) =>
      HttpResponse(entity =
        HttpEntity(ContentTypes.`application/json`, write(Map("id" -> id))), status = StatusCodes.BadRequest)
    case StartFailed =>
      HttpResponse(entity =
        HttpEntity(ContentTypes.`application/json`, "{}"), status = StatusCodes.NotFound)
  }

  /** Route:
   *  - POST /workflow/start/<name>: asks the workflow actor to start "/<name>"
   *  - POST /workflow/next/<id>: forwards the raw request body to instance <id>
   */
  def workflowRoute(workflowActor: ActorRef)(implicit executor: ExecutionContext): Route = {
    path("workflow" / "start" / Segment) { workflowName =>
      post {
        complete {
          (workflowActor ? StartWorkflow("/" + workflowName)).map(toJson)
        }
      }
    } ~
    path("workflow" / "next" / Segment) { id =>
      post {
        handleWith { entity: Array[Byte] =>
          // NOTE(review): debug leftover — prints the byte-array reference, not its contents
          println(entity)
          (workflowActor ? Request(id, entity)).map(toJson)
        }
      }
    }
  }
}
| eigengo/activator-akka-nashorn | src/main/scala/org/eigengo/activator/nashorn/WorkflowService.scala | Scala | apache-2.0 | 2,173 |
/**
* Copyright (C) 2016 DANS - Data Archiving and Networked Services (info@dans.knaw.nl)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nl.knaw.dans.easy.multideposit.parser
import java.io.IOException
import better.files.File
import cats.data.Validated
import nl.knaw.dans.easy.multideposit.model.MimeType
import org.apache.tika.Tika
object MimeType {

  // Shared Tika instance used for all detections.
  // NOTE(review): assumed safe for concurrent detect() calls — confirm.
  private val tika = new Tika

  /**
   * Identify the mimeType of a path.
   *
   * @param file the file to identify
   * @return the detected mimeType as a valid `Validated` value if
   *         identification succeeded; otherwise an invalid value carrying a
   *         ParseError built from the IOException's message (row number -1,
   *         since no input row is associated with the failure)
   */
  def get(file: File): Validated[MimeType] = {
    // Only IOException is converted into a parse error; any other
    // exception from Tika propagates to the caller.
    Validated.catchOnly[IOException] { tika.detect(file.path) }
      .leftMap(ioe => ParseError(-1, ioe.getMessage).chained)
  }
}
| DANS-KNAW/easy-split-multi-deposit | src/main/scala/nl.knaw.dans.easy.multideposit/parser/MimeType.scala | Scala | apache-2.0 | 1,276 |
package net.fosdal.oslo.ofile
import java.io.FileNotFoundException
import org.scalatest.{Matchers, WordSpec}
/**
 * Spec for the `ofile` helpers: `fileContents` (filesystem only),
 * `resourceContents` (classpath only) and `contents` (tries both).
 */
class OFileSpec extends WordSpec with Matchers {

  "fileContents" when {
    "given a source file that exists" must {
      "get it's contents" in new Fixture {
        fileContents(existingFile) shouldBe sourceContent
      }
    }
    "given a source file that does not exist" must {
      "throw an exception" in new Fixture {
        // Expected message is the platform format produced by java.io streams.
        val exception: FileNotFoundException = intercept[FileNotFoundException](fileContents(doesNotExist))
        exception.getMessage shouldBe s"$doesNotExist (No such file or directory)"
      }
    }
  }

  "resourceContents" when {
    "given a source file that exists" must {
      "get it's contents" in new Fixture {
        resourceContents(existingResource) shouldBe sourceContent
      }
    }
    "given a source file that does not exist" must {
      "throw an exception" in new Fixture {
        exception.getMessage shouldBe s"resource not found: $doesNotExist"
        val exception: Exception = intercept[Exception](resourceContents(doesNotExist))
      } // (original order preserved below)
    }
  }

  "contents" when {
    "given a source file that exists as a file" must {
      "get it's contents" in new Fixture {
        contents(existingFile) shouldBe sourceContent
      }
    }
    "given a source file that exists as a resource" must {
      "get it's contents" in new Fixture {
        contents(existingResource) shouldBe sourceContent
      }
    }
    "given a source file that exists as neither a file nor a resource" must {
      "throw an exception" in new Fixture {
        // NOTE(review): this exercises fileContents, not contents — it looks
        // like a copy-paste from the fileContents spec above. Confirm and
        // switch to contents(doesNotExist) with whatever message it throws.
        val exception: FileNotFoundException = intercept[FileNotFoundException](fileContents(doesNotExist))
        exception.getMessage shouldBe s"$doesNotExist (No such file or directory)"
      }
    }
  }

  // Shared data for all tests; a fresh instance per test via `new Fixture`.
  trait Fixture {
    // Same text stored both on disk and on the test classpath.
    val sourceContent: String = """Lorem ipsum dolor sit amet,
                                  |consectetur adipiscing elit,
                                  |sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.""".stripMargin
    val existingFile = "src/test/resources/test_file.txt"
    val existingResource = "test_file.txt"
    val doesNotExist = "santa_claus.txt"
  }
}
| sfosdal/oslo | src/test/scala/net/fosdal/oslo/ofile/OFileSpec.scala | Scala | apache-2.0 | 2,254 |
package io.finch.wrk
import com.twitter.finagle.http.{Request, Response}
import com.twitter.finagle.stats.NullStatsReceiver
import com.twitter.finagle.tracing.NullTracer
import com.twitter.finagle.{Http, Service}
import com.twitter.util.Await
abstract class Wrk extends App {

  /** Simple JSON payload served by the benchmark endpoints. */
  case class Payload(message: String)

  /**
   * Binds the given service on port 8081 with compression, stats collection
   * and tracing disabled, then blocks until the server terminates.
   */
  protected def serve(s: Service[Request, Response]): Unit = {
    val server = Http.server
      .withCompressionLevel(0)
      .withStatsReceiver(NullStatsReceiver)
      .withTracer(NullTracer)
    Await.ready(server.serve(":8081", s))
  }
}
| finagle/finch | examples/src/main/scala/io/finch/wrk/Wrk.scala | Scala | apache-2.0 | 516 |
package kornell.server.jdbc.repository
import java.util.UUID
import kornell.core.entity.AuditedEntityType
import kornell.core.to.InstitutionHostNamesTO
import kornell.server.jdbc.SQL._
import kornell.server.repository.TOs
import scala.collection.JavaConverters._
/**
 * Data access for the host names registered to a single institution.
 *
 * @param institutionUUID the institution whose host names are managed
 */
class InstitutionHostNameRepo(institutionUUID: String) {

  /** Fetches all host names of this institution, ordered alphabetically. */
  def get: InstitutionHostNamesTO = {
    TOs.newInstitutionHostNamesTO(
      sql"""
		  | select hostName from InstitutionHostName
		  | where institutionUUID = ${institutionUUID}
		  | order by hostName"""
        .map[String])
  }

  /**
   * Replaces the institution's host names wholesale: deletes the current set,
   * inserts the given ones, and records the before/after in the audit log.
   * NOTE(review): delete + per-row insert is not visibly wrapped in a
   * transaction here — confirm the surrounding JDBC layer provides one.
   */
  def updateHostnames(hostnames: InstitutionHostNamesTO): InstitutionHostNamesTO = {
    val from = get
    removeHostnames(institutionUUID)
    hostnames.getInstitutionHostNames.asScala.foreach(hostname => addHostname(hostname))
    //log entity change
    EventsRepo.logEntityChange(institutionUUID, AuditedEntityType.institutionHostName, institutionUUID, from, hostnames)
    hostnames
  }

  /**
   * Deletes every host name of the given institution and clears the host name
   * cache. NOTE(review): the parameter shadows the constructor's
   * institutionUUID; callers in this file always pass the same value.
   */
  def removeHostnames(institutionUUID: String): InstitutionHostNameRepo = {
    sql"""delete from InstitutionHostName where institutionUUID = ${institutionUUID}""".executeUpdate
    InstitutionsRepo.cleanUpHostNameCache()
    this
  }

  /** Inserts a single host name under a fresh UUID and updates the cache. */
  def addHostname(hostName: String): Unit = {
    sql"""insert into InstitutionHostName (uuid, hostName, institutionUUID) values
    (${UUID.randomUUID.toString},
    ${hostName},
    ${institutionUUID})""".executeUpdate
    InstitutionsRepo.updateHostNameCache(institutionUUID, hostName)
  }
}
object InstitutionHostNameRepo {
  /**
   * Factory mirroring the class constructor. The result type is now explicit:
   * public API members should not rely on type inference.
   */
  def apply(institutionUUID: String): InstitutionHostNameRepo = new InstitutionHostNameRepo(institutionUUID)
}
| Craftware/Kornell | kornell-api/src/main/scala/kornell/server/jdbc/repository/InstitutionHostNameRepo.scala | Scala | apache-2.0 | 1,621 |
import bwapi.{Unit => ScUnit, _}
import bwta.BWTA
object TestBot {
  /**
   * JVM entry point: constructs the bot and enters the BWAPI event loop.
   * Result type made explicit (`: Unit`) — public `main` should not rely on
   * inference, which would silently change if `run()`'s type ever changed.
   */
  def main(args: Array[String]): Unit =
    new TestBot().run()
}
/**
 * Minimal StarCraft bot: trains SCVs while minerals allow and sends idle
 * workers to their nearest mineral patch on every frame.
 */
class TestBot extends DefaultBWListener {

  val mirror = new Mirror()
  var game: Game = _   // assigned in onStart
  var self: Player = _ // assigned in onStart

  /** Registers this listener and hands control to the BWAPI game loop. */
  def run(): Unit = {
    mirror.getModule().setEventListener(this)
    mirror.startGame()
  }

  override def onUnitCreate(unit: ScUnit): Unit = {
    System.out.println("New unit " + unit.getType)
  }

  override def onStart(): Unit = {
    game = mirror.getGame
    self = game.self()

    //Use BWTA to analyze map
    //This may take a few minutes if the map is processed first time!
    System.out.println("Analyzing map...")
    BWTA.readMap()
    BWTA.analyze()
    System.out.println("Map data ready")
  }

  /** Called once per game frame: issues training and gathering orders. */
  override def onFrame(): Unit = {
    //game.setTextSize(10);
    game.drawTextScreen(10, 10, "Playing as " + self.getName + " - " + self.getRace)

    import scala.collection.JavaConverters._

    // Queue an SCV at each command center while at least 50 minerals remain.
    // NOTE(review): the minerals check is re-evaluated per unit but not
    // decremented per order — multiple centers may all queue in one frame.
    self.getUnits.asScala
      .filter(_.getType == UnitType.Terran_Command_Center && self.minerals >= 50)
      .foreach(_.train(UnitType.Terran_SCV))

    // Send each idle worker to its closest mineral patch.
    self.getUnits.asScala
      .filter(_.getType.isWorker)
      .filter(_.isIdle)
      .foreach { worker =>
        // Sort patches by distance and take the head; headOption covers the
        // no-minerals-on-map case (a minBy would throw on empty).
        val closestMineral = game.neutral.getUnits.asScala
          .filter(_.getType.isMineralField)
          .map(mineral => (mineral.getDistance(worker), mineral))
          .sortBy(_._1)
          .map(_._2)
          .headOption
        closestMineral.foreach(worker.gather)
      }
  }
}
| KadekM/starcraft-bot-scala-template | src/main/scala/TestBot.scala | Scala | apache-2.0 | 1,603 |
/*
* Copyright 2014 IBM Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ibm.spark.kernel.protocol.v5.client.execution
import com.ibm.spark.kernel.protocol.v5.content.{ExecuteReply, ExecuteReplyError, ExecuteResult, StreamContent}
import com.ibm.spark.utils.LogLike
// NOTE(review): a zero-field case class holding mutable vars — equality and
// hashCode are meaningless across instances, and access is unsynchronized.
// Presumably only ever touched from a single actor thread — confirm.
case class DeferredExecution() extends LogLike {
  // Callback registries; resolved-at-most-once callbacks are cleared after firing.
  private var executeResultCallbacks: List[(ExecuteResult) => Unit] = Nil
  private var streamCallbacks: List[(StreamContent) => Unit] = Nil
  private var errorCallbacks: List[(ExecuteReplyError) => Unit] = Nil
  // Resolution state: callbacks fire only once BOTH of these are Some (see
  // processCallbacks).
  private var executeResultOption: Option[ExecuteResult] = None
  private var executeReplyOption: Option[ExecuteReply] = None

  /**
   * Registers a callback for handling ExecuteResult messages.
   * This {@param callback} will run once on successful code execution and
   * then be unregistered. If {@param callback} is registered after the result
   * has been returned it will be invoked immediately.
   * In the event of a failure {@param callback} will never be called.
   * @param callback A callback function, which will be invoked at most once,
   *                 with an ExecuteResult IPython message
   * @return The DeferredExecution with the given callback registered.
   */
  def onResult(callback: (ExecuteResult) => Unit): DeferredExecution = {
    this.executeResultCallbacks = callback :: this.executeResultCallbacks
    processCallbacks()
    this
  }

  /**
   * Registers a callback for handling StreamContent messages.
   * Ths {@param callback} can be called 0 or more times. If the
   * {@param callback} is registered after StreamContent messages have been
   * emitted, the {@param callback} will only receive messages emitted after the
   * point of registration.
   * @param callback A callback function, which can be invoked 0 or more times,
   *                 with Stream Ipython messages
   * @return The DeferredExecution with the given callback registered.
   */
  def onStream(callback: (StreamContent) => Unit): DeferredExecution = {
    this.streamCallbacks = callback :: this.streamCallbacks
    this
  }

  /**
   * Registers a callback for handling ExecuteReply messages when there is an
   * error during code execution. This {@param callback} will run once on failed
   * code execution and then be unregistered. If {@param callback} is registered
   * after the error reply has been returned it will be invoked immediately.
   * In the event of successful code execution {@param callback} will never be
   * called.
   * @param callback A callback function, which will be invoked at most once,
   *                 with an ExecuteReply IPython message
   * @return The DeferredExecution with the given callback registered.
   */
  def onError(callback: (ExecuteReplyError) => Unit): DeferredExecution = {
    this.errorCallbacks = callback :: this.errorCallbacks
    processCallbacks()
    this
  }

  // Fires the appropriate callback set once both reply and result are present.
  // NOTE(review): the error branch also requires an ExecuteResult — if a
  // failed execution never produces one, error callbacks never run; confirm
  // upstream always resolves both messages.
  private def processCallbacks(): Unit = {
    (executeReplyOption, executeResultOption) match {
      case (Some(executeReply), Some(executeResult)) if executeReply.status.equals("error") =>
        //  call error callbacks
        this.errorCallbacks.foreach(_(executeReply))
        //  This prevents methods from getting called again when
        //  a callback is registered after processing occurs
        this.errorCallbacks = Nil
      case (Some(executeReply), Some(executeResult)) if executeReply.status.equals("ok") =>
        //  call result callbacks
        this.executeResultCallbacks.foreach(_(executeResult))
        //  This prevents methods from getting called again when
        //  a callback is registered after processing occurs
        this.executeResultCallbacks = Nil
      case _ =>
        logger.debug(
          s"""
            Did not invoke client callbacks.
            ExecuteReply was: ${executeReplyOption}
            ExecuteResult was: ${executeResultOption}
          """.stripMargin)
    }
  }

  // Records the execution result and attempts to fire callbacks.
  def resolveResult(executeResultMessage: ExecuteResult): Unit = {
    this.executeResultOption = Some(executeResultMessage)
    processCallbacks()
  }

  // Records the execution reply and attempts to fire callbacks.
  def resolveReply(executeReplyMessage: ExecuteReply): Unit = {
    this.executeReplyOption = Some(executeReplyMessage)
    processCallbacks()
  }

  // Streams fan out to every registered stream callback immediately.
  def emitStreamContent(streamContent: StreamContent): Unit = {
    this.streamCallbacks.foreach(streamCallback => {
      streamCallback(streamContent)
    })
  }
}
| bpburns/spark-kernel | client/src/main/scala/com/ibm/spark/kernel/protocol/v5/client/execution/DeferredExecution.scala | Scala | apache-2.0 | 4,934 |
package tifmo
import dcstree.Executor
import dcstree.SemRole
import dcstree.Relation
import scala.collection.mutable
package inference {
import Finder._
import RulesQuick._
import RulesLight._
/**
* The core of inference engine.
*
* This class implements the forward chaining algorithm, provides the
* infra structure for writing rules, and implements basic axioms of
* relational algebra in a way that can avoid most combinatorial explosions.
*/
class IEngineCore extends Executor {
// Monotonically increasing counter backing getNewPredID.
private[this] var predIdCounter = 0

/** Allocates a fresh, unique predicate id (1-based, strictly increasing). */
def getNewPredID(): Int = {
  predIdCounter += 1
  predIdCounter
}
// lock
/**
 * Locking mechanism for debug.
 *
 * If locked, an error will occur if the status of the inference engine is changed
 * (e.g. new atomic sentences proven).
 */
var locked = false

// W: the universal ("world") term, created lazily, one per dimension.
protected[this] var W = null: TermIndex
protected[this] val WPool = mutable.Map.empty[Dimension, TermIndex]

/**
 * Get the W term.
 */
def getW(dim: Dimension) = {
  // Lazily create the one-dimensional W first: every higher-dimensional W is
  // then claimed to be a Cartesian product of copies of it.
  if (W == null) {
    assert(!locked)
    W = new TermIndex(new Dimension(null))
    W.setwflag()
    IEPredNonEmpty(W).apply(Debug_SimpleRuleTrace("W", getNewPredID()))
    IEPredSubsume(W, W).apply(Debug_SimpleRuleTrace("W", getNewPredID()))
    anyTermPool.foreach(_(this, W))
    WPool(W.dim) = W
  }
  WPool.getOrElseUpdate(dim, {
    assert(!locked)
    val ret = new TermIndex(dim)
    ret.setwflag()
    // W is non-empty and reflexively subsumed by definition.
    IEPredNonEmpty(ret).apply(Debug_SimpleRuleTrace("W", getNewPredID()))
    IEPredSubsume(ret, ret).apply(Debug_SimpleRuleTrace("W", getNewPredID()))
    anyTermPool.foreach(_(this, ret))
    claimCP(ret, dim.relabel(null).map(r => (W, r)), Debug_SimpleRuleTrace("W", getNewPredID()))
    ret
  })
}
// new term
/**
 * Creates a fresh term of dimension `dim`: reflexively self-subsumed,
 * announced to the forAnyTerm callbacks, and made a subset of every superset
 * of the corresponding W term (W included).
 */
def newTerm(dim: Dimension) = {
  assert(!locked)
  val w = getW(dim)
  val term = new TermIndex(w.dim)
  IEPredSubsume(term, term).apply(Debug_SimpleRuleTrace("tm id", getNewPredID()))
  anyTermPool.foreach(_(this, term))
  for (x <- w.superSets) {
    applySubsume(term, x, Debug_SimpleRuleTrace("tm sub W", getNewPredID()))
  }
  term
}

/** Creates a fresh term of the unlabeled one-dimensional dimension. */
def new1DTerm() = this.newTerm(new Dimension(null))
// contradiction:
// One-way flag: once a contradiction is derived it is never reset.
private[this] var contraFlag = false
/**
 * Returns if there has been a contradiction during forward chaining.
 */
def hasContradiction = contraFlag
/** Records that a contradiction has been derived. */
def contradict() {
  contraFlag = true
}
// five hierarchy forward chaining:
// Work is drained from five sources of strictly decreasing priority:
// quick -> mergePool -> light -> heavy -> constructQ (see explore()).
private[this] val quick = mutable.Queue.empty[() => Unit]
private[inference] def newQuick(todo: () => Unit) {
  quick.enqueue(todo)
}
// Terms known to be mutually subsumed, pending merge into one representative.
private[this] val mergePool = mutable.Set.empty[TermIndex]
// Light tasks carry the set of terms they depend on; a task is dropped if any
// dependency has been merged away (invalidated) before it runs.
private[this] val light = mutable.Queue.empty[(() => Unit, Set[TermIndex])]
private[inference] def newLight(todo: () => Unit, dep: Set[TermIndex]) {
  light.enqueue((todo, dep))
}
// A deferred rule application: validity of the predicate and of every term in
// the rule arguments is re-checked at execution time.
private[this] class Sched[T <: IEPred](pred: T, f: RuleDo[T], args: Seq[RuleArg]) {
  private[this] val dep = args.flatMap(_.terms).toSet
  def sched() {
    if (pred.valid && dep.forall(_.valid)) f(IEngineCore.this, pred, args)
  }
}
private[this] val heavy = mutable.Queue.empty[Sched[_ <: IEPred]]
private[inference] def newHeavy[T <: IEPred](pred: T, f: RuleDo[T], args: Seq[RuleArg]) {
  heavy.enqueue(new Sched[T](pred, f, args))
}
// Lowest-priority queue; populated by code outside this excerpt.
private[this] val constructQ = mutable.Queue.empty[() => Unit]
/**
 * Forward chaining process.
 *
 * Repeatedly takes from the highest-priority non-empty work source — quick
 * tasks, then pending merges, then light tasks, then heavy (scheduled rule)
 * tasks, then construction tasks — until all five are exhausted.
 */
def explore() {
  def loop() {
    if (!quick.isEmpty) {
      quick.dequeue()()
      loop()
    } else if (!mergePool.isEmpty) {
      // Find the largest equivalence class (terms mutually subsumed with some
      // pool member) and merge it; `cache` avoids recomputing a class for
      // terms already seen in this pass.
      val cache = mutable.Set.empty[TermIndex]
      var most = 0
      var tomerge = null: Set[TermIndex]
      for (x <- mergePool; if !cache.contains(x)) {
        val tmp = x.subSets intersect x.superSets
        cache ++= tmp
        if (tmp.size > most) {
          most = tmp.size
          tomerge = tmp
        }
      }
      mergePool --= tomerge
      assert(tomerge.size >= 2)
      // Prefer a W term as the surviving representative; otherwise keep the
      // term carrying the most structural predicates, so the fewest
      // predicates need rebuilding during the merge.
      val a = tomerge.find(_.isW) match {
        case Some(x) => x
        case None => {
          tomerge.maxBy[Int](x => x.iscps.size + x.mkcps.size +
            x.isins.size + x.mkins.size + x.ispis.size + x.mkpis.size)
        }
      }
      val eqs = tomerge - a
      merge(a, eqs)
      loop()
    } else if (!light.isEmpty) {
      val (todo, dep) = light.dequeue()
      // Skip tasks whose dependent terms were merged away in the meantime.
      if (dep.forall(_.valid)) todo()
      loop()
    } else if (!heavy.isEmpty) {
      heavy.dequeue().sched()
      loop()
    } else if (!constructQ.isEmpty) {
      constructQ.dequeue()()
      loop()
    }
  }
  loop()
}
// apply & claim
// Pattern for each predicate kind: applyXxx records the predicate and fires
// its immediate consequences; claimXxx is the public entry point that also
// propagates along the subsumption lattice.
private[inference] def applyNonEmpty(a: TermIndex, debug_trace: Debug_RuleTrace) = {
  val tmp = IEPredNonEmpty(a)
  if (tmp.apply(debug_trace)) {
    assert(!locked)
    // Quick-level axiom rules coupling non-emptiness with PI and CP.
    rqPINE1(this, tmp)
    rqPINE2(this, tmp)
    rqCPNE1(this, tmp)
    rqCPNE2(this, tmp)
    rqPICP3(this, tmp)
    for (x <- a.neTriggers) x.fire(this, tmp)
    // A non-empty term disjoint with itself is a contradiction.
    if (a.selfDisjoint) contradict()
    true
  } else {
    false
  }
}
/**
 * Claim a term to be non-empty.
 */
def claimNonEmpty(a: TermIndex, debug_trace: Debug_RuleTrace = Debug_SimpleRuleTrace("default", getNewPredID())) {
  if (applyNonEmpty(a, debug_trace)) {
    // Non-emptiness propagates upward to every superset.
    for (x <- a.superSets) {
      applyNonEmpty(x, Debug_RuleSubNE(a.holder, x.holder, getNewPredID()))
    }
  }
}
// Records disjointness; disjointness is kept symmetric by also recording the
// reversed pair (unless a == b, which encodes self-disjointness = emptiness).
private[inference] def applyDisjoint(a: TermIndex, b: TermIndex, debug_trace: Debug_RuleTrace) = {
  val tmp = IEPredDisjoint(a, b)
  if (tmp.apply(debug_trace)) {
    assert(!locked)
    for (x <- a.djtTriggers) x.fire(this, tmp)
    if (a == b) {
      // Self-disjoint and known non-empty cannot both hold.
      if (a.knownNE) contradict()
    } else {
      val tmp2 = IEPredDisjoint(b, a)
      val rec = tmp2.apply(Debug_SimpleRuleTrace("djt rev", getNewPredID()))
      assert(rec)
      for (x <- b.djtTriggers) x.fire(this, tmp2)
    }
    true
  } else {
    false
  }
}
/**
 * Claim two terms to be disjoint.
 */
def claimDisjoint(a: TermIndex, b: TermIndex, debug_trace: Debug_RuleTrace = Debug_SimpleRuleTrace("default", getNewPredID())) {
  assert(a.dim == b.dim)
  if (applyDisjoint(a, b, debug_trace)) {
    // Defer the disjointness/subsumption/projection interaction rule.
    newLight(() => rlDjtSubPI1(this, a, b), Set(a, b))
  }
}
private[inference] def applySubsume(a: TermIndex, b: TermIndex, debug_trace: Debug_RuleTrace) = {
  val tmp = IEPredSubsume(a, b)
  if (tmp.apply(debug_trace)) {
    assert(!locked)
    // Mutual subsumption: a and b are equal; queue a for merging.
    if (a.hasSub(b)) mergePool += a
    // If a is now below every component of an intersection headed by some
    // term, a is below that intersection's head too.
    for (in <- b.mkins; if in.comp.forall(_.hasSub(a))) {
      newQuick(() => claimSubsume(a, in.head, Debug_RuleINdef(a.holder, in.head.holder, in.comp.map(_.holder), getNewPredID())))
    }
    for (x <- a.subTriggers) x.fire(this, tmp)
    for (x <- b.superTriggers) x.fire(this, tmp)
    true
  } else {
    false
  }
}
/**
 * Claim term `a` to be a subset of term `b`.
 */
def claimSubsume(a: TermIndex, b: TermIndex, debug_trace: Debug_RuleTrace = Debug_SimpleRuleTrace("default", getNewPredID())) {
  assert(a.dim == b.dim)
  if (applySubsume(a, b, debug_trace)) {
    val bsup = b.superSets
    newLight(() => rlDjtSubPI2(this, a, b), Set(a, b))
    // Transitive closure: every subset of a becomes a subset of every
    // superset of b. Collect the applications first, then run them, so the
    // sets iterated over are not mutated mid-iteration.
    var task = Nil: List[() => Boolean]
    for (x <- a.subSets) {
      if (!x.hasSuper(b) || x == a) {
        for (y <- bsup) {
          task = (() => applySubsume(x, y, Debug_RuleSubSub(x.holder, a.holder, b.holder, y.holder, getNewPredID()))) :: task
        }
      } else {
        // x already below b implies x is already below all of b's supersets.
        assert(bsup.forall(x.hasSuper(_)))
      }
    }
    task.foreach(_())
    if (a.knownNE) {
      claimNonEmpty(b, Debug_RuleSubNE(a.holder, b.holder, getNewPredID()))
    }
  }
}
private[inference] def applyCP(h: TermIndex, t: Set[(TermIndex, SemRole)], debug_trace: Debug_RuleTrace) = {
  val tmp = IEPredCP(h, t)
  if (tmp.apply(debug_trace)) {
    assert(!locked)
    // CP uniqueness: any other CP over the same components equals h.
    // Scanning the component with the fewest mkcps keeps this cheap.
    val mincps = t.minBy[Int](_._1.mkcps.size)._1.mkcps
    for (cp <- mincps; if cp.comp == t && cp != tmp) {
      newQuick(() => claimSubsume(h, cp.head, Debug_SimpleRuleTrace("CP Uniqueness", getNewPredID())))
      newQuick(() => claimSubsume(cp.head, h, Debug_SimpleRuleTrace("CP Uniqueness", getNewPredID())))
    }
    rqCPNE3(this, tmp)
    rqCPNE4(this, tmp)
    rqPICP1(this, tmp)
    for (x <- t.map(_._1); y <- x.mkcpTriggers) y.fire(this, tmp)
    for (x <- h.iscpTriggers) x.fire(this, tmp)
    true
  } else {
    false
  }
}
/**
 * Claim term `h` to be the Cartesian product of `t`
 */
def claimCP(h: TermIndex, t: Set[(TermIndex, SemRole)], debug_trace: Debug_RuleTrace = Debug_SimpleRuleTrace("default", getNewPredID())) {
  // Sanity: the head's roles must be exactly the disjoint union of the
  // components' roles.
  assert {
    val hrs = h.dim.relabel(null)
    val rss = t.map(x => x._1.dim.relabel(x._2))
    t.size >= 2 && hrs == rss.flatten && hrs.size == (0 /: rss)(_ + _.size)
  }
  if (applyCP(h, t, debug_trace)) {
    newLight(() => rlCPCP(this, h, t), t.map(_._1) + h)
  }
}
private[inference] def applyPI(h: TermIndex, t: TermIndex, r: SemRole, debug_trace: Debug_RuleTrace) = {
  val tmp = IEPredPI(h, t, r)
  if (tmp.apply(debug_trace)) {
    assert(!locked)
    // PI uniqueness: projections of t onto the same role set are equal.
    for (pi <- t.mkpis; if pi.headrs == tmp.headrs && pi != tmp) {
      newQuick(() => claimSubsume(h, pi.head, Debug_SimpleRuleTrace("PI Uniqueness", getNewPredID())))
      newQuick(() => claimSubsume(pi.head, h, Debug_SimpleRuleTrace("PI Uniqueness", getNewPredID())))
    }
    rqPINE3(this, tmp)
    rqPINE4(this, tmp)
    rqPICP2(this, tmp)
    newLight(() => rlDjtSubPI3(this, h, t, r), Set(h, t))
    for (x <- t.mkpiTriggers) x.fire(this, tmp)
    for (x <- h.ispiTriggers) x.fire(this, tmp)
    true
  } else {
    false
  }
}
/**
 * Claim term `h` to be the projection of term `t` into role `r`
 */
def claimPI(h: TermIndex, t: TermIndex, r: SemRole, debug_trace: Debug_RuleTrace = Debug_SimpleRuleTrace("default", getNewPredID())) {
  // Sanity: the head's roles must be a proper subset of t's roles.
  assert {
    val hrs = h.dim.relabel(r)
    val trs = t.dim.relabel(null)
    hrs.subsetOf(trs) && trs != hrs
  }
  if (applyPI(h, t, r, debug_trace)) {
    newLight(() => rlPIPI(this, h, t, r), Set(h, t))
  }
}
// "Essentially a subset": x is contained in the equivalence closure of y
// (each member of y expanded to the terms provably equal to it).
private[this] def esssub(x: Set[TermIndex], y: Set[TermIndex]) = {
  x.subsetOf(y.flatMap(z => z.subSets intersect z.superSets))
}
private[inference] def applyIN(h: TermIndex, t: Set[TermIndex], aux: Boolean, debug_trace: Debug_RuleTrace) = {
  // Drop auxiliary intersections over components subsumed by the new one.
  h.isins.filter(in => in.aux && esssub(t, in.comp)).foreach(_.dispose())
  val tmp = IEPredIN(h, t, aux)
  if (tmp.apply(debug_trace)) {
    assert(!locked)
    // Any term below every component of the intersection is below its head.
    // Start from the component with the fewest supersets for efficiency.
    val subs = t.minBy[Int](_.assuper.size).subSets.filter(x => t.forall(_.hasSub(x)))
    for (x <- subs) {
      newQuick(() => claimSubsume(x, h, Debug_RuleINdef(x.holder, h.holder, t.map(_.holder), getNewPredID())))
    }
    // Auxiliary intersections are internal book-keeping: no user triggers.
    if (!aux) {
      for (x <- h.isinTriggers) x.fire(this, tmp)
      for (x <- t; y <- x.mkinTriggers) y.fire(this, tmp)
    }
    true
  } else {
    false
  }
}
/**
 * Claim term `h` to be the intersection of terms `t`
 */
def claimIN(h: TermIndex, t: Set[TermIndex], aux: Boolean = false, debug_trace: Debug_RuleTrace = Debug_SimpleRuleTrace("default", getNewPredID())) {
  assert(t.forall(_.dim == h.dim))
  assert(aux || h.dim.size >= 2)
  // The intersection is below each of its components by definition.
  t.foreach(claimSubsume(h, _, Debug_SimpleRuleTrace("IN def", getNewPredID())))
  // Remove redundant components (those above another component), collapsing
  // each survivor to a canonical member of its equivalence class.
  // NOTE(review): assumes t is non-empty (mt.head below) — confirm callers.
  def squeeze(t: Set[TermIndex]) = {
    val tmp = for (x <- t; if !t.exists(y => y.hasSuper(x) && !x.hasSuper(y))) yield {
      x.superSets intersect x.subSets
    }
    tmp.map(_.head)
  }
  if (aux) {
    val mt = squeeze(t)
    if (mt.size <= 1) {
      // Degenerate intersection of one term: just equate.
      claimSubsume(mt.head, h, Debug_SimpleRuleTrace("IN Check", getNewPredID()))
    } else if (!h.isins.exists(in => esssub(in.comp, mt))) {
      applyIN(h, mt, true, debug_trace)
    }
  } else {
    applyIN(h, t, false, debug_trace)
  }
}
/**
 * Claim `rl(a, b)` for the user-defined binary relation `rl`.
 */
def claimRL(a: TermIndex, rl: Relation, b: TermIndex, debug_trace: Debug_RuleTrace = Debug_SimpleRuleTrace("default", getNewPredID())) {
  val tmp = IEPredRL(a, rl, b)
  if (tmp.apply(debug_trace)) {
    assert(!locked)
    for (x <- a.arlTriggers) x.fire(this, tmp)
    for (x <- b.rlbTriggers) x.fire(this, tmp)
    // Relation semantics are delegated to the user-defined implementation.
    rl.execute[TermIndex](this, a, b)
  }
}
/**
 * Claim `tms.head = func(tms.tail, param)` for the user-defined function `func`.
 */
def claimFunc(func: IEFunction, tms: Seq[TermIndex], param: Any, debug_trace: Debug_RuleTrace = Debug_SimpleRuleTrace("default", getNewPredID())) {
  assert(tms.head.dim == func.headDim(null +: tms.tail.map(_.holder), param))
  if (IEPredFunc(func, tms, param).apply(debug_trace)) {
    assert(!locked)
    // Function uniqueness: same func + same args + same param gives equal
    // heads. Scan the argument with the fewest func records for efficiency.
    if (!tms.tail.isEmpty) {
      val minfuncs = tms.tail.minBy[Int](_.funcs.size).funcs
      for (x <- minfuncs; if x.func == func && x.param == param && x.tms.tail == tms.tail) {
        claimSubsume(tms.head, x.tms.head, Debug_SimpleRuleTrace("Func Uniqueness", getNewPredID()))
        claimSubsume(x.tms.head, tms.head, Debug_SimpleRuleTrace("Func Uniqueness", getNewPredID()))
      }
    }
    func.applyFunc(this, tms, param)
  }
}
// merge:
// Collapses the terms in `eqs` (each provably equal to `a`) into the single
// representative `a`. Order matters throughout: verify invariants, dispose the
// old predicates, rebuild them against `a`, re-run disposers, then relocate
// and re-anchor all watcher triggers.
private[this] def merge(a: TermIndex, eqs: Set[TermIndex]) {
  // Redirect every holder so user code now resolves to the representative.
  eqs.foreach(_.holder.idx = a)
  // Invariants: a and each eq are mutually subsumed, agree on non-emptiness,
  // and share all external sub/supersets.
  for (x <- eqs) {
    assert(a.hasSuper(x))
    assert(a.hasSub(x))
    assert(a.knownNE == x.knownNE)
    for (y <- x.superSets; if !eqs.contains(y)) assert(a.hasSuper(y))
    for (y <- x.subSets; if !eqs.contains(y)) assert(a.hasSub(y))
  }
  // Drop lattice predicates of the merged terms (a already carries them).
  for (x <- eqs) {
    x.kne.toList.foreach(_.dispose())
    x.assub.toList.foreach(_.dispose())
    x.assuper.toList.foreach(_.dispose())
  }
  // Re-state every structural predicate of the merged terms with eqs-members
  // replaced by a. Claims are collected first and run only after the old
  // predicates are disposed below.
  def replace(x: TermIndex) = if (eqs.contains(x)) a else x
  var task = Nil: List[() => Unit]
  for (x <- eqs) {
    for (djt <- x.djts) {
      val na = replace(djt.a)
      val nb = replace(djt.b)
      val ntr = djt.debug_trace
      task = (() => claimDisjoint(na, nb, ntr)) :: task
    }
    for (cp <- x.iscps) {
      val ncomp = cp.comp.map(y => (replace(y._1), y._2))
      val ntr = cp.debug_trace
      task = (() => claimCP(a, ncomp, ntr)) :: task
    }
    for (cp <- x.mkcps) {
      val nhead = replace(cp.head)
      val ncomp = cp.comp.map(y => (replace(y._1), y._2))
      val ntr = cp.debug_trace
      task = (() => claimCP(nhead, ncomp, ntr)) :: task
    }
    for (pi <- x.ispis) {
      val ncompt = replace(pi.compt)
      val ncompr = pi.compr
      val ntr = pi.debug_trace
      task = (() => claimPI(a, ncompt, ncompr, ntr)) :: task
    }
    for (pi <- x.mkpis) {
      val nhead = replace(pi.head)
      val ncompr = pi.compr
      val ntr = pi.debug_trace
      task = (() => claimPI(nhead, a, ncompr, ntr)) :: task
    }
    for (in <- x.isins) {
      val ncomp = in.comp.map(replace(_))
      val naux = in.aux
      val ntr = in.debug_trace
      task = (() => claimIN(a, ncomp, naux, ntr)) :: task
    }
    for (in <- x.mkins) {
      val nhead = replace(in.head)
      val ncomp = in.comp.map(replace(_))
      val naux = in.aux
      val ntr = in.debug_trace
      task = (() => claimIN(nhead, ncomp, naux, ntr)) :: task
    }
    for (fc <- x.funcs) {
      val nfunc = fc.func
      val ntms = fc.tms.map(replace(_))
      val nparam = fc.param
      val ntr = fc.debug_trace
      task = (() => claimFunc(nfunc, ntms, nparam, ntr)) :: task
    }
    for (rel <- x.asarl) {
      val nb = replace(rel.b)
      val nrl = rel.rl
      val ntr = rel.debug_trace
      task = (() => claimRL(a, nrl, nb, ntr)) :: task
    }
    for (rel <- x.asrlb) {
      val na = replace(rel.a)
      val nrl = rel.rl
      val ntr = rel.debug_trace
      task = (() => claimRL(na, nrl, a, ntr)) :: task
    }
  }
  // Dispose the old predicates (including the mirrored disjoint record) ...
  for (x <- eqs) {
    for (y <- x.djts.toList) {
      y.dispose()
      IEPredDisjoint(y.b, y.a).dispose()
    }
    x.iscps.toList.foreach(_.dispose())
    x.mkcps.toList.foreach(_.dispose())
    x.ispis.toList.foreach(_.dispose())
    x.mkpis.toList.foreach(_.dispose())
    x.isins.toList.foreach(_.dispose())
    x.mkins.toList.foreach(_.dispose())
    x.funcs.toList.foreach(_.dispose())
    x.asarl.toList.foreach(_.dispose())
    x.asrlb.toList.foreach(_.dispose())
  }
  // ... then re-state them against the representative.
  task.foreach(_())
  for (x <- eqs) {
    x.disposers.toList.foreach(_())
  }
  // Re-anchor every watcher trigger on the representative, then let each
  // affected guard re-locate itself.
  val toreloc = mutable.Set.empty[Guard[_ <: IEPred]]
  for (x <- eqs) {
    x.neTriggers.foreach(tg => {
      tg.guard.change(tg, a)
      toreloc += tg.guard
    })
    x.subTriggers.foreach(tg => {
      tg.guard.change(tg, a)
      toreloc += tg.guard
    })
    x.superTriggers.foreach(tg => {
      tg.guard.change(tg, a)
      toreloc += tg.guard
    })
    x.djtTriggers.foreach(tg => {
      tg.guard.change(tg, a)
      toreloc += tg.guard
    })
    x.iscpTriggers.foreach(tg => {
      tg.guard.change(tg, a)
      toreloc += tg.guard
    })
    x.mkcpTriggers.foreach(tg => {
      tg.guard.change(tg, a)
      toreloc += tg.guard
    })
    x.ispiTriggers.foreach(tg => {
      tg.guard.change(tg, a)
      toreloc += tg.guard
    })
    x.mkpiTriggers.foreach(tg => {
      tg.guard.change(tg, a)
      toreloc += tg.guard
    })
    x.isinTriggers.foreach(tg => {
      tg.guard.change(tg, a)
      toreloc += tg.guard
    })
    x.mkinTriggers.foreach(tg => {
      tg.guard.change(tg, a)
      toreloc += tg.guard
    })
    x.arlTriggers.foreach(tg => {
      tg.guard.change(tg, a)
      toreloc += tg.guard
    })
    x.rlbTriggers.foreach(tg => {
      tg.guard.change(tg, a)
      toreloc += tg.guard
    })
  }
  toreloc.foreach(_.locate(this))
  // Post-condition: merged terms carry no predicates or triggers at all.
  for (x <- eqs) {
    assert(x.kne.isEmpty)
    assert(x.assub.isEmpty)
    assert(x.assuper.isEmpty)
    assert(x.djts.isEmpty)
    assert(x.iscps.isEmpty)
    assert(x.mkcps.isEmpty)
    assert(x.ispis.isEmpty)
    assert(x.mkpis.isEmpty)
    assert(x.isins.isEmpty)
    assert(x.mkins.isEmpty)
    assert(x.funcs.isEmpty)
    assert(x.asarl.isEmpty)
    assert(x.asrlb.isEmpty)
    assert(x.disposers.isEmpty)
    assert(x.neTriggers.isEmpty)
    assert(x.subTriggers.isEmpty)
    assert(x.superTriggers.isEmpty)
    assert(x.djtTriggers.isEmpty)
    assert(x.iscpTriggers.isEmpty)
    assert(x.mkcpTriggers.isEmpty)
    assert(x.ispiTriggers.isEmpty)
    assert(x.mkpiTriggers.isEmpty)
    assert(x.isinTriggers.isEmpty)
    assert(x.mkinTriggers.isEmpty)
    assert(x.arlTriggers.isEmpty)
    assert(x.rlbTriggers.isEmpty)
  }
}
// foreach:
// Callbacks to run on every term ever created (including W terms).
private[this] val anyTermPool = mutable.Set.empty[(IEngineCore, TermIndex) => Unit]
/** Registers a callback invoked for every term, existing and future. */
protected def forAnyTerm(f: (IEngineCore, TermIndex) => Unit) {
  anyTermPool.add(f)
}
/**
 * Setup a callback function which is activated when `term` is known to be non-empty.
 */
def ifNotEmpty(term: TermIndex, args: Seq[RuleArg], f: RuleDo[IEPredNonEmpty]) {
  assert(term.valid)
  (new Watcher[IEPredNonEmpty]("ne", f, args)).initialize(term, this)
}
/**
 * Setup a callback function which is activated whenever `term` has a new super-set.
 */
def foreachSuperset(term: TermIndex, args: Seq[RuleArg], f: RuleDo[IEPredSubsume]) {
  assert(term.valid)
  // Watches "sub" records (term appearing on the subset side).
  (new Watcher[IEPredSubsume]("sub", f, args)).initialize(term, this)
}
/**
 * Setup a callback function which is activated whenever `term` has a new subset.
 */
def foreachSubset(term: TermIndex, args: Seq[RuleArg], f: RuleDo[IEPredSubsume]) {
  assert(term.valid)
  // Watches "super" records (term appearing on the superset side).
  (new Watcher[IEPredSubsume]("super", f, args)).initialize(term, this)
}
/**
 * Setup a callback function which is activated whenever `term` has a new disjoint.
 */
def foreachDisjoint(term: TermIndex, args: Seq[RuleArg], f: RuleDo[IEPredDisjoint]) {
  assert(term.valid)
  (new Watcher[IEPredDisjoint]("djt", f, args)).initialize(term, this)
}
/**
 * Setup a callback function which is activated whenever `term` is known to be a Cartesian product.
 */
def foreachIsCP(term: TermIndex, args: Seq[RuleArg], f: RuleDo[IEPredCP]) {
  assert(term.valid)
  (new Watcher[IEPredCP]("iscp", f, args)).initialize(term, this)
}
/**
 * Setup a callback function which is activated whenever `term` is known to be a component of some Cartesian product.
 */
def foreachMkCP(term: TermIndex, args: Seq[RuleArg], f: RuleDo[IEPredCP]) {
  assert(term.valid)
  (new Watcher[IEPredCP]("mkcp", f, args)).initialize(term, this)
}
/**
 * Setup a callback function which is activated whenever `term` is known to be a projection.
 */
def foreachIsPI(term: TermIndex, args: Seq[RuleArg], f: RuleDo[IEPredPI]) {
  assert(term.valid)
  (new Watcher[IEPredPI]("ispi", f, args)).initialize(term, this)
}
/**
 * Setup a callback function which is activated whenever some other term is known to be a projection of `term`.
 */
def foreachMkPI(term: TermIndex, args: Seq[RuleArg], f: RuleDo[IEPredPI]) {
  assert(term.valid)
  (new Watcher[IEPredPI]("mkpi", f, args)).initialize(term, this)
}
/**
 * Setup a callback function which is activated whenever `term` is known to be an intersection.
 */
def foreachIsIN(term: TermIndex, args: Seq[RuleArg], f: RuleDo[IEPredIN]) {
  assert(term.valid)
  (new Watcher[IEPredIN]("isin", f, args)).initialize(term, this)
}
/**
 * Setup a callback function which is activated whenever `term` is known to be a component of an intersection.
 */
def foreachMkIN(term: TermIndex, args: Seq[RuleArg], f: RuleDo[IEPredIN]) {
  assert(term.valid)
  (new Watcher[IEPredIN]("mkin", f, args)).initialize(term, this)
}
/**
 * Setup a callback function which is activated whenever a relation `rl(term, x)` is known, for some user-defined relation `rl` and some term `x`.
 */
def foreachARLX(term: TermIndex, args: Seq[RuleArg], f: RuleDo[IEPredRL]) {
  assert(term.valid)
  (new Watcher[IEPredRL]("arl", f, args)).initialize(term, this)
}
/**
 * Setup a callback function which is activated whenever a relation `rl(x, term)` is known, for some user-defined relation `rl` and some term `x`.
 */
def foreachXRLB(term: TermIndex, args: Seq[RuleArg], f: RuleDo[IEPredRL]) {
  assert(term.valid)
  (new Watcher[IEPredRL]("rlb", f, args)).initialize(term, this)
}
/**
 * Setup a callback function which is activated when the Cartesian product of `comp` is constructed.
 */
def forCPof(comp: Set[(TermIndex, SemRole)], args: Seq[RuleArg], f: RuleDo[IEPredCP]) {
  // Was `comp.map(_._1).foreach(_.valid)`, which merely evaluated `valid` and
  // discarded the result — the validity check was a no-op. Actually assert it,
  // mirroring every other watcher-setup method in this class.
  comp.foreach { case (term, _) => assert(term.valid) }
  (new ForCPof(f, args)).initialize(comp, this)
}
/**
 * Setup a callback function which is activated when `a` is known to be subsumed by `b`.
 */
def ifSubsume(a: TermIndex, b: TermIndex, args: Seq[RuleArg], f: RuleDo[IEPredSubsume]) {
  assert(a.valid)
  assert(b.valid)
  (new IfSubsume(f, args)).initialize(a, b, this)
}
/**
 * Setup a callback function which is activated when `a` is known to be disjoint to `b`.
 */
def ifDisjoint(a: TermIndex, b: TermIndex, args: Seq[RuleArg], f: RuleDo[IEPredDisjoint]) {
  assert(a.valid)
  assert(b.valid)
  (new IfDisjoint(f, args)).initialize(a, b, this)
}
/**
 * Setup a callback function which is activated when there is a relation `rl(a, b)`.
 */
def ifRelation(a: TermIndex, b: TermIndex, args: Seq[RuleArg], f: RuleDo[IEPredRL]) {
  assert(a.valid)
  assert(b.valid)
  (new IfRelation(f, args)).initialize(a, b, this)
}
// getter:
/**
* Get Cartesian product.
*/
def getCP(comp: Set[(Term, SemRole)]) = findCP(comp) match {
case Some(x) => x
case None => {
val dim = new Dimension(comp.flatMap(x => x._1.dim.relabel(x._2)))
val ret = newTerm(dim)
claimCP(ret, comp.map(x => (x._1.index, x._2)), Debug_SimpleRuleTrace("getCP", getNewPredID()))
ret.holder
}
}
/**
* Get projection.
*/
def getPI(compt: Term, headrs: Set[SemRole]) = findPI(compt, headrs) match {
case Some(x) => x
case None => {
val (dim, r) = Dimension(headrs)
val ret = newTerm(dim)
claimPI(ret, compt.index, r, Debug_SimpleRuleTrace("getPI", getNewPredID()))
ret.holder
}
}
/**
* Get intersection.
*/
def getIN(comp: Set[Term], aux: Boolean = false) = findIN(comp) match {
case Some(x) => {
if (!aux && x.dim.size >= 2) {
claimIN(x.index, comp.map(_.index), false, Debug_SimpleRuleTrace("getIN", getNewPredID()))
}
x
}
case None => {
val dim = comp.head.dim
val ret = newTerm(dim)
claimIN(ret, comp.map(_.index), aux || dim.size == 1, Debug_SimpleRuleTrace("getIN", getNewPredID()))
ret.holder
}
}
/**
* Get `func(tms.tail, param)` for the user-defined function `func`.
*/
def getFunc(func: IEFunction, tms: Seq[Term], param: Any) = findFunc(func, tms, param) match {
case Some(x) => x
case None => {
val dim = func.headDim(tms, param)
val ret = newTerm(dim)
claimFunc(func, ret +: tms.tail.map(_.index), param, Debug_SimpleRuleTrace("getFunc", getNewPredID()))
ret.holder
}
}
// constructors:
def construct(finder: () => Option[Term], getter: () => Term, args: Seq[RuleArg], f: (Term, Seq[RuleArg]) => Unit) {
val dep = args.flatMap(_.terms).toSet
assert(dep.forall(_.valid))
finder() match {
case Some(x) => f(x, args)
case None => constructQ.enqueue(() => if (dep.forall(_.valid)) f(getter(), args))
}
}
def constructCP(comp: Set[(Term, SemRole)], args: Seq[RuleArg], f: (Term, Seq[RuleArg]) => Unit) {
val finder = () => findCP(comp)
val getter = () => getCP(comp)
construct(finder, getter, args, f)
}
def constructPI(compt: Term, headrs: Set[SemRole], args: Seq[RuleArg], f: (Term, Seq[RuleArg]) => Unit) {
val finder = () => findPI(compt, headrs)
val getter = () => getPI(compt, headrs)
construct(finder, getter, args, f)
}
def constructIN(comp: Set[Term], args: Seq[RuleArg], f: (Term, Seq[RuleArg]) => Unit) {
val finder = () => findIN(comp)
val getter = () => getIN(comp, true)
construct(finder, getter, args, f)
}
////////////////////////////////////////////////////
forAnyTerm(rPISub)
forAnyTerm(rCPSub)
forAnyTerm(rPIdef)
forAnyTerm(rPIWIN)
forAnyTerm(rCPIN)
}
}
| tomtung/tifmo | src/main/scala/tifmo/inference/IEngineCore.scala | Scala | bsd-2-clause | 26,609 |
package io.udash.web.guide.views.rpc.demos
import io.udash._
import io.udash.bootstrap._
import io.udash.bootstrap.button.UdashButton
import io.udash.bootstrap.form.UdashInputGroup
import io.udash.i18n._
import io.udash.web.commons.views.Component
import io.udash.web.guide.styles.partials.GuideStyles
import io.udash.web.guide.{Context, GuideExceptions}
import org.scalajs.dom
import scalatags.JsDom
import scalatags.JsDom.all._
import scala.concurrent.duration.DurationLong
import scala.language.postfixOps
import scala.util.{Failure, Success}
/** View model for the RPC-exceptions demo: one display slot per failure kind. */
case class ExceptionsDemoModel(
  exception: String,                      // rendered message of a registered (typed) exception
  translatableException: TranslationKey0, // i18n key carried by a translatable exception
  unknownException: String                // rendered message of an unregistered exception
)
object ExceptionsDemoModel extends HasModelPropertyCreator[ExceptionsDemoModel]
/**
 * Guide demo showing how exceptions thrown by server-side RPC endpoints
 * surface on the client: a registered typed exception, a translatable
 * exception (rendered via an i18n key), and an unknown exception.
 * Each button triggers one failing RPC call and the result is written
 * into an [[ExceptionsDemoModel]] slot rendered next to the button.
 */
class ExceptionsDemoComponent extends Component {
  import Context._
  override def getTemplate: Modifier = ExceptionsDemoViewFactory()
  // Wires model, presenter and view together for this demo.
  object ExceptionsDemoViewFactory {
    def apply(): Modifier = {
      val model = ModelProperty[ExceptionsDemoModel](ExceptionsDemoModel("", TranslationKey.untranslatable(""), ""))
      val presenter = new ExceptionsDemoPresenter(model)
      new ExceptionsDemoView(model, presenter).render
    }
  }
  // Issues the failing RPC calls and stores the observed errors in the model.
  class ExceptionsDemoPresenter(model: ModelProperty[ExceptionsDemoModel]) {
    // Calls an endpoint that always fails with a registered exception.
    // Success would mean the demo server misbehaved, hence the throw.
    def exceptionCall(): Unit = {
      Context.serverRpc.demos.exceptions.example() onComplete {
        case Success(_) => throw new RuntimeException("It should fail!")
        case Failure(ex) => model.subProp(_.exception).set(ex match {
          case ex: GuideExceptions.ExampleException =>
            ex.printStackTrace()
            s"ExampleException: ${ex.msg}"
          case _ => s"UnknownException: ${ex.getMessage}"
        })
      }
    }
    // Calls an endpoint failing with a translatable exception; stores its i18n key.
    def translatableExceptionCall(): Unit = {
      Context.serverRpc.demos.exceptions.exampleWithTranslatableError() onComplete {
        case Success(_) => throw new RuntimeException("It should fail!")
        case Failure(ex) => model.subProp(_.translatableException).set(ex match {
          case ex: GuideExceptions.TranslatableExampleException => ex.trKey
          // NOTE(review): stores null into the property for unexpected errors —
          // the view then calls v.translated() on it; confirm this cannot NPE.
          case _ => null
        })
      }
    }
    // Calls an endpoint failing with an exception not registered on the client.
    def unknownExceptionCall(): Unit = {
      Context.serverRpc.demos.exceptions.unknownError() onComplete {
        case Success(_) => throw new RuntimeException("It should fail!")
        case Failure(ex) => model.subProp(_.unknownException).set(ex match {
          case ex: GuideExceptions.ExampleException => s"ExampleException: ${ex.msg}"
          case ex: GuideExceptions.TranslatableExampleException => s"TranslatableExampleException: ${ex.trKey.key}"
          case _ => s"UnknownException: ${ex.getMessage}"
        })
      }
    }
  }
  // Renders the three "Result: ..." rows with their trigger buttons.
  class ExceptionsDemoView(model: ModelProperty[ExceptionsDemoModel], presenter: ExceptionsDemoPresenter) {
    import JsDom.all._
    // Translations are fetched from the server and cached in localStorage for 6 hours.
    implicit val translationProvider: TranslationProvider = new RemoteTranslationProvider(serverRpc.demos.translations, Some(dom.window.localStorage), 6 hours)
    implicit val lang: Lang = Lang("en")
    private val exceptionButtonDisabled = Property(false)
    private val exceptionButton = UdashButton(
      disabled = exceptionButtonDisabled,
      componentId = ComponentId("exception-demo")
    )(_ => "Call registered exception!")
    // NOTE(review): shares `exceptionButtonDisabled` with the button above, so
    // clicking either disables both — presumably intentional; confirm.
    private val translatableExceptionButton = UdashButton(
      disabled = exceptionButtonDisabled,
      componentId = ComponentId("translatable-exception-demo")
    )(_ => "Call registered translatable exception!")
    private val unknownExceptionButtonDisabled = Property(false)
    private val unknownExceptionButton = UdashButton(
      disabled = unknownExceptionButtonDisabled,
      componentId = ComponentId("unknown-exception-demo")
    )(_ => "Call unknown exception!")
    // Each click disables its button(s) and fires the corresponding RPC call.
    exceptionButton.listen {
      case UdashButton.ButtonClickEvent(_, _) =>
        exceptionButtonDisabled.set(true)
        presenter.exceptionCall()
    }
    translatableExceptionButton.listen {
      case UdashButton.ButtonClickEvent(_, _) =>
        exceptionButtonDisabled.set(true)
        presenter.translatableExceptionCall()
    }
    unknownExceptionButton.listen {
      case UdashButton.ButtonClickEvent(_, _) =>
        unknownExceptionButtonDisabled.set(true)
        presenter.unknownExceptionCall()
    }
    def render: Modifier = span(GuideStyles.frame, GuideStyles.useBootstrap)(
      UdashInputGroup()(
        UdashInputGroup.prependText(
          "Result: ",
          produce(model.subProp(_.exception))(v => span(id := "exception-demo-response", v).render)
        ),
        exceptionButton.render
      ).render, br,
      UdashInputGroup()(
        UdashInputGroup.prependText(
          "Result: ",
          produce(model.subProp(_.translatableException))(v => span(id := "translatable-exception-demo-response")(v.translated()).render)
        ),
        translatableExceptionButton.render
      ).render, br,
      UdashInputGroup()(
        UdashInputGroup.prependText(
          "Result: ",
          produce(model.subProp(_.unknownException))(v => span(id := "unknown-exception-demo-response", v).render)
        ),
        unknownExceptionButton.render
      ).render
    )
  }
}
| UdashFramework/udash-core | guide/guide/.js/src/main/scala/io/udash/web/guide/views/rpc/demos/ExceptionsDemoComponent.scala | Scala | apache-2.0 | 5,173 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.qscript.qsu
import quasar.ejson.EJson
import quasar.qscript.TTypes
/**
 * Convenience aliases that fix the recursive type parameter `T` for the
 * `qsu` (QScriptUniform) machinery, so implementors can mix this in and
 * refer to the partially-applied forms without repeating `[T]` everywhere.
 */
trait QSUTTypes[T[_[_]]] extends TTypes[T] {
  type QAuth = quasar.qscript.qsu.QAuth[T]
  type QDims = quasar.qscript.qsu.QDims[T]
  type QIdAccess = quasar.qscript.qsu.QIdAccess[T]
  type QAccess[A] = quasar.qscript.qsu.QAccess[T, A]
  type FreeAccess[A] = quasar.qscript.qsu.FreeAccess[T, A]
  type QSUGraph = quasar.qscript.qsu.QSUGraph[T]
  type RevIdx = quasar.qscript.qsu.QSUGraph.RevIdx[T]
  type RevIdxM[F[_]] = quasar.qscript.qsu.RevIdxM[T, F]
  // References is additionally fixed to EJson values for the second parameter.
  type References = quasar.qscript.qsu.References[T, T[EJson]]
  type QScriptUniform[A] = quasar.qscript.qsu.QScriptUniform[T, A]
  type QScriptEducated[A] = quasar.qscript.QScriptEducated[T, A]
}
| jedesah/Quasar | connector/src/main/scala/quasar/qscript/qsu/QSUTTypes.scala | Scala | apache-2.0 | 1,339 |
package org.http4s
/** Exposes the [[ClientTypes]] aliases directly under `org.http4s.client`. */
package object client extends ClientTypes
/** Shared type aliases for the http4s client package. */
trait ClientTypes {
  import org.http4s.client._
  // A function producing a connection of type `A` (in effect `F`) for a request key.
  type ConnectionBuilder[F[_], A <: Connection[F]] = RequestKey => F[A]
  // A client-to-client transformation, e.g. for adding retries or logging.
  type Middleware[F[_]] = Client[F] => Client[F]
}
| aeons/http4s | client/src/main/scala/org/http4s/client/package.scala | Scala | apache-2.0 | 237 |
/*******************************************************************************
Copyright (c) 2013-2014, S-Core, KAIST.
All rights reserved.
Use is subject to license terms.
This distribution may include materials developed by third parties.
******************************************************************************/
package kr.ac.kaist.jsaf.analysis.typing.models.DOMCore
import scala.collection.mutable.{Map=>MMap, HashMap=>MHashMap}
import kr.ac.kaist.jsaf.analysis.typing.domain._
import kr.ac.kaist.jsaf.analysis.typing.domain.{BoolFalse => F, BoolTrue => T}
import kr.ac.kaist.jsaf.analysis.typing.models._
import kr.ac.kaist.jsaf.analysis.typing.models.DOMHtml.{HTMLDocument, HTMLTopElement}
import org.w3c.dom.Node
import kr.ac.kaist.jsaf.analysis.cfg.{CFG, CFGExpr, InternalError, FunctionId}
import kr.ac.kaist.jsaf.analysis.typing._
import scala.Some
import kr.ac.kaist.jsaf.analysis.typing.AddressManager._
import kr.ac.kaist.jsaf.Shell
object DOMNode extends DOM {
  // Interface name; used to derive the system location names below.
  private val name = "Node"

  // NodeType codes as defined by DOM Level 3 Core (Node.nodeType).
  val ELEMENT_NODE = 1
  val ATTRIBUTE_NODE = 2
  val TEXT_NODE = 3
  val CDATA_SECTION_NODE = 4
  val ENTITY_REFERENCE_NODE = 5
  val ENTITY_NODE = 6
  val PROCESSING_INSTRUCTION_NODE = 7
  val COMMENT_NODE = 8
  val DOCUMENT_NODE = 9
  val DOCUMENT_TYPE_NODE = 10
  val DOCUMENT_FRAGMENT_NODE = 11
  val NOTATION_NODE = 12

  /* predefined locations: abstract heap addresses for the constructor,
   * its prototype, and the instance object */
  val loc_cons = newSystemRecentLoc(name + "Cons")
  val loc_proto = newSystemRecentLoc(name + "Proto")
  val loc_ins = newSystemRecentLoc(name + "Ins")
/* constructor or object*/
  // Abstract property table of the `Node` constructor function object:
  // internal slots (@class/@proto/...), the `prototype` link, and the
  // DOM nodeType / documentPosition constants as non-writable properties.
  // NOTE(review): the boolean triple after each value is presumably the
  // (writable, enumerable, configurable) flags of ObjectValue — confirm
  // against the ObjectValue definition.
  private val prop_cons: List[(String, AbsProperty)] = List(
    ("@class", AbsConstValue(PropValue(AbsString.alpha("Function")))),
    ("@proto", AbsConstValue(PropValue(ObjectValue(Value(ObjProtoLoc), F, F, F)))),
    ("@extensible", AbsConstValue(PropValue(BoolTrue))),
    ("@hasinstance", AbsConstValue(PropValueNullTop)),
    ("length", AbsConstValue(PropValue(ObjectValue(Value(AbsNumber.alpha(0)), F, F, F)))),
    ("prototype", AbsConstValue(PropValue(ObjectValue(Value(loc_proto), F, F, F)))),
    ("ELEMENT_NODE", AbsConstValue(PropValue(ObjectValue(AbsNumber.alpha(1), F, T, T)))),
    ("ATTRIBUTE_NODE", AbsConstValue(PropValue(ObjectValue(AbsNumber.alpha(2), F, T, T)))),
    ("TEXT_NODE", AbsConstValue(PropValue(ObjectValue(AbsNumber.alpha(3), F, T, T)))),
    ("CDATA_SECTION_NODE", AbsConstValue(PropValue(ObjectValue(AbsNumber.alpha(4), F, T, T)))),
    ("ENTITY_REFERENCE_NODE", AbsConstValue(PropValue(ObjectValue(AbsNumber.alpha(5), F, T, T)))),
    ("ENTITY_NODE", AbsConstValue(PropValue(ObjectValue(AbsNumber.alpha(6), F, T, T)))),
    ("PROCESSING_INSTRUCTION_NODE", AbsConstValue(PropValue(ObjectValue(AbsNumber.alpha(7), F, T, T)))),
    ("COMMENT_NODE", AbsConstValue(PropValue(ObjectValue(AbsNumber.alpha(8), F, T, T)))),
    ("DOCUMENT_NODE", AbsConstValue(PropValue(ObjectValue(AbsNumber.alpha(9), F, T, T)))),
    ("DOCUMENT_TYPE_NODE", AbsConstValue(PropValue(ObjectValue(AbsNumber.alpha(10), F, T, T)))),
    ("DOCUMENT_FRAGMENT_NODE", AbsConstValue(PropValue(ObjectValue(AbsNumber.alpha(11), F, T, T)))),
    ("NOTATION_NODE", AbsConstValue(PropValue(ObjectValue(AbsNumber.alpha(12), F, T, T)))),
    ("DOCUMENT_POSITION_DISCONNECTED", AbsConstValue(PropValue(ObjectValue(AbsNumber.alpha(0x01), F, T, T)))),
    ("DOCUMENT_POSITION_PRECEDING", AbsConstValue(PropValue(ObjectValue(AbsNumber.alpha(0x02), F, T, T)))),
    ("DOCUMENT_POSITION_FOLLOWING", AbsConstValue(PropValue(ObjectValue(AbsNumber.alpha(0x04), F, T, T)))),
    ("DOCUMENT_POSITION_CONTAINS", AbsConstValue(PropValue(ObjectValue(AbsNumber.alpha(0x08), F, T, T)))),
    ("DOCUMENT_POSITION_CONTAINED_BY", AbsConstValue(PropValue(ObjectValue(AbsNumber.alpha(0x10), F, T, T)))),
    ("DOCUMENT_POSITION_IMPLEMENTATION_SPECIFIC", AbsConstValue(PropValue(ObjectValue(AbsNumber.alpha(0x20), F, T, T))))
  )
/* prorotype */
  // Abstract property table of `Node.prototype` for the default DOM model:
  // every DOM Level 3 Core Node method is bound to a builtin-function name
  // whose transfer function is defined in getSemanticMap() below.
  private val prop_proto: List[(String, AbsProperty)] = List(
    ("@class", AbsConstValue(PropValue(AbsString.alpha("Object")))),
    ("@proto", AbsConstValue(PropValue(ObjectValue(Value(ObjProtoLoc), F, F, F)))),
    ("@extensible", AbsConstValue(PropValue(BoolTrue))),
    ("insertBefore", AbsBuiltinFunc("DOMNode.insertBefore", 2)),
    ("replaceChild", AbsBuiltinFunc("DOMNode.replaceChild", 2)),
    ("removeChild", AbsBuiltinFunc("DOMNode.removeChild", 1)),
    ("appendChild", AbsBuiltinFunc("DOMNode.appendChild", 1)),
    ("hasChildNodes", AbsBuiltinFunc("DOMNode.hasChildNodes", 0)),
    ("cloneNode", AbsBuiltinFunc("DOMNode.cloneNode", 1)),
    ("normalize", AbsBuiltinFunc("DOMNode.normalize", 0)),
    ("isSupported", AbsBuiltinFunc("DOMNode.isSupported", 2)),
    ("hasAttributes", AbsBuiltinFunc("DOMNode.hasAttributes", 0)),
    ("compareDocumentPosition", AbsBuiltinFunc("DOMNode.compareDocumentPosition", 1)),
    ("isSameNode", AbsBuiltinFunc("DOMNode.isSameNode", 1)),
    ("lookupPrefix", AbsBuiltinFunc("DOMNode.lookupPrefix", 1)),
    ("isDefaultNamespace", AbsBuiltinFunc("DOMNode.isDefaultNamespace", 1)),
    ("lookupNamespaceURI", AbsBuiltinFunc("DOMNode.lookupNamespaceURI", 1)),
    ("isEqualNode", AbsBuiltinFunc("DOMNode.isEqualNode", 1)),
    ("getFeature", AbsBuiltinFunc("DOMNode.getFeature", 2)),
    ("setUserData", AbsBuiltinFunc("DOMNode.setUserData", 3)),
    ("getUserData", AbsBuiltinFunc("DOMNode.getUserData", 1)),
    // WHATWG DOM
    ("contains", AbsBuiltinFunc("DOMNode.contains", 1))
  )
/* prorotype */
private val prop_proto2: List[(String, AbsProperty)] = List(
("@class", AbsConstValue(PropValue(AbsString.alpha("Object")))),
("@proto", AbsConstValue(PropValue(ObjectValue(Value(ObjProtoLoc), F, F, F)))),
("@extensible", AbsConstValue(PropValue(BoolTrue))),
("insertBefore", AbsBuiltinFunc("DOMNode.insertBefore", 2)),
("replaceChild", AbsBuiltinFunc("DOMNode.replaceChild", 2)),
("removeChild", AbsBuiltinFunc("DOMNode.removeChild", 1)),
("appendChild", AbsBuiltinFunc("DOMNode.appendChild", 1)),
("hasChildNodes", AbsBuiltinFunc("DOMNode.hasChildNodes", 0)),
("cloneNode", AbsBuiltinFunc("DOMNode.cloneNode", 1)),
("normalize", AbsBuiltinFunc("DOMNode.normalize", 0)),
("isSupported", AbsBuiltinFunc("DOMNode.isSupported", 2)),
("hasAttributes", AbsBuiltinFunc("DOMNode.hasAttributes", 0)),
("compareDocumentPosition", AbsBuiltinFunc("DOMNode.compareDocumentPosition", 1)),
("isSameNode", AbsBuiltinFunc("DOMNode.isSameNode", 1)),
("lookupPrefix", AbsBuiltinFunc("DOMNode.lookupPrefix", 1)),
("isDefaultNamespace", AbsBuiltinFunc("DOMNode.isDefaultNamespace", 1)),
("lookupNamespaceURI", AbsBuiltinFunc("DOMNode.lookupNamespaceURI", 1)),
("isEqualNode", AbsBuiltinFunc("DOMNode.isEqualNode", 1)),
("getFeature", AbsBuiltinFunc("DOMNode.getFeature", 2)),
("setUserData", AbsBuiltinFunc("DOMNode.setUserData", 3)),
("getUserData", AbsBuiltinFunc("DOMNode.getUserData", 1)),
// WHATWG DOM
("contains", AbsBuiltinFunc("DOMNode.contains", 1)),
("firstChild", AbsConstValue(PropValue(ObjectValue(Value(HTMLTopElement.loc_ins_set) + Value(NullTop), F, T, T)))),
("parentNode", AbsConstValue(PropValue(ObjectValue(Value(HTMLTopElement.loc_ins_set) + Value(NullTop), F, T, T)))),
("lastChild", AbsConstValue(PropValue(ObjectValue(Value(HTMLTopElement.loc_ins_set) + Value(NullTop), F, T, T)))),
("previousSibling", AbsConstValue(PropValue(ObjectValue(Value(HTMLTopElement.loc_ins_set) + Value(NullTop), F, T, T)))),
("nextSibling", AbsConstValue(PropValue(ObjectValue(Value(HTMLTopElement.loc_ins_set) + Value(NullTop), F, T, T))))
)
/* global */
  // Binds the `Node` constructor object onto the abstract global object.
  private val prop_global: List[(String, AbsProperty)] = List(
    (name, AbsConstValue(PropValue(ObjectValue(loc_cons, T, F, T))))
  )
def getInitList(): List[(Loc, List[(String, AbsProperty)])] = if(Shell.params.opt_Dommodel2) List(
(loc_cons, prop_cons), (loc_proto, prop_proto2), (GlobalLoc, prop_global)
) else List(
(loc_cons, prop_cons), (loc_proto, prop_proto), (GlobalLoc, prop_global)
)
  /**
   * Transfer functions (abstract semantics) for the DOM `Node` prototype
   * methods declared above. Each entry maps a builtin-function name to a
   * SemanticFun producing the post-state ((Heap, Context)) and exception
   * state ((Heap, Context)) of the call. When `Shell.params.opt_Dommodel2`
   * is set, handlers use a coarser model (e.g. returning the set of all
   * HTML element instances) instead of updating the abstract DOM tree.
   */
  def getSemanticMap(): Map[String, SemanticFun] = {
    Map(
      //TODO: not yet implemented
      // insertBefore(newChild, refChild): splice newChild before refChild in
      // this node's childNodes; a (possibly) null refChild appends instead.
      ("DOMNode.insertBefore" -> (
        (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
          if(Shell.params.opt_Dommodel2)
            ((Helper.ReturnStore(h, Value(HTMLTopElement.loc_ins_set)), ctx), (he, ctxe))
          else {
            val lset_this = h(SinglePureLocalLoc)("@this")._2._2
            /* arguments */
            val lset_new = getArgValue(h, ctx, args, "0")._2
            val ref = getArgValue(h, ctx, args, "1")
            val lset_ref = ref._2
            // If refChild is null, insert newChild at the end of the list of children.
            val nullh = if(NullTop <= ref._1._2) {
              DOMTree.appendChild(h, lset_this, lset_new)
            } else h
            if (!lset_new.isEmpty && !lset_ref.isEmpty) {
              val h_1 = DOMTree.insertBefore(nullh, lset_this, lset_new, lset_ref)
              ((Helper.ReturnStore(h_1, Value(lset_new)), ctx), (he, ctxe))
            }
            else if(NullTop <= ref._1._2)
              ((nullh, ctx), (he, ctxe))
            else
              ((HeapBot, ContextBot), (he, ctxe))
          }
        })),
      // replaceChild(newChild, oldChild): swap oldChild for newChild in the
      // childNodes array and rewire parentNode/previousSibling/nextSibling.
      ("DOMNode.replaceChild" -> (
        (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
          if(Shell.params.opt_Dommodel2)
            ((Helper.ReturnStore(h, Value(HTMLTopElement.loc_ins_set)), ctx), (he, ctxe))
          else {
            val lset_this = h(SinglePureLocalLoc)("@this")._2._2
            /* arguments */
            val lset_new = getArgValue(h, ctx, args, "0")._2
            val lset_old = getArgValue(h, ctx, args, "1")._2
            if (!lset_new.isEmpty && !lset_old.isEmpty) {
              /* location for clone node */
              val h_1 = lset_this.foldLeft(h)((hh, l_node) => {
                val lset_ns = Helper.Proto(h, l_node, AbsString.alpha("childNodes"))._2
                lset_ns.foldLeft(hh)((hhh, l_ns) => {
                  val n_len = Operator.ToUInt32(Helper.Proto(h, l_ns, AbsString.alpha("length")))
                  n_len.getSingle match {
                    // Concrete length: find oldChild's index and overwrite it.
                    case Some(n) if AbsNumber.isNum(n_len) =>
                      val n_index = (0 until n.toInt).indexWhere((i) => {
                        BoolTrue <= Operator.bopSEq(Helper.Proto(hhh, l_ns, AbsString.alpha(i.toString)), Value(lset_old))._1._3
                      })
                      if (n_index < 0)
                        hhh
                      else {
                        val hhh_1 = Helper.Delete(hhh, l_ns, AbsString.alpha(n_index.toString))._1
                        Helper.PropStore(hhh_1, l_ns, AbsString.alpha(n_index.toString), Value(lset_new))
                      }
                    // Unknown length: weakly update all numeric indices.
                    case _ if AbsNumber.isUIntAll(n_len) =>
                      val b_eq = Operator.bopSEq(Helper.Proto(hhh, l_ns, NumStr), Value(lset_old))._1._3
                      val hhh_1 =
                        if (BoolTrue <= b_eq) {
                          val _hhh = Helper.Delete(hhh, l_ns, NumStr)._1
                          Helper.PropStore(_hhh, l_ns, NumStr, Value(lset_new))
                        }
                        else HeapBot
                      val hhh_2 =
                        if (BoolFalse <= b_eq) hhh
                        else HeapBot
                      hhh_1 + hhh_2
                    case _ => hhh /* exception ?? */
                  }
                })
              })
              /* 'parentNode', 'previousSibling', 'nextSibling' update of the reference child */
              val (h_2, preSib, nextSib) = lset_old.foldLeft((h_1, ValueBot, ValueBot))((d, l) => {
                val preS = Helper.Proto(d._1, l, AbsString.alpha("previousSibling"))
                val nextS = Helper.Proto(d._1, l, AbsString.alpha("nextSibling"))
                val h_2_1 = Helper.PropStore(d._1, l, AbsString.alpha("parentNode"), Value(NullTop))
                val h_2_2 = Helper.PropStore(h_2_1, l, AbsString.alpha("previousSibling"), Value(NullTop))
                val h_2_3 = Helper.PropStore(h_2_2, l, AbsString.alpha("nextSibling"), Value(NullTop))
                (h_2_3, preS + d._2, nextS + d._3)
              })
              /* 'parentNode', 'previousSibling', 'nextSibling' update of the new child */
              val h_3 = lset_new.foldLeft(h_2)((_h, l) => {
                val h_3_1 = Helper.PropStore(_h, l, AbsString.alpha("parentNode"), Value(lset_this))
                val h_3_2 = Helper.PropStore(h_3_1, l, AbsString.alpha("previousSibling"), preSib)
                Helper.PropStore(h_3_2, l, AbsString.alpha("nextSibling"), nextSib)
              })
              /* 'nextSibling' update of the previous sibling of the reference child */
              val h_4 = preSib._2.foldLeft(h_3)((_h, l) =>
                Helper.PropStore(_h, l, AbsString.alpha("nextSibling"), Value(lset_new))
              )
              /* 'previousSibling' update of the next sibling of the reference child */
              val h_5 = nextSib._2.foldLeft(h_4)((_h, l) =>
                Helper.PropStore(_h, l, AbsString.alpha("previousSibling"), Value(lset_new))
              )
              ((Helper.ReturnStore(h_5, Value(lset_old)), ctx), (he, ctxe))
            }
            else
              ((HeapBot, ContextBot), (he, ctxe))
          }
        })),
      // removeChild(child): detach child from this node's abstract DOM tree
      // and return the removed child.
      ("DOMNode.removeChild" -> (
        (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
          if(Shell.params.opt_Dommodel2)
            ((Helper.ReturnStore(h, Value(HTMLTopElement.loc_ins_set)), ctx), (he, ctxe))
          else {
            val lset_this = h(SinglePureLocalLoc)("@this")._2._2
            /* arguments */
            val lset_child = getArgValue(h, ctx, args, "0")._2
            if (!lset_this.isEmpty && !lset_child.isEmpty) {
              val h_1 = DOMTree.removeChild(h, lset_this, lset_child)
              ((Helper.ReturnStore(h_1, Value(lset_child)), ctx), (he, ctxe))
            }
            else
              ((HeapBot, ContextBot), (he, ctxe))
          }
        })),
      // appendChild(child): append child to this node and return it.
      ("DOMNode.appendChild" -> (
        (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
          val lset_this = h(SinglePureLocalLoc)("@this")._2._2
          val lset_child = getArgValue(h, ctx, args, "0")._2
          if(Shell.params.opt_Dommodel2)
            ((Helper.ReturnStore(h, Value(lset_child)), ctx), (he, ctxe))
          else {
            val h_1 = DOMTree.appendChild(h, lset_this, lset_child)
            // NOTE(review): the condition below tests `lset_child` twice —
            // presumably one operand was meant to be `lset_this` (compare
            // removeChild above); confirm before changing.
            if (!lset_child.isEmpty && !lset_child.isEmpty)
              ((Helper.ReturnStore(h_1, Value(lset_child)), ctx), (he, ctxe))
            else
              ((HeapBot, ContextBot), (he, ctxe))
          }
        })),
      // hasChildNodes(): derive an abstract boolean from childNodes.length.
      ("DOMNode.hasChildNodes" -> (
        (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
          if(Shell.params.opt_Dommodel2)
            ((Helper.ReturnStore(h, Value(BoolTop)), ctx), (he, ctxe))
          else {
            val lset_this = h(SinglePureLocalLoc)("@this")._2._2
            val b_return = lset_this.foldLeft[AbsBool](BoolBot)((b, l) => {
              val lset_child = Helper.Proto(h, l, AbsString.alpha("childNodes"))._2
              lset_child.foldLeft(b)((bb, ll) => {
                val absnum = Helper.Proto(h, ll, AbsString.alpha("length"))._1._4
                bb + (absnum.getAbsCase match {
                  case AbsBot => BoolBot
                  case _ if AbsNumber.isUIntAll(absnum) => BoolTop
                  case _ => absnum.getSingle match {
                    case Some(n) if AbsNumber.isNum(absnum) => if (n != 0) BoolTrue else BoolFalse
                    case _ => BoolFalse
                  }})
              })
            })
            if (b_return </ BoolBot)
              ((Helper.ReturnStore(h, Value(b_return)), ctx), (he, ctxe))
            else
              ((HeapBot, ContextBot), (he, ctxe))
          }
        })),
      // cloneNode(deep): shallow-copy this node's abstract object to a fresh
      // location; the clone is detached (parentNode = null).
      ("DOMNode.cloneNode" -> (
        (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
          val lset_env = h(SinglePureLocalLoc)("@env")._2._2
          val set_addr = lset_env.foldLeft[Set[Address]](Set())((a, l) => a + locToAddr(l))
          if (set_addr.size > 1) throw new InternalError("API heap allocation: Size of env address is " + set_addr.size)
          val addr_env = (cp._1._1, set_addr.head)
          val addr1 = cfg.getAPIAddress(addr_env, 0)
          val lset_this = h(SinglePureLocalLoc)("@this")._2._2
          /* arguments */
          val b_deep = Helper.toBoolean(getArgValue(h, ctx, args, "0"))
          if (b_deep </ BoolBot) {
            if(Shell.params.opt_Dommodel2)
              ((Helper.ReturnStore(h, Value(HTMLTopElement.loc_ins_set)), ctx), (he, ctxe))
            else {
              /* unsound, 'deep' argument is ignored */
              /* location for clone node */
              val l_r = addrToLoc(addr1, Recent)
              val (h_1, ctx_1) = Helper.Oldify(h, ctx, addr1)
              /* this node only */
              val o_node = lset_this.foldLeft(Obj.bottom)((o, l) => o + h_1(l))
              val h_2 = h_1.update(l_r, o_node)
              /* The duplicate node has no parent; (parentNode is null.). */
              val h_3 = Helper.PropStore(h_2, l_r, AbsString.alpha("parentNode"), Value(NullTop))
              ((Helper.ReturnStore(h_3, Value(l_r)), ctx_1), (he, ctxe))
            }
          }
          else
            ((HeapBot, ContextBot), (he, ctxe))
        })),
      ("DOMNode.normalize" -> (
        (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
          /* unsound, do nothing */
          ((Helper.ReturnStore(h, Value(UndefTop)), ctx), (he, ctxe))
        })),
      ("DOMNode.isSupported" -> (
        (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
          /* arguments */
          val s_feature = Helper.toString(Helper.toPrimitive_better(h, getArgValue(h, ctx, args, "0")))
          val s_version = Helper.toString(Helper.toPrimitive_better(h, getArgValue(h, ctx, args, "1")))
          // NOTE(review): both branches below are identical (BoolTop); the
          // opt_Dommodel2 split appears vestigial here.
          if (s_feature </ StrBot || s_version </ StrBot)
            if(Shell.params.opt_Dommodel2)
              ((Helper.ReturnStore(h, Value(BoolTop)), ctx), (he, ctxe))
            else /* imprecise semantic */
              ((Helper.ReturnStore(h, Value(BoolTop)), ctx), (he, ctxe))
          else
            ((HeapBot, ContextBot), (he, ctxe))
        })),
      ("DOMNode.hasAttributes" -> (
        (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
          /* imprecise semantic */
          ((Helper.ReturnStore(h, Value(BoolTop)), ctx), (he, ctxe))
        })),
      ("DOMNode.compareDocumentPosition" -> (
        (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
          /* arguments */
          val v_other = getArgValue(h, ctx, args, "0")
          if (v_other </ ValueBot) {
            /* imprecise semantic */
            ((Helper.ReturnStore(h, Value(UInt)), ctx), (he, ctxe))
          }
          else
            ((HeapBot, ContextBot), (he, ctxe))
        })),
      // isSameNode(other): abstract reference equality against `this`.
      ("DOMNode.isSameNode" -> (
        (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
          val lset_this = h(SinglePureLocalLoc)("@this")._2._2
          /* arguments */
          val v_other = getArgValue(h, ctx, args, "0")
          if (v_other </ ValueBot) {
            val v_return = Operator.bopSEq(Value(lset_this), Value(v_other._2))
            ((Helper.ReturnStore(h, v_return), ctx), (he, ctxe))
          }
          else
            ((HeapBot, ContextBot), (he, ctxe))
        })),
      ("DOMNode.lookupPrefix" -> (
        (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
          /* arguments */
          val s_uri = Helper.toString(Helper.toPrimitive_better(h, getArgValue(h, ctx, args, "0")))
          if (s_uri </ StrBot)
            /* imprecise semantic */
            ((Helper.ReturnStore(h, Value(StrTop) + Value(NullTop)), ctx), (he, ctxe))
          else
            ((HeapBot, ContextBot), (he, ctxe))
        })),
      ("DOMNode.isDefaultNamespace" -> (
        (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
          /* arguments */
          val s_uri = Helper.toString(Helper.toPrimitive_better(h, getArgValue(h, ctx, args, "0")))
          if (s_uri </ StrBot)
            /* imprecise semantic */
            ((Helper.ReturnStore(h, Value(BoolTop)), ctx), (he, ctxe))
          else
            ((HeapBot, ContextBot), (he, ctxe))
        })),
      ("DOMNode.lookupNamespaceURI" -> (
        (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
          /* arguments */
          val s_prefix = Helper.toString(Helper.toPrimitive_better(h, getArgValue(h, ctx, args, "0")))
          if (s_prefix </ StrBot)
            /* imprecise semantic */
            ((Helper.ReturnStore(h, Value(StrTop) + Value(NullTop)), ctx), (he, ctxe))
          else
            ((HeapBot, ContextBot), (he, ctxe))
        })),
      ("DOMNode.isEqualNode" -> (
        (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
          /* arguments */
          val v_arg = getArgValue(h, ctx, args, "0")
          if (v_arg </ ValueBot)
            /* imprecise semantic */
            ((Helper.ReturnStore(h, Value(BoolTop)), ctx), (he, ctxe))
          else
            ((HeapBot, ContextBot), (he, ctxe))
        })),
      ("DOMNode.getFeature" -> (
        (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
          /* arguments */
          val s_feature = Helper.toString(Helper.toPrimitive_better(h, getArgValue(h, ctx, args, "0")))
          val s_version = Helper.toString(Helper.toPrimitive_better(h, getArgValue(h, ctx, args, "1")))
          if (s_feature </ StrBot || s_version </ StrBot)
            /* unsound semantic */
            ((Helper.ReturnStore(h, Value(NullTop)), ctx), (he, ctxe))
          else
            ((HeapBot, ContextBot), (he, ctxe))
        })),
      ("DOMNode.setUserData" -> (
        (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
          /* arguments */
          val s_key = Helper.toString(Helper.toPrimitive_better(h, getArgValue(h, ctx, args, "0")))
          val v_data = getArgValue(h, ctx, args, "1")
          val v_handler = getArgValue(h, ctx, args, "2")
          if (s_key </ StrBot || v_data </ ValueBot || v_handler </ ValueBot)
            /* unsound semantic */
            ((Helper.ReturnStore(h, Value(NullTop)), ctx), (he, ctxe))
          else
            ((HeapBot, ContextBot), (he, ctxe))
        })),
      ("DOMNode.getUserData" -> (
        (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
          /* arguments */
          val s_key = Helper.toString(Helper.toPrimitive_better(h, getArgValue(h, ctx, args, "0")))
          if (s_key </ StrBot)
            /* unsound semantic */
            ((Helper.ReturnStore(h, Value(NullTop)), ctx), (he, ctxe))
          else
            ((HeapBot, ContextBot), (he, ctxe))
        })),
      // contains(other) [WHATWG DOM]: abstract containment test over every
      // (this, other) location pair; a possibly-null argument adds false.
      ("DOMNode.contains" -> (
        (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
          if(Shell.params.opt_Dommodel2)
            ((Helper.ReturnStore(h, Value(BoolTop)), ctx), (he, ctxe))
          else {
            val lset_this = h(SinglePureLocalLoc)("@this")._2._2
            /* arguments */
            val other = getArgValue(h, ctx, args, "0")
            val nullargcheck = if(other._1._2 </ NullBot) Value(BoolFalse) else ValueBot
            val lset_other = getArgValue(h, ctx, args, "0")._2
            if(!lset_other.isEmpty){
              val returnval = lset_this.foldLeft(Value(BoolBot))((_val, l_this) => {
                lset_other.foldLeft(_val)((__val, l_other) => {
                  if(DOMHelper.contains(h, LocSetBot, l_this, l_other) == true)
                    __val + Value(BoolTrue)
                  else
                    __val + Value(BoolFalse)
                })
              })
              ((Helper.ReturnStore(h, returnval + nullargcheck), ctx), (he, ctxe))
            }
            else if(nullargcheck </ ValueBot)
              ((Helper.ReturnStore(h, nullargcheck), ctx), (he, ctxe))
            else
              ((HeapBot, ContextBot), (he, ctxe))
          }
        }))
    )
  }
/**
 * Pre-analysis transfer functions for the DOMNode built-in API.
 * Each entry mirrors the main transfer function (getSemanticMap) but routes all
 * heap operations through PreHelper, producing the pre-fixpoint approximation.
 * Every function returns ((heap, context), (exception heap, exception context));
 * a bottom-like pair ((h, ctx), (he, ctxe)) with the inputs unchanged is used
 * when the argument values are bottom (i.e. the call is infeasible).
 */
def getPreSemanticMap(): Map[String, SemanticFun] = {
Map(
// Insert lset_new before lset_ref in each node's childNodes list,
// shifting later children right and bumping "length".
("DOMNode.insertBefore" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val PureLocalLoc = cfg.getPureLocal(cp)
val lset_this = h(PureLocalLoc)("@this")._2._2
/* arguments */
val lset_new = getArgValue_pre(h, ctx, args, "0", PureLocalLoc)._2
val lset_ref = getArgValue_pre(h, ctx, args, "1", PureLocalLoc)._2
if (!lset_new.isEmpty && !lset_ref.isEmpty) {
/* location for clone node */
val h_1 = lset_this.foldLeft(h)((hh, l_node) => {
val lset_ns = PreHelper.Proto(h, l_node, AbsString.alpha("childNodes"))._2
lset_ns.foldLeft(hh)((hhh, l_ns) => {
val n_len = Operator.ToUInt32(PreHelper.Proto(h, l_ns, AbsString.alpha("length")))
AbsNumber.getUIntSingle(n_len) match {
case Some(n_length) =>
// first index whose element may be strictly-equal to the reference child
val n_index = (0 until n_length.toInt).indexWhere((i) => {
BoolTrue <= Operator.bopSEq(PreHelper.Proto(hhh, l_ns, AbsString.alpha(i.toString)), Value(lset_ref))._1._3
})
if (n_index < 0)
hhh
else {
// NOTE(review): i_rev = n_length - i + 1 can exceed the last valid
// child index (n_length - 1) — verify this right-shift arithmetic
// against the main transfer function before relying on it.
val _hhh = (n_index+1 until n_length.toInt).foldLeft(hhh)((_h, i) => {
val i_rev = n_length - i + 1
val v_move = PreHelper.Proto(_h, l_ns, AbsString.alpha(i_rev.toString))
val _h1 = PreHelper.Delete(_h, l_ns, AbsString.alpha(i_rev.toString))._1
PreHelper.PropStore(_h1, l_ns, AbsString.alpha((i_rev+1).toString), v_move)
})
val _hhh_1 = PreHelper.PropStore(_hhh, l_ns, AbsString.alpha(n_index.toString), Value(lset_new))
// increase the length of childNodes by 1
PreHelper.PropStore(_hhh_1, l_ns, AbsString.alpha("length"), Value(AbsNumber.alpha(n_length + 1)))
}
case _ if AbsNumber.isUIntAll(n_len) =>
// length is an unknown uint: weakly update the numeric-string summary
val b_eq = Operator.bopSEq(PreHelper.Proto(hhh, l_ns, NumStr), Value(lset_ref))._1._3
val hhh_1 =
if (BoolTrue <= b_eq) PreHelper.PropStore(hhh, l_ns, NumStr, Value(lset_new))
else hhh
// NOTE(review): both branches of hhh_2 yield hhh, so the join below is
// effectively hhh_1 + hhh — confirm the BoolFalse branch is intentional.
val hhh_2 =
if (BoolFalse <= b_eq) hhh
else hhh
hhh_1 + hhh_2
case _ => hhh /* exception ?? */
}
})
})
((PreHelper.ReturnStore(h_1, PureLocalLoc, Value(lset_new)), ctx), (he, ctxe))
}
else
((h, ctx), (he, ctxe))
})),
// Replace lset_old with lset_new at the matching childNodes index.
("DOMNode.replaceChild" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val PureLocalLoc = cfg.getPureLocal(cp)
val lset_this = h(PureLocalLoc)("@this")._2._2
/* arguments */
val lset_new = getArgValue_pre(h, ctx, args, "0", PureLocalLoc)._2
val lset_old = getArgValue_pre(h, ctx, args, "1", PureLocalLoc)._2
if (!lset_new.isEmpty && !lset_old.isEmpty) {
/* location for clone node */
val h_1 = lset_this.foldLeft(h)((hh, l_node) => {
val lset_ns = PreHelper.Proto(h, l_node, AbsString.alpha("childNodes"))._2
lset_ns.foldLeft(hh)((hhh, l_ns) => {
val n_len = Operator.ToUInt32(PreHelper.Proto(h, l_ns, AbsString.alpha("length")))
AbsNumber.getUIntSingle(n_len) match {
case Some(n) =>
val n_index = (0 until n.toInt).indexWhere((i) => {
BoolTrue <= Operator.bopSEq(PreHelper.Proto(hhh, l_ns, AbsString.alpha(i.toString)), Value(lset_old))._1._3
})
if (n_index < 0)
hhh
else {
val hhh_1 = PreHelper.Delete(hhh, l_ns, AbsString.alpha(n_index.toString))._1
PreHelper.PropStore(hhh_1, l_ns, AbsString.alpha(n_index.toString), Value(lset_new))
}
case _ if AbsNumber.isUIntAll(n_len) =>
val b_eq = Operator.bopSEq(PreHelper.Proto(hhh, l_ns, NumStr), Value(lset_old))._1._3
val hhh_1 =
if (BoolTrue <= b_eq) {
val _hhh = PreHelper.Delete(hhh, l_ns, NumStr)._1
PreHelper.PropStore(_hhh, l_ns, NumStr, Value(lset_new))
}
else hhh
// NOTE(review): hhh_2 is hhh in both branches — dead join, see insertBefore.
val hhh_2 =
if (BoolFalse <= b_eq) hhh
else hhh
hhh_1 + hhh_2
case _ => hhh /* exception ?? */
}
})
})
// returns the replaced (old) child, per the DOM spec
((PreHelper.ReturnStore(h_1, PureLocalLoc, Value(lset_old)), ctx), (he, ctxe))
}
else
((h, ctx), (he, ctxe))
})),
// Remove lset_child from childNodes, shifting later children left and
// decrementing "length".
("DOMNode.removeChild" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val PureLocalLoc = cfg.getPureLocal(cp)
val lset_this = h(PureLocalLoc)("@this")._2._2
/* arguments */
val lset_child = getArgValue_pre(h, ctx, args, "0", PureLocalLoc)._2
if (!lset_child.isEmpty) {
/* location for clone node */
val h_1 = lset_this.foldLeft(h)((hh, l_node) => {
val lset_ns = PreHelper.Proto(h, l_node, AbsString.alpha("childNodes"))._2
lset_ns.foldLeft(hh)((hhh, l_ns) => {
val n_len = Operator.ToUInt32(PreHelper.Proto(h, l_ns, AbsString.alpha("length")))
AbsNumber.getUIntSingle(n_len) match {
case Some(n) =>
val n_index = (0 until n.toInt).indexWhere((i) => {
BoolTrue <= Operator.bopSEq(PreHelper.Proto(hhh, l_ns, AbsString.alpha(i.toString)), Value(lset_child))._1._3
})
if (n_index < 0)
hhh
else {
val hhh_1 = PreHelper.Delete(hhh, l_ns, AbsString.alpha(n_index.toString))._1
// shift each following child one slot left
val hhh_2 = (n_index+1 until n.toInt).foldLeft(hhh_1)((_h, i) => {
val v_next = PreHelper.Proto(_h, l_ns, AbsString.alpha(i.toString))
val _h1 = PreHelper.Delete(_h, l_ns, AbsString.alpha(i.toString))._1
PreHelper.PropStore(_h1, l_ns, AbsString.alpha((i-1).toString), v_next)
})
// decrease the length of childNodes by 1
PreHelper.PropStore(hhh_2, l_ns, AbsString.alpha("length"), Value(AbsNumber.alpha(n - 1)))
}
case _ if AbsNumber.isUIntAll(n_len) =>
val b_eq = Operator.bopSEq(PreHelper.Proto(hhh, l_ns, NumStr), Value(lset_child))._1._3
val hhh_1 =
if (BoolTrue <= b_eq) PreHelper.Delete(hhh, l_ns, NumStr)._1
else hhh
// NOTE(review): hhh_2 is hhh in both branches — dead join, see insertBefore.
val hhh_2 =
if (BoolFalse <= b_eq) hhh
else hhh
hhh_1 + hhh_2
case _ => hhh /* exception ?? */
}
})
})
((PreHelper.ReturnStore(h_1, PureLocalLoc, Value(lset_child)), ctx), (he, ctxe))
}
else
((h, ctx), (he, ctxe))
})),
// Append lset_child at index "length" and increment "length".
("DOMNode.appendChild" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val PureLocalLoc = cfg.getPureLocal(cp)
val lset_this = h(PureLocalLoc)("@this")._2._2
/* arguments */
val lset_child = getArgValue_pre(h, ctx, args, "0", PureLocalLoc)._2
if (!lset_child.isEmpty) {
/* location for clone node */
val h_1 = lset_this.foldLeft(h)((hh, l_node) => {
val lset_ns = PreHelper.Proto(hh, l_node, AbsString.alpha("childNodes"))._2
lset_ns.foldLeft(hh)((hhh, l_ns) => {
val n_len = Operator.ToUInt32(PreHelper.Proto(hhh, l_ns, AbsString.alpha("length")))
AbsNumber.getUIntSingle(n_len) match {
case Some(n) =>
val hhh_1 = PreHelper.PropStore(hhh, l_ns, AbsString.alpha(n.toInt.toString), Value(lset_child))
PreHelper.PropStore(hhh_1, l_ns, AbsString.alpha("length"), Value(AbsNumber.alpha(n+1)))
case _ if AbsNumber.isUIntAll(n_len) =>
PreHelper.PropStore(hhh, l_ns, NumStr, Value(lset_child))
case _ => hhh /* exception ?? */
}
})
})
((PreHelper.ReturnStore(h_1, PureLocalLoc, Value(lset_child)), ctx), (he, ctxe))
}
else
((h, ctx), (he, ctxe))
})),
// true iff some childNodes.length is (possibly) non-zero; joins over all
// this-locations and their childNodes lists.
("DOMNode.hasChildNodes" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val PureLocalLoc = cfg.getPureLocal(cp)
val lset_this = h(PureLocalLoc)("@this")._2._2
val b_return = lset_this.foldLeft[AbsBool](BoolBot)((b, l) => {
val lset_child = PreHelper.Proto(h, l, AbsString.alpha("childNodes"))._2
lset_child.foldLeft(b)((bb, ll) => {
val absnum = PreHelper.Proto(h, ll, AbsString.alpha("length"))._1._4
bb + (absnum.getAbsCase match {
case AbsBot => BoolBot
case _ if AbsNumber.isUIntAll(absnum) => BoolTop
case _ => absnum.getSingle match {
case Some(n) if AbsNumber.isNum(absnum) => if (n != 0) BoolTrue else BoolFalse
case _ => BoolFalse
}})
})
})
if (b_return </ BoolBot)
((PreHelper.ReturnStore(h, PureLocalLoc, Value(b_return)), ctx), (he, ctxe))
else
((h, ctx), (he, ctxe))
})),
// Shallow clone: copies the node object itself into a fresh location.
("DOMNode.cloneNode" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val PureLocalLoc = cfg.getPureLocal(cp)
val lset_env = h(PureLocalLoc)("@env")._2._2
val set_addr = lset_env.foldLeft[Set[Address]](Set())((a, l) => a + locToAddr(l))
if (set_addr.size > 1) throw new InternalError("API heap allocation: Size of env address is " + set_addr.size)
val addr_env = (cp._1._1, set_addr.head)
val addr1 = cfg.getAPIAddress(addr_env, 0)
val lset_this = h(PureLocalLoc)("@this")._2._2
/* arguments */
val b_deep = PreHelper.toBoolean(getArgValue_pre(h, ctx, args, "0", PureLocalLoc))
if (b_deep </ BoolBot) {
/* unsound: the 'deep' argument is ignored */
/* location for clone node */
val l_r = addrToLoc(addr1, Recent)
val (h_1, ctx_1) = PreHelper.Oldify(h, ctx, addr1)
/* this node only */
val o_node = lset_this.foldLeft(Obj.bottom)((o, l) => o + h_1(l))
val h_2 = h_1.update(l_r, o_node)
((PreHelper.ReturnStore(h_2, PureLocalLoc, Value(l_r)), ctx_1), (he, ctxe))
}
else
((h, ctx), (he, ctxe))
})),
("DOMNode.normalize" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val PureLocalLoc = cfg.getPureLocal(cp)
/* unsound, do nothing */
((PreHelper.ReturnStore(h, PureLocalLoc, Value(UndefTop)), ctx), (he, ctxe))
})),
("DOMNode.isSupported" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val PureLocalLoc = cfg.getPureLocal(cp)
/* arguments */
val s_feature = PreHelper.toString(PreHelper.toPrimitive(getArgValue_pre(h, ctx, args, "0", PureLocalLoc)))
val s_version = PreHelper.toString(PreHelper.toPrimitive(getArgValue_pre(h, ctx, args, "1", PureLocalLoc)))
if (s_feature </ StrBot || s_version </ StrBot)
/* imprecise semantic */
((PreHelper.ReturnStore(h, PureLocalLoc, Value(BoolTop)), ctx), (he, ctxe))
else
((h, ctx), (he, ctxe))
})),
("DOMNode.hasAttributes" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val PureLocalLoc = cfg.getPureLocal(cp)
/* imprecise semantic */
((PreHelper.ReturnStore(h, PureLocalLoc, Value(BoolTop)), ctx), (he, ctxe))
})),
("DOMNode.compareDocumentPosition" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val PureLocalLoc = cfg.getPureLocal(cp)
/* arguments */
val v_other = getArgValue_pre(h, ctx, args, "0", PureLocalLoc)
if (v_other </ ValueBot) {
/* imprecise semantic */
((PreHelper.ReturnStore(h, PureLocalLoc, Value(UInt)), ctx), (he, ctxe))
}
else
((h, ctx), (he, ctxe))
})),
// Precise: abstract strict-equality of the this-locations and the argument.
("DOMNode.isSameNode" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val PureLocalLoc = cfg.getPureLocal(cp)
val lset_this = h(PureLocalLoc)("@this")._2._2
/* arguments */
val v_other = getArgValue_pre(h, ctx, args, "0", PureLocalLoc)
if (v_other </ ValueBot) {
val v_return = Operator.bopSEq(Value(lset_this), Value(v_other._2))
((PreHelper.ReturnStore(h, PureLocalLoc, v_return), ctx), (he, ctxe))
}
else
((h, ctx), (he, ctxe))
})),
("DOMNode.lookupPrefix" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val PureLocalLoc = cfg.getPureLocal(cp)
/* arguments */
val s_uri = PreHelper.toString(PreHelper.toPrimitive(getArgValue_pre(h, ctx, args, "0", PureLocalLoc)))
if (s_uri </ StrBot)
/* imprecise semantic */
((PreHelper.ReturnStore(h, PureLocalLoc, Value(StrTop) + Value(NullTop)), ctx), (he, ctxe))
else
((h, ctx), (he, ctxe))
})),
("DOMNode.isDefaultNamespace" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val PureLocalLoc = cfg.getPureLocal(cp)
/* arguments */
val s_uri = PreHelper.toString(PreHelper.toPrimitive(getArgValue_pre(h, ctx, args, "0", PureLocalLoc)))
if (s_uri </ StrBot)
/* imprecise semantic */
((PreHelper.ReturnStore(h, PureLocalLoc, Value(BoolTop)), ctx), (he, ctxe))
else
((h, ctx), (he, ctxe))
})),
("DOMNode.lookupNamespaceURI" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val PureLocalLoc = cfg.getPureLocal(cp)
/* arguments */
val s_prefix = PreHelper.toString(PreHelper.toPrimitive(getArgValue_pre(h, ctx, args, "0", PureLocalLoc)))
if (s_prefix </ StrBot)
/* imprecise semantic */
((PreHelper.ReturnStore(h, PureLocalLoc, Value(StrTop) + Value(NullTop)), ctx), (he, ctxe))
else
((h, ctx), (he, ctxe))
})),
("DOMNode.isEqualNode" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val PureLocalLoc = cfg.getPureLocal(cp)
/* arguments */
val v_arg = getArgValue_pre(h, ctx, args, "0", PureLocalLoc)
if (v_arg </ ValueBot)
/* imprecise semantic */
((PreHelper.ReturnStore(h, PureLocalLoc, Value(BoolTop)), ctx), (he, ctxe))
else
((h, ctx), (he, ctxe))
})),
("DOMNode.getFeature" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val PureLocalLoc = cfg.getPureLocal(cp)
/* arguments */
val s_feature = PreHelper.toString(PreHelper.toPrimitive(getArgValue_pre(h, ctx, args, "0", PureLocalLoc)))
val s_version = PreHelper.toString(PreHelper.toPrimitive(getArgValue_pre(h, ctx, args, "1", PureLocalLoc)))
if (s_feature </ StrBot || s_version </ StrBot)
/* unsound semantic */
((PreHelper.ReturnStore(h, PureLocalLoc, Value(NullTop)), ctx), (he, ctxe))
else
((h, ctx), (he, ctxe))
})),
("DOMNode.setUserData" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val PureLocalLoc = cfg.getPureLocal(cp)
/* arguments */
val s_key = PreHelper.toString(PreHelper.toPrimitive(getArgValue_pre(h, ctx, args, "0", PureLocalLoc)))
val v_data = getArgValue_pre(h, ctx, args, "1", PureLocalLoc)
val v_handler = getArgValue_pre(h, ctx, args, "2", PureLocalLoc)
if (s_key </ StrBot || v_data </ ValueBot || v_handler </ ValueBot)
/* unsound semantic */
((PreHelper.ReturnStore(h, PureLocalLoc, Value(NullTop)), ctx), (he, ctxe))
else
((h, ctx), (he, ctxe))
})),
("DOMNode.getUserData" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val PureLocalLoc = cfg.getPureLocal(cp)
/* arguments */
val s_key = PreHelper.toString(PreHelper.toPrimitive(getArgValue_pre(h, ctx, args, "0", PureLocalLoc)))
if (s_key </ StrBot)
/* unsound semantic */
((PreHelper.ReturnStore(h, PureLocalLoc, Value(NullTop)), ctx), (he, ctxe))
else
((h, ctx), (he, ctxe))
}))
)
}
/**
 * Definition-set ("def") access functions for the DOMNode built-in API.
 * For each modeled function, the returned AccessFun over-approximates the set
 * of (location, property) pairs the corresponding transfer function may WRITE,
 * given the same heap/context/arguments. Each entry mirrors the control
 * structure of its transfer function; returns LPBot when the call is
 * infeasible (bottom arguments).
 */
def getDefMap(): Map[String, AccessFun] = {
  Map(
    //TODO: not yet implemented
    ("DOMNode.insertBefore" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        val lset_this = h(SinglePureLocalLoc)("@this")._2._2
        /* arguments */
        val lset_new = getArgValue(h, ctx, args, "0")._2
        val lset_ref = getArgValue(h, ctx, args, "1")._2
        if (!lset_new.isEmpty && !lset_ref.isEmpty) {
          /* location for clone node */
          val LP1 = lset_this.foldLeft(LPBot)((lpset, l_node) => {
            val lset_ns = Helper.Proto(h, l_node, AbsString.alpha("childNodes"))._2
            lset_ns.foldLeft(lpset)((lpset1, l_ns) => {
              val n_len = Operator.ToUInt32(Helper.Proto(h, l_ns, AbsString.alpha("length")))
              AbsNumber.getUIntSingle(n_len) match {
                case Some(n_length) =>
                  val n_index = (0 until n_length.toInt).indexWhere((i) => {
                    BoolTrue <= Operator.bopSEq(Helper.Proto(h, l_ns, AbsString.alpha(i.toString)), Value(lset_ref))._1._3
                  })
                  if (n_index < 0)
                    lpset1
                  else {
                    // writes performed while shifting children right
                    val LP1_1 = (n_index+1 until n_length.toInt).foldLeft(lpset1)((lpset2, i) => {
                      val i_rev = n_length - i + 1
                      val LP1_1_1 = AccessHelper.Delete_def(h, l_ns, AbsString.alpha(i_rev.toString))
                      lpset2 ++ LP1_1_1 ++ AccessHelper.PropStore_def(h, l_ns, AbsString.alpha((i_rev+1).toString))
                    })
                    val LP1_2 = AccessHelper.PropStore_def(h, l_ns, AbsString.alpha(n_index.toString))
                    // increase the length of childNodes by 1
                    lpset1 ++ LP1_1 ++ LP1_2 ++
                      AccessHelper.PropStore_def(h, l_ns, AbsString.alpha("length"))
                  }
                case _ if AbsNumber.isUIntAll(n_len) =>
                  val b_eq = Operator.bopSEq(Helper.Proto(h, l_ns, NumStr), Value(lset_ref))._1._3
                  val LP1_1 =
                    if (BoolTrue <= b_eq) AccessHelper.PropStore_def(h, l_ns, NumStr)
                    else LPBot
                  val LP1_2 =
                    if (BoolFalse <= b_eq) lpset1
                    else LPBot
                  lpset1 ++ LP1_1 ++ LP1_2
                case _ => lpset1 /* exception ?? */
              }
            })
          })
          LP1 + (SinglePureLocalLoc, "@return")
        }
        else
          LPBot
      })),
    ("DOMNode.replaceChild" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        val lset_this = h(SinglePureLocalLoc)("@this")._2._2
        /* arguments */
        val lset_new = getArgValue(h, ctx, args, "0")._2
        val lset_old = getArgValue(h, ctx, args, "1")._2
        if (!lset_new.isEmpty && !lset_old.isEmpty) {
          /* location for clone node */
          val LP1 = lset_this.foldLeft(LPBot)((lpset, l_node) => {
            val lset_ns = Helper.Proto(h, l_node, AbsString.alpha("childNodes"))._2
            lset_ns.foldLeft(lpset)((lpset1, l_ns) => {
              val n_len = Operator.ToUInt32(Helper.Proto(h, l_ns, AbsString.alpha("length")))
              AbsNumber.getUIntSingle(n_len) match {
                case Some(n) =>
                  val n_index = (0 until n.toInt).indexWhere((i) => {
                    BoolTrue <= Operator.bopSEq(Helper.Proto(h, l_ns, AbsString.alpha(i.toString)), Value(lset_old))._1._3
                  })
                  if (n_index < 0)
                    lpset1
                  else {
                    val LP1_1 = AccessHelper.Delete_def(h, l_ns, AbsString.alpha(n_index.toString))
                    lpset1 ++ LP1_1 ++ AccessHelper.PropStore_def(h, l_ns, AbsString.alpha(n_index.toString))
                  }
                case _ if AbsNumber.isUIntAll(n_len) =>
                  val b_eq = Operator.bopSEq(Helper.Proto(h, l_ns, NumStr), Value(lset_old))._1._3
                  val LP1_1 =
                    if (BoolTrue <= b_eq) {
                      val LP1_1_1 = AccessHelper.Delete_def(h, l_ns, NumStr)
                      LP1_1_1 ++ AccessHelper.PropStore_def(h, l_ns, NumStr)
                    }
                    else LPBot
                  lpset1 ++ LP1_1
                case _ => lpset1 /* exception ?? */
              }
            })
          })
          LP1 + (SinglePureLocalLoc, "@return")
        }
        else
          LPBot
      })),
    ("DOMNode.removeChild" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        val lset_this = h(SinglePureLocalLoc)("@this")._2._2
        /* arguments */
        val lset_child = getArgValue(h, ctx, args, "0")._2
        if (!lset_child.isEmpty) {
          /* location for clone node */
          val LP1 = lset_this.foldLeft(LPBot)((lpset, l_node) => {
            val lset_ns = Helper.Proto(h, l_node, AbsString.alpha("childNodes"))._2
            lset_ns.foldLeft(lpset)((lpset1, l_ns) => {
              val n_len = Operator.ToUInt32(Helper.Proto(h, l_ns, AbsString.alpha("length")))
              AbsNumber.getUIntSingle(n_len) match {
                case Some(n) =>
                  val n_index = (0 until n.toInt).indexWhere((i) => {
                    BoolTrue <= Operator.bopSEq(Helper.Proto(h, l_ns, AbsString.alpha(i.toString)), Value(lset_child))._1._3
                  })
                  if (n_index < 0)
                    lpset1
                  else {
                    val LP1_1 = AccessHelper.Delete_def(h, l_ns, AbsString.alpha(n_index.toString))
                    val LP1_2 = (n_index+1 until n.toInt).foldLeft(LPBot)((lpset2, i) => {
                      lpset2 ++ AccessHelper.Delete_def(h, l_ns, AbsString.alpha(i.toString)) ++
                        AccessHelper.PropStore_def(h, l_ns, AbsString.alpha((i-1).toString))
                    })
                    // fix: was `LP1_2 ++ LP1_2`, which dropped LP1_1 (the def-set of
                    // deleting the removed index) from the union
                    lpset1 ++ LP1_1 ++ LP1_2 ++
                      AccessHelper.PropStore_def(h, l_ns, AbsString.alpha("length"))
                  }
                case _ if AbsNumber.isUIntAll(n_len) =>
                  val b_eq = Operator.bopSEq(Helper.Proto(h, l_ns, NumStr), Value(lset_child))._1._3
                  val LP1_1 =
                    if (BoolTrue <= b_eq) AccessHelper.Delete_def(h, l_ns, NumStr)
                    else LPBot
                  lpset1 ++ LP1_1
                case _ => lpset1 /* exception ?? */
              }
            })
          })
          LP1 + (SinglePureLocalLoc, "@return")
        }
        else
          LPBot
      })),
    ("DOMNode.appendChild" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        val lset_this = h(SinglePureLocalLoc)("@this")._2._2
        /* arguments */
        val lset_child = getArgValue(h, ctx, args, "0")._2
        if (!lset_child.isEmpty) {
          val LP1 = lset_this.foldLeft(LPBot)((lpset, l_node) => {
            val lset_ns = Helper.Proto(h, l_node, AbsString.alpha("childNodes"))._2
            lset_ns.foldLeft(lpset)((lpset1, l_ns) => {
              val n_len = Operator.ToUInt32(Helper.Proto(h, l_ns, AbsString.alpha("length")))
              AbsNumber.getUIntSingle(n_len) match {
                case Some(n) =>
                  lpset1 ++ AccessHelper.PropStore_def(h, l_ns, AbsString.alpha(n.toInt.toString)) ++
                    AccessHelper.PropStore_def(h, l_ns, AbsString.alpha("length"))
                case _ if AbsNumber.isUIntAll(n_len) =>
                  lpset1 ++ AccessHelper.PropStore_def(h, l_ns, NumStr)
                case _ => lpset1 /* exception ?? */
              }
            })
          })
          LP1 + (SinglePureLocalLoc, "@return")
        }
        else
          LPBot
      })),
    ("DOMNode.hasChildNodes" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        LPSet((SinglePureLocalLoc, "@return"))
      })),
    // cloneNode writes every property of the cloned node into the fresh
    // API-allocated location, plus the oldified addresses.
    ("DOMNode.cloneNode" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        val lset_this = h(SinglePureLocalLoc)("@this")._2._2
        val lset_env = h(SinglePureLocalLoc)("@env")._2._2
        val set_addr = lset_env.foldLeft[Set[Address]](Set())((a, l) => a + locToAddr(l))
        //val l_r = addrToLoc(addr1, Recent)
        //val LP1 = AccessHelper.Oldify_def(h, ctx, addr1)
        val LP1 = set_addr.foldLeft(LPBot)((lp, a) =>
          lp ++ AccessHelper.Oldify_def(h, ctx, cfg.getAPIAddress((fid, a), 0)))
        val LP2 = lset_this.foldLeft(LPBot)((lpset, l) => {
          val prop_set = h(l).getProps
          prop_set.foldLeft(lpset)((lpset1, prop) =>
            lpset1 ++ set_addr.foldLeft(LPBot)((lp, a) => lp + (addrToLoc(cfg.getAPIAddress((fid, a), 0),Recent), prop)))
        })
        LP1 ++ LP2 + (SinglePureLocalLoc, "@return")
      })),
    ("DOMNode.normalize" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        LPSet((SinglePureLocalLoc, "@return"))
      })),
    ("DOMNode.isSupported" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        LPSet((SinglePureLocalLoc, "@return"))
      })),
    ("DOMNode.hasAttributes" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        LPSet((SinglePureLocalLoc, "@return"))
      })),
    ("DOMNode.compareDocumentPosition" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        LPSet((SinglePureLocalLoc, "@return"))
      })),
    ("DOMNode.isSameNode" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        LPSet((SinglePureLocalLoc, "@return"))
      })),
    ("DOMNode.lookupPrefix" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        LPSet((SinglePureLocalLoc, "@return"))
      })),
    ("DOMNode.isDefaultNamespace" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        LPSet((SinglePureLocalLoc, "@return"))
      })),
    ("DOMNode.lookupNamespaceURI" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        LPSet((SinglePureLocalLoc, "@return"))
      })),
    ("DOMNode.isEqualNode" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        LPSet((SinglePureLocalLoc, "@return"))
      })),
    ("DOMNode.getFeature" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        LPSet((SinglePureLocalLoc, "@return"))
      })),
    ("DOMNode.setUserData" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        LPSet((SinglePureLocalLoc, "@return"))
      })),
    ("DOMNode.getUserData" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        LPSet((SinglePureLocalLoc, "@return"))
      }))
  )
}
/**
 * Use-set ("use") access functions for the DOMNode built-in API.
 * For each modeled function, the returned AccessFun over-approximates the set
 * of (location, property) pairs that the corresponding transfer function may
 * READ, given the same heap/context/arguments. Entries mirror their transfer
 * functions; "@this"/"@return" of the pure-local object are included wherever
 * the transfer function reads/writes them.
 */
def getUseMap(): Map[String, AccessFun] = {
  Map(
    //TODO: not yet implemented
    ("DOMNode.insertBefore" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        val lset_this = h(SinglePureLocalLoc)("@this")._2._2
        /* arguments */
        val lset_new = getArgValue(h, ctx, args, "0")._2
        val lset_ref = getArgValue(h, ctx, args, "1")._2
        val LP1 = getArgValue_use(h, ctx, args, "0") ++ getArgValue_use(h, ctx, args, "1")
        if (!lset_new.isEmpty && !lset_ref.isEmpty) {
          /* location for clone node */
          val LP2 = lset_this.foldLeft(LPBot)((lpset, l_node) => {
            val lset_ns = Helper.Proto(h, l_node, AbsString.alpha("childNodes"))._2
            val LP2_1 = AccessHelper.Proto_use(h, l_node, AbsString.alpha("childNodes"))
            lset_ns.foldLeft(lpset++LP2_1)((lpset1, l_ns) => {
              val n_len = Operator.ToUInt32(Helper.Proto(h, l_ns, AbsString.alpha("length")))
              val LP2_2 = AccessHelper.Proto_use(h, l_ns, AbsString.alpha("length"))
              AbsNumber.getUIntSingle(n_len) match {
                case Some(n_length) =>
                  val n_index = (0 until n_length.toInt).indexWhere((i) => {
                    BoolTrue <= Operator.bopSEq(Helper.Proto(h, l_ns, AbsString.alpha(i.toString)), Value(lset_ref))._1._3
                  })
                  val LP2_3 = (0 until n_length.toInt).foldLeft(LPBot)((lpset, i) =>
                    lpset ++ AccessHelper.Proto_use(h, l_ns, AbsString.alpha(i.toString)))
                  if (n_index < 0)
                    lpset1 ++ LP2_2 ++ LP2_3
                  else {
                    // reads performed while shifting children right
                    val LP2_4 = (n_index+1 until n_length.toInt).foldLeft(LPBot)((lpset2, i) => {
                      val i_rev = n_length - i + 1
                      val LP2_4_1 = AccessHelper.Proto_use(h, l_ns, AbsString.alpha(i_rev.toString))
                      val LP2_4_2 = AccessHelper.Delete_use(h, l_ns, AbsString.alpha(i_rev.toString))
                      lpset2 ++ LP2_4_1 ++ LP2_4_2 ++
                        AccessHelper.PropStore_use(h, l_ns, AbsString.alpha((i_rev+1).toString))
                    })
                    val LP2_5 = AccessHelper.PropStore_use(h, l_ns, AbsString.alpha(n_index.toString))
                    lpset1 ++ LP2_2 ++ LP2_3 ++ LP2_4 ++ LP2_5 ++
                      AccessHelper.PropStore_use(h, l_ns, AbsString.alpha("length"))
                  }
                case _ if AbsNumber.isUIntAll(n_len) =>
                  val b_eq = Operator.bopSEq(Helper.Proto(h, l_ns, NumStr), Value(lset_ref))._1._3
                  val LP2_3 = AccessHelper.Proto_use(h, l_ns, NumStr)
                  val LP2_4 =
                    if (BoolTrue <= b_eq) AccessHelper.PropStore_use(h, l_ns, NumStr)
                    else LPBot
                  // (removed an unused `hhh_2` binding copied from the transfer
                  // function; it was either `lpset1`, already in the union, or
                  // LPBot — a provable no-op here)
                  lpset1 ++ LP2_2 ++ LP2_3 ++ LP2_4
                case _ => lpset1 /* exception ?? */
              }
            })
          })
          LP1 ++ LP2 + (SinglePureLocalLoc, "@return") + (SinglePureLocalLoc, "@this")
        }
        else
          LP1
      })),
    ("DOMNode.replaceChild" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        val lset_this = h(SinglePureLocalLoc)("@this")._2._2
        /* arguments */
        val lset_new = getArgValue(h, ctx, args, "0")._2
        val lset_old = getArgValue(h, ctx, args, "1")._2
        val LP1 = getArgValue_use(h, ctx, args, "0") ++ getArgValue_use(h, ctx, args, "1")
        if (!lset_new.isEmpty && !lset_old.isEmpty) {
          /* location for clone node */
          val LP2 = lset_this.foldLeft(LPBot)((lpset, l_node) => {
            val lset_ns = Helper.Proto(h, l_node, AbsString.alpha("childNodes"))._2
            val LP2_1 = AccessHelper.Proto_use(h, l_node, AbsString.alpha("childNodes"))
            lset_ns.foldLeft(lpset++LP2_1)((lpset1, l_ns) => {
              val n_len = Operator.ToUInt32(Helper.Proto(h, l_ns, AbsString.alpha("length")))
              val LP2_2 = AccessHelper.Proto_use(h, l_ns, AbsString.alpha("length"))
              AbsNumber.getUIntSingle(n_len) match {
                case Some(n) =>
                  val n_index = (0 until n.toInt).indexWhere((i) => {
                    BoolTrue <= Operator.bopSEq(Helper.Proto(h, l_ns, AbsString.alpha(i.toString)), Value(lset_old))._1._3
                  })
                  val LP2_3 = (0 until n.toInt).foldLeft(LPBot)((lpset, i) =>
                    lpset ++ AccessHelper.Proto_use(h, l_ns, AbsString.alpha(i.toString)))
                  if (n_index < 0)
                    lpset1 ++ LP2_2 ++ LP2_3
                  else {
                    val LP2_4 = AccessHelper.Delete_use(h, l_ns, AbsString.alpha(n_index.toString))
                    // fix: LP2_4 was computed but dropped from the union below
                    lpset1 ++ LP2_2 ++ LP2_3 ++ LP2_4 ++
                      AccessHelper.PropStore_use(h, l_ns, AbsString.alpha(n_index.toString))
                  }
                case _ if AbsNumber.isUIntAll(n_len) =>
                  val b_eq = Operator.bopSEq(Helper.Proto(h, l_ns, NumStr), Value(lset_old))._1._3
                  val LP2_3 = AccessHelper.Proto_use(h, l_ns, NumStr)
                  val LP2_4 =
                    if (BoolTrue <= b_eq) {
                      AccessHelper.Delete_use(h, l_ns, NumStr) ++
                        AccessHelper.PropStore_use(h, l_ns, NumStr)
                    }
                    else LPBot
                  lpset1 ++ LP2_2 ++ LP2_3 ++ LP2_4
                case _ => lpset1 /* exception ?? */
              }
            })
          })
          LP1 ++ LP2 + (SinglePureLocalLoc, "@return") + (SinglePureLocalLoc, "@this")
        }
        else
          LP1
      })),
    ("DOMNode.removeChild" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        val lset_this = h(SinglePureLocalLoc)("@this")._2._2
        /* arguments */
        val lset_child = getArgValue(h, ctx, args, "0")._2
        val LP1 = getArgValue_use(h, ctx, args, "0")
        if (!lset_child.isEmpty) {
          /* location for clone node */
          val LP2 = lset_this.foldLeft(LPBot)((lpset, l_node) => {
            val lset_ns = Helper.Proto(h, l_node, AbsString.alpha("childNodes"))._2
            val LP2_1 = AccessHelper.Proto_use(h, l_node, AbsString.alpha("childNodes"))
            lset_ns.foldLeft(lpset++LP2_1)((lpset1, l_ns) => {
              val n_len = Operator.ToUInt32(Helper.Proto(h, l_ns, AbsString.alpha("length")))
              val LP2_2 = AccessHelper.Proto_use(h, l_ns, AbsString.alpha("length"))
              AbsNumber.getUIntSingle(n_len) match {
                case Some(n) =>
                  val n_index = (0 until n.toInt).indexWhere((i) => {
                    BoolTrue <= Operator.bopSEq(Helper.Proto(h, l_ns, AbsString.alpha(i.toString)), Value(lset_child))._1._3
                  })
                  val LP2_3 = (0 until n.toInt).foldLeft(LPBot)((lpset, i) =>
                    lpset ++ AccessHelper.Proto_use(h, l_ns, AbsString.alpha(i.toString)))
                  if (n_index < 0)
                    lpset1 ++ LP2_2 ++ LP2_3
                  else {
                    // fix: this is the use map — Delete_use, not Delete_def
                    // (matches the shift loop below and the def-map counterpart)
                    val LP2_4 = AccessHelper.Delete_use(h, l_ns, AbsString.alpha(n_index.toString))
                    val LP2_5 = (n_index+1 until n.toInt).foldLeft(LPBot)((lpset2, i) => {
                      val LP2_5_1 = AccessHelper.Proto_use(h, l_ns, AbsString.alpha(i.toString))
                      LP2_5_1 ++ AccessHelper.Delete_use(h, l_ns, AbsString.alpha(i.toString)) ++
                        AccessHelper.PropStore_use(h, l_ns, AbsString.alpha((i-1).toString))
                    })
                    lpset1 ++ LP2_2 ++ LP2_3 ++ LP2_4 ++ LP2_5 ++
                      AccessHelper.PropStore_use(h, l_ns, AbsString.alpha("length"))
                  }
                case _ if AbsNumber.isUIntAll(n_len) =>
                  val b_eq = Operator.bopSEq(Helper.Proto(h, l_ns, NumStr), Value(lset_child))._1._3
                  val LP2_3 = AccessHelper.Proto_use(h, l_ns, NumStr)
                  val LP2_4 =
                    if (BoolTrue <= b_eq) AccessHelper.Delete_use(h, l_ns, NumStr)
                    else LPBot
                  lpset1 ++ LP2_2 ++ LP2_3 ++ LP2_4
                case _ => lpset1 /* exception ?? */
              }
            })
          })
          LP1 ++ LP2 + (SinglePureLocalLoc, "@return") + (SinglePureLocalLoc, "@this")
        }
        else
          LP1
      })),
    ("DOMNode.appendChild" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        val lset_this = h(SinglePureLocalLoc)("@this")._2._2
        /* arguments */
        val lset_child = getArgValue(h, ctx, args, "0")._2
        val LP1 = getArgValue_use(h, ctx, args, "0")
        if (!lset_child.isEmpty) {
          val LP2 = lset_this.foldLeft(LPBot)((lpset, l_node) => {
            val lset_ns = Helper.Proto(h, l_node, AbsString.alpha("childNodes"))._2
            val LP2_1 = AccessHelper.Proto_use(h, l_node, AbsString.alpha("childNodes"))
            lset_ns.foldLeft(lpset++LP2_1)((lpset1, l_ns) => {
              val n_len = Operator.ToUInt32(Helper.Proto(h, l_ns, AbsString.alpha("length")))
              val LP2_2 = AccessHelper.Proto_use(h, l_ns, AbsString.alpha("length"))
              AbsNumber.getUIntSingle(n_len) match {
                case Some(n) =>
                  lpset1 ++ LP2_2 ++
                    AccessHelper.PropStore_use(h, l_ns, AbsString.alpha(n.toInt.toString)) ++
                    AccessHelper.PropStore_use(h, l_ns, AbsString.alpha("length"))
                case _ if AbsNumber.isUIntAll(n_len) =>
                  lpset1 ++ LP2_2 ++ AccessHelper.PropStore_use(h, l_ns, NumStr)
                case _ => lpset1 /* exception ?? */
              }
            })
          })
          LP1 ++ LP2 + (SinglePureLocalLoc, "@return") + (SinglePureLocalLoc, "@this")
        }
        else
          LP1
      })),
    ("DOMNode.hasChildNodes" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        val lset_this = h(SinglePureLocalLoc)("@this")._2._2
        val LP1 = lset_this.foldLeft(LPBot)((lpset, l) => {
          val lset_child = Helper.Proto(h, l, AbsString.alpha("childNodes"))._2
          val LP1_1 = AccessHelper.Proto_use(h, l, AbsString.alpha("childNodes"))
          lset_child.foldLeft(lpset++LP1_1)((lpset1, ll) => {
            lpset1 ++ AccessHelper.Proto_use(h, ll, AbsString.alpha("length"))
          })
        })
        LP1 + (SinglePureLocalLoc, "@return") + (SinglePureLocalLoc, "@this")
      })),
    // cloneNode reads every property of the source node plus the oldified
    // addresses and the 'deep' argument.
    ("DOMNode.cloneNode" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        val lset_this = h(SinglePureLocalLoc)("@this")._2._2
        val lset_env = h(SinglePureLocalLoc)("@env")._2._2
        val set_addr = lset_env.foldLeft[Set[Address]](Set())((a, l) => a + locToAddr(l))
        //val l_r = addrToLoc(addr1, Recent)
        //val LP1 = AccessHelper.Oldify_def(h, ctx, addr1)
        val LP1 = set_addr.foldLeft(LPBot)((lp, a) =>
          lp ++ AccessHelper.Oldify_use(h, ctx, cfg.getAPIAddress((fid, a), 0)))
        val LP2 = lset_this.foldLeft(LPBot)((lpset, l) => {
          val prop_set = h(l).getProps
          prop_set.foldLeft(lpset)((lpset1, prop) => lpset1 + (l, prop))
        })
        LP1 ++ LP2 ++ getArgValue_use(h, ctx, args, "0") +
          (SinglePureLocalLoc, "@return") + (SinglePureLocalLoc, "@this")
      })),
    ("DOMNode.normalize" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        LPSet((SinglePureLocalLoc, "@return"))
      })),
    ("DOMNode.isSupported" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        getArgValue_use(h, ctx, args, "0") ++ getArgValue_use(h, ctx, args, "1") + (SinglePureLocalLoc, "@return")
      })),
    ("DOMNode.hasAttributes" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        LPSet((SinglePureLocalLoc, "@return"))
      })),
    ("DOMNode.compareDocumentPosition" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        getArgValue_use(h, ctx, args, "0") + (SinglePureLocalLoc, "@return")
      })),
    ("DOMNode.isSameNode" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        getArgValue_use(h, ctx, args, "0") + (SinglePureLocalLoc, "@return") + (SinglePureLocalLoc, "@this")
      })),
    ("DOMNode.lookupPrefix" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        getArgValue_use(h, ctx, args, "0") + (SinglePureLocalLoc, "@return")
      })),
    ("DOMNode.isDefaultNamespace" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        getArgValue_use(h, ctx, args, "0") + (SinglePureLocalLoc, "@return")
      })),
    ("DOMNode.lookupNamespaceURI" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        getArgValue_use(h, ctx, args, "0") + (SinglePureLocalLoc, "@return")
      })),
    ("DOMNode.isEqualNode" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        getArgValue_use(h, ctx, args, "0") + (SinglePureLocalLoc, "@return")
      })),
    ("DOMNode.getFeature" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        getArgValue_use(h, ctx, args, "0") ++ getArgValue_use(h, ctx, args, "1") + (SinglePureLocalLoc, "@return")
      })),
    ("DOMNode.setUserData" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        getArgValue_use(h, ctx, args, "0") ++ getArgValue_use(h, ctx, args, "1") ++ getArgValue_use(h, ctx, args, "2") + (SinglePureLocalLoc, "@return")
      })),
    ("DOMNode.getUserData" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        getArgValue_use(h, ctx, args, "0") + (SinglePureLocalLoc, "@return")
      }))
  )
}
/* instance */
// Allocates a fresh "recent" abstract location for each DOM Node instance.
override def getInstance(cfg: CFG): Option[Loc] = {
  val instanceLoc = newRecentLoc()
  Some(instanceLoc)
}
/* list of properties in the instance object */
/**
 * Builds the abstract property list for a concrete DOM `Node` instance,
 * covering the Node attributes modeled by the analyzer (DOM Levels 1-3).
 * DOM getters that return `null` are modeled as the empty string.
 */
override def getInsList(node: Node): List[(String, PropValue)] = {
  // DOM getters may return null; the abstract model substitutes "".
  def strOrEmpty(s: String): String = if (s != null) s else ""
  List(
    // DOM Level 1
    ("nodeName", PropValue(ObjectValue(AbsString.alpha(strOrEmpty(node.getNodeName)), F, T, T))),
    ("nodeValue", PropValue(ObjectValue(AbsString.alpha(strOrEmpty(node.getNodeValue)), T, T, T))),
    ("nodeType", PropValue(ObjectValue(AbsNumber.alpha(node.getNodeType), F, T, T))),
    // Introduced in DOM Level 2
    ("namespaceURI", PropValue(ObjectValue(AbsString.alpha(strOrEmpty(node.getNamespaceURI)), F, T, T))),
    ("prefix", PropValue(ObjectValue(AbsString.alpha(strOrEmpty(node.getPrefix)), T, T, T))),
    ("localName", PropValue(ObjectValue(AbsString.alpha(strOrEmpty(node.getLocalName)), F, T, T))),
    // Introduced in DOM Level 3
    // (removed an unused `baseURI` local whose only reference was this
    //  commented-out entry)
    // ("baseURI", PropValue(ObjectValue(AbsString.alpha(strOrEmpty(node.getBaseURI)), F, T, T))),
    ("textContent", PropValue(ObjectValue(AbsString.alpha(strOrEmpty(node.getTextContent)), T, T, T))),
    ("ownerDocument", PropValue(ObjectValue(Value(HTMLDocument.GlobalDocumentLoc), F, T, T)))
  )
  // 'baseURI' in DOM Level 3
}
// Fully abstract variant: every string/number property is mapped to its top
// abstract value (StrTop/NumTop) instead of a concrete constant, and the
// collection-valued properties point at pre-allocated abstract locations.
// NOTE(review): the last key reads "textContext" — the DOM property is
// "textContent"; verify whether the misspelling is intentional.
def getInsList2(): List[(String, AbsProperty)] = List(
  ("nodeName", AbsConstValue(PropValue(ObjectValue(Value(StrTop), F, T, T)))),
  ("nodeValue", AbsConstValue(PropValue(ObjectValue(Value(StrTop), F, T, T)))),
  ("nodeType", AbsConstValue(PropValue(ObjectValue(Value(NumTop), F, T, T)))),
  ("namespaceURI", AbsConstValue(PropValue(ObjectValue(Value(StrTop), F, T, T)))),
  ("childNodes", AbsConstValue(PropValue(ObjectValue(Value(DOMNodeList.loc_ins2), F, T, T)))),
  ("attributes", AbsConstValue(PropValue(ObjectValue(Value(DOMNamedNodeMap.loc_ins2), F, T, T)))),
  ("ownerDocument", AbsConstValue(PropValue(ObjectValue(Value(HTMLDocument.loc_ins), F, T, T)))),
  ("prefix", AbsConstValue(PropValue(ObjectValue(Value(StrTop), T, T, T)))),
  ("localName", AbsConstValue(PropValue(ObjectValue(Value(StrTop), F, T, T)))),
  ("textContext", AbsConstValue(PropValue(ObjectValue(Value(StrTop), T, T, T))))
)
// Variant that supplies an explicit owner document.
// NOTE(review): getInsList(node) already emits an "ownerDocument" entry, so
// the resulting list carries the key twice (the appended one last).  Verify
// that consumers of this list take the last occurrence when building objects.
def getInsList(node: Node, ownerDocument: PropValue): List[(String, PropValue)] = getInsList(node) :+
  ("ownerDocument", ownerDocument)
/** Assembles the property list for a node from explicitly supplied abstract
  * values, one per DOM Node property (DOM Levels 1–3, without attributes). */
def getInsList(nodeName: PropValue, nodeValue: PropValue, nodeType: PropValue, parentNode: PropValue, childNodes: PropValue,
               firstChild: PropValue, lastChild: PropValue, previousSibling: PropValue, nextSibling: PropValue , ownerDocument: PropValue,
               namespaceURI: PropValue, prefix: PropValue, localName: PropValue, textContent: PropValue) : List[(String, PropValue)] =
  List(
    "nodeName" -> nodeName,
    "nodeValue" -> nodeValue,
    "nodeType" -> nodeType,
    "parentNode" -> parentNode,
    "childNodes" -> childNodes,
    "firstChild" -> firstChild,
    "lastChild" -> lastChild,
    "previousSibling" -> previousSibling,
    "nextSibling" -> nextSibling,
    "ownerDocument" -> ownerDocument,
    "namespaceURI" -> namespaceURI,
    "prefix" -> prefix,
    "localName" -> localName,
    "textContent" -> textContent)
// TODO: 'baseURI' in DOM Level 3
/** Same as the fourteen-argument overload, but additionally records the
  * node's "attributes" map as the final entry. */
def getInsList(nodeName: PropValue, nodeValue: PropValue, nodeType: PropValue, parentNode: PropValue, childNodes: PropValue,
               firstChild: PropValue, lastChild: PropValue, previousSibling: PropValue, nextSibling: PropValue , ownerDocument: PropValue,
               namespaceURI: PropValue, prefix: PropValue, localName: PropValue, textContent: PropValue, attributes: PropValue) : List[(String, PropValue)] =
  List(
    "nodeName" -> nodeName,
    "nodeValue" -> nodeValue,
    "nodeType" -> nodeType,
    "parentNode" -> parentNode,
    "childNodes" -> childNodes,
    "firstChild" -> firstChild,
    "lastChild" -> lastChild,
    "previousSibling" -> previousSibling,
    "nextSibling" -> nextSibling,
    "ownerDocument" -> ownerDocument,
    "namespaceURI" -> namespaceURI,
    "prefix" -> prefix,
    "localName" -> localName,
    "textContent" -> textContent,
    "attributes" -> attributes
  )
// TODO: 'baseURI' in DOM Level 3
}
| darkrsw/safe | src/main/scala/kr/ac/kaist/jsaf/analysis/typing/models/DOMCore/DOMNode.scala | Scala | bsd-3-clause | 75,659 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.sources
import org.apache.spark.sql.{AnalysisException, SQLContext}
import org.apache.spark.sql.test.SharedSQLContext
import org.apache.spark.sql.types._
// please note that the META-INF/services had to be modified for the test directory for this to work
// Verifies how Spark resolves data sources by their registered short names
// when several providers claim the same alias.  The Fake* providers below are
// registered through META-INF/services (see the note above the class).
class DDLSourceLoadSuite extends DataSourceTest with SharedSQLContext {

  // FakeSourceOne and FakeSourceTwo both register "Fluet da Bomb", so
  // resolution must fail with an ambiguity error.
  test("data sources with the same name - internal data sources") {
    val e = intercept[AnalysisException] {
      spark.read.format("Fluet da Bomb").load()
    }
    assert(e.getMessage.contains("Multiple sources found for Fluet da Bomb"))
  }

  // "datasource" resolves uniquely (FakeSourceFour), yielding its long schema.
  test("data sources with the same name - internal data source/external data source") {
    assert(spark.read.format("datasource").load().schema ==
      StructType(Seq(StructField("longType", LongType, nullable = false))))
  }

  // NOTE(review): "Fake external source" is presumably registered by a
  // test-only external jar/service file not visible here — confirm.
  test("data sources with the same name - external data sources") {
    val e = intercept[AnalysisException] {
      spark.read.format("Fake external source").load()
    }
    assert(e.getMessage.contains("Multiple sources found for Fake external source"))
  }

  // A unique alias ("gathering quorum", FakeSourceThree) loads normally.
  test("load data source from format alias") {
    assert(spark.read.format("gathering quorum").load().schema ==
      StructType(Seq(StructField("stringType", StringType, nullable = false))))
  }

  // Using the fully-qualified class name bypasses alias ambiguity entirely.
  test("specify full classname with duplicate formats") {
    assert(spark.read.format("org.apache.spark.sql.sources.FakeSourceOne")
      .load().schema == StructType(Seq(StructField("stringType", StringType, nullable = false))))
  }
}
/** Test provider registered as "Fluet da Bomb"; the alias deliberately
  * collides with FakeSourceTwo to exercise ambiguous-source detection. */
class FakeSourceOne extends RelationProvider with DataSourceRegister {

  def shortName(): String = "Fluet da Bomb"

  override def createRelation(cont: SQLContext, param: Map[String, String]): BaseRelation = {
    // Single non-nullable string column.
    val relationSchema = StructType(Seq(StructField("stringType", StringType, nullable = false)))
    new BaseRelation {
      override def sqlContext: SQLContext = cont
      override def schema: StructType = relationSchema
    }
  }
}
/** Second provider under the same alias as FakeSourceOne ("Fluet da Bomb"),
  * but with an integer schema, so the two are distinguishable if one wins. */
class FakeSourceTwo extends RelationProvider with DataSourceRegister {

  def shortName(): String = "Fluet da Bomb"

  override def createRelation(cont: SQLContext, param: Map[String, String]): BaseRelation = {
    // Single non-nullable integer column.
    val relationSchema = StructType(Seq(StructField("integerType", IntegerType, nullable = false)))
    new BaseRelation {
      override def sqlContext: SQLContext = cont
      override def schema: StructType = relationSchema
    }
  }
}
/** Provider with a unique alias ("gathering quorum"); used by the
  * format-alias happy-path test. */
class FakeSourceThree extends RelationProvider with DataSourceRegister {

  def shortName(): String = "gathering quorum"

  override def createRelation(cont: SQLContext, param: Map[String, String]): BaseRelation = {
    // Single non-nullable string column.
    val relationSchema = StructType(Seq(StructField("stringType", StringType, nullable = false)))
    new BaseRelation {
      override def sqlContext: SQLContext = cont
      override def schema: StructType = relationSchema
    }
  }
}
/** Provider with the unique alias "datasource" and a long-typed schema. */
class FakeSourceFour extends RelationProvider with DataSourceRegister {

  def shortName(): String = "datasource"

  override def createRelation(cont: SQLContext, param: Map[String, String]): BaseRelation = {
    // Single non-nullable long column.
    val relationSchema = StructType(Seq(StructField("longType", LongType, nullable = false)))
    new BaseRelation {
      override def sqlContext: SQLContext = cont
      override def schema: StructType = relationSchema
    }
  }
}
| bravo-zhang/spark | sql/core/src/test/scala/org/apache/spark/sql/sources/DDLSourceLoadSuite.scala | Scala | apache-2.0 | 3,986 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.s2graph.core.models
import org.apache.s2graph.core.TestCommonWithModels
import org.apache.s2graph.core.mysqls.Label
import org.scalatest.{BeforeAndAfterAll, FunSuite, Matchers}
// Smoke test for the Label model: looks up a label by name and walks its
// associated metadata (indices, services, columns), printing each for
// inspection.
class ModelTest extends FunSuite with Matchers with TestCommonWithModels with BeforeAndAfterAll {

  override def beforeAll(): Unit = {
    initTests()
  }

  override def afterAll(): Unit = {
    graph.shutdown()
  }

  test("test Label.findByName") {
    // Bypass the cache so the lookup hits the backing store.
    val labelOpt = Label.findByName(labelName, useCache = false)
    println(labelOpt)
    labelOpt.isDefined shouldBe true

    val indices = labelOpt.get.indices
    indices.size > 0 shouldBe true
    println(indices)

    val defaultIndexOpt = labelOpt.get.defaultIndex
    println(defaultIndexOpt)
    defaultIndexOpt.isDefined shouldBe true

    val metas = labelOpt.get.metaProps
    println(metas)
    metas.size > 0 shouldBe true

    val srcService = labelOpt.get.srcService
    println(srcService)
    val tgtService = labelOpt.get.tgtService
    println(tgtService)
    val service = labelOpt.get.service
    println(service)

    // FIX: these previously re-read srcService/tgtService (copy-paste bug);
    // the intent — per the variable names — is the source/target columns.
    val srcColumn = labelOpt.get.srcColumn
    println(srcColumn)
    val tgtColumn = labelOpt.get.tgtColumn
    println(tgtColumn)
  }
}
| daewon/incubator-s2graph | s2core/src/test/scala/org/apache/s2graph/core/models/ModelTest.scala | Scala | apache-2.0 | 2,034 |
/*
* Copyright (C) 2011 Romain Reuillon
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openmole.core.tools.io
import java.io.OutputStream
import scala.collection.mutable.ListBuffer
/** An OutputStream that records every written byte value in memory.
  *
  * Each call to `write(b)` appends the raw Int argument to `buffer`
  * in write order; nothing is flushed anywhere else.
  */
class BufferOutputStream extends OutputStream {

  /** Accumulated byte values, in the order they were written. */
  val buffer = new ListBuffer[Int]

  override def write(b: Int): Unit = {
    buffer.append(b)
  }
}
| ISCPIF/PSEExperiments | openmole-src/openmole/core/org.openmole.core.tools/src/main/scala/org/openmole/core/tools/io/BufferOutputStream.scala | Scala | agpl-3.0 | 952 |
package com.webtrends.harness.component.kafka
import java.nio.charset.StandardCharsets
import akka.actor.ActorSystem
import akka.testkit.TestProbe
import com.webtrends.harness.component.kafka.actor.OffsetManager
import com.webtrends.harness.component.kafka.config.KafkaTestConfig
import org.junit.runner.RunWith
import org.slf4j.{Logger, LoggerFactory}
import org.specs2.mutable.SpecificationLike
import org.specs2.runner.JUnitRunner
import org.specs2.time.NoTimeConversions
import scala.concurrent.duration._
//@RunWith(classOf[JUnitRunner])
// Integration-style spec for OffsetManager: stores/retrieves offset data
// through an actor backed by a local ZooKeeper test server.
// The fixture fields below run as class-body initialization, so their order
// matters (ZK must be up before the actor is exercised).
class OffsetManagerSpec extends SpecificationLike with NoTimeConversions {
  import OffsetManager._

  protected final val log:Logger = LoggerFactory.getLogger(getClass)
  val c = KafkaTestConfig.config
  implicit val system = ActorSystem("test", c)
  implicit val timeout = 10 seconds
  val helper = TestUtil.ZkHelper()
  val zkServer = helper.zkServer
  val path = "/offsetTest/lab/H"
  val offsetActor = system.actorOf(OffsetManager.props(path))
  val probe = TestProbe()
  val myPath = "someNode"
  val data = "something"

  helper.ensureZkAvailable()
  // NOTE(review): fixed sleep to let ZK settle — a readiness poll would be
  // less flaky, but left as-is to preserve behavior.
  Thread.sleep(2000)

  //Disable because this fails during a mvn build intermittently
  args(skipAll = false, sequential = true)

  "Offset Manager " should {
    // Writing returns the stored payload, echoed back as the Left side.
    "store some data " in {
      probe.send(offsetActor, StoreOffsetData("topic1", "cluster", 0, OffsetData(data.getBytes(StandardCharsets.UTF_8), 5L)))
      val result = probe.receiveOne(timeout).asInstanceOf[OffsetDataResponse]
      log.info("Result {}", result)
      result.data must beLeft
      result.data.left.get.asString() must beEqualTo(data)
    }

    // Reading back the same (topic, cluster, partition) yields the payload.
    "be able to get some data" in {
      probe.send(offsetActor, GetOffsetData("topic1", "cluster", 0))
      val result = probe.receiveOne(timeout).asInstanceOf[OffsetDataResponse]
      result.data must beLeft
      result.data.left.get.asString() must beEqualTo(data)
    }

    // A missing node must yield an empty payload rather than a failure.
    "be successful on noNodeException " in {
      probe.send(offsetActor, GetOffsetData("topic2", "cluster2", 0))
      val r = probe.receiveOne(timeout).asInstanceOf[OffsetDataResponse]
      r.data must beLeft
      r.data.left.get.data must be empty
    }
  }

  // Teardown: stop the actor, the embedded ZK server, and the actor system.
  step {
    system.stop(offsetActor)
    zkServer.stop()
    system.terminate()
  }
}
| malibuworkcrew/wookiee-kafka | src/test/scala/com/webtrends/harness/component/kafka/OffsetManagerSpec.scala | Scala | apache-2.0 | 2,273 |
package org.concurrency.ch7
import scala.concurrent.stm._
// A transactional pair: both components are ScalaSTM Refs, so every read and
// write must run inside a transaction (the implicit InTxn parameter).
class TPair[P,Q](pinit:P, qinit:Q) {
  val p = Ref[P](pinit)
  val q = Ref[Q](qinit)

  // Transactional getters; p()/q() dereference the Refs within txn.
  def first(implicit txn: InTxn): P = p()
  // Setter that also returns the freshly written value (re-read within txn).
  def first_= (x:P)(implicit txn: InTxn): P = { p() = x; p() }
  def second(implicit txn: InTxn): Q = q()
  def second_=(x: Q)(implicit txn: InTxn): Q = { q() = x; q() }

  // Only callable when P and Q are the same type: the implicit evidence
  // P =:= Q guarantees it, which makes the asInstanceOf casts safe.
  def swap()(implicit e: P =:= Q, txn:InTxn):Unit = {
    val r = q().asInstanceOf[P]
    q() = p().asInstanceOf[Q]
    p() = r
  }

  // Opens its own atomic block so it can be called outside a transaction;
  // reads of p and q are therefore mutually consistent.
  override def toString: String = {
    atomic {
      implicit tx => {
        s"(${p()},${q()})"
      }
    }
  }
}
// Demo driver for TPair: exercises the setters and swap inside atomic blocks.
object TPairApp extends App {
  atomic {
    implicit tx => {
      // Heterogeneous pair (String, Int): setters work, but swap does not
      // compile because no P =:= Q evidence exists (see the block below).
      val p = new TPair("1", 1)
      println(p)
      p.first = "2"
      p.second = 2
      println(p)
      //compile error
      //      println({
      //        p.swap
      //        p
      //      })
    }
  }

  atomic {
    implicit tx => {
      // Homogeneous pair (Int, Int): swap is available and prints "(2,1)".
      val q = new TPair(1, 2)
      println({
        q.swap; q
      })
    }
  }
}
| marcos-sb/concurrent-programming-scala | src/main/scala-2.11/org/concurrency/ch7/TPair.scala | Scala | apache-2.0 | 990 |
package wandou.math.algebra
final class EigenvalueDecomposition(V: Array[Array[Double]]) extends Serializable {
if (V.length != V(0).length) {
throw new IllegalArgumentException("Matrix must be square")
}
/** Row and column dimension (square matrix). */
private val n = V.length
/** Arrays for internal storage of eigenvalues. */
private val d = new Array[Double](n)
private val e = new Array[Double](n)
/** Array for internal storage of nonsymmetric Hessenberg form. */
private var H: Array[Array[Double]] = _
/** Working storage for nonsymmetric algorithm. */
private var ort: Array[Double] = _
// Complex scalar division.
private var cdivr: Double = _
private var cdivi: Double = _
if (Matrix.isSymmetric(V)) {
// Tridiagonalize.
tred2()
// Diagonalize.
tql2()
} else {
H = Array.ofDim[Double](n, n)
ort = Array.ofDim[Double](n)
var j = 0
while (j < n) {
var i = 0
while (i < n) {
H(i)(j) = V(i)(j)
i += 1
}
j += 1
}
// Reduce to Hessenberg form.
orthes()
// Reduce Hessenberg to real Schur form.
hqr2()
}
/** Auxiliary constructor: decomposes a Matrix by first copying it into a
  * plain 2-D Double array (the primary constructor's representation). */
def this(A: Matrix) = {
  this(Matrix.toArray(A))
}
/** Complex scalar division: (xr + i*xi) / (yr + i*yi).
  *
  * The quotient's real and imaginary parts are left in the fields `cdivr`
  * and `cdivi` rather than returned.  Branching on the larger of |yr| and
  * |yi| avoids overflow/underflow in the intermediate products (the classic
  * scaled-division formulation).
  */
private def cdiv(xr: Double, xi: Double, yr: Double, yi: Double) {
  var r = 0.0
  var d = 0.0
  if (math.abs(yr) > math.abs(yi)) {
    r = yi / yr
    d = yr + r * yi
    cdivr = (xr + r * xi) / d
    cdivi = (xi - r * xr) / d
  } else {
    r = yr / yi
    d = yi + r * yr
    cdivr = (r * xr + xi) / d
    cdivi = (r * xi - xr) / d
  }
}
/**
 * Returns the block diagonal eigenvalue matrix, <tt>D</tt>.
 *
 * Real eigenvalues appear on the diagonal (from `d`); each complex
 * conjugate pair contributes a 2x2 block whose off-diagonal entries carry
 * the imaginary parts (from `e`): a positive e(i) goes on the
 * superdiagonal, a negative one on the subdiagonal.
 *
 * @return <tt>D</tt>
 */
def getD: Matrix = {
  val D = Array.ofDim[Double](n, n)
  var i = 0
  while (i < n) {
    var j = 0
    while (j < n) {
      D(i)(j) = 0.0
      j += 1
    }
    D(i)(i) = d(i)
    if (e(i) > 0) {
      D(i)(i + 1) = e(i)
    } else if (e(i) < 0) {
      D(i)(i - 1) = e(i)
    }
    i += 1
  }
  DenseMatrix(D)
}
/**
 * Returns the imaginary parts of the eigenvalues.
 *
 * @return imag(diag(D))
 */
def getImagEigenvalues: Vector = {
  // NOTE(review): DenseVector(e) may wrap the internal array without
  // copying — confirm before mutating the returned vector.
  DenseVector(e)
}
/**
 * Returns the real parts of the eigenvalues.
 *
 * @return real(diag(D))
 */
def getRealEigenvalues: Vector = {
  // NOTE(review): DenseVector(d) may wrap the internal array without
  // copying — confirm before mutating the returned vector.
  DenseVector(d)
}
/**
 * Returns the eigenvector matrix, <tt>V</tt>
 *
 * @return <tt>V</tt>
 */
def getV: Matrix = {
  // NOTE(review): V is the constructor argument mutated in place by the
  // decomposition; DenseMatrix(V) may share that storage — confirm.
  DenseMatrix(V)
}
/** Nonsymmetric reduction from Hessenberg to real Schur form. */
private def hqr2() {
// This is derived from the Algol procedure hqr2,
// by Martin and Wilkinson, Handbook for Auto. Comp.,
// Vol.ii-Linear Algebra, and the corresponding
// Fortran subroutine in EISPACK.
// Initialize
var nn = this.n
var n = nn - 1
val low = 0
val high = nn - 1
val eps = math.pow(2.0, -52.0)
// Store roots isolated by balanc and compute matrix norm
var norm = 0.0
var i = 0
while (i < nn) {
if (i < low || i > high) {
d(i) = H(i)(i)
e(i) = 0.0
}
var j = math.max(i - 1, 0)
while (j < nn) {
norm += math.abs(H(i)(j))
j += 1
}
i += 1
}
// Outer loop over eigenvalue index
var iter = 0
var y = 0.0
var x = 0.0
var w = 0.0
var z = 0.0
var s = 0.0
var r = 0.0
var q = 0.0
var p = 0.0
var exshift = 0.0
while (n >= low) {
// Look for single small sub-diagonal element
var l = n
var continue = true
while (l > low && continue) {
s = math.abs(H(l - 1)(l - 1)) + math.abs(H(l)(l))
if (s == 0.0) {
s = norm
}
if (math.abs(H(l)(l - 1)) < eps * s) {
continue = false
} else {
l -= 1
}
}
// Check for convergence
// One root found
if (l == n) {
H(n)(n) += exshift
d(n) = H(n)(n)
e(n) = 0.0
n -= 1
iter = 0
// Two roots found
} else if (l == n - 1) {
w = H(n)(n - 1) * H(n - 1)(n)
p = (H(n - 1)(n - 1) - H(n)(n)) / 2.0
q = p * p + w
z = math.sqrt(math.abs(q))
H(n)(n) += exshift
H(n - 1)(n - 1) += exshift
x = H(n)(n)
// Real pair
if (q >= 0) {
if (p >= 0) {
z = p + z
} else {
z = p - z
}
d(n - 1) = x + z
d(n) = d(n - 1)
if (z != 0.0) {
d(n) = x - w / z
}
e(n - 1) = 0.0
e(n) = 0.0
x = H(n)(n - 1)
s = math.abs(x) + math.abs(z)
p = x / s
q = z / s
r = math.sqrt(p * p + q * q)
p /= r
q /= r
// Row modification
var j = n - 1
while (j < nn) {
z = H(n - 1)(j)
H(n - 1)(j) = q * z + p * H(n)(j)
H(n)(j) = q * H(n)(j) - p * z
j += 1
}
// Column modification
var i = 0
while (i <= n) {
z = H(i)(n - 1)
H(i)(n - 1) = q * z + p * H(i)(n)
H(i)(n) = q * H(i)(n) - p * z
i += 1
}
// Accumulate transformations
i = low
while (i <= high) {
z = V(i)(n - 1)
V(i)(n - 1) = q * z + p * V(i)(n)
V(i)(n) = q * V(i)(n) - p * z
i += 1
}
// Complex pair
} else {
d(n - 1) = x + p
d(n) = x + p
e(n - 1) = z
e(n) = -z
}
n -= 2
iter = 0
// No convergence yet
} else {
// Form shift
x = H(n)(n)
y = 0.0
w = 0.0
if (l < n) {
y = H(n - 1)(n - 1)
w = H(n)(n - 1) * H(n - 1)(n)
}
// Wilkinson's original ad hoc shift
if (iter == 10) {
exshift += x
var i = low
while (i <= n) {
H(i)(i) -= x
i += 1
}
s = math.abs(H(n)(n - 1)) + math.abs(H(n - 1)(n - 2))
x = 0.75 * s
y = 0.75 * s
w = -0.4375 * s * s
}
// MATLAB's new ad hoc shift
if (iter == 30) {
s = (y - x) / 2.0
s = s * s + w
if (s > 0) {
s = math.sqrt(s)
if (y < x) {
s = -s
}
s = x - w / ((y - x) / 2.0 + s)
var i = low
while (i <= n) {
H(i)(i) -= s
i += 1
}
exshift += s
x = 0.964
y = 0.964
w = 0.964
}
}
iter += 1 // (Could check iteration count here.)
// Look for two consecutive small sub-diagonal elements
var m = n - 2
var continue = true
while (m >= l && continue) {
z = H(m)(m)
r = x - z
s = y - z
p = (r * s - w) / H(m + 1)(m) + H(m)(m + 1)
q = H(m + 1)(m + 1) - z - r - s
r = H(m + 2)(m + 1)
s = math.abs(p) + math.abs(q) + math.abs(r)
p /= s
q /= s
r /= s
if (m == l) {
continue = false
} else if (math.abs(H(m)(m - 1)) * (math.abs(q) + math.abs(r)) <
eps * math.abs(p) * (math.abs(H(m - 1)(m - 1)) + math.abs(z) + math.abs(H(m + 1)(m + 1)))) {
continue = false
} else {
m -= 1
}
}
var i = m + 2
while (i <= n) {
H(i)(i - 2) = 0.0
if (i > m + 2) {
H(i)(i - 3) = 0.0
}
i += 1
}
// Double QR step involving rows l:n and columns m:n
var k = m
continue = true
while (k <= n - 1 && continue) {
val notlast = k != n - 1
if (k != m) {
p = H(k)(k - 1)
q = H(k + 1)(k - 1)
r = if (notlast) H(k + 2)(k - 1) else 0.0
x = math.abs(p) + math.abs(q) + math.abs(r)
if (x != 0.0) {
p /= x
q /= x
r /= x
}
}
if (x == 0.0) {
continue = false
} else {
s = math.sqrt(p * p + q * q + r * r)
if (p < 0) {
s = -s
}
if (s != 0) {
if (k != m) {
H(k)(k - 1) = -s * x
} else if (l != m) {
H(k)(k - 1) = -H(k)(k - 1)
}
p += s
x = p / s
y = q / s
z = r / s
q /= p
r /= p
// Row modification
var j = k
while (j < nn) {
p = H(k)(j) + q * H(k + 1)(j)
if (notlast) {
p += r * H(k + 2)(j)
H(k + 2)(j) -= p * z
}
H(k)(j) -= p * x
H(k + 1)(j) -= p * y
j += 1
}
// Column modification
var i = 0
while (i <= math.min(n, k + 3)) {
p = x * H(i)(k) + y * H(i)(k + 1)
if (notlast) {
p += z * H(i)(k + 2)
H(i)(k + 2) -= p * r
}
H(i)(k) -= p
H(i)(k + 1) -= p * q
i += 1
}
// Accumulate transformations
i = low
while (i <= high) {
p = x * V(i)(k) + y * V(i)(k + 1)
if (notlast) {
p += z * V(i)(k + 2)
V(i)(k + 2) -= p * r
}
V(i)(k) -= p
V(i)(k + 1) -= p * q
i += 1
}
} // (s != 0)
}
k += 1
} // k loop
} // check convergence
} // while (n >= low)
// Backsubstitute to find vectors of upper triangular form
if (norm == 0.0) {
return
}
n = nn - 1
while (n >= 0) {
p = d(n)
q = e(n)
// Real vector
var t = 0.0
if (q == 0) {
var l = n
H(n)(n) = 1.0
var i = n - 1
while (i >= 0) {
w = H(i)(i) - p
r = 0.0
var j = l
while (j <= n) {
r += H(i)(j) * H(j)(n)
j += 1
}
if (e(i) < 0.0) {
z = w
s = r
} else {
l = i
if (e(i) == 0.0) {
if (w != 0.0) {
H(i)(n) = -r / w
} else {
H(i)(n) = -r / (eps * norm)
}
// Solve real equations
} else {
x = H(i)(i + 1)
y = H(i + 1)(i)
q = (d(i) - p) * (d(i) - p) + e(i) * e(i)
t = (x * s - z * r) / q
H(i)(n) = t
if (math.abs(x) > math.abs(z)) {
H(i + 1)(n) = (-r - w * t) / x
} else {
H(i + 1)(n) = (-s - y * t) / z
}
}
// Overflow control
t = math.abs(H(i)(n))
if (eps * t * t > 1) {
var j = i
while (j <= n) {
H(j)(n) /= t
j += 1
}
}
}
i -= 1
}
// Complex vector
} else if (q < 0) {
var l = n - 1
// Last vector component imaginary so matrix is triangular
if (math.abs(H(n)(n - 1)) > math.abs(H(n - 1)(n))) {
H(n - 1)(n - 1) = q / H(n)(n - 1)
H(n - 1)(n) = -(H(n)(n) - p) / H(n)(n - 1)
} else {
cdiv(0.0, -H(n - 1)(n), H(n - 1)(n - 1) - p, q)
H(n - 1)(n - 1) = cdivr
H(n - 1)(n) = cdivi
}
H(n)(n - 1) = 0.0
H(n)(n) = 1.0
var i = n - 2
while (i >= 0) {
var ra = 0.0
var sa = 0.0
var j = l
while (j <= n) {
ra += H(i)(j) * H(j)(n - 1)
sa += H(i)(j) * H(j)(n)
j += 1
}
w = H(i)(i) - p
if (e(i) < 0.0) {
z = w
r = ra
s = sa
} else {
l = i
if (e(i) == 0) {
cdiv(-ra, -sa, w, q)
H(i)(n - 1) = cdivr
H(i)(n) = cdivi
} else {
// Solve complex equations
x = H(i)(i + 1)
y = H(i + 1)(i)
var vr = (d(i) - p) * (d(i) - p) + e(i) * e(i) - q * q
var vi = (d(i) - p) * 2.0 * q
if (vr == 0.0 && vi == 0.0) {
vr = eps * norm * (math.abs(w) + math.abs(q) + math.abs(x) + math.abs(y) + math.abs(z))
}
cdiv(x * r - z * ra + q * sa, x * s - z * sa - q * ra, vr, vi)
H(i)(n - 1) = cdivr
H(i)(n) = cdivi
if (math.abs(x) > math.abs(z) + math.abs(q)) {
H(i + 1)(n - 1) = (-ra - w * H(i)(n - 1) + q * H(i)(n)) / x
H(i + 1)(n) = (-sa - w * H(i)(n) - q * H(i)(n - 1)) / x
} else {
cdiv(-r - y * H(i)(n - 1), -s - y * H(i)(n), z, q)
H(i + 1)(n - 1) = cdivr
H(i + 1)(n) = cdivi
}
}
// Overflow control
t = math.max(math.abs(H(i)(n - 1)), math.abs(H(i)(n)))
if (eps * t * t > 1) {
var j = i
while (j <= n) {
H(j)(n - 1) /= t
H(j)(n) /= t
j += 1
}
}
}
i -= 1
}
}
n -= 1
}
// Vectors of isolated roots
i = 0
while (i < nn) {
if (i < low || i > high) {
System.arraycopy(H(i), i, V(i), i, nn - i)
}
i += 1
}
// Back transformation to get eigenvectors of original matrix
var j = nn - 1
while (j >= low) {
var i = low
while (i <= high) {
z = 0.0
var k = low
while (k <= math.min(j, high)) {
z += V(i)(k) * H(k)(j)
k += 1
}
V(i)(j) = z
i += 1
}
j -= 1
}
}
/** Nonsymmetric reduction to Hessenberg form. */
private def orthes() {
// This is derived from the Algol procedures orthes and ortran,
// by Martin and Wilkinson, Handbook for Auto. Comp.,
// Vol.ii-Linear Algebra, and the corresponding
// Fortran subroutines in EISPACK.
val low = 0
val high = n - 1
var m = low + 1
while (m <= high - 1) {
// Scale column.
var scale = 0.0
var i = m
while (i <= high) {
scale += math.abs(H(i)(m - 1))
i += 1
}
if (scale != 0.0) {
// Compute Householder transformation.
var h = 0.0
var i = high
while (i >= m) {
ort(i) = H(i)(m - 1) / scale
h += ort(i) * ort(i)
i -= 1
}
var g = math.sqrt(h)
if (ort(m) > 0) {
g = -g
}
h -= ort(m) * g
ort(m) -= g
// Apply Householder similarity transformation
// H = (I-u*u'/h)*H*(I-u*u')/h)
var j = m
while (j < n) {
var f = 0.0
var i = high
while (i >= m) {
f += ort(i) * H(i)(j)
i -= 1
}
f /= h
i = m
while (i <= high) {
H(i)(j) -= f * ort(i)
i += 1
}
j += 1
}
i = 0
while (i <= high) {
var f = 0.0
var j = high
while (j >= m) {
f += ort(j) * H(i)(j)
j -= 1
}
f /= h
j = m
while (j <= high) {
H(i)(j) -= f * ort(j)
j += 1
}
i += 1
}
ort(m) = scale * ort(m)
H(m)(m - 1) = scale * g
}
m += 1
}
// Accumulate transformations (Algol's ortran).
var i = 0
while (i < n) {
var j = 0
while (j < n) {
V(i)(j) = if (i == j) 1.0 else 0.0
j += 1
}
i += 1
}
m = high - 1
while (m >= low + 1) {
if (H(m)(m - 1) != 0.0) {
var i = m + 1
while (i <= high) {
ort(i) = H(i)(m - 1)
i += 1
}
var j = m
while (j <= high) {
var g = 0.0
var i = m
while (i <= high) {
g += ort(i) * V(i)(j)
i += 1
}
// Double division avoids possible underflow
g = g / ort(m) / H(m)(m - 1)
i = m
while (i <= high) {
V(i)(j) += g * ort(i)
i += 1
}
j += 1
}
}
m -= 1
}
}
/**
* Returns a String with (propertyName, propertyValue) pairs. Useful for debugging or to quickly get the rough
* picture. For example,
* <pre>
* rank : 3
* trace : 0
* </pre>
*/
override def toString = {
val sb = new StringBuilder()
sb.append("---------------------------------------------------------------------\\n")
sb.append("EigenvalueDecomposition(A) --> D, V, realEigenvalues, imagEigenvalues\\n")
sb.append("---------------------------------------------------------------------\\n")
sb.append("realEigenvalues = ")
val unknown = "Illegal operation or error: "
try {
sb.append(String.valueOf(this.getRealEigenvalues))
} catch {
case ex: IllegalArgumentException =>
sb.append(unknown).append(ex.getMessage)
}
sb.append("\\nimagEigenvalues = ")
try {
sb.append(String.valueOf(this.getImagEigenvalues))
} catch {
case ex: IllegalArgumentException =>
sb.append(unknown).append(ex.getMessage)
}
sb.append("\\n\\nD = ")
try {
sb.append(String.valueOf(this.getD))
} catch {
case ex: IllegalArgumentException =>
sb.append(unknown).append(ex.getMessage)
}
sb.append("\\n\\nV = ")
try {
sb.append(String.valueOf(this.getV))
} catch {
case ex: IllegalArgumentException =>
sb.append(unknown).append(ex.getMessage)
}
sb.toString
}
/** Symmetric tridiagonal QL algorithm. */
private def tql2() {
// This is derived from the Algol procedures tql2, by
// Bowdler, Martin, Reinsch, and Wilkinson, Handbook for
// Auto. Comp., Vol.ii-Linear Algebra, and the corresponding
// Fortran subroutine in EISPACK.
System.arraycopy(e, 1, e, 0, n - 1)
e(n - 1) = 0.0
var f = 0.0
var tst1 = 0.0
val eps = math.pow(2.0, -52.0)
var l = 0
while (l < n) {
// Find small subdiagonal element
tst1 = math.max(tst1, math.abs(d(l)) + math.abs(e(l)))
var m = l
var continue = true
while (m < n && continue) {
if (math.abs(e(m)) <= eps * tst1) {
continue = false
} else {
m += 1
}
}
// If m == l, d(l) is an eigenvalue,
// otherwise, iterate.
if (m > l) {
var iter = 0
do {
iter += 1 // (Could check iteration count here.)
// Compute implicit shift
var g = d(l)
var p = (d(l + 1) - g) / (2.0 * e(l))
var r = java.lang.Math.hypot(p, 1.0)
if (p < 0) {
r = -r
}
d(l) = e(l) / (p + r)
d(l + 1) = e(l) * (p + r)
val dl1 = d(l + 1)
var h = g - d(l)
var i = l + 2
while (i < n) {
d(i) -= h
i += 1
}
f += h
// Implicit QL transformation.
p = d(m)
var c = 1.0
var c2 = c
var c3 = c
var el1 = e(l + 1)
var s = 0.0
var s2 = 0.0
i = m - 1
while (i >= l) {
c3 = c2
c2 = c
s2 = s
g = c * e(i)
h = c * p
r = java.lang.Math.hypot(p, e(i))
e(i + 1) = s * r
s = e(i) / r
c = p / r
p = c * d(i) - s * g
d(i + 1) = h + s * (c * g + s * d(i))
// Accumulate transformation.
var k = 0
while (k < n) {
h = V(k)(i + 1)
V(k)(i + 1) = s * V(k)(i) + c * h
V(k)(i) = c * V(k)(i) - s * h
k += 1
}
i -= 1
}
p = -s * s2 * c3 * el1 * e(l) / dl1
e(l) = s * p
d(l) = c * p
// Check for convergence.
} while (math.abs(e(l)) > eps * tst1)
}
d(l) += f
e(l) = 0.0
l += 1
}
// Sort eigenvalues and corresponding vectors.
var i = 0
while (i < n - 1) {
var k = i
var p = d(i)
var j = i + 1
while (j < n) {
if (d(j) < p) {
k = j
p = d(j)
}
j += 1
}
if (k != i) {
d(k) = d(i)
d(i) = p
var j = 0
while (j < n) {
p = V(j)(i)
V(j)(i) = V(j)(k)
V(j)(k) = p
j += 1
}
}
i += 1
}
}
/** Symmetric Householder reduction to tridiagonal form. */
private def tred2() {
// This is derived from the Algol procedures tred2 by
// Bowdler, Martin, Reinsch, and Wilkinson, Handbook for
// Auto. Comp., Vol.ii-Linear Algebra, and the corresponding
// Fortran subroutine in EISPACK.
System.arraycopy(V(n - 1), 0, d, 0, n)
// Householder reduction to tridiagonal form.
var i = n - 1
while (i > 0) {
// Scale to avoid under/overflow.
var scale = 0.0
var k = 0
while (k < i) {
scale += math.abs(d(k))
k += 1
}
var h = 0.0
if (scale == 0.0) {
e(i) = d(i - 1)
var j = 0
while (j < i) {
d(j) = V(i - 1)(j)
V(i)(j) = 0.0
V(j)(i) = 0.0
j += 1
}
} else {
// Generate Householder vector.
var k = 0
while (k < i) {
d(k) /= scale
h += d(k) * d(k)
k += 1
}
var f = d(i - 1)
var g = math.sqrt(h)
if (f > 0) {
g = -g
}
e(i) = scale * g
h -= f * g
d(i - 1) = f - g
var j = 0
while (j < i) {
e(j) = 0.0
j += 1
}
// Apply similarity transformation to remaining columns.
j = 0
while (j < i) {
f = d(j)
V(j)(i) = f
g = e(j) + V(j)(j) * f
var k = j + 1
while (k <= i - 1) {
g += V(k)(j) * d(k)
e(k) += V(k)(j) * f
k += 1
}
e(j) = g
j += 1
}
f = 0.0
j = 0
while (j < i) {
e(j) /= h
f += e(j) * d(j)
j += 1
}
val hh = f / (h + h)
j = 0
while (j < i) {
e(j) -= hh * d(j)
j += 1
}
j = 0
while (j < i) {
f = d(j)
g = e(j)
var k = j
while (k <= i - 1) {
V(k)(j) -= f * e(k) + g * d(k)
k += 1
}
d(j) = V(i - 1)(j)
V(i)(j) = 0.0
j += 1
}
}
d(i) = h
i -= 1
}
// Accumulate transformations.
i = 0
while (i < n - 1) {
V(n - 1)(i) = V(i)(i)
V(i)(i) = 1.0
val h = d(i + 1)
if (h != 0.0) {
var k = 0
while (k <= i) {
d(k) = V(k)(i + 1) / h
k += 1
}
var j = 0
while (j <= i) {
var g = 0.0
var k = 0
while (k <= i) {
g += V(k)(i + 1) * V(k)(j)
k += 1
}
k = 0
while (k <= i) {
V(k)(j) -= g * d(k)
k += 1
}
j += 1
}
}
var k = 0
while (k <= i) {
V(k)(i + 1) = 0.0
k += 1
}
i += 1
}
var j = 0
while (j < n) {
d(j) = V(n - 1)(j)
V(n - 1)(j) = 0.0
j += 1
}
V(n - 1)(n - 1) = 1.0
e(0) = 0.0
}
}
| wandoulabs/wandou-math | wandou-math/src/main/scala/wandou/math/algebra/EigenvalueDecomposition.scala | Scala | apache-2.0 | 24,433 |
/* __ *\\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\\___/_/ |_/____/_/ | | **
** |/ **
\\* */
package scala
/** `AnyVal` is the root class of all ''value types'', which describe values
* not implemented as objects in the underlying host system. Value classes
* are specified in Scala Language Specification, section 12.2.
*
* The standard implementation includes nine `AnyVal` subtypes:
*
* [[scala.Double]], [[scala.Float]], [[scala.Long]], [[scala.Int]], [[scala.Char]],
* [[scala.Short]], and [[scala.Byte]] are the ''numeric value types''.
*
* [[scala.Unit]] and [[scala.Boolean]] are the ''non-numeric value types''.
*
* Other groupings:
*
* - The ''subrange types'' are [[scala.Byte]], [[scala.Short]], and [[scala.Char]].
* - The ''integer types'' include the subrange types as well as [[scala.Int]] and [[scala.Long]].
* - The ''floating point types'' are [[scala.Float]] and [[scala.Double]].
*
* Prior to Scala 2.10, `AnyVal` was a sealed trait. Beginning with Scala 2.10,
* however, it is possible to define a subclass of `AnyVal` called a ''user-defined value class''
* which is treated specially by the compiler. Properly-defined user value classes provide a way
* to improve performance on user-defined types by avoiding object allocation at runtime, and by
* replacing virtual method invocations with static method invocations.
*
* User-defined value classes which avoid object allocation...
*
* - must have a single `val` parameter that is the underlying runtime representation.
* - can define `def`s, but no `val`s, `var`s, or nested `traits`s, `class`es or `object`s.
* - typically extend no other trait apart from `AnyVal`.
* - cannot be used in type tests or pattern matching.
* - may not override `equals` or `hashCode` methods.
*
* A minimal example:
* {{{
* class Wrapper(val underlying: Int) extends AnyVal {
* def foo: Wrapper = new Wrapper(underlying * 19)
* }
* }}}
*
* It's important to note that user-defined value classes are limited, and in some circumstances,
* still must allocate a value class instance at runtime. These limitations and circumstances are
* explained in greater detail in the [[http://docs.scala-lang.org/overviews/core/value-classes.html Value Classes and Universal Traits]].
*/
abstract class AnyVal extends Any {
  // NOTE: the `null` body is a placeholder — getClass on value types is
  // presumably compiler-synthesized, so this body is never executed as
  // written; confirm against the compiler's intrinsics handling.
  def getClass(): Class[_ <: AnyVal] = null
}
| felixmulder/scala | src/library/scala/AnyVal.scala | Scala | bsd-3-clause | 2,823 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming
import java.util.concurrent.ConcurrentLinkedQueue
import org.apache.spark.rdd.{BlockRDD, RDD}
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.dstream.{DStream, TailWindowedDStream}
import org.apache.spark.streaming.event.{MaxEventExtent, Event}
import org.apache.spark.util.{Clock, ManualClock}
import org.apache.spark.{HashPartitioner, SparkConf, SparkException}
import scala.collection.mutable
import scala.language.existentials
import scala.reflect.ClassTag
class BasicOperationsSuite extends TestSuiteBase {
// DStream.map must apply the function to every element of every batch.
test("map") {
  val batches = Seq(1 to 4, 5 to 8, 9 to 12)
  val expected = batches.map(batch => batch.map(_.toString))
  testOperation(batches, (stream: DStream[Int]) => stream.map(_.toString), expected)
}
// DStream.flatMap must expand each element into several output elements.
test("flatMap") {
  val batches = Seq(1 to 4, 5 to 8, 9 to 12)
  val withDouble = (x: Int) => Seq(x, x * 2)
  testOperation(
    batches,
    (stream: DStream[Int]) => stream.flatMap(withDouble),
    batches.map(_.flatMap(x => Array(x, x * 2)))
  )
}
// DStream.filter must keep only the elements satisfying the predicate.
test("filter") {
  val batches = Seq(1 to 4, 5 to 8, 9 to 12)
  val isEven = (x: Int) => x % 2 == 0
  testOperation(
    batches,
    (stream: DStream[Int]) => stream.filter(isEven),
    batches.map(_.filter(isEven))
  )
}
test("glom") {
assert(numInputPartitions === 2, "Number of input partitions has been changed from 2")
val input = Seq(1 to 4, 5 to 8, 9 to 12)
val output = Seq(
Seq( Seq(1, 2), Seq(3, 4) ),
Seq( Seq(5, 6), Seq(7, 8) ),
Seq( Seq(9, 10), Seq(11, 12) )
)
val operation = (r: DStream[Int]) => r.glom().map(_.toSeq)
testOperation(input, operation, output)
}
test("mapPartitions") {
assert(numInputPartitions === 2, "Number of input partitions has been changed from 2")
val input = Seq(1 to 4, 5 to 8, 9 to 12)
val output = Seq(Seq(3, 7), Seq(11, 15), Seq(19, 23))
val operation = (r: DStream[Int]) => r.mapPartitions(x => Iterator(x.sum))
testOperation(input, operation, output, true)
}
// Repartitioning up (2 -> 5 partitions) must yield 5 partitions per batch
// while preserving every batch's element set.
test("repartition (more partitions)") {
  val input = Seq(1 to 100, 101 to 200, 201 to 300)
  val operation = (r: DStream[Int]) => r.repartition(5)
  withStreamingContext(setupStreams(input, operation, 2)) { ssc =>
    val output = runStreamsWithPartitions(ssc, 3, 3)
    assert(output.size === 3)
    // Walk each batch's partitions alongside the range that produced it.
    for ((batchPartitions, sourceRange) <- output.toArray.toSeq.zip(input)) {
      assert(batchPartitions.size === 5)
      assert(batchPartitions.flatten.toSet.equals(sourceRange.toSet))
    }
  }
}
// Repartitioning down (5 -> 2 partitions) must yield 2 partitions per batch
// while preserving every batch's element set.
test("repartition (fewer partitions)") {
  val input = Seq(1 to 100, 101 to 200, 201 to 300)
  val operation = (r: DStream[Int]) => r.repartition(2)
  withStreamingContext(setupStreams(input, operation, 5)) { ssc =>
    val output = runStreamsWithPartitions(ssc, 3, 3)
    assert(output.size === 3)
    // Walk each batch's partitions alongside the range that produced it.
    for ((batchPartitions, sourceRange) <- output.toArray.toSeq.zip(input)) {
      assert(batchPartitions.size === 2)
      assert(batchPartitions.flatten.toSet.equals(sourceRange.toSet))
    }
  }
}
// groupByKey must gather all values for a key within a single batch.
test("groupByKey") {
  val groupCounts = (s: DStream[String]) =>
    s.map(word => (word, 1)).groupByKey().mapValues(_.toSeq)
  testOperation(
    Seq( Seq("a", "a", "b"), Seq("", ""), Seq() ),
    groupCounts,
    Seq( Seq(("a", Seq(1, 1)), ("b", Seq(1))), Seq(("", Seq(1, 1))), Seq() ),
    true
  )
}
// reduceByKey must sum the per-key counts within a single batch.
test("reduceByKey") {
  val wordCounts = (s: DStream[String]) => s.map(word => (word, 1)).reduceByKey(_ + _)
  testOperation(
    Seq( Seq("a", "a", "b"), Seq("", ""), Seq() ),
    wordCounts,
    Seq( Seq(("a", 2), ("b", 1)), Seq(("", 2)), Seq() ),
    true
  )
}
// reduce must fold each batch down to a single summed value.
test("reduce") {
  val batches = Seq(1 to 4, 5 to 8, 9 to 12)
  testOperation(
    batches,
    (s: DStream[Int]) => s.reduce(_ + _),
    batches.map(batch => Seq(batch.sum))
  )
}
// count must report each batch's cardinality, including the empty batch (0).
test("count") {
  val batches = Seq(Seq(), 1 to 1, 1 to 2, 1 to 3, 1 to 4)
  testOperation(
    batches,
    (s: DStream[Int]) => s.count(),
    batches.map(batch => Seq(batch.size.toLong))
  )
}
test("countByValue") {
testOperation(
Seq(1 to 1, Seq(1, 1, 1), 1 to 2, Seq(1, 1, 2, 2)),
(s: DStream[Int]) => s.countByValue(),
Seq(Seq((1, 1L)), Seq((1, 3L)), Seq((1, 1L), (2, 1L)), Seq((2, 2L), (1, 2L))),
true
)
}
test("mapValues") {
testOperation(
Seq( Seq("a", "a", "b"), Seq("", ""), Seq() ),
(s: DStream[String]) => s.map(x => (x, 1)).reduceByKey(_ + _).mapValues(_ + 10),
Seq( Seq(("a", 12), ("b", 11)), Seq(("", 12)), Seq() ),
true
)
}
test("flatMapValues") {
testOperation(
Seq( Seq("a", "a", "b"), Seq("", ""), Seq() ),
(s: DStream[String]) => {
s.map(x => (x, 1)).reduceByKey(_ + _).flatMapValues(x => Seq(x, x + 10))
},
Seq( Seq(("a", 2), ("a", 12), ("b", 1), ("b", 11)), Seq(("", 2), ("", 12)), Seq() ),
true
)
}
// union of a stream with its shifted copy must concatenate the two batches.
test("union") {
  val input = Seq(1 to 4, 101 to 104, 201 to 204)
  testOperation(
    input,
    (s: DStream[Int]) => s.union(s.map(_ + 4)),
    // e.g. (1..4) ++ (5..8) == 1..8, matching the original expected ranges.
    input.map(batch => batch ++ batch.map(_ + 4))
  )
}
test("union with input stream return None") {
val input = Seq(1 to 4, 101 to 104, 201 to 204, null)
val output = Seq(1 to 8, 101 to 108, 201 to 208)
intercept[SparkException] {
testOperation(
input,
(s: DStream[Int]) => s.union(s.map(_ + 4)),
output,
input.length,
false
)
}
}
test("StreamingContext.union") {
val input = Seq(1 to 4, 101 to 104, 201 to 204)
val output = Seq(1 to 12, 101 to 112, 201 to 212)
// union over 3 DStreams
testOperation(
input,
(s: DStream[Int]) => s.context.union(Seq(s, s.map(_ + 4), s.map(_ + 8))),
output
)
}
test("transform") {
val input = Seq(1 to 4, 5 to 8, 9 to 12)
testOperation(
input,
(r: DStream[Int]) => r.transform(rdd => rdd.map(_.toString)), // RDD.map in transform
input.map(_.map(_.toString))
)
}
test("transform with NULL") {
val input = Seq(1 to 4)
intercept[SparkException] {
testOperation(
input,
(r: DStream[Int]) => r.transform(rdd => null.asInstanceOf[RDD[Int]]),
Seq(Seq()),
1,
false
)
}
}
test("transform with input stream return None") {
val input = Seq(1 to 4, 5 to 8, null)
intercept[SparkException] {
testOperation(
input,
(r: DStream[Int]) => r.transform(rdd => rdd.map(_.toString)),
input.filterNot(_ == null).map(_.map(_.toString)),
input.length,
false
)
}
}
test("transformWith") {
val inputData1 = Seq( Seq("a", "b"), Seq("a", ""), Seq(""), Seq() )
val inputData2 = Seq( Seq("a", "b"), Seq("b", ""), Seq(), Seq("") )
val outputData = Seq(
Seq( ("a", (1, "x")), ("b", (1, "x")) ),
Seq( ("", (1, "x")) ),
Seq( ),
Seq( )
)
val operation = (s1: DStream[String], s2: DStream[String]) => {
val t1 = s1.map(x => (x, 1))
val t2 = s2.map(x => (x, "x"))
t1.transformWith( // RDD.join in transform
t2,
(rdd1: RDD[(String, Int)], rdd2: RDD[(String, String)]) => rdd1.join(rdd2)
)
}
testOperation(inputData1, inputData2, operation, outputData, true)
}
test("transformWith with input stream return None") {
val inputData1 = Seq( Seq("a", "b"), Seq("a", ""), Seq(""), null )
val inputData2 = Seq( Seq("a", "b"), Seq("b", ""), Seq(), null )
val outputData = Seq(
Seq("a", "b", "a", "b"),
Seq("a", "b", "", ""),
Seq("")
)
val operation = (s1: DStream[String], s2: DStream[String]) => {
s1.transformWith( // RDD.join in transform
s2,
(rdd1: RDD[String], rdd2: RDD[String]) => rdd1.union(rdd2)
)
}
intercept[SparkException] {
testOperation(inputData1, inputData2, operation, outputData, inputData1.length, true)
}
}
test("StreamingContext.transform") {
val input = Seq(1 to 4, 101 to 104, 201 to 204)
val output = Seq(1 to 12, 101 to 112, 201 to 212)
// transform over 3 DStreams by doing union of the 3 RDDs
val operation = (s: DStream[Int]) => {
s.context.transform(
Seq(s, s.map(_ + 4), s.map(_ + 8)), // 3 DStreams
(rdds: Seq[RDD[_]], time: Time) =>
rdds.head.context.union(rdds.map(_.asInstanceOf[RDD[Int]])) // union of RDDs
)
}
testOperation(input, operation, output)
}
test("StreamingContext.transform with input stream return None") {
val input = Seq(1 to 4, 101 to 104, 201 to 204, null)
val output = Seq(1 to 12, 101 to 112, 201 to 212)
// transform over 3 DStreams by doing union of the 3 RDDs
val operation = (s: DStream[Int]) => {
s.context.transform(
Seq(s, s.map(_ + 4), s.map(_ + 8)), // 3 DStreams
(rdds: Seq[RDD[_]], time: Time) =>
rdds.head.context.union(rdds.map(_.asInstanceOf[RDD[Int]])) // union of RDDs
)
}
intercept[SparkException] {
testOperation(input, operation, output, input.length, false)
}
}
test("cogroup") {
val inputData1 = Seq( Seq("a", "a", "b"), Seq("a", ""), Seq(""), Seq() )
val inputData2 = Seq( Seq("a", "a", "b"), Seq("b", ""), Seq(), Seq() )
val outputData = Seq(
Seq( ("a", (Seq(1, 1), Seq("x", "x"))), ("b", (Seq(1), Seq("x"))) ),
Seq( ("a", (Seq(1), Seq())), ("b", (Seq(), Seq("x"))), ("", (Seq(1), Seq("x"))) ),
Seq( ("", (Seq(1), Seq())) ),
Seq( )
)
val operation = (s1: DStream[String], s2: DStream[String]) => {
s1.map(x => (x, 1)).cogroup(s2.map(x => (x, "x"))).mapValues(x => (x._1.toSeq, x._2.toSeq))
}
testOperation(inputData1, inputData2, operation, outputData, true)
}
test("join") {
val inputData1 = Seq( Seq("a", "b"), Seq("a", ""), Seq(""), Seq() )
val inputData2 = Seq( Seq("a", "b"), Seq("b", ""), Seq(), Seq("") )
val outputData = Seq(
Seq( ("a", (1, "x")), ("b", (1, "x")) ),
Seq( ("", (1, "x")) ),
Seq( ),
Seq( )
)
val operation = (s1: DStream[String], s2: DStream[String]) => {
s1.map(x => (x, 1)).join(s2.map(x => (x, "x")))
}
testOperation(inputData1, inputData2, operation, outputData, true)
}
test("leftOuterJoin") {
val inputData1 = Seq( Seq("a", "b"), Seq("a", ""), Seq(""), Seq() )
val inputData2 = Seq( Seq("a", "b"), Seq("b", ""), Seq(), Seq("") )
val outputData = Seq(
Seq( ("a", (1, Some("x"))), ("b", (1, Some("x"))) ),
Seq( ("", (1, Some("x"))), ("a", (1, None)) ),
Seq( ("", (1, None)) ),
Seq( )
)
val operation = (s1: DStream[String], s2: DStream[String]) => {
s1.map(x => (x, 1)).leftOuterJoin(s2.map(x => (x, "x")))
}
testOperation(inputData1, inputData2, operation, outputData, true)
}
test("rightOuterJoin") {
val inputData1 = Seq( Seq("a", "b"), Seq("a", ""), Seq(""), Seq() )
val inputData2 = Seq( Seq("a", "b"), Seq("b", ""), Seq(), Seq("") )
val outputData = Seq(
Seq( ("a", (Some(1), "x")), ("b", (Some(1), "x")) ),
Seq( ("", (Some(1), "x")), ("b", (None, "x")) ),
Seq( ),
Seq( ("", (None, "x")) )
)
val operation = (s1: DStream[String], s2: DStream[String]) => {
s1.map(x => (x, 1)).rightOuterJoin(s2.map(x => (x, "x")))
}
testOperation(inputData1, inputData2, operation, outputData, true)
}
test("fullOuterJoin") {
val inputData1 = Seq( Seq("a", "b"), Seq("a", ""), Seq(""), Seq() )
val inputData2 = Seq( Seq("a", "b"), Seq("b", ""), Seq(), Seq("") )
val outputData = Seq(
Seq( ("a", (Some(1), Some("x"))), ("b", (Some(1), Some("x"))) ),
Seq( ("", (Some(1), Some("x"))), ("a", (Some(1), None)), ("b", (None, Some("x"))) ),
Seq( ("", (Some(1), None)) ),
Seq( ("", (None, Some("x"))) )
)
val operation = (s1: DStream[String], s2: DStream[String]) => {
s1.map(x => (x, 1)).fullOuterJoin(s2.map(x => (x, "x")))
}
testOperation(inputData1, inputData2, operation, outputData, true)
}
test("updateStateByKey") {
val inputData =
Seq(
Seq("a"),
Seq("a", "b"),
Seq("a", "b", "c"),
Seq("a", "b"),
Seq("a"),
Seq()
)
val outputData =
Seq(
Seq(("a", 1)),
Seq(("a", 2), ("b", 1)),
Seq(("a", 3), ("b", 2), ("c", 1)),
Seq(("a", 4), ("b", 3), ("c", 1)),
Seq(("a", 5), ("b", 3), ("c", 1)),
Seq(("a", 5), ("b", 3), ("c", 1))
)
val updateStateOperation = (s: DStream[String]) => {
val updateFunc = (values: Seq[Int], state: Option[Int]) => {
Some(values.sum + state.getOrElse(0))
}
s.map(x => (x, 1)).updateStateByKey[Int](updateFunc)
}
testOperation(inputData, updateStateOperation, outputData, true)
}
test("updateStateByKey - simple with initial value RDD") {
val initial = Seq(("a", 1), ("c", 2))
val inputData =
Seq(
Seq("a"),
Seq("a", "b"),
Seq("a", "b", "c"),
Seq("a", "b"),
Seq("a"),
Seq()
)
val outputData =
Seq(
Seq(("a", 2), ("c", 2)),
Seq(("a", 3), ("b", 1), ("c", 2)),
Seq(("a", 4), ("b", 2), ("c", 3)),
Seq(("a", 5), ("b", 3), ("c", 3)),
Seq(("a", 6), ("b", 3), ("c", 3)),
Seq(("a", 6), ("b", 3), ("c", 3))
)
val updateStateOperation = (s: DStream[String]) => {
val initialRDD = s.context.sparkContext.makeRDD(initial)
val updateFunc = (values: Seq[Int], state: Option[Int]) => {
Some(values.sum + state.getOrElse(0))
}
s.map(x => (x, 1)).updateStateByKey[Int](updateFunc,
new HashPartitioner (numInputPartitions), initialRDD)
}
testOperation(inputData, updateStateOperation, outputData, true)
}
test("updateStateByKey - with initial value RDD") {
val initial = Seq(("a", 1), ("c", 2))
val inputData =
Seq(
Seq("a"),
Seq("a", "b"),
Seq("a", "b", "c"),
Seq("a", "b"),
Seq("a"),
Seq()
)
val outputData =
Seq(
Seq(("a", 2), ("c", 2)),
Seq(("a", 3), ("b", 1), ("c", 2)),
Seq(("a", 4), ("b", 2), ("c", 3)),
Seq(("a", 5), ("b", 3), ("c", 3)),
Seq(("a", 6), ("b", 3), ("c", 3)),
Seq(("a", 6), ("b", 3), ("c", 3))
)
val updateStateOperation = (s: DStream[String]) => {
val initialRDD = s.context.sparkContext.makeRDD(initial)
val updateFunc = (values: Seq[Int], state: Option[Int]) => {
Some(values.sum + state.getOrElse(0))
}
val newUpdateFunc = (iterator: Iterator[(String, Seq[Int], Option[Int])]) => {
iterator.flatMap(t => updateFunc(t._2, t._3).map(s => (t._1, s)))
}
s.map(x => (x, 1)).updateStateByKey[Int](newUpdateFunc,
new HashPartitioner (numInputPartitions), true, initialRDD)
}
testOperation(inputData, updateStateOperation, outputData, true)
}
test("updateStateByKey - object lifecycle") {
val inputData =
Seq(
Seq("a", "b"),
null,
Seq("a", "c", "a"),
Seq("c"),
null,
null
)
val outputData =
Seq(
Seq(("a", 1), ("b", 1)),
Seq(("a", 1), ("b", 1)),
Seq(("a", 3), ("c", 1)),
Seq(("a", 3), ("c", 2)),
Seq(("c", 2)),
Seq()
)
val updateStateOperation = (s: DStream[String]) => {
class StateObject(var counter: Int = 0, var expireCounter: Int = 0) extends Serializable
// updateFunc clears a state when a StateObject is seen without new values twice in a row
val updateFunc = (values: Seq[Int], state: Option[StateObject]) => {
val stateObj = state.getOrElse(new StateObject)
values.sum match {
case 0 => stateObj.expireCounter += 1 // no new values
case n => // has new values, increment and reset expireCounter
stateObj.counter += n
stateObj.expireCounter = 0
}
stateObj.expireCounter match {
case 2 => None // seen twice with no new values, give it the boot
case _ => Option(stateObj)
}
}
s.map(x => (x, 1)).updateStateByKey[StateObject](updateFunc).mapValues(_.counter)
}
testOperation(inputData, updateStateOperation, outputData, true)
}
test("slice") {
withStreamingContext(new StreamingContext(conf, Seconds(1))) { ssc =>
val input = Seq(Seq(1), Seq(2), Seq(3), Seq(4))
val stream = new TestInputStream[Int](ssc, input, 2)
stream.foreachRDD(_ => {}) // Dummy output stream
ssc.start()
Thread.sleep(2000)
def getInputFromSlice(fromMillis: Long, toMillis: Long): Set[Int] = {
stream.slice(new Time(fromMillis), new Time(toMillis)).flatMap(_.collect()).toSet
}
assert(getInputFromSlice(0, 1000) == Set(1))
assert(getInputFromSlice(0, 2000) == Set(1, 2))
assert(getInputFromSlice(1000, 2000) == Set(1, 2))
assert(getInputFromSlice(2000, 4000) == Set(2, 3, 4))
}
}
test("slice - has not been initialized") {
withStreamingContext(new StreamingContext(conf, Seconds(1))) { ssc =>
val input = Seq(Seq(1), Seq(2), Seq(3), Seq(4))
val stream = new TestInputStream[Int](ssc, input, 2)
val thrown = intercept[SparkException] {
stream.slice(new Time(0), new Time(1000))
}
assert(thrown.getMessage.contains("has not been initialized"))
}
}
val cleanupTestInput = (0 until 10).map(x => Seq(x, x + 1)).toSeq
test("rdd cleanup - map and window") {
val rememberDuration = Seconds(3)
def operation(s: DStream[Int]): DStream[(Int, Int)] = {
s.map(x => (x % 10, 1))
.window(Seconds(2), Seconds(1))
.window(Seconds(4), Seconds(2))
}
val operatedStream = runCleanupTest(conf, operation _,
numExpectedOutput = cleanupTestInput.size / 2, rememberDuration = Seconds(3))
val windowedStream2 = operatedStream.asInstanceOf[TailWindowedDStream[_]]
val windowedStream1 = windowedStream2.dependencies.head.stream.asInstanceOf[TailWindowedDStream[_]]
val mappedStream = windowedStream1.dependencies.head.stream
val windowedStream2RememberExtent = new MaxEventExtent
windowedStream2RememberExtent.set(rememberDuration)
val windowedStream1RememberExtent = new MaxEventExtent
windowedStream1RememberExtent.set(rememberDuration +
batchDuration * windowedStream2.windowLength)
val mappedStreamRememberExtent = new MaxEventExtent
mappedStreamRememberExtent.set(rememberDuration + batchDuration *
(windowedStream2.windowLength + windowedStream1.windowLength))
// Checkpoint remember counts
assert(windowedStream2.rememberCount ===
windowedStream2RememberExtent.evalCount(windowedStream2.generatedEvents))
assert(windowedStream1.rememberCount ===
windowedStream1RememberExtent.evalCount(windowedStream1.generatedEvents))
assert(mappedStream.rememberCount ===
mappedStreamRememberExtent.evalCount(mappedStream.generatedEvents))
// Checkpoint remember durations
assert(windowedStream2.rememberDuration ===
windowedStream2RememberExtent.evalDuration(windowedStream2.generatedEvents))
assert(windowedStream1.rememberDuration ===
windowedStream1RememberExtent.evalDuration(windowedStream1.generatedEvents))
assert(mappedStream.rememberDuration ===
mappedStreamRememberExtent.evalDuration(mappedStream.generatedEvents))
// WindowedStream2 should remember till 7 seconds: 10, 9, 8, 7
// WindowedStream1 should remember till 4 seconds: 10, 9, 8, 7, 6, 5, 4
// MappedStream should remember till 2 seconds: 10, 9, 8, 7, 6, 5, 4, 3, 2
// WindowedStream2
assert(windowedStream2.generatedRDDs.keys.exists(_.time == Time(10000)))
assert(windowedStream2.generatedRDDs.keys.exists(_.time == Time(8000)))
assert(!windowedStream2.generatedRDDs.keys.exists(_.time == Time(6000)))
// WindowedStream1
assert(windowedStream1.generatedRDDs.keys.exists(_.time == Time(10000)))
assert(windowedStream1.generatedRDDs.keys.exists(_.time == Time(4000)))
assert(!windowedStream1.generatedRDDs.keys.exists(_.time == Time(3000)))
// MappedStream
assert(mappedStream.generatedRDDs.keys.exists(_.time == Time(10000)))
assert(mappedStream.generatedRDDs.keys.exists(_.time == Time(2000)))
assert(!mappedStream.generatedRDDs.keys.exists(_.time == Time(1000)))
}
test("rdd cleanup - updateStateByKey") {
val updateFunc = (values: Seq[Int], state: Option[Int]) => {
Some(values.sum + state.getOrElse(0))
}
val stateStream = runCleanupTest(
conf, _.map(_ -> 1).updateStateByKey(updateFunc).checkpoint(Seconds(3)))
val stateStreamRememberExtent = new MaxEventExtent
stateStreamRememberExtent.set(stateStream.checkpointDuration.get * 2)
assert(stateStream.rememberCount ===
stateStreamRememberExtent.evalCount(stateStream.generatedEvents))
assert(stateStream.rememberDuration ===
stateStreamRememberExtent.evalDuration(stateStream.generatedEvents))
assert(stateStream.generatedRDDs.keys.exists(_.time == Time(10000)))
assert(!stateStream.generatedRDDs.keys.exists(_.time == Time(4000)))
}
// End-to-end cleanup test: receive real data through a socket receiver, then
// verify that (a) old persisted RDDs are unpersisted while the latest one
// survives, and (b) old input blocks are dropped from the block manager while
// the latest block RDD remains valid and collectable.
test("rdd cleanup - input blocks and persisted RDDs") {
  // Actually receive data over through receiver to create BlockRDDs
  withTestServer(new TestServer()) { testServer =>
    withStreamingContext(new StreamingContext(conf, batchDuration)) { ssc =>
      testServer.start()
      val batchCounter = new BatchCounter(ssc)

      // Set up the streaming context and input streams
      val networkStream =
        ssc.socketTextStream("localhost", testServer.port, StorageLevel.MEMORY_AND_DISK)
      val mappedStream = networkStream.map(_ + ".").persist()
      val outputQueue = new ConcurrentLinkedQueue[Seq[String]]
      val outputStream = new TestOutputStream(mappedStream, outputQueue)
      outputStream.register()
      ssc.start()

      // Feed data to the server to send to the network receiver
      val clock = ssc.scheduler.clock.asInstanceOf[ManualClock]
      val input = Seq(1, 2, 3, 4, 5, 6)
      val blockRdds = new mutable.HashMap[Event, BlockRDD[_]]
      val persistentRddIds = new mutable.HashMap[Event, Int]

      // Snapshot all RDD info required for the verification below.
      def collectRddInfo(): Unit = {
        networkStream.generatedRDDs.foreach { case (event, rdd) =>
          blockRdds(event) = rdd.asInstanceOf[BlockRDD[_]]
        }
        mappedStream.generatedRDDs.foreach { case (event, rdd) =>
          persistentRddIds(event) = rdd.id
        }
      }

      Thread.sleep(200)
      for (i <- 0 until input.size) {
        // BUG FIX: send an actual newline ("\n"), not the two-character
        // literal "\\n" — socketTextStream frames input by line endings, so
        // a literal backslash-n would never complete a line and no batch
        // would ever carry data.
        testServer.send(input(i).toString + "\n")
        Thread.sleep(200)
        val numCompletedBatches = batchCounter.getNumCompletedBatches
        clock.advance(batchDuration.milliseconds)
        if (!batchCounter.waitUntilBatchesCompleted(numCompletedBatches + 1, 5000)) {
          fail("Batch took more than 5 seconds to complete")
        }
        collectRddInfo()
      }

      Thread.sleep(200)
      collectRddInfo()
      logInfo("Stopping server")
      testServer.stop()

      // verify data has been received
      assert(!outputQueue.isEmpty)
      assert(blockRdds.size > 0)
      assert(persistentRddIds.size > 0)

      val latestPersistedRddId = persistentRddIds(persistentRddIds.keySet.max)
      val earliestPersistedRddId = persistentRddIds(persistentRddIds.keySet.min)
      val latestBlockRdd = blockRdds(blockRdds.keySet.max)
      val earliestBlockRdd = blockRdds(blockRdds.keySet.min)

      // verify that the latest mapped RDD is persisted but the earliest one has been unpersisted
      assert(ssc.sparkContext.persistentRdds.contains(latestPersistedRddId))
      assert(!ssc.sparkContext.persistentRdds.contains(earliestPersistedRddId))

      // verify that the latest input blocks are present but the earliest blocks have been removed
      assert(latestBlockRdd.isValid)
      assert(latestBlockRdd.collect != null)
      assert(!earliestBlockRdd.isValid)
      earliestBlockRdd.blockIds.foreach { blockId =>
        assert(!ssc.sparkContext.env.blockManager.master.contains(blockId))
      }
    }
  }
}
/** Test cleanup of RDDs in DStream metadata */
def runCleanupTest[T: ClassTag](
conf2: SparkConf,
operation: DStream[Int] => DStream[T],
numExpectedOutput: Int = cleanupTestInput.size,
rememberDuration: Duration = null
): DStream[T] = {
// Setup the stream computation
assert(batchDuration === Seconds(1),
"Batch duration has changed from 1 second, check cleanup tests")
withStreamingContext(setupStreams(cleanupTestInput, operation)) { ssc =>
val operatedStream =
ssc.graph.getOutputStreams().head.dependencies.head.stream.asInstanceOf[DStream[T]]
if (rememberDuration != null) ssc.remember(rememberDuration)
val output = runStreams[(Int, Int)](ssc, cleanupTestInput.size, numExpectedOutput)
val clock = ssc.scheduler.clock.asInstanceOf[Clock]
assert(clock.getTimeMillis() === Seconds(10).milliseconds)
assert(output.size === numExpectedOutput)
operatedStream
}
}
}
| mashin-io/rich-spark | streaming/src/test/scala/org/apache/spark/streaming/BasicOperationsSuite.scala | Scala | apache-2.0 | 26,514 |
def get: State[S, S] = State(s => (s, s)) | hmemcpy/milewski-ctfp-pdf | src/content/3.5/code/scala/snippet20.scala | Scala | gpl-3.0 | 41 |
package com.sageserpent.plutonium
import java.nio.ByteBuffer
import java.time.Instant
import java.util.{NoSuchElementException, UUID}
import com.esotericsoftware.kryo.Kryo
import com.lambdaworks.redis.RedisClient
import com.lambdaworks.redis.api.rx.RedisReactiveCommands
import com.lambdaworks.redis.codec.{ByteArrayCodec, RedisCodec, Utf8StringCodec}
import com.sageserpent.americium.{PositiveInfinity, Unbounded}
import com.sageserpent.plutonium.ItemExtensionApi.UniqueItemSpecification
import com.twitter.chill.{KryoPool, ScalaKryoInstantiator}
import io.netty.handler.codec.EncoderException
import rx.lang.scala.JavaConversions._
import rx.lang.scala.Observable
import scala.Ordering.Implicits._
object WorldRedisBasedImplementation {
import UniqueItemSpecificationSerializationSupport.SpecialSerializer
val redisNamespaceComponentSeparator = ":"
val kryoPool = KryoPool.withByteArrayOutputStream(
40,
new ScalaKryoInstantiator().withRegistrar { (kryo: Kryo) =>
kryo.register(classOf[UniqueItemSpecification], new SpecialSerializer)
}
)
// Codec that keeps Redis keys as plain UTF-8 strings (human-readable when
// inspecting the store) while serialising values with the shared Kryo pool,
// so arbitrary Scala objects can be round-tripped through Redis.
object redisCodecDelegatingKeysToStandardCodec
    extends RedisCodec[String, Any] {
  // Delegate for the key side: plain UTF-8 text.
  protected val stringKeyStringValueCodec = new Utf8StringCodec

  override def encodeKey(key: String): ByteBuffer =
    stringKeyStringValueCodec.encodeKey(key)

  override def decodeKey(bytes: ByteBuffer): String =
    stringKeyStringValueCodec.decodeKey(bytes)

  // Values: Kryo-serialise with embedded class information, then hand the
  // raw bytes to the byte-array codec.
  override def encodeValue(value: Any): ByteBuffer =
    ByteArrayCodec.INSTANCE.encodeValue(kryoPool.toBytesWithClass(value))

  override def decodeValue(bytes: ByteBuffer): Any =
    kryoPool.fromBytes(ByteArrayCodec.INSTANCE.decodeValue(bytes))
}
}
class WorldRedisBasedImplementation(redisClient: RedisClient,
identityGuid: String)
extends WorldInefficientImplementationCodeFactoring {
parentWorld =>
import World._
import WorldImplementationCodeFactoring._
import WorldRedisBasedImplementation._
// Reactive command interface to Redis; (re)created by `setupRedisApi` and
// replaced wholesale when the connection must be recovered after an
// encoding failure.
// NOTE(review): mutable, null-initialised state — initialised immediately
// below, but an eager `val`-plus-reconnect design would avoid the `null`.
var redisApi: RedisReactiveCommands[String, Any] = null

setupRedisApi()

// Opens a fresh reactive connection using the Kryo-backed codec above.
private def setupRedisApi() = {
  redisApi =
    redisClient.connect(redisCodecDelegatingKeysToStandardCodec).reactive()
}

// Closes the current reactive connection (does not null out `redisApi`;
// callers are expected to call `setupRedisApi` right after).
private def teardownRedisApi(): Unit = {
  redisApi.close()
}
val asOfsKey = s"${identityGuid}${redisNamespaceComponentSeparator}asOfs"
val eventCorrectionsKeyPrefix =
s"${identityGuid}${redisNamespaceComponentSeparator}eventCorrectionsFor${redisNamespaceComponentSeparator}"
val eventIdsKey =
s"${identityGuid}${redisNamespaceComponentSeparator}eventIds"
// Redis key under which every revision/correction of one event is stored.
def eventCorrectionsKeyFrom(eventId: EventId) =
  eventCorrectionsKeyPrefix + eventId
override def nextRevision: Revision = nextRevisionObservable.toBlocking.first
protected def nextRevisionObservable: Observable[Revision] =
toScalaObservable(redisApi.llen(asOfsKey)) map (_.toInt)
override def forkExperimentalWorld(scope: javaApi.Scope): World =
new WorldRedisBasedImplementation(
redisClient = parentWorld.redisClient,
identityGuid =
s"${parentWorld.identityGuid}-experimental-${UUID.randomUUID()}") {
val baseWorld = parentWorld
val numberOfRevisionsInCommon = scope.nextRevision
val cutoffWhenAfterWhichHistoriesDiverge = scope.when
override protected def nextRevisionObservable: Observable[Revision] =
super.nextRevisionObservable map (numberOfRevisionsInCommon + _)
override protected def revisionAsOfsObservable
: Observable[Array[Instant]] =
for {
revisionAsOfsFromBaseWorld <- baseWorld.revisionAsOfsObservable
revisionAsOfsFromSuper <- super.revisionAsOfsObservable
} yield
(revisionAsOfsFromBaseWorld take numberOfRevisionsInCommon) ++ revisionAsOfsFromSuper
override protected def pertinentEventDatumsObservable(
cutoffRevision: Revision,
cutoffWhen: Unbounded[Instant],
eventIdInclusion: EventIdInclusion)
: Observable[(EventId, AbstractEventData)] = {
val cutoffWhenForBaseWorld = cutoffWhen min cutoffWhenAfterWhichHistoriesDiverge
if (cutoffRevision > numberOfRevisionsInCommon) for {
eventIdsAndTheirDatums <- eventIdsAndTheirDatumsObservable(
cutoffRevision,
eventIdInclusion).toList
eventIdsToBeExcluded = eventIdsAndTheirDatums.map(_._1).toSet
eventIdAndItsDatum <- Observable
.from(eventIdsAndTheirDatums filter {
case (_, eventDatum) =>
eventDatumComesWithinCutoff(eventDatum, cutoffWhen)
}) merge baseWorld.pertinentEventDatumsObservable(
numberOfRevisionsInCommon,
cutoffWhenForBaseWorld,
eventId =>
!eventIdsToBeExcluded.contains(eventId) && eventIdInclusion(
eventId))
} yield eventIdAndItsDatum
else
baseWorld.pertinentEventDatumsObservable(cutoffRevision,
cutoffWhenForBaseWorld,
eventIdInclusion)
}
}
override def revisionAsOfs: Array[Instant] =
revisionAsOfsObservable.toBlocking.first
protected def revisionAsOfsObservable: Observable[Array[Instant]] =
toScalaObservable(redisApi.lrange(asOfsKey, 0, -1))
.asInstanceOf[Observable[Instant]]
.toArray
override protected def eventTimeline(
cutoffRevision: Revision): Seq[(Event, EventId)] =
try {
val eventDatumsObservable = for {
_ <- toScalaObservable(redisApi.watch(asOfsKey))
eventDatums <- pertinentEventDatumsObservable(cutoffRevision).toList
transactionStart = toScalaObservable(redisApi.multi())
// NASTY HACK: there needs to be at least one Redis command sent in a
// transaction for the result of the 'exec' command to yield a difference
// between an aborted transaction and a completed one. Yuk!
transactionBody = toScalaObservable(redisApi.llen(asOfsKey))
transactionEnd = toScalaObservable(redisApi.exec())
((_, _), _) <- transactionStart zip transactionBody zip transactionEnd
} yield eventDatums
eventTimelineFrom(eventDatumsObservable.toBlocking.single)
} catch {
case exception: EncoderException =>
recoverRedisApi
throw exception.getCause
case _: NoSuchElementException =>
throw new RuntimeException(
"Concurrent revision attempt detected in query.")
}
type EventIdInclusion = EventId => Boolean
protected def pertinentEventDatumsObservable(
cutoffRevision: Revision,
cutoffWhen: Unbounded[Instant],
eventIdInclusion: EventIdInclusion)
: Observable[(EventId, AbstractEventData)] =
eventIdsAndTheirDatumsObservable(cutoffRevision, eventIdInclusion) filter {
case (_, eventDatum) =>
eventDatumComesWithinCutoff(eventDatum, cutoffWhen)
}
// A datum falls within the cutoff unless it is a timestamped `EventData`
// whose event happened strictly after `cutoffWhen`; non-`EventData`
// correction markers always qualify.
private def eventDatumComesWithinCutoff(eventDatum: AbstractEventData,
                                        cutoffWhen: Unbounded[Instant]) =
  eventDatum match {
    case timestamped: EventData if timestamped.serializableEvent.when > cutoffWhen =>
      false
    case _ =>
      true
  }
def eventIdsAndTheirDatumsObservable(cutoffRevision: Revision,
eventIdInclusion: EventIdInclusion)
: Observable[(EventId, AbstractEventData)] = {
for {
eventId <- toScalaObservable(redisApi.smembers(eventIdsKey))
.asInstanceOf[Observable[EventId]] filter eventIdInclusion
eventIdAndDataPair <- toScalaObservable(
redisApi.zrevrangebyscore(eventCorrectionsKeyFrom(eventId),
cutoffRevision - 1,
initialRevision,
0,
1))
.asInstanceOf[Observable[AbstractEventData]] map (eventId -> _)
} yield eventIdAndDataPair
}
def pertinentEventDatumsObservable(cutoffRevision: Revision,
eventIdsForNewEvents: Iterable[EventId])
: Observable[(EventId, AbstractEventData)] = {
val eventIdsToBeExcluded = eventIdsForNewEvents.toSet
pertinentEventDatumsObservable(
cutoffRevision,
PositiveInfinity(),
eventId => !eventIdsToBeExcluded.contains(eventId))
}
// Convenience overload: every event id is pertinent and no temporal cutoff
// applies (positive infinity).
def pertinentEventDatumsObservable(
    cutoffRevision: Revision): Observable[(EventId, AbstractEventData)] = {
  val includeEveryEventId: EventIdInclusion = _ => true
  pertinentEventDatumsObservable(cutoffRevision,
                                 PositiveInfinity(),
                                 includeEveryEventId)
}
/**
 * Atomically commits a new revision of event data to Redis using optimistic locking.
 *
 * Protocol (order matters — Redis must see the verbs in this sequence):
 *  1. WATCH the as-ofs key so a concurrent revision invalidates this transaction.
 *  2. Read the next revision number and the pertinent existing event data, and
 *     check the revision-time precondition against the recorded as-ofs.
 *  3. Validate the proposed event timeline via the supplied callback.
 *  4. Stage the new event corrections under keys suffixed with a fresh transaction
 *     GUID; the staging keys expire after a few seconds so they are garbage-collected
 *     if the transaction never commits.
 *  5. Inside MULTI/EXEC, union the staged keys into the real keys and push the new
 *     as-of onto the as-ofs list.
 *
 * If EXEC is aborted by the WATCH (another revision won), the observable yields no
 * element and `toBlocking.first` throws NoSuchElementException, which is translated
 * into a "concurrent revision" error below.
 *
 * @param asOf the revision timestamp for the proposed new revision
 * @param newEventDatumsFor computes the new event data given the revision number
 *                          that the commit will occupy
 * @param buildAndValidateEventTimelineForProposedNewRevision validation callback,
 *        given (new event datums, pertinent existing datums); expected to throw on
 *        an invalid timeline
 * @return the revision number that was current prior to this update
 */
override protected def transactNewRevision(
    asOf: Instant,
    newEventDatumsFor: Revision => Map[EventId, AbstractEventData],
    buildAndValidateEventTimelineForProposedNewRevision: (
        Seq[(EventId, AbstractEventData)],
        Seq[(EventId, AbstractEventData)]) => Unit): Revision = {
  try {
    val revisionObservable = for {
      // Step 1: optimistic lock — EXEC later aborts if asOfsKey changes meanwhile.
      _ <- toScalaObservable(redisApi.watch(asOfsKey))
      nextRevisionPriorToUpdate <- nextRevisionObservable
      newEventDatums: Map[EventId, AbstractEventData] = newEventDatumsFor(
        nextRevisionPriorToUpdate)
      // Step 2: fetch existing datums (excluding ids being rewritten) while also
      // checking the as-of precondition in parallel.
      (pertinentEventDatumsExcludingTheNewRevision: Seq[(EventId,
                                                         AbstractEventData)],
       _) <- pertinentEventDatumsObservable(
        nextRevisionPriorToUpdate,
        newEventDatums.keys.toSeq).toList zip
        (for (revisionAsOfs <- revisionAsOfsObservable)
          yield checkRevisionPrecondition(asOf, revisionAsOfs))
      // Step 3: throws if the proposed timeline is inconsistent.
      _ = buildAndValidateEventTimelineForProposedNewRevision(
        newEventDatums.toSeq,
        pertinentEventDatumsExcludingTheNewRevision)
      transactionGuid = UUID.randomUUID()
      // Step 4: stage corrections under GUID-suffixed keys with a short TTL, so
      // they evaporate as garbage if the MULTI/EXEC below never commits. The
      // binding is only consumed for sequencing; its value is unused.
      foo <- Observable
        .from(newEventDatums map {
          case (eventId, eventDatum) =>
            val eventCorrectionsKey = eventCorrectionsKeyFrom(eventId)
            val timeToExpireGarbageInSeconds = 5
            redisApi.zadd(s"${eventCorrectionsKey}:${transactionGuid}",
                          nextRevisionPriorToUpdate.toDouble,
                          eventDatum) zip
              redisApi
                .sadd(s"${eventIdsKey}:${transactionGuid}", eventId) zip
              redisApi.expire(s"${eventCorrectionsKey}:${transactionGuid}",
                              timeToExpireGarbageInSeconds) zip
              redisApi.expire(s"${eventIdsKey}:${transactionGuid}",
                              timeToExpireGarbageInSeconds)
        })
        .flatten
        .toList
      // Step 5: merge the staged keys into the live keys and record the as-of,
      // all inside a single Redis transaction.
      transactionStart = toScalaObservable(redisApi.multi())
      transactionBody = Observable
        .from(newEventDatums map {
          case (eventId, eventDatum) =>
            val eventCorrectionsKey = eventCorrectionsKeyFrom(eventId)
            redisApi.zunionstore(
              eventCorrectionsKey,
              eventCorrectionsKey,
              s"${eventCorrectionsKey}:${transactionGuid}") zip
              redisApi.sunionstore(eventIdsKey,
                                   eventIdsKey,
                                   s"${eventIdsKey}:${transactionGuid}")
        })
        .flatten
        .toList zip redisApi.rpush(asOfsKey, asOf)
      transactionEnd = toScalaObservable(redisApi.exec())
      // NASTY HACK: the order of evaluation of the subterms in the next double-zip is vital to ensure that Redis sees
      // the 'multi', body commands and 'exec' verbs in the correct order, even though the processing of the results is
      // handled by ReactiveX, which simply sees three streams of replies.
      _ <- transactionStart zip transactionBody zip transactionEnd
    } yield nextRevisionPriorToUpdate
    revisionObservable.toBlocking.first
  } catch {
    // EncoderException signals a broken client connection state: reset the
    // connection and surface the underlying cause.
    case exception: EncoderException =>
      recoverRedisApi
      throw exception.getCause
    // Empty observable result means EXEC was aborted by the WATCH above.
    case _: NoSuchElementException =>
      throw new RuntimeException(
        "Concurrent revision attempt detected in revision.")
  }
}
/**
 * Resets the Redis client after an encoder failure: tears down and rebuilds the
 * API object, then issues UNWATCH and DISCARD to clear any optimistic-lock /
 * transaction state left on the (new) connection.
 *
 * NOTE(review): the returned Observable is never subscribed to by the caller in
 * this file, so whether UNWATCH/DISCARD are actually sent depends on the Redis
 * client being eager rather than lazy — verify against the client's semantics.
 */
private def recoverRedisApi: Observable[String] = {
  teardownRedisApi()
  setupRedisApi()
  redisApi.unwatch()
  redisApi.discard()
}
}
| sageserpent-open/open-plutonium | src/main/scala/com/sageserpent/plutonium/WorldRedisBasedImplementation.scala | Scala | mit | 12,360 |
// Copyright (C) 2010-2011 Monash University
//
// This file is part of Factotum.
//
// Factotum is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Factotum is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with Factotum. If not, see <http://www.gnu.org/licenses/>.
//
// Designed and implemented by Dmitri Nikulin.
//
// Repository: https://github.com/dnikulin/factotum
// Email: dnikulin+factotum@gmail.com
package bootstrap.liftweb
import _root_.net.liftweb.common._
import _root_.net.liftweb.util._
import _root_.net.liftweb.http._
import _root_.net.liftweb.mapper._
import _root_.net.liftweb.sitemap._
import _root_.net.liftweb.sitemap.Loc._
import _root_.net.liftweb.widgets.tablesorter._
import Helpers._
import com.dnikulin.factotum.model._
import com.dnikulin.factotum.render._
import com.dnikulin.factotum.snippet._
import com.dnikulin.factotum.web._
/**
 * A class that's instantiated early and run by Lift at application start-up.
 * It configures Lift's environment: site map, URL rewrites, dispatch handlers,
 * and the database connection/schema.
 */
class Boot {
  import FactotumWeb.logInfo

  /**
   * Builds the application's site map: login/logout entries (omitted in demo
   * mode) followed by the visible main menus and the hidden workflow/handler
   * menus that back the rewritten URLs configured in [[boot]].
   */
  def menus(): List[ConvertableToMenu] = {
    // Don't expose login/logout in demo mode.
    // (Was a `var`; never reassigned, so `val` is the correct binding.)
    val loginMenus =
      if (FactotumWeb.isDemo) Nil
      else User.sitemap

    val mainMenus = List(
      Menu.i("About") / "index",
      Menu.i("Texts") / "texts",
      Menu.i("Reports") / "reports",
      Menu.i("Classy") / "classy",

      // Hidden workflow menus
      Menu.i("Submit") / "submit" >> Hidden,
      Menu.i("Analyse") / "analyse" >> Hidden,
      Menu.i("Comment") / "comment" >> Hidden,
      Menu.i("Record") / "record" >> Hidden,

      // Hidden handlers for text pages
      Menu.i("View") / "view" >> Hidden,
      Menu.i("Similar") / "similar" >> Hidden,
      Menu.i("Against") / "against" >> Hidden,
      Menu.i("Deep") / "deep" >> Hidden,
      Menu.i("Diff") / "diff" >> Hidden,
      Menu.i("Matches") / "matches" >> Hidden,
      Menu.i("Remove") / "remove" >> Hidden,
      Menu.i("Metrics") / "metrics" >> Hidden,
      Menu.i("Baysect") / "baysect" >> Hidden,

      // Hidden handlers for report pages
      Menu.i("ViewReport") / "viewreport" >> Hidden,
      Menu.i("SaveReport") / "savereport" >> Hidden,
      Menu.i("DelReports") / "delreports" >> Hidden
    )

    loginMenus ++ mainMenus
  }

  /**
   * Lift bootstrap entry point. Wires up request logging, snippet packages,
   * UTF-8 handling, OpenID, URL rewrites to the hidden handler pages, custom
   * dispatch endpoints, widgets, the site map, and the database schema.
   */
  def boot: Unit = {
    S.addAnalyzer((req, time, logs) => logInfo("request took %d ms".format(time)))

    // Snippet package path.
    LiftRules.addToPackages("com.dnikulin.factotum")

    // Force UTF-8 for requests.
    LiftRules.early.append(_.setCharacterEncoding("UTF-8"))

    // Install OpenID integration.
    FactotumOpenIDVendor.installToLift()

    // Map pretty /texts/<hash>/<action> style URLs onto the hidden handler
    // pages declared in menus(), passing the hashes as request parameters.
    LiftRules.statefulRewrite.append {
      case RewriteRequest(ParsePath(List("texts", hash, "view"), _, _, _), _, _) =>
        RewriteResponse(List("view"), Map("text" -> hash))
      case RewriteRequest(ParsePath(List("texts", hash, "metrics"), _, _, _), _, _) =>
        RewriteResponse(List("metrics"), Map("text" -> hash))
      case RewriteRequest(ParsePath(List("texts", hash, "baysect"), _, _, _), _, _) =>
        RewriteResponse(List("baysect"), Map("text" -> hash))
      case RewriteRequest(ParsePath(List("texts", hash, "similar"), _, _, _), _, _) =>
        RewriteResponse(List("similar"), Map("text" -> hash))
      case RewriteRequest(ParsePath(List("texts", hash, "matches"), _, _, _), _, _) =>
        RewriteResponse(List("matches"), Map("text" -> hash))
      case RewriteRequest(ParsePath(List("texts", hash1, "against", hash2), _, _, _), _, _) =>
        RewriteResponse(List("against"), Map("text1" -> hash1, "text2" -> hash2))
      case RewriteRequest(ParsePath(List("texts", hash1, "deep", hash2), _, _, _), _, _) =>
        RewriteResponse(List("deep"), Map("text1" -> hash1, "text2" -> hash2))
      case RewriteRequest(ParsePath(List("texts", hash1, "diff", hash2), _, _, _), _, _) =>
        RewriteResponse(List("diff"), Map("text1" -> hash1, "text2" -> hash2))
      case RewriteRequest(ParsePath(List("texts", hash, "remove"), _, _, _), _, _) =>
        RewriteResponse(List("remove"), Map("text" -> hash))
      case RewriteRequest(ParsePath(List("texts", hash, "matchsets"), _, _, _), _, _) =>
        RewriteResponse(List("sectionmatches"), Map("text" -> hash))
      case RewriteRequest(ParsePath(List("report", "view"), _, _, _), _, _) =>
        RewriteResponse(List("viewreport"))
      case RewriteRequest(ParsePath(List("report", "save"), _, _, _), _, _) =>
        RewriteResponse(List("savereport"))
      case RewriteRequest(ParsePath(List("reports", hash, "view"), _, _, _), _, _) =>
        RewriteResponse(List("viewreport"), Map("report" -> hash))
    }

    // Custom HTTP dispatch endpoints.
    ReportLatex.installDispatch()
    Retrieve.installDispatch()
    SectionMarker.installDispatch()
    SectionPairMarker.installDispatch()
    TextLatex.installDispatch()

    // Install table sorter widget.
    TableSorter.init()

    // Generate site map in menus() function
    LiftRules.setSiteMap(SiteMap(menus:_*))

    // Database connection and schema creation/migration.
    val vendor = new DataVendor(FactotumWeb.rootPath)
    DB.defineConnectionManager(DefaultConnectionIdentifier, vendor)
    Schemifier.schemify(true, Schemifier.infoF _,
                        User, StoredText, StoredReport)

    FactotumWeb.startup()
  }
}
| dnikulin/factotum | src/main/scala/bootstrap/liftweb/Boot.scala | Scala | agpl-3.0 | 5,771 |
package scalacookbook.chapter19
import scala._
/**
 * Created by liguodong on 2016/8/24.
 */
object CreateATimer extends App {

  /**
   * Example 1: Creating a Timer.
   *
   * Runs `blockOfCode` once and returns its result paired with the elapsed
   * wall-clock time in milliseconds (measured with System.nanoTime).
   *
   * @param blockOfCode by-name block to execute and time
   * @return (result of the block, elapsed milliseconds as a Double)
   */
  def timer[A](blockOfCode: => A): (A, Double) = {
    val startTime = System.nanoTime
    val result = blockOfCode
    val stopTime = System.nanoTime
    val delta = stopTime - startTime
    (result, delta / 1000000d)
  }

  // create a similar timer method in Scala to let you run code
  // val (result, time) = timer(someLongRunningAlgorithm)

  // the timer runs a method named longRunningAlgorithm
  val (result, time) = timer {
    Thread.sleep(500)
    1
  }
  println(s"result: $result, time: $time")

  val (result2, time2) = timer { println("Hello") }
  println(s"result2: $result2, time2: $time2")

  /**
   * Reads all lines of a file.
   *
   * Fix: the original returned `io.Source.fromFile(filename).getLines`, a lazy
   * iterator over a Source that was never closed — a file-handle leak. Read the
   * lines eagerly and close the source deterministically instead.
   */
  def readFile(filename: String): List[String] = {
    val source = io.Source.fromFile(filename)
    try source.getLines().toList
    finally source.close()
  }

  val (result3, time3) = timer { readFile("d:\\\\temp.txt") }
  println(s"result3: $result3, time3: $time3")
}
} | liguodongIOT/java-scala-mix-sbt | src/main/scala/scalacookbook/chapter19/CreateATimer.scala | Scala | apache-2.0 | 1,004 |
package fr.laas.fape.anml.model.abs.statements
import fr.laas.fape.anml.ANMLException
import fr.laas.fape.anml.model.{Context, _}
import fr.laas.fape.anml.model.abs.time.{AbsTP, AbstractTemporalAnnotation, IntervalEnd, IntervalStart}
import fr.laas.fape.anml.model.abs.{AbstractExactDelay, AbstractMinDelay}
import fr.laas.fape.anml.model.concrete.statements.{Assignment, LogStatement, Persistence, Transition}
import fr.laas.fape.anml.model.concrete.{Chronicle, RefCounter}
import fr.laas.fape.anml.pending.IntExpression
/** Marker trait for elements that can appear as components of a chronicle. */
// Fix: `abstract` is a redundant modifier on a trait; plain `trait` is idiomatic.
trait ChronicleComponent
/**
 * Base class for abstract (lifted) ANML statements, identified by a local
 * reference and exposing their own start/end timepoints.
 *
 * @param id local reference identifying this statement within its context
 */
abstract class AbstractStatement(val id:LocalRef) extends VarContainer {

  /**
   * Produces the corresponding concrete statement, by replacing all local variables
   * by the global ones defined in Context
   *
   * @param context Context in which this statement appears.
   * @return the concrete counterpart of this statement (type depends on subclass)
   */
  def bind(context:Context, pb:AnmlProblem, container: Chronicle, refCounter: RefCounter) : Any

  /** Abstract timepoint at which this statement's interval starts. */
  def start : AbsTP = IntervalStart(id)

  /** Abstract timepoint at which this statement's interval ends. */
  def end : AbsTP = IntervalEnd(id)

  /**
   * Produces the temporal constraints by applying the temporal annotation to this statement.
   *
   * Dispatches on both the annotation flag ("is" / "contains") and the concrete
   * statement subtype; illegal combinations (instantaneous transitions,
   * non-instantaneous assignments, "contains" on transitions/assignments) throw.
   *
   * NOTE(review): the match only handles the "is" and "contains" flags; any
   * other annotation flag would raise a MatchError — confirm those cannot occur.
   */
  def getTemporalConstraints(annot : AbstractTemporalAnnotation) : List[AbstractMinDelay] = {
    val stStart = IntervalStart(id)
    val stEnd = IntervalEnd(id)
    (annot, this) match {
      // A transition needs non-zero duration.
      case (AbstractTemporalAnnotation(s, e, "is"), tr:AbstractTransition) if s == e =>
        throw new ANMLException("Instantaneous transitions are not allowed: "+this)
      // An assignment must be instantaneous: pin its end to the annotation's
      // start, with the statement itself lasting exactly one time unit.
      case (AbstractTemporalAnnotation(s, e, "is"), ass:AbstractAssignment) =>
        assert(s == e, "Non instantaneous assignment: "+this)
        AbstractExactDelay(annot.start.timepoint, stEnd, IntExpression.lit(annot.start.delta)) ++
          AbstractExactDelay(stStart, stEnd, IntExpression.lit(1))
      // General "is": the statement's interval coincides exactly with the
      // annotated interval (both endpoints pinned).
      case (AbstractTemporalAnnotation(s, e, "is"), _) =>
        AbstractExactDelay(annot.start.timepoint, stStart, IntExpression.lit(annot.start.delta)) ++
          AbstractExactDelay(annot.end.timepoint, stEnd, IntExpression.lit(annot.end.delta))
      case ((AbstractTemporalAnnotation(_, _, "contains"), s:AbstractTransition)) =>
        throw new ANMLException("The contains annotation is not allowed on transitions ")
      case ((AbstractTemporalAnnotation(_, _, "contains"), s:AbstractAssignment)) =>
        throw new ANMLException("The contains annotation is not allowed on assignments ")
      // "contains": the statement's interval lies inside the annotated interval.
      case (AbstractTemporalAnnotation(s,e,"contains"), i) => List(
        new AbstractMinDelay(s.timepoint, stStart, IntExpression.lit(s.delta)), // start(id) >= start+delta
        new AbstractMinDelay(stEnd, e.timepoint, IntExpression.lit(-e.delta)) // end(id) <= end+delta
      )
    }
  }
}
/**
 * Base class for abstract log statements (assignments, transitions and
 * persistences) on a state variable.
 *
 * The state variable's function must be non-numeric and non-constant
 * (enforced by the `require` checks below).
 *
 * @param sv the state variable this statement reads and/or writes
 * @param id local reference identifying this statement
 */
abstract class AbstractLogStatement(val sv:AbstractParameterizedStateVariable, override val id:LStatementRef)
  extends AbstractStatement(id)
{
  require(!sv.func.valueType.isNumeric, "Error: the function of this LogStatement has an integer value.")
  require(!sv.func.isConstant, "LogStatement on a constant function")

  /** Produces the concrete log statement with local variables replaced by global ones. */
  def bind(context:Context, pb:AnmlProblem, container:Chronicle, refCounter: RefCounter) : LogStatement

  /** True if this statement requires a value for the state variable at its start. */
  def hasConditionAtStart : Boolean

  /** True if this statement leaves a value on the state variable at its end. */
  def hasEffectAtEnd: Boolean

  /** Value required at start; only valid when [[hasConditionAtStart]] is true. */
  def conditionValue : LVarRef

  /** Value produced at end; only valid when [[hasEffectAtEnd]] is true. */
  def effectValue : LVarRef
}
/**
 * Describes an assignment of a state variable to value `statevariable(x, y) := v`.
 *
 * An assignment has no condition at its start (it overwrites whatever value the
 * state variable held) and leaves `value` on the state variable at its end.
 *
 * @param sv State variable getting the assignment
 * @param value value of the state variable after the assignment
 * @param id local reference identifying this statement
 */
class AbstractAssignment(sv:AbstractParameterizedStateVariable, val value:LVarRef, id:LStatementRef)
  extends AbstractLogStatement(sv, id)
{
  override def bind(context:Context, pb:AnmlProblem, container:Chronicle, refCounter: RefCounter) =
    new Assignment(sv.bind(context), context.getGlobalVar(value), container, refCounter)

  override def toString = "%s := %s".format(sv, value)

  override def hasConditionAtStart: Boolean = false

  // Bug fix: the original message read "Assignments have conditions at start",
  // asserting the exact opposite of hasConditionAtStart == false.
  override def conditionValue: LVarRef = throw new ANMLException("Assignments have no conditions at start")

  override def effectValue: LVarRef = value
  override def hasEffectAtEnd: Boolean = true

  override def getAllVars: Set[LVarRef] = sv.getAllVars ++ value.getAllVars
}
/**
 * A transition `sv == from :-> to`: requires value `from` on the state variable
 * at its start and leaves value `to` on it at its end.
 *
 * @param sv the state variable undergoing the transition
 * @param from value required at the start
 * @param to value produced at the end
 * @param id local reference identifying this statement
 */
class AbstractTransition(sv:AbstractParameterizedStateVariable, val from:LVarRef, val to:LVarRef, id:LStatementRef)
  extends AbstractLogStatement(sv, id)
{
  override def bind(context: Context, pb: AnmlProblem, container: Chronicle, refCounter: RefCounter) = {
    val globalFrom = context.getGlobalVar(from)
    val globalTo = context.getGlobalVar(to)
    new Transition(sv.bind(context), globalFrom, globalTo, container, refCounter)
  }

  override def toString = s"$sv == $from :-> $to"

  override def hasConditionAtStart: Boolean = true
  override def hasEffectAtEnd: Boolean = true

  override def conditionValue: LVarRef = from
  override def effectValue: LVarRef = to

  override def getAllVars: Set[LVarRef] = sv.getAllVars ++ from.getAllVars ++ to.getAllVars
}
/**
 * A persistence `sv == value`: requires the state variable to hold `value`
 * throughout its interval. It has a condition at its start but no effect at
 * its end.
 *
 * @param sv the state variable being observed
 * @param value the value the state variable must hold
 * @param id local reference identifying this statement
 */
class AbstractPersistence(sv:AbstractParameterizedStateVariable, val value:LVarRef, id:LStatementRef)
  extends AbstractLogStatement(sv, id)
{
  override def bind(context: Context, pb: AnmlProblem, container: Chronicle, refCounter: RefCounter) = {
    val globalValue = context.getGlobalVar(value)
    new Persistence(sv.bind(context), globalValue, container, refCounter)
  }

  override def toString = s"$sv == $value"

  override def hasConditionAtStart: Boolean = true
  override def hasEffectAtEnd: Boolean = false

  override def conditionValue: LVarRef = value
  override def effectValue: LVarRef = throw new ANMLException("Persistences have no effects at end")

  override def getAllVars: Set[LVarRef] = sv.getAllVars ++ value.getAllVars
}
| athy/fape | anml-parser/src/main/scala/fr/laas/fape/anml/model/abs/statements/AbstractStatement.scala | Scala | bsd-2-clause | 5,606 |
package com.twitter.finagle.ssl
import java.util.logging.Logger
import java.security.cert.X509Certificate
import javax.net.ssl._
import collection.mutable.{Map => MutableMap}
/*
 * Creates JSSE SSLEngines on behalf of the Ssl singleton
 */
object JSSE {
  private[this] val log = Logger.getLogger(getClass.getName)

  // Cache of server SSLContexts, keyed on certificate + key + CA cert paths.
  // Guarded by `synchronized` in server() below.
  private[this] val contextCache: MutableMap[String, SSLContext] = MutableMap.empty
  private[this] val protocol = "TLS"

  // Default client context, initialized with the JVM's default key/trust managers.
  private[this] lazy val defaultSSLContext: SSLContext = {
    val ctx = SSLContext.getInstance(protocol)
    ctx.init(null, null, null)
    ctx
  }

  /**
   * Get an SSL server via JSSE
   *
   * @param certificatePath The path to the PEM encoded certificate file
   * @param keyPath The path to the corresponding PEM encoded key file
   * @param caCertPath The path to the optional PEM encoded CA cert file.
   *   If caCertPath is set, use it in setting up the connection instead of
   *   certificatePath. The cert chain should contain the certificate.
   * @param useCache Use a cache of SSL contexts, keyed on certificatePath
   * @throws RuntimeException if no provider could be initialized
   * @return an SSLEngine
   */
  private[finagle] def server(
    certificatePath: String,
    keyPath: String,
    caCertPath: Option[String],
    useCache: Boolean = true
  ): Option[Engine] = {
    // Builds a fresh context from the PEM-encoded key material.
    def makeContext: SSLContext = {
      val context = SSLContext.getInstance(protocol)
      val kms = PEMEncodedKeyManager(
        certificatePath,
        keyPath,
        caCertPath)
      context.init(kms, null, null)
      log.finest("JSSE context instantiated for certificate '%s'".format(
        certificatePath
      ))
      context
    }

    val context = synchronized {
      if (useCache)
        contextCache.getOrElseUpdate(
          List(certificatePath, keyPath, caCertPath).mkString(" + "),
          makeContext
        )
      else
        makeContext
    }

    Some(new Engine(context.createSSLEngine()))
  }

  /**
   * Get a client
   */
  def client(): Engine = new Engine(defaultSSLContext.createSSLEngine())

  /**
   * Get a client from the given Context
   */
  def client(ctx: SSLContext): Engine = {
    // Idiom fix: dropped the Java-style trailing semicolons.
    val sslEngine = ctx.createSSLEngine()
    sslEngine.setUseClientMode(true)
    new Engine(sslEngine)
  }

  /**
   * Get a client that skips verification of certificates.
   *
   * Security Warning: This defeats the purpose of SSL.
   */
  def clientWithoutCertificateValidation(): Engine =
    client(trustAllCertificates())

  private[this] def client(trustManagers: Array[TrustManager]): Engine = {
    val ctx = SSLContext.getInstance(protocol)
    ctx.init(null, trustManagers, null)
    new Engine(ctx.createSSLEngine())
  }

  /**
   * @return a trust manager chain that does not validate certificates
   */
  private[this] def trustAllCertificates(): Array[TrustManager] =
    Array(new IgnorantTrustManager)

  /**
   * A trust manager that does not validate anything
   */
  private[this] class IgnorantTrustManager extends X509TrustManager {
    def getAcceptedIssuers(): Array[X509Certificate] = new Array[X509Certificate](0)

    // Idiom fix: replaced deprecated procedure syntax (`def f(...) {}`) with
    // an explicit `: Unit =` result type on both methods below.
    def checkClientTrusted(certs: Array[X509Certificate], authType: String): Unit = {
      // Do nothing.
    }

    def checkServerTrusted(certs: Array[X509Certificate], authType: String): Unit = {
      // Do nothing.
    }
  }
}
| firebase/finagle | finagle-core/src/main/scala/com/twitter/finagle/ssl/JSSE.scala | Scala | apache-2.0 | 3,338 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.runtime.stream.table
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.table.api.scala._
import org.apache.flink.table.runtime.utils.{StreamITCase, StreamingWithStateTestBase}
import org.apache.flink.table.utils.TableFunc0
import org.apache.flink.types.Row
import org.junit.Assert._
import org.junit.Test
/**
 * tests for retraction
 *
 * Each test builds a streaming Table API pipeline whose downstream aggregation
 * consumes retraction (update/delete) messages produced by an upstream
 * aggregation, and checks the materialized results against expected values.
 */
class RetractionITCase extends StreamingWithStateTestBase {
  // input data: (word, count) pairs; "bark" appears 6 times, "Hello" twice.
  val data = List(
    ("Hello", 1),
    ("word", 1),
    ("Hello", 1),
    ("bark", 1),
    ("bark", 1),
    ("bark", 1),
    ("bark", 1),
    ("bark", 1),
    ("bark", 1),
    ("flink", 1)
  )

  // keyed groupby + keyed groupby
  // Word-count frequencies: counts per word, then how many words share each count.
  @Test
  def testWordCount(): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    val tEnv = StreamTableEnvironment.create(env)
    StreamITCase.clear
    env.setStateBackend(getStateBackend)

    val stream = env.fromCollection(data)
    val table = stream.toTable(tEnv, 'word, 'num)
    val resultTable = table
      .groupBy('word)
      .select('num.sum as 'count)
      .groupBy('count)
      .select('count, 'count.count as 'frequency)

    val results = resultTable.toRetractStream[Row]
    results.addSink(new StreamITCase.RetractingSink)
    env.execute()

    // count=1 for 2 words (word, flink), count=2 for 1 word (Hello), count=6 for 1 (bark)
    val expected = Seq("1,2", "2,1", "6,1")
    assertEquals(expected.sorted, StreamITCase.retractedResults.sorted)
  }

  // keyed groupby + non-keyed groupby
  // Per-word sums re-aggregated into a single global sum.
  @Test
  def testGroupByAndNonKeyedGroupBy(): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    val tEnv = StreamTableEnvironment.create(env)
    StreamITCase.clear
    env.setStateBackend(getStateBackend)

    val stream = env.fromCollection(data)
    val table = stream.toTable(tEnv, 'word, 'num)
    val resultTable = table
      .groupBy('word)
      .select('word as 'word, 'num.sum as 'cnt)
      .select('cnt.sum)

    val results = resultTable.toRetractStream[Row]
    results.addSink(new StreamITCase.RetractingSink).setParallelism(1)
    env.execute()

    // Global sum of all 10 input counts.
    val expected = Seq("10")
    assertEquals(expected.sorted, StreamITCase.retractedResults.sorted)
  }

  // non-keyed groupby + keyed groupby
  // A global sum, then a grouping on that single-valued result.
  @Test
  def testNonKeyedGroupByAndGroupBy(): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    val tEnv = StreamTableEnvironment.create(env)
    StreamITCase.clear
    env.setStateBackend(getStateBackend)

    val stream = env.fromCollection(data)
    val table = stream.toTable(tEnv, 'word, 'num)
    val resultTable = table
      .select('num.sum as 'count)
      .groupBy('count)
      .select('count, 'count.count)

    val results = resultTable.toRetractStream[Row]
    results.addSink(new StreamITCase.RetractingSink).setParallelism(1)
    env.execute()

    val expected = Seq("10,1")
    assertEquals(expected.sorted, StreamITCase.retractedResults.sorted)
  }

  // test unique process, if the current output message of unbounded groupby equals the
  // previous message, unbounded groupby will ignore the current one.
  @Test
  def testUniqueProcess(): Unit = {
    // data input: (pk, value) pairs; repeated and zero-valued updates exercise
    // the suppression of duplicate output messages.
    val data = List(
      (1, 1L),
      (2, 2L),
      (3, 3L),
      (3, 3L),
      (4, 1L),
      (4, 0L),
      (4, 0L),
      (4, 0L),
      (5, 1L),
      (6, 6L),
      (6, 6L),
      (6, 6L),
      (7, 8L)
    )
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    val tEnv = StreamTableEnvironment.create(env)
    StreamITCase.clear
    env.setStateBackend(getStateBackend)
    // Parallelism 1 so the raw +/- message sequence below is deterministic.
    env.setParallelism(1)

    val stream = env.fromCollection(data)
    val table = stream.toTable(tEnv, 'pk, 'value)
    val resultTable = table
      .groupBy('pk)
      .select('pk as 'pk, 'value.sum as 'sum)
      .groupBy('sum)
      .select('sum, 'pk.count as 'count)

    val results = resultTable.toRetractStream[Row]
    results.addSink(new StreamITCase.RetractMessagesSink)
    env.execute()

    // Raw accumulate (+) / retract (-) messages, in emission order.
    val expected = Seq(
      "+1,1", "+2,1", "+3,1", "-3,1", "+6,1", "-1,1", "+1,2", "-1,2", "+1,3", "-6,1", "+6,2",
      "-6,2", "+6,1", "+12,1", "-12,1", "+18,1", "+8,1")
    assertEquals(expected.sorted, StreamITCase.testResults.sorted)
  }

  // correlate should handle retraction messages correctly
  @Test
  def testCorrelate(): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    val tEnv = StreamTableEnvironment.create(env)
    StreamITCase.clear
    env.setStateBackend(getStateBackend)
    val func0 = new TableFunc0

    val stream = env.fromCollection(data)
    val table = stream.toTable(tEnv, 'word, 'num)
    val resultTable = table
      .groupBy('word)
      .select('word as 'word, 'num.sum as 'cnt)
      .leftOuterJoinLateral(func0('word))
      .groupBy('cnt)
      .select('cnt, 'word.count as 'frequency)

    val results = resultTable.toRetractStream[Row]
    results.addSink(new StreamITCase.RetractingSink)
    env.execute()

    // Same frequencies as testWordCount; the lateral join must not disturb them.
    val expected = Seq("1,2", "2,1", "6,1")
    assertEquals(expected.sorted, StreamITCase.retractedResults.sorted)
  }
}
| fhueske/flink | flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/table/RetractionITCase.scala | Scala | apache-2.0 | 5,886 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.optimizer
import org.apache.spark.sql.catalyst.analysis.{EmptyFunctionRegistry, FakeV2SessionCatalog, UnresolvedAttribute}
import org.apache.spark.sql.catalyst.catalog.{InMemoryCatalog, SessionCatalog}
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.errors.TreeNodeException
import org.apache.spark.sql.catalyst.expressions.{Alias, Literal, NamedExpression}
import org.apache.spark.sql.catalyst.plans.PlanTest
import org.apache.spark.sql.catalyst.plans.logical.{Aggregate, LocalRelation, LogicalPlan, OneRowRelation, Project}
import org.apache.spark.sql.catalyst.rules._
import org.apache.spark.sql.connector.catalog.CatalogManager
import org.apache.spark.sql.internal.SQLConf
/**
 * Verifies that the optimizer detects structurally invalid plans — both plans
 * broken by a rule during execution and plans that are invalid before any rule
 * runs — and reports which rule/batch broke them.
 */
class OptimizerStructuralIntegrityCheckerSuite extends PlanTest {

  /**
   * A deliberately broken rule: it introduces an unresolved attribute into
   * Projects, and rewrites aggregates into Projects that would illegally host
   * AggregateExpressions.
   */
  object OptimizeRuleBreakSI extends Rule[LogicalPlan] {
    def apply(plan: LogicalPlan): LogicalPlan = plan transform {
      case Project(projectList, child) =>
        val newAttr = UnresolvedAttribute("unresolvedAttr")
        Project(projectList ++ Seq(newAttr), child)
      case agg @ Aggregate(Nil, aggregateExpressions, child) =>
        // Project cannot host AggregateExpression
        Project(aggregateExpressions, child)
    }
  }

  /** An optimizer with the broken rule prepended to the default batches. */
  object Optimize extends Optimizer(
    new CatalogManager(
      new SQLConf(),
      FakeV2SessionCatalog,
      new SessionCatalog(new InMemoryCatalog, EmptyFunctionRegistry, new SQLConf()))) {
    val newBatch = Batch("OptimizeRuleBreakSI", Once, OptimizeRuleBreakSI)
    override def defaultBatches: Seq[Batch] = Seq(newBatch) ++ super.defaultBatches
  }

  test("check for invalid plan after execution of rule - unresolved attribute") {
    val analyzed = Project(Alias(Literal(10), "attr")() :: Nil, OneRowRelation()).analyze
    assert(analyzed.resolved)
    // The broken rule injects an unresolved attribute; the integrity checker
    // must attribute the failure to that rule and batch.
    val message = intercept[TreeNodeException[LogicalPlan]] {
      Optimize.execute(analyzed)
    }.getMessage
    val ruleName = OptimizeRuleBreakSI.ruleName
    assert(message.contains(s"After applying rule $ruleName in batch OptimizeRuleBreakSI"))
    assert(message.contains("the structural integrity of the plan is broken"))
  }

  test("check for invalid plan after execution of rule - special expression in wrong operator") {
    val analyzed =
      Aggregate(Nil, Seq[NamedExpression](max('id) as 'm),
        LocalRelation('id.long)).analyze
    assert(analyzed.resolved)

    // Should fail verification with the OptimizeRuleBreakSI rule
    val message = intercept[TreeNodeException[LogicalPlan]] {
      Optimize.execute(analyzed)
    }.getMessage
    val ruleName = OptimizeRuleBreakSI.ruleName
    assert(message.contains(s"After applying rule $ruleName in batch OptimizeRuleBreakSI"))
    assert(message.contains("the structural integrity of the plan is broken"))

    // Should not fail verification with the regular optimizer
    SimpleTestOptimizer.execute(analyzed)
  }

  test("check for invalid plan before execution of any rule") {
    val analyzed =
      Aggregate(Nil, Seq[NamedExpression](max('id) as 'm),
        LocalRelation('id.long)).analyze
    // Break the plan up-front, before handing it to the optimizer.
    val invalidPlan = OptimizeRuleBreakSI.apply(analyzed)

    // Should fail verification right at the beginning
    val message = intercept[TreeNodeException[LogicalPlan]] {
      Optimize.execute(invalidPlan)
    }.getMessage
    assert(message.contains("The structural integrity of the input plan is broken"))
  }
}
| goldmedal/spark | sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizerStructuralIntegrityCheckerSuite.scala | Scala | apache-2.0 | 4,286 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.index.index.legacy
import java.nio.charset.StandardCharsets
import java.util.{Locale, Collection => JCollection}
import com.google.common.primitives.{Bytes, Shorts, UnsignedBytes}
import com.typesafe.scalalogging.LazyLogging
import org.calrissian.mango.types.{LexiTypeEncoders, TypeRegistry}
import org.geotools.data.DataUtilities
import org.geotools.factory.Hints
import org.geotools.feature.simple.SimpleFeatureTypeBuilder
import org.geotools.util.Converters
import org.locationtech.geomesa.filter._
import org.locationtech.geomesa.index.api.{FilterStrategy, GeoMesaFeatureIndex, QueryPlan, WrappedFeature}
import org.locationtech.geomesa.index.conf.TableSplitter
import org.locationtech.geomesa.index.conf.splitter.DefaultSplitter
import org.locationtech.geomesa.index.geotools.GeoMesaDataStore
import org.locationtech.geomesa.index.index.IndexKeySpace.BoundedByteRange
import org.locationtech.geomesa.index.index.attribute.AttributeIndex
import org.locationtech.geomesa.index.index.attribute.AttributeIndex.AttributeRowDecoder
import org.locationtech.geomesa.index.index.z2.{XZ2IndexKeySpace, Z2IndexKeySpace}
import org.locationtech.geomesa.index.index.z3.{XZ3IndexKeySpace, Z3IndexKeySpace}
import org.locationtech.geomesa.index.index.{IndexAdapter, IndexKeySpace}
import org.locationtech.geomesa.index.strategies.AttributeFilterStrategy
import org.locationtech.geomesa.index.utils.{Explainer, SplitArrays}
import org.locationtech.geomesa.utils.geotools.RichAttributeDescriptors.RichAttributeDescriptor
import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.locationtech.geomesa.utils.index.ByteArrays
import org.opengis.feature.`type`.AttributeDescriptor
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
import org.opengis.filter._
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
import scala.util.Try
/**
* Attribute index with secondary z-curve indexing. Z-indexing is based on the sft and will be
* one of Z3, XZ3, Z2, XZ2. Shards come after the attribute number, instead of before it.
*/
trait AttributeShardedIndex[DS <: GeoMesaDataStore[DS, F, W], F <: WrappedFeature, W, R, C]
extends GeoMesaFeatureIndex[DS, F, W] with IndexAdapter[DS, F, W, R, C] with AttributeFilterStrategy[DS, F, W]
with AttributeRowDecoder with LazyLogging {
import AttributeShardedIndex._
import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
// This index registers under the standard attribute-index name.
override val name: String = AttributeIndex.Name

// The index applies to any feature type with at least one indexed attribute.
override def supports(sft: SimpleFeatureType): Boolean =
  sft.getAttributeDescriptors.exists(_.isIndexed)
/**
 * Builds the insert-mutation function for this index: each feature is expanded
 * into one insert per attribute row key (strict key encoding).
 */
override def writer(sft: SimpleFeatureType, ds: DS): (F) => Seq[W] = {
  val rowsFor = getRowKeys(sft, lenient = false)
  feature => rowsFor(feature).map(indexAndRow => createInsert(indexAndRow._2, feature))
}
/**
 * Builds the delete-mutation function for this index: each feature is expanded
 * into one delete per attribute row key. Lenient key encoding is used so rows
 * written under older encodings can still be matched for deletion.
 */
override def remover(sft: SimpleFeatureType, ds: DS): (F) => Seq[W] = {
  val rowsFor = getRowKeys(sft, lenient = true)
  feature => rowsFor(feature).map(indexAndRow => createDelete(indexAndRow._2, feature))
}
/**
 * Returns a function that extracts the feature ID from a serialized row.
 *
 * Row layout (from the offsets computed below): [table-sharing byte?]
 * [2 index bytes] [shard bytes] [encoded attribute value] [null byte]
 * [secondary index key] [feature id]. The ID starts one past the null
 * terminator of the encoded value plus the secondary key length.
 */
override def getIdFromRow(sft: SimpleFeatureType): (Array[Byte], Int, Int, SimpleFeature) => String = {
  val idFromBytes = GeoMesaFeatureIndex.idFromBytes(sft)
  // drop the encoded value and the date field (12 bytes) if it's present - the rest of the row is the ID
  val shard = getShards(sft).head.length
  // exclude feature byte and 2 index bytes and shard bytes
  val from = if (sft.isTableSharing) { 3 + shard } else { 2 + shard }
  val secondary = getSecondaryIndexKeyLength(sft)
  (row, offset, length, feature) => {
    // Scan for the null byte terminating the encoded attribute value, then
    // skip the secondary index key; the remainder of the row is the ID.
    val start = row.indexOf(NullByte, from + offset) + secondary + 1
    idFromBytes(row, start, length + offset - start, feature)
  }
}
/**
 * Returns a function that decodes the lexicoded attribute value back out of a
 * serialized row, for the attribute at `index` in the feature type.
 *
 * The encoded value starts after the prefix bytes (sharing byte, 2 index
 * bytes, shard bytes) and is terminated by a null byte.
 */
override def decodeRowValue(sft: SimpleFeatureType, index: Int): (Array[Byte], Int, Int) => Try[AnyRef] = {
  val shard = getShards(sft).head.length
  // exclude feature byte and 2 index bytes and shard bytes
  val from = if (sft.isTableSharing) { 3 + shard } else { 2 + shard }
  val descriptor = sft.getDescriptor(index)
  // Choose the decoder up-front based on whether the attribute is a collection.
  val decode: (String) => AnyRef = if (descriptor.isList) {
    // get the alias from the type of values in the collection
    val alias = descriptor.getListType().getSimpleName.toLowerCase(Locale.US)
    // Note that for collection types, only a single entry of the collection will be decoded - this is
    // because the collection entries have been broken up into multiple rows
    (encoded) => Seq(typeRegistry.decode(alias, encoded)).asJava
  } else {
    val alias = descriptor.getType.getBinding.getSimpleName.toLowerCase(Locale.US)
    typeRegistry.decode(alias, _)
  }
  (row, offset, length) => Try {
    val valueStart = offset + from // start of the encoded value
    val end = offset + length // end of the row, max search space
    var valueEnd = valueStart // end of the encoded value
    while (valueEnd < end && row(valueEnd) != NullByte) { // null byte indicates end of value
      valueEnd += 1
    }
    decode(new String(row, valueStart, valueEnd - valueStart, StandardCharsets.UTF_8))
  }
}
/**
 * Computes the initial table split points for this index: one split per
 * (indexed attribute, shard, configured split) combination, each prefixed with
 * the table-sharing bytes and the attribute's index bytes.
 */
override def getSplits(sft: SimpleFeatureType): Seq[Array[Byte]] = {
  // Ensure we always have at least one (empty) element so the cross products
  // below are never empty.
  def nonEmpty(bytes: Seq[Array[Byte]]): Seq[Array[Byte]] = if (bytes.nonEmpty) { bytes } else { Seq(Array.empty) }

  val sharing = sft.getTableSharingBytes
  val indices = SimpleFeatureTypes.getSecondaryIndexedAttributes(sft).map(d => sft.indexOf(d.getLocalName))
  val shards = nonEmpty(getShards(sft))
  val splitter = sft.getTableSplitter.getOrElse(classOf[DefaultSplitter]).newInstance().asInstanceOf[TableSplitter]

  val result = indices.flatMap { indexOf =>
    // Build a single-attribute feature type so the splitter only considers
    // this attribute when producing split values.
    val singleAttributeType = {
      val builder = new SimpleFeatureTypeBuilder()
      builder.setName(sft.getName)
      builder.add(sft.getDescriptor(indexOf))
      builder.buildFeatureType()
    }
    val bytes = indexToBytes(indexOf)
    val splits = nonEmpty(splitter.getSplits(singleAttributeType, name, sft.getTableSplitterOptions))
    // Cross product: one split row per (shard, split value).
    for (shard <- shards; split <- splits) yield {
      Bytes.concat(sharing, bytes, shard, split)
    }
  }
  // if not sharing, or the first feature in the table, drop the first split, which will otherwise be empty
  if (sharing.isEmpty || sharing.head == 0.toByte) {
    result.drop(1)
  } else {
    result
  }
}
override def getQueryPlan(sft: SimpleFeatureType,
ds: DS,
filter: FilterStrategy[DS, F, W],
hints: Hints,
explain: Explainer): QueryPlan[DS, F, W] = {
import org.locationtech.geomesa.utils.conversions.ScalaImplicits.RichTraversableOnce
// the attribute index requires a primary filter clause on the indexed attribute
val primary = filter.primary.getOrElse {
throw new IllegalStateException("Attribute index does not support Filter.INCLUDE")
}
// pull out any dates from the filter to help narrow down the attribute ranges
val secondaryRanges = filter.secondary.map(getSecondaryIndexRanges(sft, _, explain)).getOrElse(Seq.empty)
// TODO GEOMESA-1336 fix exclusive AND handling for list types
// the primary clause must reference exactly one attribute
val attribute = {
val names = DataUtilities.attributeNames(primary)
require(names.length == 1, s"Couldn't extract single attribute name from filter '${filterToString(primary)}'")
names(0)
}
val i = sft.indexOf(attribute)
require(i != -1, s"Attribute '$attribute' from filter '${filterToString(primary)}' does not exist in '$sft'")
// for list-type attributes, bounds are extracted against the element type, not the collection type
val binding = {
val descriptor = sft.getDescriptor(i)
if (descriptor.isList) { descriptor.getListType() } else { descriptor.getType.getBinding }
}
require(classOf[Comparable[_]].isAssignableFrom(binding), s"Attribute '$attribute' is not comparable")
val shards = getShards(sft)
val fb = FilterHelper.extractAttributeBounds(primary, attribute, binding)
if (fb.isEmpty) {
// we have an attribute, but weren't able to extract any bounds... scan all values and apply the filter
logger.warn(s"Unable to extract any attribute bounds from: ${filterToString(primary)}")
val starts = lowerBounds(sft, i, shards)
val ends = upperBounds(sft, i, shards)
val ranges = shards.indices.map(i => createRange(starts(i), ends(i)))
scanPlan(sft, ds, filter, scanConfig(sft, ds, filter, ranges, filter.filter, hints))
} else {
// unsigned lexicographic ordering matches the byte ordering of the row keys
val ordering = Ordering.comparatorToOrdering(UnsignedBytes.lexicographicalComparator)
// tightest single (lo, hi) secondary bounds, used to trim the attribute-value ranges below
lazy val lowerSecondary = secondaryRanges.map(_._1).minOption(ordering).getOrElse(Array.empty)
lazy val upperSecondary = secondaryRanges.map(_._2).maxOption(ordering).getOrElse(Array.empty)
val ranges = fb.values.flatMap { bounds =>
bounds.bounds match {
case (None, None) => // not null
val starts = lowerBounds(sft, i, shards)
val ends = upperBounds(sft, i, shards)
shards.indices.map(i => createRange(starts(i), ends(i)))
// lower-bounded only: scan from the encoded lower value to the end of this attribute
case (Some(lower), None) =>
val starts = startRows(sft, i, shards, lower, bounds.lower.inclusive, lowerSecondary)
val ends = upperBounds(sft, i, shards)
shards.indices.map(i => createRange(starts(i), ends(i)))
// upper-bounded only: scan from the start of this attribute to the encoded upper value
case (None, Some(upper)) =>
val starts = lowerBounds(sft, i, shards)
val ends = endRows(sft, i, shards, upper, bounds.upper.inclusive, upperSecondary)
shards.indices.map(i => createRange(starts(i), ends(i)))
case (Some(lower), Some(upper)) =>
if (lower == upper) {
// equality query - can use the full set of secondary ranges, not just the endpoints
equals(sft, i, shards, lower, secondaryRanges)
} else if (lower + WILDCARD_SUFFIX == upper) {
// 'starts-with' style query - one prefix range per shard covering all rows that begin
// with the encoded value
val prefix = rowPrefix(sft, i)
val value = encodeForQuery(lower, sft.getDescriptor(i))
shards.map { shard =>
val p = Bytes.concat(prefix, shard, value)
createRange(p, ByteArrays.rowFollowingPrefix(p))
}
} else {
// fully bounded range over the encoded values
val starts = startRows(sft, i, shards, lower, bounds.lower.inclusive, lowerSecondary)
val ends = endRows(sft, i, shards, upper, bounds.upper.inclusive, upperSecondary)
shards.indices.map(i => createRange(starts(i), ends(i)))
}
}
}
// if the extracted bounds fully cover the primary clause, only the secondary filter (if any)
// needs to be re-evaluated per feature; otherwise re-apply the whole filter
val ecql = if (fb.precise) { filter.secondary } else { filter.filter }
scanPlan(sft, ds, filter, scanConfig(sft, ds, filter, ranges, ecql, hints))
}
}
/**
* Shards to use for attribute indices. Subclasses can override to disable shards by returning
* `IndexedSeq(Array.empty[Byte])`
*
* @param sft simple feature type
* @return shard prefix bytes, one entry per shard; falls back to `SplitArrays.EmptySplits`
*         when fewer than 2 shards are configured
*/
protected def getShards(sft: SimpleFeatureType): IndexedSeq[Array[Byte]] =
Option(sft.getAttributeShards).filter(_ > 1).map(SplitArrays.apply).getOrElse(SplitArrays.EmptySplits)
/**
* Rows in the attribute table have the following layout:
*
* - 1 byte identifying the sft (OPTIONAL - only if table is shared)
* - 2 bytes storing the index of the attribute in the sft
* - 1 byte shard (OPTIONAL)
* - n bytes storing the lexicoded attribute value
* - NULLBYTE as a separator
* - n bytes storing the secondary z-index of the feature - identified by getSecondaryIndexKeyLength
* - n bytes storing the feature ID
*
* Creates a function producing all row keys for a written feature: one row per indexed
* attribute, and one row per (non-null) entry for list-type attributes.
*/
protected def getRowKeys(sft: SimpleFeatureType, lenient: Boolean): (F) => Seq[(Int, Array[Byte])] = {
val prefix = sft.getTableSharingBytes
val getSecondaryKey = getSecondaryIndexKey(sft, lenient)
// shard selection: constant when there is a single shard, otherwise chosen by feature-id hash
val getShard: (F) => Array[Byte] = {
val shards = getShards(sft)
if (shards.length == 1) {
val shard = shards.head
(_) => shard
} else {
(wf: F) => shards(wf.idHash % shards.length)
}
}
// pre-compute (descriptor, attribute index, encoded index bytes) for each indexed attribute
val indexedAttributes = SimpleFeatureTypes.getSecondaryIndexedAttributes(sft).map { d =>
val i = sft.indexOf(d.getName)
(d, i, indexToBytes(i))
}
(wf) => {
val secondary = getSecondaryKey(wf)
val shard = getShard(wf)
indexedAttributes.flatMap { case (descriptor, idx, idxBytes) =>
val attributes = encodeForIndex(wf.feature.getAttribute(idx), descriptor)
attributes.map(a => (idx, Bytes.concat(prefix, idxBytes, shard, a, NullByteArray, secondary, wf.idBytes)))
}
}
}
// finds the first secondary index key space that supports the schema - preference order is
// z3, xz3, z2, xz2 (spatio-temporal spaces before purely spatial ones)
protected def secondaryIndex(sft: SimpleFeatureType): Option[IndexKeySpace[_, _]] =
Seq(Z3IndexKeySpace, XZ3IndexKeySpace, Z2IndexKeySpace, XZ2IndexKeySpace).find(_.supports(sft))
// length in bytes of the secondary index key portion of the row, or 0 when no secondary index applies
protected def getSecondaryIndexKeyLength(sft: SimpleFeatureType): Int =
secondaryIndex(sft).map(_.indexKeyByteLength).getOrElse(0)
// ranges for querying - equals
private def equals(sft: SimpleFeatureType,
i: Int,
shards: Seq[Array[Byte]],
value: Any,
secondary: Seq[(Array[Byte], Array[Byte])]): Seq[R] = {
val prefixes = {
val sharing = sft.getTableSharingBytes
val index = indexToBytes(i)
shards.map(shard => Bytes.concat(sharing, index, shard))
}
val encoded = encodeForQuery(value, sft.getDescriptor(i))
if (secondary.isEmpty) {
// if no secondary ranges, use a prefix range terminated with a null byte to match all secondary values
prefixes.map { prefix =>
val p = Bytes.concat(prefix, encoded, NullByteArray)
createRange(p, ByteArrays.rowFollowingPrefix(p))
}
} else {
prefixes.flatMap { prefix =>
secondary.map { case (lo, hi) =>
val start = Bytes.concat(prefix, encoded, NullByteArray, lo)
val end = Bytes.concat(prefix, encoded, NullByteArray, hi)
createRange(start, end)
}
}
}
}
// creates a function to generate the secondary index key bytes for a feature,
// or a constant empty array when the schema has no supported secondary index
private def getSecondaryIndexKey(sft: SimpleFeatureType, lenient: Boolean): (F) => Array[Byte] = {
secondaryIndex(sft).map(_.toIndexKeyBytes(sft, lenient)) match {
case None => (_) => Array.empty
// NOTE(review): assumes toIndexKeyBytes always yields at least one key (`.head`) - confirm
// for inputs that might produce no keys
case Some(toKey) => (f) => toKey(Seq.empty, f.feature, Array.empty).head
}
}
// TODO verify this
// extracts (lo, hi) byte ranges for the secondary portion of the row key from the given filter,
// or an empty seq when the schema has no supported secondary index
private def getSecondaryIndexRanges(sft: SimpleFeatureType,
filter: Filter,
explain: Explainer): Seq[(Array[Byte], Array[Byte])] = {
secondaryIndex(sft).map { case secondary: IndexKeySpace[Any, Any] =>
val values = secondary.getIndexValues(sft, filter, explain)
secondary.getRangeBytes(secondary.getRanges(values), Seq.empty).map {
// NOTE(review): only BoundedByteRange is handled - any other ByteRange subtype would throw
// a MatchError here; confirm getRangeBytes can only yield bounded ranges with no tiers
case BoundedByteRange(lo, hi) => (lo, hi)
}.toSeq
}.getOrElse(Seq.empty)
}
}
object AttributeShardedIndex {
// separator between the lexicoded attribute value and the secondary index key in each row
val NullByte: Byte = 0
val NullByteArray = Array(NullByte)
// lexicographic encoders - encoded string ordering matches the natural ordering of the values
val typeRegistry: TypeRegistry[String] = LexiTypeEncoders.LEXI_TYPES
// store 2 bytes for the index of the attribute in the sft - this allows up to 32k attributes in the sft.
def indexToBytes(i: Int): Array[Byte] = Shorts.toByteArray(i.toShort)
// convert back from bytes to the index of the attribute
def bytesToIndex(b0: Byte, b1: Byte): Short = Shorts.fromBytes(b0, b1)
/**
* Gets the row prefix for a given attribute
*
* @param sft simple feature type
* @param i index of the attribute
* @return table sharing bytes (if any) followed by the 2-byte attribute index
*/
private def rowPrefix(sft: SimpleFeatureType, i: Int): Array[Byte] =
Bytes.concat(sft.getTableSharingBytes, indexToBytes(i))
/**
* Lexicographically encode the value. Collections will return multiple rows, one for each entry.
* Null values (and entries that encode to null) are skipped.
*/
def encodeForIndex(value: Any, descriptor: AttributeDescriptor): Seq[Array[Byte]] = {
val strings = if (value == null) {
Seq.empty
} else if (descriptor.isList) {
// encode each value into a separate row
value.asInstanceOf[JCollection[_]].toSeq.filter(_ != null).map(typeEncode).filter(_ != null)
} else {
Seq(typeEncode(value)).filter(_ != null)
}
strings.map(_.getBytes(StandardCharsets.UTF_8))
}
/**
* Lexicographically encode the value. Will convert types appropriately.
* For list attributes, the value is encoded against the element type of the list.
*/
def encodeForQuery(value: Any, descriptor: AttributeDescriptor): Array[Byte] =
encodeForQuery(value, if (descriptor.isList) { descriptor.getListType() } else { descriptor.getType.getBinding })
/**
* Lexicographically encode the value. Will convert types appropriately.
* Returns an empty array for null values or values that encode to an empty string.
*/
def encodeForQuery(value: Any, binding: Class[_]): Array[Byte] = {
if (value == null) { Array.empty } else {
val converted = Option(Converters.convert(value, binding)).getOrElse(value)
val encoded = typeEncode(converted)
if (encoded == null || encoded.isEmpty) {
Array.empty
} else {
encoded.getBytes(StandardCharsets.UTF_8)
}
}
}
// Lexicographically encode a value using its runtime class, falling back to toString if encoding fails
private def typeEncode(value: Any): String = Try(typeRegistry.encode(value)).getOrElse(value.toString)
// gets a lower bound for a range - for exclusive bounds, rowFollowingPrefix skips past
// every row carrying the bounding value itself
private def startRows(sft: SimpleFeatureType,
attributeIndex: Int,
shards: Seq[Array[Byte]],
value: Any,
inclusive: Boolean,
secondary: Array[Byte]): Seq[Array[Byte]] = {
val prefixes = {
val prefix = rowPrefix(sft, attributeIndex)
shards.map(shard => Bytes.concat(prefix, shard))
}
val encoded = encodeForQuery(value, sft.getDescriptor(attributeIndex))
if (inclusive) {
prefixes.map(prefix => Bytes.concat(prefix, encoded, NullByteArray, secondary))
} else {
// get the next row, then append the secondary range
prefixes.map { prefix =>
val following = ByteArrays.rowFollowingPrefix(Bytes.concat(prefix, encoded))
Bytes.concat(following, NullByteArray, secondary)
}
}
}
// gets an upper bound for a range
private def endRows(sft: SimpleFeatureType,
attributeIndex: Int,
shards: Seq[Array[Byte]],
value: Any,
inclusive: Boolean,
secondary: Array[Byte]): Seq[Array[Byte]] = {
val prefixes = {
val prefix = rowPrefix(sft, attributeIndex)
shards.map(shard => Bytes.concat(prefix, shard))
}
val encoded = encodeForQuery(value, sft.getDescriptor(attributeIndex))
if (inclusive) {
if (secondary.length == 0) {
// use a rowFollowingPrefix to match any secondary values
prefixes.map(prefix => ByteArrays.rowFollowingPrefix(Bytes.concat(prefix, encoded, NullByteArray)))
} else {
// matches anything with the same value, up to the secondary
prefixes.map(prefix => Bytes.concat(prefix, encoded, NullByteArray, secondary))
}
} else {
// can't use secondary range on an exclusive upper, as there aren't any methods to calculate previous rows
prefixes.map(prefix => Bytes.concat(prefix, encoded, NullByteArray))
}
}
// lower bound for all values of the attribute, inclusive
private def lowerBounds(sft: SimpleFeatureType, i: Int, shards: Seq[Array[Byte]]): Seq[Array[Byte]] = {
val prefix = rowPrefix(sft, i)
shards.map(shard => Bytes.concat(prefix, shard))
}
// upper bound for all values of the attribute, exclusive
private def upperBounds(sft: SimpleFeatureType, i: Int, shards: Seq[Array[Byte]]): Seq[Array[Byte]] = {
val prefix = rowPrefix(sft, i)
shards.map(shard => ByteArrays.rowFollowingPrefix(Bytes.concat(prefix, shard)))
}
}
| ddseapy/geomesa | geomesa-index-api/src/main/scala/org/locationtech/geomesa/index/index/legacy/AttributeShardedIndex.scala | Scala | apache-2.0 | 20,042 |
//// See the LICENCE.txt file distributed with this work for additional
//// information regarding copyright ownership.
////
//// Licensed under the Apache License, Version 2.0 (the "License");
//// you may not use this file except in compliance with the License.
//// You may obtain a copy of the License at
////
//// http://www.apache.org/licenses/LICENSE-2.0
////
//// Unless required by applicable law or agreed to in writing, software
//// distributed under the License is distributed on an "AS IS" BASIS,
//// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//// See the License for the specific language governing permissions and
//// limitations under the License.
//package scray.querying.source.indexing
//
//import com.twitter.util.Time
//
//import java.util.{Calendar, GregorianCalendar, TimeZone}
//
//import scala.collection.mutable.Buffer
//import scala.reflect.ClassTag
//import scala.reflect.runtime.universe._
//
//import scray.querying.description.{Column, ColumnGrouping, ColumnOrdering, QueryRange, Row, TableIdentifier}
//import scray.querying.description.internal.{Bound, Domain, QueryDomainRangeException, RangeValueDomain, SingleValueDomain}
//import scray.querying.description.internal.CombinedIndexColumnMissingException
//import scray.querying.description.internal.ComposedMultivalueDomain
//import scray.querying.queries.DomainQuery
//import scray.querying.source.{AbstractHashJoinSource, KeyValueSource, LazySource}
//import com.typesafe.scalalogging.slf4j.LazyLogging
//
///**
// * creates an indexed-source with a hashed-join reference on a time column.
// *
// * Format of this hand-made index will be:
// * TIME[a], TIME[ms], Set[ref]
// */
//class TimeIndexSource[Q <: DomainQuery, M, R, V](
// timeIndexConfig: TimeIndexConfig,
// indexsource: LazySource[Q],
// lookupSource: KeyValueSource[R, V],
// lookupSourceTable: TableIdentifier,
// lookupkeymapper: Option[M => R] = None,
// sequencedmapper: Option[Int] = None,
// combinedIndexColumns: Set[Column] = Set(),
// useranges: Boolean = false)(implicit tag: ClassTag[M])
// extends AbstractHashJoinSource[Q, M, R, V](indexsource, lookupSource, lookupSourceTable, lookupkeymapper, combinedIndexColumns, sequencedmapper)
// with LazyLogging {
//
// /**
// * return the year from a time using GregorianCalendar and the given TimeZone
// */
// @inline private def getYearFromTime(time: Time): Int = {
// val greg = new GregorianCalendar
// greg.setTimeZone(timeIndexConfig.timeZone)
// greg.setTime(time.toDate)
// greg.get(Calendar.YEAR)
// }
//
// /**
// * creates a domain query that matches the provided domains and trys to reflect
// * the original query options
// */
// @inline private def createDomainQuery(query: Q, domains: List[Domain[_]]): Q = {
// val resultColumns = Set(timeIndexConfig.indexRowColumnYear,
// timeIndexConfig.indexColumnMs, timeIndexConfig.indexReferencesColumn)
// val range = if(useranges) {
// query.getQueryRange.map { qrange =>
// val skipLines = qrange.skip.getOrElse(0L)
// QueryRange(None, qrange.limit.map(_ + skipLines).orElse(timeIndexConfig.maxLimit.map(_ + skipLines)), None)
// }
// } else {
// None
// }
// DomainQuery(query.getQueryID, query.getQueryspace, query.querySpaceVersion, resultColumns, timeIndexConfig.indexRowColumnYear.table,
// domains, Some(ColumnGrouping(timeIndexConfig.indexColumnMs)),
// Some(ColumnOrdering[Long](timeIndexConfig.indexColumnMs,
// query.getOrdering.filter(_.descending).isDefined)), range).asInstanceOf[Q]
// }
//
// override protected def transformIndexQuery(query: Q): Set[Q] = {
// val combinedIndexDomains = getCombinedIndexColumns(query, timeIndexConfig.indexRowColumnYear.table)
// val optDomain = query.getWhereAST.find(domain => domain.column == timeIndexConfig.timeReferenceCol)
// optDomain.map(_ match {
// case cmd: ComposedMultivalueDomain[_] => ??? // currently unused domain type
// case svd: SingleValueDomain[_] => {
// val time = Time.fromMilliseconds(svd.value.asInstanceOf[Long])
// val yearDomain = new SingleValueDomain(timeIndexConfig.indexRowColumnYear, getYearFromTime(time))
// val timeDomain = new SingleValueDomain(timeIndexConfig.indexColumnMs, time.inMilliseconds)
// Set(createDomainQuery(query, List(yearDomain, timeDomain) ++ combinedIndexDomains))
// }
// case rvd: RangeValueDomain[_] => {
// val years = Buffer[Int]()
// val transformedRangeValueDomain = rvd.lowerBound match {
// case Some(start) => {
// val startValue = start.value.asInstanceOf[Long]
// val startYear = getYearFromTime(Time.fromMilliseconds(startValue))
// val realStartYear = if(startYear < timeIndexConfig.minimumYear) {
// timeIndexConfig.minimumYear
// } else {
// startYear
// }
// val startBound = Some(Bound[Long](start.inclusive, startValue))
// rvd.upperBound match {
// case Some(end) =>
// val endValue = end.value.asInstanceOf[Long]
// val endBound = Some(Bound[Long](end.inclusive, endValue))
// // add all years in between; if endValue < realStartYear => return nothing
// if(endValue < realStartYear) {
// years += 9999 // obviously queries will return no data for this
// } else {
// years ++= realStartYear.to(getYearFromTime(Time.fromMilliseconds(endValue)))
// }
// Some(new RangeValueDomain[Long](timeIndexConfig.indexColumnMs, startBound, endBound))
// case None => {
// // add years from back then up to now
// years ++= realStartYear.to(getYearFromTime(Time.now))
// Some(new RangeValueDomain[Long](timeIndexConfig.indexColumnMs, startBound, None))
// }
// }
// }
// case None => rvd.upperBound match {
// case Some(end) => {
// // warning: query years all up to end!
// val endValue = end.value.asInstanceOf[Long]
// val endBound = Some(Bound[Long](end.inclusive, endValue))
// years ++= timeIndexConfig.minimumYear.to(getYearFromTime(Time.now))
// Some(new RangeValueDomain[Long](timeIndexConfig.indexColumnMs, None, endBound))
// }
// case None => throw new QueryDomainRangeException(timeIndexConfig.timeReferenceCol, query)
// }
// }
// years.map { year =>
// val yearDomain = new SingleValueDomain(timeIndexConfig.indexRowColumnYear, year)
// createDomainQuery(query, transformedRangeValueDomain.map(List(_, yearDomain)).getOrElse(List(yearDomain) ++ combinedIndexDomains))
// }.toSet
// }
// }).getOrElse {
// // warning: query years all up to now!
// timeIndexConfig.minimumYear.to(getYearFromTime(Time.now)).map(year =>
// createDomainQuery(query, List(new SingleValueDomain(timeIndexConfig.indexRowColumnYear, year)) ++ combinedIndexDomains)).toSet
// }
// }
//
// override protected def getJoinablesFromIndexSource(index: Row): Array[M] = {
// index.getColumnValue[M](timeIndexConfig.indexReferencesColumn) match {
// case Some(refs) => refs match {
// case travs: TraversableOnce[M] => travs.asInstanceOf[TraversableOnce[M]].toArray
// case travs: M => Array[M](travs)
// }
// case None => Array[M]()
// }
// }
//
// override protected def isOrderedAccordingToOrignalOrdering(transformedQuery: Q, ordering: ColumnOrdering[_]): Boolean =
// ordering.column == timeIndexConfig.timeReferenceCol
//
// /**
// * since this is a true index only
// */
// override def getColumns: Set[Column] = lookupSource.getColumns
//}
| scray/scray | scray-querying/modules/scray-querying/src/main/scala/scray/querying/source/indexing/TimeIndexSource.scala | Scala | apache-2.0 | 7,976 |
package com.varunvats.practice.string
import com.varunvats.practice.sorting.UnitSpec
/**
* Behavioural specification for `PalindromeCheck`, covering the empty string, single and
* repeated characters, and even- and odd-length palindromes and non-palindromes.
*/
class PalindromeCheckSpec extends UnitSpec {
"A string" must {
// positive cases - inputs that read the same forwards and backwards
"be a palindrome" when {
"empty" in {
PalindromeCheck("") shouldBe true
}
"it contains only one character" in {
PalindromeCheck("c") shouldBe true
}
"it contains only two characters, both of the same value" in {
PalindromeCheck("cc") shouldBe true
}
"it contains only three characters, all of the same value" in {
PalindromeCheck("ppp") shouldBe true
}
"it contains only three characters, and only the first and last are same" in {
PalindromeCheck("opo") shouldBe true
}
"it contains an even number of characters that form a palindrome" in {
PalindromeCheck("xaax") shouldBe true
}
"it contains an odd number of characters that form a palindrome" in {
PalindromeCheck("omooppqppoomo") shouldBe true
}
}
// negative cases - inputs that differ when reversed
"not be a palindrome" when {
"it contains two characters of different values" in {
PalindromeCheck("pq") shouldBe false
}
"it contains three characters that don't form a palindrome" in {
PalindromeCheck("ppq") shouldBe false
}
"it contains an even number of characters that don't form a palindrome" in {
PalindromeCheck("papaapaq") shouldBe false
}
"it contains an odd number of characters that don't form a palindrome" in {
PalindromeCheck("nonoponom") shouldBe false
}
}
}
}
| varunvats/practice | jvm/src/test/scala/com/varunvats/practice/string/PalindromeCheckSpec.scala | Scala | mit | 1,583 |
package fi.onesto.sbt
import net.schmizz.sshj.SSHClient
import net.schmizz.sshj.common.IOUtils
import net.schmizz.sshj.connection.channel.direct.Session
package object mobilizer {
import util._
/**
* Enriches [[SSHClient]] with helpers for executing remote commands.
*
* All `run*` variants execute a single command in a fresh session and throw a
* [[CommandException]] carrying the remote stderr when the exit status is non-zero.
*/
implicit final class SSHClientHelpers(val underlying: SSHClient) extends AnyVal {
/** Runs `action` in a newly started session, always closing the session afterwards. */
def withSession[A](action: Session => A): A = {
val session = underlying.startSession()
try {
action(session)
} finally {
session.close()
}
}
/** Runs a command (shell-quoted with `args`) and returns its stdout as lines. */
def run(commandName: String, args: String*): Iterator[String] =
runWithOptionalInput(commandName, None, args: _*)
/**
* Runs a raw (already quoted) command line, discarding its output.
*
* @param requestPty when [[WithPty]], allocates a default PTY before executing
*/
def runShAndDiscard(commandLine: String, requestPty: RequestPty = NoPty): Unit = {
withSession { session =>
if (requestPty == WithPty)
session.allocateDefaultPTY()
val command = session.exec(commandLine)
command.getOutputStream.close()
val errors = IOUtils.readFully(command.getErrorStream).toString
IOUtils.readFully(command.getInputStream)
command.getInputStream.close()
command.join()
val exitStatus = command.getExitStatus
if (exitStatus != 0)
throw new CommandException(commandLine, errors, exitStatus)
}
}
/** Runs a command (shell-quoted with `args`), discarding its output. */
def runAndDiscard(commandName: String, args: String*): Unit = {
runWithOptionalInputAndDiscard(commandName, None, args: _*)
}
/** Runs a command, writing `input` to its stdin, and returns its stdout as lines. */
def runWithInput(commandName: String, input: String, args: String*): Iterator[String] =
runWithOptionalInput(commandName, Option(input), args: _*)
/** Runs a command, writing `input` to its stdin, discarding its output. */
def runWithInputAndDiscard(commandName: String, input: String, args: String*): Unit = {
runWithOptionalInputAndDiscard(commandName, Option(input), args: _*)
}
/**
* Runs a command, optionally feeding stdin, and returns stdout split into lines.
* Both stdout and stderr are fully buffered before the exit status is checked.
*/
def runWithOptionalInput(commandName: String, inputOption: Option[String], args: String*): Iterator[String] = {
withSession { session =>
val command = session.exec(shellQuote(commandName, args: _*))
inputOption foreach { input =>
// NOTE(review): input.getBytes uses the JVM default charset - confirm UTF-8 is intended
command.getOutputStream.write(input.getBytes)
command.getOutputStream.flush()
}
command.getOutputStream.close()
val output = IOUtils.readFully(command.getInputStream).toString.lines
val errors = IOUtils.readFully(command.getErrorStream).toString
command.join()
val exitStatus = command.getExitStatus
if (exitStatus != 0)
throw new CommandException(commandName, errors, exitStatus)
output
}
}
/**
* Runs a command, optionally feeding stdin, discarding all output.
* NOTE(review): reads stderr before stdout, unlike runWithOptionalInput - presumably
* harmless since both streams are read fully, but confirm for commands with large output.
*/
def runWithOptionalInputAndDiscard(commandName: String, inputOption: Option[String], args: String*): Unit = {
withSession { session =>
val command = session.exec(shellQuote(commandName, args: _*))
inputOption foreach { input =>
command.getOutputStream.write(input.getBytes)
command.getOutputStream.flush()
}
command.getOutputStream.close()
val errors = IOUtils.readFully(command.getErrorStream).toString
IOUtils.readFully(command.getInputStream)
command.getInputStream.close()
command.join()
val exitStatus = command.getExitStatus
if (exitStatus != 0)
throw new CommandException(commandName, errors, exitStatus)
}
}
/** Creates or replaces a symlink at `destination` pointing to `source` (`ln -nsf`). */
def symlink(source: String, destination: String): Unit = {
runAndDiscard("ln", "-nsf", source, destination)
}
/** Recursively removes `path` on the remote host (`rm -rf`). */
def rmTree(path: String): Unit = {
runAndDiscard("rm", "-rf", path)
}
}
}
| onesto/sbt-mobilizer | src/main/scala/fi/onesto/sbt/mobilizer/package.scala | Scala | mit | 3,413 |
package byte_dce
import byteR._
import byteR.cfg._
import byte_walk._
import scala.collection.mutable.{HashMap,HashSet,Map}
/* This class uses the byte walk to build the follow set
* for each instruction. Each store is noted in a map.
* If a read is reached (on any branch) it is noted in the map
* so the store is not deleted.
*
*/
object DCEWalk {
/* Note that this function assumes that instructions form a BB. */
/**
* Computes local liveness information for a single basic block.
*
* @return (defs, residualRefs): `defs` is every variable stored to in the block;
*         `residualRefs` is every variable read before any store to it (upward-exposed uses)
*/
def applyBB(instructions: Array[JVMInstruction]) = {
// We have to walk the instructions backwards. A set of live
// ref and def set is returned for this BB. Variables can
// be in both the ref and def sets (if they are referenced
// before definition)
val residualRefs = new HashSet[JVMVariable]()
val defs = new HashSet[JVMVariable]()
instructions.reverse.foreach {
case load: JVMLoadInstruction => {
residualRefs += load.getVariable
}
case store: JVMStoreInstruction => {
val variable = store.getVariable
// a store kills any reference occurring after it in program order
if (residualRefs.contains(variable)) {
residualRefs -= variable
}
defs += variable
}
case _ =>
}
(defs, residualRefs)
}
/**
* Runs a backwards liveness analysis over the CFG and labels every store instruction
* as live or dead.
*
* @return map from instruction index to the store status at that index
*/
def apply(cfg: JVMCFG) = {
// Store the variables that are live into a BB.
val liveIn = new HashMap[BBStart, HashSet[JVMVariable]]()
// Go through the BBs and calculate the variables that are live in
// to each BB. Calculate the variables that are stored to in each BB.
// Standard backwards dataflow, iterated to a fixed point:
//   liveIn(b) = (union of successors' liveIn - defs(b)) + refs(b)
cfg.walkWhileChanges({
case (from, to, predBBs, nextBBs) => {
// Now, go through the instructions backwards. There are a few
// cases: (1) STORE -> STORE (First store was dead, second store
// is dependent on the live out information). Kill set is set
// for the variable.
//
// (2) STORE -> LOAD (Store is live. Variable is not live in,
// killSet of variable is set)
//
// (3) LOAD -> STORE (Variable is live in, store is live dependent
// on information going out).
val (defs, refs) = applyBB(cfg.getBB(from))
val followLiveSets = nextBBs.succs map {
case bbNum =>
if (liveIn.contains(bbNum)) {
liveIn(bbNum)
} else {
// seed unvisited successors with an empty live set
val set = new HashSet[JVMVariable]()
liveIn(bbNum) = set
set
}
}
var newSetCreated = false
val newSet =
(bigUnion(followLiveSets) -- defs).union(refs)
val oldSet = if (liveIn.contains(from)) {
liveIn(from)
} else {
val set = new HashSet[JVMVariable]()
liveIn(from) = set
newSetCreated = true
set
}
// We return true if there were changes and the walk needs to continue.
val updated = if (newSet == oldSet) {
false
} else {
liveIn(from) = newSet
true
}
// NOTE(review): reporting a change whenever a new (even empty) entry was created forces at
// least one extra pass per block - appears intentional, to guarantee every block is seeded
updated || newSetCreated
}
})
// Label particular stores with some status. This can be acted upon by a
// later walk capable of deciding whether variables are resident.
val storeStatus = new HashMap[Int, StoreStatus]
// Then, within each BB, go through and look at each store to see if it
// is a store to a dead variable. If so, delete it.
cfg.walk({
case (from, to, preds, nextBBs) => {
val nextBBsLiveIn = nextBBs.succs.map(liveIn(_))
val liveOut = bigUnion(nextBBsLiveIn)
// Get the instructions:
val bbInstructions = cfg.getBB(from, to)
// Create a set of variables referenced within this BB
// since the last def of that variable.
val localRefs = new HashSet[JVMVariable]()
// Create a set of variables defined within this BB
val localDefs = new HashSet[JVMVariable]()
// Now walk the instructions (backwards) to see if stores are live
// or dead.
((from.index to to.index) zip bbInstructions).reverse.foreach {
case (index, refIns: JVMLoadInstruction) =>
localRefs += refIns.getVariable
case (index, defIns: JVMStoreInstruction) => {
val defVar = defIns.getVariable
// Check if this has been referenced since the last def:
if (localRefs.contains(defVar)) {
// This variable is no longer referenced before it is
// defined.
localRefs -= defVar
// In this case, the store is live.
storeStatus(index) = LiveStore
} else {
// This variable has not been referenced.
// In the case that it has already been def'ed in this
// BB, it should be deleted.
if (localDefs.contains(defVar)) {
storeStatus(index) = DeadStore
} else if (!liveOut.contains(defVar)) {
// If this variable is not live out, we can delete the store.
storeStatus(index) = DeadStore
} else {
// This store must not already have been def'd and it must
// be live out. Therefore, it is live.
storeStatus(index) = LiveStore
}
}
localDefs += defIns.getVariable
}
case _ =>
}
}
})
storeStatus
}
/** Union of all the given sets (empty when the list is empty). */
private def bigUnion(sets: List[HashSet[JVMVariable]]):
HashSet[JVMVariable] = {
sets.foldLeft(new HashSet[JVMVariable]) {
case (builder, newSet) => builder.union(newSet)
}
}
}
| j-c-w/mlc | src/main/scala/byte_dce/DCEWalk.scala | Scala | gpl-3.0 | 5,576 |
package lostvaults.server
/**
* Item.scala
* @Author Felix Färsjö, Jimmy Holm, Fredrik Larsson, Anna Nilsson, Philip Åkerfeldt
* @Version 1.0
*/
import scala.io.Source
/**
* The class item is a constructor for items in the game. An item is
* composed of the name of the item, and the value of it.
*/
class Item(ID: Int, Name: String, Attack: Int, Defense: Int, Speed: Int, ItemType: String) {
  val id = ID
  val name = Name
  val attack = Attack
  val defense = Defense
  val speed = Speed
  val itemType = ItemType

  // Speed phrasing as (speeds equal, this item reads slower / less movement, this item reads
  // faster / better movement). NOTE(review): a *higher* speed value reads as "slower"/"less
  // movement" throughout, matching the original comparison tables - speed appears to be a
  // delay-style stat; confirm.
  private val WeaponSpeedPhrases = ("no faster ", "slower ", "faster ")
  private val ArmorSpeedPhrases =
    ("offers no better movement ", "offers less movement ", "offers better movement ")

  /**
   * Compares this item to another item. The other item is re-resolved through [[ItemRepo]]
   * by its id, preserving the original behaviour.
   */
  def compareTo(_Item: Item): String = {
    compareTo(_Item.id)
  }

  /**
   * Builds an English sentence comparing this item to the item with the given repository id.
   * Weapons are compared by attack, armor by defense; speed is reported as a secondary stat.
   * Items of differing (or non-weapon/armor) types yield a "not comparable" message.
   *
   * Fixes over the previous version: the armor equal-strength branch emitted
   * "and offer less movement"/"but offers better movement" without a trailing space
   * (producing "...movementthan the..."), and the final fallback lacked a period.
   */
  def compareTo(ID: Int): String = {
    val item = ItemRepo.getById(ID)
    val intro = "The " + name + " is "
    if (item.itemType.compareToIgnoreCase(itemType) != 0) {
      intro + "not comparable with the " + item.name + "."
    } else if (isWeapon()) {
      intro + comparison(attack, item.attack, item.speed, WeaponSpeedPhrases) +
        "than the " + item.name + "."
    } else if (isArmor()) {
      intro + comparison(defense, item.defense, item.speed, ArmorSpeedPhrases) +
        "than the " + item.name + "."
    } else {
      // same type, but neither weapon nor armor (e.g. treasure/food/potion) - nothing to compare
      intro + "not comparable to the " + item.name + "."
    }
  }

  /**
   * Builds the "<strength> <conjunction> <speed>" fragment shared by the weapon and armor
   * comparisons, e.g. "stronger but slower " or "weaker but offers better movement ".
   *
   * @param mine       this item's primary stat (attack for weapons, defense for armor)
   * @param theirs     the other item's primary stat
   * @param theirSpeed the other item's speed stat
   * @param phrases    speed phrasing (see WeaponSpeedPhrases / ArmorSpeedPhrases)
   */
  private def comparison(mine: Int, theirs: Int, theirSpeed: Int,
                         phrases: (String, String, String)): String = {
    val strength =
      if (mine == theirs) "no stronger "
      else if (mine < theirs) "weaker "
      else "stronger "
    // "and" when both stats point the same way for this item, "but" when they contrast -
    // reproduces the original per-branch conjunction tables
    val conjunction = if ((mine > theirs) == (speed < theirSpeed)) "and " else "but "
    val speedPart =
      if (speed == theirSpeed) phrases._1
      else if (speed > theirSpeed) phrases._2
      else phrases._3
    strength + conjunction + speedPart
  }

  /** True when this item's type is "weapon" (case-insensitive). */
  def isWeapon(): Boolean = {
    itemType.compareToIgnoreCase("weapon") == 0
  }

  /** True when this item's type is "armor" (case-insensitive). */
  def isArmor(): Boolean = {
    itemType.compareToIgnoreCase("armor") == 0
  }

  /** True when this item's type is "treasure" (case-insensitive). */
  def isTreasure(): Boolean = {
    itemType.compareToIgnoreCase("treasure") == 0
  }

  /** True when this item's type is "food" (case-insensitive). */
  def isFood(): Boolean = {
    itemType.compareToIgnoreCase("food") == 0
  }

  /** True when this item's type is "potion" (case-insensitive). */
  def isPotion(): Boolean = {
    itemType.compareToIgnoreCase("potion") == 0
  }

  /** An item renders as its name. */
  override def toString = {
    name
  }
}
| senilica/LostVaults | src/lostvaults/server/Item.scala | Scala | mit | 3,333 |
package com.mentatlabs.nsa
package scalac
package options
/* -Ywarn-nullary-unit
* ===================
* 2.9.1 - 2.12.0: Warn when nullary methods return Unit.
*/
/** Boolean scalac flag `-Ywarn-nullary-unit`, available since Scala 2.9.1. */
case object ScalacYWarnNullaryUnit
extends ScalacOptionBoolean("-Ywarn-nullary-unit", ScalacVersions.`2.9.1`)
| mentat-labs/sbt-nsa | nsa-core/src/main/scala/com/mentatlabs/nsa/scalac/options/private/ScalacYWarnNullaryUnit.scala | Scala | bsd-3-clause | 284 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset for code snippets matching specific criteria and returns a small sample of them, giving a quick surface-level preview of the dataset's contents rather than any in-depth analysis.