| code (string, lengths 5-1M) | repo_name (string, lengths 5-109) | path (string, lengths 6-208) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64, 5-1M) |
|---|---|---|---|---|---|
// scalac: -Xsource:2.13
//
abstract class Abstract {
type TypeMember
val member: TypeMember
}
object Abstract {
class Ops(m: Abstract#TypeMember) {
def answer = 42
}
implicit def member2AbstractOps(m: Abstract#TypeMember) = new Ops(m)
}
object ShouldThisCompile {
val concrete: Abstract = new Abstract {
type TypeMember = String
val member = "hello"
}
concrete.member.answer
}
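// Why this should compile: `concrete.member` has the path-dependent type
// `concrete.TypeMember`, which conforms to the projection `Abstract#TypeMember`,
// so the implicit conversion to `Ops` applies and `.answer` type-checks.
// (This file lives under test/files/pos, i.e. it is expected to compile.)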
| scala/scala | test/files/pos/t5818.scala | Scala | apache-2.0 | 410 |
/*
* Copyright 2015 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts
import uk.gov.hmrc.ct.box.{CtBoxIdentifier, CtOptionalInteger, Input, MustBeNoneOrZeroOrPositive}
case class AC405(value: Option[Int]) extends CtBoxIdentifier(name = "Current Other Income")
with CtOptionalInteger with MustBeNoneOrZeroOrPositive with Input
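// Validation sketch (semantics inferred from the mixed-in trait names; the actual
// rules live in MustBeNoneOrZeroOrPositive): AC405(None), AC405(Some(0)) and
// AC405(Some(410)) pass validation, while AC405(Some(-1)) fails.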
| keithhall/ct-calculations | src/main/scala/uk/gov/hmrc/ct/accounts/AC405.scala | Scala | apache-2.0 | 931 |
package doodle
package syntax
object approximatelyEqual {
implicit class ApproximatelyEqualOps[A](a1: A) {
def ~=(a2: A)(implicit distance: Distance[A]): Boolean =
distance.distance(a1, a2) < 0.01
}
}
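// Usage sketch: `Distance` is assumed to be a typeclass defined elsewhere in doodle,
// roughly `trait Distance[A] { def distance(a1: A, a2: A): Double }`. With an
// instance in scope, the syntax reads naturally:
//
//   implicit val doubleDistance: Distance[Double] =
//     new Distance[Double] { def distance(a1: Double, a2: Double): Double = math.abs(a1 - a2) }
//
//   import doodle.syntax.approximatelyEqual._
//   1.0 ~= 1.005  // true: |1.0 - 1.005| < 0.01
//   1.0 ~= 1.5    // false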
| Angeldude/doodle | shared/src/test/scala/doodle/syntax/approximatelyEqual.scala | Scala | apache-2.0 | 216 |
package controllers
import org.vindinium.server._
import org.vindinium.server.system.Replay
import org.vindinium.server.user.{ User => U }
import akka.pattern.{ ask, pipe }
import play.api._
import play.api.data._
import play.api.data.Forms._
import play.api.libs.EventSource
import play.api.libs.iteratee._
import play.api.libs.json._
import play.api.mvc._
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.{ Future, Await }
object User extends Controller {
private val form = Form(single(
"name" -> text
.verifying("Name is too short", _.size >= 3)
.verifying("Name is too long", _.size <= 20)
.verifying("Name already taken", name => Await.result(U freeName name, 1 second))
))
def registerForm = Action { req =>
Ok(views.html.user.register(form))
}
def register = Action.async { implicit req =>
form.bindFromRequest.fold(
err => Future successful BadRequest(views.html.user.register(err)),
name => U make name map { user =>
Ok(views.html.user.postRegister(user))
}
)
}
private implicit val timeout = akka.util.Timeout(1.second)
def nowPlaying(id: String) = Action.async {
system.NowPlaying.actor ? system.NowPlaying.GetEnumeratorFor(id) mapTo
manifest[Enumerator[List[String]]] map { enumerator =>
val toJsonArray = Enumeratee.map[List[String]] { ids =>
Json stringify JsArray(ids map JsString.apply)
}
Ok.chunked(enumerator &> toJsonArray &> EventSource()).as("text/event-stream")
}
}
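// The endpoint above streams server-sent events: each chunk is a JSON-encoded array
// of game ids (e.g. `data: ["id1","id2"]`). Assuming a route such as
// `GET /user/:id/now-playing` (not shown here; see conf/routes), a client could
// follow the stream with `curl -N <baseUrl>/user/<id>/now-playing`.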
def show(id: String) = Action.async { req =>
U find id flatMap {
case None => Future successful notFoundPage
case Some(user) => Replay.recentByUserName(user.name, 100) map { replays =>
Ok(views.html.user.show(user, replays, None))
}
}
}
def list = Action.async { req =>
val topNb = 100
U top topNb map { users =>
Ok(views.html.user.top(users, topNb))
}
}
}
| concerned3rdparty/vindinium | app/controllers/User.scala | Scala | mit | 2,012 |
/*
* Copyright 2012-2014 Comcast Cable Communications Management, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.comcast.xfinity.sirius.api.impl
import java.io.File
import java.lang.management.ManagementFactory
import java.net.InetAddress
import java.util.{HashMap => JHashMap}
import com.comcast.xfinity.sirius.admin.ObjectNameHelper
import com.comcast.xfinity.sirius.api.RequestHandler
import com.comcast.xfinity.sirius.api.SiriusConfiguration
import com.comcast.xfinity.sirius.info.SiriusInfo
import com.comcast.xfinity.sirius.writeaheadlog.CachedSiriusLog
import com.comcast.xfinity.sirius.writeaheadlog.SiriusLog
import com.typesafe.config.Config
import com.typesafe.config.ConfigFactory
import akka.actor.ActorRef
import akka.actor.ActorSystem
import javax.management.ObjectName
import com.comcast.xfinity.sirius.uberstore.segmented.SegmentedUberStore
import com.comcast.xfinity.sirius.uberstore.UberStore
import com.comcast.xfinity.sirius.util.AkkaExternalAddressResolver
import scala.collection.JavaConverters._
import org.slf4j.LoggerFactory
/**
* Provides the factory for [[com.comcast.xfinity.sirius.api.impl.SiriusImpl]] instances
*/
object SiriusFactory {
val traceLog = LoggerFactory.getLogger("SiriusFactory")
/**
* SiriusImpl factory method. Takes parameters to construct a SiriusImpl and its dependent
* ActorSystem, and returns the created instance. Calling shutdown on the produced SiriusImpl will
* also shut down the dependent ActorSystem.
*
* @param requestHandler the RequestHandler containing callbacks for manipulating the system's state
* @param siriusConfig a SiriusConfiguration containing configuration info needed for this node.
* @see SiriusConfiguration for info on needed config.
*
* @return A SiriusImpl constructed using the parameters
*/
def createInstance(requestHandler: RequestHandler, siriusConfig: SiriusConfiguration): SiriusImpl = {
val uberStoreDir = siriusConfig.getProp[String](SiriusConfiguration.LOG_LOCATION) match {
case Some(dir) => dir
case None =>
throw new IllegalArgumentException(SiriusConfiguration.LOG_LOCATION + " must be set on config")
}
val backendLog = {
siriusConfig.getProp(SiriusConfiguration.LOG_VERSION_ID, "") match {
case version if version == SegmentedUberStore.versionId => SegmentedUberStore(uberStoreDir, siriusConfig)
case _ => UberStore(uberStoreDir)
}
}
val log: SiriusLog = {
if (siriusConfig.getProp(SiriusConfiguration.LOG_USE_WRITE_CACHE, true)) {
val cacheSize = siriusConfig.getProp(SiriusConfiguration.LOG_WRITE_CACHE_SIZE, 10000)
CachedSiriusLog(backendLog, cacheSize)
} else {
backendLog
}
}
createInstance(requestHandler, siriusConfig, log)
}
/**
* FOR TESTING ONLY: USE THIS HOOK WHEN YOU NEED TO MOCK OUT A LOG.
* Real code should use the two argument factory method.
*
* @param requestHandler the RequestHandler containing callbacks for manipulating the system's state
* @param siriusConfig a SiriusConfiguration containing configuration info needed for this node.
* @see SiriusConfiguration for info on needed config.
* @param siriusLog the persistence layer to which events are committed and from which they are replayed.
*
* @return A SiriusImpl constructed using the parameters
*/
private[sirius] def createInstance(requestHandler: RequestHandler, siriusConfig: SiriusConfiguration,
siriusLog: SiriusLog): SiriusImpl = {
val systemName = siriusConfig.getProp(SiriusConfiguration.AKKA_SYSTEM_NAME, "sirius-system")
implicit val actorSystem = ActorSystem(systemName, createActorSystemConfig(siriusConfig))
// inject an mbean server, without regard for the one that may have been there
val mbeanServer = ManagementFactory.getPlatformMBeanServer
siriusConfig.setProp(SiriusConfiguration.MBEAN_SERVER, mbeanServer)
// inject AkkaExternalAddressResolver
siriusConfig.setProp(SiriusConfiguration.AKKA_EXTERNAL_ADDRESS_RESOLVER, AkkaExternalAddressResolver(actorSystem)(siriusConfig))
// here it is! the real deal creation
val impl = SiriusImpl(requestHandler, siriusLog, siriusConfig)
// create a SiriusInfo MBean which will remain registered until we explicitly shut down sirius
val (siriusInfoObjectName, siriusInfo) = createSiriusInfoMBean(actorSystem, impl.supervisor)(siriusConfig)
mbeanServer.registerMBean(siriusInfo, siriusInfoObjectName)
// need to shut down the actor system and unregister the mbeans when sirius is done
impl.onShutdown({
actorSystem.shutdown()
actorSystem.awaitTermination()
mbeanServer.unregisterMBean(siriusInfoObjectName)
})
impl
}
private def createSiriusInfoMBean(actorSystem: ActorSystem, siriusSup: ActorRef)
(siriusConfig: SiriusConfiguration): (ObjectName, SiriusInfo) = {
val resolver = siriusConfig.getProp[AkkaExternalAddressResolver](SiriusConfiguration.AKKA_EXTERNAL_ADDRESS_RESOLVER).
getOrElse(throw new IllegalStateException("SiriusConfiguration.AKKA_EXTERNAL_ADDRESS_RESOLVER returned nothing"))
val siriusInfo = new SiriusInfo(actorSystem, siriusSup, resolver)
val objectNameHelper = new ObjectNameHelper
val siriusInfoObjectName = objectNameHelper.getObjectName(siriusInfo, siriusSup, actorSystem)(siriusConfig)
(siriusInfoObjectName, siriusInfo)
}
/**
* Creates configuration for the ActorSystem. The config precedence is as follows:
* 1) host/port config trumps all
* 2) siriusConfig supplied external config next
* 3) sirius-akka-base.conf, packaged with sirius, loaded with ConfigFactory.load
*/
private def createActorSystemConfig(siriusConfig: SiriusConfiguration): Config = {
val hostPortConfig = createHostPortConfig(siriusConfig)
val externalConfig = createExternalConfig(siriusConfig)
val baseAkkaConfig = ConfigFactory.load("sirius-akka-base.conf")
hostPortConfig.withFallback(externalConfig).withFallback(baseAkkaConfig)
}
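// Note on withFallback: merging is left-biased, so a key set in hostPortConfig wins
// over the same key in externalConfig, which in turn wins over sirius-akka-base.conf,
// matching the precedence documented above.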
private def createHostPortConfig(siriusConfig: SiriusConfiguration): Config = {
val configMap = new JHashMap[String, Any]()
val sslEnabled = siriusConfig.getProp(SiriusConfiguration.ENABLE_SSL, false)
if (sslEnabled) {
traceLog.info("AKKA using SSL transports akka.remote.netty.ssl. ")
configMap.put("akka.remote.netty.ssl.hostname",
siriusConfig.getProp(SiriusConfiguration.HOST, InetAddress.getLocalHost.getHostName))
configMap.put("akka.remote.netty.ssl.security.random-number-generator",
siriusConfig.getProp(SiriusConfiguration.SSL_RANDOM_NUMBER_GENERATOR).getOrElse(""))
configMap.put("akka.remote.netty.ssl.port", siriusConfig.getProp(SiriusConfiguration.PORT, 2552))
configMap.put("akka.remote.enabled-transports", List("akka.remote.netty.ssl").asJava)
configMap.put("akka.remote.netty.ssl.security.key-store",
siriusConfig.getProp(SiriusConfiguration.KEY_STORE_LOCATION)
.getOrElse(throw new IllegalArgumentException("No key-store value provided")))
configMap.put("akka.remote.netty.ssl.security.trust-store",
siriusConfig.getProp(SiriusConfiguration.TRUST_STORE_LOCATION)
.getOrElse(throw new IllegalArgumentException("No trust-store value provided")))
configMap.put("akka.remote.netty.ssl.security.key-store-password",
siriusConfig.getProp(SiriusConfiguration.KEY_STORE_PASSWORD)
.getOrElse(throw new IllegalArgumentException("No key-store-password value provided")))
configMap.put("akka.remote.netty.ssl.security.key-password",
siriusConfig.getProp(SiriusConfiguration.KEY_PASSWORD)
.getOrElse(throw new IllegalArgumentException("No key-password value provided")))
configMap.put("akka.remote.netty.ssl.security.trust-store-password",
siriusConfig.getProp(SiriusConfiguration.TRUST_STORE_PASSWORD)
.getOrElse(throw new IllegalArgumentException("No trust-store-password value provided")))
} else {
configMap.put("akka.remote.netty.tcp.hostname",
siriusConfig.getProp(SiriusConfiguration.HOST, InetAddress.getLocalHost.getHostName))
configMap.put("akka.remote.netty.tcp.port",
siriusConfig.getProp(SiriusConfiguration.PORT, 2552))
configMap.put("akka.remote.enabled-transports", List("akka.remote.netty.tcp").asJava)
}
// the cast is only needed to satisfy IntelliJ's type checker
ConfigFactory.parseMap(configMap.asInstanceOf[JHashMap[String, _ <: AnyRef]])
}
/**
 * If siriusConfig specifies an external configuration for the Akka ActorSystem
 * being created, loads it. The filesystem is checked first, then the classpath;
 * if neither exists, or no external configuration is specified, an empty Config
 * object is returned.
 */
private def createExternalConfig(siriusConfig: SiriusConfiguration): Config =
siriusConfig.getProp[String](SiriusConfiguration.AKKA_EXTERN_CONFIG) match {
case None => ConfigFactory.empty()
case Some(externConfig) =>
val externConfigFile = new File(externConfig)
if (externConfigFile.exists()) {
ConfigFactory.parseFile(externConfigFile).resolve()
} else {
ConfigFactory.parseResources(externConfig).resolve()
}
}
}
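// Minimal usage sketch (the handler, the path, and the no-arg SiriusConfiguration
// constructor are assumptions; only LOG_LOCATION is strictly required by
// createInstance above):
//
//   val config = new SiriusConfiguration
//   config.setProp(SiriusConfiguration.LOG_LOCATION, "/var/lib/sirius/uberstore")
//   val sirius = SiriusFactory.createInstance(myRequestHandler, config)
//   // shutting sirius down also shuts down its ActorSystem (see the onShutdown hook)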
| jwakemen/sirius-old | src/main/scala/com/comcast/xfinity/sirius/api/impl/SiriusFactory.scala | Scala | apache-2.0 | 9,967 |
/**
* Generated by API Builder - https://www.apibuilder.io
* Service version: 0.8.36
* apibuilder 0.15.11 app.apibuilder.io/flow/delta-config/latest/play_2_8_client
*/
package io.flow.delta.config.v0.models {
sealed trait BuildConfig extends _root_.scala.Product with _root_.scala.Serializable
/**
* Defines the valid discriminator values for the type BuildConfig
*/
sealed trait BuildConfigDiscriminator extends _root_.scala.Product with _root_.scala.Serializable
object BuildConfigDiscriminator {
case object EcsBuildConfig extends BuildConfigDiscriminator { override def toString = "ecs" }
case object K8sBuildConfig extends BuildConfigDiscriminator { override def toString = "k8s" }
final case class UNDEFINED(override val toString: String) extends BuildConfigDiscriminator
val all: scala.List[BuildConfigDiscriminator] = scala.List(EcsBuildConfig, K8sBuildConfig)
private[this] val byName: Map[String, BuildConfigDiscriminator] = all.map(x => x.toString.toLowerCase -> x).toMap
def apply(value: String): BuildConfigDiscriminator = fromString(value).getOrElse(UNDEFINED(value))
def fromString(value: String): _root_.scala.Option[BuildConfigDiscriminator] = byName.get(value.toLowerCase)
}
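// Behavior of apply/fromString above (lookup is case-insensitive via toLowerCase):
//   BuildConfigDiscriminator("ecs")      // EcsBuildConfig
//   BuildConfigDiscriminator("K8S")      // K8sBuildConfig
//   BuildConfigDiscriminator("unknown")  // UNDEFINED("unknown")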
sealed trait Config extends _root_.scala.Product with _root_.scala.Serializable
/**
* Defines the valid discriminator values for the type Config
*/
sealed trait ConfigDiscriminator extends _root_.scala.Product with _root_.scala.Serializable
object ConfigDiscriminator {
case object ConfigProject extends ConfigDiscriminator { override def toString = "config_project" }
case object ConfigError extends ConfigDiscriminator { override def toString = "config_error" }
final case class UNDEFINED(override val toString: String) extends ConfigDiscriminator
val all: scala.List[ConfigDiscriminator] = scala.List(ConfigProject, ConfigError)
private[this] val byName: Map[String, ConfigDiscriminator] = all.map(x => x.toString.toLowerCase -> x).toMap
def apply(value: String): ConfigDiscriminator = fromString(value).getOrElse(UNDEFINED(value))
def fromString(value: String): _root_.scala.Option[ConfigDiscriminator] = byName.get(value.toLowerCase)
}
/**
* The name of the branch that we are actively monitoring, including any
* information needed for the initial deploy.
*/
final case class Branch(
name: String
)
/**
* Used to indicate that there was a problem parsing the project configuration
*/
final case class ConfigError(
errors: Seq[String]
) extends Config
/**
* Top level configuration for a project, including what builds and branches are
* covered and the current status (e.g. enabled, paused, etc.)
*/
final case class ConfigProject(
stages: Seq[io.flow.delta.config.v0.models.ProjectStage],
builds: Seq[io.flow.delta.config.v0.models.BuildConfig],
branches: Seq[io.flow.delta.config.v0.models.Branch]
) extends Config
/**
* @param initialNumberInstances When first deploying this branch, the number of instances we create
* @param memory The number of MiB of memory to set for jvm xmx
* @param containerMemory The number of MiB of memory to set for container memory. This must be at least
* the jvm number PLUS stack and native memory PLUS any sidecars
* @param portContainer The port number on the container that is bound to the user-specified or
* automatically assigned host port.
* @param portHost The port number on the container instance to reserve for your container
* @param remoteLogging Flag whether this build should send logs to a remote location. e.g. Sumo Logic
* @param dependencies The names of other builds that this one is dependent on. If specified, we will
* ensure that we never scale this build to a tag that is ahead of the minimum
* version of the dependent application running in production.
* @param version The version of Delta to use for deployments. Defaults to 1.0 if not specified
* @param healthcheckUrl The URL used for healthchecks by the ELB
* @param crossZoneLoadBalancing Flag whether this build should enable CrossZoneLoadBalancing
* @param allowDowntime Allows Delta to optimize infrastructure for services that can accept downtime
* deployments. Defaults to false.
*/
final case class EcsBuildConfig(
name: String,
cluster: _root_.scala.Option[io.flow.delta.config.v0.models.Cluster] = None,
dockerfile: String,
initialNumberInstances: Long,
instanceType: io.flow.delta.config.v0.models.InstanceType,
memory: _root_.scala.Option[Long] = None,
containerMemory: _root_.scala.Option[Long] = None,
portContainer: Int,
portHost: Int,
remoteLogging: _root_.scala.Option[Boolean] = None,
stages: Seq[io.flow.delta.config.v0.models.BuildStage],
dependencies: Seq[String],
version: _root_.scala.Option[String] = None,
healthcheckUrl: _root_.scala.Option[String] = None,
crossZoneLoadBalancing: _root_.scala.Option[Boolean] = None,
allowDowntime: _root_.scala.Option[Boolean] = None
) extends BuildConfig
final case class K8sBuildConfig(
name: String,
cluster: io.flow.delta.config.v0.models.Cluster
) extends BuildConfig
/**
* Provides future compatibility in clients - in the future, when a type is added
* to the union BuildConfig, it will need to be handled in the client code. This
* implementation will deserialize these future types as an instance of this class.
*
* @param description Information about the type that we received that is undefined in this version of
* the client.
*/
final case class BuildConfigUndefinedType(
description: String
) extends BuildConfig
/**
* Provides future compatibility in clients - in the future, when a type is added
* to the union Config, it will need to be handled in the client code. This
* implementation will deserialize these future types as an instance of this class.
*
* @param description Information about the type that we received that is undefined in this version of
* the client.
*/
final case class ConfigUndefinedType(
description: String
) extends Config
/**
* Represents the individual stages of the continuous delivery system that can be
* enabled / disabled at the build level
*/
sealed trait BuildStage extends _root_.scala.Product with _root_.scala.Serializable
object BuildStage {
case object SetDesiredState extends BuildStage { override def toString = "set_desired_state" }
case object SyncDockerImage extends BuildStage { override def toString = "sync_docker_image" }
case object BuildDockerImage extends BuildStage { override def toString = "build_docker_image" }
case object Scale extends BuildStage { override def toString = "scale" }
/**
* UNDEFINED captures values that are sent either in error or
* that were added by the server after this library was
* generated. We want to make it easy and obvious for users of
* this library to handle this case gracefully.
*
* We use all CAPS for the variable name to avoid collisions
* with the camel cased values above.
*/
final case class UNDEFINED(override val toString: String) extends BuildStage
/**
* all returns a list of all the valid, known values. We use
* lower case to avoid collisions with the camel cased values
* above.
*/
val all: scala.List[BuildStage] = scala.List(SetDesiredState, SyncDockerImage, BuildDockerImage, Scale)
private[this]
val byName: Map[String, BuildStage] = all.map(x => x.toString.toLowerCase -> x).toMap
def apply(value: String): BuildStage = fromString(value).getOrElse(UNDEFINED(value))
def fromString(value: String): _root_.scala.Option[BuildStage] = byName.get(value.toLowerCase)
}
sealed trait Cluster extends _root_.scala.Product with _root_.scala.Serializable
object Cluster {
case object Ecs extends Cluster { override def toString = "ecs" }
case object K8s extends Cluster { override def toString = "k8s" }
/**
* UNDEFINED captures values that are sent either in error or
* that were added by the server after this library was
* generated. We want to make it easy and obvious for users of
* this library to handle this case gracefully.
*
* We use all CAPS for the variable name to avoid collisions
* with the camel cased values above.
*/
final case class UNDEFINED(override val toString: String) extends Cluster
/**
* all returns a list of all the valid, known values. We use
* lower case to avoid collisions with the camel cased values
* above.
*/
val all: scala.List[Cluster] = scala.List(Ecs, K8s)
private[this]
val byName: Map[String, Cluster] = all.map(x => x.toString.toLowerCase -> x).toMap
def apply(value: String): Cluster = fromString(value).getOrElse(UNDEFINED(value))
def fromString(value: String): _root_.scala.Option[Cluster] = byName.get(value.toLowerCase)
}
/**
* List of supported AWS instance types - see
* https://aws.amazon.com/ec2/instance-types/
*/
sealed trait InstanceType extends _root_.scala.Product with _root_.scala.Serializable
object InstanceType {
case object C4Large extends InstanceType { override def toString = "c4.large" }
case object C4Xlarge extends InstanceType { override def toString = "c4.xlarge" }
case object C42xlarge extends InstanceType { override def toString = "c4.2xlarge" }
case object M4Large extends InstanceType { override def toString = "m4.large" }
case object M4Xlarge extends InstanceType { override def toString = "m4.xlarge" }
case object M42xlarge extends InstanceType { override def toString = "m4.2xlarge" }
case object M5Large extends InstanceType { override def toString = "m5.large" }
case object M5Xlarge extends InstanceType { override def toString = "m5.xlarge" }
case object M52xlarge extends InstanceType { override def toString = "m5.2xlarge" }
case object M54xlarge extends InstanceType { override def toString = "m5.4xlarge" }
case object C5Large extends InstanceType { override def toString = "c5.large" }
case object C5Xlarge extends InstanceType { override def toString = "c5.xlarge" }
case object C52xlarge extends InstanceType { override def toString = "c5.2xlarge" }
case object T2Micro extends InstanceType { override def toString = "t2.micro" }
case object T2Small extends InstanceType { override def toString = "t2.small" }
case object T2Medium extends InstanceType { override def toString = "t2.medium" }
case object T2Large extends InstanceType { override def toString = "t2.large" }
case object T3Micro extends InstanceType { override def toString = "t3.micro" }
case object T3Small extends InstanceType { override def toString = "t3.small" }
case object T3Medium extends InstanceType { override def toString = "t3.medium" }
case object T3Large extends InstanceType { override def toString = "t3.large" }
case object T3Xlarge extends InstanceType { override def toString = "t3.xlarge" }
/**
* UNDEFINED captures values that are sent either in error or
* that were added by the server after this library was
* generated. We want to make it easy and obvious for users of
* this library to handle this case gracefully.
*
* We use all CAPS for the variable name to avoid collisions
* with the camel cased values above.
*/
final case class UNDEFINED(override val toString: String) extends InstanceType
/**
* all returns a list of all the valid, known values. We use
* lower case to avoid collisions with the camel cased values
* above.
*/
val all: scala.List[InstanceType] = scala.List(C4Large, C4Xlarge, C42xlarge, M4Large, M4Xlarge, M42xlarge, M5Large, M5Xlarge, M52xlarge, M54xlarge, C5Large, C5Xlarge, C52xlarge, T2Micro, T2Small, T2Medium, T2Large, T3Micro, T3Small, T3Medium, T3Large, T3Xlarge)
private[this]
val byName: Map[String, InstanceType] = all.map(x => x.toString.toLowerCase -> x).toMap
def apply(value: String): InstanceType = fromString(value).getOrElse(UNDEFINED(value))
def fromString(value: String): _root_.scala.Option[InstanceType] = byName.get(value.toLowerCase)
}
/**
* Represents the individual stages of the continuous delivery system that can be
* enabled / disabled at the project level
*/
sealed trait ProjectStage extends _root_.scala.Product with _root_.scala.Serializable
object ProjectStage {
case object SyncShas extends ProjectStage { override def toString = "sync_shas" }
case object SyncTags extends ProjectStage { override def toString = "sync_tags" }
case object Tag extends ProjectStage { override def toString = "tag" }
/**
* UNDEFINED captures values that are sent either in error or
* that were added by the server after this library was
* generated. We want to make it easy and obvious for users of
* this library to handle this case gracefully.
*
* We use all CAPS for the variable name to avoid collisions
* with the camel cased values above.
*/
final case class UNDEFINED(override val toString: String) extends ProjectStage
/**
* all returns a list of all the valid, known values. We use
* lower case to avoid collisions with the camel cased values
* above.
*/
val all: scala.List[ProjectStage] = scala.List(SyncShas, SyncTags, Tag)
private[this]
val byName: Map[String, ProjectStage] = all.map(x => x.toString.toLowerCase -> x).toMap
def apply(value: String): ProjectStage = fromString(value).getOrElse(UNDEFINED(value))
def fromString(value: String): _root_.scala.Option[ProjectStage] = byName.get(value.toLowerCase)
}
}
package io.flow.delta.config.v0.models {
package object json {
import play.api.libs.json.__
import play.api.libs.json.JsString
import play.api.libs.json.Writes
import play.api.libs.functional.syntax._
import io.flow.delta.config.v0.models.json._
private[v0] implicit val jsonReadsUUID = __.read[String].map { str =>
_root_.java.util.UUID.fromString(str)
}
private[v0] implicit val jsonWritesUUID = new Writes[_root_.java.util.UUID] {
def writes(x: _root_.java.util.UUID) = JsString(x.toString)
}
private[v0] implicit val jsonReadsJodaDateTime = __.read[String].map { str =>
_root_.org.joda.time.format.ISODateTimeFormat.dateTimeParser.parseDateTime(str)
}
private[v0] implicit val jsonWritesJodaDateTime = new Writes[_root_.org.joda.time.DateTime] {
def writes(x: _root_.org.joda.time.DateTime) = {
JsString(_root_.org.joda.time.format.ISODateTimeFormat.dateTime.print(x))
}
}
private[v0] implicit val jsonReadsJodaLocalDate = __.read[String].map { str =>
_root_.org.joda.time.format.ISODateTimeFormat.dateTimeParser.parseLocalDate(str)
}
private[v0] implicit val jsonWritesJodaLocalDate = new Writes[_root_.org.joda.time.LocalDate] {
def writes(x: _root_.org.joda.time.LocalDate) = {
JsString(_root_.org.joda.time.format.ISODateTimeFormat.date.print(x))
}
}
implicit val jsonReadsDeltaConfigBuildStage = new play.api.libs.json.Reads[io.flow.delta.config.v0.models.BuildStage] {
def reads(js: play.api.libs.json.JsValue): play.api.libs.json.JsResult[io.flow.delta.config.v0.models.BuildStage] = {
js match {
case v: play.api.libs.json.JsString => play.api.libs.json.JsSuccess(io.flow.delta.config.v0.models.BuildStage(v.value))
case _ => {
(js \\ "value").validate[String] match {
case play.api.libs.json.JsSuccess(v, _) => play.api.libs.json.JsSuccess(io.flow.delta.config.v0.models.BuildStage(v))
case err: play.api.libs.json.JsError =>
(js \\ "build_stage").validate[String] match {
case play.api.libs.json.JsSuccess(v, _) => play.api.libs.json.JsSuccess(io.flow.delta.config.v0.models.BuildStage(v))
case err: play.api.libs.json.JsError => err
}
}
}
}
}
}
def jsonWritesDeltaConfigBuildStage(obj: io.flow.delta.config.v0.models.BuildStage) = {
play.api.libs.json.JsString(obj.toString)
}
def jsObjectBuildStage(obj: io.flow.delta.config.v0.models.BuildStage) = {
play.api.libs.json.Json.obj("value" -> play.api.libs.json.JsString(obj.toString))
}
implicit def jsonWritesDeltaConfigBuildStage: play.api.libs.json.Writes[BuildStage] = {
new play.api.libs.json.Writes[io.flow.delta.config.v0.models.BuildStage] {
def writes(obj: io.flow.delta.config.v0.models.BuildStage) = {
jsonWritesDeltaConfigBuildStage(obj)
}
}
}
implicit val jsonReadsDeltaConfigCluster = new play.api.libs.json.Reads[io.flow.delta.config.v0.models.Cluster] {
def reads(js: play.api.libs.json.JsValue): play.api.libs.json.JsResult[io.flow.delta.config.v0.models.Cluster] = {
js match {
case v: play.api.libs.json.JsString => play.api.libs.json.JsSuccess(io.flow.delta.config.v0.models.Cluster(v.value))
case _ => {
(js \\ "value").validate[String] match {
case play.api.libs.json.JsSuccess(v, _) => play.api.libs.json.JsSuccess(io.flow.delta.config.v0.models.Cluster(v))
case err: play.api.libs.json.JsError =>
(js \\ "cluster").validate[String] match {
case play.api.libs.json.JsSuccess(v, _) => play.api.libs.json.JsSuccess(io.flow.delta.config.v0.models.Cluster(v))
case err: play.api.libs.json.JsError => err
}
}
}
}
}
}
def jsonWritesDeltaConfigCluster(obj: io.flow.delta.config.v0.models.Cluster) = {
play.api.libs.json.JsString(obj.toString)
}
def jsObjectCluster(obj: io.flow.delta.config.v0.models.Cluster) = {
play.api.libs.json.Json.obj("value" -> play.api.libs.json.JsString(obj.toString))
}
implicit def jsonWritesDeltaConfigCluster: play.api.libs.json.Writes[Cluster] = {
new play.api.libs.json.Writes[io.flow.delta.config.v0.models.Cluster] {
def writes(obj: io.flow.delta.config.v0.models.Cluster) = {
jsonWritesDeltaConfigCluster(obj)
}
}
}
implicit val jsonReadsDeltaConfigInstanceType = new play.api.libs.json.Reads[io.flow.delta.config.v0.models.InstanceType] {
def reads(js: play.api.libs.json.JsValue): play.api.libs.json.JsResult[io.flow.delta.config.v0.models.InstanceType] = {
js match {
case v: play.api.libs.json.JsString => play.api.libs.json.JsSuccess(io.flow.delta.config.v0.models.InstanceType(v.value))
case _ => {
(js \\ "value").validate[String] match {
case play.api.libs.json.JsSuccess(v, _) => play.api.libs.json.JsSuccess(io.flow.delta.config.v0.models.InstanceType(v))
case err: play.api.libs.json.JsError =>
(js \\ "instance_type").validate[String] match {
case play.api.libs.json.JsSuccess(v, _) => play.api.libs.json.JsSuccess(io.flow.delta.config.v0.models.InstanceType(v))
case err: play.api.libs.json.JsError => err
}
}
}
}
}
}
def jsonWritesDeltaConfigInstanceType(obj: io.flow.delta.config.v0.models.InstanceType) = {
play.api.libs.json.JsString(obj.toString)
}
def jsObjectInstanceType(obj: io.flow.delta.config.v0.models.InstanceType) = {
play.api.libs.json.Json.obj("value" -> play.api.libs.json.JsString(obj.toString))
}
implicit def jsonWritesDeltaConfigInstanceType: play.api.libs.json.Writes[InstanceType] = {
new play.api.libs.json.Writes[io.flow.delta.config.v0.models.InstanceType] {
def writes(obj: io.flow.delta.config.v0.models.InstanceType) = {
jsonWritesDeltaConfigInstanceType(obj)
}
}
}
implicit val jsonReadsDeltaConfigProjectStage = new play.api.libs.json.Reads[io.flow.delta.config.v0.models.ProjectStage] {
def reads(js: play.api.libs.json.JsValue): play.api.libs.json.JsResult[io.flow.delta.config.v0.models.ProjectStage] = {
js match {
case v: play.api.libs.json.JsString => play.api.libs.json.JsSuccess(io.flow.delta.config.v0.models.ProjectStage(v.value))
case _ => {
(js \\ "value").validate[String] match {
case play.api.libs.json.JsSuccess(v, _) => play.api.libs.json.JsSuccess(io.flow.delta.config.v0.models.ProjectStage(v))
case err: play.api.libs.json.JsError =>
(js \\ "project_stage").validate[String] match {
case play.api.libs.json.JsSuccess(v, _) => play.api.libs.json.JsSuccess(io.flow.delta.config.v0.models.ProjectStage(v))
case err: play.api.libs.json.JsError => err
}
}
}
}
}
}
def jsonWritesDeltaConfigProjectStage(obj: io.flow.delta.config.v0.models.ProjectStage) = {
play.api.libs.json.JsString(obj.toString)
}
def jsObjectProjectStage(obj: io.flow.delta.config.v0.models.ProjectStage) = {
play.api.libs.json.Json.obj("value" -> play.api.libs.json.JsString(obj.toString))
}
implicit def jsonWritesDeltaConfigProjectStage: play.api.libs.json.Writes[ProjectStage] = {
new play.api.libs.json.Writes[io.flow.delta.config.v0.models.ProjectStage] {
def writes(obj: io.flow.delta.config.v0.models.ProjectStage) = {
jsonWritesDeltaConfigProjectStage(obj)
}
}
}
implicit def jsonReadsDeltaConfigBranch: play.api.libs.json.Reads[Branch] = {
(__ \\ "name").read[String].map { x => new Branch(name = x) }
}
def jsObjectBranch(obj: io.flow.delta.config.v0.models.Branch): play.api.libs.json.JsObject = {
play.api.libs.json.Json.obj(
"name" -> play.api.libs.json.JsString(obj.name)
)
}
implicit def jsonWritesDeltaConfigBranch: play.api.libs.json.Writes[Branch] = {
new play.api.libs.json.Writes[io.flow.delta.config.v0.models.Branch] {
def writes(obj: io.flow.delta.config.v0.models.Branch) = {
jsObjectBranch(obj)
}
}
}
implicit def jsonReadsDeltaConfigConfigError: play.api.libs.json.Reads[ConfigError] = {
(__ \\ "errors").read[Seq[String]].map { x => new ConfigError(errors = x) }
}
def jsObjectConfigError(obj: io.flow.delta.config.v0.models.ConfigError): play.api.libs.json.JsObject = {
play.api.libs.json.Json.obj(
"errors" -> play.api.libs.json.Json.toJson(obj.errors)
) ++ play.api.libs.json.Json.obj("discriminator" -> "config_error")
}
implicit def jsonWritesDeltaConfigConfigError: play.api.libs.json.Writes[ConfigError] = {
new play.api.libs.json.Writes[io.flow.delta.config.v0.models.ConfigError] {
def writes(obj: io.flow.delta.config.v0.models.ConfigError) = {
jsObjectConfigError(obj)
}
}
}
implicit def jsonReadsDeltaConfigConfigProject: play.api.libs.json.Reads[ConfigProject] = {
for {
stages <- (__ \\ "stages").read[Seq[io.flow.delta.config.v0.models.ProjectStage]]
builds <- (__ \\ "builds").read[Seq[io.flow.delta.config.v0.models.BuildConfig]]
branches <- (__ \\ "branches").read[Seq[io.flow.delta.config.v0.models.Branch]]
} yield ConfigProject(stages, builds, branches)
}
def jsObjectConfigProject(obj: io.flow.delta.config.v0.models.ConfigProject): play.api.libs.json.JsObject = {
play.api.libs.json.Json.obj(
"stages" -> play.api.libs.json.Json.toJson(obj.stages),
"builds" -> play.api.libs.json.Json.toJson(obj.builds),
"branches" -> play.api.libs.json.Json.toJson(obj.branches)
) ++ play.api.libs.json.Json.obj("discriminator" -> "config_project")
}
implicit def jsonWritesDeltaConfigConfigProject: play.api.libs.json.Writes[ConfigProject] = {
new play.api.libs.json.Writes[io.flow.delta.config.v0.models.ConfigProject] {
def writes(obj: io.flow.delta.config.v0.models.ConfigProject) = {
jsObjectConfigProject(obj)
}
}
}
implicit def jsonReadsDeltaConfigEcsBuildConfig: play.api.libs.json.Reads[EcsBuildConfig] = {
for {
name <- (__ \\ "name").read[String]
cluster <- (__ \\ "cluster").readNullable[io.flow.delta.config.v0.models.Cluster]
dockerfile <- (__ \\ "dockerfile").read[String]
initialNumberInstances <- (__ \\ "initial_number_instances").read[Long]
instanceType <- (__ \\ "instance_type").read[io.flow.delta.config.v0.models.InstanceType]
memory <- (__ \\ "memory").readNullable[Long]
containerMemory <- (__ \\ "container_memory").readNullable[Long]
portContainer <- (__ \\ "port_container").read[Int]
portHost <- (__ \\ "port_host").read[Int]
remoteLogging <- (__ \\ "remote_logging").readNullable[Boolean]
stages <- (__ \\ "stages").read[Seq[io.flow.delta.config.v0.models.BuildStage]]
dependencies <- (__ \\ "dependencies").read[Seq[String]]
version <- (__ \\ "version").readNullable[String]
healthcheckUrl <- (__ \\ "healthcheck_url").readNullable[String]
crossZoneLoadBalancing <- (__ \\ "cross_zone_load_balancing").readNullable[Boolean]
allowDowntime <- (__ \\ "allow_downtime").readNullable[Boolean]
} yield EcsBuildConfig(name, cluster, dockerfile, initialNumberInstances, instanceType, memory, containerMemory, portContainer, portHost, remoteLogging, stages, dependencies, version, healthcheckUrl, crossZoneLoadBalancing, allowDowntime)
}
def jsObjectEcsBuildConfig(obj: io.flow.delta.config.v0.models.EcsBuildConfig): play.api.libs.json.JsObject = {
play.api.libs.json.Json.obj(
"name" -> play.api.libs.json.JsString(obj.name),
"dockerfile" -> play.api.libs.json.JsString(obj.dockerfile),
"initial_number_instances" -> play.api.libs.json.JsNumber(obj.initialNumberInstances),
"instance_type" -> play.api.libs.json.JsString(obj.instanceType.toString),
"port_container" -> play.api.libs.json.JsNumber(obj.portContainer),
"port_host" -> play.api.libs.json.JsNumber(obj.portHost),
"stages" -> play.api.libs.json.Json.toJson(obj.stages),
"dependencies" -> play.api.libs.json.Json.toJson(obj.dependencies)
) ++ (obj.cluster match {
case None => play.api.libs.json.Json.obj()
case Some(x) => play.api.libs.json.Json.obj("cluster" -> play.api.libs.json.JsString(x.toString))
}) ++
(obj.memory match {
case None => play.api.libs.json.Json.obj()
case Some(x) => play.api.libs.json.Json.obj("memory" -> play.api.libs.json.JsNumber(x))
}) ++
(obj.containerMemory match {
case None => play.api.libs.json.Json.obj()
case Some(x) => play.api.libs.json.Json.obj("container_memory" -> play.api.libs.json.JsNumber(x))
}) ++
(obj.remoteLogging match {
case None => play.api.libs.json.Json.obj()
case Some(x) => play.api.libs.json.Json.obj("remote_logging" -> play.api.libs.json.JsBoolean(x))
}) ++
(obj.version match {
case None => play.api.libs.json.Json.obj()
case Some(x) => play.api.libs.json.Json.obj("version" -> play.api.libs.json.JsString(x))
}) ++
(obj.healthcheckUrl match {
case None => play.api.libs.json.Json.obj()
case Some(x) => play.api.libs.json.Json.obj("healthcheck_url" -> play.api.libs.json.JsString(x))
}) ++
(obj.crossZoneLoadBalancing match {
case None => play.api.libs.json.Json.obj()
case Some(x) => play.api.libs.json.Json.obj("cross_zone_load_balancing" -> play.api.libs.json.JsBoolean(x))
}) ++
(obj.allowDowntime match {
case None => play.api.libs.json.Json.obj()
case Some(x) => play.api.libs.json.Json.obj("allow_downtime" -> play.api.libs.json.JsBoolean(x))
}) ++ play.api.libs.json.Json.obj("discriminator" -> "ecs")
}
implicit def jsonWritesDeltaConfigEcsBuildConfig: play.api.libs.json.Writes[EcsBuildConfig] = {
new play.api.libs.json.Writes[io.flow.delta.config.v0.models.EcsBuildConfig] {
def writes(obj: io.flow.delta.config.v0.models.EcsBuildConfig) = {
jsObjectEcsBuildConfig(obj)
}
}
}
implicit def jsonReadsDeltaConfigK8sBuildConfig: play.api.libs.json.Reads[K8sBuildConfig] = {
for {
name <- (__ \\ "name").read[String]
cluster <- (__ \\ "cluster").read[io.flow.delta.config.v0.models.Cluster]
} yield K8sBuildConfig(name, cluster)
}
def jsObjectK8sBuildConfig(obj: io.flow.delta.config.v0.models.K8sBuildConfig): play.api.libs.json.JsObject = {
play.api.libs.json.Json.obj(
"name" -> play.api.libs.json.JsString(obj.name),
"cluster" -> play.api.libs.json.JsString(obj.cluster.toString)
) ++ play.api.libs.json.Json.obj("discriminator" -> "k8s")
}
implicit def jsonWritesDeltaConfigK8sBuildConfig: play.api.libs.json.Writes[K8sBuildConfig] = {
new play.api.libs.json.Writes[io.flow.delta.config.v0.models.K8sBuildConfig] {
def writes(obj: io.flow.delta.config.v0.models.K8sBuildConfig) = {
jsObjectK8sBuildConfig(obj)
}
}
}
implicit def jsonReadsDeltaConfigBuildConfig: play.api.libs.json.Reads[BuildConfig] = new play.api.libs.json.Reads[BuildConfig] {
def reads(js: play.api.libs.json.JsValue): play.api.libs.json.JsResult[BuildConfig] = {
(js \\ "discriminator").asOpt[String].getOrElse("ecs") match {
case "ecs" => js.validate[io.flow.delta.config.v0.models.EcsBuildConfig]
case "k8s" => js.validate[io.flow.delta.config.v0.models.K8sBuildConfig]
case other => play.api.libs.json.JsSuccess(io.flow.delta.config.v0.models.BuildConfigUndefinedType(other))
}
}
}
def jsObjectBuildConfig(obj: io.flow.delta.config.v0.models.BuildConfig): play.api.libs.json.JsObject = {
obj match {
case x: io.flow.delta.config.v0.models.EcsBuildConfig => jsObjectEcsBuildConfig(x)
case x: io.flow.delta.config.v0.models.K8sBuildConfig => jsObjectK8sBuildConfig(x)
case other => {
sys.error(s"The type[${other.getClass.getName}] has no JSON writer")
}
}
}
implicit def jsonWritesDeltaConfigBuildConfig: play.api.libs.json.Writes[BuildConfig] = {
new play.api.libs.json.Writes[io.flow.delta.config.v0.models.BuildConfig] {
def writes(obj: io.flow.delta.config.v0.models.BuildConfig) = {
jsObjectBuildConfig(obj)
}
}
}
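// Reading sketch: a payload without a discriminator defaults to "ecs" (see
// jsonReadsDeltaConfigBuildConfig above), so legacy ECS payloads still parse.
// A k8s payload round-trips like this (values are illustrative):
//   play.api.libs.json.Json.parse("""{"discriminator":"k8s","name":"api","cluster":"k8s"}""")
//     .validate[io.flow.delta.config.v0.models.BuildConfig]
//   // => JsSuccess(K8sBuildConfig("api", Cluster.K8s))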
implicit def jsonReadsDeltaConfigConfig: play.api.libs.json.Reads[Config] = new play.api.libs.json.Reads[Config] {
def reads(js: play.api.libs.json.JsValue): play.api.libs.json.JsResult[Config] = {
(js \\ "discriminator").asOpt[String].getOrElse { sys.error("Union[Config] requires a discriminator named 'discriminator' - this field was not found in the Json Value") } match {
case "config_project" => js.validate[io.flow.delta.config.v0.models.ConfigProject]
case "config_error" => js.validate[io.flow.delta.config.v0.models.ConfigError]
case other => play.api.libs.json.JsSuccess(io.flow.delta.config.v0.models.ConfigUndefinedType(other))
}
}
}
def jsObjectConfig(obj: io.flow.delta.config.v0.models.Config): play.api.libs.json.JsObject = {
obj match {
case x: io.flow.delta.config.v0.models.ConfigProject => jsObjectConfigProject(x)
case x: io.flow.delta.config.v0.models.ConfigError => jsObjectConfigError(x)
case other => {
sys.error(s"The type[${other.getClass.getName}] has no JSON writer")
}
}
}
implicit def jsonWritesDeltaConfigConfig: play.api.libs.json.Writes[Config] = {
new play.api.libs.json.Writes[io.flow.delta.config.v0.models.Config] {
def writes(obj: io.flow.delta.config.v0.models.Config) = {
jsObjectConfig(obj)
}
}
}
}
}
package io.flow.delta.config.v0 {
object Bindables {
import play.api.mvc.{PathBindable, QueryStringBindable}
// import models directly for backwards compatibility with prior versions of the generator
import Core._
import Models._
object Core {
implicit def pathBindableDateTimeIso8601(implicit stringBinder: QueryStringBindable[String]): PathBindable[_root_.org.joda.time.DateTime] = ApibuilderPathBindable(ApibuilderTypes.dateTimeIso8601)
implicit def queryStringBindableDateTimeIso8601(implicit stringBinder: QueryStringBindable[String]): QueryStringBindable[_root_.org.joda.time.DateTime] = ApibuilderQueryStringBindable(ApibuilderTypes.dateTimeIso8601)
implicit def pathBindableDateIso8601(implicit stringBinder: QueryStringBindable[String]): PathBindable[_root_.org.joda.time.LocalDate] = ApibuilderPathBindable(ApibuilderTypes.dateIso8601)
implicit def queryStringBindableDateIso8601(implicit stringBinder: QueryStringBindable[String]): QueryStringBindable[_root_.org.joda.time.LocalDate] = ApibuilderQueryStringBindable(ApibuilderTypes.dateIso8601)
}
object Models {
import io.flow.delta.config.v0.models._
val buildStageConverter: ApibuilderTypeConverter[io.flow.delta.config.v0.models.BuildStage] = new ApibuilderTypeConverter[io.flow.delta.config.v0.models.BuildStage] {
override def convert(value: String): io.flow.delta.config.v0.models.BuildStage = io.flow.delta.config.v0.models.BuildStage(value)
override def convert(value: io.flow.delta.config.v0.models.BuildStage): String = value.toString
override def example: io.flow.delta.config.v0.models.BuildStage = io.flow.delta.config.v0.models.BuildStage.SetDesiredState
override def validValues: Seq[io.flow.delta.config.v0.models.BuildStage] = io.flow.delta.config.v0.models.BuildStage.all
}
implicit def pathBindableBuildStage(implicit stringBinder: QueryStringBindable[String]): PathBindable[io.flow.delta.config.v0.models.BuildStage] = ApibuilderPathBindable(buildStageConverter)
implicit def queryStringBindableBuildStage(implicit stringBinder: QueryStringBindable[String]): QueryStringBindable[io.flow.delta.config.v0.models.BuildStage] = ApibuilderQueryStringBindable(buildStageConverter)
val clusterConverter: ApibuilderTypeConverter[io.flow.delta.config.v0.models.Cluster] = new ApibuilderTypeConverter[io.flow.delta.config.v0.models.Cluster] {
override def convert(value: String): io.flow.delta.config.v0.models.Cluster = io.flow.delta.config.v0.models.Cluster(value)
override def convert(value: io.flow.delta.config.v0.models.Cluster): String = value.toString
override def example: io.flow.delta.config.v0.models.Cluster = io.flow.delta.config.v0.models.Cluster.Ecs
override def validValues: Seq[io.flow.delta.config.v0.models.Cluster] = io.flow.delta.config.v0.models.Cluster.all
}
implicit def pathBindableCluster(implicit stringBinder: QueryStringBindable[String]): PathBindable[io.flow.delta.config.v0.models.Cluster] = ApibuilderPathBindable(clusterConverter)
implicit def queryStringBindableCluster(implicit stringBinder: QueryStringBindable[String]): QueryStringBindable[io.flow.delta.config.v0.models.Cluster] = ApibuilderQueryStringBindable(clusterConverter)
val instanceTypeConverter: ApibuilderTypeConverter[io.flow.delta.config.v0.models.InstanceType] = new ApibuilderTypeConverter[io.flow.delta.config.v0.models.InstanceType] {
override def convert(value: String): io.flow.delta.config.v0.models.InstanceType = io.flow.delta.config.v0.models.InstanceType(value)
override def convert(value: io.flow.delta.config.v0.models.InstanceType): String = value.toString
override def example: io.flow.delta.config.v0.models.InstanceType = io.flow.delta.config.v0.models.InstanceType.C4Large
override def validValues: Seq[io.flow.delta.config.v0.models.InstanceType] = io.flow.delta.config.v0.models.InstanceType.all
}
implicit def pathBindableInstanceType(implicit stringBinder: QueryStringBindable[String]): PathBindable[io.flow.delta.config.v0.models.InstanceType] = ApibuilderPathBindable(instanceTypeConverter)
implicit def queryStringBindableInstanceType(implicit stringBinder: QueryStringBindable[String]): QueryStringBindable[io.flow.delta.config.v0.models.InstanceType] = ApibuilderQueryStringBindable(instanceTypeConverter)
val projectStageConverter: ApibuilderTypeConverter[io.flow.delta.config.v0.models.ProjectStage] = new ApibuilderTypeConverter[io.flow.delta.config.v0.models.ProjectStage] {
override def convert(value: String): io.flow.delta.config.v0.models.ProjectStage = io.flow.delta.config.v0.models.ProjectStage(value)
override def convert(value: io.flow.delta.config.v0.models.ProjectStage): String = value.toString
override def example: io.flow.delta.config.v0.models.ProjectStage = io.flow.delta.config.v0.models.ProjectStage.SyncShas
override def validValues: Seq[io.flow.delta.config.v0.models.ProjectStage] = io.flow.delta.config.v0.models.ProjectStage.all
}
implicit def pathBindableProjectStage(implicit stringBinder: QueryStringBindable[String]): PathBindable[io.flow.delta.config.v0.models.ProjectStage] = ApibuilderPathBindable(projectStageConverter)
implicit def queryStringBindableProjectStage(implicit stringBinder: QueryStringBindable[String]): QueryStringBindable[io.flow.delta.config.v0.models.ProjectStage] = ApibuilderQueryStringBindable(projectStageConverter)
}
trait ApibuilderTypeConverter[T] {
def convert(value: String): T
def convert(value: T): String
def example: T
def validValues: Seq[T] = Nil
def errorMessage(key: String, value: String, ex: java.lang.Exception): String = {
val base = s"Invalid value '$value' for parameter '$key'. "
validValues.toList match {
case Nil => base + "Ex: " + convert(example)
case values => base + "Valid values are: " + values.mkString("'", "', '", "'")
}
}
}
object ApibuilderTypes {
val dateTimeIso8601: ApibuilderTypeConverter[_root_.org.joda.time.DateTime] = new ApibuilderTypeConverter[_root_.org.joda.time.DateTime] {
override def convert(value: String): _root_.org.joda.time.DateTime = _root_.org.joda.time.format.ISODateTimeFormat.dateTimeParser.parseDateTime(value)
override def convert(value: _root_.org.joda.time.DateTime): String = _root_.org.joda.time.format.ISODateTimeFormat.dateTime.print(value)
override def example: _root_.org.joda.time.DateTime = _root_.org.joda.time.DateTime.now
}
val dateIso8601: ApibuilderTypeConverter[_root_.org.joda.time.LocalDate] = new ApibuilderTypeConverter[_root_.org.joda.time.LocalDate] {
override def convert(value: String): _root_.org.joda.time.LocalDate = _root_.org.joda.time.format.ISODateTimeFormat.dateTimeParser.parseLocalDate(value)
override def convert(value: _root_.org.joda.time.LocalDate): String = _root_.org.joda.time.format.ISODateTimeFormat.date.print(value)
override def example: _root_.org.joda.time.LocalDate = _root_.org.joda.time.LocalDate.now
}
}
final case class ApibuilderQueryStringBindable[T](
converters: ApibuilderTypeConverter[T]
) extends QueryStringBindable[T] {
override def bind(key: String, params: Map[String, Seq[String]]): _root_.scala.Option[_root_.scala.Either[String, T]] = {
params.getOrElse(key, Nil).headOption.map { v =>
try {
Right(
converters.convert(v)
)
} catch {
case ex: java.lang.Exception => Left(
converters.errorMessage(key, v, ex)
)
}
}
}
override def unbind(key: String, value: T): String = {
s"$key=${converters.convert(value)}"
}
}
final case class ApibuilderPathBindable[T](
converters: ApibuilderTypeConverter[T]
) extends PathBindable[T] {
override def bind(key: String, value: String): _root_.scala.Either[String, T] = {
try {
Right(
converters.convert(value)
)
} catch {
case ex: java.lang.Exception => Left(
converters.errorMessage(key, value, ex)
)
}
}
override def unbind(key: String, value: T): String = {
converters.convert(value)
}
}
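// Binding sketch: for enum-like types such as BuildStage, the converter never throws
// (unknown strings map to UNDEFINED), so binding "set_desired_state" yields
// BuildStage.SetDesiredState and "bogus" yields UNDEFINED("bogus"). Date binders, by
// contrast, throw on malformed input and so surface errorMessage via Left(...).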
}
}
package io.flow.delta.config.v0 {
object Constants {
val Namespace = "io.flow.delta.config.v0"
val UserAgent = "apibuilder 0.15.11 app.apibuilder.io/flow/delta-config/latest/play_2_8_client"
val Version = "0.8.36"
val VersionMajor = 0
}
class Client(
ws: play.api.libs.ws.WSClient,
val baseUrl: String,
auth: scala.Option[io.flow.delta.config.v0.Authorization] = None,
defaultHeaders: Seq[(String, String)] = Nil
) extends interfaces.Client {
import io.flow.delta.config.v0.models.json._
private[this] val logger = play.api.Logger("io.flow.delta.config.v0.Client")
logger.info(s"Initializing io.flow.delta.config.v0.Client for url $baseUrl")
def _requestHolder(path: String): play.api.libs.ws.WSRequest = {
val holder = ws.url(baseUrl + path).addHttpHeaders(
"User-Agent" -> Constants.UserAgent,
"X-Apidoc-Version" -> Constants.Version,
"X-Apidoc-Version-Major" -> Constants.VersionMajor.toString
).addHttpHeaders(defaultHeaders : _*)
auth.fold(holder) {
case Authorization.Basic(username, password) => {
holder.withAuth(username, password.getOrElse(""), play.api.libs.ws.WSAuthScheme.BASIC)
}
case a => sys.error("Invalid authorization scheme[" + a.getClass + "]")
}
}
def _logRequest(method: String, req: play.api.libs.ws.WSRequest): play.api.libs.ws.WSRequest = {
val queryComponents = for {
(name, values) <- req.queryString
value <- values
} yield s"$name=$value"
val url = s"${req.url}${queryComponents.mkString("?", "&", "")}"
auth.fold(logger.info(s"curl -X $method '$url'")) { _ =>
logger.info(s"curl -X $method -u '[REDACTED]:' '$url'")
}
req
}
def _executeRequest(
method: String,
path: String,
queryParameters: Seq[(String, String)] = Nil,
requestHeaders: Seq[(String, String)] = Nil,
body: Option[play.api.libs.json.JsValue] = None
): scala.concurrent.Future[play.api.libs.ws.WSResponse] = {
method.toUpperCase match {
case "GET" => {
_logRequest("GET", _requestHolder(path).addHttpHeaders(requestHeaders:_*).addQueryStringParameters(queryParameters:_*)).get()
}
case "POST" => {
_logRequest("POST", _requestHolder(path).addHttpHeaders(_withJsonContentType(requestHeaders):_*).addQueryStringParameters(queryParameters:_*)).post(body.getOrElse(play.api.libs.json.Json.obj()))
}
case "PUT" => {
_logRequest("PUT", _requestHolder(path).addHttpHeaders(_withJsonContentType(requestHeaders):_*).addQueryStringParameters(queryParameters:_*)).put(body.getOrElse(play.api.libs.json.Json.obj()))
}
case "PATCH" => {
_logRequest("PATCH", _requestHolder(path).addHttpHeaders(requestHeaders:_*).addQueryStringParameters(queryParameters:_*)).patch(body.getOrElse(play.api.libs.json.Json.obj()))
}
case "DELETE" => {
_logRequest("DELETE", _requestHolder(path).addHttpHeaders(requestHeaders:_*).addQueryStringParameters(queryParameters:_*)).delete()
}
case "HEAD" => {
_logRequest("HEAD", _requestHolder(path).addHttpHeaders(requestHeaders:_*).addQueryStringParameters(queryParameters:_*)).head()
}
case "OPTIONS" => {
_logRequest("OPTIONS", _requestHolder(path).addHttpHeaders(requestHeaders:_*).addQueryStringParameters(queryParameters:_*)).options()
}
case _ => {
_logRequest(method, _requestHolder(path).addHttpHeaders(requestHeaders:_*).addQueryStringParameters(queryParameters:_*))
sys.error("Unsupported method[%s]".format(method))
}
}
}
/**
* Adds a Content-Type: application/json header unless the specified requestHeaders
* already contain a Content-Type header
*/
def _withJsonContentType(headers: Seq[(String, String)]): Seq[(String, String)] = {
headers.find { _._1.toUpperCase == "CONTENT-TYPE" } match {
case None => headers ++ Seq(("Content-Type" -> "application/json; charset=UTF-8"))
case Some(_) => headers
}
}
}
object Client {
def parseJson[T](
className: String,
r: play.api.libs.ws.WSResponse,
f: (play.api.libs.json.JsValue => play.api.libs.json.JsResult[T])
): T = {
f(play.api.libs.json.Json.parse(r.body)) match {
case play.api.libs.json.JsSuccess(x, _) => x
case play.api.libs.json.JsError(errors) => {
throw io.flow.delta.config.v0.errors.FailedRequest(r.status, s"Invalid json for class[" + className + "]: " + errors.mkString(" "))
}
}
}
}
sealed trait Authorization extends _root_.scala.Product with _root_.scala.Serializable
object Authorization {
final case class Basic(username: String, password: Option[String] = None) extends Authorization
}
package interfaces {
trait Client {
def baseUrl: String
}
}
package errors {
final case class FailedRequest(responseCode: Int, message: String, requestUri: Option[_root_.java.net.URI] = None) extends _root_.java.lang.Exception(s"HTTP $responseCode: $message")
}
}
| flowcommerce/delta | generated/app/FlowDeltaConfigV0Client.scala | Scala | mit | 46,124 |
package org.chipmunk.repository
import org.chipmunk.entity.Entity
import org.chipmunk.entity.Identifiable.Id
import org.squeryl.PrimitiveTypeMode.__thisDsl
import org.squeryl.PrimitiveTypeMode.from
import org.squeryl.PrimitiveTypeMode.long2ScalarLong
import org.squeryl.PrimitiveTypeMode.where
import org.squeryl.Table
abstract class SquerylRepo[T <: Entity[_]] extends Repository[T] {
protected def table: Table[T]
def get(id: Id): Option[T] = {
val elems = from(table){ s => where(s.id === id).select(s) }
elems.headOption
}
/**
* Note that this method *ONLY CHECKS elem's id; IT DOES NOT CHECK THE DB*.
*/
def save(elem: T): T = {
val elemsTable = elem.table
assume(elemsTable eq table, s"Cannot save $elem in $table, it belongs to $elemsTable")
elem.persist()
elem
}
def remove(elem: T): Int = {
val elemsTable = elem.table
assume(elemsTable eq table, s"Cannot remove $elem from $table, it belongs to $elemsTable")
table.deleteWhere(_.id === elem.id)
//FIXME: cascades?
}
}
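// Concrete repository sketch (the schema wiring is an assumption; chipmunk's Entity
// is assumed to provide the `id`, `table` and `persist()` members used above):
//   object WidgetRepo extends SquerylRepo[Widget] {
//     protected def table: Table[Widget] = MySchema.widgets
//   }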
| kpjjpk/chipmunk | src/main/scala/org/chipmunk/repository/SquerylRepo.scala | Scala | mit | 1,044 |
//======================================================================================================================
// Facsimile: A Discrete-Event Simulation Library
// Copyright © 2004-2020, Michael J Allen.
//
// This file is part of Facsimile.
//
// Facsimile is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later
// version.
//
// Facsimile is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
// warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
// details.
//
// You should have received a copy of the GNU Lesser General Public License along with Facsimile. If not, see:
//
// http://www.gnu.org/licenses/lgpl.
//
// The developers welcome all comments, suggestions and offers of assistance. For further information, please visit the
// project home page at:
//
// http://facsim.org/
//
// Thank you for your interest in the Facsimile project!
//
// IMPORTANT NOTE: All patches (modifications to existing files and/or the addition of new files) submitted for
// inclusion as part of the official Facsimile code base, must comply with the published Facsimile Coding Standards. If
// your code fails to comply with the standard, then your patches will be rejected. For further information, please
// visit the coding standards at:
//
// http://facsim.org/Documentation/CodingStandards/
//======================================================================================================================
// Scala source file belonging to the org.facsim.sfx.beans package.
//======================================================================================================================
package org.facsim.sfx.beans
import javafx.beans.{InvalidationListener, Observable}
import org.facsim.sfx.event.subscriptions.Subscription
import org.facsim.util.requireNonNull
/** Rich wrappers for [[javafx.beans]] elements.
*
* @since 0.0
*/
object SFXBeansImplicits {
/** Rich wrapper for [[javafx.beans.Observable]] interface instances.
*
* @param o ''JavaFX'' observable being wrapped.
*
* @see [[javafx.beans.Observable]] for further information.
*
* @since 0.0
*/
final implicit class RichObservable(private val o: Observable)
extends AnyVal {
/** Install an ''invalidation listener''.
*
* Registers the specified function to be called when the associated observable is invalidated.
*
     * @param il ''Invalidation listener'' function to be notified when this observable is invalidated. The first
     * argument to this function will be the instance of the observable that has been invalidated.
*
* @return Subscription to the registered listener, allowing the listener to be removed by canceling the
* subscription.
*
* @throws scala.NullPointerException if `il` is `null`.
*
* @see [[javafx.beans.InvalidationListener]] for information about invalidation listeners.
*
* @see [[javafx.beans.Observable#addListener]] for information about registering invalidation listeners.
*
* @see [[javafx.beans.Observable#removeListener]] for information about removing invalidation listeners. This
* method is called when the onInvalidate subscription is canceled.
*
* @since 0.0
*/
def onInvalidate(il: Observable => Unit): Subscription = {
// Sanity check. This value cannot be null.
requireNonNull(il)
// Create an invalidation listener instance, which will invoke the specified function.
val listener = new InvalidationListener {
final override def invalidated(observable: Observable): Unit = il(observable)
}
// Register the listener with the observable.
o.addListener(listener)
// Create and return a new subscription allowing the caller to unsubscribe from listening to this observable.
new Subscription {
final override def cancel(): Unit = o.removeListener(listener)
}
}
}
}
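// Illustrative usage sketch (not part of the original file): subscribing to
// invalidation events on any JavaFX observable, then cancelling the
// subscription later. `someObservable` is a placeholder.
//
//   import org.facsim.sfx.beans.SFXBeansImplicits._
//   val sub = someObservable.onInvalidate(o => println(s"$o was invalidated"))
//   // ...later, stop listening:
//   sub.cancel()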
|
MichaelJAllen/facsimile
|
facsimile-sfx/src/main/scala/org/facsim/sfx/beans/SFXBeansImplicits.scala
|
Scala
|
lgpl-3.0
| 4,212
|
package se.stagehand.swing.player
import se.stagehand.lib.scripting.network.NetworkedEffect
import se.stagehand.lib.scripting.network.NetworkedTarget
import com.jidesoft.swing.MultilineLabel
import scala.collection.mutable.StringBuilder
import se.stagehand.lib.Log
import se.stagehand.lib.scripting._
import scala.swing._
import se.stagehand.swing.gui.BetterDialog
import java.awt.MouseInfo
import java.awt.Dimension
import javax.swing.JList
import javax.swing.JLabel
import javax.swing.ListCellRenderer
import javax.swing.event.ListSelectionListener
import scala.swing.event.SelectionChanged
import scala.swing.event.MouseClicked
import se.stagehand.swing.gui.PopupMenu
import javax.swing.SwingUtilities
object NetworkedTargetPicker {
private val log = Log.getLog(this.getClass())
/**
* Add targets to an effect.
*/
def pickTargets(filter: Set[_ <: Target] => Set[_ <: Target], applier: Target => Unit, existing: Set[_<: Target], remover:Target => Unit) {
val dialog = new TargetDialog(filter, existing, remover)
dialog.targets.foreach(applier)
}
class TargetDialog(filter: Set[_ <: Target] => Set[_ <: Target], existing: Set[_<: Target], remover:Target => Unit) extends BetterDialog {
private val allTargets:Set[_ <: Target] = Target.allTargets.toSet
private val valid = filter(allTargets)
log.debug("Valid targets: " + valid.size + " all targets : " + allTargets.size)
private val targetView = new ListView[Target](valid.toSeq) {
lazy val typedPeer = peer.asInstanceOf[JList[Target]]
border = Swing.EtchedBorder(Swing.Raised)
typedPeer.setCellRenderer(new Renderer)
}
private val existList = new BoxPanel(Orientation.Vertical) {
border = Swing.EtchedBorder(Swing.Raised)
existing.foreach(x => {
contents += new ListWrapper(x)
})
}
private def items = targetView.selection.items
private var _targets:Seq[Target] = Seq()
def targets = _targets
title = "Choose Targets"
modal = true
import BorderPanel.Position._
contents = new BorderPanel {
layout(new FlowPanel {
border = Swing.EmptyBorder(5,5,5,5)
contents += new BoxPanel(Orientation.Vertical) {
contents += new Label("All Valid")
contents += targetView
}
contents += new BoxPanel(Orientation.Vertical) {
contents += new Label("Added")
contents += existList
}
}) = Center
layout(new FlowPanel(FlowPanel.Alignment.Right)(
Button("Select") {
_targets = items
close()
}
)) = South
layout( Component.wrap(new MultilineLabel {
private val tvs = targetView.selection
listenTo(tvs)
reactions += {
case e:SelectionChanged => {
if (tvs.items.size > 0) {
// setText((tvs.items(tvs.anchorIndex).prettyDescription))
}
refresh
}
}
})) = East
refresh
}
centerOn(MouseInfo.getPointerInfo().getLocation())
open()
private class Renderer extends JLabel with ListCellRenderer[Target] with Publisher {
def getListCellRendererComponent(list:JList[_ <: Target], item: Target, index:Int, selected:Boolean, focus:Boolean) = {
setText(item.prettyName)
if (selected) {
setBackground(list.getSelectionBackground)
setForeground(list.getSelectionForeground)
} else {
setBackground(list.getBackground)
setForeground(list.getForeground)
}
setEnabled(list.isEnabled())
setFont(list.getFont)
setOpaque(true)
this
}
}
  private class ListWrapper(target:Target) extends Label {
    var me = this
    text = target.prettyName // was `this.name`, the (unset) Swing component name
    listenTo(mouse.clicks) // without this, the MouseClicked reaction below never fires
    reactions += {
      case e: MouseClicked if SwingUtilities.isRightMouseButton(e.peer) => {
        var cursor = MouseInfo.getPointerInfo.getLocation
        SwingUtilities.convertPointFromScreen(cursor, this.peer)
        contextMenu.show(this, cursor.x, cursor.y)
      }
    }
private val contextMenu = new PopupMenu {
contents += new MenuItem(new Action("Delete") {
        def apply(): Unit = {
existList.contents -= me
remover(target)
}
})
}
}
}
}
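// Illustrative usage sketch (not part of the original file): pick targets of
// a hypothetical `LightTarget` type and attach them to an effect whose
// `addTarget`/`removeTarget` methods are assumed here.
//
//   NetworkedTargetPicker.pickTargets(
//     filter = _.collect { case t: LightTarget => t },
//     applier = effect.addTarget,
//     existing = effect.targets.toSet,
//     remover = effect.removeTarget
//   )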
|
evilcandybag/Stagehand-core
|
src/main/scala/se/stagehand/swing/player/TargetPicker.scala
|
Scala
|
gpl-2.0
| 4,430
|
package org.kermeta.kompren.parser
import scala.io.Source
/**
* Singleton util methods used by the parser
*/
object ParserUtil {
  def loadFile(uri: String): String = {
    val source = Source.fromFile(uri)
    // Append a newline after every line and close the reader when done.
    try source.getLines.map(_ + '\n').mkString
    finally source.close()
  }
}
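// Illustrative usage sketch (not part of the original file); the file path is
// a placeholder:
//
//   val modelSource: String = ParserUtil.loadFile("model.kompren")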
|
arnobl/kompren
|
kompren-core/fr.inria.diverse.kompren.parser/src/org/kermeta/kompren/parser/ParserUtil.scala
|
Scala
|
epl-1.0
| 322
|
package org.apache.commons.collections
class ArrayStack
class Buffer
|
todesking/sbt-conflict-classes
|
src/sbt-test/sbt-conflict-classes/simple/src/main/scala/dummy2.scala
|
Scala
|
mit
| 70
|
package twitter.kafka
import java.util
import akka.actor._
import akka.stream.actor.ActorSubscriber
import akka.stream.scaladsl.{Keep, Sink, Source}
import akka.stream.{ActorMaterializer, OverflowStrategy}
import com.softwaremill.react.kafka.{ProducerMessage, ProducerProperties, ReactiveKafka}
import com.typesafe.scalalogging.LazyLogging
import org.apache.kafka.common.serialization.{Deserializer, Serializer}
import spray.json.{DefaultJsonProtocol, _}
import twitter.kafka.PrivateFeedToKafka.{Tweet, TweetSerializer}
import twitter4j.{Status, _}
import scala.concurrent.Await
import scala.concurrent.duration.Duration
/**
* twitter to kafka microservice example
* todo;; externalize configuration
*/
object FeedToKafka extends App with LazyLogging {
implicit val actorSystem: ActorSystem = ActorSystem("FeedToKafka")
implicit val materializer: ActorMaterializer = ActorMaterializer()
val topicName = "test_topic"
val subscriberProps = new ReactiveKafka().producerActorProps(ProducerProperties(
bootstrapServers = "localhost:9092",
topic = topicName,
valueSerializer = TweetSerializer
))
val subscriber = actorSystem.actorOf(subscriberProps)
val (actorRef, publisher) = Source.actorRef[Status](1000, OverflowStrategy.fail).toMat(Sink.asPublisher(false))(Keep.both).run()
val factory = new TwitterStreamFactory()
val twitterStream = factory.getInstance()
twitterStream.addListener(new StatusForwarder(actorRef))
twitterStream.filter(new FilterQuery("espn"))
Source.fromPublisher(publisher).map(s => ProducerMessage(Tweet(s.getUser.getName, s.getText)))
.runWith(Sink.fromSubscriber(ActorSubscriber[ProducerMessage[Array[Byte], Tweet]](subscriber)))
Await.ready(actorSystem.whenTerminated, Duration.Inf)
// read a twitter feed writing to kafka topic
}
class StatusForwarder(publisher: ActorRef) extends StatusListener {
def onStatus(status: Status): Unit = publisher ! status
  //\\ nop all the others for now //\\
def onStallWarning(warning: StallWarning): Unit = {}
def onDeletionNotice(statusDeletionNotice: StatusDeletionNotice): Unit = {}
def onScrubGeo(userId: Long, upToStatusId: Long): Unit = {}
def onTrackLimitationNotice(numberOfLimitedStatuses: Int): Unit = {}
def onException(ex: Exception): Unit = {}
}
private[kafka] object PrivateFeedToKafka {
case class Tweet(user: String, text: String)
object TweetProtocol extends DefaultJsonProtocol {
    implicit val tweetFormat = jsonFormat2(Tweet)
}
object TweetSerializer extends Serializer[Tweet] {
import TweetProtocol._
override def serialize(s: String, t: Tweet): Array[Byte] =
t.toJson.compactPrint.getBytes("UTF-8")
override def close(): Unit = {}
override def configure(map: util.Map[String, _], b: Boolean): Unit = {}
}
object TweetDeserializer extends Deserializer[Tweet] {
import TweetProtocol._
override def deserialize(s: String, bytes: Array[Byte]): Tweet =
new String(bytes, "UTF-8").parseJson.convertTo[Tweet]
override def close(): Unit = {}
override def configure(map: util.Map[String, _], b: Boolean): Unit = {}
}
}
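// Illustrative round-trip sketch (not part of the original file), exercising
// the JSON (de)serializers defined above:
//
//   import twitter.kafka.PrivateFeedToKafka.{Tweet, TweetSerializer, TweetDeserializer}
//   val bytes = TweetSerializer.serialize("test_topic", Tweet("user", "hello"))
//   TweetDeserializer.deserialize("test_topic", bytes) // == Tweet("user", "hello")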
|
jw3/example-docker-microservice
|
twitter-kafka/src/main/scala/twitter/kafka/FeedToKafka.scala
|
Scala
|
apache-2.0
| 3,251
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.fuberlin.wiwiss.silk.testutil
import org.scalatest.matchers.{MatchResult, BeMatcher}
/**
* Matcher to test if 2 values are approximately equal.
*/
case class approximatelyEqualToOption(r: Option[Double]) extends BeMatcher[Option[Double]] {
val epsilon = 0.001
def apply(l: Option[Double]) = (r,l) match {
case (Some(x), Some(y)) => approximatelyEqualTo(x)(y)
case _ => {
MatchResult(
l.isDefined == r.isDefined,
l + " is not approximately equal to " + r,
l + " is approximately equal to " + r
)
}
}
}
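// Illustrative usage sketch (not part of the original file), inside a
// ScalaTest assertion; defined values within epsilon of each other match:
//
//   Some(0.3334) should be(approximatelyEqualToOption(Some(1.0 / 3.0)))
//   (None: Option[Double]) should be(approximatelyEqualToOption(None))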
|
fusepoolP3/p3-silk
|
silk-core/src/test/scala/de/fuberlin/wiwiss/silk/testutil/approximatelyEqualToOption.scala
|
Scala
|
apache-2.0
| 1,135
|
package com.roundeights.tubeutil.static
import org.specs2.mutable._
import org.specs2.mock._
import scala.concurrent.ExecutionContext.Implicits.global
import com.roundeights.skene._
import java.util.Date
class AssetHandlerTest extends Specification with Mockito {
// A shared renderable instance
val renderable = mock[Renderable]
// Generates a mock Headers object
def mockHeaders ( ifModified: Option[Date] ) = {
val headers = mock[Headers]
headers.ifModifiedSince returns ifModified
headers
}
// Generates a mock request
def mockRequest ( path: String, ifModified: Option[Date] ) = {
val request = mock[Request]
request.url returns URL("http://example.com/" + path)
request.headers returns mockHeaders(ifModified)
request.params returns Map[String, String]()
request
}
// Generates a mock asset
def mockReader ( modified: Date = new Date(123456789) ) = {
val reader = mock[Asset.Reader]
reader.renderable returns renderable
reader.modified returns modified
reader.mimeType returns Some( Response.ContentType.JavaScript )
reader
}
// Generates a mock Asset handler
def mockHandler ( reader: Asset.Reader ) = {
new AssetHandler( path => {
path must_== Asset("path.js")
Some( reader )
})
}
"An AssetHandler" should {
"Serve out the file on a cache miss" in {
val request = mockRequest( "path.js", None )
val response = mock[Response]
val recover = mock[Recover]
mockHandler( mockReader() ).handle( recover, request, response )
there was no(response)
.header( ===(Response.Header.CacheControl), any[String] )
there was one(response).content(renderable)
there was one(response).done
there was no(recover).orRethrow( any[Throwable] )
}
"Strip a version number from a resource" in {
val request = mockRequest( "path.ABC123.js", None )
val response = mock[Response]
val recover = mock[Recover]
mockHandler( mockReader() ).handle( recover, request, response )
there was one(response).header(
Response.Header.CacheControl,
"max-age=31560000, must-revalidate, public"
)
there was one(response).content(renderable)
there was one(response).done
there was no(recover).orRethrow( any[Throwable] )
}
"Pull from the 'asset' parameter when defined" in {
val request = mock[Request]
request.headers returns mockHeaders(None)
request.params returns Map[String, String]( "asset" -> "path.js" )
val response = mock[Response]
val recover = mock[Recover]
mockHandler( mockReader() ).handle( recover, request, response )
there was one(response).content(renderable)
there was one(response).done
there was no(recover).orRethrow( any[Throwable] )
}
"Serve a file when the date modified is newer" in {
val request = mockRequest( "path.js", Some(new Date(1000L)) )
val response = mock[Response]
val recover = mock[Recover]
mockHandler( mockReader( new Date(2000L) ) )
.handle( recover, request, response )
there was one(response).content(renderable)
there was one(response).done
there was no(recover).orRethrow( any[Throwable] )
}
"Send back a 304 when the file age is equal" in {
val request = mockRequest( "path.js", Some(new Date(1000L)) )
val response = mock[Response]
val recover = mock[Recover]
mockHandler( mockReader( new Date(1000L) ) )
.handle( recover, request, response )
there was no(response).content(renderable)
there was one(response).code( Response.Code.NotModified )
there was one(response).done
there was no(recover).orRethrow( any[Throwable] )
}
"Send back a 304 when the file is older" in {
val request = mockRequest( "path.js", Some(new Date(2000L)) )
val response = mock[Response]
val recover = mock[Recover]
mockHandler( mockReader( new Date(1000L) ) )
.handle( recover, request, response )
there was no(response).content(renderable)
there was one(response).code( Response.Code.NotModified )
there was one(response).done
there was no(recover).orRethrow( any[Throwable] )
}
"Throw when the asset isn't found" in {
val request = mockRequest( "path.js", None )
val response = mock[Response]
val recover = mock[Recover]
new AssetHandler( path => None )
.handle( recover, request, response )
there was no(recover).orRethrow( any[Throwable] )
there was one(response).notFound
there was one(response).done
}
"Prevent traversal attacks" in {
val request = mockRequest( ".././path.js", None )
val response = mock[Response]
val recover = mock[Recover]
mockHandler( mockReader() ).handle( recover, request, response )
there was one(response).content(renderable)
there was one(response).done
there was no(recover).orRethrow( any[Throwable] )
}
"Should allow a custom cache TTL" in {
val request = mockRequest( "path.ABC123.js", None )
val response = mock[Response]
val recover = mock[Recover]
new AssetHandler( AssetFinder(_ => Some(mockReader())), 123456 )
.handle( recover, request, response )
there was one(response).header(
Response.Header.CacheControl,
"max-age=123456, must-revalidate, public"
)
there was no(recover).orRethrow( any[Throwable] )
}
"Allow cache headers to be forced" in {
val request = mockRequest( "path.js", None )
val response = mock[Response]
val recover = mock[Recover]
new AssetHandler(
AssetFinder(_ => Some(mockReader())),
forceCache = true
).handle( recover, request, response )
there was one(response).header(
Response.Header.CacheControl,
"max-age=31560000, must-revalidate, public"
)
there was no(recover).orRethrow( any[Throwable] )
}
}
}
|
Nycto/TubeUtil
|
src/test/scala/static/HandlerTest.scala
|
Scala
|
mit
| 6,828
|
package ammonite.sshd
import acyclic.file
import ammonite.main.Defaults
import ammonite.ops.Path
/**
 * Ssh server parameters
 * @param address network address for the ssh server to bind to
 * @param port a port to be used by ssh server. Set it as `0` to let the server choose some random port.
 * @param username username to authenticate on ssh server
 * @param password password to authenticate on ssh server
 * @param ammoniteHome path that ammonite repl sessions will be using as their home directory
 * @param hostKeyFile path where the server's identity key is stored
 */
case class SshServerConfig(address: String,
                           port: Int,
                           username: String,
                           password: String,
                           ammoniteHome: Path = Defaults.ammoniteHome,
                           hostKeyFile: Option[Path] = None
                          ) {
require(username.nonEmpty, "username can't be an empty string")
  override def toString =
    s"(address = '$address', port = $port, username = '$username', " +
      s"home = '$ammoniteHome', hostKeyFile = $hostKeyFile)"
}
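// Illustrative construction sketch (not part of the original file); address
// and credentials are placeholders:
//
//   val conf = SshServerConfig(
//     address = "localhost",
//     port = 0, // 0 lets the server pick a free port
//     username = "repl",
//     password = "secret"
//   )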
|
coderabhishek/Ammonite
|
sshd/src/main/scala/ammonite/sshd/SshServerConfig.scala
|
Scala
|
mit
| 1,027
|
package com.normation.rudder.repository.jdbc
import com.normation.eventlog.ModificationId
import com.normation.rudder.db.DB
import com.normation.rudder.repository.GitCommitId
import com.normation.rudder.repository.GitModificationRepository
import net.liftweb.common._
import com.normation.rudder.db.Doobie
import scalaz.{Failure => _, _}, Scalaz._
import doobie.imports._
import scalaz.concurrent.Task
class GitModificationRepositoryImpl(
db : Doobie
) extends GitModificationRepository {
import db._
def addCommit(commit: GitCommitId, modId: ModificationId): Box[DB.GitCommitJoin] = {
val sql = sql"""
insert into gitcommit (gitcommit, modificationid)
values (${commit.value}, ${modId.value})
""".update
    sql.run.attempt.transact(xa).run match {
      case \/-(x)  => Full(DB.GitCommitJoin(commit, modId))
      case -\/(ex) => Failure(s"Error when trying to add a Git Commit in DB: ${ex.getMessage}", Full(ex), Empty)
    }
}
def getCommits(modificationId: ModificationId): Box[Option[GitCommitId]] = {
val sql = sql"""
select gitcommit from gitcommit where modificationid=${modificationId.value}
""".query[String].option
    sql.attempt.transact(xa).run match {
      case \/-(x)  => Full(x.map(id => GitCommitId(id)))
      case -\/(ex) => Failure(s"Error when trying to get Git Commit for modification ID '${modificationId.value}': ${ex.getMessage}", Full(ex), Empty)
    }
}
}
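// Illustrative usage sketch (not part of the original file): record a commit
// against a modification id, then read it back. `doobie` and the ids are
// placeholders assumed to exist in the caller's context.
//
//   val repo = new GitModificationRepositoryImpl(doobie)
//   repo.addCommit(GitCommitId("abc123"), ModificationId("mod-1"))
//   repo.getCommits(ModificationId("mod-1")) // Full(Some(GitCommitId("abc123")))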
|
armeniaca/rudder
|
rudder-core/src/main/scala/com/normation/rudder/repository/jdbc/GitModificationRepository.scala
|
Scala
|
gpl-3.0
| 1,445
|
package org.scalajs.openui5.sap.m
import org.scalajs.openui5.sap.ui.core.{CSSSize, Wrapping}
import org.scalajs.openui5.util.{Settings, SettingsMap, noSettings}
import scala.scalajs.js
import scala.scalajs.js.annotation.{JSName, ScalaJSDefined}
@ScalaJSDefined
trait TextAreaSettings extends InputBaseSettings
object TextAreaSettings extends TextAreaSettingsBuilder(noSettings)
class TextAreaSettingsBuilder(val dict: SettingsMap)
extends Settings[TextAreaSettings, TextAreaSettingsBuilder](new TextAreaSettingsBuilder(_))
with TextAreaSetters[TextAreaSettings, TextAreaSettingsBuilder]
trait TextAreaSetters[T <: js.Object, B <: Settings[T, _]] extends InputBaseSetters[T, B] {
def rows(v: Int) = setting("rows", v)
def cols(v: Int) = setting("cols", v)
def height(v: CSSSize) = setting("height", v)
def maxLength(v: Int) = setting("maxLength", v)
def wrapping(v: Wrapping) = setting("wrapping", v)
  def valueLiveUpdate(v: Boolean) = setting("valueLiveUpdate", v)
def liveChange(v: js.Function) = setting("liveChange", v)
}
@JSName("sap.m.TextArea")
@js.native
class TextArea(id: js.UndefOr[String] = js.native,
settings: js.UndefOr[TextAreaSettings] = js.native) extends InputBase {
def this(id: String) = this(id, js.undefined)
def this(settings: TextAreaSettings) = this(js.undefined, settings)
}
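// Illustrative construction sketch (not part of the original file), using the
// settings builder above. Exactly how the builder converts to
// TextAreaSettings depends on the Settings helper, so treat this as an
// assumption:
//
//   val area = new TextArea("notes",
//     TextAreaSettings.rows(5).maxLength(500).valueLiveUpdate(true))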
|
lastsys/scalajs-openui5
|
src/main/scala/org/scalajs/openui5/sap/m/TextArea.scala
|
Scala
|
mit
| 1,347
|
// This is free and unencumbered software released into the public domain.
//
// Anyone is free to copy, modify, publish, use, compile, sell, or
// distribute this software, either in source code form or as a compiled
// binary, for any purpose, commercial or non-commercial, and by any
// means.
//
// In jurisdictions that recognize copyright laws, the author or authors
// of this software dedicate any and all copyright interest in the
// software to the public domain. We make this dedication for the benefit
// of the public at large and to the detriment of our heirs and
// successors. We intend this dedication to be an overt act of
// relinquishment in perpetuity of all present and future rights to this
// software under copyright law.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
// IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
// OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
// ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
//
// For more information, please refer to <http://unlicense.org/>
package com.pasviegas.shoushiling.cli
import scala.io.StdIn
object Main extends App {
GameLoop.start(args, _.state.message.foreach(println), () => Some(StdIn.readLine()))
}
|
pasviegas/shoushiling
|
cli/src/main/scala/com/pasviegas/shoushiling/cli/Main.scala
|
Scala
|
unlicense
| 1,458
|
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package repositories.application
import config.{ MicroserviceAppConfig, PsiTestIds }
import connectors.launchpadgateway.exchangeobjects.in.reviewed.{ ReviewSectionQuestionRequest, ReviewedCallbackRequest }
import model.ApplicationRoute.ApplicationRoute
import model.ApplicationStatus.{ apply => _ }
import model.CivilServantAndInternshipType.CivilServantAndInternshipType
import model.OnlineTestCommands.PsiTestResult
import model.Phase.Phase
import model._
import model.command._
import model.persisted._
import model.persisted.sift.SiftTestGroup
import model.report._
import org.slf4j.{Logger, LoggerFactory}
import reactivemongo.bson.{ BSONDocument, _ }
import repositories.{ BaseBSONReader, CommonBSONDocuments }
trait ReportingRepoBSONReader extends CommonBSONDocuments with BaseBSONReader {
implicit val toCandidateProgressReportItem: BSONDocumentReader[CandidateProgressReportItem] = bsonReader {
doc: BSONDocument => {
val schemesDoc = doc.getAs[BSONDocument]("scheme-preferences")
val schemes = schemesDoc.flatMap(_.getAs[List[SchemeId]]("schemes"))
val adDoc = doc.getAs[BSONDocument]("assistance-details")
val disability = adDoc.flatMap(_.getAs[String]("hasDisability"))
val onlineAdjustments = adDoc.flatMap(_.getAs[Boolean]("needsSupportForOnlineAssessment")).map(booleanTranslator)
val assessmentCentreAdjustments = adDoc.flatMap(_.getAs[Boolean]("needsSupportAtVenue")).map(booleanTranslator)
val phoneAdjustments = adDoc.flatMap(_.getAs[Boolean]("needsSupportForPhoneInterview")).map(booleanTranslator)
val gis = adDoc.flatMap(_.getAs[Boolean]("guaranteedInterview")).map(booleanTranslator)
val csedDoc = doc.getAs[BSONDocument]("civil-service-experience-details")
val civilServantAndInternshipTypes = (internshipType: CivilServantAndInternshipType) =>
csedDoc.map(_.getAs[List[CivilServantAndInternshipType]]("civilServantAndInternshipTypes")
.getOrElse(List.empty[CivilServantAndInternshipType]).contains(internshipType))
val csedCivilServant = civilServantAndInternshipTypes(CivilServantAndInternshipType.CivilServant).map(booleanTranslator)
val csedEdipCompleted = civilServantAndInternshipTypes(CivilServantAndInternshipType.EDIP).map(booleanTranslator)
val csedSdip = civilServantAndInternshipTypes(CivilServantAndInternshipType.SDIP).map(booleanTranslator)
val csedOtherInternshipCompleted = civilServantAndInternshipTypes(CivilServantAndInternshipType.OtherInternship).map(booleanTranslator)
val fastPassCertificate = csedDoc.map(_.getAs[String]("certificateNumber").getOrElse("No"))
val pdDoc = doc.getAs[BSONDocument]("personal-details")
val edipCompleted = pdDoc.flatMap(_.getAs[Boolean]("edipCompleted"))
val otherInternshipCompleted = pdDoc.flatMap(_.getAs[Boolean]("otherInternshipCompleted")).map(booleanTranslator)
val applicationId = doc.getAs[String]("applicationId").getOrElse("")
val userId = doc.getAs[String]("userId").getOrElse("")
val applicationRoute = doc.getAs[ApplicationRoute]("applicationRoute").getOrElse(ApplicationRoute.Faststream)
val progress: ProgressResponse = toProgressResponse(applicationId).read(doc)
val edipReportColumn = applicationRoute match {
case ApplicationRoute.Faststream => csedEdipCompleted
case ApplicationRoute.SdipFaststream => edipCompleted.map(booleanTranslator)
case ApplicationRoute.Edip => None
case ApplicationRoute.Sdip => edipCompleted.map(booleanTranslator)
case _ => None
}
val otherInternshipColumn = applicationRoute match {
case ApplicationRoute.Faststream => csedOtherInternshipCompleted
case _ => otherInternshipCompleted
}
val fsacIndicatorDoc = doc.getAs[BSONDocument]("fsac-indicator")
val assessmentCentre = fsacIndicatorDoc.flatMap(_.getAs[String]("assessmentCentre"))
CandidateProgressReportItem(userId, applicationId, Some(ProgressStatusesReportLabels.progressStatusNameInReports(progress)),
schemes.getOrElse(Nil), disability, onlineAdjustments, assessmentCentreAdjustments, phoneAdjustments, gis, csedCivilServant,
edipReportColumn, csedSdip, otherInternshipColumn, fastPassCertificate, assessmentCentre, applicationRoute)
}
}
implicit val toApplicationForInternshipReport: BSONDocumentReader[ApplicationForInternshipReport] = bsonReader {
doc: BSONDocument => {
val applicationId = doc.getAs[String]("applicationId").getOrElse("")
val route = doc.getAs[ApplicationRoute.ApplicationRoute]("applicationRoute")
.getOrElse(throw new Exception(s"Application route not set for $applicationId"))
val userId = doc.getAs[String]("userId").getOrElse("")
val progressResponse = toProgressResponse(applicationId).read(doc)
val psDoc = doc.getAs[BSONDocument]("personal-details")
val firstName = psDoc.flatMap(_.getAs[String]("firstName"))
val lastName = psDoc.flatMap(_.getAs[String]("lastName"))
val preferredName = psDoc.flatMap(_.getAs[String]("preferredName"))
val adDoc = doc.getAs[BSONDocument]("assistance-details")
val guaranteedInterviewScheme = adDoc.flatMap(_.getAs[Boolean]("guaranteedInterview"))
val testResults: TestResultsForOnlineTestPassMarkReportItem =
toTestResultsForOnlineTestPassMarkReportItem(doc, applicationId)
      // TODO: Fix this when updating this report. We now have a list of tests
val behaviouralTScore = None // testResults.behavioural.flatMap(_.tScore)
val situationalTScore = None //testResults.situational.flatMap(_.tScore)
      // FDH: only return the statuses relevant to SDIP for an SdipFaststream candidate.
val modifiedProgressResponse = progressResponse.copy(phase2ProgressResponse = Phase2ProgressResponse(),
phase3ProgressResponse = Phase3ProgressResponse(),
// if they've failed SDIP then we don't care if they've been exported for Faststream
exported = if (progressResponse.phase1ProgressResponse.sdipFSFailed) false else progressResponse.exported,
updateExported = if (progressResponse.phase1ProgressResponse.sdipFSFailed) false else progressResponse.updateExported
)
ApplicationForInternshipReport(
applicationRoute = route,
userId = userId,
progressStatus = Some(ProgressStatusesReportLabels.progressStatusNameInReports(modifiedProgressResponse)),
firstName = firstName,
lastName = lastName,
preferredName = preferredName,
guaranteedInterviewScheme = guaranteedInterviewScheme,
behaviouralTScore = behaviouralTScore,
situationalTScore = situationalTScore
)
}
}
implicit val toApplicationForAnalyticalSchemesReport: BSONDocumentReader[ApplicationForAnalyticalSchemesReport] = bsonReader {
doc: BSONDocument => {
val applicationId = doc.getAs[String]("applicationId").getOrElse("")
val userId = doc.getAs[String]("userId").getOrElse("")
val psDoc = doc.getAs[BSONDocument]("personal-details")
val firstName = psDoc.flatMap(_.getAs[String]("firstName"))
val lastName = psDoc.flatMap(_.getAs[String]("lastName"))
val spDoc = doc.getAs[BSONDocument]("scheme-preferences")
val schemes = spDoc.flatMap(_.getAs[List[SchemeId]]("schemes"))
val firstSchemePreference = schemes.map(_.head.toString)
val adDoc = doc.getAs[BSONDocument]("assistance-details")
val guaranteedInterviewScheme = adDoc.flatMap(_.getAs[Boolean]("guaranteedInterview"))
val testResults: TestResultsForOnlineTestPassMarkReportItem =
toTestResultsForOnlineTestPassMarkReportItem(doc, applicationId)
//TODO: Fix this when fixing this report
val behaviouralTScore = None //testResults.behavioural.flatMap(_.tScore)
val situationalTScore = None //testResults.situational.flatMap(_.tScore)
val etrayTScore = None // testResults.etray.flatMap(_.tScore)
val overallVideoScore = testResults.videoInterview.map(_.overallTotal)
ApplicationForAnalyticalSchemesReport(
userId = userId,
firstName = firstName,
lastName = lastName,
firstSchemePreference = firstSchemePreference,
guaranteedInterviewScheme = guaranteedInterviewScheme,
behaviouralTScore = behaviouralTScore,
situationalTScore = situationalTScore,
etrayTScore = etrayTScore,
overallVideoScore = overallVideoScore
)
}
}
implicit val toApplicationForDiversityReport: BSONDocumentReader[ApplicationForDiversityReport] = bsonReader {
doc: BSONDocument => {
val applicationRoute = doc.getAs[ApplicationRoute]("applicationRoute").getOrElse(ApplicationRoute.Faststream)
      // Edip and Sdip candidates are interviewed by phone, so their adjustments live under a different key
      val onlineAdjustmentsKey =
        if (applicationRoute == ApplicationRoute.Edip || applicationRoute == ApplicationRoute.Sdip) {
          "needsSupportForPhoneInterview"
        } else {
          "needsSupportForOnlineAssessment"
        }
val schemesDocOpt = doc.getAs[BSONDocument]("scheme-preferences")
val schemes = schemesDocOpt.flatMap(_.getAs[List[SchemeId]]("schemes"))
val adDocOpt = doc.getAs[BSONDocument]("assistance-details")
val disability = adDocOpt.flatMap(_.getAs[String]("hasDisability"))
val onlineAdjustments = adDocOpt.flatMap(_.getAs[Boolean](onlineAdjustmentsKey)).map(booleanTranslator)
val assessmentCentreAdjustments = adDocOpt.flatMap(_.getAs[Boolean]("needsSupportAtVenue")).map(booleanTranslator)
val gis = adDocOpt.flatMap(_.getAs[Boolean]("guaranteedInterview"))
val civilServiceExperience = toCivilServiceExperienceDetailsReportItem(applicationRoute, doc)
val applicationId = doc.getAs[String]("applicationId").getOrElse("")
val userId = doc.getAs[String]("userId").getOrElse("")
val progress: ProgressResponse = toProgressResponse(applicationId).read(doc)
val curSchemeStatus = doc.getAs[List[SchemeEvaluationResult]]("currentSchemeStatus").getOrElse(
schemes.getOrElse(List.empty).map(s => new SchemeEvaluationResult(s, EvaluationResults.Green.toString)))
ApplicationForDiversityReport(applicationId, userId, applicationRoute,
Some(ProgressStatusesReportLabels.progressStatusNameInReports(progress)),
schemes.getOrElse(List.empty), disability, gis, onlineAdjustments,
assessmentCentreAdjustments, civilServiceExperience, curSchemeStatus)
}
}
implicit val toApplicationForOnlineTestPassMarkReport: BSONDocumentReader[ApplicationForOnlineTestPassMarkReport] = bsonReader {
doc: BSONDocument => {
val userId = doc.getAs[String]("userId").getOrElse("")
val applicationId = doc.getAs[String]("applicationId").getOrElse("")
val applicationRoute = doc.getAs[ApplicationRoute]("applicationRoute").getOrElse(ApplicationRoute.Faststream)
val schemesDoc = doc.getAs[BSONDocument]("scheme-preferences")
val schemes = schemesDoc.flatMap(_.getAs[List[SchemeId]]("schemes"))
val adDoc = doc.getAs[BSONDocument]("assistance-details")
val gis = adDoc.flatMap(_.getAs[Boolean]("guaranteedInterview"))
val disability = adDoc.flatMap(_.getAs[String]("hasDisability"))
val onlineAdjustments = adDoc.flatMap(_.getAs[Boolean]("needsSupportForOnlineAssessment")).map(booleanTranslator)
val assessmentCentreAdjustments = adDoc.flatMap(_.getAs[Boolean]("needsSupportAtVenue")).map(booleanTranslator)
val curSchemeStatus = doc.getAs[List[SchemeEvaluationResult]]("currentSchemeStatus").getOrElse(
schemes.getOrElse(List.empty).map(s => new SchemeEvaluationResult(s, EvaluationResults.Green.toString)))
val progress: ProgressResponse = toProgressResponse(applicationId).read(doc)
ApplicationForOnlineTestPassMarkReport(
userId,
applicationId,
ProgressStatusesReportLabels.progressStatusNameInReports(progress),
applicationRoute,
schemes.getOrElse(Nil),
disability,
gis,
onlineAdjustments,
assessmentCentreAdjustments,
toTestResultsForOnlineTestPassMarkReportItem(doc, applicationId),
curSchemeStatus)
}
}
implicit val toApplicationForNumericTestExtractReport: BSONDocumentReader[ApplicationForNumericTestExtractReport] = bsonReader {
doc: BSONDocument => {
val userId = doc.getAs[String]("userId").getOrElse("")
val applicationId = doc.getAs[String]("applicationId").getOrElse("")
val applicationRoute = doc.getAs[ApplicationRoute]("applicationRoute").getOrElse(ApplicationRoute.Faststream)
val schemesDoc = doc.getAs[BSONDocument]("scheme-preferences")
val schemes = schemesDoc.flatMap(_.getAs[List[SchemeId]]("schemes"))
val personalDetails = doc.getAs[PersonalDetails]("personal-details").getOrElse(
throw new Exception(s"Error parsing personal details for $userId")
)
val adDoc = doc.getAs[BSONDocument]("assistance-details")
val gis = adDoc.flatMap(_.getAs[Boolean]("guaranteedInterview"))
val disability = adDoc.flatMap(_.getAs[String]("hasDisability"))
val onlineAdjustments = adDoc.flatMap(_.getAs[Boolean]("needsSupportForOnlineAssessment")).map(booleanTranslator)
val assessmentCentreAdjustments = adDoc.flatMap(_.getAs[Boolean]("needsSupportAtVenue")).map(booleanTranslator)
val currentSchemeStatus = doc.getAs[List[SchemeEvaluationResult]]("currentSchemeStatus").getOrElse(
throw new Exception(s"Error parsing current scheme status for $userId")
)
val progress: ProgressResponse = toProgressResponse(applicationId).read(doc)
ApplicationForNumericTestExtractReport(
userId,
applicationId,
applicationRoute,
personalDetails.firstName,
personalDetails.lastName,
personalDetails.preferredName,
ProgressStatusesReportLabels.progressStatusNameInReports(progress),
schemes.getOrElse(Nil),
disability,
gis,
onlineAdjustments,
assessmentCentreAdjustments,
toTestResultsForOnlineTestPassMarkReportItem(doc, applicationId),
currentSchemeStatus
)
}
}
implicit val toApplicationForOnlineActiveTestCountReport: BSONDocumentReader[ApplicationForOnlineActiveTestCountReport]
= bsonReader {
doc: BSONDocument => {
val userId = doc.getAs[String]("userId").getOrElse("")
val applicationId = doc.getAs[String]("applicationId").getOrElse("")
val testGroupsDoc = doc.getAs[BSONDocument]("testGroups")
val tests = (phase: Phase, bsonReader: BSONDocument => PsiTestProfile) =>
testGroupsDoc.flatMap(_.getAs[BSONDocument](phase)).map { phaseDoc =>
val profile = bsonReader(phaseDoc)
profile.activeTests.length
}
val gis = booleanTranslator(doc.getAs[BSONDocument]("assistance-details").exists(_.getAs[Boolean]("guaranteedInterview").contains(true)))
val p1TestsCount = tests(Phase.PHASE1, Phase1TestProfile.bsonHandler.read _).getOrElse(0)
val p2TestsCount = tests(Phase.PHASE2, Phase2TestGroup.bsonHandler.read _).getOrElse(0)
ApplicationForOnlineActiveTestCountReport(userId,applicationId, gis, p1TestsCount, p2TestsCount)
}
}
private[application] def toCivilServiceExperienceDetailsReportItem(applicationRoute: ApplicationRoute,
doc: BSONDocument
): Option[CivilServiceExperienceDetailsForDiversityReport] = {
val civilServiceExperienceDetails = repositories.getCivilServiceExperienceDetails(applicationRoute, doc)
Some(CivilServiceExperienceDetailsForDiversityReport(civilServiceExperienceDetails))
}
private[application] def toTestResultsForOnlineTestPassMarkReportItem(
appDoc: BSONDocument, applicationId: String): TestResultsForOnlineTestPassMarkReportItem = {
val testGroupsDoc = appDoc.getAs[BSONDocument]("testGroups")
val p1Tests = toPhase1TestResults(testGroupsDoc)
val p2Tests = toPhase2TestResults(testGroupsDoc)
val videoInterviewResults = toPhase3TestResults(testGroupsDoc)
val siftTestResults = toSiftTestResults(applicationId, testGroupsDoc)
TestResultsForOnlineTestPassMarkReportItem(
p1Tests,
p2Tests,
videoInterviewResults,
siftTestResults,
None, None, None, None)
}
  // Just declaring that implementations need to provide a MicroserviceAppConfig impl
def appConfig: MicroserviceAppConfig
private[application] def toPhase1TestResults(testGroupsDoc: Option[BSONDocument]): Seq[Option[PsiTestResult]] = {
testGroupsDoc.flatMap(_.getAs[BSONDocument](Phase.PHASE1)).map { phase1Doc =>
val phase1TestProfile = Phase1TestProfile.bsonHandler.read(phase1Doc)
// Sort the tests in config based on their names eg. test1, test2, test3, test4
val p1TestNamesSorted = appConfig.onlineTestsGatewayConfig.phase1Tests.tests.keys.toList.sorted
val p1TestIds = p1TestNamesSorted.map(testName => appConfig.onlineTestsGatewayConfig.phase1Tests.tests(testName))
toPhaseXTestResults(phase1TestProfile.activeTests, p1TestIds)
}.getOrElse(Seq.fill(4)(None))
}
private[application] def toPhase2TestResults(testGroupsDoc: Option[BSONDocument]): Seq[Option[PsiTestResult]] = {
testGroupsDoc.flatMap(_.getAs[BSONDocument](Phase.PHASE2)).map { phase2Doc =>
val phase2TestProfile = Phase2TestGroup.bsonHandler.read(phase2Doc)
// Sort the tests in config based on their names eg. test1, test2
val p2TestNamesSorted = appConfig.onlineTestsGatewayConfig.phase2Tests.tests.keys.toList.sorted
val p2TestIds = p2TestNamesSorted.map(testName => appConfig.onlineTestsGatewayConfig.phase2Tests.tests(testName))
toPhaseXTestResults(phase2TestProfile.activeTests, p2TestIds)
}.getOrElse(Seq.fill(2)(None))
}
private[application] def toPhaseXTestResults(activeTests: Seq[PsiTest],
allTestIds: Seq[PsiTestIds]): Seq[Option[PsiTestResult]] = {
def getTestResult(inventoryId: String): Option[PsiTestResult] = {
activeTests.find(_.inventoryId == inventoryId).flatMap { psiTest =>
// TODO: What is status?
psiTest.testResult.map { tr => PsiTestResult(status = "", tScore = tr.tScore, raw = tr.rawScore) }
}
}
allTestIds.map{ testIds => getTestResult(testIds.inventoryId) }
}
private[application] def toPhase3TestResults(testGroupsDoc: Option[BSONDocument]): Option[VideoInterviewTestResult] = {
val reviewedDocOpt = testGroupsDoc.flatMap(_.getAs[BSONDocument](Phase.PHASE3))
.flatMap(_.getAs[BSONArray]("tests"))
.flatMap(_.getAs[BSONDocument](0))
.flatMap(_.getAs[BSONDocument]("callbacks"))
.flatMap(_.getAs[List[BSONDocument]]("reviewed"))
val latestReviewedOpt = reviewedDocOpt
.map(_.map(ReviewedCallbackRequest.bsonHandler.read))
.flatMap(ReviewedCallbackRequest.getLatestReviewed)
latestReviewedOpt.map { latestReviewed =>
VideoInterviewTestResult(
toVideoInterviewQuestionTestResult(latestReviewed.latestReviewer.question1),
toVideoInterviewQuestionTestResult(latestReviewed.latestReviewer.question2),
toVideoInterviewQuestionTestResult(latestReviewed.latestReviewer.question3),
toVideoInterviewQuestionTestResult(latestReviewed.latestReviewer.question4),
toVideoInterviewQuestionTestResult(latestReviewed.latestReviewer.question5),
toVideoInterviewQuestionTestResult(latestReviewed.latestReviewer.question6),
toVideoInterviewQuestionTestResult(latestReviewed.latestReviewer.question7),
toVideoInterviewQuestionTestResult(latestReviewed.latestReviewer.question8),
latestReviewed.calculateTotalScore()
)
}
}
private[this] def toVideoInterviewQuestionTestResult(question: ReviewSectionQuestionRequest) = {
VideoInterviewQuestionTestResult(
question.reviewCriteria1.score,
question.reviewCriteria2.score)
}
private val logger: Logger = LoggerFactory.getLogger(this.getClass)
private[application] def toSiftTestResults(applicationId: String,
testGroupsDoc: Option[BSONDocument]): Option[PsiTestResult] = {
val siftDocOpt = testGroupsDoc.flatMap(_.getAs[BSONDocument]("SIFT_PHASE"))
siftDocOpt.flatMap { siftDoc =>
val siftTestProfile = SiftTestGroup.bsonHandler.read(siftDoc)
siftTestProfile.activeTests.size match {
case 1 => siftTestProfile.activeTests.head.testResult.map { tr => PsiTestResult("", tr.tScore, tr.rawScore) }
case 0 => None
case s if s > 1 =>
logger.error(s"There are $s active sift tests which is invalid for application id [$applicationId]")
None
}
}
}
}
|
hmrc/fset-faststream
|
app/repositories/application/ReportingRepoBSONReader.scala
|
Scala
|
apache-2.0
| 21,495
|
import java.io.File
import org.specs2.matcher.ValueCheck.valueIsTypedValueCheck
import scalaxb.compiler.Config
object LensPurchaseOrderTest extends TestBase {
val inFile = new File("integration/src/test/resources/ipo.xsd")
val usageFile = new File(tmp, "PurchaseOrderUsage.scala")
copyFileFromResource("PurchaseOrderUsage.scala", usageFile)
// override val module = new scalaxb.compiler.xsd.Driver with Verbose
lazy val generated = module.process(inFile,
Config(packageNames = Map(None -> Some("ipo")),
outdir = tmp,
generateLens = true
))
"ipo.scala file must compile so Address can be used" in {
(List("import ipo._",
"Address.name.set(Address(\\"\\", \\"\\", \\"\\"), \\"hello\\").toString"),
generated) must evaluateTo("Address(hello,,)", outdir = "./tmp", usecurrentcp = true)
}
"ipo.scala file must compile together with PurchaseOrderUsage.scala" in {
(List("import ipo._",
"PurchaseOrderUsage.allTests"),
usageFile :: generated) must evaluateTo(true, outdir = "./tmp", usecurrentcp = true)
}
}
|
Banno/scalaxb
|
integration/src/test/scala/LensPurchaseOrderTest.scala
|
Scala
|
mit
| 1,083
|
package a65.测试3
object Runner4 {
def number1A(n: Int): Number1 = n match {
case n1 if n1 > 0 => Number1SA(number1A(n1 - 1))
case 0 => Number1TA
}
def number1B(n: Int): Number1 = n match {
case n1 if n1 > 0 => Number1SB(number1B(n1 - 1))
case 0 => Number1TB
}
def number2A(n: Int): (Number2T, Number2SA) = {
def gen(n1: Int, zero: => Number2SA): Number2T = n1 match {
case n2 if n2 > 1 => Number2T(() => gen(n2 - 1, zero))
case 1 => Number2T(() => zero)
}
lazy val number2t: Number2T = gen(n, number2sa)
lazy val number2sa: Number2SA = Number2SA(() => number2t)
(number2t, number2sa)
}
def number2B(n: Int): (Number2SB, Number2T) = {
def gen(n1: Int, zero: => Number2T): Number2SB = n1 match {
case n2 if n2 > 1 => Number2SB(() => gen(n2 - 1, zero))
case 1 => Number2SB(() => zero)
}
lazy val number2sb: Number2SB = gen(n, number2t)
lazy val number2t: Number2T = Number2T(() => number2sb)
(number2sb, number2t)
}
def count(number1: Number1): Int = number1 match {
case Number1SA(tail) => count(tail) + 1
case Number1SB(tail) => count(tail) + 1
case _ => 0
}
def main(arr: Array[String]): Unit = {
for {
i <- 1 to 2000
n <- 2 to 80
} {
val result1 = MathCount.log(n, i)
val number1 = number1A(i)
val (number2Positive, number2Zero) = number2A(n)
val result2 = number2Positive.method4(number1)
assert(result1 + 1 == count(result2))
}
for {
i <- 1 to 8
n <- 1 to 5
} {
val result1 = MathCount.pow(n, i)
val number1 = number1B(i)
val (number2Positive, number2Zero) = number2B(n)
val result2 = number1.method3(number2Positive)
assert(result1 == count(result2))
}
}
}
|
djx314/ubw
|
a66-指数对数-原型/src/main/scala/a65/测试3/Runner4.scala
|
Scala
|
bsd-3-clause
| 1,993
|
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
package exceptions
/**
* Exception that indicates a table-driven property check failed.
*
* <p>
* For an introduction to using tables, see the documentation for trait
* <a href="../prop/TableDrivenPropertyChecks.html">TableDrivenPropertyChecks</a>.
* </p>
*
 * @param messageFun a function that returns a detail message (not optional) for this <code>TableDrivenPropertyCheckFailedException</code>.
* @param cause an optional cause, the <code>Throwable</code> that caused this <code>TableDrivenPropertyCheckFailedException</code> to be thrown.
* @param failedCodeStackDepthFun a function that returns the depth in the stack trace of this exception at which the line of test code that failed resides.
* @param payload an optional payload, which ScalaTest will include in a resulting <code>TestFailed</code> event
* @param undecoratedMessage just a short message that has no redundancy with args, labels, etc. The regular "message" has everything in it
* @param args the argument values
* @param namesOfArgs a list of string names for the arguments
* @param row the index of the table row that failed the property check, causing this exception to be thrown
*
* @throws NullPointerException if any parameter is <code>null</code> or <code>Some(null)</code>.
*
* @author Bill Venners
*/
class TableDrivenPropertyCheckFailedException(
messageFun: StackDepthException => String,
cause: Option[Throwable],
failedCodeStackDepthFun: StackDepthException => Int,
payload: Option[Any],
undecoratedMessage: String,
args: List[Any],
namesOfArgs: List[String],
val row: Int
) extends PropertyCheckFailedException(
messageFun, cause, failedCodeStackDepthFun, payload, undecoratedMessage, args, Some(namesOfArgs)
) {
/**
* This constructor has been deprecated and will be removed in a future version of ScalaTest. Please
* use the primary constructor instead.
*/
@deprecated("Please use the primary constructor instead.")
def this(
messageFun: StackDepthException => String,
cause: Option[Throwable],
failedCodeStackDepthFun: StackDepthException => Int,
undecoratedMessage: String,
args: List[Any],
namesOfArgs: List[String],
row: Int
) = this(messageFun, cause, failedCodeStackDepthFun, None, undecoratedMessage, args, namesOfArgs, row)
/**
* Returns an instance of this exception's class, identical to this exception,
* except with the detail message option string replaced with the result of passing
* the current detail message to the passed function, <code>fun</code>.
*
* @param fun A function that, given the current optional detail message, will produce
 * the modified optional detail message for the result instance of <code>TableDrivenPropertyCheckFailedException</code>.
*/
override def modifyMessage(fun: Option[String] => Option[String]): TableDrivenPropertyCheckFailedException = {
val mod =
new TableDrivenPropertyCheckFailedException(
sde => fun(message).getOrElse(messageFun(this)),
cause,
failedCodeStackDepthFun,
payload,
undecoratedMessage,
args,
namesOfArgs,
row
)
mod.setStackTrace(getStackTrace)
mod
}
/**
* Returns an instance of this exception's class, identical to this exception,
* except with the payload option replaced with the result of passing
* the current payload option to the passed function, <code>fun</code>.
*
* @param fun A function that, given the current optional payload, will produce
* the modified optional payload for the result instance of <code>TableDrivenPropertyCheckFailedException</code>.
*/
override def modifyPayload(fun: Option[Any] => Option[Any]): TableDrivenPropertyCheckFailedException = {
val currentPayload = payload
val mod =
new TableDrivenPropertyCheckFailedException(
messageFun,
cause,
failedCodeStackDepthFun,
fun(currentPayload),
undecoratedMessage,
args,
namesOfArgs,
row
)
mod.setStackTrace(getStackTrace)
mod
}
}
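// Illustrative sketch (not part of the original file): decorating the detail
// message of a previously caught exception while keeping its stack trace, as
// modifyMessage above does. `caught` is a hypothetical
// TableDrivenPropertyCheckFailedException instance.
//
//   val decorated = caught.modifyMessage(_.map("Table check: " + _))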
|
travisbrown/scalatest
|
src/main/scala/org/scalatest/exceptions/TableDrivenPropertyCheckFailedException.scala
|
Scala
|
apache-2.0
| 4,700
|
/*
* Copyright 2001-2009 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import tools.ConcurrentDistributor
/**
* Trait that can be mixed into suites that need methods invoked before and after executing the
* suite. This trait allows code to be executed before and/or after all the tests and nested suites of a
* suite are run. This trait overrides <code>run</code> (the main <code>run</code> method that
* takes seven parameters, an optional test name, reporter, stopper, filter, configMap, optional distributor,
* and tracker) and calls the
* <code>beforeAll</code> method, then calls <code>super.run</code>. After the <code>super.run</code>
* invocation completes, whether it returns normally or completes abruptly with an exception,
* this trait's <code>run</code> method will invoke <code>afterAll</code>.
*
* <p>
* Trait <code>BeforeAndAfterAll</code> defines two overloaded variants each of <code>beforeAll</code>
* and <code>afterAll</code>, one that takes a <code>configMap</code> and another that takes no
 * arguments. This trait's implementation of the variant that takes the <code>configMap</code>
* simply invokes the variant that takes no parameters, which does nothing. Thus you can override
* whichever variant you want. If you need something from the <code>configMap</code> before
* all tests and nested suites are run, override <code>beforeAll(Map[String, Any])</code>. Otherwise,
* override <code>beforeAll()</code>.
* </p>
*
* <p>
 * For example, the following <code>MasterSuite</code> mixes in <code>BeforeAndAfterAll</code> and,
 * in <code>beforeAll</code>, creates and writes to a temp file, taking the name of the temp file
* from the <code>configMap</code>. This same <code>configMap</code> is then passed to the <code>run</code>
* methods of the nested suites, <code>OneSuite</code>, <code>TwoSuite</code>, <code>RedSuite</code>,
* and <code>BlueSuite</code>, so those suites can access the filename and, therefore, the file's
* contents. After all of the nested suites have executed, <code>afterAll</code> is invoked, which
* again grabs the file name from the <code>configMap</code> and deletes the file:
* </p>
*
* <pre class="stHighlight">
 * import org.scalatest.Suite
* import org.scalatest.BeforeAndAfterAll
* import java.io.FileReader
* import java.io.FileWriter
* import java.io.File
*
* class MasterSuite extends Suite with BeforeAndAfterAll {
*
* private val FileNameKeyInGoodies = "tempFileName"
*
* // Set up the temp file needed by the test, taking
* // a file name from the configMap
* override def beforeAll(configMap: Map[String, Any]) {
*
* require(
* configMap.isDefinedAt(FileNameKeyInGoodies),
* "must place a temp file name in the configMap under the key: " + FileNameKeyInGoodies
* )
*
 *     val fileName = configMap(FileNameKeyInGoodies)
*
* val writer = new FileWriter(fileName)
* try {
* writer.write("Hello, suite of tests!")
* }
* finally {
* writer.close()
* }
* }
*
* override def nestedSuites =
* List(new OneSuite, new TwoSuite, new RedSuite, new BlueSuite)
*
* // Delete the temp file
* override def afterAll(configMap: Map[String, Any]) {
* // No need to require that configMap contains the key again because it won't get
* // here if it didn't contain the key in beforeAll
 *     val fileName = configMap(FileNameKeyInGoodies)
* val file = new File(fileName)
* file.delete()
* }
* }
* </pre>
*
* <p>
* Because the <code>BeforeAndAfterAll</code> trait invokes <code>super.run</code> to run the suite, you may need to
* mix this trait in last to get the desired behavior. For example, this won't
 * work, because <code>BeforeAndAfterAll</code> is "super" to <code>FunSuite</code>:
* </p>
* <pre class="stHighlight">
* class MySuite extends BeforeAndAfterAll with FunSuite
* </pre>
* <p>
* You'd need to turn it around, so that <code>FunSuite</code> is "super" to <code>BeforeAndAfterAll</code>, like this:
* </p>
* <pre class="stHighlight">
* class MySuite extends FunSuite with BeforeAndAfterAll
* </pre>
*
* <strong>Note: This trait does not currently ensure that the code in <code>afterAll</code> is executed after
* all the tests and nested suites are executed if a <code>Distributor</code> is passed. The
* plan is to do that eventually, but in the meantime, be aware that <code>afterAll</code> is
* guaranteed to be run after all the tests and nested suites only when they are run
* sequentially.</strong>
*
* @author Bill Venners
*/
trait ParallelBeforeAndAfterAll extends AbstractSuite {
this: Suite =>
/**
* Defines a method to be run before any of this suite's tests or nested suites are run.
*
* <p>
* This trait's implementation
* of <code>run</code> invokes the overloaded form of this method that
* takes a <code>configMap</code> before executing
* any tests or nested suites. This trait's implementation of that <code>beforeAll(Map[String, Any])</code>
* method simply invokes this <code>beforeAll()</code>
* method. Thus this method can be used to set up a test fixture
* needed by the entire suite, when you don't need anything from the <code>configMap</code>.
* This trait's implementation of this method does nothing.
* </p>
*/
protected def beforeAll() = ()
/**
* Defines a method (that takes a <code>configMap</code>) to be run before any
* of this suite's tests or nested suites are run.
*
* <p>
* This trait's implementation
* of <code>run</code> invokes this method before executing
* any tests or nested suites (passing in the <code>configMap</code> passed to it), thus this
* method can be used to set up a test fixture
* needed by the entire suite. This trait's implementation of this method invokes the
* overloaded form of <code>beforeAll</code> that takes no <code>configMap</code>.
* </p>
*/
protected def beforeAll(configMap: Map[String, Any]) {
beforeAll()
}
/**
* Defines a method to be run after all of this suite's tests and nested suites have
* been run.
*
* <p>
* This trait's implementation
* of <code>run</code> invokes the overloaded form of this method that
* takes a <code>configMap</code> after executing
* all tests and nested suites. This trait's implementation of that <code>afterAll(Map[String, Any])</code> method simply invokes this
* <code>afterAll()</code> method. Thus this method can be used to tear down a test fixture
* needed by the entire suite, when you don't need anything from the <code>configMap</code>.
* This trait's implementation of this method does nothing.
* </p>
*/
protected def afterAll() = ()
/**
* Defines a method (that takes a <code>configMap</code>) to be run after
* all of this suite's tests and nested suites have been run.
*
* <p>
* This trait's implementation
* of <code>run</code> invokes this method after executing all tests
* and nested suites (passing in the <code>configMap</code> passed to it), thus this
* method can be used to tear down a test fixture
* needed by the entire suite. This trait's implementation of this method invokes the
* overloaded form of <code>afterAll</code> that takes no <code>configMap</code>.
* </p>
*/
protected def afterAll(configMap: Map[String, Any]) {
afterAll()
}
/**
* Execute a suite surrounded by calls to <code>beforeAll</code> and <code>afterAll</code>.
*
* <p>
* This trait's implementation of this method ("this method") invokes <code>beforeAll(Map[String, Any])</code>
* before executing any tests or nested suites and <code>afterAll(Map[String, Any])</code>
* after executing all tests and nested suites. It runs the suite by invoking <code>super.run</code>, passing along
* the seven parameters passed to it.
* </p>
*
* <p>
* If any invocation of <code>beforeAll</code> completes abruptly with an exception, this
* method will complete abruptly with the same exception. If any call to
* <code>super.run</code> completes abruptly with an exception, this method
* will complete abruptly with the same exception, however, before doing so, it will
 * invoke <code>afterAll</code>. If <code>afterAll</code> <em>also</em> completes abruptly with an exception, this
* method will nevertheless complete abruptly with the exception previously thrown by <code>super.run</code>.
* If <code>super.run</code> returns normally, but <code>afterAll</code> completes abruptly with an
* exception, this method will complete abruptly with the same exception.
* </p>
*/
abstract override def run(testName: Option[String], reporter: Reporter, stopper: Stopper, filter: Filter,
configMap: Map[String, Any], distributor: Option[Distributor], tracker: Tracker) {
var thrownException: Option[Throwable] = None
def waitUntilDistributorIsDone = {
      // If a concurrent distributor is in use, wait for all of its
      // concurrent tasks to complete before proceeding.
distributor match {
case Some(distributor) => distributor match {
case distributor: ConcurrentDistributor =>
distributor.waitUntilDone();
case _ =>
}
case _ =>
}
}
beforeAll(configMap)
waitUntilDistributorIsDone
try {
super.run(testName, reporter, stopper, filter, configMap, distributor, tracker)
}
catch {
case e: Exception => thrownException = Some(e)
}
finally {
waitUntilDistributorIsDone
try {
afterAll(configMap) // Make sure that afterAll is called even if run completes abruptly.
thrownException match {
case Some(e) => throw e
case None =>
}
}
catch {
case laterException: Exception =>
thrownException match {
// If both run and afterAll throw an exception, report the test exception
case Some(earlierException) => throw earlierException
case None => throw laterException
}
}
}
}
}
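// A minimal usage sketch (hypothetical suite, kept to the ScalaTest 1.x-era API used above):
// the fixture is set up once before any test runs and torn down only after every
// (possibly concurrently distributed) test has finished.
class ExampleParallelSuite extends Suite with ParallelBeforeAndAfterAll {
  override def beforeAll() {
    // e.g. open a shared connection pool used by all tests
  }
  override def afterAll() {
    // close the pool; with a ConcurrentDistributor this runs only once all tasks are done
  }
  def testSomething() {
    // test body using the shared fixture
  }
}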
|
chirino/activemq-apollo
|
apollo-util/src/test/scala/org/scalatest/ParallelBeforeAndAfterAll.scala
|
Scala
|
apache-2.0
| 10,604
|
package org.finra.datagenerator.scaffolding.transformer.service
import java.lang.reflect.Field
import org.finra.datagenerator.scaffolding.config._
import org.finra.datagenerator.scaffolding.exceptions.SpelContextException
import org.finra.datagenerator.scaffolding.random.core.RubberRandom
import org.finra.datagenerator.scaffolding.transformer.service.TransformationSession.{AbortOnSpelExceptionsName, AddTransformationContainerToContextName, DefaultFieldGenerateName}
import org.finra.datagenerator.scaffolding.transformer.service.transformations.TransformationImpl
import org.finra.datagenerator.scaffolding.utils.{Mapper, ReflectionUtils, TypeUtils}
import scala.reflect.runtime.universe._
/**
* Created by dkopel on 12/14/16.
*/
trait TransformationSession extends TransformationContextProvider
with TransformationsProvider
with FunctionTransformationProvider {
implicit var tContext: TransformationContext = _
val random: RubberRandom
def setContext(tcxt: TransformationContext) = {
tContext = tcxt
updateContext(tContext)
}
def processOverride[S](): S = {
val oo = tContext.getCurrentOutputOverride
if(oo.isPresent) oo.get().action.get().asInstanceOf[S]
else null.asInstanceOf[S]
}
def processValue[S](field: Field)(implicit transformation: TransformationImpl, tt: TypeTag[S]): S = {
// Overrides
if(tContext.hasOverrides) {
processOverride()
}
// isNull
else if(transformation.isNull) {
null.asInstanceOf[S]
}
// isEmptyString
else if(transformation.isEmptyString) {
"".asInstanceOf[S]
}
// FunctionTransformation
else if(hasFunctionTransformation) {
processFunctionTransformation[S](transformation, tContext).asInstanceOf[S]
}
// Expression Value
else if(transformation.getValue != null && transformation.getValue.length > 0) {
parseExpression[S](transformation.getValue)
}
// Nothing
else {
throw new IllegalStateException()
}
}
def getRandom[T](container: TransformationContainer[_]): T = {
implicit val tt: TypeTag[T] = TypeUtils.stringToTypeTag(container.clazz.getName)
implicit val _random = random
_random.generate[T]
//new RubberRandom().nextObject(container.clazz).asInstanceOf[T]
}
def checkLimits[T](field: Field, value: T)(implicit transformation: TransformationImpl) = {
if(value != null && transformation.getLimits.length > 0) {
for(l <- transformation.getLimits[T]) {
if (!l.isValid(value)) {
throw new IllegalArgumentException()
}
}
}
}
def setField[T, S](field: Field, inst: T, value: S): T = {
logger.debug(s"Setting the field ${field.getName} to the value $value")
try {
ReflectionUtils.setField(field, inst, value)
} catch {
case e: IllegalArgumentException => logger.error(e.getMessage)
}
inst
}
def getInitialValue[T](container: TransformationContainer[T]): T = {
if (container.value != null) {
container.value
} else {
container.clazz.newInstance()
}
}
def setField[T](field: Field, inst: T)(implicit conf: Configuration): T = {
conf.conf[DefaultFieldGenerateStrategy](DefaultFieldGenerateName).getValue()(field, inst)(random)
}
def processOutputClass[T, S](container: TransformationContainer[T])(implicit conf: Configuration): TransformationContainer[_] = {
val output: T = getInitialValue(container)
logger.debug(s"Here is the initialized value for the class ${container.clazz}: {}", Mapper.toString(output))
setRoot(output)
tContext.setCurrentClass(container.clazz)
tContext.setCurrentInstance(output)
val fields = getFields(container)
logger.debug(s"Found ${fields.size} for class ${container.clazz.getName}")
fields.foreach(f => {
logger.debug(s"Processing class ${container.clazz.getName} field ${f.getName}")
tContext.field = f
val transformation = getTransformation(f)
implicit val tt = TypeUtils.stringToTypeTag(f.getType.getName)
var value: S = null.asInstanceOf[S]
// Has Override
if(tContext.hasOverrides) {
value = processOverride[S]()
if(transformation.isDefined) {
implicit val actualTransformation = transformation.get
checkLimits(f, value)
}
setField(f, output, value)
}
// No override...has a transformation
else if(transformation.isDefined) {
implicit val actualTransformation = transformation.get
try {
value = processValue(f)
checkLimits(f, value)
setField(f, output, value)
} catch {
case e: SpelContextException => {
logger.error("An error occurred in the Spel context: {}", e.message)
if(conf.conf(AbortOnSpelExceptionsName).getValue()) throw e
}
case e: IllegalStateException => {
logger.warn("A transformation was not executed on the field {}, using random data", f.getName)
setField(f, output)
}
}
} else {
logger.warn(s"No valid transformations were found on the field ${f.getName}")
setField(f, output)
}
})
val nc = new OutputTransformationContainer[T](container.alias, container.clazz, output, container.order, container.join)
if(conf.conf[Boolean](AddTransformationContainerToContextName).getValue()) {
registerVariable(container.alias, output)
tContext.addTransformationContainer(nc)
}
logger.debug(s"Transformation completed for ${tContext.iteration} ${container.alias}/${container.clazz}: {}", Mapper.toString(output))
nc
}
}
trait DefaultFieldGenerateStrategy {
def apply[T](field: Field, inst: T)(implicit rubberRandom: RubberRandom): T
}
object RandomFieldGenerate extends DefaultFieldGenerateStrategy {
override def apply[T](field: Field, inst: T)(implicit rubberRandom: RubberRandom): T = {
ReflectionUtils.setField(field, inst, rubberRandom.generate(field))
inst
}
}
object TransformationSession extends Configurable {
object DefaultFieldGenerateName extends ConfigName("defaultFieldGenerateStrategy")
object AddTransformationContainerToContextName extends ConfigName("addTransformationContainerToContext")
object AbortOnSpelExceptionsName extends ConfigName("abortOnSpelExceptions")
val DefaultFieldStrategy: ConfigDefinition[DefaultFieldGenerateStrategy] = ConfigDefinition[DefaultFieldGenerateStrategy](
DefaultFieldGenerateName,
Some(RandomFieldGenerate)
)
val AddTransformationContainerToContext: ConfigDefinition[Boolean] = ConfigDefinition[Boolean](
AddTransformationContainerToContextName,
Some(true)
)
val AbortOnSpelExceptions: ConfigDefinition[Boolean] = ConfigDefinition[Boolean](
AbortOnSpelExceptionsName,
Some(false)
)
override def configBundle: ConfigBundle = {
ConfigBundle(
getClass,
Seq(
DefaultFieldStrategy,
AddTransformationContainerToContext,
AbortOnSpelExceptions
)
)
}
}
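// A minimal sketch (hypothetical, not part of the original API): an alternative
// DefaultFieldGenerateStrategy that leaves fields untouched instead of filling them with
// random data; it could be wired in via the DefaultFieldGenerateName config definition above.
object NoOpFieldGenerate extends DefaultFieldGenerateStrategy {
  override def apply[T](field: Field, inst: T)(implicit rubberRandom: RubberRandom): T = inst
}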
|
FINRAOS/DataGenerator
|
rubber-scaffolding/rubber-transformer/src/main/scala/org/finra/datagenerator/scaffolding/transformer/service/TransformationSession.scala
|
Scala
|
apache-2.0
| 8,144
|
trait SecondaryIndex {
val name: String
}
|
hirokikonishi/awscala
|
aws/dynamo/src/main/scala/SecondaryIndex.scala
|
Scala
|
apache-2.0
| 46
|
dl('load, "examples/dtpdhs/lhc_distclocks.dl")
val okcuttctfm1 =
parseFormula(
"2 * B() * X2 > 2 * B() * X1 + V1^2- V2^2 + (A+B())*(A *" +
"(eps()-T3)^2+2*(eps()-T3)*V1)"
)
val okcuttctfm2 =
parseFormula(
"2 * B() * X2 > 2 * B() * X1 + V1^2- V2^2 + (A+B())*(A *" +
"(s())^2+2*(s())*V1)"
)
val okcuttct = cutT(
DirectedCut,
okcuttctfm1,
okcuttctfm2
)
val okcuttct2fm1 = parseFormula(
"2 * B() * X2 > 2 * B() * X1 + V1^2- V2^2 + (A+B())*(A *" +
"(eps()-T3)^2+2*(eps()-T3)*V1)"
)
val okcuttct2fm2 =
parseFormula(
" (A+B())*(A *(s())^2+2*(s())*V1) <= (A+B())*(A *(eps() - T3)^2+2*(eps() - T3)*V1) "
)
val okcuttct2 = cutT(
StandardCut,
okcuttct2fm1,
okcuttct2fm2
)
val diffinv = parseFormula(
"forall f : C. forall l : C. " +
"(f /= l & e(f) = 1 & e(l) = 1 & id(f) <= id(l)) ==> " +
" ( (v(f) + a(f) * (eps() - t(f)) >= 0 & " +
"2 * B() * x(l) > 2 * B() * x(f) + v(f)^2 - v(l)^2 " +
" + (a(f) + B()) * (a(f) * (eps() - t(f) )^2 + 2 * (eps() - t(f) ) * v(f))) |" +
" (v(f) + a(f) * (eps() - t(f)) < 0 & " +
" 2 * B() * a(f)^2 * x(f) - B() * a(f) * v(f)^2 < 2 * B() * a(f)^2 * x(l) + a(f)^2 * v(l)^2 )) "
)
val instT = instantiatebyT(St("C")) (List(("i", List("f", "l")),
("f", List("f")),
("l", List("l"))))
val cutdiffinv2 = cutT(
DirectedCut,
parseFormula("2 * B() * A^2 * X1 - B() * A * V1^2 < "+
"2 * B() * A^2 * X2 + A^2 * V2^2"),
parseFormula("2 * B() * A * X1 - B() * V1^2 > "+
"2 * B() * A * X2 + A * V2^2")
)
val tyltct = composelistT(
hpalphaT*,
diffsolveT(RightP(0),Endpoint),
hpalphaT*,
tryruleT(andRight)<(
composelistT(
nilT
),
composelistT(
alphaT*,
instantiatebyT(St("C"))(List(("i", List("f", "l")),
("f", List("f")),
("l", List("l"))))*,
alphaT*,
tryruleT(impLeft)<(
composelistT(
alphaT*,
nullarizeT*,
substT*
),
alleasyT
)
)
)
)
val deletetct = nilT
val createtct = nilT
val controltct = nilT
val loopinv = parseFormula(
"eps() > 0 & B() > 0 & " +
"(forall i : C. ( a(i) >= -B() & v(i) >= 0 & " +
"t(i) >= 0 & t(i) <= eps() )) & " +
"(forall f : C. forall l : C. " +
"(f /= l & e(f) = 1 & e(l) = 1 & id(f) <= id(l)) ==> " +
" x(f) < x(l) & " +
" ((v(f) + a(f) * (eps() - t(f)) >= 0 & " +
"2 * B() * x(l) > 2 * B() * x(f) + v(f)^2 - v(l)^2 " +
" + (a(f) + B()) * (a(f) * (eps() - t(f) )^2 + 2 * (eps() - t(f) ) * v(f))) |" +
" (v(f) + a(f) * (eps() - t(f)) < 0 & " +
" 2 * B() * a(f)^2 * x(f) - B() * a(f) * v(f)^2 < 2 * B() * a(f)^2 * x(l) + a(f)^2 * v(l)^2 ))) "
)
val starttct =
tryruleT(loopInduction(loopinv))<(
easywithforallsT(St("C")),
composelistT(
hpalphaT*,
tryruleT(andRight)<(
composelistT(
tryruleT(choose),
tryruleT(andRight)<(
composelistT(
tryruleT(choose),
tryruleT(andRight)<(
deletetct,
createtct)),
// control
controltct)
),
//dynamics
tyltct
)
),
// post condition
composelistT(
tryruleT(directedCut(parseFormula(
"forall f : C. forall l : C. "+
"(f /= l & e(f) = 1 & e(l) = 1 & id(f) <= id(l) ) ==> x(f) < x(l)")))<(
composelistT(
hpalphaT*,
instantiatebyT(St("C"))
(List(("i", List("f", "l")),
("f", List("f")),
("l", List("l"))))*,
nullarizeT*,
(nonarithcloseT | alphaT | betaT)*,
hidethencloseT),
composelistT(
hpalphaT*,
instantiatebyT(St("C"))
(List(("f", List("f", "l")),
("l", List("f", "l"))))*,
(nonarithcloseT | alphaT | betaT | commuteequalsT)*,
nullarizeT*,
hidethencloseT
)
)
)
)
dl('gotoroot)
dl('tactic, starttct)
|
keymaerad/KeYmaeraD
|
examples/dtpdhs/lhc_distclocks_diffsolve.scala
|
Scala
|
bsd-3-clause
| 4,207
|
package logful.server
import java.util.concurrent.TimeUnit
import io.gatling.core.Predef._
import logful.server.config.LogFileReqConfig
import scala.concurrent.duration.FiniteDuration
class SmallLogEmptyHandleRampUserPerSecSimulation extends Simulation {
val from = 100
val to = 3000
val time = 3*60
val during = new FiniteDuration(time, TimeUnit.SECONDS)
val second = during.toSeconds
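  // A linear ramp from `from` to `to` users/sec over `time` seconds injects about
  // 0.5 * (from + to) * time users in total; the 0.6 factor appears to add ~20% headroom
  // (an inference from the arithmetic, not documented in the original source).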
val c = new LogFileReqConfig((0.6 * ((from + to) * time)).toInt)
  setUp(c.scn.inject(rampUsersPerSec(from) to to during during).protocols(c.httpProtocol))
}
|
foxundermoon/gatling-test
|
src/gatling/scala/logful/server/SmallLogEmptyHandleRampUserPerSecSimulation.scala
|
Scala
|
mit
| 563
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.streaming
import java.io._
import java.nio.charset.StandardCharsets
import java.util.{ConcurrentModificationException, EnumSet, UUID}
import scala.reflect.ClassTag
import org.apache.commons.io.IOUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs._
import org.apache.hadoop.fs.permission.FsPermission
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization
import org.apache.spark.internal.Logging
import org.apache.spark.sql.SparkSession
/**
* A [[MetadataLog]] implementation based on HDFS. [[HDFSMetadataLog]] uses the specified `path`
* as the metadata storage.
*
 * When writing a new batch, [[HDFSMetadataLog]] will first write to a temp file and then rename
* it to the final batch file. If the rename step fails, there must be multiple writers and only
* one of them will succeed and the others will fail.
*
* Note: [[HDFSMetadataLog]] doesn't support S3-like file systems as they don't guarantee listing
* files in a directory always shows the latest files.
*/
class HDFSMetadataLog[T <: AnyRef : ClassTag](sparkSession: SparkSession, path: String)
extends MetadataLog[T] with Logging {
private implicit val formats = Serialization.formats(NoTypeHints)
/** Needed to serialize type T into JSON when using Jackson */
private implicit val manifest = Manifest.classType[T](implicitly[ClassTag[T]].runtimeClass)
// Avoid serializing generic sequences, see SPARK-17372
require(implicitly[ClassTag[T]].runtimeClass != classOf[Seq[_]],
"Should not create a log with type Seq, use Arrays instead - see SPARK-17372")
import HDFSMetadataLog._
val metadataPath = new Path(path)
protected val fileManager = createFileManager()
if (!fileManager.exists(metadataPath)) {
fileManager.mkdirs(metadataPath)
}
/**
* A `PathFilter` to filter only batch files
*/
protected val batchFilesFilter = new PathFilter {
override def accept(path: Path): Boolean = isBatchFile(path)
}
protected def batchIdToPath(batchId: Long): Path = {
new Path(metadataPath, batchId.toString)
}
protected def pathToBatchId(path: Path) = {
path.getName.toLong
}
protected def isBatchFile(path: Path) = {
try {
path.getName.toLong
true
} catch {
case _: NumberFormatException => false
}
}
protected def serialize(metadata: T, out: OutputStream): Unit = {
// called inside a try-finally where the underlying stream is closed in the caller
Serialization.write(metadata, out)
}
protected def deserialize(in: InputStream): T = {
// called inside a try-finally where the underlying stream is closed in the caller
val reader = new InputStreamReader(in, StandardCharsets.UTF_8)
Serialization.read[T](reader)
}
/**
* Store the metadata for the specified batchId and return `true` if successful. If the batchId's
* metadata has already been stored, this method will return `false`.
*/
override def add(batchId: Long, metadata: T): Boolean = {
    require(metadata != null, "'null' metadata cannot be written to a metadata log")
get(batchId).map(_ => false).getOrElse {
// Only write metadata when the batch has not yet been written
writeBatch(batchId, metadata)
true
}
}
private def writeTempBatch(metadata: T): Option[Path] = {
while (true) {
val tempPath = new Path(metadataPath, s".${UUID.randomUUID.toString}.tmp")
try {
val output = fileManager.create(tempPath)
try {
serialize(metadata, output)
return Some(tempPath)
} finally {
IOUtils.closeQuietly(output)
}
} catch {
case e: FileAlreadyExistsException =>
// Failed to create "tempPath". There are two cases:
// 1. Someone is creating "tempPath" too.
// 2. This is a restart. "tempPath" has already been created but not moved to the final
// batch file (not committed).
//
// For both cases, the batch has not yet been committed. So we can retry it.
//
// Note: there is a potential risk here: if HDFSMetadataLog A is running, people can use
// the same metadata path to create "HDFSMetadataLog" and fail A. However, this is not a
// big problem because it requires the attacker must have the permission to write the
// metadata path. In addition, the old Streaming also have this issue, people can create
// malicious checkpoint files to crash a Streaming application too.
}
}
None
}
/**
* Write a batch to a temp file then rename it to the batch file.
*
* There may be multiple [[HDFSMetadataLog]] using the same metadata path. Although it is not a
* valid behavior, we still need to prevent it from destroying the files.
*/
private def writeBatch(batchId: Long, metadata: T): Unit = {
val tempPath = writeTempBatch(metadata).getOrElse(
throw new IllegalStateException(s"Unable to create temp batch file $batchId"))
try {
// Try to commit the batch
// It will fail if there is an existing file (someone has committed the batch)
logDebug(s"Attempting to write log #${batchIdToPath(batchId)}")
fileManager.rename(tempPath, batchIdToPath(batchId))
// SPARK-17475: HDFSMetadataLog should not leak CRC files
// If the underlying filesystem didn't rename the CRC file, delete it.
val crcPath = new Path(tempPath.getParent(), s".${tempPath.getName()}.crc")
if (fileManager.exists(crcPath)) fileManager.delete(crcPath)
} catch {
case e: FileAlreadyExistsException =>
// If "rename" fails, it means some other "HDFSMetadataLog" has committed the batch.
// So throw an exception to tell the user this is not a valid behavior.
throw new ConcurrentModificationException(
s"Multiple HDFSMetadataLog are using $path", e)
} finally {
fileManager.delete(tempPath)
}
}
/**
   * @return the deserialized metadata in a batch file, or None if the file does not exist.
* @throws IllegalArgumentException when path does not point to a batch file.
*/
def get(batchFile: Path): Option[T] = {
if (fileManager.exists(batchFile)) {
if (isBatchFile(batchFile)) {
get(pathToBatchId(batchFile))
} else {
throw new IllegalArgumentException(s"File ${batchFile} is not a batch file!")
}
} else {
None
}
}
override def get(batchId: Long): Option[T] = {
val batchMetadataFile = batchIdToPath(batchId)
if (fileManager.exists(batchMetadataFile)) {
val input = fileManager.open(batchMetadataFile)
try {
Some(deserialize(input))
} catch {
case ise: IllegalStateException =>
// re-throw the exception with the log file path added
throw new IllegalStateException(
s"Failed to read log file $batchMetadataFile. ${ise.getMessage}", ise)
} finally {
IOUtils.closeQuietly(input)
}
} else {
logDebug(s"Unable to find batch $batchMetadataFile")
None
}
}
override def get(startId: Option[Long], endId: Option[Long]): Array[(Long, T)] = {
val files = fileManager.list(metadataPath, batchFilesFilter)
val batchIds = files
.map(f => pathToBatchId(f.getPath))
.filter { batchId =>
(endId.isEmpty || batchId <= endId.get) && (startId.isEmpty || batchId >= startId.get)
}
batchIds.sorted.map(batchId => (batchId, get(batchId))).filter(_._2.isDefined).map {
case (batchId, metadataOption) =>
(batchId, metadataOption.get)
}
}
override def getLatest(): Option[(Long, T)] = {
val batchIds = fileManager.list(metadataPath, batchFilesFilter)
.map(f => pathToBatchId(f.getPath))
.sorted
.reverse
for (batchId <- batchIds) {
val batch = get(batchId)
if (batch.isDefined) {
return Some((batchId, batch.get))
}
}
None
}
/**
   * Get an array of [[FileStatus]] referencing batch files.
   * The array is sorted from the most recent batch file to the oldest.
*/
def getOrderedBatchFiles(): Array[FileStatus] = {
fileManager.list(metadataPath, batchFilesFilter)
.sortBy(f => pathToBatchId(f.getPath))
.reverse
}
/**
   * Removes all log entries earlier than thresholdBatchId (exclusive).
*/
override def purge(thresholdBatchId: Long): Unit = {
val batchIds = fileManager.list(metadataPath, batchFilesFilter)
.map(f => pathToBatchId(f.getPath))
for (batchId <- batchIds if batchId < thresholdBatchId) {
val path = batchIdToPath(batchId)
fileManager.delete(path)
logTrace(s"Removed metadata log file: $path")
}
}
private def createFileManager(): FileManager = {
val hadoopConf = sparkSession.sessionState.newHadoopConf()
try {
new FileContextManager(metadataPath, hadoopConf)
} catch {
case e: UnsupportedFileSystemException =>
logWarning("Could not use FileContext API for managing metadata log files at path " +
s"$metadataPath. Using FileSystem API instead for managing log files. The log may be " +
s"inconsistent under failures.")
new FileSystemManager(metadataPath, hadoopConf)
}
}
/**
* Parse the log version from the given `text` -- will throw exception when the parsed version
* exceeds `maxSupportedVersion`, or when `text` is malformed (such as "xyz", "v", "v-1",
* "v123xyz" etc.)
*/
private[sql] def parseVersion(text: String, maxSupportedVersion: Int): Int = {
if (text.length > 0 && text(0) == 'v') {
val version =
try {
text.substring(1, text.length).toInt
} catch {
case _: NumberFormatException =>
throw new IllegalStateException(s"Log file was malformed: failed to read correct log " +
s"version from $text.")
}
if (version > 0) {
if (version > maxSupportedVersion) {
throw new IllegalStateException(s"UnsupportedLogVersion: maximum supported log version " +
s"is v${maxSupportedVersion}, but encountered v$version. The log file was produced " +
s"by a newer version of Spark and cannot be read by this version. Please upgrade.")
} else {
return version
}
}
}
// reaching here means we failed to read the correct log version
throw new IllegalStateException(s"Log file was malformed: failed to read correct log " +
s"version from $text.")
}
}
object HDFSMetadataLog {
/** A simple trait to abstract out the file management operations needed by HDFSMetadataLog. */
trait FileManager {
/** List the files in a path that matches a filter. */
def list(path: Path, filter: PathFilter): Array[FileStatus]
    /** Make a directory at the given path, creating parent directories as needed. */
def mkdirs(path: Path): Unit
/** Whether path exists */
def exists(path: Path): Boolean
/** Open a file for reading, or throw exception if it does not exist. */
def open(path: Path): FSDataInputStream
/** Create path, or throw exception if it already exists */
def create(path: Path): FSDataOutputStream
/**
* Atomically rename path, or throw exception if it cannot be done.
* Should throw FileNotFoundException if srcPath does not exist.
* Should throw FileAlreadyExistsException if destPath already exists.
*/
def rename(srcPath: Path, destPath: Path): Unit
/** Recursively delete a path if it exists. Should not throw exception if file doesn't exist. */
def delete(path: Path): Unit
}
/**
* Default implementation of FileManager using newer FileContext API.
*/
class FileContextManager(path: Path, hadoopConf: Configuration) extends FileManager {
private val fc = if (path.toUri.getScheme == null) {
FileContext.getFileContext(hadoopConf)
} else {
FileContext.getFileContext(path.toUri, hadoopConf)
}
override def list(path: Path, filter: PathFilter): Array[FileStatus] = {
fc.util.listStatus(path, filter)
}
override def rename(srcPath: Path, destPath: Path): Unit = {
fc.rename(srcPath, destPath)
}
override def mkdirs(path: Path): Unit = {
fc.mkdir(path, FsPermission.getDirDefault, true)
}
override def open(path: Path): FSDataInputStream = {
fc.open(path)
}
override def create(path: Path): FSDataOutputStream = {
fc.create(path, EnumSet.of(CreateFlag.CREATE))
}
override def exists(path: Path): Boolean = {
fc.util().exists(path)
}
override def delete(path: Path): Unit = {
try {
fc.delete(path, true)
} catch {
case e: FileNotFoundException =>
// ignore if file has already been deleted
}
}
}
/**
* Implementation of FileManager using older FileSystem API. Note that this implementation
* cannot provide atomic renaming of paths, hence can lead to consistency issues. This
* should be used only as a backup option, when FileContextManager cannot be used.
*/
class FileSystemManager(path: Path, hadoopConf: Configuration) extends FileManager {
private val fs = path.getFileSystem(hadoopConf)
override def list(path: Path, filter: PathFilter): Array[FileStatus] = {
fs.listStatus(path, filter)
}
/**
* Rename a path. Note that this implementation is not atomic.
* @throws FileNotFoundException if source path does not exist.
* @throws FileAlreadyExistsException if destination path already exists.
* @throws IOException if renaming fails for some unknown reason.
*/
override def rename(srcPath: Path, destPath: Path): Unit = {
if (!fs.exists(srcPath)) {
throw new FileNotFoundException(s"Source path does not exist: $srcPath")
}
if (fs.exists(destPath)) {
throw new FileAlreadyExistsException(s"Destination path already exists: $destPath")
}
if (!fs.rename(srcPath, destPath)) {
throw new IOException(s"Failed to rename $srcPath to $destPath")
}
}
override def mkdirs(path: Path): Unit = {
fs.mkdirs(path, FsPermission.getDirDefault)
}
override def open(path: Path): FSDataInputStream = {
fs.open(path)
}
override def create(path: Path): FSDataOutputStream = {
fs.create(path, false)
}
override def exists(path: Path): Boolean = {
fs.exists(path)
}
override def delete(path: Path): Unit = {
try {
fs.delete(path, true)
} catch {
case e: FileNotFoundException =>
// ignore if file has already been deleted
}
}
}
}
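// A minimal usage sketch (hypothetical demo object, not part of Spark; assumes a local
// SparkSession and a writable path):
private[streaming] object HDFSMetadataLogExample {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local").appName("metadata-log-demo").getOrCreate()
    val log = new HDFSMetadataLog[String](spark, "/tmp/stream-metadata")
    log.add(0L, "offsets for batch 0")   // true on first write, false if batch 0 already exists
    assert(log.get(0L).contains("offsets for batch 0"))
    assert(log.getLatest().contains((0L, "offsets for batch 0")))
    spark.stop()
  }
}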
|
bOOm-X/spark
|
sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/HDFSMetadataLog.scala
|
Scala
|
apache-2.0
| 15,566
|
package lambdacalculus
import lambdacalculus.parser.{LambdaParser, StandardLambdaParser}
import lambdacalculus.compiler.{CBVCompiler, CBNCompiler, Compiler}
import lambdacalculus.machine.CallByName._
import lambdacalculus.machine.CallByValue._
import lambdacalculus.machine._
import lambdacalculus.parser.ast.Expr
import scala.util.{Failure, Success, Try}
import com.typesafe.scalalogging.slf4j.Logging
import lambdacalculus.ExecutionOrder.ExecutionOrder
object ExecutionOrder extends Enumeration {
type ExecutionOrder = Value
val CallByName = Value("CallByName")
val CallByValue = Value("CallByValue")
}
case class ParseException(msg: String) extends Exception(msg)
object LambdaCalculusDecompilation extends App {
// val l = "(\\x.x ADD 1) 2"
// val l = "1 ADD 2"
val l ="let mySucc = (\\x. add x 1) endlet mySucc 1"
val lc = LambdaCalculus(ExecutionOrder.CallByValue, true, false)
val parsed =
for {s <- lc.substituteLibrary(l)
p <- lc.parse(s)}
yield p
val compiled = parsed.flatMap(lc.compile)
val decompiled = compiled match {
case Success(c) => {
lc.compiler.decompile(c)
}
case Failure(e) => throw new Exception(s"Could not decompile $e")
}
println(parsed.get)
println("====================================")
println(compiled.get)
println("====================================")
println(decompiled)
}
object LambdaCalculus extends App {
val l =
"""
|call 3 SumService 11 1
""".stripMargin
// """
// | let countToTen =
// | \x.
// | if (x LT 10)
// | then countToTen (x ADD 1)
// | else x
// |endlet
// |
// |countToTen 9
// """.stripMargin
val lc = LambdaCalculus(ExecutionOrder.CallByValue)
lc.substituteParseCompileExecute(l)
}
case class LambdaCalculus(execOrder: ExecutionOrder.ExecutionOrder = ExecutionOrder.CallByValue,
debug: Boolean = false,
storeIntermediateSteps: Boolean = false,
maybeExecutor: Option[CallExecutor] = None,
parser: LambdaParser = new StandardLambdaParser()) extends Logging {
val compiler = compilerForExecOrder(debug, execOrder)
val machine = machineForExecOrder(storeIntermediateSteps, execOrder, maybeExecutor)
private def compilerForExecOrder(debug: Boolean, execOrder: ExecutionOrder): Compiler = execOrder match {
case ExecutionOrder.CallByName => CBNCompiler(debug)
case ExecutionOrder.CallByValue => CBVCompiler(debug)
}
private def machineForExecOrder(storeIntermediateSteps: Boolean, execOrder: ExecutionOrder, maybeExecutor: Option[CallExecutor]): AbstractMachine = execOrder match {
case ExecutionOrder.CallByName => CBNAbstractMachine(storeIntermediateSteps)
case ExecutionOrder.CallByValue => CBVAbstractMachine(storeIntermediateSteps, maybeExecutor)
}
def substituteParse(code: String): Try[Expr] = {
for {
substituted <- substituteLibrary(code)
parsed <- parse(substituted)
} yield parsed
}
def substituteParseCompile(code: String): Try[List[Instruction]] = {
for {
parsed <- substituteParse(code)
compiled <- compile(parsed)
} yield compiled
}
def substituteParseCompileExecute(code: String): Try[List[MachineValue]] = {
val result = for {
compiled <- substituteParseCompile(code)
executed <- execute(compiled)
} yield executed
val intermediateStepCount = machine.intermediateConfigurations.fold("?")(_.size.toString)
logger.info(s"$code -$intermediateStepCount-> $result")
result
}
def substituteLibrary(code: String): Try[String] = Try(CommandLibrary(compiler)(code))
def parse(code: String): Try[Expr] = {
import parser.{ Success, NoSuccess }
parser(code) match {
case Success(res: Expr, _) => Try(res)
case NoSuccess(err, next) =>
val msg = s"\n$err:\n${next.pos.longString}\n"
throw new ParseException(msg)
}
}
def compile(code: Expr): Try[List[Instruction]] = Try(compiler(code))
def execute(code: List[Instruction]): Try[List[MachineValue]] = {
Try(machine(code))
}
}
|
cn-uofbasel/nfn-scala
|
lambdacalc/src/main/scala/lambdacalculus/LambdaCalculus.scala
|
Scala
|
isc
| 4,171
|
/*
* Copyright 2010 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.logging
/**
* A factory to configure a Logger. Note that because Loggers are global, executing this
* factory has side-effects.
*
* @param node
* Name of the logging node. The default ("") is the top-level logger.
*
* @param level
* Log level for this node. Leaving it None is java's secret signal to use the parent logger's
* level.
*
* @param handlers
* Where to send log messages.
*
* @param useParents
* Override to have log messages stop at this node. Otherwise they are passed up to parent
* nodes.
*/
case class LoggerFactory(
node: String = "",
level: Option[Level] = None,
handlers: List[HandlerFactory] = Nil,
useParents: Boolean = true)
extends (() => Logger) {
/**
   * Registers new handlers and settings with the logger at `node`
* @note It clears all the existing handlers for the node
*/
def apply(): Logger = {
val logger = Logger.get(node)
logger.clearHandlers()
level.foreach { x => logger.setLevel(x) }
handlers.foreach { h => logger.addHandler(h()) }
logger.setUseParentHandlers(useParents)
logger
}
}
/**
* Shim for java compatibility. Make a new LoggerFactoryBuilder with `LoggerFactory#newBuilder()`.
*/
class LoggerFactoryBuilder private[logging] (factory: LoggerFactory) {
def node(_node: String): LoggerFactoryBuilder =
new LoggerFactoryBuilder(factory.copy(node = _node))
def level(_level: Level): LoggerFactoryBuilder =
new LoggerFactoryBuilder(factory.copy(level = Some(_level)))
def parentLevel(): LoggerFactoryBuilder = new LoggerFactoryBuilder(factory.copy(level = None))
def addHandler[T <: Handler](handler: () => T): LoggerFactoryBuilder =
new LoggerFactoryBuilder(factory.copy(handlers = handler :: factory.handlers))
def unhandled(): LoggerFactoryBuilder = new LoggerFactoryBuilder(factory.copy(handlers = Nil))
def useParents(): LoggerFactoryBuilder = new LoggerFactoryBuilder(factory.copy(useParents = true))
def ignoreParents(): LoggerFactoryBuilder =
new LoggerFactoryBuilder(factory.copy(useParents = false))
def build(): LoggerFactory = factory
}
object LoggerFactory {
def newBuilder(): LoggerFactoryBuilder = new LoggerFactoryBuilder(LoggerFactory())
}
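// A minimal usage sketch (hypothetical node name; assumes util-logging's ConsoleHandler
// factory is available in this package):
object LoggerFactoryExample {
  def main(args: Array[String]): Unit = {
    val factory = LoggerFactory(
      node = "com.example.myapp",
      level = Some(Level.INFO),
      handlers = List(ConsoleHandler())
    )
    val logger = factory()  // side effect: clears and re-registers handlers for the node
    logger.info("configured via LoggerFactory")
  }
}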
|
twitter/util
|
util-logging/src/main/scala/com/twitter/logging/LoggerFactory.scala
|
Scala
|
apache-2.0
| 2,831
|
package com.github.novamage.svalidator.html
import com.github.novamage.svalidator.validation.binding.BindingAndValidationWithData
import language.implicitConversions
/** Helper class that eases the use of an [[com.github.novamage.svalidator.html.HtmlFactory HtmlFactory]]
*
* @param summary The summary whose fields will be extracted to generate html inputs for
* @tparam A Type of the instance validated by the summary
*/
class BindingAndValidationSummaryHelper[A](summary: BindingAndValidationWithData[A, _]) {
/** Generates a <input type="hidden"> with the given name
*
* @param name Name attribute for the generated hidden input
* @param valueGetter Value provider from the summary for the property of this hidden input
* @param attributes Any additional attributes to put on the hidden input
* @tparam B Type of the instance validated by the summary
* @return The markup of the hidden input after applying the converter function
*/
def hidden[B](name: String,
valueGetter: A => Any,
attributes: Map[String, Any] = Map.empty)(implicit htmlFactory: HtmlFactory[B]): B = {
htmlFactory.hidden(summary, name, valueGetter, attributes)
}
/** Generates a <input type="text"> with the given name and label
*
* @param name Name attribute for the generated input
* @param valueGetter Value provider from the summary for the property of this input
* @param label Label to place alongside this input
* @param attributes Any additional attributes to put on the input
* @tparam B Type of the instance validated by the summary
* @return The markup of the input after applying the converter function
*/
def textBox[B](name: String,
valueGetter: A => Any,
label: String,
attributes: Map[String, Any] = Map.empty)(implicit htmlFactory: HtmlFactory[B]): B = {
htmlFactory.textBox(summary, name, valueGetter, label, attributes)
}
/** Generates a <input type="password"> with the given name and label
*
* @param name Name attribute for the generated input
* @param label Label to place alongside this input
* @param attributes Any additional attributes to put on the input
* @tparam B Type of the instance validated by the summary
* @return The markup of the input after applying the converter function
*/
def password[B](name: String,
label: String,
attributes: Map[String, Any] = Map.empty)(implicit htmlFactory: HtmlFactory[B]): B = {
htmlFactory.password(summary, name, label, attributes)
}
/** Generates a <input type="checkbox"> with the given name and label
*
* @param name Name attribute for the generated input
* @param valueGetter Value provider from the summary for the property of this input
* @param label Label to place alongside this input
* @param attributes Any additional attributes to put on the input
* @tparam B Type of the instance validated by the summary
* @return The markup of the input after applying the converter function
*/
def checkBox[B](name: String,
valueGetter: A => Boolean,
label: String,
attributes: Map[String, Any] = Map.empty)(implicit htmlFactory: HtmlFactory[B]): B = {
htmlFactory.checkBox(summary, name, valueGetter, label, attributes)
}
/** Generates a <select> with the given name and label
*
* @param name Name attribute for the generated input
* @param valueGetter Value provider from the summary for the property of this input
* @param optionValuesAndText List of options to display. First attribute will be used as the value, second attribute as the text
* @param label Label to place alongside this input
* @param attributes Any additional attributes to put on the input
* @tparam B Type of the instance validated by the summary
* @return The markup of the input after applying the converter function
*/
def select[B](name: String,
valueGetter: A => Any,
optionValuesAndText: List[(Any, Any)],
label: String,
attributes: Map[String, Any] = Map.empty)(implicit htmlFactory: HtmlFactory[B]): B = {
htmlFactory.select(summary, name, valueGetter, optionValuesAndText, label, attributes)
}
/** Generates a group of <input type="radio"> that share the given name, and are tagged as a group with the label
*
* @param name Name attribute for the group of inputs
* @param valueGetter Value provider from the summary for the property of this input
* @param optionValuesAndText List of radios to display. First attribute will be used as the value, second attribute as the label
* @param label Label to place alongside the group of inputs
* @param attributes Any additional attributes to pass to the group of inputs
* @tparam B Type of the instance validated by the summary
* @return The markup of the group of inputs after applying the converter function
*/
def radioGroup[B](name: String,
valueGetter: A => Any,
optionValuesAndText: List[(Any, Any)],
label: String,
attributes: Map[String, Any] = Map.empty)(implicit htmlFactory: HtmlFactory[B]): B = {
htmlFactory.radioGroup(summary, name, valueGetter, optionValuesAndText, label, attributes)
}
/** Generates a group of <input type="checkbox"> that share the given name, and are tagged as a group with the label
*
* @param name Name attribute for the group of inputs
* @param valueGetter Values provider from the summary for the property of this input
* @param optionValuesAndText List of checkboxes to display. First attribute will be used as the value, second attribute as the label
* @param label Label to place alongside the group of inputs
* @param attributes Any additional attributes to pass to the group of inputs
* @tparam B Type of the instance validated by the summary
* @return The markup of the group of inputs after applying the converter function
*/
def checkBoxGroup[B](name: String,
valueGetter: A => List[Any],
optionValuesAndText: List[(Any, Any)],
label: String,
attributes: Map[String, Any] = Map.empty)(implicit htmlFactory: HtmlFactory[B]): B = {
htmlFactory.checkBoxGroup(summary, name, valueGetter, optionValuesAndText, label, attributes)
}
/** Generates a <textarea> with the given name and label
*
* @param name Name attribute for the generated textarea
* @param valueGetter Value provider from the summary for the property of this textarea
* @param label Label to place alongside this textarea
* @param attributes Any additional attributes to put on the textarea
* @tparam B Type of the instance validated by the summary
* @return The markup of the input after applying the converter function
*/
def textArea[B](name: String,
valueGetter: A => Any,
label: String,
attributes: Map[String, Any] = Map.empty)(implicit htmlFactory: HtmlFactory[B]): B = {
htmlFactory.textArea(summary, name, valueGetter, label, attributes)
}
/** Generates a <button> with the given name and displayed text
*
* @param name Name attribute for the generated button
* @param text Text to display on the button
* @param attributes Any additional attributes to put on the button
* @tparam B Type of the instance validated by the summary
* @return The markup of the input after applying the converter function
*/
def button[B](name: String,
text: String,
attributes: Map[String, Any] = Map.empty)(implicit htmlFactory: HtmlFactory[B]): B = {
htmlFactory.button(summary, name, text, attributes)
}
/** Generates a <input type="submit"> with the given name and value
*
* @param name Name attribute for the generated button
* @param value Value attribute, which is the text displayed for the button
* @param attributes Any additional attributes to put on the button
* @tparam B Type of the instance validated by the summary
* @return The markup of the input after applying the converter function
*/
def submit[B](name: String,
value: String,
attributes: Map[String, Any] = Map.empty)(implicit htmlFactory: HtmlFactory[B]): B = {
htmlFactory.submit(summary, name, value, attributes)
}
}
/** Helper object that eases the use of an [[com.github.novamage.svalidator.html.HtmlFactory HtmlFactory]]
*/
object BindingAndValidationSummaryHelper {
/** This method enables implicit conversions of summaries to add to them the HTML helper methods of an
* [[com.github.novamage.svalidator.html.HtmlFactory HtmlFactory]].
*
* @param summary Summary to add the methods to
* @tparam A Type of the instance validated for the summary
* @return The wrapped summary with helper methods
*/
implicit def helper[A](summary: BindingAndValidationWithData[A, _]): BindingAndValidationSummaryHelper[A] = {
new BindingAndValidationSummaryHelper(summary)
}
}
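// A minimal usage sketch (hypothetical Person type; assumes an implicit HtmlFactory[String]
// is in scope that renders markup as plain strings):
object BindingAndValidationSummaryHelperExample {
  import BindingAndValidationSummaryHelper._

  case class Person(name: String, admin: Boolean)

  def renderForm(summary: BindingAndValidationWithData[Person, _])
                (implicit htmlFactory: HtmlFactory[String]): String = {
    summary.textBox("name", _.name, "Name") +
      summary.checkBox("admin", _.admin, "Administrator") +
      summary.submit("save", "Save")
  }
}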
|
NovaMage/SValidator
|
src/main/scala/com/github/novamage/svalidator/html/BindingAndValidationSummaryHelper.scala
|
Scala
|
mit
| 9,368
|
package org.scalatra
import org.scalatra.util.{ MapWithIndifferentAccess, MultiMapHeadView }
class ScalatraParams(
protected val multiMap: Map[String, Seq[String]])
extends MultiMapHeadView[String, String]
with MapWithIndifferentAccess[String]
|
lzpfmh/scalatra
|
core/src/main/scala/org/scalatra/ScalatraParams.scala
|
Scala
|
bsd-2-clause
| 256
|
package im.actor
import sbt._
object Dependencies {
object V {
val akka = "2.3.12"
val akkaExperimental = "1.0"
val scalaz = "7.1.1"
val slick = "3.0.0"
val scalatest = "2.2.4"
}
object Compile {
val akkaActor = "com.typesafe.akka" %% "akka-actor" % V.akka
val akkaContrib = "com.typesafe.akka" %% "akka-contrib" % V.akka
val akkaStream = "com.typesafe.akka" %% "akka-stream-experimental" % V.akkaExperimental
val akkaHttp = "com.typesafe.akka" %% "akka-http-experimental" % V.akkaExperimental
val akkaHttpCore = "com.typesafe.akka" %% "akka-http-core-experimental" % V.akkaExperimental
val akkaHttpSpray = "com.typesafe.akka" %% "akka-http-spray-json-experimental" % V.akkaExperimental
val akkaSlf4j = "com.typesafe.akka" %% "akka-slf4j" % V.akka
val akkaPersistenceJdbc = "com.github.dnvriend" %% "akka-persistence-jdbc" % "1.1.7"
val apacheEmail = "org.apache.commons" % "commons-email" % "1.4"
val concmap = "com.googlecode.concurrentlinkedhashmap" % "concurrentlinkedhashmap-lru" % "1.4.2"
val caffeine = "com.github.ben-manes.caffeine" % "caffeine" % "1.2.0"
val eaioUuid = "com.eaio.uuid" % "uuid" % "3.4"
val configs = "com.github.kxbmap" %% "configs" % "0.2.4"
val dispatch = "net.databinder.dispatch" %% "dispatch-core" % "0.11.2"
val javaCompat = "org.scala-lang.modules" %% "scala-java8-compat" % "0.5.0"
@deprecated("use `playJson` instead")
val sprayJson = "io.spray" %% "spray-json" % "1.3.1"
val playJson = "com.typesafe.play" %% "play-json" % "2.4.1"
val postgresJdbc = "org.postgresql" % "postgresql" % "9.4-1201-jdbc41" exclude("org.slf4j", "slf4j-simple")
val slick = "com.typesafe.slick" %% "slick" % V.slick
val slickJoda = "com.github.tototoshi" %% "slick-joda-mapper" % "2.0.0"
val slickPg = "com.github.tminglei" %% "slick-pg" % "0.9.0"
val slickTestkit = "com.typesafe.slick" %% "slick-testkit" % V.slick
val flywayCore = "org.flywaydb" % "flyway-core" % "3.1"
val hikariCP = "com.zaxxer" % "HikariCP" % "2.3.5"
val amazonaws = "com.amazonaws" % "aws-java-sdk-s3" % "1.9.31"
val awsWrap = "com.github.dwhjames" %% "aws-wrap" % "0.7.2"
val bcprov = "org.bouncycastle" % "bcprov-jdk15on" % "1.50"
val libPhoneNumber = "com.googlecode.libphonenumber" % "libphonenumber" % "7.0.+"
val akkaKryoSerialization = "com.github.romix.akka" %% "akka-kryo-serialization" % "0.3.3"
val kryoSerializers = "de.javakaffee" % "kryo-serializers" % "0.29"
val protobuf = "com.google.protobuf" % "protobuf-java" % "2.6.1"
val scodecBits = "org.scodec" %% "scodec-bits" % "1.0.9"
val scodecCore = "org.scodec" %% "scodec-core" % "1.8.1"
val scalazCore = "org.scalaz" %% "scalaz-core" % V.scalaz
val scalazConcurrent = "org.scalaz" %% "scalaz-concurrent" % V.scalaz
val shapeless = "com.chuusai" %% "shapeless" % "2.2.4"
val scrImageCore = "com.sksamuel.scrimage" %% "scrimage-core" % "1.4.2"
val tyrex = "tyrex" % "tyrex" % "1.0.1"
val gcmServer = "com.google.android.gcm" % "gcm-server" % "1.0.2"
val pushy = "com.relayrides" % "pushy" % "0.4.3"
val logbackClassic = "ch.qos.logback" % "logback-classic" % "1.1.2"
val scalaLogging = "com.typesafe.scala-logging" %% "scala-logging" % "3.1.0"
val jodaTime = "joda-time" % "joda-time" % "2.7"
val jodaConvert = "org.joda" % "joda-convert" % "1.7"
val apacheCommonsCodec = "commons-codec" % "commons-codec" % "1.10"
val apacheCommonsIo = "commons-io" % "commons-io" % "2.4"
}
object Testing {
val akkaTestkit = "com.typesafe.akka" %% "akka-testkit" % V.akka % "test"
val scalacheck = "org.scalacheck" %% "scalacheck" % "1.12.2" % "test"
val scalatest = "org.scalatest" %% "scalatest" % V.scalatest % "test"
// val scalaTestPlay = "org.scalatestplus" %% "play" % "1.2.0" % "test"
val jfairy = "io.codearte.jfairy" % "jfairy" % "0.3.1" % "test"
}
import Compile._
import Testing._
val shared = Seq(configs, javaCompat, logbackClassic, scalaLogging, tyrex)
val root = shared ++ Seq(
akkaSlf4j, akkaActor, akkaStream
)
val activation = shared ++ Seq(akkaActor, akkaHttp, playJson)
val commonsBase = shared ++ Seq(akkaActor, akkaPersistenceJdbc, akkaKryoSerialization, concmap, jodaConvert, jodaTime, kryoSerializers)
val commonsApi = shared ++ Seq(akkaSlf4j, akkaActor, akkaStream, apacheCommonsCodec, protobuf, scalazCore)
val core = shared ++ Seq(akkaActor, akkaContrib, gcmServer, pushy, jodaTime, postgresJdbc, slick)
val enrich = shared ++ Seq(akkaActor, akkaHttp)
val rpcApi = shared ++ Seq(
akkaSlf4j, akkaActor, amazonaws, awsWrap, bcprov, apacheCommonsIo, shapeless
)
val httpApi = shared ++ Seq(akkaActor, jodaTime, akkaHttp, playJson)
val email = shared ++ Seq(akkaActor, apacheEmail)
val internalServices = shared ++ Seq(akkaActor, akkaStream, scodecBits)
val oauth = shared ++ Seq(akkaActor, akkaHttp, playJson)
val session = shared ++ Seq(
akkaSlf4j, akkaActor, akkaStream, scodecCore
)
val sessionMessages = Seq(akkaActor)
val persist = shared ++ Seq(akkaActor, apacheCommonsCodec, postgresJdbc, slick, slickJoda, slickPg, slickTestkit, flywayCore, hikariCP, jodaTime, jodaConvert)
val presences = shared :+ akkaContrib
val sms = shared ++ Seq(akkaActor, akkaHttp, dispatch)
val social = shared :+ akkaContrib
val tls = shared ++ Seq(akkaHttp, akkaStream)
val codecs = shared ++ Seq(scalazCore, scodecBits, scodecCore)
val models = shared ++ Seq(eaioUuid, scodecBits, scodecCore, sprayJson, jodaTime, jodaConvert, slickPg)
val frontend = shared ++ Seq(
akkaSlf4j, akkaActor, akkaStream,
scodecBits, scodecCore,
scalazCore, scalazConcurrent
)
val dashboard = shared :+ scalazCore
val notifications = shared ++ Seq(akkaContrib, slick)
val utils = shared ++ Seq(akkaActor, akkaHttp, amazonaws, awsWrap, libPhoneNumber, scrImageCore, slick)
val utilsCache = shared :+ caffeine
val utilsHttp = shared ++ Seq(akkaActor, akkaHttp, akkaTestkit, scalatest)
val voximplant = shared ++ Seq(akkaActor, dispatch, playJson)
val tests = shared ++ Seq(
jfairy, scalacheck, scalatest, slickTestkit, akkaTestkit //, scalaTestPlay
)
}
|
darioajr/actor-platform
|
actor-server/project/Dependencies.scala
|
Scala
|
mit
| 8,430
|
package org.openapitools.client.model
case class User (
_class: Option[String],
_id: Option[String],
_fullName: Option[String],
_email: Option[String],
_name: Option[String]
)
object User {
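  // Note: arguments are interpolated verbatim into the body below, so string values must
  // already be JSON-quoted by the caller (an assumption based on the generated template).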
def toStringBody(var_class: Object, var_id: Object, var_fullName: Object, var_email: Object, var_name: Object) =
s"""
| {
| "class":$var_class,"id":$var_id,"fullName":$var_fullName,"email":$var_email,"name":$var_name
| }
""".stripMargin
}
|
cliffano/swaggy-jenkins
|
clients/scala-gatling/generated/src/gatling/scala/org/openapitools/client/model/User.scala
|
Scala
|
mit
| 496
|
package special.collection
import scala.reflect.ClassTag
import scala.collection.mutable
import scalan.Internal
import spire.syntax.all._
object Helpers {
private def sameLengthErrorMsg[A,B](xs: Coll[A], ys: Coll[B]) =
s"Collections should have same length but was ${xs.length} and ${ys.length}:\n xs=$xs;\n ys=$ys"
def assertSameLength[A,B](xs: Coll[A], ys: Coll[B]) = {
assert(xs.length == ys.length, sameLengthErrorMsg(xs, ys))
}
def requireSameLength[A,B](xs: Coll[A], ys: Coll[B]) = {
require(xs.length == ys.length, sameLengthErrorMsg(xs, ys))
}
@inline def asColl[T](coll: Coll[_]): Coll[T] = coll.asInstanceOf[Coll[T]]
def mapReduce[A, K: ClassTag, V: ClassTag](arr: Array[A], m: A => (K, V), r: ((V, V)) => V): (Array[K], Array[V]) = {
val keyPositions = new java.util.HashMap[K, Int](32)
val keys = mutable.ArrayBuilder.make[K]
val values = Array.ofDim[V](arr.length)
var i = 0
var nValues = 0
while (i < arr.length) {
val (key, value) = m(arr(i))
val pos = keyPositions.getOrDefault(key, 0)
if (pos == 0) {
keyPositions.put(key, nValues + 1)
keys += key
values(nValues) = value
nValues += 1
} else {
values(pos - 1) = r((values(pos - 1), value))
}
i += 1
}
val resValues = Array.ofDim[V](nValues)
Array.copy(values, 0, resValues, 0, nValues)
(keys.result(), resValues)
}
def mapToArrays[K: ClassTag, V: ClassTag](m: Map[K,V]): (Array[K], Array[V]) = {
val keys = mutable.ArrayBuilder.make[K]
val values = mutable.ArrayBuilder.make[V]
for ((k,v) <- m) {
keys += k
values += v
}
(keys.result, values.result)
}
}
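// A minimal usage sketch of mapReduce (hypothetical values): group integers by parity and
// sum each group, preserving first-seen key order.
object HelpersExample {
  def main(args: Array[String]): Unit = {
    val (keys, sums) = Helpers.mapReduce[Int, Int, Int](
      Array(1, 2, 3, 4, 5),
      x => (x % 2, x),            // map each element to (key, value)
      { case (a, b) => a + b }    // reduce two values sharing the same key
    )
    assert(keys.toSeq == Seq(1, 0) && sums.toSeq == Seq(9, 6))
  }
}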
|
ScorexFoundation/sigmastate-interpreter
|
library-impl/src/main/scala/special/collection/Helpers.scala
|
Scala
|
mit
| 1,709
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// scalastyle:off println
package org.apache.spark.examples
import java.util.Random
import scala.collection.mutable.HashMap
import scala.collection.mutable.HashSet
import breeze.linalg.{Vector, DenseVector, squaredDistance}
import org.apache.spark.SparkContext._
/**
 * K-means clustering.
 *
 * This is an example implementation for learning how to use Spark. For more conventional use,
 * please refer to org.apache.spark.mllib.clustering.KMeans
*/
object LocalKMeans {
val N = 1000
val R = 1000 // Scaling factor
val D = 10
val K = 10
val convergeDist = 0.001
val rand = new Random(42)
def generateData: Array[DenseVector[Double]] = {
def generatePoint(i: Int): DenseVector[Double] = {
DenseVector.fill(D){rand.nextDouble * R}
}
Array.tabulate(N)(generatePoint)
}
def closestPoint(p: Vector[Double], centers: HashMap[Int, Vector[Double]]): Int = {
var index = 0
var bestIndex = 0
var closest = Double.PositiveInfinity
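    // Note: `centers` is keyed 1..size (see how kPoints is populated in main), hence the
    // 1-based loop below.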
for (i <- 1 to centers.size) {
val vCurr = centers.get(i).get
val tempDist = squaredDistance(p, vCurr)
if (tempDist < closest) {
closest = tempDist
bestIndex = i
}
}
bestIndex
}
def showWarning() {
System.err.println(
"""WARN: This is a naive implementation of KMeans Clustering and is given as an example!
|Please use the KMeans method found in org.apache.spark.mllib.clustering
|for more conventional use.
""".stripMargin)
    // String.stripMargin removes leading whitespace on each line up to and including the first vertical bar (|)
}
def main(args: Array[String]) {
showWarning()
val data = generateData
    // mutable HashSet for the sampled initial points
var points = new HashSet[Vector[Double]]
var kPoints = new HashMap[Int, Vector[Double]]
var tempDist = 1.0
while (points.size < K) {
points.add(data(rand.nextInt(N)))
}
val iter = points.iterator
for (i <- 1 to points.size) {
kPoints.put(i, iter.next())
}
println("Initial centers: " + kPoints)
while(tempDist > convergeDist) {
var closest = data.map (p => (closestPoint(p, kPoints), (p, 1)))
var mappings = closest.groupBy[Int] (x => x._1)
var pointStats = mappings.map { pair =>
pair._2.reduceLeft [(Int, (Vector[Double], Int))] {
case ((id1, (p1, c1)), (id2, (p2, c2))) => (id1, (p1 + p2, c1 + c2))
}
}
var newPoints = pointStats.map {mapping =>
(mapping._1, mapping._2._1 * (1.0 / mapping._2._2))}
tempDist = 0.0
for (mapping <- newPoints) {
tempDist += squaredDistance(kPoints.get(mapping._1).get, mapping._2)
}
for (newP <- newPoints) {
kPoints.put(newP._1, newP._2)
}
}
println("Final centers: " + kPoints)
}
}
// scalastyle:on println
|
tophua/spark1.52
|
examples/src/main/scala/org/apache/spark/examples/LocalKMeans.scala
|
Scala
|
apache-2.0
| 3,732
|
/*
* Copyright 2016 by Eugene Yokota
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gigahorse
package support.akkahttp
import scala.concurrent.{ Future, ExecutionContext }
import akka.http.scaladsl.model.HttpResponse
import akka.stream.Materializer
abstract class AkkaHttpStreamHandler[A] extends AkkaHttpCompletionHandler[A] {
override def onCompleted(response: FullResponse): A = ???
def onStream(response: StreamResponse): Future[A]
override def onPartialResponse(httpResponse: HttpResponse, config: Config)(implicit fm: Materializer, ec: ExecutionContext): Future[A] =
onStream(new AkkaHttpStreamResponse(httpResponse, config))
}
|
eed3si9n/gigahorse
|
akka-http/src/main/scala/gigahorse/support/akkahttp/AkkaHttpStreamHandler.scala
|
Scala
|
apache-2.0
| 1,198
|
/**
* A simple text based RPG
*
* @package simplerpg
* @copyright 2015
*/
package simplerpg.action
import simplerpg.{Player, World, Item, Weapon, Armor}
final class InitialParseAction(commands: Array[String]) extends Action {
def run(currentPlayer: Player, world: World): Option[Action] = Some(parseCommands())
protected def parseCommands(): Action = {
val initialAction = commands match {
case Array("attack", enemyName) => new AttackAction(enemyName)
case Array("list", category) => new ShowStoreInventoryAction(category)
case Array("buy", category, _*) => new BuyAction(category, implode(commands, 2))
case Array("sell", category, _*) => new SellAction(category, implode(commands, 2))
case Array("equip", category, _*) => new EquipAction(category, implode(commands, 2))
case Array("drop", category, _*) => new DropAction(category, implode(commands, 2))
case Array("use", _*) => new UseItemAction(implode(commands))
case Array("show", _*) => new ShowInventoryAction(commands.drop(1))
case Array("inventory", _*) => new ShowInventoryAction(commands.drop(1))
case Array("stats", _*) => new StatsAction(commands.drop(1))
case Array("vitals", _*) => new VitalsAction(commands.drop(1))
case Array("exp", _*) => new ExperienceAction(commands.drop(1))
case Array("goto", _*) => new GotoAction(implode(commands))
case Array("save") => new SavePlayerAction
case Array("places") => new PlacesAction
case Array("where") => new WhereAction
case Array("roll") => new DiceAction
case Array("leave") => new LeaveAction
case Array("quit") => new LeaveAction
case Array("exit") => new LeaveAction
case _ => new InvalidAction
}
new CompoundPrintAction(initialAction)
}
protected def implode(items: Array[String], offset: Int = 1): String = items.drop(offset).mkString(" ")
}
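// Hedged usage sketch, not part of the original file: raw player input is
// split on whitespace before reaching the parser, so "attack goblin" becomes
// Array("attack", "goblin") and matches the AttackAction case above. The
// object and method names are hypothetical.
object ParseExample {
  def actionFor(input: String): Action =
    new InitialParseAction(input.trim.split("\\s+"))
}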
|
mcross1882/SimpleRPG
|
src/main/scala/simplerpg/action/InitialParseAction.scala
|
Scala
|
mit
| 2355
|
/*
* Copyright 2001-2008 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.tools
import org.scalatest._
import java.io.BufferedOutputStream
import java.io.File
import java.io.FileOutputStream
import java.io.IOException
import java.io.OutputStream
import java.io.OutputStreamWriter
import java.io.PrintWriter
import java.util.Iterator
import java.util.Set
import java.io.StringWriter
import org.scalatest.events._
import PrintReporter._
import org.scalatest.junit.JUnitTestFailedError
import org.scalatest.exceptions.PropertyCheckFailedException
import org.scalatest.exceptions.TableDrivenPropertyCheckFailedException
import Suite.indentation
import org.scalatest.exceptions.StackDepth
/**
* A <code>Reporter</code> that prints test status information to
* a <code>Writer</code>, <code>OutputStream</code>, or file.
*
* @author Bill Venners
*/
private[scalatest] abstract class StringReporter(presentAllDurations: Boolean,
presentInColor: Boolean, presentShortStackTraces: Boolean, presentFullStackTraces: Boolean,
presentUnformatted: Boolean) extends ResourcefulReporter {
private def withPossibleLineNumber(stringToPrint: String, throwable: Option[Throwable]): String = {
throwable match {
case Some(stackDepth: StackDepth) =>
stackDepth.failedCodeFileNameAndLineNumberString match {
case Some(lineNumberString) =>
Resources("printedReportPlusLineNumber", stringToPrint, lineNumberString)
case None => stringToPrint
}
case _ => stringToPrint
}
}
protected def printPossiblyInColor(text: String, ansiColor: String)
/*
I either want to print the full stack trace, like this:
[scalatest] TEST FAILED - JUnitTestCaseSuite: testSomething(org.scalatestexamples.junit.JUnitTestCaseSuite) (JUnitTestCaseSuite.scala:22)
[scalatest] hi there
[scalatest] org.scalatest.junit.JUnitTestFailedError: hi there
[scalatest] at org.scalatest.junit.AssertionsForJUnit$class.newAssertionFailedException(AssertionsForJUnit.scala:101)
[scalatest] at org.scalatest.junit.JUnit3Suite.newAssertionFailedException(JUnit3Suite.scala:140)
[scalatest] at org.scalatest.Assertions$class.fail(Assertions.scala:601)
[scalatest] at org.scalatest.junit.JUnit3Suite.fail(JUnit3Suite.scala:140)
[scalatest] at org.scalatestexamples.junit.JUnitTestCaseSuite.testSomething(JUnitTestCaseSuite.scala:22)
[scalatest] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
[scalatest] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
[scalatest] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
[scalatest] at java.lang.reflect.Method.invoke(Method.java:585)
[scalatest] at junit.framework.TestCase.runTest(TestCase.java:168)
[scalatest] at junit.framework.TestCase.runBare(TestCase.java:134)
[scalatest] at junit.framework.TestResult$1.protect(TestResult.java:110)
[scalatest] at junit.framework.TestResult.runProtected(TestResult.java:128)
[scalatest] at junit.framework.TestResult.run(TestResult.java:113)
[scalatest] at junit.framework.TestCase.run(TestCase.java:124)
[scalatest] at junit.framework.TestSuite.runTest(TestSuite.java:232)
[scalatest] at junit.framework.TestSuite.run(TestSuite.java:227)
[scalatest] at junit.framework.TestSuite.runTest(TestSuite.java:232)
[scalatest] at junit.framework.TestSuite.run(TestSuite.java:227)
[scalatest] at org.scalatest.junit.JUnit3Suite.run(JUnit3Suite.scala:151)
[scalatest] at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:59)
[scalatest] at org.scalatest.tools.Runner$$anonfun$doRunRunRunADoRunRun$2.apply(Runner.scala:1430)
[scalatest] at org.scalatest.tools.Runner$$anonfun$doRunRunRunADoRunRun$2.apply(Runner.scala:1427)
[scalatest] at scala.List.foreach(List.scala:834)
[scalatest] at org.scalatest.tools.Runner$.doRunRunRunADoRunRun(Runner.scala:1427)
[scalatest] at org.scalatest.tools.RunnerJFrame$RunnerThread$$anonfun$run$1.apply(RunnerJFrame.scala:1352)
[scalatest] at org.scalatest.tools.RunnerJFrame$RunnerThread$$anonfun$run$1.apply(RunnerJFrame.scala:1350)
[scalatest] at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1471)
[scalatest] at org.scalatest.tools.RunnerJFrame$RunnerThread.run(RunnerJFrame.scala:1349)
Or show a truncated one like this:
[scalatest] TEST FAILED - JUnitTestCaseSuite: testSomething(org.scalatestexamples.junit.JUnitTestCaseSuite) (JUnitTestCaseSuite.scala:22)
[scalatest] hi there
[scalatest] org.scalatest.junit.JUnitTestFailedError: hi there
[scalatest] ...
[scalatest] at org.scalatestexamples.junit.JUnitTestCaseSuite.testSomething(JUnitTestCaseSuite.scala:22)
[scalatest] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
[scalatest] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
[scalatest] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
[scalatest] at java.lang.reflect.Method.invoke(Method.java:585)
[scalatest] at junit.framework.TestCase.runTest(TestCase.java:168)
[scalatest] at junit.framework.TestCase.runBare(TestCase.java:134)
[scalatest] ...
If F is specified for the reporter, then show the full stack trace (or if it is not a StackDepth). But
if a StackDepth and no F specified, then show the truncated form.
Now want to change from:
- should do something interesting *** FAILED *** (<console>:18) (0 milliseconds)
org.scalatest.TestFailedException: 2 did not equal 3
To:
- should do something interesting *** FAILED *** (0 milliseconds)
2 did not equal 3 (<console>:18)
org.scalatest.TestFailedException:
The second line would only be printed out if there was an exception. That way
when I add noStacks option, I get:
- should do something interesting *** FAILED *** (0 milliseconds)
2 did not equal 3 (<console>:18)
Or for a prop check get:
- should do something interesting *** FAILED *** (0 milliseconds)
Property check failed. (InfoInsideTestFiredAfterTestProp.scala:24)
Message: 2 was not less than 1
Location: InfoInsideTestFiredAfterTestProp.scala:27
Occurred at table row 0 (zero based, not counting headings), which had values ( // This should be "had value", without the s
suite = org.scalatest.InfoInsideTestFiredAfterTestProp$$anon$3@18a4edc4
)
Easiest thing is if the exception message just printed this out. Then StringReporter would just print the message always,
and not print it after the outermost exception
org.scalatest.prop.TableDrivenPropertyCheckFailedException:
...
And does print it out after the subsequent ones:
org.scalatest.TestFailedException: 2 did not equal 3
And it would not need to put the line number there. It would already be in the message. It would use the message sent with
the event. Message should just be the throwable's message, or "<exception class> was thrown" Then it is easy. Always
use the message from the event.
org.scalatest.prop.TableDrivenPropertyCheckFailedException: TestFailedException (included as this exception's cause) was thrown during property evaluation.
[scalatest] Message:
[scalatest] Location: InfoInsideTestFiredAfterTestProp.scala:27
[scalatest] Occurred at table row 0 (zero based, not counting headings), which had values (
[scalatest] suite = org.scalatest.InfoInsideTestFiredAfterTestProp$$anon$3@18a4edc4
[scalatest] )
*/
// Called for TestFailed, InfoProvided (because it can have a throwable in it), SuiteAborted, and RunAborted
private def stringsToPrintOnError(noteResourceName: String, errorResourceName: String, message: String, throwable: Option[Throwable],
formatter: Option[Formatter], suiteName: Option[String], testName: Option[String], duration: Option[Long]): List[String] = {
def genFormattedText = {
formatter match {
case Some(IndentedText(formattedText, _, _)) =>
Resources("specTextAndNote", formattedText, Resources(noteResourceName))
case _ =>
genUnformattedText
}
}
def genUnformattedText = {
// Deny MotionToSuppress directives in error events, because error info needs to be seen by users
suiteName match {
case Some(sn) =>
testName match {
case Some(tn) => Resources(errorResourceName, sn + ": " + tn + ": " + message)
case None => Resources(errorResourceName, sn + ": " + message)
}
// Should not get here with built-in ScalaTest stuff, but custom stuff could get here.
case None => Resources(errorResourceName, Resources("noNameSpecified"))
}
}
val stringToPrint =
if (presentUnformatted) genUnformattedText
else genFormattedText
val stringToPrintWithPossibleDuration =
duration match {
case Some(milliseconds) =>
if (presentAllDurations)
Resources("withDuration", stringToPrint, makeDurationString(milliseconds))
else
stringToPrint
case None => stringToPrint
}
// If there's a message, put it on the next line, indented two spaces
val possiblyEmptyMessage = Reporter.messageOrThrowablesDetailMessage(message, throwable)
val possiblyEmptyMessageWithPossibleLineNumber =
throwable match {
case Some(e: PropertyCheckFailedException) => possiblyEmptyMessage // PCFEs already include the line number
case Some(e: StackDepth) => withPossibleLineNumber(possiblyEmptyMessage, throwable) // Show it in the stack depth case
case _ => "" // Don't show it in the non-stack depth case. It will be shown after the exception class name and colon.
}
// The whiteSpace is just used for printing out stack traces, etc., things that go after a test name. The formatted
// text for test names actually goes over to the left once in a sense, to make room for the icon. So if the indentation
// level is 3 for example, the "- " for that test's formatted text is really indented 2 times (or four spaces: " ")
// So that's why normally the indentation level times two spaces should be the white space. But at the top level (indentation
// level of 0), the icon also has to go at 0 (because subtracting one would put it at -1), so in that case the white space
// should be two spaces (or 1 level of indentation).
val whiteSpace =
formatter match {
case Some(IndentedText(_, _, indentationLevel)) if (indentationLevel != 0) => indentation(indentationLevel)
case _ => indentation(1)
}
def getStackTrace(throwable: Option[Throwable]): List[String] =
throwable match {
case Some(throwable) =>
def stackTrace(throwable: Throwable, isCause: Boolean): List[String] = {
val className = throwable.getClass.getName
val labeledClassName = if (isCause) Resources("DetailsCause") + ": " + className else className
// Only show the : message if a cause, because first one will have its message printed out
// Or if it is a non-StackDepth exception, because if they throw Exception with no message, the
// message was coming out as "java.lang.Exception" then on the next line it repeated it. In the
// case of no exception message, I think it looks best to just say the class name followed by a colon
// and nothing else.
val colonMessageOrJustColon =
if ((throwable.getMessage != null && !throwable.getMessage.trim.isEmpty) && (isCause || !(throwable.isInstanceOf[StackDepth])))
": " + throwable.getMessage.trim
else
":"
val labeledClassNameWithMessage =
whiteSpace + labeledClassName + colonMessageOrJustColon
if (presentShortStackTraces || presentFullStackTraces || !(throwable.isInstanceOf[StackDepth])) {
// Indent each stack trace item two spaces, and prepend that with an "at "
val stackTraceElements = throwable.getStackTrace.toList map { whiteSpace + "at " + _.toString }
val cause = throwable.getCause
val stackTraceThisThrowable = labeledClassNameWithMessage :: stackTraceElements
if (presentFullStackTraces) {
if (cause == null)
stackTraceThisThrowable
else
stackTraceThisThrowable ::: stackTrace(cause, true) // Not tail recursive, but shouldn't be too deep
}
else {
            // The extra element skipped by drop(1) or drop(stackDepth + 1) is the labeledClassNameWithMessage
val stackTraceThisThrowableTruncated =
throwable match {
case e: Throwable with StackDepth =>
val stackDepth = e.failedCodeStackDepth
stackTraceThisThrowable.head :: (whiteSpace + "...") :: stackTraceThisThrowable.drop(stackDepth + 1).take(7) ::: List(whiteSpace + "...")
case _ => // In case of IAE or what not, show top 10 stack frames
stackTraceThisThrowable.head :: stackTraceThisThrowable.drop(1).take(10) ::: List(whiteSpace + "...")
}
if (cause == null)
stackTraceThisThrowableTruncated
else
stackTraceThisThrowableTruncated ::: stackTrace(cause, true) // Not tail recursive, but shouldn't be too deep
}
}
else
Nil
}
stackTrace(throwable, false)
case None => List()
}
if (possiblyEmptyMessageWithPossibleLineNumber.isEmpty)
stringToPrintWithPossibleDuration :: getStackTrace(throwable)
else
      stringToPrintWithPossibleDuration :: possiblyEmptyMessageWithPossibleLineNumber.split("\n").toList.map(whiteSpace + _) ::: getStackTrace(throwable)
}
private def stringToPrintWhenNoError(resourceName: String, formatter: Option[Formatter], suiteName: String, testName: Option[String], message: Option[String]): Option[String] =
stringToPrintWhenNoError(resourceName, formatter, suiteName, testName, None, message)
private def stringToPrintWhenNoError(resourceName: String, formatter: Option[Formatter], suiteName: String, testName: Option[String], duration: Option[Long], message: Option[String]): Option[String] = {
def genUnformattedText = {
val arg =
testName match {
case Some(tn) => suiteName + ": " + tn
case None => suiteName
}
val messageText =
message match {
case Some(text) => ": " + text
case None => ""
}
val unformattedText = Resources(resourceName, arg + messageText)
duration match {
case Some(milliseconds) =>
if (presentAllDurations)
Some(Resources("withDuration", unformattedText, makeDurationString(milliseconds)))
else
Some(unformattedText)
case None => Some(unformattedText)
}
}
def genFormattedText = {
formatter match {
case Some(IndentedText(formattedText, _, _)) =>
duration match {
case Some(milliseconds) =>
if (presentAllDurations)
Some(Resources("withDuration", formattedText, makeDurationString(milliseconds)))
else
Some(formattedText)
case None => Some(formattedText)
}
case Some(MotionToSuppress) => None
case _ => genUnformattedText
}
}
if (presentUnformatted) genUnformattedText
else genFormattedText
}
def apply(event: Event) {
event match {
case _: DiscoveryStarting =>
val stringToPrint =
stringToPrintWhenNoError("discoveryStarting", None, "", None, None)
stringToPrint match {
case Some(string) => printPossiblyInColor(string, ansiCyan)
case None =>
}
case DiscoveryCompleted(_, duration, _, _) =>
val stringToPrint =
duration match {
case Some(milliseconds) =>
Resources("discoveryCompletedIn", makeDurationString(milliseconds))
case None =>
Resources("discoveryCompleted")
}
printPossiblyInColor(stringToPrint, ansiCyan)
case RunStarting(ordinal, testCount, configMap, formatter, location, payload, threadName, timeStamp) =>
if (testCount < 0)
throw new IllegalArgumentException
val string = Resources("runStarting", testCount.toString)
printPossiblyInColor(string, ansiCyan)
case RunCompleted(ordinal, duration, summary, formatter, location, payload, threadName, timeStamp) =>
makeFinalReport("runCompleted", duration, summary)
case RunStopped(ordinal, duration, summary, formatter, location, payload, threadName, timeStamp) =>
makeFinalReport("runStopped", duration, summary)
case RunAborted(ordinal, message, throwable, duration, summary, formatter, location, payload, threadName, timeStamp) =>
val lines = stringsToPrintOnError("abortedNote", "runAborted", message, throwable, formatter, None, None, duration)
for (line <- lines) printPossiblyInColor(line, ansiRed)
case SuiteStarting(ordinal, suiteName, suiteId, suiteClassName, formatter, location, rerunnable, payload, threadName, timeStamp) =>
val stringToPrint = stringToPrintWhenNoError("suiteStarting", formatter, suiteName, None, None)
stringToPrint match {
case Some(string) => printPossiblyInColor(string, ansiGreen)
case None =>
}
case SuiteCompleted(ordinal, suiteName, suiteId, suiteClassName, duration, formatter, location, rerunnable, payload, threadName, timeStamp) =>
val stringToPrint = stringToPrintWhenNoError("suiteCompleted", formatter, suiteName, None, duration, None)
stringToPrint match {
case Some(string) => printPossiblyInColor(string, ansiGreen)
case None =>
}
case SuiteAborted(ordinal, message, suiteName, suiteId, suiteClassName, throwable, duration, formatter, location, rerunnable, payload, threadName, timeStamp) =>
val lines = stringsToPrintOnError("abortedNote", "suiteAborted", message, throwable, formatter, Some(suiteName), None, duration)
for (line <- lines) printPossiblyInColor(line, ansiRed)
case TestStarting(ordinal, suiteName, suiteId, suiteClassName, testName, testText, formatter, location, rerunnable, payload, threadName, timeStamp) =>
val stringToPrint = stringToPrintWhenNoError("testStarting", formatter, suiteName, Some(testName), None)
stringToPrint match {
case Some(string) => printPossiblyInColor(string, ansiGreen)
case None =>
}
case TestSucceeded(ordinal, suiteName, suiteId, suiteClassName, testName, testText, recordedEvents, duration, formatter, location, rerunnable, payload, threadName, timeStamp) =>
val stringToPrint = stringToPrintWhenNoError("testSucceeded", formatter, suiteName, Some(testName), duration, None)
stringToPrint match {
case Some(string) => printPossiblyInColor(string, ansiGreen)
case None =>
}
handleRecordedEvents(recordedEvents)
case TestIgnored(ordinal, suiteName, suiteId, suiteClassName, testName, testText, formatter, location, payload, threadName, timeStamp) =>
val stringToPrint =
if (presentUnformatted)
Some(Resources("testIgnored", suiteName + ": " + testName))
else
formatter match {
case Some(IndentedText(formattedText, _, _)) => Some(Resources("specTextAndNote", formattedText, Resources("ignoredNote")))
case Some(MotionToSuppress) => None
case _ => Some(Resources("testIgnored", suiteName + ": " + testName))
}
stringToPrint match {
case Some(string) => printPossiblyInColor(string, ansiYellow)
case None =>
}
case TestFailed(ordinal, message, suiteName, suiteId, suiteClassName, testName, testText, recordedEvents, throwable, duration, formatter, location, rerunnable, payload, threadName, timeStamp) =>
val lines = stringsToPrintOnError("failedNote", "testFailed", message, throwable, formatter, Some(suiteName), Some(testName), duration)
for (line <- lines) printPossiblyInColor(line, ansiRed)
handleRecordedEvents(recordedEvents, ansiRed)
case TestCanceled(ordinal, message, suiteName, suiteId, suiteClassName, testName, testText, recordedEvents, throwable, duration, formatter, location, payload, threadName, timeStamp) =>
val lines = stringsToPrintOnError("canceledNote", "testCanceled", message, throwable, formatter, Some(suiteName), Some(testName), duration)
for (line <- lines) printPossiblyInColor(line, ansiYellow)
handleRecordedEvents(recordedEvents, ansiYellow)
case ipEvent: InfoProvided =>
handleInfoProvided(ipEvent, ansiGreen)
case ScopeOpened(ordinal, message, nameInfo, formatter, location, payload, threadName, timeStamp) =>
val testNameInfo = nameInfo.testName
val stringToPrint = stringToPrintWhenNoError("scopeOpened", formatter, nameInfo.suiteName, nameInfo.testName, Some(message))
stringToPrint match {
case Some(string) => printPossiblyInColor(string, ansiGreen)
case None =>
}
// TODO: Reduce duplication among InfoProvided, ScopeOpened, and ScopeClosed
case ScopeClosed(ordinal, message, nameInfo, formatter, location, payload, threadName, timeStamp) =>
val testNameInfo = nameInfo.testName
val stringToPrint = stringToPrintWhenNoError("scopeClosed", formatter, nameInfo.suiteName, nameInfo.testName, Some(message)) // TODO: I htink I want ot say Scope Closed - + message
stringToPrint match {
case Some(string) => printPossiblyInColor(string, ansiGreen)
case None =>
}
case ScopePending(ordinal, message, nameInfo, formatter, location, payload, threadName, timeStamp) =>
val stringToPrint =
if (presentUnformatted)
Some(Resources("scopePending", nameInfo.suiteName + ": " + message))
else
formatter match {
case Some(IndentedText(formattedText, _, _)) => Some(Resources("specTextAndNote", formattedText, Resources("pendingNote")))
case Some(MotionToSuppress) => None
case _ => Some(Resources("scopePending", nameInfo.suiteName + ": " + message))
}
stringToPrint match {
case Some(string) => printPossiblyInColor(string, ansiYellow)
case None =>
}
case mpEvent: MarkupProvided =>
handleMarkupProvided(mpEvent, ansiGreen)
case TestPending(ordinal, suiteName, suiteId, suiteClassName, testName, testText, recordedEvents, duration, formatter, location, payload, threadName, timeStamp) =>
val stringToPrint =
if (presentUnformatted)
Some(Resources("testPending", suiteName + ": " + testName))
else
formatter match {
case Some(IndentedText(formattedText, _, _)) => Some(Resources("specTextAndNote", formattedText, Resources("pendingNote")))
case Some(MotionToSuppress) => None
case _ => Some(Resources("testPending", suiteName + ": " + testName))
}
stringToPrint match {
case Some(string) => printPossiblyInColor(string, ansiYellow)
case None =>
}
handleRecordedEvents(recordedEvents, ansiYellow)
// case _ => throw new RuntimeException("Unhandled event")
}
}
private def handleInfoProvided(event: InfoProvided, ansiColor: String) {
val (suiteName, testName) =
event.nameInfo match {
case Some(NameInfo(suiteName, _, _, testName)) => (Some(suiteName), testName)
case None => (None, None)
}
val lines = stringsToPrintOnError("infoProvidedNote", "infoProvided", event.message, event.throwable, event.formatter, suiteName, testName, None)
for (line <- lines) printPossiblyInColor(line, ansiColor)
}
private def stringToPrintWhenMarkup(formatter: Option[Formatter],
suiteName: Option[String],
testName: Option[String],
text: String): Option[String] =
{
def genUnformattedText = {
val prefix =
(suiteName, testName) match {
case (None, None) => ""
case (None, Some(tName)) => tName + ": "
case (Some(sName), None) => sName + ": "
case (Some(sName), Some(tName)) => sName + ": " + tName + ": "
}
Some(prefix + text)
}
def genFormattedText = {
formatter match {
case Some(IndentedText(formattedText, _, _)) => Some(formattedText)
case Some(MotionToSuppress) => None
case _ => genUnformattedText
}
}
if (presentUnformatted) genUnformattedText
else genFormattedText
}
private def handleMarkupProvided(event: MarkupProvided, ansiColor: String) {
val (suiteName, testName) =
event.nameInfo match {
case Some(NameInfo(suiteName, _, _, testName)) =>
(Some(suiteName), testName)
case None => (None, None)
}
val stringToPrint =
stringToPrintWhenMarkup(event.formatter, suiteName, testName, event.text)
stringToPrint match {
case Some(string) => printPossiblyInColor(string, ansiColor)
case None =>
}
}
private def handleRecordedEvents(recordedEvents: collection.immutable.IndexedSeq[RecordableEvent], ansiColor: String = ansiGreen) {
recordedEvents.foreach { e =>
e match {
case ipEvent: InfoProvided => handleInfoProvided(ipEvent, ansiColor)
case mpEvent: MarkupProvided => handleMarkupProvided(mpEvent,
ansiColor)
}
}
}
protected def makeFinalReport(resourceName: String, duration: Option[Long], summaryOption: Option[Summary]) {
summaryOption match {
case Some(summary) =>
import summary._
duration match {
case Some(msSinceEpoch) =>
printPossiblyInColor(Resources(resourceName + "In", makeDurationString(msSinceEpoch)), ansiCyan)
case None =>
printPossiblyInColor(Resources(resourceName), ansiCyan)
}
// totalNumberOfTestsRun=Total number of tests run was: {0}
printPossiblyInColor(Resources("totalNumberOfTestsRun", testsCompletedCount.toString), ansiCyan)
if (scopesPendingCount > 0) {
// Suite Summary: completed {0}, aborted {1} Scopes: pending {2}
printPossiblyInColor(Resources("suiteScopeSummary", suitesCompletedCount.toString, suitesAbortedCount.toString, scopesPendingCount.toString), ansiCyan)
}
else {
// Suite Summary: completed {0}, aborted {1}
printPossiblyInColor(Resources("suiteSummary", suitesCompletedCount.toString, suitesAbortedCount.toString), ansiCyan)
}
        // Test Summary: succeeded {0}, failed {1}, canceled {2}, ignored {3}, pending {4}
printPossiblyInColor(Resources("testSummary", testsSucceededCount.toString, testsFailedCount.toString, testsCanceledCount.toString, testsIgnoredCount.toString, testsPendingCount.toString), ansiCyan)
// *** 1 SUITE ABORTED ***
if (suitesAbortedCount == 1)
printPossiblyInColor(Resources("oneSuiteAborted"), ansiRed)
// *** {0} SUITES ABORTED ***
else if (suitesAbortedCount > 1)
printPossiblyInColor(Resources("multipleSuitesAborted", suitesAbortedCount.toString), ansiRed)
// *** 1 TEST FAILED ***
if (testsFailedCount == 1)
printPossiblyInColor(Resources("oneTestFailed"), ansiRed)
// *** {0} TESTS FAILED ***
else if (testsFailedCount > 1)
printPossiblyInColor(Resources("multipleTestsFailed", testsFailedCount.toString), ansiRed)
else if (suitesAbortedCount == 0) // Maybe don't want to say this if the run aborted or stopped because "all"
printPossiblyInColor(Resources("allTestsPassed"), ansiGreen)
case None =>
}
}
// We subtract one from test reports because we add "- " in front, so if one is actually zero, it will come here as -1
// private def indent(s: String, times: Int) = if (times <= 0) s else (" " * times) + s
// Stupid properties file won't let me put spaces at the beginning of a property
// " {0}" comes out as "{0}", so I can't do indenting in a localizable way. For now
// just indent two space to the left. // if (times <= 0) s
// else Resources("indentOnce", indent(s, times - 1))
}
private[scalatest] object StringReporter {
  def countTrailingEOLs(s: String): Int = s.length - s.lastIndexWhere(_ != '\n') - 1
  def countLeadingEOLs(s: String): Int = {
    val idx = s.indexWhere(_ != '\n')
    if (idx != -1) idx else 0
  }
  def colorizeLinesIndividually(text: String, ansiColor: String): String =
    if (text.trim.isEmpty) text
    else {
      ("\n" * countLeadingEOLs(text)) +
        text.split("\n").dropWhile(_.isEmpty).map(ansiColor + _ + ansiReset).mkString("\n") +
        ("\n" * countTrailingEOLs(text))
    }
}
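// Hedged sketch, not part of the original file, illustrating the EOL helpers
// above: leading and trailing newline counts are preserved so that
// colorizeLinesIndividually wraps only the interior lines in ANSI color codes.
// The object name is hypothetical.
private[scalatest] object StringReporterEOLExample {
  import StringReporter._
  assert(countLeadingEOLs("\n\nhi") == 2) // two leading newlines
  assert(countTrailingEOLs("hi\n") == 1)  // one trailing newline
  assert(countLeadingEOLs("hi") == 0)     // no newlines at all
}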
|
svn2github/scalatest
|
src/main/scala/org/scalatest/tools/StringReporter.scala
|
Scala
|
apache-2.0
| 30977
|
package slick.test.jdbc
import scala.language.higherKinds
import slick.testutil._
import com.typesafe.slick.testkit.util.{DBTest, DBTestObject, JdbcTestDB}
object ExecutorTest extends DBTestObject()
@deprecated("Using deprecated .simple API", "3.0")
class ExecutorTest(val tdb: JdbcTestDB) extends DBTest {
import tdb.profile.backend.Database.dynamicSession
import tdb.profile.simple._
def all[E, C[_]](q: Query[_, E, C]) = {
    // statically tests that the implicit conversions can be applied
q.list
q.run
}
}
|
adamkozuch/slick
|
slick-testkit/src/test/scala/slick/test/jdbc/ExecutorTest.scala
|
Scala
|
bsd-2-clause
| 524
|
package lila.mod
import akka.actor._
import com.typesafe.config.Config
import lila.db.dsl.Coll
import lila.security.{ Firewall, UserSpy }
final class Env(
config: Config,
db: lila.db.Env,
hub: lila.hub.Env,
system: ActorSystem,
scheduler: lila.common.Scheduler,
firewall: Firewall,
reportApi: lila.report.ReportApi,
lightUserApi: lila.user.LightUserApi,
userSpy: String => Fu[UserSpy],
securityApi: lila.security.Api,
tournamentApi: lila.tournament.TournamentApi,
simulEnv: lila.simul.Env,
chatApi: lila.chat.ChatApi,
notifyApi: lila.notify.NotifyApi,
historyApi: lila.history.HistoryApi,
rankingApi: lila.user.RankingApi,
relationApi: lila.relation.RelationApi,
userJson: lila.user.JsonView,
emailAddress: lila.security.EmailAddress) {
private object settings {
val CollectionPlayerAssessment = config getString "collection.player_assessment"
val CollectionBoosting = config getString "collection.boosting"
val CollectionModlog = config getString "collection.modlog"
val CollectionGamingHistory = config getString "collection.gaming_history"
val ActorName = config getString "actor.name"
val NbGamesToMark = config getInt "boosting.nb_games_to_mark"
val RatioGamesToMark = config getDouble "boosting.ratio_games_to_mark"
}
import settings._
val ApiKey = config getString "api.key"
private[mod] lazy val logColl = db(CollectionModlog)
lazy val logApi = new ModlogApi(logColl)
private lazy val notifier = new ModNotifier(notifyApi, reportApi)
private lazy val ratingRefund = new RatingRefund(
scheduler = scheduler,
notifier = notifier,
historyApi = historyApi,
rankingApi = rankingApi,
wasUnengined = logApi.wasUnengined)
lazy val api = new ModApi(
logApi = logApi,
userSpy = userSpy,
firewall = firewall,
reporter = hub.actor.report,
lightUserApi = lightUserApi,
notifier = notifier,
refunder = ratingRefund,
lilaBus = system.lilaBus)
private lazy val boosting = new BoostingApi(
modApi = api,
collBoosting = db(CollectionBoosting),
nbGamesToMark = NbGamesToMark,
ratioGamesToMark = RatioGamesToMark)
lazy val assessApi = new AssessApi(
collAssessments = db(CollectionPlayerAssessment),
logApi = logApi,
modApi = api,
reporter = hub.actor.report,
fishnet = hub.actor.fishnet,
userIdsSharingIp = securityApi.userIdsSharingIp)
lazy val gamify = new Gamify(
logColl = logColl,
reportApi = reportApi,
historyColl = db(CollectionGamingHistory))
lazy val publicChat = new PublicChat(chatApi, tournamentApi, simulEnv)
lazy val search = new UserSearch(
securityApi = securityApi,
emailAddress = emailAddress)
lazy val jsonView = new JsonView(
assessApi = assessApi,
relationApi = relationApi,
userJson = userJson)
lazy val userHistory = new UserHistory(
logApi = logApi,
reportApi = reportApi)
// api actor
system.lilaBus.subscribe(system.actorOf(Props(new Actor {
def receive = {
case lila.hub.actorApi.mod.MarkCheater(userId) => api autoAdjust userId
case lila.analyse.actorApi.AnalysisReady(game, analysis) =>
assessApi.onAnalysisReady(game, analysis)
case lila.game.actorApi.FinishGame(game, whiteUserOption, blackUserOption) if !game.aborted =>
(whiteUserOption |@| blackUserOption) apply {
case (whiteUser, blackUser) => boosting.check(game, whiteUser, blackUser) >>
assessApi.onGameReady(game, whiteUser, blackUser)
}
case lila.hub.actorApi.mod.ChatTimeout(mod, user, reason) => logApi.chatTimeout(mod, user, reason)
}
}), name = ActorName), 'finishGame, 'analysisReady)
}
object Env {
lazy val current = "mod" boot new Env(
config = lila.common.PlayApp loadConfig "mod",
db = lila.db.Env.current,
hub = lila.hub.Env.current,
system = lila.common.PlayApp.system,
scheduler = lila.common.PlayApp.scheduler,
firewall = lila.security.Env.current.firewall,
reportApi = lila.report.Env.current.api,
userSpy = lila.security.Env.current.userSpy,
lightUserApi = lila.user.Env.current.lightUserApi,
securityApi = lila.security.Env.current.api,
tournamentApi = lila.tournament.Env.current.api,
simulEnv = lila.simul.Env.current,
chatApi = lila.chat.Env.current.api,
notifyApi = lila.notify.Env.current.api,
historyApi = lila.history.Env.current.api,
rankingApi = lila.user.Env.current.rankingApi,
relationApi = lila.relation.Env.current.api,
userJson = lila.user.Env.current.jsonView,
emailAddress = lila.security.Env.current.emailAddress)
}
|
clarkerubber/lila
|
modules/mod/src/main/Env.scala
|
Scala
|
agpl-3.0
| 4682
|
package org.jetbrains.plugins.scala
package lang.refactoring.changeSignature
import com.intellij.psi._
import com.intellij.refactoring.changeSignature._
import com.intellij.usageView.UsageInfo
import org.jetbrains.plugins.scala.codeInsight.intention.types.{AddOnlyStrategy, AddOrRemoveStrategy}
import org.jetbrains.plugins.scala.extensions.{ChildOf, ElementText, PsiElementExt, PsiTypeExt}
import org.jetbrains.plugins.scala.lang.psi.api.base.ScReferenceElement
import org.jetbrains.plugins.scala.lang.psi.api.base.types.ScTypeElement
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.statements._
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.ScClassParameter
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScClass
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.{ScModifierListOwner, ScNamedElement}
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory._
import org.jetbrains.plugins.scala.lang.psi.types._
import org.jetbrains.plugins.scala.lang.psi.types.api.{FunctionType, JavaArrayType}
import org.jetbrains.plugins.scala.lang.psi.{ScalaPsiUtil, TypeAdjuster}
import org.jetbrains.plugins.scala.lang.refactoring._
import org.jetbrains.plugins.scala.lang.refactoring.changeSignature.changeInfo.ScalaChangeInfo
import org.jetbrains.plugins.scala.lang.refactoring.extractMethod.ScalaExtractMethodUtils
import org.jetbrains.plugins.scala.lang.refactoring.namesSuggester.NameSuggester
import org.jetbrains.plugins.scala.lang.refactoring.rename.ScalaRenameUtil
import org.jetbrains.plugins.scala.project.ProjectContext
import scala.collection.mutable.ListBuffer
/**
* Nikolay.Tropin
* 2014-08-13
*/
private[changeSignature] trait ScalaChangeSignatureUsageHandler {
protected def handleChangedName(change: ChangeInfo, usage: UsageInfo): Unit = {
if (!change.isNameChanged) return
val nameId = usage match {
case ScalaNamedElementUsageInfo(scUsage) => scUsage.namedElement.nameId
case MethodCallUsageInfo(ref, _) => ref.nameId
case RefExpressionUsage(r) => r.nameId
case InfixExprUsageInfo(i) => i.operation.nameId
case PostfixExprUsageInfo(p) => p.operation.nameId
case AnonFunUsageInfo(_, ref) => ref.nameId
case ImportUsageInfo(ref) => ref.nameId
case _ => null
}
nameId match {
case null =>
case ChildOf(ref: ScReferenceElement) if ScalaRenameUtil.isAliased(ref) =>
case _ =>
val newName = change.getNewName
replaceNameId(nameId, newName)
}
}
protected def handleVisibility(change: ChangeInfo, usage: ScalaNamedElementUsageInfo): Unit = {
val visibility = change match {
case j: JavaChangeInfo => j.getNewVisibility
case _ => return
}
val member = ScalaPsiUtil.nameContext(usage.namedElement) match {
case cl: ScClass => cl.constructor.getOrElse(return)
case m: ScModifierListOwner => m
case _ => return
}
ScalaPsiUtil.changeVisibility(member, visibility)
}
protected def handleReturnTypeChange(change: ChangeInfo, usage: ScalaNamedElementUsageInfo): Unit = {
def addType(element: ScNamedElement, oldTypeElem: Option[ScTypeElement], substType: ScType): Unit = {
oldTypeElem match {
case Some(te) =>
val replaced = te.replace(createTypeElementFromText(substType.canonicalCodeText)(element.getManager))
TypeAdjuster.markToAdjust(replaced)
case None =>
val (context, anchor) = ScalaPsiUtil.nameContext(element) match {
case f: ScFunction => (f, f.paramClauses)
case p: ScPatternDefinition => (p, p.pList)
case v: ScVariableDefinition => (v, v.pList)
case cp: ScClassParameter => (cp.getParent, cp)
case ctx => (ctx, ctx.getLastChild)
}
new AddOnlyStrategy().addTypeAnnotation(substType, context, anchor)
}
}
val element = usage.namedElement
val oldTypeElem = element match {
case fun: ScFunction => fun.returnTypeElement
case ScalaPsiUtil.inNameContext(pd: ScPatternDefinition) => pd.typeElement
case ScalaPsiUtil.inNameContext(vd: ScVariableDefinition) => vd.typeElement
case cp: ScClassParameter => cp.typeElement
case _ => None
}
val addTypeAnnotationOption = change match {
case scalaInfo: ScalaChangeInfo => scalaInfo.addTypeAnnotation
case _ => Some(true)
}
UsageUtil.returnType(change, usage).foreach { substType =>
if (!change.isReturnTypeChanged)
addTypeAnnotationOption.foreach { addTypeAnnotation =>
if (addTypeAnnotation) {
if (oldTypeElem.isEmpty) addType(element, None, substType)
} else {
oldTypeElem.foreach(AddOrRemoveStrategy.removeTypeAnnotation)
}
}
else
addType(element, oldTypeElem, substType)
}
}
protected def handleParametersUsage(change: ChangeInfo, usage: ParameterUsageInfo): Unit = {
if (change.isParameterNamesChanged || change.isParameterSetOrOrderChanged) {
replaceNameId(usage.ref.getElement, usage.newName)
}
}
def handleAnonFunUsage(change: ChangeInfo, usage: AnonFunUsageInfo): Unit = {
if (!change.isParameterSetOrOrderChanged) return
val jChange = change match {
case j: JavaChangeInfo => j
case _ => return
}
val expr = usage.expr
val paramTypes = expr.`type`() match {
case Right(FunctionType(_, pTypes)) => pTypes
case _ => Seq.empty
}
val (names, exprText) = expr match {
case inv: MethodInvocation =>
var paramsBuf = Seq[String]()
for {
(arg, param) <- inv.matchedParameters.sortBy(_._2.index)
if ScUnderScoreSectionUtil.isUnderscore(arg)
} {
val paramName =
if (!param.name.isEmpty) param.name
else param.nameInCode match {
case Some(n) => n
case None => NameSuggester.suggestNamesByType(param.paramType).head
}
paramsBuf = paramsBuf :+ paramName
arg.replaceExpression(createExpressionFromText(paramName)(arg.getManager), removeParenthesis = true)
}
(paramsBuf, inv.getText)
case _ =>
val paramNames = jChange.getOldParameterNames.toSeq
val refText = usage.ref.getText
val argText = paramNames.mkString("(", ", ", ")")
(paramNames, s"$refText$argText")
}
val params =
if (paramTypes.size == names.size)
names.zip(paramTypes).map {
case (name, tpe) =>
ScalaExtractMethodUtils.typedName(name, tpe.canonicalCodeText, expr.getProject)
}
else names
val clause = params.mkString("(", ", ", ")")
val newFunExprText = s"$clause => $exprText"
val replaced = expr.replaceExpression(createExpressionFromText(newFunExprText)(expr.getManager), removeParenthesis = true)
.asInstanceOf[ScFunctionExpr]
TypeAdjuster.markToAdjust(replaced)
replaced.result match {
case Some(infix: ScInfixExpr) =>
handleInfixUsage(change, InfixExprUsageInfo(infix))
case Some(mc @ ScMethodCall(ref: ScReferenceExpression, _)) =>
handleMethodCallUsagesArguments(change, MethodCallUsageInfo(ref, mc))
case _ =>
}
}
protected def handleChangedParameters(change: ChangeInfo, usage: ScalaNamedElementUsageInfo): Unit = {
if (!change.isParameterNamesChanged && !change.isParameterSetOrOrderChanged && !change.isParameterTypesChanged) return
val named = usage.namedElement
val keywordToChange = named match {
case _: ScFunction | _: ScClass => None
case ScalaPsiUtil.inNameContext(pd: ScPatternDefinition) if pd.isSimple => Some(pd.keywordToken)
case ScalaPsiUtil.inNameContext(vd: ScVariableDefinition) if vd.isSimple => Some(vd.keywordToken)
case _ => return
}
keywordToChange.foreach { kw =>
val defKeyword = createMethodFromText("def foo {}")(named.getManager).children.find(_.getText == "def").get
if (change.getNewParameters.nonEmpty) kw.replace(defKeyword)
}
val paramsText = parameterListText(change, usage)
val nameId = named.nameId
val newClauses = named match {
case cl: ScClass =>
createClassParamClausesWithContext(paramsText, cl)
case _ =>
createParamClausesWithContext(paramsText, named, nameId)
}
val result = usage.paramClauses match {
case Some(p) => p.replace(newClauses)
case None => nameId.getParent.addAfter(newClauses, nameId)
}
TypeAdjuster.markToAdjust(result)
}
protected def handleUsageArguments(change: ChangeInfo, usage: UsageInfo): Unit = {
usage match {
case c: ConstructorUsageInfo => handleConstructorUsageArguments(change, c)
case m: MethodCallUsageInfo => handleMethodCallUsagesArguments(change, m)
case r: RefExpressionUsage => handleRefUsageArguments(change, r)
case i: InfixExprUsageInfo => handleInfixUsage(change, i)
case p: PostfixExprUsageInfo => handlePostfixUsage(change, p)
case _ =>
}
}
protected def handleInfixUsage(change: ChangeInfo, usage: InfixExprUsageInfo): Unit = {
val infix = usage.infix
val ScInfixExpr.withAssoc(ElementText(qualText), operation, argument) = infix
if (change.getNewParameters.length != 1) {
argument match {
case t: ScTuple if !hasSeveralClauses(change) =>
val tupleText = argsText(change, usage)
val newTuple = createExpressionWithContextFromText(tupleText, infix, t)
t.replaceExpression(newTuple, removeParenthesis = false)
case _ =>
val newCallText = s"$qualText.${operation.refName}${argsText(change, usage)}"
val methodCall = createExpressionWithContextFromText(newCallText, infix.getContext, infix)
infix.replaceExpression(methodCall, removeParenthesis = true)
}
} else {
val argText = arguments(change, usage).headOption match {
case Some(Seq(text)) if text.trim.isEmpty => "()"
case Some(Seq(text)) => text
case _ => "()"
}
val expr = createExpressionWithContextFromText(argText, infix, argument)
argument.replaceExpression(expr, removeParenthesis = true)
}
}
def handleConstructorUsageArguments(change: ChangeInfo, usage: ConstructorUsageInfo): Unit = {
val constr = usage.constr
val typeElem = constr.typeElement
val text = typeElem.getText + argsText(change, usage)
val newConstr = createConstructorFromText(text, constr.getContext, constr)
constr.replace(newConstr)
}
protected def handleRefUsageArguments(change: ChangeInfo, usage: RefExpressionUsage): Unit = {
if (change.getNewParameters.isEmpty) return
val ref = usage.refExpr
val text = ref.getText + argsText(change, usage)
val call = createExpressionWithContextFromText(text, ref.getContext, ref)
ref.replaceExpression(call, removeParenthesis = true)
}
protected def handlePostfixUsage(change: ChangeInfo, usage: PostfixExprUsageInfo): Unit = {
if (change.getNewParameters.isEmpty) return
val postfix = usage.postfix
val qualRef = createEquivQualifiedReference(postfix)
val text = qualRef.getText + argsText(change, usage)
val call = createExpressionWithContextFromText(text, postfix.getContext, postfix)
postfix.replaceExpression(call, removeParenthesis = true)
}
protected def handleMethodCallUsagesArguments(change: ChangeInfo, usage: MethodCallUsageInfo): Unit = {
val call = usage.call
val newText = usage.ref.getText + argsText(change, usage)
val newCall = createExpressionWithContextFromText(newText, call.getContext, call)
call.replace(newCall)
}
private def arguments(change: ChangeInfo, methodUsage: MethodUsageInfo): Seq[Seq[String]] = {
if (change.getNewParameters.isEmpty) return Seq.empty
val isAddDefault = change match {
case c: ScalaChangeInfo => c.isAddDefaultArgs
case c: JavaChangeInfo => c.isGenerateDelegate
case _ => true
}
val manager = change.getMethod.getManager
val oldArgsInfo = methodUsage.argsInfo
def nonVarargArgs(clause: Seq[ParameterInfo]) = {
var needNamed = false
val buffer = new ListBuffer[String]
for {
(param, idx) <- clause.zipWithIndex
if !isRepeated(param)
} {
newArgumentExpression(oldArgsInfo, param, manager, isAddDefault, needNamed) match {
case Some(text) =>
buffer += text
if (text.contains("=") && idx > buffer.size - 1) needNamed = true
case None => needNamed = true
}
}
buffer
}
def varargsExprs(clause: Seq[ParameterInfo]): Seq[String] = {
val param = clause.last
param match {
case s: ScalaParameterInfo if s.isRepeatedParameter =>
case j: JavaParameterInfo if j.isVarargType =>
case _ => return Seq.empty
}
val oldIndex = param.getOldIndex
change match {
case jChangeInfo: JavaChangeInfo =>
if (oldIndex < 0) {
val text = param.getDefaultValue
if (text != "") Seq(text)
else Seq.empty
}
else {
val (argExprs, wasNamed) = oldArgsInfo.byOldParameterIndex.get(oldIndex) match {
case Some(Seq(ScAssignStmt(_, Some(expr)))) => (Seq(expr), true)
case Some(seq) => (seq, false)
case _ => return Seq.empty
}
if (jChangeInfo.isArrayToVarargs) {
argExprs match {
case Seq(ScMethodCall(ElementText("Array"), arrayArgs)) => arrayArgs.map(_.getText)
case Seq(expr) =>
val typedText = ScalaExtractMethodUtils.typedName(expr.getText, "_*", expr.getProject, byName = false)
val naming = if (wasNamed) param.getName + " = " else ""
val text = naming + typedText
Seq(text)
}
}
else argExprs.map(_.getText)
}
case _ => Seq.empty
}
}
def toArgs(clause: Seq[ParameterInfo]) = nonVarargArgs(clause) ++: varargsExprs(clause)
val clauses = change match {
case sc: ScalaChangeInfo => sc.newParams.filter(_.nonEmpty)
case _ => Seq(change.getNewParameters.toSeq)
}
clauses.map(toArgs)
}
def argsText(change: ChangeInfo, methodUsage: MethodUsageInfo): String = {
val args = arguments(change, methodUsage)
if (args.isEmpty && !methodUsage.isInstanceOf[RefExpressionUsage])
"()"
else
args.map(_.mkString("(", ", ", ")")).mkString
}
private def newArgumentExpression(argsInfo: OldArgsInfo,
newParam: ParameterInfo,
manager: PsiManager,
addDefaultArg: Boolean,
named: Boolean): Option[String] = {
val oldIdx = newParam.getOldIndex
if (oldIdx < 0 && addDefaultArg) return None
val default = newParam.getDefaultValue
val withoutName =
if (oldIdx < 0) {
if (default != null && !default.isEmpty) default else ""
}
else {
argsInfo.byOldParameterIndex.get(oldIdx) match {
case None => return None
case Some(seq) if seq.size > 1 => return None
case Some(Seq(assignStmt: ScAssignStmt)) => return Some(assignStmt.getText)
case Some(Seq(expr)) => expr.getText
}
}
val argText = if (named) s"${newParam.getName} = $withoutName" else withoutName
Some(argText)
}
private def replaceNameId(elem: PsiElement, newName: String): Unit = {
implicit val ctx: ProjectContext = elem
elem match {
case scRef: ScReferenceElement =>
val newId = createIdentifier(newName).getPsi
scRef.nameId.replace(newId)
case jRef: PsiReferenceExpression =>
jRef.getReferenceNameElement match {
case nameId: PsiIdentifier =>
val factory: PsiElementFactory = JavaPsiFacade.getInstance(jRef.getProject).getElementFactory
val newNameIdentifier: PsiIdentifier = factory.createIdentifier(newName)
nameId.replace(newNameIdentifier)
case _ =>
}
case _ =>
elem.replace(createIdentifier(newName).getPsi)
}
}
private def parameterListText(change: ChangeInfo, usage: ScalaNamedElementUsageInfo): String = {
implicit val project: ProjectContext = change.getMethod.getProject
def paramType(paramInfo: ParameterInfo) = {
val method = change.getMethod
paramInfo match {
case sInfo: ScalaParameterInfo =>
val text = UsageUtil.substitutor(usage).subst(sInfo.scType).canonicalCodeText
val `=> ` = if (sInfo.isByName) ScalaPsiUtil.functionArrow(method.getProject) + " " else ""
val `*` = if (sInfo.isRepeatedParameter) "*" else ""
`=> ` + text + `*`
case jInfo: JavaParameterInfo =>
val javaType = jInfo.createType(method, method.getManager)
val scType = UsageUtil.substitutor(usage).subst(javaType.toScType())
(scType, javaType) match {
case (JavaArrayType(argument), _: PsiEllipsisType) => argument.canonicalCodeText + "*"
case _ => scType.canonicalCodeText
}
case info => info.getTypeText
}
}
def scalaDefaultValue(paramInfo: ParameterInfo): Option[String] = {
val oldIdx = paramInfo.getOldIndex
if (oldIdx >= 0) usage.defaultValues(oldIdx)
else change match {
case sc: ScalaChangeInfo if !sc.function.isConstructor && sc.function != usage.namedElement => None
case sc: ScalaChangeInfo if sc.isAddDefaultArgs =>
paramInfo.getDefaultValue match {
case "" | null => Some(" ")
case s => Some(s)
}
case _ => None
}
}
def newParamName(p: ParameterInfo) = {
val oldIdx = p.getOldIndex
change match {
case jc: JavaChangeInfo if oldIdx >= 0 =>
val oldNameOfChanged = jc.getOldParameterNames()(oldIdx)
val oldNameOfCurrent = usage.parameters(oldIdx).name
if (oldNameOfChanged != oldNameOfCurrent) oldNameOfCurrent
else p.getName
case _ => p.getName
}
}
def paramText(p: ParameterInfo) = {
val typedName = ScalaExtractMethodUtils.typedName(newParamName(p), paramType(p), project, byName = false)
val default = scalaDefaultValue(p).fold("")(" = " + _)
val keywordsAndAnnots = p match {
case spi: ScalaParameterInfo => spi.keywordsAndAnnotations
case _ => ""
}
keywordsAndAnnots + typedName + default
}
change match {
case sc: ScalaChangeInfo =>
sc.newParams.map(cl => cl.map(paramText).mkString("(", ", ", ")")).mkString
case _ => change.getNewParameters.toSeq.map(paramText).mkString("(", ", ", ")")
}
}
private def hasSeveralClauses(change: ChangeInfo): Boolean = {
change match {
case sc: ScalaChangeInfo => sc.newParams.size > 1
case _ => false
}
}
private def isRepeated(p: ParameterInfo) = p match {
case p: ScalaParameterInfo => p.isRepeatedParameter
case p: JavaParameterInfo => p.isVarargType
case _ => false
}
}
|
jastice/intellij-scala
|
scala/scala-impl/src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/ScalaChangeSignatureUsageHandler.scala
|
Scala
|
apache-2.0
| 19371
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.mllib.feature
import java.lang.{Iterable => JavaIterable}
import scala.collection.JavaConverters._
import scala.collection.mutable
import com.github.fommil.netlib.BLAS.{getInstance => blas}
import org.json4s.DefaultFormats
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods._
import org.apache.spark.SparkContext
import org.apache.spark.annotation.Since
import org.apache.spark.api.java.JavaRDD
import org.apache.spark.internal.Logging
import org.apache.spark.mllib.linalg.{Vector, Vectors}
import org.apache.spark.mllib.util.{Loader, Saveable}
import org.apache.spark.rdd._
import org.apache.spark.sql.SparkSession
import org.apache.spark.util.BoundedPriorityQueue
import org.apache.spark.util.Utils
import org.apache.spark.util.random.XORShiftRandom
/**
* Entry in vocabulary
*/
private case class VocabWord(
var word: String,
var cn: Int,
var point: Array[Int],
var code: Array[Int],
var codeLen: Int
)
/**
* Word2Vec creates vector representation of words in a text corpus.
* The algorithm first constructs a vocabulary from the corpus
* and then learns vector representation of words in the vocabulary.
* The vector representation can be used as features in
* natural language processing and machine learning algorithms.
*
 * We use the skip-gram model in our implementation, with the hierarchical
 * softmax method to train it. The variable names in the implementation
 * match those of the original C implementation.
*
* For original C implementation, see https://code.google.com/p/word2vec/
* For research papers, see
* Efficient Estimation of Word Representations in Vector Space
* and
* Distributed Representations of Words and Phrases and their Compositionality.
*/
@Since("1.1.0")
class Word2Vec extends Serializable with Logging {
private var vectorSize = 100
private var learningRate = 0.025
private var numPartitions = 1
private var numIterations = 1
private var seed = Utils.random.nextLong()
private var minCount = 5
private var maxSentenceLength = 1000
/**
* Sets the maximum length (in words) of each sentence in the input data.
* Any sentence longer than this threshold will be divided into chunks of
* up to `maxSentenceLength` size (default: 1000)
*/
@Since("2.0.0")
def setMaxSentenceLength(maxSentenceLength: Int): this.type = {
require(maxSentenceLength > 0,
s"Maximum length of sentences must be positive but got ${maxSentenceLength}")
this.maxSentenceLength = maxSentenceLength
this
}
/**
* Sets vector size (default: 100).
*/
@Since("1.1.0")
def setVectorSize(vectorSize: Int): this.type = {
require(vectorSize > 0,
s"vector size must be positive but got ${vectorSize}")
this.vectorSize = vectorSize
this
}
/**
* Sets initial learning rate (default: 0.025).
*/
@Since("1.1.0")
def setLearningRate(learningRate: Double): this.type = {
require(learningRate > 0,
s"Initial learning rate must be positive but got ${learningRate}")
this.learningRate = learningRate
this
}
/**
* Sets number of partitions (default: 1). Use a small number for accuracy.
*/
@Since("1.1.0")
def setNumPartitions(numPartitions: Int): this.type = {
require(numPartitions > 0,
s"Number of partitions must be positive but got ${numPartitions}")
this.numPartitions = numPartitions
this
}
/**
* Sets number of iterations (default: 1), which should be smaller than or equal to number of
* partitions.
*/
@Since("1.1.0")
def setNumIterations(numIterations: Int): this.type = {
require(numIterations >= 0,
s"Number of iterations must be nonnegative but got ${numIterations}")
this.numIterations = numIterations
this
}
/**
* Sets random seed (default: a random long integer).
*/
@Since("1.1.0")
def setSeed(seed: Long): this.type = {
this.seed = seed
this
}
/**
* Sets the window of words (default: 5)
*/
@Since("1.6.0")
def setWindowSize(window: Int): this.type = {
require(window > 0,
s"Window of words must be positive but got ${window}")
this.window = window
this
}
/**
* Sets minCount, the minimum number of times a token must appear to be included in the word2vec
* model's vocabulary (default: 5).
*/
@Since("1.3.0")
def setMinCount(minCount: Int): this.type = {
require(minCount >= 0,
s"Minimum number of times must be nonnegative but got ${minCount}")
this.minCount = minCount
this
}
private val EXP_TABLE_SIZE = 1000
private val MAX_EXP = 6
private val MAX_CODE_LENGTH = 40
/** context words from [-window, window] */
private var window = 5
private var trainWordsCount = 0L
private var vocabSize = 0
@transient private var vocab: Array[VocabWord] = null
@transient private var vocabHash = mutable.HashMap.empty[String, Int]
private def learnVocab[S <: Iterable[String]](dataset: RDD[S]): Unit = {
val words = dataset.flatMap(x => x)
vocab = words.map(w => (w, 1))
.reduceByKey(_ + _)
.filter(_._2 >= minCount)
.map(x => VocabWord(
x._1,
x._2,
new Array[Int](MAX_CODE_LENGTH),
new Array[Int](MAX_CODE_LENGTH),
0))
.collect()
.sortWith((a, b) => a.cn > b.cn)
vocabSize = vocab.length
require(vocabSize > 0, "The vocabulary size should be > 0. You may need to check " +
"the setting of minCount, which could be large enough to remove all your words in sentences.")
var a = 0
while (a < vocabSize) {
vocabHash += vocab(a).word -> a
trainWordsCount += vocab(a).cn
a += 1
}
logInfo(s"vocabSize = $vocabSize, trainWordsCount = $trainWordsCount")
}
private def createExpTable(): Array[Float] = {
val expTable = new Array[Float](EXP_TABLE_SIZE)
var i = 0
while (i < EXP_TABLE_SIZE) {
val tmp = math.exp((2.0 * i / EXP_TABLE_SIZE - 1.0) * MAX_EXP)
expTable(i) = (tmp / (tmp + 1.0)).toFloat
i += 1
}
expTable
}
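  // Hedged sketch, an assumption rather than part of the Spark source: the
  // lookup that the training loop below performs against expTable, factored
  // out here for clarity. The helper name is hypothetical; callers must first
  // check that f lies strictly between -MAX_EXP and MAX_EXP.
  private def sigmoidFromTable(expTable: Array[Float], f: Float): Float = {
    val ind = ((f + MAX_EXP) * (EXP_TABLE_SIZE / MAX_EXP / 2.0)).toInt
    expTable(ind)
  }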
private def createBinaryTree(): Unit = {
val count = new Array[Long](vocabSize * 2 + 1)
val binary = new Array[Int](vocabSize * 2 + 1)
val parentNode = new Array[Int](vocabSize * 2 + 1)
val code = new Array[Int](MAX_CODE_LENGTH)
val point = new Array[Int](MAX_CODE_LENGTH)
var a = 0
while (a < vocabSize) {
count(a) = vocab(a).cn
a += 1
}
while (a < 2 * vocabSize) {
count(a) = 1e9.toInt
a += 1
}
var pos1 = vocabSize - 1
var pos2 = vocabSize
var min1i = 0
var min2i = 0
a = 0
while (a < vocabSize - 1) {
if (pos1 >= 0) {
if (count(pos1) < count(pos2)) {
min1i = pos1
pos1 -= 1
} else {
min1i = pos2
pos2 += 1
}
} else {
min1i = pos2
pos2 += 1
}
if (pos1 >= 0) {
if (count(pos1) < count(pos2)) {
min2i = pos1
pos1 -= 1
} else {
min2i = pos2
pos2 += 1
}
} else {
min2i = pos2
pos2 += 1
}
count(vocabSize + a) = count(min1i) + count(min2i)
parentNode(min1i) = vocabSize + a
parentNode(min2i) = vocabSize + a
binary(min2i) = 1
a += 1
}
// Now assign binary code to each vocabulary word
var i = 0
a = 0
while (a < vocabSize) {
var b = a
i = 0
while (b != vocabSize * 2 - 2) {
code(i) = binary(b)
point(i) = b
i += 1
b = parentNode(b)
}
vocab(a).codeLen = i
vocab(a).point(0) = vocabSize - 2
b = 0
while (b < i) {
vocab(a).code(i - b - 1) = code(b)
vocab(a).point(i - b) = point(b) - vocabSize
b += 1
}
a += 1
}
}
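  // Hedged note, an assumption rather than part of the Spark source:
  // createBinaryTree is a standard Huffman construction over word counts. For
  // counts (4, 2, 1) the two rarest leaves merge first into an internal node
  // of count 3, which then merges with the count-4 leaf; frequent words thus
  // receive short codes, shortening the hierarchical-softmax path walked for
  // each training example.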
/**
* Computes the vector representation of each word in vocabulary.
* @param dataset an RDD of sentences,
* each sentence is expressed as an iterable collection of words
* @return a Word2VecModel
*/
@Since("1.1.0")
def fit[S <: Iterable[String]](dataset: RDD[S]): Word2VecModel = {
learnVocab(dataset)
createBinaryTree()
val sc = dataset.context
val expTable = sc.broadcast(createExpTable())
val bcVocab = sc.broadcast(vocab)
val bcVocabHash = sc.broadcast(vocabHash)
// each partition is a collection of sentences,
    // which will be translated into arrays of word indices
val sentences: RDD[Array[Int]] = dataset.mapPartitions { sentenceIter =>
// Each sentence will map to 0 or more Array[Int]
sentenceIter.flatMap { sentence =>
// Sentence of words, some of which map to a word index
val wordIndexes = sentence.flatMap(bcVocabHash.value.get)
        // break wordIndexes into chunks of maxSentenceLength when it has more
wordIndexes.grouped(maxSentenceLength).map(_.toArray)
}
}
val newSentences = sentences.repartition(numPartitions).cache()
val initRandom = new XORShiftRandom(seed)
if (vocabSize.toLong * vectorSize >= Int.MaxValue) {
throw new RuntimeException("Please increase minCount or decrease vectorSize in Word2Vec" +
" to avoid an OOM. You are highly recommended to make your vocabSize*vectorSize, " +
"which is " + vocabSize + "*" + vectorSize + " for now, less than `Int.MaxValue`.")
}
val syn0Global =
Array.fill[Float](vocabSize * vectorSize)((initRandom.nextFloat() - 0.5f) / vectorSize)
val syn1Global = new Array[Float](vocabSize * vectorSize)
var alpha = learningRate
for (k <- 1 to numIterations) {
val bcSyn0Global = sc.broadcast(syn0Global)
val bcSyn1Global = sc.broadcast(syn1Global)
val partial = newSentences.mapPartitionsWithIndex { case (idx, iter) =>
val random = new XORShiftRandom(seed ^ ((idx + 1) << 16) ^ ((-k - 1) << 8))
val syn0Modify = new Array[Int](vocabSize)
val syn1Modify = new Array[Int](vocabSize)
val model = iter.foldLeft((bcSyn0Global.value, bcSyn1Global.value, 0L, 0L)) {
case ((syn0, syn1, lastWordCount, wordCount), sentence) =>
var lwc = lastWordCount
var wc = wordCount
if (wordCount - lastWordCount > 10000) {
lwc = wordCount
// TODO: discount by iteration?
alpha =
learningRate * (1 - numPartitions * wordCount.toDouble / (trainWordsCount + 1))
if (alpha < learningRate * 0.0001) alpha = learningRate * 0.0001
logInfo("wordCount = " + wordCount + ", alpha = " + alpha)
}
wc += sentence.length
var pos = 0
while (pos < sentence.length) {
val word = sentence(pos)
val b = random.nextInt(window)
// Train Skip-gram
var a = b
while (a < window * 2 + 1 - b) {
if (a != window) {
val c = pos - window + a
if (c >= 0 && c < sentence.length) {
val lastWord = sentence(c)
val l1 = lastWord * vectorSize
val neu1e = new Array[Float](vectorSize)
// Hierarchical softmax
var d = 0
while (d < bcVocab.value(word).codeLen) {
val inner = bcVocab.value(word).point(d)
val l2 = inner * vectorSize
// Propagate hidden -> output
var f = blas.sdot(vectorSize, syn0, l1, 1, syn1, l2, 1)
if (f > -MAX_EXP && f < MAX_EXP) {
val ind = ((f + MAX_EXP) * (EXP_TABLE_SIZE / MAX_EXP / 2.0)).toInt
f = expTable.value(ind)
val g = ((1 - bcVocab.value(word).code(d) - f) * alpha).toFloat
blas.saxpy(vectorSize, g, syn1, l2, 1, neu1e, 0, 1)
blas.saxpy(vectorSize, g, syn0, l1, 1, syn1, l2, 1)
syn1Modify(inner) += 1
}
d += 1
}
blas.saxpy(vectorSize, 1.0f, neu1e, 0, 1, syn0, l1, 1)
syn0Modify(lastWord) += 1
}
}
a += 1
}
pos += 1
}
(syn0, syn1, lwc, wc)
}
val syn0Local = model._1
val syn1Local = model._2
// Only output modified vectors.
Iterator.tabulate(vocabSize) { index =>
if (syn0Modify(index) > 0) {
Some((index, syn0Local.slice(index * vectorSize, (index + 1) * vectorSize)))
} else {
None
}
}.flatten ++ Iterator.tabulate(vocabSize) { index =>
if (syn1Modify(index) > 0) {
Some((index + vocabSize, syn1Local.slice(index * vectorSize, (index + 1) * vectorSize)))
} else {
None
}
}.flatten
}
val synAgg = partial.reduceByKey { case (v1, v2) =>
blas.saxpy(vectorSize, 1.0f, v2, 1, v1, 1)
v1
}.collect()
var i = 0
while (i < synAgg.length) {
val index = synAgg(i)._1
if (index < vocabSize) {
Array.copy(synAgg(i)._2, 0, syn0Global, index * vectorSize, vectorSize)
} else {
Array.copy(synAgg(i)._2, 0, syn1Global, (index - vocabSize) * vectorSize, vectorSize)
}
i += 1
}
bcSyn0Global.destroy(false)
bcSyn1Global.destroy(false)
}
newSentences.unpersist()
expTable.destroy(false)
bcVocab.destroy(false)
bcVocabHash.destroy(false)
val wordArray = vocab.map(_.word)
new Word2VecModel(wordArray.zipWithIndex.toMap, syn0Global)
}
/**
* Computes the vector representation of each word in vocabulary (Java version).
* @param dataset a JavaRDD of words
* @return a Word2VecModel
*/
@Since("1.1.0")
def fit[S <: JavaIterable[String]](dataset: JavaRDD[S]): Word2VecModel = {
fit(dataset.rdd.map(_.asScala))
}
}
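// Illustrative usage sketch, assuming an active SparkContext and a
// whitespace-tokenised corpus at a placeholder path; setVectorSize and
// setMinCount are the standard setters defined on Word2Vec above.
private object Word2VecUsageSketch {
  def run(sc: SparkContext): Unit = {
    val sentences = sc.textFile("data/corpus.txt").map(_.split(" ").toSeq)
    val model = new Word2Vec().setVectorSize(100).setMinCount(5).fit(sentences)
    // top-5 nearest neighbours by cosine similarity (the query word is excluded)
    model.findSynonyms("spark", 5).foreach { case (word, similarity) =>
      println(s"$word -> $similarity")
    }
  }
}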
/**
* Word2Vec model
* @param wordIndex maps each word to an index, which can retrieve the corresponding
* vector from wordVectors
* @param wordVectors array of length numWords * vectorSize, vector corresponding
* to the word mapped with index i can be retrieved by the slice
* (i * vectorSize, i * vectorSize + vectorSize)
*/
@Since("1.1.0")
class Word2VecModel private[spark] (
private[spark] val wordIndex: Map[String, Int],
private[spark] val wordVectors: Array[Float]) extends Serializable with Saveable {
private val numWords = wordIndex.size
// vectorSize: Dimension of each word's vector.
private val vectorSize = wordVectors.length / numWords
// wordList: Ordered list of words obtained from wordIndex.
private val wordList: Array[String] = {
val (wl, _) = wordIndex.toSeq.sortBy(_._2).unzip
wl.toArray
}
// wordVecNorms: Array of length numWords, each value being the Euclidean norm
// of the wordVector.
private val wordVecNorms: Array[Double] = {
val wordVecNorms = new Array[Double](numWords)
var i = 0
while (i < numWords) {
val vec = wordVectors.slice(i * vectorSize, i * vectorSize + vectorSize)
wordVecNorms(i) = blas.snrm2(vectorSize, vec, 1)
i += 1
}
wordVecNorms
}
@Since("1.5.0")
def this(model: Map[String, Array[Float]]) = {
this(Word2VecModel.buildWordIndex(model), Word2VecModel.buildWordVectors(model))
}
override protected def formatVersion = "1.0"
@Since("1.4.0")
def save(sc: SparkContext, path: String): Unit = {
Word2VecModel.SaveLoadV1_0.save(sc, path, getVectors)
}
/**
* Transforms a word to its vector representation
* @param word a word
* @return vector representation of word
*/
@Since("1.1.0")
def transform(word: String): Vector = {
wordIndex.get(word) match {
case Some(ind) =>
val vec = wordVectors.slice(ind * vectorSize, ind * vectorSize + vectorSize)
Vectors.dense(vec.map(_.toDouble))
case None =>
throw new IllegalStateException(s"$word not in vocabulary")
}
}
/**
* Find synonyms of a word; do not include the word itself in results.
* @param word a word
* @param num number of synonyms to find
* @return array of (word, cosineSimilarity)
*/
@Since("1.1.0")
def findSynonyms(word: String, num: Int): Array[(String, Double)] = {
val vector = transform(word)
findSynonyms(vector, num, Some(word))
}
/**
* Find synonyms of the vector representation of a word, possibly
   * including any words in the model vocabulary whose vector representation
* is the supplied vector.
* @param vector vector representation of a word
* @param num number of synonyms to find
* @return array of (word, cosineSimilarity)
*/
@Since("1.1.0")
def findSynonyms(vector: Vector, num: Int): Array[(String, Double)] = {
findSynonyms(vector, num, None)
}
/**
* Find synonyms of the vector representation of a word, rejecting
* words identical to the value of wordOpt, if one is supplied.
* @param vector vector representation of a word
* @param num number of synonyms to find
* @param wordOpt optionally, a word to reject from the results list
* @return array of (word, cosineSimilarity)
*/
private def findSynonyms(
vector: Vector,
num: Int,
wordOpt: Option[String]): Array[(String, Double)] = {
    require(num > 0, "Number of similar words should be > 0")
val fVector = vector.toArray.map(_.toFloat)
val cosineVec = Array.fill[Float](numWords)(0)
val alpha: Float = 1
val beta: Float = 0
// Normalize input vector before blas.sgemv to avoid Inf value
val vecNorm = blas.snrm2(vectorSize, fVector, 1)
if (vecNorm != 0.0f) {
blas.sscal(vectorSize, 1 / vecNorm, fVector, 0, 1)
}
blas.sgemv(
"T", vectorSize, numWords, alpha, wordVectors, vectorSize, fVector, 1, beta, cosineVec, 1)
val cosVec = cosineVec.map(_.toDouble)
var ind = 0
while (ind < numWords) {
val norm = wordVecNorms(ind)
if (norm == 0.0) {
cosVec(ind) = 0.0
} else {
cosVec(ind) /= norm
}
ind += 1
}
val pq = new BoundedPriorityQueue[(String, Double)](num + 1)(Ordering.by(_._2))
    for (i <- cosVec.indices) {
pq += Tuple2(wordList(i), cosVec(i))
}
val scored = pq.toSeq.sortBy(-_._2)
val filtered = wordOpt match {
case Some(w) => scored.filter(tup => w != tup._1)
case None => scored
}
filtered.take(num).toArray
}
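  // Note on the computation above: `fVector` is normalised before the sgemv
  // call, so each entry of cosineVec is (w . v) / ||v||; dividing by
  // wordVecNorms(ind) = ||w|| then yields the cosine similarity
  // (w . v) / (||w|| * ||v||). The queue keeps num + 1 candidates so that the
  // query word itself can still be filtered out afterwards.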
/**
* Returns a map of words to their vector representations.
*/
@Since("1.2.0")
def getVectors: Map[String, Array[Float]] = {
wordIndex.map { case (word, ind) =>
(word, wordVectors.slice(vectorSize * ind, vectorSize * ind + vectorSize))
}
}
}
@Since("1.4.0")
object Word2VecModel extends Loader[Word2VecModel] {
private def buildWordIndex(model: Map[String, Array[Float]]): Map[String, Int] = {
model.keys.zipWithIndex.toMap
}
private def buildWordVectors(model: Map[String, Array[Float]]): Array[Float] = {
require(model.nonEmpty, "Word2VecMap should be non-empty")
val (vectorSize, numWords) = (model.head._2.length, model.size)
val wordList = model.keys.toArray
val wordVectors = new Array[Float](vectorSize * numWords)
var i = 0
while (i < numWords) {
Array.copy(model(wordList(i)), 0, wordVectors, i * vectorSize, vectorSize)
i += 1
}
wordVectors
}
private object SaveLoadV1_0 {
val formatVersionV1_0 = "1.0"
val classNameV1_0 = "org.apache.spark.mllib.feature.Word2VecModel"
case class Data(word: String, vector: Array[Float])
def load(sc: SparkContext, path: String): Word2VecModel = {
val spark = SparkSession.builder().sparkContext(sc).getOrCreate()
val dataFrame = spark.read.parquet(Loader.dataPath(path))
// Check schema explicitly since erasure makes it hard to use match-case for checking.
Loader.checkSchema[Data](dataFrame.schema)
val dataArray = dataFrame.select("word", "vector").collect()
val word2VecMap = dataArray.map(i => (i.getString(0), i.getSeq[Float](1).toArray)).toMap
new Word2VecModel(word2VecMap)
}
def save(sc: SparkContext, path: String, model: Map[String, Array[Float]]): Unit = {
val spark = SparkSession.builder().sparkContext(sc).getOrCreate()
val vectorSize = model.values.head.length
val numWords = model.size
val metadata = compact(render(
("class" -> classNameV1_0) ~ ("version" -> formatVersionV1_0) ~
("vectorSize" -> vectorSize) ~ ("numWords" -> numWords)))
sc.parallelize(Seq(metadata), 1).saveAsTextFile(Loader.metadataPath(path))
// We want to partition the model in partitions smaller than
// spark.kryoserializer.buffer.max
val bufferSize = Utils.byteStringAsBytes(
spark.conf.get("spark.kryoserializer.buffer.max", "64m"))
      // We estimate the approximate size of the model, counting only the array
      // size and assuming an average string size of 15 bytes; the formula is:
      // (floatSize * vectorSize + 15) * numWords
val approxSize = (4L * vectorSize + 15) * numWords
val nPartitions = ((approxSize / bufferSize) + 1).toInt
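      // e.g. (illustrative): vectorSize = 100 and numWords = 1,000,000 give an
      // approxSize of about 415 MB; with the default 64 MB buffer this yields
      // nPartitions = 7, keeping each partition below the Kryo buffer limit.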
val dataArray = model.toSeq.map { case (w, v) => Data(w, v) }
spark.createDataFrame(dataArray).repartition(nPartitions).write.parquet(Loader.dataPath(path))
}
}
@Since("1.4.0")
override def load(sc: SparkContext, path: String): Word2VecModel = {
val (loadedClassName, loadedVersion, metadata) = Loader.loadMetadata(sc, path)
implicit val formats = DefaultFormats
val expectedVectorSize = (metadata \\ "vectorSize").extract[Int]
val expectedNumWords = (metadata \\ "numWords").extract[Int]
val classNameV1_0 = SaveLoadV1_0.classNameV1_0
(loadedClassName, loadedVersion) match {
case (classNameV1_0, "1.0") =>
val model = SaveLoadV1_0.load(sc, path)
val vectorSize = model.getVectors.values.head.length
val numWords = model.getVectors.size
require(expectedVectorSize == vectorSize,
s"Word2VecModel requires each word to be mapped to a vector of size " +
s"$expectedVectorSize, got vector of size $vectorSize")
require(expectedNumWords == numWords,
s"Word2VecModel requires $expectedNumWords words, but got $numWords")
model
case _ => throw new Exception(
s"Word2VecModel.load did not recognize model with (className, format version):" +
s"($loadedClassName, $loadedVersion). Supported:\\n" +
s" ($classNameV1_0, 1.0)")
}
}
}
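// Persistence sketch (illustrative, with a placeholder path), assuming an
// active SparkContext `sc` and a trained `model`:
//
//   model.save(sc, "/tmp/word2vec-model")            // metadata + parquet data
//   val restored = Word2VecModel.load(sc, "/tmp/word2vec-model")
//   assert(restored.getVectors.keySet == model.getVectors.keySet)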
|
Panos-Bletsos/spark-cost-model-optimizer
|
mllib/src/main/scala/org/apache/spark/mllib/feature/Word2Vec.scala
|
Scala
|
apache-2.0
| 23,659
|
package scalacookbook.chapter10
/**
* Created by liguodong on 2016/7/30.
*/
object MergeTwoSequentialCollectionPair extends App{
val women = List("Wilma", "Betty")
val men = List("Fred", "Barney")
val couples = women zip men
for ((wife, husband) <- couples) {
println(s"$wife is married to $husband")
}
val couplesMap = couples.toMap //Map(Wilma -> Fred, Betty -> Barney)
println(couplesMap)
//Discussion
// three elements
val products = Array("breadsticks", "pizza", "soft drink")
// one element
val prices = Array(4)
  // The zip function pairs up the elements at corresponding positions of the
  // two collections. If one collection is longer, its extra elements are dropped.
// one resulting element
val productsWithPrice = products.zip(prices) //Array[(String, Int)] = Array((breadsticks,4))
println(productsWithPrice.toList)
val (a,b) = productsWithPrice.unzip
  println(a.toList) //List(breadsticks)
  println(b.toList) //List(4)
  // zipAll is similar to zip above, but when one collection has fewer elements,
  // the gaps are filled with the given default values.
//zipAll
val aaa = List("a1","a2","a3")
val bbb = List("b1","b2","b3","b4")
val ccc = List("c1","c2")
val aaaWithBbb = aaa.zipAll(bbb,"##","@@")
println(aaaWithBbb.toMap)
val aaaWithCcc = aaa.zipAll(ccc,"%%","**")
println(aaaWithCcc.toMap)
//zipped
//The zipped method on tuples generalizes several common operations to work on multiple lists.
val values = List.range(1, 5)
println((values, values).zipped.toMap)
val sumOfSquares = (values, values).zipped.map(_ * _).sum
println(sumOfSquares)
  // zipWithIndex pairs each element with its index in the collection.
val series = Seq(0, 1, 1, 2, 3, 5, 8, 13)
println(series.zipWithIndex.toMap)
}
|
liguodongIOT/java-scala-mix-sbt
|
src/main/scala/scalacookbook/chapter10/MergeTwoSequentialCollectionPair.scala
|
Scala
|
apache-2.0
| 1,869
|
package concrete.constraint.extension
import org.scalatest.FlatSpec
import org.scalatest.Matchers
import concrete.IntDomain
import concrete.Problem
import concrete.Variable
import concrete.constraint.AdviseCount
import mdd.MDD
final class ExtensionConstraintTrieTest extends FlatSpec with Matchers {
val mdd = MDD(Array(0, 0), Array(1, 1), Array(2, 2))
// val ta = new MDDMatrix(mdd, false)
val v0 = new Variable("V0", IntDomain(0 to 1))
val v1 = new Variable("V1", IntDomain(0 to 2))
val mmd = new ReduceableExt(Array(v0, v1), new MDDRelation(mdd))
mmd.register(new AdviseCount())
//println(content map (_.toSeq) mkString (", "))
"ReduceableExt" should "filter" in {
val problem = Problem(v0, v1)
problem.addConstraint(mmd)
val state = problem.initState.toState
mmd.eventAll(state)
val mod = mmd.revise(state).toState
mod.dom(v0) should be theSameInstanceAs v0.initDomain
mod.dom(v1) should not be theSameInstanceAs(v1.initDomain)
mod(mmd) should have size 2
}
}
|
concrete-cp/concrete
|
src/test/scala/concrete/constraint/extension/ExtensionConstraintTrieTest.scala
|
Scala
|
lgpl-2.1
| 1,029
|
package leo.datastructures.blackboard.impl
import leo.datastructures.{Clause, ClauseProxy}
import leo.datastructures.blackboard._
import scala.collection.mutable
/**
*
* A simple tree context set implementation.
*
* Stores all formulas (enriched clauses) in the program.
*
*/
object FormulaDataStore extends DataStore {
val formulaStore : mutable.Set[ClauseProxy] = new mutable.HashSet[ClauseProxy]
/**
* <p>
* Returns a List of all Formulas of the Blackboard.
* </p>
*
* @return All formulas of the blackboard.
*/
def getFormulas: Iterable[ClauseProxy] = formulaStore
override def isEmpty: Boolean = formulaStore.isEmpty
/**
*
* <p>
* Filters Set of Formulas according to a predicate.
* </p>
*
* @param p Predicate to select formulas
* @return Set of Formulas satisfying the Predicate
*/
def getAll(p: ClauseProxy => Boolean): Iterable[ClauseProxy] = synchronized(formulaStore.filter(p))
/**
* <p>
* Adds a formula to the blackboard, if it does not exist. If it exists
* the old formula is returned.
* </p>
*
* @param formula to be added.
* @return The inserted Formula, or the already existing one.
*/
def addFormula(formula : ClauseProxy) : Boolean = synchronized{
if(formulaStore.contains(formula)) return false
val f = formulaStore.add(formula)
// TODO: handle merge
f
}
/**
* Adds a formula to the Blackboard.
    * Returns true if the formula was added successfully,
    * and false if the formula already existed.
    *
    * @param formula - the new formula to add
* @return true if the formula was not contained in the blackboard previously
*/
def addNewFormula(formula : ClauseProxy) : Boolean = synchronized {
// TODO: Implement Sets to check containment of Clauses.
if(Clause.trivial(formula.cl)) return false
if (formulaStore.exists(c => c.cl == formula.cl))
false
else {
addFormula(formula)
}
}
/**
* <p>
    * Removes a formula from the Set of formulas of the Blackboard.
* </p>
*
* @return true if the formula was removed, false if the formula does not exist.
*/
def removeFormula(formula: ClauseProxy): Boolean = synchronized {
formulaStore.remove(formula)
}
/**
* <p>
* Remove all Formulas from the Blackboard satisfying a Predicate.
* </p>
*
* @param p - All x with p(x) will be removed.
*/
def rmAll(p: ClauseProxy => Boolean) = synchronized {
val filter = formulaStore.filter(p).iterator
while(filter.nonEmpty){
formulaStore.remove(filter.next())
}
}
//========================================================
//
// Data Store Implementation
//
//========================================================
override def storedTypes: Seq[DataType[Any]] = List(ClauseType)
override def updateResult(r: Delta) : Delta = {
val delta = Result()
val del = r.removes(ClauseType).iterator
val up = r.updates(ClauseType).iterator
val ins = r.inserts(ClauseType).iterator
while(del.nonEmpty) {
val toRemove = del.next()
val removed = removeFormula(toRemove)
if(removed) delta.remove(ClauseType)(toRemove)
}
while(up.nonEmpty) {
val (oldV, newV) = up.next()
removeFormula(oldV)
val inserted = addNewFormula(newV)
if (inserted) delta.update(ClauseType)(oldV)(newV)
}
    while(ins.nonEmpty) {
      val toInsert = ins.next()
      // insert the element just taken from the iterator; next() must only be called once per loop
      val inserted = addNewFormula(toInsert)
      if (inserted) delta.insert(ClauseType)(toInsert)
}
delta
}
override def clear(): Unit = formulaStore.clear()
override def get[T](t: DataType[T]): Set[T] = t match {
case ClauseType => getFormulas.toSet.asInstanceOf[Set[T]]
case _ => Set()
}
}
|
leoprover/Leo-III
|
oldsrc/main/scala/leo/datastructures/blackboard/impl/FormulaDataStore.scala
|
Scala
|
bsd-3-clause
| 3,795
|
package org.boidflow.pool
import simplex3d.math.{Vec3i, ConstVec3i}
import simplex3d.math.float.functions._
import simplex3d.math.float._
/**
*
*/
final class PoolBuffer3(size: ConstVec3i) {
val yMult = size.x
val zMult = size.x * size.y
val totalSize = size.x * size.y * size.z
val x = new PoolBuffer(size)
val y = new PoolBuffer(size)
val z = new PoolBuffer(size)
def set(pos: Vec3i, value: inVec3) {
val index = pos.x + pos.y * yMult + pos.z * zMult
x.buffer(index) = value.x
y.buffer(index) = value.y
z.buffer(index) = value.z
}
def get(pos: Vec3i, valueOut: outVec3) {
get(pos.x + pos.y * yMult + pos.z * zMult, valueOut)
}
def get(index: Int, valueOut: outVec3) {
valueOut.x = x.buffer(index)
valueOut.y = y.buffer(index)
valueOut.z = z.buffer(index)
}
def scaleAdd(source: PoolBuffer3, scale: Float) {
x.scaleAdd(source.x, scale)
y.scaleAdd(source.y, scale)
z.scaleAdd(source.z, scale)
}
def diffuse(previous: PoolBuffer3, boundary: Boundary3, diff: Float, timeStep: Float) {
x.diffuse(previous.x, boundary.x, diff, timeStep)
y.diffuse(previous.y, boundary.y, diff, timeStep)
z.diffuse(previous.z, boundary.z, diff, timeStep)
}
  /**
   * Performs a linear backtrace along the velocity field to move the contents
   * of the previous buffer into this one.
   */
def advect(previous: PoolBuffer3, boundary: Boundary3, velocity: PoolBuffer3, timeStep: Float, advectCellsPerSecond: Float = 1f) {
x.advect(previous.x, boundary.x, velocity, timeStep, advectCellsPerSecond)
y.advect(previous.y, boundary.y, velocity, timeStep, advectCellsPerSecond)
z.advect(previous.z, boundary.z, velocity, timeStep, advectCellsPerSecond)
}
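  // Semi-Lagrangian backtrace, illustratively: each cell at position p samples
  // the previous buffer at p - velocity(p) * timeStep * advectCellsPerSecond,
  // pulling values along the flow instead of pushing them, which remains
  // stable even for large time steps.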
  def project() {
    // Not implemented here; a full projection step would make the velocity
    // field divergence-free by subtracting the gradient of a solved pressure field.
  }
}
|
zzorn/boidflow
|
src/main/scala/org/boidflow/pool/PoolBuffer3.scala
|
Scala
|
gpl-2.0
| 1,764
|
/**
* Copyright (C) 2016 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.xforms.function.xxforms
import org.orbeon.oxf.xforms.model.InstanceData
import org.orbeon.saxon.`type`.BuiltInAtomicType
import org.orbeon.saxon.expr.XPathContext
import org.orbeon.saxon.om.{NodeInfo, StandardNames}
import org.orbeon.saxon.value.{AtomicValue, QNameValue}
class XXFormsType extends XXFormsMIPFunction {
override def evaluateItem(xpathContext: XPathContext): QNameValue =
itemArgumentOrContextOpt(0)(xpathContext) match {
case Some(atomicValue: AtomicValue) =>
atomicValue.getItemType(null) match {
case atomicType: BuiltInAtomicType =>
val fingerprint = atomicType.getFingerprint
new QNameValue(
StandardNames.getPrefix(fingerprint),
StandardNames.getURI(fingerprint),
StandardNames.getLocalName(fingerprint),
null
)
case _ =>
null
}
case Some(node: NodeInfo) =>
// Get type from node
Option(InstanceData.getType(node)) match {
case Some(typeQName) =>
new QNameValue(
"",
typeQName.namespace.uri,
typeQName.localName,
null
)
case _ =>
null
}
case _ =>
null
}
}
|
orbeon/orbeon-forms
|
xforms-runtime/jvm/src/main/scala/org/orbeon/oxf/xforms/function/xxforms/XXFormsType.scala
|
Scala
|
lgpl-2.1
| 1,961
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.admin
import kafka.controller.ReplicaAssignment
import kafka.network.SocketServer
import org.junit.Assert._
import kafka.utils.TestUtils._
import kafka.utils.TestUtils
import kafka.server.BaseRequestTest
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.errors.InvalidReplicaAssignmentException
import org.apache.kafka.common.protocol.ApiKeys
import org.apache.kafka.common.requests.MetadataResponse.TopicMetadata
import org.apache.kafka.common.requests.{MetadataRequest, MetadataResponse}
import org.junit.{Before, Test}
import scala.collection.JavaConverters._
class AddPartitionsTest extends BaseRequestTest {
override def brokerCount: Int = 4
val partitionId = 0
val topic1 = "new-topic1"
val topic1Assignment = Map(0 -> ReplicaAssignment(Seq(0,1), List(), List()))
val topic2 = "new-topic2"
val topic2Assignment = Map(0 -> ReplicaAssignment(Seq(1,2), List(), List()))
val topic3 = "new-topic3"
val topic3Assignment = Map(0 -> ReplicaAssignment(Seq(2,3,0,1), List(), List()))
val topic4 = "new-topic4"
val topic4Assignment = Map(0 -> ReplicaAssignment(Seq(0,3), List(), List()))
val topic5 = "new-topic5"
val topic5Assignment = Map(1 -> ReplicaAssignment(Seq(0,1), List(), List()))
@Before
override def setUp(): Unit = {
super.setUp()
createTopic(topic1, partitionReplicaAssignment = topic1Assignment.mapValues(_.replicas).toMap)
createTopic(topic2, partitionReplicaAssignment = topic2Assignment.mapValues(_.replicas).toMap)
createTopic(topic3, partitionReplicaAssignment = topic3Assignment.mapValues(_.replicas).toMap)
createTopic(topic4, partitionReplicaAssignment = topic4Assignment.mapValues(_.replicas).toMap)
}
@Test
def testWrongReplicaCount(): Unit = {
try {
adminZkClient.addPartitions(topic1, topic1Assignment, adminZkClient.getBrokerMetadatas(), 2,
Some(Map(0 -> Seq(0, 1), 1 -> Seq(0, 1, 2))))
fail("Add partitions should fail")
} catch {
case _: InvalidReplicaAssignmentException => //this is good
}
}
@Test
def testMissingPartition0(): Unit = {
try {
adminZkClient.addPartitions(topic5, topic5Assignment, adminZkClient.getBrokerMetadatas(), 2,
Some(Map(1 -> Seq(0, 1), 2 -> Seq(0, 1, 2))))
fail("Add partitions should fail")
} catch {
case e: AdminOperationException => //this is good
assertTrue(e.getMessage.contains("Unexpected existing replica assignment for topic 'new-topic5', partition id 0 is missing"))
}
}
@Test
def testIncrementPartitions(): Unit = {
adminZkClient.addPartitions(topic1, topic1Assignment, adminZkClient.getBrokerMetadatas(), 3)
// wait until leader is elected
val leader1 = waitUntilLeaderIsElectedOrChanged(zkClient, topic1, 1)
val leader2 = waitUntilLeaderIsElectedOrChanged(zkClient, topic1, 2)
val leader1FromZk = zkClient.getLeaderForPartition(new TopicPartition(topic1, 1)).get
val leader2FromZk = zkClient.getLeaderForPartition(new TopicPartition(topic1, 2)).get
assertEquals(leader1, leader1FromZk)
assertEquals(leader2, leader2FromZk)
// read metadata from a broker and verify the new topic partitions exist
TestUtils.waitUntilMetadataIsPropagated(servers, topic1, 1)
TestUtils.waitUntilMetadataIsPropagated(servers, topic1, 2)
val response = sendMetadataRequest(new MetadataRequest.Builder(Seq(topic1).asJava, false).build)
assertEquals(1, response.topicMetadata.size)
val partitions = response.topicMetadata.asScala.head.partitionMetadata.asScala.sortBy(_.partition)
    assertEquals(3, partitions.size)
assertEquals(1, partitions(1).partition)
assertEquals(2, partitions(2).partition)
val replicas = partitions(1).replicas
    assertEquals(2, replicas.size)
assertTrue(replicas.contains(partitions(1).leader))
}
@Test
def testManualAssignmentOfReplicas(): Unit = {
// Add 2 partitions
adminZkClient.addPartitions(topic2, topic2Assignment, adminZkClient.getBrokerMetadatas(), 3,
Some(Map(0 -> Seq(1, 2), 1 -> Seq(0, 1), 2 -> Seq(2, 3))))
// wait until leader is elected
val leader1 = waitUntilLeaderIsElectedOrChanged(zkClient, topic2, 1)
val leader2 = waitUntilLeaderIsElectedOrChanged(zkClient, topic2, 2)
val leader1FromZk = zkClient.getLeaderForPartition(new TopicPartition(topic2, 1)).get
val leader2FromZk = zkClient.getLeaderForPartition(new TopicPartition(topic2, 2)).get
assertEquals(leader1, leader1FromZk)
assertEquals(leader2, leader2FromZk)
// read metadata from a broker and verify the new topic partitions exist
TestUtils.waitUntilMetadataIsPropagated(servers, topic2, 1)
TestUtils.waitUntilMetadataIsPropagated(servers, topic2, 2)
val response = sendMetadataRequest(new MetadataRequest.Builder(Seq(topic2).asJava, false).build)
assertEquals(1, response.topicMetadata.size)
val topicMetadata = response.topicMetadata.asScala.head
val partitionMetadata = topicMetadata.partitionMetadata.asScala.sortBy(_.partition)
assertEquals(3, topicMetadata.partitionMetadata.size)
assertEquals(0, partitionMetadata(0).partition)
assertEquals(1, partitionMetadata(1).partition)
assertEquals(2, partitionMetadata(2).partition)
val replicas = partitionMetadata(1).replicas
assertEquals(2, replicas.size)
assertTrue(replicas.asScala.head.id == 0 || replicas.asScala.head.id == 1)
assertTrue(replicas.asScala(1).id == 0 || replicas.asScala(1).id == 1)
}
@Test
def testReplicaPlacementAllServers(): Unit = {
adminZkClient.addPartitions(topic3, topic3Assignment, adminZkClient.getBrokerMetadatas(), 7)
// read metadata from a broker and verify the new topic partitions exist
TestUtils.waitUntilMetadataIsPropagated(servers, topic3, 1)
TestUtils.waitUntilMetadataIsPropagated(servers, topic3, 2)
TestUtils.waitUntilMetadataIsPropagated(servers, topic3, 3)
TestUtils.waitUntilMetadataIsPropagated(servers, topic3, 4)
TestUtils.waitUntilMetadataIsPropagated(servers, topic3, 5)
TestUtils.waitUntilMetadataIsPropagated(servers, topic3, 6)
val response = sendMetadataRequest(new MetadataRequest.Builder(Seq(topic3).asJava, false).build)
assertEquals(1, response.topicMetadata.size)
val topicMetadata = response.topicMetadata.asScala.head
validateLeaderAndReplicas(topicMetadata, 0, 2, Set(2, 3, 0, 1))
validateLeaderAndReplicas(topicMetadata, 1, 3, Set(3, 2, 0, 1))
validateLeaderAndReplicas(topicMetadata, 2, 0, Set(0, 3, 1, 2))
validateLeaderAndReplicas(topicMetadata, 3, 1, Set(1, 0, 2, 3))
validateLeaderAndReplicas(topicMetadata, 4, 2, Set(2, 3, 0, 1))
validateLeaderAndReplicas(topicMetadata, 5, 3, Set(3, 0, 1, 2))
validateLeaderAndReplicas(topicMetadata, 6, 0, Set(0, 1, 2, 3))
}
@Test
def testReplicaPlacementPartialServers(): Unit = {
adminZkClient.addPartitions(topic2, topic2Assignment, adminZkClient.getBrokerMetadatas(), 3)
// read metadata from a broker and verify the new topic partitions exist
TestUtils.waitUntilMetadataIsPropagated(servers, topic2, 1)
TestUtils.waitUntilMetadataIsPropagated(servers, topic2, 2)
val response = sendMetadataRequest(new MetadataRequest.Builder(Seq(topic2).asJava, false).build)
assertEquals(1, response.topicMetadata.size)
val topicMetadata = response.topicMetadata.asScala.head
validateLeaderAndReplicas(topicMetadata, 0, 1, Set(1, 2))
validateLeaderAndReplicas(topicMetadata, 1, 2, Set(0, 2))
validateLeaderAndReplicas(topicMetadata, 2, 3, Set(1, 3))
}
def validateLeaderAndReplicas(metadata: TopicMetadata, partitionId: Int, expectedLeaderId: Int,
expectedReplicas: Set[Int]): Unit = {
val partitionOpt = metadata.partitionMetadata.asScala.find(_.partition == partitionId)
assertTrue(s"Partition $partitionId should exist", partitionOpt.isDefined)
val partition = partitionOpt.get
assertNotNull("Partition leader should exist", partition.leader)
assertEquals("Partition leader id should match", expectedLeaderId, partition.leaderId)
assertEquals("Replica set should match", expectedReplicas, partition.replicas.asScala.map(_.id).toSet)
}
private def sendMetadataRequest(request: MetadataRequest, destination: Option[SocketServer] = None): MetadataResponse = {
val response = connectAndSend(request, ApiKeys.METADATA, destination = destination.getOrElse(anySocketServer))
MetadataResponse.parse(response, request.version)
}
}
|
noslowerdna/kafka
|
core/src/test/scala/unit/kafka/admin/AddPartitionsTest.scala
|
Scala
|
apache-2.0
| 9,318
|
package eu.execom.FabutPresentation.persistence
import java.sql.Blob
import java.sql.Timestamp
import java.sql.Date
import eu.execom.FabutPresentation.api._
import eu.execom.FabutPresentation.util._
import org.joda.time._
import scala.slick.driver.MySQLDriver.simple._
import scala.slick.jdbc.JdbcBackend.{Session => SlickSession}
case class UserSession(private var _id: Int, private var _userId: Int, private var _accessToken: String, private var _accessTokenExpires: Date, private var _refreshToken: String, private var _refreshTokenExpires: Date) {
private var id_persisted: Int = id
def idPersisted: Int = id_persisted
def id: Int = _id
def id_=(newId: Int)(implicit session: SlickSession): Any = if (newId != id) {
_id = newId
}
private var userId_persisted: Int = userId
def userIdPersisted: Int = userId_persisted
def userId: Int = _userId
def userId_=(newUserId: Int)(implicit session: SlickSession): Any = if (newUserId != userId) {
_userId = newUserId
}
private var accessToken_persisted: String = accessToken
def accessTokenPersisted: String = accessToken_persisted
def accessToken: String = _accessToken
def accessToken_=(newAccessToken: String)(implicit session: SlickSession): Any = if (newAccessToken != accessToken) {
if (newAccessToken == null) throw USER_SESSION_ACCESS_TOKEN_IS_REQUIRED
if (TableQuery[UserSessions].filter(_.accessToken === newAccessToken).exists.run) throw USER_SESSION_ACCESS_TOKEN_IS_NOT_UNIQUE
if (newAccessToken.size < 0) throw USER_SESSION_ACCESS_TOKEN_MIN_SIZE
if (newAccessToken.size > 128) throw USER_SESSION_ACCESS_TOKEN_MAX_SIZE
_accessToken = newAccessToken
}
private var accessTokenExpires_persisted: DateTime = accessTokenExpires
def accessTokenExpiresPersisted: DateTime = accessTokenExpires_persisted
def accessTokenExpires: DateTime = new org.joda.time.DateTime(_accessTokenExpires)
def accessTokenExpires_=(newAccessTokenExpires: DateTime)(implicit session: SlickSession): Any = if (newAccessTokenExpires != accessTokenExpires) {
if (newAccessTokenExpires == null) throw USER_SESSION_ACCESS_TOKEN_EXPIRES_IS_REQUIRED
_accessTokenExpires = new java.sql.Date(newAccessTokenExpires.getMillis)
}
private var refreshToken_persisted: String = refreshToken
def refreshTokenPersisted: String = refreshToken_persisted
def refreshToken: String = _refreshToken
def refreshToken_=(newRefreshToken: String)(implicit session: SlickSession): Any = if (newRefreshToken != refreshToken) {
if (newRefreshToken == null) throw USER_SESSION_REFRESH_TOKEN_IS_REQUIRED
if (TableQuery[UserSessions].filter(_.refreshToken === newRefreshToken).exists.run) throw USER_SESSION_REFRESH_TOKEN_IS_NOT_UNIQUE
if (newRefreshToken.size < 0) throw USER_SESSION_REFRESH_TOKEN_MIN_SIZE
if (newRefreshToken.size > 128) throw USER_SESSION_REFRESH_TOKEN_MAX_SIZE
_refreshToken = newRefreshToken
}
private var refreshTokenExpires_persisted: DateTime = refreshTokenExpires
def refreshTokenExpiresPersisted: DateTime = refreshTokenExpires_persisted
def refreshTokenExpires: DateTime = new org.joda.time.DateTime(_refreshTokenExpires)
def refreshTokenExpires_=(newRefreshTokenExpires: DateTime)(implicit session: SlickSession): Any = if (newRefreshTokenExpires != refreshTokenExpires) {
if (newRefreshTokenExpires == null) throw USER_SESSION_REFRESH_TOKEN_EXPIRES_IS_REQUIRED
_refreshTokenExpires = new java.sql.Date(newRefreshTokenExpires.getMillis)
}
def user(implicit session: SlickSession): User = TableQuery[Users].filter(_.id === userId).first
def user_=(user: User)(implicit session: SlickSession) = userId = user.id
def this(entity: UserSession) = this(entity._id, entity._userId, entity._accessToken, entity._accessTokenExpires, entity._refreshToken, entity._refreshTokenExpires)
def this() = this(0, 0, "", new java.sql.Date(DateTime.now(DateTimeZone.UTC).getMillis), "", new java.sql.Date(DateTime.now(DateTimeZone.UTC).getMillis))
def this(userId: Int, accessToken: String, accessTokenExpires: DateTime, refreshToken: String, refreshTokenExpires: DateTime)(implicit session: SlickSession) = {
this()
this.userId_=(userId)(session)
this.accessToken_=(accessToken)(session)
this.accessTokenExpires_=(accessTokenExpires)(session)
this.refreshToken_=(refreshToken)(session)
this.refreshTokenExpires_=(refreshTokenExpires)(session)
}
def this(user: User, accessToken: String, accessTokenExpires: DateTime, refreshToken: String, refreshTokenExpires: DateTime)(implicit session: SlickSession) = {
this()
this.user_=(user)(session)
this.accessToken_=(accessToken)(session)
this.accessTokenExpires_=(accessTokenExpires)(session)
this.refreshToken_=(refreshToken)(session)
this.refreshTokenExpires_=(refreshTokenExpires)(session)
}
def persisted() = {
id_persisted = id
userId_persisted = userId
accessToken_persisted = accessToken
accessTokenExpires_persisted = accessTokenExpires
refreshToken_persisted = refreshToken
refreshTokenExpires_persisted = refreshTokenExpires
}
}
object UserSession {
val ID: String = "id"
val USERID: String = "userId"
val ACCESSTOKEN: String = "accessToken"
val ACCESSTOKENEXPIRES: String = "accessTokenExpires"
val REFRESHTOKEN: String = "refreshToken"
val REFRESHTOKENEXPIRES: String = "refreshTokenExpires"
}
object USER_SESSION_ACCESS_TOKEN_IS_REQUIRED extends DataConstraintException("USER_SESSION_ACCESS_TOKEN_IS_REQUIRED")
object USER_SESSION_ACCESS_TOKEN_MIN_SIZE extends DataConstraintException("USER_SESSION_ACCESS_TOKEN_MIN_SIZE")
object USER_SESSION_ACCESS_TOKEN_MAX_SIZE extends DataConstraintException("USER_SESSION_ACCESS_TOKEN_MAX_SIZE")
object USER_SESSION_ACCESS_TOKEN_EXPIRES_IS_REQUIRED extends DataConstraintException("USER_SESSION_ACCESS_TOKEN_EXPIRES_IS_REQUIRED")
object USER_SESSION_REFRESH_TOKEN_IS_REQUIRED extends DataConstraintException("USER_SESSION_REFRESH_TOKEN_IS_REQUIRED")
object USER_SESSION_REFRESH_TOKEN_MIN_SIZE extends DataConstraintException("USER_SESSION_REFRESH_TOKEN_MIN_SIZE")
object USER_SESSION_REFRESH_TOKEN_MAX_SIZE extends DataConstraintException("USER_SESSION_REFRESH_TOKEN_MAX_SIZE")
object USER_SESSION_REFRESH_TOKEN_EXPIRES_IS_REQUIRED extends DataConstraintException("USER_SESSION_REFRESH_TOKEN_EXPIRES_IS_REQUIRED")
object USERSESSION_DOESNT_EXIST extends DataConstraintException("USERSESSION_DOESNT_EXIST")
object USER_SESSION_ID_IS_NOT_UNIQUE extends DataConstraintException("USER_SESSION_ID_IS_NOT_UNIQUE")
object USER_SESSION_ACCESS_TOKEN_IS_NOT_UNIQUE extends DataConstraintException("USER_SESSION_ACCESS_TOKEN_IS_NOT_UNIQUE")
object USER_SESSION_REFRESH_TOKEN_IS_NOT_UNIQUE extends DataConstraintException("USER_SESSION_REFRESH_TOKEN_IS_NOT_UNIQUE")
class UserSessions(tag: Tag) extends Table[UserSession](tag, "UserSession") {
def id = column[Int]("id", O.PrimaryKey, O.AutoInc)
def userId = column[Int]("userId")
def accessToken = column[String]("accessToken")
def accessTokenExpires = column[Date]("accessTokenExpires")
def refreshToken = column[String]("refreshToken")
def refreshTokenExpires = column[Date]("refreshTokenExpires")
val create = UserSession.apply _
def * = (id, userId, accessToken, accessTokenExpires, refreshToken, refreshTokenExpires) <> (create.tupled, UserSession.unapply)
def ? = (id.?, userId.?, accessToken.?, accessTokenExpires.?, refreshToken.?, refreshTokenExpires.?).shaped.<>({r=>import r._; _1.map(_=> create.tupled((_1.get, _2.get, _3.get, _4.get, _5.get, _6.get)))}, (_:Any) => throw new Exception("Inserting into ? projection not supported."))
def user= foreignKey("USERSESSION_USER_FK", userId, TableQuery[Users])(_.id)
}
class UserSessionDao extends GenericSlickDao[UserSession] {
def save(entity: UserSession)(implicit session: SlickSession): Unit = {
logger.trace(s".save(entity: $entity)")
val tableQuery = TableQuery[UserSessions]
val id = tableQuery returning tableQuery.map(_.id) += entity
entity.id = id
entity.persisted()
}
def save(entities: List[UserSession])(implicit session: SlickSession): Unit = {
logger.trace(s".save(entities: $entities)")
val tableQuery = TableQuery[UserSessions]
val ids = tableQuery returning tableQuery.map(_.id) ++= entities
ids.zip(entities).foreach(idWithEntity => {
val id = idWithEntity._1
val entity = idWithEntity._2
entity.id = id
entity.persisted()
})
}
def update(entity: UserSession)(implicit session: SlickSession): Unit = {
logger.trace(s".update(entity: $entity)")
val tableQuery = TableQuery[UserSessions]
tableQuery.filter(_.id === entity.id).update(entity)
entity.persisted()
}
def findAll()(implicit session: SlickSession): List[UserSession] = {
logger.trace(s".findAll()")
var query: Query[UserSessions, UserSessions#TableElementType, Seq] = TableQuery[UserSessions]
query.list
}
def countAll()(implicit session: SlickSession): Int = {
logger.trace(s".countAll()")
var query: Query[UserSessions, UserSessions#TableElementType, Seq] = TableQuery[UserSessions]
query.length.run
}
def getById(id: Int)(implicit session: SlickSession): UserSession = {
logger.trace(s".getById(id: $id)")
var query: Query[UserSessions, UserSessions#TableElementType, Seq] = TableQuery[UserSessions]
query = query.filter(_.id === id)
query.firstOption.getOrElse(throw USERSESSION_DOESNT_EXIST)
}
def deleteById(id: Int)(implicit session: SlickSession): Boolean = {
logger.trace(s".deleteById(id: $id)")
var query: Query[UserSessions, UserSessions#TableElementType, Seq] = TableQuery[UserSessions]
query = query.filter(_.id === id)
query.delete != 0
}
def findById(id: Int)(implicit session: SlickSession): Option[UserSession] = {
logger.trace(s".findById(id: $id)")
var query: Query[UserSessions, UserSessions#TableElementType, Seq] = TableQuery[UserSessions]
query = query.filter(_.id === id)
query.firstOption
}
def findByUserId(userId: Int)(implicit session: SlickSession): List[UserSession] = {
logger.trace(s".findByUserId(userId: $userId)")
var query: Query[UserSessions, UserSessions#TableElementType, Seq] = TableQuery[UserSessions]
query = query.filter(_.userId === userId)
query.list
}
def findByAccessToken(accessToken: String)(implicit session: SlickSession): Option[UserSession] = {
logger.trace(s".findByAccessToken(accessToken: $accessToken)")
var query: Query[UserSessions, UserSessions#TableElementType, Seq] = TableQuery[UserSessions]
query = query.filter(_.accessToken === accessToken)
query.firstOption
}
def findByAccessTokenExpires(accessTokenExpires: DateTime)(implicit session: SlickSession): List[UserSession] = {
logger.trace(s".findByAccessTokenExpires(accessTokenExpires: $accessTokenExpires)")
var query: Query[UserSessions, UserSessions#TableElementType, Seq] = TableQuery[UserSessions]
query = query.filter(_.accessTokenExpires === new java.sql.Date(accessTokenExpires.getMillis))
query.list
}
def findByRefreshToken(refreshToken: String)(implicit session: SlickSession): Option[UserSession] = {
logger.trace(s".findByRefreshToken(refreshToken: $refreshToken)")
var query: Query[UserSessions, UserSessions#TableElementType, Seq] = TableQuery[UserSessions]
query = query.filter(_.refreshToken === refreshToken)
query.firstOption
}
def findByRefreshTokenExpires(refreshTokenExpires: DateTime)(implicit session: SlickSession): List[UserSession] = {
logger.trace(s".findByRefreshTokenExpires(refreshTokenExpires: $refreshTokenExpires)")
var query: Query[UserSessions, UserSessions#TableElementType, Seq] = TableQuery[UserSessions]
query = query.filter(_.refreshTokenExpires === new java.sql.Date(refreshTokenExpires.getMillis))
query.list
}
}
|
idostanic/FabutPresentation
|
src/main/scala/eu/execom/FabutPresentation/persistence/UserSessionDao.scala
|
Scala
|
apache-2.0
| 11,963
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers.registration.errors
import itutil.ControllerISpec
import models.api.EligibilitySubmissionData
import play.api.test.Helpers._
class EmailPasscodeNotFoundControllerISpec extends ControllerISpec {
"show" must {
"return an OK" in new Setup {
given()
.user.isAuthorised()
insertCurrentProfileIntoDb(currentProfile, sessionId)
val res = await(buildClient(routes.EmailPasscodeNotFoundController.show("test").url).get())
res.status mustBe OK
}
}
}
|
hmrc/vat-registration-frontend
|
it/controllers/registration/errors/EmailPasscodeNotFoundControllerISpec.scala
|
Scala
|
apache-2.0
| 1,116
|
package xyz.nabijaczleweli.scala_game_of_life.engine.registries
import java.awt.Color
import xyz.nabijaczleweli.scala_game_of_life.cell.CellAction.CellAction
import xyz.nabijaczleweli.scala_game_of_life.cell.{Cell, Material}
import xyz.nabijaczleweli.scala_game_of_life.engine.GameRenderer
import xyz.nabijaczleweli.scala_game_of_life.entity.Entity
import xyz.nabijaczleweli.scala_game_of_life.util.ColorRainbow
import xyz.nabijaczleweli.scala_game_of_life.world.ICellAccess
import xyz.nabijaczleweli.scala_game_of_life.engine.GameEngine.rand
import scala.collection.mutable
/** Stored: Cell: <tt>[[Cell]]</tt><br />
* Key: Cell's ID: <tt>[[Short]]</tt>
*
* @author Jędrzej
* @since 06.05.14
*/
object CellRegistry extends Registry[Cell, Short] {
private final var __idtocell = new mutable.HashMap[key_type, stored_type]
private final var __celltoid = new mutable.HashMap[stored_type, key_type]
private final var length = 0
add(0.toShort, new Cell(Material.air) {
override def draw(onScreenX: Int, onScreenY: Int, worldX: Long, worldY: Long, world: ICellAccess) {}
override def onNeighbourCellChange(x: Long, y: Long, changedCell: (Cell, Long, Long), action: CellAction) {}
override def onCellAction(x: Long, y: Long, action: CellAction) {}
override def onUpdate(x: Long, y: Long) {}
})
add(1.toShort, new Cell(Material.notAir) {
override def draw(onScreenX: Int, onScreenY: Int, worldX: Long, worldY: Long, world: ICellAccess) {
import GameRenderer._
graph setColor (if(world.cellState(worldX, worldY)) Color.blue else Color.black)
graph.fillRect(onScreenX, onScreenY, cellWidth, cellHeight)
}
})
add(2.toShort, new Cell(Material.notAir) {
override def draw(onScreenX: Int, onScreenY: Int, worldX: Long, worldY: Long, world: ICellAccess) {
import GameRenderer._
graph setColor (if(world.cellState(worldX, worldY)) Color.pink else Color.darkGray)
graph.fillRect(onScreenX, onScreenY, cellWidth, cellHeight)
}
})
add(3.toShort, new Cell(Material.notAir) {
override def draw(onScreenX: Int, onScreenY: Int, worldX: Long, worldY: Long, world: ICellAccess) {
import GameRenderer._
graph setColor (if(world.cellState(worldX, worldY)) Color.orange else Color.gray)
graph.fillRect(onScreenX, onScreenY, cellWidth, cellHeight)
}
})
add(4.toShort, new Cell(Material.notAir) {
private val rain = new ColorRainbow(128, 128, 128)
    override def draw(onScreenX: Int, onScreenY: Int, worldX: Long, worldY: Long, world: ICellAccess) {
import GameRenderer._
graph setColor rain
graph.fillRect(onScreenX, onScreenY, cellWidth, cellHeight)
}
override def onEntityWalking(entity: Entity) {
entity.entityObj.spawnParticle("sparkles", entity.posX, entity.posY, entity.motionX, entity.motionY, entity.worldObj)
}
})
// Only GameRegistry is allowed to put stuff in.
override private[registries] def add(key: key_type, obj: stored_type) {
__idtocell += key -> obj
__celltoid += obj -> key
length += 1
}
override def get(key: key_type): Option[stored_type] =
__idtocell get key
def get(key: stored_type): Option[key_type] =
__celltoid get key
def get = {
    if(rand.nextBoolean() && rand.nextBoolean() && rand.nextBoolean()) // 1-in-8 chance of drawing from the full range, which may yield an air cell
rand nextInt __idtocell.size
else {
rand.nextInt(length - 1) + 1
}
}.toShort
}
|
nabijaczleweli/Scala-Game-of-Life
|
src/main/scala/xyz/nabijaczleweli/scala_game_of_life/engine/registries/CellRegistry.scala
|
Scala
|
mit
| 3,367
|
package org.apache.spark.sql.cassandra
import scala.collection.mutable
import org.apache.spark.sql.SaveMode._
import org.apache.spark.sql.cassandra.DefaultSource._
import org.apache.spark.sql.sources.{BaseRelation, CreatableRelationProvider, RelationProvider, SchemaRelationProvider}
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.{DataFrame, SQLContext, SaveMode}
import com.datastax.spark.connector.cql.{AuthConfFactory, CassandraConnectorConf, DefaultAuthConfFactory}
import com.datastax.spark.connector.rdd.ReadConf
import com.datastax.spark.connector.util.Logging
import com.datastax.spark.connector.writer.WriteConf
/**
* Cassandra data source extends [[RelationProvider]], [[SchemaRelationProvider]] and [[CreatableRelationProvider]].
 * It is used internally by Spark SQL to create a relation for a table that specifies the Cassandra data source,
* e.g.
*
* CREATE TEMPORARY TABLE tmpTable
* USING org.apache.spark.sql.cassandra
* OPTIONS (
* table "table",
* keyspace "keyspace",
* cluster "test_cluster",
* pushdown "true",
* spark.cassandra.input.fetch.size_in_rows "10",
* spark.cassandra.output.consistency.level "ONE",
* spark.cassandra.connection.timeout_ms "1000"
* )
*/
class DefaultSource extends RelationProvider with SchemaRelationProvider with CreatableRelationProvider with Logging {
/**
* Creates a new relation for a cassandra table.
 * The parameters map stores table-level data. Users can specify values for the following keys:
*
* table -- table name, required
* keyspace -- keyspace name, required
* cluster -- cluster name, optional, default name is "default"
* pushdown -- true/false, optional, default is true
* Cassandra connection settings -- optional, e.g. spark.cassandra.connection.timeout_ms
* Cassandra Read Settings -- optional, e.g. spark.cassandra.input.fetch.size_in_rows
* Cassandra Write settings -- optional, e.g. spark.cassandra.output.consistency.level
*
 * When pushdown is true, some filters are pushed down to CQL.
*
*/
override def createRelation(
sqlContext: SQLContext,
parameters: Map[String, String]): BaseRelation = {
val (tableRef, options) = TableRefAndOptions(parameters)
CassandraSourceRelation(tableRef, sqlContext, options)
}
/**
 * Creates a new relation for a Cassandra table given table, keyspace, cluster and pushdown
 * as parameters, with the schema [[StructType]] passed explicitly as a parameter
*/
override def createRelation(
sqlContext: SQLContext,
parameters: Map[String, String],
schema: StructType): BaseRelation = {
val (tableRef, options) = TableRefAndOptions(parameters)
CassandraSourceRelation(tableRef, sqlContext, options, Option(schema))
}
/**
 * Creates a new relation for a Cassandra table given table, keyspace, cluster, pushdown and schema
 * as parameters. It saves the data to the Cassandra table according to the [[SaveMode]].
*/
override def createRelation(
sqlContext: SQLContext,
mode: SaveMode,
parameters: Map[String, String],
data: DataFrame): BaseRelation = {
val (tableRef, options) = TableRefAndOptions(parameters)
val table = CassandraSourceRelation(tableRef, sqlContext, options)
mode match {
case Append => table.insert(data, overwrite = false)
case Overwrite => table.insert(data, overwrite = true)
case ErrorIfExists =>
if (table.buildScan().isEmpty()) {
table.insert(data, overwrite = false)
} else {
throw new UnsupportedOperationException(
s"""'SaveMode is set to ErrorIfExists and Table
|${tableRef.keyspace + "." + tableRef.table} already exists and contains data.
|Perhaps you meant to set the DataFrame write mode to Append?
|Example: df.write.format.options.mode(SaveMode.Append).save()" '""".stripMargin)
}
case Ignore =>
if (table.buildScan().isEmpty()) {
table.insert(data, overwrite = false)
}
}
CassandraSourceRelation(tableRef, sqlContext, options)
}
}
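// Illustrative DataFrame-level usage (a sketch; the keyspace and table names
// are placeholders). The format name and option keys match the constants
// defined in DefaultSource below.
private object CassandraSourceUsageSketch {
  def read(sqlContext: SQLContext): DataFrame =
    sqlContext.read
      .format("org.apache.spark.sql.cassandra")
      .options(Map("keyspace" -> "test_ks", "table" -> "test_table", "pushdown" -> "true"))
      .load()
}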
/** Store data source options */
case class CassandraSourceOptions(pushdown: Boolean = true, cassandraConfs: Map[String, String] = Map.empty)
object DefaultSource {
val CassandraDataSourceTableNameProperty = "table"
val CassandraDataSourceKeyspaceNameProperty = "keyspace"
val CassandraDataSourceClusterNameProperty = "cluster"
val CassandraDataSourceUserDefinedSchemaNameProperty = "schema"
val CassandraDataSourcePushdownEnableProperty = "pushdown"
val CassandraDataSourceProviderPackageName = DefaultSource.getClass.getPackage.getName
val CassandraDataSourceProviderClassName = CassandraDataSourceProviderPackageName + ".DefaultSource"
/** Parse parameters into CassandraDataSourceOptions and TableRef object */
def TableRefAndOptions(parameters: Map[String, String]) : (TableRef, CassandraSourceOptions) = {
val tableName = parameters(CassandraDataSourceTableNameProperty)
val keyspaceName = parameters(CassandraDataSourceKeyspaceNameProperty)
val clusterName = parameters.get(CassandraDataSourceClusterNameProperty)
val pushdown : Boolean = parameters.getOrElse(CassandraDataSourcePushdownEnableProperty, "true").toBoolean
val cassandraConfs = buildConfMap(parameters)
(TableRef(tableName, keyspaceName, clusterName), CassandraSourceOptions(pushdown, cassandraConfs))
}
val confProperties = ReadConf.Properties.map(_.name) ++
WriteConf.Properties.map(_.name) ++
CassandraConnectorConf.Properties.map(_.name) ++
CassandraSourceRelation.Properties.map(_.name) ++
AuthConfFactory.Properties.map(_.name) ++
DefaultAuthConfFactory.properties
/** Construct a map stores Cassandra Conf settings from options */
def buildConfMap(parameters: Map[String, String]): Map[String, String] =
parameters.filterKeys(confProperties.contains)
/** Check whether the provider is Cassandra datasource or not */
def cassandraSource(provider: String) : Boolean = {
provider == CassandraDataSourceProviderPackageName || provider == CassandraDataSourceProviderClassName
}
}
|
ponkin/spark-cassandra-connector
|
spark-cassandra-connector/src/main/scala/org/apache/spark/sql/cassandra/DefaultSource.scala
|
Scala
|
apache-2.0
| 6,259
|
package controllers.rememberme
import jp.t2v.lab.play2.auth._
import play.api.mvc.{Cookie, RequestHeader, Result}
import scala.language.postfixOps
class RememberMeTokenAccessor(maxAge: Int) extends CookieTokenAccessor() {
override def put(token: AuthenticityToken)(result: Result)(implicit request: RequestHeader): Result = {
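    // The "remember me" flag may arrive either as a request tag (set by the
    // login action) or via the session; only when it is set does the cookie
    // receive a max-age, otherwise it expires with the browser session.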
val remember = request.tags.get("rememberme").exists("true" ==) || request.session.get("rememberme").exists("true" ==)
val _maxAge = if (remember) Some(maxAge) else None
val c = Cookie(cookieName, sign(token), _maxAge, cookiePathOption, cookieDomainOption, cookieSecureOption, cookieHttpOnlyOption)
result.withCookies(c)
}
}
|
phosphene/play2.x-basic-auth-demo
|
app/controllers/rememberme/RememberMeTokenAccessor.scala
|
Scala
|
apache-2.0
| 676
|
import org.scalatest.{FlatSpec, Matchers}
import fpinscala.datastructures.List
class ListSpec extends FlatSpec with Matchers {
"List" should "tail correctly" in {
List.tail(List(1,2,3,4)) should be (List(2,3,4))
}
it should "replace head correctly" in {
List.setHead(1, List()) should be (List(1))
List.setHead(4, List(1,2,3)) should be (List(4,2,3))
}
it should "drop elements correctly" in {
List.drop(0, List(1,2,3)) should be (List(1,2,3))
List.drop(1, List(1,2)) should be (List(2))
List.drop(1, List()) should be (List())
List.drop(0, List()) should be (List())
List.drop(10, List(1,2,3)) should be (List())
}
it should "drop while elements are less than 5" in {
List.dropWhile(List(1,2,3,4,5,6,4))(i => i < 5) should be (List(5,6,4))
}
it should "return but the last element" in {
List.init(List(1,2,3,4)) should be (List(1,2,3))
}
it should "fold left and right the same way" in {
val product = (x: Int, y: Int) => x * y
val sum = (x: Int, y: Int) => x + y
val list = List(1,2,3,4,5)
List.foldLeft(list, 1)(product) should be (List.foldRight(list, 1)(product))
List.foldLeft(list, 0)(sum) should be (List.foldRight(list, 0)(sum))
}
}
|
nikolakasev/functional-scala
|
src/test/scala/ListSpec.scala
|
Scala
|
mit
| 1,234
|
/***********************************************************************
* Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
* Portions Crown Copyright (c) 2016-2020 Dstl
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.accumulo.data
import java.io.IOException
import org.geotools.data.DataStoreFinder
import org.junit.runner.RunWith
import org.locationtech.geomesa.accumulo.MiniCluster
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class AccumuloDataStoreFactoryTest extends Specification {
import scala.collection.JavaConverters._
// we use class name to prevent spillage between unit tests
lazy val catalog = s"${MiniCluster.namespace}.${getClass.getSimpleName}"
"AccumuloDataStoreFactory" should {
"create a password authenticated store" in {
val params = Map(
AccumuloDataStoreParams.InstanceIdParam.key -> MiniCluster.cluster.getInstanceName,
AccumuloDataStoreParams.ZookeepersParam.key -> MiniCluster.cluster.getZooKeepers,
AccumuloDataStoreParams.UserParam.key -> MiniCluster.Users.root.name,
AccumuloDataStoreParams.PasswordParam.key -> MiniCluster.Users.root.password,
AccumuloDataStoreParams.CatalogParam.key -> catalog
).asJava
AccumuloDataStoreFactory.canProcess(params) must beTrue
val ds = DataStoreFinder.getDataStore(params)
try {
ds must beAnInstanceOf[AccumuloDataStore]
} finally {
if (ds != null) {
ds.dispose()
}
}
}
"create a keytab authenticated store" in {
val params = Map(
AccumuloDataStoreParams.InstanceIdParam.key -> MiniCluster.cluster.getInstanceName,
AccumuloDataStoreParams.ZookeepersParam.key -> MiniCluster.cluster.getZooKeepers,
AccumuloDataStoreParams.UserParam.key -> MiniCluster.Users.root.name,
AccumuloDataStoreParams.KeytabPathParam.key -> "/path/to/keytab",
AccumuloDataStoreParams.CatalogParam.key -> catalog
).asJava
AccumuloDataStoreFactory.canProcess(params) must beTrue
// TODO GEOMESA-2797 test kerberos
}
"not accept password and keytab" in {
val params = Map(
AccumuloDataStoreParams.InstanceIdParam.key -> MiniCluster.cluster.getInstanceName,
AccumuloDataStoreParams.ZookeepersParam.key -> MiniCluster.cluster.getZooKeepers,
AccumuloDataStoreParams.UserParam.key -> MiniCluster.Users.root.name,
AccumuloDataStoreParams.PasswordParam.key -> MiniCluster.Users.root.password,
AccumuloDataStoreParams.KeytabPathParam.key -> "/path/to/keytab",
AccumuloDataStoreParams.CatalogParam.key -> catalog
).asJava
AccumuloDataStoreFactory.canProcess(params) must beTrue
DataStoreFinder.getDataStore(params) must throwAn[IllegalArgumentException]
}
"not accept a missing instanceId" in {
val params = Map(
AccumuloDataStoreParams.ZookeepersParam.key -> MiniCluster.cluster.getZooKeepers,
AccumuloDataStoreParams.UserParam.key -> MiniCluster.Users.root.name,
AccumuloDataStoreParams.PasswordParam.key -> MiniCluster.Users.root.password,
AccumuloDataStoreParams.CatalogParam.key -> catalog
).asJava
AccumuloDataStoreFactory.canProcess(params) must beTrue
DataStoreFinder.getDataStore(params) must throwAn[IOException]
}
"not accept a missing zookeepers" in {
val params = Map(
AccumuloDataStoreParams.InstanceIdParam.key -> MiniCluster.cluster.getInstanceName,
AccumuloDataStoreParams.UserParam.key -> MiniCluster.Users.root.name,
AccumuloDataStoreParams.PasswordParam.key -> MiniCluster.Users.root.password,
AccumuloDataStoreParams.CatalogParam.key -> catalog
).asJava
AccumuloDataStoreFactory.canProcess(params) must beTrue
DataStoreFinder.getDataStore(params) must throwAn[IOException]
}
"not accept a missing user" in {
val params = Map(
AccumuloDataStoreParams.InstanceIdParam.key -> MiniCluster.cluster.getInstanceName,
AccumuloDataStoreParams.ZookeepersParam.key -> MiniCluster.cluster.getZooKeepers,
AccumuloDataStoreParams.PasswordParam.key -> MiniCluster.Users.root.password,
AccumuloDataStoreParams.CatalogParam.key -> catalog
).asJava
AccumuloDataStoreFactory.canProcess(params) must beTrue
DataStoreFinder.getDataStore(params) must throwAn[IOException]
}
"not accept a missing password and keytab" in {
val params = Map(
AccumuloDataStoreParams.InstanceIdParam.key -> MiniCluster.cluster.getInstanceName,
AccumuloDataStoreParams.ZookeepersParam.key -> MiniCluster.cluster.getZooKeepers,
AccumuloDataStoreParams.UserParam.key -> MiniCluster.Users.root.name,
AccumuloDataStoreParams.CatalogParam.key -> catalog
).asJava
AccumuloDataStoreFactory.canProcess(params) must beTrue
DataStoreFinder.getDataStore(params) must throwAn[IOException]
}
}
}
|
aheyne/geomesa
|
geomesa-accumulo/geomesa-accumulo-datastore/src/test/scala/org/locationtech/geomesa/accumulo/data/AccumuloDataStoreFactoryTest.scala
|
Scala
|
apache-2.0
| 5,401
|
package io.skysail.server.website.html
import play.twirl.api.Html
import html.main
import io.skysail.server.RepresentationModel
object PutBookmarkResource_Get extends _root_.play.twirl.api.BaseScalaTemplate[play.twirl.api.HtmlFormat.Appendable,_root_.play.twirl.api.Format[play.twirl.api.HtmlFormat.Appendable]](play.twirl.api.HtmlFormat) with _root_.play.twirl.api.Template1[RepresentationModel,play.twirl.api.HtmlFormat.Appendable] {
/*************************************
* Home page. *
* *
* @param msg The message to display *
*************************************/
def apply/*6.2*/(rep: RepresentationModel):play.twirl.api.HtmlFormat.Appendable = {
_display_ {
{
Seq[Any](format.raw/*6.28*/("""
"""),_display_(/*8.2*/main/*8.6*/ {_display_(Seq[Any](format.raw/*8.8*/("""
"""),format.raw/*10.1*/("""<br><br><br>
<div class="container">
<div class="starter-template">
<h1>Bookmarks</h1>
<p class="lead">update bookmark:</p>
<span>"""),_display_(/*16.16*/rep/*16.19*/.rawData),format.raw/*16.27*/("""</span>
<form action='"""),_display_(/*17.24*/rep/*17.27*/.linkFor("io.skysail.app.bookmarks.PutBookmarkResource", rep.rawData.head.get("id"))),format.raw/*17.111*/("""?_method=PUT' method="GET">
<table class="table table-sm">
<thead>
<tr>
<th>Title</th>
<th>Url</th>
</tr>
</thead>
<tbody>
<tr>
<th scope="row"><input type="text" name="title" value='"""),_display_(/*28.77*/rep/*28.80*/.rawData.head.get("title")),format.raw/*28.106*/("""'/></th>
<td><input type="url" name="url" value='"""),_display_(/*29.62*/rep/*29.65*/.rawData.head.get("url")),format.raw/*29.89*/("""'/></td>
</tr>
<tr>
<th colspan="2">
<input type="submit">
</th>
</tr>
</tbody>
</table>
</form>
</div>
</div>
""")))}))
}
}
}
def render(rep:RepresentationModel): play.twirl.api.HtmlFormat.Appendable = apply(rep)
def f:((RepresentationModel) => play.twirl.api.HtmlFormat.Appendable) = (rep) => apply(rep)
def ref: this.type = this
}
/*
-- GENERATED --
DATE: Tue Dec 05 11:16:45 CET 2017
SOURCE: C:/git/skysail-server/skysail.server.website/./resources/templates/io/skysail/server/website/PutBookmarkResource_Get.scala.html
HASH: 80a92cb4e723bd05204b2ca4b550062fa4e4e453
MATRIX: 652->193|773->219|801->222|812->226|850->228|879->230|1066->390|1078->393|1107->401|1165->432|1177->435|1283->519|1663->872|1675->875|1723->901|1820->971|1832->974|1877->998
LINES: 15->6|20->6|22->8|22->8|22->8|24->10|30->16|30->16|30->16|31->17|31->17|31->17|42->28|42->28|42->28|43->29|43->29|43->29
-- GENERATED --
*/
|
evandor/skysail-server
|
skysail.server.website/src/io/skysail/server/website/html/PutBookmarkResource_Get.template.scala
|
Scala
|
apache-2.0
| 3,124
|
/*
* Copyright 2015 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations
import uk.gov.hmrc.ct.box.{CtBoxIdentifier, CtOptionalInteger, Input}
case class CP48(value: Option[Int]) extends CtBoxIdentifier(name = "Donations") with CtOptionalInteger with Input
object CP48 {
def apply(int: Int): CP48 = CP48(Some(int))
}
|
keithhall/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/computations/CP48.scala
|
Scala
|
apache-2.0
| 892
|
/**
 * The `Algebraic` number type's goal is to create a guaranteed accuracy number [1].
 * That is, an `Algebraic` can always be approximated to any given accuracy and, in
 * addition, you are guaranteed that if an `Algebraic` `a` represents a real number
 * `r`, then `a.sign == r.sign`.
 *
 * Central to this is the idea of a zero-bound function; this is a function
 * `lowerBound` s.t. if `r != 0` then `|r| > r.lowerBound`. Here we use the
 * BFMSS bound [2], though it seems other (C++) libraries use the max of the
 * BFMSS bound and Li/Yap bound [3].
 *
 * [1] "On Guaranteed Accuracy Computation." C. K. Yap.
 *   http://www.cs.nyu.edu/exact/doc/guaranteed.pdf
 * [2] "A Separation Bound for Real Algebraic Expressions." C. Burnikel, et al.
 *   http://stubber.math-inf.uni-greifswald.de/informatik/PEOPLE/Papers/ESA01/sepbound01.pd
 * [3] "A New Constructive Root Bound for Algebraic Expressions." C. Li and C. Yap.
 */
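// Illustrative sketch of the sign guarantee, assuming only the implicit
// constructors and the `nroot` operation defined in this file:
//
//   val sqrt2 = Algebraic(2) nroot 2
//   (sqrt2 * sqrt2 - Algebraic(2)).signum   // exactly 0, whereas
//   math.sqrt(2) * math.sqrt(2) - 2.0       // leaves ~4.4e-16 in Double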
package spire.math
import spire.algebra.{Eq, EuclideanRing, Field, IsReal, NRoot, Order, Ring, Sign, Signed}
import spire.algebra.Sign.{ Positive, Negative, Zero }
import java.math.{ MathContext, BigInteger, BigDecimal => BigDec }
import scala.math.{ ScalaNumber, ScalaNumericConversions }
import algebraic._
/**
 * A general Algebraic type. Can be used to represent real numbers and
 * approximate them on demand.
 */
@SerialVersionUID(0L)
final class Algebraic private (val expr: Expr[Algebraic])
extends ScalaNumber with ScalaNumericConversions
with RealLike[Algebraic]
with BMFSSBound[Algebraic]
with BigDecimalApprox[Algebraic]
with FPFilter[Algebraic]
with ConstantFolder[Algebraic]
with BubbleUpDivs[Algebraic]
with PrettyToString[Algebraic]
with Ordered[Algebraic]
with Serializable {
val coexpr: Coexpr[Algebraic] = Algebraic.RealCoexpr
// ugh
override def byteValue(): Byte = fpf.toLong map (_.toByte) getOrElse super.toByte
override def shortValue(): Short = fpf.toLong map (_.toShort) getOrElse super.toShort
override def equals(that: Any) = that match {
case that: Algebraic => (this - that).isZero
case that: Real => (this - Algebraic(that.toRational)).isZero
case that: Number => (this - Algebraic(that.toRational)).isZero
case that: Rational => (this - Algebraic(that)).isZero
case that: BigInt => isWhole && toBigInt == that
case that: Natural => isWhole && signum >= 0 && that == toBigInt
case that: SafeLong => isWhole && that == this
case that: Complex[_] => that == this
case that: Quaternion[_] => that == this
case that: BigDecimal => try {
toBigDecimal(that.mc) == that
} catch {
case ae: ArithmeticException => false
}
case _ => unifiedPrimitiveEquals(that)
}
override def hashCode: Int = if (isWhole && toBigInt == toLong) {
unifiedPrimitiveHashcode
} else {
val x = toBigDecimal(java.math.MathContext.DECIMAL64)
x.underlying.unscaledValue.hashCode + 23 * x.scale.hashCode + 17
}
}
object Algebraic extends AlgebraicInstances {
implicit def apply(n: Int): Algebraic = Expr(n)
implicit def apply(n: Long): Algebraic = Expr(n)
implicit def apply(n: BigInt): Algebraic = Expr(n)
implicit def apply(n: Rational): Algebraic = Expr(n)
implicit def apply(n: Double): Algebraic = Expr(n)
implicit def apply(n: BigDecimal): Algebraic = Expr(n)
implicit object RealCoexpr extends Coexpr[Algebraic] {
def expr(r: Algebraic): Expr[Algebraic] = r.expr
def coexpr(e: Expr[Algebraic]): Algebraic = new Algebraic(e)
}
}
trait AlgebraicInstances {
implicit final val AlgebraicAlgebra = new AlgebraicAlgebra
}
private[math] trait AlgebraicIsRing extends Ring[Algebraic] {
override def minus(a: Algebraic, b: Algebraic): Algebraic = a - b
def negate(a: Algebraic): Algebraic = -a
def one: Algebraic = Algebraic(1)
def plus(a: Algebraic, b: Algebraic): Algebraic = a + b
override def pow(a: Algebraic, b: Int): Algebraic = a pow b
override def times(a: Algebraic, b: Algebraic): Algebraic = a * b
def zero: Algebraic = Algebraic(0)
override def fromInt(n: Int): Algebraic = Algebraic(n)
}
private[math] trait AlgebraicIsEuclideanRing extends EuclideanRing[Algebraic] with AlgebraicIsRing {
def quot(a: Algebraic, b: Algebraic): Algebraic = a /~ b
def mod(a: Algebraic, b: Algebraic): Algebraic = a % b
def gcd(a: Algebraic, b: Algebraic): Algebraic = euclid(a, b)(Eq[Algebraic])
}
private[math] trait AlgebraicIsField extends Field[Algebraic] with AlgebraicIsEuclideanRing {
override def fromDouble(n: Double): Algebraic = Algebraic(n)
def div(a:Algebraic, b:Algebraic) = a / b
}
private[math] trait AlgebraicIsNRoot extends NRoot[Algebraic] {
def nroot(a: Algebraic, k: Int): Algebraic = a nroot k
def fpow(a:Algebraic, b:Algebraic) = sys.error("fixme")
}
private[math] trait AlgebraicOrder extends Order[Algebraic] {
override def eqv(x: Algebraic, y: Algebraic) = (x - y).isZero
override def neqv(x: Algebraic, y: Algebraic) = (x - y).isNonZero
def compare(x: Algebraic, y: Algebraic) = (x - y).signum
}
private[math] trait AlgebraicIsSigned extends Signed[Algebraic] {
override def sign(a: Algebraic): Sign = a.sign
def signum(a: Algebraic): Int = a.signum
def abs(a: Algebraic): Algebraic = a.abs
}
private[math] trait AlgebraicIsReal extends IsReal[Algebraic] with AlgebraicOrder with AlgebraicIsSigned {
def toDouble(x: Algebraic): Double = x.toDouble
def ceil(a:Algebraic) = if (a % 1 == 0) a else a + 1 - (a % 1)
def floor(a:Algebraic) = a - (a % 1)
def round(a:Algebraic) = {
val m = a % 1
if (m < 0.5) a - m else a + 1 - m
}
def isWhole(a:Algebraic) = a % 1 == 0
}
@SerialVersionUID(0L)
class AlgebraicAlgebra extends AlgebraicIsField with AlgebraicIsNRoot with AlgebraicIsReal with Serializable
|
lrytz/spire
|
core/src/main/scala/spire/math/Algebraic.scala
|
Scala
|
mit
| 5,770
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.examples.ml
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.ml.Pipeline
import org.apache.spark.ml.classification.LogisticRegression
import org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
import org.apache.spark.ml.feature.{HashingTF, Tokenizer}
import org.apache.spark.ml.tuning.{ParamGridBuilder, CrossValidator}
import org.apache.spark.mllib.linalg.Vector
import org.apache.spark.sql.{Row, SQLContext}
/**
* A simple example demonstrating model selection using CrossValidator.
* This example also demonstrates how Pipelines are Estimators.
*
* This example uses the [[LabeledDocument]] and [[Document]] case classes from
* [[SimpleTextClassificationPipeline]].
*
* Run with
* {{{
* bin/run-example ml.CrossValidatorExample
* }}}
*/
object CrossValidatorExample {
def main(args: Array[String]) {
val conf = new SparkConf().setAppName("CrossValidatorExample").setMaster("local[2]")
val sc = new SparkContext(conf)
val sqlContext = new SQLContext(sc)
import sqlContext.implicits._
// Prepare training documents, which are labeled.
val training = sc.parallelize(Seq(
LabeledDocument(0L, "a b c d e spark", 1.0),
LabeledDocument(1L, "b d", 0.0),
LabeledDocument(2L, "spark f g h", 1.0),
LabeledDocument(3L, "hadoop mapreduce", 0.0),
LabeledDocument(4L, "b spark who", 1.0),
LabeledDocument(5L, "g d a y", 0.0),
LabeledDocument(6L, "spark fly", 1.0),
LabeledDocument(7L, "was mapreduce", 0.0),
LabeledDocument(8L, "e spark program", 1.0),
LabeledDocument(9L, "a e c l", 0.0),
LabeledDocument(10L, "spark compile", 1.0),
LabeledDocument(11L, "hadoop software", 0.0)))
// Configure an ML pipeline, which consists of three stages: tokenizer, hashingTF, and lr.
val tokenizer = new Tokenizer()
.setInputCol("text")
.setOutputCol("words")
val hashingTF = new HashingTF()
.setInputCol(tokenizer.getOutputCol)
.setOutputCol("features")
val lr = new LogisticRegression()
.setMaxIter(10)
val pipeline = new Pipeline()
.setStages(Array(tokenizer, hashingTF, lr))
// We now treat the Pipeline as an Estimator, wrapping it in a CrossValidator instance.
// This will allow us to jointly choose parameters for all Pipeline stages.
// A CrossValidator requires an Estimator, a set of Estimator ParamMaps, and an Evaluator.
val crossval = new CrossValidator()
.setEstimator(pipeline)
.setEvaluator(new BinaryClassificationEvaluator)
// We use a ParamGridBuilder to construct a grid of parameters to search over.
// With 3 values for hashingTF.numFeatures and 2 values for lr.regParam,
// this grid will have 3 x 2 = 6 parameter settings for CrossValidator to choose from.
val paramGrid = new ParamGridBuilder()
.addGrid(hashingTF.numFeatures, Array(10, 100, 1000))
.addGrid(lr.regParam, Array(0.1, 0.01))
.build()
crossval.setEstimatorParamMaps(paramGrid)
crossval.setNumFolds(2) // Use 3+ in practice
// Run cross-validation, and choose the best set of parameters.
val cvModel = crossval.fit(training.toDF())
// Prepare test documents, which are unlabeled.
val test = sc.parallelize(Seq(
Document(4L, "spark i j k"),
Document(5L, "l m n"),
Document(6L, "mapreduce spark"),
Document(7L, "apache hadoop")))
// Make predictions on test documents. cvModel uses the best model found (lrModel).
cvModel.transform(test.toDF())
.select("id", "text", "probability", "prediction")
.collect()
.foreach { case Row(id: Long, text: String, prob: Vector, prediction: Double) =>
println(s"($id, $text) --> prob=$prob, prediction=$prediction")
}
sc.stop()
}
}
|
shenbaise/mltoy
|
src/main/scala/org/apache/spark/examples/ml/CrossValidatorExample.scala
|
Scala
|
apache-2.0
| 4,618
|
package com.greencatsoft.d3.selection
import scala.scalajs.js
import org.scalajs.dom.Node
@js.native
trait Stylable[A <: Node, B <: Selection[A, B]] extends js.Object {
def classed(name: String): Boolean = js.native
def classed(name: String, value: Boolean): B = js.native
// d3-selection-multi required
def classed(classes: js.Dictionary[Boolean]): B = js.native
def classed[T](provider: ElementIterator[A, T]): B = js.native
def style(name: String): String = js.native
def style(name: String, value: String): B = js.native
def style(name: String, value: String, priority: Int): B = js.native
def style[T](name: String, provider: ElementIterator[A, T]): B = js.native
def style[T](name: String, provider: ElementIterator[A, T], priority: Int): B = js.native
def style(values: js.Dictionary[String]): B = js.native
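  // Illustrative sketch mirroring the underlying d3 API (`sel` is a
  // hypothetical Selection value of a matching type):
  //
  //   sel.classed("active", true)                             // toggle a CSS class
  //   sel.style("fill", "steelblue")                          // set an inline style
  //   sel.classed(js.Dictionary("a" -> true, "b" -> false))   // needs d3-selection-multi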
}
|
greencatsoft/scalajs-d3
|
src/main/scala/com/greencatsoft/d3/selection/Stylable.scala
|
Scala
|
apache-2.0
| 852
|
package io.buoyant.linkerd
package protocol
import com.twitter.conversions.time._
import com.twitter.finagle.{Http => FinagleHttp, Status => _, http => _, _}
import com.twitter.finagle.buoyant.linkerd.Headers
import com.twitter.finagle.http.{param => _, _}
import com.twitter.finagle.http.Method._
import com.twitter.finagle.stats.{InMemoryStatsReceiver, NullStatsReceiver}
import com.twitter.finagle.tracing.{Annotation, BufferingTracer, NullTracer}
import com.twitter.util._
import io.buoyant.router.{Http, RoutingFactory}
import io.buoyant.router.http.MethodAndHostIdentifier
import io.buoyant.test.Awaits
import java.net.InetSocketAddress
import org.scalatest.{FunSuite, MustMatchers}
class HttpEndToEndTest extends FunSuite with Awaits with MustMatchers {
case class Downstream(name: String, server: ListeningServer) {
val address = server.boundAddress.asInstanceOf[InetSocketAddress]
val port = address.getPort
val dentry = Dentry(
Path.read(s"/svs/$name"),
NameTree.read(s"/$$/inet/127.1/$port")
)
}
object Downstream {
def mk(name: String)(f: Request=>Response): Downstream = {
val service = Service.mk { req: Request => Future(f(req)) }
val stack = FinagleHttp.server.stack.remove(Headers.Ctx.serverModule.role)
val server = FinagleHttp.server.withStack(stack)
.configured(param.Label(name))
.configured(param.Tracer(NullTracer))
.serve(":*", service)
Downstream(name, server)
}
def const(name: String, value: String, status: Status = Status.Ok): Downstream =
mk(name) { _ =>
val rsp = Response()
rsp.status = status
rsp.contentString = value
rsp
}
}
def upstream(server: ListeningServer) = {
val address = Address(server.boundAddress.asInstanceOf[InetSocketAddress])
val name = Name.Bound(Var.value(Addr.Bound(address)), address)
val stack = FinagleHttp.client.stack.remove(Headers.Ctx.clientModule.role)
FinagleHttp.client.withStack(stack)
.configured(param.Stats(NullStatsReceiver))
.configured(param.Tracer(NullTracer))
.newClient(name, "upstream").toService
}
def basicConfig(dtab: Dtab) =
s"""|routers:
|- protocol: http
| dtab: ${dtab.show}
| servers:
| - port: 0
|""".stripMargin
def annotationKeys(annotations: Seq[Annotation]): Seq[String] =
annotations.collect {
case Annotation.ClientSend() => "cs"
case Annotation.ClientRecv() => "cr"
case Annotation.ServerSend() => "ss"
case Annotation.ServerRecv() => "sr"
case Annotation.WireSend => "ws"
case Annotation.WireRecv => "wr"
case Annotation.BinaryAnnotation(k, _) if k == "l5d.success" => k
case Annotation.Message(m) if Seq("l5d.retryable", "l5d.failure").contains(m) => m
}
test("linking") {
val stats = NullStatsReceiver
val tracer = new BufferingTracer
def withAnnotations(f: Seq[Annotation] => Unit): Unit = {
f(tracer.iterator.map(_.annotation).toSeq)
tracer.clear()
}
val cat = Downstream.const("cat", "meow")
val dog = Downstream.const("dog", "woof")
val dtab = Dtab.read(s"""
/p/cat => /$$/inet/127.1/${cat.port} ;
/p/dog => /$$/inet/127.1/${dog.port} ;
/svc/felix => /p/cat ;
/svc/clifford => /p/dog ;
""")
val linker = Linker.Initializers(Seq(HttpInitializer)).load(basicConfig(dtab))
.configured(param.Stats(stats))
.configured(param.Tracer(tracer))
val router = linker.routers.head.initialize()
val server = router.servers.head.serve()
val client = upstream(server)
def get(host: String, path: String = "/")(f: Response => Unit): Unit = {
val req = Request()
req.host = host
req.uri = path
val rsp = await(client(req))
f(rsp)
}
try {
get("felix") { rsp =>
assert(rsp.status == Status.Ok)
assert(rsp.contentString == "meow")
val path = "/svc/felix"
val bound = s"/$$/inet/127.1/${cat.port}"
withAnnotations { anns =>
assert(annotationKeys(anns) == Seq("sr", "cs", "ws", "wr", "l5d.success", "cr", "ss"))
assert(anns.contains(Annotation.BinaryAnnotation("service", path)))
assert(anns.contains(Annotation.BinaryAnnotation("client", bound)))
assert(anns.contains(Annotation.BinaryAnnotation("residual", "/")))
()
}
}
get("ralph-machio") { rsp =>
assert(rsp.status == Status.BadGateway)
assert(rsp.headerMap.contains(Headers.Err.Key))
()
}
get("") { rsp =>
assert(rsp.status == Status.BadRequest)
assert(rsp.headerMap.contains(Headers.Err.Key))
()
}
// todo check stats
} finally {
await(client.close())
await(cat.server.close())
await(dog.server.close())
await(server.close())
await(router.close())
}
}
test("marks 5XX as failure by default") {
val stats = new InMemoryStatsReceiver
val tracer = NullTracer
val downstream = Downstream.mk("dog") {
case req if req.path == "/woof" =>
val rsp = Response()
rsp.status = Status.Ok
rsp.contentString = "woof"
rsp
case _ =>
val rsp = Response()
rsp.status = Status.InternalServerError
rsp
}
val label = s"$$/inet/127.1/${downstream.port}"
val dtab = Dtab.read(s"/svc/dog => /$label;")
val linker = Linker.Initializers(Seq(HttpInitializer)).load(basicConfig(dtab))
.configured(param.Stats(stats))
.configured(param.Tracer(tracer))
val router = linker.routers.head.initialize()
val server = router.servers.head.serve()
val client = upstream(server)
try {
val okreq = Request()
okreq.host = "dog"
okreq.uri = "/woof"
val okrsp = await(client(okreq))
assert(okrsp.status == Status.Ok)
assert(stats.counters.get(Seq("http", "server", "127.0.0.1/0", "requests")) == Some(1))
assert(stats.counters.get(Seq("http", "server", "127.0.0.1/0", "success")) == Some(1))
assert(stats.counters.get(Seq("http", "server", "127.0.0.1/0", "failures")) == None)
assert(stats.counters.get(Seq("http", "client", label, "requests")) == Some(1))
assert(stats.counters.get(Seq("http", "client", label, "success")) == Some(1))
assert(stats.counters.get(Seq("http", "client", label, "failures")) == None)
val errreq = Request()
errreq.host = "dog"
val errrsp = await(client(errreq))
assert(errrsp.status == Status.InternalServerError)
assert(stats.counters.get(Seq("http", "server", "127.0.0.1/0", "requests")) == Some(2))
assert(stats.counters.get(Seq("http", "server", "127.0.0.1/0", "success")) == Some(1))
assert(stats.counters.get(Seq("http", "server", "127.0.0.1/0", "failures")) == Some(1))
assert(stats.counters.get(Seq("http", "client", label, "requests")) == Some(2))
assert(stats.counters.get(Seq("http", "client", label, "success")) == Some(1))
assert(stats.counters.get(Seq("http", "client", label, "failures")) == Some(1))
} finally {
await(client.close())
await(downstream.server.close())
await(server.close())
await(router.close())
}
}
test("marks exceptions as failure by default") {
val stats = new InMemoryStatsReceiver
val tracer = NullTracer
val downstream = Downstream.mk("dog") { req => ??? }
val label = s"$$/inet/127.1/${downstream.port}"
val dtab = Dtab.read(s"/svc/dog => /$label;")
val linker = Linker.Initializers(Seq(HttpInitializer)).load(basicConfig(dtab))
.configured(param.Stats(stats))
.configured(param.Tracer(tracer))
val router = linker.routers.head.initialize()
val server = router.servers.head.serve()
val client = upstream(server)
// Just close the downstream right away to generate connection exceptions
await(downstream.server.close())
try {
val req = Request()
req.host = "dog"
val rsp = await(client(req))
assert(rsp.status == Status.BadGateway)
assert(stats.counters.get(Seq("http", "server", "127.0.0.1/0", "requests")) == Some(1))
assert(stats.counters.get(Seq("http", "server", "127.0.0.1/0", "success")) == None)
assert(stats.counters.get(Seq("http", "server", "127.0.0.1/0", "failures")) == Some(1))
assert(stats.counters.get(Seq("http", "service", "svc/dog", "requests")) == Some(1))
assert(stats.counters.get(Seq("http", "service", "svc/dog", "success")) == None)
assert(stats.counters.get(Seq("http", "service", "svc/dog", "failures")) == Some(1))
} finally {
await(client.close())
await(downstream.server.close())
await(server.close())
await(router.close())
}
}
val allMethods = Set[Method](Connect, Delete, Get, Head, Patch, Post, Put, Options, Trace)
val readMethods = Set[Method](Get, Head, Options, Trace)
val idempotentMethods = readMethods ++ Set[Method](Delete, Put)
def retryTest(kind: String, methods: Set[Method]): Unit = {
val stats = new InMemoryStatsReceiver
val tracer = new BufferingTracer
def withAnnotations(f: Seq[Annotation] => Unit): Unit = {
f(tracer.iterator.map(_.annotation).toSeq)
tracer.clear()
}
@volatile var failNext = false
val downstream = Downstream.mk("dog") { req =>
val rsp = Response()
rsp.status = if (failNext) Status.InternalServerError else Status.Ok
failNext = false
rsp
}
val label = s"$$/inet/127.1/${downstream.port}"
val dtab = Dtab.read(s"/svc/dog => /$label;")
val yaml =
s"""|routers:
|- protocol: http
| dtab: ${dtab.show}
| service:
| responseClassifier:
| kind: $kind
| servers:
| - port: 0
|""".stripMargin
val linker = Linker.load(yaml)
.configured(param.Stats(stats))
.configured(param.Tracer(tracer))
val router = linker.routers.head.initialize()
val server = router.servers.head.serve()
val client = upstream(server)
try {
// retryable request, fails and is retried
for (method <- methods) {
val req = Request()
req.method = method
req.host = "dog"
failNext = true
stats.clear()
val rsp = await(client(req))
assert(rsp.status == Status.Ok)
assert(stats.counters.get(Seq("http", "server", "127.0.0.1/0", "requests")) == Some(1))
assert(stats.counters.get(Seq("http", "server", "127.0.0.1/0", "success")) == Some(1))
assert(stats.counters.get(Seq("http", "server", "127.0.0.1/0", "failures")) == None)
assert(stats.counters.get(Seq("http", "client", label, "requests")) == Some(2))
assert(stats.counters.get(Seq("http", "client", label, "success")) == Some(1))
assert(stats.counters.get(Seq("http", "client", label, "failures")) == Some(1))
assert(stats.counters.get(Seq("http", "client", label, "status", "200")) == Some(1))
assert(stats.counters.get(Seq("http", "client", label, "status", "500")) == Some(1))
val name = "svc/dog"
assert(stats.counters.get(Seq("http", "service", name, "requests")) == Some(1))
assert(stats.counters.get(Seq("http", "service", name, "success")) == Some(1))
assert(stats.counters.get(Seq("http", "service", name, "failures")) == None)
assert(stats.stats.get(Seq("http", "service", name, "retries", "per_request")) == Some(Seq(1.0)))
assert(stats.counters.get(Seq("http", "service", name, "retries", "total")) == Some(1))
withAnnotations { anns =>
assert(annotationKeys(anns) == Seq("sr", "cs", "ws", "wr", "l5d.retryable", "cr", "cs", "ws", "wr", "l5d.success", "cr", "ss"))
()
}
}
// non-retryable request, fails and is not retried
for (method <- allMethods -- methods) {
val req = Request()
req.method = method
req.host = "dog"
failNext = true
stats.clear()
val rsp = await(client(req))
assert(rsp.status == Status.InternalServerError)
assert(stats.counters.get(Seq("http", "server", "127.0.0.1/0", "requests")) == Some(1))
assert(stats.counters.get(Seq("http", "server", "127.0.0.1/0", "success")) == None)
assert(stats.counters.get(Seq("http", "server", "127.0.0.1/0", "failures")) == Some(1))
assert(stats.counters.get(Seq("http", "client", label, "requests")) == Some(1))
assert(stats.counters.get(Seq("http", "client", label, "success")) == None)
assert(stats.counters.get(Seq("http", "client", label, "failures")) == Some(1))
assert(stats.counters.get(Seq("http", "client", label, "status", "200")) == None)
assert(stats.counters.get(Seq("http", "client", label, "status", "500")) == Some(1))
val name = "svc/dog"
assert(stats.counters.get(Seq("http", "service", name, "requests")) == Some(1))
assert(stats.counters.get(Seq("http", "service", name, "success")) == None)
assert(stats.counters.get(Seq("http", "service", name, "failures")) == Some(1))
assert(stats.stats.get(Seq("http", "service", name, "retries", "per_request")) == Some(Seq(0.0)))
assert(!stats.counters.contains(Seq("http", "service", name, "retries", "total")))
withAnnotations { anns =>
assert(annotationKeys(anns) == Seq("sr", "cs", "ws", "wr", "l5d.failure", "cr", "ss"))
()
}
}
} finally {
await(client.close())
await(downstream.server.close())
await(server.close())
await(router.close())
}
}
test("retries retryableIdempotent5XX") {
retryTest("io.l5d.http.retryableIdempotent5XX", idempotentMethods)
}
test("retries retryablRead5XX") {
retryTest("io.l5d.http.retryableRead5XX", readMethods)
}
test("retries nonRetryable5XX") {
retryTest("io.l5d.http.nonRetryable5XX", Set.empty)
}
val dtabReadHeaders = Seq("l5d-dtab", "l5d-ctx-dtab")
val dtabWriteHeader = "l5d-ctx-dtab"
for (readHeader <- dtabReadHeaders) test(s"dtab read from $readHeader header") {
val stats = NullStatsReceiver
val tracer = new BufferingTracer
@volatile var headers: HeaderMap = null
val dog = Downstream.mk("dog") { req =>
headers = req.headerMap
Response()
}
val dtab = Dtab.read(s"""
/svc/* => /$$/inet/127.1/${dog.port} ;
""")
val linker = Linker.Initializers(Seq(HttpInitializer)).load(basicConfig(dtab))
.configured(param.Stats(stats))
.configured(param.Tracer(tracer))
val router = linker.routers.head.initialize()
val server = router.servers.head.serve()
val client = upstream(server)
val req = Request()
req.host = "dog"
req.headerMap.set(readHeader, "/a=>/b")
await(client(req))
for (header <- dtabReadHeaders) {
if (header == dtabWriteHeader) assert(headers(header) == "/a=>/b")
else assert(!headers.contains(header))
}
assert(!headers.contains("dtab-local"))
}
test("dtab-local header is ignored") {
val stats = NullStatsReceiver
val tracer = new BufferingTracer
@volatile var headers: HeaderMap = null
val dog = Downstream.mk("dog") { req =>
headers = req.headerMap
Response()
}
val dtab = Dtab.read(s"""
/svc/* => /$$/inet/127.1/${dog.port} ;
""")
val linker = Linker.Initializers(Seq(HttpInitializer)).load(basicConfig(dtab))
.configured(param.Stats(stats))
.configured(param.Tracer(tracer))
val router = linker.routers.head.initialize()
val server = router.servers.head.serve()
val client = upstream(server)
val req = Request()
req.host = "dog"
req.headerMap.set("dtab-local", "/a=>/b")
await(client(req))
assert(headers("dtab-local") == "/a=>/b")
assert(!headers.contains(dtabWriteHeader))
}
test("with clearContext") {
val downstream = Downstream.mk("dog") { req =>
val rsp = Response()
rsp.contentString = req.headerMap.collect {
case (k, v) if k.startsWith("l5d-") => s"$k=$v"
}.mkString(",")
rsp
}
val localDtab = "/foo=>/bar"
val req = Request()
req.host = "test"
req.headerMap("l5d-dtab") = localDtab
req.headerMap("l5d-ctx-thing") = "yoooooo"
val yaml =
s"""|routers:
|- protocol: http
| dtab: /svc/* => /$$/inet/127.1/${downstream.port}
| servers:
| - port: 0
| clearContext: true
|""".stripMargin
val linker = Linker.load(yaml)
val router = linker.routers.head.initialize()
val s = router.servers.head.serve()
val body =
try {
val c = upstream(s)
try await(c(req)).contentString
finally await(c.close())
} finally await(s.close())
val headers =
body.split(",").map { kv =>
val Array(k, v) = kv.split("=", 2)
k -> v
}.toMap
assert(headers.keySet == Set(
"l5d-dst-service",
"l5d-dst-client",
"l5d-reqid",
"l5d-ctx-trace"
))
}
test("clearContext will remove linkerd error headers and body") {
val yaml =
s"""|routers:
|- protocol: http
| dtab: /svc/* => /$$/inet/127.1/1234
| servers:
| - port: 0
| clearContext: true
|""".stripMargin
val linker = Linker.load(yaml)
val router = linker.routers.head.initialize()
val s = router.servers.head.serve()
val req = Request()
req.host = "test"
val c = upstream(s)
try {
val resp = await(c(req))
      resp.headerMap.keys must contain noneOf ("l5d-err", "l5d-success-class", "l5d-retryable")
resp.contentString must be("")
} finally {
await(c.close())
await(s.close())
}
}
test("without clearContext") {
val downstream = Downstream.mk("dog") { req =>
val rsp = Response()
rsp.contentString = req.headerMap.collect {
case (k, v) if k.startsWith("l5d-") => s"$k=$v"
}.mkString(",")
rsp
}
val localDtab = "/foo=>/bar"
val req = Request()
req.host = "test"
req.headerMap("l5d-dtab") = localDtab
req.headerMap("l5d-ctx-thing") = "yoooooo"
val yaml =
s"""|routers:
|- protocol: http
| dtab: /svc/* => /$$/inet/127.1/${downstream.port}
| servers:
| - port: 0
|""".stripMargin
val linker = Linker.load(yaml)
val router = linker.routers.head.initialize()
val s = router.servers.head.serve()
val body =
try {
val c = upstream(s)
try await(c(req)).contentString
finally await(c.close())
} finally await(s.close())
val headers =
body.split(",").map { kv =>
val Array(k, v) = kv.split("=", 2)
k -> v
}.toMap
assert(headers.keySet == Set(
"l5d-dst-service",
"l5d-dst-client",
"l5d-reqid",
"l5d-ctx-dtab",
"l5d-ctx-trace",
"l5d-ctx-thing"
))
assert(headers.get("l5d-ctx-dtab") == Some(localDtab))
}
}
|
pawelprazak/linkerd
|
linkerd/protocol/http/src/e2e/scala/io/buoyant/linkerd/protocol/HttpEndToEndTest.scala
|
Scala
|
apache-2.0
| 19,223
|
package views.html
import play.templates._
import play.templates.TemplateMagic._
import play.api.templates._
import play.api.templates.PlayMagic._
import models._
import controllers._
import java.lang._
import java.util._
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
import play.api.i18n._
import play.core.j.PlayMagicForJava._
import play.mvc._
import play.data._
import play.api.data.Field
import play.mvc.Http.Context.Implicit._
import views.html._
/**/
object glycodigestBuilder extends BaseScalaTemplate[play.api.templates.HtmlFormat.Appendable,Format[play.api.templates.HtmlFormat.Appendable]](play.api.templates.HtmlFormat) with play.api.templates.Template1[String,play.api.templates.HtmlFormat.Appendable] {
/**/
def apply/*1.2*/(str: String):play.api.templates.HtmlFormat.Appendable = {
_display_ {import helper._
Seq[Any](format.raw/*1.15*/("""
"""),format.raw/*3.1*/("""
"""),_display_(Seq[Any](/*5.2*/main/*5.6*/ {_display_(Seq[Any](format.raw/*5.8*/("""
<style type="text/css">
#wrapperpin """),format.raw/*8.13*/("""{"""),format.raw/*8.14*/("""
width: 90%;
max-width: 1100px;
min-width: 800px;
margin: 50px auto;
"""),format.raw/*13.1*/("""}"""),format.raw/*13.2*/("""
#columnspin """),format.raw/*15.13*/("""{"""),format.raw/*15.14*/("""
-webkit-column-count: 3;
-webkit-column-gap: 10px;
-webkit-column-fill: auto;
-moz-column-count: 3;
-moz-column-gap: 10px;
-moz-column-fill: auto;
column-count: 3;
column-gap: 15px;
column-fill: auto;
"""),format.raw/*25.1*/("""}"""),format.raw/*25.2*/("""
.pin """),format.raw/*26.6*/("""{"""),format.raw/*26.7*/("""
display: inline-block;
background: #FEFEFE;
border: 2px solid #FAFAFA;
box-shadow: 0 1px 2px rgba(34, 25, 25, 0.4);
margin: 0 2px 15px;
-webkit-column-break-inside: avoid;
-moz-column-break-inside: avoid;
column-break-inside: avoid;
padding: 15px;
padding-bottom: 5px;
background: -webkit-linear-gradient(45deg, #FFF, #F9F9F9);
opacity: 1;
-webkit-transition: all .2s ease;
-moz-transition: all .2s ease;
-o-transition: all .2s ease;
transition: all .2s ease;
"""),format.raw/*43.1*/("""}"""),format.raw/*43.2*/("""
.pin img """),format.raw/*45.10*/("""{"""),format.raw/*45.11*/("""
width: 100%;
border-bottom: 1px solid #ccc;
padding-bottom: 15px;
margin-bottom: 5px;
"""),format.raw/*50.1*/("""}"""),format.raw/*50.2*/("""
.pin2 p """),format.raw/*52.9*/("""{"""),format.raw/*52.10*/("""
font: 12px/18px Arial, sans-serif;
color: #333;
margin: 0;
"""),format.raw/*56.1*/("""}"""),format.raw/*56.2*/("""
#columns:hover .pin:not(:hover) """),format.raw/*60.33*/("""{"""),format.raw/*60.34*/("""
opacity: 0.4;
"""),format.raw/*62.1*/("""}"""),format.raw/*62.2*/("""
</style>
<script>
$(document).ready(function() """),format.raw/*67.38*/("""{"""),format.raw/*67.39*/("""
$("#e20").select2("""),format.raw/*69.27*/("""{"""),format.raw/*69.28*/("""
tags:["ABS", "AMF", "BKF", "BTG", "GUH", "JBM", "NAN1", "SPG" ],
tokenSeparators: [",", " "]"""),format.raw/*71.40*/("""}"""),format.raw/*71.41*/(""");
"""),format.raw/*72.9*/("""}"""),format.raw/*72.10*/(""");
</script>
<ul class="breadcrumb">
<li><i class="icon-home" ></i><a href="/"> UniCarbKB</a> <span class="divider"></span></li>
<li class="active"><i class="icon-map-marker" ></i> GlycoDigest<span class="divider"></span></li>
</ul>
<div class="page-header row-fluid">
<h1 id="homeTitle">GlycoDigest</h1>
<h4 class="subheader">A tool to predict exoglycosidase digestions</h4>
</div>
<div id="actions">
<div>
<img src="http://www.glycodigest.org:8080/eurocarb/get_sugar_image.action?download=false&scale=1.0&opaque=false&outputType=png¬ation=cfglink&inputType=glycoct_condensed&sequences="""),_display_(Seq[Any](/*87.207*/helper/*87.213*/.urlEncode(str))),format.raw/*87.228*/("""" />
</div>
<p>Use the search box below to select the panel of exoglycosidase to digest the structure shown:</p>
<form class="form-search" action="/glycodigestBuilder/test/digest" method="GET">
<div id="selection" class="row-fluid">
<input name=digest id="e20" id="listBox" class="span4"></input>
<button type="submit" class="btn btn-primary">Digest</button>
</div>
</form>
</div>
"""),_display_(Seq[Any](/*99.8*/views/*99.13*/.html.footerunicarb.footerunicarb())),format.raw/*99.48*/("""
</section>
""")))})),format.raw/*104.2*/("""
"""))}
}
def render(str:String): play.api.templates.HtmlFormat.Appendable = apply(str)
def f:((String) => play.api.templates.HtmlFormat.Appendable) = (str) => apply(str)
def ref: this.type = this
}
/*
-- GENERATED --
DATE: Mon Jun 02 10:52:56 EST 2014
SOURCE: /Users/matthew/git/glycomics_working/app/views/glycodigestBuilder.scala.html
HASH: 79896e3587ea8045cc161fd3515e6ee09cc38839
MATRIX: 787->1|910->14|937->32|974->35|985->39|1023->41|1088->79|1116->80|1216->153|1244->154|1286->168|1315->169|1553->380|1581->381|1614->387|1642->388|2146->865|2174->866|2213->877|2242->878|2360->969|2388->970|2425->980|2454->981|2544->1044|2572->1045|2636->1081|2665->1082|2708->1098|2736->1099|2822->1157|2851->1158|2917->1196|2946->1197|3091->1314|3120->1315|3158->1326|3187->1327|3862->1965|3878->1971|3916->1986|4389->2424|4403->2429|4460->2464|4522->2494
LINES: 26->1|30->1|31->3|33->5|33->5|33->5|36->8|36->8|41->13|41->13|43->15|43->15|53->25|53->25|54->26|54->26|71->43|71->43|73->45|73->45|78->50|78->50|80->52|80->52|84->56|84->56|88->60|88->60|90->62|90->62|95->67|95->67|97->69|97->69|99->71|99->71|100->72|100->72|115->87|115->87|115->87|127->99|127->99|127->99|132->104
-- GENERATED --
*/
|
alternativeTime/unicarb_static
|
target/scala-2.10/src_managed/main/views/html/glycodigestBuilder.template.scala
|
Scala
|
gpl-3.0
| 5,935
|
/*
* Copyright 2017-2018 47 Degrees, LLC. <http://www.47deg.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package freestyle
import scala.util._
import scala.concurrent._
package object async {
/** An asynchronous computation that might fail. **/
type Proc[A] = (Either[Throwable, A] => Unit) => Unit
/** The context required to run an asynchronous computation. **/
trait AsyncContext[M[_]] {
def runAsync[A](fa: Proc[A]): M[A]
}
def future2AsyncM[F[_], A](
future: Future[A])(implicit AC: AsyncContext[F], E: ExecutionContext): F[A] =
AC.runAsync { cb =>
E.execute(new Runnable {
def run(): Unit = future.onComplete {
case Failure(e) => cb(Left(e))
case Success(r) => cb(Right(r))
}
})
}
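  // Illustrative sketch, assuming an AsyncContext[Future] (e.g. the one from
  // `Implicits` below) and an ExecutionContext are in implicit scope:
  //
  //   val proc: Proc[Int] = cb => cb(Right(42))
  //   val asFuture: Future[Int] = implicitly[AsyncContext[Future]].runAsync(proc)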
trait Implicits {
implicit def futureAsyncContext(implicit ec: ExecutionContext) = new AsyncContext[Future] {
def runAsync[A](fa: Proc[A]): Future[A] = {
val p = Promise[A]()
ec.execute(new Runnable {
def run() = fa(_.fold(p.tryFailure, p.trySuccess))
})
p.future
}
}
}
trait Syntax {
implicit def futureOps[A](f: Future[A]): FutureOps[A] = new FutureOps(f)
final class FutureOps[A](f: Future[A]) {
@deprecated("Use unsafeTo instead.", "0.6.0")
def to[F[_]](implicit AC: AsyncContext[F], E: ExecutionContext): F[A] = future2AsyncM[F, A](f)
def unsafeTo[F[_]](implicit AC: AsyncContext[F], E: ExecutionContext): F[A] = future2AsyncM[F, A](f)
}
}
}
|
frees-io/freestyle
|
modules/async/async/shared/src/main/scala/async.scala
|
Scala
|
apache-2.0
| 2,052
|
package jsonsong.spider.common
import com.mongodb.MongoClientURI
import com.typesafe.config.{Config, ConfigFactory}
import org.junit.Test
import org.springframework.data.mongodb.core.SimpleMongoDbFactory
class ConfigTest {
@Test def Test1 {
val uriStr: String = ConfigHelper.getProperty("database.uri", "")
val db = new SimpleMongoDbFactory(new MongoClientURI(uriStr))
val str = "" ;
}
}
|
178220709/HelloScala
|
ScalaSpider/src/test/Scala/jsonsong/spider/common/ConfigTest.scala
|
Scala
|
apache-2.0
| 409
|
package org.mms.demo
class PPP {
  val z = System.currentTimeMillis()
}
object X extends Cloneable {
  println("Creation")
  val x = new PPP()
  def copy(): X.type = super.clone().asInstanceOf[X.type]
}
object Test extends App {
  val m = X.copy()
  val m1 = X
  println(m + ":" + m1)
}
|
petrochenko-pavel-a/mms.core
|
org.mms.core/src/main/scala/org/mms/demo/Test.scala
|
Scala
|
epl-1.0
| 287
|
/*
* Copyright (c) 2014-2015 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics
package snowplow
package enrich
package common
package utils
package shredder
// Snowplow Common Enrich
import outputs.EnrichedEvent
// Specs2
import org.specs2.Specification
class ShredderSpec extends Specification { def is = s2"""
This is a specification to test the Shredder functionality
makePartialHierarchy should initialize a partial TypeHierarchy $e1
shred should extract the JSONs from an unstructured event with multiple contexts $e2
"""
val EventId = "f81d4fae-7dec-11d0-a765-00a0c91e6bf6"
val CollectorTimestamp = "2014-04-29 09:00:54.000"
implicit val resolver = SpecHelpers.IgluResolver
def e1 =
Shredder.makePartialHierarchy(EventId, CollectorTimestamp) must_==
TypeHierarchy(
rootId = EventId,
rootTstamp = CollectorTimestamp,
refRoot = "events",
refTree = List("events"),
refParent = "events")
def e2 = {
val event = {
val e = new EnrichedEvent()
e.event_id = EventId
e.collector_tstamp = CollectorTimestamp
e.unstruct_event = """{"schema":"iglu:com.snowplowanalytics.snowplow/unstruct_event/jsonschema/1-0-0","data":{"schema":"iglu:com.snowplowanalytics.snowplow/link_click/jsonschema/1-0-0","data":{"targetUrl":"http://snowplowanalytics.com/blog/page2","elementClasses":["next"]}}}"""
e.contexts = """{"schema":"iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-0","data":[{"schema":"iglu:org.schema/WebPage/jsonschema/1-0-0","data":{"datePublished":"2014-07-23T00:00:00Z","author":"Jonathan Almeida","inLanguage":"en-US","genre":"blog","breadcrumb":["blog","releases"],"keywords":["snowplow","analytics","java","jvm","tracker"]}},{"schema":"iglu:org.schema/WebPage/jsonschema/1-0-0","data":{"datePublished":"2014-07-23T00:00:00Z","author":"Jonathan Almeida","inLanguage":"en-US","genre":"blog","breadcrumb":["blog","releases"],"keywords":["snowplow","analytics","java","jvm","tracker"]}}]}"""
e
}
// TODO: check actual contents (have already confirmed in REPL)
Shredder.shred(event).toOption.get must have size(3)
}
}
|
haensel-ams/snowplow
|
3-enrich/scala-common-enrich/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/utils/shredder/ShredderSpec.scala
|
Scala
|
apache-2.0
| 2,845
|
package athena.connector
import java.net.InetAddress
import athena.data.DataType
import athena.client.Row
private[connector] object ClusterInfo {
case class ClusterMetadata(name: Option[String],
partitioner: Option[String],
hosts: Map[InetAddress, Host])
case class Host(address: InetAddress, datacenter: Option[String] = None, rack: Option[String] = None)
case class IndexMetadata(name: String, customClassName: Option[String] = None)
case class ColumnMetadata(name: String, dataType: DataType, index: Option[IndexMetadata])
case class TableMetadata(name: String, partitionKey: Seq[ColumnMetadata], clusteringColumns: Seq[ColumnMetadata],
columns: Seq[ColumnMetadata])
object TableMetadata {
private val CF_NAME = "columnfamily_name"
private val KEY_VALIDATOR = "key_validator"
private val COMPARATOR = "comparator"
def fromRow(row: Row, columnRows: Seq[Row]): TableMetadata = {
val name = row.value(CF_NAME).as[String]
//TODO: Implement this crap
TableMetadata(name, Seq(), Seq(), Seq())
}
}
case class KeyspaceMetadata(name: String,
durableWrites: Boolean,
tables: Seq[TableMetadata],
replicationStrategy: Option[ReplicationStrategy])
}
|
vast-engineering/athena
|
src/main/scala/athena/connector/ClusterInfo.scala
|
Scala
|
apache-2.0
| 1,402
|
package 练习06
object Runner {
class 类型匹配[T]
object 类型匹配 {
def i1[T1, T2]: (类型匹配[T1], 类型匹配[T2]) = (new 类型匹配[T1], new 类型匹配[T2])
def i2[T](i: (类型匹配[T], 类型匹配[T])): List[类型匹配[T]] = List(i._1, i._2)
}
class Item1
class Item2
class Item3
class Item4
class Item5
class Item6
class Item7
class Item8
class Item9
class Item10
class Item11
class Item12
class Item13
type 被除数值1 = 被除数Positive[
被除数Positive[
被除数Positive[被除数Positive[被除数Positive[被除数Positive[被除数Positive[被除数Positive[被除数Zero, Item1], Item2], Item3], Item4], Item5], Item6],
Item7
],
Item8
]
class 除数值2 extends 除数Positive[除数Positive[除数值3, Item9], Item10]
class 除数值3 extends 除数Zero[除数值2]
class 除数值4 extends 除数Positive[除数Positive[除数Positive[除数值5, Item9], Item10], Item11]
class 除数值5 extends 除数Zero[除数值4]
class 除数值6 extends 除数Positive[除数Positive[除数Positive[除数Positive[除数值7, Item9], Item10], Item11], Item12]
class 除数值7 extends 除数Zero[除数值6]
class 除数值8 extends 除数Positive[除数Positive[除数Positive[除数Positive[除数Positive[除数值9, Item9], Item10], Item11], Item12], Item13]
class 除数值9 extends 除数Zero[除数值8]
类型匹配.i2(类型匹配.i1[除数值2#除[被除数值1], 自然数Positive[自然数Positive[自然数Positive[自然数Positive[自然数Zero]]]]])
类型匹配.i2(类型匹配.i1[除数值4#除[被除数值1], 自然数Positive[自然数Positive[自然数Zero]]])
类型匹配.i2(类型匹配.i1[除数值6#除[被除数值1], 自然数Positive[自然数Positive[自然数Zero]]])
类型匹配.i2(类型匹配.i1[除数值8#除[被除数值1], 自然数Positive[自然数Zero]])
}
|
djx314/ubw
|
a28-练习/src/main/scala/练习06/Runner.scala
|
Scala
|
bsd-3-clause
| 1,947
|
/*
* Nailed, a Minecraft PvP server framework
* Copyright (C) jk-5 <http://github.com/jk-5/>
* Copyright (C) Nailed team and contributors <http://github.com/nailed/>
*
* This program is free software: you can redistribute it and/or modify it
* under the terms of the MIT License.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the MIT License along with
* this program. If not, see <http://opensource.org/licenses/MIT/>.
*/
package jk_5.nailed.plugins.internal
import jk_5.nailed.api.plugin.Plugin
/**
* No description given
*
* @author jk-5
*/
class NailedInternalPlugin extends Plugin {
override def onEnable(){
this.getPluginManager.registerCommand(this, CommandGoto)
this.getPluginManager.registerCommand(this, CommandTps)
this.getPluginManager.registerCommand(this, CommandGamerule)
this.getPluginManager.registerCommand(this, CommandLoadmap)
this.getPluginManager.registerCommand(this, CommandTeam)
this.getPluginManager.registerCommand(this, CommandStatemitter)
this.getPluginManager.registerCommand(this, CommandTime)
this.getPluginManager.registerCommand(this, CommandHelp)
this.getPluginManager.registerCommand(this, CommandEffect)
this.getPluginManager.registerCommand(this, CommandPos)
this.getPluginManager.registerCommand(this, CommandExperience)
this.getPluginManager.registerCommand(this, CommandAnalog)
this.getPluginManager.registerCommand(this, new RemovedCommand("defaultgamemode"))
this.getPluginManager.registerCommand(this, new RemovedCommand("debug"))
this.getPluginManager.registerCommand(this, new RemovedCommand("setworldspawn"))
this.getPluginManager.registerCommand(this, new RemovedCommand("save-all"))
this.getPluginManager.registerCommand(this, new RemovedCommand("save-on"))
this.getPluginManager.registerCommand(this, new RemovedCommand("save-off"))
}
}
|
nailed/nailed
|
cmdplugin/internal/NailedInternalPlugin.scala
|
Scala
|
mit
| 2,129
|
package jgo.tools.compiler
package interm
import instr._
package object codeseq {
implicit def fromInstr(instr: Instr): CodeBuilder = {
val ls = Code(instr)
new CodeBuilder(ls, ls)
}
implicit def fromOption(opt: Option[CodeBuilder]): CodeBuilder =
opt getOrElse CodeBuilder()
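  // Illustrative sketch: with these conversions in scope, a bare Instr or an
  // Option[CodeBuilder] can appear wherever a CodeBuilder is expected
  // (`someInstr` is a hypothetical Instr value):
  //
  //   val built: CodeBuilder = someInstr                    // via fromInstr
  //   val orEmpty: CodeBuilder = Option.empty[CodeBuilder]  // via fromOption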
}
|
thomasmodeneis/jgo
|
src/src/main/scala/jgo/tools/compiler/interm/codeseq/package.scala
|
Scala
|
gpl-3.0
| 301
|
package pl.combosolutions.backup.psm.elevation.posix.linux
import org.specs2.matcher.Scope
import org.specs2.mock.Mockito
import org.specs2.mutable.Specification
import pl.combosolutions.backup.psm.ImplementationPriority._
import pl.combosolutions.backup.psm.systems._
import pl.combosolutions.backup.{ Cleaner, Result }
import pl.combosolutions.backup.psm.commands.TestCommand
import pl.combosolutions.backup.psm.elevation.{ RemoteElevatorCommand, RemoteElevatorProgram, TestElevationFacadeComponent }
import pl.combosolutions.backup.psm.programs.{ GenericProgram, Program }
import pl.combosolutions.backup.test.Tags.UnitTest
class LinuxElevationSpec extends Specification with Mockito {
"GKSudoElevationService" should {
"correctly calculate availability" in new GKSudoResolutionTestContext {
// given
// when
val availabilityForGnome = serviceForGnome.elevationService.elevationAvailable
val availabilityForKde = serviceForKde.elevationService.elevationAvailable
val availabilityForShell = serviceForShell.elevationService.elevationAvailable
val availabilityForWindows = serviceForWindows.elevationService.elevationAvailable
// then
availabilityForGnome mustEqual true
availabilityForKde mustEqual true
availabilityForShell mustEqual false
availabilityForWindows mustEqual false
} tag UnitTest
"correctly calculate priority" in new GKSudoResolutionTestContext {
// given
// when
val priorityForGnome = serviceForGnome.elevationService.elevationPriority
val priorityForKde = serviceForKde.elevationService.elevationPriority
val priorityForShell = serviceForShell.elevationService.elevationPriority
val priorityForWindows = serviceForWindows.elevationService.elevationPriority
// then
priorityForGnome mustEqual Preferred
priorityForKde mustEqual Allowed
priorityForShell mustEqual NotAllowed
priorityForWindows mustEqual NotAllowed
} tag UnitTest
"elevate directly using DirectElevationProgram" in new GKSudoTestContext {
// given
val expectedName = "gksudo"
val expectedArgs = List("-m", "BackupDSL elevation runner", "--") ++ List(program.name) ++ program.arguments
// when
val result: Program[GenericProgram] = service elevateDirect program
val resultAsGeneric = result.asGeneric
// then
resultAsGeneric.name mustEqual expectedName
resultAsGeneric.arguments mustEqual expectedArgs
} tag UnitTest
"elevate remotely using RemoteElevationCommand" in new GKSudoTestContext {
// given
val expected = command
val cleaner = new Cleaner {}
// when
val result = service elevateRemote (command, cleaner)
val elevated = result.asInstanceOf[RemoteElevatorCommand[TestCommand]].command
// then
elevated mustEqual expected
} tag UnitTest
"elevate remotely using RemoteElevationProgram" in new GKSudoTestContext {
// given
val expected = program
val cleaner = new Cleaner {}
// when
val result = service elevateRemote (program, cleaner)
val elevated = result.asInstanceOf[RemoteElevatorProgram[GenericProgram]].program
// then
elevated mustEqual expected
} tag UnitTest
}
"KDESudoElevationService" should {
"correctly calculate availability" in new KDESudoResolutionTestContext {
// given
// when
val availabilityForGnome = serviceForGnome.elevationService.elevationAvailable
val availabilityForKde = serviceForKde.elevationService.elevationAvailable
      val availabilityForShell = serviceForShell.elevationService.elevationAvailable
// then
availabilityForGnome mustEqual true
availabilityForKde mustEqual true
      availabilityForShell mustEqual false
} tag UnitTest
"correctly calculate priority" in new KDESudoResolutionTestContext {
// given
// when
val priorityForGnome = serviceForGnome.elevationService.elevationPriority
val priorityForKde = serviceForKde.elevationService.elevationPriority
      val priorityForShell = serviceForShell.elevationService.elevationPriority
// then
priorityForGnome mustEqual Allowed
priorityForKde mustEqual Preferred
      priorityForShell mustEqual NotAllowed
} tag UnitTest
}
trait GKSudoResolutionTestContext extends Scope {
val serviceForGnome = new TestGKSudoElevationServiceComponent(DebianSystem, "gnome")
val serviceForKde = new TestGKSudoElevationServiceComponent(DebianSystem, "kde")
val serviceForShell = new TestGKSudoElevationServiceComponent(DebianSystem, "")
val serviceForWindows = new TestGKSudoElevationServiceComponent(WindowsXPSystem, "")
serviceForGnome.availableCommands.gkSudo returns true
serviceForKde.availableCommands.gkSudo returns true
serviceForShell.availableCommands.gkSudo returns false
}
trait GKSudoTestContext extends Scope {
// format: OFF
val component = new GKSudoElevationServiceComponent
with TestElevationFacadeComponent
with TestOperatingSystemComponent
with TestAvailableCommandsComponent
// format: ON
val service = component.elevationService
val command = TestCommand(Result(0, List(), List()))
val program = GenericProgram("test-name", List("test-args"))
}
class TestGKSudoElevationServiceComponent(
override val operatingSystem: OperatingSystem,
override val currentDesktopSession: String
) extends GKSudoElevationServiceComponent
with TestElevationFacadeComponent
with OperatingSystemComponent
with TestAvailableCommandsComponent
trait KDESudoResolutionTestContext extends Scope {
val serviceForGnome = new TestKDESudoElevationServiceComponent(DebianSystem, "gnome")
val serviceForKde = new TestKDESudoElevationServiceComponent(DebianSystem, "kde")
val serviceForShell = new TestKDESudoElevationServiceComponent(DebianSystem, "")
val serviceForWindows = new TestKDESudoElevationServiceComponent(WindowsXPSystem, "")
serviceForGnome.availableCommands.kdeSudo returns true
serviceForKde.availableCommands.kdeSudo returns true
serviceForShell.availableCommands.kdeSudo returns false
}
class TestKDESudoElevationServiceComponent(
override val operatingSystem: OperatingSystem,
override val currentDesktopSession: String
) extends KDESudoElevationServiceComponent
with TestElevationFacadeComponent
with OperatingSystemComponent
with TestAvailableCommandsComponent
}
|
MateuszKubuszok/BackupDSL
|
modules/psm/src/test/scala/pl/combosolutions/backup/psm/elevation/posix/linux/LinuxElevationSpec.scala
|
Scala
|
mit
| 6,592
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan.nodes.dataset
import org.apache.calcite.plan._
import org.apache.calcite.rel.RelNode
import org.apache.calcite.rel.metadata.RelMetadataQuery
import org.apache.flink.api.java.DataSet
import org.apache.flink.table.api.BatchTableEnvironment
import org.apache.flink.table.plan.nodes.PhysicalTableSourceScan
import org.apache.flink.table.plan.schema.TableSourceTable
import org.apache.flink.table.sources.{BatchTableSource, TableSource}
import org.apache.flink.types.Row
/** Flink RelNode to read data from an external source defined by a [[BatchTableSource]]. */
class BatchTableSourceScan(
cluster: RelOptCluster,
traitSet: RelTraitSet,
table: RelOptTable,
tableSource: BatchTableSource[_])
extends PhysicalTableSourceScan(cluster, traitSet, table, tableSource)
with BatchScan {
override def computeSelfCost(planner: RelOptPlanner, metadata: RelMetadataQuery): RelOptCost = {
val rowCnt = metadata.getRowCount(this)
planner.getCostFactory.makeCost(rowCnt, rowCnt, rowCnt * estimateRowSize(getRowType))
}
override def copy(traitSet: RelTraitSet, inputs: java.util.List[RelNode]): RelNode = {
new BatchTableSourceScan(
cluster,
traitSet,
getTable,
tableSource
)
}
override def copy(
traitSet: RelTraitSet,
newTableSource: TableSource[_])
: PhysicalTableSourceScan = {
new BatchTableSourceScan(
cluster,
traitSet,
getTable,
newTableSource.asInstanceOf[BatchTableSource[_]]
)
}
override def translateToPlan(tableEnv: BatchTableEnvironment): DataSet[Row] = {
val config = tableEnv.getConfig
val inputDataSet = tableSource.getDataSet(tableEnv.execEnv).asInstanceOf[DataSet[Any]]
convertToInternalRow(inputDataSet, new TableSourceTable(tableSource), config)
}
}
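// Hedged illustration (added for this edit, not part of the original file): this
// scan node is what the planner produces after a BatchTableSource is registered,
// roughly along the lines of:
//   tableEnv.registerTableSource("orders", csvSource)
//   val t = tableEnv.scan("orders")
// The names are hypothetical and the registration API varies across Flink versions.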
|
fanzhidongyzby/flink
|
flink-libraries/flink-table/src/main/scala/org/apache/flink/table/plan/nodes/dataset/BatchTableSourceScan.scala
|
Scala
|
apache-2.0
| 2,638
|
package com.yetu.play.authenticator
import com.google.inject.Guice
import com.mohiva.play.silhouette.api.{Logger, SecuredSettings}
import com.yetu.notification.client.NotificationManager
import com.yetu.play.authenticator.actors.EventsActor
import com.yetu.play.authenticator.controllers.routes
import com.yetu.play.authenticator.models.daos.{UserDAO, UserDAOImpl}
import com.yetu.play.authenticator.utils.di.{SilhouetteModule, YetuProvider}
import play.api.libs.concurrent.Akka
import play.api.{Application, Play, GlobalSettings}
import play.api.i18n.Lang
import play.api.mvc.Results._
import play.api.mvc.{RequestHeader, Result}
import play.api.Play.current
import scala.concurrent.ExecutionContext.Implicits.global
import com.yetu.play.authenticator.controllers._
import scala.concurrent.Future
/*
* The global object.
*/
object AuthenticatorGlobal extends AuthenticatorGlobal
/*
* The global configuration.
*/
trait AuthenticatorGlobal extends GlobalSettings with SecuredSettings with Logger {
val LOGOUT_SUBSCRIBE_TOPIC: String = "*.*.logout"
/*
* The Guice dependencies injector.
*/
val injector = Guice.createInjector(new SilhouetteModule)
/**
* Loads the controller classes with the Guice injector,
* in order to be able to inject dependencies directly into the controller.
*
* @param controllerClass The controller class to instantiate.
* @return The instance of the controller class.
* @throws Exception if the controller couldn't be instantiated.
*/
override def getControllerInstance[A](controllerClass: Class[A]) = injector.getInstance(controllerClass)
/**
* Called when a user is not authenticated.
*
* As defined by RFC 2616, the status code of the response should be 401 Unauthorized.
*
* @param request The request header.
* @param lang The currently selected language.
* @return The result to send to the client.
*/
override def onNotAuthenticated(request: RequestHeader, lang: Lang): Option[Future[Result]] = {
println("LIBRARY ON NOT AUTHENTICATED CALLED")
Some(Future.successful(Redirect(routes.SocialAuthController.authenticate(YetuProvider.Yetu))))
}
// init listening to the logout event
override def onStart(app: Application): Unit = {
val userDao: UserDAO = injector.getProvider(classOf[UserDAO]).get()
logger.info(s"Initialized userDao $userDao")
NotificationManager.bindConsumer(LOGOUT_SUBSCRIBE_TOPIC, Akka.system.actorOf(EventsActor.props(userDao)))
.map(r => logger.info(s"Connected to MQ with result $r"))
}
}
|
yetu/yetu-play-authenticator
|
app/com/yetu/play/authenticator/AuthenticatorGlobal.scala
|
Scala
|
apache-2.0
| 2,557
|
/* sbt -- Simple Build Tool
* Copyright 2010 Mark Harrah
*/
package sbt
package inc
import java.io.File
import Relations.Source
import Relations.SourceDependencies
/**
* Provides mappings between source files, generated classes (products), and binaries.
* Dependencies that are tracked include internal: a dependency on a source in the same compilation group (project),
* external: a dependency on a source in another compilation group (tracked as the name of the class),
* binary: a dependency on a class or jar file not generated by a source file in any tracked compilation group,
 * inherited: a dependency introduced by a public template inheriting from another type,
* direct: any type of dependency, including inheritance.
*/
trait Relations {
  /** All sources _with at least one product_. */
def allSources: collection.Set[File]
/** All products associated with sources. */
def allProducts: collection.Set[File]
/** All files that are recorded as a binary dependency of a source file.*/
def allBinaryDeps: collection.Set[File]
/** All files in this compilation group (project) that are recorded as a source dependency of a source file in this group.*/
def allInternalSrcDeps: collection.Set[File]
/** All files in another compilation group (project) that are recorded as a source dependency of a source file in this group.*/
def allExternalDeps: collection.Set[String]
/** Fully qualified names of classes generated from source file `src`. */
def classNames(src: File): Set[String]
/** Source files that generated a class with the given fully qualified `name`. This is typically a set containing a single file. */
def definesClass(name: String): Set[File]
/** The classes that were generated for source file `src`. */
def products(src: File): Set[File]
/** The source files that generated class file `prod`. This is typically a set containing a single file. */
def produced(prod: File): Set[File]
/** The binary dependencies for the source file `src`. */
def binaryDeps(src: File): Set[File]
/** The source files that depend on binary file `dep`. */
def usesBinary(dep: File): Set[File]
/** Internal source dependencies for `src`. This includes both direct and inherited dependencies. */
def internalSrcDeps(src: File): Set[File]
/** Internal source files that depend on internal source `dep`. This includes both direct and inherited dependencies. */
def usesInternalSrc(dep: File): Set[File]
/** External source dependencies that internal source file `src` depends on. This includes both direct and inherited dependencies. */
def externalDeps(src: File): Set[String]
/** Internal source dependencies that depend on external source file `dep`. This includes both direct and inherited dependencies. */
def usesExternal(dep: String): Set[File]
private[inc] def usedNames(src: File): Set[String]
/** Records internal source file `src` as generating class file `prod` with top-level class `name`. */
def addProduct(src: File, prod: File, name: String): Relations
/**
* Records internal source file `src` as depending on class `dependsOn` in an external source file.
* If `inherited` is true, this dependency is recorded as coming from a public template in `src` extending something in `dependsOn` (an inheritance dependency).
* Whatever the value of `inherited`, the dependency is also recorded as a direct dependency.
* If `fromMacro` is true, this dependency is recorded as coming from the expansion of a macro.
*/
def addExternalDep(src: File, dependsOn: String, inherited: Boolean, fromMacro: Boolean): Relations
  /** Records internal source file `src` as depending on binary dependency `dependsOn`.*/
def addBinaryDep(src: File, dependsOn: File): Relations
/**
* Records internal source file `src` as having direct dependencies on internal source files `directDependsOn`
* and inheritance dependencies on `inheritedDependsOn`. Everything in `inheritedDependsOn` must be included in `directDependsOn`;
* this method does not automatically record direct dependencies like `addExternalDep` does.
*/
def addInternalSrcDeps(src: File, directDependsOn: Iterable[File], inheritedDependsOn: Iterable[File]): Relations
private[inc] def addUsedName(src: File, name: String): Relations
/** Concatenates the two relations. Acts naively, i.e., doesn't internalize external deps on added files. */
def ++(o: Relations): Relations
/** Drops all dependency mappings a->b where a is in `sources`. Acts naively, i.e., doesn't externalize internal deps on removed files. */
def --(sources: Iterable[File]): Relations
@deprecated("OK to remove in 0.14", "0.13.1")
def groupBy[K](f: (File => K)): Map[K, Relations]
/** The relation between internal sources and generated class files. */
def srcProd: Relation[File, File]
/** The dependency relation between internal sources and binaries. */
def binaryDep: Relation[File, File]
/** The dependency relation between internal sources. This includes both direct and inherited dependencies.*/
def internalSrcDep: Relation[File, File]
/** The dependency relation between internal and external sources. This includes both direct and inherited dependencies.*/
def externalDep: Relation[File, String]
/**
* The source dependency relation between source files introduced by member reference.
*
* NOTE: All inheritance dependencies are included in this relation because in order to
* inherit from a member you have to refer to it. If you check documentation of `inheritance`
* you'll see that there's small oddity related to traits being the first parent of a
* class/trait that results in additional parents being introduced due to normalization.
* This relation properly accounts for that so the invariant that `memberRef` is a superset
* of `inheritance` is preserved.
*/
private[inc] def memberRef: SourceDependencies
/**
* The source dependency relation between source files introduced by inheritance.
* The dependency by inheritance is introduced when a template (class or trait) mentions
* a given type in a parent position.
*
* NOTE: Due to an oddity in how Scala's type checker works there's one unexpected dependency
 * on a class being introduced. An example best illustrates the problem. Let's consider
* the following structure:
*
* trait A extends B
* trait B extends C
* trait C extends D
* class D
*
* We are interested in dependencies by inheritance of `A`. One would expect it to be just `B`
* but the answer is `B` and `D`. The reason is because Scala's type checker performs a certain
* normalization so the first parent of a type is a class. Therefore the example above is normalized
* to the following form:
*
* trait A extends D with B
* trait B extends D with C
* trait C extends D
* class D
*
* Therefore if you inherit from a trait you'll get an additional dependency on a class that is
* resolved transitively. You should not rely on this behavior, though.
*
*/
private[inc] def inheritance: SourceDependencies
/**
* The source dependency relation between source files introduced by the expansion of a macro.
* These dependencies are introduced whenever the expansion of a macro introduces new dependencies.
*
* For instance, imagine that we have a macro that lists the members of another class. The dependency
* that will be introduced between the clients of this macro and the inspected class is stored
* in this relation.
*/
private[inc] def fromMacro: SourceDependencies
/** The dependency relations between sources. These include both direct and inherited dependencies.*/
def direct: Source
/** The inheritance dependency relations between sources.*/
def publicInherited: Source
/** The relation between a source file and the fully qualified names of classes generated from it.*/
def classes: Relation[File, String]
/**
* Flag which indicates whether given Relations object supports operations needed by name hashing algorithm.
*
* At the moment the list includes the following operations:
*
* - memberRef: SourceDependencies
* - inheritance: SourceDependencies
*
 * The `memberRef` and `inheritance` relations implement the new style of source dependency tracking.
 * When this flag is enabled, access to the `direct` and `publicInherited` relations is illegal and
 * causes a runtime exception to be thrown. This is done as an optimization that avoids storing two
 * overlapping sets of dependencies.
*
 * Conversely, when the `nameHashing` flag is disabled, access to the `memberRef` and `inheritance`
 * relations is illegal and causes a runtime exception to be thrown.
*/
private[inc] def nameHashing: Boolean
/**
* Relation between source files and _unqualified_ term and type names used in given source file.
*/
private[inc] def names: Relation[File, String]
}
object Relations {
/** Tracks internal and external source dependencies for a specific dependency type, such as direct or inherited.*/
final class Source private[sbt] (val internal: Relation[File, File], val external: Relation[File, String]) {
def addInternal(source: File, dependsOn: Iterable[File]): Source = new Source(internal + (source, dependsOn), external)
def addExternal(source: File, dependsOn: String): Source = new Source(internal, external + (source, dependsOn))
/** Drops all dependency mappings from `sources`. Acts naively, i.e., doesn't externalize internal deps on removed files.*/
def --(sources: Iterable[File]): Source = new Source(internal -- sources, external -- sources)
def ++(o: Source): Source = new Source(internal ++ o.internal, external ++ o.external)
@deprecated("Broken implementation. OK to remove in 0.14", "0.13.1")
def groupBySource[K](f: File => K): Map[K, Source] = {
val i = internal.groupBy { case (a, b) => f(a) }
val e = external.groupBy { case (a, b) => f(a) }
val pairs = for (k <- i.keySet ++ e.keySet) yield (k, new Source(getOrEmpty(i, k), getOrEmpty(e, k)))
pairs.toMap
}
override def equals(other: Any) = other match {
case o: Source => internal == o.internal && external == o.external
case _ => false
}
override def hashCode = (internal, external).hashCode
}
/** Tracks internal and external source dependencies for a specific dependency type, such as direct or inherited.*/
private[inc] final class SourceDependencies(val internal: Relation[File, File], val external: Relation[File, String]) {
def addInternal(source: File, dependsOn: Iterable[File]): SourceDependencies = new SourceDependencies(internal + (source, dependsOn), external)
def addExternal(source: File, dependsOn: String): SourceDependencies = new SourceDependencies(internal, external + (source, dependsOn))
/** Drops all dependency mappings from `sources`. Acts naively, i.e., doesn't externalize internal deps on removed files.*/
def --(sources: Iterable[File]): SourceDependencies = new SourceDependencies(internal -- sources, external -- sources)
def ++(o: SourceDependencies): SourceDependencies = new SourceDependencies(internal ++ o.internal, external ++ o.external)
override def equals(other: Any) = other match {
case o: SourceDependencies => internal == o.internal && external == o.external
case _ => false
}
override def hashCode = (internal, external).hashCode
}
private[sbt] def getOrEmpty[A, B, K](m: Map[K, Relation[A, B]], k: K): Relation[A, B] = m.getOrElse(k, Relation.empty)
private[this] lazy val e = Relation.empty[File, File]
private[this] lazy val estr = Relation.empty[File, String]
private[this] lazy val es = new Source(e, estr)
def emptySource: Source = es
private[inc] lazy val emptySourceDependencies: SourceDependencies = new SourceDependencies(e, estr)
def empty: Relations = empty(nameHashing = false)
private[inc] def empty(nameHashing: Boolean): Relations =
if (nameHashing)
new MRelationsNameHashing(e, e, emptySourceDependencies, emptySourceDependencies, emptySourceDependencies, estr, estr)
else
new MRelationsDefaultImpl(e, e, es, es, estr)
def make(srcProd: Relation[File, File], binaryDep: Relation[File, File], direct: Source, publicInherited: Source, classes: Relation[File, String]): Relations =
new MRelationsDefaultImpl(srcProd, binaryDep, direct = direct, publicInherited = publicInherited, classes)
private[inc] def make(srcProd: Relation[File, File], binaryDep: Relation[File, File],
memberRef: SourceDependencies, inheritance: SourceDependencies, fromMacro: SourceDependencies, classes: Relation[File, String],
names: Relation[File, String]): Relations =
new MRelationsNameHashing(srcProd, binaryDep, memberRef = memberRef, inheritance = inheritance, fromMacro = fromMacro,
classes, names)
def makeSource(internal: Relation[File, File], external: Relation[File, String]): Source = new Source(internal, external)
private[inc] def makeSourceDependencies(internal: Relation[File, File], external: Relation[File, String]): SourceDependencies = new SourceDependencies(internal, external)
}
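/**
 * Hedged usage sketch (added for illustration, not part of the original sources):
 * shows how the default Relations implementation records a product and a binary
 * dependency, and how the forward/reverse lookups behave. Paths are hypothetical.
 */
private object RelationsUsageExample {
  def example(): Unit = {
    val src = new File("src/A.scala")
    val cls = new File("target/A.class")
    val rs = Relations.empty
      .addProduct(src, cls, "A")
      .addBinaryDep(src, new File("lib/dep.jar"))
    assert(rs.classNames(src) == Set("A")) // forward: source -> generated class names
    assert(rs.produced(cls) == Set(src)) // reverse: product -> originating sources
  }
}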
/**
* An abstract class that contains common functionality inherited by two implementations of Relations trait.
*
 * A note on why we have two different implementations of the Relations trait: this is needed while we
 * slowly migrate to the new invalidation algorithm called "name hashing", which requires some subtle
 * changes to dependency tracking. For some time we plan to keep both algorithms side by side and have
 * a runtime switch which allows picking one, so logic for both the old and the new dependency tracking
 * must be available. That's exactly what the two subclasses of MRelationsCommon implement. Once name
 * hashing is proven stable and reliable, we'll phase out the old algorithm and its dependency tracking
 * logic.
*
* `srcProd` is a relation between a source file and a product: (source, product).
* Note that some source files may not have a product and will not be included in this relation.
*
* `binaryDeps` is a relation between a source file and a binary dependency: (source, binary dependency).
* This only includes dependencies on classes and jars that do not have a corresponding source/API to track instead.
* A class or jar with a corresponding source should only be tracked in one of the source dependency relations.
*
* `classes` is a relation between a source file and its generated fully-qualified class names.
*/
private abstract class MRelationsCommon(val srcProd: Relation[File, File], val binaryDep: Relation[File, File],
val classes: Relation[File, String]) extends Relations {
def allSources: collection.Set[File] = srcProd._1s
def allProducts: collection.Set[File] = srcProd._2s
def allBinaryDeps: collection.Set[File] = binaryDep._2s
def allInternalSrcDeps: collection.Set[File] = internalSrcDep._2s
def allExternalDeps: collection.Set[String] = externalDep._2s
def classNames(src: File): Set[String] = classes.forward(src)
def definesClass(name: String): Set[File] = classes.reverse(name)
def products(src: File): Set[File] = srcProd.forward(src)
def produced(prod: File): Set[File] = srcProd.reverse(prod)
def binaryDeps(src: File): Set[File] = binaryDep.forward(src)
def usesBinary(dep: File): Set[File] = binaryDep.reverse(dep)
def internalSrcDeps(src: File): Set[File] = internalSrcDep.forward(src)
def usesInternalSrc(dep: File): Set[File] = internalSrcDep.reverse(dep)
def externalDeps(src: File): Set[String] = externalDep.forward(src)
def usesExternal(dep: String): Set[File] = externalDep.reverse(dep)
def usedNames(src: File): Set[String] = names.forward(src)
  /** Making large Relations a little more readable. */
private val userDir = sys.props("user.dir").stripSuffix("/") + "/"
private def nocwd(s: String) = s stripPrefix userDir
private def line_s(kv: (Any, Any)) = " " + nocwd("" + kv._1) + " -> " + nocwd("" + kv._2) + "\n"
protected def relation_s(r: Relation[_, _]) = (
if (r.forwardMap.isEmpty) "Relation [ ]"
else (r.all.toSeq map line_s sorted) mkString ("Relation [\n", "", "]")
)
}
/**
* This class implements Relations trait with support for tracking of `direct` and `publicInherited` source
 * dependencies. Therefore this class preserves the "old" (from sbt 0.13.0) dependency tracking logic and is
 * the default implementation.
*
* `direct` defines relations for dependencies between internal and external source dependencies. It includes all types of
* dependencies, including inheritance.
*
* `publicInherited` defines relations for internal and external source dependencies, only including dependencies
* introduced by inheritance.
*
*/
private class MRelationsDefaultImpl(srcProd: Relation[File, File], binaryDep: Relation[File, File],
// direct should include everything in inherited
val direct: Source, val publicInherited: Source,
classes: Relation[File, String]) extends MRelationsCommon(srcProd, binaryDep, classes) {
def internalSrcDep: Relation[File, File] = direct.internal
def externalDep: Relation[File, String] = direct.external
def nameHashing: Boolean = false
def memberRef: SourceDependencies =
throw new UnsupportedOperationException("The `memberRef` source dependencies relation is not supported " +
"when `nameHashing` flag is disabled.")
  def inheritance: SourceDependencies =
    throw new UnsupportedOperationException("The `inheritance` source dependencies relation is not supported " +
      "when `nameHashing` flag is disabled.")
def fromMacro: SourceDependencies =
throw new UnsupportedOperationException("The `fromMacro` source dependencies relation is not supported " +
"when `nameHashing` flag is disabled.")
def addProduct(src: File, prod: File, name: String): Relations =
new MRelationsDefaultImpl(srcProd + (src, prod), binaryDep, direct = direct,
publicInherited = publicInherited, classes + (src, name))
def addExternalDep(src: File, dependsOn: String, inherited: Boolean, fromMacro: Boolean): Relations = {
val newI = if (inherited) publicInherited.addExternal(src, dependsOn) else publicInherited
val newD = direct.addExternal(src, dependsOn)
new MRelationsDefaultImpl(srcProd, binaryDep, direct = newD, publicInherited = newI, classes)
}
def addInternalSrcDeps(src: File, dependsOn: Iterable[File], inherited: Iterable[File]): Relations =
{
val newI = publicInherited.addInternal(src, inherited)
val newD = direct.addInternal(src, dependsOn)
new MRelationsDefaultImpl(srcProd, binaryDep, direct = newD, publicInherited = newI, classes)
}
def names: Relation[File, String] =
throw new UnsupportedOperationException("Tracking of used names is not supported " +
"when `nameHashing` is disabled.")
def addUsedName(src: File, name: String): Relations =
throw new UnsupportedOperationException("Tracking of used names is not supported " +
"when `nameHashing` is disabled.")
def addBinaryDep(src: File, dependsOn: File): Relations =
new MRelationsDefaultImpl(srcProd, binaryDep + (src, dependsOn), direct = direct,
publicInherited = publicInherited, classes)
def ++(o: Relations): Relations = {
if (nameHashing != o.nameHashing)
throw new UnsupportedOperationException("The `++` operation is not supported for relations " +
"with different values of `nameHashing` flag.")
new MRelationsDefaultImpl(srcProd ++ o.srcProd, binaryDep ++ o.binaryDep, direct ++ o.direct,
publicInherited ++ o.publicInherited, classes ++ o.classes)
}
def --(sources: Iterable[File]) =
new MRelationsDefaultImpl(srcProd -- sources, binaryDep -- sources, direct = direct -- sources,
publicInherited = publicInherited -- sources, classes -- sources)
@deprecated("Broken implementation. OK to remove in 0.14", "0.13.1")
def groupBy[K](f: File => K): Map[K, Relations] =
{
type MapRel[T] = Map[K, Relation[File, T]]
def outerJoin(srcProdMap: MapRel[File], binaryDepMap: MapRel[File], direct: Map[K, Source],
inherited: Map[K, Source], classesMap: MapRel[String],
namesMap: MapRel[String]): Map[K, Relations] =
{
def kRelations(k: K): Relations = {
def get[T](m: Map[K, Relation[File, T]]) = Relations.getOrEmpty(m, k)
def getSrc(m: Map[K, Source]): Source = m.getOrElse(k, Relations.emptySource)
def getSrcDeps(m: Map[K, SourceDependencies]): SourceDependencies =
m.getOrElse(k, Relations.emptySourceDependencies)
new MRelationsDefaultImpl(get(srcProdMap), get(binaryDepMap), getSrc(direct), getSrc(inherited),
get(classesMap))
}
val keys = (srcProdMap.keySet ++ binaryDepMap.keySet ++ direct.keySet ++ inherited.keySet ++ classesMap.keySet).toList
Map(keys.map((k: K) => (k, kRelations(k))): _*)
}
def f1[B](item: (File, B)): K = f(item._1)
outerJoin(srcProd.groupBy(f1), binaryDep.groupBy(f1), direct.groupBySource(f),
publicInherited.groupBySource(f), classes.groupBy(f1), names.groupBy(f1))
}
override def equals(other: Any) = other match {
case o: MRelationsDefaultImpl =>
srcProd == o.srcProd && binaryDep == o.binaryDep && direct == o.direct &&
publicInherited == o.publicInherited && classes == o.classes
case _ => false
}
override def hashCode = (srcProd :: binaryDep :: direct :: publicInherited :: classes :: Nil).hashCode
override def toString = (
"""
|Relations:
| products: %s
| bin deps: %s
| src deps: %s
| ext deps: %s
| class names: %s
""".trim.stripMargin.format(List(srcProd, binaryDep, internalSrcDep, externalDep, classes) map relation_s: _*)
)
}
/**
* This class implements Relations trait with support for tracking of `memberRef` and `inheritance` source
* dependencies. Therefore this class implements the new (compared to sbt 0.13.0) dependency tracking logic
* needed by the name hashing invalidation algorithm.
*/
private class MRelationsNameHashing(srcProd: Relation[File, File], binaryDep: Relation[File, File],
// memberRef should include everything in inherited
val memberRef: SourceDependencies, val inheritance: SourceDependencies, val fromMacro: SourceDependencies,
classes: Relation[File, String],
val names: Relation[File, String]) extends MRelationsCommon(srcProd, binaryDep, classes) {
  def direct: Source =
    throw new UnsupportedOperationException("The `direct` source dependencies relation is not supported " +
      "when `nameHashing` flag is enabled.")
  def publicInherited: Source =
    throw new UnsupportedOperationException("The `publicInherited` source dependencies relation is not supported " +
      "when `nameHashing` flag is enabled.")
val nameHashing: Boolean = true
def internalSrcDep: Relation[File, File] = memberRef.internal
def externalDep: Relation[File, String] = memberRef.external
def addProduct(src: File, prod: File, name: String): Relations =
new MRelationsNameHashing(srcProd + (src, prod), binaryDep, memberRef = memberRef,
inheritance = inheritance, fromMacro = fromMacro, classes + (src, name), names = names)
def addExternalDep(src: File, dependsOn: String, inherited: Boolean, fromMacroExpansion: Boolean): Relations = {
val newIH = if (inherited) inheritance.addExternal(src, dependsOn) else inheritance
val newMR = memberRef.addExternal(src, dependsOn)
val newFM = if (fromMacroExpansion) fromMacro.addExternal(src, dependsOn) else fromMacro
new MRelationsNameHashing(srcProd, binaryDep, memberRef = newMR, inheritance = newIH, fromMacro = newFM, classes,
names = names)
}
def addInternalSrcDeps(src: File, dependsOn: Iterable[File], inherited: Iterable[File]): Relations = {
val newIH = inheritance.addInternal(src, inherited)
val newMR = memberRef.addInternal(src, dependsOn)
new MRelationsNameHashing(srcProd, binaryDep, memberRef = newMR, inheritance = newIH, fromMacro = fromMacro,
classes, names = names)
}
def addUsedName(src: File, name: String): Relations =
new MRelationsNameHashing(srcProd, binaryDep, memberRef = memberRef,
inheritance = inheritance, fromMacro = fromMacro, classes, names = names + (src, name))
def addBinaryDep(src: File, dependsOn: File): Relations =
new MRelationsNameHashing(srcProd, binaryDep + (src, dependsOn), memberRef = memberRef,
inheritance = inheritance, fromMacro = fromMacro, classes, names = names)
def ++(o: Relations): Relations = {
if (!o.nameHashing)
throw new UnsupportedOperationException("The `++` operation is not supported for relations " +
"with different values of `nameHashing` flag.")
new MRelationsNameHashing(srcProd ++ o.srcProd, binaryDep ++ o.binaryDep,
memberRef = memberRef ++ o.memberRef, inheritance = inheritance ++ o.inheritance,
fromMacro = fromMacro ++ o.fromMacro, classes ++ o.classes, names = names ++ o.names)
}
def --(sources: Iterable[File]) =
new MRelationsNameHashing(srcProd -- sources, binaryDep -- sources,
memberRef = memberRef -- sources, inheritance = inheritance -- sources, fromMacro = fromMacro -- sources,
classes -- sources, names = names -- sources)
def groupBy[K](f: File => K): Map[K, Relations] = {
throw new UnsupportedOperationException("Merging of Analyses that have" +
"`relations.nameHashing` set to `true` is not supported.")
}
override def equals(other: Any) = other match {
case o: MRelationsNameHashing =>
srcProd == o.srcProd && binaryDep == o.binaryDep && memberRef == o.memberRef &&
inheritance == o.inheritance && fromMacro == o.fromMacro && classes == o.classes
case _ => false
}
override def hashCode = (srcProd :: binaryDep :: memberRef :: inheritance :: fromMacro :: classes :: Nil).hashCode
override def toString = (
"""
|Relations (with name hashing enabled):
| products: %s
| bin deps: %s
| src deps: %s
| ext deps: %s
| class names: %s
| used names: %s
""".trim.stripMargin.format(List(srcProd, binaryDep, internalSrcDep, externalDep, classes, names) map relation_s: _*)
)
}
|
xeno-by/old-scalameta-sbt
|
compile/inc/src/main/scala/sbt/inc/Relations.scala
|
Scala
|
bsd-3-clause
| 26,484
|
// See LICENSE for license details.
package sifive.blocks.devices.chiplink
import Chisel.{defaultCompileOptions => _, _}
import freechips.rocketchip.util.CompileOptions.NotStrictInferReset
import freechips.rocketchip.tilelink._
import freechips.rocketchip.util._
class SourceC(info: ChipLinkInfo) extends Module
{
val io = new Bundle {
val c = Decoupled(new TLBundleC(info.edgeOut.bundle))
val q = Decoupled(UInt(width = info.params.dataBits)).flip
// Used by D to find the txn
val d_tlSource = Valid(UInt(width = info.params.sourceBits)).flip
val d_clSource = UInt(OUTPUT, width = info.params.clSourceBits)
}
// CAM of sources used for release
val cam = Module(new CAM(info.params.sourcesPerDomain, info.params.clSourceBits))
// A simple FSM to generate the packet components
val state = RegInit(UInt(0, width = 2))
val s_header = UInt(0, width = 2)
val s_address0 = UInt(1, width = 2)
val s_address1 = UInt(2, width = 2)
val s_data = UInt(3, width = 2)
private def hold(key: UInt)(data: UInt) = {
val enable = state === key
Mux(enable, data, RegEnable(data, enable))
}
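  // Note (added): `hold` samples `data` while `state === key` and retains the
  // sampled value on later beats, so header/address fields captured early stay
  // stable while subsequent data beats stream through.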
// Extract header fields
val Seq(_, q_opcode, q_param, q_size, _, q_source) =
info.decode(io.q.bits).map(hold(s_header) _)
// Latch address
val q_address0 = hold(s_address0)(io.q.bits)
val q_address1 = hold(s_address1)(io.q.bits)
val (_, q_last) = info.firstlast(io.q, Some(UInt(2)))
val q_hasData = q_opcode(0)
val c_first = RegEnable(state =/= s_data, io.q.fire())
when (io.q.fire()) {
switch (state) {
is (s_header) { state := s_address0 }
is (s_address0) { state := s_address1 }
is (s_address1) { state := Mux(q_hasData, s_data, s_header) }
is (s_data) { state := Mux(!q_last, s_data, s_header) }
}
}
// Determine if the request is legal. If not, route to error device.
val q_address = Cat(q_address1, q_address0)
val exists = info.edgeOut.manager.containsSafe(q_address)
private def writeable(m: TLManagerParameters): Boolean = if (m.supportsAcquireB) m.supportsAcquireT else m.supportsPutFull
private def acquireable(m: TLManagerParameters): Boolean = m.supportsAcquireB || m.supportsAcquireT
private def toBool(x: Boolean) = Bool(x)
val writeOk = info.edgeOut.manager.fastProperty(q_address, writeable, toBool)
val acquireOk = info.edgeOut.manager.fastProperty(q_address, acquireable, toBool)
val q_legal = exists && (!q_hasData || writeOk) && acquireOk
// Look for an available source in the correct domain
val q_release = q_opcode === TLMessages.Release || q_opcode === TLMessages.ReleaseData
val source_ok = !q_release || cam.io.alloc.ready
val source = cam.io.key holdUnless c_first
io.c.bits.opcode := q_opcode
io.c.bits.param := q_param
io.c.bits.size := q_size
io.c.bits.source := Mux(q_release, source, UInt(0)) // always domain 0
io.c.bits.address := info.makeError(q_legal, q_address)
io.c.bits.data := io.q.bits
io.c.bits.corrupt := Bool(false)
val stall = c_first && !source_ok
val xmit = q_last || state === s_data
io.c.valid := (io.q.valid && !stall) && xmit
io.q.ready := (io.c.ready && !stall) || !xmit
cam.io.alloc.valid := q_release && c_first && xmit && io.q.valid && io.c.ready
cam.io.alloc.bits := q_source
  // Map D-channel TL sources back to ChipLink sources and free the CAM entries
  io.d_clSource := cam.io.data
  cam.io.free := io.d_tlSource
}
|
sifive/sifive-blocks
|
src/main/scala/devices/chiplink/SourceC.scala
|
Scala
|
apache-2.0
| 3,399
|
package net.kemuridama.kafcon.service
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
import net.kemuridama.kafcon.model.{Cluster, Broker}
import net.kemuridama.kafcon.repository.{UsesBrokerRepository, MixinBrokerRepository}
trait BrokerService
extends UsesBrokerRepository
with UsesClusterService
with UsesBrokerMetricsService {
def update(cluster: Cluster): Unit = {
cluster.getAllBrokers.foreach { brokers =>
brokerRepository.insert(brokers)
brokers.foreach { broker =>
brokerMetricsService.update(broker)
}
}
}
def all: Future[List[Broker]] = brokerRepository.all
def find(clusterId: Int, id: Int): Future[Option[Broker]] = brokerRepository.find(clusterId, id)
def findAll(clusterId: Int): Future[List[Broker]] = brokerRepository.findAll(clusterId)
}
object BrokerService
extends BrokerService
with MixinBrokerRepository
with MixinClusterService
with MixinBrokerMetricsService
trait UsesBrokerService {
val brokerService: BrokerService
}
trait MixinBrokerService {
val brokerService = BrokerService
}
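// Hedged usage sketch (added for illustration; `ExampleConsumer` is hypothetical):
// consumers depend on the abstract UsesBrokerService, and production wiring is
// supplied by mixing in MixinBrokerService.
private object ExampleConsumer extends UsesBrokerService with MixinBrokerService {
  // counts all known brokers via the injected service
  def brokerCount: Future[Int] = brokerService.all.map(_.size)
}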
|
kemuridama/kafcon
|
src/main/scala/net/kemuridama/kafcon/service/BrokerService.scala
|
Scala
|
mit
| 1,117
|
package sbt
object Signals {
val CONT = "CONT"
val INT = "INT"
def withHandler[T](handler: () => Unit, signal: String = INT)(action: () => T): T =
{
val result =
try {
val signals = new Signals0
signals.withHandler(signal, handler, action)
} catch { case e: LinkageError => Right(action()) }
result match {
case Left(e) => throw e
case Right(v) => v
}
}
/** Helper interface so we can expose internals of signal-isms to others. */
sealed trait Registration {
def remove(): Unit
}
/**
* Register a signal handler that can be removed later.
   * NOTE: Does not stack with other signal handlers!
*/
def register(handler: () => Unit, signal: String = INT): Registration =
// TODO - Maybe we can just ignore things if not is-supported.
if (supported(signal)) {
import sun.misc.{ Signal, SignalHandler }
val intSignal = new Signal(signal)
val newHandler = new SignalHandler {
def handle(sig: Signal) { handler() }
}
val oldHandler = Signal.handle(intSignal, newHandler)
object unregisterNewHandler extends Registration {
override def remove(): Unit = {
Signal.handle(intSignal, oldHandler)
}
}
unregisterNewHandler
} else {
// TODO - Maybe we should just throw an exception if we don't support signals...
object NullUnregisterNewHandler extends Registration {
override def remove(): Unit = ()
}
NullUnregisterNewHandler
}
def supported(signal: String): Boolean =
try {
val signals = new Signals0
signals.supported(signal)
} catch { case e: LinkageError => false }
}
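// Hedged usage sketch (added for illustration, not part of the original file):
// runs an action with a temporary SIGINT handler; the previous handler is
// restored once the action completes.
private object SignalsExample {
  def run(): String =
    Signals.withHandler(() => println("interrupted")) { () =>
      "done" // the action to protect
    }
}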
// Must only be referenced using a
//   try { } catch { case e: LinkageError => ... }
// block to guard against sun.misc.Signal/SignalHandler being unavailable at runtime.
private final class Signals0 {
def supported(signal: String): Boolean =
{
import sun.misc.Signal
try { new Signal(signal); true }
catch { case e: IllegalArgumentException => false }
}
// returns a LinkageError in `action` as Left(t) in order to avoid it being
// incorrectly swallowed as missing Signal/SignalHandler
def withHandler[T](signal: String, handler: () => Unit, action: () => T): Either[Throwable, T] =
{
import sun.misc.{ Signal, SignalHandler }
val intSignal = new Signal(signal)
val newHandler = new SignalHandler {
def handle(sig: Signal) { handler() }
}
val oldHandler = Signal.handle(intSignal, newHandler)
try Right(action())
catch { case e: LinkageError => Left(e) }
finally Signal.handle(intSignal, oldHandler)
}
}
|
xeno-by/old-scalameta-sbt
|
util/collection/src/main/scala/sbt/Signal.scala
|
Scala
|
bsd-3-clause
| 2,659
|
package dao
import scala.concurrent.Future
import javax.inject.Inject
import models.Cat
import play.api.db.slick.DatabaseConfigProvider
import play.api.db.slick.HasDatabaseConfigProvider
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import slick.driver.JdbcProfile
import slick.jdbc.GetResult
class CatDAO @Inject()(protected val dbConfigProvider: DatabaseConfigProvider) extends HasDatabaseConfigProvider[JdbcProfile] {
import driver.api._
private val Cats = TableQuery[CatsTable]
def all(): Future[Seq[Cat]] = db.run(Cats.result)
def search(name:String): Future[Seq[Cat]] = db.run(Cats.filter( _.name === name).result)
  // returns a future that completes when the update statement has actually run
  def update(name:String,color:String): Future[Unit] =
    db.run(
      Cats.filter(_.name === name)
        .map(p => (p.name,p.color))
        .update((name,color))
    ).map(_ => ())
def delete(name:String): Future[Int] = db.run(Cats.filter( _.name === name).delete)
def insert(cat: Cat): Future[Unit] = db.run(Cats += cat).map { _ => () }
def fromSQL(name:String): Future[Seq[Cat]]= {
implicit val getCatResult = GetResult(r => Cat(r.<<,r.<<, r.<<))
db.run(sql"""SELECT * FROM SLICK.Cat WHERE NAME = $name """.as[(Cat)])
}
class CatsTable(tag: Tag) extends Table[Cat](tag, "Cat") {
def id = column[Int]("ID", O.PrimaryKey)
def name = column[String]("NAME")
def color = column[String]("COLOR")
def * = (id, name, color) <> (Cat.tupled, Cat.unapply _)
}
def toTable = TableQuery[CatsTable]
}
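// Hedged usage sketch (illustrative only; a CatDAO instance would normally be
// injected by Play, and the Cat field order (id, name, color) is an assumption):
//   catDAO.insert(Cat(1, "Tom", "grey"))
//   catDAO.search("Tom").foreach(cats => println(cats))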
|
diegopacheco/scala-playground
|
play-scala-slick31/app/dao/CatDao.scala
|
Scala
|
unlicense
| 1,508
|
/**
* Generated by API Builder - https://www.apibuilder.io
* Service version: 0.14.85
* apibuilder 0.14.93 app.apibuilder.io/apicollective/apibuilder-generator/latest/anorm_2_8_parsers
*/
import anorm._
package io.apibuilder.generator.v0.anorm.parsers {
import io.apibuilder.generator.v0.anorm.conversions.Standard._
import io.apibuilder.generator.v0.anorm.conversions.Types._
import io.apibuilder.spec.v0.anorm.conversions.Types._
object FileFlag {
def parserWithPrefix(prefix: String, sep: String = "_"): RowParser[io.apibuilder.generator.v0.models.FileFlag] = parser(prefixOpt = Some(s"$prefix$sep"))
def parser(name: String = "file_flag", prefixOpt: Option[String] = None): RowParser[io.apibuilder.generator.v0.models.FileFlag] = {
SqlParser.str(prefixOpt.getOrElse("") + name) map {
case value => io.apibuilder.generator.v0.models.FileFlag(value)
}
}
}
object Attribute {
def parserWithPrefix(prefix: String, sep: String = "_"): RowParser[io.apibuilder.generator.v0.models.Attribute] = parser(prefixOpt = Some(s"$prefix$sep"))
def parser(
name: String = "name",
value: String = "value",
prefixOpt: Option[String] = None
): RowParser[io.apibuilder.generator.v0.models.Attribute] = {
SqlParser.str(prefixOpt.getOrElse("") + name) ~
SqlParser.str(prefixOpt.getOrElse("") + value) map {
case name ~ value => {
io.apibuilder.generator.v0.models.Attribute(
name = name,
value = value
)
}
}
}
}
object Error {
def parserWithPrefix(prefix: String, sep: String = "_"): RowParser[io.apibuilder.generator.v0.models.Error] = parser(prefixOpt = Some(s"$prefix$sep"))
def parser(
code: String = "code",
message: String = "message",
prefixOpt: Option[String] = None
): RowParser[io.apibuilder.generator.v0.models.Error] = {
SqlParser.str(prefixOpt.getOrElse("") + code) ~
SqlParser.str(prefixOpt.getOrElse("") + message) map {
case code ~ message => {
io.apibuilder.generator.v0.models.Error(
code = code,
message = message
)
}
}
}
}
object File {
def parserWithPrefix(prefix: String, sep: String = "_"): RowParser[io.apibuilder.generator.v0.models.File] = parser(prefixOpt = Some(s"$prefix$sep"))
def parser(
name: String = "name",
dir: String = "dir",
contents: String = "contents",
flags: String = "flags",
prefixOpt: Option[String] = None
): RowParser[io.apibuilder.generator.v0.models.File] = {
SqlParser.str(prefixOpt.getOrElse("") + name) ~
SqlParser.str(prefixOpt.getOrElse("") + dir).? ~
SqlParser.str(prefixOpt.getOrElse("") + contents) ~
SqlParser.get[Seq[io.apibuilder.generator.v0.models.FileFlag]](prefixOpt.getOrElse("") + flags).? map {
case name ~ dir ~ contents ~ flags => {
io.apibuilder.generator.v0.models.File(
name = name,
dir = dir,
contents = contents,
flags = flags
)
}
}
}
}
object Generator {
def parserWithPrefix(prefix: String, sep: String = "_"): RowParser[io.apibuilder.generator.v0.models.Generator] = parser(prefixOpt = Some(s"$prefix$sep"))
def parser(
key: String = "key",
name: String = "name",
language: String = "language",
description: String = "description",
attributes: String = "attributes",
prefixOpt: Option[String] = None
): RowParser[io.apibuilder.generator.v0.models.Generator] = {
SqlParser.str(prefixOpt.getOrElse("") + key) ~
SqlParser.str(prefixOpt.getOrElse("") + name) ~
SqlParser.str(prefixOpt.getOrElse("") + language).? ~
SqlParser.str(prefixOpt.getOrElse("") + description).? ~
SqlParser.get[Seq[String]](prefixOpt.getOrElse("") + attributes) map {
case key ~ name ~ language ~ description ~ attributes => {
io.apibuilder.generator.v0.models.Generator(
key = key,
name = name,
language = language,
description = description,
attributes = attributes
)
}
}
}
}
object Healthcheck {
def parserWithPrefix(prefix: String, sep: String = "_"): RowParser[io.apibuilder.generator.v0.models.Healthcheck] = parser(prefixOpt = Some(s"$prefix$sep"))
def parser(
status: String = "status",
prefixOpt: Option[String] = None
): RowParser[io.apibuilder.generator.v0.models.Healthcheck] = {
SqlParser.str(prefixOpt.getOrElse("") + status) map {
case status => {
io.apibuilder.generator.v0.models.Healthcheck(
status = status
)
}
}
}
}
object Invocation {
def parserWithPrefix(prefix: String, sep: String = "_"): RowParser[io.apibuilder.generator.v0.models.Invocation] = parser(prefixOpt = Some(s"$prefix$sep"))
def parser(
source: String = "source",
files: String = "files",
prefixOpt: Option[String] = None
): RowParser[io.apibuilder.generator.v0.models.Invocation] = {
SqlParser.str(prefixOpt.getOrElse("") + source) ~
SqlParser.get[Seq[io.apibuilder.generator.v0.models.File]](prefixOpt.getOrElse("") + files) map {
case source ~ files => {
io.apibuilder.generator.v0.models.Invocation(
source = source,
files = files
)
}
}
}
}
object InvocationForm {
def parserWithPrefix(prefix: String, sep: String = "_"): RowParser[io.apibuilder.generator.v0.models.InvocationForm] = parser(prefixOpt = Some(s"$prefix$sep"))
def parser(
servicePrefix: String = "service",
attributes: String = "attributes",
userAgent: String = "user_agent",
importedServices: String = "imported_services",
prefixOpt: Option[String] = None
): RowParser[io.apibuilder.generator.v0.models.InvocationForm] = {
io.apibuilder.spec.v0.anorm.parsers.Service.parserWithPrefix(prefixOpt.getOrElse("") + servicePrefix) ~
SqlParser.get[Seq[io.apibuilder.generator.v0.models.Attribute]](prefixOpt.getOrElse("") + attributes) ~
SqlParser.str(prefixOpt.getOrElse("") + userAgent).? ~
SqlParser.get[Seq[io.apibuilder.spec.v0.models.Service]](prefixOpt.getOrElse("") + importedServices).? map {
case service ~ attributes ~ userAgent ~ importedServices => {
io.apibuilder.generator.v0.models.InvocationForm(
service = service,
attributes = attributes,
userAgent = userAgent,
importedServices = importedServices
)
}
}
}
}
}
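// Hedged usage sketch (added; not generated code): with anorm, a parser is applied
// to a query result inside an implicit java.sql.Connection, e.g.
//   SQL("select code, message from errors")
//     .as(io.apibuilder.generator.v0.anorm.parsers.Error.parser().*)
// The table and column names here are hypothetical.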
|
mbryzek/apidoc
|
api/app/generated/ApicollectiveApibuilderGeneratorV0Parsers.scala
|
Scala
|
mit
| 6,745
|
package models
import play.api.db._
import play.api.Play.current
import anorm._
import anorm.SqlParser._
case class Theme(theme:String, content:String)
case class Challenger(theme:String, name:String)
case class Answer(theme:String, name:String, content:String = "")
object Theme {
val parse = {
str("theme") ~ str("content") map{
case t~c => Theme(t,c)}
}
def all = DB.withConnection { implicit c =>
SQL("select * from Theme").as(parse * )
}
def save(theme:Theme) = DB.withConnection { implicit c =>
SQL("insert into Theme values({t}, {c})").on('t ->theme.theme, 'c -> theme.content).executeUpdate();
}
def delete(theme:String) = DB.withConnection { implicit c =>
SQL("delete from Theme where theme = {theme}").on('theme ->theme).executeUpdate();
}
  def get(theme:String) = DB.withConnection { implicit c =>
    SQL("select * from Theme where theme = {theme}").on('theme ->theme).as(parse * ).headOption
  }
}
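// Hedged usage sketch (added for illustration; assumes the Theme table exists with
// columns theme and content):
//   Theme.save(Theme("news", "hello"))
//   Theme.get("news") // => Some(Theme("news", "hello"))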
|
kencharos/share_editor
|
app/models/Models.scala
|
Scala
|
mit
| 1,038
|
package latis.reader.tsml
import latis.reader.tsml.ml.Tsml
import latis.util.FileUtilsNio
/**
* Return a list of files as a Dataset.
* Use a regular expression (defined in the tsml as 'pattern')
* with groups to extract data values from the file names.
*
* This class is almost identical to FileListAdapter. The only
* difference is that it uses FileUtilsNio instead of FileUtils
* (i.e. it uses the nio package from Java7). Since we're still
 * on Java6 for the time being, this class is experimental and
* should be kept separate from the normal FileListAdapter.
* At some point in the future when Java7 becomes standard
* enough that we can use it in prod, we should merge these
* two classes.
*
* In my best tests, it appears that these methods are about
* 10% faster than the old FileListAdapter.
*
* NOTE: This adapter loads all filenames into memory. Therefore,
* for file systems larger than about 1m files, you should use
* StreamingFileListAdapter instead.
*/
class FileListAdapterNio(tsml: Tsml) extends RegexAdapter(tsml) {
//TODO: add the file variable without defining it in the tsml? but opportunity to define max length
//Note: Using the RegexAdapter with "()" around the file name pattern almost works.
// The matcher returns it first but we want the file variable to be last.
/**
   * A record consists of a file name and a file size.
*/
override def getRecordIterator: Iterator[String] = {
//TODO: support ftp...?
val dir = getUrl.getPath //assumes a file URL
val getSize = getOrigScalarNames.contains("fileSize")
FileUtilsNio.listAllFiles(dir, getSize).iterator
}
/**
* Override to add the file name (i.e. the data "record") itself as a data value.
* Note, this assumes that the TSML has the file and file size variables defined last.
*/
override def extractValues(record: String): Seq[String] = {
val name :: rest = record.split(',').toList
regex.findFirstMatchIn(name).map { m =>
m.subgroups ++ (name :: rest)
}.getOrElse(List.empty[String])
}
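  // Hedged illustration (added): for a record "/data/file_20200101.nc,1024" and a
  // tsml pattern like "file_(\\d{8})\\.nc", extractValues yields
  // Seq("20200101", "/data/file_20200101.nc", "1024"): captured groups first, then
  // the file name and size. The pattern and path are hypothetical.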
}
|
dlindhol/LaTiS
|
src/main/scala/latis/reader/tsml/FileListAdapterNio.scala
|
Scala
|
epl-1.0
| 2,063
|
package com.arcusys.valamis.lesson.scorm.model
import com.arcusys.valamis.lesson.scorm.model.manifest.{ Objective, ObjectiveMap }
import org.scalatest.FlatSpec
import org.scalatest.matchers.ShouldMatchers
class ObjectiveTest extends FlatSpec with ShouldMatchers {
"Objective map" can "be constructed" in {
val map = new ObjectiveMap(
readSatisfiedStatusFrom = Some("O1"),
readNormalizedMeasureFrom = Some("O2"),
writeSatisfiedStatusTo = Some("O3"),
writeNormalizedMeasureTo = Some("O4"),
readRawScoreFrom = Some("O5"),
readMinScoreFrom = Some("O6"),
readMaxScoreFrom = Some("O7"),
readCompletionStatusFrom = Some("O8"),
readProgressMeasureFrom = Some("O9"),
writeRawScoreTo = Some("OA"),
writeMinScoreTo = Some("OB"),
writeMaxScoreTo = Some("OC"),
writeCompletionStatusTo = Some("OD"),
writeProgressMeasureTo = Some("OE")
)
map.readSatisfiedStatusFrom should equal(Some("O1"))
map.readNormalizedMeasureFrom should equal(Some("O2"))
map.writeSatisfiedStatusTo should equal(Some("O3"))
map.writeNormalizedMeasureTo should equal(Some("O4"))
map.readRawScoreFrom should equal(Some("O5"))
map.readMinScoreFrom should equal(Some("O6"))
map.readMaxScoreFrom should equal(Some("O7"))
map.readCompletionStatusFrom should equal(Some("O8"))
map.readProgressMeasureFrom should equal(Some("O9"))
map.writeRawScoreTo should equal(Some("OA"))
map.writeMinScoreTo should equal(Some("OB"))
map.writeMaxScoreTo should equal(Some("OC"))
map.writeCompletionStatusTo should equal(Some("OD"))
map.writeProgressMeasureTo should equal(Some("OE"))
}
it can "be constructed with defaults = None" in {
val map = new ObjectiveMap()
map.readSatisfiedStatusFrom should equal(None)
map.readNormalizedMeasureFrom should equal(None)
map.writeSatisfiedStatusTo should equal(None)
map.writeNormalizedMeasureTo should equal(None)
map.readRawScoreFrom should equal(None)
map.readMinScoreFrom should equal(None)
map.readMaxScoreFrom should equal(None)
map.readCompletionStatusFrom should equal(None)
map.readProgressMeasureFrom should equal(None)
map.writeRawScoreTo should equal(None)
map.writeMinScoreTo should equal(None)
map.writeMaxScoreTo should equal(None)
map.writeCompletionStatusTo should equal(None)
map.writeProgressMeasureTo should equal(None)
}
it should "have a preconstructed empty instance" in {
val map = ObjectiveMap.Empty
map.readSatisfiedStatusFrom should equal(None)
map.readNormalizedMeasureFrom should equal(None)
map.writeSatisfiedStatusTo should equal(None)
map.writeNormalizedMeasureTo should equal(None)
map.readRawScoreFrom should equal(None)
map.readMinScoreFrom should equal(None)
map.readMaxScoreFrom should equal(None)
map.readCompletionStatusFrom should equal(None)
map.readProgressMeasureFrom should equal(None)
map.writeRawScoreTo should equal(None)
map.writeMinScoreTo should equal(None)
map.writeMaxScoreTo should equal(None)
map.writeCompletionStatusTo should equal(None)
map.writeProgressMeasureTo should equal(None)
map should equal(ObjectiveMap.Empty)
}
"Objective" can "be constructed" in {
val map = new ObjectiveMap(readSatisfiedStatusFrom = Some("GO1"))
val objective = new Objective(Some("OBJ1"), satisfiedByMeasure = false, minNormalizedMeasure = BigDecimal("0.5"), globalObjectiveMap = map)
objective.id.get should equal("OBJ1")
objective.satisfiedByMeasure should equal(false)
objective.minNormalizedMeasure should equal(BigDecimal("0.5"))
objective.globalObjectiveMap should equal(map)
}
it can "be constructed without map" in {
val objective = new Objective(Some("OBJ1"), satisfiedByMeasure = false, minNormalizedMeasure = BigDecimal("0.5"))
objective.id.get should equal("OBJ1")
objective.satisfiedByMeasure should equal(false)
objective.minNormalizedMeasure should equal(BigDecimal("0.5"))
objective.globalObjectiveMap should equal(ObjectiveMap.Empty)
}
it can "be constructed with min normalized measure = -1" in {
val map = new ObjectiveMap(readSatisfiedStatusFrom = Some("GO1"))
val objective = new Objective(Some("OBJ1"), satisfiedByMeasure = false, minNormalizedMeasure = BigDecimal("-1"), globalObjectiveMap = map)
objective.id.get should equal("OBJ1")
objective.satisfiedByMeasure should equal(false)
objective.minNormalizedMeasure should equal(BigDecimal("-1"))
objective.globalObjectiveMap should equal(map)
}
it can "be constructed with min normalized measure = 1" in {
val map = new ObjectiveMap(readSatisfiedStatusFrom = Some("GO1"))
val objective = new Objective(Some("OBJ1"), satisfiedByMeasure = false, minNormalizedMeasure = BigDecimal("1"), globalObjectiveMap = map)
objective.id.get should equal("OBJ1")
objective.satisfiedByMeasure should equal(false)
objective.minNormalizedMeasure should equal(BigDecimal("1"))
objective.globalObjectiveMap should equal(map)
}
it can "not be constructed with min normalized measure < -1" in {
val map = new ObjectiveMap(readSatisfiedStatusFrom = Some("GO1"))
intercept[IllegalArgumentException] {
new Objective(Some("OBJ1"), satisfiedByMeasure = false, minNormalizedMeasure = BigDecimal("-2"), globalObjectiveMap = map)
}
}
it can "not be constructed with min normalized measure > 1" in {
val map = new ObjectiveMap(readSatisfiedStatusFrom = Some("GO1"))
intercept[IllegalArgumentException] {
new Objective(Some("OBJ1"), satisfiedByMeasure = false, minNormalizedMeasure = BigDecimal("1.1"), globalObjectiveMap = map)
}
}
}
|
ViLPy/Valamis
|
valamis-scorm-lesson/src/test/scala/com/arcusys/valamis/lesson/scorm/model/ObjectiveTest.scala
|
Scala
|
lgpl-3.0
| 5,739
|
package extractors.examples
object Binders {
case class User(name: String, age: Int)
def test(users: Any) = users match {
case List(single @ User(_, 20)) => single.name
case List(_, tail @ _*) => tail.mkString
case Nil => "nobody"
}
}
|
julienrf/scala-lessons
|
highlights/extractors/code/src/main/scala/extractors/examples/Binders.scala
|
Scala
|
mit
| 293
|
package io.buoyant.linkerd.protocol.h2
import com.twitter.finagle.buoyant.h2.service.{H2Classifier, H2Classifiers}
class RetryableIdempotent5XXConfig extends H2ClassifierConfig {
def mk: H2Classifier =
H2Classifiers.RetryableIdempotentFailures
}
class RetryableIdempotent5XXInitializer extends H2ClassifierInitializer {
val configClass = classOf[RetryableIdempotent5XXConfig]
override val configId = "io.l5d.h2.retryableIdempotent5XX"
}
object RetryableIdempotent5XXInitializer extends RetryableIdempotent5XXInitializer
class RetryableRead5XXConfig extends H2ClassifierConfig {
def mk: H2Classifier =
H2Classifiers.RetryableReadFailures
}
class RetryableRead5XXInitializer extends H2ClassifierInitializer {
val configClass = classOf[RetryableRead5XXConfig]
override val configId = "io.l5d.h2.retryableRead5XX"
}
object RetryableRead5XXInitializer extends RetryableRead5XXInitializer
class NonRetryable5XXConfig extends H2ClassifierConfig {
def mk: H2Classifier =
H2Classifiers.NonRetryableServerFailures
}
class NonRetryable5XXInitializer extends H2ClassifierInitializer {
val configClass = classOf[NonRetryable5XXConfig]
override val configId = "io.l5d.h2.nonRetryable5XX"
}
object NonRetryable5XXInitializer extends NonRetryable5XXInitializer
class AllSuccessfulConfig extends H2ClassifierConfig {
def mk: H2Classifier = H2Classifiers.AllSuccessful
}
class AllSuccessfulInitializer extends H2ClassifierInitializer {
val configClass = classOf[AllSuccessfulConfig]
override val configId = "io.l5d.h2.allSuccessful"
}
object AllSuccessfulInitializer extends AllSuccessfulInitializer
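// Hedged illustration (added, not part of the original file): each initializer's
// configId is what a linkerd config references to select a classifier, e.g.
//   responseClassifier:
//     kind: io.l5d.h2.retryableRead5XX
// The surrounding YAML layout is an assumption about linkerd's config format.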
|
denverwilliams/linkerd
|
linkerd/protocol/h2/src/main/scala/io/buoyant/linkerd/protocol/h2/H2Classifiers.scala
|
Scala
|
apache-2.0
| 1,823
|
/**
* Copyright (C) 2012 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.xforms.submission
import java.io.InputStream
import java.net.URI
import org.orbeon.oxf.common.OXFException
import org.orbeon.oxf.http
import org.orbeon.oxf.pipeline.api.ExternalContext
import org.orbeon.oxf.util._
import org.orbeon.oxf.xforms.{XFormsContainingDocument, XFormsModel}
import org.orbeon.oxf.xml.TransformerUtils
import org.orbeon.saxon.om.{DocumentInfo, Navigator, NodeInfo}
// The plan is to move stuff from XFormsSubmissionUtils to here as needed
object SubmissionUtils {
def dataNodeHash(node: NodeInfo) =
SecureUtils.hmacString(Navigator.getPath(node), "hex")
def readByteArray(model: XFormsModel, resolvedURL: String): Array[Byte] =
processGETConnection(model, resolvedURL) { is ⇒
NetUtils.inputStreamToByteArray(is)
}
def readTinyTree(model: XFormsModel, resolvedURL: String, handleXInclude: Boolean): DocumentInfo =
processGETConnection(model, resolvedURL) { is ⇒
TransformerUtils.readTinyTree(
XPath.GlobalConfiguration,
is,
resolvedURL,
handleXInclude,
true
)
}
def processGETConnection[T](model: XFormsModel, resolvedURL: String)(body: InputStream ⇒ T): T =
ConnectionResult.withSuccessConnection(openGETConnection(model, resolvedURL), closeOnSuccess = true)(body)
def openGETConnection(model: XFormsModel, resolvedURL: String) = {
implicit val _logger = model.indentedLogger
val url = new URI(resolvedURL)
Connection(
httpMethodUpper = "GET",
url = url,
credentials = None,
content = None,
headers = Connection.buildConnectionHeadersLowerIfNeeded(
scheme = url.getScheme,
hasCredentials = false,
customHeaders = Map(),
headersToForward = Connection.headersToForwardFromProperty,
cookiesToForward = Connection.cookiesToForwardFromProperty
) mapValues (_.toList),
loadState = true,
logBody = BaseSubmission.isLogBody
).connect(
saveState = true
)
}
def evaluateHeaders(submission: XFormsModelSubmission, forwardClientHeaders: Boolean): Map[String, List[String]] = {
try {
val headersToForward =
clientHeadersToForward(submission.containingDocument.getRequestHeaders, forwardClientHeaders)
SubmissionHeaders.evaluateHeaders(
submission.container,
submission.getModel.getContextStack,
submission.getEffectiveId,
submission.getSubmissionElement,
headersToForward
)
} catch {
case e: OXFException ⇒ throw new XFormsSubmissionException(submission, e, e.getMessage, "processing <header> elements")
}
}
def clientHeadersToForward(allHeaders: Map[String, List[String]], forwardClientHeaders: Boolean) = {
if (forwardClientHeaders) {
// Forwarding the user agent and accept headers makes sense when dealing with resources that
// typically would come from the client browser, including:
//
// - submission with replace="all"
// - dynamic resources loaded by xf:output
//
// Also useful when the target URL renders XForms in noscript mode, where some browser sniffing takes
// place for handling the <button> vs. <submit> element.
val toForward =
for {
name ← List("user-agent", "accept")
values ← allHeaders.get(name)
} yield
name → values
// Give priority to explicit headers
toForward.toMap
} else
Map.empty[String, List[String]]
}
def forwardResponseHeaders(cxr: ConnectionResult, response: ExternalContext.Response): Unit =
for {
(headerName, headerValues) ← http.Headers.proxyHeaders(cxr.headers, request = false)
headerValue ← headerValues
} locally {
response.addHeader(headerName, headerValue)
}
}
|
wesley1001/orbeon-forms
|
src/main/scala/org/orbeon/oxf/xforms/submission/SubmissionUtils.scala
|
Scala
|
lgpl-2.1
| 4,560
|
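A hedged call-site sketch for the GET helpers in the SubmissionUtils file above; the model value and the URL are assumptions, not taken from the file.

// Assuming an XFormsModel `model` is in scope and the URL is already resolved:
val bytes: Array[Byte] = SubmissionUtils.readByteArray(model, "http://example.org/instance.xml")
val tree: DocumentInfo = SubmissionUtils.readTinyTree(model, "http://example.org/instance.xml", handleXInclude = false)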
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.yarn
import java.util.UUID
import akka.actor._
import grizzled.slf4j.Logger
import org.apache.flink.configuration.Configuration
import org.apache.flink.runtime.clusterframework.messages._
import org.apache.flink.runtime.leaderretrieval.{LeaderRetrievalListener, LeaderRetrievalService}
import org.apache.flink.runtime.{FlinkActor, LeaderSessionMessageFilter, LogMessages}
import org.apache.flink.yarn.YarnMessages._
import scala.collection.mutable
import scala.concurrent.duration._
import scala.language.postfixOps
/** Actor which is responsible to repeatedly poll the Yarn cluster status from the ResourceManager.
*
* This class represents the bridge between the [[YarnClusterClient]] and the
* [[YarnApplicationMasterRunner]].
*
* @param flinkConfig Configuration object
* @param leaderRetrievalService [[LeaderRetrievalService]] which is used to retrieve the current
* leading [[org.apache.flink.runtime.jobmanager.JobManager]]
*/
class ApplicationClient(
val flinkConfig: Configuration,
val leaderRetrievalService: LeaderRetrievalService)
extends FlinkActor
with LeaderSessionMessageFilter
with LogMessages
  with LeaderRetrievalListener {
val log = Logger(getClass)
val INITIAL_POLLING_DELAY = 0 seconds
val WAIT_FOR_YARN_INTERVAL = 2 seconds
val POLLING_INTERVAL = 3 seconds
var yarnJobManager: Option[ActorRef] = None
var pollingTimer: Option[Cancellable] = None
var running = false
var messagesQueue : mutable.Queue[InfoMessage] = mutable.Queue[InfoMessage]()
var stopMessageReceiver : Option[ActorRef] = None
var leaderSessionID: Option[UUID] = None
override def preStart(): Unit = {
super.preStart()
try {
leaderRetrievalService.start(this)
} catch {
case e: Exception =>
log.error("Could not start the leader retrieval service.", e)
throw new RuntimeException("Could not start the leader retrieval service.", e)
}
}
override def postStop(): Unit = {
log.info("Stopped Application client.")
disconnectFromJobManager()
try {
leaderRetrievalService.stop()
} catch {
      case e: Exception => log.error("Leader retrieval service did not shut down properly.", e)
}
// Terminate the whole actor system because there is only the application client running
context.system.shutdown()
}
override def handleMessage: Receive = {
// ----------------------------- Registration -> Status updates -> shutdown ----------------
case TriggerApplicationClientRegistration(jobManagerAkkaURL, currentTimeout, deadline) =>
if (isConnected) {
// we are already connected to the job manager
log.debug("ApplicationClient is already registered to the " +
s"JobManager ${yarnJobManager.get}.")
} else {
if (deadline.forall(_.isOverdue())) {
// we failed to register in time. That means we should quit
log.error(s"Failed to register at the JobManager with address $jobManagerAkkaURL. " +
s"Shutting down...")
self ! decorateMessage(PoisonPill)
} else {
log.info(s"Trying to register at JobManager $jobManagerAkkaURL.")
val jobManager = context.actorSelection(jobManagerAkkaURL)
jobManager ! decorateMessage(
RegisterInfoMessageListener.getInstance()
)
val nextTimeout = (currentTimeout * 2).min(ApplicationClient.MAX_REGISTRATION_TIMEOUT)
context.system.scheduler.scheduleOnce(
currentTimeout,
self,
decorateMessage(
TriggerApplicationClientRegistration(
jobManagerAkkaURL,
nextTimeout,
deadline
)
)
)(context.dispatcher)
}
}
case msg: RegisterInfoMessageListenerSuccessful =>
// The job manager acts as a proxy between the client and the resource manager
val jm = sender()
log.info(s"Successfully registered at the ResourceManager using JobManager $jm")
yarnJobManager = Some(jm)
case JobManagerLeaderAddress(jobManagerAkkaURL, newLeaderSessionID) =>
log.info(s"Received address of new leader $jobManagerAkkaURL with session ID" +
s" $newLeaderSessionID.")
disconnectFromJobManager()
leaderSessionID = Option(newLeaderSessionID)
Option(jobManagerAkkaURL).foreach{
akkaURL =>
if (akkaURL.nonEmpty) {
val maxRegistrationDuration = ApplicationClient.MAX_REGISTRATION_DURATION
val deadline = if (maxRegistrationDuration.isFinite()) {
Some(maxRegistrationDuration.fromNow)
} else {
None
}
// trigger registration at new leader
self ! decorateMessage(
TriggerApplicationClientRegistration(
akkaURL,
ApplicationClient.INITIAL_REGISTRATION_TIMEOUT,
deadline))
}
}
case msg @ LocalStopYarnSession(status, diagnostics) =>
log.info("Sending StopCluster request to JobManager.")
// preserve the original sender so we can reply
val originalSender = sender()
yarnJobManager match {
case Some(jm) =>
jm.tell(decorateMessage(new StopCluster(status, diagnostics)), originalSender)
case None =>
context.system.scheduler.scheduleOnce(1 second) {
// try once more; we might have been connected in the meantime
self.tell(msg, originalSender)
}(context.dispatcher)
}
// ----------------- handle messages from the cluster -------------------
// receive remote messages
case msg: InfoMessage =>
log.debug(s"Received new YarnMessage $msg. Now ${messagesQueue.size} messages in queue")
messagesQueue.enqueue(msg)
// locally forward messages
case LocalGetYarnMessage =>
if (messagesQueue.nonEmpty) {
sender() ! decorateMessage(Option(messagesQueue.dequeue()))
} else {
sender() ! decorateMessage(None)
}
}
/** Disconnects this [[ApplicationClient]] from the connected [[YarnJobManager]] and cancels
* the polling timer.
*
*/
def disconnectFromJobManager(): Unit = {
log.info(s"Disconnect from JobManager ${yarnJobManager.getOrElse(ActorRef.noSender)}.")
yarnJobManager foreach {
_ ! decorateMessage(UnRegisterInfoMessageListener.get())
}
pollingTimer foreach {
_.cancel()
}
yarnJobManager = None
leaderSessionID = None
pollingTimer = None
}
/** True if the [[ApplicationClient]] is connected to the [[YarnJobManager]]
*
* @return true if the client is connected to the JobManager, otherwise false
*/
def isConnected: Boolean = {
yarnJobManager.isDefined
}
/**
* Handle unmatched messages with an exception.
*/
override def unhandled(message: Any): Unit = {
// let the actor crash
throw new RuntimeException("Received unknown message " + message)
}
override def notifyLeaderAddress(leaderAddress: String, leaderSessionID: UUID): Unit = {
log.info(s"Notification about new leader address $leaderAddress with " +
s"session ID $leaderSessionID.")
self ! JobManagerLeaderAddress(leaderAddress, leaderSessionID)
}
override def handleError(exception: Exception): Unit = {
log.error("Error in leader retrieval service.", exception)
// in case of an error in the LeaderRetrievalService, we shut down the ApplicationClient
self ! decorateMessage(PoisonPill)
}
}
object ApplicationClient {
val INITIAL_REGISTRATION_TIMEOUT: FiniteDuration = 500 milliseconds
val MAX_REGISTRATION_DURATION: FiniteDuration = 5 minutes
val MAX_REGISTRATION_TIMEOUT = 5 minutes
}
|
hongyuhong/flink
|
flink-yarn/src/main/scala/org/apache/flink/yarn/ApplicationClient.scala
|
Scala
|
apache-2.0
| 8,645
|
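A hedged sketch of starting the actor defined above; Flink wires this up through its own utilities, so the actor system and both constructor arguments are assumptions here.

import akka.actor.{ActorSystem, Props}

// `system`, `flinkConfig` and `leaderRetrievalService` are assumed to exist at the call site.
val client = system.actorOf(
  Props(classOf[ApplicationClient], flinkConfig, leaderRetrievalService),
  name = "applicationClient")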
package com.bitunified.cb.server
import java.util
import com.bitunified.cb.notification.{Event, Notification}
import scala.collection
class DSManagerNotification extends Notification {
  var subjects: scala.collection.mutable.Map[String, Unit => Unit] = new scala.collection.mutable.HashMap[String, Unit => Unit]
  def registerSubject(sub: String, f: Unit => Unit): Unit = {
    subjects.put(sub, f)
  }
  def notifyData(): Unit = {}
}
object DSManagerNotification extends DSManagerNotification {
  def event(subject: String, event: Event, any: Any) = {
    val notifications: collection.Map[String, Unit => Unit] = subjects.filterKeys(_ == subject)
    val func: Option[Unit => Unit] = subjects.get(subject)
// if (func.isDefined){
// func.get.apply()
// }
}
}
|
bitunified/node-platform
|
np-server/src/main/scala-2.11/com/bitunified/cb/server/DSManagerNotification.scala
|
Scala
|
mit
| 758
|
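A hedged sketch of registering a callback with the notification object above; the subject name is hypothetical. Note that the stored functions have type Unit => Unit, so the callback takes a Unit argument.

DSManagerNotification.registerSubject("dataset-changed", (_: Unit) => println("dataset changed"))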
/*
* Copyright (c) 2014-2021 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.observers
import monix.execution.Ack.{Continue, Stop}
import monix.execution.Scheduler
import monix.execution.exceptions.DummyException
import monix.reactive.BaseTestSuite
object ContramapSubscriberSuite extends BaseTestSuite {
test("Subscriber.contramap equivalence with plain Subscriber") { implicit s =>
check1 { xs: List[Int] =>
var sum = 0
val plainSubscriber: Subscriber[Int] = new Subscriber[Int] {
def onError(ex: Throwable): Unit = ()
def onComplete(): Unit = sum += 100
def onNext(elem: Int) = {
sum += elem
Continue
}
override implicit def scheduler: Scheduler = s
}
val contramapSubscriber: Subscriber[Long] =
plainSubscriber.contramap(_.toInt)
val plainAck = plainSubscriber.onNextAll(xs)
val contraAck = contramapSubscriber.onNextAll(xs.map(_.toLong))
s.tick()
plainAck.syncTryFlatten(s) == Continue &&
contraAck.syncTryFlatten(s) == Continue &&
sum == xs.sum * 2
}
}
test("Subscriber.contramap protects against user code") { implicit s =>
val dummy = DummyException("dummy")
val out: Subscriber[Long] = (Subscriber.empty[Int]: Subscriber[Int])
.contramap(_ => throw dummy)
s.tick()
assertEquals(out.onNext(1), Stop)
}
test("Subscriber.contramap works") { implicit s =>
var isDone = 0
val intSubscriber: Subscriber[Int] = new Subscriber[Int] {
def onError(ex: Throwable): Unit = isDone += 1
def onComplete(): Unit = isDone += 1
def onNext(elem: Int) = Continue
override implicit def scheduler: Scheduler = s
}
val doubleSubscriber: Subscriber[Double] = intSubscriber.contramap(_.toInt)
assertEquals(doubleSubscriber.onNext(1.0), Continue)
doubleSubscriber.onComplete()
assertEquals(isDone, 1)
doubleSubscriber.onError(DummyException("dummy"))
assertEquals(isDone, 1)
assertEquals(doubleSubscriber.onNext(2.0), Stop)
}
}
|
monifu/monix
|
monix-reactive/shared/src/test/scala/monix/reactive/observers/ContramapSubscriberSuite.scala
|
Scala
|
apache-2.0
| 2,676
|
// scalac: -deprecation -Wunused:nowarn -Yrangepos:false -Werror
import scala.annotation._
class ann(a: Any) extends Annotation
class C {
@deprecated("message", "1.2.3") def dep = 0
@nowarn def t0 = { 0: @nowarn; 1 } // outer @nowarn unused
@nowarn def t1 = { 0: Int @nowarn; 1 } // inner @nowarn unused, it covers the type, not the expression
@nowarn @ann(dep) def t2 = 0 // deprecation warning, @nowarn unused
@ann(dep: @nowarn) def t3 = 0 // silent
@nowarn("cat=deprecation") def t4 = dep
def t5 = (new I1a).m
// completion forced by method above
@nowarn class I1a { // unused @nowarn
@nowarn def m = { 1; 2 }
}
// completion during type checking
@nowarn class I1b { // unused @nowarn
@nowarn def m = { 1; 2 }
}
def t6 = (new I1b).m
@nowarn val t7a = { 0; 1 }
val t7b = { 0; 1 }
@nowarn class I2a {
def f: Unit = 1
}
class I2b {
def f: Unit = 1
}
trait I3a
@nowarn object I3a {
def main(args: Array[String]) = ()
}
trait I3b
object I3b {
def main(args: Array[String]) = () // main method in companion of trait triggers a warning in the backend
}
def t8(): Unit = {
@nowarn
val a = {
123
()
}
val b = {
123
()
}
}
@nowarn("msg=pure expression")
def t9a(): Unit = {
123
}
@nowarn("msg=something else")
def t9b(): Unit = {
123
}
@nowarn
def t10a(): Unit = {
123
}
def t10b(): Unit = {
123
}
def t11(): Unit = {
val a = dep: @nowarn
a + dep
}
}
trait T {
@nowarn val t1 = { 0; 1 }
}
class K extends T
@nowarn("site=Uh.f.g")
class Uh {
def f = {
def g(c: C) = c.dep
}
}
|
scala/scala
|
test/files/neg/nowarnPointPos.scala
|
Scala
|
apache-2.0
| 1,693
|
package cn.edu.sjtu.omnilab.kalin.hz
/**
* Data field schema of ETLed Hangzhou Mobile data.
*/
object DataSchema {
val TTime = 0;
val DTime = 1;
val BS = 2;
val IMSI = 3;
val MobileType = 4;
val DestIP = 5;
val DestPort = 6;
val IsSuccess = 7;
val FailureCause = 8;
val ResponseTime = 9;
val Host = 10;
val ContentLength = 11;
val RetransCount = 12;
val Packets = 13;
val StatusCode = 14;
val WebVolume = 15;
val ContentType = 16;
val UserAgent = 17;
val IsMobile = 18;
val EGPRS = 19;
val UMTSTDD = 20;
val ICP = 21;
val SC = 22;
val URI = 23;
val OS = 24;
val LON = 25;
val LAT = 26;
}
|
caesar0301/MDMS
|
kalin-etl/src/main/scala/cn/edu/sjtu/omnilab/kalin/hz/DataSchema.scala
|
Scala
|
apache-2.0
| 647
|
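A hedged sketch of using the schema above to index one ETLed record; the tab delimiter and the `line` value are assumptions, not taken from the file.

val fields = line.split("\t")
val imsi = fields(DataSchema.IMSI)
val host = fields(DataSchema.Host)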
package com.massrelevance.dropwizard.scala.inject
import com.sun.jersey.api.ParamException
import com.sun.jersey.api.core.HttpContext
import com.sun.jersey.server.impl.inject.AbstractHttpContextInjectable
import com.sun.jersey.server.impl.model.parameter.multivalued.{MultivaluedParameterExtractor, ExtractorContainerException}
class ScalaCollectionQueryParamInjectable(extractor: MultivaluedParameterExtractor,
decode: Boolean)
extends AbstractHttpContextInjectable[Object] {
def getValue(c: HttpContext) = try {
extractor.extract(c.getUriInfo.getQueryParameters(decode))
} catch {
case e: ExtractorContainerException =>
throw new ParamException.QueryParamException(e.getCause,
extractor.getName,
extractor.getDefaultStringValue)
}
}
|
torbjornvatn/dropwizard-scala
|
src/main/scala/com/massrelevance/dropwizard/scala/inject/ScalaCollectionQueryParamInjectable.scala
|
Scala
|
apache-2.0
| 818
|
package x7c1.linen.database.control
import android.content.Context
import android.database.sqlite.{SQLiteDatabase, SQLiteOpenHelper}
import x7c1.wheat.macros.logger.Log
import x7c1.wheat.modern.database.WritableDatabase
import x7c1.wheat.modern.database.selector.CanProvideSelector
class DatabaseHelper(context: Context)
extends SQLiteOpenHelper(context, LinenDatabase.name, null, LinenDatabase.version) {
lazy val writable = new WritableDatabase(getWritableDatabase)
def selectorOf[A](implicit x: CanProvideSelector[A]): x.Selector = {
x createFrom getReadableDatabase
}
override def onConfigure(db: SQLiteDatabase) = {
db.setForeignKeyConstraintsEnabled(true)
}
override def onUpgrade(db: SQLiteDatabase, oldVersion: Int, newVersion: Int): Unit = {
Log info s"[init] $oldVersion -> $newVersion"
val upgrades = LinenDatabase upgradesFrom oldVersion
for {
upgrade <- upgrades.view
_ = Log info s"version:${upgrade.version}"
query <- upgrade.queries
}{
Log info s"query: $query"
db execSQL query
}
}
override def onCreate(db: SQLiteDatabase): Unit = {
Log info "[init]"
LinenDatabase.defaults foreach { query =>
Log info s"query: $query"
db execSQL query
}
onUpgrade(db, 0, LinenDatabase.version)
}
}
|
x7c1/Linen
|
linen-repository/src/main/scala/x7c1/linen/database/control/DatabaseHelper.scala
|
Scala
|
mit
| 1,312
|
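A hedged usage sketch for the helper above; the Android Context is assumed to come from the caller.

val helper = new DatabaseHelper(context)
val db: WritableDatabase = helper.writable // lazily opens the writable database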
package io.github.nthportal.version.releasetype
import org.junit.Assert._
import org.junit.Test
class AlphaBetaTest {
@Test
@throws[Exception]
def testRCToString() {
val rc1 = AlphaBeta.ReleaseCandidate(1)
assertEquals(rc1.extension, rc1.toString)
}
}
|
NthPortal/version-scala
|
src/test/scala/io/github/nthportal/version/releasetype/AlphaBetaTest.scala
|
Scala
|
apache-2.0
| 270
|
package scodec
package codecs
import scalaz.\/
import scodec.bits.BitVector
private[codecs] final class FailCodec[A](encMessage: String, decMessage: String) extends Codec[A] {
  override def encode(a: A) = \/.left(encMessage)
  override def decode(b: BitVector) = \/.left(decMessage)
override def toString = "fail"
}
|
ceedubs/scodec
|
src/main/scala/scodec/codecs/FailCodec.scala
|
Scala
|
bsd-3-clause
| 325
|
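A hedged sketch of the codec above. Since the class is private[codecs] it is normally reached through the library's public combinators, but inside the package both directions always yield the left disjunction:

val c = new FailCodec[Int]("enc failed", "dec failed")
c.encode(42)              // -\/("enc failed")
c.decode(BitVector.empty) // -\/("dec failed")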
/*
* Distributed as part of Scalala, a linear algebra library.
*
* Copyright (C) 2008- Daniel Ramage
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110 USA
*/
package scalala;
package tensor;
package mutable;
import domain._;
import scalala.scalar.Scalar;
import scalala.generic.collection._;
/**
* A mutable tensor that acts like a collection of key-value pairs backed by
* a map.
*
* @author dramage
*/
trait CounterLike
[@specialized(Int,Long) K, @specialized(Int,Long,Float,Double) V,
+M<:scala.collection.mutable.Map[K,V],
+This<:Counter[K,V]]
extends tensor.CounterLike[K,V,M,This] with Tensor1Like[K,V,SetDomain[K],This] { self =>
def update(k : K, v : V) =
data(k) = v;
}
trait Counter
[@specialized(Int,Long) K, @specialized(Int,Long,Float,Double) V]
extends tensor.Counter[K,V] with Tensor1[K,V]
with CounterLike[K,V,scala.collection.mutable.Map[K,V],Counter[K,V]];
object Counter {
class Impl[@specialized(Int,Long) K, @specialized(Int,Long,Float,Double) V]
(override val data : scala.collection.mutable.Map[K,V])
(implicit override final val scalar : Scalar[V])
extends Counter[K,V] with Serializable;
/** Returns an empty counter. */
def apply[K,V:Scalar]() : Counter[K,V] =
new Impl(scala.collection.mutable.HashMap[K,V]());
/** Returns a counter by summing all the given values. */
def apply[K,V:Scalar](values : (K,V)*) : Counter[K,V] =
apply(values);
/** Returns a counter by summing all the given values. */
def apply[K,V:Scalar](values : TraversableOnce[(K,V)]) : Counter[K,V] = {
val rv = apply[K,V]();
values.foreach({ case (k,v) => rv(k) = rv.scalar.+(v,rv(k)); });
rv;
}
/** Counts each of the given items. */
def count[K](items : TraversableOnce[K]) : Counter[K,Int] = {
val rv = apply[K,Int]();
items.foreach(rv(_) += 1);
rv;
}
def count[K](items: K*): mutable.Counter[K,Int] = count(items);
def apply[K,V:Scalar](domain : Domain1[K]) : Counter[K,V] =
new Impl(scala.collection.mutable.HashMap[K,V]());
// implicit def opCanCopy[K,V:CanCopy] = CanCopy.opMapValues[Counter[K,V],V];
// implicit def opZeros[K,V:Scalar] = CanCreateZerosLike.opMapValues[Counter[K,V],V,Counter[K,V]];
implicit def CanMapValuesCounter
[@specialized(Int) K, @specialized(Int,Double) V, @specialized(Int,Double) RV:Scalar]: CanMapValues[Counter[K, V], V, RV, Counter[K, RV]]
= new CanMapValues[Counter[K,V],V,RV,Counter[K,RV]] {
override def map(from : Counter[K,V], fn : (V=>RV)) = {
val rv = Counter[K,RV]();
for( (k,v) <- from.pairsIterator) {
rv(k) = fn(from.data(k));
}
rv;
}
override def mapNonZero(from : Counter[K,V], fn : (V=>RV)) = {
val rv = Counter[K,RV]();
for( (k,v) <- from.pairsIteratorNonZero) {
rv(k) = fn(from.data(k));
}
rv;
}
}
}
|
scalala/Scalala
|
src/main/scala/scalala/tensor/mutable/Counter.scala
|
Scala
|
lgpl-2.1
| 3,529
|
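A short sketch of the factory methods above; both calls follow directly from the file's definitions.

val tallies = Counter.count(List("a", "b", "a")) // tallies("a") == 2, tallies("b") == 1
val sums = Counter("x" -> 1.0, "x" -> 2.0)       // equal keys are summed: sums("x") == 3.0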
package moe.nightfall.srails.common.tileentity.effect
import moe.nightfall.srails.SRails
import net.minecraft.client.Minecraft
import net.minecraft.client.entity.EntityPlayerSP
import net.minecraft.entity.Entity
import net.minecraftforge.fml.client.FMLClientHandler
import net.minecraftforge.fml.common.gameevent.TickEvent.{ClientTickEvent, WorldTickEvent}
import net.minecraftforge.fml.relauncher.{Side, SideOnly}
object AntiGravity extends EffectOnWorldTick with EffectOnClientTick {
final val g = 0.08D * 0.9800000190734863D
  // 0.0784000015258789 (gravity)
  // needed here but maybe move it later
@SideOnly(Side.CLIENT)
lazy val gameSettings = FMLClientHandler.instance.getClient.gameSettings
SRails.log.debug(s"created antigravity effect object $this")
override def onEntityIntersect(entity: Entity, worldTickEvent: WorldTickEvent): Unit = {
//reverse gravity
entity.motionY += g
}
@SideOnly(Side.CLIENT)
def onEntityIntersect(entity: Entity, clientTickEvent: ClientTickEvent): Unit = {
entity match {
case p: EntityPlayerSP =>
if (!p.capabilities.isFlying && Minecraft.getMinecraft.inGameHasFocus) {
          // counteract gravity; I hope this doesn't break
p.motionY += g
// p.posY = p.lastTickPosY
}
case _ =>
entity.motionY += g
}
}
}
|
Nightfall/SRails
|
src/main/scala/moe/nightfall/srails/common/tileentity/effect/AntiGravity.scala
|
Scala
|
bsd-2-clause
| 1,340
|
object Version {
val library = "0.1.0-SNAPSHOT"
}
|
suzaku-io/suzaku
|
project/Version.scala
|
Scala
|
apache-2.0
| 52
|
/**
* Copyright (C) 2017 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.fr
import org.orbeon.oxf.fr.FormRunnerCommon._
import org.orbeon.oxf.fr.Names.FormResources
import org.orbeon.saxon.om.NodeInfo
import org.orbeon.scaxon.SimplePath._
import org.orbeon.xforms.XFormsCrossPlatformSupport
trait FormRunnerResourcesOps {
//@XPathFunction
def allLangs (resourcesRootElem: NodeInfo): Seq[String] = allResources(resourcesRootElem) attValue "*:lang"
def allResources(resourcesRootElem: NodeInfo): Seq[NodeInfo] = resourcesRootElem child "resource"
//@XPathFunction
def resourcesInstanceRootElemOpt(inDoc: NodeInfo): Option[NodeInfo] = frc.inlineInstanceRootElem(inDoc, FormResources)
def allLangsWithResources(resourcesRootElem: NodeInfo): Seq[(String, NodeInfo)] =
allLangs(resourcesRootElem) zip allResources(resourcesRootElem)
def formResourcesInGivenLangOrFirst(formResourcesRootElem: NodeInfo, lang: String): NodeInfo =
allResources(formResourcesRootElem).find(_.attValue("*:lang") == lang).getOrElse(allResources(formResourcesRootElem).head)
// Same as above but doesn't require a Form Builder context
// NOTE: Support an entirely missing resources instance (for tests).
// TODO: Migrate to `findResourceHoldersWithLangUseDocUseContext`.
def findResourceHoldersWithLangUseDoc(inDoc: NodeInfo, controlName: String): Seq[(String, NodeInfo)] =
resourcesInstanceRootElemOpt(inDoc) orElse
resourcesInstanceDocFromUrlOpt(inDoc) map
(findResourceHoldersWithLang(controlName, _)) getOrElse
Nil
def findResourceHoldersWithLangUseDocUseContext(
controlName : String)(implicit
ctx : FormRunnerDocContext
): Seq[(String, NodeInfo)] =
findResourceHoldersWithLang(controlName, ctx.resourcesRootElem)
// Find control resource holders with their language
def findResourceHoldersWithLang(controlName: String, resourcesRootElem: NodeInfo): Seq[(String, NodeInfo)] =
for {
(lang, resource) <- allLangsWithResources(resourcesRootElem)
holder <- resource child controlName headOption // there *should* be only one
} yield
(lang, holder)
// Support for `<xf:instance id="" src=""/>`, only for Form Builder's Summary page
private def resourcesInstanceDocFromUrlOpt(inDoc: NodeInfo): Option[NodeInfo] =
frc.instanceElem(inDoc, FormResources) flatMap
(_.attValueOpt("src")) map
readUrlAsImmutableXmlDocument map
(_.rootElement)
// Also used by tests!
private def readUrlAsImmutableXmlDocument(url: String) =
XFormsCrossPlatformSupport.readTinyTreeFromUrl(url)
}
object FormRunnerResourcesOps extends FormRunnerResourcesOps
|
orbeon/orbeon-forms
|
form-runner/shared/src/main/scala/org/orbeon/oxf/fr/FormRunnerResourcesOps.scala
|
Scala
|
lgpl-2.1
| 3,311
|
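A hedged sketch of the holder lookup above; the control name and `resourcesRootElem` are assumptions.

val holders: Seq[(String, NodeInfo)] =
  FormRunnerResourcesOps.findResourceHoldersWithLang("first-name", resourcesRootElem)
// One (lang, holder) pair per <resource> element that contains the control.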
/*
* Copyright 2011 TomTom International BV
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tomtom.splitter.layer7
import java.net.InetSocketAddress
import java.nio.channels.ClosedChannelException
import java.util.concurrent.ExecutorService
import org.jboss.netty.bootstrap.ClientBootstrap
import org.jboss.netty.buffer.ChannelBuffer
import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory
import org.jboss.netty.channel._
import org.jboss.netty.handler.codec.http._
import org.slf4j.LoggerFactory
/**
* Document me.
*
* @author Eric Bowman
* @since 2011-04-06 14:19
*/
object HttpClient {
def cb2String(content: ChannelBuffer): String = {
import content.{array, arrayOffset, readableBytes}
new String(array, arrayOffset, readableBytes)
}
}
case class HttpClient(host: String = "localhost", port: Int)(implicit executor: ExecutorService) extends SimpleChannelUpstreamHandler {
@volatile var onResponses: List[((HttpResponse, String) => Unit)] = Nil
@volatile var exceptions: List[Throwable] = Nil
var inFlight = false
@volatile var isClosed = false
@volatile var channel: Channel = _
@volatile var request: HttpRequest = _
val log = LoggerFactory.getLogger(getClass)
def supplementRequest(httpRequest: HttpRequest): HttpRequest = httpRequest
def onChunk(chunk: HttpChunk) {}
import HttpClient._
override def exceptionCaught(ctx: ChannelHandlerContext, e: ExceptionEvent) {
if (!e.getCause.isInstanceOf[ClosedChannelException]) {
this synchronized {
exceptions ::= e.getCause
}
e.getCause.printStackTrace()
} else {
e.getCause.printStackTrace()
}
}
override def channelClosed(ctx: ChannelHandlerContext, e: ChannelStateEvent) {
isClosed = true
super.channelClosed(ctx, e)
}
override def messageReceived(ctx: ChannelHandlerContext, e: MessageEvent) {
e.getMessage match {
case response: HttpResponse =>
this synchronized {
try {
onResponses.head(response, cb2String(response.getContent))
} catch {
case ex: Throwable => exceptions ::= ex
} finally {
onResponses = onResponses.tail
}
}
if (!response.isChunked) {
this synchronized {
if (closed && onResponses == Nil) {
channel.close()
}
inFlight = false
this.notifyAll()
}
}
case chunk: HttpChunk =>
onChunk(chunk)
if (chunk.isLast) {
this synchronized {
if (closed && onResponses == Nil) {
channel.close()
}
inFlight = false
this.notifyAll()
}
}
}
super.messageReceived(ctx, e)
}
val bootstrap = new ClientBootstrap(
new NioClientSocketChannelFactory(
executor, executor))
bootstrap.setPipelineFactory(new ChannelPipelineFactory {
def getPipeline = {
val pipeline = Channels.pipeline
pipeline.addLast("codec", new HttpClientCodec())
pipeline.addLast("this", HttpClient.this)
pipeline
}
})
open()
def open() {
val connectFuture = bootstrap.connect(new InetSocketAddress(host, port))
channel = connectFuture.awaitUninterruptibly().getChannel
if (!connectFuture.isSuccess) {
connectFuture.getCause.printStackTrace()
bootstrap.releaseExternalResources()
sys.error("Could not connect")
} else {
isClosed = false
}
}
def POST(path: String, callback: ((HttpResponse, String) => Unit)): HttpClient = {
<<(HttpMethod.POST)(path, callback)
}
def GET(path: String, callback: ((HttpResponse, String) => Unit)): HttpClient = {
<<(HttpMethod.GET)(path, callback)
}
def <<(path: String, callback: ((HttpResponse, String) => Unit)): HttpClient = {
<<(HttpMethod.GET)(path, callback)
}
def <<(method: HttpMethod)(path: String, callback: ((HttpResponse, String) => Unit)): HttpClient = {
this synchronized {
onResponses = onResponses ::: List(callback)
while (inFlight) {
// splitter doesn't support pipelining!
this.wait()
}
inFlight = true
}
if (isClosed) {
open()
}
request = new DefaultHttpRequest(HttpVersion.HTTP_1_1, method, path)
request.headers.set(HttpHeaders.Names.HOST, host + (if (port != 80) {
":" + port
} else {
""
}))
request = supplementRequest(request)
channel.write(request)
isClosed = !HttpHeaders.isKeepAlive(request)
this
}
def assertOk() {
this synchronized {
exceptions match {
case Nil =>
case head :: tail =>
exceptions.foreach(_.printStackTrace())
throw head
}
}
}
@volatile var closed = false
def close() {
closed = true
this synchronized {
this.wait()
}
}
}
|
ebowman/splitter
|
src/test/scala/tomtom/splitter/layer7/HttpClient.scala
|
Scala
|
apache-2.0
| 5,425
|
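A hedged sketch of driving the test client above: one keep-alive GET followed by an orderly shutdown. The executor and the port are assumptions.

import java.util.concurrent.{ExecutorService, Executors}

implicit val executor: ExecutorService = Executors.newCachedThreadPool()
val client = HttpClient(port = 8080)
client.GET("/ping", (response, body) => println(s"${response.getStatus} $body"))
client.close()    // blocks until the outstanding response has been handled
client.assertOk() // rethrows the first exception recorded by the handler, if any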