| code (stringlengths 5–1M) | repo_name (stringlengths 5–109) | path (stringlengths 6–208) | language (stringclasses: 1 value) | license (stringclasses: 15 values) | size (int64: 5–1M) |
|---|---|---|---|---|---|
package sss.ancillary
import java.util.Base64
/**
* Created by alan on 4/5/16.
*/
object ByteArrayEncodedStrOps {
implicit class ByteArrayToBase64UrlStr(bs: Array[Byte]) {
def toBase64Str: String = Base64.getUrlEncoder.withoutPadding.encodeToString(bs)
}
implicit class Base64StrToByteArray(base64: String) {
def fromBase64Str: Array[Byte] = Base64.getUrlDecoder.decode(base64)
}
}
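// Usage sketch (assumed, not part of the original file): the two implicit
// classes give byte arrays and strings URL-safe Base64 round-tripping.
// import sss.ancillary.ByteArrayEncodedStrOps._
// val encoded = "hello".getBytes("UTF-8").toBase64Str // "aGVsbG8" (unpadded)
// val decoded = encoded.fromBase64Str                 // the original bytes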
|
mcsherrylabs/sss.ancillary
|
src/main/scala/sss/ancillary/ByteArrayEncodedStrOps.scala
|
Scala
|
gpl-3.0
| 398
|
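// Gauss–Seidel iteration for Ax = b: each sweep updates x in place, using
// already-updated entries for j < i and previous-iterate entries for j > i,
// and stops once successive iterates differ by less than eps in Euclidean norm.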
object Seidel extends App {
def method(a: Array[Array[Double]], b: Array[Double]) = {
val n = a.length
val eps = 0.00001
var p = Array.empty[Double]
val x = new Array[Double](n)
def converge(xk: Array[Double], xkp: Array[Double]): Boolean = {
var norm: Double = 0
for (i <- 0 until n)
norm += (xk(i) - xkp(i)) * (xk(i) - xkp(i))
Math.sqrt(norm) < eps
}
}
do {
p = x.clone()
for (i <- 0 until n) {
var v:Double = 0
for (j <- 0 until i)
v += (a(i)(j) * x(j))
for (j <- i+1 until n)
v += (a(i)(j) * p(j))
x(i) = (b(i) - v) / a(i)(i)
}
}
while (!converge(x, p))
x
}
def test1 = method(Array(Array(10,1,1),Array(2,10,1),Array(2,2,10)),Array(12,13,14))
test1.foreach(x => print(x.toString+" "))
}
|
dnoskov/cm
|
Iterative/src/main/scala/Seidel.scala
|
Scala
|
mit
| 850
|
package tutorial.webapp
import scala.scalajs.js.JSApp
import scala.scalajs.js.annotation.JSExport
import org.scalajs.jquery._
import scala.scalajs.js
object TutorialApp extends JSApp {
def afficheIti(iti:js.Dynamic): String = {
if(iti.status.toString == "OK"){
val r = iti.routes.asInstanceOf[js.Dictionary[js.Dynamic]]
val route = r("0").asInstanceOf[js.Dictionary[js.Dynamic]]
val legs = route("legs").asInstanceOf[js.Dictionary[js.Dynamic]]
val leg = legs("0").asInstanceOf[js.Dictionary[js.Dynamic]]
var s = "<p>Depart:" + leg("start_address") + ", Arrivee:" + leg("end_address") + "</p>"
val steps = leg("steps").asInstanceOf[js.Dictionary[js.Dynamic]]
for( k <- js.Object.keys(steps.asInstanceOf[js.Object])){
val step = steps(k).asInstanceOf[js.Dictionary[js.Dynamic]]
if(step("travel_mode").toString == "TRANSIT"){
val t = step("transit_details")
s = s + "<p>" + t.line.vehicle.name + ", line:"+ t.line.short_name + " vers " + t.headsign + "</p>"
s = s + "<p>--- arret " + t.departure_stop.name + ", a " + t.departure_time.text + "</p>"
s = s + "<p>--- sortir " + t.arrival_stop.name + ", a " + t.arrival_time.text + "</p>"
}
}
s
}else{
"<p>Aucun resultat</p>"
}
}
def rechItin(e:JQueryEventObject): Unit = {
e.preventDefault()
val ori = jQuery("#ori").value().replace(' ', '+')
val dest = jQuery("#dest").value().replace(' ', '+')
jQuery.ajax(js.Dynamic.literal(
url = "http://localhost:8080/api?origine="+ori+"&destination="+dest,
success = { (data: js.Any, textStatus: String, jqXHR: JQueryXHR) =>
val json = js.JSON.parse(jqXHR.responseText)
jQuery("#iti").html(afficheIti(json))
},
error = { (jqXHR: JQueryXHR, textStatus: String, errorThrow: String) =>
println(s"jqXHR=$jqXHR,text=$textStatus,err=$errorThrow")
},
`type` = "GET"
).asInstanceOf[JQueryAjaxSettings])
}
def setupUI(): Unit = {
jQuery("#rechItin").submit(rechItin _)
}
def main(): Unit = {
//appendPar("Hello World")
jQuery(setupUI _)
}
}
|
GIorfindel/transports
|
requete_ajax_scalajs/scalajs_to_javascript/src/main/scala/tutorial/webapp/TutorialApp.scala
|
Scala
|
gpl-3.0
| 2,091
|
package models
import controllers.external.ChannelLong
import org.joda.time.{DateTime, DateTimeZone}
import org.scalatest.MustMatchers
import org.scalatestplus.play.PlaySpec
class TVChannelSpec extends PlaySpec with MustMatchers {
val channel = ChannelLong(TVChannel("CHANNEL 1 PLUS", List(), List(), None))("http://localhost:9000")
"image" should {
"be http://localhost:9000/CHANNEL_1_PLUS.png when tvChannel is 'CHANNEL 1 PLUS'" in {
channel.image mustBe "http://localhost:9000/CHANNEL_1_PLUS.png"
}
}
"uriToday" should {
"be http://localhost:9000/channels/CHANNEL_1_PLUS/today when tvChannel 'CHANNEL 1 PLUS'" in {
channel.uriToday mustBe "http://localhost:9000/tvcontent/channel/CHANNEL+1+PLUS/today"
}
}
"uriCurrent" should {
"be http://localhost:9000/channels/CHANNEL_1_PLUS/current when tvChannel 'CHANNEL 1 PLUS'" in {
channel.uriCurrent mustBe "http://localhost:9000/tvcontent/channel/CHANNEL+1+PLUS/current"
}
}
"uriLeft" should {
"be http://localhost:9000/channels/CHANNEL_1_PLUS/left when tvChannel 'CHANNEL 1 PLUS'" in {
channel.uriLeft mustBe "http://localhost:9000/tvcontent/channel/CHANNEL+1+PLUS/left"
}
}
}
|
tvlive/tv-api
|
test/unit/models/TVChannelSpec.scala
|
Scala
|
apache-2.0
| 1,205
|
/***********************************************************************
* Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.cassandra.tools.commands
import com.beust.jcommander.Parameters
import org.locationtech.geomesa.cassandra.data.CassandraDataStore
import org.locationtech.geomesa.cassandra.tools.CassandraDataStoreCommand
import org.locationtech.geomesa.cassandra.tools.CassandraDataStoreCommand.CassandraDataStoreParams
import org.locationtech.geomesa.cassandra.tools.commands.CassandraCreateSchemaCommand.CassandraCreateSchemaParams
import org.locationtech.geomesa.tools.data.CreateSchemaCommand
import org.locationtech.geomesa.tools.data.CreateSchemaCommand.CreateSchemaParams
class CassandraCreateSchemaCommand extends CreateSchemaCommand[CassandraDataStore] with CassandraDataStoreCommand {
override val params = new CassandraCreateSchemaParams
}
object CassandraCreateSchemaCommand {
@Parameters(commandDescription = "Create a GeoMesa feature type")
class CassandraCreateSchemaParams extends CreateSchemaParams with CassandraDataStoreParams
}
|
aheyne/geomesa
|
geomesa-cassandra/geomesa-cassandra-tools/src/main/scala/org/locationtech/geomesa/cassandra/tools/commands/CassandraCreateSchemaCommand.scala
|
Scala
|
apache-2.0
| 1,438
|
class Cell[T](x0: T) {
type U = T
var x1: U = x0
}
object Test {
val str: Cell[String] = new Cell("a")
val other: Cell[Int] = new Cell(0)
def main(args: Array[String]): Unit = {
List(str, other) foreach (_.x1 = new AnyRef)
str.x1.length
}
}
// another way demonstrating the same underlying problem, as reported by Roman Kalukiewicz
class Holder[_T](_f1 : _T, _f2 : _T) {
type T = _T
var f1 : T = _f1
var f2 : T = _f2
}
object Test2 {
val str = new Holder("t1", "t2")
val num = new Holder(1, 2)
List(str, num).foreach(h => h.f1 = new Thread())
def main(args: Array[String]) {
println(str.f1)
}
}
|
felixmulder/scala
|
test/files/neg/t5120.scala
|
Scala
|
bsd-3-clause
| 639
|
/**
* Generated by apidoc - http://www.apidoc.me
* Service version: 0.2.12
* apidoc:0.11.84 http://www.apidoc.me/flow/error/0.2.12/play_2_x_json
*/
package io.flow.error.v0.models {
/**
* An error of some type has occurred. The most common error will be validation on
* input. See messages for details.
*/
case class GenericError(
code: io.flow.error.v0.models.GenericErrorCode = io.flow.error.v0.models.GenericErrorCode.GenericError,
messages: Seq[String]
)
sealed trait GenericErrorCode extends _root_.scala.Product with _root_.scala.Serializable
object GenericErrorCode {
/**
* Generic errors are the default type. The accompanying message will provide
* details on the failure.
*/
case object GenericError extends GenericErrorCode { override def toString = "generic_error" }
/**
* A client error has occurred. This represents a misconfiguration of the client
*/
case object ClientError extends GenericErrorCode { override def toString = "client_error" }
/**
* A server error has occurred. The Flow tech team is automatically notified of all
* server errors
*/
case object ServerError extends GenericErrorCode { override def toString = "server_error" }
/**
* UNDEFINED captures values that are sent either in error or
* that were added by the server after this library was
* generated. We want to make it easy and obvious for users of
* this library to handle this case gracefully.
*
* We use all CAPS for the variable name to avoid collisions
* with the camel cased values above.
*/
case class UNDEFINED(override val toString: String) extends GenericErrorCode
/**
* all returns a list of all the valid, known values. We use
* lower case to avoid collisions with the camel cased values
* above.
*/
val all = Seq(GenericError, ClientError, ServerError)
private[this]
val byName = all.map(x => x.toString.toLowerCase -> x).toMap
def apply(value: String): GenericErrorCode = fromString(value).getOrElse(UNDEFINED(value))
def fromString(value: String): _root_.scala.Option[GenericErrorCode] = byName.get(value.toLowerCase)
}
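// Usage sketch (assumed): apply falls back to UNDEFINED for unknown values,
// while fromString returns None for them.
// GenericErrorCode("server_error")      // ServerError
// GenericErrorCode("bogus")             // UNDEFINED("bogus")
// GenericErrorCode.fromString("bogus")  // None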
}
package io.flow.error.v0.models {
package object json {
import play.api.libs.json.__
import play.api.libs.json.JsString
import play.api.libs.json.Writes
import play.api.libs.functional.syntax._
import io.flow.error.v0.models.json._
private[v0] implicit val jsonReadsUUID = __.read[String].map(java.util.UUID.fromString)
private[v0] implicit val jsonWritesUUID = new Writes[java.util.UUID] {
def writes(x: java.util.UUID) = JsString(x.toString)
}
private[v0] implicit val jsonReadsJodaDateTime = __.read[String].map { str =>
import org.joda.time.format.ISODateTimeFormat.dateTimeParser
dateTimeParser.parseDateTime(str)
}
private[v0] implicit val jsonWritesJodaDateTime = new Writes[org.joda.time.DateTime] {
def writes(x: org.joda.time.DateTime) = {
import org.joda.time.format.ISODateTimeFormat.dateTime
val str = dateTime.print(x)
JsString(str)
}
}
implicit val jsonReadsErrorGenericErrorCode = new play.api.libs.json.Reads[io.flow.error.v0.models.GenericErrorCode] {
def reads(js: play.api.libs.json.JsValue): play.api.libs.json.JsResult[io.flow.error.v0.models.GenericErrorCode] = {
js match {
case v: play.api.libs.json.JsString => play.api.libs.json.JsSuccess(io.flow.error.v0.models.GenericErrorCode(v.value))
case _ => {
(js \ "value").validate[String] match {
case play.api.libs.json.JsSuccess(v, _) => play.api.libs.json.JsSuccess(io.flow.error.v0.models.GenericErrorCode(v))
case err: play.api.libs.json.JsError => err
}
}
}
}
}
def jsonWritesErrorGenericErrorCode(obj: io.flow.error.v0.models.GenericErrorCode) = {
play.api.libs.json.JsString(obj.toString)
}
def jsObjectGenericErrorCode(obj: io.flow.error.v0.models.GenericErrorCode) = {
play.api.libs.json.Json.obj("value" -> play.api.libs.json.JsString(obj.toString))
}
implicit def jsonWritesErrorGenericErrorCode: play.api.libs.json.Writes[GenericErrorCode] = {
new play.api.libs.json.Writes[io.flow.error.v0.models.GenericErrorCode] {
def writes(obj: io.flow.error.v0.models.GenericErrorCode) = {
jsonWritesErrorGenericErrorCode(obj)
}
}
}
implicit def jsonReadsErrorGenericError: play.api.libs.json.Reads[GenericError] = {
(
(__ \ "code").read[io.flow.error.v0.models.GenericErrorCode] and
(__ \ "messages").read[Seq[String]]
)(GenericError.apply _)
}
def jsObjectGenericError(obj: io.flow.error.v0.models.GenericError) = {
play.api.libs.json.Json.obj(
"code" -> play.api.libs.json.JsString(obj.code.toString),
"messages" -> play.api.libs.json.Json.toJson(obj.messages)
)
}
implicit def jsonWritesErrorGenericError: play.api.libs.json.Writes[GenericError] = {
new play.api.libs.json.Writes[io.flow.error.v0.models.GenericError] {
def writes(obj: io.flow.error.v0.models.GenericError) = {
jsObjectGenericError(obj)
}
}
}
}
}
package io.flow.error.v0 {
object Bindables {
import play.api.mvc.{PathBindable, QueryStringBindable}
import org.joda.time.{DateTime, LocalDate}
import org.joda.time.format.ISODateTimeFormat
import io.flow.error.v0.models._
// Type: date-time-iso8601
implicit val pathBindableTypeDateTimeIso8601 = new PathBindable.Parsing[org.joda.time.DateTime](
ISODateTimeFormat.dateTimeParser.parseDateTime(_), _.toString, (key: String, e: _root_.java.lang.Exception) => s"Error parsing date time $key. Example: 2014-04-29T11:56:52Z"
)
implicit val queryStringBindableTypeDateTimeIso8601 = new QueryStringBindable.Parsing[org.joda.time.DateTime](
ISODateTimeFormat.dateTimeParser.parseDateTime(_), _.toString, (key: String, e: _root_.java.lang.Exception) => s"Error parsing date time $key. Example: 2014-04-29T11:56:52Z"
)
// Type: date-iso8601
implicit val pathBindableTypeDateIso8601 = new PathBindable.Parsing[org.joda.time.LocalDate](
ISODateTimeFormat.yearMonthDay.parseLocalDate(_), _.toString, (key: String, e: _root_.java.lang.Exception) => s"Error parsing date $key. Example: 2014-04-29"
)
implicit val queryStringBindableTypeDateIso8601 = new QueryStringBindable.Parsing[org.joda.time.LocalDate](
ISODateTimeFormat.yearMonthDay.parseLocalDate(_), _.toString, (key: String, e: _root_.java.lang.Exception) => s"Error parsing date $key. Example: 2014-04-29"
)
// Enum: GenericErrorCode
private[this] val enumGenericErrorCodeNotFound = (key: String, e: _root_.java.lang.Exception) => s"Unrecognized $key, should be one of ${io.flow.error.v0.models.GenericErrorCode.all.mkString(", ")}"
implicit val pathBindableEnumGenericErrorCode = new PathBindable.Parsing[io.flow.error.v0.models.GenericErrorCode] (
GenericErrorCode.fromString(_).get, _.toString, enumGenericErrorCodeNotFound
)
implicit val queryStringBindableEnumGenericErrorCode = new QueryStringBindable.Parsing[io.flow.error.v0.models.GenericErrorCode](
GenericErrorCode.fromString(_).get, _.toString, enumGenericErrorCodeNotFound
)
}
}
|
flowcommerce/splashpage
|
api/app/generated/FlowErrorV0Models.scala
|
Scala
|
mit
| 7,468
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler.cluster.k8s
import io.fabric8.kubernetes.client.KubernetesClient
import org.apache.spark.SecurityManager
import org.apache.spark.deploy.k8s._
import org.apache.spark.deploy.k8s.features._
import org.apache.spark.resource.ResourceProfile
import org.apache.spark.util.Utils
private[spark] class KubernetesExecutorBuilder {
def buildFromFeatures(
conf: KubernetesExecutorConf,
secMgr: SecurityManager,
client: KubernetesClient,
resourceProfile: ResourceProfile): KubernetesExecutorSpec = {
val initialPod = conf.get(Config.KUBERNETES_EXECUTOR_PODTEMPLATE_FILE)
.map { file =>
KubernetesUtils.loadPodFromTemplate(
client,
file,
conf.get(Config.KUBERNETES_EXECUTOR_PODTEMPLATE_CONTAINER_NAME),
conf.sparkConf)
}
.getOrElse(SparkPod.initialPod())
val userFeatures = conf.get(Config.KUBERNETES_EXECUTOR_POD_FEATURE_STEPS)
.map { className =>
Utils.classForName(className).newInstance().asInstanceOf[KubernetesFeatureConfigStep]
}
val features = Seq(
new BasicExecutorFeatureStep(conf, secMgr, resourceProfile),
new ExecutorKubernetesCredentialsFeatureStep(conf),
new MountSecretsFeatureStep(conf),
new EnvSecretsFeatureStep(conf),
new MountVolumesFeatureStep(conf),
new LocalDirsFeatureStep(conf)) ++ userFeatures
val spec = KubernetesExecutorSpec(
initialPod,
executorKubernetesResources = Seq.empty)
// If using a template this will always get the resources from that and combine
// them with any Spark conf or ResourceProfile resources.
features.foldLeft(spec) { case (spec, feature) =>
val configuredPod = feature.configurePod(spec.pod)
val addedResources = feature.getAdditionalKubernetesResources()
KubernetesExecutorSpec(
configuredPod,
spec.executorKubernetesResources ++ addedResources)
}
}
}
|
maropu/spark
|
resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/KubernetesExecutorBuilder.scala
|
Scala
|
apache-2.0
| 2,768
|
package com.nulabinc.backlog.r2b.cli
import com.nulabinc.backlog.migration.common.modules.{ServiceInjector => BacklogInjector}
import com.nulabinc.backlog.migration.common.service.SpaceService
import com.nulabinc.backlog.migration.common.utils.{ConsoleOut, Logging}
import com.nulabinc.backlog.r2b.conf.AppConfiguration
import com.nulabinc.backlog.r2b.redmine.modules.{ServiceInjector => RedmineInjector}
import com.nulabinc.backlog.r2b.redmine.service.{ProjectService, UserService => RedmineUserService}
import com.nulabinc.backlog4j.BacklogAPIException
import com.osinka.i18n.Messages
import com.taskadapter.redmineapi.bean.Project
import com.taskadapter.redmineapi.{
NotAuthorizedException,
RedmineAuthenticationException,
RedmineTransportException
}
/**
* @author
* uchida
*/
class ParameterValidator(config: AppConfiguration) extends Logging {
def validate(): Seq[String] = {
val validateRedmine = validateConfigRedmine()
val validateBacklog = validateConfigBacklog()
ConsoleOut.println(
Messages("cli.param.get.project", Messages("common.src"))
)
val optRedmineProject = optProject()
val messages = Seq(
validateBacklog,
validProjectKey(config.backlogConfig.projectKey),
validateAuthBacklog(validateBacklog)
).flatten
if (config.importOnly) {
messages
} else {
messages concat Seq(
validateRedmine,
validateProject(optRedmineProject)
).flatten
}
}
private[this] def validateProject(
optRedmineProject: Option[Project]
): Option[String] = {
optRedmineProject match {
case None =>
Some(
s"- ${Messages("cli.param.error.disable.project", config.redmineConfig.projectKey)}"
)
case _ => None
}
}
private[this] def validateConfigBacklog(): Option[String] = {
ConsoleOut.println(
Messages("cli.param.check.access", Messages("common.dst"))
)
val messages =
try {
val injector = BacklogInjector.createInjector(config.backlogConfig)
val spaceService = injector.getInstance(classOf[SpaceService])
spaceService.space()
ConsoleOut.println(
Messages("cli.param.ok.access", Messages("common.dst"))
)
None
} catch {
case unknown: BacklogAPIException if unknown.getStatusCode == 404 =>
logger.error(unknown.getMessage, unknown)
Some(
s"- ${Messages("cli.param.error.disable.host", Messages("common.dst"), config.backlogConfig.url)}"
)
case e: Throwable =>
logger.error(e.getMessage, e)
Some(
s"- ${Messages("cli.param.error.disable.access", Messages("common.dst"))}"
)
}
messages
}
private[this] def validateAuthBacklog(
resultValidateConfig: Option[String]
): Option[String] = {
if (resultValidateConfig.isEmpty) {
ConsoleOut.println(Messages("cli.param.check.admin"))
val injector = BacklogInjector.createInjector(config.backlogConfig)
val spaceService = injector.getInstance(classOf[SpaceService])
if (spaceService.hasAdmin()) {
ConsoleOut.println(Messages("cli.param.ok.admin"))
None
} else Some(s"- ${Messages("cli.param.error.auth.backlog")}")
} else None
}
private[this] def validateConfigRedmine(): Option[String] = {
ConsoleOut.println(
Messages("cli.param.check.access", Messages("common.src"))
)
try {
val injector = RedmineInjector.createInjector(config.redmineConfig)
val userService = injector.getInstance(classOf[RedmineUserService])
userService.allUsers()
ConsoleOut.println(
Messages("cli.param.ok.access", Messages("common.src"))
)
None
} catch {
case auth: RedmineAuthenticationException =>
logger.error(auth.getMessage, auth)
Some(s"- ${Messages("cli.param.error.auth", Messages("common.src"))}")
case noauth: NotAuthorizedException =>
logger.error(noauth.getMessage, noauth)
Some(
s"- ${Messages("cli.param.error.auth.not.auth", noauth.getMessage)}"
)
case transport: RedmineTransportException =>
logger.error(transport.getMessage, transport)
Some(
s"- ${Messages("cli.param.error.disable.host", Messages("common.src"), config.redmineConfig.url)}"
)
case e: Throwable =>
logger.error(e.getMessage, e)
Some(
s"- ${Messages("cli.param.error.disable.access", Messages("common.src"))}"
)
}
}
private[this] def validProjectKey(projectKey: String): Option[String] = {
if (projectKey.matches("""^[0-9A-Z_]+$""")) None
else Some(s"- ${Messages("cli.param.error.project_key", projectKey)}")
}
private[this] def optProject(): Option[Project] = {
val injector = RedmineInjector.createInjector(config.redmineConfig)
val projectService = injector.getInstance(classOf[ProjectService])
projectService.optProjectOfKey(config.redmineConfig.projectKey)
}
}
|
nulab/BacklogMigration-Redmine
|
src/main/scala/com/nulabinc/backlog/r2b/cli/ParameterValidator.scala
|
Scala
|
mit
| 5,061
|
/*
* Copyright (c) 2014 Snowplow Analytics Ltd.
* All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache
* License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at
* http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied.
*
* See the Apache License Version 2.0 for the specific language
* governing permissions and limitations there under.
*/
package com.snowplowanalytics.snowplow.storage.kinesis.elasticsearch.sinks
// Java
import java.nio.ByteBuffer
// Scala
import scala.util.Random
// Amazon
import com.amazonaws.services.kinesis.model.ResourceNotFoundException
import com.amazonaws.auth.AWSCredentialsProvider
import com.amazonaws.services.kinesis.AmazonKinesisClient
import com.amazonaws.services.kinesis.AmazonKinesis
import com.amazonaws.regions._
// Scalazon (for Kinesis interaction)
import io.github.cloudify.scala.aws.kinesis.Client
import io.github.cloudify.scala.aws.kinesis.Client.ImplicitExecution._
import io.github.cloudify.scala.aws.kinesis.Definitions.{
Stream,
PutResult,
Record
}
import io.github.cloudify.scala.aws.kinesis.KinesisDsl._
// Concurrent libraries
import scala.concurrent.{Future,Await,TimeoutException}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.util.{Success, Failure}
// Logging
import org.slf4j.LoggerFactory
/**
* Kinesis Sink
*
* @param provider AWSCredentialsProvider
* @param endpoint Kinesis stream endpoint
* @param name Kinesis stream name
* @param shards Number of shards with which to initialize the stream
* @param config Configuration for the Kinesis stream
*/
class KinesisSink(provider: AWSCredentialsProvider, endpoint: String, name: String, shards: Int)
extends ISink {
private lazy val log = LoggerFactory.getLogger(getClass())
import log.{error, debug, info, trace}
// Explicitly create a client so we can configure the end point
val client = new AmazonKinesisClient(provider)
client.setEndpoint(endpoint)
// Create a Kinesis client for stream interactions.
private implicit val kinesis = Client.fromClient(client)
// The output stream for failed events.
private val enrichedStream = createAndLoadStream()
/**
* Checks if a stream exists.
*
* @param name Name of the stream to look for
* @param timeout How long to wait for a description of the stream
* @return Whether the stream both exists and is active
*/
// TODO move out into a kinesis helpers library
def streamExists(name: String, timeout: Int = 60): Boolean = {
val exists: Boolean = try {
val streamDescribeFuture = for {
s <- Kinesis.stream(name).describe
} yield s
val description = Await.result(streamDescribeFuture, Duration(timeout, SECONDS))
description.isActive
} catch {
case rnfe: ResourceNotFoundException => false
}
if (exists) {
info(s"Stream $name exists and is active")
} else {
info(s"Stream $name doesn't exist or is not active")
}
exists
}
/**
* Looks up the stream, which must already exist and be active
*
* @param timeout How long to wait when checking that the stream exists
* @return The existing stream
*/
// TODO move out into a kinesis helpers library
def createAndLoadStream(timeout: Int = 60): Stream = {
if (streamExists(name)) {
Kinesis.stream(name)
} else {
throw new RuntimeException(s"Cannot write because stream $name doesn't exist or is not active")
}
}
/**
* Write a record to the Kinesis stream
*
* @param output The string record to write
* @param key A hash of the key determines to which shard the
* record is assigned. Defaults to a random string.
* @param good Unused parameter which exists to extend ISink
*/
def store(output: String, key: Option[String], good: Boolean) {
val putData = for {
p <- enrichedStream.put(
ByteBuffer.wrap(output.getBytes),
key.getOrElse(Random.nextInt.toString)
)
} yield p
putData onComplete {
case Success(result) => {
info(s"Writing successful")
info(s" + ShardId: ${result.shardId}")
info(s" + SequenceNumber: ${result.sequenceNumber}")
}
case Failure(f) => {
error(s"Writing failed.")
error(s" + " + f.getMessage)
}
}
}
}
|
mdavid/lessig-bigdata
|
lib/snowplow/4-storage/kinesis-elasticsearch-sink/src/main/scala/com.snowplowanalytics.snowplow.storage.kinesis/elasticsearch/sinks/KinesisSink.scala
|
Scala
|
mit
| 4,670
|
/* This file is part of ZikDroid.
* Copyright (C) 2015 Sacha Delanoue <contact@shaac.me>
*
* ZikDroid is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ZikDroid is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with ZikDroid. If not, see <http://www.gnu.org/licenses/>.
*/
package me.shaac.zikdroid
import android.annotation.TargetApi
import android.content.Context
import android.bluetooth.{BluetoothAdapter, BluetoothDevice, BluetoothSocket}
import java.io.{InputStream, OutputStream}
import scala.util.{Try, Success, Failure}
import Protocol._
class Connection(device: BluetoothDevice, context: Context) {
private var adapter: Option[BluetoothAdapter] = Bluetooth.getAdapter
private var socket: Option[BluetoothSocket] = None
private var input: Option[InputStream] = None
private var output: Option[OutputStream] = None
val state: State = new State
private val parser: Parser = new Parser(state, context)
def connect: Boolean =
Bluetooth connect device match {
case Failure(e) => false
case Success(sock) =>
socket = Some(sock)
output = Try(sock.getOutputStream).toOption
input = Try(sock.getInputStream).toOption
write(Array[Byte](0, 3, 0)) && skip(1024)
}
def reconnect: Boolean = {
disconnect
connect
}
def disconnect {
Try(socket map { _.close })
socket = None
input = None
output = None
}
def getBattery: Boolean = {
write(getRequest(API.BatteryGet))
read
}
def getANC: Boolean = {
write(getRequest(API.ANCEnableGet))
read
}
def enableANC(enable: Boolean): Option[Unit] = {
write(setRequest(API.ANCEnableSet, enable.toString))
read
if (input.isEmpty) None else Some(()) // Some(()) is the Unit value, not the Unit companion object
}
private def read: Boolean = {
skip(7)
val data = new Array[Byte](1024)
val size = input flatMap { x => Try(x read data).toOption }
size map { new String(data, 0, _) } map parser.parse
size.isDefined
}
private def skip(i: Int): Boolean =
Try(input map { _ skip i }).isSuccess
private def write(data: Array[Byte]): Boolean =
Try(output map { _ write data }).isSuccess
}
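// Hypothetical usage sketch (device and context assumed to come from the
// surrounding Android app):
// val conn = new Connection(device, context)
// if (conn.connect) {
//   conn.getBattery // responses are parsed into conn.state
//   conn.disconnect
// }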
|
Shaac/ZikDroid
|
src/Connection.scala
|
Scala
|
gpl-3.0
| 2,609
|
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import events.Event
/**
* A sorter for the events of a suite's distributed tests.
*
* <p>
* This trait is used, for example, by <a href="ParallelTestExecution.html"><code>ParallelTestExecution</code></a> to sort the
* events of tests back into sequential order, with a timeout if an event takes too long.
* </p>
*/
trait DistributedTestSorter {
/**
* Indicates a test with the specified name is about to be distributed.
*
* <p>
* For example, trait <code>ParallelTestExecution</code> invokes this method prior to
* passing a suite that will execute the specified test to the <code>Distributor</code>.
* Even though the tests are run in parallel, the events for the tests will be reported
* in the order this method is invoked.
* </p>
*
* @throws IllegalArgumentException if the specified test name has already
* completed (was already passed to <code>distributingTest</code>), but its events
* have not yet been fully reported.
* @throws NullArgumentException if <code>testName</code> is null.
*
* @param testName the name of the test about to be distributed
*/
def distributingTest(testName: String): Unit
/**
* Report an event for a distributed test.
*
* @param testName the name of the distributed test that produced this event
* @param event the event to report
* @throws NullArgumentException if either <code>testName</code> or <code>event</code> is null.
*/
def apply(testName: String, event: Event): Unit
/**
* Indicates the events for the distributed test with the specified name have all been fired.
*
* @throws IllegalArgumentException if the specified test name was never distributed
* (<em>i.e.</em>, was never passed to <code>distributingTest</code>), or has already
* completed (was already passed to <code>completedTest</code>).
* @throws NullArgumentException if <code>testName</code> is null.
*
* @param testName the name of the test that has completed
*/
def completedTest(testName: String): Unit
}
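// Protocol sketch (restating the scaladoc above): for each test, call
// sorter.distributingTest(name) first, then sorter(name, event) for each of
// its events, and finally sorter.completedTest(name); events are reported in
// distributingTest order even when the tests themselves run in parallel.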
|
dotty-staging/scalatest
|
scalatest/src/main/scala/org/scalatest/DistributedTestSorter.scala
|
Scala
|
apache-2.0
| 2,667
|
/*
* Copyright 2016 rdbc contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.rdbc.pgsql.core.internal.protocol.messages.frontend
case class CloseStatement(optionalName: Option[StmtName]) extends PgFrontendMessage
|
rdbc-io/rdbc-pgsql
|
rdbc-pgsql-core/src/main/scala/io/rdbc/pgsql/core/internal/protocol/messages/frontend/CloseStatement.scala
|
Scala
|
apache-2.0
| 750
|
package com.citypay.pan.search.io
case class FileSignature(name: String, extensions: List[String], signature: Array[Byte], offset: Int) {
/**
* Checks to see if a signature matches
*
* @param arr array to test against the signature
* @return true if the array matches the signature over its full byte length
*/
//noinspection ScalaStyle allow return values to escape for loop
def matches(arr: Array[Byte]): Boolean = {
for (i <- signature.indices) {
if (i >= arr.length) { // >= guards the arr(i) read below against going out of bounds
return false
} else if (signature(i) != arr(i)) {
return false
}
}
true
}
}
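// Hypothetical usage sketch (the signature bytes are the standard PNG header;
// note that matches compares from index 0 and does not consult offset):
// val png = FileSignature("PNG", List("png"), Array(0x89, 0x50, 0x4E, 0x47).map(_.toByte), 0)
// png.matches(buffer) // true when buffer starts with the signature bytes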
|
citypay/citypay-pan-search
|
src/main/scala/com/citypay/pan/search/io/FileSignature.scala
|
Scala
|
mit
| 599
|
/*start*/1 == 1/*end*/
//Boolean
|
ilinum/intellij-scala
|
testdata/typeInference/methodCall/InfixIntegers.scala
|
Scala
|
apache-2.0
| 32
|
package org.programmiersportgruppe.scala.commons
package basics
object Lists {
implicit final class AugmentedList[A](val self: List[A]) extends AnyVal {
/** Removes the first occurrence of a value from the list.
*
* This method uses a stack frame per element until it finds a matching value or the end of the list,
* so it's not great for large lists, but is good for short lists.
*/
def -(value: A): List[A] =
self match {
case Nil => Nil
case head :: tail =>
if (head == value) tail
else head :: new AugmentedList(tail) - value
}
}
}
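// Usage sketch (assumed): only the first occurrence is removed.
// import org.programmiersportgruppe.scala.commons.basics.Lists._
// List(1, 2, 3, 2) - 2 // List(1, 3, 2)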
|
programmiersportgruppe/scala-commons
|
basics/src/main/scala/org/programmiersportgruppe/scala/commons/basics/Lists.scala
|
Scala
|
bsd-2-clause
| 625
|
package uk.ac.ncl.openlab.intake24.services.fooddb.admin
import uk.ac.ncl.openlab.intake24.api.data.admin._
import uk.ac.ncl.openlab.intake24.errors._
trait CategoriesAdminService {
def getCategoryRecord(code: String, locale: String): Either[LocalLookupError, CategoryRecord]
def isCategoryCodeAvailable(code: String): Either[UnexpectedDatabaseError, Boolean]
def isCategoryCode(code: String): Either[UnexpectedDatabaseError, Boolean]
def deleteCategory(categoryCode: String): Either[DeleteError, Unit]
def deleteAllCategories(): Either[UnexpectedDatabaseError, Unit]
def createMainCategoryRecords(records: Seq[NewMainCategoryRecord]): Either[DependentCreateError, Unit]
def createLocalCategoryRecords(localCategoryRecords: Map[String, NewLocalCategoryRecord], locale: String): Either[LocalCreateError, Unit]
def updateMainCategoryRecord(categoryCode: String, mainCategoryUpdate: MainCategoryRecordUpdate): Either[DependentUpdateError, Unit]
def updateLocalCategoryRecord(categoryCode: String, localCategoryUpdate: LocalCategoryRecordUpdate, locale: String): Either[LocalDependentUpdateError, Unit]
}
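// Usage sketch (a concrete service implementation is assumed): failures
// surface as Left values rather than exceptions.
// categoriesAdmin.isCategoryCodeAvailable("FRT") match {
//   case Right(available) => println(s"available: $available")
//   case Left(error)      => println(s"database error: $error")
// }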
|
digitalinteraction/intake24
|
FoodDataServices/src/main/scala/uk/ac/ncl/openlab/intake24/services/fooddb/admin/CategoriesAdminService.scala
|
Scala
|
apache-2.0
| 1,130
|
package de.m7w3.signal
import de.m7w3.signal.messages.MessageSender
import org.whispersystems.signalservice.api.messages.multidevice.SignalServiceSyncMessage
import scala.collection.mutable
object TestMessageSender extends MessageSender {
val queue = mutable.Queue.empty[SignalServiceSyncMessage]
override def send(message: SignalServiceSyncMessage): Unit = {
queue += message
}
}
|
ayoub-benali/signal-desktop-client
|
src/test/scala/de/m7w3/signal/TestMessageSender.scala
|
Scala
|
apache-2.0
| 396
|
package rescala.fullmv.mirrors
import rescala.fullmv.sgt.synchronization.{LockStateResult0, SubsumableLock}
import scala.concurrent.Future
sealed trait RemoteTryLockResult
case class RemoteLocked(newRoot: SubsumableLock) extends RemoteTryLockResult
sealed trait RemoteTrySubsumeResult
case object RemoteSubsumed extends RemoteTrySubsumeResult {
val futured: Future[RemoteSubsumed.type] = Future.successful(this)
}
case class RemoteBlocked(newRoot: SubsumableLock) extends RemoteTrySubsumeResult with RemoteTryLockResult
case object RemoteGCd extends RemoteTrySubsumeResult with RemoteTryLockResult {
val futured: Future[RemoteGCd.type] = Future.successful(this)
}
trait SubsumableLockProxy {
def getLockedRoot: Future[LockStateResult0]
// result will have one temporary remote parameter reference for the caller to receive.
def remoteTryLock(): Future[RemoteTryLockResult]
final def remoteTryLockNoTail(): Future[RemoteTryLockResult] = remoteTryLock()
// parameter has one temporary remote parameter reference counted, which will be cleared by this call.
// result will have one temporary remote parameter reference for the caller to receive.
def remoteTrySubsume(lockedNewParent: SubsumableLock): Future[RemoteTrySubsumeResult]
final def remoteTrySubsumeNoTail(lockedNewParent: SubsumableLock): Future[RemoteTrySubsumeResult] =
remoteTrySubsume(lockedNewParent)
def remoteUnlock(): Unit
def asyncRemoteRefDropped(): Unit
}
|
guidosalva/REScala
|
Code/Main/jvm/src/main/scala/rescala/fullmv/mirrors/SubsumableLockProxy.scala
|
Scala
|
apache-2.0
| 1,457
|
package edu.rice.habanero.benchmarks.fjthrput
import edu.rice.habanero.actors.{JetlangActor, JetlangActorState, JetlangPool}
import edu.rice.habanero.benchmarks.{Benchmark, BenchmarkRunner}
/**
*
* @author <a href="http://shams.web.rice.edu/">Shams Imam</a> (shams@rice.edu)
*/
object ThroughputJetlangActorBenchmark {
def main(args: Array[String]) {
BenchmarkRunner.runBenchmark(args, new ThroughputJetlangActorBenchmark)
}
private final class ThroughputJetlangActorBenchmark extends Benchmark {
def initialize(args: Array[String]) {
ThroughputConfig.parseArgs(args)
}
def printArgInfo() {
ThroughputConfig.printArgs()
}
def runIteration() {
val actors = Array.tabulate[ThroughputActor](ThroughputConfig.A)(i => {
val loopActor = new ThroughputActor(ThroughputConfig.N)
loopActor.start()
loopActor
})
val message = new Object()
var m = 0
while (m < ThroughputConfig.N) {
actors.foreach(loopActor => {
loopActor.send(message)
})
m += 1
}
JetlangActorState.awaitTermination()
}
def cleanupIteration(lastIteration: Boolean, execTimeMillis: Double) {
if (lastIteration) {
JetlangPool.shutdown()
}
}
}
private class ThroughputActor(totalMessages: Int) extends JetlangActor[AnyRef] {
private var messagesProcessed = 0
override def process(msg: AnyRef) {
messagesProcessed += 1
ThroughputConfig.performComputation(37.2)
if (messagesProcessed == totalMessages) {
exit()
}
}
}
}
|
shamsmahmood/savina
|
src/main/scala/edu/rice/habanero/benchmarks/fjthrput/ThroughputJetlangActorBenchmark.scala
|
Scala
|
gpl-2.0
| 1,617
|
import com.typesafe.config._
import simplelib._
object ComplexApp extends App {
// This app is "complex" because we load multiple separate app
// configs into a single JVM and we have a separately-configurable
// context for simple lib.
// using a custom Config with the simple-lib library
// (simple-lib is a library in this same examples/ directory)
def demoConfigInSimpleLib(config: Config) {
val context = new SimpleLibContext(config)
context.printSetting("simple-lib.foo")
context.printSetting("simple-lib.hello")
context.printSetting("simple-lib.whatever")
}
// system property overrides work, but the properties must be set
// before the config lib is used (config lib will not notice changes
// once it loads the properties)
System.setProperty("simple-lib.whatever", "This value comes from a system property")
///////////
// "config1" is just an example of using a file other than application.conf
val config1 = ConfigFactory.load("complex1")
// use the config ourselves
println("config1, complex-app.something=" + config1.getString("complex-app.something"))
// use the config for a library
demoConfigInSimpleLib(config1)
//////////
// "config2" shows how to configure a library with a custom settings subtree
val config2 = ConfigFactory.load("complex2")
// use the config ourselves
println("config2, complex-app.something=" + config2.getString("complex-app.something"))
// pull out complex-app.simple-lib-context and move it to
// the toplevel, creating a new config suitable for our SimpleLibContext.
// The defaultOverrides() have to be put back on top of the stack so
// they still override any simple-lib settings.
// We fall back to config2 again to be sure we get simple-lib's
// reference.conf plus any other settings we've set. You could
// also just fall back to ConfigFactory.referenceConfig() if
// you don't want complex2.conf settings outside of
// complex-app.simple-lib-context to be used.
val simpleLibConfig2 = ConfigFactory.defaultOverrides()
.withFallback(config2.getConfig("complex-app.simple-lib-context"))
.withFallback(config2)
demoConfigInSimpleLib(simpleLibConfig2)
//////////
// Here's an illustration that simple-lib will get upset if we pass it
// a bad config. In this case, we'll fail to merge the reference
// config in to complex-app.simple-lib-context, so simple-lib will
// point out that some settings are missing.
try {
demoConfigInSimpleLib(config2.getConfig("complex-app.simple-lib-context"))
} catch {
case e: ConfigException.ValidationFailed =>
println("when we passed a bad config to simple-lib, it said: " + e.getMessage)
}
}
|
jasonchaffee/config
|
examples/scala/complex-app/src/main/scala/ComplexApp.scala
|
Scala
|
apache-2.0
| 2,837
|
package gapt.cli
import gapt.examples.Script
import gapt.formats.ClasspathInputFile
import scala.tools.nsc.interpreter._
import scala.tools.nsc.Settings
import ammonite.ops._
object CLIMain {
class ScriptsResultHolder( var result: Seq[Script] = Seq() ) {
def add( script: Script ): Unit = result :+= script
}
def main( args: Array[String] ): Unit = {
args match {
// If invoked as ./gapt.sh `script`, then execute `script` and exit.
case Array( scriptFile, scriptArgs @ _* ) =>
GaptScriptInterpreter.run( scriptFile, scriptArgs )
case _ =>
GaptRepl().run()
}
}
}
|
gapt/gapt
|
cli/src/main/scala/CLIMain.scala
|
Scala
|
gpl-3.0
| 626
|
package definiti.core.end2end.controls
import definiti.common.ast.Root
import definiti.common.program.Ko
import definiti.common.tests.{ConfigurationMock, LocationPath}
import definiti.core.Constants
import definiti.core.ProgramResultMatchers._
import definiti.core.end2end.EndToEndSpec
import definiti.core.validation.controls.TypeDeclarationParametersControl
class TypeReferenceParametersControlSpec extends EndToEndSpec {
import TypeReferenceParametersControlSpec._
"Project.generatePublicAST" should "validate a type referencing another type with valid parameters" in {
val output = processFile("controls.typeDeclarationParameters.nominal", configuration)
output shouldBe ok[Root]
}
it should "validate a type referencing another type with valid parameters in package" in {
val output = processFile("controls.typeDeclarationParameters.package", configuration)
output shouldBe ok[Root]
}
it should "validate a type referencing another type with transitive variables" in {
val output = processFile("controls.typeDeclarationParameters.transitive", configuration)
output shouldBe ok[Root]
}
it should "invalidate a type referencing another type with different number of parameters" in {
val output = processFile("controls.typeDeclarationParameters.invalidNumberOfParameters", configuration)
output should beResult(Ko[Root](
TypeDeclarationParametersControl.invalidNumberOfParameters(1, 2, invalidNumberOfParametersLocation(2, 9, 31)),
TypeDeclarationParametersControl.invalidNumberOfParameters(1, 2, invalidNumberOfParametersLocation(3, 12, 25)),
TypeDeclarationParametersControl.invalidNumberOfParameters(1, 2, invalidNumberOfParametersLocation(6, 27, 40))
))
}
it should "invalidate a type referencing another type with invalid type parameters" in {
val output = processFile("controls.typeDeclarationParameters.invalidTypeOfParameters", configuration)
output should beResult(Ko[Root](
TypeDeclarationParametersControl.invalidParameterType(Constants.number, Constants.string, invalidTypeOfParametersLocation(2, 24, 33)),
TypeDeclarationParametersControl.invalidParameterType(Constants.number, Constants.string, invalidTypeOfParametersLocation(3, 20, 29)),
TypeDeclarationParametersControl.invalidParameterType(Constants.number, Constants.string, invalidTypeOfParametersLocation(6, 35, 44))
))
}
}
object TypeReferenceParametersControlSpec {
val configuration = ConfigurationMock().withOnlyControls(TypeDeclarationParametersControl)
val invalidNumberOfParametersLocation = LocationPath.control(TypeDeclarationParametersControl, "invalidNumberOfParameters")
val invalidTypeOfParametersLocation = LocationPath.control(TypeDeclarationParametersControl, "invalidTypeOfParameters")
}
|
definiti/definiti-core
|
src/test/scala/definiti/core/end2end/controls/TypeReferenceParametersControlSpec.scala
|
Scala
|
mit
| 2,800
|
/* Copyright 2009-2016 EPFL, Lausanne */
package leon
package termination
import purescala.Path
import purescala.Expressions._
import purescala.ExprOps._
import purescala.Types._
import purescala.TypeOps._
import purescala.Constructors._
import purescala.Common._
trait ChainComparator { self : StructuralSize =>
val checker: TerminationChecker
def structuralDecreasing(e1: Expr, e2s: Seq[(Path, Expr)]): Seq[Expr] = flatTypesPowerset(e1.getType).toSeq.map {
recons => andJoin(e2s.map { case (path, e2) =>
path implies GreaterThan(self.fullSize(recons(e1)), self.fullSize(recons(e2)))
})
}
/*
def structuralDecreasing(e1: Expr, e2s: Seq[(Seq[Expr], Expr)]) : Expr = e1.getType match {
case ContainerType(def1, fields1) => Or(fields1.zipWithIndex map { case ((id1, type1), index) =>
structuralDecreasing(CaseClassSelector(def1, e1, id1), e2s.map { case (path, e2) =>
e2.getType match {
case ContainerType(def2, fields2) => (path, CaseClassSelector(def2, e2, fields2(index)._1))
case _ => scala.sys.error("Unexpected input combinations: " + e1 + " " + e2)
}
})
})
case TupleType(types1) => Or((0 until types1.length) map { case index =>
structuralDecreasing(tupleSelect(e1, index + 1), e2s.map { case (path, e2) =>
e2.getType match {
case TupleType(_) => (path, tupleSelect(e2, index + 1))
case _ => scala.sys.error("Unexpected input combination: " + e1 + " " + e2)
}
})
})
case c: ClassType => And(e2s map { case (path, e2) =>
e2.getType match {
case c2: ClassType => Implies(And(path), GreaterThan(self.size(e1), self.size(e2)))
case _ => scala.sys.error("Unexpected input combination: " + e1 + " " + e2)
}
})
case _ => BooleanLiteral(false)
}
*/
private sealed abstract class NumericEndpoint {
def inverse: NumericEndpoint = this match {
case UpperBoundEndpoint => LowerBoundEndpoint
case LowerBoundEndpoint => UpperBoundEndpoint
case InnerEndpoint => AnyEndpoint
case AnyEndpoint => InnerEndpoint
case NoEndpoint => NoEndpoint
}
def <(that: NumericEndpoint) : Boolean = (this, that) match {
case (UpperBoundEndpoint, AnyEndpoint) => true
case (LowerBoundEndpoint, AnyEndpoint) => true
case (InnerEndpoint, AnyEndpoint) => true
case (NoEndpoint, AnyEndpoint) => true
case (InnerEndpoint, UpperBoundEndpoint) => true
case (InnerEndpoint, LowerBoundEndpoint) => true
case (NoEndpoint, UpperBoundEndpoint) => true
case (NoEndpoint, LowerBoundEndpoint) => true
case (NoEndpoint, InnerEndpoint) => true
case _ => false
}
def <=(that: NumericEndpoint) : Boolean = (this, that) match {
case (t1, t2) if t1 < t2 => true
case (t1, t2) if t1 == t2 => true
case _ => false
}
def min(that: NumericEndpoint) : NumericEndpoint = {
if (this <= that) this else if (that <= this) that else InnerEndpoint
}
def max(that: NumericEndpoint) : NumericEndpoint = {
if (this <= that) that else if (that <= this) this else AnyEndpoint
}
}
private case object UpperBoundEndpoint extends NumericEndpoint
private case object LowerBoundEndpoint extends NumericEndpoint
private case object InnerEndpoint extends NumericEndpoint
private case object AnyEndpoint extends NumericEndpoint
private case object NoEndpoint extends NumericEndpoint
private def numericEndpoint(value: Expr, cluster: Set[Chain]) = {
object Value {
val vars = variablesOf(value)
assert(vars.size == 1)
def simplifyBinaryArithmetic(e1: Expr, e2: Expr) : Boolean = {
val (inE1, inE2) = (variablesOf(e1) == vars, variablesOf(e2) == vars)
if (inE1 && inE2) false else if (inE1) unapply(e1) else if (inE2) unapply(e2) else {
scala.sys.error("How the heck did we get here?!?")
}
}
def unapply(expr: Expr): Boolean = if (variablesOf(expr) != vars) false else expr match {
case Plus(e1, e2) => simplifyBinaryArithmetic(e1, e2)
case Minus(e1, e2) => simplifyBinaryArithmetic(e1, e2)
// case Times(e1, e2) => ... Need to make sure multiplier is not negative!
case e => e == value
}
}
def matches(expr: Expr) : NumericEndpoint = expr match {
case And(es) => es.map(matches).foldLeft[NumericEndpoint](AnyEndpoint)(_ min _)
case Or(es) => es.map(matches).foldLeft[NumericEndpoint](NoEndpoint)(_ max _)
case Not(e) => matches(e).inverse
case GreaterThan(Value(), e) if variablesOf(e).isEmpty => LowerBoundEndpoint
case GreaterThan(e, Value()) if variablesOf(e).isEmpty => UpperBoundEndpoint
case GreaterEquals(Value(), e) if variablesOf(e).isEmpty => LowerBoundEndpoint
case GreaterEquals(e, Value()) if variablesOf(e).isEmpty => UpperBoundEndpoint
case Equals(Value(), e) if variablesOf(e).isEmpty => InnerEndpoint
case Equals(e, Value()) if variablesOf(e).isEmpty => InnerEndpoint
case LessThan(e1, e2) => matches(GreaterThan(e2, e1))
case LessEquals(e1, e2) => matches(GreaterEquals(e2, e1))
case _ => NoEndpoint
}
def endpoint(expr: Expr) : NumericEndpoint = expr match {
case IfExpr(cond, thenn, elze) => matches(cond) match {
case NoEndpoint =>
endpoint(thenn) min endpoint(elze)
case ep =>
val terminatingThen = functionCallsOf(thenn).forall(fi => checker.terminates(fi.tfd.fd).isGuaranteed)
val terminatingElze = functionCallsOf(elze).forall(fi => checker.terminates(fi.tfd.fd).isGuaranteed)
val thenEndpoint = if (terminatingThen) ep max endpoint(thenn) else endpoint(thenn)
val elzeEndpoint = if (terminatingElze) ep.inverse max endpoint(elze) else endpoint(elze)
thenEndpoint max elzeEndpoint
}
case _ => NoEndpoint
}
cluster.foldLeft[NumericEndpoint](AnyEndpoint) { (acc, chain) =>
acc min chain.inlined.foldLeft[NumericEndpoint](NoEndpoint) { (acc, expr) =>
acc max endpoint(expr)
}
}
}
def numericConverging(e1: Expr, e2s: Seq[(Path, Expr)], cluster: Set[Chain]) : Seq[Expr] = flatType(e1.getType).toSeq.flatMap {
recons => recons(e1) match {
case e if e.getType == IntegerType =>
val endpoint = numericEndpoint(e, cluster)
val uppers = if (endpoint == UpperBoundEndpoint || endpoint == AnyEndpoint) {
Some(andJoin(e2s map { case (path, e2) => path implies GreaterThan(e, recons(e2)) }))
} else {
None
}
val lowers = if (endpoint == LowerBoundEndpoint || endpoint == AnyEndpoint) {
Some(andJoin(e2s map { case (path, e2) => path implies LessThan(e, recons(e2)) }))
} else {
None
}
uppers ++ lowers
case _ => Seq.empty
}
}
}
// vim: set ts=4 sw=4 et:
|
epfl-lara/leon
|
src/main/scala/leon/termination/ChainComparator.scala
|
Scala
|
gpl-3.0
| 6,937
|
/*
* Copyright 2012 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.twitter.zipkin.common
import com.twitter.util.Duration
/**
* @param timestamp when was this annotation created? microseconds from epoch
* @param value description of what happened at the timestamp could for example be "cache miss for key: x"
* @param host host this annotation was created on
* @param duration how long did the operation this annotation represent take?
*/
case class Annotation(timestamp: Long, value: String, host: Option[Endpoint], duration: Option[Duration] = None)
extends Ordered[Annotation]{
def serviceName = host.map(_.serviceName).getOrElse("Unknown service name")
/**
* @return diff between timestamps of the two annotations.
*/
def -(annotation: Annotation): Long = timestamp - annotation.timestamp
override def compare(that: Annotation): Int = {
if (this.timestamp != that.timestamp)
java.lang.Long.compare(this.timestamp, that.timestamp) // avoids Int overflow on large timestamp diffs
else if (this.value != that.value)
this.value compare that.value
else if (this.host != that.host)
this.host.getOrElse {return -1} compare that.host.getOrElse {return 1}
else if (this.duration != that.duration)
this.duration.getOrElse {return -1} compare that.duration.getOrElse {return 1}
else
0
}
}
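// Usage sketch (values assumed): ordering is primarily by timestamp, and
// `-` gives the timestamp difference in microseconds.
// val a = Annotation(1000L, "sr", None)
// val b = Annotation(2000L, "ss", None)
// (b - a) == 1000L && (a compare b) < 0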
|
srijs/zipkin
|
zipkin-common/src/main/scala/com/twitter/zipkin/common/Annotation.scala
|
Scala
|
apache-2.0
| 1,847
|
package de.leanovate.swaggercheck.schema
import com.fasterxml.jackson.annotation.{JsonCreator, JsonProperty}
import com.fasterxml.jackson.databind.annotation.JsonDeserialize
import de.leanovate.swaggercheck.SwaggerChecks
import de.leanovate.swaggercheck.schema.Operation.RequestBuilder
import de.leanovate.swaggercheck.schema.gen.GeneratableDefinition._
import de.leanovate.swaggercheck.schema.jackson.DefinitionBuilder
import de.leanovate.swaggercheck.schema.model.Definition
import org.scalacheck.Gen
@JsonDeserialize(builder = classOf[OperationParameterBuilder])
case class OperationParameter(
name: Option[String],
in: String,
required: Boolean,
schema: Definition
) {
def applyTo(context: SwaggerChecks, builder: RequestBuilder): RequestBuilder = (name, in) match {
case (Some(paramName), "path") =>
builder.withPathParam(schema.generate(context)
.map(value => Some(paramName -> value.asText("")))
.suchThat(_.get._2.length > 0))
case (Some(paramName), "query") if required =>
builder.withQueryParam(schema.generate(context).map(value => Some(paramName -> value.asText(""))))
case (Some(paramName), "query") =>
builder.withQueryParam(Gen.option(schema.generate(context).map(value => paramName -> value.asText(""))))
case (Some(headerName), "header") if required =>
builder.withHeader(schema.generate(context).map(value => Some(headerName -> value.asText(""))))
case (Some(headerName), "header") =>
builder.withHeader(Gen.option(schema.generate(context).map(value => headerName -> value.asText(""))))
case (_, "body") => builder.withBody(schema.generate(context))
case _ => builder
}
}
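// Dispatch summary (restating the case analysis above): "path" parameters and
// required "query"/"header" parameters always contribute a generated value,
// optional "query"/"header" parameters contribute via Gen.option, and "body"
// parameters feed the generated definition into the request body.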
class ReferenceOperationParameter(globalRef: String) extends OperationParameter(None, "", false, null) {
require(globalRef.startsWith("#/parameters/"), "Global parameters references need to start with #/parameters/")
val ref = globalRef.substring(13)
override def applyTo(context: SwaggerChecks, builder: RequestBuilder): RequestBuilder = throw new IllegalStateException("Not resolved ReferenceParameter")
}
object ReferenceOperationParameter {
def unapply(parameter: ReferenceOperationParameter): Option[String] = Some(parameter.ref)
}
class OperationParameterBuilder @JsonCreator()(
@JsonProperty("name") name: Option[String],
@JsonProperty("$ref") ref: Option[String],
@JsonProperty("in") in: String,
@JsonProperty("required") required: Option[Boolean],
@JsonProperty("type") schemaType: Option[String],
@JsonProperty("format") format: Option[String],
@JsonProperty("schema") schema: Option[Definition],
@JsonProperty("enum") enum: Option[List[String]]
) {
def build(): OperationParameter =
ref match {
case Some(reference) => new ReferenceOperationParameter(reference)
case None => OperationParameter(
name,
in,
required.getOrElse(false),
schema.getOrElse(new DefinitionBuilder(schemaType = schemaType, format = format, enum = enum).build()))
}
}
|
leanovate/swagger-check
|
swagger-check-core/src/main/scala/de/leanovate/swaggercheck/schema/OperationParameter.scala
|
Scala
|
mit
| 3,560
|
package skinny
package object util {
type TimeLogging = skinny.logging.TimeLogging
}
|
xerial/skinny-micro
|
micro-common/src/main/scala/skinny/util/package.scala
|
Scala
|
bsd-2-clause
| 90
|
package eu.timepit.refined.scalacheck
import eu.timepit.refined.W
import eu.timepit.refined.api.Refined
import eu.timepit.refined.boolean.Or
import eu.timepit.refined.char._
import eu.timepit.refined.numeric.Interval
import eu.timepit.refined.scalacheck.boolean._
import eu.timepit.refined.scalacheck.char._
import eu.timepit.refined.scalacheck.numeric._
import eu.timepit.refined.types.char.{LowerCaseChar, UpperCaseChar}
import org.scalacheck.Properties
class CharArbitrarySpec extends Properties("CharArbitrary") {
property("Digit") = checkArbitraryRefinedType[Char Refined Digit]
property("Letter") = checkArbitraryRefinedType[Char Refined Letter]
property("LowerCaseChar") = checkArbitraryRefinedType[LowerCaseChar]
property("UpperCaseChar") = checkArbitraryRefinedType[UpperCaseChar]
property("Whitespace") = checkArbitraryRefinedType[Char Refined Whitespace]
property("LetterOrDigit") = checkArbitraryRefinedType[Char Refined LetterOrDigit]
property("HexDigit") = {
type HexDigit = Digit Or Interval.Closed[W.`'a'`.T, W.`'f'`.T]
checkArbitraryRefinedType[Char Refined HexDigit]
}
}
|
fthomas/refined
|
modules/scalacheck/shared/src/test/scala-3.0-/eu/timepit/refined/scalacheck/CharArbitrarySpec.scala
|
Scala
|
mit
| 1,122
|
class A extends (Int => i1) // error
class B extends (Int => this) // error
trait C {
val bar: Int => this // error
}
// Test that function types ending in SIP-23 singleton types are understood correctly.
class D extends (Int => 1) {
def apply(x: Int) = 2 // error
}
class Wrap(x: Int)
class E extends (Wrap)(
// error
|
lampepfl/dotty
|
tests/neg/parser-stability-25.scala
|
Scala
|
apache-2.0
| 325
|
/*
* Copyright 2011-2021 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.spark.extensions.iterativebatch.runtime
package graph
import org.junit.runner.RunWith
import org.scalatest.FlatSpec
import org.scalatest.junit.JUnitRunner
import java.io.{ DataInput, DataOutput, File }
import scala.concurrent.{ Await, ExecutionContext, Future }
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.io.{ NullWritable, Writable }
import org.apache.spark.SparkConf
import com.asakusafw.bridge.stage.StageInfo
import com.asakusafw.runtime.directio.{ Counter, DataDefinition, OutputAttemptContext }
import com.asakusafw.runtime.directio.hadoop.{ HadoopDataSource, HadoopDataSourceUtil, SequenceFileFormat }
import com.asakusafw.runtime.model.DataModel
import com.asakusafw.runtime.value.IntOption
import com.asakusafw.spark.runtime._
import com.asakusafw.spark.runtime.directio.{ BasicDataDefinition, HadoopObjectFactory }
import com.asakusafw.spark.runtime.graph.CacheOnce
import resource._
@RunWith(classOf[JUnitRunner])
class DirectOutputCommitForIterativeSpecTest extends DirectOutputCommitForIterativeSpec
class DirectOutputCommitForIterativeSpec
extends FlatSpec
with SparkForAll
with JobContextSugar
with RoundContextSugar
with TempDirForAll {
import DirectOutputCommitForIterativeSpec._
behavior of classOf[DirectOutputCommitForIterative].getSimpleName
private var root: File = _
override def configure(conf: SparkConf): SparkConf = {
val tmpDir = createTempDirectoryForAll("directio-").toFile()
val system = new File(tmpDir, "system")
root = new File(tmpDir, "directio")
conf.setHadoopConf("com.asakusafw.output.system.dir", system.getAbsolutePath)
conf.setHadoopConf("com.asakusafw.directio.test", classOf[HadoopDataSource].getName)
conf.setHadoopConf("com.asakusafw.directio.test.path", "test")
conf.setHadoopConf("com.asakusafw.directio.test.fs.path", root.getAbsolutePath)
}
it should "commit" in {
implicit val jobContext = newJobContext(sc)
val rounds = 0 to 1
val files = rounds.map { round =>
new File(root, s"out1_${round}/testing.bin")
}
val prepare = new Prepare("id", "test/out1_${round}", "testing.bin")("prepare")
val commit = new Commit(Set(prepare))(Set("test/out1_${round}"))
val origin = newRoundContext()
val rcs = rounds.map { round =>
newRoundContext(
stageId = s"round_${round}",
batchArguments = Map("round" -> round.toString))
}
assert(files.exists(_.exists()) === false)
Await.result(prepare.perform(origin, rcs), Duration.Inf)
assert(files.exists(_.exists()) === false)
Await.result(commit.perform(origin, rcs), Duration.Inf)
assert(files.forall(_.exists()) === true)
}
it should "commit out of scope round" in {
implicit val jobContext = newJobContext(sc)
val rounds = 0 to 1
val files = rounds.map { round =>
new File(root, s"out2_${round}/testing.bin")
}
val prepare = new Prepare("id", "test/out2_${round}", "testing.bin")("prepare")
val commit = new Commit(Set(prepare))(Set("test/out2_${round}"))
val origin = newRoundContext()
val rcs = rounds.map { round =>
newRoundContext(
stageId = s"round_${round}",
batchArguments = Map("round" -> round.toString))
}
assert(files.exists(_.exists()) === false)
Await.result(commit.perform(origin, Seq(rcs.head)), Duration.Inf)
assert(files.head.exists() === true)
assert(files.tail.exists(_.exists()) === false)
Await.result(commit.perform(origin, rcs.tail), Duration.Inf)
assert(files.forall(_.exists()) === true)
}
}
object DirectOutputCommitForIterativeSpec {
class Prepare(
id: String,
basePath: String,
resourceName: String)(
val label: String)(
implicit val jobContext: JobContext)
extends IterativeAction[Unit] with CacheAlways[Seq[RoundContext], Future[Unit]] {
override protected def doPerform(
origin: RoundContext,
rcs: Seq[RoundContext])(implicit ec: ExecutionContext): Future[Unit] = Future {
val conf = origin.hadoopConf.value
val repository = HadoopDataSourceUtil.loadRepository(conf)
rcs.foreach { rc =>
val conf = rc.hadoopConf.value
val stageInfo = StageInfo.deserialize(conf.get(StageInfo.KEY_NAME))
val basePath = stageInfo.resolveUserVariables(this.basePath)
val sourceId = repository.getRelatedId(basePath)
val containerPath = repository.getContainerPath(basePath)
val componentPath = repository.getComponentPath(basePath)
val dataSource = repository.getRelatedDataSource(containerPath)
val context = new OutputAttemptContext(
stageInfo.getExecutionId, stageInfo.getStageId, sourceId, new Counter())
dataSource.setupAttemptOutput(context)
val definition =
BasicDataDefinition(new HadoopObjectFactory(conf), classOf[FooSequenceFileFormat])
for {
out <- managed(
dataSource.openOutput(context, definition, componentPath, resourceName, new Counter()))
} {
val foo = new Foo()
foo.id.modify(1)
out.write(foo)
}
dataSource.commitAttemptOutput(context)
dataSource.cleanupAttemptOutput(context)
}
}
}
class Commit(
prepares: Set[IterativeAction[Unit]])(
val basePaths: Set[String])(
implicit jobContext: JobContext)
extends DirectOutputCommitForIterative(prepares)
with CacheAlways[Seq[RoundContext], Future[Unit]]
class Foo extends DataModel[Foo] with Writable {
val id = new IntOption()
override def reset(): Unit = {
id.setNull()
}
override def copyFrom(other: Foo): Unit = {
id.copyFrom(other.id)
}
override def readFields(in: DataInput): Unit = {
id.readFields(in)
}
override def write(out: DataOutput): Unit = {
id.write(out)
}
}
class FooSequenceFileFormat extends SequenceFileFormat[NullWritable, Foo, Foo] {
override def getSupportedType(): Class[Foo] = classOf[Foo]
override def createKeyObject(): NullWritable = NullWritable.get()
override def createValueObject(): Foo = new Foo()
override def copyToModel(key: NullWritable, value: Foo, model: Foo): Unit = {
model.copyFrom(value)
}
override def copyFromModel(model: Foo, key: NullWritable, value: Foo): Unit = {
value.copyFrom(model)
}
}
}
|
asakusafw/asakusafw-spark
|
extensions/iterativebatch/runtime/core/src/test/scala/com/asakusafw/spark/extensions/iterativebatch/runtime/graph/DirectOutputCommitForIterativeSpec.scala
|
Scala
|
apache-2.0
| 7,137
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.status.api.v1
import java.util.{Arrays, Date, List => JList}
import javax.ws.rs._
import javax.ws.rs.core.MediaType
import org.apache.spark.JobExecutionStatus
import org.apache.spark.ui.SparkUI
import org.apache.spark.ui.jobs.JobProgressListener
import org.apache.spark.ui.jobs.UIData.JobUIData
@Produces(Array(MediaType.APPLICATION_JSON))
private[v1] class AllJobsResource(ui: SparkUI) {
@GET
def jobsList(@QueryParam("status") statuses: JList[JobExecutionStatus]): Seq[JobData] = {
val statusToJobs: Seq[(JobExecutionStatus, Seq[JobUIData])] =
AllJobsResource.getStatusToJobs(ui)
val adjStatuses: JList[JobExecutionStatus] = {
if (statuses.isEmpty) {
Arrays.asList(JobExecutionStatus.values(): _*)
} else {
statuses
}
}
val jobInfos = for {
(status, jobs) <- statusToJobs
job <- jobs if adjStatuses.contains(status)
} yield {
AllJobsResource.convertJobData(job, ui.jobProgressListener, false)
}
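    // Newest jobs first: sort by job id, descending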
jobInfos.sortBy{- _.jobId}
}
}
private[v1] object AllJobsResource {
def getStatusToJobs(ui: SparkUI): Seq[(JobExecutionStatus, Seq[JobUIData])] = {
val statusToJobs = ui.jobProgressListener.synchronized {
Seq(
JobExecutionStatus.RUNNING -> ui.jobProgressListener.activeJobs.values.toSeq,
JobExecutionStatus.SUCCEEDED -> ui.jobProgressListener.completedJobs.toSeq,
JobExecutionStatus.FAILED -> ui.jobProgressListener.failedJobs.reverse.toSeq
)
}
statusToJobs
}
def convertJobData(
job: JobUIData,
listener: JobProgressListener,
includeStageDetails: Boolean): JobData = {
listener.synchronized {
val lastStageInfo =
if (job.stageIds.isEmpty) {
None
} else {
listener.stageIdToInfo.get(job.stageIds.max)
}
val lastStageData = lastStageInfo.flatMap { s =>
listener.stageIdToData.get((s.stageId, s.attemptId))
}
val lastStageName = lastStageInfo.map { _.name }.getOrElse("(Unknown Stage Name)")
val lastStageDescription = lastStageData.flatMap { _.description }
new JobData(
jobId = job.jobId,
name = lastStageName,
description = lastStageDescription,
submissionTime = job.submissionTime.map{new Date(_)},
completionTime = job.completionTime.map{new Date(_)},
stageIds = job.stageIds,
jobGroup = job.jobGroup,
status = job.status,
numTasks = job.numTasks,
numActiveTasks = job.numActiveTasks,
numCompletedTasks = job.numCompletedTasks,
numSkippedTasks = job.numSkippedTasks,
numFailedTasks = job.numFailedTasks,
numActiveStages = job.numActiveStages,
numCompletedStages = job.completedStageIndices.size,
numSkippedStages = job.numSkippedStages,
numFailedStages = job.numFailedStages
)
}
}
}
|
sh-cho/cshSpark
|
status/api/v1/AllJobsResource.scala
|
Scala
|
apache-2.0
| 3,721
|
package pl.newicom.dddd.test.ar
import akka.actor._
import akka.testkit.TestKit
import org.scalacheck.{Arbitrary, Gen}
import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, WordSpecLike}
import org.slf4j.Logger
import org.slf4j.LoggerFactory.getLogger
import pl.newicom.dddd.actor.{ActorFactory, BusinessEntityActorFactory}
import pl.newicom.dddd.aggregate._
import pl.newicom.dddd.office.OfficeFactory._
import pl.newicom.dddd.office.SimpleOffice._
import pl.newicom.dddd.office.{LocalOfficeId, OfficeListener, OfficeRef}
import pl.newicom.dddd.test.ar.ARSpec.sys
import pl.newicom.dddd.utils.UUIDSupport._
import scala.concurrent.duration._
import scala.reflect.ClassTag
object ARSpec {
def sys(arClass: Class[_]) = ActorSystem(s"${arClass.getSimpleName}Spec_$uuid7")
}
/**
* @param shareAggregateRoot if set to true, the same AR instance will be used in all tests, default is false
*/
abstract class ARSpec[Event <: DomainEvent, AR <: AggregateRoot[Event, _, AR] : BusinessEntityActorFactory : LocalOfficeId](_system: Option[ActorSystem] = None, val shareAggregateRoot: Boolean = false)(implicit arClassTag: ClassTag[AR])
extends GivenWhenThenARTestFixture[Event](_system.getOrElse(sys(arClassTag.runtimeClass))) with WithGenARSpec with WordSpecLike with BeforeAndAfterAll with BeforeAndAfter {
val logger: Logger = getLogger(getClass)
override def officeUnderTest: OfficeRef = {
implicit val _ = new OfficeListener[AR]
if (_officeUnderTest == null) _officeUnderTest = office[AR]
_officeUnderTest
}
private var _officeUnderTest: OfficeRef = _
implicit var _aggregateIdGen: Gen[AggregateId] = _
implicit def aggregatedIdArb: Arbitrary[AggregateId] = Arbitrary(_aggregateIdGen)
val testSuiteId: String = uuid10
before {
_aggregateIdGen = Gen.const[String](if (shareAggregateRoot) testSuiteId else uuid10).map(s => AggregateId(s))
}
after {
ensureOfficeTerminated() //will nullify _officeUnderTest
}
override def afterAll() {
TestKit.shutdownActorSystem(system)
}
def aggregateId(implicit aggregateIdGen: Gen[AggregateId]): AggregateId = aggregateIdGen.sample.get
implicit def topLevelParent[T : LocalOfficeId](implicit system: ActorSystem): ActorFactory[T] = {
new ActorFactory[T] {
override def getChild(name: String): Option[ActorRef] = None
override def createChild(props: Props, name: String): ActorRef = {
system.actorOf(props, name)
}
}
}
def ensureTerminated(actor: ActorRef): Any = {
watch(actor) ! PoisonPill
fishForMessage(1.seconds) {
case Terminated(_) =>
unwatch(actor)
true
case _ => false
}
}
override def ensureOfficeTerminated(): Unit = {
if (_officeUnderTest != null) {
ensureTerminated(_officeUnderTest.actor)
}
_officeUnderTest = null
}
}
|
pawelkaczor/akka-ddd
|
akka-ddd-test/src/main/scala/pl/newicom/dddd/test/ar/ARSpec.scala
|
Scala
|
mit
| 2,843
|
package scjson.codec
import scjson.ast.*
private object JsonEncoder {
/** unparse a JsonValue into a String */
def encode(v:JsonValue, pretty:Boolean):String = {
val builder = new StringBuilder
val encoder = new JsonEncoder(builder, pretty)
v.visit(encoder)
builder.toString
}
private val indention = "\t"
private val hexTable = "0123456789abcdef".toCharArray
}
private final class JsonEncoder(b:StringBuilder, pretty:Boolean) extends JsonVisitor[Unit] {
private var level = 0
def onNull():Unit = b append "null"
def onBoolean(value:Boolean):Unit = if (value) b append "true" else b append "false"
def onNumber(value:BigDecimal):Unit = b append value.toString
def onString(value:String):Unit = encodeString(value)
def onArray(items:Seq[JsonValue]):Unit =
if (items.isEmpty) {
b += '['
b += ']'
}
else if (pretty) {
b += '['
level += 1
var sep = false
items foreach { it =>
if (sep) b += ','
else sep = true
b += '\n'; indent()
it.visit(this)
}
level -= 1
b += '\n'; indent()
b += ']'
}
else {
b += '['
var sep = false
items foreach { it =>
if (sep) b += ','
else sep = true
it.visit(this)
}
b += ']'
}
def onObject(fields:Seq[(String, JsonValue)]):Unit =
if (fields.isEmpty) {
b += '{'
b += '}'
}
else if (pretty) {
b += '{'
level += 1
var sep = false
fields foreach { case (key, value) =>
if (sep) b += ','
else sep = true
b += '\n'; indent()
encodeString(key)
b += ':'
b ++= JsonEncoder.indention
value.visit(this)
}
level -= 1
b += '\n'; indent()
b += '}'
}
else {
b += '{'
var sep = false
fields foreach { case (key, value) =>
if (sep) b += ','
else sep = true
encodeString(key)
b += ':'
value.visit(this)
}
b += '}'
}
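	/** Quote data as a JSON string literal, escaping quotes, backslashes, the usual control-character shorthands, and any other char below 0x20 as a four-digit hex escape. */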
private def encodeString(data:String):Unit = {
b += '"'
var i = 0
while (i < data.length) {
data charAt i match {
case '\"' => b += '\\'; b += '\"'
case '\\' => b += '\\'; b += '\\'
case '\b' => b += '\\'; b += 'b'
case '\f' => b += '\\'; b += 'f'
case '\n' => b += '\\'; b += 'n'
case '\r' => b += '\\'; b += 'r'
case '\t' => b += '\\'; b += 't'
case c if c < 32 =>
b += '\\'; b += 'u'
b += JsonEncoder hexTable ((c >> 12) & 0xf)
b += JsonEncoder hexTable ((c >> 8) & 0xf)
b += JsonEncoder hexTable ((c >> 4) & 0xf)
b += JsonEncoder hexTable ((c >> 0) & 0xf)
case c => b += c
}
i += 1
}
b += '"'
}
private def indent():Unit = {
var x = 0
while (x < level) {
b ++= JsonEncoder.indention
x += 1
}
}
}
|
ritschwumm/scjson
|
modules/codec/src/main/scala/JsonEncoder.scala
|
Scala
|
bsd-2-clause
| 2,634
|
// ** Representing a MongoDB collection as a DataFrame **
//Import the relevant packages and classes
import com.mongodb.casbah.{WriteConcern => MongodbWriteConcern}
import com.stratio.provider._
import com.stratio.provider.mongodb._
import com.stratio.provider.mongodb.schema._
import com.stratio.provider.mongodb.writer._
import org.apache.spark.sql.hive.HiveContext
import MongodbConfig._
//Configure which database and collection to read from, with optional parameters too
val mcInputBuilder = MongodbConfigBuilder(Map(Host -> List("localhost:27017"), Database -> "marketdata", Collection -> "minbars", SamplingRatio -> 1.0, WriteConcern -> MongodbWriteConcern.Normal))
val readConfig = mcInputBuilder.build()
//HiveContext uses Hive's SQL parser, which supports a superset of SQLContext's features, so I used that one
// See http://spark.apache.org/docs/1.4.0/sql-programming-guide.html#starting-point-sqlcontext for more info
val sqlContext = new HiveContext(sc) //sc is already defined as a SparkContext by the shell
val dfOneMin = sqlContext.fromMongoDB(readConfig) //set up the MongoDB collection to read from as a DataFrame
dfOneMin.registerTempTable("minbars") //make the table minbars available to the SQL expressions later
// ** Querying Results and Saving to MongoDB **
//This applies SQL windowing functions to partition the 1-minute bars into 5-minute windows
//  and then selects the open, high, low, & close price within each 5-minute window
val dfFiveMinForMonth = sqlContext.sql(
"""
SELECT m.Symbol, m.OpenTime as Timestamp, m.Open, m.High, m.Low, m.Close
FROM
(SELECT
Symbol,
FIRST_VALUE(Timestamp)
OVER (
PARTITION BY floor(unix_timestamp(Timestamp, 'yyyy-MM-dd HH:mm')/(5*60))
ORDER BY Timestamp)
as OpenTime,
LAST_VALUE(Timestamp)
OVER (
PARTITION BY floor(unix_timestamp(Timestamp, 'yyyy-MM-dd HH:mm')/(5*60))
ORDER BY Timestamp)
as CloseTime,
FIRST_VALUE(Open)
OVER (
PARTITION BY floor(unix_timestamp(Timestamp, 'yyyy-MM-dd HH:mm')/(5*60))
ORDER BY Timestamp)
as Open,
MAX(High)
OVER (
PARTITION BY floor(unix_timestamp(Timestamp, 'yyyy-MM-dd HH:mm')/(5*60))
ORDER BY Timestamp)
as High,
MIN(Low)
OVER (
PARTITION BY floor(unix_timestamp(Timestamp, 'yyyy-MM-dd HH:mm')/(5*60))
ORDER BY Timestamp)
as Low,
LAST_VALUE(Close)
OVER (
PARTITION BY floor(unix_timestamp(Timestamp, 'yyyy-MM-dd HH:mm')/(5*60))
ORDER BY Timestamp)
as Close
FROM minbars)
as m
WHERE unix_timestamp(m.CloseTime, 'yyyy-MM-dd HH:mm') - unix_timestamp(m.OpenTime, 'yyyy-MM-dd HH:mm') = 60*4"""
)
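//The outer WHERE keeps only complete windows: with 1-minute bars, the first and last bar of a
//  full 5-minute window are 4 minutes apart, hence the 60*4-second check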
//Configure which table we want to write to in MongoDB
val fiveMinOutputBuilder = MongodbConfigBuilder(Map(Host -> List("localhost:27017"), Database -> "marketdata", Collection -> "fiveMinBars", SamplingRatio -> 1.0, WriteConcern -> MongodbWriteConcern.Normal, SplitKey -> "_id", SplitSize -> 8))
val writeConfig = fiveMinOutputBuilder.build()
//Write the data to MongoDB - because of Spark's just-in-time execution, this actually triggers running the query to read from the 1-minute bars table in MongoDB and then writing to the 5-minute bars table in MongoDB
dfFiveMinForMonth.saveToMongodb(writeConfig)
// ** Running Spark on any slice of data **
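//Same 5-minute aggregation as above, but the inner SELECT filters Timestamp to July 2010, so only that slice of the collection is read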
val dfFiveMinForMonth = sqlContext.sql(
"""
SELECT m.Symbol, m.OpenTime as Timestamp, m.Open, m.High, m.Low, m.Close
FROM
(SELECT
Symbol,
FIRST_VALUE(Timestamp)
OVER (
PARTITION BY floor(unix_timestamp(Timestamp, 'yyyy-MM-dd HH:mm')/(5*60))
ORDER BY Timestamp)
as OpenTime,
LAST_VALUE(Timestamp)
OVER (
PARTITION BY floor(unix_timestamp(Timestamp, 'yyyy-MM-dd HH:mm')/(5*60))
ORDER BY Timestamp)
as CloseTime,
FIRST_VALUE(Open)
OVER (
PARTITION BY floor(unix_timestamp(Timestamp, 'yyyy-MM-dd HH:mm')/(5*60))
ORDER BY Timestamp)
as Open,
MAX(High)
OVER (
PARTITION BY floor(unix_timestamp(Timestamp, 'yyyy-MM-dd HH:mm')/(5*60))
ORDER BY Timestamp)
as High,
MIN(Low)
OVER (
PARTITION BY floor(unix_timestamp(Timestamp, 'yyyy-MM-dd HH:mm')/(5*60))
ORDER BY Timestamp)
as Low,
LAST_VALUE(Close)
OVER (
PARTITION BY floor(unix_timestamp(Timestamp, 'yyyy-MM-dd HH:mm')/(5*60))
ORDER BY Timestamp)
as Close
FROM minbars
WHERE Timestamp >= '2010-07-01' AND Timestamp < '2010-08-01')
as m
WHERE unix_timestamp(m.CloseTime, 'yyyy-MM-dd HH:mm') - unix_timestamp(m.OpenTime, 'yyyy-MM-dd HH:mm') = 60*4"""
)
|
matthewkalan/mongodb-analytics-examples
|
spark-scala-dataframe-example.scala
|
Scala
|
apache-2.0
| 4,720
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.types
import scala.math.{Integral, Numeric, Ordering}
import scala.reflect.runtime.universe.typeTag
import org.apache.spark.annotation.InterfaceStability
import org.apache.spark.sql.catalyst.ScalaReflectionLock
/**
* The data type representing `Short` values. Please use the singleton `DataTypes.ShortType`.
*
* @since 1.3.0
*/
@InterfaceStability.Stable
class ShortType private() extends IntegralType {
  // The companion object and this class are separated so the companion object also subclasses
// this type. Otherwise, the companion object would be of type "ShortType$" in byte code.
// Defined with a private constructor so the companion object is the only possible instantiation.
private[sql] type InternalType = Short
@transient private[sql] lazy val tag = ScalaReflectionLock.synchronized { typeTag[InternalType] }
private[sql] val numeric = implicitly[Numeric[Short]]
private[sql] val integral = implicitly[Integral[Short]]
private[sql] val ordering = implicitly[Ordering[InternalType]]
/**
* The default size of a value of the ShortType is 2 bytes.
*/
override def defaultSize: Int = 2
override def simpleString: String = "smallint"
private[spark] override def asNullable: ShortType = this
}
/**
* @since 1.3.0
*/
@InterfaceStability.Stable
case object ShortType extends ShortType
|
aokolnychyi/spark
|
sql/catalyst/src/main/scala/org/apache/spark/sql/types/ShortType.scala
|
Scala
|
apache-2.0
| 2,163
|
package at.forsyte.apalache.tla.typecheck.etc
import at.forsyte.apalache.tla.typecheck.VarT1
class TypeVarPool(start: Int = 0) {
/**
* The counter that we use to produce fresh variables
*/
private var nextVarNum = start
def fresh: VarT1 = {
val fresh = VarT1(nextVarNum)
nextVarNum += 1
fresh
}
def fresh(size: Int): Seq[VarT1] = {
val vars = nextVarNum.until(nextVarNum + size).map(l => VarT1(l))
nextVarNum += size
vars
}
}
|
konnov/apalache
|
tla-types/src/main/scala/at/forsyte/apalache/tla/typecheck/etc/TypeVarPool.scala
|
Scala
|
apache-2.0
| 474
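A minimal usage sketch for the pool above (the demo object and its println are illustrative, not part of Apalache):
package at.forsyte.apalache.tla.typecheck.etc
import at.forsyte.apalache.tla.typecheck.VarT1
object TypeVarPoolDemo extends App {
  val pool = new TypeVarPool(start = 10)
  val single: VarT1 = pool.fresh        // consumes counter value 10
  val batch: Seq[VarT1] = pool.fresh(3) // consumes 11, 12, 13 in one step
  // The counter only ever advances, so no two variables from one pool collide
  println((single +: batch).mkString(", "))
}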
|
package com.plz.scala.functionvaluesandclosures
/**
* @author lover
*/
object ObjectTest {
}
|
piaolinzhi/fight
|
scala-learning/src/main/scala/com/plz/scala/functionvaluesandclosures/ObjectTest.scala
|
Scala
|
gpl-2.0
| 98
|
/**
* Generated by Scrooge
* version: 3.14.1
* rev: a996c1128a032845c508102d62e65fc0aa7a5f41
* built at: 20140501-114733
*/
package cn.changhong.app.webapi.core
import com.twitter.finagle.{SourcedException, Service => FinagleService}
import com.twitter.finagle.stats.{NullStatsReceiver, StatsReceiver}
import com.twitter.finagle.thrift.ThriftClientRequest
import com.twitter.scrooge.{ThriftStruct, ThriftStructCodec}
import com.twitter.util.{Future, Return, Throw}
import java.nio.ByteBuffer
import java.util.Arrays
import org.apache.thrift.protocol._
import org.apache.thrift.TApplicationException
import org.apache.thrift.transport.{TMemoryBuffer, TMemoryInputTransport}
import scala.collection.{Map, Set}
@javax.annotation.Generated(value = Array("com.twitter.scrooge.Compiler"))
class IndexNewsOperatorServices$FinagleClient(
val service: FinagleService[ThriftClientRequest, Array[Byte]],
val protocolFactory: TProtocolFactory = new TBinaryProtocol.Factory,
val serviceName: String = "",
stats: StatsReceiver = NullStatsReceiver
) extends IndexNewsOperatorServices[Future] {
import IndexNewsOperatorServices._
protected def encodeRequest(name: String, args: ThriftStruct) = {
val buf = new TMemoryBuffer(512)
val oprot = protocolFactory.getProtocol(buf)
oprot.writeMessageBegin(new TMessage(name, TMessageType.CALL, 0))
args.write(oprot)
oprot.writeMessageEnd()
val bytes = Arrays.copyOfRange(buf.getArray, 0, buf.length)
new ThriftClientRequest(bytes, false)
}
protected def decodeResponse[T <: ThriftStruct](resBytes: Array[Byte], codec: ThriftStructCodec[T]) = {
val iprot = protocolFactory.getProtocol(new TMemoryInputTransport(resBytes))
val msg = iprot.readMessageBegin()
try {
if (msg.`type` == TMessageType.EXCEPTION) {
val exception = TApplicationException.read(iprot) match {
case sourced: SourcedException =>
if (serviceName != "") sourced.serviceName = serviceName
sourced
case e => e
}
throw exception
} else {
codec.decode(iprot)
}
} finally {
iprot.readMessageEnd()
}
}
protected def missingResult(name: String) = {
new TApplicationException(
TApplicationException.MISSING_RESULT,
name + " failed: unknown result"
)
}
protected def setServiceName(ex: Exception): Exception =
if (this.serviceName == "") ex
else {
ex match {
case se: SourcedException =>
se.serviceName = this.serviceName
se
case _ => ex
}
}
// ----- end boilerplate.
private[this] val scopedStats = if (serviceName != "") stats.scope(serviceName) else stats
private[this] object __stats_indexNews {
val RequestsCounter = scopedStats.scope("indexNews").counter("requests")
val SuccessCounter = scopedStats.scope("indexNews").counter("success")
val FailuresCounter = scopedStats.scope("indexNews").counter("failures")
val FailuresScope = scopedStats.scope("indexNews").scope("failures")
}
def indexNews(indexNews: NewsModel): Future[Boolean] = {
__stats_indexNews.RequestsCounter.incr()
this.service(encodeRequest("indexNews", indexNews$args(indexNews))) flatMap { response =>
val result = decodeResponse(response, indexNews$result)
val exception: Future[Nothing] =
null
if (result.success.isDefined)
Future.value(result.success.get)
else if (exception != null)
exception
else
Future.exception(missingResult("indexNews"))
} respond {
case Return(_) =>
__stats_indexNews.SuccessCounter.incr()
case Throw(ex) =>
__stats_indexNews.FailuresCounter.incr()
__stats_indexNews.FailuresScope.counter(ex.getClass.getName).incr()
}
}
private[this] object __stats_deleteArtNews {
val RequestsCounter = scopedStats.scope("deleteArtNews").counter("requests")
val SuccessCounter = scopedStats.scope("deleteArtNews").counter("success")
val FailuresCounter = scopedStats.scope("deleteArtNews").counter("failures")
val FailuresScope = scopedStats.scope("deleteArtNews").scope("failures")
}
def deleteArtNews(id: Int): Future[Int] = {
__stats_deleteArtNews.RequestsCounter.incr()
this.service(encodeRequest("deleteArtNews", deleteArtNews$args(id))) flatMap { response =>
val result = decodeResponse(response, deleteArtNews$result)
val exception: Future[Nothing] =
null
if (result.success.isDefined)
Future.value(result.success.get)
else if (exception != null)
exception
else
Future.exception(missingResult("deleteArtNews"))
} respond {
case Return(_) =>
__stats_deleteArtNews.SuccessCounter.incr()
case Throw(ex) =>
__stats_deleteArtNews.FailuresCounter.incr()
__stats_deleteArtNews.FailuresScope.counter(ex.getClass.getName).incr()
}
}
}
|
guoyang2011/myfinagle
|
finagle-thrift-zipkin-cluster/src/main/scala/cn/changhong/app/webapi/core/IndexNewsOperatorServices$FinagleClient.scala
|
Scala
|
apache-2.0
| 4,968
|
package com.geeksville.flight
import org.mavlink.messages.ardupilotmega.msg_mission_item
import org.mavlink.messages.MAV_CMD
import org.mavlink.messages.MAV_FRAME
/**
* A wrapper for waypoints - to provide a higher level API
*/
case class Waypoint(val msg: msg_mission_item) {
private val frameCodes = Map(
MAV_FRAME.MAV_FRAME_GLOBAL -> "MSL",
MAV_FRAME.MAV_FRAME_GLOBAL_RELATIVE_ALT -> "AGL")
def commandStr = Waypoint.commandCodes.get(msg.command).getOrElse("cmd=" + msg.command)
def frameStr = frameCodes.get(msg.frame).getOrElse("frame=" + msg.frame)
def commandStr_=(s: String) {
msg.command = Waypoint.commandToCodes(s)
}
def seq = msg.seq
/// The magic home position
def isHome = (msg.current != 2) && (msg.seq == 0)
/// If the airplane is heading here
def isCurrent = msg.current == 1
def isMSL = msg.frame == MAV_FRAME.MAV_FRAME_GLOBAL
  // Virgin APMs with no GPS will deliver a home WP with a command of 255
def isCommandValid = Waypoint.commandCodes.contains(msg.command) || msg.command == 255
def altitude = msg.z
def location = Location(msg.x, msg.y, Some(msg.z))
/**
* Should we show this waypoint on the map?
*/
def isForMap = (msg.x != 0 || msg.y != 0) && isNavCommand
//
// Accessors for particular waypoint types
//
def isJump = msg.command == MAV_CMD.MAV_CMD_DO_JUMP
def jumpSequence = msg.param1.toInt
def loiterTime = msg.param1
def loiterTurns = msg.param1
def isNavCommand = !Waypoint.nonNavCommands.contains(msg.command)
def isValidLatLng = msg.x != 0 || msg.y != 0
/**
* Allows access to params using a civilized index
*/
def getParam(i: Int) = {
i match {
case 0 => msg.param1
case 1 => msg.param2
case 2 => msg.param3
case 3 => msg.param4
}
}
/**
* Allows access to params using a civilized index
*/
def setParam(i: Int, f: Float) {
i match {
case 0 => msg.param1 = f
case 1 => msg.param2 = f
case 2 => msg.param3 = f
case 3 => msg.param4 = f
}
}
/**
* Just the type of the waypoint (RTL, LoiterN, etc...) or Home (as a special case)
*/
def typeString = {
if (isHome)
"Home"
else
commandStr
}
/**
* A short description of this waypoint
*/
def shortString: String = {
msg.command match {
case MAV_CMD.MAV_CMD_DO_JUMP => "Jump to WP #%d".format(jumpSequence)
case MAV_CMD.MAV_CMD_NAV_LOITER_UNLIM => "Loiter (forever)"
case MAV_CMD.MAV_CMD_NAV_LOITER_TURNS => "Loiter (%.1f turns)".format(loiterTurns)
case MAV_CMD.MAV_CMD_NAV_LOITER_TIME => "Loiter (%.1f seconds)".format(loiterTime)
case MAV_CMD.MAV_CMD_NAV_TAKEOFF => "Take-off (MinPitch %.1f)".format(msg.param1)
// FIXME - parse takeoff/land
case _ =>
typeString
}
}
/**
* Try to decode arguments into something understandable by a human
*/
private def decodedArguments = {
msg.command match {
case MAV_CMD.MAV_CMD_DO_JUMP => Some("Jump to WP #%d".format(jumpSequence))
case MAV_CMD.MAV_CMD_NAV_LOITER_UNLIM => Some("forever")
case MAV_CMD.MAV_CMD_NAV_LOITER_TURNS => Some("%.1f turns".format(loiterTurns))
case MAV_CMD.MAV_CMD_NAV_LOITER_TIME => Some("%.1f seconds".format(loiterTime))
case MAV_CMD.MAV_CMD_NAV_TAKEOFF => Some("MinPitch %.1f".format(msg.param1))
case _ =>
None
}
}
def numParamsUsed = {
msg.command match {
case MAV_CMD.MAV_CMD_DO_JUMP => 2
case MAV_CMD.MAV_CMD_NAV_LOITER_UNLIM => 1
case MAV_CMD.MAV_CMD_NAV_LOITER_TURNS => 1
case MAV_CMD.MAV_CMD_NAV_LOITER_TIME => 1
case MAV_CMD.MAV_CMD_NAV_TAKEOFF => 1
case MAV_CMD.MAV_CMD_DO_SET_HOME => 1
case MAV_CMD.MAV_CMD_CONDITION_DISTANCE => 1
case MAV_CMD.MAV_CMD_CONDITION_DELAY => 1
case MAV_CMD.MAV_CMD_CONDITION_CHANGE_ALT => 1
case MAV_CMD.MAV_CMD_DO_CHANGE_SPEED => 3
case MAV_CMD.MAV_CMD_DO_SET_SERVO => 2
case MAV_CMD.MAV_CMD_DO_SET_RELAY => 2
case MAV_CMD.MAV_CMD_DO_REPEAT_SERVO => 4
case MAV_CMD.MAV_CMD_DO_REPEAT_RELAY => 3
case MAV_CMD.MAV_CMD_DO_MOUNT_CONFIGURE => 4
case MAV_CMD.MAV_CMD_DO_SET_CAM_TRIGG_DIST => 1
case MAV_CMD.MAV_CMD_DO_MOUNT_CONTROL => 3
case MAV_CMD.MAV_CMD_DO_DIGICAM_CONTROL => 6
case MAV_CMD.MAV_CMD_DO_DIGICAM_CONFIGURE => 7
case _ =>
0
}
}
/**
   * Longer description (with arguments)
*/
def longString = shortString + ": " + argumentsString
/**
   * The arguments as a human-readable string
*/
def argumentsString = {
import msg._
val altStr = "Altitude %sm (%s)".format(z, frameStr)
val paramsStr = decodedArguments.map(", " + _).getOrElse {
val params = Seq(param1, param2, param3, param4)
val hasParams = params.find(_ != 0.0f).isDefined
if (hasParams)
", params=%s".format(params.mkString(","))
else
""
}
altStr + paramsStr
}
}
object Waypoint {
/**
* Commands that should not show on a map
*/
val nonNavCommands = Set(
MAV_CMD.MAV_CMD_DO_JUMP,
MAV_CMD.MAV_CMD_CONDITION_DISTANCE,
MAV_CMD.MAV_CMD_CONDITION_DELAY,
MAV_CMD.MAV_CMD_CONDITION_CHANGE_ALT,
MAV_CMD.MAV_CMD_DO_CHANGE_SPEED,
MAV_CMD.MAV_CMD_DO_SET_SERVO,
MAV_CMD.MAV_CMD_DO_SET_RELAY,
MAV_CMD.MAV_CMD_DO_REPEAT_SERVO,
MAV_CMD.MAV_CMD_DO_REPEAT_RELAY,
MAV_CMD.MAV_CMD_DO_CONTROL_VIDEO,
MAV_CMD.MAV_CMD_DO_MOUNT_CONFIGURE,
MAV_CMD.MAV_CMD_DO_MOUNT_CONTROL,
MAV_CMD.MAV_CMD_DO_SET_CAM_TRIGG_DIST,
MAV_CMD.MAV_CMD_DO_DIGICAM_CONFIGURE,
MAV_CMD.MAV_CMD_DO_DIGICAM_CONTROL)
/**
* We keep this separate so we can preserve order
*/
private val commandCodesSeq = Seq(
MAV_CMD.MAV_CMD_DO_JUMP -> "Jump",
MAV_CMD.MAV_CMD_NAV_TAKEOFF -> "Takeoff",
MAV_CMD.MAV_CMD_NAV_WAYPOINT -> "Waypoint", // Navigate to Waypoint
MAV_CMD.MAV_CMD_NAV_LAND -> "Land", // LAND to Waypoint
MAV_CMD.MAV_CMD_NAV_LOITER_UNLIM -> "Loiter", // Loiter indefinitely
MAV_CMD.MAV_CMD_NAV_LOITER_TURNS -> "LoiterN", // Loiter N Times
MAV_CMD.MAV_CMD_NAV_LOITER_TIME -> "LoiterT",
MAV_CMD.MAV_CMD_NAV_RETURN_TO_LAUNCH -> "RTL",
MAV_CMD.MAV_CMD_CONDITION_DISTANCE -> "CondDist",
MAV_CMD.MAV_CMD_CONDITION_DELAY -> "CondDelay",
MAV_CMD.MAV_CMD_CONDITION_CHANGE_ALT -> "CondAlt",
MAV_CMD.MAV_CMD_DO_CHANGE_SPEED -> "ChangeSpd",
MAV_CMD.MAV_CMD_DO_SET_SERVO -> "SetServo",
MAV_CMD.MAV_CMD_DO_SET_RELAY -> "SetRelay",
MAV_CMD.MAV_CMD_DO_REPEAT_SERVO -> "RepeatServo",
MAV_CMD.MAV_CMD_DO_REPEAT_RELAY -> "RepeatRelay",
MAV_CMD.MAV_CMD_DO_DIGICAM_CONFIGURE -> "DigiCfg",
MAV_CMD.MAV_CMD_DO_DIGICAM_CONTROL -> "DigiCtrl",
MAV_CMD.MAV_CMD_DO_MOUNT_CONFIGURE -> "MountCfg",
MAV_CMD.MAV_CMD_DO_SET_CAM_TRIGG_DIST -> "SetCamTriggDist",
MAV_CMD.MAV_CMD_DO_MOUNT_CONTROL -> "MountCtrl")
val commandCodes = Map(commandCodesSeq: _*)
val commandToCodes = commandCodes.map { case (k, v) => (v, k) }
val commandNames = commandCodesSeq.map(_._2).toArray
}
|
geeksville/arduleader
|
common/src/main/scala/com/geeksville/flight/Waypoint.scala
|
Scala
|
gpl-3.0
| 7,098
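The command tables in the companion object are plain maps, so translating between MAV_CMD codes and display names needs no vehicle link. A small sketch using only members shown above, assuming the generated MAV_CMD constants are plain Int codes as in the mavlink Java bindings:
import org.mavlink.messages.MAV_CMD
import com.geeksville.flight.Waypoint
object WaypointCodesDemo extends App {
  println(Waypoint.commandCodes(MAV_CMD.MAV_CMD_NAV_RETURN_TO_LAUNCH)) // "RTL"
  println(Waypoint.commandToCodes("Loiter") == MAV_CMD.MAV_CMD_NAV_LOITER_UNLIM) // true
  println(Waypoint.commandNames.mkString(", ")) // display names in declaration order
}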
|
/**
* Copyright (c) 2016 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.trustedanalytics.sparktk.frame.internal.ops.cumulativedist
import org.apache.spark.sql.Row
import org.scalatest.Matchers
import org.trustedanalytics.sparktk.testutils.TestingSparkContextWordSpec
import org.trustedanalytics.sparktk.frame.{ Column, DataTypes, FrameSchema }
import org.trustedanalytics.sparktk.frame.internal.rdd.FrameRdd
class EcdfTest extends TestingSparkContextWordSpec with Matchers {
// Input data
val sampleOneList = List(
Row(0),
Row(1),
Row(2),
Row(3),
Row(4),
Row(5),
Row(6),
Row(7),
Row(8),
Row(9))
val sampleTwoList = List(
Row(0),
Row(0),
Row(0),
Row(0),
Row(4),
Row(5),
Row(6),
Row(7))
val sampleThreeList = List(
Row(-2),
Row(-1),
Row(0),
Row(1),
Row(2))
"ecdf" should {
"compute correct ecdf" in {
val sampleOneRdd = sparkContext.parallelize(sampleOneList, 2)
val sampleTwoRdd = sparkContext.parallelize(sampleTwoList, 2)
val sampleThreeRdd = sparkContext.parallelize(sampleThreeList, 2)
val colA = Column("a", DataTypes.int32)
val schema = FrameSchema(Vector(colA))
// Get binned results
val sampleOneECDF = CumulativeDistFunctions.ecdf(new FrameRdd(schema, sampleOneRdd), colA)
val resultOne = sampleOneECDF.take(10)
val sampleTwoECDF = CumulativeDistFunctions.ecdf(new FrameRdd(schema, sampleTwoRdd), colA)
val resultTwo = sampleTwoECDF.take(5)
val sampleThreeECDF = CumulativeDistFunctions.ecdf(new FrameRdd(schema, sampleThreeRdd), colA)
val resultThree = sampleThreeECDF.take(5)
// Validate
resultOne.apply(0) shouldBe Row(0, 0.1)
resultOne.apply(1) shouldBe Row(1, 0.2)
resultOne.apply(2) shouldBe Row(2, 0.3)
resultOne.apply(3) shouldBe Row(3, 0.4)
resultOne.apply(4) shouldBe Row(4, 0.5)
resultOne.apply(5) shouldBe Row(5, 0.6)
resultOne.apply(6) shouldBe Row(6, 0.7)
resultOne.apply(7) shouldBe Row(7, 0.8)
resultOne.apply(8) shouldBe Row(8, 0.9)
resultOne.apply(9) shouldBe Row(9, 1.0)
resultTwo.apply(0) shouldBe Row(0, 0.5)
resultTwo.apply(1) shouldBe Row(4, 0.625)
resultTwo.apply(2) shouldBe Row(5, 0.75)
resultTwo.apply(3) shouldBe Row(6, 0.875)
resultTwo.apply(4) shouldBe Row(7, 1.0)
resultThree.apply(0) shouldBe Row(-2, 0.2)
resultThree.apply(1) shouldBe Row(-1, 0.4)
resultThree.apply(2) shouldBe Row(0, 0.6)
resultThree.apply(3) shouldBe Row(1, 0.8)
resultThree.apply(4) shouldBe Row(2, 1.0)
}
}
}
|
ashaarunkumar/spark-tk
|
sparktk-core/src/test/scala/org/trustedanalytics/sparktk/frame/internal/ops/cumulativedist/EcdfTest.scala
|
Scala
|
apache-2.0
| 3,281
|
package mesosphere.marathon
package core.appinfo
trait Selector[A] {
def matches(a: A): Boolean
}
object Selector {
def apply[A](f: A => Boolean): Selector[A] =
new Selector[A] {
override def matches(a: A): Boolean = f(a)
}
def all[A]: Selector[A] = Selector[A] { _ => true }
def none[A]: Selector[A] = Selector[A] { _ => false }
def forall[A](it: Seq[Selector[A]]): Selector[A] =
new Selector[A] {
override def matches(a: A): Boolean = it.forall(_.matches(a))
}
}
|
mesosphere/marathon
|
src/main/scala/mesosphere/marathon/core/appinfo/Selector.scala
|
Scala
|
apache-2.0
| 510
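A brief sketch of composing the combinators above; AppLike is a hypothetical stand-in type, not part of Marathon:
import mesosphere.marathon.core.appinfo.Selector
object SelectorDemo extends App {
  case class AppLike(id: String, instances: Int) // hypothetical, for illustration only
  val running = Selector[AppLike](_.instances > 0)
  val inTeamA = Selector[AppLike](_.id.startsWith("/team-a/"))
  val both    = Selector.forall(Seq(running, inTeamA))
  println(both.matches(AppLike("/team-a/web", 2))) // true
  println(both.matches(AppLike("/team-b/web", 2))) // false
}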
|
package com.mesosphere.cosmos
import com.mesosphere.cosmos.http.RequestSession
import com.mesosphere.cosmos.thirdparty.marathon.model.{AppId, MarathonAppResponse, MarathonAppsResponse}
import com.mesosphere.cosmos.thirdparty.marathon.circe.Decoders._
import com.netaporter.uri.Uri
import com.netaporter.uri.dsl._
import com.twitter.finagle.Service
import com.twitter.finagle.http._
import com.twitter.util.Future
import io.circe.Json
import org.jboss.netty.handler.codec.http.HttpMethod
class MarathonClient(
marathonUri: Uri,
client: Service[Request, Response]
) extends ServiceClient(marathonUri) {
def createApp(appJson: Json)(implicit session: RequestSession): Future[Response] = {
client(post("v2" / "apps" , appJson))
}
def getAppOption(appId: AppId)(implicit session: RequestSession): Future[Option[MarathonAppResponse]] = {
val uri = "v2" / "apps" / appId.toUri
client(get(uri)).map { response =>
response.status match {
case Status.Ok => Some(decodeJsonTo[MarathonAppResponse](response))
case Status.NotFound => None
case s: Status => throw GenericHttpError(HttpMethod.GET, uri, s)
}
}
}
def getApp(appId: AppId)(implicit session: RequestSession): Future[MarathonAppResponse] = {
getAppOption(appId).map { appOption =>
appOption.getOrElse(throw MarathonAppNotFound(appId))
}
}
def listApps()(implicit session: RequestSession): Future[MarathonAppsResponse] = {
val uri = "v2" / "apps"
client(get(uri)).flatMap(decodeTo[MarathonAppsResponse](HttpMethod.GET, uri, _))
}
def deleteApp(appId: AppId, force: Boolean = false)(implicit session: RequestSession): Future[Response] = {
val uriPath = "v2" / "apps" / appId.toUri
    if (force) client(delete(uriPath ? ("force" -> "true")))
    else client(delete(uriPath))
}
}
|
movicha/cosmos
|
cosmos-server/src/main/scala/com/mesosphere/cosmos/MarathonClient.scala
|
Scala
|
apache-2.0
| 1,874
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy
import java.io.{ByteArrayOutputStream, PrintStream}
import java.lang.reflect.InvocationTargetException
import java.net.URI
import java.util.{List => JList}
import java.util.jar.JarFile
import scala.collection.JavaConversions._
import scala.collection.mutable.{ArrayBuffer, HashMap}
import scala.io.Source
import org.apache.spark.deploy.SparkSubmitAction._
import org.apache.spark.launcher.SparkSubmitArgumentsParser
import org.apache.spark.util.Utils
/**
* Parses and encapsulates arguments from the spark-submit script.
* The env argument is used for testing.
*/
private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, String] = sys.env)
extends SparkSubmitArgumentsParser {
var master: String = null
var deployMode: String = null
var executorMemory: String = null
var executorCores: String = null
var totalExecutorCores: String = null
var propertiesFile: String = null
var driverMemory: String = null
var driverExtraClassPath: String = null
var driverExtraLibraryPath: String = null
var driverExtraJavaOptions: String = null
var queue: String = null
var numExecutors: String = null
var files: String = null
var archives: String = null
var mainClass: String = null
var primaryResource: String = null
var name: String = null
var childArgs: ArrayBuffer[String] = new ArrayBuffer[String]()
var jars: String = null
var packages: String = null
var repositories: String = null
var ivyRepoPath: String = null
var packagesExclusions: String = null
var verbose: Boolean = false
var isPython: Boolean = false
var pyFiles: String = null
var isR: Boolean = false
var action: SparkSubmitAction = null
val sparkProperties: HashMap[String, String] = new HashMap[String, String]()
var proxyUser: String = null
var principal: String = null
var keytab: String = null
// Standalone cluster mode only
var supervise: Boolean = false
var driverCores: String = null
var submissionToKill: String = null
var submissionToRequestStatusFor: String = null
var useRest: Boolean = true // used internally
/** Default properties present in the currently defined defaults file. */
lazy val defaultSparkProperties: HashMap[String, String] = {
val defaultProperties = new HashMap[String, String]()
// scalastyle:off println
if (verbose) SparkSubmit.printStream.println(s"Using properties file: $propertiesFile")
Option(propertiesFile).foreach { filename =>
Utils.getPropertiesFromFile(filename).foreach { case (k, v) =>
defaultProperties(k) = v
if (verbose) SparkSubmit.printStream.println(s"Adding default property: $k=$v")
}
}
// scalastyle:on println
defaultProperties
}
// Set parameters from command line arguments
try {
parse(args.toList)
} catch {
case e: IllegalArgumentException =>
SparkSubmit.printErrorAndExit(e.getMessage())
}
// Populate `sparkProperties` map from properties file
mergeDefaultSparkProperties()
// Remove keys that don't start with "spark." from `sparkProperties`.
ignoreNonSparkProperties()
// Use `sparkProperties` map along with env vars to fill in any missing parameters
loadEnvironmentArguments()
validateArguments()
/**
* Merge values from the default properties file with those specified through --conf.
* When this is called, `sparkProperties` is already filled with configs from the latter.
*/
private def mergeDefaultSparkProperties(): Unit = {
// Use common defaults file, if not specified by user
propertiesFile = Option(propertiesFile).getOrElse(Utils.getDefaultPropertiesFile(env))
// Honor --conf before the defaults file
defaultSparkProperties.foreach { case (k, v) =>
if (!sparkProperties.contains(k)) {
sparkProperties(k) = v
}
}
}
/**
* Remove keys that don't start with "spark." from `sparkProperties`.
*/
private def ignoreNonSparkProperties(): Unit = {
sparkProperties.foreach { case (k, v) =>
if (!k.startsWith("spark.")) {
sparkProperties -= k
SparkSubmit.printWarning(s"Ignoring non-spark config property: $k=$v")
}
}
}
/**
* Load arguments from environment variables, Spark properties etc.
*/
private def loadEnvironmentArguments(): Unit = {
master = Option(master)
.orElse(sparkProperties.get("spark.master"))
.orElse(env.get("MASTER"))
.orNull
driverExtraClassPath = Option(driverExtraClassPath)
.orElse(sparkProperties.get("spark.driver.extraClassPath"))
.orNull
driverExtraJavaOptions = Option(driverExtraJavaOptions)
.orElse(sparkProperties.get("spark.driver.extraJavaOptions"))
.orNull
driverExtraLibraryPath = Option(driverExtraLibraryPath)
.orElse(sparkProperties.get("spark.driver.extraLibraryPath"))
.orNull
driverMemory = Option(driverMemory)
.orElse(sparkProperties.get("spark.driver.memory"))
.orElse(env.get("SPARK_DRIVER_MEMORY"))
.orNull
driverCores = Option(driverCores)
.orElse(sparkProperties.get("spark.driver.cores"))
.orNull
executorMemory = Option(executorMemory)
.orElse(sparkProperties.get("spark.executor.memory"))
.orElse(env.get("SPARK_EXECUTOR_MEMORY"))
.orNull
executorCores = Option(executorCores)
.orElse(sparkProperties.get("spark.executor.cores"))
.orElse(env.get("SPARK_EXECUTOR_CORES"))
.orNull
totalExecutorCores = Option(totalExecutorCores)
.orElse(sparkProperties.get("spark.cores.max"))
.orNull
name = Option(name).orElse(sparkProperties.get("spark.app.name")).orNull
jars = Option(jars).orElse(sparkProperties.get("spark.jars")).orNull
ivyRepoPath = sparkProperties.get("spark.jars.ivy").orNull
packages = Option(packages).orElse(sparkProperties.get("spark.jars.packages")).orNull
packagesExclusions = Option(packagesExclusions)
.orElse(sparkProperties.get("spark.jars.excludes")).orNull
deployMode = Option(deployMode).orElse(env.get("DEPLOY_MODE")).orNull
numExecutors = Option(numExecutors)
.getOrElse(sparkProperties.get("spark.executor.instances").orNull)
keytab = Option(keytab).orElse(sparkProperties.get("spark.yarn.keytab")).orNull
principal = Option(principal).orElse(sparkProperties.get("spark.yarn.principal")).orNull
// Try to set main class from JAR if no --class argument is given
if (mainClass == null && !isPython && !isR && primaryResource != null) {
val uri = new URI(primaryResource)
val uriScheme = uri.getScheme()
uriScheme match {
case "file" =>
try {
val jar = new JarFile(uri.getPath)
// Note that this might still return null if no main-class is set; we catch that later
mainClass = jar.getManifest.getMainAttributes.getValue("Main-Class")
} catch {
case e: Exception =>
SparkSubmit.printErrorAndExit(s"Cannot load main class from JAR $primaryResource")
}
case _ =>
SparkSubmit.printErrorAndExit(
s"Cannot load main class from JAR $primaryResource with URI $uriScheme. " +
"Please specify a class through --class.")
}
}
    // Global defaults. These should be kept to a minimum to avoid confusing behavior.
master = Option(master).getOrElse("local[*]")
// In YARN mode, app name can be set via SPARK_YARN_APP_NAME (see SPARK-5222)
if (master.startsWith("yarn")) {
name = Option(name).orElse(env.get("SPARK_YARN_APP_NAME")).orNull
}
// Set name from main class if not given
name = Option(name).orElse(Option(mainClass)).orNull
if (name == null && primaryResource != null) {
name = Utils.stripDirectory(primaryResource)
}
// Action should be SUBMIT unless otherwise specified
action = Option(action).getOrElse(SUBMIT)
}
  /** Ensure that required fields exist. Call this only after all defaults are loaded. */
private def validateArguments(): Unit = {
action match {
case SUBMIT => validateSubmitArguments()
case KILL => validateKillArguments()
case REQUEST_STATUS => validateStatusRequestArguments()
}
}
private def validateSubmitArguments(): Unit = {
if (args.length == 0) {
printUsageAndExit(-1)
}
if (primaryResource == null) {
SparkSubmit.printErrorAndExit("Must specify a primary resource (JAR or Python or R file)")
}
if (mainClass == null && SparkSubmit.isUserJar(primaryResource)) {
SparkSubmit.printErrorAndExit("No main class set in JAR; please specify one with --class")
}
if (pyFiles != null && !isPython) {
SparkSubmit.printErrorAndExit("--py-files given but primary resource is not a Python script")
}
if (master.startsWith("yarn")) {
val hasHadoopEnv = env.contains("HADOOP_CONF_DIR") || env.contains("YARN_CONF_DIR")
if (!hasHadoopEnv && !Utils.isTesting) {
throw new Exception(s"When running with master '$master' " +
"either HADOOP_CONF_DIR or YARN_CONF_DIR must be set in the environment.")
}
}
}
private def validateKillArguments(): Unit = {
if (!master.startsWith("spark://") && !master.startsWith("mesos://")) {
SparkSubmit.printErrorAndExit(
"Killing submissions is only supported in standalone or Mesos mode!")
}
if (submissionToKill == null) {
SparkSubmit.printErrorAndExit("Please specify a submission to kill.")
}
}
private def validateStatusRequestArguments(): Unit = {
if (!master.startsWith("spark://") && !master.startsWith("mesos://")) {
SparkSubmit.printErrorAndExit(
"Requesting submission statuses is only supported in standalone or Mesos mode!")
}
if (submissionToRequestStatusFor == null) {
SparkSubmit.printErrorAndExit("Please specify a submission to request status for.")
}
}
def isStandaloneCluster: Boolean = {
master.startsWith("spark://") && deployMode == "cluster"
}
override def toString: String = {
s"""Parsed arguments:
| master $master
| deployMode $deployMode
| executorMemory $executorMemory
| executorCores $executorCores
| totalExecutorCores $totalExecutorCores
| propertiesFile $propertiesFile
| driverMemory $driverMemory
| driverCores $driverCores
| driverExtraClassPath $driverExtraClassPath
| driverExtraLibraryPath $driverExtraLibraryPath
| driverExtraJavaOptions $driverExtraJavaOptions
| supervise $supervise
| queue $queue
| numExecutors $numExecutors
| files $files
| pyFiles $pyFiles
| archives $archives
| mainClass $mainClass
| primaryResource $primaryResource
| name $name
| childArgs [${childArgs.mkString(" ")}]
| jars $jars
| packages $packages
| packagesExclusions $packagesExclusions
| repositories $repositories
| verbose $verbose
|
|Spark properties used, including those specified through
| --conf and those from the properties file $propertiesFile:
|${sparkProperties.mkString(" ", "\\n ", "\\n")}
""".stripMargin
}
/** Fill in values by parsing user options. */
override protected def handle(opt: String, value: String): Boolean = {
opt match {
case NAME =>
name = value
case MASTER =>
master = value
case CLASS =>
mainClass = value
case DEPLOY_MODE =>
if (value != "client" && value != "cluster") {
SparkSubmit.printErrorAndExit("--deploy-mode must be either \\"client\\" or \\"cluster\\"")
}
deployMode = value
case NUM_EXECUTORS =>
numExecutors = value
case TOTAL_EXECUTOR_CORES =>
totalExecutorCores = value
case EXECUTOR_CORES =>
executorCores = value
case EXECUTOR_MEMORY =>
executorMemory = value
case DRIVER_MEMORY =>
driverMemory = value
case DRIVER_CORES =>
driverCores = value
case DRIVER_CLASS_PATH =>
driverExtraClassPath = value
case DRIVER_JAVA_OPTIONS =>
driverExtraJavaOptions = value
case DRIVER_LIBRARY_PATH =>
driverExtraLibraryPath = value
case PROPERTIES_FILE =>
propertiesFile = value
case KILL_SUBMISSION =>
submissionToKill = value
if (action != null) {
SparkSubmit.printErrorAndExit(s"Action cannot be both $action and $KILL.")
}
action = KILL
case STATUS =>
submissionToRequestStatusFor = value
if (action != null) {
SparkSubmit.printErrorAndExit(s"Action cannot be both $action and $REQUEST_STATUS.")
}
action = REQUEST_STATUS
case SUPERVISE =>
supervise = true
case QUEUE =>
queue = value
case FILES =>
files = Utils.resolveURIs(value)
case PY_FILES =>
pyFiles = Utils.resolveURIs(value)
case ARCHIVES =>
archives = Utils.resolveURIs(value)
case JARS =>
jars = Utils.resolveURIs(value)
case PACKAGES =>
packages = value
case PACKAGES_EXCLUDE =>
packagesExclusions = value
case REPOSITORIES =>
repositories = value
case CONF =>
value.split("=", 2).toSeq match {
case Seq(k, v) => sparkProperties(k) = v
case _ => SparkSubmit.printErrorAndExit(s"Spark config without '=': $value")
}
case PROXY_USER =>
proxyUser = value
case PRINCIPAL =>
principal = value
case KEYTAB =>
keytab = value
case HELP =>
printUsageAndExit(0)
case VERBOSE =>
verbose = true
case VERSION =>
SparkSubmit.printVersionAndExit()
case USAGE_ERROR =>
printUsageAndExit(1)
case _ =>
throw new IllegalArgumentException(s"Unexpected argument '$opt'.")
}
true
}
/**
* Handle unrecognized command line options.
*
* The first unrecognized option is treated as the "primary resource". Everything else is
* treated as application arguments.
*/
override protected def handleUnknown(opt: String): Boolean = {
if (opt.startsWith("-")) {
SparkSubmit.printErrorAndExit(s"Unrecognized option '$opt'.")
}
primaryResource =
if (!SparkSubmit.isShell(opt) && !SparkSubmit.isInternal(opt)) {
Utils.resolveURI(opt).toString
} else {
opt
}
isPython = SparkSubmit.isPython(opt)
isR = SparkSubmit.isR(opt)
false
}
override protected def handleExtraArgs(extra: JList[String]): Unit = {
childArgs ++= extra
}
private def printUsageAndExit(exitCode: Int, unknownParam: Any = null): Unit = {
// scalastyle:off println
val outStream = SparkSubmit.printStream
if (unknownParam != null) {
outStream.println("Unknown/unsupported param " + unknownParam)
}
val command = sys.env.get("_SPARK_CMD_USAGE").getOrElse(
"""Usage: spark-submit [options] <app jar | python file> [app arguments]
|Usage: spark-submit --kill [submission ID] --master [spark://...]
|Usage: spark-submit --status [submission ID] --master [spark://...]""".stripMargin)
outStream.println(command)
val mem_mb = Utils.DEFAULT_DRIVER_MEM_MB
outStream.println(
s"""
|Options:
| --master MASTER_URL spark://host:port, mesos://host:port, yarn, or local.
| --deploy-mode DEPLOY_MODE Whether to launch the driver program locally ("client") or
| on one of the worker machines inside the cluster ("cluster")
| (Default: client).
| --class CLASS_NAME Your application's main class (for Java / Scala apps).
| --name NAME A name of your application.
| --jars JARS Comma-separated list of local jars to include on the driver
| and executor classpaths.
| --packages Comma-separated list of maven coordinates of jars to include
| on the driver and executor classpaths. Will search the local
| maven repo, then maven central and any additional remote
| repositories given by --repositories. The format for the
| coordinates should be groupId:artifactId:version.
| --exclude-packages Comma-separated list of groupId:artifactId, to exclude while
| resolving the dependencies provided in --packages to avoid
| dependency conflicts.
| --repositories Comma-separated list of additional remote repositories to
| search for the maven coordinates given with --packages.
| --py-files PY_FILES Comma-separated list of .zip, .egg, or .py files to place
| on the PYTHONPATH for Python apps.
| --files FILES Comma-separated list of files to be placed in the working
| directory of each executor.
|
| --conf PROP=VALUE Arbitrary Spark configuration property.
| --properties-file FILE Path to a file from which to load extra properties. If not
| specified, this will look for conf/spark-defaults.conf.
|
| --driver-memory MEM Memory for driver (e.g. 1000M, 2G) (Default: ${mem_mb}M).
| --driver-java-options Extra Java options to pass to the driver.
| --driver-library-path Extra library path entries to pass to the driver.
| --driver-class-path Extra class path entries to pass to the driver. Note that
| jars added with --jars are automatically included in the
| classpath.
|
| --executor-memory MEM Memory per executor (e.g. 1000M, 2G) (Default: 1G).
|
| --proxy-user NAME User to impersonate when submitting the application.
|
| --help, -h Show this help message and exit
| --verbose, -v Print additional debug output
      | --version                   Print the version of current Spark
|
| Spark standalone with cluster deploy mode only:
| --driver-cores NUM Cores for driver (Default: 1).
|
| Spark standalone or Mesos with cluster deploy mode only:
| --supervise If given, restarts the driver on failure.
| --kill SUBMISSION_ID If given, kills the driver specified.
| --status SUBMISSION_ID If given, requests the status of the driver specified.
|
| Spark standalone and Mesos only:
| --total-executor-cores NUM Total cores for all executors.
|
| Spark standalone and YARN only:
| --executor-cores NUM Number of cores per executor. (Default: 1 in YARN mode,
| or all available cores on the worker in standalone mode)
|
| YARN-only:
| --driver-cores NUM Number of cores used by the driver, only in cluster mode
| (Default: 1).
| --queue QUEUE_NAME The YARN queue to submit to (Default: "default").
| --num-executors NUM Number of executors to launch (Default: 2).
| --archives ARCHIVES Comma separated list of archives to be extracted into the
| working directory of each executor.
| --principal PRINCIPAL Principal to be used to login to KDC, while running on
| secure HDFS.
| --keytab KEYTAB The full path to the file that contains the keytab for the
| principal specified above. This keytab will be copied to
| the node running the Application Master via the Secure
| Distributed Cache, for renewing the login tickets and the
| delegation tokens periodically.
""".stripMargin
)
if (SparkSubmit.isSqlShell(mainClass)) {
outStream.println("CLI options:")
outStream.println(getSqlShellOptions())
}
// scalastyle:on println
SparkSubmit.exitFn(exitCode)
}
/**
* Run the Spark SQL CLI main class with the "--help" option and catch its output. Then filter
* the results to remove unwanted lines.
*
* Since the CLI will call `System.exit()`, we install a security manager to prevent that call
* from working, and restore the original one afterwards.
*/
private def getSqlShellOptions(): String = {
val currentOut = System.out
val currentErr = System.err
val currentSm = System.getSecurityManager()
try {
val out = new ByteArrayOutputStream()
val stream = new PrintStream(out)
System.setOut(stream)
System.setErr(stream)
val sm = new SecurityManager() {
override def checkExit(status: Int): Unit = {
throw new SecurityException()
}
override def checkPermission(perm: java.security.Permission): Unit = {}
}
System.setSecurityManager(sm)
try {
Utils.classForName(mainClass).getMethod("main", classOf[Array[String]])
.invoke(null, Array(HELP))
} catch {
case e: InvocationTargetException =>
// Ignore SecurityException, since we throw it above.
if (!e.getCause().isInstanceOf[SecurityException]) {
throw e
}
}
stream.flush()
// Get the output and discard any unnecessary lines from it.
Source.fromString(new String(out.toByteArray())).getLines
.filter { line =>
!line.startsWith("log4j") && !line.startsWith("usage")
}
.mkString("\\n")
} finally {
System.setSecurityManager(currentSm)
System.setOut(currentOut)
System.setErr(currentErr)
}
}
}
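// A minimal, self-contained sketch (not part of the original file) of the
// technique used in getSqlShellOptions above: installing a SecurityManager
// that turns System.exit() into a catchable SecurityException so the callee's
// output can be captured. All names below are illustrative.
object TrapExitSketch {
  def runTrappingExit(body: => Unit): Unit = {
    val previous = System.getSecurityManager
    val trapping = new SecurityManager {
      // turn any exit attempt into an exception we can catch
      override def checkExit(status: Int): Unit =
        throw new SecurityException(s"trapped exit($status)")
      // allow every other permission check
      override def checkPermission(perm: java.security.Permission): Unit = {}
    }
    System.setSecurityManager(trapping)
    try body
    catch { case _: SecurityException => () } // the trapped exit lands here
    finally System.setSecurityManager(previous) // always restore the original
  }
  def main(args: Array[String]): Unit = {
    runTrappingExit { System.exit(1) }
    println("still alive after System.exit(1)")
  }
}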
|
ArvinDevel/onlineAggregationOnSparkV2
|
core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
|
Scala
|
apache-2.0
| 23,518
|
// Project: angulate2-examples
// Module:
// Description:
// Copyright (c) 2017. Distributed under the MIT License (see included LICENSE file).
package routing.admin
import angulate2.std._
import angulate2.ext.tags.simple._
import angulate2.router.ActivatedRoute
import rxjs.Observable
import scala.scalajs.js
import scalatags.Text.all._
@Component(
template = tpl(
p("Dashboard"),
p("Session ID: {{ sessionId | async }}"),
a(id:="anchor"),
p("Token: {{ token | async }}")
)
)
class AdminDashboardComponent(route: ActivatedRoute) extends OnInit {
var sessionId: Observable[String] = _
var token: Observable[String] = _
override def ngOnInit(): Unit = {
sessionId = route.queryParams.map( (params,_) => params.getOrElse("session_id","None") )
token = route.fragment.map( (fragment,_) => fragment.getOrElse("None") )
}
}
|
jokade/angulate2-examples
|
routing/src/main/scala/routing/admin/AdminDashboardComponent.scala
|
Scala
|
mit
| 870
|
package com.kadwa.cassandraexport
import com.asimma.ScalaHadoop._
import org.apache.hadoop.io.{Text, LongWritable}
import ImplicitConversion._
import scala.collection.JavaConversions._
/*
* Created by Neville Kadwa.
*/
object MapReduceMain extends ScalaHadoopTool {
object CassandraExportAttributeMapper extends TypedMapper[LongWritable, Text, InterimKey, InterimValue] {
var cassandraExport:CassandraExport = null
override def setup(context:ContextType) = {
System.out.println("New Cassandra Export")
cassandraExport = new CassandraExport("src/test/resources/scottdata")
}
override def cleanup(context:ContextType) = {
System.out.println("CassandraExportMapper.cleanup called. Closing CassandraExport")
cassandraExport.close
}
override def map(k: LongWritable, v: Text, context: ContextType) : Unit = {
val sstableName = v.toString
cassandraExport.exportSSTable(sstableName, (attribute) => {
context.write(InterimKey(attribute._1,attribute._2), InterimValue(attribute._3,attribute._4,attribute._5))
})
}
}
object CassandraExportAttributeReducer extends TypedReducer[InterimKey, InterimValue, InterimKey, InterimValue] {
override def reduce(k: InterimKey, v: java.lang.Iterable[InterimValue], context: ContextType) : Unit = {
context.write(k, v.min)
}
}
object CassandraExportRecordMapper extends TypedMapper[InterimKey, InterimValue, Text, Text] {
override def map(k: InterimKey, v: InterimValue, context: ContextType) : Unit = {
val sb = new StringBuffer
sb.append("[\"").append(k.name).append("\",\"").append(v.value).append("\",").append(v.timestamp)
if (v.extra != null) {
sb.append(",\"")
sb.append(v.extra)
sb.append("\"")
}
sb.append("]")
context.write(k.key, sb.toString)
}
}
object CassandraExportRecordReducer extends TypedReducer[Text, Text, Text, Text] {
override def reduce(k: Text, v: java.lang.Iterable[Text], context: ContextType) : Unit = {
val value = v.mkString(", ")
// System.err.println(k + " => " + value)
context.write(s"""{\"key\":${k}, columns:[${value}]}""", null)
}
}
def run(args: Array[String]) : Int = {
val c = MapReduceTaskChain.init() -->
IO.Text[LongWritable, Text](args(0)).input -->
MapReduceTask.MapReduceTask(CassandraExportAttributeMapper, CassandraExportAttributeReducer) -->
MapReduceTask.MapReduceTask(CassandraExportRecordMapper, CassandraExportRecordReducer) -->
IO.Text[Text, Text](args(1)).output
c.execute()
0
}
}
|
kadwanev/cassandra-export
|
src/main/scala/com/kadwa/cassandraexport/MapReduceMain.scala
|
Scala
|
gpl-2.0
| 2,669
|
package org.pico.twiddle
import scala.language.higherKinds
trait ArrayIndexed[F[_], @specialized(Byte, Short, Int, Long) E] {
def setAtIndex(indexed: F[E], i: Bits, v: E): Unit
def getAtIndex(indexed: F[E], i: Bits): E
}
|
newhoggy/pico-cuckoo-filter
|
pico-twiddle/src/main/scala/org/pico/twiddle/ArrayIndexed.scala
|
Scala
|
bsd-3-clause
| 228
|
package edu.rit.csh.scaladb.serialization.binary
import scala.language.experimental.macros
import scala.reflect.macros.blackbox
/**
 * Generates BinarySerializers for case classes. This will fail at compile
 * time if used on any other kind of class.
*/
object BinaryMacro {
def impl[T: c.WeakTypeTag](c: blackbox.Context): c.Tree = {
import c.universe._
val tpe = weakTypeOf[T]
val fields = tpe.decls.collect {
case sym: MethodSymbol if sym.isGetter => sym
}.toList
def write(fields: List[MethodSymbol]): c.Tree = fields match {
case f :: fs => q"implicitly[BinarySerializer[$f]].write(elem.$f, output) ; ${write(fs)}"
case Nil => q"Unit"
}
def serName(field: MethodSymbol): TermName = TermName(field.name + "Ser")
def isDefined(fields: List[MethodSymbol]): c.Tree = fields match {
case f :: fs => q"""${serName(f)}.isDefined && ${isDefined(fs)}"""
case Nil => q"true"
}
def sum(fields: List[MethodSymbol]): c.Tree = fields match {
case f :: fs => q"""${serName(f)}.get + ${sum(fs)}"""
case Nil => q"0"
}
def size(decFields: List[MethodSymbol]): c.Tree = decFields match {
case Nil =>
q"""if (${isDefined(fields)}) {
Some(${sum(fields)})
} else {
None
}
"""
case f :: fs =>
q"""val ${serName(f)} = implicitly[BinarySerializer[$f]].staticSize
${size(fs)}"""
}
def canGetSize(fields: List[MethodSymbol]): c.Tree = fields match {
case f :: fs => q"implicitly[BinarySerializer[$f]].canGetSize && ${canGetSize(fs)}"
case Nil => q"true"
}
def getSize(fields: List[MethodSymbol]): c.Tree = fields match {
case f :: fs => q"implicitly[BinarySerializer[$f]].getSize(elem.$f) + ${getSize(fs)}"
case Nil => q"0"
}
q"""new edu.rit.csh.scaladb.serialization.binary.BinarySerializer[$tpe] {
import edu.rit.csh.scaladb.serialization.binary.DefaultBinarySerializers._
import edu.rit.csh.scaladb.serialization.binary.BinaryMacro._
import edu.rit.csh.scaladb.serialization.binary.{BinarySerializer, ByteArrayInput, BinaryOutput, ByteArrayOutput}
override val staticSize: Option[Int] = {
${size(fields)}
}
override def canGetSize = ${canGetSize(fields)}
def getSize(elem: $tpe): Int = ${getSize(fields)}
override def read(buffer: ByteArrayInput): $tpe = {
new $tpe(..${fields.map(field => q"implicitly[BinarySerializer[$field]].read(buffer)")})
}
override def write(elem: $tpe, output: BinaryOutput): Unit = {
${write(fields)}
}
}
"""
}
/**
* Macro for generating Serializer for case classes at compile-time
* @tparam T the type to generate a serializer for
*/
implicit def materializeSerializer[T]: BinarySerializer[T] = macro impl[T]
}
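// Hedged usage sketch (not part of the original file): summoning the
// macro-materialized serializer for a case class. `Point` is illustrative,
// and the I/O round-trip is left commented out because the exact constructors
// of ByteArrayOutput/ByteArrayInput are assumed rather than shown above.
// case class Point(x: Int, y: Int)
// val ser = implicitly[BinarySerializer[Point]] // expands via materializeSerializer
// val out = new ByteArrayOutput()               // assumed no-arg constructor
// ser.write(Point(1, 2), out)
// val back: Point = ser.read(new ByteArrayInput(...)) // assumed constructor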
|
JDrit/RaftService
|
serialization/src/main/scala/edu/rit/csh/scaladb/serialization/binary/BinaryMacro.scala
|
Scala
|
apache-2.0
| 2,961
|
package skinny.engine.test
import java.io.OutputStream
object NullOut extends OutputStream {
def write(b: Int) {}
}
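// A minimal usage sketch (not part of the original file): temporarily route
// stdout to NullOut to silence a noisy call, then restore it.
object NullOutUsageSketch {
  def silently[A](body: => A): A = {
    val saved = System.out
    System.setOut(new java.io.PrintStream(NullOut)) // NullOut discards every byte
    try body finally System.setOut(saved)
  }
}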
|
holycattle/skinny-framework
|
engine-test/src/main/scala/skinny/engine/test/NullOut.scala
|
Scala
|
mit
| 122
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.`trait`
import org.apache.calcite.plan.{RelOptPlanner, RelTrait, RelTraitDef}
import org.apache.flink.table.connector.ChangelogMode
import org.apache.flink.types.RowKind
import scala.collection.JavaConversions._
/**
 * ModifyKindSetTrait is used to describe which modify operations will be produced by this node.
*/
class ModifyKindSetTrait(val modifyKindSet: ModifyKindSet) extends RelTrait {
override def satisfies(relTrait: RelTrait): Boolean = relTrait match {
case other: ModifyKindSetTrait =>
      // it is satisfied when this trait's modify kinds are included in the required set,
      // e.g. [I,U] satisfies [I,U,D],
      // but [I,U,D] does not satisfy [I,D]
this.modifyKindSet.getContainedKinds.forall(other.modifyKindSet.contains)
case _ => false
}
override def getTraitDef: RelTraitDef[_ <: RelTrait] = ModifyKindSetTraitDef.INSTANCE
override def register(planner: RelOptPlanner): Unit = {}
override def hashCode(): Int = modifyKindSet.hashCode()
override def equals(obj: Any): Boolean = obj match {
case t: ModifyKindSetTrait => this.modifyKindSet.equals(t.modifyKindSet)
case _ => false
}
override def toString: String = s"[${modifyKindSet.toString}]"
}
object ModifyKindSetTrait {
/**
* An empty [[ModifyKindSetTrait]] which doesn't contain any [[ModifyKind]].
*/
val EMPTY = new ModifyKindSetTrait(ModifyKindSet.newBuilder().build())
/**
* Insert-only [[ModifyKindSetTrait]].
*/
val INSERT_ONLY = new ModifyKindSetTrait(ModifyKindSet.INSERT_ONLY)
/**
   * A [[ModifyKindSetTrait]] that contains all change operations.
*/
val ALL_CHANGES = new ModifyKindSetTrait(ModifyKindSet.ALL_CHANGES)
/**
   * Creates an instance of [[ModifyKindSetTrait]] from the given [[ChangelogMode]].
*/
def fromChangelogMode(changelogMode: ChangelogMode): ModifyKindSetTrait = {
val builder = ModifyKindSet.newBuilder
changelogMode.getContainedKinds.foreach {
case RowKind.INSERT => builder.addContainedKind(ModifyKind.INSERT)
case RowKind.DELETE => builder.addContainedKind(ModifyKind.DELETE)
case _ => builder.addContainedKind(ModifyKind.UPDATE) // otherwise updates
}
new ModifyKindSetTrait(builder.build)
}
}
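// Illustrative check (not part of the original file) of the containment rule
// documented in `satisfies` above:
// ModifyKindSetTrait.INSERT_ONLY.satisfies(ModifyKindSetTrait.ALL_CHANGES) // true:  [I] is within [I,U,D]
// ModifyKindSetTrait.ALL_CHANGES.satisfies(ModifyKindSetTrait.INSERT_ONLY) // false: [I,U,D] is not within [I]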
|
tillrohrmann/flink
|
flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/plan/trait/ModifyKindSetTrait.scala
|
Scala
|
apache-2.0
| 3,070
|
package org.bitcoins.core.wallet.builder
import org.bitcoins.core.config.{BitcoinNetwork, NetworkParameters}
import org.bitcoins.core.crypto.{
ECDigitalSignature,
EmptyDigitalSignature,
TransactionSignatureSerializer,
WitnessTxSigComponentP2SH
}
import org.bitcoins.core.currency.{CurrencyUnit, CurrencyUnits, Satoshis}
import org.bitcoins.core.number.{Int64, UInt32}
import org.bitcoins.core.policy.Policy
import org.bitcoins.core.protocol.script._
import org.bitcoins.core.protocol.transaction._
import org.bitcoins.core.script.constant.ScriptNumber
import org.bitcoins.core.script.control.OP_RETURN
import org.bitcoins.core.script.crypto.HashType
import org.bitcoins.core.script.locktime.LockTimeInterpreter
import org.bitcoins.core.util.BitcoinSLogger
import org.bitcoins.core.wallet.fee.FeeUnit
import org.bitcoins.core.wallet.signer._
import org.bitcoins.core.wallet.utxo.{BitcoinUTXOSpendingInfo, UTXOSpendingInfo}
import scala.annotation.tailrec
import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Failure, Success, Try}
/**
* High level class to create a signed transaction that spends a set of
* unspent transaction outputs.
*
* The most important method in this class is the 'sign' method. This will start the signing procedure for a
* transaction and then return either a signed [[org.bitcoins.core.protocol.transaction.Transaction Transaction]]
* or a [[org.bitcoins.core.wallet.builder.TxBuilderError TxBuilderError]]
*
* For usage examples see TxBuilderSpec
*/
sealed abstract class TxBuilder {
/** The outputs which we are spending bitcoins to */
def destinations: Seq[TransactionOutput]
/** The [[org.bitcoins.core.protocol.script.ScriptPubKey ScriptPubKey]]'s we are spending bitcoins to */
def destinationSPKs: Seq[ScriptPubKey] = destinations.map(_.scriptPubKey)
/** A sequence of the amounts we are spending in this transaction */
def destinationAmounts: Seq[CurrencyUnit] = destinations.map(_.value)
  /** The amount of bitcoins we are sending in the transaction; this does NOT include the fee */
def destinationAmount: CurrencyUnit =
destinationAmounts.fold(CurrencyUnits.zero)(_ + _)
/** The total amount of satoshis that are able to be spent by this transaction */
def creditingAmount: CurrencyUnit =
utxos.map(_.output.value).fold(CurrencyUnits.zero)(_ + _)
  /** The largest possible fee this transaction could pay */
def largestFee: CurrencyUnit = creditingAmount - destinationAmount
/**
* The list of [[org.bitcoins.core.protocol.transaction.TransactionOutPoint TransactionOutPoint]]s we are
* attempting to spend
* and the signers, redeem scripts, and script witnesses that might be needed to spend this outpoint.
   * This information depends on the type of [[org.bitcoins.core.protocol.script.ScriptPubKey ScriptPubKey]]
   * we are spending. For instance, if we are spending a
* regular [[org.bitcoins.core.protocol.script.P2PKHScriptPubKey P2PKHScriptPubKey]], we do not need a
* redeem script or script witness.
*
* If we are spending a [[org.bitcoins.core.protocol.script.P2WPKHWitnessSPKV0 P2WPKHWitnessSPKV0]] we do not
* need a redeem script, but we need a [[org.bitcoins.core.protocol.script.ScriptWitness ScriptWitness]]
*/
def utxoMap: TxBuilder.UTXOMap
def utxos: Seq[UTXOSpendingInfo] = utxoMap.values.toSeq
/** This represents the rate, in [[org.bitcoins.core.wallet.fee.FeeUnit FeeUnit]], we
* should pay for this transaction */
def feeRate: FeeUnit
/**
   * This is where all the money that is NOT sent to destination outputs is sent to.
   * If we don't specify a change output, a large miner fee may be paid, as more than likely
   * the difference between `creditingAmount` and `destinationAmount` is not a market-rate miner fee.
*/
def changeSPK: ScriptPubKey
/**
* The network that this [[org.bitcoins.core.wallet.builder.TxBuilder TxBuilder]] is signing a transaction for.
* An example could be [[org.bitcoins.core.config.MainNet MainNet]]
*/
def network: NetworkParameters
/** The outpoints that we are using in this transaction */
def outPoints: Seq[TransactionOutPoint] = utxoMap.keys.toSeq
/** The redeem scripts that are needed in this transaction */
def redeemScriptOpt: Seq[Option[ScriptPubKey]] = utxos.map(_.redeemScriptOpt)
/** The script witnesses that are needed in this transaction */
def scriptWitOpt: Seq[Option[ScriptWitness]] = utxos.map(_.scriptWitnessOpt)
/**
* The unsigned version of the tx with dummy signatures instead of real signatures in
* the [[org.bitcoins.core.protocol.script.ScriptSignature ScriptSignature]]s.
* This unsigned transaction has fee estimation done against
* the [[org.bitcoins.core.wallet.fee.SatoshisPerVirtualByte SatoshisPerVirtualByte]]
   * you passed in as a parameter; the change output is calculated and ready for signing.
*/
def unsignedTx(implicit ec: ExecutionContext): Future[Transaction]
def sign(implicit ec: ExecutionContext): Future[Transaction]
}
/**
* The [[org.bitcoins.core.wallet.builder.TxBuilder TxBuilder]] for the
* bitcoin network(s) [[org.bitcoins.core.config.BitcoinNetwork BitcoinNetwork]]
*/
sealed abstract class BitcoinTxBuilder extends TxBuilder {
private val logger = BitcoinSLogger.logger
private val tc = TransactionConstants
override def network: BitcoinNetwork
override def utxoMap: BitcoinTxBuilder.UTXOMap
override def sign(implicit ec: ExecutionContext): Future[Transaction] = {
val f: (Seq[BitcoinUTXOSpendingInfo], Transaction) => Boolean = { (_, _) =>
true
}
sign(f)
}
override def unsignedTx(
implicit ec: ExecutionContext): Future[Transaction] = {
val utxos = utxoMap.values.toList
val unsignedTxWit = TransactionWitness.fromWitOpt(scriptWitOpt.toVector)
val lockTime = calcLockTime(utxos)
val inputs = calcSequenceForInputs(utxos, Policy.isRBFEnabled)
val emptyChangeOutput = TransactionOutput(CurrencyUnits.zero, changeSPK)
val unsignedTxNoFee = lockTime.map { l =>
unsignedTxWit match {
case EmptyWitness =>
BaseTransaction(tc.validLockVersion,
inputs,
destinations ++ Seq(emptyChangeOutput),
l)
case wit: TransactionWitness =>
WitnessTransaction(tc.validLockVersion,
inputs,
destinations ++ Seq(emptyChangeOutput),
l,
wit)
}
}
val unsignedTxWithFee: Try[Future[Transaction]] = unsignedTxNoFee.map {
utxnf =>
val dummySignTx = loop(utxos, utxnf, true)
dummySignTx.map { dtx =>
logger.debug(s"dummySignTx $dtx")
val fee = feeRate.calc(dtx)
logger.debug(s"fee $fee")
val change = creditingAmount - destinationAmount - fee
val newChangeOutput = TransactionOutput(change, changeSPK)
logger.debug(s"newChangeOutput $newChangeOutput")
//if the change output is below the dust threshold after calculating the fee, don't add it
//to the tx
val newOutputs = if (newChangeOutput.value <= Policy.dustThreshold) {
logger.debug(
"removing change output as value is below the dustThreshold")
destinations
} else {
destinations ++ Seq(newChangeOutput)
}
dtx match {
case btx: BaseTransaction =>
BaseTransaction(btx.version, btx.inputs, newOutputs, btx.lockTime)
case wtx: WitnessTransaction =>
WitnessTransaction(wtx.version,
wtx.inputs,
newOutputs,
wtx.lockTime,
wtx.witness)
}
}
}
Future.fromTry(unsignedTxWithFee).flatMap(g => g)
}
/**
* Signs the given transaction and then returns a signed tx that spends
* all of the given outputs.
   * Checks the given invariants when the signing process is done.
   * Example invariants are that the fee on the signed transaction is below a certain amount,
   * or that RBF is enabled on the signed transaction.
*
* @param invariants - invariants that should hold true when we are done signing the transaction
* @return the signed transaction, or a [[TxBuilderError]] indicating what went wrong when signing the tx
*/
def sign(invariants: (Seq[BitcoinUTXOSpendingInfo], Transaction) => Boolean)(
implicit ec: ExecutionContext): Future[Transaction] = {
val utxos = utxoMap.values.toList
val signedTxWithFee = unsignedTx.flatMap { utx: Transaction =>
//sign the tx for this time
val signedTx = loop(utxos, utx, false)
signedTx.flatMap { tx =>
val t: Try[Transaction] = {
if (invariants(utxos, tx)) {
//final sanity checks
TxBuilder.sanityChecks(this, tx) match {
case Success(_) => Success(tx)
case Failure(err) => Failure(err)
}
} else {
TxBuilderError.FailedUserInvariants
}
}
Future.fromTry(t)
}
}
signedTxWithFee
}
private def loop(
remaining: List[BitcoinUTXOSpendingInfo],
txInProgress: Transaction,
dummySignatures: Boolean)(
implicit ec: ExecutionContext): Future[Transaction] = remaining match {
case Nil => Future.successful(txInProgress)
case info :: t =>
val partiallySigned = signAndAddInput(info, txInProgress, dummySignatures)
partiallySigned.flatMap(tx => loop(t, tx, dummySignatures))
}
/**
* This function creates a newly signed input, and then adds it to the unsigned transaction
* @param utxo - the information needed to validly spend the given output
* @param unsignedTx - the transaction that we are spending this output in
* @return either the transaction with the signed input added, or a [[TxBuilderError]]
*/
private def signAndAddInput(
utxo: BitcoinUTXOSpendingInfo,
unsignedTx: Transaction,
dummySignatures: Boolean)(
implicit ec: ExecutionContext): Future[Transaction] = {
val outpoint = utxo.outPoint
val output = utxo.output
val signers = utxo.signers
val redeemScriptOpt = utxo.redeemScriptOpt
val scriptWitnessOpt = utxo.scriptWitnessOpt
val hashType = utxo.hashType
val idx =
unsignedTx.inputs.zipWithIndex.find(_._1.previousOutput == outpoint)
if (idx.isEmpty) {
Future.fromTry(TxBuilderError.MissingOutPoint)
} else {
val inputIndex = UInt32(idx.get._2)
val oldInput = unsignedTx.inputs(inputIndex.toInt)
output.scriptPubKey match {
case _: P2PKScriptPubKey =>
P2PKSigner
.sign(signers,
output,
unsignedTx,
inputIndex,
hashType,
dummySignatures)
.map(_.transaction)
case _: P2PKHScriptPubKey =>
P2PKHSigner
.sign(signers,
output,
unsignedTx,
inputIndex,
hashType,
dummySignatures)
.map(_.transaction)
case _: MultiSignatureScriptPubKey =>
MultiSigSigner
.sign(signers,
output,
unsignedTx,
inputIndex,
hashType,
dummySignatures)
.map(_.transaction)
case lock: LockTimeScriptPubKey =>
lock.nestedScriptPubKey match {
case _: P2PKScriptPubKey =>
P2PKSigner
.sign(signers,
output,
unsignedTx,
inputIndex,
hashType,
dummySignatures)
.map(_.transaction)
case _: P2PKHScriptPubKey =>
P2PKHSigner
.sign(signers,
output,
unsignedTx,
inputIndex,
hashType,
dummySignatures)
.map(_.transaction)
case _: MultiSignatureScriptPubKey =>
MultiSigSigner
.sign(signers,
output,
unsignedTx,
inputIndex,
hashType,
dummySignatures)
.map(_.transaction)
case _: P2SHScriptPubKey =>
Future.fromTry(TxBuilderError.NestedP2SHSPK)
case _: P2WSHWitnessSPKV0 | _: P2WPKHWitnessSPKV0 =>
Future.fromTry(TxBuilderError.NestedWitnessSPK)
case _: CSVScriptPubKey | _: CLTVScriptPubKey |
_: NonStandardScriptPubKey | _: WitnessCommitment |
EmptyScriptPubKey | _: UnassignedWitnessScriptPubKey =>
Future.fromTry(TxBuilderError.NoSigner)
}
case p2sh: P2SHScriptPubKey =>
redeemScriptOpt match {
case Some(redeemScript) =>
if (p2sh != P2SHScriptPubKey(redeemScript)) {
Future.fromTry(TxBuilderError.WrongRedeemScript)
} else {
val signedTxF: Future[Transaction] = redeemScript match {
case p2wpkh: P2WPKHWitnessSPKV0 =>
val uwtx = WitnessTransaction.toWitnessTx(unsignedTx)
                  //this breaks an abstraction inside all of the signers;
                  //it won't be handled properly until the gemini stuff
                  //is open sourced
signP2SHP2WPKH(unsignedTx = uwtx,
inputIndex = inputIndex,
output = output,
p2wpkh = p2wpkh,
utxo = utxo,
hashType = hashType,
dummySignatures = dummySignatures)
case _: P2PKScriptPubKey | _: P2PKHScriptPubKey |
_: MultiSignatureScriptPubKey | _: LockTimeScriptPubKey |
_: NonStandardScriptPubKey | _: WitnessCommitment |
_: UnassignedWitnessScriptPubKey | _: P2WSHWitnessSPKV0 |
EmptyScriptPubKey =>
val input = TransactionInput(outpoint,
EmptyScriptSignature,
oldInput.sequence)
val updatedTx =
unsignedTx.updateInput(inputIndex.toInt, input)
val updatedOutput =
TransactionOutput(output.value, redeemScript)
val updatedUTXOInfo = BitcoinUTXOSpendingInfo(
outpoint,
updatedOutput,
signers,
None,
scriptWitnessOpt,
hashType)
val signedTxEither = signAndAddInput(updatedUTXOInfo,
updatedTx,
dummySignatures)
signedTxEither.map { signedTx =>
val i = signedTx.inputs(inputIndex.toInt)
val p2sh =
P2SHScriptSignature(i.scriptSignature, redeemScript)
val signedInput =
TransactionInput(i.previousOutput, p2sh, i.sequence)
val signedInputs =
signedTx.inputs.updated(inputIndex.toInt, signedInput)
signedTx match {
case btx: BaseTransaction =>
BaseTransaction(btx.version,
signedInputs,
btx.outputs,
btx.lockTime)
case wtx: WitnessTransaction =>
WitnessTransaction(wtx.version,
signedInputs,
wtx.outputs,
wtx.lockTime,
wtx.witness)
}
}
case _: P2SHScriptPubKey =>
Future.fromTry(TxBuilderError.NestedP2SHSPK)
}
signedTxF
}
case None => Future.fromTry(TxBuilderError.NoRedeemScript)
}
case _: P2WPKHWitnessSPKV0 =>
//if we don't have a WitnessTransaction we need to convert our unsignedTx to a WitnessTransaction
val unsignedWTx: WitnessTransaction = unsignedTx match {
case btx: BaseTransaction =>
WitnessTransaction(btx.version,
btx.inputs,
btx.outputs,
btx.lockTime,
EmptyWitness)
case wtx: WitnessTransaction => wtx
}
val result = P2WPKHSigner.sign(signers,
output,
unsignedWTx,
inputIndex,
hashType,
dummySignatures)
result.map(_.transaction)
case p2wshSPK: P2WSHWitnessSPKV0 =>
//if we don't have a WitnessTransaction we need to convert our unsignedTx to a WitnessTransaction
val unsignedWTx: WitnessTransaction = unsignedTx match {
case btx: BaseTransaction =>
WitnessTransaction(btx.version,
btx.inputs,
btx.outputs,
btx.lockTime,
EmptyWitness)
case wtx: WitnessTransaction => wtx
}
val p2wshScriptWit = scriptWitnessOpt match {
case Some(wit) =>
wit match {
case EmptyScriptWitness | _: P2WPKHWitnessV0 =>
Future.fromTry(TxBuilderError.WrongWitness)
case x: P2WSHWitnessV0 => Future.successful(x)
}
case None => Future.fromTry(TxBuilderError.NoWitness)
}
val redeemScriptEither = p2wshScriptWit.map(_.redeemScript)
val result = redeemScriptEither.flatMap { redeemScript =>
if (P2WSHWitnessSPKV0(redeemScript) != p2wshSPK) {
Future.fromTry(TxBuilderError.WrongWitness)
} else {
redeemScript match {
case _: P2PKScriptPubKey =>
P2PKSigner.sign(signers,
output,
unsignedWTx,
inputIndex,
hashType,
dummySignatures)
case _: P2PKHScriptPubKey =>
P2PKHSigner.sign(signers,
output,
unsignedWTx,
inputIndex,
hashType,
dummySignatures)
case _: MultiSignatureScriptPubKey =>
MultiSigSigner.sign(signers,
output,
unsignedWTx,
inputIndex,
hashType,
dummySignatures)
case _: P2WPKHWitnessSPKV0 | _: P2WSHWitnessSPKV0 =>
Future.fromTry(TxBuilderError.NestedWitnessSPK)
case _: P2SHScriptPubKey =>
Future.fromTry(TxBuilderError.NestedP2SHSPK)
case lock: LockTimeScriptPubKey =>
lock.nestedScriptPubKey match {
case _: P2PKScriptPubKey =>
P2PKSigner.sign(signers,
output,
unsignedTx,
inputIndex,
hashType,
dummySignatures)
case _: P2PKHScriptPubKey =>
P2PKHSigner.sign(signers,
output,
unsignedTx,
inputIndex,
hashType,
dummySignatures)
case _: MultiSignatureScriptPubKey =>
MultiSigSigner.sign(signers,
output,
unsignedTx,
inputIndex,
hashType,
dummySignatures)
case _: P2WPKHWitnessSPKV0 =>
P2WPKHSigner.sign(signers,
output,
unsignedTx,
inputIndex,
hashType,
dummySignatures)
case _: P2SHScriptPubKey =>
Future.fromTry(TxBuilderError.NestedP2SHSPK)
case _: P2WSHWitnessSPKV0 =>
Future.fromTry(TxBuilderError.NestedWitnessSPK)
case _: CSVScriptPubKey | _: CLTVScriptPubKey |
_: NonStandardScriptPubKey | _: WitnessCommitment |
EmptyScriptPubKey | _: UnassignedWitnessScriptPubKey =>
Future.fromTry(TxBuilderError.NoSigner)
}
case _: NonStandardScriptPubKey | _: WitnessCommitment |
EmptyScriptPubKey | _: UnassignedWitnessScriptPubKey =>
Future.fromTry(TxBuilderError.NoSigner)
}
}
}
result.map(_.transaction)
case _: NonStandardScriptPubKey | _: WitnessCommitment |
EmptyScriptPubKey | _: UnassignedWitnessScriptPubKey =>
Future.fromTry(TxBuilderError.NoSigner)
}
}
}
/**
* Returns a valid sequence number for the given [[ScriptNumber]]
   * A transaction needs a valid sequence number to spend an OP_CHECKSEQUENCEVERIFY script.
* See BIP68/112 for more information
* [[https://github.com/bitcoin/bips/blob/master/bip-0068.mediawiki]]
* [[https://github.com/bitcoin/bips/blob/master/bip-0112.mediawiki]]
*/
private def solveSequenceForCSV(scriptNum: ScriptNumber): UInt32 =
LockTimeInterpreter.isCSVLockByBlockHeight(scriptNum) match {
case true =>
        //means that scriptNum blocks need to have passed since this tx was included in a block before this output can be spent
val blocksPassed = scriptNum.toLong & TransactionConstants.sequenceLockTimeMask.toLong
val sequence = UInt32(blocksPassed)
sequence
case false =>
        //means that 512 * n seconds need to have passed since the tx was included in a block
val n = scriptNum.toLong
val sequence = UInt32(
n & TransactionConstants.sequenceLockTimeMask.toLong)
//set sequence number to indicate this is relative locktime
sequence | TransactionConstants.sequenceLockTimeTypeFlag
}
/**
* This helper function calculates the appropriate locktime for a transaction.
* To be able to spend [[CLTVScriptPubKey]]'s you need to have the transaction's
* locktime set to the same value (or higher) than the output it is spending.
* See BIP65 for more info
*/
private def calcLockTime(utxos: Seq[BitcoinUTXOSpendingInfo]): Try[UInt32] = {
@tailrec
def loop(
remaining: Seq[BitcoinUTXOSpendingInfo],
currentLockTime: UInt32): Try[UInt32] = remaining match {
case Nil => Success(currentLockTime)
case BitcoinUTXOSpendingInfo(outpoint,
output,
signers,
redeemScriptOpt,
scriptWitOpt,
hashType) :: t =>
output.scriptPubKey match {
case cltv: CLTVScriptPubKey =>
val lockTime =
if (cltv.locktime.toLong > UInt32.max.toLong || cltv.locktime.toLong < 0) {
TxBuilderError.IncompatibleLockTimes
} else Success(UInt32(cltv.locktime.toLong))
val result = lockTime.flatMap { l: UInt32 =>
if (currentLockTime < l) {
val lockTimeThreshold = tc.locktimeThreshold
if (currentLockTime < lockTimeThreshold && l >= lockTimeThreshold) {
                    //means that we mix two different locktime types: one of the outputs spends an
                    //OP_CLTV script by block height, the other spends one by timestamp
TxBuilderError.IncompatibleLockTimes
} else Success(l)
} else Success(currentLockTime)
}
result
case _: P2SHScriptPubKey | _: P2WSHWitnessSPKV0 =>
if (redeemScriptOpt.isDefined) {
//recursively call with redeem script as output script
val o = TransactionOutput(output.value, redeemScriptOpt.get)
val i = BitcoinUTXOSpendingInfo(outpoint,
o,
signers,
None,
scriptWitOpt,
hashType)
loop(i +: t, currentLockTime)
} else if (scriptWitOpt.isDefined) {
scriptWitOpt.get match {
case EmptyScriptWitness => loop(t, currentLockTime)
case _: P2WPKHWitnessV0 => loop(t, currentLockTime)
case p2wsh: P2WSHWitnessV0 =>
//recursively call with the witness redeem script as the script
val o = TransactionOutput(output.value, p2wsh.redeemScript)
val i = BitcoinUTXOSpendingInfo(outpoint,
o,
signers,
redeemScriptOpt,
None,
hashType)
loop(i +: t, currentLockTime)
}
} else {
loop(t, currentLockTime)
}
case _: P2PKScriptPubKey | _: P2PKHScriptPubKey |
_: MultiSignatureScriptPubKey | _: P2SHScriptPubKey |
_: P2WPKHWitnessSPKV0 | _: P2WSHWitnessSPKV0 |
_: NonStandardScriptPubKey | _: WitnessCommitment |
EmptyScriptPubKey | _: UnassignedWitnessScriptPubKey |
_: CSVScriptPubKey =>
              //none of these scripts affects the locktime of a tx
loop(t, currentLockTime)
}
}
loop(utxos, TransactionConstants.lockTime)
}
/**
* This helper function calculates the appropriate sequence number for each transaction input.
* [[CLTVScriptPubKey]] and [[CSVScriptPubKey]]'s need certain sequence numbers on the inputs
* to make them spendable.
* See BIP68/112 and BIP65 for more info
*/
private def calcSequenceForInputs(
utxos: Seq[UTXOSpendingInfo],
isRBFEnabled: Boolean): Seq[TransactionInput] = {
@tailrec
def loop(
remaining: Seq[UTXOSpendingInfo],
accum: Seq[TransactionInput]): Seq[TransactionInput] = remaining match {
case Nil => accum.reverse
case BitcoinUTXOSpendingInfo(outpoint,
output,
signers,
redeemScriptOpt,
scriptWitOpt,
hashType) :: t =>
output.scriptPubKey match {
case csv: CSVScriptPubKey =>
val sequence = solveSequenceForCSV(csv.locktime)
val i = TransactionInput(outpoint, EmptyScriptSignature, sequence)
loop(t, i +: accum)
case _: CLTVScriptPubKey =>
val sequence = UInt32.zero
val i = TransactionInput(outpoint, EmptyScriptSignature, sequence)
loop(t, i +: accum)
case _: P2SHScriptPubKey | _: P2WSHWitnessSPKV0 =>
if (redeemScriptOpt.isDefined) {
//recursively call with the redeem script in the output
val o = TransactionOutput(output.value, redeemScriptOpt.get)
val i = BitcoinUTXOSpendingInfo(outpoint,
o,
signers,
None,
scriptWitOpt,
hashType)
loop(i +: t, accum)
} else if (scriptWitOpt.isDefined) {
scriptWitOpt.get match {
case EmptyScriptWitness | _: P2WPKHWitnessV0 => loop(t, accum)
case p2wsh: P2WSHWitnessV0 =>
val o = TransactionOutput(output.value, p2wsh.redeemScript)
val i = BitcoinUTXOSpendingInfo(outpoint,
o,
signers,
redeemScriptOpt,
None,
hashType)
loop(i +: t, accum)
}
} else loop(t, accum)
case _: P2PKScriptPubKey | _: P2PKHScriptPubKey |
_: MultiSignatureScriptPubKey | _: P2WPKHWitnessSPKV0 |
_: NonStandardScriptPubKey | _: WitnessCommitment |
EmptyScriptPubKey | _: UnassignedWitnessScriptPubKey =>
//none of these script types affect the sequence number of a tx
              //the sequence only needs to be adjusted if we have replace-by-fee (RBF) enabled
//see BIP125 for more information
val sequence =
if (isRBFEnabled) UInt32.zero else TransactionConstants.sequence
val input =
TransactionInput(outpoint, EmptyScriptSignature, sequence)
loop(t, input +: accum)
}
}
val inputs = loop(utxos, Nil)
inputs
}
def signP2SHP2WPKH(
unsignedTx: WitnessTransaction,
inputIndex: UInt32,
p2wpkh: P2WPKHWitnessSPKV0,
output: TransactionOutput,
utxo: UTXOSpendingInfo,
hashType: HashType,
dummySignatures: Boolean): Future[Transaction] = {
//special rule for p2sh(p2wpkh)
//https://bitcoincore.org/en/segwit_wallet_dev/#signature-generation-and-verification-for-p2sh-p2wpkh
//we actually sign the fully expanded redeemScript
val pubKey = utxo.signers.head.publicKey
if (P2WPKHWitnessSPKV0(pubKey) != p2wpkh) {
Future.fromTry(TxBuilderError.WrongPublicKey)
} else {
val p2shScriptSig = P2SHScriptSignature(p2wpkh)
val oldInput = unsignedTx.inputs(inputIndex.toInt)
val updatedInput = TransactionInput(oldInput.previousOutput,
p2shScriptSig,
oldInput.sequence)
val uwtx = {
val u = unsignedTx.updateInput(inputIndex.toInt, updatedInput)
WitnessTransaction.toWitnessTx(u)
}
val wtxSigComp = {
WitnessTxSigComponentP2SH(transaction = uwtx,
inputIndex = inputIndex,
output = output,
flags = Policy.standardFlags)
}
val hashForSig = TransactionSignatureSerializer.hashForSignature(
txSigComponent = wtxSigComp,
hashType = hashType)
//sign the hash
val signature: ECDigitalSignature = {
if (dummySignatures) {
EmptyDigitalSignature
} else {
val sig = utxo.signers.head.sign(hashForSig.bytes)
//append hash type
ECDigitalSignature.fromBytes(sig.bytes.:+(hashType.byte))
}
}
val p2wpkhWit = P2WPKHWitnessV0(publicKey = pubKey, signature = signature)
val updatedWit = uwtx.updateWitness(inputIndex.toInt, p2wpkhWit)
Future.successful(updatedWit)
}
}
}
object TxBuilder {
/** This contains all the information needed to create a valid
* [[org.bitcoins.core.protocol.transaction.TransactionInput TransactionInput]] that spends this utxo */
type UTXOMap = Map[TransactionOutPoint, UTXOSpendingInfo]
private val logger = BitcoinSLogger.logger
/** Runs various sanity checks on the final version of the signed transaction from TxBuilder */
def sanityChecks(txBuilder: TxBuilder, signedTx: Transaction): Try[Unit] = {
val sanityDestination = sanityDestinationChecks(txBuilder, signedTx)
if (sanityDestination.isFailure) {
sanityDestination
} else {
sanityAmountChecks(txBuilder, signedTx)
}
}
/** Checks that we did not lose a [[org.bitcoins.core.protocol.transaction.TransactionOutput TransactionOutput]]
* in the signing process of this transaction */
def sanityDestinationChecks(
txBuilder: TxBuilder,
signedTx: Transaction): Try[Unit] = {
//make sure we send coins to the appropriate destinations
val isMissingDestination = txBuilder.destinations
.map(o => signedTx.outputs.contains(o))
.exists(_ == false)
val hasExtraOutputs =
if (signedTx.outputs.size == txBuilder.destinations.size) {
false
} else {
//the extra output should be the changeOutput
!(signedTx.outputs.size == (txBuilder.destinations.size + 1) &&
signedTx.outputs.map(_.scriptPubKey).contains(txBuilder.changeSPK))
}
val spendingTxOutPoints = signedTx.inputs.map(_.previousOutput)
val hasExtraOutPoints = txBuilder.outPoints
.map(o => spendingTxOutPoints.contains(o))
.exists(_ == false)
if (isMissingDestination) {
TxBuilderError.MissingDestinationOutput
} else if (hasExtraOutputs) {
TxBuilderError.ExtraOutputsAdded
} else if (hasExtraOutPoints) {
TxBuilderError.ExtraOutPoints
} else {
Success(())
}
}
/**
* Checks that the [[org.bitcoins.core.wallet.builder.TxBuilder.creditingAmount TxBuilder.creditingAmount]]
* >= [[org.bitcoins.core.wallet.builder.TxBuilder.destinationAmount TxBuilder.destinationAmount]]
* and then does a sanity check on the tx's fee
*/
def sanityAmountChecks(
txBuilder: TxBuilder,
signedTx: Transaction): Try[Unit] = {
val spentAmount: CurrencyUnit =
signedTx.outputs.map(_.value).fold(CurrencyUnits.zero)(_ + _)
val creditingAmount = txBuilder.creditingAmount
val actualFee = creditingAmount - spentAmount
val estimatedFee = txBuilder.feeRate * signedTx
if (spentAmount > creditingAmount) {
TxBuilderError.MintsMoney
} else if (actualFee > txBuilder.largestFee) {
TxBuilderError.HighFee
} else if (signedTx.outputs
.filterNot(_.scriptPubKey.asm.contains(OP_RETURN))
.map(_.value)
.exists(_ < Policy.dustThreshold)) {
TxBuilderError.OutputBelowDustThreshold
} else {
val feeResult =
isValidFeeRange(estimatedFee, actualFee, txBuilder.feeRate)
feeResult
}
}
/**
* Checks if the fee is within a 'valid' range
* @param estimatedFee the estimated amount of fee we should pay
* @param actualFee the actual amount of fee the transaction pays
   * @param feeRate the fee rate in satoshis/vbyte we paid on this tx
* @return
*/
def isValidFeeRange(
estimatedFee: CurrencyUnit,
actualFee: CurrencyUnit,
feeRate: FeeUnit): Try[Unit] = {
//what the number '40' represents is the allowed variance -- in bytes -- between the size of the two
    //versions of the signed tx. The two signed versions can vary in size because the digital
    //signatures might have changed in size; they can become larger or smaller depending on the
    //signatures produced.
    //Personally I think 40 seems a little high, as the size shouldn't vary by more than about 2 bytes per input.
    //Bumping it for now though, as I don't want to spend time debugging;
    //I think something is incorrectly erring to the low side of fee estimation
    //for p2sh(p2wpkh) txs.
    //See this link for more info on size variance of ECDigitalSignatures:
//https://en.bitcoin.it/wiki/Elliptic_Curve_Digital_Signature_Algorithm
val acceptableVariance = 40 * feeRate.toLong
val min = Satoshis(Int64(-acceptableVariance))
val max = Satoshis(Int64(acceptableVariance))
val difference = estimatedFee - actualFee
if (difference <= min) {
logger.error(
s"Fee was too high. Estimated fee ${estimatedFee}, actualFee ${actualFee}, difference ${difference}, acceptableVariance ${acceptableVariance}")
TxBuilderError.HighFee
} else if (difference >= max) {
logger.error(
s"Fee was too low. Estimated fee ${estimatedFee}, actualFee ${actualFee}, difference ${difference}, acceptableVariance ${acceptableVariance}")
TxBuilderError.LowFee
} else {
Success(())
}
}
}
object BitcoinTxBuilder {
type UTXOMap = Map[TransactionOutPoint, BitcoinUTXOSpendingInfo]
private case class BitcoinTxBuilderImpl(
destinations: Seq[TransactionOutput],
utxoMap: UTXOMap,
feeRate: FeeUnit,
changeSPK: ScriptPubKey,
network: BitcoinNetwork)
extends BitcoinTxBuilder
/**
* @param destinations where the money is going in the signed tx
* @param utxos extra information needed to spend the outputs in the creditingTxs
* @param feeRate the desired fee rate for this tx
* @param changeSPK where we should send the change from the creditingTxs
   * @return either an instance of a [[org.bitcoins.core.wallet.builder.TxBuilder TxBuilder]],
* from which you can call [[org.bitcoins.core.wallet.builder.TxBuilder.sign TxBuilder.sign]]
* to generate a signed tx, or a
* [[org.bitcoins.core.wallet.builder.TxBuilderError TxBuilderError]]
*/
def apply(
destinations: Seq[TransactionOutput],
utxos: BitcoinTxBuilder.UTXOMap,
feeRate: FeeUnit,
changeSPK: ScriptPubKey,
network: BitcoinNetwork): Future[BitcoinTxBuilder] = {
if (feeRate.toLong <= 0) {
Future.fromTry(TxBuilderError.LowFee)
} else {
Future.successful(
BitcoinTxBuilderImpl(destinations, utxos, feeRate, changeSPK, network))
}
}
def apply(
destinations: Seq[TransactionOutput],
utxos: Seq[BitcoinUTXOSpendingInfo],
feeRate: FeeUnit,
changeSPK: ScriptPubKey,
network: BitcoinNetwork): Future[BitcoinTxBuilder] = {
@tailrec
def loop(utxos: Seq[UTXOSpendingInfo], accum: UTXOMap): UTXOMap =
utxos match {
case Nil => accum
case h +: t =>
val u = BitcoinUTXOSpendingInfo(outPoint = h.outPoint,
output = h.output,
signers = h.signers,
redeemScriptOpt = h.redeemScriptOpt,
scriptWitnessOpt = h.scriptWitnessOpt,
hashType = h.hashType)
val result: BitcoinTxBuilder.UTXOMap = accum.updated(h.outPoint, u)
loop(t, result)
}
val map = loop(utxos, Map.empty)
BitcoinTxBuilder(destinations, map, feeRate, changeSPK, network)
}
}
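// Hedged usage sketch (not part of the original file) of the builder API
// above. The placeholder values (myOutputs, mySpendingInfos, myFeeRate,
// myChangeSPK) are assumptions; assembling real BitcoinUTXOSpendingInfo
// instances requires keys and a funding output, so this stays commented.
// import scala.concurrent.ExecutionContext.Implicits.global
// val builderF: Future[BitcoinTxBuilder] =
//   BitcoinTxBuilder(destinations = myOutputs,       // Seq[TransactionOutput]
//                    utxos        = mySpendingInfos, // Seq[BitcoinUTXOSpendingInfo]
//                    feeRate      = myFeeRate,       // FeeUnit, e.g. sat/vbyte
//                    changeSPK    = myChangeSPK,     // ScriptPubKey for change
//                    network      = MainNet)
// val signedF: Future[Transaction] = builderF.flatMap(_.sign)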
|
bitcoin-s/bitcoin-s-core
|
core/src/main/scala/org/bitcoins/core/wallet/builder/TxBuilder.scala
|
Scala
|
mit
| 40,701
|
package dregex
import dregex.impl.RegexTree
import scala.collection.mutable.ArrayBuffer
import dregex.impl.UnicodeChar
/**
* Generates, given a regex tree, sample strings that match the regex.
*/
object StringGenerator {
import RegexTree._
def generate(regex: Node, maxAlternatives: Int, maxRepeat: Int): Seq[String] = {
regex match {
case CharSet(ranges) =>
val gen = for {
range <- ranges
} yield {
generate(range, maxAlternatives, maxRepeat)
}
gen.flatten
case range: AbstractRange =>
val length = math.min(maxAlternatives, range.size)
for {
i <- 0 until length
} yield {
UnicodeChar(range.from.codePoint + i).toJavaString
}
case Disj(values) =>
values.flatMap(v => generate(v, maxAlternatives, maxRepeat))
case Rep(min, maxOpt, value) =>
import scala.util.control.Breaks._
val max = maxOpt.getOrElse(Int.MaxValue - 1)
var count = 0
val res = ArrayBuffer[String]()
breakable {
for (i <- min to max) {
res ++= fixedRepeat(value, maxAlternatives, maxRepeat, i)
count += 1
if (count >= maxRepeat)
break()
}
}
res.toSeq
case Juxt(Seq()) =>
Seq()
case Juxt(Seq(value)) =>
generate(value, maxAlternatives, maxRepeat)
case Juxt(first +: rest) =>
for {
left <- generate(first, maxAlternatives, maxRepeat)
right <- generate(Juxt(rest), maxAlternatives, maxRepeat)
} yield {
left + right
}
case other =>
throw new RuntimeException("Unsupported node type: " + other.getClass)
}
}
def fixedRepeat(value: Node, maxAlternatives: Int, maxRepeat: Int, qtty: Int): Seq[String] = {
/*
     * To avoid a combinatorial explosion, inside repetitions we limit the
     * number of alternatives and nested repetitions to 1 for all but one
     * instance.
*/
qtty match {
case 0 =>
Seq()
case 1 =>
generate(value, maxAlternatives, maxRepeat)
case n =>
for {
left <- generate(value, maxAlternatives = 1, maxRepeat = 1)
right <- fixedRepeat(value, maxAlternatives, maxRepeat, qtty - 1)
} yield {
left + right
}
}
}
}
|
marianobarrios/dregex
|
src/test/scala/dregex/StringGenerator.scala
|
Scala
|
bsd-2-clause
| 2,403
|
package io.mth.dross
import org.scalatest.FunSuite
// FIX Replace with an automated scalacheck test.
class DrossManualTest extends FunSuite {
test("manual test") {
}
}
|
markhibberd/dross
|
src/test/io/mth/dross/DrossManualTest.scala
|
Scala
|
bsd-3-clause
| 174
|
/**
* Copyright 2015 ICT.
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.ac.ict.acs.netflow.ha
import scala.reflect.ClassTag
/**
* Allows Master to persist any state that is necessary in order to recover from a failure.
*
* The implementation of this trait defines how name-object pairs are stored or retrieved.
*/
trait PersistenceEngine {
/**
* Defines how the object is serialized and persisted. Implementation will
* depend on the store used.
*/
def persist(name: String, obj: Object)
/**
* Defines how the object referred by its name is removed from the store.
*/
def unpersist(name: String)
/**
   * Gives all objects matching a prefix. This defines how objects are
* read/deserialized back.
*/
def read[T: ClassTag](prefix: String): Seq[T]
def close() {}
}
private[netflow] class BlackHolePersistenceEngine extends PersistenceEngine {
override def persist(name: String, obj: Object): Unit = {}
override def unpersist(name: String): Unit = {}
override def read[T: ClassTag](name: String): Seq[T] = Nil
}
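// A minimal in-memory sketch (not part of the original file) of a concrete
// engine honoring the name/prefix contract above. Purely illustrative and
// not thread-safe; a real engine would persist to ZooKeeper or disk.
private[netflow] class InMemoryPersistenceEngine extends PersistenceEngine {
  private val store = scala.collection.mutable.LinkedHashMap.empty[String, Object]
  override def persist(name: String, obj: Object): Unit = store(name) = obj
  override def unpersist(name: String): Unit = store -= name
  override def read[T: ClassTag](prefix: String): Seq[T] =
    store.collect { case (name, obj) if name.startsWith(prefix) => obj.asInstanceOf[T] }.toSeq
}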
|
DataSysLab/netflow
|
common/src/main/scala/cn/ac/ict/acs/netflow/ha/PersistenceEngine.scala
|
Scala
|
apache-2.0
| 1,833
|
/*
* Copyright (c) 2012-2016 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.snowplow.enrich.common
package enrichments
package registry
package apirequest
// Scalaz
import scalaz._
import Scalaz._
// json4s
import org.json4s.JInt
// specs2
import org.specs2.Specification
import org.specs2.scalaz.ValidationMatchers
import org.specs2.mock.Mockito
class CacheSpec extends Specification with ValidationMatchers with Mockito { def is = s2"""
This is a specification to test the API Request enrichment cache
Update on identical URLs $e1
Preserve ttl of cache $e2
Remove unused value $e3
"""
def e1 = {
val cache = Cache(3, 2)
cache.put("http://api.acme.com/url", JInt(42).success)
cache.put("http://api.acme.com/url", JInt(52).success)
cache.get("http://api.acme.com/url") must beSome.like {
case v => v must beSuccessful(JInt(52))
} and(cache.actualLoad must beEqualTo(1))
}
def e2 = {
val cache = Cache(3, 2)
cache.put("http://api.acme.com/url", JInt(42).success)
Thread.sleep(3000)
cache.get("http://api.acme.com/url") must beNone and(cache.actualLoad must beEqualTo(0))
}
def e3 = {
val cache = Cache(2, 2)
cache.put("http://api.acme.com/url1", JInt(32).success)
cache.put("http://api.acme.com/url2", JInt(42).success)
cache.put("http://api.acme.com/url3", JInt(52).success)
cache.get("http://api.acme.com/url1") must beNone and(cache.actualLoad must beEqualTo(2))
}
}
|
acgray/snowplow
|
3-enrich/scala-common-enrich/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/apirequest/CacheSpec.scala
|
Scala
|
apache-2.0
| 2,127
|
// Copyright: 2010 - 2016 https://github.com/ensime/ensime-server/graphs
// Licence: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.core
import Predef.{any2stringadd => _}
import scala.reflect.internal.util.Position
/**
 * Simulates methods that were added in later versions of the scalac
 * API, or generates fake methods that we can use in both versions.
*/
trait PresentationCompilerBackCompat
trait PositionBackCompat {
implicit class RichPosition(pos: Position) {
// annoyingly, {start, end}OrPoint is deprecated
def startOrCursor: Int = if (pos.isRange) pos.start else pos.point
def endOrCursor: Int = if (pos.isRange) pos.end else pos.point
}
}
|
d1egoaz/ensime-sbt
|
src/sbt-test/sbt-ensime/ensime-server/core/src/main/scala-2.11/org/ensime/core/PresentationCompilerBackCompat.scala
|
Scala
|
apache-2.0
| 690
|
/**
* Digi-Lib - base library for Digi components
*
* Copyright (c) 2012-2014 Alexey Aksenov ezh@ezh.msk.ru
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.digimead.digi.lib.log
import java.util.Date
import org.digimead.digi.lib.log.api.XLevel
class Message(val date: Date,
val tid: Long,
val level: XLevel,
val tag: String,
val tagClass: Class[_],
val message: String,
val throwable: Option[Throwable],
val pid: Int) extends api.XMessage {
override def toString = "%s P%05d T%05d %s %-24s %s".format(dateFormat.format(date),
pid, tid, level.toString.charAt(0), tag + ":", message)
}
|
ezh/digi-lib
|
src/main/scala/org/digimead/digi/lib/log/Message.scala
|
Scala
|
apache-2.0
| 1,136
|
package 除.step1
trait 商
class 商Zero extends 商
class 商Positive[I <: 商] extends 商
trait 被除数 {
type 除以[N <: 除数] <: 商
}
class 被除数Zero extends 被除数 {
override type 除以[N <: 除数] = 商Zero
}
class 被除数Positive[Tail <: 被除数, Head] extends 被除数 {
override type 除以[N <: 除数] = N#除[Tail]
}
trait 除数 {
type 除[CTail <: 被除数] <: 商
}
trait 除数Zero[Tail <: 除数] extends 除数 {
override type 除[CTail <: 被除数] = 商Positive[Tail#除[CTail]]
}
trait 除数Positive[Tail <: 除数] extends 除数 {
override type 除[CTail <: 被除数] = CTail#除以[Tail]
}
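// Translation note (not part of the original file): the identifiers above are
// Chinese: 商 = quotient, 被除数 = dividend, 除数 = divisor, 除以 = dividedBy,
// 除 = divide. The encoding performs type-level division by repeated
// subtraction: each 被除数Positive cell feeds one 除数Positive step of the
// divisor, and each time the divisor wraps back to 除数Zero, one 商Positive is
// added to the quotient.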
|
djx314/ubw
|
a24-五法/src/main/scala/除/step1/除.scala
|
Scala
|
bsd-3-clause
| 671
|
package com.sksamuel.scapegoat.inspections.empty
import com.sksamuel.scapegoat.{ Inspection, InspectionContext, Inspector, Levels }
/** @author Stephen Samuel */
class EmptyWhileBlock extends Inspection {
def inspector(context: InspectionContext): Inspector = new Inspector(context) {
override def postTyperTraverser = Some apply new context.Traverser {
import context.global._
override def inspect(tree: Tree): Unit = {
tree match {
case LabelDef(_, _, If(_, Block(List(Literal(Constant(()))), _), _)) =>
context.warn("Empty while block", tree.pos, Levels.Warning, tree.toString().take(500), EmptyWhileBlock.this)
case _ => continue(tree)
}
}
}
}
}
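// Illustrative trigger (not part of the original file): after the typer phase,
// a `while` loop is a LabelDef whose body is an If over a Block, so an empty
// body matches Block(List(Literal(Constant(()))), _) and fires the warning.
// def spin(p: => Boolean): Unit = while (p) {} // would be flagged by this inspection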
|
pwwpche/scalac-scapegoat-plugin
|
src/main/scala/com/sksamuel/scapegoat/inspections/empty/EmptyWhileBlock.scala
|
Scala
|
apache-2.0
| 732
|
// Card types
abstract class BrownOrGreyCard extends Card {
val res: Resources
override def benefit(p: PlayerState, g: GameState) = p addResources res
}
abstract class BrownCard extends BrownOrGreyCard {
override def toString = SevenCli.brown + super.toString + Console.RESET
}
abstract class GreyCard extends BrownOrGreyCard {
override def toString = SevenCli.grey + super.toString + Console.RESET
}
abstract class RedCard extends Card {
val value: Int
override def benefit(p: PlayerState, g: GameState) = p.copy(shields = p.shields + value)
override def toString = SevenCli.red + super.toString + Console.RESET
}
abstract class GreenCard extends Card {
val value: (Int,Int,Int)
override def benefit(p: PlayerState, g: GameState) = p.copy(
science = (
p.science._1 + value._1,
p.science._2 + value._2,
p.science._3 + value._3
)
)
override def toString = SevenCli.green + super.toString + Console.RESET
}
abstract class BlueCard extends Card {
val value: Int
override def worth(p: PlayerState, g: GameState) = value
override def toString = SevenCli.blue + super.toString + Console.RESET
}
abstract class YellowCard extends Card {
override def toString = SevenCli.yellow + super.toString + Console.RESET
}
abstract class PurpleCard extends Card {
override def toString = SevenCli.purple + super.toString + Console.RESET
}
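// Worked example (not part of the original file): playing a GreenCard with
// value (1,0,0) on a player whose science is (2,1,0) yields (3,1,0); the
// three tuple slots accumulate per science symbol, as `benefit` above shows.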
// age 1 brown cards
case object WoodPlace extends BrownCard {
override val res = Resources(wood = 1)
}
case object ClayPlace extends BrownCard {
override val res = Resources(clay = 1)
}
case object OrePlace extends BrownCard {
override val res = Resources(ore = 1)
}
case object StonePlace extends BrownCard {
override val res = Resources(stone = 1)
}
case object Baumschule extends BrownCard {
override val goldCost = 1
override val res = Resources.dynamic(wood = 1, clay = 1)
}
case object Ausgrabungsstätte extends BrownCard {
override val goldCost = 1
override val res = Resources.dynamic(stone = 1, clay = 1)
}
case object Tongrube extends BrownCard {
override val goldCost = 1
override val res = Resources.dynamic(clay = 1, ore = 1)
}
case object Forstwirtschaft extends BrownCard {
override val goldCost = 1
  override val res = Resources.dynamic(wood = 1, stone = 1)
}
case object Waldhöhle extends BrownCard {
override val goldCost = 1
override val res = Resources.dynamic(wood = 1, ore = 1)
}
case object Mine extends BrownCard {
override val goldCost = 1
override val res = Resources.dynamic(stone = 1, ore = 1)
}
// age 2 brown cards
case object Sägewerk extends BrownCard {
override val goldCost = 1
override val res = Resources(wood = 2)
}
case object Bildhauerei extends BrownCard {
override val goldCost = 1
override val res = Resources(stone = 2)
}
case object Ziegelbrennerei extends BrownCard {
override val goldCost = 1
override val res = Resources(clay = 2)
}
case object Giesserei extends BrownCard {
override val goldCost = 1
override val res = Resources(ore = 2)
}
// age 1 + 2 grey cards
case object Press extends GreyCard {
override val res = Resources(papyrus = 1)
}
case object Weavery extends GreyCard {
override val res = Resources(cloth = 1)
}
case object Glassery extends GreyCard {
override val res = Resources(glass = 1)
}
// age 1 blue cards
case object Pfandhaus extends BlueCard {
override val value = 3
}
case object Bäder extends BlueCard {
override val resourceReq = Resources(stone = 1)
override val value = 3
override val chains = List(Aquädukt)
}
case object Altar extends BlueCard {
override val value = 2
override val chains = List(Tempel)
}
case object Theatre extends BlueCard {
override val value = 2
override val chains = List(Statue)
}
// age 2 blue cards
case object Aquädukt extends BlueCard {
override val value = 5
override val resourceReq = Resources(stone = 3)
}
case object Tempel extends BlueCard {
override val value = 3
override val resourceReq = Resources(wood = 1, clay = 1, glass = 1)
override val chains = List(Pantheon)
}
case object Statue extends BlueCard {
override val value = 4
override val resourceReq = Resources(ore = 2, wood = 1)
override val chains = List(Gärten)
}
case object Gericht extends BlueCard {
override val value = 4
override val resourceReq = Resources(clay = 2, cloth = 1)
}
// age 3 blue cards
case object Pantheon extends BlueCard {
override val value = 7
override val resourceReq = Resources(clay = 2, ore = 1, glass = 1, papyrus = 1, cloth = 1)
}
case object Gärten extends BlueCard {
override val value = 5
override val resourceReq = Resources(clay = 2, wood = 1)
}
case object Rathaus extends BlueCard {
override val value = 6
override val resourceReq = Resources(stone = 2, ore = 1, glass = 1)
}
case object Palast extends BlueCard {
override val value = 8
override val resourceReq = Resources(wood = 1, stone = 1, clay = 1, ore = 1, glass = 1, papyrus = 1, cloth = 1)
}
case object Senat extends BlueCard {
override val value = 6
override val resourceReq = Resources(wood = 2, stone = 1, ore = 1)
}
// age 1 yellow cards
case object Tavern extends YellowCard {
override def benefit(p: PlayerState, g: GameState) = p addGold 5
}
case object KontorOst extends YellowCard {
override def benefit(p: PlayerState, g: GameState) = p.copy(tradeRight = (1, p.tradeRight._2))
}
case object KontorWest extends YellowCard {
  override def benefit(p: PlayerState, g: GameState) = p.copy(tradeLeft = (1, p.tradeLeft._2))
}
case object Market extends YellowCard {
  override def benefit(p: PlayerState, g: GameState) = p.copy(
    tradeLeft = (p.tradeLeft._1, 1),
    tradeRight = (p.tradeRight._1, 1)
  )
}
// age 2 yellow cards
case object Forum extends YellowCard {
override val resourceReq = Resources(clay = 2)
override def benefit(p: PlayerState, g: GameState) = p addNoTradeResources Resources.dynamic(cloth = 1, glass = 1, papyrus = 1)
override val chains = List(Hafen)
}
case object Karawanserei extends YellowCard {
override val resourceReq = Resources(wood = 2)
override def benefit(p: PlayerState, g: GameState) = p addNoTradeResources Resources.dynamic(wood = 1, stone = 1, ore = 1, clay = 1)
override val chains = List(Leuchtturm)
}
case object Weinberg extends YellowCard {
override def benefit(p: PlayerState, g: GameState) = p addGold( p countAll( _.isInstanceOf[BrownCard], g) )
}
case object Basar extends YellowCard {
override def benefit(p: PlayerState, g: GameState) = p addGold( 2 * p.countAll( _.isInstanceOf[GreyCard], g) )
}
// age 3 yellow cards
case object Hafen extends YellowCard {
override val resourceReq = Resources(wood = 1, ore = 1, cloth = 1)
override def benefit(p: PlayerState, g: GameState) = p addGold p.count( _.isInstanceOf[BrownCard] )
override def worth(p: PlayerState, g: GameState) = p.cards.count( _.isInstanceOf[BrownCard] )
}
case object Leuchtturm extends YellowCard {
override val resourceReq = Resources(stone = 1, glass = 1)
override def benefit(p: PlayerState, g: GameState) = p addGold p.count( _.isInstanceOf[YellowCard] )
override def worth(p: PlayerState, g: GameState) = p.cards.count( _.isInstanceOf[YellowCard] )
}
case object Handelskammer extends YellowCard {
override val resourceReq = Resources(clay = 2, papyrus = 1)
override def benefit(p: PlayerState, g: GameState) = p addGold(2 * p.count( _.isInstanceOf[GreyCard] ) )
override def worth(p: PlayerState, g: GameState) = 2 * p.count( _.isInstanceOf[GreyCard] )
}
case object Arena extends YellowCard {
override val resourceReq = Resources(stone = 2, ore = 1)
override def benefit(p: PlayerState, g: GameState) = p addGold(3 * p.wonderStuffed.length)
override def worth(p: PlayerState, g: GameState) = p.wonderStuffed.length
}
// age 1 red cards
case object Befestigungsanlage extends RedCard {
override val value = 1
override val resourceReq = Resources(wood = 1)
}
case object Kaserne extends RedCard {
override val value = 1
override val resourceReq = Resources(ore = 1)
}
case object Wachturm extends RedCard {
override val value = 1
override val resourceReq = Resources(clay = 1)
}
// age 2 red cards
case object Mauern extends RedCard {
override val value = 2
override val resourceReq = Resources(stone = 3)
override val chains = List(Verteidigungsanlage)
}
case object Trainingsgelände extends RedCard {
override val value = 2
override val resourceReq = Resources(ore = 2, wood = 1)
override val chains = List(Zirkus)
}
case object Ställe extends RedCard {
override val value = 2
override val resourceReq = Resources(clay = 1, wood = 1, ore = 1)
}
case object Schiessplatz extends RedCard {
override val value = 2
override val resourceReq = Resources(wood = 2, ore = 1)
}
// age 3 red cards
case object Verteidigungsanlage extends RedCard {
override val value = 3
override val resourceReq = Resources(stone = 1, ore = 3)
}
case object Zirkus extends RedCard {
override val value = 3
override val resourceReq = Resources(stone = 3, ore = 1)
}
case object Waffenlager extends RedCard {
override val value = 3
override val resourceReq = Resources(wood = 2, ore = 1, cloth = 1)
}
case object Belagerungsmaschinen extends RedCard {
override val value = 3
override val resourceReq = Resources(clay = 3, wood = 1)
}
// age 1 green cards
case object Apothecary extends GreenCard {
override val resourceReq = Resources(cloth = 1)
override val chains = List(Arzneiausgabe, Ställe)
override val value = (1,0,0)
}
case object Werkstatt extends GreenCard {
override val resourceReq = Resources(glass = 1)
override val chains = List(Laboratorium, Schiessplatz)
override val value = (0,1,0)
}
case object Skriptorium extends GreenCard {
override val resourceReq = Resources(papyrus = 1)
override val chains = List(Bibliothek, Gericht)
override val value = (0,0,1)
}
// age 2 green cards
case object Arzneiausgabe extends GreenCard {
override val resourceReq = Resources(ore = 2, glass = 1)
override val value = (1,0,0)
override val chains = List(Loge, Arena)
}
case object Laboratorium extends GreenCard {
override val resourceReq = Resources(clay = 2, papyrus = 1)
override val value = (0,1,0)
override val chains = List(Belagerungsmaschinen, Observatorium)
}
case object Bibliothek extends GreenCard {
override val resourceReq = Resources(stone = 2, cloth = 1)
override val value = (0,0,1)
override val chains = List(Senat, Universität)
}
case object Schule extends GreenCard {
override val resourceReq = Resources(wood = 1, papyrus = 1)
override val value = (0,0,1)
override val chains = List(Akademie, Studierzimmer)
}
// age 3 green cards
case object Loge extends GreenCard {
override val resourceReq = Resources(clay = 2, papyrus = 1, cloth = 1)
override val value = (1,0,0)
}
case object Observatorium extends GreenCard {
override val resourceReq = Resources(ore = 2, glass = 1, cloth = 1)
override val value = (0,1,0)
}
case object Universität extends GreenCard {
override val resourceReq = Resources(wood = 2, papyrus = 1, glass = 1)
override val value = (0,0,1)
override val chains = List(Senat)
}
case object Akademie extends GreenCard {
override val resourceReq = Resources(stone = 3, glass = 1)
override val value = (1,0,0)
}
case object Studierzimmer extends GreenCard {
override val resourceReq = Resources(wood = 1, papyrus = 1, cloth = 1)
override val value = (0,1,0)
}
// age 3 purple cards
case object GuildWorkers extends PurpleCard {
override val resourceReq = Resources(ore = 2, clay = 1, stone = 1, wood = 1)
override def worth(p: PlayerState, g: GameState) = p countNeighbors( _.isInstanceOf[BrownCard], g)
}
case object GuildArtisans extends PurpleCard {
override val resourceReq = Resources(ore = 2, stone = 2)
override def worth(p: PlayerState, g: GameState) = 2 * p.countNeighbors( _.isInstanceOf[GreyCard], g)
}
case object GuildTraders extends PurpleCard {
override val resourceReq = Resources(glass = 1, papyrus = 1, cloth = 1)
override def worth(p: PlayerState, g: GameState) = p countNeighbors( _.isInstanceOf[YellowCard], g)
}
case object GuildPhilosophy extends PurpleCard {
override val resourceReq = Resources(clay = 3, papyrus = 1, cloth = 1)
override def worth(p: PlayerState, g: GameState) = p countNeighbors( _.isInstanceOf[GreenCard], g)
}
case object GuildSpies extends PurpleCard {
override val resourceReq = Resources(clay = 3, glass = 1)
override def worth(p: PlayerState, g: GameState) = p countNeighbors( _.isInstanceOf[RedCard], g)
}
case object GuildStrategists extends PurpleCard {
override val resourceReq = Resources(ore = 2, stone = 1, cloth = 1)
  override def worth(p: PlayerState, g: GameState) = ((p lefty g).redlost + (p righty g).redlost).abs
}
case object GuildReeder extends PurpleCard {
override val resourceReq = Resources(wood = 3, glass = 1, papyrus = 1)
override def worth(p: PlayerState, g: GameState) = p count(x => x.isInstanceOf[BrownOrGreyCard] || x.isInstanceOf[PurpleCard])
}
case object GuildScientists extends PurpleCard {
override val resourceReq = Resources(wood = 2, ore = 2, papyrus = 1)
override def benefit(p: PlayerState, g: GameState) = p.copy(scienceWildCard = p.scienceWildCard+1)
}
case object GuildOfficials extends PurpleCard {
override val resourceReq = Resources(wood = 3, stone = 1, cloth = 1)
override def worth(p: PlayerState, g: GameState) = p countNeighbors( _.isInstanceOf[BlueCard], g)
}
case object GuildBuilders extends PurpleCard {
override val resourceReq = Resources(stone = 2, clay = 2, glass = 1)
override def worth(p: PlayerState, g: GameState) = p.wonderStuffed.length + (p lefty g).wonderStuffed.length + (p righty g).wonderStuffed.length
}
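// Hedged scoring sketch (PlayerState/GameState construction is project-specific
// and elided here): at game end each purple card's `worth` is evaluated against
// the final state, e.g. GuildBuilders scores one point per wonder stage built by
// the owner and both neighbours, and GuildStrategists one point per military
// defeat token held by the two neighbours.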
|
Valodim/SevenWonders
|
Cards.scala
|
Scala
|
gpl-3.0
| 14,137
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.scalar.pimps
import org.apache.ignite.scheduler.SchedulerFuture
import org.apache.ignite.{Ignite, IgniteCluster}
import org.jetbrains.annotations.Nullable
/**
* Companion object.
*/
object ScalarGridPimp {
/**
* Creates new Scalar grid pimp with given Java-side implementation.
*
* @param impl Java-side implementation.
*/
def apply(impl: Ignite) = {
if (impl == null)
throw new NullPointerException("impl")
val pimp = new ScalarGridPimp
pimp.impl = impl.cluster()
pimp
}
}
/**
* ==Overview==
* Defines Scalar "pimp" for `Grid` on Java side.
*
 * Essentially this class extends the Java `GridProjection` interface with Scala-specific
 * API adapters, using primarily the implicit conversions defined in the `ScalarConversions`
 * object. This means you can use the functions defined in this class on objects of the Java
 * `GridProjection` type: Scala will automatically (implicitly) convert such an object into
 * Scalar's pimp and replace the original call with a call on that pimp.
 *
 * Note that Scalar provides an extensive library of implicit conversions between Java and
 * Scala Ignite counterparts in the `ScalarConversions` object.
*
* ==Suffix '$' In Names==
 * The symbol `$` is used in names when they conflict with names in the base Java class
 * that the Scala pimp is shadowing, or with a Java package name that your Scala code is importing.
 * Instead of giving two different names to the same function, we've decided to simply mark
 * the Scala-side method with a `$` suffix.
*/
class ScalarGridPimp extends ScalarProjectionPimp[IgniteCluster] with ScalarTaskThreadContext[IgniteCluster] {
/**
* Schedules closure for execution using local cron-based scheduling.
*
* @param s Closure to schedule to run as a background cron-based job.
   * @param ptrn Scheduling pattern in UNIX cron format with optional prefix `{n1, n2}`,
   * where `n1` is the scheduling delay in seconds and `n2` is the number of executions. Both
   * parameters are optional.
*/
def scheduleLocalCall[R](@Nullable s: Call[R], ptrn: String): SchedulerFuture[R] = {
assert(ptrn != null)
value.ignite().scheduler().scheduleLocal(toCallable(s), ptrn)
}
/**
* Schedules closure for execution using local cron-based scheduling.
*
* @param s Closure to schedule to run as a background cron-based job.
   * @param ptrn Scheduling pattern in UNIX cron format with optional prefix `{n1, n2}`,
   * where `n1` is the scheduling delay in seconds and `n2` is the number of executions. Both
   * parameters are optional.
*/
def scheduleLocalRun(@Nullable s: Run, ptrn: String): SchedulerFuture[_] = {
assert(ptrn != null)
value.ignite().scheduler().scheduleLocal(toRunnable(s), ptrn)
}
}
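// Hedged usage sketch (the closure body and cron pattern are illustrative;
// pattern semantics follow the scaladoc above):
//
//   val fut = ignite.cluster().scheduleLocalRun(
//     () => println("tick"),
//     "{1, 3} * * * * *") // delay scheduling by 1 second, run the job 3 times
//
// The Scala closure is adapted to the Java-side `Run` type via `toRunnable`
// before being handed to the scheduler.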
|
irudyak/ignite
|
modules/scalar/src/main/scala/org/apache/ignite/scalar/pimps/ScalarGridPimp.scala
|
Scala
|
apache-2.0
| 3,663
|
package com.electronwill.niol.buffer.storage
/** Provides [[com.electronwill.niol.buffer.storage.BytesStorage]]s */
trait StorageProvider {
/** @return a storage with a capacity of at least `minCapacity` */
def getStorage(minCapacity: Int): BytesStorage
}
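/** Hedged sketch (not part of the original file): a decorator showing one way to
  * honour the `getStorage` contract, rounding requests up to the next power of
  * two while still guaranteeing "at least `minCapacity`". */
object StorageProviderOps {
  def poweringUp(base: StorageProvider): StorageProvider = new StorageProvider {
    override def getStorage(minCapacity: Int): BytesStorage = {
      // next power of two >= minCapacity (minCapacity assumed positive)
      val rounded = Integer.highestOneBit(math.max(1, minCapacity - 1)) * 2
      base.getStorage(rounded)
    }
  }
}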
|
TheElectronWill/Niol
|
main/src/com/electronwill/niol/buffer/storage/StorageProvider.scala
|
Scala
|
lgpl-3.0
| 261
|
package com.github.opengrabeso.mixtio
package moveslink
import Main.ActivityEvents
import common.Util._
import scala.annotation.tailrec
object MovesLinkUploader {
private def autodetectSport(data: ActivityEvents): ActivityEvents = {
// TODO: use differences from data.dist.stream instead of computing data.gps.distStream
val speedStream = DataStreamGPS.computeSpeedStream(data.gps.distStream)
val speedStats = DataStreamGPS.speedStats(speedStream)
val detectSport = Main.detectSportBySpeed(speedStats, data.id.sportName)
data.copy(id = data.id.copy(sportName = detectSport))
}
@tailrec
private def processTimelinesRecurse(lineGPS: List[ActivityEvents], lineHRD: List[ActivityEvents], processed: List[ActivityEvents]): List[ActivityEvents] = {
def prependNonEmpty(move: Option[ActivityEvents], list: List[ActivityEvents]): List[ActivityEvents] = {
move.find(!_.isAlmostEmpty(30)).toList ++ list
}
if (lineGPS.isEmpty) {
if (lineHRD.isEmpty) {
processed
} else {
// HR moves without GPS info
processTimelinesRecurse(lineGPS, lineHRD.tail, prependNonEmpty(lineHRD.headOption, processed))
}
} else if (lineHRD.isEmpty) {
processTimelinesRecurse(lineGPS.tail, lineHRD, prependNonEmpty(lineGPS.headOption, processed))
} else {
val hrdMove = lineHRD.head
val gpsMove = lineGPS.head
val gpsBeg = gpsMove.startTime
val gpsEnd = gpsMove.endTime
val hrdBeg = hrdMove.startTime
val hrdEnd = hrdMove.endTime
if (gpsBeg >= hrdEnd) {
// no match for hrd
processTimelinesRecurse(lineGPS, lineHRD.tail, prependNonEmpty(lineHRD.headOption, processed))
} else if (hrdBeg > gpsEnd) {
processTimelinesRecurse(lineGPS.tail, lineHRD, prependNonEmpty(lineGPS.headOption, processed))
} else {
// some overlap, handle it
// check if the activity start is the same within a tolerance
      // 10 percent means 6 minutes from 1 hour (60 minutes)
val tolerance = (lineGPS.head.duration max lineHRD.head.duration) * 0.10f
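      // e.g. a 60-minute GPS track paired with a 50-minute HR track gives a
      // tolerance of 6 minutes, so starts within 6 minutes of each other merge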
if (timeDifference(gpsBeg, hrdBeg).abs <= tolerance) {
// same beginning - drive by HRD
// use from GPS only as needed by HRD
        // if GPS is only a bit longer than HRD, use it whole, unless there is another HRD waiting for it
val (takeGPS, leftGPS) = if (timeDifference(gpsEnd, hrdEnd).abs <= tolerance && lineHRD.tail.isEmpty) {
(Some(gpsMove), None)
} else {
gpsMove.span(hrdEnd)
}
val merged = takeGPS.map(m => (m.gps, m)).map { sm =>
val data = sm._2.merge(hrdMove)
data
}
println(s"Merged GPS ${takeGPS.map(_.toLog)} into ${hrdMove.toLog}")
processTimelinesRecurse(prependNonEmpty(leftGPS, lineGPS.tail), prependNonEmpty(merged, lineHRD.tail), processed)
} else if (gpsBeg > hrdBeg) {
val (takeHRD, leftHRD) = hrdMove.span(gpsBeg)
processTimelinesRecurse(lineGPS, prependNonEmpty(leftHRD, lineHRD.tail), prependNonEmpty(takeHRD, processed))
} else {
val (takeGPS, leftGPS) = gpsMove.span(hrdBeg)
processTimelinesRecurse(prependNonEmpty(leftGPS, lineGPS.tail), lineHRD, prependNonEmpty(takeGPS, processed))
}
}
}
}
def processTimelines(lineGPS: List[ActivityEvents], lineHRD: List[ActivityEvents]): List[ActivityEvents] = {
processTimelinesRecurse(lineGPS, lineHRD, Nil).reverse.map(autodetectSport)
}
}
|
OndrejSpanel/Stravamat
|
backend/src/main/scala/com/github/opengrabeso/mixtio/moveslink/MovesLinkUploader.scala
|
Scala
|
gpl-2.0
| 3,575
|
/**
  * Created by MKucijan on 9.5.2017.
*/
object CryptographicConstantsExtended {
def getCryptoAPIs: Set[String] = getKeyParametarInitAPIs ++ getPBEKeySpecAPIs ++ getSecureRandomApis
/**
* Rule 3.
*/
final val Cipher_init_API_1="Ljavax/crypto/Cipher;.init:(ILjava/security/Key;)V"
final val Cipher_init_API_2="Ljavax/crypto/Cipher;.init:(ILjava/security/Key;Ljava/security/AlgorithmParameters;)V"
final val Cipher_init_API_3="Ljavax/crypto/Cipher;.init:(ILjava/security/Key;Ljava/security/spec/AlgorithmParameterSpec;)V"
final val Cipher_init_API_4="Ljavax/crypto/Cipher;.init:(ILjava/security/Key;Ljava/security/spec/AlgorithmParameterSpec;Ljava/security/SecureRandom;)V"
final val Cipher_init_API_5="Ljavax/crypto/Cipher;.init:(ILjava/security/Key;Ljava/security/AlgorithmParameters;Ljava/security/SecureRandom;)V"
final val Cipher_init_API_6="Ljavax/crypto/Cipher;.init:(ILjava/security/Key;Ljava/security/SecureRandom;)V"
final val JAVAX_KEYPARAMETER_INIT_1 ="Ljavax/crypto/spec/SecretKeySpec;.<init>:([BLjava/lang/String;)V"
final val JAVAX_KEYPARAMETER_INIT_2 ="Ljavax/crypto/spec/SecretKeySpec;.<init>:([BIILjava/lang/String;)V"
def getCipher_InitApis: Set[String] = Set(Cipher_init_API_1,Cipher_init_API_2,Cipher_init_API_3,Cipher_init_API_4,Cipher_init_API_5,Cipher_init_API_6)
def getKeyParametarInitAPIs: Set[String] = Set(JAVAX_KEYPARAMETER_INIT_1,JAVAX_KEYPARAMETER_INIT_2)
/**
    * Rules 4 and 5.
*/
final val PBE_KEY_SPEC_1="Ljavax/crypto/spec/PBEKeySpec;.<init>:([C[BII)V"
final val PBE_KEY_SPEC_2="Ljavax/crypto/spec/PBEKeySpec;.<init>:([C[BI)V"
final val PBE_KEY_SPEC_3="Ljavax/crypto/spec/PBEKeySpec;.<init>:([C[B)V"
final val PBE_KEY_SPEC_4="Ljavax/crypto/spec/PBEKeySpec;.<init>:([C)V"
def getPBEKeySpecAPIs: Set[String] = Set(PBE_KEY_SPEC_1,PBE_KEY_SPEC_2,PBE_KEY_SPEC_3,PBE_KEY_SPEC_4)
/**
* Rule 6.
*/
//final val SECURE_RANDOM_INIT_1="Ljava/security/SecureRandom;.<init>:()V"
final val SECURE_RANDOM_INIT_2="Ljava/security/SecureRandom;.<init>:([B)V"
def getSecureRandomApis: Set[String] = Set(SECURE_RANDOM_INIT_2)
}
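/** Hedged usage sketch (the observed signature string is illustrative): checking
  * a method signature seen during analysis against the constant sets above. */
object CryptographicConstantsExample {
  val seen = "Ljavax/crypto/spec/PBEKeySpec;.<init>:([C)V"
  // true: PBE_KEY_SPEC_4 is in getPBEKeySpecAPIs and hence in getCryptoAPIs
  val isTrackedCryptoApi: Boolean = CryptographicConstantsExtended.getCryptoAPIs.contains(seen)
}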
|
mkucijan/Android-crypto-misuse-checker
|
src/main/scala/CryptographicConstantsExtended.scala
|
Scala
|
gpl-3.0
| 2,119
|
package org.jetbrains.plugins.scala
package lang
package psi
package api
package expr
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiElement
/**
* @author Alexander Podkhalyuzin
*/
trait ScGuard extends ScalaPsiElement {
def expr: Option[ScExpression]
override def accept(visitor: ScalaElementVisitor) = visitor.visitGuard(this)
}
|
consulo/consulo-scala
|
src/org/jetbrains/plugins/scala/lang/psi/api/expr/ScGuard.scala
|
Scala
|
apache-2.0
| 345
|
/**
* Copyright (C) 2009-2015 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.util
import language.implicitConversions
import java.util.concurrent.TimeUnit
import java.lang.{ Double ⇒ JDouble }
import scala.concurrent.duration.{ Duration, FiniteDuration }
@SerialVersionUID(1L)
case class Timeout(duration: FiniteDuration) {
/**
* Construct a Timeout from the given time unit and factor.
*/
def this(length: Long, unit: TimeUnit) = this(Duration(length, unit))
}
/**
* A Timeout is a wrapper on top of Duration to be more specific about what the duration means.
*/
object Timeout {
/**
* A timeout with zero duration, will cause most requests to always timeout.
*/
val zero: Timeout = new Timeout(Duration.Zero)
/**
* Construct a Timeout from the given time unit and factor.
*/
def apply(length: Long, unit: TimeUnit): Timeout = new Timeout(length, unit)
implicit def durationToTimeout(duration: FiniteDuration): Timeout = new Timeout(duration)
}
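/** Hedged usage sketch (not part of the original file): both values below are
  * 5-second timeouts; the second relies on the implicit `durationToTimeout`. */
private[util] object TimeoutExample {
  import scala.concurrent.duration._
  val explicit: Timeout = Timeout(5, TimeUnit.SECONDS)
  val converted: Timeout = 5.seconds
}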
|
jmnarloch/akka.js
|
akka-js-actor/jvm/src/main/scala/akka/util/Timeout.scala
|
Scala
|
bsd-3-clause
| 1,002
|
package com.twitter.finatra.conversions
import com.github.nscala_time.time.{DurationBuilder, Implicits}
import com.twitter.util.{Duration => TwitterDuration, Time}
import org.joda.time.{DateTime, DateTimeZone, Duration}
/**
* Add additional conversions to 'scala-time' and also
* overcome issues with scala time joda wrappers not being serializable by jackson
*/
object time extends Implicits {
/* ------------------------------------------------ */
implicit class FinatraRichDateTime(dateTime: org.joda.time.DateTime) {
private val LongTimeFromNowMillis = new DateTime(9999, 1, 1, 0, 0, 0, 0).getMillis
def utcIso8601: String =
utcIso8601(dateTime)
    def reverseUtcIso8601: String = {
utcIso8601(
new DateTime(LongTimeFromNowMillis - dateTime.getMillis))
}
private def utcIso8601(dateTime: DateTime): String = {
dateTime.withZone(DateTimeZone.UTC).toString
}
def toTwitterTime: Time = {
Time.fromMilliseconds(dateTime.getMillis)
}
def epochSeconds: Int = {
(dateTime.getMillis / 1000).toInt
}
}
/* ------------------------------------------------ */
implicit class FinatraRichDuration(duration: Duration) {
def toTwitterDuration: TwitterDuration = {
TwitterDuration.fromMilliseconds(
duration.getMillis)
}
}
/* ------------------------------------------------ */
implicit class FinatraRichDurationBuilder(duration: DurationBuilder) {
def toTwitterDuration: TwitterDuration = {
TwitterDuration.fromMilliseconds(
duration.toDuration.getMillis)
}
}
/* ------------------------------------------------ */
implicit class RichStringTime(string: String) {
def toDateTime: DateTime = {
DateTime.parse(string)
}
}
}
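/** Hedged usage sketch (values are illustrative): the enrichments above in action. */
private object TimeConversionsExamples {
  import time._
  val dt = new DateTime(2015, 1, 1, 0, 0, DateTimeZone.UTC)
  val iso: String = dt.utcIso8601 // "2015-01-01T00:00:00.000Z"
  val asTwitterTime: Time = dt.toTwitterTime
  val asTwitterDuration: TwitterDuration = new Duration(1500L).toTwitterDuration
}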
|
tom-chan/finatra
|
utils/src/main/scala/com/twitter/finatra/conversions/time.scala
|
Scala
|
apache-2.0
| 1,773
|
package io.digitalmagic.akka.dsl
import scala.reflect.ClassTag
trait Event extends Product with Serializable {
type TimestampType
var timestamp: TimestampType
}
trait PersistentState extends Product with Serializable {
Self =>
type EventType <: Event
}
trait PersistentStateProcessor[T <: PersistentState] {
def empty: T
def process(state: T, event: T#EventType): T
}
trait EventSourced {
type EventType
implicit val eventTypeTag: ClassTag[EventType]
type State <: PersistentState { type EventType = EventSourced.this.EventType }
implicit val stateTag: ClassTag[State]
val persistentState: PersistentStateProcessor[State]
}
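/** Hedged sketch (all names below are illustrative, not from the library): a
  * minimal counter wired through the three abstractions above. */
object CounterExample {
  case class Incremented(var timestamp: Long = 0L) extends Event {
    type TimestampType = Long
  }
  case class CounterState(count: Int) extends PersistentState {
    type EventType = Incremented
  }
  object CounterProcessor extends PersistentStateProcessor[CounterState] {
    def empty: CounterState = CounterState(0)
    def process(state: CounterState, event: Incremented): CounterState =
      state.copy(count = state.count + 1)
  }
}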
|
digital-magic-io/akka-cqrs-dsl
|
akka-cqrs-dsl-core/src/main/scala/io/digitalmagic/akka/dsl/EventSourced.scala
|
Scala
|
apache-2.0
| 653
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.k8s.integrationtest
import org.apache.spark.internal.config
private[spark] trait DecommissionSuite { k8sSuite: KubernetesSuite =>
import DecommissionSuite._
import KubernetesSuite.k8sTestTag
test("Test basic decommissioning", k8sTestTag) {
sparkAppConf
.set(config.DECOMMISSION_ENABLED.key, "true")
.set("spark.kubernetes.container.image", pyImage)
.set(config.STORAGE_DECOMMISSION_ENABLED.key, "true")
.set(config.STORAGE_DECOMMISSION_SHUFFLE_BLOCKS_ENABLED.key, "true")
.set(config.STORAGE_DECOMMISSION_RDD_BLOCKS_ENABLED.key, "true")
      // Ensure we have somewhere to migrate our data to
.set("spark.executor.instances", "3")
// The default of 30 seconds is fine, but for testing we just want to get this done fast.
.set("spark.storage.decommission.replicationReattemptInterval", "1")
runSparkApplicationAndVerifyCompletion(
appResource = PYSPARK_DECOMISSIONING,
mainClass = "",
expectedDriverLogOnCompletion = Seq(
"Finished waiting, stopping Spark",
"Decommission executors",
"Final accumulator value is: 100"),
appArgs = Array.empty[String],
driverPodChecker = doBasicDriverPyPodCheck,
executorPodChecker = doBasicExecutorPyPodCheck,
appLocator = appLocator,
isJVM = false,
pyFiles = None,
executorPatience = None,
decommissioningTest = true)
}
test("Test basic decommissioning with shuffle cleanup", k8sTestTag) {
sparkAppConf
.set(config.DECOMMISSION_ENABLED.key, "true")
.set("spark.kubernetes.container.image", pyImage)
.set(config.STORAGE_DECOMMISSION_ENABLED.key, "true")
.set(config.STORAGE_DECOMMISSION_SHUFFLE_BLOCKS_ENABLED.key, "true")
.set(config.STORAGE_DECOMMISSION_RDD_BLOCKS_ENABLED.key, "true")
.set(config.DYN_ALLOCATION_SHUFFLE_TRACKING_ENABLED.key, "true")
.set(config.DYN_ALLOCATION_SHUFFLE_TRACKING_TIMEOUT.key, "400")
      // Ensure we have somewhere to migrate our data to
.set("spark.executor.instances", "3")
// The default of 30 seconds is fine, but for testing we just want to get this done fast.
.set("spark.storage.decommission.replicationReattemptInterval", "1")
runSparkApplicationAndVerifyCompletion(
appResource = PYSPARK_DECOMISSIONING_CLEANUP,
mainClass = "",
expectedDriverLogOnCompletion = Seq(
"Finished waiting, stopping Spark",
"Decommission executors"),
appArgs = Array.empty[String],
driverPodChecker = doBasicDriverPyPodCheck,
executorPodChecker = doBasicExecutorPyPodCheck,
appLocator = appLocator,
isJVM = false,
pyFiles = None,
executorPatience = None,
decommissioningTest = true)
}
test("Test decommissioning with dynamic allocation & shuffle cleanups", k8sTestTag) {
sparkAppConf
.set(config.DECOMMISSION_ENABLED.key, "true")
.set("spark.kubernetes.container.image", pyImage)
.set(config.STORAGE_DECOMMISSION_ENABLED.key, "true")
.set(config.STORAGE_DECOMMISSION_SHUFFLE_BLOCKS_ENABLED.key, "true")
.set(config.STORAGE_DECOMMISSION_RDD_BLOCKS_ENABLED.key, "true")
.set(config.DYN_ALLOCATION_SHUFFLE_TRACKING_ENABLED.key, "true")
.set(config.DYN_ALLOCATION_SHUFFLE_TRACKING_TIMEOUT.key, "30")
.set(config.DYN_ALLOCATION_CACHED_EXECUTOR_IDLE_TIMEOUT.key, "30")
.set(config.DYN_ALLOCATION_EXECUTOR_IDLE_TIMEOUT.key, "5")
.set(config.DYN_ALLOCATION_MIN_EXECUTORS.key, "1")
.set(config.DYN_ALLOCATION_INITIAL_EXECUTORS.key, "2")
.set(config.DYN_ALLOCATION_ENABLED.key, "true")
// The default of 30 seconds is fine, but for testing we just want to get this done fast.
.set("spark.storage.decommission.replicationReattemptInterval", "1")
var execLogs: String = ""
runSparkApplicationAndVerifyCompletion(
appResource = PYSPARK_SCALE,
mainClass = "",
expectedDriverLogOnCompletion = Seq(
"Finished waiting, stopping Spark",
"Decommission executors"),
appArgs = Array.empty[String],
driverPodChecker = doBasicDriverPyPodCheck,
executorPodChecker = doBasicExecutorPyPodCheck,
appLocator = appLocator,
isJVM = false,
pyFiles = None,
executorPatience = None,
decommissioningTest = false)
}
}
private[spark] object DecommissionSuite {
val TEST_LOCAL_PYSPARK: String = "local:///opt/spark/tests/"
val PYSPARK_DECOMISSIONING: String = TEST_LOCAL_PYSPARK + "decommissioning.py"
val PYSPARK_DECOMISSIONING_CLEANUP: String = TEST_LOCAL_PYSPARK + "decommissioning_cleanup.py"
val PYSPARK_SCALE: String = TEST_LOCAL_PYSPARK + "autoscale.py"
}
|
witgo/spark
|
resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/DecommissionSuite.scala
|
Scala
|
apache-2.0
| 5,525
|
package com.datastax.spark.connector.types
import com.datastax.driver.core.DataType
trait CustomDriverConverter {
val fromDriverRowExtension: PartialFunction[DataType, ColumnType[_]]
}
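/** Hedged sketch (the mapping choice is illustrative): a converter treating any
  * driver-level CUSTOM type as Cassandra text via the connector's `TextType`. */
object TextFallbackConverter extends CustomDriverConverter {
  override val fromDriverRowExtension: PartialFunction[DataType, ColumnType[_]] = {
    case dataType if dataType.getName == DataType.Name.CUSTOM => TextType
  }
}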
|
ponkin/spark-cassandra-connector
|
spark-cassandra-connector/src/main/scala/com/datastax/spark/connector/types/CustomDriverConverter.scala
|
Scala
|
apache-2.0
| 191
|
package databus
import api._
import com.google.inject.AbstractModule
import net.codingwell.scalaguice.ScalaModule
class DatabusModule extends AbstractModule with ScalaModule {
override def configure(): Unit = {
    bind[UserService].to[UsersServiceImpl]
    bind[GamesService].to[GamesServiceImpl]
}
}
|
salceson/toik-games-price-comparator
|
app/databus/DatabusModule.scala
|
Scala
|
mit
| 360
|
package com.twitter.finagle.stats
import com.twitter.common.metrics.Metrics
import com.twitter.finagle.toggle.flag
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
import scala.collection.JavaConverters._
@RunWith(classOf[JUnitRunner])
class StatsFormatterTest extends FunSuite {
private val metrics = Metrics.createDetached()
private val sr = new ImmediateMetricsStatsReceiver(metrics)
private val histo1 = sr.stat("histo1")
(0 to 100).foreach(histo1.add(_))
private val values = SampledValues(
Map.empty,
Map.empty,
metrics.sampleHistograms().asScala)
test("CommonsMetrics is formatted the same as Metrics.sample") {
val formatter = StatsFormatter.CommonsMetrics
val formatted = formatter(values)
// remove stddev as it is not supported
assert(formatted == metrics.sample().asScala.filterKeys(!_.endsWith("stddev")))
assert(formatted("histo1.p50") == 50)
assert(formatted("histo1.p90") == 90)
assert(formatted("histo1.p9990") == 100)
assert(formatted("histo1.p9999") == 100)
assert(formatted("histo1.count") == 101)
assert(formatted("histo1.max") == 100)
assert(formatted("histo1.min") == 0)
assert(formatted("histo1.avg") == 50)
}
test("Ostrich") {
val formatter = StatsFormatter.Ostrich
val formatted = formatter(values)
assert(formatted("histo1.p50") == 50)
assert(formatted("histo1.p90") == 90)
assert(formatted("histo1.p999") == 100)
assert(formatted("histo1.p9999") == 100)
assert(formatted("histo1.count") == 101)
assert(formatted("histo1.maximum") == 100)
assert(formatted("histo1.minimum") == 0)
assert(formatted("histo1.average") == 50)
}
test("CommonsStats") {
val formatter = StatsFormatter.CommonsStats
val formatted = formatter(values)
assert(formatted("histo1_50_0_percentile") == 50)
assert(formatted("histo1_90_0_percentile") == 90)
assert(formatted("histo1_99_0_percentile") == 99)
assert(formatted("histo1_99_9_percentile") == 100)
assert(formatted("histo1_99_99_percentile") == 100)
assert(formatted("histo1_count") == 101)
assert(formatted("histo1_max") == 100)
assert(formatted("histo1_min") == 0)
assert(formatted("histo1_avg") == 50)
}
test("includeEmptyHistograms flag") {
val metrics = Metrics.createDetached()
val stats = new ImmediateMetricsStatsReceiver(metrics)
stats.stat("empty_histo")
val values = SampledValues(
Map.empty,
Map.empty,
metrics.sampleHistograms().asScala)
val formatter = StatsFormatter.Ostrich
includeEmptyHistograms.let(false) {
val formatted = formatter(values)
assert(Map("empty_histo.count" -> 0) == formatted)
}
}
test("shouldIncludeEmptyHistograms uses flag when set") {
flag.overrides.let(StatsFormatter.ExportEmptyHistogramToggleId, 1.0) {
includeEmptyHistograms.let(false) {
assert(!StatsFormatter.shouldIncludeEmptyHistograms)
}
includeEmptyHistograms.let(true) {
assert(StatsFormatter.shouldIncludeEmptyHistograms)
}
}
}
test("shouldIncludeEmptyHistograms uses toggle when flag not set") {
includeEmptyHistograms.letClear {
flag.overrides.let(StatsFormatter.ExportEmptyHistogramToggleId, 0.0) {
assert(!StatsFormatter.shouldIncludeEmptyHistograms)
}
flag.overrides.let(StatsFormatter.ExportEmptyHistogramToggleId, 1.0) {
assert(StatsFormatter.shouldIncludeEmptyHistograms)
}
}
}
}
|
adriancole/finagle
|
finagle-stats/src/test/scala/com/twitter/finagle/stats/StatsFormatterTest.scala
|
Scala
|
apache-2.0
| 3,548
|
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn.ops
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.T
import com.intel.analytics.bigdl.utils.serializer.ModuleSerializationTest
class FloorDivSerialTest extends ModuleSerializationTest {
override def test(): Unit = {
val floorDiv = FloorDiv[Float, Float]().setName("floorDiv")
val input1 = Tensor[Float](5).fill(1.0f)
val input2 = Tensor[Float](5).fill(2.0f)
val input = T(input1, input2)
runSerializationTest(floorDiv, input)
}
}
|
wzhongyuan/BigDL
|
spark/dl/src/test/scala/com/intel/analytics/bigdl/nn/ops/FloorDivSpec.scala
|
Scala
|
apache-2.0
| 1,135
|
package piecewise.intervaltree
import cats.instances.double._
import org.scalacheck.Gen._
import org.scalacheck.Prop._
import org.scalacheck.{Gen, Properties}
import piecewise.intervaltree.Gens._
object IntervalTreeFold extends Properties("Interval Tree Fold"){
def segments(s: Int, n: Int): Gen[List[Int]] =
if (n == 0) Gen.const(s :: Nil)
else for {
head <- if (s == 0) Gen.const(0) else choose(0, s)
tail <- segments(s - head, n - 1)
} yield {head :: tail}
def defaultNodes(min: Double,
n: Int): Gen[List[((Double, Double), Int)]] =
for {
ins <- intervalsGen(min, n)
vls <- listOfN(ins.size, chooseNum(0, 100))
} yield {ins zip vls}
def knownSumNodes(min: Double,
n: Int,
sum: Int): Gen[(Int, List[((Double, Double), Int)])] =
for {
ins <- intervalsGen(min, n)
vls <- segments(sum, ins.size)
} yield {
      val zip = ins zip vls
      // `vls` holds one more element than `ins` (segments yields n + 1 parts), so
      // the zip drops the last part; recompute the sum that actually remains
      val actualSum = zip.map(_._2).sum
      (actualSum, zip)
}
val genMid: Gen[(Int, Double, Double, List[((Double, Double), Int)])] =
for {
knownSum <- chooseNum(10, 100)
left <- defaultNodes(-100.0, 7)
knownNodes <- knownSumNodes(lastBound(left), 10, knownSum)
right <- defaultNodes(lastBound(knownNodes._2), 9)
} yield {(knownNodes._1,
headBound(knownNodes._2),
lastBound(knownNodes._2),
left ::: knownNodes._2 ::: right)
}
val genLeft: Gen[(Int, Double, Double, List[((Double, Double), Int)])] =
for {
knownSum <- chooseNum(10, 100)
knownNodes <- knownSumNodes(-100.0, 10, knownSum)
right <- defaultNodes(lastBound(knownNodes._2), 9)
} yield {(knownNodes._1,
headBound(knownNodes._2),
lastBound(knownNodes._2),
knownNodes._2 ::: right)}
val genRight: Gen[(Int, Double, Double, List[((Double, Double), Int)])] =
for {
knownSum <- chooseNum(10, 100)
left <- defaultNodes(-100.0, 7)
knownNodes <- knownSumNodes(lastBound(left), 10, knownSum)
} yield {(knownNodes._1,
headBound(knownNodes._2),
lastBound(knownNodes._2),
left ::: knownNodes._2)}
val genFragment: Gen[(Int, Double, Double, List[((Double, Double), Int)])] =
for {
knownSum <- chooseNum(10, 100)
left <- defaultNodes(-100.0, 7)
knownNodes <- knownSumNodes(lastBound(left), 10, knownSum)
right <- defaultNodes(lastBound(knownNodes._2), 15)
} yield {(knownNodes._1,
headBound(knownNodes._2),
lastBound(knownNodes._2),
left ::: knownNodes._2 ::: right)}
val checkSum = {t: (Int, Double, Double, List[((Double, Double), Int)]) =>
val (sum, low, upp, src) = t
val tree = AbsITree.build(src.iterator, src.size)
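    // fold only over the [low, upp] window: zero-width slices contribute 0,
    // so the folded total must equal the known sum of the middle segment alone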
val newSum =
AbsITree.viewFold(low, upp)(tree)((l, u, v) => if (l - u >= 0.0) 0.0 else v)
newSum ?= sum
}
property(" Fold mid") = forAll(genMid){checkSum}
property(" Fold left") = forAll(genLeft){checkSum}
property(" Fold right") = forAll(genRight){checkSum}
property(" Fold fragment") = forAll(genFragment){checkSum}
}
|
daniil-timofeev/gridsplines
|
piecewise/src/test/scala/piecewise/intervaltree/IntervalTreeFold.scala
|
Scala
|
apache-2.0
| 3,188
|
package newton
object Newton {
def sqrt(x: Double): Double = {
def sqrtIter(guess: Double): Double =
if (isGoodEnough(guess)) guess
else sqrtIter(improve(guess))
def improve(guess: Double): Double =
(guess + x / guess) / 2
def isGoodEnough(guess: Double): Boolean =
abs((guess * guess - x) / guess) < 0.001
def abs(x: Double): Double = if (x >= 0) x else -x
sqrtIter(1.0)
}
}
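// Hedged usage sketch (not part of the original file): the iteration converges
// quickly, e.g. sqrt(2) stabilises near 1.41421 within a handful of steps.
object NewtonExample extends App {
  val root = Newton.sqrt(2.0)
  println(f"sqrt(2) ~= $root%.5f (squared back: ${root * root}%.5f)")
}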
|
timtian090/Playground
|
Scala/scalabyexample/src/main/scala/Newton.scala
|
Scala
|
mit
| 430
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ui.scope
import scala.collection.mutable
import org.apache.spark.SparkConf
import org.apache.spark.scheduler._
import org.apache.spark.ui.SparkUI
/**
* A SparkListener that constructs a DAG of RDD operations.
*/
private[ui] class RDDOperationGraphListener(conf: SparkConf) extends SparkListener {
// Note: the fate of jobs and stages are tied. This means when we clean up a job,
// we always clean up all of its stages. Similarly, when we clean up a stage, we
// always clean up its job (and, transitively, other stages in the same job).
private[ui] val jobIdToStageIds = new mutable.HashMap[Int, Seq[Int]]
private[ui] val jobIdToSkippedStageIds = new mutable.HashMap[Int, Seq[Int]]
private[ui] val stageIdToJobId = new mutable.HashMap[Int, Int]
private[ui] val stageIdToGraph = new mutable.HashMap[Int, RDDOperationGraph]
private[ui] val completedStageIds = new mutable.HashSet[Int]
// Keep track of the order in which these are inserted so we can remove old ones
private[ui] val jobIds = new mutable.ArrayBuffer[Int]
private[ui] val stageIds = new mutable.ArrayBuffer[Int]
// How many root nodes to retain in DAG Graph
private[ui] val retainedNodes =
conf.getInt("spark.ui.dagGraph.retainedRootRDDs", Int.MaxValue)
// How many jobs or stages to retain graph metadata for
private val retainedJobs =
conf.getInt("spark.ui.retainedJobs", SparkUI.DEFAULT_RETAINED_JOBS)
private val retainedStages =
conf.getInt("spark.ui.retainedStages", SparkUI.DEFAULT_RETAINED_STAGES)
/**
* Return the graph metadata for all stages in the given job.
* An empty list is returned if one or more of its stages has been cleaned up.
*/
def getOperationGraphForJob(jobId: Int): Seq[RDDOperationGraph] = synchronized {
val skippedStageIds = jobIdToSkippedStageIds.getOrElse(jobId, Seq.empty)
val graphs = jobIdToStageIds.getOrElse(jobId, Seq.empty)
.flatMap { sid => stageIdToGraph.get(sid) }
// Mark any skipped stages as such
graphs.foreach { g =>
val stageId = g.rootCluster.id.replaceAll(RDDOperationGraph.STAGE_CLUSTER_PREFIX, "").toInt
if (skippedStageIds.contains(stageId) && !g.rootCluster.name.contains("skipped")) {
g.rootCluster.setName(g.rootCluster.name + " (skipped)")
}
}
graphs
}
/** Return the graph metadata for the given stage, or None if no such information exists. */
def getOperationGraphForStage(stageId: Int): Option[RDDOperationGraph] = synchronized {
stageIdToGraph.get(stageId)
}
/** On job start, construct a RDDOperationGraph for each stage in the job for display later. */
override def onJobStart(jobStart: SparkListenerJobStart): Unit = synchronized {
val jobId = jobStart.jobId
val stageInfos = jobStart.stageInfos
jobIds += jobId
    jobIdToStageIds(jobId) = stageInfos.map(_.stageId).sorted
stageInfos.foreach { stageInfo =>
val stageId = stageInfo.stageId
stageIds += stageId
stageIdToJobId(stageId) = jobId
stageIdToGraph(stageId) = RDDOperationGraph.makeOperationGraph(stageInfo, retainedNodes)
trimStagesIfNecessary()
}
trimJobsIfNecessary()
}
/** Keep track of stages that have completed. */
override def onStageCompleted(stageCompleted: SparkListenerStageCompleted): Unit = synchronized {
val stageId = stageCompleted.stageInfo.stageId
if (stageIdToJobId.contains(stageId)) {
// Note: Only do this if the stage has not already been cleaned up
// Otherwise, we may never clean this stage from `completedStageIds`
completedStageIds += stageCompleted.stageInfo.stageId
}
}
/** On job end, find all stages in this job that are skipped and mark them as such. */
override def onJobEnd(jobEnd: SparkListenerJobEnd): Unit = synchronized {
val jobId = jobEnd.jobId
jobIdToStageIds.get(jobId).foreach { stageIds =>
val skippedStageIds = stageIds.filter { sid => !completedStageIds.contains(sid) }
// Note: Only do this if the job has not already been cleaned up
// Otherwise, we may never clean this job from `jobIdToSkippedStageIds`
jobIdToSkippedStageIds(jobId) = skippedStageIds
}
}
/** Clean metadata for old stages if we have exceeded the number to retain. */
private def trimStagesIfNecessary(): Unit = {
if (stageIds.size >= retainedStages) {
val toRemove = math.max(retainedStages / 10, 1)
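      // e.g. with the default of 1000 retained stages this evicts the 100 oldest
      // in one batch, amortising cleanup instead of evicting on every insertion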
stageIds.take(toRemove).foreach { id => cleanStage(id) }
stageIds.trimStart(toRemove)
}
}
/** Clean metadata for old jobs if we have exceeded the number to retain. */
private def trimJobsIfNecessary(): Unit = {
if (jobIds.size >= retainedJobs) {
val toRemove = math.max(retainedJobs / 10, 1)
jobIds.take(toRemove).foreach { id => cleanJob(id) }
jobIds.trimStart(toRemove)
}
}
/** Clean metadata for the given stage, its job, and all other stages that belong to the job. */
private[ui] def cleanStage(stageId: Int): Unit = {
completedStageIds.remove(stageId)
stageIdToGraph.remove(stageId)
stageIdToJobId.remove(stageId).foreach { jobId => cleanJob(jobId) }
}
/** Clean metadata for the given job and all stages that belong to it. */
private[ui] def cleanJob(jobId: Int): Unit = {
jobIdToSkippedStageIds.remove(jobId)
jobIdToStageIds.remove(jobId).foreach { stageIds =>
stageIds.foreach { stageId => cleanStage(stageId) }
}
}
}
|
sh-cho/cshSpark
|
ui/scope/RDDOperationGraphListener.scala
|
Scala
|
apache-2.0
| 6,259
|
// Copyright (C) 2015 IBM Corp. All Rights Reserved.
// See the LICENCE.txt file distributed with this work for additional
// information regarding copyright ownership.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.ibm.watson.developer_cloud.visual_insights.v1.model
import spray.json.DefaultJsonProtocol
object VisualInsightsProtocol extends DefaultJsonProtocol {
implicit val classifierFormat = jsonFormat(Classifier, "name")
implicit val classifiersFormat = jsonFormat(Classifiers, "classifiers")
implicit val summaryFormat = jsonFormat(Summary, "name", "score")
}
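/** Hedged usage sketch (the JSON payload is illustrative; `Summary` is the case
  * class that `summaryFormat` above is derived from): */
object VisualInsightsProtocolExample {
  import spray.json._
  import VisualInsightsProtocol._
  val summary: Summary = """{"name": "sunset", "score": 0.98}""".parseJson.convertTo[Summary]
}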
|
kane77/watson-scala-wrapper
|
src/main/scala/com/ibm/watson/developer_cloud/visual_insights/v1/model/VisualInsightsProtocol.scala
|
Scala
|
apache-2.0
| 1,100
|
package com.twitter.finagle.toggle
import com.twitter.finagle.stats.StatsReceiver
import com.twitter.finagle.toggle.Toggle.Metadata
import com.twitter.logging.Logger
import java.nio.charset.StandardCharsets.UTF_8
import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.atomic.AtomicReference
import java.util.zip.CRC32
import java.util.{function => juf}
import java.{lang => jl}
import scala.annotation.varargs
import scala.collection.JavaConverters._
import scala.collection.{immutable, mutable}
import scala.util.hashing.MurmurHash3
/**
 * A collection of Int-typed [[Toggle toggles]] that can be used to build
 * feature toggles, allowing behavior to be modified without changing code.
*
* Expected usage is for code to have [[Toggle toggles]] passed into
* their constructors instead of dynamically creating new [[Toggle toggles]]
* on every call.
*
* @see [[Toggle]]
* @see [[ServiceLoadedToggleMap]] and [[StandardToggleMap]] for typical usage
* entry points.
* @see [[https://martinfowler.com/articles/feature-toggles.html Feature Toggles]]
* for detailed discussion on the topic.
*/
abstract class ToggleMap { self =>
/**
* Get a [[Toggle]] for this `id`.
*
* The `Toggle.isDefined` method should return `false` if the
* [[ToggleMap]] does not know about that [[Toggle]]
* or it is currently not "operating" on that `id`.
*
* @param id the identifying name of the `Toggle`.
* These should generally be fully qualified names to avoid conflicts
* between libraries. For example, "com.twitter.finagle.CoolThing".
*
* @see [[get]] for a Java friendly version
*/
def apply(id: String): Toggle
/**
* Get a [[Toggle]] for this `id`. Java-friendly synonym for `apply`.
*
* The `Toggle.isDefined` method should return `false` if the
* [[ToggleMap]] does not know about that [[Toggle]]
* or it is currently not "operating" on that `id`.
*
* @param id the identifying name of the `Toggle`.
* These should generally be fully qualified names to avoid conflicts
* between libraries. For example, "com.twitter.finagle.CoolThing".
*/
final def get(id: String): Toggle = apply(id)
def iterator: Iterator[Toggle.Metadata]
/**
* Creates a [[ToggleMap]] which uses `this` before `that`.
*
* [[apply]] returns a [[Toggle]] that uses the [[Toggle]] from `this`
* if it `isDefined` for the input, before trying `that`.
*
* [[iterator]] includes metadata from both `self` and `that`,
* with `self`'s metadata taking precedence on conflicting ids.
* Note however that if a `ToggleMetadata.description` is not defined on `self`,
* the description from `that` will be preferred. This is done because many
* sources of `ToggleMaps` do not have a description defined and we want to
* surface that information.
*/
def orElse(that: ToggleMap): ToggleMap = {
new ToggleMap with ToggleMap.Composite {
override def toString: String =
s"${self.toString}.orElse(${that.toString})"
def apply(id: String): Toggle = {
self(id).orElse(that(id))
}
def iterator: Iterator[Metadata] = {
val byName = mutable.Map.empty[String, Toggle.Metadata]
that.iterator.foreach { md => byName.put(md.id, md) }
self.iterator.foreach { md =>
val mdWithDesc = md.description match {
case Some(_) => md
case None => md.copy(description = byName.get(md.id).flatMap(ToggleMap.MdDescFn))
}
byName.put(md.id, mdWithDesc)
}
byName.valuesIterator
}
def components: Seq[ToggleMap] = {
Seq(self, that)
}
}
}
}
object ToggleMap {
/**
* Used to create a `Toggle` that hashes its inputs to
* `apply` and `isDefined` in order to promote a relatively even
* distribution even when the inputs do not have a good distribution.
*
* This allows users to get away with using a poor hashing function,
* such as `String.hashCode`.
*/
private def hashedToggle(id: String, fn: Int => Boolean, fraction: Double): Toggle.Fractional =
new Toggle.Fractional(id) {
override def toString: String = s"Toggle($id)"
// Each Toggle has a different hash seed so that Toggles are independent
private[this] val hashSeed = MurmurHash3.stringHash(id)
private[this] def hash(i: Int): Int = {
val h = MurmurHash3.mix(hashSeed, i)
MurmurHash3.finalizeHash(h, 1)
}
def isDefined: Boolean = true
def apply(x: Int): Boolean = fn(hash(x))
def currentFraction: Double = fraction
}
private[this] val MetadataOrdering: Ordering[Toggle.Metadata] =
Ordering.by((md: Toggle.Metadata) => (md.id, md.fraction))
/**
* Creates a [[ToggleMap]] with a `Gauge`, "checksum", which summarizes the
* current state of the `Toggles` which may be useful for comparing state
* across a cluster or over time.
*
* @param statsReceiver in typical usage by [[StandardToggleMap]], will be
   * scoped to "toggles/$libraryName".
*/
def observed(toggleMap: ToggleMap, statsReceiver: StatsReceiver): ToggleMap = {
new ToggleMap with Proxy with Composite {
private[this] val lastApplied =
new ConcurrentHashMap[String, AtomicReference[jl.Boolean]]()
private[this] val checksum = statsReceiver.addGauge("checksum") {
// crc32 is not a cryptographic hash, but good enough for our purposes
// of summarizing the current state of the ToggleMap. we only need it
// to be efficient to compute and have small changes to the input affect
// the output.
val crc32 = new CRC32()
// need a consistent ordering, forcing the sort before computation
iterator.toIndexedSeq.sorted(MetadataOrdering).foreach { md =>
crc32.update(md.id.getBytes(UTF_8))
// convert the md's fraction to a Long and then feed each
// byte into the crc
val f = java.lang.Double.doubleToLongBits(md.fraction)
crc32.update((0xff & f).toInt)
crc32.update((0xff & (f >> 8)).toInt)
crc32.update((0xff & (f >> 16)).toInt)
crc32.update((0xff & (f >> 24)).toInt)
crc32.update((0xff & (f >> 32)).toInt)
crc32.update((0xff & (f >> 40)).toInt)
crc32.update((0xff & (f >> 48)).toInt)
crc32.update((0xff & (f >> 56)).toInt)
}
crc32.getValue.toFloat
}
def underlying: ToggleMap = toggleMap
override def toString: String =
s"observed($toggleMap, $statsReceiver)"
def components: Seq[ToggleMap] =
Seq(underlying)
// mixes in `Toggle.Captured` to provide visibility into how
// toggles are in use at runtime.
override def apply(id: String): Toggle = {
val delegate = super.apply(id)
new Toggle(delegate.id) with Toggle.Captured {
private[this] val last =
lastApplied.computeIfAbsent(
id,
new juf.Function[String, AtomicReference[jl.Boolean]] {
def apply(t: String): AtomicReference[jl.Boolean] =
new AtomicReference[jl.Boolean](null)
})
override def toString: String = delegate.toString
def isDefined: Boolean =
delegate.isDefined
def apply(v1: Int): Boolean = {
val value = delegate(v1)
last.set(jl.Boolean.valueOf(value))
value
}
def lastApply: Option[Boolean] = last.get match {
case null => None
case v => Some(v)
}
}
}
}
}
/**
* A marker interface in support of [[components(ToggleMap)]]
*/
private trait Composite {
def components: Seq[ToggleMap]
}
/**
* For some administrative purposes, it can be useful to get at the
* component `ToggleMaps` that may make up a [[ToggleMap]].
*
* For example:
* {{{
* val toggleMap1: ToggleMap = ...
* val toggleMap2: ToggleMap = ...
* val combined = toggleMap1.orElse(toggleMap2)
* assert(Seq(toggleMap1, toggleMap2) == ToggleMap.components(combined))
* }}}
*/
def components(toggleMap: ToggleMap): Seq[ToggleMap] = {
toggleMap match {
case composite: Composite =>
composite.components.flatMap(components)
case proxy: Proxy =>
components(proxy.underlying)
case _ =>
Seq(toggleMap)
}
}
/**
* The [[ToggleMap]] interface is read only and this
* is the mutable side of it.
*
* Implementations are expected to be thread-safe.
*/
abstract class Mutable extends ToggleMap {
/**
* Add or replace the [[Toggle]] for this `id` with a
* [[Toggle]] that returns `true` for a `fraction` of the inputs.
*
* @param id the identifying name of the `Toggle`.
* These should generally be fully qualified names to avoid conflicts
* between libraries. For example, "com.twitter.finagle.CoolThing".
* @param fraction must be within `0.0–1.0`, inclusive. If not, the operation
* is ignored.
*/
def put(id: String, fraction: Double): Unit
/**
* Remove the [[Toggle]] for this `id`.
*
* This is a no-op for missing values.
*
* @param id the identifying name of the `Toggle`.
* These should generally be fully qualified names to avoid conflicts
* between libraries. For example, "com.twitter.finagle.CoolThing".
*/
def remove(id: String): Unit
}
/**
* Create a [[Toggle]] where `fraction` of the inputs will return `true.`
*
* @note that inputs to [[Toggle.apply]] will be modified to promote
* better distributions in the face of low entropy inputs.
*
* @param id the name of the Toggle which is used to mix
* where along the universe of Ints does the range fall.
* @param fraction the fraction, from 0.0 - 1.0 (inclusive), of Ints
* to return `true`. If outside of that range, a
* `java.lang.IllegalArgumentException` will be thrown.
*/
private[toggle] def fractional(id: String, fraction: Double): Toggle = {
Toggle.validateFraction(id, fraction)
// we want a continuous range within the space of Int.MinValue
// to Int.MaxValue, including overflowing Max.
// By mapping the range to a Long and then mapping this into the
// space of Ints we create a Toggle that is both space efficient
// as well as quick to respond to `apply`.
// within a range of [0, Int.MaxValue*2]
val range: Long = ((1L << 32) * fraction).toLong
// We want to use `id` as an input into the function so
// that ints have different likelihoods depending on the toggle's id.
// Without this, every Toggle's range would start at 0.
    // The input to many toggles may be something consistent per node,
    // say a machine name. So without the offset, nodes that hash
    // close to 0 would be much more likely to have most or all toggles
    // turned on. By using the id as an offset, we can shift this and
    // make the toggles more evenly distributed.
val start = id.hashCode
val end: Int = (start + range).toInt
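    // worked example: fraction = 0.5 gives range = 2^31; with id.hashCode = 100,
    // the window [100, 100 + 2^31) wraps past Int.MaxValue into the negatives,
    // so the overflow branch below accepts roughly half of all (hashed) Ints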
if (range == 0) {
Toggle.off(id) // 0%
} else if (start == end) {
Toggle.on(id) // 100%
} else if (start <= end) {
// the range is contiguous without overflows.
hashedToggle(id, { case i => i >= start && i <= end }, fraction)
} else {
// the range overflows around Int.MaxValue
hashedToggle(id, { case i => i >= start || i <= end }, fraction)
}
}
/**
* Create a [[ToggleMap]] out of the given [[ToggleMap ToggleMaps]].
*
* If `toggleMaps` is empty, [[NullToggleMap]] will be returned.
*/
@varargs
def of(toggleMaps: ToggleMap*): ToggleMap = {
val start: ToggleMap = NullToggleMap
toggleMaps.foldLeft(start) {
case (acc, tm) =>
acc.orElse(tm)
}
}
/**
* A [[ToggleMap]] implementation based on immutable [[Toggle.Metadata]].
*/
class Immutable(metadata: immutable.Seq[Toggle.Metadata]) extends ToggleMap {
private[this] val toggles: immutable.Map[String, Toggle] =
metadata.map { md => md.id -> fractional(md.id, md.fraction) }.toMap
override def toString: String =
s"ToggleMap.Immutable@${System.identityHashCode(this)}"
def apply(id: String): Toggle =
toggles.get(id) match {
case Some(t) => t
case None => Toggle.Undefined
}
def iterator: Iterator[Toggle.Metadata] =
metadata.iterator
}
private[this] val log = Logger.get()
private[this] val NoFractionAndToggle = (Double.NaN, Toggle.Undefined)
private class MutableToggle(id: String) extends Toggle.Fractional(id) {
private[this] val fractionAndToggle =
new AtomicReference[(Double, Toggle)](NoFractionAndToggle)
override def toString: String = s"MutableToggle($id)"
def currentFraction: Double =
fractionAndToggle.get()._1
private[ToggleMap] def setFraction(fraction: Double): Unit = {
val fAndT: (Double, Toggle) = if (Toggle.isValidFraction(fraction)) {
(fraction, fractional(id, fraction))
} else {
NoFractionAndToggle
}
fractionAndToggle.set(fAndT)
}
def isDefined: Boolean =
fractionAndToggle.get()._2.isDefined
def apply(t: Int): Boolean =
fractionAndToggle.get()._2(t)
}
/**
* Create an empty [[Mutable]] instance with a default [[Metadata.source]]
* specified.
*
* @note that inputs to [[Toggle.apply]] will be modified to promote
* better distributions in the face of low entropy inputs.
*/
def newMutable(): Mutable =
newMutable(None)
/**
* Create an empty [[Mutable]] instance with the given [[Metadata.source]].
*
* @note that inputs to [[Toggle.apply]] will be modified to promote
* better distributions in the face of low entropy inputs.
*/
def newMutable(source: String): Mutable =
newMutable(Some(source))
private[this] def newMutable(source: Option[String]): Mutable = new Mutable {
override def toString: String = source match {
case Some(src) => src
case None => s"ToggleMap.Mutable@${Integer.toHexString(hashCode())}"
}
// There will be minimal updates, so we can use a low concurrency level,
// which makes the footprint smaller.
private[this] val toggles =
new ConcurrentHashMap[String, MutableToggle](32, 0.75f, 1)
private[this] def toggleFor(id: String): MutableToggle = {
val curr = toggles.get(id)
if (curr != null) {
curr
} else {
val newToggle = new MutableToggle(id)
val prev = toggles.putIfAbsent(id, newToggle)
if (prev == null)
newToggle
else
prev
}
}
def apply(id: String): Toggle =
toggleFor(id)
def iterator: Iterator[Toggle.Metadata] = {
val source = toString
toggles.asScala.collect {
case (id, toggle) if Toggle.isValidFraction(toggle.currentFraction) =>
Toggle.Metadata(id, toggle.currentFraction, None, source)
}.toIterator
}
def put(id: String, fraction: Double): Unit = {
if (Toggle.isValidFraction(fraction)) {
log.info(s"Mutable Toggle id='$id' set to fraction=$fraction")
toggleFor(id).setFraction(fraction)
} else {
log.warning(s"Mutable Toggle id='$id' ignoring invalid fraction=$fraction")
}
}
def remove(id: String): Unit = {
log.info(s"Mutable Toggle id='$id' removed")
toggleFor(id).setFraction(Double.NaN)
}
}
/**
* A [[ToggleMap]] that is backed by a `com.twitter.app.GlobalFlag`,
* [[flag.overrides]].
*
* Its [[Toggle Toggles]] will reflect changes to the underlying `Flag` which
* enables usage in tests.
*
* Fractions that are out of range (outside of `[0.0-1.0]`) will be
* ignored.
*
* @note that inputs to [[Toggle.apply]] will be modified to promote
* better distributions in the face of low entropy inputs.
*/
val flags: ToggleMap = new ToggleMap {
override def toString: String = "ToggleMap.Flags"
private[this] def fractions: Map[String, Double] =
flag.overrides()
private[this] class FlagToggle(id: String) extends Toggle.Fractional(id) {
private[this] val fractionAndToggle =
new AtomicReference[(Double, Toggle)](NoFractionAndToggle)
override def toString: String = s"FlagToggle($id)"
def currentFraction: Double = fractionAndToggle.get()._1
def isDefined: Boolean =
fractions.get(id) match {
case Some(f) if Toggle.isValidFraction(f) => true
case _ => false
}
def apply(t: Int): Boolean = {
fractions.get(id) match {
case Some(f) if Toggle.isValidFraction(f) =>
val prev = fractionAndToggle.get()
val toggle =
if (f == prev._1) {
// we can use the cached toggle since the fraction matches
prev._2
} else {
val newToggle = fractional(id, f)
fractionAndToggle.compareAndSet(prev, (f, newToggle))
newToggle
}
toggle(t)
case _ =>
throw new IllegalStateException(s"$this not defined for input: $t")
}
}
}
def apply(id: String): Toggle =
new FlagToggle(id)
def iterator: Iterator[Toggle.Metadata] = {
val source = toString
fractions.iterator.collect {
case (id, f) if Toggle.isValidFraction(f) =>
Toggle.Metadata(id, f, None, source)
}
}
}
/**
* A [[ToggleMap]] that proxies work to `underlying`.
*
 * @note this trait does not by itself make inheritors a [[ToggleMap]]; it can
 * only be mixed into traits or classes that extend [[ToggleMap]].
*/
trait Proxy { self: ToggleMap =>
def underlying: ToggleMap
override def toString: String = underlying.toString
def apply(id: String): Toggle = underlying(id)
def iterator: Iterator[Metadata] = underlying.iterator
}
private val MdDescFn: Toggle.Metadata => Option[String] =
md => md.description
/**
* A [[ToggleMap]] which returns [[Toggle.on]] for all `ids`.
*
* @note [[ToggleMap.iterator]] will always be empty.
*/
val On: ToggleMap = new ToggleMap {
def apply(id: String): Toggle = Toggle.on(id)
def iterator: Iterator[Metadata] = Iterator.empty
}
/**
* A [[ToggleMap]] which returns [[Toggle.off]] for all `ids`.
*
* @note [[ToggleMap.iterator]] will always be empty.
*/
val Off: ToggleMap = new ToggleMap {
def apply(id: String): Toggle = Toggle.off(id)
def iterator: Iterator[Metadata] = Iterator.empty
}
}
|
twitter/finagle
|
finagle-toggle/src/main/scala/com/twitter/finagle/toggle/ToggleMap.scala
|
Scala
|
apache-2.0
| 19,043
|
package com.shocktrade.server.services.yahoo
import com.shocktrade.server.services.yahoo.YahooFinanceCSVHistoryService._
import io.scalajs.npm.moment.Moment
import io.scalajs.npm.request.Request
import scala.concurrent.{ExecutionContext, Future}
import scala.language.postfixOps
import scala.scalajs.js
import scala.scalajs.js.JSConverters._
import scala.util.Try
/**
* Yahoo Finance! CSV History Service
* @author Lawrence Daniels <lawrence.daniels@gmail.com>
*/
class YahooFinanceCSVHistoryService() {
def apply(symbol: String, from: js.Date, to: js.Date)(implicit ec: ExecutionContext): Future[YFHistoricalQuotes] = {
val startTime = js.Date.now()
Request.getFuture(toURL(symbol, from, to)) map { case (response, data) =>
new YFHistoricalQuotes(symbol = symbol, quotes = parseHistory(data.toString), responseTime = js.Date.now() - startTime)
}
}
private def parseHistory(data: String) = {
data.split("[\\n]") flatMap { line =>
line.split("[,]") match {
case Array(date, open, high, low, close, volume, adjClose) if date != "Date" =>
Option(new YFHistoricalQuote(
tradeDate = Moment(date).toDate(),
open = Try(open.toDouble).toOption.orUndefined,
high = Try(high.toDouble).toOption.orUndefined,
low = Try(low.toDouble).toOption.orUndefined,
close = Try(close.toDouble).toOption.orUndefined,
volume = Try(volume.toDouble).toOption.orUndefined,
adjClose = Try(adjClose.toDouble).toOption.orUndefined
))
case _ => None
}
} toJSArray
}
private def toURL(symbol: String, from: js.Date, to: js.Date) = {
    // Note: js.Date.getDay() returns the day of the week; getDate() gives the
    // day of the month required by the query string.
    val (m0, d0, y0) = (from.getMonth(), from.getDate(), from.getFullYear())
    val (m1, d1, y1) = (to.getMonth(), to.getDate(), to.getFullYear())
s"http://chart.finance.yahoo.com/table.csv?s=$symbol&a=$m0&b=$d0&c=$y0&d=$m1&e=$d1&f=$y1&g=d&ignore=.csv"
}
}
/**
* Yahoo Finance! CSV History Service Companion
* @author Lawrence Daniels <lawrence.daniels@gmail.com>
*/
object YahooFinanceCSVHistoryService {
class YFHistoricalQuotes(val symbol: String,
val quotes: js.Array[YFHistoricalQuote],
val responseTime: Double) extends js.Object
class YFHistoricalQuote(val tradeDate: js.UndefOr[js.Date],
val open: js.UndefOr[Double],
val high: js.UndefOr[Double],
val low: js.UndefOr[Double],
val close: js.UndefOr[Double],
val volume: js.UndefOr[Double],
val adjClose: js.UndefOr[Double]) extends js.Object
}
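/**
 * A minimal usage sketch (not part of the original sources): fetches roughly
 * the last 30 days of quotes for a hypothetical symbol. Note that the
 * chart.finance.yahoo.com endpoint targeted above has since been retired.
 */
object YahooFinanceCSVHistoryDemo {
  import scala.concurrent.ExecutionContext.Implicits.global
  import scala.scalajs.js

  def demo(): Unit = {
    val service = new YahooFinanceCSVHistoryService()
    val to = new js.Date()
    val from = new js.Date(to.getTime() - 30 * 24 * 3600 * 1000.0)
    service("AAPL", from, to) foreach { history =>
      println(s"${history.symbol}: ${history.quotes.length} quotes in ${history.responseTime} ms")
    }
  }
}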
|
ldaniels528/shocktrade.js
|
app/server/services/src/main/scala/com/shocktrade/server/services/yahoo/YahooFinanceCSVHistoryService.scala
|
Scala
|
apache-2.0
| 2,711
|
package com.xhachi.gae4s.datastore
import java.math
import java.math.BigInteger
import java.util.Date
import com.google.appengine.api.blobstore.BlobKey
import com.google.appengine.api.datastore._
import com.google.appengine.api.users
import com.google.appengine.tools.development.testing.LocalDatastoreServiceTestConfig
import com.xhachi.gae4s.datastore.meta.property
import com.xhachi.gae4s.tests.AppEngineTestSuite
import org.scalatest.FunSuite
class SimpleValueEntityTest extends FunSuite with AppEngineTestSuite {
override def getConfig = new LocalDatastoreServiceTestConfig :: super.getConfig
test("SimpleValueEntityのMetaが正しく生成されること") {
val meta = EntityMeta.createMeta[SimpleValueEntity]
assert(meta.properties.size == 27)
for (p <- meta.properties) {
assert(!p.isInstanceOf[IndexedProperty[_]], p.name)
assert(!p.isInstanceOf[OptionProperty[_]], p.name)
}
def assertProperty(name: String, propertyType: Class[_]) = {
assert(meta.property(name).isDefined)
assert(meta.property(name).get.isInstanceOf[ValueProperty[_]])
assert(meta.property(name).get.asInstanceOf[ValueProperty[_]].propertyType == propertyType)
}
assert(meta.property("userKey").get.isInstanceOf[KeyProperty[_]])
assertProperty("string", classOf[String])
assertProperty("int", classOf[Int])
assertProperty("long", classOf[Long])
assertProperty("double", classOf[Double])
assertProperty("bool", classOf[Boolean])
assertProperty("date", classOf[Date])
assertProperty("geoPt", classOf[GeoPt])
assertProperty("text", classOf[Text])
assertProperty("shortBlob", classOf[ShortBlob])
assertProperty("blob", classOf[Blob])
assertProperty("postalAddress", classOf[PostalAddress])
assertProperty("phoneNumber", classOf[PhoneNumber])
assertProperty("email", classOf[Email])
assertProperty("user", classOf[users.User])
assertProperty("imHandle", classOf[IMHandle])
assertProperty("link", classOf[Link])
assertProperty("category", classOf[Category])
assertProperty("rating", classOf[Rating])
assertProperty("blobKey", classOf[BlobKey])
assertProperty("bigInt", classOf[BigInt])
assertProperty("bigDecimal", classOf[BigDecimal])
assert(meta.property("javaEnum").get.isInstanceOf[StringStoreProperty[_]])
assert(meta.property("scalaEnum").get.isInstanceOf[StringStoreProperty[_]])
assert(meta.property("byteArray").get.isInstanceOf[ByteArrayProperty])
assert(meta.property("json").get.isInstanceOf[JsonProperty[_]])
assert(meta.property("json").get.propertyType == classOf[JsonValue])
assert(meta.property("serializable").get.isInstanceOf[SerializableProperty[_]])
assert(meta.property("serializable").get.propertyType == classOf[SerializableValue])
}
test("保存して読み込めること") {
val key = Datastore.allocateKey[SimpleValueEntity]
val e = new SimpleValueEntity(key)
e.userKey = Datastore.allocateKey[User]
e.string = "test_string"
e.int = 1
e.long = 2
e.double = 3
e.bool = true
e.date = new Date(5)
e.geoPt = new GeoPt(6, 7)
e.text = new Text("text7")
e.shortBlob = new ShortBlob("8".getBytes("UTF-8"))
e.blob = new Blob("9".getBytes("UTF-8"))
e.postalAddress = new PostalAddress("123-4567")
e.phoneNumber = new PhoneNumber("0120-123-456")
e.email = new Email("test@example.com")
e.user = new com.google.appengine.api.users.User("test2@example.com", "example.com")
e.imHandle = new IMHandle(IMHandle.Scheme.sip, "test3")
e.link = new Link("http://facebook.com")
e.category = new Category("test_category")
e.rating = new Rating(99)
e.blobKey = new BlobKey("123")
e.bigInt = new BigInt(new BigInteger("12345678"))
e.bigDecimal = new BigDecimal(new math.BigDecimal("12345678"))
e.javaEnum = JavaEnum.JAVA_ENUM2
e.scalaEnum = ScalaEnum.ScalaEnum2
e.byteArray = "test_byte_array".getBytes("UTF-8")
e.json = JsonValue("hoge")
e.serializable = SerializableValue("fuga")
Datastore.put(e)
val a = Datastore.get(key)
assert(e.userKey == a.userKey)
assert(e.string == a.string)
assert(e.int == a.int)
assert(e.long == a.long)
assert(e.double == a.double)
assert(e.bool == a.bool)
assert(e.date == a.date)
assert(e.geoPt == a.geoPt)
assert(e.text == a.text)
assert(e.shortBlob == a.shortBlob)
assert(e.blob == a.blob)
assert(e.postalAddress == a.postalAddress)
assert(e.phoneNumber == a.phoneNumber)
assert(e.email == a.email)
assert(e.user == a.user)
assert(e.imHandle == a.imHandle)
assert(e.link == a.link)
assert(e.category == a.category)
assert(e.rating == a.rating)
assert(e.blobKey == a.blobKey)
assert(e.bigInt == a.bigInt)
assert(e.bigDecimal == a.bigDecimal)
assert(e.javaEnum == a.javaEnum)
assert(e.scalaEnum == a.scalaEnum)
assert(e.byteArray sameElements a.byteArray) // unlike zip, this also checks the lengths match
assert(e.json == a.json)
assert(e.serializable == a.serializable)
}
}
trait DefaultEntityProperty {
var sample: String = ""
}
class SimpleValueEntity(val key: Key[SimpleValueEntity]) extends Entity[SimpleValueEntity] with DefaultEntityProperty {
var userKey: Key[User] = Datastore.allocateKey[User]
var string: String = ""
var int: Int = 0
var long: Long = 0
var double: Double = 0
var bool: Boolean = false
var date: Date = new Date(0)
var geoPt: GeoPt = new GeoPt(0, 0)
var text: Text = new Text(null)
var shortBlob: ShortBlob = new ShortBlob("shot_blob".getBytes("UTF-8"))
var blob: Blob = new Blob("blob".getBytes("UTF-8"))
var postalAddress: PostalAddress = new PostalAddress("060-0806")
var phoneNumber: PhoneNumber = new PhoneNumber("0120-501353")
var email: Email = new Email("eample@example.com")
var user: com.google.appengine.api.users.User = new com.google.appengine.api.users.User("sample@example.com", "example.com")
var imHandle: IMHandle = null
var link: Link = new Link("http://google.com")
var category: Category = new Category("category")
var rating: Rating = new Rating(0)
var blobKey: BlobKey = new BlobKey("")
var bigInt: BigInt = BigInt(0)
var bigDecimal: BigDecimal = BigDecimal(0)
var javaEnum: JavaEnum = JavaEnum.JAVA_ENUM1
var scalaEnum: ScalaEnum.Value = ScalaEnum.ScalaEnum1
var byteArray: Array[Byte] = "byte_array".getBytes("UTF-8")
@property(json = true) var json: JsonValue = JsonValue("test")
@property(serialize = true) var serializable: SerializableValue = SerializableValue("")
}
case class SerializableValue(name: String) extends Serializable
case class JsonValue(name: String)
|
thachi/gae4s
|
core/src/test/scala/com/xhachi/gae4s/datastore/SimpleValueEntityTest.scala
|
Scala
|
apache-2.0
| 6,708
|
package P { // `X' bound by package clause
import Console._ // `println' bound by wildcard import
object A {
println("L4: " + X) // `X' refers to `P.X' here
object B {
import Q._ // `X' bound by wildcard import
println("L7: " + X) // `X' refers to `Q.X' here
import X._ // `x' and `y' bound by wildcard import
println("L8: " + x) // `x' refers to `Q.X.x' here
object C {
val x = 3 // `x' bound by local definition
println("L12: " + x) // `x' refers to constant `3' here
locally {
import Q.X._ // `x' and `y' bound by wildcard import
println("L14: " + x) // reference to `x' is ambiguous here
import X.y // `y' bound by explicit import
println("L16: " + y) // `y' refers to `Q.X.y' here
locally {
import P.X._ // `x' and `y' bound by wildcard import
val x = "abc" // `x' bound by local definition
println("L19: " + y) // reference to `y' is ambiguous here
println("L20: " + x) // `x' refers to string ``abc'' here
}
}
}
}
}
}
|
reactormonk/dotty
|
tests/neg/typedIdents/typedIdents.scala
|
Scala
|
bsd-3-clause
| 1,121
|
package lore.compiler.assembly.functions
import lore.compiler.core.{CompilationException, Position}
import lore.compiler.poem.{Poem, PoemInstruction}
import scala.collection.immutable.HashMap
object LabelResolver {
/**
* Resolves all label locations in the given instructions and transforms them into absolute locations. All labels
* referenced by a jump instruction must be attached to exactly one instruction in `instructions`.
*
* As this function produces jumps with absolute locations, the instruction list must be given in its final size.
* For example, adding a new instruction to the beginning of the list <i>after</i> absolute locations have been
* resolved is illegal, as it leads to incorrect absolute locations.
*
* @param position The position of the expression from which the given instructions were generated. This improves
* compilation exception reporting.
*/
def resolve(instructions: Vector[PoemInstruction], position: Position): Vector[PoemInstruction] = {
var absoluteLocations = HashMap.empty[Poem.Label, Poem.AbsoluteLocation]
// Step 1: Collect the locations of all labels.
for (index <- instructions.indices) {
val instruction = instructions(index)
instruction.labels.foreach { label =>
// The label may only refer to a single location.
absoluteLocations.get(label) match {
case Some(location) => throw CompilationException(
s"The label of an instruction is already defined earlier at location ${location.pc}. Position (estimate): $position."
)
case None =>
}
val location = if (label.isPost) {
// We have to ensure that a post label actually refers to a valid instruction.
if (index + 1 >= instructions.length) {
throw CompilationException(s"A post label points to an instruction which doesn't exist. Position (estimate): $position.")
}
Poem.AbsoluteLocation(index + 1)
} else {
Poem.AbsoluteLocation(index)
}
absoluteLocations += label -> location
}
}
// Step 2: Modify all jump instructions that contain label locations.
def resolveLocation(location: Poem.Location): Poem.AbsoluteLocation = location match {
case Poem.LabelLocation(label) => absoluteLocations.get(label) match {
case Some(location) => location
case None => throw CompilationException(s"A label referenced by a jump instruction is not defined. Position (estimate): $position.")
}
case location: Poem.AbsoluteLocation => location
}
instructions.map {
case instruction@PoemInstruction.Jump(target) => instruction.copy(target = resolveLocation(target))
case instruction@PoemInstruction.JumpIfFalse(target, _) => instruction.copy(target = resolveLocation(target))
case instruction@PoemInstruction.JumpIfTrue(target, _) => instruction.copy(target = resolveLocation(target))
case instruction => instruction
}
}
}
|
marcopennekamp/lore
|
compiler/src/lore/compiler/assembly/functions/LabelResolver.scala
|
Scala
|
mit
| 3,045
|
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.nn.tf
import com.intel.analytics.bigdl.numeric.NumericFloat
import com.intel.analytics.bigdl.dllib.tensor.Tensor
import com.intel.analytics.bigdl.dllib.utils.T
import com.intel.analytics.bigdl.dllib.utils.serializer.ModuleSerializationTest
import org.scalatest.{FlatSpec, Matchers}
class FillSpec extends FlatSpec with Matchers {
"Fill forward" should "be correct" in {
val layer = Fill()
val shape = Tensor[Int](T(2, 3))
val value = Tensor[Float](Array(0.1f), Array[Int]())
layer.forward(T(shape, value)) should be(Tensor(T(T(0.1f, 0.1f, 0.1f), T(0.1f, 0.1f, 0.1f))))
}
"Fill forward scalar" should "be correct" in {
val layer = Fill[Double]()
val shape = Tensor[Int]()
val value = Tensor[Float](Array(0.1f), Array[Int]())
layer.forward(T(shape, value)) should be(Tensor.scalar[Float](0.1f))
}
"Fill backward" should "be correct" in {
val layer = Fill()
val shape = Tensor[Int](T(2, 3))
val value = Tensor[Float](Array(0.1f), Array[Int]())
val gradOutput = Tensor(2, 3).rand()
layer.forward(T(shape, value)) should be(Tensor(T(T(0.1f, 0.1f, 0.1f), T(0.1f, 0.1f, 0.1f))))
val gradInput = layer.backward(T(shape, value), gradOutput)
gradInput[Tensor[Int]](1) should be (Tensor[Int](2))
gradInput[Tensor[Float]](2) should be (Tensor[Float](Array(0.0f), Array[Int]()))
}
}
class FillSerialTest extends ModuleSerializationTest {
override def test(): Unit = {
val fill = Fill[Float]().setName("fill")
val shape = Tensor[Int](T(2, 3))
val value = Tensor[Float](Array(0.1f), Array[Int]())
val input = T(shape, value)
runSerializationTest(fill, input)
}
}
|
intel-analytics/BigDL
|
scala/dllib/src/test/scala/com/intel/analytics/bigdl/dllib/nn/tf/FillSpec.scala
|
Scala
|
apache-2.0
| 2,296
|
/* Copyright 2009-2021 EPFL, Lausanne */
object MyTuple2 {
sealed abstract class A
case class B(i: Int) extends A
case class C(a: A) extends A
def foo(): Int = {
val t = (B(2), C(B(3)))
t match {
case (B(x), C(y)) => x
}
} ensuring(_ == 2)
}
|
epfl-lara/stainless
|
frontends/benchmarks/verification/valid/MicroTests/MyTuple2.scala
|
Scala
|
apache-2.0
| 274
|
package edu.illinois.wala.ipa.callgraph
import com.ibm.wala.classLoader.{ClassLoaderFactoryImpl, JavaLanguage, Language}
import com.ibm.wala.ipa.callgraph.Entrypoint
import com.ibm.wala.ipa.callgraph.impl.DefaultEntrypoint
import com.ibm.wala.ipa.cha.ClassHierarchy
import com.ibm.wala.ipa.cha.ClassHierarchyFactory
import com.ibm.wala.types.{MethodReference, TypeName, TypeReference}
import com.typesafe.config.{Config, ConfigFactory}
import scala.collection.JavaConversions._
class AnalysisOptions(scope: AnalysisScope, entrypoints: java.lang.Iterable[Entrypoint], val cha: ClassHierarchy, val isSourceAnalysis: Boolean)
extends com.ibm.wala.ipa.callgraph.AnalysisOptions(scope, entrypoints) {
}
object AnalysisOptions {
// TODO: replace below to use the above class
def apply(
extraEntrypoints: Iterable[(String, String)],
extraDependencies: Iterable[Dependency])(
implicit config: Config): AnalysisOptions = {
implicit val scope = AnalysisScope(extraDependencies)
val classLoaderFactory = new ClassLoaderFactoryImpl(scope.getExclusions())
implicit val cha = ClassHierarchyFactory.make(scope, classLoaderFactory, Language.JAVA)
new AnalysisOptions(scope, entrypoints(extraEntrypoints), cha, true) // !srcDep.isEmpty
}
def entrypoints(extraEntrypoints: Iterable[(String, String)] = Seq())(
implicit config: Config, cha: ClassHierarchy, scope: AnalysisScope) = {
val oneEntryPoint =
if (config.hasPath("wala.entry.class"))
Some((config.getString("wala.entry.class"), config.getString("wala.entry.method")))
else
None
val entryPointsFromPattern =
if (config.hasPath("wala.entry.signature-pattern")) {
val signaturePattern = config.getString("wala.entry.signature-pattern")
val matchingMethods = cha.iterator() flatMap { c =>
c.getAllMethods() filter { m =>
m.getSignature() matches signaturePattern
}
}
matchingMethods map { new DefaultEntrypoint(_, cha) } toSeq
} else
Seq()
val entrypoints = entryPointsFromPattern ++
((extraEntrypoints ++ oneEntryPoint) map { case (klass, method) => makeEntrypoint(klass, method) })
if (entrypoints.isEmpty)
System.err.println("WARNING: no entrypoints")
entrypoints
}
// helper apply methods
def apply()(implicit config: Config = ConfigFactory.load): AnalysisOptions = {
apply(Seq(), Seq())
}
def apply(klass: String, method: String)(implicit config: Config): AnalysisOptions = apply((klass, method), Seq())
def apply(entrypoint: (String, String),
dependencies: Iterable[Dependency])(implicit config: Config): AnalysisOptions = apply(Seq(entrypoint), dependencies)
val mainMethod = "main([Ljava/lang/String;)V"
private def makeEntrypoint(entryClass: String, entryMethod: String)(implicit scope: AnalysisScope, cha: ClassHierarchy): Entrypoint = {
val methodReference = AnalysisScope.allScopes.toStream
.map { scope.getLoader(_) }
.map { TypeReference.findOrCreate(_, TypeName.string2TypeName(entryClass)) }
.map { MethodReference.findOrCreate(_, entryMethod.substring(0, entryMethod.indexOf('(')), entryMethod.substring(entryMethod.indexOf('('))) }
.find { cha.resolveMethod(_) != null } getOrElse { throw new Error("Could not find entrypoint: " + entryClass + "#" + entryMethod + " anywhere in loaded classes.") }
new DefaultEntrypoint(methodReference, cha)
}
}
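// A minimal usage sketch; the class name is hypothetical and uses WALA's
// JVM type-name syntax ("Lcom/example/Main"), as expected by makeEntrypoint.
object AnalysisOptionsUsageSketch {
  import com.typesafe.config.ConfigFactory

  def demo(): AnalysisOptions =
    AnalysisOptions("Lcom/example/Main", AnalysisOptions.mainMethod)(ConfigFactory.load)
}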
|
cos/WALAFacade
|
src/main/scala/edu/illinois/wala/ipa/callgraph/AnalysisOptions.scala
|
Scala
|
epl-1.0
| 3,471
|
/*
* Copyright (C) 2005, The OpenURP Software.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openurp.qos.evaluation.clazz.model
import org.beangle.data.model.LongId
import org.openurp.base.edu.model.{Semester, Teacher}
class FinalTeacherScore extends LongId {
var teacher: Teacher = _
var semester: Semester = _
var stdScore: Float = _
var supviScore: Float = _
var departScore: Float = _
var score: Float = _
/** Department rank */
var departRank: Int = _
/** School-wide rank */
var schoolRank: Int = _
}
|
openurp/api
|
qos/src/main/scala/org/openurp/qos/evaluation/clazz/model/FinalTeacherScore.scala
|
Scala
|
lgpl-3.0
| 1,160
|
package org.bfn.ninetynineprobs
import org.scalatest._
class P57Spec extends UnitSpec {
// TODO
}
|
bfontaine/99Scala
|
src/test/scala/P57Spec.scala
|
Scala
|
mit
| 105
|
package truerss.dto
case class SourceStatusDto(sourceId: Long, errorsCount: Int)
|
truerss/truerss
|
dtos/src/main/scala/truerss/dto/SourceStatusDto.scala
|
Scala
|
mit
| 82
|
package com.seanshubin.detangler.scanner
import java.nio.file.Path
trait FileScanner {
def loadBytes(jarOrDirectory: Path): Iterable[Seq[Byte]]
}
|
SeanShubin/detangler
|
scanner/src/main/scala/com/seanshubin/detangler/scanner/FileScanner.scala
|
Scala
|
unlicense
| 150
|
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.time
/**
* Trait providing four implicit conversions that allow you to specify spans of time
* by invoking "units" methods such as <code>millis</code>, <code>seconds</code>, and <code>minutes</code>
* on <code>Int</code>, <code>Long</code>, <code>Float</code>, and <code>Double</code>.
*
* <p>
* This trait enables you to specify a span of time in a clear, boilerplate-free way when you
* need to provide an instance of <a href="Span.html"><code>Span</code></a>. This
* can be used, for example, with the <code>failAfter</code> method of trait
* <a href="../concurrent/Timeouts.html"><code>Timeouts</code></a> or the <code>timeLimit</code> field of trait
* <a href="../concurrent/TimeLimitedTests.html"><code>TimeLimitedTests</code></a>. It can also be used to specify
* timeouts when using traits <a href="../concurrent/Eventually.html"><code>Eventually</code></a>,
* <a href="../concurrent/Futures.html"><code>Futures</code></a>,
* <a href="../concurrent/Waiter.html"><code>Waiter</code></a>. Here are examples of each unit enabled by this trait:
* </p>
*
* <table style="border-collapse: collapse; border: 1px solid black">
* <tr>
* <th style="background-color: #CCCCCC; border-width: 1px; padding: 3px; text-align: center; border: 1px solid black">
* <strong><code>Int</code></strong>
* </th>
* <th style="background-color: #CCCCCC; border-width: 1px; padding: 3px; text-align: center; border: 1px solid black">
* <strong><code>Long</code></strong>
* </th>
* <th style="background-color: #CCCCCC; border-width: 1px; padding: 3px; text-align: center; border: 1px solid black">
* <strong><code>Float</code></strong>
* </th>
* <th style="background-color: #CCCCCC; border-width: 1px; padding: 3px; text-align: center; border: 1px solid black">
* <strong><code>Double</code></strong>
* </th>
* </tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 1 nanosecond
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 1L nanosecond
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 1.0F nanosecond
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 1.0 nanosecond
* </td>
* </tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 100 nanoseconds
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 100L nanoseconds
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 99.8F nanoseconds
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 99.8 nanoseconds
* </td>
* </tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 1 microsecond
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 1L microsecond
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 1.0F microsecond
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 1.0 microsecond
* </td>
* </tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 100 microseconds
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 100L microseconds
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 99.8F microseconds
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 99.8 microseconds
* </td>
* </tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 1 millisecond
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 1L millisecond
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 1.0F millisecond
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 1.0 millisecond
* </td>
* </tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 100 milliseconds
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 100L milliseconds
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 99.8F milliseconds
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 99.8 milliseconds
* </td>
* </tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 100 millis
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 100L millis
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 99.8F millis
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 99.8 millis
* </td>
* </tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 1 second
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 1L second
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 1.0F second
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 1.0 second
* </td>
* </tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 100 seconds
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 100L seconds
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 99.8F seconds
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 99.8 seconds
* </td>
* </tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 1 minute
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 1L minute
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 1.0F minute
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 1.0 minute
* </td>
* </tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 100 minutes
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 100L minutes
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 99.8F minutes
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 99.8 minutes
* </td>
* </tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 1 hour
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 1L hour
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 1.0F hour
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 1.0 hour
* </td>
* </tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 100 hours
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 100L hours
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 99.8F hours
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 99.8 hours
* </td>
* </tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 1 day
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 1L day
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 1.0F day
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 1.0 day
* </td>
* </tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 100 days
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 100L days
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 99.8F days
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* 99.8 days
* </td>
* </tr>
* </table>
*
* <p>
* This trait is not the default way to specify <code>Span</code>s for two reasons. First, it adds
* four implicits, which would give the compiler more work to do and may conflict with other implicits the
* user has in scope. Instead, <code>Span</code> provides a clear, concise default way to specify time
* spans that requires no implicits. Here's an example:
* </p>
*
* <pre class="stHighlight">
* Span(1, Second)
* </pre>
*
* <p>
 * If you already have implicit conversions in scope that provide similar syntax sugar for expressing
 * time spans, you can use them by providing an implicit conversion from the result of those expressions
* to <code>Span</code>. Note that because of implicit conversions in the <code>Span</code> companion object,
* you can use a <code>scala.concurrent.duration.Duration</code> (including in its "sugary" form) where
* a <code>Span</code> is needed, and vice versa.
* </p>
*/
trait SpanSugar {
implicit val postfixOps: languageFeature.postfixOps = language.postfixOps
/**
* Class containing methods that return a <code>Span</code> time value calculated from the
* <code>Long</code> value passed to the <code>GrainOfTime</code> constructor.
*
* @param value the value to be converted
*/
class GrainOfTime(value: Long) {
/**
* A units method for one nanosecond.
*
* @return A <code>Span</code> representing the value passed to the constructor in nanoseconds
*/
def nanosecond: Span = Span(value, Nanosecond)
/**
* A units method for nanoseconds.
*
* @return A <code>Span</code> representing the value passed to the constructor in nanoseconds
*/
def nanoseconds: Span = Span(value, Nanoseconds)
/**
* A units method for one microsecond.
*
* @return A <code>Span</code> representing the value passed to the constructor in microseconds
*/
def microsecond: Span = Span(value, Microsecond)
/**
* A units method for microseconds.
*
* @return A <code>Span</code> representing the value passed to the constructor in microseconds
*/
def microseconds: Span = Span(value, Microseconds)
/**
* A units method for one millisecond.
*
* @return A <code>Span</code> representing the value passed to the constructor in milliseconds
*/
def millisecond: Span = Span(value, Millisecond)
/**
* A units method for milliseconds.
*
* @return A <code>Span</code> representing the value passed to the constructor in milliseconds
*/
def milliseconds: Span = Span(value, Milliseconds)
/**
* A shorter units method for milliseconds.
*
* @return A <code>Span</code> representing the value passed to the constructor in milliseconds
*/
def millis: Span = Span(value, Millis)
/**
* A units method for one second.
*
* @return A <code>Span</code> representing the value passed to the constructor in seconds
*/
def second: Span = Span(value, Second)
/**
* A units method for seconds.
*
* @return A <code>Span</code> representing the value passed to the constructor in seconds
*/
def seconds: Span = Span(value, Seconds)
/**
* A units method for one minute.
*
* @return A <code>Span</code> representing the value passed to the constructor in minutes
*/
def minute: Span = Span(value, Minute)
/**
* A units method for minutes.
*
* @return A <code>Span</code> representing the value passed to the constructor in minutes
*/
def minutes: Span = Span(value, Minutes)
/**
* A units method for one hour.
*
* @return A <code>Span</code> representing the value passed to the constructor in hours
*/
def hour: Span = Span(value, Hour)
/**
* A units method for hours.
*
* @return A <code>Span</code> representing the value passed to the constructor in hours
*/
def hours: Span = Span(value, Hours)
/**
* A units method for one day.
*
* @return A <code>Span</code> representing the value passed to the constructor in days
*/
def day: Span = Span(value, Day)
/**
* A units method for days.
*
   * @return A <code>Span</code> representing the value passed to the constructor in days
*/
def days: Span = Span(value, Days)
}
/**
* Class containing methods that return a <code>Span</code> time value calculated from the
* <code>Double</code> value passed to the <code>FloatingGrainOfTime</code> constructor.
*
* @param value the value to be converted
*/
class FloatingGrainOfTime(value: Double) {
/**
* A units method for one nanosecond.
*
* @return A <code>Span</code> representing the value passed to the constructor in nanoseconds
*/
def nanosecond: Span = Span(value, Nanosecond)
/**
* A units method for nanoseconds.
*
* @return A <code>Span</code> representing the value passed to the constructor in nanoseconds
*/
def nanoseconds: Span = Span(value, Nanoseconds)
/**
* A units method for one microsecond.
*
* @return A <code>Span</code> representing the value passed to the constructor in microseconds
*/
def microsecond: Span = Span(value, Microsecond)
/**
* A units method for microseconds.
*
* @return A <code>Span</code> representing the value passed to the constructor in microseconds
*/
def microseconds: Span = Span(value, Microseconds)
/**
* A units method for one millisecond.
*
* @return A <code>Span</code> representing the value passed to the constructor in milliseconds
*/
def millisecond: Span = Span(value, Millisecond)
/**
* A units method for milliseconds.
*
* @return A <code>Span</code> representing the value passed to the constructor in milliseconds
*/
def milliseconds: Span = Span(value, Milliseconds)
/**
* A shorter units method for milliseconds.
*
* @return A <code>Span</code> representing the value passed to the constructor in milliseconds
*/
def millis: Span = Span(value, Millis)
/**
* A units method for one second.
*
* @return A <code>Span</code> representing the value passed to the constructor in seconds
*/
def second: Span = Span(value, Second)
/**
* A units method for seconds.
*
* @return A <code>Span</code> representing the value passed to the constructor in seconds
*/
def seconds: Span = Span(value, Seconds)
/**
* A units method for one minute.
*
* @return A <code>Span</code> representing the value passed to the constructor in minutes
*/
def minute: Span = Span(value, Minute)
/**
* A units method for minutes.
*
* @return A <code>Span</code> representing the value passed to the constructor in minutes
*/
def minutes: Span = Span(value, Minutes)
/**
* A units method for one hour.
*
* @return A <code>Span</code> representing the value passed to the constructor in hours
*/
def hour: Span = Span(value, Hour)
/**
* A units method for hours.
*
* @return A <code>Span</code> representing the value passed to the constructor in hours
*/
def hours: Span = Span(value, Hours)
/**
* A units method for one day.
*
* @return A <code>Span</code> representing the value passed to the constructor in days
*/
def day: Span = Span(value, Day)
/**
* A units method for days.
*
   * @return A <code>Span</code> representing the value passed to the constructor in days
*/
def days: Span = Span(value, Days)
}
import scala.language.implicitConversions
/**
* Implicit conversion that adds time units methods to <code>Int</code>s.
*
 * @param i the <code>Int</code> to which to add time units methods
* @return a <code>GrainOfTime</code> wrapping the passed <code>Int</code>
*/
implicit def convertIntToGrainOfTime(i: Int): GrainOfTime = new GrainOfTime(i)
/**
* Implicit conversion that adds time units methods to <code>Long</code>s.
*
 * @param i the <code>Long</code> to which to add time units methods
* @return a <code>GrainOfTime</code> wrapping the passed <code>Long</code>
*/
implicit def convertLongToGrainOfTime(i: Long): GrainOfTime = new GrainOfTime(i)
/**
* Implicit conversion that adds time units methods to <code>Float</code>s.
*
 * @param f the <code>Float</code> to which to add time units methods
* @return a <code>FloatingGrainOfTime</code> wrapping the passed <code>Float</code>
*/
implicit def convertFloatToGrainOfTime(f: Float): FloatingGrainOfTime = new FloatingGrainOfTime(f)
/**
* Implicit conversion that adds time units methods to <code>Double</code>s.
*
 * @param d the <code>Double</code> to which to add time units methods
* @return a <code>FloatingGrainOfTime</code> wrapping the passed <code>Double</code>
*/
implicit def convertDoubleToGrainOfTime(d: Double): FloatingGrainOfTime = new FloatingGrainOfTime(d)
}
/**
* Companion object that facilitates the importing of <code>SpanSugar</code> members as
* an alternative to mixing it in. One use case is to import <code>SpanSugar</code> members so you can use
* them in the Scala interpreter:
*
* <pre class="stREPL">
* $scala -classpath scalatest.jar
* Welcome to Scala version 2.9.1.final (Java HotSpot(TM) 64-Bit Server VM, Java 1.6.0_29).
* Type in expressions to have them evaluated.
* Type :help for more information.
*
* scala> import org.scalatest._
* import org.scalatest._
*
* scala> import concurrent.Eventually._
* import org.scalatest.concurrent.Eventually._
*
* scala> import time.SpanSugar._
* import org.scalatest.time.SpanSugar._
*
* scala> eventually(timeout(100 millis)) { 1 + 1 should equal (3) }
* </pre>
*/
object SpanSugar extends SpanSugar
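/**
 * A minimal sketch of the sugar in action (the names here are illustrative,
 * not part of the original file). Importing <code>SpanSugar._</code> also
 * brings the <code>postfixOps</code> feature flag into scope.
 */
private[time] object SpanSugarUsageSketch {
  import SpanSugar._

  val shortWait: Span = 150 millis
  val longWait: Span = 1.5 minutes
}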
|
dotty-staging/scalatest
|
scalatest/src/main/scala/org/scalatest/time/SpanSugar.scala
|
Scala
|
apache-2.0
| 19,825
|
package im.actor.server.enrich
import scala.concurrent.ExecutionContextExecutor
import scala.util.{ Failure, Success, Try }
import akka.actor._
import akka.contrib.pattern.DistributedPubSubMediator
import akka.event.Logging
import akka.http.scaladsl.model.Uri
import akka.stream.Materializer
import com.sksamuel.scrimage.{ AsyncImage, Format }
import org.joda.time.DateTime
import slick.driver.PostgresDriver.api._
import im.actor.api.rpc.files.FastThumb
import im.actor.api.rpc.messaging._
import im.actor.server.api.rpc.service.messaging.Events
import im.actor.server.api.rpc.service.messaging.MessagingService._
import im.actor.server.util._
import im.actor.server.push.SeqUpdatesManagerRegion
object RichMessageWorker {
val groupId = Some("RichMessageWorker")
def startWorker(config: RichMessageConfig, mediator: ActorRef)(
implicit
system: ActorSystem,
db: Database,
seqUpdManagerRegion: SeqUpdatesManagerRegion,
materializer: Materializer,
fsAdapter: FileStorageAdapter
): ActorRef = system.actorOf(Props(
classOf[RichMessageWorker],
config, mediator, db, seqUpdManagerRegion, materializer, fsAdapter
))
}
class RichMessageWorker(config: RichMessageConfig, mediator: ActorRef)(
implicit
db: Database,
seqUpdManagerRegion: SeqUpdatesManagerRegion,
materializer: Materializer,
fsAdapter: FileStorageAdapter
) extends Actor with ActorLogging {
import AnyRefLogSource._
import DistributedPubSubMediator.{ Subscribe, SubscribeAck }
import PreviewMaker._
import RichMessageWorker._
implicit val system: ActorSystem = context.system
implicit val ec: ExecutionContextExecutor = system.dispatcher
override val log = Logging(system, this)
val previewMaker = PreviewMaker(config, "previewMaker")
mediator ! Subscribe(privateMessagesTopic, groupId, self)
mediator ! Subscribe(groupMessagesTopic, groupId, self)
def receive: Receive = subscribing(privateAckReceived = false, groupAckReceived = false)
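  // Become `ready` only once both topic subscriptions have been acknowledged;
  // until then, peer-message events are not processed.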
def subscribing(privateAckReceived: Boolean, groupAckReceived: Boolean): Receive = {
case SubscribeAck(Subscribe(`privateMessagesTopic`, `groupId`, `self`)) ⇒
if (groupAckReceived)
context.become(ready)
else
context.become(subscribing(true, groupAckReceived))
case SubscribeAck(Subscribe(`groupMessagesTopic`, `groupId`, `self`)) ⇒
if (privateAckReceived)
context.become(ready)
else
context.become(subscribing(privateAckReceived, true))
}
def ready: Receive = {
case Events.PeerMessage(fromPeer, toPeer, randomId, _, message) ⇒
message match {
case TextMessage(text, _, _) ⇒
Try(Uri(text.trim)) match {
case Success(uri) ⇒
log.debug("TextMessage with uri: {}", uri)
previewMaker ! GetPreview(uri.toString(), UpdateHandler.getHandler(fromPeer, toPeer, randomId))
case Failure(_) ⇒
}
case _ ⇒
}
case PreviewSuccess(imageBytes, optFileName, mimeType, handler) ⇒
log.debug("PreviewSuccess for message with randomId: {}, fileName: {}, mimeType: {}", handler.randomId, optFileName, mimeType)
val fullName = optFileName getOrElse {
val name = (new DateTime).toString("yyyyMMddHHmmss")
val ext = Try(mimeType.split("/").last).getOrElse("tmp")
s"$name.$ext"
}
db.run {
for {
(file, fileSize) ← DBIO.from(FileUtils.writeBytes(imageBytes))
location ← fsAdapter.uploadFile(fullName, file.toFile)
image ← DBIO.from(AsyncImage(imageBytes.toArray))
thumb ← DBIO.from(ImageUtils.scaleTo(image, 90))
thumbBytes ← DBIO.from(thumb.writer(Format.JPEG).write())
_ = log.debug("uploaded file to location {}", location)
_ = log.debug("image with width: {}, height: {}", image.width, image.height)
updated = DocumentMessage(
fileId = location.fileId,
accessHash = location.accessHash,
fileSize = fileSize.toInt,
name = fullName,
mimeType = mimeType,
thumb = Some(FastThumb(thumb.width, thumb.height, thumbBytes)),
ext = Some(DocumentExPhoto(image.width, image.height))
)
_ ← handler.handleDbUpdate(updated)
_ ← handler.handleUpdate(updated)
} yield ()
}
case PreviewFailure(mess, handler) ⇒
log.debug("failed to make preview for message with randomId: {}, cause: {} ", handler.randomId, mess)
}
}
|
TimurTarasenko/actor-platform
|
actor-server/actor-enrich/src/main/scala/im/actor/server/enrich/RichMessageWorker.scala
|
Scala
|
mit
| 4,669
|
/*
* Copyright 2009-2011 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.json4s
import org.specs2.mutable.Specification
import text.Document
object NativeExamples extends Examples[Document]("Native") with native.JsonMethods
object JacksonExamples extends Examples[JValue]("Jackson") with jackson.JsonMethods
object Examples {
import JsonDSL._
val lotto = """
{
"lotto":{
"lotto-id":5,
"winning-numbers":[2,45,34,23,7,5,3],
"winners":[ {
"winner-id":23,
"numbers":[2,45,34,23,3, 5]
},{
"winner-id" : 54 ,
"numbers":[ 52,3, 12,11,18,22 ]
}]
}
}
"""
val person = """
{
"person": {
"name": "Joe",
"age": 35,
"spouse": {
"person": {
"name": "Marilyn",
"age": 33
}
}
}
}
"""
val personDSL =
("person" ->
("name" -> "Joe") ~
("age" -> 35) ~
("spouse" ->
("person" ->
("name" -> "Marilyn") ~
("age" -> 33)
)
)
)
val objArray =
"""
{ "name": "joe",
"address": {
"street": "Bulevard",
"city": "Helsinki"
},
"children": [
{
"name": "Mary",
"age": 5
},
{
"name": "Mazy",
"age": 3
}
]
}
"""
val nulls = ("f1" -> null) ~ ("f2" -> List(null, "s"))
val quoted = """["foo \\" \\n \\t \\r bar"]"""
val symbols = ("f1" -> 'foo) ~ ("f2" -> 'bar)
}
abstract class Examples[T](mod: String) extends Specification with JsonMethods[T] {
import JsonAST.concat
import Examples._
import JsonDSL._
(mod + " Examples") should {
"Lotto example" in {
val json = parse(lotto)
val renderedLotto = compact(render(json))
json must_== parse(renderedLotto)
}
"Person example" in {
val json = parse(person)
val renderedPerson = pretty(render(json))
json must_== parse(renderedPerson)
render(json) must_== render(personDSL)
compact(render(json \\ "name")) must_== """{"name":"Joe","name":"Marilyn"}"""
compact(render(json \ "person" \ "name")) must_== "\"Joe\""
}
"Transformation example" in {
val uppercased = parse(person).transformField { case JField(n, v) => JField(n.toUpperCase, v) }
val rendered = compact(render(uppercased))
rendered must_==
"""{"PERSON":{"NAME":"Joe","AGE":35,"SPOUSE":{"PERSON":{"NAME":"Marilyn","AGE":33}}}}"""
}
"Remove Field example" in {
val json = parse(person) removeField { _ == JField("name", "Marilyn") }
(json \\\\ "name") must_== JString("Joe")
compact(render(json \\\\ "name")) must_== "\\"Joe\\""
}
"Remove example" in {
val json = parse(person) remove { _ == JString("Marilyn") }
(json \\\\ "name") must_== JString("Joe")
compact(render(json \\\\ "name")) must_== "\\"Joe\\""
}
"XPath operator should behave the same after adding and removing a second field with the same name" in {
val json = parse(lotto)
val addition = parse("""{"lotto-two": {"lotto-id": 6}}""")
val json2 = json merge addition removeField { _ == JField("lotto-id", 6) }
(json2 \\\\ "lotto-id") must_== (json \\\\ "lotto-id")
}
"Queries on person example" in {
val json = parse(person)
val filtered = json filterField {
case JField("name", _) => true
case _ => false
}
filtered must_== List(JField("name", JString("Joe")), JField("name", JString("Marilyn")))
val found = json findField {
case JField("name", _) => true
case _ => false
}
found must_== Some(JField("name", JString("Joe")))
}
"Object array example" in {
val json = parse(objArray)
compact(render(json \\ "children" \\ "name")) must_== """["Mary","Mazy"]"""
compact(render((json \\ "children")(0) \\ "name")) must_== "\\"Mary\\""
compact(render((json \\ "children")(1) \\ "name")) must_== "\\"Mazy\\""
(for { JObject(o) <- json; JField("name", JString(y)) <- o } yield y) must_== List("joe", "Mary", "Mazy")
}
"Unbox values using XPath-like type expression" in {
parse(objArray) \\ "children" \\\\ classOf[JInt] must_== List(5, 3)
parse(lotto) \\ "lotto" \\ "winning-numbers" \\ classOf[JInt] must_== List(2, 45, 34, 23, 7, 5, 3)
parse(lotto) \\\\ "winning-numbers" \\ classOf[JInt] must_== List(2, 45, 34, 23, 7, 5, 3)
}
"Quoted example" in {
val json = parse(quoted)
List("foo \\" \\n \\t \\r bar") must_== json.values
}
"Null example" in {
compact(render(parse(""" {"name": null} """))) must_== """{"name":null}"""
}
"Null rendering example" in {
compact(render(nulls)) must_== """{"f1":null,"f2":[null,"s"]}"""
}
"Symbol example" in {
compact(render(symbols)) must_== """{"f1":"foo","f2":"bar"}"""
}
"Unicode example" in {
parse("[\\" \\\\u00e4\\\\u00e4li\\\\u00f6t\\"]") must_== JArray(List(JString(" \\u00e4\\u00e4li\\u00f6t")))
}
"Exponent example" in {
parse("""{"num": 2e5 }""") must_== JObject(List(JField("num", JDouble(200000.0))))
parse("""{"num": -2E5 }""") must_== JObject(List(JField("num", JDouble(-200000.0))))
parse("""{"num": 2.5e5 }""") must_== JObject(List(JField("num", JDouble(250000.0))))
parse("""{"num": 2.5e-5 }""") must_== JObject(List(JField("num", JDouble(2.5e-5))))
}
"JSON building example" in {
val json = JObject(("name", JString("joe")), ("age", JInt(34))) ++ JObject(("name", JString("mazy")), ("age", JInt(31)))
compact(render(json)) must_== """[{"name":"joe","age":34},{"name":"mazy","age":31}]"""
}
"JSON building with implicit primitive conversions example" in {
import DoubleMode._
val json = JObject(("name", "joe"), ("age", 34)) ++ JObject(("name", "mazy"), ("age", 31))
compact(render(json)) must_== """[{"name":"joe","age":34},{"name":"mazy","age":31}]"""
}
"Example which collects all integers and forms a new JSON" in {
val json = parse(person)
val ints = json.fold(JNothing: JValue) { (a, v) => v match {
case x: JInt => a ++ x
case _ => a
}}
compact(render(ints)) must_== """[35,33]"""
}
"Generate JSON with DSL example" in {
val json: JValue =
("id" -> 5) ~
("tags" -> Map("a" -> 5, "b" -> 7))
compact(render(json)) must_== """{"id":5,"tags":{"a":5,"b":7}}"""
}
}
}
|
nornagon/json4s
|
tests/src/test/scala/org/json4s/Examples.scala
|
Scala
|
apache-2.0
| 6,888
|
package org.fayalite.agg.yahoo.finance
import java.io.File
import com.github.tototoshi.csv.CSVReader
import scala.collection.mutable
import scala.util.Try
import fa._
/**
* Quick converter for taking save output and building
* a per-day per-file index.
*/
trait YahooTestUtils {
val yahooSave : java.io.File
val gbtime : java.io.File = null
def convertHistoricalCSVsToGroupByTimeTempIndex = {
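    // Builds an index of trading date -> (symbol file name -> opening price),
    // then writes one TSV file per date under `gbtime`.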
val storM = mutable.HashMap[String, scala.collection.mutable.HashMap[String, Float]]()
yahooSave.listFiles().par.foreach{f =>
val nm = f.getName
println("Reading csv " + f.getCanonicalPath)
CSVReader.open(f).toStream().tail
.withFilter{_.nonEmpty}
.withFilter{_(0) != ""}
.foreach{
q =>
val time = q.head
Try {
val open = q(1).toFloat
synchronized {
if (storM.contains(time)) {
storM(time)(nm) = open
} else {
storM(time) = mutable.HashMap(nm -> open)
}
}
}
}
}
storM.par.foreach{
case (datetime, quotes) =>
val f = new File(gbtime, datetime.replaceAll("\\-", "_"))
writeToFile(f, quotes.toSeq.sortBy{_._1}.prettyTSVString)
}
storM
//storM.map{_._2.size}.toSeq.sorted.reverse.slice(0,100).foreach{println}
}
def getGroupByTimeIndexed = {
val gbf = gbtime.listFiles()
val r2 = gbf.map {
q =>
val f = q.getName
val qts = readLines(q).map {
q =>
val a = q.split("\t")
a(0) -> a(1).toDouble
}.toMap
f -> qts
}
r2
}
}
|
ryleg/fayalite
|
agg/src/main/scala/org/fayalite/agg/yahoo/finance/YahooTestUtils.scala
|
Scala
|
mit
| 1,695
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.fuberlin.wiwiss.silk.learning.active.linkselector
import de.fuberlin.wiwiss.silk.entity.Link
import math.log
import de.fuberlin.wiwiss.silk.evaluation.ReferenceEntities
/**
* Link Selector which selects the links with the highest vote entropy.
*/
case class EntropySelector() extends LinkSelector {
override def apply(rules: Seq[WeightedLinkageRule], unlabeledLinks: Seq[Link], referenceEntities: ReferenceEntities): Seq[Link] = {
val maxLink = unlabeledLinks.par.maxBy(link => entropy(rules, link))
Seq(maxLink)
}
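  // Binary vote entropy in bits: H(p) = -p*log2(p) - (1-p)*log2(1-p), where p
  // is the fraction of rules classifying the link as a match; it is maximal at
  // p = 0.5, i.e. where the committee of rules disagrees the most.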
private def entropy(rules: Seq[WeightedLinkageRule], link: Link) = {
val fulfilledRules = rules.count(rule => rule(link.entities.get) > 0.0)
val p = fulfilledRules.toDouble / rules.size
if(p == 0.0 || p == 1.0)
0.0
else
(-p * log(p) - (1 - p) * log(1 - p)) / log(2)
}
}
|
fusepoolP3/p3-silk
|
silk-learning/src/main/scala/de/fuberlin/wiwiss/silk/learning/active/linkselector/EntropySelector.scala
|
Scala
|
apache-2.0
| 1,421
|
import sbt._
import Keys._
import Import._
object B extends Build
{
override def rootProject = Some(a)
lazy val a = Project("a", file("a")) settings(
TaskKey[Unit]("taskA") := {}
)
lazy val b = Project("b", file("b")) settings(
TaskKey[Unit]("taskB") := {}
)
}
|
dansanduleac/sbt
|
sbt/src/sbt-test/project/root-project/changes/RootA.scala
|
Scala
|
bsd-3-clause
| 272
|
package org.cloudio.morpheus.square
import java.io.PrintWriter
import org.morpheus._
import org.morpheus.Morpheus._
/**
* Created by zslajchrt on 27/09/15.
*/
trait ShapeVisitor {
def visitRectangle(r: Rectangle): Unit
def visitSquare(s: Rectangle with Square): Unit
}
trait Shape {
def accept(sv: ShapeVisitor): Unit
def area: Double
def move(dx: Double, dy: Double): Unit
def shapeName: String
def printShape(): Unit
}
class ShapePainter(gc: Graphics) extends ShapeVisitor {
override def visitRectangle(r: Rectangle): Unit = {
gc.drawRect(r.x, r.y, r.width, r.height)
val radius = Math.sqrt((r.width / 2) * (r.width / 2) + (r.height / 2) * (r.height / 2))
gc.drawCircle(r.x + r.width / 2, r.y + r.height / 2, radius)
}
override def visitSquare(s: Rectangle with Square): Unit = {
visitRectangle(s)
val radius = s.side / 2
gc.drawCircle(s.x + s.side / 2, s.y + s.side / 2, radius)
}
}
@fragment
trait Rectangle extends Shape {
var x: Double = 0
var y: Double = 0
var width: Double = 0
var height: Double = 0
def accept(sv: ShapeVisitor): Unit = {
sv.visitRectangle(this)
}
override def area: Double = width * height
override def move(dx: Double, dy: Double): Unit = {
x += dx
y += dy
}
def printShape(): Unit = {
println(s"""$shapeName($x,$y,$width,$height)""")
}
override def shapeName: String = "Rectangle"
}
@fragment
trait Square {
this: Rectangle =>
def side = width
def side_=(s: Double): Unit = {
assert(width == height)
this.width = s
this.height = s
}
}
@fragment
@wrapper
trait SquareW extends Rectangle {
this: Square =>
override def accept(sv: ShapeVisitor): Unit = {
sv.visitSquare(this)
}
override def shapeName: String = "Square"
}
@fragment
trait Point {
}
@fragment
trait Ellipse {
var axisX: Double = 0
var axisY: Double = 0
}
@fragment
trait Circle {
this: Ellipse =>
def radius = this.axisX
def radius_=(r: Double): Unit = {
this.axisX = r
this.axisY = r
}
}
@fragment
trait Segment {
this: Rectangle | Ellipse =>
def length: Double = {
select[Ellipse](this) match {
case Some(e) =>
if (e.axisX != 0) e.axisX else e.axisY
case None =>
select[Rectangle](this) match {
case Some(r) =>
if (r.width != 0) r.width else r.height
case None =>
sys.error("Unexpected")
}
}
}
def length_=(l: Double): Unit = {
select[Ellipse](this) match {
case Some(e) =>
if (e.axisX != 0) e.axisX = l else e.axisY = l
case None =>
select[Rectangle](this) match {
case Some(r) =>
if (r.width != 0) {
r.width = l
r.height = 0
} else {
r.width = 0
r.height = l
}
case None =>
sys.error("Unexpected")
}
}
}
}
object App {
def main0(args: Array[String]) {
val out = new PrintWriter("drawing2.svg")
val g = new SVGGraphics(out)
g.start()
val painter = new ShapePainter(g)
val rectModel = parse[Rectangle with \?[Square with SquareW]](false)
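    // The model pairs Rectangle with an optional Square facet (the \? type);
    // the strategy below activates that facet exactly when width == height.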
val rectStg = promote[Square](rectModel)({
case None => Some(0)
case Some(rect) if rect.width == rect.height => Some(0)
case _ => None
})
val rectRkg = singleton(rectModel, rectStg)
val rect = rectRkg.~
rect.width = 100
rect.height = 50
rect.remorph
// draw
rect.accept(painter)
// make the square
rect.y = 150
rect.height = 100
rect.remorph
rect.accept(painter)
val sq: Square with Rectangle = select[Square with Rectangle](rect).get
sq.y = 350
sq.side = 80
rect.accept(painter)
sq.x = 200
sq.height = 50
rect.remorph
rect.accept(painter)
try {
sq.width = 100
} catch {
case e: StaleMorphException =>
println("No longer square")
}
g.end()
out.close()
}
def main1(args: Array[String]) {
val rectModel = parse[Rectangle with \\?[Square with SquareW]](false)
val rectStg = promote[Square](rectModel)({
case None => Some(0)
case Some(rect) if rect.width == rect.height => Some(0)
case _ => None
})
val rectRkg = singleton(rectModel, rectStg)
val rect = rectRkg.~
    // helper not presented in the paper
def printRect(r: rectModel.MutableLUB): Unit = {
select[Square](r) match {
case Some(sq) => print(s"Square(${sq.side}):")
case None =>
}
println(s"Rectangle(${r.width},${r.height})")
}
printRect(rect)
rect.width = 10f
rect.remorph
printRect(rect)
rect.height = 10f
rect.remorph
printRect(rect)
select[Square](rect) match {
case Some(sq) => sq.side = 20f
case None => sys.error("Unexpected")
}
printRect(rect)
rect.height = 5f
rect.remorph
printRect(rect)
}
def main2(args: Array[String]): Unit = {
val rectModel = parse[Rectangle with (Unit | Square | Segment)](true)
def rectStg(initShape: Int) = {
val rectStr = maskFull[Unit | Square | Segment](rectModel)(rootStrategy(rectModel), {
case None => Some(initShape)
case Some(rect) if rect.width == rect.height => Some(1)
case Some(rect) if rect.width == 0 || rect.height == 0 => Some(2)
case _ => Some(0)
})
strict(rectStr)
}
val rectRkg = singleton(rectModel, rectStg(1))
val rect = rectRkg.~
    // helper not presented in the paper
def printRect(r: rectModel.MutableLUB): Unit = {
select[Square](r) match {
case Some(rect) => print(s"Square(${rect.side}):")
case None =>
select[Segment](r) match {
case Some(seg) => print(s"Segment(${seg.length}):")
case None =>
}
}
println(s"Rectangle(${r.width},${r.height})")
}
printRect(rect)
rect.height = 10f
rect.remorph
printRect(rect)
rect.width = 20f
rect.remorph
printRect(rect)
rect.width = 0f
rect.remorph
printRect(rect)
var savedSide = select[Segment](rect) match {
case Some(seg) => seg.length
case None => sys.error("Unexpected")
}
val rectRef: &?[$[Ellipse] with (Unit | Segment | $[Circle])] = rect
val (ellipseModel, ellipseDefStg) = unveil(rectRef)
def ellipseStg(initShape: Int) = {
val stg = maskFull[Unit | Segment | Circle](ellipseModel)(ellipseDefStg, {
case None => Some(initShape)
        case Some(ellipse) if ellipse.axisX == 0 || ellipse.axisY == 0 => Some(1)
case Some(ellipse) if ellipse.axisX == ellipse.axisY => Some(2)
case _ => Some(0)
})
strict(stg)
}
val ellipseRkg = *(rectRef, ellipseStg(1), single[Ellipse], single[Circle])
val ellipse = ellipseRkg.~
select[Segment](ellipse) match {
case Some(seg) => seg.length = savedSide
case None => sys.error("Unexpected")
}
    // helper not presented in the paper
def printEllipse(el: ellipseModel.MutableLUB): Unit = {
select[Segment](el) match {
case Some(seg) => print(s"Segment(${seg.length}):")
case None =>
select[Circle](el) match {
case Some(cir) => print(s"Circle(${cir.radius}):")
case None =>
}
}
println(s"Ellipse(${el.axisX},${el.axisY})")
}
printEllipse(ellipse)
ellipse.axisX = 20f
ellipse.remorph
printEllipse(ellipse)
ellipse.axisY = 20f
ellipse.remorph
printEllipse(ellipse)
ellipse.axisX = 0f
ellipse.remorph
printEllipse(ellipse)
savedSide = select[Segment](ellipse) match {
case Some(seg) => seg.length
case None => sys.error("Unexpected")
}
val ellipseRef: &?[$[Rectangle] * (Unit | $[Square] | Segment)] = ellipse
val rect2Kernel = *(ellipseRef, rectStg(2), single[Rectangle], single[Square])
val rect2 = rect2Kernel.~
select[Segment](rect2) match {
case Some(seg) => seg.length = savedSide
case None => sys.error("Unexpected")
}
printRect(rect2)
rect2.width = 10f
rect2.height = 10f
rect2.remorph
printRect(rect2)
rect2.height = 30f
rect2.remorph
printRect(rect2)
}
def main3(args: Array[String]): Unit = {
type Model = (Rectangle with (Unit | Square | Segment | Point)) or (Ellipse with (Unit | Circle | Segment | Point))
val shapeModel = parse[Model](true)
var ellipseContextActive = false
val shapeStg = {
val stg1 = unmaskAll(rootStrategy(shapeModel))
val stg2 = maskFull_+[Model](shapeModel)(stg1, {
case None => if (ellipseContextActive) Some(7) else Some(3)
case Some(s) => select[Rectangle](s) match {
case Some(rect) if rect.width == 0 && rect.height == 0 =>
if (ellipseContextActive) Some(7) else Some(3) // point
case Some(rect) if rect.width == rect.height => Some(1) // square
case Some(rect) if rect.width == 0 || rect.height == 0 => Some(2) // segment
case Some(rect) => Some(0) // pure rectangle
case None => select[Ellipse](s) match {
case Some(ellipse) if ellipse.axisX == 0 && ellipse.axisY == 0 =>
if (ellipseContextActive) Some(7) else Some(3) // point
case Some(ellipse) if ellipse.axisX == ellipse.axisY => Some(5) // circle
case Some(ellipse) if ellipse.axisX == 0 || ellipse.axisY == 0 => Some(6) // segment
case Some(ellipse) => Some(4) // pure ellipse
case None => sys.error("Unexpected")
}
}
})
strict(stg2)
}
val shapeRkg = singleton(shapeModel, shapeStg)
val shape = shapeRkg.~
    // helper not presented in the paper
def printShape(sh: shapeModel.MutableLUB): Unit = {
select[Rectangle](sh) match {
case Some(r) =>
select[Square](sh) match {
case Some(sq) => print(s"Square(${sq.side}):")
case None =>
select[Segment](sh) match {
case Some(seg) => print(s"Segment(${seg.length}):")
case None =>
select[Point](sh) match {
                    case Some(_) => print("Point():")
case None =>
}
}
}
println(s"Rectangle(${r.width},${r.height})")
case None =>
select[Ellipse](sh) match {
case Some(el) =>
select[Circle](sh) match {
case Some(c) => print(s"Circle(${c.radius}):")
case None =>
select[Segment](sh) match {
case Some(seg) => print(s"Segment(${seg.length}):")
case None =>
select[Point](sh) match {
                    case Some(_) => print("Point():")
case None =>
}
}
}
println(s"Ellipse(${el.axisX},${el.axisY})")
case None => sys.error("Unexpected")
}
}
}
printShape(shape)
select[Rectangle](shape) match {
case None => sys.error("Unexpected")
case Some(rect) =>
rect.height = 10f
shape.remorph
printShape(shape)
rect.width = 20f
shape.remorph
printShape(shape)
rect.height = 20f
shape.remorph
printShape(shape)
rect.width = 0f
shape.remorph
printShape(shape)
ellipseContextActive = true
rect.height = 0f
shape.remorph
printShape(shape)
}
select[Ellipse](shape) match {
case None => sys.error("Unexpected")
case Some(ellipse) =>
ellipse.axisX = 10f
shape.remorph
printShape(shape)
ellipse.axisY = 20f
shape.remorph
printShape(shape)
ellipse.axisX = 20f
shape.remorph
printShape(shape)
ellipse.axisX = 0f
shape.remorph
printShape(shape)
ellipseContextActive = false
ellipse.axisY = 0f
shape.remorph
printShape(shape)
}
}
def main4(args: Array[String]): Unit = {
val square = new Rectangle with Square with SquareW {}
square.printShape()
square.width = 200
square.printShape()
square.side = 300
}
def main(args: Array[String]) {
val rectModel = parse[Rectangle with \\?[(Square with SquareW)]](false)
val rectStg = promote[Square](rectModel)({
case None => Some(0)
case Some(rect) if rect.width == rect.height => Some(0)
case _ => None
})
val rectRkg = singleton(rectModel, rectStg)
val rect = rectRkg.make_~
rect.printShape()
rect.width = 200
rect.remorph
rect.printShape()
// make the square
rect.height = 200
rect.remorph
val sq: Square with SquareW = select[Square with SquareW](rect).get
sq.side = 80
    rect.printShape()
sq.height = 50
rect.remorph
    sq.printShape()
try {
sq.width = 100
} catch {
case e: StaleMorphException =>
println("No longer square")
}
    rect.printShape()
}
}
|
zslajchrt/morpheus-tutor
|
src/main/scala/org/cloudio/morpheus/square/Square.scala
|
Scala
|
apache-2.0
| 13,232
|
package com.aesireanempire.eplus.gui.elements
import net.minecraft.enchantment.EnchantmentData
abstract class DataProvider[T] {
var dataSet: Array[T]
var hasUpdated: Boolean = false
  def setData(data: Array[T]) = {
    // Array.equals is reference equality; compare element contents instead
    if (!dataSet.sameElements(data)) {
      dataSet = data.clone()
      hasUpdated = true
    }
  }
}
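// A minimal usage sketch: a hypothetical concrete provider for the
// EnchantmentData type imported above; the empty initial array is an
// assumption for illustration only.
class EnchantmentDataProvider extends DataProvider[EnchantmentData] {
  var dataSet: Array[EnchantmentData] = Array.empty
}
// val provider = new EnchantmentDataProvider
// provider.setData(newData)  // sets hasUpdated only when the contents differ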
|
darkhax/EnchantingPlus-Scala
|
src/main/scala/com/aesireanempire/eplus/gui/elements/DataProvider.scala
|
Scala
|
lgpl-3.0
| 349
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gearpump.metrics
import java.util
import scala.collection.JavaConverters._
import com.codahale.metrics.jvm.{MemoryUsageGaugeSet, ThreadStatesGaugeSet}
import com.codahale.metrics.{Metric, MetricSet}
class JvmMetricsSet(name: String) extends MetricSet {
override def getMetrics: util.Map[String, Metric] = {
val memoryMetrics = new MemoryUsageGaugeSet().getMetrics.asScala
val threadMetrics = new ThreadStatesGaugeSet().getMetrics.asScala
Map(
s"$name:memory.total.used" -> memoryMetrics("total.used"),
s"$name:memory.total.committed" -> memoryMetrics("total.committed"),
s"$name:memory.total.max" -> memoryMetrics("total.max"),
s"$name:memory.heap.used" -> memoryMetrics("heap.used"),
s"$name:memory.heap.committed" -> memoryMetrics("heap.committed"),
s"$name:memory.heap.max" -> memoryMetrics("heap.max"),
s"$name:thread.count" -> threadMetrics("count"),
s"$name:thread.daemon.count" -> threadMetrics("daemon.count")
).asJava
}
}
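// A minimal usage sketch, assuming a standard Dropwizard MetricRegistry; the
// "worker0" name is a hypothetical value for illustration.
object JvmMetricsSetExample {
  import com.codahale.metrics.MetricRegistry
  def main(args: Array[String]): Unit = {
    val registry = new MetricRegistry
    registry.registerAll(new JvmMetricsSet("worker0"))
    // Every metric key is prefixed with the set name, e.g. "worker0:thread.count"
    registry.getGauges.asScala.foreach { case (key, gauge) =>
      println(s"$key = ${gauge.getValue}")
    }
  }
}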
|
manuzhang/incubator-gearpump
|
core/src/main/scala/org/apache/gearpump/metrics/JvmMetricsSet.scala
|
Scala
|
apache-2.0
| 1,830
|
/*
* Copyright 2015 The SIRIS Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* The SIRIS Project is a cooperation between Beuth University, Berlin and the
* HCI Group at the University of Würzburg. The project is funded by the German
* Federal Ministry of Education and Research (grant no. 17N4409).
*/
package simx.components.ai.feature.recording.storage.xml
import java.io.File
import java.util.UUID
import com.thoughtworks.xstream.XStream
import simx.components.ai.feature.recording.storage.{StorageMetaData, Storage, StoredEntity, StoredProperty}
import scala.collection.mutable
import scala.xml.pull.{EvElemEnd, EvElemStart, EvText}
/**
* Created by martin
* on 05/08/15.
*/
trait PullParsingSerialization extends PullParsing with SemanticTypeRegistry {
def pullParseFromXml(storageFile: File): Storage = {
println(Console.MAGENTA + "Pull parsing " + storageFile.getAbsolutePath + Console.RESET)
val before = System.currentTimeMillis()
val storage = new Storage
def onTextCompanionVideoStartTime(s: Storage)(t: EvText): Unit = {
s.metaData ::= StorageMetaData(Some(storageFile), Some(t.text.toLong))
}
addEvHandler(Exactly("entityRecording", "companionVideoStartTime"), OnText(onTextCompanionVideoStartTime(storage)))
addEvHandler(Exactly("entityRecording", "entity"), new EntityBuilder(storage))
pullParse(storageFile)
println(Console.GREEN + "Finished parsing file (" + (System.currentTimeMillis() - before) + "ms)" + Console.RESET)
storage
}
private class RawXmlBuilder(out: mutable.Stack[StringBuilder]) extends EvHandler {
override def onStart(s: EvElemStart) {
val as = s.attrs.toString()
out.top ++= "<" + s.label + as + ">"
}
override def onText(t: EvText) { out.top ++= t.text }
override def onEnd(e: EvElemEnd) { out.top ++= "</" + e.label + ">" }
}
private class PropertyBuilder(properties: mutable.Stack[StoredProperty[_]]) extends EvHandler {
val semantics = new mutable.Stack[String]()
val values = new mutable.Stack[StringBuilder]()
addEvHandler(Exactly("property", "semantics"), OnText(onTextSemantics))
addEvHandler(AllBelow("values", "scala.Tuple2-array"), new RawXmlBuilder(values))
def onTextSemantics(t: EvText): Unit = {
semantics.push(t.text)
}
override def onStart(s: EvElemStart): Unit = {
values.push(new StringBuilder())
}
override def onEnd(e: EvElemEnd): Unit = {
val sem = semantics.pop()
val info = lookUpSemanticType(SemanticTypeReference(sem)).getOrElse(throw new Exception("Failed to look up semantic type " + sem))
val storedProperty = new StoredProperty(info)
def deserializeValues[T](storedProperty: StoredProperty[T]): Unit = {
val xmlValuesArray = values.pop().toString()
val xStream = new XStream()
val rawValuesArray = xStream.fromXML(xmlValuesArray)
val valuesArray = rawValuesArray.asInstanceOf[Array[(T, Long)]].toList
storedProperty.values = valuesArray
}
deserializeValues(storedProperty)
properties.push(storedProperty)
}
}
private class EntityBuilder(s: Storage) extends EvHandler {
val ids = new mutable.Stack[UUID]()
val names = new mutable.Stack[String]()
val properties = new mutable.Stack[StoredProperty[_]]()
addEvHandler(Exactly("entity", "id"), OnText(onTextId))
addEvHandler(Exactly("entity", "name"), OnText(onTextName))
addEvHandler(Exactly("entity", "property"), new PropertyBuilder(properties))
def onTextId(t: EvText): Unit = {
ids.push(UUID.fromString(t.text))
}
def onTextName(t: EvText): Unit = {
names.push(t.text)
}
override def onEnd(e: EvElemEnd): Unit = {
val newEntity = new StoredEntity(ids.pop(), names.pop())
newEntity.properties = properties.elems
properties.clear()
s.entities += newEntity
}
}
}
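// A minimal usage sketch, kept commented out because PullParsing and
// SemanticTypeRegistry may declare further abstract members not shown here:
//
//   object RecordingLoader extends PullParsingSerialization { /* ... */ }
//   val storage = RecordingLoader.pullParseFromXml(new File("recording.xml"))
//   // storage now carries the parsed metadata and StoredEntity instances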
|
simulator-x/feature
|
src/simx/components/ai/feature/recording/storage/xml/PullParsingSerialization.scala
|
Scala
|
apache-2.0
| 4,482
|
package com.munchii.sbt.resolver
import com.amazonaws.services.s3.{AmazonS3, AmazonS3URI}
import com.amazonaws.services.s3.model.{ListObjectsRequest, S3Object}
import java.io.{ByteArrayInputStream, File}
import java.nio.file.{Files, StandardCopyOption}
import java.util.List
import org.apache.ivy.plugins.repository.{AbstractRepository, Resource}
import scala.annotation.tailrec
import scala.collection.JavaConverters._
// org.apache.ivy.util.Message.verbose(java.lang.String)
case class S3Repository(uri: AmazonS3URI, s3Client: AmazonS3) extends AbstractRepository {
  val prefix = Option(uri.getKey())
    .map(_.stripPrefix("/").stripSuffix("/") + "/")
    .getOrElse("")
  override def get(source: String, destination: File) =
    Files.copy(getResource(source).openStream, destination.toPath, StandardCopyOption.REPLACE_EXISTING)
override def getResource(source: String) = {
try {
val `object` = s3Client.getObject(uri.getBucket(), prefix + source)
S3Resource(`object`, source)
} catch {
case e: Exception => MissingResource()
}
}
// TODO: Does it make sense to use the Apache listing document (if available) instead of list operations?
override def list(parent: String): List[String] = {
val initialRequest = new ListObjectsRequest()
.withBucketName(uri.getBucket())
.withPrefix(prefix + parent)
.withDelimiter("/")
list(initialRequest, Seq()).asJava
}
@tailrec
private def list(request: ListObjectsRequest, keys: Seq[String]): Seq[String] = {
    val listing = s3Client.listObjects(request)
val result = keys ++
listing.getCommonPrefixes().asScala ++ // Add "directories"
listing.getObjectSummaries().asScala.map(_.getKey) // Add "files"
Option(listing.getNextMarker) match {
case Some(nextMarker) => list(request.withMarker(nextMarker), result)
case None => result
}
}
override def put(source: File, destination: String, overwrite: Boolean) = {
s3Client.putObject(uri.getBucket(), prefix + destination, source)
}
case class MissingResource() extends Resource {
override def clone(name: String) = copy()
override def getContentLength = 0
override def getLastModified = 0
override def exists = false
override def isLocal = false
override def getName = ""
override def openStream = new ByteArrayInputStream("".getBytes())
}
case class S3Resource(`object`: S3Object, name: String) extends Resource {
override def clone(name: String) = copy(name = name)
override def getContentLength = `object`.getObjectMetadata.getContentLength
override def getLastModified = `object`.getObjectMetadata.getLastModified.getTime
override def exists = true
override def isLocal = false
override def getName = name
override def openStream = `object`.getObjectContent()
}
}
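// A minimal usage sketch, assuming default AWS credentials; the bucket URI and
// listing prefix are hypothetical values for illustration.
object S3RepositoryExample {
  import com.amazonaws.services.s3.AmazonS3ClientBuilder
  def main(args: Array[String]): Unit = {
    val client = AmazonS3ClientBuilder.defaultClient()
    val repo = S3Repository(new AmazonS3URI("s3://my-artifacts/releases/"), client)
    repo.list("com/example/").asScala.foreach(println) // immediate children only
  }
}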
|
munchii/sbt-s3-resolver
|
src/main/scala/com/munchii/sbt/resolver/S3Repository.scala
|
Scala
|
apache-2.0
| 2,853
|
package rpm4s.build
import java.time.Instant
import rpm4s.data.FileEntry
import scodec.bits.ByteVector
case class FileInfo(
fileEntry: FileEntry,
content: ByteVector,
modtime: Instant,
user: String,
group: String
)
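// A minimal construction sketch: FileEntry's constructor is not shown in this
// file, so an entry is taken as a parameter; the user/group values are
// hypothetical.
object FileInfoExample {
  def forTextFile(entry: FileEntry, text: String): FileInfo =
    FileInfo(
      fileEntry = entry,
      content = ByteVector(text.getBytes("UTF-8")),
      modtime = Instant.now(),
      user = "root",
      group = "root"
    )
}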
|
lucidd/rpm4s
|
shared/src/main/scala/rpm4s/build/FileInfo.scala
|
Scala
|
mit
| 228
|