Dataset schema:
  code        string  (length 5 to 1M)
  repo_name   string  (length 5 to 109)
  path        string  (length 6 to 208)
  language    string  (1 distinct value)
  license     string  (15 distinct values)
  size        int64   (5 to 1M)
/******************************************************************************************************************\\ * Rapture JSON, version 2.0.0. Copyright 2010-2015 Jon Pretty, Propensive Ltd. * * * * The primary distribution site is http://rapture.io/ * * * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in complance * * with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed * * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License * * for the specific language governing permissions and limitations under the License. * \\******************************************************************************************************************/ package rapture.json import rapture.core._ import rapture.data._ import scala.reflect.macros._ import scala.annotation._ import language.experimental.macros import language.higherKinds object formatters extends formatters_1 { object compact { def apply[Ast <: JsonAst]()(implicit ast: Ast): Formatter[Ast] { type Out = String } = jsonFormatterImplicit[Ast] implicit def jsonFormatterImplicit[Ast <: JsonAst](implicit ast: Ast): Formatter[Ast] { type Out = String } = new Formatter[Ast] { type Out = String def format(json: Any): String = general(json, 0, ast, "", "") } } } private[json] class formatters_1 { /** Formats the JSON object for multi-line readability. */ protected def general[Ast <: JsonAst](json: Any, ln: Int, ast: Ast, pad: String = " ", brk: String = "\\n"): String = { val indent = pad*ln json match { case j => if(ast.isString(j)) { "\\""+ast.getString(j).replaceAll("\\\\\\\\", "\\\\\\\\\\\\\\\\").replaceAll("\\r", "\\\\\\\\r").replaceAll("\\n", "\\\\\\\\n").replaceAll("\\"", "\\\\\\\\\\"")+"\\"" } else if(ast.isBoolean(j)) { if(ast.getBoolean(j)) "true" else "false" } else if(ast.isNumber(j)) { val bd = ast.getBigDecimal(j) if(bd.isWhole) String(bd.toBigInt) else String(bd) } else if(ast.isArray(j)) { val arr = ast.getArray(j) if(arr.isEmpty) "[]" else List("[", arr map { v => s"${indent}${pad}${general(v, ln + 1, ast, pad, brk)}" } mkString s",${brk}", s"${indent}]") mkString brk } else if(ast.isObject(j)) { val keys = ast.getKeys(j) if(keys.isEmpty) "{}" else List("{", keys map { k => val inner = ast.dereferenceObject(j, k) s"""${indent}${pad}"${k}":${pad}${general(inner, ln + 1, ast, pad, brk)}""" } mkString s",${brk}", s"${indent}}") mkString brk } else if(ast.isNull(j)) "null" else if(j == DataCompanion.Empty) "empty" else "undefined" } } object humanReadable { def apply[Ast <: JsonAst]()(implicit ast: Ast): Formatter[Ast] { type Out = String } = jsonFormatterImplicit[Ast] implicit def jsonFormatterImplicit[Ast <: JsonAst](implicit ast: Ast): Formatter[Ast] { type Out = String } = new Formatter[Ast] { type Out = String def format(json: Any): String = general(json, 0, ast, " ", "\\n") } } }
joescii/rapture-json
src/formatters.scala
Scala
apache-2.0
3,879
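The formatter above is a single recursive walk that threads an indentation level and pad/break strings through the JSON AST. Below is a minimal, self-contained sketch of that same pattern over a toy ADT; the JValue hierarchy and render function are illustrative inventions, not Rapture's API.

// Illustrative sketch only: a toy JSON ADT plus a pretty-printer that, like the
// formatter above, threads an indentation level and pad/break strings through
// the recursion. The JValue hierarchy and render function are not Rapture's API.
object JsonRenderSketch {
  sealed trait JValue
  case class JString(s: String) extends JValue
  case class JNumber(n: BigDecimal) extends JValue
  case class JBool(b: Boolean) extends JValue
  case object JNull extends JValue
  case class JArray(items: List[JValue]) extends JValue
  case class JObject(fields: List[(String, JValue)]) extends JValue

  def render(j: JValue, ln: Int = 0, pad: String = "  ", brk: String = "\n"): String = {
    val indent = pad * ln
    j match {
      case JString(s)   => "\"" + s.replace("\\", "\\\\").replace("\"", "\\\"") + "\""
      case JNumber(n)   => if (n.isWhole) n.toBigInt.toString else n.toString
      case JBool(b)     => b.toString
      case JNull        => "null"
      case JArray(Nil)  => "[]"
      case JArray(items) =>
        items.map(v => indent + pad + render(v, ln + 1, pad, brk))
          .mkString("[" + brk, "," + brk, brk + indent + "]")
      case JObject(Nil) => "{}"
      case JObject(fields) =>
        fields.map { case (k, v) => s"""$indent$pad"$k":$pad${render(v, ln + 1, pad, brk)}""" }
          .mkString("{" + brk, "," + brk, brk + indent + "}")
    }
  }
}

For example, JsonRenderSketch.render(JObject(List("xs" -> JArray(List(JNumber(1), JNumber(2)))))) yields a two-space-indented multi-line string, while passing pad = "" and brk = "" collapses it to the compact single-line form.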
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.hive.client import java.io.File import java.lang.reflect.InvocationTargetException import java.net.{URL, URLClassLoader} import java.util import scala.collection.mutable import scala.language.reflectiveCalls import scala.util.Try import org.apache.commons.io.{FileUtils, IOUtils} import org.apache.spark.Logging import org.apache.spark.deploy.SparkSubmitUtils import org.apache.spark.util.Utils import org.apache.spark.sql.catalyst.util.quietly import org.apache.spark.sql.hive.HiveContext /** Factory for `IsolatedClientLoader` with specific versions of hive. */ private[hive] object IsolatedClientLoader { /** * Creates isolated Hive client loaders by downloading the requested version from maven. */ def forVersion( version: String, config: Map[String, String] = Map.empty, ivyPath: Option[String] = None, sharedPrefixes: Seq[String] = Seq.empty, barrierPrefixes: Seq[String] = Seq.empty): IsolatedClientLoader = synchronized { val resolvedVersion = hiveVersion(version) val files = resolvedVersions.getOrElseUpdate(resolvedVersion, downloadVersion(resolvedVersion, ivyPath)) new IsolatedClientLoader( version = hiveVersion(version), execJars = files, config = config, sharedPrefixes = sharedPrefixes, barrierPrefixes = barrierPrefixes) } def hiveVersion(version: String): HiveVersion = version match { case "12" | "0.12" | "0.12.0" => hive.v12 case "13" | "0.13" | "0.13.0" | "0.13.1" => hive.v13 case "14" | "0.14" | "0.14.0" => hive.v14 case "1.0" | "1.0.0" => hive.v1_0 case "1.1" | "1.1.0" => hive.v1_1 case "1.2" | "1.2.0" | "1.2.1" => hive.v1_2 } private def downloadVersion(version: HiveVersion, ivyPath: Option[String]): Seq[URL] = { val hiveArtifacts = version.extraDeps ++ Seq("hive-metastore", "hive-exec", "hive-common", "hive-serde") .map(a => s"org.apache.hive:$a:${version.fullVersion}") ++ Seq("com.google.guava:guava:14.0.1", "org.apache.hadoop:hadoop-client:2.4.0") val classpath = quietly { SparkSubmitUtils.resolveMavenCoordinates( hiveArtifacts.mkString(","), Some("http://www.datanucleus.org/downloads/maven2"), ivyPath, exclusions = version.exclusions) } val allFiles = classpath.split(",").map(new File(_)).toSet // TODO: Remove copy logic. val tempDir = Utils.createTempDir(namePrefix = s"hive-${version}") allFiles.foreach(f => FileUtils.copyFileToDirectory(f, tempDir)) tempDir.listFiles().map(_.toURI.toURL) } private def resolvedVersions = new scala.collection.mutable.HashMap[HiveVersion, Seq[URL]] } /** * Creates a Hive `ClientInterface` using a classloader that works according to the following rules: * - Shared classes: Java, Scala, logging, and Spark classes are delegated to `baseClassLoader` * allowing the results of calls to the `ClientInterface` to be visible externally. 
* - Hive classes: new instances are loaded from `execJars`. These classes are not * accessible externally due to their custom loading. * - ClientWrapper: a new copy is created for each instance of `IsolatedClassLoader`. * This new instance is able to see a specific version of hive without using reflection. Since * this is a unique instance, it is not visible externally other than as a generic * `ClientInterface`, unless `isolationOn` is set to `false`. * * @param version The version of hive on the classpath. used to pick specific function signatures * that are not compatible across versions. * @param execJars A collection of jar files that must include hive and hadoop. * @param config A set of options that will be added to the HiveConf of the constructed client. * @param isolationOn When true, custom versions of barrier classes will be constructed. Must be * true unless loading the version of hive that is on Sparks classloader. * @param rootClassLoader The system root classloader. Must not know about Hive classes. * @param baseClassLoader The spark classloader that is used to load shared classes. */ private[hive] class IsolatedClientLoader( val version: HiveVersion, val execJars: Seq[URL] = Seq.empty, val config: Map[String, String] = Map.empty, val isolationOn: Boolean = true, val rootClassLoader: ClassLoader = ClassLoader.getSystemClassLoader.getParent.getParent, val baseClassLoader: ClassLoader = Thread.currentThread().getContextClassLoader, val sharedPrefixes: Seq[String] = Seq.empty, val barrierPrefixes: Seq[String] = Seq.empty) extends Logging { // Check to make sure that the root classloader does not know about Hive. assert(Try(rootClassLoader.loadClass("org.apache.hadoop.hive.conf.HiveConf")).isFailure) /** All jars used by the hive specific classloader. */ protected def allJars = execJars.toArray protected def isSharedClass(name: String): Boolean = name.contains("slf4j") || name.contains("log4j") || name.startsWith("org.apache.spark.") || (name.startsWith("org.apache.hadoop.") && !name.startsWith("org.apache.hadoop.hive.")) || name.startsWith("scala.") || (name.startsWith("com.google") && !name.startsWith("com.google.cloud")) || name.startsWith("java.lang.") || name.startsWith("java.net") || sharedPrefixes.exists(name.startsWith) /** True if `name` refers to a spark class that must see specific version of Hive. */ protected def isBarrierClass(name: String): Boolean = name.startsWith(classOf[ClientWrapper].getName) || name.startsWith(classOf[Shim].getName) || barrierPrefixes.exists(name.startsWith) protected def classToPath(name: String): String = name.replaceAll("\\.", "/") + ".class" /** The classloader that is used to load an isolated version of Hive. */ private[hive] var classLoader: ClassLoader = if (isolationOn) { new URLClassLoader(allJars, rootClassLoader) { override def loadClass(name: String, resolve: Boolean): Class[_] = { val loaded = findLoadedClass(name) if (loaded == null) doLoadClass(name, resolve) else loaded } def doLoadClass(name: String, resolve: Boolean): Class[_] = { val classFileName = name.replaceAll("\\.", "/") + ".class" if (isBarrierClass(name)) { // For barrier classes, we construct a new copy of the class. 
val bytes = IOUtils.toByteArray(baseClassLoader.getResourceAsStream(classFileName)) logDebug(s"custom defining: $name - ${util.Arrays.hashCode(bytes)}") defineClass(name, bytes, 0, bytes.length) } else if (!isSharedClass(name)) { logDebug(s"hive class: $name - ${getResource(classToPath(name))}") super.loadClass(name, resolve) } else { // For shared classes, we delegate to baseClassLoader. logDebug(s"shared class: $name") baseClassLoader.loadClass(name) } } } } else { baseClassLoader } private[hive] def addJar(path: String): Unit = synchronized { val jarURL = new java.io.File(path).toURI.toURL // TODO: we should avoid of stacking classloaders (use a single URLClassLoader and add jars // to that) classLoader = new java.net.URLClassLoader(Array(jarURL), classLoader) } /** The isolated client interface to Hive. */ private[hive] def createClient(): ClientInterface = { if (!isolationOn) { return new ClientWrapper(version, config, baseClassLoader, this) } // Pre-reflective instantiation setup. logDebug("Initializing the logger to avoid disaster...") val origLoader = Thread.currentThread().getContextClassLoader Thread.currentThread.setContextClassLoader(classLoader) try { classLoader .loadClass(classOf[ClientWrapper].getName) .getConstructors.head .newInstance(version, config, classLoader, this) .asInstanceOf[ClientInterface] } catch { case e: InvocationTargetException => if (e.getCause().isInstanceOf[NoClassDefFoundError]) { val cnf = e.getCause().asInstanceOf[NoClassDefFoundError] throw new ClassNotFoundException( s"$cnf when creating Hive client using classpath: ${execJars.mkString(", ")}\n" + "Please make sure that jars for your version of hive and hadoop are included in the " + s"paths passed to ${HiveContext.HIVE_METASTORE_JARS}.") } else { throw e } } finally { Thread.currentThread.setContextClassLoader(origLoader) } } /** * The place holder for shared Hive client for all the HiveContext sessions (they share an * IsolatedClientLoader). */ private[hive] var cachedHive: Any = null }
pronix/spark
sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala
Scala
apache-2.0
9,601
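The loader above classifies every class name as shared (delegated to the application classloader), barrier (re-defined from bytes so each loader instance gets a private copy), or isolated (loaded only from the supplied jars). A stripped-down sketch of that three-way delegation follows; the class name IsolatingLoader and its predicate parameters are illustrative, not Spark's API.

// Stripped-down sketch of the three-way delegation described above; the class
// name, constructor parameters and predicates are illustrative, not Spark's API.
import java.net.{URL, URLClassLoader}

class IsolatingLoader(
    jars: Array[URL],
    base: ClassLoader,
    isShared: String => Boolean,
    isBarrier: String => Boolean
) extends URLClassLoader(jars, null) {

  override def loadClass(name: String, resolve: Boolean): Class[_] = {
    val already = findLoadedClass(name)
    if (already != null) already
    else if (isBarrier(name)) {
      // Barrier classes: re-define a fresh copy from the application loader's
      // bytes, so every IsolatingLoader instance gets its own private copy.
      val path = name.replace('.', '/') + ".class"
      val in = base.getResourceAsStream(path)
      try {
        val bytes = in.readAllBytes()
        defineClass(name, bytes, 0, bytes.length)
      } finally in.close()
    } else if (isShared(name)) {
      // Shared classes: delegate to the application loader so results stay
      // visible to the caller.
      base.loadClass(name)
    } else {
      // Isolated classes: resolve only against the supplied jar URLs.
      super.loadClass(name, resolve)
    }
  }
}

The null parent limits implicit delegation to the bootstrap loader, so anything not explicitly marked shared has to come from the supplied jars.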
import compiletime.*
import compiletime.ops.int.*

object Test extends App {

  inline def toInt[N]: Int =
    inline constValue[N] match {
      case _: S[n1] => 1 + toInt[n1]
      case 0 => 0
    }

  println(toInt[0])
  println(toInt[1])
  println(toInt[2])

  locally {
    inline def toInt[N]: Int =
      inline constValueOpt[N] match {
        case Some(_: S[n1]) => 1 + toInt[n1]
        case Some(0) => 0
        case None => 0
      }
    println(toInt[0])
    println(toInt[1])
    println(toInt[2])
  }

  val xs = List(1, 2, 3)

  inline def select(n: Int) =
    inline constValueOpt[n.type] match {
      case Some(0) => xs(0)
      case Some(1) => xs(1)
      case Some(2) => xs(2)
      case Some(_) => -1
    }

  println(select(0))
  println(select(1))
  println(select(2))
  println(select(3))

  final val idx = 0
  println(select(idx))
}
dotty-staging/dotty
tests/run/typelevel-peano.scala
Scala
apache-2.0
863
/** * Copyright 2010-2013 Artima, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package examples object FunSuiteExamples extends StyleTraitExamples { val name: String = "FunSuite" val description: String = """For teams coming from xUnit, FunSuite feels comfortable and familiar while still giving some of the benefits of BDD: FunSuite makes it easy to write descriptive test names, natural to write focused tests, and generates specification-like output that can facilitate communication among stakeholders.""" /* val exampleUsage: String = """<span class="stImport">import org.scalatest.FunSuite</span> |<span class="stReserved">class</span> <span class="stType">SetSuite</span> <span class="stReserved">extends</span> <span class="stType">FunSuite</span> { | test(<span class="stLiteral">"An empty Set should have size 0"</span>) { assert(<span class="stType">Set</span>.empty.size === <span class="stLiteral">0</span>) } | test(<span class="stLiteral">"Invoking head on an empty Set should produce NoSuchElementException"</span>) { | intercept[<span class="stType">NoSuchElementException</span>] { <span class="stType">Set</span>.empty.head } | } |} """.stripMargin */ val exampleUsage: String = """<span class="stImport">import org.scalatest._</span> | |<span class="stReserved">class</span> <span class="stType">SetSpec</span> <span class="stReserved">extends</span> <span class="stType">FunSuite</span> { | <span class="stReserved">override</span> <span class="stReserved">def</span> withFixture(test: <span class="stType">NoArgTest</span>) = { <span class="stExplain">// Define a shared fixture</span> | <span class="stExplain">// Shared setup (run at beginning of each test)</span> | <span class="stReserved">try</span> test() | <span class="stReserved">finally</span> { | <span class="stExplain">// Shared cleanup (run at end of each test)</span> | } | } | <span class="stExplain">// Define tests with 'test', a test name string in parentheses, | // and test body in curly braces</span> | test(<span class="stLiteral">"An empty Set should have size 0"</span>) { | assert(<span class="stType">Set</span>.empty.size == <span class="stLiteral">0</span>) | } | <span class="stExplain">// To ignore a test, change 'test' to 'ignore'</span> | ignore(<span class="stLiteral">"Invoking head on an empty Set should produce NoSuchElementException"</span>) { | intercept[<span class="stType">NoSuchElementException</span>] { | <span class="stType">Set</span>.empty.head | } | } | <span class="stExplain">// Define a pending test by using (pending) for the body</span> | test(<span class="stLiteral">"An empty Set's isEmpty method should return false"</span>) (pending) | <span class="stExplain">// Tag a test by placing a tag object after the test name</span> | <span class="stImport">import tagobjects.Slow</span> | test(<span class="stLiteral">"An empty Set's nonEmpty method should return true"</span>, Slow) { | assert(!<span class="stType">Set</span>.empty.nonEmpty) | } |} | |<span class="stExplain">// Can also pass fixtures into tests with 
fixture.FunSuite</span> |<span class="stReserved">class</span> <span class="stType">StringSpec</span> <span class="stReserved">extends</span> <span class="stType">fixture.FunSuite</span> { | <span class="stReserved">type</span> FixtureParam = <span class="stType">String</span> <span class="stExplain">// Define the type of the passed fixture object</span> | <span class="stReserved">override</span> <span class="stReserved">def</span> withFixture(test: <span class="stType">OneArgTest</span>) = { | <span class="stExplain">// Shared setup (run before each test), including...</span> | <span class="stReserved">val</span> fixture = <span class="stLiteral">"a fixture object"</span> <span class="stExplain">// ...creating a fixture object</span> | <span class="stReserved">try</span> test(fixture) <span class="stExplain">// Pass the fixture into the test</span> | <span class="stReserved">finally</span> { | <span class="stExplain">// Shared cleanup (run at end of each test)</span> | } | } | test(<span class="stLiteral">"The passed fixture can be used in the test"</span>) { s =&gt; <span class="stExplain">// Fixture passed in as s</span> | assert(s == <span class="stLiteral">"a fixture object"</span>) | } |} | |@DoNotDiscover <span class="stExplain">// Disable discovery of a test class</span> |<span class="stReserved">class</span> <span class="stType">InvisibleSpec</span> <span class="stReserved">extends</span> <span class="stType">FunSuite</span> { <span class="stBlockComment">/*code omitted*/</span> } | |@Ignore <span class="stExplain">// Ignore all tests in a test class</span> |<span class="stReserved">class</span> <span class="stType">IgnoredSpec</span> <span class="stReserved">extends</span> <span class="stType">FunSuite</span> { <span class="stBlockComment">/*code omitted*/</span> } | |<span class="stImport">import tags.Slow</span> |@Slow <span class="stExplain">// Mark all tests in a test class with a tag</span> |<span class="stReserved">class</span> <span class="stType">SlowSpec</span> <span class="stReserved">extends</span> <span class="stType">FunSuite</span> { <span class="stBlockComment">/*code omitted*/</span> } |""".stripMargin val play2Example: String = """<span class="stImport">import org.scalatest._</span> |<span class="stImport">import play.api.test._</span> |<span class="stImport">import play.api.test.Helpers._</span> | |<span class="stReserved">class</span> <span class="stType">ExampleSpec</span> <span class="stReserved">extends</span> <span class="stType">FunSuite</span> <span class="stReserved">with</span> <span class="stType">Matchers</span> { | test(<span class="stLiteral">"Application should send 404 on a bad request"</span>) { | running(<span class="stType">FakeApplication</span>()) { route(<span class="stType">FakeRequest</span>(GET, "/boum")) shouldBe <span class="stType">None</span> } | } | test(<span class="stLiteral">"Application should send render the index page"</span>) { | running(<span class="stType">FakeApplication</span>()) { | <span class="stReserved">val</span> home = route(<span class="stType">FakeRequest</span>(GET, <span class="stLiteral">"/"</span>)).get | status(home) shouldBe OK | contentType(home) shouldBe <span class="stType">Some</span>(<span class="stLiteral">"text/html"</span>) | contentAsString(home) should include (<span class="stLiteral">"ScalaTest"</span>) | } | } |}""".stripMargin val doNotDiscover: String = """<span class="stImport">import org.scalatest._</span> |@DoNotDiscover |<span class="stReserved">class</span> <span 
class="stType">SetSuite</span> <span class="stReserved">extends</span> <span class="stType">FunSuite</span> { <span class="stBlockComment">/*code omitted*/</span> } """.stripMargin val ignoreTest: String = """<span class="stImport">import org.scalatest._</span> |<span class="stReserved">class</span> <span class="stType">SetSuite</span> <span class="stReserved">extends</span> <span class="stType">FunSuite</span> { | ignore(<span class="stLiteral">"An empty Set should have size 0"</span>) { <span class="stBlockComment">/*code omitted*/</span> } |}""".stripMargin val pendingTest: String = """<span class="stImport">import org.scalatest._</span> |<span class="stReserved">class</span> <span class="stType">SetSuite</span> <span class="stReserved">extends</span> <span class="stType">FunSuite</span> { | test(<span class="stLiteral">"An empty Set should have size 0"</span>) (pending) |}""".stripMargin val taggingTest: String = """<span class="stImport">import org.scalatest._</span> |<span class="stReserved">object</span> <span class="stType">SlowTest</span> <span class="stReserved">extends</span> <span class="stType">Tag</span>(<span class="stLiteral">"com.mycompany.tags.SlowTest"</span>) |<span class="stReserved">class</span> <span class="stType">SetSuite</span> <span class="stReserved">extends</span> <span class="stType">FunSuite</span> { | test(<span class="stLiteral">"An empty Set should have size 0"</span>, <span class="stType">SlowTest</span>) { | <span class="stBlockComment">/*code omitted*/</span> | } |}""".stripMargin val infoTest: String = """<span class="stImport">import org.scalatest._</span> |<span class="stReserved">class</span> <span class="stType">SetSuite</span> <span class="stReserved">extends</span> <span class="stType">FunSuite</span> { | test(<span class="stLiteral">"An empty Set should have size 0"</span>) { | info(<span class="stLiteral">"Some information."</span>) | <span class="stBlockComment">/*code omitted*/</span> | } |}""".stripMargin val fixtureNoArgTest: String = """<span class="stImport">import org.scalatest._</span> |<span class="stReserved">class</span> <span class="stType">SetSuite</span> <span class="stReserved">extends</span> <span class="stType">FunSuite</span> { | <span class="stReserved">def</span> setup() { <span class="stBlockComment">/*code omitted*/</span> } | <span class="stReserved">def</span> cleanup() { <span class="stBlockComment">/*code omitted*/</span> } | <span class="stReserved">override</span> <span class="stReserved">protected</span> <span class="stReserved">def</span> withFixture(test: <span class="stType">NoArgTest</span>) = { | setup() | <span class="stReserved">try</span> test() <span class="stReserved">finally</span> cleanup() | } |}""".stripMargin val fixtureOneArgTest: String = """<span class="stImport">import org.scalatest._</span> |<span class="stReserved">class</span> <span class="stType">SetSuite</span> <span class="stReserved">extends</span> <span class="stType">fixture.FunSuite</span> { | <span class="stReserved">def</span> setup() { <span class="stBlockComment">/*code omitted*/</span> } | <span class="stReserved">def</span> cleanup() { <span class="stBlockComment">/*code omitted*/</span> } | <span class="stReserved">type</span> FixtureParam = <span class="stType">String</span> | <span class="stReserved">override</span> <span class="stReserved">protected</span> <span class="stReserved">def</span> withFixture(test: <span class="stType">OneArgTest</span>) = { | setup() | <span class="stReserved">try</span> test(<span 
class="stLiteral">"this is a fixture param"</span>) <span class="stReserved">finally</span> cleanup() | } |}""".stripMargin val seleniumExample: String = """<span class="stImport">import org.scalatest._ |import selenium._</span> |<span class="stReserved">class</span> <span class="stType">BlogSpec</span> <span class="stReserved">extends</span> <span class="stType">FunSuite</span> <span class="stReserved">with</span> <span class="stType">WebBrowser</span> <span class="stReserved">with</span> <span class="stType">HtmlUnit</span> { | <span class="stReserved">val</span> host = <span class="stLiteral">"http://localhost:9000/"</span> | test(<span class="stLiteral">"The blog app home page should have the correct title"</span>) { | go to (host + <span class="stLiteral">"index.html"</span>) | pageTitle should be (<span class="stLiteral">"Awesome Blog"</span>) | } |}""".stripMargin }
jedesah/scalatest-website
app/examples/FunSuiteExamples.scala
Scala
apache-2.0
12,501
/* * Copyright 2015 PayPal * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.squbs.unicomplex import java.util.concurrent.TimeUnit import akka.actor.ActorSystem import akka.io.IO import akka.testkit.{ImplicitSender, TestKit} import com.typesafe.config.ConfigFactory import org.scalatest._ import org.scalatest.concurrent.AsyncAssertions import org.squbs.lifecycle.GracefulStop import org.squbs.unicomplex.dummysvcactor.RegisterTimeoutHandler import spray.can.Http import spray.http._ import spray.util.Utils import scala.util.Try object UnicomplexTimeoutSpec { val dummyJarsDir = getClass.getClassLoader.getResource("classpaths").getPath val classPaths = Array( "DummySvcActor" ) map (dummyJarsDir + "/" + _) val (_, port) = Utils.temporaryServerHostnameAndPort() val aConfig = ConfigFactory.parseString( s""" |squbs { | actorsystem-name = unicomplexTimeoutSpec | ${JMX.prefixConfig} = true |} |default-listener { | bind-port = $port |} |spray.can.server { | request-timeout = 5s |} """.stripMargin) val boot = UnicomplexBoot(aConfig) .createUsing {(name, config) => ActorSystem(name, config)} .scanComponents(classPaths) .initExtensions.start() } class UnicomplexTimeoutSpec extends TestKit(UnicomplexTimeoutSpec.boot.actorSystem) with ImplicitSender with WordSpecLike with Matchers with BeforeAndAfterAll with AsyncAssertions { implicit val timeout: akka.util.Timeout = Try(System.getProperty("test.timeout").toLong) map { millis => akka.util.Timeout(millis, TimeUnit.MILLISECONDS) } getOrElse Timeouts.askTimeout val port = system.settings.config getInt "default-listener.bind-port" override def afterAll() { Unicomplex(system).uniActor ! GracefulStop } "Unicomplex" must { "Cause a timeout event" in { system.settings.config getString "spray.can.server.request-timeout" should be ("5s") system.actorSelection("/user/DummySvcActor/dummysvcactor-DummySvcActor-handler") ! RegisterTimeoutHandler val path = "/dummysvcactor/timeout" IO(Http) ! HttpRequest(HttpMethods.GET, Uri(s"http://127.0.0.1:$port$path")) within(timeout.duration) { val timedOut = expectMsgType[Timedout] timedOut.request should matchPattern { case req: HttpRequest if req.uri.path.toString === path => } } } } }
tutufool/squbs
squbs-unicomplex/src/test/scala/org/squbs/unicomplex/UnicomplexTimeoutSpec.scala
Scala
apache-2.0
2,937
package edu.gemini.phase2.template.factory.impl.phoenix

import edu.gemini.spModel.gemini.phoenix.PhoenixParams

sealed trait PhoenixFilterGroup

object PhoenixFilterGroup {

  case object JHK extends PhoenixFilterGroup
  case object L   extends PhoenixFilterGroup
  case object M   extends PhoenixFilterGroup

  def forFilter(f: PhoenixParams.Filter): PhoenixFilterGroup = {
    import PhoenixParams.Filter._
    f match {
      case /* J */ J7799 | J8265 | J9232 | J9440 | J9671 |
           /* H */ H6073 | H6420 |
           /* K */ K4132 | K4220 | K4308 | K4396 | K4484 | K4578 | K4667 | K4748 => JHK
      case /* L */ L2462 | L2734 | L2870 | L3010 | L3100 | L3290 => L
      case /* M */ M1930 | M2030 | M2150 => M
    }
  }
}
arturog8m/ocs
bundle/edu.gemini.phase2.skeleton.servlet/src/main/scala/edu/gemini/phase2/template/factory/impl/phoenix/PhoenixFilterGroup.scala
Scala
bsd-3-clause
793
package com.github.opengrabeso.mixtio
package frontend
package views.settings_base

import java.time.ZonedDateTime

import io.udash.properties.model.ModelProperty
import org.scalajs.dom

trait SettingsPresenter {
  def init(
    model: ModelProperty[SettingsModel],
    userContextService: services.UserContextService
  ): Unit = {
    model.subProp(_.settings.maxHR).listen(p => userContextService.api.foreach(_.settings.max_hr(p)))
    model.subProp(_.settings.elevFilter).listen(p => userContextService.api.foreach(_.settings.elev_filter(p)))
    model.subProp(_.settings.questTimeOffset).listen(p => userContextService.api.foreach(_.settings.quest_time_offset(p)))

    // time changes once per 1000 ms, but we do not know when. If one would use 1000 ms, the error could be almost 1 sec if unlucky.
    // By using 200 ms we are sure the error will be under 200 ms
    dom.window.setInterval(() => model.subProp(_.currentTime).set(ZonedDateTime.now()), 200)
  }
}
OndrejSpanel/Stravamat
frontend/src/main/scala/com/github/opengrabeso/mixtio/frontend/views/settings_base/SettingsPresenter.scala
Scala
gpl-2.0
964
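The comment in the presenter above argues that polling the clock every 200 ms bounds the displayed-time staleness at 200 ms, even though the second boundary falls at an unknown phase. A rough sketch of the same bounded-staleness idea on the JVM (scheduler setup and timings are illustrative, not part of the record):

// Illustrative JVM sketch only: polling every pollMillis guarantees the cached
// time is never more than pollMillis behind the real clock, regardless of when
// the second actually ticks over.
import java.time.ZonedDateTime
import java.util.concurrent.{Executors, TimeUnit}

object ClockPollingSketch extends App {
  @volatile var displayedTime: ZonedDateTime = ZonedDateTime.now()
  val pollMillis = 200L

  val scheduler = Executors.newSingleThreadScheduledExecutor()
  scheduler.scheduleAtFixedRate(
    () => displayedTime = ZonedDateTime.now(),
    0L, pollMillis, TimeUnit.MILLISECONDS)

  Thread.sleep(1000)
  println(s"Displayed time (at most $pollMillis ms stale): $displayedTime")
  scheduler.shutdown()
}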
package streams import common._ /** * This trait represents the layout and building blocks of the game * * @TODO: SHOULD RENAME `x` and `y` in class Pos to `row` and `col`. It's * confusing to have `x` being the vertical axis. */ trait GameDef { /** * The case class `Pos` encodes positions in the terrain. * * IMPORTANT NOTE * - The `x` coordinate denotes the position on the vertical axis * - The `y` coordinate is used for the horizontal axis * - The coordinates increase when moving down and right * * Illustration: * * 0 1 2 3 <- y axis * 0 o o o o * 1 o o o o * 2 o # o o # is at position Pos(2, 1) * 3 o o o o * * ^ * | * * x axis */ case class Pos(x: Int, y: Int) { /** The position obtained by changing the `x` coordinate by `d` */ def dx(d: Int) = copy(x = x + d) /** The position obtained by changing the `y` coordinate by `d` */ def dy(d: Int) = copy(y = y + d) def str(): String = x.toString + "/" + y.toString } /** * The position where the block is located initially. * * This value is left abstract, it will be defined in concrete * instances of the game. */ val startPos: Pos /** * The target position where the block has to go. * This value is left abstract. */ val goal: Pos /** * The terrain is represented as a function from positions to * booleans. The function returns `true` for every position that * is inside the terrain. * * As explained in the documentation of class `Pos`, the `x` axis * is the vertical one and increases from top to bottom. */ type Terrain = Pos => Boolean /** * The terrain of this game. This value is left abstract. */ val terrain: Terrain /** * In Bloxorz, we can move left, right, Up or down. * These moves are encoded as case objects. */ sealed abstract class Move { def go(b:Block):Block } case object Left extends Move { def go(b:Block):Block = b.left } case object Right extends Move { def go(b:Block):Block = b.right } case object Up extends Move { def go(b:Block):Block = b.up } case object Down extends Move { def go(b:Block):Block = b.down } object Move { val dirs = List(Left, Right, Up, Down) } /** * This function returns the block at the start position of * the game. */ //def startBlock: Block = ??? def startBlock: Block = Block(startPos,startPos) /** * A block is represented by the position of the two cubes that * it consists of. We make sure that `b1` is lexicographically * smaller than `b2`. */ case class Block(b1: Pos, b2: Pos) { // checks the requirement mentioned above require(b1.x <= b2.x && b1.y <= b2.y, "Invalid block position: b1=" + b1 + ", b2=" + b2) /** * Returns a block where the `x` coordinates of `b1` and `b2` are * changed by `d1` and `d2`, respectively. */ def dx(d1: Int, d2: Int) = Block(b1.dx(d1), b2.dx(d2)) /** * Returns a block where the `y` coordinates of `b1` and `b2` are * changed by `d1` and `d2`, respectively. */ def dy(d1: Int, d2: Int) = Block(b1.dy(d1), b2.dy(d2)) /** The block obtained by moving left */ def left = if (isStanding) dy(-2, -1) else if (b1.x == b2.x) dy(-1, -2) else dy(-1, -1) /** The block obtained by moving right */ def right = if (isStanding) dy(1, 2) else if (b1.x == b2.x) dy(2, 1) else dy(1, 1) /** The block obtained by moving up */ def up = if (isStanding) dx(-2, -1) else if (b1.x == b2.x) dx(-1, -1) else dx(-1, -2) /** The block obtained by moving down */ def down = if (isStanding) dx(1, 2) else if (b1.x == b2.x) dx(1, 1) else dx(2, 1) /** * Returns the list of blocks that can be obtained by moving * the current block, together with the corresponding move. 
*/ def neighbors: List[(Block, Move)] = for(m <- Move.dirs) yield (m.go(this), m) /** * Returns the list of positions reachable from the current block * which are inside the terrain. */ def legalNeighbors: List[(Block, Move)] = neighbors.filter { case(b, m) => b.isLegal } /** * Returns `true` if the block is standing. */ //def isStanding: Boolean = ??? def isStanding: Boolean = b1 == b2 /** * Returns `true` if the block is entirely inside the terrain. */ //def isLegal: Boolean = ??? def isLegal: Boolean = { terrain(b1) && terrain(b2) } } }
gvamos/MilanOpera
streams/src/main/scala/streams/GameDef.scala
Scala
gpl-2.0
4,822
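The trait above leaves startPos, goal and terrain abstract, so a concrete game is just an object that fills them in. A tiny sketch instantiating it on a 4x4 all-passable terrain (start, goal and terrain values are my own, purely for illustration, and assume the GameDef trait is on the classpath) shows how the block moves compose.

// Illustrative instantiation only: a 4x4 all-passable terrain with made-up start
// and goal positions, to show how the moves defined above compose.
object TinyGame extends GameDef {
  val startPos = Pos(0, 0)
  val goal     = Pos(3, 3)
  val terrain: Terrain = p => p.x >= 0 && p.x < 4 && p.y >= 0 && p.y < 4

  def demo(): Unit = {
    val b = startBlock                   // standing block, both cubes at (0,0)
    println(b.right)                     // Block(Pos(0,1),Pos(0,2)): now lying along the horizontal axis
    println(b.right.down)                // both cubes move one row down
    println(b.legalNeighbors.map(_._2))  // only the moves that keep both cubes on the terrain
  }
}

From the standing start block only Right and Down keep both cubes inside this terrain, so legalNeighbors returns exactly those two moves.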
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ package scaps.sbtPlugin import sbt.{ url => sbtUrl, _ } import sbt.Keys._ import sbt.APIMappings import scaps.buildInfo.BuildInfo case class IndexJob(organization: String, name: String, revision: String, artifactPath: String, docUrlPrefix: Option[String]) object ApiSearchPlugin extends AutoPlugin { override def trigger = allRequirements val scapsArtifact = "scaps-scala" object autoImport { lazy val indexDependencies = SettingKey[Seq[ModuleID]]("indexDependencies", "Dependencies that will be indexed.") lazy val scapsControlHost = SettingKey[String]("scapsControlHost", "Hostname of the Scala API Search control service.") lazy val scapsModules = TaskKey[Seq[IndexJob]]("scapsModules", "Modules that will be indexed.") lazy val Scaps = config("scaps").extend(Runtime) } import autoImport._ lazy val scapsSettings = Seq( indexDependencies := Seq(), scapsControlHost := "localhost:8081", scapsModules := { val modules = updateClassifiers.value.configuration(Compile.name).get.modules val mappings = apiMappings.in(Compile, doc).value val deps = indexDependencies.value.map(d => CrossVersion(scalaVersion.value, scalaBinaryVersion.value)(d)) modules .filter(m => deps.exists( d => m.module.organization == d.organization && m.module.name == d.name && m.module.revision == d.revision)) .flatMap { m => val module = m.module val as = m.artifacts val mapping = as.flatMap { case (_, f) => mappings.get(f) }.headOption val sourceFile = as.collectFirst { case (Artifact(name, _, _, Some(Artifact.SourceClassifier), _, _, _), file) => file } sourceFile.map(f => IndexJob( module.organization, module.name, module.revision, f.getAbsolutePath(), mapping.map(_.toString + "#"))) }.distinct }, javaOptions := { val classpath = (fullClasspath in Compile).value.map { case Attributed(f) => f.getAbsolutePath } val hostArg = s"-Dscaps.extraction.control-host=${scapsControlHost.value}" val cpArgs = classpath.zipWithIndex.map { case (cp, idx) => s"-Dscaps.extraction.classpath.${idx}=${cp}" } val moduleArgs = scapsModules.value.zipWithIndex.flatMap { case (m, idx) => Seq( s"-Dscaps.extraction.modules.${idx}.organization=${m.organization}", s"-Dscaps.extraction.modules.${idx}.name=${m.name}", s"-Dscaps.extraction.modules.${idx}.revision=${m.revision}", s"-Dscaps.extraction.modules.${idx}.artifact=${m.artifactPath}", s"-Dscaps.extraction.modules.${idx}.doc-url=${m.docUrlPrefix.getOrElse("")}") } (hostArg +: (cpArgs ++ moduleArgs)) }, fork := true, mainClass := Some("scaps.scala.Main")) override lazy val projectSettings = inConfig(Scaps)(Defaults.compileSettings ++ scapsSettings) ++ Seq(libraryDependencies += BuildInfo.organization %% scapsArtifact % BuildInfo.version) }
scala-search/scaps
sbtPlugin/src/main/scala/scaps/sbtPlugin/ApiSearchPlugin.scala
Scala
mpl-2.0
3,272
package chandu0101.scalajs.react.components package elementalui import chandu0101.macros.tojs.JSMacro import japgolly.scalajs.react._ import japgolly.scalajs.react.raw.React import japgolly.scalajs.react.vdom.VdomNode import scala.scalajs.js case class Dropdown(alignRight: js.UndefOr[Boolean] = js.undefined, buttonHasDisclosureArrow: js.UndefOr[Boolean] = js.undefined, buttonLabel: js.UndefOr[String] = js.undefined, buttonType: js.UndefOr[ButtonType] = js.undefined, className: js.UndefOr[String] = js.undefined, isOpen: js.UndefOr[Boolean] = js.undefined, children: js.UndefOr[Boolean] = js.undefined, items: js.Array[DropdownMenuItem], onSelect: js.UndefOr[ReactEventFromHtml => Callback] = js.undefined) { def apply(children: VdomNode*) = { val props = JSMacro[Dropdown](this) val component = JsComponent[js.Object, Children.Varargs, Null](Eui.Dropdown) component(props)(children: _*) } } case class DropdownMenuItemType private (value: String) extends AnyVal object DropdownMenuItemType { val DIVIDER = DropdownMenuItemType("divider") val HEADER = DropdownMenuItemType("header") val DEFAULT = DropdownMenuItemType("") def fromString(str: String): DropdownMenuItemType = str match { case "divider" => DIVIDER case "header" => HEADER case _ => DEFAULT } } case class DropdownMenuItem(label: String = "", `type`: DropdownMenuItemType = DropdownMenuItemType.DEFAULT) { val toJS = JSMacro[DropdownMenuItem](this) } object DropdownMenuItem { def fromJson(obj: js.Dynamic) = DropdownMenuItem(label = obj.label.toString, `type` = DropdownMenuItemType.fromString(obj.`type`.toString)) }
rleibman/scalajs-react-components
core/src/main/scala/chandu0101/scalajs/react/components/elementalui/Dropdown.scala
Scala
apache-2.0
1,866
package fyrie import net.fyrie.redis._ import akka.actor.{ ActorSystem, Actor, Props, ActorRef } import akka.bita.{ RandomScheduleHelper, Scheduler } import akka.bita.pattern.Patterns._ import akka.util.duration._ import akka.util.Timeout import akka.dispatch.Await import bita.util.{ FileHelper, TestHelper } import bita.criteria._ import bita.ScheduleOptimization._ import org.scalatest._ import akka.testkit._ import akka.testkit.TestProbe import com.typesafe.config.ConfigFactory /** * Ported from net.fyrie.redis.KeysSpec * Test: keys should fetch keys */ class InsertSpec extends BitaTests { override def name = "Fyrie-insert" def run { system = ActorSystem("ActorSystem", ConfigFactory.parseString(""" akka { event-handlers = ["akka.testkit.TestEventListener"] loglevel = "WARNING" actor { default-dispatcher { core-pool-size-min = 4 core-pool-size-factor = 2.0 throughput = 10 } } } """)) if (random) { RandomScheduleHelper.setMaxDelay(250) // Increase the delay between messages to 250 ms RandomScheduleHelper.setSystem(system) } val probe = new TestProbe(system) // Use a testprobe to represent the tests. val r = new RedisClient("localhost", 6379, RedisClientConfig(connections = 1))(system) r.set("anshin-1", "debasish") r.set("anshin-2", "maulindu") val result = r.sync.keys("anshin*").size if (result == 2) { bugDetected = false println(Console.GREEN + Console.BOLD+"***SUCCESS***"+Console.RESET) } else { bugDetected = true println(Console.RED + Console.BOLD+"***FAILURE***"+Console.RESET) } } }
Tjoene/thesis
benchmark/src/test/scala/fyrie/InsertSpec.scala
Scala
gpl-2.0
1,856
import org.apache.spark.sql.SQLContext

:load /home/ealmansi/dev/yavi/spark/jobs/utility.scala

def runJob(workDirectory: String, sqlContext: SQLContext): Unit = {
  loadTable("page_similarity", workDirectory, sqlContext)
  saveTableCsv("page_similarity", workDirectory, sqlContext)
}
ealmansi/yavi
spark/jobs/export_page_similarity.scala
Scala
mit
288
package com.datastax.spark.connector.util import scala.annotation.tailrec /** * A HashMap and a PriorityQueue hybrid. * Works like a HashMap but offers additional O(1) access to the entry with * the highest value. As in a standard HashMap, entries can be looked up by * key in O(1) time. Adding, removing and updating items by key is handled * in O(log n) time. * * Keys must not be changed externally and must implement * proper equals and hashCode. It is advised to use immutable classes for keys. * * Values must be properly comparable. * Values may be externally mutated as long as a proper immediate call to `put` * is issued to notify the PriorityHashMap that the value associated with the given key * has changed, after each value mutation. * It is not allowed to externally mutate more than one value * at a time or to mutate a value associated with multiple keys. * Therefore, it is advised to use immutable classes for values, and updating * values only by calls to `put`. * * Contrary to standard Java HashMap implementation, PriorityHashMap does not * allocate memory on adding / removing / updating items and stores * all data in flat, non-resizable arrays instead. Therefore its * capacity cannot be modified after construction. It is technically possible * to remove this limitation in the future. * * PriorityHashMap is mutable and not thread-safe. * * Internally, PriorityHashMap is composed of the following data arrays: * - an array storing references to keys, forming a heap-based priority queue; * - an array storing corresponding references to values, always in the same order as keys; * - an array storing indexes into the first two arrays, used as an inline hash-table allowing to * quickly locate keys in the heap in constant time; * - an array for fast translating indexes in the heap into indexes into hash-table, so * after moving a key/value in the heap, the corresponding index in the hash-table can be * quickly updated, without hashing. * * The indexes hash-table doesn't use overflow lists for dealing with hash collisions. * The overflow entries are placed in the main hash-table array in the first not-taken * entry to the right from the original position pointed by key hash. On search, * if the key is not found immediately at a position pointed by key hash, it is searched * to the right, until it is found or an empty array entry is found. * * @param _capacity minimum required capacity of this collection; the actual capacity may be larger than this, * because for performance reasons it is rounded up to the nearest power of two * @tparam K type of keys * @tparam V type of values; values must be comparable */ final class PriorityHashMap[K, V : Ordering](_capacity: Int) { private[this] var _size = 0 def size = _size def isEmpty = _size == 0 def nonEmpty = !isEmpty private def log2(n: Int): Int = 32 - Integer.numberOfLeadingZeros(n - 1) private def pow2(n: Int): Int = math.pow(2, n).toInt /** The maximum number of items that can be stored at a time in this map. */ val capacity = pow2(log2(math.max(_capacity, 2))) // indexes array is twice bigger than capacity, // so we need to use twice bigger hashing mask as well; // the mask allows to filter appropriate number of less significant bits of the hash private val mask = (capacity * 2) - 1 /** Original hash multiplied by 2, to hash into even entries only, so there is * initially at least one empty entry between them (works as a search-terminator). 
* This is to protect against bad hashes forming long sequences * of consecutive numbers, which would result in O(n) lookup, instead of O(1), even * if there were no hash-collisions. */ @inline private def hash(key: K): Int = (key.hashCode() << 1) & mask /** A heap of keys. The key for the largest value is at index 0. */ private[this] val _keys = Array.ofDim[AnyRef](capacity).asInstanceOf[Array[K]] /** A heap of values. The largest value is at index 0. */ private[this] val _values = Array.ofDim[AnyRef](capacity).asInstanceOf[Array[V]] /** A hash-table mapping keys to indexes of the keys/values in the heap. * A key hash determines an entry in this table, which tells * the location of the key and value in the _keys and _values arrays. * Entries for multiple colliding hashes are placed next to each other (to the right). * Unused entries are denoted by -1. The hash-table is twice bigger than the heap * so that there are enough empty entries to make key searches stop after a small (typically 1) * number of entries. */ private[this] val _indexes = Array.fill(capacity * 2)(-1) /** Tells the location of the key in the _indexes array. * Each i-th entry of this table is a position in the _indexes array, * matching i-th key/value element on the _keys/_values heaps. * This allows to quickly update appropriate _indexes entry when a key/value pair is moved * on the heap, without hashing and searching for the key. */ private[this] val _positions = Array.fill(capacity)(-1) private[this] val ordering = implicitly[Ordering[V]] /** Finds a key in the indexes array. * Returns a position in the indexes array pointing to the found key or a position of * the first empty index entry, if key was not found. */ @inline private def find(key: K): Int = { find(key, hash(key)) } /** Finds a key in the indexes array, starting at a given position in the indexes array * Returns a position in the indexes array pointing to the found key or a position of * the first empty index entry, if key was not found. */ @tailrec private def find(key: K, pos: Int): Int = { val i = _indexes(pos) if (i < 0 || _keys(i) == key) pos else find(key, (pos + 1) & mask) } /** Records a new position of the key in the index array. * Returns the position of the key in the indexes hash-table. */ @inline private def setIndex(key: K, index: Int): Int = { val pos = find(key) _indexes(pos) = index _positions(index) = pos pos } /** Fixes the position of the key in the indexes array. * Required after removal of keys from the indexes array. */ @tailrec private def rehash(pos: Int): Unit = { val index = _indexes(pos) if (index >= 0) { val key = _keys(index) _indexes(pos) = -1 setIndex(key, index) rehash((pos + 1) & mask) } } /** Removes an entry from the hash table. * This is not as simple as just setting the given entry to empty value, * because there might be some overflow entries to the right. Therefore, * we need to rehash all the consecutive entries to the right and maybe * fix their positions (it is quite likely they will stay, though). */ @inline private def removeIndex(pos: Int): Unit = { val index = _indexes(pos) _indexes(pos) = -1 _positions(index) = -1 rehash((pos + 1) & mask) } /** Sets a key-value pair in the heap at a given index and stores the index * in the _indexes array under given position. The given position must be * the correct position that the key hashes to. 
*/ @inline private def setKeyValueUnsafe(pos: Int, index: Int, key: K, value: V): Unit = { _keys(index) = key _values(index) = value _indexes(pos) = index _positions(index) = pos } /** Moves a key/value to a new position in the heap and updates the indexes hash-table appropriately. * Returns the index of the entry in the indexes hash-table. */ @inline private def move(from: Int, to: Int): Int = { val pos = _positions(from) setKeyValueUnsafe(pos, to, _keys(from), _values(from)) pos } /** Clears given key/value pair of the heap i.e. sets them to null. * This is to make sure we don't keep any references to the removed items so GC could clean them up. */ @inline private def clear(index: Int): Unit = { _keys.asInstanceOf[Array[AnyRef]](index) = null _values.asInstanceOf[Array[AnyRef]](index) = null _positions(index) = -1 } /** Returns the index of the left child of the given entry in the heap */ @inline private def left(index: Int) = (index << 1) + 1 /** Returns the index of the right child of the given entry in the heap */ @inline private def right(index: Int) = (index << 1) + 2 /** Returns the index of the parent of the given entry in the heap */ @inline private def parent(index: Int) = (index - 1) >>> 1 @inline private def isValidIndex(index: Int) = index < _size @inline private def hasLeft(index: Int) = isValidIndex(left(index)) @inline private def hasRight(index: Int) = isValidIndex(right(index)) @inline private def hasParent(index: Int) = index > 0 /** Returns the index of the child on the heap that has the highest value */ private def indexOfMaxChild(index: Int): Int = { val leftIndex = left(index) val leftValue = _values(leftIndex) if (hasRight(index)) { val rightIndex = right(index) val rightValue = _values(rightIndex) if (ordering.compare(leftValue, rightValue) > 0) leftIndex else rightIndex } else { leftIndex } } /** Goes up the path and moves parents one item down, until the parent is * larger than the given value. */ @tailrec private def moveSmallerParentDown(index: Int, value: V): Int = { if (hasParent(index)) { val parentIndex = parent(index) val parentValue = _values(parentIndex) if (ordering.compare(value, parentValue) > 0) { move(parentIndex, index) moveSmallerParentDown(parentIndex, value) } else index } else index } /** Maintains the heap invariant by moving a larger item up, until it is smaller * than its parent. */ private def siftUp(pos: Int, index: Int): Unit = { val thisKey = _keys(index) val thisValue = _values(index) val parentIndex = moveSmallerParentDown(index, thisValue) if (parentIndex != index) { setKeyValueUnsafe(pos, parentIndex, thisKey, thisValue) } } @tailrec private def moveLargerChildUp(index: Int, value: V): Int = { if (hasLeft(index)) { val maxIndex = indexOfMaxChild(index) val maxValue = _values(maxIndex) if (ordering.compare(value, maxValue) < 0) { move(maxIndex, index) moveLargerChildUp(maxIndex, value) } else index } else index } /** Maintains the heap invariant by moving a smaller item up, until it is larger * than all of its children. */ private def siftDown(pos: Int, index: Int): Unit = { val thisKey = _keys(index) val thisValue = _values(index) val childIndex = moveLargerChildUp(index, thisValue) if (childIndex != index) setKeyValueUnsafe(pos, childIndex, thisKey, thisValue) } private def siftUpOrDown(pos: Int, index: Int): Unit = { siftUp(pos, index) siftDown(pos, index) } /** Removes an element from the heap, replaces it with the last element, * and fixes the position of the replacement element to keep the heap invariant. 
*/ private def removeAt(index: Int): Unit = { _size -= 1 if (index != _size) { val pos = move(_size, index) siftUpOrDown(pos, index) } clear(_size) } /** Updates a value and moves it up or down in the heap. */ private def update(pos: Int, index: Int, value: V): Unit = { _values(index) = value siftUpOrDown(pos, index) } /** Adds a new entry to the end of the heap and updates * the indexes hash-table. */ private def add(pos: Int, key: K, value: V): Unit = { if (_size == capacity) throw new IllegalStateException( s"Cannot add a new item ($key -> $value) to a PriorityMap that reached its maximum capacity $capacity") val index = _size _size += 1 setKeyValueUnsafe(pos, index, key, value) siftUp(pos, index) } /** Adds or updates a map entry. * Complexity: O(log n) average, O(1) optimistic. */ def put(key: K, value: V): Unit = { val pos = find(key) val index = _indexes(pos) if (index < 0) add(pos, key, value) else update(pos, index, value) } /** Returns a value associated with the given key. * If the key does not exist, throws NoSuchElementException. * If you know the key does exist, this method is preferred over * the [[get]] method, because it doesn't allocate an `Option` object. * Complexity: O(1). */ def apply(key: K): V = { val pos = find(key) val index = _indexes(pos) if (index < 0) throw new NoSuchElementException(s"Key not found $key") _values(index) } /** Returns a value associated with the given key. * If the key does not exist, returns None. * Complexity: O(1). */ def get(key: K): Option[V] = { val pos = find(key) val index = _indexes(pos) if (index < 0) None else Some(_values(index)) } /** Returns true if the map contains given key. */ def contains(key: K): Boolean = _indexes(find(key)) >= 0 /** Removes a key and reorders remaining items. * If the key does not exist, does nothing. * Returns true if key existed. * Complexity: O(log n) average, O(1) optimistic. */ def remove(key: K): Boolean = { val pos = find(key) val index = _indexes(pos) if (index >= 0) { removeIndex(pos) removeAt(index) true } else false } private def checkNonEmpty(): Unit = if (_size == 0) throw new NoSuchElementException("Requested head of an empty PriorityMap") /** Returns the key associated with the largest value. * Complexity: O(1). */ def headKey: K = { checkNonEmpty() _keys(0) } /** Returns the largest value. * Complexity: O(1). */ def headValue: V = { checkNonEmpty() _values(0) } /** Useful for iterating the map. */ def keys: IndexedSeq[K] = _keys.take(size) /** Useful for iterating the map */ def values: IndexedSeq[V] = _values.take(size) /** Removes the entry and returns its value */ def dequeue(): V = { checkNonEmpty() val v = _values(0) remove(_keys(0)) v } override def toString: String = { "PriorityHashMap(" + _keys.zip(_values).take(size).mkString(",") + ")" } }
Stratio/spark-cassandra-connector
spark-cassandra-connector/src/main/scala/com/datastax/spark/connector/util/PriorityHashMap.scala
Scala
apache-2.0
14,306
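A short usage sketch for the PriorityHashMap above, exercising only operations its own code defines (put, get, headKey/headValue, dequeue, remove); it assumes the class is available under its original package.

// Usage sketch for the PriorityHashMap above, using only the operations its own
// code defines; assumes the class is on the classpath under its original package.
import com.datastax.spark.connector.util.PriorityHashMap

object PriorityHashMapDemo extends App {
  val m = new PriorityHashMap[String, Int](16)

  m.put("a", 3)
  m.put("b", 7)
  m.put("c", 5)

  assert(m.headKey == "b" && m.headValue == 7) // O(1) access to the largest value
  m.put("b", 1)                                // updating a value re-orders the heap
  assert(m.headKey == "c" && m.headValue == 5)

  assert(m.get("a").contains(3))               // O(1) lookup by key
  assert(m.dequeue() == 5)                     // removes and returns the largest value
  assert(m.remove("b"))
  assert(m.size == 1 && m.contains("a"))
  println(m)                                   // prints the one remaining entry
}

Inserts, updates and removals are O(log n) while headKey, headValue and get stay O(1), matching the class's own doc comment.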
// https://leetcode.com/problems/find-the-duplicate-number
object Solution {
  def findDuplicate(numbers: Array[Int]): Int = {
    def next = numbers

    def race(x: Int, y: Int): Int =
      if (x == y) next(x)
      else race(next(x), next(next(y)))

    def walk(x: Int, y: Int): Int =
      if (x == y) x
      else walk(next(x), next(y))

    val head = numbers(0)
    val intersection = race(head, next(head))
    walk(head, intersection)
  }
}
airtial/Codegames
leetcode/287-find-the-duplicate-number.scala
Scala
gpl-2.0
440
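A worked example for the Solution above; the input array is my own illustration and assumes Solution is on the classpath. Reading the array as a function i -> numbers(i) turns the duplicated value into the entry point of a cycle, which the race/walk (tortoise-and-hare) pair locates.

// Worked example for the Solution above; the input array is my own illustration.
object FindDuplicateDemo extends App {
  val numbers = Array(1, 3, 4, 2, 2)         // values 1..4 in 5 slots, so one repeats
  println(Solution.findDuplicate(numbers))   // prints 2
}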
object Test extends App {
  trait SpecialException extends Throwable {}

  try {
    throw new Exception
  } catch {
    case e: SpecialException => {
      println("matched SpecialException: " + e)
      assume(e.isInstanceOf[SpecialException])
    }
    case e: Exception => {
      assume(e.isInstanceOf[Exception])
    }
  }
}
lampepfl/dotty
tests/pos/t1168.scala
Scala
apache-2.0
334
package com.github.ligangty.scala.jsoup.nodes import com.github.ligangty.scala.jsoup.helper.Strings import TextNode._ import com.github.ligangty.scala.jsoup.helper.Validator._ /** * A text node. */ class TextNode extends Node { private[nodes] var textVal: String = null /** * Create a new TextNode representing the supplied (unencoded) text). * * @param text raw text * @param baseUri base uri * @see #createFromEncoded(String, String) */ def this(text: String, baseUri: String) { this() this.baseUriVal = baseUri this.textVal = text } def nodeName(): String = "#text" /** * Get the text content of this text node. * @return Unencoded, normalised text. * @see TextNode#getWholeText() */ def text(): String = TextNode.normaliseWhitespace(getWholeText) /** * Set the text content of this text node. * @param text unencoded text * @return this, for chaining */ def text(text: String): TextNode = { this.textVal = text if (attributesVal != null) { attributesVal.put(TEXT_KEY, text) } this } /** Get the (unencoded) text of this text node, including any newlines and spaces present in the original. @return text */ def getWholeText: String = if (attributesVal == null) { textVal } else { attributesVal.get(TEXT_KEY) } /** Test if this text node is blank -- that is, empty or only whitespace (including newlines). @return true if this document is empty or only whitespace, false if it contains any text content. */ def isBlank: Boolean = Strings.isBlank(getWholeText) /** * Split this text node into two nodes at the specified string offset. After splitting, this node will contain the * original text up to the offset, and will have a new text node sibling containing the text after the offset. * @param offset string offset point to split node at. * @return the newly created text node containing the text after the offset. */ def splitText(offset: Int): TextNode = { isTrue(offset >= 0, "Split offset must be not be negative") isTrue(offset < textVal.length, "Split offset must not be greater than current text length") val head: String = getWholeText.substring(0, offset) val tail: String = getWholeText.substring(offset) text(head) val tailNode: TextNode = new TextNode(tail, this.baseUri) if (parent != null) { parent.addChildren(siblingIndex + 1, tailNode) } tailNode } private[nodes] def outerHtmlHead(accum: StringBuilder, depth: Int, out: Document.OutputSettings) { if (out.prettyPrint && ((siblingIndex == 0 && parentNodeVal.isInstanceOf[Element] && parentNodeVal.asInstanceOf[Element].tag.isFormatAsBlock && !isBlank) || (out.outline && siblingNodes.size > 0 && !isBlank))) { indent(accum, depth, out) } val normaliseWhite: Boolean = out.prettyPrint && parent.isInstanceOf[Element] && !Element.preserveWhitespace(parent) Entities.escape(accum, getWholeText, out, false, normaliseWhite, false) } private[nodes] def outerHtmlTail(accum: StringBuilder, depth: Int, out: Document.OutputSettings) { } // attribute fiddling. create on first access. 
private def ensureAttributes(): Unit = { if (attributesVal == null) { attributesVal = new Attributes attributesVal.put(TEXT_KEY, textVal) } } override def attr(attributeKey: String): String = { ensureAttributes() super.attr(attributeKey) } override def attributes: Attributes = { ensureAttributes() super.attributes } override def attr(attributeKey: String, attributeValue: String): Node = { ensureAttributes() super.attr(attributeKey, attributeValue) } override def hasAttr(attributeKey: String): Boolean = { ensureAttributes() super.hasAttr(attributeKey) } override def removeAttr(attributeKey: String): Node = { ensureAttributes() super.removeAttr(attributeKey) } override def absUrl(attributeKey: String): String = { ensureAttributes() super.absUrl(attributeKey) } override def equals(o: Any): Boolean = o match { case t: TextNode if this eq t => true case t: TextNode if !super.equals(t) => false case t: TextNode if this.textVal != null && this.textVal != t.textVal => false case t: TextNode if this.textVal == null && t.textVal != null => false case t: TextNode => true case _ => false } override def hashCode: Int = { 31 * super.hashCode + (if (textVal != null) { textVal.## } else { 0 }) } } object TextNode { /* TextNode is a node, and so by default comes with attributes and children. The attributes are seldom used, but use memory, and the child nodes are never used. So we don't have them, and override accessors to attributes to create them as needed on the fly. */ private val TEXT_KEY = "text" /** * Create a new TextNode from HTML encoded (aka escaped) data. * @param encodedText Text containing encoded HTML (e.g. &amp;lt;) * @param baseUri Base uri * @return TextNode containing unencoded data (e.g. &lt;) */ def createFromEncoded(encodedText: String, baseUri: String): TextNode = new TextNode(Entities.unescape(encodedText), baseUri) private[nodes] def normaliseWhitespace(text: String): String = Strings.normaliseWhitespace(text) private[nodes] def stripLeadingWhitespace(text: String): String = text.replaceFirst("^\\\\s+", "") private[nodes] def lastCharIsWhitespace(sb: java.lang.StringBuilder): Boolean = sb.length != 0 && sb.charAt(sb.length - 1) == ' ' }
ligangty/scalajsoup
src/main/scala/com/github/ligangty/scala/jsoup/nodes/TextNode.scala
Scala
mit
5,605
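A short, hypothetical usage sketch of the TextNode API above (the demo object is not part of the scalajsoup sources; it uses an orphan node so the parent-handling branch of splitText is never taken, and it assumes Node exposes baseUri and parent as in jsoup):

import com.github.ligangty.scala.jsoup.nodes.TextNode

object TextNodeDemo extends App {
  val node = new TextNode("Hello world", "")
  val tail = node.splitText(6)   // node keeps "Hello ", the returned sibling holds "world"
  println(node.getWholeText)     // Hello
  println(tail.getWholeText)     // world
  println(node.isBlank)          // false
  println(TextNode.createFromEncoded("&lt;b&gt;", "").getWholeText) // <b>
}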
package com.gilt.thehand

import com.gilt.thehand.rules._
import com.gilt.thehand.rules.logical.{Or, Not, And}
import com.gilt.thehand.rules.typed.{StringIn, LongIn}

/**
 * A place to drop general tests that cross class lines.
 */
class RuleParserSpec extends AbstractRuleSpec {

  val testCases = Map(
    And(Or(StringIn("1", "2"), LongIn(10, 31)), And(StringIn("2", "3"), Not(LongIn(3, 4)))) -> (
      Set(Context(2)),
      Set(Context(3), Context("1"))
    )
  )

  runTests(testCases)

  "DefaultParser" should "parse a nested rule correctly" in {
    val rule = DefaultParser.fromString("And(Or(Not(False),False), LongIn(2, 3, 4), True)")
    assert(!rule.matches(Context(1)))
    assert(rule.matches(Context(2)))
    assert(rule.matches(Context(3)))
    assert(rule.matches(Context("4")))
  }
}
gilt/the-hand
src/test/scala/com/gilt/thehand/RuleParserSpec.scala
Scala
apache-2.0
812
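The same rule combinators can be evaluated directly against a Context outside the test harness; a hypothetical REPL-style sketch (values chosen only for illustration):

import com.gilt.thehand.Context
import com.gilt.thehand.rules.logical.{And, Not, Or}
import com.gilt.thehand.rules.typed.{LongIn, StringIn}

val rule = And(Or(StringIn("1", "2"), LongIn(10, 31)), Not(LongIn(3, 4)))
rule.matches(Context(31)) // true: 31 satisfies LongIn(10, 31) and is not excluded by Not(LongIn(3, 4))
rule.matches(Context(3))  // false: 3 matches neither branch of the Or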
/*
 * Copyright 2015 Roberto Tyley
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.madgag.playgithub.auth

import com.madgag.playgithub.auth.AuthenticatedSessions.{AccessToken, RedirectToPathAfterAuthKey}
import com.madgag.scalagithub.GitHubCredentials
import play.api.mvc.Security.{AuthenticatedBuilder, AuthenticatedRequest}
import play.api.mvc._

import java.nio.file.Path
import scala.concurrent.{ExecutionContext, Future}

object Actions {

  type AuthRequest[A] = AuthenticatedRequest[A, GitHubCredentials]

  class GitHubAuthenticatedAction(scopes: Seq[String], workingDir: Path, parser: BodyParser[AnyContent])(
    implicit ec: ExecutionContext,
    authClient: Client,
    accessTokenProvider: AccessToken.Provider
  ) extends AuthenticatedBuilder[GitHubCredentials](
    { req: RequestHeader =>
      accessTokenProvider(req).flatMap(accessKey =>
        GitHubCredentials.forAccessKey(accessKey, workingDir).toOption)
    },
    parser,
    onUnauthorized = implicit req =>
      authClient.redirectForAuthWith(scopes).addingToSession(RedirectToPathAfterAuthKey -> req.path)
  )

  class AuthenticatedActionToGHRequest(implicit val executionContext: ExecutionContext)
    extends ActionTransformer[AuthRequest, GHRequest] {
    def transform[A](request: AuthRequest[A]) = Future.successful(new GHRequest[A](request.user, request))
  }

  def gitHubAction(scopes: Seq[String], workingDir: Path, parser: BodyParser[AnyContent])(
    implicit authClient: Client,
    accessTokenProvider: AccessToken.Provider,
    ec: ExecutionContext
  ) = (new GitHubAuthenticatedAction(scopes, workingDir, parser)) andThen (new AuthenticatedActionToGHRequest)
}
rtyley/play-git-hub
src/main/scala/com/madgag/playgithub/auth/Actions.scala
Scala
gpl-3.0
2,142
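A hypothetical sketch of wiring the composed action into application code (none of this is in the repo; the scope, working directory, body parser and implicit Client / AccessToken.Provider / ExecutionContext are assumed to be supplied by the host Play application):

import java.nio.file.Path
import scala.concurrent.ExecutionContext
import play.api.mvc._
import com.madgag.playgithub.auth.{Actions, Client}
import com.madgag.playgithub.auth.AuthenticatedSessions.AccessToken

object GitHubEndpoints {
  // Build the composed action once; every request it lets through carries resolved GitHubCredentials.
  def whoAmI(workingDir: Path, bodyParser: BodyParser[AnyContent])(
      implicit authClient: Client, tokens: AccessToken.Provider, ec: ExecutionContext): Action[AnyContent] = {
    val GitHubAction = Actions.gitHubAction(Seq("repo"), workingDir, bodyParser)
    GitHubAction { _ => Results.Ok("Authenticated against GitHub with the 'repo' scope") }
  }
}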
import sbt._
import Keys._

import scala.util.{Failure, Success}

object FMPP {

  def preprocessorSettings =
    inConfig(Compile)(Seq(sourceGenerators += fmpp.taskValue, fmpp := fmppTask.value)) ++ Seq(
      libraryDependencies ++= Seq(
        ("net.sourceforge.fmpp" % "fmpp" % "0.9.16" % FmppConfig.name).intransitive,
        "org.freemarker" % "freemarker" % "2.3.31" % FmppConfig.name,
        "oro" % "oro" % "2.0.8" % FmppConfig.name,
        "org.beanshell" % "bsh" % "2.0b5" % FmppConfig.name,
        "xml-resolver" % "xml-resolver" % "1.2" % FmppConfig.name
      ),
      ivyConfigurations += FmppConfig,
      FmppConfig / fullClasspath := update.map { _ select configurationFilter(FmppConfig.name) map Attributed.blank }.value,
      Compile / packageSrc / mappings ++= {
        val fmppSrc = (Compile / sourceDirectory).value / "scala"
        val inFiles = fmppSrc ** "*.fm"
        ((Compile / managedSources).value.pair(Path.relativeTo((Compile / sourceManaged).value) | Path.flat)) ++ // Add generated sources to sources JAR
          (inFiles pair (Path.relativeTo(fmppSrc) | Path.flat)) // Add *.fm files to sources JAR
      }
    )

  /* FMPP Task */
  val fmpp = taskKey[Seq[File]]("fmpp")
  val FmppConfig = config("fmpp").hide

  def fmppTask = Def.task {
    val s = streams.value
    val output = (Compile / sourceManaged).value
    val fmppSrc = (Compile / sourceDirectory).value / "scala"
    val inFiles = (fmppSrc ** "*.fm").get.toSet
    val fmppRunner = (fmpp / runner).value
    val fmppClasspath = (FmppConfig / fullClasspath).value
    val cachedFun = FileFunction.cached(s.cacheDirectory / "fmpp", inStyle = FilesInfo.lastModified, outStyle = FilesInfo.exists) { (in: Set[File]) =>
      IO.delete((output ** "*.scala").get)
      val args = "--expert" :: "-q" :: "-S" :: fmppSrc.getPath :: "-O" :: output.getPath ::
        "--replace-extensions=fm, scala" :: "-M" :: "execute(**/*.fm), ignore(**/*)" :: Nil
      val errors = fmppRunner.run("fmpp.tools.CommandLine", fmppClasspath.files, args, s.log)
      errors match {
        case Success(value) => value
        case Failure(exception) => sys.error(exception.getMessage)
      }
      (output ** "*.scala").get.toSet
    }
    cachedFun(inFiles).toSeq
  }
}
slick/slick
project/FMPP.scala
Scala
bsd-2-clause
2,218
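In a build that places this file under project/, the settings can be attached to a module so that *.fm templates under src/main/scala are expanded into managed Scala sources before compilation; a hypothetical build.sbt fragment (the module name is illustrative):

lazy val core = (project in file("core"))
  .settings(FMPP.preprocessorSettings: _*)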
package models

import com.datastax.spark.connector.mapper.JavaBeanColumnMapper

/**
 * Created by cenk on 25/02/15.
 */
object Metrics {
  implicit object Mapper extends JavaBeanColumnMapper[Metric]
}
cenkbircanoglu/sparkScala
src/main/scala/models/Metrics.scala
Scala
mit
203
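The mapper above targets a JavaBean-style Metric class that is not shown in this file; a hypothetical shape it could take (field names are purely illustrative), giving the Spark Cassandra connector getters, setters and a no-arg constructor to map columns through:

package models

import scala.beans.BeanProperty

class Metric(@BeanProperty var name: String, @BeanProperty var value: Double) {
  def this() = this(null, 0.0) // no-arg constructor, as JavaBean-style mapping expects
}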
package com.stackmob.customcode.dev
package test
package server
package sdk
package data
package dataservice

import org.specs2.Specification
import org.specs2.mock.Mockito

trait CountObjects extends BaseTestGroup { this: Specification with Mockito with CustomMatchers =>

  case class CountObjects() extends BaseTestContext {
    private val count = 1L

    override protected lazy val defaults = {
      val getResponse = new ResponseDetails(200,
        headers = List("content-range" -> s"0-0/$count"),
        body = count.toString.getBytesUTF8)
      val datastore = new MockStackMobDatastore(getResponse, ResponseDetails(200), ResponseDetails(200), ResponseDetails(200))
      val (map, obj, _, _) = super.defaults
      (map, obj, datastore, dataService(datastore))
    }

    private val (_, _, datastore, svc) = defaults

    def correctSchema = {
      val countRes = svc.countObjects(schemaName) must beEqualTo(count)
      val correctSchema = datastore.getCalls.get(0).schema must beEqualTo(schemaName)
      countRes and correctSchema
    }
  }
}
matthewfarwell/stackmob-customcode-dev
src/test/scala/com/stackmob/customcode/dev/test/server/sdk/data/dataservice/CountObjects.scala
Scala
apache-2.0
1,068
package lib import cats.data.NonEmptyChain import cats.data.Validated.Invalid import helpers.BasePlaySpec import scala.io.Source class ServerParserSpec extends BasePlaySpec { def configParser: ConfigParser = app.injector.instanceOf[ConfigParser] val uri = "file:///test" val source = ProxyConfigSource( uri = uri, version = "0.0.1" ) "empty" in { configParser.parse(uri, " ").validate() must be( Invalid(NonEmptyChain("Missing uri", "Missing version")) ) } "hostHeaderValue" in { Seq("http://user.api.flow.io", "https://user.api.flow.io").foreach { host => Server("user", host, logger).hostHeaderValue must be( "user.api.flow.io" ) } } "single server w/ no operations" in { val spec = """ version: 0.0.1 servers: - name: test host: https://test.api.flow.io """ validOrErrors(configParser.parse(uri, spec).validate()) must be( ProxyConfig( sources = Seq(source), servers = Seq( Server("test", "https://test.api.flow.io", logger) ), operations = Nil ) ) } "single server w/ operations" in { val spec = """ version: 1.2.3 servers: - name: user host: https://user.api.flow.io operations: - method: GET path: /users server: user - method: POST path: /users server: user - method: GET path: /users/:id server: user """ val user = Server( "user", "https://user.api.flow.io", logger = logger ) val cfg = validOrErrors( configParser.parse(uri, spec).validate() ) cfg.sources must be(Seq(source.copy(version = "1.2.3"))) cfg.servers must be(Seq(user)) cfg.operations must be( Seq( Operation(Route(Method.Get, "/users"), user), Operation(Route(Method.Post, "/users"), user), Operation(Route(Method.Get, "/users/:id"), user) ) ) } "latest production config" in { val uri = "https://s3.amazonaws.com/io.flow.aws-s3-public/util/api-proxy/production.config" val contents = Source.fromURL(uri).mkString val config = validOrErrors(configParser.parse(uri, contents).validate()) Seq("user", "organization", "catalog").foreach { name => val server = config.servers.find(_.name == name).getOrElse { sys.error(s"Failed to find server[$name]") } server.host must be(s"https://$name.api.flow.io") } val index = Index(config) Seq( (Method.Get, "/users", "user"), (Method.Get, "/organizations", "organization"), (Method.Get, "/:organization/catalog", "catalog") ).foreach { case (method, path, server) => val op = index.resolve(method, path).getOrElse { sys.error(s"Failed to resolve path[$path]") } op.server.name must be(server) op.route.method must be(method) op.route.path must be(path) } } "latest development config" in { val uri = "https://s3.amazonaws.com/io.flow.aws-s3-public/util/api-proxy/development.config" val contents = Source.fromURL(uri).mkString val config = validOrErrors(configParser.parse(uri, contents).validate()) Map( "user" -> "http://localhost:6021", "organization" -> "http://localhost:6081", "catalog" -> "http://localhost:6071" ).foreach { case (name, host) => val server = config.servers.find(_.name == name).getOrElse { sys.error(s"Failed to find server[$name]") } server.host must be(host) } val index = Index(config) Seq( (Method.Get, "/users", "user"), (Method.Get, "/organizations", "organization"), (Method.Get, "/:organization/catalog", "catalog") ).foreach { case (method, path, server) => val op = index.resolve(method, path).getOrElse { sys.error(s"Failed to resolve path[$path]") } op.server.name must be(server) op.route.method must be(method) op.route.path must be(path) } } "internal routes" in { val uris = Seq( "https://s3.amazonaws.com/io.flow.aws-s3-public/util/api-proxy/development.config", 
"https://s3.amazonaws.com/io.flow.aws-s3-public/util/api-internal-proxy/development.config" ) val proxyConfigFetcher = app.injector.instanceOf[ProxyConfigFetcher] val config = validOrErrors(proxyConfigFetcher.load(uris)) Seq("currency", "currency-internal").foreach { name => config.servers.find(_.name == name).getOrElse { sys.error(s"Failed to find server[$name]") } } val index = Index(config) val op1 = index.resolve(Method.Get, "/test/currency/rates").get op1.server.name must be("currency") op1.route.method must be(Method.Get) op1.route.path must be("/:organization/currency/rates") val op2 = index.resolve(Method.Get, "/internal/currency/rates").get op2.server.name must be("currency-internal") op2.route.method must be(Method.Get) op2.route.path must be("/internal/currency/rates") } }
flowvault/proxy
test/lib/ServerParserSpec.scala
Scala
mit
4,978
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.streaming.continuous import java.sql.Timestamp import org.apache.spark.{SparkContext, SparkException} import org.apache.spark.scheduler.{SparkListener, SparkListenerTaskStart} import org.apache.spark.sql._ import org.apache.spark.sql.execution.streaming._ import org.apache.spark.sql.execution.streaming.continuous._ import org.apache.spark.sql.execution.streaming.sources.ContinuousMemoryStream import org.apache.spark.sql.functions._ import org.apache.spark.sql.internal.SQLConf.{CONTINUOUS_STREAMING_EPOCH_BACKLOG_QUEUE_SIZE, MIN_BATCHES_TO_RETAIN} import org.apache.spark.sql.streaming.{StreamTest, Trigger} import org.apache.spark.sql.test.TestSparkSession class ContinuousSuiteBase extends StreamTest { // We need more than the default local[2] to be able to schedule all partitions simultaneously. override protected def createSparkSession = new TestSparkSession( new SparkContext( "local[10]", "continuous-stream-test-sql-context", sparkConf.set("spark.sql.testkey", "true"))) protected def waitForRateSourceTriggers(query: ContinuousExecution, numTriggers: Int): Unit = { query.awaitEpoch(0) // This is called after waiting first epoch to be committed, so we can just treat // it as partition readers for rate source are already initialized. val firstCommittedTime = System.nanoTime() val deltaNs = (numTriggers * 1000 + 300) * 1000000L var toWaitNs = firstCommittedTime + deltaNs - System.nanoTime() while (toWaitNs > 0) { Thread.sleep(toWaitNs / 1000000) toWaitNs = firstCommittedTime + deltaNs - System.nanoTime() } } protected def waitForRateSourceCommittedValue( query: ContinuousExecution, desiredValue: Long, maxWaitTimeMs: Long): Unit = { def readHighestCommittedValue(c: ContinuousExecution): Option[Long] = { c.committedOffsets.lastOption.map { case (_, offset) => offset match { case o: RateStreamOffset => o.partitionToValueAndRunTimeMs.map { case (_, ValueRunTimeMsPair(value, _)) => value }.max } } } val maxWait = System.currentTimeMillis() + maxWaitTimeMs while (System.currentTimeMillis() < maxWait && readHighestCommittedValue(query).getOrElse(Long.MinValue) < desiredValue) { Thread.sleep(100) } if (System.currentTimeMillis() > maxWait) { logWarning(s"Couldn't reach desired value in $maxWaitTimeMs milliseconds!" + s"Current highest committed value is ${readHighestCommittedValue(query)}") } } // A continuous trigger that will only fire the initial time for the duration of a test. // This allows clean testing with manual epoch advancement. 
protected val longContinuousTrigger = Trigger.Continuous("1 hour") override protected val defaultTrigger = Trigger.Continuous(100) } class ContinuousSuite extends ContinuousSuiteBase { import IntegratedUDFTestUtils._ import testImplicits._ test("basic") { val input = ContinuousMemoryStream[Int] testStream(input.toDF())( AddData(input, 0, 1, 2), CheckAnswer(0, 1, 2), StopStream, AddData(input, 3, 4, 5), StartStream(), CheckAnswer(0, 1, 2, 3, 4, 5)) } test("SPARK-29642: basic with various types") { val input = ContinuousMemoryStream[String] testStream(input.toDF())( AddData(input, "0", "1", "2"), CheckAnswer("0", "1", "2")) val input2 = ContinuousMemoryStream[(String, Timestamp)] val timestamp = Timestamp.valueOf("2015-06-11 10:10:10.100") testStream(input2.toDF())( AddData(input2, ("0", timestamp), ("1", timestamp)), CheckAnswer(("0", timestamp), ("1", timestamp))) } test("map") { val input = ContinuousMemoryStream[Int] val df = input.toDF().map(_.getInt(0) * 2) testStream(df)( AddData(input, 0, 1), CheckAnswer(0, 2), StopStream, AddData(input, 2, 3, 4), StartStream(), CheckAnswer(0, 2, 4, 6, 8)) } test("flatMap") { val input = ContinuousMemoryStream[Int] val df = input.toDF().flatMap(r => Seq(0, r.getInt(0), r.getInt(0) * 2)) testStream(df)( AddData(input, 0, 1), CheckAnswer((0 to 1).flatMap(n => Seq(0, n, n * 2)): _*), StopStream, AddData(input, 2, 3, 4), StartStream(), CheckAnswer((0 to 4).flatMap(n => Seq(0, n, n * 2)): _*)) } test("filter") { val input = ContinuousMemoryStream[Int] val df = input.toDF().where('value > 2) testStream(df)( AddData(input, 0, 1), CheckAnswer(), StopStream, AddData(input, 2, 3, 4), StartStream(), CheckAnswer(3, 4)) } test("deduplicate") { val input = ContinuousMemoryStream[Int] val df = input.toDF().dropDuplicates() val except = intercept[AnalysisException] { testStream(df)(StartStream()) } assert(except.message.contains( "Continuous processing does not support Deduplicate operations.")) } test("timestamp") { val input = ContinuousMemoryStream[Int] val df = input.toDF().select(current_timestamp()) val except = intercept[AnalysisException] { testStream(df)(StartStream()) } assert(except.message.contains( "Continuous processing does not support current time operations.")) } test("subquery alias") { withTempView("memory") { val input = ContinuousMemoryStream[Int] input.toDF().createOrReplaceTempView("memory") val test = spark.sql("select value from memory where value > 2") testStream(test)( AddData(input, 0, 1), CheckAnswer(), StopStream, AddData(input, 2, 3, 4), StartStream(), CheckAnswer(3, 4)) } } test("repeatedly restart") { val input = ContinuousMemoryStream[Int] val df = input.toDF() testStream(df)( StartStream(), AddData(input, 0, 1), CheckAnswer(0, 1), StopStream, StartStream(), StopStream, StartStream(), StopStream, StartStream(), StopStream, AddData(input, 2, 3), StartStream(), CheckAnswer(0, 1, 2, 3), StopStream) } test("task failure kills the query") { val input = ContinuousMemoryStream[Int] val df = input.toDF() // Get an arbitrary task from this query to kill. It doesn't matter which one. var taskId: Long = -1 val listener = new SparkListener() { override def onTaskStart(start: SparkListenerTaskStart): Unit = { taskId = start.taskInfo.taskId } } spark.sparkContext.addSparkListener(listener) try { testStream(df)( StartStream(Trigger.Continuous(100)), AddData(input, 0, 1, 2, 3), Execute { _ => // Wait until a task is started, then kill its first attempt. 
eventually(timeout(streamingTimeout)) { assert(taskId != -1) } spark.sparkContext.killTaskAttempt(taskId) }, ExpectFailure[SparkException] { e => e.getCause != null && e.getCause.getCause.isInstanceOf[ContinuousTaskRetryException] }) } finally { spark.sparkContext.removeSparkListener(listener) } } test("query without test harness") { val df = spark.readStream .format("rate") .option("numPartitions", "2") .option("rowsPerSecond", "2") .load() .select('value) val query = df.writeStream .format("memory") .queryName("noharness") .trigger(Trigger.Continuous(100)) .start() val expected = Set(0, 1, 2, 3) val continuousExecution = query.asInstanceOf[StreamingQueryWrapper].streamingQuery.asInstanceOf[ContinuousExecution] waitForRateSourceCommittedValue(continuousExecution, expected.max, 20 * 1000) query.stop() val results = spark.read.table("noharness").collect() assert(expected.map(Row(_)).subsetOf(results.toSet), s"Result set ${results.toSet} are not a superset of $expected!") } Seq(TestScalaUDF("udf"), TestPythonUDF("udf"), TestScalarPandasUDF("udf")).foreach { udf => test(s"continuous mode with various UDFs - ${udf.prettyName}") { assume( shouldTestScalarPandasUDFs && udf.isInstanceOf[TestScalarPandasUDF] || shouldTestPythonUDFs && udf.isInstanceOf[TestPythonUDF] || udf.isInstanceOf[TestScalaUDF]) val input = ContinuousMemoryStream[Int] val df = input.toDF() testStream(df.select(udf(df("value")).cast("int")))( AddData(input, 0, 1, 2), CheckAnswer(0, 1, 2), StopStream, AddData(input, 3, 4, 5), StartStream(), CheckAnswer(0, 1, 2, 3, 4, 5)) } } } class ContinuousStressSuite extends ContinuousSuiteBase { import testImplicits._ test("only one epoch") { val df = spark.readStream .format("rate") .option("numPartitions", "5") .option("rowsPerSecond", "500") .load() .select('value) testStream(df)( StartStream(longContinuousTrigger), AwaitEpoch(0), Execute { exec => waitForRateSourceTriggers(exec.asInstanceOf[ContinuousExecution], 5) }, IncrementEpoch(), StopStream, CheckAnswerRowsContains(scala.Range(0, 2500).map(Row(_))) ) } test("automatic epoch advancement") { val df = spark.readStream .format("rate") .option("numPartitions", "5") .option("rowsPerSecond", "500") .load() .select('value) testStream(df)( StartStream(Trigger.Continuous(2012)), AwaitEpoch(0), Execute { exec => waitForRateSourceTriggers(exec.asInstanceOf[ContinuousExecution], 5) }, IncrementEpoch(), StopStream, CheckAnswerRowsContains(scala.Range(0, 2500).map(Row(_)))) } test("restarts") { val df = spark.readStream .format("rate") .option("numPartitions", "5") .option("rowsPerSecond", "500") .load() .select('value) testStream(df)( StartStream(Trigger.Continuous(1012)), AwaitEpoch(2), StopStream, StartStream(Trigger.Continuous(1012)), AwaitEpoch(4), StopStream, StartStream(Trigger.Continuous(1012)), AwaitEpoch(5), StopStream, StartStream(Trigger.Continuous(1012)), AwaitEpoch(6), StopStream, StartStream(Trigger.Continuous(1012)), AwaitEpoch(8), StopStream, StartStream(Trigger.Continuous(1012)), StopStream, StartStream(Trigger.Continuous(1012)), AwaitEpoch(15), StopStream, CheckAnswerRowsContains(scala.Range(0, 2500).map(Row(_)))) } } class ContinuousMetaSuite extends ContinuousSuiteBase { import testImplicits._ // We need to specify spark.sql.streaming.minBatchesToRetain to do the following test. 
override protected def createSparkSession = new TestSparkSession( new SparkContext( "local[10]", "continuous-stream-test-sql-context", sparkConf.set("spark.sql.testkey", "true") .set(MIN_BATCHES_TO_RETAIN.key, "2"))) test("SPARK-24351: check offsetLog/commitLog retained in the checkpoint directory") { withTempDir { checkpointDir => val input = ContinuousMemoryStream[Int] val df = input.toDF().mapPartitions(iter => { // Sleep the task thread for 300 ms to make sure epoch processing time 3 times // longer than epoch creating interval. So the gap between last committed // epoch and currentBatchId grows over time. Thread.sleep(300) iter.map(row => row.getInt(0) * 2) }) testStream(df)( StartStream(trigger = Trigger.Continuous(100), checkpointLocation = checkpointDir.getAbsolutePath), AddData(input, 1), CheckAnswer(2), // Make sure epoch 2 has been committed before the following validation. AwaitEpoch(2), StopStream, AssertOnQuery(q => { q.commitLog.getLatest() match { case Some((latestEpochId, _)) => val commitLogValidateResult = q.commitLog.get(latestEpochId - 1).isDefined && q.commitLog.get(latestEpochId - 2).isEmpty val offsetLogValidateResult = q.offsetLog.get(latestEpochId - 1).isDefined && q.offsetLog.get(latestEpochId - 2).isEmpty commitLogValidateResult && offsetLogValidateResult case None => false } }) ) } } } class ContinuousEpochBacklogSuite extends ContinuousSuiteBase { import testImplicits._ override protected def createSparkSession = new TestSparkSession( new SparkContext( "local[1]", "continuous-stream-test-sql-context", sparkConf.set("spark.sql.testkey", "true"))) // This test forces the backlog to overflow by not standing up enough executors for the query // to make progress. test("epoch backlog overflow") { withSQLConf((CONTINUOUS_STREAMING_EPOCH_BACKLOG_QUEUE_SIZE.key, "10")) { val df = spark.readStream .format("rate") .option("numPartitions", "2") .option("rowsPerSecond", "500") .load() .select('value) testStream(df)( StartStream(Trigger.Continuous(1)), ExpectFailure[IllegalStateException] { e => e.getMessage.contains("queue has exceeded its maximum") } ) } } }
witgo/spark
sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/ContinuousSuite.scala
Scala
apache-2.0
14,180
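Outside the test harness, the continuous-processing pattern the suite above exercises reduces to a rate source written to a memory sink under a continuous trigger; a spark-shell style sketch (assumes a running `spark` session; source options and query name are illustrative):

import org.apache.spark.sql.streaming.Trigger

val query = spark.readStream
  .format("rate")
  .option("numPartitions", "2")
  .option("rowsPerSecond", "2")
  .load()
  .select("value")
  .writeStream
  .format("memory")
  .queryName("rate_demo")
  .trigger(Trigger.Continuous("1 second"))
  .start()

// later: spark.read.table("rate_demo").show(); query.stop()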
package com.eevolution.context.dictionary.domain.api.repository

import com.eevolution.context.dictionary._

/**
 * Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 * Email: emeris.hernandez@e-evolution.com, http://www.e-evolution.com , http://github.com/EmerisScala
 * Created by emeris.hernandez@e-evolution.com , www.e-evolution.com on 06/11/17.
 */
trait ViewAttributeRepository [ViewAttribute , Int] extends api.Repostory [ViewAttribute , Int] {
}
adempiere/ADReactiveSystem
dictionary-api/src/main/scala/com/eevolution/context/dictionary/domain/api/repository/ViewAttributeRepository.scala
Scala
gpl-3.0
1,143
import scala.reflect.runtime.universe._
import scala.reflect.runtime.{currentMirror => cm}
import scala.tools.reflect.ToolBox

object Test extends dotty.runtime.LegacyApp {
  val tb = cm.mkToolBox()
  val idsym = tb.typecheck(q"type Id[X] = X").symbol.asType
  val idTC1 = idsym.info
  println(idTC1)
  println(appliedType(idTC1, List(typeOf[Int])))
  println("===")
  val idTC2 = idsym.toType.etaExpand
  println(idTC2)
  println(appliedType(idTC2, List(typeOf[Int])))
  println(appliedType(idTC2, List(typeOf[Int])).dealias)
}
yusuke2255/dotty
tests/pending/run/reflection-idtc.scala
Scala
bsd-3-clause
529
package org.bitcoins.commons.serializers import org.bitcoins.commons.jsonmodels.bitcoind.GetBlockHeaderResult import org.bitcoins.commons.jsonmodels.bitcoind.RpcOpts.LockUnspentOutputParameter import org.bitcoins.commons.serializers.JsonReaders.jsToSatoshis import org.bitcoins.core.api.dlc.wallet.db.IncomingDLCOfferDb import org.bitcoins.core.api.wallet.CoinSelectionAlgo import org.bitcoins.core.api.wallet.db.SpendingInfoDb import org.bitcoins.core.crypto._ import org.bitcoins.core.currency.{Bitcoins, Satoshis} import org.bitcoins.core.dlc.accounting.DLCWalletAccounting import org.bitcoins.core.hd.{AddressType, HDPath} import org.bitcoins.core.number.{Int32, UInt16, UInt32, UInt64} import org.bitcoins.core.protocol.blockchain.Block import org.bitcoins.core.protocol.dlc.models.DLCStatus._ import org.bitcoins.core.protocol.dlc.models._ import org.bitcoins.core.protocol.script.{ ScriptPubKey, ScriptWitness, ScriptWitnessV0, WitnessScriptPubKey } import org.bitcoins.core.protocol.tlv._ import org.bitcoins.core.protocol.transaction.{ Transaction, TransactionOutPoint, TransactionOutput } import org.bitcoins.core.protocol.{BitcoinAddress, BlockStamp} import org.bitcoins.core.psbt.InputPSBTRecord.PartialSignature import org.bitcoins.core.psbt.PSBT import org.bitcoins.core.serializers.PicklerKeys import org.bitcoins.core.util.TimeUtil import org.bitcoins.core.util.TimeUtil._ import org.bitcoins.core.wallet.fee.SatoshisPerVirtualByte import org.bitcoins.core.wallet.utxo.{AddressLabelTag, TxoState} import org.bitcoins.crypto._ import scodec.bits.ByteVector import ujson._ import upickle.default._ import java.io.File import java.net.{InetSocketAddress, URI} import java.nio.file.Path import java.time.Instant import java.util.Date import scala.util.Try object Picklers { implicit val pathPickler: ReadWriter[Path] = readwriter[String].bimap(_.toString, str => new File(str).toPath) implicit val inetSocketAddress: ReadWriter[InetSocketAddress] = readwriter[String].bimap( addr => s"${addr.getHostName}:${addr.getPort}", str => { val uri = new URI("tcp://" + str) InetSocketAddress.createUnresolved(uri.getHost, uri.getPort) }) implicit val byteVectorPickler: ReadWriter[ByteVector] = readwriter[String].bimap(_.toHex, str => ByteVector.fromValidHex(str)) implicit val bitcoinAddressPickler: ReadWriter[BitcoinAddress] = readwriter[String] .bimap(_.value, BitcoinAddress.fromString) implicit val bitcoinsPickler: ReadWriter[Bitcoins] = readwriter[Double].bimap(_.toBigDecimal.toDouble, Bitcoins(_)) implicit val satoshisPickler: ReadWriter[Satoshis] = readwriter[Long].bimap(_.toLong, Satoshis.apply) implicit val schnorrNoncePickler: ReadWriter[SchnorrNonce] = readwriter[String].bimap(_.hex, SchnorrNonce.fromHex) implicit val enumEventDescriptorPickler: ReadWriter[ EnumEventDescriptorV0TLV] = readwriter[String].bimap(_.hex, EnumEventDescriptorV0TLV.fromHex) implicit val digitDecompEventDescriptorPickler: ReadWriter[ DigitDecompositionEventDescriptorV0TLV] = readwriter[String].bimap(_.hex, DigitDecompositionEventDescriptorV0TLV.fromHex) implicit val eventDescriptorPickler: ReadWriter[EventDescriptorTLV] = readwriter[String].bimap(_.hex, EventDescriptorTLV.fromHex) implicit val oracleEventVoPickler: ReadWriter[OracleEventV0TLV] = readwriter[String].bimap(_.hex, OracleEventV0TLV.fromHex) implicit val instantPickler: ReadWriter[Instant] = readwriter[Long].bimap(_.getEpochSecond, Instant.ofEpochSecond) implicit val datePickler: ReadWriter[Date] = readwriter[String].bimap(TimeUtil.iso8601ToString, TimeUtil.iso8601ToDate) 
implicit val aesPasswordPickler: ReadWriter[AesPassword] = readwriter[String].bimap(_.toStringSensitive, AesPassword.fromString) implicit val sha256DigestBEPickler: ReadWriter[Sha256DigestBE] = readwriter[String].bimap(_.hex, Sha256DigestBE.fromHex) implicit val sha256DigestPickler: ReadWriter[Sha256Digest] = readwriter[String].bimap(_.hex, Sha256Digest.fromHex) implicit val doubleSha256DigestBEPickler: ReadWriter[DoubleSha256DigestBE] = readwriter[String].bimap(_.hex, DoubleSha256DigestBE.fromHex) implicit val uInt32Pickler: ReadWriter[UInt32] = readwriter[Long].bimap(_.toLong, long => UInt32(long)) implicit val satoshisPerVirtualBytePickler: ReadWriter[ SatoshisPerVirtualByte] = readwriter[Long] .bimap(_.toLong, long => SatoshisPerVirtualByte(Satoshis(long))) implicit val oracleInfoPickler: ReadWriter[OracleInfo] = readwriter[String].bimap(_.hex, OracleInfo.fromHex) implicit val oracleAnnouncementPickler: ReadWriter[OracleAnnouncementTLV] = readwriter[String].bimap(_.hex, OracleAnnouncementTLV.fromHex) implicit val contractInfoPickler: ReadWriter[ContractInfo] = readwriter[String].bimap(_.hex, ContractInfo.fromHex) implicit val contractInfoTLVPickler: ReadWriter[ContractInfoV0TLV] = readwriter[String].bimap(_.hex, ContractInfoV0TLV.fromHex) implicit val schnorrDigitalSignaturePickler: ReadWriter[ SchnorrDigitalSignature] = readwriter[String].bimap(_.hex, SchnorrDigitalSignature.fromHex) implicit val partialSignaturePickler: ReadWriter[PartialSignature] = readwriter[String].bimap(_.hex, PartialSignature.fromHex) implicit val lnMessageDLCOfferTLVPickler: ReadWriter[LnMessage[DLCOfferTLV]] = readwriter[String].bimap(_.hex, LnMessageFactory(DLCOfferTLV).fromHex) implicit val txoStatePickler: ReadWriter[TxoState] = { readwriter[String].bimap(_.toString.toLowerCase, TxoState.fromString) } implicit val privKeyPathPickler: ReadWriter[HDPath] = { readwriter[String].bimap(_.toString, HDPath.fromString) } implicit val scriptPubKeyPickler: ReadWriter[ScriptPubKey] = { readwriter[String].bimap(_.asmHex, ScriptPubKey.fromAsmHex(_)) } private def parseWitnessElements(arr: ujson.Arr): ScriptWitness = { val stackElements = arr.value.toVector.map { case obj: ujson.Obj => val witnessStr = obj(PicklerKeys.witnessKey).str ByteVector.fromValidHex(witnessStr) case x: ujson.Value => sys.error(s"Expected witness json object, got=$x") } ScriptWitness.apply(stackElements.reverse) } private def writeWitnessElements(witness: ScriptWitness): ujson.Arr = { val vec: Vector[ujson.Obj] = witness.stack.reverse.map { w => ujson.Obj(PicklerKeys.witnessKey -> Str(w.toHex)) }.toVector ujson.Arr.from(vec) } implicit val scriptWitnessPickler: ReadWriter[ScriptWitness] = { readwriter[Arr].bimap(writeWitnessElements, parseWitnessElements) } private def writeOutput(o: TransactionOutput): Obj = { Obj( PicklerKeys.satoshisKey -> writeJs(o.value.satoshis), PicklerKeys.scriptPubKeyKey -> writeJs(o.scriptPubKey) ) } private def readOutput(obj: Obj): TransactionOutput = { val sats = Satoshis(obj(PicklerKeys.satoshisKey).num.toLong) val scriptPubKey = ScriptPubKey.fromAsmHex(obj(PicklerKeys.scriptPubKeyKey).str) TransactionOutput(sats, scriptPubKey) } implicit val txOutputPickler: ReadWriter[TransactionOutput] = readwriter[Obj].bimap(writeOutput, readOutput) private def writeSpendingInfoDb(si: SpendingInfoDb): Obj = { Obj( PicklerKeys.idKey -> { si.id match { case None => ujson.Null case Some(id) => Num(id.toDouble) } }, PicklerKeys.outPointKey -> writeJs(si.outPoint), PicklerKeys.outputKey -> writeJs(si.output), PicklerKeys.hdPathKey -> 
writeJs(si.privKeyPath), PicklerKeys.redeemScriptKey -> writeJs(si.redeemScriptOpt), PicklerKeys.witnessKey -> writeJs(si.scriptWitnessOpt), PicklerKeys.stateKey -> writeJs(si.state), PicklerKeys.txIdKey -> writeJs(si.txid), PicklerKeys.spendingTxIdKey -> writeJs(si.spendingTxIdOpt) ) } private def readSpendingInfoDb(obj: Obj): SpendingInfoDb = { val id = obj(PicklerKeys.idKey).numOpt.map(_.toLong) val outpoint = upickle.default.read[TransactionOutPoint](obj(PicklerKeys.outPointKey)) val output = upickle.default.read[TransactionOutput](obj(PicklerKeys.outputKey)) val hdPath = upickle.default.read[HDPath](obj(PicklerKeys.hdPathKey)) val redeemScript = upickle.default.read[Option[ScriptPubKey]]( obj(PicklerKeys.redeemScriptKey)) val scriptWitness = upickle.default.read[Option[ScriptWitness]](obj(PicklerKeys.witnessKey)) val state = upickle.default.read[TxoState](obj(PicklerKeys.stateKey)) val txId = upickle.default.read[DoubleSha256DigestBE](obj(PicklerKeys.txIdKey)) val spendingTxId = upickle.default.read[Option[DoubleSha256DigestBE]]( obj(PicklerKeys.spendingTxIdKey)) SpendingInfoDb( id = id, outpoint = outpoint, output = output, hdPath = hdPath, redeemScriptOpt = redeemScript, scriptWitnessOpt = scriptWitness, state = state, txId = txId, spendingTxIdOpt = spendingTxId ) } implicit val spendingInfoDbPickler: ReadWriter[SpendingInfoDb] = { readwriter[Obj].bimap(writeSpendingInfoDb, readSpendingInfoDb) } private def parseU64(str: ujson.Str): UInt64 = { UInt64(BigInt(str.str)) } private def parseFundingInput(obj: ujson.Obj): FundingInputTLV = { val inputSerialId = parseU64(obj(PicklerKeys.inputSerialIdKey).str) val prevTx = Transaction.fromHex(obj(PicklerKeys.prevTxKey).str) val prevTxVout = obj(PicklerKeys.prevTxVoutKey).num.toLong val sequence = UInt32(obj(PicklerKeys.sequenceKey).num.toLong) val maxWitnessLen = UInt16(obj(PicklerKeys.maxWitnessLenKey).num.toLong) val redeemScriptStr = obj(PicklerKeys.redeemScriptKey).str val redeemScriptOpt = if (redeemScriptStr.nonEmpty) { val spk = WitnessScriptPubKey.fromAsmHex(obj(PicklerKeys.redeemScriptKey).str) Some(spk) } else { None } FundingInputV0TLV( inputSerialId, prevTx, UInt32(prevTxVout), sequence, maxWitnessLen, redeemScriptOpt ) } private def parseFundingInputs(arr: ujson.Arr): Vector[FundingInputTLV] = { arr.value.toVector.map { case inputObj: ujson.Obj => parseFundingInput(inputObj) case x: ujson.Value => sys.error(s"Expected obj, got=$x") } } private def parseCetAdaptorSignatures(obj: ujson.Obj): CETSignaturesTLV = { val ecAdaptorSignaturesArr = obj(PicklerKeys.ecdsaAdaptorSignaturesKey).arr val adaptorSigs = parseAdaptorSignatures(ecAdaptorSignaturesArr) CETSignaturesV0TLV(adaptorSigs) } private def parseAdaptorSignatures( arr: ujson.Arr): Vector[ECAdaptorSignature] = { arr.value.toVector.map { case obj: ujson.Obj => ECAdaptorSignature.fromHex(obj(PicklerKeys.signatureKey).str) case x: ujson.Value => sys.error(s"Excpected string for ecdsa adaptor siganture, got obj=$x") } } private def writeAdaptorSignatures( sigs: Vector[ECAdaptorSignature]): Vector[ujson.Obj] = { sigs.map { sig => ujson.Obj(PicklerKeys.signatureKey -> Str(sig.hex)) } } private def writeCetAdaptorSigs( cetSignaturesTLV: CETSignaturesTLV): ujson.Obj = { cetSignaturesTLV match { case v0: CETSignaturesV0TLV => val sigsVec = writeAdaptorSignatures(v0.sigs) ujson.Obj( PicklerKeys.ecdsaAdaptorSignaturesKey -> ujson.Arr.from(sigsVec)) } } private def readAcceptTLV(obj: ujson.Obj): DLCAcceptTLV = { val tempContractId = 
Sha256Digest.fromHex(obj(PicklerKeys.tempContractIdKey).str) val acceptCollateral = Satoshis( obj(PicklerKeys.acceptCollateralKey).num.toLong) val fundingPubKey = ECPublicKey.fromHex(obj(PicklerKeys.fundingPubKeyKey).str) val payoutSpk = ScriptPubKey.fromAsmHex(obj(PicklerKeys.payoutSpkKey).str) val payoutSerialId = parseU64(obj(PicklerKeys.payoutSerialIdKey).str) val fundingInputs = parseFundingInputs( obj(PicklerKeys.fundingInputsKey).arr) val changeSpk = ScriptPubKey.fromAsmHex(obj(PicklerKeys.changeSpkKey).str) val changeSerialId = parseU64(obj(PicklerKeys.changeSerialIdKey).str) val cetAdaptorSigs = parseCetAdaptorSignatures( obj(PicklerKeys.cetAdaptorSignaturesKey).obj) val refundSignature = ECDigitalSignature.fromHex(obj(PicklerKeys.refundSignatureKey).str) val negotiationFields = { obj(PicklerKeys.negotiationFieldsKey).strOpt match { case Some(str) => sys.error(s"Don't know how to parse negotiation fields, got=$str") case None => NegotiationFieldsTLV.empty } } val acceptTLV = DLCAcceptTLV( tempContractId = tempContractId, totalCollateralSatoshis = acceptCollateral, fundingPubKey = fundingPubKey, payoutSPK = payoutSpk, payoutSerialId = payoutSerialId, fundingInputs = fundingInputs, changeSPK = changeSpk, changeSerialId = changeSerialId, cetSignatures = cetAdaptorSigs, refundSignature = refundSignature, negotiationFields = negotiationFields ) acceptTLV } private def writeAcceptTLV(accept: DLCAcceptTLV): ujson.Obj = { Obj( PicklerKeys.tempContractIdKey -> Str(accept.tempContractId.hex), PicklerKeys.acceptCollateralKey -> Num( accept.totalCollateralSatoshis.toLong.toDouble), PicklerKeys.fundingPubKeyKey -> Str(accept.fundingPubKey.hex), PicklerKeys.payoutSpkKey -> Str(accept.payoutSPK.asmHex), PicklerKeys.payoutSerialIdKey -> Str( accept.payoutSerialId.toBigInt.toString()), PicklerKeys.fundingInputsKey -> writeJs(accept.fundingInputs), PicklerKeys.changeSpkKey -> Str(accept.changeSPK.asmHex), PicklerKeys.changeSerialIdKey -> Str( accept.changeSerialId.toBigInt.toString()), PicklerKeys.cetAdaptorSignaturesKey -> writeCetAdaptorSigs( accept.cetSignatures), PicklerKeys.refundSignatureKey -> Str(accept.refundSignature.hex), PicklerKeys.negotiationFieldsKey -> ujson.Null ) } private def parseFundingSignatures(obj: ujson.Obj): FundingSignaturesTLV = { val fundingSignatures: Vector[ujson.Value] = obj( PicklerKeys.fundingSignaturesKey).arr.toVector val witV0 = paresFundingSignaturesArr(fundingSignatures) FundingSignaturesV0TLV(witV0) } private def paresFundingSignaturesArr( arr: Vector[ujson.Value]): Vector[ScriptWitnessV0] = { arr.map { case obj: ujson.Obj => val witnessElementsArr = obj(PicklerKeys.witnessElementsKey).arr val witness: ScriptWitness = { parseWitnessElements(witnessElementsArr) } val scriptWitnessV0 = witness.asInstanceOf[ScriptWitnessV0] scriptWitnessV0 case x => sys.error(s"Expected array of objects for funding signatures, got=$x") } } private def writeFundingSignatures( fundingSigs: FundingSignaturesTLV): ujson.Obj = { val sigs: Vector[ujson.Obj] = fundingSigs match { case v0: FundingSignaturesV0TLV => val witnessJson: Vector[Obj] = { v0.witnesses.map { wit => val witJson = writeWitnessElements(wit) ujson.Obj(PicklerKeys.witnessElementsKey -> witJson) } } witnessJson } ujson.Obj( PicklerKeys.fundingSignaturesKey -> ujson.Arr.from(sigs) ) } private def readSignTLV(obj: ujson.Obj): DLCSignTLV = { val contractId = ByteVector.fromValidHex(obj(PicklerKeys.contractIdKey).str) val adaptorSigs = parseCetAdaptorSignatures( obj(PicklerKeys.cetAdaptorSignaturesKey).obj) val 
refundSignature = ECDigitalSignature.fromHex(obj(PicklerKeys.refundSignatureKey).str) val fundingSignatures = parseFundingSignatures( obj(PicklerKeys.fundingSignaturesKey).obj) val signTLV = DLCSignTLV(contractId, adaptorSigs, refundSignature, fundingSignatures) signTLV } private def writeSignTLV(sign: DLCSignTLV): ujson.Obj = { ujson.Obj( PicklerKeys.contractIdKey -> sign.contractId.toHex, PicklerKeys.cetAdaptorSignaturesKey -> writeCetAdaptorSigs( sign.cetSignatures), PicklerKeys.refundSignatureKey -> ujson.Str(sign.refundSignature.hex), PicklerKeys.fundingSignaturesKey -> writeFundingSignatures(sign.fundingSignatures) ) } implicit val dlcAcceptTLVPickler: ReadWriter[DLCAcceptTLV] = { readwriter[ujson.Obj].bimap(writeAcceptTLV, readAcceptTLV) } implicit val dlcSignTLVPickler: ReadWriter[DLCSignTLV] = { readwriter[ujson.Obj].bimap(writeSignTLV, readSignTLV) } implicit val lnMessageDLCAcceptTLVPickler: ReadWriter[ LnMessage[DLCAcceptTLV]] = readwriter[String].bimap(_.hex, LnMessageFactory(DLCAcceptTLV).fromHex) implicit val lnMessageDLCSignTLVPickler: ReadWriter[LnMessage[DLCSignTLV]] = readwriter[String].bimap(_.hex, LnMessageFactory(DLCSignTLV).fromHex) implicit val blockStampPickler: ReadWriter[BlockStamp] = readwriter[String].bimap(_.mkString, BlockStamp.fromString) implicit val psbtPickler: ReadWriter[PSBT] = readwriter[String].bimap(_.base64, PSBT.fromString) implicit val transactionPickler: ReadWriter[Transaction] = readwriter[String].bimap(_.hex, Transaction.fromHex) implicit val blockPickler: ReadWriter[Block] = { readwriter[String].bimap(_.hex, Block.fromHex) } implicit val extPubKeyPickler: ReadWriter[ExtPublicKey] = readwriter[String].bimap(_.toString, ExtPublicKey.fromString) implicit val transactionOutPointPickler: ReadWriter[TransactionOutPoint] = readwriter[String].bimap(_.hex, TransactionOutPoint.fromHex) implicit val coinSelectionAlgoPickler: ReadWriter[CoinSelectionAlgo] = readwriter[String].bimap(_.toString, CoinSelectionAlgo.fromString) implicit val addressLabelTagPickler: ReadWriter[AddressLabelTag] = readwriter[String].bimap(_.name, AddressLabelTag) implicit val lockUnspentOutputParameterPickler: ReadWriter[ LockUnspentOutputParameter] = readwriter[Value].bimap(_.toJson, LockUnspentOutputParameter.fromJson) // can't make implicit because it will overlap with ones needed for cli val announcementV0JsonWriter: Writer[OracleAnnouncementV0TLV] = writer[Obj].comap { announcement => val noncesJson = announcement.eventTLV.nonces.map { nonce => Str(nonce.hex) } val descriptorJson = announcement.eventTLV.eventDescriptor match { case EnumEventDescriptorV0TLV(outcomes) => Obj("outcomes" -> outcomes.map(Str(_)), "hex" -> announcement.eventTLV.eventDescriptor.hex) case numeric: NumericEventDescriptorTLV => Obj( "base" -> Num(numeric.base.toLong.toDouble), "isSigned" -> Bool(numeric.isSigned), "unit" -> Str(numeric.unit), "precision" -> Num(numeric.precision.toLong.toDouble), "hex" -> announcement.eventTLV.eventDescriptor.hex ) } val maturityStr = TimeUtil.iso8601ToString(Date.from(announcement.eventTLV.maturation)) val eventJson = Obj("nonces" -> noncesJson, "maturity" -> Str(maturityStr), "descriptor" -> descriptorJson, "eventId" -> Str(announcement.eventTLV.eventId)) Obj( "announcementSignature" -> Str(announcement.announcementSignature.hex), "publicKey" -> Str(announcement.publicKey.hex), "event" -> eventJson, "hex" -> announcement.hex ) } // can't make implicit because it will overlap with ones needed for cli val oracleAnnouncementTLVJsonWriter: Writer[OracleAnnouncementTLV] = 
writer[Value].comap { case v0: OracleAnnouncementV0TLV => writeJs(v0)(announcementV0JsonWriter) } // can't make implicit because it will overlap with ones needed for cli val oracleAttestmentV0Writer: Writer[OracleAttestmentV0TLV] = writer[Obj].comap { attestments => val sigsJson = attestments.sigs.map(sig => Str(sig.hex)) val valuesJson = attestments.outcomes.map(Str(_)) Obj("eventId" -> Str(attestments.eventId), "signatures" -> sigsJson, "values" -> valuesJson, "hex" -> attestments.hex) } implicit val fundingInputV0Writer: Writer[FundingInputTLV] = writer[Value].comap { case v0: FundingInputV0TLV => writeJs(v0)(fundingInputWriter) } implicit val fundingInputWriter: Writer[FundingInputV0TLV] = writer[Obj].comap { input => import input._ val redeemScriptJson = redeemScriptOpt match { case Some(rs) => Str(rs.hex) case None => Str("") } Obj( "inputSerialId" -> Str(inputSerialId.toBigInt.toString()), "prevTx" -> Str(prevTx.hex), "prevTxVout" -> Num(prevTxVout.toLong.toDouble), "sequence" -> Num(sequence.toLong.toDouble), "maxWitnessLen" -> Num(maxWitnessLen.toLong.toDouble), "redeemScript" -> redeemScriptJson ) } implicit val tlvPointReader: Reader[TLVPoint] = { reader[Obj].map { obj: Obj => val map = obj.value val outcome = map(PicklerKeys.outcomeKey).num.toLong val payout = jsToSatoshis(map(PicklerKeys.payoutKey)) val extraPrecision = map(PicklerKeys.extraPrecisionKey).num.toInt TLVPoint(outcome, payout, extraPrecision) } } implicit val tlvPointWriter: Writer[TLVPoint] = { writer[Obj].comap { point => Obj( PicklerKeys.outcomeKey -> Num(point.outcome.toDouble), PicklerKeys.payoutKey -> Num(point.value.toLong.toDouble), PicklerKeys.extraPrecisionKey -> Num(point.extraPrecision.toDouble) ) } } implicit val hyperbolaPayoutCurvePieceTLVWriter: Writer[ HyperbolaPayoutCurvePieceTLV] = { writer[Obj].comap { piece => Obj( PicklerKeys.usePositivePiece -> Bool(piece.usePositivePiece), PicklerKeys.translateOutcome -> Num( piece.translateOutcome.toBigDecimal.toDouble), PicklerKeys.translatePayout -> Num( piece.translatePayout.toBigDecimal.toDouble), PicklerKeys.a -> Num(piece.a.toBigDecimal.toDouble), PicklerKeys.b -> Num(piece.b.toBigDecimal.toDouble), PicklerKeys.c -> Num(piece.c.toBigDecimal.toDouble), PicklerKeys.d -> Num(piece.d.toBigDecimal.toDouble) ) } } implicit val payoutFunctionV0TLVWriter: Writer[PayoutFunctionV0TLV] = { def endpoint(json: Value, isEndpoint: Boolean): Value = json match { case obj: Obj => //drop old value on the floor if there is one obj.value.put(PicklerKeys.isEndpointKey, Bool(isEndpoint)) Obj(obj.value) case v: Value => v } writer[Obj].comap { payoutFunc => val endPointsJs = payoutFunc.endpoints.map { point => endpoint(writeJs(point), isEndpoint = true) } val midPointJs = payoutFunc.pieces.flatMap { case polynomialPiece: PolynomialPayoutCurvePieceTLV => polynomialPiece.midpoints.map { point => endpoint(writeJs(point), isEndpoint = false) } case hyperbolaPiece: HyperbolaPayoutCurvePieceTLV => Vector(writeJs(hyperbolaPiece)) } val points = (endPointsJs ++ midPointJs).sortBy(_.obj(PicklerKeys.outcomeKey).num) Obj(PicklerKeys.pointsKey -> points) } } implicit val payoutFunctionV0TLVReader: Reader[PayoutFunctionV0TLV] = { reader[Obj].map { obj: Obj => val pointsArr = obj(PicklerKeys.pointsKey).arr val points: Vector[TLVPoint] = pointsArr.map { case x @ (_: Arr | _: Num | Null | _: Bool | _: Str) => sys.error( s"Cannot have $x when parsing payout curve points, expected json object") case obj: Obj => upickle.default.read[TLVPoint](obj) }.toVector DLCPayoutCurve 
.fromPoints(points, serializationVersion = DLCSerializationVersion.Beta) .toTLV } } implicit val roundingIntervalsV0TLVWriter: Writer[RoundingIntervalsV0TLV] = writer[Obj].comap { roundingIntervals => import roundingIntervals._ val intervalsJs = intervalStarts.map { i => Obj("beginInterval" -> Num(i._1.toDouble), "roundingMod" -> Num(i._2.toLong.toDouble)) } Obj("intervals" -> intervalsJs) } implicit val contractDescriptorV0: ReadWriter[ContractDescriptorV0TLV] = { readwriter[Value].bimap(contractV0Writer, contractV0Reader) } private def contractV0Reader(value: Value): ContractDescriptorV0TLV = { parseContractDescriptor(value) } private def contractV0Writer(v0: ContractDescriptorV0TLV): Value = { val outcomesJs: ujson.Obj = v0.outcomes.map { case (outcome, payout) => outcome -> Num(payout.toLong.toDouble) } Obj(PicklerKeys.outcomesKey -> outcomesJs, "hex" -> v0.hex) } implicit val contractDescriptorV1Writer: Writer[ContractDescriptorV1TLV] = writer[Obj].comap { v1 => import v1._ Obj("numDigits" -> Num(numDigits.toDouble), "payoutFunction" -> writeJs(payoutFunction), "roundingIntervals" -> writeJs(roundingIntervals), "hex" -> v1.hex) } implicit val contractDescriptorWriter: Writer[ContractDescriptorTLV] = writer[Value].comap { case v0: ContractDescriptorV0TLV => writeJs(v0)(contractDescriptorV0) case v1: ContractDescriptorV1TLV => writeJs(v1)(contractDescriptorV1Writer) } implicit val oracleInfoV0TLVWriter: Writer[OracleInfoV0TLV] = writer[Obj].comap { oracleInfo => Obj( "announcement" -> writeJs(oracleInfo.announcement)( oracleAnnouncementTLVJsonWriter)) } implicit val oracleInfoV1TLVWriter: Writer[OracleInfoV1TLV] = writer[Obj].comap { oracleInfo => import oracleInfo._ Obj("threshold" -> Num(threshold.toDouble), "announcements" -> oracles.map(o => writeJs(o)(oracleAnnouncementTLVJsonWriter))) } implicit val oracleParamsV0TLVWriter: Writer[OracleParamsV0TLV] = writer[Obj].comap { params => import params._ Obj("maxErrorExp" -> Num(maxErrorExp.toDouble), "minFailExp" -> Num(minFailExp.toDouble), "maximizeCoverage" -> Bool(maximizeCoverage)) } implicit val oracleParamsTLVWriter: Writer[OracleParamsTLV] = writer[Value].comap { case v0: OracleParamsV0TLV => writeJs(v0) } implicit val oracleInfoV2TLVWriter: Writer[OracleInfoV2TLV] = writer[Obj].comap { oracleInfo => import oracleInfo._ Obj("threshold" -> Num(threshold.toDouble), "announcements" -> oracles.map(o => writeJs(o)(oracleAnnouncementTLVJsonWriter)), "params" -> writeJs(params)) } implicit val oracleInfoTLVWriter: Writer[OracleInfoTLV] = writer[Value].comap { case v0: OracleInfoV0TLV => writeJs(v0) case v1: OracleInfoV1TLV => writeJs(v1) case v2: OracleInfoV2TLV => writeJs(v2) } // can't make implicit because it will overlap with ones needed for cli val contractInfoV0TLVJsonWriter: Writer[ContractInfoV0TLV] = writer[Obj].comap { contractInfo => import contractInfo._ Obj( PicklerKeys.totalCollateralKey -> writeJs(totalCollateral), PicklerKeys.contractDescriptorKey -> writeJs(contractDescriptor), PicklerKeys.oracleInfoKey -> writeJs(oracleInfo) ) } val contractInfoV1TLVJsonWriter: Writer[ContractInfoV1TLV] = { writer[Obj].comap { contractInfo => val arrayVec: Vector[ujson.Obj] = contractInfo.contractOraclePairs.map { case (c, o) => val contractDescriptorJson = writeJs(c) val oracleInfoJson = writeJs(o) ujson.Obj(PicklerKeys.contractDescriptorKey -> contractDescriptorJson, PicklerKeys.oracleInfoKey -> oracleInfoJson) } val arrayJson = ujson.Arr.from(arrayVec) Obj( PicklerKeys.totalCollateralKey -> Num( 
contractInfo.totalCollateral.toLong.toDouble), PicklerKeys.pairsKey -> arrayJson ) } } val contractInfoJsonWriter: Writer[ContractInfoTLV] = { writer[ujson.Value].comap { case contractInfoV0TLV: ContractInfoV0TLV => writeJs(contractInfoV0TLV)(contractInfoV0TLVJsonWriter) case contractInfoV1TLV: ContractInfoV1TLV => writeJs(contractInfoV1TLV)(contractInfoV1TLVJsonWriter) } } implicit val offerTLVWriter: Writer[DLCOfferTLV] = writer[Obj].comap { offer => import offer._ Obj( "contractFlags" -> Str(contractFlags.toHexString), "chainHash" -> Str(chainHash.hex), "contractInfo" -> writeJs(contractInfo)(contractInfoJsonWriter), "fundingPubKey" -> Str(fundingPubKey.hex), "payoutSPK" -> Str(payoutSPK.hex), "payoutSerialId" -> Num(payoutSerialId.toBigInt.toDouble), "offerCollateral" -> Num(totalCollateralSatoshis.toLong.toDouble), "fundingInputs" -> fundingInputs.map(i => writeJs(i)), "changeSPK" -> Str(changeSPK.hex), "changeSerialId" -> Str(changeSerialId.toBigInt.toString()), "fundOutputSerialId" -> Num(fundOutputSerialId.toBigInt.toDouble), "feeRatePerVb" -> Num(feeRate.toLong.toDouble), "cetLocktime" -> Num(contractMaturityBound.toUInt32.toLong.toDouble), "refundLocktime" -> Num(contractTimeout.toUInt32.toLong.toDouble), PicklerKeys.tempContractIdKey -> Str(offer.tempContractId.hex) ) } implicit val offeredW: Writer[Offered] = writer[Obj].comap { offered => import offered._ Obj( "state" -> Str(statusString), "dlcId" -> Str(dlcId.hex), "isInitiator" -> Bool(isInitiator), "lastUpdated" -> Str(iso8601ToString(lastUpdated)), PicklerKeys.tempContractIdKey -> Str(tempContractId.hex), "contractInfo" -> Str(contractInfo.hex), "contractMaturity" -> Num( timeouts.contractMaturity.toUInt32.toLong.toDouble), "contractTimeout" -> Num( timeouts.contractTimeout.toUInt32.toLong.toDouble), "feeRate" -> Num(feeRate.toLong.toDouble), "totalCollateral" -> Num(totalCollateral.satoshis.toLong.toDouble), "localCollateral" -> Num(localCollateral.satoshis.toLong.toDouble), "remoteCollateral" -> Num(remoteCollateral.satoshis.toLong.toDouble) ) } implicit val acceptedComputingAdaptorSigsW: Writer[ AcceptedComputingAdaptorSigs] = writer[Obj].comap { accepted => import accepted._ Obj( "state" -> Str(statusString), "dlcId" -> Str(dlcId.hex), "isInitiator" -> Bool(isInitiator), "lastUpdated" -> Str(iso8601ToString(lastUpdated)), PicklerKeys.tempContractIdKey -> Str(tempContractId.hex), "contractId" -> Str(contractId.toHex), "contractInfo" -> Str(contractInfo.hex), "contractMaturity" -> Num( timeouts.contractMaturity.toUInt32.toLong.toDouble), "contractTimeout" -> Num( timeouts.contractTimeout.toUInt32.toLong.toDouble), "feeRate" -> Num(feeRate.toLong.toDouble), "totalCollateral" -> Num(totalCollateral.satoshis.toLong.toDouble), "localCollateral" -> Num(localCollateral.satoshis.toLong.toDouble), "remoteCollateral" -> Num(remoteCollateral.satoshis.toLong.toDouble) ) } implicit val acceptedW: Writer[Accepted] = writer[Obj].comap { accepted => import accepted._ Obj( "state" -> Str(statusString), "dlcId" -> Str(dlcId.hex), "isInitiator" -> Bool(isInitiator), "lastUpdated" -> Str(iso8601ToString(lastUpdated)), PicklerKeys.tempContractIdKey -> Str(tempContractId.hex), "contractId" -> Str(contractId.toHex), "contractInfo" -> Str(contractInfo.hex), "contractMaturity" -> Num( timeouts.contractMaturity.toUInt32.toLong.toDouble), "contractTimeout" -> Num( timeouts.contractTimeout.toUInt32.toLong.toDouble), "feeRate" -> Num(feeRate.toLong.toDouble), "totalCollateral" -> Num(totalCollateral.satoshis.toLong.toDouble), "localCollateral" -> 
Num(localCollateral.satoshis.toLong.toDouble), "remoteCollateral" -> Num(remoteCollateral.satoshis.toLong.toDouble) ) } implicit val signedComputingAdaptorSigsW: Writer[SignedComputingAdaptorSigs] = writer[Obj].comap { signed => import signed._ Obj( "state" -> Str(statusString), "dlcId" -> Str(dlcId.hex), "isInitiator" -> Bool(isInitiator), "lastUpdated" -> Str(iso8601ToString(lastUpdated)), PicklerKeys.tempContractIdKey -> Str(tempContractId.hex), "contractId" -> Str(contractId.toHex), "contractInfo" -> Str(contractInfo.hex), "contractMaturity" -> Num( timeouts.contractMaturity.toUInt32.toLong.toDouble), "contractTimeout" -> Num( timeouts.contractTimeout.toUInt32.toLong.toDouble), "feeRate" -> Num(feeRate.toLong.toDouble), "totalCollateral" -> Num(totalCollateral.satoshis.toLong.toDouble), "localCollateral" -> Num(localCollateral.satoshis.toLong.toDouble), "remoteCollateral" -> Num(remoteCollateral.satoshis.toLong.toDouble), "fundingTxId" -> Str(fundingTxId.hex) ) } implicit val signedW: Writer[Signed] = writer[Obj].comap { signed => import signed._ Obj( "state" -> Str(statusString), "dlcId" -> Str(dlcId.hex), "isInitiator" -> Bool(isInitiator), "lastUpdated" -> Str(iso8601ToString(lastUpdated)), PicklerKeys.tempContractIdKey -> Str(tempContractId.hex), "contractId" -> Str(contractId.toHex), "contractInfo" -> Str(contractInfo.hex), "contractMaturity" -> Num( timeouts.contractMaturity.toUInt32.toLong.toDouble), "contractTimeout" -> Num( timeouts.contractTimeout.toUInt32.toLong.toDouble), "feeRate" -> Num(feeRate.toLong.toDouble), "totalCollateral" -> Num(totalCollateral.satoshis.toLong.toDouble), "localCollateral" -> Num(localCollateral.satoshis.toLong.toDouble), "remoteCollateral" -> Num(remoteCollateral.satoshis.toLong.toDouble), "fundingTxId" -> Str(fundingTxId.hex) ) } implicit val broadcastedW: Writer[Broadcasted] = writer[Obj].comap { broadcasted => import broadcasted._ Obj( "state" -> Str(statusString), "dlcId" -> Str(dlcId.hex), "isInitiator" -> Bool(isInitiator), "lastUpdated" -> Str(iso8601ToString(lastUpdated)), PicklerKeys.tempContractIdKey -> Str(tempContractId.hex), "contractId" -> Str(contractId.toHex), "contractInfo" -> Str(contractInfo.hex), "contractMaturity" -> Num( timeouts.contractMaturity.toUInt32.toLong.toDouble), "contractTimeout" -> Num( timeouts.contractTimeout.toUInt32.toLong.toDouble), "feeRate" -> Num(feeRate.toLong.toDouble), "totalCollateral" -> Num(totalCollateral.satoshis.toLong.toDouble), "localCollateral" -> Num(localCollateral.satoshis.toLong.toDouble), "remoteCollateral" -> Num(remoteCollateral.satoshis.toLong.toDouble), "fundingTxId" -> Str(fundingTxId.hex) ) } implicit val confirmedW: Writer[Confirmed] = writer[Obj].comap { confirmed => import confirmed._ Obj( "state" -> Str(statusString), "dlcId" -> Str(dlcId.hex), "isInitiator" -> Bool(isInitiator), "lastUpdated" -> Str(iso8601ToString(lastUpdated)), PicklerKeys.tempContractIdKey -> Str(tempContractId.hex), "contractId" -> Str(contractId.toHex), "contractInfo" -> Str(contractInfo.hex), "contractMaturity" -> Num( timeouts.contractMaturity.toUInt32.toLong.toDouble), "contractTimeout" -> Num( timeouts.contractTimeout.toUInt32.toLong.toDouble), "feeRate" -> Num(feeRate.toLong.toDouble), "totalCollateral" -> Num(totalCollateral.satoshis.toLong.toDouble), "localCollateral" -> Num(localCollateral.satoshis.toLong.toDouble), "remoteCollateral" -> Num(remoteCollateral.satoshis.toLong.toDouble), "fundingTxId" -> Str(fundingTxId.hex) ) } private val counterPartyPayoutKey: String = "counterPartyPayout" implicit 
val claimedW: Writer[Claimed] = writer[Obj].comap { claimed => import claimed._ val (oraclesJs, outcomesJs) = oracleOutcome match { case EnumOracleOutcome(oracles, outcome) => (Arr.from(oracles.map(o => Str(o.announcement.hex))), Str(outcome.outcome)) case numeric: NumericOracleOutcome => (Arr.from(numeric.oracles.map(_.announcement.hex)), Arr.from(numeric.outcomes.map(o => Arr.from(o.digits)))) } Obj( "state" -> Str(statusString), "dlcId" -> Str(dlcId.hex), "isInitiator" -> Bool(isInitiator), "lastUpdated" -> Str(iso8601ToString(lastUpdated)), PicklerKeys.tempContractIdKey -> Str(tempContractId.hex), "contractId" -> Str(contractId.toHex), "contractInfo" -> Str(contractInfo.hex), "contractMaturity" -> Num( timeouts.contractMaturity.toUInt32.toLong.toDouble), "contractTimeout" -> Num( timeouts.contractTimeout.toUInt32.toLong.toDouble), "feeRate" -> Num(feeRate.toLong.toDouble), "totalCollateral" -> Num(totalCollateral.satoshis.toLong.toDouble), "localCollateral" -> Num(localCollateral.satoshis.toLong.toDouble), "remoteCollateral" -> Num(remoteCollateral.satoshis.toLong.toDouble), "fundingTxId" -> Str(fundingTxId.hex), "closingTxId" -> Str(closingTxId.hex), "oracleSigs" -> oracleSigs.map(sig => Str(sig.hex)), "outcomes" -> outcomesJs, "oracles" -> oraclesJs, PicklerKeys.myPayout -> Num(claimed.myPayout.satoshis.toLong.toDouble), counterPartyPayoutKey -> Num( claimed.counterPartyPayout.satoshis.toLong.toDouble), PicklerKeys.pnl -> Num(claimed.pnl.satoshis.toLong.toDouble), PicklerKeys.rateOfReturn -> Num(claimed.rateOfReturn.toDouble) ) } implicit val remoteClaimedW: Writer[RemoteClaimed] = writer[Obj].comap { remoteClaimed => import remoteClaimed._ val (oraclesJs, outcomesJs) = oracleOutcome match { case EnumOracleOutcome(oracles, outcome) => (Arr.from(oracles.map(o => Str(o.announcement.hex))), Str(outcome.outcome)) case numeric: NumericOracleOutcome => (Arr.from(numeric.oracles.map(_.announcement.hex)), Arr.from(numeric.outcomes.map(o => Arr.from(o.digits)))) } Obj( "state" -> Str(statusString), "dlcId" -> Str(dlcId.hex), "isInitiator" -> Bool(isInitiator), "lastUpdated" -> Str(iso8601ToString(lastUpdated)), PicklerKeys.tempContractIdKey -> Str(tempContractId.hex), "contractId" -> Str(contractId.toHex), "contractInfo" -> Str(contractInfo.hex), "contractMaturity" -> Num( timeouts.contractMaturity.toUInt32.toLong.toDouble), "contractTimeout" -> Num( timeouts.contractTimeout.toUInt32.toLong.toDouble), "feeRate" -> Num(feeRate.toLong.toDouble), "totalCollateral" -> Num(totalCollateral.satoshis.toLong.toDouble), "localCollateral" -> Num(localCollateral.satoshis.toLong.toDouble), "remoteCollateral" -> Num(remoteCollateral.satoshis.toLong.toDouble), "fundingTxId" -> Str(fundingTxId.hex), "closingTxId" -> Str(closingTxId.hex), "oracleSigs" -> oracleSigs.map(sig => Str(sig.hex)), "outcomes" -> outcomesJs, "oracles" -> oraclesJs, PicklerKeys.myPayout -> Num( remoteClaimed.myPayout.satoshis.toLong.toDouble), counterPartyPayoutKey -> Num( remoteClaimed.counterPartyPayout.satoshis.toLong.toDouble), PicklerKeys.pnl -> Num(remoteClaimed.pnl.satoshis.toLong.toDouble), PicklerKeys.rateOfReturn -> Num(remoteClaimed.rateOfReturn.toDouble) ) } implicit val refundedW: Writer[Refunded] = writer[Obj].comap { refunded => import refunded._ Obj( "state" -> Str(statusString), "dlcId" -> Str(dlcId.hex), "isInitiator" -> Bool(isInitiator), "lastUpdated" -> Str(iso8601ToString(lastUpdated)), PicklerKeys.tempContractIdKey -> Str(tempContractId.hex), "contractId" -> Str(contractId.toHex), "contractInfo" -> 
Str(contractInfo.hex), "contractMaturity" -> Num( timeouts.contractMaturity.toUInt32.toLong.toDouble), "contractTimeout" -> Num( timeouts.contractTimeout.toUInt32.toLong.toDouble), "feeRate" -> Num(feeRate.toLong.toDouble), "totalCollateral" -> Num(totalCollateral.satoshis.toLong.toDouble), "localCollateral" -> Num(localCollateral.satoshis.toLong.toDouble), "remoteCollateral" -> Num(remoteCollateral.satoshis.toLong.toDouble), "fundingTxId" -> Str(fundingTxId.hex), "closingTxId" -> Str(closingTxId.hex), PicklerKeys.myPayout -> Num(refunded.myPayout.satoshis.toLong.toDouble), counterPartyPayoutKey -> Num( refunded.counterPartyPayout.satoshis.toLong.toDouble), PicklerKeys.pnl -> Num(refunded.pnl.satoshis.toLong.toDouble), PicklerKeys.rateOfReturn -> Num(refunded.rateOfReturn.toDouble) ) } implicit val dlcOfferAddW: Writer[IncomingDLCOfferDb] = writer[Obj].comap { offerDb => Obj( "hash" -> offerDb.hash.hex, "receivedAt" -> Num(offerDb.receivedAt.getEpochSecond.toDouble), "peer" -> offerDb.peer.map(Str).getOrElse(Null), "message" -> offerDb.message.map(Str).getOrElse(Null), "offerTLV" -> offerDb.offerTLV.hex ) } implicit val dlcOfferRemoveW: Writer[Sha256Digest] = writer[Value].comap { offerHash => writeJs(offerHash.hex) } implicit val dlcStatusW: Writer[DLCStatus] = writer[Value].comap { case o: Offered => writeJs(o)(offeredW) case a: AcceptedComputingAdaptorSigs => writeJs(a)(acceptedComputingAdaptorSigsW) case a: Accepted => writeJs(a)(acceptedW) case s: SignedComputingAdaptorSigs => writeJs(s)(signedComputingAdaptorSigsW) case s: Signed => writeJs(s)(signedW) case b: Broadcasted => writeJs(b)(broadcastedW) case c: Confirmed => writeJs(c)(confirmedW) case c: Claimed => writeJs(c)(claimedW) case r: RemoteClaimed => writeJs(r)(remoteClaimedW) case r: Refunded => writeJs(r)(refundedW) } implicit val dlcOfferAddR: Reader[IncomingDLCOfferDb] = reader[Obj].map { obj => val hash = Sha256Digest(obj("hash").str) val peer = Try(obj("peer").str).toOption val message = Try(obj("message").str).toOption val receivedAt = Instant.ofEpochSecond(obj("receivedAt").num.toLong) val offerTLV = DLCOfferTLV.fromHex(obj("offerTLV").str) IncomingDLCOfferDb(hash = hash, peer = peer, message = message, receivedAt = receivedAt, offerTLV = offerTLV) } implicit val dlcOfferRemoveR: Reader[Sha256Digest] = reader[Value].map { obj => Sha256Digest(obj.str) } implicit val dlcStatusR: Reader[DLCStatus] = reader[Obj].map { obj => val dlcId = Sha256Digest(obj("dlcId").str) val state = DLCState.fromString(obj("state").str) val lastUpdated = iso8601ToInstant(obj("lastUpdated").str) val isInitiator = obj("isInitiator").bool val tempContractId = Sha256Digest(obj(PicklerKeys.tempContractIdKey).str) val contractInfoTLV = ContractInfoV0TLV(obj("contractInfo").str) val contractMaturity = BlockStamp(UInt32(obj("contractMaturity").num.toLong)) val contractTimeout = BlockStamp(UInt32(obj("contractTimeout").num.toLong)) val feeRate = SatoshisPerVirtualByte.fromLong(obj("feeRate").num.toLong) val totalCollateral = Satoshis(obj("totalCollateral").num.toLong) val localCollateral = Satoshis(obj("localCollateral").num.toLong) lazy val contractId = ByteVector.fromValidHex(obj("contractId").str) lazy val fundingTxId = DoubleSha256DigestBE(obj("fundingTxId").str) lazy val closingTxId = DoubleSha256DigestBE(obj("closingTxId").str) lazy val oracleSigs = obj("oracleSigs").arr .map(value => SchnorrDigitalSignature(value.str)) .toVector lazy val outcomesJs = obj("outcomes") lazy val outcomes = outcomesJs.strOpt match { case Some(value) => 
Vector(EnumOutcome(value)) case None => outcomesJs.arr.map { outcomeJs => val digits = outcomeJs.arr.map(value => value.num.toInt) UnsignedNumericOutcome(digits.toVector) }.toVector } lazy val oraclesJs = obj("oracles") lazy val oracles = oraclesJs.arr.map { value => val announcementTLV = OracleAnnouncementTLV(value.str) SingleOracleInfo(announcementTLV) }.toVector lazy val oracleOutcome = outcomes.head match { case outcome: EnumOutcome => EnumOracleOutcome(oracles.asInstanceOf[Vector[EnumSingleOracleInfo]], outcome) case UnsignedNumericOutcome(_) => val numericOutcomes = outcomes.map(_.asInstanceOf[UnsignedNumericOutcome]) val numericOracles = oracles.map(_.asInstanceOf[NumericSingleOracleInfo]) NumericOracleOutcome(numericOracles.zip(numericOutcomes)) case signed: SignedNumericOutcome => throw new IllegalArgumentException(s"Unexpected outcome $signed") } lazy val myPayoutJs = obj(PicklerKeys.myPayout) lazy val myPayoutOpt = myPayoutJs.numOpt.map(sats => Satoshis(sats.toLong)) lazy val theirPayoutJs = obj(counterPartyPayoutKey) lazy val theirPayoutOpt = theirPayoutJs.numOpt.map(sats => Satoshis(sats.toLong)) state match { case DLCState.Offered => Offered( dlcId, isInitiator, lastUpdated, tempContractId, ContractInfo.fromTLV(contractInfoTLV), DLCTimeouts(contractMaturity, contractTimeout), feeRate, totalCollateral, localCollateral ) case DLCState.AcceptComputingAdaptorSigs => AcceptedComputingAdaptorSigs( dlcId, isInitiator, lastUpdated, tempContractId, contractId, ContractInfo.fromTLV(contractInfoTLV), DLCTimeouts(contractMaturity, contractTimeout), feeRate, totalCollateral, localCollateral ) case DLCState.Accepted => Accepted( dlcId, isInitiator, lastUpdated, tempContractId, contractId, ContractInfo.fromTLV(contractInfoTLV), DLCTimeouts(contractMaturity, contractTimeout), feeRate, totalCollateral, localCollateral ) case DLCState.SignComputingAdaptorSigs => SignedComputingAdaptorSigs( dlcId, isInitiator, lastUpdated, tempContractId, contractId, ContractInfo.fromTLV(contractInfoTLV), DLCTimeouts(contractMaturity, contractTimeout), feeRate, totalCollateral, localCollateral, fundingTxId ) case DLCState.Signed => Signed( dlcId, isInitiator, lastUpdated, tempContractId, contractId, ContractInfo.fromTLV(contractInfoTLV), DLCTimeouts(contractMaturity, contractTimeout), feeRate, totalCollateral, localCollateral, fundingTxId ) case DLCState.Broadcasted => Broadcasted( dlcId, isInitiator, lastUpdated, tempContractId, contractId, ContractInfo.fromTLV(contractInfoTLV), DLCTimeouts(contractMaturity, contractTimeout), feeRate, totalCollateral, localCollateral, fundingTxId ) case DLCState.Confirmed => Confirmed( dlcId, isInitiator, lastUpdated, tempContractId, contractId, ContractInfo.fromTLV(contractInfoTLV), DLCTimeouts(contractMaturity, contractTimeout), feeRate, totalCollateral, localCollateral, fundingTxId ) case DLCState.Claimed => Claimed( dlcId, isInitiator, lastUpdated, tempContractId, contractId, ContractInfo.fromTLV(contractInfoTLV), DLCTimeouts(contractMaturity, contractTimeout), feeRate, totalCollateral, localCollateral, fundingTxId, closingTxId, oracleSigs, oracleOutcome, myPayout = myPayoutOpt.get, counterPartyPayout = theirPayoutOpt.get ) case DLCState.RemoteClaimed => require(oracleSigs.size == 1, "Remote claimed should only have one oracle sig") RemoteClaimed( dlcId, isInitiator, lastUpdated, tempContractId, contractId, ContractInfo.fromTLV(contractInfoTLV), DLCTimeouts(contractMaturity, contractTimeout), feeRate, totalCollateral, localCollateral, fundingTxId, closingTxId, 
oracleSigs.head, oracleOutcome, myPayout = myPayoutOpt.get, counterPartyPayout = theirPayoutOpt.get ) case DLCState.Refunded => Refunded( dlcId, isInitiator, lastUpdated, tempContractId, contractId, ContractInfo.fromTLV(contractInfoTLV), DLCTimeouts(contractMaturity, contractTimeout), feeRate, totalCollateral, localCollateral, fundingTxId, closingTxId, myPayout = myPayoutOpt.get, counterPartyPayout = theirPayoutOpt.get ) } } implicit val dlcWalletAccountingWriter: Writer[DLCWalletAccounting] = { writer[Obj].comap { walletAccounting: DLCWalletAccounting => Obj( PicklerKeys.myCollateral -> Num( walletAccounting.myCollateral.satoshis.toLong.toDouble), PicklerKeys.theirCollateral -> Num( walletAccounting.theirCollateral.satoshis.toLong.toDouble), PicklerKeys.myPayout -> Num( walletAccounting.myPayout.satoshis.toLong.toDouble), PicklerKeys.theirPayout -> Num( walletAccounting.theirPayout.satoshis.toLong.toDouble), PicklerKeys.pnl -> Num(walletAccounting.pnl.satoshis.toLong.toDouble), PicklerKeys.rateOfReturn -> Num(walletAccounting.rateOfReturn.toDouble) ) } } implicit val mnemonicCodePickler: ReadWriter[MnemonicCode] = readwriter[String].bimap( _.words.mkString(" "), str => MnemonicCode.fromWords(str.split(' ').toVector)) implicit val extPrivateKeyPickler: ReadWriter[ExtPrivateKey] = readwriter[String].bimap(ExtKey.toString, ExtPrivateKey.fromString) implicit val oracleAnnouncementTLV: ReadWriter[OracleAnnouncementV0TLV] = readwriter[String].bimap(_.hex, OracleAnnouncementV0TLV.fromHex) implicit val oracleAttestmentTLV: ReadWriter[OracleAttestmentTLV] = readwriter[String].bimap(_.hex, OracleAttestmentTLV.fromHex) implicit val oracleAttestmentV0TLV: ReadWriter[OracleAttestmentV0TLV] = readwriter[String].bimap(_.hex, OracleAttestmentV0TLV.fromHex) implicit val ecPublicKeyPickler: ReadWriter[ECPublicKey] = readwriter[String].bimap(_.hex, ECPublicKey.fromHex) implicit val addressTypePickler: ReadWriter[AddressType] = readwriter[String].bimap(_.toString, AddressType.fromString) def parseContractDescriptor(payoutsVal: Value): ContractDescriptorV0TLV = { val outcomes = payoutsVal(PicklerKeys.outcomesKey) val payouts: Vector[(String, Satoshis)] = outcomes.obj.toVector.map { case (outcome, payoutJs) => val payout = jsToSatoshis(payoutJs.num) (outcome, payout) } ContractDescriptorV0TLV(outcomes = payouts) } private def readBlockHeaderResult(obj: Obj): GetBlockHeaderResult = { val hash = DoubleSha256DigestBE.fromHex(obj(PicklerKeys.hashKey).str) val confirmations = obj(PicklerKeys.confirmationsKey).num.toInt val height = obj(PicklerKeys.heightKey).num.toInt val version = obj(PicklerKeys.versionKey).num.toInt val versionHex = Int32.fromHex(obj(PicklerKeys.versionHexKey).str) val merkleroot = DoubleSha256DigestBE.fromHex(obj(PicklerKeys.merklerootKey).str) val time = UInt32(obj(PicklerKeys.timeKey).num.toLong) val mediantime = UInt32(obj(PicklerKeys.mediantimeKey).num.toLong) val nonce = UInt32(obj(PicklerKeys.nonceKey).num.toLong) val bits = UInt32.fromHex(obj(PicklerKeys.bitsKey).str) val difficulty = obj(PicklerKeys.difficultyKey).num val chainWork = obj(PicklerKeys.chainworkKey).str val previousBlockHash = obj(PicklerKeys.previousblockhashKey).strOpt.map { str => DoubleSha256DigestBE.fromHex(str) } val nextblockhash = obj(PicklerKeys.nextblockhashKey).strOpt.map { str => DoubleSha256DigestBE.fromHex(str) } GetBlockHeaderResult( hash = hash, confirmations = confirmations, height = height, version = version, versionHex = versionHex, merkleroot = merkleroot, time = time, mediantime = mediantime, nonce = 
nonce, bits = bits, difficulty = difficulty, chainwork = chainWork, previousblockhash = previousBlockHash, nextblockhash = nextblockhash ) } private def writeBlockHeaderResult(header: GetBlockHeaderResult): Obj = { val json = Obj( PicklerKeys.rawKey -> Str(header.blockHeader.hex), PicklerKeys.hashKey -> Str(header.hash.hex), PicklerKeys.confirmationsKey -> Num(header.confirmations), PicklerKeys.heightKey -> Num(header.height), PicklerKeys.versionKey -> Num(header.version.toLong.toDouble), PicklerKeys.versionHexKey -> Str(Int32(header.version).hex), PicklerKeys.merklerootKey -> Str(header.merkleroot.hex), PicklerKeys.timeKey -> Num(header.time.toBigInt.toDouble), PicklerKeys.mediantimeKey -> Num(header.mediantime.toLong.toDouble), PicklerKeys.nonceKey -> Num(header.nonce.toBigInt.toDouble), PicklerKeys.bitsKey -> Str(header.bits.hex), PicklerKeys.difficultyKey -> Num(header.difficulty.toDouble), PicklerKeys.chainworkKey -> Str(header.chainwork), PicklerKeys.previousblockhashKey -> { header.previousblockhash.map(_.hex) match { case Some(str) => Str(str) case None => ujson.Null } }, PicklerKeys.nextblockhashKey -> { header.nextblockhash.map(_.hex) match { case Some(str) => Str(str) case None => ujson.Null } } ) json } implicit val getBlockHeaderResultPickler: ReadWriter[GetBlockHeaderResult] = { readwriter[ujson.Obj] .bimap(writeBlockHeaderResult(_), readBlockHeaderResult(_)) } }
bitcoin-s/bitcoin-s
app-commons/src/main/scala/org/bitcoins/commons/serializers/Picklers.scala
Scala
mit
55,369
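The Picklers.scala writers and readers above all repeat the same upickle pattern: build a Writer by comap-ping a projection onto ujson.Obj, and a Reader by map-ping a reconstruction out of ujson.Obj. A minimal, self-contained sketch of that pattern follows; the Person case class and its field names are hypothetical and not part of bitcoin-s.

import upickle.default._
import ujson.{Num, Obj, Str}

case class Person(name: String, ageYears: Int)

object PersonPickler {
  // Writer: project the case class onto a ujson.Obj, mirroring the comap style used above.
  implicit val personW: Writer[Person] = writer[Obj].comap { p =>
    Obj(
      "name" -> Str(p.name),
      "age"  -> Num(p.ageYears.toDouble)
    )
  }

  // Reader: rebuild the case class from a ujson.Obj, mirroring the map style used above.
  implicit val personR: Reader[Person] = reader[Obj].map { obj =>
    Person(name = obj("name").str, ageYears = obj("age").num.toInt)
  }
}

With both implicits in scope, write(Person("alice", 30)) and read[Person]("""{"name":"alice","age":30}""") round-trip through the same Obj shape.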
package filodb.core.reprojector

import filodb.core.Types._

/**
 * FlushPolicies check for new flush cycle opportunities by looking at the active memtables and using
 * some heuristics to determine which datasets to flush next. A flush means calling flipBuffers() and
 * causing the Active table to be swapped into the Locked state.
 */
trait FlushPolicy {
  /**
   * Determines the next dataset and version to flush using some heuristic.
   * Should ignore currently flushing datasets (ie the Locked memtable is nonempty).
   * @return None if it is not time to flush yet, or all datasets are already being flushed.
   *         Some((dataset, version)) for the next flushing candidate.
   */
  def nextFlush(memtable: MemTable): Option[(TableName, Int)]
}

/**
 * A very simple flush policy based purely on the total # of rows across all datasets.
 * Flushes if the total # of rows equals or exceeds maxTotalRows.
 */
class NumRowsFlushPolicy(maxTotalRows: Long) extends FlushPolicy {
  override def toString: String = s"NumRowsFlushPolicy($maxTotalRows)"

  def nextFlush(memtable: MemTable): Option[(TableName, Int)] = {
    val activeRows = memtable.allNumRows(MemTable.Active, nonZero = true)
    val flushingRows = memtable.flushingDatasets
    val totalRows = activeRows.map(_._2).sum + flushingRows.map(_._2).sum
    if (totalRows < maxTotalRows) {
      None
    } else {
      val flushingSet = flushingRows.map(_._1).toSet
      val notFlushingRows = activeRows.filterNot { case (dv, numRows) => flushingSet contains dv }
      if (notFlushingRows.isEmpty) {
        None
      } else {
        val highest = notFlushingRows.foldLeft(notFlushingRows.head) {
          case (highest, (nameVer, numRows)) => if (numRows > highest._2) (nameVer, numRows) else highest
        }
        Some(highest._1)
      }
    }
  }
}
YanjieGao/FiloDB
core/src/main/scala/filodb.core/reprojector/FlushPolicy.scala
Scala
apache-2.0
1,827
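NumRowsFlushPolicy above picks the busiest non-flushing dataset once a global row budget is hit. As an illustration of the same FlushPolicy trait, here is a hypothetical per-dataset variant (not part of FiloDB) that flushes the first active dataset whose own row count crosses a threshold; it assumes only the MemTable methods already used above.

// Hypothetical placement alongside FlushPolicy; MemTable and TableName come from FiloDB.
package filodb.core.reprojector

import filodb.core.Types._

/** Hypothetical policy: flush the first dataset whose own active row count reaches maxRowsPerDataset. */
class PerDatasetRowsFlushPolicy(maxRowsPerDataset: Long) extends FlushPolicy {
  override def toString: String = s"PerDatasetRowsFlushPolicy($maxRowsPerDataset)"

  def nextFlush(memtable: MemTable): Option[(TableName, Int)] = {
    val flushingSet = memtable.flushingDatasets.map(_._1).toSet
    memtable.allNumRows(MemTable.Active, nonZero = true)
      .filterNot { case (nameVer, _) => flushingSet contains nameVer }          // skip datasets already flushing
      .collectFirst { case (nameVer, numRows) if numRows >= maxRowsPerDataset => nameVer }
  }
}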
package net.selenate.server

import com.typesafe.config.ConfigFactory
import com.typesafe.config.{ Config, ConfigValueFactory }

import scala.collection.JavaConversions._

import settings.PoolSettings

object C extends CUtils {
  val CONFIG = ConfigFactory.empty()
      .withFallback(loadAppUser)
      .withFallback(loadAppReference)
      .withFallback(loadAkkaReference)
  logTrace(s"""Effective config: $CONFIG""")

  val BRANCH  = branch
  val OS_NAME = sys.props("os.name")

  object Server extends {
    val TRYO_TRACE = CONFIG.getBoolean("server.tryo-trace")

    object Locations extends {
      val RECORDINGS = CONFIG.getString("server.locations.recordings")
    }

    object Timeouts extends {
      val PAGE_LOAD   = parseTimeout(CONFIG.getString("server.timeouts.page-load"))
      val SHUTDOWN    = parseTimeout(CONFIG.getString("server.timeouts.shutdown"))
      val VNC_STARTUP = parseTimeout(CONFIG.getString("server.timeouts.vnc-startup"))
    }

    object Pool extends {
      val SIZE           = CONFIG.getString("server.pool.size")
      val DISPLAY        = CONFIG.getString("server.pool.display")
      val DISPLAY_WIDTH  = CONFIG.getString("server.pool.display-width")
      val DISPLAY_HEIGHT = CONFIG.getString("server.pool.display-height")
      val VNC_HOST       = CONFIG.getString("server.pool.vnc-host")
      val BINARY         = if (CONFIG.hasPath("server.pool.binary")) Some(CONFIG.getString("server.pool.binary")) else None
      val PREFS          = CONFIG.getObject("server.pool.prefs").unwrapped.toMap.mapValues(_.toString)
    }
  }
}
tferega/selenate
code/Server/src/main/scala/net/selenate/server/C.scala
Scala
bsd-3-clause
1,577
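C.scala builds its effective configuration by chaining withFallback calls on Typesafe Config objects, so earlier sources win and later ones only fill gaps. A small standalone sketch of that precedence rule; the keys and values here are invented for illustration.

import com.typesafe.config.{Config, ConfigFactory}

object FallbackDemo extends App {
  // Values in `primary` win; anything missing falls through to `defaults`.
  val primary: Config = ConfigFactory.parseString("""server.pool.size = 8""")
  val defaults: Config = ConfigFactory.parseString(
    """server.pool.size = 4
      |server.pool.display = ":1"
      |""".stripMargin)

  val effective: Config = primary.withFallback(defaults)

  println(effective.getString("server.pool.size"))    // "8"  (taken from primary)
  println(effective.getString("server.pool.display")) // ":1" (falls back to defaults)
}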
/* * Copyright 2013 - 2017 Outworkers Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.outworkers.domain.test import java.util.UUID case class OtherPackageExample( id: UUID, name: String, firstName: String, email: String, sample: Int, double: Double, bigDec: BigDecimal, long: Long ) case class SimpleFoo(name: Option[String]) case class NestedCollections( id: UUID, text: String, nestedList: List[List[String]], nestedListSet: List[Set[String]], props: Map[String, List[String]], doubleProps: Map[Set[String], List[String]] ) case class NestedOptions( id: Option[UUID], name: Option[String], firstName: Option[String], user: Option[User], collections: Option[CollectionSample] ) case class CustomEmailWrapper(email: String) case class EdgeCase( id: UUID, email: CustomEmailWrapper ) import java.util.UUID import org.outworkers.domain.test._ case class User( id: UUID, firstName: String, lastName: String, email: String ) case class NestedUser( timestamp: Long, user: User ) case class CollectionSample( id: UUID, firstName: String, lastName: String, sh: Short, b: Byte, name: String, email: String, tests: List[String], sets: List[String], map: Map[String, String], emails: List[String], opt: Option[Int] ) case class TupleRecord(id: UUID, tp: (String, Long)) case class TupleCollectionRecord(id: UUID, tuples: List[(Int, String)]) case class NestedOtherPackage( id: UUID, otherPkg: OtherPackageExample, emails: List[String] ) case class TestRow( key: String, list: List[String], setText: Set[String], mapTextToText: Map[String, String], setInt: Set[Int], mapIntToText: Map[Int, String], mapIntToInt: Map[Int, Int] ) trait RoleType extends Enumeration { //represents built-in role types. type RoleType = Value val Leader = Value("leader") val AllianceMember = Value("member") } object RoleType extends RoleType case class Membership( memberId: String, entityType: String, allianceId: String, role: RoleType.Value = RoleType.Leader, rankId: String ) case class NpeRecipe( id: UUID, name: String, title: String, author: String, description: String, ingredients: Set[String], props: Map[String, String] )
websudos/util
util-samplers/src/test/scala/org/outworkers/domain/test/domain.scala
Scala
bsd-2-clause
2,777
/*
 * Copyright 2017 Datamountaineer.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.datamountaineer.streamreactor.connect.hbase.config

import com.datamountaineer.streamreactor.common.config.base.const.TraitConfigConst._
import com.datamountaineer.streamreactor.connect.hbase.kerberos.KerberosSettings

/**
  * Created by tomasfartaria on 10/04/2017.
  */
object HBaseConfigConstants extends KerberosSettings {

  val CONNECTOR_PREFIX = "connect.hbase"

  val COLUMN_FAMILY = s"${CONNECTOR_PREFIX}.column.family"
  val COLUMN_FAMILY_DOC = "The hbase column family."

  val KCQL_QUERY = s"${CONNECTOR_PREFIX}.${KCQL_PROP_SUFFIX}"
  val KCQL_DOC = "KCQL expression describing field selection and routes."

  val ERROR_POLICY = s"${CONNECTOR_PREFIX}.${ERROR_POLICY_PROP_SUFFIX}"
  val ERROR_POLICY_DOC: String =
    """Specifies the action to be taken if an error occurs while inserting the data.
      |There are three available options:
      |NOOP - the error is swallowed.
      |THROW - the error is allowed to propagate.
      |RETRY - the exception causes the Connect framework to retry the message; the number of retries is based on the maximum-retries setting.
      |The error will be logged automatically.""".stripMargin

  val ERROR_POLICY_DEFAULT = "THROW"

  val ERROR_RETRY_INTERVAL = s"${CONNECTOR_PREFIX}.${RETRY_INTERVAL_PROP_SUFFIX}"
  val ERROR_RETRY_INTERVAL_DOC = "The time in milliseconds between retries."
  val ERROR_RETRY_INTERVAL_DEFAULT = "60000"

  val NBR_OF_RETRIES = s"${CONNECTOR_PREFIX}.${MAX_RETRIES_PROP_SUFFIX}"
  val NBR_OF_RETRIES_DOC = "The maximum number of times to try the write again."
  val NBR_OF_RETIRES_DEFAULT = 20

  val PROGRESS_COUNTER_ENABLED: String = PROGRESS_ENABLED_CONST
  val PROGRESS_COUNTER_ENABLED_DOC = "Enables the output for how many records have been processed"
  val PROGRESS_COUNTER_ENABLED_DEFAULT = false
  val PROGRESS_COUNTER_ENABLED_DISPLAY = "Enable progress counter"

  val HBASE_CONFIG_DIR = s"$CONNECTOR_PREFIX.conf.dir"
  val HBASE_CONFIG_DIR_DOC = "The HBase configuration directory."
  val HBASE_CONFIG_DIR_DEFAULT: String = null
  val HBASE_CONFIG_DIR_DISPLAY = "HBase Config Folder"
}
datamountaineer/stream-reactor
kafka-connect-hbase/src/main/scala/com/datamountaineer/streamreactor/connect/hbase/config/HBaseConfigConstants.scala
Scala
apache-2.0
2,656
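These constants are the keys a connector configuration is expected to supply. A hedged sketch of what a sink's property map might look like, referencing the constants themselves rather than hard-coded key strings; the topic, table, and column-family names are invented and the KCQL text is only indicative.

import com.datamountaineer.streamreactor.connect.hbase.config.HBaseConfigConstants

object HBaseSinkPropsExample {
  // Hypothetical sink properties for a connector writing a Kafka topic into an HBase table.
  val sinkProps: Map[String, String] = Map(
    HBaseConfigConstants.COLUMN_FAMILY -> "d",
    HBaseConfigConstants.KCQL_QUERY    -> "INSERT INTO person SELECT * FROM person-topic",
    HBaseConfigConstants.ERROR_POLICY  -> HBaseConfigConstants.ERROR_POLICY_DEFAULT
  )
}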
package com.twitter.server.handler

import com.twitter.finagle.Service
import com.twitter.io.Buf
import com.twitter.server.util.HttpUtils._
import com.twitter.server.util.JsonConverter
import com.twitter.server.view.ThreadsView
import com.twitter.util.Future
import java.lang.management.ManagementFactory
import scala.collection.JavaConverters._

private[server] object ThreadsHandler {

  type StackTrace = Seq[StackTraceElement]

  case class ThreadInfo(
    thread: Thread,
    stack: StackTrace,
    isIdle: Boolean)

  private val IdleClassAndMethod: Set[(String, String)] = Set(
    ("sun.nio.ch.EPollArrayWrapper", "epollWait"),
    ("sun.nio.ch.KQueueArrayWrapper", "kevent0")
  )
}

/**
 * "Controller" for displaying the current state of threads.
 *
 * Possibilities for future endeavors in the web ui:
 * - group threads by "similarity"
 * - provide a mechanism for exp/imp
 * - javascript control for searching within stacktraces
 *
 * @see [[ThreadsView]]
 */
class ThreadsHandler extends Service[Request, Response] {
  import ThreadsHandler._

  def apply(req: Request): Future[Response] =
    if (expectsHtml(req)) htmlResponse(req) else jsonResponse(req)

  private def jsonResponse(req: Request): Future[Response] = {
    val stacks = Thread.getAllStackTraces().asScala.map {
      case (thread, stack) =>
        thread.getId().toString -> Map(
          "thread" -> thread.getName(),
          "daemon" -> thread.isDaemon(),
          "state" -> thread.getState(),
          "priority" -> thread.getPriority(),
          "stack" -> stack.toSeq.map(_.toString)
        )
    }
    val msg = Map("threads" -> stacks)
    newOk(JsonConverter.writeToString(msg))
  }

  private def htmlResponse(req: Request): Future[Response] = {
    // first, gather the data
    val raw: Seq[ThreadInfo] = Thread.getAllStackTraces.asScala.toMap.map {
      case (thread, stack) => ThreadInfo(thread, stack.toSeq, isIdle = false)
    }.toSeq

    val withIdle = markedIdle(raw)
    val sorted = withIdle.sortWith {
      case (t1, t2) =>
        (t1.isIdle, t2.isIdle) match {
          case (true, false) => false
          case (false, true) => true
          case _ => t1.thread.getId < t2.thread.getId
        }
    }

    val view = new ThreadsView(sorted, deadlockedIds)
    val rendered = view()
    newResponse(
      // note: contentType must be explicit here because of `IndexView.isFragment`
      contentType = "text/html;charset=UTF-8",
      content = Buf.Utf8(rendered)
    )
  }

  private def markedIdle(in: Seq[ThreadInfo]): Seq[ThreadInfo] = {
    // pretty obvious they are idle
    def idleState(state: Thread.State): Boolean =
      state == Thread.State.TIMED_WAITING || state == Thread.State.WAITING

    // Threads that say they are runnable, but are actually doing nothing.
    def idleRunnable(info: ThreadInfo): Boolean =
      info.stack.headOption.exists { elem =>
        IdleClassAndMethod.contains((elem.getClassName, elem.getMethodName))
      }

    in.map { info =>
      if (idleState(info.thread.getState) || idleRunnable(info)) {
        info.copy(isIdle = true)
      } else {
        info
      }
    }
  }

  private def deadlockedIds: Seq[Long] = {
    val ids = Option(ManagementFactory.getThreadMXBean.findDeadlockedThreads())
    ids.map(_.toSeq).getOrElse(Nil)
  }
}
travisbrown/twitter-server
src/main/scala/com/twitter/server/handler/ThreadsHandler.scala
Scala
apache-2.0
3,319
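The interesting part of ThreadsHandler is markedIdle: a thread counts as idle if it is WAITING or TIMED_WAITING, or if it is nominally RUNNABLE but parked in a known selector-wait frame. A stripped-down sketch of that heuristic on its own; the object and method names here are illustrative and not part of twitter-server.

object IdleHeuristicSketch {
  // Frames that mean "runnable but actually just waiting on a selector", as in ThreadsHandler.
  private val idleFrames = Set(
    ("sun.nio.ch.EPollArrayWrapper", "epollWait"),
    ("sun.nio.ch.KQueueArrayWrapper", "kevent0")
  )

  /** True if the thread is waiting, or "runnable" but sitting in a known selector wait. */
  def isIdle(thread: Thread, stack: Seq[StackTraceElement]): Boolean = {
    val waitingState =
      thread.getState == Thread.State.WAITING || thread.getState == Thread.State.TIMED_WAITING
    val idleRunnable =
      stack.headOption.exists(e => idleFrames.contains((e.getClassName, e.getMethodName)))
    waitingState || idleRunnable
  }
}

Thread.getAllStackTraces supplies the (thread, stack trace) pairs that would be fed to isIdle, just as the handler does before rendering its view.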
package de.mineformers.core.client.renderer.shape

import de.mineformers.core.client.renderer.RenderParams

/**
 * Sphere
 *
 * @author PaleoCrafter
 */
class Sphere(radius: Double, accuracy: Int, filled: Boolean = true)
  extends Shape(List(new Circle(radius, accuracy, filled)), RenderParams.start().face().build())
MineFormers/MFCore
src/main/scala/de/mineformers/core/client/renderer/shape/Sphere.scala
Scala
mit
316
package demo

import org.scalajs.dom._

/*
 * Copyright (C) 24/08/16 // mathieu.leclaire@openmole.org
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

import org.scalajs.dom.Element
import scaladget.bootstrapnative.bsn._
import com.raquo.laminar.api.L._

object ToastDemo {

  val sc = sourcecode.Text {

    val myToast = toast(ToastHeader("My header", backgroundColor = "#ffc107"), "My important message", delay = Some(2000))
    val mySecondToast = toast(ToastHeader("My second header", backgroundColor = "#dc3545"), "My important message")
    val myThirdToast = toast(ToastHeader("My third header", backgroundColor = "#17a2b8"), "My important message")

    val toaster = toastStack(bottomRightPosition, unstackOnClose = true)

    div(
      button(btn_warning_outline, "Toast with delay", onClick --> { _ => toaster.stackAndShow(myToast) }),
      button(btn_danger_outline, marginLeft := "15", "Toast again !", onClick --> { _ => toaster.stackAndShow(mySecondToast) }),
      button(btn_info_outline, marginLeft := "15", "Toast again !", onClick --> { _ => toaster.stackAndShow(myThirdToast) }),
      toaster.render
    )
  }

  val elementDemo = new ElementDemo {
    def title: String = "Toast"
    def code: String = sc.source
    def element: HtmlElement = sc.value
  }
}
openmole/scaladget
bootstrapDemo/src/main/scala/demo/ToastDemo.scala
Scala
agpl-3.0
1,903
package taczombie.model import taczombie.model.util.Logger trait GameObject extends Logger { val id : Int val coords : (Int, Int) override def hashCode(): Int = id } trait StaticGameObject extends GameObject trait VersatileGameObject extends GameObject { /// Calculate and return a leftover pair for (host, visitor) Gameobjects def isVisitedBy (versatileGameObject : VersatileGameObject) : (VersatileGameObject, VersatileGameObject) } trait NonHuman extends VersatileGameObject trait Collectable extends NonHuman case class Wall(id : Int, coords : (Int,Int)) extends StaticGameObject case class Coin(id : Int, val coords : (Int,Int)) extends Collectable { def isVisitedBy (versatileGameObject : VersatileGameObject) = versatileGameObject match { case humanToken : HumanToken => { logger.+=(humanToken + "collected coin") (null, humanToken.updated(newCoins = humanToken.coins + 1, newScore = humanToken.score+1)) } case zombieToken : ZombieToken => (this, zombieToken) } } case class Powerup(id : Int, val coords : (Int,Int), time : Int = 5) extends Collectable{ def isVisitedBy (versatileGameObject : VersatileGameObject) = versatileGameObject match { case humanToken : HumanToken => (null, humanToken.updated(newPowerupTime = humanToken.powerupTime+defaults.defaultPowerupTime, newScore = humanToken.score+1)) case zombieToken : ZombieToken => (this, zombieToken) } } trait PlayerToken extends VersatileGameObject { val id : Int def coins : Int def score : Int def powerupTime : Int val frozenTime : Int require(frozenTime >= 0) val dead : Boolean def updated(newCoords : (Int,Int) = coords, newCoins : Int = coins, newScore : Int = score, newPowerupTime : Int = powerupTime, newFrozenTime : Int = frozenTime, newDead : Boolean = dead) : PlayerToken def updatedDecrementCounters() : PlayerToken } case class HumanToken(id : Int, coords : (Int,Int), coins : Int = 0, score : Int = 0, powerupTime : Int = 0, frozenTime : Int = 0, dead : Boolean = false) extends PlayerToken { require(powerupTime >= 0) override def updated(newCoords : (Int,Int) = this.coords, newCoins : Int = this.coins, newScore : Int = this.score, newPowerupTime : Int = this.powerupTime, newFrozenTime : Int = this.frozenTime, newDead : Boolean = this.dead) = { // checks val checkedNewPowerUpTime = if (newPowerupTime < 0) 0 else newPowerupTime val checkedNewFrozenTime = if (newFrozenTime < 0) 0 else newFrozenTime new HumanToken(this.id, newCoords, newCoins, newScore, checkedNewPowerUpTime, checkedNewFrozenTime, newDead) } def updatedDecrementCounters() : HumanToken = { updated(newPowerupTime = this.powerupTime-1, newFrozenTime = this.frozenTime-1) } def isVisitedBy (versatileGameObject : VersatileGameObject) = { versatileGameObject match { case zombieToken : ZombieToken => { (zombieToken.dead, this.powerupTime) match { case (false, 0) => { logger.+=(this + " death by " + zombieToken) (this.updated(newScore = this.score-defaults.defaultKillScore, newDead = true), zombieToken) } case (false, _) => { logger.+=(this + " killed " + zombieToken) (this.updated(newScore = this.score+defaults.defaultKillScore),zombieToken.updated(newDead = true)) } case (true, _) => (this, zombieToken) // spawn! 
} } case humanToken : HumanToken => (this, humanToken) } } } case class ZombieToken(id : Int, coords : (Int,Int), frozenTime : Int = 0, dead : Boolean = false) extends NonHuman with PlayerToken { def coins = 0 def score = 0 def powerupTime = 0 def updated(newCoords : (Int,Int) = this.coords, newCoins : Int = this.coins, newScore : Int = this.score, newPowerupTime : Int = this.powerupTime, newFrozenTime : Int = this.frozenTime, newDead : Boolean = this.dead) : ZombieToken = { val checkedNewFrozenTime = if (newFrozenTime < 0) 0 else newFrozenTime new ZombieToken(this.id, newCoords, checkedNewFrozenTime, newDead) } def updatedDecrementCounters() : ZombieToken = { updated(newFrozenTime = this.frozenTime-1) } def isVisitedBy (versatileGameObject : VersatileGameObject) = { versatileGameObject match { case humanToken : HumanToken => { (this.dead, humanToken.powerupTime) match { case (true, _) => (this, humanToken) case (false, 0) => (this, humanToken.updated(newScore = humanToken.score-defaults.defaultKillScore, newDead = true)) case (false, _) => (this.updated(newDead = true), humanToken.updated(newScore = humanToken.score+defaults.defaultKillScore)) } } case zombieToken : ZombieToken => (this, zombieToken) } } }
mahieke/TacZombie
model/src/main/scala/taczombie/model/GameObject.scala
Scala
gpl-2.0
5,536
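The isVisitedBy contract above returns a (host leftover, visitor leftover) pair. Tracing the Coin case directly from the code: a HumanToken visiting a Coin consumes it and gains a coin and a point of score, while a ZombieToken leaves it untouched. A small REPL-style sketch; the ids and coordinates are arbitrary.

import taczombie.model._

object VisitSketch extends App {
  val coin  = Coin(id = 1, coords = (2, 3))
  val human = HumanToken(id = 2, coords = (2, 3))

  // Host leftover is null (coin consumed); visitor comes back with coins + 1 and score + 1.
  val (coinLeft, humanAfter) = coin.isVisitedBy(human)
  println(coinLeft)                                   // null
  println(humanAfter.asInstanceOf[HumanToken].coins)  // 1

  // A zombie cannot collect coins: both host and visitor are returned unchanged.
  val (coinStill, zombieAfter) = coin.isVisitedBy(ZombieToken(id = 3, coords = (2, 3)))
  println(coinStill == coin)                          // true
}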
/* * (c) Copyright 2016 Hewlett Packard Enterprise Development LP * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package cogx.compiler.codegenerator.opencl.hyperkernels.discretecosinetransform import cogx.compiler.codegenerator.opencl.hyperkernels.fastfouriertransform._ import scala.collection.mutable.ArrayBuffer /** A "Planner" for synthesizing OpenCL kernels that implement a DCT that * transforms the rows of a 2D scalar field, where the rows have been * interleaved as described in the paper: * * "Image compression using the discrete * cosine transform," Watson, Mathematica Journal, 4(1), 1994 * * Compared to the fastfouriertransform package, this discretecosinetransform * package involves the addition of extra "twiddling" required * as described in the above paper. However, a reimplemenation could reduce * a lot of the computation here and perhaps eliminate the need for the * interleaving. Perhaps a 2X performance improvement could be achieved. * * @param deviceMaxWorkItemsPerWorkGroup The maximum number of work items that * the OpenCL platform will support within a work group. * @param rows The number of rows in the input field. * @param columns The number of columns in the input field. * @param forward True for a forward DCT transform, false for an inverse * transform. */ private[cogx] class DCTPlanner(deviceMaxWorkItemsPerWorkGroup: Int, rows: Int, columns: Int, forward: Boolean) { val sizes = Array(rows, columns) val size = new ClFFTDim3(rows, columns) // Only tested currently on 2D transform require(size.dimension == 2) // kernel for the dct var dctKernel: ClFFTKernelInfo = null // Maximum size of signal for which local memory transposed based // fft is sufficient i.e. no global mem transpose (communication) // is needed val max_localmem_fft_size = 2048 // Maximum work items per work group allowed. This, along with max_radix below controls // maximum local memory being used by fft kernels of this plan. Set to 256 by default var max_work_item_per_workgroup = deviceMaxWorkItemsPerWorkGroup // Maximum base radix for local memory fft ... this controls the maximum register // space used by work items. Currently defaults to 16 val max_radix = 16 // Device depended parameter that tells how many work-items need to be read consecutive // values to make sure global memory access by work-items of a work-group result in // coalesced memory access to utilize full bandwidth e.g. on NVidia tesla, this is 16 val min_mem_coalesce_width = 16 // Number of local memory banks. This is used to generate kernel with local memory // transposes with appropriate padding to avoid bank conflicts to local memory // e.g. on NVidia it is 16. val num_local_mem_banks = 16 // Create the kernel. DCT1D(X) // Debugging junk val Verbose = false if (Verbose) { println("DCTPlanner: forward = " + forward) println(" dimensions = " + sizes.length) println(" sizes: " + sizes(0) + " x " + sizes(1)) println(clSourceCode(0)) } /** Get the OpenCL source code for the synthesized FFT, one pass each dimension. 
*/ def clSourceCode: String = dctKernel.kernel_string.toString() def workDimensions: WorkDimensions = getKernelWorkDimensions(dctKernel) def allSourceCode = clSourceCode def kernelNames: String = dctKernel.kernel_name private def getKernelWorkDimensions(kernelInfo: ClFFTKernelInfo): WorkDimensions = { val lWorkItems: Int = kernelInfo.num_workitems_per_workgroup val batchSize = kernelInfo.dir match { case X => size.y * size.z case Y => size.z case Z => 1 } val workGroupMultiplier = kernelInfo.dir match { case X => Math.ceil(batchSize.toDouble / kernelInfo.num_xforms_per_workgroup).toInt case Y => batchSize case Z => batchSize } val numWorkGroups = kernelInfo.num_workgroups * workGroupMultiplier val gWorkItems = numWorkGroups * lWorkItems WorkDimensions(batchSize, gWorkItems, lWorkItems) } private def log2(x: Int): Int = if (x <= 1) 0 else 1 + log2(x / 2) private def isPowerOf2(x: Int) = x == (1 << log2(x)) // For any n, this function decomposes n into factors for loacal memory tranpose // based fft. Factors (radices) are sorted such that the first one (radixArray[0]) // is the largest. This base radix determines the number of registers used by each // work item and product of remaining radices determine the size of work group needed. // To make things concrete with and example, suppose n = 1024. It is decomposed into // 1024 = 16 x 16 x 4. Hence kernel uses float2 a[16], for local in-register fft and // needs 16 x 4 = 64 work items per work group. So kernel first performance 64 length // 16 ffts (64 work items working in parallel) following by transpose using local // memory followed by again 64 length 16 ffts followed by transpose using local memory // followed by 256 length 4 ffts. For the last step since with size of work group is // 64 and each work item can array for 16 values, 64 work items can compute 256 length // 4 ffts by each work item computing 4 length 4 ffts. // Similarly for n = 2048 = 8 x 8 x 8 x 4, each work group has 8 x 8 x 4 = 256 work // iterms which each computes 256 (in-parallel) length 8 ffts in-register, followed // by transpose using local memory, followed by 256 length 8 in-register ffts, followed // by transpose using local memory, followed by 256 length 8 in-register ffts, followed // by transpose using local memory, followed by 512 length 4 in-register ffts. Again, // for the last step, each work item computes two length 4 in-register ffts and thus // 256 work items are needed to compute all 512 ffts. // For n = 32 = 8 x 4, 4 work items first compute 4 in-register // lenth 8 ffts, followed by transpose using local memory followed by 8 in-register // length 4 ffts, where each work item computes two length 4 ffts thus 4 work items // can compute 8 length 4 ffts. However if work group size of say 64 is choosen, // each work group can compute 64/ 4 = 16 size 32 ffts (batched transform). // Users can play with these parameters to figure what gives best performance on // their particular device i.e. some device have less register space thus using // smaller base radix can avoid spilling ... 
some has small local memory thus // using smaller work group size may be required etc private def getRadixArray(size: Int): Array[Int] = { size match { case 2 => Array(2) case 4 => Array(4) case 8 => Array(8) case 16 => Array(8, 2) case 32 => Array(8, 4) case 64 => Array(8, 8) case 128 => Array(8, 4, 4) case 256 => Array(4, 4, 4, 4) case 512 => Array(8, 8, 8) case 1024 => Array(16, 16, 4) case 2048 => Array(8, 8, 8, 4) case _ => throw new RuntimeException("Illegal FFT dimension size.") } } private def getRadixArray(size: Int, maxRadix: Int): Array[Int] = { require(maxRadix > 1) val buffer = ArrayBuffer[Int]() var n = size while (n > maxRadix) { buffer.append(maxRadix) n /= maxRadix } buffer.append(n) buffer.toArray } private def insertHeader(kernelString: StringBuilder, kernelName: String): Unit = { // The HyperKernel framework controls the header generation, so do nothing. } // Cog approach to supporting split real/imag buffer layout prior to actually // passing in two buffers for each field. private def insertCogDefines(kernelString: StringBuilder): Unit = { kernelString.append( s"""|#define dir (%dirVal%) |#define S (%batchSize%) |for (int tensorElement = 0; | tensorElement < _tensorElements; | tensorElement++) { |""".stripMargin ) } private def insertCogUndefines(kernelString: StringBuilder): Unit = { kernelString.append( s"""| barrier(CLK_LOCAL_MEM_FENCE); |#undef dir |#undef S |} |""".stripMargin ) } private def insertVariables(kStream: StringBuilder, maxRadix: Int): Unit = { kStream.append( s"""| int i, j, r, indexIn, indexOut, index, tid, bNum, xNum, k, l; | int s, ii, jj, offset; | float2 w; | float ang, angf, ang1; | __local float *lMemStore, *lMemLoad; | float2 a[$maxRadix]; | int lId = get_local_id(0); | int groupId = get_group_id(0); |""".stripMargin ) } /** Twiddles the values in the a[] arrays (each thread has it's own) required * by the DCT when doing the forward transform on rows. For an explanation, * see "Image compression using the discrete cosine transform," Watson, * Mathematica Journal, 4(1), 1994. * * NOTE: This only works for sizes 256 and larger, reason unknown. */ private def rowTwiddle(kernelString: StringBuilder, aIndex: Int, gIndex: Int, inverse: Boolean) { require(size.x.toInt >= 256, "DCT can only handle sizes 256 and larger (BUG)") val column = gIndex.toString + " + lId" val twiddleString = """ | { | float2 twiddleFactor; | if ((%column%) == 0) | twiddleFactor = (float2)(1.0f, 0.0f); | else { | float realPart = 0.0f; | float imagPart = (%sign%M_PI * (%column%)) / (2 * %lineLength%); | float2 exp = | complexExp((float2)(realPart, imagPart)); | twiddleFactor = | complexMultiplyReal(exp, %sqrt2%); | } | a[%index%] = complexMultiply(a[%index%], twiddleFactor); | } | barrier(CLK_LOCAL_MEM_FENCE); | """.stripMargin. replaceAll("%index%", aIndex.toString). replaceAll("%column%", column.toString). replaceAll("%lineLength%", size.x.toInt.toString). replaceAll("%sign%", if (inverse) "" else "-"). replaceAll("%sqrt2%", math.sqrt(2).toFloat.toString + "f") kernelString.append(twiddleString) } /** Write out a real or complex value to global memory. */ private def formattedStore(kernelString: StringBuilder, aIndex: Int, gIndex: Int) { //************************************************************************** // We add additional twiddle factors for the DCT just before writing out. 
if (forward) rowTwiddle(kernelString, aIndex, gIndex, inverse = false) // End of DCT twiddling //************************************************************************** kernelString.append( s"""| column = $gIndex; | @outElementNonlocal0 = a[$aIndex].x; |""".stripMargin ) } /** Read a real or complex value from global memory. * * We do the normalization required by the DFT here, dividing each input pixel * by the square root of the number of columns. */ private def formattedLoad(kernelString: StringBuilder, aIndex: Int, gIndex: Int) { val normalize = (1f / math.sqrt(columns).toFloat).toString + "f" kernelString.append( s"""| column = $gIndex; | a[$aIndex] = (float2) (readElementNonlocal(@in0) * $normalize, 0.0f); |""".stripMargin ) //************************************************************************** // We add additional twiddle factors for the DCT just after reading in. if (!forward) rowTwiddle(kernelString, aIndex, gIndex, inverse = true) // End of DCT twiddling //************************************************************************** } // These 2 routines clean up the code generation quite a bit and further localize // the I/O handling, making it easier to adapt to changes in Cog HyperKernel I/O. -RJC /** Bump the input pointer by an offset (measured in elements) */ private def adjustInPtr(kernelString: StringBuilder, offsetStr: String) { kernelString.append(s" fieldName(@in0) += $offsetStr;\\n") } /** Bump the output pointer by an offset (measured in elements) */ private def adjustOutPtr(kernelString: StringBuilder, offsetStr: String) { kernelString.append(s" fieldName(@out0) += $offsetStr;\\n") } private def insertGlobalLoadsAndTranspose(kernelString: StringBuilder, N: Int, numWorkItemsPerXForm: Int, numXFormsPerWG: Int, R0: Int, mem_coalesce_width: Int): Int = { val lMemSize = 0 if (numXFormsPerWG > 1) { kernelString.append(s" s = S & ${numXFormsPerWG - 1};\\n") } if (numWorkItemsPerXForm >= mem_coalesce_width) { kernelString.append( s"""| ii = lId; | jj = 0; | offset = mad24(groupId, $N, ii); |""".stripMargin ) adjustInPtr(kernelString, "offset") adjustOutPtr(kernelString, "offset") for (i <- 0 until R0) { formattedLoad(kernelString, i, i * numWorkItemsPerXForm) } } lMemSize } private def insertGlobalStoresAndTranspose(kernelString: StringBuilder, N: Int, maxRadix: Int, Nr: Int, numWorkItemsPerXForm: Int, numXFormsPerWG: Int, mem_coalesce_width: Int): Int = { val lMemSize = 0 val numIter = maxRadix / Nr var indent = "" if (numWorkItemsPerXForm >= mem_coalesce_width) { if (numXFormsPerWG > 1) { kernelString.append(" if( !s || (groupId < get_num_groups(0)-1) || (jj < s) ) {\\n") indent = " " } for (i <- 0 until maxRadix) { val j = i % numIter val k = i / numIter val ind = j * Nr + k formattedStore(kernelString, ind, i * numWorkItemsPerXForm) } if (numXFormsPerWG > 1) kernelString.append(" }\\n") } lMemSize } private def insertfftKernel(kernelString: StringBuilder, Nr: Int, numIter: Int) { for (i <- 0 until numIter) kernelString.append(s" fftKernel$Nr(a+${i*Nr}, dir);\\n") } private def insertTwiddleKernel(kernelString: StringBuilder, Nr: Int, numIter: Int, Nprev: Int, len: Int, numWorkItemsPerXForm: Int) { import kernelString.append val logNPrev: Int = log2(Nprev) for (z <- 0 until numIter) { if (z == 0) { if (Nprev > 1) append(s" angf = (float) (ii >> $logNPrev);\\n") else append(s" angf = (float) ii;\\n") } else { if (Nprev > 1) append(s" angf = (float) ((${z*numWorkItemsPerXForm} + ii) >> $logNPrev);\\n") else append(s" angf = (float) (${z*numWorkItemsPerXForm} + 
ii);\\n") } for (k <- 1 until Nr) { val ind: Int = z * Nr + k append( s"""| ang = dir * ( 2.0f * M_PI * $k.0f / $len.0f ) * angf; | w = (float2)(native_cos(ang), native_sin(ang)); | a[$ind] = complexMul(a[$ind], w); |""".stripMargin ) } } } /** Returns a tuple of the lMemSize, offset and midPad. */ private def getPadding(numWorkItemsPerXForm: Int, Nprev: Int, numWorkItemsReq: Int, numXFormsPerWG: Int, Nr: Int, numBanks: Int): (Int, Int, Int) = { val offset = if ((numWorkItemsPerXForm <= Nprev) || (Nprev >= numBanks)) 0 else { val numRowsReq = Math.min(numWorkItemsPerXForm, numBanks) / Nprev val numColsReq = Nprev * Math.min(1, numRowsReq / Nr) numColsReq } val midPad = if (numWorkItemsPerXForm >= numBanks || numXFormsPerWG == 1) 0 else { val bankNum = ( (numWorkItemsReq + offset) * Nr ) & (numBanks - 1) Math.max(0, numWorkItemsPerXForm - bankNum) } val lMemSize = ( numWorkItemsReq + offset) * Nr * numXFormsPerWG + midPad * (numXFormsPerWG - 1) (lMemSize, offset, midPad) } // private def getPadding(numWorkItemsPerXForm: Int, Nprev: Int, // numWorkItemsReq: Int, numXFormsPerWG: Int, // Nr: Int, numBanks: Int): fftPadding = // { // var offset = 0 // var midPad = 0 // if ((numWorkItemsPerXForm <= Nprev) || (Nprev >= numBanks)) // offset = 0 // else { // val numRowsReq = // (if (numWorkItemsPerXForm < numBanks) numWorkItemsPerXForm else numBanks) / Nprev // var numColsReq = 1 // if (numRowsReq > Nr) // numColsReq = numRowsReq / Nr // numColsReq = Nprev * numColsReq // offset = numColsReq // } // if (numWorkItemsPerXForm >= numBanks || numXFormsPerWG == 1) { // midPad = 0 // } // else { // val bankNum = ((numWorkItemsReq + offset) * Nr) & (numBanks - 1) // if (bankNum >= numWorkItemsPerXForm) { // midPad = 0 // } // else { // midPad = numWorkItemsPerXForm - bankNum // } // } // val lMemSize = (numWorkItemsReq + offset) * Nr * numXFormsPerWG + midPad * (numXFormsPerWG - 1) // new fftPadding(lMemSize, offset, midPad) // } private def insertLocalStores(kernelString: StringBuilder, numIter: Int, Nr: Int, numWorkItemsPerXForm: Int, numWorkItemsReq: Int, offset: Int, comp: String) { import kernelString.append for (z <- 0 until numIter) { for (k <- 0 until Nr) { val index = k*(numWorkItemsReq + offset) + z*numWorkItemsPerXForm append(s" lMemStore[$index] = a[${z*Nr + k}].$comp;\\n") } } append(" barrier(CLK_LOCAL_MEM_FENCE);\\n") } private def insertLocalLoads(kernelString: StringBuilder, n: Int, Nr: Int, Nrn: Int, Nprev: Int, Ncurr: Int, numWorkItemsPerXForm: Int, numWorkItemsReq: Int, offset: Int, comp: String) { import kernelString.append val numWorkItemsReqN: Int = n / Nrn val interBlockHNum: Int = math.max( Nprev / numWorkItemsPerXForm, 1) val interBlockHStride: Int = numWorkItemsPerXForm val vertWidth: Int = Math.min(Nr, math.max(numWorkItemsPerXForm / Nprev, 1)) val vertNum: Int = Nr / vertWidth val vertStride: Int = ( n / Nr + offset ) * vertWidth val iter: Int = Math.max( numWorkItemsReqN / numWorkItemsPerXForm, 1) val intraBlockHStride: Int = Nprev * Math.max( numWorkItemsPerXForm / (Nprev*Nr), 1) val stride: Int = numWorkItemsReq / Nrn for (i <- 0 until iter) { val ii: Int = i / (interBlockHNum * vertNum) val zz: Int = i % (interBlockHNum * vertNum) val jj: Int = zz % interBlockHNum val kk: Int = zz / interBlockHNum for (z <- 0 until Nrn) { val st: Int = kk * vertStride + jj * interBlockHStride + ii * intraBlockHStride + z * stride append(s" a[${i*Nrn + z}].$comp = lMemLoad[$st];\\n") } } append(" barrier(CLK_LOCAL_MEM_FENCE);\\n") } private def insertLocalLoadIndexArithmatic(kernelString: 
StringBuilder, Nprev: Int, Nr: Int, numWorkItemsReq: Int, numWorkItemsPerXForm: Int, numXFormsPerWG: Int, offset: Int, midPad: Int) { import kernelString.append val Ncurr: Int = Nprev * Nr val logNcurr: Int = log2(Ncurr) val logNprev: Int = log2(Nprev) val incr: Int = (numWorkItemsReq + offset) * Nr + midPad if (Ncurr < numWorkItemsPerXForm) { if (Nprev == 1) append(s" j = ii & ${Ncurr - 1};\\n") else append(s" j = (ii & ${Ncurr - 1}) >> $logNprev;\\n") if (Nprev == 1) append(s" i = ii >> $logNcurr;\\n") else append(s" i = mad24(ii >> $logNcurr, $Nprev, ii & ${Nprev - 1});\\n") } else { if (Nprev == 1) append(" j = ii;\\n") else append(s" j = ii >> $logNprev;\\n") if (Nprev == 1) append(" i = 0;\\n") else append(s" i = ii & ${Nprev - 1};\\n") } if (numXFormsPerWG > 1) append(s" i = mad24(jj, $incr, i);\\n") append(s" lMemLoad = sMem + mad24(j, ${numWorkItemsReq + offset}, i);\\n") } private def insertLocalStoreIndexArithmatic(kernelString: StringBuilder, numWorkItemsReq: Int, numXFormsPerWG: Int, Nr: Int, offset: Int, midPad: Int) { import kernelString.append if (numXFormsPerWG == 1) append(s" lMemStore = sMem + ii;\\n") else append(s" lMemStore = sMem + mad24(jj, ${(numWorkItemsReq + offset)*Nr + midPad}, ii);\\n") } private def createLocalMemfftKernelString() { val n: Int = size.x require(n <= max_work_item_per_workgroup * max_radix, "signal length too big for local mem fft.") val radixArray = { val noMaxRadixArray = getRadixArray(n) require(noMaxRadixArray.length > 0, "no radix array supplied.") if (n/noMaxRadixArray(0) > max_work_item_per_workgroup) getRadixArray(n, max_radix) else noMaxRadixArray } def numRadix = radixArray.length require(radixArray(0) <= max_radix, "max radix choosen is greater than allowed.") require(n/radixArray(0) <= max_work_item_per_workgroup, "required work items per xform greater than maximum work items allowed per work group for local mem fft.") radixArray.foreach(i => require(i > 0 && isPowerOf2(i), s"Expecting power of two radix, found $i.")) val tmpLen = radixArray.foldLeft(1)(_ * _) require(tmpLen == n, s"product of radices $tmpLen choosen doesn't match the length of signal $n.") val localString: StringBuilder = new StringBuilder val kCount: Int = 1 val kernelName = "fft" + kCount + size + radixArrayToString(radixArray) + "_S%batchSize%_%dirName%" val numWorkItemsPerXForm: Int = n / radixArray(0) val numWorkItemsPerWG: Int = if (numWorkItemsPerXForm <= 64) 64 else numWorkItemsPerXForm require(numWorkItemsPerWG <= max_work_item_per_workgroup, "max work items per workgroup exceeded.") val numXFormsPerWG: Int = numWorkItemsPerWG / numWorkItemsPerXForm val kInfo: ClFFTKernelInfo = new ClFFTKernelInfo(kernelName, dir = X, num_workgroups = 1, num_xforms_per_workgroup = numXFormsPerWG, num_workitems_per_workgroup = numWorkItemsPerWG, in_place_possible = true) dctKernel = kInfo val kernelString = kInfo.kernel_string import kernelString.append val N: Array[Int] = radixArray val maxRadix: Int = N(0) val lMemSize1 = insertGlobalLoadsAndTranspose(localString, n, numWorkItemsPerXForm, numXFormsPerWG, maxRadix, min_mem_coalesce_width) kInfo.setMinLMemSize(lMemSize1) val xcomp: String = "x" val ycomp: String = "y" var Nprev: Int = 1 var len: Int = n for (r <- 0 until numRadix) { val numIter: Int = N(0) / N(r) val numWorkItemsReq: Int = n / N(r) val Ncurr: Int = Nprev * N(r) insertfftKernel(localString, N(r), numIter) if (r < (numRadix - 1)) { insertTwiddleKernel(localString, N(r), numIter, Nprev, len, numWorkItemsPerXForm) val (padLMemSize, offset, midPad) = 
getPadding(numWorkItemsPerXForm, Nprev, numWorkItemsReq, numXFormsPerWG, N(r), num_local_mem_banks) kInfo.setMinLMemSize(padLMemSize) insertLocalStoreIndexArithmatic(localString, numWorkItemsReq, numXFormsPerWG, N(r), offset, midPad) insertLocalLoadIndexArithmatic(localString, Nprev, N(r), numWorkItemsReq, numWorkItemsPerXForm, numXFormsPerWG, offset, midPad) insertLocalStores(localString, numIter, N(r), numWorkItemsPerXForm, numWorkItemsReq, offset, xcomp) insertLocalLoads(localString, n, N(r), N(r+1), Nprev, Ncurr, numWorkItemsPerXForm, numWorkItemsReq, offset, xcomp) insertLocalStores(localString, numIter, N(r), numWorkItemsPerXForm, numWorkItemsReq, offset, ycomp) insertLocalLoads(localString, n, N(r), N(r+1), Nprev, Ncurr, numWorkItemsPerXForm, numWorkItemsReq, offset, ycomp) Nprev = Ncurr len = len / N(r) } } val lMemSize2 = insertGlobalStoresAndTranspose(localString, n, maxRadix, N(numRadix - 1), numWorkItemsPerXForm, numXFormsPerWG, min_mem_coalesce_width) kInfo.setMinLMemSize(lMemSize2) append("{\\n") if (kInfo.lmem_size > 0) append(s" __local float sMem[${kInfo.lmem_size}];\\n") insertCogDefines(kernelString) insertVariables(kernelString, maxRadix) append(localString) // This is ugly. The original code hacks the input parameters by incrementing // them, meaning we need to reset them to their original values before // beginning the next iteration of the loop. Ugly. val inputParameterReset = """ | { | // Reset input parameters to their entry values. | int offset = mad24((int) get_group_id(0), (int) %n%, (int) get_local_id(0)); | fieldName(@in0) -= offset; | fieldName(@out0) -= offset; | } """.stripMargin.replaceAll("%n%", n.toString) append(inputParameterReset) insertCogUndefines(kernelString) append("}\\n") } /** * Converts a radix array to a string by grabbing the non-zero values * of "radixArray" and concatenating them into a string. For example, * the array (4, 2, 1, 0, 0) would be converted to: "_radix_4_2_1". Note that * zeros in the array are ignored. */ private def radixArrayToString(radixArray: Array[Int]): String = { var string = "_radix" for (radix <- radixArray) { if (radix != 0) string += "_" + radix.toString } string } def DCT1D(dir: ClFFTKernelDir) { createLocalMemfftKernelString() } }
hpe-cct/cct-core
src/main/scala/cogx/compiler/codegenerator/opencl/hyperkernels/discretecosinetransform/DCTPlanner.scala
Scala
apache-2.0
27,429
package io.finch

import cats.Eval
import com.twitter.finagle.OAuth2
import com.twitter.finagle.http.Status
import com.twitter.finagle.oauth2.{AuthInfo, DataHandler, GrantHandlerResult, OAuthError}

package object oauth2 {

  private[this] val handleOAuthError: PartialFunction[Throwable, Output[Nothing]] = {
    case e: OAuthError =>
      val bearer = Seq("error=\\"" + e.errorType + "\\"") ++
        (if (!e.description.isEmpty) Seq("error_description=\\"" + e.description + "\\"") else Nil)

      Output.failure(e, Status(e.statusCode))
        .withHeader("WWW-Authenticate" -> s"Bearer ${bearer.mkString(", ")}")
  }

  /**
   * An [[Endpoint]] that takes a request (with access token) and authorizes it with respect to a
   * given `dataHandler`.
   */
  def authorize[U](dataHandler: DataHandler[U]): Endpoint[AuthInfo[U]] =
    Endpoint.embed(items.MultipleItems)(i =>
      Some((i, Eval.later(OAuth2.authorize(i.request, dataHandler).map(Output.payload(_)))))
    ).handle(handleOAuthError)

  /**
   * An [[Endpoint]] that takes a request (with user credentials) and issues an access token for it
   * with respect to a given `dataHandler`.
   */
  def issueAccessToken[U](dataHandler: DataHandler[U]): Endpoint[GrantHandlerResult] =
    Endpoint.embed(items.MultipleItems)(i =>
      Some((i, Eval.later(OAuth2.issueAccessToken(i.request, dataHandler).map(Output.payload(_)))))
    ).handle(handleOAuthError)
}
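// A hedged usage sketch for the two endpoints defined above. `ExampleUser` and
// `exampleDataHandler` are placeholders for an application-provided model and a
// finagle-oauth2 DataHandler implementation; they are not part of this package,
// and the exact Endpoint combinators available vary across Finch versions.
object OAuth2UsageSketch {
  import com.twitter.finagle.oauth2.{AuthInfo, DataHandler}
  import io.finch._
  import io.finch.oauth2._

  case class ExampleUser(id: Long)

  // Derive an endpoint that yields the authenticated user from the access token.
  def currentUser(exampleDataHandler: DataHandler[ExampleUser]): Endpoint[ExampleUser] =
    authorize(exampleDataHandler).map((info: AuthInfo[ExampleUser]) => info.user)
}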
travisbrown/finch
oauth2/src/main/scala/io/finch/oauth2/package.scala
Scala
apache-2.0
1,423
package org.jetbrains.plugins.scala.lang.psi.implicits import com.intellij.openapi.progress.ProgressManager import com.intellij.openapi.project.Project import com.intellij.psi.PsiElement import com.intellij.psi.util.PsiTreeUtil import org.jetbrains.plugins.scala.autoImport.GlobalExtensionMethod import org.jetbrains.plugins.scala.autoImport.GlobalMember.findGlobalMembers import org.jetbrains.plugins.scala.caches.ModTracker import org.jetbrains.plugins.scala.extensions.{PsiClassExt, PsiElementExt} import org.jetbrains.plugins.scala.lang.psi.api.expr.ScExpression import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunction import org.jetbrains.plugins.scala.lang.psi.impl.toplevel.typedef.MixinNodes import org.jetbrains.plugins.scala.lang.psi.implicits.ImplicitCollector.ImplicitState import org.jetbrains.plugins.scala.lang.psi.stubs.index.ExtensionIndex import org.jetbrains.plugins.scala.lang.psi.types.api.{Any, FunctionType} import org.jetbrains.plugins.scala.lang.psi.types.recursiveUpdate.ScSubstitutor import org.jetbrains.plugins.scala.lang.psi.types.{ConstraintSystem, ConstraintsResult, ScType} import org.jetbrains.plugins.scala.lang.psi.{ElementScope, ScalaPsiUtil} import org.jetbrains.plugins.scala.lang.resolve.ScalaResolveResult import org.jetbrains.plugins.scala.macroAnnotations.CachedInUserData import org.jetbrains.plugins.scala.project.ProjectContext import org.jetbrains.plugins.scala.util.CommonQualifiedNames.AnyFqn class ExtensionMethodData(val function: ScFunction, rawExtensionTargetType: ScType, rawReturnType: ScType, val substitutor: ScSubstitutor) { private lazy val paramType: ScType = substitutor.followed(ScalaPsiUtil.undefineMethodTypeParams(function))(rawExtensionTargetType) private lazy val returnType: ScType = substitutor(rawReturnType) def withSubstitutor(newSubstitutor: ScSubstitutor): ExtensionMethodData = new ExtensionMethodData(function, rawExtensionTargetType, rawReturnType, newSubstitutor) override def toString: String = function.name def application(fromType: ScType, place: PsiElement): Option[ExtensionMethodApplication] = { // to prevent infinite recursion if (PsiTreeUtil.isContextAncestor(function.nameContext, place, false)) return None ProgressManager.checkCanceled() fromType.conforms(paramType, ConstraintSystem.empty, checkWeak = true) match { case ConstraintsResult.Left => None case _ => if (function.hasTypeParameters || function.extensionMethodOwner.exists(_.typeParameters.nonEmpty)) { implicit val projectContext: ProjectContext = function.projectContext implicit val elementScope: ElementScope = function.elementScope val functionType = FunctionType(Any, Seq(fromType.tryExtractDesignatorSingleton)) val implicitState = ImplicitState( place = place, tp = functionType, expandedTp = functionType, coreElement = None, isImplicitConversion = true, searchImplicitsRecursively = 0, extensionData = None, fullInfo = true, previousRecursionState = Some(ImplicitsRecursionGuard.currentMap) ) val resolveResult = new ScalaResolveResult(function, ScSubstitutor.empty) val collector = new ImplicitCollector(implicitState) val compatible = collector.checkFunctionByType(resolveResult, withLocalTypeInference = true, checkFast = false) for { srr <- compatible resultType <- ExtensionConversionHelper.specialExtractParameterType(srr) } yield ExtensionMethodApplication(resultType, srr.implicitParameters) } else Some(ExtensionMethodApplication(returnType)) } } } object ExtensionMethodData { def apply(globalExtensionMethod: GlobalExtensionMethod): Option[ExtensionMethodData] = 
ExtensionMethodData(globalExtensionMethod.function, globalExtensionMethod.substitutor) def apply(function: ScFunction, substitutor: ScSubstitutor): Option[ExtensionMethodData] = { ProgressManager.checkCanceled() rawExtensionMethodCheck(function).map(_.withSubstitutor(substitutor)) } def getPossibleExtensionMethods(expr: ScExpression): Map[GlobalExtensionMethod, ExtensionMethodApplication] = if (expr.isInScala3File) { expr.getTypeWithoutImplicits().toOption match { case None => Map.empty case Some(originalType) => val withSuperClasses = originalType.widen.extractClass match { case Some(cls) => MixinNodes.allSuperClasses(cls).map(_.qualifiedName) + cls.qualifiedName + AnyFqn case _ => Set(AnyFqn) } val scope = expr.resolveScope implicit val project: Project = expr.getProject (for { qName <- withSuperClasses extensionMethodCandidate <- ExtensionIndex.extensionMethodCandidatesForFqn(qName, scope) if ImplicitProcessor.isAccessible(extensionMethodCandidate, expr) method <- findGlobalMembers(extensionMethodCandidate, scope)(GlobalExtensionMethod) data <- ExtensionMethodData(method) application <- data.application(originalType, expr) } yield method -> application) .toMap } } else Map.empty @CachedInUserData(function, ModTracker.libraryAware(function)) private def rawExtensionMethodCheck(function: ScFunction): Option[ExtensionMethodData] = for { retType <- function.returnType.toOption ext <- function.extensionMethodOwner targetTypeElem <- ext.targetTypeElement targetType <- targetTypeElem.`type`().toOption } yield new ExtensionMethodData(function, targetType, retType, ScSubstitutor.empty) }
JetBrains/intellij-scala
scala/scala-impl/src/org/jetbrains/plugins/scala/lang/psi/implicits/ExtensionMethodData.scala
Scala
apache-2.0
5,806
package app.components.semanticui

import japgolly.scalajs.react
import japgolly.scalajs.react.Children
import japgolly.scalajs.react.vdom.VdomNode

import scala.scalajs.js

object Container {
  val component = react.JsComponent[js.Object, Children.Varargs, Null](SemanticUiComponents.Container)

  def apply()(children: VdomNode*) = {
    val props = js.Dynamic.literal(
    )
    component(props)(children:_*)
  }
}
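// A minimal usage sketch for the wrapper above. The html_<^ vdom DSL import is the
// standard scalajs-react one; the markup passed as children is illustrative only.
object ContainerUsageSketch {
  import app.components.semanticui.Container
  import japgolly.scalajs.react.vdom.html_<^._

  val example =
    Container()(
      <.h1("Title"),
      <.p("Body text rendered inside a Semantic UI container")
    )
}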
Igorocky/lesn
client/src/main/scala/app/components/semanticui/Container.scala
Scala
mit
419
package test.containters

import org.scalatest.Spec
import org.scalatest.matchers.ShouldMatchers
import org.scalatest.mock.MockitoSugar
import org.mockito.Mockito._
import vog.substance.Substance
import vog.substance.containers.{NoLayerException, BaseContainer}
import java.awt.image.ImageObserver
import swing.Graphics2D
import java.util.Random
import vog.cache.Image

/**
 * @author Ivyl
 */
class BaseContainerSpec extends Spec with ShouldMatchers with MockitoSugar {

  describe("Base Container when adding substance") {
    val container = new BaseContainer
    val substance = mock[Substance]
    val layerName = "first";

    it("should go right") {
      container.addSubstance(layerName, substance)
    }

    it("should not be invoked because it isn't in order") {
      container.foreachOrdered(_.behave())
      verifyZeroInteractions(substance)
    }

    it("should be used when its layer is included in order") {
      container.order = container.order ::: List(layerName)
      container.foreachOrdered(_.behave())
      verify(substance).behave()
    }

    var subs = List[Substance]()

    it("should do this for all elements on all layers") {
      for(i <- 1 to 10) {
        val layerName = "layer" + i
        container.order = layerName :: container.order
        for(j <- 1 to 10) {
          val sub = mock[Substance]
          subs = sub :: subs
          container.addSubstance(layerName, sub)
        }
      }
      container.foreachOrdered(_.behave())
      subs.foreach(verify(_).behave())
    }

    it("should draw all") {
      val graphics = mock[Graphics2D]
      val observer = mock[ImageObserver]
      container.drawAll(graphics, observer)
      subs.foreach(verify(_).draw(graphics, observer))
    }

    it("should behave all") {
      container.behaveAll()
      subs.foreach(verify(_, times(2)).behave()) //from previous
    }

    it("should remove all marked as dead") {
      class TestSubstance extends Substance {
        protected def behavior() {}
        var image: Option[Image] = None
      }

      val rand = new Random

      for (i <- 1 to 100) {
        val layerName = "testu" + i
        container.order = layerName :: container.order
        for (j <- 1 to 100) {
          val substance = new TestSubstance
          if (rand.nextBoolean) {
            substance.die()
          }
          container.addSubstance(layerName, substance)
        }
      }

      container.removeDead()
      container.foreachOrdered(_.isDead should be (false))
    }

    it("should throw exception when non-existing layer is ordered") {
      container.order = "foo" :: container.order
      evaluating {
        container.foreachOrdered(_.behave())
      } should produce [NoLayerException]
    }
  }
}
ivyl/vog-engine
test/containters/BaseContainerSpec.scala
Scala
mit
2,732
package com.containant.casestudies /** This example shows how ContainAnt encompasses all of * the Hoos-Hsu' Programming-by-Optimization examples, by * solving their DHeap optimization problem. */ import com.containant._ import com.containant.heuristics._ import com.containant.casestudies.util._ object CS5DHeap { ////////////////////////////////////////////////////////////////////// // Configuration val _seed: Int = 0xDEADBEEF val _runs: Int = 10 object Hmma extends AntHeuristic { override val _maxPheromone: Double = 50000 override val _evaporationRate: Double = 0.4 override val _iterations: Int = 1000 override val _antNumber: Int = 1 override val _minimumFraction: Double = 0.10 override val _recursionDepth: Int = 5 override val RNG: java.util.Random = new java.util.Random(_seed) override def toString: String = "mma" } object Hgre extends GrEvoHeuristic { override val _population: Int = 100 override val _length: Int = 7 override val _maxChoice: Int = 15 override val _tournamentSize = 5 override val _generations = 10 override val _recursionDepth = 5 override val RNG: java.util.Random = new java.util.Random(_seed) override def toString: String = "gre" } object Hran extends RandomHeuristic { override val _iterations = 1000 override val _recursionDepth = 5 override val RNG: java.util.Random = new java.util.Random(_seed) override def toString: String = "ran" } ////////////////////////////////////////////////////////////////////// // Problem Description def target(heap: DHeap): Double = heap.measureAccess //1.0/heap.measureAccess.toDouble class A(val value: Int) // arity class E(val value: Int) // expansion_factor class I(val value: Int) // initial_size object HeapModule extends Module { def dheap(arity: A, expansion_factor: E, initial_size: I): DHeap = new DHeap(arity.value, expansion_factor.value, initial_size.value) val a2 = new A(2) val a3 = new A(3) val a4 = new A(4) val a5 = new A(5) val a6 = new A(6) val a7 = new A(7) val a8 = new A(8) val e2 = new E(2) val e3 = new E(3) val e4 = new E(4) val e5 = new E(5) val e6 = new E(6) val e7 = new E(7) val e8 = new E(8) val e9 = new E(9) val e10 = new E(10) val e11 = new E(11) val e12 = new E(12) val e13 = new E(13) val e14 = new E(14) val e15 = new E(15) val e16 = new E(16) val i2 = new I(2) val i4 = new I(4) val i8 = new I(8) val i16 = new I(16) val i32 = new I(32) val i64 = new I(64) val i128 = new I(128) val i256 = new I(256) val i512 = new I(512) val i1024 = new I(1024) val i2048 = new I(2048) val i4096 = new I(4096) val i8192 = new I(8192) val i16384 = new I(16384) val i32768 = new I(32768) val i9 = new I(9) val i27 = new I(27) val i81 = new I(81) val i243 = new I(243) val i729 = new I(729) val i2187 = new I(2187) val i6561 = new I(6561) val i19683 = new I(19683) val i59049 = new I(59049) } ////////////////////////////////////////////////////////////////////// // Experiment Details def main(args: Array[String]): Unit = { import com.containant.casestudies.Framework println("\\n-----------------------------") println("Case Study 5: Dynamic Heap") println("Runs: " + _runs) val comparison = Framework.experiment[DHeap](Hmma, Hgre, _runs, HeapModule, target) val reference = Framework.experiment[DHeap](Hran, Hran, _runs, HeapModule, target) println("heuristic,min,mean,max,var") println(comparison.summary1) println(comparison.summary2) println(reference.summary1) println("p: " + comparison.pvalue) println() } }
zaklogician/ContainAnt-devel
src/main/scala/com/containant/casestudies/CS5DHeap.scala
Scala
bsd-3-clause
3,850
/* * Copyright 2015 HM Revenue & Customs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package uk.gov.hmrc.play.audit.http import play.api.mvc.{Headers, Session} import uk.gov.hmrc.play.audit.EventKeys import uk.gov.hmrc.play.audit.http.connector.AuditProvider import uk.gov.hmrc.play.http.logging._ import uk.gov.hmrc.play.http.{HeaderNames, SessionKeys} import scala.util.Try case class UserId(value: String) extends AnyVal case class Token(value: String) extends AnyVal case class HeaderCarrier(authorization: Option[Authorization] = None, userId: Option[UserId] = None, token: Option[Token] = None, forwarded: Option[ForwardedFor] = None, sessionId: Option[SessionId] = None, requestId: Option[RequestId] = None, requestChain: RequestChain = RequestChain.init, nsStamp: Long = System.nanoTime(), extraHeaders: Seq[(String, String)] = Seq(), trueClientIp: Option[String] = None, trueClientPort: Option[String] = None) extends LoggingDetails with HeaderProvider with AuditProvider { import EventKeys._ /** * @return the time, in nanoseconds, since this header carrier was created */ def age = System.nanoTime() - nsStamp val names = HeaderNames lazy val headers: Seq[(String, String)] = { List(requestId.map(rid => names.xRequestId -> rid.value), sessionId.map(sid => names.xSessionId -> sid.value), forwarded.map(f => names.xForwardedFor -> f.value), token.map(t => names.token -> t.value), Some(names.xRequestChain -> requestChain.value), authorization.map(auth => names.authorisation -> auth.value), trueClientIp.map(HeaderNames.trueClientIp ->_), trueClientPort.map(HeaderNames.trueClientPort ->_) ).flatten.toList ++ extraHeaders } def withExtraHeaders(headers:(String, String)*) : HeaderCarrier = { this.copy(extraHeaders = extraHeaders ++ headers) } private lazy val auditTags = Map[String, String]( names.xRequestId -> requestId.map(_.value).getOrElse("-"), names.xSessionId -> sessionId.map(_.value).getOrElse("-"), "clientIP" -> trueClientIp.getOrElse("-"), "clientPort" -> trueClientPort.getOrElse("-") ) private lazy val auditDetails = Map[String, String]( "ipAddress" -> forwarded.map(_.value).getOrElse("-"), names.authorisation -> authorization.map(_.value).getOrElse("-"), names.token -> token.map(_.value).getOrElse("-") ) def toAuditTags(transactionName: String, path: String) = { auditTags ++ Map[String, String]( TransactionName -> transactionName, Path -> path ) } def toAuditDetails(details: (String, String)*) = auditDetails ++ details } object HeaderCarrier { @deprecated("use fromHeadersAndSession", "1.6.0") def fromHeaders(headers: Headers) = { val authorization = headers.get(HeaderNames.authorisation).map(Authorization) val token = headers.get(HeaderNames.token).map(Token) val forwardedFor = headers.get(HeaderNames.xForwardedFor).map(ForwardedFor) val sessionId = headers.get(HeaderNames.xSessionId).map(SessionId) val requestTimestamp = Try[Long] { headers.get(HeaderNames.xRequestTimestamp).map(_.toLong).getOrElse(System.nanoTime()) }.toOption val requestId = headers.get(HeaderNames.xRequestId).map(RequestId) new 
HeaderCarrier(authorization, None, token, forwardedFor, sessionId, requestId, buildRequestChain(headers.get(HeaderNames.xRequestChain)), requestTimestamp.getOrElse(System.nanoTime())) } @deprecated("use fromHeadersAndSession", "1.6.0") def fromSessionAndHeaders(session: Session, headers: Headers) = { def getSessionId: Option[String] = session.get(SessionKeys.sessionId).fold[Option[String]](headers.get(HeaderNames.xSessionId))(Some(_)) HeaderCarrier( authorization = session.get(SessionKeys.authToken).map(Authorization), userId = session.get(SessionKeys.userId).map(UserId), token = session.get(SessionKeys.token).map(Token), forwarded = ((headers.get(HeaderNames.trueClientIp), headers.get(HeaderNames.xForwardedFor)) match { case (tcip, None) => tcip case (None | Some(""), xff) => xff case (Some(tcip), Some(xff)) if xff.startsWith(tcip) => Some(xff) case (Some(tcip), Some(xff)) => Some(s"$tcip, $xff") }).map(ForwardedFor), sessionId = getSessionId.map(SessionId), requestId = headers.get(HeaderNames.xRequestId).map(RequestId), requestChain = buildRequestChain(headers.get(HeaderNames.xRequestChain)), nsStamp = requestTimestamp(headers) ) } def fromHeadersAndSession(headers: Headers, session: Option[Session]=None) = { session.fold(fromHeaderss(headers)) { fromSession(headers, _) } } private def getSessionId(s: Session, headers: Headers) = s.get(SessionKeys.sessionId).fold[Option[String]](headers.get(HeaderNames.xSessionId))(Some(_)) private def fromHeaderss(headers: Headers): HeaderCarrier = { HeaderCarrier( headers.get(HeaderNames.authorisation).map(Authorization), None, headers.get(HeaderNames.token).map(Token), forwardedFor(headers), headers.get(HeaderNames.xSessionId).map(SessionId), headers.get(HeaderNames.xRequestId).map(RequestId), buildRequestChain(headers.get(HeaderNames.xRequestChain)), requestTimestamp(headers), Seq.empty, headers.get(HeaderNames.trueClientIp), headers.get(HeaderNames.trueClientPort) ) } private def fromSession(headers: Headers, s: Session): HeaderCarrier = { HeaderCarrier( s.get(SessionKeys.authToken).map(Authorization), s.get(SessionKeys.userId).map(UserId), s.get(SessionKeys.token).map(Token), forwardedFor(headers), getSessionId(s, headers).map(SessionId), headers.get(HeaderNames.xRequestId).map(RequestId), buildRequestChain(headers.get(HeaderNames.xRequestChain)), requestTimestamp(headers), Seq.empty, headers.get(HeaderNames.trueClientIp), headers.get(HeaderNames.trueClientPort) ) } private def forwardedFor(headers: Headers): Option[ForwardedFor] = { ((headers.get(HeaderNames.trueClientIp), headers.get(HeaderNames.xForwardedFor)) match { case (tcip, None) => tcip case (None | Some(""), xff) => xff case (Some(tcip), Some(xff)) if xff.startsWith(tcip) => Some(xff) case (Some(tcip), Some(xff)) => Some(s"$tcip, $xff") }).map(ForwardedFor) } def buildRequestChain(currentChain: Option[String]): RequestChain = { currentChain match { case None => RequestChain.init case Some(chain) => RequestChain(chain).extend } } def requestTimestamp(headers: Headers): Long = headers .get(HeaderNames.xRequestTimestamp) .flatMap(tsAsString => Try(tsAsString.toLong).toOption) .getOrElse(System.nanoTime()) }
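// A hedged sketch of the intended entry point above: building a carrier from an
// incoming Play request and attaching an extra header. The request value and the
// "X-Example" header name are assumptions for illustration only.
object HeaderCarrierUsageSketch {
  import play.api.mvc.RequestHeader
  import uk.gov.hmrc.play.audit.http.HeaderCarrier

  def carrierFor(request: RequestHeader): HeaderCarrier =
    HeaderCarrier
      .fromHeadersAndSession(request.headers, Some(request.session))
      .withExtraHeaders("X-Example" -> "demo")
}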
liquidarmour/http-verbs
src/main/scala/uk/gov/hmrc/play/audit/http/HeaderCarrier.scala
Scala
apache-2.0
7,567
package com.joshcough.minecraft

/**
 * This plugin and this code are not intended for use.
 * It is just an empty plugin that is required for turning this library
 * into a plugin, so that the API and Ermine can be on the classpath for
 * plugins that want to use this API.
 */
class ErmineLibPlugin extends org.bukkit.plugin.java.JavaPlugin {
  override def onEnable : Unit = {}
  override def onDisable : Unit = {}
}
joshcough/ErMinecraft
ermineLibPlugin/src/main/scala/com/joshcough/minecraft/ErmineLibPlugin.scala
Scala
mit
420
/* * Copyright (c) 2014-2020 by The Monix Project Developers. * See the project homepage at: https://monix.io * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package monix.reactive.observers import monix.execution.ChannelType import monix.execution.ChannelType.MultiProducer import monix.reactive.OverflowStrategy import monix.reactive.observers.buffers.BuildersImpl /** Interface describing [[monix.reactive.Observer Observer]] wrappers * that are thread-safe (can receive concurrent events) and that * return an immediate `Continue` when receiving `onNext` * events. Meant to be used by data sources that cannot uphold the * no-concurrent events and the back-pressure related requirements * (i.e. data-sources that cannot wait on `Future[Ack]` for * sending the next event). * * Implementations of this interface have the following contract: * * - `onNext` / `onError` / `onComplete` of this interface MAY be * called concurrently * - `onNext` SHOULD return an immediate `Continue`, as long as the * buffer is not full and the underlying observer hasn't signaled * `Stop` (N.B. due to the asynchronous nature, `Stop` signaled * by the underlying observer may be noticed later, so * implementations of this interface make no guarantee about queued * events - which could be generated, queued and dropped on the * floor later) * - `onNext` MUST return an immediate `Stop` result, after it * notices that the underlying observer signaled `Stop` (due to * the asynchronous nature of observers, this may happen later and * queued events might get dropped on the floor) * - in general the contract for the underlying Observer is fully * respected (grammar, non-concurrent notifications, etc...) * - when the underlying observer canceled (by returning `Stop`), * or when a concurrent upstream data source triggered an error, * this SHOULD eventually be noticed and acted upon * - as long as the buffer isn't full and the underlying observer * isn't `Stop`, then implementations of this interface SHOULD * not lose events in the process * - the buffer MAY BE either unbounded or bounded, in case of * bounded buffers, then an appropriate overflowStrategy needs to be set for * when the buffer overflows - either an `onError` triggered in the * underlying observer coupled with a `Stop` signaled to the * upstream data sources, or dropping events from the head or the * tail of the queue, or attempting to apply back-pressure, etc... * * See [[monix.reactive.OverflowStrategy OverflowStrategy]] for the buffer * policies available. */ trait BufferedSubscriber[-A] extends Subscriber[A] private[reactive] trait Builders { /** Given an [[OverflowStrategy]] wraps a [[Subscriber]] into a * buffered subscriber. */ def apply[A]( subscriber: Subscriber[A], bufferPolicy: OverflowStrategy[A], producerType: ChannelType.ProducerSide = MultiProducer): Subscriber[A] /** Given an synchronous [[OverflowStrategy overflow strategy]] wraps * a [[Subscriber]] into a buffered subscriber. 
*/ def synchronous[A]( subscriber: Subscriber[A], bufferPolicy: OverflowStrategy.Synchronous[A], producerType: ChannelType.ProducerSide = MultiProducer): Subscriber.Sync[A] /** Builds a batched buffered subscriber. * * A batched buffered subscriber buffers incoming events while * the `underlying` is busy and then sends a whole sequence at once. * * The underlying buffer size will be able to support at least * `maxSize` items. When the `maxSize` is reached, the subscriber * will back-pressure the source. * * So a batched buffered subscriber is implicitly delivering * the back-pressure overflow strategy. */ def batched[A]( underlying: Subscriber[List[A]], bufferSize: Int, producerType: ChannelType.ProducerSide = MultiProducer): Subscriber[A] } object BufferedSubscriber extends Builders with BuildersImpl
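// A hedged sketch of the contract described above: wrapping a plain Subscriber in a
// buffered one so that concurrent producers can push events without awaiting acks.
// The back-pressure strategy and buffer size are illustrative choices, not requirements.
object BufferedSubscriberUsageSketch {
  import monix.reactive.OverflowStrategy
  import monix.reactive.observers.{BufferedSubscriber, Subscriber}

  def buffered[A](underlying: Subscriber[A]): Subscriber[A] =
    BufferedSubscriber[A](underlying, OverflowStrategy.BackPressure(bufferSize = 1024))
}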
alexandru/monifu
monix-reactive/shared/src/main/scala/monix/reactive/observers/BufferedSubscriber.scala
Scala
apache-2.0
4,559
package uk.vitalcode.dateparser.token

import java.text.DateFormatSymbols
import java.time.DayOfWeek
import java.util.Locale

import scala.util.Try

final case class WeekDay(value: DayOfWeek, index: Int = 0) extends DateToken

object WeekDay extends TokenCompanion[WeekDay] {

  private val weekdays: Seq[String] = new DateFormatSymbols(Locale.UK).getWeekdays.map(w => w.toLowerCase)
  private val shortWeekdays: Seq[String] = new DateFormatSymbols(Locale.UK).getShortWeekdays.map(w => w.toLowerCase)

  private def getDayOfWeek(index: Int): DayOfWeek = DayOfWeek.of(if (index > 1) index - 1 else 7)

  override def of(token: String, index: Int): Try[WeekDay] = Try {
    val tokenLowCase = token.toLowerCase
    val weekdaysIndex = weekdays.indexOf(tokenLowCase)
    val shortWeekdaysIndex = shortWeekdays.indexOf(tokenLowCase)

    if (weekdaysIndex != -1) WeekDay(getDayOfWeek(weekdaysIndex), index)
    else if (shortWeekdaysIndex != -1) WeekDay(getDayOfWeek(shortWeekdaysIndex), index)
    else throw new Exception(s"Error while parsing [$token] as a WeekDay token")
  }
}
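// A small usage sketch for the parser above; the expected values follow from the
// UK-locale weekday symbols used in the companion object.
object WeekDayUsageSketch extends App {
  import java.time.DayOfWeek
  import scala.util.Success
  import uk.vitalcode.dateparser.token.WeekDay

  assert(WeekDay.of("Monday", 0) == Success(WeekDay(DayOfWeek.MONDAY, 0)))  // full name
  assert(WeekDay.of("fri", 3) == Success(WeekDay(DayOfWeek.FRIDAY, 3)))     // short name
  assert(WeekDay.of("notaday", 1).isFailure)                                // unparseable token
}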
vitalcode/date-time-range-parser
src/main/scala/uk/vitalcode/dateparser/token/WeekDay.scala
Scala
mit
1,079
object Solution extends App {
  def f[A](xs: List[A]) = xs.foldLeft(0)((i,_) => i + 1)

  println(f(io.Source.stdin.getLines.toList.map(_.trim).map(_.toInt)))
}
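// A tiny stand-alone check of the fold above (the function is duplicated here so the
// sketch runs on its own): folding with a counter that ignores each element computes
// the length of the list.
object SolutionCheck extends App {
  def f[A](xs: List[A]) = xs.foldLeft(0)((i, _) => i + 1)

  assert(f(List(10, 20, 30)) == 3)
  assert(f(Nil) == 0)
}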
itsbruce/hackerrank
func/intro/length.scala
Scala
unlicense
160
package com.themillhousegroup.sausagefactory import org.specs2.mutable.Specification import java.lang.{ IllegalArgumentException, UnsupportedOperationException } import scala.Predef._ import com.themillhousegroup.sausagefactory.test.CaseClassSpecification import com.themillhousegroup.sausagefactory.test.CaseClassFixtures._ class ReadIntoFlatCaseClassSpec extends Specification with CaseClassSpecification { case class CannotCreate(first: String, second: String) "Reading maps into case classes - flat structures -" should { "Reject a case class that won't be instantiable" in new CaseClassScope( buildMap("a", "b")) { readInto[CannotCreate] must beAFailedTry[CannotCreate].withThrowable[UnsupportedOperationException] } "Support single-level mapping of simple strings" in new CaseClassScope( buildMap("foo", "bar", "baz")) { val readResult = readIntoResult[AllStrings] readResult must not beNull readResult.first must beEqualTo("foo") readResult.second must beEqualTo("bar") readResult.third must beEqualTo("baz") } "Return a failed Try: IllegalArgumentException if a field is missing" in new CaseClassScope( buildMap("foo", "bar")) { readInto[AllStrings] must beAFailedTry[AllStrings].withThrowable[IllegalArgumentException] } "Support single-level mapping of optional strings - present" in new CaseClassScope( buildMap("foo", "bar", "baz")) { val readResult: OptionalStrings = readIntoResult[OptionalStrings] readResult must not beNull readResult.first must beEqualTo("foo") readResult.second must beEqualTo("bar") readResult.third must beSome("baz") } "Support single-level mapping of optional strings - absent" in new CaseClassScope( buildMap("foo", "bar")) { val readResult: OptionalStrings = readIntoResult[OptionalStrings] readResult must not beNull readResult.first must beEqualTo("foo") readResult.second must beEqualTo("bar") readResult.third must beNone } "Support single-level mapping of mixed types" in new CaseClassScope( buildMap("foo", 6, 9)) { val readResult = readIntoResult[MixedBunch] readResult must not beNull readResult.first must beEqualTo("foo") readResult.second must beSome(6) readResult.third must beEqualTo(9) } "Support single-level mapping where a member is a list of simple types" in new CaseClassScope( buildMap("a", List("x", "y", "z"), 9)) { val readResult = readIntoResult[BasicWithList] readResult must not beNull readResult.first must beEqualTo("a") readResult.second must containTheSameElementsAs(Seq("x", "y", "z")) readResult.third must beEqualTo(9) } "Support single-level mapping where a member is a set of simple types" in new CaseClassScope( buildMap("a", Set("x", "y", "z"), 9)) { val readResult = readIntoResult[BasicWithSet] readResult must not beNull readResult.first must beEqualTo("a") readResult.second must containTheSameElementsAs(Seq("x", "y", "z")) readResult.third must beEqualTo(9) } "Support single-level mapping where a member is a simple map" in new CaseClassScope( buildMap("a", buildMap("x", "y", "z"), 99)) { val readResult = readIntoResult[BasicWithMap] readResult must not beNull readResult.first must beEqualTo("a") readResult.second must havePairs("first" -> "x", "second" -> "y", "third" -> "z") readResult.third must beEqualTo(99) } } }
themillhousegroup/sausagefactory
src/test/scala/com/themillhousegroup/sausagefactory/ReadIntoFlatCaseClassSpec.scala
Scala
mit
3,592
package scala.c.engine class StagingAreaPrimitive extends StandardTest { "bool test" should "print the correct results" in { val code = """ #include <stdbool.h> void main() { bool x = false; printf("%d\\n", x); }""" checkResults(code) } } class RobustPrimitiveTest extends StandardTest { "robust primitive test" should "print the correct results" in { val combo = List('a', 'b', 'e', 'f', 'g', 'h').combinations(2).toList val perms = combo.flatMap{case List(x,y) => List((x,y),(y,x))} val uniques = perms.toSet.toList val code = """ void main() { int x = 0; int a = 43; char b = 65; float c = 0.0f; double d = 0.0; long e = 254345; short f = 3544; unsigned int g = 776; unsigned short h = 345; """ + uniques.map{case (x,y) => s""" $x += $y; $x -= $y; $x *= $y; $x < $y; $x > $y; $x <= $y; $x >= $y; printf("%d\\\\n", $x); """ }.reduce(_ + "\\n" + _) ++ List('a', 'b', 'e', 'f').map{ x => s""" $x = 0; $x = $x + 2; $x = $x - 1; $x >>= 1; $x <<= 1; $x -= 5; $x += 2; printf("%d\\\\n", $x);""" }.reduce(_ + "\\n" + _) ++ List('c', 'd').map{ x => s""" $x = 0; $x = $x + 2.0; $x = $x - 1.0; $x /= 2.0; $x *= 5.0; printf("%f\\\\n", $x);""" }.reduce(_ + "\\n" + _) + "}" checkResults(code) } } class IntegerPromotions extends StandardTest { "Character promotion" should "print the correct results" in { // https://www.tutorialspoint.com/c_standard_library/limits_h.htm val code = """ int main() { char cresult, c1, c2, c3; c1 = 100; c2 = 3; c3 = 4; cresult = c1 * c2 / c3; printf("%d\\n", cresult); return(0); } """ checkResults(code) } } class LimitsTest extends StandardTest { "A limits.h test" should "print the correct results" in { // https://www.tutorialspoint.com/c_standard_library/limits_h.htm val code = """ #include <limits.h> int main() { printf("The number of bits in a byte %d\\n", CHAR_BIT); printf("The minimum value of SIGNED CHAR = %d\\n", SCHAR_MIN); printf("The maximum value of SIGNED CHAR = %d\\n", SCHAR_MAX); printf("The maximum value of UNSIGNED CHAR = %d\\n", UCHAR_MAX); printf("The minimum value of SHORT INT = %d\\n", SHRT_MIN); printf("The maximum value of SHORT INT = %d\\n", SHRT_MAX); printf("The minimum value of INT = %d\\n", INT_MIN); printf("The maximum value of INT = %d\\n", INT_MAX); printf("The minimum value of CHAR = %d\\n", CHAR_MIN); printf("The maximum value of CHAR = %d\\n", CHAR_MAX); printf("The minimum value of LONG = %ld\\n", LONG_MIN); printf("The maximum value of LONG = %ld\\n", LONG_MAX); return(0); } """ checkResults(code) } } class SizeOfTest extends StandardTest { "A sizeof call on an different typed variables" should "print the correct results" in { val code = """ void main() { int x; double y; short z; char b; long c; long long d; float e; printf("%d %d %d %d %d %d %d\\n", sizeof(x), sizeof(y), sizeof(z), sizeof(b), sizeof(c), sizeof(d), sizeof(e)); }""" checkResults(code) } "A variable self reference using sizeof" should "print the correct results" in { val code = """ void main() { int blah = sizeof(blah); int *blah2 = malloc(sizeof(*blah2)); printf("%d %d\\n", blah, sizeof(blah2)); }""" checkResults(code) } "A sizeof call on raw types" should "print the correct results" in { val code = """ void main() { printf("%d %d %d %d %d %d %d %d %d\\n", sizeof(int), sizeof(double), sizeof(short), sizeof(float), sizeof(char), sizeof(long), sizeof(long long), sizeof(void), sizeof(void*)); }""" checkResults(code) } "A sizeof call on an array type" should "print the correct results" in { val code = """ void main() { int x[5]; char y[5]; long long z[5]; printf("%d %d\\n", sizeof(x), 
sizeof(y), sizeof(z)); }""" checkResults(code) } "A sizeof call on an array element" should "print the correct results" in { val code = """ void main() { int a[5]; char b[5]; long c[5]; short d[5]; float e[5]; double f[5]; long long g[5]; printf("%d %d %d %d %d %d %d\\n", sizeof(a[3]), sizeof(b[3]), sizeof(c[3]), sizeof(d[3]), sizeof(e[3]), sizeof(f[3]), sizeof(g[3])); }""" checkResults(code) } "A sizeof call on a pointer element" should "print the correct results" in { val code = """ void main() { int *a; char *b; long *c; short *d; float *e; double *f; long long *g; printf("%d %d %d %d %d %d %d\\n", sizeof(a[3]), sizeof(b[3]), sizeof(c[3]), sizeof(d[3]), sizeof(e[3]), sizeof(f[3]), sizeof(g[3])); }""" checkResults(code) } "A sizeof call on an array of shorts" should "print the correct results" in { val code = """ void main() { short x[5] = {1,2,3,4,5}; printf("%d\\n", sizeof(x) / sizeof(x[0])); }""" checkResults(code) } "A sizeof call on a field" should "print the correct results" in { val code = """ void main() { struct regex_info { int brackets[100]; }; struct regex_info x; printf("%d\\n", sizeof(x.brackets)); }""" checkResults(code) } "A sizeof call on a pointer to array variable" should "print the correct results" in { val code = """ int main() { int a[][3] = {1, 2, 3, 4, 5, 6}; int (*ptr)[3] = a; printf("%d %d %d", sizeof(a), sizeof(ptr), sizeof(*ptr)); }""" checkResults(code) } "A sizeof on a 2d array variable" should "print the correct results" in { val code = """ int main() { int a[2][3] = {1, 2, 3, 4, 5, 6}; printf("%d\\n", sizeof(a)); printf("%d\\n", sizeof(a[0])); }""" checkResults(code) } "A sizeof call on an uninitialized pointer to array variable 2" should "print the correct results" in { val code = """ int main() { int (*ptr)[3]; printf("%d %d", sizeof(ptr), sizeof(*ptr)); }""" checkResults(code) } "array typedefs" should "print the correct results" in { val code = """ void main() { typedef int Test[10]; Test x; x[1] = 10; printf("%d\\n", x[1]); printf("%d\\n", sizeof(Test)); printf("%d\\n", sizeof(x)); printf("%d\\n", sizeof(&x)); printf("%d\\n", sizeof(&x[1])); printf("%d\\n", sizeof(++x[1])); }""" checkResults(code) } } class RolloverTest extends StandardTest { "char rollover test" should "print the correct results" in { val code = """ void main() { char x = 128; char xplusone = 128 + 1; unsigned char y = 255; unsigned char yplusone = y + 1; int x2 = x; int y2 = y; int x3 = xplusone; int y3 = yplusone; printf("%d %d %d %d\\n", x2, y2, y3, x3); }""" checkResults(code) } "short rollover test" should "print the correct results" in { val code = """ void main() { short x = 32767; short xplusone = 32767 + 1; unsigned short y = 65535; unsigned short yplusone = y + 1; int x2 = x; int y2 = y; int y3 = yplusone; int x3 = xplusone; printf("%d %d %d %d\\n", x2, y2, y3, x3); }""" checkResults(code) } } class PrimitiveTest extends StandardTest { "char test" should "print the correct results" in { val code = """ void main() { char x = 'd'; int y = 16; char z = y; char null = '\\0'; printf("%c %c\\n", x, z); printf("%c\\n", null); printf("%c %c %c\\n", x, z, null); }""" checkResults(code) } "hex test" should "print the correct results" in { val code = """ void main() { int x = 0xFFFFFFFF; int i = 0x5f3759df - ( x >> 1 ); printf("%d %d\\n", x, i); }""" checkResults(code) } "short test" should "print the correct results" in { val code = """ void main() { short x = 32767; printf("%d\\n", x); }""" checkResults(code) } "short overflow test" should "print the correct results" in { val code = 
""" void main() { short x = 1000000; printf("%d\\n", x); }""" checkResults(code) } "unsigned int prints from hex" should "print the correct results" in { val code = """ const unsigned int prime = 0x01000193; // 16777619 const unsigned int seed = 0x811C9DC5; // 2166136261 void main() { printf("%d %d\\n", prime, seed); return 0; } """ checkResults(code, false) } "unsigned char test" should "print the correct results" in { val code = """ int test(unsigned char oneByte) { return oneByte; } void main() { printf("%d\\n", test(176)); return 0; } """ checkResults(code) } "unsigned char array and clobbering test on unsigned types using unary expressions" should "print the correct results" in { val code = """ void main() { int i = 0; unsigned char *y = calloc(12, 1); unsigned char *x = y; y[2] = 'a'; y[3] = 'b'; y[11] = 'z'; *x = 100; ++*x; ++x; ++*x; ++*x; ++x; *x = 100; ++*x; ++*x; for (i = 0; i < 12; i++) { putchar(*x); x++; } return 0; } """ checkResults(code, false) } "char ptr initialized to string" should "print the correct results" in { val code = """ void main() { char *test = "TestString"; printf("%s\\n", test); return 0; } """ checkResults(code) } "unsigned types as function arguments" should "print the correct results" in { val code = """ int intTest(unsigned int data) { return data; } int shortTest(unsigned short data) { return data; } short shortTest2(unsigned short data) { return data; } void main() { printf("%d %d %d\\n", intTest(4294967241), shortTest(4294967241), shortTest2(38233)); return 0; } """ checkResults(code) } }
bdwashbu/cEngine
tests/scala/c/engine/PrimitiveTest.scala
Scala
apache-2.0
11,363
package dpla.ingestion3.mappers.providers import dpla.ingestion3.mappers.utils.Document import dpla.ingestion3.messages.{IngestMessage, MessageCollector} import dpla.ingestion3.model._ import org.scalatest.{BeforeAndAfter, FlatSpec} import scala.xml.NodeSeq class P2PMappingTest extends FlatSpec with BeforeAndAfter { implicit val msgCollector: MessageCollector[IngestMessage] = new MessageCollector[IngestMessage] val mapping = new P2PMapping "A P2PMapping" should "have the correct provider name" in { assert(mapping.getProviderName === Some("p2p")) } it should "get the correct original ID" in { val result = mapping.originalId( header( <identifier> oai:plains2peaks:Pine_River_2019-01:oai:prlibrary.cvlcollections.org:54 </identifier> ) ) assert(result === Some("oai:plains2peaks:Pine_River_2019-01:oai:prlibrary.cvlcollections.org:54")) } it should "create the correct DPLA URI" in { val result = mapping.dplaUri( header( <identifier> oai:plains2peaks:Pine_River_2019-01:oai:prlibrary.cvlcollections.org:54 </identifier> ) ) val expected = Some(URI("http://dp.la/api/items/9314d4b80e857cbc478d9c7d281fd14e")) assert(result === expected) } it should "return the correct data provider" in { val result = mapping.dataProvider( metadata( <mods:mods xmlns:mods="http://www.loc.gov/mods/v3"> <note type="ownership">Foo</note> </mods:mods> ) ).headOption.getOrElse(EdmAgent()).name assert(result === Some("Foo")) } it should "return the correct intermediate provider" in { val result = mapping.intermediateProvider( metadata( <mods:mods xmlns:mods="http://www.loc.gov/mods/v3"> <mods:note type="admin">Foo</mods:note> </mods:mods> ) ).headOption.getOrElse(EdmAgent()).name assert(result === Some("Foo")) } it should "return the correct edmRights URI" in { val result = mapping.edmRights( metadata( <mods:mods xmlns:mods="http://www.loc.gov/mods/v3"> <mods:accessCondition type="use and reproduction"> http://rightsstatements.org/vocab/CNE/1.0/ </mods:accessCondition> </mods:mods> ) ).headOption.getOrElse(EdmWebResource(uri = URI(""))) assert(result === URI("http://rightsstatements.org/vocab/CNE/1.0/")) } it should "return the correct isShownAt" in { val result = mapping.isShownAt( metadata( <mods:mods xmlns:mods="http://www.loc.gov/mods/v3"> <mods:location> <mods:url usage="primary display">http://digital.denverlibrary.org/utils/getthumbnail/collection/p15330coll22/id/75547</mods:url> </mods:location> </mods:mods> ) ).headOption.getOrElse(EdmWebResource(uri = URI(""))).uri assert(result === URI("http://digital.denverlibrary.org/utils/getthumbnail/collection/p15330coll22/id/75547")) } it should "return the originalRecord" in { val result = mapping.originalRecord(metadata(Seq())) assert(result.contains("<record")) } it should "return the preview" in { val result = mapping.preview( metadata( <mods:mods xmlns:mods="http://www.loc.gov/mods/v3"> <mods:location> <mods:url access="preview">http://cdm16079.contentdm.oclc.org/cdm/ref/collection/p15330coll22/id/75547</mods:url> </mods:location> </mods:mods> ) ).headOption.getOrElse(EdmWebResource(uri = URI(""))).uri assert(result === URI("http://cdm16079.contentdm.oclc.org/cdm/ref/collection/p15330coll22/id/75547")) } it should "return the provider" in { val result = mapping.provider(metadata(Seq())) assert(result === EdmAgent( name = Some("Plains to Peaks Collective"), uri = Some(URI("http://dp.la/api/contributor/p2p")) ) ) } it should "extract a contributor" in { val result = mapping.contributor( metadata( <mods:mods xmlns:mods="http://www.loc.gov/mods/v3"> <mods:name> 
<mods:namePart>Rinehart, A. E. (Alfred Evans)</mods:namePart> <mods:role> <mods:roleTerm type="text">contributor</mods:roleTerm> </mods:role> </mods:name> </mods:mods> ) ).headOption.getOrElse(EdmAgent()).name assert(result === Some("Rinehart, A. E. (Alfred Evans)")) } it should "extract a creator" in { val result = mapping.creator( metadata( <mods:mods xmlns:mods="http://www.loc.gov/mods/v3"> <mods:name> <mods:namePart>Rinehart, A. E. (Alfred Evans)</mods:namePart> <mods:role> <mods:roleTerm type="text">creator</mods:roleTerm> </mods:role> </mods:name> </mods:mods> ) ).headOption.getOrElse(EdmAgent()).name assert(result === Some("Rinehart, A. E. (Alfred Evans)")) } it should "extract dates" in { val result = mapping.date( metadata( <mods:mods xmlns:mods="http://www.loc.gov/mods/v3"> <mods:originInfo> <mods:dateCreated keyDate="yes">[1890-1900?]</mods:dateCreated> </mods:originInfo> </mods:mods> ) ).headOption.getOrElse(EdmTimeSpan()).originalSourceDate assert(result === Some("[1890-1900?]")) } it should "extract descriptions" in { val result = mapping.description( metadata( <mods:mods xmlns:mods="http://www.loc.gov/mods/v3"> <mods:abstract>Studio portrait of a boy dressed in a tailored wool pinstripe suit with a jacket, cut out at the waist, and a skirt. The jacket has decorative braided cord frogs. He wears stockings and high leather shoes with buttons. He holds a cane with a carved handle and leans on a cement chair or bench.</mods:abstract> </mods:mods> ) ).headOption.getOrElse("") assert(result === "Studio portrait of a boy dressed in a tailored wool pinstripe suit with a jacket, cut out at the waist, and a skirt. The jacket has decorative braided cord frogs. He wears stockings and high leather shoes with buttons. He holds a cane with a carved handle and leans on a cement chair or bench.") } it should "extract extent" in { val result = mapping.extent( metadata( <mods:mods xmlns:mods="http://www.loc.gov/mods/v3"> <mods:physicalDescription> <mods:extent>1 photographic print on card mount : albumen ; 21 x 10 cm. (8 1/2 x 4 in.)</mods:extent> </mods:physicalDescription> </mods:mods> ) ).headOption.getOrElse("") assert(result === "1 photographic print on card mount : albumen ; 21 x 10 cm. 
(8 1/2 x 4 in.)") } it should "extract identifier" in { val result = mapping.identifier( metadata( <mods:mods xmlns:mods="http://www.loc.gov/mods/v3"> <mods:recordInfo> <mods:recordIdentifier>oai:prlibrary.cvlcollections.org:69</mods:recordIdentifier> </mods:recordInfo> </mods:mods> ) ).headOption.getOrElse("") assert(result === "oai:prlibrary.cvlcollections.org:69") } it should "extract language" in { val result = mapping.language( metadata( <mods:mods xmlns:mods="http://www.loc.gov/mods/v3"> <mods:language> <mods:languageTerm>English</mods:languageTerm> </mods:language> </mods:mods> ) ).headOption.getOrElse(SkosConcept()).providedLabel assert(result === Some("English")) } it should "extract subject" in { val result = mapping.subject( metadata( <mods:mods xmlns:mods="http://www.loc.gov/mods/v3"> <mods:subject> <mods:topic>Clothing &amp; dress--19th century</mods:topic> </mods:subject> <mods:subject> <mods:name>Milton Bradley</mods:name> </mods:subject> <mods:subject> <mods:genre>Funky outfits</mods:genre> </mods:subject> </mods:mods> ) ).map(_.providedLabel.getOrElse("")) assert(result.contains("Clothing & dress--19th century")) assert(result.contains("Milton Bradley")) assert(result.contains("Funky outfits")) } it should "extract title" in { val result = mapping.title( metadata( <mods:mods xmlns:mods="http://www.loc.gov/mods/v3"> <mods:titleInfo type="alternative"> <mods:title>Alt Title</mods:title> </mods:titleInfo> <mods:titleInfo> <mods:title>The English Paitent</mods:title> </mods:titleInfo> </mods:mods> ) ) assert(result === Seq("The English Paitent")) } it should "extract alternate title" in { val result = mapping.alternateTitle( metadata( <mods:mods xmlns:mods="http://www.loc.gov/mods/v3"> <mods:titleInfo type="alternative"> <mods:title>Alt Title</mods:title> </mods:titleInfo> <mods:titleInfo> <mods:title>The English Paitent</mods:title> </mods:titleInfo> </mods:mods> ) ) assert(result === Seq("Alt Title")) } it should "extract type" in { val result = mapping.`type`( metadata( <mods:mods xmlns:mods="http://www.loc.gov/mods/v3"> <mods:typeOfResource>Image</mods:typeOfResource> </mods:mods> ) ).headOption.getOrElse("") assert(result === "Image") } it should "extract type and split on ;" in { val result = mapping.`type`( metadata( <mods:mods xmlns:mods="http://www.loc.gov/mods/v3"> <mods:typeOfResource>Image; Sound</mods:typeOfResource> </mods:mods> ) ) assert(result === Seq("Image", "Sound")) } it should "extract publisher" in { val result = mapping.publisher( metadata( <mods:mods xmlns:mods="http://www.loc.gov/mods/v3"> <mods:originInfo> <mods:publisher>The New York Times</mods:publisher> </mods:originInfo> </mods:mods> ) ).headOption.getOrElse(EdmAgent()).name.getOrElse("") assert(result === "The New York Times") } it should "extract format" in { val result = mapping.format( metadata( <mods:mods xmlns:mods="http://www.loc.gov/mods/v3"> <mods:physicalDescription> <mods:form>Tubular Bells</mods:form> </mods:physicalDescription> </mods:mods> ) ).headOption.getOrElse("") assert(result === "Tubular Bells") } it should "extract place" in { val result = mapping.place( metadata( <mods:mods xmlns:mods="http://www.loc.gov/mods/v3"> <mods:subject> <mods:geographic>Bag End</mods:geographic> </mods:subject> </mods:mods> ) ).headOption.getOrElse(DplaPlace()).name.getOrElse("") assert(result === "Bag End") } it should "extract relation" in { val result = mapping.relation( metadata( <mods:mods xmlns:mods="http://www.loc.gov/mods/v3"> <mods:relatedItem type="series"> <mods:titleInfo> 
<mods:title>Game of Thrones</mods:title> </mods:titleInfo> </mods:relatedItem> </mods:mods> ) ).headOption.getOrElse(Left("")) assert(result === Left("Game of Thrones")) } it should "extract collection" in { val result = mapping.collection( metadata( <mods:mods xmlns:mods="http://www.loc.gov/mods/v3"> <mods:relatedItem type="host"> <mods:titleInfo> <mods:title>HBO Videos</mods:title> </mods:titleInfo> </mods:relatedItem> </mods:mods> ) ).headOption.getOrElse(DcmiTypeCollection()).title.getOrElse("") assert(result === "HBO Videos") } it should "extract media master" in { val result = mapping.mediaMaster( metadata( <mods:mods xmlns:mods="http://www.loc.gov/mods/v3"> <mods:location> <mods:url access="raw object">http://full.frame/1</mods:url> </mods:location> </mods:mods> ) ) val expected = Seq("http://full.frame/1").map(stringOnlyWebResource) assert(result === expected) } it should "extract iiif manifest" in { val result = mapping.iiifManifest( metadata( <mods:mods xmlns:mods="http://www.loc.gov/mods/v3"> <mods:location> <mods:url note="iiif-manifest">http://iiif-manifest//1</mods:url> </mods:location> </mods:mods> ) ) val expected = Seq("http://iiif-manifest//1").map(URI) assert(result === expected) } def metadata(metadata: NodeSeq) = record(Seq(), metadata) def header(header: NodeSeq) = record(header, Seq()) def record(header: NodeSeq, metadata: NodeSeq): Document[NodeSeq] = Document( <record xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://www.openarchives.org/OAI/2.0/"> <header> {header} </header> <metadata> {metadata} </metadata> </record> ) }
dpla/ingestion3
src/test/scala/dpla/ingestion3/mappers/providers/P2PMappingTest.scala
Scala
mit
13,196
/** * Copyright (C) 2010 Orbeon, Inc. * * This program is free software; you can redistribute it and/or modify it under the terms of the * GNU Lesser General Public License as published by the Free Software Foundation; either version * 2.1 of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. * See the GNU Lesser General Public License for more details. * * The full text of the license is available at http://www.gnu.org/copyleft/lesser.html */ package org.orbeon.oxf.processor.pdf import java.io.{ByteArrayOutputStream, OutputStream} import java.net.URI import java.net.URLDecoder.{decode ⇒ decodeURL} import java.util.{List ⇒ JList} import com.lowagie.text.pdf._ import com.lowagie.text.{Image, Rectangle} import org.orbeon.dom.Element import org.orbeon.dom.saxon.DocumentWrapper import org.orbeon.exception.OrbeonFormatter import org.orbeon.io.UriScheme import org.orbeon.oxf.http.HttpMethod.GET import org.orbeon.oxf.pipeline.api.{FunctionLibrary, PipelineContext} import org.orbeon.oxf.processor.generator.URLGeneratorBase import org.orbeon.oxf.processor.pdf.PDFTemplateProcessor._ import org.orbeon.oxf.processor.serializer.legacy.HttpBinarySerializer import org.orbeon.oxf.processor.serializer.{BinaryTextXMLReceiver, HttpSerializerBase} import org.orbeon.oxf.processor.{ProcessorImpl, ProcessorInput, ProcessorInputOutputInfo} import org.orbeon.oxf.resources.URLFactory import org.orbeon.oxf.util.IOUtils._ import org.orbeon.oxf.util.StringUtils._ import org.orbeon.oxf.util._ import org.orbeon.oxf.xml.NamespaceMapping import org.orbeon.oxf.xml.dom4j.{Dom4jUtils, LocationData} import org.orbeon.saxon.om.{Item, NodeInfo, ValueRepresentation} import org.orbeon.saxon.value.{FloatValue, Int64Value} import scala.collection.JavaConverters._ import scala.util.control.NonFatal /** * The PDF Template processor reads a PDF template and performs textual annotations on it. */ class PDFTemplateProcessor extends HttpBinarySerializer with Logging {// TODO: HttpBinarySerializer is deprecated addInputInfo(new ProcessorInputOutputInfo("model", PDFTemplateModelNamespaceURI)) addInputInfo(new ProcessorInputOutputInfo("data")) protected def getDefaultContentType = "application/pdf" protected def readInput( pipelineContext : PipelineContext, input : ProcessorInput, config : HttpSerializerBase.Config, outputStream : OutputStream ): Unit = { val configDocument = readCacheInputAsDOM4J(pipelineContext, "model")// TODO: should we use "config"? 
val configRoot = configDocument.getRootElement val templateRoot = configRoot.element("template") val instanceDocument = readInputAsOrbeonDom(pipelineContext, input) val instanceDocumentInfo = new DocumentWrapper(instanceDocument, null, XPath.GlobalConfiguration) // Create PDF reader val templateReader = { val templateHref = templateRoot.attributeValue("href") Option(ProcessorImpl.getProcessorInputSchemeInputName(templateHref)) match { case Some(inputName) ⇒ val os = new ByteArrayOutputStream readInputAsSAX(pipelineContext, inputName, new BinaryTextXMLReceiver(os)) new PdfReader(os.toByteArray) case None ⇒ new PdfReader(URLFactory.createURL(templateHref)) } } useAndClose(new PdfStamper(templateReader, outputStream)) { stamper ⇒ stamper.setFormFlattening(true) // Initial context val initialContext = ElementContext( pipelineContext = pipelineContext, logger = new IndentedLogger(Logger), contentByte = null, acroFields = stamper.getAcroFields, pageWidth = 0, pageHeight = 0, pageNumber = -1, variables = Map(), element = configRoot, contextSeq = Seq(instanceDocumentInfo), contextPosition = 1, offsetX = 0, offsetY = 0, fontFamily = "Courier", fontSize = 14, fontPitch = 15.9f ) // Add substitution fonts for Acrobat fields for (element ← configRoot.elements("substitution-font").asScala) { val fontFamilyOrPath = decodeURL(element.attributeValue("font-family"), "utf-8") val embed = element.attributeValue("embed") == "true" try initialContext.acroFields.addSubstitutionFont(createFont(fontFamilyOrPath, embed)) catch { case NonFatal(t) ⇒ warn("could not load font", Seq( "font-family" → fontFamilyOrPath, "embed" → embed.toString, "throwable" → OrbeonFormatter.format(t)))(initialContext.logger) } } // Iterate through template pages for (pageNumber ← 1 to templateReader.getNumberOfPages) { val pageSize = templateReader.getPageSize(pageNumber) val variables = Map[String, ValueRepresentation]( "page-count" → new Int64Value(templateReader.getNumberOfPages), "page-number" → new Int64Value(pageNumber), "page-width" → new FloatValue(pageSize.getWidth), "page-height" → new FloatValue(pageSize.getHeight) ) // Context for the page val pageContext = initialContext.copy( contentByte = stamper.getOverContent(pageNumber), pageWidth = pageSize.getWidth, pageHeight = pageSize.getHeight, pageNumber = pageNumber, variables = variables ) handleElements(pageContext, configRoot.elements.asScala) // Handle preview grid (NOTE: This can be heavy in memory) if (templateRoot.attributeValue("show-grid") == "true") stampGrid(pageContext) } // no document.close() ? } } // How to handle known elements val Handlers = Map[String, ElementContext ⇒ Unit]( "group" → handleGroup, "repeat" → handleRepeat, "field" → handleField, "barcode" → handleBarcode, "image" → handleImage ) def handleElements(context: ElementContext, statements: Seq[Element]): Unit = // Iterate through statements for (element ← statements) { // Context for this element val newContext = context.copy(element = element) // Check whether this statement applies to the current page def hasPageNumber = newContext.att("page") ne null def pageNumberMatches = Option(newContext.att("page")) exists (_.toInt == newContext.pageNumber) if (! 
hasPageNumber || pageNumberMatches) Handlers.get(element.getName) foreach (_.apply(newContext)) } def handleGroup(context: ElementContext): Unit = { val xpathContext = Option(context.att("ref")) match { case Some(ref) ⇒ Option(context.evaluateSingle(ref).asInstanceOf[Item]) map (ref ⇒ (Seq(ref), 1)) case None ⇒ Some(context.contextSeq, context.contextPosition) } // Handle group only if we have a context xpathContext foreach { case (contextSeq, contextPosition) ⇒ val newGroupContext = context.copy( contextSeq = contextSeq, contextPosition = contextPosition, offsetX = context.resolveFloat("offset-x", context.offsetX, context.offsetX), offsetY = context.resolveFloat("offset-y", context.offsetY, context.offsetY), fontPitch = context.resolveFloat("font-pitch", 0f, context.fontPitch), fontFamily = context.resolveString("font-family", context.fontFamily), fontSize = context.resolveFloat("font-size", 0f, context.fontSize)) handleElements(newGroupContext, newGroupContext.element.elements.asScala) } } def handleRepeat(context: ElementContext): Unit = { val ref = Option(context.att("ref")) getOrElse context.att("nodeset") val iterations = context.evaluate(ref) for (iterationIndex ← 1 to iterations.size) { val offsetIncrementX = context.resolveFloat("offset-x", 0f, 0f) val offsetIncrementY = context.resolveFloat("offset-y", 0f, 0f) val iterationContext = context.copy( contextSeq = iterations, contextPosition = iterationIndex, offsetX = context.offsetX + (iterationIndex - 1) * offsetIncrementX, offsetY = context.offsetY + (iterationIndex - 1) * offsetIncrementY ) handleElements(iterationContext, context.element.elements.asScala) } } private val FieldTypesWithValues = Set( AcroFields.FIELD_TYPE_RADIOBUTTON, AcroFields.FIELD_TYPE_LIST, AcroFields.FIELD_TYPE_COMBO, AcroFields.FIELD_TYPE_CHECKBOX // NOTE: Checkboxes are not linked: each checkbox is its own control. ) def handleField(context: ElementContext): Unit = Option(context.att("acro-field-name")) match { case Some(fieldNameExpr) ⇒ // Acrobat field val fieldName = context.evaluateAsString(fieldNameExpr) if (findFieldPage(context.acroFields, fieldName) contains context.pageNumber) { Option(context.acroFields.getFieldItem(fieldName)) foreach { item ⇒ // Field exists val exportValue = Option(context.att("export-value")) val valueExpr = exportValue orElse Option(context.att("value")) getOrElse context.att("ref") val value = context.evaluateAsString(valueExpr) // NOTE: We can obtain the list of allowed values with: // // context.acroFields.getAppearanceStates(fieldName) // // This also returns (sometimes? always?) an "Off" value. 
val fieldType = context.acroFields.getFieldType(fieldName) // export-value → set field types with values // value → set field types without values if (exportValue.isDefined == FieldTypesWithValues(fieldType)) context.acroFields.setField(fieldName, value) } } case None ⇒ // Overlay text val leftPosition = context.resolveAVT("left", "left-position") val topPosition = context.resolveAVT("top", "top-position") val size = context.resolveAVT("size") val value = Option(context.att("value")) getOrElse context.att("ref") val fontAttributes = context.getFontAttributes val baseFont = createFont(fontAttributes.fontFamily, fontAttributes.embed) // Write value context.contentByte.beginText() context.contentByte.setFontAndSize(baseFont, fontAttributes.fontSize) val xPosition = leftPosition.toFloat + context.offsetX val yPosition = context.pageHeight - (topPosition.toFloat + context.offsetY) // Get value from instance Option(context.evaluateAsString(value)) foreach { text ⇒ // Iterate over characters and print them val len = math.min(text.length, Option(size) map (_.toInt) getOrElse Integer.MAX_VALUE) for (j ← 0 to len - 1) context.contentByte.showTextAligned( PdfContentByte.ALIGN_CENTER, text.substring(j, j + 1), xPosition + j.toFloat * fontAttributes.fontPitch, yPosition, 0 ) } context.contentByte.endText() } def handleBarcode(context: ElementContext): Unit = { val value = Option(context.att("value")) getOrElse context.att("ref") val barcodeType = Option(context.att("type")) getOrElse "CODE39" val height = Option(context.att("height")) map (_.toFloat) getOrElse 10.0f val xPosition = context.resolveAVT("left").toFloat + context.offsetX val yPosition = context.pageHeight - context.resolveAVT("top").toFloat + context.offsetY val text = context.evaluateAsString(value) val fontAttributes = context.getFontAttributes val baseFont = createFont(fontAttributes.fontFamily, fontAttributes.embed) val barcode = createBarCode(barcodeType) barcode.setCode(text) barcode.setBarHeight(height) barcode.setFont(baseFont) barcode.setSize(fontAttributes.fontSize) val barcodeImage = barcode.createImageWithBarcode(context.contentByte, null, null) barcodeImage.setAbsolutePosition(xPosition, yPosition) context.contentByte.addImage(barcodeImage) } def handleImage(context: ElementContext): Unit = { lazy val image = { val hrefAttribute = context.att("href") Option(ProcessorImpl.getProcessorInputSchemeInputName(hrefAttribute)) match { case Some(inputName) ⇒ val os = new ByteArrayOutputStream readInputAsSAX(context.pipelineContext, inputName, new BinaryTextXMLReceiver(os)) Image.getInstance(os.toByteArray) case None ⇒ val url = new URI(hrefAttribute) val cxr = Connection( method = GET, url = url, credentials = None, content = None, headers = Connection.buildConnectionHeadersCapitalizedIfNeeded( scheme = UriScheme.withName(url.getScheme), hasCredentials = false, customHeaders = URLGeneratorBase.extractHeaders(context.element), headersToForward = Connection.headersToForwardFromProperty, cookiesToForward = Connection.cookiesToForwardFromProperty, getHeader = Connection.getHeaderFromRequest(NetUtils.getExternalContext.getRequest))( logger = context.logger ), loadState = true, logBody = false)( logger = context.logger ).connect( saveState = true ) ConnectionResult.withSuccessConnection(cxr, closeOnSuccess = true) { is ⇒ val tempURLString = NetUtils.inputStreamToAnyURI(is, NetUtils.REQUEST_SCOPE, Logger) // NOTE: iText's Image.getInstance() closes the local URL's InputStream Image.getInstance(URLFactory.createURL(tempURLString)) } } } 
Option(context.att("acro-field-name")) match { case Some(fieldNameStr) ⇒ // Acrobat field val fieldName = context.evaluateAsString(fieldNameStr) if (findFieldPage(context.acroFields, fieldName) contains context.pageNumber) { Option(context.acroFields.getFieldPositions(fieldName)) foreach { positions ⇒ val rectangle = new Rectangle(positions(1), positions(2), positions(3), positions(4)) image.scaleToFit(rectangle.getWidth, rectangle.getHeight) val yPosition = positions(2) + rectangle.getHeight - image.getScaledHeight image.setAbsolutePosition( positions(1) + (rectangle.getWidth - image.getScaledWidth) / 2, yPosition ) context.contentByte.addImage(image) } } case None ⇒ // By position val xPosition = context.resolveAVT("left").toFloat + context.offsetX val yPosition = context.pageHeight - (context.resolveAVT("top").toFloat + context.offsetY) image.setAbsolutePosition(xPosition, yPosition) Option(context.resolveAVT("scale-percent")) foreach (scalePercent ⇒ image.scalePercent(scalePercent.toFloat)) Option(context.resolveAVT("dpi")) foreach { dpi ⇒ val dpiInt = dpi.toInt image.setDpi(dpiInt, dpiInt) } context.contentByte.addImage(image) } } def stampGrid(context: ElementContext): Unit = { val topPosition = 10f val baseFont = createFont("Courier", embed = false) val contentByte = context.contentByte val width = context.pageWidth val height = context.pageHeight contentByte.beginText() // 20-pixel lines and side legends contentByte.setFontAndSize(baseFont, 7f) for (w ← 0f to (width, 20f)) for (h ← 0f to (height, 2f)) contentByte.showTextAligned(PdfContentByte.ALIGN_CENTER, ".", w, height - h, 0) for (h ← 0f to (height, 20f)) for (w ← 0f to (width, 2f)) contentByte.showTextAligned(PdfContentByte.ALIGN_CENTER, ".", w, height - h, 0) for (w ← 0f to (width, 20f)) { contentByte.showTextAligned(PdfContentByte.ALIGN_CENTER, w.toString, w, height - topPosition, 0) contentByte.showTextAligned(PdfContentByte.ALIGN_CENTER, w.toString, w, topPosition, 0) } for (h ← 0f to (height, 20f)) { contentByte.showTextAligned(PdfContentByte.ALIGN_CENTER, h.toString, 5f, height - h, 0) contentByte.showTextAligned(PdfContentByte.ALIGN_CENTER, h.toString, width - 5f, height - h, 0) } // 10-pixel lines contentByte.setFontAndSize(baseFont, 3f) for (w ← 10f to (width, 10f)) for (h ← 0f to (height, 2f)) contentByte.showTextAligned(PdfContentByte.ALIGN_CENTER, ".", w, height - h, 0) for (h ← 10f to (height, 10f)) for (w ← 0f to (width, 2f)) contentByte.showTextAligned(PdfContentByte.ALIGN_CENTER, ".", w, height - h, 0) contentByte.endText() } } object PDFTemplateProcessor { val Logger = LoggerFactory.createLogger(classOf[PDFTemplateProcessor]) val PDFTemplateModelNamespaceURI = "http://www.orbeon.com/oxf/pdf-template/model" def createBarCode(barcodeType: String) = barcodeType match { case "CODE39" ⇒ new Barcode39 case "CODE128" ⇒ new Barcode128 case "EAN" ⇒ new BarcodeEAN case _ ⇒ new Barcode39 } case class FontAttributes(fontPitch: Float, fontFamily: String, fontSize: Float, embed: Boolean) case class ElementContext( pipelineContext : PipelineContext, logger : IndentedLogger, contentByte : PdfContentByte, acroFields : AcroFields, pageWidth : Float, pageHeight : Float, pageNumber : Int, variables : Map[String, ValueRepresentation], element : Element, contextSeq : Seq[Item], contextPosition : Int, offsetX : Float, offsetY : Float, fontFamily : String, fontSize : Float, fontPitch : Float ) { private def contextItem = contextSeq(contextPosition - 1) private def jVariables = variables.asJava private def functionLibrary = 
FunctionLibrary.instance def att(name: String) = element.attributeValue(name) def resolveFloat(name: String, offset: Float, default: Float) = Option(resolveAVT(name)) map (offset + _.toFloat) getOrElse default def resolveString(name: String, current: String) = Option(resolveAVT(name)) map identity getOrElse current def evaluateSingle(xpath: String): NodeInfo = XPathCache.evaluateSingle( contextSeq.asJava, contextPosition, xpath, NamespaceMapping(Dom4jUtils.getNamespaceContextNoDefault(element)), jVariables, functionLibrary, null, null, element.getData.asInstanceOf[LocationData], null ).asInstanceOf[NodeInfo] def evaluate(xpath: String): Seq[Item] = XPathCache.evaluate( contextSeq.asJava, contextPosition, xpath, NamespaceMapping(Dom4jUtils.getNamespaceContextNoDefault(element)), jVariables, functionLibrary, null, null, element.getData.asInstanceOf[LocationData], null ).asInstanceOf[JList[Item]].asScala def evaluateAsString(xpath: String): String = XPathCache.evaluateAsString( contextSeq.asJava, contextPosition, xpath, NamespaceMapping(Dom4jUtils.getNamespaceContextNoDefault(element)), jVariables, functionLibrary, null, null, element.getData.asInstanceOf[LocationData], null ) def resolveAVT(attributeName: String, otherAttributeName: String = null) = Option(att(attributeName)) orElse Option(Option(otherAttributeName) map att orNull) map ( XPathCache.evaluateAsAvt( contextItem, _, NamespaceMapping(Dom4jUtils.getNamespaceContextNoDefault(element)), jVariables, functionLibrary, null, null, element.getData.asInstanceOf[LocationData], null ) ) orNull def getFontAttributes = { val newFontPitch = Option(resolveAVT("font-pitch", "spacing")) map (_.toFloat) getOrElse fontPitch val newFontFamily = Option(resolveAVT("font-family")) getOrElse fontFamily val newFontSize = Option(resolveAVT("font-size")) map (_.toFloat) getOrElse fontSize FontAttributes(newFontPitch, newFontFamily, newFontSize, att("embed") == "true") } } // Create a font def createFont(fontFamilyOrPath: String, embed: Boolean) = BaseFont.createFont(fontFamilyOrPath, findFontEncoding(fontFamilyOrPath), embed) // PDF built-in fonts val BuiltinFonts = Set( "Courier", "Courier-Bold", "Courier-Oblique", "Courier-BoldOblique", "Helvetica", "Helvetica-Bold", "Helvetica-Oblique", "Helvetica-BoldOblique", "Symbol", "Times-Roman", "Times-Bold", "Times-Italic", "Times-BoldItalic", "ZapfDingbats" ) // Find an encoding suitable for the given font family def findFontEncoding(fontFamilyName: String) = { // The reason we do this is that specifying Identity-H or Identity-V with a Type1 font always fails as iText // tries to find an actual character encoding based on the value passed. For other font types, Identity-H and // Identity-V are handled. def isType1Font(name: String) = BuiltinFonts(name) || (name.splitTo(""".""").lastOption map (_.toLowerCase) exists Set("afm", "pfm")) if (isType1Font(fontFamilyName)) BaseFont.CP1252 else BaseFont.IDENTITY_H } def findFieldPage(acroFields: AcroFields, fieldName: String): Option[Int] = for { item ← Option(acroFields.getFieldItem(fieldName)) if item.size > 0 page = item.getPage(0).intValue } yield page }
brunobuzzi/orbeon-forms
src/main/scala/org/orbeon/oxf/processor/pdf/PDFTemplateProcessor.scala
Scala
lgpl-2.1
22,644
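A minimal, self-contained sketch of the font-encoding decision that findFontEncoding above describes (not part of the original file). It re-implements the check without the Orbeon string helpers; the literal encoding names "Cp1252" and "Identity-H" stand in for the usual iText BaseFont constants and are an assumption here.

object FontEncodingSketch extends App {
  // Built-in Type1 names (subset shown) and Type1 metric files get a real 8-bit encoding;
  // everything else (e.g. TrueType/OpenType font files) can safely use a CID encoding.
  val builtinType1 = Set("Courier", "Helvetica", "Times-Roman", "Symbol", "ZapfDingbats")

  def isType1(name: String): Boolean =
    builtinType1(name) ||
      name.split('.').lastOption.map(_.toLowerCase).exists(Set("afm", "pfm"))

  def encodingFor(name: String): String =
    if (isType1(name)) "Cp1252" else "Identity-H"

  println(encodingFor("Helvetica"))   // Cp1252     - built-in Type1 font
  println(encodingFor("myfont.afm"))  // Cp1252     - Type1 metrics file
  println(encodingFor("myfont.ttf"))  // Identity-H - TrueType, CID encoding works
}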
/** * SparklineData, Inc. -- http://www.sparklinedata.com/ * * Scala based Audience Behavior APIs * * Copyright 2014-2015 SparklineData, Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sparkline.etl.operators import org.apache.spark.sql.DataFrame import org.apache.spark.sql.catalyst.analysis.UnresolvedAttribute import org.apache.spark.sql.catalyst.expressions._ import org.apache.spark.sql.catalyst.plans.{JoinType, Inner} import org.apache.spark.sql.catalyst.plans.logical.{Subquery, Join} /** * Created by Jitender on 7/29/15. */ object PerformJoin { def dataFrame(input1 : DataFrame, input2: DataFrame, joinOnColumns : Tuple2[String, String], operator : String = "eq", joinType : JoinType = Inner, removeMappedCols : Boolean = false) : DataFrame = { val leftColExpr = UnresolvedAttribute("i1." + joinOnColumns._1) val rightColExpr = UnresolvedAttribute("i2." + joinOnColumns._2) val joinExpr : Expression = operator match { case "eq" => EqualTo(leftColExpr, rightColExpr) case "lt" => LessThan(leftColExpr, rightColExpr) case "lte" => LessThanOrEqual(leftColExpr, rightColExpr) case "gt" => GreaterThan(leftColExpr, rightColExpr) case "gte" => GreaterThanOrEqual(leftColExpr, rightColExpr) } val plan = Join( Subquery("i1", input1.queryExecution.logical), Subquery("i2", input2.queryExecution.logical), joinType, Some(joinExpr) ) new DataFrame(input2.sqlContext, plan) } def dataFrameEx(input1 : DataFrame, alias1 : String = "i1", input2: DataFrame, alias2 : String = "i2", joinExpr : Expression, joinType : JoinType = Inner ) : DataFrame = { val plan = Join( Subquery(alias1, input1.queryExecution.logical), Subquery(alias2, input2.queryExecution.logical), joinType, Some(joinExpr) ) new DataFrame(input2.sqlContext, plan) } }
cubefyre/audience-behavior-semantic-etl
etl/src/main/scala/org/sparkline/etl/operators/PerformJoin.scala
Scala
apache-2.0
2,610
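A hypothetical usage sketch for the PerformJoin operator above. The SQLContext and the two DataFrames (users with an "id" column, events with a "userId" column) are assumptions for illustration, not part of the original file.

import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.catalyst.plans.LeftOuter
import org.sparkline.etl.operators.PerformJoin

// `users` and `events` are assumed to be existing DataFrames.
def joinExample(users: DataFrame, events: DataFrame): (DataFrame, DataFrame) = {
  // Inner equi-join on users.id == events.userId (default operator "eq" and Inner join type).
  val inner = PerformJoin.dataFrame(users, events, ("id", "userId"))
  // Left outer join keeping users without events, passing the comparison operator explicitly.
  val leftOuter = PerformJoin.dataFrame(users, events, ("id", "userId"), "eq", LeftOuter)
  (inner, leftOuter)
}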
package cook.actor import cook.actor.TargetStatus import cook.actor.TaskType import cook.error.CookException trait ConsoleOutputter { def printError(e: CookException) def printUnknownError(e: Throwable) def update(targetStatus: TargetStatus, taskInfo: Set[(TaskType.Value, String)]) def stopStatusUpdate // NOTE(timgreen): return value Int is used to mark this request blocking. def blockToFinish: Int }
timgreen/cook
src/cook/actor/ConsoleOutputter.scala
Scala
apache-2.0
419
/* * Copyright (c) 2014-2018 by The Monix Project Developers. * See the project homepage at: https://monix.io * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package monix.tail import cats.laws._ import cats.laws.discipline._ import monix.eval.{Coeval, Task} import monix.execution.cancelables.BooleanCancelable import monix.execution.exceptions.DummyException import monix.execution.internal.Platform.recommendedBatchSize import monix.tail.batches.{Batch, BatchCursor} import scala.util.Failure object IterantMapBatchSuite extends BaseTestSuite { test("Iterant[Task].mapBatch(f) equivalence with List.flatMap(f andThen (_.toList))") { implicit s => check2 { (stream: Iterant[Task, Array[Int]], f: Array[Int] => Long) => val g = f andThen (Batch.apply(_)) stream.mapBatch(g).toListL <-> stream.toListL.map(_.flatMap(g andThen (_.toList))) } } test("Iterant[Task].mapBatch works for functions producing batches bigger than recommendedBatchSize") { implicit s => check2 { (list: List[Int], elem: Int) => val stream = Iterant[Task].nextBatchS(Batch.fromSeq(list, recommendedBatchSize), Task.delay(Iterant[Task].lastS[Int](elem))) val f: Int => List[Int] = List.fill(recommendedBatchSize * 2)(_) val received = stream.mapBatch(f andThen (Batch.fromSeq(_))).toListL val expected = stream.toListL.map(_.flatMap(f)) received <-> expected } } test("Iterant[Task].mapBatch can handle errors") { implicit s => val dummy = DummyException("dummy") val stream = Iterant[Task].raiseError[Int](dummy) assertEquals(stream, stream.mapBatch(Batch.apply(_))) } test("Iterant[Task].next.mapBatch guards against direct user code errors") { implicit s => val dummy = DummyException("dummy") var isCanceled = false val stream = Iterant[Task].nextS(1, Task.evalAsync(Iterant[Task].empty[Int])).guarantee(Task.evalAsync { isCanceled = true }) val result = stream.mapBatch[Int](_ => throw dummy).toListL.runToFuture s.tick() assertEquals(result.value, Some(Failure(dummy))) assert(isCanceled, "isCanceled should be true") } test("Iterant[Task].nextCursor.mapBatch guards against direct user code errors") { implicit s => val dummy = DummyException("dummy") var isCanceled = false val stream = Iterant[Task].nextCursorS(BatchCursor(1, 2, 3), Task.evalAsync(Iterant[Task].empty[Int])) .guarantee(Task.evalAsync { isCanceled = true }) val result = stream.mapBatch[Int](_ => throw dummy).toListL.runToFuture s.tick() assertEquals(result.value, Some(Failure(dummy))) assert(isCanceled, "isCanceled should be true") } test("Iterant[Task].mapBatch should protect against direct exceptions") { implicit s => check2 { (l: List[Int], idx: Int) => val dummy = DummyException("dummy") var effect = 0 val list = if (l.isEmpty) List(1) else l val iterant = arbitraryListToIterant[Task, Int](list, idx) val received = (iterant ++ Iterant[Task].of(1, 2)) .guarantee(Task.eval { effect += 1 }) .mapBatch[Int](_ => throw dummy) .completedL.map(_ => 0) .onErrorRecover { case _: DummyException => effect } received <-> Task.pure(1) } } test("Iterant[Task].mapBatch should protect against broken 
batches") { implicit s => check1 { (prefix: Iterant[Task, Int]) => val dummy = DummyException("dummy") val cursor = new ThrowExceptionCursor(dummy) val error = Iterant[Task].nextCursorS(cursor, Task.now(Iterant[Task].empty[Int])) val stream = (prefix.onErrorIgnore ++ error).mapBatch(Batch.apply(_)) stream <-> prefix.onErrorIgnore ++ Iterant[Task].haltS[Int](Some(dummy)) } } test("Iterant[Task].mapBatch protects against broken cursors") { implicit s => check1 { (iter: Iterant[Task, Int]) => val dummy = DummyException("dummy") val suffix = Iterant[Task].nextCursorS[Int](new ThrowExceptionCursor(dummy), Task.now(Iterant[Task].empty)) val stream = iter.onErrorIgnore ++ suffix val received = stream.mapBatch(Batch.apply(_)) received <-> iter.onErrorIgnore.mapBatch(Batch.apply(_)) ++ Iterant[Task].haltS[Int](Some(dummy)) } } test("Iterant[Task].mapBatch should protect against broken generators") { implicit s => check1 { (prefix: Iterant[Task, Int]) => val dummy = DummyException("dummy") val cursor = new ThrowExceptionBatch(dummy) val error = Iterant[Task].nextBatchS(cursor, Task.now(Iterant[Task].empty[Int])) val stream = (prefix.onErrorIgnore ++ error).mapBatch(Batch.apply(_)) stream <-> prefix.onErrorIgnore ++ Iterant[Task].haltS[Int](Some(dummy)) } } test("Iterant[Task].mapBatch suspends side effects") { implicit s => check1 { stream: Iterant[Task, Int] => stream.mapBatch(Batch.apply(_)) <-> stream.mapBatch(Batch.apply(_)) } } test("Iterant[Coeval].mapBatch works for infinite cursors") { implicit s => check1 { (el: Int) => val stream = Iterant[Coeval].nextCursorS(BatchCursor.continually(el), Coeval.now(Iterant[Coeval].empty[Int])) val received = stream.mapBatch(Batch.apply(_)).take(10).toListL val expected = Coeval(Stream.continually(el).take(10).toList) received <-> expected } } test("Iterant[Coeval].mapBatch triggers guarantee on exception") { _ => check1 { (iter: Iterant[Coeval, Int]) => val cancelable = BooleanCancelable() val dummy = DummyException("dummy") val suffix = Iterant[Coeval].nextCursorS[Int](new ThrowExceptionCursor(dummy), Coeval.now(Iterant[Coeval].empty)) val stream = (iter.onErrorIgnore ++ suffix).guarantee(Coeval.eval(cancelable.cancel())) intercept[DummyException] { stream.mapBatch(Batch.apply(_)).toListL.value() } cancelable.isCanceled } } test("Iterant[Coeval].mapBatch can handle errors") { implicit s => val dummy = DummyException("dummy") val stream = Iterant[Coeval].raiseError[Int](dummy) assertEquals(stream, stream.mapBatch(Batch.apply(_))) } test("Iterant[Coeval].next.mapBatch guards against direct user code errors") { _ => val dummy = DummyException("dummy") var isCanceled = false val stream = Iterant[Coeval].nextS(1, Coeval(Iterant[Coeval].empty[Int])).guarantee(Coeval { isCanceled = true }) val result = stream.mapBatch[Int](_ => throw dummy).toListL.runTry() assertEquals(result, Failure(dummy)) assert(isCanceled, "isCanceled should be true") } test("Iterant[Coeval].nextCursor.mapBatch guards against direct user code errors") { _ => val dummy = DummyException("dummy") var isCanceled = false val stream = Iterant[Coeval].nextCursorS(BatchCursor(1, 2, 3), Coeval(Iterant[Coeval].empty[Int])).guarantee(Coeval { isCanceled = true }) val result = stream.mapBatch[Int](_ => throw dummy).toListL.runTry() assertEquals(result, Failure(dummy)) assert(isCanceled, "isCanceled should be true") } test("Iterant[Coeval].mapBatch should protect against direct exceptions") { implicit s => check2 { (l: List[Int], idx: Int) => val dummy = DummyException("dummy") val list = if 
(l.isEmpty) List(1) else l val iterant = arbitraryListToIterant[Coeval, Int](list, idx) val received = (iterant ++ Iterant[Coeval].now(1)).mapBatch[Int](_ => throw dummy) received <-> Iterant[Coeval].haltS[Int](Some(dummy)) } } test("Iterant[Coeval].mapBatch should protect against broken batches") { implicit s => check1 { (prefix: Iterant[Coeval, Int]) => val dummy = DummyException("dummy") val cursor: BatchCursor[Int] = new ThrowExceptionCursor(dummy) val error = Iterant[Coeval].nextCursorS(cursor, Coeval.now(Iterant[Coeval].empty[Int])) val stream = (prefix ++ error).mapBatch(Batch.apply(_)) stream <-> prefix ++ Iterant[Coeval].haltS[Int](Some(dummy)) } } test("Iterant[Coeval].mapBatch should protect against broken generators") { implicit s => check1 { (prefix: Iterant[Coeval, Int]) => val dummy = DummyException("dummy") val cursor: Batch[Int] = new ThrowExceptionBatch(dummy) val error = Iterant[Coeval].nextBatchS(cursor, Coeval.now(Iterant[Coeval].empty[Int])) val stream = (prefix ++ error).mapBatch(Batch.apply(_)) stream <-> prefix ++ Iterant[Coeval].haltS[Int](Some(dummy)) } } test("Iterant[Coeval].mapBatch preserves the source guarantee") { implicit s => var effect = 0 val stop = Coeval.eval(effect += 1) val source = Iterant[Coeval].nextCursorS(BatchCursor(1, 2, 3), Coeval.now(Iterant[Coeval].empty[Int])).guarantee(stop) val stream = source.mapBatch(Batch.apply(_)) stream.completedL.value() assertEquals(effect, 1) } }
Wogan/monix
monix-tail/shared/src/test/scala/monix/tail/IterantMapBatchSuite.scala
Scala
apache-2.0
9,362
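A small illustration of the operator exercised by the suite above (not part of the original tests); it assumes monix-tail and monix-eval are on the classpath.

import monix.eval.Coeval
import monix.tail.Iterant
import monix.tail.batches.Batch

object MapBatchExample extends App {
  // Each element is expanded into a Batch of two elements, which mapBatch flattens in order.
  val expanded = Iterant[Coeval].of(1, 2, 3).mapBatch(i => Batch(i, i * 10))
  println(expanded.toListL.value())  // List(1, 10, 2, 20, 3, 30)
}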
import sbt._ import Keys._ object BuildSettings { val buildOrganization = "liberty" val buildVersion = "0.0.1" val buildScalaVersion = "2.10.4" val buildSettings = Defaults.defaultSettings ++ Seq ( organization := buildOrganization, version := buildVersion, scalaVersion := buildScalaVersion ) } object Resolvers { val mavenRepo = DefaultMavenRepository val libraryResolvers = Seq (mavenRepo) } object Dependencies { val ivyVersion = "2.4.0" val zincVersion = "0.3.7" val scalatestVersion = "2.2.4" val ivyDependencyManager = "org.apache.ivy" % "ivy" % ivyVersion val zincCompiler = "com.typesafe.zinc" % "zinc" % zincVersion val scalatest = "org.scalatest" % "scalatest_2.10" % scalatestVersion % "test" } object LibertyBuild extends Build { import Resolvers._ import Dependencies._ import BuildSettings._ val coreDeps = Seq ( ivyDependencyManager, zincCompiler, scalatest ) lazy val liberty = Project ( id = "liberty", base = file("."), settings = buildSettings ++ Seq (resolvers := libraryResolvers, libraryDependencies ++= coreDeps) ) }
tmrts/Liberty
project/Build.scala
Scala
mit
1,198
package nl.rabobank.oss.rules.dsl.nl.grammar.meta import nl.rabobank.oss.rules.derivations.Derivation import nl.rabobank.oss.rules.dsl.nl.grammar.{DslCondition, GegevenWord, ListBerekenStart, SingularBerekenStart} import nl.rabobank.oss.rules.facts.{ListFact, SingularFact} import nl.rabobank.oss.rules.utils.FileSourcePosition import scala.annotation.compileTimeOnly import scala.language.experimental.macros import scala.reflect.internal.util.SourceFile import scala.reflect.macros.blackbox._ object DslMacros { @compileTimeOnly("This macro stores the source position of a 'Gegeven' during compile time, no use during runtime") def captureGegevenSourcePositionMacroImpl(c: Context)(condition: c.Expr[DslCondition]): c.Expr[GegevenWord] = { val (filename, line, column, start, length) = extractSourcePosition(c) c.universe.reify { new GegevenWord(condition.splice, FileSourcePosition(filename.splice, line.splice, column.splice, start.splice, length.splice)) } } @compileTimeOnly("This macro stores the source position of a 'Bereken' during compile time, no use during runtime") def captureSingularBerekenSourcePositionMacroImpl[A : c.WeakTypeTag](c: Context)(fact: c.Expr[SingularFact[A]]): c.Expr[SingularBerekenStart[A]] = { import c.universe._ val (filename, line, column, start, length) = extractSourcePosition(c) val conditionExpr: c.Expr[DslCondition] = c.Expr[DslCondition](Select(c.prefix.tree, TermName("condition"))) c.universe.reify { new SingularBerekenStart[A](conditionExpr.splice, fact.splice, List(), FileSourcePosition(filename.splice, line.splice, column.splice, start.splice, length.splice)) } } @compileTimeOnly("This macro stores the source position of a 'Bereken' during compile time, no use during runtime") def captureSingularBerekenSourcePositionWithAccumulatorMacroImpl[A : c.WeakTypeTag](c: Context)(fact: c.Expr[SingularFact[A]]): c.Expr[SingularBerekenStart[A]] = { import c.universe._ val (filename, line, column, start, length) = extractSourcePosition(c) val conditionExpr: c.Expr[DslCondition] = c.Expr[DslCondition](Select(c.prefix.tree, TermName("condition"))) val derivationsExpr: c.Expr[List[Derivation]] = c.Expr[List[Derivation]](Select(c.prefix.tree, TermName("derivations"))) c.universe.reify { new SingularBerekenStart[A](conditionExpr.splice, fact.splice, derivationsExpr.splice, FileSourcePosition(filename.splice, line.splice, column.splice, start.splice, length.splice)) } } @compileTimeOnly("This macro stores the source position of a 'Bereken' during compile time, no use during runtime") def captureListBerekenSourcePositionMacroImpl[A : c.WeakTypeTag](c: Context)(fact: c.Expr[ListFact[A]]): c.Expr[ListBerekenStart[A]] = { import c.universe._ val (filename, line, column, start, length) = extractSourcePosition(c) val conditionExpr: c.Expr[DslCondition] = c.Expr[DslCondition](Select(c.prefix.tree, TermName("condition"))) c.universe.reify { new ListBerekenStart[A](conditionExpr.splice, fact.splice, List(), FileSourcePosition(filename.splice, line.splice, column.splice, start.splice, length.splice)) } } @compileTimeOnly("This macro stores the source position of a 'Bereken' during compile time, no use during runtime") def captureListBerekenSourcePositionWithAccumulatorMacroImpl[A : c.WeakTypeTag](c: Context)(fact: c.Expr[ListFact[A]]): c.Expr[ListBerekenStart[A]] = { import c.universe._ val (filename, line, column, start, length) = extractSourcePosition(c) val conditionExpr: c.Expr[DslCondition] = c.Expr[DslCondition](Select(c.prefix.tree, TermName("condition"))) val derivationsExpr: 
c.Expr[List[Derivation]] = c.Expr[List[Derivation]](Select(c.prefix.tree, TermName("derivations"))) c.universe.reify { new ListBerekenStart[A](conditionExpr.splice, fact.splice, derivationsExpr.splice, FileSourcePosition(filename.splice, line.splice, column.splice, start.splice, length.splice)) } } def extractSourcePosition(c: Context): (c.Expr[String], c.Expr[Int], c.Expr[Int], c.Expr[Int], c.Expr[Int]) = { import c.universe._ // scalastyle:ignore val Apply(methodName, _) = c.macroApplication val position: c.Position = methodName.pos val source: SourceFile = position.source val line = c.Expr(Literal(Constant(position.line))) val column = c.Expr(Literal(Constant(position.column))) val start = c.Expr(Literal(Constant(position.focus.start))) val length = c.Expr(Literal(Constant(methodName.symbol.name.toString().length))) val filename = c.Expr(Literal(Constant(source.file.name))) (filename, line, column, start, length) } }
scala-rules/scala-rules
engine/src/main/scala/nl/rabobank/oss/rules/dsl/nl/grammar/meta/DslMacros.scala
Scala
mit
4,661
/**
 * The core trait of this library is [[conduit.GenPipe]] which represents a
 * single piece of computation. Its implementation is hidden; all operations
 * on its instances are performed using methods in the [[conduit.Pipe$ conduit.Pipe]] object.
 */
package object conduit {

  /**
   * A simplified pipe that doesn't care about the result of its upstream.
   *
   * It is usually sufficient for most purposes.
   */
  type Pipe[-I,+O,+R] = GenPipe[Any,I,O,R]

  /**
   * A pipe that cannot produce any output, only a final result.
   */
  type Sink[-I,+R] = Pipe[I,Nothing,R]

  /**
   * A pipe that cares neither about what input it receives nor the result of
   * its upstream.
   */
  type Source[+O,+R] = Pipe[Any,O,R]

  /**
   * A pipe that ''cannot'' receive any input. The difference over
   * [[conduit.Source]] is that `Source` can serve as downstream for anything,
   * while `NoInput` can serve as downstream only for a pipe with no output
   * ([[conduit.Sink]]). `NoInput` cares about the final result of its
   * upstream.
   */
  type NoInput[-U,+O,+R] = GenPipe[U,Nothing,O,R]

  /**
   * Represents a pipe that feeds a part of its output back as its own input.
   * This is primarily useful for implementing leftovers, when a component
   * needs to return a part of its input back.
   */
  type Feedback[-U,I,+O,+R] = GenPipe[U,I,Either[I,O],R]
}
ppetr/scala-conduit
src/main/scala/conduit/package.scala
Scala
gpl-3.0
1,382
package org.jetbrains.plugins.scala.lang.resolve import com.intellij.psi.PsiReference import org.junit.Assert._ /** * Created by katejim on 5/26/16. */ class ResolvePackagesWithBacktickeds extends ScalaResolveTestCase { override def folderPath: String = s"${super.folderPath}resolve/packages/backtickeds" override protected def sourceRootPath: String = folderPath private def checkReference(): Unit = { val ref: PsiReference = findReferenceAtCaret() assertTrue(ref.resolve != null) } def testInFileBacktickedPackage(): Unit = checkReference() def testFromJavaPackage(): Unit = checkReference() def testClassInPackageWithJavaKeyword(): Unit = checkReference() def testJavaClass(): Unit = checkReference() def testScalaClass(): Unit = checkReference() def testMethodInBactickedsPackage(): Unit = checkReference() }
JetBrains/intellij-scala
scala/scala-impl/test/org/jetbrains/plugins/scala/lang/resolve/ResolvePackagesWithBacktickeds.scala
Scala
apache-2.0
860
/* * sbt * Copyright 2011 - 2018, Lightbend, Inc. * Copyright 2008 - 2010, Mark Harrah * Licensed under Apache License 2.0 (see LICENSE) */ package sbt package internal package scripted import java.io.File import sbt.util.{ Logger, LoggerContext, Level } import sbt.internal.util.{ Appender, ManagedLogger, ConsoleAppender, BufferedAppender } import sbt.io.IO.wrapNull import sbt.io.{ DirectoryFilter, HiddenFileFilter } import sbt.io.syntax._ import sbt.internal.io.Resources import java.util.concurrent.atomic.AtomicInteger object ScriptedRunnerImpl { def run( resourceBaseDirectory: File, bufferLog: Boolean, tests: Array[String], handlersProvider: HandlersProvider ): Unit = { val context = LoggerContext(useLog4J = System.getProperty("sbt.log.uselog4j", "false") == "true") val runner = new ScriptedTests(resourceBaseDirectory, bufferLog, handlersProvider) val logger = newLogger(context) val allTests = get(tests, resourceBaseDirectory, logger) flatMap { case ScriptedTest(group, name) => runner.scriptedTest(group, name, logger, context) } runAll(allTests) } def runAll(tests: Seq[() => Option[String]]): Unit = { val errors = for (test <- tests; err <- test()) yield err if (errors.nonEmpty) sys.error(errors.mkString("Failed tests:\n\t", "\n\t", "\n")) } def get(tests: Seq[String], baseDirectory: File, log: ManagedLogger): Seq[ScriptedTest] = if (tests.isEmpty) listTests(baseDirectory, log) else parseTests(tests) def listTests(baseDirectory: File, log: ManagedLogger): Seq[ScriptedTest] = (new ListTests(baseDirectory, _ => true, log)).listTests def parseTests(in: Seq[String]): Seq[ScriptedTest] = for (testString <- in) yield { val Array(group, name) = testString.split("/").map(_.trim) ScriptedTest(group, name) } private[sbt] val generateId: AtomicInteger = new AtomicInteger private[sbt] def newLogger(context: LoggerContext): ManagedLogger = { val loggerName = "scripted-" + generateId.incrementAndGet context.logger(loggerName, None, None) } } final class ScriptedTests( resourceBaseDirectory: File, bufferLog: Boolean, handlersProvider: HandlersProvider, stripQuotes: Boolean ) { def this(resourceBaseDirectory: File, bufferLog: Boolean, handlersProvider: HandlersProvider) = this(resourceBaseDirectory, bufferLog, handlersProvider, true) private val testResources = new Resources(resourceBaseDirectory) private val appender: Appender = ConsoleAppender() val ScriptFilename = "test" val PendingScriptFilename = "pending" def scriptedTest(group: String, name: String, log: xsbti.Logger): Seq[() => Option[String]] = scriptedTest(group, name, Logger.xlog2Log(log)) @deprecated("Use scriptedTest that takes a LoggerContext", "1.4.0") def scriptedTest( group: String, name: String, log: ManagedLogger, ): Seq[() => Option[String]] = scriptedTest(group, name, (_ => ()), log, LoggerContext.globalContext) def scriptedTest( group: String, name: String, log: ManagedLogger, context: LoggerContext ): Seq[() => Option[String]] = scriptedTest(group, name, (_ => ()), log, context) @deprecated("Use scriptedTest that provides LoggerContext", "1.4.0") def scriptedTest( group: String, name: String, prescripted: File => Unit, log: ManagedLogger, ): Seq[() => Option[String]] = scriptedTest(group, name, prescripted, log, LoggerContext.globalContext) def scriptedTest( group: String, name: String, prescripted: File => Unit, log: ManagedLogger, context: LoggerContext, ): Seq[() => Option[String]] = { for (groupDir <- (resourceBaseDirectory * group).get(); nme <- (groupDir * name).get()) yield { val g = groupDir.getName val n = nme.getName 
val str = s"$g / $n" () => { println("Running " + str) testResources.readWriteResourceDirectory(g, n) { testDirectory => val disabled = new File(testDirectory, "disabled").isFile if (disabled) { log.info("D " + str + " [DISABLED]") None } else { try { scriptedTest(str, testDirectory, prescripted, log, context); None } catch { case _: TestException | _: PendingTestSuccessException => Some(str) } } } } } } private def scriptedTest( label: String, testDirectory: File, prescripted: File => Unit, log: ManagedLogger, context: LoggerContext, ): Unit = { val buffered = BufferedAppender(appender) context.clearAppenders(log.name) context.addAppender(log.name, (buffered -> Level.Debug)) if (bufferLog) { buffered.record() } def createParser() = { // val fileHandler = new FileCommands(testDirectory) // // val sbtHandler = new SbtHandler(testDirectory, launcher, buffered, launchOpts) // new TestScriptParser(Map('$' -> fileHandler, /* '>' -> sbtHandler, */ '#' -> CommentHandler)) val scriptConfig = new ScriptConfig(label, testDirectory, log) new TestScriptParser(handlersProvider getHandlers scriptConfig) } val (file, pending) = { val normal = new File(testDirectory, ScriptFilename) val pending = new File(testDirectory, PendingScriptFilename) if (pending.isFile) (pending, true) else (normal, false) } val pendingString = if (pending) " [PENDING]" else "" def runTest(): Unit = { val run = new ScriptRunner val parser = createParser() run(parser.parse(file, stripQuotes)) } def testFailed(): Unit = { if (pending) buffered.clearBuffer() else buffered.stopBuffer() log.error("x " + label + pendingString) } try { prescripted(testDirectory) runTest() log.info("+ " + label + pendingString) if (pending) throw new PendingTestSuccessException(label) } catch { case e: TestException => testFailed() e.getCause match { case null | _: java.net.SocketException => log.error(" " + e.getMessage) case _ => if (!pending) e.printStackTrace } if (!pending) throw e case e: PendingTestSuccessException => testFailed() log.error(" Mark as passing to remove this failure.") throw e case e: Exception => testFailed() if (!pending) throw e } finally { buffered.clearBuffer() } } } // object ScriptedTests extends ScriptedRunner { // val emptyCallback: File => Unit = { _ => () } // } final case class ScriptedTest(group: String, name: String) { override def toString = group + "/" + name } object ListTests { def list(directory: File, filter: java.io.FileFilter) = wrapNull(directory.listFiles(filter)) } import ListTests._ final class ListTests(baseDirectory: File, accept: ScriptedTest => Boolean, log: Logger) { def filter = DirectoryFilter -- HiddenFileFilter def listTests: Seq[ScriptedTest] = { list(baseDirectory, filter) flatMap { group => val groupName = group.getName listTests(group).map(ScriptedTest(groupName, _)) } } private[this] def listTests(group: File): Seq[String] = { val groupName = group.getName val allTests = list(group, filter).sortBy(_.getName) if (allTests.isEmpty) { log.warn("No tests in test group " + groupName) Seq.empty } else { val (included, skipped) = allTests.toList.partition(test => accept(ScriptedTest(groupName, test.getName))) if (included.isEmpty) log.warn("Test group " + groupName + " skipped.") else if (skipped.nonEmpty) { log.warn("Tests skipped in group " + group.getName + ":") skipped.foreach(testName => log.warn(" " + testName.getName)) } Seq(included.map(_.getName): _*) } } } class PendingTestSuccessException(label: String) extends Exception { override def getMessage: String = s"The pending test $label 
succeeded. Mark this test as passing to remove this failure." }
xuwei-k/xsbt
internal/util-scripted/src/main/scala/sbt/internal/scripted/ScriptedTests.scala
Scala
apache-2.0
8,183
// See the LICENCE.txt file distributed with this work for additional // information regarding copyright ownership. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package scray.hdfs.io.write class WriteResult( val isClosed: Boolean, val message: String, val bytesInserted: Long) { def this(message: String) { this(false, message, -1L) } def getBytesInserted(): Long = { bytesInserted } }
scray/scray
scray-hdfs/modules/scray-hdfs-writer/src/main/scala/scray/hdfs/io/write/WriteResult.scala
Scala
apache-2.0
927
package org.broadinstitute.dsde.vault.datamanagement.model object Properties { val CreatedBy = "createdBy" val CreatedDate = "createdDate" val ModifiedBy = "modifiedBy" val ModifiedDate = "modifiedDate" }
broadinstitute/vault-datamanagement
src/main/scala/org/broadinstitute/dsde/vault/datamanagement/model/Properties.scala
Scala
bsd-3-clause
213
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package org.abondar.scalabasic

import Element.elem

// case classes for the expression AST
sealed abstract class Expr
case class Var(name: String) extends Expr
case class Number(num: Double) extends Expr
case class UnOp(operator: String, arg: Expr) extends Expr
case class BinOp(operator: String, left: Expr, right: Expr) extends Expr

class ExprFormatter {

  // operators in groups of increasing precedence
  private val opGroups = Array(
    Set("|", "||"),
    Set("&", "&&"),
    Set("^"),
    Set("==", "!="),
    Set("<", "<=", ">", ">="),
    Set("+", "-"),
    Set("*", "%")
  )

  // mapping from operators to precedence
  private val precedence = {
    val assocs = for {
      i <- 0 until opGroups.length
      op <- opGroups(i)
    } yield op -> i
    assocs.toMap
  }

  private val unaryPrecendence = opGroups.length
  private val fractionPrecedence = -1

  private def format(e: Expr, enclPrec: Int): Element = e match {
    case Var(name) =>
      elem(name)
    case Number(num) =>
      def stripDot(s: String) =
        if (s endsWith ".0") s.substring(0, s.length - 2) else s
      elem(stripDot(num.toString))
    case UnOp(op, arg) =>
      elem(op) beside format(arg, unaryPrecendence)
    case BinOp("/", left, right) =>
      val top = format(left, fractionPrecedence)
      val bot = format(right, fractionPrecedence)
      val line = elem('-', top.width max bot.width, 1)
      val frac = top above line above bot
      if (enclPrec != fractionPrecedence) frac
      else elem(" ") beside frac beside elem(" ")
    case BinOp(op, left, right) =>
      val opPrec = precedence(op)
      val l = format(left, opPrec)
      val r = format(right, opPrec)
      val oper = l beside elem(" " + op + " ") beside r
      if (enclPrec <= opPrec) oper
      else elem("(") beside oper beside elem(")")
  }

  def format(e: Expr): Element = format(e, 0)
}
Dr762/ScalaBase
src/main/scala/org/abondar/scalabasic/Expr.scala
Scala
apache-2.0
2,141
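An illustrative driver for the formatter above (hypothetical, not part of the original file); it assumes the accompanying Element layout library (elem, above, beside) from the same project, and that Element's toString renders the two-dimensional layout.

package org.abondar.scalabasic

object ExprFormatterExample extends App {
  val f = new ExprFormatter
  // (a / b) * (c + d): the division is rendered as a stacked fraction, and
  // parentheses are only added where operator precedence requires them.
  val expr = BinOp("*", BinOp("/", Var("a"), Var("b")), BinOp("+", Var("c"), Var("d")))
  println(f.format(expr))
}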
/* * Copyright 2021 HM Revenue & Customs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package uk.gov.hmrc.ct.computations import org.joda.time.LocalDate import uk.gov.hmrc.ct.box.{CtOptionalInteger, CtValidation} package object offPayRollWorking { val opwApplies2020 = new LocalDate("2017-04-05") def isOPWEnabled(apEndDate: LocalDate) = apEndDate.isAfter(opwApplies2020) def DeductionCannotBeGreaterThanProfit(profit: CtOptionalInteger, loss: CtOptionalInteger): Set[CtValidation] ={ (loss.value, profit.value) match { case (Some(lossValue),Some(profitValue)) if lossValue > profitValue => Set(CtValidation(Some(loss.getClass.getSimpleName), s"error.${loss.getClass.getSimpleName}.exceeds.${profit.getClass.getSimpleName}")) case (_ , _) => Set.empty } } }
hmrc/ct-calculations
src/main/scala/uk/gov/hmrc/ct/computations/offPayRollWorking/package.scala
Scala
apache-2.0
1,310
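A quick, hypothetical check of the cut-off used by isOPWEnabled above; the behaviour shown (true only strictly after 2017-04-05) follows directly from isAfter.

import org.joda.time.LocalDate
import uk.gov.hmrc.ct.computations.offPayRollWorking.isOPWEnabled

object OpwCutOffExample extends App {
  println(isOPWEnabled(new LocalDate("2017-04-05")))  // false - the boundary date itself
  println(isOPWEnabled(new LocalDate("2017-04-06")))  // true  - strictly after the boundary
}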
/* * Copyright 2018 Analytics Zoo Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intel.analytics.zoo.pipeline.api.keras.layers import com.intel.analytics.bigdl.nn.keras.{UpSampling1D => BigDLUpSampling1D} import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric import com.intel.analytics.bigdl.utils.Shape import com.intel.analytics.zoo.pipeline.api.Net import scala.reflect.ClassTag /** * UpSampling layer for 1D inputs. * Repeats each temporal step 'length' times along the time axis. * The input of this layer should be 3D. * * When you use this layer as the first layer of a model, you need to provide the argument * inputShape (a Single Shape, does not include the batch dimension). * * @param length Integer. UpSampling factor. Default is 2. * @param inputShape A Single Shape, does not include the batch dimension. * @tparam T The numeric type of parameter(e.g. weight, bias). Only support float/double now. */ class UpSampling1D[T: ClassTag]( override val length: Int = 2, override val inputShape: Shape = null)(implicit ev: TensorNumeric[T]) extends BigDLUpSampling1D[T](length, inputShape) with Net {} object UpSampling1D { def apply[@specialized(Float, Double) T: ClassTag]( length: Int = 2, inputShape: Shape = null)(implicit ev: TensorNumeric[T]): UpSampling1D[T] = { new UpSampling1D[T](length, inputShape) } }
intel-analytics/analytics-zoo
zoo/src/main/scala/com/intel/analytics/zoo/pipeline/api/keras/layers/UpSampling1D.scala
Scala
apache-2.0
1,918
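A hypothetical construction of the layer above as the first layer of a Keras-style model; the Sequential model class and the shape values are assumptions for illustration.

import com.intel.analytics.bigdl.utils.Shape
import com.intel.analytics.zoo.pipeline.api.keras.layers.UpSampling1D
import com.intel.analytics.zoo.pipeline.api.keras.models.Sequential

object UpSampling1DExample extends App {
  // Input of shape (batch, 10, 4); each of the 10 timesteps is repeated 3 times,
  // so the output shape becomes (batch, 30, 4).
  val model = Sequential[Float]()
  model.add(UpSampling1D[Float](length = 3, inputShape = Shape(10, 4)))
}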
// Copyright 2014 Commonwealth Bank of Australia // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.twitter.scalding package typed import cascading.tap.hadoop.PartitionTap import cascading.tap.local.{ FileTap, PartitionTap => LocalPartitionTap } import cascading.tap.{ SinkMode, Tap } import cascading.tuple.Fields /** * Trait to assist with creating partitioned sources. * * Apart from the abstract members below, `hdfsScheme` and `localScheme` also need to be set. * Note that for both of them the sink fields need to be set to only include the actual fields * that should be written to file and not the partition fields. */ trait PartitionSchemed[P, T] extends SchemedSource with TypedSink[(P, T)] with Mappable[(P, T)] with HfsTapProvider { def path: String def template: String def valueSetter: TupleSetter[T] def valueConverter: TupleConverter[T] def partitionSetter: TupleSetter[P] def partitionConverter: TupleConverter[P] def fields: Fields // The partition fields, offset by the value arity. def partitionFields = PartitionUtil.toFields(valueSetter.arity, valueSetter.arity + partitionSetter.arity) /* Advertise all the sinkFields, both the value and partition ones, this needs to be like this even though it is the incorrect sink fields, otherwise scalding validation falls over. The sink fields of the scheme itself then to be over written to only include the actual sink fields. */ override def sinkFields: Fields = fields.append(partitionFields) /** * Combine both the partition and value converter to extract the data from a flat cascading tuple * into a pair of `P` and `T`. */ override def converter[U >: (P, T)] = PartitionUtil.converter[P, T, U](valueConverter, partitionConverter) /** Flatten a pair of `P` and `T` into a cascading tuple.*/ override def setter[U <: (P, T)] = PartitionUtil.setter[P, T, U](valueSetter, partitionSetter) /** Creates the taps for local and hdfs mode.*/ override def createTap(readOrWrite: AccessMode)(implicit mode: Mode): Tap[_, _, _] = mode match { case Local(_) => { val fileTap = new FileTap(localScheme, path, SinkMode.REPLACE) new LocalPartitionTap(fileTap, new TemplatePartition(partitionFields, template), SinkMode.UPDATE) .asInstanceOf[Tap[_, _, _]] } case Hdfs(_, _) => { val hfs = createHfsTap(hdfsScheme, path, SinkMode.REPLACE) new PartitionTap(hfs, new TemplatePartition(partitionFields, template), SinkMode.UPDATE) .asInstanceOf[Tap[_, _, _]] } case hdfsTest @ HadoopTest(_, _) => { val hfs = createHfsTap(hdfsScheme, hdfsTest.getWritePathFor(this), SinkMode.REPLACE) new PartitionTap(hfs, new TemplatePartition(partitionFields, template), SinkMode.UPDATE) .asInstanceOf[Tap[_, _, _]] } case _ => TestTapFactory(this, hdfsScheme).createTap(readOrWrite) } }
tresata/scalding
scalding-core/src/main/scala/com/twitter/scalding/typed/PartitionSchemed.scala
Scala
apache-2.0
3,469
package ca.andrewmcburney.skeleton.files import org.scalatest._ /** * Test suite for SkeletonFile.scala */ class SkeletonFileSpec extends FlatSpec with Matchers { "SkeletonFile" should "true should be true" in { true should be (true) } }
skeleton-cli/skeleton
src/test/scala/ca/andrewmcburney/skeleton/files/SkeletonFileSpec.scala
Scala
apache-2.0
254
/* * Copyright 2016 Coral realtime streaming analytics (http://coral-streaming.github.io) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.coral.actors.transform import akka.actor.Props import io.coral.actors.{SimpleEmitTrigger, CoralActor} import io.coral.lib.Random import org.json4s.JsonAST.JNothing import org.json4s.{JObject, JValue} object SampleActor { implicit val formats = org.json4s.DefaultFormats def getParams(json: JValue) = { for { fraction <- (json \\ "params" \\ "fraction").extractOpt[Double] } yield { fraction } } def apply(json: JValue): Option[Props] = { getParams(json).map(_ => Props(classOf[SampleActor], json, Random)) } } class SampleActor(json: JObject, random: Random) extends CoralActor(json) with SimpleEmitTrigger { val fraction: Double = SampleActor.getParams(json).get var randomStream: Stream[Boolean] = random.binomial(fraction) def next(): Boolean = { val value = randomStream.head randomStream = randomStream.tail value } override def simpleEmitTrigger(json: JObject): Option[JValue] = { next() match { case false => Some(JNothing) case true => Some(json) } } }
coral-streaming/coral
src/main/scala/io/coral/actors/transform/SampleActor.scala
Scala
apache-2.0
1,671
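A hypothetical configuration for the actor above, showing the JSON shape that getParams expects; the json4s-jackson parser import and the surrounding "type" field are assumptions, not taken from the original file.

import org.json4s._
import org.json4s.jackson.JsonMethods.parse
import io.coral.actors.transform.SampleActor

object SampleActorConfigExample extends App {
  // "params.fraction" is required: without it, apply returns None and no Props are created.
  val withFraction    = parse("""{ "type": "sample", "params": { "fraction": 0.25 } }""")
  val withoutFraction = parse("""{ "type": "sample", "params": {} }""")

  println(SampleActor(withFraction).isDefined)    // true  - roughly 25% of events pass through
  println(SampleActor(withoutFraction).isDefined) // false
}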
/* * Copyright University of Basel, Graphics and Vision Research Group * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package scalismo.faces.io import java.io._ import scalismo.faces.FacesTestSuite import scalismo.color.ColorSpaceOperations.implicits._ import scalismo.color.{ColorSpaceOperations, RGB, RGBA} import scalismo.faces.color._ import scalismo.faces.image.{BufferedImageConverter, PixelImage} import scalismo.faces.utils.LanguageUtilities import scala.reflect.ClassTag class ImageIOTests extends FacesTestSuite { /** number of cycles to test*/ val repetitions = 2 /** reconstruction tolerance, IO is only 8 bit! */ val tolerance = math.sqrt(4*math.pow(1.0/255.0,2.0)) // 8 bit write/read val img = randomImageRGB(37, 67) val imgG = img.map(_.gray) val imgA = img.map(RGBA(_)) /** evaluate difference of repeated identity transforms */ def diffOfIdentityTransform[A: ClassTag](image: PixelImage[A], identityTransform: PixelImage[A] => PixelImage[A]) (implicit converter: BufferedImageConverter[A], ops: ColorSpaceOperations[A]) : Double = { val wrImage: PixelImage[A] = repeatIdentityTransformation(image,identityTransform) val diff = PixelImage(image.width, image.height, (x, y) => image(x, y) - wrImage(x, y)) math.sqrt(diff.values.map(ops.normSq).max) } /** evaluate difference of repeated identity transforms */ def diffOfRGBIdentityTransform(image: PixelImage[RGB], identityTransform: PixelImage[RGB] => PixelImage[RGB]) (implicit converter: BufferedImageConverter[RGB]) : Double = { val wrImage: PixelImage[RGB] = repeatIdentityTransformation(image,identityTransform) val diff = PixelImage(image.width, image.height, (x, y) => math.pow(image(x, y).r - wrImage(x, y).r, 2) + math.pow(image(x, y).g - wrImage(x, y).g, 2) + math.pow(image(x, y).b - wrImage(x, y).b, 2)) math.sqrt(diff.values.max) } /** evaluate difference of repeated identity transforms */ def diffOfRGBAIdentityTransform(image: PixelImage[RGBA], identityTransform: PixelImage[RGBA] => PixelImage[RGBA]) (implicit converter: BufferedImageConverter[RGBA]) : Double = { val wrImage: PixelImage[RGBA] = repeatIdentityTransformation(image,identityTransform) val diff = PixelImage(image.width, image.height, (x, y) => math.pow(image(x, y).r - wrImage(x, y).r, 2) + math.pow(image(x, y).g - wrImage(x, y).g, 2) + math.pow(image(x, y).b - wrImage(x, y).b, 2) + math.pow(image(x, y).a - wrImage(x, y).a, 2)) math.sqrt(diff.values.max) } /** execute repeated identity transforms */ def repeatIdentityTransformation[A](image: PixelImage[A], identity: PixelImage[A] => PixelImage[A]) (implicit converter: BufferedImageConverter[A]) : PixelImage[A] = LanguageUtilities.iterate(image, repetitions)(identity) /** perform a write-read cycle for an image */ def writeReadCycle[A](img: PixelImage[A])(implicit conv: BufferedImageConverter[A]): PixelImage[A] = { val f = File.createTempFile("faces-scala-iotest", ".png") f.deleteOnExit() PixelImageIO.write[A](img, f).get PixelImageIO.read[A](f).get } /** perform a write-read cycle for an image */ def writeReadStreamCycle[A](img: 
PixelImage[A])(implicit conv: BufferedImageConverter[A]): PixelImage[A] = { val os = new ByteArrayOutputStream() assert(PixelImageIO.writeToStream[A](img, os).isSuccess) val is = new ByteArrayInputStream(os.toByteArray) PixelImageIO.readFromStream[A](is).get } describe("A random RGB color image") { it("survives a write-read cycle unaltered") { diffOfRGBIdentityTransform(img,writeReadCycle[RGB]) should be <= tolerance } } describe("A random RGBA color image") { it("survives a write-read cycle unaltered") { diffOfRGBAIdentityTransform(imgA,writeReadCycle[RGBA]) should be <= tolerance } } describe("A random gray image") { it("survives a write-read cycle unaltered") { diffOfIdentityTransform(imgG,writeReadCycle[Double]) should be <= tolerance } } describe("The IO-Method") { it("writes and reads and image unaltered with streams") { diffOfRGBAIdentityTransform(imgA,writeReadStreamCycle[RGBA]) should be <= tolerance } } }
unibas-gravis/scalismo-faces
src/test/scala/scalismo/faces/io/ImageIOTests.scala
Scala
apache-2.0
4,866
/* * Copyright 2016 University of Basel, Graphics and Vision Research Group * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package scalismo.faces.image.pyramid import scalismo.color.ColorSpaceOperations import scalismo.faces.image.AccessMode.MirroredPositionFunctional import scalismo.faces.image.filter.ResampleFilter import scalismo.faces.image.{InterpolationKernel, PixelImage} import scala.reflect.ClassTag /** * Laplace pyramid implementation. * * @param imagePyramid used to construct the difference images. * @param expand A function that upscales the images. * @tparam A Pixel type of underlying images in the Pyramid. */ class LaplacePyramid[A: ClassTag](val imagePyramid: ImagePyramid[A], val expand: LaplacePyramid.ExpandFilter[A])(implicit ops: ColorSpaceOperations[A]) extends ImagePyramid[A] { import PixelImage.implicits._ override val levels: Int = imagePyramid.levels override val level: Seq[PixelImage[A]] = { val images = imagePyramid.level images.init.zip(images.tail).map(p => p._1 - expand.filter(p._2, p._1.width, p._1.height)) :+ images.last } /** * Reconstructs the original image using the expand function and the addition of images based on the passed ColorSpaceOperations ops. */ def reconstruct: PixelImage[A] = level.init.foldRight(level.last)((diff, combined) => expand.filter(combined, diff.width, diff.height) + diff) } object LaplacePyramid { /** Filters an image and considers size of next larger image in an image pyramid. */ trait ExpandFilter[A]{ def filter(image: PixelImage[A], upperWidth: Int, upperHeight: Int): PixelImage[A] } /** * Standard filter to be used to upscale the image. */ def interpolationKernel = InterpolationKernel.BilinearKernel /** * Standard method to upscale an image. */ def expand[A: ClassTag](implicit ops: ColorSpaceOperations[A]) = new ExpandFilter[A] { override def filter(image: PixelImage[A], upperWidth: Int, upperHeight: Int): PixelImage[A] = { import ColorSpaceOperations.implicits._ ResampleFilter.resampleImage(image.withAccessMode(MirroredPositionFunctional((a:A, b:A)=>2*:a-b)), upperWidth, upperHeight, interpolationKernel) } } /** * Standard way to construct a LaplacePyramid. * * @param image image to build the laplace pyramid from * @param reductions number of desired levels (-1 gives the maximum allowed levels). */ def apply[A: ClassTag](image: PixelImage[A], reductions: Int = -1)(implicit ops: ColorSpaceOperations[A]): LaplacePyramid[A] = { val imagePyramid = GaussPyramid(image, reductions) new LaplacePyramid[A](imagePyramid, expand) } }
unibas-gravis/scalismo-faces
src/main/scala/scalismo/faces/image/pyramid/LaplacePyramid.scala
Scala
apache-2.0
3,189
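A hypothetical round-trip with the pyramid above: build it from an RGB image, then reconstruct. The input image is assumed to exist, and the implicit ColorSpaceOperations instance for RGB is assumed to be provided by the library.

import scalismo.color.RGB
import scalismo.faces.image.PixelImage
import scalismo.faces.image.pyramid.LaplacePyramid

// `image` is assumed to be an existing PixelImage[RGB].
def pyramidExample(image: PixelImage[RGB]): PixelImage[RGB] = {
  // Build as many levels as the image size allows (reductions = -1 is the default).
  val pyramid = LaplacePyramid(image)
  // level.last is the smallest Gaussian level; the other entries are difference images.
  println(s"levels: ${pyramid.levels}")
  // Expanding and adding the differences back recovers the original, up to resampling error.
  pyramid.reconstruct
}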
/* * Removes.scala * * Copyright 2017 wayfarerx <x@wayfarerx.net> (@thewayfarerx) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.wayfarerx.dreamsleeve.data package binary_data import scodec.Codec import scodec.codecs._ /** * Binary support for the remove factory object. */ trait Removes { /** The implicit change discriminator for removes. */ @inline final implicit def binaryAsChange: Discriminator[Change, Change.Remove, Int] = Removes.AsChange /** The implicit remove codec. */ @inline final implicit def binaryCodec: Codec[Change.Remove] = Removes.Codec } /** * Support for binary remove codecs. */ object Removes { /** The change discriminator for removes. */ val AsChange: Discriminator[Change, Change.Remove, Int] = Discriminator(5) /** The remove codec. */ val Codec: Codec[Change.Remove] = HashCodec.as[Change.Remove] }
wayfarerx/dreamsleeve
shared/data/src/main/scala/net/wayfarerx/dreamsleeve/data/binary_data/Removes.scala
Scala
apache-2.0
1,394
package org.scalacheck.ops.time.joda

import org.joda.time.DateTime
import org.scalacheck.Arbitrary
import org.scalacheck.ops.time.GenericDateTimeGeneratorsSpec

import scala.reflect.ClassTag

final class JodaDateTimeGeneratorsSpec extends GenericDateTimeGeneratorsSpec(JodaDateTimeGenerators) {

  override protected val arbInstantType: Arbitrary[DateTime] = implicitly[Arbitrary[DateTime]]

  override protected val clsTagInstantType: ClassTag[DateTime] = implicitly[ClassTag[DateTime]]

  override protected val orderingInstantType: Ordering[DateTime] =
    Ordering.fromLessThan[DateTime](_.compareTo(_) < 0)
}
AudaxHealthInc/scalacheck-ops
joda/src/test/scala/org/scalacheck/ops/time/joda/JodaDateTimeGeneratorsSpec.scala
Scala
apache-2.0
608
package scalariform.formatter

import scalariform.parser._

abstract class AbstractExpressionFormatterTest extends AbstractFormatterTest {

  type Result = Expr

  def format(formatter: ScalaFormatter, result: Result): FormatResult = formatter.format(result)(FormatterState())

  def parse(parser: ScalaParser): Result = parser.expr()

}
mdr/scalariform
scalariform/src/test/scala/scalariform/formatter/AbstractExpressionFormatterTest.scala
Scala
mit
338
package wakfutcp.protocol.messages.client

import wakfutcp.protocol.{ClientMessage, Codec}

final case class CharacterDeletionMessage(
  characterId: Long
) extends ClientMessage {
  override val id = 2051
  override val arch = 2
}

object CharacterDeletionMessage {
  import Codec._
  import cats.syntax.invariant._

  implicit val codec: Codec[CharacterDeletionMessage] =
    long.imap(apply)(Function.unlift(unapply))
}
OpenWakfu/wakfutcp
protocol/src/main/scala/wakfutcp/protocol/messages/client/CharacterDeletionMessage.scala
Scala
mit
423
/** * Copyright 2015 Mohiva Organisation (license at mohiva dot com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import com.typesafe.sbt.SbtGhPages.GhPagesKeys._ import com.typesafe.sbt.SbtGhPages.ghpages import com.typesafe.sbt.SbtGit.GitKeys._ import com.typesafe.sbt.SbtGit.git import com.typesafe.sbt.SbtSite.SiteKeys._ import com.typesafe.sbt.SbtSite.site import sbt.Keys._ import sbt._ import sbtunidoc.Plugin._ ////******************************* //// Basic settings ////******************************* object BasicSettings extends AutoPlugin { override def trigger = allRequirements override def projectSettings = Seq( organization := "com.mohiva", version := "4.0.0-SNAPSHOT", resolvers ++= Dependencies.resolvers, scalaVersion := Dependencies.Versions.scalaVersion, crossScalaVersions := Dependencies.Versions.crossScala, scalacOptions ++= Seq( "-deprecation", // Emit warning and location for usages of deprecated APIs. "-feature", // Emit warning and location for usages of features that should be imported explicitly. "-unchecked", // Enable additional warnings where generated code depends on assumptions. "-Xfatal-warnings", // Fail the compilation if there are any warnings. "-Xlint", // Enable recommended additional warnings. "-Ywarn-adapted-args", // Warn if an argument list is modified to match the receiver. "-Ywarn-dead-code", // Warn when dead code is identified. "-Ywarn-inaccessible", // Warn about inaccessible types in method signatures. "-Ywarn-nullary-override", // Warn when non-nullary overrides nullary, e.g. def foo() over def foo. "-Ywarn-numeric-widen" // Warn when numerics are widened. ), scalacOptions in Test ~= { (options: Seq[String]) => options filterNot (_ == "-Ywarn-dead-code") // Allow dead code in tests (to support using mockito). 
}, parallelExecution in Test := false ) } ////******************************* //// Scalariform settings ////******************************* object CodeFormatter extends AutoPlugin { import com.typesafe.sbt.SbtScalariform._ import scalariform.formatter.preferences.{ DoubleIndentClassDeclaration, FormatXml, PreserveDanglingCloseParenthesis } lazy val BuildConfig = config("build") extend Compile lazy val BuildSbtConfig = config("buildsbt") extend Compile lazy val prefs = Seq( ScalariformKeys.preferences := ScalariformKeys.preferences.value .setPreference(FormatXml, false) .setPreference(DoubleIndentClassDeclaration, false) .setPreference(PreserveDanglingCloseParenthesis, true) ) override def trigger = allRequirements override def projectSettings = defaultScalariformSettings ++ prefs ++ inConfig(BuildConfig)(configScalariformSettings) ++ inConfig(BuildSbtConfig)(configScalariformSettings) ++ Seq( scalaSource in BuildConfig := baseDirectory.value / "project", scalaSource in BuildSbtConfig := baseDirectory.value / "project", includeFilter in (BuildConfig, ScalariformKeys.format) := ("*.scala": FileFilter), includeFilter in (BuildSbtConfig, ScalariformKeys.format) := ("*.sbt": FileFilter), ScalariformKeys.format in Compile := { (ScalariformKeys.format in BuildSbtConfig).value (ScalariformKeys.format in BuildConfig).value (ScalariformKeys.format in Compile).value } ) } ////******************************* //// ScalaDoc settings ////******************************* object Doc extends AutoPlugin { import play.core.PlayVersion override def projectSettings = Seq( autoAPIMappings := true, apiURL := Some(url(s"http://api.silhouette.mohiva.com/$version/")), apiMappings ++= { implicit val cp = (fullClasspath in Compile).value Map( jarFor("com.typesafe.play", "play") -> url(s"http://www.playframework.com/documentation/${PlayVersion.current}/api/scala/"), scalaInstance.value.libraryJar -> url(s"http://www.scala-lang.org/api/${scalaVersion.value}/") ) } ) /** * Gets the JAR file for a package. * * @param organization The organization name. * @param name The name of the package. * @param cp The class path. * @return The file which points to the JAR. 
* @see http://stackoverflow.com/a/20919304/2153190 */ private def jarFor(organization: String, name: String)(implicit cp: Seq[Attributed[File]]): File = { (for { entry <- cp module <- entry.get(moduleID.key) if module.organization == organization if module.name.startsWith(name) jarFile = entry.data } yield jarFile).head } } ////******************************* //// APIDoc settings ////******************************* // @see https://github.com/paypal/horizon/blob/develop/src/main/scala/com/paypal/horizon/BuildUtilities.scala object APIDoc { lazy val files = Seq(file("CNAME")) lazy val settings = unidocSettings ++ site.settings ++ ghpages.settings ++ Seq( // Create version siteMappings <++= (mappings in (ScalaUnidoc, packageDoc), version).map { (mapping, ver) => for ((file, path) <- mapping) yield (file, s"$ver/$path") }, // Add custom files from site directory siteMappings <++= baseDirectory.map { dir => for (file <- files) yield (new File(dir.getAbsolutePath + "/site/" + file), file.name) }, // Do not delete old versions synchLocal <<= (privateMappings, updatedRepository, gitRunner, streams).map { (mappings, repo, git, s) => val betterMappings = mappings.map { case (file, tgt) => (file, repo / tgt) } IO.copy(betterMappings) repo }, git.remoteRepo := "git@github.com:mohiva/play-silhouette.git" ) } ////******************************* //// Maven settings ////******************************* object Publish extends AutoPlugin { import xerial.sbt.Sonatype._ override def trigger = allRequirements private val pom = { <scm> <url>git@github.com:mohiva/play-silhouette.git</url> <connection>scm:git:git@github.com:mohiva/play-silhouette.git</connection> </scm> <developers> <developer> <id>akkie</id> <name>Christian Kaps</name> <url>http://mohiva.com</url> </developer> <developer> <id>fernandoacorreia</id> <name>Fernando Correia</name> <url>http://www.fernandocorreia.info/</url> </developer> </developers> } override def projectSettings = sonatypeSettings ++ Seq( description := "Authentication library for Play Framework applications that supports several authentication methods, including OAuth1, OAuth2, OpenID, Credentials, Basic Authentication, Two Factor Authentication or custom authentication schemes", homepage := Some(url("http://silhouette.mohiva.com/")), licenses := Seq("Apache License" -> url("https://github.com/mohiva/play-silhouette/blob/master/LICENSE")), publishMavenStyle := true, publishArtifact in Test := false, pomIncludeRepository := { _ => false }, pomExtra := pom ) }
cemcatik/play-silhouette
project/BuildSettings.scala
Scala
apache-2.0
7,633
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.streaming import java.io.File import java.sql.Timestamp import java.util.{Locale, UUID} import scala.util.Random import org.apache.commons.io.FileUtils import org.scalatest.BeforeAndAfter import org.apache.spark.scheduler.ExecutorCacheTaskLocation import org.apache.spark.sql.{DataFrame, Row, SparkSession} import org.apache.spark.sql.execution.exchange.ShuffleExchangeExec import org.apache.spark.sql.execution.streaming.{MemoryStream, StatefulOperatorStateInfo, StreamingSymmetricHashJoinExec, StreamingSymmetricHashJoinHelper} import org.apache.spark.sql.execution.streaming.state.{StateStore, StateStoreProviderId} import org.apache.spark.sql.functions._ import org.apache.spark.util.Utils abstract class StreamingJoinSuite extends StreamTest with StateStoreMetricsTest with BeforeAndAfter { import testImplicits._ before { SparkSession.setActiveSession(spark) // set this before force initializing 'joinExec' spark.streams.stateStoreCoordinator // initialize the lazy coordinator } after { StateStore.stop() } protected def setupStream(prefix: String, multiplier: Int): (MemoryStream[Int], DataFrame) = { val input = MemoryStream[Int] val df = input.toDF .select( 'value as "key", timestamp_seconds($"value") as s"${prefix}Time", ('value * multiplier) as s"${prefix}Value") .withWatermark(s"${prefix}Time", "10 seconds") (input, df) } protected def setupWindowedJoin(joinType: String) : (MemoryStream[Int], MemoryStream[Int], DataFrame) = { val (input1, df1) = setupStream("left", 2) val (input2, df2) = setupStream("right", 3) val windowed1 = df1.select('key, window('leftTime, "10 second"), 'leftValue) val windowed2 = df2.select('key, window('rightTime, "10 second"), 'rightValue) val joined = windowed1.join(windowed2, Seq("key", "window"), joinType) val select = if (joinType == "left_semi") { joined.select('key, $"window.end".cast("long"), 'leftValue) } else { joined.select('key, $"window.end".cast("long"), 'leftValue, 'rightValue) } (input1, input2, select) } protected def setupWindowedJoinWithLeftCondition(joinType: String) : (MemoryStream[Int], MemoryStream[Int], DataFrame) = { val (leftInput, df1) = setupStream("left", 2) val (rightInput, df2) = setupStream("right", 3) // Use different schemas to ensure the null row is being generated from the correct side. 
val left = df1.select('key, window('leftTime, "10 second"), 'leftValue) val right = df2.select('key, window('rightTime, "10 second"), 'rightValue.cast("string")) val joined = left.join( right, left("key") === right("key") && left("window") === right("window") && 'leftValue > 4, joinType) val select = if (joinType == "left_semi") { joined.select(left("key"), left("window.end").cast("long"), 'leftValue) } else if (joinType == "left_outer") { joined.select(left("key"), left("window.end").cast("long"), 'leftValue, 'rightValue) } else if (joinType == "right_outer") { joined.select(right("key"), right("window.end").cast("long"), 'leftValue, 'rightValue) } else { joined.select(left("key"), left("window.end").cast("long"), 'leftValue, right("key"), right("window.end").cast("long"), 'rightValue) } (leftInput, rightInput, select) } protected def setupWindowedJoinWithRightCondition(joinType: String) : (MemoryStream[Int], MemoryStream[Int], DataFrame) = { val (leftInput, df1) = setupStream("left", 2) val (rightInput, df2) = setupStream("right", 3) // Use different schemas to ensure the null row is being generated from the correct side. val left = df1.select('key, window('leftTime, "10 second"), 'leftValue) val right = df2.select('key, window('rightTime, "10 second"), 'rightValue.cast("string")) val joined = left.join( right, left("key") === right("key") && left("window") === right("window") && 'rightValue.cast("int") > 7, joinType) val select = if (joinType == "left_semi") { joined.select(left("key"), left("window.end").cast("long"), 'leftValue) } else if (joinType == "left_outer") { joined.select(left("key"), left("window.end").cast("long"), 'leftValue, 'rightValue) } else if (joinType == "right_outer") { joined.select(right("key"), right("window.end").cast("long"), 'leftValue, 'rightValue) } else { joined.select(left("key"), left("window.end").cast("long"), 'leftValue, right("key"), right("window.end").cast("long"), 'rightValue) } (leftInput, rightInput, select) } protected def setupJoinWithRangeCondition(joinType: String) : (MemoryStream[(Int, Int)], MemoryStream[(Int, Int)], DataFrame) = { val leftInput = MemoryStream[(Int, Int)] val rightInput = MemoryStream[(Int, Int)] val df1 = leftInput.toDF.toDF("leftKey", "time") .select('leftKey, timestamp_seconds($"time") as "leftTime", ('leftKey * 2) as "leftValue") .withWatermark("leftTime", "10 seconds") val df2 = rightInput.toDF.toDF("rightKey", "time") .select('rightKey, timestamp_seconds($"time") as "rightTime", ('rightKey * 3) as "rightValue") .withWatermark("rightTime", "10 seconds") val joined = df1.join( df2, expr("leftKey = rightKey AND " + "leftTime BETWEEN rightTime - interval 5 seconds AND rightTime + interval 5 seconds"), joinType) val select = if (joinType == "left_semi") { joined.select('leftKey, 'leftTime.cast("int")) } else { joined.select('leftKey, 'rightKey, 'leftTime.cast("int"), 'rightTime.cast("int")) } (leftInput, rightInput, select) } protected def setupSelfJoin(joinType: String) : (MemoryStream[(Int, Long)], DataFrame) = { val inputStream = MemoryStream[(Int, Long)] val df = inputStream.toDS() .select(col("_1").as("value"), timestamp_seconds($"_2").as("timestamp")) val leftStream = df.select(col("value").as("leftId"), col("timestamp").as("leftTime")) val rightStream = df // Introduce misses for ease of debugging .where(col("value") % 2 === 0) .select(col("value").as("rightId"), col("timestamp").as("rightTime")) val joined = leftStream .withWatermark("leftTime", "5 seconds") .join( rightStream.withWatermark("rightTime", "5 
seconds"), expr("leftId = rightId AND rightTime >= leftTime AND " + "rightTime <= leftTime + interval 5 seconds"), joinType) val select = if (joinType == "left_semi") { joined.select(col("leftId"), col("leftTime").cast("int")) } else { joined.select(col("leftId"), col("leftTime").cast("int"), col("rightId"), col("rightTime").cast("int")) } (inputStream, select) } } class StreamingInnerJoinSuite extends StreamingJoinSuite { import testImplicits._ test("stream stream inner join on non-time column") { val input1 = MemoryStream[Int] val input2 = MemoryStream[Int] val df1 = input1.toDF.select('value as "key", ('value * 2) as "leftValue") val df2 = input2.toDF.select('value as "key", ('value * 3) as "rightValue") val joined = df1.join(df2, "key") testStream(joined)( AddData(input1, 1), CheckAnswer(), AddData(input2, 1, 10), // 1 arrived on input1 first, then input2, should join CheckNewAnswer((1, 2, 3)), AddData(input1, 10), // 10 arrived on input2 first, then input1, should join CheckNewAnswer((10, 20, 30)), AddData(input2, 1), // another 1 in input2 should join with 1 input1 CheckNewAnswer((1, 2, 3)), StopStream, StartStream(), AddData(input1, 1), // multiple 1s should be kept in state causing multiple (1, 2, 3) CheckNewAnswer((1, 2, 3), (1, 2, 3)), StopStream, StartStream(), AddData(input1, 100), AddData(input2, 100), CheckNewAnswer((100, 200, 300)) ) } test("stream stream inner join on windows - without watermark") { val input1 = MemoryStream[Int] val input2 = MemoryStream[Int] val df1 = input1.toDF .select('value as "key", timestamp_seconds($"value") as "timestamp", ('value * 2) as "leftValue") .select('key, window('timestamp, "10 second"), 'leftValue) val df2 = input2.toDF .select('value as "key", timestamp_seconds($"value") as "timestamp", ('value * 3) as "rightValue") .select('key, window('timestamp, "10 second"), 'rightValue) val joined = df1.join(df2, Seq("key", "window")) .select('key, $"window.end".cast("long"), 'leftValue, 'rightValue) testStream(joined)( AddData(input1, 1), CheckNewAnswer(), AddData(input2, 1), CheckNewAnswer((1, 10, 2, 3)), StopStream, StartStream(), AddData(input1, 25), CheckNewAnswer(), StopStream, StartStream(), AddData(input2, 25), CheckNewAnswer((25, 30, 50, 75)), AddData(input1, 1), CheckNewAnswer((1, 10, 2, 3)), // State for 1 still around as there is no watermark StopStream, StartStream(), AddData(input1, 5), CheckNewAnswer(), AddData(input2, 5), CheckNewAnswer((5, 10, 10, 15)) // No filter by any watermark ) } test("stream stream inner join on windows - with watermark") { val input1 = MemoryStream[Int] val input2 = MemoryStream[Int] val df1 = input1.toDF .select('value as "key", timestamp_seconds($"value") as "timestamp", ('value * 2) as "leftValue") .withWatermark("timestamp", "10 seconds") .select('key, window('timestamp, "10 second"), 'leftValue) val df2 = input2.toDF .select('value as "key", timestamp_seconds($"value") as "timestamp", ('value * 3) as "rightValue") .select('key, window('timestamp, "10 second"), 'rightValue) val joined = df1.join(df2, Seq("key", "window")) .select('key, $"window.end".cast("long"), 'leftValue, 'rightValue) testStream(joined)( AddData(input1, 1), CheckAnswer(), assertNumStateRows(total = 1, updated = 1), AddData(input2, 1), CheckAnswer((1, 10, 2, 3)), assertNumStateRows(total = 2, updated = 1), StopStream, StartStream(), AddData(input1, 25), CheckNewAnswer(), // watermark = 15, no-data-batch should remove 2 rows having window=[0,10] assertNumStateRows(total = 1, updated = 1), AddData(input2, 25), CheckNewAnswer((25, 30, 
50, 75)), assertNumStateRows(total = 2, updated = 1), StopStream, StartStream(), AddData(input2, 1), CheckNewAnswer(), // Should not join as < 15 removed assertNumStateRows(total = 2, updated = 0), // row not add as 1 < state key watermark = 15 AddData(input1, 5), CheckNewAnswer(), // Same reason as above assertNumStateRows(total = 2, updated = 0, droppedByWatermark = 1) ) } test("stream stream inner join with time range - with watermark - one side condition") { import org.apache.spark.sql.functions._ val leftInput = MemoryStream[(Int, Int)] val rightInput = MemoryStream[(Int, Int)] val df1 = leftInput.toDF.toDF("leftKey", "time") .select('leftKey, timestamp_seconds($"time") as "leftTime", ('leftKey * 2) as "leftValue") .withWatermark("leftTime", "10 seconds") val df2 = rightInput.toDF.toDF("rightKey", "time") .select('rightKey, timestamp_seconds($"time") as "rightTime", ('rightKey * 3) as "rightValue") .withWatermark("rightTime", "10 seconds") val joined = df1.join(df2, expr("leftKey = rightKey AND leftTime < rightTime - interval 5 seconds")) .select('leftKey, 'leftTime.cast("int"), 'rightTime.cast("int")) testStream(joined)( AddData(leftInput, (1, 5)), CheckAnswer(), AddData(rightInput, (1, 11)), CheckNewAnswer((1, 5, 11)), AddData(rightInput, (1, 10)), CheckNewAnswer(), // no match as leftTime 5 is not < rightTime 10 - 5 assertNumStateRows(total = 3, updated = 3), // Increase event time watermark to 20s by adding data with time = 30s on both inputs AddData(leftInput, (1, 3), (1, 30)), CheckNewAnswer((1, 3, 10), (1, 3, 11)), assertNumStateRows(total = 5, updated = 2), AddData(rightInput, (0, 30)), CheckNewAnswer(), // event time watermark: max event time - 10 ==> 30 - 10 = 20 // so left side going to only receive data where leftTime > 20 // right side state constraint: 20 < leftTime < rightTime - 5 ==> rightTime > 25 // right state where rightTime <= 25 will be cleared, (1, 11) and (1, 10) removed assertNumStateRows(total = 4, updated = 1), // New data to right input should match with left side (1, 3) and (1, 5), as left state should // not be cleared. But rows rightTime <= 20 should be filtered due to event time watermark and // state rows with rightTime <= 25 should be removed from state. 
// (1, 20) ==> filtered by event time watermark = 20 // (1, 21) ==> passed filter, matched with left (1, 3) and (1, 5), not added to state // as 21 < state watermark = 25 // (1, 28) ==> passed filter, matched with left (1, 3) and (1, 5), added to state AddData(rightInput, (1, 20), (1, 21), (1, 28)), CheckNewAnswer((1, 3, 21), (1, 5, 21), (1, 3, 28), (1, 5, 28)), assertNumStateRows(total = 5, updated = 1, droppedByWatermark = 1), // New data to left input with leftTime <= 20 should be filtered due to event time watermark AddData(leftInput, (1, 20), (1, 21)), CheckNewAnswer((1, 21, 28)), assertNumStateRows(total = 6, updated = 1, droppedByWatermark = 1) ) } test("stream stream inner join with time range - with watermark - two side conditions") { import org.apache.spark.sql.functions._ val leftInput = MemoryStream[(Int, Int)] val rightInput = MemoryStream[(Int, Int)] val df1 = leftInput.toDF.toDF("leftKey", "time") .select('leftKey, timestamp_seconds($"time") as "leftTime", ('leftKey * 2) as "leftValue") .withWatermark("leftTime", "20 seconds") val df2 = rightInput.toDF.toDF("rightKey", "time") .select('rightKey, timestamp_seconds($"time") as "rightTime", ('rightKey * 3) as "rightValue") .withWatermark("rightTime", "30 seconds") val condition = expr( "leftKey = rightKey AND " + "leftTime BETWEEN rightTime - interval 10 seconds AND rightTime + interval 5 seconds") // This translates to leftTime <= rightTime + 5 seconds AND leftTime >= rightTime - 10 seconds // So given leftTime, rightTime has to be BETWEEN leftTime - 5 seconds AND leftTime + 10 seconds // // =============== * ======================== * ============================== * ==> leftTime // | | | // |<---- 5s -->|<------ 10s ------>| |<------ 10s ------>|<---- 5s -->| // | | | // == * ============================== * =========>============== * ===============> rightTime // // E.g. 
// if rightTime = 60, then it matches only leftTime = [50, 65] // if leftTime = 20, then it match only with rightTime = [15, 30] // // State value predicates // left side: // values allowed: leftTime >= rightTime - 10s ==> leftTime > eventTimeWatermark - 10 // drop state where leftTime < eventTime - 10 // right side: // values allowed: rightTime >= leftTime - 5s ==> rightTime > eventTimeWatermark - 5 // drop state where rightTime < eventTime - 5 val joined = df1.join(df2, condition).select('leftKey, 'leftTime.cast("int"), 'rightTime.cast("int")) testStream(joined)( // If leftTime = 20, then it match only with rightTime = [15, 30] AddData(leftInput, (1, 20)), CheckAnswer(), AddData(rightInput, (1, 14), (1, 15), (1, 25), (1, 26), (1, 30), (1, 31)), CheckNewAnswer((1, 20, 15), (1, 20, 25), (1, 20, 26), (1, 20, 30)), assertNumStateRows(total = 7, updated = 7), // If rightTime = 60, then it matches only leftTime = [50, 65] AddData(rightInput, (1, 60)), CheckNewAnswer(), // matches with nothing on the left AddData(leftInput, (1, 49), (1, 50), (1, 65), (1, 66)), CheckNewAnswer((1, 50, 60), (1, 65, 60)), // Event time watermark = min(left: 66 - delay 20 = 46, right: 60 - delay 30 = 30) = 30 // Left state value watermark = 30 - 10 = slightly less than 20 (since condition has <=) // Should drop < 20 from left, i.e., none // Right state value watermark = 30 - 5 = slightly less than 25 (since condition has <=) // Should drop < 25 from the right, i.e., 14 and 15 assertNumStateRows(total = 10, updated = 5), // 12 - 2 removed AddData(leftInput, (1, 30), (1, 31)), // 30 should not be processed or added to state CheckNewAnswer((1, 31, 26), (1, 31, 30), (1, 31, 31)), assertNumStateRows(total = 11, updated = 1, droppedByWatermark = 1), // only 31 added // Advance the watermark AddData(rightInput, (1, 80)), CheckNewAnswer(), // Event time watermark = min(left: 66 - delay 20 = 46, right: 80 - delay 30 = 50) = 46 // Left state value watermark = 46 - 10 = slightly less than 36 (since condition has <=) // Should drop < 36 from left, i.e., 20, 31 (30 was not added) // Right state value watermark = 46 - 5 = slightly less than 41 (since condition has <=) // Should drop < 41 from the right, i.e., 25, 26, 30, 31 assertNumStateRows(total = 6, updated = 1), // 12 - 6 removed AddData(rightInput, (1, 46), (1, 50)), // 46 should not be processed or added to state CheckNewAnswer((1, 49, 50), (1, 50, 50)), assertNumStateRows(total = 7, updated = 1, droppedByWatermark = 1) // 50 added ) } testQuietly("stream stream inner join without equality predicate") { val input1 = MemoryStream[Int] val input2 = MemoryStream[Int] val df1 = input1.toDF.select('value as "leftKey", ('value * 2) as "leftValue") val df2 = input2.toDF.select('value as "rightKey", ('value * 3) as "rightValue") val joined = df1.join(df2, expr("leftKey < rightKey")) val e = intercept[Exception] { val q = joined.writeStream.format("memory").queryName("test").start() input1.addData(1) q.awaitTermination(10000) } assert(e.toString.contains("Stream-stream join without equality predicate is not supported")) } test("stream stream self join") { val input = MemoryStream[Int] val df = input.toDF val join = df.select('value % 5 as "key", 'value).join( df.select('value % 5 as "key", 'value), "key") testStream(join)( AddData(input, 1, 2), CheckAnswer((1, 1, 1), (2, 2, 2)), StopStream, StartStream(), AddData(input, 3, 6), /* (1, 1) (1, 1) (2, 2) x (2, 2) = (1, 1, 1), (1, 1, 6), (2, 2, 2), (1, 6, 1), (1, 6, 6) (1, 6) (1, 6) */ CheckAnswer((3, 3, 3), (1, 1, 1), (1, 1, 6), (2, 2, 
2), (1, 6, 1), (1, 6, 6))) } test("locality preferences of StateStoreAwareZippedRDD") { import StreamingSymmetricHashJoinHelper._ withTempDir { tempDir => val queryId = UUID.randomUUID val opId = 0 val path = Utils.createDirectory(tempDir.getAbsolutePath, Random.nextFloat.toString).toString val stateInfo = StatefulOperatorStateInfo(path, queryId, opId, 0L, 5) implicit val sqlContext = spark.sqlContext val coordinatorRef = sqlContext.streams.stateStoreCoordinator val numPartitions = 5 val storeNames = Seq("name1", "name2") val partitionAndStoreNameToLocation = { for (partIndex <- 0 until numPartitions; storeName <- storeNames) yield { (partIndex, storeName) -> s"host-$partIndex-$storeName" } }.toMap partitionAndStoreNameToLocation.foreach { case ((partIndex, storeName), hostName) => val providerId = StateStoreProviderId(stateInfo, partIndex, storeName) coordinatorRef.reportActiveInstance(providerId, hostName, s"exec-$hostName", Seq.empty) require( coordinatorRef.getLocation(providerId) === Some(ExecutorCacheTaskLocation(hostName, s"exec-$hostName").toString)) } val rdd1 = spark.sparkContext.makeRDD(1 to 10, numPartitions) val rdd2 = spark.sparkContext.makeRDD((1 to 10).map(_.toString), numPartitions) val rdd = rdd1.stateStoreAwareZipPartitions(rdd2, stateInfo, storeNames, coordinatorRef) { (_, left, right) => left.zip(right) } require(rdd.partitions.length === numPartitions) for (partIndex <- 0 until numPartitions) { val expectedLocations = storeNames.map { storeName => val hostName = partitionAndStoreNameToLocation((partIndex, storeName)) ExecutorCacheTaskLocation(hostName, s"exec-$hostName").toString }.toSet assert(rdd.preferredLocations(rdd.partitions(partIndex)).toSet === expectedLocations) } } } test("join between three streams") { val input1 = MemoryStream[Int] val input2 = MemoryStream[Int] val input3 = MemoryStream[Int] val df1 = input1.toDF.select('value as "leftKey", ('value * 2) as "leftValue") val df2 = input2.toDF.select('value as "middleKey", ('value * 3) as "middleValue") val df3 = input3.toDF.select('value as "rightKey", ('value * 5) as "rightValue") val joined = df1.join(df2, expr("leftKey = middleKey")).join(df3, expr("rightKey = middleKey")) testStream(joined)( AddData(input1, 1, 5), AddData(input2, 1, 5, 10), AddData(input3, 5, 10), CheckNewAnswer((5, 10, 5, 15, 5, 25))) } test("streaming join should require HashClusteredDistribution from children") { val input1 = MemoryStream[Int] val input2 = MemoryStream[Int] val df1 = input1.toDF.select('value as 'a, 'value * 2 as 'b) val df2 = input2.toDF.select('value as 'a, 'value * 2 as 'b).repartition('b) val joined = df1.join(df2, Seq("a", "b")).select('a) testStream(joined)( AddData(input1, 1.to(1000): _*), AddData(input2, 1.to(1000): _*), CheckAnswer(1.to(1000): _*), Execute { query => // Verify the query plan assert(query.lastExecution.executedPlan.collect { case j @ StreamingSymmetricHashJoinExec(_, _, _, _, _, _, _, _, _: ShuffleExchangeExec, ShuffleExchangeExec(_, _: ShuffleExchangeExec, _)) => j }.size == 1) }) } test("SPARK-26187 restore the stream-stream inner join query from Spark 2.4") { val inputStream = MemoryStream[(Int, Long)] val df = inputStream.toDS() .select(col("_1").as("value"), timestamp_seconds($"_2").as("timestamp")) val leftStream = df.select(col("value").as("leftId"), col("timestamp").as("leftTime")) val rightStream = df // Introduce misses for ease of debugging .where(col("value") % 2 === 0) .select(col("value").as("rightId"), col("timestamp").as("rightTime")) val query = leftStream 
.withWatermark("leftTime", "5 seconds") .join( rightStream.withWatermark("rightTime", "5 seconds"), expr("rightId = leftId AND rightTime >= leftTime AND " + "rightTime <= leftTime + interval 5 seconds"), joinType = "inner") .select(col("leftId"), col("leftTime").cast("int"), col("rightId"), col("rightTime").cast("int")) val resourceUri = this.getClass.getResource( "/structured-streaming/checkpoint-version-2.4.0-streaming-join/").toURI val checkpointDir = Utils.createTempDir().getCanonicalFile // Copy the checkpoint to a temp dir to prevent changes to the original. // Not doing this will lead to the test passing on the first run, but fail subsequent runs. FileUtils.copyDirectory(new File(resourceUri), checkpointDir) inputStream.addData((1, 1L), (2, 2L), (3, 3L), (4, 4L), (5, 5L)) testStream(query)( StartStream(checkpointLocation = checkpointDir.getAbsolutePath), /* Note: The checkpoint was generated using the following input in Spark version 2.4.0 AddData(inputStream, (1, 1L), (2, 2L), (3, 3L), (4, 4L), (5, 5L)), // batch 1 - global watermark = 0 // states // left: (1, 1L), (2, 2L), (3, 3L), (4, 4L), (5, 5L) // right: (2, 2L), (4, 4L) CheckNewAnswer((2, 2L, 2, 2L), (4, 4L, 4, 4L)), assertNumStateRows(7, 7), */ AddData(inputStream, (6, 6L), (7, 7L), (8, 8L), (9, 9L), (10, 10L)), // batch 2: same result as above test CheckNewAnswer((6, 6L, 6, 6L), (8, 8L, 8, 8L), (10, 10L, 10, 10L)), assertNumStateRows(11, 6), Execute { query => // Verify state format = 1 val f = query.lastExecution.executedPlan.collect { case f: StreamingSymmetricHashJoinExec => f } assert(f.size == 1) assert(f.head.stateFormatVersion == 1) } ) } } class StreamingOuterJoinSuite extends StreamingJoinSuite { import testImplicits._ import org.apache.spark.sql.functions._ test("left outer early state exclusion on left") { val (leftInput, rightInput, joined) = setupWindowedJoinWithLeftCondition("left_outer") testStream(joined)( MultiAddData(leftInput, 1, 2, 3)(rightInput, 3, 4, 5), // The left rows with leftValue <= 4 should generate their outer join row now and // not get added to the state. CheckNewAnswer(Row(3, 10, 6, "9"), Row(1, 10, 2, null), Row(2, 10, 4, null)), assertNumStateRows(total = 4, updated = 4), // We shouldn't get more outer join rows when the watermark advances. MultiAddData(leftInput, 20)(rightInput, 21), CheckNewAnswer(), AddData(rightInput, 20), CheckNewAnswer((20, 30, 40, "60")) ) } test("left outer early state exclusion on right") { val (leftInput, rightInput, joined) = setupWindowedJoinWithRightCondition("left_outer") testStream(joined)( MultiAddData(leftInput, 3, 4, 5)(rightInput, 1, 2, 3), // The right rows with rightValue <= 7 should never be added to the state. CheckNewAnswer(Row(3, 10, 6, "9")), // rightValue = 9 > 7 hence joined and added to state assertNumStateRows(total = 4, updated = 4), // When the watermark advances, we get the outer join rows just as we would if they // were added but didn't match the full join condition. MultiAddData(leftInput, 20)(rightInput, 21), // watermark = 10, no-data-batch computes nulls CheckNewAnswer(Row(4, 10, 8, null), Row(5, 10, 10, null)), AddData(rightInput, 20), CheckNewAnswer(Row(20, 30, 40, "60")) ) } test("right outer early state exclusion on left") { val (leftInput, rightInput, joined) = setupWindowedJoinWithLeftCondition("right_outer") testStream(joined)( MultiAddData(leftInput, 1, 2, 3)(rightInput, 3, 4, 5), // The left rows with leftValue <= 4 should never be added to the state. 
CheckNewAnswer(Row(3, 10, 6, "9")), // leftValue = 7 > 4 hence joined and added to state assertNumStateRows(total = 4, updated = 4), // When the watermark advances, we get the outer join rows just as we would if they // were added but didn't match the full join condition. MultiAddData(leftInput, 20)(rightInput, 21), // watermark = 10, no-data-batch computes nulls CheckNewAnswer(Row(4, 10, null, "12"), Row(5, 10, null, "15")), AddData(rightInput, 20), CheckNewAnswer(Row(20, 30, 40, "60")) ) } test("right outer early state exclusion on right") { val (leftInput, rightInput, joined) = setupWindowedJoinWithRightCondition("right_outer") testStream(joined)( MultiAddData(leftInput, 3, 4, 5)(rightInput, 1, 2, 3), // The right rows with rightValue <= 7 should generate their outer join row now and // not get added to the state. CheckNewAnswer(Row(3, 10, 6, "9"), Row(1, 10, null, "3"), Row(2, 10, null, "6")), assertNumStateRows(total = 4, updated = 4), // We shouldn't get more outer join rows when the watermark advances. MultiAddData(leftInput, 20)(rightInput, 21), CheckNewAnswer(), AddData(rightInput, 20), CheckNewAnswer((20, 30, 40, "60")) ) } test("windowed left outer join") { val (leftInput, rightInput, joined) = setupWindowedJoin("left_outer") testStream(joined)( // Test inner part of the join. MultiAddData(leftInput, 1, 2, 3, 4, 5)(rightInput, 3, 4, 5, 6, 7), CheckNewAnswer((3, 10, 6, 9), (4, 10, 8, 12), (5, 10, 10, 15)), MultiAddData(leftInput, 21)(rightInput, 22), // watermark = 11, no-data-batch computes nulls CheckNewAnswer(Row(1, 10, 2, null), Row(2, 10, 4, null)), assertNumStateRows(total = 2, updated = 12), AddData(leftInput, 22), CheckNewAnswer(Row(22, 30, 44, 66)), assertNumStateRows(total = 3, updated = 1) ) } test("windowed right outer join") { val (leftInput, rightInput, joined) = setupWindowedJoin("right_outer") testStream(joined)( // Test inner part of the join. MultiAddData(leftInput, 1, 2, 3, 4, 5)(rightInput, 3, 4, 5, 6, 7), CheckNewAnswer((3, 10, 6, 9), (4, 10, 8, 12), (5, 10, 10, 15)), MultiAddData(leftInput, 21)(rightInput, 22), // watermark = 11, no-data-batch computes nulls CheckNewAnswer(Row(6, 10, null, 18), Row(7, 10, null, 21)), assertNumStateRows(total = 2, updated = 12), AddData(leftInput, 22), CheckNewAnswer(Row(22, 30, 44, 66)), assertNumStateRows(total = 3, updated = 1) ) } Seq( ("left_outer", Row(3, null, 5, null)), ("right_outer", Row(null, 2, null, 5)) ).foreach { case (joinType: String, outerResult) => test(s"${joinType.replaceAllLiterally("_", " ")} with watermark range condition") { val (leftInput, rightInput, joined) = setupJoinWithRangeCondition(joinType) testStream(joined)( AddData(leftInput, (1, 5), (3, 5)), CheckAnswer(), AddData(rightInput, (1, 10), (2, 5)), CheckNewAnswer((1, 1, 5, 10)), AddData(rightInput, (1, 11)), CheckNewAnswer(), // no match as left time is too low assertNumStateRows(total = 5, updated = 5), // Increase event time watermark to 20s by adding data with time = 30s on both inputs AddData(leftInput, (1, 7), (1, 30)), CheckNewAnswer((1, 1, 7, 10), (1, 1, 7, 11)), assertNumStateRows(total = 7, updated = 2), AddData(rightInput, (0, 30)), // watermark = 30 - 10 = 20, no-data-batch computes nulls CheckNewAnswer(outerResult), assertNumStateRows(total = 2, updated = 1) ) } } // When the join condition isn't true, the outer null rows must be generated, even if the join // keys themselves have a match. 
test("left outer join with non-key condition violated") { val (leftInput, simpleLeftDf) = setupStream("left", 2) val (rightInput, simpleRightDf) = setupStream("right", 3) val left = simpleLeftDf.select('key, window('leftTime, "10 second"), 'leftValue) val right = simpleRightDf.select('key, window('rightTime, "10 second"), 'rightValue) val joined = left.join( right, left("key") === right("key") && left("window") === right("window") && 'leftValue > 10 && ('rightValue < 300 || 'rightValue > 1000), "left_outer") .select(left("key"), left("window.end").cast("long"), 'leftValue, 'rightValue) testStream(joined)( // leftValue <= 10 should generate outer join rows even though it matches right keys MultiAddData(leftInput, 1, 2, 3)(rightInput, 1, 2, 3), CheckNewAnswer(Row(1, 10, 2, null), Row(2, 10, 4, null), Row(3, 10, 6, null)), assertNumStateRows(total = 3, updated = 3), // only right 1, 2, 3 added MultiAddData(leftInput, 20)(rightInput, 21), // watermark = 10, no-data-batch cleared < 10 CheckNewAnswer(), assertNumStateRows(total = 2, updated = 2), // only 20 and 21 left in state AddData(rightInput, 20), CheckNewAnswer(Row(20, 30, 40, 60)), assertNumStateRows(total = 3, updated = 1), // leftValue and rightValue both satisfying condition should not generate outer join rows MultiAddData(leftInput, 40, 41)(rightInput, 40, 41), // watermark = 31 CheckNewAnswer((40, 50, 80, 120), (41, 50, 82, 123)), assertNumStateRows(total = 4, updated = 4), // only left 40, 41 + right 40,41 left in state MultiAddData(leftInput, 70)(rightInput, 71), // watermark = 60 CheckNewAnswer(), assertNumStateRows(total = 2, updated = 2), // only 70, 71 left in state AddData(rightInput, 70), CheckNewAnswer((70, 80, 140, 210)), assertNumStateRows(total = 3, updated = 1), // rightValue between 300 and 1000 should generate outer join rows even though it matches left MultiAddData(leftInput, 101, 102, 103)(rightInput, 101, 102, 103), // watermark = 91 CheckNewAnswer(), assertNumStateRows(total = 6, updated = 3), // only 101 - 103 left in state MultiAddData(leftInput, 1000)(rightInput, 1001), CheckNewAnswer( Row(101, 110, 202, null), Row(102, 110, 204, null), Row(103, 110, 206, null)), assertNumStateRows(total = 2, updated = 2) ) } test("SPARK-26187 self left outer join should not return outer nulls for already matched rows") { val (inputStream, query) = setupSelfJoin("left_outer") testStream(query)( AddData(inputStream, (1, 1L), (2, 2L), (3, 3L), (4, 4L), (5, 5L)), // batch 1 - global watermark = 0 // states // left: (1, 1L), (2, 2L), (3, 3L), (4, 4L), (5, 5L) // right: (2, 2L), (4, 4L) CheckNewAnswer((2, 2L, 2, 2L), (4, 4L, 4, 4L)), assertNumStateRows(7, 7), AddData(inputStream, (6, 6L), (7, 7L), (8, 8L), (9, 9L), (10, 10L)), // batch 2 - global watermark = 5 // states // left: (1, 1L), (2, 2L), (3, 3L), (4, 4L), (5, 5L), (6, 6L), (7, 7L), (8, 8L), // (9, 9L), (10, 10L) // right: (6, 6L), (8, 8L), (10, 10L) // states evicted // left: nothing (it waits for 5 seconds more than watermark due to join condition) // right: (2, 2L), (4, 4L) // NOTE: look for evicted rows in right which are not evicted from left - they were // properly joined in batch 1 CheckNewAnswer((6, 6L, 6, 6L), (8, 8L, 8, 8L), (10, 10L, 10, 10L)), assertNumStateRows(13, 8), AddData(inputStream, (11, 11L), (12, 12L), (13, 13L), (14, 14L), (15, 15L)), // batch 3 // - global watermark = 9 <= min(9, 10) // states // left: (4, 4L), (5, 5L), (6, 6L), (7, 7L), (8, 8L), (9, 9L), (10, 10L), (11, 11L), // (12, 12L), (13, 13L), (14, 14L), (15, 15L) // right: (10, 10L), (12, 
12L), (14, 14L) // states evicted // left: (1, 1L), (2, 2L), (3, 3L) // right: (6, 6L), (8, 8L) CheckNewAnswer( Row(12, 12L, 12, 12L), Row(14, 14L, 14, 14L), Row(1, 1L, null, null), Row(3, 3L, null, null)), assertNumStateRows(15, 7) ) } test("SPARK-26187 self right outer join should not return outer nulls for already matched rows") { val inputStream = MemoryStream[(Int, Long)] val df = inputStream.toDS() .select(col("_1").as("value"), timestamp_seconds($"_2").as("timestamp")) // we're just flipping "left" and "right" from left outer join and apply right outer join val leftStream = df // Introduce misses for ease of debugging .where(col("value") % 2 === 0) .select(col("value").as("leftId"), col("timestamp").as("leftTime")) val rightStream = df.select(col("value").as("rightId"), col("timestamp").as("rightTime")) val query = leftStream .withWatermark("leftTime", "5 seconds") .join( rightStream.withWatermark("rightTime", "5 seconds"), expr("leftId = rightId AND leftTime >= rightTime AND " + "leftTime <= rightTime + interval 5 seconds"), joinType = "rightOuter") .select(col("leftId"), col("leftTime").cast("int"), col("rightId"), col("rightTime").cast("int")) // we can just flip left and right in the explanation of left outer query test // to assume the status of right outer query, hence skip explaining here testStream(query)( AddData(inputStream, (1, 1L), (2, 2L), (3, 3L), (4, 4L), (5, 5L)), CheckNewAnswer((2, 2L, 2, 2L), (4, 4L, 4, 4L)), assertNumStateRows(7, 7), AddData(inputStream, (6, 6L), (7, 7L), (8, 8L), (9, 9L), (10, 10L)), CheckNewAnswer((6, 6L, 6, 6L), (8, 8L, 8, 8L), (10, 10L, 10, 10L)), assertNumStateRows(13, 8), AddData(inputStream, (11, 11L), (12, 12L), (13, 13L), (14, 14L), (15, 15L)), CheckNewAnswer( Row(12, 12L, 12, 12L), Row(14, 14L, 14, 14L), Row(null, null, 1, 1L), Row(null, null, 3, 3L)), assertNumStateRows(15, 7) ) } test("SPARK-26187 restore the stream-stream outer join query from Spark 2.4") { val inputStream = MemoryStream[(Int, Long)] val df = inputStream.toDS() .select(col("_1").as("value"), timestamp_seconds($"_2").as("timestamp")) val leftStream = df.select(col("value").as("leftId"), col("timestamp").as("leftTime")) val rightStream = df // Introduce misses for ease of debugging .where(col("value") % 2 === 0) .select(col("value").as("rightId"), col("timestamp").as("rightTime")) val query = leftStream .withWatermark("leftTime", "5 seconds") .join( rightStream.withWatermark("rightTime", "5 seconds"), expr("rightId = leftId AND rightTime >= leftTime AND " + "rightTime <= leftTime + interval 5 seconds"), joinType = "leftOuter") .select(col("leftId"), col("leftTime").cast("int"), col("rightId"), col("rightTime").cast("int")) val resourceUri = this.getClass.getResource( "/structured-streaming/checkpoint-version-2.4.0-streaming-join/").toURI val checkpointDir = Utils.createTempDir().getCanonicalFile // Copy the checkpoint to a temp dir to prevent changes to the original. // Not doing this will lead to the test passing on the first run, but fail subsequent runs. 
FileUtils.copyDirectory(new File(resourceUri), checkpointDir) inputStream.addData((1, 1L), (2, 2L), (3, 3L), (4, 4L), (5, 5L)) /* Note: The checkpoint was generated using the following input in Spark version 2.4.0 AddData(inputStream, (1, 1L), (2, 2L), (3, 3L), (4, 4L), (5, 5L)), // batch 1 - global watermark = 0 // states // left: (1, 1L), (2, 2L), (3, 3L), (4, 4L), (5, 5L) // right: (2, 2L), (4, 4L) CheckNewAnswer((2, 2L, 2, 2L), (4, 4L, 4, 4L)), assertNumStateRows(7, 7), */ // we just fail the query if the checkpoint was create from less than Spark 3.0 val e = intercept[StreamingQueryException] { val writer = query.writeStream.format("console") .option("checkpointLocation", checkpointDir.getAbsolutePath).start() inputStream.addData((7, 7L), (8, 8L)) eventually(timeout(streamingTimeout)) { assert(writer.exception.isDefined) } throw writer.exception.get } assert(e.getMessage.toLowerCase(Locale.ROOT) .contains("the query is using stream-stream leftouter join with state format version 1")) } test("SPARK-29438: ensure UNION doesn't lead stream-stream join to use shifted partition IDs") { def constructUnionDf(desiredPartitionsForInput1: Int) : (MemoryStream[Int], MemoryStream[Int], MemoryStream[Int], DataFrame) = { val input1 = MemoryStream[Int](desiredPartitionsForInput1) val df1 = input1.toDF .select( 'value as "key", 'value as "leftValue", 'value as "rightValue") val (input2, df2) = setupStream("left", 2) val (input3, df3) = setupStream("right", 3) val joined = df2 .join(df3, df2("key") === df3("key") && df2("leftTime") === df3("rightTime"), "inner") .select(df2("key"), 'leftValue, 'rightValue) (input1, input2, input3, df1.union(joined)) } withTempDir { tempDir => val (input1, input2, input3, unionDf) = constructUnionDf(2) testStream(unionDf)( StartStream(checkpointLocation = tempDir.getAbsolutePath), MultiAddData( (input1, Seq(11, 12, 13)), (input2, Seq(11, 12, 13, 14, 15)), (input3, Seq(13, 14, 15, 16, 17))), CheckNewAnswer(Row(11, 11, 11), Row(12, 12, 12), Row(13, 13, 13), Row(13, 26, 39), Row(14, 28, 42), Row(15, 30, 45)), StopStream ) // We're restoring the query with different number of partitions in left side of UNION, // which leads right side of union to have mismatched partition IDs if it relies on // TaskContext.partitionId(). SPARK-29438 fixes this issue to not rely on it. 
val (newInput1, newInput2, newInput3, newUnionDf) = constructUnionDf(3) newInput1.addData(11, 12, 13) newInput2.addData(11, 12, 13, 14, 15) newInput3.addData(13, 14, 15, 16, 17) testStream(newUnionDf)( StartStream(checkpointLocation = tempDir.getAbsolutePath), MultiAddData( (newInput1, Seq(21, 22, 23)), (newInput2, Seq(21, 22, 23, 24, 25)), (newInput3, Seq(23, 24, 25, 26, 27))), CheckNewAnswer(Row(21, 21, 21), Row(22, 22, 22), Row(23, 23, 23), Row(23, 46, 69), Row(24, 48, 72), Row(25, 50, 75)) ) } } test("SPARK-32148 stream-stream join regression on Spark 3.0.0") { val input1 = MemoryStream[(Timestamp, String, String)] val df1 = input1.toDF .selectExpr("_1 as eventTime", "_2 as id", "_3 as comment") .withWatermark(s"eventTime", "2 minutes") val input2 = MemoryStream[(Timestamp, String, String)] val df2 = input2.toDF .selectExpr("_1 as eventTime", "_2 as id", "_3 as name") .withWatermark(s"eventTime", "4 minutes") val joined = df1.as("left") .join(df2.as("right"), expr(""" |left.id = right.id AND left.eventTime BETWEEN | right.eventTime - INTERVAL 30 seconds AND | right.eventTime + INTERVAL 30 seconds """.stripMargin), joinType = "leftOuter") val inputDataForInput1 = Seq( (Timestamp.valueOf("2020-01-01 00:00:00"), "abc", "has no join partner"), (Timestamp.valueOf("2020-01-02 00:00:00"), "abc", "joined with A"), (Timestamp.valueOf("2020-01-02 01:00:00"), "abc", "joined with B")) val inputDataForInput2 = Seq( (Timestamp.valueOf("2020-01-02 00:00:10"), "abc", "A"), (Timestamp.valueOf("2020-01-02 00:59:59"), "abc", "B"), (Timestamp.valueOf("2020-01-02 02:00:00"), "abc", "C")) val expectedOutput = Seq( (Timestamp.valueOf("2020-01-01 00:00:00"), "abc", "has no join partner", null, null, null), (Timestamp.valueOf("2020-01-02 00:00:00"), "abc", "joined with A", Timestamp.valueOf("2020-01-02 00:00:10"), "abc", "A"), (Timestamp.valueOf("2020-01-02 01:00:00"), "abc", "joined with B", Timestamp.valueOf("2020-01-02 00:59:59"), "abc", "B")) testStream(joined)( MultiAddData((input1, inputDataForInput1), (input2, inputDataForInput2)), CheckNewAnswer(expectedOutput.head, expectedOutput.tail: _*) ) } } class StreamingFullOuterJoinSuite extends StreamingJoinSuite { test("windowed full outer join") { val (leftInput, rightInput, joined) = setupWindowedJoin("full_outer") testStream(joined)( MultiAddData(leftInput, 1, 2, 3, 4, 5)(rightInput, 3, 4, 5, 6, 7), CheckNewAnswer(Row(3, 10, 6, 9), Row(4, 10, 8, 12), Row(5, 10, 10, 15)), // states // left: 1, 2, 3, 4 ,5 // right: 3, 4, 5, 6, 7 assertNumStateRows(total = 10, updated = 10), MultiAddData(leftInput, 21)(rightInput, 22), // Watermark = 11, should remove rows having window=[0,10]. CheckNewAnswer(Row(1, 10, 2, null), Row(2, 10, 4, null), Row(6, 10, null, 18), Row(7, 10, null, 21)), // states // left: 21 // right: 22 // // states evicted // left: 1, 2, 3, 4 ,5 (below watermark) // right: 3, 4, 5, 6, 7 (below watermark) assertNumStateRows(total = 2, updated = 2), AddData(leftInput, 22), CheckNewAnswer(Row(22, 30, 44, 66)), // states // left: 21, 22 // right: 22 assertNumStateRows(total = 3, updated = 1), StopStream, StartStream(), AddData(leftInput, 1), // Row not add as 1 < state key watermark = 12. CheckNewAnswer(), // states // left: 21, 22 // right: 22 assertNumStateRows(total = 3, updated = 0, droppedByWatermark = 1), AddData(rightInput, 5), // Row not add as 5 < state key watermark = 12. 
CheckNewAnswer(), // states // left: 21, 22 // right: 22 assertNumStateRows(total = 3, updated = 0, droppedByWatermark = 1) ) } test("full outer early state exclusion on left") { val (leftInput, rightInput, joined) = setupWindowedJoinWithLeftCondition("full_outer") testStream(joined)( MultiAddData(leftInput, 1, 2, 3)(rightInput, 3, 4, 5), // The left rows with leftValue <= 4 should generate their outer join rows now and // not get added to the state. CheckNewAnswer(Row(1, 10, 2, null, null, null), Row(2, 10, 4, null, null, null), Row(3, 10, 6, 3, 10, "9")), // states // left: 3 // right: 3, 4, 5 assertNumStateRows(total = 4, updated = 4), // Generate outer join result for all non-matched rows when the watermark advances. MultiAddData(leftInput, 20)(rightInput, 21), CheckNewAnswer(Row(null, null, null, 4, 10, "12"), Row(null, null, null, 5, 10, "15")), // states // left: 20 // right: 21 // // states evicted // left: 3 (below watermark) // right: 3, 4, 5 (below watermark) assertNumStateRows(total = 2, updated = 2), AddData(rightInput, 20), CheckNewAnswer(Row(20, 30, 40, 20, 30, "60")), // states // left: 20 // right: 21, 20 assertNumStateRows(total = 3, updated = 1) ) } test("full outer early state exclusion on right") { val (leftInput, rightInput, joined) = setupWindowedJoinWithRightCondition("full_outer") testStream(joined)( MultiAddData(leftInput, 3, 4, 5)(rightInput, 1, 2, 3), // The right rows with rightValue <= 7 should generate their outer join rows now, // and never be added to the state. // The right row with rightValue = 9 > 7, hence joined and added to state. CheckNewAnswer(Row(null, null, null, 1, 10, "3"), Row(null, null, null, 2, 10, "6"), Row(3, 10, 6, 3, 10, "9")), // states // left: 3, 4, 5 // right: 3 assertNumStateRows(total = 4, updated = 4), // Generate outer join result for all non-matched rows when the watermark advances. MultiAddData(leftInput, 20)(rightInput, 21), CheckNewAnswer(Row(4, 10, 8, null, null, null), Row(5, 10, 10, null, null, null)), // states // left: 20 // right: 21 // // states evicted // left: 3, 4, 5 (below watermark) // right: 3 (below watermark) assertNumStateRows(total = 2, updated = 2), AddData(rightInput, 20), CheckNewAnswer(Row(20, 30, 40, 20, 30, "60")), // states // left: 20 // right: 21, 20 assertNumStateRows(total = 3, updated = 1) ) } test("full outer join with watermark range condition") { val (leftInput, rightInput, joined) = setupJoinWithRangeCondition("full_outer") testStream(joined)( AddData(leftInput, (1, 5), (3, 5)), CheckNewAnswer(), // states // left: (1, 5), (3, 5) // right: nothing assertNumStateRows(total = 2, updated = 2), AddData(rightInput, (1, 10), (2, 5)), // Match left row in the state. CheckNewAnswer(Row(1, 1, 5, 10)), // states // left: (1, 5), (3, 5) // right: (1, 10), (2, 5) assertNumStateRows(total = 4, updated = 2), AddData(rightInput, (1, 9)), // Match left row in the state. CheckNewAnswer(Row(1, 1, 5, 9)), // states // left: (1, 5), (3, 5) // right: (1, 10), (2, 5), (1, 9) assertNumStateRows(total = 5, updated = 1), // Increase event time watermark to 20s by adding data with time = 30s on both inputs. AddData(leftInput, (1, 7), (1, 30)), CheckNewAnswer(Row(1, 1, 7, 9), Row(1, 1, 7, 10)), // states // left: (1, 5), (3, 5), (1, 7), (1, 30) // right: (1, 10), (2, 5), (1, 9) assertNumStateRows(total = 7, updated = 2), // Watermark = 30 - 10 = 20, no matched row. // Generate outer join result for all non-matched rows when the watermark advances. 
AddData(rightInput, (0, 30)), CheckNewAnswer(Row(3, null, 5, null), Row(null, 2, null, 5)), // states // left: (1, 30) // right: (0, 30) // // states evicted // left: (1, 5), (3, 5), (1, 5) (below watermark = 20) // right: (1, 10), (2, 5), (1, 9) (below watermark = 20) assertNumStateRows(total = 2, updated = 1) ) } test("self full outer join") { val (inputStream, query) = setupSelfJoin("full_outer") testStream(query)( AddData(inputStream, (1, 1L), (2, 2L), (3, 3L), (4, 4L), (5, 5L)), CheckNewAnswer(Row(2, 2L, 2, 2L), Row(4, 4L, 4, 4L)), // batch 1 - global watermark = 0 // states // left: (1, 1L), (2, 2L), (3, 3L), (4, 4L), (5, 5L) // right: (2, 2L), (4, 4L) assertNumStateRows(total = 7, updated = 7), AddData(inputStream, (6, 6L), (7, 7L), (8, 8L), (9, 9L), (10, 10L)), CheckNewAnswer(Row(6, 6L, 6, 6L), Row(8, 8L, 8, 8L), Row(10, 10L, 10, 10L)), // batch 2 - global watermark = 5 // states // left: (1, 1L), (2, 2L), (3, 3L), (4, 4L), (5, 5L), (6, 6L), (7, 7L), (8, 8L), // (9, 9L), (10, 10L) // right: (6, 6L), (8, 8L), (10, 10L) // // states evicted // left: nothing (it waits for 5 seconds more than watermark due to join condition) // right: (2, 2L), (4, 4L) assertNumStateRows(total = 13, updated = 8), AddData(inputStream, (11, 11L), (12, 12L), (13, 13L), (14, 14L), (15, 15L)), CheckNewAnswer(Row(12, 12L, 12, 12L), Row(14, 14L, 14, 14L), Row(1, 1L, null, null), Row(3, 3L, null, null)), // batch 3 - global watermark = 9 // states // left: (4, 4L), (5, 5L), (6, 6L), (7, 7L), (8, 8L), (9, 9L), (10, 10L), (11, 11L), // (12, 12L), (13, 13L), (14, 14L), (15, 15L) // right: (10, 10L), (12, 12L), (14, 14L) // // states evicted // left: (1, 1L), (2, 2L), (3, 3L) // right: (6, 6L), (8, 8L) assertNumStateRows(total = 15, updated = 7) ) } } class StreamingLeftSemiJoinSuite extends StreamingJoinSuite { import testImplicits._ test("windowed left semi join") { val (leftInput, rightInput, joined) = setupWindowedJoin("left_semi") testStream(joined)( MultiAddData(leftInput, 1, 2, 3, 4, 5)(rightInput, 3, 4, 5, 6, 7), CheckNewAnswer(Row(3, 10, 6), Row(4, 10, 8), Row(5, 10, 10)), // states // left: 1, 2, 3, 4 ,5 // right: 3, 4, 5, 6, 7 assertNumStateRows(total = 10, updated = 10), MultiAddData(leftInput, 21)(rightInput, 22), // Watermark = 11, should remove rows having window=[0,10]. CheckNewAnswer(), // states // left: 21 // right: 22 // // states evicted // left: 1, 2, 3, 4 ,5 (below watermark) // right: 3, 4, 5, 6, 7 (below watermark) assertNumStateRows(total = 2, updated = 2), AddData(leftInput, 22), CheckNewAnswer(Row(22, 30, 44)), // Unlike inner/outer joins, given left input row matches with right input row, // we don't buffer the matched left input row to the state store. // // states // left: 21 // right: 22 assertNumStateRows(total = 2, updated = 0), StopStream, StartStream(), AddData(leftInput, 1), // Row not add as 1 < state key watermark = 12. CheckNewAnswer(), // states // left: 21 // right: 22 assertNumStateRows(total = 2, updated = 0, droppedByWatermark = 1), AddData(rightInput, 5), // Row not add as 5 < state key watermark = 12. CheckNewAnswer(), // states // left: 21 // right: 22 assertNumStateRows(total = 2, updated = 0, droppedByWatermark = 1) ) } test("left semi early state exclusion on left") { val (leftInput, rightInput, joined) = setupWindowedJoinWithLeftCondition("left_semi") testStream(joined)( MultiAddData(leftInput, 1, 2, 3)(rightInput, 3, 4, 5), // The left rows with leftValue <= 4 should not generate their semi join rows and // not get added to the state. 
CheckNewAnswer(Row(3, 10, 6)), // states // left: 3 // right: 3, 4, 5 assertNumStateRows(total = 4, updated = 4), // We shouldn't get more semi join rows when the watermark advances. MultiAddData(leftInput, 20)(rightInput, 21), CheckNewAnswer(), // states // left: 20 // right: 21 // // states evicted // left: 3 (below watermark) // right: 3, 4, 5 (below watermark) assertNumStateRows(total = 2, updated = 2), AddData(rightInput, 20), CheckNewAnswer((20, 30, 40)), // states // left: 20 // right: 21, 20 assertNumStateRows(total = 3, updated = 1) ) } test("left semi early state exclusion on right") { val (leftInput, rightInput, joined) = setupWindowedJoinWithRightCondition("left_semi") testStream(joined)( MultiAddData(leftInput, 3, 4, 5)(rightInput, 1, 2, 3), // The right rows with rightValue <= 7 should never be added to the state. // The right row with rightValue = 9 > 7, hence joined and added to state. CheckNewAnswer(Row(3, 10, 6)), // states // left: 3, 4, 5 // right: 3 assertNumStateRows(total = 4, updated = 4), // We shouldn't get more semi join rows when the watermark advances. MultiAddData(leftInput, 20)(rightInput, 21), CheckNewAnswer(), // states // left: 20 // right: 21 // // states evicted // left: 3, 4, 5 (below watermark) // right: 3 (below watermark) assertNumStateRows(total = 2, updated = 2), AddData(rightInput, 20), CheckNewAnswer((20, 30, 40)), // states // left: 20 // right: 21, 20 assertNumStateRows(total = 3, updated = 1) ) } test("left semi join with watermark range condition") { val (leftInput, rightInput, joined) = setupJoinWithRangeCondition("left_semi") testStream(joined)( AddData(leftInput, (1, 5), (3, 5)), CheckNewAnswer(), // states // left: (1, 5), (3, 5) // right: nothing assertNumStateRows(total = 2, updated = 2), AddData(rightInput, (1, 10), (2, 5)), // Match left row in the state. CheckNewAnswer((1, 5)), // states // left: (1, 5), (3, 5) // right: (1, 10), (2, 5) assertNumStateRows(total = 4, updated = 2), AddData(rightInput, (1, 9)), // No match as left row is already matched. CheckNewAnswer(), // states // left: (1, 5), (3, 5) // right: (1, 10), (2, 5), (1, 9) assertNumStateRows(total = 5, updated = 1), // Increase event time watermark to 20s by adding data with time = 30s on both inputs. AddData(leftInput, (1, 7), (1, 30)), CheckNewAnswer((1, 7)), // states // left: (1, 5), (3, 5), (1, 30) // right: (1, 10), (2, 5), (1, 9) assertNumStateRows(total = 6, updated = 1), // Watermark = 30 - 10 = 20, no matched row. 
AddData(rightInput, (0, 30)), CheckNewAnswer(), // states // left: (1, 30) // right: (0, 30) // // states evicted // left: (1, 5), (3, 5) (below watermark = 20) // right: (1, 10), (2, 5), (1, 9) (below watermark = 20) assertNumStateRows(total = 2, updated = 1) ) } test("self left semi join") { val (inputStream, query) = setupSelfJoin("left_semi") testStream(query)( AddData(inputStream, (1, 1L), (2, 2L), (3, 3L), (4, 4L), (5, 5L)), CheckNewAnswer((2, 2), (4, 4)), // batch 1 - global watermark = 0 // states // left: (2, 2L), (4, 4L) // (left rows with value % 2 != 0 is filtered per [[PushPredicateThroughJoin]]) // right: (2, 2L), (4, 4L) // (right rows with value % 2 != 0 is filtered per [[PushPredicateThroughJoin]]) assertNumStateRows(total = 4, updated = 4), AddData(inputStream, (6, 6L), (7, 7L), (8, 8L), (9, 9L), (10, 10L)), CheckNewAnswer((6, 6), (8, 8), (10, 10)), // batch 2 - global watermark = 5 // states // left: (2, 2L), (4, 4L), (6, 6L), (8, 8L), (10, 10L) // right: (6, 6L), (8, 8L), (10, 10L) // // states evicted // left: nothing (it waits for 5 seconds more than watermark due to join condition) // right: (2, 2L), (4, 4L) assertNumStateRows(total = 8, updated = 6), AddData(inputStream, (11, 11L), (12, 12L), (13, 13L), (14, 14L), (15, 15L)), CheckNewAnswer((12, 12), (14, 14)), // batch 3 - global watermark = 9 // states // left: (4, 4L), (6, 6L), (8, 8L), (10, 10L), (12, 12L), (14, 14L) // right: (10, 10L), (12, 12L), (14, 14L) // // states evicted // left: (2, 2L) // right: (6, 6L), (8, 8L) assertNumStateRows(total = 9, updated = 4) ) } }
cloud-fan/spark
sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingJoinSuite.scala
Scala
apache-2.0
59,197
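Outside the test harness, the stream-stream left semi join that these cases exercise can be written against public Structured Streaming APIs roughly as below. This is an illustrative sketch only: Spark 3.1+ is assumed, and the rate sources, column names and the 10-second bound stand in for the suite's MemoryStreams and join conditions.

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions._

object LeftSemiStreamJoinSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("left-semi-sketch").master("local[2]").getOrCreate()
    import spark.implicits._

    // Two rate streams with distinct column names so the join condition can name both sides.
    val left = spark.readStream.format("rate").load()
      .select(($"value" % 10).as("leftKey"), $"timestamp".as("leftTime"))
      .withWatermark("leftTime", "10 seconds")
    val right = spark.readStream.format("rate").load()
      .select(($"value" % 10).as("rightKey"), $"timestamp".as("rightTime"))
      .withWatermark("rightTime", "10 seconds")

    // A left semi join emits each left row at most once, when a matching right row arrives.
    // The watermark plus the time-range condition bounds how long unmatched rows are kept in
    // the state store, which is what assertNumStateRows verifies in the suite above.
    val joined = left.join(
      right,
      expr("leftKey = rightKey AND rightTime BETWEEN leftTime AND leftTime + interval 10 seconds"),
      "left_semi")

    joined.writeStream.format("console").start().awaitTermination()
  }
}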
package io.sqooba.oss.timeseries.zio import io.sqooba.oss.timeseries.immutable.TSEntry import zio.stream._ import zio.{Queue, Task, UIO, ZIO} class AppendableEntryStream[T]( finalizedSink: Queue[Take[Nothing, TSEntry[T]]], val finalizedEntries: Stream[Nothing, TSEntry[T]], fitter: ZEntryFitter[T] ) { def +=(elem: TSEntry[T]): Task[Unit] = addOne(elem) def ++=(xs: Seq[TSEntry[T]]): Task[Unit] = ZIO.foreach(xs)(addOne).unit def addOne(elem: TSEntry[T]): Task[Unit] = { fitter.addAndFitLast(elem).flatMap { case Some(entry) => finalizedSink.offer(Take.single(entry)).unit case None => ZIO.unit } } /** * Appends the last entry present in the fitter (if any) * and emits a terminating 'Take' to the queue. * * No entries should subsequently be added. */ def close(): UIO[Unit] = appendLastEntryIfRequired() *> finalizedSink.offer(Take.end).unit private def appendLastEntryIfRequired() = fitter.lastEntry.flatMap { case Some(e) => finalizedSink.offer(Take.single(e)).unit case None => ZIO.unit } } object AppendableEntryStream { def unbounded[T](compress: Boolean): UIO[AppendableEntryStream[T]] = { for { // Create an unbounded queue that receives Takes q <- Queue.unbounded[Take[Nothing, TSEntry[T]]] // Build a stream from the queue and do a `flattenTake` so we can // terminate a stream of entries by passing a Take.end to the queue // (Using #fromQueueWithShutdown() so the queue gets shutdown // once the stream has received the terminating entry) s = Stream.fromQueueWithShutdown(q).flattenTake fitter <- ZEntryFitter.init[T](compress) } yield new AppendableEntryStream(q, s, fitter) } }
Shastick/tslib
src/main/scala/io/sqooba/oss/timeseries/zio/AppendableEntryStream.scala
Scala
mit
1,791
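A hedged usage sketch for the AppendableEntryStream above: it assumes tslib's TSEntry(timestamp, value, validity) constructor and a ZIO 1.x runtime, so the exact calls may need adapting to the versions actually in use.

import io.sqooba.oss.timeseries.immutable.TSEntry
import io.sqooba.oss.timeseries.zio.AppendableEntryStream
import zio.Runtime

object AppendableEntryStreamSketch extends App {
  val program = for {
    aes <- AppendableEntryStream.unbounded[Double](compress = true)
    // Append entries; with compress = true the fitter may merge contiguous equal values.
    _   <- aes.addOne(TSEntry(1000L, 42.0, 100L))          // assumed TSEntry(ts, value, validity)
    _   <- aes.++=(Seq(TSEntry(1100L, 43.0, 100L), TSEntry(1200L, 43.0, 100L)))
    // Flush the pending entry and terminate the underlying queue.
    _   <- aes.close()
    all <- aes.finalizedEntries.runCollect                  // drain the finished stream
  } yield all

  println(Runtime.default.unsafeRun(program))
}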
package jp.pigumer.sbt.cloud.aws.autoscaling import com.amazonaws.services.autoscaling.AmazonAutoScaling import sbt._ trait AutoScalingKeys { lazy val awsAutoScaling = taskKey[AmazonAutoScaling]("AWS AutoScaling tasks") }
PigumerGroup/sbt-aws-cloudformation
src/main/scala/jp/pigumer/sbt/cloud/aws/autoscaling/AutoScalingKeys.scala
Scala
mit
228
package com.mesosphere.universe.v3.model import com.mesosphere.cosmos.circe.Decoders._ import com.twitter.util.Return import com.twitter.util.Throw import com.twitter.util.Try import io.circe.syntax.EncoderOps import io.circe.Decoder import io.circe.DecodingFailure import io.circe.Encoder import io.circe.HCursor import java.util.regex.Pattern final class Tag private(val value: String) extends AnyVal { override def toString: String = value } object Tag { val packageDetailsTagRegex: String = "^[^\\s]+$" val packageDetailsTagPattern: Pattern = Pattern.compile(packageDetailsTagRegex) def apply(s: String): Tag = validate(s).get def validate(s: String): Try[Tag] = { if (packageDetailsTagPattern.matcher(s).matches()) { Return(new Tag(s)) } else { Throw(new IllegalArgumentException( s"Value '$s' does not conform to expected format $packageDetailsTagRegex" )) } } implicit val encodePackageDefinitionTag: Encoder[Tag] = { Encoder.instance(_.value.asJson) } implicit val decodePackageDefinitionTag: Decoder[Tag] = Decoder.instance[Tag] { (c: HCursor) => c.as[String].map(validate(_)).flatMap { case Return(r) => Right(r) case Throw(ex) => val msg = ex.getMessage.replaceAllLiterally("assertion failed: ", "") Left(DecodingFailure(msg, c.history)) } } }
dcos/cosmos
cosmos-common/src/main/scala/com/mesosphere/universe/v3/model/Tag.scala
Scala
apache-2.0
1,382
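A small usage sketch for Tag above: validate returns Twitter's Try, and the circe codecs in the companion round-trip the value as a plain JSON string. The tag strings here are made up.

import com.mesosphere.universe.v3.model.Tag
import com.twitter.util.{Return, Throw}
import io.circe.syntax._

object TagSketch extends App {
  Tag.validate("analytics") match {                  // no whitespace, so it validates
    case Return(tag) => println(tag.asJson.noSpaces) // prints the JSON string "analytics"
    case Throw(e)    => println(s"rejected: ${e.getMessage}")
  }
  println(Tag.validate("not a tag").isThrow)         // true: whitespace is rejected
}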
package com.kasonchan.share import android.os.Bundle import android.app.Activity import android.view.Menu import android.widget.SeekBar import android.widget.EditText import android.widget.TextView import android.widget.TextView._ import android.text.TextWatcher import android.text.Editable import android.widget.SeekBar.OnSeekBarChangeListener import android.widget.SeekBar.OnSeekBarChangeListener._ import android.widget.Toast import android.util.Log import android.view.Gravity import android.view.ViewGroup /** * Created by kasonchan on 3/8/2015. */ class MainActivity extends Activity { override def onCreate(savedInstanceState: Bundle): Unit = { super.onCreate(savedInstanceState) // Hide default action bar super.getActionBar.hide() // Show content view setContentView(R.layout.activity_main) // Initialize components val afterTax: EditText = findViewById(R.id.after_tax).asInstanceOf[EditText] val tips: SeekBar = findViewById(R.id.add_tips_seekbar).asInstanceOf[SeekBar] val tipsAmount: TextView = findViewById(R.id.tips_amount).asInstanceOf[TextView] val afterTips: TextView = findViewById(R.id.after_tips).asInstanceOf[TextView] val splits: SeekBar = findViewById(R.id.split_seekbar).asInstanceOf[SeekBar] val splitAmount: TextView = findViewById(R.id.split_amount).asInstanceOf[TextView] val afterSplits: TextView = findViewById(R.id.after_splits).asInstanceOf[TextView] // Format decimal number to two decimal places val decimalFormat: java.text.DecimalFormat = new java.text.DecimalFormat("#.00") // Toast layout ando configuration val inflater = getLayoutInflater() val layout = inflater.inflate(R.layout.toast, findViewById(R.id.toast_layout).asInstanceOf[ViewGroup]) val toastMsg: TextView = layout.findViewById(R.id.toast_msg).asInstanceOf[TextView] // Toast message with short period of time val toast: Toast = new Toast(getApplicationContext()) toast.setDuration(Toast.LENGTH_SHORT) toast.setView(layout) /** * After tax input watcher */ val afterTaxWatcher = new TextWatcher() { // After total amount is input override def afterTextChanged(s: Editable) = { // Get the value from afterTax val afterTaxValue = getEditTextValue(afterTax) if (afterTaxValue.matches("")) { // Set toast text toastMsg.setText("Invalid input - empty amount") // Show toast toast.show() // Set after tips to empty afterTips.setText("") } else if (afterTaxValue.matches("0")) { // Set toast text toastMsg.setText("Invalid input - amount can not be 0") // Show toast toast.show() // Set after tips to empty afterTips.setText("") } else if (afterTaxValue.matches("0.0*")) { // Set toast text toastMsg.setText("Invalid input - amount can not be 0") // Show toast toast.show() // Set after tips to empty afterTips.setText("") } else { afterTips.setText(afterTaxValue) } } override def beforeTextChanged(s: CharSequence, start: Int, count: Int, after: Int) = { } override def onTextChanged(s: CharSequence, start: Int, before: Int, count: Int) = { } } afterTax.addTextChangedListener(afterTaxWatcher) /** * Add tips listener */ val tipsListener: OnSeekBarChangeListener = new OnSeekBarChangeListener() { override def onStopTrackingTouch(seekBar: SeekBar) = { val tipsValue: Int = seekBar.getProgress val afterTaxValue = afterTax.getText() if (afterTaxValue.toString().equals("")) { // Set toast text toastMsg.setText("Invalid input - empty amount") // Show toast toast.show() // Set after tips to empty afterTips.setText("") } else if (afterTaxValue.toString().equals("0")) { // Set toast text toastMsg.setText("Invalid input - amount can not be 0") // Show toast 
toast.show() // Set after tips to empty afterTips.setText("") } else if (afterTaxValue.toString().equals("0.00")) { // Set toast text toastMsg.setText("Invalid input - amount can not be 0") // Show toast toast.show() // Set after tips to empty afterTips.setText("") } else { // Calculate amount with added tips val afterTipsValue = java.lang.Double.parseDouble(afterTaxValue.toString()) * (1 + (tipsValue / 100.00)) // Show selected tips tipsAmount.setText(String.valueOf(tipsValue).concat("%")) // Show the amount with added tips afterTips.setText(String.valueOf(decimalFormat.format(afterTipsValue))) } } override def onStartTrackingTouch(seekBar: SeekBar) = { val tipsValue: Int = seekBar.getProgress tipsAmount.setText(String.valueOf(tipsValue).concat("%")) } override def onProgressChanged(seekBar: SeekBar, tipsValue: Int, fromUser: Boolean) = { tipsAmount.setText(String.valueOf(tipsValue).concat("%")) } } tips.setOnSeekBarChangeListener(tipsListener) /** * Split check listener */ val splitsListener: OnSeekBarChangeListener = new OnSeekBarChangeListener() { override def onStopTrackingTouch(seekBar: SeekBar) = { val splitsValue: Int = seekBar.getProgress + 2 val afterTipsValue = afterTips.getText() if (afterTipsValue.toString().equals("")) { // Set toast text toastMsg.setText("Invalid input - empty amount") // Show toast toast.show() // Set after tips to empty afterSplits.setText("") } else if (afterTipsValue.toString().equals("0")) { // Set toast text toastMsg.setText("Invalid input - amount can not be 0") // Show toast toast.show() // Set after tips to empty afterSplits.setText("") } else if (afterTipsValue.toString().equals("0.00")) { // Set toast text toastMsg.setText("Invalid input - amount can not be 0") // Show toast toast.show() // Set after tips to empty afterSplits.setText("") } else { // Calculate amount with added tips val afterSplitsValue = java.lang.Double.parseDouble(afterTipsValue.toString()) / splitsValue // Show selected splits splitAmount.setText(String.valueOf(splitsValue)) // Show the amount with added tips afterSplits.setText(String.valueOf(decimalFormat.format(afterSplitsValue))) } } override def onStartTrackingTouch(seekBar: SeekBar) = { val splitsValue: Int = seekBar.getProgress splitAmount.setText(String.valueOf(splitsValue)) } override def onProgressChanged(seekBar: SeekBar, splitsValue: Int, fromUser: Boolean) = { splitAmount.setText(String.valueOf(splitsValue + 2)) } } splits.setOnSeekBarChangeListener(splitsListener) } def getEditTextValue(editText: EditText): String = { editText.getText.toString } def getTextViewValue(textView: TextView): Double = { val textViewValue = textView.getText.toString if (textViewValue.matches("")) 0 else textViewValue.toString().toDouble } def getSeekBarValue(seekBar: SeekBar): Int = { val seekBarValue = seekBar.getProgress seekBarValue } }
KasonChan/tips_and_share
src/com/kasonchan/share/MainActivity.scala
Scala
mit
7,828
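The Android activity above mixes input validation with widget wiring; stripped of the UI, the arithmetic it applies is just the following. This is an illustrative stand-alone sketch, not part of the app.

object TipMathSketch extends App {
  val decimalFormat = new java.text.DecimalFormat("#.00")

  // Total after adding a percentage tip to the after-tax amount.
  def afterTips(afterTax: Double, tipPercent: Int): Double =
    afterTax * (1 + tipPercent / 100.0)

  // Per-person share after splitting the tipped total.
  def afterSplits(total: Double, people: Int): Double =
    total / people

  val total = afterTips(50.0, 15)
  println(decimalFormat.format(total))                 // 57.50
  println(decimalFormat.format(afterSplits(total, 3))) // 19.17
}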
package cakesolutions.kafka import cakesolutions.kafka.KafkaTopicPartition.{Partition, Topic} import org.apache.kafka.clients.producer.ProducerRecord import org.apache.kafka.common.TopicPartition /** * Helper functions for creating Kafka's `ProducerRecord`s. * * The producer records hold the data that is to be written to Kafka. * The producer records are compatible with Kafka's own `KafkaProducer` and the [[KafkaProducer]] in this library. */ object KafkaProducerRecord { /** * Destination for Kafka producer records. */ object Destination { /** * Destination by topic. * * Selects the destination for producer records by topic only. */ def apply(topic: Topic): Destination = Destination(topic, None) /** * Destination by topic and partition. * * Selects the destination for producer records to a specific topic and partition. */ def apply(topic: Topic, partition: Partition): Destination = Destination(topic, Some(partition)) /** * Destination by topic and partition. * * Selects the destination for producer records to a specific topic and partition. */ def apply(topicPartition: TopicPartition): Destination = Destination(topicPartition.topic(), topicPartition.partition()) } /** * Destination for Kafka producer records. */ final case class Destination(topic: Topic, partition: Option[Partition]) /** * Create a producer record with an optional key. * * @param topic the topic where the record will be appended to * @param key optional key that will be included in the record * @param value the value that will be included in the record * @tparam Key type of the key * @tparam Value type of the value * @return producer record */ def apply[Key, Value](topic: String, key: Option[Key], value: Value): ProducerRecord[Key, Value] = key match { case Some(k) => new ProducerRecord(topic, k, value) case None => new ProducerRecord(topic, value) } /** * Create a producer record with topic, key, and value. * * @param topic the topic where the record will be appended to * @param key the key that will be included in the record * @param value the value that will be included in the record * @tparam Key type of the key * @tparam Value type of the value * @return producer record */ def apply[Key, Value](topic: String, key: Key, value: Value): ProducerRecord[Key, Value] = new ProducerRecord(topic, key, value) /** * Create a producer record without a key. * * @param topic topic to which record is being sent * @param value the value that will be included in the record * @tparam Key type of the key * @tparam Value type of the value * @return producer record */ def apply[Key, Value](topic: String, value: Value): ProducerRecord[Key, Value] = new ProducerRecord(topic, value) /** * Create a producer record from a topic selection, optional key, value, and optional timestamp. 
* * @param topicPartitionSelection the topic (with optional partition) where the record will be appended to * @param key the key that will be included in the record * @param value the value that will be included in the record * @param timestamp the timestamp of the record * @tparam Key type of the key * @tparam Value type of the value * @return producer record */ def apply[Key >: Null, Value]( topicPartitionSelection: Destination, key: Option[Key] = None, value: Value, timestamp: Option[Long] = None ): ProducerRecord[Key, Value] = { val topic = topicPartitionSelection.topic val partition = topicPartitionSelection.partition.map(i => i: java.lang.Integer).orNull val nullableKey = key.orNull val nullableTimestamp = timestamp.map(i => i: java.lang.Long).orNull new ProducerRecord[Key, Value](topic, partition, nullableTimestamp, nullableKey, value) } /** * Create producer records from a sequence of values. * All the records will have the given topic and no key. * * @param topic topic to write the records to * @param values values of the records */ def fromValues[Value](topic: String, values: Seq[Value]): Seq[ProducerRecord[Nothing, Value]] = values.map(value => KafkaProducerRecord(topic, value)) /** * Create producer records from a single key and multiple values. * All the records will have the given topic and key. * * @param topic topic to write the records to * @param key key of the records * @param values values of the records */ def fromValuesWithKey[Key, Value](topic: String, key: Option[Key], values: Seq[Value]): Seq[ProducerRecord[Key, Value]] = values.map(value => KafkaProducerRecord(topic, key, value)) /** * Create producer records from topics and values. * All the records will have no key. * * @param valuesWithTopic a sequence of topic and value pairs */ def fromValuesWithTopic[Value](valuesWithTopic: Seq[(String, Value)]): Seq[ProducerRecord[Nothing, Value]] = valuesWithTopic.map { case (topic, value) => KafkaProducerRecord(topic, value) } /** * Create producer records from key-value pairs. * All the records will have the given topic. * * @param topic topic to write the records to * @param keyValues a sequence of key and value pairs */ def fromKeyValues[Key, Value](topic: String, keyValues: Seq[(Option[Key], Value)]): Seq[ProducerRecord[Key, Value]] = keyValues.map { case (key, value) => KafkaProducerRecord(topic, key, value) } /** * Create producer records from topic, key, and value triples. * * @param keyValuesWithTopic a sequence of topic, key, and value triples. */ def fromKeyValuesWithTopic[Key, Value](keyValuesWithTopic: Iterable[(String, Option[Key], Value)]): Iterable[ProducerRecord[Key, Value]] = keyValuesWithTopic.map { case (topic, key, value) => KafkaProducerRecord(topic, key, value) } }
simonsouter/scala-kafka-client
client/src/main/scala/cakesolutions/kafka/KafkaProducerRecord.scala
Scala
mit
6,092
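A usage sketch for the KafkaProducerRecord helpers above; the topic names, keys and values are made up, and the resulting records can be handed to either Kafka's own KafkaProducer or this library's wrapper.

import cakesolutions.kafka.KafkaProducerRecord
import cakesolutions.kafka.KafkaProducerRecord.Destination

object ProducerRecordSketch extends App {
  // Topic, optional key and value.
  val keyed = KafkaProducerRecord("metrics", Some("host-1"), "cpu=0.42")

  // Keyless record.
  val keyless = KafkaProducerRecord("metrics", "uptime=120s")

  // Pin a record to partition 0 of the topic via a Destination.
  val pinned = KafkaProducerRecord(Destination("metrics", partition = 0), key = Some("host-1"), value = "cpu=0.42")

  // Batch helper: one keyless record per value.
  val batch = KafkaProducerRecord.fromValues("metrics", Seq("a", "b", "c"))

  println(s"${keyed.topic} key=${keyed.key}")   // metrics key=host-1
  println(pinned.partition)                     // 0
  println(batch.size)                           // 3
}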
package com.github.probe.android import android.app.Service trait RichService { this: Service => def application = getApplication.asInstanceOf[XApplication] def settings = application.settings }
khernyo/freezing-ninja
android/src/main/scala/com/github/probe/android/RichService.scala
Scala
apache-2.0
201
/* * Copyright 2014 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.vertx.scala.core.http import org.vertx.java.core.http.{ HttpServerFileUpload => JHttpServerFileUpload } import java.nio.charset.Charset import org.vertx.scala.Self import org.vertx.scala.core.streams.ReadStream /** * @author Galder Zamarreño */ final class HttpServerFileUpload private[scala] (val asJava: JHttpServerFileUpload) extends Self with ReadStream { override type J = JHttpServerFileUpload /** * Stream the content of this upload to the given filename. */ def streamToFileSystem(filename: String): HttpServerFileUpload = wrap(asJava.streamToFileSystem(filename)) /** * Returns the filename which was used when upload the file. */ def filename(): String = asJava.filename() /** * Returns the name of the attribute */ def name(): String = asJava.name() /** * Returns the contentType for the upload */ def contentType(): String = asJava.contentType() /** * Returns the contentTransferEncoding for the upload */ def contentTransferEncoding(): String = asJava.contentTransferEncoding() /** * Returns the charset for the upload */ def charset(): Charset = asJava.charset() /** * Returns the size of the upload (in bytes) */ def size(): Long = asJava.size() } object HttpServerFileUpload { def apply(internal: JHttpServerFileUpload) = new HttpServerFileUpload(internal) }
galderz/mod-lang-scala
src/main/scala/org/vertx/scala/core/http/HttpServerFileUpload.scala
Scala
apache-2.0
1,997
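A hedged sketch of how a request handler might use the HttpServerFileUpload wrapper above; only methods defined in the wrapper are used, and the surrounding Vert.x server wiring (multipart handling, upload handler registration) is assumed rather than shown.

import org.vertx.scala.core.http.HttpServerFileUpload

object UploadHandlerSketch {
  // Persist an incoming upload under its original file name and log its metadata.
  def handleUpload(upload: HttpServerFileUpload): Unit = {
    upload.streamToFileSystem("/tmp/" + upload.filename())
    println(s"upload '${upload.name()}' (${upload.contentType()}, charset ${upload.charset()}) -> ${upload.filename()}")
  }
}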
/* * Copyright 2015 ligaDATA * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.ligadata.metadataapiservice import akka.actor.{Actor, ActorRef} import akka.event.Logging import akka.io.IO import spray.routing.RequestContext import spray.httpx.SprayJsonSupport import spray.client.pipelining._ import scala.util.{ Success, Failure } import com.ligadata.MetadataAPI._ import com.ligadata.kamanja.metadata._ import com.ligadata.AuditAdapterInfo.AuditConstants import scala.util.control._ import org.apache.logging.log4j._ object UpdateSourceModelService { case class UpdateJava(sourceCode:String) case class UpdateScala(sourceCode:String) } class UpdateSourceModelService(requestContext: RequestContext, userid:Option[String], password:Option[String], cert:Option[String], modelname: Option[String]) extends Actor { import UpdateSourceModelService._ implicit val system = context.system import system.dispatcher val log = Logging(system, getClass) val APIName = "UpdateSourceModelService" val loggerName = this.getClass.getName val logger = LogManager.getLogger(loggerName) def receive = { case UpdateJava(sourceCode) => { log.debug("Updating java model") updateJava(sourceCode) context.stop(self) } case UpdateScala(sourceCode) => { log.debug("Updating scala model") updateScala(sourceCode) context.stop(self) } } def updateScala(pmmlStr:String) = { log.debug("Requesting UpdateSourceModel {}",pmmlStr) val usersModelName=userid.getOrElse("")+"."+modelname.getOrElse("") logger.debug("user model name is: "+usersModelName) if (!MetadataAPIImpl.checkAuth(userid,password,cert, MetadataAPIImpl.getPrivilegeName("update","model"))) { // MetadataAPIImpl.logAuditRec(userid,Some(AuditConstants.WRITE),AuditConstants.UPDATEOBJECT,pmmlStr,AuditConstants.FAIL,"",nameVal) requestContext.complete(new ApiResult(ErrorCodeConstants.Failure, APIName, null, "Error:UPDATE not allowed for this user").toString ) }else if((modelname.getOrElse(""))=="") { requestContext.complete(new ApiResult(ErrorCodeConstants.Failure, APIName, null, "Failed to add model. No model configuration name supplied. 
Please specify in the header the model configuration name where the key is 'modelname' and the value is the name of the configuration.").toString ) } else { val apiResult = MetadataAPIImpl.UpdateModel(pmmlStr,"scala",usersModelName,userid) requestContext.complete(apiResult) } } def updateJava(pmmlStr:String) = { log.debug("Requesting UpdateSourceModel {}",pmmlStr) val usersModelName=userid.getOrElse("")+"."+modelname.getOrElse("") logger.debug("(Put request) user model name is: "+usersModelName) if (!MetadataAPIImpl.checkAuth(userid,password,cert, MetadataAPIImpl.getPrivilegeName("update","model"))) { // MetadataAPIImpl.logAuditRec(userid,Some(AuditConstants.WRITE),AuditConstants.UPDATEOBJECT,pmmlStr,AuditConstants.FAIL,"",nameVal) requestContext.complete(new ApiResult(ErrorCodeConstants.Failure, APIName, null, "Error:UPDATE not allowed for this user").toString ) }else if((modelname.getOrElse(""))=="") { requestContext.complete(new ApiResult(ErrorCodeConstants.Failure, APIName, null, "Failed to add model. No model configuration name supplied. Please specify in the header the model configuration name where the key is 'modelname' and the value is the name of the configuration.").toString ) } else { val apiResult = MetadataAPIImpl.UpdateModel(pmmlStr,"java",usersModelName,userid) requestContext.complete(apiResult) } } }
traytonwhite/Kamanja
trunk/MetadataAPIService/src/main/scala/com/ligadata/metadataapiservice/UpdateSourceModelService.scala
Scala
apache-2.0
4,162
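Illustrative only: how a spray route handler might hand a model update off to the actor above. The user, password and model configuration names are hypothetical, and the surrounding route/actor-system wiring is assumed rather than taken from Kamanja.

import akka.actor.{ActorSystem, Props}
import spray.routing.RequestContext
import com.ligadata.metadataapiservice.UpdateSourceModelService
import com.ligadata.metadataapiservice.UpdateSourceModelService.UpdateScala

object UpdateModelSketch {
  // Spawn a per-request worker; it completes the RequestContext and stops itself.
  def updateScalaModel(system: ActorSystem, ctx: RequestContext, source: String): Unit = {
    val worker = system.actorOf(Props(new UpdateSourceModelService(
      ctx, Some("someUser"), Some("somePassword"), None, Some("someModelConfig"))))
    worker ! UpdateScala(source)
  }
}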
package webhooq.model.dao import com.hazelcast.nio.DataSerializable import webhooq.logging.WebhooqLogger import java.net.{URISyntaxException, URI} import java.io.{IOException, DataInput, DataOutput} /** */ case class DeliveryRef (var message_id:Option[MessageRef]=None, var uri:Option[URI]=None) extends DataSerializable with WebhooqLogger { def this() { this(None,None) } def writeData(out: DataOutput):Unit = { if (wqLog.isDebugEnabled) wqLog.debug("writeData started") this.message_id.getOrElse(throw new IOException("DeliveryRef.message_id may not be None")).writeData(out) out.writeUTF(uri.getOrElse(throw new IOException("DeliveryRef.uri may not be None")).toString) if (wqLog.isDebugEnabled) wqLog.debug("writeData finished") } def readData(in: DataInput): Unit = { if (wqLog.isDebugEnabled) wqLog.debug("readData started") val message_id = new MessageRef() message_id.readData(in) this.message_id = Option(message_id) try { this.uri = Option(new URI(in.readUTF()))} catch { case e:URISyntaxException => throw new IOException("while attempting to parse DeliveryRef.uri", e)} if (wqLog.isDebugEnabled) wqLog.debug("readData finished") } } object DeliveryRef { def apply(message_id:MessageRef, uri:URI):DeliveryRef = new DeliveryRef(Option(message_id), Option(uri)) }
webhooq/webhooq
src/main/scala/webhooq/model/dao/DeliveryRef.scala
Scala
apache-2.0
1,343
package cpup.mc.tweak.content import cpup.mc.lib.CPupModHolder import cpup.mc.lib.content.CPupRecipe import cpup.mc.tweak.CPupTweak trait BaseRecipe extends CPupRecipe with CPupModHolder[CPupTweak.type] { def mod = CPupTweak }
CoderPuppy/cpup-tweak-mc
src/main/scala/cpup/mc/tweak/content/BaseRecipe.scala
Scala
mit
230
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package kafka.consumer.storage import java.util.concurrent._ import java.util.concurrent.atomic._ import java.util.concurrent.locks._ class MemoryOffsetStorage extends OffsetStorage { val offsetAndLock = new ConcurrentHashMap[(Int, String), (AtomicLong, Lock)] def reserve(node: Int, topic: String): Long = { val key = (node, topic) if(!offsetAndLock.containsKey(key)) offsetAndLock.putIfAbsent(key, (new AtomicLong(0), new ReentrantLock)) val (offset, lock) = offsetAndLock.get(key) lock.lock offset.get } def commit(node: Int, topic: String, offset: Long) = { val (highwater, lock) = offsetAndLock.get((node, topic)) highwater.set(offset) lock.unlock offset } }
griddynamics/kafka
core/src/main/scala/kafka/consumer/storage/MemoryOffsetStorage.scala
Scala
apache-2.0
1,530
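A small usage sketch for MemoryOffsetStorage above: reserve takes the per-(node, topic) lock and returns the current offset, commit stores the new highwater mark and releases the lock. Node id, topic and offsets are made up.

import kafka.consumer.storage.MemoryOffsetStorage

object OffsetStorageSketch extends App {
  val storage = new MemoryOffsetStorage

  val current = storage.reserve(0, "events")   // 0 the first time a (node, topic) pair is seen
  // ... consume records up to current + 100 ...
  storage.commit(0, "events", current + 100)   // records the new offset and unlocks

  println(storage.reserve(0, "events"))        // 100
}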
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.expressions.validation import org.apache.flink.table.api.ValidationException import org.apache.flink.table.expressions.utils.MapTypeTestBase import org.junit.Test class MapTypeValidationTest extends MapTypeTestBase { @Test(expected = classOf[ValidationException]) def testWrongKeyType(): Unit = { testSqlApi("f2[12]", "FAIL") } @Test(expected = classOf[ValidationException]) def testUnsupportedComparisonType(): Unit = { testSqlApi("f6 <> f2", "FAIL") } @Test(expected = classOf[ValidationException]) def testEmptyMap(): Unit = { testSqlApi("MAP[]", "FAIL") } @Test(expected = classOf[ValidationException]) def testUnsupportedMapImplicitTypeCastSql(): Unit = { testSqlApi("MAP['k1', 'string', 'k2', 12]", "FAIL") } }
ueshin/apache-flink
flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/expressions/validation/MapTypeValidationTest.scala
Scala
apache-2.0
1,601
/* TestUtils: Lots of implicit conversions to easy Denotation tests * * The Denotation intermediate representation is fairly verbose, and fairly painful * to write when constructing unit tests. TestUtils provides a large number of * implicit conversions for making this easier, such as converting the scala literal * 7 into IntLit(7,"7",SRange.unknown). */ package tarski import utility.Locations._ import utility.Utility._ import org.apache.commons.lang.StringEscapeUtils._ import tarski.AST._ import tarski.Arounds._ import tarski.Base._ import tarski.Denotations._ import tarski.Environment.{Env, PlaceInfo} import tarski.Items._ import tarski.Tokens._ import tarski.Types._ import tarski.Mods._ import scala.language.implicitConversions object TestUtils { // Location implicit conversions private val r = SRange.unknown private val a = SGroup.unknown implicit def toLoc[A](x: A): Loc[A] = Loc(x,r) implicit def toGrouped[A](x: A): Grouped[A] = Grouped(x,a) implicit def toGroupLocs(n: Int): List[SGroup] = List.fill(n)(a) // Empty lists implicit def toEmptyList(xs: List[Nothing]): EmptyList.type = EmptyList // AST implicit conversions implicit def toAExp(i: Int): AExp = IntALit(i.toString,r) implicit def toAExp(s: String): AExp = NameAExp(s,r) implicit def toAExp(b: Boolean): AExp = NameAExp(if (b) "true" else "false",r) implicit def toAExps(e: AExp): List[AExp] = List(e) implicit def toOAExp(e: AExp): Option[AExp] = Some(e) implicit def toOAExp[A](e: A)(implicit to: A => AExp): Option[AExp] = Some(to(e)) implicit def toAStmt(e: AExp): AStmt = ExpAStmt(e) implicit def toAStmts(e: AExp): List[AStmt] = List(ExpAStmt(e)) implicit def toAStmts(s: AStmt): List[AStmt] = List(s) implicit def toAStmtsC(e: AExp): CommaList[AStmt] = SingleList(ExpAStmt(e)) implicit def toAStmtsC(s: AStmt): CommaList[AStmt] = SingleList(s) implicit def toAExp(t: LangType): AExp = NameAExp(t.name,r) implicit def toAExps[A](xs: KList[A])(implicit to: A => AExp): KList[AExp] = xs map to implicit def toAExps[A](x: A)(implicit to: A => AExp): SingleList[AExp] = SingleList(to(x)) implicit def toAVarDecls(v: AVarDecl): KList[AVarDecl] = SingleList(v) implicit def toMods(m: Mod): Mods = List(Loc(m,r)) // Denotation implicit conversions implicit def toExp(b: Boolean): Exp = BooleanLit(b,r) implicit def toExp(i: Int): Exp = IntLit(i,i.toString,r) implicit def toExp(i: Long): Exp = LongLit(i,s"${i}L",r) implicit def toExp(c: Char): Exp = CharLit(c,"'"+escapeJava(c.toString)+"'",r) implicit def toExp(d: Double): Exp = DoubleLit(d,d.toString,r) implicit def toExp(s: String): Exp = StringLit(s,'"'+escapeJava(s)+'"',r) implicit def toExp(x: Local): Exp = LocalExp(x,r) implicit def toExp(x: ThisOrSuper): Exp = ThisOrSuperExp(x,r) implicit def toExps[A](xs: List[A])(implicit to: A => Exp): List[Exp] = xs map to implicit def toExps(e: Exp): List[Exp] = List(e) implicit def toOExp[A](x: A)(implicit to: A => Exp): Option[Exp] = Some(to(x)) implicit def toOExp(e: Exp): Option[Exp] = Some(e) // Callable implicit conversions implicit def toCall(x: MethodItem)(implicit env: Env): NotTypeApply = if (x.isStatic) MethodDen(None,x,r) else impossible // Type implicit conversions implicit def toType(c: ClassItem): ClassType = c.simple implicit def toTypeArgs[A](ts: List[A])(implicit to: A => TypeArg): List[TypeArg] = ts map to // Statement implicit conversions implicit def toStmt(e: StmtExp)(implicit env: Env): Stmt = ExpStmt(e,env) implicit def toStmt[A](x: A)(implicit to: A => StmtExp, env: Env): Stmt = ExpStmt(to(x),env) implicit def toStmts(e: 
StmtExp)(implicit env: Env): List[Stmt] = List(ExpStmt(e,env)) implicit def toStmts(s: Stmt): List[Stmt] = List(s) // Variable declarations, for statements, etc. implicit def toVarDecl[A](v: (Local,A))(implicit to: A => Exp, env: Env): VarDecl = VarDecl(v._1,r,Nil,Some(r,to(v._2)),env) implicit def toVarDecls[A](v: A)(implicit to: A => VarDecl): List[VarDecl] = List(to(v)) implicit def toForInit(n: List[Nothing])(implicit env: Env): ForInit = ForExps(Nil,r,env) implicit def toForInit(e: Exp)(implicit env: Env): ForInit = ForExps(List(e),r,env) // Inside a function with a bunch of locals def localEnv(locals: Item*): Env = { val X = NormalClassItem("XX", LocalPkg) val f = NormalMethodItem("ff", X, Nil, VoidType, Nil, false) Env(Array(f,X) ++ locals, Map((f,2),(X,2)) ++ locals.map((_,1)).toMap[Item,Int], PlaceInfo(f)) } def localEnvWithBase(locals: Item*): Env = { val X = NormalClassItem("XX", LocalPkg) val f = NormalMethodItem("ff", X, Nil, VoidType, Nil, false) testEnv.extend(Array(f,X) ++ locals, Map((f,2),(X,2)) ++ locals.map((_,1)).toMap[Item,Int]) .move(PlaceInfo(f)) } def assertIn[A](x: A, xs: Set[A]): Unit = if (!xs.contains(x)) throw new AssertionError("assertIn failed:\\nx = "+x+"\\nxs = "+xs.mkString("\\n ")) private def noClean(s: String) = s def assertSetsEqual[A](exp: Traversable[A], got: Traversable[A], clean: String => String = noClean): Unit = { def s(n: Name, xs: Set[A]) = f"\\n$n%-7s = ${xs map (x => clean(x.toString)) mkString "\\n "}" val e = exp.toSet val g = got.toSet if (e != g) throw new AssertionError("assertSetsEqual failed:" +s("exp",e)+s("got",g)+s("exp-got",e--g)+s("got-exp",g--e)) } }
eddysystems/eddy
tests/src/tarski/TestUtils.scala
Scala
bsd-2-clause
5,414
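A sketch of what the implicit conversions above buy a test; it is illustrative only and relies solely on conversions defined in TestUtils being in scope, so literals become denotations without spelling out the IntLit/StringLit/SRange boilerplate.

import tarski.TestUtils._
import tarski.Denotations.Exp

object TestUtilsSketch {
  // 7 expands to IntLit(7, "7", SRange.unknown) via the toExp conversion.
  val seven: Exp = 7
  // Strings become StringLit carrying their Java-escaped show text.
  val greeting: Exp = "hi"
  // Values also lift into Option[Exp] where an API expects one.
  val flag: Option[Exp] = true
}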
package com.teambytes.shadow import akka.actor.{Status, ActorRef, Actor, ActorLogging} import akka.util.Timeout import spray.can.Http import spray.http._ trait ClientActor extends Actor with ActorLogging { protected implicit def timeout: Timeout protected def host: String protected def port: Int protected def connecting(commander: ActorRef, request: HttpRequest): Receive = { case _: Http.Connected => // once connected, we can send the request across the connection sender() ! request context.become(waitingForResponse(commander)) case Http.CommandFailed(Http.Connect(address, _, _, _, _)) => log.warning("Could not connect to {}", address) commander ! Status.Failure(new RuntimeException("Connection error")) context.stop(self) } protected def waitingForResponse(commander: ActorRef): Receive = { case response @ HttpResponse(status, entity, _, _) => log.info("Connection-Level API: received {} response with {} bytes", status, entity.data.length) sender() ! Http.Close context.become(waitingForClose(commander, response)) case ev @ (Http.SendFailed(_) | Timedout(_)) => log.warning("Received {}", ev) commander ! Status.Failure(new RuntimeException("Request error")) context.stop(self) } protected def waitingForClose(commander: ActorRef, response: HttpResponse): Receive = { case ev: Http.ConnectionClosed => log.debug("Connection closed ({})", ev) commander ! response context.stop(self) case Http.CommandFailed(Http.Close) => log.warning("Could not close connection") commander ! Status.Failure(new RuntimeException("Connection close error")) context.stop(self) } protected def updateRequest(request: HttpRequest, updateHeader: Boolean = true): HttpRequest = { val updatedHeaders = if(updateHeader) { request.headers.filter(_.isNot(HttpHeaders.Host.lowercaseName)).+:(HttpHeaders.Host(host, if (port == 80) 0 else port)) } else { request.headers } request.copy( uri = request.uri.copy(authority = new Uri.Authority(host = Uri.Host(host), port = port)) ).withHeaders(updatedHeaders) } }
grahamar/shadow
src/main/scala/com/teambytes/shadow/ClientActor.scala
Scala
mit
2,199
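A hedged sketch of a concrete forwarding actor built on the ClientActor trait above. The host name is hypothetical, and the spray-can connection bootstrap (IO(Http) ! Http.Connect) is assumed from the library's usual flow rather than from this repository.

import akka.actor.ActorRef
import akka.io.IO
import akka.util.Timeout
import spray.can.Http
import spray.http.HttpRequest
import scala.concurrent.duration._
import com.teambytes.shadow.ClientActor

class ShadowClient(commander: ActorRef, request: HttpRequest) extends ClientActor {
  protected implicit val timeout: Timeout = Timeout(10.seconds)
  protected val host = "shadow.example.com"   // hypothetical target
  protected val port = 80

  // Open the connection, then let the trait's connecting/waiting states take over.
  IO(Http)(context.system) ! Http.Connect(host, port)
  def receive = connecting(commander, updateRequest(request))
}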
/* * Copyright 2021 HM Revenue & Customs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package models.businessmatching import org.scalatestplus.mockito.MockitoSugar import jto.validation.{Invalid, Path, Valid} import jto.validation.ValidationError import models.DateOfChange import org.joda.time.LocalDate import play.api.i18n.Messages import play.api.libs.json._ import utils.AmlsSpec class BusinessActivitiesSpec extends AmlsSpec with MockitoSugar { import jto.validation.forms.Rules._ "The BusinessActivities model" must { "successfully validate" when { "a few check boxes are selected" in { val model1 = Map("businessActivities[]" -> Seq("04", "01", "03")) BusinessActivities.formReads.validate(model1) must be(Valid(BusinessActivities(Set(EstateAgentBusinessService, AccountancyServices, BillPaymentServices)))) val model2 = Map("businessActivities[]" -> Seq("05", "06", "07")) BusinessActivities.formReads.validate(model2) must be(Valid(BusinessActivities(Set(HighValueDealing, MoneyServiceBusiness, TrustAndCompanyServices)))) BusinessActivities.formReads.validate(Map("businessActivities[]" -> Seq("08"))) must be(Valid(BusinessActivities(Set(TelephonePaymentService)))) } "residential business activity check box is selected" in { val model = Map("businessActivities" -> Seq("08")) BusinessActivities.formReads.validate(model) must be(Valid(BusinessActivities(Set(TelephonePaymentService)))) } } "fail validation" when { "given missing data represented by an empty Map" in { BusinessActivities.formReads.validate(Map.empty) must be(Invalid(Seq((Path \ "businessActivities") -> Seq(ValidationError("error.required.bm.register.service"))))) } "given invalid data" in { val model = Map("businessActivities[]" -> Seq("01", "99", "04")) BusinessActivities.formReads.validate(model) must be(Invalid(Seq((Path \ "businessActivities" \ 1 \ "businessActivities") -> Seq(ValidationError("error.invalid"))))) } } "write correct data for businessActivities value" when { "additionalActivities are not present" when { "single activities selected" in { BusinessActivities.formWrites.writes(BusinessActivities(Set(AccountancyServices))) must be(Map("businessActivities[]" -> Seq("01"))) } "multiple activities selected" in { BusinessActivities.formWrites.writes(BusinessActivities(Set(TelephonePaymentService, TrustAndCompanyServices, HighValueDealing))) must be(Map("businessActivities[]" -> Seq("08", "07", "05"))) } } "additionalActivities are present" when { "single activities selected" in { BusinessActivities.formWrites.writes(BusinessActivities(Set(AccountancyServices), Some(Set(HighValueDealing)))) must be(Map("businessActivities[]" -> Seq("05"))) } "multiple activities selected" in { BusinessActivities.formWrites.writes(BusinessActivities( Set(TelephonePaymentService, TrustAndCompanyServices, HighValueDealing), Some(Set(AccountancyServices, TelephonePaymentService)) )) must be(Map("businessActivities[]" -> Seq("01", "08"))) } } } "get the value for each activity type" in { BusinessActivities.getValue(EstateAgentBusinessService) must be("04") 
BusinessActivities.getValue(AccountancyServices) must be("01") BusinessActivities.getValue(HighValueDealing) must be("05") BusinessActivities.getValue(MoneyServiceBusiness) must be("06") BusinessActivities.getValue(TrustAndCompanyServices) must be("07") BusinessActivities.getValue(TelephonePaymentService) must be("08") } "get the message for each activity type" in { AccountancyServices.getMessage(false) must be(Messages("businessmatching.registerservices.servicename.lbl.01")) ArtMarketParticipant.getMessage(false) must be(Messages("businessmatching.registerservices.servicename.lbl.02")) BillPaymentServices.getMessage(false) must be(Messages("businessmatching.registerservices.servicename.lbl.03")) EstateAgentBusinessService.getMessage(false) must be(Messages("businessmatching.registerservices.servicename.lbl.04")) HighValueDealing.getMessage(false) must be(Messages("businessmatching.registerservices.servicename.lbl.05")) MoneyServiceBusiness.getMessage(false) must be(Messages("businessmatching.registerservices.servicename.lbl.06")) TrustAndCompanyServices.getMessage(false) must be(Messages("businessmatching.registerservices.servicename.lbl.07")) TelephonePaymentService.getMessage(false) must be(Messages("businessmatching.registerservices.servicename.lbl.08")) } "get the phrased message for each activity type" in { AccountancyServices.getMessage(true) must be(Messages("businessmatching.registerservices.servicename.lbl.01.phrased")) ArtMarketParticipant.getMessage(true) must be(Messages("businessmatching.registerservices.servicename.lbl.02.phrased")) BillPaymentServices.getMessage(true) must be(Messages("businessmatching.registerservices.servicename.lbl.03.phrased")) EstateAgentBusinessService.getMessage(true) must be(Messages("businessmatching.registerservices.servicename.lbl.04.phrased")) HighValueDealing.getMessage(true) must be(Messages("businessmatching.registerservices.servicename.lbl.05.phrased")) MoneyServiceBusiness.getMessage(true) must be(Messages("businessmatching.registerservices.servicename.lbl.06.phrased")) TrustAndCompanyServices.getMessage(true) must be(Messages("businessmatching.registerservices.servicename.lbl.07.phrased")) TelephonePaymentService.getMessage(true) must be(Messages("businessmatching.registerservices.servicename.lbl.08.phrased")) } "JSON validation" when { "additionalActivities are not present" must { "successfully validate given an enum value" in { val json = Json.obj("businessActivities" -> Seq("05", "06", "07")) Json.fromJson[BusinessActivities](json) must be(JsSuccess(BusinessActivities(Set(MoneyServiceBusiness, TrustAndCompanyServices, TelephonePaymentService)))) Json.fromJson[BusinessActivities](Json.obj("businessActivities" -> Seq("01", "02", "03"))) must be(JsSuccess(BusinessActivities(Set(AccountancyServices, BillPaymentServices, EstateAgentBusinessService)))) Json.fromJson[BusinessActivities](Json.obj("businessActivities" -> Seq("04"))) must be(JsSuccess(BusinessActivities(Set(HighValueDealing)))) Json.fromJson[BusinessActivities](Json.obj("businessActivities" -> Seq("08"))) must be(JsSuccess(BusinessActivities(Set(ArtMarketParticipant)))) } "fail when on invalid data" in { Json.fromJson[BusinessActivities](Json.obj("businessActivity" -> "01")) must be(JsError((JsPath \ "businessActivities") -> play.api.libs.json.JsonValidationError("error.path.missing"))) } } "additionalActivities are present" must { "successfully validate given an enum value" in { Json.fromJson[BusinessActivities](Json.obj("businessActivities" -> Seq("05", "06", "07"), 
"additionalActivities" -> Seq("01", "02"))) must be(JsSuccess( BusinessActivities( Set(MoneyServiceBusiness, TrustAndCompanyServices, TelephonePaymentService), Some(Set(AccountancyServices, BillPaymentServices)) ) )) Json.fromJson[BusinessActivities](Json.obj("businessActivities" -> Seq("01", "02", "03"), "additionalActivities" -> Seq("04", "05", "06"))) must be(JsSuccess( BusinessActivities( Set(AccountancyServices, BillPaymentServices, EstateAgentBusinessService), Some(Set(HighValueDealing, MoneyServiceBusiness, TrustAndCompanyServices)) ) )) Json.fromJson[BusinessActivities](Json.obj("businessActivities" -> Seq("04"), "additionalActivities" -> Seq("07"))) must be(JsSuccess(BusinessActivities(Set(HighValueDealing), Some(Set(TelephonePaymentService))))) } "fail given invalid data" in { Json.fromJson[BusinessActivities](Json.obj("businessActivities" -> Seq("01"), "additionalActivities" -> Seq("11"))) must be(JsError((JsPath \ "additionalActivities") -> play.api.libs.json.JsonValidationError("error.invalid"))) } } "removeActivities are present" must { "successfully validate given an enum value" in { Json.fromJson[BusinessActivities](Json.obj("businessActivities" -> Seq("05", "06", "07"), "removeActivities" -> Seq("01", "02"))) must be(JsSuccess( BusinessActivities( Set(MoneyServiceBusiness, TrustAndCompanyServices, TelephonePaymentService), None, Some(Set(AccountancyServices, BillPaymentServices)) ) )) Json.fromJson[BusinessActivities](Json.obj("businessActivities" -> Seq("01", "02", "03"), "removeActivities" -> Seq("04", "05", "06"))) must be(JsSuccess( BusinessActivities( Set(AccountancyServices, BillPaymentServices, EstateAgentBusinessService), None, Some(Set(HighValueDealing, MoneyServiceBusiness, TrustAndCompanyServices)) ) )) Json.fromJson[BusinessActivities](Json.obj("businessActivities" -> Seq("04"), "removeActivities" -> Seq("07"))) must be(JsSuccess(BusinessActivities(Set(HighValueDealing), None, Some(Set(TelephonePaymentService))))) } "fail given invalid data" in { Json.fromJson[BusinessActivities](Json.obj("businessActivities" -> Seq("01"), "removeActivities" -> Seq("11"))) must be(JsError((JsPath \ "removeActivities") -> play.api.libs.json.JsonValidationError("error.invalid"))) } } "dateOfChange is present" must { "successfully valida given a date" in { val json = Json.obj( "businessActivities" -> Seq("05", "06", "07"), "dateOfChange" -> "1990-02-24" ) Json.fromJson[BusinessActivities](json) must be(JsSuccess(BusinessActivities( Set(MoneyServiceBusiness, TrustAndCompanyServices, TelephonePaymentService), None, None, Some(DateOfChange(new LocalDate(1990, 2,24))) ))) } } } "validate json write" when { "additionalActivities are not present" in { Json.toJson(BusinessActivities(Set(HighValueDealing, EstateAgentBusinessService))) must be(Json.obj("businessActivities" -> Seq("04", "03"))) } "additionalActivities are present" in { Json.toJson(BusinessActivities(Set(HighValueDealing, EstateAgentBusinessService), Some(Set(AccountancyServices, BillPaymentServices)))) must be(Json.obj("businessActivities" -> Seq("04", "03"), "additionalActivities" -> Seq("01", "02"))) } "removeActivities are present" in { Json.toJson(BusinessActivities(Set(HighValueDealing, EstateAgentBusinessService), None, Some(Set(AccountancyServices, BillPaymentServices)))) must be(Json.obj("businessActivities" -> Seq("04", "03"), "removeActivities" -> Seq("01", "02"))) } "dateOfChange is present" in { Json.toJson(BusinessActivities( Set(HighValueDealing, EstateAgentBusinessService), None, None, Some(DateOfChange(new 
LocalDate(1990, 2,24))) )) must be(Json.obj( "businessActivities" -> Seq("04", "03"), "dateOfChange" -> "1990-02-24" )) } } "throw error for invalid data" in { Json.fromJson[BusinessActivities](Json.obj("businessActivities" -> Seq(JsString("20")))) must be(JsError(JsPath \ "businessActivities", play.api.libs.json.JsonValidationError("error.invalid"))) } } "The hasBusinessOrAdditionalActivity method" must { "return true" when { "only businessActivities contains the activity" in { val model = BusinessActivities(Set(AccountancyServices, MoneyServiceBusiness)) model.hasBusinessOrAdditionalActivity(AccountancyServices) mustBe true } "only additionalActivities contains the activity" in { val model = BusinessActivities(Set(AccountancyServices), Some(Set(HighValueDealing))) model.hasBusinessOrAdditionalActivity(HighValueDealing) mustBe true } } "return false" when { "neither businessActivities or additionalActivities contains the activity" in { val model = BusinessActivities(Set(AccountancyServices), Some(Set(MoneyServiceBusiness))) model.hasBusinessOrAdditionalActivity(HighValueDealing) mustBe false } } } }
hmrc/amls-frontend
test/models/businessmatching/BusinessActivitiesSpec.scala
Scala
apache-2.0
13,670
package test.utils.scalacheck import scodec.bits.BitVector import org.scalacheck._ object Generators { def genLSB() = for { n ← Gen.choose(1, 0x7f) } yield BitVector(n.toByte) def genMSB() = for { n ← Gen.choose(0x80, 0xff) tail ← genBV() } yield BitVector(n.toByte) ++ tail def genBV(): Gen[BitVector] = Gen.resize(8, Gen.oneOf(genMSB(), genLSB())) }
hardikamal/actor-platform
actor-server/actor-tests/src/test/scala/test/utils/scalacheck/Generators.scala
Scala
mit
384
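A tiny usage sketch for the generators above: genBV builds non-empty, byte-aligned BitVectors, so a ScalaCheck property can lean on both facts. The property name is made up.

import org.scalacheck.Prop.forAll
import org.scalacheck.Properties
import test.utils.scalacheck.Generators

object GeneratorsSpec extends Properties("Generators.genBV") {
  // Every byte generated by genLSB/genMSB contributes exactly 8 bits,
  // and genLSB always produces at least one byte, so the vector is never empty.
  property("byte aligned and non-empty") = forAll(Generators.genBV()) { bv =>
    bv.nonEmpty && bv.size % 8 == 0
  }
}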
/* * Copyright (C) 2012 The Regents of The University California. * All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package shark.memstore2 import java.io.{DataInput, DataOutput} import java.nio.ByteBuffer import java.util.{List => JList} import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory import org.apache.hadoop.hive.serde2.objectinspector.StructField import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector import org.apache.hadoop.io.Writable import shark.memstore2.column.ColumnBuilder /** * Used to build a TablePartition. This is used in the serializer to convert a * partition of data into columnar format and to generate a TablePartition. */ class TablePartitionBuilder(oi: StructObjectInspector, initialColumnSize: Int) extends Writable { var numRows: Long = 0 val fields: JList[_ <: StructField] = oi.getAllStructFieldRefs val columnBuilders = Array.tabulate[ColumnBuilder[_]](fields.size) { i => val columnBuilder = ColumnBuilder.create(fields.get(i).getFieldObjectInspector) columnBuilder.initialize(initialColumnSize) columnBuilder } def incrementRowCount() { numRows += 1 } def append(columnIndex: Int, o: Object, oi: ObjectInspector) { columnBuilders(columnIndex).append(o, oi) } def stats: TablePartitionStats = new TablePartitionStats(columnBuilders.map(_.stats), numRows) def build: TablePartition = new TablePartition(numRows, columnBuilders.map(_.build)) // We don't use these, but want to maintain Writable interface for SerDe override def write(out: DataOutput) {} override def readFields(in: DataInput) {} }
sameeragarwal/blinkdb_dev
src/main/scala/shark/memstore2/TablePartitionBuilder.scala
Scala
apache-2.0
2,346
package hooktest object Locale { private val logger = Logger.getLogger() def getLanguage(): String = { java.util.Locale.getDefault().getLanguage() match { case "ja" => "ja" // Japanese case _ => "en" // Other } } def convLang(lang: String, msg: String): String = { lang match { case DataID.English => msg case DataID.Japanese => LocaleData.get_ja(msg) case _ => { logger.warn(s"Not supported language: $lang") msg } } } } private object LocaleData { private val JapaneseMessage = Map( "Double Launch?" -> "二重起動していませんか?", "Failed mouse hook install" -> "マウスフックのインストールに失敗しました", "Properties does not exist" -> "設定ファイルが存在しません", "Unknown Command" -> "不明なコマンド", "Command Error" -> "コマンドのエラー", "Error" -> "エラー", "Question" -> "質問", "Stopped" -> "停止中", "Runnable" -> "実行中", "Properties Name" -> "設定ファイル名", "Add Properties" -> "設定ファイル追加", "Invalid Name" -> "無効な名前", "Invalid Number" -> "無効な数値", "Name Error" -> "名前のエラー", "Delete properties" -> "設定ファイル削除", "Set Text" -> "テキストを設定", "Trigger" -> "トリガー", "LR (Left <<-->> Right)" -> "左右 (左 <<-->> 右)", "Left (Left -->> Right)" -> "左 (左 -->> 右)", "Right (Right -->> Left)" -> "右 (右 -->> 左)", "Middle" -> "中央", "X1" -> "X1 (拡張1)", "X2" -> "X2 (拡張2)", "LeftDrag" -> "左ドラッグ", "RightDrag" -> "右ドラッグ", "MiddleDrag" -> "中央ドラッグ", "X1Drag" -> "X1 ドラッグ", "X2Drag" -> "X2 ドラッグ", "None" -> "なし", "Send MiddleClick" -> "中央クリック送信", "Dragged Lock" -> "ドラッグ後固定", "Keyboard" -> "キーボード", "ON / OFF" -> "有効 / 無効", "VK_CONVERT (Henkan)" -> "VK_CONVERT (変換)", "VK_NONCONVERT (Muhenkan)" -> "VK_NONCONVERT (無変換)", "VK_LWIN (Left Windows)" -> "VK_LWIN (左 Windows)", "VK_RWIN (Right Windows)" -> "VK_RWIN (右 Windows)", "VK_LSHIFT (Left Shift)" -> "VK_LSHIFT (左 Shift)", "VK_RSHIFT (Right Shift)" -> "VK_RSHIFT (右 Shift)", "VK_LCONTROL (Left Ctrl)" -> "VK_LCONTROL (左 Ctrl)", "VK_RCONTROL (Right Ctrl)" -> "VK_RCONTROL (右 Ctrl)", "VK_LMENU (Left Alt)" -> "VK_LMENU (左 Alt)", "VK_RMENU (Right Alt)" -> "VK_RMENU (右 Alt)", "Accel Table" -> "加速テーブル", "Custom Table" -> "カスタムテーブル", "Priority" -> "プロセス優先度", "High" -> "高", "Above Normal" -> "通常以上", "Normal" -> "通常", "Set Number" -> "パラメーターを設定", "pollTimeout" -> "同時押し判定時間", "scrollLocktime" -> "スクロールモード固定判定時間", "verticalThreshold" -> "垂直スクロール閾値", "horizontalThreshold" -> "水平スクロール閾値", "Real Wheel Mode" -> "擬似ホイールモード", "wheelDelta" -> "ホイール回転値", "vWheelMove" -> "垂直ホイール移動値", "hWheelMove" -> "水平ホイール移動値", "quickFirst" -> "初回の素早い反応", "quickTurn" -> "折り返しの素早い反応", "VH Adjuster" -> "垂直/水平スクロール調整", "Fixed" -> "固定", "Switching" -> "切り替え", "firstPreferVertical" -> "初回垂直スクロール優先", "firstMinThreshold" -> "初回判定閾値", "switchingThreshold" -> "切り替え閾値", "Properties" -> "設定ファイル", "Reload" -> "再読み込み", "Save" -> "保存", "Open Dir" -> "フォルダを開く", "Add" -> "追加", "Delete" -> "削除", "Default" -> "デフォルト", "Cursor Change" -> "カーソル変更", "Horizontal Scroll" -> "水平スクロール", "Reverse Scroll (Flip)" -> "垂直スクロール反転", "Swap Scroll (V.H)" -> "垂直/水平スクロール入れ替え", "Pass Mode" -> "制御停止", "Language" -> "言語", "English" -> "英語", "Japanese" -> "日本語", "Info" -> "情報", "Name" -> "名前", "Version" -> "バージョン", "Exit" -> "終了", ) def get_ja(msg: String): String = { JapaneseMessage.get(msg) match { case Some(ja_msg) => ja_msg case None => msg } } }
ykon/w10wheel
src/main/scala/hooktest/Locale.scala
Scala
mit
5,107
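Illustrative calls into the Locale helper above; it assumes the DataID language identifiers that convLang matches on are available at the call site.

import hooktest.{DataID, Locale}

object LocaleSketch extends App {
  // "ja" only when the JVM default locale is Japanese, otherwise "en".
  println(Locale.getLanguage())
  // Looks the message up in the Japanese table above.
  println(Locale.convLang(DataID.Japanese, "Error"))   // エラー
}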
/* * Copyright 2006 - 2013 * Stefan Balev <stefan.balev@graphstream-project.org> * Julien Baudry <julien.baudry@graphstream-project.org> * Antoine Dutot <antoine.dutot@graphstream-project.org> * Yoann Pigné <yoann.pigne@graphstream-project.org> * Guilhelm Savin <guilhelm.savin@graphstream-project.org> * * This file is part of GraphStream <http://graphstream-project.org>. * * GraphStream is a library whose purpose is to handle static or dynamic * graph, create them from scratch, file or any source and display them. * * This program is free software distributed under the terms of two licenses, the * CeCILL-C license that fits European law, and the GNU Lesser General Public * License. You can use, modify and/ or redistribute the software under the terms * of the CeCILL-C license as circulated by CEA, CNRS and INRIA at the following * URL <http://www.cecill.info> or under the terms of the GNU LGPL as published by * the Free Software Foundation, either version 3 of the License, or (at your * option) any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT ANY * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A * PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * * The fact that you are presently reading this means that you have had * knowledge of the CeCILL-C and LGPL licenses and that you accept their terms. */ package org.graphstream.ui.j2dviewer.renderer.shape.swing //import java.awt._ //import java.awt.geom._ import org.graphstream.ui.geom._ import org.graphstream.ui.graphicGraph._ import org.graphstream.ui.graphicGraph.stylesheet._ import org.graphstream.ui.graphicGraph.stylesheet.StyleConstants.Units import org.graphstream.ui.j2dviewer._ import org.graphstream.ui.j2dviewer.renderer._ import org.graphstream.ui.util._ import org.graphstream.ui.j2dviewer.renderer.shape._ import scala.math._ /** * Base for shapes centered around one point. */ trait AreaShape extends Shape with Area with Fillable with Strokable with Shadowable with Decorable { def configureForGroup(bck:Backend, style:Style, camera:Camera) { configureFillableForGroup(bck, style, camera) configureStrokableForGroup(style, camera) configureShadowableForGroup(style, camera) configureDecorableForGroup(style, camera) configureAreaForGroup(style, camera) } def configureForElement(bck:Backend, element:GraphicElement, skel:Skeleton, camera:Camera) { configureFillableForElement(element.getStyle, camera, element) configureDecorableForElement(bck, camera, element, skel) configureAreaForElement(bck, camera, skel.asInstanceOf[AreaSkeleton], element, theDecor) } } trait AreaOnConnectorShape extends Shape with AreaOnConnector with Fillable with Strokable with Shadowable { def configureForGroup(bck:Backend, style:Style, camera:Camera) { configureFillableForGroup(bck, style, camera) configureStrokableForGroup(style, camera) configureShadowableForGroup(style, camera) configureAreaOnConnectorForGroup(style, camera) } def configureForElement(bck:Backend, element:GraphicElement, skel:Skeleton, camera:Camera) { configureFillableForElement(element.getStyle, camera, element) configureAreaOnConnectorForElement(element.asInstanceOf[GraphicEdge], element.getStyle, camera) } } /** * Base for shapes rendered between two points. 
*/ trait ConnectorShape extends Shape with Connector with Decorable { def configureForGroup(bck:Backend, style:Style, camera:Camera) { configureDecorableForGroup(style, camera) configureConnectorForGroup(style, camera) } def configureForElement(bck:Backend, element:GraphicElement, skel:Skeleton, camera:Camera) { configureDecorableForElement(bck, camera, element, skel) configureConnectorForElement(camera, element.asInstanceOf[GraphicEdge], skel.asInstanceOf[ConnectorSkeleton] /* TODO check this ! */) } } trait LineConnectorShape extends ConnectorShape with FillableLine with StrokableLine with ShadowableLine { override def configureForGroup(bck:Backend, style:Style, camera:Camera) { super.configureForGroup(bck, style, camera) configureFillableLineForGroup(bck, style, camera, theSize) configureStrokableLineForGroup(style, camera) configureShadowableLineForGroup(style, camera) } override def configureForElement(bck:Backend, element:GraphicElement, skel:Skeleton, camera:Camera) { configureFillableLineForElement(element.getStyle, camera, element) super.configureForElement(bck, element, skel, camera) } } trait AreaConnectorShape extends ConnectorShape with Fillable with Strokable with Shadowable { override def configureForGroup(bck:Backend, style:Style, camera:Camera) { configureFillableForGroup(bck, style, camera) configureStrokableForGroup(style, camera) configureShadowableForGroup(style, camera) super.configureForGroup(bck, style, camera) } override def configureForElement(bck:Backend, element:GraphicElement, skel:Skeleton, camera:Camera) { configureFillableForElement(element.getStyle, camera, element) super.configureForElement(bck, element, skel, camera) }} trait RectangularAreaShape extends AreaShape { val theShape:java.awt.geom.RectangularShape protected def make(bck:Backend, camera:Camera) { val w = theSize.x val h = theSize.y theShape.setFrame(theCenter.x-w/2, theCenter.y-h/2, w, h) } protected def makeShadow(bck:Backend, camera:Camera) { val x = theCenter.x + theShadowOff.x val y = theCenter.y + theShadowOff.y val w = theSize.x + theShadowWidth.x * 2 val h = theSize.y + theShadowWidth.y * 2 theShape.setFrame(x-w/2, y-h/2, w, h) } def renderShadow(bck:Backend, camera:Camera, element:GraphicElement, skel:Skeleton) { makeShadow(bck, camera) cast(bck.graphics2D, theShape) } def render(bck:Backend, camera:Camera, element:GraphicElement, skel:Skeleton ) { make(bck, camera ) fill(bck.graphics2D, theShape, camera) stroke(bck.graphics2D, theShape) decorArea(bck, camera, skel.iconAndText, element, theShape) } } abstract class OrientableRectangularAreaShape extends RectangularAreaShape with Orientable { var p:Point3 = null var angle = 0.0 var w = 0.0 var h = 0.0 var oriented = false override def configureForGroup(bck:Backend, style:Style, camera:Camera) { super.configureForGroup(bck, style, camera) configureOrientableForGroup(style, camera) oriented = (style.getSpriteOrientation != StyleConstants.SpriteOrientation.NONE) } override def configureForElement(bck:Backend, element:GraphicElement, skel:Skeleton, camera:Camera) { super.configureForElement(bck, element, skel, camera) configureOrientableForElement(camera, element.asInstanceOf[GraphicSprite] /* Check This XXX TODO !*/); } protected override def make(bck:Backend, camera:Camera) { make(bck, false, camera) } protected override def makeShadow(bck:Backend, camera:Camera) { make(bck, true, camera) } protected def make(bck:Backend, forShadow:Boolean, camera:Camera) { if (oriented) { val theDirection = new Vector2( target.x - theCenter.x, target.y - 
theCenter.y ) theDirection.normalize var x = theCenter.x var y = theCenter.y if( forShadow ) { x += theShadowOff.x y += theShadowOff.y } p = camera.transformGuToPx(x, y, 0) // Pass to pixels, the image will be drawn in pixels. angle = acos(theDirection.dotProduct( 1, 0 )) if( theDirection.y > 0 ) // The angle is always computed for acute angles angle = ( Pi - angle ) w = camera.metrics.lengthToPx(theSize.x, Units.GU) h = camera.metrics.lengthToPx(theSize.y, Units.GU) theShape.setFrame(0, 0, w, h) } else { if (forShadow) super.makeShadow(bck, camera) else super.make(bck, camera) } } override def renderShadow(bck:Backend, camera:Camera, element:GraphicElement, skel:Skeleton) { make(bck, true, camera) val g = bck.graphics2D if (oriented) { val Tx = g.getTransform val Tr = new java.awt.geom.AffineTransform Tr.translate( p.x, p.y ) // 3. Position the image at its position in the graph. Tr.rotate( angle ) // 2. Rotate the image from its center. Tr.translate( -w/2, -h/2 ) // 1. Position in center of the image. g.setTransform( Tr ) // An identity matrix. cast(g, theShape) g.setTransform( Tx ) // Restore the original transform } else { super.renderShadow(bck, camera, element, skel) } } override def render(bck:Backend, camera:Camera, element:GraphicElement, skel:Skeleton) { make(bck, false, camera) val g = bck.graphics2D if (oriented) { val Tx = g.getTransform val Tr = new java.awt.geom.AffineTransform Tr.translate( p.x, p.y ) // 3. Position the image at its position in the graph. Tr.rotate( angle ) // 2. Rotate the image from its center. Tr.translate( -w/2, -h/2 ) // 1. Position in center of the image. g.setTransform( Tr ) // An identity matrix. stroke(g, theShape) fill(g, theShape, camera) g.setTransform( Tx ) // Restore the original transform theShape.setFrame(theCenter.x-w/2, theCenter.y-h/2, w, h) decorArea(bck, camera, skel.iconAndText, element, theShape) } else { super.render(bck, camera, element, skel) } } } abstract class PolygonalShape extends AreaShape { var theShape = new java.awt.geom.Path2D.Double def renderShadow(bck:Backend, camera:Camera, element:GraphicElement, skel:Skeleton) { makeShadow(bck, camera) cast(bck.graphics2D, theShape) } def render(bck:Backend, camera:Camera, element:GraphicElement, skel:Skeleton) { val g = bck.graphics2D make(bck, camera) fill(g, theShape, camera) stroke(g, theShape) decorArea(bck, camera, skel.iconAndText, element, theShape) } } class LineShape extends LineConnectorShape { protected var theShapeL = new java.awt.geom.Line2D.Double protected var theShapeC = new java.awt.geom.CubicCurve2D.Double protected var theShape:java.awt.Shape = null // Command protected def make(bck:Backend, camera:Camera) { val from = skel.from val to = skel.to if( skel.isCurve ) { val ctrl1 = skel(1) val ctrl2 = skel(2) theShapeC.setCurve( from.x, from.y, ctrl1.x, ctrl1.y, ctrl2.x, ctrl2.y, to.x, to.y ) theShape = theShapeC } else { theShapeL.setLine( from.x, from.y, to.x, to.y ) theShape = theShapeL } } protected def makeShadow(bck:Backend, camera:Camera) { var x0 = skel.from.x + theShadowOff.x var y0 = skel.from.y + theShadowOff.y var x1 = skel.to.x + theShadowOff.x var y1 = skel.to.y + theShadowOff.y if( skel.isCurve ) { var ctrlx0 = skel(1).x + theShadowOff.x var ctrly0 = skel(1).y + theShadowOff.y var ctrlx1 = skel(2).x + theShadowOff.x var ctrly1 = skel(2).y + theShadowOff.y theShapeC.setCurve( x0, y0, ctrlx0, ctrly0, ctrlx1, ctrly1, x1, y1 ) theShape = theShapeC } else { theShapeL.setLine( x0, y0, x1, y1 ) theShape = theShapeL } } def renderShadow(bck:Backend, 
camera:Camera, element:GraphicElement, skel:Skeleton) { makeShadow(bck, camera) cast(bck.graphics2D, theShape) } def render(bck:Backend, camera:Camera, element:GraphicElement, skel:Skeleton) { val g = bck.graphics2D make(bck, camera) stroke(g, theShape ) fill(g, theSize, theShape) decorConnector(bck, camera, skel.iconAndText, element, theShape) } } /** * A cubic curve shape. */ class PolylineEdgeShape extends LineConnectorShape with ShowCubics { protected var theShape = new java.awt.geom.Path2D.Double // Command protected def make(bck:Backend, camera:Camera) { val n = skel.size theShape.reset theShape.moveTo(skel(0).x, skel(0).y) for(i <- 1 until n) { theShape.lineTo(skel(i).x, skel(i).y) } } protected def makeShadow(bck:Backend, camera:Camera) { } def renderShadow(bck:Backend, camera:Camera, element:GraphicElement, skel:Skeleton) { makeShadow(bck, camera) cast(bck.graphics2D, theShape) } def render(bck:Backend, camera:Camera, element:GraphicElement, skel:Skeleton) { val g = bck.graphics2D make(bck, camera) stroke(g, theShape) fill(g, theSize, theShape) decorConnector(bck, camera, skel.iconAndText, element, theShape) } }
prismsoul/gedgraph
sources/prismsoul.genealogy.gedgraph/gs-ui/org/graphstream/ui/j2dviewer/renderer/shape/swing/BaseShapes.scala
Scala
gpl-2.0
12,584
package se.gigurra.wallace.comm import scala.collection.mutable object TopicManager { trait Client[MessageType] { def post(topic: String, message: MessageType) def subscribed(topic: String) def unsubscribed(topic: String) } } class TopicManager[MessageType](topicFactory: String => Topic[MessageType]) { import TopicManager._ private val topics = new mutable.HashMap[String, Topic[MessageType]]() private val subscriptions = new mutable.HashMap[(Client[MessageType], String), Subscription[MessageType]]() def subscribe(client: Client[MessageType], topicName: String): Unit = { if (subscriptions.contains((client, topicName))) throw new RuntimeException(s"Client $client already subscribes to $topicName") val subscription = getOrCreateTopic(topicName).subscribe() subscriptions.put((client, topicName), subscription) client.subscribed(topicName) subscription.stream.foreach(client.post(topicName, _)) } def unsubscribe(client: Client[MessageType], topicName: String): Unit = { subscriptions.remove((client, topicName)) match { case Some(subscription) => subscription.unsubscribe() client.unsubscribed(topicName) case None => throw new RuntimeException(s"Client $client does not subscribe to $topicName") } } def post(topicName: String, message: MessageType): Unit = { getOrCreateTopic(topicName).publish(message) } private[this] def getOrCreateTopic(name: String): Topic[MessageType] = { topics.getOrElseUpdate(name, topicFactory(name)) } }
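// --- Hedged usage sketch (not part of the original file) ---------------------
// A minimal Client implementation plus the typical subscribe/post/unsubscribe
// call pattern. LoggingClient is a hypothetical name; the Topic/Subscription
// types backing a TopicManager live elsewhere in this package, so the factory
// in the commented call pattern is deliberately left abstract.
class LoggingClient extends TopicManager.Client[String] {
  def post(topic: String, message: String): Unit = println(s"[$topic] $message")
  def subscribed(topic: String): Unit = println(s"subscribed to $topic")
  def unsubscribed(topic: String): Unit = println(s"unsubscribed from $topic")
}
// val manager = new TopicManager[String](topicFactory)  // topicFactory supplied by the real application
// val client  = new LoggingClient
// manager.subscribe(client, "chat")    // calls client.subscribed("chat") and wires the topic stream
// manager.post("chat", "hello")        // delivered to every subscriber via client.post
// manager.unsubscribe(client, "chat")  // calls client.unsubscribed("chat")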
GiGurra/Wall-Ace
lib_comm/src/main/scala/se/gigurra/wallace/comm/TopicManager.scala
Scala
gpl-2.0
1,570
package spray.json
package lenses

/**
 * The read lens can extract child values out of a JsValue hierarchy. A read lens
 * is parameterized with a type constructor, which allows it to extract not only scalar
 * values but also sequences or optional values.
 * @tparam M the type constructor wrapping extracted values (scalar, optional or sequence results)
 */
trait ReadLens[M[_]] {
  /**
   * Given a parent JsValue, tries to extract the child value.
   * @return `Right(value)` if the lens read succeeds, `Left(error)` if the lens read fails.
   */
  def retr: JsValue ⇒ Validated[M[JsValue]]

  /**
   * Given a parent JsValue, extracts and tries to convert the JsValue into
   * a value of type `T`.
   */
  def tryGet[T: Reader](value: JsValue): Validated[M[T]]

  /**
   * Given a parent JsValue, extracts and converts a JsValue into a value of
   * type `T`, or throws an exception on failure.
   */
  def get[T: Reader](value: JsValue): M[T]

  /**
   * Lifts a predicate on the converted value of this lens up to the
   * parent level. The returned predicate will return false for values
   * which fail to read.
   */
  def is[U: Reader](f: U ⇒ Boolean): JsPred
}
savulchik/json-lenses
src/main/scala/spray/json/lenses/ReadLens.scala
Scala
apache-2.0
1,074
/* NSC -- new Scala compiler * Copyright 2005-2013 LAMP/EPFL * @author Paul Phillips */ package scala.tools.nsc package interpreter import scala.language.implicitConversions import scala.reflect.api.{Universe => ApiUniverse} import scala.reflect.runtime.{universe => ru} /** A class which the repl utilizes to expose predefined objects. * The base implementation is empty; the standard repl implementation * is StdReplVals. */ abstract class ReplVals { } class StdReplVals(final val r: ILoop) extends ReplVals { final lazy val repl = r final lazy val intp = r.intp final lazy val power = r.power final lazy val reader = r.in final lazy val vals = this final lazy val global: intp.global.type = intp.global final lazy val isettings = intp.isettings final lazy val completion = reader.completion final lazy val history = reader.history final lazy val phased = power.phased final lazy val analyzer = global.analyzer object treedsl extends { val global: intp.global.type = intp.global } with ast.TreeDSL { } final lazy val typer = analyzer.newTyper( analyzer.rootContext( power.unit("").asInstanceOf[analyzer.global.CompilationUnit] ) ) def lastRequest = intp.lastRequest class ReplImplicits extends power.Implicits2 { import intp.global.Symbol private val tagFn = ReplVals.mkCompilerTypeFromTag[intp.global.type](global) implicit def mkCompilerTypeFromTag(sym: Symbol) = tagFn(sym) } final lazy val replImplicits = new ReplImplicits def typed[T <: analyzer.global.Tree](tree: T): T = typer.typed(tree).asInstanceOf[T] } object ReplVals { /** Latest attempt to work around the challenge of foo.global.Type * not being seen as the same type as bar.global.Type even though * the globals are the same. Dependent method types to the rescue. */ def mkCompilerTypeFromTag[T <: Global](global: T) = { import global._ /** We can't use definitions.compilerTypeFromTag directly because we're passing * it to map and the compiler refuses to perform eta expansion on a method * with a dependent return type. (Can this be relaxed?) To get around this * I have this forwarder which widens the type and then cast the result back * to the dependent type. */ def compilerTypeFromTag(t: ApiUniverse # WeakTypeTag[_]): Global#Type = definitions.compilerTypeFromTag(t) class AppliedTypeFromTags(sym: Symbol) { def apply[M](implicit m1: ru.TypeTag[M]): Type = if (sym eq NoSymbol) NoType else appliedType(sym, compilerTypeFromTag(m1).asInstanceOf[Type]) def apply[M1, M2](implicit m1: ru.TypeTag[M1], m2: ru.TypeTag[M2]): Type = if (sym eq NoSymbol) NoType else appliedType(sym, compilerTypeFromTag(m1).asInstanceOf[Type], compilerTypeFromTag(m2).asInstanceOf[Type]) } (sym: Symbol) => new AppliedTypeFromTags(sym) } }
felixmulder/scala
src/repl/scala/tools/nsc/interpreter/ReplVals.scala
Scala
bsd-3-clause
3,080
package meerkat

import scala.reflect.BeanProperty

// configuration
object Config extends ul.Props {
  var serverIsRunning = false
  var server:ul.netx.ServerJetty = null

  // parsed command line arguments
  object Args extends ul.GetArgs {
    @BeanProperty var help = false
    @BeanProperty var exitTimeout = 0
    @BeanProperty var port = 8080
    @BeanProperty var browser = "default"
    @BeanProperty var browserDelay = 5000
    @BeanProperty var startPage = "index.html"
    @BeanProperty var staticDir = "./static"
    @BeanProperty var stopDelay = 1000
    @BeanProperty var socketPort = 502
    @BeanProperty var argv = ""
    @BeanProperty var fxTitle = "Meerkat"
    @BeanProperty var fxFullScreen = false
    @BeanProperty var fxNoClose = false
    @BeanProperty var fxNoResize = false
    @BeanProperty var fxNoMinMax = false
    @BeanProperty var fxMax = false

    props.attrs ++= List(
      new ul.PropAttr("help","/?;-h;--help","show help page")
      ,new ul.PropAttr("port","-p;--port","server port")
      ,new ul.PropAttr("exitTimeout","-eto;--exit-timeout","server exit timeout, sec")
      ,new ul.PropAttr("browser","-br;--browser","started browser. none - don't start, default - start default, fx - JavaFX WebView")
      ,new ul.PropAttr("browserDelay","-bd;--browser-delay","browser start delay, ms")
      ,new ul.PropAttr("startPage","-sp;--start-page","start page")
      ,new ul.PropAttr("staticDir","-sd;--static-dir","static content location")
      ,new ul.PropAttr("stopDelay","-std;--stop-delay","delay before exit, ms")
      ,new ul.PropAttr("socketPort","-sct;--socket-port","Local socket port to use")
      ,new ul.PropAttr("argv","-argv","Extended arguments for webpage")
      ,new ul.PropAttr("fxTitle","-fxtitle","Title for FX application")
      ,new ul.PropAttr("fxFullScreen","-fxfullscreen","Enable FX full screen mode")
      ,new ul.PropAttr("fxNoClose","-fxnoclose","Prevent FX window from closing")
      ,new ul.PropAttr("fxNoResize","-fxnoresize","Prevent FX window from resizing")
      ,new ul.PropAttr("fxNoMinMax","-fxnominmax","Remove FX window Min/Max decoration")
      ,new ul.PropAttr("fxMax","-fxmax","Maximize window")
    )
  }
}
edartuz/meerkat
repo/src/meerkat/Config.scala
Scala
mit
2,468
package example import scala.scalajs.js import org.scalajs.dom object ScalaJSExample extends js.JSApp { def main(): Unit = { dom.document.getElementById("scalajsShoutOut").textContent = "heyyy" } }
intelix/eventstreams
es-iface/es-web-scripts/src/main/scala/example/ScalaJSExample.scala
Scala
apache-2.0
208
/** * Copyright 2016 Matthew Farmer * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. **/ package frmr.scyig import frmr.scyig.matching._ import frmr.scyig.matching.models._ import java.util.UUID import org.scalacheck._ import org.scalacheck.Gen._ import org.scalacheck.Arbitrary.arbitrary object Generators { val participantNameGen = for (name <- Gen.alphaStr) yield ParticipantName(name) val participantOrgGen = for (name <- Gen.alphaStr) yield ParticipantOrganization(name) val teamGen = for { name <- participantNameGen org <- participantOrgGen } yield { CompetingTeam(name, org) } val presidingJudgeGen = for { name <- participantNameGen org <- Gen.option(participantOrgGen) } yield { Judge(name, org, isPresiding = true) } val scoringJudgeGen = for { name <- participantNameGen org <- Gen.option(participantOrgGen) } yield { Judge(name, org, isScoring = true) } val participantGen: Gen[Participant] = for { participant <- Gen.oneOf(teamGen, presidingJudgeGen, scoringJudgeGen) } yield participant val participantsGen: Gen[Seq[Participant]] = for { participants <- Gen.listOf(participantGen) } yield participants val matchingEngineGen = for { participants <- participantsGen numberOfRooms <- Gen.choose[Int](1, 100) matchingPolicy = MatchingPolicy.default suggester = (participants)=>new RandomizedParticipantSuggester(participants) } yield { new MatchingEngine( participants, numberOfRooms, matchingPolicy, suggester ) } val byeGen = for (team <- teamGen) yield { Bye(team) } val trialGen = for { prosecution <- teamGen defense <- teamGen presidingJudge <- presidingJudgeGen scoringJudge <- scoringJudgeGen roomNumber <- Gen.choose(1, 100) } yield { Trial( prosecution, defense, presidingJudge, scoringJudge, roomNumber ) } val scheduledRoundMatchGen = Gen.oneOf(trialGen, byeGen) val multipleScheduledRoundMatchesGen = Gen.listOf(scheduledRoundMatchGen) }
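// --- Hedged usage sketch (not part of the original test sources) --------------
// Shows how the generators above plug into plain ScalaCheck: drawing a single
// sample and checking a forAll property. GeneratorsUsageSketch is a hypothetical
// name; the property merely restates the Gen.choose(1, 100) bound that
// matchingEngineGen uses for its room count, so it holds by construction.
object GeneratorsUsageSketch {
  import org.scalacheck.Prop.forAll

  def main(args: Array[String]): Unit = {
    // Draw one random team (sample may be None if generation is retried away).
    println(Generators.teamGen.sample)

    // Room counts fed to the matching engine are always within 1 to 100.
    val roomCountInRange = forAll(Gen.choose[Int](1, 100)) { n => n >= 1 && n <= 100 }
    roomCountInRange.check
  }
}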
farmdawgnation/scyig-judicial
src/test/scala/frmr/scyig/Generators.scala
Scala
apache-2.0
2,587
package pureconfig.module.cats import cats._ import com.typesafe.config.{Config, ConfigFactory, ConfigValue} import pureconfig._ import pureconfig.error.{ConfigReaderFailure, ConfigReaderFailures} package object instances { implicit val configReaderInstance: ApplicativeError[ConfigReader, ConfigReaderFailures] = new ApplicativeError[ConfigReader, ConfigReaderFailures] { def pure[A](x: A): ConfigReader[A] = ConfigReader.fromFunction { _ => Right(x) } def ap[A, B](ff: ConfigReader[A => B])(fa: ConfigReader[A]): ConfigReader[B] = ff.zip(fa).map { case (f, a) => f(a) } def raiseError[A](e: ConfigReaderFailures): ConfigReader[A] = ConfigReader.fromFunction { _ => Left(e) } def handleErrorWith[A](fa: ConfigReader[A])(f: ConfigReaderFailures => ConfigReader[A]): ConfigReader[A] = ConfigReader.fromFunction { cv => fa.from(cv) match { case Left(failures) => f(failures).from(cv) case r @ Right(_) => r } } } implicit val configWriterCatsInstance: ContravariantSemigroupal[ConfigWriter] = new ContravariantSemigroupal[ConfigWriter] { def contramap[A, B](fa: ConfigWriter[A])(f: B => A): ConfigWriter[B] = fa.contramap(f) def product[A, B](fa: ConfigWriter[A], fb: ConfigWriter[B]) = ConfigWriter.fromFunction[(A, B)] { case (a, b) => fb.to(b).withFallback(fa.to(a)) } } implicit val configConvertCatsInstance: InvariantSemigroupal[ConfigConvert] = new InvariantSemigroupal[ConfigConvert] { def imap[A, B](fa: ConfigConvert[A])(f: A => B)(g: B => A): ConfigConvert[B] = fa.xmap(f, g) def product[A, B](fa: ConfigConvert[A], fb: ConfigConvert[B]): ConfigConvert[(A, B)] = { val reader = fa.zip(fb) val writer = ConfigWriter.fromFunction[(A, B)] { case (a, b) => fb.to(b).withFallback(fa.to(a)) } ConfigConvert.fromReaderAndWriter(reader, writer) } } implicit val configValueEq: Eq[ConfigValue] = Eq.fromUniversalEquals implicit val configEq: Eq[Config] = Eq.fromUniversalEquals implicit val configReaderFailureEq: Eq[ConfigReaderFailure] = Eq.fromUniversalEquals implicit val configReaderFailuresEq: Eq[ConfigReaderFailures] = Eq.fromUniversalEquals implicit val configReaderFailuresSemigroup: Semigroup[ConfigReaderFailures] = Semigroup.instance(_ ++ _) implicit val configValueCatsSemigroup: Semigroup[ConfigValue] = Semigroup.instance((a, b) => b.withFallback(a)) implicit val configCatsMonoid: Monoid[Config] = Monoid.instance(ConfigFactory.empty, (a, b) => b.withFallback(a)) implicit val configObjectSourceCatsMonoid: Monoid[ConfigObjectSource] = Monoid.instance(ConfigSource.empty, (a, b) => b.withFallback(a)) }
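// --- Hedged usage sketch (not part of the original module) -------------------
// Demonstrates the Monoid[Config] defined above: combining two Configs with
// cats' |+| is (a, b) => b.withFallback(a), so later operands win on conflicting
// keys while missing keys fall back to earlier ones. ConfigMonoidSketch is a
// hypothetical name.
object ConfigMonoidSketch {
  import cats.syntax.semigroup._
  import instances._

  def main(args: Array[String]): Unit = {
    val defaults  = ConfigFactory.parseString("host = localhost\nport = 8080")
    val overrides = ConfigFactory.parseString("port = 9090")

    val merged = defaults |+| overrides   // == overrides.withFallback(defaults)
    println(merged.getString("host"))     // localhost (only present in defaults)
    println(merged.getInt("port"))        // 9090 (the override wins)
  }
}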
pureconfig/pureconfig
modules/cats/src/main/scala/pureconfig/module/cats/instances/package.scala
Scala
mpl-2.0
2,821
package com.nabijaczleweli.minecrasmer.entity

import com.nabijaczleweli.minecrasmer.reference.Reference
import com.nabijaczleweli.minecrasmer.reference.Reference._
import com.nabijaczleweli.minecrasmer.util.IConfigurable
import net.minecraftforge.common.config.Configuration
import net.minecraftforge.fml.common.registry.VillagerRegistry
import net.minecraftforge.fml.common.registry.VillagerRegistry.{VillagerCareer, VillagerProfession}

object Villager extends IConfigurable {
  val electronicsVillagerProfession = new VillagerProfession(Reference.NAMESPACED_PREFIX + "electronic", Reference.NAMESPACED_PREFIX + "textures/entity/villager/electronic.png")

  final var electronicsVillagerID = 4637

  override def load(config: Configuration) =
    electronicsVillagerID = config.getInt("electronicsVillagerID", CONFIG_ENTIRY_CATEGORY, electronicsVillagerID, 6, Int.MaxValue, "ID of the Electronics Villager")

  def registerProfessions() {
    VillagerRegistry.instance register electronicsVillagerProfession
    new VillagerCareer(electronicsVillagerProfession, "electronic")
  }
}
nabijaczleweli/ASMifier
src/main/scala/com/nabijaczleweli/minecrasmer/entity/Villager.scala
Scala
mit
1,132