code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package eu.monniot.redis.plugin
import redis.embedded.util.{Architecture => jArch}
/**
 * Correspond to the redis.embedded.util.Architecture enum, but scalaified to be automatically
 * imported in a sbt config file
 */
sealed trait Architecture {
  /** The equivalent `redis.embedded.util.Architecture` enum constant. */
  def toJava: jArch
}
object Architecture {
  /** 32-bit x86 architecture. */
  object x86 extends Architecture {
    def toJava: jArch = jArch.x86
  }
  /** 64-bit x86 architecture. */
  object x86_64 extends Architecture {
    def toJava: jArch = jArch.x86_64
  }
}
| fmonniot/sbt-redis-plugin | src/main/scala/eu/monniot/redis/plugin/Architecture.scala | Scala | apache-2.0 | 456 |
package hu.frankdavid.ranking.gui
import java.awt.Toolkit
import com.apple.eawt.Application
import scalafx.application.JFXApp
import scalafx.application.JFXApp.PrimaryStage
import scalafx.scene.image.Image
import scalafx.Includes._
// Application entry point: sets the dock icon (macOS only) and opens the
// players window as the primary stage.
object Main extends JFXApp {
  val icon = getClass.getResource("icon.png")
  // com.apple.eawt.Application exists only on Apple JVMs, so on other
  // platforms this call fails with a linkage *Error* (not an Exception) —
  // presumably why Throwable is caught here instead of NonFatal. The failure
  // is intentionally ignored: the app simply runs without a dock icon.
  try {
    Application.getApplication.setDockIconImage(Toolkit.getDefaultToolkit.getImage(icon))
  } catch {
    case _: Throwable =>
  }
  stage = new PrimaryStage() {
    title = "Tournament Organizer"
    icons += new Image(icon.openStream())
  }
  // Show the players window as the initial scene.
  stage.scene = new PlayersWindow(stage)
}
| frankdavid/ranking | src/main/scala/hu/frankdavid/ranking/gui/Main.scala | Scala | apache-2.0 | 607 |
package com.twitter.util
import java.io.{File, FileWriter}
import scala.io.Source
import org.junit.runner.RunWith
import org.scalatest.WordSpec
import org.scalatest.junit.JUnitRunner
import com.twitter.io.TempFile
// Exercises the Eval runtime compiler: expression/file/stream compilation,
// checksum-keyed class-file caching in a target directory, #include
// preprocessing, and file-name-to-class-name mangling.
@RunWith(classOf[JUnitRunner])
class EvalTest extends WordSpec {
  "Evaluator" should {
    "apply('expression')" in {
      assert((new Eval).apply[Int]("1 + 1") == 2)
    }
    "apply(new File(...))" in {
      assert((new Eval).apply[Int](TempFile.fromResourcePath("/OnePlusOne.scala")) == 2)
    }
    "apply(new File(...), new File(...))" in {
      val derived = (new Eval).apply[() => String](
        TempFile.fromResourcePath("/Base.scala"),
        TempFile.fromResourcePath("/Derived.scala"))
      assert(derived() == "hello")
    }
    "apply(new File(...) with a dash in the name with target" in {
      val f = File.createTempFile("eval", "target")
      f.delete()
      f.mkdir()
      val e = new Eval(Some(f))
      val sourceFile = TempFile.fromResourcePath("/file-with-dash.scala")
      val res: String = e(sourceFile)
      assert(res == "hello")
      // the compiled class file is named Evaluator__<mangledName>_<checksum>.class
      val className = e.fileToClassName(sourceFile)
      val processedSource = e.sourceForString(Source.fromFile(sourceFile).getLines.mkString("\n"))
      val fullClassName = "Evaluator__%s_%s.class".format(
        className, e.uniqueId(processedSource, None))
      val targetFileName = f.getAbsolutePath() + File.separator + fullClassName
      val targetFile = new File(targetFileName)
      assert(targetFile.exists)
    }
    "apply(new File(...) with target" in {
      val f = File.createTempFile("eval", "target")
      f.delete()
      f.mkdir()
      val e = new Eval(Some(f))
      val sourceFile = TempFile.fromResourcePath("/OnePlusOne.scala")
      val res: Int = e(sourceFile)
      assert(res == 2)
      // make sure it created a class file with the expected name
      val className = e.fileToClassName(sourceFile)
      val processedSource = e.sourceForString(Source.fromFile(sourceFile).getLines.mkString("\n"))
      val fullClassName = "Evaluator__%s_%s.class".format(
        className, e.uniqueId(processedSource, None))
      val targetFileName = f.getAbsolutePath() + File.separator + fullClassName
      val targetFile = new File(targetFileName)
      assert(targetFile.exists)
      val targetMod = targetFile.lastModified
      // eval again, make sure it works
      val res2: Int = e(sourceFile)
      // and make sure it didn't create a new file (1 + checksum)
      assert(f.listFiles.length == 2)
      // and make sure it didn't update the file
      val targetFile2 = new File(targetFileName)
      assert(targetFile2.lastModified == targetMod)
      // touch source, ensure no-recompile (checksum hasn't changed)
      sourceFile.setLastModified(System.currentTimeMillis())
      val res3: Int = e(sourceFile)
      assert(res3 == 2)
      // and make sure it didn't create a different file
      assert(f.listFiles.length == 2)
      // and make sure it did NOT recompile/update the file: caching is keyed
      // on the source checksum, not on the file's modification time
      val targetFile3 = new File(targetFileName)
      assert(targetFile3.lastModified == targetMod)
      // append a newline, altering checksum, verify recompile
      val writer = new FileWriter(sourceFile)
      writer.write("//a comment\n2\n")
      writer.close
      val res4: Int = e(sourceFile)
      assert(res4 == 2)
      // the changed checksum yields a class file under a NEW name, so the file
      // with the old checksum-derived name no longer exists
      val targetFile4 = new File(targetFileName)
      assert(!targetFile4.exists)
    }
    "apply(InputStream)" in {
      assert((new Eval).apply[Int](getClass.getResourceAsStream("/OnePlusOne.scala")) == 2)
    }
    "uses deprecated" in {
      val deprecated = (new Eval).apply[() => String](
        TempFile.fromResourcePath("/Deprecated.scala"))
      assert(deprecated() == "hello")
    }
    "inPlace('expression')" in {
      // Old object API works
      Eval.compile("object Doubler { def apply(n: Int) = n * 2 }")
      assert(Eval.inPlace[Int]("Doubler(2)") == 4)
      assert(Eval.inPlace[Int]("Doubler(14)") == 28)
      // New class API fails
      // val eval = new Eval
      // eval.compile("object Doubler { def apply(n: Int) = n * 2 }")
      // assert(eval.inPlace[Int]("Doubler(2)") === 4)
      // assert(eval.inPlace[Int]("Doubler(14)") === 28)
    }
    "check" in {
      // check() typechecks without evaluating; invalid code must raise
      (new Eval).check("23")
      intercept[Eval.CompilerException] {
        (new Eval).check("invalid")
      }
    }
    "#include" in {
      val derived = Eval[() => String](
        TempFile.fromResourcePath("/Base.scala"),
        TempFile.fromResourcePath("/DerivedWithInclude.scala"))
      assert(derived() == "hello")
      assert(derived.toString == "hello, joe")
    }
    "recursive #include" in {
      val derived = Eval[() => String](
        TempFile.fromResourcePath("/Base.scala"),
        TempFile.fromResourcePath("/IncludeInclude.scala"))
      assert(derived() == "hello")
      assert(derived.toString == "hello, joe; hello, joe")
    }
    "toSource returns post-processed code" in {
      val derived = Eval.toSource(TempFile.fromResourcePath("/DerivedWithInclude.scala"))
      assert(derived.contains("hello, joe"))
      assert(derived.contains("new Base"))
    }
    "throws a compilation error when Ruby is #included" in {
      intercept[Throwable] {
        Eval[() => String](
          TempFile.fromResourcePath("RubyInclude.scala")
        )
      }
    }
    "clean class names" in {
      // non-identifier characters are escaped as $<hex> to form a legal class name
      val e = new Eval()
      // regular old scala file
      assert(e.fileToClassName(new File("foo.scala")) == "foo")
      // without an extension
      assert(e.fileToClassName(new File("foo")) == "foo")
      // with lots o dots
      assert(e.fileToClassName(new File("foo.bar.baz")) == "foo$2ebar")
      // with dashes
      assert(e.fileToClassName(new File("foo-bar-baz.scala")) == "foo$2dbar$2dbaz")
      // with crazy things
      assert(e.fileToClassName(new File("foo$! -@@@")) == "foo$24$21$20$2d$40$40$40")
    }
  }
}
| luciferous/util | util-eval/src/test/scala/com/twitter/util/EvalTest.scala | Scala | apache-2.0 | 5,948 |
package tasks
// A Task named "performTransaction" whose action is MockTasks.transaction
// (actual run semantics are defined by the Task base class — see Task).
class TransactionTask extends Task("performTransaction", MockTasks.transaction) {
} | Igerly/cisca | FuturesTest/src/tasks/TransactionTask.scala | Scala | mit | 101 |
/** Central registry of dependency version numbers used by the build. */
object Versions {
  // Runtime dependencies.
  val spark         = "1.6.1"
  val guava         = "14.0.1"
  val riakClient    = "2.0.7"
  val kafka         = "0.8.2.2"

  // Charting.
  val jfree      = "1.0.19"
  val scalaChart = "0.4.2"

  // Serialisation.
  val jacksonModule = "2.4.4"

  // Test-only dependencies. NOTE: `powermokc` (PowerMock) and `hamrest`
  // (Hamcrest) are misspelled, but the identifiers are kept as-is because
  // build definitions elsewhere reference them by these exact names.
  val powermokc      = "1.6.4"
  val junit          = "4.11"
  val jsonUnit       = "1.5.1"
  val hamrest        = "1.3"
  val mockito        = "1.10.19"
  val junitInterface = "0.11"
  val riakTestDocker = "1.1"
}
| basho/spark-riak-connector | project/Versions.scala | Scala | apache-2.0 | 375 |
package blizzybotscala
/**
* Created by Anthony on 6/16/2015.
*/
/**
 * Dispatches chat-bot commands by name.
 *
 * @param args argument string supplied with the command
 * @param room chat room the command came from (currently unused here)
 * @param user user who issued the command (currently unused here)
 */
class CommandList(args: String, room: String, user: String) {
  // Registry mapping a command name to its zero-argument handler.
  val commands = Map[String, () => String](
    "hello" -> hello,
    "echo" -> echo
  )

  /** Greets using the supplied argument string. */
  def hello(): String = s"Hello, $args"

  /** Echoes the argument string back unchanged. */
  def echo(): String = args
}
| NotBlizzard/blizzybotscala | src/main/scala/blizzybotscala/CommandList.scala | Scala | mit | 321 |
/*
* Copyright (c) 2012-2016 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.snowplow.enrich
package common.enrichments.registry.sqlquery
import scalaz.Validation.FlatMap._
// json4s
import org.json4s._
import org.json4s.JsonDSL._
import org.json4s.jackson.parseJson
import org.json4s.jackson.JsonMethods.asJsonNode
// specs2
import org.specs2.Specification
import org.specs2.scalaz.ValidationMatchers
// Iglu
import com.snowplowanalytics.iglu.client.{JsonSchemaPair, SchemaKey}
// This library
import common.outputs.EnrichedEvent
object SqlQueryEnrichmentIntegrationTest {
  // These tests need a provisioned PostgreSQL instance, available only on CI
  // (signalled by the CI=true environment variable).
  def continuousIntegration: Boolean = sys.env.get("CI") match {
    case Some("true") => true
    case _ => false
  }

  /**
   * Helper function creating almost real [[JsonSchemaPair]] for context/unstruct event
   * out of *valid* JSON string and [[SchemaKey]].
   * Useful only if we're passing unstruct event or custom context (but not derived) straight into
   * SqlQueryEnrichment.lookup method
   *
   * WARNING: this is REQUIRED to test custom contexts (not derived!) and unstruct event
   */
  def createPair(key: SchemaKey, validJson: String): JsonSchemaPair = {
    // synthetic hierarchy metadata wrapped around the payload, mimicking the
    // shape lookup expects for custom contexts/unstruct events
    val hierarchy = parseJson(
      s"""{"rootId":null,"rootTstamp":null,"refRoot":"events","refTree":["events","${key.name}"],"refParent":"events"}""")
    (key,
     asJsonNode(
       ("data", parseJson(validJson)) ~ (("hierarchy", hierarchy)) ~ (("schema", key.toJValue))))
  }

  // Derived contexts are plain self-describing JSON — no hierarchy wrapper.
  def createDerived(key: SchemaKey, validJson: String): JObject =
    ("schema", key.toSchemaUri) ~ (("data", parseJson(validJson)))
}
import SqlQueryEnrichmentIntegrationTest._
// Integration tests for SqlQueryEnrichment against a live PostgreSQL
// database (only run on CI — see skipAllUnless below).
class SqlQueryEnrichmentIntegrationTest extends Specification with ValidationMatchers {
  skipAllUnless(continuousIntegration)
  def is =
    "This is an integration test for the SqlQueryEnrichment" ^
      "Basic case" ! e1 ^
      "All-features test" ! e2 ^
      end
  // Schema key of the enrichment configuration itself.
  val SCHEMA_KEY = SchemaKey("com.snowplowanalytics.snowplow.enrichments",
                             "sql_query_enrichment_config",
                             "jsonschema",
                             "1-0-0")
  // Simplest possible lookup: no inputs, a constant SELECT, one output row.
  def e1 = {
    val configuration = parseJson("""
      |{
      |  "vendor": "com.snowplowanalytics.snowplow.enrichments",
      |  "name": "sql_query_enrichment_config",
      |  "enabled": true,
      |  "parameters": {
      |    "inputs": [],
      |    "database": {
      |      "postgresql": {
      |        "host": "localhost",
      |        "port": 5432,
      |        "sslMode": false,
      |        "username": "enricher",
      |        "password": "supersecret1",
      |        "database": "sql_enrichment_test"
      |      }
      |    },
      |    "query": {
      |      "sql": "SELECT 42 AS \\"singleColumn\\""
      |    },
      |    "output": {
      |      "expectedRows": "AT_MOST_ONE",
      |      "json": {
      |        "schema": "iglu:com.acme/singleColumn/jsonschema/1-0-0",
      |        "describes": "ALL_ROWS",
      |        "propertyNames": "AS_IS"
      |      }
      |    },
      |    "cache": {
      |      "size": 3000,
      |      "ttl": 60
      |    }
      |  }
      |}
    """.stripMargin)
    val event = new EnrichedEvent
    val config = SqlQueryEnrichmentConfig.parse(configuration, SCHEMA_KEY)
    val context = config.flatMap(_.lookup(event, Nil, Nil, Nil))
    val correctContext = parseJson("""
      |{
      |  "schema": "iglu:com.acme/singleColumn/jsonschema/1-0-0",
      |  "data": {
      |    "singleColumn": 42
      |  }
      |}
    """.stripMargin)
    context must beSuccessful.like {
      case List(json) => json must beEqualTo(correctContext)
    }
  }
  /**
   * Most complex test, it tests:
   * + POJO inputs
   * + unstruct event inputs
   * + derived and custom contexts
   * + colliding inputs
   * + cache
   */
  def e2 = {
    // Note: placeholder 3 is deliberately bound twice (pojo user_id AND the
    // client_session userId) to exercise colliding-input resolution.
    val configuration = parseJson(
      """
      |{
      |  "vendor": "com.snowplowanalytics.snowplow.enrichments",
      |  "name": "sql_query_enrichment_config",
      |  "enabled": true,
      |  "parameters": {
      |    "inputs": [
      |      {
      |        "placeholder": 1,
      |        "pojo": {
      |          "field": "geo_city"
      |        }
      |      },
      |
      |      {
      |        "placeholder": 2,
      |        "json": {
      |          "field": "derived_contexts",
      |          "schemaCriterion": "iglu:org.openweathermap/weather/jsonschema/*-*-*",
      |          "jsonPath": "$.dt"
      |        }
      |      },
      |
      |      {
      |        "placeholder": 3,
      |        "pojo": {
      |          "field": "user_id"
      |        }
      |      },
      |
      |      {
      |        "placeholder": 3,
      |        "json": {
      |          "field": "contexts",
      |          "schemaCriterion": "iglu:com.snowplowanalytics.snowplow/client_session/jsonschema/1-*-*",
      |          "jsonPath": "$.userId"
      |        }
      |      },
      |
      |      {
      |        "placeholder": 4,
      |        "json": {
      |          "field": "contexts",
      |          "schemaCriterion": "iglu:com.snowplowanalytics.snowplow/geolocation_context/jsonschema/1-1-*",
      |          "jsonPath": "$.speed"
      |        }
      |      },
      |
      |      {
      |        "placeholder": 5,
      |        "json": {
      |          "field": "unstruct_event",
      |          "schemaCriterion": "iglu:com.snowplowanalytics.monitoring.kinesis/app_initialized/jsonschema/1-0-0",
      |          "jsonPath": "$.applicationName"
      |        }
      |      }
      |    ],
      |
      |    "database": {
      |      "postgresql": {
      |        "host": "localhost",
      |        "port": 5432,
      |        "sslMode": false,
      |        "username": "enricher",
      |        "password": "supersecret1",
      |        "database": "sql_enrichment_test"
      |      }
      |    },
      |    "query": {
      |      "sql": "SELECT city, country, pk FROM enrichment_test WHERE city = ? AND date_time = ? AND name = ? AND speed = ? AND aux = ?;"
      |    },
      |    "output": {
      |      "expectedRows": "AT_MOST_ONE",
      |      "json": {
      |        "schema": "iglu:com.acme/demographic/jsonschema/1-0-0",
      |        "describes": "ALL_ROWS",
      |        "propertyNames": "CAMEL_CASE"
      |      }
      |    },
      |    "cache": {
      |      "size": 3000,
      |      "ttl": 60
      |    }
      |  }
      |}
    """.stripMargin)
    // Fixture 1: Krasnoyarsk / alice
    val event1 = new EnrichedEvent
    event1.setGeo_city("Krasnoyarsk")
    val weatherContext1 = createDerived(
      SchemaKey("org.openweathermap", "weather", "jsonschema", "1-0-0"),
      """{"main":{"humidity":78.0,"pressure":1010.0,"temp":260.91,"temp_min":260.15,"temp_max":261.15},"wind":{"speed":2.0,"deg":250.0,"var_end":270,"var_beg":200},"clouds":{"all":75},"weather":[{"main":"Snow","description":"light snow","id":600,"icon":"13d"},{"main":"Mist","description":"mist","id":701,"icon":"50d"}],"dt":"2016-01-07T10:10:34.000Z"}""")
    event1.setUser_id("alice")
    val geoContext1 = createPair(
      SchemaKey("com.snowplowanalytics.snowplow", "geolocation_context", "jsonschema", "1-1-0"),
      """ {"latitude": 12.5, "longitude": 32.1, "speed": 10.0} """)
    val ue1 = createPair(SchemaKey("com.snowplowanalytics.monitoring.kinesis",
                                   "app_initialized",
                                   "jsonschema",
                                   "1-0-0"),
                         """ {"applicationName": "ue_test_krsk"} """)
    // Fixture 2: London / bob
    val event2 = new EnrichedEvent
    event2.setGeo_city("London")
    val weatherContext2 = createDerived(
      SchemaKey("org.openweathermap", "weather", "jsonschema", "1-0-0"),
      """{"main":{"humidity":78.0,"pressure":1010.0,"temp":260.91,"temp_min":260.15,"temp_max":261.15},"wind":{"speed":2.0,"deg":250.0,"var_end":270,"var_beg":200},"clouds":{"all":75},"weather":[{"main":"Snow","description":"light snow","id":600,"icon":"13d"},{"main":"Mist","description":"mist","id":701,"icon":"50d"}],"dt":"2016-01-08T10:00:34.000Z"}""")
    event2.setUser_id("bob")
    val geoContext2 = createPair(
      SchemaKey("com.snowplowanalytics.snowplow", "geolocation_context", "jsonschema", "1-1-0"),
      """ {"latitude": 12.5, "longitude": 32.1, "speed": 25.0} """)
    val ue2 = createPair(SchemaKey("com.snowplowanalytics.monitoring.kinesis",
                                   "app_initialized",
                                   "jsonschema",
                                   "1-0-0"),
                         """ {"applicationName": "ue_test_london"} """)
    // Fixture 3: New York / eve
    val event3 = new EnrichedEvent
    event3.setGeo_city("New York")
    val weatherContext3 = createDerived(
      SchemaKey("org.openweathermap", "weather", "jsonschema", "1-0-0"),
      """{"main":{"humidity":78.0,"pressure":1010.0,"temp":260.91,"temp_min":260.15,"temp_max":261.15},"wind":{"speed":2.0,"deg":250.0,"var_end":270,"var_beg":200},"clouds":{"all":75},"weather":[{"main":"Snow","description":"light snow","id":600,"icon":"13d"},{"main":"Mist","description":"mist","id":701,"icon":"50d"}],"dt":"2016-02-07T10:10:00.000Z"}""")
    event3.setUser_id("eve")
    val geoContext3 = createPair(
      SchemaKey("com.snowplowanalytics.snowplow", "geolocation_context", "jsonschema", "1-1-0"),
      """ {"latitude": 12.5, "longitude": 32.1, "speed": 2.5} """)
    val ue3 = createPair(SchemaKey("com.snowplowanalytics.monitoring.kinesis",
                                   "app_initialized",
                                   "jsonschema",
                                   "1-0-0"),
                         """ {"applicationName": "ue_test_ny"} """)
    // Fixture 4: same effective inputs as fixture 2 — exercises the cache and
    // the colliding placeholder-3 binding (client_session overrides user_id)
    val event4 = new EnrichedEvent
    event4.setGeo_city("London")
    val weatherContext4 = createDerived(
      SchemaKey("org.openweathermap", "weather", "jsonschema", "1-0-0"),
      """{"main":{"humidity":78.0,"pressure":1010.0,"temp":260.91,"temp_min":260.15,"temp_max":261.15},"wind":{"speed":2.0,"deg":250.0,"var_end":270,"var_beg":200},"clouds":{"all":75},"weather":[{"main":"Snow","description":"light snow","id":600,"icon":"13d"},{"main":"Mist","description":"mist","id":701,"icon":"50d"}],"dt":"2016-01-08T10:00:34.000Z"}""")
    event4.setUser_id("eve") // This should be ignored because of clientSession4
    val clientSession4 = createPair(
      SchemaKey("com.snowplowanalytics.snowplow", "client_session", "jsonschema", "1-0-1"),
      """ { "userId": "bob", "sessionId": "123e4567-e89b-12d3-a456-426655440000", "sessionIndex": 1, "previousSessionId": null, "storageMechanism": "SQLITE" } """)
    val geoContext4 = createPair(
      SchemaKey("com.snowplowanalytics.snowplow", "geolocation_context", "jsonschema", "1-1-0"),
      """ {"latitude": 12.5, "longitude": 32.1, "speed": 25.0} """)
    val ue4 = createPair(SchemaKey("com.snowplowanalytics.monitoring.kinesis",
                                   "app_initialized",
                                   "jsonschema",
                                   "1-0-0"),
                         """ {"applicationName": "ue_test_london"} """)
    val config = SqlQueryEnrichmentConfig.parse(configuration, SCHEMA_KEY)
    val context1 =
      config.flatMap(_.lookup(event1, List(weatherContext1), List(geoContext1), List(ue1)))
    val result_context1 = parseJson("""|{"schema":"iglu:com.acme/demographic/jsonschema/1-0-0",
                                       | "data": {
                                       |   "city": "Krasnoyarsk",
                                       |   "country": "Russia",
                                       |   "pk": 1}}""".stripMargin)
    val context2 =
      config.flatMap(_.lookup(event2, List(weatherContext2), List(geoContext2), List(ue2)))
    val result_context2 = parseJson("""|{"schema":"iglu:com.acme/demographic/jsonschema/1-0-0",
                                       | "data": {
                                       |   "city": "London",
                                       |   "country": "England",
                                       |   "pk": 2}}""".stripMargin)
    val context3 =
      config.flatMap(_.lookup(event3, List(weatherContext3), List(geoContext3), List(ue3)))
    val result_context3 = parseJson("""|{"schema":"iglu:com.acme/demographic/jsonschema/1-0-0",
                                       | "data": {
                                       |   "city": "New York",
                                       |   "country": "USA",
                                       |   "pk": 3}}
                                    """.stripMargin)
    val context4 = config.flatMap(
      _.lookup(event4, List(weatherContext4), List(geoContext4, clientSession4), List(ue4)))
    val result_context4 = parseJson("""|{"schema":"iglu:com.acme/demographic/jsonschema/1-0-0",
                                       | "data": {
                                       |   "city": "London",
                                       |   "country": "England",
                                       |   "pk": 2}}""".stripMargin)
    val res1 = context1 must beSuccessful.like {
      case List(ctx) => ctx must beEqualTo(result_context1)
    }
    val res2 = context2 must beSuccessful.like {
      case List(ctx) => ctx must beEqualTo(result_context2)
    }
    val res3 = context3 must beSuccessful.like {
      case List(ctx) => ctx must beEqualTo(result_context3)
    }
    val res4 = context4 must beSuccessful.like {
      case List(ctx) => ctx must beEqualTo(result_context4)
    }
    // event4 resolves to the same placeholder values as event2, so only 3
    // distinct lookups should have been cached
    val cache = config.map(_.cache.actualLoad) must beSuccessful.like {
      case size => size must beEqualTo(3)
    }
    res1.and(res2).and(res3).and(res4).and(cache)
  }
}
| TimothyKlim/snowplow | 3-enrich/scala-common-enrich/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/sqlquery/SqlQueryEnrichmentIntegrationTest.scala | Scala | apache-2.0 | 15,710 |
/*
* Copyright © 2015 Lukas Rosenthaler, Benjamin Geer, Ivan Subotic,
* Tobias Schweizer, André Kilchenmann, and Sepideh Alassi.
* This file is part of Knora.
* Knora is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* Knora is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
* You should have received a copy of the GNU Affero General Public
* License along with Knora. If not, see <http://www.gnu.org/licenses/>.
*/
package org.knora.webapi
import akka.agent.Agent
//TODO: Use the Knora Execution Context
import scala.concurrent.ExecutionContext.Implicits.global
/**
 * Holds flags that are passed to the 'webapi' server at startup.
 * Each flag defaults to false and is wrapped in an akka Agent, which gives
 * thread-safe reads and asynchronous updates from anywhere in the app.
 */
object StartupFlags {
  /**
   * Startup Flag Value: Reset Triplestore Content Operation Over HTTP
   */
  val allowResetTriplestoreContentOperationOverHTTP = Agent(false)
  /**
   * Startup Flag Value: Load Demo Data
   */
  val loadDemoData = Agent(false)
}
| nie-ine/Knora | webapi/src/main/scala/org/knora/webapi/StartupFlags.scala | Scala | agpl-3.0 | 1,317 |
package play.api.cache.redis.configuration
/**
 * Concrete [[RedisSettings]] used in tests: every setting is supplied
 * directly as a constructor argument instead of being read from config.
 */
case class RedisSettingsTest(
  invocationContext: String,
  invocationPolicy: String,
  timeout: RedisTimeouts,
  recovery: String,
  source: String,
  prefix: Option[String] = None
) extends RedisSettings
| KarelCemus/play-redis | src/test/scala/play/api/cache/redis/configuration/RedisSettingsTest.scala | Scala | mpl-2.0 | 252 |
package wandou.math.algebra
// A slice of a matrix: the sliced `vector` together with its `index`
// (presumably the row/column position — confirm orientation against callers).
final case class MatrixSlice(vector: Vector, index: Int)
| wandoulabs/wandou-math | wandou-math/src/main/scala/wandou/math/algebra/MatrixSlice.scala | Scala | apache-2.0 | 87 |
/* *\\
** Squants **
** **
** Scala Quantities and Units of Measure Library and DSL **
** (c) 2013-2015, Gary Keorkunian **
** **
\\* */
package squants.electro
import squants._
/**
 * @author  garyKeorkunian
 * @since   0.1
 *
 * @param value value in [[squants.electro.OhmMeters]]
 */
final class Resistivity private (val value: Double, val unit: ResistivityUnit)
    extends Quantity[Resistivity] {

  def dimension = Resistivity

  // resistivity [Ω⋅m] / length [m] = resistance [Ω]
  def /(that: Length): ElectricalResistance = Ohms(toOhmMeters / that.toMeters)
  // resistivity [Ω⋅m] / resistance [Ω] = length [m]
  def /(that: ElectricalResistance): Length = Meters(toOhmMeters / that.toOhms)

  def toOhmMeters = to(OhmMeters)
  // conductivity is the reciprocal of resistivity
  def inSiemensPerMeter = SiemensPerMeter(1d / toOhmMeters)
}
object Resistivity extends Dimension[Resistivity] {
  private[electro] def apply[A](n: A, unit: ResistivityUnit)(implicit num: Numeric[A]) = new Resistivity(num.toDouble(n), unit)
  // exposes the string parser inherited from Dimension as `apply`
  def apply = parse _
  def name = "Resistivity"
  def primaryUnit = OhmMeters
  def siUnit = OhmMeters
  def units = Set(OhmMeters)
}
/** Base trait for units of measure of [[Resistivity]]. */
trait ResistivityUnit extends UnitOfMeasure[Resistivity] with UnitConverter {
  def apply[A](n: A)(implicit num: Numeric[A]) = Resistivity(n, this)
}
/** Ohm-meters: the primary and SI unit of resistivity. */
object OhmMeters extends ResistivityUnit with PrimaryUnit with SiUnit {
  def symbol = "Ω⋅m"
}
object ResistivityConversions {
  lazy val ohmMeter = OhmMeters(1)

  /** Enables expressions such as `2.ohmMeters`. */
  implicit class ResistivityConversions[A](n: A)(implicit num: Numeric[A]) {
    def ohmMeters = OhmMeters(n)
  }

  implicit object ResistivityNumeric extends AbstractQuantityNumeric[Resistivity](Resistivity.primaryUnit)
}
| rmihael/squants | shared/src/main/scala/squants/electro/Resistivity.scala | Scala | apache-2.0 | 1,941 |
package mesosphere.marathon.upgrade
import akka.testkit.{ TestKit, TestActorRef }
import akka.actor.{ Props, ActorSystem }
import mesosphere.marathon.tasks.TaskTracker
import mesosphere.marathon.upgrade.StoppingBehavior.SynchronizeTasks
import org.scalatest.{ BeforeAndAfter, BeforeAndAfterAll, Matchers, FunSuiteLike }
import org.apache.mesos.SchedulerDriver
import org.scalatest.mock.MockitoSugar
import mesosphere.marathon.Protos.MarathonTask
import scala.collection.mutable
import scala.concurrent.{ Await, Promise }
import scala.concurrent.duration._
import mesosphere.marathon.event.MesosStatusUpdateEvent
import org.mockito.Mockito._
import org.apache.mesos.Protos.TaskID
import mesosphere.marathon.TaskUpgradeCanceledException
import mesosphere.marathon.state.{ AppDefinition, PathId }
// Tests TaskKillActor: it should kill the given tasks via the Mesos driver,
// complete its promise once TASK_KILLED updates arrive, and re-issue kills
// for tasks the task tracker still reports after a synchronization.
class TaskKillActorTest
    extends TestKit(ActorSystem("System"))
    with FunSuiteLike
    with Matchers
    with BeforeAndAfterAll
    with BeforeAndAfter
    with MockitoSugar {
  var taskTracker: TaskTracker = _
  var driver: SchedulerDriver = _
  before {
    // fresh mocks per test so verify() counts don't leak between tests
    taskTracker = mock[TaskTracker]
    driver = mock[SchedulerDriver]
  }
  override def afterAll(): Unit = {
    super.afterAll()
    system.shutdown()
  }
  test("Kill tasks") {
    val taskA = MarathonTask.newBuilder().setId("taskA_id").build()
    val taskB = MarathonTask.newBuilder().setId("taskB_id").build()
    val tasks = Set(taskA, taskB)
    val promise = Promise[Unit]()
    val ref = TestActorRef(Props(classOf[TaskKillActor], driver, PathId("/test"), taskTracker, system.eventStream, tasks, promise))
    watch(ref)
    // simulate Mesos acknowledging both kills; the actor should complete the
    // promise and stop itself
    system.eventStream.publish(MesosStatusUpdateEvent("", taskA.getId, "TASK_KILLED", "", PathId.empty, "", Nil, ""))
    system.eventStream.publish(MesosStatusUpdateEvent("", taskB.getId, "TASK_KILLED", "", PathId.empty, "", Nil, ""))
    Await.result(promise.future, 5.seconds) should be(())
    verify(driver).killTask(TaskID.newBuilder().setValue(taskA.getId).build())
    verify(driver).killTask(TaskID.newBuilder().setValue(taskB.getId).build())
    expectTerminated(ref)
  }
  test("Kill tasks with empty task list") {
    // nothing to kill: the actor must finish immediately without touching the driver
    val tasks = Set[MarathonTask]()
    val promise = Promise[Unit]()
    val ref = TestActorRef(Props(classOf[TaskKillActor], driver, PathId("/test"), taskTracker, system.eventStream, tasks, promise))
    watch(ref)
    Await.result(promise.future, 5.seconds) should be(())
    verifyZeroInteractions(driver)
    expectTerminated(ref)
  }
  test("Cancelled") {
    // stopping the actor before the kills complete must fail the promise
    val taskA = MarathonTask.newBuilder().setId("taskA_id").build()
    val taskB = MarathonTask.newBuilder().setId("taskB_id").build()
    val tasks = Set(taskA, taskB)
    val promise = Promise[Unit]()
    val ref = system.actorOf(Props(classOf[TaskKillActor], driver, PathId("/test"), taskTracker, system.eventStream, tasks, promise))
    watch(ref)
    system.stop(ref)
    intercept[TaskUpgradeCanceledException] {
      Await.result(promise.future, 5.seconds)
    }.getMessage should equal("The operation has been cancelled")
    expectTerminated(ref)
  }
  test("Task synchronization") {
    val app = AppDefinition(id = PathId("/app"), instances = 2)
    val promise = Promise[Unit]()
    val taskA = MarathonTask.newBuilder().setId("taskA_id").build()
    val taskB = MarathonTask.newBuilder().setId("taskB_id").build()
    val tasks = mutable.Set(taskA, taskB)
    // tracker reports no remaining tasks, so a sync should finish the actor
    when(taskTracker.get(app.id))
      .thenReturn(Set.empty[MarathonTask])
    val ref = TestActorRef[TaskKillActor](Props(classOf[TaskKillActor], driver, app.id, taskTracker, system.eventStream, tasks.toSet, promise))
    watch(ref)
    // cancel the scheduled check so the test drives synchronization manually
    ref.underlyingActor.periodicalCheck.cancel()
    ref ! SynchronizeTasks
    Await.result(promise.future, 5.seconds) should be(())
    expectTerminated(ref)
  }
  test("Send kill again after synchronization with task tracker") {
    val taskA = MarathonTask.newBuilder().setId("taskA_id").build()
    val taskB = MarathonTask.newBuilder().setId("taskB_id").build()
    val appId = PathId("/test")
    val tasks = Set(taskA, taskB)
    val promise = Promise[Unit]()
    val ref = TestActorRef[TaskKillActor](Props(classOf[TaskKillActor], driver, appId, taskTracker, system.eventStream, tasks, promise))
    // tracker still reports both tasks alive, so a sync should re-issue kills
    when(taskTracker.get(appId)).thenReturn(Set(taskA, taskB))
    watch(ref)
    ref.underlyingActor.periodicalCheck.cancel()
    ref ! SynchronizeTasks
    system.eventStream.publish(MesosStatusUpdateEvent("", taskA.getId, "TASK_KILLED", "", PathId.empty, "", Nil, ""))
    system.eventStream.publish(MesosStatusUpdateEvent("", taskB.getId, "TASK_KILLED", "", PathId.empty, "", Nil, ""))
    Await.result(promise.future, 5.seconds) should be(())
    // one kill from startup plus one from the synchronization
    verify(driver, times(2)).killTask(TaskID.newBuilder().setValue(taskA.getId).build())
    verify(driver, times(2)).killTask(TaskID.newBuilder().setValue(taskB.getId).build())
    expectTerminated(ref)
  }
}
| Kosta-Github/marathon | src/test/scala/mesosphere/marathon/upgrade/TaskKillActorTest.scala | Scala | apache-2.0 | 4,887 |
import java.util.concurrent._
import scala.util.DynamicVariable
// Helpers for running computations in parallel on a shared fork/join pool.
package object common {
  // Single pool shared by every task/parallel invocation in this package.
  val forkJoinPool = new ForkJoinPool
  abstract class TaskScheduler {
    // Schedule `body` for asynchronous execution, returning a joinable handle.
    def schedule[T](body: => T): ForkJoinTask[T]
    def parallel[A, B](taskA: => A, taskB: => B): (A, B) = {
      // Fork taskB FIRST so it can run on another worker while taskA is
      // evaluated on the current thread; only then join. Reordering these
      // statements would serialize the two computations.
      val right = task {
        taskB
      }
      val left = taskA
      (left, right.join())
    }
  }
  class DefaultTaskScheduler extends TaskScheduler {
    def schedule[T](body: => T): ForkJoinTask[T] = {
      val t = new RecursiveTask[T] {
        def compute = body
      }
      Thread.currentThread match {
        // Already inside a fork/join worker: fork directly instead of going
        // through external pool submission.
        case wt: ForkJoinWorkerThread =>
          t.fork()
        case _ =>
          forkJoinPool.execute(t)
      }
      t
    }
  }
  // DynamicVariable so callers (e.g. tests/instrumentation) can swap in a
  // different scheduler for a dynamic scope.
  val scheduler =
    new DynamicVariable[TaskScheduler](new DefaultTaskScheduler)
  def task[T](body: => T): ForkJoinTask[T] = {
    scheduler.value.schedule(body)
  }
  def parallel[A, B](taskA: => A, taskB: => B): (A, B) = {
    scheduler.value.parallel(taskA, taskB)
  }
  def parallel[A, B, C, D](taskA: => A, taskB: => B, taskC: => C, taskD: => D): (A, B, C, D) = {
    // Fork the first three tasks, evaluate taskD on the current thread, then
    // join the forked tasks in order.
    val ta = task { taskA }
    val tb = task { taskB }
    val tc = task { taskC }
    val td = taskD
    (ta.join(), tb.join(), tc.join(), td)
  }
}
| mitochon/hexercise | src/mooc/parprog/week4barneshut/src/main/scala/common/package.scala | Scala | mit | 1,259 |
package com.twitter.finagle.loadbalancer
import com.twitter.conversions.time._
import com.twitter.finagle.stats.{StatsReceiver, NullStatsReceiver}
import com.twitter.finagle.util.{Rng, DefaultTimer}
import com.twitter.finagle.{ServiceFactory, NoBrokersAvailableException}
import com.twitter.util.{Activity, Duration, Future, Timer, Time}
import scala.util.Random
/**
* Constructor methods for various load balancers. The methods take balancer
* specific parameters and return a [[LoadBalancerFactory]] that allows you
* to easily inject a balancer into the Finagle stack via client configuration.
*/
/**
 * Constructor methods for various load balancers. The methods take balancer
 * specific parameters and return a [[LoadBalancerFactory]] that allows you
 * to easily inject a balancer into the Finagle stack via client configuration.
 */
object Balancers {
  /** Default MaxEffort used in constructors below. */
  val MaxEffort: Int = 5

  /**
   * An O(1), concurrent, weighted least-loaded fair load balancer.
   * This uses the ideas behind "power of 2 choices" [1] combined with
   * O(1) biased coin flipping through the aliasing method, described
   * in [[com.twitter.finagle.util.Drv Drv]].
   *
   * @param maxEffort the maximum amount of "effort" we're willing to
   * expend on a load balancing decision without reweighing.
   *
   * @param rng The PRNG used for flipping coins. Override for
   * deterministic tests.
   *
   * [1] Michael Mitzenmacher. 2001. The Power of Two Choices in
   * Randomized Load Balancing. IEEE Trans. Parallel Distrib. Syst. 12,
   * 10 (October 2001), 1094-1104.
   */
  def p2c(
    maxEffort: Int = MaxEffort,
    rng: Rng = Rng.threadLocal
  ): LoadBalancerFactory = new LoadBalancerFactory {
    def newBalancer[Req, Rep](
      endpoints: Activity[Set[ServiceFactory[Req, Rep]]],
      sr: StatsReceiver,
      exc: NoBrokersAvailableException
    ): ServiceFactory[Req, Rep] =
      new P2CBalancer(endpoints, maxEffort, rng, sr, exc) {
        private[this] val gauge = sr.addGauge("p2c")(1)
        // Remove the gauge on close, consistent with the other constructors
        // in this object; previously the gauge was never removed (leaked).
        override def close(when: Time): Future[Unit] = {
          gauge.remove()
          super.close(when)
        }
      }
  }

  /**
   * Like [[p2c]] but using the Peak EWMA load metric.
   *
   * Peak EWMA uses a moving average over an endpoint's round-trip time (RTT) that is
   * highly sensitive to peaks. This average is then weighted by the number of outstanding
   * requests, effectively increasing our resolution per-request. It is designed to react
   * to slow endpoints more quickly than least-loaded by penalizing them when they exhibit
   * slow response times. This load metric operates under the assumption that a loaded
   * endpoint takes time to recover and so it is generally safe for the advertised load
   * to incorporate an endpoint's history. However, this assumption breaks down in the
   * presence of long polling clients.
   *
   * @param decayTime The window of latency observations.
   *
   * @param maxEffort the maximum amount of "effort" we're willing to
   * expend on a load balancing decision without reweighing.
   *
   * @param rng The PRNG used for flipping coins. Override for
   * deterministic tests.
   *
   */
  def p2cPeakEwma(
    decayTime: Duration = 10.seconds,
    maxEffort: Int = MaxEffort,
    rng: Rng = Rng.threadLocal
  ): LoadBalancerFactory = new LoadBalancerFactory {
    def newBalancer[Req, Rep](
      endpoints: Activity[Set[ServiceFactory[Req, Rep]]],
      sr: StatsReceiver,
      exc: NoBrokersAvailableException
    ): ServiceFactory[Req, Rep] =
      new P2CBalancerPeakEwma(endpoints, decayTime, maxEffort, rng, sr, exc) {
        private[this] val gauge = sr.addGauge("p2cPeakEwma")(1)
        override def close(when: Time): Future[Unit] = {
          gauge.remove()
          super.close(when)
        }
      }
  }

  /**
   * An efficient strictly least-loaded balancer that maintains
   * an internal heap. Note, because weights are not supported by
   * the HeapBalancer they are ignored when the balancer is constructed.
   */
  def heap(rng: Random = new Random): LoadBalancerFactory =
    new LoadBalancerFactory {
      def newBalancer[Req, Rep](
        endpoints: Activity[Set[ServiceFactory[Req, Rep]]],
        sr: StatsReceiver,
        exc: NoBrokersAvailableException
      ): ServiceFactory[Req, Rep] = {
        new HeapBalancer(endpoints, sr, exc, rng) {
          private[this] val gauge = sr.addGauge("heap")(1)
          override def close(when: Time): Future[Unit] = {
            gauge.remove()
            super.close(when)
          }
        }
      }
    }

  /**
   * The aperture load-band balancer balances load to the smallest
   * subset ("aperture") of services so that:
   *
   * 1. The concurrent load, measured over a window specified by
   * `smoothWin`, to each service stays within the load band, delimited
   * by `lowLoad` and `highLoad`.
   * 2. Services receive load proportional to the ratio of their
   * weights.
   *
   * Unavailable services are not counted--the aperture expands as
   * needed to cover those that are available.
   */
  def aperture(
    smoothWin: Duration = 5.seconds,
    lowLoad: Double = 0.5,
    highLoad: Double = 2,
    minAperture: Int = 1,
    timer: Timer = DefaultTimer.twitter,
    maxEffort: Int = MaxEffort,
    rng: Rng = Rng.threadLocal
  ): LoadBalancerFactory = new LoadBalancerFactory {
    def newBalancer[Req, Rep](
      endpoints: Activity[Set[ServiceFactory[Req, Rep]]],
      sr: StatsReceiver,
      exc: NoBrokersAvailableException
    ): ServiceFactory[Req, Rep] = {
      new ApertureLoadBandBalancer(endpoints, smoothWin, lowLoad,
        highLoad, minAperture, maxEffort, rng, timer, sr, exc) {
        private[this] val gauge = sr.addGauge("aperture")(1)
        override def close(when: Time): Future[Unit] = {
          gauge.remove()
          super.close(when)
        }
      }
    }
  }
}
| rojanu/finagle | finagle-core/src/main/scala/com/twitter/finagle/loadbalancer/Balancers.scala | Scala | apache-2.0 | 5,579 |
package gov.uk.dvla.vehicles.acquire.runner
import cucumber.api.CucumberOptions
import cucumber.api.junit.Cucumber
import org.junit.runner.RunWith
// Cucumber/JUnit runner that executes a single acceptance-test feature file:
// checking the keeper end date before an acquire transaction is processed.
// Only scenarios tagged @working run; scenarios tagged @Ignore are excluded.
// NOTE(review): "Proccessing" is misspelt, but the class name mirrors the
// .feature filename referenced below — renaming one requires renaming both.
@RunWith(classOf[Cucumber])
@CucumberOptions(
  features = Array("acceptance-tests/src/test/resources/gherkin/CheckKeeperEndDateBeforeProccessingAcquireTransaction.feature"),
  glue = Array("gov.uk.dvla.vehicles.acquire.stepdefs"),
  tags = Array("@working","~@Ignore")
)
class CheckKeeperEndDateBeforeProccessingAcquireTransaction {
}
| dvla/vehicles-acquire-online | acceptance-tests/src/test/scala/gov/uk/dvla/vehicles/acquire/runner/CheckKeeperEndDateBeforeProccessingAcquireTransaction.scala | Scala | mit | 486 |
package com.twitter.finagle.toggle
import com.twitter.finagle.server.ServerInfo
import com.twitter.finagle.stats.{InMemoryStatsReceiver, NullStatsReceiver}
import java.util.concurrent.{ConcurrentHashMap, ConcurrentMap}
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
import scala.collection.JavaConverters._
@RunWith(classOf[JUnitRunner])
// Exercises StandardToggleMap: library registration, libraryName validation,
// instance caching, resource-based configs and overlays, the precedence order
// of toggle sources (in-memory > flag > service-loaded), and stats reporting.
class StandardToggleMapTest extends FunSuite {
  // Fresh, isolated registry per test so tests don't share cached ToggleMaps.
  private def newRegistry(): ConcurrentMap[String, ToggleMap] =
    new ConcurrentHashMap[String, ToggleMap]()
  test("registeredLibraries") {
    // nanoTime makes the library name unique so the global registry is clean.
    val uniqueLibName = s"com.twitter.${System.nanoTime}"
    assert(!StandardToggleMap.registeredLibraries.contains(uniqueLibName))
    val tm = StandardToggleMap(uniqueLibName, NullStatsReceiver)
    assert(StandardToggleMap.registeredLibraries.contains(uniqueLibName))
    assert(tm == StandardToggleMap.registeredLibraries(uniqueLibName))
  }
  // note the underlying utility (Toggle.validateId) is heavily tested in ToggleTest
  test("apply validates libraryName") {
    def assertNotAllowed(libraryName: String): Unit = {
      intercept[IllegalArgumentException] {
        StandardToggleMap(libraryName, NullStatsReceiver)
      }
    }
    // Names must be non-empty, dotted, lowercase-start identifiers.
    assertNotAllowed("")
    assertNotAllowed("A")
    assertNotAllowed("finagle")
    assertNotAllowed("com.toggle!")
  }
  test("apply returns the same instance for a given libraryName") {
    val name = "com.twitter.Test"
    val registry = newRegistry()
    val tm0 =
      StandardToggleMap(name, NullStatsReceiver, ToggleMap.newMutable(), ServerInfo(), registry)
    val tm1 =
      StandardToggleMap(name, NullStatsReceiver, ToggleMap.newMutable(), ServerInfo(), registry)
    // `eq`: the registry must cache and return the identical instance.
    assert(tm0 eq tm1)
  }
  test("apply with a known libraryName") {
    flag.overrides.let(Map.empty) {
      // should load `ServiceLoadedToggleTestA`
      val tm = StandardToggleMap("com.twitter.finagle.toggle.test.A", NullStatsReceiver)
      val togs = tm.iterator.toSeq
      assert(togs.size == 1)
      assert(togs.head.id == "com.toggle.a")
    }
  }
  test("apply with an unknown libraryName") {
    flag.overrides.let(Map.empty) {
      val tm = StandardToggleMap("com.twitter.finagle.toggle.test.ZZZ", NullStatsReceiver)
      assert(tm.iterator.isEmpty)
      // Unknown toggles are undefined everywhere and cannot be applied.
      val toggle = tm("com.toggle.XYZ")
      assert(!toggle.isDefinedAt(245))
      intercept[UnsupportedOperationException] {
        toggle(245)
      }
    }
  }
  test("apply with a duplicate libraryName") {
    // Library "B" is service-loaded twice; construction must fail loudly.
    intercept[IllegalStateException] {
      StandardToggleMap("com.twitter.finagle.toggle.test.B", NullStatsReceiver)
    }
  }
  test("apply with resource-based configs") {
    val togMap = StandardToggleMap(
      // this will have corresponding file(s) in test/resources/com/twitter/toggles/configs/
      "com.twitter.finagle.toggle.tests.StandardToggleMapTest",
      NullStatsReceiver,
      NullToggleMap,
      ServerInfo.Empty,
      newRegistry()
    )
    val togs = togMap.iterator.toSeq
    def assertFraction(id: String, fraction: Double): Unit = {
      togs.find(_.id == id) match {
        case None => fail(s"$id not found in $togs")
        case Some(md) => assert(md.fraction == fraction)
      }
    }
    assertFraction("com.twitter.service-overrides-on", 1.0)
    assertFraction("com.twitter.service-overrides-off", 0.0)
    assertFraction("com.twitter.not-in-service-overrides", 0.0)
  }
  test("apply with resource-based configs and overrides") {
    // A "staging" environment selects the environment-specific overlay file.
    val serverInfo: ServerInfo = new ServerInfo {
      def environment: Option[String] = Some("staging")
      def id: String = "testing"
      def instanceId: Option[Long] = None
      def clusterId: String = id
    }
    val togMap = StandardToggleMap(
      // this will have corresponding file(s) in test/resources/com/twitter/toggles/configs/
      "com.twitter.finagle.toggle.tests.EnvOverlays",
      NullStatsReceiver,
      NullToggleMap,
      serverInfo,
      newRegistry()
    )
    val togs = togMap.iterator.toSeq
    def assertFraction(id: String, fraction: Double): Unit = {
      togs.find(_.id == id) match {
        case None => fail(s"$id not found in $togs")
        case Some(md) => assert(md.fraction == fraction)
      }
    }
    // Overlay flips the base value; toggles only in the base keep theirs.
    assertFraction("com.twitter.base-is-off", 1.0)
    assertFraction("com.twitter.only-in-base", 0.0)
  }
  test("selectResource ignores duplicate inputs") {
    // this will have a corresponding file in test/resources/com/twitter/toggles/configs/
    val rsc = getClass.getClassLoader
      .getResources(
        "com/twitter/toggles/configs/com.twitter.finagle.toggle.tests.StandardToggleMapTest.json"
      )
      .asScala
      .toSeq
      .head
    val selected = StandardToggleMap.selectResource("configName", Seq(rsc, rsc))
    assert(selected == rsc)
  }
  test("selectResource fails with multiple unique inputs") {
    // these will have a corresponding file in test/resources/com/twitter/toggles/configs/
    val rsc1 = getClass.getClassLoader
      .getResources(
        "com/twitter/toggles/configs/com.twitter.finagle.toggle.tests.StandardToggleMapTest.json"
      )
      .asScala
      .toSeq
      .head
    val rsc2 = getClass.getClassLoader
      .getResources("com/twitter/toggles/configs/com.twitter.finagle.toggle.tests.Valid.json")
      .asScala
      .toSeq
      .head
    intercept[IllegalArgumentException] {
      StandardToggleMap.selectResource("configName", Seq(rsc1, rsc2))
    }
  }
  test("Toggles use correct ordering") {
    // we want to see what the fractions are when the toggle
    // exists in multiple places.
    // we'll use "a", which is service loaded to use 1.0
    // we can test what happens by modifying that flag and in-memory toggle
    def assertFraction(togMap: ToggleMap, fraction: Double): Unit = {
      val togs = togMap.iterator.toSeq
      assert(togs.size == 1)
      assert(togs.head.id == "com.toggle.a")
      assert(togs.head.fraction == fraction)
    }
    val inMem = ToggleMap.newMutable()
    // should load `ServiceLoadedToggleTestA`
    val togMap = StandardToggleMap(
      "com.twitter.finagle.toggle.test.A",
      NullStatsReceiver,
      inMem,
      ServerInfo.Empty,
      newRegistry()
    )
    flag.overrides.letClear("com.toggle.a") {
      // start without the flag or in-memory, and only the service loaded
      assertFraction(togMap, 1.0)
      // now set the flag, and verify we pick that up
      flag.overrides.let("com.toggle.a", 0.5) {
        assertFraction(togMap, 0.5)
        // now set the in-memory version, verify we use that
        inMem.put("com.toggle.a", 0.3)
        assertFraction(togMap, 0.3)
        // remove the in-memory value and stop using it
        inMem.remove("com.toggle.a")
        assertFraction(togMap, 0.5)
      }
      // now we are back outside of the flag being set,
      // verify its still using the service loaded setting
      assertFraction(togMap, 1.0)
      // change in-memory and make sure that its used
      inMem.put("com.toggle.a", 0.8)
      assertFraction(togMap, 0.8)
    }
  }
  test("Toggles are observed") {
    val toggleName = "com.toggle.Test"
    val libraryName = "com.twitter.finagle.toggle.test.Observed"
    val stats = new InMemoryStatsReceiver()
    val inMem = ToggleMap.newMutable()
    // start with the toggle turned on.
    inMem.put(toggleName, 1.0)
    val togMap = StandardToggleMap(libraryName, stats, inMem, ServerInfo.Empty, newRegistry())
    val gauge = stats.gauges(Seq("toggles", libraryName, "checksum"))
    val initial = gauge()
    // turn the toggle off and make sure the checksum changes
    inMem.put(toggleName, 0.0)
    assert(initial != gauge())
  }
  test("components") {
    val inMem = ToggleMap.newMutable()
    val togMap = StandardToggleMap(
      "com.twitter.components",
      NullStatsReceiver,
      inMem,
      ServerInfo.Empty,
      newRegistry()
    )
    // The standard stack is composed of 5 layers, one of which is ours.
    val components = ToggleMap.components(togMap)
    assert(5 == components.size, components.mkString(", "))
    assert(components.exists(_ eq inMem))
  }
}
| mkhq/finagle | finagle-toggle/src/test/scala/com/twitter/finagle/toggle/StandardToggleMapTest.scala | Scala | apache-2.0 | 8,062 |
/*
mls: basic machine learning algorithms for Scala
Copyright (C) 2014 Davi Pereira dos Santos
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package ml.classifiers
import al.strategies.Strategy
import clean.lib.{CM, Ds}
import ml.Pattern
import ml.models.Model
import util.Datasets
import scala.util.Random
// Meta-learner that, for a given dataset/run/fold/strategy, picks the best of
// five base classifiers by 10-fold cross-validated kappa over the queried
// patterns, caches the winner's id in the dataset's database, and then
// delegates all Learner operations to that winner.
case class BestClassifCV50_10foldKappa(ds: Ds, r: Int, f: Int, s: Strategy, queries: Seq[Pattern], fqueries: Seq[Pattern], seed: Int, poolForKNN: Seq[Pattern]) extends Learner with CM {
  override lazy val toString = s"BestClassifCV50_10foldk: $ds"
  // Winner's id: read from the classif5010foldk cache table if present,
  // otherwise run the model selection (via `classif`) and persist the result.
  // NOTE(review): on any other query result this calls ds.error — presumably
  // that aborts; confirm ds.error's behavior before relying on the Int type.
  lazy val id = ds.read(s"select c from classif5010foldk where s=${s.id} and l=${s.learner.id} and r=$r and f=$f") match {
    case List(Vector(x)) => x.toInt
    case List() =>
      val res = classif.id
      ds.write(s"insert into classif5010foldk values (${s.id},${s.learner.id},$r,$f,$res)")
      res
    case x => ds.error(s"problemas: $x")
  }
  // Descriptive attributes are all delegated to the selected classifier.
  lazy val abr: String = classif.abr
  lazy val attPref: String = classif.attPref
  lazy val boundaryType: String = classif.boundaryType
  // Candidate pool: 5-NN (weighted, Euclidean), C4.5, random forest,
  // naive Bayes and an RBF SVM.
  lazy val learners = Seq(
    KNNBatcha(5, "eucl", poolForKNN, weighted = true)
    , C45()
    , RF(seed)
    , NBBatch()
    , SVMLibRBF(seed)
  )
  // Model selection: maximize the sum of kappa over a 10-fold CV of the
  // (deterministically shuffled) queries; filtered queries are used for
  // learners whose qf(l) flag demands them.
  lazy val classif = learners.maxBy { l =>
    val qs = new Random(seed).shuffle(if (qf(l)) fqueries.toVector else queries.toVector)
    Datasets.kfoldCV(qs, 10) { (tr, ts, foldnr, minsize) =>
      kappa(l.build(tr).confusion(ts))
    }.sum
  }
  override lazy val querFiltro = qf(classif)
  // Learner API: pure delegation to the selected classifier.
  def update(model: Model, fast_mutable: Boolean, semcrescer: Boolean)(pattern: Pattern) = classif.update(model, fast_mutable, semcrescer)(pattern)
  def expected_change(model: Model)(pattern: Pattern) = classif.expected_change(model)(pattern)
  def build(pool: Seq[Pattern]) = classif.build(pool)
  val context: String = "bestcv50-10foldk"
}
| active-learning/active-learning-scala | src/main/scala/ml/classifiers/BestClassifCV50_10foldKappa.scala | Scala | gpl-2.0 | 2,474 |
// timber -- Copyright 2012-2021 -- Justin Patterson
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.scalawag.timber.slf4j.receiver
import org.scalawag.timber.api.Entry
import org.scalawag.timber.backend.receiver.formatter.EntryFormatter
import ch.qos.logback.core.{FileAppender, Context}
import ch.qos.logback.core.rolling._
/** Factory helpers for wiring timber's [[EntryFormatter]] into logback
  * appenders. Each helper sets the component's context, registers it with the
  * [[LogbackContext]] (so it can be started/stopped), and returns it.
  * Ordering matters: setContext must happen before the component is added.
  */
package object logback {
  /** Creates a plain [[FileAppender]] writing formatted entries to `filename`.
    *
    * @param filename path of the log file
    * @param name optional appender name
    * @param append whether to append to an existing file (logback default if None)
    * @param prudent enable logback's prudent (multi-JVM safe) mode
    */
  def file(
      filename: String,
      name: Option[String] = None,
      append: Option[Boolean] = None,
      prudent: Option[Boolean] = None
  )(implicit formatter: EntryFormatter, context: LogbackContext) = {
    // Wrap the timber formatter as a logback encoder.
    val encoder = new EncoderAdapter(formatter)
    encoder.setContext(context)
    context.add(encoder)
    val appender = new FileAppender[Entry]
    appender.setContext(context)
    appender.setFile(filename)
    appender.setEncoder(encoder)
    // Optional settings are applied only when the caller supplied them.
    name.foreach(appender.setName)
    prudent.foreach(appender.setPrudent)
    append.foreach(appender.setAppend)
    context.add(appender)
    appender
  }
  /** Creates a [[RollingFileAppender]] with the given rolling policy and an
    * optional separate triggering policy (some rolling policies, e.g.
    * time-based ones, are their own trigger).
    */
  def rollingFile(
      filename: String,
      rollingPolicy: RollingPolicy,
      triggeringPolicy: Option[TriggeringPolicy[Entry]] = None,
      name: Option[String] = None,
      append: Option[Boolean] = None,
      prudent: Option[Boolean] = None
  )(implicit formatter: EntryFormatter, context: LogbackContext) = {
    val encoder = new EncoderAdapter(formatter)
    encoder.setContext(context)
    context.add(encoder)
    val appender = new RollingFileAppender[Entry]
    appender.setContext(context)
    appender.setFile(filename)
    appender.setEncoder(encoder)
    // Rolling policies need a back-reference to their appender.
    rollingPolicy.setParent(appender)
    appender.setRollingPolicy(rollingPolicy)
    triggeringPolicy.foreach(appender.setTriggeringPolicy)
    name.foreach(appender.setName)
    prudent.foreach(appender.setPrudent)
    append.foreach(appender.setAppend)
    context.add(appender)
    appender
  }
  /** Rolls by time according to `fileNamePattern` (e.g. a %d{...} pattern);
    * optionally caps history and cleans old archives on startup. */
  def timeBasedRollingPolicy(
      fileNamePattern: String,
      maxHistory: Option[Int] = None,
      cleanHistoryOnStart: Option[Boolean] = None
  )(implicit context: LogbackContext) = {
    val policy = new TimeBasedRollingPolicy[Entry]
    policy.setContext(context)
    policy.setFileNamePattern(fileNamePattern)
    maxHistory.foreach(policy.setMaxHistory)
    cleanHistoryOnStart.foreach(policy.setCleanHistoryOnStart)
    context.add(policy)
    policy
  }
  /** Rolls through a fixed window of numbered archive files
    * (`fileNamePattern` must contain %i). */
  def fixedWindowRollingPolicy(fileNamePattern: String, minIndex: Option[Int] = None, maxIndex: Option[Int] = None)(
      implicit context: LogbackContext
  ) = {
    val policy = new FixedWindowRollingPolicy
    policy.setContext(context)
    policy.setFileNamePattern(fileNamePattern)
    minIndex.foreach(policy.setMinIndex)
    maxIndex.foreach(policy.setMaxIndex)
    context.add(policy)
    policy
  }
  /** Triggers a roll when the active file exceeds `maxFileSize`
    * (a logback size string such as "10MB"). */
  def sizeBasedTriggeringPolicy(maxFileSize: Option[String] = None)(implicit context: LogbackContext) = {
    val policy = new SizeBasedTriggeringPolicy[Entry]
    policy.setContext(context)
    maxFileSize.foreach(policy.setMaxFileSize)
    context.add(policy)
    policy
  }
}
| scalawag/timber | timber-logback-support/src/main/scala/org/scalawag/timber/slf4j/receiver/logback/package.scala | Scala | apache-2.0 | 3,528 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.streaming
import java.io.File
import org.apache.spark.sql.{DataFrame, Dataset}
import org.apache.spark.sql.catalyst.plans.logical.Range
import org.apache.spark.sql.catalyst.util.stringToFile
import org.apache.spark.sql.connector.read.streaming
import org.apache.spark.sql.connector.read.streaming.{ReadLimit, SupportsAdmissionControl}
import org.apache.spark.sql.execution.streaming.{LongOffset, MemoryStream, Offset, SerializedOffset, Source, StreamingExecutionRelation}
import org.apache.spark.sql.types.{LongType, StructType}
// Verifies Trigger.AvailableNow semantics against three kinds of streaming
// sources (a plain Source, a SupportsAdmissionControl source, and a
// MicroBatchStream), both alone and combined with a file source: a query
// should consume exactly the data available at start time and then terminate.
class TriggerAvailableNowSuite extends FileStreamSourceTest {
  import testImplicits._
  // Common harness: a source whose available offset the test can advance.
  abstract class TestDataFrameProvider {
    @volatile var currentOffset = 0L
    def toDF: DataFrame
    def incrementAvailableOffset(numNewRows: Int): Unit
    // Prefix expected in StreamingQueryProgress source descriptions.
    def sourceName: String
  }
  // DSv1-style Source producing the range (start, end] as its batch.
  class TestSource extends TestDataFrameProvider with Source {
    override def getOffset: Option[Offset] = {
      if (currentOffset <= 0) None else Some(LongOffset(currentOffset))
    }
    override def getBatch(start: Option[Offset], end: Offset): DataFrame = {
      // After a restart currentOffset is 0; resync it from the end offset.
      if (currentOffset == 0) currentOffset = getOffsetValue(end)
      val plan = Range(
        start.map(getOffsetValue).getOrElse(0L) + 1L, getOffsetValue(end) + 1L, 1, None,
        isStreaming = true)
      Dataset.ofRows(spark, plan)
    }
    override def incrementAvailableOffset(numNewRows: Int): Unit = {
      currentOffset += numNewRows
    }
    override def toDF: DataFrame =
      Dataset.ofRows(spark, StreamingExecutionRelation(this, spark))
    override def schema: StructType = new StructType().add("value", LongType)
    override def stop(): Unit = {}
    // Offsets may arrive serialized (from the offset log) or live.
    private def getOffsetValue(offset: Offset): Long = {
      offset match {
        case s: SerializedOffset => LongOffset(s).offset
        case l: LongOffset => l.offset
        case _ => throw new IllegalArgumentException("incorrect offset type: " + offset)
      }
    }
    override def sourceName: String = this.getClass.getName
  }
  // Same source, but via the admission-control API; AvailableNow must override
  // the declared per-batch read limit.
  class TestSourceWithAdmissionControl extends TestSource with SupportsAdmissionControl {
    override def getDefaultReadLimit: ReadLimit = ReadLimit.maxRows(1) // this will be overridden
    override def latestOffset(startOffset: streaming.Offset, limit: ReadLimit): streaming.Offset = {
      val currentOffset = getOffset
      assert(currentOffset.nonEmpty,
        "the latestOffset should be called after incrementAvailableOffset")
      currentOffset.get
    }
  }
  // DSv2 micro-batch source backed by a MemoryStream.
  class TestMicroBatchStream extends TestDataFrameProvider {
    private lazy val memoryStream = MemoryStream[Long](0, spark.sqlContext)
    override def toDF: DataFrame = memoryStream.toDF()
    override def incrementAvailableOffset(numNewRows: Int): Unit = {
      for (_ <- 1 to numNewRows) {
        currentOffset += 1
        memoryStream.addData(currentOffset)
      }
    }
    // remove the trailing `$` in the class name
    override def sourceName: String = MemoryStream.getClass.getSimpleName.dropRight(1)
  }
  Seq(
    new TestSource,
    new TestSourceWithAdmissionControl,
    new TestMicroBatchStream
  ).foreach { testSource =>
    test(s"TriggerAvailableNow for multiple sources with ${testSource.getClass}") {
      withTempDirs { (src, target) =>
        val checkpoint = new File(target, "chk").getCanonicalPath
        val targetDir = new File(target, "data").getCanonicalPath
        var lastFileModTime: Option[Long] = None
        /** Create a text file with a single data item */
        def createFile(data: Int): File = {
          val file = stringToFile(new File(src, s"$data.txt"), data.toString)
          // Strictly increasing mod times keep file discovery order stable.
          if (lastFileModTime.nonEmpty) file.setLastModified(lastFileModTime.get + 1000)
          lastFileModTime = Some(file.lastModified)
          file
        }
        // Set up a query to read text files one at a time
        val df1 = spark
          .readStream
          .option("maxFilesPerTrigger", 1)
          .text(src.getCanonicalPath)
        val df2 = testSource.toDF
        def startQuery(): StreamingQuery = {
          df1.union(df2).writeStream
            .format("parquet")
            .trigger(Trigger.AvailableNow)
            .option("checkpointLocation", checkpoint)
            .start(targetDir)
        }
        testSource.incrementAvailableOffset(3)
        createFile(7)
        createFile(8)
        createFile(9)
        val q = startQuery()
        try {
          assert(q.awaitTermination(streamingTimeout.toMillis))
          // only one batch has data in both sources, thus counted, see SPARK-24050
          assert(q.recentProgress.count(_.numInputRows != 0) == 1)
          q.recentProgress.foreach { p =>
            assert(p.sources.exists(_.description.startsWith(testSource.sourceName)))
          }
          checkAnswer(sql(s"SELECT * from parquet.`$targetDir`"),
            Seq(1, 2, 3, 7, 8, 9).map(_.toString).toDF())
        } finally {
          q.stop()
        }
        testSource.incrementAvailableOffset(3)
        createFile(10)
        createFile(11)
        createFile(12)
        // run a second query
        val q2 = startQuery()
        try {
          assert(q2.awaitTermination(streamingTimeout.toMillis))
          // only one batch has data in both sources, thus counted, see SPARK-24050
          assert(q2.recentProgress.count(_.numInputRows != 0) == 1)
          q2.recentProgress.foreach { p =>
            assert(p.sources.exists(_.description.startsWith(testSource.sourceName)))
          }
          checkAnswer(sql(s"SELECT * from parquet.`$targetDir`"), (1 to 12).map(_.toString).toDF())
        } finally {
          q2.stop()
        }
      }
    }
  }
  Seq(
    new TestSource,
    new TestSourceWithAdmissionControl,
    new TestMicroBatchStream
  ).foreach { testSource =>
    test(s"TriggerAvailableNow for single source with ${testSource.getClass}") {
      val tableName = "trigger_available_now_test_table"
      withTable(tableName) {
        val df = testSource.toDF
        def startQuery(): StreamingQuery = {
          df.writeStream
            .format("memory")
            .queryName(tableName)
            .trigger(Trigger.AvailableNow)
            .start()
        }
        testSource.incrementAvailableOffset(3)
        val q = startQuery()
        try {
          assert(q.awaitTermination(streamingTimeout.toMillis))
          assert(q.recentProgress.count(_.numInputRows != 0) == 1)
          q.recentProgress.foreach { p =>
            assert(p.sources.exists(_.description.startsWith(testSource.sourceName)))
          }
          checkAnswer(spark.table(tableName), (1 to 3).toDF())
        } finally {
          q.stop()
        }
        testSource.incrementAvailableOffset(3)
        // run a second query
        val q2 = startQuery()
        try {
          assert(q2.awaitTermination(streamingTimeout.toMillis))
          assert(q2.recentProgress.count(_.numInputRows != 0) == 1)
          q2.recentProgress.foreach { p =>
            assert(p.sources.exists(_.description.startsWith(testSource.sourceName)))
          }
          checkAnswer(spark.table(tableName), (1 to 6).toDF())
        } finally {
          q2.stop()
        }
      }
    }
  }
}
| shaneknapp/spark | sql/core/src/test/scala/org/apache/spark/sql/streaming/TriggerAvailableNowSuite.scala | Scala | apache-2.0 | 8,024 |
package org.reactivecouchbase.rs.scaladsl.json
import com.couchbase.client.java.document.json.{JsonArray, JsonNull, JsonObject}
import play.api.libs.json._
/** Bidirectional translation between Play JSON trees and the Couchbase Java
  * SDK's JsonObject/JsonArray representation. */
private[json] object JsonConverter {
  import collection.JavaConverters._
  /** Recursively converts a Play [[JsValue]] to the matching Couchbase value:
    * JsonNull / String / Boolean / java BigDecimal / JsonArray / JsonObject. */
  def convertJsonValue(value: JsValue): Any = value match {
    case JsNull          => JsonNull.INSTANCE
    case JsString(str)   => str
    case JsBoolean(bool) => bool
    case JsNumber(num)   => num.bigDecimal
    case JsArray(items) =>
      // JsonArray.add mutates and returns the same instance.
      val arr = JsonArray.create()
      items.foreach(item => arr.add(convertJsonValue(item)))
      arr
    case JsObject(fields) =>
      val obj = JsonObject.create()
      fields.toSeq.foreach { case (key, v) => obj.put(key, convertJsonValue(v)) }
      obj
    case _ => throw new RuntimeException("Unknown type")
  }
  /** Converts a whole Play [[JsObject]] into a Couchbase JsonObject. */
  def convertToJson(value: JsObject): JsonObject = {
    val obj = JsonObject.create()
    value.value.toSeq.foreach { case (key, v) => obj.put(key, convertJsonValue(v)) }
    obj
  }
  /** Inverse of [[convertJsonValue]]: Couchbase SDK value to Play [[JsValue]].
    * The primitive cases match the boxed forms the SDK stores. */
  def convertToJsValue(value: Any): JsValue = value match {
    case obj: JsonObject => JsObject(obj.toMap.asScala.toMap.mapValues(convertToJsValue))
    case arr: JsonArray  => JsArray(arr.toList.asScala.toIndexedSeq.map(convertToJsValue))
    case bool: Boolean   => JsBoolean(bool)
    case dbl: Double     => JsNumber(dbl)
    case lng: Long       => JsNumber(lng)
    case int: Int        => JsNumber(int)
    case str: String     => JsString(str)
    case null            => JsNull
    case _               => throw new RuntimeException("Unknown type")
  }
  /** Strings are parsed as JSON documents; any other SDK value is converted
    * structurally. */
  def safeConversion(json: AnyRef): JsValue = json match {
    case str: String => Json.parse(str)
    case other       => convertToJsValue(other)
  }
}
| ReactiveCouchbase/reactivecouchbase-rs-core | src/main/scala/org/reactivecouchbase/rs/scaladsl/json/converter.scala | Scala | apache-2.0 | 1,585 |
package svez.akka.stream.stages
import akka.NotUsed
import akka.stream.SinkShape
import akka.stream.scaladsl.{GraphDSL, Sink, Source}
import akka.stream.testkit.TestSubscriber
import cats.data.NonEmptyList
import cats.syntax.validated._
// Checks that the PartitionValidatedNel stage routes Valid values to its
// `valid` outlet and flattens each NonEmptyList of errors onto its `invalid`
// outlet, completing both when upstream completes.
class PartitionValidatedNelSpec extends StageSpec {
  "PartitionValidatedNel" should "partition a flow of ValidatedNel[E, A] in two flows of E and A" in new Test {
    val src = Source(List(
      1.valid[NonEmptyList[String]],
      2.valid[NonEmptyList[String]],
      NonEmptyList.of("BOOM!", "KABOOM!").invalid[Int],
      3.valid[NonEmptyList[String]],
      NonEmptyList.of("BOOM 2!").invalid[Int]
    ))
    src.runWith(testSink)
    // Demand must be signalled on both probes before elements flow.
    successProbe.request(3)
    failureProbe.request(3)
    successProbe.expectNext(1)
    successProbe.expectNext(2)
    successProbe.expectNext(3)
    // The two-error NonEmptyList is emitted as two separate elements.
    failureProbe.expectNext("BOOM!")
    failureProbe.expectNext("KABOOM!")
    failureProbe.expectNext("BOOM 2!")
    successProbe.expectComplete()
    failureProbe.expectComplete()
  }
  // Fixture: a fan-out sink wiring the stage's two outlets to test probes.
  trait Test {
    val failureProbe = TestSubscriber.probe[String]()
    val successProbe = TestSubscriber.probe[Int]()
    val testSink = Sink.fromGraph(GraphDSL.create() { implicit builder: GraphDSL.Builder[NotUsed] ⇒
      import GraphDSL.Implicits._
      import svez.akka.stream.stages.partitions._
      val valStage = builder.add(PartitionValidatedNel[String, Int]())
      valStage.invalid ~> Sink.fromSubscriber(failureProbe)
      valStage.valid ~> Sink.fromSubscriber(successProbe)
      SinkShape(valStage.in)
    })
  }
} | svezfaz/akka-stream-fp | core/src/test/scala/svez/akka/stream/stages/PartitionValidatedNelSpec.scala | Scala | apache-2.0 | 1,557 |
package week4
/**
* Created by keid on 02/10/2016.
*/
import java.util.NoSuchElementException
/**
* Created by keid on 25/09/2016.
*/
/** A minimal immutable, covariant cons-list (Coursera progfun, week 4). */
trait List[+T] {
  /** True iff the list contains no elements. */
  def isEmpty: Boolean
  /** The first element; throws NoSuchElementException on the empty list. */
  def head: T
  /** Every element but the first; throws NoSuchElementException on the empty list. */
  def tail: List[T]
  /** A new list with `elem` in front of this one. The lower bound `U >: T`
    * keeps `List` covariant while letting the element type widen. */
  def prepend[U >: T](elem: U): List[U] = new Cons(elem, this)
}
/** A non-empty list. The `val` constructor parameters are evaluated once at
  * construction and become fields implementing the trait's abstract
  * `head`/`tail` accessors. */
class Cons[T](val head: T, val tail: List[T]) extends List[T] {
  def isEmpty: Boolean = false
}
/** The empty list. Being a `List[Nothing]`, covariance lets the single
  * instance stand in for the empty list of any element type. */
object Nil extends List[Nothing] {
  def isEmpty: Boolean = true
  def head: Nothing = throw new NoSuchElementException("Nil.head")
  def tail: Nothing = throw new NoSuchElementException("Nil.tail")
}
object List {
  /** Builds a list from any number of elements, e.g. `List()`, `List(1, 2)`,
    * `List(1, 2, 3)`. Generalizes the original fixed-arity constructors
    * (0 and 2 arguments) while keeping every existing call site compiling. */
  def apply[T](xs: T*): List[T] =
    xs.foldRight(Nil: List[T])((x, acc) => new Cons(x, acc))
}
} | kevllino/scala-specialization | 00-ProgFun/worksheets/src/main/scala/week4/List.scala | Scala | mit | 819 |
/*
* Copyright (c) 2011-2017 Interfaculty Department of Geoinformatics, University of
* Salzburg (Z_GIS) & Institute of Geological and Nuclear Sciences Limited (GNS Science)
* in the SMART Aquifer Characterisation (SAC) programme funded by the New Zealand
* Ministry of Business, Innovation and Employment (MBIE)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package services
import javax.inject.Inject
import akka.actor.{Actor, ActorRef}
import play.api.libs.concurrent.InjectedActorSupport
import services.LuceneIndexBuilderMasterActor.GetSpecificIndexBuilder
// Companion holding the master actor's message protocol.
object LuceneIndexBuilderMasterActor {
  // Request: reply with an index-builder ActorRef dedicated to `catalogueName`.
  case class GetSpecificIndexBuilder(catalogueName: String)
}
/** at that point just a vehicle to get an injected child actor
* TODO put a "create index" here
*/
// Master actor that, on request, creates a Guice-injected child actor for
// building the Lucene index of a named catalogue and replies with its ref.
// NOTE(review): the child name is fixed per catalogue, so a second
// GetSpecificIndexBuilder for the same catalogueName while the first child is
// alive would raise InvalidActorNameException — confirm callers only ask once.
class LuceneIndexBuilderMasterActor @Inject()(luceneIndexBuilderActorFactory: LuceneIndexBuilderActor.Factory) extends Actor with
  InjectedActorSupport {
  override def receive: Receive = {
    case GetSpecificIndexBuilder(catalogueName) => {
      // injectedChild runs the assisted-injection factory under this context.
      val indexBuilderActor: ActorRef = injectedChild(luceneIndexBuilderActorFactory(), s"indexBuilder-$catalogueName")
      sender() ! indexBuilderActor
    }
  }
}
| ZGIS/smart-csw-ingester | app/services/LuceneIndexBuilderMasterActor.scala | Scala | apache-2.0 | 1,704 |
package com.algos.inprogress
/** Maximum stock-profit style problems: best single buy/sell difference and
  * best total profit with at most k non-overlapping transactions. */
object MaxDifference {

  /** Demo entry point: prints the answers for the two sample inputs. */
  def main(args: Array[String]): Unit = {
    println(maxDiff(Array(3, 5, 11, 1, 3, 1, 7, 0, 5)))
    println(maxDiffK(Array(3, 4, 1, 3, 2, 5), 3))
  }

  /** Maximum value of arr(j) - arr(i) over pairs i < j (best single buy/sell
    * profit). Returns 0 for empty or one-element input, or when no positive
    * difference exists. O(n) time, O(1) space. */
  def maxDiff(arr: Array[Int]): Int = {
    if (arr.isEmpty) 0
    else {
      var minSoFar = arr(0) // smallest value seen so far (best buy price)
      var best = 0          // best positive difference found so far
      var i = 1
      while (i < arr.length) {
        val v = arr(i)
        if (v < minSoFar) minSoFar = v
        else if (v - minSoFar > best) best = v - minSoFar
        i += 1
      }
      best
    }
  }

  /** Maximum total profit using at most `k` non-overlapping buy/sell
    * transactions (buy at some index, sell at a strictly later one).
    * Standard O(k * n) time, O(n) space dynamic program:
    *   profit(t)(j) = max(profit(t)(j-1), arr(j) + max_{m<j}(profit(t-1)(m) - arr(m)))
    * where the inner maximum is carried forward in `bestBuy`.
    * (The previous in-progress version printed debug state, allowed
    * overlapping transactions, and always returned 0.) */
  def maxDiffK(arr: Array[Int], k: Int): Int = {
    val n = arr.length
    if (n < 2 || k <= 0) 0
    else {
      var prev = Array.fill(n)(0) // best profits using t - 1 transactions
      var t = 1
      while (t <= k) {
        val cur = Array.fill(n)(0)
        var bestBuy = prev(0) - arr(0) // best "prior profit minus buy price"
        var j = 1
        while (j < n) {
          cur(j) = math.max(cur(j - 1), arr(j) + bestBuy)
          bestBuy = math.max(bestBuy, prev(j) - arr(j))
          j += 1
        }
        prev = cur
        t += 1
      }
      prev(n - 1)
    }
  }
}
| waxmittmann/Algorithms-with-Scala | src/main/scala/com/algos/inprogress/MaxDifference.scala | Scala | mit | 1,670 |
package wdl.expression
import wdl.WdlExpression
import org.scalatest.{FlatSpec, Matchers}
// Round-trip check of WdlExpression.toWomString: each WDL expression form is
// parsed from a string and must stringify back to exactly the same text.
class WdlExpressionSpec extends FlatSpec with Matchers {
  // Shorthand: parse a WDL expression from source text.
  val expr: String => WdlExpression = WdlExpression.fromString
  /* String-ification */
  // Binary arithmetic, comparison and logical operators.
  "Expression Evaluator string-ifier" should "Make strings out of + expressions" in {
    expr("1 + 2").toWomString shouldEqual "1 + 2"
  }
  it should "Make strings out of - expressions" in {
    expr("1 - 2").toWomString shouldEqual "1 - 2"
  }
  it should "Make strings out of * expressions" in {
    expr("1 * 2").toWomString shouldEqual "1 * 2"
  }
  it should "Make strings out of / expressions" in {
    expr("1 / 2").toWomString shouldEqual "1 / 2"
  }
  it should "Make strings out of % expressions" in {
    expr("1 % 2").toWomString shouldEqual "1 % 2"
  }
  it should "Make strings out of < expressions" in {
    expr("1 < 2").toWomString shouldEqual "1 < 2"
  }
  it should "Make strings out of <= expressions" in {
    expr("1 <= 2").toWomString shouldEqual "1 <= 2"
  }
  it should "Make strings out of > expressions" in {
    expr("1 > 2").toWomString shouldEqual "1 > 2"
  }
  it should "Make strings out of >= expressions" in {
    expr("1 >= 2").toWomString shouldEqual "1 >= 2"
  }
  it should "Make strings out of == expressions" in {
    expr("1 == 2").toWomString shouldEqual "1 == 2"
  }
  it should "Make strings out of != expressions" in {
    expr("1 != 2").toWomString shouldEqual "1 != 2"
  }
  it should "Make strings out of && expressions" in {
    expr("1 && 2").toWomString shouldEqual "1 && 2"
  }
  it should "Make strings out of || expressions" in {
    expr("1 || 2").toWomString shouldEqual "1 || 2"
  }
  // Literals and identifiers as operands.
  it should "Make strings out of expression with strings in it" in {
    expr("\\"a\\" + \\"b\\"").toWomString shouldEqual "\\"a\\" + \\"b\\""
  }
  it should "Make strings out of expression with floats in it" in {
    expr("1.1 + 2.2").toWomString shouldEqual "1.1 + 2.2"
  }
  it should "Make strings out of expression with identifiers in it" in {
    expr("foo + bar").toWomString shouldEqual "foo + bar"
  }
  // Access, application and unary forms.
  it should "Make strings out of member access expressions" in {
    expr("a.b.c").toWomString shouldEqual "a.b.c"
  }
  it should "Make strings out of function calls" in {
    expr("a(b, c)").toWomString shouldEqual "a(b, c)"
  }
  it should "Make strings out of array/map lookups" in {
    expr("a[0]").toWomString shouldEqual "a[0]"
  }
  it should "Make strings out of unary minus" in {
    expr("-2").toWomString shouldEqual "-2"
  }
  it should "Make strings out of unary plus" in {
    expr("+2").toWomString shouldEqual "+2"
  }
  it should "Make strings out of logical not" in {
    expr("!2").toWomString shouldEqual "!2"
  }
  it should "Make strings out of booleans" in {
    expr("true != false").toWomString shouldEqual "true != false"
  }
}
| ohsu-comp-bio/cromwell | wdl/src/test/scala/wdl/expression/WdlExpressionSpec.scala | Scala | bsd-3-clause | 2,844 |
/*
* Copyright (c) 2014-2020 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.eval
import monix.execution.exceptions.{CompositeException, DummyException}
import monix.execution.internal.Platform
import scala.util.{Failure, Success}
object CoevalGuaranteeSuite extends BaseTestSuite {
  // A finalizer attached via `guarantee` must run after a successful
  // evaluation, and the computed result must be preserved.
  test("finalizer is evaluated on success") { _ =>
    var input = Option.empty[Int]
    val coeval = Coeval(1).map(_ + 1).guarantee(Coeval.eval { input = Some(1) })
    val result = coeval.runTry()
    assertEquals(input, Some(1))
    assertEquals(result, Success(2))
  }
  // The finalizer must also run when the wrapped computation fails, and the
  // original error must still be the one that is reported.
  test("finalizer is evaluated on error") { _ =>
    val dummy = DummyException("dummy")
    var input = Option.empty[Int]
    val coeval = Coeval.raiseError[Int](dummy).guarantee(Coeval.eval { input = Some(1) })
    val result = coeval.runTry()
    assertEquals(input, Some(1))
    assertEquals(result, Failure(dummy))
  }
  // When both the computation and the finalizer throw, the computation's error
  // is signaled and the finalizer's error must not be lost. How the second
  // error is attached is platform dependent: on the JVM it becomes a
  // suppressed exception of the first; on other platforms (JS) both errors
  // are wrapped together in a CompositeException.
  test("if finalizer throws, report finalizer error and signal first error") { _ =>
    val useError = DummyException("dummy")
    val finalizerError = DummyException("finalizer")
    val coeval = Coeval(1)
      .flatMap(_ => Coeval.raiseError[Int](useError))
      .guarantee(Coeval.raiseError[Unit](finalizerError))
    coeval.runTry() match {
      case Failure(error) =>
        if (Platform.isJVM) {
          assertEquals(error, useError)
          error.getSuppressed match {
            case Array(error2) =>
              assertEquals(error2, finalizerError)
            case _ =>
              fail("Unexpected suppressed errors list: " + error.getSuppressed.toList)
          }
        } else
          error match {
            case CompositeException(Seq(`useError`, `finalizerError`)) =>
              () // pass
            case _ =>
              fail(s"Unexpected error: $error")
          }
      case other =>
        fail(s"Unexpected result: $other")
    }
  }
}
| alexandru/monifu | monix-eval/shared/src/test/scala/monix/eval/CoevalGuaranteeSuite.scala | Scala | apache-2.0 | 2,487 |
/*
* Copyright (C) 2014 Romain Reuillon
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openmole.core
import java.io.File
import org.openmole.core.workspace.{ TmpDirectory, Workspace }
import org.openmole.tool.file._
import org.openmole.tool.hash._
import squants.time.Time
import scala.util.{ Failure, Success, Try }
package object fileservice {
  // Returns the sidecar lock file ("<path>-lock") used to serialise
  // concurrent access to `f`; the lock file is created if missing.
  def lockFile(f: File) = {
    val lock = new File(f.getPath + "-lock")
    lock.createNewFile()
    lock
  }
  implicit class FileServiceDecorator(file: File) {
    // Populates `file` via `get` the first time it is needed, holding the
    // sidecar lock so only one caller fills the cache. If `get` fails, the
    // partially written file is deleted so a later call can retry, and the
    // failure is rethrown.
    def cache(get: File ⇒ Unit): File = {
      lockFile(file).withLock { _ ⇒
        if (!file.exists())
          try get(file)
          catch {
            case t: Throwable ⇒
              file.delete()
              throw t
          }
      }
      file
    }
    // Re-runs `update` when `file` is missing or its "<path>-timestamp"
    // sidecar records a write older than `tooOld`. The timestamp sidecar
    // stores the last update time in epoch milliseconds; an unreadable
    // timestamp is deleted and treated as stale. Runs under the sidecar lock.
    def updateIfTooOld(tooOld: Time)(update: File ⇒ Unit) = {
      def timeStamp(f: File) = new File(f.getPath + "-timestamp")
      lockFile(file).withLock { _ ⇒
        val ts = timeStamp(file)
        val upToDate =
          if (!file.exists || !ts.exists) false
          else
            Try(ts.content.toLong) match {
              // Up to date iff last update happened within the `tooOld` window.
              case Success(v) ⇒ v + tooOld.millis > System.currentTimeMillis
              case Failure(_) ⇒ ts.delete; false
            }
        if (!upToDate) {
          update(file)
          ts.content = System.currentTimeMillis.toString
        }
      }
      file
    }
  }
}
| openmole/openmole | openmole/core/org.openmole.core.fileservice/src/main/scala/org/openmole/core/fileservice/package.scala | Scala | agpl-3.0 | 2,039 |
/*
* Copyright (c) 2015. Gaëtan La Marca
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers.security
import models.dao.user.userDao
import models.entity.user.User
import play.api.mvc._
import play.api.db.slick.Config.driver.simple._
import play.api.db.slick._
import play.api.Play.current
trait Secured {

  /** Extracts the username stored in the session, if any. */
  def username(request: RequestHeader) = request.session.get(Security.username)

  /** Where unauthenticated (or unknown) users are sent: the login page. */
  def onUnauthorized(request: RequestHeader) = Results.Redirect(routes.Authentication.login)

  /**
   * Wraps an action so it only runs when the session carries a username;
   * the username is passed to the wrapped action.
   */
  def withAuth(f: => String => Request[AnyContent] => Result) = {
    Security.Authenticated(username, onUnauthorized) { user =>
      Action(request => f(user)(request))
    }
  }

  /**
   * Wraps an action so it only runs for a known, authenticated user.
   *
   * The user is looked up by the session username with `firstOption` instead
   * of `first`: the previous `.first` call threw NoSuchElementException when
   * the user row no longer existed (e.g. the account was deleted after
   * login), which made the fallback `case _ => onUnauthorized` branch
   * unreachable and turned a stale session into a server error. Now a missing
   * user is treated as unauthorized. The implicit Slick session is also named
   * `session` rather than shadowing `request`.
   */
  def withUser(f: User => Request[AnyContent] => Result) = withAuth { username => implicit request =>
    val maybeUser = DB.withSession { implicit session =>
      userDao.findByUserName(username).firstOption
    }
    maybeUser match {
      case Some(user) => f(user)(request)
      case None => onUnauthorized(request)
    }
  }
}
| glamarca/cuam | app/controllers/security/Secured.scala | Scala | apache-2.0 | 1,518 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.recommendation
import java.{util => ju}
import java.io.IOException
import java.util.Locale
import scala.collection.mutable
import scala.reflect.ClassTag
import scala.util.{Sorting, Try}
import scala.util.hashing.byteswap64
import com.github.fommil.netlib.BLAS.{getInstance => blas}
import org.apache.hadoop.fs.Path
import org.json4s.DefaultFormats
import org.json4s.JsonDSL._
import org.apache.spark.{Dependency, Partitioner, ShuffleDependency, SparkContext}
import org.apache.spark.annotation.{DeveloperApi, Since}
import org.apache.spark.internal.Logging
import org.apache.spark.ml.{Estimator, Model}
import org.apache.spark.ml.linalg.BLAS
import org.apache.spark.ml.param._
import org.apache.spark.ml.param.shared._
import org.apache.spark.ml.util._
import org.apache.spark.ml.util.Instrumentation.instrumented
import org.apache.spark.mllib.linalg.CholeskyDecomposition
import org.apache.spark.mllib.optimization.NNLS
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Dataset}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types._
import org.apache.spark.storage.StorageLevel
import org.apache.spark.util.{BoundedPriorityQueue, Utils}
import org.apache.spark.util.collection.{OpenHashMap, OpenHashSet, SortDataFormat, Sorter}
import org.apache.spark.util.random.XORShiftRandom
/**
 * Common params for ALS and ALSModel.
 */
private[recommendation] trait ALSModelParams extends Params with HasPredictionCol {
  /**
   * Param for the column name for user ids. Ids must be integers. Other
   * numeric types are supported for this column, but will be cast to integers as long as they
   * fall within the integer value range.
   * Default: "user"
   * @group param
   */
  val userCol = new Param[String](this, "userCol", "column name for user ids. Ids must be within " +
    "the integer value range.")

  /** @group getParam */
  def getUserCol: String = $(userCol)

  /**
   * Param for the column name for item ids. Ids must be integers. Other
   * numeric types are supported for this column, but will be cast to integers as long as they
   * fall within the integer value range.
   * Default: "item"
   * @group param
   */
  val itemCol = new Param[String](this, "itemCol", "column name for item ids. Ids must be within " +
    "the integer value range.")

  /** @group getParam */
  def getItemCol: String = $(itemCol)

  /**
   * Attempts to safely cast a user/item id to an Int. Throws an exception if the value is
   * out of integer range or contains a fractional part.
   * Applied to the user/item columns both when fitting (ALS.fit) and when
   * scoring (ALSModel.transform).
   */
  protected[recommendation] val checkedCast = udf { (n: Any) =>
    n match {
      case v: Int => v // Avoid unnecessary casting
      case v: Number =>
        val intV = v.intValue
        // Checks if number within Int range and has no fractional part.
        if (v.doubleValue == intV) {
          intV
        } else {
          throw new IllegalArgumentException(s"ALS only supports values in Integer range " +
            s"and without fractional part for columns ${$(userCol)} and ${$(itemCol)}. " +
            s"Value $n was either out of Integer range or contained a fractional part that " +
            s"could not be converted.")
        }
      // Non-numeric ids are rejected outright.
      case _ => throw new IllegalArgumentException(s"ALS only supports values in Integer range " +
        s"for columns ${$(userCol)} and ${$(itemCol)}. Value $n was not numeric.")
    }
  }

  /**
   * Param for strategy for dealing with unknown or new users/items at prediction time.
   * This may be useful in cross-validation or production scenarios, for handling user/item ids
   * the model has not seen in the training data.
   * Supported values:
   * - "nan": predicted value for unknown ids will be NaN.
   * - "drop": rows in the input DataFrame containing unknown ids will be dropped from
   *   the output DataFrame containing predictions.
   * Default: "nan".
   * @group expertParam
   */
  val coldStartStrategy = new Param[String](this, "coldStartStrategy",
    "strategy for dealing with unknown or new users/items at prediction time. This may be " +
    "useful in cross-validation or production scenarios, for handling user/item ids the model " +
    "has not seen in the training data. Supported values: " +
    s"${ALSModel.supportedColdStartStrategies.mkString(",")}.",
    (s: String) =>
      ALSModel.supportedColdStartStrategies.contains(s.toLowerCase(Locale.ROOT)))

  /** @group expertGetParam */
  // Normalized to lower case so user input is matched case-insensitively.
  def getColdStartStrategy: String = $(coldStartStrategy).toLowerCase(Locale.ROOT)
}
/**
 * Common params for ALS.
 */
private[recommendation] trait ALSParams extends ALSModelParams with HasMaxIter with HasRegParam
  with HasPredictionCol with HasCheckpointInterval with HasSeed {

  /**
   * Param for rank of the matrix factorization (positive).
   * Default: 10
   * @group param
   */
  val rank = new IntParam(this, "rank", "rank of the factorization", ParamValidators.gtEq(1))

  /** @group getParam */
  def getRank: Int = $(rank)

  /**
   * Param for number of user blocks (positive).
   * Default: 10
   * @group param
   */
  val numUserBlocks = new IntParam(this, "numUserBlocks", "number of user blocks",
    ParamValidators.gtEq(1))

  /** @group getParam */
  def getNumUserBlocks: Int = $(numUserBlocks)

  /**
   * Param for number of item blocks (positive).
   * Default: 10
   * @group param
   */
  val numItemBlocks = new IntParam(this, "numItemBlocks", "number of item blocks",
    ParamValidators.gtEq(1))

  /** @group getParam */
  def getNumItemBlocks: Int = $(numItemBlocks)

  /**
   * Param to decide whether to use implicit preference.
   * Default: false
   * @group param
   */
  val implicitPrefs = new BooleanParam(this, "implicitPrefs", "whether to use implicit preference")

  /** @group getParam */
  def getImplicitPrefs: Boolean = $(implicitPrefs)

  /**
   * Param for the alpha parameter in the implicit preference formulation (nonnegative).
   * Default: 1.0
   * @group param
   */
  val alpha = new DoubleParam(this, "alpha", "alpha for implicit preference",
    ParamValidators.gtEq(0))

  /** @group getParam */
  def getAlpha: Double = $(alpha)

  /**
   * Param for the column name for ratings.
   * Default: "rating"
   * @group param
   */
  val ratingCol = new Param[String](this, "ratingCol", "column name for ratings")

  /** @group getParam */
  def getRatingCol: String = $(ratingCol)

  /**
   * Param for whether to apply nonnegativity constraints.
   * Default: false
   * @group param
   */
  val nonnegative = new BooleanParam(
    this, "nonnegative", "whether to use nonnegative constraint for least squares")

  /** @group getParam */
  def getNonnegative: Boolean = $(nonnegative)

  /**
   * Param for StorageLevel for intermediate datasets. Pass in a string representation of
   * `StorageLevel`. Cannot be "NONE".
   * Default: "MEMORY_AND_DISK".
   *
   * @group expertParam
   */
  val intermediateStorageLevel = new Param[String](this, "intermediateStorageLevel",
    "StorageLevel for intermediate datasets. Cannot be 'NONE'.",
    (s: String) => Try(StorageLevel.fromString(s)).isSuccess && s != "NONE")

  /** @group expertGetParam */
  def getIntermediateStorageLevel: String = $(intermediateStorageLevel)

  /**
   * Param for StorageLevel for ALS model factors. Pass in a string representation of
   * `StorageLevel`.
   * Default: "MEMORY_AND_DISK".
   *
   * @group expertParam
   */
  val finalStorageLevel = new Param[String](this, "finalStorageLevel",
    "StorageLevel for ALS model factors.",
    (s: String) => Try(StorageLevel.fromString(s)).isSuccess)

  /** @group expertGetParam */
  def getFinalStorageLevel: String = $(finalStorageLevel)

  // Defaults for every ALS param, including those inherited from the shared
  // Has* traits (maxIter, regParam, checkpointInterval, ...).
  setDefault(rank -> 10, maxIter -> 10, regParam -> 0.1, numUserBlocks -> 10, numItemBlocks -> 10,
    implicitPrefs -> false, alpha -> 1.0, userCol -> "user", itemCol -> "item",
    ratingCol -> "rating", nonnegative -> false, checkpointInterval -> 10,
    intermediateStorageLevel -> "MEMORY_AND_DISK", finalStorageLevel -> "MEMORY_AND_DISK",
    coldStartStrategy -> "nan")

  /**
   * Validates and transforms the input schema.
   *
   * @param schema input schema
   * @return output schema
   */
  protected def validateAndTransformSchema(schema: StructType): StructType = {
    // user and item will be cast to Int
    SchemaUtils.checkNumericType(schema, $(userCol))
    SchemaUtils.checkNumericType(schema, $(itemCol))
    // rating will be cast to Float
    SchemaUtils.checkNumericType(schema, $(ratingCol))
    SchemaUtils.appendColumn(schema, $(predictionCol), FloatType)
  }
}
/**
 * Model fitted by ALS.
 *
 * @param rank rank of the matrix factorization model
 * @param userFactors a DataFrame that stores user factors in two columns: `id` and `features`
 * @param itemFactors a DataFrame that stores item factors in two columns: `id` and `features`
 */
@Since("1.3.0")
class ALSModel private[ml] (
    @Since("1.4.0") override val uid: String,
    @Since("1.4.0") val rank: Int,
    @transient val userFactors: DataFrame,
    @transient val itemFactors: DataFrame)
  extends Model[ALSModel] with ALSModelParams with MLWritable {

  /** @group setParam */
  @Since("1.4.0")
  def setUserCol(value: String): this.type = set(userCol, value)

  /** @group setParam */
  @Since("1.4.0")
  def setItemCol(value: String): this.type = set(itemCol, value)

  /** @group setParam */
  @Since("1.3.0")
  def setPredictionCol(value: String): this.type = set(predictionCol, value)

  /** @group expertSetParam */
  @Since("2.2.0")
  def setColdStartStrategy(value: String): this.type = set(coldStartStrategy, value)

  // Dot product of a user factor and an item factor. Returns NaN when either
  // side is null, i.e. when the id was absent from training (the joins in
  // `transform` are left joins, so unmatched rows carry null factors).
  private val predict = udf { (featuresA: Seq[Float], featuresB: Seq[Float]) =>
    if (featuresA != null && featuresB != null) {
      var dotProduct = 0.0f
      var i = 0
      while (i < rank) {
        dotProduct += featuresA(i) * featuresB(i)
        i += 1
      }
      dotProduct
    } else {
      Float.NaN
    }
  }

  @Since("2.0.0")
  override def transform(dataset: Dataset[_]): DataFrame = {
    transformSchema(dataset.schema)
    // create a new column named map(predictionCol) by running the predict UDF.
    val predictions = dataset
      .join(userFactors,
        checkedCast(dataset($(userCol))) === userFactors("id"), "left")
      .join(itemFactors,
        checkedCast(dataset($(itemCol))) === itemFactors("id"), "left")
      .select(dataset("*"),
        predict(userFactors("features"), itemFactors("features")).as($(predictionCol)))
    getColdStartStrategy match {
      case ALSModel.Drop =>
        // "drop": remove rows whose prediction is NaN (unknown user or item).
        predictions.na.drop("all", Seq($(predictionCol)))
      case ALSModel.NaN =>
        // "nan": keep NaN predictions for unknown ids.
        predictions
    }
  }

  @Since("1.3.0")
  override def transformSchema(schema: StructType): StructType = {
    // user and item will be cast to Int
    SchemaUtils.checkNumericType(schema, $(userCol))
    SchemaUtils.checkNumericType(schema, $(itemCol))
    SchemaUtils.appendColumn(schema, $(predictionCol), FloatType)
  }

  @Since("1.5.0")
  override def copy(extra: ParamMap): ALSModel = {
    val copied = new ALSModel(uid, rank, userFactors, itemFactors)
    copyValues(copied, extra).setParent(parent)
  }

  @Since("1.6.0")
  override def write: MLWriter = new ALSModel.ALSModelWriter(this)

  /**
   * Returns top `numItems` items recommended for each user, for all users.
   * @param numItems max number of recommendations for each user
   * @return a DataFrame of (userCol: Int, recommendations), where recommendations are
   *         stored as an array of (itemCol: Int, rating: Float) Rows.
   */
  @Since("2.2.0")
  def recommendForAllUsers(numItems: Int): DataFrame = {
    recommendForAll(userFactors, itemFactors, $(userCol), $(itemCol), numItems)
  }

  /**
   * Returns top `numItems` items recommended for each user id in the input data set. Note that if
   * there are duplicate ids in the input dataset, only one set of recommendations per unique id
   * will be returned.
   * @param dataset a Dataset containing a column of user ids. The column name must match `userCol`.
   * @param numItems max number of recommendations for each user.
   * @return a DataFrame of (userCol: Int, recommendations), where recommendations are
   *         stored as an array of (itemCol: Int, rating: Float) Rows.
   */
  @Since("2.3.0")
  def recommendForUserSubset(dataset: Dataset[_], numItems: Int): DataFrame = {
    val srcFactorSubset = getSourceFactorSubset(dataset, userFactors, $(userCol))
    recommendForAll(srcFactorSubset, itemFactors, $(userCol), $(itemCol), numItems)
  }

  /**
   * Returns top `numUsers` users recommended for each item, for all items.
   * @param numUsers max number of recommendations for each item
   * @return a DataFrame of (itemCol: Int, recommendations), where recommendations are
   *         stored as an array of (userCol: Int, rating: Float) Rows.
   */
  @Since("2.2.0")
  def recommendForAllItems(numUsers: Int): DataFrame = {
    recommendForAll(itemFactors, userFactors, $(itemCol), $(userCol), numUsers)
  }

  /**
   * Returns top `numUsers` users recommended for each item id in the input data set. Note that if
   * there are duplicate ids in the input dataset, only one set of recommendations per unique id
   * will be returned.
   * @param dataset a Dataset containing a column of item ids. The column name must match `itemCol`.
   * @param numUsers max number of recommendations for each item.
   * @return a DataFrame of (itemCol: Int, recommendations), where recommendations are
   *         stored as an array of (userCol: Int, rating: Float) Rows.
   */
  @Since("2.3.0")
  def recommendForItemSubset(dataset: Dataset[_], numUsers: Int): DataFrame = {
    val srcFactorSubset = getSourceFactorSubset(dataset, itemFactors, $(itemCol))
    recommendForAll(srcFactorSubset, userFactors, $(itemCol), $(userCol), numUsers)
  }

  /**
   * Returns a subset of a factor DataFrame limited to only those unique ids contained
   * in the input dataset.
   * @param dataset input Dataset containing id column to user to filter factors.
   * @param factors factor DataFrame to filter.
   * @param column column name containing the ids in the input dataset.
   * @return DataFrame containing factors only for those ids present in both the input dataset and
   *         the factor DataFrame.
   */
  private def getSourceFactorSubset(
      dataset: Dataset[_],
      factors: DataFrame,
      column: String): DataFrame = {
    // A left-semi join keeps each factor row at most once, which also
    // deduplicates ids that appear multiple times in `dataset`.
    factors
      .join(dataset.select(column), factors("id") === dataset(column), joinType = "left_semi")
      .select(factors("id"), factors("features"))
  }

  /**
   * Makes recommendations for all users (or items).
   *
   * Note: the previous approach used for computing top-k recommendations
   * used a cross-join followed by predicting a score for each row of the joined dataset.
   * However, this results in exploding the size of intermediate data. While Spark SQL makes it
   * relatively efficient, the approach implemented here is significantly more efficient.
   *
   * This approach groups factors into blocks and computes the top-k elements per block,
   * using dot product and an efficient [[BoundedPriorityQueue]] (instead of gemm).
   * It then computes the global top-k by aggregating the per block top-k elements with
   * a [[TopByKeyAggregator]]. This significantly reduces the size of intermediate and shuffle data.
   * This is the DataFrame equivalent to the approach used in
   * [[org.apache.spark.mllib.recommendation.MatrixFactorizationModel]].
   *
   * @param srcFactors src factors for which to generate recommendations
   * @param dstFactors dst factors used to make recommendations
   * @param srcOutputColumn name of the column for the source ID in the output DataFrame
   * @param dstOutputColumn name of the column for the destination ID in the output DataFrame
   * @param num max number of recommendations for each record
   * @return a DataFrame of (srcOutputColumn: Int, recommendations), where recommendations are
   *         stored as an array of (dstOutputColumn: Int, rating: Float) Rows.
   */
  private def recommendForAll(
      srcFactors: DataFrame,
      dstFactors: DataFrame,
      srcOutputColumn: String,
      dstOutputColumn: String,
      num: Int): DataFrame = {
    import srcFactors.sparkSession.implicits._
    val srcFactorsBlocked = blockify(srcFactors.as[(Int, Array[Float])])
    val dstFactorsBlocked = blockify(dstFactors.as[(Int, Array[Float])])
    val ratings = srcFactorsBlocked.crossJoin(dstFactorsBlocked)
      .as[(Seq[(Int, Array[Float])], Seq[(Int, Array[Float])])]
      .flatMap { case (srcIter, dstIter) =>
        val m = srcIter.size
        val n = math.min(dstIter.size, num)
        val output = new Array[(Int, Int, Float)](m * n)
        var i = 0
        // Bounded queue keeps only the per-block top `num` scores per source.
        val pq = new BoundedPriorityQueue[(Int, Float)](num)(Ordering.by(_._2))
        srcIter.foreach { case (srcId, srcFactor) =>
          dstIter.foreach { case (dstId, dstFactor) =>
            // We use F2jBLAS which is faster than a call to native BLAS for vector dot product
            val score = BLAS.f2jBLAS.sdot(rank, srcFactor, 1, dstFactor, 1)
            pq += dstId -> score
          }
          // Drain the per-source queue into the flat output buffer, then
          // reuse the queue for the next source id.
          pq.foreach { case (dstId, score) =>
            output(i) = (srcId, dstId, score)
            i += 1
          }
          pq.clear()
        }
        output.toSeq
      }
    // We'll force the IDs to be Int. Unfortunately this converts IDs to Int in the output.
    val topKAggregator = new TopByKeyAggregator[Int, Int, Float](num, Ordering.by(_._2))
    val recs = ratings.as[(Int, Int, Float)].groupByKey(_._1).agg(topKAggregator.toColumn)
      .toDF("id", "recommendations")
    val arrayType = ArrayType(
      new StructType()
        .add(dstOutputColumn, IntegerType)
        .add("rating", FloatType)
    )
    recs.select($"id".as(srcOutputColumn), $"recommendations".cast(arrayType))
  }

  /**
   * Blockifies factors to improve the efficiency of cross join
   * TODO: SPARK-20443 - expose blockSize as a param?
   */
  private def blockify(
      factors: Dataset[(Int, Array[Float])],
      blockSize: Int = 4096): Dataset[Seq[(Int, Array[Float])]] = {
    import factors.sparkSession.implicits._
    factors.mapPartitions(_.grouped(blockSize))
  }
}
@Since("1.6.0")
object ALSModel extends MLReadable[ALSModel] {

  // Supported cold-start strategy names (lower-case); see `coldStartStrategy`.
  private val NaN = "nan"
  private val Drop = "drop"
  private[recommendation] final val supportedColdStartStrategies = Array(NaN, Drop)

  @Since("1.6.0")
  override def read: MLReader[ALSModel] = new ALSModelReader

  @Since("1.6.0")
  override def load(path: String): ALSModel = super.load(path)

  private[ALSModel] class ALSModelWriter(instance: ALSModel) extends MLWriter {

    override protected def saveImpl(path: String): Unit = {
      // The rank is stored alongside the standard param metadata; both factor
      // matrices are persisted as separate Parquet datasets under `path`.
      val extraMetadata = "rank" -> instance.rank
      DefaultParamsWriter.saveMetadata(instance, path, sc, Some(extraMetadata))
      val userPath = new Path(path, "userFactors").toString
      instance.userFactors.write.format("parquet").save(userPath)
      val itemPath = new Path(path, "itemFactors").toString
      instance.itemFactors.write.format("parquet").save(itemPath)
    }
  }

  private class ALSModelReader extends MLReader[ALSModel] {

    /** Checked against metadata when loading model */
    private val className = classOf[ALSModel].getName

    override def load(path: String): ALSModel = {
      val metadata = DefaultParamsReader.loadMetadata(path, sc, className)
      implicit val format = DefaultFormats
      // Recover the rank written by ALSModelWriter, then reload both factor
      // DataFrames from their Parquet locations.
      val rank = (metadata.metadata \ "rank").extract[Int]
      val userPath = new Path(path, "userFactors").toString
      val userFactors = sparkSession.read.format("parquet").load(userPath)
      val itemPath = new Path(path, "itemFactors").toString
      val itemFactors = sparkSession.read.format("parquet").load(itemPath)
      val model = new ALSModel(metadata.uid, rank, userFactors, itemFactors)
      metadata.getAndSetParams(model)
      model
    }
  }
}
/**
 * Alternating Least Squares (ALS) matrix factorization.
 *
 * ALS attempts to estimate the ratings matrix `R` as the product of two lower-rank matrices,
 * `X` and `Y`, i.e. `X * Yt = R`. Typically these approximations are called 'factor' matrices.
 * The general approach is iterative. During each iteration, one of the factor matrices is held
 * constant, while the other is solved for using least squares. The newly-solved factor matrix is
 * then held constant while solving for the other factor matrix.
 *
 * This is a blocked implementation of the ALS factorization algorithm that groups the two sets
 * of factors (referred to as "users" and "products") into blocks and reduces communication by only
 * sending one copy of each user vector to each product block on each iteration, and only for the
 * product blocks that need that user's feature vector. This is achieved by pre-computing some
 * information about the ratings matrix to determine the "out-links" of each user (which blocks of
 * products it will contribute to) and "in-link" information for each product (which of the feature
 * vectors it receives from each user block it will depend on). This allows us to send only an
 * array of feature vectors between each user block and product block, and have the product block
 * find the users' ratings and update the products based on these messages.
 *
 * For implicit preference data, the algorithm used is based on
 * "Collaborative Filtering for Implicit Feedback Datasets", available at
 * http://dx.doi.org/10.1109/ICDM.2008.22, adapted for the blocked approach used here.
 *
 * Essentially instead of finding the low-rank approximations to the rating matrix `R`,
 * this finds the approximations for a preference matrix `P` where the elements of `P` are 1 if
 * r is greater than 0 and 0 if r is less than or equal to 0. The ratings then act as 'confidence'
 * values related to strength of indicated user
 * preferences rather than explicit ratings given to items.
 */
@Since("1.3.0")
class ALS(@Since("1.4.0") override val uid: String) extends Estimator[ALSModel] with ALSParams
  with DefaultParamsWritable {

  import org.apache.spark.ml.recommendation.ALS.Rating

  @Since("1.4.0")
  def this() = this(Identifiable.randomUID("als"))

  /** @group setParam */
  @Since("1.3.0")
  def setRank(value: Int): this.type = set(rank, value)

  /** @group setParam */
  @Since("1.3.0")
  def setNumUserBlocks(value: Int): this.type = set(numUserBlocks, value)

  /** @group setParam */
  @Since("1.3.0")
  def setNumItemBlocks(value: Int): this.type = set(numItemBlocks, value)

  /** @group setParam */
  @Since("1.3.0")
  def setImplicitPrefs(value: Boolean): this.type = set(implicitPrefs, value)

  /** @group setParam */
  @Since("1.3.0")
  def setAlpha(value: Double): this.type = set(alpha, value)

  /** @group setParam */
  @Since("1.3.0")
  def setUserCol(value: String): this.type = set(userCol, value)

  /** @group setParam */
  @Since("1.3.0")
  def setItemCol(value: String): this.type = set(itemCol, value)

  /** @group setParam */
  @Since("1.3.0")
  def setRatingCol(value: String): this.type = set(ratingCol, value)

  /** @group setParam */
  @Since("1.3.0")
  def setPredictionCol(value: String): this.type = set(predictionCol, value)

  /** @group setParam */
  @Since("1.3.0")
  def setMaxIter(value: Int): this.type = set(maxIter, value)

  /** @group setParam */
  @Since("1.3.0")
  def setRegParam(value: Double): this.type = set(regParam, value)

  /** @group setParam */
  @Since("1.3.0")
  def setNonnegative(value: Boolean): this.type = set(nonnegative, value)

  /** @group setParam */
  @Since("1.4.0")
  def setCheckpointInterval(value: Int): this.type = set(checkpointInterval, value)

  /** @group setParam */
  @Since("1.3.0")
  def setSeed(value: Long): this.type = set(seed, value)

  /** @group expertSetParam */
  @Since("2.0.0")
  def setIntermediateStorageLevel(value: String): this.type = set(intermediateStorageLevel, value)

  /** @group expertSetParam */
  @Since("2.0.0")
  def setFinalStorageLevel(value: String): this.type = set(finalStorageLevel, value)

  /** @group expertSetParam */
  @Since("2.2.0")
  def setColdStartStrategy(value: String): this.type = set(coldStartStrategy, value)

  /**
   * Sets both numUserBlocks and numItemBlocks to the specific value.
   *
   * @group setParam
   */
  @Since("1.3.0")
  def setNumBlocks(value: Int): this.type = {
    setNumUserBlocks(value)
    setNumItemBlocks(value)
    this
  }

  @Since("2.0.0")
  override def fit(dataset: Dataset[_]): ALSModel = instrumented { instr =>
    transformSchema(dataset.schema)
    import dataset.sparkSession.implicits._

    // When no rating column is configured (empty name), every interaction is
    // given a constant weight of 1.0f; otherwise the column is cast to Float.
    val r = if ($(ratingCol) != "") col($(ratingCol)).cast(FloatType) else lit(1.0f)
    val ratings = dataset
      .select(checkedCast(col($(userCol))), checkedCast(col($(itemCol))), r)
      .rdd
      .map { row =>
        Rating(row.getInt(0), row.getInt(1), row.getFloat(2))
      }

    instr.logPipelineStage(this)
    instr.logDataset(dataset)
    instr.logParams(this, rank, numUserBlocks, numItemBlocks, implicitPrefs, alpha, userCol,
      itemCol, ratingCol, predictionCol, maxIter, regParam, nonnegative, checkpointInterval,
      seed, intermediateStorageLevel, finalStorageLevel)

    // Delegate the actual factorization to the RDD-based implementation and
    // wrap the resulting factor RDDs in DataFrames for the model.
    val (userFactors, itemFactors) = ALS.train(ratings, rank = $(rank),
      numUserBlocks = $(numUserBlocks), numItemBlocks = $(numItemBlocks),
      maxIter = $(maxIter), regParam = $(regParam), implicitPrefs = $(implicitPrefs),
      alpha = $(alpha), nonnegative = $(nonnegative),
      intermediateRDDStorageLevel = StorageLevel.fromString($(intermediateStorageLevel)),
      finalRDDStorageLevel = StorageLevel.fromString($(finalStorageLevel)),
      checkpointInterval = $(checkpointInterval), seed = $(seed))
    val userDF = userFactors.toDF("id", "features")
    val itemDF = itemFactors.toDF("id", "features")
    val model = new ALSModel(uid, $(rank), userDF, itemDF).setParent(this)
    copyValues(model)
  }

  @Since("1.3.0")
  override def transformSchema(schema: StructType): StructType = {
    validateAndTransformSchema(schema)
  }

  @Since("1.5.0")
  override def copy(extra: ParamMap): ALS = defaultCopy(extra)
}
/**
* :: DeveloperApi ::
* An implementation of ALS that supports generic ID types, specialized for Int and Long. This is
* exposed as a developer API for users who do need other ID types. But it is not recommended
* because it increases the shuffle size and memory requirement during training. For simplicity,
* users and items must have the same type. The number of distinct users/items should be smaller
* than 2 billion.
*/
@DeveloperApi
object ALS extends DefaultParamsReadable[ALS] with Logging {
/**
 * :: DeveloperApi ::
 * Rating class for better code readability.
 */
// Specialized for Int and Long to avoid boxing the common id types.
@DeveloperApi
case class Rating[@specialized(Int, Long) ID](user: ID, item: ID, rating: Float)

@Since("1.6.0")
override def load(path: String): ALS = super.load(path)

/** Trait for least squares solvers applied to the normal equation. */
private[recommendation] trait LeastSquaresNESolver extends Serializable {
  /** Solves a least squares problem with regularization (possibly with other constraints). */
  def solve(ne: NormalEquation, lambda: Double): Array[Float]
}
/** Cholesky solver for least square problems. */
private[recommendation] class CholeskySolver extends LeastSquaresNESolver {

  /**
   * Solves a least squares problem with L2 regularization:
   *
   *   min norm(A x - b)^2^ + lambda * norm(x)^2^
   *
   * @param ne a [[NormalEquation]] instance that contains AtA, Atb, and n (number of instances)
   * @param lambda regularization constant
   * @return the solution x
   */
  override def solve(ne: NormalEquation, lambda: Double): Array[Float] = {
    val k = ne.k
    // Add scaled lambda to the diagonals of AtA.
    // AtA is kept in packed upper-triangular order, so its diagonal entries
    // sit at indices 0, 2, 5, 9, ...: each gap is one larger than the
    // previous, which is what the (i += j; j += 1) walk implements.
    var i = 0
    var j = 2
    while (i < ne.triK) {
      ne.ata(i) += lambda
      i += j
      j += 1
    }
    // Solves in place; the solution is left in ne.atb and copied out below.
    CholeskyDecomposition.solve(ne.ata, ne.atb)
    val x = new Array[Float](k)
    i = 0
    while (i < k) {
      x(i) = ne.atb(i).toFloat
      i += 1
    }
    // Clear the accumulated statistics so the NormalEquation can be reused.
    ne.reset()
    x
  }
}
/** NNLS solver. */
private[recommendation] class NNLSSolver extends LeastSquaresNESolver {
  private var rank: Int = -1
  private var workspace: NNLS.Workspace = _
  private var ata: Array[Double] = _
  private var initialized: Boolean = false

  // Lazily allocates the NNLS workspace and the dense AtA buffer for the
  // given rank; once initialized, the solver is bound to that rank and any
  // call with a different rank fails the require check.
  private def initialize(rank: Int): Unit = {
    if (!initialized) {
      this.rank = rank
      workspace = NNLS.createWorkspace(rank)
      ata = new Array[Double](rank * rank)
      initialized = true
    } else {
      require(this.rank == rank)
    }
  }

  /**
   * Solves a nonnegative least squares problem with L2 regularization:
   *
   *   min_x_ norm(A x - b)^2^ + lambda * n * norm(x)^2^
   *   subject to x >= 0
   */
  override def solve(ne: NormalEquation, lambda: Double): Array[Float] = {
    val rank = ne.k
    initialize(rank)
    fillAtA(ne.ata, lambda)
    val x = NNLS.solve(ata, ne.atb, workspace)
    // Clear accumulated statistics so the NormalEquation can be reused.
    ne.reset()
    x.map(x => x.toFloat)
  }

  /**
   * Given a triangular matrix in the order of fillXtX above, compute the full symmetric square
   * matrix that it represents, storing it into destMatrix.
   */
  private def fillAtA(triAtA: Array[Double], lambda: Double) {
    // Expand the packed upper-triangular `triAtA` into the dense symmetric
    // `ata` buffer (mirroring each off-diagonal entry), then add `lambda`
    // to every diagonal entry.
    var i = 0
    var pos = 0
    var a = 0.0
    while (i < rank) {
      var j = 0
      while (j <= i) {
        a = triAtA(pos)
        ata(i * rank + j) = a
        ata(j * rank + i) = a
        pos += 1
        j += 1
      }
      ata(i * rank + i) += lambda
      i += 1
    }
  }
}
/**
* Representing a normal equation to solve the following weighted least squares problem:
*
* minimize \sum,,i,, c,,i,, (a,,i,,^T^ x - d,,i,,)^2^ + lambda * x^T^ x.
*
* Its normal equation is given by
*
* \sum,,i,, c,,i,, (a,,i,, a,,i,,^T^ x - d,,i,, a,,i,,) + lambda * x = 0.
*
* Distributing and letting b,,i,, = c,,i,, * d,,i,,
*
* \sum,,i,, c,,i,, a,,i,, a,,i,,^T^ x - b,,i,, a,,i,, + lambda * x = 0.
*/
  private[recommendation] class NormalEquation(val k: Int) extends Serializable {
    /** Number of entries in the upper triangular part of a k-by-k matrix. */
    val triK = k * (k + 1) / 2
    /** A^T^ * A, stored in upper triangular packed format (see `upper`). */
    val ata = new Array[Double](triK)
    /** A^T^ * b */
    val atb = new Array[Double](k)
    // Scratch buffer holding the current observation converted to double precision for BLAS.
    private val da = new Array[Double](k)
    // BLAS "UPLO" argument selecting upper triangular packed storage.
    private val upper = "U"
    /** Copies a single-precision observation vector into the scratch buffer `da`. */
    private def copyToDouble(a: Array[Float]): Unit = {
      var i = 0
      while (i < k) {
        da(i) = a(i)
        i += 1
      }
    }
    /** Adds an observation with target `b` and nonnegative weight `c`. */
    def add(a: Array[Float], b: Double, c: Double = 1.0): this.type = {
      require(c >= 0.0)
      require(a.length == k)
      copyToDouble(a)
      // Rank-one update: ata += c * da * da^T^ (packed upper triangle only).
      blas.dspr(upper, k, c, da, 1, ata)
      if (b != 0.0) {
        // atb += b * da; skipped entirely when b == 0 (as in computeYtY).
        blas.daxpy(k, b, da, 1, atb, 1)
      }
      this
    }
    /** Merges another normal equation object. */
    def merge(other: NormalEquation): this.type = {
      require(other.k == k)
      blas.daxpy(ata.length, 1.0, other.ata, 1, ata, 1)
      blas.daxpy(atb.length, 1.0, other.atb, 1, atb, 1)
      this
    }
    /** Resets everything to zero, which should be called after each solve. */
    def reset(): Unit = {
      ju.Arrays.fill(ata, 0.0)
      ju.Arrays.fill(atb, 0.0)
    }
  }
  /**
   * :: DeveloperApi ::
   * Implementation of the ALS algorithm.
   *
   * This implementation of the ALS factorization algorithm partitions the two sets of factors among
   * Spark workers so as to reduce network communication by only sending one copy of each factor
   * vector to each Spark worker on each iteration, and only if needed. This is achieved by
   * precomputing some information about the ratings matrix to determine which users require which
   * item factors and vice versa. See the Scaladoc for `InBlock` for a detailed explanation of how
   * the precomputation is done.
   *
   * In addition, since each iteration of calculating the factor matrices depends on the known
   * ratings, which are spread across Spark partitions, a naive implementation would incur
   * significant network communication overhead between Spark workers, as the ratings RDD would be
   * repeatedly shuffled during each iteration. This implementation reduces that overhead by
   * performing the shuffling operation up front, precomputing each partition's ratings dependencies
   * and duplicating those values to the appropriate workers before starting iterations to solve for
   * the factor matrices. See the Scaladoc for `OutBlock` for a detailed explanation of how the
   * precomputation is done.
   *
   * Note that the term "rating block" is a bit of a misnomer, as the ratings are not partitioned by
   * contiguous blocks from the ratings matrix but by a hash function on the rating's location in
   * the matrix. If it helps you to visualize the partitions, it is easier to think of the term
   * "block" as referring to a subset of an RDD containing the ratings rather than a contiguous
   * submatrix of the ratings matrix.
   *
   * @param ratings the RDD of known ratings; must be non-empty
   * @param rank rank of the factor matrices (number of latent features)
   * @param numUserBlocks number of blocks into which the users are partitioned
   * @param numItemBlocks number of blocks into which the items are partitioned
   * @param maxIter maximum number of iterations
   * @param regParam regularization constant
   * @param implicitPrefs whether to use the implicit-preference formulation
   * @param alpha confidence constant, used only when `implicitPrefs` is true
   * @param nonnegative whether to constrain factors to be nonnegative (selects the NNLS solver)
   * @param intermediateRDDStorageLevel storage level for intermediate RDDs; must not be NONE
   * @param finalRDDStorageLevel storage level for the returned factor RDDs
   * @param checkpointInterval checkpoint item factors every this many iterations; -1 disables
   *                           checkpointing (also requires `sc.checkpointDir` to be set)
   * @param seed random seed used to initialize the factor matrices
   * @return a pair of RDDs: (user factors, item factors), keyed by the original IDs
   */
  @DeveloperApi
  def train[ID: ClassTag]( // scalastyle:ignore
      ratings: RDD[Rating[ID]],
      rank: Int = 10,
      numUserBlocks: Int = 10,
      numItemBlocks: Int = 10,
      maxIter: Int = 10,
      regParam: Double = 0.1,
      implicitPrefs: Boolean = false,
      alpha: Double = 1.0,
      nonnegative: Boolean = false,
      intermediateRDDStorageLevel: StorageLevel = StorageLevel.MEMORY_AND_DISK,
      finalRDDStorageLevel: StorageLevel = StorageLevel.MEMORY_AND_DISK,
      checkpointInterval: Int = 10,
      seed: Long = 0L)(
      implicit ord: Ordering[ID]): (RDD[(ID, Array[Float])], RDD[(ID, Array[Float])]) = {
    require(!ratings.isEmpty(), s"No ratings available from $ratings")
    require(intermediateRDDStorageLevel != StorageLevel.NONE,
      "ALS is not designed to run without persisting intermediate RDDs.")
    val sc = ratings.sparkContext
    // Precompute the rating dependencies of each partition
    val userPart = new ALSPartitioner(numUserBlocks)
    val itemPart = new ALSPartitioner(numItemBlocks)
    val blockRatings = partitionRatings(ratings, userPart, itemPart)
      .persist(intermediateRDDStorageLevel)
    val (userInBlocks, userOutBlocks) =
      makeBlocks("user", blockRatings, userPart, itemPart, intermediateRDDStorageLevel)
    userOutBlocks.count() // materialize blockRatings and user blocks
    // Reuse the same rating blocks, swapping the key order, to build the item-side blocks.
    val swappedBlockRatings = blockRatings.map {
      case ((userBlockId, itemBlockId), RatingBlock(userIds, itemIds, localRatings)) =>
        ((itemBlockId, userBlockId), RatingBlock(itemIds, userIds, localRatings))
    }
    val (itemInBlocks, itemOutBlocks) =
      makeBlocks("item", swappedBlockRatings, itemPart, userPart, intermediateRDDStorageLevel)
    itemOutBlocks.count() // materialize item blocks
    // Encoders for storing each user/item's partition ID and index within its partition using a
    // single integer; used as an optimization
    val userLocalIndexEncoder = new LocalIndexEncoder(userPart.numPartitions)
    val itemLocalIndexEncoder = new LocalIndexEncoder(itemPart.numPartitions)
    // These are the user and item factor matrices that, once trained, are multiplied together to
    // estimate the rating matrix. The two matrices are stored in RDDs, partitioned by column such
    // that each factor column resides on the same Spark worker as its corresponding user or item.
    val seedGen = new XORShiftRandom(seed)
    var userFactors = initialize(userInBlocks, rank, seedGen.nextLong())
    var itemFactors = initialize(itemInBlocks, rank, seedGen.nextLong())
    val solver = if (nonnegative) new NNLSSolver else new CholeskySolver
    var previousCheckpointFile: Option[String] = None
    val shouldCheckpoint: Int => Boolean = (iter) =>
      sc.checkpointDir.isDefined && checkpointInterval != -1 && (iter % checkpointInterval == 0)
    val deletePreviousCheckpointFile: () => Unit = () =>
      previousCheckpointFile.foreach { file =>
        try {
          val checkpointFile = new Path(file)
          checkpointFile.getFileSystem(sc.hadoopConfiguration).delete(checkpointFile, true)
        } catch {
          case e: IOException =>
            logWarning(s"Cannot delete checkpoint file $file:", e)
        }
      }
    // Alternating minimization: each pass solves for one side's factors while holding the
    // other side fixed. The two branches differ in persistence/checkpointing bookkeeping.
    if (implicitPrefs) {
      for (iter <- 1 to maxIter) {
        userFactors.setName(s"userFactors-$iter").persist(intermediateRDDStorageLevel)
        val previousItemFactors = itemFactors
        itemFactors = computeFactors(userFactors, userOutBlocks, itemInBlocks, rank, regParam,
          userLocalIndexEncoder, implicitPrefs, alpha, solver)
        previousItemFactors.unpersist()
        itemFactors.setName(s"itemFactors-$iter").persist(intermediateRDDStorageLevel)
        // TODO: Generalize PeriodicGraphCheckpointer and use it here.
        // Capture the dependencies before checkpointing truncates the lineage.
        val deps = itemFactors.dependencies
        if (shouldCheckpoint(iter)) {
          itemFactors.checkpoint() // itemFactors gets materialized in computeFactors
        }
        val previousUserFactors = userFactors
        userFactors = computeFactors(itemFactors, itemOutBlocks, userInBlocks, rank, regParam,
          itemLocalIndexEncoder, implicitPrefs, alpha, solver)
        if (shouldCheckpoint(iter)) {
          ALS.cleanShuffleDependencies(sc, deps)
          deletePreviousCheckpointFile()
          previousCheckpointFile = itemFactors.getCheckpointFile
        }
        previousUserFactors.unpersist()
      }
    } else {
      for (iter <- 0 until maxIter) {
        itemFactors = computeFactors(userFactors, userOutBlocks, itemInBlocks, rank, regParam,
          userLocalIndexEncoder, solver = solver)
        if (shouldCheckpoint(iter)) {
          val deps = itemFactors.dependencies
          itemFactors.checkpoint()
          itemFactors.count() // checkpoint item factors and cut lineage
          ALS.cleanShuffleDependencies(sc, deps)
          deletePreviousCheckpointFile()
          previousCheckpointFile = itemFactors.getCheckpointFile
        }
        userFactors = computeFactors(itemFactors, itemOutBlocks, userInBlocks, rank, regParam,
          itemLocalIndexEncoder, solver = solver)
      }
    }
    // Re-key the factor blocks by the original user/item IDs for the caller.
    val userIdAndFactors = userInBlocks
      .mapValues(_.srcIds)
      .join(userFactors)
      .mapPartitions({ items =>
        items.flatMap { case (_, (ids, factors)) =>
          ids.view.zip(factors)
        }
      // Preserve the partitioning because IDs are consistent with the partitioners in userInBlocks
      // and userFactors.
      }, preservesPartitioning = true)
      .setName("userFactors")
      .persist(finalRDDStorageLevel)
    val itemIdAndFactors = itemInBlocks
      .mapValues(_.srcIds)
      .join(itemFactors)
      .mapPartitions({ items =>
        items.flatMap { case (_, (ids, factors)) =>
          ids.view.zip(factors)
        }
      }, preservesPartitioning = true)
      .setName("itemFactors")
      .persist(finalRDDStorageLevel)
    if (finalRDDStorageLevel != StorageLevel.NONE) {
      // Materialize the final factor RDDs before unpersisting the intermediates they derive from.
      userIdAndFactors.count()
      itemFactors.unpersist()
      itemIdAndFactors.count()
      userInBlocks.unpersist()
      userOutBlocks.unpersist()
      itemInBlocks.unpersist()
      itemOutBlocks.unpersist()
      blockRatings.unpersist()
    }
    (userIdAndFactors, itemIdAndFactors)
  }
  /**
   * Factor block that stores factors (Array[Float]) in an Array.
   * Entry i holds the factor vector of the i-th (sorted) ID of the corresponding in-block,
   * i.e. the array is aligned with `InBlock.srcIds`.
   */
  private type FactorBlock = Array[Array[Float]]
/**
* A mapping of the columns of the items factor matrix that are needed when calculating each row
* of the users factor matrix, and vice versa.
*
* Specifically, when calculating a user factor vector, since only those columns of the items
* factor matrix that correspond to the items that that user has rated are needed, we can avoid
* having to repeatedly copy the entire items factor matrix to each worker later in the algorithm
* by precomputing these dependencies for all users, storing them in an RDD of `OutBlock`s. The
* items' dependencies on the columns of the users factor matrix is computed similarly.
*
* =Example=
*
* Using the example provided in the `InBlock` Scaladoc, `userOutBlocks` would look like the
* following:
*
* {{{
* userOutBlocks.collect() == Seq(
* 0 -> Array(Array(0, 1), Array(0, 1)),
* 1 -> Array(Array(0), Array(0))
* )
* }}}
*
* Each value in this map-like sequence is of type `Array[Array[Int]]`. The values in the
* inner array are the ranks of the sorted user IDs in that partition; so in the example above,
* `Array(0, 1)` in partition 0 refers to user IDs 0 and 6, since when all unique user IDs in
* partition 0 are sorted, 0 is the first ID and 6 is the second. The position of each inner
* array in its enclosing outer array denotes the partition number to which item IDs map; in the
* example, the first `Array(0, 1)` is in position 0 of its outer array, denoting item IDs that
* map to partition 0.
*
* In summary, the data structure encodes the following information:
*
* * There are ratings with user IDs 0 and 6 (encoded in `Array(0, 1)`, where 0 and 1 are the
* indices of the user IDs 0 and 6 on partition 0) whose item IDs map to partitions 0 and 1
* (represented by the fact that `Array(0, 1)` appears in both the 0th and 1st positions).
*
* * There are ratings with user ID 3 (encoded in `Array(0)`, where 0 is the index of the user
* ID 3 on partition 1) whose item IDs map to partitions 0 and 1 (represented by the fact that
* `Array(0)` appears in both the 0th and 1st positions).
*/
  // Outer index: destination partition ID; inner array: local indices of the src factors that
  // the destination partition needs (see the worked example in the Scaladoc above).
  private type OutBlock = Array[Array[Int]]
/**
* In-link block for computing user and item factor matrices.
*
* The ALS algorithm partitions the columns of the users factor matrix evenly among Spark workers.
* Since each column of the factor matrix is calculated using the known ratings of the correspond-
* ing user, and since the ratings don't change across iterations, the ALS algorithm preshuffles
* the ratings to the appropriate partitions, storing them in `InBlock` objects.
*
* The ratings shuffled by item ID are computed similarly and also stored in `InBlock` objects.
* Note that this means every rating is stored twice, once as shuffled by user ID and once by item
* ID. This is a necessary tradeoff, since in general a rating will not be on the same worker
* when partitioned by user as by item.
*
* =Example=
*
* Say we have a small collection of eight items to offer the seven users in our application. We
* have some known ratings given by the users, as seen in the matrix below:
*
* {{{
* Items
* 0 1 2 3 4 5 6 7
* +---+---+---+---+---+---+---+---+
* 0 | |0.1| | |0.4| | |0.7|
* +---+---+---+---+---+---+---+---+
* 1 | | | | | | | | |
* +---+---+---+---+---+---+---+---+
* U 2 | | | | | | | | |
* s +---+---+---+---+---+---+---+---+
* e 3 | |3.1| | |3.4| | |3.7|
* r +---+---+---+---+---+---+---+---+
* s 4 | | | | | | | | |
* +---+---+---+---+---+---+---+---+
* 5 | | | | | | | | |
* +---+---+---+---+---+---+---+---+
* 6 | |6.1| | |6.4| | |6.7|
* +---+---+---+---+---+---+---+---+
* }}}
*
* The ratings are represented as an RDD, passed to the `partitionRatings` method as the `ratings`
* parameter:
*
* {{{
* ratings.collect() == Seq(
* Rating(0, 1, 0.1f),
* Rating(0, 4, 0.4f),
* Rating(0, 7, 0.7f),
* Rating(3, 1, 3.1f),
* Rating(3, 4, 3.4f),
* Rating(3, 7, 3.7f),
* Rating(6, 1, 6.1f),
* Rating(6, 4, 6.4f),
* Rating(6, 7, 6.7f)
* )
* }}}
*
* Say that we are using two partitions to calculate each factor matrix:
*
* {{{
* val userPart = new ALSPartitioner(2)
* val itemPart = new ALSPartitioner(2)
* val blockRatings = partitionRatings(ratings, userPart, itemPart)
* }}}
*
* Ratings are mapped to partitions using the user/item IDs modulo the number of partitions. With
* two partitions, ratings with even-valued user IDs are shuffled to partition 0 while those with
* odd-valued user IDs are shuffled to partition 1:
*
* {{{
* userInBlocks.collect() == Seq(
* 0 -> Seq(
* // Internally, the class stores the ratings in a more optimized format than
* // a sequence of `Rating`s, but for clarity we show it as such here.
* Rating(0, 1, 0.1f),
* Rating(0, 4, 0.4f),
* Rating(0, 7, 0.7f),
* Rating(6, 1, 6.1f),
* Rating(6, 4, 6.4f),
* Rating(6, 7, 6.7f)
* ),
* 1 -> Seq(
* Rating(3, 1, 3.1f),
* Rating(3, 4, 3.4f),
* Rating(3, 7, 3.7f)
* )
* )
* }}}
*
* Similarly, ratings with even-valued item IDs are shuffled to partition 0 while those with
* odd-valued item IDs are shuffled to partition 1:
*
* {{{
* itemInBlocks.collect() == Seq(
* 0 -> Seq(
* Rating(0, 4, 0.4f),
* Rating(3, 4, 3.4f),
* Rating(6, 4, 6.4f)
* ),
* 1 -> Seq(
* Rating(0, 1, 0.1f),
* Rating(0, 7, 0.7f),
* Rating(3, 1, 3.1f),
* Rating(3, 7, 3.7f),
* Rating(6, 1, 6.1f),
* Rating(6, 7, 6.7f)
* )
* )
* }}}
*
* @param srcIds src ids (ordered)
* @param dstPtrs dst pointers. Elements in range [dstPtrs(i), dstPtrs(i+1)) of dst indices and
* ratings are associated with srcIds(i).
* @param dstEncodedIndices encoded dst indices
* @param ratings ratings
* @see [[LocalIndexEncoder]]
*/
  private[recommendation] case class InBlock[@specialized(Int, Long) ID: ClassTag](
      srcIds: Array[ID],
      dstPtrs: Array[Int],
      dstEncodedIndices: Array[Int],
      ratings: Array[Float]) {
    /** Size of the block, i.e. the number of ratings it stores. */
    def size: Int = ratings.length
    // CSC-style invariants: one encoded dst index per rating, and one pointer per src ID
    // plus a trailing sentinel.
    require(dstEncodedIndices.length == size)
    require(dstPtrs.length == srcIds.length + 1)
  }
  /**
   * Initializes factors randomly given the in-link blocks.
   *
   * @param inBlocks in-link blocks
   * @param rank rank
   * @param seed random seed; mixed with each block ID so blocks get distinct, reproducible streams
   * @return initialized factor blocks
   */
  private def initialize[ID](
      inBlocks: RDD[(Int, InBlock[ID])],
      rank: Int,
      seed: Long): RDD[(Int, FactorBlock)] = {
    // Choose a unit vector uniformly at random from the unit sphere, but from the
    // "first quadrant" where all elements are nonnegative. This can be done by choosing
    // elements distributed as Normal(0,1) and taking the absolute value, and then normalizing.
    // This appears to create factorizations that have a slightly better reconstruction
    // (<1%) compared picking elements uniformly at random in [0,1].
    // NOTE(review): the comment above mentions taking the absolute value, but the code below
    // does not call abs, so factor entries can be negative — confirm intended behavior.
    inBlocks.map { case (srcBlockId, inBlock) =>
      // Deterministic per-block seed so initialization is reproducible for a given `seed`.
      val random = new XORShiftRandom(byteswap64(seed ^ srcBlockId))
      val factors = Array.fill(inBlock.srcIds.length) {
        val factor = Array.fill(rank)(random.nextGaussian().toFloat)
        // Normalize to unit length with single-precision BLAS.
        val nrm = blas.snrm2(rank, factor, 1)
        blas.sscal(rank, 1.0f / nrm, factor, 1)
        factor
      }
      (srcBlockId, factors)
    }
  }
  /**
   * A rating block that contains src IDs, dst IDs, and ratings, stored in primitive arrays.
   */
  private[recommendation] case class RatingBlock[@specialized(Int, Long) ID: ClassTag](
      srcIds: Array[ID],
      dstIds: Array[ID],
      ratings: Array[Float]) {
    /** Size of the block, i.e. the number of ratings it stores. */
    def size: Int = srcIds.length
    // The three arrays are parallel: entry i of each describes one rating.
    require(dstIds.length == srcIds.length)
    require(ratings.length == srcIds.length)
  }
/**
* Builder for [[RatingBlock]]. `mutable.ArrayBuilder` is used to avoid boxing/unboxing.
*/
private[recommendation] class RatingBlockBuilder[@specialized(Int, Long) ID: ClassTag]
extends Serializable {
private val srcIds = mutable.ArrayBuilder.make[ID]
private val dstIds = mutable.ArrayBuilder.make[ID]
private val ratings = mutable.ArrayBuilder.make[Float]
var size = 0
/** Adds a rating. */
def add(r: Rating[ID]): this.type = {
size += 1
srcIds += r.user
dstIds += r.item
ratings += r.rating
this
}
/** Merges another [[RatingBlockBuilder]]. */
def merge(other: RatingBlock[ID]): this.type = {
size += other.srcIds.length
srcIds ++= other.srcIds
dstIds ++= other.dstIds
ratings ++= other.ratings
this
}
/** Builds a [[RatingBlock]]. */
def build(): RatingBlock[ID] = {
RatingBlock[ID](srcIds.result(), dstIds.result(), ratings.result())
}
}
/**
* Groups an RDD of [[Rating]]s by the user partition and item partition to which each `Rating`
* maps according to the given partitioners. The returned pair RDD holds the ratings, encoded in
* a memory-efficient format but otherwise unchanged, keyed by the (user partition ID, item
* partition ID) pair.
*
* Performance note: This is an expensive operation that performs an RDD shuffle.
*
* Implementation note: This implementation produces the same result as the following but
* generates fewer intermediate objects:
*
* {{{
* ratings.map { r =>
* ((srcPart.getPartition(r.user), dstPart.getPartition(r.item)), r)
* }.aggregateByKey(new RatingBlockBuilder)(
* seqOp = (b, r) => b.add(r),
* combOp = (b0, b1) => b0.merge(b1.build()))
* .mapValues(_.build())
* }}}
*
* @param ratings raw ratings
* @param srcPart partitioner for src IDs
* @param dstPart partitioner for dst IDs
* @return an RDD of rating blocks in the form of ((srcBlockId, dstBlockId), ratingBlock)
*/
  private def partitionRatings[ID: ClassTag](
      ratings: RDD[Rating[ID]],
      srcPart: Partitioner,
      dstPart: Partitioner): RDD[((Int, Int), RatingBlock[ID])] = {
    // One builder per (src block, dst block) pair, linearized into a single array.
    val numPartitions = srcPart.numPartitions * dstPart.numPartitions
    ratings.mapPartitions { iter =>
      val builders = Array.fill(numPartitions)(new RatingBlockBuilder[ID])
      iter.flatMap { r =>
        val srcBlockId = srcPart.getPartition(r.user)
        val dstBlockId = dstPart.getPartition(r.item)
        val idx = srcBlockId + srcPart.numPartitions * dstBlockId
        val builder = builders(idx)
        builder.add(r)
        // Emit a block eagerly once the builder is full, bounding per-partition memory.
        if (builder.size >= 2048) { // 2048 * (3 * 4) = 24k
          builders(idx) = new RatingBlockBuilder
          Iterator.single(((srcBlockId, dstBlockId), builder.build()))
        } else {
          Iterator.empty
        }
      } ++ {
        // Flush the partially filled builders. Iterator concatenation is lazy, so this block
        // is evaluated only after the flatMap above has consumed the input iterator.
        builders.view.zipWithIndex.filter(_._1.size > 0).map { case (block, idx) =>
          val srcBlockId = idx % srcPart.numPartitions
          val dstBlockId = idx / srcPart.numPartitions
          ((srcBlockId, dstBlockId), block.build())
        }
      }
    }.groupByKey().mapValues { blocks =>
      // Merge the partial blocks emitted for the same (src block, dst block) key.
      val builder = new RatingBlockBuilder[ID]
      blocks.foreach(builder.merge)
      builder.build()
    }.setName("ratingBlocks")
  }
/**
* Builder for uncompressed in-blocks of (srcId, dstEncodedIndex, rating) tuples.
*
* @param encoder encoder for dst indices
*/
private[recommendation] class UncompressedInBlockBuilder[@specialized(Int, Long) ID: ClassTag](
encoder: LocalIndexEncoder)(
implicit ord: Ordering[ID]) {
private val srcIds = mutable.ArrayBuilder.make[ID]
private val dstEncodedIndices = mutable.ArrayBuilder.make[Int]
private val ratings = mutable.ArrayBuilder.make[Float]
/**
* Adds a dst block of (srcId, dstLocalIndex, rating) tuples.
*
* @param dstBlockId dst block ID
* @param srcIds original src IDs
* @param dstLocalIndices dst local indices
* @param ratings ratings
*/
def add(
dstBlockId: Int,
srcIds: Array[ID],
dstLocalIndices: Array[Int],
ratings: Array[Float]): this.type = {
val sz = srcIds.length
require(dstLocalIndices.length == sz)
require(ratings.length == sz)
this.srcIds ++= srcIds
this.ratings ++= ratings
var j = 0
while (j < sz) {
this.dstEncodedIndices += encoder.encode(dstBlockId, dstLocalIndices(j))
j += 1
}
this
}
/** Builds a [[UncompressedInBlock]]. */
def build(): UncompressedInBlock[ID] = {
new UncompressedInBlock(srcIds.result(), dstEncodedIndices.result(), ratings.result())
}
}
  /**
   * A block of (srcId, dstEncodedIndex, rating) tuples stored in primitive arrays.
   */
  private[recommendation] class UncompressedInBlock[@specialized(Int, Long) ID: ClassTag](
      val srcIds: Array[ID],
      val dstEncodedIndices: Array[Int],
      val ratings: Array[Float])(
      implicit ord: Ordering[ID]) {
    /** Size of the block. */
    def length: Int = srcIds.length
    /**
     * Compresses the block into an `InBlock`. The algorithm is the same as converting a sparse
     * matrix from coordinate list (COO) format into compressed sparse column (CSC) format.
     * Sorting is done using Spark's built-in Timsort to avoid generating too many objects.
     */
    def compress(): InBlock[ID] = {
      val sz = length
      assert(sz > 0, "Empty in-link block should not exist.")
      sort()
      // Run-length encode the sorted src IDs: collect each distinct ID and how many ratings
      // it carries.
      val uniqueSrcIdsBuilder = mutable.ArrayBuilder.make[ID]
      val dstCountsBuilder = mutable.ArrayBuilder.make[Int]
      var preSrcId = srcIds(0)
      uniqueSrcIdsBuilder += preSrcId
      var curCount = 1
      var i = 1
      while (i < sz) {
        val srcId = srcIds(i)
        if (srcId != preSrcId) {
          uniqueSrcIdsBuilder += srcId
          dstCountsBuilder += curCount
          preSrcId = srcId
          curCount = 0
        }
        curCount += 1
        i += 1
      }
      dstCountsBuilder += curCount
      val uniqueSrcIds = uniqueSrcIdsBuilder.result()
      val numUniqueSrdIds = uniqueSrcIds.length
      val dstCounts = dstCountsBuilder.result()
      // Prefix sums of the counts form the CSC-style pointer array (length numUniqueSrdIds + 1,
      // with dstPtrs(0) == 0 as the implicit base).
      val dstPtrs = new Array[Int](numUniqueSrdIds + 1)
      var sum = 0
      i = 0
      while (i < numUniqueSrdIds) {
        sum += dstCounts(i)
        i += 1
        dstPtrs(i) = sum
      }
      InBlock(uniqueSrcIds, dstPtrs, dstEncodedIndices, ratings)
    }
    /** Sorts the three parallel arrays in place by src ID. */
    private def sort(): Unit = {
      val sz = length
      // Since there might be interleaved log messages, we insert a unique id for easy pairing.
      val sortId = Utils.random.nextInt()
      logDebug(s"Start sorting an uncompressed in-block of size $sz. (sortId = $sortId)")
      val start = System.nanoTime()
      val sorter = new Sorter(new UncompressedInBlockSort[ID])
      sorter.sort(this, 0, length, Ordering[KeyWrapper[ID]])
      val duration = (System.nanoTime() - start) / 1e9
      logDebug(s"Sorting took $duration seconds. (sortId = $sortId)")
    }
  }
  /**
   * A wrapper that holds a primitive key.
   *
   * @see [[UncompressedInBlockSort]]
   */
  private class KeyWrapper[@specialized(Int, Long) ID: ClassTag](
      implicit ord: Ordering[ID]) extends Ordered[KeyWrapper[ID]] {
    // Mutable so a single wrapper instance can be reused across comparisons during sorting,
    // avoiding a per-element allocation.
    var key: ID = _
    override def compare(that: KeyWrapper[ID]): Int = {
      ord.compare(key, that.key)
    }
    /** Sets the wrapped key and returns this wrapper to allow call chaining. */
    def setKey(key: ID): this.type = {
      this.key = key
      this
    }
  }
  /**
   * [[SortDataFormat]] of [[UncompressedInBlock]] used by [[Sorter]].
   *
   * The block consists of three parallel arrays, so every move/swap/copy operation below must be
   * applied to all three to keep them aligned.
   */
  private class UncompressedInBlockSort[@specialized(Int, Long) ID: ClassTag](
      implicit ord: Ordering[ID])
    extends SortDataFormat[KeyWrapper[ID], UncompressedInBlock[ID]] {
    override def newKey(): KeyWrapper[ID] = new KeyWrapper()
    // Returns the sort key (the src ID) at `pos`, reusing `reuse` when provided to avoid
    // allocating a wrapper per comparison.
    override def getKey(
        data: UncompressedInBlock[ID],
        pos: Int,
        reuse: KeyWrapper[ID]): KeyWrapper[ID] = {
      if (reuse == null) {
        new KeyWrapper().setKey(data.srcIds(pos))
      } else {
        reuse.setKey(data.srcIds(pos))
      }
    }
    override def getKey(
        data: UncompressedInBlock[ID],
        pos: Int): KeyWrapper[ID] = {
      getKey(data, pos, null)
    }
    // Swaps two entries of a single backing array.
    private def swapElements[@specialized(Int, Float) T](
        data: Array[T],
        pos0: Int,
        pos1: Int): Unit = {
      val tmp = data(pos0)
      data(pos0) = data(pos1)
      data(pos1) = tmp
    }
    override def swap(data: UncompressedInBlock[ID], pos0: Int, pos1: Int): Unit = {
      swapElements(data.srcIds, pos0, pos1)
      swapElements(data.dstEncodedIndices, pos0, pos1)
      swapElements(data.ratings, pos0, pos1)
    }
    override def copyRange(
        src: UncompressedInBlock[ID],
        srcPos: Int,
        dst: UncompressedInBlock[ID],
        dstPos: Int,
        length: Int): Unit = {
      System.arraycopy(src.srcIds, srcPos, dst.srcIds, dstPos, length)
      System.arraycopy(src.dstEncodedIndices, srcPos, dst.dstEncodedIndices, dstPos, length)
      System.arraycopy(src.ratings, srcPos, dst.ratings, dstPos, length)
    }
    override def allocate(length: Int): UncompressedInBlock[ID] = {
      new UncompressedInBlock(
        new Array[ID](length), new Array[Int](length), new Array[Float](length))
    }
    override def copyElement(
        src: UncompressedInBlock[ID],
        srcPos: Int,
        dst: UncompressedInBlock[ID],
        dstPos: Int): Unit = {
      dst.srcIds(dstPos) = src.srcIds(srcPos)
      dst.dstEncodedIndices(dstPos) = src.dstEncodedIndices(srcPos)
      dst.ratings(dstPos) = src.ratings(srcPos)
    }
  }
  /**
   * Creates in-blocks and out-blocks from rating blocks.
   *
   * @param prefix prefix for in/out-block names
   * @param ratingBlocks rating blocks
   * @param srcPart partitioner for src IDs
   * @param dstPart partitioner for dst IDs
   * @param storageLevel storage level used to persist both the in-blocks and the out-blocks
   * @return (in-blocks, out-blocks)
   */
  private def makeBlocks[ID: ClassTag](
      prefix: String,
      ratingBlocks: RDD[((Int, Int), RatingBlock[ID])],
      srcPart: Partitioner,
      dstPart: Partitioner,
      storageLevel: StorageLevel)(
      implicit srcOrd: Ordering[ID]): (RDD[(Int, InBlock[ID])], RDD[(Int, OutBlock)]) = {
    val inBlocks = ratingBlocks.map {
      case ((srcBlockId, dstBlockId), RatingBlock(srcIds, dstIds, ratings)) =>
        // The implementation is a faster version of
        // val dstIdToLocalIndex = dstIds.toSet.toSeq.sorted.zipWithIndex.toMap
        val start = System.nanoTime()
        // Collect the distinct dst IDs of this rating block.
        val dstIdSet = new OpenHashSet[ID](1 << 20)
        dstIds.foreach(dstIdSet.add)
        // Copy the set's contents into an array so it can be sorted.
        val sortedDstIds = new Array[ID](dstIdSet.size)
        var i = 0
        var pos = dstIdSet.nextPos(0)
        while (pos != -1) {
          sortedDstIds(i) = dstIdSet.getValue(pos)
          pos = dstIdSet.nextPos(pos + 1)
          i += 1
        }
        assert(i == dstIdSet.size)
        Sorting.quickSort(sortedDstIds)
        // Map each dst ID to its rank in the sorted order; that rank is the "local index".
        val dstIdToLocalIndex = new OpenHashMap[ID, Int](sortedDstIds.length)
        i = 0
        while (i < sortedDstIds.length) {
          dstIdToLocalIndex.update(sortedDstIds(i), i)
          i += 1
        }
        logDebug(
          "Converting to local indices took " + (System.nanoTime() - start) / 1e9 + " seconds.")
        val dstLocalIndices = dstIds.map(dstIdToLocalIndex.apply)
        (srcBlockId, (dstBlockId, srcIds, dstLocalIndices, ratings))
    }.groupByKey(new ALSPartitioner(srcPart.numPartitions))
      .mapValues { iter =>
        val builder =
          new UncompressedInBlockBuilder[ID](new LocalIndexEncoder(dstPart.numPartitions))
        iter.foreach { case (dstBlockId, srcIds, dstLocalIndices, ratings) =>
          builder.add(dstBlockId, srcIds, dstLocalIndices, ratings)
        }
        builder.build().compress()
      }.setName(prefix + "InBlocks")
      .persist(storageLevel)
    val outBlocks = inBlocks.mapValues { case InBlock(srcIds, dstPtrs, dstEncodedIndices, _) =>
      val encoder = new LocalIndexEncoder(dstPart.numPartitions)
      val activeIds = Array.fill(dstPart.numPartitions)(mutable.ArrayBuilder.make[Int])
      var i = 0
      // seen(b) marks whether src row i has already been recorded as active for dst block b,
      // so each (src row, dst block) pair is emitted at most once.
      val seen = new Array[Boolean](dstPart.numPartitions)
      while (i < srcIds.length) {
        var j = dstPtrs(i)
        ju.Arrays.fill(seen, false)
        while (j < dstPtrs(i + 1)) {
          val dstBlockId = encoder.blockId(dstEncodedIndices(j))
          if (!seen(dstBlockId)) {
            activeIds(dstBlockId) += i // add the local index in this out-block
            seen(dstBlockId) = true
          }
          j += 1
        }
        i += 1
      }
      activeIds.map { x =>
        x.result()
      }
    }.setName(prefix + "OutBlocks")
      .persist(storageLevel)
    (inBlocks, outBlocks)
  }
/**
* Compute dst factors by constructing and solving least square problems.
*
* @param srcFactorBlocks src factors
* @param srcOutBlocks src out-blocks
* @param dstInBlocks dst in-blocks
* @param rank rank
* @param regParam regularization constant
* @param srcEncoder encoder for src local indices
* @param implicitPrefs whether to use implicit preference
* @param alpha the alpha constant in the implicit preference formulation
* @param solver solver for least squares problems
* @return dst factors
*/
  private def computeFactors[ID](
      srcFactorBlocks: RDD[(Int, FactorBlock)],
      srcOutBlocks: RDD[(Int, OutBlock)],
      dstInBlocks: RDD[(Int, InBlock[ID])],
      rank: Int,
      regParam: Double,
      srcEncoder: LocalIndexEncoder,
      implicitPrefs: Boolean = false,
      alpha: Double = 1.0,
      solver: LeastSquaresNESolver): RDD[(Int, FactorBlock)] = {
    val numSrcBlocks = srcFactorBlocks.partitions.length
    val YtY = if (implicitPrefs) Some(computeYtY(srcFactorBlocks, rank)) else None
    // Ship each src factor vector only to the dst blocks that actually need it, as recorded
    // in the out-blocks.
    val srcOut = srcOutBlocks.join(srcFactorBlocks).flatMap {
      case (srcBlockId, (srcOutBlock, srcFactors)) =>
        srcOutBlock.view.zipWithIndex.map { case (activeIndices, dstBlockId) =>
          (dstBlockId, (srcBlockId, activeIndices.map(idx => srcFactors(idx))))
        }
    }
    val merged = srcOut.groupByKey(new ALSPartitioner(dstInBlocks.partitions.length))
    dstInBlocks.join(merged).mapValues {
      case (InBlock(dstIds, srcPtrs, srcEncodedIndices, ratings), srcFactors) =>
        // Index the received factor blocks by src block ID for O(1) lookup below.
        val sortedSrcFactors = new Array[FactorBlock](numSrcBlocks)
        srcFactors.foreach { case (srcBlockId, factors) =>
          sortedSrcFactors(srcBlockId) = factors
        }
        val dstFactors = new Array[Array[Float]](dstIds.length)
        var j = 0
        // One NormalEquation instance is reused for every dst ID in this block.
        val ls = new NormalEquation(rank)
        while (j < dstIds.length) {
          ls.reset()
          if (implicitPrefs) {
            ls.merge(YtY.get)
          }
          var i = srcPtrs(j)
          var numExplicits = 0
          while (i < srcPtrs(j + 1)) {
            val encoded = srcEncodedIndices(i)
            val blockId = srcEncoder.blockId(encoded)
            val localIndex = srcEncoder.localIndex(encoded)
            val srcFactor = sortedSrcFactors(blockId)(localIndex)
            val rating = ratings(i)
            if (implicitPrefs) {
              // Extension to the original paper to handle rating < 0. confidence is a function
              // of |rating| instead so that it is never negative. c1 is confidence - 1.
              val c1 = alpha * math.abs(rating)
              // For rating <= 0, the corresponding preference is 0. So the second argument of add
              // is only there for rating > 0.
              if (rating > 0.0) {
                numExplicits += 1
              }
              ls.add(srcFactor, if (rating > 0.0) 1.0 + c1 else 0.0, c1)
            } else {
              ls.add(srcFactor, rating)
              numExplicits += 1
            }
            i += 1
          }
          // Weight lambda by the number of explicit ratings based on the ALS-WR paper.
          dstFactors(j) = solver.solve(ls, numExplicits * regParam)
          j += 1
        }
        dstFactors
    }
  }
  /**
   * Computes the Gramian matrix of user or item factors, which is only used in implicit preference.
   * Caching of the input factors is handled in [[ALS#train]].
   */
  private def computeYtY(factorBlocks: RDD[(Int, FactorBlock)], rank: Int): NormalEquation = {
    factorBlocks.values.aggregate(new NormalEquation(rank))(
      seqOp = (ne, factors) => {
        // b = 0.0 accumulates only A^T^ * A; add() skips the A^T^ * b update when b == 0.
        factors.foreach(ne.add(_, 0.0))
        ne
      },
      combOp = (ne1, ne2) => ne1.merge(ne2))
  }
/**
 * Packs a (blockId, localIndex) pair into a single Int.
 *
 * The high-order bits (sign bit included) carry the block id and the
 * remaining low-order bits carry the local index. This relies on
 * users/items being spread roughly evenly across blocks; under that
 * assumption about two billion distinct values can be encoded.
 *
 * @param numBlocks number of blocks
 */
private[recommendation] class LocalIndexEncoder(numBlocks: Int) extends Serializable {

  require(numBlocks > 0, s"numBlocks must be positive but found $numBlocks.")

  // Number of low-order bits reserved for the local index: just enough
  // leading bits are kept to distinguish all block ids (capped at 31).
  private[this] final val localBits =
    math.min(java.lang.Integer.numberOfLeadingZeros(numBlocks - 1), 31)

  // Bit mask selecting exactly the local-index portion of an encoded value.
  private[this] final val localMask = (1 << localBits) - 1

  /** Packs a (blockId, localIndex) pair into one Int. */
  def encode(blockId: Int, localIndex: Int): Int = {
    require(blockId < numBlocks)
    require((localIndex & ~localMask) == 0)
    (blockId << localBits) | localIndex
  }

  /** Extracts the block id from an encoded value. */
  @inline
  def blockId(encoded: Int): Int = encoded >>> localBits

  /** Extracts the local index from an encoded value. */
  @inline
  def localIndex(encoded: Int): Int = encoded & localMask
}
/**
 * Partitioner used by ALS. We require that getPartition is a projection. That is, for any key k,
 * we have getPartition(getPartition(k)) = getPartition(k). Since the default HashPartitioner
 * satisfies this requirement, we simply use a type alias here.
 */
// A plain type alias: no behavior is added on top of HashPartitioner.
private[recommendation] type ALSPartitioner = org.apache.spark.HashPartitioner
/**
 * Private function to clean up all of the shuffles files from the dependencies and their parents.
 *
 * @param sc       the active SparkContext (its cleaner, when present, does the work)
 * @param deps     the dependencies whose shuffle files should be removed
 * @param blocking whether each individual cleanup call should block
 */
private[spark] def cleanShuffleDependencies[T](
    sc: SparkContext,
    deps: Seq[Dependency[_]],
    blocking: Boolean = false): Unit = {
  // If there is no reference tracking we skip clean up.
  sc.cleaner.foreach { cleaner =>
    /**
     * Clean the shuffles & all of its parents.
     */
    def cleanEagerly(dep: Dependency[_]): Unit = {
      // Only shuffle dependencies own shuffle files; other dependency kinds
      // are merely traversed.
      if (dep.isInstanceOf[ShuffleDependency[_, _, _]]) {
        val shuffleId = dep.asInstanceOf[ShuffleDependency[_, _, _]].shuffleId
        cleaner.doCleanupShuffle(shuffleId, blocking)
      }
      val rdd = dep.rdd
      val rddDeps = rdd.dependencies
      // Recursion stops at persisted RDDs (storage level != NONE): their
      // lineage may still be needed to serve the cached data.
      if (rdd.getStorageLevel == StorageLevel.NONE && rddDeps != null) {
        rddDeps.foreach(cleanEagerly)
      }
    }
    deps.foreach(cleanEagerly)
  }
}
}
| tejasapatil/spark | mllib/src/main/scala/org/apache/spark/ml/recommendation/ALS.scala | Scala | apache-2.0 | 68,246 |
package com.twitter.finagle.netty4.codec
import com.twitter.finagle.Failure
import com.twitter.finagle.codec.FrameEncoder
import com.twitter.finagle.netty4.BufAsByteBuf
import com.twitter.io.Buf
import com.twitter.util.NonFatal
import io.netty.channel.{ChannelPromise, ChannelHandlerContext, ChannelOutboundHandlerAdapter}
/**
 * A netty4 channel handler which encodes outbound `Out`-typed messages into
 * [[io.netty.buffer.ByteBuf ByteBufs]].
 *
 * @note for positioning in the pipeline, this handler constitutes the boundary
 *       between an `Out`-typed application frame and netty's ByteBufs. Install
 *       outbound handlers around it accordingly.
 */
private[netty4] class EncodeHandler[Out](frameEncoder: FrameEncoder[Out]) extends ChannelOutboundHandlerAdapter {
  // The handler keeps no mutable state, so one instance can be shared
  // across pipelines.
  override def isSharable = true

  override def write(ctx: ChannelHandlerContext, msg: Any, promise: ChannelPromise): Unit = {
    // Encoding failures are surfaced through the pipeline as a Failure and
    // the message is replaced with an empty Buf so nothing is written.
    val encoded =
      try { frameEncoder(msg.asInstanceOf[Out]) } catch {
        case NonFatal(e) =>
          ctx.pipeline.fireExceptionCaught(Failure("encoding failure", e))
          Buf.Empty
      }
    // NOTE(review): when the encoded Buf is empty (including the failure path
    // above) `promise` is never completed here — confirm whether callers wait
    // on the write promise, as this looks like it could leave them pending.
    if (!encoded.isEmpty) super.write(ctx, BufAsByteBuf.Owned(encoded), promise)
  }
}
| liamstewart/finagle | finagle-netty4/src/main/scala/com/twitter/finagle/netty4/codec/EncodeHandler.scala | Scala | apache-2.0 | 1,202 |
/*
* Copyright 2015 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
package com.linkedin.playparseq.s.stores
import com.linkedin.parseq.Task
import java.util.Collections
import java.util.concurrent.ConcurrentHashMap
import javax.inject.Singleton
import play.api.libs.typedmap.TypedKey
import play.api.mvc.RequestHeader
import scala.collection.JavaConverters._
import scala.collection.mutable.{Set => MutableSet}
/**
 * The trait ParSeqTaskStore defines putting ParSeq Task into store and getting all Tasks out of store.
 * During a request, all ParSeq Tasks will be stored in the ParSeqTaskStore when they run, and will be retrieved when it
 * needs to generate the ParSeq Trace of the current request. The put/get APIs can only be properly used after the API
 * initialize is called for setting up the store.
 *
 * @author Yinan Ding (yding@linkedin.com)
 */
trait ParSeqTaskStore {

  /**
   * The method put puts ParSeq Task into store.
   *
   * @param task The ParSeq Task
   * @param requestHeader The Request
   */
  // NOTE(review): declared with procedure syntax (no ": Unit"); Scala infers
  // Unit, but the explicit result type is preferred (procedure syntax is
  // deprecated in newer Scala versions).
  def put(task: Task[_])(implicit requestHeader: RequestHeader)

  /**
   * The method get gets all Tasks from one request out of store as an immutable Set.
   *
   * @param requestHeader The Request
   * @return A Set of Tasks
   */
  def get(implicit requestHeader: RequestHeader): Set[Task[_]]

  /**
   * The method initialize sets up the store properly for put/get APIs.
   *
   * @param request The origin Request
   * @tparam T The type parameter of the Request
   * @return The Request with store set up properly
   */
  def initialize[T <: RequestHeader](request: T): T
}
/**
 * Request-attribute-backed implementation of [[ParSeqTaskStore]].
 *
 * The mutable Task set lives inside the request's typed attribute map and is
 * only created by `initialize` (done by ParSeqTraceAction). When the attribute
 * was never initialized, `put` is a no-op and `get` yields the empty set, so
 * the store degrades to a harmless dummy.
 *
 * @author Yinan Ding (yding@linkedin.com)
 */
@Singleton
class ParSeqTaskStoreImpl extends ParSeqTaskStore {

  /**
   * The request-attribute key under which the mutable Task set is stored.
   */
  val ArgumentsKey: TypedKey[MutableSet[Task[_]]] = TypedKey("ParSeqTasks")

  /**
   * @inheritdoc
   */
  override def put(task: Task[_])(implicit requestHeader: RequestHeader): Unit =
    taskSet.foreach(_.add(task))

  /**
   * @inheritdoc
   */
  override def get(implicit requestHeader: RequestHeader): Set[Task[_]] =
    taskSet.fold(Set.empty[Task[_]])(_.toSet)

  /**
   * @inheritdoc
   */
  override def initialize[T <: RequestHeader](request: T): T = {
    // Concurrent set: tasks may be registered from multiple threads.
    val backing = Collections.newSetFromMap[Task[_]](new ConcurrentHashMap)
    request.addAttr(ArgumentsKey, backing.asScala).asInstanceOf[T]
  }

  /**
   * Looks up the mutable Task set of the current request, when initialized.
   *
   * @param requestHeader The Request
   * @return The optional backing set
   */
  private[this] def taskSet(implicit requestHeader: RequestHeader) = requestHeader.attrs.get(ArgumentsKey)
}
| linkedin/play-parseq | src/core/scala/app/com/linkedin/playparseq/s/stores/ParSeqTaskStore.scala | Scala | apache-2.0 | 3,472 |
import scala.quoted._
import Macros._
// Test driver for the i4734 macro test: invokes the `unrolledForeach`
// macro (imported from Macros._) with a constant function.
object Test {
  def main(args: Array[String]): Unit = {
    unrolledForeach((x: Int) => 2)
  }
}
| som-snytt/dotty | tests/pos-macros/i4734/Test_2.scala | Scala | apache-2.0 | 136 |
/**
 * Exercise 4:
 *
 * The previous implementation needed a special case when n < 1. Show how you can
 * avoid this with foldLeft. (Look at the Scaladoc for foldLeft. It's like reduceLeft,
 * except that the first value in the chain of combined values is supplied in the call.)
 *
 * For num < 1 the range (1 to num) is empty, so foldLeft simply returns its
 * seed value 1 — no special case required.
 **/
def factorial(num: Int) = (1 to num).foldLeft(1)((acc, k) => acc * k)

factorial(5)
| ragmha/scala-impatient | solutions/high-order-func/ex4.scala | Scala | mit | 354 |
package uaa
import com.excilys.ebi.gatling.core.feeder.Feeder
import java.util.concurrent.ConcurrentLinkedQueue
import collection.JavaConversions._
import util.Random
import collection.{mutable}
/**
* Various types of feeders for SCIM resources
* @author Vidya Valmikinathan
*/
// Feeder that hands out each user exactly once (backed by a concurrent queue),
// pairing the username with either the user's own password or, when given,
// a fixed override password.
case class UniqueUsernamePasswordFeeder(usrs: Seq[User], password: Option[String] = None) extends Feeder[String] {
  private val users = new ConcurrentLinkedQueue[User](usrs)
  println("%d users".format(users.size()))

  // NOTE(review): remove() throws NoSuchElementException if the queue is
  // drained between hasNext and next by a concurrent consumer — confirm
  // Gatling never races feeders like this.
  def next = {
    val user = users.remove()
    val pass = password match { case None => user.password; case Some(p) => p}
    Map("username" -> user.username, "password" -> pass)
  }

  def hasNext = !users.isEmpty
}
// Combines a unique group name with a random member set into one record
// per invocation.
case class UniqueGroupFeeder(usrs: Seq[User] = Config.users, grps: Seq[Group] = Config.groups, grpSize: Int = Config.avgGroupSize) extends Feeder[String] {
  private val nameFeeder = new UniqueDisplayNameFeeder(grps)
  private val memberFeeder = new RandomGroupMemberFeeder(usrs, grpSize)

  def hasNext = nameFeeder.hasNext && memberFeeder.hasNext

  def next = {
    val map = mutable.HashMap.empty[String, String]
    // putAll works here via the file's JavaConversions._ import, which
    // implicitly bridges between Scala and Java maps.
    map.putAll(nameFeeder.next)
    map.putAll(memberFeeder.next)
    println("next group: %s" format(map))
    map.toMap
  }
}
// Hands out each group's display name exactly once, in queue order.
case class UniqueDisplayNameFeeder(grps: Seq[Group]) extends Feeder[String] {
  private val groups = new ConcurrentLinkedQueue[Group](grps)
  println("%d groups".format(groups.size()))

  def next = {
    // remove() throws if empty; callers are expected to check hasNext first.
    Map("displayName" -> groups.remove().displayName)
  }

  def hasNext = !groups.isEmpty
}
// Cycles through the group display names in order, wrapping back to the
// first group once `resetAfter` has been passed.
case class SequentialDisplayNameFeeder(grps: Seq[Group] = Config.groups, resetAfter: Int = (Config.groups.size-1)) extends Feeder[String] {
  private val groups = grps
  // NOTE(review): a plain var shared by all virtual users — not thread-safe;
  // confirm this feeder is only advanced from a single thread.
  private var counter = -1

  def hasNext = !groups.isEmpty

  def next = {
    if (counter >= resetAfter)
      counter = -1
    counter += 1
    // .get(i) resolves through the file's JavaConversions._ import
    // (Seq is implicitly viewed as a java.util.List).
    println("next group: " + groups.get(counter).displayName)
    Map("displayName" -> groups.get(counter).displayName)
  }
}
// Picks `n` members uniformly at random (with possible repeats) from the
// user list, exposing them as memberName_1 .. memberName_n.
case class RandomGroupMemberFeeder(usrs: Seq[User] = Config.users, n: Int = Config.avgGroupSize) extends Feeder[String] {
  private val users = usrs
  private val randGen = new Random
  private val num = n
  println("picking %d members from %d users" format(num, users.size))

  def next = {
    val members = mutable.HashMap.empty[String, String]
    (1 to num).foreach { i =>
      // Duplicate picks overwrite nothing (keys differ by index) but the
      // same username may appear under several keys.
      members += (("memberName_" + i) -> users.get(randGen.nextInt(users.size)).username)
    }
    println("next member set: %s" format(members))
    members.toMap
  }

  def hasNext = !users.isEmpty && num > 0
}
// Infinite feeder that always yields the same single key/value pair.
case class ConstantFeeder(key: String = "constantKey", value: String = "constantValue") extends Feeder[String] {
  def hasNext = true
  def next = {
    Map(key -> value)
  }
}
| andybondar/cf-installer | uaa/gatling/src/main/scala/uaa/ScimFeeders.scala | Scala | gpl-3.0 | 2,777 |
import scala.reflect.runtime.universe._
// Compiler-test fixture: calling `foo` without a type argument leaves T
// undetermined — presumably the test pins down what TypeTag is materialized
// in that case (T inferred as Nothing); verify against the expected output.
object Test extends App {
  def foo[T: TypeTag] = println(implicitly[TypeTag[T]])
  foo
}
/*
* Copyright (c) 2014-2015 by its authors. Some rights reserved.
* See the project homepage at: http://www.monifu.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monifu.concurrent.atomic.padded
import monifu.concurrent.atomic
// Padded-variant builder; only re-exposes the generic atomic builder contract.
trait AtomicBuilder[T, R <: atomic.Atomic[T]] extends atomic.AtomicBuilder[T, R]

// Mixes in Level3 so the most specific builders (primitives) shadow the
// generic ones during implicit search.
object AtomicBuilder extends Implicits.Level3
// Implicit priority ladder: instances declared in a subtrait (Level3) take
// precedence over those inherited from supertraits (Level2, Level1), so the
// compiler prefers primitive-specialized builders, then Numeric-based ones,
// and only falls back to the generic AtomicAny builder.
private[padded] object Implicits {
  // Lowest priority: any type gets a generic AtomicAny builder.
  trait Level1 {
    implicit def AtomicRefBuilder[T]: AtomicBuilder[T, AtomicAny[T]] =
      new AtomicBuilder[T, AtomicAny[T]] {
        def buildInstance(initialValue: T) =
          AtomicAny(initialValue)
      }
  }

  // Mid priority: types with a Numeric instance get a numeric atomic.
  trait Level2 extends Level1 {
    implicit def AtomicNumberBuilder[T : Numeric]: AtomicBuilder[T, AtomicNumberAny[T]] =
      new AtomicBuilder[T, AtomicNumberAny[T]] {
        def buildInstance(initialValue: T) =
          AtomicNumberAny(initialValue)
      }
  }

  // Highest priority: one dedicated builder per primitive type.
  trait Level3 extends Level2 {
    implicit val AtomicIntBuilder =
      new AtomicBuilder[Int, AtomicInt] {
        def buildInstance(initialValue: Int) =
          AtomicInt(initialValue)
      }

    implicit val AtomicLongBuilder =
      new AtomicBuilder[Long, AtomicLong] {
        def buildInstance(initialValue: Long) =
          AtomicLong(initialValue)
      }

    implicit val AtomicBooleanBuilder =
      new AtomicBuilder[Boolean, AtomicBoolean] {
        def buildInstance(initialValue: Boolean) =
          AtomicBoolean(initialValue)
      }

    implicit val AtomicByteBuilder =
      new AtomicBuilder[Byte, AtomicByte] {
        def buildInstance(initialValue: Byte): AtomicByte =
          AtomicByte(initialValue)
      }

    implicit val AtomicCharBuilder =
      new AtomicBuilder[Char, AtomicChar] {
        def buildInstance(initialValue: Char): AtomicChar =
          AtomicChar(initialValue)
      }

    implicit val AtomicShortBuilder =
      new AtomicBuilder[Short, AtomicShort] {
        def buildInstance(initialValue: Short): AtomicShort =
          AtomicShort(initialValue)
      }

    implicit val AtomicFloatBuilder =
      new AtomicBuilder[Float, AtomicFloat] {
        def buildInstance(initialValue: Float): AtomicFloat =
          AtomicFloat(initialValue)
      }

    implicit val AtomicDoubleBuilder =
      new AtomicBuilder[Double, AtomicDouble] {
        def buildInstance(initialValue: Double): AtomicDouble =
          AtomicDouble(initialValue)
      }
  }
}
| sergius/monifu | core/js/src/main/scala/monifu/concurrent/atomic/padded/AtomicBuilder.scala | Scala | apache-2.0 | 2,931 |
package org.dsa.utils
import java.io.Serializable
import java.util.{PriorityQueue => JPriorityQueue}
import scala.collection.JavaConverters._
import scala.collection.generic.Growable
/**
* Created by xubo on 2017/1/15.
*/
/**
 * A size-bounded priority queue that retains only the `maxSize` largest
 * elements (according to `ord`) ever added. Backed by a java.util
 * PriorityQueue used as a min-heap, so the smallest retained element sits
 * at the head and can be evicted cheaply when a larger candidate arrives.
 */
class DSABoundedPriorityQueue[A](maxSize: Int)(implicit ord: Ordering[A]) extends Iterable[A] with Growable[A] with Serializable {

  // Min-heap ordered by `ord`: peek() always yields the smallest element kept.
  private val underlying = new JPriorityQueue[A](maxSize, ord)

  override def +=(elem: A): DSABoundedPriorityQueue.this.type = {
    if (size >= maxSize) maybeReplaceLowest(elem)
    else underlying.offer(elem)
    this
  }

  override def clear(): Unit = underlying.clear()

  override def iterator: Iterator[A] = underlying.iterator.asScala

  /** Evicts the current minimum iff the candidate is strictly greater. */
  private def maybeReplaceLowest(candidate: A): Boolean = {
    val lowest = underlying.peek()
    if (lowest != null && ord.gt(candidate, lowest)) {
      underlying.poll()
      underlying.offer(candidate)
    } else {
      false
    }
  }
}
| xubo245/CloudSW | src/main/scala/org/dsa/utils/DSABoundedPriorityQueue.scala | Scala | gpl-2.0 | 953 |
package com.jamontes79.scala.movielist.utils.json
import com.jamontes79.scala.movielist.utils.MyUtils.Service
/**
* Created by alberto on 29/9/15.
*/
// Service interface bundling the JSON-loading endpoints; each member maps a
// JsonRequest to a specific response type via the shared Service alias.
trait JsonServices {
  def loadJson: Service[JsonRequest, JsonResponse]
  def loadJsonConfig: Service[JsonRequest, JsonResponseConfig]
  def loadJsonTrailer: Service[JsonRequest, JsonResponseTrailer]
  def loadJsonImages: Service[JsonRequest, JsonResponseImages]
  def loadJsonMovieDetail: Service[JsonRequest, JsonResponseMovieDetail]
}
// Cake-pattern component: mix in to obtain a concrete JsonServices instance.
trait JsonServicesComponent {
  val jsonServices: JsonServices
}
| jamontes79/movieList | src/main/scala/com/jamontes79/scala/movielist/utils/json/JsonServicesComponent.scala | Scala | apache-2.0 | 559 |
package org.bitcoins.core.crypto
import org.bitcoins.core.gen.CryptoGenerators
import org.bitcoins.core.number.UInt32
import org.bitcoins.core.util.BitcoinSLogger
import org.scalacheck.{ Gen, Prop, Properties }
import scala.util.Success
// ScalaCheck properties for BIP-32 extended keys: round-trip serialization
// plus the public/private derivation commutation identities from the spec.
class ExtKeySpec extends Properties("ExtKeySpec") {
  // NOTE(review): `logger` is never referenced in this class — candidate for
  // removal.
  private val logger = BitcoinSLogger.logger

  // fromString/bytes round-trips must reproduce the original key.
  property("serialization symmetry") = {
    Prop.forAll(CryptoGenerators.extKey) { extKey =>
      ExtKey.fromString(extKey.toString) == Success(extKey) &&
        ExtKey(extKey.bytes) == extKey
    }
  }

  // Non-hardened child indices: [0, 2^31).
  private def nonHardened: Gen[UInt32] = Gen.choose(0L, ((1L << 31) - 1)).map(UInt32(_))

  // Hardened child indices: [2^31, 2^32).
  private def hardened: Gen[UInt32] = Gen.choose(1L << 31, (1L << 32) - 1).map(UInt32(_))

  property("derivation identity 1") = {
    Prop.forAllNoShrink(CryptoGenerators.extPrivateKey, nonHardened, nonHardened, nonHardened) { (m, a, b, c) =>
      //https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki#the-key-tree
      //N(m/a/b/c) = N(m/a/b)/c = N(m/a)/b/c = N(m)/a/b/c = M/a/b/c
      val path1 = m.deriveChildPrivKey(a).deriveChildPrivKey(b).deriveChildPrivKey(c).extPublicKey
      val path2 = m.deriveChildPrivKey(a).deriveChildPrivKey(b).extPublicKey.deriveChildPubKey(c).get
      val path3 = m.deriveChildPrivKey(a).extPublicKey.deriveChildPubKey(b).get.deriveChildPubKey(c).get
      val path4 = m.extPublicKey.deriveChildPubKey(a).get.deriveChildPubKey(b).get.deriveChildPubKey(c).get
      path1 == path2 && path2 == path3 && path3 == path4
    }
  }

  // Hardened first step: public derivation is only possible after it.
  property("derivation identity 2") = {
    Prop.forAllNoShrink(CryptoGenerators.extPrivateKey, hardened, nonHardened, nonHardened) { (m, aH, b, c) =>
      //https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki#the-key-tree
      //N(m/aH/b/c) = N(m/aH/b)/c = N(m/aH)/b/c
      val path1 = m.deriveChildPrivKey(aH).deriveChildPrivKey(b).deriveChildPrivKey(c).extPublicKey
      val path2 = m.deriveChildPrivKey(aH).deriveChildPrivKey(b).extPublicKey.deriveChildPubKey(c).get
      val path3 = m.deriveChildPrivKey(aH).extPublicKey.deriveChildPubKey(b).get.deriveChildPubKey(c).get
      path1 == path2 && path2 == path3
    }
  }
}
| Christewart/bitcoin-s-core | src/test/scala/org/bitcoins/core/crypto/ExtKeySpec.scala | Scala | mit | 2,154 |
/* This code is based on the SOM class library.
*
* Copyright (c) 2001-2016 see AUTHORS.md file
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the 'Software'), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package sieve
/**
 * Sieve of Eratosthenes benchmark (SOM class library port): counts the
 * primes below 5000 and checks the known answer (669).
 */
class SieveBenchmark extends benchmarks.Benchmark[Int] {

  override def run(): Int = {
    val flags = Array.fill(5000)(true)
    sieve(flags, 5000)
  }

  /**
   * Counts primes in [2, size) using `flags` as the sieve; flags(n - 1)
   * tracks whether n is still considered prime.
   */
  def sieve(flags: Array[Boolean], size: Int): Int = {
    var primeCount = 0
    var candidate = 2
    while (candidate < size) {
      if (flags(candidate - 1)) {
        primeCount += 1
        // Cross out all multiples of the newly found prime.
        var multiple = candidate + candidate
        while (multiple <= size) {
          flags(multiple - 1) = false
          multiple += candidate
        }
      }
      candidate += 1
    }
    primeCount
  }

  override def check(result: Int): Boolean =
    result == 669
}
| cedricviaccoz/scala-native | benchmarks/src/main/scala/sieve/SieveBenchmark.scala | Scala | bsd-3-clause | 1,724 |
package com.github.diegopacheco.sandbox.scripts.scala.basic
object HelloWorld {

  /** Prints the given message to stdout. */
  def p(m: String): Unit = println(m)

  def main(args: Array[String]): Unit =
    p("Hello, world!")
}
package gv.jleon
package crypto
import test._
import Prop._
import Digestion._
// Property suite for Digestion: digesting the same byte sequence twice must
// yield element-wise identical output (determinism of `digest`).
object DigestionProperties extends Properties("Digestion")
  with DigestionGenerators {

  property("consistency") =
    forAll { (d: Digestion, bytes: Bytes) ⇒
      d.digest(bytes) sameElements d.digest(bytes)
    }
}
| mouchtaris/jleon | src/test/scala-2.12/gv/jleon/crypto/DigestionProperties.scala | Scala | mit | 306 |
package org.pgscala.converters
/** Do not edit - generated in Builder / PGFloatConverterBuilder.scala */
// Converts between Scala Float and its PostgreSQL textual representation,
// delegating formatting/parsing to the nullable converter.
object PGFloatConverter extends PGConverter[Float] {
  val PGType = PGNullableFloatConverter.pgType

  def toPGString(f: Float) =
    PGNullableFloatConverter.floatToString(java.lang.Float valueOf f)

  // SQL NULL (a null input string) maps to this default rather than failing.
  val defaultValue: Float = 0.0f

  def fromPGString(f: String) =
    if (f eq null)
      defaultValue
    else
      PGNullableFloatConverter.stringToFloat(f)
}
| melezov/pgscala | converters-scala/src/generated/scala/org/pgscala/converters/core/PGFloatConverter.scala | Scala | bsd-3-clause | 471 |
import java.util.Collection
class Foo extends Collection[Int] // intentionally unimplemented — negative compiler test (abstract-member report); do not "fix"
class Bar extends Collection[List[_ <: String]] // intentionally unimplemented, same as Foo
class Baz[T] extends Collection[T] // intentionally unimplemented, same as Foo

trait Xyz[T] {
  def foo(x: T): Boolean
}

trait Symbolic {
  def --? : Int
  def --!(i: Int): Unit
  def unary_~ : Long
}

trait Bippy[T1, T2, T3] extends collection.IterableOps[(T2, String), List, List[(T2, String)]] with Xyz[T3]

class Dingus extends Bippy[String, Set[Int], List[Int]] // intentionally unimplemented — exercises the error message for inherited abstracts
| lrytz/scala | test/files/neg/abstract-report2.scala | Scala | apache-2.0 | 438 |
package org.apache.spark.util.notebook.hack
import scala.reflect.runtime.universe
import org.apache.log4j.AppenderSkeleton
import org.apache.log4j.spi.LoggingEvent
object SparkClosureUtilHack {
  // Marker message returned by `clean` when cleaning succeeded.
  // NOTE(review): "SUSCCESS" is misspelled; left as-is because renaming the
  // public val would break external callers.
  val CLEAN_SUSCCESS = "Object cleaned and serializable!"
}
/*
* See modules/spark/src/main/scala/notebook/spark/util/SparkClosureUtil.scala
*/
// Reflection hack exposing Spark's private ClosureCleaner and redirecting its
// log4j output through `appendTo` so cleaning diagnostics can be surfaced
// (e.g. into a notebook cell). Fragile by construction: it reaches into
// private members of ClosureCleaner and of the slf4j wrapper.
trait SparkClosureUtilHack {
  private val CC = org.apache.spark.util.ClosureCleaner

  // Sink for captured log lines; override to redirect away from stdout.
  def appendTo: String => Unit = println

  // Runs the cleaner with both checkSerializable flags set; if no exception
  // is thrown the object is clean and serializable.
  def clean(o:Object) = {
    CC.clean(o, java.lang.Boolean.TRUE, java.lang.Boolean.TRUE)
    // if we reach this point... it's good!
    SparkClosureUtilHack.CLEAN_SUSCCESS
  }

  // Digs the underlying log4j Logger out of ClosureCleaner's private slf4j
  // logger via reflection. The two require() calls guard against a Spark
  // version whose logging internals changed shape.
  private val sparkClosureUtilInternalLogger: org.apache.log4j.Logger = {
    // open the logger from the closure cleaner
    val logMethod = CC.getClass.getDeclaredMethod("log")
    logMethod.setAccessible(true)
    val logSlf4J = logMethod.invoke(CC)
    require(logSlf4J.isInstanceOf[org.slf4j.Logger])
    val loggerInF = logSlf4J.getClass.getDeclaredField("logger")
    loggerInF.setAccessible(true)
    val loggerIn = loggerInF.get(logSlf4J)
    require(loggerIn.isInstanceOf[org.apache.log4j.Logger])
    val logger = loggerIn.asInstanceOf[org.apache.log4j.Logger]
    logger
  }

  // define appender to show the cleaning logs
  class HackAppender extends AppenderSkeleton {
    override def append(event:LoggingEvent) {
      appendTo(event.getMessage.toString)
    }
    override def close() {
    }
    override def requiresLayout() = {
      false
    }
  }

  // create appender added to logger and the capacity to restore the initial state
  private val hackAppender = new HackAppender()
  hackAppender.setThreshold(org.apache.log4j.Level.DEBUG)
  hackAppender.activateOptions()
  sparkClosureUtilInternalLogger.addAppender(hackAppender)

  // Captures the pre-hack log level so `restore` can undo both the appender
  // and the level change.
  private val restoreLogger = {
    val level = sparkClosureUtilInternalLogger.getLevel
    () => {
      sparkClosureUtilInternalLogger.removeAppender(hackAppender)
      sparkClosureUtilInternalLogger.setLevel(level)
    }
  }

  sparkClosureUtilInternalLogger.setLevel(org.apache.log4j.Level.DEBUG)

  // Lazy: the hack stays active until `restore` is first referenced.
  lazy val restore = restoreLogger()
}
| andypetrella/spark-notebook | modules/spark/src/main/scala/notebook/spark/util/hack/SparkClosureUtilHack.scala | Scala | apache-2.0 | 2,150 |
package org.scalaide.ui.wizards
import org.eclipse.core.resources.IContainer
import org.eclipse.core.resources.IFile
import org.eclipse.core.resources.IResource
import org.eclipse.core.runtime.IPath
import org.eclipse.core.runtime.Path
import org.scalaide.core.IScalaPlugin
import org.scalaide.core.compiler.IScalaPresentationCompiler.Implicits._
import org.scalaide.core.internal.project.ScalaProject
import org.scalaide.util.internal.Commons
import org.scalaide.util.internal.eclipse.ProjectUtils
import scalariform.lexer._
object ScalaFileCreator {
  // Template-variable names substituted into new-file templates.
  val VariableTypeName = "type_name"
  val VariablePackageName = "package_name"

  import scala.reflect.runtime._
  // Cast needed to reach the JavaUniverse-specific name tables below.
  private[this] val st = universe.asInstanceOf[JavaUniverse]
  // Reserved words that may not be used as package/type identifiers.
  val ScalaKeywords = st.nme.keywords map (_.toString())
  val JavaKeywords = st.javanme.keywords map (_.toString())
}
// Wizard backend for creating new Scala source files: derives the initial
// package path from the selection, validates the fully-qualified type name,
// and materializes the .scala file under the chosen source folder.
trait ScalaFileCreator extends FileCreator {
  import ScalaFileCreator._
  import ProjectUtils._

  // A deferred check that still needs the target folder to complete.
  private[wizards] type FileExistenceCheck = IContainer => Validation

  override def templateVariables(folder: IContainer, name: String): Map[String, String] =
    generateTemplateVariables(name)

  override def initialPath(res: IResource): String = {
    val srcDirs = sourceDirs(res.getProject())
    generateInitialPath(
      path = res.getFullPath(),
      srcDirs = srcDirs,
      isDirectory = res.getType() == IResource.FOLDER)
  }

  override def validateName(folder: IContainer, name: String): Validation = {
    if (!ScalaProject.isScalaProject(folder.getProject()))
      Invalid("Not a Scala project")
    else
      // Syntactic validation first; only when it passes is the
      // folder-dependent existence check executed.
      doValidation(name) match {
        case Left(v) => v
        case Right(f) => f(folder)
      }
  }

  override def create(folder: IContainer, name: String): IFile = {
    // a.b.C becomes a/b/C.scala relative to the folder.
    val filePath = name.replace('.', '/')
    folder.getFile(new Path(s"$filePath.scala"))
  }

  override def completionEntries(folder: IContainer, name: String): Seq[String] = {
    val ret = projectAsJavaProject(folder.getProject()) map { jp =>
      val root = jp.findPackageFragmentRoot(folder.getFullPath())
      val pkgs = root.getChildren().map(_.getElementName())
      // Case-insensitive prefix match on the literal text typed so far.
      val ignoreCaseMatcher = s"(?i)\\\\Q$name\\\\E.*"
      pkgs.filter(_.matches(ignoreCaseMatcher))
    }
    ret.fold(Seq[String]())(identity)
  }

  /**
   * `path` is the path of the element which is selected when the wizard is
   * created. `srcDirs` contains all source folders of the project where `path`
   * is part of. `isDirectory` describes if the last element of `path` references
   * a directory.
   */
  private[wizards] def generateInitialPath(path: IPath, srcDirs: Seq[IPath], isDirectory: Boolean): String = {
    // Strip the source-folder prefix, drop the file segment when the
    // selection is a file, and join the rest as a dotted package prefix
    // (with a trailing dot so the user only appends the type name).
    srcDirs.find(_.isPrefixOf(path))
      .map(srcDir => path.removeFirstSegments(srcDir.segmentCount()))
      .map(pkgOrFilePath => if (isDirectory) pkgOrFilePath else pkgOrFilePath.removeLastSegments(1))
      .map(_.segments().mkString("."))
      .map(pkg => if (pkg.isEmpty()) "" else s"$pkg.")
      .getOrElse("")
  }

  private[wizards] def doValidation(name: String): Either[Invalid, FileExistenceCheck] = {
    if (name.isEmpty())
      Left(Invalid("No file path specified"))
    else
      validateFullyQualifiedType(name)
  }

  private[wizards] def validateFullyQualifiedType(fullyQualifiedType: String): Either[Invalid, FileExistenceCheck] = {
    // A valid type identifier lexes to exactly one token (plus EOF) and is
    // not a Scala keyword.
    def isValidScalaTypeIdent(str: String) = {
      val conformsToIdentToken = ScalaLexer.tokenise(str, forgiveErrors = true).size == 2
      conformsToIdentToken && !ScalaKeywords.contains(str)
    }

    val parts = Commons.split(fullyQualifiedType, '.')

    if (parts.last.isEmpty)
      Left(Invalid("No type name specified"))
    else {
      // All leading segments must be valid package names; the final segment
      // must be a valid type name.
      def packageIdentCheck =
        parts.init.find(!isValidScalaPackageIdent(_)) map (e => s"'$e' is not a valid package name")

      def typeIdentCheck =
        Seq(parts.last).find(!isValidScalaTypeIdent(_)) map (e => s"'$e' is not a valid type name")

      packageIdentCheck orElse typeIdentCheck match {
        case Some(e) => Left(Invalid(e))
        case _ => Right(checkTypeExists(_, fullyQualifiedType))
      }
    }
  }

  private[wizards] def isValidScalaPackageIdent(str: String): Boolean = {
    val validIdent =
      str.nonEmpty &&
      Character.isJavaIdentifierStart(str.head) &&
      str.tail.forall(Character.isJavaIdentifierPart)

    // Package names must avoid both Scala and Java keywords.
    validIdent && !ScalaKeywords.contains(str) && !JavaKeywords.contains(str)
  }

  private[wizards] def checkTypeExists(folder: IContainer, fullyQualifiedType: String): Validation = {
    val path = fullyQualifiedType.replace('.', '/')
    if (folder.getFile(new Path(s"$path.scala")).exists())
      Invalid("File already exists")
    else {
      // Ask the presentation compiler whether a class of that name is
      // already defined anywhere on the classpath.
      val scalaProject = IScalaPlugin().asScalaProject(folder.getProject())
      val typeExists = scalaProject flatMap { scalaProject =>
        scalaProject.presentationCompiler { compiler =>
          compiler.asyncExec {
            compiler.rootMirror.getClassIfDefined(fullyQualifiedType) != compiler.NoSymbol
          }.getOption()
        }.flatten
      } getOrElse false

      if (typeExists)
        Invalid("Type already exists")
      else
        Valid
    }
  }

  private[wizards] def generateTemplateVariables(pkg: String): Map[String, String] = {
    // Everything before the last dot is the package, the rest the type name;
    // no dot means a type in the default package.
    val splitPos = pkg.lastIndexOf('.')
    if (splitPos < 0)
      Map(VariableTypeName -> pkg)
    else
      Map(
        VariablePackageName -> pkg.substring(0, splitPos),
        VariableTypeName -> pkg.substring(splitPos+1))
  }
}
| Kwestor/scala-ide | org.scala-ide.sdt.core/src/org/scalaide/ui/wizards/ScalaFileCreator.scala | Scala | bsd-3-clause | 5,479 |
package com.twitter.util
import java.util.Arrays
import java.util.concurrent.atomic.{LongAdder, AtomicInteger}
private[twitter] object WindowedAdder {
  /**
   * Create a time-windowed version of a LongAdder.
   *
   * None of the operations on this data structure entails an allocation,
   * unless invoking now does.
   *
   * `range` and `now` are expected to have the same units.
   *
   * @param range The range of time to be kept in the adder.
   *
   * @param slices The number of slices that are maintained; a higher
   * number of slices means finer granularity but also more memory
   * consumption. Must be more than 1.
   *
   * @param now the current time. for testing.
   */
  def apply(range: Long, slices: Int, now: () => Long): WindowedAdder = {
    require(slices > 1)
    // One slice is the live writer; only slices-1 historical buckets are kept.
    new WindowedAdder(range / slices, slices - 1, now)
  }
}
// Sliding-window counter: the live count accumulates in `writer`; every
// `window` time units the live count is rotated into the circular buffer
// `buf` of N historical slices, and slices older than the window are zeroed.
// Concurrency: slice rotation is guarded by a CAS on `expiredGen`, so only
// one thread performs a rotation; reads during rotation may observe a
// slightly stale sum.
private[twitter] class WindowedAdder private[WindowedAdder] (
    window: Long,
    N: Int,
    now: () => Long) {
  private[this] val writer = new LongAdder()
  // Generation counter; bumped after each completed rotation.
  @volatile private[this] var gen = 0
  // CAS gate ensuring a single rotator per generation.
  private[this] val expiredGen = new AtomicInteger(gen)

  // Since we only write into the head bucket, we simply maintain
  // counts in an array; these are written to rarely, but are read
  // often.
  private[this] val buf = new Array[Long](N)
  // Index of the next historical slice to overwrite.
  @volatile private[this] var i = 0
  // Timestamp of the last rotation.
  @volatile private[this] var old = now()

  private[this] def expired(): Unit = {
    if (!expiredGen.compareAndSet(gen, gen + 1))
      return

    // At the time of add, we were likely up to date,
    // so we credit it to the current slice.
    buf(i) = writer.sumThenReset()
    i = (i + 1) % N

    // If it turns out we've skipped a number of
    // slices, we adjust for that here.
    val nskip = math.min(((now() - old) / window - 1).toInt, N)
    if (nskip > 0) {
      // Zero the skipped slices, split into the wrap-around two ranges.
      val r = math.min(nskip, N - i)
      Arrays.fill(buf, i, i + r, 0L)
      Arrays.fill(buf, 0, nskip - r, 0L)
      i = (i + nskip) % N
    }

    old = now()
    gen += 1
  }

  /** Reset the state of the adder */
  def reset(): Unit = {
    Arrays.fill(buf, 0, N, 0L)
    writer.reset()
    old = now()
  }

  /** Increment the adder by 1 */
  def incr(): Unit = add(1)

  /** Increment the adder by `x` */
  def add(x: Int): Unit = {
    // Rotate first when the current slice's window has elapsed.
    if ((now() - old) >= window)
      expired()
    writer.add(x)
  }

  /** Retrieve the current sum of the adder */
  def sum(): Long = {
    if ((now() - old) >= window)
      expired()
    val _ = gen // Barrier.
    // Sum = live count plus all historical slices.
    var sum = writer.sum()
    var i = 0
    while (i < N) {
      sum += buf(i)
      i += 1
    }
    sum
  }
}
| twitter/util | util-core/src/main/scala/com/twitter/util/WindowedAdder.scala | Scala | apache-2.0 | 2,576 |
/**
 * Exercise 6:
 * Write a for loop for computing the product of the Unicode codes of all
 * letters in a string.
 * For example, the product of the characters in "Hello" is 9415087488L.
 *
 * The exercise explicitly asks for a for loop, so a var accumulator is used.
 **/
// BigInt accumulator: 9,415,087,488 already exceeds Int.MaxValue, and longer
// strings would overflow Long as well.
var prod:BigInt = 1
for(c <- "Hello") prod *= c
prod // => 9415087488: BigInt
| ragmha/scala-impatient | solutions/control-structures-and-functions/ex6.scala | Scala | mit | 273 |
package controllers
import scala.collection.immutable
import models._
import play.api.data.Form
import play.api.data.Forms._
import play.api.i18n.{Lang, Messages}
import play.api.data.validation.Constraints._
import constraints.FormConstraints
import controllers.NeedLogin.Authenticated
import helpers.ItemInquiryMail
import play.Logger
import play.api.db.Database
import play.api.mvc.{AnyContent, MessagesAbstractController, MessagesControllerComponents}
class ItemInquiryReserveBase(
cc: MessagesControllerComponents,
fc: FormConstraints,
authenticated: Authenticated,
implicit val db: Database,
itemInquiryMail: ItemInquiryMail,
implicit val localeInfoRepo: LocaleInfoRepo,
implicit val siteItemRepo: SiteItemRepo,
implicit val shoppingCartItemRepo: ShoppingCartItemRepo
) extends MessagesAbstractController(cc) {
// Form binding a single record id (used to carry an inquiry id on submit).
val idSubmitForm: Form[Long] = Form(
  single(
    "id" -> longNumber
  )
)
// Form for an item inquiry: validated name (<=128), email, and a non-empty
// inquiry body (<=8192). The cast widens Form[CreateItemInquiry] to the
// common supertype — presumably because Form's type parameter is invariant;
// confirm.
def itemInquiryForm: Form[CreateItemInquiryReservation] = Form(
  mapping(
    "siteId" -> longNumber,
    "itemId" -> longNumber,
    "name" -> text.verifying(nonEmpty, maxLength(128)),
    "email" -> text.verifying(fc.emailConstraint: _*),
    "inquiryBody" -> text.verifying(nonEmpty, maxLength(8192))
  )(CreateItemInquiry.apply)(CreateItemInquiry.unapply)
).asInstanceOf[Form[CreateItemInquiryReservation]]
// Form for an item reservation: unlike the inquiry form, the free-text
// "comment" field may be empty (minLength(0)).
def itemReservationForm: Form[CreateItemInquiryReservation] = Form(
  mapping(
    "siteId" -> longNumber,
    "itemId" -> longNumber,
    "name" -> text.verifying(nonEmpty, maxLength(128)),
    "email" -> text.verifying(fc.emailConstraint: _*),
    "comment" -> text.verifying(minLength(0), maxLength(8192))
  )(CreateItemReservation.apply)(CreateItemReservation.unapply)
).asInstanceOf[Form[CreateItemInquiryReservation]]
// Renders the inquiry page, pre-filling the form with the logged-in user's
// name and email.
def startItemInquiry(
  siteId: Long, itemId: Long
) = authenticated { implicit request: AuthMessagesRequest[AnyContent] =>
  implicit val login = request.login
  Ok(
    views.html.itemInquiry(itemInfo(siteId, itemId, request.acceptLanguages.toList), inquiryStartForm(siteId, itemId, login.storeUser))
  )
}
// Renders the reservation page, pre-filling the form with the logged-in
// user's name and email.
def startItemReservation(
  siteId: Long, itemId: Long
) = authenticated { implicit request: AuthMessagesRequest[AnyContent] =>
  implicit val login = request.login
  Ok(
    views.html.itemReservation(itemInfo(siteId, itemId, request.acceptLanguages.toList), reservationStartForm(siteId, itemId, login.storeUser))
  )
}
// Builds the initial inquiry form seeded with the user's full name and
// email and an empty inquiry body.
def inquiryStartForm(
  siteId: Long, itemId: Long, user: StoreUser
)(
  implicit login: LoginSession
): Form[_ <: CreateItemInquiryReservation] = itemInquiryForm.fill(
  CreateItemInquiry(
    siteId, itemId,
    user.fullName,
    user.email, ""
  )
)
  /** Builds the initial reservation form, pre-populated with the user's full
   * name and email and an empty comment.
   *
   * NOTE(review): the explicit cast on the fill argument is redundant with the
   * form's own erased type but harmless.
   */
  def reservationStartForm(
    siteId: Long, itemId: Long, user: StoreUser
  )(
    implicit login: LoginSession
  ): Form[_ <: CreateItemInquiryReservation] = itemReservationForm.fill(
    CreateItemReservation(
      siteId, itemId,
      user.fullName,
      user.email, ""
    ).asInstanceOf[CreateItemInquiryReservation]
  )
  /** Looks up the site and the localized item name for display.
   *
   * NOTE(review): the trailing `.get` throws if the (site, item) pair does not
   * exist for the resolved locale — callers assume ids come from live records.
   */
  def itemInfo(
    siteId: Long, itemId: Long, langs: List[Lang]
  ): (Site, ItemName) = db.withConnection { implicit conn =>
    siteItemRepo.getWithSiteAndItem(siteId, ItemId(itemId), localeInfoRepo.getDefault(langs))
  }.get
  /** Rebuilds a reservation form from a stored inquiry record so it can be
   * amended.
   *
   * NOTE(review): `fields('Message)` throws NoSuchElementException if the
   * field row is missing — presumably it always exists for reservation
   * records; confirm against the persistence layer.
   */
  def amendReservationForm(
    rec: ItemInquiry, fields: immutable.Map[Symbol, String]
  ): Form[_ <: CreateItemInquiryReservation] = itemReservationForm.fill(
    CreateItemReservation(
      rec.siteId, rec.itemId.id,
      rec.submitUserName,
      rec.email,
      fields('Message)
    )
  )
  /** Shows the amend page for an existing reservation, pre-filled with the
   * stored record and its free-text fields.
   */
  def amendItemReservationStart(inqId: Long) = authenticated { implicit request: AuthMessagesRequest[AnyContent] =>
    implicit val login = request.login
    db.withConnection { implicit conn =>
      val id = ItemInquiryId(inqId)
      val rec = ItemInquiry(id)
      val fields = ItemInquiryField(id)
      Ok(
        views.html.amendItemReservation(
          id,
          itemInfo(rec.siteId, rec.itemId.id, request.acceptLanguages.toList),
          amendReservationForm(rec, fields)
        )
      )
    }
  }
  /** Applies an amendment to an existing reservation. On validation failure the
   * amend page is re-shown; on success the record is updated in a transaction
   * and re-read so the confirmation page shows the amended values.
   */
  def amendItemReservation(inqId: Long) = authenticated { implicit request: AuthMessagesRequest[AnyContent] =>
    implicit val login = request.login
    val id = ItemInquiryId(inqId)
    // Pre-update snapshot; only used here for the site/item lookup.
    val (rec: ItemInquiry, fields: immutable.Map[Symbol, String]) = db.withConnection { implicit conn =>
      (ItemInquiry(id), ItemInquiryField(id))
    }
    itemReservationForm.bindFromRequest.fold(
      formWithErrors => {
        Logger.error("Validation error in ItemInquiryReserveBase.amendItemReservation." + formWithErrors + ".")
        BadRequest(views.html.amendItemReservation(id, itemInfo(rec.siteId, rec.itemId.id, request.acceptLanguages.toList), formWithErrors))
      },
      info => db.withTransaction { implicit conn =>
        info.update(id)
        // Re-read so the confirmation reflects the amended record.
        Ok(
          views.html.itemReservationConfirm(
            ItemInquiry(id), ItemInquiryField(id), itemInfo(rec.siteId, rec.itemId.id, request.acceptLanguages.toList), idSubmitForm.fill(inqId)
          )
        )
      }
    )
  }
def confirmItemInquiry(
siteId: Long, itemId: Long
) = authenticated { implicit request: AuthMessagesRequest[AnyContent] =>
implicit val login = request.login
itemInquiryForm.bindFromRequest.fold(
formWithErrors => {
Logger.error("Validation error in ItemInquiryReserveBase.submitItemInquiry." + formWithErrors + ".")
BadRequest(views.html.itemInquiry(itemInfo(siteId, itemId, request.acceptLanguages.toList), formWithErrors))
},
info => db.withConnection { implicit conn =>
val rec: ItemInquiry = info.save(login.storeUser)
Redirect(routes.ItemInquiryReserve.submitItemInquiryStart(rec.id.get.id))
}
)
}
def amendInquiryForm(
rec: ItemInquiry, fields: immutable.Map[Symbol, String]
): Form[_ <: CreateItemInquiryReservation] = itemReservationForm.fill(
CreateItemInquiry(
rec.siteId, rec.itemId.id,
rec.submitUserName,
rec.email,
fields('Message)
)
)
  /** Shows the amend page for an existing inquiry, pre-filled with the stored
   * record and its free-text fields.
   */
  def amendItemInquiryStart(inqId: Long) = authenticated { implicit request: AuthMessagesRequest[AnyContent] =>
    implicit val login = request.login
    db.withConnection { implicit conn =>
      val id = ItemInquiryId(inqId)
      val rec = ItemInquiry(id)
      val fields = ItemInquiryField(id)
      Ok(
        views.html.amendItemInquiry(
          id,
          itemInfo(rec.siteId, rec.itemId.id, request.acceptLanguages.toList),
          amendInquiryForm(rec, fields)
        )
      )
    }
  }
def amendItemInquiry(inqId: Long) = authenticated { implicit request: AuthMessagesRequest[AnyContent] =>
implicit val login = request.login
val id = ItemInquiryId(inqId)
val (rec: ItemInquiry, fields: immutable.Map[Symbol, String]) = db.withConnection { implicit conn =>
(ItemInquiry(id), ItemInquiryField(id))
}
itemInquiryForm.bindFromRequest.fold(
formWithErrors => {
Logger.error("Validation error in ItemInquiryReserveBase.amendItemInquiry." + formWithErrors + ".")
BadRequest(
views.html.amendItemInquiry(
id,
itemInfo(rec.siteId, rec.itemId.id, request.acceptLanguages.toList),
formWithErrors
)
)
},
info => db.withTransaction { implicit conn =>
info.update(id)
Ok(
views.html.itemReservationConfirm(
rec, fields, itemInfo(rec.siteId, rec.itemId.id, request.acceptLanguages.toList), idSubmitForm.fill(inqId)
)
)
}
)
}
  /** Validates the reservation form; on success persists a draft reservation
   * for the logged-in user and redirects to the submit-confirmation page.
   */
  def confirmItemReservation(
    siteId: Long, itemId: Long
  ) = authenticated { implicit request: AuthMessagesRequest[AnyContent] =>
    implicit val login = request.login
    itemReservationForm.bindFromRequest.fold(
      formWithErrors => {
        Logger.error("Validation error in ItemInquiryReserveBase.confirmItemReservation." + formWithErrors + ".")
        BadRequest(views.html.itemReservation(itemInfo(siteId, itemId, request.acceptLanguages.toList), formWithErrors))
      },
      info => db.withConnection { implicit conn =>
        val rec: ItemInquiry = info.save(login.storeUser)
        Redirect(routes.ItemInquiryReserve.submitItemReservationStart(rec.id.get.id))
      }
    )
  }
  /** Shows the final confirmation page for a saved inquiry before submission. */
  def submitItemInquiryStart(inquiryId: Long) = authenticated { implicit request: AuthMessagesRequest[AnyContent] =>
    implicit val login = request.login
    db.withConnection { implicit conn =>
      val id = ItemInquiryId(inquiryId)
      val rec = ItemInquiry(id)
      val fields = ItemInquiryField(id)
      Ok(
        views.html.itemInquiryConfirm(
          rec, fields, itemInfo(rec.siteId, rec.itemId.id, request.acceptLanguages.toList), idSubmitForm.fill(inquiryId)
        )
      )
    }
  }
  /** Marks a saved inquiry as SUBMITTED.
   *
   * NOTE(review): unlike submitItemReservation, no notification mail is sent
   * here, and the status change runs under withConnection rather than
   * withTransaction — confirm both are intentional.
   */
  def submitItemInquiry(inquiryId: Long) = authenticated { implicit request: AuthMessagesRequest[AnyContent] =>
    implicit val login = request.login
    idSubmitForm.bindFromRequest.fold(
      formWithErrors => db.withConnection { implicit conn =>
        val id = ItemInquiryId(inquiryId)
        val rec = ItemInquiry(id)
        val fields = ItemInquiryField(id)
        Logger.error("Validation error in ItemInquiryReserveBase.submitItemInquiry." + formWithErrors + ".")
        BadRequest(
          views.html.itemInquiryConfirm(
            rec, fields, itemInfo(rec.siteId, rec.itemId.id, request.acceptLanguages.toList), idSubmitForm.fill(inquiryId)
          )
        )
      },
      id => db.withConnection { implicit conn =>
        if (ItemInquiry.changeStatus(ItemInquiryId(id), ItemInquiryStatus.SUBMITTED) == 0) {
          throw new Error("Record update fail id = " + id)
        }
        Redirect(routes.Application.index).flashing("message" -> Messages("itemInquirySubmit"))
      }
    )
  }
  /** Shows the final confirmation page for a saved reservation before
   * submission.
   */
  def submitItemReservationStart(inquiryId: Long) = authenticated { implicit request: AuthMessagesRequest[AnyContent] =>
    implicit val login = request.login
    db.withConnection { implicit conn =>
      val id = ItemInquiryId(inquiryId)
      val rec = ItemInquiry(id)
      val fields = ItemInquiryField(id)
      Ok(
        views.html.itemReservationConfirm(
          rec, fields, itemInfo(rec.siteId, rec.itemId.id, request.acceptLanguages.toList), idSubmitForm.fill(inquiryId)
        )
      )
    }
  }
  /** Marks a saved reservation as SUBMITTED and sends the notification mail.
   *
   * NOTE(review): the mail is sent before the status update; if changeStatus
   * fails the mail has already gone out — confirm this ordering is acceptable.
   */
  def submitItemReservation(inquiryId: Long) = authenticated { implicit request: AuthMessagesRequest[AnyContent] =>
    implicit val login = request.login
    idSubmitForm.bindFromRequest.fold(
      formWithErrors => db.withConnection { implicit conn =>
        val id = ItemInquiryId(inquiryId)
        val rec = ItemInquiry(id)
        val fields = ItemInquiryField(id)
        Logger.error("Validation error in ItemInquiryReserveBase.submitItemReservation." + formWithErrors + ".")
        BadRequest(
          views.html.itemReservationConfirm(
            rec, fields, itemInfo(rec.siteId, rec.itemId.id, request.acceptLanguages.toList), idSubmitForm.fill(inquiryId)
          )
        )
      },
      longId => db.withConnection { implicit conn =>
        val id: ItemInquiryId = ItemInquiryId(longId)
        val inquiry: ItemInquiry = ItemInquiry(id)
        val fields: immutable.Map[Symbol, String] = ItemInquiryField(id)
        itemInquiryMail.send(login.storeUser, inquiry, fields, localeInfoRepo.getDefault(request.acceptLanguages.toList))
        if (ItemInquiry.changeStatus(id, ItemInquiryStatus.SUBMITTED) == 0) {
          throw new Error("Record update fail id = " + id)
        }
        Redirect(routes.Application.index).flashing("message" -> Messages("itemReservationSubmit"))
      }
    )
  }
}
| ruimo/store2 | app/controllers/ItemInquiryReserveBase.scala | Scala | apache-2.0 | 11,707 |
package me.abarrow.ScalaSubNet
import scala.collection.mutable
import scala.io.StdIn
import java.io.File
import java.text.DecimalFormat
import me.abarrow.ScalaSubNet.utils.FileUtils
class VideoEstimator (val wordContext:WordContext, val mkvToolNix:MKVToolNix,
val cSubNet:CSubNet, val inputVidFolder:String, val outputFolder:String){
  // CSV column layout shared by all stat/estimate files.
  val videoCSVSpec = new VideoCSVSpec(wordContext)
  // Accumulates words no word-list category matched (written out in stage 2).
  val unknownWords:mutable.HashSet[String] = mutable.HashSet[String]()
  val outputFolderFile:File = new File(outputFolder)
  // Side effect at construction time: ensure the output folder exists.
  FileUtils.createDirsIfNeeded(outputFolder)
  // Well-known file locations inside the output folder, one per pipeline stage.
  val ratingsPath = new File(outputFolderFile, "ratings.csv").getAbsolutePath
  val subsFolder = new File(outputFolderFile, "subs")
  val subsPath = new File(outputFolderFile, "subs.csv").getAbsolutePath
  val ratedVidsPath = new File(outputFolderFile, "rated.csv").getAbsolutePath
  val unratedVidsPath = new File(outputFolderFile, "unrated.csv").getAbsolutePath
  val estimatedVidsPath = new File(outputFolderFile, "estimates.csv").getAbsolutePath
  val sanityVidsPath = new File(outputFolderFile, "sanity.csv").getAbsolutePath
  val networkPath = new File(outputFolderFile, "network.net").getAbsolutePath
  val unknownWordsPath = new File(outputFolderFile, "unknown_words.txt").getAbsolutePath
  /** Runs the pipeline from `startingStage` onward:
   * 0 collect ratings, 1 extract subtitles, 2 compute stats,
   * 3 train network, 4 execute network; ratings are always printed at the end.
   */
  def execute(startingStage:Int = 0): Unit = {
    if (startingStage <= 0) {
      createRatingList()
    }
    if (startingStage <= 1) {
      extractSubtitles()
    }
    if (startingStage <= 2) {
      calculateVideoStats()
    }
    if (startingStage <= 3) {
      trainNetwork()
    }
    if (startingStage <= 4) {
      executeNetwork()
    }
    showRatings()
  }
def createRatingList():Unit = {
val listCSV = new CSV(Array("path", "rating"))
val videosAndRatings = getVideosCommandLine(inputVidFolder).foreach{x =>
listCSV.addRow(Array(x._1, x._2.toString()))
}
listCSV.save(ratingsPath)
}
def extractSubtitles():Unit = {
val subCSV = new CSV(Array("path", "text", "rating"))
FileUtils.createDirsIfNeeded(subsFolder.getAbsolutePath)
val ratingsCSV = CSV.load(ratingsPath).toMaps().foreach({ m =>
val path = m("path")
val ratingStr = m("rating")
val rating = ratingStr.toInt
if (VideoUtils.isMKVVideo(path) && (rating >= 0) && (rating <= 10)) {
val subtitles = mkvToolNix.extractBestSubTrack(path)
if (subtitles.isDefined) {
val videoNameAndType = FileUtils.getFileNameAndExtension(path)
val destFile = File.createTempFile(videoNameAndType._1, ".txt", subsFolder)
FileUtils.saveUTF8File(destFile, ASSParser.parse(subtitles.get))
subCSV.addRow(Array(path, destFile.getAbsolutePath, ratingStr))
}
}
})
subCSV.save(subsPath)
}
  /** Stage 2: splits the subtitle list into rated (rating > 0) and unrated
   * (rating == 0) groups, computes text statistics for each into
   * `rated.csv` / `unrated.csv`, and dumps the accumulated unknown words.
   */
  def calculateVideoStats():Unit = {
    val videosAndRatings = CSV.load(subsPath).toMaps().map{m =>
      (m("path"), m("text"), m("rating").toInt)
    }.filter { x => (x._3 >= 0) && (x._3 <= 10) }.groupBy { x => x._3 > 0 }
    computeVideoData(videosAndRatings.getOrElse(true, Array()), ratedVidsPath)
    computeVideoData(videosAndRatings.getOrElse(false, Array()), unratedVidsPath)
    FileUtils.saveUTF8Text(unknownWordsPath, unknownWords.mkString("\\n"))
  }
  /** Computes subtitle text statistics for each (video path, text path,
   * rating) triple and saves the resulting CSV to `outputCSVPath`.
   */
  private def computeVideoData(videos: Array[(String, String, Int)], outputCSVPath: String): Unit = {
    val csv = videoCSVSpec.emptySubtitleStatsCSV
    videos.foreach { x =>
      addTextStats(csv, x._1, FileUtils.openUTF8Text(x._2), x._3)
    }
    csv.save(outputCSVPath);
  }
  /** Stage 3: trains the neural network on the rated stats CSV and saves the
   * resulting network file.
   */
  def trainNetwork():Unit = {
    val labelColumns = videoCSVSpec.labelCSVColumns.size
    cSubNet.train(ratedVidsPath, videoCSVSpec.inputCSVColumns.size, networkPath, None, labelCols = labelColumns)
  }
  /** Stage 4: runs the trained network on the unrated set (estimates) and on
   * the rated set again (sanity check of training quality).
   */
  def executeNetwork():Unit = {
    val labelColumns = videoCSVSpec.labelCSVColumns.size
    cSubNet.execute(networkPath, unratedVidsPath, estimatedVidsPath, labelCols = labelColumns)
    cSubNet.execute(networkPath, ratedVidsPath, sanityVidsPath, labelCols = labelColumns)
  }
def showRatings():Unit = {
println("Sanity")
CSV.load(sanityVidsPath).toMaps().foreach { f =>
val rating = (videoCSVSpec.outputCSVColumns.map { x => f.get(x).get.toDouble }.reduce { (x, y) => x + y } * 9) + 1
println(f.get("name").get + " -> " + rating);
}
println("Estimates")
CSV.load(estimatedVidsPath).toMaps().foreach { f =>
val rating = (videoCSVSpec.outputCSVColumns.map { x => f.get(x).get.toDouble }.reduce { (x, y) => x + y } * 9) + 1
println(f.get("name").get + " -> " + rating);
}
}
  /** NOTE(review): dead code — this private helper is never called from this
   * class and its body extracts subtitles without using the result or touching
   * `csv`/`rating`. Looks like an abandoned stub; consider removing.
   */
  private def addVideoStats(csv: CSV, targetFilePath: String, rating: Int): Unit = {
    val subtitles = mkvToolNix.extractBestSubTrack(targetFilePath)
    if (subtitles.isDefined) {
    }
  }
  /** Interactively walks `folder` asking the user to rate each MKV (or whole
   * directory). Returns (path, rating) pairs, keeping only MKV files rated
   * 0..10 (0 means "unseen"; -1/skip entries are filtered out).
   */
  private def getVideosCommandLine(folder: String): Array[(String, Int)] = {
    val help = () => {
      println("Rate each item from 1 (worst) to 10 (best)")
      println("* : Videos in the folder have different ratings")
      println("0 or ? : The video or folder contains videos that haven't been seen")
      println("! : Ignore")
    }
    help()
    FileUtils.evaluateDeepFilesInFolder(folder, { x =>
      var result: Option[Int] = None;
      // Only prompt for directories and MKV files; everything else is skipped.
      var parsing = x.isDirectory() || VideoUtils.isMKVVideo(x.getAbsolutePath)
      while (parsing) {
        print(x + " Rating: ")
        val input = StdIn.readLine();
        if (x.isDirectory() && input == "*") { //ignore ratings
          parsing = false
        } else if (input == "?") { //unrated
          parsing = false
          result = Some(0)
        } else if (input == "!") { //skip
          parsing = false
          result = Some(-1)
        } else {
          try {
            val inputInt = input.toInt
            if (inputInt >= 0 && inputInt <= 10) {
              result = Some(inputInt)
              parsing = false
            } else {
              help()
            }
          } catch {
            // Non-numeric input: re-show the help text and prompt again.
            case nfe: java.lang.NumberFormatException => help()
          }
        }
      }
      result
    }).filter { x => x._2 >= 0 && VideoUtils.isMKVVideo(x._1) }
  }
  /** Computes text statistics for one subtitle transcript and appends a row to
   * `csv`: average sentence/word length, unique/question ratios, per-word-type
   * ratios, and the rating normalized from 1..10 into [0,1]. Also accumulates
   * uncategorized words into `unknownWords`. Returns early (no row) when the
   * transcript has no usable words or no complete sentence.
   */
  private def addTextStats(csv: CSV, name: String, textContents: String, rating: Int): Unit = {
    val rawWords: Array[String] = FileUtils.splitApartWords(textContents)
    val qwords: Array[QualifiedWord] = rawWords.map { x => QualifiedWord(x) }.filter { x => x != null }
    val words: Array[String] = qwords.map { x => x.word }
    if (words.length == 0) {
      return
    }
    // Group words into sentences by incrementing the index whenever a word
    // ends a sentence; relies on groupBy evaluating the function per element.
    var sentenceIdx = 0
    val sentences: Map[Int, Array[QualifiedWord]] = qwords.groupBy { x =>
      val oldIdx = sentenceIdx
      if (x.endsSentence) {
        sentenceIdx += 1
      }
      oldIdx
    }
    val numSentences = sentenceIdx
    if (numSentences == 0) {
      return
    }
    val numQuestions = sentences.count(p => p._2.last.punct == Some('?'))
    // NOTE(review): wordLengthDist and sentenceLengthDist are computed but
    // never used below — dead locals kept for reference.
    val wordLengthDist = words.groupBy { x => x.length() }.map { f => (f._1, f._2.size) }
    val sentenceLengthDist = sentences.groupBy { x => x._2.length }.map { f => (f._1, f._2.size) }
    val avgSentenceLength = words.length.toDouble / numSentences
    val avgWordLength = words.map { x => x.length() }.reduce { (a, b) => a + b }.doubleValue() / words.length
    val uniqueWords = words.toSet
    // Categorize each unique word by the first matching word-list predicate.
    val groupedWords = uniqueWords.groupBy { x =>
      val result = wordContext.wordList.find { y => y._2(x) }
      if (result.isDefined) result.get._1 else wordContext.unknownWordType
    }
    val otherWords = groupedWords.getOrElse(wordContext.unknownWordType, Set())
    val decimalFormat = new DecimalFormat("#")
    decimalFormat.setMaximumFractionDigits(15)
    decimalFormat.setMaximumIntegerDigits(15)
    val mainEntries = Map[String, String]("name" -> name,
      "avg_sentence_length" -> decimalFormat.format(avgSentenceLength),
      "avg_word_length" -> decimalFormat.format(avgWordLength),
      "unique_ratio" -> decimalFormat.format(uniqueWords.size.doubleValue() / words.size),
      "question_ratio" -> decimalFormat.format(numQuestions.doubleValue() / numSentences))
    val wordTypeEntries = wordContext.wordTypesAndUnknown.map { f =>
      f + "_ratio" -> decimalFormat.format(groupedWords.getOrElse(f, Set()).size.doubleValue() / uniqueWords.size)
    }.toMap
    //val ratingEntries = (2 to 10).map { x => ("rated_ge_" + x, if (rating >= x) "1" else "0") }.toMap
    // Map rating 1..10 onto [0,1] for the network's output range.
    val decimalRating = (rating - 1.0d) / 9;
    val ratingEntries = Map[String,String]("rating" -> decimalFormat.format(decimalRating))
    csv.addRow(mainEntries ++ wordTypeEntries ++ ratingEntries)
    //println(name + " Content:")
    //println(textContents)
    //println(otherWords.mkString("\\n"))
    unknownWords ++= otherWords
  }
} | Abarrowman/SubNet | ScalaSubNet/src/me/abarrow/ScalaSubNet/VideoEstimator.scala | Scala | mit | 8,698 |
/*
* Copyright 2007-2010 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb {
package util {
import _root_.org.specs._
import _root_.org.specs.runner._
import common._
/**
 * Specification for the ListHelpers trait: Box-returning searches (first_?,
 * first), case-insensitive pair lookup (ciGet), java Enumeration conversions,
 * and the pimped List operations (headOr, rotate, permute, or, str, comma,
 * join, ?, replace).
 */
object ListHelpersSpec extends Specification with ListHelpers {
  "The ListHelpers first_? function" should {
    "return an Empty can if the list is empty" in {
      first_?((Nil: List[Int]))((i: Int) => true) must_== Empty
    }
    "return an Empty can if no element in the list satisfies the predicate" in {
      first_?(List(1, 2, 3))((i: Int) => i < 0) must_== Empty
    }
    "return a Full can with the first element in the list satisfying the predicate" in {
      first_?(List(1, 2, 3))((i: Int) => i > 0) must_== Full(1)
    }
  }
  "The ListHelpers first function" should {
    "return an Empty can if the list is empty" in {
      first((Nil: List[Int]))((i: Int) => Full(1)) must_== Empty
    }
    "return an Empty can if no element in the list returns a Full can when applied a function" in {
      first(List(1, 2, 3))((i: Int) => Empty) must_== Empty
    }
    "return the first Full can returned by a function f over the list elements" in {
      val f = (i: Int) => i >= 2 match { case true => Full(3) case false => Empty }
      first(List(1, 2, 3))(f) must_== Full(3)
    }
  }
  "The ciGet function on Lists of pairs of string" should {
    "return Empty if the list is Nil" in {
      (Nil: List[(String, String)]).ciGet("") must_== Empty
    }
    "return Empty if no pair has the key as its first element" in {
      List(("one", "1"), ("two", "2")).ciGet("three") must_== Empty
    }
    "return a Full can with the first second value of a pair matching the key" in {
      List(("one", "1"), ("two", "2")).ciGet("one") must_== Full("1")
    }
    "return a Full can with the first second value of a pair matching the key case-insensitively" in {
      List(("one", "1"), ("two", "2"), ("two", "3")).ciGet("two") must_== Full("2")
    }
  }
  "The ListHelpers enumToList and enumToStringList functions" should {
    "convert a java enumeration to a List" in {
      val v: _root_.java.util.Vector[Int] = new _root_.java.util.Vector[Int]
      v.add(1); v.add(2)
      enumToList(v.elements) must_== List(1, 2)
    }
    "convert a java enumeration containing any kind of object to a List of Strings" in {
      val v: _root_.java.util.Vector[Any] = new _root_.java.util.Vector[Any]
      v.add(1); v.add("hello")
      enumToStringList(v.elements) must_== List("1", "hello")
    }
  }
  "The ListHelpers head function (headOr on a list object)" should {
    "return the first element of a list" in {
      List(1).headOr(2) must_== 1
    }
    "return a default value if the list is empty" in {
      head(Nil, 2) must_== 2
    }
    "not evaluate the default valueif list is not empty" in {
      head(List(1), {error("stop"); 2}) must_== 1
    }
  }
  "The ListHelpers listIf function" should {
    "create a List containing an element if the predicate is true" in {
      listIf(true)(1) must_== List(1)
    }
    "return an empty List if the predicate is false" in {
      listIf(false)(1) must_== Nil
    }
    "not evaluate its argument if the predicate is false" in {
      listIf(false)({error("stop"); 1}) must_== Nil
    }
  }
  "The ListHelpers rotateList function (rotate method on a List object)" should {
    "create a List of all the circular permutations of a given list" in {
      List(1, 2, 3).rotate must_== List(List(1, 2, 3), List(2, 3, 1), List(3, 1, 2))
    }
  }
  "The ListHelpers permuteList function (permute method on a List object)" should {
    "create a List of all the permutations of a given list" in {
      List(1, 2, 3).permute must_== List(List(1, 2, 3), List(1, 3, 2), List(2, 3, 1), List(2, 1, 3), List(3, 1, 2), List(3, 2, 1))
    }
  }
  "The ListHelpers permuteWithSublists function (permuteAll method on a List object)" should {
    "create a List of all the permutations of a given list" in {
      List(1, 2, 3).permuteAll must_== List(List(2, 1, 3), List(3, 2, 1), List(1, 3, 2),
                                            List(2, 3, 1), List(3, 1, 2), List(1, 2, 3),
                                            List(3, 2), List(1, 2), List(1, 3),
                                            List(2, 3), List(2, 1), List(3, 1),
                                            List(2), List(1), List(3))
    }
  }
  "The ListHelpers" should {
    "provide an or method on Lists returning the list itself if not empty or another list if it is empty" in {
      List(1).or(List(2)) must_== List(1)
      (Nil: List[Int]).or(List(2)) must_== List(2)
    }
    "provide a str method on Lists joining the toString value of all elements" in {
      List("h", "e", "l", "l", "o").str must_== "hello"
    }
    "provide a comma method on Lists being an alias for mkString(\\", \\")" in {
      List("hello", "world").comma must_== "hello, world"
    }
    "provide a join method on Lists being an alias for mkString" in {
      List("hello", "world").join(", ") must_== "hello, world"
    }
    "provide a ? method return true iff the list is not empty" in {
      List().? must beFalse
      List(1).? must beTrue
    }
    "provide a replace method to replace one element of the list at a given position (0-based index)." +
            " If the position is negative, the first element is replaced" in {
      List(1, 2, 3).replace(1, 4) must_== List(1, 4, 3)
      List(1, 2, 3).replace(4, 4) must_== List(1, 2, 3)
      List(1, 2, 3).replace(-1, 4) must_== List(4, 2, 3)
    }
  }
}
class ListHelpersSpecTest extends JUnit4(ListHelpersSpec)
}
}
| jeppenejsum/liftweb | framework/lift-base/lift-util/src/test/scala/net/liftweb/util/ListHelpersSpec.scala | Scala | apache-2.0 | 6,179 |
import example.Lists
/**
* Created by asantos on 13/03/17.
*/
object Main {

  /** Entry point: prints the maximum of the sample list via [[example.Lists.max]].
   *
   * Changed from `extends App` to an explicit `main`: the App trait relies on
   * DelayedInit, which has initialization-order pitfalls and is deprecated.
   */
  def main(args: Array[String]): Unit =
    println(Lists.max(List(1,3,2)))
}
| alvsanand/scala-spark-big-data | example/src/main/scala/Main.scala | Scala | apache-2.0 | 129 |
/**
* @author Francisco Miguel Arámburo Torres - atfm05@gmail.com
*/
package http
import scala.concurrent.Future
import play.api.libs.json._
import requests.Request
import requests.EngineImportRequest
import requests.EngineComputeRequest
import requests.ControlComputeRequest
import shared.WebService
import shared.Log
/** Module that handles the reports to the Census Control
* server by sending http requests to it with the report
* as json.
*/
object OutReports {

  /** [[shared.WebService]] instance to be used.
   *
   * NOTE(review): intentionally mutable and nullable — every entry point below
   * silently no-ops until [[setService]] has been called.
   */
  var service: WebService = null

  /** Creates and sets a new [[shared.WebService]] instance.
   *
   * @param _host of the server.
   * @param _port of the server.
   */
  def setService (_host: String, _port: Int): Unit =
    service = new WebService {
      val host = _host
      val port = _port
      val user = null
      val password = null
    }

  /** Sends a POST to the server reporting a request.
   *
   * @param token of the request that is being reported.
   */
  def report (token: String): Unit = {
    if (service == null) return
    val data = Json.obj("token" -> token)
    // Fire-and-forget: only connectivity failures are logged.
    service.post("/census/report", data, { (error, response) =>
      if (error) Log.error("Unreachable Census Control server.")
    })
  }

  /** Sends a POST to the server reporting an error.
   *
   * @param token of the requests that had the error.
   * @param error that occurred.
   */
  def error (token: String, error: String): Unit = {
    if (service == null) return
    val data = Json.obj(
      "token" -> token,
      "error" -> error
    )
    service.post("/census/error", data, { (error, response) =>
      if (error) Log.error("Unreachable Census Control server.")
    })
  }

  /** Success reports. */
  object Report {

    /** Reports that a graph import finished in a Census Engine instance. */
    def engineImportFinished (request: Request): Unit = {
      report(request.token)
      Log.info(s"Graph import finished.")
    }

    /** Reports that a computation finished in a Census Engine instance. */
    def engineComputeFinished (request: Request): Unit = {
      report(request.token)
      Log.info(s"Computation with token:${request.token} finished.")
    }

    /** Reports that a whole computation finished for a Census Control server. */
    def controlComputeFinished (request: Request): Unit = {
      report(request.token)
      Log.info(s"Computation with token:${request.token} finished.")
    }

  }

  /** Error reports. */
  object Error {

    /** Graph import error reports: */

    /** Reports that the Neo4j database couldn't be reached */
    def unreachableNeo4j (request: Request): Unit = {
      error(request.token, "unreachable-neo4j")
      Log.error(s"Unreachable Neo4j server on graph import.")
    }

    /** Reports that the Neo4j database returned an empty set when importing. */
    def emptyNeo4j (request: Request): Unit = {
      error(request.token, "empty-neo4j")
      Log.error(s"Empty Neo4j database with provided tag.")
    }

    /** Reports that an error occurred when importing the database. */
    def importFailed (request: Request): Unit = {
      error(request.token, "import-failed")
      Log.error(s"Graph import failed.")
    }

    /** Computation error reports: */

    /** Reports that a computation failed. */
    def computationFailed (request: Request): Unit = {
      error(request.token, "computation-failed")
      Log.error(s"Computation failed.")
    }

    /** Reports that a computation couldn't be done because there was no graph importation before. */
    def computationNotReady (request: Request): Unit = {
      error(request.token, "missing-graph")
      Log.error(s"Couldn't start computation, the graph was not properly imported.")
    }

  }

}
| FrancoAra/census | app/http/OutReports.scala | Scala | mit | 3,790 |
package org.jmotor.tools.http
import org.asynchttpclient.{ AsyncCompletionHandler, AsyncHttpClient, BoundRequestBuilder, Response }
import scala.concurrent.{ ExecutionContext, Future, Promise }
/**
* Component:
* Description:
* Date: 2018/2/8
*
* @author AI
*/
object AsyncHttpClientConversions {

  /** Enriches [[BoundRequestBuilder]] with a conversion from AHC's
   * callback-style `execute` to a Scala `Future[Response]`.
   */
  implicit class BoundRequestBuilderWrapper(request: BoundRequestBuilder) {

    /** Executes the request and exposes its outcome as a `Future[Response]`.
     *
     * Uses `trySuccess`/`tryFailure` instead of `success`/`failure` so that a
     * second callback from the underlying client (e.g. a throwable delivered
     * after completion) cannot blow up with an IllegalStateException from
     * completing the promise twice; the first completion wins.
     */
    implicit def toFuture: Future[Response] = {
      val result = Promise[Response]
      request.execute(new AsyncCompletionHandler[Response]() {
        override def onCompleted(response: Response): Response = {
          result.trySuccess(response)
          response
        }

        override def onThrowable(t: Throwable): Unit = {
          result.tryFailure(t)
        }
      })
      result.future
    }
  }
}
| aiyanbo/search.maven.org-scala-sdk | src/main/scala/org/jmotor/tools/http/AsyncHttpClientConversions.scala | Scala | apache-2.0 | 802 |
import scala.util.parsing.combinator.Parsers
/**
 * Shared character-level parser combinators for IRC-style messages:
 * host/server names, IPv4/IPv6 addresses, users, nicks, and whitespace.
 * Subclasses define `ResultType` and the top-level `parse` production.
 */
abstract class BaseParser extends Parsers {
  type Elem = Char

  // Matches any single character contained in `ps`.
  protected def oneOf(ps : Iterable[Elem]) =
    elem("oneOf(" ++ ps.mkString ++ ")", ch => ps exists (ch ==))

  // Matches any single character NOT contained in `ps`.
  protected def allExcept(ps : Iterable[Elem]) =
    elem("allExcept(" ++ ps.mkString ++ ")", ch => ps forall (ch !=))

  protected def servername = host

  // A host is either a literal address or a domain name.
  protected def host = hostaddr | hostname

  // Dotted domain name made of dash-separated alphanumeric labels.
  protected def hostname = {
    val shortname = (letter | number) ~ rep(letter | number | '-') ~ rep(letter | number) ^^
      { case c ~ as ~ bs => (c :: as ++ bs) mkString }
    val d_shortname = '.' ~ shortname ^^ { case _ ~ sn => "." ++ sn }
    shortname ~ rep(d_shortname) ^^
      { case sn ~ rest => DomainName(sn ++ (rest mkString)) }
  }

  // IPv4 dotted-quad or IPv6 (plain colon-separated hex, or the
  // IPv4-in-IPv6 form "0:0:0:0:0:{0|FFFF}:a.b.c.d").
  protected def hostaddr = {
    val d_number = '.' ~ rep1(number) ^^
      { case _ ~ n => "." ++ n }
    val ip4addr = rep1(number) ~ rep1(d_number) ^^
      { case s ~ ss => (s ++ ss) mkString }
    val hexdigit = number | oneOf("aAbBcCdDeEfF")
    val c_hexdigit = ':' ~ rep1(hexdigit) ^^
      { case _ ~ d => ":" ++ d}
    val ip6addr_plain = rep1(hexdigit) ~ rep1(c_hexdigit) ^^
      { case d ~ ds => (d ++ ds) mkString }
    val ip6addr_ip4 = acceptSeq("0:0:0:0:0:") ~ (acceptSeq("0") | acceptSeq("FFFF")) ~ ':' ~ ip4addr ^^
      { case a ~ b ~ _ ~ d => ((a ++ b) mkString) ++ ":" ++ (d mkString)}
    val ip6addr = ip6addr_ip4 ||| ip6addr_plain
    ip4addr ^^
      { s => IP4Addr(s) } |
    ip6addr ^^
      { s => IP6Addr(s) }
  }

  // Username: any run of characters excluding the listed separators.
  protected def user = rep1(allExcept(" \\0\\r\\n@%!")) ^^ {_ mkString}

  // Nickname: letter/special first, then letters/digits/specials/dashes.
  protected def nick = {
    def special = oneOf("_[]\\\\|`^{}")
    (letter | special) ~ rep(letter | number | special | '-') ^^
      { case l ~ ls => (l :: ls) mkString }
  }

  protected def space = rep1(' ') ^^ {_ mkString}
  protected def number = oneOf("0123456789")
  protected def uppercase = oneOf("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
  protected def lowercase = oneOf("abcdefghijklmnopqrstuvwxyz")
  protected def letter = uppercase | lowercase
  protected def crlf = acceptSeq("\\r\\n")

  // Each concrete parser fixes its result type and top-level production.
  type ResultType
  def parse : Parser[ResultType]

  /** Convenience: run the top-level parser over a plain string. */
  def parseString(s : String) = {
    import scala.util.parsing.input.CharSequenceReader;
    parse(new CharSequenceReader(s))
  }
}
/**
 * Parses a full IRC message line:
 * `[":" prefix SPACE] command [params] CRLF`, producing a [[Message]] with an
 * optional source prefix, a command (word or 3-digit numeric), middle
 * parameters, and an optional trailing parameter.
 */
object MessageParser extends BaseParser {
  private def message = {
    val c_prefix_s = ':' ~ prefix ~ space ^^ { case _ ~ p ~ _ => p }
    opt(c_prefix_s) ~ command ~ params ~ crlf ^^
      { case oPrfx ~ cmds ~ ((ls, otr)) ~ _ => Message(oPrfx, cmds, ls, otr) }
  }

  // Prefix is a server name, or a nick with optional !user and @host parts.
  private def prefix : Parser[Source] = {
    val e_user = '!' ~ user ^^ { case _ ~ u => u }
    val a_host = '@' ~ host ^^ { case _ ~ h => h }
    servername ||| nick ~ opt(e_user) ~ opt(a_host) ^^
      { case nick ~ oUser ~ oHost =>
          Person(Some(nick), oUser, oHost, None)
      }
  }

  // Command: alphabetic word or exactly three digits.
  private def command =
    rep1(letter) ^^
      { _ mkString
      } |
    number ~ number ~ number ^^
      { case a ~ b ~ c =>
          (a :: b :: c :: Nil) mkString
      }

  // Recursively collects middle params; a ":"-led trailing param ends the list.
  private def params : Parser[(List[String],Option[String])] = {
    val cln_trailing =
      ':' ~ trailing ^^ { case _ ~ t => (Nil, Some(t)) }
    val middle_params =
      middle ~ params ^^ { case p ~ ((ps, t)) => (p :: ps, t)}
    opt(space) ~ opt(cln_trailing | middle_params) ^^
      { case _ ~ rest =>
          rest getOrElse (Nil, None)
      }
  }

  // Middle param: no leading ':', no spaces/CR/LF/NUL anywhere.
  private def middle = allExcept(" :\\r\\n\\0") ~ rep(allExcept(" \\r\\n\\0")) ^^
    { case c ~ cs =>
        (c :: cs) mkString
    }

  // Trailing param: everything up to CR/LF/NUL, spaces allowed.
  private def trailing = rep(allExcept("\\r\\n\\0")) ^^ {_ mkString}

  type ResultType = Message
  override def parse = phrase(message)
}
/**
 * Parses a comma-separated list of message targets: channels or persons
 * addressed as nick[!user@host], user[%host]@server, or user%host.
 */
object TargetParser extends BaseParser {
  private def target : Parser[List[Target]] = {
    val sep_to = ',' ~ to ^^ { case _ ~ s => s}
    to ~ rep(sep_to) ^^ { case r ~ rs => r :: rs }
  }

  private def to : Parser[Target] = {
    val p_host = '%' ~ host ^^
      { case _ ~ h => h }
    val a_servername = '@' ~ servername ^^
      { case _ ~ sn => sn }
    val e_user_a_host = '!' ~ user ~ '@' ~ host ^^
      { case _ ~ u ~ _ ~ h => (u, h) }
    val user_server = user ~ opt(p_host) ~ a_servername ^^
      { case u ~ oh ~ sn => Person(None, Some(u), oh, Some(sn)) }
    val user_host = user ~ p_host ^^
      { case u ~ h => Person(None, Some(u), Some(h), None) }
    val nick_optqualuser = nick ~ opt(e_user_a_host) ^^
      { case n ~ opt => opt match {
          case Some((u,h)) => Person(Some(n), Some(u), Some(h), None)
          case None => Person(Some(n), None, None, None)
      }}
    channel | targetmask | (nick_optqualuser ||| user_server ||| user_host)
  }

  // NOTE(review): stub — always fails, so target masks ($mask/#mask) are not
  // actually supported yet; the Channel mapping is unreachable.
  private def targetmask = failure("fail") ^^ {_ => Channel("a" ++ "foo", None) }

  private def channelid = uppercase | number

  // Channel: '#', '+', '&' or '!'-id prefix, then a name and optional :suffix.
  private def channel = {
    val e_channelid = '!' ~ channelid ^^ { case _ ~ cid => cid }
    val c_chstring = ':' ~ chstring ^^ { case _ ~ cs => cs }
    (oneOf("#+&") | e_channelid) ~ chstring ~ opt(c_chstring) ^^
      { case a ~ b ~ c => Channel(a.toString ++ b, c) }
  }

  // Channel-name characters: no space, formfeed, NUL, CR, LF, comma, colon.
  private def chstring = rep1(allExcept(" \\f\\0\\r\\n,:")) ^^ {_ mkString}

  type ResultType = List[Target]
  override def parse = phrase(target)
}
| demellj/iricala | src/Parsers.scala | Scala | apache-2.0 | 5,332 |
package prog_gen
import ir._
import ir.ast._
import ir.interpreter.Interpreter
import opencl.executor.{Eval, Execute, TestWithExecutor}
import opencl.ir._
import opencl.ir.pattern.{MapSeq, ReduceSeq, toGlobal}
import org.junit.Assert._
import org.junit._
import rewriting.{EnabledMappings, Lower}
import scala.language.reflectiveCalls
object ProgGenIssues extends TestWithExecutor
class ProgGenIssues{

  /** Regression: a generated high-level expression whose interpreted result
   * did not match the executed OpenCL result. Lowers the expression, runs it
   * through the interpreter and the executor, and compares outputs exactly.
   */
  @Test
  def hlGenResultNotEqual1(): Unit = {
    val f = Eval("val add = UserFun(\\"add\\", Array(\\"x\\", \\"y\\"), \\"\\"\\"|{ return x+y; }\\"\\"\\".stripMargin, Seq(Float, Float), Float).setScalaFun (xs => xs.head.asInstanceOf[Float] + xs(1).asInstanceOf[Float])\\nfun(Float, ArrayTypeWSWC(ArrayTypeWSWC(Float, 32), 32), ArrayTypeWSWC(Float, 32),(p_0, p_1, p_2) => FunCall(Map(fun((p_3) => FunCall(Reduce(fun((p_4, p_5) => FunCall(add, p_4, p_5))), FunCall(add, p_0, p_3), FunCall(Map(fun((p_6) => FunCall(add, p_6, p_6))), FunCall(Join(), p_1))))), FunCall(Map(fun((p_7) => FunCall(add, p_7, p_7))), p_2)))")
    val fs = Lower.mapCombinations(f,
      EnabledMappings(global0 = true, global01 = false, global10 = false, false, false, group0 = false, group01 = false, group10 = false))
    val lower = fs.head
    TypeChecker(lower)
    val Args = InputGenerator()(fs.head)
    val output_int = Interpreter(f).->[Vector[Vector[Float]]].runAndFlatten(Args:_*).toArray[Float]
    val (output_exe,_)= Execute(1,32)[Array[Float]](lower,Args:_*)
    assertArrayEquals(output_int, output_exe, 0.0f)
  }

  /** Regression for issue 76: Map over a free scalar captured twice in the
   * body; interpreter vs. executor outputs must agree.
   */
  @Test
  def issue76(): Unit = {
    val f = fun(
      Float,
      ArrayTypeWSWC(Float,32),
      (p236,p116) =>{
        Map(fun((p200) =>
          add(p236,add(p236,p200))
        )) $ p116
      })
    val fs = Lower.mapCombinations(f,
      EnabledMappings(global0 = true, global01 = false, global10 = false, false, false, group0 = false, group01 = false, group10 = false))
    val Args = InputGenerator()(fs.head)
    val output_int = Interpreter(f).->[Vector[Float]].run(Args:_*).toArray[Float]
    val (output_exe,_)= Execute(1,1024)[Array[Float]](fs.head,Args:_*)
    assertArrayEquals(output_int, output_exe, 0.0f)
  }

  /** Regression for issue 78: ReduceSeq seeded with a free scalar, followed by
   * a MapSeq that adds the same scalar; already lowered, executed directly.
   */
  @Test
  def issue78(): Unit = {
    val f = fun(
      Float,
      ArrayTypeWSWC(Float,32),
      (p252,p174) =>
        toGlobal(MapSeq(fun((p30) =>
          add(p252,p30)
        )))(ReduceSeq(fun((p89,p156) =>
          add(p89,p156)
        ))(id $ p252,p174))
    )
    val args = InputGenerator()(f)
    val output_int = Interpreter(f).->[Vector[Float]].run(args:_*).toArray[Float]
    val (output_exe,_)= Execute(1,1024)[Array[Float]](f, args:_*)
    assertArrayEquals(output_int, output_exe, 0.0f)
  }
}
| lift-project/lift | src/test/prog_gen/ProgGenIssues.scala | Scala | mit | 2,657 |
package space.spacelift.mq.proxy.patterns
import akka.actor.{Actor, ActorLogging}
/**
 * Base contract for actors that consume messages from a message queue.
 *
 * Mixes in Akka's [[Actor]] and [[ActorLogging]]; concrete subscribers must
 * supply the [[Processor]] used to handle incoming deliveries.
 */
trait Subscriber extends Actor with ActorLogging {
  // The processor that handles messages received by this subscriber.
  def processor: Processor
}
| Spacelift/akka-mq-proxies | akka-mq-proxies/src/main/scala/space/spacelift/mq/proxy/patterns/Subscriber.scala | Scala | mit | 164 |
/*
* Copyright 2007-2011 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb
package http
import net.liftweb.common._
import net.liftweb._
import util._
import Helpers._
import scala.xml.{NodeSeq, Elem}
/**
* The same StatefulSnippet instance is used across a given page rendering.
* <br/>
* If the StatefulSnippet is used to render a form, a hidden field is added to
* the form that causes the same instance to be used on the page that is the
* target of the form submission.
* <br/>
* If you want to keep the same snippet for a page rendered via a link (<a
* href...>) use the StatefulSnippet.link method to create the link. This will
* cause the registerThisSnippet method to be called and the same instance will
* be used on the target page.
* <pre>
* class CountGame extends StatefulSnippet {
* val dispatch: DispatchIt = {
* case "run" => run _
* }
*
* def run(xhtml: NodeSeq): NodeSeq = {
* if (lastGuess == number) {
* bind("count", chooseTemplate("choose", "win", xhtml), "number" --> number, "count" --> count)
* } else {
* bind("count", chooseTemplate("choose", "guess", xhtml),
* "input" --> text("", guess _),
 *         "last" --> lastGuess.map(v => if (v < number) v+" is low" else v+" is high").openOr("Make first Guess")
* )
* }
*
* private def guess(in: String) {
* count += 1
* lastGuess = Full(toInt(in))
* }
*
* private val number = 1 + randomInt(100)
* private var lastGuess: Box[Int] = Empty
* private var count = 0
*
* }
* </pre>
*/
trait StatefulSnippet extends DispatchSnippet {
  // Snippet names this instance is registered under. Access is guarded by
  // `synchronized` because registration can happen on different request threads.
  private[this] var _names: Set[String] = Set()

  /** Record an additional snippet name that this instance serves. */
  def addName(name: String) {
    synchronized {
      _names = _names + name
    }
  }

  /** The snippet names this instance is currently registered under. */
  def names: Set[String] = synchronized {
    _names
  }

  /**
   * Re-register this instance for all of its names so that the same instance
   * is used when the next request (form submission or link click) is handled.
   */
  def registerThisSnippet() = names.foreach(n => S.overrideSnippetForClass(n, this))

  /** Remove the registration of this instance for all of its names. */
  def unregisterThisSnippet() = names.foreach(n => S.unsetSnippetForClass(n))

  /**
   * Create an anchor tag around a body. Clicking the link re-registers this
   * snippet instance before invoking the supplied function, so the same
   * instance is used on the target page.
   *
   * @param to the target of the link
   * @param func the function to invoke when the link is clicked
   * @param body the NodeSeq to wrap in the anchor tag
   * @param attrs the (optional) attributes for the HTML element
   */
  def link(to: String, func: () => Any, body: NodeSeq, attrs: SHtml.ElemAttr*): Elem =
    SHtml.link(to, () => { registerThisSnippet(); func() }, body, attrs: _*)

  /** Redirect to `where`, keeping this snippet instance registered for the target page. */
  def redirectTo(where: String) = S.redirectTo(where, registerThisSnippet)

  /**
   * Merge the SHtml into the form: if `res` contains an unprefixed form
   * element, prepend `toMerge` inside that form; otherwise, if `isForm` is
   * set, prepend it in front of the whole result.
   */
  private[http] def mergeIntoForm(isForm: Boolean, res: NodeSeq, toMerge: => NodeSeq): NodeSeq = {
    // Look for the first non-prefixed <form> element in the rendered output.
    val formElem = Helpers.findOption(res){
      case e: Elem if e.label == "form" && null == e.prefix=> Some(e)
      case _ => None
    }

    if (formElem.isDefined) {
      import util.Helpers._
      // Prepend the merged content inside the <form> element via CSS selector transform.
      ("form *" #> ((kids: NodeSeq) => toMerge ++ kids))(res)
    } else if (isForm) {
      toMerge ++ res
    } else {
      res
    }
  }
}
/**
 * Mix this into a StatefulSnippet to get a canned `dispatch` that routes the
 * "render" invocation to the abstract [[render]] method.
 */
trait RenderDispatch {
  /**
   * The pre-defined dispatch table: a single entry, "render", which delegates
   * to the [[render]] method.
   */
  def dispatch: PartialFunction[String, NodeSeq => NodeSeq] =
    Map("render" -> ((markup: NodeSeq) => render(markup)))

  /**
   * Implement this method to produce the snippet's markup.
   */
  def render(in: NodeSeq): NodeSeq
}
/**
 * Mix this into a StatefulSnippet if you want a defined render method. Differs
 * from RenderDispatch because the render method returns a NodeSeq => NodeSeq
 * transformation instead of taking the NodeSeq as a parameter.
 */
trait RenderFuncDispatch {
  /**
   * The pre-defined dispatch: a single "render" entry that delegates to [[render]].
   */
  def dispatch: PartialFunction[String, NodeSeq => NodeSeq] = Map("render" -> render)
  /**
   * You have to define this method: it returns the transformation that is
   * applied to the snippet's markup.
   */
  def render: NodeSeq => NodeSeq
}
/**
 * The simple composition of StatefulSnippet, Whence and RenderFuncDispatch.
 * This is the common use of stateful snippets and makes things easier: mix in
 * this single trait and implement `render: NodeSeq => NodeSeq`.
 */
trait SimpleStateful extends StatefulSnippet with Whence with RenderFuncDispatch
/**
 * Base trait for snippets that route invocations by name: `dispatch` maps a
 * snippet method name to the `NodeSeq => NodeSeq` transformation to apply.
 */
trait DispatchSnippet {
  /** Convenience alias for the dispatch partial-function type. */
  type DispatchIt = PartialFunction[String, NodeSeq => NodeSeq]

  /** Maps an invocation name to the transformation that renders it. */
  def dispatch: DispatchIt
}
| wsaccaco/lift | framework/lift-base/lift-webkit/src/main/scala/net/liftweb/http/StatefulSnippet.scala | Scala | apache-2.0 | 4,599 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.batch.sql
import org.apache.flink.api.scala._
import org.apache.flink.table.api.ValidationException
import org.apache.flink.table.api.scala._
import org.apache.flink.table.planner.utils.TableTestBase
import org.junit.Test
/**
 * Plan tests for INSERT INTO a partitioned filesystem sink in batch mode,
 * covering static, dynamic and partial partition specifications as well as
 * the expected validation failures.
 */
class PartitionableSinkTest extends TableTestBase {

  private val util = batchTestUtil()
  util.addTableSource[(Long, Long, Long)]("MyTable", 'a, 'b, 'c)
  createTable("sink", shuffleBy = false)

  // Registers a filesystem sink table partitioned by (b, c). When `shuffleBy`
  // is set, the table additionally enables shuffling by partition columns.
  private def createTable(name: String, shuffleBy: Boolean): Unit = {
    util.tableEnv.executeSql(
      s"""
         |create table $name (
         |  a bigint,
         |  b bigint,
         |  c bigint
         |) partitioned by (b, c) with (
         |  'connector' = 'filesystem',
         |  'path' = '/non',
         |  ${if (shuffleBy) "'sink.shuffle-by-partition.enable'='true'," else ""}
         |  'format' = 'testcsv'
         |)
         |""".stripMargin)
  }

  // All partition columns fixed by the PARTITION clause.
  @Test
  def testStatic(): Unit = {
    util.verifyPlanInsert("INSERT INTO sink PARTITION (b=1, c=1) SELECT a FROM MyTable")
  }

  // No PARTITION clause: partition values come from the query.
  @Test
  def testDynamic(): Unit = {
    util.verifyPlanInsert("INSERT INTO sink SELECT a, b, c FROM MyTable")
  }

  // Dynamic insert into a sink with shuffle-by-partition enabled.
  @Test
  def testDynamicShuffleBy(): Unit = {
    createTable("sinkShuffleBy", shuffleBy = true)
    util.verifyPlanInsert("INSERT INTO sinkShuffleBy SELECT a, b, c FROM MyTable")
  }

  // Only the first partition column (b) is static; c is dynamic.
  @Test
  def testPartial(): Unit = {
    util.verifyPlanInsert("INSERT INTO sink PARTITION (b=1) SELECT a, c FROM MyTable")
  }

  // `a` is not a partition column, so a static spec on it must be rejected.
  @Test(expected = classOf[ValidationException])
  def testWrongStatic(): Unit = {
    util.verifyPlanInsert("INSERT INTO sink PARTITION (a=1) SELECT b, c FROM MyTable")
  }

  // The SELECT list must not repeat a statically-specified partition column.
  @Test(expected = classOf[ValidationException])
  def testWrongFields(): Unit = {
    util.verifyPlanInsert("INSERT INTO sink PARTITION (b=1) SELECT a, b, c FROM MyTable")
  }

  // VALUES with a PARTITION clause is not supported; check the exact error message.
  @Test
  def testStaticWithValues(): Unit = {
    thrown.expect(classOf[ValidationException])
    thrown.expectMessage(
      "INSERT INTO <table> PARTITION statement only support SELECT clause for now," +
        " 'VALUES ROW(5)' is not supported yet")
    util.verifyPlanInsert("INSERT INTO sink PARTITION (b=1, c=1) VALUES (5)")
  }
}
| hequn8128/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/sql/PartitionableSinkTest.scala | Scala | apache-2.0 | 3,009 |
package de.cameonet.xlifftools
import java.io.{File, FileInputStream}
import java.util.Properties
import com.fasterxml.jackson.core.JsonParseException
import com.typesafe.config.{ConfigFactory, Config}
import org.clapper.argot.ArgotConverters._
import org.clapper.argot._
import play.api.libs.json.{JsObject, Json}
import scala.io.Source
/**
* User: Björn Reimer
* Date: 15.10.14
* Time: 14:47
*/
/**
 * Command-line entry point for the cameo xliff tools.
 *
 * Supported modes:
 *  - `import`: read a `.json` or `.properties` source file (whose base name
 *    is the source language, e.g. `en.json`) and update/create one xliff
 *    file per target language
 *  - `export`: write one JSON file per existing xliff file
 *  - `merge`:  copy the translations of a JSON file back into the matching
 *    xliff file
 */
object Main {

  def main(args: Array[String]) {
    // set charset to utf-8
    // NOTE(review): setting "file.encoding" at runtime does not change the
    // JVM's already-initialised default charset; kept for compatibility.
    System.setProperty("file.encoding", "UTF-8")

    val parser = new ArgotParser("cameoXliffTools")

    val mode = parser.option[String](List("m", "mode"), "MODE", "[import|export|merge] required")

    // Source file for import/merge; must exist and be a regular file.
    val jsonSource = parser.option[File](List("s", "source"), "FILE", "source file for JSON import") {
      (s, opt) =>
        val file = new File(s)
        if (!file.exists) {
          parser.usage("Source file \\"" + s + "\\" does not exist.")
        }
        if (!file.isFile) {
          parser.usage("Source \\"" + s + "\\" is not a file.")
        }
        file
    }

    val xliffDefault = "xliff"
    val maybeXliffDir = parser.option[File]("xliff-dir", "DIRECTORY", "path to xliff files. Default: ./" + xliffDefault) {
      (s, opt) => new File(s)
    }

    val jsonDefault = "json"
    val maybeJsonDir = parser.option[File]("json-dir", "DIRECTORY", "path to json files. Default: ./" + jsonDefault) {
      (s, opt) => new File(s)
    }

    val propertiesDefault = "properties"
    val maybePropertiesDir = parser.option[File]("properties-dir", "DIRECTORY", "path to java property files. Default: ./" + propertiesDefault) {
      (s, opt) => new File(s)
    }

    val languages = parser.multiOption[String](List("l", "languages"), "LANG", "Will be created if it does not exist.")

    try {
      parser.parse(args)

      // Fall back to the default directories when none were given on the command line.
      val xliffDir: File = maybeXliffDir.value.getOrElse(new File(xliffDefault))
      val jsonDir: File = maybeJsonDir.value.getOrElse(new File(jsonDefault))
      val propertiesDir: File = maybePropertiesDir.value.getOrElse(new File(propertiesDefault))

      // check directories (created on demand; must end up as directories)
      ensureDirectory(parser, xliffDir, "xliff-dir")
      ensureDirectory(parser, jsonDir, "json-dir")
      ensureDirectory(parser, propertiesDir, "properties-dir")

      mode.value match {
        case Some("import") =>
          // check if source file exists
          jsonSource.value match {
            case None => parser.usage("no source file defined")
            case Some(file) => importFile(file, xliffDir, languages.value)
          }
        case Some("export") => exportToJson(jsonDir, xliffDir)
        case Some("merge") =>
          // check if source file exists
          jsonSource.value match {
            case None => parser.usage("no source file defined")
            case Some(file) => mergeJson(file, xliffDir)
          }
        case _ => parser.usage("No mode selected.")
      }
      println("DONE")
    } catch {
      case e: ArgotUsageException => println(e.message)
      case e: ConversionException => println("ERROR: " + e.message)
      case e: JsonParseException => println("Error parsing json: " + e.getMessage)
    }
  }

  /**
   * Creates `dir` if it does not exist and aborts with an error if it cannot
   * be created or is not a directory. `label` is used in the error message.
   */
  private def ensureDirectory(parser: ArgotParser, dir: File, label: String): Unit = {
    if (!dir.exists() && !dir.mkdirs()) {
      throw new ConversionException("could not create " + label + ": " + dir.getAbsolutePath)
    }
    if (!dir.isDirectory) {
      parser.usage("not a directory: " + dir.getAbsolutePath)
    }
  }

  /** Reads the whole file as a string, closing the underlying stream afterwards. */
  private def readTextFile(file: File): String = {
    val source = Source.fromFile(file)
    try source.mkString finally source.close()
  }

  /**
   * Imports translation keys from `sourceFile` (a `.json` or `.properties`
   * file whose base name is the source language) into the xliff files in
   * `xliffDir`. Missing xliff files for `languages` are created.
   *
   * @throws ConversionException for unsupported source file types
   */
  def importFile(sourceFile: File, xliffDir: File, languages: Seq[String]): Unit = {
    // get source language from name of the source file, e.g. "en" for "en.json"
    val sourceLang = getFileNameWithoutExtention(sourceFile)
    println("Using \\"" + sourceLang + "\\" as source language")

    // get existing xliff files
    val xliffFiles: Array[File] = xliffDir.listFiles.filter(_.getName.endsWith(".xlf"))

    // find requested languages that do not have an xliff file yet
    val newLanguages = languages.filter(lang => !xliffFiles.exists(getFileNameWithoutExtention(_).equals(lang)))

    // read existing xliff files and create empty ones for new languages
    val xliffs: Seq[Xliff] = xliffFiles.toSeq.map(XliffFactory(_)) ++ newLanguages.map(XliffFactory(_, sourceLang))

    // get file type of source from extension and update xliffs accordingly
    sourceFile.getName.split('.').last match {
      case "json" =>
        // parse json (file handle closed after reading)
        val json = Json.parse(readTextFile(sourceFile)).as[JsObject]
        xliffs.foreach(_.updateFromJson(json).writeToFile(xliffDir))
      case "properties" =>
        // parse properties file
        val properties: Config = ConfigFactory.parseFile(sourceFile)
        xliffs.foreach(_.updateFromProperties(properties).writeToFile(xliffDir))
      case x => throw new ConversionException("unsupported file type: " + x)
    }
  }

  /** Writes one JSON file into `jsonDir` for every xliff file found in `xliffDir`. */
  def exportToJson(jsonDir: File, xliffDir: File): Unit = {
    // read existing xliff files
    val xliffs: Seq[Xliff] = xliffDir.listFiles.filter(_.getName.endsWith(".xlf")).toSeq.map(XliffFactory(_))
    xliffs.foreach(_.writeToJsonFile(jsonDir))
  }

  /**
   * Copies the translations of `jsonFile` (named after its language) into the
   * corresponding xliff file in `xliffDir` as target values.
   *
   * @throws ConversionException if no matching xliff file exists
   */
  def mergeJson(jsonFile: File, xliffDir: File): Unit = {
    // find language of json file
    val lang = getFileNameWithoutExtention(jsonFile)

    // parse json (file handle closed after reading)
    val json = Json.parse(readTextFile(jsonFile)).as[JsObject]

    // try to find the corresponding xliff file
    val xliffFiles = xliffDir.listFiles.filter(_.getName.endsWith(".xlf"))
    xliffFiles.find(file => getFileNameWithoutExtention(file).equals(lang)) match {
      case None => throw new ConversionException("no corresponding xliff found: " + jsonFile)
      case Some(file) => XliffFactory(file).setTargetsFromJson(json).writeToFile(xliffDir)
    }
  }

  // Strips the last dot-extension from a file name ("en.json" -> "en").
  // NOTE: the misspelling ("Extention") is kept for source compatibility.
  def getFileNameWithoutExtention(file: File): String = file.getName.replaceFirst("[.][^.]+$", "")
}
class ConversionException(val message: String) extends RuntimeException(message)
| memoConnect/cameoXliffTools | src/main/scala/de/cameonet/xlifftools/Main.scala | Scala | agpl-3.0 | 6,261 |
package controllers
import helpers.TestWithApplication
import helpers.acquire.CookieFactoryForUnitSpecs
import helpers.UnitSpec
import models.AcquireCacheKeyPrefix.CookiePrefix
import models.BusinessChooseYourAddressFormModel.BusinessChooseYourAddressCacheKey
import models.VehicleLookupFormModel.VehicleLookupFormModelCacheKey
import pages.acquire.{BeforeYouStartPage, SetupTradeDetailsPage, VehicleLookupPage}
import play.api.test.Helpers.{LOCATION, OK, SEE_OTHER}
import play.api.test.FakeRequest
import uk.gov.dvla.vehicles.presentation.common
import common.model.MicroserviceResponseModel.MsResponseCacheKey
import common.model.SetupTradeDetailsFormModel.setupTradeDetailsCacheKey
import common.model.VehicleAndKeeperDetailsModel.vehicleAndKeeperLookupDetailsCacheKey
import common.testhelpers.CookieHelper.{fetchCookiesFromHeaders, verifyCookieHasBeenDiscarded}
/**
 * Unit tests for the KeeperStillOnRecord controller: page presentation,
 * the "buy another vehicle" flow (discarding vehicle-related cookies) and
 * the "finish" flow (discarding all trade/vehicle cookies).
 */
class KeeperStillOnRecordUnitSpec extends UnitSpec {

  "present" should {
    "display the page" in new TestWithApplication {
      whenReady(present) { r =>
        r.header.status should equal(OK)
      }
    }

    // Without the required cookies the user is sent back to the start of the flow.
    "redirect to setup trade details when no cookies are in the request" in new TestWithApplication {
      whenReady(presentWithNoCookies) { r =>
        r.header.status should equal(SEE_OTHER)
        r.header.headers.get(LOCATION) should equal(Some(SetupTradeDetailsPage.address))
      }
    }
  }

  "clicking buyAnotherVehicle button" should {
    "remove all vehicle related cookies and redirect to vehicle lookup page" in new TestWithApplication {
      val request = FakeRequest()
        .withCookies(CookieFactoryForUnitSpecs.vehicleLookupFormModel())
        .withCookies(CookieFactoryForUnitSpecs.vehicleAndKeeperDetailsModel())
        .withCookies(CookieFactoryForUnitSpecs.vehicleLookupResponse())

      val result = keeperStillOnRecord.buyAnotherVehicle(request)
      whenReady(result) { r =>
        // All three vehicle-related cookies must be discarded in the response.
        val cookies = fetchCookiesFromHeaders(r)
        cookies.size should equal(3)
        verifyCookieHasBeenDiscarded(VehicleLookupFormModelCacheKey, cookies)
        verifyCookieHasBeenDiscarded(vehicleAndKeeperLookupDetailsCacheKey, cookies)
        verifyCookieHasBeenDiscarded(MsResponseCacheKey, cookies)
        r.header.status should equal(SEE_OTHER)
        r.header.headers.get(LOCATION) should equal(Some(VehicleLookupPage.address))
      }
    }
  }

  "clicking finish button" should {
    "move to the before you start page and remove cookies" in new TestWithApplication {
      val request = FakeRequest()
        .withCookies(CookieFactoryForUnitSpecs.setupTradeDetails())
        .withCookies(CookieFactoryForUnitSpecs.businessChooseYourAddress())
        .withCookies(CookieFactoryForUnitSpecs.vehicleLookupFormModel())
        .withCookies(CookieFactoryForUnitSpecs.vehicleAndKeeperDetailsModel())

      val result = keeperStillOnRecord.finish(request)
      whenReady(result) { r =>
        r.header.status should equal(SEE_OTHER)
        r.header.headers.get(LOCATION) should equal(Some(BeforeYouStartPage.address))
        // Finishing the flow discards every trade- and vehicle-related cookie.
        val cookies = fetchCookiesFromHeaders(r)
        val cookieNames = List(
          setupTradeDetailsCacheKey,
          BusinessChooseYourAddressCacheKey,
          VehicleLookupFormModelCacheKey,
          vehicleAndKeeperLookupDetailsCacheKey
        )
        cookieNames.foreach(verifyCookieHasBeenDiscarded(_, cookies))
      }
    }
  }

  // Controller under test, resolved via the application's injector.
  private lazy val keeperStillOnRecord = {
    injector.getInstance(classOf[KeeperStillOnRecord])
  }

  // Present the page with the cookies the controller requires.
  private lazy val present = {
    val request = FakeRequest()
      .withCookies(CookieFactoryForUnitSpecs.vehicleLookupFormModel())
      .withCookies(CookieFactoryForUnitSpecs.vehicleAndKeeperDetailsModel())
    keeperStillOnRecord.present(request)
  }

  // Present the page without any cookies (triggers the redirect case).
  private lazy val presentWithNoCookies = {
    val request = FakeRequest()
    keeperStillOnRecord.present(request)
  }
}
| dvla/vehicles-acquire-online | test/controllers/KeeperStillOnRecordUnitSpec.scala | Scala | mit | 3,877 |
/*
* Copyright 2015 - 2016 Red Bull Media House GmbH <http://www.redbullmediahouse.com> - all rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rbmhtechnology.eventuate.log
import akka.actor._
import akka.pattern.ask
import akka.testkit._
import akka.util.Timeout
import com.rbmhtechnology.eventuate._
import com.rbmhtechnology.eventuate.DurableEvent._
import com.rbmhtechnology.eventuate.EventsourcingProtocol._
import com.rbmhtechnology.eventuate.ReplicationFilter.NoFilter
import com.rbmhtechnology.eventuate.ReplicationProtocol._
import com.rbmhtechnology.eventuate.SingleLocationSpec._
import com.rbmhtechnology.eventuate.utilities._
import com.typesafe.config._
import org.scalatest._
import scala.collection.immutable.Seq
object EventLogSpec {
  /** Replication filter that only accepts events whose `processId` equals the given id. */
  class ProcessIdFilter(processId: String) extends ReplicationFilter {
    override def apply(event: DurableEvent): Boolean =
      event.processId == processId
  }

  // Minimal actor-system configuration used by the event-log test suites.
  val config: Config = ConfigFactory.parseString(
    """
      |akka.loglevel = "ERROR"
      |akka.test.single-expect-default = 20s
      |
      |eventuate.snapshot.filesystem.dir = target/test-snapshot
    """.stripMargin)

  // Emitter and (remote) log ids shared by the tests.
  val emitterIdA = "A"
  val emitterIdB = "B"

  val remoteLogId = "R1"

  // Filter whose apply always returns false, i.e. it rejects every event.
  val allFilter = new ReplicationFilter {
    override def apply(event: DurableEvent): Boolean = false
  }

  /** Creates a test event emitted by `emitterIdA` with an empty vector timestamp. */
  def event(payload: Any): DurableEvent =
    event(payload, VectorTime(), emitterIdA)

  /** Creates a test event emitted by `emitterIdA` with the given emitter aggregate id. */
  def event(payload: Any, emitterAggregateId: Option[String]): DurableEvent =
    event(payload, VectorTime(), emitterIdA, emitterAggregateId, Set())

  /** Creates a test event with emitter aggregate id and custom destination aggregate ids. */
  def event(payload: Any, emitterAggregateId: Option[String], customDestinationAggregateIds: Set[String]): DurableEvent =
    event(payload, VectorTime(), emitterIdA, emitterAggregateId, customDestinationAggregateIds)

  // Base factory: log-related fields (processId, localLogId, localSequenceNr)
  // are left undefined; they are filled in when the event is written to a log.
  def event(payload: Any, vectorTimestamp: VectorTime, emitterId: String, emitterAggregateId: Option[String] = None, customDestinationAggregateIds: Set[String] = Set()): DurableEvent =
    DurableEvent(payload, emitterId, emitterAggregateId, customDestinationAggregateIds, 0L, vectorTimestamp, UndefinedLogId, UndefinedLogId, UndefinedSequenceNr)

  /** Syntax helper: marks an event as if it had been processed by the remote log. */
  implicit class RemoteDurableEvent(event: DurableEvent) {
    def remote: DurableEvent = event.copy(processId = remoteLogId)
  }
}
/**
 * Shared fixture for event-log test suites: per-test probes, helpers for
 * writing emitted/replicated events to the log under test, and helpers for
 * registering event-log collaborators.
 */
trait EventLogSpecSupport extends WordSpecLike with Matchers with SingleLocationSpec {
  import EventLogSpec._

  // Per-test probes (re-created in beforeEach) and buffers for generated events.
  var _replyToProbe: TestProbe = _
  var _replicatorProbe: TestProbe = _
  var _notificationProbe: TestProbe = _

  var _generatedEmittedEvents: Vector[DurableEvent] = Vector.empty
  var _generatedReplicatedEvents: Vector[DurableEvent] = Vector.empty

  def replyToProbe: TestProbe = _replyToProbe
  def replicatorProbe: TestProbe = _replicatorProbe
  def notificationProbe: TestProbe = _notificationProbe

  def generatedEmittedEvents: Vector[DurableEvent] = _generatedEmittedEvents
  def generatedReplicatedEvents: Vector[DurableEvent] = _generatedReplicatedEvents

  override def beforeEach(): Unit = {
    super.beforeEach()

    _replyToProbe = TestProbe()
    _replicatorProbe = TestProbe()
    _notificationProbe = TestProbe()
  }

  override def afterEach(): Unit = {
    _generatedEmittedEvents = Vector.empty
    _generatedReplicatedEvents = Vector.empty

    super.afterEach()
  }

  // Builds a vector time over the local and remote log ids, omitting zero entries.
  def timestamp(a: Long = 0L, b: Long= 0L) = (a, b) match {
    case (0L, 0L) => VectorTime()
    case (a, 0L) => VectorTime(logId -> a)
    case (0L, b) => VectorTime(remoteLogId -> b)
    case (a, b) => VectorTime(logId -> a, remoteLogId -> b)
  }

  // Reads the log's current sequence number via its clock.
  def currentSequenceNr: Long = {
    log.tell(GetEventLogClock, replyToProbe.ref)
    replyToProbe.expectMsgClass(classOf[GetEventLogClockSuccess]).clock.sequenceNr
  }

  // Events as they are expected to look after a local write starting at `offset`.
  def expectedEmittedEvents(events: Seq[DurableEvent], offset: Long = 0): Seq[DurableEvent] =
    events.zipWithIndex.map {
      case (event, idx) => event.copy(vectorTimestamp = timestamp(offset + idx), processId = logId, localLogId = logId, localSequenceNr = offset + idx)
    }

  // Events as they are expected to look after a replicated write starting at `offset`.
  def expectedReplicatedEvents(events: Seq[DurableEvent], offset: Long): Seq[DurableEvent] =
    events.zipWithIndex.map {
      case (event, idx) => event.copy(processId = remoteLogId, localLogId = logId, localSequenceNr = offset + idx)
    }

  // Writes locally emitted events to `log` and asserts the expected write success.
  def writeEmittedEvents(events: Seq[DurableEvent], log: ActorRef = log): Seq[DurableEvent] = {
    val offset = currentSequenceNr + 1L
    val expected = expectedEmittedEvents(events, offset)
    log ! Write(events, system.deadLetters, replyToProbe.ref, 0, 0)
    replyToProbe.expectMsg(WriteSuccess(expected, 0, 0))
    expected
  }

  // Writes replicated events (from `remoteLogId`) and asserts the expected
  // replication write success with the given replication progress.
  def writeReplicatedEvents(events: Seq[DurableEvent], replicationProgress: Long, remoteLogId: String = remoteLogId): Seq[DurableEvent] = {
    val offset = currentSequenceNr + 1L
    val expected = expectedReplicatedEvents(events, offset)
    log.tell(ReplicationWrite(events, replicationProgress, remoteLogId, VectorTime()), replicatorProbe.ref)
    replicatorProbe.expectMsgPF() { case ReplicationWriteSuccess(_, `replicationProgress`, _, _, _) => }
    expected
  }

  // Writes only replication progress (no events) and asserts the stored progress.
  def writeReplicationProgress(replicationProgress: Long, expectedStoredReplicationProgress: Long, remoteLogId: String = remoteLogId): Unit = {
    log.tell(ReplicationWrite(Seq(), replicationProgress, remoteLogId, VectorTime()), replicatorProbe.ref)
    replicatorProbe.expectMsgPF() { case ReplicationWriteSuccess(0, `expectedStoredReplicationProgress`, _, _, _) => }
  }

  // Registers a probe as event-log collaborator (optionally for an aggregate id)
  // by replaying from sequence number 1 and awaiting the empty replay result.
  def registerCollaborator(aggregateId: Option[String] = None, collaborator: TestProbe = TestProbe()): TestProbe = {
    log.tell(Replay(1L, 0, Some(collaborator.ref), aggregateId, 0), collaborator.ref)
    collaborator.expectMsg(ReplaySuccess(Nil, 0L, 0))
    collaborator
  }

  // Writes `num` events emitted by emitter A and records them for later assertions.
  def generateEmittedEvents(emitterAggregateId: Option[String] = None, customDestinationAggregateIds: Set[String] = Set(), num: Int = 3): Unit = {
    _generatedEmittedEvents ++= writeEmittedEvents((1 to num).map { i =>
      DurableEvent(s"a-$i", emitterIdA, emitterAggregateId, customDestinationAggregateIds)
    })
  }

  // Writes `num` replicated events from emitter B / the remote log and records them.
  def generateReplicatedEvents(emitterAggregateId: Option[String] = None, customDestinationAggregateIds: Set[String] = Set(), num: Int = 3): Unit = {
    _generatedReplicatedEvents ++= writeReplicatedEvents((1 to num).map { i =>
      DurableEvent(s"b-$i", emitterIdB, emitterAggregateId, customDestinationAggregateIds, 0L, timestamp(0, i + 6), remoteLogId, remoteLogId, i + 6)
    }, 17)
  }
}
trait EventLogSpec extends TestKitBase with EventLogSpecSupport {
import EventLogSpec._
val dl = system.deadLetters
implicit val implicitTimeout = Timeout(timeoutDuration)
"An event log" must {
"write local events" in {
generateEmittedEvents()
generateEmittedEvents()
}
"write local events with undefined defaultRoutingDestination and undefined customRoutingDestinations and route them to collaborators with undefined aggregateId" in {
val collaborator = registerCollaborator(aggregateId = None)
generateEmittedEvents(emitterAggregateId = None, customDestinationAggregateIds = Set())
collaborator.expectMsg(Written(generatedEmittedEvents(0)))
collaborator.expectMsg(Written(generatedEmittedEvents(1)))
collaborator.expectMsg(Written(generatedEmittedEvents(2)))
}
"write local events with undefined defaultRoutingDestination and defined customRoutingDestinations and route them to collaborators with undefined aggregateId" in {
val collaborator = registerCollaborator(aggregateId = None)
generateEmittedEvents(emitterAggregateId = None, customDestinationAggregateIds = Set("a1"))
collaborator.expectMsg(Written(generatedEmittedEvents(0)))
collaborator.expectMsg(Written(generatedEmittedEvents(1)))
collaborator.expectMsg(Written(generatedEmittedEvents(2)))
}
"write local events with defined defaultRoutingDestination and undefined customRoutingDestinations and route them to collaborators with undefined aggregateId" in {
val collaborator = registerCollaborator(aggregateId = None)
generateEmittedEvents(emitterAggregateId = Some("a1"), customDestinationAggregateIds = Set())
collaborator.expectMsg(Written(generatedEmittedEvents(0)))
collaborator.expectMsg(Written(generatedEmittedEvents(1)))
collaborator.expectMsg(Written(generatedEmittedEvents(2)))
}
"write local events with defined defaultRoutingDestination and defined customRoutingDestinations and route them to collaborators with undefined aggregateId" in {
val collaborator = registerCollaborator(aggregateId = None)
generateEmittedEvents(emitterAggregateId = Some("a1"), customDestinationAggregateIds = Set("a2"))
collaborator.expectMsg(Written(generatedEmittedEvents(0)))
collaborator.expectMsg(Written(generatedEmittedEvents(1)))
collaborator.expectMsg(Written(generatedEmittedEvents(2)))
}
"write local events with undefined defaultRoutingDestination and undefined customRoutingDestinations and not route them to collaborators with defined aggregateId" in {
val collaborator = TestProbe()
registerCollaborator(aggregateId = Some("a1"), collaborator = collaborator)
registerCollaborator(aggregateId = None, collaborator = collaborator)
generateEmittedEvents(emitterAggregateId = None, customDestinationAggregateIds = Set())
collaborator.expectMsg(Written(generatedEmittedEvents(0)))
collaborator.expectMsg(Written(generatedEmittedEvents(1)))
collaborator.expectMsg(Written(generatedEmittedEvents(2)))
}
"write local events with undefined defaultRoutingDestination and defined customRoutingDestinations and route them to collaborators with defined aggregateId" in {
val collaborator = TestProbe()
registerCollaborator(aggregateId = Some("a1"), collaborator = collaborator)
registerCollaborator(aggregateId = None, collaborator = collaborator)
generateEmittedEvents(emitterAggregateId = None, customDestinationAggregateIds = Set("a1"))
collaborator.expectMsg(Written(generatedEmittedEvents(0)))
collaborator.expectMsg(Written(generatedEmittedEvents(0)))
collaborator.expectMsg(Written(generatedEmittedEvents(1)))
collaborator.expectMsg(Written(generatedEmittedEvents(1)))
collaborator.expectMsg(Written(generatedEmittedEvents(2)))
collaborator.expectMsg(Written(generatedEmittedEvents(2)))
}
"write local events with defined defaultRoutingDestination and undefined customRoutingDestinations and route them to collaborators with defined aggregateId" in {
val collaborator = TestProbe()
registerCollaborator(aggregateId = Some("a1"), collaborator = collaborator)
registerCollaborator(aggregateId = None, collaborator = collaborator)
generateEmittedEvents(emitterAggregateId = Some("a1"), customDestinationAggregateIds = Set())
collaborator.expectMsg(Written(generatedEmittedEvents(0)))
collaborator.expectMsg(Written(generatedEmittedEvents(0)))
collaborator.expectMsg(Written(generatedEmittedEvents(1)))
collaborator.expectMsg(Written(generatedEmittedEvents(1)))
collaborator.expectMsg(Written(generatedEmittedEvents(2)))
collaborator.expectMsg(Written(generatedEmittedEvents(2)))
}
"write local events with defined defaultRoutingDestination and defined customRoutingDestinations and route them to collaborators with defined aggregateId" in {
val collaborator = TestProbe()
registerCollaborator(aggregateId = Some("a1"), collaborator = collaborator)
registerCollaborator(aggregateId = Some("a2"), collaborator = collaborator)
registerCollaborator(aggregateId = None, collaborator = collaborator)
generateEmittedEvents(emitterAggregateId = Some("a1"), customDestinationAggregateIds = Set("a2"))
collaborator.expectMsg(Written(generatedEmittedEvents(0)))
collaborator.expectMsg(Written(generatedEmittedEvents(0)))
collaborator.expectMsg(Written(generatedEmittedEvents(0)))
collaborator.expectMsg(Written(generatedEmittedEvents(1)))
collaborator.expectMsg(Written(generatedEmittedEvents(1)))
collaborator.expectMsg(Written(generatedEmittedEvents(1)))
collaborator.expectMsg(Written(generatedEmittedEvents(2)))
collaborator.expectMsg(Written(generatedEmittedEvents(2)))
collaborator.expectMsg(Written(generatedEmittedEvents(2)))
}
"reply with a failure message if write fails" in {
val events = Vector(
DurableEvent("boom", emitterIdA),
DurableEvent("okay", emitterIdA))
log ! Write(events, system.deadLetters, replyToProbe.ref, 0, 0)
replyToProbe.expectMsg(WriteFailure(events, IntegrationTestException, 0, 0))
}
"write replicated events" in {
generateReplicatedEvents()
}
"write replicated events with undefined defaultRoutingDestination and undefined customRoutingDestinations and route them to collaborators with undefined aggregateId" in {
val collaborator = registerCollaborator(aggregateId = None)
generateReplicatedEvents(emitterAggregateId = None, customDestinationAggregateIds = Set())
collaborator.expectMsg(Written(generatedReplicatedEvents(0)))
collaborator.expectMsg(Written(generatedReplicatedEvents(1)))
collaborator.expectMsg(Written(generatedReplicatedEvents(2)))
}
"write replicated events with undefined defaultRoutingDestination and defined customRoutingDestinations and route them to collaborators with undefined aggregateId" in {
val collaborator = registerCollaborator(aggregateId = None)
generateReplicatedEvents(emitterAggregateId = None, customDestinationAggregateIds = Set("a1"))
collaborator.expectMsg(Written(generatedReplicatedEvents(0)))
collaborator.expectMsg(Written(generatedReplicatedEvents(1)))
collaborator.expectMsg(Written(generatedReplicatedEvents(2)))
}
"write replicated events with defined defaultRoutingDestination and undefined customRoutingDestinations and route them to collaborators with undefined aggregateId" in {
val collaborator = registerCollaborator(aggregateId = None)
generateReplicatedEvents(emitterAggregateId = Some("a1"), customDestinationAggregateIds = Set())
collaborator.expectMsg(Written(generatedReplicatedEvents(0)))
collaborator.expectMsg(Written(generatedReplicatedEvents(1)))
collaborator.expectMsg(Written(generatedReplicatedEvents(2)))
}
"write replicated events with defined defaultRoutingDestination and defined customRoutingDestinations and route them to collaborators with undefined aggregateId" in {
val collaborator = registerCollaborator(aggregateId = None)
generateReplicatedEvents(emitterAggregateId = Some("a1"), customDestinationAggregateIds = Set("a2"))
collaborator.expectMsg(Written(generatedReplicatedEvents(0)))
collaborator.expectMsg(Written(generatedReplicatedEvents(1)))
collaborator.expectMsg(Written(generatedReplicatedEvents(2)))
}
"write replicated events with undefined defaultRoutingDestination and undefined customRoutingDestinations and not route them to collaborators with defined aggregateId" in {
val collaborator = TestProbe()
registerCollaborator(aggregateId = Some("a1"), collaborator = collaborator)
registerCollaborator(aggregateId = None, collaborator = collaborator)
generateReplicatedEvents(emitterAggregateId = None, customDestinationAggregateIds = Set())
collaborator.expectMsg(Written(generatedReplicatedEvents(0)))
collaborator.expectMsg(Written(generatedReplicatedEvents(1)))
collaborator.expectMsg(Written(generatedReplicatedEvents(2)))
}
"write replicated events with undefined defaultRoutingDestination and defined customRoutingDestinations and route them to collaborators with defined aggregateId" in {
val collaborator = TestProbe()
registerCollaborator(aggregateId = Some("a1"), collaborator = collaborator)
registerCollaborator(aggregateId = None, collaborator = collaborator)
generateReplicatedEvents(emitterAggregateId = None, customDestinationAggregateIds = Set("a1"))
collaborator.expectMsg(Written(generatedReplicatedEvents(0)))
collaborator.expectMsg(Written(generatedReplicatedEvents(0)))
collaborator.expectMsg(Written(generatedReplicatedEvents(1)))
collaborator.expectMsg(Written(generatedReplicatedEvents(1)))
collaborator.expectMsg(Written(generatedReplicatedEvents(2)))
collaborator.expectMsg(Written(generatedReplicatedEvents(2)))
}
"write replicated events with defined defaultRoutingDestination and undefined customRoutingDestinations and route them to collaborators with defined aggregateId" in {
val collaborator = TestProbe()
registerCollaborator(aggregateId = Some("a1"), collaborator = collaborator)
registerCollaborator(aggregateId = None, collaborator = collaborator)
generateReplicatedEvents(emitterAggregateId = Some("a1"), customDestinationAggregateIds = Set())
collaborator.expectMsg(Written(generatedReplicatedEvents(0)))
collaborator.expectMsg(Written(generatedReplicatedEvents(0)))
collaborator.expectMsg(Written(generatedReplicatedEvents(1)))
collaborator.expectMsg(Written(generatedReplicatedEvents(1)))
collaborator.expectMsg(Written(generatedReplicatedEvents(2)))
collaborator.expectMsg(Written(generatedReplicatedEvents(2)))
}
"write replicated events with defined defaultRoutingDestination and defined customRoutingDestinations and route them to collaborators with defined aggregateId" in {
val collaborator = TestProbe()
registerCollaborator(aggregateId = Some("a1"), collaborator = collaborator)
registerCollaborator(aggregateId = Some("a2"), collaborator = collaborator)
registerCollaborator(aggregateId = None, collaborator = collaborator)
generateReplicatedEvents(emitterAggregateId = Some("a1"), customDestinationAggregateIds = Set("a2"))
collaborator.expectMsg(Written(generatedReplicatedEvents(0)))
collaborator.expectMsg(Written(generatedReplicatedEvents(0)))
collaborator.expectMsg(Written(generatedReplicatedEvents(0)))
collaborator.expectMsg(Written(generatedReplicatedEvents(1)))
collaborator.expectMsg(Written(generatedReplicatedEvents(1)))
collaborator.expectMsg(Written(generatedReplicatedEvents(1)))
collaborator.expectMsg(Written(generatedReplicatedEvents(2)))
collaborator.expectMsg(Written(generatedReplicatedEvents(2)))
collaborator.expectMsg(Written(generatedReplicatedEvents(2)))
}
"write replicated events and update the replication progress map" in {
log.tell(GetReplicationProgresses, replyToProbe.ref)
replyToProbe.expectMsg(GetReplicationProgressesSuccess(Map()))
generateReplicatedEvents()
log.tell(GetReplicationProgresses, replyToProbe.ref)
replyToProbe.expectMsg(GetReplicationProgressesSuccess(Map(remoteLogId -> 17L)))
}
"reply with a failure message if replication fails" in {
val events: Vector[DurableEvent] = Vector(
DurableEvent("boom", emitterIdB, None, Set(), 0L, timestamp(0, 7), remoteLogId, remoteLogId, 7),
DurableEvent("okay", emitterIdB, None, Set(), 0L, timestamp(0, 8), remoteLogId, remoteLogId, 8))
log.tell(ReplicationWrite(events, 8, remoteLogId, VectorTime()), replicatorProbe.ref)
replicatorProbe.expectMsg(ReplicationWriteFailure(IntegrationTestException))
}
"reply with a failure message if replication fails and not update the replication progress map" in {
log.tell(GetReplicationProgresses, replyToProbe.ref)
replyToProbe.expectMsg(GetReplicationProgressesSuccess(Map()))
val events: Vector[DurableEvent] = Vector(
DurableEvent("boom", emitterIdB, None, Set(), 0L, timestamp(0, 7), remoteLogId, remoteLogId, 7),
DurableEvent("okay", emitterIdB, None, Set(), 0L, timestamp(0, 8), remoteLogId, remoteLogId, 8))
log.tell(ReplicationWrite(events, 8, remoteLogId, VectorTime()), replicatorProbe.ref)
replicatorProbe.expectMsg(ReplicationWriteFailure(IntegrationTestException))
log.tell(GetReplicationProgresses, replyToProbe.ref)
replyToProbe.expectMsg(GetReplicationProgressesSuccess(Map()))
}
"replay events from scratch" in {
generateEmittedEvents()
log.tell(Replay(1L, None, 0), replyToProbe.ref)
replyToProbe.expectMsg(ReplaySuccess(generatedEmittedEvents, generatedEmittedEvents.last.localSequenceNr, 0))
}
"replay events in batches" in {
generateEmittedEvents()
generateEmittedEvents()
log.tell(Replay(1L, 2, None, 0), replyToProbe.ref)
replyToProbe.expectMsg(ReplaySuccess(generatedEmittedEvents.slice(0, 2), generatedEmittedEvents(1).localSequenceNr, 0))
log.tell(Replay(generatedEmittedEvents(1).localSequenceNr + 1L, 2, None, 0), replyToProbe.ref)
replyToProbe.expectMsg(ReplaySuccess(generatedEmittedEvents.slice(2, 4), generatedEmittedEvents(3).localSequenceNr, 0))
log.tell(Replay(generatedEmittedEvents(3).localSequenceNr + 1L, 2, None, 0), replyToProbe.ref)
replyToProbe.expectMsg(ReplaySuccess(generatedEmittedEvents.slice(4, 6), generatedEmittedEvents(5).localSequenceNr, 0))
}
"replay events from a custom position" in {
generateEmittedEvents()
log.tell(Replay(3L, None, 0), replyToProbe.ref)
replyToProbe.expectMsg(ReplaySuccess(generatedEmittedEvents.slice(2, 3), generatedEmittedEvents(2).localSequenceNr, 0))
// custom position > last sequence number
log.tell(Replay(5L, None, 0), replyToProbe.ref)
replyToProbe.expectMsg(ReplaySuccess(Nil, 4L, 0))
}
"replay events from the default log if request aggregateId is not defined" in {
generateEmittedEvents(customDestinationAggregateIds = Set("a1"))
log.tell(Replay(1L, None, 0), replyToProbe.ref)
replyToProbe.expectMsg(ReplaySuccess(generatedEmittedEvents, generatedEmittedEvents.last.localSequenceNr, 0))
}
"replay events from the index if request aggregateId is defined" in {
generateEmittedEvents(customDestinationAggregateIds = Set("a1"))
log.tell(Replay(1L, None, Some("a1"), 0), replyToProbe.ref)
replyToProbe.expectMsg(ReplaySuccess(generatedEmittedEvents, generatedEmittedEvents.last.localSequenceNr, 0))
}
"replay events from the index with proper isolation" in {
generateEmittedEvents(customDestinationAggregateIds = Set("a1"))
generateEmittedEvents(customDestinationAggregateIds = Set("a2"))
log.tell(Replay(1L, None, Some("a1"), 0), replyToProbe.ref)
replyToProbe.expectMsg(ReplaySuccess(generatedEmittedEvents.slice(0, 3), generatedEmittedEvents(2).localSequenceNr, 0))
}
"replay events from the index and from a custom position" in {
generateEmittedEvents(customDestinationAggregateIds = Set("a1"))
generateEmittedEvents(customDestinationAggregateIds = Set("a2"))
log.tell(Replay(2L, None, Some("a1"), 0), replyToProbe.ref)
replyToProbe.expectMsg(ReplaySuccess(generatedEmittedEvents.slice(1, 3), generatedEmittedEvents(2).localSequenceNr, 0))
log.tell(Replay(5L, None, Some("a1"), 0), replyToProbe.ref)
replyToProbe.expectMsg(ReplaySuccess(Nil, 4L, 0))
log.tell(Replay(2L, None, Some("a2"), 0), replyToProbe.ref)
replyToProbe.expectMsg(ReplaySuccess(generatedEmittedEvents.slice(3, 6), generatedEmittedEvents(5).localSequenceNr, 0))
log.tell(Replay(5L, None, Some("a2"), 0), replyToProbe.ref)
replyToProbe.expectMsg(ReplaySuccess(generatedEmittedEvents.slice(4, 6), generatedEmittedEvents(5).localSequenceNr, 0))
}
"not replay events with non-matching aggregateId if request aggregateId is defined" in {
generateEmittedEvents(customDestinationAggregateIds = Set("a1"))
log.tell(Replay(1L, None, Some("a2"), 0), replyToProbe.ref)
replyToProbe.expectMsg(ReplaySuccess(Nil, 0L, 0))
}
"reply with a failure message if replay fails" in {
log.tell(Replay(ErrorSequenceNr, None, 0), replyToProbe.ref)
replyToProbe.expectMsg(ReplayFailure(IntegrationTestException, 0))
}
"replication-read local events" in {
generateEmittedEvents()
log.tell(ReplicationRead(1, Int.MaxValue, Int.MaxValue, NoFilter, UndefinedLogId, dl, VectorTime()), replyToProbe.ref)
replyToProbe.expectMsg(ReplicationReadSuccess(generatedEmittedEvents, 1, 3, UndefinedLogId, VectorTime(logId -> 3L)))
}
"replication-read local and replicated events" in {
generateEmittedEvents()
generateReplicatedEvents()
log.tell(ReplicationRead(1, Int.MaxValue, Int.MaxValue, NoFilter, UndefinedLogId, dl, VectorTime()), replyToProbe.ref)
replyToProbe.expectMsg(ReplicationReadSuccess(generatedEmittedEvents ++ generatedReplicatedEvents, 1, 6, UndefinedLogId, VectorTime(logId -> 3L, remoteLogId -> 9L)))
}
"replication-read events with a batch size limit" in {
generateEmittedEvents()
log.tell(ReplicationRead(1, 2, Int.MaxValue, NoFilter, UndefinedLogId, dl, VectorTime()), replyToProbe.ref)
replyToProbe.expectMsg(ReplicationReadSuccess(generatedEmittedEvents.take(2), 1, 2, UndefinedLogId, VectorTime(logId -> 3L)))
log.tell(ReplicationRead(1, 0, Int.MaxValue, NoFilter, UndefinedLogId, dl, VectorTime()), replyToProbe.ref)
replyToProbe.expectMsg(ReplicationReadSuccess(Nil, 1, 0, UndefinedLogId, VectorTime(logId -> 3L)))
}
"replication-read events with a scan limit" in {
generateEmittedEvents()
log.tell(ReplicationRead(1, Int.MaxValue, 2, NoFilter, UndefinedLogId, dl, VectorTime()), replyToProbe.ref)
replyToProbe.expectMsg(ReplicationReadSuccess(generatedEmittedEvents.take(2), 1, 2, UndefinedLogId, VectorTime(logId -> 3L)))
log.tell(ReplicationRead(1, Int.MaxValue, 0, NoFilter, UndefinedLogId, dl, VectorTime()), replyToProbe.ref)
replyToProbe.expectMsg(ReplicationReadSuccess(Nil, 1, 0, UndefinedLogId, VectorTime(logId -> 3L)))
}
"replication-read events from a custom position" in {
generateEmittedEvents()
log.tell(ReplicationRead(2, Int.MaxValue, Int.MaxValue, NoFilter, UndefinedLogId, dl, VectorTime()), replyToProbe.ref)
replyToProbe.expectMsg(ReplicationReadSuccess(generatedEmittedEvents.drop(1), 2, 3, UndefinedLogId, VectorTime(logId -> 3L)))
}
"replication-read events from a custom position with a batch size limit" in {
generateEmittedEvents()
log.tell(ReplicationRead(2, 1, Int.MaxValue, NoFilter, UndefinedLogId, dl, VectorTime()), replyToProbe.ref)
replyToProbe.expectMsg(ReplicationReadSuccess(generatedEmittedEvents.slice(1, 2), 2, 2, UndefinedLogId, VectorTime(logId -> 3L)))
}
"replication-read events from a custom position with a scan limit" in {
generateEmittedEvents()
log.tell(ReplicationRead(2, Int.MaxValue, 1, NoFilter, UndefinedLogId, dl, VectorTime()), replyToProbe.ref)
replyToProbe.expectMsg(ReplicationReadSuccess(generatedEmittedEvents.slice(1, 2), 2, 2, UndefinedLogId, VectorTime(logId -> 3L)))
}
"replication-read events with with a custom filter" in {
generateEmittedEvents()
generateReplicatedEvents()
log.tell(ReplicationRead(1, Int.MaxValue, Int.MaxValue, new ProcessIdFilter(remoteLogId), UndefinedLogId, dl, VectorTime()), replyToProbe.ref)
replyToProbe.expectMsg(ReplicationReadSuccess(generatedReplicatedEvents, 1, 6, UndefinedLogId, VectorTime(logId -> 3L, remoteLogId -> 9L)))
log.tell(ReplicationRead(1, Int.MaxValue, Int.MaxValue, new ProcessIdFilter(logId), UndefinedLogId, dl, VectorTime()), replyToProbe.ref)
replyToProbe.expectMsg(ReplicationReadSuccess(generatedEmittedEvents, 1, 6, UndefinedLogId, VectorTime(logId -> 3L, remoteLogId -> 9L)))
}
"limit replication read progress to given scan limit" in {
generateEmittedEvents()
log.tell(ReplicationRead(1, Int.MaxValue, 1, new ProcessIdFilter(remoteLogId), UndefinedLogId, dl, VectorTime()), replyToProbe.ref)
replyToProbe.expectMsg(ReplicationReadSuccess(generatedReplicatedEvents, 1, 1, UndefinedLogId, VectorTime(logId -> 3L)))
}
"not replication-read events from index" in {
generateEmittedEvents(customDestinationAggregateIds = Set("a1"))
generateEmittedEvents(customDestinationAggregateIds = Set())
log.tell(ReplicationRead(1, Int.MaxValue, Int.MaxValue, NoFilter, UndefinedLogId, dl, VectorTime()), replyToProbe.ref)
replyToProbe.expectMsg(ReplicationReadSuccess(generatedEmittedEvents, 1, 6, UndefinedLogId, VectorTime(logId -> 6L)))
}
"reply with a failure message if replication-read fails" in {
log.tell(ReplicationRead(ErrorSequenceNr, Int.MaxValue, Int.MaxValue, NoFilter, UndefinedLogId, dl, VectorTime()), replyToProbe.ref)
replyToProbe.expectMsg(ReplicationReadFailure(ReplicationReadSourceException(IntegrationTestException.getMessage), UndefinedLogId))
}
"recover the current sequence number on (re)start" in {
generateEmittedEvents()
log.tell(GetEventLogClock, replyToProbe.ref)
replyToProbe.expectMsgType[GetEventLogClockSuccess].clock.sequenceNr should be(3L)
log ! "boom"
log.tell(GetEventLogClock, replyToProbe.ref)
replyToProbe.expectMsgType[GetEventLogClockSuccess].clock.sequenceNr should be(3L)
}
"recover an adjusted sequence number on restart" in {
val evt = DurableEvent("a", emitterIdA, processId = logId, vectorTimestamp = VectorTime(logId -> 5L), localLogId = logId, localSequenceNr = 1)
registerCollaborator(aggregateId = None, collaborator = replyToProbe)
log ! ReplicationWrite(List(evt), 1, remoteLogId, VectorTime())
replyToProbe.expectMsgType[Written]
(log ? AdjustEventLogClock).await
log ! "boom"
(log ? GetEventLogClock).mapTo[GetEventLogClockSuccess].await.clock.sequenceNr should be(5L)
}
"recover the replication progress on (re)start" in {
log.tell(SetReplicationProgress("x", 17), replyToProbe.ref)
replyToProbe.expectMsg(SetReplicationProgressSuccess("x", 17))
log.tell(SetReplicationProgress("y", 19), replyToProbe.ref)
replyToProbe.expectMsg(SetReplicationProgressSuccess("y", 19))
log.tell(GetReplicationProgresses, replyToProbe.ref)
replyToProbe.expectMsg(GetReplicationProgressesSuccess(Map("x" -> 17, "y" -> 19)))
log ! "boom"
log.tell(GetReplicationProgresses, replyToProbe.ref)
replyToProbe.expectMsg(GetReplicationProgressesSuccess(Map("x" -> 17, "y" -> 19)))
}
"update the replication progress if last read sequence nr > last replicated sequence nr" in {
log.tell(GetReplicationProgresses, replyToProbe.ref)
replyToProbe.expectMsg(GetReplicationProgressesSuccess(Map()))
writeReplicationProgress(19, 19)
log.tell(GetReplicationProgresses, replyToProbe.ref)
replyToProbe.expectMsg(GetReplicationProgressesSuccess(Map(EventLogSpec.remoteLogId -> 19L)))
}
"update an event's system timestamp" in {
log ! Write(List(event("a").copy(systemTimestamp = 3L)), system.deadLetters, replyToProbe.ref, 0, 0)
replyToProbe.expectMsgType[WriteSuccess].events.head.systemTimestamp should be(0L)
}
"update an emitted event's process id and vector timestamp during if the process id is not defined" in {
val evt = DurableEvent("a", emitterIdA, processId = UndefinedLogId)
val exp = DurableEvent("a", emitterIdA, processId = logId, vectorTimestamp = VectorTime(logId -> 1L), localLogId = logId, localSequenceNr = 1)
log ! Write(List(evt), system.deadLetters, replyToProbe.ref, 0, 0)
replyToProbe.expectMsgType[WriteSuccess].events.head should be(exp)
}
"not update an emitted event's process id and vector timestamp during if the process id is defined" in {
val evt = DurableEvent("a", emitterIdA, processId = emitterIdA, vectorTimestamp = VectorTime(emitterIdA -> 1L))
val exp = DurableEvent("a", emitterIdA, processId = emitterIdA, vectorTimestamp = VectorTime(emitterIdA -> 1L), localLogId = logId, localSequenceNr = 1)
log ! Write(List(evt), system.deadLetters, replyToProbe.ref, 0, 0)
replyToProbe.expectMsgType[WriteSuccess].events.head should be(exp)
}
"update a replicated event's process id and vector timestamp during if the process id is not defined" in {
val evt = DurableEvent("a", emitterIdA, processId = UndefinedLogId, vectorTimestamp = VectorTime(remoteLogId -> 1L))
val exp = DurableEvent("a", emitterIdA, processId = logId, vectorTimestamp = VectorTime(remoteLogId -> 1L, logId -> 1L), localLogId = logId, localSequenceNr = 1)
registerCollaborator(aggregateId = None, collaborator = replyToProbe)
log ! ReplicationWrite(List(evt), 5, remoteLogId, VectorTime())
replyToProbe.expectMsgType[Written].event should be(exp)
}
"not update a replicated event's process id and vector timestamp during if the process id is defined" in {
val evt = DurableEvent("a", emitterIdA, processId = emitterIdA, vectorTimestamp = VectorTime(emitterIdA -> 1L))
val exp = DurableEvent("a", emitterIdA, processId = emitterIdA, vectorTimestamp = VectorTime(emitterIdA -> 1L), localLogId = logId, localSequenceNr = 1)
registerCollaborator(aggregateId = None, collaborator = replyToProbe)
log ! ReplicationWrite(List(evt), 5, remoteLogId, VectorTime())
replyToProbe.expectMsgType[Written].event should be(exp)
}
"not write events to the target log that are in causal past of the target log" in {
val evt1 = DurableEvent("i", emitterIdB, vectorTimestamp = timestamp(0, 7), processId = remoteLogId)
val evt2 = DurableEvent("j", emitterIdB, vectorTimestamp = timestamp(0, 8), processId = remoteLogId)
val evt3 = DurableEvent("k", emitterIdB, vectorTimestamp = timestamp(0, 9), processId = remoteLogId)
registerCollaborator(aggregateId = None, collaborator = replyToProbe)
log ! ReplicationWrite(List(evt1, evt2), 5, remoteLogId, VectorTime())
log ! ReplicationWrite(List(evt2, evt3), 6, remoteLogId, VectorTime())
replyToProbe.expectMsgType[Written].event.payload should be("i")
replyToProbe.expectMsgType[Written].event.payload should be("j")
replyToProbe.expectMsgType[Written].event.payload should be("k")
}
"not read events from the source log that are in causal past of the target log (using the target time from the request)" in {
generateEmittedEvents()
log.tell(ReplicationRead(1, Int.MaxValue, Int.MaxValue, NoFilter, remoteLogId, dl, timestamp(1)), replyToProbe.ref)
replyToProbe.expectMsgType[ReplicationReadSuccess].events.map(_.payload) should be(Seq("a-2", "a-3"))
}
"not read events from the source log that are in causal past of the target log (using the target time from the cache)" in {
generateEmittedEvents()
log ! ReplicationWrite(Nil, 5, remoteLogId, timestamp(2)) // update time cache
log.tell(ReplicationRead(1, Int.MaxValue, Int.MaxValue, NoFilter, remoteLogId, dl, timestamp(1)), replyToProbe.ref)
replyToProbe.expectMsgType[ReplicationReadSuccess].events.map(_.payload) should be(Seq("a-3"))
}
"delete all events when requested sequence nr is higher than current" in {
generateEmittedEvents()
(log ? Delete(generatedEmittedEvents(2).localSequenceNr + 1)).await
log.tell(Replay(1L, None, 0), replyToProbe.ref)
replyToProbe.expectMsg(ReplaySuccess(Nil, 3L, 0))
}
"not replay deleted events" in {
generateEmittedEvents()
(log ? Delete(generatedEmittedEvents(1).localSequenceNr)).await
log.tell(Replay(1L, None, 0), replyToProbe.ref)
replyToProbe.expectMsg(ReplaySuccess(generatedEmittedEvents.slice(2, 3), generatedEmittedEvents(2).localSequenceNr, 0))
}
"not replay deleted events from an index" in {
generateEmittedEvents(customDestinationAggregateIds = Set("a"))
(log ? Delete(generatedEmittedEvents(1).localSequenceNr)).await
log.tell(Replay(1L, None, Some("a"), 0), replyToProbe.ref)
replyToProbe.expectMsg(ReplaySuccess(generatedEmittedEvents.slice(2, 3), generatedEmittedEvents(2).localSequenceNr, 0))
}
"not delete future events when requested sequence nr is higher than current" in {
(log ? Delete(10)).await
generateEmittedEvents()
log.tell(Replay(1L, None, 0), replyToProbe.ref)
replyToProbe.expectMsg(ReplaySuccess(generatedEmittedEvents, generatedEmittedEvents.last.localSequenceNr, 0))
}
"not mark already deleted events as not deleted" in {
generateEmittedEvents()
log ! Delete(generatedEmittedEvents(2).localSequenceNr)
(log ? Delete(generatedEmittedEvents(1).localSequenceNr)).await
log.tell(Replay(1L, None, 0), replyToProbe.ref)
replyToProbe.expectMsg(ReplaySuccess(Nil, 3L, 0))
}
}
}
| ianclegg/eventuate | eventuate-core/src/it/scala/com/rbmhtechnology/eventuate/log/EventLogSpec.scala | Scala | apache-2.0 | 37,420 |
package beer.learning
import java.util.concurrent.ConcurrentHashMap
import scala.io.Source
import beer.data.judgments.NonMappedFeaturesPair
import java.io.PrintWriter
import java.util.TreeSet
import scala.collection.JavaConversions._
import beer.data.judgments.FeaturesPair
/**
* @author milos
*/
/**
 * Maps string feature names to dense integer indices so that sparse
 * (name, value) feature lists can be converted into fixed-size vectors.
 *
 * @param mapping feature name -> column index, covering every known feature
 */
class FeatureNameMapping (mapping : ConcurrentHashMap[String, Int]) {

  type RawFeatureMap = List[(String, Double)]
  type FeatureMap = Array[Double]

  // if #features is large this should be replaced by sparse vector
  def mapNames(x: RawFeatureMap): FeatureMap = {
    val y = new Array[Double](mapping.size)
    for ((name, value) <- x) {
      // NOTE(review): an unknown name makes mapping.get return null and this
      // unboxing throws — assumes all names were seen at training time; confirm
      y(mapping.get(name)) = value
    }
    y
  }

  /** Persists the mapping as "index name" lines, one feature per line. */
  def saveModel(filename: String): Unit = {
    val featureNames = new Array[String](mapping.size)
    for ((name, index) <- mapping) {
      featureNames(index) = name
    }
    val pw = new PrintWriter(filename)
    try {
      for (index <- featureNames.indices) {
        pw.println(index + " " + featureNames(index))
      }
    } finally {
      pw.close() // always release the file handle, even if a write fails
    }
  }

  /** Maps both sides of a ranked (winner, loser) pair into dense vectors. */
  def mapWMTpair(pair: NonMappedFeaturesPair): FeaturesPair = {
    val mWinner = this.mapNames(pair.winner)
    val mLoser = this.mapNames(pair.loser)
    new FeaturesPair(mWinner, mLoser)
  }
}
object FeatureNameMapping {

  /**
   * Loads a mapping previously written by [[FeatureNameMapping.saveModel]].
   * Each line must be of the form "index name".
   */
  def loadModel(filename: String): FeatureNameMapping = {
    val params = new ConcurrentHashMap[String, Int]()
    val source = Source.fromFile(filename)
    try {
      for (line <- source.getLines) {
        val fields = line.split(" ")
        assert(fields.size == 2)
        params.put(fields(1), fields(0).toInt)
      }
    } finally {
      source.close() // the original leaked the underlying file handle
    }
    new FeatureNameMapping(params)
  }

  /**
   * Builds a mapping from all feature names observed in the training pairs.
   * Names are sorted so the name -> index assignment is deterministic.
   */
  def trainModelFromWMTpairs(pairs: List[NonMappedFeaturesPair]): FeatureNameMapping = {
    val allFeatureNames = scala.collection.mutable.Set[String]()
    for (pair: NonMappedFeaturesPair <- pairs) {
      pair.winner.map { _._1 }.foreach { allFeatureNames.add }
      pair.loser.map { _._1 }.foreach { allFeatureNames.add }
    }
    val mapping = new ConcurrentHashMap[String, Int]()
    for ((name, index) <- allFeatureNames.toList.sorted.zipWithIndex) {
      mapping.put(name, index)
    }
    new FeatureNameMapping(mapping)
  }
}
| qingsongma/blend | tools/beer_2.0/src/beer/learning/FeatureNameMapping.scala | Scala | gpl-3.0 | 2,140 |
package com.productfoundry.akka.cqrs.publish
import akka.actor._
import com.productfoundry.akka.cqrs.publish.FanoutPublisher.PublicationHandler._
import com.productfoundry.akka.cqrs.publish.FanoutPublisher._
import com.productfoundry.akka.messaging.{ConfirmDelivery, Confirmable}
import scala.concurrent.duration._
import scala.language.existentials
/**
 * Fans a [[Confirmable]] out to all current subscribers and tracks pending
 * confirmations via one child [[FanoutPublisher.PublicationHandler]] per message.
 */
class FanoutPublisher(val timeout: Duration = 30.minutes) extends Actor with ActorLogging {

  // actors that should receive every published confirmable
  private var subscribers: Set[ActorPath] = Set.empty

  // one in-flight handler per confirmable, keyed by the message itself
  private var publishersByConfirmable: Map[Confirmable, ActorRef] = Map.empty

  override def receive: Receive = {

    case Subscribe(subscriber) =>
      subscribers += subscriber

    case Unsubscribe(subscriber) =>
      subscribers -= subscriber

    case confirmable: Confirmable =>
      // first delivery spawns a handler capturing the current subscriber set;
      // a redelivery of the same confirmable just pokes the existing handler
      publishersByConfirmable.get(confirmable).fold {
        val publisher = context.actorOf(Props(classOf[PublicationHandler], confirmable, subscribers, timeout))
        publishersByConfirmable = publishersByConfirmable.updated(confirmable, publisher)
      } { publisher =>
        publisher ! PublicationHandler.RedeliverUnconfirmed
      }

    case Passivate(confirmable) =>
      // the handler timed out waiting for confirmations; drop and stop it
      log.warning("Timeout handling: {}", confirmable)
      publishersByConfirmable = publishersByConfirmable - confirmable
      sender() ! PoisonPill
  }
}
object FanoutPublisher {

  /** Registers an actor path to receive future publications. */
  case class Subscribe(subscriber: ActorPath)

  /** Removes a previously registered subscriber. */
  case class Unsubscribe(subscriber: ActorPath)

  private class PublicationHandler(confirmable: Confirmable, destinations: Set[ActorPath], timeout: Duration) extends Actor {

    // pending deliveries: each destination gets a unique delivery id so
    // incoming ConfirmDelivery messages can be matched to one destination
    private var destinationsByDeliveryId: Map[Long, ActorPath] = Map(
      destinations.toSeq.zipWithIndex.map {
        case (destination, deliveryId) => deliveryId.toLong -> destination
      }: _*
    )

    override def preStart(): Unit = {
      publishUnconfirmed()
      context.setReceiveTimeout(timeout)
    }

    override def receive: Receive = {

      case ConfirmDelivery(deliveryId) =>
        destinationsByDeliveryId = destinationsByDeliveryId - deliveryId
        confirmIfCompleted()

      case RedeliverUnconfirmed =>
        publishUnconfirmed()

      case ReceiveTimeout =>
        // no confirmations for `timeout`; ask the parent to remove this handler
        context.parent ! Passivate(confirmable)
    }

    /** Sends the confirmable to every destination that has not confirmed yet. */
    private def publishUnconfirmed(): Unit = {
      destinationsByDeliveryId.foreach { case (deliveryId, destinationPath) =>
        context.system.actorSelection(destinationPath) ! confirmable.requestConfirmation(deliveryId)
      }

      confirmIfCompleted()
    }

    /** Once every destination confirmed, confirm the original message and stop. */
    private def confirmIfCompleted(): Unit = {
      if (destinationsByDeliveryId.isEmpty) {
        confirmable.confirmIfRequested()
        context.stop(self)
      }
    }
  }

  case object PublicationHandler {
    case object RedeliverUnconfirmed
    case class Passivate(confirmable: Confirmable)
  }
}
| Product-Foundry/akka-cqrs | core/src/main/scala/com/productfoundry/akka/cqrs/publish/FanoutPublisher.scala | Scala | apache-2.0 | 2,819 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions.aggregate
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.types._
/**
* Returns the last value of `child` for a group of rows. If the last value of `child`
* is `null`, it returns `null` (respecting nulls). Even if [[Last]] is used on an already
* sorted column, if we do partial aggregation and final aggregation (when mergeExpression
* is used) its result will not be deterministic (unless the input table is sorted and has
* a single partition, and we use a single reducer to do the aggregation.).
*/
@ExpressionDescription(
  usage = """
    _FUNC_(expr[, isIgnoreNull]) - Returns the last value of `expr` for a group of rows.
      If `isIgnoreNull` is true, returns only non-null values.
  """)
case class Last(child: Expression, ignoreNullsExpr: Expression) extends DeclarativeAggregate {

  def this(child: Expression) = this(child, Literal.create(false, BooleanType))

  // Second argument must be a literal boolean, resolved once at planning time.
  private val ignoreNulls: Boolean = ignoreNullsExpr match {
    case Literal(b: Boolean, BooleanType) => b
    case _ =>
      // fixed: the message used to say "First" (copied from the First aggregate)
      throw new AnalysisException("The second argument of Last should be a boolean literal.")
  }

  override def children: Seq[Expression] = child :: ignoreNullsExpr :: Nil

  override def nullable: Boolean = true

  // Last is not a deterministic function.
  override def deterministic: Boolean = false

  // Return data type.
  override def dataType: DataType = child.dataType

  // Expected input data type.
  override def inputTypes: Seq[AbstractDataType] = Seq(AnyDataType, BooleanType)

  private lazy val last = AttributeReference("last", child.dataType)()

  private lazy val valueSet = AttributeReference("valueSet", BooleanType)()

  override lazy val aggBufferAttributes: Seq[AttributeReference] = last :: valueSet :: Nil

  override lazy val initialValues: Seq[Literal] = Seq(
    /* last = */ Literal.create(null, child.dataType),
    /* valueSet = */ Literal.create(false, BooleanType)
  )

  override lazy val updateExpressions: Seq[Expression] = {
    if (ignoreNulls) {
      Seq(
        /* last = */ If(IsNull(child), last, child),
        /* valueSet = */ Or(valueSet, IsNotNull(child))
      )
    } else {
      Seq(
        /* last = */ child,
        /* valueSet = */ Literal.create(true, BooleanType)
      )
    }
  }

  override lazy val mergeExpressions: Seq[Expression] = {
    // Prefer the right hand expression if it has been set.
    Seq(
      /* last = */ If(valueSet.right, last.right, last.left),
      /* valueSet = */ Or(valueSet.right, valueSet.left)
    )
  }

  override lazy val evaluateExpression: AttributeReference = last

  // fixed: the interpolated `if` previously had no `else`, so the expression
  // was typed Any and rendered "last(child)()" when ignoreNulls was false
  override def toString: String = s"last($child)${if (ignoreNulls) " ignore nulls" else ""}"
}
| Panos-Bletsos/spark-cost-model-optimizer | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Last.scala | Scala | apache-2.0 | 3,595 |
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.contrib
import _root_.algebra.{Eq, Semigroup => ASemigroup, Monoid => AMonoid, Order => AOrder}
import _root_.scalaz.{Equal, Semigroup, Monoid, Order, Ordering}
package object algebra extends AlgebraInstancesLowPriority {

  /** Derives a scalaz [[Order]] from an algebra `Order`. */
  implicit def algebraOrder[A](implicit A: AOrder[A]): Order[A] =
    Order.order { (a, b) =>
      Ordering.fromInt(A.compare(a, b))
    }

  /** Derives a scalaz [[Monoid]] from an algebra `Monoid`. */
  implicit def algebraMonoid[A](implicit A: AMonoid[A]): Monoid[A] =
    Monoid.instance((a, b) => A.combine(a, b), A.empty)
}
sealed abstract class AlgebraInstancesLowPriority {

  /** Derives a scalaz [[Equal]] from an algebra `Eq`. */
  implicit def algebraEqual[A](implicit A: Eq[A]): Equal[A] =
    Equal.equal((a, b) => A.eqv(a, b))

  /** Derives a scalaz [[Semigroup]] from an algebra `Semigroup`. */
  implicit def algebraSemigroup[A](implicit A: ASemigroup[A]): Semigroup[A] =
    Semigroup.instance((a, b) => A.combine(a, b))
}
| drostron/quasar | foundation/src/main/scala/quasar/contrib/algebra/package.scala | Scala | apache-2.0 | 1,368 |
package com.github.chengpohi.domain.query
import com.github.chengpohi.domain.user.User
import com.github.chengpohi.infrastructure.Repository
import com.github.chengpohi.infrastructure.util.AdjointUtils._
import com.github.chengpohi.repl.EQLInterpreter
import fastparse.core.Parsed.{Failure, Success}
trait CommandInterceptor {
  val repository: Repository
  val interpreter: EQLInterpreter

  /**
   * Dispatches a raw user command:
   *  - "go <query>"   -> navigation response backed by a repository query
   *  - valid EQL      -> rendered interpreter output
   *  - anything else  -> plain bookmark query against the repository
   */
  def intercept(us: User, command: String): QueryResponse = {
    implicit val user: User = us
    if (command.startsWith("go ")) {
      // stripPrefix removes only the leading "go ": the previous
      // replace("go ", "") also mangled later occurrences of "go "
      // inside the query itself (e.g. "go go board" became "board").
      goto(command.stripPrefix("go "))
    } else {
      val parsedResult = interpreter.parse.apply(command)
      parsedResult match {
        case Success(_, _) =>
          val text = interpreter.render(parsedResult)
          QueryResponse(Operation.TEXT_OP, text.safePretty)
        case Failure(_, _, _) =>
          // not parsable as EQL: treat it as a free-text bookmark query
          QueryResponse(Operation.BOOKMARK_OP, repository.query(command))
      }
    }
  }

  /** Runs the query against the repository and wraps it as a GOTO response. */
  def goto(query: String)(implicit user: User): QueryResponse = {
    val maps = repository.query(query)
    QueryResponse(Operation.GOTO_OP, maps)
  }
}
object CommandInterceptor {

  /** Builds a [[CommandInterceptor]] backed by the given repository and interpreter. */
  def apply(rep: Repository, eqlInterpreter: EQLInterpreter): CommandInterceptor = {
    new CommandInterceptor {
      override val repository: Repository = rep
      override val interpreter: EQLInterpreter = eqlInterpreter
    }
  }
}
| chengpohi/coolmarks | modules/core-service/src/main/scala/com.github.chengpohi/domain/query/CommandInterceptor.scala | Scala | apache-2.0 | 1,318 |
/**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bdgenomics.adam.cli
import org.bdgenomics.formats.avro.Genotype
import org.bdgenomics.adam.rdd.ADAMContext._
import org.kohsuke.args4j.{ Option => Args4jOption, Argument }
import org.apache.spark.rdd.RDD
import org.apache.spark.{ Logging, SparkContext }
import org.apache.hadoop.mapreduce.Job
import java.io.File
import org.bdgenomics.adam.models.SequenceDictionary
import scala.Option
object ADAM2Vcf extends ADAMCommandCompanion {
  // Name and help text shown by the CLI driver.
  val commandName = "adam2vcf"
  val commandDescription = "Convert an ADAM variant to the VCF ADAM format"

  /** Parses the command line and builds the command instance. */
  def apply(cmdLine: Array[String]) = {
    val parsedArgs = Args4j[ADAM2VcfArgs](cmdLine)
    new ADAM2Vcf(parsedArgs)
  }
}
// Args4j-populated argument holder for the adam2vcf command.
class ADAM2VcfArgs extends Args4jBase with ParquetArgs {
  // Optional sequence dictionary used for contig translation when provided.
  @Args4jOption(required = false, name = "-dict", usage = "Reference dictionary")
  var dictionaryFile: File = _
  // Positional arg 0: Parquet genotype input path.
  @Argument(required = true, metaVar = "ADAM", usage = "The ADAM variant files to convert", index = 0)
  var adamFile: String = _
  // Positional arg 1: output location for the VCF data.
  @Argument(required = true, metaVar = "VCF", usage = "Location to write VCF data", index = 1)
  var outputPath: String = null
  // <= 0 means "do not coalesce" (see ADAM2Vcf.run).
  @Args4jOption(required = false, name = "-coalesce", usage = "Set the number of partitions written to the ADAM output directory")
  var coalesce: Int = -1
}
class ADAM2Vcf(val args: ADAM2VcfArgs) extends ADAMSparkCommand[ADAM2VcfArgs] with DictionaryCommand with Logging {
  val companion = ADAM2Vcf

  /**
   * Loads genotypes from the Parquet input, optionally coalesces the RDD, and
   * saves the variant contexts as VCF.
   */
  def run(sc: SparkContext, job: Job): Unit = {
    // Was a `var` but is never reassigned; `val` states the intent.
    val dictionary: Option[SequenceDictionary] = loadSequenceDictionary(args.dictionaryFile)
    if (dictionary.isDefined)
      log.info("Using contig translation")

    val adamGTs: RDD[Genotype] = sc.loadParquetGenotypes(args.adamFile)

    // Only repartition when explicitly requested (coalesce > 0); shuffle is
    // enabled so the partition count may change in either direction.
    val coalescedRDD = if (args.coalesce > 0) {
      adamGTs.coalesce(args.coalesce, true)
    } else {
      adamGTs
    }
    coalescedRDD.toVariantContext.adamVCFSave(args.outputPath, dict = dictionary)
  }
}
| tomwhite/adam | adam-cli/src/main/scala/org/bdgenomics/adam/cli/ADAM2Vcf.scala | Scala | apache-2.0 | 2,672 |
package example
import common._
object Lists {
  /**
   * This method computes the sum of all elements in the list xs.
   *
   * Implemented with a tail-recursive accumulator loop so that arbitrarily
   * long lists do not overflow the stack (the previous head-plus-recursion
   * form was not tail recursive).
   *
   * @param xs A list of natural numbers
   * @return The sum of all elements in `xs`
   */
  def sum(xs: List[Int]): Int = {
    @scala.annotation.tailrec
    def loop(rest: List[Int], acc: Int): Int = rest match {
      case Nil => acc
      case head :: tail => loop(tail, acc + head)
    }
    loop(xs, 0)
  }

  /**
   * This method returns the largest element in a list of integers. If the
   * list `xs` is empty it throws a `java.util.NoSuchElementException`.
   *
   * Tail recursive, carrying the best value seen so far. The previous version
   * tested `xs.length == 1` at every step, which is O(n) on a `List` and made
   * the whole function O(n^2); it was also not tail recursive.
   *
   * @param xs A list of natural numbers
   * @return The largest element in `xs`
   * @throws java.util.NoSuchElementException if `xs` is an empty list
   */
  def max(xs: List[Int]): Int = {
    @scala.annotation.tailrec
    def loop(rest: List[Int], best: Int): Int = rest match {
      case Nil => best
      case head :: tail => loop(tail, if (head > best) head else best)
    }
    xs match {
      case Nil => throw new java.util.NoSuchElementException("max of empty list")
      case head :: tail => loop(tail, head)
    }
  }
}
| dreikanter/progfun | week1/example/src/main/scala/example/Lists.scala | Scala | cc0-1.0 | 1,723 |
package com.github.cuzfrog.webdriver
import scala.reflect.runtime.{universe => ru}
import scala.tools.reflect.ToolBox
/**
* Created by cuz on 1/17/17.
*/
private object RuntimeCompiler extends Logging {
  // One toolbox instance backed by this class' classloader, reused for every
  // compilation request.
  private val tb = ru.runtimeMirror(getClass.getClassLoader).mkToolBox()

  /** Parses the script source into a compiler tree, logging its MD5 first. */
  private def classDef(src: String) = {
    logger.trace(s"Compile script.... md5 [${MD5(src)}]")
    tb.parse(src)
  }

  /**
   * Compiles and evaluates `src`, casting the result to a `String => _`
   * function.
   *
   * @throws ScalaReflectionException if compilation or evaluation fails
   */
  def compileLogic(src: String): Function[String, _] = try {
    val instance = tb.eval(classDef(src))
    instance.asInstanceOf[Function1[String, _]]
  } catch {
    // Fix: only trap recoverable failures (NonFatal) instead of Throwable, so
    // fatal VM errors still propagate; also carry the cause's message.
    case scala.util.control.NonFatal(e) =>
      e.printStackTrace()
      throw ScalaReflectionException(s"Runtime compilation failed: ${e.getMessage}")
  }
}
| cuzfrog/WebDriverServ | server/src/main/scala/com/github/cuzfrog/webdriver/RuntimeCompiler.scala | Scala | apache-2.0 | 692 |
package uk.gov.homeoffice.amazon.sqs.subscription
import uk.gov.homeoffice.amazon.sqs.Message
package object protocol {
  /** Messages published by SQS subscription actors about processing outcomes. */
  sealed trait Protocol

  /** The given SQS message was processed successfully. */
  case class Processed(message: Message) extends Protocol

  /** Processing the given SQS message failed with `throwable`. */
  case class ProcessingError(throwable: Throwable, message: Message) extends Protocol
}
package ems
import model.Speaker
import ems.converters._
import security.User
import unfiltered.response._
import unfiltered.request._
import unfiltered.directives._
import Directives._
import net.hamnaberg.json.collection._
// REST resources for a session's speakers, composed from unfiltered directives.
// Each handler builds GET/POST alternatives and combines them with `|`.
trait SpeakerResources extends ResourceHelper {
  // GET: list all speakers of a session as a collection+json document.
  // POST: add a new speaker; duplicate emails within the session are rejected.
  def handleSpeakers(eventId: String, sessionId: String)(implicit user: User) = {
    val get = for {
      _ <- GET
      base <- baseURIBuilder
      href <- requestURI
      // 404 when the session does not exist.
      session <- getOrElse(storage.getSession(eventId, sessionId), NotFound)
    } yield {
      val items = session.speakers.map(speakerToItem(base, eventId, sessionId)).toList
      CollectionJsonResponse(JsonCollection(href, Nil, items, Nil, Some(makeTemplate("name", "email", "bio", "zip-code", "tags"))))
    }
    val post = for {
      _ <- POST
      // Only authenticated users may add speakers.
      _ <- authenticated(user)
      base <- baseURIBuilder
      session <- getOrElse(storage.getSession(eventId, sessionId), NotFound)
      // Parse the submitted collection+json template into a new Speaker.
      either <- withTemplate(t => toSpeaker(t, None))
      speaker <- either
    } yield {
      // Email uniqueness is enforced per session.
      val exists = session.speakers.exists(_.email == speaker.email)
      if (exists) {
        BadRequest ~> ResponseString("There already exists a speaker with this email")
      }
      else {
        // Either-style save: failure -> 500 with message, success -> 201 with
        // the new speaker's Location.
        storage.saveSpeaker(eventId, sessionId, speaker).fold(
          ex => InternalServerError ~> ResponseString(ex.getMessage),
          saved => {
            val href = base.segments("events", eventId, "sessions", sessionId, "speakers", saved.id.get).build()
            Created ~> Location(href.toString)
          }
        )
      }
    }
    get | post
  }
  // Single-speaker resource: delegates GET/update/delete to the generic
  // handleObject helper from ResourceHelper.
  def handleSpeaker(eventId: String, sessionId: String, speakerId: String)(implicit user: User) = {
    val speaker = storage.getSpeaker(eventId, sessionId, speakerId)
    for {
      base <- baseURIBuilder
      res <- handleObject(
        speaker,
        (t: Template) => toSpeaker(t, Some(speakerId)),
        storage.saveSpeaker(eventId, sessionId, _: Speaker),
        speakerToItem(base, eventId, sessionId),
        Some((_: Speaker) => storage.removeSpeaker(eventId, sessionId, speakerId))
      )(identity)
    } yield {
      res
    }
  }
  // GET: redirect to the speaker's stored photo binary (404 if none).
  // POST: upload a new photo (image/* only), replacing any existing one.
  def handleSpeakerPhoto(eventId: String, sessionId: String, speakerId: String)(implicit user: User) = {
    val get = for {
      _ <- GET
      base <- baseURIBuilder
      speaker <- getOrElse(storage.getSpeaker(eventId, sessionId, speakerId), NotFound)
    } yield {
      speaker.photo.map(i => Redirect(base.segments("binary", i.id.get).toString())).getOrElse(NotFound)
    }
    // Accept only image content types; anything else -> 415.
    val imageType = when {
      case RequestContentType(ct) if MIMEType.ImageAll.includes(MIMEType(ct).get) => ct
    }.orElse(UnsupportedMediaType)
    val post = for {
      _ <- POST
      ct <- imageType
      cd <- contentDisposition
      base <- baseURIBuilder
      speaker <- getOrElse(storage.getSpeaker(eventId, sessionId, speakerId), NotFound)
      is <- inputStream
    } yield {
      // Replace semantics: drop the previous photo attachment before saving.
      speaker.photo.foreach(ph => storage.binary.removeAttachment(ph.id.get))
      // Filename comes from Content-Disposition (plain or RFC 5987 filename*).
      val binary = storage.binary.saveAttachment(StreamingAttachment(cd.filename.orElse(cd.filenameSTAR.map(_.filename)).get, None, MIMEType(ct), is))
      storage.updateSpeakerWithPhoto(eventId, sessionId, speakerId, binary).fold(
        ex => InternalServerError ~> ResponseString(ex.getMessage),
        _ => Created ~> Location(base.segments("binary", binary.id.get).toString())
      )
    }
    get | post
  }
}
| chrissearle/ems-redux | src/main/scala/ems/SpeakerResources.scala | Scala | apache-2.0 | 3,435 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// scalastyle:off println
package org.apache.spark.examples
import java.util.concurrent.TimeUnit
import org.apache.spark.sql.SparkSession
object HdfsTest {

  /** Usage: HdfsTest [file] */
  def main(args: Array[String]): Unit = {
    if (args.length < 1) {
      System.err.println("Usage: HdfsTest <file>")
      System.exit(1)
    }
    val spark = SparkSession
      .builder
      .appName("HdfsTest")
      .getOrCreate()
    val file = spark.read.text(args(0)).rdd
    val mapped = file.map(s => s.length).cache()
    // Traverse the cached RDD repeatedly, timing each full pass.
    for (iter <- 1 to 10) {
      val startTimeNs = System.nanoTime()
      // Force evaluation of every element; the computed value is discarded.
      for (x <- mapped) { x + 2 }
      val durationMs = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTimeNs)
      println(s"Iteration $iter took $durationMs ms")
    }
    println(s"File contents: ${file.map(_.toString).take(1).mkString(",").slice(0, 10)}")
    println(s"Returned length(s) of: ${file.map(_.length).sum().toString}")
    spark.stop()
  }
}
// scalastyle:on println
| pgandhi999/spark | examples/src/main/scala/org/apache/spark/examples/HdfsTest.scala | Scala | apache-2.0 | 1,783 |
package de.mirkokoester.luna.play.plugins
import de.mirkokoester.luna.player.PlayerActor
import play.api._
import play.api.libs.concurrent.Akka
import akka.actor._
// Play plugin that hosts the player actor for the application's lifetime.
class LunaPlayerPlugin(app: Application) extends Plugin {
  implicit val application: Application = app
  // Created lazily on first access, inside Play's Akka actor system.
  lazy val player = Akka.system.actorOf(PlayerActor.props, "player")
  override def onStart() = {
    println("started Plugin")
  }
  // No teardown needed; the actor dies with the actor system.
  override def onStop() = {
  }
  override val enabled = true
}
object LunaPlayerPlugin {
  // Resolves the player actor from the running Play application's plugin
  // instance; fails fast if the plugin was not loaded.
  val player: ActorRef = Play.current.plugin[LunaPlayerPlugin]
    .getOrElse(throw new RuntimeException("LunaPlayer plugin not loaded"))
    .player
}
| mkoester/luna-music-player | app/de.mirkokoester.luna/play/plugins/LunaPlayerPlugin.scala | Scala | apache-2.0 | 646 |
/*
* Wire
* Copyright (C) 2016 Wire Swiss GmbH
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.waz.sync.queue
import com.waz.model.sync.{SyncJob, SyncRequest}
import com.waz.model.sync.SyncJob.Priority
import com.waz.model.sync.SyncRequest.{PostAssetStatus, RegisterPushToken}
import com.waz.model.{ConvId, MessageId, PushToken, SyncId}
import com.waz.service.assets2.UploadAssetStatus
import com.waz.specs.AndroidFreeSpec
import com.waz.sync.queue.SyncJobMerger.Merged
import scala.concurrent.duration._
class SyncRequestSpec extends AndroidFreeSpec {
  // Merging two RegisterPushToken jobs yields Merged(job1 carrying job2's
  // request), i.e. the later token supersedes the earlier one.
  scenario("RegisterPushToken") {
    val job1 = SyncJob(SyncId(), RegisterPushToken(PushToken("token")), priority = Priority.High)
    val job2 = SyncJob(SyncId(), RegisterPushToken(PushToken("token2")), priority = Priority.High)
    job1.merge(job2) shouldEqual Merged(job1.copy(request = job2.request))
  }
  // Round trip: encoding a PostAssetStatus and decoding it back must produce
  // an equal request.
  scenario("PostAssetStatus encoding decoding") {
    val request = PostAssetStatus(ConvId(), MessageId(), Some(10.minutes), UploadAssetStatus.Failed)
    SyncRequest.Decoder.apply(SyncRequest.Encoder(request)) shouldEqual request
  }
}
| wireapp/wire-android-sync-engine | zmessaging/src/test/scala/com/waz/sync/queue/SyncRequestSpec.scala | Scala | gpl-3.0 | 1,726 |
import org.geneura.algorithm.evolutionary.MaxOnes._
object MaxOnesF {
  /**
   * Prints the MaxOnes result for two sample boolean vectors.
   *
   * Uses an explicit `main` instead of `extends App` to avoid the
   * DelayedInit initialization-order pitfalls of the App trait.
   */
  def main(args: Array[String]): Unit = {
    println( MaxOnes( Vector(true,false,true,true) ) )
    println( MaxOnes( Vector(true,false,true,true,false,true,true,false,true,true,false,true,true) ) )
  }
}
| JJ/scalEO | src/main/scala/MaxOnesF.scala | Scala | gpl-3.0 | 241 |
/***********************************************************************
* Copyright (c) 2013-2015 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0 which
* accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.kafka
import org.geotools.feature.simple.SimpleFeatureImpl
import org.geotools.filter.identity.FeatureIdImpl
import org.joda.time.Instant
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.locationtech.geomesa.utils.text.WKTUtils
import org.opengis.feature.simple.SimpleFeature
import scala.collection.JavaConversions._
// Fixed test fixtures: four evolving "track" features plus the ordered stream
// of GeoMessages (create/update, delete, clear) a Kafka consumer would see.
object KafkaConsumerTestData {
  val typeName = "track"
  // Streaming feature type: a track id plus a LineString geometry in EPSG:4326.
  val sft = KafkaDataStoreHelper.createStreamingSFT(
    SimpleFeatureTypes.createType(typeName, "trackId:String,*geom:LineString:srid=4326"), "/test")
  // Successive versions of each track; the WKT grows as the track extends.
  val track0v0 = track("track0", "LineString (30 30, 30 30)")
  val track0v1 = track("track0", "LineString (30 30, 35 30)")
  val track0v2 = track("track0", "LineString (30 30, 35 30, 40 34)")
  val track0v3 = track("track0", "LineString (30 30, 35 32, 40 34, 45 36)")
  val track1v0 = track("track1", "LineString (50 20, 50 20)")
  val track1v1 = track("track1", "LineString (50 20, 40 30)")
  val track1v2 = track("track1", "LineString (50 20, 40 30, 30 30)")
  val track2v0 = track("track2", "LineString (30 30, 30 30)")
  val track2v1 = track("track2", "LineString (30 30, 30 25)")
  val track2v2 = track("track2", "LineString (30 30, 30 25, 28 20)")
  val track2v3 = track("track2", "LineString (30 30, 30 25, 25 20, 20 15)")
  val track3v0 = track("track3", "LineString (0 60, 0 60)")
  val track3v1 = track("track3", "LineString (0 60, 10 60)")
  val track3v2 = track("track3", "LineString (0 60, 10 60, 20 55)")
  val track3v3 = track("track3", "LineString (0 60, 10 60, 20 55, 30 40)")
  val track3v4 = track("track3", "LineString (0 60, 10 60, 20 55, 30 40, 30 30)")
  // Message stream ordered by timestamp; the trailing comment on each line is
  // the Kafka offset the message would occupy.
  val messages: Seq[GeoMessage] = Seq(
    // offset
    CreateOrUpdate(new Instant(10993), track0v0), // 0
    CreateOrUpdate(new Instant(11001), track3v0), // 1
    CreateOrUpdate(new Instant(11549), track3v1), // 2
    CreateOrUpdate(new Instant(11994), track0v1), // 3
    CreateOrUpdate(new Instant(11995), track1v0), // 4
    CreateOrUpdate(new Instant(11995), track3v2), // 5
    CreateOrUpdate(new Instant(12998), track1v1), // 6
    CreateOrUpdate(new Instant(13000), track2v0), // 7
    CreateOrUpdate(new Instant(13002), track3v3), // 8
    CreateOrUpdate(new Instant(13002), track0v2), // 9
    CreateOrUpdate(new Instant(13444), track1v2), // 10
    CreateOrUpdate(new Instant(13996), track2v1), // 11
    CreateOrUpdate(new Instant(13999), track3v4), // 12
    CreateOrUpdate(new Instant(14002), track0v3), // 13
    Delete(        new Instant(14005), "track1"), // 14
    Delete(        new Instant(14990), "track3"), // 15
    CreateOrUpdate(new Instant(14999), track2v2), // 16
    Delete(        new Instant(15000), "track0"), // 17
    Clear(         new Instant(16003)),           // 18
    CreateOrUpdate(new Instant(16997), track2v3), // 19
    Delete(        new Instant(17000), "track3")  // 20
  )
  // Builds a SimpleFeature for the given id from a WKT track string; the
  // feature id doubles as the trackId attribute.
  def track(id: String, track: String): SimpleFeature = {
    val geom = WKTUtils.read(track)
    new SimpleFeatureImpl(List[Object](id, geom), sft, new FeatureIdImpl(id))
  }
}
| giserh/geomesa | geomesa-kafka/geomesa-kafka-datastore/src/test/scala/org/locationtech/geomesa/kafka/KafkaConsumerTestData.scala | Scala | apache-2.0 | 3,601 |
/*
* Copyright (c) 2014-2020 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.execution.schedulers
import java.util.concurrent.{CountDownLatch, TimeUnit, TimeoutException}
import minitest.TestSuite
import monix.execution.ExecutionModel.{AlwaysAsyncExecution, Default => DefaultExecutionModel}
import monix.execution.cancelables.SingleAssignCancelable
import monix.execution.exceptions.DummyException
import monix.execution.{Cancelable, Scheduler, UncaughtExceptionReporter}
import scala.concurrent.duration._
import scala.concurrent.{blocking, Await, Promise}
// Shared test suite for monix executor-backed schedulers. Concrete subclasses
// (below) only supply the SchedulerService under test via setup().
abstract class ExecutorSchedulerSuite extends TestSuite[SchedulerService] { self =>
  // Last exception seen by the uncaught-exception reporter, plus a latch that
  // tests arm so they can await its arrival; both guarded by self.synchronized.
  var lastReportedFailure = null: Throwable
  var lastReportedFailureLatch = null: CountDownLatch

  // Reporter installed on every scheduler built by the subclasses: records the
  // failure and releases the latch when a test is waiting, otherwise prints.
  val testsReporter = UncaughtExceptionReporter { ex =>
    self.synchronized {
      lastReportedFailure = ex
      if (lastReportedFailureLatch != null)
        lastReportedFailureLatch.countDown()
      else
        ex.printStackTrace()
    }
  }

  // After each test: shut the scheduler down and verify the full
  // shutdown -> awaitTermination -> isTerminated lifecycle.
  override def tearDown(scheduler: SchedulerService): Unit = {
    try assert(!scheduler.isShutdown)
    finally scheduler.shutdown()
    assert(scheduler.isShutdown, "scheduler.isShutdown")
    val result = scheduler.awaitTermination(10.seconds)
    assert(result, "scheduler.awaitTermination")
    assert(scheduler.isTerminated, "scheduler.isTerminated")
  }

  // Convenience wrapper over Scheduler.scheduleOnce taking a by-name action.
  def scheduleOnce(s: Scheduler, delay: FiniteDuration)(action: => Unit): Cancelable =
    s.scheduleOnce(delay.length, delay.unit, runnableAction(action))

  // The 100ms delay must actually elapse before the action runs.
  test("scheduleOnce with delay") { scheduler =>
    val p = Promise[Long]()
    val startedAt = System.nanoTime()
    scheduleOnce(scheduler, 100.millis) {
      p.success(System.nanoTime())
      ()
    }
    val timeTaken = Await.result(p.future, 3.second)
    assert((timeTaken - startedAt).nanos.toMillis >= 100)
  }

  // Sub-millisecond delays still get executed.
  test("scheduleOnce with delay lower than 1.milli") { scheduler =>
    val p = Promise[Int]()
    scheduleOnce(scheduler, 20.nanos) { p.success(1); () }
    assert(Await.result(p.future, 3.seconds) == 1)
  }

  // Cancelling before the delay elapses prevents the action from running;
  // the promise is expected to time out.
  test("scheduleOnce with delay and cancel") { scheduler =>
    val p = Promise[Int]()
    val task = scheduleOnce(scheduler, 100.millis) { p.success(1); () }
    task.cancel()

    intercept[TimeoutException] {
      Await.result(p.future, 150.millis)
      ()
    }
    ()
  }

  // Fixed-delay repetition: count four ticks, then cancel from inside the task.
  test("schedule with fixed delay") { scheduler =>
    val sub = SingleAssignCancelable()
    val p = Promise[Int]()
    var value = 0

    sub := scheduler.scheduleWithFixedDelay(
      10,
      50,
      TimeUnit.MILLISECONDS,
      runnableAction {
        if (value + 1 == 4) {
          value += 1
          sub.cancel()
          p.success(value)
          ()
        } else if (value < 4) {
          value += 1
        }
      })

    assert(Await.result(p.future, 5.second) == 4)
  }

  // Fixed-rate repetition: same counting pattern as the fixed-delay test.
  test("schedule at fixed rate") { scheduler =>
    val sub = SingleAssignCancelable()
    val p = Promise[Int]()
    var value = 0

    sub := scheduler.scheduleAtFixedRate(
      10,
      50,
      TimeUnit.MILLISECONDS,
      runnableAction {
        if (value + 1 == 4) {
          value += 1
          sub.cancel()
          p.success(value)
          ()
        } else if (value < 4) {
          value += 1
        }
      })

    assert(Await.result(p.future, 5.second) == 4)
  }

  // Trampolined execution runs synchronously on the caller: all 100000
  // recursive submissions complete before loop() returns.
  test("execute local") { scheduler =>
    var result = 0
    def loop(n: Int): Unit =
      scheduler.executeTrampolined { () =>
        result += 1
        if (n - 1 > 0) loop(n - 1)
      }

    val count = 100000
    loop(count)
    assertEquals(result, count)
  }

  // withExecutionModel returns a new scheduler; the original is untouched.
  test("change execution model") { scheduler =>
    val s: Scheduler = scheduler
    assertEquals(s.executionModel, DefaultExecutionModel)
    val s2 = s.withExecutionModel(AlwaysAsyncExecution)
    assertEquals(s.executionModel, DefaultExecutionModel)
    assertEquals(s2.executionModel, AlwaysAsyncExecution)
  }

  // An exception thrown from execute() must reach the uncaught reporter.
  test("reports errors on execute") { scheduler =>
    val latch = new CountDownLatch(1)
    self.synchronized {
      lastReportedFailure = null
      lastReportedFailureLatch = latch
    }

    try {
      val ex = DummyException("dummy")
      scheduler.execute(new Runnable {
        override def run() =
          throw ex
      })

      assert(latch.await(15, TimeUnit.MINUTES), "lastReportedFailureLatch.await")
      self.synchronized(assertEquals(lastReportedFailure, ex))
    } finally {
      // Always restore the shared reporter state for the next test.
      self.synchronized {
        lastReportedFailure = null
        lastReportedFailureLatch = null
      }
    }
  }

  // Same as above, but for exceptions thrown from a delayed task.
  test("reports errors on scheduleOnce") { scheduler =>
    val latch = new CountDownLatch(1)
    self.synchronized {
      lastReportedFailure = null
      lastReportedFailureLatch = latch
    }

    try {
      val ex = DummyException("dummy")
      scheduler.scheduleOnce(
        1,
        TimeUnit.MILLISECONDS,
        new Runnable {
          override def run() =
            throw ex
        })

      assert(latch.await(15, TimeUnit.MINUTES), "lastReportedFailureLatch.await")
      self.synchronized(assertEquals(lastReportedFailure, ex))
    } finally {
      self.synchronized {
        lastReportedFailure = null
        lastReportedFailureLatch = null
      }
    }
  }

  // Lifts a by-name action into a Runnable.
  def runnableAction(f: => Unit): Runnable =
    new Runnable { def run() = f }
}
// Runs the shared suite against a fork-join "computation" scheduler.
object ComputationSchedulerSuite extends ExecutorSchedulerSuite {
  def setup(): SchedulerService =
    monix.execution.Scheduler
      .forkJoin(name = "monix-tests-computation", parallelism = 4, maxThreads = 256, reporter = testsReporter)
}
// Fork-join scheduler suite, plus a test that `blocking {}` integrates with
// Scala's BlockContext (so blocked threads don't starve the pool).
object ForkJoinSchedulerSuite extends ExecutorSchedulerSuite {
  def setup(): SchedulerService =
    monix.execution.Scheduler
      .forkJoin(name = "monix-tests-forkjoin", parallelism = 4, maxThreads = 256, reporter = testsReporter)

  test("integrates with Scala's BlockContext") { scheduler =>
    // 100 tasks all block inside `blocking`; with only 4 core threads this can
    // only succeed if the pool compensates by spawning extra threads.
    val threadsCount = 100
    val latch = new CountDownLatch(100)
    val finish = new CountDownLatch(1)

    for (_ <- 0 until threadsCount)
      scheduler.executeAsync { () =>
        blocking {
          latch.countDown()
          finish.await(15, TimeUnit.MINUTES)
          ()
        }
      }

    assert(latch.await(15, TimeUnit.MINUTES), "latch.await")
    finish.countDown()
  }
}
// Runs the shared suite against a 4-thread fixed pool scheduler.
object FixedPoolSchedulerSuite extends ExecutorSchedulerSuite {
  def setup(): SchedulerService =
    monix.execution.Scheduler.fixedPool("monix-tests-fixedPool", poolSize = 4, reporter = testsReporter)
}
// Runs the shared suite against a single-thread scheduler.
object SingleThreadSchedulerSuite extends ExecutorSchedulerSuite {
  def setup(): SchedulerService =
    monix.execution.Scheduler.singleThread("monix-tests-singleThread", reporter = testsReporter)
}
// Runs the shared suite against a cached (1..4 threads) scheduler.
object CachedSchedulerSuite extends ExecutorSchedulerSuite {
  def setup(): SchedulerService =
    monix.execution.Scheduler.cached("monix-tests-cached", 1, 4, reporter = testsReporter)
}
// Runs the shared suite against the unbounded I/O scheduler.
object IOSchedulerSuite extends ExecutorSchedulerSuite {
  def setup(): SchedulerService =
    monix.execution.Scheduler.io("monix-tests-io", reporter = testsReporter)
}
| alexandru/monifu | monix-execution/jvm/src/test/scala/monix/execution/schedulers/ExecutorSchedulerSuite.scala | Scala | apache-2.0 | 7,604 |
/*
* This file is part of eCobertura.
*
* Copyright (c) 2010 Joachim Hofer
* All rights reserved.
*
* This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package ecobertura.core.data.filters
import scala.collection.mutable.ListBuffer
import scala.collection.JavaConversions._
import java.util.{ArrayList, List => JavaList}
import org.eclipse.debug.core._
/**
* Helps creating ClassFilters from launch configurations
* or simply lists of class filters.
*/
/**
 * Helps creating ClassFilters from launch configurations
 * or simply lists of class filters.
 */
object ClassFilters {
  /** Reads the "classFilters" attribute of a launch configuration into ClassFilters. */
  def apply(launchConfiguration: ILaunchConfiguration) : ClassFilters = {
    val rawFilters = launchConfiguration
      .getAttribute("classFilters", new ArrayList[String])
      .asInstanceOf[JavaList[String]]
    // JavaConversions (imported at file level) lets us map over the Java list.
    val parsed = rawFilters.map(filterString => ClassFilter(filterString))
    ClassFilters(parsed.toSeq: _*)
  }

  /** Collects the given filters into a fresh ClassFilters instance. */
  def apply(classFilterArray: ClassFilter*) : ClassFilters = {
    val result = new ClassFilters
    classFilterArray.foreach(filter => result.add(filter))
    result
  }
}
/**
* Stores include/exclude class filters. Used as data model for the
* launch configuration's "filters" tab.
*/
/**
 * Stores include/exclude class filters. Used as data model for the
 * launch configuration's "filters" tab.
 */
class ClassFilters {
  // The buffer reference is never reassigned, so a val (was a var); the
  // buffer itself stays mutable as the backing store for add/remove.
  private val classFilters = ListBuffer[ClassFilter]()

  def toArray = classFilters.toArray
  def add(classFilter: ClassFilter) = classFilters += classFilter
  def remove(classFilter: ClassFilter) = classFilters -= classFilter

  /** Serializes all filters into the "classFilters" attribute of the config. */
  def addToLaunchConfiguration(launchConfiguration: ILaunchConfigurationWorkingCopy) = {
    val javaList: JavaList[String] = new ArrayList[String]
    for (filter <- classFilters) javaList.add(filter.toAttributeString)
    launchConfiguration.setAttribute("classFilters", javaList)
  }

  override def toString = "ClassFilters(%s)".format(classFilters.toString)

  /**
   * A class (given as its reversed relative path, e.g. List("Foo", "pkg")) is
   * included iff every filter includes its fully qualified name. Replaces the
   * previous hand-rolled recursion with an equivalent short-circuiting forall.
   */
  def isClassIncluded(reversedRelativePath: List[String]) : Boolean = {
    val className = reversedRelativePath.reverse.mkString(".")
    classFilters.forall(_.includes(className))
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.security
import java.io.File
import java.net.URI
import java.security.PrivilegedExceptionAction
import java.util.ServiceLoader
import java.util.concurrent.{ScheduledExecutorService, TimeUnit}
import scala.collection.mutable
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.security.{Credentials, UserGroupInformation}
import org.apache.spark.SparkConf
import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config._
import org.apache.spark.rpc.RpcEndpointRef
import org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.UpdateDelegationTokens
import org.apache.spark.security.HadoopDelegationTokenProvider
import org.apache.spark.ui.UIUtils
import org.apache.spark.util.{ThreadUtils, Utils}
/**
* Manager for delegation tokens in a Spark application.
*
* When delegation token renewal is enabled, this manager will make sure long-running apps can
* run without interruption while accessing secured services. It periodically logs in to the KDC
* with user-provided credentials, and contacts all the configured secure services to obtain
* delegation tokens to be distributed to the rest of the application.
*
* New delegation tokens are created once 75% of the renewal interval of the original tokens has
* elapsed. The new tokens are sent to the Spark driver endpoint. The driver is tasked with
* distributing the tokens to other processes that might need them.
*
* Renewal can be enabled in two different ways: by providing a principal and keytab to Spark, or by
* enabling renewal based on the local credential cache. The latter has the drawback that Spark
* can't create new TGTs by itself, so the user has to manually update the Kerberos ticket cache
* externally.
*
* This class can also be used just to create delegation tokens, by calling the
* `obtainDelegationTokens` method. This option does not require calling the `start` method nor
* providing a driver reference, but leaves it up to the caller to distribute the tokens that were
* generated.
*/
private[spark] class HadoopDelegationTokenManager(
protected val sparkConf: SparkConf,
protected val hadoopConf: Configuration,
protected val schedulerRef: RpcEndpointRef) extends Logging {
private val deprecatedProviderEnabledConfigs = List(
"spark.yarn.security.tokens.%s.enabled",
"spark.yarn.security.credentials.%s.enabled")
private val providerEnabledConfig = "spark.security.credentials.%s.enabled"
private val principal = sparkConf.get(PRINCIPAL).orNull
// The keytab can be a local: URI for cluster mode, so translate it to a regular path. If it is
// needed later on, the code will check that it exists.
private val keytab = sparkConf.get(KEYTAB).map { uri => new URI(uri).getPath() }.orNull
require((principal == null) == (keytab == null),
"Both principal and keytab must be defined, or neither.")
private val delegationTokenProviders = loadProviders()
logDebug("Using the following builtin delegation token providers: " +
s"${delegationTokenProviders.keys.mkString(", ")}.")
private var renewalExecutor: ScheduledExecutorService = _
/** @return Whether delegation token renewal is enabled. */
def renewalEnabled: Boolean = sparkConf.get(KERBEROS_RENEWAL_CREDENTIALS) match {
case "keytab" => principal != null
case "ccache" => UserGroupInformation.getCurrentUser().hasKerberosCredentials()
case _ => false
}
/**
* Start the token renewer. Requires a principal and keytab. Upon start, the renewer will
* obtain delegation tokens for all configured services and send them to the driver, and
* set up tasks to periodically get fresh tokens as needed.
*
* This method requires that a keytab has been provided to Spark, and will try to keep the
* logged in user's TGT valid while this manager is active.
*
* @return New set of delegation tokens created for the configured principal.
*/
def start(): Array[Byte] = {
require(renewalEnabled, "Token renewal must be enabled to start the renewer.")
require(schedulerRef != null, "Token renewal requires a scheduler endpoint.")
renewalExecutor =
ThreadUtils.newDaemonSingleThreadScheduledExecutor("Credential Renewal Thread")
val ugi = UserGroupInformation.getCurrentUser()
if (ugi.isFromKeytab()) {
// In Hadoop 2.x, renewal of the keytab-based login seems to be automatic, but in Hadoop 3.x,
// it is configurable (see hadoop.kerberos.keytab.login.autorenewal.enabled, added in
// HADOOP-9567). This task will make sure that the user stays logged in regardless of that
// configuration's value. Note that checkTGTAndReloginFromKeytab() is a no-op if the TGT does
// not need to be renewed yet.
val tgtRenewalTask = new Runnable() {
override def run(): Unit = {
ugi.checkTGTAndReloginFromKeytab()
}
}
val tgtRenewalPeriod = sparkConf.get(KERBEROS_RELOGIN_PERIOD)
renewalExecutor.scheduleAtFixedRate(tgtRenewalTask, tgtRenewalPeriod, tgtRenewalPeriod,
TimeUnit.SECONDS)
}
updateTokensTask()
}
def stop(): Unit = {
if (renewalExecutor != null) {
renewalExecutor.shutdownNow()
}
}
/**
* Fetch new delegation tokens for configured services, storing them in the given credentials.
*
* @param creds Credentials object where to store the delegation tokens.
*/
def obtainDelegationTokens(creds: Credentials): Unit = {
val currentUser = UserGroupInformation.getCurrentUser()
val hasKerberosCreds = principal != null ||
Option(currentUser.getRealUser()).getOrElse(currentUser).hasKerberosCredentials()
// Delegation tokens can only be obtained if the real user has Kerberos credentials, so
// skip creation when those are not available.
if (hasKerberosCreds) {
val freshUGI = doLogin()
freshUGI.doAs(new PrivilegedExceptionAction[Unit]() {
override def run(): Unit = {
val (newTokens, _) = obtainDelegationTokens()
creds.addAll(newTokens)
}
})
}
}
  /**
   * Fetch new delegation tokens for configured services.
   *
   * @return 2-tuple (credentials with new tokens, time by which the tokens must be renewed)
   *         The renewal time is the earliest one reported by any provider, or
   *         Long.MaxValue if no provider reports one.
   */
  private def obtainDelegationTokens(): (Credentials, Long) = {
    val creds = new Credentials()
    // Each provider appends its tokens to `creds` and may return its next renewal time;
    // fold with math.min to keep the earliest deadline across all providers.
    val nextRenewal = delegationTokenProviders.values.flatMap { provider =>
      if (provider.delegationTokensRequired(sparkConf, hadoopConf)) {
        provider.obtainDelegationTokens(hadoopConf, sparkConf, creds)
      } else {
        logDebug(s"Service ${provider.serviceName} does not require a token." +
          s" Check your configuration to see if security is disabled or not.")
        None
      }
    }.foldLeft(Long.MaxValue)(math.min)
    (creds, nextRenewal)
  }
  // Visible for testing.
  // Returns whether a delegation token provider for the given service was loaded.
  def isProviderLoaded(serviceName: String): Boolean = {
    delegationTokenProviders.contains(serviceName)
  }
  /**
   * Whether token fetching is enabled for the given service.
   *
   * The non-deprecated per-service config key takes precedence when set; otherwise the
   * service is enabled only if none of the deprecated per-service keys disable it
   * (each deprecated key defaults to true when absent). A warning is logged for any
   * deprecated key still present in the configuration.
   */
  protected def isServiceEnabled(serviceName: String): Boolean = {
    val key = providerEnabledConfig.format(serviceName)
    deprecatedProviderEnabledConfigs.foreach { pattern =>
      val deprecatedKey = pattern.format(serviceName)
      if (sparkConf.contains(deprecatedKey)) {
        logWarning(s"${deprecatedKey} is deprecated. Please use ${key} instead.")
      }
    }
    val isEnabledDeprecated = deprecatedProviderEnabledConfigs.forall { pattern =>
      sparkConf
        .getOption(pattern.format(serviceName))
        .map(_.toBoolean)
        .getOrElse(true)
    }
    sparkConf
      .getOption(key)
      .map(_.toBoolean)
      .getOrElse(isEnabledDeprecated)
  }
private def scheduleRenewal(delay: Long): Unit = {
val _delay = math.max(0, delay)
logInfo(s"Scheduling renewal in ${UIUtils.formatDuration(delay)}.")
val renewalTask = new Runnable() {
override def run(): Unit = {
updateTokensTask()
}
}
renewalExecutor.schedule(renewalTask, _delay, TimeUnit.MILLISECONDS)
}
  /**
   * Periodic task to login to the KDC and create new delegation tokens. Re-schedules itself
   * to fetch the next set of tokens when needed.
   *
   * @return the serialized tokens that were sent to the scheduler endpoint, or null if the
   *         update was interrupted (shutdown) or failed (in which case a retry is scheduled).
   */
  private def updateTokensTask(): Array[Byte] = {
    try {
      val freshUGI = doLogin()
      // obtainTokensAndScheduleRenewal also schedules the *next* renewal on success.
      val creds = obtainTokensAndScheduleRenewal(freshUGI)
      val tokens = SparkHadoopUtil.get.serialize(creds)
      logInfo("Updating delegation tokens.")
      schedulerRef.send(UpdateDelegationTokens(tokens))
      tokens
    } catch {
      case _: InterruptedException =>
        // Ignore, may happen if shutting down.
        null
      case e: Exception =>
        // Retry after a configured wait rather than giving up; without fresh tokens,
        // tasks will eventually start failing.
        val delay = TimeUnit.SECONDS.toMillis(sparkConf.get(CREDENTIALS_RENEWAL_RETRY_WAIT))
        logWarning(s"Failed to update tokens, will try again in ${UIUtils.formatDuration(delay)}!" +
          " If this happens too often tasks will fail.", e)
        scheduleRenewal(delay)
        null
    }
  }
  /**
   * Obtain new delegation tokens from the available providers. Schedules a new task to fetch
   * new tokens before the new set expires.
   *
   * The next fetch is scheduled at a configured ratio of the remaining lifetime
   * (nextRenewal - now), so tokens are refreshed before they actually expire.
   *
   * @return Credentials containing the new tokens.
   */
  private def obtainTokensAndScheduleRenewal(ugi: UserGroupInformation): Credentials = {
    ugi.doAs(new PrivilegedExceptionAction[Credentials]() {
      override def run(): Credentials = {
        val (creds, nextRenewal) = obtainDelegationTokens()
        // Calculate the time when new credentials should be created, based on the configured
        // ratio.
        val now = System.currentTimeMillis
        val ratio = sparkConf.get(CREDENTIALS_RENEWAL_INTERVAL_RATIO)
        val delay = (ratio * (nextRenewal - now)).toLong
        scheduleRenewal(delay)
        creds
      }
    })
  }
  /**
   * Logs in and returns the UGI to fetch tokens as, trying three strategies in order:
   *  1. if a principal is configured, log in to the KDC with the configured keytab;
   *  2. otherwise, if the current user is not a proxy user, load the user's Kerberos
   *     ticket cache (honoring the KRB5CCNAME / KRB5PRINCIPAL environment variables);
   *  3. otherwise, fall back to the current user as-is.
   */
  private def doLogin(): UserGroupInformation = {
    if (principal != null) {
      logInfo(s"Attempting to login to KDC using principal: $principal")
      require(new File(keytab).isFile(), s"Cannot find keytab at $keytab.")
      val ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(principal, keytab)
      logInfo("Successfully logged into KDC.")
      ugi
    } else if (!SparkHadoopUtil.get.isProxyUser(UserGroupInformation.getCurrentUser())) {
      logInfo(s"Attempting to load user's ticket cache.")
      val ccache = sparkConf.getenv("KRB5CCNAME")
      // Principal defaults to the current user's name when KRB5PRINCIPAL is unset.
      val user = Option(sparkConf.getenv("KRB5PRINCIPAL")).getOrElse(
        UserGroupInformation.getCurrentUser().getUserName())
      UserGroupInformation.getUGIFromTicketCache(ccache, user)
    } else {
      UserGroupInformation.getCurrentUser()
    }
  }
  /**
   * Discovers HadoopDelegationTokenProvider implementations via ServiceLoader and
   * returns the enabled ones keyed by service name.
   */
  private def loadProviders(): Map[String, HadoopDelegationTokenProvider] = {
    val loader = ServiceLoader.load(classOf[HadoopDelegationTokenProvider],
      Utils.getContextOrSparkClassLoader)
    val providers = mutable.ArrayBuffer[HadoopDelegationTokenProvider]()
    val iterator = loader.iterator
    while (iterator.hasNext) {
      try {
        // Throwable (not just NonFatal) is caught deliberately: ServiceLoader signals a
        // broken provider with ServiceConfigurationError, which is an Error. A provider
        // that fails to load is simply skipped.
        providers += iterator.next
      } catch {
        case t: Throwable =>
          logDebug(s"Failed to load built in provider.", t)
      }
    }
    // Filter out providers for which spark.security.credentials.{service}.enabled is false.
    // Note: if two providers share a serviceName, toMap keeps the later one.
    providers
      .filter { p => isServiceEnabled(p.serviceName) }
      .map { p => (p.serviceName, p) }
      .toMap
  }
}
| jkbradley/spark | core/src/main/scala/org/apache/spark/deploy/security/HadoopDelegationTokenManager.scala | Scala | apache-2.0 | 12,176 |
package skinny.micro.request
import java.io.InputStream
import javax.servlet.ServletInputStream
/**
 * ServletInputStream that serves bytes from a decoded view of the request
 * body, while keeping a reference to the underlying raw servlet stream.
 *
 * All reads are delegated to the `encoded` stream.
 */
private[skinny] class EncodedInputStream(
  encoded: InputStream,
  raw: ServletInputStream) extends ServletInputStream {

  /** Reads one byte from the encoded stream (-1 at end of stream). */
  override def read(): Int = encoded.read()

  /** Fills as much of `b` as possible; delegates to the three-argument read. */
  override def read(b: Array[Byte]): Int = read(b, 0, b.length)

  /** Reads up to `len` bytes into `b` starting at `off`. */
  override def read(b: Array[Byte], off: Int, len: Int): Int = encoded.read(b, off, len)
}
| xerial/skinny-micro | micro/src/main/scala/skinny/micro/request/EncodedInputStream.scala | Scala | bsd-2-clause | 474 |
package fi.proweb.train.model.app
import fi.proweb.train.model.AppData
object TrainStation {

  /**
   * Convenience factory: builds a station with the given code and title,
   * leaving every other field at its default (None).
   */
  def apply(stationCode: String, title: String): TrainStation = {
    val result = new TrainStation
    result.title = Some(title)
    result.stationCode = Some(stationCode)
    result
  }
}
// Mutable data holder for a single station on a train's route; fields are
// Options and stay None until populated.
class TrainStation extends AppData[TrainStation] {
  // Unique identifier of the station, when known.
  var guid: Option[String] = None
  // Human-readable station name.
  var title: Option[String] = None
  // Short station code.
  var stationCode: Option[String] = None
  // Train has passed this station
  var completed: Option[Boolean] = None
  // Field-by-field copy of this station.
  def makeCopy: TrainStation = {
    val trainstation = new TrainStation
    trainstation.guid = guid
    trainstation.title = title
    trainstation.stationCode = stationCode
    trainstation.completed = completed
    trainstation
  }
  // Renders as: title (guid), using "?" for any missing value.
  override def toString = title.getOrElse("?") + " (" + guid.getOrElse("?") + ")"
} | roikonen/MissaJuna | app/fi/proweb/train/model/app/TrainStation.scala | Scala | apache-2.0 | 866 |
/**
* Copyright (c) 2013 Bernard Leach
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.leachbj.hsmsim.util
import scala.Array.canBuildFrom
import akka.util.ByteString
import akka.util.CompactByteString
/** Helpers for converting between akka ByteString and hexadecimal text. */
object HexConverter {

  /**
   * Renders the bytes as upper-case two-digit hex values.
   *
   * @param b   bytes to render
   * @param sep optional separator placed between consecutive byte values
   * @return e.g. "0A1B", or "0A:1B" with sep = Some(":")
   */
  def toHex(b: ByteString, sep: Option[String] = None): String = {
    val hexDigits = b.toArray.map("%02X".format(_))
    // Pattern-match on the Option instead of the original wildcard + sep.get,
    // which relied on case order to avoid a NoSuchElementException.
    sep match {
      case Some(s) => hexDigits.mkString(s)
      case None    => hexDigits.mkString
    }
  }

  /**
   * Parses hex text into bytes. Any character that is not a hex digit
   * (spaces, colons, ...) is stripped first, so toHex output round-trips
   * regardless of the separator used.
   */
  def fromHex(b: String): ByteString = {
    CompactByteString(b.replaceAll("[^0-9A-Fa-f]", "").sliding(2, 2).toArray.map(Integer.parseInt(_, 16).toByte))
  }
}
| leachbj/hsm-emulator | hsmsim-akka/src/main/scala/org/leachbj/hsmsim/util/HexConverter.scala | Scala | mit | 1,670 |
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package org.scalajs.linker.frontend
import scala.collection.mutable
import scala.concurrent._
import scala.util.Try
import org.scalajs.logging._
import org.scalajs.linker._
import org.scalajs.linker.standard._
import org.scalajs.linker.checker._
import org.scalajs.linker.analyzer._
import org.scalajs.linker.irio._
import org.scalajs.ir
import ir.Trees.{ClassDef, MethodDef}
import ir.Hashers
import Analysis._
/** Links the information from [[irio.VirtualScalaJSIRFile]]s into
 * [[standard.LinkedClass LinkedClass]]es. Does a dead code elimination pass.
 */
final class BaseLinker(config: CommonPhaseConfig) {
  import BaseLinker._
  private val inputProvider = new InputProvider
  private val methodSynthesizer = new MethodSynthesizer(inputProvider)
  /**
   * Links the given IR files into a LinkingUnit.
   *
   * Pipeline: refresh the input caches, compute reachability, assemble the
   * reachable classes, then (optionally) run the IR checker. The input caches
   * are cleaned after the run whether it succeeded or failed.
   */
  def link(irInput: Seq[VirtualScalaJSIRFile],
      moduleInitializers: Seq[ModuleInitializer], logger: Logger,
      symbolRequirements: SymbolRequirement, checkIR: Boolean)(
      implicit ec: ExecutionContext): Future[LinkingUnit] = {
    // Module initializers keep their entry points alive via extra symbol requirements.
    val allSymbolRequirements = {
      symbolRequirements ++
      ModuleInitializer.toSymbolRequirement(moduleInitializers)
    }
    val result = for {
      _ <- inputProvider.update(irInput)
      analysis <- logger.timeFuture("Linker: Compute reachability") {
        analyze(allSymbolRequirements, logger)
      }
      linkResult <- logger.timeFuture("Linker: Assemble LinkedClasses") {
        assemble(moduleInitializers, analysis)
      }
    } yield {
      if (checkIR) {
        logger.time("Linker: Check IR") {
          val errorCount = IRChecker.check(linkResult, logger)
          if (errorCount != 0) {
            throw new LinkingException(
                s"There were $errorCount IR checking errors.")
          }
        }
      }
      linkResult
    }
    result.andThen { case _ => inputProvider.cleanAfterRun() }
  }
  /**
   * Runs the reachability analysis. If the analysis reports errors, logs up to
   * a capped number of them (system property org.scalajs.linker.maxlinkingerrors,
   * default 20) and fails with a LinkingException.
   */
  private def analyze(symbolRequirements: SymbolRequirement, logger: Logger)(
      implicit ec: ExecutionContext): Future[Analysis] = {
    def reportErrors(errors: Seq[Analysis.Error]) = {
      require(errors.nonEmpty)
      val maxDisplayErrors = {
        val propName = "org.scalajs.linker.maxlinkingerrors"
        Try(System.getProperty(propName, "20").toInt).getOrElse(20).max(1)
      }
      errors
        .take(maxDisplayErrors)
        .foreach(logError(_, logger, Level.Error))
      val skipped = errors.size - maxDisplayErrors
      if (skipped > 0)
        logger.log(Level.Error, s"Not showing $skipped more linking errors")
      throw new LinkingException("There were linking errors")
    }
    for {
      analysis <- Analyzer.computeReachability(config, symbolRequirements,
          allowAddingSyntheticMethods = true, inputProvider)
    } yield {
      if (analysis.errors.nonEmpty) {
        reportErrors(analysis.errors)
      }
      analysis
    }
  }
  /**
   * Builds the LinkingUnit by loading each reachable class, synthesizing any
   * required members, and stripping it down via linkedClassDef.
   */
  private def assemble(moduleInitializers: Seq[ModuleInitializer],
      analysis: Analysis)(implicit ec: ExecutionContext): Future[LinkingUnit] = {
    def assembleClass(info: ClassInfo) = {
      val classAndVersion = inputProvider.loadClassDefAndVersion(info.encodedName)
      val syntheticMethods = methodSynthesizer.synthesizeMembers(info, analysis)
      for {
        (classDef, version) <- classAndVersion
        syntheticMethods <- syntheticMethods
      } yield {
        linkedClassDef(classDef, version, syntheticMethods, info, analysis)
      }
    }
    for {
      linkedClassDefs <- Future.traverse(analysis.classInfos.values)(assembleClass)
    } yield {
      new LinkingUnit(config.coreSpec, linkedClassDefs.toList,
          moduleInitializers.toList)
    }
  }
  /** Takes a ClassDef and DCE infos to construct a stripped down LinkedClass.
   *
   *  Only reachable methods are kept; fields and exported members are kept only
   *  when some subclass is instantiated. Synthetic methods are appended last.
   */
  private def linkedClassDef(classDef: ClassDef, version: Option[String],
      syntheticMethodDefs: Iterator[MethodDef],
      analyzerInfo: ClassInfo, analysis: Analysis): LinkedClass = {
    import ir.Trees._
    val fields = mutable.Buffer.empty[FieldDef]
    val methods = mutable.Buffer.empty[Versioned[MethodDef]]
    val exportedMembers = mutable.Buffer.empty[Versioned[MemberDef]]
    // Versions a method by the hash of its body, enabling incremental re-emission.
    def linkedMethod(m: MethodDef) = {
      val version = m.hash.map(Hashers.hashAsVersion(_))
      new Versioned(m, version)
    }
    def linkedProperty(p: PropertyDef) = {
      new Versioned(p, None)
    }
    // Partition the members according to the DCE results.
    classDef.memberDefs.foreach {
      case field: FieldDef =>
        if (analyzerInfo.isAnySubclassInstantiated)
          fields += field
      case m: MethodDef =>
        val methodInfo =
          analyzerInfo.methodInfos(m.flags.namespace)(m.encodedName)
        if (methodInfo.isReachable) {
          assert(m.body.isDefined,
              s"The abstract method ${classDef.name.name}.${m.encodedName} " +
              "is reachable.")
          val linked = linkedMethod(m)
          // Methods named by an Ident are regular methods; others are exported.
          if (m.name.isInstanceOf[Ident])
            methods += linked
          else
            exportedMembers += linked
        }
      case m: PropertyDef =>
        if (analyzerInfo.isAnySubclassInstantiated)
          exportedMembers += linkedProperty(m)
    }
    methods ++= syntheticMethodDefs.map(linkedMethod)
    val topLevelExports =
      classDef.topLevelExportDefs.map(new Versioned(_, version))
    // Drop the module accessor when the module is never accessed.
    val kind =
      if (analyzerInfo.isModuleAccessed) classDef.kind
      else classDef.kind.withoutModuleAccessor
    val ancestors = analyzerInfo.ancestors.map(_.encodedName)
    new LinkedClass(
        classDef.name,
        kind,
        classDef.jsClassCaptures,
        classDef.superClass,
        classDef.interfaces,
        classDef.jsSuperClass,
        classDef.jsNativeLoadSpec,
        fields.toList,
        methods.toList,
        exportedMembers.toList,
        topLevelExports,
        classDef.optimizerHints,
        classDef.pos,
        ancestors.toList,
        hasInstances = analyzerInfo.isAnySubclassInstantiated,
        hasInstanceTests = analyzerInfo.areInstanceTestsUsed,
        hasRuntimeTypeInfo = analyzerInfo.isDataAccessed,
        version)
  }
}
private object BaseLinker {
  /**
   * Serves IR files and per-class info to the analyzer and method synthesizer,
   * caching parsed ClassDefs/infos across runs keyed by encoded class name.
   */
  private class InputProvider extends Analyzer.InputProvider with MethodSynthesizer.InputProvider {
    private var encodedNameToFile: collection.Map[String, VirtualScalaJSIRFile] = _
    private var entryPoints: collection.Set[String] = _
    // Per-class caches; entries survive across runs until cleanAfterRun drops unused ones.
    private val cache = mutable.Map.empty[String, ClassDefAndInfoCache]
    def update(irInput: Seq[VirtualScalaJSIRFile])(implicit ec: ExecutionContext): Future[Unit] = {
      Future.traverse(irInput)(_.entryPointsInfo).map { infos =>
        val encodedNameToFile = mutable.Map.empty[String, VirtualScalaJSIRFile]
        val entryPoints = mutable.Set.empty[String]
        for ((input, info) <- irInput.zip(infos)) {
          // Remove duplicates. Just like the JVM
          if (!encodedNameToFile.contains(info.encodedName))
            encodedNameToFile += info.encodedName -> input
          if (info.hasEntryPoint)
            entryPoints += info.encodedName
        }
        this.encodedNameToFile = encodedNameToFile
        this.entryPoints = entryPoints
      }
    }
    def classesWithEntryPoints(): TraversableOnce[String] = entryPoints
    // None when the class name is unknown to the current input set.
    def loadInfo(encodedName: String)(
        implicit ec: ExecutionContext): Option[Future[Infos.ClassInfo]] =
      getCache(encodedName).map(_.loadInfo(encodedNameToFile(encodedName)))
    def loadClassDefAndVersion(encodedName: String)(
        implicit ec: ExecutionContext): Future[(ClassDef, Option[String])] = {
      val fileCache = getCache(encodedName).getOrElse {
        throw new AssertionError(s"Cannot load file for class $encodedName")
      }
      fileCache.loadClassDefAndVersion(encodedNameToFile(encodedName))
    }
    def loadClassDef(encodedName: String)(
        implicit ec: ExecutionContext): Future[ClassDef] = {
      loadClassDefAndVersion(encodedName).map(_._1)
    }
    // Lazily creates a per-class cache entry for known classes.
    private def getCache(encodedName: String): Option[ClassDefAndInfoCache] = {
      cache.get(encodedName).orElse {
        if (encodedNameToFile.contains(encodedName)) {
          val fileCache = new ClassDefAndInfoCache
          cache += encodedName -> fileCache
          Some(fileCache)
        } else {
          None
        }
      }
    }
    def cleanAfterRun(): Unit = {
      encodedNameToFile = null
      entryPoints = null
      // Drop cache entries that were not touched during this run.
      cache.retain((_, fileCache) => fileCache.cleanAfterRun())
    }
  }
  /**
   * Caches the parsed ClassDef and generated ClassInfo of one IR file,
   * re-parsing only when the file's version changes between runs.
   */
  private final class ClassDefAndInfoCache {
    // Whether this cache was consulted during the current run (reset by cleanAfterRun).
    private var cacheUsed: Boolean = false
    private var version: Option[String] = None
    private var cacheUpdate: Future[(ClassDef, Infos.ClassInfo)] = _
    def loadInfo(irFile: VirtualScalaJSIRFile)(
        implicit ec: ExecutionContext): Future[Infos.ClassInfo] = {
      update(irFile).map(_._2)
    }
    def loadClassDefAndVersion(irFile: VirtualScalaJSIRFile)(
        implicit ec: ExecutionContext): Future[(ClassDef, Option[String])] = {
      update(irFile).map(s => (s._1, version))
    }
    private def update(irFile: VirtualScalaJSIRFile)(
        implicit ec: ExecutionContext): Future[(ClassDef, Infos.ClassInfo)] = synchronized {
      /* If the cache was already used in this run, the classDef and info are
       * already correct, no matter what the versions say.
       */
      if (!cacheUsed) {
        cacheUsed = true
        val newVersion = irFile.version
        // Re-parse when either side has no version or the versions differ.
        if (version.isEmpty || newVersion.isEmpty ||
            version.get != newVersion.get) {
          version = newVersion
          cacheUpdate = irFile.tree.map(t => (t, Infos.generateClassInfo(t)))
        }
      }
      cacheUpdate
    }
    /** Returns true if the cache has been used and should be kept. */
    def cleanAfterRun(): Boolean = synchronized {
      val result = cacheUsed
      cacheUsed = false
      result
    }
  }
}
| SebsLittleHelpers/scala-js | linker/shared/src/main/scala/org/scalajs/linker/frontend/BaseLinker.scala | Scala | apache-2.0 | 10,024 |
package edu.gemini.itc.web.baseline
import edu.gemini.itc.baseline._
import edu.gemini.itc.baseline.util._
import edu.gemini.itc.shared._
import edu.gemini.itc.web.baseline.Baseline._
import org.scalacheck.{Arbitrary, Gen}
import org.specs2.ScalaCheck
import org.specs2.mutable.Specification
/**
* Spec which compares a limited amount of random ITC "recipe" executions with the expected outcome.
* Test are executed by using a hash value generated from the fixture as a key in a map
* that contains hash values of the expected output of the recipe execution (currently a string). This baseline
* map is stored as a resource file and needs to be updated whenever there are changes to the code that change
* the outputs. See [[BaselineTest]] for details.
*/
object BaselineAllSpec extends Specification with ScalaCheck {
  // default number of tests is 100, that takes a bit too long
  private val minTestsCnt = 10
  // Each anonymous block below scopes an implicit Arbitrary to one instrument's
  // fixtures, so prop { ... } draws random fixtures for that instrument only.
  // === ACQUISITION CAMERA
  {
    implicit val arbFixture: Arbitrary[Fixture[AcquisitionCamParameters]] = Arbitrary { Gen.oneOf(BaselineAcqCam.Fixtures) }
    "Acquisition Camera calculations" should {
      "match latest baseline" !
        prop { f: Fixture[AcquisitionCamParameters] =>
          checkAgainstBaseline(Baseline.from(f, executeAcqCamRecipe(f)))
        }.set((minTestsOk, minTestsCnt))
    }
  }
  // === F2
  {
    implicit val arbFixture: Arbitrary[Fixture[Flamingos2Parameters]] = Arbitrary { Gen.oneOf(BaselineF2.Fixtures) }
    "Flamingos2 calculations" should {
      "match latest baseline" !
        prop { f: Fixture[Flamingos2Parameters] =>
          checkAgainstBaseline(Baseline.from(f, executeF2Recipe(f)))
        }.set((minTestsOk, minTestsCnt))
    }
  }
  // === GMOS
  {
    implicit val arbFixture: Arbitrary[Fixture[GmosParameters]] = Arbitrary { Gen.oneOf(BaselineGmos.Fixtures) }
    "GMOS calculations" should {
      "match latest baseline" !
        prop { f: Fixture[GmosParameters] =>
          checkAgainstBaseline(Baseline.from(f, executeGmosRecipe(f)))
        }.set((minTestsOk, minTestsCnt))
    }
  }
  // === GNIRS
  {
    implicit val arbFixture: Arbitrary[Fixture[GnirsParameters]] = Arbitrary { Gen.oneOf(BaselineGnirs.Fixtures) }
    "GNIRS calculations" should {
      "match latest baseline" !
        prop { f: Fixture[GnirsParameters] =>
          checkAgainstBaseline(Baseline.from(f, executeGnirsRecipe(f)))
        }.set((minTestsOk, minTestsCnt))
    }
  }
  // === GSAOI
  {
    implicit val arbFixture: Arbitrary[Fixture[GsaoiParameters]] = Arbitrary { Gen.oneOf(BaselineGsaoi.Fixtures) }
    "GSAOI calculations" should {
      "match latest baseline" !
        prop { f: Fixture[GsaoiParameters] =>
          checkAgainstBaseline(Baseline.from(f, executeGsaoiRecipe(f)))
        }.set((minTestsOk, minTestsCnt))
    }
  }
  // === Michelle
  {
    implicit val arbFixture: Arbitrary[Fixture[MichelleParameters]] = Arbitrary { Gen.oneOf(BaselineMichelle.Fixtures) }
    "Michelle calculations" should {
      "match latest baseline" !
        prop { f: Fixture[MichelleParameters] =>
          checkAgainstBaseline(Baseline.from(f, executeMichelleRecipe(f)))
        }.set((minTestsOk, minTestsCnt))
    }
  }
  // === NIFS
  {
    implicit val arbFixture: Arbitrary[Fixture[NifsParameters]] = Arbitrary { Gen.oneOf(BaselineNifs.Fixtures) }
    "NIFS calculations" should {
      "match latest baseline" !
        prop { f: Fixture[NifsParameters] =>
          checkAgainstBaseline(Baseline.from(f, executeNifsRecipe(f)))
        }.set((minTestsOk, minTestsCnt))
    }
  }
  // === NIRI
  {
    implicit val arbFixture: Arbitrary[Fixture[NiriParameters]] = Arbitrary { Gen.oneOf(BaselineNiri.Fixtures) }
    "NIRI calculations" should {
      "match latest baseline" !
        prop { f: Fixture[NiriParameters] =>
          checkAgainstBaseline(Baseline.from(f, executeNiriRecipe(f)))
        }.set((minTestsOk, minTestsCnt))
    }
  }
  // === TRecs
  {
    implicit val arbFixture: Arbitrary[Fixture[TRecsParameters]] = Arbitrary { Gen.oneOf(BaselineTRecs.Fixtures) }
    "TRecs calculations" should {
      "match latest baseline" !
        prop { f: Fixture[TRecsParameters] =>
          checkAgainstBaseline(Baseline.from(f, executeTrecsRecipe(f)))
        }.set((minTestsOk, minTestsCnt))
    }
  }
}
| arturog8m/ocs | bundle/edu.gemini.itc.web/src/test/scala/edu/gemini/itc/web/baseline/BaselineAllSpec.scala | Scala | bsd-3-clause | 4,315 |
package core.clustering.hybrid
import core.DataSet
import core.clustering.kmeans.KMeans
import core.clustering.model.CentroidModel
import core.clustering.pso.PSO
/**
 * Hybrid clustering strategy: a PSO pass produces an initial model,
 * which is then refined by K-means.
 */
class HybridPSOKMeans(kmeans: KMeans,
                      pso: PSO) {

  /** Runs PSO on the data set, then hands its model to K-means for refinement. */
  def train(dataSet: DataSet): CentroidModel =
    kmeans.train(pso.train(dataSet), dataSet)
}
| fernandoj92/ScalaML | src/main/scala/core/clustering/hybrid/HybridPSOKMeans.scala | Scala | gpl-3.0 | 415 |
package scaldi
import org.scalatest.{Matchers, WordSpec}
import java.text.DateFormat
class InjectableSpec extends WordSpec with Matchers {
  "Injectable" should {
    "be useable within classes that are instantiated within module and have implicit injector" in {
      val module = new TcpModule :: DynamicModule({ m =>
        m.binding identifiedBy 'tcpHost to "test"
        m.binding identifiedBy 'welcome to "Hello user!"
      })
      val binding = module.getBinding(List('tcpServer))
      binding should be ('defined)
      val server = binding.get.get.get.asInstanceOf[TcpServer]
      server.port should equal (1234)
      server.host should equal ("test")
      server.getConnection.welcomeMessage should equal ("Hello user!")
    }
    "treat binding that return None as non-defined and use default or throw an exception if no default provided" in {
      // The left-most module wins: binding 'welcome to None makes it non-defined.
      val module = new TcpModule :: DynamicModule(_.bind [String] identifiedBy 'welcome to None) :: DynamicModule({ m =>
        m.binding identifiedBy 'tcpHost to "test"
        m.binding identifiedBy 'welcome to "Hello user!"
      })
      val binding = module.getBinding(List('tcpServer))
      binding should be ('defined)
      val server = binding.get.get.get.asInstanceOf[TcpServer]
      server.getConnection.welcomeMessage should equal ("Hi")
    }
    // The tests below use the spec-level implicit injector defined at the bottom.
    import scaldi.Injectable._
    val defaultDb = PostgresqlDatabase("default_db")
    "inject by type" in {
      inject [Database] should equal (MysqlDatabase("my_app"))
      inject [Database] (classOf[ConnectionProvider]) should equal (MysqlDatabase("my_app"))
    }
    "inject using identifiers" in {
      val results = List (
        inject [Database] ('database and 'local),
        inject [Database] (identified by 'local and 'database),
        inject [Database] (by default defaultDb and identified by 'database and 'local),
        inject [Database] (by default new MysqlDatabase("my_app") and 'database and 'local),
        inject [Database] ('database and "local" and by default defaultDb)
      )
      results should have size 5
      results.distinct should (contain(MysqlDatabase("my_app"): Database) and have size (1))
    }
    "inject default if binding not found" in {
      val results = List [Database] (
        inject [Database] (identified by 'remote and by default new PostgresqlDatabase("default_db")),
        inject [Database] (identified by 'remote is by default defaultDb),
        inject [Database] ('remote is by default defaultDb),
        inject [Database] ('remote which by default defaultDb),
        inject [Database] ('remote that by default defaultDb),
        inject [Database] (by default defaultDb and identified by 'remote),
        inject (by default defaultDb),
        inject (by default defaultDb and 'local),
        inject (by default new PostgresqlDatabase("default_db"))
      )
      results should have size 9
      results.distinct should (contain(defaultDb: Database) and have size 1)
    }
    "correctly inject provider" in {
      // 'str1 is bound eagerly-cached ("to"): evaluated once; 'str2 is a
      // provider ("toProvider"): re-evaluated on every call.
      var str1Counter = 0
      var str2Counter = 0
      implicit val injector = DynamicModule({ m =>
        m.binding identifiedBy 'str1 to {
          str1Counter = str1Counter + 1
          s"str1 $str1Counter"
        }
        m.binding identifiedBy 'str2 toProvider {
          str2Counter = str2Counter + 1
          s"str2 $str2Counter"
        }
      })
      val str1 = injectProvider[String]('str1)
      val str2 = injectProvider[String]('str2)
      str1() should equal ("str1 1")
      str1() should equal ("str1 1")
      str2() should equal ("str2 1")
      str2() should equal ("str2 2")
      str1Counter should equal (1)
      str2Counter should equal (2)
    }
    "throw exception if no default provided and bonding not found" in {
      an [InjectException] should be thrownBy inject [DateFormat]
    }
    "also be available in module, but use resulting (compised) injector" in {
      val server = inject [Server] ('real and 'http)
      server should equal (HttpServer("marketing.org", 8081))
    }
    "distinguish generic types" in {
      val intAdder = inject [(Int, Int) => Int]
      intAdder(2, 3) should equal (5)
      val stringAdder = inject [(String, String) => String]
      stringAdder("Hello", "World") should equal ("Hello, World")
    }
    "inject all using type parameter" in {
      injectAllOfType [String] ('host) should
        (contain("www.google.com") and contain("www.yahoo.com") and contain("www.github.com") and have size 3)
      injectAllOfType [HttpServer] should
        (contain(HttpServer("localhost", 80)) and contain(HttpServer("test", 8080)) and have size 2)
    }
    "inject all without type parameter" in {
      injectAll('host).asInstanceOf[List[String]] should
        (contain("www.google.com") and contain("www.yahoo.com") and contain("www.github.com") and have size 3)
      injectAll('server).asInstanceOf[List[HttpServer]] should
        (contain(HttpServer("localhost", 80)) and contain(HttpServer("test", 8080)) and have size 2)
    }
  }
  // Composed injector used implicitly by the inject calls above.
  implicit lazy val injector: Injector = mainModule :: marketingModule
  val marketingModule = new Module {
    bind [String] identifiedBy 'httpHost to "marketing.org"
  }
  val mainModule = new Module {
    binding identifiedBy 'host and 'google to "www.google.com"
    binding identifiedBy 'host and 'yahoo to "www.yahoo.com"
    binding identifiedBy 'host and 'github to "www.github.com"
    binding identifiedBy 'server to HttpServer("localhost", 80)
    binding identifiedBy 'server to None
    binding identifiedBy 'server to HttpServer("test", 8080)
    binding identifiedBy 'intAdder to ((a: Int, b: Int) => a + b)
    binding identifiedBy 'stringAdder to ((s1: String, s2: String) => s1 + ", " + s2)
    bind [Int] identifiedBy 'httpPort to 8081
    bind [Server] identifiedBy 'real and 'http to HttpServer(inject [String] ('httpHost), inject [Int] ('httpPort))
    binding identifiedBy 'database and "local" to MysqlDatabase("my_app")
  }
} | Mironor/scaldi | src/test/scala/scaldi/InjectableSpec.scala | Scala | apache-2.0 | 6,186 |
package it.polimi.genomics.core
import java.math.RoundingMode
import java.text.{DecimalFormat, DecimalFormatSymbols}
import java.util.Locale
//import it.polimi.genomics.core.DataTypes.{GNull, GInt, GString, GDouble}
/**
* Created by pietro on 10/03/15.
*/
object DataTypes {
  /**
   * SampleID, Chromosome, Start, Stop, Strand, Array of values
   */
  type FlinkRegionType = (Long, String, Long, Long, Char, Array[GValue])
  /**
   * SampleID, AttributeName, AttributeValue
   */
  type FlinkMetaType = (Long, String, String)
  /**
   * Used for grouping in GenometricMap
   * ReferenceSampleID, ExperimentSampleID
   */
  type FlinkMetaJoinType = (Long, Long, Boolean, Boolean)
  /**
   * Used for grouping in GenometricMap
   * ReferenceSampleID, ExperimentSampleID
   */
  type FlinkMetaJoinType3 = (Long, Long)
  /**
   * Used for grouping in GenometricCover
   * SampleID, List[GroupID]
   */
  type FlinkMetaGroupType = (Long, List[Long])
  /**
   * Used for grouping in GenometricCover
   * SampleID, GroupID
   */
  type FlinkMetaGroupType2 = (Long, Long)
  /**
   * deprecated
   */
  type FlinkMetaJoinType2 = (Long, List[Long])
  /**
   * Data Type used for region operations in Spark
   * We use Key / Value
   * where the Key is the GRecordKey and the value is an array of values
   */
  type GRECORD = (GRecordKey, Array[GValue])
  // Presumably (SampleID, Array of matching SampleIDs) — mirrors FlinkMetaJoinType2; verify at use sites.
  type SparkMetaJoinType = (Long, Array[Long])
  /**
   * Work with the meta data as a Key / Value for Spark implementation
   * Where the Key is the ID and the Value is the Att/Value pair
   */
  type MetaType = (Long, (String, String))
  //  case class MetaType(ID:Long, Att:String, Value:String) extends Tuple3(ID,Att,Value)
}
/**
 * Base type for genomic attribute values. Defines a total ordering:
 * numeric values (GDouble/GInt, compared numerically against each other)
 * sort above strings, which sort above GNull.
 */
@SerialVersionUID(2212l)
sealed trait GValue extends Serializable /*with Comparable[GValue]*/ with Ordered[GValue] {
  def compare(o: GValue): Int = {
    // Ordered: the order of GValue is GDouble/GInt > GString > GNull
    this match {
      case GDouble(f) => o match {
        case GDouble(v) => f compare v
        case GString(_) => 1
        // GInt is widened to Double so mixed numeric comparison is consistent.
        case GInt(v) => f compare v.toDouble
        case GNull() => 1
      }
      case GInt(f) => o match {
        case GDouble(v) => f.toDouble compare v
        case GString(_) => 1
        case GInt(v) => f compare v
        case GNull() => 1
      }
      case GString(f) => o match {
        case GDouble(_) => -1
        case GString(v) => f compare v
        case GInt(_) => -1
        case GNull() => 1
      }
      // GNull sorts below everything and equals only another GNull.
      case GNull() => o match {
        case GNull() => 0
        case _ => -1
      }
    }
  }
}
/**
 * A [[GValue]] wrapping an Int.
 *
 * @deprecated numeric values are normally carried as GDouble
 * @param v the wrapped integer
 */
@SerialVersionUID(2213l)
case class GInt(v: Int) extends GValue {

  /** Zero-argument constructor: wraps 0. */
  def this() = this(0)

  override def toString(): String = v.toString

  /** Equal only to another GInt holding the same value. */
  override def equals(other: Any): Boolean = other match {
    case GInt(value) => value.equals(v)
    case _ => false
  }
}
/**
 * Represents a @GValue that contains a number as a @Double
 *
 * @param v number
 */
@SerialVersionUID(2214l)
case class GDouble(v: Double) extends GValue {
  // Zero-argument constructor: wraps 0.0.
  def this() = this(0.0)
  override def toString(): String = {
    //TODO find a better way for rounding: this is FAST, 100x faster than formatter and bigdecimal but can give errors
    // (Math.round(v * 1000000000000L) * 0.000000000001).toString // <-- NOT WORKING!!! second multiplication get mad
    // (Math.round(v * 1000000000000L).toDouble / 1000000000000D ).toString // working quite good and fast
    // A fresh formatter is built per call: java.text.DecimalFormat is documented
    // as not thread-safe, so a shared instance could not be used without locking.
    val dfs = new DecimalFormatSymbols(Locale.ENGLISH);
    // Up to 8 fractional digits; FLOOR rounds toward negative infinity.
    val df = new DecimalFormat("#.########", dfs);
    df.setRoundingMode(RoundingMode.FLOOR);
    df.format(v)
    //v.toString // ROUNDING ERROR
  }
  // Equal only to another GDouble holding the same value.
  override def equals(other: Any): Boolean = {
    other match {
      case GDouble(value) => value.equals(v)
      case _ => false
    }
  }
}
/**
 * A [[GValue]] wrapping a String.
 *
 * @param v the wrapped string
 */
@SerialVersionUID(2215l)
case class GString(v: String) extends GValue {

  /** Zero-argument constructor: wraps the placeholder ".". */
  def this() = this(".")

  // v is already a String; no conversion needed.
  override def toString(): String = v

  /** Equal only to another GString holding the same value. */
  override def equals(other: Any): Boolean = other match {
    case GString(value) => value.equals(v)
    case _ => false
  }
}
/** A [[GValue]] representing a missing value; prints as "null". */
@SerialVersionUID(2216l)
case class GNull() extends GValue {

  override def toString(): String = "null"

  /** All GNull instances are equal to each other and to nothing else. */
  override def equals(other: Any): Boolean = other.isInstanceOf[GNull]
}
| DEIB-GECO/GMQL | GMQL-Core/src/main/scala/it/polimi/genomics/core/DataTypes.scala | Scala | apache-2.0 | 4,527 |
package s99
import org.scalatest.{FunSpec, Matchers}
class P04Spec extends FunSpec with Matchers {

  describe("length(List)") {
    it("(*) Find the number of elements of a list.") {
      // Check every list size from 0 (empty list) through 9.
      for (n <- 0 to 9) {
        val list = List.fill(n)(0)
        P04.length(list) should === (n)
      }
    }
  }

  describe("length2(List)") {
    it("(*) Find the number of elements of a list.") {
      for (n <- 0 to 9) {
        val list = List.fill(n)(0)
        P04.length2(list) should === (n)
      }
    }
  }
}
| qilab-/algorithm-problems | s-99/src/test/scala/s99/P04Spec.scala | Scala | unlicense | 510 |
package com.shocktrade.client
import com.shocktrade.client.contest._
import com.shocktrade.client.dialogs._
import com.shocktrade.client.directives._
import com.shocktrade.client.discover._
import com.shocktrade.client.news._
import com.shocktrade.client.posts.{PostController, PostService}
import com.shocktrade.client.profile._
import com.shocktrade.client.social._
import com.shocktrade.common.models.FacebookAppInfo
import io.scalajs.dom.html.browser.console
import io.scalajs.npm.angularjs.facebook.FacebookService
import io.scalajs.npm.angularjs.uirouter.{RouteProvider, RouteTo}
import io.scalajs.npm.angularjs.{Module, Scope, Timeout, angular}
import io.scalajs.social.facebook.{FB, FacebookAppConfig}
import io.scalajs.util.PromiseHelper.Implicits._
import scala.scalajs.concurrent.JSExecutionContext.Implicits.queue
import scala.scalajs.js
import scala.scalajs.js.annotation.JSExport
import scala.util.{Failure, Success}
/**
* ShockTrade Web Application Client
* @author Lawrence Daniels <lawrence.daniels@gmail.com>
*/
object WebClientJsApp {
  /** Application entry point: creates the Angular module, registers
    * directives/filters/services/controllers/dialogs, configures routing,
    * and wires up the social-network and web-socket initialization. */
  @JSExport
  def main(args: Array[String]): Unit = {
    // create the application
    val module = angular.createModule("shocktrade",
      js.Array("ngAnimate", "ngCookies", "ngRoute", "ngSanitize", "nvd3", "angularFileUpload", "toaster", "ui.bootstrap"))

    // add the custom directives
    module.directiveOf[AvatarDirective]("avatar")
    module.directiveOf[ChangeArrowDirective]("changearrow")
    module.directiveOf[CountryDirective]("country")
    module.directiveOf[NewsDirective]("news")
    module.directiveOf[StockQuoteDirective]("stock-quote")

    // add the custom filters
    module.filter("abs", Filters.abs)
    module.filter("bigNumber", Filters.bigNumber)
    module.filter("capitalize", Filters.capitalize)
    module.filter("duration", Filters.duration)
    module.filter("escape", Filters.escape)
    module.filter("newsDuration", Filters.newsDuration)
    module.filter("quoteChange", Filters.quoteChange)
    module.filter("quoteNumber", Filters.quoteNumber)
    module.filter("yesno", Filters.yesNo)

    // add the controllers and services
    configureServices(module)
    configureFactories(module)
    configureControllers(module)
    configureDialogs(module)

    // define the routes
    module.config({ ($routeProvider: RouteProvider) =>
      // configure the routes
      $routeProvider
        .when("/about/investors", new RouteTo(templateUrl = "/views/about/investors.html"))
        .when("/about/me", new RouteTo(templateUrl = "/views/about/me.html"))
        .when("/about/us", new RouteTo(templateUrl = "/views/about/us.html"))
        .when("/dashboard", new RouteTo(templateUrl = "/views/dashboard/dashboard.html", controller = classOf[DashboardController].getSimpleName))
        .when("/dashboard/:contestId", new RouteTo(templateUrl = "/views/dashboard/dashboard.html", controller = classOf[DashboardController].getSimpleName))
        .when("/discover", new RouteTo(templateUrl = "/views/discover/discover.html", controller = classOf[DiscoverController].getSimpleName))
        .when("/explore", new RouteTo(templateUrl = "/views/explore/drill_down.html", controller = classOf[ExploreController].getSimpleName, reloadOnSearch = false))
        .when("/home", new RouteTo(templateUrl = "/views/profile/home.html", controller = classOf[HomeController].getSimpleName))
        .when("/news", new RouteTo(templateUrl = "/views/news/news_center.html", controller = classOf[NewsController].getSimpleName))
        .when("/posts", new RouteTo(templateUrl = "/views/posts/index.html", controller = classOf[PostController].getSimpleName))
        .when("/research", new RouteTo(templateUrl = "/views/research/research.html", controller = classOf[ResearchController].getSimpleName))
        .when("/search", new RouteTo(templateUrl = "/views/contest/search.html", controller = classOf[GameSearchController].getSimpleName))
        .otherwise(new RouteTo(redirectTo = "/about/us"))
      ()
    })

    // initialize the application
    module.run({ ($rootScope: Scope, $timeout: Timeout,
                  MySessionService: MySessionService, SocialServices: SocialServices, WebSocketService: WebSocketService) =>
      // configure the Social Network callbacks
      configureSocialNetworkCallbacks($timeout, MySessionService, SocialServices)

      // initialize the web socket service
      WebSocketService.init()
    })
  }

  /** Registers the modal dialog services and their controllers. */
  private def configureDialogs(module: Module) {
    module.serviceOf[ComposeMessageDialog]("ComposeMessageDialog")
    module.serviceOf[InvitePlayerDialog]("InvitePlayerDialog")
    module.serviceOf[NewGameDialog]("NewGameDialog")
    module.serviceOf[NewOrderDialog]("NewOrderDialog")
    module.serviceOf[NewsQuoteDialog]("NewsQuoteDialog")
    module.serviceOf[PerksDialog]("PerksDialog")
    module.serviceOf[ReactiveSearchService]("ReactiveSearchService")
    module.serviceOf[SignUpDialog]("SignUpDialog")
    module.serviceOf[TransferFundsDialog]("TransferFundsDialog")

    module.controllerOf[ComposeMessageDialogController]("ComposeMessageDialogController")
    module.controllerOf[InvitePlayerDialogController]("InvitePlayerDialogController")
    module.controllerOf[NewGameDialogController]("NewGameDialogController")
    module.controllerOf[NewOrderDialogController]("NewOrderDialogController")
    module.controllerOf[NewsQuoteDialogController]("NewsQuoteDialogController")
    module.controllerOf[PerksDialogController]("PerksDialogController")
    // NOTE(review): registered as "SignUpController", unlike the other
    // "<Name>DialogController" registrations — confirm this is intentional.
    module.controllerOf[SignUpDialogController]("SignUpController")
    module.controllerOf[TransferFundsDialogController]("TransferFundsDialogController")
  }

  /** Registers the Angular factories. */
  private def configureFactories(module: Module): Unit = {
    module.factoryOf[UserFactory]("UserFactory")
  }

  /** Registers the application-level Angular services. */
  private def configureServices(module: Module) {
    module.serviceOf[ChatService]("ChatService")
    module.serviceOf[ContestService]("ContestService")
    module.serviceOf[ExploreService]("ExploreService")
    module.serviceOf[FacebookService]("Facebook")
    module.serviceOf[MarketStatusService]("MarketStatus")
    module.serviceOf[MySessionService]("MySessionService")
    module.serviceOf[NewsService]("NewsService")
    module.serviceOf[PortfolioService]("PortfolioService")
    module.serviceOf[UserProfileService]("UserProfileService")
    module.serviceOf[PostService]("PostService")
    module.serviceOf[QuoteCache]("QuoteCache")
    module.serviceOf[QuoteService]("QuoteService")
    module.serviceOf[ResearchService]("ResearchService")
    module.serviceOf[SocialServices]("SocialServices")
    module.serviceOf[UserService]("UserService")
    module.serviceOf[WebSocketService]("WebSocketService")
  }

  /** Registers the page-level Angular controllers. */
  private def configureControllers(module: Module) {
    module.controllerOf[AwardsController]("AwardsController")
    module.controllerOf[CashAccountController]("CashAccountController")
    module.controllerOf[ChatController]("ChatController")
    module.controllerOf[DashboardController]("DashboardController")
    module.controllerOf[DiscoverController]("DiscoverController")
    module.controllerOf[ExploreController]("ExploreController")
    module.controllerOf[ExposureController]("ExposureController")
    module.controllerOf[GameSearchController]("GameSearchController")
    module.controllerOf[HomeController]("HomeController")
    module.controllerOf[InformationBarController]("InformationBarController")
    module.controllerOf[MainController]("MainController")
    module.controllerOf[MarginAccountController]("MarginAccountController")
    module.controllerOf[MyGamesController]("MyGamesController")
    module.controllerOf[MyQuotesController]("MyQuotesController")
    module.controllerOf[NavigationController]("NavigationController")
    module.controllerOf[NewsController]("NewsController")
    module.controllerOf[PortfolioController]("PortfolioController")
    module.controllerOf[PostController]("PostController")
    module.controllerOf[ResearchController]("ResearchController")
    module.controllerOf[TradingHistoryController]("TradingHistoryController")
  }

  /** Fetches the Facebook app info and, on success, bootstraps the SDK. */
  private def configureSocialNetworkCallbacks($timeout: Timeout, mySession: MySessionService, socialServices: SocialServices): Unit = {
    socialServices.getFacebookAppInfo onComplete {
      case Success(response) => initializeFacebookApp($timeout, mySession, response.data)
      // NOTE(review): the failure cause `e` is discarded; consider logging it.
      case Failure(e) => console.error("Error initializing Facebook App")
    }
  }

  /** Installs the asynchronous Facebook SDK initialization callback. */
  private def initializeFacebookApp($timeout: Timeout, mySession: MySessionService, appInfo: FacebookAppInfo) = {
    // setup the initialization callback for Facebook
    js.Dynamic.global.fbAsyncInit = () => {
      console.log("fbAsyncInit: Setting up Facebook integration...")
      val config = FacebookAppConfig(appId = appInfo.appId, status = true, xfbml = true)
      FB.init(config)
      console.log(s"Initialized Facebook SDK (App ID # ${config.appId}) and version (${config.version}) on the Angular Facebook service...")
    }
  }

}
| ldaniels528/shocktrade.js | app/client/angularjs/src/main/scala/com/shocktrade/client/WebClientJsApp.scala | Scala | apache-2.0 | 8,978 |
package sangria.util
import io.github.classgraph.ClassGraph
import sangria.parser.QueryParser
import sangria.parser.DeliveryScheme.Throw
import spray.json._
import scala.io.Source
import net.jcazevedo.moultingyaml._
import scala.collection.JavaConverters._
object FileUtil extends StringMatchers {
  /** Loads a GraphQL query file from the `queries/` resource folder. */
  def loadQuery(name: String) =
    loadResource("queries/" + name)

  /** Loads and parses a YAML resource under `root` (default `scenarios`). */
  def loadYaml(name: String, root: String = "scenarios") =
    loadResource(root + "/" + name).parseYaml

  /** Scans the classpath for all `*.yaml` scenario files under `root.path`
    * and parses each one. Synchronized because the `ClassGraph` scan is not
    * safe to run concurrently from multiple tests. */
  def loadScenarios(path: String, root: String = "scenarios") = this.synchronized {
    val yamlResources = new ClassGraph()
      .acceptPackages(root + "." + path)
      .scan()
      .getResourcesWithExtension("yaml")
      .asScala
      .groupBy(_.getPath)
      .map { case (k, v) =>
        (k, v.head)
      } // deduplicate (`ClassGraph` gives duplicates for some reason)
      .values
      .toVector

    yamlResources.map { resource =>
      // File name = everything after the final '/' of the resource path.
      val name = resource.getPath.substring(resource.getPath.lastIndexOf("/") + 1)
      val relativePath = resource.getPathRelativeToClasspathElement
      val stream = this.getClass.getResourceAsStream("/" + relativePath)
      val contents = Source.fromInputStream(stream, "UTF-8").mkString.parseYaml

      ScenarioFile(name, relativePath, contents)
    }
  }

  /** Loads and parses a GraphQL schema from a classpath resource. */
  def loadSchema(path: String) =
    QueryParser.parse(loadResource(path))

  /** Loads test data as YAML (Left) or JSON (Right) based on the extension. */
  def loadTestData(path: String): Either[YamlValue, JsValue] = {
    val text = loadResource(path)

    if (path.endsWith(".yaml")) Left(text.parseYaml)
    else if (path.endsWith(".json")) Right(text.parseJson)
    else
      throw new IllegalArgumentException(
        s"Unsupported file format for test data '$path'. Only `*.json` and `*.yaml` files are supported.")
  }

  /** Reads a classpath resource as UTF-8 text with CRs normalized away. */
  def loadResource(path: String) =
    Option(this.getClass.getResourceAsStream("/" + path)) match {
      case Some(res) => stripCarriageReturns(Source.fromInputStream(res, "UTF-8").mkString)
      case None => throw new IllegalArgumentException("Resource not found: /" + path)
    }

  // A scenario file: its simple name, classpath-relative path, and parsed YAML.
  case class ScenarioFile(fileName: String, path: String, scenario: YamlValue) {
    def folder = path.substring(0, path.lastIndexOf("/"))
  }
}
| OlegIlyenko/sangria | modules/core/src/test/scala/sangria/util/FileUtil.scala | Scala | apache-2.0 | 2,172 |
/*******************************************************************************
Copyright (c) 2013, S-Core.
All rights reserved.
Use is subject to license terms.
This distribution may include materials developed by third parties.
******************************************************************************/
package kr.ac.kaist.jsaf.analysis.typing.models.Tizen
import kr.ac.kaist.jsaf.analysis.typing.AddressManager._
import kr.ac.kaist.jsaf.analysis.cfg.{CFG, CFGExpr, InternalError}
import kr.ac.kaist.jsaf.analysis.typing.domain.{BoolFalse => F, BoolTrue => T, _}
import kr.ac.kaist.jsaf.analysis.typing.models._
import kr.ac.kaist.jsaf.analysis.typing._
import kr.ac.kaist.jsaf.analysis.typing.domain.Heap
import kr.ac.kaist.jsaf.analysis.typing.domain.Context
import kr.ac.kaist.jsaf.analysis.typing.models.AbsBuiltinFunc
import kr.ac.kaist.jsaf.analysis.typing.models.AbsConstValue
object TIZENNFCAdapter extends Tizen {
  private val name = "NFCAdapter"

  /* predefined locations */
  val loc_proto = newSystemRecentLoc(name + "Proto")

  override def getInitList(): List[(Loc, List[(String, AbsProperty)])] = List(
    (loc_proto, prop_proto)
  )

  /* prototype */
  // Abstract model of the NFCAdapter prototype object: its builtin methods
  // and internal properties used by the analyzer.
  private val prop_proto: List[(String, AbsProperty)] = List(
    ("@class", AbsConstValue(PropValue(AbsString.alpha("CallbackObject")))),
    ("@proto", AbsConstValue(PropValue(ObjectValue(Value(ObjProtoLoc), F, F, F)))),
    ("@extensible", AbsConstValue(PropValue(T))),
    ("setPowered", AbsBuiltinFunc("tizen.NFCAdapter.setPowered",3)),
    ("setTagListener", AbsBuiltinFunc("tizen.NFCAdapter.setTagListener",2)),
    ("setPeerListener", AbsBuiltinFunc("tizen.NFCAdapter.setPeerListener",1)),
    ("unsetTagListener", AbsBuiltinFunc("tizen.NFCAdapter.unsetTagListener",0)),
    ("unsetPeerListener", AbsBuiltinFunc("tizen.NFCAdapter.unsetPeerListener",0)),
    ("getCachedMessage", AbsBuiltinFunc("tizen.NFCAdapter.getCachedMessage",0))
  )

  // Abstract semantics of each builtin: every handler threads the abstract
  // heap/context through, validates argument types (accumulating possible
  // WebAPIExceptions), registers callbacks, and raises the collected
  // exception set into the exceptional heap/context.
  override def getSemanticMap(): Map[String, SemanticFun] = {
    Map(
      ("tizen.NFCAdapter.setPowered" -> (
        (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
          // Allocate one fresh address for the error-callback argument array.
          val lset_env = h(SinglePureLocalLoc)("@env")._1._2._2
          val set_addr = lset_env.foldLeft[Set[Address]](Set())((a, l) => a + locToAddr(l))
          if (set_addr.size > 1) throw new InternalError("API heap allocation: Size of env address is " + set_addr.size)
          val addr_env = set_addr.head
          val addr1 = cfg.getAPIAddress(addr_env, 0)
          val l_r1 = addrToLoc(addr1, Recent)
          val (h_1, ctx_1) = Helper.Oldify(h, ctx, addr1)
          val v_1 = getArgValue(h_1, ctx_1, args, "0")
          val n_arglen = Operator.ToUInt32(getArgValue(h_1, ctx_1, args, "length"))
          val lset_this = h(SinglePureLocalLoc)("@this")._1._2._2
          // First argument must be a boolean.
          val es =
            if (v_1._1._3 </ BoolTop)
              Set[WebAPIException](TypeMismatchError)
            else TizenHelper.TizenExceptionBot
          // Behavior depends on arity: 1 = set only, 2 = +successCB, 3 = +errorCB.
          val (h_2, es_1) = n_arglen match {
            case UIntSingle(n) if n == 1 =>
              val h_2 = lset_this.foldLeft(HeapBot)((_h, l) => {
                (_h + Helper.PropStore(h_1, l, AbsString.alpha("powered"), Value(v_1._1._3)))
              })
              (h_2, TizenHelper.TizenExceptionBot)
            case UIntSingle(n) if n == 2 =>
              val v_2 = getArgValue(h_1, ctx_1, args, "1")
              val es1 =
                if (v_2._1 <= PValueTop)
                  Set[WebAPIException](TypeMismatchError)
                else TizenHelper.TizenExceptionBot
              val es2 =
                if (v_2._2.exists((l) => Helper.IsCallable(h_1, l) <= F))
                  Set[WebAPIException](TypeMismatchError)
                else TizenHelper.TizenExceptionBot
              val h_2 = lset_this.foldLeft(HeapBot)((_h, l) => {
                (_h + Helper.PropStore(h_1, l, AbsString.alpha("powered"), Value(v_1._1._3)))
              })
              val h_3 = TizenHelper.addCallbackHandler(h_2, AbsString.alpha("successCB"), Value(v_2._2), Value(UndefTop))
              (h_3, es1 ++ es2)
            case UIntSingle(n) if n == 3 =>
              val v_2 = getArgValue(h_1, ctx_1, args, "1")
              val v_3 = getArgValue(h_1, ctx_1, args, "2")
              val es1 =
                if (v_2._1 <= PValueTop)
                  Set[WebAPIException](TypeMismatchError)
                else TizenHelper.TizenExceptionBot
              val es2 =
                if (v_2._2.exists((l) => Helper.IsCallable(h_1, l) <= F))
                  Set[WebAPIException](TypeMismatchError)
                else TizenHelper.TizenExceptionBot
              val es3 =
                if (v_3._1 <= PValueTop)
                  Set[WebAPIException](TypeMismatchError)
                else TizenHelper.TizenExceptionBot
              val es4 =
                if (v_3._2.exists((l) => Helper.IsCallable(h_1, l) <= F))
                  Set[WebAPIException](TypeMismatchError)
                else TizenHelper.TizenExceptionBot
              val h_2 = lset_this.foldLeft(HeapBot)((_h, l) => {
                (_h + Helper.PropStore(h_1, l, AbsString.alpha("powered"), Value(v_1._1._3)))
              })
              // Error callback receives an array holding the possible errors.
              val o_arr2 = Helper.NewArrayObject(AbsNumber.alpha(1)).
                update("0", PropValue(ObjectValue(Value(LocSet(TIZENtizen.loc_serviceNotAvailableerr) ++ LocSet(TIZENtizen.loc_unknownerr)), T, T, T)))
              val h_3 = h_2.update(l_r1, o_arr2)
              val h_4 = TizenHelper.addCallbackHandler(h_3, AbsString.alpha("successCB"), Value(v_2._2), Value(UndefTop))
              val h_5 = TizenHelper.addCallbackHandler(h_4, AbsString.alpha("errorCB"), Value(v_3._2), Value(l_r1))
              (h_5, es1 ++ es2 ++ es3 ++ es4)
            case _ => {
              (HeapBot, TizenHelper.TizenExceptionBot)
            }
          }
          val est = Set[WebAPIException](SecurityError)
          val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, es ++ es_1 ++ est)
          ((h_2, ctx_1), (he + h_e, ctxe + ctx_e))
        }
        )),
      ("tizen.NFCAdapter.setTagListener" -> (
        (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
          val lset_env = h(SinglePureLocalLoc)("@env")._1._2._2
          val set_addr = lset_env.foldLeft[Set[Address]](Set())((a, l) => a + locToAddr(l))
          if (set_addr.size > 1) throw new InternalError("API heap allocation: Size of env address is " + set_addr.size)
          val addr_env = set_addr.head
          val addr1 = cfg.getAPIAddress(addr_env, 0)
          val l_r1 = addrToLoc(addr1, Recent)
          val (h_1, ctx_1) = Helper.Oldify(h, ctx, addr1)
          val v_1 = getArgValue(h_1, ctx_1, args, "0")
          val n_arglen = Operator.ToUInt32(getArgValue(h_1, ctx_1, args, "length"))
          // First argument: an object with callable `onattach`/`ondetach` members.
          val (h_2, es) = v_1._2.foldLeft((h_1, TizenHelper.TizenExceptionBot))((_he, l) => {
            val v1 = Helper.Proto(_he._1, l, AbsString.alpha("onattach"))
            val v2 = Helper.Proto(_he._1, l, AbsString.alpha("ondetach"))
            val es1 =
              if (v1._2.exists((l) => Helper.IsCallable(_he._1, l) <= F))
                Set[WebAPIException](TypeMismatchError)
              else TizenHelper.TizenExceptionBot
            val es2 =
              if (v2._2.exists((l) => Helper.IsCallable(_he._1, l) <= F))
                Set[WebAPIException](TypeMismatchError)
              else TizenHelper.TizenExceptionBot
            // `onattach` is invoked with an abstract NFCTag argument.
            val o_arr = Helper.NewArrayObject(AbsNumber.alpha(1)).
              update("0", PropValue(ObjectValue(Value(TIZENnfc.loc_nfctag), T, T, T)))
            val h_2 = _he._1.update(l_r1, o_arr)
            val h_3 = TizenHelper.addCallbackHandler(h_2, AbsString.alpha("NFCTagDetectCB.onattach"), Value(v1._2), Value(l_r1))
            val h_4 = TizenHelper.addCallbackHandler(h_3, AbsString.alpha("NFCTagDetectCB.ondetach"), Value(v2._2), Value(UndefTop))
            (h_4, _he._2 ++ es1 ++ es2)
          })
          // Optional second argument: an array of tag-type filters.
          val es_1 = n_arglen match {
            case UIntSingle(n) if n == 2 =>
              val v_2 = getArgValue(h_2, ctx_1, args, "1")
              val es1 =
                if (v_2._2.exists((l) => Helper.IsArray(h_2, l) <= F))
                  Set[WebAPIException](TypeMismatchError)
                else TizenHelper.TizenExceptionBot
              es1
            case _ => TizenHelper.TizenExceptionBot
          }
          val est = Set[WebAPIException](SecurityError, UnknownError, ServiceNotAvailableError)
          val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, es ++ es_1 ++ est)
          ((h_2, ctx_1), (he + h_e, ctxe + ctx_e))
        }
        )),
      ("tizen.NFCAdapter.setPeerListener" -> (
        (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
          val lset_env = h(SinglePureLocalLoc)("@env")._1._2._2
          val set_addr = lset_env.foldLeft[Set[Address]](Set())((a, l) => a + locToAddr(l))
          if (set_addr.size > 1) throw new InternalError("API heap allocation: Size of env address is " + set_addr.size)
          val addr_env = set_addr.head
          val addr1 = cfg.getAPIAddress(addr_env, 0)
          val l_r1 = addrToLoc(addr1, Recent)
          val (h_1, ctx_1) = Helper.Oldify(h, ctx, addr1)
          val v = getArgValue(h_1, ctx_1, args, "0")
          // Same shape as setTagListener, but the attach callback gets an NFCPeer.
          val (h_2, es) = v._2.foldLeft((h_1, TizenHelper.TizenExceptionBot))((_he, l) => {
            val v1 = Helper.Proto(_he._1, l, AbsString.alpha("onattach"))
            val v2 = Helper.Proto(_he._1, l, AbsString.alpha("ondetach"))
            val es1 =
              if (v1._2.exists((l) => Helper.IsCallable(_he._1, l) <= F))
                Set[WebAPIException](TypeMismatchError)
              else TizenHelper.TizenExceptionBot
            val es2 =
              if (v2._2.exists((l) => Helper.IsCallable(_he._1, l) <= F))
                Set[WebAPIException](TypeMismatchError)
              else TizenHelper.TizenExceptionBot
            val o_arr = Helper.NewArrayObject(AbsNumber.alpha(1)).
              update("0", PropValue(ObjectValue(Value(TIZENnfc.loc_nfcpeer), T, T, T)))
            val h_2 = _he._1.update(l_r1, o_arr)
            val h_3 = TizenHelper.addCallbackHandler(h_2, AbsString.alpha("NFCPeerDetectCB.onattach"), Value(v1._2), Value(l_r1))
            val h_4 = TizenHelper.addCallbackHandler(h_3, AbsString.alpha("NFCPeerDetectCB.ondetach"), Value(v2._2), Value(UndefTop))
            (h_4, _he._2 ++ es1 ++ es2)
          })
          val est = Set[WebAPIException](SecurityError, UnknownError, ServiceNotAvailableError)
          val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, es ++ est)
          ((h_2, ctx_1), (he + h_e, ctxe + ctx_e))
        }
        )),
      ("tizen.NFCAdapter.unsetTagListener" -> (
        (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
          // No heap effect modeled; only the possible exceptions are raised.
          val est = Set[WebAPIException](SecurityError, UnknownError, ServiceNotAvailableError)
          val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, est)
          ((h, ctx), (he + h_e, ctxe + ctx_e))
        }
        )),
      ("tizen.NFCAdapter.unsetPeerListener" -> (
        (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
          // No heap effect modeled; only the possible exceptions are raised.
          val est = Set[WebAPIException](SecurityError, UnknownError, ServiceNotAvailableError)
          val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, est)
          ((h, ctx), (he + h_e, ctxe + ctx_e))
        }
        )),
      ("tizen.NFCAdapter.getCachedMessage" -> (
        (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
          // Returns null or an abstract NDEFMessage.
          val est = Set[WebAPIException](SecurityError, UnknownError)
          val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, est)
          ((Helper.ReturnStore(h, Value(PValue(NullTop), LocSet(TIZENnfc.loc_ndefmsg))), ctx), (he + h_e, ctxe + ctx_e))
        }
        ))
    )
  }

  override def getPreSemanticMap(): Map[String, SemanticFun] = {Map()}
  override def getDefMap(): Map[String, AccessFun] = {Map()}
  override def getUseMap(): Map[String, AccessFun] = {Map()}
} | daejunpark/jsaf | src/kr/ac/kaist/jsaf/analysis/typing/models/Tizen/TIZENNFCAdapter.scala | Scala | bsd-3-clause | 12,419 |
///////////////////////////////////////////////////////////////////////////
//
// Copyright (c) 2014 Adobe Systems Incorporated. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
///////////////////////////////////////////////////////////////////////////
package com.adobe
package queries
// Spark.
import org.apache.spark.{SparkConf,SparkContext}
import org.apache.spark.SparkContext._
import org.apache.spark.storage.StorageLevel
import org.apache.spark.rdd.RDD
// Analytics Thrift objects.
import thrift.AnalyticsData
object Pageviews extends Query {

  // The only analytics column this query reads.
  def colsNeeded = Seq("post_pagename")

  def run(c: QueryConf) = {
    // For each day, count the records that carry a non-empty page name.
    val dailyCounts = c.data.map { dayData =>
      val withPageName = dayData.filter { root =>
        root.post_pagename != null && !root.post_pagename.isEmpty()
      }
      withPageName.count()
    }

    if (c.profile) {
      // Profiling mode: render the raw counts as a bracketed list.
      dailyCounts.map(_.toString).mkString("[", ", ", "]")
    } else {
      html.PageViews("Pageviews", c.daysInRange.zip(dailyCounts)).toString
    }
  }
}
| alexanderfield/spindle | src/main/scala/queries/Pageviews.scala | Scala | apache-2.0 | 1,492 |
/*
* (c) Copyright 2016 Hewlett Packard Enterprise Development LP
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cogx.cogmath.algebra.complex
import cogx.cogmath.algebra.real.PoorFloat._
import cogx.cogmath.geometry.Shape
import cogx.utilities.Random
/** Complex numbers.
*
* @author Greg Snider
*/
@SerialVersionUID(7029811968224931195L)
class Complex(val real: Float, val imaginary: Float)
        extends Serializable
{
  /** The squared norm (squared magnitude) of the complex number. */
  def normSq = real * real + imaginary * imaginary

  /** The phase (angle) in radians.
    *
    * This is a number in the range (-Pi, Pi].
    */
  def phase: Float = math.atan2(imaginary, real).toFloat

  /** The length (magnitude) of the complex number. */
  def magnitude = math.sqrt(normSq).toFloat

  /** `this` * `that` (complex multiplication). */
  def *(that: Complex) =
    new Complex(this.real * that.real - this.imaginary * that.imaginary,
      this.real * that.imaginary + this.imaginary * that.real)

  /** `this` * `that` (scaling by a real factor). */
  def *(that: Float) = new Complex(this.real * that, this.imaginary * that)

  /** `this` + `that` */
  def +(that: Complex) =
    new Complex(this.real + that.real, this.imaginary + that.imaginary)

  /** `this` + `that` (real addend). */
  def +(that: Float) =
    new Complex(this.real + that, this.imaginary)

  /** `this` - `that` */
  def -(that: Complex) =
    new Complex(this.real - that.real, this.imaginary - that.imaginary)

  /** `this` - `that` (real subtrahend). */
  def -(that: Float) =
    new Complex(this.real - that, this.imaginary)

  /** `this` / `that` (division by a real scalar). */
  def /(that: Float) = new Complex(real / that, imaginary / that)

  /** `this` / `that` (complex division via the conjugate formula). */
  def /(that: Complex): Complex = {
    val d = that.normSq
    val real = (this.real * that.real + this.imaginary * that.imaginary) / d
    val imag = (this.imaginary * that.real - this.real * that.imaginary) / d
    new Complex(real, imag)
  }

  /** -`this` (negation of both components). */
  def unary_-() = new Complex(-(this.real), -(this.imaginary))

  /** 1/`this` (multiplicative inverse). */
  def reciprocal= {
    val norm = this.normSq
    new Complex(this.real/norm, -(this.imaginary)/norm)
  }

  /** e ^ this, computed as e^real * (cos(imag) + i*sin(imag)). */
  def exp: Complex = {
    val x = new Complex(math.exp(this.real).toFloat, 0)
    val y = new Complex(math.cos(this.imaginary).toFloat,
      math.sin(this.imaginary).toFloat)
    x * y
  }

  /** this ^ `exponent`. A negative exponent raises the reciprocal to
    * the corresponding positive power; exponent 0 yields 1.
    */
  def power(exponent: Int): Complex = {
    if (exponent == 0)
      new Complex(1, 0)
    else {
      // Use the reciprocal as the base for negative exponents.
      val base = if (exponent < 0) new Complex(1, 0) / this else this
      // Repeated multiplication, |exponent| times.
      (1 to exponent.abs).foldLeft(new Complex(1, 0))((acc, _) => acc * base)
    }
  }

  /** Get the data in the tensor, flattened to a linear array. */
  protected def getData = new ComplexArray(this)

  /** Get the complex conjugate of this. */
  def conjugate = new Complex(real, -imaginary)

  /** Approximate equality, component-wise (uses PoorFloat's `~==`). */
  def ~==(that: Complex) =
    (real ~== that.real) && (imaginary ~== that.imaginary)

  /** Test `this` and `other` for deep equality.
    *
    * Note: a Complex with zero imaginary part also equals the corresponding
    * bare Float (legacy behavior, preserved for compatibility).
    */
  override def equals(other: Any): Boolean =
    other match {
      case that: Complex =>
        if (that canEqual this)
          (this.real == that.real) && (this.imaginary == that.imaginary)
        else
          false
      case that: Float =>
        this.real == that && this.imaginary == 0f
      case _ => false
    }

  /** Helper for equals. */
  def canEqual(other: Any): Boolean = other.isInstanceOf[Complex]

  /** Required because of overriding equals. */
  override def hashCode: Int =
    real.hashCode + imaginary.hashCode

  /** Format the complex number as a String. */
  override def toString = "(%8.3f, %8.3fi) ".format(real, imaginary)

  /** Print out a complex number for debugging. */
  def print = println(toString)
}
/** Factory for creating complex numbers. */
object Complex {
  /** Random number generator. */
  private val rand = new Random

  // Shape of complex scalar tensor
  private val shape = Shape()

  /** Square root of -1. */
  val I = new Complex(0, 1)

  /** Create a complex number from the real and imaginary components. */
  def apply(real: Float, imaginary: Float) = new Complex(real, imaginary)

  /** Create a Complex number from polar coordinates. */
  def polar(r: Float, phase: Float) =
    new Complex(r * math.cos(phase).toFloat, r * math.sin(phase).toFloat)

  /** Exponential of a complex number. */
  // NOTE(review): duplicates the instance method `Complex.exp`; consider
  // delegating to `c.exp` to keep one implementation.
  def expc(c: Complex): Complex = {
    Complex(math.cos(c.imaginary).toFloat, math.sin(c.imaginary).toFloat) *
            math.exp(c.real).toFloat
  }

  /** Create a complex number with random real and imaginary components. */
  def random: Complex = new Complex(rand.nextFloat, rand.nextFloat)

  //implicit def floatToComplex(d: Float) = new Complex(d, 0)
  //implicit def intToComplex(i: Int) = new Complex(i.toFloat, 0)
}
| hpe-cct/cct-core | src/main/scala/cogx/cogmath/algebra/complex/Complex.scala | Scala | apache-2.0 | 5,296 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.scala.examples.wordcount
import org.apache.flink.api.java.utils.ParameterTool
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.examples.wordcount.util.WordCountData
/**
* Implements the "WordCount" program that computes a simple word occurrence
* histogram over text files in a streaming fashion.
*
* The input is a plain text file with lines separated by newline characters.
*
* Usage:
* {{{
* WordCount --input <path> --output <path>
* }}}
*
* If no parameters are provided, the program is run with default data from
* {@link WordCountData}.
*
* This example shows how to:
*
* - write a simple Flink Streaming program,
* - use tuple data types,
* - write and use transformation functions.
*
*/
object WordCount {

  /** Entry point: builds and executes the streaming word-count pipeline. */
  def main(args: Array[String]) {

    // Checking input parameters
    val params = ParameterTool.fromArgs(args)

    // set up the execution environment
    val env = StreamExecutionEnvironment.getExecutionEnvironment

    // make parameters available in the web interface
    env.getConfig.setGlobalJobParameters(params)

    // get input data
    val text =
    // read the text file from given input path
    if (params.has("input")) {
      env.readTextFile(params.get("input"))
    } else {
      println("Executing WordCount example with default inputs data set.")
      println("Use --input to specify file input.")
      // get default test text data
      env.fromElements(WordCountData.WORDS: _*)
    }

    val counts: DataStream[(String, Int)] = text
      // split up the lines in pairs (2-tuples) containing: (word,1)
      .flatMap(_.toLowerCase.split("\\W+"))
      .filter(_.nonEmpty)
      .map((_, 1))
      // group by the tuple field "0" and sum up tuple field "1"
      .keyBy(0)
      .sum(1)

    // emit result
    if (params.has("output")) {
      counts.writeAsText(params.get("output"))
    } else {
      println("Printing result to stdout. Use --output to specify output path.")
      counts.print()
    }

    // execute program
    env.execute("Streaming WordCount")
  }
}
| tzulitai/flink | flink-examples/flink-examples-streaming/src/main/scala/org/apache/flink/streaming/scala/examples/wordcount/WordCount.scala | Scala | apache-2.0 | 2,921 |
package com.frenchcoder.scalamones.service
import akka.actor._
import akka.event.Logging
import com.frenchcoder.scalamones.elastic.ClusterStat._
import com.frenchcoder.scalamones.elastic.ElasticJsonProtocol
import com.frenchcoder.scalamones.elastic.Stat._
import com.frenchcoder.scalamones.service.KpiProvider.{KpiNotify, KpiUnMonitor, KpiMonitor}
import spray.client.pipelining._
import spray.http.Uri
import spray.httpx.SprayJsonSupport
import spray.json.{JsonFormat, DefaultJsonProtocol}
import spray.httpx.unmarshalling._
import scala.util.{Failure, Success}
import scala.concurrent.duration._
import scala.reflect._
object KpiProvider {
  // Protocol: subscribe a watcher, unsubscribe it, and push a KPI value.
  case class KpiMonitor(watcher: ActorRef)
  case class KpiUnMonitor(watcher: ActorRef)
  case class KpiNotify[T](kpi:T)

  import SprayJsonSupport._
  import ElasticJsonProtocol._

  /** Creates one provider actor per known KPI endpoint, keyed by type tag. */
  def startServices(baseUrl: Uri)(implicit c: ActorContext, s:SendReceive) : Map[String, ActorRef] = {
    serviceMap map { case (key, value) => (key, c.actorOf(value(s, baseUrl))) }
  }

  // Props for a per-node stat: unwraps the NodesStat envelope and extracts
  // the requested sub-stat for each node id.
  private[service]
  def nodeStatProps[T: JsonFormat](e: NodeStat => Option[T], path:String)(s:SendReceive, baseUrl: Uri): Props =
    Props(new KpiProvider[NodesStat, Map[String, Option[T]]](s, baseUrl + path, (n => n.nodes map ( m => (m._1, e(m._2)))) ))

  // Props for a cluster-level stat that is returned without an envelope.
  def noEnvelopeStatProps[T: FromResponseUnmarshaller](path: String)(s:SendReceive, baseUrl: Uri): Props =
    Props(new KpiProvider[T, T](s, baseUrl + path, (n => n) ))

  import ClusterStatProtocol._

  // Registry of supported KPIs: type-tag string -> provider Props factory.
  private[service]
  val serviceMap: Map[String, ((SendReceive, Uri) => Props)] = Map(
    classTag[NodeJvmStat].toString() -> nodeStatProps[NodeJvmStat](_.jvm, "/_nodes/stats/jvm"),
    classTag[NodeOsStat].toString() -> nodeStatProps[NodeOsStat](_.os, "/_nodes/stats/os"),
    classTag[ClusterHealth].toString() -> noEnvelopeStatProps[ClusterHealth]("/_cluster/health"),
    classTag[ClusterStat].toString() -> noEnvelopeStatProps[ClusterStat]("/_cluster/stats")
  )
}
// Actor that polls `url` every 5 seconds while at least one watcher is
// subscribed, extracts a KPI of type U from the raw response T, caches the
// latest value, and broadcasts it to all watchers. State machine (via
// `become`): idle -> active+waitingForRequest -> active+waitingForResponse.
class KpiProvider[T: FromResponseUnmarshaller, U: FromResponseUnmarshaller](val s:SendReceive, val url:String, val extractor: T=>U) extends Actor {
  val log = Logging(context.system, getClass)
  // Remove import context._ to prevent ambiguous implicit ActorRefFactory in context & system
  import context.dispatcher
  import context.become

  // Internal operation
  case class SendRequest()
  case class KpiError(error: Throwable)

  val pipeline = s ~> unmarshal[T]
  var watchers = Set.empty[ActorRef]
  var latestValue: Option[U] = None

  //self ! SendRequest
  def receive = idle

  // Idle: no watchers, no polling. First KpiMonitor starts the schedule.
  def idle: Receive = {
    case KpiMonitor(watcher) =>
      log.debug("Monitor message received, become active")
      watchers += watcher
      // Schedule update every 5 seconds
      val scheduledRequestor = context.system.scheduler.schedule(0.seconds, 5.seconds, self, SendRequest)
      become(active(scheduledRequestor) orElse waitingForRequest(scheduledRequestor))
  }

  // Active: handles subscribe/unsubscribe; cancels polling and returns to
  // idle when the last watcher leaves.
  def active(scheduledRequestor: Cancellable): Receive = {
    case KpiMonitor(watcher) =>
      log.debug("Monitor message received, already active")
      watchers += watcher
      // Send latest value to watcher so it gets immediately a value
      latestValue foreach (watcher ! KpiNotify(_))

    case KpiUnMonitor(watcher) =>
      log.debug("UnMonitor message received")
      watchers -= watcher
      if (watchers.isEmpty) {
        log.debug("No more watchers, become idle")
        scheduledRequestor.cancel()
        latestValue = None
        become(idle)
      }
    case KpiError(error) =>
      // Todo Forward error to manager ?
      // On error, drop back to accepting the next scheduled request.
      become(active(scheduledRequestor) orElse waitingForRequest(scheduledRequestor))
  }

  // Waiting for the next scheduled tick; fires the HTTP request and
  // transitions to waitingForResponse.
  def waitingForRequest(scheduledRequestor: Cancellable): Receive = {
    case SendRequest =>
      val f = pipeline {
        Get(url)
      }
      f onComplete {
        case Success(data) =>
          log.debug(s"Received new data from ${url}")
          // NOTE(review): isInstanceOf on erased type parameters T/U is
          // unchecked at runtime — confirm these guards behave as intended.
          if (data.isInstanceOf[T]) {
            val embed = extractor(data.asInstanceOf[T])
            if (embed.isInstanceOf[U]) {
              // Notify self of new value
              self ! KpiNotify(embed)
            }
          }
        case Failure(error) =>
          self ! KpiError(error)
          log.error(error, s"Error while fetching data from ${url}")
      }
      become(active(scheduledRequestor) orElse waitingForResponse(scheduledRequestor))
  }

  // Waiting for the in-flight request: caches the extracted value, fans it
  // out to watchers, and re-arms for the next tick.
  def waitingForResponse(scheduledRequestor: Cancellable): Receive = {
    case KpiNotify(data) =>
      latestValue = Some(data.asInstanceOf[U])
      watchers foreach( _ ! KpiNotify(data))
      become(active(scheduledRequestor) orElse waitingForRequest(scheduledRequestor))
  }
}
| jpthomasset/scalamones | src/main/scala-2.11/com/frenchcoder/scalamones/service/KpiProvider.scala | Scala | mit | 4,666 |
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2015-2021 Andre White.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.truthencode.ddo.model.feats
import io.truthencode.ddo.model.classes.HeroicCharacterClass
import io.truthencode.ddo.model.classes.HeroicCharacterClass.Artificer
import io.truthencode.ddo.support.requisite.{FeatRequisiteImpl, GrantsToClass, RequiresAllOfClass}
/**
* Created by adarr on 2/16/2017.
*/
/**
 * Artificer Skill Mastery: a passive feat granted to (and requiring) Artificer level 13.
 */
protected[feats] trait ArtificerSkillMastery
  extends FeatRequisiteImpl with Passive with GrantsToClass with RequiresAllOfClass {

  // Both the requirement and the grant reference the same class/level pair.
  private def artificerAt13: Seq[(HeroicCharacterClass, Int)] = Seq((Artificer, 13))

  override def allOfClass: Seq[(HeroicCharacterClass, Int)] = artificerAt13

  override def grantToClass: Seq[(HeroicCharacterClass, Int)] = artificerAt13
}
| adarro/ddo-calc | subprojects/common/ddo-core/src/main/scala/io/truthencode/ddo/model/feats/ArtificerSkillMastery.scala | Scala | apache-2.0 | 1,292 |
package com.arcusys.valamis.util.mustache
import java.io.InputStream
import scala.io.Source
/**
 * A compiled Mustache template.
 *
 * The primary constructor takes an already-parsed token tree; the auxiliary
 * constructors parse a template from a [[scala.io.Source]], an `InputStream`,
 * or a `String`, optionally with custom open/close delimiters (default
 * `{{` / `}}`).
 */
class Mustache(
  root: Token) extends MustacheHelperSupport {

  // Parse `source` with the given delimiters and delegate to the primary constructor.
  def this(source: Source, open: String = "{{", close: String = "}}") =
    this((new Parser {
      val src = source
      var otag = open
      var ctag = close
    }).parse())

  def this(inputStream: InputStream) = {
    this(Source.fromInputStream(inputStream))
  }

  def this(str: String) = this(Source.fromString(str))

  def this(
    str: String, open: String, close: String) = this(Source.fromString(str), open, close)

  private val compiledTemplate = root

  // Helper methods of this instance exposed to the template as globals, keyed by
  // method name. Built reflectively from the public methods of the concrete
  // (possibly subclassed) Mustache instance.
  val globals: Map[String, Any] =
    {
      // java.lang.Object methods that must not leak into the template namespace.
      val excludedGlobals = List("wait", "toString", "hashCode", "getClass", "notify", "notifyAll")
      Map(
        (this.getClass().getMethods
          .filter(x => {
            val name = x.getName
            val pt = x.getParameterTypes
            // Skip compiler-synthesized default-argument methods, excluded Object
            // methods, and anything that is not 0-ary or 1-ary taking a String.
            (!name.startsWith("render$default")
              ) && (
              !name.startsWith("product$default")
              ) && (
              !name.startsWith("init$default")
              ) && (
              !excludedGlobals.contains(name)
              ) && ((
              pt.length == 0
              ) || (
              pt.length == 1
                && pt(0) == classOf[String]
              ))
          })
          .map(x => {
            // 0-ary methods become thunks; 1-ary (String) methods become String => Any.
            x.getName ->
              (if (x.getParameterTypes.length == 0) () => { x.invoke(this) }
              else (str: String) => { x.invoke(this, str) })
          })): _*
      )
    }

  // Render the template to a String for the given context and partials.
  def render(
    context: Any = null, partials: Map[String, Mustache] = Map(), callstack: List[Any] = List(this)): String = product(context, partials, callstack).toString

  // Render the template to its intermediate TokenProduct representation.
  def product(
    context: Any = null, partials: Map[String, Mustache] = Map(), callstack: List[Any] = List(this)): TokenProduct = compiledTemplate.render(context, partials, callstack)
}
| arcusys/Valamis | valamis-util/src/main/scala/com/arcusys/valamis/util/mustache/Mustache.scala | Scala | gpl-3.0 | 2,265 |
package inverse_macros.monads
import debug._
import org.junit.Assert._
import org.scalatest.FunSuite
import scala.language.implicitConversions
import Monads._
/**
 * Tests for the `inverse_macros` continuation/monad transformation.
 *
 * Each test body is rewritten by `inverse_macros.transform`; the `@monad[...]`
 * type ascriptions and `.reflect` calls mark the points where the macro inserts
 * monadic binds, so the exact statement shapes below are load-bearing.
 */
class ContinuationTest extends FunSuite {
  // Compare two code strings modulo whitespace and macro-generated fresh
  // variable names (replaceFreshVariables comes from the debug package).
  def compare(s1: String)(s2: String) = {
    val t1 = replaceFreshVariables(s1.replaceAll("\\s+", " "))
    val t2 = replaceFreshVariables(s2.replaceAll("\\s+", " "))
    assert(t1 == t2)
  }

  // Annotated expression *before* the val definition at each step.
  test("simple 1 @") {
    import OptionMonad._
    inverse_macros.transform {
      10: @monad[Option[Int]]
      val _1 = 10: @monad[Option[Int]]
      println(_1)
      20: @monad[Option[Int]]
      val _2 = 20: @monad[Option[Int]]
      println(_2)
      30: @monad[Option[Int]]
      val _3 = 30: @monad[Option[Int]]
      println(_3)
      40: @monad[Option[Int]]
      val _4 = 40: @monad[Option[Int]]
      println(_4)
      50: @monad[Option[Int]]
      val _5 = 50: @monad[Option[Int]]
      println(_5)
      0
    }
  }

  // Same as above but with the val definition *before* the annotated expression.
  test("simple 2 @") {
    import OptionMonad._
    inverse_macros.transform {
      val _1 = 10: @monad[Option[Int]]
      10: @monad[Option[Int]]
      println(_1)
      val _2 = 20: @monad[Option[Int]]
      20: @monad[Option[Int]]
      println(_2)
      val _3 = 30: @monad[Option[Int]]
      30: @monad[Option[Int]]
      println(_3)
      val _4 = 40: @monad[Option[Int]]
      40: @monad[Option[Int]]
      println(_4)
      val _5 = 50: @monad[Option[Int]]
      50: @monad[Option[Int]]
      println(_5)
      0
    }
  }

  // Unit-typed monadic ascriptions, plus reify/reflect with a side-effecting array.
  test("simple unit @") {
    import OptionMonad._
    inverse_macros.transform {
      (): @monad[Option[Unit]]
      (): @monad[Option[Unit]]
      0
    }
    inverse_macros.transform {
      val a = Array[Int](1)
      reify[Unit, Option[Unit]] {
        a(0) += Some(1).reflect
        // Reflecting None short-circuits the rest of the reify block.
        a(0) += None.asInstanceOf[Option[Int]].reflect
        ()
      }
    }
  }

  // reify/reflect over Option: Some binds through, None short-circuits to empty.
  test("option") {
    import OptionMonad._
    inverse_macros.transform {
      assert(reify[Int, Option[Int]](10: @monad[Option[Int]]) == Some(10))
      assert(reify[Int, Option[Int]](Some(19).reflect + 1) == Some(20))
      assert(reify[Int, Option[Int]](None.asInstanceOf[Option[Int]].reflect).isEmpty)
    }
  }

  // State monad used as a reader: reify produces Int => (Int, Int) where
  // ._2 of the result is the computed value.
  test("reader") {
    import StateMonad._
    inverse_macros.transform {
      assert {
        reify[Int, Int => (Int, Int)](1).apply(10)._2 == 1
      }
      assert {
        reify[Int, Int => (Int, Int)](get[Int] + 1).apply(10)._2 == 11
      }
      assert {
        reify[Int, Int => (Int, Int)](get[Int] + get[Int] + 1).apply(10)._2 == 21
      }
      assert {
        reify[Int, Int => (Int, Int)](((x: Int) => (x, x + 1)).reflect).apply(10)._2 == 11
      }
      assert {
        reify[Int, Int => (Int, Int)](((x: Int) => (x, x + 1)).reflect + 1).apply(10)._2 == 12
      }
    }
  }

  // State monad used as a writer: ._1 of the result is the final state; the
  // last put/reflect wins.
  test("writer") {
    import StateMonad._
    inverse_macros.transform {
      assert {
        reify[Int, Int => (Int, Int)](10).apply(0)._2 == 10
      }
      assert {
        reify[Int, Int => (Int, Int)] {
          val x = 10
          ((_: Int) => (x, x)).reflect
          ((_: Int) => (20, x)).reflect
        }.apply(0)._1 == 20
      }
      assert {
        reify[Unit, Int => (Int, Unit)] {
          put(10)
        }.apply(0)._1 == 10
      }
      assert {
        reify[Unit, Int => (Int, Unit)] {
          put(10)
          put(20)
        }.apply(0)._1 == 20
      }
    }
  }
}
| hiroshi-cl/InverseMacros | inverse_macros_libraries/src/test/scala/inverse_macros/monads/ContinuationTest.scala | Scala | bsd-2-clause | 3,351 |
package com.twitter.finagle.loadbalancer
/**
 * The base type of the load balancer distributor. Distributors are
 * updated nondestructively, but, as with nodes, may share some
 * data across updates.
 *
 * @tparam Node the node type the distributor balances over
 * @param vector the nodes over which we are currently balancing.
 */
protected[loadbalancer] abstract class DistributorT[Node](val vector: Vector[Node]) {
  // The concrete distributor type, so rebuild() returns the subtype, not the base.
  type This <: DistributorT[Node]

  /**
   * Pick the next node.
   * This is the main entry point for a load balancer implementation.
   */
  def pick(): Node

  /**
   * True if this distributor needs to be rebuilt. (For example, it
   * may need to be updated with current availabilities.)
   */
  def needsRebuild: Boolean

  /**
   * Rebuild this distributor over the same node vector.
   */
  def rebuild(): This

  /**
   * Rebuild this distributor with a new vector.
   */
  def rebuild(vector: Vector[Node]): This
}
| adriancole/finagle | finagle-core/src/main/scala/com/twitter/finagle/loadbalancer/DistributorT.scala | Scala | apache-2.0 | 862 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset down to a small sample of code snippets that match specific criteria, giving a quick overview of the dataset's contents without any deeper analysis.