code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
// Copyright: 2010 - 2017 https://github.com/ensime/ensime-server/graphs/contributors
// License: http://www.gnu.org/licenses/lgpl-3.0.en.html
package org.ensime.io
import java.io.File
import java.nio.file._
import scala.util.Properties.jdkHome
import scalaz.std.list._
import scalaz.ioeffect.RTS
import org.scalatest._
import org.scalatest.Matchers._
import org.ensime.api._
import Canon.ops._
/**
 * Tests for `Canon`: canonicalisation of `File`s and of file references
 * nested inside ENSIME API messages.
 *
 * Fix: removed the unused `rawDir` locals in the last two tests (they were
 * computed from the temp dir but never used), and replaced the unused `dir`
 * lambda parameters with `_`.
 */
class CanonSpec extends FlatSpec with RTS {

  /** A relative file handle whose canonical form the tests compare against. */
  lazy val file = new File(".")
  /** The canonicalised form of [[file]], computed once. */
  lazy val canon = Canonised(file)

  /** Runs the `Canon` IO action synchronously and returns the canonicalised value. */
  def Canonised[A: Canon](a: A): A = unsafePerformIO(a.canon)

  "Canon" should "canon File" in {
    Canonised(file) shouldBe canon
  }

  it should "canon List of Files" in {
    Canonised(List(file)) shouldBe List(canon)
  }

  // Subclass used to verify Canon is resolved by the static File type,
  // not by the runtime class.
  class MyFile(name: String) extends File(name)

  it should "canon subtypes of File when used in File position" in {
    val mine: File = new MyFile(".")
    Canonised(mine) should not be (mine)
  }

  it should "canon an RpcRequest" in {
    val request = TypeAtPointReq(Left(file), OffsetRange(100)): RpcRequest
    val expected = TypeAtPointReq(Left(canon), OffsetRange(100))
    Canonised(request) shouldBe expected
  }

  it should "canon an EnsimeServerMessage" in {
    val response = Breakpoint(RawFile(file.toPath), 13): RpcResponse
    val expected = Breakpoint(RawFile(canon.toPath), 13)
    Canonised(response) shouldBe expected
  }

  // NOTE: doesn't delete contents
  def withTempDir[T](a: File => T): T = {
    val dir = Files.createTempDirectory("ensime").toFile
    try a(dir)
    finally dir.delete()
  }

  it should "canon a RawFile" in withTempDir { _ =>
    val ef = List(RawFile(file.toPath))
    val expected = List(RawFile(canon.toPath))
    Canonised(ef) shouldBe expected
  }

  it should "canon an ArchiveFile" in withTempDir { _ =>
    val src = Paths.get(s"$jdkHome/src.zip")
    val entry = EnsimeFile(s"$src!/java/lang/String.java")
    val expected =
      ArchiveFile(Canonised(src), "/java/lang/String.java")
    Canonised(List(entry)) shouldBe List(expected)
  }
}
| ensime/ensime-server | api/src/test/scala/org/ensime/io/CanonSpec.scala | Scala | gpl-3.0 | 2,143 |
package com.gu.notificationschedule.dynamo
import java.time.Instant
import com.amazonaws.handlers.AsyncHandler
import com.amazonaws.services.dynamodbv2.AmazonDynamoDBAsync
import com.amazonaws.services.dynamodbv2.model._
import scala.jdk.CollectionConverters._
import scala.concurrent.Promise
/** Configuration identifying the DynamoDB schedule table for one deployment. */
case class ScheduleTableConfig(app: String, stage: String, stack: String) {
  // Table name follows the "<app>-<stage>-<stack>" convention.
  val scheduleTableName: String = Seq(app, stage, stack).mkString("-")
}
/**
 * One row of the notification schedule table.
 *
 * @param uuid            unique identifier of the scheduled notification
 * @param notification    the notification payload as stored (serialised string)
 * @param dueEpochSeconds epoch second at which the notification becomes due
 * @param ttlEpochSeconds epoch second used as the row's TTL — presumably the
 *                        DynamoDB time-to-live attribute; confirm against the table config
 */
case class NotificationsScheduleEntry(
    uuid: String,
    notification: String,
    dueEpochSeconds: Long,
    ttlEpochSeconds: Long
)
/** Blocking read/write access to the notification schedule table. */
trait NotificationSchedulePersistenceSync {
  /** Returns schedule entries matching the implementation's query (see impl for criteria). */
  def querySync(): Seq[NotificationsScheduleEntry]
  /** Persists an entry; `maybeEpochSentS`, when defined, marks it as sent at that epoch second. */
  def writeSync(notificationsScheduleEntry: NotificationsScheduleEntry, maybeEpochSentS: Option[Long]): Unit
}
/** Non-blocking write access to the notification schedule table. */
trait NotificationSchedulePersistenceAsync {
  /** Persists an entry asynchronously; the returned Promise completes when the write finishes or fails. */
  def writeAsync(notificationsScheduleEntry: NotificationsScheduleEntry, maybeEpochSentS: Option[Long]): Promise[Unit]
}
/**
 * DynamoDB-backed persistence for scheduled notifications.
 *
 * @param tableName name of the schedule table
 * @param client    AWS SDK v1 async DynamoDB client (also used for blocking calls)
 */
class NotificationSchedulePersistenceImpl(tableName: String, client: AmazonDynamoDBAsync) extends NotificationSchedulePersistenceSync with NotificationSchedulePersistenceAsync {

  // Name of the secondary index keyed on (sent, due_epoch_s).
  private val due_and_sent = "due_epoch_s_and_sent"

  /**
   * Synchronously scans the index for unsent entries whose due time has passed.
   *
   * Fix: follows `LastEvaluatedKey` across scan pages. DynamoDB returns at most
   * 1MB of data per Scan call, so the original single-call implementation
   * silently dropped every entry after the first page.
   */
  def querySync(): Seq[NotificationsScheduleEntry] = {
    // Evaluate "now" once so all pages are filtered against the same instant.
    val nowEpochS = Instant.now().getEpochSecond.toString

    def newRequest(): ScanRequest = new ScanRequest(tableName)
      .withIndexName(due_and_sent)
      .withFilterExpression("sent = :sent and due_epoch_s < :now")
      .withExpressionAttributeValues(Map(
        ":sent" -> new AttributeValue().withS(false.toString),
        ":now" -> new AttributeValue().withN(nowEpochS)
      ).asJava)

    @scala.annotation.tailrec
    def scanPages(
      startKey: Option[java.util.Map[String, AttributeValue]],
      acc: Vector[NotificationsScheduleEntry]
    ): Vector[NotificationsScheduleEntry] = {
      val request = startKey.fold(newRequest())(key => newRequest().withExclusiveStartKey(key))
      val result = client.scan(request)
      val entries = result.getItems.asScala.toVector.map(toEntry)
      // A null or empty LastEvaluatedKey means the scan is complete.
      Option(result.getLastEvaluatedKey).filterNot(_.isEmpty) match {
        case Some(key) => scanPages(Some(key), acc ++ entries)
        case None      => acc ++ entries
      }
    }

    scanPages(None, Vector.empty)
  }

  /** Converts one raw DynamoDB item into a [[NotificationsScheduleEntry]]. */
  private def toEntry(item: java.util.Map[String, AttributeValue]): NotificationsScheduleEntry =
    NotificationsScheduleEntry(
      uuid = item.get("uuid").getS,
      notification = item.get("notification").getS,
      dueEpochSeconds = item.get("due_epoch_s").getN.toLong,
      ttlEpochSeconds = item.get("ttl_epoch_s").getN.toLong
    )

  /**
   * Builds the PutItem request for an entry. `sent` is the string "true" iff
   * `maybeEpochSentS` is defined, in which case `sent_epoch_s` is written too.
   */
  private def makePutItemRequest(notificationsScheduleEntry: NotificationsScheduleEntry, maybeEpochSentS: Option[Long]) = new PutItemRequest(tableName, (Map(
    "uuid" -> new AttributeValue().withS(notificationsScheduleEntry.uuid),
    "notification" -> new AttributeValue().withS(notificationsScheduleEntry.notification),
    "due_epoch_s" -> new AttributeValue().withN(notificationsScheduleEntry.dueEpochSeconds.toString),
    "ttl_epoch_s" -> new AttributeValue().withN(notificationsScheduleEntry.ttlEpochSeconds.toString),
    "sent" -> new AttributeValue().withS(maybeEpochSentS.isDefined.toString)
  ) ++ maybeEpochSentS.map(epochSentS => Map("sent_epoch_s" -> new AttributeValue().withN(epochSentS.toString))).getOrElse(Map[String, AttributeValue]())).asJava
  )

  /** Asynchronous write; completes the returned Promise from the AWS async callback. */
  def writeAsync(notificationsScheduleEntry: NotificationsScheduleEntry, maybeEpochSentS: Option[Long]): Promise[Unit] = {
    val request = makePutItemRequest(notificationsScheduleEntry, maybeEpochSentS)
    val promise = Promise[Unit]()
    client.putItemAsync(request, new AsyncHandler[PutItemRequest, PutItemResult] {
      override def onError(exception: Exception): Unit = promise.failure(exception)
      override def onSuccess(request: PutItemRequest, result: PutItemResult): Unit = promise.success(())
    })
    promise
  }

  /** Blocking write of a single entry. */
  override def writeSync(notificationsScheduleEntry: NotificationsScheduleEntry, maybeEpochSentS: Option[Long]): Unit =
    client.putItem(makePutItemRequest(notificationsScheduleEntry, maybeEpochSentS))
}
| guardian/mobile-n10n | commonscheduledynamodb/src/main/scala/com/gu/notificationschedule/dynamo/NotificationSchedulePersistenceImpl.scala | Scala | apache-2.0 | 3,532 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.bwsw.sj.engine.output.benchmark
import com.bwsw.sj.common.config.BenchmarkConfigNames.test
/** Configuration-key names used by the output-streaming benchmark, all rooted at `test`. */
object OutputBenchmarkConfigNames {
  val esHosts = s"$test.es.hosts"
  val jdbcHosts = s"$test.jdbc.hosts"
  val restHosts = s"$test.restful.hosts"
  val restPort = s"$test.output.rest.port"
  val silent = s"$test.silent"
}
| bwsw/sj-platform | core/sj-output-streaming-engine/src/test/scala/com/bwsw/sj/engine/output/benchmark/OutputBenchmarkConfigNames.scala | Scala | apache-2.0 | 1,145 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import exceptions.TestCanceledException
import exceptions.TestFailedException
import Matchers._
/**
 * Specification for `ConfigMap`: the typed accessors (`getRequired`,
 * `getOptional`, `getWithDefault`) across every supported primitive type
 * (boxed and unboxed), subtype-aware lookups, Matchers `contain ...` syntax
 * support, and the companion object's factory methods.
 */
class ConfigMapSpec extends Spec {

  // Simple supertype/subtype pair used to verify that lookups respect subtyping.
  class Fruit {
    override def toString = "a Fruit"
  }
  class Apple extends Fruit {
    override def toString = "an Apple"
  }

  val fruit = new Fruit
  val apple = new Apple

  // One entry per supported primitive type, deliberately in both unboxed and
  // java.lang boxed form (the boxed constructors are intentional here, to pin
  // down boxing behavior), plus two reference types for subtype checks.
  val cm = ConfigMap(
    "string" -> "aStringValue",
    "boolean" -> true,
    "Boolean" -> new java.lang.Boolean(true),
    "byte" -> 1.toByte,
    "Byte" -> new java.lang.Byte(1.toByte),
    "short" -> 1.toShort,
    "Short" -> new java.lang.Short(1.toShort),
    "int" -> 1,
    "Integer" -> new java.lang.Integer(1),
    "long" -> Long.MaxValue,
    "Long" -> new java.lang.Long(Long.MaxValue),
    "char" -> 'c',
    "Char" -> new java.lang.Character('c'),
    "float" -> 1.0F,
    "Float" -> new java.lang.Float(1.0F),
    "double" -> 1.0,
    "Double" -> new java.lang.Double(1.0),
    "apple" -> apple,
    "fruit" -> fruit
  )

  object `A ConfigMap` {

    def `should provide a nice syntax for getting a required entry` {
      assert(cm.getRequired[String]("string") === "aStringValue")
      assert(cm.getRequired[Int]("int") === 1)
    }

    def `should throw a TestCanceledException if a required entry is missing` {
      val caught =
        intercept[TestCanceledException] {
          cm.getRequired[String]("t")
        }
      assert(caught.getMessage === Resources.configMapEntryNotFound("t"))
    }

    def `should throw a TestCanceledException if a required entry has an unexpected type` {
      // Ensure supertype and subtype is done correctly
      assert(cm.getRequired[Apple]("apple") === apple)
      assert(cm.getRequired[Fruit]("apple") === apple)
      val caught1 =
        intercept[TestCanceledException] {
          cm.getRequired[Apple]("fruit")
        }
      assert(caught1.getMessage === Resources.configMapEntryHadUnexpectedType("fruit", "class " + fruit.getClass.getName, "class " + apple.getClass.getName, "a Fruit"))
      // Ensure Boolean works
      assert(cm.getRequired[Boolean]("boolean") === true)
      assert(cm.getRequired[Boolean]("Boolean") === new java.lang.Boolean(true))
      val caught2 =
        intercept[TestCanceledException] {
          cm.getRequired[Boolean]("string")
        }
      assert(caught2.getMessage === Resources.configMapEntryHadUnexpectedType("string", "class java.lang.String", "boolean", "aStringValue"))
      // Ensure Byte works
      assert(cm.getRequired[Byte]("byte") === 1.toByte)
      assert(cm.getRequired[Byte]("Byte") === new java.lang.Byte(1.toByte))
      val caught3 =
        intercept[TestCanceledException] {
          cm.getRequired[Byte]("string")
        }
      assert(caught3.getMessage === Resources.configMapEntryHadUnexpectedType("string", "class java.lang.String", "byte", "aStringValue"))
      // Ensure Short works
      assert(cm.getRequired[Short]("short") === 1.toShort)
      assert(cm.getRequired[Short]("Short") === new java.lang.Short(1.toShort))
      val caught4 =
        intercept[TestCanceledException] {
          cm.getRequired[Short]("string")
        }
      assert(caught4.getMessage === Resources.configMapEntryHadUnexpectedType("string", "class java.lang.String", "short", "aStringValue"))
      // Ensure Int works
      assert(cm.getRequired[Int]("int") === 1)
      assert(cm.getRequired[Int]("Integer") === new java.lang.Integer(1))
      val caught5 =
        intercept[TestCanceledException] {
          cm.getRequired[Int]("string")
        }
      assert(caught5.getMessage === Resources.configMapEntryHadUnexpectedType("string", "class java.lang.String", "int", "aStringValue"))
      // Ensure Long works
      assert(cm.getRequired[Long]("long") === Long.MaxValue)
      assert(cm.getRequired[Long]("Long") === new java.lang.Long(Long.MaxValue))
      val caught6 =
        intercept[TestCanceledException] {
          cm.getRequired[Long]("string")
        }
      assert(caught6.getMessage === Resources.configMapEntryHadUnexpectedType("string", "class java.lang.String", "long", "aStringValue"))
      // Ensure Char works
      assert(cm.getRequired[Char]("char") === 'c')
      assert(cm.getRequired[Char]("Char") === new java.lang.Character('c'))
      val caught7 =
        intercept[TestCanceledException] {
          cm.getRequired[Char]("string")
        }
      assert(caught7.getMessage === Resources.configMapEntryHadUnexpectedType("string", "class java.lang.String", "char", "aStringValue"))
      // Ensure Float works
      assert(cm.getRequired[Float]("float") === 1.0F)
      assert(cm.getRequired[Float]("Float") === new java.lang.Float(1.0F))
      val caught8 =
        intercept[TestCanceledException] {
          cm.getRequired[Float]("string")
        }
      assert(caught8.getMessage === Resources.configMapEntryHadUnexpectedType("string", "class java.lang.String", "float", "aStringValue"))
      // Ensure Double works
      assert(cm.getRequired[Double]("double") === 1.0)
      assert(cm.getRequired[Double]("Double") === new java.lang.Double(1.0))
      val caught9 =
        intercept[TestCanceledException] {
          cm.getRequired[Double]("string")
        }
      assert(caught9.getMessage === Resources.configMapEntryHadUnexpectedType("string", "class java.lang.String", "double", "aStringValue"))
    }

    def `should provide a nice syntax for getting an optional entry` {
      assert(cm.getOptional[String]("string") === Some("aStringValue"))
      assert(cm.getOptional[Int]("int") === Some(1))
    }

    def `should return None if an optional entry is missing` {
      assert(cm.getOptional[String]("t") === None)
    }

    // Same matrix of type checks as the getRequired test above, but via getOptional.
    def `should throw a TestCanceledException if an optional entry has an unexpected type` {
      // Ensure supertype and subtype is done correctly
      assert(cm.getOptional[Apple]("apple") === Some(apple))
      assert(cm.getOptional[Fruit]("apple") === Some(apple))
      val caught1 =
        intercept[TestCanceledException] {
          cm.getOptional[Apple]("fruit")
        }
      assert(caught1.getMessage === Resources.configMapEntryHadUnexpectedType("fruit", "class " + fruit.getClass.getName, "class " + apple.getClass.getName, "a Fruit"))
      // Ensure Boolean works
      assert(cm.getOptional[Boolean]("boolean") === Some(true))
      assert(cm.getOptional[Boolean]("Boolean") === Some(new java.lang.Boolean(true)))
      val caught2 =
        intercept[TestCanceledException] {
          cm.getOptional[Boolean]("string")
        }
      assert(caught2.getMessage === Resources.configMapEntryHadUnexpectedType("string", "class java.lang.String", "boolean", "aStringValue"))
      // Ensure Byte works
      assert(cm.getOptional[Byte]("byte") === Some(1.toByte))
      assert(cm.getOptional[Byte]("Byte") === Some(new java.lang.Byte(1.toByte)))
      val caught3 =
        intercept[TestCanceledException] {
          cm.getOptional[Byte]("string")
        }
      assert(caught3.getMessage === Resources.configMapEntryHadUnexpectedType("string", "class java.lang.String", "byte", "aStringValue"))
      // Ensure Short works
      assert(cm.getOptional[Short]("short") === Some(1.toShort))
      assert(cm.getOptional[Short]("Short") === Some(new java.lang.Short(1.toShort)))
      val caught4 =
        intercept[TestCanceledException] {
          cm.getOptional[Short]("string")
        }
      assert(caught4.getMessage === Resources.configMapEntryHadUnexpectedType("string", "class java.lang.String", "short", "aStringValue"))
      // Ensure Int works
      assert(cm.getOptional[Int]("int") === Some(1))
      assert(cm.getOptional[Int]("Integer") === Some(new java.lang.Integer(1)))
      val caught5 =
        intercept[TestCanceledException] {
          cm.getOptional[Int]("string")
        }
      assert(caught5.getMessage === Resources.configMapEntryHadUnexpectedType("string", "class java.lang.String", "int", "aStringValue"))
      // Ensure Long works
      assert(cm.getOptional[Long]("long") === Some(Long.MaxValue))
      assert(cm.getOptional[Long]("Long") === Some(new java.lang.Long(Long.MaxValue)))
      val caught6 =
        intercept[TestCanceledException] {
          cm.getOptional[Long]("string")
        }
      assert(caught6.getMessage === Resources.configMapEntryHadUnexpectedType("string", "class java.lang.String", "long", "aStringValue"))
      // Ensure Char works
      assert(cm.getOptional[Char]("char") === Some('c'))
      assert(cm.getOptional[Char]("Char") === Some(new java.lang.Character('c')))
      val caught7 =
        intercept[TestCanceledException] {
          cm.getOptional[Char]("string")
        }
      assert(caught7.getMessage === Resources.configMapEntryHadUnexpectedType("string", "class java.lang.String", "char", "aStringValue"))
      // Ensure Float works
      assert(cm.getOptional[Float]("float") === Some(1.0F))
      assert(cm.getOptional[Float]("Float") === Some(new java.lang.Float(1.0F)))
      val caught8 =
        intercept[TestCanceledException] {
          cm.getOptional[Float]("string")
        }
      assert(caught8.getMessage === Resources.configMapEntryHadUnexpectedType("string", "class java.lang.String", "float", "aStringValue"))
      // Ensure Double works
      assert(cm.getOptional[Double]("double") === Some(1.0))
      assert(cm.getOptional[Double]("Double") === Some(new java.lang.Double(1.0)))
      val caught9 =
        intercept[TestCanceledException] {
          cm.getOptional[Double]("string")
        }
      assert(caught9.getMessage === Resources.configMapEntryHadUnexpectedType("string", "class java.lang.String", "double", "aStringValue"))
    }

    def `should provide a nice syntax for getting an optional entry with a default value` {
      assert(cm.getWithDefault[String]("string", "theDefault") === "aStringValue")
      assert(cm.getWithDefault[Int]("int", 0) === 1)
      assert(cm.getWithDefault[Int]("t", 0) === 0)
      assert(cm.getWithDefault[String]("t", "theDefault") === "theDefault")
    }

    // Same matrix of type checks again, but via getWithDefault.
    def `should throw a TestCanceledException if an optional entry requested with a default has an unexpected type` {
      // Ensure supertype and subtype is done correctly
      assert(cm.getWithDefault[Apple]("apple", new Apple) === apple)
      assert(cm.getWithDefault[Fruit]("apple", new Fruit) === apple)
      val caught1 =
        intercept[TestCanceledException] {
          cm.getWithDefault[Apple]("fruit", new Apple)
        }
      assert(caught1.getMessage === Resources.configMapEntryHadUnexpectedType("fruit", "class " + fruit.getClass.getName, "class " + apple.getClass.getName, "a Fruit"))
      // Ensure Boolean works
      assert(cm.getWithDefault[Boolean]("boolean", false) === true)
      assert(cm.getWithDefault[Boolean]("Boolean", false) === new java.lang.Boolean(true))
      val caught2 =
        intercept[TestCanceledException] {
          cm.getWithDefault[Boolean]("string", false)
        }
      assert(caught2.getMessage === Resources.configMapEntryHadUnexpectedType("string", "class java.lang.String", "boolean", "aStringValue"))
      // Ensure Byte works
      assert(cm.getWithDefault[Byte]("byte", 2.toByte) === 1.toByte)
      assert(cm.getWithDefault[Byte]("Byte", 2.toByte) === new java.lang.Byte(1.toByte))
      val caught3 =
        intercept[TestCanceledException] {
          cm.getWithDefault[Byte]("string", 2.toByte)
        }
      assert(caught3.getMessage === Resources.configMapEntryHadUnexpectedType("string", "class java.lang.String", "byte", "aStringValue"))
      // Ensure Short works
      assert(cm.getWithDefault[Short]("short", 2.toShort) === 1.toShort)
      assert(cm.getWithDefault[Short]("Short", 2.toShort) === new java.lang.Short(1.toShort))
      val caught4 =
        intercept[TestCanceledException] {
          cm.getWithDefault[Short]("string", 2.toShort)
        }
      assert(caught4.getMessage === Resources.configMapEntryHadUnexpectedType("string", "class java.lang.String", "short", "aStringValue"))
      // Ensure Int works
      assert(cm.getWithDefault[Int]("int", 2) === 1)
      assert(cm.getWithDefault[Int]("Integer", 2) === new java.lang.Integer(1))
      val caught5 =
        intercept[TestCanceledException] {
          cm.getWithDefault[Int]("string", 2)
        }
      assert(caught5.getMessage === Resources.configMapEntryHadUnexpectedType("string", "class java.lang.String", "int", "aStringValue"))
      // Ensure Long works
      assert(cm.getWithDefault[Long]("long", 2.toLong) === Long.MaxValue)
      assert(cm.getWithDefault[Long]("Long", 2.toLong) === new java.lang.Long(Long.MaxValue))
      val caught6 =
        intercept[TestCanceledException] {
          cm.getWithDefault[Long]("string", 2.toLong)
        }
      assert(caught6.getMessage === Resources.configMapEntryHadUnexpectedType("string", "class java.lang.String", "long", "aStringValue"))
      // Ensure Char works
      assert(cm.getWithDefault[Char]("char", 'z') === 'c')
      assert(cm.getWithDefault[Char]("Char", 'z') === new java.lang.Character('c'))
      val caught7 =
        intercept[TestCanceledException] {
          cm.getWithDefault[Char]("string", 'z')
        }
      assert(caught7.getMessage === Resources.configMapEntryHadUnexpectedType("string", "class java.lang.String", "char", "aStringValue"))
      // Ensure Float works
      assert(cm.getWithDefault[Float]("float", 2.0F) === 1.0F)
      assert(cm.getWithDefault[Float]("Float", 2.0F) === new java.lang.Float(1.0F))
      val caught8 =
        intercept[TestCanceledException] {
          cm.getWithDefault[Float]("string", 2.0F)
        }
      assert(caught8.getMessage === Resources.configMapEntryHadUnexpectedType("string", "class java.lang.String", "float", "aStringValue"))
      // Ensure Double works
      assert(cm.getWithDefault[Double]("double", 2.0) === 1.0)
      assert(cm.getWithDefault[Double]("Double", 2.0) === new java.lang.Double(1.0))
      val caught9 =
        intercept[TestCanceledException] {
          cm.getWithDefault[Double]("string", 2.0)
        }
      assert(caught9.getMessage === Resources.configMapEntryHadUnexpectedType("string", "class java.lang.String", "double", "aStringValue"))
    }

    // The remaining tests verify ConfigMap plays well with the Matchers DSL.
    def `should work with 'contain (...) syntax` {
      ConfigMap("one" -> 1, "two" -> 2) should contain ("one" -> 1)
      intercept[TestFailedException] {
        ConfigMap("one" -> 1, "two" -> 2) should contain ("three" -> 3)
      }
    }

    def `should work with 'contain oneOf (...) syntax` {
      ConfigMap("one" -> 1, "two" -> 2) should contain oneOf ("one" -> 1, "three" -> 3)
      intercept[TestFailedException] {
        ConfigMap("one" -> 1, "two" -> 2) should contain oneOf ("three" -> 3, "four" -> 4)
      }
    }

    def `should work with 'contain noneOf (...) syntax` {
      ConfigMap("one" -> 1, "two" -> 2) should contain noneOf ("three" -> 3, "four" -> 4)
      intercept[TestFailedException] {
        ConfigMap("one" -> 1, "two" -> 2) should contain noneOf ("one" -> 1, "four" -> 4)
      }
    }

    def `should work with 'contain allOf (...) syntax` {
      ConfigMap("one" -> 1, "two" -> 2) should contain allOf ("one" -> 1, "two" -> 2)
      intercept[TestFailedException] {
        ConfigMap("one" -> 1, "two" -> 2) should contain allOf ("one" -> 1, "four" -> 4)
      }
    }

    def `should work with 'contain atLeastOneOf (...) syntax` {
      ConfigMap("one" -> 1, "two" -> 2) should contain atLeastOneOf ("one" -> 1, "two" -> 2)
      intercept[TestFailedException] {
        ConfigMap("one" -> 1, "two" -> 2) should contain atLeastOneOf ("three" -> 3, "four" -> 4)
      }
    }

    def `should work with 'contain theSameElementsAs (...) syntax` {
      ConfigMap("one" -> 1, "two" -> 2) should contain theSameElementsAs List("one" -> 1, "two" -> 2)
      intercept[TestFailedException] {
        ConfigMap("one" -> 1, "two" -> 2) should contain theSameElementsAs List("three" -> 3, "four" -> 4)
      }
    }

    def `should work with 'contain atMostOneOf (...) syntax` {
      ConfigMap("one" -> 1, "two" -> 2) should contain atMostOneOf ("three" -> 3, "four" -> 4)
      intercept[TestFailedException] {
        ConfigMap("one" -> 1, "two" -> 2) should contain atMostOneOf ("one" -> 1, "two" -> 2)
      }
    }

    def `should work with 'contain only (...) syntax` {
      ConfigMap("one" -> 1, "two" -> 2) should contain only ("one" -> 1, "two" -> 2)
      intercept[TestFailedException] {
        ConfigMap("one" -> 1, "two" -> 2) should contain only ("one" -> 2)
      }
    }

    def `should work with 'contain key (...) syntax` {
      ConfigMap("one" -> 1, "two" -> 2) should contain key ("one")
      intercept[TestFailedException] {
        ConfigMap("one" -> 1, "two" -> 2) should contain key ("three")
      }
    }

    def `should work with 'contain value (...) syntax` {
      ConfigMap("one" -> 1, "two" -> 2) should contain value (1)
      intercept[TestFailedException] {
        ConfigMap("one" -> 1, "two" -> 2) should contain value (3)
      }
    }
  }

  object `The ConfigMap companion object` {

    // The apply factory should produce the same map as constructing directly.
    def `should provide a factory method for constructing new ConfigMaps` {
      assert(cm.size === 19)
      val expected = new ConfigMap(
        Map(
          "string" -> "aStringValue",
          "boolean" -> true,
          "Boolean" -> new java.lang.Boolean(true),
          "byte" -> 1.toByte,
          "Byte" -> new java.lang.Byte(1.toByte),
          "short" -> 1.toShort,
          "Short" -> new java.lang.Short(1.toShort),
          "int" -> 1,
          "Integer" -> new java.lang.Integer(1),
          "long" -> Long.MaxValue,
          "Long" -> new java.lang.Long(Long.MaxValue),
          "char" -> 'c',
          "Char" -> new java.lang.Character('c'),
          "float" -> 1.0F,
          "Float" -> new java.lang.Float(1.0F),
          "double" -> 1.0,
          "Double" -> new java.lang.Double(1.0),
          "apple" -> apple,
          "fruit" -> fruit
        )
      )
      assert(cm === expected)
    }

    def `should provide a factory method for constructing empty ConfigMaps` {
      val emptyCm: ConfigMap = ConfigMap.empty
      assert(emptyCm.size === 0)
    }
  }
}
| cheeseng/scalatest | scalatest-test/src/test/scala/org/scalatest/ConfigMapSpec.scala | Scala | apache-2.0 | 18,874 |
/*
* Copyright 2014 – 2015 Paul Horn
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.specs2.Specification
import _root_.buildinfo.BuildInfo
import org.specs2.specification.{Snippets, Forms}
// Specs2 specification that renders the "Installation Notes" page: install
// snippets for sbt/leiningen/gradle/maven, the project's dependency list and
// its additional modules, all read from sbt-buildinfo's generated BuildInfo.
object install extends Specification with Snippets with Forms { lazy val is = "Installation Notes".title ^ s2"""
`$name` is published to Sonatype and can be installed with
your favourite dependency manger:
- [sbt](http://scala-sbt.org)
- [leiningen](http://leiningen.org/)
- [gradle](http://gradle.org)
- [maven](http://maven.apache.org)
#### SBT
```
libraryDependencies += "${me.groupId}" %% "${me.artifactId}" % "${me.version}"
```
#### Leiningen
```
[${me.groupId}/${me.artifactId} "${me.version}"]
```
#### Gradle
```
compile '${me.groupId}:${me.artifactId}:${me.version}'
```
#### Maven
```
<dependency>
<groupId>${me.groupId}</groupId>
<artifactId>${me.artifactId}</artifactId>
<version>${me.version}</version>
</dependency>
```
### Dependencies
$projectDependencies
### Other Modules
$projectModules
"""

  // Renders this project's dependency list, or a "no dependencies" note when empty.
  def projectDependencies: String = moduleDependencies(
    name, dependencies,
    s"""`$name` has no additional dependencies besides scala ${BuildInfo.scalaVersion}.""")

  // Renders the list of additional modules, or the empty string when there are none.
  def projectModules: String =
    if (modules.nonEmpty)
      s"""`$name` also comes with the following additional modules:
|
|${modules.map(moduleString).mkString("\\n")}
""".stripMargin
    else ""

  // Renders a module's dependency list; `empty` is the fallback text for no dependencies.
  def moduleDependencies(name: String, dependencies: List[Dependency], empty: ⇒ String = ""): String =
    if (dependencies.nonEmpty)
      s"""`$name` depends on the following modules:
|
|${dependencies.map(_.toString).mkString("- `", "`\\n- `", "`\\n")}
""".stripMargin
    else empty

  // Renders one module as a markdown section with its coordinates and dependencies.
  def moduleString(m: Module): String =
    s"""#### ${m.self.artifactId}
|
|`${m.self}`
|
|${moduleDependencies(m.self.artifactId, m.deps)}
""".stripMargin

  val name = BuildInfo.name
  val me = Dependency(name)
  val dependencies = filterDeps(BuildInfo.dependencies)
  val modules = BuildInfo.modules.map(Module(_))

  // A sub-module: its own coordinates plus its (filtered) dependencies.
  case class Module(self: Dependency, deps: List[Dependency])
  object Module {
    // Builds a Module from BuildInfo's (name, dependency-strings) pair.
    def apply(nd: (String, Seq[String])): Module =
      Module(Dependency(nd._1), filterDeps(nd._2))
  }

  // Maven-style coordinates, rendered in sbt syntax by toString.
  case class Dependency(groupId: String, artifactId: String, version: String, scope: Option[String]) {
    override def toString: String =
      s""""$groupId" %% "$artifactId" % "$version"${scope.fold("")(s ⇒ " % \\"" + s + "\\"")}"""
  }
  object Dependency {
    // Coordinates for an artifact of this build (same organization and version).
    def apply(name: String): Dependency =
      Dependency(BuildInfo.organization, name, BuildInfo.version, None)
    // Parses "group:artifact:version[:scope]"; returns None for any other shape.
    def parse(s: String): Option[Dependency] = {
      val parts = s.split(':')
      if (parts.length == 3) {
        val Array(group, art, version) = parts
        Some(Dependency(group, art, version, None))
      }
      else if (parts.length == 4) {
        val Array(group, art, version, scope) = parts
        Some(Dependency(group, art, version, Some(scope)))
      } else {
        None
      }
    }
  }

  // Parses dependency strings, then drops provided-scope entries and the
  // scala-library itself, keeping each remaining coordinate once.
  def filterDeps(deps: Seq[String]): List[Dependency] =
    deps.flatMap(Dependency.parse)
      .filterNot(_.scope.exists(_ == "provided"))
      .filterNot(_.artifactId == "scala-library")
      .distinct
      .toList
}
| knutwalker/transducers-scala | guide/src/it/scala/install.scala | Scala | apache-2.0 | 3,819 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.sources.tsextractors
import org.apache.flink.api.common.typeinfo.{SqlTimeTypeInfo, TypeInformation}
import org.apache.flink.table.api.{Types, ValidationException}
import org.apache.flink.table.expressions.{Cast, Expression, ResolvedFieldReference}
/**
* Converts an existing [[Long]], [[java.sql.Timestamp]], or
* timestamp formatted [[java.lang.String]] field (e.g., "2018-05-28 12:34:56.000") into
* a rowtime attribute.
*
* @param field The field to convert into a rowtime attribute.
*/
/**
 * Converts an existing [[Long]], [[java.sql.Timestamp]], or
 * timestamp formatted [[java.lang.String]] field (e.g., "2018-05-28 12:34:56.000") into
 * a rowtime attribute.
 *
 * @param field The field to convert into a rowtime attribute.
 */
final class ExistingField(val field: String) extends TimestampExtractor {

  /** The single input field this extractor reads. */
  override def getArgumentFields: Array[String] = Array(field)

  /**
   * Validates that the referenced field has one of the convertible types.
   *
   * @throws ValidationException if the field is not Long, Timestamp, or String.
   */
  @throws[ValidationException]
  override def validateArgumentFields(argumentFieldTypes: Array[TypeInformation[_]]): Unit = {
    val fieldType = argumentFieldTypes(0)
    fieldType match {
      case Types.LONG => // OK
      case Types.SQL_TIMESTAMP => // OK
      case Types.STRING => // OK
      case _: TypeInformation[_] =>
        throw ValidationException(
          s"Field '$field' must be of type Long or Timestamp or String but is of type $fieldType.")
    }
  }

  /**
   * Returns an [[Expression]] that casts a [[Long]], [[java.sql.Timestamp]], or
   * timestamp formatted [[java.lang.String]] field (e.g., "2018-05-28 12:34:56.000")
   * into a rowtime attribute.
   */
  override def getExpression(fieldAccesses: Array[ResolvedFieldReference]): Expression = {
    val fieldAccess: Expression = fieldAccesses(0)
    // Non-exhaustive by design: validateArgumentFields has already rejected
    // every type other than the three handled here.
    fieldAccess.resultType match {
      case Types.LONG =>
        // access LONG field
        fieldAccess
      case Types.SQL_TIMESTAMP =>
        // cast timestamp to long
        Cast(fieldAccess, Types.LONG)
      case Types.STRING =>
        // parse the string as a timestamp first, then cast to long
        Cast(Cast(fieldAccess, SqlTimeTypeInfo.TIMESTAMP), Types.LONG)
    }
  }

  /** Two ExistingField extractors are equal iff they reference the same field name. */
  override def equals(other: Any): Boolean = other match {
    case that: ExistingField => field == that.field
    case _ => false
  }

  /** Consistent with equals: hash of the field name only. */
  override def hashCode(): Int = {
    field.hashCode
  }
}
| yew1eb/flink | flink-libraries/flink-table/src/main/scala/org/apache/flink/table/sources/tsextractors/ExistingField.scala | Scala | apache-2.0 | 2,835 |
package com.blockcypher.api.protocol
import org.scalacoin.currency.CurrencyUnit
import org.scalacoin.protocol.BitcoinAddress
/**
* Created by chris on 12/19/15.
*/
/** One transaction output as represented in the BlockCypher API. */
trait BlockCypherOutput {
  /** Amount carried by this output. */
  def value : CurrencyUnit
  /** The output's script — presumably the hex-encoded scriptPubKey; confirm against the BlockCypher API docs. */
  def script : String
  /** Addresses associated with this output. */
  def addresses : Seq[BitcoinAddress]
  /** BlockCypher's script type label — NOTE(review): exact vocabulary not visible here; see API docs. */
  def scriptType : String
}
/** Default immutable implementation of [[BlockCypherOutput]]. */
case class BlockCypherOutputImpl(override val value : CurrencyUnit, override val script : String,
  override val addresses : Seq[BitcoinAddress], override val scriptType : String) extends BlockCypherOutput
| Christewart/blockcypher-api | src/main/scala/com/blockcypher/api/protocol/BlockCypherOutput.scala | Scala | mit | 517 |
// Copyright (C) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See LICENSE in project root for information.
package com.microsoft.ml.spark
/** Smoke test: builds a BrainScript config end-to-end and prints the resulting command. */
class ValidateConfiguration extends TestBase {
  test("Basic BrainScript config E2E") {
    val outputRoot = "out"
    val inputPath = "in.txt"
    val format = "text"
    // Input schema: a sparse feature vector plus a dense scalar label.
    val shapes = Map(
      "features" -> InputShape(10000, "sparse"),
      "labels" -> InputShape(1, "dense"))
    val brainScript = new BrainScriptBuilder()
      .setOutputRoot(outputRoot)
      .setInputFile(inputPath, format, shapes)
    val commandBuilder = new CNTKCommandBuilder(false)
      .appendOverrideConfig(brainScript.toOverrideConfig)
    // TODO: add assertions to really validate instead
    println(commandBuilder.buildCommand)
  }
}
| rastala/mmlspark | src/cntk-train/src/test/scala/ValidateConfiguration.scala | Scala | mit | 796 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.blueeyes.util
import quasar.blueeyes._
import scala.concurrent.Future
import java.time.{Instant, LocalDateTime, Period}
/** Source of the current time, with helpers for timing async and sync work. */
trait Clock {
  /** Returns the current time.
   */
  def now(): LocalDateTime

  /** Returns the current instant. */
  def instant(): Instant

  /** Returns a nanosecond-resolution tick — presumably System.nanoTime-backed; see implementations. */
  def nanoTime(): Long

  /** Times how long the specified future takes to be delivered.
   */
  def time[T](f: => Future[T]): Future[(Period, T)] = {
    val start = now()
    f.map { result =>
      // End time is taken when the future's value arrives, not when map is registered.
      val end = now()
      (start until end, result)
    }
  }

  /** Times a block of code.
   */
  def timeBlock[T](f: => T): (Period, T) = {
    val start = now()
    val result = f  // by-name argument: evaluated exactly here, inside the timed window
    val end = now()
    (start until end, result)
  }
}
/** Default entry point: the production (real-time) clock instance. */
object Clock {
  val System = ClockSystem.realtimeClock
}
/** Supplies the production [[Clock]] backed by the real system time. */
trait ClockSystem {
  /** Real-time clock.
    *
    * The explicit `Clock` type annotation is deliberate: implicit
    * definitions should always declare their type (it is required in
    * Scala 3 and prevents the anonymous-subclass type from being
    * inferred and leaking into the API).
    */
  implicit val realtimeClock: Clock = new Clock {
    def now(): LocalDateTime = dateTime.now()
    def instant(): Instant = quasar.blueeyes.instant.now()
    def nanoTime(): Long = System.nanoTime()
  }
}
/** Importable instance of the trait (e.g. `import ClockSystem._`). */
object ClockSystem extends ClockSystem
| jedesah/Quasar | blueeyes/src/main/scala/quasar/blueeyes/util/Clock.scala | Scala | apache-2.0 | 1,613 |
package io.buoyant.linkerd.protocol
import com.fasterxml.jackson.annotation.JsonIgnore
import com.twitter.finagle.{Filter, Service, ServiceFactory, Stack, Stackable}
import com.twitter.finagle.http.{Request, Response}
import com.twitter.finagle.stack.nilStack
import com.twitter.util.Future
import io.buoyant.config.PolymorphicConfig
/** Base class for polymorphic (config-file selected) HTTP request-authorizer
  * plugins. A concrete subclass names its stack role/description/parameters
  * and builds a Finagle filter; `module` adapts that filter into a stack
  * module that wraps the downstream service factory.
  */
abstract class HttpRequestAuthorizerConfig extends PolymorphicConfig { config =>
  // Role under which the module is registered in the Finagle stack.
  @JsonIgnore
  def role: Stack.Role

  // Human-readable description shown in stack dumps.
  @JsonIgnore
  def description: String

  // Stack params this module consumes when building the filter.
  @JsonIgnore
  def parameters: Seq[Stack.Param[_]]

  // Builds the request/response filter that performs the authorization.
  @JsonIgnore
  def mk(params: Stack.Params): Filter[Request, Response, Request, Response]

  /** Wraps [[mk]]'s filter around whatever the rest of the stack produces. */
  @JsonIgnore
  def module = new Stack.Module[ServiceFactory[Request, Response]] {
    override def role: Stack.Role = config.role
    override def description: String = config.description
    override def parameters: Seq[Stack.Param[_]] = config.parameters
    override def make(
      params: Stack.Params,
      next: Stack[ServiceFactory[Request, Response]]
    ): Stack[ServiceFactory[Request, Response]] = {
      val filter = mk(params)
      Stack.leaf(role, filter.andThen(next.make(params)))
    }
  }
}
object HttpRequestAuthorizerConfig {
  object param {
    /** Stack param carrying the configured authorizer stack; defaults to
      * the empty (nil) stack, i.e. no authorization.
      */
    case class RequestAuthorizer(loggerStack: Stack[ServiceFactory[Request, Response]])
    implicit object RequestAuthorizer extends Stack.Param[RequestAuthorizer] {
      val default = RequestAuthorizer(nilStack)
    }
  }

  /** Stack module that splices the configured authorizer stack (taken from
    * the [[param.RequestAuthorizer]] param) in front of the rest of the stack.
    */
  def module: Stackable[ServiceFactory[Request, Response]] =
    new Stack.Module[ServiceFactory[Request, Response]] {
      override val role = Stack.Role("HttpRequestAuthorizer")
      override val description = "HTTP RequestAuthorizer"
      override val parameters = Seq(implicitly[Stack.Param[param.RequestAuthorizer]])
      def make(params: Stack.Params, next: Stack[ServiceFactory[Request, Response]]): Stack[ServiceFactory[Request, Response]] = {
        val param.RequestAuthorizer(requestAuthorizerStack) = params[param.RequestAuthorizer]
        requestAuthorizerStack ++ next
      }
    }
}
| linkerd/linkerd | linkerd/protocol/http/src/main/scala/io/buoyant/linkerd/protocol/HttpRequestAuthorizerConfig.scala | Scala | apache-2.0 | 2,041 |
/**
* Copyright (C) 2012-2013 Vadim Bartko (vadim.bartko@nevilon.com).
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* See file LICENSE.txt for License information.
*/
package com.nevilon.nomad.filter
import com.nevilon.nomad.logs.Logs
import com.nevilon.nomad.crawler.{URLUtils, Relation}
/** Cleans the outgoing links extracted from a crawled page: drops malformed
  * and mail links, normalizes URLs, and removes self references and duplicates.
  */
class UrlsCleaner extends Logs {

  /** Runs the whole cleaning pipeline over `linksToClear`.
    *
    * `startUrl` is currently unused: the same-domain filter
    * (removeUrlsToAnotherDomain) is disabled, but the parameter is kept so
    * callers do not have to change.
    */
  def cleanUrls(linksToClear: List[Relation], startUrl: String): List[Relation] = {
    val normalized = normalizeUrls(removeIncorrectUrls(linksToClear))
    removeDuplicateUrls(removeSelfUrls(normalized))
  }

  /** Keeps only relations whose target looks like a crawlable URL:
    * non-empty, not an e-mail address, not a mailto: link.
    */
  private def removeIncorrectUrls(linksToClear: List[Relation]): List[Relation] =
    linksToClear.filter { relation =>
      val location = relation.to.location
      !location.trim().isEmpty && !location.contains("@") && !location.startsWith("mailto:")
    }

  /** Drops relations that point from a page back to itself. */
  private def removeSelfUrls(linksToClear: List[Relation]): List[Relation] =
    linksToClear.filterNot(relation => relation.from.equals(relation.to))

  /** Rewrites each target URL into its normalized form. */
  private def normalizeUrls(linksToClear: List[Relation]): List[Relation] =
    linksToClear.map { relation =>
      val normalizedLocation = URLUtils.normalize(relation.to.location)
      new Relation(relation.from, relation.to.updateLocation(normalizedLocation))
    }

  /** Removes duplicate relations, keeping first occurrences. */
  private def removeDuplicateUrls(linksToClear: List[Relation]) = linksToClear.distinct
}
| hudvin/nomad | src/main/scala/com/nevilon/nomad/filter/UrlsCleaner.scala | Scala | gpl-2.0 | 2,078 |
package info.glennengstrand.io
import org.apache.http.message.BasicHttpEntityEnclosingRequest
import org.apache.http.impl.DefaultBHttpClientConnection
import org.apache.http.entity.{StringEntity,ContentType}
import org.apache.http.protocol.{HttpRequestExecutor,HttpCoreContext,HttpProcessor,HttpProcessorBuilder,RequestContent,RequestTargetHost,RequestConnControl,RequestUserAgent,RequestExpectContinue}
import org.apache.http.{HttpResponse,HttpHost}
import org.apache.http.util.EntityUtils
import java.util.UUID
import java.net.Socket
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import scala.util.{Try, Success, Failure}
/** helpers functions for interfacing with elastic search */
/** helpers functions for interfacing with elastic search */
object ElasticSearch {
  val log = LoggerFactory.getLogger("info.glennengstrand.io.ElasticSearch")
  val JSON_MEDIA_TYPE = "application/json"
  /** set up the basic request for loading a document into the index;
    * the document id is "{id}-{key}" under the configured search path */
  def createIndexRequest(id: Long, key: String): BasicHttpEntityEnclosingRequest = {
    val retVal = new BasicHttpEntityEnclosingRequest("PUT", s"/${IO.settings.get(IO.searchPath).asInstanceOf[String]}/${id}-${key}")
    retVal.addHeader("Accept", JSON_MEDIA_TYPE)
    retVal
  }
  /** set up the basic request for searching the index for documents
    * (keywords are passed through unescaped in the `q` query parameter) */
  def createSearchRequest(keywords: String): BasicHttpEntityEnclosingRequest = {
    val retVal = new BasicHttpEntityEnclosingRequest("GET", s"/${IO.settings.get(IO.searchPath).asInstanceOf[String]}/_search?q=${keywords}")
    retVal.addHeader("Accept", JSON_MEDIA_TYPE)
    retVal
  }
  /** create the JSON document (id/sender/story) to be indexed, UTF-8 encoded */
  def createEntity(id: Long, key: String, story: String): StringEntity = {
    val doc = Map(
      "id" -> key,
      "sender" -> id,
      "story" -> story
    )
    val content = IO.toJson(doc)
    new StringEntity(content, ContentType.create(JSON_MEDIA_TYPE, "UTF-8"))
  }
  /** fetch the child attribute from the parsed JSON results, or `default`
    * when the key is absent.
    *
    * NOTE(review): `case rm: T` is an unchecked, type-erased match — at
    * runtime it accepts (almost) any value, so `default` is effectively
    * only returned on a missing key, not on a wrong-typed value.
    */
  def fetchChild[T](data: Map[String, Any], name: String, default: T): T = {
    data.contains(name) match {
      case true => {
        val r = data(name)
        r match {
          case rm: T => rm
          case _ => default
        }
      }
      case _ => default
    }
  }
  /** extract the desired payload from the search results: walks
    * hits.hits[]._source.<name>, reading each value as Double then
    * truncating to Long (JSON numbers parse as Double) */
  def extract(response: Map[String, Option[Any]], name: String): List[Long] = {
    val outerHits = fetchChild[Map[String, Any]](response, "hits", Map())
    val innerHits = fetchChild[List[Map[String, Any]]](outerHits, "hits", List())
    innerHits.map(hit => {
      val source = fetchChild[Map[String, Any]](hit, "_source", Map())
      fetchChild[Double](source, name, 0.0d).toLong
    }).toList
  }
}
/** Raw-socket implementation of search/index against Elasticsearch.
  * The HTTP exchange is hand-rolled on a blocking socket; statement order
  * (bind, send, flush, receive, close) is significant throughout.
  */
trait ElasticSearchSearcher extends PersistentDataStoreSearcher {
  /** Runs a query-string search and returns the matching sender ids.
    * NOTE(review): the socket is not closed in a finally block, so an
    * exception mid-exchange leaks it; the response body is read one byte
    * at a time and each byte cast to Char, which garbles multi-byte UTF-8.
    */
  def search(terms: String): Iterable[Long] = {
    val req = ElasticSearch.createSearchRequest(terms)
    val client = new DefaultBHttpClientConnection(1000)
    val s = new Socket(IO.settings.get(IO.searchHost).asInstanceOf[String], IO.settings.get(IO.searchPort).asInstanceOf[String].toInt)
    client.bind(s)
    client.sendRequestHeader(req)
    client.flush()
    val retVal = new StringBuilder()
    // Wait up to 5s for the response header before giving up.
    if (client.isResponseAvailable(5000)) {
      val res = client.receiveResponseHeader()
      val status = res.getStatusLine().getStatusCode
      if (status >= 300) {
        ElasticSearch.log.warn(s"return status = ${status}")
      } else {
        client.receiveResponseEntity(res)
        val response = res.getEntity().getContent
        var c = response.read()
        while (c != -1) {
          retVal.append(c.asInstanceOf[Char])
          c = response.read()
        }
      }
    } else {
      ElasticSearch.log.warn("no response")
    }
    s.close()
    val r = retVal.toString()
    // On error/timeout `r` is empty; behavior then depends on IO.fromJson.
    ElasticSearch.extract(IO.fromJson(r).head, "sender")
  }
  /** Indexes `content` under a fresh UUID document key for sender `id`.
    * Builds a full request-processing chain (Content/Host/UserAgent/etc.)
    * per call; failures are logged, not raised.
    */
  def index(id: Long, content: String): Unit = {
    val key = UUID.randomUUID().toString()
    val req = ElasticSearch.createIndexRequest(id, key)
    val se = ElasticSearch.createEntity(id, key, content)
    req.setEntity(se)
    val p = HttpProcessorBuilder.create()
      .add(new RequestContent())
      .add(new RequestTargetHost())
      .add(new RequestConnControl())
      .add(new RequestUserAgent("Feed/1.1"))
      .add(new RequestExpectContinue(true)).build()
    val e = new HttpRequestExecutor()
    val c = HttpCoreContext.create()
    val h = new HttpHost(IO.settings.get(IO.searchHost).asInstanceOf[String], IO.settings.get(IO.searchPort).asInstanceOf[String].toInt)
    c.setTargetHost(h)
    val client = new DefaultBHttpClientConnection(1000)
    val s = new Socket(IO.settings.get(IO.searchHost).asInstanceOf[String], IO.settings.get(IO.searchPort).asInstanceOf[String].toInt)
    client.bind(s)
    e.preProcess(req, p, c)
    val r = e.execute(req, client, c)
    e.postProcess(r, p, c)
    if (r.getStatusLine().getStatusCode >= 300) {
      ElasticSearch.log.warn(r.getStatusLine().toString())
      ElasticSearch.log.warn(EntityUtils.toString(r.getEntity))
    }
    s.close()
  }
}
| gengstrand/clojure-news-feed | server/feed2/src/main/scala/info/glennengstrand/io/ElasticSearch.scala | Scala | epl-1.0 | 5,080 |
package pl.touk.nussknacker.ui.security.ssl
import java.net.URI
/** TLS key-store location and password.
  *
  * NOTE(review): `password` is an `Array[Char]`, so the generated case-class
  * `equals`/`hashCode` compare the array by reference, not by content —
  * two configs with identical passwords are not `==`. Presumably chosen so
  * the password can be zeroed after use; confirm before relying on equality.
  */
case class KeyStoreConfig(uri: URI, password: Array[Char])
package demo
import sqltyped._
/** Seeds the demo database with a small deterministic fixture. */
object TestData {
  /** Deletes all rows from the person table. */
  def drop = sql("DELETE FROM person").apply
  /** Inserts two plain people, one interview held by the admin, one
    * interviewed person and two comments. The values returned by the
    * `sqlk` inserts are threaded through as foreign keys below.
    */
  def create = {
    val newPerson = sqlk("INSERT INTO person (name, secret, interview) VALUES (?, ?, ?)")
    val admin = newPerson("Admin", None, None)
    val other = newPerson("Some other guy", None, None)
    val interview = sqlk("INSERT INTO interview (held_by, rating) VALUES (?, ?)").apply(admin, Some(4.5))
    val dick = newPerson("Dick Tracy", Some("secret"), Some(interview))
    Db.newComment("My first comment.", admin, dick)
    Db.newComment("My second comment.", other, dick)
  }
}
| jonifreeman/sqltyped | demo/src/main/scala/testdata.scala | Scala | apache-2.0 | 596 |
package com.github.aalbul.irc.client.protocol
/**
* Created by nuru on 08.01.14.
*
* Control statement case classes
*/
/** Message types for driving an IRC client: each case class is a command
  * the client actor can be asked to perform.
  */
trait ControlStatements {
  // --- messaging ---
  case class SendPrivateMessage(user: String, message: String)
  case class SendChannelMessage(channel: String, message: String, recipient: Option[String] = None)
  // --- channel discovery / invitations / identity ---
  case class ListChannels(min: Option[Int] = None, max: Option[Int] = None)
  case class SendInvite(channel: String, user: String)
  case class ChangeNick(newNick: String)
  // --- bans and operator privileges ---
  case class Ban(channel: String, hostMask: String)
  case class UnBan(channel: String, hostMask: String)
  case class GiveOp(channel: String, user: String)
  case class DeOp(channel: String, user: String)
  // --- channel mode toggles (each Set* has a matching UnSet*) ---
  case class SetPrivateChannel(channel: String)
  case class UnSetPrivateChannel(channel: String)
  case class SetSecretChannel(channel: String)
  case class UnSetSecretChannel(channel: String)
  case class SetInviteOnly(channel: String)
  case class UnSetInviteOnly(channel: String)
  case class SetTopicProtection(channel: String)
  case class UnSetTopicProtection(channel: String)
  case class SetNoExternalMessages(channel: String)
  case class UnSetNoExternalMessages(channel: String)
  case class SetModerated(channel: String)
  case class UnSetModerated(channel: String)
  case class SetChannelLimit(channel: String, limit: Int)
  case class UnSetChannelLimit(channel: String)
  case class GiveVoice(channel: String, user: String)
  case class DeVoice(channel: String, user: String)
  case class SetChannelKey(channel: String, key: String)
  case class RemoveChannelKey(channel: String, key: String)
  case class ReJoin(channel: String, key: Option[String])
  case class GiveHalfOp(channel: String, user: String)
  case class DeHalfOp(channel: String, user: String)
  case class GiveOwner(channel: String, user: String)
  case class DeOwner(channel: String, user: String)
  case class GiveSuperOp(channel: String, user: String)
  case class DeSuperOp(channel: String, user: String)
  case class GetChannelMode(channel: String)
  // --- channel administration and queries ---
  case class Kick(channel: String, user: String, reason: Option[String])
  case class SendChannelNotice(channel: String, notice: String)
  case class LeaveChannel(channel: String, reason: Option[String])
  case class SetChannelMode(channel: String, mode: String)
  case class SetTopic(channel: String, topic: String)
  case class SendWho(channel: String)
  case class SendChannelCtcp(channel: String, command: String)
  case class GetChannelInfo(channel: String)
  case class JoinChannel(channel: String, key: Option[String])
  case class GetUserInfo(user: String)
  case class SendChannelAction(channel: String, action: String)
  // --- user-directed commands ---
  case class SendUserAction(user: String, action: String)
  case class SendUserCtcp(user: String, command: String)
  case class SendUserMode(user: String, mode: String)
  case class SendUserNotice(user: String, notice: String)
  // --- connection-level commands (no parameters; kept as case classes for
  // source compatibility with existing `Xyz()` call sites) ---
  case class GetEnabledCapabilities()
  case class GetServerInfo()
  case class Disconnect(message: Option[String])
  case class SendFinger(user: String, message: String)
  case class SendPing(user: String, value: String)
  case class SendServerPong(response: String)
  case class SendTime(user: String, time: String)
  case class SendVersion(user: String, version: String)
}
| aalbul/reactive-irc | src/main/scala/com/github/aalbul/irc/client/protocol/ControlStatements.scala | Scala | gpl-3.0 | 3,257 |
package org.vlinderlang.vlinderc.ssa
import org.vlinderlang.vlinderc.ModuleName
import org.vlinderlang.vlinderc.`type`.Type
import scala.collection.immutable.ListMap
/** Base for generated, globally unique identifiers. `prefix` only affects
  * the printed form (e.g. "B12", "I7").
  */
sealed abstract class ID(prefix: String) {
  // Allocated once per instance from the shared, synchronized counter.
  private val id = ID.nextID()
  override def toString = s"$prefix$id"
}
/** Process-wide monotonically increasing counter backing [[ID]]. */
object ID {
  private var lastID = 0
  // `synchronized` makes allocation safe across threads.
  private def nextID(): Int = synchronized {
    lastID += 1
    lastID
  }
}
class BlockID extends ID("B")
class InstID extends ID("I")
/** Control-flow graph: an entry block plus a map of all blocks.
  *
  * NOTE(review): `incoming` and `outgoing` ignore their argument and always
  * return the empty set — they read as unimplemented stubs. Confirm before
  * relying on them for any edge queries.
  */
case class CFG(entry: BlockID, blocks: Map[BlockID, Block]) {
  def incoming(blockID: BlockID): Set[BlockID] = Set.empty
  def outgoing(blockID: BlockID): Set[BlockID] = Set.empty
}
/** A basic block: its instructions in insertion order (ListMap preserves it). */
case class Block(insts: ListMap[InstID, Inst])

/** SSA instruction set. Operands are referenced by [[InstID]]. */
sealed abstract class Inst
// Call `callee` with `arguments`; `tailcall` marks a tail position call.
case class CallInst(callee: InstID, arguments: Vector[InstID], tailcall: Boolean) extends Inst
// Load the function argument at `index`.
case class LdargInst(index: Int) extends Inst
// Load a global: `member` of `module`.
case class LdgblInst(module: ModuleName, member: String) extends Inst
// Literal loads.
case class LdboolInst(value: Boolean) extends Inst
case class LdstrInst(value: String) extends Inst
// Allocate a new value of `type`.
case class NewInst(`type`: Type) extends Inst
// Return `value` from the function.
case class RetInst(value: InstID) extends Inst
// Store `value` into `field` of `target`.
case class StfldInst(target: InstID, field: String, value: InstID) extends Inst
/** Mutable builder for a [[CFG]]. Starts with a single empty entry block;
  * `inst` appends to whichever block is current. Not thread-safe.
  */
class CFGBuilder {
  private var cfg = {
    val entryID = new BlockID
    CFG(entryID, Map(entryID -> Block(ListMap.empty)))
  }
  // Insertion point for `inst`; `block()` moves it to the newly created block.
  var currentBlockID = cfg.entry
  /** Creates a fresh empty block, makes it current, and returns its id.
    * Note: no edges are recorded between blocks.
    */
  def block(): BlockID = {
    val blockID = new BlockID
    cfg = cfg.copy(blocks = cfg.blocks + (blockID -> Block(ListMap.empty)))
    currentBlockID = blockID
    blockID
  }
  /** Appends `inst` to the current block and returns its fresh id. */
  def inst(inst: Inst): InstID = {
    val instID = new InstID
    val block = cfg.blocks(currentBlockID)
    val newBlock = block.copy(insts = block.insts + (instID -> inst))
    cfg = cfg.copy(blocks = cfg.blocks + (currentBlockID -> newBlock))
    instID
  }
  /** The graph built so far (immutable snapshot). */
  def result: CFG = cfg
}
| vlinder-lang/vlinderc | src/main/scala/org/vlinderlang/vlinderc/ssa/ssa.scala | Scala | bsd-3-clause | 1,905 |
package nlpdata.util
import resource.managed
import resource.ManagedResource
import scala.util.Try
import java.nio.file.{Files, Path, Paths}
trait PackagePlatformExtensions {
protected[nlpdata] def loadFile(path: Path): ManagedResource[Iterator[String]] = {
import scala.collection.JavaConverters._
managed(Files.lines(path)).map(_.iterator.asScala)
}
protected[nlpdata] def saveFile(path: Path, contents: String): Try[Unit] =
Try(Files.write(path, contents.getBytes))
}
| julianmichael/nlpdata | nlpdata/src-jvm/nlpdata/util/PackagePlatformExtensions.scala | Scala | mit | 494 |
package scalacsv
import scala.util.parsing.combinator._
/** Minimal CSV parser built on Scala parser combinators.
  *
  * A table is rows separated by newlines; a row is fields separated by
  * commas; a field is either quoted (and may then contain commas, newlines
  * and doubled quotes standing for a literal quote) or raw text up to the
  * next comma/newline. `apply` raises via `sys.error` on malformed input.
  */
object Csv extends RegexParsers {
  def apply(input: String): List[List[String]] = parseAll(table, input) match {
    case Success(result, _) => result
    case failure : NoSuccess => scala.sys.error(failure.msg)
  }
  // Whitespace is significant in CSV, so disable the default skipping.
  override def skipWhitespace = false
  def table: Parser[List[List[String]]] = repsep(row, row_delimiter)
  def row: Parser[List[String]] = repsep(field, field_delimiter)
  // Quoted must be tried first: a raw field would otherwise eat the quote.
  def field: Parser[String] = quoted_field | raw_field
  def quoted_field: Parser[String] =
    "\\"" ~> rep(charSeq | newline | """[^"]""".r) <~ "\\"" ^^ {
      _.mkString
    }
  // A doubled quote inside a quoted field denotes one literal quote.
  def charSeq: Parser[String] = "\\"\\"" ^^^ "\\""
  def raw_field: Parser[String] = """[^,\\n\\r]*""".r
  def row_delimiter: Parser[String] = newline
  def field_delimiter: Parser[String] = ","
  // CRLF must be tried before LF so it is consumed as one delimiter.
  def newline: Parser[String] = "\\r\\n" | "\\n" | "\\r"
}
| kei10in/scalacsv | src/main/scala/Csv.scala | Scala | mit | 880 |
import org.junit.Test
import org.junit.Assert.assertEquals
class TestA {
  /** `A.three` should evaluate to 3. */
  @Test def testThree = {
    // JUnit's assertEquals takes (expected, actual); the expected constant
    // goes first so a failure message reads "expected:<3> but was:<...>".
    assertEquals(3, A.three)
  }
}
| lampepfl/dotty | sbt-test/discovery/test-discovery/src/test/scala/TestA.scala | Scala | apache-2.0 | 135 |
import stainless.lang._
object Ackermann {
  /** The classic two-argument Ackermann function, defined for m, n >= 0;
    * the postcondition records that the result is always non-negative.
    */
  def ackermann(m: BigInt, n: BigInt): BigInt = {
    require(m >= 0 && n >= 0)
    if (m != 0) {
      if (n != 0) ackermann(m - 1, ackermann(m, n - 1))
      else ackermann(m - 1, 1)
    } else {
      n + 1
    }
  } ensuring (_ >= 0)
}
| epfl-lara/stainless | frontends/benchmarks/termination/valid/Ackermann.scala | Scala | apache-2.0 | 258 |
package vulkan.wrapper.registry.command
import vulkan.wrapper.registry.vtype.VulkanType
import vulkan.wrapper.registry.{Registry, RegistryComponent, _}
import scala.xml.Node
/** One `<param>` of a Vulkan command, parsed from the registry XML.
  *
  * NOTE(review): `\\@@`, `\\@\\` and `@\\\\` are custom attribute/element
  * extractors — presumably defined in the `registry` package object; the
  * comments below describe them from usage only — confirm.
  */
class VulkanCommandParam(registry: Registry, val vulkanCommand: VulkanCommand, node: Node) extends VulkanNamedComponent(registry,node){
  // Comma-separated `len` / `altlen` attributes, split into tokens.
  val len: Traversable[String] = (node \\@@ "len").seq.flatMap(_.split(",").seq)
  val altlen: Traversable[String] = (node \\@@ "altlen").seq.flatMap(_.split(",").seq)
  // C-usable lengths: latexmath: entries in `len` replaced by their altlen form.
  val clen: Traversable[String] = genClen()
  val optional: Seq[Boolean] = (node \\@@ "optional").toSeq.flatMap(_.split(",").map(_.toBoolean))
  val noautovalidity: Boolean = (node \\@@ "noautovalidity").exists(_.toBoolean)
  val externsync: Traversable[String] = (node \\@@ "externsync").seq.flatMap(_.split(","))
  // Resolved parameter type, when the registry knows the referenced name.
  val typeName: Option[VulkanType] = (node \\@\\ "type").flatMap(registry.types.byNameOption)
  override val name: String = node @\\\\ "name"
  // Walks len/altlen in lock-step: each latexmath: len consumes the next
  // altlen token instead. Not tail-recursive; fine for the short attribute
  // lists found in the registry.
  private def genClen(_len: Traversable[String] = len,_altlen: Traversable[String] = altlen): Traversable[String] = {
    if (_len.nonEmpty)
      Traversable(if (_len.head.startsWith("latexmath:")) _altlen.head else _len.head) ++ genClen(_len.tail, if (_len.head.startsWith("latexmath:")) _altlen.tail else _altlen)
    else
      Traversable()
  }
}
| MrInformatic/VulkanWrapper | src/vulkan/wrapper/registry/command/VulkanCommandParam.scala | Scala | mit | 1,284 |
/*
* Copyright 2017-2020 47 Degrees Open Source <https://www.47deg.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package higherkindness.mu.rpc.internal.server.monix
import monix.execution.Scheduler
import monix.reactive.Observable
import cats.data.Kleisli
import cats.effect.Effect
import higherkindness.mu.rpc.internal.server._
import higherkindness.mu.rpc.protocol.CompressionType
import io.grpc._
import io.grpc.ServerCall.Listener
import io.grpc.stub.ServerCalls
import natchez.{EntryPoint, Span}
/** Factories for Monix-backed gRPC [[ServerCallHandler]]s.
  *
  * The first three build plain streaming handlers from the `methods`
  * adapters; the `tracing*` variants additionally continue (or root) a
  * natchez span per call, using tracing headers carried in the gRPC
  * [[Metadata]], and name the span after the full gRPC method name.
  */
object handlers {

  /** Client-streaming: fold an Observable of requests into one response. */
  def clientStreaming[F[_]: Effect, Req, Res](
      f: Observable[Req] => F[Res],
      compressionType: CompressionType
  )(implicit S: Scheduler): ServerCallHandler[Req, Res] =
    ServerCalls.asyncClientStreamingCall(
      methods.clientStreamingMethod[F, Req, Res](f, compressionType)
    )

  /** Server-streaming: turn one request into an Observable of responses. */
  def serverStreaming[F[_]: Effect, Req, Res](
      f: Req => F[Observable[Res]],
      compressionType: CompressionType
  )(implicit S: Scheduler): ServerCallHandler[Req, Res] =
    ServerCalls.asyncServerStreamingCall(
      methods.serverStreamingMethod[F, Req, Res](f, compressionType)
    )

  /** Bidirectional streaming: requests in, responses out, both streamed. */
  def bidiStreaming[F[_]: Effect, Req, Res](
      f: Observable[Req] => F[Observable[Res]],
      compressionType: CompressionType
  )(implicit S: Scheduler): ServerCallHandler[Req, Res] =
    ServerCalls.asyncBidiStreamingCall(
      methods.bidiStreamingMethod[F, Req, Res](f, compressionType)
    )

  /** Client-streaming wrapped in a tracing span for the handler effect. */
  def tracingClientStreaming[F[_]: Effect, Req, Res](
      f: Observable[Req] => Kleisli[F, Span[F], Res],
      descriptor: MethodDescriptor[Req, Res],
      entrypoint: EntryPoint[F],
      compressionType: CompressionType
  )(implicit S: Scheduler): ServerCallHandler[Req, Res] =
    new ServerCallHandler[Req, Res] {
      def startCall(
          call: ServerCall[Req, Res],
          metadata: Metadata
      ): Listener[Req] = {
        // Tracing headers (if present) arrive in the call metadata.
        val kernel = extractTracingKernel(metadata)
        val spanResource =
          entrypoint.continueOrElseRoot(descriptor.getFullMethodName(), kernel)

        val method = methods.clientStreamingMethod[F, Req, Res](
          req => spanResource.use(span => f(req).run(span)),
          compressionType
        )

        ServerCalls.asyncClientStreamingCall(method).startCall(call, metadata)
      }
    }

  /** Server-streaming wrapped in a tracing span for the handler effect. */
  def tracingServerStreaming[F[_]: Effect, Req, Res](
      f: Req => Kleisli[F, Span[F], Observable[Res]],
      descriptor: MethodDescriptor[Req, Res],
      entrypoint: EntryPoint[F],
      compressionType: CompressionType
  )(implicit S: Scheduler): ServerCallHandler[Req, Res] =
    new ServerCallHandler[Req, Res] {
      def startCall(
          call: ServerCall[Req, Res],
          metadata: Metadata
      ): Listener[Req] = {
        val kernel = extractTracingKernel(metadata)
        val spanResource =
          entrypoint.continueOrElseRoot(descriptor.getFullMethodName(), kernel)

        val method = methods.serverStreamingMethod[F, Req, Res](
          req => spanResource.use(span => f(req).run(span)),
          compressionType
        )

        ServerCalls.asyncServerStreamingCall(method).startCall(call, metadata)
      }
    }

  /** Bidirectional streaming wrapped in a tracing span for the handler effect. */
  def tracingBidiStreaming[F[_]: Effect, Req, Res](
      f: Observable[Req] => Kleisli[F, Span[F], Observable[Res]],
      descriptor: MethodDescriptor[Req, Res],
      entrypoint: EntryPoint[F],
      compressionType: CompressionType
  )(implicit S: Scheduler): ServerCallHandler[Req, Res] =
    new ServerCallHandler[Req, Res] {
      def startCall(
          call: ServerCall[Req, Res],
          metadata: Metadata
      ): Listener[Req] = {
        val kernel = extractTracingKernel(metadata)
        val spanResource =
          entrypoint.continueOrElseRoot(descriptor.getFullMethodName(), kernel)

        val method = methods.bidiStreamingMethod[F, Req, Res](
          req => spanResource.use(span => f(req).run(span)),
          compressionType
        )

        ServerCalls.asyncBidiStreamingCall(method).startCall(call, metadata)
      }
    }
}
| frees-io/freestyle-rpc | modules/monix/src/main/scala/higherkindness/mu/rpc/internal/server/monix/handlers.scala | Scala | apache-2.0 | 4,516 |
/*
* This file is part of the regex project.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gnieh.regex
import bytecode._
import org.scalatest._
/** Tests that various regular expressions features are correctly implemented
* by testing matches against strings.
*
* @author Lucas Satabin
*/
class MatchingTest extends FlatSpec with Matchers {

  // --- single literal character -------------------------------------------

  "A single character regular expression" should "match iff the string is this character" in {
    val re = "a".re
    re.isMatchedBy("a") should be(true)
  }

  it should "not match a string containing many times this character" in {
    val re = "a".re
    re.isMatchedBy("aaa") should be(false)
  }

  it should "not match a string containing another character" in {
    val re = "a".re
    re.isMatchedBy("ab") should be(false)
  }

  it should "not match a string a single other character" in {
    val re = "a".re
    re.isMatchedBy("b") should be(false)
  }

  // --- alternation ---------------------------------------------------------

  "Alternative" should "match if at least one possibility matches the string" in {
    val re = "ab|ac|ad|a.".re
    re.isMatchedBy("ac") should be(true)
    re.isMatchedBy("ab") should be(true)
    re.isMatchedBy("ad") should be(true)
    re.isMatchedBy("ae") should be(true)
  }

  it should "not match if the string does not matche any possibility" in {
    val re = "ab|ac|ad|a.".re
    re.isMatchedBy("abad") should be(false)
  }

  // --- character classes (positive and negated) ----------------------------

  "Character set" should "match if the string is contained in this set" in {
    val re = "[a-zA-Z_][a-z[A-Z]\\\\d_]*".re
    re.isMatchedBy("some_identifier43") should be(true)
  }

  it should "not match if at a character is not in the set" in {
    val re = "[a-zA-Z_][a-zA-Z0-9_]*".re
    re.isMatchedBy("98toto") should be(false)
    re.isMatchedBy("tété") should be(false)
  }

  "Negated character set" should "match any character not in the set" in {
    val re = "[^a-z]+".re
    re.isMatchedBy("+98_") should be(true)
  }

  it should "not match character in the set" in {
    val re1 = "[^a-z]+".re
    val re2 = "\\\\D+".re
    val re3 = "\\\\W+".re
    val re4 = "\\\\S+".re
    re1.isMatchedBy("a_") should be(false)
    re2.isMatchedBy("23") should be(false)
    re3.isMatchedBy("a_") should be(false)
    re4.isMatchedBy(" ") should be(false)
  }

  // --- quantifiers: ?, *, + ------------------------------------------------

  "Optional character" should "match if present" in {
    val re = "a?".re
    re.isMatchedBy("a") should be(true)
  }

  it should "match if not present" in {
    val re = "a?".re
    re.isMatchedBy("") should be(true)
  }

  it should "not match if some other character is present" in {
    val re = "a?".re
    re.isMatchedBy("b") should be(false)
  }

  "Starred character" should "match if present once" in {
    val re = "a*".re
    re.isMatchedBy("a") should be(true)
  }

  it should "match if present several times" in {
    val re = "a*".re
    re.isMatchedBy("aaaaaaaaaaaaaa") should be(true)
  }

  it should "match if not present" in {
    val re = "a*".re
    re.isMatchedBy("") should be(true)
  }

  it should "not match if at least one other character is present" in {
    val re = "a*".re
    re.isMatchedBy("aaaaabaaaaaa") should be(false)
  }

  "Plus character" should "match if present once" in {
    val re = "a+".re
    re.isMatchedBy("a") should be(true)
  }

  it should "match if present several times" in {
    val re = "a+".re
    re.isMatchedBy("aaaaaaaaaaaaaa") should be(true)
  }

  it should "not match if not present" in {
    val re = "a+".re
    re.isMatchedBy("") should be(false)
  }

  it should "not match if at least one other character is present" in {
    val re = "a+".re
    re.isMatchedBy("aaaaabaaaaaa") should be(false)
  }
}
| gnieh/tekstlib | src/test/scala/gnieh/regex/MatchingTest.scala | Scala | apache-2.0 | 4,158 |
package recommender
import breeze.linalg.Vector
import org.apache.spark.SparkContext._
import org.apache.spark.mllib.recommendation.Rating
import org.apache.spark.rdd.RDD
/**
* Created by Ondra Fiedler on 24.7.14.
*/
/**
* Recommender which uses the k-nearest neighbors algorithm
* @param vectorsRDD Vectors representing a set of users. Ratings of users are taken from the Recommender's dataHolder if this field is not specified.
* @param numberOfNeighbors Number of neighbors
* @param distanceMetric Metric which determines similarity between users
* @param lazyStartup If false, then one recommendation is done at startup, to get all the lazy init actions done
*/
class KnnRecommender(vectorsRDD: RDD[UserVector], numberOfNeighbors: Int, distanceMetric: DistanceMetric = CosineDistance, lazyStartup: Boolean = false) extends RecommenderWithUserVectorRepresentation(vectorsRDD) with Serializable {

  def this(k: Int, distanceMetric: DistanceMetric = CosineDistance) = this(UserSparseVector.convertRatingsFromHolderToUserSparseVectors(MainHolder.getDataHolder()), k, distanceMetric, false)

  // Eager warm-up: run one throwaway recommendation (and an RDD lookup) at
  // construction time so lazy initialization costs are not paid on the
  // first real request. NOTE(review): `vectorsPairRDD` is presumably
  // inherited from RecommenderWithUserVectorRepresentation — confirm.
  if (!lazyStartup) {
    //"Recommendation" for getting all the lazy init actions done
    recommend(new UserSparseVector(List(Rating(1, 1, 1)), MainHolder.getDataHolder().getNumberOfProducts()), 1)
    vectorsPairRDD.lookup(0)
  }

  /**
   * Get the `numberOfNeighbors` nearest vectors to vector targetUser
   * @param targetUser SparseVector which contains ratings(Double) of a user
   * @return the `numberOfNeighbors` vectors closest to targetUser under `distanceMetric`
   */
  def getNearestNeighbors(targetUser: UserVector): Seq[UserVector] = {
    //Count distance for every vector
    val vectorsWithDistances = vectorsRDD.map(v => (distanceMetric.getDistance(targetUser, v), v))
    //Ordering of vectors is by distance
    implicit def cmp: Ordering[(Double, UserVector)] = Ordering.by[(Double, UserVector), Double](_._1)
    //take k vectors with smallest distances
    val kNearestVectors = vectorsWithDistances.takeOrdered(numberOfNeighbors).map(pair => pair._2)
    kNearestVectors
  }

  /**
   * Recommend products for the user described by `vector`: averages the
   * neighbors' ratings per product and returns the best-rated products the
   * user has not rated yet.
   * @param vector Vector with ratings by target user
   * @param numberOfRecommendedProducts Maximal number of products in the recommendation
   * @return Ratings with recommended products (user field fixed to 0)
   */
  override def recommend(vector: UserVector, numberOfRecommendedProducts: Int): Seq[Rating] = {
    //Get k most similar users
    val nearestNeighbors: Seq[Vector[Double]] = getNearestNeighbors(vector)
    //Get average rating of every product used by similar user
    // NOTE(review): the divisor counts only strictly positive ratings; if an
    // active product index has a non-zero sum but no rating > 0 this divides
    // by zero (yielding NaN/Infinity) — confirm ratings are always positive.
    val addedRatings = nearestNeighbors.reduce(_ + _)
    val numberOfRatings = nearestNeighbors.map(vec => vec.map { value => if (value > 0) 1 else 0}).reduce(_ + _)
    val averageRatings = addedRatings.activeIterator.map { tup => val i = tup._1
      (tup._2 / numberOfRatings(i), i)
    }
    //Sort products by average rating (descending)
    val averageRatingsSorted = averageRatings.toList.sortBy(p => -p._1)
    //Convert to Ratings and exclude products already rated by the user
    val productsAlreadyRatedByUser = vector.activeKeysIterator.toSeq
    val recommendedProducts = averageRatingsSorted.map(p => Rating(0, p._2, p._1)).filter(rating => !productsAlreadyRatedByUser.contains(rating.product))
    recommendedProducts.take(numberOfRecommendedProducts)
  }
}
class UserNotFoundException extends Exception | OndraFiedler/spark-recommender | src/main/scala/recommender/KnnRecommender.scala | Scala | mit | 3,352 |
import sbt._
import sbt.Keys._
import sbt.complete._
import sbt.complete.DefaultParsers._
import sbtrelease.ReleasePlugin.autoImport._
import sbtrelease.{ReleasePlugin, Vcs}
import scala.sys.process._
/**
* == ReadmeRelease Plugin ==
*
* Changes the version in the README.md during a release.
*
*/
object ReadmeReleasePlugin extends AutoPlugin {

  override def requires: Plugins = ReleasePlugin
  override def trigger = AllRequirements

  object autoImport {

    /**
     * Update the readme file during a release
     */
    val updateReadme = ReleaseStep(updateReadmeStep)

    /**
     * Commits the readme changes.
     */
    val commitReadme = ReleaseStep(commitReadmeStep)
  }

  /**
   * Rewrites version strings in README.md to the release version.
   *
   * NOTE(review): the regex replaces *every* `x.y.z`-looking string in the
   * README, including versions of unrelated dependencies — confirm that is
   * intended.
   */
  private def updateReadmeStep(state: State): State = {
    val extracted = Project.extract(state)
    val releaseVersion = extracted.get(version)
    val base = extracted.get(baseDirectory)
    val readmeFile = base / "README.md"
    val versionRegex = """(\\d{1,2}\\.\\d{1,2}\\.\\d{1,2})""".r
    val updatedReadmeContent = versionRegex.replaceAllIn(
      IO.read(readmeFile),
      releaseVersion
    )
    IO.write(readmeFile, updatedReadmeContent)
    state
  }

  /**
   * Stages and commits README.md; skips the commit when the file is
   * unchanged so the release does not fail on an empty commit.
   */
  private def commitReadmeStep(state: State): State = {
    val log = toProcessLogger(state)
    val base = vcs(state).baseDir
    val sign = Project.extract(state).get(releaseVcsSign)
    val signOff = Project.extract(state).get(releaseVcsSignOff)
    val readmeFile = base / "README.md"
    // Fail the release if README.md lies outside the VCS root. Previously the
    // error *message* itself was returned as the relative path, so the
    // intended failure never happened and git was handed a bogus path.
    val relativePath = IO
      .relativize(base, readmeFile)
      .getOrElse(
        sys.error(
          "Version file [%s] is outside of this VCS repository with base directory [%s]!" format (readmeFile, base)
        )
      )
    vcs(state).add(relativePath) !! log
    val vcsAddOutput = (vcs(state).status !!).trim
    if (vcsAddOutput.isEmpty) {
      state.log.info("README.md hasn't been changed.")
    } else {
      vcs(state).commit("Update release version in readme", sign, signOff) ! log
    }
    state
  }

  /**
   * Extracts the used vcs.
   *
   * Copied from the sbt-release plugin.
   * @param state sbt state
   * @return vcs implementation
   */
  private def vcs(state: State): Vcs =
    Project
      .extract(state)
      .get(releaseVcs)
      .getOrElse(sys.error("Aborting release. Working directory is not a repository of a recognized VCS."))

  /**
   * Creates a ProcessLogger from the current sbt state.
   *
   * Copied from the sbt-release plugin.
   * NOTE(review): stderr output is logged at info level — presumably to keep
   * git's routine stderr chatter out of the error log; confirm.
   * @param state sbt state
   * @return a process logger
   */
  private def toProcessLogger(state: State): ProcessLogger = new ProcessLogger {
    override def err(s: => String): Unit = state.log.info(s)
    override def out(s: => String): Unit = state.log.info(s)
    override def buffer[T](f: => T): T = state.log.buffer(f)
  }
}
| fsat/sbt-native-packager | project/ReadmeReleasePlugin.scala | Scala | bsd-2-clause | 2,761 |
package com.scalaAsm.x86
package Instructions
package General
// Description: Bit Population Count
// Category: general/bit
// Instruction definition for POPCNT (population count): counts the number of
// set bits in the source operand and writes the count to the destination.
trait POPCNT extends InstructionDefinition {
  val mnemonic = "POPCNT"
}
// DSL entry point: resolves the two-operand encodings declared in POPCNTImpl.
object POPCNT extends TwoOperands[POPCNT] with POPCNTImpl
trait POPCNTImpl extends POPCNT {
  // 16-bit form: POPCNT r16, r/m16 — two-byte opcode 0F B8.
  // NOTE(review): `/r` is a postfix operator from the project's opcode DSL,
  // presumably marking ModRM /r (register-in-reg-field) encoding — confirm
  // against the DSL definition.
  implicit object _0 extends TwoOp[r16, rm16] {
    val opcode: TwoOpcodes = (0x0F, 0xB8) /r
    val format = RegRmFormat
  }
  // 32-bit form: POPCNT r32, r/m32 — same opcode, operand size from context.
  implicit object _1 extends TwoOp[r32, rm32] {
    val opcode: TwoOpcodes = (0x0F, 0xB8) /r
    val format = RegRmFormat
  }
  // 64-bit form: POPCNT r64, r/m64 — requires the REX.W prefix.
  implicit object _2 extends TwoOp[r64, rm64] {
    val opcode: TwoOpcodes = (0x0F, 0xB8) /r
    override def prefix = REX.W(true)
    val format = RegRmFormat
  }
}
| bdwashbu/scala-x86-inst | src/main/scala/com/scalaAsm/x86/Instructions/General/POPCNT.scala | Scala | apache-2.0 | 713 |
package mesosphere.marathon
package core.task.update.impl.steps
//scalastyle:off
import javax.inject.Named
import akka.Done
import akka.actor.ActorRef
import com.google.inject.{ Inject, Provider }
import mesosphere.marathon.MarathonSchedulerActor.ScaleRunSpec
import mesosphere.marathon.core.condition.Condition
import mesosphere.marathon.core.instance.update.{ InstanceChange, InstanceChangeHandler }
import org.slf4j.LoggerFactory
import scala.concurrent.Future
//scalastyle:on
/**
* Trigger rescale of affected app if a task died or a reserved task timed out.
*/
/**
 * Instance-change handler that asks the scheduler to re-evaluate the scale of
 * the affected run spec whenever an instance newly enters a state that frees
 * capacity (reserved, unreachable-inactive, or any terminal condition).
 */
class ScaleAppUpdateStepImpl @Inject() (
  @Named("schedulerActor") schedulerActorProvider: Provider[ActorRef]) extends InstanceChangeHandler {
  private[this] val log = LoggerFactory.getLogger(getClass)
  private[this] lazy val schedulerActor = schedulerActorProvider.get()
  // A condition justifies a scale check when the instance no longer occupies
  // its slot: terminal states, reserved tasks, and unreachable-inactive.
  private[this] def scalingWorthy: Condition => Boolean = {
    case _: Condition.Terminal            => true
    case Condition.Reserved               => true
    case Condition.UnreachableInactive    => true
    case _                                => false
  }
  override def name: String = "scaleApp"
  override def process(update: InstanceChange): Future[Done] = {
    // TODO(PODS): it should be up to a tbd TaskUnreachableBehavior how to handle Unreachable
    calcScaleEvent(update) match {
      case Some(event) => schedulerActor ! event
      case None        => // no state transition worth scaling on
    }
    Future.successful(Done)
  }
  /**
   * Builds a ScaleRunSpec message when this change transitions the instance
   * *into* a scaling-worthy condition (i.e. the previous state, if any, was
   * not already scaling-worthy); returns None otherwise.
   */
  def calcScaleEvent(update: InstanceChange): Option[ScaleRunSpec] = {
    val nowWorthy = scalingWorthy(update.condition)
    val wasWorthy = update.lastState.exists(last => scalingWorthy(last.condition))
    if (nowWorthy && !wasWorthy) {
      val runSpecId = update.runSpecId
      val instanceId = update.id
      val state = update.condition
      log.info(s"initiating a scale check for runSpec [$runSpecId] due to [$instanceId] $state")
      // TODO(PODS): we should rename the Message and make the SchedulerActor generic
      Some(ScaleRunSpec(runSpecId))
    } else {
      None
    }
  }
}
| guenter/marathon | src/main/scala/mesosphere/marathon/core/task/update/impl/steps/ScaleAppUpdateStepImpl.scala | Scala | apache-2.0 | 1,883 |
package de.leanovate.swaggercheck.schema
import de.leanovate.swaggercheck.schema.model.{ValidationSuccess, ValidationFailure, ValidationResult}
import org.scalacheck.Prop
import org.scalacheck.Prop.Result
import scala.language.implicitConversions
object ValidationResultToProp {
  /**
   * Implicitly converts a schema [[ValidationResult]] into a ScalaCheck `Prop`:
   * a success becomes a proved property, while a failure becomes a falsified
   * property carrying the individual failure messages as labels.
   */
  implicit def verifyProp(verifyResult: ValidationResult): Prop =
    verifyResult match {
      case ValidationFailure(failures) =>
        Prop(Result(status = Prop.False, labels = failures.toSet))
      case ValidationSuccess =>
        Prop.proved
    }
}
| leanovate/swagger-check | json-schema-gen/src/main/scala/de/leanovate/swaggercheck/schema/ValidationResultToProp.scala | Scala | mit | 568 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.features.kryo.serialization
import com.esotericsoftware.kryo.io.{Input, Output}
import org.locationtech.geomesa.features.serialization.GeometrySerialization
/**
 * Geometry serialization specialized to Kryo's stream types: writes to a Kryo
 * `Output` and reads from a Kryo `Input`, with all logic inherited from
 * [[GeometrySerialization]].
 */
object KryoGeometrySerialization extends GeometrySerialization[Output, Input]
| jahhulbert-ccri/geomesa | geomesa-features/geomesa-feature-kryo/src/main/scala/org/locationtech/geomesa/features/kryo/serialization/KryoGeometrySerialization.scala | Scala | apache-2.0 | 737 |
package scalapb
import java.io.File
import protocbridge.{ProtocBridge, ProtocCodeGenerator}
import coursier.parse.DependencyParser
import coursier.core.Configuration
import com.github.ghik.silencer.silent
import coursier.core.Dependency
import java.net.URLClassLoader
import java.util.jar.JarInputStream
import java.io.FileInputStream
import protocbridge.SandboxedJvmGenerator
import scala.util.{Try, Success, Failure}
import protocbridge.ProtocRunner
/**
 * Parsed command-line configuration for the scalapbc driver.
 *
 * Fields mirror the flags handled by [[ScalaPBC.processArgs]]:
 * `version` — protoc version to fetch (from `-v<version>`);
 * `throwException` — throw instead of `sys.exit` on failure (`--throw`);
 * `args` — arguments passed through to protoc untouched;
 * `customProtocLocation` — explicit protoc binary path (`--protoc=`);
 * `namedGenerators` — in-process code generators (`--custom-gen=`);
 * `executableArtifacts` — coursier coordinates of executable plugins (`--plugin-artifact=`);
 * `jvmPlugins` — coursier coordinates of sandboxed JVM generators (`--jvm-plugin=`).
 */
case class Config(
    version: String = scalapb.compiler.Version.protobufVersion,
    throwException: Boolean = false,
    args: Seq[String] = Seq.empty,
    customProtocLocation: Option[String] = None,
    namedGenerators: Seq[(String, ProtocCodeGenerator)] = Seq("scala" -> ScalaPbCodeGenerator),
    executableArtifacts: Seq[String] = Seq.empty,
    jvmPlugins: Seq[(String, String)] = Seq.empty
)
// Raised instead of exiting the JVM when Config.throwException is set,
// so embedding callers can handle failures programmatically.
class ScalaPbcException(msg: String) extends RuntimeException(msg)
/**
 * Standalone driver for running protoc with ScalaPB: parses command-line
 * flags into a [[Config]], fetches protoc and plugin artifacts via coursier,
 * and delegates code generation to protocbridge.
 */
object ScalaPBC {
  // Recognized flag prefixes; everything else is forwarded to protoc.
  private val CustomPathArgument = "--protoc="
  private val CustomGenArgument = "--custom-gen="
  private val PluginArtifactArgument = "--plugin-artifact="
  private val JvmPluginArgument = "--jvm-plugin="
  /**
   * Folds over the raw argv. Flags seen before a bare "--" configure the
   * driver; the "--" token (or any unrecognized token) switches to
   * pass-through mode, after which every argument is forwarded to protoc.
   */
  def processArgs(args: Array[String]): Config = {
    case class State(cfg: Config, passThrough: Boolean)
    args
      .foldLeft(State(Config(), false)) { case (state, item) =>
        (state.passThrough, item) match {
          case (false, "--") => state.copy(passThrough = true)
          case (false, "--throw") => state.copy(cfg = state.cfg.copy(throwException = true))
          case (false, p) if p.startsWith(CustomGenArgument) =>
            // --custom-gen=name=fully.qualified.ObjectName: reflectively loads
            // the Scala object (note the appended "$") as an in-process generator.
            val Array(genName, klassName) = p.substring(CustomGenArgument.length).split('=')
            val klass = Class.forName(klassName + "$")
            val gen = klass.getField("MODULE$").get(klass).asInstanceOf[ProtocCodeGenerator]
            state.copy(
              cfg = state.cfg.copy(namedGenerators = state.cfg.namedGenerators :+ (genName -> gen))
            )
          case (false, p) if p.startsWith(JvmPluginArgument) =>
            // --jvm-plugin=name=coursier:coordinates
            val Array(genName, artifactName) = p.substring(JvmPluginArgument.length).split('=')
            state.copy(
              cfg = state.cfg.copy(jvmPlugins = state.cfg.jvmPlugins :+ (genName -> artifactName))
            )
          case (false, p) if p.startsWith(CustomPathArgument) =>
            state.copy(
              cfg = state.cfg
                .copy(customProtocLocation = Some(p.substring(CustomPathArgument.length)))
            )
          case (false, p) if p.startsWith(PluginArtifactArgument) =>
            state.copy(cfg =
              state.cfg
                .copy(executableArtifacts =
                  state.cfg.executableArtifacts :+ p.substring(PluginArtifactArgument.length())
                )
            )
          case (false, v) if v.startsWith("-v") =>
            // -v<version>: selects the protoc version to download.
            state.copy(cfg = state.cfg.copy(version = v.substring(2).trim))
          case (_, other) =>
            // First unrecognized token flips to pass-through for the rest of argv.
            state.copy(passThrough = true, cfg = state.cfg.copy(args = state.cfg.args :+ other))
        }
      }
      .cfg
  }
  /**
   * Resolves and downloads a single coursier artifact (for the running Scala
   * version). Returns Left with a human-readable message on parse or
   * resolution failure.
   */
  @silent("method right in class Either is deprecated")
  def fetchArtifact(artifact: String): Either[String, (Dependency, Seq[File])] = {
    import coursier._
    for {
      dep <- DependencyParser
        .dependency(
          artifact,
          scala.util.Properties.versionNumberString,
          Configuration.empty
        )
        .right
      runResult = Fetch().addDependencies(dep).run()
      outcome <-
        if (runResult.isEmpty) Left(s"Could not find artifact for $artifact")
        else Right(runResult)
    } yield (dep, outcome)
  }
  /**
   * Fetches a batch of (name, coordinates) pairs, failing fast: the first
   * resolution error aborts the whole batch and is returned as Left.
   */
  def fetchArtifacts(
      artifacts: Seq[(String, String)]
  ): Either[String, Seq[(String, (Dependency, Seq[File]))]] =
    artifacts.foldLeft[Either[String, Seq[(String, (Dependency, Seq[File]))]]](Right(Seq())) {
      case (Left(error), _) => Left(error)
      case (Right(result), (name, artifact)) =>
        fetchArtifact(artifact) match {
          case Right((dep, files)) => Right(result :+ ((name, (dep, files))))
          case Left(error)         => Left(error)
        }
    }
  /**
   * Reads the Main-Class attribute from a jar's manifest and appends "$" to
   * address the Scala companion object.
   * NOTE(review): `jin.getManifest()` can return null for manifest-less jars,
   * which would NPE on getMainAttributes — confirm inputs always carry a manifest.
   */
  def findMainClass(f: File): Either[String, String] = {
    val jin = new JarInputStream(new FileInputStream(f))
    try {
      val manifest = jin.getManifest()
      Option(manifest.getMainAttributes().getValue("Main-Class"))
        .toRight("Could not find main class for plugin")
        .map(_ + "$")
    } finally {
      jin.close()
    }
  }
  // Downloads (or reuses a cached) protoc binary for the given version,
  // converting any download failure into a Left message.
  private def getProtoc(version: String): Either[String, String] = {
    Try(protocbridge.CoursierProtocCache.getProtoc(version)) match {
      case Success(f) => Right(f.getAbsolutePath())
      case Failure(e) => Left(e.getMessage)
    }
  }
  /**
   * Resolves generators, plugin executables and the protoc binary, then runs
   * protocbridge. Returns protoc's exit code. Depending on
   * `config.throwException`, errors either throw [[ScalaPbcException]] or
   * print and `sys.exit(1)`.
   */
  @silent("method right in class Either is deprecated")
  private[scalapb] def runProtoc(config: Config): Int = {
    // A generator name may be registered either in-process (--custom-gen) or
    // sandboxed (--jvm-plugin), but not both.
    if (
      config.namedGenerators
        .map(_._1)
        .toSet
        .intersect(config.jvmPlugins.map(_._1).toSet)
        .nonEmpty
    ) {
      throw new RuntimeException(
        s"Same plugin name provided by $PluginArtifactArgument and $JvmPluginArgument"
      )
    }
    // Error reporting honoring config.throwException; never returns normally.
    def fatalError(err: String): Nothing = {
      if (config.throwException) {
        throw new ScalaPbcException(s"Error: $err")
      } else {
        System.err.println(err)
        sys.exit(1)
      }
    }
    // Load each JVM plugin's main class in an isolated classloader (parent =
    // null) so plugin dependencies cannot clash with the driver's classpath.
    val jvmGenerators = fetchArtifacts(
      config.jvmPlugins
    ) match {
      case Left(error) => fatalError(error)
      case Right(arts) =>
        arts.map { case (name, (_, files)) =>
          val urls = files.map(_.toURI().toURL()).toArray
          val loader = new URLClassLoader(urls, null)
          val mainClass = findMainClass(files.head) match {
            case Right(v)  => v
            case Left(err) => fatalError(err)
          }
          name -> SandboxedJvmGenerator.load(mainClass, loader)
        }
    }
    // Executable plugins must resolve to exactly one file, which is marked
    // executable and handed to protoc via --plugin=name=path.
    val pluginArgs = fetchArtifacts(
      config.executableArtifacts.map(a => ("", a))
    ) match {
      case Left(error) => fatalError(error)
      case Right(arts) =>
        arts.map {
          case (_, (dep, file :: Nil)) =>
            file.setExecutable(true)
            s"--plugin=${dep.module.name.value}=${file.getAbsolutePath()}"
          case (_, (dep, files)) =>
            fatalError(s"Got ${files.length} files for dependency $dep. Only one expected.")
        }
    }
    // Explicit --protoc= path wins over the downloaded binary.
    val protoc =
      config.customProtocLocation
        .getOrElse(getProtoc(config.version).fold(fatalError(_), identity(_)))
    ProtocBridge.runWithGenerators(
      ProtocRunner(protoc),
      namedGenerators = config.namedGenerators ++ jvmGenerators,
      params = config.args ++ pluginArgs
    )
  }
  /**
   * CLI entry point: parse flags, run protoc, and surface the exit code
   * either via sys.exit or (with --throw) as a ScalaPbcException.
   */
  def main(args: Array[String]): Unit = {
    val config = processArgs(args)
    val code = runProtoc(config)
    if (!config.throwException) {
      sys.exit(code)
    } else {
      if (code != 0) {
        throw new ScalaPbcException(s"Exit with code $code")
      }
    }
  }
}
| scalapb/ScalaPB | scalapbc/src/main/scala/scalapb/ScalaPBC.scala | Scala | apache-2.0 | 7,010 |
package com.sksamuel.elastic4s
import scala.concurrent.{ExecutionContext, Future}
import scala.language.higherKinds
/** Minimal functor type class: lifts a pure function `A => B` into the context `F`. */
trait Functor[F[_]] {
  def map[A, B](fa: F[A])(f: A => B): F[B]
}
object Functor {
  /** Summons the implicit [[Functor]] instance for `F`. */
  def apply[F[_]: Functor](): Functor[F] = implicitly[Functor[F]]
  /**
   * Functor instance for [[scala.concurrent.Future]].
   * NOTE(review): the implicit ExecutionContext defaults to the global pool,
   * which hides the thread-pool choice from callers — kept as-is for source
   * compatibility.
   */
  implicit def FutureFunctor(implicit ec: ExecutionContext = ExecutionContext.Implicits.global): Functor[Future] =
    new Functor[Future] {
      override def map[A, B](fa: Future[A])(f: A => B): Future[B] =
        fa.map(a => f(a))
    }
}
| stringbean/elastic4s | elastic4s-core/src/main/scala/com/sksamuel/elastic4s/Functor.scala | Scala | apache-2.0 | 498 |
package persistence.dal
import com.typesafe.scalalogging.LazyLogging
import persistence.entities.{ Project, Projects }
import slick.driver.JdbcProfile
import utils.DbModule
import scala.concurrent.Future
/** Data-access interface for [[Project]] rows. */
trait ProjectsDal {
  // Inserts a project and returns its generated id.
  def save(proj: Project): Future[Int]
  // Returns all stored projects.
  def getProjects(): Future[Seq[Project]]
  // Looks a project up by primary key; None when absent.
  def getProjectById(id: Int): Future[Option[Project]]
  // Looks a project up by its git repository URL; None when absent.
  def getProjectByGitRepo(repo: String): Future[Option[Project]]
  // Creates the backing table schema (intended for setup/tests).
  def createTables(): Future[Unit]
}
/**
 * Slick-backed implementation of [[ProjectsDal]]; each method runs a single
 * DBIO action on the injected database.
 */
class ProjectsDalImpl(implicit val db: JdbcProfile#Backend#Database, implicit val profile: JdbcProfile) extends ProjectsDal with DbModule with Projects with LazyLogging {
  import profile.api._
  // `returning projects.map(_.id)` makes the insert yield the auto-generated id.
  override def save(proj: Project): Future[Int] = db.run((projects returning projects.map(_.id)) += proj)
  override def getProjects(): Future[Seq[Project]] = db.run(projects.result)
  override def getProjectById(id: Int): Future[Option[Project]] = db.run(projects.filter(_.id === id).result.headOption)
  override def getProjectByGitRepo(repo: String): Future[Option[Project]] = db.run(projects.filter(_.gitRepo === repo).result.headOption)
  override def createTables(): Future[Unit] = db.run(projects.schema.create)
}
| ShiftForward/ridgeback | src/main/scala/persistence/dal/ProjectsDal.scala | Scala | mit | 1,186 |
package com.nulabinc.backlog.migration.common.convert.writes
import javax.inject.Inject
import com.nulabinc.backlog.migration.common.convert.Writes
import com.nulabinc.backlog.migration.common.domain.BacklogAttributeInfo
import com.nulabinc.backlog.migration.common.utils.Logging
import com.nulabinc.backlog4j.AttributeInfo
/**
* @author
* uchida
*/
// Converts a Backlog4j AttributeInfo into the migration-domain
// BacklogAttributeInfo representation.
private[common] class AttributeInfoWrites @Inject() ()
    extends Writes[AttributeInfo, BacklogAttributeInfo]
    with Logging {
  override def writes(attributeInfo: AttributeInfo): BacklogAttributeInfo = {
    BacklogAttributeInfo(
      // NOTE(review): Option(attributeInfo) guards against a null *argument*,
      // not a null id — if getId itself may be null this should probably be
      // Option(attributeInfo.getId); confirm with callers.
      optId = Option(attributeInfo).map(_.getId),
      typeId = attributeInfo.getTypeId
    )
  }
}
| nulab/backlog-migration-common | core/src/main/scala/com/nulabinc/backlog/migration/common/convert/writes/AttributeInfoWrites.scala | Scala | mit | 695 |
package br.edu.ifrn.potigol.editor
import java.awt.{ Color, Dimension, Font }
import java.awt.event.{ ActionEvent, KeyEvent }
import java.io.PrintWriter
import scala.collection.mutable.Stack
import scala.swing.{ Action, BorderPanel }
import scala.swing.{ FileChooser, MainFrame, Menu, MenuBar, MenuItem, Separator, SimpleSwingApplication }
import scala.swing.BorderPanel.Position.{ Center, West }
import scala.swing.event.{ Key, KeyReleased, KeyTyped }
import javax.swing.{ BorderFactory, KeyStroke }
import javax.swing.text.SimpleAttributeSet
import br.edu.ifrn.potigol.swing.TextPane
import javax.swing.text.StyleConstants.{ setBold, setFontFamily, setForeground }
import scala.swing.Frame
import java.awt.Point
import scala.swing.Dialog
import br.edu.ifrn.potigol.parser.potigolParser.BOOLEANO
import br.edu.ifrn.potigol.parser.potigolParser.BS
import br.edu.ifrn.potigol.parser.potigolParser.CHAR
import br.edu.ifrn.potigol.parser.potigolParser.COMMENT
import br.edu.ifrn.potigol.parser.potigolParser.ES
import br.edu.ifrn.potigol.parser.potigolParser.FLOAT
import br.edu.ifrn.potigol.parser.potigolParser.ID
import br.edu.ifrn.potigol.parser.potigolParser.INT
import br.edu.ifrn.potigol.parser.potigolParser.MS
import br.edu.ifrn.potigol.parser.potigolParser.STRING
import javax.swing.text.Caret
import scala.swing.ScrollPane
import javax.swing.text.DefaultCaret
import javafx.scene.control.ScrollPane.ScrollBarPolicy
import java.awt.Robot
object Editor extends SimpleSwingApplication {
System.setErr(new java.io.PrintStream(new java.io.OutputStream() {
override def write(i: Int) {}
}))
val tipos = List("Inteiro", "Real", "Texto", "Lógico", "Logico")
val metodos = List("inverta", "cabeça", "ordene", "Lista", "Matriz", "Cubo",
"inteiro", "arredonde", "texto", "real", "tamanho", "posição", "posiçao",
"posicão", "posicao", "contém", "contem", "maiúsculo", "maiusculo",
"minúsculo", "minusculo", "inverta", "divida", "lista", "cabeça", "cabeca",
"cauda", "último", "ultimo", "pegue", "descarte", "selecione", "mapeie",
"descarte_enquanto", "pegue_enquanto", "ordene", "junte", "insira",
"remova", "mutável", "mutavel", "imutável", "imutavel", "vazia", "injete",
"primeiro", "segundo", "terceiro", "quarto", "quinto", "sexto", "sétimo",
"setimo", "oitavo", "nono", "décimo", "decimo")
val funcoes = List("leia_inteiro", "leia_inteiros", "leia_real", "leia_reais",
"leia_texto", "leia_textos", "sen", "cos", "tg", "aleatório", "aleatorio",
"arcsen", "arccos", "arctg", "abs", "raiz", "log", "log10")
val compilador = new br.edu.ifrn.potigol.Compilador(false)
var arq: Option[String] = None
var modificado = false
val is = getClass().getResource("/fonts/DejaVuSansMono.ttf")
// val fontname = "DejaVu Sans Mono"
val fonte = Font.createFont(Font.TRUETYPE_FONT, is.openStream()).deriveFont(Font.BOLD, 20);
// var fonte = new java.awt.Font(fontname, Font.BOLD, 20)
var corFrente = new Color(248, 248, 242)
var corFundo = new Color(39, 40, 34)
val undo = Stack[(String, Int)]()
val robot = new Robot()
override def main(args: Array[String]) {
super.main(args)
if (args.length > 1) {
arq = Some(args(1))
modificado = false
}
}
/*
override def startup(args: Array[String]) {
println(args.length)
if (args.length > 0) {
arq = Some(args(1))
modificado = false
}
super.startup(args)
}
*/
def top = new MainFrame {
import javax.imageio.ImageIO
val i = ImageIO.read(getClass().getResource("/potigol.png"));
iconImage = i
this.location = new Point(200, 100)
title = s"${arq.getOrElse("Sem nome")} - Potigol"
/* arq match {
case Some(nome) =>
editor.text = scala.io.Source.fromFile(nome, "utf-8").getLines.mkString("\\n")
atualizar()
case None=>
}*/
val numeracao = new TextPane() {
border = BorderFactory.createCompoundBorder(
border,
BorderFactory.createEmptyBorder(10, 10, 10, 10));
background = new Color(56, 57, 49)
foreground = new Color(170, 167, 149)
enabled = false
editable = false
focusable = false
font = Font.createFont(Font.TRUETYPE_FONT, is.openStream()).deriveFont(Font.BOLD, 20);
//new java.awt.Font(fontname, Font.PLAIN, 20)
}
def ed = editor
val editor = new TextPane() {
this.border = BorderFactory.createCompoundBorder(
border,
BorderFactory.createEmptyBorder(10, 10, 10, 10));
caret.color = corFrente
background = corFundo
foreground = corFrente
font = fonte
text = "\\n"
caret.position = 0
}
contents = new scala.swing.ScrollPane() {
contents = new BorderPanel() {
layout(numeracao) = West
layout(editor) = Center
}
}
val arquivo = new FileChooser() {
fileFilter = FiltroPotigol
}
var texto = ""
size = new Dimension(800, 600)
menuBar = new MenuBar {
val menuEditar = new Menu("Editar") {
peer.setMnemonic('E')
val itemAumentar = new MenuItem("Aumentar Fonte") {
action = Action("Aumentar Fonte") {
editor.font = Font.createFont(Font.TRUETYPE_FONT, is.openStream()).deriveFont(Font.BOLD, editor.font.getSize() + 2);
//editor.font = new java.awt.Font(fontname, Font.BOLD, editor.font.getSize() + 2)
numeracao.font = editor.font
}
iconTextGap = 20
peer.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_EQUALS, ActionEvent.CTRL_MASK))
}
val itemDiminuir = new MenuItem("Diminuir Fonte") {
action = Action("Diminuir Fonte") {
if (editor.font.getSize() > 2) {
editor.font = Font.createFont(Font.TRUETYPE_FONT, is.openStream()).deriveFont(Font.BOLD, editor.font.getSize() - 2);
// editor.font = new java.awt.Font(fontname, Font.BOLD, editor.font.getSize() - 2)
numeracao.font = editor.font
}
}
iconTextGap = 20
peer.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_MINUS, ActionEvent.CTRL_MASK))
}
val itemDesfazer = new MenuItem("Desfazer digitação") {
action = Action("Desfazer digitação") {
if (!undo.isEmpty) {
val ultimo = undo.pop
editor.text = ultimo._1
editor.caret.position = ultimo._2
texto = ultimo._1
}
colorir()
}
iconTextGap = 20
peer.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_Z, ActionEvent.CTRL_MASK))
}
val itemRecortar = new MenuItem("Recortar") {
action = Action("Recortar") {
editor.cut()
}
iconTextGap = 20
peer.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_X, ActionEvent.CTRL_MASK))
}
val itemCopiar = new MenuItem("Copiar") {
action = Action("Copiar") {
editor.copy()
}
iconTextGap = 20
peer.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_C, ActionEvent.CTRL_MASK))
}
val itemColar = new MenuItem("Colar") {
action = Action("Colar") {
editor.paste()
}
iconTextGap = 20
peer.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_V, ActionEvent.CTRL_MASK))
}
val itemSelecionarTudo = new MenuItem("Selecionar tudo") {
action = Action("Selecionar tudo") {
editor.selectAll
}
iconTextGap = 20
peer.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_T, ActionEvent.CTRL_MASK))
}
val itemFormatar = new MenuItem("Formatar código") {
action = Action("Formatar") {
editor.text = ParserEditor.pretty(editor.text)
texto = ""
atualizar
}
iconTextGap = 20
peer.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_F, ActionEvent.CTRL_MASK))
}
contents += itemDesfazer
contents += new Separator
contents += itemRecortar
contents += itemCopiar
contents += itemColar
contents += new Separator
contents += itemSelecionarTudo
contents += new Separator
contents += itemFormatar
contents += itemAumentar
contents += itemDiminuir
}
val menuArquivo = new Menu("Arquivo") {
peer.setMnemonic('A')
val itemNovo = new MenuItem("Novo") {
action = Action("Novo") {
if (!modificado) {
editor.text = ""
undo.clear()
arq = None
}
else {
salvar {
itemSalvar.action.apply()
if (!modificado) {
editor.text = ""
undo.clear()
atualizar()
arq = None
}
} {
editor.text = ""
undo.clear()
atualizar()
arq = None
}
}
title = s"${arq.getOrElse("Sem nome").split("/").last} - Potigol"
}
peer.setMnemonic('N')
iconTextGap = 20
peer.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_N, ActionEvent.CTRL_MASK))
}
val itemAbrir = new MenuItem("Abrir") {
action = Action("Abrir") {
if (!modificado) {
val res = arquivo.showOpenDialog(editor)
res match {
case FileChooser.Result.Approve =>
arq = Some(arquivo.selectedFile.getPath)
editor.text = scala.io.Source.fromFile(arq.get, "utf-8").getLines.mkString("\\n")
atualizar()
modificado = false
}
}
else {
salvar {
itemSalvar.action.apply()
if (!modificado) {
val res = arquivo.showOpenDialog(editor)
res match {
case FileChooser.Result.Approve =>
arq = Some(arquivo.selectedFile.getPath)
editor.text = scala.io.Source.fromFile(arq.get, "utf-8").getLines.mkString("\\n")
atualizar()
modificado = false
}
}
} {
val res = arquivo.showOpenDialog(editor)
res match {
case FileChooser.Result.Approve =>
arq = Some(arquivo.selectedFile.getPath)
println(arq)
editor.text = scala.io.Source.fromFile(arq.get, "utf-8").getLines.mkString("\\n")
atualizar()
modificado = false
}
}
}
title = s"${arq.getOrElse("Sem nome").split("/").last} - Potigol"
}
this.enabled = true
iconTextGap = 20
peer.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_A, ActionEvent.CTRL_MASK))
}
val itemSalvar = new MenuItem("Salvar") {
action = Action("Salvar") {
if (arq != None) {
val writer = new PrintWriter(arq.get, "UTF-8")
writer.print(editor.text)
writer.close()
modificado = false
}
else {
itemSalvarComo.action.apply()
}
title = s"${arq.getOrElse("Sem nome")} - Potigol"
}
iconTextGap = 20
this.enabled = true
peer.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_S, ActionEvent.CTRL_MASK))
}
val itemSalvarComo = new MenuItem("SalvarComo") {
action = Action("Salvar Como ...") {
val res = arquivo.showSaveDialog(editor)
res match {
case FileChooser.Result.Cancel =>
case FileChooser.Result.Approve =>
arq = Some(arquivo.selectedFile.getPath)
if (!arq.get.endsWith(".poti")) arq = Some(arq.get + ".poti")
val writer = new PrintWriter(arq.get, "UTF-8")
writer.print(editor.text)
writer.close()
modificado = false
title = s"${arq.getOrElse("Sem nome")} - Potigol"
}
}
iconTextGap = 20
this.enabled = true
peer.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_S, ActionEvent.CTRL_MASK))
}
val itemImprimir = new MenuItem("Imprimir") {
action = Action("Imprimir") {
val writer = new PrintWriter("print.html", "UTF-8")
writer.print(ParserEditor.print(editor.text))
writer.close()
Css.save
import java.awt.Desktop
import java.net.URI
Desktop.getDesktop.browse(new URI("print.html"))
}
iconTextGap = 20
peer.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_P, ActionEvent.CTRL_MASK))
}
import scala.sys.process._
val executar = new MenuItem("Executar") {
iconTextGap = 20
action = Action("Executar") {
val rt = Runtime.getRuntime()
if (!modificado) {
if (System.getProperty("os.name").startsWith("Windows")) {
rt.exec("cmd.exe /T:1F /c start exec.bat " + arq.get)
}
else { rt.exec("./exec.sh " + arq.get) }
}
else {
salvar {
itemSalvar.action.apply()
} {
if (System.getProperty("os.name").startsWith("Windows")) {
rt.exec("cmd.exe /T:1F /c start exec.bat " + arq.get)
}
else { rt.exec("./exec.sh " + arq.get) }
}
}
}
iconTextGap = 20
this.enabled = true
peer.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_R, ActionEvent.CTRL_MASK))
}
val itemSair = new MenuItem("Sair") {
iconTextGap = 20
action = Action("Sair") {
if (!modificado) {
sys.exit(0)
}
else {
salvar {
itemSalvar.action.apply()
} {
sys.exit(0)
}
}
}
}
contents += itemAbrir
contents += itemNovo
contents += new Separator
contents += itemSalvar
contents += itemSalvarComo
contents += itemImprimir
contents += new Separator
contents += executar
contents += new Separator
contents += itemSair
}
val menuAjuda = new Menu("Ajuda") {
peer.setMnemonic('J')
val itemConteudo = new MenuItem("Conteúdo de Ajuda") {
iconTextGap = 20
this.enabled = true
action = Action("Ajuda") {
import java.awt.Desktop
import java.net.URI
Desktop.getDesktop.browse(new URI("http://potigol.github.io"))
}
}
val itemSobre = new MenuItem("Sobre") {
iconTextGap = 20
action = Action("Sobre") {
Sobre.visible = true
}
}
contents += itemConteudo
contents += new Separator
contents += itemSobre
}
contents += menuArquivo
contents += menuEditar
contents += menuAjuda
}
listenTo(editor.keys)
atualizar()
reactions += {
case KeyTyped(_, c @ ('}' | ']' | ')'), _, _) => {
modificado = true
val p = editor.caret.position
if (editor.text.drop(p).headOption == Some(c))
editor.text = editor.text.take(p) + editor.text.drop(p + 1)
editor.caret.position = p
atualizar
}
case KeyTyped(_, a @ ('"' | '{' | '(' | '['), _, _) => {
modificado = true
val p = editor.caret.position
if (a == '"' && editor.text.drop(p).headOption == Some('"')) {
editor.text = editor.text.take(p) + editor.text.drop(p + 1)
editor.caret.position = p
}
else {
val c = a match { case '"' => '"' case '{' => '}' case '(' => ')' case '[' => ']' }
editor.text = editor.text.take(p) + c + editor.text.drop(p)
editor.caret.position = p
}
atualizar
}
case KeyReleased(_, Key.Enter, _, _) =>
{
val p = editor.caret.position - 1
val p1 = editor.caret.dot
val fim = editor.text.take(p)
val linha = fim.drop(fim.lastIndexOf('\\n') + 1)
val espacos = linha.prefixLength(_ == ' ')
val loc = contents(0).asInstanceOf[ScrollPane].verticalScrollBar.location;
if (fim.endsWith("senão") ||
fim.endsWith("senao")) {
editor.text = editor.text.take(p) + "\\n " + " " * espacos + editor.text.drop(p + 1)
editor.caret.position = p + 3 + espacos
}
else if (fim.endsWith("faça") ||
linha.trim.startsWith("tipo") ||
fim.endsWith("faca") ||
fim.endsWith("então") ||
fim.endsWith("entao")) {
editor.text = editor.text.take(p) + "\\n " + " " * espacos + "\\n" + " " * espacos + "fim" + editor.text.drop(p + 1)
editor.caret.position = p + 3 + espacos
}
else if (linha.trim.startsWith("escolha")) {
editor.text = editor.text.take(p) + "\\n " + " " * espacos + "caso => \\n" + " " * espacos + "fim" + editor.text.drop(p + 1)
editor.caret.position = p + 8 + espacos
}
else // String multilinha
if (editor.text.drop(p + 1).startsWith("\\"") ||
editor.text.drop(p + 1).startsWith("|")) {
val c = editor.text.drop(p + 1).head
val u = linha.lastIndexOf("\\"") + linha.lastIndexOf("|")
editor.text = editor.text.take(p) + "\\n" + " " * (u + 1 + (if (c == '"') 0 else 1)) + "|" + editor.text.drop(p + 1)
editor.caret.position = p + 3 + u
}
else {
linha.prefixLength(_ == ' ')
editor.text = editor.text.take(p) + "\\n" + " " * espacos + editor.text.drop(p + 1)
editor.caret.position = p + 1 + espacos
}
robot.mouseWheel(-100);
atualizar()
robot.mouseWheel(-100);
robot.mouseWheel(-100);
}
case KeyReleased(_, _, _, _) => {
atualizar()
}
}
def salvar(sim: => Unit)(nao: => Unit) {
val resp = Dialog.showOptions(
editor,
s"Deseja salvar as alterações em ${arq.getOrElse("Sem nome")}?",
"Potigol",
Dialog.Options.YesNoCancel,
Dialog.Message.Warning, null, Seq("Sim", "Não", "Cancelar"), 2)
resp match {
case Dialog.Result.Yes => sim
case Dialog.Result.No => nao
case Dialog.Result.Cancel =>
}
}
def atualizar() {
if (editor.text != texto) {
// modificado = true
texto = editor.text
val y = editor.caret.position
contents(0) match {
case p: scala.swing.ScrollPane =>
val a = p.verticalScrollBar.value
colorir()
// p.verticalScrollBar.value = a
editor.caret.position = y
// editor.caret.position = y-1
// editor.caret.position = y+1
/* p.peer.updateUI()
p.peer.repaint()
p.verticalScrollBar.value = a
editor.caret.position = y
editor.caret.dot = y
editor.repaint()
p.peer.setLocation(1, 1)
p.peer.getVerticalScrollBar.setValue(10)
println(p.verticalScrollBar.value)*/
}
if (undo.isEmpty || texto != undo.top._1) {
undo.push((texto, y))
}
}
}
private def colorir() {
val linhas = editor.text.filter(_ == '\\n').length + 1
if (linhas != numeracao.text.filter(_ == '\\n').length + 1) {
numeracao.text = (for (i <- 1 to linhas) yield f"$i%3d").toList.mkString("\\n")
}
val elementos = ParserEditor.parse(editor.text)
val styledDocument = editor.styledDocument
styledDocument.setCharacterAttributes(0, 10000, config.cinza, true)
for (elem <- elementos) {
val a = elem.getStartIndex
val b = elem.getStopIndex - a + 1
import br.edu.ifrn.potigol.parser.potigolParser._
val s = elem.getType match {
case INT | FLOAT | BOOLEANO | CHAR => config.azul
case STRING | BS | MS | ES => config.amarelo
case COMMENT => config.vermelho
case ID if List("verdadeiro", "falso").contains(elem.getText) => config.azul
case ID if tipos.contains(elem.getText) => config.cyan
case ID if a > 0 && editor.text.charAt(a - 1) == '.' && metodos.contains(elem.getText) => config.cyan
case ID if funcoes.contains(elem.getText) => config.cyan
case ID => config.bege
case _ if elem.getText == "isto" => config.azul
case _ => config.vermelho
}
styledDocument.setCharacterAttributes(a, b, s, true)
}
}
if (arq.isDefined) {
editor.text = scala.io.Source.fromFile(arq.get, "utf-8").getLines.mkString("\\n")
// atualizar()
}
}
  // Syntax-highlighting palette used by colorir(): Monokai-like colours
  // applied per token type (see the token -> attribute mapping in colorir).
  object config {
    import javax.swing.text.StyleConstants._
    // Convenience Color subclass constructible with case-class syntax.
    case class Cor(r: Int, g: Int, b: Int) extends Color(r, g, b)
    // Character attribute set: fixed DejaVu Sans Mono face, given foreground
    // colour, never bold, optionally italic.
    case class Atributos(cor: Color, italico: Boolean = false) extends SimpleAttributeSet {
      setFontFamily(this, "DejaVu Sans Mono")
      setForeground(this, cor)
      setBold(this, false)
      setItalic(this, italico)
    }
    val amarelo = Atributos(Cor(230, 219, 116)) // string literals
    val vermelho = Atributos(Cor(249, 38, 114)) // comments and remaining tokens
    val cinza = Atributos(Cor(118, 113, 94)) // base style applied before tokenizing
    val azul = Atributos(Cor(174, 129, 255)) // numbers, booleans, "isto"
    val bege = Atributos(Cor(248, 248, 242)) // plain identifiers
    val cyan = Atributos(Cor(102, 217, 206), italico = true) // types and builtins
  }
}
// "About" window, opened from the Ajuda > Sobre menu item of the editor.
object Sobre extends Frame {
  visible = false
  // 1 == WindowConstants.HIDE_ON_CLOSE: closing only hides the frame so the
  // same instance can be shown again later.
  peer.setDefaultCloseOperation(1)
  resizable = false
  preferredSize.setSize(300, 200)
  this.location = new Point(400, 200)
  contents = new TextPane() {
    border = BorderFactory.createCompoundBorder(
      border,
      BorderFactory.createEmptyBorder(0, 20, 20, 20));
    // Rendered as HTML so the link and headings display formatted.
    contentType = "text/html"
    text = """<html><body><h1>Editor Potigol</h1>
             |<p>
             |Versão: 0.9.16<br/>
             |28/03/2019
             |<p>
             |(c) Copyright Leonardo Lucena, 2015-2019.<p>
             |Visite: <a href="http://potigol.github.io">http://potigol.github.io</a>
             |</body></html>""".stripMargin('|')
    font = Font.createFont(Font.TRUETYPE_FONT, Editor.is.openStream()).deriveFont(Font.BOLD, 14);
    //new java.awt.Font("DejaVu Sans Mono", Font.BOLD, 14)
  }
} | potigol/EditorPotigol | src/main/scala/br/edu/ifrn/potigol/editor/Editor.scala | Scala | gpl-2.0 | 23,686 |
package com.pragmasoft.reactive.akka.components.circuitbreaker
import akka.actor.{ActorRef, ActorSystem}
import akka.testkit.{TestProbe, TestKit}
import com.pragmasoft.reactive.akka.components.circuitbreaker.CircuitBreakerActor._
import org.scalatest.{FlatSpecLike, FlatSpec}
import scala.concurrent.duration._
import scala.language.postfixOps
// Behavioural spec for CircuitBreakerActor. Verifies that the breaker:
//  * acts as a transparent proxy while CLOSED (forwarding requests and
//    routing replies back to the correct sender),
//  * trips to OPEN after `maxFailures` failed or timed-out replies,
//  * lets exactly one probe message through in HALF-OPEN,
//  * returns to CLOSED on a successful probe or back to OPEN on a failure,
//  * notifies an optional event listener on every state change.
// NOTE(review): state timing is driven by the Thread.sleep helpers in
// CircuitBreakerScenario below, which can be flaky on slow CI machines.
class CircuitBreakerActorSpec extends TestKit(ActorSystem("CircuitBreakerActorSpec")) with FlatSpecLike {
behavior of "CircuitBreakerActor"
// Shared breaker configuration: trip after 2 failed replies, 200ms call
// timeout, 1s reset timeout; any reply equal to "FAILURE" counts as a failure.
val baseCircuitBreakerBuilder =
CircuitBreakerActorBuilder(
maxFailures = 2,
callTimeout = 200 millis,
resetTimeout = 1 second,
failureDetector = { _ == "FAILURE" }
)
it should "act as a transparent proxy in case of successful requests-replies - forward to target" in {
val sender = TestProbe()
val receiver = TestProbe()
val circuitBreaker = system.actorOf(baseCircuitBreakerBuilder.propsForTarget(receiver.ref))
sender.send(circuitBreaker, "test message")
receiver.expectMsg("test message")
}
it should "act as a transparent proxy in case of successful requests-replies - full cycle" in {
val sender = TestProbe()
val receiver = TestProbe()
val circuitBreaker = system.actorOf(baseCircuitBreakerBuilder.propsForTarget(receiver.ref))
sender.send(circuitBreaker, "test message")
receiver.expectMsg("test message")
receiver.reply("response")
sender.expectMsg("response")
}
it should "forward further messages before receiving the response of the first one" in {
val sender = TestProbe()
val receiver = TestProbe()
val circuitBreaker = system.actorOf(baseCircuitBreakerBuilder.propsForTarget(receiver.ref))
sender.send(circuitBreaker, "test message1")
sender.send(circuitBreaker, "test message2")
sender.send(circuitBreaker, "test message3")
receiver.expectMsg("test message1")
receiver.expectMsg("test message2")
receiver.expectMsg("test message3")
}
it should "send responses to the right sender" in {
val sender1 = TestProbe()
val sender2 = TestProbe()
val receiver = TestProbe()
val circuitBreaker = system.actorOf(baseCircuitBreakerBuilder.propsForTarget(receiver.ref))
sender1.send(circuitBreaker, "test message1")
sender2.send(circuitBreaker, "test message2")
receiver.expectMsg("test message1")
receiver.reply("response1")
receiver.expectMsg("test message2")
receiver.reply("response2")
sender1.expectMsg("response1")
sender2.expectMsg("response2")
}
it should "return failed responses too" in {
val sender = TestProbe()
val receiver = TestProbe()
val circuitBreaker = system.actorOf(baseCircuitBreakerBuilder.propsForTarget(receiver.ref))
sender.send(circuitBreaker, "request")
receiver.expectMsg("request")
receiver.reply("FAILURE")
sender.expectMsg("FAILURE")
}
// --- OPEN state behaviour -------------------------------------------------
it should "enter open state after reaching the threshold of failed responses" in new CircuitBreakerScenario {
// GIVEN
val circuitBreaker = defaultCircuitBreaker
(1 to baseCircuitBreakerBuilder.maxFailures) foreach { index =>
receiverRespondsWithFailureToRequest( s"request$index" )
}
waitForCircuitBreakerToReceiveSelfNotificationMessage
sender.send(circuitBreaker, "request in open state")
receiver.expectNoMsg
}
it should "respond with a CircuitOpenFailure message when in open state " in new CircuitBreakerScenario {
// GIVEN
val circuitBreaker = defaultCircuitBreaker
(1 to baseCircuitBreakerBuilder.maxFailures) foreach { index =>
receiverRespondsWithFailureToRequest( s"request$index" )
}
waitForCircuitBreakerToReceiveSelfNotificationMessage
sender.send(circuitBreaker, "request in open state")
sender.expectMsg(CircuitOpenFailure("request in open state"))
}
it should "respond with the converted CircuitOpenFailure if a converter is provided" in new CircuitBreakerScenario {
val circuitBreaker = system.actorOf(
baseCircuitBreakerBuilder
.copy( openCircuitFailureConverter = { failureMsg => s"NOT SENT: ${failureMsg.failedMsg}" } )
.propsForTarget(receiver.ref)
)
(1 to baseCircuitBreakerBuilder.maxFailures) foreach { index =>
receiverRespondsWithFailureToRequest( s"request$index" )
}
waitForCircuitBreakerToReceiveSelfNotificationMessage
sender.send(circuitBreaker, "request in open state")
sender.expectMsg("NOT SENT: request in open state")
}
it should "enter open state after reaching the threshold of timed-out responses" in {
val sender = TestProbe()
val receiver = TestProbe()
val circuitBreaker = system.actorOf(baseCircuitBreakerBuilder.propsForTarget(receiver.ref))
sender.send(circuitBreaker, "request1")
sender.send(circuitBreaker, "request2")
// Let both calls exceed the 200ms call timeout before replying.
Thread.sleep(baseCircuitBreakerBuilder.callTimeout.duration.toMillis + 100)
receiver.expectMsg("request1")
receiver.reply("this should be timed out 1")
receiver.expectMsg("request2")
receiver.reply("this should be timed out 2")
// Have to wait a bit to let the circuit breaker receive the self notification message
Thread.sleep(300)
sender.send(circuitBreaker, "request in open state")
receiver.expectNoMsg
}
// --- HALF-OPEN state behaviour --------------------------------------------
it should "enter HALF OPEN state after the given state timeout, sending the first message only" in new CircuitBreakerScenario {
// GIVEN
val circuitBreaker = defaultCircuitBreaker
// WHEN - ENTERING OPEN STATE
receiverRespondsWithFailureToRequest("request1")
receiverRespondsWithFailureToRequest("request2")
waitForCircuitBreakerToReceiveSelfNotificationMessage
// THEN
messageIsRejectedWithOpenCircuitNotification("IGNORED SINCE IN OPEN STATE1")
messageIsRejectedWithOpenCircuitNotification("IGNORED SINCE IN OPEN STATE2")
// WHEN - ENTERING HALF OPEN STATE
waitForResetTimeoutToExpire
// THEN
sender.send(circuitBreaker, "First message in half-open state, should be forwarded")
sender.send(circuitBreaker, "Second message in half-open state, should be ignored")
receiver.expectMsg("First message in half-open state, should be forwarded")
receiver.expectNoMsg()
sender.expectMsg(CircuitOpenFailure("Second message in half-open state, should be ignored"))
}
it should "return to CLOSED state from HALF-OPEN if a successful message response notification is received" in new CircuitBreakerScenario {
// GIVEN
val circuitBreaker = defaultCircuitBreaker
// WHEN - Entering HALF OPEN state
receiverRespondsWithFailureToRequest("request1")
receiverRespondsWithFailureToRequest("request2")
waitForResetTimeoutToExpire
// WHEN - Receiving a successful response
receiverRespondsToRequestWith("First message in half-open state, should be forwarded", "This should close the circuit")
waitForCircuitBreakerToReceiveSelfNotificationMessage
// THEN
sender.send(circuitBreaker, "request1")
receiver.expectMsg("request1")
sender.send(circuitBreaker, "request2")
receiver.expectMsg("request2")
}
it should "return to OPEN state from HALF-OPEN if a FAILURE message response is received" in new CircuitBreakerScenario {
// GIVEN
val circuitBreaker = defaultCircuitBreaker
// WHEN - Entering HALF OPEN state
receiverRespondsWithFailureToRequest("request1")
receiverRespondsWithFailureToRequest("request2")
waitForResetTimeoutToExpire
// Failure message in HALF OPEN State
receiverRespondsWithFailureToRequest("First message in half-open state, should be forwarded")
waitForCircuitBreakerToReceiveSelfNotificationMessage
// THEN
sender.send(circuitBreaker, "this should be ignored")
receiver.expectNoMsg()
sender.expectMsg(CircuitOpenFailure("this should be ignored"))
}
it should "Notify an event status change listener when changing state" in new CircuitBreakerScenario {
// GIVEN
override val circuitBreaker = system.actorOf(
baseCircuitBreakerBuilder
.copy( circuitEventListener = Some(eventListener.ref) )
.propsForTarget(receiver.ref)
)
// WHEN - Entering OPEN state
receiverRespondsWithFailureToRequest("request1")
receiverRespondsWithFailureToRequest("request2")
waitForCircuitBreakerToReceiveSelfNotificationMessage
// THEN
eventListener.expectMsg(CircuitOpen(circuitBreaker))
// WHEN - Entering HALF OPEN state
waitForResetTimeoutToExpire
// THEN
eventListener.expectMsg(CircuitHalfOpen(circuitBreaker))
// WHEN - Entering CLOSED state
receiverRespondsToRequestWith("First message in half-open state, should be forwarded", "This should close the circuit")
waitForCircuitBreakerToReceiveSelfNotificationMessage
// THEN
eventListener.expectMsg(CircuitClosed(circuitBreaker))
}
// Reusable fixture: fresh probes per test plus helpers that drive a full
// request/reply exchange or wait for the breaker's internal timers.
trait CircuitBreakerScenario {
val sender = TestProbe()
val eventListener = TestProbe()
val receiver = TestProbe()
// The breaker under test; each scenario supplies its own instance.
def circuitBreaker: ActorRef
def defaultCircuitBreaker = system.actorOf(baseCircuitBreakerBuilder.propsForTarget(receiver.ref))
// Drives one exchange where the receiver replies with the failure marker.
def receiverRespondsWithFailureToRequest(request: Any) = {
sender.send(circuitBreaker, request)
receiver.expectMsg(request)
receiver.reply("FAILURE")
sender.expectMsg("FAILURE")
}
// Drives one successful request/reply exchange.
def receiverRespondsToRequestWith(request: Any, reply: Any) = {
sender.send(circuitBreaker, request)
receiver.expectMsg(request)
receiver.reply(reply)
sender.expectMsg(reply)
}
// Sleep-based synchronisation with the breaker's internal self-messages.
def waitForCircuitBreakerToReceiveSelfNotificationMessage = Thread.sleep(baseCircuitBreakerBuilder.resetTimeout.duration.toMillis/4)
def waitForResetTimeoutToExpire = Thread.sleep(baseCircuitBreakerBuilder.resetTimeout.duration.toMillis + 100)
def messageIsRejectedWithOpenCircuitNotification(message: Any) = {
sender.send(circuitBreaker, message)
sender.expectMsg(CircuitOpenFailure(message))
}
}
}
| galarragas/akka-components | src/test/scala/com/pragmasoft/reactive/akka/components/circuitbreaker/CircuitBreakerActorSpec.scala | Scala | apache-2.0 | 10,043 |
package sbt.complete
// Interactive manual test for sbt's completing parsers: wires a parser
// (selected by the first CLI argument, "1".."5") into a JLine console and
// echoes the parse result for each line typed at the "> " prompt.
object JLineTest
{
import DefaultParsers._
// "1": a bare colour alternative.
val one = "blue" | "green" | "black"
// "2"-"4": "color <name>" with different completion strategies
// (token alternatives, ID with examples, ID with a display label).
val two = token("color" ~> Space) ~> token(one)
val three = token("color" ~> Space) ~> token(ID.examples("blue", "green", "black"))
val four = token("color" ~> Space) ~> token(ID, "<color name>")
val num = token(NatBasic)
// "5": a tiny calculator "a+b=" / "a-b=" whose completion is the result.
val five = (num ~ token("+" | "-") ~ num) <~ token('=') flatMap {
case a ~ "+" ~ b => token((a+b).toString)
case a ~ "-" ~ b => token((a-b).toString)
}
val parsers = Map("1" -> one, "2" -> two, "3" -> three, "4" -> four, "5" -> five)
def main(args: Array[String])
{
import jline.{ConsoleReader,Terminal}
val reader = new ConsoleReader()
Terminal.getTerminal.disableEcho()
// Throws if args(0) is missing or not one of "1".."5" (intentional for a manual test).
val parser = parsers(args(0))
JLineCompletion.installCustomCompletor(reader, parser)
// Read-eval loop; terminates on EOF (readLine returns null).
def loop() {
val line = reader.readLine("> ")
if(line ne null) {
println("Result: " + apply(parser)(line).resultEmpty)
loop()
}
}
loop()
}
}
import Parser._
import org.scalacheck._
// ScalaCheck properties for tab-completion of token parsers: checks the
// suggested completions at each prefix of "a1b2c3" for both a plain token
// parser and one with display labels ("<a1>", "<b2>"), plus a few edge cases.
object ParserTest extends Properties("Completing Parser")
{
import Parsers._
// Parser under test and its variant with display labels.
val nested = (token("a1") ~ token("b2")) ~ "c3"
val nestedDisplay = (token("a1", "<a1>") ~ token("b2", "<b2>")) ~ "c3"
val spacePort = (token(Space) ~> Port)
// Debugging pass-through: prints the value before returning it.
def p[T](f: T): T = { println(f); f }
// Asserts that input `in` yields exactly `expect` for the token parser and
// `expectDisplay` (defaulting to `expect`) for the display-labelled parser.
def checkSingle(in: String, expect: Completion)(expectDisplay: Completion = expect) =
( ("token '" + in + "'") |: checkOne(in, nested, expect)) &&
( ("display '" + in + "'") |: checkOne(in, nestedDisplay, expectDisplay) )
def checkOne(in: String, parser: Parser[_], expect: Completion): Prop =
p(completions(parser, in, 1)) == Completions.single(expect)
// Asserts that `in` produces no completions for either parser variant.
def checkInvalid(in: String) =
( ("token '" + in + "'") |: checkInv(in, nested) ) &&
( ("display '" + in + "'") |: checkInv(in, nestedDisplay) )
def checkInv(in: String, parser: Parser[_]): Prop =
p(completions(parser, in, 1)) == Completions.nil
property("nested tokens a") = checkSingle("", Completion.tokenStrict("","a1") )( Completion.displayStrict("<a1>"))
property("nested tokens a1") = checkSingle("a", Completion.tokenStrict("a","1") )( Completion.displayStrict("<a1>"))
property("nested tokens a inv") = checkInvalid("b")
property("nested tokens b") = checkSingle("a1", Completion.tokenStrict("","b2") )( Completion.displayStrict("<b2>"))
property("nested tokens b2") = checkSingle("a1b", Completion.tokenStrict("b","2") )( Completion.displayStrict("<b2>"))
property("nested tokens b inv") = checkInvalid("a1a")
property("nested tokens c") = checkSingle("a1b2", Completion.suggestStrict("c3") )()
property("nested tokens c3") = checkSingle("a1b2c", Completion.suggestStrict("3"))()
property("nested tokens c inv") = checkInvalid("a1b2a")
property("suggest space") = checkOne("", spacePort, Completion.tokenStrict("", " "))
property("suggest port") = checkOne(" ", spacePort, Completion.displayStrict("<port>") )
property("no suggest at end") = checkOne("asdf", "asdf", Completion.suggestStrict(""))
property("no suggest at token end") = checkOne("asdf", token("asdf"), Completion.suggestStrict(""))
property("empty suggest for examples") = checkOne("asdf", any.+.examples("asdf", "qwer"), Completion.suggestStrict(""))
property("empty suggest for examples token") = checkOne("asdf", token(any.+.examples("asdf", "qwer")), Completion.suggestStrict(""))
}
// Ad-hoc demonstration of parser completion and parsing; the println calls
// in the object body run at object initialisation (a side effect of
// referencing ParserExample), not via an explicit entry point.
object ParserExample
{
// Runs of whitespace / non-whitespace characters.
val ws = charClass(_.isWhitespace)+
val notws = charClass(!_.isWhitespace)+
val name = token("test")
// Zero or more whitespace-separated option keywords.
val options = (ws ~> token("quick" | "failed" | "new") )*
// Zero or more free-form words, with example-based completion.
val include = (ws ~> token(examples(notws.string, Set("am", "is", "are", "was", "were") )) )*
val t = name ~ options ~ include
// Get completions for some different inputs
println(completions(t, "te", 1))
println(completions(t, "test ",1))
println(completions(t, "test w", 1))
// Get the parsed result for different inputs
println(apply(t)("te").resultEmpty)
println(apply(t)("test").resultEmpty)
println(apply(t)("test w").resultEmpty)
println(apply(t)("test was were").resultEmpty)
// Stress case: n optional 'a's followed by n mandatory 'a's against "a"*2n;
// prints whether the parse succeeds.
def run(n: Int)
{
val a = 'a'.id
val aq = a.?
val aqn = repeat(aq, min = n, max = n)
val an = repeat(a, min = n, max = n)
val ann = aqn ~ an
def r = apply(ann)("a"*(n*2)).resultEmpty
println(r.isValid)
}
// Stress case: repeated optional "ab" applied to "a"*n; prints the raw result.
def run2(n: Int)
{
val ab = "ab".?.*
val r = apply(ab)("a"*n).resultEmpty
println(r)
}
}
} | kuochaoyi/xsbt | util/complete/src/test/scala/ParserTest.scala | Scala | bsd-3-clause | 4,362 |
package safe.io
import safe.SafeVector
/** A strategy for converting a value of type `A` into an output value of
  * type `O` prior to writing.
  *
  * @tparam A the input type
  * @tparam O the output (writeable) type
  */
trait Writeable[A, O] {
  /** Convert `a` into its output form. */
  def apply(a: A): O
}

object Writeable {
  /** Build a [[Writeable]] from a plain conversion function.
    *
    * Explicit return type added so the public API does not expose an
    * inferred anonymous-class type.
    */
  def of[A, B](f: A => B): Writeable[A, B] = new Writeable[A, B] {
    def apply(a: A): B = f(a)
  }

  /** A [[Writeable]] that passes values through unchanged. */
  def identity[A]: Writeable[A, A] = new Writeable[A, A] {
    def apply(a: A): A = a
  }
}
package com.sksamuel.elastic4s.search.aggs
import com.sksamuel.elastic4s.ElasticsearchClientUri
import com.sksamuel.elastic4s.http.search.Bucket
import com.sksamuel.elastic4s.http.{ElasticDsl, HttpClient}
import com.sksamuel.elastic4s.testkit.SharedElasticSugar
import org.elasticsearch.action.support.WriteRequest.RefreshPolicy
import org.scalatest.{FreeSpec, Matchers}
// Integration tests for the HTTP terms aggregation: indexes four "curry"
// documents into a local node, then checks bucket counts for grouping,
// query filtering, missing values, minDocCount and size.
// NOTE(review): every case in this suite is currently marked `ignore`
// (disabled) — confirm whether they are meant to be re-enabled.
class TermsAggregationHttpTest extends FreeSpec with SharedElasticSugar with Matchers with ElasticDsl {
val http = HttpClient(ElasticsearchClientUri("elasticsearch://" + node.ipAndPort))
// Fixture setup runs at construction time: create the index mapping...
http.execute {
createIndex("termsagg") mappings {
mapping("curry") fields(
textField("name").fielddata(true),
textField("strength").fielddata(true).stored(true),
keywordField("origin")
)
}
}.await
// ...and index the four test documents, refreshing so they are searchable.
http.execute(
bulk(
indexInto("termsagg/curry") fields("name" -> "Jalfrezi", "strength" -> "mild", "origin" -> "india"),
indexInto("termsagg/curry") fields("name" -> "Madras", "strength" -> "hot", "origin" -> "india"),
indexInto("termsagg/curry") fields("name" -> "Chilli Masala", "strength" -> "hot", "origin" -> "india"),
indexInto("termsagg/curry") fields("name" -> "Tikka Masala", "strength" -> "medium")
).refresh(RefreshPolicy.IMMEDIATE)
).await
"terms aggregation" - {
"should group by field" ignore {
val resp = http.execute {
search("termsagg/curry").matchAllQuery().aggs {
termsAgg("agg1", "strength")
}
}.await
resp.totalHits shouldBe 4
val agg = resp.termsAgg("agg1")
agg.buckets.toSet shouldBe Set(Bucket("hot", 2), Bucket("medium", 1), Bucket("mild", 1))
}
"should only include matching documents in the query" ignore {
val resp = http.execute {
// should match 2 documents
search("termsagg/curry").matchQuery("name", "masala").aggregations {
termsAgg("agg1", "strength")
}
}.await
resp.size shouldBe 2
val agg = resp.termsAgg("agg1")
agg.buckets.toSet shouldBe Set(Bucket("hot", 1), Bucket("medium", 1))
}
"should support missing value" ignore {
val resp = http.execute {
search("termsagg/curry").aggregations {
termsAggregation("agg1") field "origin" missing "unknown"
}
}.await
resp.totalHits shouldBe 4
val agg = resp.termsAgg("agg1")
agg.buckets.toSet shouldBe Set(Bucket("india", 3), Bucket("unknown", 1))
}
"should support min doc count" ignore {
val resp = http.execute {
search("termsagg/curry").aggregations {
termsAggregation("agg1") field "strength" minDocCount 2
}
}.await
resp.totalHits shouldBe 4
val agg = resp.termsAgg("agg1")
agg.buckets.toSet shouldBe Set(Bucket("hot", 2))
}
"should support size" ignore {
val resp = http.execute {
search("termsagg/curry").aggregations {
termsAggregation("agg1") field "strength" size 1
}
}.await
resp.totalHits shouldBe 4
val agg = resp.termsAgg("agg1")
agg.buckets.toSet shouldBe Set(Bucket("hot", 2))
}
// Include/exclude cases below are kept for reference but not yet ported
// to the HTTP client API.
//    "should only return included fields" in {
//      val resp = client.execute {
//        search("aggregations/breakingbad") aggregations {
//          termsAggregation("agg1") field "job" includeExclude("lawyer", "")
//        }
//      }.await
//      resp.totalHits shouldBe 10
//      val agg = resp.aggregations.map("agg1").asInstanceOf[StringTerms]
//      agg.getBuckets.size shouldBe 1
//      agg.getBucketByKey("lawyer").getDocCount shouldBe 1
//    }
//
//    "should not return excluded fields" in {
//      val resp = client.execute {
//        search("aggregations/breakingbad") aggregations {
//          termsAggregation("agg1") field "job" includeExclude("", "lawyer")
//        }
//      }.await
//      resp.totalHits shouldBe 10
//
//
//      val agg = resp.aggregations.stringTermsResult("agg1")
//      agg.getBuckets.size shouldBe 4
//      agg.getBucketByKey("meth sidekick").getDocCount shouldBe 3
//      agg.getBucketByKey("meth kingpin").getDocCount shouldBe 2
//      agg.getBucketByKey("dea agent").getDocCount shouldBe 2
//      agg.getBucketByKey("heavy").getDocCount shouldBe 2
//    }
//
//    "should only return included fields (given a seq)" in {
//      val resp = client.execute {
//        search("aggregations/breakingbad") aggregations {
//          termsAggregation("agg1") field "job" includeExclude(Seq("meth kingpin", "lawyer"), Nil)
//        }
//      }.await
//      resp.totalHits shouldBe 10
//      val agg = resp.aggregations.map("agg1").asInstanceOf[StringTerms]
//      agg.getBuckets.size shouldBe 2
//      agg.getBucketByKey("meth kingpin").getDocCount shouldBe 2
//      agg.getBucketByKey("lawyer").getDocCount shouldBe 1
//    }
//
//    "should not return excluded fields (given a seq)" in {
//      val resp = client.execute {
//        search("aggregations/breakingbad") aggregations {
//          termsAggregation("agg1") field "job" includeExclude(Nil, Iterable("lawyer"))
//        }
//      }.await
//      resp.totalHits shouldBe 10
//
//      val agg = resp.aggregations.stringTermsResult("agg1")
//      agg.getBuckets.size shouldBe 4
//      agg.getBucketByKey("meth sidekick").getDocCount shouldBe 3
//      agg.getBucketByKey("meth kingpin").getDocCount shouldBe 2
//      agg.getBucketByKey("dea agent").getDocCount shouldBe 2
//      agg.getBucketByKey("heavy").getDocCount shouldBe 2
//    }
//
}
}
| aroundus-inc/elastic4s | elastic4s-tests/src/test/scala/com/sksamuel/elastic4s/search/aggs/TermsAggregationHttpTest.scala | Scala | apache-2.0 | 5,780 |
package models
case class LineIndex(indentation: Int, title: String)
| raychenon/play-table-of-contents | app/models/LineIndex.scala | Scala | mit | 70 |
package io.findify.sqsmock
import com.amazonaws.services.sqs.AmazonSQSClient
import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers}
import scala.collection.JavaConversions._
/**
* Created by shutty on 3/30/16.
*/
/**
 * End-to-end exercise of the SQS mock over its HTTP endpoint: create a queue,
 * send a message, receive it back, then observe the queue empty.
 * NOTE(review): these four tests share state through the mock and depend on
 * running in declaration order; the queue URL and account id are hard-coded
 * to the mock's localhost:8001 endpoint.
 *
 * Created by shutty on 3/30/16.
 */
class SendReceiveTest extends FlatSpec with Matchers with SQSStartStop {
val queue = "http://localhost:8001/123/foo"
"sqs mock" should "create queue" in {
val response = client.createQueue("foo")
assert(response.getQueueUrl == "http://localhost:8001/123/foo")
}
it should "be able to push message to queue" in {
val result = client.sendMessage(queue, "hello_world")
assert(result.getMessageId.length > 10)
}
it should "receive message from queue" in {
val result = client.receiveMessage(queue)
assert(result.getMessages.head.getBody == "hello_world")
}
it should "detect empty queue" in {
val result = client.receiveMessage(queue)
assert(result.getMessages.isEmpty)
}
}
| findify/sqsmock | src/test/scala/io/findify/sqsmock/SendReceiveTest.scala | Scala | mit | 945 |
/*
* Copyright 2001-2009 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package samples
/*
ScalaTest facilitates different styles of testing by providing traits you can mix
together to get the behavior and syntax you prefer. A few examples are
included here. For more information, visit:
http://www.scalatest.org/
One way to use ScalaTest is to help make JUnit or TestNG tests more
clear and concise. Here's an example:
*/
import scala.collection.mutable.Stack
import org.scalatest.Assertions
import org.junit.Test
// JUnit-style suite using ScalaTest assertions: verifies LIFO pop ordering
// and that popping an empty Stack fails with NoSuchElementException.
class StackSuite extends Assertions {

  @Test def stackShouldPopValuesIinLastInFirstOutOrder() {
    val s = new Stack[Int]
    for (value <- List(1, 2)) s.push(value)
    // Last pushed comes out first.
    assert(s.pop() === 2)
    assert(s.pop() === 1)
  }

  @Test def stackShouldThrowNoSuchElementExceptionIfAnEmptyStackIsPopped() {
    val untouched = new Stack[String]
    intercept[NoSuchElementException] {
      untouched.pop()
    }
  }
}
/*
Here's an example of a FunSuite with ShouldMatchers mixed in:
*/
import org.scalatest.FunSuite
import org.scalatest.matchers.ShouldMatchers
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
// FunSuite + ShouldMatchers example, runnable under JUnit via JUnitRunner:
// checks emptiness and length of immutable Lists.
@RunWith(classOf[JUnitRunner])
class ListSuite extends FunSuite with ShouldMatchers {
test("An empty list should be empty") {
List() should be ('empty)
Nil should be ('empty)
}
test("A non-empty list should not be empty") {
List(1, 2, 3) should not be ('empty)
List("fee", "fie", "foe", "fum") should not be ('empty)
}
test("A list's length should equal the number of elements it contains") {
List() should have length (0)
List(1, 2) should have length (2)
List("fee", "fie", "foe", "fum") should have length (4)
}
}
/*
ScalaTest also supports the behavior-driven development style, in which you
combine tests with text that specifies the behavior being tested. Here's
an example whose text output when run looks like:
A Map
- should only contain keys and values that were added to it
- should report its size as the number of key/value pairs it contains
*/
import org.scalatest.Spec
import org.scalatest.matchers.MustMatchers
// BDD-style Spec with MustMatchers: documents and checks key/value
// membership and size reporting of immutable Maps.
class MapSpec extends Spec with MustMatchers {
describe("A Map") {
it("should only contain keys and values that were added to it") {
Map("ho" -> 12) must (not contain key ("hi") and not contain value (13))
Map("hi" -> 13) must (contain key ("hi") and contain value (13))
}
it("should report its size as the number of key/value pairs it contains") {
Map() must have size (0)
Map("ho" -> 12) must have size (1)
Map("hi" -> 13, "ho" -> 12) must have size (2)
}
}
}
| lordkret/profit | profit/src/test/scala/samples/scalatest.scala | Scala | mit | 3,176 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations
import uk.gov.hmrc.ct.box.{CtBoxIdentifier, CtOptionalInteger, Linked}
// CT computation box CP290a: "Pre 1/4/17 losses brought forward against TP";
// holds an optional integer amount.
case class CP290a(value: Option[Int]) extends CtBoxIdentifier(name = "Pre 1/4/17 losses brought forward against TP") with CtOptionalInteger
// CP290a is derived 1:1 from CP283a — it simply carries over that box's value.
object CP290a extends Linked[CP283a, CP290a] {
def apply(source: CP283a): CP290a = CP290a(source.value)
}
| liquidarmour/ct-calculations | src/main/scala/uk/gov/hmrc/ct/computations/CP290a.scala | Scala | apache-2.0 | 963 |
package org.liquidizer.view
import scala.xml._
import scala.io._
import net.liftweb._
import net.liftweb.util._
import net.liftweb.http._
import net.liftweb.common._
import net.liftweb.util.Helpers.TheStrBindParam
import org.liquidizer.model._
import org.liquidizer.lib._
/** Display code for the display of emoticons in HTML */
/** Display code for the display of emoticons in HTML */
object EmotionView {
// Morpher that renders a face SVG from (valence, arousal, potency, weight).
lazy val morpher= new Mesmerizer
// Fallback SVG shown when no emotion data is available.
// NOTE(review): loads from a source-tree relative path, which only works
// when the working directory is the project root — confirm intended.
lazy val sleeping= {
val root="src/main/resources/"
val src= scala.io.Source.fromFile(new java.io.File(root+"sleeping.svg"))
scala.xml.parsing.XhtmlParser.apply(src).first
}
// Read a double request parameter, falling back to `default` when absent.
// NOTE(review): a non-numeric parameter value will throw on toDouble.
def doubleParam(id : String, default : Double) =
S.param(id).map { _.toDouble }.getOrElse(default)
/** Serve a rendered emoticon face as an SVG image response, driven by the
  * request parameters v/a/p/w (emotion dimensions), size, scale and view
  * ("front" or "sleeping"). */
def face() : Box[XmlResponse] = {
val v= doubleParam("v", 0.5)
val a= doubleParam("a", 0.5)
val p= doubleParam("p", 0.5)
val w= doubleParam("w", 1.0)
val size= doubleParam("size", 100).toInt
val scale= doubleParam("scale", 1.0)
val view= S.param("view").getOrElse("front")
var node= view match {
case "front" => morpher.emoticon(v,a,p,w)
case "sleeping" => sleeping
}
node= SVGUtil.resize(node, size, size, scale)
Full(new XmlResponse(node, 200, "image/svg+xml", Nil) {
// override the cache expiry
override def headers =
TheStrBindParam("Date", (new java.util.Date).toString) ::
TheStrBindParam("Cache-Control", "max-age= 600000, public") ::
super.headers
})
}
/** Create an embed tag for an inclusion of the emoticon, encoding the
  * current user's emotion towards `other` (or the sleeping face when there
  * is no logged-in user or no emotion data) into the face.svg query string. */
def emoticon(other : Votable, attribs:MetaData) : Node = {
val size={attribs.get("size").getOrElse(Text("100"))}
var uri= "/emoticons/face.svg" + {
attribs.asAttrMap ++ {
User.currentUser match {
case Full(me) if other.isUser => {
// compute face size based on distance metrics
val room= other.room.obj.get
val maxPref= VoteMap.getMaxDelegationPref(me, room)
val dist= if (other.is(me)) 1.0 else {
val w= Math.sqrt(VoteMap.getWeight(me, other))
1.0 + 0.2*(w - 0.5)*(maxPref min 3)
}
val fdist= SVGUtil.format(dist min 1.25)
// extract corresponding emotion
val user= other.user.obj.get
VoteMap.getEmotion(me, user, room) match {
case Some(emo) => {
val p= emo.potency.is
// Map valence into [0,1], squashed towards extremes by squaring.
val v= Math.pow(emo.valence.is / (.9*p + .1) / 2.0 + 0.5, 2.0)
// Clamp arousal to [0,1] (left-to-right: (a min 1.0) max 0.0).
val a= emo.arousal.is min 1.0 max 0.
val w= VoteMap.getCurrentWeight(user, room)
Map("v" -> SVGUtil.format(v),
"a" -> SVGUtil.format(a),
"p" -> SVGUtil.format(p),
"w" -> SVGUtil.format(w),
"scale" -> fdist)
}
case None => Map("view" -> "sleeping", "scale" -> fdist)
}}
case _ => Map("view" -> "sleeping")
}}
}.map { case (a,b) => a+"="+b }.mkString("?","&","")
// The HTML tag
<embed alt="Emoticon" src={uri} width={size} height={size}/>
}
}
| liquidizer/liquidizer | src/main/scala/org/liquidizer/view/EmotionView.scala | Scala | mit | 2,906 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.util
import java.util.Locale

import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.analysis._
import org.apache.spark.sql.types.StructType
/**
* Utils for handling schemas.
*
* TODO: Merge this file with [[org.apache.spark.ml.util.SchemaUtils]].
*/
private[spark] object SchemaUtils {

  /**
   * Checks if an input schema has duplicate column names. This throws an exception if the
   * duplication exists.
   *
   * @param schema schema to check
   * @param colType column type name, used in an exception message
   * @param caseSensitiveAnalysis whether duplication checks should be case sensitive or not
   */
  def checkSchemaColumnNameDuplication(
      schema: StructType, colType: String, caseSensitiveAnalysis: Boolean = false): Unit = {
    checkColumnNameDuplication(schema.map(_.name), colType, caseSensitiveAnalysis)
  }

  // Returns true if a given resolver is case-sensitive
  private def isCaseSensitiveAnalysis(resolver: Resolver): Boolean = {
    if (resolver == caseSensitiveResolution) {
      true
    } else if (resolver == caseInsensitiveResolution) {
      false
    } else {
      sys.error("A resolver to check if two identifiers are equal must be " +
        "`caseSensitiveResolution` or `caseInsensitiveResolution` in o.a.s.sql.catalyst.")
    }
  }

  /**
   * Checks if input column names have duplicate identifiers. This throws an exception if
   * the duplication exists.
   *
   * @param columnNames column names to check
   * @param colType column type name, used in an exception message
   * @param resolver resolver used to determine if two identifiers are equal
   */
  def checkColumnNameDuplication(
      columnNames: Seq[String], colType: String, resolver: Resolver): Unit = {
    checkColumnNameDuplication(columnNames, colType, isCaseSensitiveAnalysis(resolver))
  }

  /**
   * Checks if input column names have duplicate identifiers. This throws an exception if
   * the duplication exists.
   *
   * @param columnNames column names to check
   * @param colType column type name, used in an exception message
   * @param caseSensitiveAnalysis whether duplication checks should be case sensitive or not
   */
  def checkColumnNameDuplication(
      columnNames: Seq[String], colType: String, caseSensitiveAnalysis: Boolean): Unit = {
    // Normalise with a fixed locale so the check does not depend on the JVM's
    // default locale (e.g. the Turkish dotless-i case-mapping pitfall).
    val names = if (caseSensitiveAnalysis) {
      columnNames
    } else {
      columnNames.map(_.toLowerCase(Locale.ROOT))
    }
    // Single pass: group equal names and keep those that occur more than once.
    // (Replaces the previous distinct-then-groupBy double traversal.)
    val duplicateColumns = names.groupBy(identity).collect {
      case (x, ys) if ys.length > 1 => s"`$x`"
    }
    if (duplicateColumns.nonEmpty) {
      throw new AnalysisException(
        s"Found duplicate column(s) $colType: ${duplicateColumns.mkString(", ")}")
    }
  }
}
| bravo-zhang/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/util/SchemaUtils.scala | Scala | apache-2.0 | 3,513 |
/* Title: Pure/Isar/outer_syntax.scala
Author: Makarius
Isabelle/Isar outer syntax.
*/
package isabelle
import scala.collection.mutable
object Outer_Syntax
{
/* syntax */
// Empty syntax vs. syntax seeded with the default completion tables.
val empty: Outer_Syntax = new Outer_Syntax()
def init(): Outer_Syntax = new Outer_Syntax(completion = Completion.init())
/* string literals */
// Quote `str` as an Isabelle string literal: wraps it in double quotes and
// replaces control characters (except the YXML markers), backslash and the
// quote character by backslash + zero-padded 3-digit decimal escapes.
// NOTE(review): the backslash escapes in this copy of the file look
// double-escaped by the extraction pipeline — verify against upstream.
def quote_string(str: String): String =
{
val result = new StringBuilder(str.length + 10)
result += '"'
// Iterate over Isabelle symbols; only single-character symbols are escaped,
// multi-character symbols are copied through verbatim.
for (s <- Symbol.iterator(str)) {
if (s.length == 1) {
val c = s(0)
if (c < 32 && c != YXML.X && c != YXML.Y || c == '\\\\' || c == '"') {
result += '\\\\'
if (c < 10) result += '0'
if (c < 100) result += '0'
result ++= c.asInstanceOf[Int].toString
}
else result += c
}
else result ++= s
}
result += '"'
result.toString
}
}
/** Immutable description of Isar outer syntax: keyword table, completion
  * information and abbreviations. All "mutating" operations return a new
  * instance; construction goes through the companion object or the
  * copy-style methods below (the constructor is private). */
final class Outer_Syntax private(
  val keywords: Keyword.Keywords = Keyword.Keywords.empty,
  val completion: Completion = Completion.empty,
  val rev_abbrevs: Thy_Header.Abbrevs = Nil,
  val language_context: Completion.Language_Context = Completion.Language_Context.outer,
  val has_tokens: Boolean = true)
{
  /** syntax content **/
  override def toString: String = keywords.toString
  /* keywords */
  /** Add a single keyword (with optional kind and file extensions), updating
    * both the keyword table and the completion data. Keywords of a
    * theory-block kind additionally get a "begin ... end" template abbrev. */
  def + (name: String, kind: String = "", exts: List[String] = Nil): Outer_Syntax =
  {
    val keywords1 = keywords + (name, kind, exts)
    val completion1 =
      completion.add_keyword(name).
        add_abbrevs(
          (if (Keyword.theory_block.contains(kind)) List((name, name + "\\nbegin\\n\\u0007\\nend"))
          else Nil) :::
          (if (Completion.Word_Parsers.is_word(name)) List((name, name)) else Nil))
    new Outer_Syntax(keywords1, completion1, rev_abbrevs, language_context, true)
  }
  /** Fold a batch of header keywords into this syntax; each keyword is added
    * in both its decoded and encoded symbol form. */
  def add_keywords(keywords: Thy_Header.Keywords): Outer_Syntax =
    (this /: keywords) {
      case (syntax, (name, spec)) =>
        syntax +
          (Symbol.decode(name), spec.kind, spec.exts) +
          (Symbol.encode(name), spec.kind, spec.exts)
    }
  /* abbrevs */
  // rev_abbrevs is stored newest-first; expose them in insertion order here
  def abbrevs: Thy_Header.Abbrevs = rev_abbrevs.reverse
  /** Register abbreviations (decoded and encoded variants both expand to the
    * decoded replacement). No-op for an empty list. */
  def add_abbrevs(new_abbrevs: Thy_Header.Abbrevs): Outer_Syntax =
    if (new_abbrevs.isEmpty) this
    else {
      val completion1 =
        completion.add_abbrevs(
          (for ((a, b) <- new_abbrevs) yield {
            val a1 = Symbol.decode(a)
            val a2 = Symbol.encode(a)
            val b1 = Symbol.decode(b)
            List((a1, b1), (a2, b1))
          }).flatten)
      val rev_abbrevs1 = Library.distinct(new_abbrevs) reverse_::: rev_abbrevs
      new Outer_Syntax(keywords, completion1, rev_abbrevs1, language_context, has_tokens)
    }
  /* merge */
  /** Merge two syntaxes; returns `this` unchanged when the merge adds
    * nothing, so unchanged instances can be compared with `eq`. */
  def ++ (other: Outer_Syntax): Outer_Syntax =
    if (this eq other) this
    else {
      val keywords1 = keywords ++ other.keywords
      val completion1 = completion ++ other.completion
      val rev_abbrevs1 = Library.merge(rev_abbrevs, other.rev_abbrevs)
      if ((keywords eq keywords1) && (completion eq completion1)) this
      else new Outer_Syntax(keywords1, completion1, rev_abbrevs1, language_context, has_tokens)
    }
  /* load commands */
  def load_command(name: String): Option[List[String]] = keywords.load_commands.get(name)
  def load_commands_in(text: String): Boolean = keywords.load_commands_in(text)
  /* language context */
  def set_language_context(context: Completion.Language_Context): Outer_Syntax =
    new Outer_Syntax(keywords, completion, rev_abbrevs, context, has_tokens)
  /** Variant for token-free languages; only valid when there are no keywords. */
  def no_tokens: Outer_Syntax =
  {
    require(keywords.is_empty)
    new Outer_Syntax(
      completion = completion,
      rev_abbrevs = rev_abbrevs,
      language_context = language_context,
      has_tokens = false)
  }
  /** parsing **/
  /* command spans */
  /** Partition a token stream into command spans. Improper tokens (whitespace,
    * comments) are buffered separately so that a trailing run of them forms
    * its own Ignored_Span rather than being attached to the next command. */
  def parse_spans(toks: List[Token]): List[Command_Span.Span] =
  {
    val result = new mutable.ListBuffer[Command_Span.Span]
    val content = new mutable.ListBuffer[Token]
    val improper = new mutable.ListBuffer[Token]
    // Classify the accumulated span and append it to the result: all-improper
    // => Ignored, any error token => Malformed, otherwise locate the command
    // token and record its name and source position.
    def ship(span: List[Token])
    {
      val kind =
        if (span.forall(_.is_improper)) Command_Span.Ignored_Span
        else if (span.exists(_.is_error)) Command_Span.Malformed_Span
        else
          span.find(_.is_command) match {
            case None => Command_Span.Malformed_Span
            case Some(cmd) =>
              val name = cmd.source
              // offset of the command token = total symbol length of what precedes it
              val offset =
                (0 /: span.takeWhile(_ != cmd)) {
                  case (i, tok) => i + Symbol.length(tok.source) }
              val end_offset = offset + Symbol.length(name)
              val pos = Position.Range(Text.Range(offset, end_offset) + 1)
              Command_Span.Command_Span(name, pos)
          }
      result += Command_Span.Span(kind, span)
    }
    // Flush pending content first, then any pending improper tokens.
    def flush()
    {
      if (content.nonEmpty) { ship(content.toList); content.clear }
      if (improper.nonEmpty) { ship(improper.toList); improper.clear }
    }
    for (tok <- toks) {
      if (tok.is_improper) improper += tok
      else if (keywords.is_before_command(tok) ||
        tok.is_command &&
          (!content.exists(keywords.is_before_command(_)) || content.exists(_.is_command)))
      { flush(); content += tok }
      else { content ++= improper; improper.clear; content += tok }
    }
    flush()
    result.toList
  }
  def parse_spans(input: CharSequence): List[Command_Span.Span] =
    parse_spans(Token.explode(keywords, input))
}
| larsrh/libisabelle | modules/pide/2017/src/main/scala/Isar/outer_syntax.scala | Scala | apache-2.0 | 5,463 |
/*
* Copyright 2016 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600.v3
import uk.gov.hmrc.ct.box.{CtInteger, CtBoxIdentifier, Linked}
import uk.gov.hmrc.ct.computations.CP258
/** CT600 (v3) box 165: net trading profits. */
case class B165(value: Int) extends CtBoxIdentifier(name = "Net trading profits") with CtInteger
/** B165 is derived 1:1 from computation box CP258. */
object B165 extends Linked[CP258, B165] {
  // Copy the linked computation value straight into the return box.
  override def apply(source: CP258): B165 = B165(source.value)
}
| ahudspith-equalexperts/ct-calculations | src/main/scala/uk/gov/hmrc/ct/ct600/v3/B165.scala | Scala | apache-2.0 | 949 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.tez
import org.apache.spark.Dependency
import org.apache.spark.InterruptibleIterator
import org.apache.spark.OneToOneDependency
import org.apache.spark.Partition
import org.apache.spark.SparkEnv
import org.apache.spark.TaskContext
import org.apache.spark.rdd.CoGroupPartition
import org.apache.spark.rdd.CoGroupSplitDep
import org.apache.spark.rdd.RDD
import org.apache.spark.rdd.ShuffledRDDPartition
import org.apache.spark.scheduler.MapStatus
import org.apache.spark.scheduler.Task
import org.apache.tez.runtime.library.common.ValuesIterator
import org.apache.spark.shuffle.ShuffleWriter
import org.apache.spark.rdd.ShuffledRDD
import org.apache.spark.shuffle.BaseShuffleHandle
import org.apache.spark.ShuffleDependency
import org.apache.spark.Partitioner
import org.apache.hadoop.io.Writable
import org.apache.spark.tez.io.TezResultWriter
import org.apache.spark.tez.utils.ReflectionUtils
import scala.reflect.ClassTag
import org.apache.hadoop.fs.FileSystem
import org.apache.tez.dag.api.TezConfiguration
import org.apache.hadoop.fs.Path
import org.apache.hadoop.io.NullWritable
import org.apache.spark.rdd.NarrowCoGroupSplitDep
/**
 * Tez vertex Task modeled after Spark's ResultTask.
 *
 * Runs the RDD iterator for one partition (optionally applying `func`, as a
 * plain ResultTask would) and writes the resulting key/value pairs out via
 * the shuffle manager's TezResultWriter.
 *
 * NOTE(review): `keyClass`/`valueClass` are injected after construction via
 * the setters below (see the TODOs) — callers must set them before the task
 * runs, otherwise the writer receives nulls.
 */
class VertexResultTask[T, U](
  stageId: Int,
  rdd: RDD[T],
  partitions:Array[Partition],
  func: (TaskContext, Iterator[T]) => U = null)
  extends TezTask[U](stageId, 0, rdd) {
  private[tez] var keyClass:Class[Writable] = null
  private[tez] var valueClass:Class[Writable] = null
  //TODO review. Need a cleaner way
  private[tez] def setKeyClass(keyClass:Class[Writable]) {
    this.keyClass = keyClass
  }
  //TODO review. Need a cleaner way
  private[tez] def setValueClass(valueClass:Class[Writable]) {
    this.valueClass = valueClass
  }
  /**
   * Selects the partition for this task (single-partition RDDs always use
   * partition 0), realigns its index with the Tez-assigned partition id, runs
   * the computation and writes the output to HDFS. Any exception is logged
   * and rethrown wrapped in IllegalStateException.
   */
  override def runTask(context: TaskContext): U = {
    try {
      val partition = if (partitions.length == 1) partitions(0) else partitions(context.partitionId())
      this.resetPartitionIndex(partition, context.partitionId())
      val iterator = if (func == null) {
        // no result function: stream the RDD's key/value pairs directly
        rdd.iterator(partition, context).asInstanceOf[Iterator[Product2[_, _]]]
      } else {
        // result function: emit a single (partitionIndex -> result) pair
        val result = func(context, rdd.iterator(partition, context))
        new InterruptibleIterator(context, Map(partition.index -> result).iterator)
      }
      this.toHdfs(partition.index, iterator)
    } catch {
      case e: Exception => e.printStackTrace(); throw new IllegalStateException(e)
    }
  }
  /**
   * Human-readable task identity for logs.
   */
  override def toString = "VertexResultTask(" + stageId + ", " + partitionId + ")"
  /**
   * Writes the iterator through a TezResultWriter obtained from the shuffle
   * manager. A BaseShuffleHandle is only built when the first dependency is a
   * ShuffleDependency; otherwise a null handle is passed.
   * NOTE(review): the returned value is always unit cast to U — confirm
   * downstream callers never use the result.
   */
  private def toHdfs(index:Int, iter: Iterator[Product2[Any, Any]]): U = {
    val manager = SparkEnv.get.shuffleManager
    val handle =
      if (rdd.dependencies != Nil && rdd.dependencies.head.isInstanceOf[ShuffleDependency[_, _, _]]) {
        new BaseShuffleHandle(index, 0, rdd.dependencies.head.asInstanceOf[ShuffleDependency[_, _, _]])
      } else {
        null
      }
    val writer = manager.getWriter(handle, index, context).asInstanceOf[TezResultWriter[Any, Any, _]]
    writer.setKeyClass(this.keyClass)
    writer.setValueClass(this.valueClass)
    writer.write(iter)
    ().asInstanceOf[U]
  }
}
| hortonworks/spark-native-yarn | src/main/scala/org/apache/spark/tez/VertexResultTask.scala | Scala | apache-2.0 | 4,014 |
package lila.tournament
import org.joda.time.DateTime
/** Pool of users waiting to be paired in a tournament.
  *
  * `hash` maps userId -> the time the user joined the pool; `date` is the
  * time of the last `update`. A user becomes pairable ("waiting") once they
  * have been in the pool for at least `waitSeconds`.
  */
private[tournament] case class WaitingUsers(
    hash: Map[String, DateTime],
    clock: Option[chess.Clock],
    date: DateTime) {
  // Patience derived from clock speed, clamped to [10, 35] seconds:
  // 1+0 -> 8 -> 10
  // 3+0 -> 16 -> 16
  // 5+0 -> 24 -> 24
  // 10+0 -> 44 -> 35
  private val waitSeconds = {
    (clock.fold(60)(_.estimateTotalTime) / 15) + 4
  } min 35 max 10
  lazy val all = hash.keys.toList
  lazy val size = hash.size
  def isOdd = size % 2 == 1
  // skips the most recent user if odd, so pairing works on an even count
  def evenNumber: List[String] = {
    if (isOdd) hash.toList.sortBy(-_._2.getMillis).drop(1).map(_._1)
    else all
  }
  // Seconds the given user has spent in the pool, if present.
  // NOTE(review): `nowSeconds` and `getSeconds` come from implicit
  // enrichments elsewhere in the codebase — confirm both are epoch seconds.
  def waitSecondsOf(userId: String) = hash get userId map { d =>
    nowSeconds - d.getSeconds
  }
  // Users that joined at least `waitSeconds` before the last update.
  def waiting = {
    val since = date minusSeconds waitSeconds
    hash.collect {
      case (u, d) if d.isBefore(since) => u
    }.toList
  }
  // Replace the pool with `us`: keeps join times of users still present,
  // stamps newcomers with the new date, and refreshes the clock.
  def update(us: Seq[String], clock: Option[chess.Clock]) = {
    val newDate = DateTime.now
    copy(
      date = newDate,
      clock = clock,
      hash = hash.filterKeys(us.contains) ++
        us.filterNot(hash.contains).map { _ -> newDate }
    )
  }
  def intersect(us: Seq[String]) = copy(hash = hash filterKeys us.contains)
  def diff(us: Set[String]) = copy(hash = hash filterKeys { k => !us.contains(k) })
}
private[tournament] object WaitingUsers {
  // A pool with no users and no clock, stamped at call time.
  // Kept as a `def` on purpose: each call must capture a fresh DateTime.now.
  def empty = WaitingUsers(hash = Map.empty, clock = none, date = DateTime.now)
}
| r0k3/lila | modules/tournament/src/main/WaitingUsers.scala | Scala | mit | 1,400 |
package mesosphere.mesos
import com.google.common.collect.Lists
import mesosphere.marathon.MarathonSpec
import mesosphere.marathon.Protos.{ Constraint, MarathonTask }
import mesosphere.marathon.state.PathId._
import mesosphere.marathon.state.{ AppDefinition, Container, PathId, Timestamp }
import mesosphere.marathon.state.Container.Docker
import mesosphere.marathon.state.Container.Docker.PortMapping
import mesosphere.marathon.tasks.{ MarathonTasks, TaskTracker }
import mesosphere.mesos.protos._
import org.apache.mesos.Protos.{ Offer, TaskInfo }
import org.apache.mesos.Protos.ContainerInfo.DockerInfo.Network
import org.mockito.Mockito._
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
import scala.collection.immutable.Seq
import scala.collection.JavaConverters._
/**
 * Unit tests for TaskBuilder: verifies that a Mesos TaskInfo is only built
 * when an offer satisfies an app's resource / port / role requirements and
 * placement constraints, and that the generated command, environment
 * variables and URIs are correct.
 */
class TaskBuilderTest extends MarathonSpec {
  import mesosphere.mesos.protos.Implicits._
  test("BuildIfMatches") {
    val offer = makeBasicOffer(cpus = 1.0, mem = 128.0, disk = 2000.0, beginPort = 31000, endPort = 32000)
      .addResources(ScalarResource("cpus", 1))
      .addResources(ScalarResource("mem", 128))
      .addResources(ScalarResource("disk", 2000))
      .build
    val task: Option[(TaskInfo, Seq[Long])] = buildIfMatches(
      offer,
      AppDefinition(
        id = "/product/frontend".toPath,
        cmd = Some("foo"),
        cpus = 1,
        mem = 64,
        disk = 1,
        executor = "//cmd",
        ports = Seq(8080, 8081)
      )
    )
    assert(task.isDefined)
    val (taskInfo, taskPorts) = task.get
    val range = taskInfo.getResourcesList.asScala
      .find(r => r.getName == Resource.PORTS)
      .map(r => r.getRanges.getRange(0))
    assert(range.isDefined)
    // The taskName is the elements of the path, reversed, and joined by dots
    assert("frontend.product" == taskInfo.getName)
    assert(2 == taskPorts.size)
    assert(taskPorts(0) == range.get.getBegin.toInt)
    assert(taskPorts(1) == range.get.getEnd.toInt)
    assert(!taskInfo.hasExecutor)
    assert(taskInfo.hasCommand)
    val cmd = taskInfo.getCommand
    assert(cmd.getShell)
    assert(cmd.hasValue)
    assert(cmd.getArgumentsList.asScala.isEmpty)
    assert(cmd.getValue == "foo")
    assert(cmd.hasEnvironment)
    // Ports are exposed both positionally (PORT0, PORT1) and by declared
    // port number (PORT_8080, PORT_8081); both forms must agree.
    val envVars = cmd.getEnvironment.getVariablesList.asScala
    assert(envVars.exists(v => v.getName == "HOST" && v.getValue == offer.getHostname))
    assert(envVars.exists(v => v.getName == "PORT0" && v.getValue.nonEmpty))
    assert(envVars.exists(v => v.getName == "PORT1" && v.getValue.nonEmpty))
    assert(envVars.exists(v => v.getName == "PORT_8080" && v.getValue.nonEmpty))
    assert(envVars.exists(v => v.getName == "PORT_8081" && v.getValue.nonEmpty))
    val exposesFirstPort =
      envVars.find(v => v.getName == "PORT0").get.getValue == envVars.find(v => v.getName == "PORT_8080").get.getValue
    assert(exposesFirstPort)
    val exposesSecondPort =
      envVars.find(v => v.getName == "PORT1").get.getValue == envVars.find(v => v.getName == "PORT_8081").get.getValue
    assert(exposesSecondPort)
    for (r <- taskInfo.getResourcesList.asScala) {
      assert("*" == r.getRole)
    }
    // TODO test for resources etc.
  }
  test("BuildIfMatchesWithArgs") {
    val offer = makeBasicOffer(cpus = 1.0, mem = 128.0, disk = 2000.0, beginPort = 31000, endPort = 32000)
      .addResources(ScalarResource("cpus", 1))
      .addResources(ScalarResource("mem", 128))
      .addResources(ScalarResource("disk", 2000))
      .build
    val task: Option[(TaskInfo, Seq[Long])] = buildIfMatches(
      offer,
      AppDefinition(
        id = "testApp".toPath,
        args = Some(Seq("a", "b", "c")),
        cpus = 1,
        mem = 64,
        disk = 1,
        executor = "//cmd",
        ports = Seq(8080, 8081)
      )
    )
    assert(task.isDefined)
    val (taskInfo, taskPorts) = task.get
    val range = taskInfo.getResourcesList.asScala
      .find(r => r.getName == Resource.PORTS)
      .map(r => r.getRanges.getRange(0))
    assert(range.isDefined)
    assert(2 == taskPorts.size)
    assert(taskPorts(0) == range.get.getBegin.toInt)
    assert(taskPorts(1) == range.get.getEnd.toInt)
    assert(!taskInfo.hasExecutor)
    assert(taskInfo.hasCommand)
    // args mode: no shell, no command value, just the argument vector
    val cmd = taskInfo.getCommand
    assert(!cmd.getShell)
    assert(!cmd.hasValue)
    assert(cmd.getArgumentsList.asScala == Seq("a", "b", "c"))
    for (r <- taskInfo.getResourcesList.asScala) {
      assert("*" == r.getRole)
    }
    // TODO test for resources etc.
  }
  test("BuildIfMatchesWithCommandAndExecutor") {
    val offer = makeBasicOffer(cpus = 1.0, mem = 128.0, disk = 2000.0, beginPort = 31000, endPort = 32000)
      .addResources(ScalarResource("cpus", 1))
      .addResources(ScalarResource("mem", 128))
      .addResources(ScalarResource("disk", 2000))
      .build
    val task: Option[(TaskInfo, Seq[Long])] = buildIfMatches(
      offer,
      AppDefinition(
        id = "testApp".toPath,
        cpus = 1,
        mem = 64,
        disk = 1,
        cmd = Some("foo"),
        executor = "/custom/executor",
        ports = Seq(8080, 8081)
      )
    )
    assert(task.isDefined)
    val (taskInfo, taskPorts) = task.get
    assert(taskInfo.hasExecutor)
    assert(!taskInfo.hasCommand)
    // custom executor: the cmd is wrapped into an executor bootstrap line
    val cmd = taskInfo.getExecutor.getCommand
    assert(cmd.getShell)
    assert(cmd.hasValue)
    assert(cmd.getArgumentsList.asScala.isEmpty)
    assert(cmd.getValue == "chmod ug+rx '/custom/executor' && exec '/custom/executor' foo")
  }
  test("BuildIfMatchesWithArgsAndExecutor") {
    val offer = makeBasicOffer(cpus = 1.0, mem = 128.0, disk = 2000.0, beginPort = 31000, endPort = 32000)
      .addResources(ScalarResource("cpus", 1))
      .addResources(ScalarResource("mem", 128))
      .addResources(ScalarResource("disk", 2000))
      .build
    val task: Option[(TaskInfo, Seq[Long])] = buildIfMatches(
      offer,
      AppDefinition(
        id = "testApp".toPath,
        cpus = 1,
        mem = 64,
        disk = 1,
        args = Some(Seq("a", "b", "c")),
        executor = "/custom/executor",
        ports = Seq(8080, 8081)
      )
    )
    assert(task.isDefined)
    val (taskInfo, taskPorts) = task.get
    val cmd = taskInfo.getExecutor.getCommand
    assert(!taskInfo.hasCommand)
    assert(cmd.getValue == "chmod ug+rx '/custom/executor' && exec '/custom/executor' a b c")
  }
  test("BuildIfMatchesWithRole") {
    // The app's demands exceed the "*" resources, so the "marathon" role
    // resources must be used and reflected on every task resource.
    val offer = makeBasicOfferWithRole(cpus = 1.0, mem = 128.0, disk = 1000.0, beginPort = 31000, endPort = 32000, role = "marathon")
      .addResources(ScalarResource("cpus", 1, "*"))
      .addResources(ScalarResource("mem", 128, "*"))
      .addResources(ScalarResource("disk", 1000, "*"))
      .addResources(ScalarResource("cpus", 2, "marathon"))
      .addResources(ScalarResource("mem", 256, "marathon"))
      .addResources(ScalarResource("disk", 2000, "marathon"))
      .addResources(RangesResource(Resource.PORTS, Seq(protos.Range(33000, 34000)), "marathon"))
      .build
    val task: Option[(TaskInfo, Seq[Long])] = buildIfMatches(
      offer,
      AppDefinition(
        id = "testApp".toPath,
        cpus = 2,
        mem = 200,
        disk = 2,
        executor = "//cmd",
        ports = Seq(8080, 8081)
      )
    )
    assert(task.isDefined)
    val (taskInfo, taskPorts) = task.get
    val range = taskInfo.getResourcesList.asScala
      .find(r => r.getName == Resource.PORTS)
      .map(r => r.getRanges.getRange(0))
    assert(range.isDefined)
    assert(2 == taskPorts.size)
    assert(taskPorts(0) == range.get.getBegin.toInt)
    assert(taskPorts(1) == range.get.getEnd.toInt)
    for (r <- taskInfo.getResourcesList.asScala) {
      assert("marathon" == r.getRole)
    }
    // TODO test for resources etc.
  }
  test("BuildIfMatchesWithRole2") {
    val offer = makeBasicOfferWithRole(cpus = 1.0, mem = 128.0, disk = 1000.0, beginPort = 31000, endPort = 32000, role = "*")
      .addResources(ScalarResource("cpus", 1, "*"))
      .addResources(ScalarResource("mem", 128, "*"))
      .addResources(ScalarResource("disk", 1000, "*"))
      .addResources(ScalarResource("cpus", 2, "marathon"))
      .addResources(ScalarResource("mem", 256, "marathon"))
      .addResources(ScalarResource("disk", 2000, "marathon"))
      .addResources(RangesResource(Resource.PORTS, Seq(protos.Range(33000, 34000)), "marathon"))
      .build
    val task: Option[(TaskInfo, Seq[Long])] = buildIfMatches(
      offer,
      AppDefinition(
        id = "testApp".toPath,
        cpus = 1,
        mem = 64,
        disk = 1,
        executor = "//cmd",
        ports = Seq(8080, 8081)
      )
    )
    assert(task.isDefined)
    val (taskInfo, taskPorts) = task.get
    val range = taskInfo.getResourcesList.asScala
      .find(r => r.getName == Resource.PORTS)
      .map(r => r.getRanges.getRange(0))
    assert(range.isDefined)
    assert(2 == taskPorts.size)
    assert(taskPorts(0) == range.get.getBegin.toInt)
    assert(taskPorts(1) == range.get.getEnd.toInt)
    // In this case, the first roles are sufficient so we'll use those first.
    for (r <- taskInfo.getResourcesList.asScala) {
      assert("*" == r.getRole)
    }
    // TODO test for resources etc.
  }
  test("BuildIfMatchesWithRackIdConstraint") {
    val taskTracker = mock[TaskTracker]
    val offer = makeBasicOffer(1.0, 128.0, 31000, 32000)
      .addAttributes(TextAttribute("rackid", "1"))
      .build
    val app = makeBasicApp().copy(
      constraints = Set(
        Constraint.newBuilder
          .setField("rackid")
          .setOperator(Constraint.Operator.UNIQUE)
          .build()
      )
    )
    // existing tasks occupy racks 2 and 3, so rack 1 is still unique
    val t1 = makeSampleTask(app.id, "rackid", "2")
    val t2 = makeSampleTask(app.id, "rackid", "3")
    val s = Set(t1, t2)
    when(taskTracker.get(app.id)).thenReturn(s)
    val builder = new TaskBuilder(app,
      s => TaskID(s.toString), taskTracker, defaultConfig())
    val task = builder.buildIfMatches(offer)
    assert(task.isDefined)
    // TODO test for resources etc.
  }
  test("RackAndHostConstraints") {
    // Test the case where we want tasks to be balanced across racks/AZs
    // and run only one per machine
    val app = makeBasicApp().copy(
      instances = 10,
      constraints = Set(
        Constraint.newBuilder.setField("rackid").setOperator(Constraint.Operator.GROUP_BY).setValue("3").build,
        Constraint.newBuilder.setField("hostname").setOperator(Constraint.Operator.UNIQUE).build
      )
    )
    var runningTasks = Set.empty[MarathonTask]
    val taskTracker = mock[TaskTracker]
    when(taskTracker.get(app.id)).thenAnswer(new Answer[Set[MarathonTask]] {
      override def answer(p1: InvocationOnMock): Set[MarathonTask] = runningTasks
    })
    val builder = new TaskBuilder(app,
      s => TaskID(s.toString), taskTracker, defaultConfig())
    // Accept the offer and record the launched task as "running" so it
    // influences constraint evaluation for subsequent offers.
    def shouldBuildTask(message: String, offer: Offer) {
      val tupleOption = builder.buildIfMatches(offer)
      assert(tupleOption.isDefined, message)
      val marathonTask = MarathonTasks.makeTask(
        tupleOption.get._1.getTaskId.getValue,
        offer.getHostname,
        tupleOption.get._2,
        offer.getAttributesList.asScala.toList,
        Timestamp.now)
      runningTasks += marathonTask
    }
    def shouldNotBuildTask(message: String, offer: Offer) {
      val tupleOption = builder.buildIfMatches(offer)
      assert(tupleOption.isEmpty, message)
    }
    val offerRack1HostA = makeBasicOffer()
      .setHostname("alpha")
      .addAttributes(TextAttribute("rackid", "1"))
      .build
    shouldBuildTask("Should take first offer", offerRack1HostA)
    val offerRack1HostB = makeBasicOffer()
      .setHostname("beta")
      .addAttributes(TextAttribute("rackid", "1"))
      .build
    shouldNotBuildTask("Should not take offer for the same rack", offerRack1HostB)
    val offerRack2HostC = makeBasicOffer()
      .setHostname("gamma")
      .addAttributes(TextAttribute("rackid", "2"))
      .build
    shouldBuildTask("Should take offer for different rack", offerRack2HostC)
    // Nothing prevents having two hosts with the same name in different racks
    val offerRack3HostA = makeBasicOffer()
      .setHostname("alpha")
      .addAttributes(TextAttribute("rackid", "3"))
      .build
    shouldNotBuildTask("Should not take offer in different rack with non-unique hostname", offerRack3HostA)
  }
  test("UniqueHostNameAndClusterAttribute") {
    val app = makeBasicApp().copy(
      instances = 10,
      constraints = Set(
        Constraint.newBuilder.setField("spark").setOperator(Constraint.Operator.CLUSTER).setValue("enabled").build,
        Constraint.newBuilder.setField("hostname").setOperator(Constraint.Operator.UNIQUE).build
      )
    )
    var runningTasks = Set.empty[MarathonTask]
    val taskTracker = mock[TaskTracker]
    when(taskTracker.get(app.id)).thenAnswer(new Answer[Set[MarathonTask]] {
      override def answer(p1: InvocationOnMock): Set[MarathonTask] = runningTasks
    })
    val builder = new TaskBuilder(app,
      s => TaskID(s.toString), taskTracker, defaultConfig())
    def shouldBuildTask(message: String, offer: Offer) {
      val tupleOption = builder.buildIfMatches(offer)
      assert(tupleOption.isDefined, message)
      val marathonTask = MarathonTasks.makeTask(
        tupleOption.get._1.getTaskId.getValue,
        offer.getHostname,
        tupleOption.get._2,
        offer.getAttributesList.asScala.toList, Timestamp.now)
      runningTasks += marathonTask
    }
    def shouldNotBuildTask(message: String, offer: Offer) {
      val tupleOption = builder.buildIfMatches(offer)
      assert(tupleOption.isEmpty, message)
    }
    val offerHostA = makeBasicOffer()
      .setHostname("alpha")
      .addAttributes(TextAttribute("spark", "disabled"))
      .build
    shouldNotBuildTask("Should not take an offer with spark:disabled", offerHostA)
    val offerHostB = makeBasicOffer()
      .setHostname("beta")
      .addAttributes(TextAttribute("spark", "enabled"))
      .build
    shouldBuildTask("Should take offer with spark:enabled", offerHostB)
  }
  test("PortsEnv") {
    // ports declared as 0 ("any"): only positional PORTn vars, no PORT_<n>
    val env = TaskBuilder.portsEnv(Seq(0, 0), Seq(1001, 1002))
    assert("1001" == env("PORT"))
    assert("1001" == env("PORT0"))
    assert("1002" == env("PORT1"))
    assert(!env.contains("PORT_0"))
  }
  test("PortsEnvEmpty") {
    val env = TaskBuilder.portsEnv(Seq(), Seq())
    assert(Map.empty == env)
  }
  test("DeclaredPortsEnv") {
    val env = TaskBuilder.portsEnv(Seq(80, 8080), Seq(1001, 1002))
    assert("1001" == env("PORT"))
    assert("1001" == env("PORT0"))
    assert("1002" == env("PORT1"))
    assert("1001" == env("PORT_80"))
    assert("1002" == env("PORT_8080"))
  }
  test("AppContextEnvironment") {
    val command =
      TaskBuilder.commandInfo(
        AppDefinition(
          id = "/test".toPath,
          ports = Seq(8080, 8081),
          version = Timestamp(0)
        ),
        Some(TaskID("task-123")),
        Some ("host.mega.corp"),
        Seq(1000, 1001)
      )
    val env: Map[String, String] =
      command.getEnvironment().getVariablesList().asScala.toList.map(v => v.getName() -> v.getValue()).toMap
    assert("task-123" == env("MESOS_TASK_ID"))
    assert("/test" == env("MARATHON_APP_ID"))
    assert("1970-01-01T00:00:00.000Z" == env("MARATHON_APP_VERSION"))
  }
  test("PortsEnvWithOnlyPorts") {
    val command =
      TaskBuilder.commandInfo(
        AppDefinition(
          ports = Seq(8080, 8081)
        ),
        Some(TaskID("task-123")),
        Some ("host.mega.corp"),
        Seq(1000, 1001)
      )
    val env: Map[String, String] =
      command.getEnvironment().getVariablesList().asScala.toList.map(v => v.getName() -> v.getValue()).toMap
    assert("1000" == env("PORT_8080"))
    assert("1001" == env("PORT_8081"))
  }
  test("PortsEnvWithOnlyMappings") {
    val command =
      TaskBuilder.commandInfo(
        AppDefinition(
          container = Some(Container(
            docker = Some(Docker(
              network = Some(Network.BRIDGE),
              portMappings = Some(Seq(
                PortMapping(containerPort = 8080, hostPort = 0, servicePort = 9000, protocol = "tcp"),
                PortMapping(containerPort = 8081, hostPort = 0, servicePort = 9000, protocol = "tcp")
              ))
            ))
          ))
        ),
        Some(TaskID("task-123")),
        Some("host.mega.corp"),
        Seq(1000, 1001)
      )
    val env: Map[String, String] =
      command.getEnvironment().getVariablesList().asScala.toList.map(v => v.getName() -> v.getValue()).toMap
    assert("1000" == env("PORT_8080"))
    assert("1001" == env("PORT_8081"))
  }
  test("PortsEnvWithBothPortsAndMappings") {
    // Docker port mappings take precedence over plain declared ports.
    val command =
      TaskBuilder.commandInfo(
        AppDefinition(
          ports = Seq(22, 23),
          container = Some(Container(
            docker = Some(Docker(
              network = Some(Network.BRIDGE),
              portMappings = Some(Seq(
                PortMapping(containerPort = 8080, hostPort = 0, servicePort = 9000, protocol = "tcp"),
                PortMapping(containerPort = 8081, hostPort = 0, servicePort = 9000, protocol = "tcp")
              ))
            ))
          ))
        ),
        Some(TaskID("task-123")),
        Some("host.mega.corp"),
        Seq(1000, 1001)
      )
    val env: Map[String, String] =
      command.getEnvironment().getVariablesList().asScala.toList.map(v => v.getName() -> v.getValue()).toMap
    assert("1000" == env("PORT_8080"))
    assert("1001" == env("PORT_8081"))
    assert(!env.contains("PORT_22"))
    assert(!env.contains("PORT_23"))
  }
  test("TaskNoURIExtraction") {
    val command =
      TaskBuilder.commandInfo(
        AppDefinition(
          id = "testApp".toPath,
          cpus = 1,
          mem = 64,
          disk = 1,
          executor = "//cmd",
          uris = Seq("http://www.example.com", "http://www.example.com/test.tgz",
            "example.tar.gz"),
          ports = Seq(8080, 8081)
        ),
        Some(TaskID("task-123")),
        Some("host.mega.corp"),
        Seq(1000, 1001)
      )
    // only archive-looking URIs (.tgz, .tar.gz) are marked for extraction
    val uriinfo1 = command.getUris(0)
    assert(!uriinfo1.getExtract)
    val uriinfo2 = command.getUris(1)
    assert(uriinfo2.getExtract)
    val uriinfo3 = command.getUris(2)
    assert(uriinfo3.getExtract)
  }
  // Helper: build a TaskBuilder with a mocked TaskTracker and attempt a match.
  def buildIfMatches(offer: Offer, app: AppDefinition) = {
    val taskTracker = mock[TaskTracker]
    val builder = new TaskBuilder(app,
      s => TaskID(s.toString), taskTracker, defaultConfig())
    builder.buildIfMatches(offer)
  }
  // Helper: a minimal running MarathonTask carrying a single text attribute,
  // used to pre-populate the task tracker for constraint tests.
  def makeSampleTask(id: PathId, attr: String, attrVal: String) = {
    MarathonTask.newBuilder()
      .setHost("host")
      .addAllPorts(Lists.newArrayList(999))
      .setId(id.toString)
      .addAttributes(TextAttribute(attr, attrVal))
      .build()
  }
}
| 14Zen/marathon | src/test/scala/mesosphere/mesos/TaskBuilderTest.scala | Scala | apache-2.0 | 18,827 |
package mesosphere.marathon
package core.launcher.impl
import akka.Done
import mesosphere.UnitTest
import mesosphere.marathon.test.SettableClock
import mesosphere.marathon.core.instance.TestInstanceBuilder._
import mesosphere.marathon.core.instance.update.{InstanceUpdateEffect, InstanceUpdateOperation}
import mesosphere.marathon.core.instance.{Instance, TestInstanceBuilder}
import mesosphere.marathon.core.launcher.{InstanceOp, OfferProcessorConfig, TaskLauncher}
import mesosphere.marathon.core.matcher.base.OfferMatcher
import mesosphere.marathon.core.matcher.base.OfferMatcher.{InstanceOpSource, InstanceOpWithSource, MatchedInstanceOps}
import mesosphere.marathon.core.task.{Task, Tasks}
import mesosphere.marathon.core.task.state.{AgentInfoPlaceholder, NetworkInfoPlaceholder}
import mesosphere.marathon.core.task.tracker.InstanceTracker
import mesosphere.marathon.state.{AppDefinition, PathId}
import mesosphere.marathon.metrics.dummy.DummyMetrics
import mesosphere.marathon.test.MarathonTestHelper
import org.scalatest.concurrent.PatienceConfiguration.Timeout
import scala.collection.immutable.Seq
import scala.concurrent.Future
import scala.concurrent.duration._
/**
 * Unit tests for OfferProcessorImpl: verifies the interplay between offer
 * matching, instance-tracker persistence and task launching, including the
 * rollback paths (expunge / revert) when the launcher rejects an offer, and
 * declining of unmatched or failed offers.
 */
class OfferProcessorImplTest extends UnitTest {
  private[this] val offer = MarathonTestHelper.makeBasicOffer().build()
  private[this] val offerId = offer.getId
  private val appId: PathId = PathId("/testapp")
  private[this] val instanceId1 = Instance.Id.forRunSpec(appId)
  private[this] val instanceId2 = Instance.Id.forRunSpec(appId)
  private[this] val taskInfo1 = MarathonTestHelper.makeOneCPUTask(Task.Id(instanceId1)).build()
  private[this] val taskInfo2 = MarathonTestHelper.makeOneCPUTask(Task.Id(instanceId2)).build()
  private[this] val instance1 = TestInstanceBuilder.newBuilderWithInstanceId(instanceId1).addTaskWithBuilder().taskFromTaskInfo(taskInfo1).build().getInstance()
  private[this] val instance2 = TestInstanceBuilder.newBuilderWithInstanceId(instanceId2).addTaskWithBuilder().taskFromTaskInfo(taskInfo2).build().getInstance()
  private[this] val task1: Task = instance1.appTask
  private[this] val task2: Task = instance2.appTask
  private[this] val tasks = Seq((taskInfo1, task1, instance1), (taskInfo2, task2, instance2))
  private[this] val arbitraryInstanceUpdateEffect = InstanceUpdateEffect.Noop(instanceId1)
  // Fresh mocks and an OfferProcessorImpl wired to them, per test case.
  case class Fixture(
      conf: OfferProcessorConfig = new OfferProcessorConfig { verify() },
      clock: SettableClock = new SettableClock(),
      offerMatcher: OfferMatcher = mock[OfferMatcher],
      taskLauncher: TaskLauncher = mock[TaskLauncher],
      instanceTracker: InstanceTracker = mock[InstanceTracker]) {
    val metrics = DummyMetrics
    val offerProcessor = new OfferProcessorImpl(
      metrics, conf, offerMatcher, taskLauncher, instanceTracker)
  }
  // Partially-applied InstanceOp factories shared by the test cases.
  object f {
    import org.apache.mesos.{Protos => Mesos}
    val metrics = DummyMetrics
    val provision = new InstanceOpFactoryHelper(metrics, Some("principal"), Some("role"))
      .provision(_: Mesos.TaskInfo, _: InstanceUpdateOperation.Provision)
    val launchWithNewTask = new InstanceOpFactoryHelper(metrics, Some("principal"), Some("role"))
      .launchOnReservation(_: Mesos.TaskInfo, _: InstanceUpdateOperation.Provision, _: Instance)
  }
  // Records accepted/rejected ops so tests can assert on the outcome.
  class DummySource extends InstanceOpSource {
    var rejected = Vector.empty[(InstanceOp, String)]
    var accepted = Vector.empty[InstanceOp]
    override def instanceOpRejected(op: InstanceOp, reason: String): Unit = rejected :+= op -> reason
    override def instanceOpAccepted(op: InstanceOp): Unit = accepted :+= op
  }
  "OfferProcessorImpl" should {
    "match successful, launch tasks successful" in new Fixture {
      Given("an offer")
      val dummySource = new DummySource
      val tasksWithSource = tasks.map {
        case (taskInfo, task, Instance(instanceId, agentInfo, _, tasksMap, runSpec, _)) =>
          val stateOp = InstanceUpdateOperation.Provision(instanceId, agentInfo.get, runSpec, tasksMap, clock.now())
          InstanceOpWithSource(dummySource, f.provision(taskInfo, stateOp))
      }
      And("a cooperative offerMatcher and taskTracker")
      offerMatcher.matchOffer(offer) returns Future.successful(MatchedInstanceOps(offerId, tasksWithSource))
      for ((_, _, Instance(instanceId, agentInfo, _, tasksMap, runSpec, _)) <- tasks) {
        val stateOp = InstanceUpdateOperation.Provision(instanceId, agentInfo.get, runSpec, tasksMap, clock.now())
        instanceTracker.process(stateOp) returns Future.successful(arbitraryInstanceUpdateEffect)
      }
      And("a working taskLauncher")
      val ops: Seq[InstanceOp] = tasksWithSource.map(_.op)
      taskLauncher.acceptOffer(offerId, ops) returns true
      When("processing the offer")
      offerProcessor.processOffer(offer).futureValue(Timeout(1.second))
      Then("we saw the offerMatch request and the task launches")
      verify(offerMatcher).matchOffer(offer)
      verify(taskLauncher).acceptOffer(offerId, ops)
      And("all task launches have been accepted")
      assert(dummySource.rejected.isEmpty)
      assert(dummySource.accepted == tasksWithSource.map(_.op))
      And("the tasks have been stored")
      for (task <- tasksWithSource) {
        val ordered = inOrder(instanceTracker)
        ordered.verify(instanceTracker).process(task.op.stateOp)
      }
    }
    "match successful, launch tasks unsuccessful" in new Fixture {
      Given("an offer")
      val dummySource = new DummySource
      val tasksWithSource = tasks.map {
        case (taskInfo, task, Instance(instanceId, agentInfo, _, tasksMap, runSpec, _)) =>
          val stateOp = InstanceUpdateOperation.Provision(instanceId, agentInfo.get, runSpec, tasksMap, clock.now())
          val op = f.provision(taskInfo, stateOp)
          InstanceOpWithSource(dummySource, op)
      }
      And("a cooperative offerMatcher and taskTracker")
      offerMatcher.matchOffer(offer) returns Future.successful(MatchedInstanceOps(offerId, tasksWithSource))
      for (task <- tasksWithSource) {
        val op = task.op
        instanceTracker.process(op.stateOp) returns Future.successful(arbitraryInstanceUpdateEffect)
        instanceTracker.forceExpunge(op.stateOp.instanceId) returns Future.successful(Done)
      }
      And("a dysfunctional taskLauncher")
      taskLauncher.acceptOffer(offerId, tasksWithSource.map(_.op)) returns false
      When("processing the offer")
      offerProcessor.processOffer(offer).futureValue(Timeout(1.second))
      Then("we saw the matchOffer request and the task launch attempt")
      verify(offerMatcher).matchOffer(offer)
      verify(taskLauncher).acceptOffer(offerId, tasksWithSource.map(_.op))
      And("all task launches were rejected")
      assert(dummySource.accepted.isEmpty)
      assert(dummySource.rejected.map(_._1) == tasksWithSource.map(_.op))
      And("the tasks where first stored and then expunged again")
      for (task <- tasksWithSource) {
        val ordered = inOrder(instanceTracker)
        val op = task.op
        ordered.verify(instanceTracker).process(op.stateOp)
        ordered.verify(instanceTracker).forceExpunge(op.stateOp.instanceId)
      }
    }
    "match successful, launch tasks unsuccessful, revert to prior task state" in new Fixture {
      Given("an offer")
      val dummySource = new DummySource
      val tasksWithSource = tasks.map {
        case (taskInfo, _, _) =>
          // launch-on-reservation ops carry an oldInstance, so a failed
          // launch must revert (not expunge) the instance
          val dummyInstance = TestInstanceBuilder.scheduledWithReservation(AppDefinition(appId))
          val taskId = Task.Id.parse(taskInfo.getTaskId)
          val app = AppDefinition(appId)
          val launch = f.launchWithNewTask(
            taskInfo,
            InstanceUpdateOperation.Provision(
              dummyInstance.instanceId, AgentInfoPlaceholder(), app, Tasks.provisioned(taskId, NetworkInfoPlaceholder(), app.version, clock.now()), clock.now()
            ),
            dummyInstance
          )
          InstanceOpWithSource(dummySource, launch)
      }
      And("a cooperative offerMatcher and taskTracker")
      offerMatcher.matchOffer(offer) returns Future.successful(MatchedInstanceOps(offerId, tasksWithSource))
      for (task <- tasksWithSource) {
        val op = task.op
        instanceTracker.process(op.stateOp) returns Future.successful(arbitraryInstanceUpdateEffect)
        instanceTracker.revert(op.oldInstance.get) returns Future.successful(Done)
      }
      And("a dysfunctional taskLauncher")
      taskLauncher.acceptOffer(offerId, tasksWithSource.map(_.op)) returns false
      When("processing the offer")
      offerProcessor.processOffer(offer).futureValue(Timeout(1.second))
      Then("we saw the matchOffer request and the task launch attempt")
      verify(offerMatcher).matchOffer(offer)
      verify(taskLauncher).acceptOffer(offerId, tasksWithSource.map(_.op))
      And("all task launches were rejected")
      assert(dummySource.accepted.isEmpty)
      assert(dummySource.rejected.map(_._1) == tasksWithSource.map(_.op))
      And("the tasks where first stored and then expunged again")
      for (task <- tasksWithSource) {
        val op = task.op
        val ordered = inOrder(instanceTracker)
        ordered.verify(instanceTracker).process(op.stateOp)
        ordered.verify(instanceTracker).revert(op.oldInstance.get)
      }
    }
    "match empty => decline" in new Fixture {
      // nothing matched: the offer is declined for the configured duration
      offerMatcher.matchOffer(offer) returns Future.successful(MatchedInstanceOps(offerId, Seq.empty))
      offerProcessor.processOffer(offer).futureValue(Timeout(1.second))
      verify(offerMatcher).matchOffer(offer)
      verify(taskLauncher).declineOffer(offerId, refuseMilliseconds = Some(conf.declineOfferDuration()))
    }
    "match crashed => decline" in new Fixture {
      // matcher failure: decline without a refusal duration so the offer
      // can be re-offered promptly
      offerMatcher.matchOffer(offer) returns Future.failed(new RuntimeException("failed matching"))
      offerProcessor.processOffer(offer).futureValue(Timeout(1.second))
      verify(offerMatcher).matchOffer(offer)
      verify(taskLauncher).declineOffer(offerId, refuseMilliseconds = None)
    }
  }
}
| gsantovena/marathon | src/test/scala/mesosphere/marathon/core/launcher/impl/OfferProcessorImplTest.scala | Scala | apache-2.0 | 10,044 |
/* sbt -- Simple Build Tool
* Copyright 2008, 2009, 2010 Mark Harrah
*/
package sbt
package classpath
import java.lang.ref.{ Reference, SoftReference, WeakReference }
import java.io.File
import java.net.{ URI, URL, URLClassLoader }
import java.util.Collections
import scala.collection.{ mutable, JavaConversions, Set }
import mutable.{ HashSet, ListBuffer }
import IO.{ createTemporaryDirectory, write }
/**
 * Utilities for constructing class loaders over sbt classpaths and for
 * inspecting classpath entries (archives, compiler plugins, URL-to-File
 * conversion).
 */
object ClasspathUtilities {
  // --- Loader construction -------------------------------------------------

  /** Loader over the files matched by `finder`, parented at [[rootLoader]]. */
  def toLoader(finder: PathFinder): ClassLoader = toLoader(finder, rootLoader)
  def toLoader(finder: PathFinder, parent: ClassLoader): ClassLoader = new URLClassLoader(finder.getURLs, parent)
  /** Loader over explicit classpath entries, parented at [[rootLoader]]. */
  def toLoader(paths: Seq[File]): ClassLoader = toLoader(paths, rootLoader)
  def toLoader(paths: Seq[File], parent: ClassLoader): ClassLoader = new URLClassLoader(Path.toURLs(paths), parent)
  /** Loader that additionally serves the in-memory resources in `resourceMap`. */
  def toLoader(paths: Seq[File], parent: ClassLoader, resourceMap: Map[String, String]): ClassLoader =
    new URLClassLoader(Path.toURLs(paths), parent) with RawResources { override def resources = resourceMap }
  /** Loader that also copies native libraries into `nativeTemp` before loading them. */
  def toLoader(paths: Seq[File], parent: ClassLoader, resourceMap: Map[String, String], nativeTemp: File): ClassLoader =
    new URLClassLoader(Path.toURLs(paths), parent) with RawResources with NativeCopyLoader {
      override def resources = resourceMap
      override val config = new NativeCopyConfig(nativeTemp, paths, javaLibraryPaths)
      override def toString =
        s"""|URLClassLoader with NativeCopyLoader with RawResources(
| urls = $paths,
| parent = $parent,
| resourceMap = ${resourceMap.keySet},
| nativeTemp = $nativeTemp
|)""".stripMargin
    }

  /** Entries of the `java.library.path` system property, used to locate native libraries. */
  def javaLibraryPaths: Seq[File] = IO.parseClasspath(System.getProperty("java.library.path"))

  /** The top-most loader: walks `getParent` from the system loader until it hits null. */
  lazy val rootLoader =
    {
      def parent(loader: ClassLoader): ClassLoader =
        {
          val p = loader.getParent
          if (p eq null) loader else parent(p)
        }
      val systemLoader = ClassLoader.getSystemClassLoader
      // Fall back to this class's own loader when no system loader is available.
      if (systemLoader ne null) parent(systemLoader)
      else parent(getClass.getClassLoader)
    }
  /** Loader that defined the xsbti launcher interfaces (shared with launched code). */
  lazy val xsbtiLoader = classOf[xsbti.Launcher].getClassLoader

  // Resource keys under which the application/boot classpaths are exposed to loaded code.
  final val AppClassPath = "app.class.path"
  final val BootClassPath = "boot.class.path"

  /** Classpath resource map pairing `classpath` (app) with the Scala instance jars (boot). */
  def createClasspathResources(classpath: Seq[File], instance: ScalaInstance): Map[String, String] =
    createClasspathResources(classpath, instance.jars)
  def createClasspathResources(appPaths: Seq[File], bootPaths: Seq[File]): Map[String, String] =
    {
      def make(name: String, paths: Seq[File]) = name -> Path.makeString(paths)
      Map(make(AppClassPath, appPaths), make(BootClassPath, bootPaths))
    }

  // Restricts `loader` so that only classes reachable from `classpath` (plus xsbti) resolve.
  private[sbt] def filterByClasspath(classpath: Seq[File], loader: ClassLoader): ClassLoader =
    new ClasspathFilter(loader, xsbtiLoader, classpath.toSet)

  /**
   * Creates a ClassLoader that contains the classpath and the scala-library from
   * the given instance, filtered to the classpath.
   */
  def makeLoader(classpath: Seq[File], instance: ScalaInstance): ClassLoader =
    filterByClasspath(classpath, makeLoader(classpath, instance.loader, instance))
  def makeLoader(classpath: Seq[File], instance: ScalaInstance, nativeTemp: File): ClassLoader =
    filterByClasspath(classpath, makeLoader(classpath, instance.loader, instance, nativeTemp))
  def makeLoader(classpath: Seq[File], parent: ClassLoader, instance: ScalaInstance): ClassLoader =
    toLoader(classpath, parent, createClasspathResources(classpath, instance))
  def makeLoader(classpath: Seq[File], parent: ClassLoader, instance: ScalaInstance, nativeTemp: File): ClassLoader =
    toLoader(classpath, parent, createClasspathResources(classpath, instance), nativeTemp)

  // Debug helper: prints which loader defined a class and where it was loaded from.
  private[sbt] def printSource(c: Class[_]) =
    println(c.getName + " loader=" + c.getClassLoader + " location=" + IO.classLocationFile(c))

  /** True if `file` looks like a jar/zip archive by name (no content sniffing). */
  def isArchive(file: File): Boolean = isArchive(file, contentFallback = false)
  /** True if `file` is an archive by name, or (when `contentFallback`) by zip magic bytes. */
  def isArchive(file: File, contentFallback: Boolean): Boolean =
    file.isFile && (isArchiveName(file.getName) || (contentFallback && hasZipContent(file)))
  def isArchiveName(fileName: String) = fileName.endsWith(".jar") || fileName.endsWith(".zip")
  /** Checks for the zip local-file-header magic number "PK\x03\x04"; false on any I/O error. */
  def hasZipContent(file: File): Boolean = try {
    Using.fileInputStream(file) { in =>
      (in.read() == 0x50) &&
        (in.read() == 0x4b) &&
        (in.read() == 0x03) &&
        (in.read() == 0x04)
    }
  } catch { case e: Exception => false }

  /** Returns all entries in 'classpath' that correspond to a compiler plugin.*/
  // A compiler plugin is identified by a scalac-plugin.xml descriptor on the classpath.
  private[sbt] def compilerPlugins(classpath: Seq[File]): Iterable[File] =
    {
      import collection.JavaConversions._
      val loader = new URLClassLoader(Path.toURLs(classpath))
      loader.getResources("scalac-plugin.xml").toList.flatMap(asFile(true))
    }

  /** Converts the given URL to a File. If the URL is for an entry in a jar, the File for the jar is returned. */
  private[sbt] def asFile(url: URL): List[File] = asFile(false)(url)
  // When jarOnly is true, plain file: URLs are skipped and only jar: URLs are resolved.
  private[sbt] def asFile(jarOnly: Boolean)(url: URL): List[File] =
    {
      try {
        url.getProtocol match {
          case "file" if !jarOnly => IO.toFile(url) :: Nil
          case "jar" =>
            // "jar:file:/path/to.jar!/entry" — keep only the part before the '!' separator.
            val path = url.getPath
            val end = path.indexOf('!')
            new File(new URI(if (end == -1) path else path.substring(0, end))) :: Nil
          case _ => Nil
        }
      } catch { case e: Exception => Nil } // malformed URLs yield no entries rather than failing
    }
}
| twitter-forks/sbt | util/classpath/src/main/scala/sbt/classpath/ClasspathUtilities.scala | Scala | bsd-3-clause | 5,305 |
package com.eevolution.context.dictionary.domain.api.service
import com.eevolution.context.dictionary.api
import com.eevolution.context.dictionary.domain.model.PrintTableFormat
/**
* Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* Email: emeris.hernandez@e-evolution.com, http://www.e-evolution.com , http://github.com/EmerisScala
* Created by emeris.hernandez@e-evolution.com , www.e-evolution.com on 10/11/17.
*/
/**
* Print Table Format Service
*/
/** Service contract for [[PrintTableFormat]] entities, keyed by an Int id. */
trait PrintTableFormatService extends api.Service[PrintTableFormat, Int] {
  // Definition: no operations beyond the generic api.Service contract.
}
| adempiere/ADReactiveSystem | dictionary-api/src/main/scala/com/eevolution/context/dictionary/domain/api/service/PrintTableFormatService.scala | Scala | gpl-3.0 | 1,246 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.stream.sql.join
import org.apache.flink.api.scala._
import org.apache.flink.table.api.TableException
import org.apache.flink.table.api.scala._
import org.apache.flink.table.planner.plan.utils.WindowJoinUtil
import org.apache.flink.table.planner.runtime.utils.JavaUserDefinedScalarFunctions.PythonScalarFunction
import org.apache.flink.table.planner.utils.{StreamTableTestUtil, TableTestBase, TableTestUtil}
import org.apache.calcite.rel.logical.LogicalJoin
import org.junit.Assert.assertEquals
import org.junit.Test
/**
 * Plan tests for interval (time-windowed) stream-stream joins.
 *
 * Each test builds a SQL query over two streams carrying proctime/rowtime
 * attributes and either verifies the optimized plan (`util.verifyPlan`) or
 * expects a [[TableException]] for unsupported window-join shapes. The last
 * two tests exercise [[WindowJoinUtil]] directly to check extracted window
 * bounds and remaining (non-window) join conditions.
 */
class WindowJoinTest extends TableTestBase {

  private val util: StreamTableTestUtil = streamTestUtil()
  util.addDataStream[(Int, String, Long)](
    "MyTable", 'a, 'b, 'c, 'proctime.proctime, 'rowtime.rowtime)
  util.addDataStream[(Int, String, Long)](
    "MyTable2", 'a, 'b, 'c, 'proctime.proctime, 'rowtime.rowtime)

  // --- Invalid window-join shapes (each must raise TableException) ---------

  /** There should exist exactly two time conditions **/
  @Test(expected = classOf[TableException])
  def testWindowJoinSingleTimeCondition(): Unit = {
    val sql =
      """
        |SELECT t2.a FROM MyTable t1 JOIN MyTable2 t2 ON
        | t1.a = t2.a AND t1.proctime > t2.proctime - INTERVAL '5' SECOND
      """.stripMargin
    util.verifyPlan(sql)
  }

  /** Both time attributes in a join condition must be of the same type **/
  @Test(expected = classOf[TableException])
  def testWindowJoinDiffTimeIndicator(): Unit = {
    val sql =
      """
        |SELECT t2.a FROM MyTable t1 JOIN MyTable2 t2 ON
        | t1.a = t2.a AND
        | t1.proctime > t2.proctime - INTERVAL '5' SECOND AND
        | t1.proctime < t2.rowtime + INTERVAL '5' SECOND
      """.stripMargin
    util.verifyPlan(sql)
  }

  /** The time conditions should be an And condition **/
  @Test(expected = classOf[TableException])
  def testWindowJoinNotCnfCondition(): Unit = {
    val sql =
      """
        |SELECT t2.a FROM MyTable t1 JOIN MyTable2 t2 ON
        | t1.a = t2.a AND
        | (t1.proctime > t2.proctime - INTERVAL '5' SECOND OR
        | t1.proctime < t2.rowtime + INTERVAL '5' SECOND)
      """.stripMargin
    util.verifyPlan(sql)
  }

  /** Validates that no rowtime attribute is in the output schema **/
  @Test(expected = classOf[TableException])
  def testNoRowtimeAttributeInResult(): Unit = {
    val sql =
      """
        |SELECT * FROM MyTable t1, MyTable2 t2 WHERE
        | t1.a = t2.a AND
        | t1.proctime BETWEEN t2.proctime - INTERVAL '5' SECOND AND t2.proctime
      """.stripMargin
    util.verifyPlan(sql)
  }

  /**
   * Currently only the inner join condition can support the Python UDF taking the inputs from
   * the left table and the right table at the same time.
   */
  @Test(expected = classOf[TableException])
  def testWindowOuterJoinWithPythonFunctionInCondition(): Unit = {
    util.addFunction("pyFunc", new PythonScalarFunction("pyFunc"))
    val sql =
      """
        |SELECT t1.a, t2.b FROM MyTable t1 LEFT OUTER JOIN MyTable2 t2 ON
        | t1.a = t2.a AND pyFunc(t1.a, t2.a) = t1.a + t2.a AND
        | t1.proctime BETWEEN t2.proctime - INTERVAL '1' HOUR AND t2.proctime + INTERVAL '1' HOUR
      """.stripMargin
    util.verifyPlan(sql)
  }

  // --- Valid inner joins (ON-clause vs. WHERE-clause, proctime vs. rowtime)

  @Test
  def testProcessingTimeInnerJoinWithOnClause(): Unit = {
    val sqlQuery =
      """
        |SELECT t1.a, t2.b FROM MyTable t1 JOIN MyTable2 t2 ON
        | t1.a = t2.a AND
        | t1.proctime BETWEEN t2.proctime - INTERVAL '1' HOUR AND t2.proctime + INTERVAL '1' HOUR
      """.stripMargin
    util.verifyPlan(sqlQuery)
  }

  @Test
  def testProcessingTimeInnerJoinWithWhereClause(): Unit = {
    val sqlQuery =
      """
        |SELECT t1.a, t2.b FROM MyTable t1, MyTable2 t2 WHERE
        | t1.a = t2.a AND
        | t1.proctime BETWEEN t2.proctime - INTERVAL '1' HOUR AND t2.proctime + INTERVAL '1' HOUR
      """.stripMargin
    util.verifyPlan(sqlQuery)
  }

  @Test
  def testRowTimeInnerJoinWithOnClause(): Unit = {
    val sqlQuery =
      """
        |SELECT t1.a, t2.b FROM MyTable t1 JOIN MyTable2 t2 ON
        | t1.a = t2.a AND
        | t1.rowtime BETWEEN t2.rowtime - INTERVAL '10' SECOND AND t2.rowtime + INTERVAL '1' HOUR
      """.stripMargin
    util.verifyPlan(sqlQuery)
  }

  @Test
  def testRowTimeInnerJoinWithWhereClause(): Unit = {
    val sqlQuery =
      """
        |SELECT t1.a, t2.b FROM MyTable t1, MyTable2 t2 WHERE
        | t1.a = t2.a AND
        | t1.rowtime BETWEEN t2.rowtime - INTERVAL '10' MINUTE AND t2.rowtime + INTERVAL '1' HOUR
      """.stripMargin
    util.verifyPlan(sqlQuery)
  }

  // --- Equality on time attributes (degenerate zero-width windows) ---------

  @Test
  def testJoinWithEquiProcTime(): Unit = {
    // TODO: this should be translated into window join
    val sqlQuery =
      """
        |SELECT t1.a, t2.b FROM MyTable t1, MyTable2 t2 WHERE
        | t1.a = t2.a AND t1.proctime = t2.proctime
      """.stripMargin
    util.verifyPlan(sqlQuery)
  }

  @Test
  def testJoinWithEquiRowTime(): Unit = {
    // TODO: this should be translated into window join
    val sqlQuery =
      """
        |SELECT t1.a, t2.b FROM MyTable t1, MyTable2 t2 WHERE
        | t1.a = t2.a AND t1.rowtime = t2.rowtime
      """.stripMargin
    util.verifyPlan(sqlQuery)
  }

  // Equi-join on a column that is a NULL literal on one side.
  @Test
  def testJoinWithNullLiteral(): Unit = {
    val sqlQuery =
      """
        |WITH T1 AS (SELECT a, b, c, proctime, CAST(null AS BIGINT) AS nullField FROM MyTable),
        | T2 AS (SELECT a, b, c, proctime, CAST(12 AS BIGINT) AS nullField FROM MyTable2)
        |
        |SELECT t2.a, t2.c, t1.c
        |FROM T1 AS t1
        |JOIN T2 AS t2 ON t1.a = t2.a AND t1.nullField = t2.nullField AND
        | t1.proctime BETWEEN t2.proctime - INTERVAL '5' SECOND AND
        | t2.proctime + INTERVAL '5' SECOND
      """.stripMargin
    util.verifyPlan(sqlQuery)
  }

  // --- Window join followed by a group-window aggregation ------------------

  @Test
  def testRowTimeInnerJoinAndWindowAggregationOnFirst(): Unit = {
    val sqlQuery =
      """
        |SELECT t1.b, SUM(t2.a) AS aSum, COUNT(t2.b) AS bCnt
        |FROM MyTable t1, MyTable2 t2
        |WHERE t1.a = t2.a AND
        | t1.rowtime BETWEEN t2.rowtime - INTERVAL '10' MINUTE AND t2.rowtime + INTERVAL '1' HOUR
        |GROUP BY TUMBLE(t1.rowtime, INTERVAL '6' HOUR), t1.b
      """.stripMargin
    util.verifyPlan(sqlQuery)
  }

  @Test
  def testRowTimeInnerJoinAndWindowAggregationOnSecond(): Unit = {
    val sqlQuery =
      """
        |SELECT t2.b, SUM(t1.a) AS aSum, COUNT(t1.b) AS bCnt
        |FROM MyTable t1, MyTable2 t2
        |WHERE t1.a = t2.a AND
        | t1.rowtime BETWEEN t2.rowtime - INTERVAL '10' MINUTE AND t2.rowtime + INTERVAL '1' HOUR
        |GROUP BY TUMBLE(t2.rowtime, INTERVAL '6' HOUR), t2.b
      """.stripMargin
    util.verifyPlan(sqlQuery)
  }

  // Tests for left outer join
  @Test
  def testProcTimeLeftOuterJoin(): Unit = {
    val sqlQuery =
      """
        |SELECT t1.a, t2.b
        |FROM MyTable t1 LEFT OUTER JOIN MyTable2 t2 ON
        | t1.a = t2.a AND
        | t1.proctime BETWEEN t2.proctime - INTERVAL '1' HOUR AND t2.proctime + INTERVAL '1' HOUR
      """.stripMargin
    util.verifyPlan(sqlQuery)
  }

  @Test
  def testRowTimeLeftOuterJoin(): Unit = {
    val sqlQuery =
      """
        |SELECT t1.a, t2.b
        |FROM MyTable t1 LEFT OUTER JOIN MyTable2 t2 ON
        | t1.a = t2.a AND
        | t1.rowtime BETWEEN t2.rowtime - INTERVAL '10' SECOND AND t2.rowtime + INTERVAL '1' HOUR
      """.stripMargin
    util.verifyPlan(sqlQuery)
  }

  // Tests for right outer join
  @Test
  def testProcTimeRightOuterJoin(): Unit = {
    val sqlQuery =
      """
        |SELECT t1.a, t2.b
        |FROM MyTable t1 RIGHT OUTER JOIN MyTable2 t2 ON
        | t1.a = t2.a AND
        | t1.proctime BETWEEN t2.proctime - INTERVAL '1' HOUR AND t2.proctime + INTERVAL '1' HOUR
      """.stripMargin
    util.verifyPlan(sqlQuery)
  }

  @Test
  def testRowTimeRightOuterJoin(): Unit = {
    val sqlQuery =
      """
        |SELECT t1.a, t2.b
        |FROM MyTable t1 RIGHT OUTER JOIN MyTable2 t2 ON
        | t1.a = t2.a AND
        | t1.rowtime BETWEEN t2.rowtime - INTERVAL '10' SECOND AND t2.rowtime + INTERVAL '1' HOUR
      """.stripMargin
    util.verifyPlan(sqlQuery)
  }

  // Tests for full outer join
  @Test
  def testProcTimeFullOuterJoin(): Unit = {
    val sqlQuery =
      """
        |SELECT t1.a, t2.b
        |FROM MyTable t1 Full OUTER JOIN MyTable2 t2 ON
        | t1.a = t2.a AND
        | t1.proctime BETWEEN t2.proctime - INTERVAL '1' HOUR AND t2.proctime + INTERVAL '1' HOUR
      """.stripMargin
    util.verifyPlan(sqlQuery)
  }

  @Test
  def testRowTimeFullOuterJoin(): Unit = {
    val sqlQuery =
      """
        |SELECT t1.a, t2.b
        |FROM MyTable t1 FULL OUTER JOIN MyTable2 t2 ON
        | t1.a = t2.a AND
        | t1.rowtime BETWEEN t2.rowtime - INTERVAL '10' SECOND AND t2.rowtime + INTERVAL '1' HOUR
      """.stripMargin
    util.verifyPlan(sqlQuery)
  }

  // Test for outer join optimization
  @Test
  def testOuterJoinOpt(): Unit = {
    val sqlQuery =
      """
        |SELECT t1.a, t2.b
        |FROM MyTable t1 FULL OUTER JOIN MyTable2 t2 ON
        | t1.a = t2.a AND
        | t1.rowtime BETWEEN t2.rowtime - INTERVAL '10' SECOND AND t2.rowtime + INTERVAL '1' HOUR
        | WHERE t1.b LIKE t2.b
      """.stripMargin
    util.verifyPlan(sqlQuery)
  }

  // Other tests

  // Checks the (leftLowerBound, leftUpperBound, time type) extracted by WindowJoinUtil
  // for a variety of equivalent boundary-condition spellings.
  @Test
  def testJoinTimeBoundary(): Unit = {
    verifyTimeBoundary(
      "t1.proctime BETWEEN t2.proctime - INTERVAL '1' HOUR AND t2.proctime + INTERVAL '1' HOUR",
      -3600000,
      3600000,
      "proctime")

    // Strict inequalities shrink the bounds by 1 ms.
    verifyTimeBoundary(
      "t1.proctime > t2.proctime - INTERVAL '1' SECOND AND " +
        "t1.proctime < t2.proctime + INTERVAL '1' SECOND",
      -999,
      999,
      "proctime")

    verifyTimeBoundary(
      "t1.rowtime >= t2.rowtime - INTERVAL '1' SECOND AND " +
        "t1.rowtime <= t2.rowtime + INTERVAL '1' SECOND",
      -1000,
      1000,
      "rowtime")

    verifyTimeBoundary(
      "t1.rowtime >= t2.rowtime AND " +
        "t1.rowtime <= t2.rowtime + INTERVAL '1' SECOND",
      0,
      1000,
      "rowtime")

    verifyTimeBoundary(
      "t1.rowtime >= t2.rowtime + INTERVAL '1' SECOND AND " +
        "t1.rowtime <= t2.rowtime + INTERVAL '10' SECOND",
      1000,
      10000,
      "rowtime")

    // Same bounds expressed with t2 on the left-hand side.
    verifyTimeBoundary(
      "t2.rowtime - INTERVAL '1' SECOND <= t1.rowtime AND " +
        "t2.rowtime + INTERVAL '10' SECOND >= t1.rowtime",
      -1000,
      10000,
      "rowtime")

    // Interval arithmetic on both sides must be normalized into a single bound.
    verifyTimeBoundary(
      "t1.rowtime - INTERVAL '2' SECOND >= t2.rowtime + INTERVAL '1' SECOND " +
        "- INTERVAL '10' SECOND AND t1.rowtime <= t2.rowtime + INTERVAL '10' SECOND",
      -7000,
      10000,
      "rowtime")

    verifyTimeBoundary(
      "t1.rowtime >= t2.rowtime - INTERVAL '10' SECOND AND " +
        "t1.rowtime <= t2.rowtime - INTERVAL '5' SECOND",
      -10000,
      -5000,
      "rowtime")
  }

  // Checks which residual predicates remain after the window bounds are extracted.
  @Test
  def testJoinRemainConditionConvert(): Unit = {
    util.addDataStream[(Int, Long, Int)]("MyTable3", 'a, 'rowtime.rowtime, 'c, 'proctime.proctime)
    util.addDataStream[(Int, Long, Int)]("MyTable4", 'a, 'rowtime.rowtime, 'c, 'proctime.proctime)
    val query =
      """
        |SELECT t1.a, t2.c FROM MyTable3 AS t1 JOIN MyTable4 AS t2 ON
        | t1.a = t2.a AND
        | t1.rowtime >= t2.rowtime - INTERVAL '10' SECOND AND
        | t1.rowtime <= t2.rowtime - INTERVAL '5' SECOND AND
        | t1.c > t2.c
      """.stripMargin
    // Only the non-time predicate t1.c > t2.c should remain.
    verifyRemainConditionConvert(
      query,
      ">($2, $6)")

    val query1 =
      """
        |SELECT t1.a, t2.c FROM MyTable3 as t1 JOIN MyTable4 AS t2 ON
        | t1.a = t2.a AND
        | t1.rowtime >= t2.rowtime - INTERVAL '10' SECOND AND
        | t1.rowtime <= t2.rowtime - INTERVAL '5' SECOND
      """.stripMargin
    // No residual predicate at all.
    verifyRemainConditionConvert(
      query1,
      "")

    util.addDataStream[(Int, Long, Int)]("MyTable5", 'a, 'b, 'c, 'proctime.proctime)
    util.addDataStream[(Int, Long, Int)]("MyTable6", 'a, 'b, 'c, 'proctime.proctime)
    val query2 =
      """
        |SELECT t1.a, t2.c FROM MyTable5 AS t1 JOIN MyTable6 AS t2 ON
        | t1.a = t2.a AND
        | t1.proctime >= t2.proctime - INTERVAL '10' SECOND AND
        | t1.proctime <= t2.proctime - INTERVAL '5' SECOND AND
        | t1.c > t2.c
      """.stripMargin
    verifyRemainConditionConvert(
      query2,
      ">($2, $6)")
  }

  /**
   * Builds a join with `timeConditionSql`, extracts the window bounds via
   * [[WindowJoinUtil.extractWindowBoundsFromPredicate]] and asserts lower
   * bound, upper bound and time-attribute type.
   */
  private def verifyTimeBoundary(
      timeConditionSql: String,
      expLeftSize: Long,
      expRightSize: Long,
      expTimeType: String): Unit = {
    val query =
      s"""
         |SELECT t1.a, t2.b FROM MyTable AS t1 JOIN MyTable2 AS t2 ON
         | t1.a = t2.a AND
         | $timeConditionSql
      """.stripMargin
    val table = util.tableEnv.sqlQuery(query)
    val relNode = TableTestUtil.toRelNode(table)
    // The join sits directly under the top-level projection.
    val joinNode = relNode.getInput(0).asInstanceOf[LogicalJoin]
    val rexNode = joinNode.getCondition
    val (windowBounds, _) = WindowJoinUtil.extractWindowBoundsFromPredicate(
      rexNode,
      joinNode.getLeft.getRowType.getFieldCount,
      joinNode.getRowType,
      joinNode.getCluster.getRexBuilder,
      util.tableEnv.getConfig)

    val timeTypeStr = if (windowBounds.get.isEventTime) "rowtime" else "proctime"
    assertEquals(expLeftSize, windowBounds.get.leftLowerBound)
    assertEquals(expRightSize, windowBounds.get.leftUpperBound)
    assertEquals(expTimeType, timeTypeStr)
  }

  /**
   * Runs `sqlQuery`, extracts window bounds from the join predicate and asserts
   * the string form of the remaining (non-window) condition.
   */
  private def verifyRemainConditionConvert(
      sqlQuery: String,
      expectConditionStr: String): Unit = {
    val table = util.tableEnv.sqlQuery(sqlQuery)
    val relNode = TableTestUtil.toRelNode(table)
    val joinNode = relNode.getInput(0).asInstanceOf[LogicalJoin]
    val joinInfo = joinNode.analyzeCondition
    val rexNode = joinInfo.getRemaining(joinNode.getCluster.getRexBuilder)
    val (_, remainCondition) =
      WindowJoinUtil.extractWindowBoundsFromPredicate(
        rexNode,
        joinNode.getLeft.getRowType.getFieldCount,
        joinNode.getRowType,
        joinNode.getCluster.getRexBuilder,
        util.tableEnv.getConfig)
    val actual: String = remainCondition.getOrElse("").toString
    assertEquals(expectConditionStr, actual)
  }
}
| bowenli86/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/join/WindowJoinTest.scala | Scala | apache-2.0 | 14,947 |
package algorithms.greedy
import org.scalatest.FunSuite
import GreedyFlorist._
/**
* Created by yujieshui on 2017/5/12.
*/
// NOTE(review): both tests only print the result instead of asserting an expected
// value, so they can never fail on a wrong answer. Consider replacing the println
// calls with assertions once the expected minimum costs are confirmed.
class GreedyFloristTest extends FunSuite {
  test("0") {
    // 3 friends buying flowers priced 2, 5 and 6.
    println(solution(Seq(2, 5, 6), 3))
  }
  test("1") {
    // 2 friends buying flowers priced 2, 5 and 6.
    println(solution(Seq(2, 5, 6), 2))
  }
}
| 1178615156/hackerrank | src/test/scala/algorithms/greedy/GreedyFloristTest.scala | Scala | apache-2.0 | 287 |
package fr.univ_lille.cristal.emeraude.chasqui.core.synchronization
import akka.actor.{Actor, ActorRef, ActorSystem, ExtendedActorSystem, Extension, ExtensionId, ExtensionIdProvider, Props}
import akka.util.Timeout
import fr.univ_lille.cristal.emeraude.chasqui.core.Node.ReceiveMessage
import fr.univ_lille.cristal.emeraude.chasqui.core.{Message, Node, SynchronizationMessage, SynchronizerStrategy}
import scala.concurrent.ExecutionContext
/** Envelope routed through the global synchronizer actor: deliver `message` from `sender` to `receiver` at virtual time `timestamp`. */
case class SynchronizeMessage(sender: ActorRef, receiver: ActorRef, timestamp: Long, message: Any)
/**
 * Synchronizer strategy that routes every message through a single global
 * synchronizer actor and keeps one global priority queue of future messages.
 */
class GlobalSynchronizerWithGlobalQueueStrategy(system: ActorSystem) extends SynchronizerStrategy {

  // NOTE(review): `sentMessagesInQuantum` is never written after initialization and
  // `receivedMessagesInQuantum` is incremented but never read in this class — confirm
  // whether these counters are dead state or consumed elsewhere.
  private var sentMessagesInQuantum = 0
  private var receivedMessagesInQuantum = 0

  // Global queue of messages scheduled for a future virtual time; the comparator is
  // inverted so that the smallest timestamp is dequeued first.
  private val messageQueue = scala.collection.mutable.PriorityQueue[Message]()(Ordering.fromLessThan((s1, s2) => s1.getTimestamp > s2.getTimestamp))

  def registerNode(node: Node): Unit = {
    // No per-node registration needed: synchronization is fully centralized.
  }

  def notifyFinishedTime(nodeActorRef: ActorRef, node: Node, t: Long, messageDelta: Int): Unit = {
    // Intentionally a no-op for this strategy.
  }

  /** Looks up the per-ActorSystem singleton synchronizer actor. */
  def getSynchronizerActor() = {
    GlobalSynchronizerWithGlobalQueueStrategyAccessor(system).instance
  }

  override def handleSynchronizationMessage(message: SynchronizationMessage, sender: ActorRef, receiver: Node, t: Long): Unit = {
    //Nothing
  }

  /** Sending is delegated to the global synchronizer actor (fire-and-forget). */
  override def sendMessage(senderNode: Node, receiverActor: ActorRef, messageTimestamp: Long, message: Any): Unit = {
    this.getSynchronizerActor() ! SynchronizeMessage(senderNode.getActorRef, receiverActor, messageTimestamp, message)
  }

  /**
   * Dispatches an incoming message on the receiving node:
   *  - in the past  -> causality error (unless it is a synchronization message),
   *  - current time -> handled immediately,
   *  - future time  -> parked in the global queue.
   */
  override def scheduleMessage(receiverNode: Node, senderActor: ActorRef, messageTimestamp: Long, message: Any): Unit = {
    this.receivedMessagesInQuantum += 1
    if (messageTimestamp < receiverNode.getCurrentSimulationTime) {
      //The message is in the past.
      //This is a Causality error
      if (!message.isInstanceOf[SynchronizationMessage]) {
        receiverNode.getCausalityErrorStrategy.handleCausalityError(messageTimestamp, receiverNode.getCurrentSimulationTime, receiverNode, senderActor, message)
      }
      return
    }
    if (receiverNode.getCurrentSimulationTime == messageTimestamp) {
      receiverNode.handleIncomingMessage(message, senderActor)
    } else {
      this.queueMessage(senderActor, messageTimestamp, message)
    }
    // NOTE(review): the quantum is notified even on the causality-error-free queueing
    // path — confirm this is intended and not only for the handled-immediately case.
    receiverNode.notifyFinishedQuantum()
  }

  private def queueMessage(senderActor: ActorRef, messageTimestamp: Long, message: Any) = {
    messageQueue += new Message(message, messageTimestamp, senderActor)
  }

  override def getMessageQueue: scala.collection.mutable.PriorityQueue[Message] = this.messageQueue
}
/** **************************************************************************
*
* Actor Singleton Accessor
*
* ***************************************************************************/
/**
 * Singleton actor that delivers synchronized messages. Messages whose timestamp
 * matches the current virtual time are forwarded immediately; others are parked
 * in a local priority queue (smallest timestamp first).
 *
 * NOTE(review): this actor looks unfinished — `getCurrentTimestamp` is never
 * advanced past 0, `remainingAcks` is only incremented, the parked queue is
 * never drained, and `nodesFinishedThisQuantum` /
 * `messagesToBeProcessedFollowingQuantums` are never used. Confirm against the
 * intended synchronization protocol before relying on this strategy.
 */
class GlobalSynchronizerWithGlobalQueueSingletonActor extends Actor {
  import scala.concurrent.duration._
  implicit val ec = ExecutionContext.Implicits.global
  implicit lazy val timeout = Timeout(5 seconds)

  // Current virtual time of the simulation as seen by the synchronizer.
  private var getCurrentTimestamp = 0
  // Count of deliveries still awaiting acknowledgement.
  private var remainingAcks = 0
  // Parked messages for future timestamps, smallest timestamp dequeued first.
  private val messageQueue = scala.collection.mutable.PriorityQueue[SynchronizeMessage]()(Ordering.fromLessThan((s1, s2) => s1.timestamp > s2.timestamp))

  val nodesFinishedThisQuantum = new collection.mutable.HashSet[ActorRef]()
  var messagesToBeProcessedFollowingQuantums: Int = 0

  /** Forwards `message` now if it is due at the current timestamp, otherwise parks it. */
  def synchronizeMessage(aSynchronizeMessage: SynchronizeMessage): Unit = {
    if (this.getCurrentTimestamp == aSynchronizeMessage.timestamp) {
      aSynchronizeMessage.receiver ! ReceiveMessage(aSynchronizeMessage.message, aSynchronizeMessage.sender)
      this.remainingAcks += 1
    } else {
      this.messageQueue += aSynchronizeMessage
    }
  }

  override def receive: Receive = {
    case synchronizeMessage: SynchronizeMessage => this.synchronizeMessage(synchronizeMessage)
  }
}
/** **************************************************************************
*
* Actor Singleton Accessor
*
* ***************************************************************************/
/** Akka Extension holding the single synchronizer actor instance for an ActorSystem. */
class GlobalSynchronizerWithGlobalQueueSingleton(system: ActorSystem, props: Props, name: String) extends Extension {
  // Created eagerly when the extension is first materialized for the system.
  val instance: ActorRef = system.actorOf(props, name)
}
/**
 * ExtensionId giving per-ActorSystem singleton access to the synchronizer actor:
 * `GlobalSynchronizerWithGlobalQueueStrategyAccessor(system).instance`.
 */
object GlobalSynchronizerWithGlobalQueueStrategyAccessor extends ExtensionId[GlobalSynchronizerWithGlobalQueueSingleton] with ExtensionIdProvider {
  final override def lookup = this
  final override def createExtension(system: ExtendedActorSystem) = new GlobalSynchronizerWithGlobalQueueSingleton(system, instanceProps, instanceName)
  lazy val instanceProps = Props[GlobalSynchronizerWithGlobalQueueSingletonActor]()
  lazy val instanceName = "global-synchronizer-global-queue-actor"
}
package beam.sim.vehiclesharing
import beam.agentsim.agents.vehicles.{BeamVehicle, BeamVehicleType}
import beam.agentsim.events.SpaceTime
import beam.agentsim.infrastructure.taz.{TAZ, TAZTreeMap}
import beam.router.BeamSkimmer
import beam.router.Modes.BeamMode
import beam.sim.BeamServices
import org.matsim.api.core.v01.Id
import scala.collection.mutable
/**
 * Repositioning algorithm for a shared-vehicle fleet based on observed
 * availability and unmet demand from the previous simulation iteration.
 *
 * On construction it precomputes, per reposition time bin and TAZ:
 *  - the minimum observed vehicle availability, and
 *  - the number of inquiries that did not result in a boarding (unmet demand).
 * At run time it matches oversupplied TAZs to undersupplied ones and moves
 * vehicles between them.
 */
case class AvailabilityBasedRepositioning(
  repositionTimeBin: Int,
  statTimeBin: Int,
  matchLimit: Int,
  vehicleManager: Id[VehicleManager],
  beamServices: BeamServices,
  beamSkimmer: BeamSkimmer
) extends RepositionAlgorithm {

  // One candidate TAZ for repositioning: either a surplus (availableVehicles > 0)
  // or a deficit (shortage > 0), never both.
  case class RepositioningRequest(taz: TAZ, availableVehicles: Int, shortage: Int)

  // (repositionBin, tazId) -> minimum availability observed in that bin.
  val minAvailabilityMap = mutable.HashMap.empty[(Int, Id[TAZ]), Int]
  // (repositionBin, tazId) -> inquiries minus boardings (proxy for unmet demand).
  val unboardedVehicleInquiry = mutable.HashMap.empty[(Int, Id[TAZ]), Int]
  val orderingAvailVeh = Ordering.by[RepositioningRequest, Int](_.availableVehicles)
  val orderingShortage = Ordering.by[RepositioningRequest, Int](_.shortage)

  // Precompute both maps for every TAZ and every reposition bin of the day
  // (108000 s horizon — presumably a 30 h simulation day; confirm).
  beamServices.beamScenario.tazTreeMap.getTAZs.foreach { taz =>
    (0 to 108000 / repositionTimeBin).foreach { i =>
      val time = i * repositionTimeBin
      val availVal = getCollectedDataFromPreviousSimulation(time, taz.tazId, RepositionManager.availability)
      // Minimum over the bin; 0 when no observation exists.
      val availValMin = availVal.drop(1).foldLeft(availVal.headOption.getOrElse(0.0).toInt) { (minV, cur) =>
        Math.min(minV, cur.toInt)
      }
      minAvailabilityMap.put((i, taz.tazId), availValMin)
      val inquiryVal = getCollectedDataFromPreviousSimulation(time, taz.tazId, RepositionManager.inquiry).sum.toInt
      val boardingVal = getCollectedDataFromPreviousSimulation(time, taz.tazId, RepositionManager.boarded).sum.toInt
      unboardedVehicleInquiry.put((i, taz.tazId), inquiryVal - boardingVal)
    }
  }

  /** Reads skim values labeled `label` for the stat bins covered by one reposition bin. */
  def getCollectedDataFromPreviousSimulation(time: Int, idTAZ: Id[TAZ], label: String) = {
    val fromBin = time / statTimeBin
    val untilBin = (time + repositionTimeBin) / statTimeBin
    beamSkimmer.getPreviousSkimPlusValues(fromBin, untilBin, idTAZ, vehicleManager, label)
  }

  /**
   * Selects vehicles to move for the bin containing `now`.
   *
   * Supply is taken from the current bin's minimum availability, demand from the
   * *next* bin's unmet inquiries. The top `matchLimit` TAZs on each side are
   * greedily paired (each surplus TAZ with its closest-in-travel-time deficit
   * TAZ), then concrete vehicles located in the surplus TAZ are picked.
   *
   * @return tuples of (vehicle, origin space-time, origin TAZ, destination space-time, destination TAZ)
   */
  override def getVehiclesForReposition(
    now: Int,
    timeBin: Int,
    availableFleet: List[BeamVehicle]
  ): List[(BeamVehicle, SpaceTime, Id[TAZ], SpaceTime, Id[TAZ])] = {

    val oversuppliedTAZ = mutable.TreeSet.empty[RepositioningRequest](orderingAvailVeh)
    val undersuppliedTAZ = mutable.TreeSet.empty[RepositioningRequest](orderingShortage)

    val nowRepBin = now / timeBin
    val futureRepBin = nowRepBin + 1
    beamServices.beamScenario.tazTreeMap.getTAZs.foreach { taz =>
      val availValMin = minAvailabilityMap((nowRepBin, taz.tazId))
      val InquiryUnboarded = unboardedVehicleInquiry((futureRepBin, taz.tazId))
      if (availValMin > 0) {
        oversuppliedTAZ.add(RepositioningRequest(taz, availValMin, 0))
      } else if (InquiryUnboarded > 0) {
        undersuppliedTAZ.add(RepositioningRequest(taz, 0, InquiryUnboarded))
      }
    }

    val topOversuppliedTAZ = oversuppliedTAZ.take(matchLimit)
    val topUndersuppliedTAZ = undersuppliedTAZ.take(matchLimit)
    // (origin, destination, travel time, number of vehicles to move)
    val ODs = new mutable.ListBuffer[(RepositioningRequest, RepositioningRequest, Int, Int)]
    while (topOversuppliedTAZ.nonEmpty && topUndersuppliedTAZ.nonEmpty) {
      val org = topOversuppliedTAZ.head
      var destTimeOpt: Option[(RepositioningRequest, Int)] = None
      // Pick the deficit TAZ with the smallest skim travel time from `org`.
      topUndersuppliedTAZ.foreach { dst =>
        val skim = beamSkimmer.getTimeDistanceAndCost(
          org.taz.coord,
          dst.taz.coord,
          now,
          BeamMode.CAR,
          Id.create( // FIXME Vehicle type borrowed from ridehail -- pass the vehicle type of the car sharing fleet instead
            beamServices.beamConfig.beam.agentsim.agents.rideHail.initialization.procedural.vehicleTypeId,
            classOf[BeamVehicleType]
          )
        )
        if (destTimeOpt.isEmpty || (destTimeOpt.isDefined && skim.time < destTimeOpt.get._2)) {
          destTimeOpt = Some((dst, skim.time))
        }
      }
      destTimeOpt foreach {
        case (dst, tt) =>
          // Move as many vehicles as both sides allow; re-insert any remainder so
          // a TAZ can participate in further pairings.
          val fleetSize = Math.min(org.availableVehicles, dst.shortage)
          topOversuppliedTAZ.remove(org)
          if (org.availableVehicles > fleetSize) {
            topOversuppliedTAZ.add(org.copy(availableVehicles = org.availableVehicles - fleetSize))
          }
          topUndersuppliedTAZ.remove(dst)
          if (dst.shortage > fleetSize) {
            topUndersuppliedTAZ.add(dst.copy(shortage = dst.shortage - fleetSize))
          }
          ODs.append((org, dst, tt, fleetSize))
      }
    }

    val vehiclesForReposition = mutable.ListBuffer.empty[(BeamVehicle, SpaceTime, Id[TAZ], SpaceTime, Id[TAZ])]
    // NOTE(review): seeding with the wall-clock time makes runs non-reproducible;
    // consider a configured seed if repeatability matters.
    val rand = new scala.util.Random(System.currentTimeMillis())
    var fleetTemp = availableFleet
    ODs.foreach {
      case (org, dst, tt, fleetSizeToReposition) =>
        val arrivalTime = now + tt
        val vehiclesForRepositionTemp =
          mutable.ListBuffer.empty[(BeamVehicle, SpaceTime, Id[TAZ], SpaceTime, Id[TAZ])]
        // Pick up to `fleetSizeToReposition` vehicles currently parked in the origin TAZ
        // and send each to a random location inside the destination TAZ.
        fleetTemp
          .filter(
            v =>
              org.taz == beamServices.beamScenario.tazTreeMap
                .getTAZ(v.spaceTime.loc.getX, v.spaceTime.loc.getY)
          )
          .take(fleetSizeToReposition)
          .map(
            (
              _,
              SpaceTime(org.taz.coord, now),
              org.taz.tazId,
              SpaceTime(TAZTreeMap.randomLocationInTAZ(dst.taz, rand), arrivalTime),
              dst.taz.tazId
            )
          )
          .foreach(vehiclesForRepositionTemp.append(_))
        // Deduct the moved vehicles from the bin's availability and from the pool.
        val orgKey = (nowRepBin, org.taz.tazId)
        minAvailabilityMap.update(orgKey, minAvailabilityMap(orgKey) - vehiclesForRepositionTemp.size)
        fleetTemp = fleetTemp.filter(x => !vehiclesForRepositionTemp.exists(_._1 == x))
        vehiclesForReposition.appendAll(vehiclesForRepositionTemp)
    }

    vehiclesForReposition.toList
  }
}
| colinsheppard/beam | src/main/scala/beam/sim/vehiclesharing/AvailabilityBasedRepositioning.scala | Scala | gpl-3.0 | 5,897 |
package tethys.readers
import org.scalatest.matchers.should.Matchers
import org.scalatest.flatspec.AnyFlatSpec
import tethys.JsonReader
import tethys.commons.{Token, TokenNode}
import tethys.commons.TokenNode._
import tethys.readers.JsonReaderBuilderTest._
import tethys.readers.tokens.QueueIterator
/**
 * Tests for tethys' `JsonReader.builder` DSL: building readers field by field,
 * selecting a reader from a discriminator field, strict (unknown-field-rejecting)
 * readers, and builders with more than 22 fields.
 */
class JsonReaderBuilderTest extends AnyFlatSpec with Matchers {
  behavior of "JsonReaderBuilder"

  /** Reads `nodes` as an `A` via a fresh token iterator, failing the test on error,
    * and asserts the iterator is fully consumed afterwards. */
  def read[A: JsonReader](nodes: List[TokenNode]): A = {
    val it = QueueIterator(nodes)
    val res = it.readJson[A].fold(throw _, identity)
    it.currentToken() shouldBe Token.Empty
    res
  }

  it should "build reader from fields" in {
    implicit val reader: JsonReader[B] = {
      JsonReader.builder
        .addField[Int]("i")
        .buildReader(i => B(i))
    }
    read[B](obj("i" -> 1)) shouldBe B(1)
  }

  // The "clazz" field acts as a discriminator selecting the concrete reader;
  // field order in the JSON must not matter.
  it should "build selecting reader from fields" in {
    implicit val readerB: JsonReader[B] = {
      JsonReader.builder
        .addField[Int]("i")
        .buildReader(i => B(i))
    }
    implicit val readerC: JsonReader[C] = {
      JsonReader.builder
        .addField[String]("s")
        .buildReader(s => C(s))
    }
    implicit val readerA: JsonReader[A] = {
      JsonReader.builder
        .addField[String]("clazz")
        .selectReader[A] {
          case "B" => readerB
          case "C" => readerC
        }
    }
    read[A](obj("clazz" -> "B", "i" -> 2)) shouldBe B(2)
    read[A](obj("s" -> "str", "clazz" -> "C")) shouldBe C("str")
  }

  // Missing optional field ("opt") must default to None.
  it should "build reader for fat object" in {
    implicit val reader: JsonReader[FatClass] = {
      JsonReader.builder
        .addField[Int]("a")
        .addField[String]("b")
        .addField[Boolean]("c")
        .addField[Seq[String]]("d")
        .addField[Double]("e")
        .addField[Option[Int]]("opt")
        .buildReader(FatClass.apply)
    }
    read[FatClass](obj(
      "a" -> 1,
      "b" -> "s",
      "c" -> true,
      "d" -> arr("a", "b", "c"),
      "e" -> 4
    )) shouldBe FatClass(
      a = 1,
      b = "s",
      c = true,
      d = Seq("a", "b", "c"),
      e = 4.0D,
      opt = None
    )
  }

  // A strict reader rejects fields that were not declared on the builder.
  it should "build strict reader from fields" in {
    implicit val reader: JsonReader[B] = {
      JsonReader.builder
        .addField[Option[Int]]("i")
        .buildStrictReader(i => B(i.getOrElse(0)))
    }
    the [ReaderError] thrownBy read[B](obj("j" -> 1)) should have message "Illegal json at '[ROOT]': unexpected field 'j', expected one of 'i'"
  }

  // Past 22 fields the builder folds the first 22 into a tuple argument.
  it should "allow to build reader with more than 22 fields" in {
    implicit val reader: JsonReader[((Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int), Int, Int)] = {
      JsonReader.builder
        .addField[Int]("f1")
        .addField[Int]("f2")
        .addField[Int]("f3")
        .addField[Int]("f4")
        .addField[Int]("f5")
        .addField[Int]("f6")
        .addField[Int]("f7")
        .addField[Int]("f8")
        .addField[Int]("f9")
        .addField[Int]("f10")
        .addField[Int]("f11")
        .addField[Int]("f12")
        .addField[Int]("f13")
        .addField[Int]("f14")
        .addField[Int]("f15")
        .addField[Int]("f16")
        .addField[Int]("f17")
        .addField[Int]("f18")
        .addField[Int]("f19")
        .addField[Int]("f20")
        .addField[Int]("f21")
        .addField[Int]("f22")
        .addField[Int]("f23")
        .addField[Int]("f24")
        .buildReader((tuple, f23, f24) => (tuple, f23, f24))
    }
    read(obj(
      "f1" -> 1,
      "f2" -> 2,
      "f3" -> 3,
      "f4" -> 4,
      "f5" -> 5,
      "f6" -> 6,
      "f7" -> 7,
      "f8" -> 8,
      "f9" -> 9,
      "f10" -> 10,
      "f11" -> 11,
      "f12" -> 12,
      "f13" -> 13,
      "f14" -> 14,
      "f15" -> 15,
      "f16" -> 16,
      "f17" -> 17,
      "f18" -> 18,
      "f19" -> 19,
      "f20" -> 20,
      "f21" -> 21,
      "f22" -> 22,
      "f23" -> 23,
      "f24" -> 24
    ))(reader) shouldBe ((1 ,2 ,3 ,4 ,5 ,6 ,7 ,8 ,9 ,10 ,11 ,12 ,13 ,14 ,15 ,16 ,17 ,18 ,19 ,20 ,21 ,22), 23, 24)
  }
}
/** Fixture types for [[JsonReaderBuilderTest]]: a tiny sealed-like hierarchy and a wide record. */
object JsonReaderBuilderTest {
  trait A
  // Variant with a single Int field, selected by discriminator "B".
  case class B(i: Int) extends A
  // Variant with a single String field, selected by discriminator "C".
  case class C(s: String) extends A

  // Record with several field types, including an optional one.
  case class FatClass(a: Int,
                      b: String,
                      c: Boolean,
                      d: Seq[String],
                      e: Double,
                      opt: Option[Int])
}
| tethys-json/tethys | modules/core/src/test/scala/tethys/readers/JsonReaderBuilderTest.scala | Scala | apache-2.0 | 4,412 |
package fpinScala.fpErrors.Option

// Exercises from "Functional Programming in Scala", chapter 4: handling
// errors without exceptions. The original file kept `def`s at the top level
// of the package (which does not compile in Scala 2); they now live on the
// trait and its companion object, as the book intends. The study notes from
// the original are preserved as comments next to the code they discuss.

sealed trait Option[+A] {

  /** Apply `f` to the wrapped value, if any. */
  def map[B](f: A => B): Option[B] = this match {
    case None    => None
    case Some(a) => Some(f(a))
  }

  /*
  def flatMap[B](f: A => Option[B]): Option[B] = {
    map(f(a)) getOrElse None
  }
  the parameter of the function map should be a function!
  */
  /** Apply `f`, which may itself fail, to the wrapped value.
    * `map(f)` yields an `Option[Option[B]]`; `getOrElse` unwraps one level.
    */
  def flatMap[B](f: A => Option[B]): Option[B] =
    map(f) getOrElse None

  /** Same as [[flatMap]], written with explicit pattern matching. */
  def flatMap2[B](f: A => Option[B]): Option[B] = this match {
    case None    => None
    case Some(a) => f(a)
  }

  /** Return the wrapped value, or `default` when empty. `default` is
    * by-name, so it is evaluated only when actually needed.
    */
  def getOrElse[B >: A](default: => B): B = this match {
    case None    => default
    case Some(a) => a
  }

  /** Return this option when non-empty, otherwise `ob`. */
  def orElse[B >: A](ob: => Option[B]): Option[B] =
    this map (Some(_)) getOrElse ob

  /** [[orElse]] via explicit pattern matching. */
  def orElse_1[B >: A](ob: => Option[B]): Option[B] = this match {
    case None => ob
    case _    => this
  }

  /** Keep the value only when it satisfies the predicate. */
  def filter(f: A => Boolean): Option[A] = this match {
    case Some(a) if f(a) => this
    case _               => None
  }
}

// Note: the extends clause must not repeat the variance annotation —
// `extends Option[A]`, not `extends Option[+A]`.
case class Some[+A](get: A) extends Option[A]
case object None extends Option[Nothing]

object Option {

  /** Mean of a sequence; None for the empty sequence. */
  def mean(xs: Seq[Double]): Option[Double] =
    if (xs.isEmpty) None
    else Some(xs.sum / xs.length)

  /*
  def variance(xs: Seq[Double]): Option[Double] = {
    val m = this.Mean()
    xs.flatMap(math.pow(xs-m,2)) // flatMap expects a function
  }
  */
  /** Variance expressed with mean and flatMap. */
  def variance(xs: Seq[Double]): Option[Double] =
    mean(xs) flatMap (m => mean(xs.map(x => math.pow(x - m, 2))))

  /** Same as [[variance]]; kept for compatibility with the original file
    * (which had a `flatmap` typo that did not compile).
    */
  def variance_right(xs: Seq[Double]): Option[Double] =
    mean(xs) flatMap (m => mean(xs.map(x => math.pow(x - m, 2))))

  // Important: lift turns an ordinary A => B into an Option-aware function.
  def lift[A, B](f: A => B): Option[A] => Option[B] = _ map f

  // Lifting math.abs as an example.
  val absO: Option[Double] => Option[Double] = lift(math.abs)

  /** Wrap a computation that may throw into an Option (argument is lazy). */
  def Try[A](a: => A): Option[A] =
    try Some(a)
    catch { case e: Exception => None }

  /** Example domain function from the book. The actual rate formula is not
    * part of the exercise, so any deterministic stub will do here.
    */
  def insuranceRateQuote(age: Int, numberOfSpeedingTickets: Int): Double =
    age.toDouble * numberOfSpeedingTickets

  /** Parse both fields, then combine the two optional results with map2. */
  def parseInsuranceRateQuote(
      age: String,
      numberOfSpeedingTickets: String): Option[Double] = {
    val optAge: Option[Int] = Try { age.toInt }
    val optTickets: Option[Int] = Try { numberOfSpeedingTickets.toInt }
    map2(optAge, optTickets)(insuranceRateQuote)
  }

  /** Combine two options with `f`; None as soon as either side is empty. */
  def map2[A, B, C](a: Option[A], b: Option[B])(f: (A, B) => C): Option[C] =
    a flatMap (aa =>
      b map (bb =>
        f(aa, bb)))

  /** map2 written as a for-comprehension (desugars to flatMap/map). */
  def map2for[A, B, C](a: Option[A], b: Option[B])(f: (A, B) => C): Option[C] =
    for {
      aa <- a
      bb <- b
    } yield f(aa, bb)

  /** Turn a list of options into an option of a list; None as soon as any
    * element is None.
    */
  def sequence[A](a: List[Option[A]]): Option[List[A]] = a match {
    case Nil    => Some(Nil)
    case h :: t => h flatMap (hh => sequence(t) map (hh :: _))
  }

  /** Parse every string; None when any of them fails to parse. */
  def parseInt(a: List[String]): Option[List[Int]] =
    sequence(a map (x => Try(x.toInt)))

  /** Single-pass combination of map and sequence. */
  def traverse[A, B](a: List[A])(f: A => Option[B]): Option[List[B]] =
    a.foldRight[Option[List[B]]](Some(Nil))((h, t) => map2(f(h), t)(_ :: _))
}
package com.github.agourlay.cornichon.steps.wrapped
import cats.data.{ NonEmptyList, StateT }
import cats.effect.IO
import com.github.agourlay.cornichon.core._
import com.github.agourlay.cornichon.core.Done._
// Wrapper step that runs its nested steps `occurrence` times in a row.
// When `indexName` is set, the 1-based iteration number is published in the
// session under that key before each iteration.
case class RepeatStep(nested: List[Step], occurrence: Int, indexName: Option[String]) extends WrapperStep {
  require(occurrence > 0, "repeat block must contain a positive number of occurrence")
  val title = s"Repeat block with occurrence '$occurrence'"
  override val stateUpdate: StepState = StateT { runState =>
    // Runs one iteration of the nested steps and recurses while they succeed,
    // stopping either at the first failure or after `occurrence` iterations.
    // The returned Int is the index of the last iteration that was attempted.
    def repeatSuccessSteps(retriesNumber: Int, runState: RunState): IO[(Int, RunState, Either[FailedStep, Done])] = {
      // reset logs at each loop to have the possibility to not aggregate in failure case
      val rs = runState.resetLogStack
      val runStateWithIndex = indexName.fold(rs)(in => rs.addToSession(in, (retriesNumber + 1).toString))
      ScenarioRunner.runStepsShortCircuiting(nested, runStateWithIndex).flatMap {
        case (onceMoreRunState, stepResult) =>
          stepResult.fold(
            failed => {
              // In case of failure only the logs of the last run are shown to avoid giant traces.
              IO.pure((retriesNumber, onceMoreRunState, Left(failed)))
            },
            _ => {
              // Carry the session forward but keep only the latest log stack.
              val successState = runState.withSession(onceMoreRunState.session).recordLogStack(onceMoreRunState.logStack)
              // only show last successful run to avoid giant traces.
              if (retriesNumber == occurrence - 1) IO.pure((retriesNumber, successState, rightDone))
              else repeatSuccessSteps(retriesNumber + 1, runState.withSession(onceMoreRunState.session))
            }
          )
      }
    }
    // `.timed` pairs the result with the wall-clock duration of the whole block.
    repeatSuccessSteps(0, runState.nestedContext)
      .timed
      .map {
        case (executionTime, run) =>
          val (retries, repeatedState, report) = run
          val depth = runState.depth
          // Wrap the nested logs with a success/failure header carrying the timing.
          val (logStack, res) = report.fold(
            failedStep => {
              val wrappedLogStack = FailureLogInstruction(s"Repeat block with occurrence '$occurrence' failed after '$retries' occurrence", depth, Some(executionTime)) +: repeatedState.logStack :+ failedTitleLog(depth)
              val artificialFailedStep = FailedStep.fromSingle(failedStep.step, RepeatBlockContainFailedSteps(retries, failedStep.errors))
              (wrappedLogStack, Left(artificialFailedStep))
            },
            _ => {
              val wrappedLockStack = SuccessLogInstruction(s"Repeat block with occurrence '$occurrence' succeeded", depth, Some(executionTime)) +: repeatedState.logStack :+ successTitleLog(depth)
              (wrappedLockStack, rightDone)
            }
          )
          (runState.mergeNested(repeatedState, logStack), res)
      }
  }
}
// Error raised when one iteration of a repeat block fails; wraps the errors
// produced by the failing iteration.
case class RepeatBlockContainFailedSteps(failedOccurrence: Int, errors: NonEmptyList[CornichonError]) extends CornichonError {
  lazy val baseErrorMessage: String = s"Repeat block failed at occurrence $failedOccurrence"
  override val causedBy: List[CornichonError] = errors.toList
}
| agourlay/cornichon | cornichon-core/src/main/scala/com/github/agourlay/cornichon/steps/wrapped/RepeatStep.scala | Scala | apache-2.0 | 3,032 |
package db.generators
import db.Authorization
import io.apibuilder.api.v0.models.{GeneratorForm, GeneratorService, GeneratorServiceForm, GeneratorWithService}
import io.apibuilder.generator.v0.models.Generator
import java.util.UUID
trait Helpers extends db.Helpers {

  /** Create and persist a generator service from the given (or a default) form. */
  def createGeneratorService(
    form: GeneratorServiceForm = createGeneratorServiceForm()
  ): GeneratorService = servicesDao.create(testUser, form)

  /** Build a service form pointing at a unique, random test URI. */
  def createGeneratorServiceForm(
    uri: String = s"http://test.generator.${UUID.randomUUID}"
  ): GeneratorServiceForm = GeneratorServiceForm(uri = uri)

  /** Upsert a generator for the service, then read it back from the DAO.
    * Aborts the test run when the freshly written generator cannot be found.
    */
  def createGenerator(
    service: GeneratorService = createGeneratorService()
  ): GeneratorWithService = {
    val form = createGeneratorForm(service = service)
    generatorsDao.upsert(testUser, form)

    val lookup = generatorsDao.findAll(
      Authorization.All,
      serviceGuid = Some(service.guid),
      key = Some(form.generator.key),
      limit = 1
    )
    lookup.headOption match {
      case Some(found) => found
      case None        => sys.error("Failed to create generator")
    }
  }

  /** Build a generator form with a unique key/name pair. */
  def createGeneratorForm(
    service: GeneratorService = createGeneratorService(),
    attributes: Seq[String] = Nil
  ): GeneratorForm = {
    val suffix = UUID.randomUUID.toString.toLowerCase
    GeneratorForm(
      serviceGuid = service.guid,
      generator = Generator(
        key = s"test_$suffix",
        name = s"Test $suffix",
        description = None,
        language = None,
        attributes = attributes
      )
    )
  }
}
| apicollective/apibuilder | api/test/db/generators/Helpers.scala | Scala | mit | 1,495 |
/**
* Copyright (c) 2002-2012 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.executionplan.builders
import org.neo4j.cypher.internal.executionplan.PlanBuilder
import org.neo4j.cypher.internal.commands._
import org.neo4j.helpers.ThisShouldNotHappenError
import org.neo4j.graphdb
import graphdb.{Node, GraphDatabaseService}
import org.neo4j.cypher.internal.pipes.{ParameterPipe, TraversalMatchPipe, ExecutionContext}
import org.neo4j.cypher.internal.pipes.matching.{MonoDirectionalTraversalMatcher, BidirectionalTraversalMatcher}
import org.neo4j.cypher.internal.executionplan.ExecutionPlanInProgress
import scala.Some
import org.neo4j.cypher.internal.commands.NodeByIndex
import org.neo4j.cypher.internal.commands.NodeByIndexQuery
// Plan builder that replaces the longest chain of unsolved relationship
// patterns with a single traversal-matcher pipe, anchored on one (mono-
// directional) or two (bidirectional) indexed/identified start points.
class TraversalMatcherBuilder(graph: GraphDatabaseService) extends PlanBuilder {
  // Consumes the longest trail found in the query: marks its patterns and
  // start items as solved and appends a TraversalMatchPipe to the plan.
  def apply(plan: ExecutionPlanInProgress): ExecutionPlanInProgress = extractExpanderStepsFromQuery(plan) match {
    case None => throw new ThisShouldNotHappenError("Andres", "This plan should not have been accepted")
    case Some(longestPath) =>
      val LongestTrail(start, end, longestTrail) = longestPath
      val unsolvedItems = plan.query.start.filter(_.unsolved)
      val (startToken, startNodeFn) = identifier2nodeFn(graph, start, unsolvedItems)
      // One anchored end point -> mono-directional matcher; two -> bidirectional.
      val (matcher,tokens) = if (end.isEmpty) {
        val matcher = new MonoDirectionalTraversalMatcher(longestPath.step, startNodeFn)
        (matcher, Seq(startToken))
      } else {
        val (endToken, endNodeFn) = identifier2nodeFn(graph, end.get, unsolvedItems)
        val matcher = new BidirectionalTraversalMatcher(longestPath.step, startNodeFn, endNodeFn)
        (matcher, Seq(startToken, endToken))
      }
      val solvedPatterns = longestTrail.patterns
      // Rewrite the query: everything the trail covers is now solved.
      val newQ = plan.query.copy(
        patterns = plan.query.patterns.filterNot(p => solvedPatterns.contains(p.token)) ++ solvedPatterns.map(Solved(_)),
        start = plan.query.start.filterNot(tokens.contains) ++ tokens.map(_.solve)
      )
      val pipe = new TraversalMatchPipe(plan.pipe, matcher, longestTrail)
      plan.copy(pipe = pipe, query = newQ)
  }
  // Resolves an identifier to its unsolved start item plus a function that
  // produces the candidate nodes for it.
  // NOTE(review): `.head` assumes a matching unsolved start item always
  // exists — presumably guaranteed because trails are built from these very
  // start points (see extractExpanderStepsFromQuery); TODO confirm.
  def identifier2nodeFn(graph: GraphDatabaseService, identifier: String, unsolvedItems: Seq[QueryToken[StartItem]]):
  (QueryToken[StartItem], (ExecutionContext) => Iterable[Node]) = {
    val token = unsolvedItems.filter { (item) => identifier == item.token.identifierName }.head
    (token, IndexQueryBuilder.getNodeGetter(token.token, graph))
  }
  // Applicable only when a trail exists and nothing has been planned yet
  // (the pipe is still the initial ParameterPipe).
  def canWorkWith(plan: ExecutionPlanInProgress) = {
    val steps = extractExpanderStepsFromQuery(plan)
    steps.nonEmpty && plan.pipe.isInstanceOf[ParameterPipe]
  }
  // Collects candidate start identifiers (index/id based), the mandatory
  // non-loop relationship patterns, and the unsolved predicates, then asks
  // TrailBuilder for the longest trail through them.
  private def extractExpanderStepsFromQuery(plan: ExecutionPlanInProgress): Option[LongestTrail] = {
    val startPoints = plan.query.start.flatMap {
      case Unsolved(NodeByIndexQuery(id, _, _)) => Some(id)
      case Unsolved(NodeByIndex(id, _, _, _)) => Some(id)
      case Unsolved(NodeById(id, _)) => Some(id)
      case _ => None
    }
    val pattern = plan.query.patterns.flatMap {
      case Unsolved(r: RelatedTo) if !r.optional && r.left != r.right => Some(r)
      case _ => None
    }
    val preds = plan.query.where.filter(_.unsolved).map(_.token)
    TrailBuilder.findLongestTrail(pattern, startPoints, preds)
  }
  def priority = PlanBuilder.TraversalMatcher
}
| dksaputra/community | cypher/src/main/scala/org/neo4j/cypher/internal/executionplan/builders/TraversalMatcherBuilder.scala | Scala | gpl-3.0 | 4,197 |
import android.support.test.runner.AndroidJUnit4
import com.github.aafa.model.User
import io.realm.RealmResults
import org.junit.Test
import org.junit.runner.RunWith
/**
* Created by Alexey Afanasev on 25.02.16.
*/
@RunWith(classOf[AndroidJUnit4])
class RealmTestCase extends AbstractRealmTestCase {

  /** An inserted user is retrievable both by id and by name. */
  @Test
  def createUser() = {
    addUser(10, "demo")
    // `new Integer(...)` is deprecated since Java 9; box via Integer.valueOf
    // instead (same java.lang.Integer static type, so the same Realm
    // `equalTo` overload is selected).
    val firstUser: User = realm.where(classOf[User]).equalTo("id", Integer.valueOf(10)).findFirst()
    assert(firstUser.name == "demo")
    val tenthUser: User = realm.where(classOf[User]).equalTo("name", "demo").findFirst()
    assert(tenthUser.id == 10)
  }

  /** Persist a user with the given id and name inside a Realm transaction. */
  def addUser(id: Long = 1, name: String = "demo"): Unit = {
    val user: User = new User
    user.id = id
    user.name = name
    realmTransaction(_.copyToRealm(user))
  }

  /** A removed user no longer shows up in queries. */
  @Test
  def deleteUser() = {
    addUser(2, "demo")
    val toDelete = realm.where(classOf[User]).equalTo("id", Integer.valueOf(2)).findAll()
    assert(toDelete.size() == 1)
    realmTransaction(_ => {toDelete.removeLast()})
    val emptyList: RealmResults[User] = realm.where(classOf[User]).findAll()
    assert(emptyList.isEmpty)
  }
}
| aafa/realm-sbt-plugin | realm-android/src/androidTest/scala/RealmTestCase.scala | Scala | mit | 1,136 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark
import scala.concurrent.duration._
import org.apache.spark.TestUtils.createTempScriptWithExpectedOutput
import org.apache.spark.internal.config._
import org.apache.spark.rdd.{PartitionPruningRDD, RDD}
import org.apache.spark.resource.TestResourceIDs.{EXECUTOR_GPU_ID, TASK_GPU_ID, WORKER_GPU_ID}
import org.apache.spark.scheduler.BarrierJobAllocationFailed._
import org.apache.spark.scheduler.BarrierJobSlotsNumberCheckFailed
import org.apache.spark.util.ThreadUtils
/**
* This test suite covers all the cases that shall fail fast on job submitted that contains one
* of more barrier stages.
*/
class BarrierStageOnSubmittedSuite extends SparkFunSuite with LocalSparkContext {
  // Builds a local[4] test context unless the caller supplies its own conf.
  private def createSparkContext(conf: Option[SparkConf] = None): SparkContext = {
    new SparkContext(conf.getOrElse(
      new SparkConf()
        .setMaster("local[4]")
        .setAppName("test")))
  }
  // Submits a job over `rdd` and asserts that it fails fast with an error
  // containing `message`. `partitions` defaults to all partitions of the RDD.
  private def testSubmitJob(
      sc: SparkContext,
      rdd: RDD[Int],
      partitions: Option[Seq[Int]] = None,
      message: String): Unit = {
    val futureAction = sc.submitJob(
      rdd,
      (iter: Iterator[Int]) => iter.toArray,
      partitions.getOrElse(0 until rdd.partitions.length),
      // NOTE(review): `return` inside these handler lambdas is a nonlocal
      // return (throws NonLocalReturnControl); presumably intended as a
      // no-op result handler here — TODO confirm before restyling.
      { case (_, _) => return }: (Int, Array[Int]) => Unit,
      { return }
    )
    // The failure is expected at submit time, surfaced through the future.
    val error = intercept[SparkException] {
      ThreadUtils.awaitResult(futureAction, 5.seconds)
    }.getCause.getMessage
    assert(error.contains(message))
  }
test("submit a barrier ResultStage that contains PartitionPruningRDD") {
sc = createSparkContext()
val prunedRdd = new PartitionPruningRDD(sc.parallelize(1 to 10, 4), index => index > 1)
val rdd = prunedRdd
.barrier()
.mapPartitions(iter => iter)
testSubmitJob(sc, rdd,
message = ERROR_MESSAGE_RUN_BARRIER_WITH_UNSUPPORTED_RDD_CHAIN_PATTERN)
}
test("submit a barrier ShuffleMapStage that contains PartitionPruningRDD") {
sc = createSparkContext()
val prunedRdd = new PartitionPruningRDD(sc.parallelize(1 to 10, 4), index => index > 1)
val rdd = prunedRdd
.barrier()
.mapPartitions(iter => iter)
.repartition(2)
.map(x => x + 1)
testSubmitJob(sc, rdd,
message = ERROR_MESSAGE_RUN_BARRIER_WITH_UNSUPPORTED_RDD_CHAIN_PATTERN)
}
test("submit a barrier stage that doesn't contain PartitionPruningRDD") {
sc = createSparkContext()
val prunedRdd = new PartitionPruningRDD(sc.parallelize(1 to 10, 4), index => index > 1)
val rdd = prunedRdd
.repartition(2)
.barrier()
.mapPartitions(iter => iter)
// Should be able to submit job and run successfully.
val result = rdd.collect().sorted
assert(result === Seq(6, 7, 8, 9, 10))
}
test("submit a barrier stage with partial partitions") {
sc = createSparkContext()
val rdd = sc.parallelize(1 to 10, 4)
.barrier()
.mapPartitions(iter => iter)
testSubmitJob(sc, rdd, Some(Seq(1, 3)),
message = ERROR_MESSAGE_RUN_BARRIER_WITH_UNSUPPORTED_RDD_CHAIN_PATTERN)
}
test("submit a barrier stage with union()") {
sc = createSparkContext()
val rdd1 = sc.parallelize(1 to 10, 2)
.barrier()
.mapPartitions(iter => iter)
val rdd2 = sc.parallelize(1 to 20, 2)
val rdd3 = rdd1
.union(rdd2)
.map(x => x * 2)
// Fail the job on submit because the barrier RDD (rdd1) may be not assigned Task 0.
testSubmitJob(sc, rdd3,
message = ERROR_MESSAGE_RUN_BARRIER_WITH_UNSUPPORTED_RDD_CHAIN_PATTERN)
}
test("submit a barrier stage with coalesce()") {
sc = createSparkContext()
val rdd = sc.parallelize(1 to 10, 4)
.barrier()
.mapPartitions(iter => iter)
.coalesce(1)
// Fail the job on submit because the barrier RDD requires to run on 4 tasks, but the stage
// only launches 1 task.
testSubmitJob(sc, rdd,
message = ERROR_MESSAGE_RUN_BARRIER_WITH_UNSUPPORTED_RDD_CHAIN_PATTERN)
}
test("submit a barrier stage that contains an RDD that depends on multiple barrier RDDs") {
sc = createSparkContext()
val rdd1 = sc.parallelize(1 to 10, 4)
.barrier()
.mapPartitions(iter => iter)
val rdd2 = sc.parallelize(11 to 20, 4)
.barrier()
.mapPartitions(iter => iter)
val rdd3 = rdd1
.zip(rdd2)
.map(x => x._1 + x._2)
testSubmitJob(sc, rdd3,
message = ERROR_MESSAGE_RUN_BARRIER_WITH_UNSUPPORTED_RDD_CHAIN_PATTERN)
}
test("submit a barrier stage with zip()") {
sc = createSparkContext()
val rdd1 = sc.parallelize(1 to 10, 4)
.barrier()
.mapPartitions(iter => iter)
val rdd2 = sc.parallelize(11 to 20, 4)
val rdd3 = rdd1
.zip(rdd2)
.map(x => x._1 + x._2)
// Should be able to submit job and run successfully.
val result = rdd3.collect().sorted
assert(result === Seq(12, 14, 16, 18, 20, 22, 24, 26, 28, 30))
}
test("submit a barrier ResultStage with dynamic resource allocation enabled") {
val conf = new SparkConf()
.set(DYN_ALLOCATION_ENABLED, true)
.set(DYN_ALLOCATION_TESTING, true)
.setMaster("local[4]")
.setAppName("test")
sc = createSparkContext(Some(conf))
val rdd = sc.parallelize(1 to 10, 4)
.barrier()
.mapPartitions(iter => iter)
testSubmitJob(sc, rdd,
message = ERROR_MESSAGE_RUN_BARRIER_WITH_DYN_ALLOCATION)
}
test("submit a barrier ShuffleMapStage with dynamic resource allocation enabled") {
val conf = new SparkConf()
.set(DYN_ALLOCATION_ENABLED, true)
.set(DYN_ALLOCATION_TESTING, true)
.setMaster("local[4]")
.setAppName("test")
sc = createSparkContext(Some(conf))
val rdd = sc.parallelize(1 to 10, 4)
.barrier()
.mapPartitions(iter => iter)
.repartition(2)
.map(x => x + 1)
testSubmitJob(sc, rdd,
message = ERROR_MESSAGE_RUN_BARRIER_WITH_DYN_ALLOCATION)
}
test("submit a barrier ResultStage that requires more slots than current total under local " +
"mode") {
val conf = new SparkConf()
// Shorten the time interval between two failed checks to make the test fail faster.
.set(BARRIER_MAX_CONCURRENT_TASKS_CHECK_INTERVAL.key, "1s")
// Reduce max check failures allowed to make the test fail faster.
.set(BARRIER_MAX_CONCURRENT_TASKS_CHECK_MAX_FAILURES, 3)
.setMaster("local[4]")
.setAppName("test")
sc = createSparkContext(Some(conf))
val rdd = sc.parallelize(1 to 10, 5)
.barrier()
.mapPartitions(iter => iter)
testSubmitJob(sc, rdd,
message = ERROR_MESSAGE_BARRIER_REQUIRE_MORE_SLOTS_THAN_CURRENT_TOTAL_NUMBER)
}
test("submit a barrier ShuffleMapStage that requires more slots than current total under " +
"local mode") {
val conf = new SparkConf()
// Shorten the time interval between two failed checks to make the test fail faster.
.set(BARRIER_MAX_CONCURRENT_TASKS_CHECK_INTERVAL.key, "1s")
// Reduce max check failures allowed to make the test fail faster.
.set(BARRIER_MAX_CONCURRENT_TASKS_CHECK_MAX_FAILURES, 3)
.setMaster("local[4]")
.setAppName("test")
sc = createSparkContext(Some(conf))
val rdd = sc.parallelize(1 to 10, 5)
.barrier()
.mapPartitions(iter => iter)
.repartition(2)
.map(x => x + 1)
testSubmitJob(sc, rdd,
message = ERROR_MESSAGE_BARRIER_REQUIRE_MORE_SLOTS_THAN_CURRENT_TOTAL_NUMBER)
}
test("submit a barrier ResultStage that requires more slots than current total under " +
"local-cluster mode") {
val conf = new SparkConf()
.set(CPUS_PER_TASK, 2)
// Shorten the time interval between two failed checks to make the test fail faster.
.set(BARRIER_MAX_CONCURRENT_TASKS_CHECK_INTERVAL.key, "1s")
// Reduce max check failures allowed to make the test fail faster.
.set(BARRIER_MAX_CONCURRENT_TASKS_CHECK_MAX_FAILURES, 3)
.setMaster("local-cluster[4, 3, 1024]")
.setAppName("test")
sc = createSparkContext(Some(conf))
val rdd = sc.parallelize(1 to 10, 5)
.barrier()
.mapPartitions(iter => iter)
testSubmitJob(sc, rdd,
message = ERROR_MESSAGE_BARRIER_REQUIRE_MORE_SLOTS_THAN_CURRENT_TOTAL_NUMBER)
}
test("submit a barrier ShuffleMapStage that requires more slots than current total under " +
"local-cluster mode") {
val conf = new SparkConf()
.set(CPUS_PER_TASK, 2)
// Shorten the time interval between two failed checks to make the test fail faster.
.set(BARRIER_MAX_CONCURRENT_TASKS_CHECK_INTERVAL.key, "1s")
// Reduce max check failures allowed to make the test fail faster.
.set(BARRIER_MAX_CONCURRENT_TASKS_CHECK_MAX_FAILURES, 3)
.setMaster("local-cluster[4, 3, 1024]")
.setAppName("test")
sc = createSparkContext(Some(conf))
val rdd = sc.parallelize(1 to 10, 5)
.barrier()
.mapPartitions(iter => iter)
.repartition(2)
.map(x => x + 1)
testSubmitJob(sc, rdd,
message = ERROR_MESSAGE_BARRIER_REQUIRE_MORE_SLOTS_THAN_CURRENT_TOTAL_NUMBER)
}
test("SPARK-32518: CoarseGrainedSchedulerBackend.maxNumConcurrentTasks should " +
"consider all kinds of resources for the barrier stage") {
withTempDir { dir =>
val discoveryScript = createTempScriptWithExpectedOutput(
dir, "gpuDiscoveryScript", """{"name": "gpu","addresses":["0"]}""")
val conf = new SparkConf()
// Setup a local cluster which would only has one executor with 2 CPUs and 1 GPU.
.setMaster("local-cluster[1, 2, 1024]")
.setAppName("test-cluster")
.set(WORKER_GPU_ID.amountConf, "1")
.set(WORKER_GPU_ID.discoveryScriptConf, discoveryScript)
.set(EXECUTOR_GPU_ID.amountConf, "1")
.set(TASK_GPU_ID.amountConf, "1")
// disable barrier stage retry to fail the application as soon as possible
.set(BARRIER_MAX_CONCURRENT_TASKS_CHECK_MAX_FAILURES, 1)
sc = new SparkContext(conf)
TestUtils.waitUntilExecutorsUp(sc, 1, 60000)
val exception = intercept[BarrierJobSlotsNumberCheckFailed] {
// Setup a barrier stage which contains 2 tasks and each task requires 1 CPU and 1 GPU.
// Therefore, the total resources requirement (2 CPUs and 2 GPUs) of this barrier stage
// can not be satisfied since the cluster only has 2 CPUs and 1 GPU in total.
sc.parallelize(Range(1, 10), 2)
.barrier()
.mapPartitions { iter => iter }
.collect()
}
assert(exception.getMessage.contains("[SPARK-24819]: Barrier execution " +
"mode does not allow run a barrier stage that requires more slots"))
}
}
}
| ueshin/apache-spark | core/src/test/scala/org/apache/spark/BarrierStageOnSubmittedSuite.scala | Scala | apache-2.0 | 11,447 |
package eu.ace_design.island.game
import eu.ace_design.island.map.IslandMap
import eu.ace_design.island.stdlib.Biomes.{MANGROVE, BEACH, OCEAN}
import eu.ace_design.island.stdlib.PointOfInterests.{Hideout, Creek}
import eu.ace_design.island.stdlib.Resources._
import org.specs2.mock.Mockito
import org.specs2.mutable._
import org.junit.runner.RunWith
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class GameTest extends SpecificationWithJUnit with Mockito {
  "GameTest Specifications".title
  // Specs for the immutable Game state: harvesting, distances, budget/crew
  // updates and resource bookkeeping.
  "A game" should {
    // Shared baseline game: 100 action points, 50 men, one WOOD objective.
    val g = Game(budget = Budget(100), crew = Crew(50), objectives = Set((WOOD, 600)))
    "be instantiated like a case class" in {
      g must beAnInstanceOf[Game]
      g.visited must_== Set()
      g.boat must beNone
      g.isOK must beTrue
    }
    // Harvest amounts accumulate per (resource, tile) pair.
    "support harvesting of resources" in {
      g.harvested(WOOD, (0,0)) must_== 0
      val g2 = g.harvest(WOOD, (0,0), 100)
      g2.harvested(WOOD, (0,0)) must_== 100
      val g3 = g2.harvest(WOOD, (0,0), 2).harvest(WOOD, (0,1), 100).harvest(FLOWER, (0,0), 20)
      g3.harvested(WOOD, (0,1)) must_== 100
      g3.harvested(WOOD, (0,0)) must_== 102
      g3.harvested(FLOWER, (0,0)) must_== 20
    }
    // Port distance is only defined once the boat has moved somewhere.
    "compute the distance to go back to the home port" in {
      g.distanceToPort must beNone
      val (g1,_) = g updatedBy MovedBoatResult(loc = (10,10), men = 10)
      g1.distanceToPort must beSome
      g1.distanceToPort.get must beGreaterThan(0.0)
    }
    "compute the distance to reach the boat when on the island" in {
      g.distanceToBoat must_== 0.0 // everybody on the boat
      val (g1,_) = g updatedBy MovedBoatResult(loc = (10,10), men = 10)
      g1.distanceToBoat must_== 0.0 // still close to the boat
      val (g2,_) = g1 updatedBy MovedCrewResult(loc = (10,11))
      g2.distanceToBoat must beGreaterThan(0.0)
    }
    "support the projection of landed mens into something much smaller (costs models)" in {
      val (g1,_) = g updatedBy MovedBoatResult(loc = (10,10), men = 10)
      g1.menRatio must beLessThan(10.0)
    }
    "support normalization of landed mens (projection into [0,1]" in {
      g.normalizeMen must_== 0.0
      val (g1,_) = g updatedBy MovedBoatResult(loc = (10,10), men = 10)
      g1.normalizeMen must beLessThan(1.0)
    }
    "compute the distance to move the boat from one place to another one" in {
      g.distanceByBoat((10,10)) must beGreaterThan(0.0)
    }
    // flaggedAsKO is functional: the original game is left untouched.
    "be flagged as KO when relevant" in {
      val g1 = g.flaggedAsKO
      g.isOK must beTrue
      g1.isOK must beFalse
    }
    // Results returned by actions drive the game-state updates below.
    "support update with an EmptyResult" in {
      val r = EmptyResult(cost = 20)
      val (updated, _) = g updatedBy r
      updated.budget.remaining must_== 80
    }
    "support update with a MoveBoatResult" in {
      val r = MovedBoatResult(cost = 30, loc = (10,14), men = 20)
      val (updated, _) = g updatedBy r
      updated.budget.remaining must beLessThan(g.budget.remaining)
      updated.boat must_== Some((10,14))
      updated.crew.landed must_== 20
    }
    "support harvest update" in {
      g.harvested(WOOD, (0,0)) must_== 0
      val g1 = g.harvest(WOOD, (0,0), 100)
      g1.harvested(WOOD, (0,0)) must_== 100
      val g2 = g.harvest(WOOD, (0,0), 25)
      g2.harvested(WOOD, (0,0)) must_== 25
      val g3 = g.harvest(FUR, (0,0), 25)
      g3.harvested(FUR, (0,0)) must_== 25
    }
    // Consuming more than the collected amount is an error.
    "support the consumption of resources" in {
      g.consumeResource(WOOD, 10) must throwAn[IllegalArgumentException]
      val g1 = g.harvest(WOOD, (0,0), 100)
      g1.collectedResources must contain(WOOD -> 100)
      val g2 = g1.consumeResource(WOOD, 90)
      g2.collectedResources must contain(WOOD -> 10)
      g2.consumeResource(WOOD, 20) must throwAn[IllegalArgumentException]
      val g3 = g2.consumeResource(WOOD, 10)
      g3.collectedResources must contain(WOOD -> 0)
    }
    // Manufactured resources accumulate per resource kind.
    "support the storage of manufactured resources in the ship hold" in {
      val g1 = g.storeTransformedResources(PLANK, 10)
      g1.collectedResources must contain(PLANK -> 10)
      val g2 = g1.storeTransformedResources(INGOT, 5)
      g2.collectedResources must contain(PLANK -> 10)
      g2.collectedResources must contain(INGOT -> 5)
      val g3 = g2.storeTransformedResources(PLANK, 32)
      g3.collectedResources must contain(PLANK -> 42)
      g3.collectedResources must contain(INGOT -> 5)
    }
    "locate no plane initially" in {
      g.plane must beNone
    }
  }
"a plane" should {
"be initialized easily" in {
val p: Plane = Plane(0,0,Directions.SOUTH)
p.initial must_== (0,0)
p.position must_== (0,0)
p.heading must_== Directions.SOUTH
}
"define a bounding box under the plane" in {
val p = Plane(5,7,Directions.NORTH)
val box = p.boundingBox
box must_== Set((4,6), (4,7), (4,8),
(5,6), (5,7), (5,8),
(6,6), (6,7), (6,8))
}
"fly forward" in {
val p1 = Plane(5,7,Directions.NORTH)
val pNorth = p1.forward
pNorth.position must_== (5, 7-3)
val p2 = Plane(5,7,Directions.SOUTH)
val pSouth = p2.forward
pSouth.position must_== (5, 7+3)
val p3 = Plane(5,7,Directions.WEST)
val pWest = p3.forward
pWest.position must_== (5-3, 7)
val p4 = Plane(5,7,Directions.EAST)
val pEast = p4.forward
pEast.position must_== (5+3, 7)
}
"reject invalid changes in heading" in {
val p1 = Plane(50,50,Directions.NORTH)
p1.turn(Directions.SOUTH) must throwAn[IllegalArgumentException]
val p2 = Plane(50,50, Directions.SOUTH)
p2.turn(Directions.NORTH) must throwAn[IllegalArgumentException]
val p3 = Plane(50,50, Directions.EAST)
p3.turn(Directions.WEST) must throwAn[IllegalArgumentException]
val p4 = Plane(50,50, Directions.WEST)
p4.turn(Directions.EAST) must throwAn[IllegalArgumentException]
}
"support turning while flying" in {
val p = Plane(50, 35, Directions.NORTH)
val turned = p.turn(Directions.EAST)
turned.position must_== (50+3, 35-3)
}
"reject rear radar" in {
val p = Plane(50, 35, Directions.NORTH)
p.radar(Directions.SOUTH, null) must throwAn[IllegalArgumentException]
}
"Support radar information retrieval" in {
val p = Plane(1,1, Directions.EAST)
// Create an empty ocean
val ocean = ( for(x <- 0 until 100; y <- 0 until 100) yield (x,y) -> Tile(biomes = Set((OCEAN,100.0))) ).toMap
val withGround = ocean + ((50,2) -> Tile(biomes = Set((BEACH,100.0))))
val gameBoard = GameBoard(100, mock[IslandMap], tiles = withGround)
p.radar(Directions.NORTH, gameBoard) must_== (0, RadarValue.OUT_OF_RANGE)
p.radar(Directions.SOUTH, gameBoard) must_== (32, RadarValue.OUT_OF_RANGE)
p.radar(Directions.EAST, gameBoard) must_== (15, RadarValue.GROUND)
}
"Support snapshot analysis" in {
val p = Plane(1,1, Directions.EAST)
val ocean = ( for(x <- 0 until 100; y <- 0 until 100) yield (x,y) -> Tile(biomes = Set((OCEAN,1.0))) ).toMap
val (b1, c1, _) = p.snapshot(GameBoard(100, mock[IslandMap], tiles = ocean))
b1 must_== Set(OCEAN)
c1 must_== Set()
val withGround = ocean + ((0,0) -> Tile(biomes = Set((BEACH,0.70), (MANGROVE, 0.30)))) +
((0,1) -> Tile(biomes = Set((BEACH,0.90), (MANGROVE, 0.10)))) +
((0,2) -> Tile(biomes = Set((BEACH,0.80), (MANGROVE, 0.20))))
val pois: Map[(Int, Int), Set[PointOfInterest]] = Map(
(1,1) -> Set(Creek(identifier = "aCreek", None)),
(0,1) -> Set(Hideout(identifier = "anHideout", None))
)
val (b2,c2, _) = p.snapshot(GameBoard(100, mock[IslandMap], tiles = withGround, pois = pois))
b2 must_== Set(OCEAN, BEACH)
c2 must_== Set(Creek(identifier = "aCreek", None))
}
}
"A budget" should {
"reject negative or null initial value" in {
Budget(-1) must throwAn[IllegalArgumentException]
Budget(0) must throwAn[IllegalArgumentException]
}
"support action point spending in a functional way" in {
val init = Budget(100)
val remains = init - 40
init.initial must_== 100; init.remaining must_== 100
remains.initial must_== 100; remains.remaining must_== 60
}
"throw an exception when spending too much action points" in {
val b = Budget(40)
(b - 42) must throwA[NotEnoughBudgetException]
}
}
"A crew" should {
"reject crew with less than 2 men operating the boat" in {
Crew(-1) must throwAn[IllegalArgumentException]
Crew(0) must throwAn[IllegalArgumentException]
Crew(1) must throwAn[IllegalArgumentException]
}
"be initialized with default values" in {
val crew = Crew(50)
crew.complete must_== 50
crew.landed must_== 0
crew.used must_== 0
crew.location must_== None
}
"log how men are used in the crew" in {
val crew = Crew(50)
crew must beAnInstanceOf[Crew]
crew.complete must_== 50; crew.landed must_== 0; crew.used must_== 0
val c1 = crew using 15
c1.complete must_== crew.complete; c1.landed must_== 15; c1.used must_== 15
val c2 = c1 using 5
c2.complete must_== crew.complete; c2.landed must_== 5; c2.used must_== 20
}
"know where the men who landed on the island are" in {
val crew = Crew(50)
val c1 = crew movedTo (14,17)
c1.location must_== Some((14,17))
}
}
} | ace-design/island | engine/src/test/scala/eu/ace_design/island/game/GameTest.scala | Scala | lgpl-3.0 | 9,492 |
/* Copyright 2009-2016 EPFL, Lausanne */
package leon
package genc
package ir
import PrimitiveTypes.{ PrimitiveType => PT, _ } // For desambiguation
import Literals._
import Operators._
import collection.mutable.{ Map => MutableMap }
/** Mix-in for transformers that need no environment: `Env` is `Unit` and
  * the empty environment `Ø` is the unit value.
  */
trait NoEnv {
  type Env = Unit
  final val Ø = ()
}
/*
* Transform an IR into another version of the IR.
*
* NOTE Subclasses can selectively override the rec/recImpl methods that aren't final on a need to basis.
*
* NOTE Subclasses have to define an `Env` type and an empty enviornment Ø. They can be Unit and (),
* respectively. In that case, inherit from NoEnv.
*
* NOTE No caching is done, hence subclasses should be aware that a definition/expression/type can
* be transformed several times with the same env.
* EXCEPT for FunDef. In order to support recursive functions, function definitions are cached.
* When building partial function definitions (i.e. with body = null), subclasses are required
* to call `registerFunction` before proceeding with the conversion of their bodies.
*/
abstract class Transformer[From <: IR, To <: IR](final val from: From, final val to: To) {
  import from._

  // Environment threaded through the whole recursion; concrete subclasses fix
  // this type (mix in NoEnv when no context needs to be carried).
  type Env
  val Ø: Env // empty env

  // Entry point of the transformation
  final def apply(prog: ProgDef): to.ProgDef = rec(prog)(Ø)
  final def apply(vd: ValDef): to.ValDef = rec(vd)(Ø)
  final def apply(e: Expr): to.Expr = rec(e)(Ø)
  final def apply(typ: Type): to.Type = rec(typ)(Ø)

  // See note above about caching & partial function definition.
  // Maps already-translated FunDefs so each function is converted exactly once
  // and recursive references resolve to the same target instance.
  private val funCache = MutableMap[FunDef, to.FunDef]()

  /** Record a (possibly partially built, i.e. body == null) translated function
    * so recursive calls met while converting its body find it in the cache.
    */
  protected final def registerFunction(older: FunDef, newer: to.FunDef) {
    funCache.update(older, newer)
  }

  protected def rec(prog: ProgDef)(implicit env: Env): to.ProgDef =
    to.ProgDef(rec(prog.entryPoint))

  // Cached dispatch: convert a FunDef at most once (see class-level note).
  protected final def rec(fd: FunDef)(implicit env: Env): to.FunDef = funCache.getOrElse(fd, recImpl(fd))

  protected def recImpl(fd: FunDef)(implicit env: Env): to.FunDef = {
    // Build the definition with a null body and register it *before* recursing
    // into the body, so that self-/mutual recursion terminates.
    val newer = to.FunDef(fd.id, rec(fd.returnType), fd.ctx map rec, fd.params map rec, null)
    registerFunction(fd, newer)
    newer.body = rec(fd.body)
    newer
  }

  protected def rec(fb: FunBody)(implicit env: Env): to.FunBody = (fb: @unchecked) match {
    case FunBodyAST(body) => to.FunBodyAST(rec(body))
    // Manual (verbatim) bodies are copied as-is; only AST bodies are recursed.
    case FunBodyManual(includes, body) => to.FunBodyManual(includes, body)
  }

  // NOTE Due to the mutability nature of ClassDef and its children registration process,
  // we need to traverse class hierarchies in a top down fashion. See recImpl.
  protected final def rec(cd: ClassDef)(implicit env: Env): to.ClassDef = {
    type ClassTranslation = Map[from.ClassDef, to.ClassDef]

    // Convert `current` (its parent is already translated), then all of its
    // direct children, accumulating the old -> new mapping.
    def topDown(transformedParent: Option[to.ClassDef], current: from.ClassDef, acc: ClassTranslation): ClassTranslation = {
      val transformed = recImpl(current, transformedParent)
      val acc2 = acc + (current -> transformed)
      val subs = current.getDirectChildren
      (acc2 /: subs) { case (acc3, sub) => topDown(Some(transformed), sub, acc3) }
    }

    // Always start from the top of the hierarchy so every parent is translated
    // before its children, then look up the class we were actually asked for.
    val top = cd.hierarchyTop
    val translation = topDown(None, top, Map.empty)
    val transformed = translation(cd)
    transformed
  }

  protected def recImpl(cd: ClassDef, parent: Option[to.ClassDef])(implicit env: Env): to.ClassDef =
    to.ClassDef(cd.id, parent, cd.fields map rec, cd.isAbstract)

  protected def rec(vd: ValDef)(implicit env: Env): to.ValDef = to.ValDef(vd.id, rec(vd.typ), vd.isVar)

  protected def rec(alloc: ArrayAlloc)(implicit env: Env): to.ArrayAlloc = (alloc: @unchecked) match {
    case ArrayAllocStatic(ArrayType(base), length, Right(values)) =>
      to.ArrayAllocStatic(to.ArrayType(rec(base)), length, Right(values map rec))
    case ArrayAllocStatic(ArrayType(base), length, Left(_)) =>
      // No explicit initialiser in the source: zero-initialise in the target IR.
      to.ArrayAllocStatic(to.ArrayType(rec(base)), length, Left(to.Zero))
    case ArrayAllocVLA(ArrayType(base), length, valueInit) =>
      to.ArrayAllocVLA(to.ArrayType(rec(base)), rec(length), rec(valueInit))
  }

  // Expression conversion drops the environment produced by recImpl.
  protected final def rec(e: Expr)(implicit env: Env): to.Expr = recImpl(e)._1

  // We need to propagate the environment across whole blocks, not simply by recursing;
  // hence recImpl returns the (possibly updated) environment alongside the expression.
  protected def recImpl(e: Expr)(implicit env: Env): (to.Expr, Env) = (e: @unchecked) match {
    case Binding(vd) => to.Binding(rec(vd)) -> env

    // Consider blocks as a sequence of let statements: each statement is
    // converted under the environment produced by the previous one.
    case Block(exprs0) =>
      var newEnv = env
      val exprs = for { e0 <- exprs0 } yield {
        val (e, nextEnv) = recImpl(e0)(newEnv)
        newEnv = nextEnv
        e
      }
      to.buildBlock(exprs) -> newEnv

    case Decl(vd) => to.Decl(rec(vd)) -> env
    case DeclInit(vd, value) => to.DeclInit(rec(vd), rec(value)) -> env
    case App(fd, extra, args) => to.App(rec(fd), extra map rec, args map rec) -> env
    case Construct(cd, args) => to.Construct(rec(cd), args map rec) -> env
    case ArrayInit(alloc) => to.ArrayInit(rec(alloc)) -> env
    case FieldAccess(objekt, fieldId) => to.FieldAccess(rec(objekt), fieldId) -> env
    case ArrayAccess(array, index) => to.ArrayAccess(rec(array), rec(index)) -> env
    case ArrayLength(array) => to.ArrayLength(rec(array)) -> env
    case Assign(lhs, rhs) => to.Assign(rec(lhs), rec(rhs)) -> env
    case BinOp(op, lhs, rhs) => to.BinOp(op, rec(lhs), rec(rhs)) -> env
    case UnOp(op, expr) => to.UnOp(op, rec(expr)) -> env
    case If(cond, thenn) => to.If(rec(cond), rec(thenn)) -> env
    case IfElse(cond, thenn, elze) => to.IfElse(rec(cond), rec(thenn), rec(elze)) -> env
    case While(cond, body) => to.While(rec(cond), rec(body)) -> env
    case IsA(expr, ct) => to.IsA(rec(expr), to.ClassType(rec(ct.clazz))) -> env
    case AsA(expr, ct) => to.AsA(rec(expr), to.ClassType(rec(ct.clazz))) -> env
    case IntegralCast(expr, t) => to.IntegralCast(rec(expr), t) -> env
    case Lit(lit) => to.Lit(lit) -> env
    case Ref(e) => to.Ref(rec(e)) -> env
    case Deref(e) => to.Deref(rec(e)) -> env
    case Return(e) => to.Return(rec(e)) -> env
    case Break => to.Break -> env
  }

  protected def rec(typ: Type)(implicit env: Env): to.Type = (typ: @unchecked) match {
    case PrimitiveType(pt) => to.PrimitiveType(pt)
    case ClassType(clazz) => to.ClassType(rec(clazz))
    case ArrayType(base) => to.ArrayType(rec(base))
    case ReferenceType(t) => to.ReferenceType(rec(t))
    case TypedefType(original, alias, include) => to.TypedefType(original, alias, include)
    case DroppedType => to.DroppedType
    case NoType => to.NoType
  }
}
| regb/leon | src/main/scala/leon/genc/ir/Transformer.scala | Scala | gpl-3.0 | 6,509 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn
import com.intel.analytics.bigdl.nn.abstractnn.DataFormat
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.T
import com.intel.analytics.bigdl.utils.serializer.ModuleSerializationTest
import org.scalatest.{FlatSpec, Matchers}
import scala.util.Random
class ResizeBilinearSpec extends FlatSpec with Matchers {

  // NHWC test input of shape (1, 3, 2, 3): batch 1, height 3, width 2, 3 channels.
  private val input = Tensor[Float](T(T(
    T(
      T(1, 2, 3),
      T(4, 5, 6)
    ),
    T(
      T(7, 8, 9),
      T(2, 3, 1)
    ),
    T(
      T(4, 8, 2),
      T(5, 3, 0)
    )
  )))

  "ResizeBilinear forward" should "not change content while input/output width/height match" in {
    println(input)
    // Target (height=3, width=2) equals the input size: resize is the identity.
    val layer = ResizeBilinear[Float](3, 2, dataFormat = DataFormat.NHWC)
    val output = layer.forward(input)
    println(output)
    input should be(output)
  }

  "ResizeBilinear forward" should "be correct while double height" in {
    println(input)
    // Height 3 -> 6, width unchanged.
    val layer = ResizeBilinear[Float](6, 2, dataFormat = DataFormat.NHWC)
    val output = layer.forward(input)
    println(output)
    // Each inserted row is the element-wise average of its two neighbours;
    // the last row is clamped (repeated) at the bottom edge.
    val expectOutput = Tensor[Float](T(T(
      T(
        T(1, 2, 3),
        T(4, 5, 6)
      ),
      T(
        T(4, 5, 6),
        T(3, 4, 3.5)
      ),
      T(
        T(7, 8, 9),
        T(2, 3, 1)
      ),
      T(
        T(5.5, 8, 5.5),
        T(3.5, 3, 0.5)
      ),
      T(
        T(4, 8, 2),
        T(5, 3, 0)
      ),
      T(
        T(4, 8, 2),
        T(5, 3, 0)
      )
    )))
    output should be(expectOutput)
  }

  "ResizeBilinear forward" should "be correct while double width" in {
    println(input)
    // Width 2 -> 4, height unchanged.
    val layer = ResizeBilinear[Float](3, 4, dataFormat = DataFormat.NHWC)
    val output = layer.forward(input)
    println(output)
    // Each inserted column is the average of its two neighbours; the last
    // column is clamped (repeated) at the right edge.
    val expectOutput = Tensor[Float](T(T(
      T(
        T(1, 2, 3),
        T(2.5, 3.5, 4.5),
        T(4, 5, 6),
        T(4, 5, 6)
      ),
      T(
        T(7, 8, 9),
        T(4.5, 5.5, 5),
        T(2, 3, 1),
        T(2, 3, 1)
      ),
      T(
        T(4, 8, 2),
        T(4.5, 5.5, 1),
        T(5, 3, 0),
        T(5, 3, 0)
      )
    )))
    output should be(expectOutput)
  }

  "ResizeBilinear forward and backward" should "be correct with NCHW" in {
    // Resize configuration under test: input/output spatial sizes and
    // whether corner pixels are aligned during interpolation.
    case class Param(inHeight: Int, inWidth: Int,
      outHeight: Int, outWidth: Int, alignCorners: Boolean)
    val params = Seq(
      Param(3, 2, 3, 2, true),
      Param(3, 2, 6, 2, true),
      Param(3, 2, 3, 4, true),
      Param(3, 2, 3, 2, false),
      Param(3, 2, 6, 2, false),
      Param(3, 2, 3, 4, false)
    )
    for (param <- params) {
      // Build the same random data in both layouts: CLast is CFirst with the
      // channel axis moved last via transpose(2,4).transpose(2,3).
      val inputCFirst = Tensor[Float](Array(1, 3, param.inHeight, param.inWidth)).rand()
      val inputCLast = inputCFirst.clone().transpose(2, 4).transpose(2, 3).contiguous()
      val gradOutputCFirst = Tensor[Float](Array(1, 3, param.outHeight, param.outWidth)).rand()
      val gradOutputCLast = gradOutputCFirst.clone().transpose(2, 4).transpose(2, 3).contiguous()
      val layerCLast = ResizeBilinear[Float](param.outHeight, param.outWidth,
        param.alignCorners, dataFormat = DataFormat.NHWC)
      val layerCFirst = ResizeBilinear[Float](param.outHeight, param.outWidth,
        param.alignCorners, dataFormat = DataFormat.NCHW)

      // NCHW
      // Run forward then backward through both layouts and check the NCHW
      // results, once re-laid-out, match the NHWC results exactly.
      val outputCFirst = layerCFirst.forward(inputCFirst)
      val gradInputCFirst = layerCFirst.backward(inputCFirst, gradOutputCFirst)
      val outputCLast = layerCLast.forward(inputCLast)
      val gradInputCLast = layerCLast.backward(inputCLast, gradOutputCLast)
      outputCFirst
        .transpose(2, 4)
        .transpose(2, 3).contiguous() should be(outputCLast)
      gradInputCFirst
        .transpose(2, 4)
        .transpose(2, 3).contiguous() should be(gradInputCLast)
    }
  }
}
/** Serialization round-trip test for [[ResizeBilinear]]. */
class ResizeBilinearSerialTest extends ModuleSerializationTest {
  override def test(): Unit = {
    // Randomly filled input tensor of shape (1, 3, 2, 3).
    val sample = Tensor[Float](1, 3, 2, 3).apply1(_ => Random.nextFloat())
    // Module under test, named so the serialized graph is identifiable.
    val module = ResizeBilinear[Float](3, 2).setName("resizeBilinear")
    runSerializationTest(module, sample)
  }
}
| wzhongyuan/BigDL | spark/dl/src/test/scala/com/intel/analytics/bigdl/nn/ResizeBilinearSpec.scala | Scala | apache-2.0 | 4,655 |
package pt1.week5
import breeze.linalg.DenseMatrix
import common.Propagation
/** Factory for binomial short-rate lattices. */
object ShortRateLattice {

  /**
    * Builds the short-rate lattice starting from rate `r0`, applying the
    * up/down factors `u` and `d` over `terms` periods. Delegates to the
    * shared forward-propagation helper; a lattice over `terms` periods has
    * `terms + 1` levels of nodes.
    */
  def generate(r0: Double, u: Double, d: Double, terms: Int): DenseMatrix[Double] = {
    val levels = terms + 1
    Propagation.propagateValueForward(r0, u, d, levels)
  }
}
| ligasgr/fe-and-rm | src/main/scala/pt1/week5/ShortRateLattice.scala | Scala | apache-2.0 | 256 |
package com.whitepages.kafka.concurrent.consumer
import com.whitepages.kafka.concurrent.Ack.{AckedMessage, Acknowledgement, AckableMessage}
import com.whitepages.kafka.concurrent._
import scala.concurrent.duration.FiniteDuration
import scala.concurrent._
/** Asynchronous, acknowledging Kafka consumer.
  *
  * Pulls messages from `source` via a background worker and hands them out one
  * at a time through [[next]]; callers are expected to [[ack]] each message.
  * Messages whose acks time out / fail are passed to `failureHandler`.
  */
class AsyncKafkaConsumer[T](
  val ackTimeout: FiniteDuration,
  val desiredCommitThreshold: Int,
  source: BatchAckingMessageSource[T],
  failureHandler: (List[AckedMessage[T]]) => Unit)
  extends AsyncAckingConsumer[T] with BackgroundWorker {

  // Shared rendezvous between the background worker (producer side) and the
  // consumer-facing API below: carries outgoing messages and incoming acks.
  val bridge: AsyncAckingBridge[T] = new HandlingAsyncAckingBridge[T](failureHandler)

  // Factory for the background worker (invoked by the BackgroundWorker mixin —
  // presumably; confirm against that trait). `shutdownHook` is polled by the
  // worker so it can stop when the consumer shuts down.
  def backgroundWorker(shutdownHook: () => Boolean) = {
    new AsyncBatchAckingMessageSourceImpl(ackTimeout, bridge, desiredCommitThreshold, source) {
      override def shuttingDown: () => Boolean = shutdownHook
    }
  }

  /**
   * Acks a message
   * @param ack Ack object containing info needed to ack a particular message
   */
  override def ack(ack: Acknowledgement): Unit = bridge.outstandingAcks.put(ack)

  /**
   * Get the next message from the kafka topic
   * @param ec implicit execution context used to create the future
   * @return Future containing an AckableMessage with the next message. This message should be acked at some
   *         point in the future by the caller
   *
   * NOTE: failure conditions are raised *synchronously* (thrown from this
   * method) rather than returned as failed Futures — see the TODOs below;
   * callers must be prepared to catch as well as recover.
   */
  override def next(implicit ec: ExecutionContext): Future[AckableMessage[T]] = {
    if (workerException.isDefined) {
      // TODO: Future.failed?
      throw workerException.get
    }
    if (!running) {
      // TODO: Future.failed?
      throw new RuntimeException("Client worker is not running")
    }
    Future {
      //This can block, let the execution context take care of it
      //TODO: make default blocking execution context to provide to the users
      blocking {
        bridge.syncPoint.take
      }
    }
  }
}
| whitepages/concurrent-kafka-consumer | src/main/scala/com/whitepages/kafka/concurrent/consumer/AsyncKafkaConsumer.scala | Scala | apache-2.0 | 1,931 |
package io.github.mandar2812.PlasmaML.dynamics.diffusion
import breeze.linalg.{DenseMatrix, DenseVector, norm}
import io.github.mandar2812.dynaml.kernels.{LocalScalarKernel, SVMKernel}
import io.github.mandar2812.dynaml.models.gp.AbstractGPRegressionModel
import io.github.mandar2812.dynaml.optimization.GloballyOptimizable
import io.github.mandar2812.dynaml.pipes._
import org.apache.log4j.Logger
/**
* Inverse inference over plasma radial diffusion parameters.
*
* @param Kp A function which returns the Kp value for a given
* time coordinate. Must be cast as a [[DataPipe]]
*
* @param dll_params A [[Tuple4]] containing the diffusion field
* parameters. See [[io.github.mandar2812.PlasmaML.utils.MagConfigEncoding]] and
* [[MagnetosphericProcessTrend]].
*
* @param tau_params A [[Tuple4]] containing the loss process parameters.
*
*
* @param noise_psd A kernel function representing the measurement noise of the
* Phase Space Density at a pair of space time locations.
*
* @param psd_data A Stream of space time locations and measured PSD values.
*
* @param colocation_points A collection of "ghost" points on which Particle diffusion is computed
* and its dependence on PSD is enforced with square loss.
*
*
* */
class KernelRadialDiffusionModel(
  val Kp: DataPipe[Double, Double],
  dll_params: (Double, Double, Double, Double),
  tau_params: (Double, Double, Double, Double),
  q_params: (Double, Double, Double, Double))(
  sigma: Double, thetaS: Double, thetaT: Double,
  val noise_psd: LocalScalarKernel[(Double, Double)],
  val noise_injection: LocalScalarKernel[(Double, Double)],
  val psd_data: Stream[((Double, Double), Double)],
  val colocation_points: Stream[(Double, Double)]) extends
  GloballyOptimizable {

  protected val logger: Logger = Logger.getLogger(this.getClass)

  // Prefixes used to namespace the noise kernels' hyper-parameters inside the
  // flat hyper-parameter map (derived from each kernel's class name).
  val baseNoiseID: String = "base_noise::"+noise_psd.toString.split("\\\\.").last

  val baseInjNoiseID: String = "base_inj_noise::"+noise_injection.toString.split("\\\\.").last

  // Parametric trend "Q" modelling the particle injection process.
  val injection_process: MagTrend = new MagTrend(Kp, "Q")

  val num_observations: Int = psd_data.length
  val num_colocation_points: Int = colocation_points.length

  val psd_mean: Double = psd_data.map(_._2).sum/num_observations

  // Observed PSD values, as a dense vector aligned with psd_data.
  private lazy val targets = DenseVector(psd_data.map(_._2).toArray)

  // Encoders that prefix/strip the namespace IDs on the noise kernels' states.
  private val (noiseStEncoder, injNoiseStEncoder) = (
    BasisFuncRadialDiffusionModel.stateEncoder(baseNoiseID),
    BasisFuncRadialDiffusionModel.stateEncoder(baseInjNoiseID)
  )

  // Covariance induced by the radial-diffusion operator applied to a squared
  // exponential base kernel (L2 in space, L1 in time).
  val covariance = new SE1dExtRadDiffusionKernel(
    sigma, thetaS, thetaT, Kp)(
    dll_params, tau_params,
    normSpace = "L2", normTime = "L1")

  // Names of the operator (diffusion field, loss time-scale, injection)
  // hyper-parameters, taken from the respective trend transforms.
  protected val operator_hyper_parameters: List[String] = {

    val dll_hyp = covariance.diffusionField.transform.keys
    val tau_hyp = covariance.lossTimeScale.transform.keys
    val q_hyp = injection_process.transform.keys

    List(
      dll_hyp._1, dll_hyp._2, dll_hyp._3, dll_hyp._4,
      tau_hyp._1, tau_hyp._2, tau_hyp._3, tau_hyp._4,
      q_hyp._1, q_hyp._2, q_hyp._3, q_hyp._4
    )
  }

  def _operator_hyper_parameters: List[String] = operator_hyper_parameters

  /**
    * Stores the value of the operator parameters
    * as a [[Map]].
    * */
  protected var operator_state: Map[String, Double] = {
    val dll_hyp = covariance.diffusionField.transform.keys
    val tau_hyp = covariance.lossTimeScale.transform.keys
    val q_hyp = injection_process.transform.keys

    Map(
      dll_hyp._1 -> dll_params._1, dll_hyp._2 -> dll_params._2,
      dll_hyp._3 -> dll_params._3, dll_hyp._4 -> dll_params._4,
      tau_hyp._1 -> tau_params._1, tau_hyp._2 -> tau_params._2,
      tau_hyp._3 -> tau_params._3, tau_hyp._4 -> tau_params._4,
      q_hyp._1 -> q_params._1, q_hyp._2 -> q_params._2,
      q_hyp._3 -> q_params._3, q_hyp._4 -> q_params._4
    )
  }

  // Full hyper-parameter list: covariance base, both (namespaced) noise
  // kernels, and the operator parameters above.
  override var hyper_parameters: List[String] =
    covariance._base_hyper_parameters ++
    noise_psd.hyper_parameters.map(h => baseNoiseID+"/"+h) ++
    noise_injection.hyper_parameters.map(h => baseInjNoiseID+"/"+h) ++
    operator_hyper_parameters

  /**
    * A Map which stores the current state of the system.
    * */
  override protected var current_state: Map[String, Double] =
    covariance._base_state ++
    noiseStEncoder(noise_psd.state) ++
    injNoiseStEncoder(noise_injection.state) ++
    operator_state

  // Hyper-parameters excluded from optimisation (kept fixed at their values).
  var blocked_hyper_parameters: List[String] =
    covariance.blocked_hyper_parameters ++
    noise_psd.blocked_hyper_parameters.map(h => baseNoiseID+"/"+h) ++
    noise_injection.blocked_hyper_parameters.map(h => baseInjNoiseID+"/"+h)

  /** Mark the given hyper-parameters as blocked, dispatching each name to the
    * component (covariance, PSD noise, injection noise) it belongs to by its
    * namespace prefix.
    */
  def block(hyp: String*): Unit = {
    val proc_cov_hyp = hyp.filter(
      h => h.contains(covariance.baseID) || h.contains("tau") || h.contains("dll"))
    // Strip the namespace prefix (and the following '/') before delegating.
    val proc_noise_hyp = hyp.filter(_.contains(baseNoiseID)).map(h => h.replace(baseNoiseID, "").tail)
    val proc_inj_noise_hyp = hyp.filter(
      _.contains(baseInjNoiseID)).map(
      h => h.replace(baseInjNoiseID, "").tail)

    covariance.block(proc_cov_hyp:_*)
    noise_psd.block(proc_noise_hyp:_*)
    noise_injection.block(proc_inj_noise_hyp:_*)

    blocked_hyper_parameters = hyp.toList
  }

  // Add to the blocked set instead of replacing it.
  def block_++(h: String*): Unit = block(blocked_hyper_parameters.union(h):_*)

  def effective_hyper_parameters: List[String] =
    hyper_parameters.filterNot(h => blocked_hyper_parameters.contains(h))

  def effective_state: Map[String, Double] = _current_state.filterKeys(effective_hyper_parameters.contains)

  /** Push a full hyper-parameter assignment into the covariance kernel, both
    * noise kernels and the operator state, then refresh `current_state`.
    */
  def setState(h: Map[String, Double]): Unit = {

    require(
      effective_hyper_parameters.forall(h.contains),
      "All Hyper-parameters must be contained in state assignment")

    // Split the flat assignment by namespace, stripping prefixes as in block().
    val base_kernel_state = h.filterKeys(
      c => c.contains(covariance.baseID) || c.contains("tau") || c.contains("dll")
    )

    val base_noise_state = h.filterKeys(
      _.contains(baseNoiseID)).map(
      c => (c._1.replace(baseNoiseID, "").tail, c._2)
    )

    val base_inj_noise_state = h.filterKeys(
      _.contains(baseInjNoiseID)).map(
      c => (c._1.replace(baseInjNoiseID, "").tail, c._2)
    )

    covariance.setHyperParameters(base_kernel_state)
    noise_psd.setHyperParameters(base_noise_state)
    noise_injection.setHyperParameters(base_inj_noise_state)

    // Whatever is left belongs to the operator (dll/tau/q) state.
    val op_state = h.filterNot(
      c => c._1.contains(covariance.baseID) || c._1.contains(baseNoiseID) || c._1.contains(baseInjNoiseID))

    op_state.foreach((keyval) => operator_state += (keyval._1 -> keyval._2))

    // NOTE(review): the injection-noise portion is not folded back into
    // current_state here (unlike its treatment at initialisation) — confirm
    // whether that omission is intentional.
    current_state = operator_state ++
      covariance._base_state ++
      noiseStEncoder(noise_psd.state)
  }

  /** Solve the Galerkin linear system for the given hyper-parameter
    * assignment.
    *
    * @return the solution coefficients together with the observation kernel
    *         matrix and the observation/colocation cross-kernel matrix.
    */
  def getGalerkinParams(h: Map[String, Double]): (DenseVector[Double], DenseMatrix[Double], DenseMatrix[Double]) = {

    setState(h)

    logInfoAux()

    val q = injection_process(operator_state)

    val (no, nc) = (num_observations, num_colocation_points)

    val ones_obs = DenseVector.fill[Double](no)(1d)

    val zeros_col = DenseVector.zeros[Double](nc)

    // Kernel blocks: observations x observations, observations x colocation,
    // colocation x colocation (under the operator-transformed kernel).
    val omega_phi = covariance.baseKernel.buildKernelMatrix(psd_data.map(_._1), no).getKernelMatrix()

    val omega_cross = SVMKernel.crossKernelMatrix(psd_data.map(_._1), colocation_points, covariance.invOperatorKernel)

    val omega_psi = covariance.buildKernelMatrix(colocation_points, nc).getKernelMatrix()

    // Right-hand side: [0 | observed PSD | injection values at colocation pts].
    val responses = DenseVector.vertcat(
      DenseVector(0d), targets,
      DenseVector(colocation_points.map(p => q(p)).toArray)
    )

    def I(n: Int) = DenseMatrix.eye[Double](n)

    // Block system; each diagonal kernel block is regularised by the
    // corresponding noise level.
    val A = DenseMatrix.vertcat(
      DenseMatrix.horzcat(DenseMatrix(0d), ones_obs.toDenseMatrix, zeros_col.toDenseMatrix),
      DenseMatrix.horzcat(ones_obs.toDenseMatrix.t, omega_phi+I(no)*noise_psd.state("noiseLevel"), omega_cross),
      DenseMatrix.horzcat(zeros_col.toDenseMatrix.t, omega_cross.t, omega_psi+I(nc)*noise_injection.state("noiseLevel"))
    )

    (A\\responses, omega_phi, omega_cross)
  }

  // Helper kept for clarity of getGalerkinParams' log message.
  private def logInfoAux(): Unit =
    logger.info("Constructing Kernel Primal-Dual Model for PSD")

  /**
    * Calculates the energy of the configuration,
    * in most global optimization algorithms
    * we aim to find an approximate value of
    * the hyper-parameters such that this function
    * is minimized.
    *
    * @param h The value of the hyper-parameters in the configuration space
    * @param options Optional parameters about configuration
    * @return Configuration Energy E(h)
    * */
  def energy(
    h: Map[String, Double],
    options: Map[String, String] = Map()): Double = try {

    val (params, omega, omega_cross) = getGalerkinParams(h)

    val dMat = DenseMatrix.vertcat(
      DenseVector.ones[Double](num_observations).toDenseMatrix,
      omega,
      omega_cross.t
    )

    val mean = dMat.t*params

    val modelVariance = norm(targets - mean)/targets.length

    logger.info("variance: "+modelVariance)

    /*
    * Construct partitioned covariance matrix
    * */

    logger.info("Constructing partitions of covariance matrix")

    logger.info("Partition K_uu")
    val k_uu = covariance.baseKernel.buildKernelMatrix(
      psd_data.map(_._1),
      num_observations).getKernelMatrix

    logger.info("Partition K_nn")
    val noise_mat_psd = noise_psd.buildKernelMatrix(
      psd_data.map(_._1),
      num_observations).getKernelMatrix

    // Negative log marginal likelihood of the residuals under the GP prior.
    AbstractGPRegressionModel.logLikelihood(targets - mean, k_uu + noise_mat_psd )
  } catch {
    // Linear-algebra failures map to sentinel energies so the global
    // optimiser can discard this configuration rather than crash.
    case _: breeze.linalg.MatrixSingularException => Double.NaN
    case _: breeze.linalg.NotConvergedException => Double.PositiveInfinity
    case _: breeze.linalg.MatrixNotSymmetricException => Double.NaN
  }
}
| mandar2812/PlasmaML | mag-core/src/main/scala/io/github/mandar2812/PlasmaML/dynamics/diffusion/KernelRadialDiffusionModel.scala | Scala | lgpl-2.1 | 9,593 |
package com.radkrish.euler.problem19
import java.util.{Calendar, Date}
/**
* Topic - Counting Sundays
*
* Description -
*
* You are given the following information, but you may prefer to do some research for yourself.
*
* 1 Jan 1900 was a Monday.
* Thirty days has September,
* April, June and November.
* All the rest have thirty-one,
* Saving February alone,
* Which has twenty-eight, rain or shine.
* And on leap years, twenty-nine.
* A leap year occurs on any year evenly divisible by 4, but not on a century unless it is divisible by 400.
*
* How many Sundays fell on the first of the month during the twentieth century (1 Jan 1901 to 31 Dec 2000)?
*
* Created by radkrish on 3/28/17.
*/
object Problem19 {

  import java.time.{DayOfWeek, LocalDate}

  /**
    * Counts the months whose first day falls on a Sunday.
    *
    * Uses `java.time` (proleptic Gregorian calendar) instead of the original
    * `SimpleDateFormat` + shared mutable `Calendar` approach, which built and
    * parsed date strings only to read the day of week back out, and relied on
    * lenient, locale-sensitive parsing.
    *
    * @param startYear first year of the range (inclusive)
    * @param endYear   last year of the range (inclusive)
    * @return number of months in [startYear, endYear] starting on a Sunday
    */
  def countFirstSundays(startYear: Int, endYear: Int): Int = {
    val firstDaysOfMonths = for {
      year  <- startYear to endYear
      month <- 1 to 12
    } yield LocalDate.of(year, month, 1)
    firstDaysOfMonths.count(_.getDayOfWeek == DayOfWeek.SUNDAY)
  }

  def main(args: Array[String]): Unit = {
    // Problem statement range: 1 Jan 1901 to 31 Dec 2000.
    print(countFirstSundays(1901, 2000))
  }
}
| radkrish/scala-euler | src/main/scala-2.12/com/radkrish/euler/problem19/Problem19.scala | Scala | unlicense | 1,197 |
/**
* Angles
* Copyright (C) 2014 Sebastian Schelter
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package io.ssc.angles.pipeline
import java.io.File
import io.ssc.angles.Config
import io.ssc.angles.pipeline.data.Storage
import io.ssc.angles.pipeline.filters.{ArticleFilter, GermanFilter}
import io.ssc.angles.pipeline.nlp.{German, NLPUtils}
import org.apache.commons.io.FileUtils
import org.apache.commons.lang.StringEscapeUtils
import org.apache.lucene.analysis.de.GermanAnalyzer
import org.apache.lucene.document.{Document, Field, FieldType}
import org.apache.lucene.index.{IndexWriter, IndexWriterConfig}
import org.apache.lucene.store.FSDirectory
import org.apache.lucene.util.Version
import org.joda.time.DateTime
import org.slf4j.LoggerFactory
import twitter4j.Status
/** Pipeline step that builds a Lucene index over crawled German-language
  * articles, combining page metadata, extracted named entities and Twitter
  * status information into one document per website.
  *
  * Fix: removed the local `metasToConsider` array, which was never referenced
  * anywhere in `execute` (the metadata keys are looked up directly below).
  */
class IndexArticles extends Step {

  val log = LoggerFactory.getLogger(classOf[IndexArticles])

  /**
    * Rebuilds the article index from scratch for all websites crawled since
    * `since`. The previous index directory is deleted before writing.
    */
  def execute(since: DateTime): Unit = {

    val now = new DateTime()

    // Lucene field type: indexed, tokenized, stored, with term vectors
    // including positions (frozen so it cannot be mutated afterwards).
    val TYPE_STORED_WITH_TERMVECTORS: FieldType = new FieldType()
    TYPE_STORED_WITH_TERMVECTORS.setIndexed(true)
    TYPE_STORED_WITH_TERMVECTORS.setTokenized(true)
    TYPE_STORED_WITH_TERMVECTORS.setStored(true)
    TYPE_STORED_WITH_TERMVECTORS.setStoreTermVectors(true)
    TYPE_STORED_WITH_TERMVECTORS.setStoreTermVectorPositions(true)
    TYPE_STORED_WITH_TERMVECTORS.freeze()

    val websites = Storage.crawledWebsites(since)

    val websitesWithMetadata = websites map { website =>
      website -> Storage.metadataFor(website.id)
    }

    // Keep only German-language pages that look like articles.
    val selection = websitesWithMetadata
      .filter { case (website, metadata) => GermanFilter.passes(website, metadata)}
      .filter { case (website, metadata) => ArticleFilter.passes(website, metadata)}

    val withEntities = selection map { case (website, metadata) =>
      val entities = Storage.namedEntities(website.id)
      (website, metadata, entities)
    }

    // Recreate the index directory from scratch on every run.
    val pathToIndex = new File(Config.property("angles.dataDir"), "articleIndex")
    FileUtils.deleteDirectory(pathToIndex)

    val analyzer = new GermanAnalyzer(Version.LUCENE_42, German.stopSet())
    val directory = FSDirectory.open(pathToIndex)
    val config = new IndexWriterConfig(Version.LUCENE_42, analyzer)
    val indexWriter = new IndexWriter(directory, config)

    withEntities foreach { case (website, metadata, entities) =>

      // Prefer OpenGraph title/description, falling back to the plain
      // HTML metadata, then to the empty string.
      val titleString = if (metadata.contains("meta-og:title")) {
        metadata("meta-og:title").mkString(" ")
      } else if (metadata.contains("title")) {
        metadata("title").mkString(" ")
      } else {
        ""
      }

      val descriptionString = if (metadata.contains("meta-og:description")) {
        metadata("meta-og:description").mkString(" ")
      } else if (metadata.contains("meta-description")) {
        metadata("meta-description").mkString(" ")
      } else {
        ""
      }

      val keywordsString = if (metadata.contains("meta-keywords")) {
        metadata("meta-keywords").mkString(" ")
      } else {
        ""
      }

      // Encode each entity as Name_With_Underscores_TYPE so it survives
      // tokenization as a single term.
      val nerString = (entities map { entity => entity.name.replaceAll(" ", "_") + "_" + entity.entityType}).mkString(" ")

      val terms = NLPUtils.toGermanStemmedTerms(nerString)

      val explorer = Storage.explorerOfWebsite(website.id).get
      val status = Storage.statusOfWebsite(website.id).get

      val retweets = ExtractStatusData.retweetsPerHoursInDayOne(now, status)

      val document = new Document()
      document.add(new Field("uri", website.realUri, TYPE_STORED_WITH_TERMVECTORS))
      document.add(new Field("screenname", explorer.screenname, TYPE_STORED_WITH_TERMVECTORS))
      document.add(new Field("retweetsPerHourInDayOne", retweets.toString, TYPE_STORED_WITH_TERMVECTORS))
      document.add(new Field("hashtags", hashtags(status), TYPE_STORED_WITH_TERMVECTORS))
      document.add(new Field("title", clean(titleString.toString), TYPE_STORED_WITH_TERMVECTORS))
      document.add(new Field("description", clean(descriptionString.toString), TYPE_STORED_WITH_TERMVECTORS))
      document.add(new Field("keywords", keywordsString.toString, TYPE_STORED_WITH_TERMVECTORS))
      document.add(new Field("entities", nerString.toString, TYPE_STORED_WITH_TERMVECTORS))
      document.add(new Field("body", clean(NLPUtils.extractArticle(website.html)), TYPE_STORED_WITH_TERMVECTORS))
      indexWriter.addDocument(document)

      println(website.realUri)
      println(terms.mkString(" ") + "\\n")
    }
    indexWriter.close()
  }

  //TODO this should be in metadata extractor
  /** Unescapes HTML entities and trims surrounding whitespace. */
  def clean(str: String) = {
    StringEscapeUtils.unescapeHtml(str.trim)
  }

  /** Space-separated, lower-cased hashtags of a tweet. */
  def hashtags(status: Status) = {
    (for (hashtagEntity <- status.getHashtagEntities) yield {
      hashtagEntity.getText.toLowerCase
    }).mkString(" ")
  }
}
| sscdotopen/angles | src/main/scala/io/ssc/angles/pipeline/IndexArticles.scala | Scala | gpl-3.0 | 5,462 |
package com.github.pcejrowski.grasca
import java.time.Instant
import com.github.pcejrowski.grasca.model.render.RenderedValues
import com.github.pcejrowski.wiremock.WiremockServer
import com.github.tomakehurst.wiremock.client.WireMock.{aResponse, get, stubFor, urlMatching}
import org.scalatest.{FlatSpec, Matchers}
import scala.collection.immutable.ListMap
class RenderAPITest extends FlatSpec with Matchers with WiremockServer {

  /** Client under test, pointed at the local WireMock stub server. */
  val testee: RenderAPI = RenderAPI(host, port)

  behavior of "A RenderAPI"

  it should "parse rendered server response correctly" in {
    // Canned Graphite /render payload: two targets, the first containing a
    // null datapoint that must be parsed as None.
    val renderJson =
      """
        |[
        | {
        | "target": "1",
        | "datapoints": [
        | [
        | 2,
        | 1496158910
        | ],
        | [
        | 1,
        | 1496158920
        | ],
        | [
        | null,
        | 1496158930
        | ]
        | ]
        | },
        | {
        | "target": "2",
        | "datapoints": [
        | [
        | 4,
        | 1496158910
        | ]
        | ]
        | }
        |]
      """.stripMargin

    // Stub every /render?target=dummy... request with the canned payload.
    stubFor(
      get(urlMatching("\\\\/render\\\\?target=dummy(.*)"))
        .willReturn(aResponse().withBody(renderJson).withStatus(200)))

    // Expected parse result: newest datapoints first, null value -> None.
    val firstSeries: ListMap[Instant, Option[Long]] = ListMap(
      Instant.ofEpochSecond(1496158930) -> None,
      Instant.ofEpochSecond(1496158920) -> Some(1),
      Instant.ofEpochSecond(1496158910) -> Some(2))
    val secondSeries: ListMap[Instant, Option[Long]] = ListMap(
      Instant.ofEpochSecond(1496158910) -> Some(4))

    val parsed: Option[RenderedValues] = testee.values("dummy")

    parsed should be(Some(ListMap("1" -> firstSeries, "2" -> secondSeries)))
  }
}
| pcejrowski/grasca | src/test/scala/com/github/pcejrowski/grasca/RenderAPITest.scala | Scala | mit | 1,821 |
package spark.storage
import java.io.{InputStream, OutputStream}
import java.nio.{ByteBuffer, MappedByteBuffer}
import java.util.concurrent.{ConcurrentHashMap, LinkedBlockingQueue}
import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet, Queue}
import scala.collection.JavaConversions._
import akka.actor.{ActorSystem, Cancellable, Props}
import akka.dispatch.{Await, Future}
import akka.util.Duration
import akka.util.duration._
import com.ning.compress.lzf.{LZFInputStream, LZFOutputStream}
import it.unimi.dsi.fastutil.io.FastByteArrayOutputStream
import spark.{Logging, SizeEstimator, SparkEnv, SparkException, Utils}
import spark.network._
import spark.serializer.Serializer
import spark.util.{ByteBufferInputStream, IdGenerator, MetadataCleaner, TimeStampedHashMap}
import sun.nio.ch.DirectBuffer
private[spark]
case class BlockException(blockId: String, message: String, ex: Exception = null)
extends Exception(message)
private[spark]
class BlockManager(
executorId: String,
actorSystem: ActorSystem,
val master: BlockManagerMaster,
val serializer: Serializer,
maxMemory: Long)
extends Logging {
/** Per-block bookkeeping: storage level, whether to report to the master, and
  * a latch (`pending`/`size`) signalling when the block has finished writing.
  */
class BlockInfo(val level: StorageLevel, val tellMaster: Boolean) {
  // True while the block is still being written; `size` is only meaningful
  // after markReady has been called.
  var pending: Boolean = true
  var size: Long = -1L

  /** Wait for this BlockInfo to be marked as ready (i.e. block is finished writing) */
  def waitForReady() {
    if (pending) {
      synchronized {
        // Re-check under the monitor; markReady notifies all waiters.
        while (pending) this.wait()
      }
    }
  }

  /** Mark this BlockInfo as ready (i.e. block is finished writing) */
  def markReady(sizeInBytes: Long) {
    // NOTE(review): pending/size are assigned outside the synchronized block
    // while waitForReady re-reads `pending` inside it — confirm the intended
    // memory-visibility contract.
    pending = false
    size = sizeInBytes
    synchronized {
      this.notifyAll()
    }
  }
}
private val blockInfo = new TimeStampedHashMap[String, BlockInfo]
private[storage] val memoryStore: BlockStore = new MemoryStore(this, maxMemory)
private[storage] val diskStore: BlockStore =
new DiskStore(this, System.getProperty("spark.local.dir", System.getProperty("java.io.tmpdir")))
val connectionManager = new ConnectionManager(0)
implicit val futureExecContext = connectionManager.futureExecContext
val blockManagerId = BlockManagerId(
executorId, connectionManager.id.host, connectionManager.id.port)
// Max megabytes of data to keep in flight per reducer (to avoid over-allocating memory
// for receiving shuffle outputs)
val maxBytesInFlight =
System.getProperty("spark.reducer.maxMbInFlight", "48").toLong * 1024 * 1024
// Whether to compress broadcast variables that are stored
val compressBroadcast = System.getProperty("spark.broadcast.compress", "true").toBoolean
// Whether to compress shuffle output that are stored
val compressShuffle = System.getProperty("spark.shuffle.compress", "true").toBoolean
// Whether to compress RDD partitions that are stored serialized
val compressRdds = System.getProperty("spark.rdd.compress", "false").toBoolean
val heartBeatFrequency = BlockManager.getHeartBeatFrequencyFromSystemProperties
val host = System.getProperty("spark.hostname", Utils.localHostName())
val slaveActor = master.actorSystem.actorOf(Props(new BlockManagerSlaveActor(this)),
name = "BlockManagerActor" + BlockManager.ID_GENERATOR.next)
// Pending reregistration action being executed asynchronously or null if none
// is pending. Accesses should synchronize on asyncReregisterLock.
var asyncReregisterTask: Future[Unit] = null
val asyncReregisterLock = new Object
// Periodic heartbeat to the master; a false reply means the master no longer
// knows this block manager, so re-register (and re-report all blocks).
private def heartBeat() {
  if (!master.sendHeartBeat(blockManagerId)) {
    reregister()
  }
}
var heartBeatTask: Cancellable = null
val metadataCleaner = new MetadataCleaner("BlockManager", this.dropOldBlocks)
initialize()
/**
* Construct a BlockManager with a memory limit set based on system properties.
*/
def this(execId: String, actorSystem: ActorSystem, master: BlockManagerMaster,
serializer: Serializer) = {
this(execId, actorSystem, master, serializer, BlockManager.getMaxMemoryFromSystemProperties)
}
/**
* Initialize the BlockManager. Register to the BlockManagerMaster, and start the
* BlockManagerWorker actor.
*/
private def initialize() {
master.registerBlockManager(blockManagerId, maxMemory, slaveActor)
BlockManagerWorker.startBlockManagerWorker(this)
if (!BlockManager.getDisableHeartBeatsForTesting) {
// Schedule periodic heartbeats so the master can detect dead or dropped block managers.
heartBeatTask = actorSystem.scheduler.schedule(0.seconds, heartBeatFrequency.milliseconds) {
heartBeat()
}
}
}
/**
* Report all blocks to the BlockManager again. This may be necessary if we are dropped
* by the BlockManager and come back or if we become capable of recovering blocks on disk after
* an executor crash.
*
* This function deliberately fails silently if the master returns false (indicating that
* the slave needs to reregister). The error condition will be detected again by the next
* heart beat attempt or new block registration and another try to reregister all blocks
* will be made then.
*/
private def reportAllBlocks() {
logInfo("Reporting " + blockInfo.size + " blocks to the master.")
for ((blockId, info) <- blockInfo) {
if (!tryToReportBlockStatus(blockId, info)) {
// Give up after the first failure; a later heartbeat / registration will retry.
logError("Failed to report " + blockId + " to master; giving up.")
return
}
}
}
/**
* Reregister with the master and report all blocks to it. This will be called by the heart beat
* thread if our heartbeat to the block manager indicates that we were not registered.
*
* Note that this method must be called without any BlockInfo locks held.
*/
def reregister() {
// TODO: We might need to rate limit reregistering.
logInfo("BlockManager reregistering with master")
master.registerBlockManager(blockManagerId, maxMemory, slaveActor)
reportAllBlocks()
}
/**
* Reregister with the master sometime soon.
*/
def asyncReregister() {
asyncReregisterLock.synchronized {
// At most one asynchronous reregistration may be in flight at a time.
if (asyncReregisterTask == null) {
asyncReregisterTask = Future[Unit] {
reregister()
asyncReregisterLock.synchronized {
// Clear the pending-task marker so a later call can start a new reregistration.
asyncReregisterTask = null
}
}
}
}
}
/**
* For testing. Wait for any pending asynchronous reregistration; otherwise, do nothing.
*/
def waitForAsyncReregister() {
// NOTE(review): reads asyncReregisterTask without holding asyncReregisterLock —
// appears to be an intentional best-effort snapshot for tests; confirm.
val task = asyncReregisterTask
if (task != null) {
Await.ready(task, Duration.Inf)
}
}
/**
* Get storage level of local block. If no info exists for the block, then returns null.
*/
def getLevel(blockId: String): StorageLevel = blockInfo.get(blockId).map(_.level).orNull
/**
* Tell the master about the current storage status of a block. This will send a block update
* message reflecting the current status, *not* the desired storage level in its block info.
* For example, a block with MEMORY_AND_DISK set might have fallen out to be only on disk.
*/
def reportBlockStatus(blockId: String, info: BlockInfo) {
// tryToReportBlockStatus returns false when the master asked us to re-register.
val needReregister = !tryToReportBlockStatus(blockId, info)
if (needReregister) {
logInfo("Got told to reregister updating block " + blockId)
// Reregistering will report our new block for free.
asyncReregister()
}
logDebug("Told master about block " + blockId)
}
/**
* Actually send a UpdateBlockInfo message. Returns the master's response,
* which will be true if the block was successfully recorded and false if
* the slave needs to re-register.
*/
private def tryToReportBlockStatus(blockId: String, info: BlockInfo): Boolean = {
// Snapshot the block's actual status under its lock, since memory/disk presence can change.
val (curLevel, inMemSize, onDiskSize, tellMaster) = info.synchronized {
info.level match {
case null =>
(StorageLevel.NONE, 0L, 0L, false)
case level =>
// Report where the block actually is, not where its declared level says it should be.
val inMem = level.useMemory && memoryStore.contains(blockId)
val onDisk = level.useDisk && diskStore.contains(blockId)
val storageLevel = StorageLevel(onDisk, inMem, level.deserialized, level.replication)
val memSize = if (inMem) memoryStore.getSize(blockId) else 0L
val diskSize = if (onDisk) diskStore.getSize(blockId) else 0L
(storageLevel, memSize, diskSize, info.tellMaster)
}
}
if (tellMaster) {
master.updateBlockInfo(blockManagerId, blockId, curLevel, inMemSize, onDiskSize)
} else {
// Nothing to report for this block; treat as success so callers don't re-register.
true
}
}
/**
* Get locations (host names) of the given block, as known by the master.
*
* @param blockId ID of the block to look up.
* @return the IPs of all block managers currently holding the block.
*/
def getLocations(blockId: String): Seq[String] = {
  val startTimeMs = System.currentTimeMillis
  // The manager list is never mutated here, so use a val; the result is the last expression,
  // so no explicit `return` is needed.
  val managers = master.getLocations(blockId)
  val locations = managers.map(_.ip)
  logDebug("Got block locations in " + Utils.getUsedTimeMs(startTimeMs))
  locations
}
/**
* Get locations of an array of blocks in a single round trip to the master.
*
* @param blockIds IDs of the blocks to look up.
* @return one sequence of holder IPs per requested block, in the same order.
*/
def getLocations(blockIds: Array[String]): Array[Seq[String]] = {
  val startTimeMs = System.currentTimeMillis
  // Drop the redundant explicit `return`; the mapped result is the method's value.
  val locations = master.getLocations(blockIds).map(_.map(_.ip).toSeq).toArray
  logDebug("Got multiple block location in " + Utils.getUsedTimeMs(startTimeMs))
  locations
}
/**
* Get block from local block manager.
*/
def getLocal(blockId: String): Option[Iterator[Any]] = {
logDebug("Getting local block " + blockId)
// As an optimization for map output fetches, if the block is for a shuffle, return it
// without acquiring a lock; the disk store never deletes (recent) items so this should work
if (blockId.startsWith("shuffle_")) {
return diskStore.getValues(blockId) match {
case Some(iterator) =>
Some(iterator)
case None =>
throw new Exception("Block " + blockId + " not found on disk, though it should be")
}
}
val info = blockInfo.get(blockId).orNull
if (info != null) {
info.synchronized {
info.waitForReady() // In case the block is still being put() by another thread
val level = info.level
logDebug("Level for block " + blockId + " is " + level)
// Look for the block in memory
if (level.useMemory) {
logDebug("Getting block " + blockId + " from memory")
memoryStore.getValues(blockId) match {
case Some(iterator) =>
return Some(iterator)
case None =>
// Fall through to the disk check below (memory-only blocks end up returning None).
logDebug("Block " + blockId + " not found in memory")
}
}
// Look for block on disk, potentially loading it back into memory if required
if (level.useDisk) {
logDebug("Getting block " + blockId + " from disk")
if (level.useMemory && level.deserialized) {
diskStore.getValues(blockId) match {
case Some(iterator) =>
// Put the block back in memory before returning it
// TODO: Consider creating a putValues that also takes in a iterator ?
val elements = new ArrayBuffer[Any]
elements ++= iterator
memoryStore.putValues(blockId, elements, level, true).data match {
case Left(iterator2) =>
return Some(iterator2)
case _ =>
throw new Exception("Memory store did not return back an iterator")
}
case None =>
throw new Exception("Block " + blockId + " not found on disk, though it should be")
}
} else if (level.useMemory && !level.deserialized) {
// Read it as a byte buffer into memory first, then return it
diskStore.getBytes(blockId) match {
case Some(bytes) =>
// Put a copy of the block back in memory before returning it. Note that we can't
// put the ByteBuffer returned by the disk store as that's a memory-mapped file.
val copyForMemory = ByteBuffer.allocate(bytes.limit)
copyForMemory.put(bytes)
memoryStore.putBytes(blockId, copyForMemory, level)
// Rewind after the bulk put so deserialization reads from position 0.
bytes.rewind()
return Some(dataDeserialize(blockId, bytes))
case None =>
throw new Exception("Block " + blockId + " not found on disk, though it should be")
}
} else {
// Disk-only level: stream values straight from the disk store.
diskStore.getValues(blockId) match {
case Some(iterator) =>
return Some(iterator)
case None =>
throw new Exception("Block " + blockId + " not found on disk, though it should be")
}
}
}
}
} else {
logDebug("Block " + blockId + " not registered locally")
}
// Registered but evicted from memory with no disk copy, or not registered at all.
return None
}
/**
* Get block from the local block manager as serialized bytes.
*/
def getLocalBytes(blockId: String): Option[ByteBuffer] = {
// TODO: This whole thing is very similar to getLocal; we need to refactor it somehow
logDebug("Getting local block " + blockId + " as bytes")
// As an optimization for map output fetches, if the block is for a shuffle, return it
// without acquiring a lock; the disk store never deletes (recent) items so this should work
if (blockId.startsWith("shuffle_")) {
return diskStore.getBytes(blockId) match {
case Some(bytes) =>
Some(bytes)
case None =>
throw new Exception("Block " + blockId + " not found on disk, though it should be")
}
}
val info = blockInfo.get(blockId).orNull
if (info != null) {
info.synchronized {
info.waitForReady() // In case the block is still being put() by another thread
val level = info.level
logDebug("Level for block " + blockId + " is " + level)
// Look for the block in memory
if (level.useMemory) {
logDebug("Getting block " + blockId + " from memory")
memoryStore.getBytes(blockId) match {
case Some(bytes) =>
return Some(bytes)
case None =>
// Fall through to the disk check below.
logDebug("Block " + blockId + " not found in memory")
}
}
// Look for block on disk
if (level.useDisk) {
// Read it as a byte buffer into memory first, then return it
diskStore.getBytes(blockId) match {
case Some(bytes) =>
if (level.useMemory) {
// Re-cache in memory so future reads avoid the disk round trip.
if (level.deserialized) {
memoryStore.putBytes(blockId, bytes, level)
} else {
// The memory store will hang onto the ByteBuffer, so give it a copy instead of
// the memory-mapped file buffer we got from the disk store
val copyForMemory = ByteBuffer.allocate(bytes.limit)
copyForMemory.put(bytes)
memoryStore.putBytes(blockId, copyForMemory, level)
}
}
// Rewind so the caller reads from position 0 after our put() consumed the buffer.
bytes.rewind()
return Some(bytes)
case None =>
throw new Exception("Block " + blockId + " not found on disk, though it should be")
}
}
}
} else {
logDebug("Block " + blockId + " not registered locally")
}
return None
}
/**
* Get block from remote block managers.
*/
def getRemote(blockId: String): Option[Iterator[Any]] = {
if (blockId == null) {
throw new IllegalArgumentException("Block Id is null")
}
logDebug("Getting remote block " + blockId)
// Get locations of block
val locations = master.getLocations(blockId)
// Get block from remote locations, trying each holder in turn until one responds.
for (loc <- locations) {
logDebug("Getting remote block " + blockId + " from " + loc)
val data = BlockManagerWorker.syncGetBlock(
GetBlock(blockId), ConnectionManagerId(loc.ip, loc.port))
if (data != null) {
logDebug("Data is not null: " + data)
return Some(dataDeserialize(blockId, data))
}
logDebug("Data is null")
}
logDebug("Data not found")
return None
}
/**
* Get a block from the block manager (either local or remote).
*/
def get(blockId: String): Option[Iterator[Any]] = {
// Prefer the local copy; only go over the network when it is absent.
getLocal(blockId).orElse(getRemote(blockId))
}
/**
* Get multiple blocks from local and remote block manager using their BlockManagerIds. Returns
* an Iterator of (block ID, value) pairs so that clients may handle blocks in a pipelined
* fashion as they're received. Expects a size in bytes to be provided for each block fetched,
* so that we can control the maxMegabytesInFlight for the fetch.
*/
def getMultiple(blocksByAddress: Seq[(BlockManagerId, Seq[(String, Long)])])
: Iterator[(String, Option[Iterator[Any]])] = {
if (blocksByAddress == null) {
throw new IllegalArgumentException("BlocksByAddress is null")
}
val totalBlocks = blocksByAddress.map(_._2.size).sum
logDebug("Getting " + totalBlocks + " blocks")
var startTime = System.currentTimeMillis
val localBlockIds = new ArrayBuffer[String]()
val remoteBlockIds = new HashSet[String]()
// A result of a fetch. Includes the block ID, size in bytes, and a function to deserialize
// the block (since we want all deserializaton to happen in the calling thread); can also
// represent a fetch failure if size == -1.
class FetchResult(val blockId: String, val size: Long, val deserialize: () => Iterator[Any]) {
def failed: Boolean = size == -1
}
// A queue to hold our results. Network callbacks put into it; the returned iterator takes.
val results = new LinkedBlockingQueue[FetchResult]
// A request to fetch one or more blocks, complete with their sizes
class FetchRequest(val address: BlockManagerId, val blocks: Seq[(String, Long)]) {
val size = blocks.map(_._2).sum
}
// Queue of fetch requests to issue; we'll pull requests off this gradually to make sure that
// the number of bytes in flight is limited to maxBytesInFlight
val fetchRequests = new Queue[FetchRequest]
// Current bytes in flight from our requests
var bytesInFlight = 0L
// Issue one batched fetch to a remote node; results arrive via the future's callback.
def sendRequest(req: FetchRequest) {
logDebug("Sending request for %d blocks (%s) from %s".format(
req.blocks.size, Utils.memoryBytesToString(req.size), req.address.ip))
val cmId = new ConnectionManagerId(req.address.ip, req.address.port)
val blockMessageArray = new BlockMessageArray(req.blocks.map {
case (blockId, size) => BlockMessage.fromGetBlock(GetBlock(blockId))
})
bytesInFlight += req.size
val sizeMap = req.blocks.toMap // so we can look up the size of each blockID
val future = connectionManager.sendMessageReliably(cmId, blockMessageArray.toBufferMessage)
future.onSuccess {
case Some(message) => {
val bufferMessage = message.asInstanceOf[BufferMessage]
val blockMessageArray = BlockMessageArray.fromBufferMessage(bufferMessage)
for (blockMessage <- blockMessageArray) {
if (blockMessage.getType != BlockMessage.TYPE_GOT_BLOCK) {
throw new SparkException(
"Unexpected message " + blockMessage.getType + " received from " + cmId)
}
val blockId = blockMessage.getId
// Deserialization is deferred to the consuming thread via the closure.
results.put(new FetchResult(
blockId, sizeMap(blockId), () => dataDeserialize(blockId, blockMessage.getData)))
logDebug("Got remote block " + blockId + " after " + Utils.getUsedTimeMs(startTime))
}
}
case None => {
logError("Could not get block(s) from " + cmId)
// Mark every block of the failed request with size -1 (the failure sentinel).
for ((blockId, size) <- req.blocks) {
results.put(new FetchResult(blockId, -1, null))
}
}
}
}
// Partition local and remote blocks. Remote blocks are further split into FetchRequests of size
// at most maxBytesInFlight in order to limit the amount of data in flight.
val remoteRequests = new ArrayBuffer[FetchRequest]
for ((address, blockInfos) <- blocksByAddress) {
if (address == blockManagerId) {
localBlockIds ++= blockInfos.map(_._1)
} else {
remoteBlockIds ++= blockInfos.map(_._1)
// Make our requests at least maxBytesInFlight / 5 in length; the reason to keep them
// smaller than maxBytesInFlight is to allow multiple, parallel fetches from up to 5
// nodes, rather than blocking on reading output from one node.
val minRequestSize = math.max(maxBytesInFlight / 5, 1L)
logInfo("maxBytesInFlight: " + maxBytesInFlight + ", minRequest: " + minRequestSize)
val iterator = blockInfos.iterator
var curRequestSize = 0L
var curBlocks = new ArrayBuffer[(String, Long)]
while (iterator.hasNext) {
val (blockId, size) = iterator.next()
curBlocks += ((blockId, size))
curRequestSize += size
if (curRequestSize >= minRequestSize) {
// Add this FetchRequest
remoteRequests += new FetchRequest(address, curBlocks)
curRequestSize = 0
curBlocks = new ArrayBuffer[(String, Long)]
}
}
// Add in the final request
if (!curBlocks.isEmpty) {
remoteRequests += new FetchRequest(address, curBlocks)
}
}
}
// Add the remote requests into our queue in a random order
fetchRequests ++= Utils.randomize(remoteRequests)
// Send out initial requests for blocks, up to our maxBytesInFlight
while (!fetchRequests.isEmpty &&
(bytesInFlight == 0 || bytesInFlight + fetchRequests.front.size <= maxBytesInFlight)) {
sendRequest(fetchRequests.dequeue())
}
val numGets = remoteBlockIds.size - fetchRequests.size
logInfo("Started " + numGets + " remote gets in " + Utils.getUsedTimeMs(startTime))
// Get the local blocks while remote blocks are being fetched. Note that it's okay to do
// these all at once because they will just memory-map some files, so they won't consume
// any memory that might exceed our maxBytesInFlight
startTime = System.currentTimeMillis
for (id <- localBlockIds) {
getLocal(id) match {
case Some(iter) => {
results.put(new FetchResult(id, 0, () => iter)) // Pass 0 as size since it's not in flight
logDebug("Got local block " + id)
}
case None => {
throw new BlockException(id, "Could not get block " + id + " from local machine")
}
}
}
logDebug("Got local blocks in " + Utils.getUsedTimeMs(startTime) + " ms")
// Return an iterator that will read fetched blocks off the queue as they arrive.
return new Iterator[(String, Option[Iterator[Any]])] {
var resultsGotten = 0
def hasNext: Boolean = resultsGotten < totalBlocks
def next(): (String, Option[Iterator[Any]]) = {
resultsGotten += 1
// Blocks until the next fetched (or local) block is available.
val result = results.take()
bytesInFlight -= result.size
// Top up the in-flight window now that this result's bytes have landed.
while (!fetchRequests.isEmpty &&
(bytesInFlight == 0 || bytesInFlight + fetchRequests.front.size <= maxBytesInFlight)) {
sendRequest(fetchRequests.dequeue())
}
(result.blockId, if (result.failed) None else Some(result.deserialize()))
}
}
}
/**
 * Put a block whose values are supplied as an iterator. The iterator is fully drained into
 * a buffer and delegated to the ArrayBuffer-based put. Returns the (estimated) size in bytes.
 */
def put(blockId: String, values: Iterator[Any], level: StorageLevel, tellMaster: Boolean)
  : Long = {
  val buffered = new ArrayBuffer[Any]
  buffered.appendAll(values)
  put(blockId, buffered, level, tellMaster)
}
/**
* Put a new block of values to the block manager. Returns its (estimated) size in bytes.
*/
def put(blockId: String, values: ArrayBuffer[Any], level: StorageLevel,
tellMaster: Boolean = true) : Long = {
if (blockId == null) {
throw new IllegalArgumentException("Block Id is null")
}
if (values == null) {
throw new IllegalArgumentException("Values is null")
}
if (level == null || !level.isValid) {
throw new IllegalArgumentException("Storage level is null or invalid")
}
// Idempotent on duplicates: wait for any in-progress put and return the existing size.
val oldBlock = blockInfo.get(blockId).orNull
if (oldBlock != null) {
logWarning("Block " + blockId + " already exists on this machine; not re-adding it")
oldBlock.waitForReady()
return oldBlock.size
}
// Remember the block's storage level so that we can correctly drop it to disk if it needs
// to be dropped right after it got put into memory. Note, however, that other threads will
// not be able to get() this block until we call markReady on its BlockInfo.
val myInfo = new BlockInfo(level, tellMaster)
blockInfo.put(blockId, myInfo)
val startTimeMs = System.currentTimeMillis
// If we need to replicate the data, we'll want access to the values, but because our
// put will read the whole iterator, there will be no values left. For the case where
// the put serializes data, we'll remember the bytes, above; but for the case where it
// doesn't, such as deserialized storage, let's rely on the put returning an Iterator.
var valuesAfterPut: Iterator[Any] = null
// Ditto for the bytes after the put
var bytesAfterPut: ByteBuffer = null
// Size of the block in bytes (to return to caller)
var size = 0L
myInfo.synchronized {
logTrace("Put for block " + blockId + " took " + Utils.getUsedTimeMs(startTimeMs)
+ " to get into synchronized block")
if (level.useMemory) {
// Save it just to memory first, even if it also has useDisk set to true; we will later
// drop it to disk if the memory store can't hold it.
val res = memoryStore.putValues(blockId, values, level, true)
size = res.size
res.data match {
case Right(newBytes) => bytesAfterPut = newBytes
case Left(newIterator) => valuesAfterPut = newIterator
}
} else {
// Save directly to disk.
val askForBytes = level.replication > 1 // Don't get back the bytes unless we replicate them
val res = diskStore.putValues(blockId, values, level, askForBytes)
size = res.size
res.data match {
case Right(newBytes) => bytesAfterPut = newBytes
case _ =>
}
}
// Now that the block is in either the memory or disk store, let other threads read it,
// and tell the master about it.
myInfo.markReady(size)
if (tellMaster) {
reportBlockStatus(blockId, myInfo)
}
}
logDebug("Put block " + blockId + " locally took " + Utils.getUsedTimeMs(startTimeMs))
// Replicate block if required
if (level.replication > 1) {
val remoteStartTime = System.currentTimeMillis
// Serialize the block if not already done
if (bytesAfterPut == null) {
if (valuesAfterPut == null) {
throw new SparkException(
"Underlying put returned neither an Iterator nor bytes! This shouldn't happen.")
}
bytesAfterPut = dataSerialize(blockId, valuesAfterPut)
}
replicate(blockId, bytesAfterPut, level)
logDebug("Put block " + blockId + " remotely took " + Utils.getUsedTimeMs(remoteStartTime))
}
// Free any memory-mapped buffer produced during serialization/replication.
BlockManager.dispose(bytesAfterPut)
return size
}
/**
* Put a new block of serialized bytes to the block manager.
*/
def putBytes(
blockId: String, bytes: ByteBuffer, level: StorageLevel, tellMaster: Boolean = true) {
if (blockId == null) {
throw new IllegalArgumentException("Block Id is null")
}
if (bytes == null) {
throw new IllegalArgumentException("Bytes is null")
}
if (level == null || !level.isValid) {
throw new IllegalArgumentException("Storage level is null or invalid")
}
if (blockInfo.contains(blockId)) {
logWarning("Block " + blockId + " already exists on this machine; not re-adding it")
return
}
// Remember the block's storage level so that we can correctly drop it to disk if it needs
// to be dropped right after it got put into memory. Note, however, that other threads will
// not be able to get() this block until we call markReady on its BlockInfo.
val myInfo = new BlockInfo(level, tellMaster)
blockInfo.put(blockId, myInfo)
val startTimeMs = System.currentTimeMillis
// Initiate the replication before storing it locally. This is faster as
// data is already serialized and ready for sending
val replicationFuture = if (level.replication > 1) {
val bufferView = bytes.duplicate() // Doesn't copy the bytes, just creates a wrapper
Future {
replicate(blockId, bufferView, level)
}
} else {
null
}
myInfo.synchronized {
logDebug("PutBytes for block " + blockId + " took " + Utils.getUsedTimeMs(startTimeMs)
+ " to get into synchronized block")
if (level.useMemory) {
// Store it only in memory at first, even if useDisk is also set to true
bytes.rewind()
memoryStore.putBytes(blockId, bytes, level)
} else {
bytes.rewind()
diskStore.putBytes(blockId, bytes, level)
}
// Now that the block is in either the memory or disk store, let other threads read it,
// and tell the master about it.
myInfo.markReady(bytes.limit)
if (tellMaster) {
reportBlockStatus(blockId, myInfo)
}
}
// If replication had started, then wait for it to finish
if (level.replication > 1) {
// replicationFuture is always created above when replication > 1; null here is a bug.
if (replicationFuture == null) {
throw new Exception("Unexpected")
}
Await.ready(replicationFuture, Duration.Inf)
}
if (level.replication > 1) {
logDebug("PutBytes for block " + blockId + " with replication took " +
Utils.getUsedTimeMs(startTimeMs))
} else {
logDebug("PutBytes for block " + blockId + " without replication took " +
Utils.getUsedTimeMs(startTimeMs))
}
}
/**
* Replicate block to another node.
*/
// NOTE(review): cachedPeers is fetched once and never invalidated — confirm whether
// peer-set changes (added/removed executors) are expected to be picked up.
var cachedPeers: Seq[BlockManagerId] = null
private def replicate(blockId: String, data: ByteBuffer, level: StorageLevel) {
// Peers store a single copy each, so the replicated level always has replication = 1.
val tLevel = StorageLevel(level.useDisk, level.useMemory, level.deserialized, 1)
if (cachedPeers == null) {
cachedPeers = master.getPeers(blockManagerId, level.replication - 1)
}
for (peer: BlockManagerId <- cachedPeers) {
val start = System.nanoTime
// Rewind so each peer receives the buffer from position 0.
data.rewind()
logDebug("Try to replicate BlockId " + blockId + " once; The size of the data is "
+ data.limit() + " Bytes. To node: " + peer)
if (!BlockManagerWorker.syncPutBlock(PutBlock(blockId, data, tLevel),
new ConnectionManagerId(peer.ip, peer.port))) {
logError("Failed to call syncPutBlock to " + peer)
}
logDebug("Replicated BlockId " + blockId + " once used " +
(System.nanoTime - start) / 1e6 + " s; The size of the data is " +
data.limit() + " bytes.")
}
}
/**
* Read a block consisting of a single object.
*/
def getSingle(blockId: String): Option[Any] = {
// Takes the first element of the block's iterator; assumes the block holds exactly one value.
get(blockId).map(_.next())
}
/**
* Write a block consisting of a single object.
*/
def putSingle(blockId: String, value: Any, level: StorageLevel, tellMaster: Boolean = true) {
put(blockId, Iterator(value), level, tellMaster)
}
/**
* Drop a block from memory, possibly putting it on disk if applicable. Called when the memory
* store reaches its limit and needs to free up space.
*/
def dropFromMemory(blockId: String, data: Either[ArrayBuffer[Any], ByteBuffer]) {
logInfo("Dropping block " + blockId + " from memory")
val info = blockInfo.get(blockId).orNull
if (info != null) {
info.synchronized {
val level = info.level
// Spill to disk first (if the level allows it and it isn't there already),
// so the data is never lost between removal from memory and the disk write.
if (level.useDisk && !diskStore.contains(blockId)) {
logInfo("Writing block " + blockId + " to disk")
data match {
case Left(elements) =>
diskStore.putValues(blockId, elements, level, false)
case Right(bytes) =>
diskStore.putBytes(blockId, bytes, level)
}
}
val blockWasRemoved = memoryStore.remove(blockId)
if (!blockWasRemoved) {
logWarning("Block " + blockId + " could not be dropped from memory as it does not exist")
}
if (info.tellMaster) {
reportBlockStatus(blockId, info)
}
if (!level.useDisk) {
// The block is completely gone from this node; forget it so we can put() it again later.
blockInfo.remove(blockId)
}
}
} else {
// The block has already been dropped
}
}
/**
* Remove a block from both memory and disk.
*/
def removeBlock(blockId: String) {
logInfo("Removing block " + blockId)
val info = blockInfo.get(blockId).orNull
if (info != null) info.synchronized {
// Removals are idempotent in disk store and memory store. At worst, we get a warning.
val removedFromMemory = memoryStore.remove(blockId)
val removedFromDisk = diskStore.remove(blockId)
if (!removedFromMemory && !removedFromDisk) {
logWarning("Block " + blockId + " could not be removed as it was not found in either " +
"the disk or memory store")
}
blockInfo.remove(blockId)
// Let the master know the block is gone so it stops routing readers here.
if (info.tellMaster) {
reportBlockStatus(blockId, info)
}
} else {
// The block has already been removed; do nothing.
logWarning("Asked to remove block " + blockId + ", which does not exist")
}
}
// Evict every block whose recorded timestamp is older than cleanupTime.
// Invoked periodically by metadataCleaner (see the field near the top of the class).
def dropOldBlocks(cleanupTime: Long) {
logInfo("Dropping blocks older than " + cleanupTime)
val iterator = blockInfo.internalMap.entrySet().iterator()
while (iterator.hasNext) {
val entry = iterator.next()
val (id, info, time) = (entry.getKey, entry.getValue._1, entry.getValue._2)
if (time < cleanupTime) {
info.synchronized {
val level = info.level
if (level.useMemory) {
memoryStore.remove(id)
}
if (level.useDisk) {
diskStore.remove(id)
}
// Remove via the iterator to avoid ConcurrentModificationException on the map.
iterator.remove()
logInfo("Dropped block " + id)
}
// NOTE(review): status is reported outside the info lock, unlike removeBlock — confirm
// this ordering is intentional.
reportBlockStatus(id, info)
}
}
}
/** Decide from the block ID's prefix whether this block type should be compressed. */
def shouldCompress(blockId: String): Boolean = blockId match {
  case id if id.startsWith("shuffle_")   => compressShuffle
  case id if id.startsWith("broadcast_") => compressBroadcast
  case id if id.startsWith("rdd_")       => compressRdds
  case _                                 => false // Won't happen in a real cluster, but it can in tests
}
/**
* Wrap an output stream for compression if block compression is enabled for its block type
*/
def wrapForCompression(blockId: String, s: OutputStream): OutputStream = {
if (shouldCompress(blockId)) new LZFOutputStream(s) else s
}
/**
* Wrap an input stream for compression if block compression is enabled for its block type
*/
def wrapForCompression(blockId: String, s: InputStream): InputStream = {
if (shouldCompress(blockId)) new LZFInputStream(s) else s
}
// Serialize an iterator of values into a (possibly compressed) ByteBuffer.
def dataSerialize(blockId: String, values: Iterator[Any]): ByteBuffer = {
val byteStream = new FastByteArrayOutputStream(4096)
val ser = serializer.newInstance()
ser.serializeStream(wrapForCompression(blockId, byteStream)).writeAll(values).close()
// Trim the backing array to the written length before wrapping it.
byteStream.trim()
ByteBuffer.wrap(byteStream.array)
}
/**
* Deserializes a ByteBuffer into an iterator of values and disposes of it when the end of
* the iterator is reached.
*/
def dataDeserialize(blockId: String, bytes: ByteBuffer): Iterator[Any] = {
bytes.rewind()
val stream = wrapForCompression(blockId, new ByteBufferInputStream(bytes, true))
serializer.newInstance().deserializeStream(stream).asIterator
}
// Shut down this BlockManager: stop background tasks, close the network endpoint,
// stop the slave actor, and clear all stores.
def stop() {
if (heartBeatTask != null) {
heartBeatTask.cancel()
}
connectionManager.stop()
master.actorSystem.stop(slaveActor)
blockInfo.clear()
memoryStore.clear()
diskStore.clear()
metadataCleaner.cancel()
logInfo("BlockManager stopped")
}
}
private[spark]
object BlockManager extends Logging {
// Generates unique suffixes for slave actor names.
val ID_GENERATOR = new IdGenerator
// Portion of the JVM max heap to dedicate to block storage (default 66%).
def getMaxMemoryFromSystemProperties: Long = {
val memoryFraction = System.getProperty("spark.storage.memoryFraction", "0.66").toDouble
(Runtime.getRuntime.maxMemory * memoryFraction).toLong
}
// Heartbeat interval in milliseconds (default 5000).
def getHeartBeatFrequencyFromSystemProperties: Long =
System.getProperty("spark.storage.blockManagerHeartBeatMs", "5000").toLong
// Test hook: disables the periodic heartbeat task entirely.
def getDisableHeartBeatsForTesting: Boolean =
System.getProperty("spark.test.disableBlockManagerHeartBeat", "false").toBoolean
/**
* Attempt to clean up a ByteBuffer if it is memory-mapped. This uses an *unsafe* Sun API that
* might cause errors if one attempts to read from the unmapped buffer, but it's better than
* waiting for the GC to find it because that could lead to huge numbers of open files. There's
* unfortunately no standard API to do this.
*/
def dispose(buffer: ByteBuffer) {
if (buffer != null && buffer.isInstanceOf[MappedByteBuffer]) {
logTrace("Unmapping " + buffer)
// cleaner() may be null for buffers that are not directly cleanable; guard before clean().
if (buffer.asInstanceOf[DirectBuffer].cleaner() != null) {
buffer.asInstanceOf[DirectBuffer].cleaner().clean()
}
}
}
}
| hobinyoon/spark-0.7.0 | core/src/main/scala/spark/storage/BlockManager.scala | Scala | bsd-3-clause | 36,996 |
package io.finch.internal
import com.twitter.util.Try
import org.scalacheck.Gen
import org.scalacheck.Prop.forAll
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import org.scalatestplus.scalacheck.Checkers
/**
* Property-based tests for the `TooFastString` extension methods
* (`tooBoolean` / `tooInt` / `tooLong`), checking agreement with the
* standard-library parsers and that malformed input yields None instead of throwing.
*/
class TooFastStringSpec extends AnyFlatSpec with Matchers with Checkers {
"TooFastString" should "parse boolean correctly" in {
// Round-trip: rendering any Boolean and re-parsing it must recover the value.
check { b: Boolean =>
b.toString.tooBoolean === Some(b)
}
// Malformed inputs must yield None rather than throwing.
"".tooBoolean shouldBe None
"foobarbaz".tooBoolean shouldBe None
}
it should "parse int correctly" in {
check { i: Int =>
i.toString.tooInt === Some(i)
}
// Agreement with the standard String#toInt on arbitrary numeric strings.
check {
forAll(Gen.numStr) { s =>
Try(s.toInt).toOption === s.tooInt
}
}
"".tooInt shouldBe None
// Values outside Int range must not wrap around; they must parse to None.
"9999999999".tooInt shouldBe None
"foobarbaz".tooInt shouldBe None
"-9876543210".tooInt shouldBe None
}
it should "parse long correctly" in {
check { l: Long =>
l.toString.tooLong === Some(l)
}
// Agreement with the standard String#toLong on arbitrary numeric strings.
check {
forAll(Gen.numStr) { s =>
Try(s.toLong).toOption === s.tooLong
}
}
"".tooLong shouldBe None
// Values outside Long range must not wrap around; they must parse to None.
"99999999999999999999".tooLong shouldBe None
"foobarbazbarbazfoo".tooLong shouldBe None
"-98765432101234567890".tooLong shouldBe None
}
}
| rpless/finch | core/src/test/scala/io/finch/internal/TooFastStringSpec.scala | Scala | apache-2.0 | 1,276 |
package gapt.examples.tip.prod
import gapt.expr._
import gapt.expr.ty.TBase
import gapt.proofs.context.update.InductiveType
import gapt.proofs.Sequent
import gapt.proofs.gaptic.{ Lemma, TacticsProof, _ }
/**
* TIP benchmark prod/prop_15: for natural numbers defined by Z and S,
* proves the goal `plus(x, S(x)) = S(plus(x, x))`. Two proofs are given:
* an explicit inductive proof via a cut lemma, and one using analytic induction.
*/
object prop_15 extends TacticsProof {
// Sorts
ctx += TBase( "sk" )
// Inductive types
ctx += InductiveType( ty"Nat", hoc"'Z' :Nat", hoc"'S' :Nat>Nat" )
//Function constants
ctx += hoc"'plus' :Nat>Nat>Nat"
// End sequent: recursion equations for plus plus a constructor-injectivity axiom,
// entailing the goal.
val sequent =
hols"""
def_p: ∀x0 (p(S(x0:Nat): Nat): Nat) = x0,
def_plus_0: ∀y (plus(#c(Z: Nat), y:Nat): Nat) = y,
def_plus_1: ∀z ∀y (plus(S(z:Nat): Nat, y:Nat): Nat) = S(plus(z, y)),
constr_inj_0: ∀y0 ¬#c(Z: Nat) = S(y0:Nat)
:-
goal: ∀x (plus(x:Nat, S(x): Nat): Nat) = S(plus(x, x))
"""
// Background theory: the antecedent formulas alone.
val theory = sequent.antecedent ++: Sequent()
// Explicit proof: cut in the generalized lemma and prove it by induction on x.
val proof = Lemma( sequent ) {
cut( "lemma", hof"!x!y plus(x, S(y)) = S(plus(x,y))" );
//- proof lemma
forget( "goal" )
allR; induction( hov"x:Nat" )
//-- BC lemma
allR
rewrite.many ltr "def_plus_0" in "lemma"
refl
//-- IC lemma
allR
rewrite.many ltr "def_plus_1" in "lemma"
rewrite.many ltr "IHx_0" in "lemma"
refl
//- proof goal
allR;
rewrite.many ltr "lemma" in "goal"
refl
}
// Alternative proof discharging the lemma with analytic induction (anaInd).
val openind = Lemma( sequent ) {
allR( hov"x:Nat" )
cut( "l", hof"!y plus(x, S y) = S(plus x y)" ) right escrgt
forget( "goal" ); anaInd
}
} | gapt/gapt | examples/tip/prod/prop_15.scala | Scala | gpl-3.0 | 1,443
/*
* Copyright © 2014 TU Berlin (emma@dima.tu-berlin.de)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.emmalanguage
package compiler.lang.core
import compiler.Common
import util.Monoids._
import shapeless._
import scala.util.control.TailCalls.TailRec
/** Trampolining tail calls to avoid stack overflow. */
private[core] trait Trampoline extends Common {
self: Core =>
/** Trampolining tail calls to avoid stack overflow. */
private[core] object Trampoline {
import Core.{Lang => core}
import UniverseImplicits._
import u.internal.flags
// Symbol of the scala.util.control.TailCalls module, resolved reflectively.
private val module = u.rootMirror.staticModule("scala.util.control.TailCalls")
// A ready-made reference tree to the TailCalls module (Option-wrapped for call targets).
private val TailCalls = Some(core.Ref(module))
// Method symbols of TailCalls.done / TailCalls.tailcall, used to build wrapped calls.
private val done = module.info.member(api.TermName("done")).asMethod
private val tailcall = module.info.member(api.TermName("tailcall")).asMethod
// TailRec#result — used to extract the final value from a trampoline.
private val result = api.Type[TailRec[Nothing]].typeConstructor
.member(api.TermName("result")).asMethod
/**
* Wraps return values and tail calls in a trampoline, finally returning its result.
*
* == Preconditions ==
* - The input tree is in DSCF (see [[Core.dscf]]).
*
* == Postconditions ==
* - All return values and tail calls are wrapped in a trampoline.
* - The ANF shape of the input is NOT preserved.
*/
// Unsafe: Return type of methods changes to TailRec[OriginalType].
lazy val transform = TreeTransform("Trampoline.transform", api.BottomUp.unsafe
.withAncestors.inherit { // Local method definitions.
case core.Let(_, defs, _) =>
(for (core.DefDef(method, tparams, paramss, _) <- defs) yield {
val (own, nme, pos) = (method.owner, method.name, method.pos)
val flg = flags(method)
val pss = paramss.map(_.map(_.symbol.asTerm))
val res = api.Type.kind1[TailRec](method.info.finalResultType)
val ans = method.annotations
method -> api.DefSym(own, nme, tparams, pss, res, flg, pos, ans)
}).toMap
} (overwrite).transformWith {
// Return position in a method definition, wrap in trampoline.
case Attr.inh(tree, local :: (_ :+ (_: u.DefDef) :+ core.Let(_, _, expr)) :: _)
if tree == expr => wrap(expr, local)
// Local method definition, returns a trampoline.
case Attr.inh(core.DefDef(method, tparams, paramss, core.Let(vals, defs, expr)),
local :: _) =>
val pss = paramss.map(_.map(_.symbol.asTerm))
core.DefDef(local(method), tparams, pss, core.Let(vals, defs, expr))
// Local method call outside, retrieve the trampoline result.
case Attr.inh(core.DefCall(None, cont, targs, argss), local :: ancestors :: _)
if local.contains(cont) && ancestors.forall {
case _: u.DefDef => false
case _ => true
} => core.DefCall(Some(core.DefCall(None, local(cont), targs, argss)), result)
}.andThen(_.tree))
/** Wraps the return value / tail call of a method in a trampoline. */
private def wrap(expr: u.Tree, local: Map[u.MethodSymbol, u.MethodSymbol]): u.Tree =
expr match {
// Wrap both branches.
case core.Branch(cond, thn, els) =>
core.Branch(cond, wrap(thn, local), wrap(els, local))
// Wrap a tail call.
case core.DefCall(None, cont, targs, argss) if local.contains(cont) =>
val Res = cont.info.finalResultType
core.DefCall(TailCalls, tailcall, Seq(Res),
Seq(Seq(core.DefCall(None, local(cont), targs, argss))))
// Wrap a return value.
case _ =>
core.DefCall(TailCalls, done, Seq(expr.tpe), Seq(Seq(expr)))
}
}
}
| emmalanguage/emma | emma-language/src/main/scala/org/emmalanguage/compiler/lang/core/Trampoline.scala | Scala | apache-2.0 | 4,237 |
package de.m7w3.signal
import java.security.SecureRandom
import org.whispersystems.signalservice.internal.util.Base64
package object Util {
def getSecret(size: Int): String = {
val secret = new Array[Byte](size)
SecureRandom.getInstance("SHA1PRNG").nextBytes(secret)
Base64.encodeBytes(secret)
}
} | ayoub-benali/signal-desktop-client | src/main/scala/de/m7w3/signal/Util.scala | Scala | apache-2.0 | 315 |
package com.krux.hyperion.examples
import com.krux.hyperion.Implicits._
import com.krux.hyperion.activity.{GoogleStorageDownloadActivity, ShellCommandActivity}
import com.krux.hyperion.datanode.S3DataNode
import com.krux.hyperion.expression.Parameter
import com.krux.hyperion.resource.Ec2Resource
import com.krux.hyperion.{DataPipelineDef, HyperionCli, HyperionContext, Schedule}
import com.typesafe.config.ConfigFactory
/**
 * Example pipeline demonstrating workflow dependency syntax: activities are
 * chained sequentially with `~>` and grouped for parallel execution with `+`.
 */
object ExampleWorkflow extends DataPipelineDef with HyperionCli {
  // Loads the "example" configuration resource instead of the default config.
  override implicit val hc: HyperionContext = new HyperionContext(ConfigFactory.load("example"))
  // Runs once per day, starting as soon as the pipeline is activated.
  override lazy val schedule = Schedule.cron
    .startAtActivation
    .every(1.day)
  // Pipeline parameter so the spot bid price can be overridden at activation time.
  val price = Parameter("SpotPrice", 2.3)
  override def parameters = Seq(price)
  val ec2 = Ec2Resource()
    .withSpotBidPrice(price) // Could also put 2.3 directly here
  // First activity
  val act1 = ShellCommandActivity("run act1")(ec2).named("act1")
  val act2 = ShellCommandActivity("run act2")(ec2).named("act2")
  val act3 = ShellCommandActivity("run act3")(ec2).named("act3")
  val act4 = ShellCommandActivity("run act4")(ec2).named("act4")
  val act5 = ShellCommandActivity("run act5")(ec2).named("act5")
  val act6 = ShellCommandActivity("run act6")(ec2).named("act6")
  // Downloads from Google Storage, authenticating with the gsutil config held on S3.
  val act7 = GoogleStorageDownloadActivity(s3 / "gsutil.config", "gs://input_location")(ec2).named("act7")
  // Same download with `.ifExists` — presumably skips rather than fails when the
  // input is absent; confirm against GoogleStorageDownloadActivity docs.
  val act8 = GoogleStorageDownloadActivity(s3 / "gsutil.config", "gs://input_location")(ec2).named("act8").ifExists
  // run act1 first, and then run act2 and act3 at the same time, and then run act4 and act5 the
  // same time, at last run act6
  // An alternative syntax would be:
  // act1 andThen (act2 and act3) andThen (act4 and act5) andThen act6
  override def workflow = act1 ~> (act2 + act3) ~> (act4 + act5) ~> act6 ~> act7 ~> act8
}
| sethyates/hyperion | examples/src/main/scala/com/krux/hyperion/examples/ExampleWorkflow.scala | Scala | apache-2.0 | 1,812 |
package org.openurp.edu.eams.teach.schedule.service.impl
import org.beangle.commons.dao.impl.BaseServiceImpl
import org.beangle.data.jpa.dao.OqlBuilder
import org.openurp.base.Department
import org.openurp.base.Semester
import org.openurp.edu.base.Project
import org.openurp.edu.base.code.StdType
import org.openurp.edu.eams.system.security.DataRealm
import org.openurp.edu.eams.teach.schedule.model.CourseTableCheck
import org.openurp.edu.eams.teach.schedule.service.CourseTableCheckService
import org.openurp.edu.eams.util.DataRealmUtils
import org.openurp.edu.eams.util.stat.StatGroup
import org.openurp.edu.eams.util.stat.StatHelper
/**
 * Statistics over course-table confirmation checks for a semester, restricted
 * to the caller's data realm.
 */
class CourseTableCheckServiceImpl extends BaseServiceImpl with CourseTableCheckService {
  /**
   * Aggregates confirmation counts grouped by `attr`, realm-filtered on the
   * student's type and department.
   *
   * @param attr  HQL grouping expression (e.g. a property path on "check")
   * @param clazz entity class used to resolve grouped ids back to entities
   */
  def statCheckBy(semester: Semester,
      dataRealm: DataRealm,
      attr: String,
      clazz: Class[_]): Seq[_] = {
    statCheckBy(semester, dataRealm, Array("check.std.stdType.id", "check.std.department.id"), attr,
      clazz)
  }
  // Builds StatItem rows of the shape (group value, total, confirmed, unconfirmed).
  // NOTE(review): this query reads "check.isConfirm" and the public overload uses
  // "check.std.stdType.id", while statCheckByDepart uses "check.confirm" and
  // "check.std.type.id" — verify which property names match the entity model.
  private def statCheckBy(semester: Semester,
      dataRealm: DataRealm,
      dataRealmAttrs: Array[String],
      attr: String,
      clazz: Class[_]): Seq[_] = {
    val entityQuery = OqlBuilder.from(classOf[CourseTableCheck], "check")
    entityQuery.select("select new org.openurp.edu.eams.util.stat.StatItem(" +
      attr +
      ",count(check.isConfirm),sum(CASE WHEN check.isConfirm=true THEN 1 ELSE 0 END ),sum(CASE WHEN check.isConfirm=true THEN 0 ELSE 1 END))")
    entityQuery.where("check.semester=:semester", semester)
    // Restricts the query to the caller's visible student types/departments.
    DataRealmUtils.addDataRealm(entityQuery, dataRealmAttrs, dataRealm)
    entityQuery.groupBy(attr)
    val stats = entityDao.search(entityQuery)
    // Replaces grouped ids in the StatItems with loaded entities of `clazz`.
    new StatHelper(entityDao).setStatEntities(stats, clazz)
  }
  /**
   * Aggregates confirmations grouped by (student type, department, grade) and
   * returns them organized into StatGroups (grouped on the first 2 columns).
   */
  def statCheckByDepart(semester: Semester, dataRealm: DataRealm, project: Project): Seq[_] = {
    val query = OqlBuilder.from(classOf[CourseTableCheck], "check")
    query.select("check.std.type.id,check.std.department.id,check.std.grade,count(check.confirm),sum(CASE WHEN check.confirm=true THEN 1 ELSE 0 END )")
    query.where("check.semester=:semester", semester)
    DataRealmUtils.addDataRealm(query, Array("check.std.type.id", "check.std.department.id"), dataRealm)
    query.groupBy("check.std.type.id").groupBy("check.std.department.id")
      .groupBy("check.std.grade")
    val datas = entityDao.search(query).asInstanceOf[List[_]]
    // Resolves the first two id columns into StdType and Department entities in place.
    new StatHelper(entityDao).replaceIdWith(datas, Array(classOf[StdType], classOf[Department]))
    val statGroups = StatGroup.buildStatGroups(datas, 2)
    statGroups
  }
}
| openurp/edu-eams-webapp | schedule/src/main/scala/org/openurp/edu/eams/teach/schedule/service/impl/CourseTableCheckServiceImpl.scala | Scala | gpl-3.0 | 2,552 |
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.mllib.classification.NaiveBayes
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.util.MLUtils
import org.apache.spark.mllib.regression.LabeledPoint
import org.apache.spark.SparkContext
import org.apache.spark.mllib.tree.DecisionTree
import org.apache.spark.mllib.tree.configuration.Algo._
import org.apache.spark.mllib.tree.impurity.Gini
object classification {
def main(args: Array[String]) = {
val conf = new SparkConf().setAppName("Naive Bayes").setMaster("local[1]")
val sc = new SparkContext(conf)
val train = sc.textFile("snr150130/BD/final-train.csv")
val test = sc.textFile("snr150130/BD/final-test.csv")
val training = train.map { line =>
val parts = line.split(",")
LabeledPoint(parts(7).toDouble, Vectors.dense(parts(2).toDouble,parts(3).toDouble,parts(4).toDouble,parts(5).toDouble,parts(6).toDouble,parts(7).toDouble))
}.cache()
/* val splits = training.randomSplit(Array(0.6, 0.4))
val train1 = splits(0)
val test1 = splits(1)*/
val testing = test.map { line =>
val parts = line.split(",")
LabeledPoint(parts(7).toDouble, Vectors.dense(parts(2).toDouble,parts(3).toDouble,parts(4).toDouble,parts(5).toDouble,parts(6).toDouble,parts(7).toDouble))
}.cache()
val model = NaiveBayes.train(training, lambda = 1.0)
val predictionAndLabel = testing.map(p => (p.label, model.predict(p.features)))
predictionAndLabel.foreach(println)
val accuracy = 1.0 * predictionAndLabel.filter(x => x._1 == x._2).count() / test.count()
println("Accuracy of Naive Bayes - " + accuracy)
}
} | SubhasisDutta/JRC-Name-Parser | jrc-classification/src/Classification/classification.scala | Scala | apache-2.0 | 1,727 |
package com.twitter.algebird
import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks
import org.scalacheck.{Arbitrary, Gen}
import Arbitrary.arbitrary
import org.scalatest.matchers.should.Matchers
import org.scalatest.propspec.AnyPropSpec
object Helpers {
  /**
   * Arbitrary instance for [[Batched]]: either a single wrapped value, or a
   * value with a generated list of extras appended to it.
   */
  implicit def arbitraryBatched[A: Arbitrary]: Arbitrary[Batched[A]] = {
    val singleton: Gen[Batched[A]] = arbitrary[A].map(Batched(_))
    val appended: Gen[Batched[A]] =
      arbitrary[(A, List[A])].map { case (head, tail) => Batched(head).append(tail) }
    Arbitrary(Gen.oneOf(singleton, appended))
  }
}
import Helpers.arbitraryBatched
/** Monoid-law checks for `Batched` at several compaction batch sizes. */
class BatchedLaws extends CheckProperties {
  import BaseProperties._
  // Keeps generated BigDecimal magnitudes bounded so the law checks stay sane.
  implicit val arbitraryBigDecimalsHere: Arbitrary[BigDecimal] =
    BaseProperties.arbReasonableBigDecimals
  /**
   * Registers a monoid-law property for `Batched[A]` compacted at the given
   * batch `size`.
   */
  def testBatchedMonoid[A: Arbitrary: Monoid](name: String, size: Int): Unit = {
    implicit val m: Monoid[Batched[A]] = Batched.compactingMonoid[A](size)
    // BUG FIX: the property label previously said "CountMinSketch[...]" — a
    // copy-paste from another suite; this suite tests Batched.
    property(s"Batched[$name] batched at $size is a Monoid") {
      monoidLaws[Batched[A]]
    }
  }
  testBatchedMonoid[Int]("Int", 1)
  testBatchedMonoid[Int]("Int", 10)
  testBatchedMonoid[Int]("Int", 100)
  testBatchedMonoid[Int]("Int", 1000000)
  testBatchedMonoid[BigInt]("BigInt", 1)
  testBatchedMonoid[BigInt]("BigInt", 10)
  testBatchedMonoid[BigInt]("BigInt", 100)
  testBatchedMonoid[BigInt]("BigInt", 1000000)
  testBatchedMonoid[BigDecimal]("BigDecimal", 1)
  testBatchedMonoid[BigDecimal]("BigDecimal", 10)
  testBatchedMonoid[BigDecimal]("BigDecimal", 100)
  testBatchedMonoid[BigDecimal]("BigDecimal", 1000000)
  testBatchedMonoid[String]("String", 1)
  testBatchedMonoid[String]("String", 10)
  testBatchedMonoid[String]("String", 100)
  testBatchedMonoid[String]("String", 1000000)
}
/** Property tests for `Batched` iteration order and conversions. */
class BatchedTests extends AnyPropSpec with Matchers with ScalaCheckPropertyChecks {
  // Appending a list to a single-item Batched must iterate as the cons list.
  property(".iterator works") {
    forAll((x: Int, xs: List[Int]) => Batched(x).append(xs).iterator.toList shouldBe (x :: xs))
  }
  // reverseIterator must be exactly the reverse of iterator (and sum-equal).
  property(".iterator and .reverseIterator agree") {
    forAll { (b: Batched[Int]) =>
      b.iterator.toList.reverse shouldBe b.reverseIterator.toList
      b.iterator.sum shouldBe b.reverseIterator.sum
    }
  }
  // toList must agree with materializing the iterator.
  property(".toList works") {
    forAll((b: Batched[Int]) => b.toList shouldBe b.iterator.toList)
  }
}
| twitter/algebird | algebird-test/src/test/scala/com/twitter/algebird/BatchedTest.scala | Scala | apache-2.0 | 2,243 |
// Code adapted from github.com/dscleaver/sbt-quickfix. Those portions
// Copyright (c) 2013 David Cleaver; All rights reserved.
package zmre.sbt
import sbt._
import Keys._
// scalastyle:off
/**
 * sbt AutoPlugin that writes compile/test results to log files and pings a
 * running vim instance so it can refresh its quickfix list asynchronously.
 * NOTE(review): uses the pre-1.0 sbt `<<=` settings syntax (deprecated in
 * later sbt versions).
 */
object SbtVimAsyncIntegrationPlugin extends AutoPlugin {
  object autoImport {
    val vimIntegrationLogDirectory = SettingKey[File]("vim-integration-logdir", "The folder where temporary log files will be written")
    val vimIntegrationExecutable = SettingKey[String]("vim-integration-executable", "The path to the vim executable")
    val notifyVim = taskKey[Unit]("Notify vim on certain task completions")
    val startCompile = taskKey[Unit]("Add a log message indicating start of compilation")
    val finishCompile = taskKey[Unit]("Add a log message indicating end of compilation")
  }
  import autoImport._
  // Enable automatically for every project without requiring enablePlugins.
  override def trigger = allRequirements
  override val projectSettings = Seq(
    // Emits a marker log line after each compile finishes.
    finishCompile <<= Def.task {
      streams.value.log.debug("compilation finished.")
    }.triggeredBy(compile in (Compile, compile)),
    // Emits a marker log line before each compile starts.
    startCompile <<= Def.task {
      streams.value.log.info("Compiling...")
    }.runBefore(compile in (Compile, compile)),
    vimIntegrationLogDirectory in ThisBuild := baseDirectory.value / "target" / "vim",
    // Prepends a file-writing logger for compile/scalastyle tasks; the logger
    // notifies vim when output is written.
    extraLoggers <<= (extraLoggers, vimIntegrationLogDirectory, vimIntegrationExecutable) apply { (currentFunction, logdir, vimExec) =>
      (key: ScopedKey[_]) => {
        val loggers = currentFunction(key) // current list of loggers
        val taskOption = key.scope.task.toOption
        if (taskOption.exists(l => l.label.toLowerCase.contains("compile") || l.label == "scalastyle")) {
          // prepend our logger to the list
          //println(s"Adding custom logger for $taskOption")
          CompileLogger(logdir, SbtVimAsyncIntegration.notifyToRefresh(vimExec)) +: loggers
        } else {
          loggers
        }
      }
    },
    // Reports test results to a log file and notifies vim afterwards.
    testListeners += UnitTestLogger(vimIntegrationLogDirectory.value, (sources in Test).value, SbtVimAsyncIntegration.notifyToRefresh(vimIntegrationExecutable.value)),
    // Default executable name differs per OS.
    vimIntegrationExecutable in ThisBuild := (if (System.getProperty("os.name").startsWith("Win")) "gvim.bat" else "gvim")
  )
}
/** Helpers that drive a running vim instance via `--remote-send`. */
object SbtVimAsyncIntegration {
  // Returns a thunk that tells vim to re-read the sbt log files
  // (triggers the user's SyntasticCheck command).
  def notifyToRefresh(vimExec: String): () => Unit = () => {
    println("Sending update notice to vim")
    vimCall(vimExec, ":SyntasticCheck readsbtlogs")
    ()
  }
  // Low-level remote-send invocation; returns the process exit code.
  def vimCall(vimExec: String, command: Seq[String]): Int = Process(List(vimExec, "--remote-send") ++ command).!
  // Wraps an ex command so vim first leaves insert mode (<c-\><c-n>) and
  // finishes with <cr>.
  def vimCall(vimExec: String, command: String): Int =
    vimCall(vimExec, List(s"<c-\\\\><c-n>$command<cr>"))
}
| zmre/sbt-vim-async-integration | src/main/scala/zmre/sbt/SbtVimAsyncIntegration.scala | Scala | apache-2.0 | 2,614 |
/*
* Copyright 2013 Stephane Godbillon (@sgodbillon)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package reactivemongo.bson
import exceptions.DocumentKeyNotFound
import scala.util.{ Failure, Success, Try }
import scala.collection.generic.CanBuildFrom
import buffer._
import utils.Converters
/** A BSON Double (BSON type 0x01). */
case class BSONDouble(value: Double) extends BSONValue { val code = 0x01.toByte }
/** A BSON UTF-8 string (BSON type 0x02). */
case class BSONString(value: String) extends BSONValue { val code = 0x02.toByte }
/**
* A `BSONDocument` structure (BSON type `0x03`).
*
* A `BSONDocument` is basically a stream of tuples `(String, BSONValue)`.
* It is completely lazy. The stream it wraps is a `Stream[Try[(String, BSONValue)]]` since
* we cannot be sure that a not yet deserialized value will be processed without error.
*/
case class BSONDocument(stream: Stream[Try[BSONElement]]) extends BSONValue {
  val code = 0x03.toByte
  /**
   * Returns the [[BSONValue]] associated with the given `key`.
   *
   * If the key is not found or the matching value cannot be deserialized, returns `None`.
   */
  def get(key: String): Option[BSONValue] = getTry(key).toOption
  /**
   * Returns the [[BSONValue]] associated with the given `key`.
   *
   * If the key is not found or the matching value cannot be deserialized, returns a `Failure`.
   * The `Failure` holds a [[exceptions.DocumentKeyNotFound]] if the key could not be found.
   */
  def getTry(key: String): Try[BSONValue] = Try {
    // A Failure in the stream is a prior deserialization error: rethrow it so
    // the surrounding Try captures it instead of skipping the element.
    stream.find {
      case Success(element) => element._1 == key
      case Failure(e) => throw e
    }.map(_.get._2).getOrElse(throw DocumentKeyNotFound(key))
  }
  /**
   * Returns the [[BSONValue]] associated with the given `key`.
   *
   * If the key could not be found, the resulting option will be `None`.
   * If the matching value could not be deserialized, returns a `Failure`.
   */
  def getUnflattenedTry(key: String): Try[Option[BSONValue]] = getTry(key) match {
    case Failure(e: DocumentKeyNotFound) => Success(None)
    case Failure(e) => Failure(e)
    case Success(e) => Success(Some(e))
  }
  /**
   * Returns the [[BSONValue]] associated with the given `key`, and converts it with the given implicit [[BSONReader]].
   *
   * If there is no matching value, or the value could not be deserialized or converted, returns a `None`.
   * Note: reader exceptions are swallowed here (converted to `None`); use
   * `getAsTry` to observe them.
   */
  def getAs[T](s: String)(implicit reader: BSONReader[_ <: BSONValue, T]): Option[T] = {
    getTry(s).toOption.flatMap { element =>
      Try(reader.asInstanceOf[BSONReader[BSONValue, T]].read(element)).toOption
    }
  }
  /**
   * Returns the [[BSONValue]] associated with the given `key`, and converts it with the given implicit [[BSONReader]].
   *
   * If there is no matching value, or the value could not be deserialized or converted, returns a `Failure`.
   * The `Failure` holds a [[exceptions.DocumentKeyNotFound]] if the key could not be found.
   */
  def getAsTry[T](s: String)(implicit reader: BSONReader[_ <: BSONValue, T]): Try[T] = {
    val tt = getTry(s)
    tt.flatMap { element => Try(reader.asInstanceOf[BSONReader[BSONValue, T]].read(element)) }
  }
  /**
   * Returns the [[BSONValue]] associated with the given `key`, and converts it with the given implicit [[BSONReader]].
   *
   * If there is no matching value, returns a `Success` holding `None`.
   * If the value could not be deserialized or converted, returns a `Failure`.
   */
  def getAsUnflattenedTry[T](s: String)(implicit reader: BSONReader[_ <: BSONValue, T]): Try[Option[T]] = getAsTry(s)(reader) match {
    case Failure(e: DocumentKeyNotFound) => Success(None)
    case Failure(e) => Failure(e)
    case Success(e) => Success(Some(e))
  }
  /** Creates a new [[BSONDocument]] containing all the elements of this one and the elements of the given document. */
  def add(doc: BSONDocument): BSONDocument = new BSONDocument(stream ++ doc.stream)
  /** Creates a new [[BSONDocument]] containing all the elements of this one and the given `elements`. */
  def add(elements: Producer[(String, BSONValue)]*): BSONDocument = new BSONDocument(
    stream ++ elements.flatMap { el =>
      el.produce.map(value => Seq(Try(value))).getOrElse(Seq.empty)
    }.toStream)
  /** Alias for `add(doc: BSONDocument): BSONDocument` */
  def ++(doc: BSONDocument): BSONDocument = add(doc)
  /** Alias for `add(elements: Producer[(String, BSONValue)]*): BSONDocument` */
  def ++(elements: Producer[(String, BSONValue)]*): BSONDocument = add(elements: _*)
  /** Returns a `Stream` for all the elements of this `BSONDocument` (silently dropping elements that failed to deserialize). */
  def elements: Stream[BSONElement] = stream.filter(_.isSuccess).map(_.get)
  /** Is this document empty? */
  def isEmpty: Boolean = stream.isEmpty
  override def toString: String = "BSONDocument(<" + (if (isEmpty) "empty" else "non-empty") + ">)"
}
object BSONDocument {
  /** Creates a new [[BSONDocument]] containing all the given `elements`. */
  def apply(elements: Producer[(String, BSONValue)]*): BSONDocument = new BSONDocument(
    elements.flatMap { el =>
      el.produce.map(value => Seq(Try(value))).getOrElse(Seq.empty)
    }.toStream)
  /** Creates a new [[BSONDocument]] containing all the `elements` in the given `Traversable`. */
  def apply(elements: Traversable[(String, BSONValue)]): BSONDocument = {
    new BSONDocument(elements.toStream.map(Success(_)))
  }
  /** Returns a String representing the given [[BSONDocument]]. */
  def pretty(doc: BSONDocument) = BSONIterator.pretty(doc.stream.iterator)
  /** Writes the `document` into the `buffer`. */
  def write(value: BSONDocument, buffer: WritableBuffer)(implicit bufferHandler: BufferHandler = DefaultBufferHandler): WritableBuffer = {
    bufferHandler.writeDocument(value, buffer)
  }
  /**
   * Reads a `document` from the `buffer`.
   *
   * Note that the buffer's readerIndex must be set on the start of a document, or it will fail.
   * NOTE: the `.get` below means a malformed buffer throws the underlying exception.
   */
  def read(buffer: ReadableBuffer)(implicit bufferHandler: BufferHandler = DefaultBufferHandler): BSONDocument = {
    bufferHandler.readDocument(buffer).get
  }
  /** An empty BSONDocument. */
  val empty: BSONDocument = BSONDocument()
}
/**
* A `BSONArray` structure (BSON type `0x04`).
*
* A `BSONArray` is a straightforward `BSONDocument` where keys are a sequence of positive integers.
*
* A `BSONArray` is basically a stream of tuples `(String, BSONValue)` where the first member is a string representation of an index.
* It is completely lazy. The stream it wraps is a `Stream[Try[(String, BSONValue)]]` since
* we cannot be sure that a not yet deserialized value will be processed without error.
*/
case class BSONArray(stream: Stream[Try[BSONValue]]) extends BSONValue {
  val code = 0x04.toByte
  /**
   * Returns the [[BSONValue]] at the given `index`.
   *
   * If there is no such `index` or the matching value cannot be deserialized, returns `None`.
   */
  def get(index: Int): Option[BSONValue] = getTry(index).toOption
  /**
   * Returns the [[BSONValue]] at the given `index`.
   *
   * If there is no such `index` or the matching value cannot be deserialized, returns a `Failure`.
   * The `Failure` holds a [[exceptions.DocumentKeyNotFound]] if the key could not be found.
   */
  def getTry(index: Int): Try[BSONValue] =
    // BUG FIX: a negative index previously returned element 0, because
    // Stream.drop(n) with n < 0 is a no-op; reject it explicitly.
    if (index < 0) Failure(DocumentKeyNotFound(index.toString))
    else stream.drop(index).headOption.getOrElse(Failure(DocumentKeyNotFound(index.toString)))
  /**
   * Returns the [[BSONValue]] at the given `index`.
   *
   * If there is no such `index`, the resulting option will be `None`.
   * If the matching value could not be deserialized, returns a `Failure`.
   */
  def getUnflattenedTry(index: Int): Try[Option[BSONValue]] = getTry(index) match {
    case Failure(e: DocumentKeyNotFound) => Success(None)
    case Failure(e) => Failure(e)
    case Success(e) => Success(Some(e))
  }
  /**
   * Gets the [[BSONValue]] at the given `index`, and converts it with the given implicit [[BSONReader]].
   *
   * If there is no matching value, or the value could not be deserialized or converted, returns a `None`.
   */
  def getAs[T](index: Int)(implicit reader: BSONReader[_ <: BSONValue, T]): Option[T] = {
    getTry(index).toOption.flatMap { element =>
      Try(reader.asInstanceOf[BSONReader[BSONValue, T]].read(element)).toOption
    }
  }
  /**
   * Gets the [[BSONValue]] at the given `index`, and converts it with the given implicit [[BSONReader]].
   *
   * If there is no matching value, or the value could not be deserialized or converted, returns a `Failure`.
   * The `Failure` holds a [[exceptions.DocumentKeyNotFound]] if the key could not be found.
   */
  def getAsTry[T](index: Int)(implicit reader: BSONReader[_ <: BSONValue, T]): Try[T] = {
    val tt = getTry(index)
    tt.flatMap { element => Try(reader.asInstanceOf[BSONReader[BSONValue, T]].read(element)) }
  }
  /**
   * Gets the [[BSONValue]] at the given `index`, and converts it with the given implicit [[BSONReader]].
   *
   * If there is no matching value, returns a `Success` holding `None`.
   * If the value could not be deserialized or converted, returns a `Failure`.
   */
  def getAsUnflattenedTry[T](index: Int)(implicit reader: BSONReader[_ <: BSONValue, T]): Try[Option[T]] = getAsTry(index)(reader) match {
    case Failure(e: DocumentKeyNotFound) => Success(None)
    case Failure(e) => Failure(e)
    case Success(e) => Success(Some(e))
  }
  /** Creates a new [[BSONArray]] containing all the elements of this one and the elements of the given array. */
  def add(doc: BSONArray): BSONArray = new BSONArray(stream ++ doc.stream)
  /** Creates a new [[BSONArray]] containing all the elements of this one and the given `elements`. */
  def add(elements: Producer[BSONValue]*): BSONArray = new BSONArray(
    stream ++ elements.flatMap { el =>
      el.produce.map(value => Seq(Try(value))).getOrElse(Seq.empty)
    }.toStream)
  /** Alias for `add(array: BSONArray): BSONArray` */
  def ++(array: BSONArray): BSONArray = add(array)
  /** Alias for `add(elements: Producer[BSONValue]*): BSONArray` */
  def ++(elements: Producer[BSONValue]*): BSONArray = add(elements: _*)
  /** Iterates over elements paired with their stringified index (document-style keys). */
  def iterator: Iterator[Try[(String, BSONValue)]] = stream.zipWithIndex.map { vv =>
    vv._1.map(vv._2.toString -> _)
  }.toIterator
  /** All successfully deserialized values (failures are silently dropped). */
  def values: Stream[BSONValue] = stream.filter(_.isSuccess).map(_.get)
  lazy val length = stream.size
  /** Is this array empty? */
  def isEmpty: Boolean = stream.isEmpty
  // BUG FIX: previously printed "BSONDocument(...)" (copy-paste from BSONDocument).
  override def toString: String = "BSONArray(<" + (if (isEmpty) "empty" else "non-empty") + ">)"
}
object BSONArray {
  /** Creates a new [[BSONArray]] containing all the given `elements`. */
  def apply(elements: Producer[BSONValue]*): BSONArray = new BSONArray(
    elements.flatMap { el =>
      el.produce.map(value => Seq(Try(value))).getOrElse(Seq.empty)
    }.toStream)
  /** Creates a new [[BSONArray]] containing all the `elements` in the given `Traversable`. */
  def apply(elements: Traversable[BSONValue]): BSONArray = {
    new BSONArray(elements.toStream.map(Success(_)))
  }
  /** Returns a String representing the given [[BSONArray]]. */
  def pretty(array: BSONArray) = BSONIterator.pretty(array.iterator)
  /** An empty BSONArray. */
  val empty: BSONArray = BSONArray()
}
/**
 * A BSON binary value (BSON type 0x05).
 *
 * @param value The binary content.
 * @param subtype The type of the binary content.
 */
case class BSONBinary(value: ReadableBuffer, subtype: Subtype) extends BSONValue { val code = 0x05.toByte } // TODO
object BSONBinary {
  /** Convenience factory wrapping a raw byte array in a readable buffer. */
  def apply(value: Array[Byte], subtype: Subtype): BSONBinary =
    BSONBinary(ArrayReadableBuffer(value), subtype)
}
/** BSON Undefined value (BSON type 0x06, deprecated in the BSON spec). */
case object BSONUndefined extends BSONValue { val code = 0x06.toByte }
/** BSON ObjectId value. */
/** BSON ObjectId value (BSON type 0x07); wraps the raw 12-byte id. */
class BSONObjectID private (raw: Array[Byte]) extends BSONValue {
  val code = 0x07.toByte
  import java.util.Arrays
  import java.nio.ByteBuffer
  /** ObjectId hexadecimal String representation */
  lazy val stringify = Converters.hex2Str(raw)
  override def toString = "BSONObjectID(\\"" + stringify + "\\")"
  // Equality and hashCode are defined over the raw 12 bytes (value semantics
  // for the array-backed id).
  override def equals(obj: Any): Boolean = obj match {
    case BSONObjectID(arr) => Arrays.equals(raw, arr)
    case _ => false
  }
  override lazy val hashCode: Int = Arrays.hashCode(raw)
  /** The time of this BSONObjectId, in milliseconds */
  def time: Long = this.timeSecond * 1000L
  /** The time of this BSONObjectId, in seconds (first 4 bytes, big endian). */
  def timeSecond: Int = ByteBuffer.wrap(raw.take(4)).getInt
  // Defensive copy: callers cannot mutate the internal byte array.
  def valueAsArray = Arrays.copyOf(raw, 12)
}
object BSONObjectID {
  // The counter occupies 3 bytes, so it wraps at 2^24.
  private val maxCounterValue = 16777216
  private val increment = new java.util.concurrent.atomic.AtomicInteger(scala.util.Random.nextInt(maxCounterValue))
  // Monotonically increasing counter, normalized into [0, maxCounterValue)
  // even after the underlying Int overflows.
  private def counter = (increment.getAndIncrement + maxCounterValue) % maxCounterValue
  /**
   * Major Java version: 8 for "1.8.0_x", 9 for "9", 11 for "11.0.2" (the
   * post-JEP-223 scheme). Returns 0 when the version string is unparseable.
   */
  private def majorJavaVersion: Int = Try {
    val parts = System.getProperty("java.version").split("\\.")
    if (parts(0) == "1") parts(1).takeWhile(_.isDigit).toInt
    else parts(0).takeWhile(_.isDigit).toInt
  }.getOrElse(0)
  /**
   * The following implementation of machineId works around openjdk limitations in
   * version 6 and 7
   *
   * Openjdk fails to parse /proc/net/if_inet6 correctly to determine macaddress
   * resulting in SocketException thrown.
   *
   * Please see:
   * * https://github.com/openjdk-mirror/jdk7u-jdk/blob/feeaec0647609a1e6266f902de426f1201f77c55/src/solaris/native/java/net/NetworkInterface.c#L1130
   * * http://lxr.free-electrons.com/source/net/ipv6/addrconf.c?v=3.11#L3442
   * * http://lxr.free-electrons.com/source/include/linux/netdevice.h?v=3.11#L1130
   * * http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=7078386
   *
   * and fix in openjdk8:
   * * http://hg.openjdk.java.net/jdk8/tl/jdk/rev/b1814b3ea6d3
   */
  private val machineId = {
    import java.net._
    val validPlatform = Try {
      // BUG FIX: was `substring(0, 3).toFloat >= 1.8`, which throws on short
      // post-8 version strings such as "9" and mis-handles "11.0.2".
      val correctVersion = majorJavaVersion >= 8
      // BUG FIX: was `System.getProperty("java.net.preferIPv4Stack") == true`,
      // a String-to-Boolean comparison that is always false.
      val noIpv6 = "true".equalsIgnoreCase(System.getProperty("java.net.preferIPv4Stack"))
      val isLinux = System.getProperty("os.name") == "Linux"
      !isLinux || correctVersion || noIpv6
    }.getOrElse(false)
    // Check java policies.
    // NOTE(review): when no SecurityManager is installed, `sec` is null and the
    // Try fails, so the thread-id fallback is used; arguably absence of a
    // SecurityManager should count as permitted. Behavior kept as before.
    val permitted = {
      val sec = System.getSecurityManager()
      Try { sec.checkPermission(new NetPermission("getNetworkInformation")) }.isSuccess
    }
    if (validPlatform && permitted) {
      // 3 first bytes of md5(mac address, or hostname as a fallback).
      val networkInterfacesEnum = NetworkInterface.getNetworkInterfaces
      val networkInterfaces = scala.collection.JavaConverters.enumerationAsScalaIteratorConverter(networkInterfacesEnum).asScala
      val ha = networkInterfaces.find(ha => Try(ha.getHardwareAddress).isSuccess && ha.getHardwareAddress != null && ha.getHardwareAddress.length == 6)
        .map(_.getHardwareAddress)
        .getOrElse(InetAddress.getLocalHost.getHostName.getBytes)
      Converters.md5(ha).take(3)
    } else {
      // Fallback: derive 3 bytes from the current thread id.
      val threadId = Thread.currentThread.getId.toInt
      val arr = new Array[Byte](3)
      arr(0) = (threadId & 0xFF).toByte
      arr(1) = (threadId >> 8 & 0xFF).toByte
      arr(2) = (threadId >> 16 & 0xFF).toByte
      arr
    }
  }
  /**
   * Constructs a BSON ObjectId element from a hexadecimal String representation.
   * Throws an exception if the given argument is not a valid ObjectID.
   *
   * `parse(str: String): Try[BSONObjectID]` should be considered instead of this method.
   */
  def apply(id: String): BSONObjectID = {
    if (id.length != 24)
      throw new IllegalArgumentException(s"wrong ObjectId: '$id'")
    new BSONObjectID(Converters.str2Hex(id))
  }
  /** Constructs a BSON ObjectId from its 12 raw bytes (defensively copied). */
  def apply(array: Array[Byte]): BSONObjectID = {
    if (array.length != 12)
      throw new IllegalArgumentException(s"wrong byte array for an ObjectId (size ${array.length})")
    new BSONObjectID(java.util.Arrays.copyOf(array, 12))
  }
  /** Extracts a defensive copy of the id's 12 raw bytes. */
  def unapply(id: BSONObjectID): Option[Array[Byte]] = Some(id.valueAsArray)
  /** Tries to make a BSON ObjectId element from a hexadecimal String representation. */
  def parse(str: String): Try[BSONObjectID] = Try(apply(str))
  /**
   * Generates a new BSON ObjectID.
   *
   * +------------------------+------------------------+------------------------+------------------------+
   * + timestamp (in seconds) + machine identifier     + thread identifier      + increment              +
   * + (4 bytes)              + (3 bytes)              + (2 bytes)              + (3 bytes)              +
   * +------------------------+------------------------+------------------------+------------------------+
   *
   * The returned BSONObjectID contains a timestamp set to the current time (in seconds),
   * with the `machine identifier`, `thread identifier` and `increment` properly set.
   */
  def generate: BSONObjectID = fromTime(System.currentTimeMillis, false)
  /**
   * Generates a new BSON ObjectID from the given timestamp in milliseconds.
   *
   * The included timestamp is the number of seconds since epoch, so a BSONObjectID time part has only
   * a precision up to the second. To get a reasonably unique ID, you _must_ set `fillOnlyTimestamp` to false.
   *
   * Crafting a BSONObjectID from a timestamp with `fillOnlyTimestamp` set to true is helpful for range queries,
   * eg if you want of find documents an _id field which timestamp part is greater than or lesser than
   * the one of another id.
   *
   * If you do not intend to use the produced BSONObjectID for range queries, then you'd rather use
   * the `generate` method instead.
   *
   * @param fillOnlyTimestamp if true, the returned BSONObjectID will only have the timestamp bytes set; the other will be set to zero.
   */
  def fromTime(timeMillis: Long, fillOnlyTimestamp: Boolean = true): BSONObjectID = {
    // n of seconds since epoch. Big endian
    val timestamp = (timeMillis / 1000).toInt
    val id = new Array[Byte](12)
    id(0) = (timestamp >>> 24).toByte
    id(1) = (timestamp >> 16 & 0xFF).toByte
    id(2) = (timestamp >> 8 & 0xFF).toByte
    id(3) = (timestamp & 0xFF).toByte
    if (!fillOnlyTimestamp) {
      // machine id, 3 first bytes of md5(macadress or hostname)
      id(4) = machineId(0)
      id(5) = machineId(1)
      id(6) = machineId(2)
      // 2 bytes of the pid or thread id. Thread id in our case. Low endian
      val threadId = Thread.currentThread.getId.toInt
      id(7) = (threadId & 0xFF).toByte
      id(8) = (threadId >> 8 & 0xFF).toByte
      // 3 bytes of counter sequence, which start is randomized. Big endian
      val c = counter
      id(9) = (c >> 16 & 0xFF).toByte
      id(10) = (c >> 8 & 0xFF).toByte
      id(11) = (c & 0xFF).toByte
    }
    BSONObjectID(id)
  }
}
/** BSON boolean value */
case class BSONBoolean(value: Boolean) extends BSONValue { val code = 0x08.toByte }
/** BSON date time value. NOTE(review): unit is presumably milliseconds since epoch — confirm against the reader/writer. */
case class BSONDateTime(value: Long) extends BSONValue { val code = 0x09.toByte }
/** BSON null value */
case object BSONNull extends BSONValue { val code = 0x0A.toByte }
/**
 * BSON Regex value.
 *
 * @param value The regular expression pattern.
 * @param flags Regex flags.
 */
case class BSONRegex(value: String, flags: String) extends BSONValue { val code = 0x0B.toByte }
/** BSON DBPointer value (type 0x0C, deprecated in the BSON spec but kept for wire compatibility). */
case class BSONDBPointer(value: String, id: Array[Byte]) extends BSONValue {
  val code = 0x0C.toByte
  /** The BSONObjectID representation of this reference. */
  val objectId = BSONObjectID(id)
}
/**
 * BSON JavaScript value.
 *
 * @param value The JavaScript source code.
 */
case class BSONJavaScript(value: String) extends BSONValue { val code = 0x0D.toByte }
/** BSON Symbol value. */
case class BSONSymbol(value: String) extends BSONValue { val code = 0x0E.toByte }
/**
 * BSON scoped JavaScript value.
 *
 * @param value The JavaScript source code. NOTE(review): the scope document is not modelled here — confirm how it is handled.
 */
case class BSONJavaScriptWS(value: String) extends BSONValue { val code = 0x0F.toByte }
/** BSON Integer value (32-bit) */
case class BSONInteger(value: Int) extends BSONValue { val code = 0x10.toByte }
/** BSON Timestamp value. NOTE(review): internal MongoDB timestamp type; the packing of the Long is not evident here — confirm. */
case class BSONTimestamp(value: Long) extends BSONValue { val code = 0x11.toByte }
/** BSON Long value (64-bit) */
case class BSONLong(value: Long) extends BSONValue { val code = 0x12.toByte }
/** BSON Min key value (sorts before every other BSON value) */
object BSONMinKey extends BSONValue { val code = 0xFF.toByte }
/** BSON Max key value (sorts after every other BSON value) */
object BSONMaxKey extends BSONValue { val code = 0x7F.toByte }
/** Binary Subtype */
sealed trait Subtype {

  /** Subtype code, as written on the wire. */
  val value: Byte
}

object Subtype {
  case object GenericBinarySubtype extends Subtype { val value = 0x00.toByte }
  case object FunctionSubtype extends Subtype { val value = 0x01.toByte }
  case object OldBinarySubtype extends Subtype { val value = 0x02.toByte }
  case object OldUuidSubtype extends Subtype { val value = 0x03.toByte }
  case object UuidSubtype extends Subtype { val value = 0x04.toByte }
  case object Md5Subtype extends Subtype { val value = 0x05.toByte }
  case object UserDefinedSubtype extends Subtype { val value = 0x80.toByte }

  /**
   * Resolves a subtype from its wire code, matching each case object's own
   * `value` (so codes are written exactly once above).
   *
   * Throws `NoSuchElementException` for an unknown code.
   */
  def apply(code: Byte) =
    code match {
      case GenericBinarySubtype.value => GenericBinarySubtype
      case FunctionSubtype.value      => FunctionSubtype
      case OldBinarySubtype.value     => OldBinarySubtype
      case OldUuidSubtype.value       => OldUuidSubtype
      case UuidSubtype.value          => UuidSubtype
      case Md5Subtype.value           => Md5Subtype
      case UserDefinedSubtype.value   => UserDefinedSubtype
      case _ =>
        throw new NoSuchElementException(s"binary type = $code")
    }
}
| qubell/ReactiveMongo | bson/src/main/scala/types.scala | Scala | apache-2.0 | 22,163 |
package org.scalafmt.config
import metaconfig._
/** Configuration for scalafmt optimizations.
  *
  * @param dequeueOnNewStatements
  *   Clear the search queue on new statements.
  * @param escapeInPathologicalCases
  *   Use heuristics to escape when the search state grows out of bounds.
  *
  *   An optimization that trades off optimal formatting output in order to
  *   complete in a reasonable time. Used as a last resort.
  * @param maxVisitsPerToken
  *   Visit the same formatToken at most [[maxVisitsPerToken]] times.
  * @param maxEscapes
  *   How often do we try to escape before giving up and use original
  *   formatting.
  * @param maxDepth
  *   Maximum depth of recursion.
  * @param acceptOptimalAtHints
  *   Whether to listen to optimalAt fields in Splits.
  * @param disableOptimizationsInsideSensitiveAreas
  *   Do not optimize inside certain areas such as term apply.
  * @param pruneSlowStates
  *   Eliminate solutions that move slower than other solutions.
  *   - If a solution reaches a point X first and other solution that reaches
  *     the same point later, the first solution is preferred if it can be
  *     verified to be always better (see
  *     [[org.scalafmt.internal.State.alwaysBetter]]).
  *   - Note. This affects the output positively because it breaks a tie between
  *     two equally expensive solutions by eliminating the slower one.
  *
  *   - Example: solution 1 is preferred even though both solutions cost the
  *     same:
  *     {{{
  *     // solution 1
  *     a + b +
  *     c + d
  *     // solution 2
  *     a +
  *     b + c + d
  *     }}}
  * @param recurseOnBlocks
  *   Recursively format { ... } blocks inside no optimization zones.
  *
  *   By starting a new search queue, we can perform aggressive optimizations
  *   inside optimizations zones.
  * @param forceConfigStyleOnOffset
  *   If negative number, does nothing. If n >= 0, then scalafmt will force
  *   "config style" on Term.Apply nodes IF it has more than
  *   [[forceConfigStyleMinArgCount]] arguments AND the non-whitespace byte
  *   offset between the opening parens and closing parens is greater than
  *   [[forceConfigStyleOnOffset]]. By forcing config style on such
  *   applications, the search space is greatly reduced.
  * @param forceConfigStyleMinArgCount
  *   Minimum number of arguments a Term.Apply must exceed before
  *   [[forceConfigStyleOnOffset]] may force "config style"; see above.
  */
case class ScalafmtOptimizer(
    dequeueOnNewStatements: Boolean = true,
    escapeInPathologicalCases: Boolean = true,
    maxVisitsPerToken: Int = 10000,
    maxEscapes: Int = 16,
    maxDepth: Int = 100,
    acceptOptimalAtHints: Boolean = true,
    disableOptimizationsInsideSensitiveAreas: Boolean = true,
    pruneSlowStates: Boolean = true,
    recurseOnBlocks: Boolean = true,
    forceConfigStyleOnOffset: Int = 150,
    forceConfigStyleMinArgCount: Int = 2
)
object ScalafmtOptimizer {

  /** Instance carrying every default setting; reused as the codec's base. */
  val default = ScalafmtOptimizer()

  implicit lazy val surface: generic.Surface[ScalafmtOptimizer] =
    generic.deriveSurface

  implicit lazy val codec: ConfCodecEx[ScalafmtOptimizer] =
    generic.deriveCodecEx(default).noTypos

  /** Settings with every search-space optimization switched off. */
  // TODO(olafur) uncomment once scala.meta converter supports default args.
  val noOptimizations: ScalafmtOptimizer = default.copy(
    dequeueOnNewStatements = false,
    escapeInPathologicalCases = false,
    acceptOptimalAtHints = false,
    disableOptimizationsInsideSensitiveAreas = false,
    pruneSlowStates = false,
    recurseOnBlocks = false
  )
}
| scalameta/scalafmt | scalafmt-core/shared/src/main/scala/org/scalafmt/config/ScalafmtOptimizer.scala | Scala | apache-2.0 | 3,388 |
package provingground.learning
import provingground._, HoTT._
import GeneratorVariables._, Expression._, TermRandomVars._, GeneratorNode._,
TermGeneratorNodes._
import provingground.learning.Sort.All
import provingground.learning.Sort.Filter
import provingground.learning.Sort.Restrict
import scala.collection.parallel.CollectionConverters._
import scala.collection.parallel.immutable._
import provingground.induction.ExstInducDefn
import provingground.induction.ExstInducStrucs
class DerivedEquations(
tg: TermGeneratorNodes[TermState] = TermGeneratorNodes.Base
) {
  /** Expression for the probability of `a` in the final distribution of `rv`. */
  def finalProb[X](a: X, rv: RandomVar[X]): Expression = FinalVal(Elem(a, rv))
  /** Expression for the probability of `y` in the initial distribution of `rv`. */
  def initProb[Y](y: Y, rv: RandomVar[Y]): Expression = InitialVal(Elem(y, rv))
def conditionedProb[O, Y](
a: O,
input: RandomVar[O],
output: RandomVar[Y],
condition: Sort[O, Y]
) =
Coeff(conditionedVar(input, output, condition)) * finalProb(
a,
input
)
  /** Equation making `typ` a target type, generated from `Typs` via `TargetTyps.fromTyp`. */
  def asTarget(typ: Typ[Term]) =
    EquationNode(
      finalProb(typ, TargetTyps),
      Coeff(TargetTyps.fromTyp) * finalProb(typ, Typs)
    )
def targets(typs: Set[Typ[Term]]): Set[EquationNode] = typs.map(asTarget(_))
  /** Target-type equations for all types among `vals`, recursing into
    * island-internal variables and mapping the resulting equations back out.
    */
  def recTargets(vals: Set[VarVal[_]]): Set[EquationNode] = {
    // types appearing directly as final values
    val base = vals.collect {
      case FinalVal(Elem(typ: Typ[u], Typs)) => asTarget(typ)
    }
    // island-internal final values, grouped by the (boat, isle) they live in
    val inner = vals
      .collect {
        case FinalVal(InIsle(variable, boat, isle)) =>
          ((boat, isle), FinalVal(variable): VarVal[_])
      }
      .groupMap(_._1)(_._2)
      .toSet
    // recurse within each island, then re-embed the equations via the isle's variable map
    val innerEqs = inner.flatMap {
      case ((boat, isle), s) =>
        recTargets(s).map(_.mapVars(InIsle.variableMap(boat, isle)))
    }
    base union innerEqs
  }
def expressionInIsle(
exp: Expression,
boat: Any,
isle: Island[_, _, _, _]
): Option[Expression] = exp match {
case FinalVal(InIsle(variable, b, isl)) if b == boat && isl == isle =>
Some(FinalVal(variable))
case Coeff(node) => Some(Coeff(node))
case Product(x, y) =>
for {
a <- expressionInIsle(x, boat, isle)
b <- expressionInIsle(y, boat, isle)
} yield Product(a, b)
}
  /** Applies `step` to `init` at the top level, and recursively inside every
    * island occurring in `init`, re-embedding island results via the isle's
    * variable map.
    */
  def recursiveDerived(
      init: Set[EquationNode],
      step: => (Set[EquationNode] => Set[EquationNode])
  ): Set[EquationNode] = {
    val base = step(init)
    // equations fully inside an island, grouped by their (boat, isle);
    // rhs is kept only when expressionInIsle can restrict it to the island
    val inner = init
      .collect {
        case EquationNode(FinalVal(InIsle(variable, boat, isle)), rhs) =>
          expressionInIsle(rhs, boat, isle).map(
            r => ((boat, isle), EquationNode(FinalVal(variable), r))
          )
      }
      .flatten
      .groupMap(_._1)(_._2)
      .toSet
    val innerEqs = inner.flatMap {
      case ((boat, isle), s) =>
        recursiveDerived(s, step).map(_.mapVars(InIsle.variableMap(boat, isle)))
    }
    base union innerEqs
  }
def conditionWithTyp(t: Term): EquationNode =
EquationNode(
finalProb(t, termsWithTyp(t.typ)),
conditionedProb(
t,
Terms,
termsWithTyp(t.typ),
Sort.Filter[Term](WithTyp(t.typ))
)
)
def conditionAsFunction(t: Term): Set[EquationNode] =
ExstFunc
.opt(t)
.map { f =>
Set(
EquationNode(
finalProb(f, Funcs),
conditionedProb(t, Terms, Funcs, funcSort)
),
EquationNode(
finalProb(f, funcsWithDomain(f.dom)),
conditionedProb(
t,
Terms,
funcsWithDomain(f.dom),
Sort.Restrict(FuncWithDom(f.dom))
)
)
)
}
.getOrElse(Set.empty[EquationNode])
def conditionAsTypFamily(t: Term): Set[EquationNode] =
if (isTypFamily(t))
Set(
EquationNode(
finalProb(t, TypFamilies),
conditionedProb(t, Terms, TypFamilies, typFamilySort)
)
)
else Set.empty[EquationNode]
import tg._
  /** Optional initialization equation for `x` landing in `rv` through `sort`:
    * none for `All`, a filter-guarded equation for `Filter`, and an equation
    * for the mapped value for `Restrict`.
    */
  def initEquationOpt[Y](
      x: Term,
      rv: RandomVar[Y],
      sort: Sort[Term, Y]
  ): Option[EquationNode] = sort match {
    case All() => None
    case Filter(pred) =>
      if (pred(x))
        Some(
          EquationNode(
            finalProb(x, rv),
            Coeff(Init(rv)) * finalProb(x, Terms)
          )
        )
      else None
    case Restrict(optMap) =>
      optMap(x).map(
        y =>
          EquationNode(
            finalProb(y, rv),
            Coeff(Init(rv)) * finalProb(x, Terms)
          )
      )
  }
  /** Initialization equations for all of `xs`, in each of the derived random
    * variables: functions, type families, terms with their own type, and
    * functions with their own domain.
    */
  def allInitEquations(xs: Set[Term]): Set[EquationNode] =
    xs.flatMap(x => initEquationOpt(x, Funcs, Sort.Restrict(FuncOpt)))
      .union(
        xs.flatMap(
          x => initEquationOpt(x, TypFamilies, Sort.Restrict(TypFamilyOpt))
        )
      )
      .union(
        xs.flatMap(
          x =>
            initEquationOpt(
              x,
              termsWithTyp(x.typ),
              Sort.Filter[Term](WithTyp(x.typ))
            )
        )
      )
      .union(
        xs.flatMap(
          x =>
            ExstFunc
              .opt(x)
              .map { fn =>
                val rv = funcsWithDomain(fn.dom)
                EquationNode(
                  finalProb(fn, rv),
                  Coeff(Init(rv)) * finalProb(x, Terms)
                )
              }
        )
      )
  /** Flips an application equation between the two generation orders: an
    * `applnNode` equation (function first) becomes the corresponding
    * `applnByArgNode` equation (argument first), and vice versa; `None` for
    * any other equation shape.
    */
  def applnFlip(eqn: EquationNode): Option[EquationNode] =
    (coeffFactor(eqn.rhs), varFactors(eqn.rhs)) match {
      case (
          Some(`applnNode`),
          Vector(Elem(Funcs, f: ExstFunc), Elem(_, a: Term))
          ) =>
        Some(
          EquationNode(
            eqn.lhs,
            Coeff(applnByArgNode) * finalProb(a, Terms) * finalProb(
              f,
              funcsWithDomain(a.typ)
            )
          )
        )
      case (
          Some(`applnByArgNode`),
          Vector(Elem(Terms, a: Term), Elem(_, f: ExstFunc))
          ) =>
        Some(
          EquationNode(
            eqn.lhs,
            Coeff(applnNode) * finalProb(f, Funcs) * finalProb(
              a,
              termsWithTyp(f.dom)
            )
          )
        )
      case _ => None
    }
  /** Equations for folding `fn` over `args` one argument at a time, recording
    * for each application both generation orders (function-first and
    * argument-first).
    *
    * NOTE(review): `ExstFunc.opt(fn).get` throws if `fn` is not a function at
    * some stage — presumably callers guarantee `args` matches `fn`'s arity;
    * confirm.
    */
  def funcFoldEqs(
      fn: Term,
      args: Vector[Term],
      accum: Set[EquationNode] = Set()
  ): Set[EquationNode] =
    args match {
      case Vector() => accum
      case a +: ys =>
        val tailFunc = fold(fn)(a)
        val f = ExstFunc.opt(fn).get
        val lhs = finalProb(tailFunc, Terms)
        val headEqs = Set(
          EquationNode(
            lhs,
            Coeff(applnNode) * finalProb(f, Funcs) * finalProb(
              a,
              termsWithTyp(f.dom)
            )
          ),
          EquationNode(
            lhs,
            Coeff(applnByArgNode) * finalProb(a, Terms) * finalProb(
              f,
              funcsWithDomain(a.typ)
            )
          )
        )
        funcFoldEqs(tailFunc, ys, headEqs union (accum))
    }
  /** Like [[funcFoldEqs]], but the final application is treated as producing a
    * type (`Typs`, via `typApplnNode`) rather than a term.
    *
    * NOTE(review): in the multi-argument case below the recursion goes through
    * `funcFoldEqs`, so the special `Typs` handling of the last argument is
    * lost for arities above one — suspected bug (recursing into
    * `typFuncFoldEquations` would preserve it); confirm intent before
    * changing.
    */
  def typFuncFoldEquations(
      fn: Term,
      args: Vector[Term],
      accum: Set[EquationNode] = Set()
  ): Set[EquationNode] =
    args match {
      case Vector() => accum
      case a +: Vector() =>
        // last argument: the result is a type
        val tailFunc = fold(fn)(a)
        val f = ExstFunc.opt(fn).get
        val lhs = finalProb(tailFunc, Typs)
        val headEq =
          EquationNode(
            lhs,
            Coeff(typApplnNode) * finalProb(f, Funcs) * finalProb(
              a,
              termsWithTyp(f.dom)
            )
          )
        accum + headEq
      case a +: ys =>
        val tailFunc = fold(fn)(a)
        val f = ExstFunc.opt(fn).get
        val lhs = finalProb(tailFunc, Terms)
        val headEqs = Set(
          EquationNode(
            lhs,
            Coeff(applnNode) * finalProb(f, Funcs) * finalProb(
              a,
              termsWithTyp(f.dom)
            )
          ),
          EquationNode(
            lhs,
            Coeff(applnByArgNode) * finalProb(a, Terms) * finalProb(
              f,
              funcsWithDomain(a.typ)
            )
          )
        )
        funcFoldEqs(tailFunc, ys, headEqs union (accum))
    }
def scaleValue(boat: Term, el: Elem[_]): Expression =
if (boat == el.element)
(IsleScale("%boat" :: boat.typ) * -1) + Literal(1)
else IsleScale("%boat" :: boat.typ) * FinalVal(el)
  /** Equations for the formal (syntactic) generation of the term `t`,
    * recursing over its structure: applications, identity types, lambdas,
    * pi/function/sigma/product types, recursive and inductive functions,
    * inclusions and pairs. `ctx` tracks the variables introduced so far.
    */
  def formalEquations(
      t: Term,
      ctx: Context = Context.Empty
  ): Set[EquationNode] = {
    val base: Set[EquationNode] = t match {
      // function application: both generation orders plus the conditioning
      // equations relating the derived random variables to Terms
      case MiscAppln(fn: FuncLike[u, v], a) =>
        val f = ExstFunc(fn)
        val lhs = finalProb(t, Terms)
        val funcSet: Set[EquationNode] = Set(
          EquationNode(
            lhs,
            Coeff(applnNode) * finalProb(f, Funcs) * finalProb(
              a,
              termsWithTyp(f.dom)
            )
          ),
          EquationNode(
            finalProb(a, termsWithTyp(f.dom)),
            finalProb(a, Terms) /
              FinalVal(Event(Terms, Sort.Filter[Term](WithTyp(f.dom))))
          ),
          EquationNode(
            FinalVal(Event(Terms, Sort.Filter[Term](WithTyp(f.dom)))),
            finalProb(a, Terms)
          ),
          EquationNode(
            finalProb(f, Funcs),
            finalProb(fn, Terms) /
              FinalVal(Event(Terms, Sort.Restrict(FuncOpt)))
          ),
          EquationNode(
            FinalVal(Event(Terms, Sort.Restrict(FuncOpt))),
            finalProb(fn, Terms)
          ),
          EquationNode(
            lhs,
            Coeff(applnByArgNode) * finalProb(a, Terms) * finalProb(
              f,
              funcsWithDomain(a.typ)
            )
          ),
          EquationNode(
            finalProb(f, funcsWithDomain(a.typ)),
            finalProb(fn, Terms) /
              FinalVal(Event(Terms, Sort.Restrict(FuncWithDom(a.typ))))
          ),
          EquationNode(
            FinalVal(Event(Terms, Sort.Restrict(FuncWithDom(a.typ)))),
            finalProb(fn, Terms)
          )
        )
        // additional equations when fn can also be seen as a type family
        val typFamilySet: Set[EquationNode] = TypFamilyOpt(fn).toSet.flatMap {
          f: ExstFunc =>
            Set(
              EquationNode(
                lhs,
                Coeff(applnNode) * finalProb(f, TypFamilies) * finalProb(
                  a,
                  termsWithTyp(f.dom)
                )
              ),
              EquationNode(
                finalProb(f, TypFamilies),
                finalProb(fn, Terms) /
                  FinalVal(Event(Terms, Sort.Restrict(TypFamilyOpt)))
              ),
              EquationNode(
                FinalVal(Event(Terms, Sort.Restrict(TypFamilyOpt))),
                finalProb(fn, Terms)
              )
            )
        }
        funcSet union (typFamilySet) union (formalEquations(fn)) union (formalEquations(
          a
        ))
      // identity type and its reflexivity witness, via folding the builders
      case idt: IdentityTyp[u] =>
        funcFoldEqs(IdentityTyp.idFunc, Vector(idt.dom, idt.lhs, idt.rhs))
      case idt: Refl[u] =>
        funcFoldEqs(IdentityTyp.reflTerm, Vector(idt.dom, idt.value))
      // lambda: recurse into an island with the bound variable as boat
      case lt: LambdaLike[u, v] =>
        val coeff = Coeff(tg.lambdaNode)
        val boat = lt.variable
        val isle = tg.lambdaIsle(lt.dom)
        val eqs = formalEquations(lt.value, ctx.addVariable(boat))
        val isleEqs =
          eqs.map(_.mapVars(InIsle.variableMap(boat, isle)))
        val outerEqs =
          eqs.filterNot(DerivedEquations.equationDepends(boat, _))
        val bridgeEq = EquationNode(
          FinalVal(Elem(lt, Terms)),
          coeff * finalProb(lt.dom, Typs) * FinalVal(
            InIsle(Elem(lt.value, isle.islandOutput(boat)), boat, isle)
          )
        )
        // elements that must be initialised inside the island: all boat-independent
        // values appearing in the recursive equations, plus the boat itself
        val initVarElems = (eqs union formalEquations(lt.dom, ctx))
          .flatMap { (eq) =>
            Expression.varVals(eq.rhs) union Expression.varVals(eq.lhs)
          }
          .collect {
            case FinalVal(Elem(el: Term, Terms))
                if !el.dependsOn(lt.variable) =>
              Elem(el, Terms): Elem[_]
            case FinalVal(Elem(el: Typ[Term], Typs))
                if !el.dependsOn(lt.variable) =>
              Elem(el, Typs): Elem[_]
            case FinalVal(Elem(el: ExstFunc, Funcs))
                if !el.func.dependsOn(lt.variable) =>
              Elem(el, Funcs): Elem[_]
          } union (Set(Elem(lt.value, Terms))
          .filter(_.element.indepOf(boat))
          .map(t => t: Elem[_])) union typOpt(boat)
          .map(typ => Elem(typ, Typs))
          .toSet union ExstFunc
          .opt(boat)
          .map { fn =>
            Elem(fn, Funcs)
          }
          .toSet + Elem(boat, Terms)
        val isleIn: Set[EquationNode] =
          (initVarElems + Elem(boat, Terms)).map { el =>
            val rhs =
              scaleValue(boat, el)
            EquationNode(
              InitialVal(InIsle(el, boat, isle)),
              rhs
            )
          }
        val initInIsle = initEquations(initVarElems.map(FinalVal(_)))
          .map(_.mapVars(InIsle.variableMap(boat, isle)))
        (isleIn
          .union(isleEqs)
          .union(initInIsle) + bridgeEq) union formalEquations(lt.dom, ctx) union (outerEqs)
      // dependent function type: same island pattern with the pi node
      case pd: PiDefn[u, v] =>
        val coeff = Coeff(tg.piNode)
        val boat = pd.variable
        val isle = tg.piIsle(pd.domain)
        val eqs = formalTypEquations(pd.value, ctx.addVariable(boat))
        val isleEqs =
          eqs.map(_.mapVars(InIsle.variableMap(boat, isle)))
        val outerEqs =
          eqs.filterNot(DerivedEquations.equationDepends(boat, _))
        // pprint.log("outer equations")
        // outerEqs.foreach(println(_))
        val bridgeEq = EquationNode(
          FinalVal(Elem(pd, Typs)),
          coeff * finalProb(pd.domain, Typs) * FinalVal(
            InIsle(Elem(pd.value, isle.islandOutput(boat)), boat, isle)
          )
        )
        val initVarElems = (eqs union formalTypEquations(pd.domain, ctx))
          .flatMap { (eq) =>
            Expression.varVals(eq.rhs) union Expression.varVals(eq.lhs)
          }
          .collect {
            case FinalVal(Elem(el: Term, Terms))
                if !el.dependsOn(pd.variable) =>
              Elem(el, Terms): Elem[_]
            case FinalVal(Elem(el: Typ[Term], Typs))
                if !el.dependsOn(pd.variable) =>
              Elem(el, Typs): Elem[_]
            case FinalVal(Elem(el: ExstFunc, Funcs))
                if !el.func.dependsOn(pd.variable) =>
              Elem(el, Funcs): Elem[_]
            case FinalVal(Elem(el: ExstFunc, TypFamilies))
                if !el.func.dependsOn(boat) =>
              Elem(el, TypFamilies): Elem[_]
          } union (Set(Elem(pd.value, Typs))
          .filter(_.element.indepOf(boat))
          .map(t => t: Elem[_])) union typOpt(boat)
          .map(typ => Elem(typ, Typs))
          .toSet + Elem(boat, Terms)
        val isleIn: Set[EquationNode] =
          (initVarElems + Elem(boat, Terms)).map { el =>
            val rhs =
              scaleValue(boat, el)
            EquationNode(
              InitialVal(InIsle(el, boat, isle)),
              rhs
            )
          }
        val initInIsle = initEquations(initVarElems.map(FinalVal(_)))
          .map(_.mapVars(InIsle.variableMap(boat, isle)))
        (isleIn
          .union(isleEqs)
          .union(initInIsle) + bridgeEq) union (formalTypEquations(
          pd.domain,
          ctx
        )).union(outerEqs)
      // (non-dependent) function type: fresh bound variable for the island
      case pd: FuncTyp[u, v] =>
        val coeff = Coeff(tg.piNode)
        val boat = nextVar(pd.dom, ctx.variables)
        val isle = tg.piIsle(pd.domain)
        val eqs = formalTypEquations(pd.codom, ctx.addVariable(boat))
        val isleEqs =
          eqs.map(_.mapVars(InIsle.variableMap(boat, isle)))
        val outerEqs =
          eqs.filterNot(DerivedEquations.equationDepends(boat, _))
        val bridgeEq = EquationNode(
          FinalVal(Elem(pd, Typs)),
          coeff * finalProb(pd.domain, Typs) * FinalVal(
            InIsle(Elem(pd.codom, isle.islandOutput(boat)), boat, isle)
          )
        )
        val initVarElems = (eqs union formalTypEquations(pd.domain, ctx))
          .flatMap { (eq) =>
            Expression.varVals(eq.rhs) union Expression.varVals(eq.lhs)
          }
          .collect {
            case FinalVal(Elem(el: Term, Terms)) if !el.dependsOn(boat) =>
              Elem(el, Terms): Elem[_]
            case FinalVal(Elem(el: Typ[Term], Typs)) if !el.dependsOn(boat) =>
              Elem(el, Typs): Elem[_]
            case FinalVal(Elem(el: ExstFunc, Funcs))
                if !el.func.dependsOn(boat) =>
              Elem(el, Funcs): Elem[_]
            case FinalVal(Elem(el: ExstFunc, TypFamilies))
                if !el.func.dependsOn(boat) =>
              Elem(el, TypFamilies): Elem[_]
          } union (Set(Elem(pd.codom, Typs))
          .filter(_.element.indepOf(boat))
          .map(t => t: Elem[_])) union typOpt(boat)
          .map(typ => Elem(typ, Typs))
          .toSet + Elem(boat, Terms)
        val isleIn: Set[EquationNode] =
          (initVarElems + Elem(boat, Terms)).map { el =>
            val rhs =
              scaleValue(boat, el)
            EquationNode(
              InitialVal(InIsle(el, boat, isle)),
              rhs
            )
          }
        val initInIsle = initEquations(initVarElems.map(FinalVal(_)))
          .map(_.mapVars(InIsle.variableMap(boat, isle)))
        (isleIn
          .union(isleEqs)
          .union(initInIsle) + bridgeEq) union formalTypEquations(
          pd.domain,
          ctx
        ).union(outerEqs)
      // dependent pair type: island over the fibre's bound variable
      case pd: SigmaTyp[u, v] =>
        val coeff = Coeff(tg.sigmaNode)
        val boat = pd.fib.variable
        val isle = tg.sigmaIsle(pd.fib.dom)
        val eqs = formalEquations(pd.fib.value, ctx.addVariable(boat))
        val isleEqs =
          eqs.map(_.mapVars(InIsle.variableMap(boat, isle)))
        val outerEqs =
          eqs.filterNot(DerivedEquations.equationDepends(boat, _))
        val bridgeEq = EquationNode(
          FinalVal(Elem(pd, Typs)),
          coeff * finalProb(pd.fib.dom, Typs) * FinalVal(
            InIsle(Elem(pd.fib.value, isle.islandOutput(boat)), boat, isle)
          )
        )
        val initVarElems = (eqs union (formalEquations(pd.fib.dom, ctx)))
          .flatMap { (eq) =>
            Expression.varVals(eq.rhs) union Expression.varVals(eq.lhs)
          }
          .collect {
            case FinalVal(Elem(el: Term, Terms)) if !el.dependsOn(boat) =>
              Elem(el, Terms): Elem[_]
            case FinalVal(Elem(el: Typ[Term], Typs)) if !el.dependsOn(boat) =>
              Elem(el, Typs): Elem[_]
            case FinalVal(Elem(el: ExstFunc, Funcs))
                if !el.func.dependsOn(boat) =>
              Elem(el, Funcs): Elem[_]
            case FinalVal(Elem(el: ExstFunc, TypFamilies))
                if !el.func.dependsOn(boat) =>
              Elem(el, TypFamilies): Elem[_]
          } union (Set(Elem(pd.fib.value, Typs))
          .filter(_.element.indepOf(boat))
          .map(t => t: Elem[_])) union typOpt(boat)
          .map(typ => Elem(typ, Typs))
          .toSet + Elem(boat, Terms)
        val isleIn: Set[EquationNode] =
          (initVarElems + Elem(boat, Terms)).map { el =>
            val rhs =
              scaleValue(boat, el)
            EquationNode(
              InitialVal(InIsle(el, boat, isle)),
              rhs
            )
          }
        val initInIsle = initEquations(initVarElems.map(FinalVal(_)))
          .map(_.mapVars(InIsle.variableMap(boat, isle)))
        (isleIn
          .union(isleEqs)
          .union(initInIsle) + bridgeEq) union (formalEquations(
          pd.fib.dom,
          ctx
        )).union(outerEqs)
      // product type
      // NOTE(review): the coefficient here is built from the isle itself rather
      // than a generator node (contrast the sigma case above) — confirm intended.
      case pd: ProdTyp[u, v] =>
        val coeff = Coeff(tg.sigmaIsle(pd.first))
        val x = pd.first
        val isle = tg.sigmaIsle(pd.first)
        Set(
          EquationNode(
            FinalVal(Elem(pd, Typs)),
            coeff * FinalVal(
              InIsle(Elem(pd.second, isle.islandOutput(x)), x, isle)
            )
          )
        )
      // recursion/induction functions: generated from their (target) type
      case rf: RecFunc[u, v] =>
        val direct = EquationNode(
          FinalVal(Elem(rf, Terms)),
          Coeff(tg.targetInducNode(rf.typ)) *
            finalProb(rf.typ, TargetTyps)
        )
        val offspring =
          (rf.defnData :+ rf.typ).toSet.flatMap(formalEquations(_, ctx))
        offspring + direct
      case rf: InducFuncLike[u, v] =>
        val direct = EquationNode(
          FinalVal(Elem(rf, Terms)),
          Coeff(tg.targetInducNode(rf.typ)) *
            finalProb(rf.typ, TargetTyps)
        )
        val offspring =
          (rf.defnData :+ rf.typ).toSet.flatMap(formalEquations(_, ctx))
        offspring + direct
      // coproduct inclusions
      case i1: PlusTyp.FirstIncl[u, v] =>
        incl1Node(i1.typ).map { node =>
          EquationNode(
            finalProb(i1.value, Terms),
            Coeff(node) * finalProb(i1.value, termsWithTyp(i1.typ.first))
          )
        }.toSet
      case i2: PlusTyp.ScndIncl[u, v] =>
        incl2Node(i2.typ).map { node =>
          EquationNode(
            finalProb(i2.value, Terms),
            Coeff(node) * finalProb(i2.value, termsWithTyp(i2.typ.second))
          )
        }.toSet
      // (dependent) pairs: generated componentwise from the pair's type node
      case pair @ PairTerm(first: Term, second: Term) =>
        nodeForTyp(pair.typ).map { node =>
          EquationNode(
            finalProb(pair, Terms),
            Coeff(node) * finalProb(first, termsWithTyp(first.typ)) * finalProb(
              second,
              termsWithTyp(second.typ)
            )
          )
        }.toSet
      case pair @ DepPair(first: Term, second: Term, _) =>
        nodeForTyp(pair.typ).map { node =>
          EquationNode(
            finalProb(pair, Terms),
            Coeff(node) * finalProb(first, termsWithTyp(first.typ)) * finalProb(
              second,
              termsWithTyp(second.typ)
            )
          )
        }.toSet
      // fallback: atomic terms contribute no structural equations
      case t: Term => Set()
    }
    base union initEquations(Set(FinalVal(Elem(t, Terms))))
  }
  /** Like [[formalEquations]], but for a type `t`: equations for its formal
    * generation as an element of `Typs`, recursing over its structure.
    */
  def formalTypEquations(
      t: Typ[Term],
      ctx: Context = Context.Empty
  ): Set[EquationNode] = {
    val base: Set[EquationNode] = t match {
      // application yielding a type (via typApplnNode) plus conditioning equations
      case MiscAppln(fn: FuncLike[u, v], a) =>
        val f = ExstFunc(fn)
        val lhs = finalProb(t, Typs)
        val funcSet: Set[EquationNode] = Set(
          EquationNode(
            lhs,
            Coeff(typApplnNode) * finalProb(f, Funcs) * finalProb(
              a,
              termsWithTyp(f.dom)
            )
          ),
          EquationNode(
            finalProb(a, termsWithTyp(f.dom)),
            finalProb(a, Terms) /
              FinalVal(Event(Terms, Sort.Filter[Term](WithTyp(f.dom))))
          ),
          EquationNode(
            FinalVal(Event(Terms, Sort.Filter[Term](WithTyp(f.dom)))),
            finalProb(a, Terms)
          ),
          EquationNode(
            finalProb(f, Funcs),
            finalProb(fn, Terms) /
              FinalVal(Event(Terms, Sort.Restrict(FuncOpt)))
          ),
          EquationNode(
            FinalVal(Event(Terms, Sort.Restrict(FuncOpt))),
            finalProb(fn, Terms)
          )
        )
        // NOTE(review): this branch uses applnNode where funcSet uses
        // typApplnNode — possibly inconsistent; confirm intended.
        val typFamilySet: Set[EquationNode] = TypFamilyOpt(fn).toSet.flatMap {
          f: ExstFunc =>
            Set(
              EquationNode(
                lhs,
                Coeff(applnNode) * finalProb(f, TypFamilies) * finalProb(
                  a,
                  termsWithTyp(f.dom)
                )
              ),
              EquationNode(
                finalProb(f, TypFamilies),
                finalProb(fn, Terms) /
                  FinalVal(Event(Terms, Sort.Restrict(TypFamilyOpt)))
              ),
              EquationNode(
                FinalVal(Event(Terms, Sort.Restrict(TypFamilyOpt))),
                finalProb(fn, Terms)
              )
            )
        }
        funcSet union (typFamilySet) union (formalEquations(fn)) union (formalEquations(
          a
        ))
      // identity type and reflexivity, folded with the type-producing variant
      case idt: IdentityTyp[u] =>
        typFuncFoldEquations(
          IdentityTyp.idFunc,
          Vector(idt.dom, idt.lhs, idt.rhs)
        )
      case idt: Refl[u] =>
        typFuncFoldEquations(IdentityTyp.reflTerm, Vector(idt.dom, idt.value))
      // dependent function type: island with the bound variable as boat
      case pd: PiDefn[u, v] =>
        val coeff = Coeff(tg.piNode)
        val boat = pd.variable
        val isle = tg.piIsle(pd.domain)
        val eqs = formalTypEquations(pd.value, ctx.addVariable(boat))
        val isleEqs =
          eqs.map(_.mapVars(InIsle.variableMap(boat, isle)))
        val outerEqs =
          eqs.filterNot(DerivedEquations.equationDepends(boat, _))
        // println(s"Outer equations size: ${outerEqs.size}")
        val bridgeEq = EquationNode(
          FinalVal(Elem(pd, Typs)),
          coeff * finalProb(pd.domain, Typs) * FinalVal(
            InIsle(Elem(pd.value, isle.islandOutput(boat)), boat, isle)
          )
        )
        // boat-independent values needing initialisation inside the island
        val initVarElems = (eqs union formalTypEquations(pd.domain, ctx))
          .flatMap { (eq) =>
            Expression.varVals(eq.rhs) union Expression.varVals(eq.lhs)
          }
          .collect {
            case FinalVal(Elem(el: Term, Terms))
                if !el.dependsOn(pd.variable) =>
              Elem(el, Terms): Elem[_]
            case FinalVal(Elem(el: Typ[Term], Typs))
                if !el.dependsOn(pd.variable) =>
              Elem(el, Typs): Elem[_]
            case FinalVal(Elem(el: ExstFunc, Funcs))
                if !el.func.dependsOn(pd.variable) =>
              Elem(el, Funcs): Elem[_]
            case FinalVal(Elem(el: ExstFunc, TypFamilies))
                if !el.func.dependsOn(boat) =>
              Elem(el, TypFamilies): Elem[_]
          } union (Set(Elem(pd.value, Typs))
          .filter(_.element.indepOf(boat))
          .map(t => t: Elem[_])) union typOpt(boat)
          .map(typ => Elem(typ, Typs))
          .toSet + Elem(boat, Terms)
        val isleIn: Set[EquationNode] =
          (initVarElems + Elem(boat, Terms)).map { el =>
            val rhs =
              scaleValue(boat, el)
            EquationNode(
              InitialVal(InIsle(el, boat, isle)),
              rhs
            )
          }
        val initInIsle = initEquations(initVarElems.map(FinalVal(_)))
          .map(_.mapVars(InIsle.variableMap(boat, isle)))
        (isleIn
          .union(isleEqs)
          .union(initInIsle) + bridgeEq) union (formalTypEquations(
          pd.domain,
          ctx
        )).union(outerEqs)
      // (non-dependent) function type: fresh bound variable for the island
      case pd: FuncTyp[u, v] =>
        val coeff = Coeff(tg.piNode)
        val boat = nextVar(pd.dom, ctx.variables)
        val isle = tg.piIsle(pd.domain)
        val eqs = formalTypEquations(pd.codom, ctx.addVariable(boat))
        val isleEqs =
          eqs.map(_.mapVars(InIsle.variableMap(boat, isle)))
        val outerEqs =
          eqs.filterNot(DerivedEquations.equationDepends(boat, _))
        val bridgeEq = EquationNode(
          FinalVal(Elem(pd, Typs)),
          coeff * finalProb(pd.domain, Typs) * FinalVal(
            InIsle(Elem(pd.codom, isle.islandOutput(boat)), boat, isle)
          )
        )
        val initVarElems = (eqs union formalTypEquations(pd.domain, ctx))
          .flatMap { (eq) =>
            Expression.varVals(eq.rhs) union Expression.varVals(eq.lhs)
          }
          .collect {
            case FinalVal(Elem(el: Term, Terms)) if !el.dependsOn(boat) =>
              Elem(el, Terms): Elem[_]
            case FinalVal(Elem(el: Typ[Term], Typs)) if !el.dependsOn(boat) =>
              Elem(el, Typs): Elem[_]
            case FinalVal(Elem(el: ExstFunc, Funcs))
                if !el.func.dependsOn(boat) =>
              Elem(el, Funcs): Elem[_]
            case FinalVal(Elem(el: ExstFunc, TypFamilies))
                if !el.func.dependsOn(boat) =>
              Elem(el, TypFamilies): Elem[_]
          } union (Set(Elem(pd.codom, Typs))
          .filter(_.element.indepOf(boat))
          .map(t => t: Elem[_])) union typOpt(boat)
          .map(typ => Elem(typ, Typs))
          .toSet + Elem(boat, Terms)
        val isleIn: Set[EquationNode] =
          (initVarElems + Elem(boat, Terms)).map { el =>
            val rhs =
              scaleValue(boat, el)
            EquationNode(
              InitialVal(InIsle(el, boat, isle)),
              rhs
            )
          }
        val initInIsle = initEquations(initVarElems.map(FinalVal(_)))
          .map(_.mapVars(InIsle.variableMap(boat, isle)))
        (isleIn
          .union(isleEqs)
          .union(initInIsle) + bridgeEq) union formalTypEquations(
          pd.domain,
          ctx
        ).union(outerEqs)
      // dependent pair type: island over the fibre's bound variable
      case pd: SigmaTyp[u, v] =>
        val coeff = Coeff(tg.sigmaNode)
        val boat = pd.fib.variable
        val isle = tg.sigmaIsle(pd.fib.dom)
        val eqs = formalEquations(pd.fib.value, ctx.addVariable(boat))
        val isleEqs =
          eqs.map(_.mapVars(InIsle.variableMap(boat, isle)))
        val outerEqs =
          eqs.filterNot(DerivedEquations.equationDepends(boat, _))
        val bridgeEq = EquationNode(
          FinalVal(Elem(pd, Typs)),
          coeff * finalProb(pd.fib.dom, Typs) * FinalVal(
            InIsle(Elem(pd.fib.value, isle.islandOutput(boat)), boat, isle)
          )
        )
        val initVarElems = (eqs union (formalEquations(pd.fib.dom, ctx)))
          .flatMap { (eq) =>
            Expression.varVals(eq.rhs) union Expression.varVals(eq.lhs)
          }
          .collect {
            case FinalVal(Elem(el: Term, Terms)) if !el.dependsOn(boat) =>
              Elem(el, Terms): Elem[_]
            case FinalVal(Elem(el: Typ[Term], Typs)) if !el.dependsOn(boat) =>
              Elem(el, Typs): Elem[_]
            case FinalVal(Elem(el: ExstFunc, Funcs))
                if !el.func.dependsOn(boat) =>
              Elem(el, Funcs): Elem[_]
            case FinalVal(Elem(el: ExstFunc, TypFamilies))
                if !el.func.dependsOn(boat) =>
              Elem(el, TypFamilies): Elem[_]
          } union (Set(Elem(pd.fib.value, Typs))
          .filter(_.element.indepOf(boat))
          .map(t => t: Elem[_])) union typOpt(boat)
          .map(typ => Elem(typ, Typs))
          .toSet + Elem(boat, Terms)
        val isleIn: Set[EquationNode] =
          (initVarElems + Elem(boat, Terms)).map { el =>
            val rhs =
              scaleValue(boat, el)
            EquationNode(
              InitialVal(InIsle(el, boat, isle)),
              rhs
            )
          }
        val initInIsle = initEquations(initVarElems.map(FinalVal(_)))
          .map(_.mapVars(InIsle.variableMap(boat, isle)))
        (isleIn
          .union(isleEqs)
          .union(initInIsle) + bridgeEq) union (formalEquations(
          pd.fib.dom,
          ctx
        )).union(outerEqs)
      // product type; see NOTE in formalEquations about the isle-based coefficient
      case pd: ProdTyp[u, v] =>
        val coeff = Coeff(tg.sigmaIsle(pd.first))
        val x = pd.first
        val isle = tg.sigmaIsle(pd.first)
        Set(
          EquationNode(
            FinalVal(Elem(pd, Typs)),
            coeff * FinalVal(
              InIsle(Elem(pd.second, isle.islandOutput(x)), x, isle)
            )
          )
        )
      // recursion/induction functions viewed as types
      case rf: RecFunc[u, v] =>
        val direct = EquationNode(
          FinalVal(Elem(rf, Typs)),
          Coeff(tg.targetInducNode(rf.typ)) *
            finalProb(rf.typ, TargetTyps)
        )
        val offspring =
          (rf.defnData :+ rf.typ).toSet.flatMap(formalEquations(_, ctx))
        offspring + direct
      case rf: InducFuncLike[u, v] =>
        val direct = EquationNode(
          FinalVal(Elem(rf, Typs)),
          Coeff(tg.targetInducNode(rf.typ)) *
            finalProb(rf.typ, TargetTyps)
        )
        val offspring =
          (rf.defnData :+ rf.typ).toSet.flatMap(formalEquations(_, ctx))
        offspring + direct
      // fallback: atomic types contribute no structural equations
      case t: Term => Set()
    }
    base union initEquations(Set(FinalVal(Elem(t, Typs))))
  }
def initEquations(s: Set[Expression]): Set[EquationNode] =
s.collect {
case FinalVal(Elem(t, rv)) =>
EquationNode(
finalProb(t, rv),
Coeff(Init(rv)) * InitialVal(Elem(t, rv))
)
}
  /** True when some atom of `exp` is either not an `InitialVal` at all, or is
    * an `InitialVal` of one of the basic random variables (Terms, Typs,
    * InducDefns, Goals); used by [[initPurge]].
    */
  def initCheck(exp: Expression) =
    Expression.atoms(exp).exists {
      case InitialVal(Elem(_, rv)) =>
        Set[RandomVar[_]](Terms, Typs, InducDefns, Goals).contains(rv)
      case _ => true
    }
  /** Keeps only equations whose right-hand side consists entirely of
    * `InitialVal`s of non-basic random variables (i.e. fails [[initCheck]]).
    */
  def initPurge(s: Set[EquationNode]) =
    s.filterNot(eq => initCheck(eq.rhs))
  /** All elements of the term state, in each of the random variables they
    * belong to: terms, types (also as target types), functions, type
    * families, terms with their own type and functions with their own domain.
    */
  def termStateElems(ts: TermState): Set[Elem[_]] =
    ts.terms.support.map { x =>
      Elem(x, Terms): Elem[_]
    } union
      ts.typs.support.map { x =>
        Elem(x, Typs): Elem[_]
      } union
      ts.typs.support.map { x =>
        Elem(x, TargetTyps): Elem[_]
      } union
      ts.terms.support.flatMap(ExstFunc.opt).map { x =>
        Elem(x, Funcs): Elem[_]
      } union
      ts.terms.condMap(TypFamilyOpt).support.map { x =>
        Elem(x, TypFamilies): Elem[_]
      } union
      ts.terms.support.map { x =>
        Elem(x, termsWithTyp(x.typ)): Elem[_]
      } union
      ts.terms.support.flatMap(ExstFunc.opt).map { x =>
        Elem(x, funcsWithDomain(x.dom)): Elem[_]
      }
  /** Initialization equations for every element of the term state, computed
    * over a parallel collection and converted back to a sequential set.
    */
  def termStateInit(ts: TermState): Set[EquationNode] =
    termStateElems(ts).par.map {
      case Elem(t, rv) =>
        EquationNode(
          finalProb(t, rv),
          Coeff(Init(rv)) * InitialVal(Elem(t, rv))
        )
    }.seq
  /** Vector analogue of [[termStateElems]], deduplicating each support
    * (`supp.distinct`) rather than going through sets.
    */
  def termStateElemVec(ts: TermState): Vector[Elem[_]] =
    ts.terms.supp.distinct.map { x =>
      Elem(x, Terms): Elem[_]
    } ++
      ts.typs.supp.distinct.map { x =>
        Elem(x, Typs): Elem[_]
      } ++
      ts.typs.supp.distinct.map { x =>
        Elem(x, TargetTyps): Elem[_]
      } ++
      ts.terms.supp.distinct.flatMap(ExstFunc.opt).map { x =>
        Elem(x, Funcs): Elem[_]
      } ++
      ts.terms.condMap(TypFamilyOpt).supp.distinct.map { x =>
        Elem(x, TypFamilies): Elem[_]
      } ++
      ts.terms.supp.distinct.map { x =>
        Elem(x, termsWithTyp(x.typ)): Elem[_]
      } ++
      ts.terms.supp.distinct.flatMap(ExstFunc.opt).map { x =>
        Elem(x, funcsWithDomain(x.dom)): Elem[_]
      }
  /** Parallel analogue of [[termStateElems]], reading the pre-split
    * distribution maps of a [[ParTermState]] (functions and type families are
    * already separated out, so no `ExstFunc.opt`/`TypFamilyOpt` pass is
    * needed for those).
    */
  def termStateElemPar(
      ts: ParTermState
  ): scala.collection.parallel.ParIterable[GeneratorVariables.Elem[_]] =
    ts.termDistMap.keys.map { x =>
      Elem(x, Terms): Elem[_]
    } ++
      ts.typDistMap.keys.map { x =>
        Elem(x, Typs): Elem[_]
      } ++
      ts.typDistMap.keys.map { x =>
        Elem(x, TargetTyps): Elem[_]
      } ++
      ts.funcDistMap.keys.map { x =>
        Elem(x, Funcs): Elem[_]
      } ++
      ts.typFamilyDistMap.keys.map { x =>
        Elem(x, TypFamilies): Elem[_]
      } ++
      ts.termDistMap.keys.map { x =>
        Elem(x, termsWithTyp(x.typ)): Elem[_]
      } ++
      ts.termDistMap.keys.flatMap(ExstFunc.opt).map { x =>
        Elem(x, funcsWithDomain(x.dom)): Elem[_]
      }
  /** Map from final-probability expressions to their initialization
    * equations, built in parallel from the parallel view of the term state.
    */
  def termStateInitMap(ts: TermState): ParMap[Expression, EquationNode] =
    termStateElemPar(ParTermState(ts)).map {
      case Elem(t, rv) =>
        finalProb(t, rv) -> EquationNode(
          finalProb(t, rv),
          Coeff(Init(rv)) * InitialVal(Elem(t, rv))
        )
    }.toMap
}
/** Default [[DerivedEquations]] instance, using the base generator nodes. */
object DE extends DerivedEquations()
object DerivedEquations {
  import shapeless._

  /** Whether the element `el` syntactically depends on the variable `x`.
    *
    * NOTE(review): the match covers terms, functions and inductive
    * definitions/structures only — an `Elem` holding any other element kind
    * would throw a `MatchError`; confirm no such elements occur.
    */
  def elemDepends(x: Term, el: Elem[_]): Boolean = el match {
    case Elem(a: Term, TermsWithTyp(typ: Typ[u])) =>
      a.dependsOn(x) || typ.dependsOn(x)
    case Elem(a: Term, _) => a.dependsOn(x)
    case Elem(f: ExstFunc, RandomVar.AtCoord(_, (dom: Typ[Term]) :: HNil)) =>
      f.func.dependsOn(x) || dom.dependsOn(x)
    case Elem(f: ExstFunc, _) => f.func.dependsOn(x)
    case Elem(ind: ExstInducDefn, _) =>
      ind.typFamily.dependsOn(x) || ind.intros
        .exists(_.dependsOn(x)) || ind.ind.constants.exists(_.dependsOn(x))
    case Elem(ind: ExstInducStrucs, _) => ind.constants.exists(_.dependsOn(x))
  }

  /** Whether the sort's predicate or restriction map mentions `x`. */
  def sortDepends(x: Term, sort: Sort[_, _]) = sort match {
    case All() => false
    case filter: Filter[u] =>
      filter.pred match {
        case WithTyp(typ) =>
          typ.dependsOn(x)
        case FuncWithDomFilter(dom) =>
          dom.dependsOn(x)
        case _ => false
      }
    case restrict: Restrict[u, v] =>
      restrict.optMap match {
        case FuncWithDom(dom) => dom.dependsOn(x)
        case _ => false
      }
  }

  /** Whether a generator variable depends on `x`, recursing through islands
    * (a term boat depending on `x` counts as a dependency).
    */
  def varDepends(x: Term, v: GeneratorVariables.Variable[_]): Boolean =
    v match {
      case Event(base, sort) => sortDepends(x, sort)
      case InIsle(isleVar, boat: Term, isle) =>
        boat.dependsOn(x) || varDepends(x, isleVar)
      case InIsle(isleVar, boat, isle) => varDepends(x, isleVar)
      case PairEvent(base1, base2, sort) => sortDepends(x, sort)
      case el @ Elem(element, randomVar) => elemDepends(x, el)
    }

  /** Whether either side of the equation mentions a variable depending on `x`. */
  def equationDepends(x: Term, eqn: EquationNode): Boolean =
    (varVals(eqn.lhs) union (varVals(eqn.rhs)))
      .map(_.variable)
      .exists(varDepends(x, _))
}
| siddhartha-gadgil/ProvingGround | jvmcore/src/main/scala/provingground/learning/DerivedEquations.scala | Scala | mit | 36,773 |
package views.html
import play.twirl.api._
import play.twirl.api.TemplateMagic._
import play.api.templates.PlayMagic._
import models._
import controllers._
import java.lang._
import java.util._
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
import play.api.i18n._
import play.core.j.PlayMagicForJava._
import play.mvc._
import play.data._
import play.api.data.Field
import play.mvc.Http.Context.Implicit._
import views.html._
/**/
// NOTE(review): Twirl-GENERATED template code (see the "-- GENERATED --"
// footer in this file). Do not edit by hand — edit the .scala.html source
// instead; this file is overwritten on the next template compilation.
object RubyPage extends BaseScalaTemplate[play.twirl.api.HtmlFormat.Appendable,Format[play.twirl.api.HtmlFormat.Appendable]](play.twirl.api.HtmlFormat) with play.twirl.api.Template0[play.twirl.api.HtmlFormat.Appendable] {
/**/
def apply():play.twirl.api.HtmlFormat.Appendable = {
_display_ {
Seq[Any](_display_(/*1.2*/main("Ruby Programming Language")/*1.35*/{_display_(Seq[Any](format.raw/*1.36*/("""
"""),format.raw/*2.3*/("""<div class="row">
<div class="col-sm-8" style="padding-top: 7%;">
<p>
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Morbi varius, libero molestie accumsan tempor, tellus ipsum pretium nulla, sit amet faucibus dolor nunc at velit.
Maecenas ornare ut magna id pretium. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Praesent a nisi ante. Proin aliquet erat nisi. Aenean mauris arcu, commodo eget erat sed, lobortis rhoncus tortor.
Nulla dictum arcu sit amet facilisis varius. Donec venenatis maximus velit rhoncus euismod. Maecenas blandit facilisis sem, nec lobortis nunc dignissim nec.
</p>
<br>
<p>
Sed elementum varius nibh fermentum iaculis. Ut sit amet quam a tortor tempor ullamcorper et in nisl. Aenean sodales est eros, id dictum mi bibendum ac. Integer non ex a elit tempus interdum a eget massa. Etiam sagittis diam aliquet facilisis aliquam. Nunc a malesuada leo.
Maecenas eu est iaculis, luctus mi in, fringilla nunc. Sed tincidunt vitae lacus quis ornare. Sed convallis tortor vel tellus tincidunt, vel pulvinar tellus elementum. Nunc sit amet libero at nibh sollicitudin vehicula. Nam accumsan volutpat magna non ornare. Maecenas mattis, leo id pretium posuere, orci lacus tristique diam, id fringilla mauris urna non turpis. Morbi tincidunt augue id scelerisque sagittis.
Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Lorem ipsum dolor sit amet, consectetur adipiscing elit.
</p>
<br>
<p>
Aliquam ante ipsum, malesuada et risus quis, ultrices euismod tellus. Nam vel aliquet mauris, aliquet aliquet mauris. Ut eget diam eget justo lacinia rhoncus id eget ex.
Sed dapibus eros sed tempor convallis. Praesent in turpis sed felis luctus eleifend eget egestas purus. Aenean nec libero et sapien rutrum ultrices eu vel mauris. Suspendisse a bibendum eros.
Vivamus sed purus enim. Morbi varius porta sem in pretium. Quisque non porta nibh. Cras ac nisi eros. Fusce nibh dolor, mollis in sollicitudin finibus, ultricies quis tortor.
Vivamus sollicitudin, leo id consectetur scelerisque, justo magna sollicitudin turpis, vel tristique justo lectus sed risus. Integer scelerisque orci nec ex euismod, a cursus elit consequat.
</p>
</div>
<div class="col-sm-4"><img class="img-responsive" src=""""),_display_(/*23.61*/routes/*23.67*/.Assets.versioned("img/ruby.png")),format.raw/*23.100*/("""" style="padding-top: 30%;" class="img-rounded" alt="Cinque Terre" width="304" height="236"></div>
</div>
""")))}))}
}
def render(): play.twirl.api.HtmlFormat.Appendable = apply()
def f:(() => play.twirl.api.HtmlFormat.Appendable) = () => apply()
def ref: this.type = this
}
/*
-- GENERATED --
DATE: Wed Apr 06 14:20:47 BST 2016
SOURCE: C:/Users/A587853/Documents/GitHub/WebApp/app/views/RubyPage.scala.html
HASH: d6e6ec5d867c1c084abed9f8ba665edabd2146d0
MATRIX: 801->1|842->34|880->35|910->39|3364->2466|3379->2472|3434->2505
LINES: 29->1|29->1|29->1|30->2|51->23|51->23|51->23
-- GENERATED --
*/
| cwrobertson/WebApp | target/scala-2.10/twirl/main/views/html/RubyPage.template.scala | Scala | mit | 4,194 |
/**
* Copyright 2015, deepsense.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.deepsense.deeplang.utils.aggregators
import org.apache.spark.rdd.RDD
object AggregatorBatch {

  /**
   * Wraps the raw results of a batched execution, providing type-safe access
   * to the result of each bundled aggregator.
   */
  case class BatchedResult(rawResults: Map[Aggregator[_, _], Any]) {

    /**
     * Returns the result produced by `aggregator`, cast back to its result
     * type `U`. The aggregator must have been part of the executed batch,
     * otherwise this throws `NoSuchElementException`.
     *
     * Fix: the previous signature `forAggregator[U, _]` declared a second
     * type parameter literally named `_` (it merely shadowed wildcard syntax;
     * deprecated in Scala 2.13 and illegal in Scala 3). Only `U` is needed.
     */
    def forAggregator[U](aggregator: Aggregator[U, _]): U = {
      rawResults(aggregator).asInstanceOf[U]
    }
  }

  /**
   * Executes all `aggregators` over `rdd` in a single pass and returns their
   * results bundled in a [[BatchedResult]].
   *
   * @param rdd         data to aggregate over
   * @param aggregators aggregators to run; each result is keyed by its aggregator
   */
  def executeInBatch[T](
    rdd: RDD[T],
    aggregators: Seq[Aggregator[_, T]]): BatchedResult = {
    // Fuse all aggregators into one so the RDD is traversed only once.
    val batch = SplitterAggregator[Any, T](aggregators.map(_.asInstanceOf[Aggregator[Any, T]]))
    val results = batch.execute(rdd)
    // Results come back positionally; re-key them by the originating aggregator.
    val rawResultsMap: Map[Aggregator[_, _], Any] = (aggregators zip results).map {
      case (aggregator, result) => aggregator -> result
    }.toMap
    BatchedResult(rawResultsMap)
  }

  /**
   * Aggregator that runs a sequence of aggregators in lock-step, keeping one
   * partial accumulator per wrapped aggregator (positionally aligned).
   */
  private case class SplitterAggregator[U, T](aggregators: Seq[Aggregator[U, T]])
    extends Aggregator[Seq[U], T] {

    override def initialElement: Seq[U] = aggregators.map(_.initialElement)

    override def mergeValue(accSeq: Seq[U], elem: T): Seq[U] = {
      (accSeq, aggregators).zipped.map { (acc, aggregator) =>
        aggregator.mergeValue(acc, elem)
      }
    }

    override def mergeCombiners(leftSeq: Seq[U], rightSeq: Seq[U]): Seq[U] = {
      (leftSeq, rightSeq, aggregators).zipped.map { (left, right, aggregator) =>
        aggregator.mergeCombiners(left, right)
      }
    }
  }
}
| deepsense-io/seahorse-workflow-executor | deeplang/src/main/scala/io/deepsense/deeplang/utils/aggregators/AggregatorBatch.scala | Scala | apache-2.0 | 2,068 |
package com.ilroberts
object Messages {
  // Marker for the pieces a message is assembled from.
  sealed trait MessageComponents
  // Header carrying the message's type discriminator string.
  case class MessageHeader(messageType: String) extends MessageComponents
  // Textual payload of a message.
  case class MessageBody(body: String) extends MessageComponents
  // Marker for complete, sendable messages.
  sealed trait Message
  // Simplest message form: a header plus a body.
  case class BasicMessage(header: MessageHeader, body: MessageBody) extends Message
  case class Person(name: String)
}
| ilroberts/KafkaConsumerService | src/main/scala/com/ilroberts/Messages.scala | Scala | mit | 361 |
package com.twitter.finatra.multiserver.Add2HttpServer
import com.twitter.finagle.http.{Request, Status}
import com.twitter.finatra.http.Controller
import com.twitter.finatra.http.response.ResponseBuilder
import com.twitter.finatra.httpclient.{HttpClient, RequestBuilder}
import com.twitter.util.Future
import javax.inject.Inject
/**
 * Controller exposing GET /add2, which adds 2 to the `num` query parameter by
 * delegating twice to the downstream "add 1" HTTP service.
 */
class Add2Controller @Inject() (add1Client: HttpClient, responseBuilder: ResponseBuilder)
  extends Controller {

  get("/add2") { request: Request =>
    // num + 2 == add1(add1(num)), computed as two sequential service calls.
    add1(request.getIntParam("num")).flatMap(add1)
  }

  /**
   * Calls the downstream service's GET /add1 with `num` and parses the body
   * as an Int. A non-OK downstream status fails the future with a
   * service-unavailable exception built from `responseBuilder`.
   */
  private def add1(num: Int): Future[Int] = {
    val httpRequest = RequestBuilder.get(s"/add1?num=$num")
    add1Client.execute(httpRequest).map { response =>
      response.status match {
        case Status.Ok => response.getContentString().toInt
        case _         => throw responseBuilder.serviceUnavailable.toException
      }
    }
  }
}
| twitter/finatra | inject-thrift-client-http-mapper/src/test/scala/com/twitter/finatra/multiserver/Add2HttpServer/Add2Controller.scala | Scala | apache-2.0 | 943 |
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy of
// the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
package com.cloudant.clouseau
import org.apache.commons.configuration.reloading.FileChangedReloadingStrategy
import org.apache.commons.configuration._
import org.slf4j.Logger
import org.slf4j.LoggerFactory
import scalang._
object Main extends App {
  val logger = LoggerFactory.getLogger("clouseau.main")
  // Fail fast: any uncaught exception on any thread logs and terminates the VM.
  Thread.setDefaultUncaughtExceptionHandler(
    new Thread.UncaughtExceptionHandler {
      def uncaughtException(t: Thread, e: Throwable) {
        logger.error("Uncaught exception: " + e.getMessage)
        System.exit(1)
      }
    }
  )
  // Load and monitor configuration file.
  // System properties are added to the composite first, so they take
  // precedence over values from the INI file in commons-configuration's
  // lookup order.
  val config = new CompositeConfiguration()
  config.addConfiguration(new SystemConfiguration())
  // Config file path may be given as the first CLI argument.
  val fileName = if (args.length > 0) args(0) else "clouseau.ini"
  val reloadableConfig = new HierarchicalINIConfiguration(fileName)
  // Re-read the INI file automatically when it changes on disk.
  reloadableConfig.setReloadingStrategy(new FileChangedReloadingStrategy)
  config.addConfiguration(reloadableConfig)
  val name = config.getString("clouseau.name", "clouseau@127.0.0.1")
  val cookie = config.getString("clouseau.cookie", "monster")
  val closeIfIdleEnabled = config.getBoolean("clouseau.close_if_idle", false)
  val idleTimeout = config.getInt("clouseau.idle_check_interval_secs", 300)
  if (closeIfIdleEnabled) {
    logger.info("Idle timout is enabled and will check the indexer idle status every %d seconds".format(idleTimeout))
  }
  // Start the Erlang-interop node (scalang) under the Clouseau supervisor.
  val nodeconfig = NodeConfig(
    typeFactory = ClouseauTypeFactory,
    typeEncoder = ClouseauTypeEncoder,
    typeDecoder = ClouseauTypeDecoder)
  val node = Node(name, cookie, nodeconfig)
  ClouseauSupervisor.start(node, config)
  logger.info("Clouseau running as " + name)
}
| cloudant-labs/clouseau | src/main/scala/com/cloudant/clouseau/Main.scala | Scala | apache-2.0 | 2,215 |
package com.taig.tmpltr.engine
import play.api.mvc.Content
import com.taig.tmpltr.markup
/**
 * Implicit conversions that allow raw Play [[Content]] to be used where the
 * templating DSL expects `body`/`head` markup elements.
 * NOTE(review): implicit conversions are applied silently by the compiler;
 * keep the scope of these imports narrow.
 */
package object html
{
	implicit def bodyFromContent( content: Content ): markup.body = body( content )
	implicit def headFromContent( content: Content ): markup.head = head( content )
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.analysis
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.types._
/**
* Throws user facing errors when passed invalid queries that fail to analyze.
*/
trait CheckAnalysis {

  /**
   * Override to provide additional checks for correct analysis.
   * These rules will be evaluated after our built-in check rules.
   */
  val extendedCheckRules: Seq[LogicalPlan => Unit] = Nil

  /** Aborts the check by throwing a user-facing [[AnalysisException]]. */
  protected def failAnalysis(msg: String): Nothing = {
    throw new AnalysisException(msg)
  }

  /** True when more than one generator expression occurs across `exprs`. */
  protected def containsMultipleGenerators(exprs: Seq[Expression]): Boolean = {
    exprs.flatMap(_.collect {
      case e: Generator => e
    }).length > 1
  }

  /**
   * Validates an analyzed plan, throwing [[AnalysisException]] at the first
   * problem found. Traverses bottom-up so the root cause is reported rather
   * than a cascade of follow-on resolution failures.
   */
  def checkAnalysis(plan: LogicalPlan): Unit = {
    // We transform up and order the rules so as to catch the first possible failure instead
    // of the result of cascading resolution failures.
    plan.foreachUp {
      case p if p.analyzed => // Skip already analyzed sub-plans
      case u: UnresolvedRelation =>
        u.failAnalysis(s"Table not found: ${u.tableIdentifier}")
      case operator: LogicalPlan =>
        // First pass: expression-level checks within this operator.
        operator transformExpressionsUp {
          case a: Attribute if !a.resolved =>
            val from = operator.inputSet.map(_.name).mkString(", ")
            a.failAnalysis(s"cannot resolve '${a.prettyString}' given input columns $from")
          case e: Expression if e.checkInputDataTypes().isFailure =>
            e.checkInputDataTypes() match {
              case TypeCheckResult.TypeCheckFailure(message) =>
                e.failAnalysis(
                  s"cannot resolve '${e.prettyString}' due to data type mismatch: $message")
            }
          case c: Cast if !c.resolved =>
            failAnalysis(
              s"invalid cast from ${c.child.dataType.simpleString} to ${c.dataType.simpleString}")
          case WindowExpression(UnresolvedWindowFunction(name, _), _) =>
            failAnalysis(
              s"Could not resolve window function '$name'. " +
              "Note that, using window functions currently requires a HiveContext")
          case w @ WindowExpression(windowFunction, windowSpec) if windowSpec.validate.nonEmpty =>
            // The window spec is not valid.
            val reason = windowSpec.validate.get
            failAnalysis(s"Window specification $windowSpec is not valid because $reason")
        }

        // Second pass: operator-specific semantic checks.
        operator match {
          case f: Filter if f.condition.dataType != BooleanType =>
            failAnalysis(
              s"filter expression '${f.condition.prettyString}' " +
                s"of type ${f.condition.dataType.simpleString} is not a boolean.")

          case j @ Join(_, _, _, Some(condition)) if condition.dataType != BooleanType =>
            failAnalysis(
              s"join condition '${condition.prettyString}' " +
                s"of type ${condition.dataType.simpleString} is not a boolean.")

          case j @ Join(_, _, _, Some(condition)) =>
            // Binary and map types have no reliable ordering/equality semantics
            // for join keys, so reject them anywhere in the condition.
            def checkValidJoinConditionExprs(expr: Expression): Unit = expr match {
              case p: Predicate =>
                p.asInstanceOf[Expression].children.foreach(checkValidJoinConditionExprs)
              case e if e.dataType.isInstanceOf[BinaryType] =>
                failAnalysis(s"binary type expression ${e.prettyString} cannot be used " +
                  "in join conditions")
              case e if e.dataType.isInstanceOf[MapType] =>
                failAnalysis(s"map type expression ${e.prettyString} cannot be used " +
                  "in join conditions")
              case _ => // OK
            }

            checkValidJoinConditionExprs(condition)

          case Aggregate(groupingExprs, aggregateExprs, child) =>
            // Every non-aggregate attribute in the select list must appear in
            // the GROUP BY clause (semantically, not just syntactically).
            def checkValidAggregateExpression(expr: Expression): Unit = expr match {
              case _: AggregateExpression => // OK
              case e: Attribute if !groupingExprs.exists(_.semanticEquals(e)) =>
                failAnalysis(
                  s"expression '${e.prettyString}' is neither present in the group by, " +
                    s"nor is it an aggregate function. " +
                    "Add to group by or wrap in first() (or first_value) if you don't care " +
                    "which value you get.")
              case e if groupingExprs.exists(_.semanticEquals(e)) => // OK
              case e if e.references.isEmpty => // OK
              case e => e.children.foreach(checkValidAggregateExpression)
            }

            def checkValidGroupingExprs(expr: Expression): Unit = expr.dataType match {
              case BinaryType =>
                failAnalysis(s"binary type expression ${expr.prettyString} cannot be used " +
                  "in grouping expression")
              case m: MapType =>
                failAnalysis(s"map type expression ${expr.prettyString} cannot be used " +
                  "in grouping expression")
              case _ => // OK
            }

            aggregateExprs.foreach(checkValidAggregateExpression)
            groupingExprs.foreach(checkValidGroupingExprs)

          case Sort(orders, _, _) =>
            orders.foreach { order =>
              if (!RowOrdering.isOrderable(order.dataType)) {
                failAnalysis(
                  s"sorting is not supported for columns of type ${order.dataType.simpleString}")
              }
            }

          case s @ SetOperation(left, right) if left.output.length != right.output.length =>
            failAnalysis(
              s"${s.nodeName} can only be performed on tables with the same number of columns, " +
                s"but the left table has ${left.output.length} columns and the right has " +
                s"${right.output.length}")

          case _ => // Fallbacks to the following checks
        }

        // Generic structural checks applied to every operator.
        operator match {
          case o if o.children.nonEmpty && o.missingInput.nonEmpty =>
            val missingAttributes = o.missingInput.mkString(",")
            val input = o.inputSet.mkString(",")

            failAnalysis(
              s"resolved attribute(s) $missingAttributes missing from $input " +
                s"in operator ${operator.simpleString}")

          case p @ Project(exprs, _) if containsMultipleGenerators(exprs) =>
            failAnalysis(
              s"""Only a single table generating function is allowed in a SELECT clause, found:
                 | ${exprs.map(_.prettyString).mkString(",")}""".stripMargin)

          // Special handling for cases when self-join introduce duplicate expression ids.
          case j @ Join(left, right, _, _) if left.outputSet.intersect(right.outputSet).nonEmpty =>
            val conflictingAttributes = left.outputSet.intersect(right.outputSet)
            failAnalysis(
              s"""
                 |Failure when resolving conflicting references in Join:
                 |$plan
                 |Conflicting attributes: ${conflictingAttributes.mkString(",")}
                 |""".stripMargin)

          case o if !o.resolved =>
            failAnalysis(
              s"unresolved operator ${operator.simpleString}")

          case o if o.expressions.exists(!_.deterministic) &&
            !o.isInstanceOf[Project] && !o.isInstanceOf[Filter] =>
            failAnalysis(
              s"""nondeterministic expressions are only allowed in Project or Filter, found:
                 | ${o.expressions.map(_.prettyString).mkString(",")}
                 |in operator ${operator.simpleString}
               """.stripMargin)

          case _ => // Analysis successful!
        }
    }
    // User-supplied checks run only after all built-in checks have passed.
    extendedCheckRules.foreach(_(plan))

    plan.foreach(_.setAnalyzed())
  }
}
| pronix/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala | Scala | apache-2.0 | 8,568 |
package breeze.linalg
/**
 * A Euclidean distance metric implementation between two points:
 * the L2 norm of the difference vector (norm exponent 2.0).
 */
object euclideanDistance extends NormBasedDistance {
  override protected def normConstant: Double = 2.0
}
| scalanlp/breeze | math/src/main/scala/breeze/linalg/functions/euclideanDistance.scala | Scala | apache-2.0 | 203 |
package memnets.models.neuro.swta.fsm
import memnets.model.Activation._
import memnets.model._
/**
 * A transition of a soft-WTA finite-state machine: a gate unit receives input
 * from both the source state and the external input signal, and excites the
 * target state with weight 2.0 — presumably so the gate fires only when both
 * are active (TODO confirm against the gain/threshold conventions of `Y`).
 */
class Transition private[fsm] (val src: State, val tgt: State, val input: Y)(implicit sys: DynamicSystem) {
  val gate = Y(
    name = s"${src.y.id} > ${tgt.y.id}",
    // name = s"${src.name}>${tgt.name}",
    decay = 0.2,
    tau = 20.0,
    act = Relu,
    scale = 4.0,
  )
  // Wire the gate: driven by the source state and the input, driving the target.
  src.y --> gate
  input --> gate
  gate --> tgt.y w = 2.0
}
| MemoryNetworks/memnets | models/src/main/scala/memnets/models/neuro/swta/fsm/Transition.scala | Scala | apache-2.0 | 438 |
/** Copyright 2015 TappingStone, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prediction.controller.java
import scala.collection.JavaConversions._
import scala.reflect.ClassTag
import scala.reflect.runtime.universe._
/** Internal Java utilities. */
object JavaUtils {
  // These "fake" tags are adopted from Spark's Java API. Scala requires a
  // ClassTag / Manifest in some generic positions, but this system never
  // inspects the actual tagged type, so an AnyRef-based tag keeps the
  // compiler happy without carrying real type information.

  /** A ClassTag usable for any type parameter `T`; backed by `AnyRef`. */
  def fakeClassTag[T]: ClassTag[T] =
    ClassTag.AnyRef.asInstanceOf[ClassTag[T]]

  /** A Manifest usable for any type parameter `T`; backed by `AnyRef`. */
  def fakeManifest[T]: Manifest[T] =
    manifest[AnyRef].asInstanceOf[Manifest[T]]
}
| nvoron23/PredictionIO | core/src/main/scala/controller/java/JavaUtils.scala | Scala | apache-2.0 | 1,279 |
/*
* Copyright 1998-2016 Linux.org.ru
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ru.org.linux.user
import java.sql.ResultSet
import javax.sql.DataSource
import org.springframework.jdbc.core.RowMapper
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate
import org.springframework.scala.jdbc.core.JdbcTemplate
import org.springframework.stereotype.Repository
import scala.collection.JavaConverters._
@Repository
class RemarkDao(ds:DataSource) {
  private val jdbcTemplate = new JdbcTemplate(ds)
  private val namedTemplate = new NamedParameterJdbcTemplate(jdbcTemplate.javaTemplate)

  /** Number of remarks `user` has left about other users (0 when none). */
  def remarkCount(user: User):Int = {
    val count:Option[Int] = jdbcTemplate.queryForObject[Integer](
      "SELECT count(*) as c FROM user_remarks WHERE user_id=?",
      user.getId).map(_.toInt)

    count.getOrElse(0)
  }

  def hasRemarks(user: User):Boolean = remarkCount(user) > 0

  /**
   * Get the remark that `user` left about `ref`, if any.
   * @param user logged-in user (author of the remark)
   * @param ref user the remark is about
   */
  def getRemark(user: User, ref: User): Option[Remark] = {
    jdbcTemplate.queryAndMap("SELECT id, ref_user_id, remark_text FROM user_remarks WHERE user_id=? AND ref_user_id=?", user.getId, ref.getId) { (rs, _) =>
      new Remark(rs)
    }.headOption
  }

  /**
   * Remarks `user` left about each of `refs`, keyed by the target user's id.
   * Returns an empty map when `refs` is empty (avoids an invalid `IN ()` query).
   */
  def getRemarks(user: User, refs:java.lang.Iterable[User]): java.util.Map[Integer, Remark] = {
    val r: Map[Integer, Remark] = if (refs.asScala.isEmpty) {
      Map.empty
    } else {
      namedTemplate.query(
        "SELECT id, ref_user_id, remark_text FROM user_remarks WHERE user_id=:user AND ref_user_id IN (:list)",
        Map("list" -> refs.asScala.map(_.getId).toSeq.asJavaCollection, "user" -> user.getId).asJava,
        new RowMapper[(Integer, Remark)]() {
          override def mapRow(rs: ResultSet, rowNum: Int) = {
            val remark = new Remark(rs)
            Integer.valueOf(remark.getRefUserId) -> remark
          }
        }
      ).asScala.toMap
    }

    r.asJava
  }

  // Inserts a new remark; silently does nothing for empty text.
  private def setRemark(user: User, ref: User, text: String):Unit = {
    if (text.nonEmpty) {
      jdbcTemplate.update("INSERT INTO user_remarks (user_id,ref_user_id,remark_text) VALUES (?,?,?)", user.getId, ref.getId, text)
    }
  }

  // Updates an existing remark by id; empty text deletes the row instead.
  private def updateRemark(id: Int, text: String):Unit = {
    if (text.isEmpty) {
      jdbcTemplate.update("DELETE FROM user_remarks WHERE id=?", id)
    } else {
      jdbcTemplate.update("UPDATE user_remarks SET remark_text=? WHERE id=?", text, id)
    }
  }

  /**
   * Save or update the remark `user` left about `ref`.
   * A zero-length remark is removed from the database.
   *
   * @param user logged-in user (author of the remark)
   * @param ref user the remark is about
   * @param text remark text
   */
  def setOrUpdateRemark(user: User, ref: User, text: String) = {
    getRemark(user, ref) match {
      case Some(remark) ⇒ updateRemark(remark.getId, text)
      case None ⇒ setRemark(user, ref, text)
    }
  }

  /**
   * Get a page of the remarks left by `user`.
   * sortorder == 1 sorts by remark text; any other value sorts by the
   * target user's nick.
   * @param user logged-in user
   */
  def getRemarkList(user: User, offset: Int, sortorder: Int, limit: Int): java.util.List[Remark] = {
    val qs = if (sortorder == 1) {
      "SELECT id, ref_user_id, remark_text FROM user_remarks WHERE user_id=? ORDER BY remark_text ASC LIMIT ? OFFSET ?"
    } else {
      "SELECT user_remarks.id as id, user_remarks.user_id as user_id, user_remarks.ref_user_id as ref_user_id, user_remarks.remark_text as remark_text FROM user_remarks, users WHERE user_remarks.user_id=? AND users.id = user_remarks.ref_user_id ORDER BY users.nick ASC LIMIT ? OFFSET ?"
    }

    jdbcTemplate.queryAndMap(qs, user.getId, limit, offset) { (rs, _) ⇒
      new Remark(rs)
    }.asJava
  }
}
| fat0troll/lorsource | src/main/scala/ru/org/linux/user/RemarkDao.scala | Scala | apache-2.0 | 4,409 |
/*
* Copyright 2014 DataGenerator Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.finra.datagenerator.common.NodeData
import org.finra.datagenerator.common.Graph.Node
import org.finra.datagenerator.common.GraphEngine.{DefaultNodeGenerator, DataNodeGenerator}
import scala.annotation.unchecked.{uncheckedVariance => uV}
import scala.collection.mutable.ListBuffer
/**
* Type & metadata for data stored in node of a graph
*/
object NodeDataType {
/**
* Each object extending this trait can be thought of as a value in an enumeration of all node data types.
* @tparam T_NodeData Data that uses this type.
* @tparam T_NodeDataStub Stub type for the data
* @tparam T_NodeDataTypes Data types type for this data
* @tparam T_ThisType This type
*/
abstract class NodeDataType[+T_NodeData <: NodeData,
+T_NodeDataStub <: NodeDataStub[T_ThisType, T_NodeData, T_NodeDataTypes, T_NodeDataStub],
+T_NodeDataTypes <: NodeDataTypes[T_NodeData @uV, T_NodeDataStub @uV, T_ThisType @uV, T_NodeDataTypes @uV],
+T_ThisType <: NodeDataType[T_NodeData, T_NodeDataStub, T_NodeDataTypes, T_ThisType]] extends DisplayableData {
/**
* Specifies what generator to use when trying to create a new child or parent from a specified data node
*/
lazy val dataNodeGenerator: DataNodeGenerator[T_NodeData, T_NodeData, T_NodeDataStub, T_ThisType, T_NodeDataTypes] =
new DefaultNodeGenerator[T_NodeData, T_NodeData, T_NodeDataStub, T_ThisType, T_NodeDataTypes]()
/**
* Specifies what generator to use when trying to create a new child or parent from a specified stub node
*/
lazy val dataStubNodeGenerator: DataNodeGenerator[T_NodeDataStub, T_NodeData, T_NodeDataStub, T_ThisType, T_NodeDataTypes] =
new DefaultNodeGenerator[T_NodeDataStub, T_NodeData, T_NodeDataStub, T_ThisType, T_NodeDataTypes]()
/**
* Wrapper that specifies all possible node data types in this domain, and possible metadata/groupings thereof.
*/
def nodeDataTypes: T_NodeDataTypes
/**
* Specifies an object of a class implementing DataTransitions, which define, from each type, how to create a
* child or parent of each allowable child/parent type.
*/
lazy val dataTransitions = nodeDataTypes.dataTransitions//.asInstanceOf[DataTransitions[T_NodeData, T_ThisType, T_NodeDataStub]]
/**
* Creates a stub wrapper around this type
* @return Stub wrapper around this type
*/
def asStub: T_NodeDataStub
/**
* Name that uniquely identifies this type
*/
val name: String = getClass.getSimpleName.replaceAllLiterally("$", "")
/**
* Lowercased name
* @return Lowercased name
*/
def nameLowercase: String = name.toLowerCase()
/**
* Name that uniquely identifies this type, to be used for display
* @return Default displayable data ID (defaults to name)
*/
override def defaultDisplayableDataId: String = name
/**
* Types are considered the same based on their unique names
* @param that Value to compare to
* @return True if equals
*/
override def equals(that: Any): Boolean = {
that match {
case that: this.type => true
case _ => false
}
}
    /**
     * Hash code of the type's unique name. Consistent with `equals`: equal
     * (i.e. identical) instances necessarily share a name and thus a hash.
     * @return Hashcode (name.hashCode)
     */
    override def hashCode: Int = name.hashCode
/**
* Gets a sequence of all the node data types that may be created as a child from the current node
* @param nodeOfThisType Node from which to get allowable child types
* @return Sequence of NodeDataType
*/
def getAllowableChildTypes(nodeOfThisType: Node[T_NodeDataStub @uV]): Seq[T_ThisType]
/**
* Gets a sequence of all the node data types that may be created as a parent from the current node
* @param nodeOfThisType Node from which to get allowable parent types
* @return Sequence of NodeDataType
*/
def getAllowableParentTypes(nodeOfThisType: Node[T_NodeDataStub @uV]): Seq[T_ThisType]
/**
* Gets a sequence of predicates used by the child-generator methods. These predicate functions determine, for each allowable child type,
* whether or not, when deciding to add a child, if the generator will choose to add a child of that type.
* @param node Node from which we may wish to add a child
* @param maxToGenerate Maximum number of nodes in graph
* @param probabilityMultiplier Example usage: Predicate function might determine true/false based on a random function.
* Multiplier will make that function more probable to return true.
* @tparam T_DisplayableData Type of data. In this case it will be either a Data or a Stub.
* @return Mutable list of each possible child data type mapped to a predicate function used to determine whether or not to add a child of that type
*/
def childStateTransitionPredicates[T_DisplayableData <: DisplayableData](node: Node[T_DisplayableData @uV]
, maxToGenerate:Int, probabilityMultiplier: Int): ListBuffer[(T_ThisType @uV, (Node[T_DisplayableData @uV] => Boolean))]
/**
* Gets a sequence of predicates used by the parent-generator methods. These predicate functions determine, for each allowable parent type,
* whether or not, when deciding to add a parent, if the generator will choose to add a parent of that type.
* @param node Node from which we may wish to add a parent
* @param maxToGenerate Maximum number of nodes in graph
* @param probabilityMultiplier Example usage: Predicate function might determine true/false based on a random function.
* Multiplier will make that function more probable to return true.
* @tparam T_DisplayableData Type of data. In this case it will be either a Data or a Stub.
* @return Mutable list of each possible parent data type mapped to a predicate function used to determine whether or not to add a parent of that type
*/
def parentStateTransitionPredicates[T_DisplayableData <: DisplayableData](node: Node[T_DisplayableData @uV], maxToGenerate:Int
, probabilityMultiplier: Int): ListBuffer[(T_ThisType @uV, (Node[T_DisplayableData @uV] => Boolean))]
/**
* Probabilistically link this node to another existing node such that the other node should be a parent of this node.
* Does nothing by default, but may be overridden. Also not yet called from anywhere, but eventually we may have some generation engine that uses this.
* @param dataNode Node to be linked as a child of another node that already exists in the same graph
*/
def probabilisticallyLinkToExistingParentDataNode(dataNode: Node[T_NodeData @uV]): Unit
/**
 * Probabilistically link `stubNode` to another node already in the same graph, such that the existing node becomes a parent of `stubNode`.
 * Does nothing by default, but may be overridden. Also not yet called from anywhere, but eventually we may have some generation engine that uses this.
 * @param stubNode Node to be linked as a child of another node that already exists in the same graph
 */
def probabilisticallyLinkToExistingParentStubNode(stubNode: Node[T_NodeDataStub @uV]): Unit
/**
 * Given a stub node, creates new child stub nodes based on the allowable child types
 * and the defined predicates that decide whether a child of a given type should be added.
 * @param stubNode Node of a data stub
 * @param maxToGenerate Maximum number of nodes in graph
 * @param probabilityMultiplier Makes a probabilistic predicate more likely to return true
 * @return Vector of all added child nodes
 */
def generateAndAddChildStubs(stubNode: Node[T_NodeDataStub @uV], maxToGenerate: Int, probabilityMultiplier: Int): Vector[Node[T_NodeDataStub @uV]] = {
  // Resolve the per-type predicates first, then hand them to the generator
  // together with a callback that links each generated type as a child stub.
  val predicates = childStateTransitionPredicates[T_NodeDataStub @uV](stubNode, maxToGenerate, probabilityMultiplier)
  val linkAsChild = (nextEventType: T_ThisType) => stubNode.addChild(nextEventType.asStub)
  dataStubNodeGenerator.generateLinkedNodes(stubNode, maxToGenerate, predicates, linkAsChild)
}
/**
 * Given a stub node, creates new parent stub nodes based on the allowable parent types
 * and the defined predicates that decide whether a parent of a given type should be added.
 * @param stubNode Node of a data stub
 * @param maxToGenerate Maximum number of nodes in graph
 * @param probabilityMultiplier Makes a probabilistic predicate more likely to return true
 * @return Vector of all added parent nodes
 */
def generateAndAddParentStubs(stubNode: Node[T_NodeDataStub @uV], maxToGenerate: Int, probabilityMultiplier: Int): Vector[Node[T_NodeDataStub @uV]] = {
  // Resolve the per-type predicates first, then hand them to the generator
  // together with a callback that links each generated type as a parent stub.
  val predicates = parentStateTransitionPredicates[T_NodeDataStub @uV](stubNode, maxToGenerate, probabilityMultiplier)
  val linkAsParent = (nextEventType: T_ThisType) => stubNode.addParent(nextEventType.asStub)
  dataStubNodeGenerator.generateLinkedNodes(stubNode, maxToGenerate, predicates, linkAsParent)
}
/**
 * Given a data node, creates new child data nodes based on the allowable child types
 * and the defined predicates that decide whether a child of a given type should be added.
 * @param dataNode Node of the data to extend with children
 * @param maxToGenerate Maximum number of nodes in graph
 * @param probabilityMultiplier Makes a probabilistic predicate more likely to return true
 * @return Vector of all added child nodes
 */
def generateAndAddChildNodes(dataNode: Node[T_NodeData @uV], maxToGenerate: Int, probabilityMultiplier: Int): Vector[Node[T_NodeData @uV]] = {
  // Delegates child creation to dataTransitions, which generates random data
  // for each child type the predicates approve.
  val predicates = childStateTransitionPredicates[T_NodeData @uV](dataNode, maxToGenerate, probabilityMultiplier)
  val addChildData = (nextEventType: T_ThisType) => dataTransitions.addRandomlyGeneratedChildData(dataNode, nextEventType)
  dataNodeGenerator.generateLinkedNodes(dataNode, maxToGenerate, predicates, addChildData)
}
/**
 * Given a data node, creates new parent data nodes based on the allowable parent types
 * and the defined predicates that decide whether a parent of a given type should be added.
 * @param dataNode Node of the data to extend with parents
 * @param maxToGenerate Maximum number of nodes in graph
 * @param probabilityMultiplier Makes a probabilistic predicate more likely to return true
 * @return Vector of all added parent nodes
 */
def generateAndAddParentNodes(dataNode: Node[T_NodeData @uV], maxToGenerate: Int, probabilityMultiplier: Int): Vector[Node[T_NodeData @uV]] = {
  // Delegates parent creation to dataTransitions, which generates random data
  // for each parent type the predicates approve.
  val predicates = parentStateTransitionPredicates[T_NodeData @uV](dataNode, maxToGenerate, probabilityMultiplier)
  val addParentData = (nextEventType: T_ThisType) => dataTransitions.addRandomlyGeneratedParentData(dataNode, nextEventType)
  dataNodeGenerator.generateLinkedNodes(dataNode, maxToGenerate, predicates, addParentData)
}
}
}
| FINRAOS/DataGenerator | dg-common/src/main/scala/org/finra/datagenerator/common/NodeData/NodeDataType.scala | Scala | apache-2.0 | 12,031 |
package scala.pickling.pickler
import scala.pickling._
import PicklingErrors.NothingIsNotUnpicklable
trait NothingPicklers {
  /**
   * Pickler/unpickler instance for the bottom type [[Nothing]].
   *
   * No value of type [[Nothing]] can ever exist, so neither direction is
   * actually reachable at runtime; both operations exist only to satisfy
   * the [[AbstractPicklerUnpickler]] contract.
   */
  implicit object NothingPicklerUnpickler extends AbstractPicklerUnpickler[Nothing] {
    override def tag: FastTypeTag[Nothing] = FastTypeTag.Nothing

    /** Statically uncallable: no argument of type [[Nothing]] can be supplied. */
    override def pickle(picklee: Nothing, builder: PBuilder): Unit =
      throw new NotImplementedError // identical to the behavior of `???`

    /** Always throws: [[Nothing]] cannot be reconstructed from pickled data. */
    override def unpickle(tag: String, reader: PReader): Any =
      throw NothingIsNotUnpicklable
  }
}
| scala/pickling | core/src/main/scala/scala/pickling/pickler/Nothing.scala | Scala | bsd-3-clause | 600 |
/*
* Copyright (c) 2014 Mario Pastorelli (pastorelli.mario@gmail.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shapeless.examples
import shapeless._
import scala.collection.immutable.{:: => Cons}
import scala.util.{Try,Success,Failure}
/*
* This example shows how to createa a serializer/deserializer from CSV to
* products.
* */
/**
 * The class to serialize or deserialize in the CSV example.
 *
 * Marked `final`: case classes should not be extended, since a subclass would
 * break the generated `equals`/`hashCode`/`copy` contract.
 *
 * @param name    first name
 * @param surname last name
 * @param age     age in years
 * @param id      optional identifier (empty CSV field decodes as `None`)
 * @param weight  optional weight (empty CSV field decodes as `None`)
 * @param height  height value
 */
final case class Person(name: String, surname: String, age: Int, id: Option[Int], weight: Option[Int], height: Int)
object CSVExample extends App {
  import CSVConverter._

  // Sample input: one person per line. stripMargin makes the literal robust
  // against source indentation — without it, any indentation of the
  // continuation lines would leak into the first field of each row.
  val input =
    """John,Carmack,23,0,,100
      |Brian,Fargo,35,,,110
      |Markus,Persson,32,,,120""".stripMargin

  // Derives a CSVConverter[List[Person]] via shapeless Generic and prints
  // the Try-wrapped parse result.
  println(CSVConverter[List[Person]].from(input))
}
// Implementation
/** Exception signalling a CSV parse/decode failure; `s` describes the error. */
class CSVException(s: String) extends RuntimeException(s)
/** Trait for types that can be serialized to/deserialized from CSV */
trait CSVConverter[T] {
/** Parses a CSV fragment into a `T`; a failed `Try` signals a parse error. */
def from(s: String): Try[T]
/** Renders `t` back into its CSV text representation. */
def to(t: T): String
}
/** Instances of the CSVConverter trait */
object CSVConverter {

  /** Summons the converter instance for `T`. */
  def apply[T](implicit st: Lazy[CSVConverter[T]]): CSVConverter[T] = st.value

  /** Builds a failed result wrapping a [[CSVException]] with message `s`. */
  def fail(s: String) = Failure(new CSVException(s))

  // Primitives

  implicit def stringCSVConverter: CSVConverter[String] = new CSVConverter[String] {
    def from(s: String): Try[String] = Success(s)
    def to(s: String): String = s
  }

  implicit def intCsvConverter: CSVConverter[Int] = new CSVConverter[Int] {
    def from(s: String): Try[Int] = Try(s.toInt)
    def to(i: Int): String = i.toString
  }

  /**
   * Converts each line of `l` with `ec`, failing fast on the first bad line.
   * NOTE: not tail-recursive, so extremely large inputs could overflow the stack.
   */
  def listCsvLinesConverter[A](l: List[String])(implicit ec: CSVConverter[A])
      : Try[List[A]] = l match {
    case Nil => Success(Nil)
    case Cons(s, ss) => for {
      x  <- ec.from(s)
      xs <- listCsvLinesConverter(ss)(ec)
    } yield Cons(x, xs)
  }

  implicit def listCsvConverter[A](implicit ec: CSVConverter[A])
      : CSVConverter[List[A]] = new CSVConverter[List[A]] {
    def from(s: String): Try[List[A]] = listCsvLinesConverter(s.split("\n").toList)(ec)
    // Join with a real newline so `to` round-trips with `from`.
    // (The previous separator "\\n" emitted a literal backslash-n,
    // which `from` would then fail to split on.)
    def to(l: List[A]): String = l.map(ec.to).mkString("\n")
  }

  // HList

  implicit def deriveHNil: CSVConverter[HNil] =
    new CSVConverter[HNil] {
      // Only the empty string decodes to HNil: all fields must be consumed.
      def from(s: String): Try[HNil] = s match {
        case "" => Success(HNil)
        case s  => fail("Cannot convert '" ++ s ++ "' to HNil")
      }
      def to(n: HNil) = ""
    }

  implicit def deriveHCons[V, T <: HList]
    (implicit scv: Lazy[CSVConverter[V]], sct: Lazy[CSVConverter[T]])
      : CSVConverter[V :: T] =
    new CSVConverter[V :: T] {
      def from(s: String): Try[V :: T] = {
        // span always yields a pair, so no fallback case is needed
        // (the old `case _` branch was unreachable).
        val (before, after) = s.span(_ != ',')
        for {
          front <- scv.value.from(before)
          back  <- sct.value.from(if (after.isEmpty) after else after.tail)
        } yield front :: back
      }
      def to(ft: V :: T): String =
        scv.value.to(ft.head) ++ "," ++ sct.value.to(ft.tail)
    }

  implicit def deriveHConsOption[V, T <: HList]
    (implicit scv: Lazy[CSVConverter[V]], sct: Lazy[CSVConverter[T]])
      : CSVConverter[Option[V] :: T] =
    new CSVConverter[Option[V] :: T] {
      def from(s: String): Try[Option[V] :: T] = {
        val (before, after) = s.span(_ != ',')
        val rest = if (after.isEmpty) after else after.tail
        // First try to decode the field as Some(V); on failure, fall back to
        // None — but only when the field is actually empty. The old fallback
        // re-parsed `s.tail` (dropping a single character), which silently
        // mis-aligned all remaining fields when a non-empty field failed to
        // parse; such failures now propagate as errors.
        val parsed = for {
          front <- scv.value.from(before)
          back  <- sct.value.from(rest)
        } yield Some(front) :: back
        if (before.isEmpty) parsed.orElse(sct.value.from(rest).map(None :: _))
        else parsed
      }
      def to(ft: Option[V] :: T): String =
        ft.head.map(scv.value.to(_) ++ ",").getOrElse("") ++ sct.value.to(ft.tail)
    }

  // Anything with a Generic: maps a case class to its HList representation
  // and reuses the HList converters above.
  implicit def deriveClass[A, R](implicit gen: Generic.Aux[A, R], conv: CSVConverter[R])
      : CSVConverter[A] = new CSVConverter[A] {
    def from(s: String): Try[A] = conv.from(s).map(gen.from)
    def to(a: A): String = conv.to(gen.to(a))
  }
}
| lambdista/shapeless | examples/src/main/scala/shapeless/examples/csv.scala | Scala | apache-2.0 | 4,631 |
/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
package docs.home.scaladsl.immutable
// NOTE(review): the //#immutable markers appear to be documentation-snippet
// delimiters (this file lives under docs/manual); keep them adjacent to the
// definition they bracket.
//#immutable
final case class ImmutableUser(name: String, email: String)
//#immutable
| lagom/lagom | docs/manual/scala/concepts/code/docs/home/scaladsl/immutable/ImmutableUser.scala | Scala | apache-2.0 | 192 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.